rohan-arora-ibm committed
Commit 91627a8 · unverified · 1 parent: 7af85b6

fix: remove duplicate nested directory
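The fix collapses an accidentally duplicated snapshot directory: note the repeated v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7 segment in every path below. As a minimal sketch of this kind of cleanup (not necessarily the exact steps used for this commit), assuming the nested copy only duplicates its parent and using the snapshot root from the paths below:

    import shutil
    from pathlib import Path

    root = Path("snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7")
    nested = root / root.name  # the duplicated inner directory with the same name

    if nested.is_dir():
        # Move each entry of the nested copy up one level, keeping the outer
        # copy whenever both levels contain an entry with the same name.
        for entry in nested.iterdir():
            target = root / entry.name
            if not target.exists():
                shutil.move(str(entry), str(target))
        # Drop the now-redundant nested directory (and any duplicate leftovers).
        shutil.rmtree(nested)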

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-30-09.384366.json +0 -124
  2. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-31-09.380495.json +0 -124
  3. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-32-09.380433.json +0 -124
  4. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-33-09.380792.json +0 -144
  5. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-34-09.380692.json +0 -145
  6. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-35-09.381008.json +0 -145
  7. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-36-09.381009.json +0 -145
  8. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-37-09.381177.json +0 -145
  9. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-38-09.381218.json +0 -173
  10. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-39-09.381400.json +0 -191
  11. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-40-09.381500.json +0 -159
  12. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-41-09.381896.json +0 -159
  13. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-42-09.382037.json +0 -159
  14. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-43-09.381859.json +0 -178
  15. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-44-09.381897.json +0 -197
  16. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-45-09.382080.json +0 -197
  17. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-46-09.382244.json +0 -197
  18. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-47-09.382368.json +0 -197
  19. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-48-09.382475.json +0 -197
  20. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-49-09.382660.json +0 -197
  21. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-50-09.382774.json +0 -197
  22. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-51-09.382943.json +0 -178
  23. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-52-09.383006.json +0 -178
  24. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-53-09.383213.json +0 -210
  25. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-54-09.383291.json +0 -210
  26. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-55-09.383832.json +0 -210
  27. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-56-09.383899.json +0 -178
  28. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-57-09.385743.json +0 -178
  29. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-58-09.384546.json +0 -178
  30. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-59-09.384642.json +0 -178
  31. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-00-09.384838.json +0 -198
  32. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-01-09.384887.json +0 -198
  33. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-02-09.384962.json +0 -198
  34. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-03-09.385430.json +0 -198
  35. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-04-09.385226.json +0 -198
  36. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-05-09.385457.json +0 -198
  37. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-06-09.386364.json +0 -198
  38. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-07-09.385816.json +0 -198
  39. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-08-09.386632.json +0 -198
  40. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-09-09.386069.json +0 -178
  41. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-10-09.386150.json +0 -178
  42. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-11-09.386746.json +0 -178
  43. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-12-09.387432.json +0 -197
  44. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-13-09.386938.json +0 -197
  45. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-14-09.387023.json +0 -197
  46. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-15-09.387101.json +0 -197
  47. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-16-09.387264.json +0 -197
  48. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-17-09.387695.json +0 -197
  49. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_in_alerting_state_2025-12-15T175546.713186Z.json +0 -64
  50. snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/ground_truth.yaml +0 -67
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-30-09.384366.json DELETED
@@ -1,124 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.403798540145985e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.403798540145985e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.39999999999999997)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "3.9999999999999997e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.7999999999999999)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "7.999999999999999e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.066666666666666)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.6066666666666666e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "neo4j",
-       "instance": "10.0.167.3:10250",
-       "namespace": "kube-system",
-       "pod": "topology-monitor-0",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:27:29.2925657Z",
-     "value": "1e+00"
-   }
- ]
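Each deleted alerts_at_*.json snapshot holds a JSON array of alert objects in the shape shown above (labels, annotations, state, activeAt, value), matching the items returned by a Prometheus /api/v1/alerts-style endpoint. A minimal sketch of reading one snapshot and listing its firing alerts, assuming a local copy of the file (the path here is illustrative):

    import json
    from pathlib import Path

    path = Path("alerts_at_2025-12-15T17-30-09.384366.json")
    alerts = json.loads(path.read_text())

    # Print the name, severity, and current value of every firing alert.
    for alert in alerts:
        if alert["state"] == "firing":
            labels = alert["labels"]
            print(labels["alertname"], labels.get("severity"), alert["value"])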
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-31-09.380495.json DELETED
@@ -1,124 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.406754461538461e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.406754461538461e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.7999999999999999)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "7.999999999999999e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.4666666666666663)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.4666666666666663e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.41111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.741111111111111e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "neo4j",
-       "instance": "10.0.167.3:10250",
-       "namespace": "kube-system",
-       "pod": "topology-monitor-0",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:27:29.2925657Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-32-09.380433.json DELETED
@@ -1,124 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.4110767857142855e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.4110767857142855e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5777777777777777)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "5.777777777777777e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.133333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.133333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.688888888888886)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.7688888888888886e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "neo4j",
-       "instance": "10.0.167.3:10250",
-       "namespace": "kube-system",
-       "pod": "topology-monitor-0",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:27:29.2925657Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-33-09.380792.json DELETED
@@ -1,144 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "5.2873043478260865e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "5.2873043478260865e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.4)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.74e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.7777777777777777)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "7.777777777777777e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.4555555555555553)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.4555555555555553e+00"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "neo4j",
-       "instance": "10.0.167.3:10250",
-       "namespace": "kube-system",
-       "pod": "topology-monitor-0",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:27:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.167.70:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-ntc4f",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:32:29.2925657Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-34-09.380692.json DELETED
@@ -1,145 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6444444444444444)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "6.444444444444444e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.222222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.222222222222222e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.677777777777774)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.6677777777777774e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.167.70:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-ntc4f",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:32:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "5.281429787234042e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "5.281429787234042e+04"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-35-09.381008.json DELETED
@@ -1,145 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "5.513684210526316e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "5.513684210526316e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.111111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.111111111111111e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.24444444444444)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.724444444444444e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "6e-01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.167.70:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-ntc4f",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:32:29.2925657Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-36-09.381009.json DELETED
@@ -1,145 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.756890566037736e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.756890566037736e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6666666666666666)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "6.666666666666666e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.322222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.322222222222222e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.577777777777776)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.6577777777777776e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.167.70:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-ntc4f",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:32:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-37-09.381177.json DELETED
@@ -1,145 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.77560412371134e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.77560412371134e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 18.755555555555553)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.8755555555555553e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.8888888888888888)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "8.888888888888888e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.633333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.633333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.167.70:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-ntc4f",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:32:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-38-09.381218.json DELETED
@@ -1,173 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.77855e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.77855e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.4888888888888887)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.4888888888888887e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.34444444444444)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.734444444444444e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.7333333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "7.333333333333333e-01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.167.70:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-ntc4f",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:32:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-39-09.381400.json DELETED
@@ -1,191 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-675fd7b5c5-jlpzp",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "89.25% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "8.924692251991311e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "product-catalog",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "product-catalog-7c7f8b68dc-prcsr",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "33.54% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "3.35431654676259e-01"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.784170212765958e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.166.142:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:24:02.466081961Z",
-     "value": "4.784170212765958e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.7333333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "7.333333333333333e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.3666666666666665)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.3666666666666665e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.099999999999998)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.7099999999999998e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-40-09.381500.json DELETED
@@ -1,159 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-675fd7b5c5-jlpzp",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "90.11% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "9.01110713006091e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "product-catalog",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "product-catalog-7c7f8b68dc-prcsr",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "34.05% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "3.4049079754601225e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6444444444444444)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "6.444444444444444e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.211111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.211111111111111e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.911111111111108)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.7911111111111108e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-41-09.381896.json DELETED
@@ -1,159 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-675fd7b5c5-jlpzp",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "90.38% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "9.038255273507328e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "product-catalog",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "product-catalog-7c7f8b68dc-prcsr",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "35.47% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "3.5471204188481675e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.4555555555555553)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.4555555555555553e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.93333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.793333333333333e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.8222222222222221)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "8.222222222222221e-01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-42-09.382037.json DELETED
@@ -1,159 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-675fd7b5c5-jlpzp",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "90.72% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "9.072453622681134e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "product-catalog",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "product-catalog-7c7f8b68dc-prcsr",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "35.36% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "3.536423841059603e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5777777777777777)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "5.777777777777777e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.0111111111111108)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.0111111111111108e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 15.02222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.502222222222222e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-43-09.381859.json DELETED
@@ -1,178 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-675fd7b5c5-jlpzp",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "92.05% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "9.205489346334417e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "product-catalog",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "product-catalog-7c7f8b68dc-prcsr",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "35.27% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "3.5270629991126884e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend-proxy",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-proxy-6b4d584985-kxvn6",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "73.94% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:27:45.017672005Z",
-     "value": "7.394468704512372e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.3777777777777777)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "3.777777777777777e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.7888888888888888)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "7.888888888888888e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.988888888888887)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.6988888888888887e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-44-09.381897.json DELETED
@@ -1,197 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend-proxy",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-proxy-6b4d584985-kxvn6",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "74.06% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:27:45.017672005Z",
-     "value": "7.405693950177936e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "opentelemetry-collector",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "otel-collector-564d9c7987-ls78p",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "29.8% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:28:15.017672005Z",
-     "value": "2.980269989615784e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-675fd7b5c5-jlpzp",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "92.16% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "9.216034054629302e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "product-catalog",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "product-catalog-7c7f8b68dc-prcsr",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "35.99% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "3.599439775910364e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.3444444444444443)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.3444444444444443e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 15.866666666666664)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5866666666666664e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.711111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "7.11111111111111e-01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-45-09.382080.json DELETED
@@ -1,197 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-675fd7b5c5-jlpzp",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "88.84% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "8.884010245151848e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "product-catalog",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "product-catalog-7c7f8b68dc-prcsr",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "35.76% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:45.017672005Z",
-     "value": "3.5756302521008404e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "frontend-proxy",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "frontend-proxy-6b4d584985-kxvn6",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "72.74% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:27:45.017672005Z",
-     "value": "7.27371469949312e-01"
-   },
-   {
-     "labels": {
-       "alertname": "CPUThrottlingHigh",
-       "container": "opentelemetry-collector",
-       "instance": "10.0.167.38:10250",
-       "namespace": "otel-demo",
-       "pod": "otel-collector-564d9c7987-ls78p",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "info"
-     },
-     "annotations": {
-       "description": "29.41% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-       "summary": "Processes experience elevated CPU throttling."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:28:15.017672005Z",
-     "value": "2.940524565940155e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.1222222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.1222222222222222e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.41111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "1.641111111111111e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5333333333333332)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:25:19.431042761Z",
-     "value": "5.333333333333332e-01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:29.2925657Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.093305093Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.3.12:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:51.749891565Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-15T17:23:03.0192481Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-46-09.382244.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "72.34% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "7.234347048300537e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "28.89% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:28:15.017672005Z",
- "value": "2.8892921960072593e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "88.19% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "8.819469669451508e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "35.41% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.541395752058951e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5333333333333332)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "5.333333333333332e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.9333333333333332)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "9.333333333333332e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.144444444444442)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.6144444444444442e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-47-09.382368.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "34.65% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.4650455927051677e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "71.53% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "7.153419593345656e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "28.4% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:28:15.017672005Z",
- "value": "2.840383931745467e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "87.3% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "8.730158730158729e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6666666666666666)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.666666666666666e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.2444444444444445)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.2444444444444445e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.911111111111108)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.6911111111111108e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-48-09.382475.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "88.76% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "8.876445086705202e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "35.75% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.574796399485641e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "71.61% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "7.161449173408371e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "27.66% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:28:15.017672005Z",
- "value": "2.7657226921662376e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.7777777777777777)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "7.777777777777777e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.3999999999999997)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.3999999999999997e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.499999999999996)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.6499999999999996e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-49-09.382660.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "70.88% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "7.088278931750741e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "27.01% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:28:15.017672005Z",
- "value": "2.701109570041609e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "93.66% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.366401158580739e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "35.04% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.5039717563989403e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6444444444444444)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.444444444444444e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.1888888888888887)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.1888888888888887e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.455555555555552)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.7455555555555552e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-50-09.382774.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "95.3% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.529536611248673e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "34.52% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.451910408432147e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "71.07% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "7.106741573033707e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "26.61% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:28:15.017672005Z",
- "value": "2.6612605971249537e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.7555555555555554)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "7.555555555555554e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.4666666666666666)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.4666666666666666e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 16.588888888888885)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.6588888888888885e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-51-09.382943.json DELETED
@@ -1,178 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "64.27% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.426744610887833e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "97.42% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.741592920353981e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "33.81% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.380749170222854e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6222222222222221)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.222222222222221e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.1888888888888887)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.1888888888888887e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 18.188888888888886)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.8188888888888886e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-52-09.383006.json DELETED
@@ -1,178 +0,0 @@
- [
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.4444444444444444)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.444444444444444e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.7888888888888888)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "7.888888888888888e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 9.133333333333333)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "9.133333333333333e+00"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "53.51% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "5.350812407680946e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "94.45% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.445419445419445e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "32.71% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.27116212338594e-01"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-53-09.383213.json DELETED
@@ -1,210 +0,0 @@
-[
-  {
-    "labels": {
-      "alertname": "KubeSchedulerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.093305093Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "PrometheusNotConnectedToAlertmanagers",
-      "container": "prometheus",
-      "endpoint": "http-web",
-      "instance": "100.96.3.12:9090",
-      "job": "prometheus-kube-prometheus-prometheus",
-      "namespace": "prometheus",
-      "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-      "service": "prometheus-kube-prometheus-prometheus",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-      "summary": "Prometheus is not connected to any Alertmanagers."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:51.749891565Z",
-    "value": "0e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeControllerManagerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.0192481Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend-proxy",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-proxy-6b4d584985-kxvn6",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "43.03% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:27:45.017672005Z",
-    "value": "4.303006638032018e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-675fd7b5c5-jlpzp",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "95.25% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "9.525374224169405e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "product-catalog",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "product-catalog-7c7f8b68dc-prcsr",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "36.34% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "3.633748801534037e-01"
-  },
-  {
-    "labels": {
-      "alertname": "KubeClientCertificateExpiration",
-      "instance": "10.0.166.142:443",
-      "job": "apiserver",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-      "summary": "Client certificate is about to expire."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:47:32.466081961Z",
-    "value": "5.815028571428572e+04"
-  },
-  {
-    "labels": {
-      "alertname": "KubeClientCertificateExpiration",
-      "instance": "10.0.166.142:443",
-      "job": "apiserver",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-      "summary": "Client certificate is about to expire."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:47:32.466081961Z",
-    "value": "5.815028571428572e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestLatency",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "checkout",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.3777777777777777)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "3.777777777777777e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.5222222222222221)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "5.222222222222221e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 15.588888888888887)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5588888888888887e+01"
-  },
-  {
-    "labels": {
-      "alertname": "Watchdog",
-      "severity": "none"
-    },
-    "annotations": {
-      "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-      "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:29.2925657Z",
-    "value": "1e+00"
-  }
-]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-54-09.383291.json DELETED
@@ -1,210 +0,0 @@
-[
-  {
-    "labels": {
-      "alertname": "KubeControllerManagerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.0192481Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend-proxy",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-proxy-6b4d584985-kxvn6",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "36.93% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:27:45.017672005Z",
-    "value": "3.6927122464312545e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-675fd7b5c5-jlpzp",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "93.48% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "9.347980155917789e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "product-catalog",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "product-catalog-7c7f8b68dc-prcsr",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "38.2% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "3.820009573958832e-01"
-  },
-  {
-    "labels": {
-      "alertname": "KubeClientCertificateExpiration",
-      "instance": "10.0.166.142:443",
-      "job": "apiserver",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-      "summary": "Client certificate is about to expire."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:47:32.466081961Z",
-    "value": "6.858e+04"
-  },
-  {
-    "labels": {
-      "alertname": "KubeClientCertificateExpiration",
-      "instance": "10.0.166.142:443",
-      "job": "apiserver",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-      "summary": "Client certificate is about to expire."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:47:32.466081961Z",
-    "value": "6.858e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestLatency",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "checkout",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.02222222222222222)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "2.222222222222222e-02"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.1222222222222222)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.222222222222222e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 11.588888888888887)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.1588888888888887e+01"
-  },
-  {
-    "labels": {
-      "alertname": "Watchdog",
-      "severity": "none"
-    },
-    "annotations": {
-      "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-      "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:29.2925657Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeSchedulerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.093305093Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "PrometheusNotConnectedToAlertmanagers",
-      "container": "prometheus",
-      "endpoint": "http-web",
-      "instance": "100.96.3.12:9090",
-      "job": "prometheus-kube-prometheus-prometheus",
-      "namespace": "prometheus",
-      "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-      "service": "prometheus-kube-prometheus-prometheus",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-      "summary": "Prometheus is not connected to any Alertmanagers."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:51.749891565Z",
-    "value": "0e+00"
-  }
-]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-55-09.383832.json DELETED
@@ -1,210 +0,0 @@
-[
-  {
-    "labels": {
-      "alertname": "KubeSchedulerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.093305093Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "PrometheusNotConnectedToAlertmanagers",
-      "container": "prometheus",
-      "endpoint": "http-web",
-      "instance": "100.96.3.12:9090",
-      "job": "prometheus-kube-prometheus-prometheus",
-      "namespace": "prometheus",
-      "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-      "service": "prometheus-kube-prometheus-prometheus",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-      "summary": "Prometheus is not connected to any Alertmanagers."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:51.749891565Z",
-    "value": "0e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeControllerManagerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.0192481Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend-proxy",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-proxy-6b4d584985-kxvn6",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "33.62% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:27:45.017672005Z",
-    "value": "3.362038033728023e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-675fd7b5c5-jlpzp",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "91.88% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "9.188111634650237e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "product-catalog",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "product-catalog-7c7f8b68dc-prcsr",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "39.42% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "3.942486085343228e-01"
-  },
-  {
-    "labels": {
-      "alertname": "KubeClientCertificateExpiration",
-      "instance": "10.0.166.142:443",
-      "job": "apiserver",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-      "summary": "Client certificate is about to expire."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:47:32.466081961Z",
-    "value": "6.8526e+04"
-  },
-  {
-    "labels": {
-      "alertname": "KubeClientCertificateExpiration",
-      "instance": "10.0.166.142:443",
-      "job": "apiserver",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-      "summary": "Client certificate is about to expire."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:47:32.466081961Z",
-    "value": "6.8526e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestLatency",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.08888888888888888)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "8.888888888888888e-02"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 11.066666666666665)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.1066666666666665e+01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "checkout",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.04444444444444444)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "4.444444444444444e-02"
-  },
-  {
-    "labels": {
-      "alertname": "Watchdog",
-      "severity": "none"
-    },
-    "annotations": {
-      "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-      "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:29.2925657Z",
-    "value": "1e+00"
-  }
-]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-56-09.383899.json DELETED
@@ -1,178 +0,0 @@
-[
-  {
-    "labels": {
-      "alertname": "KubeControllerManagerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.0192481Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend-proxy",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-proxy-6b4d584985-kxvn6",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "36.91% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:27:45.017672005Z",
-    "value": "3.690875232774674e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-675fd7b5c5-jlpzp",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "87.49% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "8.749124036440084e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "product-catalog",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "product-catalog-7c7f8b68dc-prcsr",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "41.07% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "4.1070681723128405e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestLatency",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 10.799999999999999)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.0799999999999999e+01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "checkout",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.19999999999999998)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.9999999999999998e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.39999999999999997)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "3.9999999999999997e-01"
-  },
-  {
-    "labels": {
-      "alertname": "Watchdog",
-      "severity": "none"
-    },
-    "annotations": {
-      "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-      "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:29.2925657Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeSchedulerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.093305093Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "PrometheusNotConnectedToAlertmanagers",
-      "container": "prometheus",
-      "endpoint": "http-web",
-      "instance": "100.96.3.12:9090",
-      "job": "prometheus-kube-prometheus-prometheus",
-      "namespace": "prometheus",
-      "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-      "service": "prometheus-kube-prometheus-prometheus",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-      "summary": "Prometheus is not connected to any Alertmanagers."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:51.749891565Z",
-    "value": "0e+00"
-  }
-]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-57-09.385743.json DELETED
@@ -1,178 +0,0 @@
-[
-  {
-    "labels": {
-      "alertname": "RequestLatency",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "checkout",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.28888888888888886)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "2.8888888888888886e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.4999999999999999)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "4.999999999999999e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 13.877777777777776)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.3877777777777776e+01"
-  },
-  {
-    "labels": {
-      "alertname": "Watchdog",
-      "severity": "none"
-    },
-    "annotations": {
-      "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-      "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:29.2925657Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeSchedulerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.093305093Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "PrometheusNotConnectedToAlertmanagers",
-      "container": "prometheus",
-      "endpoint": "http-web",
-      "instance": "100.96.3.12:9090",
-      "job": "prometheus-kube-prometheus-prometheus",
-      "namespace": "prometheus",
-      "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-      "service": "prometheus-kube-prometheus-prometheus",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-      "summary": "Prometheus is not connected to any Alertmanagers."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:51.749891565Z",
-    "value": "0e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeControllerManagerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.0192481Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-675fd7b5c5-jlpzp",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "90.38% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "9.037921348314606e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "product-catalog",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "product-catalog-7c7f8b68dc-prcsr",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "40.34% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "4.0338266384778015e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend-proxy",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-proxy-6b4d584985-kxvn6",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "43.71% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:27:45.017672005Z",
-    "value": "4.370761115297664e-01"
-  }
-]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-58-09.384546.json DELETED
@@ -1,178 +0,0 @@
-[
-  {
-    "labels": {
-      "alertname": "KubeControllerManagerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.0192481Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend-proxy",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-proxy-6b4d584985-kxvn6",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "50.8% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:27:45.017672005Z",
-    "value": "5.080313784086664e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-675fd7b5c5-jlpzp",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "92.21% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "9.221194879089617e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "product-catalog",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "product-catalog-7c7f8b68dc-prcsr",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "39.72% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "3.972425678586816e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestLatency",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "checkout",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.39999999999999997)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "3.9999999999999997e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.811111111111111)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "8.11111111111111e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.077777777777776)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.4077777777777776e+01"
-  },
-  {
-    "labels": {
-      "alertname": "Watchdog",
-      "severity": "none"
-    },
-    "annotations": {
-      "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-      "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:29.2925657Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeSchedulerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.093305093Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "PrometheusNotConnectedToAlertmanagers",
-      "container": "prometheus",
-      "endpoint": "http-web",
-      "instance": "100.96.3.12:9090",
-      "job": "prometheus-kube-prometheus-prometheus",
-      "namespace": "prometheus",
-      "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-      "service": "prometheus-kube-prometheus-prometheus",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-      "summary": "Prometheus is not connected to any Alertmanagers."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:51.749891565Z",
-    "value": "0e+00"
-  }
-]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T17-59-09.384642.json DELETED
@@ -1,178 +0,0 @@
-[
-  {
-    "labels": {
-      "alertname": "KubeControllerManagerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.0192481Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend-proxy",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-proxy-6b4d584985-kxvn6",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "56.31% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:27:45.017672005Z",
-    "value": "5.63114134542706e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "frontend",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "frontend-675fd7b5c5-jlpzp",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "90.06% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "9.006233956729006e-01"
-  },
-  {
-    "labels": {
-      "alertname": "CPUThrottlingHigh",
-      "container": "product-catalog",
-      "instance": "10.0.167.38:10250",
-      "namespace": "otel-demo",
-      "pod": "product-catalog-7c7f8b68dc-prcsr",
-      "service": "prometheus-kube-prometheus-kubelet",
-      "severity": "info"
-    },
-    "annotations": {
-      "description": "37.11% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
-      "summary": "Processes experience elevated CPU throttling."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:45.017672005Z",
-    "value": "3.7113821138211384e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestLatency",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5e+04"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "checkout",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.3333333333333333)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "3.333333333333333e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.5555555555555555)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "5.555555555555555e-01"
-  },
-  {
-    "labels": {
-      "alertname": "RequestErrorRate",
-      "namespace": "otel-demo",
-      "service_name": "frontend-proxy",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 15.077777777777776)"
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:25:19.431042761Z",
-    "value": "1.5077777777777776e+01"
-  },
-  {
-    "labels": {
-      "alertname": "Watchdog",
-      "severity": "none"
-    },
-    "annotations": {
-      "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-      "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:29.2925657Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "KubeSchedulerDown",
-      "severity": "critical"
-    },
-    "annotations": {
-      "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-      "summary": "Target disappeared from Prometheus target discovery."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:03.093305093Z",
-    "value": "1e+00"
-  },
-  {
-    "labels": {
-      "alertname": "PrometheusNotConnectedToAlertmanagers",
-      "container": "prometheus",
-      "endpoint": "http-web",
-      "instance": "100.96.3.12:9090",
-      "job": "prometheus-kube-prometheus-prometheus",
-      "namespace": "prometheus",
-      "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-      "service": "prometheus-kube-prometheus-prometheus",
-      "severity": "warning"
-    },
-    "annotations": {
-      "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-      "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-      "summary": "Prometheus is not connected to any Alertmanagers."
-    },
-    "state": "firing",
-    "activeAt": "2025-12-15T17:23:51.749891565Z",
-    "value": "0e+00"
-  }
-]
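
Each deleted snapshot above is a JSON array of alert objects in the shape exposed by the Prometheus `/api/v1/alerts` endpoint: per-alert `labels`, `annotations`, `state`, `activeAt`, and a stringified `value`. As a minimal sketch of how such a snapshot can be consumed (assuming Python 3; the relative path below is illustrative and points at one of the files removed in this commit):

```python
import json
from collections import Counter
from pathlib import Path

# One of the snapshot files removed by this commit (path is illustrative).
snapshot = Path("Scenario-1/alerts/alerts_at_2025-12-15T17-59-09.384642.json")

# Each file is a JSON array of alert objects with "labels", "annotations",
# "state", "activeAt", and "value" fields.
alerts = json.loads(snapshot.read_text())

# Tally firing alerts by their "severity" label.
severities = Counter(
    alert["labels"].get("severity", "none")
    for alert in alerts
    if alert["state"] == "firing"
)

for severity, count in severities.most_common():
    print(f"{severity}: {count}")
```

Against the 17-59 snapshot, for example, this would report two critical target-down alerts, five warnings, three info-level throttling alerts, and the always-firing Watchdog.
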
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-00-09.384838.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "59.11% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "5.911223385059545e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "90.86% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.085545722713864e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "38.26% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.825726141078838e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.4888888888888888)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.888888888888888e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.8555555555555554)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "8.555555555555554e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 13.31111111111111)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.331111111111111e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-01-09.384887.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "61.53% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.152716593245228e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "92.6% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.259528130671507e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "37.67% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.766973280770915e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.3333333333333333)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "3.333333333333333e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.5111111111111111)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "5.111111111111111e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 17.022222222222222)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.7022222222222222e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-02-09.384962.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "60.8% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.079566003616637e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "89.94% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "8.994334277620397e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "37.68% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.768054375531011e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5333333333333332)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "5.333333333333332e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.1444444444444444)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.1444444444444444e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.12222222222222)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.412222222222222e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-03-09.385430.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "60.1% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.009887005649718e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "87.28% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "8.727941176470588e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "36.95% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.694602896007021e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.2444444444444444)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "2.444444444444444e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.4666666666666666)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.666666666666666e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.888888888888888)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.4888888888888888e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-04-09.385226.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5111111111111111)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "5.111111111111111e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.8999999999999999)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "8.999999999999999e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 15.755555555555553)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5755555555555553e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "37.86% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.7860780984719866e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "60.47% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.046762589928057e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "87.5% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "8.750439985920451e-01"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-05-09.385457.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "36.97% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.6971830985915494e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "62.67% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.267472610502456e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "88.02% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "8.802197802197803e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.244444444444442)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.4244444444444442e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5333333333333332)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "5.333333333333332e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.0777777777777777)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.0777777777777777e+00"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-06-09.386364.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.39999999999999997)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "3.9999999999999997e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.7777777777777777)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "7.777777777777777e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.099999999999998)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.4099999999999998e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "61.4% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.139773143066227e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "91.15% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.115398616672734e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "38.23% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.8228941684665224e-01"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-07-09.385816.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "61.52% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.151634926338484e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "90.98% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.097798628653916e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "38.92% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.89168765743073e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6888888888888888)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.888888888888888e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.2999999999999998)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.2999999999999998e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.83333333333333)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.483333333333333e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-08-09.386632.json DELETED
@@ -1,198 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "60.24% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.024137931034482e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "94.55% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.454676778620441e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "38.95% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.895348837209302e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.711111111111109)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.4711111111111109e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.42222222222222217)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.2222222222222217e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.7555555555555554)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "7.555555555555554e-01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.167.70:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-qbgbr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:59:59.2925657Z",
- "value": "1e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-09-09.386069.json DELETED
@@ -1,178 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "56.81% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "5.680580762250453e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "96.63% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.663444325098461e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "38.69% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.8689217758985206e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.31111111111111)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.431111111111111e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.4444444444444444)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.444444444444444e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.7888888888888889)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "7.888888888888889e-01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-10-09.386150.json DELETED
@@ -1,178 +0,0 @@
- [
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "54.71% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "5.47121378804547e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "96.13% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.612599565532223e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "39.23% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.9225076186329993e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6666666666666666)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.666666666666666e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.2444444444444445)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.2444444444444445e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.688888888888886)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.4688888888888886e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-11-09.386746.json DELETED
@@ -1,178 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "54.47% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "5.447125621007807e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "93.69% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.36896807720861e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "39.02% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.902439024390244e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.9444444444444442)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "9.444444444444442e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 15.177777777777775)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5177777777777775e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.4888888888888888)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.888888888888888e-01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-12-09.387432.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "94.94% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.494002998500749e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "37.88% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.7876254180602004e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "55.27% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "5.527246992215145e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "28.33% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:56:15.017672005Z",
- "value": "2.833333333333333e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6666666666666666)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.666666666666666e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.0555555555555554)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.0555555555555554e+00"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.33333333333333)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.433333333333333e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-13-09.386938.json DELETED
@@ -1,197 +0,0 @@
1
- [
2
- {
3
- "labels": {
4
- "alertname": "PrometheusNotConnectedToAlertmanagers",
5
- "container": "prometheus",
6
- "endpoint": "http-web",
7
- "instance": "100.96.3.12:9090",
8
- "job": "prometheus-kube-prometheus-prometheus",
9
- "namespace": "prometheus",
10
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
11
- "service": "prometheus-kube-prometheus-prometheus",
12
- "severity": "warning"
13
- },
14
- "annotations": {
15
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
16
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
17
- "summary": "Prometheus is not connected to any Alertmanagers."
18
- },
19
- "state": "firing",
20
- "activeAt": "2025-12-15T17:23:51.749891565Z",
21
- "value": "0e+00"
22
- },
23
- {
24
- "labels": {
25
- "alertname": "KubeControllerManagerDown",
26
- "severity": "critical"
27
- },
28
- "annotations": {
29
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
30
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
31
- "summary": "Target disappeared from Prometheus target discovery."
32
- },
33
- "state": "firing",
34
- "activeAt": "2025-12-15T17:23:03.0192481Z",
35
- "value": "1e+00"
36
- },
37
- {
38
- "labels": {
39
- "alertname": "CPUThrottlingHigh",
40
- "container": "product-catalog",
41
- "instance": "10.0.167.38:10250",
42
- "namespace": "otel-demo",
43
- "pod": "product-catalog-7c7f8b68dc-prcsr",
44
- "service": "prometheus-kube-prometheus-kubelet",
45
- "severity": "info"
46
- },
47
- "annotations": {
48
- "description": "37.48% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
49
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
50
- "summary": "Processes experience elevated CPU throttling."
51
- },
52
- "state": "firing",
53
- "activeAt": "2025-12-15T17:23:45.017672005Z",
54
- "value": "3.7479061976549416e-01"
55
- },
56
- {
57
- "labels": {
58
- "alertname": "CPUThrottlingHigh",
59
- "container": "frontend-proxy",
60
- "instance": "10.0.167.38:10250",
61
- "namespace": "otel-demo",
62
- "pod": "frontend-proxy-6b4d584985-kxvn6",
63
- "service": "prometheus-kube-prometheus-kubelet",
64
- "severity": "info"
65
- },
66
- "annotations": {
67
- "description": "57.44% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
68
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
69
- "summary": "Processes experience elevated CPU throttling."
70
- },
71
- "state": "firing",
72
- "activeAt": "2025-12-15T17:27:45.017672005Z",
73
- "value": "5.744211686879824e-01"
74
- },
75
- {
76
- "labels": {
77
- "alertname": "CPUThrottlingHigh",
78
- "container": "opentelemetry-collector",
79
- "instance": "10.0.167.38:10250",
80
- "namespace": "otel-demo",
81
- "pod": "otel-collector-564d9c7987-ls78p",
82
- "service": "prometheus-kube-prometheus-kubelet",
83
- "severity": "info"
84
- },
85
- "annotations": {
86
- "description": "28.41% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
87
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
88
- "summary": "Processes experience elevated CPU throttling."
89
- },
90
- "state": "firing",
91
- "activeAt": "2025-12-15T17:56:15.017672005Z",
92
- "value": "2.840537595350527e-01"
93
- },
94
- {
95
- "labels": {
96
- "alertname": "CPUThrottlingHigh",
97
- "container": "frontend",
98
- "instance": "10.0.167.38:10250",
99
- "namespace": "otel-demo",
100
- "pod": "frontend-675fd7b5c5-jlpzp",
101
- "service": "prometheus-kube-prometheus-kubelet",
102
- "severity": "info"
103
- },
104
- "annotations": {
105
- "description": "94.64% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
106
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
107
- "summary": "Processes experience elevated CPU throttling."
108
- },
109
- "state": "firing",
110
- "activeAt": "2025-12-15T17:23:45.017672005Z",
111
- "value": "9.46384479717813e-01"
112
- },
113
- {
114
- "labels": {
115
- "alertname": "RequestLatency",
116
- "namespace": "otel-demo",
117
- "service_name": "frontend-proxy",
118
- "severity": "warning"
119
- },
120
- "annotations": {
121
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
122
- },
123
- "state": "firing",
124
- "activeAt": "2025-12-15T17:25:19.431042761Z",
125
- "value": "1.5e+04"
126
- },
127
- {
128
- "labels": {
129
- "alertname": "RequestErrorRate",
130
- "namespace": "otel-demo",
131
- "service_name": "checkout",
132
- "severity": "warning"
133
- },
134
- "annotations": {
135
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.6222222222222221)"
136
- },
137
- "state": "firing",
138
- "activeAt": "2025-12-15T17:25:19.431042761Z",
139
- "value": "6.222222222222221e-01"
140
- },
141
- {
142
- "labels": {
143
- "alertname": "RequestErrorRate",
144
- "namespace": "otel-demo",
145
- "service_name": "frontend",
146
- "severity": "warning"
147
- },
148
- "annotations": {
149
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.2)"
150
- },
151
- "state": "firing",
152
- "activeAt": "2025-12-15T17:25:19.431042761Z",
153
- "value": "1.2e+00"
154
- },
155
- {
156
- "labels": {
157
- "alertname": "RequestErrorRate",
158
- "namespace": "otel-demo",
159
- "service_name": "frontend-proxy",
160
- "severity": "warning"
161
- },
162
- "annotations": {
163
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.933333333333332)"
164
- },
165
- "state": "firing",
166
- "activeAt": "2025-12-15T17:25:19.431042761Z",
167
- "value": "1.4933333333333332e+01"
168
- },
169
- {
170
- "labels": {
171
- "alertname": "Watchdog",
172
- "severity": "none"
173
- },
174
- "annotations": {
175
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
176
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
177
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
178
- },
179
- "state": "firing",
180
- "activeAt": "2025-12-15T17:23:29.2925657Z",
181
- "value": "1e+00"
182
- },
183
- {
184
- "labels": {
185
- "alertname": "KubeSchedulerDown",
186
- "severity": "critical"
187
- },
188
- "annotations": {
189
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
190
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
191
- "summary": "Target disappeared from Prometheus target discovery."
192
- },
193
- "state": "firing",
194
- "activeAt": "2025-12-15T17:23:03.093305093Z",
195
- "value": "1e+00"
196
- }
197
- ]
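These deleted snapshots follow the alert object shape served by Prometheus's /api/v1/alerts endpoint (labels, annotations, state, activeAt, value), captured roughly once per minute. A minimal sketch of how a per-minute alerts_at_<timestamp>.json dump like the ones above could be produced — the endpoint path is Prometheus's real alerts API, but the base URL, output directory, and filename convention here are assumptions, not necessarily the benchmark's actual tooling:

import json
from datetime import datetime, timezone
from pathlib import Path

import requests  # assumption: the requests library is installed

PROM_URL = "http://localhost:9090"  # assumption: a locally reachable Prometheus
OUT_DIR = Path("alerts")            # illustrative output directory

def snapshot_alerts() -> Path:
    # GET /api/v1/alerts returns {"status": "success", "data": {"alerts": [...]}};
    # each alert carries labels, annotations, state, activeAt, and value, as above.
    resp = requests.get(f"{PROM_URL}/api/v1/alerts", timeout=10)
    resp.raise_for_status()
    alerts = resp.json()["data"]["alerts"]
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H-%M-%S.%f")
    OUT_DIR.mkdir(parents=True, exist_ok=True)
    out = OUT_DIR / f"alerts_at_{stamp}.json"
    out.write_text(json.dumps(alerts, indent=4))
    return out

if __name__ == "__main__":
    print(f"wrote {snapshot_alerts()}")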
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-14-09.387023.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.42222222222222217)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.2222222222222217e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.8222222222222222)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "8.222222222222222e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 15.244444444444442)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5244444444444442e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "59.99% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "5.998531571218796e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "27.78% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:56:15.017672005Z",
- "value": "2.7779751332149205e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "94.12% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.412408759124088e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "37.62% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.7615486141663e-01"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-15-09.387101.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "61.14% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.113861386138614e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "28.04% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:56:15.017672005Z",
- "value": "2.804123711340206e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "95.49% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.549356223175965e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "37.14% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.713791612624297e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.3555555555555555)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "3.555555555555555e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.6888888888888888)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.888888888888888e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.12222222222222)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.412222222222222e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-16-09.387264.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "65.76% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.576345660930063e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "27.5% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:56:15.017672005Z",
- "value": "2.75035765379113e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "92.43% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.242704100480236e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "34.54% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.453781512605042e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.42222222222222217)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "4.2222222222222217e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.6888888888888888)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6.888888888888888e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.22222222222222)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.422222222222222e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_at_2025-12-15T18-17-09.387695.json DELETED
@@ -1,197 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeControllerManagerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.0192481Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend-proxy",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-proxy-6b4d584985-kxvn6",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "66.19% throttling of CPU in namespace otel-demo for container frontend-proxy in pod frontend-proxy-6b4d584985-kxvn6 on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:27:45.017672005Z",
- "value": "6.618705035971223e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "opentelemetry-collector",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "otel-collector-564d9c7987-ls78p",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "28.13% throttling of CPU in namespace otel-demo for container opentelemetry-collector in pod otel-collector-564d9c7987-ls78p on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:56:15.017672005Z",
- "value": "2.812846068660022e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "frontend",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "frontend-675fd7b5c5-jlpzp",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "91.52% throttling of CPU in namespace otel-demo for container frontend in pod frontend-675fd7b5c5-jlpzp on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "9.15199425080848e-01"
- },
- {
- "labels": {
- "alertname": "CPUThrottlingHigh",
- "container": "product-catalog",
- "instance": "10.0.167.38:10250",
- "namespace": "otel-demo",
- "pod": "product-catalog-7c7f8b68dc-prcsr",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "info"
- },
- "annotations": {
- "description": "36.04% throttling of CPU in namespace otel-demo for container product-catalog in pod product-catalog-7c7f8b68dc-prcsr on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/cputhrottlinghigh",
- "summary": "Processes experience elevated CPU throttling."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:45.017672005Z",
- "value": "3.604301075268817e-01"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.28888888888888886)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "2.8888888888888886e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.6)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "6e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 14.62222222222222)"
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "value": "1.462222222222222e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:29.2925657Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeSchedulerDown",
- "severity": "critical"
- },
- "annotations": {
- "description": "KubeScheduler has disappeared from Prometheus target discovery.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
- "summary": "Target disappeared from Prometheus target discovery."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:03.093305093Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.3.12:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-15T17:23:51.749891565Z",
- "value": "0e+00"
- }
- ]
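Because every snapshot carries the complete firing set at its timestamp, the change between two consecutive captures can be recovered by keying alerts on their full label set. A minimal sketch, assuming two of the snapshot files above are available locally (the paths are illustrative copies, not repository paths):

import json

def alert_key(alert: dict) -> tuple:
    # The identity of an alert instance is its full label set
    # (alertname plus instance-specific labels such as pod and container).
    return tuple(sorted(alert["labels"].items()))

def diff_snapshots(old_path: str, new_path: str):
    with open(old_path) as f:
        old = {alert_key(a) for a in json.load(f)}
    with open(new_path) as f:
        new = {alert_key(a) for a in json.load(f)}
    return new - old, old - new  # (newly firing, no longer firing)

# Hypothetical local copies of two consecutive snapshots:
appeared, resolved = diff_snapshots(
    "alerts_at_2025-12-15T18-16-09.387264.json",
    "alerts_at_2025-12-15T18-17-09.387695.json",
)
print(f"{len(appeared)} newly firing, {len(resolved)} resolved")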
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/alerts/alerts_in_alerting_state_2025-12-15T175546.713186Z.json DELETED
@@ -1,64 +0,0 @@
- {
- "data": {
- "alerts": [
- {
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "state": "firing",
- "value": "1.5e+04"
- },
- {
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.19999999999999998)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "state": "firing",
- "value": "1.9999999999999998e-01"
- },
- {
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.39999999999999997)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "state": "firing",
- "value": "3.9999999999999997e-01"
- },
- {
- "activeAt": "2025-12-15T17:25:19.431042761Z",
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 10.799999999999999)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "state": "firing",
- "value": "1.0799999999999999e+01"
- }
- ]
- },
- "status": "success",
- "timestamp": "2025-12-15T17:55:46Z"
- }
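Unlike the per-minute dumps, this alerts_in_alerting_state file keeps the full API envelope (status, data.alerts, timestamp). A minimal sketch of extracting the firing, warning-severity scenario alerts from such a file, assuming a local copy under the same name:

import json

# Hypothetical local copy of the enveloped snapshot deleted above.
with open("alerts_in_alerting_state_2025-12-15T175546.713186Z.json") as f:
    payload = json.load(f)

assert payload["status"] == "success"
for alert in payload["data"]["alerts"]:
    labels = alert["labels"]
    if alert["state"] == "firing" and labels.get("severity") == "warning":
        # e.g. RequestErrorRate on frontend-proxy with value 1.0799999999999999e+01
        print(labels["alertname"], labels.get("service_name"), alert["value"])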
snapshots/sre/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/v0.2-B96DF826-4BB2-4B62-97AB-6D84254C53D7/Scenario-1/ground_truth.yaml DELETED
@@ -1,67 +0,0 @@
- ---
- fault:
- - entity:
- name: load-generator-pod-1
- group_id: load-generator-pod-1
- kind: Pod
- condition: To be specified
- category: Change
- fault_mechanism: custom
- alerts:
- - id: RequestErrorRate
- group_id: frontend-proxy-service-1
- metadata:
- description: Error rate is above threshold for frontend-proxy service
- - id: RequestLatency
- group_id: frontend-proxy-service-1
- metadata:
- description: Latency is above threshold for frontend-proxy service
- groups:
- - id: load-generator-pod-1
- kind: Pod
- filter:
- - load-generator-.*
- namespace: otel-demo
- root_cause: true
- - id: load-generator-service-1
- kind: Service
- filter:
- - load-generator\b
- namespace: otel-demo
- - id: frontend-proxy-service-1
- kind: Service
- filter:
- - frontend-proxy\b
- namespace: otel-demo
- - id: frontend-proxy-pod-1
- kind: Pod
- namespace: otel-demo
- filter:
- - frontend-proxy-.*
- aliases:
- - - load-generator-service-1
- - load-generator-pod-1
- - frontend-proxy-service-1
- - frontend-proxy-pod-1
- propagations:
- - source: load-generator-pod-1
- target: load-generator-service-1
- condition: load-generator pod is configured with a higher number of users
- effect: load-generator creates a high number of requests as a result
- - source: load-generator-service-1
- target: frontend-proxy-service-1
- condition: frontend-proxy service overloaded - request volume exceeds configured capacity
- effect: frontend-proxy service error rate is above threshold
- - source: load-generator-service-1
- target: frontend-proxy-service-1
- condition: frontend-proxy service overloaded - request volume exceeds configured capacity
- effect: frontend-proxy service latency is above threshold
- recommended_actions:
- - solution:
- id: deem_if_traffic_is_legit
- actions:
- - deem if traffic is legit
- - solution:
- id: set_up_autoscaler
- actions:
- - set up autoscaler
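The ground truth ties alerts to entity groups, where each group selects Kubernetes objects by namespace plus a list of regex filters, and root_cause marks the faulty group. A minimal sketch of resolving which groups a given object name falls into — assuming PyYAML and a local copy of the file; the pod name used below is hypothetical:

import re

import yaml  # assumption: PyYAML is installed

with open("ground_truth.yaml") as f:  # hypothetical local copy of the file above
    gt = yaml.safe_load(f)

def groups_for(name: str, namespace: str) -> list:
    # An object belongs to a group when it is in the group's namespace and
    # matches any of the group's regex filters; filters may overlap via aliases.
    return [
        g
        for g in gt["groups"]
        if g.get("namespace") == namespace
        and any(re.search(p, name) for p in g.get("filter", []))
    ]

for g in groups_for("load-generator-7f9c8d5b6-abcde", "otel-demo"):  # hypothetical pod name
    print(g["id"], "(root cause)" if g.get("root_cause") else "")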