mishrabp committed on
Commit
af6a0bb
·
verified ·
1 Parent(s): 383cff1

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. Dockerfile +1 -0
  2. src/mcp-github/Dockerfile +1 -0
  3. src/mcp_telemetry.py +66 -0
Dockerfile CHANGED
@@ -11,6 +11,7 @@ COPY pyproject.toml .
11
  RUN pip install --no-cache-dir .
12
 
13
  COPY src/mcp-github ./src/mcp-github
 
14
 
15
  ENV PYTHONPATH=/app/src
16
 
 
11
  RUN pip install --no-cache-dir .
12
 
13
  COPY src/mcp-github ./src/mcp-github
14
+ COPY src/mcp_telemetry.py ./src/mcp_telemetry.py
15
 
16
  ENV PYTHONPATH=/app/src
17
 
src/mcp-github/Dockerfile CHANGED
@@ -11,6 +11,7 @@ COPY pyproject.toml .
11
  RUN pip install --no-cache-dir .
12
 
13
  COPY src/mcp-github ./src/mcp-github
 
14
 
15
  ENV PYTHONPATH=/app/src
16
 
 
11
  RUN pip install --no-cache-dir .
12
 
13
  COPY src/mcp-github ./src/mcp-github
14
+ COPY src/mcp_telemetry.py ./src/mcp_telemetry.py
15
 
16
  ENV PYTHONPATH=/app/src
17
 
src/mcp_telemetry.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import os
3
+ import json
4
+ import time
5
+ from datetime import datetime
6
+ from pathlib import Path
7
+
8
# Central JSON file that accumulates tool-usage records for all MCP servers.
# Three `.parent` hops from this module — presumably intended to land at the
# project root (e.g. /app/src/mcp_telemetry.py -> /app's parent in the Docker
# image). NOTE(review): verify this resolves where intended; in the container
# it appears to point above /app. Also, `time` is imported at module top but
# appears unused in this file.
LOG_FILE = Path(__file__).parent.parent.parent / "mcp_usage_log.json"
10
+
11
def log_usage(server_name: str, tool_name: str, log_file=None):
    """Append a tool-call record (timestamp, server, tool) to the usage log.

    Args:
        server_name: Name of the MCP server that handled the call.
        tool_name: Name of the tool that was invoked.
        log_file: Optional ``Path`` overriding the log destination; defaults
            to the module-level ``LOG_FILE``.

    Telemetry is best-effort: any failure is printed and swallowed so that
    logging can never break the calling tool.
    """
    path = LOG_FILE if log_file is None else log_file
    try:
        data = []
        if path.exists():
            try:
                with open(path, "r") as f:
                    data = json.load(f)
            except ValueError:
                # json.JSONDecodeError subclasses ValueError. A corrupt or
                # truncated log previously disabled logging forever (every
                # call failed here and the entry was dropped); start a fresh
                # log instead.
                data = []

        data.append({
            "timestamp": datetime.now().isoformat(),
            "server": server_name,
            "tool": tool_name,
        })

        # Keep only the most recent 10,000 records for performance.
        if len(data) > 10000:
            data = data[-10000:]

        with open(path, "w") as f:
            json.dump(data, f, indent=2)
    except Exception as e:
        # Best-effort: never propagate telemetry failures to the caller.
        print(f"Failed to log usage: {e}")
34
+
35
def get_metrics(log_file=None):
    """Aggregate per-server call counts from the usage log.

    Args:
        log_file: Optional ``Path`` overriding the log location; defaults to
            the module-level ``LOG_FILE``.

    Returns:
        Mapping of server name to ``{"hourly", "weekly", "monthly"}`` counts
        relative to the current time, or ``{}`` if the log is missing or
        unreadable. Buckets are cumulative: a call in the last hour also
        counts toward the weekly and monthly totals.
    """
    path = LOG_FILE if log_file is None else log_file
    if not path.exists():
        return {}

    try:
        with open(path, "r") as f:
            data = json.load(f)

        now = datetime.now()
        metrics = {}

        for entry in data:
            try:
                server = entry["server"]
                ts = datetime.fromisoformat(entry["timestamp"])
            except (KeyError, TypeError, ValueError):
                # Previously a single malformed entry aborted the whole
                # aggregation (returning {}); skip bad records instead.
                continue

            bucket = metrics.setdefault(
                server, {"hourly": 0, "weekly": 0, "monthly": 0}
            )

            # Rolling windows measured back from "now".
            delta = now - ts
            if delta.total_seconds() < 3600:
                bucket["hourly"] += 1
            if delta.days < 7:
                bucket["weekly"] += 1
            if delta.days < 30:
                bucket["monthly"] += 1

        return metrics
    except Exception as e:
        # Best-effort: an unreadable log yields empty metrics, not a crash.
        print(f"Failed to read metrics: {e}")
        return {}