# Usage logging and metrics aggregation for MCP tool calls.
import os
import json
import time
from datetime import datetime
from pathlib import Path
# Use a central log file for usages
# In Docker/HF, /tmp is writable. Locally, use the project root.
import sys
# Docker/HF images mount /app and only /tmp is guaranteed writable there;
# locally, keep the log next to the project root.
_RUNNING_IN_CONTAINER = os.path.exists("/app")
LOG_FILE = (
    Path("/tmp/mcp_usage_log.json")
    if _RUNNING_IN_CONTAINER
    else Path(__file__).parent.parent / "mcp_usage_log.json"
)
def log_usage(server_name: str, tool_name: str, log_file=None):
    """Append a timestamped tool-call record to the JSON usage log.

    Args:
        server_name: Name of the MCP server handling the call.
        tool_name: Name of the tool that was invoked.
        log_file: Optional override of the log path; defaults to the
            module-level LOG_FILE. Lets callers and tests log elsewhere.

    Best-effort: any failure is printed and swallowed so that logging
    can never break the tool call itself.
    """
    path = Path(log_file) if log_file is not None else LOG_FILE
    try:
        data = []
        if path.exists():
            try:
                with open(path, "r") as f:
                    data = json.load(f)
            except ValueError:
                # A corrupt/truncated log file previously made every future
                # log call fail; start over with a fresh list instead.
                # (json.JSONDecodeError subclasses ValueError.)
                data = []
            if not isinstance(data, list):
                # Defend against a hand-edited file holding a non-list value.
                data = []
        data.append({
            "timestamp": datetime.now().isoformat(),
            "server": server_name,
            "tool": tool_name,
        })
        # Keep only the most recent 10,000 entries to bound file size / I/O.
        if len(data) > 10000:
            data = data[-10000:]
        with open(path, "w") as f:
            json.dump(data, f, indent=2)
    except Exception as e:
        # Deliberate best-effort swallow: logging must never raise.
        print(f"Failed to log usage: {e}")
def get_metrics(log_file=None):
    """Aggregate per-server call counts from the JSON usage log.

    Args:
        log_file: Optional override of the log path; defaults to the
            module-level LOG_FILE. Mirrors the log_usage parameter so
            both functions can be pointed at the same alternate file.

    Returns:
        Dict mapping server name -> {"hourly", "weekly", "monthly"}
        counts of calls made within the last hour / 7 days / 30 days.
        Empty dict when the log is missing or unreadable.
    """
    path = Path(log_file) if log_file is not None else LOG_FILE
    if not path.exists():
        return {}
    try:
        with open(path, "r") as f:
            data = json.load(f)
        # NOTE(review): timestamps are naive local time (written by
        # log_usage via datetime.now()); comparisons assume one timezone.
        now = datetime.now()
        metrics = {}
        for entry in data:
            server = entry["server"]
            ts = datetime.fromisoformat(entry["timestamp"])
            bucket = metrics.setdefault(
                server, {"hourly": 0, "weekly": 0, "monthly": 0}
            )
            delta = now - ts
            if delta.total_seconds() < 3600:
                bucket["hourly"] += 1
            if delta.days < 7:
                bucket["weekly"] += 1
            if delta.days < 30:
                bucket["monthly"] += 1
        return metrics
    except Exception as e:
        # Best-effort read: a malformed log yields empty metrics, not a crash.
        print(f"Failed to read metrics: {e}")
        return {}