Spaces:
Build error
Build error
File size: 1,486 Bytes
2ca143e fc8f673 c486d43 fc8f673 2ca143e fc8f673 2ca143e 2d78b6e c486d43 e3c9980 fc8f673 c486d43 fc8f673 c486d43 fc8f673 c486d43 fc8f673 c486d43 fc8f673 c486d43 fc8f673 c486d43 fc8f673 c486d43 fc8f673 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 |
import os
import time
from telemetry import log_event
from memory_core import update_memory, load_memory
from identity_core import IdentityManager
from context_graph import ContextGraph
class AgentCore:
    """Minimal agent shell that wires identity, memory, context, and telemetry.

    On construction it resolves a persistent agent identity and a per-agent
    context graph; `run` simulates one model turn, persisting the prompt and
    response to memory and linking them in the context graph.
    """

    def __init__(self, model="gpt-4o-mini"):
        """Initialize identity, context graph, and emit an init telemetry event.

        Args:
            model: Model identifier; only recorded in telemetry here — the
                visible code never calls a real model (responses are simulated).
        """
        self.identity = IdentityManager()
        self.agent_id = self.identity.agent_id
        self.context = ContextGraph(self.agent_id)
        log_event(self.agent_id, "init", "success", {"model": model})
        print(f"[INIT] Agent {self.agent_id} initialized with model {model}")

    def run(self, prompt):
        """Process one prompt: simulate a response, persist it, log telemetry.

        Args:
            prompt: The user/task prompt string to process.

        Returns:
            The simulated response string.

        Raises:
            Exception: Any failure during memory/context updates is logged to
                telemetry and re-raised unchanged for the caller to handle.
        """
        log_event(self.agent_id, "run_start", "in_progress", {"prompt": prompt})
        try:
            # Simulate model output — no real model call in this version.
            response = f"Agent {self.agent_id} processed: {prompt}"
            # Persist the exchange to memory and link it in the context graph.
            update_memory(self.agent_id, "last_prompt", prompt)
            update_memory(self.agent_id, "last_response", response)
            prompt_node = self.context.add_node("prompt", prompt)
            response_node = self.context.add_node("response", response)
            self.context.connect(prompt_node, response_node)
            # Telemetry success
            log_event(self.agent_id, "run_complete", "success", {"response": response})
            print(f"[RUN] {response}")
            return response
        except Exception as e:
            log_event(self.agent_id, "run_failed", "error", {"error": str(e)})
            print(f"[ERROR] {e}")
            # Bare `raise` preserves the original traceback; `raise e` added a
            # redundant re-raise frame.
            raise