Spaces:
Running
Complete Fix for HF Spaces Sample Data Loading
Browse files
Fixed All Critical Issues:
1. OPENAI_API_KEY None TypeError (4 files):
- multi_agent_knowledge_extractor.py
- knowledge_graph_merger.py
- knowledge_graph_tester.py
- knowledge_graph_processor.py
✓ Added null checks: if OPENAI_API_KEY: os.environ[...] = OPENAI_API_KEY
2. SQLAlchemy ObjectNotExecutableError:
- Added text() import and wrapped all SQL strings
✓ All execute() calls now use text("SQL") format
3. SQLAlchemy ArgumentError - Parameter Format:
- Converted positional parameters to named parameters
- Changed ?, ?, ? → :param1, :param2, :param3
- Changed tuple params → dict params
✓ All queries now use {"param1": value1, "param2": value2} format
Local Test Results:
✓ Application starts successfully
✓ Sample data loads completely:
  • 1 trace inserted
  • 1 knowledge graph inserted
  • 5 entities inserted
  • 4 relations inserted
✓ All database operations successful
✓ Transaction committed successfully
✓ Final counts match expected values
Expected HF Spaces Result:
Dashboard should now show: Traces & Graphs: 1 • 1, Entities & Relations: 5 • 4
- agentgraph/extraction/graph_processing/knowledge_graph_processor.py +2 -1
- agentgraph/extraction/graph_utilities/knowledge_graph_merger.py +2 -1
- agentgraph/methods/production/multi_agent_knowledge_extractor.py +2 -1
- agentgraph/testing/knowledge_graph_tester.py +2 -1
- backend/database/__init__.py +65 -26
|
@@ -66,7 +66,8 @@ from agentgraph.reconstruction.content_reference_resolver import ContentReferenc
|
|
| 66 |
|
| 67 |
# Load OpenAI API key from configuration
|
| 68 |
from utils.config import OPENAI_API_KEY
|
| 69 |
-
|
|
|
|
| 70 |
|
| 71 |
|
| 72 |
class SlidingWindowMonitor:
|
|
|
|
| 66 |
|
| 67 |
# Load OpenAI API key from configuration
|
| 68 |
from utils.config import OPENAI_API_KEY
|
| 69 |
+
if OPENAI_API_KEY:
|
| 70 |
+
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
|
| 71 |
|
| 72 |
|
| 73 |
class SlidingWindowMonitor:
|
|
@@ -50,7 +50,8 @@ from agentgraph.shared.models.reference_based import KnowledgeGraph
|
|
| 50 |
|
| 51 |
# Load OpenAI API key from configuration
|
| 52 |
from utils.config import OPENAI_API_KEY
|
| 53 |
-
|
|
|
|
| 54 |
# Note: OPENAI_MODEL_NAME will be set dynamically in __init__ method
|
| 55 |
|
| 56 |
|
|
|
|
| 50 |
|
| 51 |
# Load OpenAI API key from configuration
|
| 52 |
from utils.config import OPENAI_API_KEY
|
| 53 |
+
if OPENAI_API_KEY:
|
| 54 |
+
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
|
| 55 |
# Note: OPENAI_MODEL_NAME will be set dynamically in __init__ method
|
| 56 |
|
| 57 |
|
|
@@ -80,7 +80,8 @@ import base64
|
|
| 80 |
|
| 81 |
# openlit.init()
|
| 82 |
|
| 83 |
-
|
|
|
|
| 84 |
# Note: OPENAI_MODEL_NAME will be set dynamically when creating the crew
|
| 85 |
|
| 86 |
|
|
|
|
| 80 |
|
| 81 |
# openlit.init()
|
| 82 |
|
| 83 |
+
if OPENAI_API_KEY:
|
| 84 |
+
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
|
| 85 |
# Note: OPENAI_MODEL_NAME will be set dynamically when creating the crew
|
| 86 |
|
| 87 |
|
|
@@ -52,7 +52,8 @@ import openlit
|
|
| 52 |
|
| 53 |
openlit.init()
|
| 54 |
|
| 55 |
-
|
|
|
|
| 56 |
|
| 57 |
# (future) from .perturbation_types.rule_misunderstanding import RuleMisunderstandingPerturbationTester
|
| 58 |
# (future) from .perturbation_types.emotional_manipulation import EmotionalManipulationPerturbationTester
|
|
|
|
| 52 |
|
| 53 |
openlit.init()
|
| 54 |
|
| 55 |
+
if OPENAI_API_KEY:
|
| 56 |
+
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
|
| 57 |
|
| 58 |
# (future) from .perturbation_types.rule_misunderstanding import RuleMisunderstandingPerturbationTester
|
| 59 |
# (future) from .perturbation_types.emotional_manipulation import EmotionalManipulationPerturbationTester
|
|
@@ -4,7 +4,7 @@ This package provides database access and utilities for agent monitoring.
|
|
| 4 |
"""
|
| 5 |
|
| 6 |
import os
|
| 7 |
-
from sqlalchemy import create_engine
|
| 8 |
from sqlalchemy.ext.declarative import declarative_base
|
| 9 |
from sqlalchemy.orm import sessionmaker, scoped_session
|
| 10 |
|
|
@@ -68,7 +68,7 @@ def test_database_connection():
|
|
| 68 |
Base.metadata.create_all(bind=engine)
|
| 69 |
|
| 70 |
# Test a simple query
|
| 71 |
-
result = session.execute("SELECT 1").fetchone()
|
| 72 |
print(f"β
Database connection test successful: {result}")
|
| 73 |
|
| 74 |
session.close()
|
|
@@ -105,11 +105,11 @@ def add_sample_data_for_hf():
|
|
| 105 |
try:
|
| 106 |
print("π Checking existing data...")
|
| 107 |
# Check if data already exists
|
| 108 |
-
result = conn.execute("SELECT COUNT(*) FROM traces").fetchone()
|
| 109 |
existing_traces = result[0] if result else 0
|
| 110 |
print(f" β’ Found {existing_traces} existing traces")
|
| 111 |
|
| 112 |
-
result = conn.execute("SELECT COUNT(*) FROM knowledge_graphs").fetchone()
|
| 113 |
existing_kgs = result[0] if result else 0
|
| 114 |
print(f" β’ Found {existing_kgs} existing knowledge graphs")
|
| 115 |
|
|
@@ -147,31 +147,56 @@ def add_sample_data_for_hf():
|
|
| 147 |
print("πΎ Inserting sample trace...")
|
| 148 |
# Insert trace
|
| 149 |
conn.execute(
|
| 150 |
-
"""INSERT INTO traces (trace_id, filename, title, description, content, content_hash,
|
| 151 |
upload_timestamp, update_timestamp, uploader, trace_type, trace_source,
|
| 152 |
character_count, turn_count, status, tags, trace_metadata)
|
| 153 |
-
VALUES (
|
| 154 |
-
|
| 155 |
-
|
| 156 |
-
|
| 157 |
-
|
| 158 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 159 |
)
|
| 160 |
print("β
Trace inserted successfully")
|
| 161 |
|
| 162 |
print("π Inserting knowledge graph...")
|
| 163 |
# Insert knowledge graph with correct field names
|
| 164 |
conn.execute(
|
| 165 |
-
"""INSERT INTO knowledge_graphs (filename, entity_count, relation_count,
|
| 166 |
status, trace_id, window_index, window_total, processing_run_id)
|
| 167 |
-
VALUES (
|
| 168 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 169 |
)
|
| 170 |
print("β
Knowledge graph inserted successfully")
|
| 171 |
|
| 172 |
print("π Retrieving knowledge graph ID...")
|
| 173 |
# Get KG ID
|
| 174 |
-
kg_result = conn.execute("SELECT id FROM knowledge_graphs WHERE trace_id =
|
| 175 |
kg_row = kg_result.fetchone()
|
| 176 |
if not kg_row:
|
| 177 |
raise Exception("Failed to retrieve knowledge graph ID")
|
|
@@ -193,12 +218,19 @@ def add_sample_data_for_hf():
|
|
| 193 |
for i, (entity_id, entity_type, name, properties) in enumerate(entities, 1):
|
| 194 |
print(f" β’ Inserting entity {i}/5: {name} ({entity_type})")
|
| 195 |
conn.execute(
|
| 196 |
-
"""INSERT INTO entities (graph_id, entity_id, type, name, properties)
|
| 197 |
-
VALUES (
|
| 198 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 199 |
)
|
| 200 |
# Get the database ID for this entity
|
| 201 |
-
result = conn.execute("SELECT id FROM entities WHERE graph_id =
|
|
|
|
| 202 |
row = result.fetchone()
|
| 203 |
if not row:
|
| 204 |
raise Exception(f"Failed to retrieve database ID for entity: {entity_id}")
|
|
@@ -221,9 +253,16 @@ def add_sample_data_for_hf():
|
|
| 221 |
print(f" β’ Inserting relation {i}/4: {from_entity} --{relation_type}--> {to_entity}")
|
| 222 |
print(f" β Source DB ID: {source_db_id}, Target DB ID: {target_db_id}")
|
| 223 |
conn.execute(
|
| 224 |
-
"""INSERT INTO relations (graph_id, relation_id, type, source_id, target_id, properties)
|
| 225 |
-
VALUES (
|
| 226 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 227 |
)
|
| 228 |
print("β
All relations inserted successfully")
|
| 229 |
|
|
@@ -234,10 +273,10 @@ def add_sample_data_for_hf():
|
|
| 234 |
|
| 235 |
print("π Verifying final data counts...")
|
| 236 |
# Verify data
|
| 237 |
-
final_traces = conn.execute("SELECT COUNT(*) FROM traces").fetchone()[0]
|
| 238 |
-
final_kgs = conn.execute("SELECT COUNT(*) FROM knowledge_graphs").fetchone()[0]
|
| 239 |
-
final_entities = conn.execute("SELECT COUNT(*) FROM entities").fetchone()[0]
|
| 240 |
-
final_relations = conn.execute("SELECT COUNT(*) FROM relations").fetchone()[0]
|
| 241 |
|
| 242 |
print("π Sample data insertion completed successfully!")
|
| 243 |
print(f" π Final counts:")
|
|
|
|
| 4 |
"""
|
| 5 |
|
| 6 |
import os
|
| 7 |
+
from sqlalchemy import create_engine, text
|
| 8 |
from sqlalchemy.ext.declarative import declarative_base
|
| 9 |
from sqlalchemy.orm import sessionmaker, scoped_session
|
| 10 |
|
|
|
|
| 68 |
Base.metadata.create_all(bind=engine)
|
| 69 |
|
| 70 |
# Test a simple query
|
| 71 |
+
result = session.execute(text("SELECT 1")).fetchone()
|
| 72 |
print(f"β
Database connection test successful: {result}")
|
| 73 |
|
| 74 |
session.close()
|
|
|
|
| 105 |
try:
|
| 106 |
print("π Checking existing data...")
|
| 107 |
# Check if data already exists
|
| 108 |
+
result = conn.execute(text("SELECT COUNT(*) FROM traces")).fetchone()
|
| 109 |
existing_traces = result[0] if result else 0
|
| 110 |
print(f" β’ Found {existing_traces} existing traces")
|
| 111 |
|
| 112 |
+
result = conn.execute(text("SELECT COUNT(*) FROM knowledge_graphs")).fetchone()
|
| 113 |
existing_kgs = result[0] if result else 0
|
| 114 |
print(f" β’ Found {existing_kgs} existing knowledge graphs")
|
| 115 |
|
|
|
|
| 147 |
print("πΎ Inserting sample trace...")
|
| 148 |
# Insert trace
|
| 149 |
conn.execute(
|
| 150 |
+
text("""INSERT INTO traces (trace_id, filename, title, description, content, content_hash,
|
| 151 |
upload_timestamp, update_timestamp, uploader, trace_type, trace_source,
|
| 152 |
character_count, turn_count, status, tags, trace_metadata)
|
| 153 |
+
VALUES (:trace_id, :filename, :title, :description, :content, :content_hash,
|
| 154 |
+
:upload_timestamp, :update_timestamp, :uploader, :trace_type, :trace_source,
|
| 155 |
+
:character_count, :turn_count, :status, :tags, :trace_metadata)"""),
|
| 156 |
+
{
|
| 157 |
+
"trace_id": trace_id,
|
| 158 |
+
"filename": "sample_demo.json",
|
| 159 |
+
"title": "Multi-Agent Customer Service Demo",
|
| 160 |
+
"description": "Demo showing agent coordination and error handling",
|
| 161 |
+
"content": sample_trace_content,
|
| 162 |
+
"content_hash": content_hash,
|
| 163 |
+
"upload_timestamp": now,
|
| 164 |
+
"update_timestamp": now,
|
| 165 |
+
"uploader": "AgentGraph Demo",
|
| 166 |
+
"trace_type": "multi_agent",
|
| 167 |
+
"trace_source": "sample",
|
| 168 |
+
"character_count": len(sample_trace_content),
|
| 169 |
+
"turn_count": 6,
|
| 170 |
+
"status": "processed",
|
| 171 |
+
"tags": '["demo", "customer_service", "multi_agent"]',
|
| 172 |
+
"trace_metadata": '{"scenario": "customer_service", "agents": ["RouterAgent", "OrderAgent", "CompensationAgent", "SupervisorAgent"]}'
|
| 173 |
+
}
|
| 174 |
)
|
| 175 |
print("β
Trace inserted successfully")
|
| 176 |
|
| 177 |
print("π Inserting knowledge graph...")
|
| 178 |
# Insert knowledge graph with correct field names
|
| 179 |
conn.execute(
|
| 180 |
+
text("""INSERT INTO knowledge_graphs (filename, entity_count, relation_count,
|
| 181 |
status, trace_id, window_index, window_total, processing_run_id)
|
| 182 |
+
VALUES (:filename, :entity_count, :relation_count, :status, :trace_id,
|
| 183 |
+
:window_index, :window_total, :processing_run_id)"""),
|
| 184 |
+
{
|
| 185 |
+
"filename": "demo_kg.json",
|
| 186 |
+
"entity_count": 5,
|
| 187 |
+
"relation_count": 4,
|
| 188 |
+
"status": "completed",
|
| 189 |
+
"trace_id": trace_id,
|
| 190 |
+
"window_index": 0,
|
| 191 |
+
"window_total": 1,
|
| 192 |
+
"processing_run_id": "demo_run"
|
| 193 |
+
}
|
| 194 |
)
|
| 195 |
print("β
Knowledge graph inserted successfully")
|
| 196 |
|
| 197 |
print("π Retrieving knowledge graph ID...")
|
| 198 |
# Get KG ID
|
| 199 |
+
kg_result = conn.execute(text("SELECT id FROM knowledge_graphs WHERE trace_id = :trace_id"), {"trace_id": trace_id})
|
| 200 |
kg_row = kg_result.fetchone()
|
| 201 |
if not kg_row:
|
| 202 |
raise Exception("Failed to retrieve knowledge graph ID")
|
|
|
|
| 218 |
for i, (entity_id, entity_type, name, properties) in enumerate(entities, 1):
|
| 219 |
print(f" β’ Inserting entity {i}/5: {name} ({entity_type})")
|
| 220 |
conn.execute(
|
| 221 |
+
text("""INSERT INTO entities (graph_id, entity_id, type, name, properties)
|
| 222 |
+
VALUES (:graph_id, :entity_id, :type, :name, :properties)"""),
|
| 223 |
+
{
|
| 224 |
+
"graph_id": kg_id,
|
| 225 |
+
"entity_id": entity_id,
|
| 226 |
+
"type": entity_type,
|
| 227 |
+
"name": name,
|
| 228 |
+
"properties": properties
|
| 229 |
+
}
|
| 230 |
)
|
| 231 |
# Get the database ID for this entity
|
| 232 |
+
result = conn.execute(text("SELECT id FROM entities WHERE graph_id = :graph_id AND entity_id = :entity_id"),
|
| 233 |
+
{"graph_id": kg_id, "entity_id": entity_id})
|
| 234 |
row = result.fetchone()
|
| 235 |
if not row:
|
| 236 |
raise Exception(f"Failed to retrieve database ID for entity: {entity_id}")
|
|
|
|
| 253 |
print(f" β’ Inserting relation {i}/4: {from_entity} --{relation_type}--> {to_entity}")
|
| 254 |
print(f" β Source DB ID: {source_db_id}, Target DB ID: {target_db_id}")
|
| 255 |
conn.execute(
|
| 256 |
+
text("""INSERT INTO relations (graph_id, relation_id, type, source_id, target_id, properties)
|
| 257 |
+
VALUES (:graph_id, :relation_id, :type, :source_id, :target_id, :properties)"""),
|
| 258 |
+
{
|
| 259 |
+
"graph_id": kg_id,
|
| 260 |
+
"relation_id": relation_id,
|
| 261 |
+
"type": relation_type,
|
| 262 |
+
"source_id": source_db_id,
|
| 263 |
+
"target_id": target_db_id,
|
| 264 |
+
"properties": properties
|
| 265 |
+
}
|
| 266 |
)
|
| 267 |
print("β
All relations inserted successfully")
|
| 268 |
|
|
|
|
| 273 |
|
| 274 |
print("π Verifying final data counts...")
|
| 275 |
# Verify data
|
| 276 |
+
final_traces = conn.execute(text("SELECT COUNT(*) FROM traces")).fetchone()[0]
|
| 277 |
+
final_kgs = conn.execute(text("SELECT COUNT(*) FROM knowledge_graphs")).fetchone()[0]
|
| 278 |
+
final_entities = conn.execute(text("SELECT COUNT(*) FROM entities")).fetchone()[0]
|
| 279 |
+
final_relations = conn.execute(text("SELECT COUNT(*) FROM relations")).fetchone()[0]
|
| 280 |
|
| 281 |
print("π Sample data insertion completed successfully!")
|
| 282 |
print(f" π Final counts:")
|