Spaces:
Sleeping
Sleeping
Tuyen Pham committed on
Add Langfuse Code Logic
Browse files
app.py
CHANGED
|
@@ -1,6 +1,7 @@
|
|
| 1 |
import yaml
|
| 2 |
import os
|
| 3 |
from smolagents import GradioUI, CodeAgent, InferenceClientModel
|
|
|
|
| 4 |
|
| 5 |
# Get current directory path
|
| 6 |
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
|
|
@@ -12,11 +13,16 @@ from tools.catering_service_tool import SimpleTool as CateringServiceTool
|
|
| 12 |
from tools.superhero_party_theme_generator import SuperheroPartyThemeTool as SuperheroPartyThemeGenerator
|
| 13 |
from tools.final_answer import FinalAnswerTool as FinalAnswer
|
| 14 |
|
| 15 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
|
| 17 |
model = InferenceClientModel(
|
| 18 |
-
model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
|
| 19 |
-
provider=None,
|
| 20 |
)
|
| 21 |
|
| 22 |
web_search = WebSearch()
|
|
@@ -26,7 +32,6 @@ catering_service_tool = CateringServiceTool()
|
|
| 26 |
superhero_party_theme_generator = SuperheroPartyThemeGenerator()
|
| 27 |
final_answer = FinalAnswer()
|
| 28 |
|
| 29 |
-
|
| 30 |
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
|
| 31 |
prompt_templates = yaml.safe_load(stream)
|
| 32 |
|
|
@@ -39,7 +44,9 @@ agent = CodeAgent(
|
|
| 39 |
planning_interval=None,
|
| 40 |
name=None,
|
| 41 |
description=None,
|
| 42 |
-
prompt_templates=prompt_templates
|
|
|
|
| 43 |
)
|
|
|
|
| 44 |
if __name__ == "__main__":
|
| 45 |
GradioUI(agent).launch()
|
|
|
|
| 1 |
import yaml
|
| 2 |
import os
|
| 3 |
from smolagents import GradioUI, CodeAgent, InferenceClientModel
|
| 4 |
+
from smolagents import LangfuseCallback # @mangoobee
|
| 5 |
|
| 6 |
# Get current directory path
|
| 7 |
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
|
|
|
|
| 13 |
from tools.superhero_party_theme_generator import SuperheroPartyThemeTool as SuperheroPartyThemeGenerator
|
| 14 |
from tools.final_answer import FinalAnswerTool as FinalAnswer
|
| 15 |
|
| 16 |
+
# Initialize Langfuse callback (reads from HF Spaces secrets) - @mangoobee
|
| 17 |
+
langfuse_callback = LangfuseCallback(
|
| 18 |
+
public_key=os.environ["LANGFUSE_PUBLIC_KEY"],
|
| 19 |
+
secret_key=os.environ["LANGFUSE_SECRET_KEY"],
|
| 20 |
+
host=os.environ["LANGFUSE_HOST"]
|
| 21 |
+
)
|
| 22 |
|
| 23 |
model = InferenceClientModel(
|
| 24 |
+
model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
|
| 25 |
+
provider=None,
|
| 26 |
)
|
| 27 |
|
| 28 |
web_search = WebSearch()
|
|
|
|
| 32 |
superhero_party_theme_generator = SuperheroPartyThemeGenerator()
|
| 33 |
final_answer = FinalAnswer()
|
| 34 |
|
|
|
|
| 35 |
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
|
| 36 |
prompt_templates = yaml.safe_load(stream)
|
| 37 |
|
|
|
|
| 44 |
planning_interval=None,
|
| 45 |
name=None,
|
| 46 |
description=None,
|
| 47 |
+
prompt_templates=prompt_templates,
|
| 48 |
+
callbacks=[langfuse_callback] # Add this parameter
|
| 49 |
)
|
| 50 |
+
|
| 51 |
if __name__ == "__main__":
|
| 52 |
GradioUI(agent).launch()
|