Spaces:
Sleeping
Sleeping
File size: 937 Bytes
badd0f9 527c162 badd0f9 527c162 badd0f9 527c162 badd0f9 527c162 badd0f9 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 |
from huggingface_hub import InferenceClient
import os
# Read the Hugging Face access token from the environment (on a HF Space it is
# injected as a secret named HF_TOKEN); the InferenceClient sends it with every
# Inference API request.
# NOTE(review): os.getenv returns None when HF_TOKEN is unset, in which case
# the client falls back to unauthenticated requests — confirm that is intended.
client = InferenceClient(
    token=os.getenv("HF_TOKEN")
)
# Model repo id used for every text-generation call made by call_llm below.
MODEL = "HuggingFaceH4/zephyr-7b-beta"
def call_llm(prompt):
    """Send *prompt* to the configured model and return the generated text.

    Uses the module-level ``client`` and ``MODEL``; generation is capped at
    400 new tokens with temperature 0.7.
    """
    return client.text_generation(
        model=MODEL,
        prompt=prompt,
        max_new_tokens=400,
        temperature=0.7,
    )
def market_agent(problem, memory, prompt):
    """Fill the market-agent prompt template with the problem and shared
    memory, then query the LLM and return its response."""
    filled_prompt = prompt.format(problem=problem, memory=memory)
    return call_llm(filled_prompt)
def finance_agent(problem, memory, prompt):
    """Fill the finance-agent prompt template with the problem and shared
    memory, then query the LLM and return its response."""
    filled_prompt = prompt.format(problem=problem, memory=memory)
    return call_llm(filled_prompt)
def risk_agent(problem, memory, prompt):
    """Fill the risk-agent prompt template with the problem and shared
    memory, then query the LLM and return its response."""
    filled_prompt = prompt.format(problem=problem, memory=memory)
    return call_llm(filled_prompt)
def ethics_agent(problem, memory, prompt):
    """Fill the ethics-agent prompt template with the problem and shared
    memory, then query the LLM and return its response."""
    filled_prompt = prompt.format(problem=problem, memory=memory)
    return call_llm(filled_prompt)
def synthesis_agent(problem, memory, prompt):
    """Fill the synthesis-agent prompt template with the problem and shared
    memory, then query the LLM and return its response."""
    filled_prompt = prompt.format(problem=problem, memory=memory)
    return call_llm(filled_prompt)
|