Update app.py
Browse files
app.py
CHANGED
|
@@ -4,9 +4,11 @@ from langchain.schema import AIMessage, HumanMessage
|
|
| 4 |
from langchain_chroma import Chroma
|
| 5 |
import gradio as gr
|
| 6 |
from huggingface_hub import InferenceClient
|
|
|
|
| 7 |
|
| 8 |
# Load environment variables
|
| 9 |
CHROMA_PATH = "chroma"
|
|
|
|
| 10 |
|
| 11 |
# Hugging Face API setup
|
| 12 |
repo_id = "mistralai/Mistral-7B-Instruct-v0.3"
|
|
@@ -44,6 +46,7 @@ class LLM:
|
|
| 44 |
def generate_response(self, prompt):
|
| 45 |
client = InferenceClient(
|
| 46 |
provider="hyperbolic",
|
|
|
|
| 47 |
)
|
| 48 |
completion = client.chat.completions.create(
|
| 49 |
model=repo_id,
|
|
|
|
| 4 |
from langchain_chroma import Chroma
|
| 5 |
import gradio as gr
|
| 6 |
from huggingface_hub import InferenceClient
|
| 7 |
+
import os
|
| 8 |
|
| 9 |
# Load environment variables
|
| 10 |
# Directory where the Chroma vector store is persisted.
CHROMA_PATH = "chroma"

# Hugging Face API token read from the environment.
# BUG FIX: os.getenv is a function, not a mapping — it must be CALLED.
# The original `os.getenv["token"]` raises
# `TypeError: 'builtin_function_or_method' object is not subscriptable`
# at import time. Note KEY is None when the "token" env var is unset;
# InferenceClient will then fall back to its default auth handling.
KEY = os.getenv("token")

# Hugging Face API setup: model repository used for chat completions
# (passed as `model=` to InferenceClient in generate_response).
repo_id = "mistralai/Mistral-7B-Instruct-v0.3"
|
|
|
|
| 46 |
def generate_response(self, prompt):
|
| 47 |
client = InferenceClient(
|
| 48 |
provider="hyperbolic",
|
| 49 |
+
api_key = KEY
|
| 50 |
)
|
| 51 |
completion = client.chat.completions.create(
|
| 52 |
model=repo_id,
|