Create app.py
app.py ADDED
@@ -0,0 +1,18 @@
+from fastapi import FastAPI
+from llama_cpp import Llama
+
+app = FastAPI()
+
+# Load your model
+llm = Llama.from_pretrained(
+    repo_id="othmanezaid77/my-eva-model",
+    filename="Llama-3.1-8B-Instruct.gguf",
+    n_ctx=2048
+)
+
+@app.post("/chat")
+async def chat(data: dict):
+    prompt = data.get("prompt", "")
+    full_prompt = f"<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
+    output = llm(full_prompt, max_tokens=500, stop=["<|eot_id|>"])
+    return {"response": output['choices'][0]['text']}
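For reference, the endpoint added above can be exercised with a small client like the sketch below. The base URL is an assumption, not something in the commit (it presumes a local `uvicorn app:app --port 8000` run; substitute the deployed Space's URL). Only the request shape ({"prompt": ...}) and the response shape ({"response": ...}) come from the handler itself.

# Minimal client sketch for the /chat endpoint defined in app.py.
# Assumption (not in the commit): the app is served locally with
#   uvicorn app:app --port 8000
# Replace BASE_URL with the deployed Space's URL when calling remotely.
import requests

BASE_URL = "http://localhost:8000"  # hypothetical local run

resp = requests.post(f"{BASE_URL}/chat", json={"prompt": "Hello, who are you?"})
resp.raise_for_status()
print(resp.json()["response"])

Note that the handler builds the Llama 3.1 instruct template by hand and stops generation at <|eot_id|>, so the returned text is just the assistant's turn with no template tokens.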