Update app/llm.py
Browse files — app/llm.py (+6, −5)
app/llm.py
CHANGED
|
@@ -104,11 +104,12 @@ async def generate(gen:GenModel):#, user: schemas.BaseUser = fastapi.Depends(cur
|
|
| 104 |
gen.seed = 42
|
| 105 |
try:
|
| 106 |
st = time()
|
| 107 |
-
output = llm_generate.
|
| 108 |
-
messages=[
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
|
|
|
| 112 |
temperature = gen.temperature,
|
| 113 |
seed= gen.seed,
|
| 114 |
chat_format="llama-2",
|
|
|
|
| 104 |
gen.seed = 42
|
| 105 |
try:
|
| 106 |
st = time()
|
| 107 |
+
output = llm_generate.create_completion(
|
| 108 |
+
#messages=[
|
| 109 |
+
# {"role": "system", "content": gen.system},
|
| 110 |
+
# {"role": "user", "content": gen.question},
|
| 111 |
+
# ],
|
| 112 |
+
gen.question,
|
| 113 |
temperature = gen.temperature,
|
| 114 |
seed= gen.seed,
|
| 115 |
chat_format="llama-2",
|