krupakar-reddy committed on
Commit
03affe6
·
verified ·
1 Parent(s): 1a11952

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +10 -10
main.py CHANGED
@@ -2,7 +2,7 @@ from fastapi import FastAPI, Request
2
  from pydantic import BaseModel
3
  from llama_cpp import Llama
4
 
5
- model_path = "meta-llama-3-8b.Q4_K_M.gguf"
6
 
7
  llm = Llama(
8
  model_path = model_path,
@@ -23,13 +23,13 @@ async def solve_dsa_problem(item: DSAPrompt):
23
  If the input is not a DSA problem, politely refuse their request and reinsist to provide a DSA problem.
24
  '''
25
 
26
- # res = llm.create_chat_completion(
27
- # messages=[
28
- # {"role": "system", "content": system_prompt},
29
- # {"role": "user", "content": item.prompt}
30
- # ],
31
- # temperature = 0.7,
32
- # max_tokens = 2048,
33
- # )
34
 
35
- return llm(item.prompt, temperature = 0.9, max_tokens = 2048)
 
2
  from pydantic import BaseModel
3
  from llama_cpp import Llama
4
 
5
+ model_path = "meta-llama-3-8b-instruct.Q4_K_M.gguf"
6
 
7
  llm = Llama(
8
  model_path = model_path,
 
23
  If the input is not a DSA problem, politely refuse their request and reinsist to provide a DSA problem.
24
  '''
25
 
26
+ res = llm.create_chat_completion(
27
+ messages=[
28
+ {"role": "system", "content": system_prompt},
29
+ {"role": "user", "content": item.prompt}
30
+ ],
31
+ temperature = 0.7,
32
+ max_tokens = 2048,
33
+ )
34
 
35
+ return res