hypeconqueror1 committed on
Commit
ff73272
·
verified ·
1 Parent(s): 18dd6f3

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +5 -6
main.py CHANGED
@@ -29,13 +29,12 @@ async def PromptLLM(file: UploadFile = File(...)):
29
  llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q4_1.bin", model_type="llama",
30
  config={'max_new_tokens': 1024, 'context_length': 2048, 'temperature': 0.01})
31
 
32
- template = """Summarise the report {pages}
33
- """
34
- prompt_template = PromptTemplate(input_variables=["pages"], template=template)
35
- chain = LLMChain(llm=llm, prompt=prompt_template)
36
 
37
-
38
- result = chain.run(pages=data[0].page_content)
39
  return result
40
 
41
 
 
29
  llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q4_1.bin", model_type="llama",
30
  config={'max_new_tokens': 1024, 'context_length': 2048, 'temperature': 0.01})
31
 
32
+ template = """Summarise the report {pages}"""
33
+ prompt_template = PromptTemplate(input_variables=["pages"], template=template)
34
+ chain = LLMChain(llm=llm, prompt=prompt_template)
 
35
 
36
+ result = chain.run(pages=data[0].page_content)
37
+
38
  return result
39
 
40