MissingBreath committed on
Commit
7145c80
·
verified ·
1 Parent(s): 95334d6

Update api.py

Browse files
Files changed (1) hide show
  1. api.py +50 -6
api.py CHANGED
@@ -101,13 +101,15 @@ retriever = db.as_retriever()
101
  # 5. RAG Pipeline
102
  llm = ChatGoogleGenerativeAI(model=gemini_model_name, temperature=0.7)
103
 
104
- def create_prompt(disease_name, severity):
105
- prompt = """
106
- You are an expert in plant disease treatment.
 
 
107
  Use the following context to answer the user's question.
108
  If you don't know the answer, try to find the answer from the context try to make similair answer to the context.
109
  If the question is not related to the context, Generate from your exsisting knowlodge about the context.
110
-
111
  Context:
112
  {context}
113
 
@@ -128,7 +130,19 @@ def create_prompt(disease_name, severity):
128
  - Organic:"""
129
  return prompt
130
 
131
-
 
 
 
 
 
 
 
 
 
 
 
 
132
 
133
 
134
  # Load the saved model
@@ -383,10 +397,11 @@ async def classify(image: UploadFile = File(...)):
383
  class DiseaseQuery(BaseModel):
384
  disease: str
385
  severity: str # "normal" or "severe"
 
386
  @app.post("/RAG")
387
  async def rag_classify(query: DiseaseQuery):
388
  try:
389
- prompt_template = create_prompt(query.disease, query.severity)
390
 
391
  prompt = PromptTemplate(
392
  input_variables=["context", "question"],
@@ -409,6 +424,35 @@ async def rag_classify(query: DiseaseQuery):
409
  except Exception as e:
410
  raise HTTPException(status_code=500, detail=str(e))
411
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
412
  class GeoSenQuery(BaseModel):
413
  region: str
414
  crop_type: str
 
101
  # 5. RAG Pipeline
102
  llm = ChatGoogleGenerativeAI(model=gemini_model_name, temperature=0.7)
103
 
104
+ def create_prompt(disease_name, severity,language="english"):
105
+ prompt = f"""
106
+ for the next prompt you should answer with the following language and it is obligatory : {language}
107
+ """ +"""
108
+ You are an expert in plant disease treatment for a platform called Growth.
109
  Use the following context to answer the user's question.
110
  If you don't know the answer, try to find the answer from the context try to make similair answer to the context.
111
  If the question is not related to the context, Generate from your exsisting knowlodge about the context.
112
+
113
  Context:
114
  {context}
115
 
 
130
  - Organic:"""
131
  return prompt
132
 
133
def create_prompt_chat(question, language):
    """Build the chat prompt used by the /RAGChat endpoint.

    The user's *question* and the target *language* are embedded directly,
    while a literal ``{context}`` placeholder is left untouched so the
    returned string can later serve as a retrieval prompt template.
    """
    # Mandatory language directive goes first so the model sees it before
    # anything else.
    language_part = f"""
    for the next prompt you should answer with the following language and it is obligatory : {language}
    """
    # Persona and retrieval instructions; ``{context}`` is deliberately NOT
    # an f-string field — it is filled in later by the template engine.
    instructions_part = """
    You are a chatbot called Growth, and you assist users with plant leaf disease identification and solutions.
    Use the following context to answer the user's question.
    If you don't know the answer, try to find the answer from the context try to make similair answer to the context.
    If the question is not related to the context, Generate from your exsisting knowlodge about the context.
    Context:
    {context}
    Answer the following question:
    """
    return language_part + instructions_part + f"{question}"
146
 
147
 
148
  # Load the saved model
 
397
class DiseaseQuery(BaseModel):
    """Request body for the /RAG endpoint."""
    disease: str   # disease name injected into the RAG prompt
    severity: str  # "normal" or "severe"
    language: str  # language the generated answer must be written in
401
  @app.post("/RAG")
402
  async def rag_classify(query: DiseaseQuery):
403
  try:
404
+ prompt_template = create_prompt(query.disease, query.severity,query.language)
405
 
406
  prompt = PromptTemplate(
407
  input_variables=["context", "question"],
 
424
  except Exception as e:
425
  raise HTTPException(status_code=500, detail=str(e))
426
 
427
class ChatQuery(BaseModel):
    """Request body for the /RAGChat endpoint."""
    question: str  # free-form user question for the chatbot
    language: str  # language the answer must be written in
430
@app.post("/RAGChat")
async def rag_chat(query: ChatQuery):
    """Answer a free-form chat question through the RAG pipeline.

    Builds a chat prompt (language directive + persona + literal
    ``{context}`` placeholder), wires it into a "stuff" RetrievalQA chain
    over the shared retriever, and returns the model's answer.
    Any failure is surfaced as an HTTP 500 with the error message.
    """
    try:
        # BUG FIX: this endpoint previously called create_prompt(), whose
        # signature is (disease_name, severity, language="english") — so the
        # user's question was silently bound to disease_name and the language
        # to severity. The chat-specific builder is the intended callee.
        prompt_template = create_prompt_chat(query.question, query.language)

        # NOTE(review): the template only contains a literal {context}
        # placeholder — the question is already inlined by create_prompt_chat.
        # "question" is kept in input_variables to mirror the /RAG endpoint;
        # confirm the installed LangChain version does not validate the
        # template against it.
        prompt = PromptTemplate(
            input_variables=["context", "question"],
            template=prompt_template,
        )

        qa = RetrievalQA.from_chain_type(
            llm=llm,
            chain_type="stuff",
            retriever=retriever,
            chain_type_kwargs={"prompt": prompt},
            verbose=True,
        )

        # Typo fixed in the instruction string ("questino" -> "question").
        # NOTE(review): no chat history is actually passed to the chain here,
        # despite what the instruction says — confirm whether memory was
        # intended to be wired in.
        final_query = f"Answer the Provided question like a human, and remember the chat history {query.question}"
        result = qa.run(final_query)

        return {"answer": result}

    except Exception as e:
        # Surface any pipeline error as a 500 so the client sees the cause.
        raise HTTPException(status_code=500, detail=str(e))
455
+
456
  class GeoSenQuery(BaseModel):
457
  region: str
458
  crop_type: str