Spaces:
Sleeping
Sleeping
Commit
·
6f17894
1
Parent(s):
6f0e56d
Update app.py
Browse files
app.py
CHANGED
|
@@ -50,8 +50,10 @@ prompt = PromptTemplate(
 def Get_Inference(text_input):
     llm = ChatGoogleGenerativeAI(model="gemini-pro")
     # Assuming 'prompt', 'llm', and 'output_parser' are defined somewhere in your code
-    prompt =
-
+    prompt = PromptTemplate(
+        template="Analyze the provided Customer and Agent Conversation in Mixed English and Roman Urdu Critically.\n{format_instructions}\nConversation:' {text} '",
+        input_variables=["format_instructions","text"])
+    # llm = YourLLMClass() # Replace with the actual instantiation of your language model class
     # Generate the prompt with the text input
     formatted_prompt = prompt.format(format_instructions=format_instructions, text=text_input)
     # Send the prompt to the LLM and get the response