VcRlAgent committed on
Commit
b837f76
·
1 Parent(s): bfd34e2

Starter LLM Inference Call

Browse files
Files changed (2) hide show
  1. app.py +3 -2
  2. requirements.txt +1 -0
app.py CHANGED
@@ -11,7 +11,7 @@ import gradio as gr
11
  #from openai import OpenAI
12
  from huggingface_hub import InferenceClient
13
  from hybrid_rag import HybridJiraRAG
14
-
15
 
16
  # Sample Jira data structure
17
  jira_data = {
@@ -94,7 +94,8 @@ def ask_with_formatting(question: str):
94
  input_variables=["question", "result"]
95
  )
96
 
97
- format_chain = LLMChain(llm=llm, prompt=format_prompt)
 
98
  formatted = format_chain.invoke({
99
  "question": question,
100
  "result": raw_result
 
11
  #from openai import OpenAI
12
  from huggingface_hub import InferenceClient
13
  from hybrid_rag import HybridJiraRAG
14
+ from langchain_core.output_parsers import StrOutputParser
15
 
16
  # Sample Jira data structure
17
  jira_data = {
 
94
  input_variables=["question", "result"]
95
  )
96
 
97
+ #format_chain = LLMChain(llm=llm, prompt=format_prompt)
98
+ format_chain = format_prompt | llm | StrOutputParser()
99
  formatted = format_chain.invoke({
100
  "question": question,
101
  "result": raw_result
requirements.txt CHANGED
@@ -3,6 +3,7 @@ langchain
3
  langchain-community
4
  langchain-huggingface
5
  langchain-openai
 
6
  transformers
7
  torch
8
  sentence-transformers
 
3
  langchain-community
4
  langchain-huggingface
5
  langchain-openai
6
+ langchain-core
7
  transformers
8
  torch
9
  sentence-transformers