# chatbot / app.py
# (Hugging Face Space page metadata: author SHAILJA1, "Update app.py", commit da09273 verified)
import gradio as gr
# -----------------------------
# LlamaIndex (LLM Layer)
# -----------------------------
from llama_index.llms.huggingface import HuggingFaceLLM

# Backing checkpoint for both the model and its tokenizer.
_MODEL_ID = "google/flan-t5-base"

# NOTE: with do_sample=False decoding is greedy, so the temperature value
# has no effect on generation (kept to preserve the original kwargs).
_GENERATE_KWARGS = {
    "temperature": 0.3,
    "do_sample": False,
}

# Small seq2seq LLM served locally via HuggingFace weights.
llm = HuggingFaceLLM(
    model_name=_MODEL_ID,
    tokenizer_name=_MODEL_ID,
    context_window=512,
    max_new_tokens=256,
    generate_kwargs=_GENERATE_KWARGS,
)
# -----------------------------
# LangChain (Prompt + Chain)
# -----------------------------
from langchain_core.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms.base import LLM
# -----------------------------
# Adapter: LlamaIndex → LangChain
# -----------------------------
class LlamaIndexLLMAdapter(LLM):
    """Expose the module-level LlamaIndex ``llm`` as a LangChain ``LLM``.

    LangChain chains invoke ``_call`` to obtain a completion; this adapter
    delegates to ``llm.complete`` and returns the plain response text.
    """

    @property
    def _llm_type(self) -> str:
        # Identifier LangChain uses for serialization/telemetry.
        return "llamaindex-huggingface"

    def _call(self, prompt: str, stop=None, run_manager=None, **kwargs) -> str:
        """Complete *prompt* via the LlamaIndex LLM and return its text.

        ``run_manager`` and ``**kwargs`` are accepted for compatibility with
        newer LangChain releases, which pass callback arguments to ``_call``;
        the original two-argument signature raises TypeError there. ``stop``
        sequences are not supported by this adapter and are ignored.
        """
        response = llm.complete(prompt)
        return response.text
# LangChain-compatible wrapper around the LlamaIndex model.
langchain_llm = LlamaIndexLLMAdapter()

# Prompt with a single {question} slot; from_template infers the
# input variable from the placeholder.
_TEMPLATE = """
You are a helpful AI assistant.
Answer clearly and concisely.
Question: {question}
Answer:
"""
prompt = PromptTemplate.from_template(_TEMPLATE)

# Chain wiring the adapter LLM to the prompt template.
chain = LLMChain(llm=langchain_llm, prompt=prompt)
# -----------------------------
# Chat Function
# -----------------------------
def chat(user_input):
    """Run one chat turn through the LangChain chain.

    Args:
        user_input: Raw text from the Gradio textbox. May be ``None`` or
            whitespace-only when the box is empty/cleared.

    Returns:
        The model's answer as plain text, or a prompt to type something
        when the input is empty.
    """
    # Guard both None (cleared textbox) and whitespace-only input; the
    # original crashed with AttributeError when user_input was None.
    if not user_input or not user_input.strip():
        return "Please enter a message."
    # chain.run returns the raw completion; strip stray whitespace the
    # model may emit around the answer.
    return chain.run(user_input).strip()
# -----------------------------
# Gradio UI
# -----------------------------
# Multi-line text input for the user's question.
_question_box = gr.Textbox(lines=2, placeholder="Ask something...")

demo = gr.Interface(
    fn=chat,
    inputs=_question_box,
    outputs="text",
    title="LangChain + LlamaIndex Chatbot",
    description="Integrated chatbot (No RAG, No Vector DB)",
)
demo.launch()