# LangChain + LlamaIndex chatbot demo (Gradio UI, no RAG / vector DB).
import gradio as gr
# -----------------------------
# LlamaIndex (LLM Layer)
# -----------------------------
from llama_index.llms.huggingface import HuggingFaceLLM
# Backend model id — used for both the model and its tokenizer.
_MODEL_ID = "google/flan-t5-base"
# Deterministic generation: do_sample=False makes temperature effectively moot,
# but both values are kept exactly as configured.
llm = HuggingFaceLLM(
    model_name=_MODEL_ID,
    tokenizer_name=_MODEL_ID,
    context_window=512,
    max_new_tokens=256,
    generate_kwargs={"temperature": 0.3, "do_sample": False},
)
# -----------------------------
# LangChain (Prompt + Chain)
# -----------------------------
from langchain_core.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms.base import LLM
# -----------------------------
# Adapter: LlamaIndex → LangChain
# -----------------------------
class LlamaIndexLLMAdapter(LLM):
    """Adapter exposing the module-level LlamaIndex ``llm`` as a LangChain LLM.

    Fix: LangChain's ``LLM`` base class invokes ``_call`` with extra keyword
    arguments (``run_manager`` and generation kwargs). The original signature
    ``_call(self, prompt, stop=None)`` raised ``TypeError`` as soon as the
    chain passed ``run_manager``; accepting ``run_manager`` and ``**kwargs``
    restores compatibility without changing behavior.
    """

    @property
    def _llm_type(self) -> str:
        # Identifier LangChain uses for serialization / telemetry.
        return "llamaindex-huggingface"

    def _call(self, prompt: str, stop=None, run_manager=None, **kwargs) -> str:
        """Delegate completion to the LlamaIndex LLM and return its text.

        ``stop`` and ``run_manager`` are accepted for interface compatibility
        but intentionally not forwarded — ``HuggingFaceLLM.complete`` does not
        take them.
        """
        response = llm.complete(prompt)
        return response.text


# Create LangChain-compatible LLM
langchain_llm = LlamaIndexLLMAdapter()
# Prompt Template
prompt = PromptTemplate(
input_variables=["question"],
template="""
You are a helpful AI assistant.
Answer clearly and concisely.
Question: {question}
Answer:
"""
)
# LangChain Chain
chain = LLMChain(
llm=langchain_llm,
prompt=prompt
)
# -----------------------------
# Chat Function
# -----------------------------
def chat(user_input):
    """Run the LLM chain on *user_input*; reject blank/whitespace-only text."""
    if user_input.strip():
        return chain.run(user_input)
    return "Please enter a message."
# -----------------------------
# Gradio UI
# -----------------------------
# Two-line input box; output is rendered as plain text.
question_box = gr.Textbox(lines=2, placeholder="Ask something...")
demo = gr.Interface(
    fn=chat,
    inputs=question_box,
    outputs="text",
    title="LangChain + LlamaIndex Chatbot",
    description="Integrated chatbot (No RAG, No Vector DB)",
)
demo.launch()