| import gradio as gr | |
| import torch | |
| from sentence_transformers import SentenceTransformer, util | |
# Sentence-embedding model used for both corpus chunks and incoming queries.
model = SentenceTransformer("all-MiniLM-L6-v2")

# Tiny built-in knowledge base: one fact per line about supply and demand.
supply_demand_text = """
Supply refers to the quantity of a good or service that a producer is willing and able to offer for sale at various prices.
Demand refers to how much of a product consumers are willing and able to purchase at different prices.
When supply exceeds demand, there is a surplus.
When demand exceeds supply, there is a shortage.
Equilibrium is the point where supply equals demand.
Price acts as a signal for both producers and consumers.
Market dynamics are influenced by shifts in supply and demand.
"""

# Split the corpus into one chunk per non-empty line, trimming whitespace.
# (Comprehension replaces the original strip/append loop.)
cleaned_chunks = [
    line.strip()
    for line in supply_demand_text.strip().split("\n")
    if line.strip()
]

# Pre-compute chunk embeddings once at startup; get_top_chunks() reuses them
# for every query instead of re-encoding the corpus per request.
chunk_embeddings = model.encode(cleaned_chunks, convert_to_tensor=True)
def get_top_chunks(query, k=3):
    """Return the k corpus chunks most similar to *query* by cosine similarity.

    Args:
        query: The user's question (a plain string).
        k: How many chunks to return (default 3, matching the original
           hard-coded behavior). Clamped to the corpus size so torch.topk
           cannot raise when the corpus has fewer than k chunks.

    Returns:
        A list of chunk strings, ordered from most to least similar.
    """
    query_embedding = model.encode(query, convert_to_tensor=True)
    # Normalize both sides so the dot product below is cosine similarity.
    query_embedding = query_embedding / query_embedding.norm()
    normalized_chunks = chunk_embeddings / chunk_embeddings.norm(dim=1, keepdim=True)
    similarities = torch.matmul(normalized_chunks, query_embedding)
    # Guard: topk raises if k exceeds the number of rows.
    k = min(k, len(cleaned_chunks))
    top_indices = torch.topk(similarities, k=k).indices
    return [cleaned_chunks[i] for i in top_indices]
def chatbot_response(message, history):
    """Gradio ChatInterface callback: answer with the top retrieved chunks.

    Bug fix: gr.ChatInterface expects this callback to return ONLY the reply
    string — it maintains the conversation history itself. The original
    returned a (reply, history) tuple and mutated the history list gradio
    passed in, which corrupts the chat display. *history* is accepted to
    satisfy the callback signature but is intentionally unused.

    Args:
        message: The user's latest message.
        history: Prior conversation turns, managed by gradio (unused).

    Returns:
        A numbered, newline-joined list of the retrieved chunks.
    """
    top_chunks = get_top_chunks(message)
    return "\n".join(
        f"{rank}. {chunk}" for rank, chunk in enumerate(top_chunks, start=1)
    )
# Wire the RAG callback into a Gradio chat UI and start the local server.
# ChatInterface invokes chatbot_response with (message, history) per turn.
chatbot = gr.ChatInterface(chatbot_response, title="RAG Chatbot")
chatbot.launch()