Spaces status: Runtime error
| # FRONTEND: Python library that makes it super easy to build simple user interfaces (UIs) | |
| import gradio as gr | |
| # BACKEND: tool from Hugging Face library to send messages to AI models and get answers back | |
| from huggingface_hub import InferenceClient | |
| # Helpful commentary from ChatGPT: | |
| # Gradio is the face and mouth — it lets people talk to the robot. | |
| # InferenceClient is the brain connector — it lets your robot talk to a super-smart brain (the Hugging Face model) and get answers. | |
| from sentence_transformers import SentenceTransformer | |
| # a Python library that allows you to turn sentences into numerical vector embeddings | |
| import torch | |
| # a machine learning library that that performs cosine similarity calculations | |
| import numpy as np | |
# Load the knowledge base (reused from the sentiment analysis lab) into memory.
with open("essay_writing.txt", "r", encoding="utf-8") as f:
    essay_writing = f.read()

# Split on blank lines into paragraph-sized chunks, dropping empty pieces.
cleaned_chunks = [part.strip() for part in essay_writing.split("\n\n") if part.strip()]

# Load the embedding model and embed every chunk once up front, so that
# retrieval later only has to embed the incoming query.
model = SentenceTransformer('all-MiniLM-L6-v2')
chunk_embeddings = model.encode(cleaned_chunks, convert_to_tensor=True)
def pull_relevant_info(query, top_k=3):
    """Return the knowledge-base chunks most similar to `query`.

    Embeds the query, computes cosine similarity against the precomputed
    chunk embeddings, and joins the best `top_k` chunks with blank lines.

    Args:
        query: The user's question (a plain string).
        top_k: Maximum number of chunks to return (clamped to the number
            of available chunks).

    Returns:
        A single string of the top-matching chunks separated by "\n\n".
    """
    query_embedding = model.encode(query, convert_to_tensor=True)
    # Normalize both sides so the matmul below is cosine similarity.
    query_embedding = query_embedding / query_embedding.norm()
    norm_chunk_embeddings = chunk_embeddings / chunk_embeddings.norm(dim=1, keepdim=True)
    similarities = torch.matmul(norm_chunk_embeddings, query_embedding)
    # FIX: torch.topk raises if k exceeds the number of chunks, so clamp it.
    k = min(top_k, len(cleaned_chunks))
    top_indices = torch.topk(similarities, k=k).indices.cpu().numpy()
    return "\n\n".join(cleaned_chunks[i] for i in top_indices)
# Inference client for the hosted Zephyr-7B chat model; provider="auto" lets
# huggingface_hub choose an available inference provider at call time.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", provider="auto")
def respond(message, history):
    """Stream a retrieval-grounded chat reply for `message`.

    Pulls relevant knowledge-base chunks, puts them in a system prompt,
    replays prior turns from `history`, and yields the growing response
    string as tokens stream back from the model (Gradio re-renders each
    yielded value).

    Args:
        message: The user's latest message.
        history: Prior conversation as a list of {"role", "content"} dicts
            (may be empty/None on the first turn).

    Yields:
        The accumulated response text after each streamed token.
    """
    info = pull_relevant_info(message, top_k=3)
    system_message = (f"You are a friendly chatbot. Use the following information to help answer the user's question:\n\n{info}\n\n")
    messages = [{"role": "system", "content": system_message}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    for message_chunk in client.chat_completion(
        messages,
        max_tokens=100,
        stream=True
    ):
        token = message_chunk['choices'][0]['delta'].get('content', '')
        # BUG FIX: the original wrote `respond += token` — assigning to the
        # function's own name makes `respond` a local variable, so the first
        # streamed token raised UnboundLocalError. Accumulate into `response`.
        response += token
        yield response
# Markdown heading rendered at the top of the UI.
title = "# Writing Tutor"
# Markdown blurb listing the essay-writing topics the tutor can help with.
topics = """
### Meet your friendly writing tutor, an AI-driven partner to turn to when you need help writing an essay.
Feel free to ask me about the topics below:
- How to organize your essay
- What a thesis is and how to write it
- How to craft an introduction paragraph
- What your body paragraphs should accomplish
- Important things to include in your conclusion
- Examples of topic sentences
"""
# Assemble the UI: a heading/topics panel stacked above a streaming chat box.
with gr.Blocks(theme='JohnSmith9982/small_and_pretty') as chatbot:
    with gr.Row():
        with gr.Column():
            gr.Markdown(title)
            gr.Markdown(topics)
    with gr.Row():
        with gr.Column():
            gr.ChatInterface(
                fn=respond,
                type="messages"
            )
    # BUG FIX: removed a leftover question/answer Textbox pair and Submit
    # button whose click handler referenced an undefined `query_model`,
    # crashing the app with a NameError at startup. The ChatInterface above
    # already provides the question/answer surface via `respond`.

chatbot.launch()