Spaces:
Sleeping
Sleeping
fix
Browse files
app.py
CHANGED
|
@@ -1,35 +1,112 @@
|
|
| 1 |
-
import gradio as gr
|
| 2 |
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
if key in message_lower:
|
| 15 |
-
return response
|
| 16 |
|
| 17 |
-
|
|
|
|
|
|
|
| 18 |
|
| 19 |
-
#
|
| 20 |
-
with
|
| 21 |
-
|
|
|
|
| 22 |
|
| 23 |
-
|
| 24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
|
| 26 |
-
|
| 27 |
-
response = chatbot_response(message, history)
|
| 28 |
-
history.append((message, response))
|
| 29 |
-
return "", history
|
| 30 |
|
| 31 |
-
|
|
|
|
| 32 |
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
|
| 3 |
+
# Visual theme for the app: start from Gradio's Ocean preset, then override
# individual CSS-variable-style tokens. '*primary_400' etc. reference the
# theme's own color palette; the button fills are CSS linear-gradients.
_base_theme = gr.themes.Ocean(
    secondary_hue="lime",
    neutral_hue="teal",
    text_size="lg",
    spacing_size="lg",
)

theme = _base_theme.set(
    # Page background (light / dark mode variants).
    body_background_fill='*primary_400',
    body_background_fill_dark='*primary_950',
    # Body text uses the lightest primary shade in both modes.
    body_text_color='*primary_50',
    body_text_color_dark='*primary_50',
    # Panel / component backgrounds.
    background_fill_primary_dark='*secondary_500',
    background_fill_secondary='*primary_700',
    background_fill_secondary_dark='*primary_900',
    # Gradient fills for primary buttons (normal, dark mode, hover).
    button_primary_background_fill='linear-gradient(120deg, *secondary_800 0%, *primary_300 60%, *primary_800 100%)',
    button_primary_background_fill_dark='linear-gradient(120deg, *secondary_400 0%, *primary_400 60%, *primary_600 100%)',
    button_primary_background_fill_hover='linear-gradient(120deg, *secondary_400 0%, *primary_300 60%, *neutral_300 100%)',
)
|
| 20 |
|
| 21 |
+
import random
|
| 22 |
+
from huggingface_hub import InferenceClient
|
|
|
|
|
|
|
| 23 |
|
| 24 |
+
# SEMANTIC SEARCH STEP 1
|
| 25 |
+
from sentence_transformers import SentenceTransformer
|
| 26 |
+
import torch
|
| 27 |
|
| 28 |
+
# SEMANTIC SEARCH STEP 2 --> EDIT WITH YOUR OWN KNOWLEDGE BASE WHEN READY
# Load the knowledge base the chatbot grounds its answers in. The file must
# sit next to app.py in the Space repository.
with open("Skin_cancer_harvard.txt", "r", encoding="utf-8") as file:
    Skin_cancer_harvard_text = file.read()
print(Skin_cancer_harvard_text)  # debug: dumps the entire document to the startup log
|
| 32 |
|
| 33 |
+
# SEMANTIC SEARCH STEP 3
def preprocess_text(text):
    """Split the raw knowledge-base text into clean retrieval chunks.

    Each newline-delimited line becomes one chunk. Lines are whitespace-
    stripped and blank lines are dropped — the original kept empty strings,
    which were then embedded and could be retrieved as meaningless "context".

    Args:
        text: The full knowledge-base document as a single string.

    Returns:
        list[str]: Non-empty, whitespace-stripped chunks, in document order.
    """
    # Walrus binds the stripped line once so we can both filter and keep it.
    # (Startup debug prints removed — they dumped every chunk to the log.)
    cleaned_chunks = [
        stripped
        for line in text.strip().split("\n")
        if (stripped := line.strip())
    ]
    return cleaned_chunks
|
| 44 |
|
| 45 |
+
cleaned_chunks = preprocess_text(Skin_cancer_harvard_text)  # chunk the document once at startup
|
|
|
|
|
|
|
|
|
|
| 46 |
|
| 47 |
+
# SEMANTIC SEARCH STEP 4
# Lightweight sentence-embedding model, shared by the chunk and query encoders.
# Downloaded from the HF Hub on first run.
model = SentenceTransformer('all-MiniLM-L6-v2')
|
| 49 |
|
| 50 |
+
def create_embeddings(text_chunks):
    """Embed every knowledge-base chunk with the module-level model.

    Args:
        text_chunks: List of strings to embed.

    Returns:
        A 2-D tensor with one embedding row per input chunk.
    """
    embeddings = model.encode(text_chunks, convert_to_tensor=True)
    # Debug output: the raw tensor and its (num_chunks, embedding_dim) shape.
    print(embeddings)
    print(embeddings.shape)
    return embeddings
|
| 55 |
+
|
| 56 |
+
chunk_embeddings = create_embeddings(cleaned_chunks)  # embed the whole knowledge base once at startup
|
| 57 |
+
|
| 58 |
+
# SEMANTIC SEARCH STEP 5
def get_top_chunks(query, chunk_embeddings, text_chunks, k=3):
    """Return the text chunks most similar to *query* by cosine similarity.

    Args:
        query: The user's question as a string.
        chunk_embeddings: 2-D tensor of chunk embeddings (one row per chunk).
        text_chunks: The chunks corresponding row-for-row to chunk_embeddings.
        k: Maximum number of chunks to return (default 3, as before).

    Returns:
        list[str]: Up to *k* chunks, most similar first.
    """
    query_embedding = model.encode(query, convert_to_tensor=True)
    # L2-normalize both sides so the dot product equals cosine similarity.
    query_normalized = query_embedding / query_embedding.norm()
    chunks_normalized = chunk_embeddings / chunk_embeddings.norm(dim=1, keepdim=True)
    similarities = torch.matmul(chunks_normalized, query_normalized)
    # Clamp k: topk raises if asked for more results than there are chunks.
    top_indices = torch.topk(similarities, k=min(k, len(text_chunks))).indices
    # Bug fix: index the text_chunks PARAMETER. The original indexed the
    # module-level `cleaned_chunks`, silently ignoring its own argument.
    return [text_chunks[i] for i in top_indices]
|
| 72 |
+
|
| 73 |
+
# Startup smoke test: verify retrieval works end-to-end before serving.
top_results = get_top_chunks('What causes skin cancer?', chunk_embeddings, cleaned_chunks)
print(top_results)
|
| 75 |
+
|
| 76 |
+
# LLM Client
# Hosted Hugging Face inference endpoint used for every chat completion.
client = InferenceClient("microsoft/phi-4")
|
| 78 |
+
|
| 79 |
+
def respond(message, history):
    """Gradio chat callback: retrieve grounding chunks, then query the LLM.

    Args:
        message: The user's latest message.
        history: Prior turns in OpenAI-style {'role', 'content'} dicts
            (Gradio 'messages' format); may be empty or falsy on first turn.

    Returns:
        str: The assistant's reply, whitespace-stripped.
    """
    # Retrieve the knowledge-base chunks most relevant to this message and
    # inline them into the system prompt.
    info = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
    system_prompt = f'You are a friendly chatbot using {info} to answer questions. You are always willing to help and want the best for the user. You need to emphasize that you are not a medical professional at the end of the message, but you are here to help to the best of your ability. Be confident and comforting to the users when helping them. In your response add suggestions for a couple follow up questions to further the conversation with the chatbot.'
    messages = [{'role': 'system', 'content': system_prompt}]
    # Replay prior turns (no-op when history is empty or None).
    messages.extend(history or [])
    messages.append({'role': 'user', 'content': message})
    response = client.chat_completion(messages, max_tokens=500, top_p=0.8)
    return response['choices'][0]['message']['content'].strip()
|
| 87 |
+
|
| 88 |
+
# Page layout: banner across the top; left column holds the mascot and the
# Teachable Machine link, right column holds the chat interface.
with gr.Blocks(theme=theme) as chatbot:
    # Banner row. NOTE(review): Row(scale=...) is unusual — confirm this
    # Gradio version accepts a scale argument on Row.
    with gr.Row(scale=1):
        gr.Image("Capstone_Banner.png")
    with gr.Row():
        with gr.Column(scale=1):
            gr.Image("Aloe_the_Turtle.png")
            with gr.Row():
                gr.Markdown("Click the button below to access the teachable machine, an AI Visual Scanner to detect Skin Cancer. The main purpose of this teachable machine is to check if you have a cancerous or non-cancerous mole. Place your mole near your camera and the analysis will be represented below. Note that these results are not 100% accurate, so be sure to consult a medical professional if you have any concerns.")
            # External link out to the Teachable Machine mole classifier.
            with gr.Row(scale=1):
                gr.Button(value="AI Visual Testing Moles for Skin Cancer!", link="https://teachablemachine.withgoogle.com/models/onfoEa0p-/")
        with gr.Column(scale=3):
            # Chat UI wired to respond(); retrieval + LLM call happen per message.
            gr.ChatInterface(
                respond,
                title="Your Personal Skin Chatbot!",
                description="Welcome, my name is Aloe the Turtle and I am here to help you address any dermatology-related questions you may have on topics such as Skin Cancer, Acne, Eczema, and much more. Just remember, while I have comprehensive knowledge on skin concerns, I am not a medical professional!",
                type='messages',
                # NOTE(review): theme= here is likely redundant inside
                # gr.Blocks(theme=theme) — the Blocks theme already applies; confirm.
                theme=theme,
                examples=[
                    "What ingredients should I use to clear my Acne?",
                    "What can I do to proactively prevent Skin Cancer?",
                    "How do I tell the difference between eczema and psoriasis?"
                ]
            )

chatbot.launch()
|