Spaces:
Sleeping
Sleeping
| import requests | |
| import pandas as pd | |
| import os | |
| import gradio as gr | |
| from transformers import pipeline | |
# Load environment variables
NOTION_TOKEN = os.getenv('NOTION_TOKEN')  # Notion integration token (secret); None if unset
DATABASE_ID = os.getenv('DATABASE_ID')    # Id of the Notion database to query; None if unset
# Common headers for every Notion REST API request.
headers = {
    "Authorization": f"Bearer {NOTION_TOKEN}",
    "Content-Type": "application/json",
    "Notion-Version": "2022-06-28"  # Pinned API version expected by the calls below
}
def get_notion_entries(timeout=30):
    """Query all entries from the configured Notion database.

    Args:
        timeout: Seconds to wait for the HTTP response. The original call
            had no timeout and could hang indefinitely; the default keeps
            callers backward compatible.

    Returns:
        The parsed JSON response body (dict) from the Notion query endpoint.

    Raises:
        requests.HTTPError: If Notion returns a non-2xx status.
        requests.Timeout: If no response arrives within ``timeout`` seconds.
    """
    url = f"https://api.notion.com/v1/databases/{DATABASE_ID}/query"
    # NOTE(review): this fetches only the first page of results (Notion caps
    # a query at 100 entries); follow 'next_cursor' if the database grows.
    response = requests.post(url, headers=headers, timeout=timeout)
    response.raise_for_status()  # Raise an exception for HTTP errors
    return response.json()
def get_full_text(prop):
    """Concatenate the text content of a Notion text-bearing property.

    The parameter was renamed from ``property`` (which shadowed the builtin);
    every call in this file is positional, so callers are unaffected.

    Args:
        prop: A Notion property object (dict) holding a 'rich_text' or
            'title' fragment list, or None/empty.

    Returns:
        The joined ``['text']['content']`` of every fragment, or "" when the
        property is missing or has no fragments.
    """
    if not prop:
        return ""
    # A text-bearing property stores its fragments under one of these keys;
    # 'rich_text' is checked first, matching the original branch order.
    for key in ('rich_text', 'title'):
        fragments = prop.get(key)
        if fragments:
            return ''.join(part['text']['content'] for part in fragments)
    return ""
def notion_to_dataframe(notion_data):
    """Flatten a Notion query response into a pandas DataFrame.

    Each entry in ``notion_data['results']`` becomes one row; text
    properties are extracted with ``get_full_text`` and URL-typed
    properties contribute their 'url' field (or None when absent).
    """
    columns = ['Condition Name', 'Condition Full Name', 'Villain',
               'Hero', 'Hero Image', 'Product URL']
    rows = []
    for entry in notion_data['results']:
        props = entry['properties']
        rows.append({
            'Condition Name': get_full_text(props['condition_name']),
            'Condition Full Name': get_full_text(props['condition_full_name']),
            'Villain': get_full_text(props['villain']),
            'Hero': get_full_text(props['hero']),
            'Hero Image': props['hero_image']['url'] if props['hero_image'] else None,
            'Product URL': props['product_url']['url'] if props['product_url'] else None,
        })
    # Pass columns explicitly so an empty result set still yields the schema.
    return pd.DataFrame(rows, columns=columns)
# Fetch data from the Notion database at import time (one-shot snapshot;
# the app must be restarted to pick up new entries).
notion_data = get_notion_entries()
# Convert to DataFrame used as the enrichment lookup table below.
df = notion_to_dataframe(notion_data)
# Initialize the image classification pipeline
# (ViT model fine-tuned on the HAM10000 skin-lesion dataset).
classifier = pipeline("image-classification", model="ahishamm/vit-base-HAM-10000-patch-32")
def classify_image(image):
    """Classify a skin image and enrich the top prediction with Notion data.

    Args:
        image: A PIL image, as supplied by the Gradio Image input.

    Returns:
        A Markdown string containing the condition name, confidence,
        narrative text, hero image and product link — or a short fallback
        message when the predicted label has no matching Notion row.
    """
    results = classifier(image)
    top = results[0]  # pipeline returns predictions sorted best-first
    condition_name = top['label']
    confidence = round(top['score'] * 100, 2)

    # Guard against labels without an enrichment row: the original
    # ``.iloc[0]`` raised IndexError on an empty selection.
    matches = df[df['Condition Name'] == condition_name]
    if matches.empty:
        return f"**{condition_name} ({confidence}% confident)**\n\nNo additional information is available for this condition."

    condition_data = matches.iloc[0]
    condition_full_name = condition_data['Condition Full Name']
    villain = condition_data['Villain']
    hero = condition_data['Hero']
    hero_image = condition_data['Hero Image']
    product_url = condition_data['Product URL']
    enriched_output = f"""
**{condition_full_name} ({confidence}% confident)**
{villain}
{hero} Find out if he is also your hero!
![Hero Image]({hero_image})
[Learn More]({product_url})
"""
    return enriched_output
# Create the Gradio interface: one image in, one Markdown panel out.
iface = gr.Interface(
    fn=classify_image,              # receives the uploaded image as a PIL object
    inputs=gr.Image(type="pil"),
    outputs=gr.Markdown(),          # classify_image returns a Markdown string
    title="Skin Condition Classifier",
    description="Upload an image to classify the skin condition and get enriched data from Notion."
)
# Start the app (blocks until the server is stopped).
iface.launch()