# DataCollection / app.py
# Source: Hugging Face Space "Abu1998/DataCollection" (commit d91441c).
# Collects appointment records via a Gradio form and syncs them to a
# Hugging Face dataset repository as a CSV file.
import gradio as gr
import pandas as pd
from datetime import datetime
import os
from huggingface_hub import Repository, HfFolder

# Read the Hugging Face token from the environment (never hardcode credentials).
HF_TOKEN = os.getenv("HF_TOKEN")

# Persist the token for huggingface_hub authentication -- but only when one is
# actually set: HfFolder.save_token(None) would corrupt the stored token.
if HF_TOKEN:
    HfFolder.save_token(HF_TOKEN)

# Local CSV used as the working copy of the collected appointment data.
STORAGE_PATH = "appointments.csv"

# BUG FIX: the previous value "/huggingface.co/datasets/Abu1998/DataCollection"
# is neither a valid URL (missing scheme) nor a bare "<user>/<repo>" id, so
# Repository(clone_from=REPO_ID) could not resolve it. Use the full https URL,
# which Repository accepts directly without needing a repo_type argument.
REPO_ID = "https://huggingface.co/datasets/Abu1998/DataCollection"
# Function to upload CSV to Hugging Face
def upload_to_huggingface():
    """Push the local appointments CSV to the Hugging Face dataset repo.

    Clones (or reuses) the dataset repository working tree in ``temp_repo``,
    copies the current local CSV into it, then commits and pushes the change.

    Returns:
        str: a human-readable success or error message. This function never
        raises -- the Gradio UI displays whatever string comes back.
    """
    try:
        # Guard: with no locally saved appointment yet, pd.read_csv below
        # would raise FileNotFoundError; fail early with a clear message.
        if not os.path.exists(STORAGE_PATH):
            return f"Error uploading data: local file '{STORAGE_PATH}' not found"

        # Clone the dataset repository (Repository reuses temp_repo when it
        # already holds a clone, so repeated submissions are fine) and sync
        # to the latest remote state before committing on top of it.
        # NOTE(review): use_auth_token is deprecated in newer huggingface_hub
        # releases in favor of token= -- confirm the installed version.
        repo = Repository(local_dir="temp_repo", clone_from=REPO_ID, use_auth_token=HF_TOKEN)
        repo.git_pull()

        # Copy the local data into the working tree of the cloned repo.
        # (The original also called os.makedirs("temp_repo") here, which was
        # redundant: the clone above already created the directory.)
        df = pd.read_csv(STORAGE_PATH)
        df.to_csv(os.path.join("temp_repo", "appointments.csv"), index=False)

        # Stage, commit, and push the updated CSV.
        repo.git_add("appointments.csv")
        repo.git_commit("Updated appointments data")
        repo.git_push()
        return "Data uploaded to Hugging Face successfully!"
    except Exception as e:
        # Best-effort boundary: surface the failure as text for the UI
        # instead of crashing the app.
        return f"Error uploading data: {e}"
# Function to save appointment data locally and upload to Hugging Face
def save_and_upload_to_csv(appointment_timing, services, products, contact, customer_name, rating, location, key_points, price):
    """Append one appointment record to the local CSV, then sync it to the Hub.

    The parameters mirror the Gradio form fields one-to-one. ``services`` may
    be a list (the form uses a multiselect dropdown); it is stored via pandas'
    default string rendering, matching the original behavior.

    Returns:
        str: the status message produced by ``upload_to_huggingface()``.
    """
    columns = [
        "Date", "Appointment", "Appointment Timing", "Services", "Products",
        "Contact", "Customer Name", "Rating", "Location", "Key-points", "Price",
    ]

    # Load existing records, or start from an empty frame on first run.
    # (The original wrote an empty CSV to disk here only to overwrite it
    # below -- building the frame in memory avoids the double write.)
    if os.path.exists(STORAGE_PATH):
        df = pd.read_csv(STORAGE_PATH)
    else:
        df = pd.DataFrame(columns=columns)

    # Auto-fill the record date and a simple sequential appointment id.
    date = datetime.now().strftime("%Y-%m-%d")
    appointment = len(df) + 1

    new_row = pd.DataFrame([{
        "Date": date,
        "Appointment": appointment,
        "Appointment Timing": appointment_timing,
        "Services": services,
        "Products": products,
        "Contact": contact,
        "Customer Name": customer_name,
        "Rating": rating,
        "Location": location,
        "Key-points": key_points,
        "Price": price,
    }])

    # Concatenating an empty all-NA frame triggers a pandas FutureWarning and
    # can mangle dtypes; when there is no prior data just use the new row.
    df = new_row if df.empty else pd.concat([df, new_row], ignore_index=True)
    df.to_csv(STORAGE_PATH, index=False)

    # Mirror the updated local data to the Hugging Face dataset repo.
    return upload_to_huggingface()
# Gradio Interface
# Gradio Interface: a simple form whose fields map one-to-one onto the
# columns written by save_and_upload_to_csv.
with gr.Blocks() as app:
    gr.Markdown("# Appointment Data Storage Application")
    appointment_timing = gr.Textbox(label="Appointment Timing")
    # Multiselect dropdown: the callback receives a LIST of chosen services.
    services = gr.Dropdown(
        label="Services",
        choices=["Full arm Rica", "Full leg", "Underarms", "Eyebrow", "Upper lips"],
        multiselect=True
    )
    products = gr.Textbox(label="Products")
    contact = gr.Textbox(label="Contact")
    customer_name = gr.Textbox(label="Customer Name")
    rating = gr.Radio(label="Rating", choices=["Very good", "Good", "Normal", "Bad", "Too bad"])
    location = gr.Textbox(label="Location")
    key_points = gr.Textbox(label="Key-points")
    # Prices are fixed tiers; stored as strings, not numbers.
    price = gr.Dropdown(label="Price", choices=["999", "1499", "2499", "3499", "4499"])
    submit_button = gr.Button("Submit")
    output = gr.Textbox(label="Output")
    # Wire the form to the save-and-upload callback; the returned status
    # string is shown in the Output textbox. Input order must match the
    # parameter order of save_and_upload_to_csv.
    submit_button.click(
        save_and_upload_to_csv,
        inputs=[
            appointment_timing, services, products, contact, customer_name, rating, location, key_points, price
        ],
        outputs=output
    )
# Launch the app
# NOTE(review): share=True exposes the app through a public Gradio tunnel URL.
app.launch(share=True)