# Hugging Face Spaces page banner captured during extraction ("Spaces: Sleeping") — not part of the program.
| from fastapi import FastAPI, File, UploadFile, Form | |
| from fastapi import APIRouter, Depends, HTTPException, status | |
| import pandas as pd | |
| from google.cloud import storage | |
| import io | |
| import os | |
| import tempfile | |
| from pydantic import BaseModel | |
# FastAPI application instance. NOTE(review): the async handlers below carry no
# @app.get/@app.post decorators in this chunk — presumably lost in extraction;
# confirm the route registrations against the original file.
app = FastAPI()

# Name of the Google Cloud Storage bucket used as the backing data store.
gcs_bucket_name = "ow-stu-us-ce1-ai-platform"
def get_credentials():
    """Materialize service-account credentials from the environment to a file.

    Reads the JSON credentials string from the ``BOB`` environment variable,
    writes it to a temporary ``.json`` file (kept on disk with
    ``delete=False`` so google-cloud libraries can read it later), and
    returns the file's path.

    Returns:
        str: Path of the temporary credentials file.

    Raises:
        ValueError: If the ``BOB`` environment variable is not set.
    """
    creds_json_str = os.getenv("BOB")  # JSON credentials stored as a string
    if creds_json_str is None:
        # Bug fix: the original message named GOOGLE_APPLICATION_CREDENTIALS_JSON,
        # but the variable actually read is BOB — report the real name.
        raise ValueError("Credentials JSON not found in environment variable 'BOB'")
    # delete=False is deliberate: the file must outlive this function so that
    # the GCS client can open it via GOOGLE_APPLICATION_CREDENTIALS.
    with tempfile.NamedTemporaryFile(mode="w+", delete=False, suffix=".json") as temp:
        temp.write(creds_json_str)
        temp_filename = temp.name
    return temp_filename
# Point google-cloud client libraries at the credentials file written by
# get_credentials(). Runs at import time, so importing this module requires
# the BOB environment variable to be set.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = get_credentials()

# Module-level GCS client and bucket handle. Note: Client.bucket() only
# builds a local reference — it does NOT verify that the bucket exists.
gcs_client = storage.Client()
gcs_bucket = gcs_client.bucket(gcs_bucket_name)

# Object path of the CSV "database" inside the bucket.
gcs_file_path = "deepak_6593/db.csv"
def append_to_gcs_csv(new_data, gcs_file_path):
    """Append *new_data* rows to the CSV object at *gcs_file_path* in GCS.

    If the object already exists, its rows are read, restricted to the
    ``category``/``score`` columns, and the new rows are concatenated after
    them (fully-empty rows dropped). Otherwise the new data becomes the
    whole file. The combined CSV is then uploaded, overwriting the object.

    Args:
        new_data (pd.DataFrame): Two-column frame; columns are renamed to
            ``['category', 'score']`` positionally.
        gcs_file_path (str): Object path inside the module-level bucket.
    """
    # Bug fix: operate on a copy — the original renamed columns in place,
    # mutating the caller's DataFrame as a side effect.
    new_data = new_data.copy()
    new_data.columns = ['category', 'score']
    blob = gcs_bucket.blob(gcs_file_path)
    # Bug fix: the original bound an unused `exists :=` walrus variable;
    # a plain condition is the idiomatic form.
    if blob.exists():
        existing_data = pd.read_csv(io.BytesIO(blob.download_as_bytes()))
        # Keep only the canonical columns so stale/extra columns never leak in.
        existing_data = existing_data[['category', 'score']]
        combined_data = pd.concat([existing_data, new_data], ignore_index=True).dropna(how='all')
    else:
        combined_data = new_data
    # Serialize and overwrite the object with the combined data.
    csv_data = combined_data.to_csv(index=False).encode('utf-8')
    blob.upload_from_string(csv_data, content_type='text/csv')
def read_from_gcs_csv(gcs_file_path):
    """Download the CSV object at *gcs_file_path* and return it as a DataFrame.

    Args:
        gcs_file_path (str): Object path inside the module-level bucket.

    Returns:
        pd.DataFrame: Parsed contents of the CSV object.
    """
    blob = gcs_bucket.blob(gcs_file_path)
    # Bug fix: the original passed download_as_text() (a str) into io.BytesIO,
    # which requires bytes and raises TypeError. Use download_as_bytes(),
    # matching how append_to_gcs_csv reads the same object.
    return pd.read_csv(io.BytesIO(blob.download_as_bytes()))
async def upload_file(file: UploadFile = File(...)):
    """Accept an uploaded CSV file and append its rows to the GCS-backed CSV."""
    raw = await file.read()
    frame = pd.read_csv(io.StringIO(raw.decode('utf-8')))
    append_to_gcs_csv(frame, gcs_file_path)
    return {"message": "File uploaded successfully"}
async def upload_data(category: str = Form(...), score: float = Form(...)):
    """Append one (category, score) row submitted as form fields.

    Any failure while building or storing the row is surfaced to the client
    as a 422 response carrying the error text.
    """
    try:
        row = pd.DataFrame({'category': [category], 'score': [score]})
        append_to_gcs_csv(row, gcs_file_path)
    except Exception as e:
        raise HTTPException(status_code=422, detail=str(e))
    return {"message": "Data uploaded successfully"}
async def upload_data_raw(payload: dict):
    """Append one (category, score) row supplied as a raw JSON payload.

    Args:
        payload (dict): Must contain ``category`` and ``score`` keys.

    Raises:
        HTTPException: 400 when either required key is missing.
    """
    try:
        category = payload['category']
        score = payload['score']
    except KeyError:
        # Idiom fix: suppress the KeyError context explicitly (`from None`)
        # instead of silencing the linter with a sourcery-skip comment —
        # the traceback shown to operators stays the clean HTTP error.
        raise HTTPException(status_code=400, detail="Invalid payload format") from None
    df = pd.DataFrame([[category, score]], columns=['category', 'score'])
    append_to_gcs_csv(df, gcs_file_path)
    return {"message": "Data uploaded successfully"}
async def clear_data():
    """Reset the GCS CSV to an empty file containing only the header row."""
    header_only = pd.DataFrame(columns=['category', 'score'])
    payload = header_only.to_csv(index=False).encode('utf-8')
    target = gcs_bucket.blob(gcs_file_path)
    target.upload_from_string(payload, content_type='text/csv')
    return {"message": "Data cleared successfully"}