# Hugging Face Space: FastAPI service that appends conversation items to a HF Dataset CSV.
import os
import time

import pandas as pd
from fastapi import FastAPI, HTTPException
from huggingface_hub import HfApi, hf_hub_download
from pydantic import BaseModel

app = FastAPI()

# Dataset configuration
HF_TOKEN = os.environ.get("HF_TOKEN")  # Set this in Secrets in the Space settings!
# REPO_ID = "OjciecTadeusz/test"
REPO_ID = os.environ.get("REPO_ID")  # NOTE(review): read but never used below — confirm intent
DATASET_REPO = "OjciecTadeusz/test"
DATASET_FILENAME = "data.csv"
# Data model matching the definition from AI Studio
class ConversationItem(BaseModel):
    """Request payload for /save, as produced by AI Studio / Gemini."""

    # The message text to persist.
    user_message: str
    # Category label supplied by the caller.
    category: str
    # Optional; defaults to "medium" when the caller omits it.
    priority: str = "medium"
# NOTE(review): the route decorator was missing in the pasted source; without it
# the function is never registered as an endpoint. Restored based on the body's
# own message ("send POST to /save"), which implies routes exist.
@app.get("/")
def home():
    """Health-check endpoint confirming the server is up."""
    return {"status": "Serwer działa. Wyślij POST na /save"}
# NOTE(review): the route decorator was missing in the pasted source; restored
# so the endpoint is actually reachable at the path named in the home() message.
@app.post("/save")
async def save_to_dataset(item: ConversationItem):
    """
    Receive data from Gemini and append it to the HF Dataset (CSV).

    Downloads the current CSV from the dataset repo (or starts a fresh,
    empty frame if the file does not exist yet), appends one row, writes
    the result to /tmp, and uploads it back to the dataset repo.

    Raises:
        HTTPException: 500 when HF_TOKEN is missing or the upload fails.
    """
    print(f"Otrzymano dane: {item}")

    if not HF_TOKEN:
        raise HTTPException(status_code=500, detail="Brak HF_TOKEN w zmiennych środowiskowych")

    api = HfApi(token=HF_TOKEN)

    # 1. Try to download the existing CSV file from the Dataset
    try:
        file_path = hf_hub_download(
            repo_id=DATASET_REPO,
            filename=DATASET_FILENAME,
            repo_type="dataset",
            token=HF_TOKEN,
        )
        df = pd.read_csv(file_path)
    except Exception:
        # File missing (or download failed) — deliberately best-effort:
        # start over with a new, empty DataFrame instead of erroring out.
        print("Plik nie istnieje lub błąd pobierania. Tworzę nowy.")
        df = pd.DataFrame(columns=["timestamp", "category", "priority", "message"])

    # 2. Append the new row
    new_row = {
        "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"),
        "category": item.category,
        "priority": item.priority,
        "message": item.user_message,
    }
    # Use pd.concat instead of the removed DataFrame.append (pandas deprecation)
    df = pd.concat([df, pd.DataFrame([new_row])], ignore_index=True)

    # 3. Write the updated CSV locally (Spaces have a writable /tmp)
    local_filename = "/tmp/data.csv"
    df.to_csv(local_filename, index=False)

    # 4. Upload the updated file to the Dataset; keep the try body minimal —
    # only the call that can fail — and surface failures as HTTP 500.
    try:
        api.upload_file(
            path_or_fileobj=local_filename,
            path_in_repo=DATASET_FILENAME,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            commit_message=f"Dodano wpis przez API: {item.category}",
        )
    except Exception as e:
        print(f"Błąd uploadu: {e}")
        raise HTTPException(status_code=500, detail=str(e))
    return {"status": "success", "message": "Zapisano w Datasecie", "data": new_row}
if __name__ == "__main__":
    # Local/dev entry point: serve on 7860, the port HF Spaces expects.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)