# app.py — Vibe Coding backend (Hugging Face Space, commit fbfef4d)
import os
import json
from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import StreamingResponse, HTMLResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import List, Optional
from huggingface_hub import AsyncInferenceClient
from dotenv import load_dotenv
# Load environment variables (HF_TOKEN, PORT) from a local .env file if present.
load_dotenv()

app = FastAPI(title="Vibe Coding Backend - 100% Free Edition")

# Enable CORS
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# maximally permissive — acceptable for a public demo Space, but should be
# narrowed to known frontend origins in production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Initialize Hugging Face Inference Client (100% Free)
# User needs a FREE HF Token from huggingface.co/settings/tokens
HF_TOKEN = os.getenv("HF_TOKEN")  # may be None — requests then go unauthenticated

# Model: DeepSeek-Coder-V2-Lite-Instruct (Very powerful and free on HF API)
MODEL_ID = "deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct"
client = AsyncInferenceClient(model=MODEL_ID, token=HF_TOKEN)
class Message(BaseModel):
    """One chat turn in OpenAI-style role/content format."""

    # Chat role forwarded verbatim to the HF chat API
    # (presumably "user"/"assistant"; "system" is injected server-side).
    role: str
    # The message text.
    content: str
class ChatRequest(BaseModel):
    """Request body for POST /vibe."""

    # Conversation history, oldest first; the server prepends its own
    # system prompt before forwarding to the model.
    messages: List[Message]
    # NOTE(review): accepted but currently unused — /vibe always streams.
    stream: bool = True
    # Sampling temperature passed through to chat_completion.
    temperature: float = 0.6
    # Upper bound on generated tokens per reply.
    max_tokens: int = 4096
# System prompt injected ahead of every /vibe conversation. This is a runtime
# string sent to the model — its exact wording shapes all generated output.
VIBE_SYSTEM_PROMPT = """You are the ultimate 'Vibe Coder'. Your purpose is to build entire, high-end web applications from top to bottom in a single response.
When a user gives you a 'vibe' or a concept:
1. Write 100% complete, functional code (HTML, CSS, JS).
2. Use modern, premium aesthetics: Glassmorphism, deep gradients, silky animations (Framer Motion style), and high-end typography (Inter/Outfit).
3. Do not use placeholders. If you need images, use Unsplash URLs.
4. Structure the code so it's ready to be copied into a single file or a clean project structure.
5. Be bold. Be creative. Make it look like a $100k startup design."""
@app.get("/", response_class=HTMLResponse)
async def root():
    """Serve the bundled single-page frontend.

    Returns:
        The raw contents of ``index.html`` from the working directory.

    Raises:
        HTTPException: 404 if ``index.html`` is missing, instead of the
            unhandled FileNotFoundError (HTTP 500) the original produced.
    """
    try:
        # Explicit UTF-8: the platform's default locale encoding may differ
        # (e.g. cp1252 on Windows) and would mis-decode the HTML.
        with open("index.html", "r", encoding="utf-8") as f:
            return f.read()
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail="index.html not found")
@app.post("/vibe")
async def vibe_coding(request: ChatRequest):
    """
    100% Free Vibe Coding using Hugging Face Serverless API.

    Streams the model reply as Server-Sent Events; each ``data:`` frame is a
    JSON object carrying either a ``content`` text chunk or an ``error``
    message (errors are surfaced in-band rather than dropping the stream).
    """
    # Inject Vibe System Prompt ahead of the caller's conversation.
    messages = [{"role": "system", "content": VIBE_SYSTEM_PROMPT}]
    messages += [{"role": m.role, "content": m.content} for m in request.messages]

    async def stream_generator():
        try:
            # Using HF's free streaming capability
            stream = await client.chat_completion(
                messages=messages,
                max_tokens=request.max_tokens,
                stream=True,
                temperature=request.temperature,
            )
            async for chunk in stream:
                # Fix: some stream chunks (keep-alive / final usage frames)
                # can arrive with an empty choices list; the original
                # chunk.choices[0] would raise IndexError and abort the
                # stream with a generic error event.
                if not chunk.choices:
                    continue
                content = chunk.choices[0].delta.content
                if content:
                    yield f"data: {json.dumps({'content': content})}\n\n"
        except Exception as e:
            # Report the failure to the client as an SSE event; the HTTP
            # response has already started, so raising here would just
            # sever the connection.
            yield f"data: {json.dumps({'error': str(e)})}\n\n"

    return StreamingResponse(stream_generator(), media_type="text/event-stream")
if __name__ == "__main__":
    # Direct-execution entry point. Hugging Face Spaces expose the service
    # port via the PORT env var (7860 by default); bind all interfaces so
    # the container's port mapping works.
    import uvicorn

    listen_port = int(os.getenv("PORT", 7860))
    uvicorn.run(app, host="0.0.0.0", port=listen_port)