Valtry committed on
Commit
7c9d195
·
verified ·
1 Parent(s): 67c9c6c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -32
app.py CHANGED
@@ -7,7 +7,7 @@ from fastapi.responses import HTMLResponse, JSONResponse
7
  from fastapi.staticfiles import StaticFiles
8
  from pydantic import BaseModel
9
 
10
- # Set cache directories to writable paths
11
  os.environ["HF_HOME"] = "/app/cache"
12
  os.environ["TRANSFORMERS_CACHE"] = "/app/cache"
13
  os.makedirs("/app/cache", exist_ok=True)
@@ -20,12 +20,12 @@ from diffusers import StableDiffusionPipeline
20
  MODEL_ID = "runwayml/stable-diffusion-v1-5"
21
  DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
22
  STATIC_FOLDER = "/app/static"
23
- SPACE_URL = "https://valtry-my-image.hf.space" # <-- CHANGE THIS to your Space's URL
24
  # ------------------------
25
 
26
  app = FastAPI(title="Valtry Text→Image API")
27
 
28
- # Serve static folder
29
  app.mount("/static", StaticFiles(directory=STATIC_FOLDER), name="static")
30
 
31
  print(f"Loading model {MODEL_ID} on {DEVICE}...")
@@ -45,7 +45,7 @@ class GenerateReq(BaseModel):
45
 
46
  @app.post("/generate")
47
  async def generate(req: GenerateReq):
48
- if not req.prompt.strip():
49
  return JSONResponse({"error": "prompt is required"}, status_code=400)
50
 
51
  seed = req.seed if req.seed is not None else int(time.time() * 1000) % 2**32
@@ -67,68 +67,76 @@ async def generate(req: GenerateReq):
67
  file_path = os.path.join(STATIC_FOLDER, filename)
68
  image.save(file_path)
69
 
70
- # Full public URL so it loads in browser
71
  public_url = f"{SPACE_URL}/static/{filename}"
72
  return {"url": public_url, "filename": filename}
73
 
 
74
  @app.get("/", response_class=HTMLResponse)
75
  async def home():
76
- return f"""
77
  <!doctype html>
78
  <html>
79
  <head>
80
  <meta charset="utf-8"/>
81
- <title>Valtry — Text→Image</title>
82
  <style>
83
- body{{font-family:Arial,sans-serif;margin:32px;background:#f7f7f7}}
84
- input, button, textarea{{font-size:16px;padding:10px;width:100%;margin-top:8px}}
85
- img{{max-width:100%;border:1px solid #ccc;padding:6px;background:#fff;margin-top:20px}}
86
  </style>
87
  </head>
88
  <body>
89
  <h2>Valtry — Text → Image</h2>
90
  <textarea id="prompt" rows="3" placeholder="A fantasy castle on a cliff at sunset"></textarea><br>
91
- <input id="steps" type="number" value="25" min="1" max="150"/>
92
- <input id="scale" type="number" value="7.5" step="0.1" min="1" max="20"/>
93
- <input id="seed" type="number" placeholder="optional seed"/>
 
 
 
94
  <button onclick="generate()">Generate Image</button>
95
  <div id="status"></div>
96
  <div id="result"></div>
97
 
98
  <script>
99
- async function generate(){{
100
  const prompt = document.getElementById('prompt').value;
101
- const steps = parseInt(document.getElementById('steps').value);
102
- const scale = parseFloat(document.getElementById('scale').value);
103
  const seedVal = document.getElementById('seed').value;
104
 
105
- document.getElementById('status').textContent = "⏳ Generating...";
106
  document.getElementById('result').innerHTML = "";
107
 
108
- const body = {{ prompt, num_inference_steps: steps, guidance_scale: scale }};
109
  if (seedVal) body.seed = parseInt(seedVal);
110
 
111
- try {{
112
- const res = await fetch('/generate', {{
113
  method: 'POST',
114
- headers: {{ 'Content-Type': 'application/json' }},
115
  body: JSON.stringify(body)
116
- }});
 
 
 
 
 
 
 
117
  const data = await res.json();
118
- if (res.ok) {{
119
- document.getElementById('status').textContent = "✅ Done";
120
- document.getElementById('result').innerHTML = `<img src="${data.url}" alt="Generated image"/>`;
121
- }} else {{
122
- document.getElementById('status').textContent = "❌ Error: " + data.error;
123
- }}
124
- }} catch (err) {{
125
- document.getElementById('status').textContent = "❌ " + err.message;
126
- }}
127
- }}
128
  </script>
129
  </body>
130
  </html>
131
  """
 
132
 
133
  @app.get("/health")
134
  async def health():
 
7
  from fastapi.staticfiles import StaticFiles
8
  from pydantic import BaseModel
9
 
10
+ # Make caches and static directories writable
11
  os.environ["HF_HOME"] = "/app/cache"
12
  os.environ["TRANSFORMERS_CACHE"] = "/app/cache"
13
  os.makedirs("/app/cache", exist_ok=True)
 
20
  MODEL_ID = "runwayml/stable-diffusion-v1-5"
21
  DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
22
  STATIC_FOLDER = "/app/static"
23
+ SPACE_URL = "https://valtry-my-image.hf.space" # <- set your space URL here
24
  # ------------------------
25
 
26
  app = FastAPI(title="Valtry Text→Image API")
27
 
28
+ # Serve static files publicly at /static/...
29
  app.mount("/static", StaticFiles(directory=STATIC_FOLDER), name="static")
30
 
31
  print(f"Loading model {MODEL_ID} on {DEVICE}...")
 
45
 
46
  @app.post("/generate")
47
  async def generate(req: GenerateReq):
48
+ if not req.prompt or not req.prompt.strip():
49
  return JSONResponse({"error": "prompt is required"}, status_code=400)
50
 
51
  seed = req.seed if req.seed is not None else int(time.time() * 1000) % 2**32
 
67
  file_path = os.path.join(STATIC_FOLDER, filename)
68
  image.save(file_path)
69
 
70
+ # Return an absolute public URL (so external pages can load it)
71
  public_url = f"{SPACE_URL}/static/{filename}"
72
  return {"url": public_url, "filename": filename}
73
 
74
+ # Home page: NOTE -> regular string (NOT an f-string) to avoid Python interpolating JS {..}
75
  @app.get("/", response_class=HTMLResponse)
76
  async def home():
77
+ html = """
78
  <!doctype html>
79
  <html>
80
  <head>
81
  <meta charset="utf-8"/>
82
+ <title>Valtry — Text → Image</title>
83
  <style>
84
+ body{font-family:Arial,sans-serif;margin:32px;background:#f7f7f7}
85
+ textarea,input,button{font-size:16px;padding:10px;width:100%;margin-top:8px;box-sizing:border-box}
86
+ img{max-width:100%;border:1px solid #ccc;padding:6px;background:#fff;margin-top:20px}
87
  </style>
88
  </head>
89
  <body>
90
  <h2>Valtry — Text → Image</h2>
91
  <textarea id="prompt" rows="3" placeholder="A fantasy castle on a cliff at sunset"></textarea><br>
92
+ <label>Steps (num_inference_steps)</label>
93
+ <input id="steps" type="number" value="25" min="1" max="150"/><br>
94
+ <label>Guidance scale</label>
95
+ <input id="scale" type="number" value="7.5" step="0.1" min="1" max="20"/><br>
96
+ <label>Seed (optional)</label>
97
+ <input id="seed" type="number" placeholder="optional seed"/><br>
98
  <button onclick="generate()">Generate Image</button>
99
  <div id="status"></div>
100
  <div id="result"></div>
101
 
102
  <script>
103
+ async function generate(){
104
  const prompt = document.getElementById('prompt').value;
105
+ const steps = parseInt(document.getElementById('steps').value || 25);
106
+ const scale = parseFloat(document.getElementById('scale').value || 7.5);
107
  const seedVal = document.getElementById('seed').value;
108
 
109
+ document.getElementById('status').textContent = "⏳ Generating — this may take a bit...";
110
  document.getElementById('result').innerHTML = "";
111
 
112
+ const body = { prompt: prompt, num_inference_steps: steps, guidance_scale: scale };
113
  if (seedVal) body.seed = parseInt(seedVal);
114
 
115
+ try {
116
+ const res = await fetch('/generate', {
117
  method: 'POST',
118
+ headers: { 'Content-Type': 'application/json' },
119
  body: JSON.stringify(body)
120
+ });
121
+
122
+ if (!res.ok) {
123
+ const txt = await res.text();
124
+ document.getElementById('status').textContent = '❌ Error ' + res.status + ': ' + txt;
125
+ return;
126
+ }
127
+
128
  const data = await res.json();
129
+ document.getElementById('status').textContent = '✅ Done — image below';
130
+ document.getElementById('result').innerHTML = `<img src="${data.url}" alt="generated-image"/>`;
131
+ } catch (err) {
132
+ document.getElementById('status').textContent = '❌ Exception: ' + err.message;
133
+ }
134
+ }
 
 
 
 
135
  </script>
136
  </body>
137
  </html>
138
  """
139
+ return HTMLResponse(content=html)
140
 
141
  @app.get("/health")
142
  async def health():