Spaces:
Sleeping
Sleeping
Bobby committed on
Commit ·
c1ebc5b
1
Parent(s): 42b4ed2
- app.py +1 -1
- local_app.py +1 -1
app.py
CHANGED
|
@@ -430,6 +430,7 @@ def process_image(
|
|
| 430 |
prompt=str(get_prompt(prompt, a_prompt))
|
| 431 |
negative_prompt=str(n_prompt)
|
| 432 |
print(prompt)
|
|
|
|
| 433 |
start = time.time()
|
| 434 |
results = pipe(
|
| 435 |
prompt=prompt,
|
|
@@ -440,7 +441,6 @@ def process_image(
|
|
| 440 |
generator=generator,
|
| 441 |
image=control_image,
|
| 442 |
).images[0]
|
| 443 |
-
print(f"\n-------------------------Preprocess done in: {preprocess_time:.2f} seconds-------------------------")
|
| 444 |
print(f"\n-------------------------Inference done in: {time.time() - start:.2f} seconds-------------------------")
|
| 445 |
results.save("temp_image.jpg")
|
| 446 |
# torch.cuda.synchronize()
|
|
|
|
| 430 |
prompt=str(get_prompt(prompt, a_prompt))
|
| 431 |
negative_prompt=str(n_prompt)
|
| 432 |
print(prompt)
|
| 433 |
+
print(f"\n-------------------------Preprocess done in: {preprocess_time:.2f} seconds-------------------------")
|
| 434 |
start = time.time()
|
| 435 |
results = pipe(
|
| 436 |
prompt=prompt,
|
|
|
|
| 441 |
generator=generator,
|
| 442 |
image=control_image,
|
| 443 |
).images[0]
|
|
|
|
| 444 |
print(f"\n-------------------------Inference done in: {time.time() - start:.2f} seconds-------------------------")
|
| 445 |
results.save("temp_image.jpg")
|
| 446 |
# torch.cuda.synchronize()
|
local_app.py
CHANGED
|
@@ -401,6 +401,7 @@ def process_image(
|
|
| 401 |
prompt=str(get_prompt(prompt, a_prompt))
|
| 402 |
negative_prompt=str(n_prompt)
|
| 403 |
print(prompt)
|
|
|
|
| 404 |
start = time.time()
|
| 405 |
results = pipe(
|
| 406 |
prompt=prompt,
|
|
@@ -411,7 +412,6 @@ def process_image(
|
|
| 411 |
generator=generator,
|
| 412 |
image=control_image,
|
| 413 |
).images[0]
|
| 414 |
-
print(f"\n-------------------------Preprocess done in: {preprocess_time:.2f} seconds-------------------------")
|
| 415 |
print(f"\n-------------------------Inference done in: {time.time() - start:.2f} seconds-------------------------")
|
| 416 |
results.save("temp_image.jpg")
|
| 417 |
# torch.cuda.synchronize()
|
|
|
|
| 401 |
prompt=str(get_prompt(prompt, a_prompt))
|
| 402 |
negative_prompt=str(n_prompt)
|
| 403 |
print(prompt)
|
| 404 |
+
print(f"\n-------------------------Preprocess done in: {preprocess_time:.2f} seconds-------------------------")
|
| 405 |
start = time.time()
|
| 406 |
results = pipe(
|
| 407 |
prompt=prompt,
|
|
|
|
| 412 |
generator=generator,
|
| 413 |
image=control_image,
|
| 414 |
).images[0]
|
|
|
|
| 415 |
print(f"\n-------------------------Inference done in: {time.time() - start:.2f} seconds-------------------------")
|
| 416 |
results.save("temp_image.jpg")
|
| 417 |
# torch.cuda.synchronize()
|