2511
- media/result_grid.jpg +2 -2
- samples/unet_320x640_0.jpg +2 -2
- samples/unet_384x640_0.jpg +2 -2
- samples/unet_448x640_0.jpg +2 -2
- samples/unet_512x640_0.jpg +2 -2
- samples/unet_576x640_0.jpg +2 -2
- samples/unet_640x320_0.jpg +2 -2
- samples/unet_640x384_0.jpg +2 -2
- samples/unet_640x448_0.jpg +2 -2
- samples/unet_640x512_0.jpg +2 -2
- samples/unet_640x576_0.jpg +2 -2
- samples/unet_640x640_0.jpg +2 -2
- test.ipynb +2 -2
- train.py +1 -0
media/result_grid.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_320x640_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_384x640_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_448x640_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_512x640_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_576x640_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_640x320_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_640x384_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_640x448_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_640x512_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_640x576_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
samples/unet_640x640_0.jpg
CHANGED (Git LFS image; old and new pointer details not shown)
test.ipynb
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:395a7bb21ad5721b8bf639c1c01b20c9cac497b0260706f53cce64abd6bce172
+size 5685007
train.py
CHANGED
@@ -145,6 +145,7 @@ def encode_texts(texts, max_length=150):
 add_generation_prompt=True,
 #enable_thinking=True,
 )
+#print(prompt_item+"\n")
 texts[i] = prompt_item

 toks = tokenizer(
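
The hunk header shows this change lands inside encode_texts(texts, max_length=150), where each chat item is rendered through the tokenizer's chat template before batch tokenization; the commit only adds a commented-out debug print of the rendered prompt. Below is a minimal sketch of what the surrounding code plausibly looks like, assuming a Hugging Face tokenizer with apply_chat_template; the checkpoint name, the tokenize=False argument, and the batching kwargs are assumptions, not taken from train.py:

from transformers import AutoTokenizer

# Assumption: any chat-template-capable checkpoint works; this name is illustrative only.
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen3-0.6B")

def encode_texts(texts, max_length=150):
    """Render each message list through the chat template, then batch-tokenize."""
    for i, messages in enumerate(texts):
        prompt_item = tokenizer.apply_chat_template(
            messages,
            tokenize=False,                # assumed: prompt_item is used as a string below
            add_generation_prompt=True,    # append the assistant turn header, as in the diff
            #enable_thinking=True,         # left commented out, matching train.py
        )
        #print(prompt_item+"\n")           # the debug line this commit adds
        texts[i] = prompt_item

    toks = tokenizer(
        texts,
        padding="max_length",              # assumed padding/truncation settings
        truncation=True,
        max_length=max_length,
        return_tensors="pt",
    )
    return toks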