Commit 7d4b686 · Update main.py
Parent(s): 4272504

main.py CHANGED
@@ -18,7 +18,7 @@ def return_item(item: Item):
     return item
 
 @app.put("/")
-def predict(Item):
+def predict(item: Item):
     # Load the OpenAI CLIP Model
     print('Loading CLIP Model...')
     model = SentenceTransformer('clip-ViT-B-32')
@@ -29,7 +29,7 @@ def predict(Item):
     # encoded_image = model.encode(Image.open(filepath))
     #image_names = list(glob.glob('./*.jpg'))
     #print("Images:", len(image_names))
-    encoded_image = model.encode([
+    encoded_image = model.encode([item.image1, item.image2], batch_size=128, convert_to_tensor=True, show_progress_bar=True)
 
     # Now we run the clustering algorithm. This function compares images aganist
     # all other images and returns a list with the pairs that have the highest
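
For context on why the signature change matters: in FastAPI, `def predict(Item):` declares an untyped parameter named `Item`, which FastAPI treats as a required query parameter rather than the JSON request body. Annotating it as `item: Item` makes FastAPI parse and validate the body against the Pydantic model. Below is a minimal sketch of what the endpoint plausibly looks like after this commit; the `Item` model definition is an assumption (the diff only shows that it carries `image1` and `image2` fields), and the sketch opens those fields as PIL images before encoding, since `clip-ViT-B-32` embeds plain strings as text rather than images.

# Minimal sketch of the fixed endpoint; Item is assumed, not shown in the diff.
from fastapi import FastAPI
from pydantic import BaseModel
from PIL import Image
from sentence_transformers import SentenceTransformer

app = FastAPI()

class Item(BaseModel):
    image1: str  # assumed: server-side path to the first image
    image2: str  # assumed: server-side path to the second image

@app.put("/")
def predict(item: Item):
    # Load the OpenAI CLIP model via sentence-transformers.
    print('Loading CLIP Model...')
    model = SentenceTransformer('clip-ViT-B-32')

    # Encode both images into the shared CLIP embedding space.
    # Note: the committed line passes item.image1/item.image2 directly;
    # clip-ViT-B-32 would encode raw strings as *text*, so this sketch
    # opens them as PIL images first.
    encoded_image = model.encode(
        [Image.open(item.image1), Image.open(item.image2)],
        batch_size=128,
        convert_to_tensor=True,
        show_progress_bar=True,
    )
    # The embeddings then feed the clustering step referenced in the
    # comments below the changed line.
    return {"embedding_shape": list(encoded_image.shape)}

Loading the model inside the handler, as the commit does, reloads CLIP on every request; moving the SentenceTransformer call to module level would avoid that cost.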
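A hypothetical client call against the updated route might look like the following, assuming the app is served locally with uvicorn main:app; the host, port, and image paths are placeholders.

import requests

# PUT the two (assumed) image paths to the prediction route.
resp = requests.put(
    "http://127.0.0.1:8000/",
    json={"image1": "cat1.jpg", "image2": "cat2.jpg"},
)
print(resp.status_code, resp.json())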