| import streamlit as st |
| import numpy as np |
| import similarSearch |
| import pandas as pd |
| import cv2 |
| import os |
| from PIL import Image |
|
|
# Gallery metadata loaded once at import time; rows map a seller_img_id to its
# img_path and product_id, used for lookups in display_images() and main().
df = pd.read_csv('./gallery.csv')
|
|
def read_image(uploaded_file):
    """Decode a Streamlit UploadedFile into an RGB image array.

    OpenCV cannot decode the in-memory upload directly, so the bytes are
    spilled to a temporary file named after the upload, read back with
    cv2.imread, and the file is removed again.

    Args:
        uploaded_file: Streamlit UploadedFile from st.file_uploader.

    Returns:
        The decoded image as an RGB numpy array (H, W, 3).

    Raises:
        ValueError: if OpenCV fails to decode the written file.
    """
    with open(uploaded_file.name, "wb") as f:
        f.write(uploaded_file.getbuffer())
    try:
        img = cv2.imread(uploaded_file.name, cv2.IMREAD_COLOR | cv2.IMREAD_IGNORE_ORIENTATION)
        # Bug fix: test for a failed read BEFORE cvtColor. The original ran
        # cvtColor first, which crashes with an opaque cv2.error on None and
        # made the intended ValueError unreachable.
        if img is None:
            raise ValueError('Failed to read {}'.format(uploaded_file.name))
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    finally:
        # Always remove the temp file, even when decoding fails (the original
        # leaked it on any exception).
        os.remove(uploaded_file.name)
    return img
|
|
def image_open(image_file):
    """Read an image from a filesystem path and return it as an RGB array.

    Args:
        image_file: path to an image file on disk.

    Returns:
        The decoded image as an RGB numpy array (H, W, 3).

    Raises:
        ValueError: if OpenCV cannot read or decode the file.
    """
    img = cv2.imread(
        image_file, cv2.IMREAD_COLOR | cv2.IMREAD_IGNORE_ORIENTATION
    )
    # Bug fix: cv2.imread returns None on failure; check BEFORE cvtColor.
    # The original converted first, so a bad path raised cv2.error instead
    # of the intended ValueError.
    if img is None:
        raise ValueError('Failed to read {}'.format(image_file))
    return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
|
|
| |
| |
def display_images(image_ids):
    """Load the gallery images for a list of seller image ids.

    Args:
        image_ids: iterable of seller_img_id values to look up in the
            module-level gallery DataFrame ``df``.

    Returns:
        Tuple ``(images, captions)`` where ``images`` are RGB numpy arrays
        and ``captions`` are the corresponding ids. Ids with no row in the
        gallery are skipped (the original raised IndexError on them).
    """
    images = []
    captions = []
    for img_id in image_ids:
        paths = df.loc[df['seller_img_id'] == img_id, 'img_path'].values
        # Robustness: unknown ids are skipped instead of crashing on
        # paths[0]; also dropped the leftover debug print of the path.
        if len(paths) == 0:
            continue
        images.append(image_open(paths[0]))
        captions.append(img_id)
    return images, captions
| |
| |
def find_similar_images(image):
    """Run the similarity model on a query image.

    Args:
        image: the query image (PIL Image, as produced in main()).

    Returns:
        Tuple ``(similar_ids, length)`` — the ids returned by the model and
        their count.

    Note: the original implementation called display_images() — loading every
    matching image from disk — solely to compute a count that equals
    ``len(similar_ids)``; that wasted I/O is removed.
    """
    similar_ids = similarSearch.predict(image)
    return similar_ids, len(similar_ids)
| |
|
|
def main():
    """Streamlit entry point: upload a query image, run similarity search,
    and display the matching gallery images in a paginated grid."""
    st.set_page_config(
        page_title='See it, search it',
        page_icon=':mag:',
        layout='wide',
        initial_sidebar_state='expanded'
    )
    st.title('See it, search it')
    # Inline CSS: center the title and radio widgets, keep images responsive.
    st.markdown(
        """
        <style>
        .stTitle {
            text-align: center;
            font-size: 36px;
            margin-bottom: 30px;
        }
        .stRadio > div {
            display: flex;
            justify-content: center;
            align-items: center;
        }
        .stImageContainer {
            display: flex;
            justify-content: center;
            align-items: center;
            margin-bottom: 30px;
        }
        .stImage {
            max-width: 100%;
            height: auto;
        }
        </style>
        """,
        unsafe_allow_html=True
    )

    # Grid/paging configuration. (The original also created an unused
    # st.columns(5) here before the uploader; that dead widget is removed —
    # the real grid is created after the results are known.)
    image_width = 150
    image_height = 150
    num_columns = 5
    images_per_page = 50

    uploaded_image = st.file_uploader("Upload Image", type=['png', 'jpg', 'jpeg'])
    if uploaded_image is None:
        # Nothing uploaded yet — render nothing further.
        return

    # Decode the upload and resize to the model's expected 224x224 input.
    image = Image.fromarray(read_image(uploaded_image))
    image = image.resize((224, 224))
    st.image(image, caption="Query Image", width=224)

    similar_images, length = find_similar_images(image)
    # Robustness: with zero results the original computed total_pages == 0,
    # making number_input(min_value=1, max_value=0) raise.
    if length == 0:
        st.write("No similar images found.")
        return

    # Ceil-divide so a partial last page still counts as a page.
    total_pages = (length + images_per_page - 1) // images_per_page
    page_number = st.sidebar.number_input("Select Page", min_value=1, max_value=total_pages)
    start_index = (page_number - 1) * images_per_page
    end_index = min(start_index + images_per_page, length)

    image_columns = st.columns(num_columns)
    index = 0
    for img_id in similar_images[start_index:end_index]:
        row = df[df['seller_img_id'] == img_id]
        if not row.empty:
            img_path = row['img_path'].values[0]
            product_id = row['product_id'].values[0]
            resized_image = cv2.resize(image_open(img_path), (image_width, image_height))
            # Round-robin across columns to lay the results out as a grid;
            # only found images advance the column index.
            col_index = index % num_columns
            with image_columns[col_index]:
                st.image(resized_image, caption=f'Product ID: {product_id}\nImage ID: {img_id}', width=image_width)
            index += 1
        else:
            st.write(f"Could not find details for image with ID: {img_id}")
| |
|
|
|
|
# Script entry point: run the Streamlit app when executed directly.
if __name__ == '__main__':
    main()