# Hugging Face Space: Saree AI – Uniform Lighting (CLAHE)
import cv2
import numpy as np
from PIL import Image

import gradio as gr
def clahe_uniform(image, clip_limit=1.0, tile_grid_size=(18, 18)):
    """Balance uneven illumination in an image using CLAHE on the LAB L-channel.

    Equalizing only the lightness channel evens out local lighting while
    leaving the color channels (A/B) untouched, which preserves hue richness.

    Args:
        image: Input PIL image, assumed RGB (Gradio supplies it with type="pil").
        clip_limit: CLAHE contrast clip limit; higher values amplify local
            contrast more aggressively. Defaults to the original 1.0.
        tile_grid_size: (cols, rows) grid of local histogram tiles.
            Defaults to the original (18, 18).

    Returns:
        A two-element list [original PIL image, corrected PIL image] matching
        the two gr.Image outputs of the interface.
    """
    # PIL stores RGB; OpenCV operates on BGR, so convert at the boundary.
    bgr = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)

    # Split LAB so equalization touches only lightness (L), not color (A/B).
    lab = cv2.cvtColor(bgr, cv2.COLOR_BGR2LAB)
    l_chan, a_chan, b_chan = cv2.split(lab)

    # Contrast Limited Adaptive Histogram Equalization on the L channel.
    clahe = cv2.createCLAHE(clipLimit=clip_limit, tileGridSize=tile_grid_size)
    l_eq = clahe.apply(l_chan)

    # Reassemble and convert back to RGB for display.
    merged = cv2.merge([l_eq, a_chan, b_chan])
    corrected_bgr = cv2.cvtColor(merged, cv2.COLOR_LAB2BGR)
    corrected_rgb = cv2.cvtColor(corrected_bgr, cv2.COLOR_BGR2RGB)
    return [image, Image.fromarray(corrected_rgb)]
# Copy shown in the Gradio interface header; rendered as Markdown.
title = "🧵 Saree AI – Uniform Lighting (CLAHE)"

description = """
Upload a saree image with uneven illumination.
This app applies **CLAHE (adaptive histogram equalization)** to balance lighting locally
while preserving texture and color richness.
"""
# Wire the CLAHE pipeline into a two-pane Gradio UI: one upload input,
# two image outputs (untouched original alongside the corrected result).
demo = gr.Interface(
    fn=clahe_uniform,
    inputs=gr.Image(label="Upload Saree Image", type="pil"),
    outputs=[
        gr.Image(label="Original"),
        gr.Image(label="CLAHE Corrected"),
    ],
    title=title,
    description=description,
)

if __name__ == "__main__":
    # Start the local Gradio server only when run as a script,
    # not when this module is imported.
    demo.launch()