# Streamlit Space: car-insurance image fraud detection demo.
# Library imports
# Standard library
import base64

# Third-party
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import streamlit as st
import tensorflow as tf
from keras.models import load_model
from PIL import Image, ImageOps
from streamlit_option_menu import option_menu

# Previously used / optional imports kept for reference:
# from tensorflow import keras
# import torch
# from util import classify  # classify is now defined inline below
def classify(image, model, class_names, threshold=0.95):
    """Classify a car image as a genuine or fraudulent insurance claim.

    Parameters:
        image (PIL.Image.Image | numpy.ndarray): Image to classify. A PIL
            image is resized/cropped to the model's 224x224 input; a numpy
            array is assumed to already be a (224, 224, 3) RGB image.
        model: Trained Keras-style model exposing ``predict`` and returning
            one row of class scores per input image.
        class_names (list): Class names; index 0 is the "not fraud" class,
            index 1 the "fraud" class.
        threshold (float): Minimum score for class 0 required to predict
            class 0; otherwise class 1 is predicted. The default of 0.95
            preserves the original hard-coded behaviour.

    Returns:
        tuple: ``(class_name, confidence_score, index)`` where ``index`` is
        0 or 1 and ``confidence_score`` is the model's score for the
        predicted class.
    """
    if isinstance(image, np.ndarray):
        # Already a pixel array; trust the caller's 224x224x3 layout.
        image_array = image
    else:
        # Center-crop/resize the PIL image to the model's 224x224 input.
        image = ImageOps.fit(image, (224, 224), Image.Resampling.LANCZOS)
        image_array = np.asarray(image)
    # Scale pixel values from [0, 255] to [-1, 1], matching training.
    normalized_image_array = (image_array.astype(np.float32) / 127.5) - 1
    # Add the batch dimension Keras expects: (1, 224, 224, 3).
    data = normalized_image_array.reshape(1, 224, 224, 3)
    prediction = model.predict(data)
    # Predict "not fraud" (index 0) only when the model is confident enough;
    # any weaker score is treated as suspected fraud (index 1).
    index = 0 if prediction[0][0] > threshold else 1
    class_name = class_names[index]
    # NOTE: the score reported is the model's score for the *chosen* class,
    # which can be low when class 1 is chosen by threshold fallback.
    confidence_score = prediction[0][index]
    return class_name, confidence_score, index
def _load_class_names(path):
    """Parse a labels file with one "<index> <name>" entry per line.

    Uses ``strip()`` rather than slicing off the last character, so the
    final line is read correctly even without a trailing newline, and
    blank lines are skipped instead of raising IndexError.
    """
    with open(path, 'r') as f:
        return [line.strip().split(' ')[1] for line in f if line.strip()]

# Load the trained fraud-detection model and its class labels once at startup.
model = load_model('./models/model.h5')
class_names = _load_class_names('./models/names.txt')
# Use the full browser width and tighten Streamlit's default page padding.
st.set_page_config(layout='wide')
st.markdown(
    """
<style>
    .block-container {
        padding-top: 2rem;
        padding-bottom: 0rem;
        padding-left: 1rem;
        padding-right: 1rem;
    }
</style>
""",
    unsafe_allow_html=True,
)
# Page header: project logo on the left, horizontal navigation on the right.
header, menu = st.columns(2)
with header:
    st.image('static/image/cif2.PNG')
with menu:
    # Two-entry horizontal navigation bar (streamlit_option_menu widget);
    # `selecte` drives which page section is rendered below.
    selecte = option_menu(
        None,
        ["Home", "About"],
        icons=['house', 'cloud-upload'],
        menu_icon="cast",
        default_index=0,
        orientation="horizontal",
        styles={
            "container": {
                "padding": "0!important",
                "background-color": "#ffffff",
                "font-family": "Impact, Haettenschweiler, 'Arial Narrow Bold', sans-serif",
            },
            "icon": {"color": "red", "font-size": "25px"},
            "nav-link": {
                "font-size": "20px",
                "text-align": "left",
                "margin": "0px",
                "--hover-color": "#eee",
            },
            "nav-link-selected": {"background-color": "#F9C949", "color": "white"},
            "menu-title": {"color": "#424143"},
        },
    )
if selecte == "Home":
    st.title("A propos de la Fraude à l'Assurance Automobile")
    # Three side-by-side info cards for the intro section.
    sect1_col1, sect1_col2, sect1_col3 = st.columns(3)
    for col in (sect1_col1, sect1_col2, sect1_col3):
        col.container()
    # Inject the project-wide stylesheet.
    with open('./static/css/style.css') as f:
        st.markdown(f'<style>{f.read()}</style>', unsafe_allow_html=True)
    with sect1_col2.container(height=360):
        # Page-level CSS tweaks (metrics, containers, titles, paragraphs).
        # Fixes applied: a half-'#'-commented (invalid, browser-ignored)
        # stMetric rule was removed, and `color: 424143` gained its missing
        # '#' so the paragraph colour actually applies.
        st.markdown("""
            <style>
            [data-testid="stMetricValue"]{
                font-size: 45px;
                color: #ff3131;
                font-weight:bold;
                text-align:center;
                margin-top:-33px;
            }
            /* breakline for metric text */
            [data-testid="stMetricLabel"] {
                word-wrap: break-word;
                color: #ef8451;
                font-size:40px;
                font-weight:bold;
            }
            [data-testid ="stVerticalBlock"]{
                text-align:center;
            }
            [data-v-5af006b8]{
                background-color:black;
            }
            /* Containers */
            .st-emotion-cache-6srzk2 {
                background-color: #f9ca49c0;
                border: 1px solid rgba(28, 131, 225, 0.1);
                text-align: center;
                font-family: Impact, Haettenschweiler, 'Arial Narrow Bold', sans-serif;
            }
            .menu .container-xxl[data-v-5af006b8] {
                background-color: transparent !important;
            }
            @font-face {
                font-family: "Anton";
                src: url(Anton-Regular.ttf);
            }
            /* Titles */
            .st-emotion-cache-10trblm {
                font-size: 1.8rem;
                color: #424143;
                font-weight: 300;
                text-transform: uppercase;
                line-height: 1.235;
                font-family: Impact, Haettenschweiler, 'Arial Narrow Bold', sans-serif;
            }
            /* Section 2, element 2 */
            .st-emotion-cache-1o1eenq{
                font-family: Impact, Haettenschweiler, 'Arial Narrow Bold', sans-serif;
                margin-left:5% !important;
                margin-right: 5% !important;
                color: #FF3131;
                font-size: 1.8rem;
                font-weight: 300;
                text-transform: uppercase;
                line-height: 1.235;
            }
            /* Paragraphs */
            p {
                color: #424143;
                padding-left: 20%;
                padding-right: 20%;
                font-size: 30px;
            }
            </style>
            """, unsafe_allow_html=True)
        st.write("Le secteur de l'assurance est confronté à un dilemme:")
        st.caption("Distinguer les demandes d'indemnisations authentiques des demandes trompeuses")
    with sect1_col1.container(height=360):
        st.write("L'émergence de l'IA générative a contribué à: ")
        st.caption("l'augmentation des demandes d'indemnisations frauduleuses")
    with sect1_col3.container(height=360):
        st.subheader("Coût de la fraude à l'assurance ")
        st.write("Le coût de la Fraude à l'assurance automobile est estimé à")
        st.metric("", "Plus de 10% ")
        st.write("de la somme totale des sinistres")
    st.title("Vérifiez l'Authenticité des images de vos Clients")
    st.markdown("Distinguez les images frauduleuses des images non frauduleuses")
    with st.container(height=400):
        # Center the uploader/preview column. Also fixes the misspelled
        # `margin-rigth`, so the right margin now actually applies.
        st.markdown(
            """
            <style>
            .st-emotion-cache-g7r313 {
                width: 700px;
                margin-left:25%;
                margin-right:25%;
            }
            .st-emotion-cache-1kyxreq{
                flex-direction:column;
            }
            .st-emotion-cache-1v0mbdj{
                align-items:center !important;
            }
            </style>
            """, unsafe_allow_html=True
        )
        file = st.file_uploader("Choisissez une image", type=["png", "jpg"])
        if file is not None:
            # Force RGB so the model always receives 3 channels.
            image = Image.open(file).convert('RGB')
            st.image(image, use_column_width=True)
            class_name, conf_score, index = classify(image, model, class_names)
            # index 0 = genuine claim, index 1 = suspected fraud.
            if index == 0:
                st.image('static/image/not_fraud.jpg')
            else:
                st.image('static/image/alert.PNG')
            # Display the confidence as a percentage with one decimal place.
            st.write("### score: {}%".format(int(conf_score * 1000) / 10))
    footer = st.container()
    with footer:
        st.markdown("---")
        st.markdown(
            """
            <style>
            p {
                font-size: 16px;
                text-align: center;
            }
            a {
                text-decoration: none;
                color: #00a;
                font-weight: 600;
            }
            </style>
            <p>
                © Designed by <a href="#">ONDOA Michelle & NGNINTEDEM Marlyne</a>.
            </p>
            """, unsafe_allow_html=True
        )
if selecte == "About":
    st.title("A propos du modèle")
    # CSS overrides scoped to the About page (titles, metrics, captions).
    st.markdown(
        """
        <style>
        /*Les Titres*/
        .st-emotion-cache-10trblm {
            font-size: 1.5rem;
            color: #424143;
            font-weight: 300;
            text-transform: uppercase;
            line-height: 1.235;
            font-family: Impact, Haettenschweiler, 'Arial Narrow Bold', sans-serif;
            margin-left:0% !important;
            margin-right: 5% !important;
        }
        [data-testid="stMetricValue"]{
            font-size: 45px;
            color: #ff3131;
            font-weight:bold;
            text-align:center;
            margin-top:-33px;
        }
        /* breakline for metric text */
        [data-testid="stMetricLabel"] {
            word-wrap: break-word;
            color: #ef8451;
            font-size:40px;
            font-weight:bold;
        }
        .st-emotion-cache-16idsys >p{
            font-size:30px;
        }
        [data-testid ="stVerticalBlock"]{
            #background-color: rgba(187, 216, 158, 0.59);
            #border: 1px solid rgba(28, 131, 225, 0.1);
            text-align:center;
        }
        [data-v-5af006b8]{
            background-color:black;
        }
        .st-emotion-cache-1q7spjk{
            font-family: Impact, Haettenschweiler, 'Arial Narrow Bold', sans-serif;
            color: #FF3131;
            font-size: 1.8rem;
            font-weight: 300;
            text-transform: uppercase;
            line-height: 1.235;
            margin-bottom:10px;
        }
        </style>
        """, unsafe_allow_html=True
    )
    # One tall scrollable card describing the modelling work.
    with st.container(height=1500):
        st.title('Définition du problème')
        st.write("Prédire si une image de voiture donnée est une demande d'indemnisation frauduleuse ?")
        st.title('Type de problème')
        st.write('Problème de Classification ')
        st.title('Problème Domaine')
        st.write('Vision par ordinateur')
        st.title('Analyse des données')
        st.write("L'examination des données a montré que les données d'entraînement sont déséquilibrées. La différence entre la distribution des classes positives et négatives est TRÈS ÉNORME !")
        # Class-balance metrics shown side by side.
        neg_col, pos_col = st.columns(2)
        with neg_col:
            st.metric("Classe Négative", "94%")
        with pos_col:
            st.metric("Classe Positive", "6%")
        st.title("Modélisation")
        st.caption('ResNet 18 (Pré-entraîné)')
        st.write("Comme il s'agit d'un problème de vision par ordinateur, il était très clair et logique d'essayer un réseau neuronal convolutif. Nous avons utilisé ResNet 18 avec les poids pré-entraînés sur l'ensemble de données ImageNet .Nous avons remplacé la couche de sortie et la couche d'entrée.Le model a donné un résultat suivants:")
        # Training curves: loss on the left, accuracy on the right.
        loss_col, acc_col = st.columns(2)
        with loss_col:
            st.image('static/image/loss.png')
        with acc_col:
            st.image('static/image/acc.png')
    footer = st.container()
    with footer:
        st.markdown("---")
        st.markdown(
            """
            <style>
            p {
                font-size: 16px;
                text-align: center;
            }
            a {
                text-decoration: none;
                color: #00a;
                font-weight: 600;
            }
            </style>
            <p>
                © Designed by <a href="#">ONDOA Michelle & NGNINTEDEM Marlyne </a>.
            </p>
            """, unsafe_allow_html=True
        )