|
|
import logging |
|
|
import sys |
|
|
from IPython.display import Markdown, display |
|
|
from sqlalchemy import text |
|
|
from llama_index.core import ServiceContext, PromptHelper |
|
|
|
|
|
from llama_index.embeddings.openai import OpenAIEmbedding |
|
|
from llama_index.llms.openai import OpenAI |
|
|
from llama_index.core.indices.struct_store.sql_query import NLSQLTableQueryEngine |
|
|
from sqlalchemy import (create_engine, MetaData, Table, Column, String, Integer) |
|
|
import re |
|
|
from llama_index.core import Document, VectorStoreIndex |
|
|
from mistralai.client import MistralClient |
|
|
from mistralai.models.chat_completion import ChatMessage |
|
|
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler |
|
|
from langchain_community.llms import HuggingFaceTextGenInference |
|
|
import re |
|
|
import os |
|
|
import gradio as gr |
|
|
from dotenv import load_dotenv |
|
|
import os |
|
|
import openai |
|
|
|
|
|
|
|
|
# Load environment configuration (.env): OpenAI API key and DB credentials.
load_dotenv()

OPENAI_API_KEY = os.getenv('openai_key')

db_user = os.getenv('db_s_user')
db_password = os.getenv('db_s_pwd')
db_host = os.getenv('db_s_host')
db_name = os.getenv('db_s_name')

# SQLAlchemy engine for the PostgreSQL database (psycopg2 driver).
# NOTE: the original created the engine twice; one instance is enough —
# create_engine returns a lazy connection pool, not an open connection.
connection_string = f"postgresql+psycopg2://{db_user}:{db_password}@{db_host}/{db_name}"

engine = create_engine(connection_string)

metadata_obj = MetaData()
|
|
|
|
|
def query_fun(input_txt):
    """Trigram-similarity search over ``public.info_2``.

    Parameters
    ----------
    input_txt : str
        Free-text user query matched against the ``item``, ``theme`` and
        ``signataire`` columns with pg_trgm's ``%`` operator.

    Returns
    -------
    list[str]
        A flat list of four strings per matching row —
        [item, suppleant, signataire, theme, item, ...] — for at most the
        5 best matches; empty when nothing clears the 0.2 threshold.
    """
    # The user text is passed as a bound parameter (:q), never interpolated
    # into the SQL string — this closes the SQL-injection hole the previous
    # f-string query had.
    similarity_sql = text("""
        SELECT
            item,
            suppleant,
            signataire,
            theme,
            GREATEST(similarity(item, :q), similarity(theme, :q)) AS sml
        FROM public.info_2
        WHERE
            item % :q OR theme % :q OR signataire % :q
        ORDER BY
            sml DESC,
            item
        LIMIT 5;
    """)

    result_list = []
    with engine.connect() as connection:
        # pg_trgm's set_limit() is per-session, so it must run on the same
        # connection that executes the similarity query.
        connection.execute(text("SELECT set_limit(0.2);"))
        result = connection.execute(similarity_sql, {"q": input_txt})
        for row in result:
            # Flatten the first four columns of each row, as strings.
            result_list.extend(str(value) for value in row[:4])
    return result_list
|
|
|
|
|
def gradio_interface(input_text):
    """Gradio callback: delegate the user's question to process_text."""
    return process_text(input_text)
|
|
def process_text(input_text):
    """Answer a signing-authority question using DB context plus GPT-4o.

    Runs a similarity search against the database, formats the matching
    rows as context, and asks the OpenAI chat API to answer `input_text`
    against that context.

    Parameters
    ----------
    input_text : str
        The user's question (French, per the UI examples).

    Returns
    -------
    str
        The model's answer, or a French "not found" message when the
        similarity search returns no rows.
    """
    # Strip apostrophes before querying — a guard dating from when the SQL
    # was string-interpolated; harmless to keep for search behavior.
    texte_sans_apostrophes = re.sub(r"'", "", input_text)

    sml_input = query_fun(texte_sans_apostrophes)
    if not sml_input:
        result = "cette information n'existe pas dans votre base de données"
        print(result)
        return result

    # query_fun returns a flat list of 4 fields per row; regroup per row.
    split_list = [sml_input[i:i + 4] for i in range(0, len(sml_input), 4)]

    texts = [f" * item: {sublist[0]} theme:{sublist[3]} premier signataire: {sublist[2]}, suppléant: {sublist[1]}" for sublist in split_list]

    # Renamed from `text` to stop shadowing sqlalchemy.text imported at
    # module level; also drop the duplicate debug print the original had.
    context_text = '\n'.join(texts)
    print(context_text)

    from openai import OpenAI
    client = OpenAI(api_key=OPENAI_API_KEY)

    # NOTE(review): the system prompt says "expert in math" — looks like a
    # copy-paste leftover for this signature-authority app; confirm the
    # intended persona before changing the string.
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[
            {"role": "system", "content": f"You are an expert in math.based on the folowing context answer direct response: {context_text}"},
            {"role": "user", "content": f"{input_text}"},
        ]
    )
    return response.choices[0].message.content
|
|
|
|
|
|
|
|
# Example questions (French) pre-populated in the Gradio UI.
examples=[
    "qui est habilité de signer au Devis et bons de commande égale à 14.000 €HT ?",
    "Qui peut renouveler l'adhésion à des associations professionnelles ?",
    "qui est habilité de signer au Devis et bons de commande égale à 4.000 €HT ?",
    "qui est habilité de signer au courriers liés aux demandes d'attestations fiscales et sociales et assurances",
    "qui signe décision de mobilité interne",
    "Est-ce qu'Anne-Marie Atlan signe les bons de commande à 80 000 ?",
]

# Gradio UI wiring: a single text-in / text-out interface backed by
# gradio_interface -> process_text.
iface = gr.Interface(
    fn=gradio_interface,
    inputs="text",
    outputs="text",
    title="Métropole Signature Expert",
    description="Provide a question related to signing authorities and get a response.",
    examples=examples,
    # Examples hit the DB and the OpenAI API, so don't pre-compute them.
    cache_examples=False,
)

# Starts the web server; blocks until the app is stopped.
# NOTE(review): runs at import time — consider an `if __name__ == "__main__":`
# guard if this module is ever imported elsewhere.
iface.launch()
|
|
|