# COGNILINE — Gradio + LlamaIndex PDF chat app (Hugging Face Space).
# (The original "Spaces: Sleeping" lines above the code were page-scrape
# residue from the Space's status banner, not part of the program.)
import os
import gradio as gr
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
from llama_index.llms.openai import OpenAI
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_parse import LlamaParse
import nest_asyncio

# LlamaParse drives an asyncio event loop internally; patch the already-running
# loop so it can be re-entered from Gradio's synchronous callbacks.
nest_asyncio.apply()
# --- VISUAL CONFIGURATION ---
# Custom CSS injected into the app at launch (see demo.launch(css=theme_css)).
# The elem_classes used below (.header-container, .logo-img, .brand-name,
# .chatbot-area) are attached to components in the Blocks layout.
theme_css = """
body { background-color: #0b0c10; color: #c5c6c7; font-family: 'Roboto', sans-serif; }
.header-container { display: flex; align-items: center; justify-content: center; gap: 15px; padding: 20px; border-bottom: 1px solid #1f2833; margin-bottom: 20px; }
.logo-img { height: 60px; width: auto; filter: drop-shadow(0 0 10px rgba(102, 252, 241, 0.5)); }
.brand-name { font-size: 32px; font-weight: bold; color: #66fcf1; letter-spacing: 4px; font-family: 'Impact', sans-serif; }
.chatbot-area { height: 500px !important; background-color: #1f2833; border: 1px solid #45a29e; border-radius: 10px; }
"""

# Query engine shared between processar_pdf (writer) and responder (reader).
# Stays None until a PDF has been successfully parsed and indexed.
global_query_engine = None
def processar_pdf(files, api_key_llama, api_key_openai):
    """Parse and index the uploaded PDF(s), building the global query engine.

    Args:
        files: Uploads from gr.File — tempfile-like objects (with .name) or
            plain path strings; may be None/empty when nothing was uploaded.
        api_key_llama: LlamaCloud API key for LlamaParse (may be None).
        api_key_openai: OpenAI API key, expected to start with "sk-" (may be None).

    Returns:
        A user-facing status string: success, or an error message.
    """
    global global_query_engine
    # Guard against None (an untouched Textbox can deliver None, and
    # None.strip() would crash before any user-facing message) and trim
    # whitespace accidentally pasted around the keys.
    api_key_openai = (api_key_openai or "").strip()
    api_key_llama = (api_key_llama or "").strip()
    if not files:
        return "⚠️ Envie um PDF."
    if not api_key_openai.startswith("sk-"):
        return f"⚠️ Erro: A chave digitada não parece uma chave OpenAI válida. Ela começa com: {api_key_openai[:7]}..."
    # Force the keys into the environment so libraries that read env vars
    # (LlamaParse reads LLAMA_CLOUD_API_KEY) pick up these values rather than
    # any stale globals configured on the host.
    os.environ["LLAMA_CLOUD_API_KEY"] = api_key_llama
    os.environ["OPENAI_API_KEY"] = api_key_openai
    try:
        # Configure LlamaIndex explicitly with the user-supplied key instead of
        # relying on ambient environment configuration.
        Settings.llm = OpenAI(model="gpt-4o", api_key=api_key_openai)
        Settings.embed_model = OpenAIEmbedding(api_key=api_key_openai)
        parser = LlamaParse(result_type="markdown", language="pt")
        file_extractor = {".pdf": parser}
        # Normalize uploads to filesystem paths.
        filepaths = [f.name if hasattr(f, "name") else f for f in files]
        documents = SimpleDirectoryReader(input_files=filepaths, file_extractor=file_extractor).load_data()
        index = VectorStoreIndex.from_documents(documents)
        global_query_engine = index.as_query_engine()
        return f"✅ CONECTADO! Chave detectada: {api_key_openai[:7]}***"
    except Exception as e:
        return f"❌ Erro Técnico: {str(e)}"
def responder(message, history):
    """Chat callback for gr.ChatInterface.

    Returns a plain string: the RAG answer for *message*, an offline notice
    when no engine has been configured yet, or an error description.
    The *history* argument is required by ChatInterface but unused here.
    """
    global global_query_engine
    engine = global_query_engine
    if engine is None:
        return "⚠️ Sistema Offline. Configure acima."
    try:
        return str(engine.query(message))
    except Exception as e:
        return f"Erro: {str(e)}"
# --- UI LAYOUT ---
with gr.Blocks() as demo:
    # Header: optional logo (only shown if logo.png ships with the app) + brand.
    with gr.Row(elem_classes="header-container"):
        if os.path.exists("logo.png"):
            gr.Image("logo.png", elem_classes="logo-img", show_label=False, show_download_button=False)
        gr.Markdown("<div class='brand-name'>COGNILINE</div>")
    with gr.Row():
        # Left column: configuration panel (API keys, PDF upload, activation).
        with gr.Column(scale=1, min_width=300):
            gr.Markdown("### ⚙️ Painel")
            txt_llama = gr.Textbox(label="LlamaCloud Key", type="password")
            # NOTE(review): the original comment said autocomplete="off" was
            # added so the browser would not interfere, but no such argument
            # is actually passed here — confirm whether it was lost.
            txt_openai = gr.Textbox(label="OpenAI Key (sk-...)", type="password")
            file_up = gr.File(label="PDF", file_count="multiple", file_types=[".pdf"])
            btn_start = gr.Button("ATIVAR", variant="primary")
            lbl_status = gr.Textbox(label="Status", interactive=False)
        # Right column: chat backed by responder(); custom CSS class sizes it.
        with gr.Column(scale=3):
            gr.ChatInterface(fn=responder, chatbot=gr.Chatbot(elem_classes="chatbot-area"))
    # Wire the button: index the uploaded PDFs and report status.
    btn_start.click(processar_pdf, inputs=[file_up, txt_llama, txt_openai], outputs=lbl_status)
if __name__ == "__main__":
    # Launch the app with the custom CSS layered over the Soft theme.
    demo.launch(css=theme_css, theme=gr.themes.Soft())