File size: 7,019 Bytes
cd5c320
d143b5b
 
cd5c320
9a4ccf8
 
cd5c320
9a4ccf8
0e239d8
 
9a4ccf8
cd5c320
 
 
 
d143b5b
cd5c320
0073632
 
0e239d8
 
 
d143b5b
0e239d8
d143b5b
0e239d8
 
 
 
d143b5b
0e239d8
d143b5b
 
 
 
0e239d8
d143b5b
62c65cf
0e239d8
 
d143b5b
 
0e239d8
 
9a4ccf8
d143b5b
 
 
 
0e239d8
d143b5b
9a4ccf8
0e239d8
d143b5b
 
0e239d8
9a4ccf8
d143b5b
 
 
9a4ccf8
d143b5b
9a4ccf8
d143b5b
9a4ccf8
d143b5b
 
 
 
9a4ccf8
d143b5b
 
0e239d8
9a4ccf8
d143b5b
 
 
 
 
62c65cf
0e239d8
 
d143b5b
 
 
 
0e239d8
d143b5b
 
 
 
 
62c65cf
d143b5b
 
 
62c65cf
d143b5b
 
 
9a4ccf8
d143b5b
cd5c320
d143b5b
 
9a4ccf8
d143b5b
cd5c320
d143b5b
 
 
 
 
0e239d8
d143b5b
 
 
9a4ccf8
0e239d8
d143b5b
 
 
62c65cf
d143b5b
 
 
62c65cf
d143b5b
62c65cf
 
0e239d8
d143b5b
 
62c65cf
d143b5b
 
 
62c65cf
0e239d8
62c65cf
d143b5b
 
 
 
0e239d8
62c65cf
d143b5b
0e239d8
d143b5b
 
 
0e239d8
d143b5b
 
0e239d8
d143b5b
 
0e239d8
d143b5b
 
 
 
 
 
 
 
 
 
 
62c65cf
d143b5b
 
 
 
 
 
 
 
 
62c65cf
cd5c320
9a4ccf8
 
d143b5b
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
# ╔════════════════════════════════════════════════════════════════════════════╗
# ║             PIPELINE v27: UI LIMPA & ESTRUTURA DE ABAS                     ║
# ║        Layout: Chat (Aba 1) | Debug (Aba 2) | Config (Aba 3)               ║
# ╚════════════════════════════════════════════════════════════════════════════╝

import os
import json
import re
import time
from datetime import datetime
import gradio as gr
import google.generativeai as genai

# ==================== 1. CONFIGURATION ====================
# API key comes from the environment.
# NOTE(review): the literal fallback "SUA_API_KEY_AQUI" is a non-empty
# placeholder, so the `if api_key` guard below is always true — confirm
# whether an unset env var should really still call genai.configure().
api_key = os.getenv("GOOGLE_API_KEY", "SUA_API_KEY_AQUI")
if api_key: genai.configure(api_key=api_key)

# Two model handles; executar_no picks "pro" per-agent via the protocol
# config and defaults to "flash" otherwise.
model_flash = genai.GenerativeModel("gemini-flash-latest")
model_pro   = genai.GenerativeModel("gemini-pro-latest")

# Path of the JSON file that defines the agent pipeline (see Config tab).
ARQUIVO_CONFIG = "protocolo.json"

# ==================== 2. UTILIDADES ====================

def carregar_protocolo():
    """Return the raw text of the pipeline config file.

    Falls back to "[]" (an empty JSON list, i.e. no agents) when the file
    is missing or unreadable — expected on first run.
    """
    try:
        with open(ARQUIVO_CONFIG, "r", encoding="utf-8") as f:
            return f.read()
    except OSError:
        # Narrowed from a bare except: only I/O failures are swallowed;
        # programming errors now surface instead of being hidden.
        return "[]"

def salvar_protocolo(conteudo):
    """Validate *conteudo* as JSON and persist it to ARQUIVO_CONFIG.

    Returns a short status string for the UI label: "βœ… Salvo" on success,
    "❌ Erro JSON" on invalid JSON or a write failure.
    """
    try:
        json.loads(conteudo)  # validate before touching the file
        with open(ARQUIVO_CONFIG, "w", encoding="utf-8") as f:
            f.write(conteudo)
        return "βœ… Salvo"
    except (ValueError, OSError):
        # Narrowed from a bare except. ValueError covers json.JSONDecodeError;
        # OSError covers write failures. NOTE(review): both causes share one
        # message — a distinct "write error" label may help users.
        return "❌ Erro JSON"

def ler_anexo(arquivo):
    """Read an uploaded file and wrap its text in system-attachment markers.

    *arquivo* is a gradio upload object exposing a `.name` path, or None.
    Returns "" when there is no attachment or the file cannot be read as
    UTF-8 text — attachment failures are deliberately non-fatal.
    """
    if arquivo is None:
        return ""
    try:
        with open(arquivo.name, "r", encoding="utf-8") as f:
            return f"\n\n[ANEXO SISTEMA: {os.path.basename(arquivo.name)}]\n{f.read()}\n[FIM ANEXO]\n"
    except (OSError, UnicodeDecodeError):
        # Narrowed from a bare except: only I/O and decoding problems are
        # swallowed; programming errors now surface.
        return ""

# ==================== 3. ENGINE DE EXECUÇÃO ====================

def executar_no(timeline, config):
    """Run one pipeline agent against the current timeline.

    Returns a 3-tuple (timeline entry, log fragment, raw model text); on
    any failure the entry is a system-error record instead of raising, so
    the orchestrator loop never crashes mid-pipeline.
    """
    # Agents may opt into the "pro" model; everything else uses flash.
    engine = model_pro if config.get("modelo") == "pro" else model_flash

    serialized = json.dumps(timeline, ensure_ascii=False, indent=2)
    prompt = f"--- TIMELINE ---\n{serialized}\n----------------\nAGENTE: {config['nome']}\nMISSÃO: {config['missao']}"

    log = f"\nπŸ”Έ {config['nome']}..."
    try:
        started = time.time()
        raw = engine.generate_content(prompt).text
        elapsed = time.time() - started

        if config['tipo_saida'] == 'json':
            # Strip the markdown code fences the model may wrap JSON in.
            content = json.loads(raw.strip().replace('```json', '').replace('```', ''))
        else:
            content = raw

        log += f" (OK - {elapsed:.2f}s)"
        return {"role": "assistant", "agent": config['nome'], "content": content}, log, raw
    except Exception as e:
        # Deliberate catch-all: failures become timeline records + log text.
        return {"role": "system", "error": str(e)}, f" (ERRO: {e})", str(e)

# ==================== 4. ORQUESTRADOR ====================

def orquestrador(texto, arquivo, history, json_config):
    """Drive the whole agent pipeline for one user turn (generator).

    Yields (chat history, timeline, logs) after each step so the gradio UI
    renders progress incrementally.

    texto       -- user message text.
    arquivo     -- optional gradio upload object, or None.
    history     -- chat history as [[user, assistant], ...] pairs.
    json_config -- protocol JSON string from the Config tab.
    """
    # 1. Input Check: attachment text is appended to the typed message.
    anexo = ler_anexo(arquivo)
    full_input = f"{texto}\n{anexo}".strip()
    
    if not full_input:
        yield history, {}, "Sem input."
        return

    # 2. Setup: `history + [...]` copies, so the caller's list is untouched.
    history = history + [[texto + (" πŸ“Ž" if arquivo else ""), None]]
    try:
        protocolo = json.loads(json_config)
    except (ValueError, TypeError):
        # Narrowed from a bare except: ValueError covers malformed JSON
        # (json.JSONDecodeError), TypeError covers a None/non-string config.
        history[-1][1] = "❌ Erro no JSON de ConfiguraΓ§Γ£o."
        yield history, {}, "Erro JSON"
        return

    timeline = [{"role": "user", "content": full_input}]
    logs = f"πŸš€ START: {datetime.now().strftime('%H:%M:%S')}\n"
    history[-1][1] = "⏳ Iniciando anÑlise..."
    yield history, timeline, logs

    # 3. Loop: each protocol entry runs once, in order, feeding the timeline.
    final_response = ""
    for cfg in protocolo:
        history[-1][1] = f"βš™οΈ {cfg['nome']} trabalhando..."
        yield history, timeline, logs
        
        res, log_add, raw = executar_no(timeline, cfg)
        timeline.append(res)
        logs += log_add + "\n"
        
        if cfg['tipo_saida'] == 'texto':
            # Text agents drive what the user sees; the last one wins.
            final_response = res['content']
            history[-1][1] = final_response
        
        yield history, timeline, logs

    logs += "βœ… FIM."
    yield history, timeline, logs

# ==================== 5. UI LIMPA (v27) ====================

def ui_clean():
    """Build and return the three-tab gradio Blocks app (Chat / Debug / Config)."""
    # Hide the gradio footer and container borders for a cleaner look.
    css = """
    footer {display: none !important;} 
    .contain {border: none !important;}
    """
    
    # Pre-load the saved protocol so the Config tab opens with current content.
    config_init = carregar_protocolo()

    with gr.Blocks(title="AI Forensics", css=css, theme=gr.themes.Soft()) as app:
        
        with gr.Tabs():
            
            # === TAB 1: CHAT (clean, end-user facing) ===
            with gr.Tab("πŸ’¬ Investigador"):
                chatbot = gr.Chatbot(
                    label="", 
                    show_label=False, 
                    height=600, 
                    show_copy_button=True,
                    render_markdown=True
                )
                
                with gr.Row():
                    with gr.Column(scale=10):
                        txt_in = gr.Textbox(
                            show_label=False, 
                            placeholder="Descreva o caso ou instruΓ§Γ£o...", 
                            lines=1,
                            max_lines=5,
                            container=False
                        )
                    with gr.Column(scale=1, min_width=50):
                        file_in = gr.UploadButton(
                            "πŸ“Ž", 
                            file_types=[".txt", ".md", ".csv", ".json"],
                            size="sm"
                        )
                    with gr.Column(scale=1, min_width=80):
                        btn_send = gr.Button("Enviar", variant="primary", size="sm")
                
                # Subtle visual feedback showing the attached file's name.
                file_status = gr.Markdown("", visible=True)
                file_in.upload(lambda x: f"πŸ“Ž Anexo: {os.path.basename(x.name)}", file_in, file_status)

            # === TAB 2: DEBUGGING (timeline + system logs, tucked away) ===
            with gr.Tab("πŸ•΅οΈ DepuraΓ§Γ£o"):
                with gr.Row():
                    out_dna = gr.JSON(label="DNA (Timeline)")
                    out_logs = gr.Textbox(label="Logs do Sistema", lines=20)

            # === TAB 3: CONFIG (technical; edits protocolo.json in place) ===
            with gr.Tab("βš™οΈ Config"):
                with gr.Row():
                    btn_save = gr.Button("Salvar Config")
                    lbl_save = gr.Label(show_label=False)
                code_json = gr.Code(value=config_init, language="json", label="protocolo.json")
                btn_save.click(salvar_protocolo, code_json, lbl_save)

        # === TRIGGERS ===
        # Enter in the textbox or the send button both run the orchestrator.
        triggers = [btn_send.click, txt_in.submit]
        
        for trig in triggers:
            trig(
                orquestrador,
                inputs=[txt_in, file_in, chatbot, code_json],
                outputs=[chatbot, out_dna, out_logs]
            ).then(
                lambda: (None, ""), # clear the text input and file label after sending
                outputs=[txt_in, file_status] 
            )

    return app

if __name__ == "__main__":
    # Build the gradio app and serve it locally.
    application = ui_clean()
    application.launch()