File size: 9,031 Bytes
fd8ef38
d62e3f7
 
fd8ef38
 
 
 
 
 
 
 
 
d62e3f7
 
fd8ef38
d62e3f7
b13d2b2
fd8ef38
 
 
 
 
 
 
b13d2b2
d62e3f7
b13d2b2
d62e3f7
b13d2b2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fd8ef38
 
 
 
 
b13d2b2
fd8ef38
 
 
 
 
 
 
b13d2b2
fd8ef38
 
 
b13d2b2
fd8ef38
 
b13d2b2
 
fd8ef38
d62e3f7
b13d2b2
fd8ef38
d62e3f7
fd8ef38
8240179
 
d62e3f7
 
8240179
d2a58f5
d62e3f7
fd8ef38
d62e3f7
8240179
fd8ef38
d62e3f7
fd8ef38
d2a58f5
 
d62e3f7
fd8ef38
 
d62e3f7
 
 
 
 
d2a58f5
d62e3f7
 
 
 
 
 
 
 
b13d2b2
 
d62e3f7
fd8ef38
b13d2b2
d62e3f7
 
 
fd8ef38
8240179
fd8ef38
b13d2b2
d2a58f5
fd8ef38
8240179
d62e3f7
 
8240179
 
 
d62e3f7
b13d2b2
8240179
d62e3f7
fd8ef38
d62e3f7
b13d2b2
fd8ef38
b13d2b2
fd8ef38
 
b13d2b2
fd8ef38
 
d62e3f7
 
b13d2b2
d62e3f7
fd8ef38
 
 
b13d2b2
d2a58f5
d62e3f7
b13d2b2
d62e3f7
fd8ef38
 
b13d2b2
d62e3f7
fd8ef38
 
b13d2b2
 
d2a58f5
fd8ef38
8240179
d62e3f7
fd8ef38
 
b13d2b2
d62e3f7
fd8ef38
 
d62e3f7
b13d2b2
fd8ef38
b13d2b2
 
fd8ef38
 
b13d2b2
fd8ef38
d62e3f7
 
8240179
fd8ef38
d62e3f7
b13d2b2
8240179
 
 
d62e3f7
8240179
 
fd8ef38
 
b13d2b2
8240179
d62e3f7
8240179
 
 
 
d62e3f7
8240179
 
d62e3f7
8240179
d62e3f7
 
fd8ef38
d62e3f7
8240179
d62e3f7
 
8240179
fd8ef38
d62e3f7
8240179
fd8ef38
d62e3f7
 
fd8ef38
d62e3f7
b13d2b2
8240179
 
 
d62e3f7
8240179
d62e3f7
fd8ef38
d62e3f7
b13d2b2
 
8240179
 
 
 
 
d62e3f7
 
b13d2b2
fd8ef38
 
 
 
d62e3f7
 
 
 
8240179
b13d2b2
d62e3f7
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
# ╔════════════════════════════════════════════════════════════════════════════╗
# ║ PIPELINE v32: 100% COMPATÍVEL HF SPACES | ZERO WARNINGS                     ║
# ║ Corrige: Chatbot type='messages' | JSON sem 'lines'                         ║
# ╚════════════════════════════════════════════════════════════════════════════╝

import os
import json
import re
import time
from datetime import datetime
import gradio as gr
import google.generativeai as genai
import warnings
warnings.filterwarnings("ignore")

# ==================== 1. CONFIGURATION ====================
# BUG FIX: the original default "SUA_API_KEY_AQUI" made `api_key` always
# truthy, so the demo-mode branch below was dead code and genai.configure()
# ran with a placeholder key. Exclude the placeholder explicitly.
api_key = os.getenv("GOOGLE_API_KEY", "SUA_API_KEY_AQUI")
if api_key and api_key != "SUA_API_KEY_AQUI":
    genai.configure(api_key=api_key)
    model_flash = genai.GenerativeModel("gemini-flash-latest")
    model_pro = genai.GenerativeModel("gemini-pro-latest")
else:
    # Demo mode: downstream code checks these for None before any API call.
    model_flash = model_pro = None

ARQUIVO_CONFIG = "protocolo.json"    # agent-plan JSON shown in the Config tab
ARQUIVO_HISTORY = "history_v32.json" # persisted chat history

# ==================== 2. UTILIDADES ====================
def carregar_protocolo():
    """Return the protocol config file's text, or "[]" if it can't be read.

    "[]" (an empty JSON array) is the safe default for the Config tab.
    """
    try:
        with open(ARQUIVO_CONFIG, "r", encoding="utf-8") as f:
            return f.read()
    except OSError:
        # Narrowed from a bare except: only file-system failures are expected.
        return "[]"

def salvar_protocolo(conteudo):
    """Validate *conteudo* as JSON and persist it to ARQUIVO_CONFIG.

    Returns a short status string for the UI. Invalid JSON is never written.
    """
    try:
        json.loads(conteudo)  # validate before touching the file
        with open(ARQUIVO_CONFIG, "w", encoding="utf-8") as f:
            f.write(conteudo)
        return "βœ… Salvo"
    except (ValueError, OSError):
        # Narrowed from a bare except: invalid JSON or an I/O failure.
        return "❌ Erro JSON"

def carregar_history():
    """Load the persisted chat history, or return [] when absent/corrupt."""
    try:
        with open(ARQUIVO_HISTORY, "r", encoding="utf-8") as f:
            return json.load(f)
    except (OSError, ValueError):
        # Narrowed from a bare except: missing file or malformed JSON
        # (json.JSONDecodeError is a ValueError subclass).
        return []

def salvar_history(history):
    """Persist the chat history to ARQUIVO_HISTORY; return True on success."""
    try:
        with open(ARQUIVO_HISTORY, "w", encoding="utf-8") as f:
            json.dump(history, f, ensure_ascii=False, indent=2)
        return True
    except (OSError, TypeError, ValueError):
        # Narrowed from a bare except: I/O failure or a non-serializable
        # object in the history (json.dump raises TypeError for those).
        return False

def ler_anexo(arquivo):
    """Read an uploaded text file and wrap its content in ANEXO markers.

    *arquivo* is an object exposing a ``.name`` path (Gradio upload), or None.
    Returns "" when there is no attachment or it cannot be read as UTF-8 text.
    """
    if arquivo is None:
        return ""
    try:
        with open(arquivo.name, "r", encoding="utf-8") as f:
            conteudo = f.read()
    except (OSError, UnicodeDecodeError):
        # Narrowed from a bare except: unreadable path or binary content.
        # Best-effort: silently skip the attachment.
        return ""
    return f"\n\n[ANEXO: {os.path.basename(arquivo.name)}]\n{conteudo}\n[FIM ANEXO]\n"

# ==================== 3. PLANEJADOR ROBUSTO ====================
def criar_plano_auto(full_input, history_contexto):
    """Ask the Pro model to draft a JSON agent plan for *full_input*.

    Returns (plan, status): plan is a list of agent-config dicts, status a
    short UI string. Falls back to a fixed two-agent plan when no API key
    is configured or the model output cannot be parsed.
    """
    # BUG FIX: defined before first use — the original executed the
    # `if not model_pro` branch (which calls fallback_plano) *before* this
    # nested def ran, raising NameError whenever model_pro was None.
    def fallback_plano():
        # Safe default: analyse with the fast model, answer with the pro one.
        return [
            {"nome": "Analisador", "missao": "Analise o input principal", "modelo": "flash", "tipo_saida": "json"},
            {"nome": "RespostaFinal", "missao": "Crie resposta clara e completa", "modelo": "pro", "tipo_saida": "texto"}
        ]

    if not model_pro:
        return fallback_plano(), "⚠️ Demo mode"

    # Tiny summary of the last two turns keeps the planning prompt cheap.
    history_resumo = "\n".join([f"πŸ‘€: {h[0][:80]}..." for h in history_contexto[-2:]])[:150] if history_contexto else ""

    prompt = f"""INPUT: {full_input[:350]}
HISTΓ“RICO: {history_resumo}

CRIE PLANO JSON (2-4 agentes):
[
  {{"nome": "Analisador", "missao": "Analise input", "modelo": "flash", "tipo_saida": "json"}},
  {{"nome": "Final", "missao": "Resposta final", "modelo": "pro", "tipo_saida": "texto"}}
]"""

    try:
        # BUG FIX: generate_content has no `temperature` kwarg — the original
        # call raised TypeError and always fell back. Use generation_config.
        resp = model_pro.generate_content(prompt, generation_config={"temperature": 0.1})
        raw = resp.text.strip()
        # BUG FIX: the original pattern matched six literal backticks; strip
        # markdown fences (``` / ```json) and blank lines instead.
        clean = re.sub(r'```[a-zA-Z]*|\n\s*\n', '', raw)
        clean = re.sub(r'^.*?\[', '[', clean)   # drop anything before the array
        clean = re.sub(r'\].*?$', ']', clean)   # drop anything after it

        plano = json.loads(clean)
        if isinstance(plano, list) and len(plano) >= 2:
            return plano, f"βœ… {len(plano)} agentes"
        return fallback_plano(), "⚠️ Plano padrão"
    except Exception:
        # Any API/parse failure degrades gracefully to the default plan.
        return fallback_plano(), "⚠️ Fallback ativo"

# ==================== 4. EXECUTOR ====================
def executar_no(timeline, config):
    """Run one agent node against the timeline.

    Returns (entry, log, raw_text): entry is the timeline dict to append,
    log a short status line, raw_text the unprocessed model output.
    """
    if not (model_flash or model_pro):
        return {"role": "system", "error": "Sem API"}, "(ERRO)", "Sem key"

    modelo = model_pro if config.get("modelo") == "pro" else model_flash
    # Keep the prompt bounded: only the last 6 timeline entries as context.
    contexto = json.dumps(timeline[-6:], ensure_ascii=False)

    prompt = f"TIMELINE: {contexto}\nAGENTE: {config['nome']}\nMISSΓƒO: {config['missao']}"

    log = f"πŸ”Έ {config['nome']}..."
    try:
        resp = modelo.generate_content(prompt)
        out = resp.text.strip()

        if config.get('tipo_saida') == 'json':
            # BUG FIX: the original pattern matched six literal backticks;
            # strip markdown fences (``` / ```json) the model may emit.
            out = re.sub(r'```[a-zA-Z]*', '', out).strip()
            # BUG FIX: also accept JSON arrays, not only objects; anything
            # else is wrapped as a plain-text summary dict.
            content = json.loads(out) if out.startswith(('{', '[')) else {"resumo": out}
        else:
            content = out

        log += " βœ“ OK"
        return {"role": "assistant", "agent": config['nome'], "content": content}, log, out
    except Exception as e:
        # Model or parse failure becomes a system entry so the pipeline
        # keeps running instead of aborting the whole turn.
        return {"role": "system", "error": str(e)}, f"{log} βœ—", str(e)

# ==================== 5. ORQUESTRADOR ====================
def orquestrador(texto, arquivo, history, json_config):
    """Drive the full pipeline for one user turn, streaming progress.

    Generator yielding (history, timeline, logs) tuples so Gradio can update
    the chatbot, the Timeline JSON view and the log textbox incrementally.

    texto: the user's text input.
    arquivo: optional uploaded file object (or None).
    history: chatbot history as mutable [user, assistant] pairs.
    json_config: raw text of the Config tab (currently unused here).
    """
    anexo = ler_anexo(arquivo)
    full_input = f"{texto}\n{anexo}".strip()
    
    if not full_input:
        yield history, {}, "Sem input."
        return
    
    # Chatbot uses the legacy "tuples" format: each entry is a
    # [user_message, assistant_message] pair mutated in place below.
    history.append([full_input, "🎯 Criando plano..."])
    timeline = [{"role": "user", "content": full_input}]
    logs = f"πŸš€ v32: {datetime.now().strftime('%H:%M:%S')}\n"
    
    yield history, timeline, logs
    
    # Ask the planner for an agent list (falls back to a default plan).
    plano, log_plano = criar_plano_auto(full_input, history)
    logs += f"PLANO: {log_plano}\n"
    timeline.append({"role": "system", "plano": plano})
    
    history[-1][1] = f"βœ… {log_plano}"
    yield history, timeline, logs
    
    # Execute each planned agent in order, appending results to the timeline.
    for i, cfg in enumerate(plano):
        history[-1][1] = f"[{i+1}/{len(plano)}] {cfg['nome']}..."
        yield history, timeline, logs
        
        res, log_add, raw = executar_no(timeline, cfg)
        timeline.append(res)
        logs += f"  {log_add}\n"
        
        # Only textual agent output is surfaced in the chat bubble (capped).
        if cfg.get('tipo_saida') == 'texto' and isinstance(res.get('content'), str):
            history[-1][1] = res['content'][:850]
            yield history, timeline, logs
    
    # Best-effort persistence; salvar_history swallows I/O failures.
    salvar_history(history)
    logs += "βœ… ConcluΓ­do"
    yield history, timeline, logs

# ==================== 6. UI 100% COMPATÍVEL HF SPACES ====================
def ui_clean():
    """Build and return the Gradio Blocks app (chat, debug and config tabs)."""
    # Hide the Gradio footer and container borders.
    css = """
    footer {display: none !important;}
    .contain {border: none !important;}
    """
    
    config_init = carregar_protocolo()
    
    with gr.Blocks(title="πŸš€ PIPELINE v32 - ZERO WARNINGS", css=css, theme=gr.themes.Soft()) as app:
        gr.Markdown("# PIPELINE v32 - Auto-Plan Inteligente")
        
        with gr.Tabs():
            # TAB 1: chat. NOTE(review): despite the file's "messages" banner,
            # the chatbot uses the legacy tuples format, matching the
            # [user, assistant] pairs built by orquestrador.
            with gr.Tab("πŸ’¬ Pipeline"):
                chatbot = gr.Chatbot(
                    height=600,
                    show_copy_button=True,
                    type="tuples",  # legacy pair format, not "messages"
                    label=""
                )
                
                with gr.Row():
                    with gr.Column(scale=10):
                        txt_in = gr.Textbox(
                            placeholder="Digite qualquer input...",
                            lines=2,
                            container=False,
                            show_label=False
                        )
                    with gr.Column(scale=1):
                        file_in = gr.UploadButton(
                            "πŸ“Ž",
                            file_types=[".txt", ".md", ".json", ".py"]
                        )
                    with gr.Column(scale=1):
                        btn_send = gr.Button("▢️ Executar", variant="primary")
                
                # Show the attached file name next to the upload button.
                file_status = gr.Markdown("")
                file_in.upload(
                    lambda x: f"πŸ“Ž {os.path.basename(x.name) if x else ''}", 
                    file_in, file_status
                )
            
            # TAB 2: debug view of the raw timeline and execution logs.
            with gr.Tab("πŸ” Debug"):
                with gr.Row():
                    out_dna = gr.JSON(label="Timeline")
                    out_logs = gr.Textbox(label="Logs", lines=15)
            
            # TAB 3: config editor (content is loaded but not consumed yet;
            # the "Salvar" button below has no handler wired).
            with gr.Tab("βš™οΈ Config"):
                code_json = gr.Code(
                    value=config_init, 
                    language="json", 
                    label="Config (nΓ£o usada)"
                )
                gr.Button("Salvar", variant="secondary")
        
        # Wire both the send button and textbox Enter key to the pipeline,
        # then clear the textbox after each run.
        triggers = [btn_send.click, txt_in.submit]
        for trig in triggers:
            trig(
                orquestrador,
                inputs=[txt_in, file_in, chatbot, code_json],
                outputs=[chatbot, out_dna, out_logs]
            ).then(
                lambda: gr.update(value=""),
                outputs=[txt_in]
            )
    
    return app

if __name__ == "__main__":
    # Startup banner, then serve the app on all interfaces (HF Spaces port).
    banner = (
        "πŸš€ PIPELINE v32 - 100% HF SPACES COMPATΓVEL",
        "βœ… Sem warnings Gradio",
        "βœ… Sem erros JSON plano",
        "βœ… Python 3.10 OK",
    )
    for linha in banner:
        print(linha)

    ui_clean().launch(server_name="0.0.0.0", server_port=7860, share=False)