GABASSI committed on
Commit
8a33e64
·
verified ·
1 Parent(s): 17cafbe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -9
app.py CHANGED
@@ -2,12 +2,14 @@ import os
2
  import uvicorn
3
  import base64
4
  import nest_asyncio
 
5
  import logging
6
  import tempfile
7
  import io
8
  import requests
9
- from typing import List, Optional
10
- from fastapi import FastAPI, UploadFile, File
 
11
  from fastapi.responses import HTMLResponse
12
  from fastapi.middleware.cors import CORSMiddleware
13
  from pydantic import BaseModel
@@ -23,7 +25,7 @@ from llama_index.embeddings.huggingface import HuggingFaceEmbedding
23
  from llama_parse import LlamaParse
24
 
25
  logging.basicConfig(level=logging.INFO)
26
- logger = logging.getLogger("CognilineCore")
27
 
28
  HF_TOKEN = os.getenv("HF_TOKEN")
29
  LLAMA_KEY = os.getenv("LLAMA_KEY")
@@ -38,12 +40,11 @@ app.add_middleware(
38
  allow_headers=["*"],
39
  )
40
 
41
- # --- 1. MOTOR DE VISÃO ---
42
  class VisionCore:
43
  def __init__(self, token):
44
  self.api_url = "https://api-inference.huggingface.co/models/Salesforce/blip-image-captioning-large"
45
  self.headers = {"Authorization": f"Bearer {token}"}
46
-
47
  def see(self, image_b64):
48
  try:
49
  if "," in image_b64: image_b64 = image_b64.split(",")[1]
@@ -52,9 +53,9 @@ class VisionCore:
52
  if response.status_code == 200:
53
  result = response.json()
54
  if isinstance(result, list) and len(result) > 0:
55
- return f"[SISTEMA VISUAL]: {result[0].get('generated_text', 'Objeto detetado.')}"
56
  return "[Erro Visual: API ocupada]"
57
- except Exception as e: return f"[Erro: {str(e)}]"
58
 
59
  # --- 2. BUSCA WEB ---
60
  def search_web(query):
@@ -76,7 +77,7 @@ class DoubleBrainLLM(CustomLLM):
76
  def metadata(self) -> LLMMetadata: return LLMMetadata(model_name=self.model_name, num_output=2048, context_window=8192)
77
  @llm_completion_callback()
78
  def complete(self, prompt: str, **kwargs) -> CompletionResponse:
79
- sys = "Sistema COGNILINE. Regra: Use APENAS LaTeX ($...$) para fórmulas. Responda em Português Técnico."
80
  msgs = [{"role": "system", "content": sys}, {"role": "user", "content": prompt}]
81
  try:
82
  resp = self.client.chat_completion(model=self.model_name, messages=msgs, max_tokens=1536, temperature=0.2)
@@ -143,7 +144,7 @@ async def api_ask(q: Q):
143
  async def home():
144
  try:
145
  with open("index.html", "r", encoding="utf-8") as f: html = f.read()
146
- except: return HTMLResponse("<h1>ERRO: Crie o arquivo index.html na aba Files!</h1>")
147
 
148
  logo = ""
149
  if os.path.exists("CGL.png"):
 
2
  import uvicorn
3
  import base64
4
  import nest_asyncio
5
+ import shutil
6
  import logging
7
  import tempfile
8
  import io
9
  import requests
10
+ # CORREÇÃO: Adicionado 'Any' na lista de imports
11
+ from typing import List, Optional, Any
12
+ from fastapi import FastAPI, UploadFile, File, Form
13
  from fastapi.responses import HTMLResponse
14
  from fastapi.middleware.cors import CORSMiddleware
15
  from pydantic import BaseModel
 
25
  from llama_parse import LlamaParse
26
 
27
  logging.basicConfig(level=logging.INFO)
28
+ logger = logging.getLogger("CognilineOmni")
29
 
30
  HF_TOKEN = os.getenv("HF_TOKEN")
31
  LLAMA_KEY = os.getenv("LLAMA_KEY")
 
40
  allow_headers=["*"],
41
  )
42
 
43
+ # --- 1. VISÃO ---
44
  class VisionCore:
45
  def __init__(self, token):
46
  self.api_url = "https://api-inference.huggingface.co/models/Salesforce/blip-image-captioning-large"
47
  self.headers = {"Authorization": f"Bearer {token}"}
 
48
  def see(self, image_b64):
49
  try:
50
  if "," in image_b64: image_b64 = image_b64.split(",")[1]
 
53
  if response.status_code == 200:
54
  result = response.json()
55
  if isinstance(result, list) and len(result) > 0:
56
+ return f"[VISÃO]: {result[0].get('generated_text', 'Objeto técnico.')}"
57
  return "[Erro Visual: API ocupada]"
58
+ except Exception as e: return f"[Erro Vision: {str(e)}]"
59
 
60
  # --- 2. BUSCA WEB ---
61
  def search_web(query):
 
77
  def metadata(self) -> LLMMetadata: return LLMMetadata(model_name=self.model_name, num_output=2048, context_window=8192)
78
  @llm_completion_callback()
79
  def complete(self, prompt: str, **kwargs) -> CompletionResponse:
80
+ sys = "Sistema COGNILINE. 1. Use APENAS LaTeX ($...$) para fórmulas. 2. Responda em Português Técnico."
81
  msgs = [{"role": "system", "content": sys}, {"role": "user", "content": prompt}]
82
  try:
83
  resp = self.client.chat_completion(model=self.model_name, messages=msgs, max_tokens=1536, temperature=0.2)
 
144
  async def home():
145
  try:
146
  with open("index.html", "r", encoding="utf-8") as f: html = f.read()
147
+ except: return HTMLResponse("<h1>ERRO CRÍTICO: Crie o arquivo index.html na aba Files!</h1>")
148
 
149
  logo = ""
150
  if os.path.exists("CGL.png"):