CanerDedeoglu committed on
Commit
69f64da
·
verified ·
1 Parent(s): 87aa5e9

Update handler.py

Browse files
Files changed (1) hide show
  1. handler.py +46 -88
handler.py CHANGED
@@ -1,91 +1,49 @@
1
- # hf_client.py
2
- from __future__ import annotations
3
- import os, json, base64, io, time
4
- from typing import Any, Dict, Optional, Union
5
- import requests
6
  from PIL import Image
7
 
8
# Default HTTP timeout (seconds) applied to endpoint requests.
DEFAULT_TIMEOUT = 60


class HFEndpointError(RuntimeError):
    """Raised when the inference endpoint returns an error status or a non-JSON body."""
12
-
13
class HFECGClient:
    """Client for a Hugging Face Inference Endpoint (LLaVA/PULSE-style model).

    The ``image`` argument accepts a URL or a local file path; local files
    are converted to a base64 PNG data URL automatically.

    ``infer`` returns
    ``{"text": "...", "raw": <json>, "latency_ms": float, "status_code": int}``.
    """

    def __init__(self, endpoint_url: str, hf_token: str, timeout: int = DEFAULT_TIMEOUT,
                 session: Optional[requests.Session] = None) -> None:
        # Fail fast on missing configuration (messages kept as-is for callers).
        if not endpoint_url:
            raise ValueError("endpoint_url zorunlu.")
        if not hf_token:
            raise ValueError("hf_token zorunlu.")
        self.endpoint_url = endpoint_url.strip()
        self.hf_token = hf_token.strip()
        self.timeout = timeout
        # Reuse a caller-supplied session when given (keeps connection pooling).
        self.session = session or requests.Session()

    @staticmethod
    def _encode_image_to_b64(img: Image.Image) -> str:
        """Serialize a PIL image to a PNG data URL."""
        buffer = io.BytesIO()
        img.save(buffer, format="PNG")
        encoded = base64.b64encode(buffer.getvalue()).decode("utf-8")
        return "data:image/png;base64," + encoded

    def _image_to_payload(self, image: Union[str, Image.Image]) -> str:
        """Normalize *image* (URL, file path, or PIL image) into a payload string."""
        if isinstance(image, Image.Image):
            return self._encode_image_to_b64(image.convert("RGB"))
        if isinstance(image, str):
            if image.lower().startswith(("http://", "https://")):
                # Remote URL: forwarded untouched, the endpoint fetches it.
                return image
            if not os.path.isfile(image):
                raise FileNotFoundError(f"Görsel bulunamadı: {image}")
            return self._encode_image_to_b64(Image.open(image).convert("RGB"))
        raise TypeError("image: str (url/dosya) veya PIL.Image olmalı.")

    def build_payload(self, query: str, image: Union[str, Image.Image],
                      max_new_tokens: int = 4096, temperature: float = 0.0, top_p: float = 1.0,
                      repetition_penalty: float = 1.0, return_full_text: bool = False, seed: int = 42,
                      conv_mode: str = "llava_v2", use_cache: bool = True,
                      extra_parameters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """Assemble the JSON payload expected by the endpoint."""
        parameters: Dict[str, Any] = {
            "max_new_tokens": max_new_tokens,
            # Greedy decoding when temperature is exactly 0, sampling otherwise.
            "do_sample": temperature != 0,
            "temperature": temperature,
            "top_p": top_p,
            "repetition_penalty": repetition_penalty,
            "return_full_text": return_full_text,
            "seed": seed,
        }
        if extra_parameters:
            parameters.update(extra_parameters)
        return {
            "inputs": {"query": query, "image": self._image_to_payload(image)},
            "parameters": parameters,
            "options": {"use_cache": use_cache},
            "conv_mode": conv_mode,
        }

    def infer(self, query: str, image: Union[str, Image.Image], **gen_kwargs) -> Dict[str, Any]:
        """POST one query+image to the endpoint and return text plus metadata.

        Raises:
            HFEndpointError: on a non-JSON response body or an HTTP error status.
        """
        payload = self.build_payload(query, image, **gen_kwargs)
        headers = {
            "Accept": "application/json",
            "Authorization": f"Bearer {self.hf_token}",
            "Content-Type": "application/json",
        }
        started = time.time()
        resp = self.session.post(self.endpoint_url, headers=headers, json=payload, timeout=self.timeout)
        latency_ms = (time.time() - started) * 1000.0
        status = resp.status_code
        try:
            data = resp.json()
        except Exception as exc:
            raise HFEndpointError(f"JSON olmayan cevap (status={status}): {resp.text[:500]}") from exc

        # The endpoint may answer with either a list of generations or a dict.
        candidate = None
        if isinstance(data, list) and data:
            candidate = data[0]
        elif isinstance(data, dict):
            candidate = data
        text = ""
        if candidate is not None:
            text = candidate.get("generated_text", "") or candidate.get("text", "")

        if status >= 400:
            raise HFEndpointError(f"Endpoint hatası {status}: {json.dumps(data)[:500]}")

        return {"text": text, "raw": data, "latency_ms": latency_ms, "status_code": status}
 
1
+ # /repository/handler.py
2
+ import base64, io, os, json
3
+ from typing import Any, Dict, List
 
 
4
  from PIL import Image
5
 
6
+ # (Gerekiyorsa: from transformers import ... # model yükleme burada olur)
7
+
8
class EndpointHandler:
    """Hugging Face Inference Toolkit entry point.

    The toolkit instantiates this class once per container and then calls the
    instance for every request; the conventional return shape is
    ``[{"generated_text": "..."}]``.
    """

    def __init__(self, path: str = "") -> None:
        # Load the model/processor/tokenizer here, e.g.:
        #   self.model = ...
        #   self.processor = ...
        # *path* points at the repository snapshot inside the container.
        pass

    def _load_image(self, img_field: str) -> Image.Image:
        """Resolve *img_field* (data URL, http(s) URL, or local path) to an RGB image.

        Raises:
            requests.HTTPError: if a remote image URL returns an error status.
        """
        if img_field.startswith("data:image"):
            # data URL -> bytes; the part before the comma is metadata only.
            _, b64data = img_field.split(",", 1)
            img_bytes = base64.b64decode(b64data)
            return Image.open(io.BytesIO(img_bytes)).convert("RGB")
        if img_field.startswith(("http://", "https://")):
            import requests  # lazy import: only needed for remote images
            resp = requests.get(img_field, timeout=20)
            resp.raise_for_status()
            return Image.open(io.BytesIO(resp.content)).convert("RGB")
        # Local path (inside the container)
        return Image.open(img_field).convert("RGB")

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Handle one inference request from the HF Inference Toolkit.

        Accepts both ``{"inputs": {"query": ..., "image": ...}}`` and the
        plain-text form ``{"inputs": "prompt"}`` that some HF clients send
        (the original crashed with AttributeError on the latter).
        """
        inputs = data.get("inputs") or {}
        params = data.get("parameters") or {}
        if isinstance(inputs, str):
            # Bare prompt string: treat it as the query, no image.
            query, img_field = inputs, ""
        else:
            query = inputs.get("query", "")
            img_field = inputs.get("image", "")

        # Prepare the image (optional — only if the model consumes one).
        image = None
        if img_field:
            image = self._load_image(img_field)

        # Call your own inference code here:
        # out_text = run_model(self.model, self.processor, query, image, **params)
        out_text = f"(demo) prompt='{query[:50]}...' image={'yes' if image else 'no'}"

        return [{"generated_text": out_text}]