# api_light_hf / apis / heatmap_text2comment.py
# NOTE(review): the lines below were Hugging Face file-viewer page residue
# (author, deploy message, commit cf7f643) accidentally captured into the
# source; commented out so the module is valid Python.
import os
from src.clients.llm_client import LLMClient
import json
import base64
from io import BytesIO
from PIL import Image
import re
from pydantic import BaseModel
from enum import Enum
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Flattens an OpenAI chat-completions ``messages`` list into the
    (system_prompt, prompt, images) triple that ``LLMClient`` expects.
    If ``response_format`` is a pydantic model class, a structured call is
    made and the result serialized back to a JSON string; otherwise the raw
    text completion is returned.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    llm = LLMClient()
    sys_prompt = None
    prompt_text = ""
    image_payloads = []

    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system" and isinstance(content, str):
            sys_prompt = content
        elif role == "user":
            if isinstance(content, str):
                prompt_text = content
                continue
            if not isinstance(content, list):
                continue
            for part in content:
                if not isinstance(part, dict):
                    continue
                kind = part.get("type")
                if kind == "text":
                    prompt_text += part.get("text", "")
                elif kind == "image_url":
                    url = part.get("image_url", {}).get("url", "")
                    # data: URLs are stripped to their base64 payload;
                    # plain URLs (or malformed data: URLs) pass through as-is.
                    if url.startswith("data:") and "," in url:
                        url = url.split(",", 1)[1]
                    image_payloads.append(url)

    # Structured path: caller supplied a pydantic model (has model_json_schema).
    if response_format is not None and hasattr(response_format, "model_json_schema"):
        parsed = llm.call(
            prompt=prompt_text,
            schema=response_format,
            model=model,
            system_prompt=sys_prompt,
            images=image_payloads if image_payloads else None,
            temperature=0,
        )
        return _json.dumps(parsed.model_dump(), ensure_ascii=False)

    return llm.call_raw(
        prompt=prompt_text,
        model=model,
        system_prompt=sys_prompt,
        images=image_payloads if image_payloads else None,
    )
# NOTE(review): module-level client instantiated at import time; it is not
# referenced anywhere else in this file — presumably kept for importers doing
# `from ... import client`. Confirm before removing.
client = LLMClient()
class Comment(BaseModel):
    """Structured LLM output: a comment on an LP (landing page) analysis.

    The Japanese attribute names are intentional — they become the JSON
    schema keys the model is asked to fill.
    """
    # BUG FIX: the field names were mojibake in the captured source
    # (e.g. "コメンチE"); restored to the intended Japanese.
    # NOTE(review): confirm downstream consumers expect these exact keys.
    コメント: str       # the comment itself
    理由: str           # reasoning behind the comment
    テキスト: str       # the text the comment refers to
    テキストの種類: str  # kind/category of that text
def ask_raw(messages, model):
    """Send OpenAI-style *messages* to *model*, forcing the ``Comment`` schema.

    Returns the JSON string produced by ``_ask_raw_hf``.
    """
    # BUG FIX: the original passed `model` both positionally and as a keyword
    # (TypeError on every call), built a bogus message list from an undefined
    # name `p`, and forwarded sampling kwargs (`top_p`, `frequency_penalty`,
    # `presence_penalty`, `temperature`) that `_ask_raw_hf` does not accept.
    # The caller's `messages` are forwarded unchanged instead.
    return _ask_raw_hf(messages, model, response_format=Comment)
def heatmap_text2comment(p, fv_info1, fv_info2, title1, title2, openai_key=None):
    """Ask an LLM-as-LP-analyst to comment on heatmap-derived LP text.

    input1 (text): p          — the user question/prompt
    input2 (text): fv_info1   — first LP content section
    input3 (text): fv_info2   — second LP content section
    input4 (text): title1     — heading for fv_info1
    input5 (text): title2     — heading for fv_info2
    input6 (text): openai_key — API key; None or "default" falls back to
                                the OPENAI_KEY environment variable
    output1 (json): コメント (a ``Comment``-shaped JSON string)
    """
    # BUG FIX: the old default `os.environ.get('OPENAI_KEY')` was evaluated
    # once at import time, and an unset OPENAI_KEY led to
    # `os.environ['OPENAI_API_KEY'] = None` (TypeError). Resolve at call
    # time and only export the key when one is actually available.
    key = os.environ.get('OPENAI_KEY') if openai_key in (None, "default") else openai_key
    if key is not None:
        os.environ['OPENAI_API_KEY'] = key

    # NOTE(review): the Japanese prompt was mojibake in the captured source;
    # reconstructed to the apparent original wording — confirm against the
    # deployed file. Roughly: "Below is the content of the LP under analysis.
    # Based on this LP's information, comment as an LP-analysis expert."
    messages = [
        {
            "role": "system",
            "content": f"以下は、分析を進めているLPの内容です。\nこのLPの情報を元に、LP分析の専門家としてコメントしてください。\n\n#{title1}\n{fv_info1}\n\n#{title2}\n{fv_info2}"
        },
        {
            "role": "user",
            "content": [
                {"type": "text", "text": p}
            ]
        },
    ]
    return ask_raw(messages, "meta-llama/Llama-3.3-70B-Instruct")