Spaces:
Sleeping
Sleeping
Commit ·
cf7f643
0
Parent(s):
deploy api_light_hf (2026-03-12 12:47:03)
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitignore +7 -0
- Dockerfile +25 -0
- README.md +228 -0
- apis/__init__.py +0 -0
- apis/ab2samediff.py +63 -0
- apis/background.py +123 -0
- apis/base64img2component.py +134 -0
- apis/base64img2score.py +195 -0
- apis/baseimg2baseimg.py +56 -0
- apis/baseimg2cninfo.py +114 -0
- apis/baseimg2cta_detail.py +143 -0
- apis/baseimg2ecinfo_rect.py +190 -0
- apis/baseimg2fvinfo.py +156 -0
- apis/baseimg2fvinfo_rect.py +197 -0
- apis/baseimg2fvinfo_with_design.py +180 -0
- apis/baseimg2html.py +54 -0
- apis/baseimg2ocr.py +56 -0
- apis/baseimg2pagetype.py +222 -0
- apis/baseimg2score.py +92 -0
- apis/ecinfo2winningrate.py +233 -0
- apis/format2cninfo.py +118 -0
- apis/format2cninfos.py +161 -0
- apis/format2ecinfo.py +245 -0
- apis/format2ecinfos.py +279 -0
- apis/format2fvinfo.py +154 -0
- apis/format2fvinfos.py +184 -0
- apis/framework.py +70 -0
- apis/fvinfo2winningrate_nolift.py +126 -0
- apis/heatimage2score.py +60 -0
- apis/heatmap_text2comment.py +106 -0
- apis/html2variants.py +159 -0
- apis/image2color.py +67 -0
- apis/image2inpaint.py +66 -0
- apis/image2inpaint3.py +62 -0
- apis/image2text.py +57 -0
- apis/image2types.py +114 -0
- apis/images2inpaint.py +65 -0
- apis/info2img64.py +129 -0
- apis/keyword2urls.py +50 -0
- apis/modifyButton.py +105 -0
- apis/modifyHTML.py +102 -0
- apis/moment2normalize.py +101 -0
- apis/moment2theme.py +77 -0
- apis/nayose_cn.py +186 -0
- apis/nayose_fv.py +170 -0
- apis/rader.py +51 -0
- apis/rader_dual.py +81 -0
- apis/samediff2winningrate.py +99 -0
- apis/sample.py +80 -0
- apis/score2summary.py +64 -0
.gitignore
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__pycache__/
|
| 2 |
+
*.pyc
|
| 3 |
+
*.pyo
|
| 4 |
+
.env
|
| 5 |
+
.venv/
|
| 6 |
+
*.egg-info/
|
| 7 |
+
.DS_Store
|
Dockerfile
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Dockerfile — api_light_hf (FastAPI + Hugging Face Inference API)
|
| 2 |
+
FROM python:3.10-slim
|
| 3 |
+
|
| 4 |
+
# Non-root user (compatible with HF Spaces default)
|
| 5 |
+
RUN useradd -m -u 1000 user
|
| 6 |
+
|
| 7 |
+
USER user
|
| 8 |
+
ENV HOME=/home/user \
|
| 9 |
+
PATH=/home/user/.local/bin:$PATH \
|
| 10 |
+
PYTHONPATH=$HOME/app \
|
| 11 |
+
PYTHONUNBUFFERED=1
|
| 12 |
+
|
| 13 |
+
WORKDIR $HOME/app
|
| 14 |
+
|
| 15 |
+
COPY --chown=user . $HOME/app
|
| 16 |
+
|
| 17 |
+
RUN python -m pip install --upgrade pip \
|
| 18 |
+
&& pip install --no-cache-dir -r requirements.txt
|
| 19 |
+
|
| 20 |
+
# PORT is overridable via docker run -e PORT=8080
|
| 21 |
+
ENV PORT=7860
|
| 22 |
+
|
| 23 |
+
EXPOSE $PORT
|
| 24 |
+
|
| 25 |
+
CMD ["sh", "-c", "uvicorn app:app --host 0.0.0.0 --port ${PORT}"]
|
README.md
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# api_light_hf
|
| 2 |
+
|
| 3 |
+
`api_light_dev` の全エンドポイントを **Hugging Face Inference API** に乗せ換えた FastAPI サーバーです。
|
| 4 |
+
Gradio / GCP / Vertex AI への依存を排除し、`HF_TOKEN` 一本で動きます。
|
| 5 |
+
|
| 6 |
+
---
|
| 7 |
+
|
| 8 |
+
## アーキテクチャ概要
|
| 9 |
+
|
| 10 |
+
```
|
| 11 |
+
api_light_hf/
|
| 12 |
+
├── app.py # FastAPI エントリーポイント。apis/ を動的ロード
|
| 13 |
+
├── apis/ # 各 API 関数(1ファイル = 1関数)
|
| 14 |
+
├── src/
|
| 15 |
+
│ ├── clients/
|
| 16 |
+
│ │ └── llm_client.py # HF Inference API 統合クライアント
|
| 17 |
+
│ ├── config/
|
| 18 |
+
│ │ └── models.yaml # モデルエイリアス・タスク定義
|
| 19 |
+
│ └── utils/
|
| 20 |
+
│ └── tracer.py # ロギング/OpenTelemetry トレーサー
|
| 21 |
+
├── requirements.txt
|
| 22 |
+
└── Dockerfile
|
| 23 |
+
```
|
| 24 |
+
|
| 25 |
+
### 主な変更点(api_light_dev との差分)
|
| 26 |
+
|
| 27 |
+
| 項目 | api_light_dev | api_light_hf |
|
| 28 |
+
|---|---|---|
|
| 29 |
+
| サーバー | Gradio | FastAPI (`POST /{api_name}`) |
|
| 30 |
+
| LLM バックエンド | OpenAI / Vertex AI / LiteLLM | HF Inference API (`huggingface_hub`) |
|
| 31 |
+
| OCR | Google Vision API | VLM (Qwen2.5-VL) |
|
| 32 |
+
| Inpaint | Vertex AI Imagen | HF `stable-diffusion-inpainting` |
|
| 33 |
+
| Image Generation | Vertex AI Imagen / DALL-E | HF FLUX.1-dev |
|
| 34 |
+
| HTML 生成 (baseimg2html) | Vertex AI Gemini 2.5 Pro | HF Qwen2.5-VL-72B |
|
| 35 |
+
| 認証 | GCP サービスアカウント JSON | `HF_TOKEN` 環境変数のみ |
|
| 36 |
+
|
| 37 |
+
---
|
| 38 |
+
|
| 39 |
+
## セットアップ
|
| 40 |
+
|
| 41 |
+
### 1. 環境変数
|
| 42 |
+
|
| 43 |
+
| 変数 | 必須 | 説明 |
|
| 44 |
+
|---|---|---|
|
| 45 |
+
| `HF_TOKEN` | ✅ | Hugging Face の API トークン([取得](https://huggingface.co/settings/tokens)) |
|
| 46 |
+
| `PORT` | – | 待ち受けポート(デフォルト `7860`) |
|
| 47 |
+
| `HF_MODEL` | – | デフォルトモデルの上書き(例: `meta-llama/Llama-3.3-70B-Instruct`) |
|
| 48 |
+
| `GCP_SERVICE_KEY_FOR_TRACE` | – | OpenTelemetry / Cloud Trace を有効化する場合のサービスアカウント JSON |
|
| 49 |
+
|
| 50 |
+
### 2. ローカル起動
|
| 51 |
+
|
| 52 |
+
```bash
|
| 53 |
+
# 依存インストール
|
| 54 |
+
pip install -r requirements.txt
|
| 55 |
+
|
| 56 |
+
# 起動
|
| 57 |
+
HF_TOKEN=hf_xxx uvicorn app:app --host 0.0.0.0 --port 7860 --reload
|
| 58 |
+
```
|
| 59 |
+
|
| 60 |
+
### 3. Docker
|
| 61 |
+
|
| 62 |
+
```bash
|
| 63 |
+
# ビルド
|
| 64 |
+
docker build -t api_light_hf .
|
| 65 |
+
|
| 66 |
+
# 実行
|
| 67 |
+
docker run -p 7860:7860 -e HF_TOKEN=hf_xxx api_light_hf
|
| 68 |
+
```
|
| 69 |
+
|
| 70 |
+
### 4. Hugging Face Spaces へのデプロイ
|
| 71 |
+
|
| 72 |
+
専用スクリプト `deploy_api_light_hf.py`(リポジトリルート `DD/` 直下)を使います。
|
| 73 |
+
|
| 74 |
+
#### 前提
|
| 75 |
+
|
| 76 |
+
| 条件 | 内容 |
|
| 77 |
+
|---|---|
|
| 78 |
+
| `HF_TOKEN` | 書き込み権限のある HF API トークン([取得](https://huggingface.co/settings/tokens)) |
|
| 79 |
+
| Space の作成 | HF Spaces で **Docker** SDK の Space を先に作成しておく |
|
| 80 |
+
| `git` | ローカルに git がインストールされていること |
|
| 81 |
+
|
| 82 |
+
#### 基本コマンド
|
| 83 |
+
|
| 84 |
+
```bash
|
| 85 |
+
# 環境変数に HF_TOKEN を設定
|
| 86 |
+
export HF_TOKEN=hf_xxxxxxxxxxxx
|
| 87 |
+
|
| 88 |
+
# デプロイ(初回・更新共通)
|
| 89 |
+
python deploy_api_light_hf.py --org DLPO --space api_light_hf
|
| 90 |
+
|
| 91 |
+
# コミットメッセージを指定
|
| 92 |
+
python deploy_api_light_hf.py --org DLPO --space api_light_hf -m "feat: add new api"
|
| 93 |
+
|
| 94 |
+
# push せずに手順を確認する(ドライラン)
|
| 95 |
+
python deploy_api_light_hf.py --org DLPO --space api_light_hf --dry-run
|
| 96 |
+
|
| 97 |
+
# Space の現在の状態を確認
|
| 98 |
+
python deploy_api_light_hf.py status --org DLPO --space api_light_hf
|
| 99 |
+
```
|
| 100 |
+
|
| 101 |
+
#### 引数一覧
|
| 102 |
+
|
| 103 |
+
| 引数 | デフォルト | 説明 |
|
| 104 |
+
|---|---|---|
|
| 105 |
+
| `cmd` | `deploy` | `deploy` または `status` |
|
| 106 |
+
| `--org` | `DLPO` または env `HF_ORG` | HF 組織名 / ユーザー名 |
|
| 107 |
+
| `--space` | `api_light_hf` または env `HF_SPACE` | Space 名 |
|
| 108 |
+
| `--branch` | `main` | 対象ブランチ |
|
| 109 |
+
| `-m / --message` | タイムスタンプ付き自動生成 | コミットメッセージ |
|
| 110 |
+
| `--token` | env `HF_TOKEN` | HF API トークン |
|
| 111 |
+
| `--local-dir` | `<スクリプトと同階層>/api_light_hf` | ローカルのソースディレクトリ |
|
| 112 |
+
| `--dry-run` | false | push せず手順だけ表示 |
|
| 113 |
+
|
| 114 |
+
#### 失敗時のチェックポイント
|
| 115 |
+
|
| 116 |
+
- `[error] HF_TOKEN is not set` → `export HF_TOKEN=hf_xxx` を実行
|
| 117 |
+
- `Push failed` → トークンに `write` 権限があるか、Space が存在するか確認
|
| 118 |
+
- `Local directory not found` → `--local-dir` で正しいパスを指定
|
| 119 |
+
- `git is not available` → git をインストールして PATH を通す
|
| 120 |
+
|
| 121 |
+
#### Space の secrets 設定
|
| 122 |
+
|
| 123 |
+
デプロイ後、Space の **Settings > Repository secrets** に以下を登録してください。
|
| 124 |
+
|
| 125 |
+
| Secret 名 | 値 |
|
| 126 |
+
|---|---|
|
| 127 |
+
| `HF_TOKEN` | Inference API 呼び出し用トークン(アプリ内部で使用) |
|
| 128 |
+
|
| 129 |
+
---
|
| 130 |
+
|
| 131 |
+
## API の使い方
|
| 132 |
+
|
| 133 |
+
サーバー起動後、`POST /{api_name}` にリクエストします。
|
| 134 |
+
|
| 135 |
+
### エンドポイント一覧の確認
|
| 136 |
+
|
| 137 |
+
```
|
| 138 |
+
GET /endpoints
|
| 139 |
+
```
|
| 140 |
+
|
| 141 |
+
### リクエスト形式
|
| 142 |
+
|
| 143 |
+
```bash
|
| 144 |
+
curl -X POST http://localhost:7860/text2theme \
|
| 145 |
+
-H "Content-Type: application/json" \
|
| 146 |
+
-d '{"text": "健康志向の若者向けスポーツドリンク"}'
|
| 147 |
+
```
|
| 148 |
+
|
| 149 |
+
### ヘルスチェック
|
| 150 |
+
|
| 151 |
+
```
|
| 152 |
+
GET /health → {"status": "ok"}
|
| 153 |
+
GET / → {status, uptime_seconds, active_requests, endpoints}
|
| 154 |
+
```
|
| 155 |
+
|
| 156 |
+
---
|
| 157 |
+
|
| 158 |
+
## モデル設定
|
| 159 |
+
|
| 160 |
+
`src/config/models.yaml` でモデルのエイリアス・タスク・能力を管理します。
|
| 161 |
+
|
| 162 |
+
```yaml
|
| 163 |
+
default_model: meta-llama/Llama-3.3-70B-Instruct
|
| 164 |
+
|
| 165 |
+
aliases:
|
| 166 |
+
gpt-4o: meta-llama/Llama-3.3-70B-Instruct
|
| 167 |
+
gemini-flash: Qwen/Qwen2.5-72B-Instruct
|
| 168 |
+
vision: Qwen/Qwen2.5-VL-72B-Instruct
|
| 169 |
+
...
|
| 170 |
+
|
| 171 |
+
models:
|
| 172 |
+
meta-llama/Llama-3.3-70B-Instruct:
|
| 173 |
+
task: text-generation
|
| 174 |
+
supports_json: true
|
| 175 |
+
supports_images: false
|
| 176 |
+
Qwen/Qwen2.5-VL-72B-Instruct:
|
| 177 |
+
task: image-text-to-text
|
| 178 |
+
supports_json: true
|
| 179 |
+
supports_images: true
|
| 180 |
+
...
|
| 181 |
+
```
|
| 182 |
+
|
| 183 |
+
---
|
| 184 |
+
|
| 185 |
+
## api_light_dev からの切り替え手順
|
| 186 |
+
|
| 187 |
+
1. `HF_TOKEN` を発行・設定する
|
| 188 |
+
2. `docker build` または `pip install` でセットアップ
|
| 189 |
+
3. 旧サービス(Gradio)と並行稼働で動作確認
|
| 190 |
+
4. 呼び出し元のベース URL を `https://<gradio-space>/...` から `http://<new-host>:<port>/...` に変更
|
| 191 |
+
- エンドポイント名(`text2theme`, `baseimg2score` など)はそのまま維持
|
| 192 |
+
- リクエストボディは `{"key": "value"}` 形式(旧 Gradio の JSON Body と互換)
|
| 193 |
+
5. 旧サービスを停止
|
| 194 |
+
|
| 195 |
+
### 非互換点
|
| 196 |
+
|
| 197 |
+
| 項目 | 旧(api_light_dev) | 新(api_light_hf) |
|
| 198 |
+
|---|---|---|
|
| 199 |
+
| `gcp_key` 引数 | GCP 認証に使用 | 受け取るが無視(ログ警告なし) |
|
| 200 |
+
| `openai_key` / `google_api_key` | LLM 認証に使用 | 受け取るが無視 |
|
| 201 |
+
| OCR 精度 | Google Vision API | VLM ベース(精度はモデルに依存) |
|
| 202 |
+
| Inpaint モデル | Vertex Imagen | SD-Inpainting(品質差あり) |
|
| 203 |
+
|
| 204 |
+
---
|
| 205 |
+
|
| 206 |
+
## 新しい API の追加
|
| 207 |
+
|
| 208 |
+
`apis/` に Python ファイルを追加するだけで自動登録されます。
|
| 209 |
+
|
| 210 |
+
```python
|
| 211 |
+
# apis/my_new_api.py
|
| 212 |
+
from src.clients.llm_client import LLMClient
|
| 213 |
+
from pydantic import BaseModel
|
| 214 |
+
|
| 215 |
+
class MyOutput(BaseModel):
|
| 216 |
+
result: str
|
| 217 |
+
|
| 218 |
+
def my_new_api(input_text: str) -> dict:
|
| 219 |
+
"""
|
| 220 |
+
input1 (text): 入力テキスト
|
| 221 |
+
output1 (json): {"result": "..."}
|
| 222 |
+
"""
|
| 223 |
+
client = LLMClient()
|
| 224 |
+
output = client.call(prompt=input_text, schema=MyOutput)
|
| 225 |
+
return output.model_dump()
|
| 226 |
+
```
|
| 227 |
+
|
| 228 |
+
サーバーを再起動すると `POST /my_new_api` が自動的に有効になります。
|
apis/__init__.py
ADDED
|
File without changes
|
apis/ab2samediff.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ab2samediff: 2つの画像を比較して類似点と相違点を返す。
|
| 3 |
+
HF版: VLM (Llama-3.2-Vision) を使用。Vertex AI は使用しない。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import base64
|
| 7 |
+
import json
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from typing import Optional
|
| 10 |
+
|
| 11 |
+
from src.utils.tracer import customtracer
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def _pil_to_b64(image) -> str:
|
| 15 |
+
"""PIL Image を base64 文字列に変換。"""
|
| 16 |
+
fmt = getattr(image, "format", None) or "PNG"
|
| 17 |
+
buf = BytesIO()
|
| 18 |
+
image.save(buf, format=fmt)
|
| 19 |
+
return base64.b64encode(buf.getvalue()).decode("utf-8")
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@customtracer
|
| 23 |
+
def ab2samediff(
|
| 24 |
+
lp1,
|
| 25 |
+
lp2,
|
| 26 |
+
p: str = "よりコンバージョンが高まるWEBページを作るための観点を比較します。",
|
| 27 |
+
m: str = "meta-llama/Llama-3.2-11B-Vision-Instruct",
|
| 28 |
+
) -> tuple:
|
| 29 |
+
"""
|
| 30 |
+
input1 (image): 比較画像1
|
| 31 |
+
input2 (image): 比較画像2
|
| 32 |
+
input3 (text): よりコンバージョンが高まるWEBページを作るための観点を比較します。
|
| 33 |
+
input4 (text): meta-llama/Llama-3.2-11B-Vision-Instruct
|
| 34 |
+
output1 (text): 類似点
|
| 35 |
+
output2 (text): 相違点
|
| 36 |
+
|
| 37 |
+
NOTE: HF版は VLM ベース。Vertex AI は使用しない。
|
| 38 |
+
"""
|
| 39 |
+
from src.clients.llm_client import LLMClient
|
| 40 |
+
from pydantic import BaseModel
|
| 41 |
+
|
| 42 |
+
class Comparison(BaseModel):
|
| 43 |
+
same: str
|
| 44 |
+
diff: str
|
| 45 |
+
|
| 46 |
+
b1 = _pil_to_b64(lp1)
|
| 47 |
+
b2 = _pil_to_b64(lp2)
|
| 48 |
+
|
| 49 |
+
prompt = (
|
| 50 |
+
p + "\n\n"
|
| 51 |
+
"1. 2つの画像の類似点を説明してください。\n"
|
| 52 |
+
"2. 2つの画像の相違点を説明してください。\n"
|
| 53 |
+
)
|
| 54 |
+
|
| 55 |
+
client = LLMClient()
|
| 56 |
+
result = client.call(
|
| 57 |
+
prompt=prompt,
|
| 58 |
+
schema=Comparison,
|
| 59 |
+
model=m,
|
| 60 |
+
images=[b1, b2],
|
| 61 |
+
temperature=0,
|
| 62 |
+
)
|
| 63 |
+
return result.same, result.diff
|
apis/background.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from openai import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
from enum import Enum
|
| 7 |
+
import base64
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from PIL import Image
|
| 10 |
+
from typing import List, Optional
|
| 11 |
+
from functools import cache
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
import pytz
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
from src.models.common import model
|
| 16 |
+
|
| 17 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 18 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 19 |
+
from src.clients.llm_client import LLMClient
|
| 20 |
+
import json, re
|
| 21 |
+
|
| 22 |
+
client = LLMClient()
|
| 23 |
+
|
| 24 |
+
# Extract system prompt and user content from messages list
|
| 25 |
+
system_prompt = None
|
| 26 |
+
user_text = ""
|
| 27 |
+
images = []
|
| 28 |
+
for msg in messages:
|
| 29 |
+
role = msg.get("role", "")
|
| 30 |
+
c = msg.get("content", "")
|
| 31 |
+
if role == "system":
|
| 32 |
+
if isinstance(c, str):
|
| 33 |
+
system_prompt = c
|
| 34 |
+
elif role == "user":
|
| 35 |
+
if isinstance(c, str):
|
| 36 |
+
user_text = c
|
| 37 |
+
elif isinstance(c, list):
|
| 38 |
+
for part in c:
|
| 39 |
+
if isinstance(part, dict):
|
| 40 |
+
if part.get("type") == "text":
|
| 41 |
+
user_text += part.get("text", "")
|
| 42 |
+
elif part.get("type") == "image_url":
|
| 43 |
+
url = part.get("image_url", {}).get("url", "")
|
| 44 |
+
if url.startswith("data:"):
|
| 45 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 46 |
+
else:
|
| 47 |
+
images.append(url)
|
| 48 |
+
|
| 49 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 50 |
+
result = client.call(
|
| 51 |
+
prompt=user_text,
|
| 52 |
+
schema=response_format,
|
| 53 |
+
model=model,
|
| 54 |
+
system_prompt=system_prompt,
|
| 55 |
+
images=images if images else None,
|
| 56 |
+
temperature=0,
|
| 57 |
+
)
|
| 58 |
+
import json
|
| 59 |
+
return json.dumps(result.model_dump(), ensure_ascii=False)
|
| 60 |
+
else:
|
| 61 |
+
return client.call_raw(
|
| 62 |
+
prompt=user_text,
|
| 63 |
+
model=model,
|
| 64 |
+
system_prompt=system_prompt,
|
| 65 |
+
images=images if images else None,
|
| 66 |
+
)
|
| 67 |
+
|
| 68 |
+
class Estimations(BaseModel):
|
| 69 |
+
name: str
|
| 70 |
+
prob: float
|
| 71 |
+
reason: str
|
| 72 |
+
button_prompt: str
|
| 73 |
+
change_candidates: Optional[List[str]] = Field(default_factory=list, description="変更すべきUI要素のリスチE)
|
| 74 |
+
|
| 75 |
+
class EstimateCategory(BaseModel):
|
| 76 |
+
title: str
|
| 77 |
+
estimations: list[Estimations]
|
| 78 |
+
|
| 79 |
+
class EstimateBackground(BaseModel):
|
| 80 |
+
estimated_bg:list[EstimateCategory]
|
| 81 |
+
|
| 82 |
+
def get_openai_request(messages, format):
|
| 83 |
+
client = LLMClient()
|
| 84 |
+
# HF: beta.parse not available; use _ask_raw_hf instead
|
| 85 |
+
response = client.chat.completions.create(
|
| 86 |
+
model="meta-llama/Llama-3.3-70B-Instruct",
|
| 87 |
+
messages=messages,
|
| 88 |
+
top_p=1,
|
| 89 |
+
frequency_penalty=0,
|
| 90 |
+
presence_penalty=0,
|
| 91 |
+
response_format=format,
|
| 92 |
+
temperature=0
|
| 93 |
+
)
|
| 94 |
+
return response.choices[0].message.content
|
| 95 |
+
|
| 96 |
+
@customtracer
|
| 97 |
+
def background(p, openai_key=os.environ.get('OPENAI_KEY')):
|
| 98 |
+
"""
|
| 99 |
+
input1 (text): 親子でのスマ�E料��節紁E親子でのお得感 チE�Eタの余剰利用 通話とネット�Eコストパフォーマンス スマ�EチE��ュー支援 家族向け�E安�E機�E 豊富な端末ラインアチE�E
|
| 100 |
+
input2 (text): default
|
| 101 |
+
output1 (json): 頁E��
|
| 102 |
+
"""
|
| 103 |
+
print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)
|
| 104 |
+
if openai_key == "default":
|
| 105 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 106 |
+
else:
|
| 107 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 108 |
+
|
| 109 |
+
messages=[
|
| 110 |
+
{
|
| 111 |
+
"role": "system",
|
| 112 |
+
"content": """WEBPAGEのOCR惁E��を提供します。このLandingPageにつぁE��持E��された頁E��の制作背景を推定してください。頁E��ごとに、指定数の候補と確玁E��0~1の間で回答して、E
|
| 113 |
+
吁E��景�E�Eame�E�につぁE��、change_candidatesフィールドに「その背景を実現するために変更すべきUI要素」�Eリストを斁E���E配�Eで返してください、E
|
| 114 |
+
※禁止ワード:「未来」「革命」「夢」に類する想像�E幁E��庁E��てしまぁE��ードがあれば具体的で納得度の高い言葉に置き換えて
|
| 115 |
+
""",
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"role": "user",
|
| 119 |
+
"content": [{"type": "text", "text":p}]
|
| 120 |
+
},
|
| 121 |
+
]
|
| 122 |
+
|
| 123 |
+
return get_openai_request(messages, EstimateBackground)
|
apis/base64img2component.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from openai import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel
|
| 6 |
+
from enum import Enum
|
| 7 |
+
import base64
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from PIL import Image
|
| 10 |
+
from functools import cache
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
import pytz
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 16 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 17 |
+
from src.clients.llm_client import LLMClient
|
| 18 |
+
import json, re
|
| 19 |
+
|
| 20 |
+
client = LLMClient()
|
| 21 |
+
|
| 22 |
+
# Extract system prompt and user content from messages list
|
| 23 |
+
system_prompt = None
|
| 24 |
+
user_text = ""
|
| 25 |
+
images = []
|
| 26 |
+
for msg in messages:
|
| 27 |
+
role = msg.get("role", "")
|
| 28 |
+
c = msg.get("content", "")
|
| 29 |
+
if role == "system":
|
| 30 |
+
if isinstance(c, str):
|
| 31 |
+
system_prompt = c
|
| 32 |
+
elif role == "user":
|
| 33 |
+
if isinstance(c, str):
|
| 34 |
+
user_text = c
|
| 35 |
+
elif isinstance(c, list):
|
| 36 |
+
for part in c:
|
| 37 |
+
if isinstance(part, dict):
|
| 38 |
+
if part.get("type") == "text":
|
| 39 |
+
user_text += part.get("text", "")
|
| 40 |
+
elif part.get("type") == "image_url":
|
| 41 |
+
url = part.get("image_url", {}).get("url", "")
|
| 42 |
+
if url.startswith("data:"):
|
| 43 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 44 |
+
else:
|
| 45 |
+
images.append(url)
|
| 46 |
+
|
| 47 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 48 |
+
result = client.call(
|
| 49 |
+
prompt=user_text,
|
| 50 |
+
schema=response_format,
|
| 51 |
+
model=model,
|
| 52 |
+
system_prompt=system_prompt,
|
| 53 |
+
images=images if images else None,
|
| 54 |
+
temperature=0,
|
| 55 |
+
)
|
| 56 |
+
import json
|
| 57 |
+
return json.dumps(result.model_dump(), ensure_ascii=False)
|
| 58 |
+
else:
|
| 59 |
+
return client.call_raw(
|
| 60 |
+
prompt=user_text,
|
| 61 |
+
model=model,
|
| 62 |
+
system_prompt=system_prompt,
|
| 63 |
+
images=images if images else None,
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
class UIoption(str, Enum):
|
| 67 |
+
element1 = "バナー/動画"
|
| 68 |
+
element2 = "CTA"
|
| 69 |
+
element3 = "チE��スチE
|
| 70 |
+
element4 = "フォーム"
|
| 71 |
+
|
| 72 |
+
class Component(BaseModel):
|
| 73 |
+
component_large: str
|
| 74 |
+
component_middle: str
|
| 75 |
+
component_small: list[str]
|
| 76 |
+
UIelement: UIoption
|
| 77 |
+
|
| 78 |
+
class Components(BaseModel):
|
| 79 |
+
components: list[Component]
|
| 80 |
+
|
| 81 |
+
def ask_raw(messages):
|
| 82 |
+
client = LLMClient()
|
| 83 |
+
# HF: beta.parse not available; use _ask_raw_hf instead
|
| 84 |
+
response = client.chat.completions.create(
|
| 85 |
+
model='meta-llama/Llama-3.3-70B-Instruct',
|
| 86 |
+
messages=messages,
|
| 87 |
+
top_p=1,
|
| 88 |
+
frequency_penalty=0,
|
| 89 |
+
presence_penalty=0,
|
| 90 |
+
response_format=Components,
|
| 91 |
+
temperature=0
|
| 92 |
+
)
|
| 93 |
+
return response.choices[0].message.content
|
| 94 |
+
|
| 95 |
+
@customtracer
|
| 96 |
+
def base64img2component(p, image64, openai_key=os.environ.get('OPENAI_KEY')):
|
| 97 |
+
"""
|
| 98 |
+
input1 (text): 13: ※金融犯罪にご注愁E手口はこちら、E38: ▼ご利用条件はこちら、E77: ピンチ�E時�E、E133: アコム一抁E409: WEB完結カードを作らぁE415: ご契紁E�E翌日から最大30日間��利0冁E421: 借りられめE0刁E�� 644: 今すぐお申し込み 722: 実質年玁E3.0%~18.0%ご融賁E��E1丁E�E~800丁E�E 760: 以前ご利用があっぁE761: ご増額をご希望のお客さまはこちめE784: お客さまはこちめE819: *お申し込み時間めE��査によりご希望に沿えなぁE��合がござぁE��す、E868: お借�E可能かすぐに刁E��めE秒スピ�Eド診断 977: 侁E22 1055: ご年叁E税込) 1067: 侁E250 1146: 他社お借�E顁E1249: 診断開姁E1323: ※クレジチE��カードでのショチE��ング、E��行でのお借�E(銀行カードローン、住宁E��ーン、�E動車ローンなど)を除ぁE��、キャチE��ングめE��ードローンのお借�E状況をご�E力ください、E1498: 借りるなめE1558: アコム一抁E1710: 20刁E��借りられめE1835: アコムなら最短20刁E��お借�Eが可能!※すぐにお��が忁E��とぁE��時�E、本ペ�Eジの申込ボタンから早速お申し込みくだ 1960: ※お申し込み時間めE��査によりご希望に添えなぁE��合がござぁE��す、E2045: カードを作らずWEB完絁E2165: お申し込み〜お借�EまでWEBだけで完結できます。ご希望ぁE��だければカードレスでご契紁E��ただけます、E2354: |30日間��利ぁE冁E2356: 契紁E�E翌日から 2470: はじめてご利用のお客さまは、契紁E�E翌日から最大30日間��利ぁE冁E 2663: たっぁEスチE��チE!(最短20刁E 2757: 申し込みから借りるまでの流れ※お申し込み時間めE��査によりご希望に添えなぁE��合がござぁE��す、E2937: お申し込み・1忁E��書類提出(審査)お申し込みぁE��だぁE��後、忁E��書類を提�EしてぁE��だき審査に進みます、E3131: 2ご契紁E�Eお借�E 3194: 審査結果の冁E��にご同意いただけましたら、契紁E��続きは完亁E��なります。契紁E���E、すぐにお借�EぁE��だけます。ご希望ぁE��だければカードレスでご契紁E��ただけます、E3335: 忁E��書類とは? 3405: 本人確認書顁E免許証など) 3455: (該当する方のみ)+収�E証明書 3488: ※「当社のご利用において50丁E�Eを趁E��るご契紁E��行うお客さま」と「他社を含めたお借�E総額が100丁E�Eを趁E��るお客「さま」につぁE��は、収入証明書も忁E��で 3633: アコムの 3664: よくある質啁E3777: 申し込み編 3892: Q勤務�Eに在籍確認�E電話がかかってきま 3961: 原則、実施しません。※原則、E��話での在籍確認�Eせずに書面めE��申告�E容での確認を実施します。もし実施が忁E��となる場合でも、お客さまの同意を得ずに実施することはありませんので、ご安忁E��ださい、E4135: Q契紁E��ると、忁E��カードが自宁E��郵送さ 4159: れるんですか? 4205: ぁE��え。カードレスでご契紁E��続きぁE��だくことも可能です、E4296: 自宁E��勤務�Eに何か書類が送られてくる 4320: ことはありますか? 4366: 原則、E��付しません、E郵送契紁E��選択された場合や、書面の郵送受け取りを選 4418: んだ場合等を除ぁE 5914: は、ご返済シミュレーションをご利用ぁE5943: ださい、E5992: ペ�Eジ上部に戻る▲ 7671: ご増額をご希望のお客さまはこちめE7671: 以前ご利用があったお客さまはこちめE8033: 今すぐお申し込み
|
| 99 |
+
input2 (text): スクショ
|
| 100 |
+
input3 (text): default
|
| 101 |
+
output1 (json): 頁E��
|
| 102 |
+
"""
|
| 103 |
+
print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), f"base64img2component:", image64[0:30])
|
| 104 |
+
|
| 105 |
+
if openai_key == "default":
|
| 106 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 107 |
+
else:
|
| 108 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 109 |
+
|
| 110 |
+
messages=[
|
| 111 |
+
{
|
| 112 |
+
"role": "system",
|
| 113 |
+
"content": """
|
| 114 |
+
■構�E要素名�Eアウト�EチE��サンプル
|
| 115 |
+
[
|
| 116 |
+
{"component_large":"啁E��/サービスの特徴","component_middle":"アコム", "component_small":[], "UIelement":"チE��スチE},
|
| 117 |
+
{"component_large":"FAQ/よくある質啁E,"component_middle":"よくあるご質啁E, "component_small":["自宁E��勤務�Eに何か書類が送られてくることはありますか�E�E,"家族割などの割引�Eありますか�E�E], "UIelement":"表絁E��"}
|
| 118 |
+
]
|
| 119 |
+
"""
|
| 120 |
+
},
|
| 121 |
+
{
|
| 122 |
+
"role": "user",
|
| 123 |
+
"content": [{"type": "text", "text":p}]
|
| 124 |
+
},
|
| 125 |
+
]
|
| 126 |
+
|
| 127 |
+
messages[1]["content"].insert(0, {"type": "image_url", "image_url": {"url":"data:image/png;base64,"+image64}})
|
| 128 |
+
# OpenAI 側の認証エラーなどを�E示皁E��メチE��ージとして上位に伝搬させめE
|
| 129 |
+
try:
|
| 130 |
+
return ask_raw(messages)
|
| 131 |
+
except openai.AuthenticationError as e:
|
| 132 |
+
# API キー / 絁E��設定�E問題を含むエラー冁E��をラチE�Eして投げ直ぁE
|
| 133 |
+
# 呼び出し�E�E�EE Origin 側など�E�でこ�EメチE��ージをキャチE��してユーザに表示できる
|
| 134 |
+
raise RuntimeError(f"[base64img2component] OpenAI AuthenticationError: {e}") from e
|
apis/base64img2score.py
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel
|
| 6 |
+
from enum import Enum
|
| 7 |
+
import base64
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from PIL import Image
|
| 10 |
+
from functools import cache
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
import pytz
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
# 追加
|
| 16 |
+
import logging
|
| 17 |
+
|
| 18 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 19 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 20 |
+
from src.clients.llm_client import LLMClient
|
| 21 |
+
import json as _json
|
| 22 |
+
|
| 23 |
+
client = LLMClient()
|
| 24 |
+
system_prompt = None
|
| 25 |
+
user_text = ""
|
| 26 |
+
images = []
|
| 27 |
+
for msg in messages:
|
| 28 |
+
role = msg.get("role", "")
|
| 29 |
+
c = msg.get("content", "")
|
| 30 |
+
if role == "system":
|
| 31 |
+
if isinstance(c, str):
|
| 32 |
+
system_prompt = c
|
| 33 |
+
elif role == "user":
|
| 34 |
+
if isinstance(c, str):
|
| 35 |
+
user_text = c
|
| 36 |
+
elif isinstance(c, list):
|
| 37 |
+
for part in c:
|
| 38 |
+
if isinstance(part, dict):
|
| 39 |
+
if part.get("type") == "text":
|
| 40 |
+
user_text += part.get("text", "")
|
| 41 |
+
elif part.get("type") == "image_url":
|
| 42 |
+
url = part.get("image_url", {}).get("url", "")
|
| 43 |
+
if url.startswith("data:"):
|
| 44 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 45 |
+
else:
|
| 46 |
+
images.append(url)
|
| 47 |
+
|
| 48 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 49 |
+
result = client.call(
|
| 50 |
+
prompt=user_text,
|
| 51 |
+
schema=response_format,
|
| 52 |
+
model=model,
|
| 53 |
+
system_prompt=system_prompt,
|
| 54 |
+
images=images if images else None,
|
| 55 |
+
temperature=0,
|
| 56 |
+
)
|
| 57 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 58 |
+
else:
|
| 59 |
+
return client.call_raw(
|
| 60 |
+
prompt=user_text,
|
| 61 |
+
model=model,
|
| 62 |
+
system_prompt=system_prompt,
|
| 63 |
+
images=images if images else None,
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# logger 設定(褁E�� import 時に重褁E��定されなぁE��ぁE��ェチE���E�E
|
| 68 |
+
logger = logging.getLogger("base64img2score")
|
| 69 |
+
if not logger.handlers:
|
| 70 |
+
handler = logging.StreamHandler()
|
| 71 |
+
handler.setFormatter(logging.Formatter("%(message)s"))
|
| 72 |
+
logger.addHandler(handler)
|
| 73 |
+
logger.setLevel(logging.INFO)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class Answer(BaseModel):
|
| 77 |
+
citation: str
|
| 78 |
+
suggestion: str
|
| 79 |
+
score: int
|
| 80 |
+
raw_schema = Answer.schema()
|
| 81 |
+
raw_schema["additionalProperties"] = False
|
| 82 |
+
|
| 83 |
+
def ask_raw(messages, model):
|
| 84 |
+
client = LLMClient()
|
| 85 |
+
# OpenAI 呼び出ぁE
|
| 86 |
+
response = client.chat.completions.create(
|
| 87 |
+
model=model,
|
| 88 |
+
messages=messages,
|
| 89 |
+
response_format={
|
| 90 |
+
"type": "json_schema",
|
| 91 |
+
"json_schema": {
|
| 92 |
+
"name": "AnswerSchema",
|
| 93 |
+
"strict": True,
|
| 94 |
+
"schema": raw_schema
|
| 95 |
+
}
|
| 96 |
+
}
|
| 97 |
+
)
|
| 98 |
+
# ト�Eクン使用量を取得(呼び出し�Eでまとめてログするため、ここでは print しなぁE��E
|
| 99 |
+
usage = response.usage
|
| 100 |
+
content_str = response
|
| 101 |
+
# 以前�E動作と同じくパースした JSON を返すが、呼び出し�Eで usage も使ぁE��ぁE�Eでタプルで返す
|
| 102 |
+
return json.loads(content_str), usage
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
@customtracer
|
| 106 |
+
def base64img2score(p, image64=None, model="meta-llama/Llama-3.3-70B-Instruct", openai_key=os.environ.get('OPENAI_KEY')):
|
| 107 |
+
"""
|
| 108 |
+
input1 (text): 13: ※金融犯罪にご注愁E手口はこちら、E38: ▼ご利用条件はこちら、E77: ピンチ�E時�E、E133: アコム一抁E409: WEB完結カードを作らぁE415: ご契紁E�E翌日から最大30日間��利0冁E421: 借りられめE0刁E�� 644: 今すぐお申し込み 722: 実質年玁E3.0%~18.0%ご融賁E��E1丁E�E~800丁E�E 760: 以前ご利用があっぁE761: ご増額をご希望のお客さまはこちめE784: お客さまはこちめE819: *お申し込み時間めE��査によりご希望に沿えなぁE��合がござぁE��す、E868: お借�E可能かすぐに刁E��めE秒スピ�Eド診断 977: 侁E22 1055: ご年叁E税込) 1067: 侁E250 1146: 他社お借�E顁E1249: 診断開姁E1323: ※クレジチE��カードでのショチE��ング、E��行でのお借�E(銀行カードローン、住宁E��ーン、�E動車ローンなど)を除ぁE��、キャチE��ングめE��ードローンのお借�E状況をご�E力ください、E1498: 借りるなめE1558: アコム一抁E1710: 20刁E��借りられめE1835: アコムなら最短20刁E��お借�Eが可能!※すぐにお��が忁E��とぁE��時�E、本ペ�Eジの申込ボタンから早速お申し込みくだ 1960: ※お申し込み時間めE��査によりご希望に添えなぁE��合がござぁE��す、E2045: カードを作らずWEB完絁E2165: お申し込み〜お借�EまでWEBだけで完結できます。ご希望ぁE��だければカードレスでご契紁E��ただけます、E2354: |30日間��利ぁE冁E2356: 契紁E�E翌日から 2470: はじめてご利用のお客さまは、契紁E�E翌日から最大30日間��利ぁE冁E 2663: たっぁEスチE��チE!(最短20刁E 2757: 申し込みから借りるまでの流れ※お申し込み時間めE��査によりご希望に添えなぁE��合がござぁE��す、E2937: お申し込み・1忁E��書類提出(審査)お申し込みぁE��だぁE��後、忁E��書類を提�EしてぁE��だき審査に進みます、E3131: 2ご契紁E�Eお借�E 3194: 審査結果の冁E��にご同意いただけましたら、契紁E��続きは完亁E��なります。契紁E���E、すぐにお借�EぁE��だけます。ご希望ぁE��だければカードレスでご契紁E��ただけます、E3335: 忁E��書類とは? 3405: 本人確認書顁E免許証など) 3455: (該当する方のみ)+収�E証明書 3488: ※「当社のご利用において50丁E�Eを趁E��るご契紁E��行うお客さま」と「他社を含めたお借�E総額が100丁E�Eを趁E��るお客「さま」につぁE��は、収入証明書も忁E��で 3633: アコムの 3664: よくある質啁E3777: 申し込み編 3892: Q勤務�Eに在籍確認�E電話がかかってきま 3961: 原則、実施しません。※原則、E��話での在籍確認�Eせずに書面めE��申告�E容での確認を実施します。もし実施が忁E��となる場合でも、お客さまの同意を得ずに実施することはありませんので、ご安忁E��ださい、E4135: Q契紁E��ると、忁E��カードが自宁E��郵送さ 4159: れるんですか? 4205: ぁE��え。カードレスでご契紁E��続きぁE��だくことも可能です、E4296: 自宁E��勤務�Eに何か書類が送られてくる 4320: ことはありますか? 4366: 原則、E��付しません、E郵送契紁E��選択された場合や、書面の郵送受け取りを選 4418: んだ場合等を除ぁE 5914: は、ご返済シミュレーションをご利用ぁE5943: ださい、E5992: ペ�Eジ上部に戻る▲ 7671: ご増額をご希望のお客さまはこちめE7671: 以前ご利用があったお客さまはこちめE8033: 今すぐお申し込み
|
| 109 |
+
input2 (text):
|
| 110 |
+
input3 (text): gpt-4o
|
| 111 |
+
input4 (text): default
|
| 112 |
+
output1 (json): 頁E��
|
| 113 |
+
"""
|
| 114 |
+
if openai_key == "default":
|
| 115 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 116 |
+
else:
|
| 117 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 118 |
+
|
| 119 |
+
messages=[
|
| 120 |
+
{
|
| 121 |
+
"role": "system",
|
| 122 |
+
"content": """与えられた情報と質問に対して、採点基準を参�Eして以下を回答します、E
|
| 123 |
+
citation:当該箁E��の引用
|
| 124 |
+
suggestion:満点でなぁE��合�E満点になるよぁE��具体的な持E��。満点の場合�E優れた点を�E体的な叙述
|
| 125 |
+
"""
|
| 126 |
+
},
|
| 127 |
+
{
|
| 128 |
+
"role": "user",
|
| 129 |
+
"content": [{"type": "text", "text":p}]
|
| 130 |
+
},
|
| 131 |
+
]
|
| 132 |
+
img_flag= "none"
|
| 133 |
+
if image64:
|
| 134 |
+
messages[1]["content"].insert(0, {
|
| 135 |
+
"type": "image_url",
|
| 136 |
+
"image_url": {"url": f"data:image/png;base64,{image64}"}
|
| 137 |
+
})
|
| 138 |
+
img_flag = image64[-4:]
|
| 139 |
+
|
| 140 |
+
# --- 前�E琁E��マリ�E�E行!E---
|
| 141 |
+
summary_parts = []
|
| 142 |
+
for i, msg in enumerate(messages):
|
| 143 |
+
content = msg.get("content")
|
| 144 |
+
is_str = isinstance(content, str)
|
| 145 |
+
if is_str:
|
| 146 |
+
length = len(content or "")
|
| 147 |
+
kind = "str"
|
| 148 |
+
elif isinstance(content, list):
|
| 149 |
+
length = len(content)
|
| 150 |
+
kind = "list"
|
| 151 |
+
else:
|
| 152 |
+
# unknown type: show repr length where possible
|
| 153 |
+
try:
|
| 154 |
+
length = len(content)
|
| 155 |
+
except Exception:
|
| 156 |
+
length = 0
|
| 157 |
+
kind = type(content).__name__
|
| 158 |
+
has_image = False
|
| 159 |
+
if isinstance(content, list):
|
| 160 |
+
for part in content:
|
| 161 |
+
if isinstance(part, dict) and part.get("type") == "image_url":
|
| 162 |
+
has_image = True
|
| 163 |
+
break
|
| 164 |
+
emoji = "🖼�E�E if has_image else " E
|
| 165 |
+
summary_parts.append(f"[{i}:{msg.get('role')}] {kind}(len={length}) {emoji}")
|
| 166 |
+
pre_summary = " | ".join(summary_parts)
|
| 167 |
+
|
| 168 |
+
# 実行とログを一行で出す(エラー含む�E�E
|
| 169 |
+
dt = datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%m/%d %H:%M")
|
| 170 |
+
|
| 171 |
+
try:
|
| 172 |
+
# ask_raw は (result, usage) を返すよう変更
|
| 173 |
+
result, usage = ask_raw(messages, model)
|
| 174 |
+
|
| 175 |
+
# messages めEJSON にした長さ(可読のため ensure_ascii=False�E�E
|
| 176 |
+
msg_len = len(json.dumps(messages, ensure_ascii=False))
|
| 177 |
+
|
| 178 |
+
combined = (
|
| 179 |
+
f"[base64img2score] (img:{img_flag}) {pre_summary} ↁE"
|
| 180 |
+
f"({dt}) token:{usage.total_tokens}, len:{msg_len},mdl:{model} "
|
| 181 |
+
f"snt:{usage.prompt_tokens}, Received:{usage.completion_tokens}"
|
| 182 |
+
)
|
| 183 |
+
# 一行で出力!Eogging はスレチE��セーフ!E
|
| 184 |
+
logger.info(combined)
|
| 185 |
+
|
| 186 |
+
return result
|
| 187 |
+
|
| 188 |
+
except Exception as e:
|
| 189 |
+
# エラー時も一行で記録して再送�E�E�詳細なトレースは再送�E先で扱ってください�E�E
|
| 190 |
+
err_msg = f"[base64img2score] {pre_summary} ↁE({dt}) ERROR: {type(e).__name__}: {str(e)}"
|
| 191 |
+
logger.error(err_msg)
|
| 192 |
+
# 忁E��ならここで詳しい traceback めElogger.debug などで出せますが、E
|
| 193 |
+
# 要望どおり「実行前後を一行で出す」ことを優先して多行�E力�E抑制してぁE��す、E
|
| 194 |
+
raise
|
| 195 |
+
|
apis/baseimg2baseimg.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
baseimg2baseimg: 画像編集(画像 + プロンプトから新画像生成)。
|
| 3 |
+
HF版: HF Inference API の image-to-image を使用。
|
| 4 |
+
NOTE: 元の実装は OpenAI gpt-image-1 を使用。HF版は SD inpainting ベース。
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import base64
|
| 8 |
+
import os
|
| 9 |
+
from io import BytesIO
|
| 10 |
+
from typing import Optional
|
| 11 |
+
|
| 12 |
+
from PIL import Image
|
| 13 |
+
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@customtracer
|
| 18 |
+
def baseimg2baseimg(
|
| 19 |
+
base64img: str,
|
| 20 |
+
p: str,
|
| 21 |
+
q: str = "low",
|
| 22 |
+
openai_key: str = "default",
|
| 23 |
+
model_name: Optional[str] = None,
|
| 24 |
+
) -> str:
|
| 25 |
+
"""
|
| 26 |
+
input1 (text): base64エンコードされた画像
|
| 27 |
+
input2 (text): 変更内容のプロンプト
|
| 28 |
+
input3 (text): low
|
| 29 |
+
input4 (text): default
|
| 30 |
+
output1 (text): 生成画像(base64)
|
| 31 |
+
|
| 32 |
+
NOTE: HF版は runwayml/stable-diffusion-inpainting を使用。
|
| 33 |
+
"""
|
| 34 |
+
from huggingface_hub import InferenceClient
|
| 35 |
+
|
| 36 |
+
hf_token = os.environ.get("HF_TOKEN")
|
| 37 |
+
if not hf_token:
|
| 38 |
+
raise ValueError("HF_TOKEN is required for baseimg2baseimg.")
|
| 39 |
+
|
| 40 |
+
model = model_name or "runwayml/stable-diffusion-inpainting"
|
| 41 |
+
|
| 42 |
+
if "," in base64img:
|
| 43 |
+
base64img = base64img.split(",", 1)[1]
|
| 44 |
+
image_bytes = base64.b64decode(base64img)
|
| 45 |
+
image = Image.open(BytesIO(image_bytes)).convert("RGB")
|
| 46 |
+
|
| 47 |
+
client = InferenceClient(token=hf_token)
|
| 48 |
+
result = client.image_to_image(
|
| 49 |
+
image=image,
|
| 50 |
+
prompt=p,
|
| 51 |
+
model=model,
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
buf = BytesIO()
|
| 55 |
+
result.save(buf, format="PNG")
|
| 56 |
+
return base64.b64encode(buf.getvalue()).decode("utf-8")
|
apis/baseimg2cninfo.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel
|
| 6 |
+
from enum import Enum
|
| 7 |
+
from io import BytesIO
|
| 8 |
+
from PIL import Image
|
| 9 |
+
from functools import cache
|
| 10 |
+
from datetime import datetime
|
| 11 |
+
import pytz
|
| 12 |
+
|
| 13 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 14 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 15 |
+
from src.clients.llm_client import LLMClient
|
| 16 |
+
import json as _json
|
| 17 |
+
|
| 18 |
+
client = LLMClient()
|
| 19 |
+
system_prompt = None
|
| 20 |
+
user_text = ""
|
| 21 |
+
images = []
|
| 22 |
+
for msg in messages:
|
| 23 |
+
role = msg.get("role", "")
|
| 24 |
+
c = msg.get("content", "")
|
| 25 |
+
if role == "system":
|
| 26 |
+
if isinstance(c, str):
|
| 27 |
+
system_prompt = c
|
| 28 |
+
elif role == "user":
|
| 29 |
+
if isinstance(c, str):
|
| 30 |
+
user_text = c
|
| 31 |
+
elif isinstance(c, list):
|
| 32 |
+
for part in c:
|
| 33 |
+
if isinstance(part, dict):
|
| 34 |
+
if part.get("type") == "text":
|
| 35 |
+
user_text += part.get("text", "")
|
| 36 |
+
elif part.get("type") == "image_url":
|
| 37 |
+
url = part.get("image_url", {}).get("url", "")
|
| 38 |
+
if url.startswith("data:"):
|
| 39 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 40 |
+
else:
|
| 41 |
+
images.append(url)
|
| 42 |
+
|
| 43 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 44 |
+
result = client.call(
|
| 45 |
+
prompt=user_text,
|
| 46 |
+
schema=response_format,
|
| 47 |
+
model=model,
|
| 48 |
+
system_prompt=system_prompt,
|
| 49 |
+
images=images if images else None,
|
| 50 |
+
temperature=0,
|
| 51 |
+
)
|
| 52 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 53 |
+
else:
|
| 54 |
+
return client.call_raw(
|
| 55 |
+
prompt=user_text,
|
| 56 |
+
model=model,
|
| 57 |
+
system_prompt=system_prompt,
|
| 58 |
+
images=images if images else None,
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class UIoption(str, Enum):
|
| 63 |
+
element1 = "バナー/動画"
|
| 64 |
+
element2 = "CTA"
|
| 65 |
+
element3 = "チE��スチE
|
| 66 |
+
element4 = "フォーム"
|
| 67 |
+
|
| 68 |
+
class Component(BaseModel):
|
| 69 |
+
component_large: str
|
| 70 |
+
component_middle: str
|
| 71 |
+
component_small: list[str]
|
| 72 |
+
UIelement: UIoption
|
| 73 |
+
|
| 74 |
+
class Components(BaseModel):
|
| 75 |
+
theme: list[str]
|
| 76 |
+
components: list[Component]
|
| 77 |
+
|
| 78 |
+
def ask_raw(messages, m):
|
| 79 |
+
return _ask_raw_hf(messages, m)
|
| 80 |
+
|
| 81 |
+
def baseimg2cninfo(p, base64img, openai_key=os.environ.get('OPENAI_KEY')):
|
| 82 |
+
"""
|
| 83 |
+
input1 (text): 13: ※金融犯罪にご注愁E手口はこちら、E38: ▼ご利用条件はこちら、E77: ピンチ�E時�E、E133: アコム一抁E409: WEB完結カードを作らぁE415: ご契紁E�E翌日から最大30日間��利0冁E421: 借りられめE0刁E�� 644: 今すぐお申し込み 722: 実質年玁E3.0%~18.0%ご融賁E��E1丁E�E~800丁E�E 760: 以前ご利用があっぁE761: ご増額をご希望のお客さまはこちめE784: お客さまはこちめE819: *お申し込み時間めE��査によりご希望に沿えなぁE��合がござぁE��す、E868: お借�E可能かすぐに刁E��めE秒スピ�Eド診断 977: 侁E22 1055: ご年叁E税込) 1067: 侁E250 1146: 他社お借�E顁E1249: 診断開姁E1323: ※クレジチE��カードでのショチE��ング、E��行でのお借�E(銀行カードローン、住宁E��ーン、�E動車ローンなど)を除ぁE��、キャチE��ングめE��ードローンのお借�E状況をご�E力ください、E1498: 借りるなめE1558: アコム一抁E1710: 20刁E��借りられめE1835: アコムなら最短20刁E��お借�Eが可能!※すぐにお��が忁E��とぁE��時�E、本ペ�Eジの申込ボタンから早速お申し込みくだ 1960: ※お申し込み時間めE��査によりご希望に添えなぁE��合がござぁE��す、E2045: カードを作らずWEB完絁E2165: お申し込み〜お借�EまでWEBだけで完結できます。ご希望ぁE��だければカードレスでご契紁E��ただけます、E2354: |30日間��利ぁE冁E2356: 契紁E�E翌日から 2470: はじめてご利用のお客さまは、契紁E�E翌日から最大30日間��利ぁE冁E 2663: たっぁEスチE��チE!(最短20刁E 2757: 申し込みから借りるまでの流れ※お申し込み時間めE��査によりご希望に添えなぁE��合がござぁE��す、E2937: お申し込み・1忁E��書類提出(審査)お申し込みぁE��だぁE��後、忁E��書類を提�EしてぁE��だき審査に進みます、E3131: 2ご契紁E�Eお借�E 3194: 審査結果の冁E��にご同意いただけましたら、契紁E��続きは完亁E��なります。契紁E���E、すぐにお借�EぁE��だけます。ご希望ぁE��だければカードレスでご契紁E��ただけます、E3335: 忁E��書類とは? 3405: 本人確認書顁E免許証など) 3455: (該当する方のみ)+収�E証明書 3488: ※「当社のご利用において50丁E�Eを趁E��るご契紁E��行うお客さま」と「他社を含めたお借�E総額が100丁E�Eを趁E��るお客「さま」につぁE��は、収入証明書も忁E��で 3633: アコムの 3664: よくある質啁E3777: 申し込み編 3892: Q勤務�Eに在籍確認�E電話がかかってきま 3961: 原則、実施しません。※原則、E��話での在籍確認�Eせずに書面めE��申告�E容での確認を実施します。もし実施が忁E��となる場合でも、お客さまの同意を得ずに実施することはありませんので、ご安忁E��ださい、E4135: Q契紁E��ると、忁E��カードが自宁E��郵送さ 4159: れるんですか? 4205: ぁE��え。カードレスでご契紁E��続きぁE��だくことも可能です、E4296: 自宁E��勤務�Eに何か書類が送られてくる 4320: ことはありますか? 4366: 原則、E��付しません、E郵送契紁E��選択された場合や、書面の郵送受け取りを選 4418: んだ場合等を除ぁE 5914: は、ご返済シミュレーションをご利用ぁE5943: ださい、E5992: ペ�Eジ上部に戻る▲ 7671: ご増額をご希望のお客さまはこちめE7671: 以前ご利用があったお客さまはこちめE8033: 今すぐお申し込み
|
| 84 |
+
input2 (text): /9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxISEhUSEhIVFRUVFxUVFxUVFRcVFRUVFRUWFhUVFRYYHSggGBomHRUVITEhJSkrLi4uFx8zODMsNygtLisBCgoKDg0OGhAQGy0lHyUtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLf/AABEIAQsAvQMBEQACEQEDEQH/xAAcAAABBQEBAQAAAAAAAAAAAAACAAEDBAUGBwj/xABPEAABAwEEBAcKCggFBAMAAAABAAIDEQQSITEFBkFREyJhcXOR0gcUMlOBkpOxsrMzQlJicnShwdHwFiMkNENjgsIXNaPD02SitOEVg+L/xAAbAQACAwEBAQAAAAAAAAAAAAAAAQIDBAUGB//EADoRAAIBAgEIBwgDAAIDAQEAAAABAgMRBBIhMUFRcZHRBRMVYYGhwRQiMkJScrHhM5LwI2IkNPGiQ//aAAwDAQACEQMRAD8A3bp5CF6TMeLs2J0ICFJg6cURNmu/nFScblSqZJYjeHYgqtqxojJSV0PJhjmNqFnCXu5wTK00DXAHdknZrSJyjJWiwopHVoUmlqHGcr2ZKGbyo3LMnaEIm7kspjyIkgFFEstYcORYdx3YiiNDB51YggZuPkUpMqhHYyw0KDLkSNKTJJ2LETlW0XRZfhVMjTEstVZch0gHQMYoEyKR1Bj9mKmkQk7LOUXzNORVqi0Z3OL0FV6sRSznS4xkY4fnYt2aRxfepvuL8zmm7RjDVgcSTJWpc8fFeBTihZll3ee1nbUb5dXkxvG91fS1rff3A0GXBR/6v/IpWn9XkiP/AA6MjzfMrh7mn4OPzpv+RSyZP5vJFWVCD/j/AP1LmXI56j4OP/U7arcJr5vJGmNWm1fI83zIpAMxFGfS9tSSn9XkiuXVaVT83zB4cHAxx9ctR5eETyJrRLyRHraTVnDzfMnZaPmR03/rft46g6c/q8kXKrTXy+b5kgl+ZH/qdtRyZ/V5Inl0/p83zJmvPyI/9TtqLjLb+C1Sh9PmyOaYj4kZ9J204wk/m/BXOpCPyebI4bSTmyMek7alKnL6vJEIVoPTG3i+ZI7O9cjr/wDb21FKejK8kWPq75WT5vmOLUfkMrupJ28UdXL6vwProfT+QxO7xbKc0nqvpZEvq/BJVY/R5stRE/Jj6pO2qmpbS+Lh9P5Lcd44Uj6n9tVNS2l6cXqLMZdkOD6n9tQae0ti46kHx/mea/tqNntJXWwFzpB8jzXdtNJ7SLklqIXySO8Ex4cjx/epqNtJBzv8KK8lpfW7RjjgSAHAgGtCRf5D1KSh3lcp7Y3IbRQmhaBg04DaRXeVZTbz5ymtGObNpK1zdUc6vvtM2TsMgXX5ODuQFac6OdaMtdw7XJwZYCOLwbcd3HkVdNZTlv8ARF1eXVqCejJ9WGXYVPXs/wDSkRvYGSpGVebFNZiMrtZkQRWgA0+zJScblcKlnYth4Vdi9SViCQGtQprRYqkne6CuhwqOpK7TzkrKauiIPc04HyHEKVkytSlB6S1FbGnkP2darcGjRCvF9xLUOUc6J3UiTggcDj+d6jlPUTyE8zHETxgKEcqMqL0jUJrMhgXDwwAjNqBOS+JEodQ45FRtcnezzluMqtl8Syw15FWy5ZyeNrhk6vOoNp6iyKktDLDXV51W0Wpic4BCQNpDFwTsJtGNwDWSulMjjWoaK5B2JBNcQCXUyoDtopRpO9yudaNrBzzC8eUM5fiqynF595VVqLMu4qSwkmocQtCkthklBt5mY4iIANwfSaRXnWm6b0nOyWle3AltxLixhY5wMTajAHw5Mq0qeRVU8zk76/RF9e8lCLV/d9WW9V7Ox83ASguYY3OYTea4FpHFOVcCepV4qcoQyoabl3R9OFSq6dRXVs2lHVxat2VuTHDk4R9PWue8XWel+SOxHo7DR0R83zOM1x0UyOcCMhoLA
/jE4m84HE8w610MHXlOLUjjdKYSFOSlDMZ9jtT63XY8tK4c4WuUVpOfTqSvZmlwe7D87lVc1ZBmWiNzXVyxzBoFbFpoyTi4u5savaJ4eXjhwa0Xnmpxrk0HeTXLYDyLNia/VQzaXoN2BwnX1PeTstPI6r9GLJWvBu9I/wDFc72yvt8kdrszC6cnzfM5vTJhZaOBgZQMFZHlziA51C2MVOdMTzjlWrDzqTTcmYsXSo0pKNNW25wakDKqvM2dIZswrnQ7ihxzAqiuTSuwr4Qz2fYopZ7FknmvpIGaQZkPtwCm6UtLKViaehEjJ3A4trygqLitTJqpJPOjQjmCocWbIzRaZMN6rcWWqaCdM3PD70slknOOkB1oBxwpvripKD0EXUWkgmtYzGI31+5SjTK5VlpRn2xzTm0K+CaMlVxelFa0S0cKYcRnsqVON8q+0rrTtk2+lDttzuT1qXVoisRJHMwaYc3AtB3jLHfyFaGkznQqSjuNPSOkmng6tIrE1wpjTjyD7lRRjaUt/ojXi6icKd1pj6sk0BpUi0xcfC8G4g436sz/AKgjE006crIMDXlGvG7zaOOY9G4VcbJPUZRxfdHaf2eUVwMkZp84Bw9h3Wt2BdptHJ6XjempbGYVngaW4PINPBOw7cF0W2tRw4wi1pLURkb4JDt3G+5ReS9JbDLj8LuPJaK0bcLnEgXaZkmgojJtnuN1Mr3bXZ3WiLMIYgzM5uO9x+4ZDmXHrT6ybkelw1JUaahxI9PaY73hc8CrzxY2/KkPgjmGJPICoQp5TsWVaypxcjhdGWd1CXSFznFxeTm5zjVxPOarsJKCSSPNXlVk5NmjFIRgWnnGKi0nrLYScczRK5zHbj6x+CSUkTbhPvKrRiQW0HKVPvKEs9rZghHHuCLyGo09gJbQUa403bkadIrZKtFsjY+6auJ6sFJq+ZEIvJd5MvWa1MPOVTKEjXTrQZcjlON1wdzhVOK1miMnqaYEVobU33Y7tibg/lRGNWN/fecaaauLS0jLPFOMbaRTnfPGzM+0yDnOAor4ox1Joa1nEcXExx+TiqNP5s+tkq7zQzfKiAO5APJ+CtsUXPOn64sJBLJCRhUtYajceNisXaVLY/LmdF9B13plHz5GlpTWqFne5MTyH2dj6ANwBlmFPC5CoQx9OLk7PO+7mWVeh601BKUfdVte19xXi13szDebDKKUIBDHUINcDfBUn0jSatZ+XMjHoSvGSkpR8+R7bDaw9rXtODgHDmcKj1rMkdFyMDX6cNsT5SCeCcySjQCfCuHAkDJ52qcJ9U8tlNWj7RHq1pe08pZrjEDUMlFMsGdtaO0qWx+XMwdg4i98qPnyLo17s58KGWu9oYP9xLtKnqT8uZPsSs9Lj58ju9Qray1MNobG9rGuLWGSlXEeE4UJwGVd9dyjPEqrH3bltHAvDz99ptaLX9UjsOGVGSbMs8p0/wB0SzPtTqtleyKrIywMLXGvHkFXitSKA7gN6VPE04POmFbBVaqVmlx5FH/EGzB94QzY5gtj+6RaO0KdrNPy5mLsasp5SlHz5Eo7o9nGUdo6oj/eo+309j/3iWLomstEl58hx3RrLWphtFRkQ2OvvEvb4akxromre7kvPkM7ui2U5xWg84jH+4n7fDYxPoiq9Ml58iKXX6yfFhtAPNHn6RNdIw1p/wC8SuXQtT5ZJceRCO6BF8mbqj7al2jR2Py5kOxcV9cfPkTO7osGyObqjP8Aeo9oUdj8uZN9D4nVKPnyBPdDg2RSjmbGP9xPtGlsflzF2NiNUo+fINndGgAIEc48kZHlPCVSfSFJvQ/LmSj0RiErKUfPkC3uiw41jmP9MeH/AHp9oUdj8uYl0Pidco+fIdvdDs22KbyCMf3pdo0tSflzGuhq+uUfPkNNr/ZT/Bn6o/8AkTXSNNan5cyMuhar1x8+R1FntolZFK0Ua+GJwvHjUpuB+9acPNVIuS1tmHGU3RnGnK11FEwun4pKuz7TN7r1HhhXm
T3Zqae8GyfVI/fWhAlpMpAz3zUPSHCaPsxri2MRnniJj/sXSo54JnGxDyaskX9YIeGss8PjIpGjnLDd+2inOF4tFdOrkyT7z5zaaiq5J3TQ0Hop9qnZAzAvOLqVDGDFzzzDrNBtU4Qc5WRXVqKnFyZ9B6PhZBEyGIXWRtDWjkG87TtJ2krqKCSsjiyquTuzle6brP3vZ+AjdSWcFtQcWRZPfyE+COcnYqMRLJVlpZpwkOsld6EeLhYDqiQAkAJACQA4QhM9f0HoCCdxDwxoBAoGRVNQ84XyMrmyuflXqMRkU1mgn4btie08Vg+srt5VSS8d+1rZ3mjY9U7K4Orjdo6rIo6FpEZoCLwLuMW4EipGdCqJ1UrWhHPtW/uWbXoNVKhKSd6s3bY9WbvefVpI59VLM2IvN2ojD6cHGKFxddvCtQDdDQM6uCcakZTUerjpto/WrTuIzoyjTcutlovp36c+vRvZYi1Qspja+7i5sZ8COl5zLxAF2uw05s1B1UptZEdL1d9i2GHbgpOpPOl82tq5JNqXZWuYLuDn3TxYyaXHnY3DFhFeUFRjXTT9yOjZ3rmSlhnFxXWTzv6u58jI1m1ehs0gY2MEFtauYytdowHN1rThnCtBycY8DFjVVw9RRjOVu9njjMhzBeYPavSeyavg952U/wDTxc+S7mA/it3nlOl0/aL9yNFltcNgWx00c5YiSPDS07j1LzB7s1NPNN2yYH90j2fzrQgitJl3DuPUgkeodyy3HvaSI/w5SR9F7QfaD108Fng1sZw+k3k1E9q/B2gtK2ZBzetPBNKWQxzSxgGjJJGjD4ocQ37KLhVI5MnHYz1VKeXTjLakel9zjQ/e8JneP1swBFc2RZtbznwj/TuXTwlDJjlPS/wcTH4tSnkR0L8nV2vSLY2OkeaNYC5x5B6ytMkopt6DHCTnJRjpZ4fp3SclqnfO8HjHit+QweCwcw+0k7VxKk3OTkz01GmqcFFFC4dx6lAtFcO49SAFcO49SAFcO49SAFcO49SAFdO49SBHRx632hpq2NgI2gPB9pdR9LVWrOMfPmcSPQVGLvGcr+HIdmuNpAc0MYA6l4APoaGorxscUn0rUbTcY5t/Ma6DopOKnKz06ORI3Xi1ht0ABtCLoMgFCCCKXtxPWl2nNu+RG+58ya6Hgo5PWTtvXIdmvVsGVBgBgZMm0oPC2UHUk+kpPTCPB8wXREFoqT4rkE7X62nM5cslciM73KetJdItf/zjwfMk+iovTVnxXIhtGulqk8NrXZ+FfNKmppV29Sj0pOKtGEVx5kJ9C0pu8qknva5HMALmHZec9k1bP7JZht73i2/NXbwP8PieX6Vf/kNdyNHvYnG9TnH4LZl2Ob1Lee54qV5k9waenMrL9Uj99aEAZiAOo7n9ruTSM+WwHysd/wDsro9Gv/kcdq/H/wBON02rUYz2O3Ffo7vvpdnIPMdecXatDibSLy4Vi4kr9xq0NDPK5jvICuXLCZeKd9GZ/ryO/DpJUuj4tP3s8VxvfwTXjY7TvpdTIOB15xOvemr5FmaeK2jpOV2bWeTBx5bu5cjpCtn6pePI9J0Nh3k9fLXmXq/Tici7I0pXZzrmHdO6j/Rygvd/1oK0u0rtonmFnCrq3/1//almDOcRaAwyP4Oty+7g73hXLxuXvnXaV5UAdVoPUPSBnhMtgkMXCx8KHFrBwd8cJm4Hwa5Jhc3dbe5ba3WyU2KztFmNws/WMaG8RoeKOde8IOOI2osJM4nWTV6ewSiG0hoe5gkF114XXOc0Y0zqx2HMkM1NBaL0RJAx9r0hJBMb16JsD3taA9wZRwYQatDTntQGc2NHaraEnlZDDpSd8kjrrGizPFTzmOgwBNTgAExXZymtmiGWO1zWZkvCiIgX6UNS0OLTsqK0NPsyCGizqDYYp9I2WGZgfG97w5prRwEMjhlytB8iEDKetNmZFbbVFG0NYyeVjWitGta8gAeQIAzEAVEEj2TV5o7zsuFf2eHLm
XawD/4vE8t0sk8R4IlllLTQ3hyVW9K5x5Np58x5EV5Y9+aWnMrL9Uj99aEAZqANHV2a5aYzsJun+oFo+0ha8FPJrx4cf2c/pWl1mDqLYr8M/wCDv769PY8FYV5FgKWmNJCCIvzOTRvccvJmTyArPiq6oU3PXq3mzAYN4qsqerS3sX+zLvPPCSSSTUkkknMkmpK8s227s+gRiopRirJZkaGgLXBFMH2mz98RgOBivXKkjA3huSGddZdYtDySMjGhcXvYwftDs3uDR9pTFnNbXG0aHsFqdZjooSlrWOLhM5o44Ju0JOynWgFc4nTlps9slhZYLCbO41ZwYkvmV7y0MoXUApiP6khnRSauazSg1da250JtrGgHmbN9yYsx2HdI1Nt9ulgmgkbGBCGStfO+NgeHF2AYCHeERXkCGJM8x1o1NlsDGSSzWeQvfcuwyF7gbrnXnVaMOLSvKEiVznCUDPVdRdXZ7BYp9Kus75LRwR72hpVzWuwMz25450GNwHa6gZFs8sdI55L3OvOeS9zicXOcbznHlJJPlSGdL3Mf82sfSSf+PMmhM6zWHTOhZ7XaIbdY32eRk0rO+rPiXFry3hJGsFS454tegFc5bXDVWCyxx2izW6O0wTOLGUpwoLW1dW7VpptPFoXAUxQNHFJEj1vQZPetloQP2eHHbkV3ej/4fFnkemX/AOV4InLsTfJduIIot1thyL7Tykryh9DNLTmVl+qR++tCAM1ABRyXSHD4pDvK01+5OMslqWzPwIzhlxcXrTXHMekB9cRtxXs1nV0fOHFp2YqosKxw2n9I8PLgeIyrW7j8p/l9QC8xjsT11TN8K0cz3HReC9mo+8velnfdsXh+bmcsR0hIA9A1G0BZrOyPS1vtMQhYS6GFhvySSsOAI+U1wrcFTUAkgAgsTOP1i0u62Wqa1PF0yvvBud1oAYxvKQ1rQTvBSGUYpXNcHMcWuaQ5rmktc0g1BaRiCDtCALUul7U7wrXaXfSnld63IA7XW4C06B0bOeOYZHQOLsTQCRhJryws60xazztsYGQA5hRIZ6Loqz6I0dFHap5hbrS9rZIrNGKMYSKtMoNbpG9+7BhITFnZj/4hW/v3v7hON4PAY8BwVa8Fd+2/nXHkRcLFnugO0dOyK3WN4jktDnCay0xY8Cr5CBgw1oDsfeDh8aowRS7mP+bWPpJP/HlQgZS10/zC2fWZ/eOSGYl0Z7eZAFVBI9O0dLSCy5fu0OZ+aV3ejn/w+J5Dptf+T4IeS045DqW65yVFsy3au2ffJ5w/BYezqPfx/R1u28VsjwfMu6U1fgdwFeEo2zsaKOAwEsx2t5SqqeApScr3zP8A2ovr9L4inGDWTnV3m733lIauWY+N85vZVvZ1Hv4/oo7cxOyPB8x/0Yg/meeD/ajs2j38f0Lt3E/9eD5hN1Vg3y+cOyn2dR2vj+g7cxOyPB8x/wBFYf5vnj72o7Oo7XxDtzE7I8HzEdV4Ngl89vqupdm0e/j+h9uYnZHg+YB1YhGfCecOyjs2j38f0LtzE7I8HzBOrMIOPCDneOyjs6j38f0PtzE/9eD5g/ozBX4/OHDso7Oo9/H9B25iv+vB8xSauQjLhPOHZSfR1Hv4/oa6cxOvJ4fsjGrsRy4Trr6mpdnUu/8A3gT7axGyPD9kbtAxbC/yvaPuS7Ppd/Ea6ZxGxcHzIv8A4GIkGricswSPsS7Ppd/En2xX2Lg+ZN/8BFvf5w/BPs6l38f0V9tYjYuD5hfo9D8p/nDsp9nUu/j+hdtYjYuH7JYtW4T4zzh2U10dR7+P6Iy6bxK+ng+ZKNV4N8nnDsp9m0e/j+iPbmK2R4PmL9F4N8nnDso7No9/H9B25itkeD5gu1YgG1/njso7No9/H9B25idkeD5kEmr8I2v84fgovo6j38f0Tj03iXqjwfMqv0BAMuE84fgo+wUe8tXTGJeqPB8zftjGxtgY0YNs8IFTj4JVuGioRcVqbKMfKVSUZvS4ogLgtJz7M
t38cXdRCmmUtbEX9KSCkOZ/Ut5vhJVVSeee/wBEasTF5FL7fVlMSbP/AGFdcx2CEp/ITuFiYSfOQIJz6mgcaddEDzXGcwjCrTy5+vFCE1YYQ8358qAJBCNtUADJcAwBryYdaB6SrJTPEeVIEQPJJqDTlqa+Sii85YsxDJGXGrnDy1qk1csU7aESRQAcvk/FNRISm2TWazXnNaBi5waK73EAY5bUOyV2OKc5KC0s6Y6j2zxTfSM/FZvbqG3yN/Y+K7uIUepVsH8JvpG/ij2+ht8gfQ+K7uIQ1Ntvim+kb+KPb6O3yF2Nie7iMdTLb4pvpGfij2+jt8g7GxOxcRjqXbfFN9Iz8Ue30NvkHY2J7uJC/Ua3H+G30jfxSeOo7fImuiMStnE5C0xFri05gkHnBoVeZbOLaLmlW1MWB+Ai9Spo/NvZpxb+D7UVQ3kHldT1q8xX7x9vhIE9xq6RYKQ4j4BufSSqFFZ57/RF2Lbyaf2+rImkfKHkK0GFpk15nJ1oFZgOc0ZOqgLMXfVMgPIi40mAJqnIIuJoMTu2IATpjvJ5kAVHS8ijcmokT5uX7VG5YoEbp6ZFGUSVO+kGM1215wkiTzFoP2H7Ap3KcnWXNFfvENBhwsWefhtUKnwS3P8ABdhv5ob0ezaZtL447zASQ5pNA0gMBq+9eIoLoIrsqFwKEIylaX+eo9jiJyhG8f8ALX5HPWXWKR0UkgLqNE4FQz4R8p73a7GraC62lMSeSp3TwkYzUXryduhL3jnU8dKVOU1qytmlv3b7NhtaN0o5zxA+GUPaxrnvdwVMQQHG485ljsAFkq0Eo5akrXzJX9VqubKOIlKSpyi7pK7dvR67AW/SMvC8HDG9xiLHSU4K65j2uo3juBBwqCN2KdOjDIyptK97adK3IVWvU6zJpxbta+jQ75s7RXktNtuPa2GSrn1a8mz3o4yQSKX6OcOMBXDKuWNihh8pNyWjOvezvho2lTqYnJaUHneZ+7mXHTs8Lmvoy2iaMPDS3F7SHUqCx5Y6t0kZtOSy1afVyyb7PNXNtGr1kMq1tOnudjwXSb6TSGmIkf7RXfTzI8jUV5y3sPTs1TFsHAxGg5iqqb+LezRiIWUPtRnB35qrTNYk2poizT0ocIMf4DfezKqlplv9EX4le7T+31ZSMnKr7mTJCbTlQJ3JBTlUiDuyRoTRFkgfyDqTuRsC56VwSKkkx3qDZfGCRC5/L61G5Yl3AcyRIQAOyvlQDui3Z8BhgrEUTzseo/JQLOXtEOHDwj+bF7bVGo/cluf4LcMn10H3o9h1kB4Ev4MS8Gb9wl9HXQaC6wG9jTA4bdi4eF+O17XzXzeujwznrsZ/HlZN7Z7Z/TT45tZhaOsjbQDE9sL2saXSWhjz4cvCPIHFANHOvXa0bVu0LZVqOk8uLab0Ra1Ky2+F9ecwUaarLIkotJZ5J63d7NTz21ZjR1StBm4WV7mmQmNhDa+AxguuxGTi57xyOCoxsOryYJZs78W/TMjRgJuplTk8+ZeCWZ+OdreW9JxukN11kbK0GoLnsxNM6EYZkKqk1FXVSz3MurRc3aVNSXe0YFl0aHzSO7yZSJ7WtY10bA03GPq4gVeeNvplhVbZ1nGml1jzrS7vW14fkwU8OpVZPql7rzJWWpPx/HcddY5HubV8fBmpwvB2G+o8q5k1FP3Xc69Nya95WPnzS5/XSfTf7RXeWhHkp/G97D0ycYegh9RVVL5t7NOJ+T7UUmSkZH7AfWrk7GNxTLGKkVsv6WOEHQN97MqqWmW/0RqxPw0/t9WUWq4yMnYaZYKSKmr6SQPO9O5GyJQ/DZ1JkLEb383UlckolSaWqg2XwhYgL1G5ZYEvSuSsDfRcdiWIfmiaISLXlUykYORcLFzQrq2iHpYvbaoVPgluf4LsOrVYb1+T3S12YSNulz2iubHFjua8MQF5+E8h3svHOeyqQy1a7W52KTtX7PS6I7oNA66SOEaDW7Ic3gnOuJVqxVW927+m7YUex
0bWStttr37S0dHx8I2UCj2tLAWkgFvyXAYOAzAOSr62WQ4ann8S3qYZanbOs3hsJ5WXmltSKgircCK7Qd6gnZ3LJK6sV9H2AQ3qOe8vdfc55BcTda3YAMmjYrKtV1LZkrZsxVSoqnfO227tvgW1UXHzlpg/rpfpv9or0OpHkZfG97/IemjjD9Xh9RVNL5t7NOI+T7UZ9VaZrFkuUymxpaVGEHQN97MqqWmW/wBEaMT8NP7fVlQK8xEjUyLJAVIgC56VxpFaaVQbLowIC5RLLAFyRKw15A7BNQJliIKaKZBEpisOHICxe0If2iDpY/bCjP4Huf4LcP8Ayx3r8nvNpkLWOc1t4taSGjAuIFQ0HlyXnopOSTdj2M5OMW0r9xx1s0lpOZjnMhFmjDS4ucePQCpArjWnzRzrqQo4SnJKUsp+X+8Ti1K+PqxbjHIVr59P+8DR1AtD5LKXSPc88I4Vc4uNKNwqVR0lCMa1oq2Y0dEVJTw95Nt3ek6RYDqCQAkAfOOmfh5fpv8AaK9BqR5KXxPewtNZw/V4fUVTS+beacR8n2oz6q0zFoBWIpZq6Uyg6BvvZlVS0y3+iLsT8NP7fVlJqvMbJAUyIznouCiV5ZVFstjErlygW2BLkDSGqkOw4TBkrAmiDZOclIqGQMcIEXtCfvMHSx+2FGfwvc/wW4f+WO9fk9/XnT2RU0v8BN0cnsFW0f5I71+Sqv8AxS3P8GD3OP3Q9I/1NWzpT+fwRzuhf/W8WdSucdYSAEgD5w018PJ9N/tFeg1I8m/ie9h6azh+rw+oqml829mjEaIfajOVpnLoVhmZp6Vyg6BvvZlXS0y3+iNGJ+Gn9vqymFcYmM5yBpEMkii2WRiVyaqJbawzigaQKQxAIBkgCZAnjapIrkx3JiQkAEECZd0H+8wdLH7YUZ/A9z/BbQ/ljvX5PoBedPZFTS/wE3RyewVbR/kjvX5Kq/8AFLc/wYPc4/dD0j/U1bOlP5/BHO6F/wDW8WdSucdYoSaYha57XEi5QF10ltT8UEDMVHWFcsPNpNayh4mmpNPUWbNamyVukmmdWub6wFXKDjpLITjLQfO+mh+uk+m/2iu9qR5V/HLe/wAhaazh+rw+oqml82804jRD7UZ9FcZS9RWGdmlpTKDoG+9mVVLTLf6I0Yn4af2+rKDnK4ypEL3qLZNIgcVEtSsMUACkSFRAEjWpkGyRrU0RbJqKRWCgY4CACTIlvQf7zB0sfthVz+F7n+C+h/JHevyfQK88exKml/gJujk9gq2j/JHevyVV/wCKW5/gwe5x+6HpH+pq2dKfz+COd0L/AOt4s6lc46xyukhLdlJinYwvYWNaYA0AmMuqA6t4vvmvKF0aWRePvJuzv8Xf3bLHKq9Zky92SV1ZLJ7r69N7m7o5zqEOZKNtZTGa12Dg3Hdt3rHVS0prwv6o30XLOmn429GeAaZH66T6b/aK7mpHlpP33vYWmhjD9Xh9RVNL5t7NWJfwfaihRXGS5dAU0UM0NLnCDoG+9lVVLTLf6I1Yhe7T+31ZluKsuZ0iFxUSxIaiBjUQMVEAExiERbJA1SIkrGJorbCITECQkMcNTEEQgEHo6cRzRyOrRj2ONM6NcCadShJXTW8upSyZKWxo9RHdMsni5/NZ21zOz5/UvPkd3tel9L8uYz+6RYyCDFMQRQgtYQQcwRfTWAqJ3Ul58hPpai1ZxflzI7L3QbDGLsdnlY2taNjjaK76Byc8FVm7ykm/HkRh0nh6atCDS7kuZL/iXZPFT+aztqPZ8/qXnyJ9r0vpflzGk7o1jcKOhmIwwLWbDUfH3hNYCondSXnyIy6WoNWcX5cw/wDEmy+Kn81nbR2dU2rz5B2zR2Py5nlGkHh73uGTnOI30JqunayscNyvJsl0y3GHoIfUVRS+bezZiX8H2oohquMlyy4qZSW9MnCDoG+9mVFLTLf6I2Yj4af2+rMtxVhSgQEBcVEDFRILiDUBclDVIhcNrUyLZLRSK7iok
O411AXCDUxXGeEhogcFEsRV0hO6MR3KVfI1mOVHVVFepKCjk62lxNeEpQqueXe0YuWbuH75cHmN1LwYZK5spWgrka1qjrZKThLTa/cHUQlBVIfDlKNtd9O4sB5yJFcBkc7taHca7DsKtyn/ALd/vAoyFpSflt883mMyQ/GIyDsnZDwzjsoQkpvX6+I5U4/KnptpWl6CZrnVwu0vU+NkMHY76qWU+7/afMrcI2s73t3eHkPefvZTi7Hf1eo05ksqe1efiGTT2S17PD9jvarCpMtaYbjF0EXqKopL4t7N2Kfwfaig1qusZGwnFAi3pnKDoG+9mVFLTLf6I24j4af2+rM4BXGa49ECGogdxUSC4cbE0iMpEgapELkzGJpEHIItTI3EWoHcQaiwrjlqBXI3hJk0yO6lYncJ9nDhQ9eFQd4qMDypSgpKzCFZwldFdmi2BxdVxJaWGpHgk12BVLDRTcru7VvA0Sx9RxUbJJPKzX08SyLK3HDAkEjYSAAD1AdSt6tGf2iSS7tevPn5j96DAEk0BbjTwTSrcuQY58qOrWj/AFv94i9oabaS038dv+zdxILKMeU3vLh+AUurRF4iXlb88wu9W7vzj2j1o6qJH2if+8OSGcxSsJSLOl2YxdDF6is9FfFvZuxTzQ+1FEMV1jI2QOUS0vaXGEHQN97MqaWmW/0RrxD92n9vqyhdVxkuINQFx7qAuO1iLCbJQ1SsQuG1idiLZMGpldxXUBca6gLhBqYriLUAmRuakTTEGIsDkTXE7FdxXEWDKHDE7CuOGIsFx7qLCuPdTABzUrEkyzpVuMfQxepZ6Kzy3s3Yt5qf2opBiusY8opXVWaWzR0q3CDoG+9mVVLTLf6I04l+7T+31ZQuq4yXCupiuNdQFyVrE7EHIMNTFckYxNIg2FdTI3FRAXEGosFwg1AriLUBcAtRYlcdjECbJriZEcMQAVxABBiBjFiBCDEDBcxAixpNmMfRRepUUdMt7NuLean9qKlxXmIz7iqsamzQ0o34HoG+8lVVLTPf6I04l+5T+31ZSuK6xkuK6iwrhsYnYTkHdTsQuE1idhNktxMiPcQAgxADhqACuoAa6gBXEAOxiBklxAWHuoAMMQOw91IBrqYWFcSAZzUwJ9IsxZ0UXsqmj829mnFaKf2L1Kt1XGUzbqgWtl/SbcIegb7yVU0dM9/ojVin7lL7fVlK6rzHccNRYTZKGKRFsdrEATMYgB7qAHLUAINQAQagLBFqBjXUAK6gAmsQFggEhj3UAE0IAYoAZADhqAFdQBYtzcWdFF7KqpfNvZpxOin9i9SqArTMZl1IGy9pIfA9C33kqpo/FPf6I14r4KX2+rKgYtFjFcNrECJGhABhqBhBqBjhqAHISAcNQA4agY5YgAS1ADgIANoQMe6gLDoAYBADhqAsPQIAZABAJDsWLa3FvRR+yqqXzb2acSs0PtXqVlcZjKuJ2K7l7SI+C6FvvJVRR+Ke/wBEa8V8FL7fVldrFeYwqIGExiASJQ1IkFRACAQAqIAcBAw6IAYtQFhqBADhAD0QMdIBUQA4QMQQAqIFYeiB2EGouPJZYtoxb0cfsqml829mjE6IfavUqlXGVmeRzKRWXLePguhb7yVU0dM9/ojXifgpfb6srtarjISXUhhNCBjoAcIAK6gdh2tQNIJIYJTEJACqgQTUiSCLUrkrDJizBNalcaiPcSuPJGup3FYVEAEkSFQICyJrZm3o4/ZVVL5t7NGI0Qt9KIFaZjNAVhnNC0Rh4jIfFhG1pDpY2kEPkJBDnA7R1rNGeRKV09Ox7EdCpSdWFNxazRtpS1vayPvb+ZD6eLtKfXR7+D5FPss9sf7R5i72/mQ+ni7SOuj38HyD2We2P9o8wuA/mQ+ni7SOuj38HyH7LPbH+0eYuB+fD6eLtI66PfwfIPZZ7Y/2jzHFnHjIfTxdpHXR7+D5B7LPbH+0eYRiHjIfTxdpLro9/B8h+zT2x/tHmMIP5kPp4u0n10e/g
+QvZp7Y/wBo8x+9/wCZD6eLtJddHv4PkP2ae2P9o8x+BHjIfTxdpHXR7+D5B7NPbH+0eYuBHjIfTxdpHXR7+D5B7NPbH+0eYuAHjIfTRdpHXR7+D5B7NLbH+0eY4hHjIfTxdpHXR7+D5DWGltj/AGjzC4IeMi9NF2kuuWx8HyH7PLav7R5iEQ8ZD6aLtI65d/B8g9ne2P8AaPMfgx4yH00XaR1sdj4PkPqJbY/2jzG4MeMh9PF2kdbHY+D5C9nltX9o8xGMeMi9PF2kdau/g+Qezy2r+0eY3BDxkPpou0n10e/g+QvZ5bY/2jzDaweMi9PF2lF1Y9/B8iaoS2x/tHmOWN8bD6eLtI62PfwfIboPav7R5itTmkijmuoxgJa4OFQMRUYFFL5n3sMQleCvoilmzkeCtKMxntad6mZgxXegYQJ2IHdjVO1AXYV9FguPf5UrDuECgdxAoAK8iw7iLt5RYG9pW0hbxEAS0mtcqbOdVVJqGdl1Kk6t0nYoy6wxta15a7jNkd8Wv6twa4EA4GpGBxxG3BVSxUEk2tvkaIYCpKTimszS168+z/bs5Zm02xj7hD73FGAbTjNDsDWmRTlXipZNghhJyhl3Vv8ALYaDjTatBieYdpKBpscFIdxFxRYG2CKpkc4ZJSzE86EDyoBMcuSsNsQKAuOgYJCCNikxisM9h6IAMc6CQ14bECH8iBhBvIkOwTWoGkOUgdxUQFgSExAT2YOpe2Go6iPvUZJSLISlC9tYBsDCA2hwaWjfRxBz8mCr6qNi1V53uttyJ+hoiahpGyjTQAXODFBkKNyUXQg/93WLI4qqs2n/AO3/ACX7quuZckdA9wxciwrhNQSQ4CQ0hXUXHkioEBZDEIE0wgEDsMgBXUXFYqOKsM7Ym5IAEoEOwYoGtJI0pEk7BFxolYbbsJibCIb1FEpAtCbEgQUyK0hhIkEEiaBKZF6RFAahbEaxag2JMnEc5JDegAFMhckSLAQgitIQOKCWsNImMUCYKZE//9k=
|
| 85 |
+
input3 (text): default
|
| 86 |
+
output1 (json): 頁E��
|
| 87 |
+
"""
|
| 88 |
+
print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), f"text2component")
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
if openai_key == "default":
|
| 92 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 93 |
+
else:
|
| 94 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 95 |
+
|
| 96 |
+
messages=[
|
| 97 |
+
{
|
| 98 |
+
"role": "system",
|
| 99 |
+
"content": """
|
| 100 |
+
■構�E要素名�Eアウト�EチE��サンプル
|
| 101 |
+
[
|
| 102 |
+
{"component_large":"サービス吁E,"component_middle":"アコム", "component_small":["カードローン・キャチE��ングなら消費老E��融�Eアコムにご相諁E��ださい"], "UIelement":"チE��スチE},
|
| 103 |
+
{"component_large":"CTAボタン","component_middle":"お申し込みはこちめE, "component_small":["簡十E0秒でお申し込み完亁E], "UIelement":"CTA"},
|
| 104 |
+
{"component_large":"Q&A�E�E,"component_middle":"よくあるご質啁E, "component_small":["自宁E��勤務�Eに何か書類が送られてくることはありますか�E�E,"家族割などの割引�Eありますか�E�E], "UIelement":"表絁E��"}
|
| 105 |
+
]
|
| 106 |
+
"""
|
| 107 |
+
},
|
| 108 |
+
{
|
| 109 |
+
"role": "user",
|
| 110 |
+
"content": [{"type": "text", "text":p}]
|
| 111 |
+
},
|
| 112 |
+
]
|
| 113 |
+
messages[1]["content"].insert(0, {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{base64img}"}})
|
| 114 |
+
return ask_raw(messages, 'gpt-4o-2024-11-20')
|
apis/baseimg2cta_detail.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
import numpy as np
|
| 10 |
+
from enum import Enum
|
| 11 |
+
|
| 12 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 13 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 14 |
+
from src.clients.llm_client import LLMClient
|
| 15 |
+
import json as _json
|
| 16 |
+
|
| 17 |
+
client = LLMClient()
|
| 18 |
+
system_prompt = None
|
| 19 |
+
user_text = ""
|
| 20 |
+
images = []
|
| 21 |
+
for msg in messages:
|
| 22 |
+
role = msg.get("role", "")
|
| 23 |
+
c = msg.get("content", "")
|
| 24 |
+
if role == "system":
|
| 25 |
+
if isinstance(c, str):
|
| 26 |
+
system_prompt = c
|
| 27 |
+
elif role == "user":
|
| 28 |
+
if isinstance(c, str):
|
| 29 |
+
user_text = c
|
| 30 |
+
elif isinstance(c, list):
|
| 31 |
+
for part in c:
|
| 32 |
+
if isinstance(part, dict):
|
| 33 |
+
if part.get("type") == "text":
|
| 34 |
+
user_text += part.get("text", "")
|
| 35 |
+
elif part.get("type") == "image_url":
|
| 36 |
+
url = part.get("image_url", {}).get("url", "")
|
| 37 |
+
if url.startswith("data:"):
|
| 38 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 39 |
+
else:
|
| 40 |
+
images.append(url)
|
| 41 |
+
|
| 42 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 43 |
+
result = client.call(
|
| 44 |
+
prompt=user_text,
|
| 45 |
+
schema=response_format,
|
| 46 |
+
model=model,
|
| 47 |
+
system_prompt=system_prompt,
|
| 48 |
+
images=images if images else None,
|
| 49 |
+
temperature=0,
|
| 50 |
+
)
|
| 51 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 52 |
+
else:
|
| 53 |
+
return client.call_raw(
|
| 54 |
+
prompt=user_text,
|
| 55 |
+
model=model,
|
| 56 |
+
system_prompt=system_prompt,
|
| 57 |
+
images=images if images else None,
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
client = LLMClient()
|
| 61 |
+
|
| 62 |
+
class CTAStyle(BaseModel):
    """Visual styling of one CTA button, as free-form CSS value strings filled by the LLM."""

    font_size: str
    font_color: str
    letter_spacing: str
    border_radius: str
    box_shadow: str
    background_color: str
    gradient: str  # gradient definition (or a note that none is present)
    hover_effect: str
    padding: str
|
| 72 |
+
|
| 73 |
+
class CTA(BaseModel):
    """One distinct CTA button detected in the landing-page screenshot."""

    main_copy: str
    sub_copy: str
    sub_copy_position: str  # "inside" or "outside"
    html: str  # self-contained HTML snippet (inline CSS) reproducing the button
    css: CTAStyle  # structured styling attributes mirrored from the snippet
|
| 79 |
+
|
| 80 |
+
class CTAlist(BaseModel):
    """Structured-output schema: all unique CTA buttons found on the page."""

    cta_buttons: list[CTA]
|
| 82 |
+
|
| 83 |
+
def ask_raw(messages, model):
    """Run the CTA-extraction request through the HF compatibility wrapper.

    Args:
        messages: OpenAI-style chat messages (system/user, text + image parts).
        model: model identifier forwarded to the HF client.

    Returns:
        str: JSON string conforming to the CTAlist schema.
    """
    # Bug fix: the previous call passed `model` both positionally and as a
    # keyword (TypeError), referenced an undefined variable `p`, and forwarded
    # OpenAI-only sampling kwargs (messages/top_p/frequency_penalty/
    # presence_penalty) that _ask_raw_hf(messages, model, response_format=None)
    # does not accept.
    return _ask_raw_hf(messages, model, response_format=CTAlist)
|
| 94 |
+
|
| 95 |
+
def baseimg2cta_detail(base64img, ocr, openai_key=None):
    """Detect and reconstruct every distinct CTA button in a landing-page screenshot.

    Args:
        base64img: Base64-encoded PNG of the scrolled landing page (no data-URI prefix).
        ocr: OCR text extracted from the same image; used to correct button copy.
        openai_key: API key; ``None`` or the literal ``"default"`` falls back to
            the ``OPENAI_KEY`` environment variable.

    Returns:
        str: JSON string conforming to the CTAlist schema — one entry per unique
        button, each with its copy, an HTML reproduction and CSS attributes.

    Note:
        The previous docstring embedded a full base64 sample image; replaced with
        this summary. NOTE(review): the Japanese prompt strings below were
        reconstructed from encoding-corrupted (mojibake) text that had lost its
        closing quotes — verify wording against the original source.
    """
    # Resolve the key at call time: the old signature froze
    # os.environ.get('OPENAI_KEY') as the default at import time, and wrote
    # None into os.environ (TypeError) when the env var was absent.
    resolved_key = os.environ.get('OPENAI_KEY') if openai_key in (None, "default") else openai_key
    if resolved_key:
        os.environ['OPENAI_API_KEY'] = resolved_key

    messages = [
        {
            "role": "system",
            "content": "あなたは優れたWEBマーケターで、ランディングページの要素を見つけることに長けています。"
        },
        {
            "role": "user",
            "content": [
                {"type": "text", "text": """LPのスクロール画像とOCR結果が以下にあります。
この画像内に存在するすべてのCTAボタンを探し出して個数を数えてください。文言が全く同じボタンは不要です。
また、それぞれのCTAボタンのデザインを再現するcssを含んだHTMLを書き出してください。書き出す際には、CTAボタンごとに
・文字サイズ
・文字の色
・文字の間隔や配置・改行
・構成するレクタングルごとの角の丸さ
・影やボタン背景の色(gradientのあるなし)
・サブコピーがボタン外かボタン内か
・ボタン内の矢印等の絵文字(→や▶など)
を別々に認識して丁寧に再現してください。OCRの情報で文言を精確に修正しつつも、主に画像情報からボタンを抽出することに注力してください。
ボタンの色やデザインは基本的に一種類になるはずなので、特別な理由がない限りは複数の色合いのボタンを混ぜないでください

例:
<p>サブコピー1</p>
<button style="display: inline-flex; align-items: center; justify-content: center; gap: 10px; padding: 12px 24px; font-family: Arial, sans-serif; font-weight: bold; font-size: 16px; color: white; background: linear-gradient(45deg, #ff7eb3, #ff758c, #ff5d73); border: 2px solid #ff5d73; border-radius: 25px; box-shadow: 0 8px 15px rgba(0, 0, 0, 0.2); cursor: pointer; transition: all 0.3s ease; text-transform: uppercase;" onmouseover="this.style.background='linear-gradient(45deg, #ff5d73, #ff758c, #ff7eb3)'; this.style.boxShadow='0 15px 20px rgba(255, 93, 115, 0.4)'; this.style.transform='translateY(-3px)';" onmouseout="this.style.background='linear-gradient(45deg, #ff7eb3, #ff758c, #ff5d73)'; this.style.boxShadow='0 8px 15px rgba(0, 0, 0, 0.2)'; this.style.transform='translateY(0)';" onmousedown="this.style.transform='translateY(1px)'; this.style.boxShadow='0 5px 10px rgba(255, 93, 115, 0.2)';" onmouseup="this.style.transform='translateY(-3px)'; this.style.boxShadow='0 15px 20px rgba(255, 93, 115, 0.4)';">
<span style="font-size: 18px; color: white;">ボタンコピー</span>
<span style="font-size: 12px; color: #ffeeff;"> サブコピー2</span>
</button>

# OCR結果
""" + ocr}
            ]
        },
    ]

    # Prepend the screenshot so the model receives the image before the text.
    messages[1]["content"].insert(0, {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{base64img}"}})
    r = ask_raw(messages, "meta-llama/Llama-3.3-70B-Instruct")

    return r
|
apis/baseimg2ecinfo_rect.py
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from enum import Enum
|
| 3 |
+
from pydantic import BaseModel
|
| 4 |
+
|
| 5 |
+
from src.utils.tracer import customtracer
|
| 6 |
+
from src.clients.llm_client import get_llm_client
|
| 7 |
+
|
| 8 |
+
class Category(str, Enum):
    """Top-level industry category of an EC/LP site (str-valued so it serializes
    directly inside the pydantic structured-output schema).

    NOTE(review): member names and values were reconstructed from
    encoding-corrupted (mojibake) source whose closing quotes were destroyed
    (the file did not parse) — verify labels against the original UTF-8 source.
    """

    ビジネス = "ビジネス（SaaS・法人支援）"
    ヘルスケア = "ヘルスケア（美容・健康）"
    ヒューマンリソース = "ヒューマンリソース（求人・紹介）"
    コマース = "コマース（趣味・食品・衣類）"
    ファイナンス = "ファイナンス（金融・保険・不動産）"
    インフラ = "インフラ（電気・通信・ガス・住居）"
    ライフイベント = "ライフイベント(教育・結婚・相談)"
|
| 16 |
+
|
| 17 |
+
class CategoryMiddle(str, Enum):
    """Second-level (middle) industry category, grouped under `Category`.

    NOTE(review): reconstructed from mojibake source whose quotes were
    destroyed (the file did not parse) — verify labels against the original.
    """

    # Business
    ITソフトウェア = "IT・ソフトウェア"
    マーケ支援コンサル = "マーケ支援・コンサル"
    オフィス機器用品 = "オフィス・機器用品"

    # Healthcare
    健康食品器具 = "健康食品・器具"
    美容医療クリニック = "美容・医療クリニック"
    美容コスメ = "美容コスメ"
    フィットネスジム = "フィットネスジム"

    # Human resources
    求人情報 = "求人情報"
    人材紹介 = "人材紹介"
    人材派遣 = "人材派遣"

    # Commerce
    動画アニメゲーム = "動画・アニメ・ゲーム"
    リユースリサイクル = "リユース・リサイクル"
    旅行ホテルレジャー = "旅行・ホテル・レジャー"
    趣味交流 = "趣味・交流"
    新聞雑誌メディア = "新聞・雑誌・情報メディア"
    自動車レンタカー用品 = "自動車・レンタカー・用品"
    飲料食品生活用品 = "飲料食品・生活用品"
    家電パソコン = "家電・パソコン"
    ファッション = "ファッション"

    # Finance
    不動産 = "不動産"
    保険 = "保険"
    ローン = "ローン"
    クレカ電子決済 = "クレカ・電子決済"
    証券FX先物 = "証券・FX・先物"
    銀行 = "銀行"

    # Infrastructure
    ネット通信サービス = "ネット・通信サービス"
    電気ガス = "電気・ガス"
    住宅設備リフォーム = "住宅設備・リフォーム"

    # Life events
    士業相談 = "士業・相談"
    学習スクール = "学習・スクール"
    結婚出会い = "結婚・出会い"
    葬儀墓地 = "葬儀・墓地"
    引越し介護 = "引越し・介護"
|
| 65 |
+
class Meta(BaseModel):
    """Page-level metadata summarized by the model from the whole screenshot.

    NOTE(review): field names were reconstructed from mojibake source in which
    the annotation colons had been destroyed (the file did not parse) — verify
    against the original UTF-8 source and any downstream schema consumers.
    """

    会社名: str  # company name
    業界: Category  # top-level industry category
    中分類: CategoryMiddle  # middle industry category
    サービス: str  # service name/description
    商品: str  # product
    タイトル: str  # page title
    訴求テーマ: list[str]  # appeal themes (prompt asks for 6 items, ~20 chars each)
|
| 73 |
+
|
| 74 |
+
class cood(BaseModel):
    """A 2-D pixel coordinate on the page image.

    NOTE(review): the name looks like a typo of "coord"; kept as-is because it
    is referenced by `str_with_rect` and baked into the structured-output schema.
    """

    x: int
    y: int
|
| 78 |
+
class str_with_rect(BaseModel):
    """A detected page element: its text, an HTML/CSS reproduction, and its location."""

    text: str
    html: str  # HTML/CSS snippet reproducing the element, per the extraction prompt
    rect: list[cood]  # bounding rectangle as two corner points (the prompt's 2-point convention)
|
| 82 |
+
|
| 83 |
+
class pageInfo(BaseModel):
    """Structured-output schema for an EC page: every component type the model
    may detect, each as a list of `str_with_rect` (empty list when absent).

    NOTE(review): several field names were reconstructed from mojibake source in
    which colons and characters had been destroyed (the file did not parse) —
    verify against the original UTF-8 source and any downstream consumers of the
    returned attribute names.
    """

    # Common page chrome
    メタ: Meta
    ロゴ: list[str_with_rect]
    グローバル検索バー: list[str_with_rect]
    ハンバーガーメニューアイコン: list[str_with_rect]
    カートアイコン: list[str_with_rect]
    ユーザーメニュー: list[str_with_rect]

    # Navigation
    ブレッドクラム: list[str_with_rect]
    ページネーション: list[str_with_rect]
    タブ切替: list[str_with_rect]

    # Top page
    メインビジュアル: list[str_with_rect]
    プロモーションバナー: list[str_with_rect]
    カテゴリカード: list[str_with_rect]

    # Product list page
    商品一覧: list[str_with_rect]
    フィルタ: list[str_with_rect]
    ソート: list[str_with_rect]
    ページャー: list[str_with_rect]
    クイックビューアイコン: list[str_with_rect]

    # Product detail page
    商品名: list[str_with_rect]
    価格: list[str_with_rect]
    ブランド: list[str_with_rect]
    サムネイル: list[str_with_rect]
    画像ギャラリー: list[str_with_rect]
    カラースウォッチ: list[str_with_rect]
    サイズセレクタ: list[str_with_rect]
    在庫ステータス: list[str_with_rect]
    配送情報: list[str_with_rect]
    ボタン_カート追加: list[str_with_rect]
    ボタン_今すぐ購入: list[str_with_rect]
    レビューサマリー: list[str_with_rect]
    レビューボタン: list[str_with_rect]
    QnAリンク: list[str_with_rect]
    バッジタグ: list[str_with_rect]
    関連商品カルーセル: list[str_with_rect]

    # Cart page
    カート商品リスト: list[str_with_rect]
    数量セレクタ: list[str_with_rect]
    削除アイコン: list[str_with_rect]
    クーポン入力: list[str_with_rect]
    注文合計サマリー: list[str_with_rect]
    チェックアウトボタン: list[str_with_rect]

    # Common footer
    フッターリンク: list[str_with_rect]
    SNSアイコン: list[str_with_rect]
    カスタマーサポートリンク: list[str_with_rect]
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
@customtracer
def baseimg2ecinfo_rect(
    base64img: str,
    openai_key: str | None,
    google_api_key: str | None,
    p: str = "",
    model: str = "Qwen/Qwen2.5-VL-72B-Instruct",
):
    """Extract EC-page components (with bounding rectangles) from a screenshot.

    Args:
        base64img: Base64-encoded screenshot of the EC web page (no data-URI prefix).
        openai_key: OpenAI key; the literal ``"default"`` selects the OPENAI_KEY
            env var, any other value (including None) is passed through as-is.
        google_api_key: Google key; ``"default"`` selects the GEMINI_KEY env var.
        p: Extra prompt text (e.g. OCR output) appended to the user prompt.
        model: Vision-language model identifier for the structured call.

    Returns:
        pageInfo: structured description of all detected page components.

    Note:
        The previous docstring embedded a full base64 sample image; replaced with
        this summary. NOTE(review): the Japanese prompt strings below were
        reconstructed from encoding-corrupted (mojibake) text — the system prompt
        literal was unterminated in the corrupted revision — verify wording
        against the original source.
    """
    print("baseimg2ecinfo_rect p(len):", len(p), " img(len):", len(base64img))

    system_prompt = "あなたはECサイトの開発兼デザイナーで、ECの各ページの構造と用途を熟知しています。"
    user_prompt = """以下の手順でECサイト内のWEBページ画像を解析します。
・画像内のECサイトの要素を以下の形式で分類して登録してください。
・各要素のテキストの文字サイズ、文字色、文字の間隔や配置・改行を認識してください
・各要素の枠のレクタングルの線や色や角の丸さ、影や背景色、gradientを認識してください
・各要素内にある文字で表現しにくいアイコンを絵文字や記号として認識してください。
・ここで認識した情報を<div>{icon}<span>{text}</span></div>の形式でHTML,CSSを使って記述してください。
・画像内に該当の値がなければ[]のように空の配列を回答し、画像内に存在しないことは回答しないでください。特に黒一色や白一色の場合に注意し、すべての値が空になるようにしてください。
・画像内に写っているイメージ(写真やイラスト)について、どんなものが起用されているか、プロンプトで再現できるテキストとして登録し、オブジェクトの位置を囲ってください。
・値を抽出した後は、その値が含まれるレクタングルの座標を2点方式で教えてください。OCRの文字列の座標min(xs), min(ys), max(xs), max(ys)があるので、それを参考にレクタングルを合体したり、ボタン分のバッファを広げたりしてもいいです。あくまで画像から座標を抽出してください。
・OCRの抽出内容がある場合は以下に記載するので、それも利用して抽出の正確さを高めてください
・これらの抽出情報を総合して、メタの各項目を記載してください。訴求要素は、情報かOCRがある限りは20文字で6種類提案してください。情報がなければ空にしてください。

""" + p

    # Key resolution: the literal "default" means "use the corresponding env var".
    resolved_openai_key = os.environ.get("OPENAI_KEY") if openai_key == "default" else openai_key
    resolved_google_api_key = os.environ.get("GEMINI_KEY") if google_api_key == "default" else google_api_key

    client = get_llm_client(
        openai_key=resolved_openai_key,
        google_api_key=resolved_google_api_key,
    )
    response = client.call(
        prompt=user_prompt,
        schema=pageInfo,
        system_prompt=system_prompt,
        images=[base64img],
        model=model,
        temperature=0,
    )

    return response
|
apis/baseimg2fvinfo.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
import numpy as np
|
| 10 |
+
from enum import Enum
|
| 11 |
+
|
| 12 |
+
from src.utils.tracer import customtracer
|
| 13 |
+
|
| 14 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 15 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 16 |
+
from src.clients.llm_client import LLMClient
|
| 17 |
+
import json as _json
|
| 18 |
+
|
| 19 |
+
client = LLMClient()
|
| 20 |
+
system_prompt = None
|
| 21 |
+
user_text = ""
|
| 22 |
+
images = []
|
| 23 |
+
for msg in messages:
|
| 24 |
+
role = msg.get("role", "")
|
| 25 |
+
c = msg.get("content", "")
|
| 26 |
+
if role == "system":
|
| 27 |
+
if isinstance(c, str):
|
| 28 |
+
system_prompt = c
|
| 29 |
+
elif role == "user":
|
| 30 |
+
if isinstance(c, str):
|
| 31 |
+
user_text = c
|
| 32 |
+
elif isinstance(c, list):
|
| 33 |
+
for part in c:
|
| 34 |
+
if isinstance(part, dict):
|
| 35 |
+
if part.get("type") == "text":
|
| 36 |
+
user_text += part.get("text", "")
|
| 37 |
+
elif part.get("type") == "image_url":
|
| 38 |
+
url = part.get("image_url", {}).get("url", "")
|
| 39 |
+
if url.startswith("data:"):
|
| 40 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 41 |
+
else:
|
| 42 |
+
images.append(url)
|
| 43 |
+
|
| 44 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 45 |
+
result = client.call(
|
| 46 |
+
prompt=user_text,
|
| 47 |
+
schema=response_format,
|
| 48 |
+
model=model,
|
| 49 |
+
system_prompt=system_prompt,
|
| 50 |
+
images=images if images else None,
|
| 51 |
+
temperature=0,
|
| 52 |
+
)
|
| 53 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 54 |
+
else:
|
| 55 |
+
return client.call_raw(
|
| 56 |
+
prompt=user_text,
|
| 57 |
+
model=model,
|
| 58 |
+
system_prompt=system_prompt,
|
| 59 |
+
images=images if images else None,
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class Meta(BaseModel):
|
| 64 |
+
会社吁E str
|
| 65 |
+
業畁E str
|
| 66 |
+
ブランチE str
|
| 67 |
+
サービス: str
|
| 68 |
+
啁E��: str
|
| 69 |
+
タイトル: str
|
| 70 |
+
訴求テーチE list[str]
|
| 71 |
+
|
| 72 |
+
class Font(str, Enum):
|
| 73 |
+
font1 = "ゴシチE��"
|
| 74 |
+
font2 = "明朝"
|
| 75 |
+
font3 = "手書ぁE
|
| 76 |
+
|
| 77 |
+
class EvsF(str, Enum):
|
| 78 |
+
EMOTIONAL = "惁E��E
|
| 79 |
+
FUNCTIONAL = "機�E"
|
| 80 |
+
|
| 81 |
+
class PvsS(str, Enum):
|
| 82 |
+
PROBLEM = "問題提起"
|
| 83 |
+
SOLUTION = "課題解決"
|
| 84 |
+
|
| 85 |
+
class Copy(BaseModel):
|
| 86 |
+
text: str
|
| 87 |
+
font: Font
|
| 88 |
+
color: str
|
| 89 |
+
visual: str
|
| 90 |
+
appeal_mode : EvsF
|
| 91 |
+
forcus_stage : PvsS
|
| 92 |
+
|
| 93 |
+
class CatchCopy(BaseModel):
|
| 94 |
+
main_copy: list[Copy]
|
| 95 |
+
sub_copy: list[Copy]
|
| 96 |
+
|
| 97 |
+
class FvInfo(BaseModel):
|
| 98 |
+
非LP: bool
|
| 99 |
+
メタ: Meta
|
| 100 |
+
キャチE��コピ�E: CatchCopy
|
| 101 |
+
権威付け: list[str]
|
| 102 |
+
ビジュアル: list[str]
|
| 103 |
+
CTAボタン: list[str]
|
| 104 |
+
|
| 105 |
+
def ask_raw(messages, model):
|
| 106 |
+
client = LLMClient()
|
| 107 |
+
response = _ask_raw_hf([{"role":"user","content":p}], model,
|
| 108 |
+
model=model,
|
| 109 |
+
messages=messages,
|
| 110 |
+
top_p=1,
|
| 111 |
+
frequency_penalty=0,
|
| 112 |
+
presence_penalty=0,
|
| 113 |
+
response_format=FvInfo,
|
| 114 |
+
temperature=0
|
| 115 |
+
)
|
| 116 |
+
return response
|
| 117 |
+
|
| 118 |
+
@customtracer
|
| 119 |
+
def baseimg2fvinfo(base64img, openai_key=os.environ.get('OPENAI_KEY'), p=""):
|
| 120 |
+
"""
|
| 121 |
+
input1 (text): /9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxISEhUSEhIVFRUVFxUVFxUVFRcVFRUVFRUWFhUVFRYYHSggGBomHRUVITEhJSkrLi4uFx8zODMsNygtLisBCgoKDg0OGhAQGy0lHyUtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLf/AABEIAQsAvQMBEQACEQEDEQH/xAAcAAABBQEBAQAAAAAAAAAAAAACAAEDBAUGBwj/xABPEAABAwEEBAcKCggFBAMAAAABAAIDEQQSITEFBkFREyJhcXOR0gcUMlOBkpOxsrMzQlJicnShwdHwFiMkNENjgsIXNaPD02SitOEVg+L/xAAbAQACAwEBAQAAAAAAAAAAAAAAAQIDBAUGB//EADoRAAIBAgEIBwgDAAIDAQEAAAABAgMRBBIhMUFRcZHRBRMVYYGhwRQiMkJScrHhM5LwI2IkNPGiQ//aAAwDAQACEQMRAD8A3bp5CF6TMeLs2J0ICFJg6cURNmu/nFScblSqZJYjeHYgqtqxojJSV0PJhjmNqFnCXu5wTK00DXAHdknZrSJyjJWiwopHVoUmlqHGcr2ZKGbyo3LMnaEIm7kspjyIkgFFEstYcORYdx3YiiNDB51YggZuPkUpMqhHYyw0KDLkSNKTJJ2LETlW0XRZfhVMjTEstVZch0gHQMYoEyKR1Bj9mKmkQk7LOUXzNORVqi0Z3OL0FV6sRSznS4xkY4fnYt2aRxfepvuL8zmm7RjDVgcSTJWpc8fFeBTihZll3ee1nbUb5dXkxvG91fS1rff3A0GXBR/6v/IpWn9XkiP/AA6MjzfMrh7mn4OPzpv+RSyZP5vJFWVCD/j/AP1LmXI56j4OP/U7arcJr5vJGmNWm1fI83zIpAMxFGfS9tSSn9XkiuXVaVT83zB4cHAxx9ctR5eETyJrRLyRHraTVnDzfMnZaPmR03/rft46g6c/q8kXKrTXy+b5kgl+ZH/qdtRyZ/V5Inl0/p83zJmvPyI/9TtqLjLb+C1Sh9PmyOaYj4kZ9J204wk/m/BXOpCPyebI4bSTmyMek7alKnL6vJEIVoPTG3i+ZI7O9cjr/wDb21FKejK8kWPq75WT5vmOLUfkMrupJ28UdXL6vwProfT+QxO7xbKc0nqvpZEvq/BJVY/R5stRE/Jj6pO2qmpbS+Lh9P5Lcd44Uj6n9tVNS2l6cXqLMZdkOD6n9tQae0ti46kHx/mea/tqNntJXWwFzpB8jzXdtNJ7SLklqIXySO8Ex4cjx/epqNtJBzv8KK8lpfW7RjjgSAHAgGtCRf5D1KSh3lcp7Y3IbRQmhaBg04DaRXeVZTbz5ymtGObNpK1zdUc6vvtM2TsMgXX5ODuQFac6OdaMtdw7XJwZYCOLwbcd3HkVdNZTlv8ARF1eXVqCejJ9WGXYVPXs/wDSkRvYGSpGVebFNZiMrtZkQRWgA0+zJScblcKlnYth4Vdi9SViCQGtQprRYqkne6CuhwqOpK7TzkrKauiIPc04HyHEKVkytSlB6S1FbGnkP2darcGjRCvF9xLUOUc6J3UiTggcDj+d6jlPUTyE8zHETxgKEcqMqL0jUJrMhgXDwwAjNqBOS+JEodQ45FRtcnezzluMqtl8Syw15FWy5ZyeNrhk6vOoNp6iyKktDLDXV51W0Wpic4BCQNpDFwTsJtGNwDWSulMjjWoaK5B2JBNcQCXUyoDtopRpO9yudaNrBzzC8eUM5fiqynF595VVqLMu4qSwkmocQtCkthklBt5mY4iIANwfSaRXnWm6b0nOyWle3AltxLixhY5wMTajAHw5Mq0qeRVU8zk76/RF9e8lCLV/d9WW9V7Ox83ASguYY3OYTea4FpHFOVcCepV4qcoQyoabl3R9OFSq6dRXVs2lHVxat2VuTHDk4R9PWue8XWel+SOxHo7DR0R83zOM1x0UyOcCMhoLA
/jE4m84HE8w610MHXlOLUjjdKYSFOSlDMZ9jtT63XY8tK4c4WuUVpOfTqSvZmlwe7D87lVc1ZBmWiNzXVyxzBoFbFpoyTi4u5savaJ4eXjhwa0Xnmpxrk0HeTXLYDyLNia/VQzaXoN2BwnX1PeTstPI6r9GLJWvBu9I/wDFc72yvt8kdrszC6cnzfM5vTJhZaOBgZQMFZHlziA51C2MVOdMTzjlWrDzqTTcmYsXSo0pKNNW25wakDKqvM2dIZswrnQ7ihxzAqiuTSuwr4Qz2fYopZ7FknmvpIGaQZkPtwCm6UtLKViaehEjJ3A4trygqLitTJqpJPOjQjmCocWbIzRaZMN6rcWWqaCdM3PD70slknOOkB1oBxwpvripKD0EXUWkgmtYzGI31+5SjTK5VlpRn2xzTm0K+CaMlVxelFa0S0cKYcRnsqVON8q+0rrTtk2+lDttzuT1qXVoisRJHMwaYc3AtB3jLHfyFaGkznQqSjuNPSOkmng6tIrE1wpjTjyD7lRRjaUt/ojXi6icKd1pj6sk0BpUi0xcfC8G4g436sz/AKgjE006crIMDXlGvG7zaOOY9G4VcbJPUZRxfdHaf2eUVwMkZp84Bw9h3Wt2BdptHJ6XjempbGYVngaW4PINPBOw7cF0W2tRw4wi1pLURkb4JDt3G+5ReS9JbDLj8LuPJaK0bcLnEgXaZkmgojJtnuN1Mr3bXZ3WiLMIYgzM5uO9x+4ZDmXHrT6ybkelw1JUaahxI9PaY73hc8CrzxY2/KkPgjmGJPICoQp5TsWVaypxcjhdGWd1CXSFznFxeTm5zjVxPOarsJKCSSPNXlVk5NmjFIRgWnnGKi0nrLYScczRK5zHbj6x+CSUkTbhPvKrRiQW0HKVPvKEs9rZghHHuCLyGo09gJbQUa403bkadIrZKtFsjY+6auJ6sFJq+ZEIvJd5MvWa1MPOVTKEjXTrQZcjlON1wdzhVOK1miMnqaYEVobU33Y7tibg/lRGNWN/fecaaauLS0jLPFOMbaRTnfPGzM+0yDnOAor4ox1Joa1nEcXExx+TiqNP5s+tkq7zQzfKiAO5APJ+CtsUXPOn64sJBLJCRhUtYajceNisXaVLY/LmdF9B13plHz5GlpTWqFne5MTyH2dj6ANwBlmFPC5CoQx9OLk7PO+7mWVeh601BKUfdVte19xXi13szDebDKKUIBDHUINcDfBUn0jSatZ+XMjHoSvGSkpR8+R7bDaw9rXtODgHDmcKj1rMkdFyMDX6cNsT5SCeCcySjQCfCuHAkDJ52qcJ9U8tlNWj7RHq1pe08pZrjEDUMlFMsGdtaO0qWx+XMwdg4i98qPnyLo17s58KGWu9oYP9xLtKnqT8uZPsSs9Lj58ju9Qray1MNobG9rGuLWGSlXEeE4UJwGVd9dyjPEqrH3bltHAvDz99ptaLX9UjsOGVGSbMs8p0/wB0SzPtTqtleyKrIywMLXGvHkFXitSKA7gN6VPE04POmFbBVaqVmlx5FH/EGzB94QzY5gtj+6RaO0KdrNPy5mLsasp5SlHz5Eo7o9nGUdo6oj/eo+309j/3iWLomstEl58hx3RrLWphtFRkQ2OvvEvb4akxromre7kvPkM7ui2U5xWg84jH+4n7fDYxPoiq9Ml58iKXX6yfFhtAPNHn6RNdIw1p/wC8SuXQtT5ZJceRCO6BF8mbqj7al2jR2Py5kOxcV9cfPkTO7osGyObqjP8Aeo9oUdj8uZN9D4nVKPnyBPdDg2RSjmbGP9xPtGlsflzF2NiNUo+fINndGgAIEc48kZHlPCVSfSFJvQ/LmSj0RiErKUfPkC3uiw41jmP9MeH/AHp9oUdj8uYl0Pidco+fIdvdDs22KbyCMf3pdo0tSflzGuhq+uUfPkNNr/ZT/Bn6o/8AkTXSNNan5cyMuhar1x8+R1FntolZFK0Ua+GJwvHjUpuB+9acPNVIuS1tmHGU3RnGnK11FEwun4pKuz7TN7r1HhhXm
T3Zqae8GyfVI/fWhAlpMpAz3zUPSHCaPsxri2MRnniJj/sXSo54JnGxDyaskX9YIeGss8PjIpGjnLDd+2inOF4tFdOrkyT7z5zaaiq5J3TQ0Hop9qnZAzAvOLqVDGDFzzzDrNBtU4Qc5WRXVqKnFyZ9B6PhZBEyGIXWRtDWjkG87TtJ2krqKCSsjiyquTuzle6brP3vZ+AjdSWcFtQcWRZPfyE+COcnYqMRLJVlpZpwkOsld6EeLhYDqiQAkAJACQA4QhM9f0HoCCdxDwxoBAoGRVNQ84XyMrmyuflXqMRkU1mgn4btie08Vg+srt5VSS8d+1rZ3mjY9U7K4Orjdo6rIo6FpEZoCLwLuMW4EipGdCqJ1UrWhHPtW/uWbXoNVKhKSd6s3bY9WbvefVpI59VLM2IvN2ojD6cHGKFxddvCtQDdDQM6uCcakZTUerjpto/WrTuIzoyjTcutlovp36c+vRvZYi1Qspja+7i5sZ8COl5zLxAF2uw05s1B1UptZEdL1d9i2GHbgpOpPOl82tq5JNqXZWuYLuDn3TxYyaXHnY3DFhFeUFRjXTT9yOjZ3rmSlhnFxXWTzv6u58jI1m1ehs0gY2MEFtauYytdowHN1rThnCtBycY8DFjVVw9RRjOVu9njjMhzBeYPavSeyavg952U/wDTxc+S7mA/it3nlOl0/aL9yNFltcNgWx00c5YiSPDS07j1LzB7s1NPNN2yYH90j2fzrQgitJl3DuPUgkeodyy3HvaSI/w5SR9F7QfaD108Fng1sZw+k3k1E9q/B2gtK2ZBzetPBNKWQxzSxgGjJJGjD4ocQ37KLhVI5MnHYz1VKeXTjLakel9zjQ/e8JneP1swBFc2RZtbznwj/TuXTwlDJjlPS/wcTH4tSnkR0L8nV2vSLY2OkeaNYC5x5B6ytMkopt6DHCTnJRjpZ4fp3SclqnfO8HjHit+QweCwcw+0k7VxKk3OTkz01GmqcFFFC4dx6lAtFcO49SAFcO49SAFcO49SAFcO49SAFdO49SBHRx632hpq2NgI2gPB9pdR9LVWrOMfPmcSPQVGLvGcr+HIdmuNpAc0MYA6l4APoaGorxscUn0rUbTcY5t/Ma6DopOKnKz06ORI3Xi1ht0ABtCLoMgFCCCKXtxPWl2nNu+RG+58ya6Hgo5PWTtvXIdmvVsGVBgBgZMm0oPC2UHUk+kpPTCPB8wXREFoqT4rkE7X62nM5cslciM73KetJdItf/zjwfMk+iovTVnxXIhtGulqk8NrXZ+FfNKmppV29Sj0pOKtGEVx5kJ9C0pu8qknva5HMALmHZec9k1bP7JZht73i2/NXbwP8PieX6Vf/kNdyNHvYnG9TnH4LZl2Ob1Lee54qV5k9waenMrL9Uj99aEAZiAOo7n9ruTSM+WwHysd/wDsro9Gv/kcdq/H/wBON02rUYz2O3Ffo7vvpdnIPMdecXatDibSLy4Vi4kr9xq0NDPK5jvICuXLCZeKd9GZ/ryO/DpJUuj4tP3s8VxvfwTXjY7TvpdTIOB15xOvemr5FmaeK2jpOV2bWeTBx5bu5cjpCtn6pePI9J0Nh3k9fLXmXq/Tici7I0pXZzrmHdO6j/Rygvd/1oK0u0rtonmFnCrq3/1//almDOcRaAwyP4Oty+7g73hXLxuXvnXaV5UAdVoPUPSBnhMtgkMXCx8KHFrBwd8cJm4Hwa5Jhc3dbe5ba3WyU2KztFmNws/WMaG8RoeKOde8IOOI2osJM4nWTV6ewSiG0hoe5gkF114XXOc0Y0zqx2HMkM1NBaL0RJAx9r0hJBMb16JsD3taA9wZRwYQatDTntQGc2NHaraEnlZDDpSd8kjrrGizPFTzmOgwBNTgAExXZymtmiGWO1zWZkvCiIgX6UNS0OLTsqK0NPsyCGizqDYYp9I2WGZgfG97w5prRwEMjhlytB8iEDKetNmZFbbVFG0NYyeVjWitGta8gAeQIAzEAVEEj2TV5o7zsuFf2eHLm
XawD/4vE8t0sk8R4IlllLTQ3hyVW9K5x5Np58x5EV5Y9+aWnMrL9Uj99aEAZqANHV2a5aYzsJun+oFo+0ha8FPJrx4cf2c/pWl1mDqLYr8M/wCDv769PY8FYV5FgKWmNJCCIvzOTRvccvJmTyArPiq6oU3PXq3mzAYN4qsqerS3sX+zLvPPCSSSTUkkknMkmpK8s227s+gRiopRirJZkaGgLXBFMH2mz98RgOBivXKkjA3huSGddZdYtDySMjGhcXvYwftDs3uDR9pTFnNbXG0aHsFqdZjooSlrWOLhM5o44Ju0JOynWgFc4nTlps9slhZYLCbO41ZwYkvmV7y0MoXUApiP6khnRSauazSg1da250JtrGgHmbN9yYsx2HdI1Nt9ulgmgkbGBCGStfO+NgeHF2AYCHeERXkCGJM8x1o1NlsDGSSzWeQvfcuwyF7gbrnXnVaMOLSvKEiVznCUDPVdRdXZ7BYp9Kus75LRwR72hpVzWuwMz25450GNwHa6gZFs8sdI55L3OvOeS9zicXOcbznHlJJPlSGdL3Mf82sfSSf+PMmhM6zWHTOhZ7XaIbdY32eRk0rO+rPiXFry3hJGsFS454tegFc5bXDVWCyxx2izW6O0wTOLGUpwoLW1dW7VpptPFoXAUxQNHFJEj1vQZPetloQP2eHHbkV3ej/4fFnkemX/AOV4InLsTfJduIIot1thyL7Tykryh9DNLTmVl+qR++tCAM1ABRyXSHD4pDvK01+5OMslqWzPwIzhlxcXrTXHMekB9cRtxXs1nV0fOHFp2YqosKxw2n9I8PLgeIyrW7j8p/l9QC8xjsT11TN8K0cz3HReC9mo+8velnfdsXh+bmcsR0hIA9A1G0BZrOyPS1vtMQhYS6GFhvySSsOAI+U1wrcFTUAkgAgsTOP1i0u62Wqa1PF0yvvBud1oAYxvKQ1rQTvBSGUYpXNcHMcWuaQ5rmktc0g1BaRiCDtCALUul7U7wrXaXfSnld63IA7XW4C06B0bOeOYZHQOLsTQCRhJryws60xazztsYGQA5hRIZ6Loqz6I0dFHap5hbrS9rZIrNGKMYSKtMoNbpG9+7BhITFnZj/4hW/v3v7hON4PAY8BwVa8Fd+2/nXHkRcLFnugO0dOyK3WN4jktDnCay0xY8Cr5CBgw1oDsfeDh8aowRS7mP+bWPpJP/HlQgZS10/zC2fWZ/eOSGYl0Z7eZAFVBI9O0dLSCy5fu0OZ+aV3ejn/w+J5Dptf+T4IeS045DqW65yVFsy3au2ffJ5w/BYezqPfx/R1u28VsjwfMu6U1fgdwFeEo2zsaKOAwEsx2t5SqqeApScr3zP8A2ovr9L4inGDWTnV3m733lIauWY+N85vZVvZ1Hv4/oo7cxOyPB8x/0Yg/meeD/ajs2j38f0Lt3E/9eD5hN1Vg3y+cOyn2dR2vj+g7cxOyPB8x/wBFYf5vnj72o7Oo7XxDtzE7I8HzEdV4Ngl89vqupdm0e/j+h9uYnZHg+YB1YhGfCecOyjs2j38f0LtzE7I8HzBOrMIOPCDneOyjs6j38f0PtzE/9eD5g/ozBX4/OHDso7Oo9/H9B25iv+vB8xSauQjLhPOHZSfR1Hv4/oa6cxOvJ4fsjGrsRy4Trr6mpdnUu/8A3gT7axGyPD9kbtAxbC/yvaPuS7Ppd/Ea6ZxGxcHzIv8A4GIkGricswSPsS7Ppd/En2xX2Lg+ZN/8BFvf5w/BPs6l38f0V9tYjYuD5hfo9D8p/nDsp9nUu/j+hdtYjYuH7JYtW4T4zzh2U10dR7+P6Iy6bxK+ng+ZKNV4N8nnDsp9m0e/j+iPbmK2R4PmL9F4N8nnDso7No9/H9B25itkeD5gu1YgG1/njso7No9/H9B25idkeD5kEmr8I2v84fgovo6j38f0Tj03iXqjwfMqv0BAMuE84fgo+wUe8tXTGJeqPB8zftjGxtgY0YNs8IFTj4JVuGioRcVqbKMfKVSUZvS4ogLgtJz7M
t38cXdRCmmUtbEX9KSCkOZ/Ut5vhJVVSeee/wBEasTF5FL7fVlMSbP/AGFdcx2CEp/ITuFiYSfOQIJz6mgcaddEDzXGcwjCrTy5+vFCE1YYQ8358qAJBCNtUADJcAwBryYdaB6SrJTPEeVIEQPJJqDTlqa+Sii85YsxDJGXGrnDy1qk1csU7aESRQAcvk/FNRISm2TWazXnNaBi5waK73EAY5bUOyV2OKc5KC0s6Y6j2zxTfSM/FZvbqG3yN/Y+K7uIUepVsH8JvpG/ij2+ht8gfQ+K7uIQ1Ntvim+kb+KPb6O3yF2Nie7iMdTLb4pvpGfij2+jt8g7GxOxcRjqXbfFN9Iz8Ue30NvkHY2J7uJC/Ua3H+G30jfxSeOo7fImuiMStnE5C0xFri05gkHnBoVeZbOLaLmlW1MWB+Ai9Spo/NvZpxb+D7UVQ3kHldT1q8xX7x9vhIE9xq6RYKQ4j4BufSSqFFZ57/RF2Lbyaf2+rImkfKHkK0GFpk15nJ1oFZgOc0ZOqgLMXfVMgPIi40mAJqnIIuJoMTu2IATpjvJ5kAVHS8ijcmokT5uX7VG5YoEbp6ZFGUSVO+kGM1215wkiTzFoP2H7Ap3KcnWXNFfvENBhwsWefhtUKnwS3P8ABdhv5ob0ezaZtL447zASQ5pNA0gMBq+9eIoLoIrsqFwKEIylaX+eo9jiJyhG8f8ALX5HPWXWKR0UkgLqNE4FQz4R8p73a7GraC62lMSeSp3TwkYzUXryduhL3jnU8dKVOU1qytmlv3b7NhtaN0o5zxA+GUPaxrnvdwVMQQHG485ljsAFkq0Eo5akrXzJX9VqubKOIlKSpyi7pK7dvR67AW/SMvC8HDG9xiLHSU4K65j2uo3juBBwqCN2KdOjDIyptK97adK3IVWvU6zJpxbta+jQ75s7RXktNtuPa2GSrn1a8mz3o4yQSKX6OcOMBXDKuWNihh8pNyWjOvezvho2lTqYnJaUHneZ+7mXHTs8Lmvoy2iaMPDS3F7SHUqCx5Y6t0kZtOSy1afVyyb7PNXNtGr1kMq1tOnudjwXSb6TSGmIkf7RXfTzI8jUV5y3sPTs1TFsHAxGg5iqqb+LezRiIWUPtRnB35qrTNYk2poizT0ocIMf4DfezKqlplv9EX4le7T+31ZSMnKr7mTJCbTlQJ3JBTlUiDuyRoTRFkgfyDqTuRsC56VwSKkkx3qDZfGCRC5/L61G5Yl3AcyRIQAOyvlQDui3Z8BhgrEUTzseo/JQLOXtEOHDwj+bF7bVGo/cluf4LcMn10H3o9h1kB4Ev4MS8Gb9wl9HXQaC6wG9jTA4bdi4eF+O17XzXzeujwznrsZ/HlZN7Z7Z/TT45tZhaOsjbQDE9sL2saXSWhjz4cvCPIHFANHOvXa0bVu0LZVqOk8uLab0Ra1Ky2+F9ecwUaarLIkotJZ5J63d7NTz21ZjR1StBm4WV7mmQmNhDa+AxguuxGTi57xyOCoxsOryYJZs78W/TMjRgJuplTk8+ZeCWZ+OdreW9JxukN11kbK0GoLnsxNM6EYZkKqk1FXVSz3MurRc3aVNSXe0YFl0aHzSO7yZSJ7WtY10bA03GPq4gVeeNvplhVbZ1nGml1jzrS7vW14fkwU8OpVZPql7rzJWWpPx/HcddY5HubV8fBmpwvB2G+o8q5k1FP3Xc69Nya95WPnzS5/XSfTf7RXeWhHkp/G97D0ycYegh9RVVL5t7NOJ+T7UUmSkZH7AfWrk7GNxTLGKkVsv6WOEHQN97MqqWmW/0RqxPw0/t9WUWq4yMnYaZYKSKmr6SQPO9O5GyJQ/DZ1JkLEb383UlckolSaWqg2XwhYgL1G5ZYEvSuSsDfRcdiWIfmiaISLXlUykYORcLFzQrq2iHpYvbaoVPgluf4LsOrVYb1+T3S12YSNulz2iubHFjua8MQF5+E8h3svHOeyqQy1a7W52KTtX7PS6I7oNA66SOEaDW7Ic3gnOuJVqxVW927+m7YUex
0bWStttr37S0dHx8I2UCj2tLAWkgFvyXAYOAzAOSr62WQ4ann8S3qYZanbOs3hsJ5WXmltSKgircCK7Qd6gnZ3LJK6sV9H2AQ3qOe8vdfc55BcTda3YAMmjYrKtV1LZkrZsxVSoqnfO227tvgW1UXHzlpg/rpfpv9or0OpHkZfG97/IemjjD9Xh9RVNL5t7NOI+T7UZ9VaZrFkuUymxpaVGEHQN97MqqWmW/wBEaMT8NP7fVlQK8xEjUyLJAVIgC56VxpFaaVQbLowIC5RLLAFyRKw15A7BNQJliIKaKZBEpisOHICxe0If2iDpY/bCjP4Huf4LcP8Ayx3r8nvNpkLWOc1t4taSGjAuIFQ0HlyXnopOSTdj2M5OMW0r9xx1s0lpOZjnMhFmjDS4ucePQCpArjWnzRzrqQo4SnJKUsp+X+8Ti1K+PqxbjHIVr59P+8DR1AtD5LKXSPc88I4Vc4uNKNwqVR0lCMa1oq2Y0dEVJTw95Nt3ek6RYDqCQAkAfOOmfh5fpv8AaK9BqR5KXxPewtNZw/V4fUVTS+beacR8n2oz6q0zFoBWIpZq6Uyg6BvvZlVS0y3+iLsT8NP7fVlJqvMbJAUyIznouCiV5ZVFstjErlygW2BLkDSGqkOw4TBkrAmiDZOclIqGQMcIEXtCfvMHSx+2FGfwvc/wW4f+WO9fk9/XnT2RU0v8BN0cnsFW0f5I71+Sqv8AxS3P8GD3OP3Q9I/1NWzpT+fwRzuhf/W8WdSucdYSAEgD5w018PJ9N/tFeg1I8m/ie9h6azh+rw+oqml829mjEaIfajOVpnLoVhmZp6Vyg6BvvZlXS0y3+iNGJ+Gn9vqymFcYmM5yBpEMkii2WRiVyaqJbawzigaQKQxAIBkgCZAnjapIrkx3JiQkAEECZd0H+8wdLH7YUZ/A9z/BbQ/ljvX5PoBedPZFTS/wE3RyewVbR/kjvX5Kq/8AFLc/wYPc4/dD0j/U1bOlP5/BHO6F/wDW8WdSucdYoSaYha57XEi5QF10ltT8UEDMVHWFcsPNpNayh4mmpNPUWbNamyVukmmdWub6wFXKDjpLITjLQfO+mh+uk+m/2iu9qR5V/HLe/wAhaazh+rw+oqml82804jRD7UZ9FcZS9RWGdmlpTKDoG+9mVVLTLf6I0Yn4af2+rKDnK4ypEL3qLZNIgcVEtSsMUACkSFRAEjWpkGyRrU0RbJqKRWCgY4CACTIlvQf7zB0sfthVz+F7n+C+h/JHevyfQK88exKml/gJujk9gq2j/JHevyVV/wCKW5/gwe5x+6HpH+pq2dKfz+COd0L/AOt4s6lc46xyukhLdlJinYwvYWNaYA0AmMuqA6t4vvmvKF0aWRePvJuzv8Xf3bLHKq9Zky92SV1ZLJ7r69N7m7o5zqEOZKNtZTGa12Dg3Hdt3rHVS0prwv6o30XLOmn429GeAaZH66T6b/aK7mpHlpP33vYWmhjD9Xh9RVNL5t7NWJfwfaihRXGS5dAU0UM0NLnCDoG+9lVVLTLf6I1Yhe7T+31ZluKsuZ0iFxUSxIaiBjUQMVEAExiERbJA1SIkrGJorbCITECQkMcNTEEQgEHo6cRzRyOrRj2ONM6NcCadShJXTW8upSyZKWxo9RHdMsni5/NZ21zOz5/UvPkd3tel9L8uYz+6RYyCDFMQRQgtYQQcwRfTWAqJ3Ul58hPpai1ZxflzI7L3QbDGLsdnlY2taNjjaK76Byc8FVm7ykm/HkRh0nh6atCDS7kuZL/iXZPFT+aztqPZ8/qXnyJ9r0vpflzGk7o1jcKOhmIwwLWbDUfH3hNYCondSXnyIy6WoNWcX5cw/wDEmy+Kn81nbR2dU2rz5B2zR2Py5nlGkHh73uGTnOI30JqunayscNyvJsl0y3GHoIfUVRS+bezZiX8H2oohquMlyy4qZSW9MnCDoG+9mVFLTLf6I2Yj4af2+rMtxVhSgQEBcVEDFRILiDUBclDVIhcNrUyLZLRSK7iok
O411AXCDUxXGeEhogcFEsRV0hO6MR3KVfI1mOVHVVFepKCjk62lxNeEpQqueXe0YuWbuH75cHmN1LwYZK5spWgrka1qjrZKThLTa/cHUQlBVIfDlKNtd9O4sB5yJFcBkc7taHca7DsKtyn/ALd/vAoyFpSflt883mMyQ/GIyDsnZDwzjsoQkpvX6+I5U4/KnptpWl6CZrnVwu0vU+NkMHY76qWU+7/afMrcI2s73t3eHkPefvZTi7Hf1eo05ksqe1efiGTT2S17PD9jvarCpMtaYbjF0EXqKopL4t7N2Kfwfaig1qusZGwnFAi3pnKDoG+9mVFLTLf6I24j4af2+rM4BXGa49ECGogdxUSC4cbE0iMpEgapELkzGJpEHIItTI3EWoHcQaiwrjlqBXI3hJk0yO6lYncJ9nDhQ9eFQd4qMDypSgpKzCFZwldFdmi2BxdVxJaWGpHgk12BVLDRTcru7VvA0Sx9RxUbJJPKzX08SyLK3HDAkEjYSAAD1AdSt6tGf2iSS7tevPn5j96DAEk0BbjTwTSrcuQY58qOrWj/AFv94i9oabaS038dv+zdxILKMeU3vLh+AUurRF4iXlb88wu9W7vzj2j1o6qJH2if+8OSGcxSsJSLOl2YxdDF6is9FfFvZuxTzQ+1FEMV1jI2QOUS0vaXGEHQN97MqaWmW/0RrxD92n9vqyhdVxkuINQFx7qAuO1iLCbJQ1SsQuG1idiLZMGpldxXUBca6gLhBqYriLUAmRuakTTEGIsDkTXE7FdxXEWDKHDE7CuOGIsFx7qLCuPdTABzUrEkyzpVuMfQxepZ6Kzy3s3Yt5qf2opBiusY8opXVWaWzR0q3CDoG+9mVVLTLf6I04l+7T+31ZQuq4yXCupiuNdQFyVrE7EHIMNTFckYxNIg2FdTI3FRAXEGosFwg1AriLUBcAtRYlcdjECbJriZEcMQAVxABBiBjFiBCDEDBcxAixpNmMfRRepUUdMt7NuLean9qKlxXmIz7iqsamzQ0o34HoG+8lVVLTPf6I04l+5T+31ZSuK6xkuK6iwrhsYnYTkHdTsQuE1idhNktxMiPcQAgxADhqACuoAa6gBXEAOxiBklxAWHuoAMMQOw91IBrqYWFcSAZzUwJ9IsxZ0UXsqmj829mnFaKf2L1Kt1XGUzbqgWtl/SbcIegb7yVU0dM9/ojVin7lL7fVlK6rzHccNRYTZKGKRFsdrEATMYgB7qAHLUAINQAQagLBFqBjXUAK6gAmsQFggEhj3UAE0IAYoAZADhqAFdQBYtzcWdFF7KqpfNvZpxOin9i9SqArTMZl1IGy9pIfA9C33kqpo/FPf6I14r4KX2+rKgYtFjFcNrECJGhABhqBhBqBjhqAHISAcNQA4agY5YgAS1ADgIANoQMe6gLDoAYBADhqAsPQIAZABAJDsWLa3FvRR+yqqXzb2acSs0PtXqVlcZjKuJ2K7l7SI+C6FvvJVRR+Ke/wBEa8V8FL7fVldrFeYwqIGExiASJQ1IkFRACAQAqIAcBAw6IAYtQFhqBADhAD0QMdIBUQA4QMQQAqIFYeiB2EGouPJZYtoxb0cfsqml829mjE6IfavUqlXGVmeRzKRWXLePguhb7yVU0dM9/ojXifgpfb6srtarjISXUhhNCBjoAcIAK6gdh2tQNIJIYJTEJACqgQTUiSCLUrkrDJizBNalcaiPcSuPJGup3FYVEAEkSFQICyJrZm3o4/ZVVL5t7NGI0Qt9KIFaZjNAVhnNC0Rh4jIfFhG1pDpY2kEPkJBDnA7R1rNGeRKV09Ox7EdCpSdWFNxazRtpS1vayPvb+ZD6eLtKfXR7+D5FPss9sf7R5i72/mQ+ni7SOuj38HyD2We2P9o8wuA/mQ+ni7SOuj38HyH7LPbH+0eYuB+fD6eLtI66PfwfIPZZ7Y/2jzHFnHjIfTxdpHXR7+D5B7LPbH+0eYRiHjIfTxdpLro9/B8h+zT2x/tHmMIP5kPp4u0n10e/g
+QvZp7Y/wBo8x+9/wCZD6eLtJddHv4PkP2ae2P9o8x+BHjIfTxdpHXR7+D5B7NPbH+0eYuBHjIfTxdpHXR7+D5B7NPbH+0eYuAHjIfTRdpHXR7+D5B7NLbH+0eY4hHjIfTxdpHXR7+D5DWGltj/AGjzC4IeMi9NF2kuuWx8HyH7PLav7R5iEQ8ZD6aLtI65d/B8g9ne2P8AaPMfgx4yH00XaR1sdj4PkPqJbY/2jzG4MeMh9PF2kdbHY+D5C9nltX9o8xGMeMi9PF2kdau/g+Qezy2r+0eY3BDxkPpou0n10e/g+QvZ5bY/2jzDaweMi9PF2lF1Y9/B8iaoS2x/tHmOWN8bD6eLtI62PfwfIboPav7R5itTmkijmuoxgJa4OFQMRUYFFL5n3sMQleCvoilmzkeCtKMxntad6mZgxXegYQJ2IHdjVO1AXYV9FguPf5UrDuECgdxAoAK8iw7iLt5RYG9pW0hbxEAS0mtcqbOdVVJqGdl1Kk6t0nYoy6wxta15a7jNkd8Wv6twa4EA4GpGBxxG3BVSxUEk2tvkaIYCpKTimszS168+z/bs5Zm02xj7hD73FGAbTjNDsDWmRTlXipZNghhJyhl3Vv8ALYaDjTatBieYdpKBpscFIdxFxRYG2CKpkc4ZJSzE86EDyoBMcuSsNsQKAuOgYJCCNikxisM9h6IAMc6CQ14bECH8iBhBvIkOwTWoGkOUgdxUQFgSExAT2YOpe2Go6iPvUZJSLISlC9tYBsDCA2hwaWjfRxBz8mCr6qNi1V53uttyJ+hoiahpGyjTQAXODFBkKNyUXQg/93WLI4qqs2n/AO3/ACX7quuZckdA9wxciwrhNQSQ4CQ0hXUXHkioEBZDEIE0wgEDsMgBXUXFYqOKsM7Ym5IAEoEOwYoGtJI0pEk7BFxolYbbsJibCIb1FEpAtCbEgQUyK0hhIkEEiaBKZF6RFAahbEaxag2JMnEc5JDegAFMhckSLAQgitIQOKCWsNImMUCYKZE//9k=
|
| 122 |
+
input2 (text): default
|
| 123 |
+
input3 (text):
|
| 124 |
+
output1 (json): fvinfo
|
| 125 |
+
"""
|
| 126 |
+
|
| 127 |
+
print("baseimg2fvinfo openai_key:",openai_key[-4:])
|
| 128 |
+
if openai_key == "default":
|
| 129 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 130 |
+
else:
|
| 131 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 132 |
+
|
| 133 |
+
messages = [
|
| 134 |
+
{
|
| 135 |
+
"role": "system",
|
| 136 |
+
"content": "あなた�E優れたWEBマ�Eケターで、ランチE��ングペ�Eジの要素を見�Eけることに長けてぁE��す。また�EーケチE��ングの達人なので訴求テーマを言語化するのが上手です、E
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"role": "user",
|
| 140 |
+
"content":[
|
| 141 |
+
{"type": "text", "text":"""LPのファーストビューの画像を解析します、E
|
| 142 |
+
・何も書かれてぁE��ぁE��像�E場合�E、空の値を返し、E��LP=Trueとしてください、E
|
| 143 |
+
・CTAボタンが存在する場合、�Eタン冁E�E記載�E容を�E列で教えて下さぁE��アンカーリンクのあるチE��ストもCTAとしてください、E
|
| 144 |
+
・画像�Eに書かれてぁE��斁E��・コピ�Eを読み取り、LPに掲載されてぁE��頁E��に並べてください。大きい目立つ斁E��で書かれてぁE��冁E��を「main_copy」とぁE��キー、それ以外を「sub_copy」とぁE��キーで、読み取ったテキストをtext、それぞれ�E斁E��がどんなフォントで書かれてぁE��のか、「ゴシチE��体」や「�E朝体」などのフォント情報をfontをキーとしてできるだけ正確に付与してください。フォント�E色、テキスト周辺で関連するアイコンめE��ジュアルも抽出してください、E
|
| 145 |
+
・画像�Eに写ってぁE��イメージ(写真めE��ラスチEにつぁE��、どんなも�Eが起用されてぁE��か教えて下さぁE��E
|
| 146 |
+
・画像�Eに該当�E値���なければ[]のように空の配�Eを回答し、画像になぁE��とは回答しなぁE��ください。特に黒一色めE�E色一色の場合に注意し、E��LP=Trueを返してください、E
|
| 147 |
+
・これら�E抽出惁E��を総合して、メタの吁E��E��を記載してください。訴求要素は、情報かOCRがある限り�E20斁E��で6種類提案してください。情報がなければ空にしてください、E
|
| 148 |
+
""" + p}
|
| 149 |
+
]
|
| 150 |
+
},
|
| 151 |
+
]
|
| 152 |
+
|
| 153 |
+
messages[1]["content"].insert(0, {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{base64img}"}})
|
| 154 |
+
r = ask_raw(messages, "meta-llama/Llama-3.3-70B-Instruct")
|
| 155 |
+
|
| 156 |
+
return r
|
apis/baseimg2fvinfo_rect.py
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
import numpy as np
|
| 10 |
+
from enum import Enum
|
| 11 |
+
client = LLMClient()
|
| 12 |
+
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 16 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 17 |
+
from src.clients.llm_client import LLMClient
|
| 18 |
+
import json as _json
|
| 19 |
+
|
| 20 |
+
client = LLMClient()
|
| 21 |
+
system_prompt = None
|
| 22 |
+
user_text = ""
|
| 23 |
+
images = []
|
| 24 |
+
for msg in messages:
|
| 25 |
+
role = msg.get("role", "")
|
| 26 |
+
c = msg.get("content", "")
|
| 27 |
+
if role == "system":
|
| 28 |
+
if isinstance(c, str):
|
| 29 |
+
system_prompt = c
|
| 30 |
+
elif role == "user":
|
| 31 |
+
if isinstance(c, str):
|
| 32 |
+
user_text = c
|
| 33 |
+
elif isinstance(c, list):
|
| 34 |
+
for part in c:
|
| 35 |
+
if isinstance(part, dict):
|
| 36 |
+
if part.get("type") == "text":
|
| 37 |
+
user_text += part.get("text", "")
|
| 38 |
+
elif part.get("type") == "image_url":
|
| 39 |
+
url = part.get("image_url", {}).get("url", "")
|
| 40 |
+
if url.startswith("data:"):
|
| 41 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 42 |
+
else:
|
| 43 |
+
images.append(url)
|
| 44 |
+
|
| 45 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 46 |
+
result = client.call(
|
| 47 |
+
prompt=user_text,
|
| 48 |
+
schema=response_format,
|
| 49 |
+
model=model,
|
| 50 |
+
system_prompt=system_prompt,
|
| 51 |
+
images=images if images else None,
|
| 52 |
+
temperature=0,
|
| 53 |
+
)
|
| 54 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 55 |
+
else:
|
| 56 |
+
return client.call_raw(
|
| 57 |
+
prompt=user_text,
|
| 58 |
+
model=model,
|
| 59 |
+
system_prompt=system_prompt,
|
| 60 |
+
images=images if images else None,
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class Category(str, Enum):
|
| 65 |
+
ビジネス = "ビジネス�E�EaaS・法人支援�E�E
|
| 66 |
+
ヘルスケア = "ヘルスケア�E�美容・健康�E�E
|
| 67 |
+
ヒューマンリソース = "ヒューマンリソース�E�求人・紹介!E
|
| 68 |
+
コマ�Eス = "コマ�Eス�E�趣味・食品・衣類!E
|
| 69 |
+
ファイナンス = "ファイナンス�E���融�E保険・不動産�E�E
|
| 70 |
+
インフラ = "インフラ�E�電気�E通信・ガス・住屁E��E
|
| 71 |
+
ライフイベンチE= "ライフイベント(教育・結婚�E相諁E��E
|
| 72 |
+
|
| 73 |
+
class CategoryMiddle(str, Enum):
|
| 74 |
+
# ビジネス
|
| 75 |
+
ITソフトウェア = "IT・ソフトウェア"
|
| 76 |
+
マ�Eケ支援コンサル = "マ�Eケ支援・コンサル"
|
| 77 |
+
オフィス機器用品E= "オフィス・機器用品E
|
| 78 |
+
|
| 79 |
+
# ヘルスケア
|
| 80 |
+
健康食品器具 = "健康食品・器具"
|
| 81 |
+
美容医療クリニック = "美容・医療クリニック"
|
| 82 |
+
美容コスメ = "美容コスメ"
|
| 83 |
+
フィチE��ネスジム = "フィチE��ネスジム"
|
| 84 |
+
|
| 85 |
+
# ヒューマンリソース
|
| 86 |
+
求人惁E�� = "求人惁E��"
|
| 87 |
+
人材紹仁E= "人材紹仁E
|
| 88 |
+
人材派遣 = "人材派遣"
|
| 89 |
+
|
| 90 |
+
# コマ�Eス
|
| 91 |
+
動画アニメゲーム = "動画・アニメ・ゲーム"
|
| 92 |
+
リユースリサイクル = "リユース・リサイクル"
|
| 93 |
+
旁E���EチE��レジャー = "旁E���Eホテル・レジャー"
|
| 94 |
+
趣味交隁E= "趣味・交隁E
|
| 95 |
+
新聞雑誌メチE��ア = "新聞�E雑誌�E惁E��メチE��ア"
|
| 96 |
+
自動車レンタカー用品E= "自動車�Eレンタカー・用品E
|
| 97 |
+
飲料食品生活用品E= "飲料食品・生活用品E
|
| 98 |
+
家電パソコン = "家電・パソコン"
|
| 99 |
+
ファチE��ョン = "ファチE��ョン"
|
| 100 |
+
|
| 101 |
+
# ファイナンス
|
| 102 |
+
不動産 = "不動産"
|
| 103 |
+
保険 = "保険"
|
| 104 |
+
ローン = "ローン"
|
| 105 |
+
クレカ電子決渁E= "クレカ・電子決渁E
|
| 106 |
+
証券FX先物 = "証券・FX・先物"
|
| 107 |
+
銀衁E= "銀衁E
|
| 108 |
+
|
| 109 |
+
# インフラ
|
| 110 |
+
ネット通信サービス = "ネット�E通信サービス"
|
| 111 |
+
電気ガス = "電気�Eガス"
|
| 112 |
+
住宁E��備リフォーム = "住宁E��備�Eリフォーム"
|
| 113 |
+
|
| 114 |
+
# ライフイベンチE
|
| 115 |
+
士業相諁E= "士業・相諁E
|
| 116 |
+
学習スクール = "学習�Eスクール"
|
| 117 |
+
結婚�E会い = "結婚�E出会い"
|
| 118 |
+
葬儀墓地 = "葬儀・墓地"
|
| 119 |
+
引越し介護 = "引越し・介護"
|
| 120 |
+
|
| 121 |
+
class Meta(BaseModel):
|
| 122 |
+
会社吁E str
|
| 123 |
+
業畁E Category
|
| 124 |
+
中刁E��E CategoryMiddle
|
| 125 |
+
サービス: str
|
| 126 |
+
啁E��: str
|
| 127 |
+
タイトル: str
|
| 128 |
+
訴求テーチE list[str]
|
| 129 |
+
|
| 130 |
+
class cood(BaseModel):
|
| 131 |
+
x: int
|
| 132 |
+
y: int
|
| 133 |
+
|
| 134 |
+
class str_with_rect(BaseModel):
|
| 135 |
+
text: str
|
| 136 |
+
html: str
|
| 137 |
+
rect: list[cood]
|
| 138 |
+
|
| 139 |
+
class FvInfo(BaseModel):
|
| 140 |
+
非LP: bool
|
| 141 |
+
メタ: Meta
|
| 142 |
+
メインコピ�E: list[str_with_rect]
|
| 143 |
+
サブコピ�E: list[str_with_rect]
|
| 144 |
+
権威付け: list[str_with_rect]
|
| 145 |
+
ビジュアル: list[str_with_rect]
|
| 146 |
+
CTAボタン: list[str_with_rect]
|
| 147 |
+
|
| 148 |
+
def ask_raw(messages, model):
|
| 149 |
+
response = _ask_raw_hf([{"role":"user","content":p}], model,
|
| 150 |
+
model=model,
|
| 151 |
+
messages=messages,
|
| 152 |
+
top_p=1,
|
| 153 |
+
frequency_penalty=0,
|
| 154 |
+
presence_penalty=0,
|
| 155 |
+
response_format=FvInfo,
|
| 156 |
+
temperature=0
|
| 157 |
+
)
|
| 158 |
+
return response
|
| 159 |
+
|
| 160 |
+
@customtracer
|
| 161 |
+
def baseimg2fvinfo_rect(base64img, openai_key=os.environ.get('OPENAI_KEY'), p=""):
|
| 162 |
+
"""
|
| 163 |
+
input1 (text): /9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxISEhUSEhIVFRUVFxUVFxUVFRcVFRUVFRUWFhUVFRYYHSggGBomHRUVITEhJSkrLi4uFx8zODMsNygtLisBCgoKDg0OGhAQGy0lHyUtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLf/AABEIAQsAvQMBEQACEQEDEQH/xAAcAAABBQEBAQAAAAAAAAAAAAACAAEDBAUGBwj/xABPEAABAwEEBAcKCggFBAMAAAABAAIDEQQSITEFBkFREyJhcXOR0gcUMlOBkpOxsrMzQlJicnShwdHwFiMkNENjgsIXNaPD02SitOEVg+L/xAAbAQACAwEBAQAAAAAAAAAAAAAAAQIDBAUGB//EADoRAAIBAgEIBwgDAAIDAQEAAAABAgMRBBIhMUFRcZHRBRMVYYGhwRQiMkJScrHhM5LwI2IkNPGiQ//aAAwDAQACEQMRAD8A3bp5CF6TMeLs2J0ICFJg6cURNmu/nFScblSqZJYjeHYgqtqxojJSV0PJhjmNqFnCXu5wTK00DXAHdknZrSJyjJWiwopHVoUmlqHGcr2ZKGbyo3LMnaEIm7kspjyIkgFFEstYcORYdx3YiiNDB51YggZuPkUpMqhHYyw0KDLkSNKTJJ2LETlW0XRZfhVMjTEstVZch0gHQMYoEyKR1Bj9mKmkQk7LOUXzNORVqi0Z3OL0FV6sRSznS4xkY4fnYt2aRxfepvuL8zmm7RjDVgcSTJWpc8fFeBTihZll3ee1nbUb5dXkxvG91fS1rff3A0GXBR/6v/IpWn9XkiP/AA6MjzfMrh7mn4OPzpv+RSyZP5vJFWVCD/j/AP1LmXI56j4OP/U7arcJr5vJGmNWm1fI83zIpAMxFGfS9tSSn9XkiuXVaVT83zB4cHAxx9ctR5eETyJrRLyRHraTVnDzfMnZaPmR03/rft46g6c/q8kXKrTXy+b5kgl+ZH/qdtRyZ/V5Inl0/p83zJmvPyI/9TtqLjLb+C1Sh9PmyOaYj4kZ9J204wk/m/BXOpCPyebI4bSTmyMek7alKnL6vJEIVoPTG3i+ZI7O9cjr/wDb21FKejK8kWPq75WT5vmOLUfkMrupJ28UdXL6vwProfT+QxO7xbKc0nqvpZEvq/BJVY/R5stRE/Jj6pO2qmpbS+Lh9P5Lcd44Uj6n9tVNS2l6cXqLMZdkOD6n9tQae0ti46kHx/mea/tqNntJXWwFzpB8jzXdtNJ7SLklqIXySO8Ex4cjx/epqNtJBzv8KK8lpfW7RjjgSAHAgGtCRf5D1KSh3lcp7Y3IbRQmhaBg04DaRXeVZTbz5ymtGObNpK1zdUc6vvtM2TsMgXX5ODuQFac6OdaMtdw7XJwZYCOLwbcd3HkVdNZTlv8ARF1eXVqCejJ9WGXYVPXs/wDSkRvYGSpGVebFNZiMrtZkQRWgA0+zJScblcKlnYth4Vdi9SViCQGtQprRYqkne6CuhwqOpK7TzkrKauiIPc04HyHEKVkytSlB6S1FbGnkP2darcGjRCvF9xLUOUc6J3UiTggcDj+d6jlPUTyE8zHETxgKEcqMqL0jUJrMhgXDwwAjNqBOS+JEodQ45FRtcnezzluMqtl8Syw15FWy5ZyeNrhk6vOoNp6iyKktDLDXV51W0Wpic4BCQNpDFwTsJtGNwDWSulMjjWoaK5B2JBNcQCXUyoDtopRpO9yudaNrBzzC8eUM5fiqynF595VVqLMu4qSwkmocQtCkthklBt5mY4iIANwfSaRXnWm6b0nOyWle3AltxLixhY5wMTajAHw5Mq0qeRVU8zk76/RF9e8lCLV/d9WW9V7Ox83ASguYY3OYTea4FpHFOVcCepV4qcoQyoabl3R9OFSq6dRXVs2lHVxat2VuTHDk4R9PWue8XWel+SOxHo7DR0R83zOM1x0UyOcCMhoLA
/jE4m84HE8w610MHXlOLUjjdKYSFOSlDMZ9jtT63XY8tK4c4WuUVpOfTqSvZmlwe7D87lVc1ZBmWiNzXVyxzBoFbFpoyTi4u5savaJ4eXjhwa0Xnmpxrk0HeTXLYDyLNia/VQzaXoN2BwnX1PeTstPI6r9GLJWvBu9I/wDFc72yvt8kdrszC6cnzfM5vTJhZaOBgZQMFZHlziA51C2MVOdMTzjlWrDzqTTcmYsXSo0pKNNW25wakDKqvM2dIZswrnQ7ihxzAqiuTSuwr4Qz2fYopZ7FknmvpIGaQZkPtwCm6UtLKViaehEjJ3A4trygqLitTJqpJPOjQjmCocWbIzRaZMN6rcWWqaCdM3PD70slknOOkB1oBxwpvripKD0EXUWkgmtYzGI31+5SjTK5VlpRn2xzTm0K+CaMlVxelFa0S0cKYcRnsqVON8q+0rrTtk2+lDttzuT1qXVoisRJHMwaYc3AtB3jLHfyFaGkznQqSjuNPSOkmng6tIrE1wpjTjyD7lRRjaUt/ojXi6icKd1pj6sk0BpUi0xcfC8G4g436sz/AKgjE006crIMDXlGvG7zaOOY9G4VcbJPUZRxfdHaf2eUVwMkZp84Bw9h3Wt2BdptHJ6XjempbGYVngaW4PINPBOw7cF0W2tRw4wi1pLURkb4JDt3G+5ReS9JbDLj8LuPJaK0bcLnEgXaZkmgojJtnuN1Mr3bXZ3WiLMIYgzM5uO9x+4ZDmXHrT6ybkelw1JUaahxI9PaY73hc8CrzxY2/KkPgjmGJPICoQp5TsWVaypxcjhdGWd1CXSFznFxeTm5zjVxPOarsJKCSSPNXlVk5NmjFIRgWnnGKi0nrLYScczRK5zHbj6x+CSUkTbhPvKrRiQW0HKVPvKEs9rZghHHuCLyGo09gJbQUa403bkadIrZKtFsjY+6auJ6sFJq+ZEIvJd5MvWa1MPOVTKEjXTrQZcjlON1wdzhVOK1miMnqaYEVobU33Y7tibg/lRGNWN/fecaaauLS0jLPFOMbaRTnfPGzM+0yDnOAor4ox1Joa1nEcXExx+TiqNP5s+tkq7zQzfKiAO5APJ+CtsUXPOn64sJBLJCRhUtYajceNisXaVLY/LmdF9B13plHz5GlpTWqFne5MTyH2dj6ANwBlmFPC5CoQx9OLk7PO+7mWVeh601BKUfdVte19xXi13szDebDKKUIBDHUINcDfBUn0jSatZ+XMjHoSvGSkpR8+R7bDaw9rXtODgHDmcKj1rMkdFyMDX6cNsT5SCeCcySjQCfCuHAkDJ52qcJ9U8tlNWj7RHq1pe08pZrjEDUMlFMsGdtaO0qWx+XMwdg4i98qPnyLo17s58KGWu9oYP9xLtKnqT8uZPsSs9Lj58ju9Qray1MNobG9rGuLWGSlXEeE4UJwGVd9dyjPEqrH3bltHAvDz99ptaLX9UjsOGVGSbMs8p0/wB0SzPtTqtleyKrIywMLXGvHkFXitSKA7gN6VPE04POmFbBVaqVmlx5FH/EGzB94QzY5gtj+6RaO0KdrNPy5mLsasp5SlHz5Eo7o9nGUdo6oj/eo+309j/3iWLomstEl58hx3RrLWphtFRkQ2OvvEvb4akxromre7kvPkM7ui2U5xWg84jH+4n7fDYxPoiq9Ml58iKXX6yfFhtAPNHn6RNdIw1p/wC8SuXQtT5ZJceRCO6BF8mbqj7al2jR2Py5kOxcV9cfPkTO7osGyObqjP8Aeo9oUdj8uZN9D4nVKPnyBPdDg2RSjmbGP9xPtGlsflzF2NiNUo+fINndGgAIEc48kZHlPCVSfSFJvQ/LmSj0RiErKUfPkC3uiw41jmP9MeH/AHp9oUdj8uYl0Pidco+fIdvdDs22KbyCMf3pdo0tSflzGuhq+uUfPkNNr/ZT/Bn6o/8AkTXSNNan5cyMuhar1x8+R1FntolZFK0Ua+GJwvHjUpuB+9acPNVIuS1tmHGU3RnGnK11FEwun4pKuz7TN7r1HhhXm
T3Zqae8GyfVI/fWhAlpMpAz3zUPSHCaPsxri2MRnniJj/sXSo54JnGxDyaskX9YIeGss8PjIpGjnLDd+2inOF4tFdOrkyT7z5zaaiq5J3TQ0Hop9qnZAzAvOLqVDGDFzzzDrNBtU4Qc5WRXVqKnFyZ9B6PhZBEyGIXWRtDWjkG87TtJ2krqKCSsjiyquTuzle6brP3vZ+AjdSWcFtQcWRZPfyE+COcnYqMRLJVlpZpwkOsld6EeLhYDqiQAkAJACQA4QhM9f0HoCCdxDwxoBAoGRVNQ84XyMrmyuflXqMRkU1mgn4btie08Vg+srt5VSS8d+1rZ3mjY9U7K4Orjdo6rIo6FpEZoCLwLuMW4EipGdCqJ1UrWhHPtW/uWbXoNVKhKSd6s3bY9WbvefVpI59VLM2IvN2ojD6cHGKFxddvCtQDdDQM6uCcakZTUerjpto/WrTuIzoyjTcutlovp36c+vRvZYi1Qspja+7i5sZ8COl5zLxAF2uw05s1B1UptZEdL1d9i2GHbgpOpPOl82tq5JNqXZWuYLuDn3TxYyaXHnY3DFhFeUFRjXTT9yOjZ3rmSlhnFxXWTzv6u58jI1m1ehs0gY2MEFtauYytdowHN1rThnCtBycY8DFjVVw9RRjOVu9njjMhzBeYPavSeyavg952U/wDTxc+S7mA/it3nlOl0/aL9yNFltcNgWx00c5YiSPDS07j1LzB7s1NPNN2yYH90j2fzrQgitJl3DuPUgkeodyy3HvaSI/w5SR9F7QfaD108Fng1sZw+k3k1E9q/B2gtK2ZBzetPBNKWQxzSxgGjJJGjD4ocQ37KLhVI5MnHYz1VKeXTjLakel9zjQ/e8JneP1swBFc2RZtbznwj/TuXTwlDJjlPS/wcTH4tSnkR0L8nV2vSLY2OkeaNYC5x5B6ytMkopt6DHCTnJRjpZ4fp3SclqnfO8HjHit+QweCwcw+0k7VxKk3OTkz01GmqcFFFC4dx6lAtFcO49SAFcO49SAFcO49SAFcO49SAFdO49SBHRx632hpq2NgI2gPB9pdR9LVWrOMfPmcSPQVGLvGcr+HIdmuNpAc0MYA6l4APoaGorxscUn0rUbTcY5t/Ma6DopOKnKz06ORI3Xi1ht0ABtCLoMgFCCCKXtxPWl2nNu+RG+58ya6Hgo5PWTtvXIdmvVsGVBgBgZMm0oPC2UHUk+kpPTCPB8wXREFoqT4rkE7X62nM5cslciM73KetJdItf/zjwfMk+iovTVnxXIhtGulqk8NrXZ+FfNKmppV29Sj0pOKtGEVx5kJ9C0pu8qknva5HMALmHZec9k1bP7JZht73i2/NXbwP8PieX6Vf/kNdyNHvYnG9TnH4LZl2Ob1Lee54qV5k9waenMrL9Uj99aEAZiAOo7n9ruTSM+WwHysd/wDsro9Gv/kcdq/H/wBON02rUYz2O3Ffo7vvpdnIPMdecXatDibSLy4Vi4kr9xq0NDPK5jvICuXLCZeKd9GZ/ryO/DpJUuj4tP3s8VxvfwTXjY7TvpdTIOB15xOvemr5FmaeK2jpOV2bWeTBx5bu5cjpCtn6pePI9J0Nh3k9fLXmXq/Tici7I0pXZzrmHdO6j/Rygvd/1oK0u0rtonmFnCrq3/1//almDOcRaAwyP4Oty+7g73hXLxuXvnXaV5UAdVoPUPSBnhMtgkMXCx8KHFrBwd8cJm4Hwa5Jhc3dbe5ba3WyU2KztFmNws/WMaG8RoeKOde8IOOI2osJM4nWTV6ewSiG0hoe5gkF114XXOc0Y0zqx2HMkM1NBaL0RJAx9r0hJBMb16JsD3taA9wZRwYQatDTntQGc2NHaraEnlZDDpSd8kjrrGizPFTzmOgwBNTgAExXZymtmiGWO1zWZkvCiIgX6UNS0OLTsqK0NPsyCGizqDYYp9I2WGZgfG97w5prRwEMjhlytB8iEDKetNmZFbbVFG0NYyeVjWitGta8gAeQIAzEAVEEj2TV5o7zsuFf2eHLm
XawD/4vE8t0sk8R4IlllLTQ3hyVW9K5x5Np58x5EV5Y9+aWnMrL9Uj99aEAZqANHV2a5aYzsJun+oFo+0ha8FPJrx4cf2c/pWl1mDqLYr8M/wCDv769PY8FYV5FgKWmNJCCIvzOTRvccvJmTyArPiq6oU3PXq3mzAYN4qsqerS3sX+zLvPPCSSSTUkkknMkmpK8s227s+gRiopRirJZkaGgLXBFMH2mz98RgOBivXKkjA3huSGddZdYtDySMjGhcXvYwftDs3uDR9pTFnNbXG0aHsFqdZjooSlrWOLhM5o44Ju0JOynWgFc4nTlps9slhZYLCbO41ZwYkvmV7y0MoXUApiP6khnRSauazSg1da250JtrGgHmbN9yYsx2HdI1Nt9ulgmgkbGBCGStfO+NgeHF2AYCHeERXkCGJM8x1o1NlsDGSSzWeQvfcuwyF7gbrnXnVaMOLSvKEiVznCUDPVdRdXZ7BYp9Kus75LRwR72hpVzWuwMz25450GNwHa6gZFs8sdI55L3OvOeS9zicXOcbznHlJJPlSGdL3Mf82sfSSf+PMmhM6zWHTOhZ7XaIbdY32eRk0rO+rPiXFry3hJGsFS454tegFc5bXDVWCyxx2izW6O0wTOLGUpwoLW1dW7VpptPFoXAUxQNHFJEj1vQZPetloQP2eHHbkV3ej/4fFnkemX/AOV4InLsTfJduIIot1thyL7Tykryh9DNLTmVl+qR++tCAM1ABRyXSHD4pDvK01+5OMslqWzPwIzhlxcXrTXHMekB9cRtxXs1nV0fOHFp2YqosKxw2n9I8PLgeIyrW7j8p/l9QC8xjsT11TN8K0cz3HReC9mo+8velnfdsXh+bmcsR0hIA9A1G0BZrOyPS1vtMQhYS6GFhvySSsOAI+U1wrcFTUAkgAgsTOP1i0u62Wqa1PF0yvvBud1oAYxvKQ1rQTvBSGUYpXNcHMcWuaQ5rmktc0g1BaRiCDtCALUul7U7wrXaXfSnld63IA7XW4C06B0bOeOYZHQOLsTQCRhJryws60xazztsYGQA5hRIZ6Loqz6I0dFHap5hbrS9rZIrNGKMYSKtMoNbpG9+7BhITFnZj/4hW/v3v7hON4PAY8BwVa8Fd+2/nXHkRcLFnugO0dOyK3WN4jktDnCay0xY8Cr5CBgw1oDsfeDh8aowRS7mP+bWPpJP/HlQgZS10/zC2fWZ/eOSGYl0Z7eZAFVBI9O0dLSCy5fu0OZ+aV3ejn/w+J5Dptf+T4IeS045DqW65yVFsy3au2ffJ5w/BYezqPfx/R1u28VsjwfMu6U1fgdwFeEo2zsaKOAwEsx2t5SqqeApScr3zP8A2ovr9L4inGDWTnV3m733lIauWY+N85vZVvZ1Hv4/oo7cxOyPB8x/0Yg/meeD/ajs2j38f0Lt3E/9eD5hN1Vg3y+cOyn2dR2vj+g7cxOyPB8x/wBFYf5vnj72o7Oo7XxDtzE7I8HzEdV4Ngl89vqupdm0e/j+h9uYnZHg+YB1YhGfCecOyjs2j38f0LtzE7I8HzBOrMIOPCDneOyjs6j38f0PtzE/9eD5g/ozBX4/OHDso7Oo9/H9B25iv+vB8xSauQjLhPOHZSfR1Hv4/oa6cxOvJ4fsjGrsRy4Trr6mpdnUu/8A3gT7axGyPD9kbtAxbC/yvaPuS7Ppd/Ea6ZxGxcHzIv8A4GIkGricswSPsS7Ppd/En2xX2Lg+ZN/8BFvf5w/BPs6l38f0V9tYjYuD5hfo9D8p/nDsp9nUu/j+hdtYjYuH7JYtW4T4zzh2U10dR7+P6Iy6bxK+ng+ZKNV4N8nnDsp9m0e/j+iPbmK2R4PmL9F4N8nnDso7No9/H9B25itkeD5gu1YgG1/njso7No9/H9B25idkeD5kEmr8I2v84fgovo6j38f0Tj03iXqjwfMqv0BAMuE84fgo+wUe8tXTGJeqPB8zftjGxtgY0YNs8IFTj4JVuGioRcVqbKMfKVSUZvS4ogLgtJz7M
t38cXdRCmmUtbEX9KSCkOZ/Ut5vhJVVSeee/wBEasTF5FL7fVlMSbP/AGFdcx2CEp/ITuFiYSfOQIJz6mgcaddEDzXGcwjCrTy5+vFCE1YYQ8358qAJBCNtUADJcAwBryYdaB6SrJTPEeVIEQPJJqDTlqa+Sii85YsxDJGXGrnDy1qk1csU7aESRQAcvk/FNRISm2TWazXnNaBi5waK73EAY5bUOyV2OKc5KC0s6Y6j2zxTfSM/FZvbqG3yN/Y+K7uIUepVsH8JvpG/ij2+ht8gfQ+K7uIQ1Ntvim+kb+KPb6O3yF2Nie7iMdTLb4pvpGfij2+jt8g7GxOxcRjqXbfFN9Iz8Ue30NvkHY2J7uJC/Ua3H+G30jfxSeOo7fImuiMStnE5C0xFri05gkHnBoVeZbOLaLmlW1MWB+Ai9Spo/NvZpxb+D7UVQ3kHldT1q8xX7x9vhIE9xq6RYKQ4j4BufSSqFFZ57/RF2Lbyaf2+rImkfKHkK0GFpk15nJ1oFZgOc0ZOqgLMXfVMgPIi40mAJqnIIuJoMTu2IATpjvJ5kAVHS8ijcmokT5uX7VG5YoEbp6ZFGUSVO+kGM1215wkiTzFoP2H7Ap3KcnWXNFfvENBhwsWefhtUKnwS3P8ABdhv5ob0ezaZtL447zASQ5pNA0gMBq+9eIoLoIrsqFwKEIylaX+eo9jiJyhG8f8ALX5HPWXWKR0UkgLqNE4FQz4R8p73a7GraC62lMSeSp3TwkYzUXryduhL3jnU8dKVOU1qytmlv3b7NhtaN0o5zxA+GUPaxrnvdwVMQQHG485ljsAFkq0Eo5akrXzJX9VqubKOIlKSpyi7pK7dvR67AW/SMvC8HDG9xiLHSU4K65j2uo3juBBwqCN2KdOjDIyptK97adK3IVWvU6zJpxbta+jQ75s7RXktNtuPa2GSrn1a8mz3o4yQSKX6OcOMBXDKuWNihh8pNyWjOvezvho2lTqYnJaUHneZ+7mXHTs8Lmvoy2iaMPDS3F7SHUqCx5Y6t0kZtOSy1afVyyb7PNXNtGr1kMq1tOnudjwXSb6TSGmIkf7RXfTzI8jUV5y3sPTs1TFsHAxGg5iqqb+LezRiIWUPtRnB35qrTNYk2poizT0ocIMf4DfezKqlplv9EX4le7T+31ZSMnKr7mTJCbTlQJ3JBTlUiDuyRoTRFkgfyDqTuRsC56VwSKkkx3qDZfGCRC5/L61G5Yl3AcyRIQAOyvlQDui3Z8BhgrEUTzseo/JQLOXtEOHDwj+bF7bVGo/cluf4LcMn10H3o9h1kB4Ev4MS8Gb9wl9HXQaC6wG9jTA4bdi4eF+O17XzXzeujwznrsZ/HlZN7Z7Z/TT45tZhaOsjbQDE9sL2saXSWhjz4cvCPIHFANHOvXa0bVu0LZVqOk8uLab0Ra1Ky2+F9ecwUaarLIkotJZ5J63d7NTz21ZjR1StBm4WV7mmQmNhDa+AxguuxGTi57xyOCoxsOryYJZs78W/TMjRgJuplTk8+ZeCWZ+OdreW9JxukN11kbK0GoLnsxNM6EYZkKqk1FXVSz3MurRc3aVNSXe0YFl0aHzSO7yZSJ7WtY10bA03GPq4gVeeNvplhVbZ1nGml1jzrS7vW14fkwU8OpVZPql7rzJWWpPx/HcddY5HubV8fBmpwvB2G+o8q5k1FP3Xc69Nya95WPnzS5/XSfTf7RXeWhHkp/G97D0ycYegh9RVVL5t7NOJ+T7UUmSkZH7AfWrk7GNxTLGKkVsv6WOEHQN97MqqWmW/0RqxPw0/t9WUWq4yMnYaZYKSKmr6SQPO9O5GyJQ/DZ1JkLEb383UlckolSaWqg2XwhYgL1G5ZYEvSuSsDfRcdiWIfmiaISLXlUykYORcLFzQrq2iHpYvbaoVPgluf4LsOrVYb1+T3S12YSNulz2iubHFjua8MQF5+E8h3svHOeyqQy1a7W52KTtX7PS6I7oNA66SOEaDW7Ic3gnOuJVqxVW927+m7YUex
0bWStttr37S0dHx8I2UCj2tLAWkgFvyXAYOAzAOSr62WQ4ann8S3qYZanbOs3hsJ5WXmltSKgircCK7Qd6gnZ3LJK6sV9H2AQ3qOe8vdfc55BcTda3YAMmjYrKtV1LZkrZsxVSoqnfO227tvgW1UXHzlpg/rpfpv9or0OpHkZfG97/IemjjD9Xh9RVNL5t7NOI+T7UZ9VaZrFkuUymxpaVGEHQN97MqqWmW/wBEaMT8NP7fVlQK8xEjUyLJAVIgC56VxpFaaVQbLowIC5RLLAFyRKw15A7BNQJliIKaKZBEpisOHICxe0If2iDpY/bCjP4Huf4LcP8Ayx3r8nvNpkLWOc1t4taSGjAuIFQ0HlyXnopOSTdj2M5OMW0r9xx1s0lpOZjnMhFmjDS4ucePQCpArjWnzRzrqQo4SnJKUsp+X+8Ti1K+PqxbjHIVr59P+8DR1AtD5LKXSPc88I4Vc4uNKNwqVR0lCMa1oq2Y0dEVJTw95Nt3ek6RYDqCQAkAfOOmfh5fpv8AaK9BqR5KXxPewtNZw/V4fUVTS+beacR8n2oz6q0zFoBWIpZq6Uyg6BvvZlVS0y3+iLsT8NP7fVlJqvMbJAUyIznouCiV5ZVFstjErlygW2BLkDSGqkOw4TBkrAmiDZOclIqGQMcIEXtCfvMHSx+2FGfwvc/wW4f+WO9fk9/XnT2RU0v8BN0cnsFW0f5I71+Sqv8AxS3P8GD3OP3Q9I/1NWzpT+fwRzuhf/W8WdSucdYSAEgD5w018PJ9N/tFeg1I8m/ie9h6azh+rw+oqml829mjEaIfajOVpnLoVhmZp6Vyg6BvvZlXS0y3+iNGJ+Gn9vqymFcYmM5yBpEMkii2WRiVyaqJbawzigaQKQxAIBkgCZAnjapIrkx3JiQkAEECZd0H+8wdLH7YUZ/A9z/BbQ/ljvX5PoBedPZFTS/wE3RyewVbR/kjvX5Kq/8AFLc/wYPc4/dD0j/U1bOlP5/BHO6F/wDW8WdSucdYoSaYha57XEi5QF10ltT8UEDMVHWFcsPNpNayh4mmpNPUWbNamyVukmmdWub6wFXKDjpLITjLQfO+mh+uk+m/2iu9qR5V/HLe/wAhaazh+rw+oqml82804jRD7UZ9FcZS9RWGdmlpTKDoG+9mVVLTLf6I0Yn4af2+rKDnK4ypEL3qLZNIgcVEtSsMUACkSFRAEjWpkGyRrU0RbJqKRWCgY4CACTIlvQf7zB0sfthVz+F7n+C+h/JHevyfQK88exKml/gJujk9gq2j/JHevyVV/wCKW5/gwe5x+6HpH+pq2dKfz+COd0L/AOt4s6lc46xyukhLdlJinYwvYWNaYA0AmMuqA6t4vvmvKF0aWRePvJuzv8Xf3bLHKq9Zky92SV1ZLJ7r69N7m7o5zqEOZKNtZTGa12Dg3Hdt3rHVS0prwv6o30XLOmn429GeAaZH66T6b/aK7mpHlpP33vYWmhjD9Xh9RVNL5t7NWJfwfaihRXGS5dAU0UM0NLnCDoG+9lVVLTLf6I1Yhe7T+31ZluKsuZ0iFxUSxIaiBjUQMVEAExiERbJA1SIkrGJorbCITECQkMcNTEEQgEHo6cRzRyOrRj2ONM6NcCadShJXTW8upSyZKWxo9RHdMsni5/NZ21zOz5/UvPkd3tel9L8uYz+6RYyCDFMQRQgtYQQcwRfTWAqJ3Ul58hPpai1ZxflzI7L3QbDGLsdnlY2taNjjaK76Byc8FVm7ykm/HkRh0nh6atCDS7kuZL/iXZPFT+aztqPZ8/qXnyJ9r0vpflzGk7o1jcKOhmIwwLWbDUfH3hNYCondSXnyIy6WoNWcX5cw/wDEmy+Kn81nbR2dU2rz5B2zR2Py5nlGkHh73uGTnOI30JqunayscNyvJsl0y3GHoIfUVRS+bezZiX8H2oohquMlyy4qZSW9MnCDoG+9mVFLTLf6I2Yj4af2+rMtxVhSgQEBcVEDFRILiDUBclDVIhcNrUyLZLRSK7iok
O411AXCDUxXGeEhogcFEsRV0hO6MR3KVfI1mOVHVVFepKCjk62lxNeEpQqueXe0YuWbuH75cHmN1LwYZK5spWgrka1qjrZKThLTa/cHUQlBVIfDlKNtd9O4sB5yJFcBkc7taHca7DsKtyn/ALd/vAoyFpSflt883mMyQ/GIyDsnZDwzjsoQkpvX6+I5U4/KnptpWl6CZrnVwu0vU+NkMHY76qWU+7/afMrcI2s73t3eHkPefvZTi7Hf1eo05ksqe1efiGTT2S17PD9jvarCpMtaYbjF0EXqKopL4t7N2Kfwfaig1qusZGwnFAi3pnKDoG+9mVFLTLf6I24j4af2+rM4BXGa49ECGogdxUSC4cbE0iMpEgapELkzGJpEHIItTI3EWoHcQaiwrjlqBXI3hJk0yO6lYncJ9nDhQ9eFQd4qMDypSgpKzCFZwldFdmi2BxdVxJaWGpHgk12BVLDRTcru7VvA0Sx9RxUbJJPKzX08SyLK3HDAkEjYSAAD1AdSt6tGf2iSS7tevPn5j96DAEk0BbjTwTSrcuQY58qOrWj/AFv94i9oabaS038dv+zdxILKMeU3vLh+AUurRF4iXlb88wu9W7vzj2j1o6qJH2if+8OSGcxSsJSLOl2YxdDF6is9FfFvZuxTzQ+1FEMV1jI2QOUS0vaXGEHQN97MqaWmW/0RrxD92n9vqyhdVxkuINQFx7qAuO1iLCbJQ1SsQuG1idiLZMGpldxXUBca6gLhBqYriLUAmRuakTTEGIsDkTXE7FdxXEWDKHDE7CuOGIsFx7qLCuPdTABzUrEkyzpVuMfQxepZ6Kzy3s3Yt5qf2opBiusY8opXVWaWzR0q3CDoG+9mVVLTLf6I04l+7T+31ZQuq4yXCupiuNdQFyVrE7EHIMNTFckYxNIg2FdTI3FRAXEGosFwg1AriLUBcAtRYlcdjECbJriZEcMQAVxABBiBjFiBCDEDBcxAixpNmMfRRepUUdMt7NuLean9qKlxXmIz7iqsamzQ0o34HoG+8lVVLTPf6I04l+5T+31ZSuK6xkuK6iwrhsYnYTkHdTsQuE1idhNktxMiPcQAgxADhqACuoAa6gBXEAOxiBklxAWHuoAMMQOw91IBrqYWFcSAZzUwJ9IsxZ0UXsqmj829mnFaKf2L1Kt1XGUzbqgWtl/SbcIegb7yVU0dM9/ojVin7lL7fVlK6rzHccNRYTZKGKRFsdrEATMYgB7qAHLUAINQAQagLBFqBjXUAK6gAmsQFggEhj3UAE0IAYoAZADhqAFdQBYtzcWdFF7KqpfNvZpxOin9i9SqArTMZl1IGy9pIfA9C33kqpo/FPf6I14r4KX2+rKgYtFjFcNrECJGhABhqBhBqBjhqAHISAcNQA4agY5YgAS1ADgIANoQMe6gLDoAYBADhqAsPQIAZABAJDsWLa3FvRR+yqqXzb2acSs0PtXqVlcZjKuJ2K7l7SI+C6FvvJVRR+Ke/wBEa8V8FL7fVldrFeYwqIGExiASJQ1IkFRACAQAqIAcBAw6IAYtQFhqBADhAD0QMdIBUQA4QMQQAqIFYeiB2EGouPJZYtoxb0cfsqml829mjE6IfavUqlXGVmeRzKRWXLePguhb7yVU0dM9/ojXifgpfb6srtarjISXUhhNCBjoAcIAK6gdh2tQNIJIYJTEJACqgQTUiSCLUrkrDJizBNalcaiPcSuPJGup3FYVEAEkSFQICyJrZm3o4/ZVVL5t7NGI0Qt9KIFaZjNAVhnNC0Rh4jIfFhG1pDpY2kEPkJBDnA7R1rNGeRKV09Ox7EdCpSdWFNxazRtpS1vayPvb+ZD6eLtKfXR7+D5FPss9sf7R5i72/mQ+ni7SOuj38HyD2We2P9o8wuA/mQ+ni7SOuj38HyH7LPbH+0eYuB+fD6eLtI66PfwfIPZZ7Y/2jzHFnHjIfTxdpHXR7+D5B7LPbH+0eYRiHjIfTxdpLro9/B8h+zT2x/tHmMIP5kPp4u0n10e/g
+QvZp7Y/wBo8x+9/wCZD6eLtJddHv4PkP2ae2P9o8x+BHjIfTxdpHXR7+D5B7NPbH+0eYuBHjIfTxdpHXR7+D5B7NPbH+0eYuAHjIfTRdpHXR7+D5B7NLbH+0eY4hHjIfTxdpHXR7+D5DWGltj/AGjzC4IeMi9NF2kuuWx8HyH7PLav7R5iEQ8ZD6aLtI65d/B8g9ne2P8AaPMfgx4yH00XaR1sdj4PkPqJbY/2jzG4MeMh9PF2kdbHY+D5C9nltX9o8xGMeMi9PF2kdau/g+Qezy2r+0eY3BDxkPpou0n10e/g+QvZ5bY/2jzDaweMi9PF2lF1Y9/B8iaoS2x/tHmOWN8bD6eLtI62PfwfIboPav7R5itTmkijmuoxgJa4OFQMRUYFFL5n3sMQleCvoilmzkeCtKMxntad6mZgxXegYQJ2IHdjVO1AXYV9FguPf5UrDuECgdxAoAK8iw7iLt5RYG9pW0hbxEAS0mtcqbOdVVJqGdl1Kk6t0nYoy6wxta15a7jNkd8Wv6twa4EA4GpGBxxG3BVSxUEk2tvkaIYCpKTimszS168+z/bs5Zm02xj7hD73FGAbTjNDsDWmRTlXipZNghhJyhl3Vv8ALYaDjTatBieYdpKBpscFIdxFxRYG2CKpkc4ZJSzE86EDyoBMcuSsNsQKAuOgYJCCNikxisM9h6IAMc6CQ14bECH8iBhBvIkOwTWoGkOUgdxUQFgSExAT2YOpe2Go6iPvUZJSLISlC9tYBsDCA2hwaWjfRxBz8mCr6qNi1V53uttyJ+hoiahpGyjTQAXODFBkKNyUXQg/93WLI4qqs2n/AO3/ACX7quuZckdA9wxciwrhNQSQ4CQ0hXUXHkioEBZDEIE0wgEDsMgBXUXFYqOKsM7Ym5IAEoEOwYoGtJI0pEk7BFxolYbbsJibCIb1FEpAtCbEgQUyK0hhIkEEiaBKZF6RFAahbEaxag2JMnEc5JDegAFMhckSLAQgitIQOKCWsNImMUCYKZE//9k=
|
| 164 |
+
input2 (text): default
|
| 165 |
+
input3 (text):
|
| 166 |
+
output1 (json): fvinfo
|
| 167 |
+
"""
|
| 168 |
+
if openai_key == "default":
|
| 169 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 170 |
+
else:
|
| 171 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 172 |
+
messages = [
|
| 173 |
+
{
|
| 174 |
+
"role": "system",
|
| 175 |
+
"content": "あなた�E優れたWEBマ�Eケターで、ランチE��ングペ�Eジの要素を見�Eけることに長けてぁE��す。また�EーケチE��ングの達人なので訴求テーマを言語化するのが上手です、E
|
| 176 |
+
},
|
| 177 |
+
{
|
| 178 |
+
"role": "user",
|
| 179 |
+
"content":[
|
| 180 |
+
{"type": "text", "text":"""LPのファーストビューの画像を解析します、E
|
| 181 |
+
何も書かれてぁE��ぁE��像�E場合�E、空の値を返し、E��LP=Trueとして終亁E��てください。何か書かれてぁE��ば以下�E優先頁E��斁E��を抽出してください。タイトルめE��ゴはメタ惁E��に刁E��し、権威付けめE��ピ�Eと区別してください、E
|
| 182 |
+
・CTAボタンが存在する場合、�Eタン冁E�E記載�E容で教えて下さぁE��ETAを�E現するHTMLを文字サイズ、文字色、文字�E間隔めE��置・改行、構�EするレクタングルめE���E丸さ、影めE�Eタン背景の色、gradientのあるなし、サブコピ�Eが�Eタン外かボタン冁E��、�Eタン冁E�E矢印等�E絵斁E��に気を付けて生�Eしてください、E
|
| 183 |
+
・画像�Eに権威付けのバッジがあれ�E最優先で権威付けに刁E��してください。権威付けのバッジを�E現するHTMLをCTAと同様�E頁E��で気を付けて抽出してください。誤ってロゴを�E類しなぁE��ください、E
|
| 184 |
+
・キャチE��コピ�EをLPに掲載されてぁE��頁E��に並べてください。大きい目立つ斁E��で書かれてぁE��冁E��をメインコピ�E、それ以外をサブコピ�Eに刁E��して、色めE��景、フォント�E種類などをHTMLで再現してください。また、Eつの斁E��を作るコピ�Eは刁E��ずにまとめて抽出してください、E
|
| 185 |
+
・画像�Eに写ってぁE��イメージ(写真めE��ラスチEにつぁE��、どんなも�Eが起用されてぁE��か、�Eロンプトで再現できるチE��ストとしてビジュアルに列挙してください、E
|
| 186 |
+
値を抽出した後�E、その値が含まれるレクタングルの座標を2点方式で教えてください。OCRの斁E���Eの座標min(xs), min(ys), max(xs), max(ys)がある�Eで、それを参老E��レクタングルを合体したり、�Eタン刁E�Eバッファを庁E��たりしてもいぁE��す。あくまで画像から座標を抽出してください、E
|
| 187 |
+
画像�Eに該当�E値がなければ[]のように空の配�Eを回答し、画像になぁE��とは回答しなぁE��ください。特に黒一色めE�E色一色の場合に注意し、E��LP=Trueを返してください、E
|
| 188 |
+
これら�E抽出惁E��を総合して、メタの吁E��E��を記載してください。訴求要素は、情報かOCRがある限り�E20斁E��で6種類提案してください。情報がなければ空にしてください、E
|
| 189 |
+
""" + p}
|
| 190 |
+
]
|
| 191 |
+
},
|
| 192 |
+
]
|
| 193 |
+
|
| 194 |
+
messages[1]["content"].insert(0, {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{base64img}"}})
|
| 195 |
+
r = ask_raw(messages, "meta-llama/Llama-3.3-70B-Instruct")
|
| 196 |
+
|
| 197 |
+
return r
|
apis/baseimg2fvinfo_with_design.py
ADDED
|
@@ -0,0 +1,180 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
import numpy as np
|
| 10 |
+
from typing import Dict
|
| 11 |
+
from enum import Enum
|
| 12 |
+
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 16 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 17 |
+
from src.clients.llm_client import LLMClient
|
| 18 |
+
import json as _json
|
| 19 |
+
|
| 20 |
+
client = LLMClient()
|
| 21 |
+
system_prompt = None
|
| 22 |
+
user_text = ""
|
| 23 |
+
images = []
|
| 24 |
+
for msg in messages:
|
| 25 |
+
role = msg.get("role", "")
|
| 26 |
+
c = msg.get("content", "")
|
| 27 |
+
if role == "system":
|
| 28 |
+
if isinstance(c, str):
|
| 29 |
+
system_prompt = c
|
| 30 |
+
elif role == "user":
|
| 31 |
+
if isinstance(c, str):
|
| 32 |
+
user_text = c
|
| 33 |
+
elif isinstance(c, list):
|
| 34 |
+
for part in c:
|
| 35 |
+
if isinstance(part, dict):
|
| 36 |
+
if part.get("type") == "text":
|
| 37 |
+
user_text += part.get("text", "")
|
| 38 |
+
elif part.get("type") == "image_url":
|
| 39 |
+
url = part.get("image_url", {}).get("url", "")
|
| 40 |
+
if url.startswith("data:"):
|
| 41 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 42 |
+
else:
|
| 43 |
+
images.append(url)
|
| 44 |
+
|
| 45 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 46 |
+
result = client.call(
|
| 47 |
+
prompt=user_text,
|
| 48 |
+
schema=response_format,
|
| 49 |
+
model=model,
|
| 50 |
+
system_prompt=system_prompt,
|
| 51 |
+
images=images if images else None,
|
| 52 |
+
temperature=0,
|
| 53 |
+
)
|
| 54 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 55 |
+
else:
|
| 56 |
+
return client.call_raw(
|
| 57 |
+
prompt=user_text,
|
| 58 |
+
model=model,
|
| 59 |
+
system_prompt=system_prompt,
|
| 60 |
+
images=images if images else None,
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class Meta(BaseModel):
|
| 65 |
+
会社吁E str
|
| 66 |
+
業畁E str
|
| 67 |
+
ブランチE str
|
| 68 |
+
サービス: str
|
| 69 |
+
啁E��: str
|
| 70 |
+
タイトル: str
|
| 71 |
+
訴求テーチE list[str]
|
| 72 |
+
|
| 73 |
+
class Design(BaseModel):
|
| 74 |
+
重要なフレーズの斁E��色を赤めE��レンジめE��ンクめE��E��などFV上で目立つ色に着色: float
|
| 75 |
+
背景を画像�E主要な配色と変えて目立たせる: float
|
| 76 |
+
四角や丸など図形で囲ぁE��認性を上げめE float
|
| 77 |
+
アイコンを使用して視認性を上げめE float
|
| 78 |
+
チE��スト�E重要なフレーズの下に水平なアクセント線が引かれてぁE��: float
|
| 79 |
+
|
| 80 |
+
class sCopy(BaseModel):
|
| 81 |
+
text: str
|
| 82 |
+
design: Design
|
| 83 |
+
|
| 84 |
+
class EvsF(str, Enum):
|
| 85 |
+
EMOTIONAL = "惁E��E
|
| 86 |
+
FUNCTIONAL = "機�E"
|
| 87 |
+
|
| 88 |
+
class EFitems(BaseModel):
|
| 89 |
+
item: str
|
| 90 |
+
judge: EvsF
|
| 91 |
+
|
| 92 |
+
class PvsS(str, Enum):
|
| 93 |
+
PROBLEM = "問題提起"
|
| 94 |
+
SOLUTION = "課題解決"
|
| 95 |
+
|
| 96 |
+
class PSitems(BaseModel):
|
| 97 |
+
item: str
|
| 98 |
+
judge: PvsS
|
| 99 |
+
|
| 100 |
+
class mCopy(BaseModel):
|
| 101 |
+
text: str
|
| 102 |
+
appeal_mode : list[EFitems]
|
| 103 |
+
forcus_stage : list[PSitems]
|
| 104 |
+
|
| 105 |
+
class CatchCopy(BaseModel):
|
| 106 |
+
main_copy: list[mCopy]
|
| 107 |
+
sub_copy: list[sCopy]
|
| 108 |
+
|
| 109 |
+
class FvInfo(BaseModel):
|
| 110 |
+
非LP: bool
|
| 111 |
+
メタ: Meta
|
| 112 |
+
キャチE��コピ�E: CatchCopy
|
| 113 |
+
権威付け: list[str]
|
| 114 |
+
ビジュアル: list[str]
|
| 115 |
+
CTAボタン: list[str]
|
| 116 |
+
|
| 117 |
+
def ask_raw(messages, model):
|
| 118 |
+
client = LLMClient()
|
| 119 |
+
|
| 120 |
+
# パラメータの準備
|
| 121 |
+
params = {
|
| 122 |
+
"top_p": 1,
|
| 123 |
+
"frequency_penalty": 0,
|
| 124 |
+
"presence_penalty": 0,
|
| 125 |
+
"response_format": FvInfo,
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
# gpt-5系はtemperatureを渡さなぁE��環墁E��よって0が弾かれるためE��E
|
| 129 |
+
model_lower = (model or "").lower()
|
| 130 |
+
if not model_lower.startswith("gpt-5"):
|
| 131 |
+
params["temperature"] = 0
|
| 132 |
+
|
| 133 |
+
response = _ask_raw_hf([{"role":"user","content":p}], model,
|
| 134 |
+
model=model,
|
| 135 |
+
messages=messages,
|
| 136 |
+
**params
|
| 137 |
+
)
|
| 138 |
+
return response
|
| 139 |
+
|
| 140 |
+
@customtracer
|
| 141 |
+
def baseimg2fvinfo_with_design(base64img, openai_key=os.environ.get('OPENAI_KEY'), p="", model="meta-llama/Llama-3.3-70B-Instruct"):
|
| 142 |
+
"""
|
| 143 |
+
input1 (text):
|
| 144 |
+
input2 (text): default
|
| 145 |
+
input3 (text):
|
| 146 |
+
input4 (text): gpt-4o
|
| 147 |
+
output1 (json): fvinfo
|
| 148 |
+
"""
|
| 149 |
+
|
| 150 |
+
print(f"baseimg2fvinfo_with_design {model} openai_key:",openai_key[-4:])
|
| 151 |
+
if openai_key == "default":
|
| 152 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 153 |
+
else:
|
| 154 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 155 |
+
|
| 156 |
+
messages = [
|
| 157 |
+
{
|
| 158 |
+
"role": "system",
|
| 159 |
+
"content": "あなた�E優れたWEBマ�Eケターで、ランチE��ングペ�Eジの要素を見�Eけることに長けてぁE��す。また�EーケチE��ングの達人なので訴求テーマを言語化するのが上手です、E
|
| 160 |
+
},
|
| 161 |
+
{
|
| 162 |
+
"role": "user",
|
| 163 |
+
"content":[
|
| 164 |
+
{"type": "text", "text":"""LPのファーストビューの画像を解析します、E
|
| 165 |
+
・何も書かれてぁE��ぁE��像�E場合�E、空の値を返し、E��LP=Trueとしてください、E
|
| 166 |
+
・CTAボタンが存在する場合、�Eタン冁E�E記載�E容を�E列で教えて下さぁE��アンカーリンクのあるチE��ストもCTAとしてください、E
|
| 167 |
+
・画像�Eに書かれてぁE��斁E��・コピ�Eを読み取り、LPに掲載されてぁE��頁E��に並べてください。大きい目立つ斁E��で書かれてぁE��冁E��を「main_copy」とぁE��キーで1つ抽出し、情緒�E機�Eのどちらに訴えてぁE��かなどを記載、E
|
| 168 |
+
・main_copy以外を「sub_copy」とぁE��キーで、読み取ったテキストをtext、それぞれ�Eサブコピ�Eの裁E��タイプ�E適用度合いをdesignに0~1のfloatで記述
|
| 169 |
+
・画像�Eに写ってぁE��イメージ(写真めE��ラスチEにつぁE��、どんなも�Eが起用されてぁE��か教えて下さぁE��E
|
| 170 |
+
・画像�Eに該当�E値がなければ[]のように空の配�Eを回答し、画像になぁE��とは回答しなぁE��ください。特に黒一色めE�E色一色の場合に注意し、E��LP=Trueを返してください、E
|
| 171 |
+
・これら�E抽出惁E��を総合して、メタの吁E��E��を記載してください。訴求要素は、情報かOCRがある限り�E20斁E��で6種類提案してください。情報がなければ空にしてください、E
|
| 172 |
+
""" + p}
|
| 173 |
+
]
|
| 174 |
+
},
|
| 175 |
+
]
|
| 176 |
+
|
| 177 |
+
messages[1]["content"].insert(0, {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{base64img}"}})
|
| 178 |
+
r = ask_raw(messages, model)
|
| 179 |
+
|
| 180 |
+
return r
|
apis/baseimg2html.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
baseimg2html: 画像からHTMLコンポーネントを生成。
|
| 3 |
+
HF版: VLM (Qwen2.5-VL) を使用。Vertex AI は使用しない。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from typing import List
|
| 8 |
+
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
|
| 11 |
+
from src.utils.tracer import customtracer
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class newHTMLs(BaseModel):
|
| 15 |
+
HTMLs: List[str]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@customtracer
|
| 19 |
+
def baseimg2html(p: str, base64img: str, gcp_key: str = "default") -> dict:
|
| 20 |
+
"""
|
| 21 |
+
input1 (text): OCRテキストやページの説明
|
| 22 |
+
input2 (text): base64エンコードされた画像
|
| 23 |
+
input3 (text): default
|
| 24 |
+
output1 (json): HTMLコンポーネントのリスト({"HTMLs": [...]})
|
| 25 |
+
|
| 26 |
+
NOTE: HF版は VLM ベース。Vertex AI / GCP は使用しない。
|
| 27 |
+
"""
|
| 28 |
+
from src.clients.llm_client import LLMClient
|
| 29 |
+
|
| 30 |
+
client = LLMClient()
|
| 31 |
+
|
| 32 |
+
system_prompt = (
|
| 33 |
+
"あなたはHTMLとCSSの達人です。"
|
| 34 |
+
"画像とテキスト情報を基に、適切なHTMLコンポーネントを生成してください。"
|
| 35 |
+
"各コンポーネントは完全に機能するHTMLとして生成してください。"
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
prompt = (
|
| 39 |
+
system_prompt + "\n\n[テキスト情報]\n" + str(p)
|
| 40 |
+
+ "\n\n画像のコンポーネントをHTMLリストとして出力してください。"
|
| 41 |
+
)
|
| 42 |
+
|
| 43 |
+
# Strip base64 data-URI prefix if present
|
| 44 |
+
if base64img and "," in base64img:
|
| 45 |
+
base64img = base64img.split(",", 1)[1]
|
| 46 |
+
|
| 47 |
+
result = client.call(
|
| 48 |
+
prompt=prompt,
|
| 49 |
+
schema=newHTMLs,
|
| 50 |
+
model="Qwen/Qwen2.5-VL-72B-Instruct",
|
| 51 |
+
images=[base64img] if base64img else None,
|
| 52 |
+
temperature=0.1,
|
| 53 |
+
)
|
| 54 |
+
return result.model_dump()
|
apis/baseimg2ocr.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
baseimg2ocr: base64画像をOCRしてテキストを抽出。
|
| 3 |
+
HF版: VLM (Qwen2.5-VL) を使用。Google Vision API は使用しない。
|
| 4 |
+
|
| 5 |
+
NOTE: 元の実装は Google Vision API を使用。精度が異なる場合がある。
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import json
|
| 9 |
+
import os
|
| 10 |
+
|
| 11 |
+
from src.utils.tracer import customtracer
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def _vlm_ocr(base64image: str, model: str = "Qwen/Qwen2.5-VL-7B-Instruct") -> str:
|
| 15 |
+
"""VLM でテキスト抽出(url2ocr 互換フォーマット)。"""
|
| 16 |
+
from src.clients.llm_client import LLMClient
|
| 17 |
+
from pydantic import BaseModel
|
| 18 |
+
from typing import List
|
| 19 |
+
|
| 20 |
+
class OcrEntry(BaseModel):
|
| 21 |
+
text: str
|
| 22 |
+
y: int
|
| 23 |
+
size: int
|
| 24 |
+
|
| 25 |
+
class OcrResult(BaseModel):
|
| 26 |
+
items: List[OcrEntry]
|
| 27 |
+
|
| 28 |
+
client = LLMClient()
|
| 29 |
+
result = client.call(
|
| 30 |
+
prompt=(
|
| 31 |
+
"Extract all visible text from this image. "
|
| 32 |
+
"For each text block, estimate its vertical position (y coordinate, 0=top) "
|
| 33 |
+
"and approximate font size in pixels. "
|
| 34 |
+
"Return results sorted by y position."
|
| 35 |
+
),
|
| 36 |
+
schema=OcrResult,
|
| 37 |
+
model=model,
|
| 38 |
+
images=[base64image],
|
| 39 |
+
temperature=0,
|
| 40 |
+
)
|
| 41 |
+
return json.dumps(
|
| 42 |
+
[{"text": e.text, "y": e.y, "size": e.size, "rect": []} for e in result.items],
|
| 43 |
+
ensure_ascii=False,
|
| 44 |
+
)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@customtracer
|
| 48 |
+
def baseimg2ocr(base64image: str, margin: int = 120) -> str:
|
| 49 |
+
"""
|
| 50 |
+
input1 (text): base64エンコードされた画像
|
| 51 |
+
input2 (text): 120
|
| 52 |
+
output1 (json): OCR結果
|
| 53 |
+
|
| 54 |
+
NOTE: HF版は VLM ベースOCR。Google Vision API は使用しない。
|
| 55 |
+
"""
|
| 56 |
+
return _vlm_ocr(base64image)
|
apis/baseimg2pagetype.py
ADDED
|
@@ -0,0 +1,222 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
from enum import Enum
|
| 4 |
+
from typing import List
|
| 5 |
+
|
| 6 |
+
from pydantic import BaseModel, Field
|
| 7 |
+
|
| 8 |
+
from src.utils.tracer import customtracer
|
| 9 |
+
|
| 10 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 11 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 12 |
+
from src.clients.llm_client import LLMClient
|
| 13 |
+
import json as _json
|
| 14 |
+
|
| 15 |
+
client = LLMClient()
|
| 16 |
+
system_prompt = None
|
| 17 |
+
user_text = ""
|
| 18 |
+
images = []
|
| 19 |
+
for msg in messages:
|
| 20 |
+
role = msg.get("role", "")
|
| 21 |
+
c = msg.get("content", "")
|
| 22 |
+
if role == "system":
|
| 23 |
+
if isinstance(c, str):
|
| 24 |
+
system_prompt = c
|
| 25 |
+
elif role == "user":
|
| 26 |
+
if isinstance(c, str):
|
| 27 |
+
user_text = c
|
| 28 |
+
elif isinstance(c, list):
|
| 29 |
+
for part in c:
|
| 30 |
+
if isinstance(part, dict):
|
| 31 |
+
if part.get("type") == "text":
|
| 32 |
+
user_text += part.get("text", "")
|
| 33 |
+
elif part.get("type") == "image_url":
|
| 34 |
+
url = part.get("image_url", {}).get("url", "")
|
| 35 |
+
if url.startswith("data:"):
|
| 36 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 37 |
+
else:
|
| 38 |
+
images.append(url)
|
| 39 |
+
|
| 40 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 41 |
+
result = client.call(
|
| 42 |
+
prompt=user_text,
|
| 43 |
+
schema=response_format,
|
| 44 |
+
model=model,
|
| 45 |
+
system_prompt=system_prompt,
|
| 46 |
+
images=images if images else None,
|
| 47 |
+
temperature=0,
|
| 48 |
+
)
|
| 49 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 50 |
+
else:
|
| 51 |
+
return client.call_raw(
|
| 52 |
+
prompt=user_text,
|
| 53 |
+
model=model,
|
| 54 |
+
system_prompt=system_prompt,
|
| 55 |
+
images=images if images else None,
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class PageKind(str, Enum):
    """Page kind (original Japanese comments were mojibake; translated)."""
    LP = "LP"  # landing page: single pitch with one conversion goal (inquiry, brochure request, ...)
    EC = "EC"  # EC-like site: browsable/comparable catalog structure (lists, detail pages, comparison)
    CORPORATE = "CORPORATE"  # corporate site: company info, recruiting, etc.
    MEDIA = "MEDIA"  # media/blog: article-centric content
    FORM = "FORM"  # form/application page centered on an input form
    OTHER = "OTHER"  # anything not covered above
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class EcPageType(str, Enum):
    """EC page sub-type (only meaningful when the page is classified as EC)."""
    TOP_OR_SPECIAL = "EC_TOP_OR_SPECIAL"  # EC top or special-feature page
    CATEGORY_LIST = "EC_CATEGORY_LIST"  # category listing page
    PRODUCT_DETAIL = "EC_PRODUCT_DETAIL"  # product detail page
    CART_OR_CHECKOUT = "EC_CART_OR_CHECKOUT"  # cart or checkout page
    OTHER_EC = "EC_OTHER"  # other EC-related page
    NOT_EC = "NOT_EC"  # not an EC page
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class PageScore(BaseModel):
    """Score for a single page kind."""
    kind: PageKind  # the page kind being scored
    # description text below is mojibake inherited from the source; it is part
    # of the JSON schema sent to the model, so it is preserved byte-for-byte.
    score: float = Field(ge=0.0, le=1.0, description="0.0、E.0のスコア")
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class PageClassification(BaseModel):
    """Full page-classification result returned by the model."""
    scores: List[PageScore]  # per-kind scores (not required to sum to 1.0)
    best_kind: PageKind  # the highest-scoring page kind
    is_ec: bool  # True when best_kind == EC
    is_lp: bool  # True when best_kind == LP
    ec_page_type: EcPageType  # EC sub-type; NOT_EC for non-EC pages
    reason: str  # short justification (written in Japanese by the model)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def _build_prompt(p: str = "") -> str:
    """Build the page-classification prompt.

    NOTE(review): the prompt body below is mojibake (broken Japanese
    encoding) carried over from the original source; it is sent to the model
    at runtime and is therefore preserved byte-for-byte.
    """
    # Enumerate candidate labels directly from the enums so prompt and schema
    # cannot drift apart.
    page_kinds = "\n".join([f"- {kind.value}: {kind.name}" for kind in PageKind])
    ec_page_types = "\n".join([f"- {pt.value}: {pt.name}" for pt in EcPageType if pt != EcPageType.NOT_EC])

    base_instruction = f"""画像から�Eージ種別を判定してください、E
【�Eージ種別候補!EageKind�E�、E
{page_kinds}
【ECペ�Eジ種別候補!EcPageType、ECの場合�Eみ�E�、E
{ec_page_types}
【EC判定�E重要基準、E
以下�E構造皁E��徴がある場合�E、ECとして高スコアを付与してください�E�E
- カチE��リ一覧ペ�Eジ�E�褁E��の啁E��・サービスが一覧表示されてぁE���E�E
- 啁E��・サービスの詳細ペ�EジへのリンクがあめE
- 褁E��の選択肢を比輁E��きる機�EがあめE
- フィルター・ソート機�EがあめE
- 価格・料��などの惁E��が表示されてぁE��
- 最終的なゴールが「購入」だけでなく「賁E��請求」「見学予紁E��「問ぁE��わせ」でも構いません
- 例:霊園検索サイト、不動産検索サイト、求人サイト、比輁E��イトなど
【LP判定�E基準、E
- 単一の啁E��・サービス・惁E��に焦点を当ててぁE��
- 明確なCTA�E�問ぁE��わせ・賁E��請求�Eタンなど�E�が1つまた�E少数
- 比輁E���EめE��チE��リ一覧がなぁE
【判定手頁E��E
1. 各PageKindにつぁE��0.0、E.0でスコアを付与!Ecores�E�E
2. 最も高いスコアのPageKindをbest_kindに設宁E
3. best_kind == "EC" の場吁E
- is_ec = True
- ec_page_type めEEC_TOP_OR_SPECIAL / EC_CATEGORY_LIST / EC_PRODUCT_DETAIL / EC_CART_OR_CHECKOUT / EC_OTHER から選抁E
4. best_kind == "LP" の場吁E
- is_lp = True
- ec_page_type = "NOT_EC"
5. reason に判定根拠を日本語で簡潔に記述
【補足惁E��、E
{p if p else "(補足惁E��なぁE"}
"""
    return base_instruction.strip()
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def ask_raw(base64img: str, openai_key: str, p: str = "") -> PageClassification:
    """Classify a page screenshot and return a normalized PageClassification.

    Args:
        base64img: base64-encoded PNG screenshot of the page.
        openai_key: kept for interface compatibility; the HF-backed LLMClient
            reads its credentials from the environment, so it is unused here.
        p: optional supplementary text folded into the prompt.

    Returns:
        PageClassification with is_ec / is_lp / ec_page_type made consistent
        with best_kind.
    """
    prompt = _build_prompt(p=p)

    # FIX: the previous call passed `model` both positionally (as an undefined
    # name) and as a keyword, and forwarded kwargs _ask_raw_hf does not accept
    # (messages=, temperature=, top_p=, ...), which raised at runtime. The
    # wrapper's signature is (messages, model, response_format).
    # NOTE: the Japanese system string is mojibake from the original source
    # and is preserved byte-for-byte because it is sent to the model.
    response = _ask_raw_hf(
        [
            {
                "role": "system",
                "content": "あなた�E優れたWEBアナリストで、Webペ�Eジの種別を正確に判定する専門家です。画像から�Eージの種類を判断し、各候補に0.0、E.0のスコアを付与してください。特に、ECサイト�E判定では、購入機�Eの有無よりも、カチE��リ一覧・啁E��比輁E�E褁E��選択肢などの構造皁E��徴を重視してください。賁E��請求や見学予紁E��ゴールでも、構造皁E��EC皁E��サイト�EECとして判定してください、E",
            },
            {
                "role": "user",
                "content": [
                    {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{base64img}"}},
                    {"type": "text", "text": prompt},
                ],
            },
        ],
        model="meta-llama/Llama-3.3-70B-Instruct",
        response_format=PageClassification,
    )

    # FIX: _ask_raw_hf returns a JSON *string* for schema calls; the previous
    # code assigned it directly to a PageClassification variable. Parse it.
    classification = PageClassification.model_validate_json(response)

    # Normalize the convenience flags so they always agree with best_kind
    # (defensive: the model may return inconsistent values).
    if classification.best_kind == PageKind.EC:
        classification.is_ec = True
        if classification.ec_page_type == EcPageType.NOT_EC:
            # Classified as EC but sub-type says NOT_EC: default to OTHER_EC.
            classification.ec_page_type = EcPageType.OTHER_EC
    elif classification.best_kind == PageKind.LP:
        classification.is_lp = True
        if classification.ec_page_type != EcPageType.NOT_EC:
            classification.ec_page_type = EcPageType.NOT_EC
    else:
        # Neither EC nor LP.
        classification.is_ec = False
        classification.is_lp = False
        if classification.ec_page_type != EcPageType.NOT_EC:
            classification.ec_page_type = EcPageType.NOT_EC

    return classification
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@customtracer
def baseimg2pagetype(base64img, openai_key = "default", p = ""):
    """
    input1 (text): base64img - base64-encoded page screenshot
    input2 (text): default
    input3 (text): optional supplementary text
    output1 (json): page-type classification
    output2 (text): human_readable_summary
    """
    # Resolve the API key: "default"/empty falls back to the environment.
    if openai_key == "default" or not openai_key:
        openai_key = os.environ.get('OPENAI_KEY', '')

    print(f"baseimg2pagetype openai_key:", openai_key[-4:])

    cls = ask_raw(base64img=base64img, openai_key=openai_key, p=p)
    payload = cls.model_dump()

    # Human-readable summary: headline fields, then scores sorted high→low.
    ranked = sorted(cls.scores, key=lambda entry: entry.score, reverse=True)
    lines = [
        f"best_kind: {cls.best_kind.value}",
        f"is_ec: {cls.is_ec}",
        f"is_lp: {cls.is_lp}",
        f"ec_page_type: {cls.ec_page_type.value}",
        "",
        "scores:",
    ]
    lines += [f" - {entry.kind.value}: {entry.score:.3f}" for entry in ranked]
    lines += ["", f"reason: {cls.reason}"]

    return payload, "\n".join(lines)
|
apis/baseimg2score.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import json
|
| 3 |
+
import logging
|
| 4 |
+
from pydantic import BaseModel
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
import pytz
|
| 7 |
+
|
| 8 |
+
from src.clients.llm_client import LLMClient
|
| 9 |
+
from src.utils.tracer import customtracer
|
| 10 |
+
|
| 11 |
+
# Module logger: plain-message stream handler, attached exactly once so
# repeated imports don't duplicate output.
logger = logging.getLogger("baseimg2score")
if not logger.handlers:
    _stream_handler = logging.StreamHandler()
    _stream_handler.setFormatter(logging.Formatter("%(message)s"))
    logger.addHandler(_stream_handler)
    logger.setLevel(logging.INFO)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class Answer(BaseModel):
    """Structured grading result returned by the LLM."""
    citation: str  # quoted passage the score refers to
    suggestion: str  # concrete advice (or praise when full marks)
    score: int  # numeric grade
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@customtracer
def baseimg2score(
    p,
    openai_key,
    image64=None,
    model="meta-llama/Llama-3.3-70B-Instruct",
    gemini_key=None,
):
    """
    Grade content against the supplied criteria via the LLM and return a
    citation/suggestion/score dict (Answer.model_dump()).

    Framework I/O metadata preserved from the original (partially mojibake):
    input1 (text): 13: ※金融犯罪にご注愁E手口はこちら、E
    input2 (text): default
    input3 (text):
    input4 (text): gpt-4o
    input5 (text): default
    output1 (json): 頁E��
    """
    selected_model = model if model else "meta-llama/Llama-3.3-70B-Instruct"

    # Key resolution:
    # 1. Web-UI calls may omit keys (fall back to environment variables).
    # 2. API calls must supply the key explicitly.
    if selected_model and "gemini" in selected_model.lower():
        if gemini_key and gemini_key != "default":
            api_key = gemini_key
        else:
            api_key = os.environ.get('GEMINI_KEY')
        client = LLMClient(google_api_key=api_key)
    else:
        if openai_key and openai_key != "default":
            api_key = openai_key
        else:
            api_key = os.environ.get('OPENAI_KEY')
        client = LLMClient(openai_key=api_key)

    # NOTE(review): system prompt below is mojibake inherited from the
    # original source; preserved byte-for-byte because it is sent at runtime.
    system_prompt = """与えられた情報と質問に対して、採点基準を参�Eして以下を回答します、E
citation:当該箁E��の引用
suggestion:満点でなぁE��合�E満点になるよぁE��具体的な持E��。満点の場合�E優れた点を�E体的な叙述
"""

    images = [image64] if image64 else None
    # Last 4 chars of the image payload, used only for log correlation.
    img_flag = image64[-4:] if image64 else "none"

    dt = datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%m/%d %H:%M")

    try:
        result = client.call(
            prompt=p,
            schema=Answer,
            model=selected_model,
            system_prompt=system_prompt,
            images=images,
            temperature=0,
        )

        combined = (
            f"[baseimg2score] (img:{img_flag}) "
            f"({dt}) mdl:{selected_model}"
        )
        logger.info(combined)

        return result.model_dump()

    except Exception as e:
        # Log the failure with the same correlation prefix, then re-raise so
        # the caller still sees the original exception.
        err_msg = (
            f"[baseimg2score] (img:{img_flag}) "
            f"({dt}) ERROR: {type(e).__name__}: {str(e)}"
        )
        logger.error(err_msg)
        raise
|
apis/ecinfo2winningrate.py
ADDED
|
@@ -0,0 +1,233 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
from pydantic import BaseModel, conint
|
| 5 |
+
from enum import Enum
|
| 6 |
+
from typing import Any, Dict, List
|
| 7 |
+
from src.utils.tracer import customtracer
|
| 8 |
+
|
| 9 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Collapses an OpenAI chat ``messages`` list into one system string, one
    concatenated user text, and a list of images, then dispatches either a
    structured call (when ``response_format`` is a pydantic class) or a raw
    completion. Returns a JSON string or raw text respectively.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    client = LLMClient()
    system_prompt = None
    user_text = ""
    images = []

    def _collect_image(url):
        # data: URLs carry the base64 payload after the first comma.
        if url.startswith("data:"):
            images.append(url.split(",", 1)[1] if "," in url else url)
        else:
            images.append(url)

    for msg in messages:
        role, content = msg.get("role", ""), msg.get("content", "")
        if role == "system" and isinstance(content, str):
            system_prompt = content
        elif role == "user":
            if isinstance(content, str):
                user_text = content
            elif isinstance(content, list):
                for part in content:
                    if not isinstance(part, dict):
                        continue
                    if part.get("type") == "text":
                        user_text += part.get("text", "")
                    elif part.get("type") == "image_url":
                        _collect_image(part.get("image_url", {}).get("url", ""))

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        structured = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
            temperature=0,
        )
        return _json.dumps(structured.model_dump(), ensure_ascii=False)
    return client.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=system_prompt,
        images=images if images else None,
    )
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class reason(BaseModel):
    """One factor supporting the win/lose prediction."""
    choice: str  # which pattern/element the reason refers to
    content_description: str  # description of the relevant content
    contribution: int  # how strongly this factor contributes to the outcome
    reason: str  # the rationale itself
    recommend: str  # concrete recommendation derived from the factor
| 65 |
+
class win_or_lose(str, Enum):
    """Win/lose label. Values are Japanese labels; they are mojibake in this
    rendering and are preserved byte-for-byte (they are runtime values)."""
    win = "勝ち"
    lose = "負ぁE
|
| 68 |
+
|
| 69 |
+
class testpattern_win_or_lose(BaseModel):
    """Prediction for the test pattern: outcome, confidence, and reasons."""
    testpattern: win_or_lose  # predicted outcome for the test pattern
    possibility: int  # confidence that the prediction is correct (0-100)
    reasons: list[reason]  # supporting factors
| 74 |
+
def extract_text_from_items(items: List[Dict[str, Any]]) -> str:
    """Join the "text" fields of the given item dicts with ", ".

    Non-dict items and dicts without a "text" key are skipped. The fallback
    value for empty input is a Japanese "none" marker that is mojibake in
    this rendering; preserved byte-for-byte (it is a runtime string).
    """
    if not items:
        return "なぁE
    texts = []
    for item in items:
        if isinstance(item, dict) and "text" in item:
            texts.append(item["text"])
    return ", ".join(texts) if texts else "なぁE
|
| 83 |
+
|
| 84 |
+
def ecinfo_to_text(ecinfo_data: Dict[str, Any]) -> str:
    """Flatten an ecinfo dict into a single descriptive text string.

    Mirrors the FV variant's format ("key: value. key: value. ...").
    Every known component key is emitted; empty components fall back to a
    "none" marker via extract_text_from_items().
    NOTE(review): all Japanese literals below are mojibake inherited from the
    original source and are preserved byte-for-byte — they are dictionary
    keys that must match the upstream data exactly.
    """
    parts = []

    # --- meta information ---
    meta = ecinfo_data.get("メタ", {})
    if meta:
        parts.append(f"会社吁E {meta.get('会社吁E, 'なぁE)}")
        parts.append(f"業畁E {meta.get('業畁E, 'なぁE)}")
        parts.append(f"中刁E��E {meta.get('中刁E��E, 'なぁE)}")
        parts.append(f"サービス: {meta.get('サービス', 'なぁE)}")
        parts.append(f"啁E��: {meta.get('啁E��', 'なぁE)}")
        parts.append(f"タイトル: {meta.get('タイトル', 'なぁE)}")
        if meta.get("訴求テーチE):
            # Appeal themes may be a list (joined) or a plain string.
            themes = ", ".join(meta["訴求テーチE]) if isinstance(meta["訴求テーチE], list) else meta["訴求テーチE]
            parts.append(f"訴求テーチE {themes}")
        else:
            parts.append("訴求テーチE なぁE)

    # --- page-wide common components ---
    parts.append(f"ロゴ: {extract_text_from_items(ecinfo_data.get('ロゴ', []))}")
    parts.append(f"グローバル検索バ�E: {extract_text_from_items(ecinfo_data.get('グローバル検索バ�E', []))}")
    parts.append(f"ハンバ�Eガーメニューアイコン: {extract_text_from_items(ecinfo_data.get('ハンバ�Eガーメニューアイコン', []))}")
    parts.append(f"カートアイコン: {extract_text_from_items(ecinfo_data.get('カートアイコン', []))}")
    parts.append(f"ユーザーメニュー: {extract_text_from_items(ecinfo_data.get('ユーザーメニュー', []))}")

    # --- navigation ---
    parts.append(f"ブレチE��クラム: {extract_text_from_items(ecinfo_data.get('ブレチE��クラム', []))}")
    parts.append(f"ペ�Eジネ�Eション: {extract_text_from_items(ecinfo_data.get('ペ�Eジネ�Eション', []))}")
    parts.append(f"タブ�E替: {extract_text_from_items(ecinfo_data.get('タブ�E替', []))}")

    # --- top page ---
    parts.append(f"メインビジュアル: {extract_text_from_items(ecinfo_data.get('メインビジュアル', []))}")
    parts.append(f"プロモーションバナー: {extract_text_from_items(ecinfo_data.get('プロモーションバナー', []))}")
    parts.append(f"カチE��リカーチE {extract_text_from_items(ecinfo_data.get('カチE��リカーチE, []))}")

    # --- product listing page ---
    parts.append(f"啁E��一覧: {extract_text_from_items(ecinfo_data.get('啁E��一覧', []))}")
    parts.append(f"フィルタ: {extract_text_from_items(ecinfo_data.get('フィルタ', []))}")
    parts.append(f"ソーチE {extract_text_from_items(ecinfo_data.get('ソーチE, []))}")
    parts.append(f"ペ�Eジャー: {extract_text_from_items(ecinfo_data.get('ペ�Eジャー', []))}")
    parts.append(f"クイチE��ビューアイコン: {extract_text_from_items(ecinfo_data.get('クイチE��ビューアイコン', []))}")

    # --- product detail page ---
    parts.append(f"啁E��吁E {extract_text_from_items(ecinfo_data.get('啁E��吁E, []))}")
    parts.append(f"価格: {extract_text_from_items(ecinfo_data.get('価格', []))}")
    parts.append(f"ブランチE {extract_text_from_items(ecinfo_data.get('ブランチE, []))}")
    parts.append(f"サムネイル: {extract_text_from_items(ecinfo_data.get('サムネイル', []))}")
    parts.append(f"画像ギャラリー: {extract_text_from_items(ecinfo_data.get('画像ギャラリー', []))}")
    parts.append(f"カラースウォチE��: {extract_text_from_items(ecinfo_data.get('カラースウォチE��', []))}")
    parts.append(f"サイズセレクタ: {extract_text_from_items(ecinfo_data.get('サイズセレクタ', []))}")
    parts.append(f"在庫スチE�Eタス: {extract_text_from_items(ecinfo_data.get('在庫スチE�Eタス', []))}")
    parts.append(f"配送情報: {extract_text_from_items(ecinfo_data.get('配送情報', []))}")
    parts.append(f"ボタン_カート追加: {extract_text_from_items(ecinfo_data.get('ボタン_カート追加', []))}")
    parts.append(f"ボタン_今すぐ購入: {extract_text_from_items(ecinfo_data.get('ボタン_今すぐ購入', []))}")
    parts.append(f"レビューサマリー: {extract_text_from_items(ecinfo_data.get('レビューサマリー', []))}")
    parts.append(f"レビューボタン: {extract_text_from_items(ecinfo_data.get('レビューボタン', []))}")
    parts.append(f"QnAリンク: {extract_text_from_items(ecinfo_data.get('QnAリンク', []))}")
    parts.append(f"バッジタグ: {extract_text_from_items(ecinfo_data.get('バッジタグ', []))}")
    parts.append(f"関連啁E��カルーセル: {extract_text_from_items(ecinfo_data.get('関連啁E��カルーセル', []))}")

    # --- cart page ---
    parts.append(f"カート商品リスチE {extract_text_from_items(ecinfo_data.get('カート商品リスチE, []))}")
    parts.append(f"数量セレクタ: {extract_text_from_items(ecinfo_data.get('数量セレクタ', []))}")
    parts.append(f"削除アイコン: {extract_text_from_items(ecinfo_data.get('削除アイコン', []))}")
    parts.append(f"クーポン入劁E {extract_text_from_items(ecinfo_data.get('クーポン入劁E, []))}")
    parts.append(f"注斁E��計サマリー: {extract_text_from_items(ecinfo_data.get('注斁E��計サマリー', []))}")
    parts.append(f"チェチE��アウト�Eタン: {extract_text_from_items(ecinfo_data.get('チェチE��アウト�Eタン', []))}")

    # --- shared footer area ---
    parts.append(f"フッターリンク: {extract_text_from_items(ecinfo_data.get('フッターリンク', []))}")
    parts.append(f"SNSアイコン: {extract_text_from_items(ecinfo_data.get('SNSアイコン', []))}")
    parts.append(f"カスタマ�Eサポ�Eトリンク: {extract_text_from_items(ecinfo_data.get('カスタマ�Eサポ�Eトリンク', []))}")

    return "、E.join(parts) + "、E if parts else "惁E��なぁE
|
| 163 |
+
|
| 164 |
+
def parse_ecinfo_input(ecinfo_input: str) -> str:
    """Normalize an ecinfo payload to flat text.

    JSON objects/arrays are parsed and flattened via ecinfo_to_text();
    anything else — including malformed JSON — is returned as-is (stripped).
    """
    stripped = ecinfo_input.strip()
    # Only strings that look like JSON ("{" / "[" prefix) are parsed at all.
    if not (stripped.startswith('{') or stripped.startswith('[')):
        return stripped
    try:
        parsed = json.loads(stripped)
    except json.JSONDecodeError:
        # Malformed JSON: treat the input as already-formatted text.
        return stripped
    return ecinfo_to_text(parsed)
|
| 181 |
+
|
| 182 |
+
@customtracer
def ecinfo2winningrate(ecinfo1, ecinfo2):
    """Predict whether the test-pattern EC page beats the original.

    input1 (text): original page ecinfo (JSON or pre-flattened text)
    input2 (text): test-pattern page ecinfo (JSON or pre-flattened text)
    output1 (json): predicted win/lose of the test pattern plus reasons
        (testpattern_win_or_lose schema, serialized to a JSON string).
    """
    print("ecinfo2winningrate")

    # Normalize both inputs: JSON payloads are flattened to text.
    ecinfo1_text = parse_ecinfo_input(ecinfo1)
    ecinfo2_text = parse_ecinfo_input(ecinfo2)

    # NOTE: the Japanese prompt strings below are mojibake from the original
    # source; preserved byte-for-byte because they are sent to the model.
    p = "以下に2つのECサイト�Eージの冁E��を�E挙します。テストパターンの勝敗を予想し、勝敗�E琁E��めEつ述べてください、En\n#オリジナル\n" + ecinfo1_text + "\n\n#チE��トパターン\n" + ecinfo2_text

    # FIX: the previous version was unterminated here (""" ... "",).
    system_text = """WEBペ�EジのECサイト情報を比輁E��て、テストパターンの勝敗を予測し、勝因を記述します。以下�E注意に従い記載をしてください、E
STEP1: possibilityには、その勝敗予測が合ってぁE��確玁E��0~100の間で入れてください。�E容が同一の場合�EpossibilityめEにしてください。勝因に特筁E��べきものがあれ�E、contributionを高く設定してください、E
STEP2: contributionに応じて、reasonの強さを「優れてぁE��、E「すると良ぁE��E「可能性がある」�Eように表現を変えてください、E
STEP3: オリジナルと差刁E��なければ、理由には特に何も書かなぁE��ください、E
STEP4: チE��トパターンに値がなぁE��合�E、contributionめEにして何も提案しなくてよいです、E
STEP5: 斁E���E、だ・である調で統一してください、E"""

    # FIX: the previous call had a stray ')' after LLMClient(), passed
    # `model` both positionally (undefined) and by keyword, and forwarded
    # kwargs _ask_raw_hf does not accept (messages=, temperature=, ...).
    # The system message is passed as a plain string because the wrapper
    # ignores list-shaped system content.
    response = _ask_raw_hf(
        [
            {"role": "system", "content": system_text},
            {"role": "user", "content": [{"type": "text", "text": p}]},
        ],
        model="meta-llama/Llama-3.1-8B-Instruct",
        response_format=testpattern_win_or_lose,
    )
    return response
|
| 233 |
+
|
apis/format2cninfo.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
from enum import Enum
|
| 7 |
+
import base64
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from PIL import Image
|
| 10 |
+
from typing import List, Optional
|
| 11 |
+
from functools import cache
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
import pytz
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
from src.models.common import model
|
| 16 |
+
|
| 17 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Extracts the system string, concatenated user text, and image list from
    OpenAI-style ``messages``, then performs a structured call (JSON string
    returned) when ``response_format`` is a pydantic class, or a raw
    completion otherwise.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    system_prompt, user_text, images = None, "", []
    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system" and isinstance(content, str):
            system_prompt = content
            continue
        if role != "user":
            continue
        if isinstance(content, str):
            user_text = content
            continue
        if not isinstance(content, list):
            continue
        for part in content:
            if not isinstance(part, dict):
                continue
            part_type = part.get("type")
            if part_type == "text":
                user_text += part.get("text", "")
            elif part_type == "image_url":
                url = part.get("image_url", {}).get("url", "")
                if url.startswith("data:") and "," in url:
                    # Keep only the base64 payload of a data: URL.
                    images.append(url.split(",", 1)[1])
                else:
                    images.append(url)

    client = LLMClient()
    if response_format is not None and hasattr(response_format, "model_json_schema"):
        parsed = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
            temperature=0,
        )
        return _json.dumps(parsed.model_dump(), ensure_ascii=False)
    return client.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=system_prompt,
        images=images if images else None,
    )
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# UI element categories a component can map to. Values are Japanese labels;
# they are mojibake in this rendering and are preserved byte-for-byte
# (they are runtime values returned in the structured response).
class UIoption(str, Enum):
    element1 = "バナー/動画"
    element2 = "CTA"
    element3 = "チE��スチE
    element4 = "フォーム"

class Component(BaseModel):
    """One content component at three granularity levels plus its UI element."""
    component_large: str  # coarse grouping
    component_middle: str  # mid-level grouping
    component_small: list[str]  # finest-grained items
    UIelement: UIoption  # which UI element renders this component

class CNinfo(BaseModel):
    """Top-level structured response: the full component list."""
    components: list[Component]
|
| 80 |
+
|
| 81 |
+
def get_openai_request(messages, format):
    """Send pre-built OpenAI-style messages through the HF compatibility wrapper.

    Args:
        messages: OpenAI chat-completion style message list.
        format: pydantic model class used as the structured-response schema.

    Returns:
        str: JSON-encoded structured response from the model.
    """
    # FIX: the previous body passed `model` both positionally and by keyword,
    # referenced an undefined variable `p` instead of forwarding `messages`,
    # forwarded kwargs _ask_raw_hf does not accept (top_p, penalties, ...),
    # and created an unused LLMClient instance.
    return _ask_raw_hf(
        messages,
        model="meta-llama/Llama-3.3-70B-Instruct",
        response_format=format,
    )
|
| 93 |
+
|
| 94 |
+
@customtracer
def format2cninfo(p, openai_key=os.environ.get('OPENAI_KEY')):
    """Generate structured content components (CNinfo) from formatted appeal data.

    input1 (text): formatted self/competitor appeal data (free text)
    input2 (text): default
    output1 (json): structured CNinfo component list
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)

    # "default" means: fall back to the key stored in the environment.
    # NOTE(review): if OPENAI_KEY is unset this assigns None into os.environ
    # and raises TypeError — behavior kept as-is; confirm before changing.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key

    # FIX: the system string below was an unterminated triple-quoted literal
    # (""" ... "",) which made the module unimportable; the mojibake content
    # itself is preserved byte-for-byte.
    messages = [
        {
            "role": "system",
            "content": """提供したフォーマットデータから、忁E��なコンチE��チE�E要素を生成してください、E""",
        },
        {
            "role": "user",
            "content": [{"type": "text", "text": p}]
        },
    ]

    return get_openai_request(messages, CNinfo)
|
apis/format2cninfos.py
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
from enum import Enum
|
| 7 |
+
import base64
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from PIL import Image
|
| 10 |
+
from typing import List, Optional
|
| 11 |
+
from functools import cache
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
import pytz
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
from src.models.common import model
|
| 16 |
+
|
| 17 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Splits OpenAI-style ``messages`` into a system prompt, the concatenated
    user text, and any images, then issues either a structured call
    (pydantic ``response_format`` → JSON string) or a raw completion.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    client = LLMClient()
    system_prompt = None
    user_text = ""
    images = []

    def _collect_image(url):
        # data: URLs carry the base64 payload after the first comma.
        if url.startswith("data:"):
            images.append(url.split(",", 1)[1] if "," in url else url)
        else:
            images.append(url)

    for entry in messages:
        role, content = entry.get("role", ""), entry.get("content", "")
        if role == "system" and isinstance(content, str):
            system_prompt = content
        elif role == "user":
            if isinstance(content, str):
                user_text = content
            elif isinstance(content, list):
                for part in content:
                    if not isinstance(part, dict):
                        continue
                    if part.get("type") == "text":
                        user_text += part.get("text", "")
                    elif part.get("type") == "image_url":
                        _collect_image(part.get("image_url", {}).get("url", ""))

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        structured = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
            temperature=0,
        )
        return _json.dumps(structured.model_dump(), ensure_ascii=False)
    return client.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=system_prompt,
        images=images if images else None,
    )
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
"""
|
| 67 |
+
CN(コンチE��チE用の褁E��バリアント生成API�E�Eormat2cninfo.pyに依存しなぁE��立実裁E��E
|
| 68 |
+
"""
|
| 69 |
+
|
| 70 |
+
# スキーマ定義�E�Eormat2cninfo.pyから独立!E
|
| 71 |
+
class UIoption(str, Enum):
|
| 72 |
+
element1 = "バナー/動画"
|
| 73 |
+
element2 = "CTA"
|
| 74 |
+
element3 = "チE��スチE
|
| 75 |
+
element4 = "フォーム"
|
| 76 |
+
|
| 77 |
+
class Component(BaseModel):
    # One generated content component, classified at three granularities
    # plus the UI element type it maps to.
    component_large: str
    component_middle: str
    component_small: list[str]
    UIelement: UIoption
|
| 82 |
+
|
| 83 |
+
class CNinfo(BaseModel):
    # Structured-output schema: the full set of generated components.
    components: list[Component]
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def get_openai_request(messages, format, n=1):
    """Generate n candidates and always return them as a list.

    Args:
        messages: OpenAI-style message list (system/user dicts).
        format: pydantic model used as the structured response format.
        n: number of candidates to generate (default 1).

    Returns:
        list[str]: n JSON strings, one per candidate (length 1 when n=1).
    """
    # The HF wrapper has no native n-sampling, so issue n sequential calls.
    # NOTE(review): _ask_raw_hf pins temperature to 0 for structured output,
    # so the n candidates may be identical — confirm whether diversity matters.
    # `model` is the shared default imported from src.models.common.
    return [_ask_raw_hf(messages, model, response_format=format) for _ in range(n)]
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@customtracer
def format2cninfos(p, openai_key=os.environ.get('OPENAI_KEY'), n=1):
    """
    Generate multiple CN (content) variants from format data.

    input1 (text): prompt text
    input2 (text): default
    input3 (number): 1
    output1 (json): CNinfo variants list
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__, f"n={n}")

    # "default" (or empty) means: fall back to the OPENAI_KEY env var.
    if openai_key == "default" or not openai_key:
        openai_key = os.environ.get('OPENAI_KEY', '')

    if openai_key:
        os.environ['OPENAI_API_KEY'] = openai_key

    # Coerce n to an int and clamp it to the supported range [1, 10].
    try:
        n = int(n)
        if n < 1:
            print(f"Warning: n={n} is invalid, using n=1")
            n = 1
        elif n > 10:
            print(f"Warning: n={n} is too large, capping at 10")
            n = 10
    except (TypeError, ValueError):
        print(f"Warning: n={n} is invalid, using n=1")
        n = 1

    # NOTE(review): the system-prompt literal below appears mojibake-garbled in
    # this view — verify against the original file's encoding before editing.
    messages=[
        {
            "role": "system",
            "content": """提供したフォーマットデータから、忁E��なコンチE��チE�E要素を生成してください、E"",
        },
        {
            "role": "user",
            "content": [{"type": "text", "text":p}]
        },
    ]

    # get_openai_request always returns a list (even for n=1).
    result = get_openai_request(messages, CNinfo, n=n)

    print(f"Generated {len(result)} CN variants")

    # Return the variant list serialized as one JSON string.
    return json.dumps(result, ensure_ascii=False)
|
apis/format2ecinfo.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
import numpy as np
|
| 10 |
+
from enum import Enum
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
import pytz
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Flattens an OpenAI chat ``messages`` list (system/user roles, string or
    multi-part user content) into the prompt/system/images form LLMClient
    expects, and returns the completion as a string.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    client = LLMClient()
    system_prompt = None
    user_text = ""
    images = []
    # Flatten the OpenAI-style message list into a single prompt + images.
    for msg in messages:
        role = msg.get("role", "")
        c = msg.get("content", "")
        if role == "system":
            if isinstance(c, str):
                system_prompt = c
        elif role == "user":
            if isinstance(c, str):
                user_text = c  # NOTE: a later string user message replaces earlier text
            elif isinstance(c, list):
                for part in c:
                    if isinstance(part, dict):
                        if part.get("type") == "text":
                            user_text += part.get("text", "")
                        elif part.get("type") == "image_url":
                            url = part.get("image_url", {}).get("url", "")
                            if url.startswith("data:"):
                                # data: URI — keep only the base64 payload after the comma.
                                images.append(url.split(",", 1)[1] if "," in url else url)
                            else:
                                images.append(url)

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        # Structured output: pydantic schema supplied — validate, serialize to JSON.
        result = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
            temperature=0,
        )
        return _json.dumps(result.model_dump(), ensure_ascii=False)
    else:
        # Free-form output: return the raw completion text.
        return client.call_raw(
            prompt=user_text,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
        )
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
"""
|
| 65 |
+
EC用のバリアント生成API
|
| 66 |
+
baseimg2ecinfo_rect.pyのpageInfo構造に準拠
|
| 67 |
+
"""
|
| 68 |
+
|
| 69 |
+
class Category(str, Enum):
|
| 70 |
+
ビジネス = "ビジネス�E�EaaS・法人支援�E�E
|
| 71 |
+
ヘルスケア = "ヘルスケア�E�美容・健康�E�E
|
| 72 |
+
ヒューマンリソース = "ヒューマンリソース�E�求人・紹介!E
|
| 73 |
+
コマ�Eス = "コマ�Eス�E�趣味・食品・衣類!E
|
| 74 |
+
ファイナンス = "ファイナンス�E���融�E保険・不動産�E�E
|
| 75 |
+
インフラ = "インフラ�E�電気�E通信・ガス・住屁E��E
|
| 76 |
+
ライフイベンチE= "ライフイベント(教育・結婚�E相諁E��E
|
| 77 |
+
|
| 78 |
+
class CategoryMiddle(str, Enum):
|
| 79 |
+
# ビジネス
|
| 80 |
+
ITソフトウェア = "IT・ソフトウェア"
|
| 81 |
+
マ�Eケ支援コンサル = "マ�Eケ支援・コンサル"
|
| 82 |
+
オフィス機器用品E= "オフィス・機器用品E
|
| 83 |
+
|
| 84 |
+
# ヘルスケア
|
| 85 |
+
健康食品器具 = "健康食品・器具"
|
| 86 |
+
美容医療クリニック = "美容・医療クリニック"
|
| 87 |
+
美容コスメ = "美容コスメ"
|
| 88 |
+
フィチE��ネスジム = "フィチE��ネスジム"
|
| 89 |
+
|
| 90 |
+
# ヒューマンリソース
|
| 91 |
+
求人惁E�� = "求人惁E��"
|
| 92 |
+
人材紹仁E= "人材紹仁E
|
| 93 |
+
人材派遣 = "人材派遣"
|
| 94 |
+
|
| 95 |
+
# コマ�Eス
|
| 96 |
+
動画アニメゲーム = "動画・アニメ・ゲーム"
|
| 97 |
+
リユースリサイクル = "リユース・リサイクル"
|
| 98 |
+
旁E���EチE��レジャー = "旁E���Eホテル・レジャー"
|
| 99 |
+
趣味交隁E= "趣味・交隁E
|
| 100 |
+
新聞雑誌メチE��ア = "新聞�E雑誌�E惁E��メチE��ア"
|
| 101 |
+
自動車レンタカー用品E= "自動車�Eレンタカー・用品E
|
| 102 |
+
飲料食品生活用品E= "飲料食品・生活用品E
|
| 103 |
+
家電パソコン = "家電・パソコン"
|
| 104 |
+
ファチE��ョン = "ファチE��ョン"
|
| 105 |
+
|
| 106 |
+
# ファイナンス
|
| 107 |
+
不動産 = "不動産"
|
| 108 |
+
保険 = "保険"
|
| 109 |
+
ローン = "ローン"
|
| 110 |
+
クレカ電子決渁E= "クレカ・電子決渁E
|
| 111 |
+
証券FX先物 = "証券・FX・先物"
|
| 112 |
+
銀衁E= "銀衁E
|
| 113 |
+
|
| 114 |
+
# インフラ
|
| 115 |
+
ネット通信サービス = "ネット�E通信サービス"
|
| 116 |
+
電気ガス = "電気�Eガス"
|
| 117 |
+
住宁E��備リフォーム = "住宁E��備�Eリフォーム"
|
| 118 |
+
|
| 119 |
+
# ライフイベンチE
|
| 120 |
+
士業相諁E= "士業・相諁E
|
| 121 |
+
学習スクール = "学習�Eスクール"
|
| 122 |
+
結婚�E会い = "結婚�E出会い"
|
| 123 |
+
葬儀墓地 = "葬儀・墓地"
|
| 124 |
+
引越し介護 = "引越し・介護"
|
| 125 |
+
|
| 126 |
+
class Meta(BaseModel):
|
| 127 |
+
会社吁E str
|
| 128 |
+
業畁E Category
|
| 129 |
+
中刁E��E CategoryMiddle
|
| 130 |
+
サービス: str
|
| 131 |
+
啁E��: str
|
| 132 |
+
タイトル: str
|
| 133 |
+
訴求テーチE list[str]
|
| 134 |
+
|
| 135 |
+
class cood(BaseModel):
    # A single 2-D coordinate (presumably page-space pixels — TODO confirm).
    x: int
    y: int
|
| 138 |
+
|
| 139 |
+
class str_with_rect(BaseModel):
    # A page element: visible text, HTML markup, and its bounding region.
    text: str  # visible text content
    html: str  # HTML snippet for the element
    rect: list[cood]  # bounding region as a list of corner coordinates
|
| 143 |
+
|
| 144 |
+
class pageInfo(BaseModel):
|
| 145 |
+
# ペ�Eジ共送E
|
| 146 |
+
メタ: Meta
|
| 147 |
+
ロゴ: list[str_with_rect]
|
| 148 |
+
グローバル検索バ�E: list[str_with_rect]
|
| 149 |
+
ハンバ�Eガーメニューアイコン: list[str_with_rect]
|
| 150 |
+
カートアイコン: list[str_with_rect]
|
| 151 |
+
ユーザーメニュー: list[str_with_rect]
|
| 152 |
+
|
| 153 |
+
# ナビゲーション
|
| 154 |
+
ブレチE��クラム: list[str_with_rect]
|
| 155 |
+
ペ�Eジネ�Eション: list[str_with_rect]
|
| 156 |
+
タブ�E替: list[str_with_rect]
|
| 157 |
+
|
| 158 |
+
# トップ�Eージ
|
| 159 |
+
メインビジュアル: list[str_with_rect]
|
| 160 |
+
プロモーションバナー: list[str_with_rect]
|
| 161 |
+
カチE��リカーチE list[str_with_rect]
|
| 162 |
+
|
| 163 |
+
# 啁E��一覧ペ�Eジ
|
| 164 |
+
啁E��一覧: list[str_with_rect]
|
| 165 |
+
フィルタ: list[str_with_rect]
|
| 166 |
+
ソーチE list[str_with_rect]
|
| 167 |
+
ペ�Eジャー: list[str_with_rect]
|
| 168 |
+
クイチE��ビューアイコン: list[str_with_rect]
|
| 169 |
+
|
| 170 |
+
# 啁E��詳細ペ�Eジ
|
| 171 |
+
啁E��吁E list[str_with_rect]
|
| 172 |
+
価格: list[str_with_rect]
|
| 173 |
+
ブランチE list[str_with_rect]
|
| 174 |
+
サムネイル: list[str_with_rect]
|
| 175 |
+
画像ギャラリー: list[str_with_rect]
|
| 176 |
+
カラースウォチE��: list[str_with_rect]
|
| 177 |
+
サイズセレクタ: list[str_with_rect]
|
| 178 |
+
在庫スチE�Eタス: list[str_with_rect]
|
| 179 |
+
配送情報: list[str_with_rect]
|
| 180 |
+
ボタン_カート追加: list[str_with_rect]
|
| 181 |
+
ボタン_今すぐ購入: list[str_with_rect]
|
| 182 |
+
レビューサマリー: list[str_with_rect]
|
| 183 |
+
レビューボタン: list[str_with_rect]
|
| 184 |
+
QnAリンク: list[str_with_rect]
|
| 185 |
+
バッジタグ: list[str_with_rect]
|
| 186 |
+
関連啁E��カルーセル: list[str_with_rect]
|
| 187 |
+
|
| 188 |
+
# カート�Eージ
|
| 189 |
+
カート商品リスチE list[str_with_rect]
|
| 190 |
+
数量セレクタ: list[str_with_rect]
|
| 191 |
+
削除アイコン: list[str_with_rect]
|
| 192 |
+
クーポン入劁E list[str_with_rect]
|
| 193 |
+
注斁E��計サマリー: list[str_with_rect]
|
| 194 |
+
チェチE��アウト�Eタン: list[str_with_rect]
|
| 195 |
+
|
| 196 |
+
# 共通下部
|
| 197 |
+
フッターリンク: list[str_with_rect]
|
| 198 |
+
SNSアイコン: list[str_with_rect]
|
| 199 |
+
カスタマ�Eサポ�Eトリンク: list[str_with_rect]
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def get_openai_request(messages, format, openai_key):
    """Call the LLM once with `format` as the structured response schema.

    Args:
        messages: OpenAI-style message list (system/user dicts).
        format: pydantic model describing the expected JSON structure.
        openai_key: kept for signature compatibility; the HF wrapper reads
            its credentials from the environment instead.

    Returns:
        str: JSON string conforming to `format`.
    """
    # Fixed model choice mirrors the original hard-coded value.
    return _ask_raw_hf(messages, "meta-llama/Llama-3.3-70B-Instruct", response_format=format)
|
| 215 |
+
|
| 216 |
+
@customtracer
|
| 217 |
+
def format2ecinfo(p, openai_key=os.environ.get('OPENAI_KEY')):
|
| 218 |
+
"""
|
| 219 |
+
ECサイト用のバリアント生成API
|
| 220 |
+
baseimg2ecinfo_rect.pyのpageInfo構造に準拠
|
| 221 |
+
|
| 222 |
+
input1 (text): プロンプト�E�Eormat2fvinfoと同様�E形式!E
|
| 223 |
+
input2 (text): default
|
| 224 |
+
output1 (json): pageInfo形式�EJSON
|
| 225 |
+
"""
|
| 226 |
+
print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)
|
| 227 |
+
|
| 228 |
+
if openai_key == "default" or not openai_key:
|
| 229 |
+
openai_key = os.environ.get('OPENAI_KEY', '')
|
| 230 |
+
|
| 231 |
+
if openai_key:
|
| 232 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 233 |
+
|
| 234 |
+
messages=[
|
| 235 |
+
{
|
| 236 |
+
"role": "system",
|
| 237 |
+
"content": """提供したフォーマットデータから、ECサイト向け�Eペ�Eジ惁E��を生成してください。baseimg2ecinfo_rect.pyのpageInfo構造に準拠し、ECサイト�E特性�E�商品比輁E��カチE��リ一覧、賁E��請求など�E�を老E�Eして、E��刁E��要素を生成してください。各要素はstr_with_rect形式!Eext, html, rect�E�で記述してください、E"",
|
| 238 |
+
},
|
| 239 |
+
{
|
| 240 |
+
"role": "user",
|
| 241 |
+
"content": [{"type": "text", "text":p}]
|
| 242 |
+
},
|
| 243 |
+
]
|
| 244 |
+
|
| 245 |
+
return get_openai_request(messages, pageInfo, openai_key)
|
apis/format2ecinfos.py
ADDED
|
@@ -0,0 +1,279 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
import numpy as np
|
| 10 |
+
from enum import Enum
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
import pytz
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 16 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 17 |
+
from src.clients.llm_client import LLMClient
|
| 18 |
+
import json as _json
|
| 19 |
+
|
| 20 |
+
client = LLMClient()
|
| 21 |
+
system_prompt = None
|
| 22 |
+
user_text = ""
|
| 23 |
+
images = []
|
| 24 |
+
for msg in messages:
|
| 25 |
+
role = msg.get("role", "")
|
| 26 |
+
c = msg.get("content", "")
|
| 27 |
+
if role == "system":
|
| 28 |
+
if isinstance(c, str):
|
| 29 |
+
system_prompt = c
|
| 30 |
+
elif role == "user":
|
| 31 |
+
if isinstance(c, str):
|
| 32 |
+
user_text = c
|
| 33 |
+
elif isinstance(c, list):
|
| 34 |
+
for part in c:
|
| 35 |
+
if isinstance(part, dict):
|
| 36 |
+
if part.get("type") == "text":
|
| 37 |
+
user_text += part.get("text", "")
|
| 38 |
+
elif part.get("type") == "image_url":
|
| 39 |
+
url = part.get("image_url", {}).get("url", "")
|
| 40 |
+
if url.startswith("data:"):
|
| 41 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 42 |
+
else:
|
| 43 |
+
images.append(url)
|
| 44 |
+
|
| 45 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 46 |
+
result = client.call(
|
| 47 |
+
prompt=user_text,
|
| 48 |
+
schema=response_format,
|
| 49 |
+
model=model,
|
| 50 |
+
system_prompt=system_prompt,
|
| 51 |
+
images=images if images else None,
|
| 52 |
+
temperature=0,
|
| 53 |
+
)
|
| 54 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 55 |
+
else:
|
| 56 |
+
return client.call_raw(
|
| 57 |
+
prompt=user_text,
|
| 58 |
+
model=model,
|
| 59 |
+
system_prompt=system_prompt,
|
| 60 |
+
images=images if images else None,
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
"""
|
| 65 |
+
EC用の褁E��バリアント生成API�E�Eormat2ecinfo.pyに依存しなぁE��立実裁E��E
|
| 66 |
+
baseimg2ecinfo_rect.pyのpageInfo構造に準拠
|
| 67 |
+
"""
|
| 68 |
+
|
| 69 |
+
# スキーマ定義�E�Eormat2ecinfo.pyから独立!E
|
| 70 |
+
class Category(str, Enum):
|
| 71 |
+
ビジネス = "ビジネス�E�EaaS・法人支援�E�E
|
| 72 |
+
ヘルスケア = "ヘルスケア�E�美容・健康�E�E
|
| 73 |
+
ヒューマンリソース = "ヒューマンリソース�E�求人・紹介!E
|
| 74 |
+
コマ�Eス = "コマ�Eス�E�趣味・食品・衣類!E
|
| 75 |
+
ファイナンス = "ファイナンス�E���融�E保険・不動産�E�E
|
| 76 |
+
インフラ = "インフラ�E�電気�E通信・ガス・住屁E��E
|
| 77 |
+
ライフイベンチE= "ライフイベント(教育・結婚�E相諁E��E
|
| 78 |
+
|
| 79 |
+
class CategoryMiddle(str, Enum):
|
| 80 |
+
# ビジネス
|
| 81 |
+
ITソフトウェア = "IT・ソフトウェア"
|
| 82 |
+
マ�Eケ支援コンサル = "マ�Eケ支援・コンサル"
|
| 83 |
+
オフィス機器用品E= "オフィス・機器用品E
|
| 84 |
+
|
| 85 |
+
# ヘルスケア
|
| 86 |
+
健康食品器具 = "健康食品・器具"
|
| 87 |
+
美容医療クリニック = "美容・医療クリニック"
|
| 88 |
+
美容コスメ = "美容コスメ"
|
| 89 |
+
フィチE��ネスジム = "フィチE��ネスジム"
|
| 90 |
+
|
| 91 |
+
# ヒューマンリソース
|
| 92 |
+
求人惁E�� = "求人惁E��"
|
| 93 |
+
人材紹仁E= "人材紹仁E
|
| 94 |
+
人材派遣 = "人材派遣"
|
| 95 |
+
|
| 96 |
+
# コマ�Eス
|
| 97 |
+
動画アニメゲーム = "動画・アニメ・ゲーム"
|
| 98 |
+
リユースリサイクル = "リユース・リサイクル"
|
| 99 |
+
旁E���EチE��レジャー = "旁E���Eホテル・レジャー"
|
| 100 |
+
趣味交隁E= "趣味・交隁E
|
| 101 |
+
新聞雑誌メチE��ア = "新聞�E雑誌�E惁E��メチE��ア"
|
| 102 |
+
自動車レンタカー用品E= "自動車�Eレンタカー・用品E
|
| 103 |
+
飲料食品生活用品E= "飲料食品・生活用品E
|
| 104 |
+
家電パソコン = "家電・パソコン"
|
| 105 |
+
ファチE��ョン = "ファチE��ョン"
|
| 106 |
+
|
| 107 |
+
# ファイナンス
|
| 108 |
+
不動産 = "不動産"
|
| 109 |
+
保険 = "保険"
|
| 110 |
+
ローン = "ローン"
|
| 111 |
+
クレカ電子決渁E= "クレカ・電子決渁E
|
| 112 |
+
証券FX先物 = "証券・FX・先物"
|
| 113 |
+
銀衁E= "銀衁E
|
| 114 |
+
|
| 115 |
+
# インフラ
|
| 116 |
+
ネット通信サービス = "ネット�E通信サービス"
|
| 117 |
+
電気ガス = "電気�Eガス"
|
| 118 |
+
住宁E��備リフォーム = "住宁E��備�Eリフォーム"
|
| 119 |
+
|
| 120 |
+
# ライフイベンチE
|
| 121 |
+
士業相諁E= "士業・相諁E
|
| 122 |
+
学習スクール = "学習�Eスクール"
|
| 123 |
+
結婚�E会い = "結婚�E出会い"
|
| 124 |
+
葬儀墓地 = "葬儀・墓地"
|
| 125 |
+
引越し介護 = "引越し・介護"
|
| 126 |
+
|
| 127 |
+
class Meta(BaseModel):
|
| 128 |
+
会社吁E str
|
| 129 |
+
業畁E Category
|
| 130 |
+
中刁E��E CategoryMiddle
|
| 131 |
+
サービス: str
|
| 132 |
+
啁E��: str
|
| 133 |
+
タイトル: str
|
| 134 |
+
構�Eの意図: str
|
| 135 |
+
訴求テーチE list[str]
|
| 136 |
+
|
| 137 |
+
class cood(BaseModel):
|
| 138 |
+
x: int
|
| 139 |
+
y: int
|
| 140 |
+
|
| 141 |
+
class str_with_rect(BaseModel):
|
| 142 |
+
text: str
|
| 143 |
+
html: str
|
| 144 |
+
rect: list[cood]
|
| 145 |
+
|
| 146 |
+
class pageInfo(BaseModel):
|
| 147 |
+
# ペ�Eジ共送E
|
| 148 |
+
メタ: Meta
|
| 149 |
+
ロゴ: list[str_with_rect]
|
| 150 |
+
グローバル検索バ�E: list[str_with_rect]
|
| 151 |
+
ハンバ�Eガーメニューアイコン: list[str_with_rect]
|
| 152 |
+
カートアイコン: list[str_with_rect]
|
| 153 |
+
ユーザーメニュー: list[str_with_rect]
|
| 154 |
+
|
| 155 |
+
# ナビゲーション
|
| 156 |
+
ブレチE��クラム: list[str_with_rect]
|
| 157 |
+
ペ�Eジネ�Eション: list[str_with_rect]
|
| 158 |
+
タブ�E替: list[str_with_rect]
|
| 159 |
+
|
| 160 |
+
# トップ�Eージ
|
| 161 |
+
メインビジュアル: list[str_with_rect]
|
| 162 |
+
プロモーションバナー: list[str_with_rect]
|
| 163 |
+
カチE��リカーチE list[str_with_rect]
|
| 164 |
+
|
| 165 |
+
# 啁E��一覧ペ�Eジ
|
| 166 |
+
啁E��一覧: list[str_with_rect]
|
| 167 |
+
フィルタ: list[str_with_rect]
|
| 168 |
+
ソーチE list[str_with_rect]
|
| 169 |
+
ペ�Eジャー: list[str_with_rect]
|
| 170 |
+
クイチE��ビューアイコン: list[str_with_rect]
|
| 171 |
+
|
| 172 |
+
# 啁E��詳細ペ�Eジ
|
| 173 |
+
啁E��吁E list[str_with_rect]
|
| 174 |
+
価格: list[str_with_rect]
|
| 175 |
+
ブランチE list[str_with_rect]
|
| 176 |
+
サムネイル: list[str_with_rect]
|
| 177 |
+
画像ギャラリー: list[str_with_rect]
|
| 178 |
+
カラースウォチE��: list[str_with_rect]
|
| 179 |
+
サイズセレクタ: list[str_with_rect]
|
| 180 |
+
在庫スチE�Eタス: list[str_with_rect]
|
| 181 |
+
配送情報: list[str_with_rect]
|
| 182 |
+
ボタン_カート追加: list[str_with_rect]
|
| 183 |
+
ボタン_今すぐ購入: list[str_with_rect]
|
| 184 |
+
レビューサマリー: list[str_with_rect]
|
| 185 |
+
レビューボタン: list[str_with_rect]
|
| 186 |
+
QnAリンク: list[str_with_rect]
|
| 187 |
+
バッジタグ: list[str_with_rect]
|
| 188 |
+
関連啁E��カルーセル: list[str_with_rect]
|
| 189 |
+
|
| 190 |
+
# カート�Eージ
|
| 191 |
+
カート商品リスチE list[str_with_rect]
|
| 192 |
+
数量セレクタ: list[str_with_rect]
|
| 193 |
+
削除アイコン: list[str_with_rect]
|
| 194 |
+
クーポン入劁E list[str_with_rect]
|
| 195 |
+
注斁E��計サマリー: list[str_with_rect]
|
| 196 |
+
チェチE��アウト�Eタン: list[str_with_rect]
|
| 197 |
+
|
| 198 |
+
# 共通下部
|
| 199 |
+
フッターリンク: list[str_with_rect]
|
| 200 |
+
SNSアイコン: list[str_with_rect]
|
| 201 |
+
カスタマ�Eサポ�Eトリンク: list[str_with_rect]
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def get_openai_request(messages, format, n=1):
    """Generate n candidates and always return them as a list.

    Args:
        messages: OpenAI-style message list (system/user dicts).
        format: pydantic model used as the structured response format.
        n: number of candidates to generate (default 1).

    Returns:
        list[str]: n JSON strings, one per candidate (length 1 when n=1).
    """
    # The HF wrapper has no native n-sampling, so issue n sequential calls.
    # NOTE(review): _ask_raw_hf pins temperature to 0 for structured output,
    # so the n candidates may be identical — confirm whether diversity matters.
    return [
        _ask_raw_hf(messages, "meta-llama/Llama-3.3-70B-Instruct", response_format=format)
        for _ in range(n)
    ]
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
@customtracer
|
| 233 |
+
def format2ecinfos(p, openai_key=os.environ.get('OPENAI_KEY'), n=1):
|
| 234 |
+
"""
|
| 235 |
+
input1 (text): プロンプト�E�Eormat2ecinfoと同様�E形式!E
|
| 236 |
+
input2 (text): default
|
| 237 |
+
input3 (number): 1
|
| 238 |
+
output1 (json): pageInfo形式�EJSONの配�E
|
| 239 |
+
"""
|
| 240 |
+
print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__, f"n={n}")
|
| 241 |
+
|
| 242 |
+
if openai_key == "default" or not openai_key:
|
| 243 |
+
openai_key = os.environ.get('OPENAI_KEY', '')
|
| 244 |
+
|
| 245 |
+
if openai_key:
|
| 246 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 247 |
+
|
| 248 |
+
# n を整数に変換し、篁E��チェチE��
|
| 249 |
+
try:
|
| 250 |
+
n = int(n)
|
| 251 |
+
if n < 1:
|
| 252 |
+
print(f"Warning: n={n} is invalid, using n=1")
|
| 253 |
+
n = 1
|
| 254 |
+
elif n > 10:
|
| 255 |
+
print(f"Warning: n={n} is too large, capping at 10")
|
| 256 |
+
n = 10
|
| 257 |
+
except (TypeError, ValueError):
|
| 258 |
+
print(f"Warning: n={n} is invalid, using n=1")
|
| 259 |
+
n = 1
|
| 260 |
+
|
| 261 |
+
messages=[
|
| 262 |
+
{
|
| 263 |
+
"role": "system",
|
| 264 |
+
"content": """提供したフォーマットデータから、ECサイト向け�Eペ�Eジ惁E��を生成してください。baseimg2ecinfo_rect.pyのpageInfo構造に準拠し、ECサイト�E特性�E�商品比輁E��カチE��リ一覧、賁E��請求など�E�を老E�Eして、E��刁E��要素を生成してください。各要素はstr_with_rect形式!Eext, html, rect�E�で記述してください、E"",
|
| 265 |
+
},
|
| 266 |
+
{
|
| 267 |
+
"role": "user",
|
| 268 |
+
"content": [{"type": "text", "text":p}]
|
| 269 |
+
},
|
| 270 |
+
]
|
| 271 |
+
|
| 272 |
+
# get_openai_requestは常にリストを返すので、そのまま使用
|
| 273 |
+
result = get_openai_request(messages, pageInfo, n=n)
|
| 274 |
+
|
| 275 |
+
print(f"Generated {len(result)} EC variants")
|
| 276 |
+
|
| 277 |
+
# リストをJSON斁E���Eとして返す
|
| 278 |
+
#return json.dumps(result, ensure_ascii=False)
|
| 279 |
+
return result
|
apis/format2fvinfo.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
from enum import Enum
|
| 7 |
+
import base64
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from PIL import Image
|
| 10 |
+
from typing import List, Optional
|
| 11 |
+
from functools import cache
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
import pytz
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
from src.models.common import model
|
| 16 |
+
|
| 17 |
+
def _ask_raw_hf(messages, model, response_format=None):
|
| 18 |
+
"""Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
|
| 19 |
+
from src.clients.llm_client import LLMClient
|
| 20 |
+
import json as _json
|
| 21 |
+
|
| 22 |
+
client = LLMClient()
|
| 23 |
+
system_prompt = None
|
| 24 |
+
user_text = ""
|
| 25 |
+
images = []
|
| 26 |
+
for msg in messages:
|
| 27 |
+
role = msg.get("role", "")
|
| 28 |
+
c = msg.get("content", "")
|
| 29 |
+
if role == "system":
|
| 30 |
+
if isinstance(c, str):
|
| 31 |
+
system_prompt = c
|
| 32 |
+
elif role == "user":
|
| 33 |
+
if isinstance(c, str):
|
| 34 |
+
user_text = c
|
| 35 |
+
elif isinstance(c, list):
|
| 36 |
+
for part in c:
|
| 37 |
+
if isinstance(part, dict):
|
| 38 |
+
if part.get("type") == "text":
|
| 39 |
+
user_text += part.get("text", "")
|
| 40 |
+
elif part.get("type") == "image_url":
|
| 41 |
+
url = part.get("image_url", {}).get("url", "")
|
| 42 |
+
if url.startswith("data:"):
|
| 43 |
+
images.append(url.split(",", 1)[1] if "," in url else url)
|
| 44 |
+
else:
|
| 45 |
+
images.append(url)
|
| 46 |
+
|
| 47 |
+
if response_format is not None and hasattr(response_format, "model_json_schema"):
|
| 48 |
+
result = client.call(
|
| 49 |
+
prompt=user_text,
|
| 50 |
+
schema=response_format,
|
| 51 |
+
model=model,
|
| 52 |
+
system_prompt=system_prompt,
|
| 53 |
+
images=images if images else None,
|
| 54 |
+
temperature=0,
|
| 55 |
+
)
|
| 56 |
+
return _json.dumps(result.model_dump(), ensure_ascii=False)
|
| 57 |
+
else:
|
| 58 |
+
return client.call_raw(
|
| 59 |
+
prompt=user_text,
|
| 60 |
+
model=model,
|
| 61 |
+
system_prompt=system_prompt,
|
| 62 |
+
images=images if images else None,
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
"""
|
| 67 |
+
class FVinfo(BaseModel):
|
| 68 |
+
メインコピ�E: list[str]
|
| 69 |
+
サブコピ�E: list[str]
|
| 70 |
+
ビジュアル: list[str]
|
| 71 |
+
権威付け: list[str]
|
| 72 |
+
CTA: list[str]
|
| 73 |
+
"""
|
| 74 |
+
|
| 75 |
+
class Meta(BaseModel):
|
| 76 |
+
会社吁E str
|
| 77 |
+
業畁E str
|
| 78 |
+
ブランチE str
|
| 79 |
+
サービス: str
|
| 80 |
+
啁E��: str
|
| 81 |
+
タイトル: str
|
| 82 |
+
訴求テーチE list[str]
|
| 83 |
+
|
| 84 |
+
class Font(str, Enum):
|
| 85 |
+
font1 = "ゴシチE��"
|
| 86 |
+
font2 = "明朝"
|
| 87 |
+
font3 = "手書ぁE
|
| 88 |
+
|
| 89 |
+
class EvsF(str, Enum):
|
| 90 |
+
EMOTIONAL = "惁E��E
|
| 91 |
+
FUNCTIONAL = "機�E"
|
| 92 |
+
|
| 93 |
+
class PvsS(str, Enum):
    # Messaging stance: problem-raising vs. solution-oriented copy.
    PROBLEM = "問題提起"
    SOLUTION = "課題解決"
|
| 96 |
+
|
| 97 |
+
class Copy(BaseModel):
    # One candidate copy line together with its presentation attributes.
    text: str  # the copy text itself
    font: Font
    color: str  # presumably a CSS-style color value — TODO confirm
    visual: str  # description of the accompanying visual
    appeal_mode : EvsF
    forcus_stage : PvsS
|
| 104 |
+
|
| 105 |
+
class CatchCopy(BaseModel):
    # Main and supporting catch-copy candidates for the first view.
    main_copy: list[Copy]
    sub_copy: list[Copy]
|
| 108 |
+
|
| 109 |
+
class FVinfo(BaseModel):
|
| 110 |
+
非LP: bool
|
| 111 |
+
メタ: Meta
|
| 112 |
+
キャチE��コピ�E: CatchCopy
|
| 113 |
+
権威付け: list[str]
|
| 114 |
+
ビジュアル: list[str]
|
| 115 |
+
CTAボタン: list[str]
|
| 116 |
+
|
| 117 |
+
def get_openai_request(messages, format):
    """Call the LLM once with `format` as the structured response schema.

    Args:
        messages: OpenAI-style message list (system/user dicts).
        format: pydantic model describing the expected JSON structure.

    Returns:
        str: JSON string conforming to `format`.
    """
    # `model` is the shared default imported from src.models.common.
    return _ask_raw_hf(messages, model, response_format=format)
|
| 129 |
+
|
| 130 |
+
@customtracer
|
| 131 |
+
def format2fvinfo(p, openai_key=os.environ.get('OPENAI_KEY')):
|
| 132 |
+
"""
|
| 133 |
+
input1 (text): ■自社: 親子でのスマ�E料��節紁E親子でのお得感 チE�Eタの余剰利用 通話とネット�Eコストパフォーマンス スマ�EチE��ュー支援 家族向け�E安�E機�E 豊富な端末ラインアチE�E ■競合他社: 22歳までのお得なプラン 大好評�Eサービス 親子でお得にスマ�Eを利用 22歳以下限定�Eお得なキャンペ�Eン 学生向け�Eお得さ 青春年齢向けのお得なプラン 低価格で高品質な通信サービス 格安SIMとスマ�Eの利便性 22歳以下限定�E割引キャンペ�Eン スマ�EチE��ュー応援 家族割引との絁E��合わせでの最安値 料��プランの多様性 親子でのお得な割引サービス スマ�EチE��ューのお得さ 特別割弁EチE�Eタ3GB提侁E割引サービスによるコスト削渁E新規契紁E��プラン変更による特典 機種代と基本料�Eダブル割弁E大容量データ エントリー制の特典シスチE�� 24時間ぁE��でもオンラインで手続き可能 家族�E員が割引を受けられるサービス 家族間の無料通話サービス プライムビデオ特典 22歳までの長期利用可能 製品ラインナップ�E允E��E期間限定�Eキャンペ�Eン 人気スマ�Eの割引販売 安�E教育サービス 話題�Eスマ�Eが安く手に入めE詳細なサポ�EトとFAQ シンプルな料��プラン 家族�E員の料��割弁E子育てサポ�Eトサービス 業界トレンド:「◯◯◯◯◯」「◯◯◯◯◯」「◯◯◯◯◯」が吁E��共通する訴求コンチE��チE��ある、E60字程度)
|
| 134 |
+
input2 (text): default
|
| 135 |
+
output1 (json): 頁E��
|
| 136 |
+
"""
|
| 137 |
+
print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)
|
| 138 |
+
if openai_key == "default":
|
| 139 |
+
os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
|
| 140 |
+
else:
|
| 141 |
+
os.environ['OPENAI_API_KEY'] = openai_key
|
| 142 |
+
|
| 143 |
+
messages=[
|
| 144 |
+
{
|
| 145 |
+
"role": "system",
|
| 146 |
+
"content": """提供したフォーマットデータから、忁E��なファーストビューの要素を生成してください、E"",
|
| 147 |
+
},
|
| 148 |
+
{
|
| 149 |
+
"role": "user",
|
| 150 |
+
"content": [{"type": "text", "text":p}]
|
| 151 |
+
},
|
| 152 |
+
]
|
| 153 |
+
|
| 154 |
+
return get_openai_request(messages, FVinfo)
|
apis/format2fvinfos.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
from enum import Enum
|
| 7 |
+
import base64
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from PIL import Image
|
| 10 |
+
from typing import List, Optional
|
| 11 |
+
from functools import cache
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
import pytz
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
from src.models.common import model
|
| 16 |
+
|
| 17 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Flattens an OpenAI chat `messages` list into the (prompt, system_prompt,
    images) shape that LLMClient expects. When `response_format` is a
    pydantic model, the structured result is returned as a JSON string;
    otherwise the raw completion text is returned.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    sys_text = None
    prompt_text = ""
    image_payloads = []

    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system" and isinstance(content, str):
            sys_text = content
            continue
        if role != "user":
            continue
        if isinstance(content, str):
            # NOTE: a later plain-string user message replaces the prompt,
            # while content-part lists append — mirrors the original logic.
            prompt_text = content
        elif isinstance(content, list):
            for piece in content:
                if not isinstance(piece, dict):
                    continue
                kind = piece.get("type")
                if kind == "text":
                    prompt_text += piece.get("text", "")
                elif kind == "image_url":
                    raw_url = piece.get("image_url", {}).get("url", "")
                    if raw_url.startswith("data:") and "," in raw_url:
                        # strip the data-URL header, keep the base64 payload
                        image_payloads.append(raw_url.split(",", 1)[1])
                    else:
                        image_payloads.append(raw_url)

    hf_client = LLMClient()
    if response_format is not None and hasattr(response_format, "model_json_schema"):
        structured = hf_client.call(
            prompt=prompt_text,
            schema=response_format,
            model=model,
            system_prompt=sys_text,
            images=image_payloads or None,
            temperature=0,
        )
        return _json.dumps(structured.model_dump(), ensure_ascii=False)
    return hf_client.call_raw(
        prompt=prompt_text,
        model=model,
        system_prompt=sys_text,
        images=image_payloads or None,
    )
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# スキーマ定義�E�Eormat2fvinfo.pyから独立!E
|
| 67 |
+
# ---------------------------------------------------------------------------
# Response schema (independent copy of the one in format2fvinfo.py).
# NOTE(review): the original Japanese field/enum names were mojibake-damaged
# (e.g. `会社吁E str` is not even valid Python). The names below are
# reconstructions — confirm every JSON key against the canonical
# format2fvinfo.py before relying on the exact schema.
# ---------------------------------------------------------------------------
class Meta(BaseModel):
    # Page-level metadata extracted from the format data.
    会社名: str        # company name — TODO confirm reconstruction
    業界: str          # industry — TODO confirm reconstruction
    ブランド: str      # brand — TODO confirm reconstruction
    サービス: str
    商品: str          # product — TODO confirm reconstruction
    タイトル: str
    構成の意図: str    # intent of the composition — TODO confirm
    訴求テーマ: list[str]  # appeal themes — TODO confirm

class Font(str, Enum):
    font1 = "ゴシック"   # gothic — TODO confirm reconstruction
    font2 = "明朝"
    font3 = "手書き"     # handwritten — TODO confirm reconstruction

class EvsF(str, Enum):
    EMOTIONAL = "情緒"   # TODO confirm reconstruction
    FUNCTIONAL = "機能"

class PvsS(str, Enum):
    PROBLEM = "問題提起"
    SOLUTION = "課題解決"

class Copy(BaseModel):
    # One copy element of the first view.
    text: str
    font: Font
    color: str
    visual: str
    appeal_mode: EvsF    # emotional vs functional appeal
    forcus_stage: str if False else PvsS  # noqa: E999 — see note below

class CatchCopy(BaseModel):
    main_copy: list[Copy]
    sub_copy: list[Copy]

class FVinfo(BaseModel):
    # Full first-view description produced by the LLM.
    非LP: bool                 # True when the page is not a landing page
    メタ: Meta
    キャッチコピー: CatchCopy  # TODO confirm reconstruction
    権威付け: list[str]        # authority/social-proof items
    ビジュアル: list[str]
    CTAボタン: list[str]
|
| 109 |
+
|
| 110 |
+
def get_openai_request(messages, format, n=1):
    """
    Generate n structured candidates via the HF-backed compatibility wrapper.

    Args:
        messages: OpenAI-style message list (system/user entries).
        format: pydantic model used as the structured response schema.
        n: number of candidates to generate (default 1).

    Returns:
        list[str]: always a list, even when n == 1; each element is the
        JSON-serialized structured response.
    """
    # BUG FIX: the original passed undefined names (`p`, `model`) and
    # OpenAI-only kwargs (messages=, top_p=, n=, temperature=, ...) to
    # _ask_raw_hf, whose signature is (messages, model, response_format),
    # then dereferenced `.choices` on its string return value. The HF
    # wrapper has no native n>1 support, so we sample once per candidate.
    model_name = "meta-llama/Llama-3.3-70B-Instruct"
    return [
        _ask_raw_hf(messages, model_name, response_format=format)
        for _ in range(n)
    ]
|
| 136 |
+
|
| 137 |
+
@customtracer
def format2fvinfos(p, openai_key=os.environ.get('OPENAI_KEY'), n=1):
    """
    Multi-variant version of format2fvinfo: returns n first-view variants.

    input1 (text): prompt text
    input2 (text): default
    input3 (number): 1
    output1 (json): variants list

    Returns:
        str: JSON array of JSON-serialized FVinfo variants.
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__, f"n={n}")

    # Legacy key plumbing — the HF-backed client does not read this env var,
    # kept only for backward compatibility with callers that expect it set.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key

    # Coerce n to an int and clamp it to [1, 10].
    try:
        n = int(n)
        if n < 1:
            print(f"Warning: n={n} is invalid, using n=1")
            n = 1
        elif n > 10:
            print(f"Warning: n={n} is too large, capping at 10")
            n = 10
    except (TypeError, ValueError):
        print(f"Warning: n={n} is invalid, using n=1")
        n = 1

    messages = [
        {
            "role": "system",
            # BUG FIX: the original literal was mojibake-damaged with broken
            # triple-quoting. NOTE(review): Japanese text reconstructed —
            # confirm against the canonical prompt source.
            "content": "提供したフォーマットデータから、必要なファーストビューの要素を生成してください。",
        },
        {
            "role": "user",
            "content": [{"type": "text", "text": p}],
        },
    ]

    # get_openai_request always returns a list, even for n == 1.
    result = get_openai_request(messages, FVinfo, n=n)

    print(f"Generated {len(result)} variants")

    # Return the variant list as a JSON string.
    return json.dumps(result, ensure_ascii=False)
|
apis/framework.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from pydantic import BaseModel
|
| 3 |
+
from typing import List
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
import pytz
|
| 6 |
+
|
| 7 |
+
from src.clients.llm_client import LLMClient
|
| 8 |
+
|
| 9 |
+
class EvaluationQuestion(BaseModel):
    """One scored evaluation check."""
    question: str
    result: int        # numeric score for this question
    citation: str      # quoted passage the score is based on
    reason: str        # justification, especially for high scores
    suggestion: str    # concrete advice when not full marks

class EvaluationCategory(BaseModel):
    """A named category grouping several evaluation questions."""
    # BUG FIX: in the damaged original, `questions: List[EvaluationQuestion]`
    # had been folded into the trailing comment of `category_name`, so the
    # field was silently missing from the schema. Restored as a real field.
    category_name: str
    questions: List[EvaluationQuestion]

class EvaluationModel(BaseModel):
    """Top-level evaluation result: all categories with their questions."""
    categories: List[EvaluationCategory]
|
| 20 |
+
|
| 21 |
+
def framework(
    base64img,
    p,
    framework_p,
    openai_key=os.environ.get('OPENAI_API_KEY'),
    gemini_key=None,
    model="meta-llama/Llama-3.3-70B-Instruct",
):
    """Score a page screenshot against an evaluation framework.

    Args:
        base64img: base64-encoded screenshot (JPEG data string).
        p: user prompt / questions to evaluate.
        framework_p: framework definition appended to the system prompt.
        openai_key: OpenAI key, or "default"/empty to fall back to OPENAI_KEY.
        gemini_key: Gemini key, or "default"/None to fall back to GEMINI_KEY.
        model: model id; ids containing "gemini" route through the Google key.

    Returns:
        dict: EvaluationModel serialized via model_dump().
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)
    selected_model = model if model else "meta-llama/Llama-3.3-70B-Instruct"

    # Pick the provider key matching the selected model family.
    if selected_model and "gemini" in selected_model.lower():
        resolved_key = gemini_key if (gemini_key and gemini_key != "default") else os.environ.get('GEMINI_KEY')
        client = LLMClient(google_api_key=resolved_key)
    else:
        resolved_key = openai_key if (openai_key and openai_key != "default") else os.environ.get('OPENAI_KEY')
        client = LLMClient(openai_key=resolved_key)

    # NOTE(review): prompt text below is mojibake-damaged in the source and
    # reproduced byte-for-byte to preserve runtime behaviour.
    system_prompt = f"""与えられた情報と質問に対して、採点基準を参�Eして以下を日本語で回答します、Ecitation:当該箁E��の引用
suggestion:満点でなぁE��合�E満点になるよぁE��具体的な持E��、Ereason:高得点の場合�E優れた点を�E体的な叙述
出力�E忁E��、すべてのカチE��リと頁E��を含めてください、E
{framework_p}
"""

    evaluation = client.call(
        prompt=p,
        schema=EvaluationModel,
        model=selected_model,
        system_prompt=system_prompt,
        images=[base64img],
        temperature=0,
    )
    return evaluation.model_dump()
|
apis/fvinfo2winningrate_nolift.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
# BUG FIX: the damaged source read `client = LLMClient())` — unbalanced
# closing paren (SyntaxError). NOTE(review): this module-level client is
# unused (the wrapper below creates its own); kept for compatibility.
client = LLMClient()
|
| 4 |
+
from pydantic import BaseModel, conint
|
| 5 |
+
from enum import Enum
|
| 6 |
+
from src.utils.tracer import customtracer
|
| 7 |
+
|
| 8 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Collapses an OpenAI chat `messages` list into (prompt, system_prompt,
    images) for LLMClient. A pydantic `response_format` triggers a structured
    call whose result is JSON-dumped; otherwise the raw text is returned.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    llm = LLMClient()
    system_text = None
    combined_prompt = ""
    b64_images = []

    for entry in messages:
        body = entry.get("content", "")
        role = entry.get("role", "")
        if role == "system":
            if isinstance(body, str):
                system_text = body
        elif role == "user":
            if isinstance(body, str):
                # last plain-string user message wins (assignment, not append)
                combined_prompt = body
            elif isinstance(body, list):
                for fragment in body:
                    if not isinstance(fragment, dict):
                        continue
                    if fragment.get("type") == "text":
                        combined_prompt += fragment.get("text", "")
                    elif fragment.get("type") == "image_url":
                        link = fragment.get("image_url", {}).get("url", "")
                        if link.startswith("data:"):
                            # keep only the base64 payload of a data URL
                            b64_images.append(link.split(",", 1)[1] if "," in link else link)
                        else:
                            b64_images.append(link)

    wants_schema = response_format is not None and hasattr(response_format, "model_json_schema")
    if wants_schema:
        parsed = llm.call(
            prompt=combined_prompt,
            schema=response_format,
            model=model,
            system_prompt=system_text,
            images=b64_images if b64_images else None,
            temperature=0,
        )
        return _json.dumps(parsed.model_dump(), ensure_ascii=False)
    return llm.call_raw(
        prompt=combined_prompt,
        model=model,
        system_prompt=system_text,
        images=b64_images if b64_images else None,
    )
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class reason(BaseModel):
    """One factor contributing to the predicted win/lose outcome."""
    choice: str
    content_description: str
    contribution: int
    reason: str
    recommend: str
    compliance_score: float   # 0..1, closer to 1 = higher compliance risk

class win_or_lose(str, Enum):
    win = "勝ち"
    # BUG FIX: the original value was mojibake ("負ぁE") with a broken
    # string literal. NOTE(review): reconstructed as "負け" — confirm.
    lose = "負け"

class testpattern_win_or_lose(BaseModel):
    """Predicted outcome for the test pattern, with confidence and reasons."""
    testpattern: win_or_lose
    possibility: float        # 0..100 confidence in the prediction
    reasons: list[reason]
|
| 73 |
+
|
| 74 |
+
@customtracer
def fvinfo2winningrate_nolift(img1, img2):
    """
    Compare two first-view (FV) descriptions and predict which wins.

    input1 (text): FV content of the original pattern
    input2 (text): FV content of the test pattern
    output1 (json): predicted outcome (testpattern_win_or_lose as JSON)
    """
    print("fvinfo2winningrate_nolift")

    # NOTE(review): Japanese prompt text below is mojibake-damaged in the
    # source and reproduced byte-for-byte to preserve runtime behaviour.
    p = "以下に2つのWEBペ�EジのFVの冁E��を�E挙します。テストパターンの勝敗を予想し予想の精度(possibility)と、勝敗�E琁E��めEつ述べてください、En\n#オリジナル\n" + img1 + "\n\n#チE��トパターン\n" + img2

    system_text = """WEBペ�EジのFVの冁E��を比輁E��て、テストパターンの勝敗を予測し、勝因を記述します。以下�E頁E��で常体�E言ぁE�Eり�E日本語で記載し��ください、E
STEP1: possibilityには、その勝敗予測が合ってぁE��確玁E��0~100の間で入れてください。�E容が同一の場合�EpossibilityめEにしてください、E
STEP2: 勝因に特筁E��べきものがあれ�Epossibilityに3を加えてください
STEP3: possibilityに応じて、reasonの強さを「優れてぁE��、E「すると良ぁE��E「可能性がある」�Eように表現を変えてください、E
STEP4: オリジナルと差刁E��なければ、理由には特に何も書かなぁE��ください、E
STEP5: チE��トパターンに値がなぁE��合�E、contributionめEにして何も提案しなくてよいです、E
STEP6: 最後に斁E��を「だ・である調」に統一し、句点は除去してください
STEP7: complianceに違反するリスクのある表現めE~1で判定します。�E皁E��業として問題�Eある表現ぁEつでも含まれればcompliance_scoreぁEに近くなるよぁE��スコアして、E
"""

    # BUG FIX: the original called _ask_raw_hf with an undefined `model`
    # positional plus OpenAI-only kwargs (messages=, temperature=, top_p=,
    # ...), which its (messages, model, response_format) signature rejects.
    # It also wrapped the system prompt in a content-parts list, which the
    # wrapper ignores for the system role — passed as a plain string here.
    messages = [
        {"role": "system", "content": system_text},
        {"role": "user", "content": [{"type": "text", "text": p}]},
    ]
    response = _ask_raw_hf(
        messages,
        # fine-tuned variant "without-color-ratio" (see commented history
        # of earlier fine-tunes in the original source)
        "ft:gpt-4o-2024-08-06:dlpo-inc:without-color-ratio:A1RIpaUj",
        response_format=testpattern_win_or_lose,
    )
    return response
|
apis/heatimage2score.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from matplotlib.colors import LinearSegmentedColormap
|
| 2 |
+
import numpy as np
|
| 3 |
+
from matplotlib.colors import to_hex,to_rgb
|
| 4 |
+
from PIL import Image, ImageDraw, ImageFont, ImageStat, ImageFilter
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
import pytz
|
| 7 |
+
|
| 8 |
+
# Heatmap palette: black -> purple -> blue -> green -> yellow -> orange -> red.
cmap = LinearSegmentedColormap.from_list("custom_heatmap", ["black", "purple", "blue", "green", "yellow", "orange", "red"], N=100)
colors = cmap(np.linspace(0, 1, 256))
hex_colors = [to_hex(c) for c in colors]
# Precompute hex colour -> score (0..100) for every palette sample.
# PERF FIX: the original ran an O(256^2) nearest-neighbour search at import
# time even though each palette colour's nearest palette entry is itself.
# Identical RGB values produce identical hex strings, and the original
# argmin always resolved ties to the FIRST occurrence, so recording a hex
# only the first time it appears is behaviourally equivalent.
color_score_cache = {}
for index, hex_color in enumerate(hex_colors):
    if hex_color not in color_score_cache:
        color_score_cache[hex_color] = 100 * (index / 255)
|
| 17 |
+
|
| 18 |
+
def heatimage2score(img_path):
    """
    Score a heatmap image by mapping each pixel's colour back onto the
    module-level palette and averaging the resulting 0-100 "temperature".

    input1 (image_filepath):
    output2 (number): 相違点

    Returns:
        int: mean temperature over ALL pixels (pure-white pixels score 0).
    """
    # Open the image and force RGB mode
    img = Image.open(img_path)
    img = img.convert('RGB')

    # Convert the image to a numpy array (H, W, 3)
    pixels = np.array(img)

    # Initialize the per-pixel score array
    scores = np.zeros(pixels.shape[:2], dtype=np.float64)

    # Assign a score to every pixel
    for i in range(pixels.shape[0]):  # rows
        for j in range(pixels.shape[1]):  # columns
            pixel = pixels[i, j] / 255.0  # normalize channels to [0, 1]
            if np.all(pixel == 1.0):  # pure white -> no heat
                scores[i, j] = 0
            else:
                hex_color = to_hex(pixel)
                if hex_color in color_score_cache:
                    scores[i, j] = color_score_cache[hex_color]
                else:
                    # Not cached yet: find the nearest palette colour,
                    # compute its score, and memoize it for later pixels.
                    # NOTE(review): mutates the shared module-level cache —
                    # not thread-safe; confirm single-threaded use.
                    closest_index = np.argmin([np.linalg.norm(np.array(to_rgb(x)) - pixel) for x in hex_colors])
                    value = 0 + (100 - 0) * (closest_index / 255)
                    color_score_cache[hex_color] = value
                    scores[i, j] = value

    # Aggregate statistics; only the overall mean is actually returned.
    max_temperature = np.max(scores)  # peak score
    total_temperature = np.sum(scores)  # summed score
    average_temperature_per_area = np.mean(scores)  # mean over all pixels
    colored_area = scores[scores > 0]  # non-white pixels only
    if colored_area.size > 0:
        average_temperature_per_colored_area = np.mean(colored_area)  # mean over coloured pixels
    else:
        average_temperature_per_colored_area = 0  # no coloured pixels at all
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%m-%d %H:%M:%S"), "heatimage2score", len(color_score_cache.keys()))
    return int(average_temperature_per_area)
|
apis/heatmap_text2comment.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
from enum import Enum
|
| 10 |
+
|
| 11 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
    from src.clients.llm_client import LLMClient
    import json as _json

    system_prompt = None
    user_text = ""
    images = []

    # Flatten the OpenAI chat message list into (system_prompt, user_text, images).
    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system" and isinstance(content, str):
            system_prompt = content
        elif role == "user":
            if isinstance(content, str):
                # A plain-string user message replaces any previously seen text.
                user_text = content
            elif isinstance(content, list):
                for part in content:
                    if not isinstance(part, dict):
                        continue
                    kind = part.get("type")
                    if kind == "text":
                        user_text += part.get("text", "")
                    elif kind == "image_url":
                        url = part.get("image_url", {}).get("url", "")
                        # data: URLs carry the base64 payload after the comma.
                        if url.startswith("data:"):
                            images.append(url.split(",", 1)[1] if "," in url else url)
                        else:
                            images.append(url)

    client = LLMClient()
    if response_format is not None and hasattr(response_format, "model_json_schema"):
        # Structured call: validate against the pydantic schema, return JSON text.
        structured = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images or None,
            temperature=0,
        )
        return _json.dumps(structured.model_dump(), ensure_ascii=False)
    # Free-form call: return the raw completion string.
    return client.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=system_prompt,
        images=images or None,
    )
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
client = LLMClient()
|
| 61 |
+
|
| 62 |
+
class Comment(BaseModel):
    """Structured LLM comment payload returned by ask_raw.

    NOTE(review): the field names below appear mojibake-garbled in this view
    (likely Shift-JIS → UTF-8 damage); verify them against the original file
    before editing.
    """
    コメンチE str
    琁E��: str
    チE��スチE str
    チE��スト�E種顁E str
|
| 67 |
+
|
| 68 |
+
def ask_raw(messages, model):
    """Send OpenAI-style *messages* to *model* and return the structured Comment JSON.

    Bug fixed: the previous body passed an undefined name ``p``, a duplicate
    ``model`` argument (positional AND keyword), and OpenAI-only kwargs
    (``top_p``, ``frequency_penalty``, ``presence_penalty``, ``messages``,
    ``temperature``) that ``_ask_raw_hf`` does not accept — so every call
    raised NameError/TypeError. ``_ask_raw_hf`` only takes
    ``(messages, model, response_format=...)`` and already pins temperature
    to 0 internally, so we forward exactly those.

    Returns:
        str: JSON-serialized Comment produced by the structured LLM call.
    """
    return _ask_raw_hf(messages, model, response_format=Comment)
|
| 79 |
+
|
| 80 |
+
def heatmap_text2comment(p, fv_info1,fv_info2,title1, title2, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input1 (text):
    input2 (text):
    input3 (text):
    input4 (text):
    input5 (text):
    input6 (text): default
    output1 (json): コメンチE
    """
    # Resolve the API key: the sentinel "default" falls back to the
    # environment's OPENAI_KEY. NOTE(review): kept for interface compatibility;
    # the HF backend does not actually read OPENAI_API_KEY — confirm.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key
    # System message embeds both LP summaries; the user message carries the
    # analysis question *p*. (String content below is mojibake in this view
    # and is preserved byte-for-byte.)
    messages = [
        {
            "role": "system",
            "content": f"以下�E、�E析を進めてぁE��LPの冁E��です、Enこ�ELPの惁E��を�Eに、LP刁E��の専門家としてコメントしてください、En\n#{title1}\n{fv_info1}\n\n#{title2}\n{fv_info2}"
        },
        {
            "role": "user",
            "content":[
                {"type": "text", "text":p}
            ]
        },
    ]
    return ask_raw(messages, "meta-llama/Llama-3.3-70B-Instruct")
|
apis/html2variants.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
HTMLバリアント生成API
|
| 3 |
+
允E�EHTMLと変更点を受け取り、Eつの新しいHTMLを提案すめE
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from src.clients.llm_client import LLMClient
|
| 8 |
+
import json
|
| 9 |
+
import re
|
| 10 |
+
from typing import List
|
| 11 |
+
from pydantic import BaseModel, Field
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
import pytz
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
|
| 16 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
    from src.clients.llm_client import LLMClient
    import json as _json

    system_prompt = None
    user_text = ""
    images = []

    # Collapse the OpenAI chat format into a single prompt + image list.
    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system" and isinstance(content, str):
            system_prompt = content
        elif role == "user":
            if isinstance(content, str):
                # A plain-string user message replaces any previously seen text.
                user_text = content
            elif isinstance(content, list):
                for part in content:
                    if not isinstance(part, dict):
                        continue
                    kind = part.get("type")
                    if kind == "text":
                        user_text += part.get("text", "")
                    elif kind == "image_url":
                        url = part.get("image_url", {}).get("url", "")
                        # data: URLs carry the base64 payload after the comma.
                        if url.startswith("data:"):
                            images.append(url.split(",", 1)[1] if "," in url else url)
                        else:
                            images.append(url)

    client = LLMClient()
    if response_format is not None and hasattr(response_format, "model_json_schema"):
        # Structured call: validate against the pydantic schema, return JSON text.
        structured = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images or None,
            temperature=0,
        )
        return _json.dumps(structured.model_dump(), ensure_ascii=False)
    # Free-form call: return the raw completion string.
    return client.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=system_prompt,
        images=images or None,
    )
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class HtmlVariant(BaseModel):
    """One proposed HTML rewrite."""
    html: str           # complete HTML document for this variant
    description: str    # short (~50 chars) summary of what changed
    changes: list[str]  # concrete change bullet points (~30 chars each)


class HtmlVariantsResponse(BaseModel):
    """Structured LLM response: a list of proposed HTML variants."""
    variants: list[HtmlVariant]
|
| 73 |
+
|
| 74 |
+
def get_openai_request(messages, format, openai_key):
    """Run *messages* through the HF LLM and parse the reply into *format*.

    Bug fixed: the previous body referenced undefined names (``p``, ``model``),
    created an unused ``LLMClient`` instance, and passed OpenAI-only kwargs
    (``top_p``, ``frequency_penalty``, ``messages``, ``temperature``) that
    ``_ask_raw_hf`` does not accept — every call raised at runtime.

    ``_ask_raw_hf`` pins temperature to 0 internally, so the former 0.7
    "variation" setting is not reproducible through this wrapper.

    Args:
        messages: OpenAI-style chat message list.
        format: pydantic BaseModel subclass describing the response schema.
        openai_key: unused in the HF backend; kept for interface compatibility.

    Returns:
        An instance of *format* parsed from the model's structured JSON reply.
    """
    raw = _ask_raw_hf(messages, "meta-llama/Llama-3.3-70B-Instruct", response_format=format)
    # _ask_raw_hf returns the structured result serialized as JSON text;
    # round-trip it back into the requested pydantic model for callers.
    return format.model_validate_json(raw)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
@customtracer
def html2variants(original_html: str, change_points: str, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input1 (text): <h1>title</h1>
    input2 (text): タイトルに下線を表示
    input3 (text): default
    output1 (json): html
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)

    # Resolve the API key; "default"/empty falls back to the environment.
    if openai_key == "default" or not openai_key:
        openai_key = os.environ.get('OPENAI_KEY', '')

    if openai_key:
        os.environ['OPENAI_API_KEY'] = openai_key

    # Truncate very long HTML so the prompt stays within context limits.
    html_summary = original_html[:5000]  # first 5000 characters only
    if len(original_html) > 5000:
        html_summary += "\n\n[以下省略...]"

    # Build the generation prompt. (The Japanese literal below is mojibake in
    # this view and is preserved byte-for-byte — it is runtime prompt text.)
    prompt = f"""以下�E允E�EHTMLと変更点の説明を基に、指定数のHTMLバリアントを生�Eしてください、E

【�EのHTML、E
{html_summary}

【変更点の説明、E
{change_points}

【要件、E
1. 允E�EHTMLの構造とスタイルを可能な限り維持すめE
2. 変更点の説明に基づぁE��、指定数のHTMLバリアントを生�Eする
3. バリアント�E完�EなHTMLドキュメントとして返す�E�E!DOCTYPE html>から</html>まで�E�E
4. 画像パス、CSSパス、JavaScriptパスなどは允E�EHTMLからそ�Eまま維持すめE
5. 変更するのは主にチE��ストコンチE��チE��、その周辺のチE��イン�E�フォントサイズ、色、レイアウト、余白、スタイルなど�E�でぁE
6. 変更点周辺のチE��インを変更することで、より効果的な表現を実現してください
7. ただし、変更点以外�E部刁E��ロゴ、ナビゲーション、フチE��ー、その他�Eセクション�E��EチE��インは維持してください
8. 允E�EHTMLのすべての要素�E�ESS、画像、JavaScript、メタタグなど�E�を完�Eに保持する

【�E力形式、E
- variants: 持E��数のHtmlVariantオブジェクト�EリスチE
- HtmlVariantには以下を含める:
- html: 完�EなHTMLドキュメンチE
- description: こ�Eバリアント�E説明(変更点の要紁E��E0斁E��程度�E�E
- changes: 具体的な変更点のリスト(各頁E��は30斁E��程度�E�E
"""

    messages = [
        {
            "role": "system",
            "content": """あなた�EHTMLの専門家です。�EのHTMLと変更点の説明を基に、指定数のHTMLバリアントを生�Eしてください、E
バリアント�E完�EなHTMLドキュメントとして返し、�EのHTMLの構造とスタイルを可能な限り維持しながら、変更点を反映してください、E
変更点周辺のチE��イン�E�フォントサイズ、色、レイアウト、余白、スタイルなど�E�も変更可能です、E""",
        },
        {
            "role": "user",
            "content": prompt
        },
    ]

    try:
        result = get_openai_request(messages, HtmlVariantsResponse, openai_key)
        return result
    except Exception as e:
        # Best-effort: log the failure and fall back to an empty response so
        # callers always receive a HtmlVariantsResponse.
        print(f"[html2variants] エラー: {e}")
        import traceback
        print(traceback.format_exc())
        # Return an empty response on error
        return HtmlVariantsResponse(variants=[])
|
| 159 |
+
|
apis/image2color.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from PIL import Image
|
| 2 |
+
import numpy as np
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
def quantize_color(color, levels=8):
    """Quantize a colour by snapping each component down to the nearest multiple of *levels*.

    Works on scalars and numpy arrays alike (integer floor division).
    """
    bucket = color // levels
    return bucket * levels
|
| 8 |
+
|
| 9 |
+
def image2color(image, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input1 (image):
    input2 (text):
    output1 (json): サンプル色
    """
    grid_unit = 8
    quant_levels = 16
    grid_size = (grid_unit, grid_unit)

    # Work on raw RGB pixel data.
    data = np.array(image.convert('RGB'))
    rows, cols, _ = data.shape
    cell_h = rows // grid_size[0]
    cell_w = cols // grid_size[1]

    # Count quantized colours cell by cell; the last row/column of the grid
    # absorbs any remainder pixels so every pixel is counted exactly once.
    color_dict = {}
    for gy in range(grid_size[0]):
        for gx in range(grid_size[1]):
            y0 = gy * cell_h
            y1 = rows if gy == grid_size[0] - 1 else (gy + 1) * cell_h
            x0 = gx * cell_w
            x1 = cols if gx == grid_size[1] - 1 else (gx + 1) * cell_w
            cell = quantize_color(data[y0:y1, x0:x1], levels=quant_levels)
            uniq, counts = np.unique(cell.reshape(-1, 3), axis=0, return_counts=True)
            for rgb, n in zip(uniq, counts):
                key = f"#{rgb[0]:02X}{rgb[1]:02X}{rgb[2]:02X}"
                color_dict[key] = color_dict.get(key, 0) + n

    # Rank colours by pixel count and keep the top five.
    ranked = sorted(color_dict.items(), key=lambda kv: kv[1], reverse=True)
    total_pixels = sum(color_dict.values())
    top_colors = ranked[:5]

    # Assemble the named palette with percentage ratios.
    names = ["base_color", "main_color", "accent_color1", "accent_color2", "accent_color3"]
    palette = {}
    for idx, (key, n) in enumerate(top_colors):
        label = names[idx] if idx < len(names) else f"additional_color_{idx - len(names) + 1}"
        share = (n / total_pixels) * 100
        palette[label] = {'code': key, 'ratio': f"{share:.2f}%"}

    return palette
|
apis/image2inpaint.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
image2inpaint: 画像の一部を変更(Inpainting)。
|
| 3 |
+
HF Inference API の image-to-image (inpainting) タスクを使用。
|
| 4 |
+
|
| 5 |
+
NOTE: FLUX.1-dev はオープンなインペインティングAPIを提供していない。
|
| 6 |
+
runwayml/stable-diffusion-inpainting を使用。
|
| 7 |
+
画像・マスクは base64 PNG として受け取る。
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import base64
|
| 11 |
+
import os
|
| 12 |
+
from io import BytesIO
|
| 13 |
+
from typing import Optional
|
| 14 |
+
|
| 15 |
+
from PIL import Image
|
| 16 |
+
|
| 17 |
+
from src.utils.tracer import customtracer
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@customtracer
def image2inpaint(
    base64image: str,
    base64mask: str,
    p: str,
    gcp_key: str = "default",
    model_name: Optional[str] = None,
) -> str:
    """
    input1 (text): base64エンコードされた元画像
    input2 (text): base64エンコードされたマスク画像(変更したい領域を白、保持したい領域を黒)
    input3 (text): 変更内容のプロンプト
    input4 (text): default
    output1 (text): 生成した画像のbase64文字列

    NOTE: HF版ではgcp_keyは使用しない。HF_TOKENを使用。
    """
    from huggingface_hub import InferenceClient

    hf_token = os.environ.get("HF_TOKEN")
    if not hf_token:
        raise ValueError("HF_TOKEN is required for image2inpaint.")

    # Fall back to the SD inpainting model unless a non-Gemini model is given.
    model = model_name if model_name and "gemini" not in model_name else "runwayml/stable-diffusion-inpainting"

    def _decode_b64_image(b64: str) -> Image.Image:
        # Strip a possible "data:image/...;base64," prefix before decoding.
        if "," in b64:
            b64 = b64.split(",", 1)[1]
        data = base64.b64decode(b64)
        return Image.open(BytesIO(data)).convert("RGB")

    image = _decode_b64_image(base64image)
    mask = _decode_b64_image(base64mask)

    if image.size != mask.size:
        mask = mask.resize(image.size)

    # NOTE(review): `mask` is decoded and resized above but never passed to the
    # API call below — image_to_image receives only image+prompt, so the mask
    # does not actually constrain the edit. Confirm intended behavior.
    client = InferenceClient(token=hf_token)
    result = client.image_to_image(
        image=image,
        prompt=p,
        model=model,
    )

    # Re-encode the generated PIL image as base64 PNG.
    buf = BytesIO()
    result.save(buf, format="PNG")
    return base64.b64encode(buf.getvalue()).decode("utf-8")
|
apis/image2inpaint3.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
image2inpaint3: 画像のインペインティング(バリアント)。
|
| 3 |
+
HF Inference API の image-to-image を使用。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import base64
|
| 7 |
+
import os
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from typing import Optional
|
| 10 |
+
|
| 11 |
+
from PIL import Image
|
| 12 |
+
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@customtracer
def image2inpaint3(
    base64image: str,
    base64mask: str,
    p: str,
    gcp_key: str = "default",
    model_name: Optional[str] = None,
) -> str:
    """
    input1 (text): base64エンコードされた元画像
    input2 (text): base64エンコードされたマスク画像
    input3 (text): 変更内容のプロンプト
    input4 (text): default
    output1 (text): 生成した画像のbase64文字列

    NOTE: HF版ではgcp_keyは使用しない。HF_TOKENを使用。
    """
    from huggingface_hub import InferenceClient

    hf_token = os.environ.get("HF_TOKEN")
    if not hf_token:
        raise ValueError("HF_TOKEN is required for image2inpaint3.")

    # Fall back to the SD inpainting model unless a non-Gemini model is given.
    model = model_name if model_name and "gemini" not in model_name else "runwayml/stable-diffusion-inpainting"

    def _decode_b64_image(b64: str) -> Image.Image:
        # Strip a possible "data:image/...;base64," prefix before decoding.
        if "," in b64:
            b64 = b64.split(",", 1)[1]
        data = base64.b64decode(b64)
        return Image.open(BytesIO(data)).convert("RGB")

    image = _decode_b64_image(base64image)
    mask = _decode_b64_image(base64mask)

    if image.size != mask.size:
        mask = mask.resize(image.size)

    # NOTE(review): `mask` is decoded and resized above but never passed to the
    # API call below, so the masked region is not actually constrained. Confirm
    # intended behavior (same issue exists in image2inpaint).
    client = InferenceClient(token=hf_token)
    result = client.image_to_image(
        image=image,
        prompt=p,
        model=model,
    )

    # Re-encode the generated PIL image as base64 PNG.
    buf = BytesIO()
    result.save(buf, format="PNG")
    return base64.b64encode(buf.getvalue()).decode("utf-8")
|
apis/image2text.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
image2text: PIL Image を OCR してテキストを返す。
|
| 3 |
+
HF版: VLM (Qwen2.5-VL) を使用。Google Vision API は使用しない。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import base64
|
| 7 |
+
import json
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
|
| 10 |
+
from src.utils.tracer import customtracer
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def _vlm_ocr_from_pil(image, model: str = "Qwen/Qwen2.5-VL-7B-Instruct") -> str:
    """PIL Image → base64 JPEG → VLM OCR.

    Bug fixed: ``image.save(..., format="JPEG")`` raises for RGBA/LA/P-mode
    images (JPEG has no alpha channel); the image is now normalized to RGB
    before encoding.

    Args:
        image: PIL Image to OCR.
        model: HF model id of the vision-language model.

    Returns:
        str: JSON array text of ``{"text", "y", "size"}`` entries, sorted by y.
    """
    from src.clients.llm_client import LLMClient
    from pydantic import BaseModel
    from typing import List

    buf = BytesIO()
    # JPEG cannot encode RGBA/P images — normalize to RGB first.
    image.convert("RGB").save(buf, format="JPEG")
    b64 = base64.b64encode(buf.getvalue()).decode("utf-8")

    class OcrEntry(BaseModel):
        text: str
        y: int
        size: int

    class OcrResult(BaseModel):
        items: List[OcrEntry]

    client = LLMClient()
    result = client.call(
        prompt=(
            "Extract all visible text from this image. "
            "For each text block, estimate its vertical position (y, 0=top) "
            "and approximate font size in pixels. Sort by y."
        ),
        schema=OcrResult,
        model=model,
        images=[b64],
        temperature=0,
    )
    return json.dumps(
        [{"text": e.text, "y": e.y, "size": e.size} for e in result.items],
        ensure_ascii=False,
    )
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
@customtracer
def image2text(image) -> str:
    """
    input1 (image): PIL Image
    output1 (json): OCR結果

    NOTE: HF版は VLM ベースOCR。Google Vision API は使用しない。
    """
    # Delegate to the VLM-based OCR helper (default model: Qwen2.5-VL-7B).
    return _vlm_ocr_from_pil(image)
|
apis/image2types.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import base64
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from PIL import Image
|
| 7 |
+
import re
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
from enum import Enum
|
| 10 |
+
from functools import cache
|
| 11 |
+
from src.utils.tracer import customtracer
|
| 12 |
+
|
| 13 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient."""
    from src.clients.llm_client import LLMClient
    import json as _json

    system_prompt = None
    user_text = ""
    images = []

    # Reduce the OpenAI chat message list to a single prompt + image list.
    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system" and isinstance(content, str):
            system_prompt = content
        elif role == "user":
            if isinstance(content, str):
                # A plain-string user message replaces any previously seen text.
                user_text = content
            elif isinstance(content, list):
                for part in content:
                    if not isinstance(part, dict):
                        continue
                    kind = part.get("type")
                    if kind == "text":
                        user_text += part.get("text", "")
                    elif kind == "image_url":
                        url = part.get("image_url", {}).get("url", "")
                        # data: URLs carry the base64 payload after the comma.
                        if url.startswith("data:"):
                            images.append(url.split(",", 1)[1] if "," in url else url)
                        else:
                            images.append(url)

    client = LLMClient()
    if response_format is not None and hasattr(response_format, "model_json_schema"):
        # Structured call: validate against the pydantic schema, return JSON text.
        structured = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images or None,
            temperature=0,
        )
        return _json.dumps(structured.model_dump(), ensure_ascii=False)
    # Free-form call: return the raw completion string.
    return client.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=system_prompt,
        images=images or None,
    )
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
client = LLMClient()
|
| 63 |
+
|
| 64 |
+
class IntentInfo(BaseModel):
    """One search-intent classification entry.

    NOTE(review): the field names below appear mojibake-garbled in this view
    (likely Shift-JIS → UTF-8 damage); verify them against the original file
    before editing.
    """
    タイチE str
    詳細カチE��リ: str
    評価区刁E str
    評価箁E��: str
    評価琁E��: str


class IntentInfos(BaseModel):
    """Structured LLM response: a list of IntentInfo entries."""
    types: list[IntentInfo]
|
| 73 |
+
|
| 74 |
+
def ask_raw(messages, model):
    """Send OpenAI-style *messages* to *model* and return the structured IntentInfos JSON.

    Bug fixed: the previous body passed an undefined name ``p``, a duplicate
    ``model`` argument (positional AND keyword), and OpenAI-only kwargs
    (``top_p``, ``frequency_penalty``, ``presence_penalty``, ``messages``,
    ``temperature``) that ``_ask_raw_hf`` does not accept — so every call
    raised NameError/TypeError. ``_ask_raw_hf`` only takes
    ``(messages, model, response_format=...)`` and pins temperature to 0
    internally.

    Returns:
        str: JSON-serialized IntentInfos produced by the structured LLM call.
    """
    return _ask_raw_hf(messages, model, response_format=IntentInfos)
|
| 85 |
+
|
| 86 |
+
@customtracer
def image2types(image, p, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input1 (image):
    input2 (text): #前提\n・Google検索品質評価ガイドラインを踏まえる\n・添付した画像�E検索上位に表示されてぁE��\n・画像�E検索クエリは「uq wimax 解紁E��\n・画像�Eに記載されてぁE��惁E��のみ利用し、記載されてぁE��ぁE��報は利用しない\n\n#質問\n・画像�Eの惁E��から検索クエリをgo,buy,know,doに刁E��\n・画像�Eの惁E��から刁E���E根拠となった部刁E��箁E��書きで記載\n・画像�Eの惁E��からユーザーが何を求めて検索したかを150斁E��程度で記載\n・エラーペ�Eジの場合�E、「なし」と記載\n・上記�E画像�Eに記載がある冁E��か改めてチェチE��\n\n#出力形式\n検索クエリの刁E��:\n刁E���E根拠となる情報�E�\nユーザーが求める情報�E�\n
    input4 (text): default
    output1 (json): 検索意図
    """
    # Resolve the API key: "default" falls back to the environment's OPENAI_KEY.
    # NOTE(review): kept for interface compatibility; the HF backend does not
    # read OPENAI_API_KEY — confirm.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key
    # System prompt text below is mojibake in this view; preserved byte-for-byte.
    messages = [
        {
            "role": "system",
            "content": "あなた�E優れたWEBマ�Eケターで、各種マ�EケチE��ングフレームワークを前提として、E��客忁E��に基づぁE��刁E��ができます、E",
        },
        {
            "role": "user",
            "content":[
                {"type": "text", "text":p}
            ]
        },
    ]
    # Encode the PIL image as a PNG data URL and prepend it to the user content.
    buffered = BytesIO()
    image.save(buffered, format="PNG")
    img_str = base64.b64encode(buffered.getvalue()).decode("utf-8")
    messages[1]["content"].insert(0, {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{img_str}"}})
    return ask_raw(messages, "meta-llama/Llama-3.3-70B-Instruct")
|
apis/images2inpaint.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
images2inpaint: 複数画像のインペインティング。
|
| 3 |
+
HF Inference API の image-to-image を使用。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import base64
|
| 7 |
+
import os
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from typing import List, Optional
|
| 10 |
+
|
| 11 |
+
from PIL import Image
|
| 12 |
+
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@customtracer
def images2inpaint(
    base64images: List[str],
    base64mask: str,
    p: str,
    gcp_key: str = "default",
    model_name: Optional[str] = None,
) -> List[str]:
    """
    input1 (text): base64エンコードされた元画像リスト(JSON配列)
    input2 (text): base64エンコードされたマスク画像
    input3 (text): 変更内容のプロンプト
    input4 (text): default
    output1 (json): 生成した画像のbase64文字列リスト

    NOTE: HF版ではgcp_keyは使用しない。HF_TOKENを使用。
    """
    import json
    from huggingface_hub import InferenceClient

    hf_token = os.environ.get("HF_TOKEN")
    if not hf_token:
        raise ValueError("HF_TOKEN is required for images2inpaint.")

    # Fall back to the SD inpainting model unless a non-Gemini model is given.
    model = model_name if model_name and "gemini" not in model_name else "runwayml/stable-diffusion-inpainting"

    # Accept JSON string or list
    if isinstance(base64images, str):
        base64images = json.loads(base64images)

    def _decode_b64(b64: str) -> Image.Image:
        # Strip a possible "data:...;base64," prefix before decoding.
        if "," in b64:
            b64 = b64.split(",", 1)[1]
        return Image.open(BytesIO(base64.b64decode(b64))).convert("RGB")

    mask = _decode_b64(base64mask)

    client = InferenceClient(token=hf_token)
    results = []
    for b64 in base64images:
        img = _decode_b64(b64)
        # NOTE(review): the resized mask `m` is computed but never passed to
        # the API call below, so the mask has no effect on generation. Confirm
        # intended behavior (same issue exists in image2inpaint).
        if img.size != mask.size:
            m = mask.resize(img.size)
        else:
            m = mask
        out = client.image_to_image(image=img, prompt=p, model=model)
        buf = BytesIO()
        out.save(buf, format="PNG")
        results.append(base64.b64encode(buf.getvalue()).decode("utf-8"))
    return results
|
apis/info2img64.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from PIL import Image, ImageDraw, ImageFont
|
| 2 |
+
import textwrap
|
| 3 |
+
import io
|
| 4 |
+
import base64
|
| 5 |
+
import os
|
| 6 |
+
import json
|
| 7 |
+
|
| 8 |
+
def draw_wrapped_text(draw, text, x, y, font, max_width, line_spacing=4, align='left'):
    """Draw *text* wrapped to *max_width* on *draw* and return the new y cursor.

    Wrapping is done character-by-character (suitable for CJK text; Latin
    words may break mid-word). *align* supports 'left' (default) and 'center'.
    """
    # Phase 1: break the text into lines that fit max_width.
    lines = []
    for paragraph in text.split("\n"):
        current_line = ""
        for char in paragraph:
            trial_line = current_line + char
            bbox = draw.textbbox((0, 0), trial_line, font=font)
            if bbox[2] - bbox[0] > max_width:
                lines.append(current_line)
                current_line = char
            else:
                current_line = trial_line
        if current_line:
            lines.append(current_line)

    # Phase 2: render each line and advance the y cursor.
    for line in lines:
        bbox = draw.textbbox((0, 0), line, font=font)
        line_width = bbox[2] - bbox[0]
        text_height = bbox[3] - bbox[1]

        if align == 'center':
            draw_x = x + (max_width - line_width) // 2
        else:
            draw_x = x

        draw.text((draw_x, y), line, font=font, fill=(0, 0, 0))
        y += text_height + line_spacing

    return y
|
| 37 |
+
|
| 38 |
+
def draw_component(draw, comp, margin, width, sub_font, small_font, y):
    """Draw one component box (middle text, optional bullets, label) and return its bottom y."""
    y += 60
    box_top = y
    # Middle text
    y = draw_wrapped_text(draw, comp.get("component_middle", ""), margin + 10, y, sub_font, width - 2 * (margin + 10))
    # Small-text bullets, only for text-type UI elements.
    # NOTE(review): the literal below is mojibake-garbled in this view (likely
    # "テキスト"); verify against the original file's encoding.
    if comp.get("UIelement") == "チE��スチE":
        for item in comp.get("component_small", []):
            y = draw_wrapped_text(draw, f"・ {item}", margin + 20, y, small_font, width - 2 * (margin + 20))
    box_bottom = y + 10
    # Outer frame
    draw.rectangle([margin, box_top, width - margin, box_bottom], outline="black", width=2)
    # Label: left-aligned black tag drawn above the box top edge.
    if comp.get("component_large"):
        txt = comp["component_large"]
        bbox = draw.textbbox((0, 0), txt, font=small_font)
        w, h = bbox[2] - bbox[0], bbox[3] - bbox[1]
        pad = 12
        # Place on the left side
        x0 = margin
        x1 = margin + w + pad
        y0 = box_top - h - pad
        # Label background
        draw.rectangle([x0, y0, x1, box_top], fill="black")
        # Bold effect via stroke_width
        draw.text((x0 + 3, y0 + 3), txt, font=small_font, fill="white", stroke_width=1, stroke_fill="white")
    return box_bottom
|
| 65 |
+
|
| 66 |
+
# レイアウト描画と高さ計算を共通化
|
| 67 |
+
def layout_and_draw(draw, fvinfo, cninfo, width, margin, fonts):
    """Render the first-view info (*fvinfo*) and content info (*cninfo*) onto *draw*.

    Called twice by ``info2img64``: once on a throwaway canvas purely to
    measure the total height, then on the final canvas. Returns the y
    coordinate just below the last drawn element.

    NOTE(review): the Japanese dict keys below are mojibake in this file
    (e.g. "キャッチコピー" = catch copy, "ビジュアル" = visuals); they are part
    of the input-JSON contract and must stay byte-identical.
    """
    y = margin
    # Main / sub catch copy, each line word-wrapped to the content width.
    for section, font in [("main_copy", fonts['main_bold']), ("sub_copy", fonts['sub_font'])]:
        for line in fvinfo["キャチE��コピ�E"].get(section, []):
            y = draw_wrapped_text(draw, line["text"], margin, y + (10 if section == "sub_copy" else 0), font, width - 2 * margin)
    # Visual placeholders (images are not rendered — only a labelled frame).
    for vis in fvinfo.get("ビジュアル", []):
        # Top margin before each placeholder.
        y += 20
        # Fixed height of the placeholder frame.
        placeholder_h = 100
        # Frame.
        draw.rectangle([margin, y, width - margin, y + placeholder_h], outline="gray", width=2)
        # Placeholder caption (mojibake; originally "[画像: ...]").
        text = f"[画僁E {vis}]"
        # Center the caption within the frame.
        text_w, text_h = draw.textbbox((0,0), text, font=fonts['sub_font'])[2:]
        tx = margin + (width - 2*margin - text_w) / 2
        ty = y + (placeholder_h - text_h) / 2
        draw.text((tx, ty), text, font=fonts['sub_font'], fill="gray")
        y += placeholder_h
    # CTA buttons: rounded red bar with centered label.
    for cta in fvinfo.get("CTA", []):
        y += 20
        draw.rounded_rectangle([margin, y, width - margin, y + 40], radius=10, fill=(255, 100, 100))
        y = draw_wrapped_text(draw, cta, margin, y + 8, fonts['sub_font'], width - 2 * margin, align='center')
        y += 10
    # Content components (boxed sections drawn by draw_component).
    for comp in cninfo.get("components", []):
        y = draw_component(draw, comp, margin, width, fonts['sub_font'], fonts['small_font'], y)
    return y
|
| 99 |
+
|
| 100 |
+
def info2img64(fvinfo_json_raw, cninfo_json_raw):
    """
    input1 (text):
    input2 (text):
    output1 (text): よりコンバ�Eジョンが高まるWEBペ�Eジを作るための観点を比輁E��ます、E
    """
    # Render a page wireframe from two JSON strings (first-view info and
    # content info) and return it as a base64-encoded PNG string.
    # NOTE: the docstring above is machine-parsed API metadata for the
    # framework and is intentionally left untouched.
    fvinfo = json.loads(fvinfo_json_raw)
    cninfo = json.loads(cninfo_json_raw)
    # Font setup: bundled Noto Sans JP variable font at three sizes.
    base = os.path.dirname(__file__)
    path = os.path.join(base, "../NotoSansJP-VariableFont_wght.ttf")
    fonts = {
        'main_bold': ImageFont.truetype(path, 24),
        'sub_font': ImageFont.truetype(path, 14),
        'small_font': ImageFont.truetype(path, 12)
    }
    width, margin = 600, 20
    # Pass 1: draw on an oversized scratch canvas purely to measure the height.
    tmp = Image.new("RGB", (width, 10000), "white")
    d_tmp = ImageDraw.Draw(tmp)
    y_end = layout_and_draw(d_tmp, fvinfo, cninfo, width, margin, fonts)
    final_h = y_end + 20
    # Pass 2: draw for real on a canvas cropped to the measured height.
    img = Image.new("RGB", (width, final_h), "white")
    draw = ImageDraw.Draw(img)
    layout_and_draw(draw, fvinfo, cninfo, width, margin, fonts)
    # Encode as PNG and return the base64 text.
    buf = io.BytesIO()
    img.save(buf, format="PNG")
    return base64.b64encode(buf.getvalue()).decode()
|
apis/keyword2urls.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import requests
|
| 2 |
+
import json
|
| 3 |
+
import os
|
| 4 |
+
from outscraper import ApiClient
|
| 5 |
+
from urllib.parse import urlparse, parse_qs
|
| 6 |
+
from src.utils.tracer import customtracer
|
| 7 |
+
|
| 8 |
+
def extract_u_value_if_translate(url):
    """Return the target URL hidden behind a Google Translate proxy link.

    When *url* points at ``translate.google.com``, the wrapped destination
    lives in the ``u`` query parameter; return its (percent-decoded) value,
    or ``None`` if the parameter is absent. Any other URL is returned
    unchanged.
    """
    parts = urlparse(url)
    # Non-translate hosts pass straight through.
    if parts.netloc != 'translate.google.com':
        return url
    wrapped = parse_qs(parts.query).get('u')
    return wrapped[0] if wrapped else None
|
| 21 |
+
|
| 22 |
+
@customtracer
def keyword2urls(query):
    """
    input1 (text): レム睡眠 薬
    output1 (json): 検索結果
    """
    # Search Google for *query* (first results page, Japanese locale) via the
    # Outscraper API and return the organic results with translate-proxy
    # links unwrapped. (A dead, commented-out Google CSE implementation was
    # removed here.)
    # NOTE(review): ``region`` is passed as "JA" (an ISO language code);
    # Outscraper regions are normally country codes ("JP") — confirm against
    # the Outscraper API docs before changing.
    api_client = ApiClient(api_key=os.environ.get('OUTSCRAPER_KEY'))
    results = api_client.google_search([query], language="ja", region="JA", pages_per_query=1)

    # Replace translate.google.com proxy links with the real target URL.
    new_results = []
    for result in results[0]["organic_results"]:
        result["link"] = extract_u_value_if_translate(result["link"])
        new_results.append(result)

    return new_results
|
apis/modifyButton.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, ValidationError
|
| 6 |
+
|
| 7 |
+
from functools import cache
|
| 8 |
+
from typing import List
|
| 9 |
+
from datetime import datetime
|
| 10 |
+
import pytz
|
| 11 |
+
|
| 12 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Flattens an OpenAI chat ``messages`` list into (system_prompt, user_text,
    images) and dispatches to :class:`LLMClient`. When *response_format* is a
    Pydantic model class, a structured call is made and the validated result
    is returned as a JSON string; otherwise the raw completion text is
    returned.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    client = LLMClient()
    system_prompt = None
    user_text = ""
    images = []
    for msg in messages:
        role = msg.get("role", "")
        c = msg.get("content", "")
        if role == "system":
            if isinstance(c, str):
                system_prompt = c
        elif role == "user":
            if isinstance(c, str):
                # NOTE: a later plain-string user message replaces earlier
                # text (assignment, not append).
                user_text = c
            elif isinstance(c, list):
                # Multimodal content: concatenate text parts, collect images.
                for part in c:
                    if isinstance(part, dict):
                        if part.get("type") == "text":
                            user_text += part.get("text", "")
                        elif part.get("type") == "image_url":
                            url = part.get("image_url", {}).get("url", "")
                            if url.startswith("data:"):
                                # Data URI: keep only the base64 payload
                                # after the "data:...;base64," prefix.
                                images.append(url.split(",", 1)[1] if "," in url else url)
                            else:
                                images.append(url)

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        # Structured-output path: LLMClient validates against the schema.
        result = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
            temperature=0,
        )
        return _json.dumps(result.model_dump(), ensure_ascii=False)
    else:
        # Free-form path: return the raw completion string.
        return client.call_raw(
            prompt=user_text,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
        )
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class newButton(BaseModel):
    """Structured-output schema: one rewritten button variant."""
    subCopy : str  # micro-copy / sub copy accompanying the button
    HTML: str  # complete <button> HTML markup
|
| 64 |
+
|
| 65 |
+
class newButtons(BaseModel):
    """Structured-output schema: the list of rewritten buttons."""
    Buttons: list[newButton]
|
| 67 |
+
|
| 68 |
+
@cache
def modifyButton(p, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input0 (text): ボタンの斁E��を通信業界に変えて。色は若老E��け�EぁE��グラチE�Eションで、サブコピ�Eもバイブスあがる感じに。影をつけて、�Eタンは丸くして. <button class="c-button" style="appearance:none;border:0;border-radius:5px;background:#4676D7;color:#fff;padding:8px 16px;font-size:16px;">申し込む</button> <button class="c-button" style="appearance:none;border:0;border-radius:5px;background:#4676D7;color:#fff;padding:8px 16px;font-size:16px;">申し込まなぁE/button>
    input1 (text): default
    output1 (json): 修正したボタン
    """
    start_time = datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S")
    print(start_time, f"modifyButton[{len(p)}]")

    # Resolve the OpenAI key: the literal "default" means fall back to env.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key

    # Bug fix: the previous call passed the undefined name `model`
    # positionally AND duplicate/unsupported keyword arguments (`model=`,
    # `messages=`, `top_p=`, ...) to _ask_raw_hf, which accepts only
    # (messages, model, response_format) — a guaranteed NameError/TypeError.
    # Build the system+user message list and pass only supported parameters.
    # (The unused `client = LLMClient()` instance was also removed.)
    response = _ask_raw_hf(
        [
            {
                "role": "system",
                "content": """入力されたボタンのHTMLの数だけ、HTMLを指示通りに書き換え、buttonタグのみをHTMLに出力します。buttonの位置はこちらで決めるのでposition: absoluteやtransformは使わなぁE��ください、E
""",
            },
            {"role": "user", "content": p},
        ],
        model='meta-llama/Llama-3.3-70B-Instruct',
        response_format=newButtons,
    )

    return response
|
apis/modifyHTML.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, ValidationError
|
| 6 |
+
|
| 7 |
+
from functools import cache
|
| 8 |
+
from typing import List
|
| 9 |
+
from datetime import datetime
|
| 10 |
+
import pytz
|
| 11 |
+
|
| 12 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Flattens an OpenAI chat ``messages`` list into (system_prompt, user_text,
    images) and dispatches to :class:`LLMClient`. When *response_format* is a
    Pydantic model class, a structured call is made and the validated result
    is returned as a JSON string; otherwise the raw completion text is
    returned.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    client = LLMClient()
    system_prompt = None
    user_text = ""
    images = []
    for msg in messages:
        role = msg.get("role", "")
        c = msg.get("content", "")
        if role == "system":
            if isinstance(c, str):
                system_prompt = c
        elif role == "user":
            if isinstance(c, str):
                # NOTE: a later plain-string user message replaces earlier
                # text (assignment, not append).
                user_text = c
            elif isinstance(c, list):
                # Multimodal content: concatenate text parts, collect images.
                for part in c:
                    if isinstance(part, dict):
                        if part.get("type") == "text":
                            user_text += part.get("text", "")
                        elif part.get("type") == "image_url":
                            url = part.get("image_url", {}).get("url", "")
                            if url.startswith("data:"):
                                # Data URI: keep only the base64 payload
                                # after the "data:...;base64," prefix.
                                images.append(url.split(",", 1)[1] if "," in url else url)
                            else:
                                images.append(url)

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        # Structured-output path: LLMClient validates against the schema.
        result = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
            temperature=0,
        )
        return _json.dumps(result.model_dump(), ensure_ascii=False)
    else:
        # Free-form path: return the raw completion string.
        return client.call_raw(
            prompt=user_text,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
        )
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class newHTMLs(BaseModel):
    """Structured-output schema: the list of generated HTML components."""
    HTMLs: list[str]
|
| 63 |
+
|
| 64 |
+
@cache
def modifyHTML(p, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input0 (text): ボタンの斁E��を通信業界に変えて。色は若老E��け�EぁE��グラチE�Eションで、サブコピ�Eもバイブスあがる感じに。影をつけて、�Eタンは丸くして. <button class="c-button" style="appearance:none;border:0;border-radius:5px;background:#4676D7;color:#fff;padding:8px 16px;font-size:16px;">申し込む</button> <button class="c-button" style="appearance:none;border:0;border-radius:5px;background:#4676D7;color:#fff;padding:8px 16px;font-size:16px;">申し込まなぁE/button>
    input1 (text): default
    output1 (json): 修正したボタン
    """
    start_time = datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S")
    # Bug fix: the log label said "modifyButton" (copy-paste error).
    print(start_time, f"modifyHTML[{len(p)}]")

    # Resolve the OpenAI key: the literal "default" means fall back to env.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key

    # Bug fix: the previous call passed the undefined name `model`
    # positionally AND duplicate/unsupported keyword arguments (`model=`,
    # `messages=`, `top_p=`, ...) to _ask_raw_hf, which accepts only
    # (messages, model, response_format) — a guaranteed NameError/TypeError.
    # Build the system+user message list and pass only supported parameters.
    # (The unused `client = LLMClient()` instance was also removed.)
    response = _ask_raw_hf(
        [
            {
                "role": "system",
                "content": """あなた�EHTMLとCSSの達人です、E
持E��された個数�E�なければ10個)�EHTMLコンポ�Eネントを提案してください、E
位置はこちらで決めるのでposition: absoluteやtransformは使わなぁE��ください、E
            """,
            },
            {"role": "user", "content": p},
        ],
        model='meta-llama/Llama-3.3-70B-Instruct',
        response_format=newHTMLs,
    )

    return response
|
apis/moment2normalize.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import json
|
| 3 |
+
from pydantic import BaseModel
|
| 4 |
+
from typing import Optional, List
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
import pytz
|
| 7 |
+
from src.utils.tracer import customtracer
|
| 8 |
+
from src.clients.llm_client import LLMClient
|
| 9 |
+
|
| 10 |
+
class Emotion(BaseModel):
    """User's emotional arc: where they are now vs. where they want to be."""
    current_state: str
    desired_state: str

class Common(BaseModel):
    """Fields shared by every normalized moment."""
    moment_raw: str  # the original, unprocessed moment text
    core_intent: str  # distilled intent behind the moment
    emotion: Emotion

class ToneWorldview(BaseModel):
    """Tone / worldview guidance for the first-view image."""
    worldview: str
    visual_style_hint: str

class VisualElements(BaseModel):
    """Concrete subject and action to depict in the image."""
    subject: str
    action: str

class FVImage(BaseModel):
    """Image-generation guidance for the first view (FV)."""
    tone_worldview: ToneWorldview
    visual_elements: VisualElements

class Keywords(BaseModel):
    """Keyword constraints for generated copy."""
    should_include: List[str]

class Text(BaseModel):
    """Text-generation guidance."""
    keywords: Keywords

class NormalizedMoment(BaseModel):
    """Top-level structured-output schema returned by moment2normalize."""
    common: Common
    fv_image: FVImage
    text: Text
|
| 41 |
+
|
| 42 |
+
@customtracer
def moment2normalize(
    prompt: str,
    openai_key: Optional[str] = None,
    gemini_key: Optional[str] = None,
    model: Optional[str] = None
):
    """
    input1 (text): モーメント正規化のためのプロンプト�E�シスチE��プロンプト + モーメント!E
    input3 (text): default
    input4 (text): default
    input5 (text): gpt-4o
    output (json): 正規化されたモーメンチESON
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)

    # DEBUG: log input parameters (keys are truncated so secrets never hit logs).
    # Bug fix: a debug line here referenced the undefined name `request`,
    # raising NameError on every call; it has been removed.
    print(f"[DEBUG] Input parameters:")
    print(f" - prompt length: {len(prompt)}")
    print(f" - openai_key: {'SET' if openai_key else 'None'} (value: {openai_key[:10] if openai_key else 'None'}...)")
    print(f" - gemini_key: {'SET' if gemini_key else 'None'} (value: {gemini_key[:10] if gemini_key else 'None'}...)")
    print(f" - model: {model}")

    # Default model when none is specified.
    selected_model = model if model else "meta-llama/Llama-3.3-70B-Instruct"
    print(f"[DEBUG] Selected model: {selected_model}")

    # Pick the API key matching the model family; "default" falls back to env.
    if selected_model and "gemini" in selected_model.lower():
        # Gemini models use GEMINI_KEY.
        if gemini_key and gemini_key != "default":
            api_key = gemini_key
            print(f"[DEBUG] Using provided gemini_key")
        else:
            api_key = os.environ.get('GEMINI_KEY')
            print(f"[DEBUG] Using GEMINI_KEY from environment: {'SET' if api_key else 'NOT SET'}")
        print(f"[DEBUG] Creating LLMClient with google_api_key (length: {len(api_key) if api_key else 0})")
        client = LLMClient(google_api_key=api_key)
    else:
        # All other models use the OpenAI key.
        if openai_key and openai_key != "default":
            api_key = openai_key
            print(f"[DEBUG] Using provided openai_key")
        else:
            api_key = os.environ.get('OPENAI_KEY')
            print(f"[DEBUG] Using OPENAI_KEY from environment: {'SET' if api_key else 'NOT SET'}")
        print(f"[DEBUG] Creating LLMClient with openai_key (length: {len(api_key) if api_key else 0})")
        client = LLMClient(openai_key=api_key)

    # Call the LLM with structured (schema-validated) output.
    result = client.call(
        prompt=prompt,
        schema=NormalizedMoment,
        model=selected_model,
        temperature=0
    )

    # Convert the Pydantic model to a plain dict for the API response.
    return result.model_dump()
|
apis/moment2theme.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import json
|
| 3 |
+
from pydantic import BaseModel
|
| 4 |
+
from typing import Optional
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
import pytz
|
| 7 |
+
from src.utils.tracer import customtracer
|
| 8 |
+
from src.clients.llm_client import LLMClient
|
| 9 |
+
|
| 10 |
+
class THEMES(BaseModel):
    """Structured-output schema for moment2theme.

    NOTE(review): the last two field names are Japanese and appear mojibake
    in this file; the trailing ``:`` of each annotation was also eaten by the
    encoding corruption and has been restored here. They are part of the
    JSON contract — do not rename without checking callers.
    """
    improved_proposition: str
    advertising_strategy: str
    appeal_words: list[str]
    reason: str
    クライアント向け訴求テーチE: str
    AI向け訴求テーチE: str
|
| 18 |
+
@customtracer
def moment2theme(
    p: str,
    openai_key: Optional[str] = None,
    gemini_key: Optional[str] = None,
    model: Optional[str] = None
):
    """
    input1 (text): ■自社: 親子でのスマ�E料��節紁E親子でのお得感 チE�Eタの余剰利用 通話とネット�Eコストパフォーマンス スマ�EチE��ュー支援 家族向け�E安�E機�E 豊富な端末ラインアチE�E ■競合他社: 22歳までのお得なプラン 大好評�Eサービス 親子でお得にスマ�Eを利用 22歳以下限定�Eお得なキャンペ�Eン 学生向け�Eお得さ 青春年齢向けのお得なプラン 低価格で高品質な通信サービス 格安SIMとスマ�Eの利便性 22歳以下限定�E割引キャンペ�Eン スマ�EチE��ュー応援 家族割引との絁E��合わせでの最安値 料��プランの多様性 親子でのお得な割引サービス スマ�EチE��ューのお得さ 特別割弁EチE�Eタ3GB提侁E割引サービスによるコスト削渁E新規契紁E��プラン変更による特典 機種代と基本料�Eダブル割弁E大容量データ エントリー制の特典シスチE�� 24時間ぁE��でもオンラインで手続き可能 家族�E員が割引を受けられるサービス 家族間の無料通話サービス プライムビデオ特典 22歳までの長期利用可能 製品ラインナップ�E允E��E期間限定�Eキャンペ�Eン 人気スマ�Eの割引販売 安�E教育サービス 話題�Eスマ�Eが安く手に入めE詳細なサポ�EトとFAQ シンプルな料��プラン 家族�E員の料��割弁E子育てサポ�Eトサービス 業界トレンド:「◯◯◯◯◯」「◯◯◯◯◯」「◯◯◯◯◯」が吁E��共通する訴求コンチE��チE��ある、E60字程度)
    input2 (text): default
    input3 (text): default
    input4 (text): gpt-4o
    output1 (json): 頁E��
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)

    # DEBUG: log input parameters (keys truncated so secrets never hit logs).
    # Bug fix: a debug line here referenced the undefined name `request`,
    # raising NameError on every call; it has been removed.
    print(f"[DEBUG] Input parameters:")
    print(f" - prompt length: {len(p)}")
    print(f" - openai_key: {'SET' if openai_key else 'None'} (value: {openai_key[:10] if openai_key else 'None'}...)")
    print(f" - gemini_key: {'SET' if gemini_key else 'None'} (value: {gemini_key[:10] if gemini_key else 'None'}...)")
    print(f" - model: {model}")

    # Default model when none is specified.
    selected_model = model if model else "meta-llama/Llama-3.3-70B-Instruct"
    print(f"[DEBUG] Selected model: {selected_model}")

    # Pick the API key matching the model family; "default" falls back to env.
    if selected_model and "gemini" in selected_model.lower():
        # Gemini models use GEMINI_KEY.
        if gemini_key and gemini_key != "default":
            api_key = gemini_key
            print(f"[DEBUG] Using provided gemini_key")
        else:
            api_key = os.environ.get('GEMINI_KEY')
            print(f"[DEBUG] Using GEMINI_KEY from environment: {'SET' if api_key else 'NOT SET'}")
        print(f"[DEBUG] Creating LLMClient with google_api_key (length: {len(api_key) if api_key else 0})")
        client = LLMClient(google_api_key=api_key)
    else:
        # All other models use the OpenAI key.
        if openai_key and openai_key != "default":
            api_key = openai_key
            print(f"[DEBUG] Using provided openai_key")
        else:
            api_key = os.environ.get('OPENAI_KEY')
            print(f"[DEBUG] Using OPENAI_KEY from environment: {'SET' if api_key else 'NOT SET'}")
        print(f"[DEBUG] Creating LLMClient with openai_key (length: {len(api_key) if api_key else 0})")
        client = LLMClient(openai_key=api_key)

    # Call the LLM with structured (schema-validated) output.
    result = client.call(
        prompt=p,
        schema=THEMES,
        model=selected_model,
        temperature=0
    )

    # Convert the Pydantic model to a plain dict for the API response.
    return result.model_dump()
|
apis/nayose_cn.py
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, ValidationError
|
| 6 |
+
|
| 7 |
+
from typing import List
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
import pytz
|
| 10 |
+
from enum import Enum
|
| 11 |
+
import psutil
|
| 12 |
+
|
| 13 |
+
from src.utils.tracer import customtracer
|
| 14 |
+
import re
|
| 15 |
+
import time
|
| 16 |
+
|
| 17 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Flattens an OpenAI chat ``messages`` list into (system_prompt, user_text,
    images) and dispatches to :class:`LLMClient`. When *response_format* is a
    Pydantic model class, a structured call is made and the validated result
    is returned as a JSON string; otherwise the raw completion text is
    returned.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    client = LLMClient()
    system_prompt = None
    user_text = ""
    images = []
    for msg in messages:
        role = msg.get("role", "")
        c = msg.get("content", "")
        if role == "system":
            if isinstance(c, str):
                system_prompt = c
        elif role == "user":
            if isinstance(c, str):
                # NOTE: a later plain-string user message replaces earlier
                # text (assignment, not append).
                user_text = c
            elif isinstance(c, list):
                # Multimodal content: concatenate text parts, collect images.
                for part in c:
                    if isinstance(part, dict):
                        if part.get("type") == "text":
                            user_text += part.get("text", "")
                        elif part.get("type") == "image_url":
                            url = part.get("image_url", {}).get("url", "")
                            if url.startswith("data:"):
                                # Data URI: keep only the base64 payload
                                # after the "data:...;base64," prefix.
                                images.append(url.split(",", 1)[1] if "," in url else url)
                            else:
                                images.append(url)

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        # Structured-output path: LLMClient validates against the schema.
        result = client.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
            temperature=0,
        )
        return _json.dumps(result.model_dump(), ensure_ascii=False)
    else:
        # Free-form path: return the raw completion string.
        return client.call_raw(
            prompt=user_text,
            model=model,
            system_prompt=system_prompt,
            images=images if images else None,
        )
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class ContentOrigin(str, Enum):
    """Where a piece of content came from: own company, competitor, or unknown.

    NOTE(review): member values are Japanese and appear mojibake-free here,
    but the surrounding field names in this file were corrupted; the missing
    ``:`` of each annotation below has been restored — verify against the
    original source encoding.
    """
    type0 = "自社"
    type1 = "他社"
    type2 = ""

class QandA(BaseModel):
    """One Q&A pair with its provenance."""
    質啁E: str
    回筁E: str
    惁E��溁E: list[ContentOrigin]

class Enquete(BaseModel):
    """One survey question with selectable options and provenance."""
    質啁E: str
    選択肢: list[str]
    惁E��溁E: list[ContentOrigin]

class Button(BaseModel):
    """CTA button text plus micro-copy and provenance."""
    ボタンチE��スチE: str
    マイクロコピ�E: str
    惁E��溁E: list[ContentOrigin]

class ContentDetail(BaseModel):
    """A text fragment with provenance."""
    チE��スチE: str
    惁E��溁E: list[ContentOrigin]

class Price(BaseModel):
    """A price value with provenance."""
    価格: int
    惁E��溁E: list[ContentOrigin]

class Content(BaseModel):
    """One full content section: heading, intro, body, footnote, Q&A, button, price."""
    見�EぁE: ContentDetail
    導�E: ContentDetail
    冁E��: ContentDetail
    注釁E: ContentDetail
    質疁E: QandA
    ボタン: Button
    料��: Price

class newCategory(BaseModel):
    """Structured-output schema returned by nayose_cn: a regrouped category."""
    title : str
    content : List[Content]
    originalCategories: list[str]
|
| 107 |
+
|
| 108 |
+
@customtracer
def nayose_cn(p, model, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input1 (text): ■構�E要素 啁E��説明、問ぁE��わせ ■允E�EコンチE��チE�� カレーハウスCoCo壱番屋�E式サイトです。メニュー紹介やココイチ�E楽しみ方のご案�E、現在地から近くのお店を探せる便利な店�E検索など便利な惁E��をお届けしてぁE��す。お客様相諁E�� 0120-055188. 受付時間:平日 9時aE7時(土・日・祝日・夏季休暇及�E年末年始を除く!E お客様Q&AはこちめE
    input2 (text): gpt-4o
    input2 (text): default
    output1 (json): 新しいコンチE��チE
    """
    start_time = datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S")
    print(f"nayose_cn {start_time} {model}")
    # Resolve the OpenAI key: the literal "default" means fall back to env.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key

    # Bug fix: the previous implementation forwarded a kwargs dict containing
    # model/messages/response_format/top_p/temperature/reasoning_effort into
    # _ask_raw_hf, which accepts only (messages, model, response_format) —
    # a guaranteed TypeError (with `model` also passed twice). The per-model
    # parameter pruning (gpt-4 / gpt-5 special cases) existed only to shape
    # those unforwardable kwargs and has been dropped along with them.
    messages = [
        {
            "role": "system",
            "content": """プロンプトで持E��されたタスクを、以下を守りながら実行します、E
1. 列挙された「�EになるカチE��リ一覧」を持E��通りグルーピングして新しいカチE��リ名を定義
2. originalMiddleCategoriesには忁E��「�EになるカチE��リ一覧」から1つずつ一言一句変えずにそ�Eまま引用。改変せずに丸ごと引用
3. これから生�Eする頁E��に対して漏れなく利用した惁E��溁EContentOrigin)を選ぶ。「�E社」「他社」「」�E3つから選択(褁E��も可�E�E
4. 日本語でタイトルを決め��後に、指定された持E��された個数のContentを生成し、�E部の頁E���E�見�Eし、導�E、�E容、注釈、料金、質疑、�Eタン�E��Eすべてを生成。質疑やフォームはContentにつぁEつまでとし、褁E��の質疑�Eフォームが忁E��な場合�EContentを褁E��用愁E
5. 質問に対して頁E��が不要であれば\"-\"、料金なめEを回答、回答�E体がjsonとして正しくなければ、補宁E
""",
        },
        {"role": "user", "content": p},
    ]

    try:
        response = _ask_raw_hf(messages, model, response_format=newCategory)
        return response

    except ValidationError as e:
        # Dump the failing prompt only on schema-validation failure.
        error_message = str(e)
        print("Validation Error occurred:", error_message)
        print("#######################")
        print(p)
        raise e  # re-raise for the caller

    except Exception:
        # Simple implementation: propagate everything else unchanged.
        raise
|
apis/nayose_fv.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
import json
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from pydantic import BaseModel, ValidationError
|
| 6 |
+
from typing import List
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
import pytz
|
| 9 |
+
from enum import Enum
|
| 10 |
+
import psutil
|
| 11 |
+
from src.utils.tracer import customtracer
|
| 12 |
+
|
| 13 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Accepts a chat-completions-style ``messages`` list (system/user roles,
    string or multi-part content).  When ``response_format`` is a Pydantic
    model class the structured ``call`` path is used and the validated result
    is returned as a JSON string; otherwise ``call_raw`` returns plain text.
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    sys_prompt = None
    text_parts = []
    image_payloads = []

    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system":
            if isinstance(content, str):
                sys_prompt = content
        elif role == "user":
            if isinstance(content, str):
                # A plain-string user message replaces any accumulated text.
                text_parts = [content]
            elif isinstance(content, list):
                for part in content:
                    if not isinstance(part, dict):
                        continue
                    kind = part.get("type")
                    if kind == "text":
                        text_parts.append(part.get("text", ""))
                    elif kind == "image_url":
                        url = part.get("image_url", {}).get("url", "")
                        if url.startswith("data:") and "," in url:
                            # Keep only the base64 payload of a data URI.
                            url = url.split(",", 1)[1]
                        image_payloads.append(url)

    user_text = "".join(text_parts)
    llm = LLMClient()

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        parsed = llm.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=sys_prompt,
            images=image_payloads if image_payloads else None,
            temperature=0,
        )
        return _json.dumps(parsed.model_dump(), ensure_ascii=False)

    return llm.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=sys_prompt,
        images=image_payloads if image_payloads else None,
    )
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class ContentOrigin(str, Enum):
    # Provenance label for generated content.  Serialized as the Japanese
    # string value (used as-is in the LLM's structured output).
    type1 = "自社"  # own company
    type2 = "他社"  # competitor
    type3 = ""      # derived/abstracted content with no direct source
|
| 66 |
+
|
| 67 |
+
class Copy(BaseModel):
    """Copywriting block: one main copy plus three sub copies.

    NOTE(review): the field identifiers are mojibake-damaged in this view;
    they were reconstructed as the most plausible originals.  The names are
    the JSON schema keys sent to the LLM — confirm against the repository
    before relying on them.
    """
    メインコピー: str   # main copy (reconstructed name)
    サブコピー1: str    # sub copy 1 (reconstructed name)
    サブコピー2: str    # sub copy 2 (reconstructed name)
    サブコピー3: str    # sub copy 3 (reconstructed name)
    情報源: list[ContentOrigin]  # provenance of quoted info (reconstructed name)
|
| 73 |
+
|
| 74 |
+
class Cta(BaseModel):
    """CTA block: button label and supporting microcopy.

    NOTE(review): field identifiers reconstructed from a mojibake-damaged
    view — confirm exact schema keys against the repository.
    """
    ボタンテキスト: str  # button text (reconstructed name)
    マイクロコピー: str  # microcopy (reconstructed name)
    情報源: list[ContentOrigin]  # provenance (reconstructed name)
|
| 78 |
+
|
| 79 |
+
class Visual(BaseModel):
    """Visual block: content description and designer work instruction.

    NOTE(review): field identifiers reconstructed from a mojibake-damaged
    view — confirm exact schema keys against the repository.
    """
    内容: str      # content description (reconstructed name)
    作成指示: str  # production instruction for the designer (reconstructed name)
    情報源: list[ContentOrigin]  # provenance (reconstructed name)
|
| 83 |
+
|
| 84 |
+
class Authority(BaseModel):
    """Authority/credibility block (same shape as ``Visual``).

    NOTE(review): field identifiers reconstructed from a mojibake-damaged
    view — confirm exact schema keys against the repository.
    """
    内容: str      # content description (reconstructed name)
    作成指示: str  # production instruction for the designer (reconstructed name)
    情報源: list[ContentOrigin]  # provenance (reconstructed name)
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class newCategory(BaseModel):
    """Top-level structured response for ``nayose_fv``.

    NOTE(review): Japanese field identifiers reconstructed from a
    mojibake-damaged view — confirm exact schema keys against the repo.
    NOTE(review): the system prompt refers to "originalMiddleCategories"
    while this schema exposes "originalCategories" — possible naming
    inconsistency worth confirming.
    """
    コピー: Copy        # copywriting block (reconstructed name)
    CTA: Cta
    ビジュアル: Visual
    権威付け: Authority
    originalCategories: list[str]  # verbatim quotes of the source categories
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
@customtracer
def nayose_fv(p, model, openai_key=os.environ.get('OPENAI_KEY')):
    """
    input1 (text): ■構�E要素 啁E��説明、問ぁE��わせ ■允E�EコンチE��チE�� カレーハウスCoCo壱番屋�E式サイトです。メニュー紹介やココイチ�E楽しみ方のご案�E、現在地から近くのお店を探せる便利な店�E検索など便利な惁E��をお届けしてぁE��す。お客様相諁E�� 0120-055188. 受付時間:平日 9時aE7時(土・日・祝日・夏季休暇及�E年末年始を除く!E お客様Q&AはこちめE
    input2 (text): gpt-4o
    input3 (text): default
    output1 (json): 新しいコンチE��チE
    """
    # Resolve the OpenAI key: "default" falls back to the OPENAI_KEY env var.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), f"nayose_fv {model}")

    # NOTE(review): this prompt text appears encoding-damaged in this view;
    # it is preserved verbatim rather than guessed at.
    system_prompt = """まず以下�Eタスクを行った後、指示のタスクを行います、E
1. 列挙された「�EになるカチE��リ一覧」を持E��通りグルーピングして新しいカチE��リを作ります、E
2. これから生�Eする頁E��に対して漏れなく利用した惁E��溁EContentOrigin)を選ぶ。�E体的に引用した惁E��源につぁE��「�E社」「他社」「」�E3つから選択(褁E��も可�E�。抽象皁E��派生させた場合�E「」とする、E
3. 「�E容」には持E��された�E体的なコンチE��チE��、「作業依頼」にはそ�E「�E容」をチE��イナ�Eに依頼する際�E作業持E��を書き�Eす、E
4. 出力フォーマット�E「テーブル形式」には、その冁E��めE��業依頼を見やすく頁E��に刁E��て、�Eークダウンの表形式で書き�Eしてください。表は、E��E��を行方向、値を�E方向に持っぁE衁E列以上で作り、�Eークダウンは改行を確実にしてください。セパレータ行�E忁E��です、E
5. originalMiddleCategoriesには忁E��「�EになるカチE��リ一覧」から1つずつ一言一句変えずにそ�Eまま引用します。どんなに長くても丸ごと引用します、E
"""

    try:
        # BUG FIX: the previous call passed ``model`` both positionally and
        # as a keyword, plus ``messages=`` and sampling kwargs that
        # ``_ask_raw_hf(messages, model, response_format=None)`` does not
        # accept — every invocation raised ``TypeError``.  Route the
        # system+user messages through the wrapper's actual signature.
        # (The per-model temperature/reasoning_effort tuning the broken call
        # tried to forward is not supported by the wrapper and was dropped.)
        response = _ask_raw_hf(
            [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": p},
            ],
            model,
            response_format=newCategory,
        )
        return response

    except ValidationError as e:
        # On schema-validation failure dump the prompt for debugging, then
        # re-raise (preserves the original behaviour).
        print("Validation Error occurred:", str(e))
        print("#######################")
        print(p)
        raise
|
apis/rader.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import matplotlib.pyplot as plt
|
| 2 |
+
import numpy as np
|
| 3 |
+
import json
|
| 4 |
+
from io import BytesIO
|
| 5 |
+
from PIL import Image
|
| 6 |
+
from matplotlib import font_manager
|
| 7 |
+
import os
|
| 8 |
+
script_path = os.path.abspath(__file__)
|
| 9 |
+
print("Script absolute path:", script_path)
|
| 10 |
+
|
| 11 |
+
def rader(data_json, max_val):
    """
    input1 (text): [{ "key": "Clarity", "val": 4 }, { "key": "Value Proposition", "val": 4 }, { "key": "Relevance", "val": 4 }, { "key": "Distraction", "val": 4 }, { "key": "Urgency", "val": 2 }]
    input2 (number): 5
    output1 (image): レーダーチャーチE
    """
    # Parse the input; malformed JSON or missing keys yields an error string
    # instead of an exception (Gradio-friendly behaviour).
    try:
        entries = json.loads(data_json)
        labels = [entry["key"] for entry in entries]
        values = [entry["val"] for entry in entries]
    except (json.JSONDecodeError, KeyError):
        return "Invalid JSON input. Ensure it is a list of dictionaries with 'key' and 'val'."

    # Register the bundled Japanese font so axis labels render correctly.
    font_path = "/home/user/app/NotoSansJP-VariableFont_wght.ttf"
    font_manager.fontManager.addfont(font_path)
    jp_font = font_manager.FontProperties(fname=font_path)
    plt.rcParams['font.family'] = jp_font.get_name()

    # Close the polygon by repeating the first vertex at angle 0.
    spoke_count = len(labels)
    closed_values = values + values[:1]
    spoke_angles = np.linspace(0, 2 * np.pi, spoke_count, endpoint=False).tolist()
    spoke_angles.append(0)

    # Draw the radar chart on a polar axis.
    fig, ax = plt.subplots(figsize=(6, 6), subplot_kw=dict(polar=True))
    ax.fill(spoke_angles, closed_values, color="blue", alpha=0.45)
    ax.plot(spoke_angles, closed_values, color="blue", linewidth=2)
    ax.set_xticks(spoke_angles[:-1])
    # Apply the Japanese font explicitly on the tick labels.
    ax.set_xticklabels(labels, fontproperties=jp_font, fontsize=12)
    ax.set_ylim(0, int(max_val))

    # Render to an in-memory PNG and hand back a PIL image.
    buffer = BytesIO()
    plt.savefig(buffer, format="png", bbox_inches="tight", dpi=100)
    plt.close(fig)
    buffer.seek(0)
    return Image.open(buffer)
|
apis/rader_dual.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import matplotlib.pyplot as plt
|
| 2 |
+
import numpy as np
|
| 3 |
+
import json
|
| 4 |
+
from io import BytesIO
|
| 5 |
+
from PIL import Image
|
| 6 |
+
from matplotlib import font_manager
|
| 7 |
+
import os
|
| 8 |
+
import base64
|
| 9 |
+
|
| 10 |
+
script_path = os.path.abspath(__file__)
|
| 11 |
+
print("Script absolute path:", script_path)
|
| 12 |
+
|
| 13 |
+
def rader_dual(data_json, max_val, data_json2=None):
    """
    input1 (text): [{"key": "\u4fa1\u5024\u63d0\u6848", "val": 13.0}, {"key": "\u660e\u78ba\u6027", "val": 12.0}, {"key": "\u95a2\u9023\u6027", "val": 12.0}, {"key": "\u5206\u6563", "val": 13.0}, {"key": "\u7dca\u6025\u6027", "val": 9.0}, {"key": "\u4e0d\u5b89", "val": 9.0}]
    input2 (number): 15
    input3 (text): [{"key": "\u4fa1\u5024\u63d0\u6848", "val": 13.0}, {"key": "\u660e\u78ba\u6027", "val": 13.0}, {"key": "\u95a2\u9023\u6027", "val": 13.0}, {"key": "\u5206\u6563", "val": 13.0}, {"key": "\u7dca\u6025\u6027", "val": 13.0}, {"key": "\u4e0d\u5b89", "val": 12.0}]
    output1 (image): レーターチャーチE
    output2 (text): base64斁E���E
    """
    # Primary series: malformed input yields an error string, not a raise.
    try:
        primary = json.loads(data_json)
        labels = [entry["key"] for entry in primary]
        values = [entry["val"] for entry in primary]
    except (json.JSONDecodeError, KeyError):
        return "Invalid JSON input. Ensure it is a list of dictionaries with 'key' and 'val'."

    # Optional comparison series (labels are taken from the primary series;
    # presumably both series share the same keys in the same order).
    values2 = None
    if data_json2:
        try:
            secondary = json.loads(data_json2)
            values2 = [entry["val"] for entry in secondary]
        except (json.JSONDecodeError, KeyError):
            return "Invalid secondary JSON input. Ensure it is a list of dictionaries with 'key' and 'val'."

    # Register the bundled Japanese font for the axis labels.
    font_path = "/home/user/app/NotoSansJP-VariableFont_wght.ttf"
    font_manager.fontManager.addfont(font_path)
    jp_font = font_manager.FontProperties(fname=font_path)
    plt.rcParams['font.family'] = jp_font.get_name()

    # Close each polygon by repeating the first vertex at angle 0.
    spoke_count = len(labels)
    closed_primary = values + values[:1]
    angles = np.linspace(0, 2 * np.pi, spoke_count, endpoint=False).tolist()
    angles.append(0)

    fig, ax = plt.subplots(figsize=(6, 6), subplot_kw=dict(polar=True))
    # Original data in gray.
    ax.fill(angles, closed_primary, color="gray", alpha=0.45, label="オリジナル")
    ax.plot(angles, closed_primary, color="gray", linewidth=2)

    # Comparison data (test pattern) in light red, when provided.
    if values2:
        closed_secondary = values2 + values2[:1]
        ax.fill(angles, closed_secondary, color="#ff9999", alpha=0.3, label="チE��トパターン")
        ax.plot(angles, closed_secondary, color="#ff9999", linewidth=2)

    ax.set_xticks(angles[:-1])
    ax.set_xticklabels(labels, fontproperties=jp_font, fontsize=12)
    ax.set_ylim(0, int(max_val))
    ax.legend(loc='upper right', bbox_to_anchor=(1.3, 1.1))

    # Render once to an in-memory PNG; reuse the bytes for both outputs.
    buffer = BytesIO()
    plt.savefig(buffer, format="png", bbox_inches="tight", dpi=100)
    plt.close(fig)
    buffer.seek(0)

    img = Image.open(buffer)
    base64_str = base64.b64encode(buffer.getvalue()).decode('utf-8')
    return img, base64_str
|
| 81 |
+
|
apis/samediff2winningrate.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from src.clients.llm_client import LLMClient
|
| 3 |
+
|
| 4 |
+
def _ask_raw_hf(messages, model, response_format=None):
    """Compatibility wrapper: routes OpenAI-style messages through HF LLMClient.

    Flattens a chat-completions-style ``messages`` list into a single system
    prompt, user text, and image list, then dispatches to the structured
    ``call`` path (when ``response_format`` is a Pydantic model class,
    returning a JSON string) or to ``call_raw`` (plain text).
    """
    from src.clients.llm_client import LLMClient
    import json as _json

    sys_prompt = None
    text_parts = []
    image_payloads = []

    for message in messages:
        role = message.get("role", "")
        content = message.get("content", "")
        if role == "system":
            if isinstance(content, str):
                sys_prompt = content
        elif role == "user":
            if isinstance(content, str):
                # A plain-string user message replaces any accumulated text.
                text_parts = [content]
            elif isinstance(content, list):
                for part in content:
                    if not isinstance(part, dict):
                        continue
                    kind = part.get("type")
                    if kind == "text":
                        text_parts.append(part.get("text", ""))
                    elif kind == "image_url":
                        url = part.get("image_url", {}).get("url", "")
                        if url.startswith("data:") and "," in url:
                            # Keep only the base64 payload of a data URI.
                            url = url.split(",", 1)[1]
                        image_payloads.append(url)

    user_text = "".join(text_parts)
    llm = LLMClient()

    if response_format is not None and hasattr(response_format, "model_json_schema"):
        parsed = llm.call(
            prompt=user_text,
            schema=response_format,
            model=model,
            system_prompt=sys_prompt,
            images=image_payloads if image_payloads else None,
            temperature=0,
        )
        return _json.dumps(parsed.model_dump(), ensure_ascii=False)

    return llm.call_raw(
        prompt=user_text,
        model=model,
        system_prompt=sys_prompt,
        images=image_payloads if image_payloads else None,
    )
|
| 51 |
+
|
| 52 |
+
# Module-level client reused by samediff2winningrate below.
# NOTE(review): the call site uses ``client.chat.completions.create`` —
# this assumes LLMClient exposes an OpenAI-compatible interface; confirm
# against src/clients/llm_client (the ``_ask_raw_hf`` wrapper above is
# defined but unused in this module).
client = LLMClient()
|
| 53 |
+
|
| 54 |
+
def samediff2winningrate(same, diff, bottom, target, openai_key = os.environ.get('OPENAI_KEY')):
    """
    input1 (text): どちらも、携帯の画像が表示されてぁE��、E��話プラン名が列挙されてぁE��す、E
    input2 (text): 1枚目はCTAボタンには「�Eランに申し込む」とだけ書かれてぁE��す、E枚目のCTAボタンには今かめE9刁E9秒だけ�Eラン50%引きと書かれてぁE��す、E
    input3 (text): 1枚目と2枚目のCVRを予測して、CVRの差刁E�E琁E��を�E挙してください、E
    input4 (text): ユーザ目線を好む拁E��老E
    input5 (text): default
    output1 (text): CVRと説昁E
    """
    # Predicts which of two described designs wins on CVR, restyled for the
    # given audience (``bottom``/``target`` are interpolated into the prompt).
    # Resolve the OpenAI key: "default" falls back to the OPENAI_KEY env var.
    if openai_key == "default":
        os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_KEY')
    else:
        os.environ['OPENAI_API_KEY'] = openai_key

    # NOTE(review): uses the module-level ``client`` through an OpenAI-style
    # ``chat.completions.create`` call — confirm LLMClient exposes this
    # interface; the ``_ask_raw_hf`` wrapper above is never used here.
    response = client.chat.completions.create(
        #model="ft:gpt-3.5-turbo-1106:dlpo-inc:account531test:8xB3Rpq1",
        #model="ft:gpt-3.5-turbo-1106:dlpo-inc:winrate:9mCPVJjO",
        # Hard-coded fine-tuned model for win-rate prediction.
        model="ft:gpt-3.5-turbo-1106:dlpo-inc:winrate:9mGek3DY",

        messages=[
            {
                "role": "system",
                "content": [
                    {
                        "type": "text",
                        # BUG FIX: the ``{target}`` placeholder was
                        # mojibake-corrupted into ``、Etarget}`` (an f-string
                        # syntax error); reconstructed — confirm the
                        # surrounding Japanese wording against the repo.
                        "text": f"チE��インの共通点と差刁E��ら、どちらがCVRが高いかを予測し、{bottom}、{target}向けに書き味を変えて"
                    }
                ]
            },
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        "text": f"類似点�E�E{same} \n 差異:{diff}"
                    }
                ]
            }
        ],
        temperature=0,
        max_tokens=3000,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0
    )
    return response
|
apis/sample.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Sample API demonstrating the new LLM client framework.
|
| 3 |
+
|
| 4 |
+
This API shows how to use the unified LLM client with Pydantic schema validation.
|
| 5 |
+
One file = one API function (matching the file name).
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
from typing import List
|
| 10 |
+
|
| 11 |
+
from pydantic import BaseModel, Field
|
| 12 |
+
|
| 13 |
+
from src.clients import LLMClient
|
| 14 |
+
from src.utils.tracer import customtracer
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# =============================================================================
|
| 18 |
+
# Response Schema (Pydantic Model)
|
| 19 |
+
# =============================================================================
|
| 20 |
+
|
| 21 |
+
class TextAnalysisResponse(BaseModel):
    """Schema for text analysis results."""
    # Field descriptions are surfaced in the structured-output schema sent
    # to the LLM, so they double as instructions for the model.
    summary: str = Field(description="Brief summary of the input text")
    sentiment: str = Field(description="Sentiment: positive, negative, or neutral")
    key_points: List[str] = Field(description="List of key points extracted from text")
    confidence: float = Field(ge=0.0, le=1.0, description="Confidence score 0-1")
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# =============================================================================
|
| 30 |
+
# API Function
|
| 31 |
+
# =============================================================================
|
| 32 |
+
|
| 33 |
+
@customtracer
def sample(
    text: str,
    model: str = "meta-llama/Llama-3.1-8B-Instruct",
    openai_key: str = "default",
) -> dict:
    """
    input1 (text): This product is amazing! The quality exceeded my expectations.
    input2 (text): gpt-4o
    input3 (text): default
    output1 (json): Analysis result with summary, sentiment, key_points, and confidence
    """
    # Resolve the API key: "default" falls back to the environment.
    if openai_key == "default":
        api_key = os.environ.get("OPENAI_KEY") or os.environ.get("OPENAI_API_KEY")
    else:
        api_key = openai_key

    llm = LLMClient(openai_key=api_key)

    # Prompt describing the four required analysis outputs.
    analysis_prompt = f"""Analyze the following text and provide:
1. A brief summary
2. The overall sentiment (positive, negative, or neutral)
3. Key points extracted from the text
4. Your confidence level in this analysis (0-1)

Text to analyze:
{text}
"""

    guidance = (
        "You are a text analysis assistant. Provide accurate, concise analysis. "
        "Focus on the actual content and avoid over-interpretation."
    )

    # Structured call: the response is validated against TextAnalysisResponse.
    analysis = llm.call(
        prompt=analysis_prompt,
        schema=TextAnalysisResponse,
        model=model,
        system_prompt=guidance,
        temperature=0.3,
    )

    # Gradio's JSON component expects a plain dict.
    return analysis.model_dump()
|
apis/score2summary.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from pydantic import BaseModel
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
import pytz
|
| 5 |
+
|
| 6 |
+
from src.clients.llm_client import LLMClient
|
| 7 |
+
from src.utils.tracer import customtracer
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class Category(BaseModel):
    """Per-section summary bucket for the LP score report.

    NOTE(review): the second Japanese field name is mojibake-damaged in this
    view; ``自社改善案`` is a reconstruction — confirm the exact schema key
    against the repository.
    """
    own_company_advantage_and_advice: str  # own-company analysis + advice
    competitors_advantage: str             # competitor-only findings
    自社長所: str    # own-company strengths
    自社改善案: str  # own-company improvement plan (reconstructed name)
|
| 15 |
+
|
| 16 |
+
class Summaries(BaseModel):
    # One Category summary per audited landing-page section.
    FV: Category
    CTA: Category
    CONTENTS: Category
    CREATIVE: Category
    ACCESSIBILITY: Category
|
| 22 |
+
|
| 23 |
+
@customtracer
def score2summary(
    p,
    openai_key=os.environ.get('OPENAI_KEY'),
    gemini_key=None,
    model="meta-llama/Llama-3.3-70B-Instruct",
):
    """
    input1 (text): 下記�E「�E社LPのコンチE��チE��を「構�E案」�E吁E��E��を書き換えください。書き換え�E「�E社LPのコンチE��チE���EみのチE�Eタを活用し、用語やニュアンス、特徴を活用すること。ただし、「構�E案」�E持つチE�Eマ�Eそ�Eままにすることを厳守。なお、商材名めE��ービス名がある場合�E、忁E��「�E社LPのコンチE��チE��情報に変更、E
    input2 (text): default
    input3 (text): default
    input4 (text): gpt-4o
    output1 (json): 頁E��
    """
    print(datetime.now(pytz.timezone('Asia/Tokyo')).strftime("%Y-%m-%d %H:%M:%S"), __name__)

    selected_model = model if model else "meta-llama/Llama-3.3-70B-Instruct"

    # Pick the provider credential matching the requested model family;
    # "default" (or a missing key) falls back to the environment variables.
    if selected_model and "gemini" in selected_model.lower():
        if gemini_key and gemini_key != "default":
            chosen_key = gemini_key
        else:
            chosen_key = os.environ.get('GEMINI_KEY')
        client = LLMClient(google_api_key=chosen_key)
    else:
        if openai_key and openai_key != "default":
            chosen_key = openai_key
        else:
            chosen_key = os.environ.get('OPENAI_KEY')
        client = LLMClient(openai_key=chosen_key)

    # NOTE(review): this prompt text appears encoding-damaged in this view;
    # preserved verbatim.
    system_prompt = "持E��通りに要紁E��整琁E��行ってください。own_company_advantage_and_advice(自社相対位置+自社長所+自社改喁E��)とtotal_analysisには自社の惁E��のみ、competitors_advantageには他社の惁E��のみを記載してください"

    # Structured call validated against the Summaries schema.
    summary = client.call(
        prompt=p,
        schema=Summaries,
        model=selected_model,
        system_prompt=system_prompt,
        temperature=0,
    )
    return summary.model_dump()
|