|
|
import json
|
|
|
import joblib
|
|
|
import numpy as np
|
|
|
import torch
|
|
|
import os
|
|
|
from transformers import AutoModel
|
|
|
|
|
|
|
|
|
# Route all HTTP(S) traffic through a local proxy so the Hugging Face model
# download in create_static_embeddings() can reach the network.
# NOTE(review): hard-coded localhost:1080 unconditionally overwrites any proxy
# already configured in the environment — confirm this is intentional.
os.environ['HTTP_PROXY'] = 'http://localhost:1080'
os.environ['HTTPS_PROXY'] = 'http://localhost:1080'
|
|
|
|
|
|
def create_static_embeddings(
    input_path="./data/Bear_room/static_info.json",
    output_path="./data/Bear_room/static_info_embeddings.pkl",
):
    """
    Load static information from a JSON file, generate embeddings for the
    text fields, and save the result as a pickle file.

    The input JSON must contain the keys 'general_info', 'downtime_prompt',
    and 'channel_info' (a mapping of channel name -> {sub_key: text}).
    Each text field is replaced in-place by its embedding (truncated to 256
    dimensions) and the whole structure is serialized with joblib.

    Args:
        input_path (str): Path to the input static_info.json file.
        output_path (str): Path to save the output .pkl file with embeddings.

    Raises:
        KeyError: If a required key is missing from the input JSON.
        ValueError: If a channel in 'channel_info' has no text entries.
    """
    print("Initializing embedding model...")
    device = "cuda:0" if torch.cuda.is_available() else "cpu"
    print(f"Using device: {device}")

    # trust_remote_code is required: jina-embeddings-v3 ships its own modeling
    # code, including the custom `encode` method used below.
    model = AutoModel.from_pretrained(
        "jinaai/jina-embeddings-v3",
        trust_remote_code=True
    ).to(device=device)

    print(f"Loading data from '{input_path}'...")
    # Explicit encoding avoids platform-dependent decoding of the JSON file.
    with open(input_path, "r", encoding="utf-8") as f:
        static_info = json.load(f)

    # Collect every text snippet into one flat list so the model embeds them
    # in a single batch.  The first two slots are the top-level fields.
    texts_to_embed = [
        static_info['general_info'],
        static_info['downtime_prompt']
    ]

    # Record (channel_name, sub_key) for each appended channel text so the
    # embeddings can be routed back to the right place afterwards.
    channel_info_paths = []
    for channel_name, details_dict in static_info['channel_info'].items():
        if not details_dict:
            # np.stack below would fail on an empty list with a cryptic
            # error; fail early with a clear message instead.
            raise ValueError(f"Channel '{channel_name}' has no text entries to embed.")
        for sub_key, text_value in details_dict.items():
            texts_to_embed.append(text_value)
            channel_info_paths.append((channel_name, sub_key))

    print(f"Found {len(texts_to_embed)} text snippets to embed.")

    print("Generating embeddings...")
    # truncate_dim=256 relies on the model's Matryoshka-style training: it
    # keeps the saved file small while preserving retrieval quality.
    embeddings = model.encode(
        texts_to_embed,
        truncate_dim=256
    )
    print(f"Embeddings generated with shape: {embeddings.shape}")

    print("Replacing text data with embeddings...")
    # Keep 2-D (1, dim) slices so the top-level fields have a consistent rank.
    static_info['general_info'] = embeddings[0:1, :]
    static_info['downtime_prompt'] = embeddings[1:2, :]

    # Channel texts start right after the two top-level fields.
    channel_embeddings_start_index = 2
    channel_embeddings_dict = {key: [] for key in static_info['channel_info'].keys()}
    for i, (channel_name, sub_key) in enumerate(channel_info_paths):
        embedding_index = channel_embeddings_start_index + i
        channel_embeddings_dict[channel_name].append(embeddings[embedding_index:embedding_index+1, :])

    for channel_name, embeddings_list in channel_embeddings_dict.items():
        # NOTE(review): squeeze collapses a single-entry channel to shape
        # (dim,) while multi-entry channels become (n, dim).  Preserved as-is
        # because downstream consumers may depend on these shapes — confirm.
        stacked_embedding = np.squeeze(np.stack(embeddings_list, axis=0))
        static_info['channel_info'][channel_name] = stacked_embedding
        print(f"Channel '{channel_name}' embeddings shape: {stacked_embedding.shape}")

    # Create the output directory only when the path actually has one
    # (dirname of a bare filename is "").
    output_dir = os.path.dirname(output_path)
    if output_dir:
        os.makedirs(output_dir, exist_ok=True)

    print(f"Saving embeddings to '{output_path}'...")
    with open(output_path, "wb") as f:
        joblib.dump(static_info, f)

    print("Process completed successfully!")
|
|
|
|
|
|
|
|
|
# Run the embedding pipeline with its default input/output paths when this
# file is executed as a script (no effect on import).
if __name__ == "__main__":
    create_static_embeddings()