import logging
import os
import asyncio
import tempfile
from typing import Optional
import openai
from core.config import settings
from core.prompts import get_mindmap_system_prompt
from services.s3_service import s3_service


logger = logging.getLogger(__name__)
| |
|
class MindMapService:
    """Generates Mermaid mindmap source via the OpenAI chat API.

    Input is either a PDF stored in S3 (``file_key``) or raw text
    (``text_input``). Every blocking SDK/network call is dispatched through
    ``asyncio.to_thread`` so the service is safe to call from an event loop.
    """

    # Model settings shared by both input paths.
    _MODEL = "gpt-4o-mini"
    _TEMPERATURE = 0.7

    def __init__(self):
        # Synchronous OpenAI client; all calls on it below are wrapped in
        # asyncio.to_thread to avoid blocking the event loop.
        self.openai_client = openai.OpenAI(api_key=settings.OPENAI_API_KEY)

    @staticmethod
    def _strip_code_fence(raw: str) -> str:
        """Return the diagram source inside a ```mermaid (or plain ```) fence.

        If no fence is present, the input is returned stripped of surrounding
        whitespace.
        """
        if "```mermaid" in raw:
            raw = raw.split("```mermaid")[1].split("```")[0]
        elif "```" in raw:
            raw = raw.split("```")[1].split("```")[0]
        return raw.strip()

    async def _complete(self, messages: list) -> str:
        """Run a chat completion off-thread and return the message text.

        ``message.content`` may be None (e.g. refusals); normalized to "".
        """
        response = await asyncio.to_thread(
            self.openai_client.chat.completions.create,
            model=self._MODEL,
            messages=messages,
            temperature=self._TEMPERATURE,
        )
        return response.choices[0].message.content or ""

    async def _generate_from_file(self, file_key: str, system_prompt: str) -> str:
        """Download the S3 object, upload it to OpenAI, and run the completion.

        Both the local temp file and the uploaded OpenAI file are cleaned up
        even when the completion fails.
        """
        tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf")
        tmp_path = tmp.name
        tmp.close()
        try:
            await asyncio.to_thread(
                s3_service.s3_client.download_file,
                settings.AWS_S3_BUCKET,
                file_key,
                tmp_path,
            )

            def upload_to_openai():
                with open(tmp_path, "rb") as f:
                    return self.openai_client.files.create(
                        file=f,
                        purpose="assistants",
                    )

            uploaded_file = await asyncio.to_thread(upload_to_openai)
            try:
                messages = [
                    {"role": "system", "content": system_prompt},
                    {
                        "role": "user",
                        "content": [
                            {
                                "type": "file",
                                "file": {"file_id": uploaded_file.id},
                            }
                        ],
                    },
                ]
                return await self._complete(messages)
            finally:
                # Delete the uploaded file even if the completion raised, so
                # failed requests do not leak files in the OpenAI account.
                # Best-effort: never mask the original exception.
                try:
                    await asyncio.to_thread(
                        self.openai_client.files.delete,
                        uploaded_file.id,
                    )
                except Exception:
                    logger.warning(
                        "Failed to delete OpenAI file %s", uploaded_file.id
                    )
        finally:
            if os.path.exists(tmp_path):
                await asyncio.to_thread(os.remove, tmp_path)

    async def generate_mindmap(
        self,
        file_key: Optional[str] = None,
        text_input: Optional[str] = None,
    ) -> str:
        """
        Generates a Mermaid mindmap from either an S3 PDF or direct text input.
        Uses asyncio.to_thread for all blocking I/O operations.

        Returns:
            The Mermaid diagram source with any surrounding code fence removed.

        Raises:
            ValueError: if neither file_key nor text_input is provided.
        """
        try:
            system_prompt = get_mindmap_system_prompt()

            if file_key:
                raw_content = await self._generate_from_file(
                    file_key, system_prompt
                )
            elif text_input:
                messages = [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": text_input},
                ]
                raw_content = await self._complete(messages)
            else:
                raise ValueError("Either file_key or text_input must be provided")

            return self._strip_code_fence(raw_content)
        except Exception:
            # logger.exception records the traceback; re-raise for the caller.
            logger.exception("Mind map generation failed")
            raise
| |
|
# Module-level singleton: import this instance rather than constructing a new
# OpenAI client per request.
mindmap_service = MindMapService()
| |
|