| | import os |
| | import google.generativeai as genai |
| | from openai import OpenAI |
| | from anthropic import Anthropic |
| | import httpx |
| |
|
| | |
| | LLM_PROVIDERS = { |
| | "Gemini": { |
| | "models": [ |
| | "gemini-2.5-pro", |
| | "gemini-2.5-flash", |
| | "gemini-2.5-flash-lite-preview-06-17", |
| | "gemini-2.5-flash-preview-native-audio-dialog", |
| | "gemini-2.5-flash-exp-native-audio-thinking-dialog", |
| | "gemini-2.5-flash-preview-tts", |
| | "gemini-2.5-pro-preview-tts", |
| | "gemini-2.0-flash", |
| | "gemini-2.0-flash-preview-image-generation", |
| | "gemini-2.0-flash-lite", |
| | "gemini-2.0-flash-live-001", |
| | "gemini-live-2.5-flash-preview", |
| | "gemini-1.5-flash", |
| | "gemini-1.5-flash-8b", |
| | "gemini-1.5-pro" |
| | ], |
| | "api_key_env": "GEMINI_API_KEY" |
| | }, |
| | "OpenAI": { |
| | "models": [], |
| | "api_key_env": "OPENAI_API_KEY" |
| | }, |
| | "Anthropic": { |
| | "models": [], |
| | "api_key_env": "ANTHROPIC_API_KEY" |
| | }, |
| | "OpenRouter": { |
| | "models": [], |
| | "api_key_env": "OPENROUTER_API_KEY" |
| | } |
| | } |
| |
|
def get_llm_models(provider_name, api_key):
    """Return the list of available model ids for the selected provider.

    Args:
        provider_name: One of "Gemini", "OpenAI", "Anthropic", "OpenRouter".
        api_key: API key, used when the provider exposes a model-listing
            endpoint (OpenAI, OpenRouter).

    Returns:
        A (possibly empty) list of model id strings. Unknown providers and
        fetch failures yield an empty list.
    """
    models = []
    if provider_name == "Gemini":
        # Static catalogue; return a copy so callers cannot mutate the
        # module-level LLM_PROVIDERS constant.
        models = list(LLM_PROVIDERS["Gemini"]["models"])
    elif provider_name == "OpenAI":
        try:
            client = OpenAI(api_key=api_key)
            response = client.models.list()
            # Keep chat-capable GPT models; drop vision/instruct variants.
            models = [model.id for model in response.data if "gpt" in model.id and "vision" not in model.id and "instruct" not in model.id]
            models.sort()
        except Exception as e:
            print(f"OpenAI modelleri çekilirken hata oluştu: {e}")
            models = []
    elif provider_name == "Anthropic":
        # The Anthropic SDK version used here exposes no model-listing
        # endpoint, so a known-good static list is returned. (The previously
        # constructed-but-unused client and the try/except around this
        # static list were dead code and have been removed.)
        models = [
            "claude-3-opus-20240229",
            "claude-3-sonnet-20240229",
            "claude-3-haiku-20240307"
        ]
    elif provider_name == "OpenRouter":
        try:
            headers = {"Authorization": f"Bearer {api_key}"}
            # Explicit timeout so a stalled endpoint cannot hang the caller.
            response = httpx.get("https://openrouter.ai/api/v1/models", headers=headers, timeout=30.0)
            response.raise_for_status()
            data = response.json()
            models = [model["id"] for model in data["data"]]
            models.sort()
        except Exception as e:
            print(f"OpenRouter modelleri çekilirken hata oluştu: {e}")
            models = []
    return models
| |
|
def validate_api_key(provider_name, api_key):
    """Validate the provider's API key with a minimal live request.

    Args:
        provider_name: One of "Gemini", "OpenAI", "Anthropic", "OpenRouter".
        api_key: The key to check.

    Returns:
        (is_valid, message) tuple; the message is user-facing (Turkish).
    """
    # Reject empty or whitespace-only keys before making any network call.
    if not api_key or not api_key.strip():
        return False, "API Anahtarı boş olamaz."

    try:
        if provider_name == "Gemini":
            genai.configure(api_key=api_key)
            # Try a few known model names; the first successful generation
            # proves the key works.
            test_models = ['gemini-1.5-flash', 'gemini-2.0-flash', 'gemini-pro']
            last_error = None

            for model_name in test_models:
                try:
                    model = genai.GenerativeModel(model_name)
                    response = model.generate_content("test")
                    return True, f"API Anahtarı Geçerli! (Test modeli: {model_name})"
                except Exception as e:
                    last_error = e
                    continue

            # Every candidate model failed; surface the most recent error.
            if last_error:
                raise last_error
            else:
                raise Exception("Gemini API ile bağlantı kurulamadı.")
        elif provider_name == "OpenAI":
            client = OpenAI(api_key=api_key)
            client.models.list()
            return True, "API Anahtarı Geçerli!"
        elif provider_name == "Anthropic":
            client = Anthropic(api_key=api_key)
            # Cheapest possible request: one output token on the smallest model.
            client.messages.create(
                model="claude-3-haiku-20240307",
                max_tokens=1,
                messages=[{"role": "user", "content": "hi"}]
            )
            return True, "API Anahtarı Geçerli!"
        elif provider_name == "OpenRouter":
            headers = {"Authorization": f"Bearer {api_key}"}
            # Explicit timeout so a stalled endpoint cannot hang validation.
            response = httpx.get("https://openrouter.ai/api/v1/models", headers=headers, timeout=30.0)
            response.raise_for_status()
            return True, "API Anahtarı Geçerli!"
        else:
            return False, "Bilinmeyen sağlayıcı."
    except Exception as e:
        return False, f"API Anahtarı Geçersiz veya Bir Hata Oluştu: {e}"
| |
|
| |
|
| |
|
| |
|
| |
|
def call_llm(provider_name, model_name, api_key, prompt):
    """Send a single-turn prompt to the selected LLM provider.

    Args:
        provider_name: One of "Gemini", "OpenAI", "Anthropic", "OpenRouter".
        model_name: Provider-specific model id.
        api_key: API key for the provider.
        prompt: User prompt text (sent as a single user message).

    Returns:
        The model's text response, or a Turkish error string when the
        provider is unknown or the call fails (errors are reported in-band
        rather than raised, matching existing callers).
    """
    try:
        if provider_name == "Gemini":
            genai.configure(api_key=api_key)
            model = genai.GenerativeModel(model_name)

            generation_config = genai.GenerationConfig(
                temperature=0.7,
                max_output_tokens=8192,
            )

            response = model.generate_content(
                prompt,
                generation_config=generation_config,
                request_options={"timeout": 120}
            )
            return response.text
        elif provider_name == "OpenAI":
            # timeout added for consistency with the Gemini/OpenRouter
            # branches so a stalled request cannot hang indefinitely.
            client = OpenAI(api_key=api_key, timeout=120.0)
            response = client.chat.completions.create(
                model=model_name,
                messages=[
                    {"role": "user", "content": prompt}
                ]
            )
            return response.choices[0].message.content
        elif provider_name == "Anthropic":
            # timeout added for consistency with the other branches.
            client = Anthropic(api_key=api_key, timeout=120.0)
            response = client.messages.create(
                model=model_name,
                max_tokens=4000,
                messages=[
                    {"role": "user", "content": prompt}
                ]
            )
            return response.content[0].text
        elif provider_name == "OpenRouter":
            headers = {
                "Authorization": f"Bearer {api_key}",
                "Content-Type": "application/json"
            }
            data = {
                "model": model_name,
                "messages": [
                    {"role": "user", "content": prompt}
                ],
                "max_tokens": 8192
            }
            with httpx.Client(timeout=120.0) as client:
                response = client.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=data)
                response.raise_for_status()
                return response.json()["choices"][0]["message"]["content"]
        else:
            return "Bilinmeyen LLM sağlayıcısı."
    except Exception as e:
        return f"LLM çağrısı sırasında hata oluştu: {e}"
| |
|
| |
|
| |
|
| |
|
| | import re |
| | import zipfile |
| | import io |
| |
|
def parse_llm_output(llm_output):
    """Parse LLM output into a {file_path: content} mapping.

    File sections are delimited by markdown-style header lines such as
    "# context/foo.md". Only paths under context/, commands/ or prompts/
    with .md/.txt/.tpl extensions are recognised; any text before the first
    header is ignored.

    Args:
        llm_output: Raw text produced by the LLM.

    Returns:
        Dict mapping each recognised file path to its stripped content.
    """
    files = {}

    # \n? (the old pattern required a mandatory \n) also accepts a header
    # that is the very last line of the output with no trailing newline.
    # [\w\-] is equivalent to the old [\w\d_\-]: \d and _ are inside \w.
    header_re = re.compile(
        r'^#\s*((?:context|commands|prompts)/[\w\-]+\.(?:md|txt|tpl))[ \t]*\n?',
        re.MULTILINE,
    )

    last_end = 0
    last_file_path = None

    for match in header_re.finditer(llm_output):
        current_file_path = match.group(1).strip()
        current_start = match.end()

        if last_file_path:
            # Everything between the previous header and this one belongs
            # to the previous file.
            content = llm_output[last_end:match.start()].strip()
            files[last_file_path] = content

        last_file_path = current_file_path
        last_end = current_start

    # Flush the final section (it runs to the end of the output).
    if last_file_path:
        content = llm_output[last_end:].strip()
        files[last_file_path] = content

    return files
| |
|
def create_zip_from_files(files_dict):
    """Build an in-memory ZIP archive from a {path: content} mapping.

    Args:
        files_dict: Mapping of archive-relative file paths to text content.

    Returns:
        The raw bytes of the resulting ZIP file (DEFLATE-compressed).
    """
    buffer = io.BytesIO()
    archive = zipfile.ZipFile(buffer, mode="w",
                              compression=zipfile.ZIP_DEFLATED,
                              allowZip64=False)
    with archive:
        for path, text in files_dict.items():
            # Contents are text; encode once at the archive boundary.
            archive.writestr(path, text.encode("utf-8"))
    buffer.seek(0)
    return buffer.getvalue()
| |
|
| |
|
| |
|