Spaces:
Sleeping
Sleeping
File size: 2,747 Bytes
8d9eecc c9d3c5c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c 8d9eecc c55241c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 |
# gemini_client.py
import os

from dotenv import load_dotenv

# Load .env before reading GEMINI_API_KEY below.
load_dotenv()

# Try to import the official Google GenAI SDK. If it fails, fall back to a safe mock.
# NOTE: the previous unconditional `import google.generativeai as genai` above this
# guard defeated the fallback entirely — a missing SDK raised ImportError before the
# try/except could run. The guarded import below is now the only import of the SDK.
try:
    import google.generativeai as genai
    GENAI_AVAILABLE = True
except Exception:
    genai = None
    GENAI_AVAILABLE = False

from llm_file_generator import file_generator

# API key is optional at import time; call sites report a helpful error instead.
api_key = os.getenv("GEMINI_API_KEY")
if GENAI_AVAILABLE:
    if not api_key:
        # Do not raise here to allow the Space to run; instead, functions will return helpful errors.
        print("Warning: GEMINI_API_KEY not set. Gemini calls will return an error.")
    else:
        genai.configure(api_key=api_key)
# System prompt for the file-generation-capable model. Built once at import
# time: the f-string interpolates file_generator.get_enhanced_prompt(), so the
# XML file-generation instructions reflect whatever that helper returns when
# this module is first loaded.
SYSTEM_INSTRUCTION = f"""You are AcidopShell AI Assistant - a helpful coding assistant integrated into a custom shell.
You can answer questions, explain concepts, and generate code files.
{file_generator.get_enhanced_prompt()}
Be concise but helpful. When generating files, always provide complete, working code.
You can provide explanations before or after the XML tags."""
# Model handles default to None (mock mode); they are only populated when the
# SDK imported successfully AND an API key was found.
model_with_files = None
model_regular = None

if GENAI_AVAILABLE and api_key:
    try:
        # One model carries the file-generation system prompt, one is plain.
        model_with_files = genai.GenerativeModel(
            "gemini-2.0-flash-exp",
            system_instruction=SYSTEM_INSTRUCTION
        )
        model_regular = genai.GenerativeModel("gemini-2.0-flash-exp")
    except Exception as e:
        # Initialization failure is non-fatal: warn and stay in mock mode.
        print(f"Warning: Failed to initialize GenAI models: {e}")
        model_with_files = None
        model_regular = None
def _not_available_msg():
return "⚠️ Gemini SDK not available or GEMINI_API_KEY not configured in this environment."
def ask_gemini(prompt: str):
    """Send *prompt* to the plain Gemini model and return the reply text.

    Returns a human-readable error string (never raises) when the SDK or
    model is unavailable, or when the API call fails.
    """
    # Guard: both the SDK and an initialized model are required.
    if not (GENAI_AVAILABLE and model_regular):
        return _not_available_msg()
    try:
        # .text stays inside the try — accessing it can raise (e.g. blocked
        # responses), and that must surface as the same error string.
        return model_regular.generate_content(prompt).text
    except Exception as exc:
        return f"⚠️ Error calling Gemini: {exc}"
def ask_gemini_with_file_generation(query: str, project_dir: str = "."):
    """Query Gemini with the file-generation system prompt enabled.

    Any files described in the response are written under *project_dir*
    by ``file_generator.process_response``.

    Returns:
        (response_text, files_were_generated) on success, or
        (error_string, False) when the SDK/model is unavailable or the
        call fails.
    """
    # Guard: both the SDK and the file-generation model are required.
    if not (GENAI_AVAILABLE and model_with_files):
        return _not_available_msg(), False
    try:
        # Both the API call and the file write-out are covered by the same
        # handler, matching the original all-or-nothing error reporting.
        text = model_with_files.generate_content(query).text
        created = file_generator.process_response(text, project_dir)
        return text, created
    except Exception as exc:
        return f"⚠️ Error calling Gemini: {exc}", False
|