Spaces:
Sleeping
Sleeping
fix: fix Gemini model handling (complete truncated default "gemini-1.5-flash" and pass normalized model name to the API call)
Browse files
app.py
CHANGED
|
@@ -179,7 +179,7 @@ class RepoAnalyzer:
|
|
| 179 |
self.chat_history = []
|
| 180 |
|
| 181 |
async def stream_gemini_response(
|
| 182 |
-
self, prompt: str, api_key: str = None, model: str = "gemini-
|
| 183 |
) -> AsyncGenerator[str, None]:
|
| 184 |
"""Stream response dari Gemini API menggunakan OpenAI client"""
|
| 185 |
try:
|
|
@@ -191,6 +191,8 @@ class RepoAnalyzer:
|
|
| 191 |
# Gunakan OpenAI client untuk Gemini
|
| 192 |
client = AsyncOpenAI(api_key=actual_key, base_url=GEMINI_BASE_URL)
|
| 193 |
|
|
|
|
|
|
|
| 194 |
# Tambahkan konteks repository jika ada
|
| 195 |
messages = [
|
| 196 |
{
|
|
@@ -209,7 +211,7 @@ class RepoAnalyzer:
|
|
| 209 |
|
| 210 |
try:
|
| 211 |
stream = await client.chat.completions.create(
|
| 212 |
-
model=
|
| 213 |
messages=messages,
|
| 214 |
stream=True,
|
| 215 |
temperature=0.7,
|
|
|
|
| 179 |
self.chat_history = []
|
| 180 |
|
| 181 |
async def stream_gemini_response(
|
| 182 |
+
self, prompt: str, api_key: str = None, model: str = "gemini-1.5-flash"
|
| 183 |
) -> AsyncGenerator[str, None]:
|
| 184 |
"""Stream response dari Gemini API menggunakan OpenAI client"""
|
| 185 |
try:
|
|
|
|
| 191 |
# Gunakan OpenAI client untuk Gemini
|
| 192 |
client = AsyncOpenAI(api_key=actual_key, base_url=GEMINI_BASE_URL)
|
| 193 |
|
| 194 |
+
api_model = f"models/{model}" if not model.startswith("models/") else model
|
| 195 |
+
|
| 196 |
# Tambahkan konteks repository jika ada
|
| 197 |
messages = [
|
| 198 |
{
|
|
|
|
| 211 |
|
| 212 |
try:
|
| 213 |
stream = await client.chat.completions.create(
|
| 214 |
+
model=api_model,
|
| 215 |
messages=messages,
|
| 216 |
stream=True,
|
| 217 |
temperature=0.7,
|