ErNewdev0 committed on
Commit
ec6c0cf
·
verified Β·
1 Parent(s): a988fb6

chore: add gemini

Browse files
Files changed (1) hide show
  1. app.py +262 -254
app.py CHANGED
@@ -6,15 +6,64 @@ import requests
6
  import json
7
  from datetime import datetime
8
  import textwrap
 
 
 
9
 
10
# Metadata
CURRENT_TIME = "2025-05-22 22:42:10"
CURRENT_USER = "ErRickow"

# Ollama API settings.
# BUG FIX: os.environ["OLLAMA_API"] raised KeyError at import time when the
# variable was unset; fall back to the standard local Ollama endpoint instead.
OLLAMA_API = os.environ.get("OLLAMA_API", "http://localhost:11434")

# Default available models
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
  DEFAULT_MODELS = [
19
  "llama2",
20
  "codellama",
@@ -26,287 +75,246 @@ DEFAULT_MODELS = [
26
  "orca-mini"
27
  ]
28
 
29
def check_ollama_status():
    """Return True if the Ollama API answers /api/tags, else False."""
    try:
        response = requests.get(f"{OLLAMA_API}/api/tags", timeout=10)
        return response.status_code == 200
    except requests.RequestException:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any network failure just means "not connected".
        return False
36
def list_available_models():
    """Return the sorted union of installed and default model names.

    Falls back to the defaults when the Ollama API is unreachable or returns
    an unexpected payload.
    """
    try:
        # timeout added: the original call could block the UI indefinitely
        response = requests.get(f"{OLLAMA_API}/api/tags", timeout=10)
        installed_models = [model['name'] for model in response.json().get('models', [])]
        # Combine installed and default models, de-duplicated
        all_models = list(set(installed_models + DEFAULT_MODELS))
        return sorted(all_models)  # Sort for better presentation
    except (requests.RequestException, ValueError, KeyError):
        # Narrowed from a bare `except:`; ValueError covers bad JSON.
        return sorted(DEFAULT_MODELS)
46
def download_model(model_name):
    """Pull a model via the Ollama API, streaming progress to stdout.

    Returns a human-readable status string; never raises.
    """
    if not model_name:
        return "Please select a model to download"

    print(f"Starting download of model: {model_name}")
    try:
        headers = {
            "Content-Type": "application/json",
        }

        response = requests.post(
            f"{OLLAMA_API}/api/pull",
            headers=headers,
            json={"name": model_name},
            stream=True,
            # BUG FIX: no timeout meant a dead server hung the UI forever.
            # (connect timeout, per-chunk read timeout) for the streamed pull.
            timeout=(10, 300),
        )

        if response.status_code == 200:
            # The pull endpoint streams JSON progress lines until done.
            for line in response.iter_lines():
                if line:
                    print(f"Download progress: {line.decode()}")
            return f"Successfully downloaded model: {model_name}"
        else:
            error_msg = f"Failed to download model. Status: {response.status_code}"
            print(error_msg)
            return error_msg

    except Exception as e:
        # Boundary handler: surface the error text to the UI instead of raising.
        error_msg = f"Error downloading model: {str(e)}"
        print(error_msg)
        return error_msg
78
def clone_repository(repo_url, github_token, branch=None):
    """Clone a GitHub repository with token authentication.

    Returns (True, repo_name) on success or (False, error_message) on failure.
    """
    import shutil  # local import: only needed here

    repo_name = repo_url.split('/')[-1].replace('.git', '')
    print(f"Cloning repository: {repo_url} to {repo_name}")

    if os.path.exists(repo_name):
        print(f"Removing existing repository: {repo_name}")
        # Portable replacement for spawning `rm -rf` via subprocess.
        shutil.rmtree(repo_name)

    try:
        owner_repo = '/'.join(repo_url.split('/')[-2:])
        # Token is embedded in the clone URL for authentication.
        auth_url = f"https://{github_token}@github.com/{owner_repo}"

        cmd = ['git', 'clone']
        if branch:
            cmd.extend(['--branch', branch])
        cmd.append(auth_url)

        process = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            # Disable any interactive git credential prompt.
            env=dict(os.environ, GIT_ASKPASS='echo', GIT_TERMINAL_PROMPT='0')
        )

        if process.returncode == 0:
            print(f"Successfully cloned repository: {repo_name}")
            return True, repo_name
        else:
            # SECURITY FIX: git echoes the remote URL (with the embedded
            # token) into stderr on failure — mask it before logging/returning.
            stderr = process.stderr.replace(github_token, '***') if github_token else process.stderr
            print(f"Failed to clone repository: {stderr}")
            return False, stderr
    except Exception as e:
        error_msg = f"Error cloning repository: {str(e)}"
        print(error_msg)
        return False, error_msg
-
114
def analyze_with_ollama(model_name, text):
    """Process text with Ollama model"""
    print(f"\nAnalyzing with {model_name}...")
    try:
        # Build the generation request once, up front.
        request_body = {
            "model": model_name,
            "prompt": text,
            "stream": False,
            "options": {
                "temperature": 0.7,
                "top_p": 0.9,
                "max_tokens": 2048,
                "stop": None
            }
        }

        print("Sending request to Ollama API...")
        api_reply = requests.post(
            f"{OLLAMA_API}/api/generate",
            headers={"Content-Type": "application/json"},
            json=request_body,
            timeout=60
        )

        print(f"Response status: {api_reply.status_code}")

        # Guard clauses instead of nested if/else: non-200 first...
        if api_reply.status_code != 200:
            error_msg = f"API Error {api_reply.status_code}: {api_reply.text}"
            print(error_msg)
            return error_msg

        # ...then a malformed body, then the success path.
        body = api_reply.json()
        if 'response' not in body:
            print("Unexpected response format:", body)
            return "Error: Unexpected response format from model"

        print("Got response from model")
        return body['response']

    except Exception as e:
        error_msg = f"Error processing with model: {str(e)}"
        print(error_msg)
        return error_msg
 
 
 
 
157
 
158
def chunk_text(text, max_length=4000):
    """Split *text* into pieces of at most *max_length* characters.

    Words and hyphenated tokens are never broken, mirroring textwrap.wrap.
    """
    pieces = textwrap.wrap(
        text,
        max_length,
        break_long_words=False,
        break_on_hyphens=False,
    )
    return pieces
 
160
 
161
def read_file_safely(file_path):
    """Read a text file, trying several encodings in order.

    Returns (True, content) on success, otherwise (False, error_message).
    """
    for encoding in ('utf-8', 'latin-1', 'cp1252'):
        try:
            with open(file_path, 'r', encoding=encoding) as handle:
                data = handle.read()
            print(f"Successfully read file with {encoding} encoding")
            return True, data
        except UnicodeDecodeError:
            # Wrong encoding guess — move on to the next candidate.
            continue
        except Exception as e:
            error_msg = f"Error reading file: {str(e)}"
            print(error_msg)
            return False, error_msg
    return False, "Unable to read file with supported encodings"
 
 
 
 
 
 
 
 
 
176
 
177
def create_ui():
    """Build the Gradio UI: a model-management tab and a repository-analysis tab."""
    with gr.Blocks(title="Ollama Repository Analyzer") as app:
        # Header banner with session metadata baked in at build time.
        gr.Markdown(f"""
        # Ollama Repository Analyzer

        Current Time: {CURRENT_TIME}
        User: {CURRENT_USER}
        """)

        with gr.Tab("Model Management"):
            # Connection indicator plus a picker for models to download.
            model_status = gr.Textbox(label="Ollama Status", interactive=False)
            available_models = gr.Dropdown(
                label="Available Models",
                choices=DEFAULT_MODELS,
                interactive=True
            )
            download_button = gr.Button("Download Selected Model")
            download_status = gr.Textbox(label="Download Status", interactive=False)

            def update_status():
                # Refresh connection state and repopulate the model dropdown.
                status = "Connected" if check_ollama_status() else "Not Connected"
                models = list_available_models()
                return status, gr.Dropdown(choices=models)

            download_button.click(
                fn=download_model,
                inputs=[available_models],
                outputs=[download_status]
            )

        with gr.Tab("Repository Analysis"):
            repo_url = gr.Textbox(label="Repository URL")
            github_token = gr.Textbox(label="GitHub Token", type="password")
            branch = gr.Textbox(label="Branch (optional)")
            clone_button = gr.Button("Clone Repository")
            clone_status = gr.Textbox(label="Clone Status", interactive=False)

            with gr.Row():
                file_list = gr.Dropdown(label="Files in Repository", multiselect=True)
                selected_model = gr.Dropdown(
                    label="Select Model for Analysis",
                    choices=DEFAULT_MODELS,
                    interactive=True
                )

            analyze_button = gr.Button("Analyze Selected Files")
            debug_output = gr.Textbox(label="Debug Output", interactive=False)
            analysis_output = gr.Markdown()

            def handle_clone(url, token, branch_name):
                # Clone, then list every non-.git file to feed the dropdown.
                print(f"\nCloning repository: {url}")
                success, result = clone_repository(url, token, branch_name if branch_name else None)
                if success:
                    files = [str(p) for p in Path(result).rglob('*')
                             if p.is_file() and '.git' not in str(p)]
                    print(f"Found {len(files)} files in repository")
                    return f"Successfully cloned: {result}", gr.Dropdown(choices=files)
                return f"Clone failed: {result}", None

            def analyze_files(files, model_name):
                # Run each selected file through the model, chunk by chunk;
                # returns (markdown results, debug log) for the two outputs.
                if not files:
                    return "Please select files to analyze", "No files selected"

                debug_info = []
                results = []

                debug_info.append(f"Starting analysis with model: {model_name}")
                debug_info.append(f"Files to analyze: {len(files)}")

                for file_path in files:
                    debug_info.append(f"\nProcessing file: {file_path}")
                    success, content = read_file_safely(file_path)

                    if success:
                        # Large files are split so each prompt stays model-sized.
                        chunks = chunk_text(content)
                        debug_info.append(f"Split into {len(chunks)} chunks")
                        analysis = []

                        for i, chunk in enumerate(chunks, 1):
                            debug_info.append(f"Analyzing chunk {i}/{len(chunks)}")
                            prompt = f"""
                            Analyze this code/content:

                            File: {file_path}
                            Part {i}/{len(chunks)}

                            ```
                            {chunk}
                            ```

                            Provide:
                            1. Brief overview
                            2. Key functionality
                            3. Notable patterns or concerns
                            4. Suggestions (if any)
                            """

                            response = analyze_with_ollama(model_name, prompt)
                            debug_info.append(f"Got response of length: {len(response)}")
                            analysis.append(response)

                        results.append(f"### Analysis of {file_path}\n\n" +
                                       "\n\n=== Next Part ===\n\n".join(analysis))
                    else:
                        error_msg = f"Error reading {file_path}: {content}"
                        debug_info.append(error_msg)
                        results.append(error_msg)

                return "\n\n---\n\n".join(results), "\n".join(debug_info)

            clone_button.click(
                fn=handle_clone,
                inputs=[repo_url, github_token, branch],
                outputs=[clone_status, file_list]
            )

            analyze_button.click(
                fn=analyze_files,
                inputs=[file_list, selected_model],
                outputs=[analysis_output, debug_output]
            )

        # NOTE(review): the original comment claimed "every 30 seconds", but
        # app.load runs update_status only once when the page loads (no
        # `every=` argument is passed) — confirm intended behavior.
        app.load(update_status, outputs=[model_status, available_models])

    return app
303
 
304
# Launch the app
if __name__ == "__main__":
    # Startup banner with the session metadata constants.
    print(f"""
    Starting Ollama Repository Analyzer
    Time: {CURRENT_TIME}
    User: {CURRENT_USER}
    """)

    app = create_ui()
    # NOTE(review): app.launch() presumably follows beyond this excerpt — confirm.
 
6
  import json
7
  from datetime import datetime
8
  import textwrap
9
+ import google.generativeai as genai
10
+ import asyncio
11
+ from typing import Generator, AsyncGenerator
12
 
13
# Metadata
CURRENT_TIME = "2025-05-23 12:44:47"
CURRENT_USER = "ErRickow"

# Help text rendered in the UI (Indonesian user-facing markdown).
GITHUB_TOKEN_HELP = """
### Cara Mendapatkan GitHub Token:

1. Kunjungi [GitHub Token Settings](https://github.com/settings/tokens)
2. Klik "Generate new token" > "Generate new token (classic)"
3. Beri nama token Anda di "Note"
4. Pilih scope:
   - `repo` (untuk akses repository private)
   - `read:packages` (opsional, untuk akses package)
5. Klik "Generate token"
6. **PENTING**: Salin token segera! Token hanya ditampilkan sekali

Token diperlukan untuk:
- Mengakses repository private
- Clone repository dengan rate limit lebih tinggi
- Mengakses fitur GitHub API
"""

GEMINI_API_HELP = """
### Cara Mendapatkan Gemini API Key:

1. Kunjungi [Google AI Studio](https://makersuite.google.com/app/apikey)
2. Login dengan akun Google Anda
3. Klik "Create API Key"
4. Salin API Key yang dihasilkan

Catatan:
- Gemini memberikan kuota gratis setiap bulan
- Key bisa dibuat ulang jika diperlukan
- Monitor penggunaan di [Google Cloud Console](https://console.cloud.google.com/)
"""

OLLAMA_HELP = """
### Cara Menggunakan Ollama:

1. Install Ollama dari [ollama.ai](https://ollama.ai)
2. Jalankan Ollama di komputer Anda
3. Pastikan Ollama berjalan di http://localhost:11434

Catatan:
- Ollama berjalan secara lokal di komputer Anda
- Tidak memerlukan API key
- Ideal untuk privasi dan penggunaan offline
"""

# API settings: default to the standard local Ollama endpoint when unset.
OLLAMA_API = os.environ.get("OLLAMA_API", "http://localhost:11434")

# Model lists
67
  DEFAULT_MODELS = [
68
  "llama2",
69
  "codellama",
 
75
  "orca-mini"
76
  ]
77
 
78
class AIProvider:
    """String identifiers for the supported AI backends."""
    OLLAMA = "ollama"  # local Ollama server
    GEMINI = "gemini"  # Google Gemini API
82
class RepoAnalyzer:
    """Holds the state of one cloned repository and chats about it via Gemini."""

    def __init__(self):
        self.current_repo = None  # name of the last successfully cloned repo
        self.repo_content = {}    # file path -> file text for each readable file
        self.chat_history = []    # [{"role": ..., "content": ...}, ...]

    async def stream_gemini_response(self, prompt: str, api_key: str) -> AsyncGenerator[str, None]:
        """Stream a Gemini answer for *prompt*, yielding text chunks.

        Repository context (name + file list) is prepended to the prompt when a
        repository has been loaded. Errors are yielded as user-facing messages
        instead of raising.
        """
        try:
            if not api_key:
                yield "⚠️ API Key Gemini diperlukan. Klik icon bantuan (?) di samping input API Key untuk panduan mendapatkan key."
                return

            genai.configure(api_key=api_key)
            # BUG FIX: "gemini-1.5-mini" is not a valid Gemini model id; the
            # lightweight 1.5 model is "gemini-1.5-flash".
            model = genai.GenerativeModel('gemini-1.5-flash')

            # Keep the user's original question for the chat history; only the
            # context-augmented prompt is sent to the model.
            user_message = prompt
            if self.current_repo:
                context = f"Repository: {self.current_repo}\n\n"
                repo_files = "\n".join(list(self.repo_content.keys()))
                context += f"Files in repository:\n{repo_files}\n\n"
                prompt = context + prompt

            response = model.generate_content(
                prompt,
                generation_config={
                    "temperature": 0.7,
                    "top_p": 0.8,
                    "top_k": 40
                },
                stream=True
            )

            full_response = ""
            # BUG FIX: generate_content(stream=True) returns a *synchronous*
            # iterator; the original `async for` raised TypeError at runtime.
            for chunk in response:
                if chunk.text:
                    full_response += chunk.text
                    yield chunk.text

            self.chat_history.append({"role": "user", "content": user_message})
            self.chat_history.append({"role": "assistant", "content": full_response})

        except Exception as e:
            # Boundary handler: surface the error into the chat stream.
            error_msg = f"⚠️ Error dalam Gemini API: {str(e)}\n\nPastikan API Key valid dan memiliki kuota yang cukup."
            print(error_msg)
            yield error_msg

    def clone_repository(self, repo_url: str, github_token: str, branch: str = None) -> tuple[bool, str]:
        """Clone a GitHub repository, index its files, and return (ok, message)."""
        import shutil  # local import: only needed here

        if not repo_url:
            return False, "⚠️ URL repository diperlukan"

        repo_name = repo_url.split('/')[-1].replace('.git', '')

        if os.path.exists(repo_name):
            # Portable replacement for spawning `rm -rf` via subprocess.
            shutil.rmtree(repo_name)

        try:
            owner_repo = '/'.join(repo_url.split('/')[-2:])

            # Probe the GitHub API first so failures produce clear messages.
            headers = {'Authorization': f'token {github_token}'} if github_token else {}
            repo_check = requests.get(
                f"https://api.github.com/repos/{owner_repo}",
                headers=headers,
                timeout=10,  # added: don't hang the UI on a dead network
            )

            if repo_check.status_code == 404:
                return False, "⚠️ Repository tidak ditemukan. Periksa URL repository."
            elif repo_check.status_code == 401:
                return False, "⚠️ Token GitHub tidak valid. Klik icon bantuan (?) untuk panduan mendapatkan token."
            elif repo_check.status_code == 403 and repo_check.json().get('private', False):
                return False, "⚠️ Ini adalah repository private. Token GitHub dengan akses 'repo' diperlukan."

            auth_url = f"https://{github_token}@github.com/{owner_repo}" if github_token else f"https://github.com/{owner_repo}"

            cmd = ['git', 'clone']
            if branch:
                cmd.extend(['--branch', branch])
            cmd.append(auth_url)

            process = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                # Disable any interactive git credential prompt.
                env=dict(os.environ, GIT_ASKPASS='echo', GIT_TERMINAL_PROMPT='0')
            )

            if process.returncode == 0:
                self.current_repo = repo_name
                # Scan and cache every readable, non-.git file.
                file_count = 0
                for file_path in Path(repo_name).rglob('*'):
                    if file_path.is_file() and '.git' not in str(file_path):
                        success, content = self.read_file_safely(str(file_path))
                        if success:
                            self.repo_content[str(file_path)] = content
                            file_count += 1

                return True, f"✅ Repository berhasil di-clone!\n\nNama: {repo_name}\nJumlah file: {file_count}\n\nAnda sekarang bisa mengajukan pertanyaan tentang repository ini."
            else:
                # SECURITY FIX: git echoes the tokened URL into stderr on
                # failure — mask the token before showing it to the user.
                stderr = process.stderr.replace(github_token, '***') if github_token else process.stderr
                return False, f"⚠️ Gagal clone repository:\n{stderr}"

        except Exception as e:
            return False, f"⚠️ Error: {str(e)}"

    def read_file_safely(self, file_path: str) -> tuple[bool, str]:
        """Read *file_path* trying utf-8, latin-1, cp1252 in order."""
        encodings = ['utf-8', 'latin-1', 'cp1252']
        for encoding in encodings:
            try:
                with open(file_path, 'r', encoding=encoding) as f:
                    return True, f.read()
            except (UnicodeDecodeError, OSError):
                # Narrowed from `except Exception`: wrong encoding → try the
                # next one; I/O errors fall through to the same fallback below.
                continue
        return False, "Tidak dapat membaca file dengan encoding yang didukung"
196
 
197
def create_ui():
    """Build the Gradio app: a configuration tab (API keys) and a repository chat tab."""
    analyzer = RepoAnalyzer()

    with gr.Blocks(title="Repository Chat Analysis") as app:
        # Header banner with session metadata baked in at build time.
        gr.Markdown(f"""
        # 🤖 Repository Chat Analysis

        📅 Waktu: {CURRENT_TIME}
        👤 Pengguna: {CURRENT_USER}
        """)

        with gr.Tab("🛠️ Konfigurasi"):
            provider = gr.Radio(
                choices=[AIProvider.GEMINI, AIProvider.OLLAMA],
                label="Penyedia AI",
                value=AIProvider.GEMINI
            )

            with gr.Group() as api_settings:
                with gr.Row():
                    gemini_key = gr.Textbox(
                        label="Gemini API Key",
                        type="password",
                        placeholder="Klik icon (?) untuk panduan mendapatkan API key",
                        show_label=True
                    )
                    gr.Markdown(GEMINI_API_HELP)

        with gr.Tab("📊 Analisis Repository"):
            with gr.Row():
                repo_url = gr.Textbox(
                    label="URL Repository GitHub",
                    placeholder="Contoh: https://github.com/username/repository"
                )
                with gr.Column():
                    github_token = gr.Textbox(
                        label="Token GitHub",
                        type="password",
                        placeholder="Klik icon (?) untuk panduan mendapatkan token"
                    )
                    gr.Markdown(GITHUB_TOKEN_HELP)
                    branch = gr.Textbox(
                        label="Branch (opsional)",
                        placeholder="main"
                    )

            clone_button = gr.Button("🔄 Clone Repository", variant="primary")
            clone_status = gr.Markdown(
                label="Status Repository",
            )

            gr.Markdown("""
            ### 💡 Contoh Pertanyaan:
            - "Jelaskan struktur utama dari repository ini"
            - "Apa saja fitur-fitur utama dalam kode ini?"
            - "Bagaimana cara memperbaiki [masalah specific] di repository ini?"
            - "Tolong analisis kualitas kode di file [nama file]"
            """)

            with gr.Row():
                chat_input = gr.Textbox(
                    label="💭 Tanyakan tentang Repository",
                    placeholder="Ketik pertanyaan Anda di sini...",
                    lines=3
                )
                send_button = gr.Button("📤 Kirim", variant="primary")

            chat_history = gr.Chatbot(
                label="📝 Riwayat Chat",
                height=500,
                show_label=True
            )

            async def handle_chat(message, history, api_key):
                """Stream the assistant reply into the chat widget."""
                # BUG FIX: the original did `return history + [...]` here —
                # a `return` with a value inside an async generator is a
                # SyntaxError. Yield the warning row, then stop.
                if not analyzer.current_repo:
                    yield (history or []) + [[message, "⚠️ Mohon clone repository terlebih dahulu sebelum mengajukan pertanyaan."]]
                    return

                history = history or []
                history.append([message, ""])

                full_response = ""
                async for chunk in analyzer.stream_gemini_response(message, api_key):
                    full_response += chunk
                    history[-1][1] = full_response
                    yield history

            def handle_clone(url, token, branch_name):
                # Clone and surface the status markdown to the UI.
                success, result = analyzer.clone_repository(url, token, branch_name if branch_name else None)
                return result

            clone_button.click(
                fn=handle_clone,
                inputs=[repo_url, github_token, branch],
                outputs=clone_status
            )

            send_button.click(
                fn=handle_chat,
                inputs=[chat_input, chat_history, gemini_key],
                outputs=chat_history
            ).then(
                fn=lambda: gr.update(value=""),  # clear the input after send
                outputs=chat_input
            )

            chat_input.submit(
                fn=handle_chat,
                inputs=[chat_input, chat_history, gemini_key],
                outputs=chat_history
            ).then(
                fn=lambda: gr.update(value=""),
                outputs=chat_input
            )

    return app
312
 
 
313
if __name__ == "__main__":
    # Startup banner with the session metadata constants.
    print(f"""
    🚀 Memulai Repository Chat Analysis
    📅 Waktu: {CURRENT_TIME}
    👤 Pengguna: {CURRENT_USER}
    """)

    app = create_ui()
    # NOTE(review): app.launch() presumably follows beyond this excerpt — confirm.