abinivas8 commited on
Commit
b9b2c9c
·
verified ·
1 Parent(s): 86b5200

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +345 -345
app.py CHANGED
@@ -1,346 +1,346 @@
1
- import gradio as gr
2
- import joblib
3
- import numpy as np
4
- import pandas as pd
5
- from sklearn.preprocessing import StandardScaler
6
- from fastapi import FastAPI, HTTPException
7
- from pydantic import BaseModel
8
- import uvicorn
9
- import os
10
- import requests
11
- import json
12
- from datetime import datetime, timedelta, timezone
13
- from typing import Dict, List, Optional
14
- from urllib.parse import urlparse
15
- import time
16
-
17
- # Create FastAPI app
18
- app = FastAPI(title="Developer Productivity Prediction API", version="1.0.0")
19
-
20
- # Load the trained model and scaler
21
- model = joblib.load('dev_productivity_model.joblib')
22
- scaler = joblib.load('scaler.joblib')
23
-
24
- # Pydantic models
25
- class ProductivityRequest(BaseModel):
26
- daily_coding_hours: float
27
- commits_per_day: int
28
- pull_requests_per_week: int
29
- issues_closed_per_week: int
30
- active_repos: int
31
- code_reviews_per_week: int
32
-
33
- class ProductivityResponse(BaseModel):
34
- predicted_score: float
35
- status: str
36
-
37
- class GitHubAnalysisRequest(BaseModel):
38
- repo_url: str
39
- github_token: str
40
-
41
- class GitHubAnalysisResponse(BaseModel):
42
- repo_metrics: dict
43
- ml_features: dict
44
- predicted_score: float
45
- productivity_indicators: dict
46
- status: str
47
-
48
- # GitHub Repository Analyzer
49
- class RepoProductivityAnalyzer:
50
- def __init__(self, github_token: str):
51
- if not github_token or github_token == "YOUR_TOKEN_HERE":
52
- raise ValueError("Please provide a valid GitHub token")
53
-
54
- self.token = github_token
55
- self.headers = {
56
- 'Authorization': f'token {github_token}',
57
- 'Accept': 'application/vnd.github.v3+json'
58
- }
59
- self.days_back = 90
60
- self.max_retries = 3
61
-
62
- def safe_request(self, url: str, retries: int = None) -> Optional[List]:
63
- if retries is None:
64
- retries = self.max_retries
65
-
66
- for attempt in range(retries):
67
- try:
68
- response = requests.get(url, headers=self.headers, timeout=30)
69
-
70
- if response.status_code == 200:
71
- return response.json()
72
- elif response.status_code == 403:
73
- time.sleep(60) # Rate limit
74
- continue
75
- elif response.status_code == 404:
76
- return []
77
- else:
78
- return []
79
-
80
- except requests.exceptions.RequestException:
81
- if attempt < retries - 1:
82
- time.sleep(2 ** attempt)
83
- else:
84
- return []
85
- return []
86
-
87
- def parse_repo_url(self, repo_url: str) -> tuple:
88
- try:
89
- parsed = urlparse(repo_url)
90
- path = parsed.path.strip('/').split('/')
91
- if len(path) < 2:
92
- raise ValueError("Invalid GitHub URL format")
93
- return path[0], path[1]
94
- except Exception as e:
95
- raise ValueError(f"Invalid repo URL: {str(e)}")
96
-
97
- def safe_parse_datetime(self, date_str: str) -> Optional[datetime]:
98
- if not date_str:
99
- return None
100
- try:
101
- dt = datetime.fromisoformat(date_str.replace('Z', '+00:00'))
102
- if dt.tzinfo is None:
103
- dt = dt.replace(tzinfo=timezone.utc)
104
- return dt
105
- except:
106
- return None
107
-
108
- def get_metrics(self, repo_url: str) -> Dict:
109
- try:
110
- owner, repo = self.parse_repo_url(repo_url)
111
- except ValueError as e:
112
- return {"error": str(e)}
113
-
114
- now = datetime.now(timezone.utc)
115
- since_dt = now - timedelta(days=self.days_back)
116
- since = since_dt.isoformat()
117
-
118
- metrics = {
119
- 'repo': f"{owner}/{repo}",
120
- 'period_days': self.days_back,
121
- 'analyzed_at': now.isoformat(),
122
- 'status': 'success'
123
- }
124
-
125
- try:
126
- # Check repo exists
127
- repo_info = self.safe_request(f"https://api.github.com/repos/{owner}/{repo}")
128
- if not repo_info:
129
- return {"error": "Repository not found or inaccessible"}
130
-
131
- # Get commits
132
- commits_url = f"https://api.github.com/repos/{owner}/{repo}/commits?per_page=100&since={since}"
133
- commits = self.safe_request(commits_url) or []
134
- metrics['total_commits'] = len(commits)
135
-
136
- # Get PRs
137
- prs_url = f"https://api.github.com/repos/{owner}/{repo}/pulls?state=all&per_page=100"
138
- prs = self.safe_request(prs_url) or []
139
-
140
- recent_prs = []
141
- for pr in prs:
142
- created_at = self.safe_parse_datetime(pr.get('created_at'))
143
- if created_at and created_at >= since_dt:
144
- recent_prs.append(pr)
145
-
146
- metrics['prs_total'] = len(recent_prs)
147
- metrics['prs_merged'] = len([p for p in recent_prs if p.get('merged_at')])
148
-
149
- # Get issues
150
- issues_url = f"https://api.github.com/repos/{owner}/{repo}/issues?state=closed&per_page=100"
151
- issues = self.safe_request(issues_url) or []
152
-
153
- recent_issues = []
154
- for issue in issues:
155
- closed_at = self.safe_parse_datetime(issue.get('closed_at'))
156
- if closed_at and closed_at >= since_dt:
157
- recent_issues.append(issue)
158
-
159
- metrics['issues_total'] = len(recent_issues)
160
-
161
- # Calculate rates
162
- metrics['commits_per_day'] = metrics['total_commits'] / max(self.days_back, 1)
163
- metrics['prs_per_week'] = metrics['prs_total'] / max((self.days_back / 7), 1)
164
- metrics['issues_per_week'] = metrics['issues_total'] / max((self.days_back / 7), 1)
165
-
166
- return metrics
167
-
168
- except Exception as e:
169
- return {
170
- "error": f"Analysis failed: {str(e)}",
171
- "repo": f"{owner}/{repo}",
172
- "analyzed_at": now.isoformat()
173
- }
174
-
175
- def predict_productivity_core(daily_coding_hours, commits_per_day, pull_requests_per_week,
176
- issues_closed_per_week, active_repos, code_reviews_per_week):
177
- try:
178
- features = np.array([[
179
- daily_coding_hours, commits_per_day, pull_requests_per_week,
180
- issues_closed_per_week, active_repos, code_reviews_per_week
181
- ]])
182
- features_scaled = scaler.transform(features)
183
- prediction = model.predict(features_scaled)[0]
184
- return float(prediction)
185
- except Exception as e:
186
- raise HTTPException(status_code=500, detail=f"Prediction failed: {str(e)}")
187
-
188
- # FastAPI Endpoints
189
- @app.get("/")
190
- async def root():
191
- return {"message": "Developer Productivity Prediction API", "status": "online"}
192
-
193
- @app.post("/predict", response_model=ProductivityResponse)
194
- async def predict_productivity(request: ProductivityRequest):
195
- try:
196
- prediction = predict_productivity_core(
197
- request.daily_coding_hours, request.commits_per_day, request.pull_requests_per_week,
198
- request.issues_closed_per_week, request.active_repos, request.code_reviews_per_week
199
- )
200
- return ProductivityResponse(predicted_score=prediction, status="success")
201
- except Exception as e:
202
- raise HTTPException(status_code=500, detail=str(e))
203
-
204
- @app.post("/analyze-github", response_model=GitHubAnalysisResponse)
205
- async def analyze_github_repo(request: GitHubAnalysisRequest):
206
- try:
207
- analyzer = RepoProductivityAnalyzer(request.github_token)
208
- metrics = analyzer.get_metrics(request.repo_url)
209
-
210
- if "error" in metrics:
211
- raise HTTPException(status_code=400, detail=metrics["error"])
212
-
213
- # Transform to ML features
214
- ml_features = {
215
- 'daily_coding_hours': min(metrics['commits_per_day'] * 2, 8),
216
- 'commits_per_day': max(int(metrics['commits_per_day']), 0),
217
- 'pull_requests_per_week': max(int(metrics['prs_per_week']), 0),
218
- 'issues_closed_per_week': max(int(metrics['issues_per_week']), 0),
219
- 'active_repos': 1,
220
- 'code_reviews_per_week': max(int(metrics['prs_per_week']), 0)
221
- }
222
-
223
- prediction = predict_productivity_core(**ml_features)
224
-
225
- productivity_indicators = {
226
- 'high_commit_frequency': metrics['commits_per_day'] > 1,
227
- 'active_pr_process': metrics['prs_per_week'] > 2,
228
- 'good_issue_resolution': metrics['issues_per_week'] > 1,
229
- 'overall_productivity': prediction > 0.7
230
- }
231
-
232
- return GitHubAnalysisResponse(
233
- repo_metrics=metrics, ml_features=ml_features,
234
- predicted_score=prediction, productivity_indicators=productivity_indicators,
235
- status="success"
236
- )
237
- except Exception as e:
238
- raise HTTPException(status_code=500, detail=str(e))
239
-
240
- # Gradio Interface Functions
241
- def gradio_predict(daily_coding_hours, commits_per_day, pull_requests_per_week,
242
- issues_closed_per_week, active_repos, code_reviews_per_week):
243
- try:
244
- prediction = predict_productivity_core(
245
- daily_coding_hours, commits_per_day, pull_requests_per_week,
246
- issues_closed_per_week, active_repos, code_reviews_per_week
247
- )
248
- return f"Predicted Score: {prediction:.3f}"
249
- except Exception as e:
250
- return f"Error: {str(e)}"
251
-
252
- def gradio_github_analysis(repo_url, github_token):
253
- try:
254
- analyzer = RepoProductivityAnalyzer(github_token)
255
- metrics = analyzer.get_metrics(repo_url)
256
-
257
- if "error" in metrics:
258
- return f"Error: {metrics['error']}"
259
-
260
- ml_features = {
261
- 'daily_coding_hours': min(metrics['commits_per_day'] * 2, 8),
262
- 'commits_per_day': max(int(metrics['commits_per_day']), 0),
263
- 'pull_requests_per_week': max(int(metrics['prs_per_week']), 0),
264
- 'issues_closed_per_week': max(int(metrics['issues_per_week']), 0),
265
- 'active_repos': 1,
266
- 'code_reviews_per_week': max(int(metrics['prs_per_week']), 0)
267
- }
268
-
269
- prediction = predict_productivity_core(**ml_features)
270
-
271
- return f"""πŸ† PRODUCTIVITY ANALYSIS
272
- πŸ“Š Repository: {metrics['repo']}
273
- ⏱️ Period: {metrics['period_days']} days
274
-
275
- πŸ“ˆ KEY METRICS:
276
- β€’ Commits/day: {metrics['commits_per_day']:.1f}
277
- β€’ PRs/week: {metrics['prs_per_week']:.1f}
278
- β€’ Issues/week: {metrics['issues_per_week']:.1f}
279
-
280
- πŸ€– ML PREDICTION: {prediction:.3f}
281
- {'πŸš€ High Productivity!' if prediction > 0.7 else '⚠️ Room for improvement'}
282
-
283
- πŸ’‘ FEATURES:
284
- β€’ Daily coding hours: {ml_features['daily_coding_hours']}
285
- β€’ Commits/day: {ml_features['commits_per_day']}
286
- β€’ PRs/week: {ml_features['pull_requests_per_week']}
287
- β€’ Issues/week: {ml_features['issues_closed_per_week']}
288
- β€’ Active repos: {ml_features['active_repos']}
289
- β€’ Reviews/week: {ml_features['code_reviews_per_week']}"""
290
-
291
- except Exception as e:
292
- return f"Error: {str(e)}"
293
-
294
- # Create Gradio Interface
295
- with gr.Blocks(title="Developer Productivity Predictor") as demo:
296
- gr.Markdown("# πŸ† Developer Productivity Predictor")
297
- gr.Markdown("Predict productivity scores and analyze GitHub repositories using ML")
298
-
299
- with gr.Tab("Manual Prediction"):
300
- gr.Markdown("### Enter your development metrics:")
301
- with gr.Row():
302
- daily_hours = gr.Slider(1, 12, value=6, label="Daily Coding Hours")
303
- commits = gr.Slider(0, 20, value=3, label="Commits per Day")
304
- prs = gr.Slider(0, 10, value=2, label="Pull Requests per Week")
305
- with gr.Row():
306
- issues = gr.Slider(0, 15, value=3, label="Issues Closed per Week")
307
- repos = gr.Slider(1, 10, value=2, label="Active Repositories")
308
- reviews = gr.Slider(0, 20, value=5, label="Code Reviews per Week")
309
-
310
- predict_btn = gr.Button("πŸš€ Predict Productivity", variant="primary")
311
- prediction_output = gr.Textbox(label="Prediction Result", lines=2)
312
-
313
- predict_btn.click(
314
- gradio_predict,
315
- inputs=[daily_hours, commits, prs, issues, repos, reviews],
316
- outputs=prediction_output
317
- )
318
-
319
- with gr.Tab("GitHub Analysis"):
320
- gr.Markdown("### Analyze any GitHub repository:")
321
-
322
- repo_url_input = gr.Textbox(
323
- label="GitHub Repository URL",
324
- placeholder="https://github.com/owner/repo",
325
- value="https://github.com/microsoft/vscode"
326
- )
327
- token_input = gr.Textbox(
328
- label="GitHub Token",
329
- type="password",
330
- placeholder="ghp_xxxxxxxxxxxx"
331
- )
332
-
333
- analyze_btn = gr.Button("πŸ” Analyze Repository", variant="primary")
334
- analysis_output = gr.Textbox(label="Analysis Result", lines=15)
335
-
336
- analyze_btn.click(
337
- gradio_github_analysis,
338
- inputs=[repo_url_input, token_input],
339
- outputs=analysis_output
340
- )
341
-
342
- # Mount Gradio app to FastAPI
343
- app = gr.mount_gradio_app(app, demo, path="/")
344
-
345
- if __name__ == "__main__":
346
  uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
+ import gradio as gr
2
+ import joblib
3
+ import numpy as np
4
+ import pandas as pd
5
+ from sklearn.preprocessing import StandardScaler
6
+ from fastapi import FastAPI, HTTPException
7
+ from pydantic import BaseModel
8
+ import uvicorn
9
+ import os
10
+ import requests
11
+ import json
12
+ from datetime import datetime, timedelta, timezone
13
+ from typing import Dict, List, Optional
14
+ from urllib.parse import urlparse
15
+ import time
16
+
17
# Create FastAPI app
app = FastAPI(title="Developer Productivity Prediction API", version="1.0.0")

# Load the trained model and scaler at import time; both .joblib files must
# be present in the working directory or startup fails immediately.
model = joblib.load('dev_productivity_model.joblib')
scaler = joblib.load('scaler.joblib')
23
+
24
# Pydantic models
class ProductivityRequest(BaseModel):
    """Request body for /predict: the six raw developer-activity metrics
    that feed the ML model (same order as the training features)."""
    daily_coding_hours: float
    commits_per_day: int
    pull_requests_per_week: int
    issues_closed_per_week: int
    active_repos: int
    code_reviews_per_week: int
32
+
33
class ProductivityResponse(BaseModel):
    """Response body for /predict: the model's score plus a status flag."""
    predicted_score: float
    status: str
36
+
37
class GitHubAnalysisRequest(BaseModel):
    """Request body for /analyze-github: repository URL and the caller's
    GitHub API token (sent per-request, never stored server-side)."""
    repo_url: str
    github_token: str
40
+
41
class GitHubAnalysisResponse(BaseModel):
    """Response body for /analyze-github: raw repo metrics, the derived
    ML feature vector, the model's score, and boolean indicators."""
    repo_metrics: dict
    ml_features: dict
    predicted_score: float
    productivity_indicators: dict
    status: str
47
+
48
# GitHub Repository Analyzer
class RepoProductivityAnalyzer:
    """Collects recent commit/PR/issue activity for a GitHub repository
    over a fixed look-back window and derives simple productivity rates."""

    # Obvious placeholder values users paste instead of a real token.
    # SECURITY NOTE(review): a previous revision hard-coded a real
    # ``ghp_...`` personal access token in this comparison. That token is
    # compromised (it was committed publicly) and MUST be revoked on
    # GitHub. Never embed live credentials in source code.
    _PLACEHOLDER_TOKENS = {"YOUR_TOKEN_HERE", "ghp_xxxxxxxxxxxx"}

    def __init__(self, github_token: str):
        """Store the token and request headers; reject empty/placeholder tokens."""
        if not github_token or github_token in self._PLACEHOLDER_TOKENS:
            raise ValueError("Please provide a valid GitHub token")

        self.token = github_token
        self.headers = {
            'Authorization': f'token {github_token}',
            'Accept': 'application/vnd.github.v3+json'
        }
        self.days_back = 90   # analysis window in days
        self.max_retries = 3  # per-request retry budget

    def safe_request(self, url: str, retries: int = None) -> Optional[List]:
        """GET *url* and return parsed JSON, or [] on any failure.

        A 403 is treated as rate limiting (sleep 60s, retry); 404 and all
        other error statuses yield [] rather than raising. Network errors
        are retried with exponential backoff (1s, 2s, 4s, ...).
        """
        if retries is None:
            retries = self.max_retries

        for attempt in range(retries):
            try:
                response = requests.get(url, headers=self.headers, timeout=30)

                if response.status_code == 200:
                    return response.json()
                elif response.status_code == 403:
                    time.sleep(60)  # Rate limit
                    continue
                elif response.status_code == 404:
                    return []
                else:
                    return []

            except requests.exceptions.RequestException:
                if attempt < retries - 1:
                    time.sleep(2 ** attempt)  # exponential backoff
                else:
                    return []
        return []

    def parse_repo_url(self, repo_url: str) -> tuple:
        """Return (owner, repo) extracted from a GitHub URL.

        Raises ValueError for URLs whose path has fewer than two segments.
        """
        try:
            parsed = urlparse(repo_url)
            path = parsed.path.strip('/').split('/')
            if len(path) < 2:
                raise ValueError("Invalid GitHub URL format")
            return path[0], path[1]
        except Exception as e:
            raise ValueError(f"Invalid repo URL: {str(e)}")

    def safe_parse_datetime(self, date_str: str) -> Optional[datetime]:
        """Parse a GitHub ISO-8601 timestamp (trailing 'Z') into an
        aware UTC datetime; return None for empty or malformed input."""
        if not date_str:
            return None
        try:
            dt = datetime.fromisoformat(date_str.replace('Z', '+00:00'))
            if dt.tzinfo is None:
                dt = dt.replace(tzinfo=timezone.utc)
            return dt
        except ValueError:
            # Bug fix: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; only parse errors are expected.
            return None

    def get_metrics(self, repo_url: str) -> Dict:
        """Fetch commits, PRs and closed issues for the last ``days_back``
        days and return a metrics dict, or {"error": ...} on failure.

        Only the first 100 items of each listing are fetched (no
        pagination), so very active repos are undercounted.
        """
        try:
            owner, repo = self.parse_repo_url(repo_url)
        except ValueError as e:
            return {"error": str(e)}

        now = datetime.now(timezone.utc)
        since_dt = now - timedelta(days=self.days_back)
        since = since_dt.isoformat()

        metrics = {
            'repo': f"{owner}/{repo}",
            'period_days': self.days_back,
            'analyzed_at': now.isoformat(),
            'status': 'success'
        }

        try:
            # Check repo exists
            repo_info = self.safe_request(f"https://api.github.com/repos/{owner}/{repo}")
            if not repo_info:
                return {"error": "Repository not found or inaccessible"}

            # Get commits (the API filters by ``since`` server-side)
            commits_url = f"https://api.github.com/repos/{owner}/{repo}/commits?per_page=100&since={since}"
            commits = self.safe_request(commits_url) or []
            metrics['total_commits'] = len(commits)

            # Get PRs; the pulls endpoint has no ``since`` filter, so
            # filter client-side by creation date.
            prs_url = f"https://api.github.com/repos/{owner}/{repo}/pulls?state=all&per_page=100"
            prs = self.safe_request(prs_url) or []

            recent_prs = []
            for pr in prs:
                created_at = self.safe_parse_datetime(pr.get('created_at'))
                if created_at and created_at >= since_dt:
                    recent_prs.append(pr)

            metrics['prs_total'] = len(recent_prs)
            metrics['prs_merged'] = len([p for p in recent_prs if p.get('merged_at')])

            # Get issues closed within the window (filtered client-side).
            # NOTE(review): GitHub's issues endpoint also returns PRs;
            # counts may include pull requests — confirm if that matters.
            issues_url = f"https://api.github.com/repos/{owner}/{repo}/issues?state=closed&per_page=100"
            issues = self.safe_request(issues_url) or []

            recent_issues = []
            for issue in issues:
                closed_at = self.safe_parse_datetime(issue.get('closed_at'))
                if closed_at and closed_at >= since_dt:
                    recent_issues.append(issue)

            metrics['issues_total'] = len(recent_issues)

            # Calculate per-day / per-week rates over the window
            metrics['commits_per_day'] = metrics['total_commits'] / max(self.days_back, 1)
            metrics['prs_per_week'] = metrics['prs_total'] / max((self.days_back / 7), 1)
            metrics['issues_per_week'] = metrics['issues_total'] / max((self.days_back / 7), 1)

            return metrics

        except Exception as e:
            return {
                "error": f"Analysis failed: {str(e)}",
                "repo": f"{owner}/{repo}",
                "analyzed_at": now.isoformat()
            }
174
+
175
def predict_productivity_core(daily_coding_hours, commits_per_day, pull_requests_per_week,
                              issues_closed_per_week, active_repos, code_reviews_per_week):
    """Scale the six raw activity metrics and return the model's score.

    Raises:
        HTTPException: status 500 when scaling or prediction fails.
    """
    raw = [
        daily_coding_hours,
        commits_per_day,
        pull_requests_per_week,
        issues_closed_per_week,
        active_repos,
        code_reviews_per_week,
    ]
    try:
        scaled = scaler.transform(np.array([raw]))
        return float(model.predict(scaled)[0])
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Prediction failed: {str(exc)}")
187
+
188
# FastAPI Endpoints
@app.get("/")
async def root():
    """Liveness endpoint: report the API name and that it is online."""
    payload = {"message": "Developer Productivity Prediction API", "status": "online"}
    return payload
192
+
193
+ @app.post("/predict", response_model=ProductivityResponse)
194
+ async def predict_productivity(request: ProductivityRequest):
195
+ try:
196
+ prediction = predict_productivity_core(
197
+ request.daily_coding_hours, request.commits_per_day, request.pull_requests_per_week,
198
+ request.issues_closed_per_week, request.active_repos, request.code_reviews_per_week
199
+ )
200
+ return ProductivityResponse(predicted_score=prediction, status="success")
201
+ except Exception as e:
202
+ raise HTTPException(status_code=500, detail=str(e))
203
+
204
+ @app.post("/analyze-github", response_model=GitHubAnalysisResponse)
205
+ async def analyze_github_repo(request: GitHubAnalysisRequest):
206
+ try:
207
+ analyzer = RepoProductivityAnalyzer(request.github_token)
208
+ metrics = analyzer.get_metrics(request.repo_url)
209
+
210
+ if "error" in metrics:
211
+ raise HTTPException(status_code=400, detail=metrics["error"])
212
+
213
+ # Transform to ML features
214
+ ml_features = {
215
+ 'daily_coding_hours': min(metrics['commits_per_day'] * 2, 8),
216
+ 'commits_per_day': max(int(metrics['commits_per_day']), 0),
217
+ 'pull_requests_per_week': max(int(metrics['prs_per_week']), 0),
218
+ 'issues_closed_per_week': max(int(metrics['issues_per_week']), 0),
219
+ 'active_repos': 1,
220
+ 'code_reviews_per_week': max(int(metrics['prs_per_week']), 0)
221
+ }
222
+
223
+ prediction = predict_productivity_core(**ml_features)
224
+
225
+ productivity_indicators = {
226
+ 'high_commit_frequency': metrics['commits_per_day'] > 1,
227
+ 'active_pr_process': metrics['prs_per_week'] > 2,
228
+ 'good_issue_resolution': metrics['issues_per_week'] > 1,
229
+ 'overall_productivity': prediction > 0.7
230
+ }
231
+
232
+ return GitHubAnalysisResponse(
233
+ repo_metrics=metrics, ml_features=ml_features,
234
+ predicted_score=prediction, productivity_indicators=productivity_indicators,
235
+ status="success"
236
+ )
237
+ except Exception as e:
238
+ raise HTTPException(status_code=500, detail=str(e))
239
+
240
# Gradio Interface Functions
def gradio_predict(daily_coding_hours, commits_per_day, pull_requests_per_week,
                   issues_closed_per_week, active_repos, code_reviews_per_week):
    """Gradio wrapper: run the core predictor and format the result as text.

    Failures are returned as an "Error: ..." string so the UI always
    receives displayable text instead of a traceback.
    """
    try:
        score = predict_productivity_core(
            daily_coding_hours, commits_per_day, pull_requests_per_week,
            issues_closed_per_week, active_repos, code_reviews_per_week,
        )
    except Exception as exc:
        return f"Error: {str(exc)}"
    return f"Predicted Score: {score:.3f}"
251
+
252
def gradio_github_analysis(repo_url, github_token):
    """Gradio wrapper: analyze a repository and return a formatted report.

    Any failure (bad token, bad URL, API error) is returned as an
    "Error: ..." string rather than raised, so the UI always gets text.
    """
    try:
        analyzer = RepoProductivityAnalyzer(github_token)
        metrics = analyzer.get_metrics(repo_url)

        if "error" in metrics:
            return f"Error: {metrics['error']}"

        # Map repo-level rates onto the model's six input features
        # (same heuristic mapping as the /analyze-github endpoint).
        ml_features = {
            'daily_coding_hours': min(metrics['commits_per_day'] * 2, 8),
            'commits_per_day': max(int(metrics['commits_per_day']), 0),
            'pull_requests_per_week': max(int(metrics['prs_per_week']), 0),
            'issues_closed_per_week': max(int(metrics['issues_per_week']), 0),
            'active_repos': 1,
            'code_reviews_per_week': max(int(metrics['prs_per_week']), 0)
        }

        prediction = predict_productivity_core(**ml_features)

        # Plain-text report consumed directly by the Gradio Textbox.
        return f"""πŸ† PRODUCTIVITY ANALYSIS
πŸ“Š Repository: {metrics['repo']}
⏱️ Period: {metrics['period_days']} days

πŸ“ˆ KEY METRICS:
β€’ Commits/day: {metrics['commits_per_day']:.1f}
β€’ PRs/week: {metrics['prs_per_week']:.1f}
β€’ Issues/week: {metrics['issues_per_week']:.1f}

πŸ€– ML PREDICTION: {prediction:.3f}
{'πŸš€ High Productivity!' if prediction > 0.7 else '⚠️ Room for improvement'}

πŸ’‘ FEATURES:
β€’ Daily coding hours: {ml_features['daily_coding_hours']}
β€’ Commits/day: {ml_features['commits_per_day']}
β€’ PRs/week: {ml_features['pull_requests_per_week']}
β€’ Issues/week: {ml_features['issues_closed_per_week']}
β€’ Active repos: {ml_features['active_repos']}
β€’ Reviews/week: {ml_features['code_reviews_per_week']}"""

    except Exception as e:
        return f"Error: {str(e)}"
293
+
294
# Create Gradio Interface
with gr.Blocks(title="Developer Productivity Predictor") as demo:
    gr.Markdown("# πŸ† Developer Productivity Predictor")
    gr.Markdown("Predict productivity scores and analyze GitHub repositories using ML")

    # Tab 1: predict directly from manually entered metrics
    with gr.Tab("Manual Prediction"):
        gr.Markdown("### Enter your development metrics:")
        with gr.Row():
            daily_hours = gr.Slider(1, 12, value=6, label="Daily Coding Hours")
            commits = gr.Slider(0, 20, value=3, label="Commits per Day")
            prs = gr.Slider(0, 10, value=2, label="Pull Requests per Week")
        with gr.Row():
            issues = gr.Slider(0, 15, value=3, label="Issues Closed per Week")
            repos = gr.Slider(1, 10, value=2, label="Active Repositories")
            reviews = gr.Slider(0, 20, value=5, label="Code Reviews per Week")

        predict_btn = gr.Button("πŸš€ Predict Productivity", variant="primary")
        prediction_output = gr.Textbox(label="Prediction Result", lines=2)

        # Wire slider values straight into the prediction wrapper
        predict_btn.click(
            gradio_predict,
            inputs=[daily_hours, commits, prs, issues, repos, reviews],
            outputs=prediction_output
        )

    # Tab 2: derive metrics from a live GitHub repository
    with gr.Tab("GitHub Analysis"):
        gr.Markdown("### Analyze any GitHub repository:")

        repo_url_input = gr.Textbox(
            label="GitHub Repository URL",
            placeholder="https://github.com/owner/repo",
            value="https://github.com/microsoft/vscode"
        )
        # Token is entered per-session; type="password" masks the input
        token_input = gr.Textbox(
            label="GitHub Token",
            type="password",
            placeholder="ghp_xxxxxxxxxxxx"
        )

        analyze_btn = gr.Button("πŸ” Analyze Repository", variant="primary")
        analysis_output = gr.Textbox(label="Analysis Result", lines=15)

        analyze_btn.click(
            gradio_github_analysis,
            inputs=[repo_url_input, token_input],
            outputs=analysis_output
        )

# Mount Gradio app to FastAPI
# NOTE(review): mounting at "/" presumably shadows the JSON root() route
# defined above — confirm intended routing.
app = gr.mount_gradio_app(app, demo, path="/")

if __name__ == "__main__":
    # Bind to all interfaces on port 7860 (the Hugging Face Spaces default)
    uvicorn.run(app, host="0.0.0.0", port=7860)