Upload 9 files
Browse files- app/solver.py +642 -0
app/solver.py
CHANGED
|
@@ -1336,6 +1336,469 @@ async def solve_project2_reevals_7(csv_url: str, base_url: str) -> float:
|
|
| 1336 |
logger.error(f"Error summing CSV: {e}")
|
| 1337 |
return 0.0
|
| 1338 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1339 |
|
| 1340 |
class QuizSolver:
|
| 1341 |
"""Main quiz solver class."""
|
|
@@ -1877,6 +2340,185 @@ class QuizSolver:
|
|
| 1877 |
logger.info("Using handler for /project2-reevals-7")
|
| 1878 |
return answer
|
| 1879 |
return 0.0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1880 |
|
| 1881 |
# For non-project2 quizzes, proceed with general solving strategies
|
| 1882 |
logger.info(f"Solving non-project2 quiz: {url}")
|
|
|
|
| 1336 |
logger.error(f"Error summing CSV: {e}")
|
| 1337 |
return 0.0
|
| 1338 |
|
| 1339 |
+
def solve_project2_reevals_9(text: str) -> str:
|
| 1340 |
+
"""/project2-reevals-9 - CORS Header"""
|
| 1341 |
+
# Extract origin from text (e.g., "from https://example.com")
|
| 1342 |
+
origin_match = re.search(r'from\s+(https?://[^\s<>"\'\)]+)', text, re.IGNORECASE)
|
| 1343 |
+
if origin_match:
|
| 1344 |
+
origin = origin_match.group(1)
|
| 1345 |
+
else:
|
| 1346 |
+
# Default to example.com if not found
|
| 1347 |
+
origin = "https://example.com"
|
| 1348 |
+
cors_header = f"Access-Control-Allow-Origin: {origin}"
|
| 1349 |
+
logger.info(f"CORS header: {cors_header}")
|
| 1350 |
+
return cors_header
|
| 1351 |
+
|
| 1352 |
+
def solve_project2_reevals_10(base64_str: str) -> str:
|
| 1353 |
+
"""/project2-reevals-10 - Base64 Decoding"""
|
| 1354 |
+
try:
|
| 1355 |
+
decoded = base64.b64decode(base64_str).decode('utf-8')
|
| 1356 |
+
logger.info(f"Decoded Base64: {decoded[:50]}...")
|
| 1357 |
+
return decoded
|
| 1358 |
+
except Exception as e:
|
| 1359 |
+
logger.error(f"Error decoding Base64: {e}")
|
| 1360 |
+
return ""
|
| 1361 |
+
|
| 1362 |
+
async def solve_project2_reevals_11(csv_url: str, base_url: str) -> str:
|
| 1363 |
+
"""/project2-reevals-11 - Data Normalization to JSON"""
|
| 1364 |
+
try:
|
| 1365 |
+
if csv_url.startswith('/'):
|
| 1366 |
+
csv_url = urljoin(base_url, csv_url)
|
| 1367 |
+
logger.info(f"Downloading CSV: {csv_url}")
|
| 1368 |
+
response = requests.get(csv_url, timeout=15)
|
| 1369 |
+
response.raise_for_status()
|
| 1370 |
+
|
| 1371 |
+
df = pd.read_csv(io.StringIO(response.text))
|
| 1372 |
+
|
| 1373 |
+
# Normalize column names to snake_case
|
| 1374 |
+
df.columns = [col.strip().lower().replace(' ', '_') for col in df.columns]
|
| 1375 |
+
|
| 1376 |
+
# Ensure required columns exist (map common variations)
|
| 1377 |
+
column_mapping = {
|
| 1378 |
+
'id': ['id', 'user_id', 'contact_id'],
|
| 1379 |
+
'first_name': ['first_name', 'firstname', 'fname', 'first'],
|
| 1380 |
+
'last_name': ['last_name', 'lastname', 'lname', 'last'],
|
| 1381 |
+
'email': ['email', 'email_address', 'e_mail']
|
| 1382 |
+
}
|
| 1383 |
+
|
| 1384 |
+
# Rename columns to match expected format
|
| 1385 |
+
for target, variants in column_mapping.items():
|
| 1386 |
+
for variant in variants:
|
| 1387 |
+
if variant in df.columns and target not in df.columns:
|
| 1388 |
+
df.rename(columns={variant: target}, inplace=True)
|
| 1389 |
+
|
| 1390 |
+
# Select only required columns if they exist
|
| 1391 |
+
required_cols = ['id', 'first_name', 'last_name', 'email']
|
| 1392 |
+
available_cols = [col for col in required_cols if col in df.columns]
|
| 1393 |
+
if available_cols:
|
| 1394 |
+
df = df[available_cols]
|
| 1395 |
+
|
| 1396 |
+
# Sort by id ascending
|
| 1397 |
+
if 'id' in df.columns:
|
| 1398 |
+
df = df.sort_values('id')
|
| 1399 |
+
|
| 1400 |
+
# Convert to JSON array
|
| 1401 |
+
result = df.to_dict('records')
|
| 1402 |
+
# Convert to JSON string
|
| 1403 |
+
json_str = json.dumps(result, separators=(',', ':'))
|
| 1404 |
+
logger.info(f"Normalized {len(result)} records to JSON")
|
| 1405 |
+
return json_str
|
| 1406 |
+
except Exception as e:
|
| 1407 |
+
logger.error(f"Error normalizing CSV: {e}")
|
| 1408 |
+
return "[]"
|
| 1409 |
+
|
| 1410 |
+
async def solve_project2_reevals_12(json_url: str, base_url: str) -> int:
|
| 1411 |
+
"""/project2-reevals-12 - Count endpoints with status 200"""
|
| 1412 |
+
try:
|
| 1413 |
+
if json_url.startswith('/'):
|
| 1414 |
+
json_url = urljoin(base_url, json_url)
|
| 1415 |
+
logger.info(f"Downloading JSON: {json_url}")
|
| 1416 |
+
response = requests.get(json_url, timeout=15)
|
| 1417 |
+
response.raise_for_status()
|
| 1418 |
+
data = response.json()
|
| 1419 |
+
|
| 1420 |
+
# Count endpoints with status 200
|
| 1421 |
+
count = 0
|
| 1422 |
+
if isinstance(data, list):
|
| 1423 |
+
for item in data:
|
| 1424 |
+
if isinstance(item, dict) and item.get('status') == 200:
|
| 1425 |
+
count += 1
|
| 1426 |
+
elif isinstance(data, dict):
|
| 1427 |
+
# Check if it's a dict with endpoints
|
| 1428 |
+
if 'endpoints' in data:
|
| 1429 |
+
for endpoint in data['endpoints']:
|
| 1430 |
+
if isinstance(endpoint, dict) and endpoint.get('status') == 200:
|
| 1431 |
+
count += 1
|
| 1432 |
+
# Or check all values
|
| 1433 |
+
for value in data.values():
|
| 1434 |
+
if isinstance(value, dict) and value.get('status') == 200:
|
| 1435 |
+
count += 1
|
| 1436 |
+
|
| 1437 |
+
logger.info(f"Count of endpoints with status 200: {count}")
|
| 1438 |
+
return count
|
| 1439 |
+
except Exception as e:
|
| 1440 |
+
logger.error(f"Error counting status 200: {e}")
|
| 1441 |
+
return 0
|
| 1442 |
+
|
| 1443 |
+
async def solve_project2_reevals_13(json_url: str, base_url: str) -> str:
|
| 1444 |
+
"""/project2-reevals-13 - Find request ID with gzip compression"""
|
| 1445 |
+
try:
|
| 1446 |
+
if json_url.startswith('/'):
|
| 1447 |
+
json_url = urljoin(base_url, json_url)
|
| 1448 |
+
logger.info(f"Downloading JSON: {json_url}")
|
| 1449 |
+
response = requests.get(json_url, timeout=15)
|
| 1450 |
+
response.raise_for_status()
|
| 1451 |
+
data = response.json()
|
| 1452 |
+
|
| 1453 |
+
# Find request with gzip compression
|
| 1454 |
+
if isinstance(data, list):
|
| 1455 |
+
for item in data:
|
| 1456 |
+
if isinstance(item, dict):
|
| 1457 |
+
compression = item.get('compression', '').lower()
|
| 1458 |
+
if 'gzip' in compression:
|
| 1459 |
+
req_id = item.get('id') or item.get('request_id') or item.get('req_id')
|
| 1460 |
+
if req_id:
|
| 1461 |
+
logger.info(f"Found gzip request: {req_id}")
|
| 1462 |
+
return str(req_id)
|
| 1463 |
+
elif isinstance(data, dict):
|
| 1464 |
+
# Check if it's a dict with requests array
|
| 1465 |
+
requests_list = data.get('requests', [])
|
| 1466 |
+
if isinstance(requests_list, list):
|
| 1467 |
+
for req in requests_list:
|
| 1468 |
+
if isinstance(req, dict):
|
| 1469 |
+
compression = req.get('compression', '').lower()
|
| 1470 |
+
if 'gzip' in compression:
|
| 1471 |
+
req_id = req.get('id') or req.get('request_id')
|
| 1472 |
+
if req_id:
|
| 1473 |
+
logger.info(f"Found gzip request: {req_id}")
|
| 1474 |
+
return str(req_id)
|
| 1475 |
+
|
| 1476 |
+
return ""
|
| 1477 |
+
except Exception as e:
|
| 1478 |
+
logger.error(f"Error finding gzip request: {e}")
|
| 1479 |
+
return ""
|
| 1480 |
+
|
| 1481 |
+
def solve_project2_reevals_14(text: str) -> str:
|
| 1482 |
+
"""/project2-reevals-14 - Bash command for line count"""
|
| 1483 |
+
# Extract file path from text
|
| 1484 |
+
file_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.txt)', text, re.IGNORECASE)
|
| 1485 |
+
if file_match:
|
| 1486 |
+
file_path = file_match.group(1)
|
| 1487 |
+
else:
|
| 1488 |
+
# Default path
|
| 1489 |
+
file_path = "/project2-reevals/logs.txt"
|
| 1490 |
+
command = f"wc -l {file_path}"
|
| 1491 |
+
logger.info(f"Bash command: {command}")
|
| 1492 |
+
return command
|
| 1493 |
+
|
| 1494 |
+
def solve_project2_reevals_15(text: str) -> str:
|
| 1495 |
+
"""/project2-reevals-15 - Docker RUN instruction"""
|
| 1496 |
+
# Standard Docker RUN instruction for pip install
|
| 1497 |
+
instruction = "RUN pip install -r requirements.txt"
|
| 1498 |
+
logger.info(f"Docker RUN: {instruction}")
|
| 1499 |
+
return instruction
|
| 1500 |
+
|
| 1501 |
+
def solve_project2_reevals_16(text: str) -> str:
|
| 1502 |
+
"""/project2-reevals-16 - GitHub Actions test step"""
|
| 1503 |
+
# Standard GitHub Actions step for npm test
|
| 1504 |
+
step = "- name: Run Tests\n run: npm test"
|
| 1505 |
+
logger.info(f"GitHub Actions step: {step}")
|
| 1506 |
+
return step
|
| 1507 |
+
|
| 1508 |
+
async def solve_project2_reevals_17(json_url: str, base_url: str) -> int:
|
| 1509 |
+
"""/project2-reevals-17 - Count positive sentiment tweets"""
|
| 1510 |
+
try:
|
| 1511 |
+
if json_url.startswith('/'):
|
| 1512 |
+
json_url = urljoin(base_url, json_url)
|
| 1513 |
+
logger.info(f"Downloading JSON: {json_url}")
|
| 1514 |
+
response = requests.get(json_url, timeout=15)
|
| 1515 |
+
response.raise_for_status()
|
| 1516 |
+
data = response.json()
|
| 1517 |
+
|
| 1518 |
+
count = 0
|
| 1519 |
+
if isinstance(data, list):
|
| 1520 |
+
for tweet in data:
|
| 1521 |
+
if isinstance(tweet, dict):
|
| 1522 |
+
sentiment = tweet.get('sentiment', '').lower()
|
| 1523 |
+
if sentiment == 'positive':
|
| 1524 |
+
count += 1
|
| 1525 |
+
elif isinstance(data, dict):
|
| 1526 |
+
if 'tweets' in data:
|
| 1527 |
+
for tweet in data['tweets']:
|
| 1528 |
+
if isinstance(tweet, dict):
|
| 1529 |
+
sentiment = tweet.get('sentiment', '').lower()
|
| 1530 |
+
if sentiment == 'positive':
|
| 1531 |
+
count += 1
|
| 1532 |
+
|
| 1533 |
+
logger.info(f"Count of positive sentiment tweets: {count}")
|
| 1534 |
+
return count
|
| 1535 |
+
except Exception as e:
|
| 1536 |
+
logger.error(f"Error counting positive sentiment: {e}")
|
| 1537 |
+
return 0
|
| 1538 |
+
|
| 1539 |
+
async def solve_project2_reevals_18(json_url: str, base_url: str) -> float:
|
| 1540 |
+
"""/project2-reevals-18 - Calculate cosine similarity"""
|
| 1541 |
+
try:
|
| 1542 |
+
if json_url.startswith('/'):
|
| 1543 |
+
json_url = urljoin(base_url, json_url)
|
| 1544 |
+
logger.info(f"Downloading JSON: {json_url}")
|
| 1545 |
+
response = requests.get(json_url, timeout=15)
|
| 1546 |
+
response.raise_for_status()
|
| 1547 |
+
data = response.json()
|
| 1548 |
+
|
| 1549 |
+
# Get embeddings
|
| 1550 |
+
emb1 = data.get('embedding1', [])
|
| 1551 |
+
emb2 = data.get('embedding2', [])
|
| 1552 |
+
|
| 1553 |
+
if not emb1 or not emb2:
|
| 1554 |
+
return 0.0
|
| 1555 |
+
|
| 1556 |
+
# Convert to numpy arrays
|
| 1557 |
+
vec1 = np.array(emb1)
|
| 1558 |
+
vec2 = np.array(emb2)
|
| 1559 |
+
|
| 1560 |
+
# Calculate cosine similarity: (A · B) / (||A|| × ||B||)
|
| 1561 |
+
dot_product = np.dot(vec1, vec2)
|
| 1562 |
+
norm1 = np.linalg.norm(vec1)
|
| 1563 |
+
norm2 = np.linalg.norm(vec2)
|
| 1564 |
+
|
| 1565 |
+
if norm1 == 0 or norm2 == 0:
|
| 1566 |
+
return 0.0
|
| 1567 |
+
|
| 1568 |
+
similarity = dot_product / (norm1 * norm2)
|
| 1569 |
+
similarity = round(float(similarity), 3)
|
| 1570 |
+
logger.info(f"Cosine similarity: {similarity}")
|
| 1571 |
+
return similarity
|
| 1572 |
+
except Exception as e:
|
| 1573 |
+
logger.error(f"Error calculating cosine similarity: {e}")
|
| 1574 |
+
return 0.0
|
| 1575 |
+
|
| 1576 |
+
async def solve_project2_reevals_19(pdf_url: str, base_url: str) -> float:
|
| 1577 |
+
"""/project2-reevals-19 - Extract Q2 operating expenses from PDF"""
|
| 1578 |
+
try:
|
| 1579 |
+
if pdf_url.startswith('/'):
|
| 1580 |
+
pdf_url = urljoin(base_url, pdf_url)
|
| 1581 |
+
logger.info(f"Downloading PDF: {pdf_url}")
|
| 1582 |
+
response = requests.get(pdf_url, timeout=15)
|
| 1583 |
+
response.raise_for_status()
|
| 1584 |
+
|
| 1585 |
+
# Try to extract text from PDF
|
| 1586 |
+
try:
|
| 1587 |
+
import PyPDF2
|
| 1588 |
+
pdf_file = io.BytesIO(response.content)
|
| 1589 |
+
pdf_reader = PyPDF2.PdfReader(pdf_file)
|
| 1590 |
+
text = ""
|
| 1591 |
+
for page in pdf_reader.pages:
|
| 1592 |
+
text += page.extract_text()
|
| 1593 |
+
except ImportError:
|
| 1594 |
+
try:
|
| 1595 |
+
import pdfplumber
|
| 1596 |
+
with pdfplumber.open(io.BytesIO(response.content)) as pdf:
|
| 1597 |
+
text = ""
|
| 1598 |
+
for page in pdf.pages:
|
| 1599 |
+
text += page.extract_text() or ""
|
| 1600 |
+
except ImportError:
|
| 1601 |
+
logger.warning("No PDF library available, trying basic extraction")
|
| 1602 |
+
text = ""
|
| 1603 |
+
|
| 1604 |
+
# Look for Q2 Summary and operating expenses
|
| 1605 |
+
q2_match = re.search(r'Q2\s+Summary[^\d]*([\d,]+\.?\d*)', text, re.IGNORECASE)
|
| 1606 |
+
if q2_match:
|
| 1607 |
+
amount_str = q2_match.group(1).replace(',', '')
|
| 1608 |
+
amount = float(amount_str)
|
| 1609 |
+
amount = round(amount, 2)
|
| 1610 |
+
logger.info(f"Q2 operating expenses: {amount}")
|
| 1611 |
+
return amount
|
| 1612 |
+
|
| 1613 |
+
# Try alternative patterns
|
| 1614 |
+
expense_patterns = [
|
| 1615 |
+
r'Q2[^\d]*operating[^\d]*expenses[^\d]*([\d,]+\.?\d*)',
|
| 1616 |
+
r'operating[^\d]*expenses[^\d]*Q2[^\d]*([\d,]+\.?\d*)',
|
| 1617 |
+
r'Q2[^\d]*total[^\d]*([\d,]+\.?\d*)'
|
| 1618 |
+
]
|
| 1619 |
+
|
| 1620 |
+
for pattern in expense_patterns:
|
| 1621 |
+
match = re.search(pattern, text, re.IGNORECASE)
|
| 1622 |
+
if match:
|
| 1623 |
+
amount_str = match.group(1).replace(',', '')
|
| 1624 |
+
amount = float(amount_str)
|
| 1625 |
+
amount = round(amount, 2)
|
| 1626 |
+
logger.info(f"Q2 operating expenses (pattern match): {amount}")
|
| 1627 |
+
return amount
|
| 1628 |
+
|
| 1629 |
+
return 0.0
|
| 1630 |
+
except Exception as e:
|
| 1631 |
+
logger.error(f"Error extracting PDF data: {e}")
|
| 1632 |
+
return 0.0
|
| 1633 |
+
|
| 1634 |
+
async def solve_project2_reevals_20(csv_url: str, base_url: str) -> str:
|
| 1635 |
+
"""/project2-reevals-20 - Group by category and sum amounts"""
|
| 1636 |
+
try:
|
| 1637 |
+
if csv_url.startswith('/'):
|
| 1638 |
+
csv_url = urljoin(base_url, csv_url)
|
| 1639 |
+
logger.info(f"Downloading CSV: {csv_url}")
|
| 1640 |
+
response = requests.get(csv_url, timeout=15)
|
| 1641 |
+
response.raise_for_status()
|
| 1642 |
+
|
| 1643 |
+
df = pd.read_csv(io.StringIO(response.text))
|
| 1644 |
+
|
| 1645 |
+
# Find category and amount columns
|
| 1646 |
+
category_col = None
|
| 1647 |
+
amount_col = None
|
| 1648 |
+
|
| 1649 |
+
for col in df.columns:
|
| 1650 |
+
if 'category' in col.lower():
|
| 1651 |
+
category_col = col
|
| 1652 |
+
if 'amount' in col.lower():
|
| 1653 |
+
amount_col = col
|
| 1654 |
+
|
| 1655 |
+
if not category_col or not amount_col:
|
| 1656 |
+
return "{}"
|
| 1657 |
+
|
| 1658 |
+
# Group by category and sum
|
| 1659 |
+
grouped = df.groupby(category_col)[amount_col].sum()
|
| 1660 |
+
|
| 1661 |
+
# Convert to dict and sort keys alphabetically
|
| 1662 |
+
result = dict(sorted(grouped.items()))
|
| 1663 |
+
|
| 1664 |
+
# Convert to JSON string
|
| 1665 |
+
json_str = json.dumps(result, separators=(',', ':'))
|
| 1666 |
+
logger.info(f"Grouped by category: {len(result)} categories")
|
| 1667 |
+
return json_str
|
| 1668 |
+
except Exception as e:
|
| 1669 |
+
logger.error(f"Error grouping by category: {e}")
|
| 1670 |
+
return "{}"
|
| 1671 |
+
|
| 1672 |
+
def solve_project2_reevals_21(text: str) -> str:
|
| 1673 |
+
"""/project2-reevals-21 - Best chart type selection"""
|
| 1674 |
+
# For showing trends and cumulative effect over time, area chart is best
|
| 1675 |
+
result = {
|
| 1676 |
+
"chart_type": "area",
|
| 1677 |
+
"reason": "Area charts effectively show trends over time and the cumulative effect by filling the area under the line, making it easy to see both individual monthly values and the overall progression."
|
| 1678 |
+
}
|
| 1679 |
+
json_str = json.dumps(result, separators=(',', ':'))
|
| 1680 |
+
logger.info(f"Chart type selection: {json_str}")
|
| 1681 |
+
return json_str
|
| 1682 |
+
|
| 1683 |
+
def solve_project2_reevals_22(text: str) -> str:
|
| 1684 |
+
"""/project2-reevals-22 - FastAPI endpoint implementation"""
|
| 1685 |
+
# Standard FastAPI POST endpoint with Pydantic model
|
| 1686 |
+
code = """@app.post("/submit")
|
| 1687 |
+
async def submit_user(name: str, age: int):
|
| 1688 |
+
return {"status": "ok", "message": "User registered"}"""
|
| 1689 |
+
logger.info("FastAPI endpoint code generated")
|
| 1690 |
+
return code
|
| 1691 |
+
|
| 1692 |
+
async def solve_project2_reevals_23(json_url: str, base_url: str) -> float:
|
| 1693 |
+
"""/project2-reevals-23 - Calculate RMSE"""
|
| 1694 |
+
try:
|
| 1695 |
+
if json_url.startswith('/'):
|
| 1696 |
+
json_url = urljoin(base_url, json_url)
|
| 1697 |
+
logger.info(f"Downloading JSON: {json_url}")
|
| 1698 |
+
response = requests.get(json_url, timeout=15)
|
| 1699 |
+
response.raise_for_status()
|
| 1700 |
+
data = response.json()
|
| 1701 |
+
|
| 1702 |
+
# Get forecast and actual arrays
|
| 1703 |
+
forecast = data.get('forecast', [])
|
| 1704 |
+
actual = data.get('actual', [])
|
| 1705 |
+
|
| 1706 |
+
if not forecast or not actual or len(forecast) != len(actual):
|
| 1707 |
+
return 0.0
|
| 1708 |
+
|
| 1709 |
+
# Calculate RMSE: sqrt(mean((forecast - actual)^2))
|
| 1710 |
+
forecast_arr = np.array(forecast)
|
| 1711 |
+
actual_arr = np.array(actual)
|
| 1712 |
+
|
| 1713 |
+
squared_errors = (forecast_arr - actual_arr) ** 2
|
| 1714 |
+
mse = np.mean(squared_errors)
|
| 1715 |
+
rmse = np.sqrt(mse)
|
| 1716 |
+
|
| 1717 |
+
rmse = round(float(rmse), 2)
|
| 1718 |
+
logger.info(f"RMSE: {rmse}")
|
| 1719 |
+
return rmse
|
| 1720 |
+
except Exception as e:
|
| 1721 |
+
logger.error(f"Error calculating RMSE: {e}")
|
| 1722 |
+
return 0.0
|
| 1723 |
+
|
| 1724 |
+
async def solve_project2_reevals_24(json_url: str, base_url: str) -> int:
|
| 1725 |
+
"""/project2-reevals-24 - Calculate degree of node A"""
|
| 1726 |
+
try:
|
| 1727 |
+
if json_url.startswith('/'):
|
| 1728 |
+
json_url = urljoin(base_url, json_url)
|
| 1729 |
+
logger.info(f"Downloading JSON: {json_url}")
|
| 1730 |
+
response = requests.get(json_url, timeout=15)
|
| 1731 |
+
response.raise_for_status()
|
| 1732 |
+
data = response.json()
|
| 1733 |
+
|
| 1734 |
+
# Find node A and count its connections
|
| 1735 |
+
degree = 0
|
| 1736 |
+
|
| 1737 |
+
if 'edges' in data:
|
| 1738 |
+
for edge in data['edges']:
|
| 1739 |
+
if isinstance(edge, (list, tuple)) and len(edge) >= 2:
|
| 1740 |
+
if edge[0] == 'A' or edge[1] == 'A':
|
| 1741 |
+
degree += 1
|
| 1742 |
+
elif isinstance(edge, dict):
|
| 1743 |
+
if edge.get('from') == 'A' or edge.get('to') == 'A':
|
| 1744 |
+
degree += 1
|
| 1745 |
+
elif 'nodes' in data and 'edges' in data:
|
| 1746 |
+
for edge in data['edges']:
|
| 1747 |
+
if isinstance(edge, (list, tuple)) and len(edge) >= 2:
|
| 1748 |
+
if edge[0] == 'A' or edge[1] == 'A':
|
| 1749 |
+
degree += 1
|
| 1750 |
+
|
| 1751 |
+
logger.info(f"Degree of node A: {degree}")
|
| 1752 |
+
return degree
|
| 1753 |
+
except Exception as e:
|
| 1754 |
+
logger.error(f"Error calculating degree: {e}")
|
| 1755 |
+
return 0
|
| 1756 |
+
|
| 1757 |
+
def solve_project2_reevals_25(text: str) -> str:
|
| 1758 |
+
"""/project2-reevals-25 - LLM Agent function calling chain"""
|
| 1759 |
+
# Extract repository info from text
|
| 1760 |
+
repo_match = re.search(r'"([^"]+)"\s+repository.*owner[:\s]+"([^"]+)"', text, re.IGNORECASE)
|
| 1761 |
+
if repo_match:
|
| 1762 |
+
repo = repo_match.group(1)
|
| 1763 |
+
owner = repo_match.group(2)
|
| 1764 |
+
else:
|
| 1765 |
+
# Default from example
|
| 1766 |
+
repo = "demo-api"
|
| 1767 |
+
owner = "demo"
|
| 1768 |
+
|
| 1769 |
+
issue_match = re.search(r'issue\s+#?(\d+)', text, re.IGNORECASE)
|
| 1770 |
+
issue_id = issue_match.group(1) if issue_match else "42"
|
| 1771 |
+
|
| 1772 |
+
chain = [
|
| 1773 |
+
{
|
| 1774 |
+
"function": "search_issues",
|
| 1775 |
+
"params": {
|
| 1776 |
+
"owner": owner,
|
| 1777 |
+
"repo": repo,
|
| 1778 |
+
"query": f"issue:{issue_id}"
|
| 1779 |
+
}
|
| 1780 |
+
},
|
| 1781 |
+
{
|
| 1782 |
+
"function": "fetch_issue",
|
| 1783 |
+
"params": {
|
| 1784 |
+
"owner": owner,
|
| 1785 |
+
"repo": repo,
|
| 1786 |
+
"issue_id": issue_id
|
| 1787 |
+
}
|
| 1788 |
+
},
|
| 1789 |
+
{
|
| 1790 |
+
"function": "summarize",
|
| 1791 |
+
"params": {
|
| 1792 |
+
"text": "{{issue_body}}",
|
| 1793 |
+
"max_tokens": 200
|
| 1794 |
+
}
|
| 1795 |
+
}
|
| 1796 |
+
]
|
| 1797 |
+
|
| 1798 |
+
json_str = json.dumps(chain, separators=(',', ':'))
|
| 1799 |
+
logger.info(f"Function calling chain: {json_str}")
|
| 1800 |
+
return json_str
|
| 1801 |
+
|
| 1802 |
|
| 1803 |
class QuizSolver:
|
| 1804 |
"""Main quiz solver class."""
|
|
|
|
| 2340 |
logger.info("Using handler for /project2-reevals-7")
|
| 2341 |
return answer
|
| 2342 |
return 0.0
|
| 2343 |
+
|
| 2344 |
+
# Handle /project2-reevals-9 (CORS Header)
|
| 2345 |
+
if '/project2-reevals-9' in url:
|
| 2346 |
+
answer = solve_project2_reevals_9(text)
|
| 2347 |
+
logger.info("Using handler for /project2-reevals-9")
|
| 2348 |
+
return answer
|
| 2349 |
+
|
| 2350 |
+
# Handle /project2-reevals-10 (Base64 Decoding)
|
| 2351 |
+
if '/project2-reevals-10' in url:
|
| 2352 |
+
# Extract base64 string from text
|
| 2353 |
+
b64_match = re.search(r'[A-Za-z0-9+/]{20,}={0,2}', text)
|
| 2354 |
+
if b64_match:
|
| 2355 |
+
b64_str = b64_match.group(0)
|
| 2356 |
+
answer = solve_project2_reevals_10(b64_str)
|
| 2357 |
+
logger.info("Using handler for /project2-reevals-10")
|
| 2358 |
+
return answer
|
| 2359 |
+
return ""
|
| 2360 |
+
|
| 2361 |
+
# Handle /project2-reevals-11 (Data Normalization)
|
| 2362 |
+
if '/project2-reevals-11' in url:
|
| 2363 |
+
csv_urls = [link.get('href', '') for link in page_content.get('links', []) if '.csv' in link.get('href', '')]
|
| 2364 |
+
if not csv_urls:
|
| 2365 |
+
csv_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.csv)', text, re.IGNORECASE)
|
| 2366 |
+
if csv_match:
|
| 2367 |
+
csv_urls = [csv_match.group(1)]
|
| 2368 |
+
if csv_urls:
|
| 2369 |
+
csv_url = csv_urls[0]
|
| 2370 |
+
answer = await solve_project2_reevals_11(csv_url, base_url)
|
| 2371 |
+
logger.info("Using handler for /project2-reevals-11")
|
| 2372 |
+
return answer
|
| 2373 |
+
return "[]"
|
| 2374 |
+
|
| 2375 |
+
# Handle /project2-reevals-12 (REST API Status Analysis)
|
| 2376 |
+
if '/project2-reevals-12' in url:
|
| 2377 |
+
json_urls = [link.get('href', '') for link in page_content.get('links', []) if '.json' in link.get('href', '')]
|
| 2378 |
+
if not json_urls:
|
| 2379 |
+
json_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.json)', text, re.IGNORECASE)
|
| 2380 |
+
if json_match:
|
| 2381 |
+
json_urls = [json_match.group(1)]
|
| 2382 |
+
if json_urls:
|
| 2383 |
+
json_url = json_urls[0]
|
| 2384 |
+
answer = await solve_project2_reevals_12(json_url, base_url)
|
| 2385 |
+
logger.info("Using handler for /project2-reevals-12")
|
| 2386 |
+
return answer
|
| 2387 |
+
return 0
|
| 2388 |
+
|
| 2389 |
+
# Handle /project2-reevals-13 (Network Request Analysis)
|
| 2390 |
+
if '/project2-reevals-13' in url:
|
| 2391 |
+
json_urls = [link.get('href', '') for link in page_content.get('links', []) if '.json' in link.get('href', '')]
|
| 2392 |
+
if not json_urls:
|
| 2393 |
+
json_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.json)', text, re.IGNORECASE)
|
| 2394 |
+
if json_match:
|
| 2395 |
+
json_urls = [json_match.group(1)]
|
| 2396 |
+
if json_urls:
|
| 2397 |
+
json_url = json_urls[0]
|
| 2398 |
+
answer = await solve_project2_reevals_13(json_url, base_url)
|
| 2399 |
+
logger.info("Using handler for /project2-reevals-13")
|
| 2400 |
+
return answer
|
| 2401 |
+
return ""
|
| 2402 |
+
|
| 2403 |
+
# Handle /project2-reevals-14 (Bash Line Count)
|
| 2404 |
+
if '/project2-reevals-14' in url:
|
| 2405 |
+
answer = solve_project2_reevals_14(text)
|
| 2406 |
+
logger.info("Using handler for /project2-reevals-14")
|
| 2407 |
+
return answer
|
| 2408 |
+
|
| 2409 |
+
# Handle /project2-reevals-15 (Docker RUN Instruction)
|
| 2410 |
+
if '/project2-reevals-15' in url:
|
| 2411 |
+
answer = solve_project2_reevals_15(text)
|
| 2412 |
+
logger.info("Using handler for /project2-reevals-15")
|
| 2413 |
+
return answer
|
| 2414 |
+
|
| 2415 |
+
# Handle /project2-reevals-16 (GitHub Actions Test Step)
|
| 2416 |
+
if '/project2-reevals-16' in url:
|
| 2417 |
+
answer = solve_project2_reevals_16(text)
|
| 2418 |
+
logger.info("Using handler for /project2-reevals-16")
|
| 2419 |
+
return answer
|
| 2420 |
+
|
| 2421 |
+
# Handle /project2-reevals-17 (Sentiment Analysis)
|
| 2422 |
+
if '/project2-reevals-17' in url:
|
| 2423 |
+
json_urls = [link.get('href', '') for link in page_content.get('links', []) if '.json' in link.get('href', '')]
|
| 2424 |
+
if not json_urls:
|
| 2425 |
+
json_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.json)', text, re.IGNORECASE)
|
| 2426 |
+
if json_match:
|
| 2427 |
+
json_urls = [json_match.group(1)]
|
| 2428 |
+
if json_urls:
|
| 2429 |
+
json_url = json_urls[0]
|
| 2430 |
+
answer = await solve_project2_reevals_17(json_url, base_url)
|
| 2431 |
+
logger.info("Using handler for /project2-reevals-17")
|
| 2432 |
+
return answer
|
| 2433 |
+
return 0
|
| 2434 |
+
|
| 2435 |
+
# Handle /project2-reevals-18 (Vector Similarity)
|
| 2436 |
+
if '/project2-reevals-18' in url:
|
| 2437 |
+
json_urls = [link.get('href', '') for link in page_content.get('links', []) if '.json' in link.get('href', '')]
|
| 2438 |
+
if not json_urls:
|
| 2439 |
+
json_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.json)', text, re.IGNORECASE)
|
| 2440 |
+
if json_match:
|
| 2441 |
+
json_urls = [json_match.group(1)]
|
| 2442 |
+
if json_urls:
|
| 2443 |
+
json_url = json_urls[0]
|
| 2444 |
+
answer = await solve_project2_reevals_18(json_url, base_url)
|
| 2445 |
+
logger.info("Using handler for /project2-reevals-18")
|
| 2446 |
+
return answer
|
| 2447 |
+
return 0.0
|
| 2448 |
+
|
| 2449 |
+
# Handle /project2-reevals-19 (PDF Table Analysis)
|
| 2450 |
+
if '/project2-reevals-19' in url:
|
| 2451 |
+
pdf_urls = [link.get('href', '') for link in page_content.get('links', []) if '.pdf' in link.get('href', '')]
|
| 2452 |
+
if not pdf_urls:
|
| 2453 |
+
pdf_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.pdf)', text, re.IGNORECASE)
|
| 2454 |
+
if pdf_match:
|
| 2455 |
+
pdf_urls = [pdf_match.group(1)]
|
| 2456 |
+
if pdf_urls:
|
| 2457 |
+
pdf_url = pdf_urls[0]
|
| 2458 |
+
answer = await solve_project2_reevals_19(pdf_url, base_url)
|
| 2459 |
+
logger.info("Using handler for /project2-reevals-19")
|
| 2460 |
+
return answer
|
| 2461 |
+
return 0.0
|
| 2462 |
+
|
| 2463 |
+
# Handle /project2-reevals-20 (Data Aggregation)
|
| 2464 |
+
if '/project2-reevals-20' in url:
|
| 2465 |
+
csv_urls = [link.get('href', '') for link in page_content.get('links', []) if '.csv' in link.get('href', '')]
|
| 2466 |
+
if not csv_urls:
|
| 2467 |
+
csv_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.csv)', text, re.IGNORECASE)
|
| 2468 |
+
if csv_match:
|
| 2469 |
+
csv_urls = [csv_match.group(1)]
|
| 2470 |
+
if csv_urls:
|
| 2471 |
+
csv_url = csv_urls[0]
|
| 2472 |
+
answer = await solve_project2_reevals_20(csv_url, base_url)
|
| 2473 |
+
logger.info("Using handler for /project2-reevals-20")
|
| 2474 |
+
return answer
|
| 2475 |
+
return "{}"
|
| 2476 |
+
|
| 2477 |
+
# Handle /project2-reevals-21 (Best Chart Type)
|
| 2478 |
+
if '/project2-reevals-21' in url:
|
| 2479 |
+
answer = solve_project2_reevals_21(text)
|
| 2480 |
+
logger.info("Using handler for /project2-reevals-21")
|
| 2481 |
+
return answer
|
| 2482 |
+
|
| 2483 |
+
# Handle /project2-reevals-22 (FastAPI Endpoint)
|
| 2484 |
+
if '/project2-reevals-22' in url:
|
| 2485 |
+
answer = solve_project2_reevals_22(text)
|
| 2486 |
+
logger.info("Using handler for /project2-reevals-22")
|
| 2487 |
+
return answer
|
| 2488 |
+
|
| 2489 |
+
# Handle /project2-reevals-23 (Forecast RMSE)
|
| 2490 |
+
if '/project2-reevals-23' in url:
|
| 2491 |
+
json_urls = [link.get('href', '') for link in page_content.get('links', []) if '.json' in link.get('href', '')]
|
| 2492 |
+
if not json_urls:
|
| 2493 |
+
json_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.json)', text, re.IGNORECASE)
|
| 2494 |
+
if json_match:
|
| 2495 |
+
json_urls = [json_match.group(1)]
|
| 2496 |
+
if json_urls:
|
| 2497 |
+
json_url = json_urls[0]
|
| 2498 |
+
answer = await solve_project2_reevals_23(json_url, base_url)
|
| 2499 |
+
logger.info("Using handler for /project2-reevals-23")
|
| 2500 |
+
return answer
|
| 2501 |
+
return 0.0
|
| 2502 |
+
|
| 2503 |
+
# Handle /project2-reevals-24 (Network Degree Centrality)
|
| 2504 |
+
if '/project2-reevals-24' in url:
|
| 2505 |
+
json_urls = [link.get('href', '') for link in page_content.get('links', []) if '.json' in link.get('href', '')]
|
| 2506 |
+
if not json_urls:
|
| 2507 |
+
json_match = re.search(r'/(project2-reevals/[^\s<>"\'\)]+\.json)', text, re.IGNORECASE)
|
| 2508 |
+
if json_match:
|
| 2509 |
+
json_urls = [json_match.group(1)]
|
| 2510 |
+
if json_urls:
|
| 2511 |
+
json_url = json_urls[0]
|
| 2512 |
+
answer = await solve_project2_reevals_24(json_url, base_url)
|
| 2513 |
+
logger.info("Using handler for /project2-reevals-24")
|
| 2514 |
+
return answer
|
| 2515 |
+
return 0
|
| 2516 |
+
|
| 2517 |
+
# Handle /project2-reevals-25 (LLM Agent Function Calling Chain)
|
| 2518 |
+
if '/project2-reevals-25' in url:
|
| 2519 |
+
answer = solve_project2_reevals_25(text)
|
| 2520 |
+
logger.info("Using handler for /project2-reevals-25")
|
| 2521 |
+
return answer
|
| 2522 |
|
| 2523 |
# For non-project2 quizzes, proceed with general solving strategies
|
| 2524 |
logger.info(f"Solving non-project2 quiz: {url}")
|