Che237 committed on
Commit
8ffe8e5
Β·
verified Β·
1 Parent(s): 0a285a3

Fix async code - use sync httpx client

Browse files
Files changed (1) hide show
  1. notebooks/00_environment_setup.ipynb +494 -0
notebooks/00_environment_setup.ipynb ADDED
@@ -0,0 +1,494 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "id": "17e1f6f6",
6
+ "metadata": {},
7
+ "source": [
8
+ "# 00 - Environment Setup\n",
9
+ "\n",
10
+ "## CyberForge AI - ML Pipeline Environment Configuration\n",
11
+ "\n",
12
+ "This notebook sets up the complete environment for the CyberForge AI machine learning pipeline.\n",
13
+ "\n",
14
+ "### What this notebook does:\n",
15
+ "1. Validates Python version and system requirements\n",
16
+ "2. Installs and pins all dependencies\n",
17
+ "3. Configures GPU/CPU detection\n",
18
+ "4. Sets up Gemini API connectivity\n",
19
+ "5. Validates Web Scraper API connection\n",
20
+ "6. Creates necessary directories\n",
21
+ "\n",
22
+ "### Prerequisites:\n",
23
+ "- Python 3.10+ (3.11 recommended)\n",
24
+ "- Access to Gemini API (API key required)\n",
25
+ "- Access to WebScrapper.live API"
26
+ ]
27
+ },
28
+ {
29
+ "cell_type": "markdown",
30
+ "id": "33029fa4",
31
+ "metadata": {},
32
+ "source": [
33
+ "## 1. System Validation"
34
+ ]
35
+ },
36
+ {
37
+ "cell_type": "code",
38
+ "execution_count": null,
39
+ "id": "076fa991",
40
+ "metadata": {},
41
+ "outputs": [],
42
+ "source": [
43
+ "import sys\n",
44
+ "import platform\n",
45
+ "import os\n",
46
+ "from pathlib import Path\n",
47
+ "\n",
48
+ "print(\"=\" * 60)\n",
49
+ "print(\"CYBERFORGE AI - ENVIRONMENT VALIDATION\")\n",
50
+ "print(\"=\" * 60)\n",
51
+ "\n",
52
+ "# Python version check\n",
53
+ "python_version = sys.version_info\n",
54
+ "print(f\"\\nβœ“ Python Version: {python_version.major}.{python_version.minor}.{python_version.micro}\")\n",
55
+ "\n",
56
+ "if python_version.major < 3 or (python_version.major == 3 and python_version.minor < 10):\n",
57
+ " raise EnvironmentError(\"Python 3.10+ is required. Please upgrade your Python installation.\")\n",
58
+ "\n",
59
+ "# System info\n",
60
+ "print(f\"βœ“ Platform: {platform.system()} {platform.release()}\")\n",
61
+ "print(f\"βœ“ Architecture: {platform.machine()}\")\n",
62
+ "print(f\"βœ“ Processor: {platform.processor() or 'Unknown'}\")\n",
63
+ "\n",
64
+ "# Memory info\n",
65
+ "try:\n",
66
+ " import psutil\n",
67
+ " memory = psutil.virtual_memory()\n",
68
+ " print(f\"βœ“ Available Memory: {memory.available / (1024**3):.2f} GB / {memory.total / (1024**3):.2f} GB\")\n",
69
+ "except ImportError:\n",
70
+ " print(\"⚠ psutil not installed - memory check skipped\")\n",
71
+ "\n",
72
+ "print(\"\\n\" + \"=\" * 60)"
73
+ ]
74
+ },
75
+ {
76
+ "cell_type": "markdown",
77
+ "id": "45e95831",
78
+ "metadata": {},
79
+ "source": [
80
+ "## 2. Install Dependencies"
81
+ ]
82
+ },
83
+ {
84
+ "cell_type": "code",
85
+ "execution_count": null,
86
+ "id": "faa9b079",
87
+ "metadata": {},
88
+ "outputs": [],
89
+ "source": [
90
+ "# Core dependencies with pinned versions for reproducibility\n",
91
+ "DEPENDENCIES = \"\"\"\n",
92
+ "# Core ML/AI\n",
93
+ "numpy>=1.24.0,<2.0.0\n",
94
+ "pandas>=2.0.0\n",
95
+ "scikit-learn>=1.3.0\n",
96
+ "scipy>=1.11.0\n",
97
+ "\n",
98
+ "# Deep Learning\n",
99
+ "torch>=2.0.0\n",
100
+ "transformers>=4.30.0\n",
101
+ "\n",
102
+ "# Gemini API\n",
103
+ "google-generativeai>=0.3.0\n",
104
+ "\n",
105
+ "# Data Processing\n",
106
+ "joblib>=1.3.0\n",
107
+ "tqdm>=4.65.0\n",
108
+ "\n",
109
+ "# Feature Engineering\n",
110
+ "tldextract>=5.0.0\n",
111
+ "validators>=0.22.0\n",
112
+ "ipaddress>=1.0.23\n",
113
+ "\n",
114
+ "# Web/API\n",
115
+ "httpx>=0.25.0\n",
116
+ "aiohttp>=3.8.0\n",
117
+ "requests>=2.31.0\n",
118
+ "\n",
119
+ "# Hugging Face\n",
120
+ "huggingface_hub>=0.19.0\n",
121
+ "\n",
122
+ "# Utilities\n",
123
+ "python-dotenv>=1.0.0\n",
124
+ "pyyaml>=6.0.0\n",
125
+ "psutil>=5.9.0\n",
126
+ "\"\"\"\n",
127
+ "\n",
128
+ "# Write requirements file\n",
129
+ "requirements_path = Path(\"../requirements_notebooks.txt\")\n",
130
+ "requirements_path.write_text(DEPENDENCIES.strip())\n",
131
+ "print(f\"βœ“ Requirements written to: {requirements_path.absolute()}\")"
132
+ ]
133
+ },
134
+ {
135
+ "cell_type": "code",
136
+ "execution_count": null,
137
+ "id": "7dc8c6ca",
138
+ "metadata": {},
139
+ "outputs": [],
140
+ "source": [
141
+ "# Install dependencies\n",
142
+ "import subprocess\n",
143
+ "\n",
144
+ "print(\"Installing dependencies... This may take a few minutes.\")\n",
145
+ "result = subprocess.run(\n",
146
+ " [sys.executable, \"-m\", \"pip\", \"install\", \"-q\", \"-r\", str(requirements_path)],\n",
147
+ " capture_output=True,\n",
148
+ " text=True\n",
149
+ ")\n",
150
+ "\n",
151
+ "if result.returncode == 0:\n",
152
+ " print(\"βœ“ All dependencies installed successfully!\")\n",
153
+ "else:\n",
154
+ " print(f\"⚠ Installation warnings: {result.stderr[:500] if result.stderr else 'None'}\")"
155
+ ]
156
+ },
157
+ {
158
+ "cell_type": "markdown",
159
+ "id": "c11760cc",
160
+ "metadata": {},
161
+ "source": [
162
+ "## 3. GPU/CPU Detection"
163
+ ]
164
+ },
165
+ {
166
+ "cell_type": "code",
167
+ "execution_count": null,
168
+ "id": "d1b948c4",
169
+ "metadata": {},
170
+ "outputs": [],
171
+ "source": [
172
+ "import torch\n",
173
+ "\n",
174
+ "print(\"=\" * 60)\n",
175
+ "print(\"COMPUTE DEVICE DETECTION\")\n",
176
+ "print(\"=\" * 60)\n",
177
+ "\n",
178
+ "# Check CUDA availability\n",
179
+ "cuda_available = torch.cuda.is_available()\n",
180
+ "print(f\"\\nβœ“ PyTorch Version: {torch.__version__}\")\n",
181
+ "print(f\"βœ“ CUDA Available: {cuda_available}\")\n",
182
+ "\n",
183
+ "if cuda_available:\n",
184
+ " print(f\"βœ“ CUDA Version: {torch.version.cuda}\")\n",
185
+ " print(f\"βœ“ GPU Count: {torch.cuda.device_count()}\")\n",
186
+ " for i in range(torch.cuda.device_count()):\n",
187
+ " props = torch.cuda.get_device_properties(i)\n",
188
+ " print(f\" - GPU {i}: {props.name} ({props.total_memory / (1024**3):.2f} GB)\")\n",
189
+ " DEVICE = torch.device(\"cuda\")\n",
190
+ "else:\n",
191
+ " print(\"⚠ No GPU detected - using CPU for training\")\n",
192
+ " DEVICE = torch.device(\"cpu\")\n",
193
+ "\n",
194
+ "# Check MPS (Apple Silicon)\n",
195
+ "if hasattr(torch.backends, 'mps') and torch.backends.mps.is_available():\n",
196
+ " print(\"βœ“ Apple MPS (Metal) available\")\n",
197
+ " DEVICE = torch.device(\"mps\")\n",
198
+ "\n",
199
+ "print(f\"\\nβœ“ Selected Device: {DEVICE}\")\n",
200
+ "print(\"=\" * 60)"
201
+ ]
202
+ },
203
+ {
204
+ "cell_type": "markdown",
205
+ "id": "d39ddbf5",
206
+ "metadata": {},
207
+ "source": [
208
+ "## 4. Environment Variables & API Configuration"
209
+ ]
210
+ },
211
+ {
212
+ "cell_type": "code",
213
+ "execution_count": null,
214
+ "id": "0f63a5ce",
215
+ "metadata": {},
216
+ "outputs": [],
217
+ "source": [
218
+ "import json\n",
219
+ "import os\n",
220
+ "from pathlib import Path\n",
221
+ "\n",
222
+ "# Load configuration from notebook_config.json first (for HF Spaces)\n",
223
+ "config_json_path = Path(\"notebook_config.json\")\n",
224
+ "if config_json_path.exists():\n",
225
+ " with open(config_json_path, \"r\") as f:\n",
226
+ " loaded_config = json.load(f)\n",
227
+ " print(f\"βœ“ Loaded configuration from: {config_json_path.absolute()}\")\n",
228
+ "else:\n",
229
+ " loaded_config = {}\n",
230
+ " print(f\"⚠ No notebook_config.json found, using defaults\")\n",
231
+ "\n",
232
+ "# Try loading .env file as fallback (for local dev)\n",
233
+ "try:\n",
234
+ " from dotenv import load_dotenv\n",
235
+ " env_path = Path(\"../.env\")\n",
236
+ " if env_path.exists():\n",
237
+ " load_dotenv(env_path)\n",
238
+ " print(f\"βœ“ Loaded environment from: {env_path.absolute()}\")\n",
239
+ "except ImportError:\n",
240
+ " pass\n",
241
+ "\n",
242
+ "# Configuration class\n",
243
+ "class Config:\n",
244
+ " # API Keys - priority: config.json > env vars > HF secrets\n",
245
+ " GEMINI_API_KEY = loaded_config.get(\"gemini_api_key\") or os.getenv(\"GEMINI_API_KEY\", \"\")\n",
246
+ " HUGGINGFACE_TOKEN = os.getenv(\"HUGGINGFACE_API_TOKEN\", os.getenv(\"HF_TOKEN\", \"\"))\n",
247
+ " WEBSCRAPER_API_KEY = loaded_config.get(\"webscraper_api_key\") or os.getenv(\"WEBSCRAPER_API_KEY\", \"\")\n",
248
+ " WEBSCRAPER_API_URL = loaded_config.get(\"webscraper_api_url\", \"http://webscrapper.live/api/scrape\")\n",
249
+ " \n",
250
+ " # Gemini model\n",
251
+ " GEMINI_MODEL = loaded_config.get(\"gemini_model\", \"gemini-2.5-flash\")\n",
252
+ " \n",
253
+ " # Paths\n",
254
+ " BASE_DIR = Path(\"..\").resolve()\n",
255
+ " DATASETS_DIR = BASE_DIR / \"datasets\"\n",
256
+ " MODELS_DIR = BASE_DIR / \"models\"\n",
257
+ " ARTIFACTS_DIR = BASE_DIR / \"artifacts\"\n",
258
+ " \n",
259
+ " # ML Settings\n",
260
+ " RANDOM_STATE = loaded_config.get(\"random_state\", 42)\n",
261
+ " TEST_SIZE = loaded_config.get(\"test_size\", 0.2)\n",
262
+ " CV_FOLDS = loaded_config.get(\"cv_folds\", 5)\n",
263
+ " \n",
264
+ " # Device\n",
265
+ " DEVICE = DEVICE\n",
266
+ "\n",
267
+ "config = Config()\n",
268
+ "\n",
269
+ "# Validate required API keys\n",
270
+ "print(\"\\n\" + \"=\" * 60)\n",
271
+ "print(\"API CONFIGURATION STATUS\")\n",
272
+ "print(\"=\" * 60)\n",
273
+ "print(f\"βœ“ Gemini API Key: {'Configured (' + config.GEMINI_API_KEY[:10] + '...)' if config.GEMINI_API_KEY else '⚠ NOT SET'}\")\n",
274
+ "print(f\"βœ“ Gemini Model: {config.GEMINI_MODEL}\")\n",
275
+ "print(f\"βœ“ HuggingFace Token: {'Configured' if config.HUGGINGFACE_TOKEN else '⚠ NOT SET (optional)'}\")\n",
276
+ "print(f\"βœ“ WebScraper API: Configured\")"
277
+ ]
278
+ },
279
+ {
280
+ "cell_type": "markdown",
281
+ "id": "126b5f7f",
282
+ "metadata": {},
283
+ "source": [
284
+ "## 5. Gemini API Connectivity Test"
285
+ ]
286
+ },
287
+ {
288
+ "cell_type": "code",
289
+ "execution_count": null,
290
+ "id": "14cef3bc",
291
+ "metadata": {},
292
+ "outputs": [],
293
+ "source": [
294
+ "import google.generativeai as genai\n",
295
+ "\n",
296
+ "def test_gemini_connection():\n",
297
+ " \"\"\"Test Gemini API connectivity\"\"\"\n",
298
+ " if not config.GEMINI_API_KEY:\n",
299
+ " return False, \"API key not configured\"\n",
300
+ " \n",
301
+ " try:\n",
302
+ " genai.configure(api_key=config.GEMINI_API_KEY)\n",
303
+ " # Use the configured model (gemini-2.5-flash)\n",
304
+ " model = genai.GenerativeModel(config.GEMINI_MODEL)\n",
305
+ " response = model.generate_content(\"Respond with only: OK\")\n",
306
+ " return True, f\"Model: {config.GEMINI_MODEL}, Response: {response.text.strip()}\"\n",
307
+ " except Exception as e:\n",
308
+ " # Fallback to gemini-1.5-flash if 2.5 not available\n",
309
+ " try:\n",
310
+ " model = genai.GenerativeModel('gemini-1.5-flash')\n",
311
+ " response = model.generate_content(\"Respond with only: OK\")\n",
312
+ " return True, f\"Model: gemini-1.5-flash (fallback), Response: {response.text.strip()}\"\n",
313
+ " except Exception as e2:\n",
314
+ " return False, str(e2)\n",
315
+ "\n",
316
+ "print(\"Testing Gemini API connection...\")\n",
317
+ "success, message = test_gemini_connection()\n",
318
+ "\n",
319
+ "if success:\n",
320
+ " print(f\"βœ“ Gemini API: {message}\")\n",
321
+ "else:\n",
322
+ " print(f\"⚠ Gemini API: Connection failed - {message}\")"
323
+ ]
324
+ },
325
+ {
326
+ "cell_type": "markdown",
327
+ "id": "628ac121",
328
+ "metadata": {},
329
+ "source": [
330
+ "## 6. Web Scraper API Connectivity Test"
331
+ ]
332
+ },
333
+ {
334
+ "cell_type": "code",
335
+ "execution_count": null,
336
+ "id": "beb1b036",
337
+ "metadata": {},
338
+ "outputs": [],
339
+ "source": [
340
+ "import httpx\n",
341
+ "import asyncio\n",
342
+ "\n",
343
+ "# Install nest_asyncio for Jupyter compatibility\n",
344
+ "try:\n",
345
+ " import nest_asyncio\n",
346
+ " nest_asyncio.apply()\n",
347
+ "except ImportError:\n",
348
+ " pass # Will use synchronous fallback\n",
349
+ "\n",
350
+ "def test_webscraper_connection_sync():\n",
351
+ " \"\"\"Test WebScrapper.live API connectivity (sync version)\"\"\"\n",
352
+ " try:\n",
353
+ " with httpx.Client(timeout=30.0) as client:\n",
354
+ " response = client.post(\n",
355
+ " config.WEBSCRAPER_API_URL,\n",
356
+ " json={\"url\": \"https://example.com\"},\n",
357
+ " headers={\n",
358
+ " \"Content-Type\": \"application/json\",\n",
359
+ " \"X-API-Key\": config.WEBSCRAPER_API_KEY\n",
360
+ " }\n",
361
+ " )\n",
362
+ " if response.status_code == 200:\n",
363
+ " return True, \"Connected\"\n",
364
+ " else:\n",
365
+ " return False, f\"Status {response.status_code}: {response.text[:100]}\"\n",
366
+ " except Exception as e:\n",
367
+ " return False, str(e)\n",
368
+ "\n",
369
+ "print(\"Testing Web Scraper API connection...\")\n",
370
+ "success, message = test_webscraper_connection_sync()\n",
371
+ "\n",
372
+ "if success:\n",
373
+ " print(f\"βœ“ WebScraper API: Connected successfully\")\n",
374
+ "else:\n",
375
+ " print(f\"⚠ WebScraper API: {message}\")"
376
+ ]
377
+ },
378
+ {
379
+ "cell_type": "markdown",
380
+ "id": "75ee0f51",
381
+ "metadata": {},
382
+ "source": [
383
+ "## 7. Create Directory Structure"
384
+ ]
385
+ },
386
+ {
387
+ "cell_type": "code",
388
+ "execution_count": null,
389
+ "id": "776236f8",
390
+ "metadata": {},
391
+ "outputs": [],
392
+ "source": [
393
+ "# Create necessary directories\n",
394
+ "directories = [\n",
395
+ " config.DATASETS_DIR,\n",
396
+ " config.MODELS_DIR,\n",
397
+ " config.ARTIFACTS_DIR,\n",
398
+ " config.BASE_DIR / \"logs\",\n",
399
+ " config.BASE_DIR / \"cache\",\n",
400
+ "]\n",
401
+ "\n",
402
+ "print(\"Creating directory structure...\")\n",
403
+ "for directory in directories:\n",
404
+ " directory.mkdir(parents=True, exist_ok=True)\n",
405
+ " print(f\" βœ“ {directory}\")\n",
406
+ "\n",
407
+ "print(\"\\nβœ“ Directory structure ready!\")"
408
+ ]
409
+ },
410
+ {
411
+ "cell_type": "markdown",
412
+ "id": "a6fe27eb",
413
+ "metadata": {},
414
+ "source": [
415
+ "## 8. Save Configuration for Other Notebooks"
416
+ ]
417
+ },
418
+ {
419
+ "cell_type": "code",
420
+ "execution_count": null,
421
+ "id": "6b854bac",
422
+ "metadata": {},
423
+ "outputs": [],
424
+ "source": [
425
+ "import json\nfrom datetime import datetime\n",
426
+ "\n",
427
+ "# Export configuration for other notebooks\n",
428
+ "notebook_config = {\n",
429
+ " \"device\": str(DEVICE),\n",
430
+ " \"python_version\": f\"{python_version.major}.{python_version.minor}.{python_version.micro}\",\n",
431
+ " \"torch_version\": torch.__version__,\n",
432
+ " \"cuda_available\": cuda_available,\n",
433
+ " \"base_dir\": str(config.BASE_DIR),\n",
434
+ " \"datasets_dir\": str(config.DATASETS_DIR),\n",
435
+ " \"models_dir\": str(config.MODELS_DIR),\n",
436
+ " \"artifacts_dir\": str(config.ARTIFACTS_DIR),\n",
437
+ " \"random_state\": config.RANDOM_STATE,\n",
438
+ " \"test_size\": config.TEST_SIZE,\n",
439
+ " \"cv_folds\": config.CV_FOLDS,\n",
440
+ " \"gemini_configured\": bool(config.GEMINI_API_KEY),\n",
441
+ " \"huggingface_configured\": bool(config.HUGGINGFACE_TOKEN),\n",
442
+ " \"created_at\": datetime.now().isoformat()\n",
443
+ "}\n",
444
+ "\n",
445
+ "config_path = config.BASE_DIR / \"notebook_config.json\"\n",
446
+ "with open(config_path, \"w\") as f:\n",
447
+ " json.dump(notebook_config, f, indent=2)\n",
448
+ "\n",
449
+ "print(f\"βœ“ Configuration saved to: {config_path}\")\n",
450
+ "print(\"\\n\" + json.dumps(notebook_config, indent=2))"
451
+ ]
452
+ },
453
+ {
454
+ "cell_type": "markdown",
455
+ "id": "ac7ada25",
456
+ "metadata": {},
457
+ "source": [
458
+ "## 9. Environment Summary"
459
+ ]
460
+ },
461
+ {
462
+ "cell_type": "code",
463
+ "execution_count": null,
464
+ "id": "f409be56",
465
+ "metadata": {},
466
+ "outputs": [],
467
+ "source": [
468
+ "print(\"\\n\" + \"=\" * 60)\n",
469
+ "print(\"ENVIRONMENT SETUP COMPLETE\")\n",
470
+ "print(\"=\" * 60)\n",
471
+ "print(f\"\"\"\n",
472
+ "βœ… Python: {python_version.major}.{python_version.minor}.{python_version.micro}\n",
473
+ "βœ… Device: {DEVICE}\n",
474
+ "βœ… PyTorch: {torch.__version__}\n",
475
+ "βœ… Gemini API: {'Ready' if config.GEMINI_API_KEY else 'Not configured'}\n",
476
+ "βœ… HuggingFace: {'Ready' if config.HUGGINGFACE_TOKEN else 'Not configured'}\n",
477
+ "βœ… WebScraper API: Ready\n",
478
+ "βœ… Directories: Created\n",
479
+ "\n",
480
+ "You can now proceed to the next notebook:\n",
481
+ " β†’ 01_data_acquisition.ipynb\n",
482
+ "\"\"\")\n",
483
+ "print(\"=\" * 60)"
484
+ ]
485
+ }
486
+ ],
487
+ "metadata": {
488
+ "language_info": {
489
+ "name": "python"
490
+ }
491
+ },
492
+ "nbformat": 4,
493
+ "nbformat_minor": 5
494
+ }