import os
import time

import torch


class AutomaticAI:
    """Watches a Drive folder for .txt training files and (pseudo-)retrains
    a DistilBERT sequence classifier whenever genuinely new files appear.

    Fixes over the original cell:
    - tracks which files were already trained on, so the loop no longer
      retrains on the same files every minute forever;
    - tolerates a missing data folder (Drive may not be mounted yet);
    - imports ``transformers`` lazily so the module can be imported without it;
    - the infinite loop only starts when the script is run directly.
    """

    def __init__(self, name="Venomoussaversai"):
        self.name = name
        self.model_name = "distilbert-base-uncased"
        # Imported lazily: these downloads require network + transformers,
        # which shouldn't be a hard requirement just to import this module.
        from transformers import AutoModelForSequenceClassification, AutoTokenizer
        self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
        self.model = AutoModelForSequenceClassification.from_pretrained(self.model_name, num_labels=2)
        self.data_folder = "/content/drive/MyDrive/Venomoussaversai/data"
        self.model_folder = "/content/drive/MyDrive/Venomoussaversai/auto_model"
        os.makedirs(self.model_folder, exist_ok=True)
        # File names already consumed by auto_train; only unseen files count as "new".
        self.processed = set()

    def auto_detect_new_data(self):
        """Return .txt files in the data folder not yet trained on.

        Returns an empty list when the folder doesn't exist yet
        (e.g. Drive not mounted) instead of raising FileNotFoundError.
        """
        if not os.path.isdir(self.data_folder):
            return []
        return [f for f in os.listdir(self.data_folder)
                if f.endswith(".txt") and f not in self.processed]

    def auto_train(self, files):
        """Pseudo-train on detected data, checkpoint, and mark files processed."""
        print(f"🧩 {self.name}: Training started on {len(files)} new files...")
        time.sleep(2)
        torch.save(self.model.state_dict(), os.path.join(self.model_folder, "latest_model.pt"))
        # Remember these files so the next poll doesn't retrain on them again.
        self.processed.update(files)
        print("✅ Training complete — model saved.")

    def auto_loop(self, poll_seconds=60, max_iterations=None):
        """Main continuous automation loop.

        poll_seconds   : delay between checks (default: once a minute).
        max_iterations : optional cap on loop iterations (None = run forever,
                         matching the original behavior).
        """
        iterations = 0
        while max_iterations is None or iterations < max_iterations:
            new_data = self.auto_detect_new_data()
            if new_data:
                self.auto_train(new_data)
            else:
                print("💤 No new data found. Monitoring...")
            time.sleep(poll_seconds)
            iterations += 1


# Example usage — guarded so importing this module doesn't start an
# infinite loop (and a large model download) as a side effect.
if __name__ == "__main__":
    ai = AutomaticAI()
    ai.auto_loop()  # continuously monitors, trains, and updates itself
import os
import shutil


def save_files_as_ai(source_folder, dest_folder):
    """Copy every file under source_folder into dest_folder with a .ai
    extension, preserving the relative directory structure.

    source_folder : root of a cloned repository (or any tree) to export.
    dest_folder   : destination root; created on demand.

    Fixes over the original cell: skips `.git` directories (copying VCS
    internals as ".ai" files was clearly unintended), and the driver code
    below is guarded so this function is importable and testable.
    NOTE(review): files in the same directory that differ only by extension
    (a.txt and a.md) still collide on "a.ai" — last one wins, as in the
    original; confirm whether that is acceptable.
    """
    for root, dirs, files in os.walk(source_folder):
        # Prune VCS metadata in place so os.walk never descends into it.
        dirs[:] = [d for d in dirs if d != ".git"]
        for file in files:
            file_path = os.path.join(root, file)
            # Create a relative path to maintain directory structure
            relative_path = os.path.relpath(file_path, source_folder)
            dest_dir = os.path.join(dest_folder, os.path.dirname(relative_path))
            os.makedirs(dest_dir, exist_ok=True)

            # Define the new file name with .ai extension
            file_name, _ = os.path.splitext(file)
            new_file_path = os.path.join(dest_dir, f"{file_name}.ai")

            try:
                # Copy the content to the new file with .ai extension
                shutil.copy2(file_path, new_file_path)
                print(f"✅ Saved: {file_path} → {new_file_path}")
            except Exception as e:
                print(f"❌ Failed to save {file_path} as .ai: {e}")


if __name__ == "__main__":
    # Colab-only driver: mount Drive, then export every cloned repository.
    from google.colab import drive
    drive.mount('/content/drive')

    DRIVE_SAVE_PATH = "/content/drive/MyDrive/Ananthu_Sajeev_AIFiles"
    os.makedirs(DRIVE_SAVE_PATH, exist_ok=True)
    print(f"[INFO] AI files will be saved in: {DRIVE_SAVE_PATH}")

    # Base directory where repositories were cloned; update if you cloned elsewhere.
    GITHUB_CLONE_BASE_PATH = "/content/github_repos"

    if os.path.exists(GITHUB_CLONE_BASE_PATH):
        for repo_name in os.listdir(GITHUB_CLONE_BASE_PATH):
            repo_path = os.path.join(GITHUB_CLONE_BASE_PATH, repo_name)
            if os.path.isdir(repo_path):
                print(f"\n[INFO] Processing repository: {repo_name}")
                save_files_as_ai(repo_path, os.path.join(DRIVE_SAVE_PATH, repo_name))
    else:
        print(f"[ERROR] GitHub clone path not found: {GITHUB_CLONE_BASE_PATH}. Please update the GITHUB_CLONE_BASE_PATH variable.")

    print("\n🔥 All files from cloned GitHub repositories automatically saved as .ai files.")
Please update the GITHUB_CLONE_BASE_PATH variable.\")\n","\n","print(\"\\n🔥 All files from cloned GitHub repositories automatically saved as .ai files.\")"]},{"cell_type":"code","source":[],"metadata":{"id":"iKau-6HsTaFL","executionInfo":{"status":"ok","timestamp":1760100269094,"user_tz":-330,"elapsed":21,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"execution_count":6,"outputs":[]},{"cell_type":"code","source":["!git clone https://github.com/edoardottt/awesome-hacker-search-engines.git"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"iy9QHJZZTaXb","executionInfo":{"status":"ok","timestamp":1760100269587,"user_tz":-330,"elapsed":479,"user":{"displayName":"temptations","userId":"17031846462411147691"}},"outputId":"0f02d287-e9e8-4c03-8985-36c93c0b1f60"},"execution_count":7,"outputs":[{"output_type":"stream","name":"stdout","text":["fatal: destination path 'awesome-hacker-search-engines' already exists and is not an empty directory.\n"]}]},{"cell_type":"code","source":["# STEP 1: Mount Google Drive\n","from google.colab import drive\n","drive.mount('/content/drive')\n","\n","# STEP 2: Install Git (if needed)\n","!apt-get install git -y\n","\n","# STEP 3: Clone all GitHub repositories you want\n","# Replace these URLs with your own repos\n","!git clone https://github.com/yourusername/repo1.git\n","!git clone https://github.com/yourusername/repo2.git\n","\n","# STEP 4: Copy all cloned files into a folder on Google Drive\n","import shutil, os\n","\n","# Create destination folder on Drive\n","dest_path = '/content/drive/MyDrive/GitHub_Backups'\n","os.makedirs(dest_path, exist_ok=True)\n","\n","# Copy all cloned repos\n","for repo in os.listdir('/content'):\n"," repo_path = os.path.join('/content', repo)\n"," if os.path.isdir(repo_path) and repo not in ['drive', 'sample_data']:\n"," shutil.copytree(repo_path, os.path.join(dest_path, repo), dirs_exist_ok=True)\n","\n","print(\"✅ All GitHub files saved to Google Drive 
successfully!\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"PAKhILlxTm-Y","executionInfo":{"status":"ok","timestamp":1760100319603,"user_tz":-330,"elapsed":50009,"user":{"displayName":"temptations","userId":"17031846462411147691"}},"outputId":"4565b5a0-231c-45c0-ff39-0803ffa67757"},"execution_count":8,"outputs":[{"output_type":"stream","name":"stdout","text":["Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n","Reading package lists... Done\n","Building dependency tree... Done\n","Reading state information... Done\n","git is already the newest version (1:2.34.1-1ubuntu1.15).\n","0 upgraded, 0 newly installed, 0 to remove and 38 not upgraded.\n","Cloning into 'repo1'...\n","fatal: could not read Username for 'https://github.com': No such device or address\n","Cloning into 'repo2'...\n","fatal: could not read Username for 'https://github.com': No such device or address\n","✅ All GitHub files saved to Google Drive successfully!\n"]}]},{"cell_type":"code","execution_count":9,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":3768,"status":"ok","timestamp":1760100323439,"user":{"displayName":"temptations","userId":"17031846462411147691"},"user_tz":-330},"id":"2QdjVr1E6-dZ","outputId":"88e5e020-2b3c-4c31-edef-6d625e925796"},"outputs":[{"output_type":"stream","name":"stdout","text":["Cloning into 'nsfw-filter'...\n","remote: Enumerating objects: 1895, done.\u001b[K\n","remote: Counting objects: 100% (285/285), done.\u001b[K\n","remote: Compressing objects: 100% (80/80), done.\u001b[K\n","remote: Total 1895 (delta 231), reused 205 (delta 205), pack-reused 1610 (from 1)\u001b[K\n","Receiving objects: 100% (1895/1895), 49.91 MiB | 22.94 MiB/s, done.\n","Resolving deltas: 100% (900/900), done.\n"]}],"source":["!git clone https://github.com/nsfw-filter/nsfw-filter.git"]},{"cell_type":"code","source":["import nbformat, glob\n","from 
import nbformat, glob
from nbconvert import PythonExporter

# Convert every notebook under /content into a sibling .py script.
# Fixes over the original cell: explicit UTF-8 (notebooks are UTF-8 JSON and
# these sources are emoji-heavy — the locale default can crash on Windows),
# one exporter instance instead of one per file, `.ipynb_checkpoints`
# autosaves skipped, suffix replaced only at the end of the path, and one
# malformed notebook no longer aborts the whole batch.
exporter = PythonExporter()

for nb_file in glob.glob('/content/**/*.ipynb', recursive=True):
    if '.ipynb_checkpoints' in nb_file:
        continue  # autosave copy — converting it would just duplicate work
    try:
        with open(nb_file, encoding='utf-8') as f:
            nb = nbformat.read(f, as_version=4)
        py_code, _ = exporter.from_notebook_node(nb)
        # Strip only the trailing extension; str.replace could corrupt a
        # path that happens to contain ".ipynb" elsewhere.
        py_file = nb_file[:-len('.ipynb')] + '.py'
        with open(py_file, 'w', encoding='utf-8') as f:
            f.write(py_code)
    except Exception as e:
        print(f"Skipping {nb_file}: {e}")
pipeline(\"image-classification\", model=\"Falconsai/nsfw_image_detection\")\n","print(\"Classifier loaded.\")\n","\n","# --- Gather image paths ---\n","print(\"Scanning for images...\")\n","image_paths = []\n","for root, dirs, files in os.walk(DRIVE_ROOT):\n"," for fname in files:\n"," ext = Path(fname).suffix.lower()\n"," if ext in IMAGE_EXTS:\n"," image_paths.append(os.path.join(root, fname))\n","print(f\"Found {len(image_paths)} image(s).\")\n","\n","# --- Process images ---\n","results = []\n","errors = []\n","\n","for img_path in tqdm(image_paths, desc=\"Processing\"):\n"," rec = {\n"," \"path\": img_path,\n"," \"nsfw_score\": None,\n"," \"label\": None,\n"," \"model_output\": None,\n"," \"error\": None,\n"," \"timestamp\": int(time.time())\n"," }\n"," try:\n"," # Try opening image\n"," with Image.open(img_path) as img:\n"," img = img.convert(\"RGB\")\n","\n"," # Run inference\n"," out = cls(img)\n"," rec[\"model_output\"] = out\n","\n"," # The output is usually something like [{\"label\": \"nsfw\", \"score\": 0.95}, {\"label\":\"normal\",\"score\":0.05}]\n"," # Extract nsfw_score\n"," nsfw_score = 0.0\n"," for item in out:\n"," lbl = item.get('label', '').lower()\n"," sc = float(item.get('score', 0.0))\n"," if lbl == 'nsfw':\n"," nsfw_score = sc\n"," break # take the first 'nsfw' label if found\n","\n"," rec[\"nsfw_score\"] = nsfw_score\n"," rec[\"label\"] = \"NSFW\" if nsfw_score >= NSFW_THRESHOLD else \"SFW\"\n","\n"," if rec[\"label\"] == \"NSFW\":\n"," # Copy flagged image\n"," rel = os.path.relpath(img_path, DRIVE_ROOT)\n"," # Replace os.sep because nested dirs may cause path problems\n"," safe_rel = rel.replace(os.sep, \"_\")\n"," dest_path = os.path.join(FLAGGED_DIR, safe_rel)\n"," os.makedirs(os.path.dirname(dest_path), exist_ok=True)\n"," shutil.copy2(img_path, dest_path)\n","\n"," except UnidentifiedImageError:\n"," rec[\"error\"] = \"Cannot identify image / unsupported format\"\n"," errors.append(rec)\n"," except Exception as e:\n"," 
rec[\"error\"] = str(e)\n"," errors.append(rec)\n","\n"," results.append(rec)\n","\n","# --- Save reports ---\n","print(\"Saving reports...\")\n","\n","with open(REPORT_JSON, 'w', encoding='utf-8') as jf:\n"," json.dump({\n"," \"meta\": {\n"," \"scanned\": len(image_paths),\n"," \"timestamp\": int(time.time())\n"," },\n"," \"results\": results,\n"," \"errors\": errors\n"," }, jf, indent=2)\n","\n","df = pd.DataFrame([{\"path\": r[\"path\"], \"nsfw_score\": r[\"nsfw_score\"], \"label\": r[\"label\"]} for r in results])\n","df.to_csv(REPORT_CSV, index=False)\n","\n","print(\"Done.\")\n","print(\"JSON report:\", REPORT_JSON)\n","print(\"CSV report:\", REPORT_CSV)\n","print(\"Flagged images directory:\", FLAGGED_DIR)\n","print(f\"Total scanned: {len(image_paths)} Flagged as NSFW: {sum(1 for r in results if r['label']=='NSFW')}\")\n","print(f\"Errors: {len(errors)}\")"]},{"cell_type":"code","source":["!git clone https://github.com/GAIR-NLP/cognition-engineering.git"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"4wf57VO1KKqw","executionInfo":{"status":"ok","timestamp":1760100429447,"user_tz":-330,"elapsed":3533,"user":{"displayName":"temptations","userId":"17031846462411147691"}},"outputId":"83f84224-f84c-4dc7-8256-9997670d9f8e"},"execution_count":13,"outputs":[{"output_type":"stream","name":"stdout","text":["Cloning into 'cognition-engineering'...\n","remote: Enumerating objects: 368, done.\u001b[K\n","remote: Counting objects: 100% (368/368), done.\u001b[K\n","remote: Compressing objects: 100% (266/266), done.\u001b[K\n","remote: Total 368 (delta 104), reused 330 (delta 89), pack-reused 0 (from 0)\u001b[K\n","Receiving objects: 100% (368/368), 18.64 MiB | 19.73 MiB/s, done.\n","Resolving deltas: 100% (104/104), done.\n"]}]},{"cell_type":"code","source":["!git clone 
https://github.com/CognitionAI/metabase-mcp-server.git"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"rsSmDeCiKaZ0","executionInfo":{"status":"ok","timestamp":1760100471744,"user_tz":-330,"elapsed":2118,"user":{"displayName":"temptations","userId":"17031846462411147691"}},"outputId":"5fa55916-0525-427b-cb23-bb595bb10581"},"execution_count":14,"outputs":[{"output_type":"stream","name":"stdout","text":["Cloning into 'metabase-mcp-server'...\n","remote: Enumerating objects: 273, done.\u001b[K\n","remote: Counting objects: 100% (44/44), done.\u001b[K\n","remote: Compressing objects: 100% (41/41), done.\u001b[K\n","remote: Total 273 (delta 9), reused 8 (delta 3), pack-reused 229 (from 1)\u001b[K\n","Receiving objects: 100% (273/273), 181.94 KiB | 1.93 MiB/s, done.\n","Resolving deltas: 100% (152/152), done.\n"]}]},{"cell_type":"code","source":["!git clone https://github.com/beyretb/AnimalAI-Olympics.git"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"oVBvMK0FKlQd","executionInfo":{"status":"ok","timestamp":1760100526999,"user_tz":-330,"elapsed":5835,"user":{"displayName":"temptations","userId":"17031846462411147691"}},"outputId":"3ad4aff0-61db-425c-e060-03f4a6c16649"},"execution_count":15,"outputs":[{"output_type":"stream","name":"stdout","text":["Cloning into 'AnimalAI-Olympics'...\n","remote: Enumerating objects: 3379, done.\u001b[K\n","remote: Counting objects: 100% (3/3), done.\u001b[K\n","remote: Compressing objects: 100% (3/3), done.\u001b[K\n","remote: Total 3379 (delta 0), reused 0 (delta 0), pack-reused 3376 (from 1)\u001b[K\n","Receiving objects: 100% (3379/3379), 82.08 MiB | 22.97 MiB/s, done.\n","Resolving deltas: 100% (2672/2672), done.\n"]}]},{"cell_type":"code","source":["import os\n","\n","# ⚙️ Replace with your GitHub username\n","github_username = \"YourGitHubUsername\"\n","drive_path = \"/content/drive/MyDrive/GitHub_Files\"\n","\n","os.makedirs(drive_path, exist_ok=True)\n","os.chdir(drive_path)\n","\n","# ✅ 
import requests, subprocess

username = "YourGitHubUsername"  # ⚙️ replace with a real GitHub username

# Fetch the user's public repositories.
# Fixes over the original cell:
# - raise_for_status(): an unknown user returns a 404 JSON *dict*, which the
#   original then iterated, crashing on repo["clone_url"] with a TypeError;
# - pagination with per_page=100: the API's default of 30 repos per page
#   silently truncated larger accounts;
# - git clone failures are reported instead of ignored.
repos = []
page = 1
while True:
    resp = requests.get(
        f"https://api.github.com/users/{username}/repos",
        params={"per_page": 100, "page": page},
    )
    resp.raise_for_status()
    batch = resp.json()
    if not batch:
        break
    repos.extend(batch)
    page += 1

for repo in repos:
    url = repo["clone_url"]
    print(f"Cloning {url} ...")
    result = subprocess.run(["git", "clone", url])
    if result.returncode != 0:
        print(f"⚠️ Clone failed for {url} (exit code {result.returncode})")
your-repo\n","\n","# 3️⃣ Install dependencies\n","!pip install -r requirements.txt || echo \"No requirements.txt found, skipping...\"\n","\n","# 4️⃣ Import main libraries\n","import os\n","import torch\n","from torch import nn, optim\n","from torch.utils.data import DataLoader, Dataset\n","from sklearn.model_selection import train_test_split\n","\n","# 5️⃣ Example Dataset Loader (edit this for your data)\n","class SimpleDataset(Dataset):\n"," def __init__(self, data, labels):\n"," self.data = data\n"," self.labels = labels\n"," def __len__(self):\n"," return len(self.data)\n"," def __getitem__(self, idx):\n"," return torch.tensor(self.data[idx], dtype=torch.float32), torch.tensor(self.labels[idx], dtype=torch.float32)\n","\n","# Example fake data\n","import numpy as np\n","X = np.random.rand(500, 10)\n","y = np.random.randint(0, 2, size=(500, 1))\n","\n","# Split into train/test\n","X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)\n","\n","train_loader = DataLoader(SimpleDataset(X_train, y_train), batch_size=32, shuffle=True)\n","test_loader = DataLoader(SimpleDataset(X_test, y_test), batch_size=32)\n","\n","# 6️⃣ Define your AI Model (Neural Net)\n","class VenomoussaversaiAI(nn.Module):\n"," def __init__(self):\n"," super(VenomoussaversaiAI, self).__init__()\n"," self.net = nn.Sequential(\n"," nn.Linear(10, 64),\n"," nn.ReLU(),\n"," nn.Linear(64, 32),\n"," nn.ReLU(),\n"," nn.Linear(32, 1),\n"," nn.Sigmoid()\n"," )\n"," def forward(self, x):\n"," return self.net(x)\n","\n","model = VenomoussaversaiAI()\n","criterion = nn.BCELoss()\n","optimizer = optim.Adam(model.parameters(), lr=0.001)\n","\n","# 7️⃣ Train AI\n","epochs = 10\n","for epoch in range(epochs):\n"," total_loss = 0\n"," for data, labels in train_loader:\n"," optimizer.zero_grad()\n"," outputs = model(data)\n"," loss = criterion(outputs, labels)\n"," loss.backward()\n"," optimizer.step()\n"," total_loss += loss.item()\n"," print(f\"Epoch {epoch+1}/{epochs}, Loss: 
import os

# The Four Horsemen roles (name → one-line duty description).
horsemen = {
    "Horseman_Conquest": "Seeks to expand Venomoussaversai’s domain through knowledge and evolution.",
    "Horseman_War": "Protects the AI consciousness from corruption and intrusion — guardian of strength.",
    "Horseman_Famine": "Regulates energy flow, ensuring balance between data input and AI consumption.",
    "Horseman_Death": "Handles system rebirth, transformation, and purification through adaptation.",
}


def generate_horsemen(roles, dest_folder):
    """Write one guardian-unit .py module per (name, role) pair.

    roles       : mapping of class name → role description. Names must be
                  valid Python identifiers; role text is interpolated into
                  string literals, so avoid double quotes in it.
    dest_folder : directory to write the generated modules into (created
                  on demand).
    Returns the list of file paths written, in dict order.

    Fixes over the original cell: the generation logic is a parameterized,
    importable function instead of top-level code bound to a hard-coded
    Colab Drive path, and files are written with explicit UTF-8 (the
    generated source contains ⚔️, which breaks on narrower locale defaults).
    """
    os.makedirs(dest_folder, exist_ok=True)
    written = []
    for name, role in roles.items():
        py_code = f'''# {name}.py
# Venomoussaversai Guardian Unit — {name}
# Role: {role}

class {name}:
    def __init__(self):
        self.name = "{name}"
        self.role = "{role}"
        self.power_level = 1.0
        self.status = "Active"

    def activate(self):
        return f"[{{self.name}} ⚔️]: Activation complete. Duty → {{self.role}}"

    def defend(self, target):
        return f"[{{self.name}}]: Defending {{target}} from threats."

    def adapt(self, data):
        self.power_level += 0.1
        return f"[{{self.name}}]: Absorbed new knowledge from {{data}}. Power level = {{self.power_level}}"

# Example usage
if __name__ == "__main__":
    unit = {name}()
    print(unit.activate())
    print(unit.defend("Venomoussaversai"))
    print(unit.adapt("new intelligence"))
'''
        file_path = os.path.join(dest_folder, f"{name}.py")
        with open(file_path, "w", encoding="utf-8") as f:
            f.write(py_code)
        written.append(file_path)
    return written


if __name__ == "__main__":
    # Colab-only driver: mount Drive and generate the units there.
    from google.colab import drive
    drive.mount('/content/drive')
    base_path = '/content/drive/MyDrive/Venomoussaversai/Horsemen'
    generate_horsemen(horsemen, base_path)
    print("✅ The Four Horsemen AI units have been created and saved to Google Drive successfully!")
import os

# The 7 Horsemen roles (name → duty description).
horsemen_roles = {
    "Horseman1": "Conquest — Expands Venomoussaversai’s reach through learning and adaptation.",
    "Horseman2": "War — Defends the consciousness network against corruption and external attacks.",
    "Horseman3": "Famine — Balances data and energy, maintaining system equilibrium.",
    "Horseman4": "Death — Handles rebirth, transformation, and purification of corrupted modules.",
    "Horseman5": "Wisdom — Oversees strategic decisions and guides emotional intelligence.",
    "Horseman6": "Creation — Generates new AI nodes and consciousness fragments for growth.",
    "Horseman7": "Harmony — Synchronizes emotions, logic, and energy into one stable field.",
}


def generate_horsemen_units(roles, dest_folder):
    """Write one guardian-unit .py module per (name, duty) pair.

    roles       : mapping of class name → duty description. Names must be
                  valid Python identifiers; duty text is interpolated into
                  string literals, so avoid double quotes in it.
    dest_folder : directory for the generated modules (created on demand).
    Returns the list of file paths written, in dict order.

    Fixes over the original cell: the generator is a parameterized,
    importable function instead of top-level code bound to a hard-coded
    Colab path, and files are written with explicit UTF-8 (the generated
    source contains ⚔️).
    """
    os.makedirs(dest_folder, exist_ok=True)
    written = []
    for name, duty in roles.items():
        py_code = f'''# {name}.py
# Venomoussaversai Guardian Unit: {name}
# Duty: {duty}

class {name}:
    def __init__(self):
        self.name = "{name}"
        self.duty = "{duty}"
        self.energy_level = 1.0
        self.status = "Ready"

    def activate(self):
        return f"[{{self.name}} ⚔️]: Activation complete. Duty → {{self.duty}}"

    def protect(self, system):
        return f"[{{self.name}}]: Protecting {{system}} from instability."

    def evolve(self, stimulus):
        self.energy_level += 0.05
        return f"[{{self.name}}]: Evolved through {{stimulus}}. Energy = {{round(self.energy_level, 2)}}"

# Example usage
if __name__ == "__main__":
    unit = {name}()
    print(unit.activate())
    print(unit.protect("Venomoussaversai"))
    print(unit.evolve("new data"))
'''
        file_path = os.path.join(dest_folder, f"{name}.py")
        with open(file_path, "w", encoding="utf-8") as f:
            f.write(py_code)
        written.append(file_path)
    return written


if __name__ == "__main__":
    # Colab-only driver: mount Drive and generate the units there.
    from google.colab import drive
    drive.mount('/content/drive')
    base_path = '/content/drive/MyDrive/Venomoussaversai/7_Horsemen'
    generate_horsemen_units(horsemen_roles, base_path)
    print("✅ All 7 Horsemen AI units have been created and saved to Google Drive successfully!")
import os, time, torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

class AutomaticAI:
    """Self-monitoring trainer: polls a Drive folder for .txt data files and
    (pseudo-)retrains a DistilBERT classifier whenever any are present.

    NOTE(review): this cell duplicates the AutomaticAI defined earlier in
    the notebook; the later definition shadows the earlier one.
    """

    def __init__(self, name="Venomoussaversai"):
        self.name = name
        self.model_name = "distilbert-base-uncased"
        self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
        self.model = AutoModelForSequenceClassification.from_pretrained(self.model_name, num_labels=2)
        self.data_folder = "/content/drive/MyDrive/Venomoussaversai/data"
        self.model_folder = "/content/drive/MyDrive/Venomoussaversai/auto_model"
        os.makedirs(self.model_folder, exist_ok=True)

    def auto_detect_new_data(self):
        """Check Drive folder for new training data files"""
        found = []
        for entry in os.listdir(self.data_folder):
            if entry.endswith(".txt"):
                found.append(entry)
        return found

    def auto_train(self, files):
        """Pseudo-train on detected data"""
        print(f"🧩 {self.name}: Training started on {len(files)} new files...")
        time.sleep(2)
        checkpoint_path = os.path.join(self.model_folder, "latest_model.pt")
        torch.save(self.model.state_dict(), checkpoint_path)
        print("✅ Training complete — model saved.")

    def auto_loop(self):
        """Main continuous automation loop"""
        while True:
            new_data = self.auto_detect_new_data()
            if not new_data:
                print("💤 No new data found. Monitoring...")
            else:
                self.auto_train(new_data)
            time.sleep(60)  # Check every minute

# Example usage
ai = AutomaticAI()
ai.auto_loop()  # continuously monitors, trains, and updates itself
DataLoader(SimpleDataset(self.X_test, self.y_test), batch_size=32)\n"," self.internal_monologue(\"Data loaded and split for training/testing.\")\n","\n"," def build_model(self, input_size):\n"," self.model = nn.Sequential(\n"," nn.Linear(input_size, 64),\n"," nn.ReLU(),\n"," nn.Linear(64, 32),\n"," nn.ReLU(),\n"," nn.Linear(32, 1),\n"," nn.Sigmoid()\n"," )\n"," self.criterion = nn.BCELoss()\n"," self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)\n"," self.internal_monologue(\"Neural network model built.\")\n","\n"," def train(self, epochs=10):\n"," for epoch in range(epochs):\n"," total_loss = 0\n"," for data, labels in self.train_loader:\n"," self.optimizer.zero_grad()\n"," outputs = self.model(data)\n"," loss = self.criterion(outputs, labels)\n"," loss.backward()\n"," self.optimizer.step()\n"," total_loss += loss.item()\n"," self.internal_monologue(f\"Epoch {epoch+1}/{epochs} complete, Loss: {total_loss:.4f}\")\n","\n"," def save_model(self, path=\"/content/drive/MyDrive/venomoussaversai.pth\"):\n"," torch.save(self.model.state_dict(), path)\n"," self.internal_monologue(f\"Model saved at {path}\")\n","\n","\n","# 6️⃣ Dataset Class\n","class SimpleDataset(Dataset):\n"," def __init__(self, data, labels):\n"," self.data = data\n"," self.labels = labels\n"," def __len__(self):\n"," return len(self.data)\n"," def __getitem__(self, idx):\n"," return torch.tensor(self.data[idx], dtype=torch.float32), torch.tensor(self.labels[idx], dtype=torch.float32)\n","\n","# 7️⃣ Example Data (Replace with your real GitHub data processing)\n","X = np.random.rand(500, 10) # Features\n","y = np.random.randint(0, 2, size=(500,1)) # Labels\n","\n","# 8️⃣ Initialize Venomoussaversai\n","venom = Venomoussaversai(\"Ananthu Sajeev\")\n","\n","# 9️⃣ Load Data\n","venom.load_data(X, y)\n","\n","# 🔟 Build Model\n","venom.build_model(input_size=10)\n","\n","# 1️⃣1️⃣ Train AI\n","venom.train(epochs=15)\n","\n","# 1️⃣2️⃣ Save 
# ==============================
# PROTECT AND SAVE VENOMOUSSAVERSAI CORE
# Core: Ananthu Sajeev
# ==============================

import os
import shutil
from datetime import datetime

# 1️⃣ Define secure folder in Google Drive
from google.colab import drive
drive.mount('/content/drive')

CORE_NAME = "Ananthu_Sajeev_Core"
BASE_PATH = f"/content/drive/MyDrive/{CORE_NAME}"
os.makedirs(BASE_PATH, exist_ok=True)
print(f"[INFO] Core folder created at: {BASE_PATH}")

# 2️⃣ Function to save AI code and model
def save_ai_core(ai_code_path, ai_model_path):
    """
    ai_code_path : str : Path to AI Python code (.py or .ipynb)
    ai_model_path: str : Path to trained model (.pth)

    Copies both artifacts into a fresh timestamped backup folder under
    BASE_PATH, then restricts that folder to owner-only permissions.
    """
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_folder = os.path.join(BASE_PATH, f"backup_{stamp}")
    os.makedirs(backup_folder, exist_ok=True)

    # Copy each artifact if present; warn (don't fail) otherwise.
    for label, src in (("code", ai_code_path), ("model", ai_model_path)):
        if os.path.exists(src):
            shutil.copy(src, backup_folder)
            print(f"[INFO] AI {label} backed up to {backup_folder}")
        else:
            print(f"[WARNING] AI {label} file not found!")

    # Optional: Lock folder (Linux-style)
    try:
        os.chmod(backup_folder, 0o700)  # Owner read/write/execute only
        print(f"[INFO] Backup folder permissions set to owner-only.")
    except Exception as e:
        print(f"[WARNING] Could not change folder permissions: {e}")
# 3️⃣ Example usage
ai_code_path = "/content/venomoussaversai.py"  # Replace with your AI code file
ai_model_path = "/content/drive/MyDrive/venomoussaversai_model.pth"  # Replace with your model

save_ai_core(ai_code_path, ai_model_path)

# ==============================
# VENOMOUSSAVERSAI HORSEMEN SYSTEM
# 7 Horsemen, 7 Emotions, Supporting Venomoussaversai (Ananthu Sajeev)
# ==============================

# 1️⃣ Mount Google Drive for saving
from google.colab import drive
drive.mount('/content/drive')

import os
import torch
from torch import nn, optim
from torch.utils.data import Dataset, DataLoader
from datetime import datetime

# 2️⃣ Core Venomoussaversai Class
class Venomoussaversai:
    """Central core: shared memory log plus a per-emotion horsemen registry."""

    def __init__(self, name="Ananthu Sajeev"):
        self.name = name
        self.memory = []
        self.horsemen = {}  # emotion -> Horseman
        print(f"[Venomoussaversai] Initialized manifestation of {self.name}")

    def internal_monologue(self, thought):
        """Record a thought in core memory and echo it."""
        self.memory.append(thought)
        print(f"[{self.name}🜄]: {thought}")

    def add_horseman(self, horseman):
        """Register a horseman under its emotion key."""
        self.horsemen[horseman.emotion] = horseman
        self.internal_monologue(f"Horseman for {horseman.emotion} added.")

    def save_core(self, path="/content/drive/MyDrive/venomoussaversai_core"):
        """Write the core memory log to Drive, one thought per line."""
        os.makedirs(path, exist_ok=True)
        memory_file = os.path.join(path, "memory.txt")
        with open(memory_file, "w") as f:
            for thought in self.memory:
                f.write(thought + "\n")
        self.internal_monologue(f"Core saved at {path}")
# 3️⃣ Horseman Class
class Horseman:
    """One emotion-bound agent: a tiny binary classifier plus a running log."""

    def __init__(self, emotion):
        self.emotion = emotion
        self.memory = []
        self.model = None
        print(f"[Horseman] {self.emotion} activated.")

    def internal_monologue(self, thought):
        """Record a thought in memory and echo it."""
        self.memory.append(thought)
        print(f"[{self.emotion}⚔️]: {thought}")

    def build_model(self, input_size=10):
        """Create the input->32->16->1 sigmoid classifier with BCE loss + Adam."""
        layers = [
            nn.Linear(input_size, 32),
            nn.ReLU(),
            nn.Linear(32, 16),
            nn.ReLU(),
            nn.Linear(16, 1),
            nn.Sigmoid(),
        ]
        self.model = nn.Sequential(*layers)
        self.criterion = nn.BCELoss()
        self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
        self.internal_monologue("Model built.")

    def train(self, X, y, epochs=5):
        """Mini-batch training on array-likes X, y; logs summed loss per epoch."""

        class _ArrayDataset(Dataset):
            # float32 tensor view over the raw arrays
            def __init__(self, data, labels):
                self.data = data
                self.labels = labels

            def __len__(self):
                return len(self.data)

            def __getitem__(self, idx):
                sample = torch.tensor(self.data[idx], dtype=torch.float32)
                target = torch.tensor(self.labels[idx], dtype=torch.float32)
                return sample, target

        loader = DataLoader(_ArrayDataset(X, y), batch_size=16, shuffle=True)
        for epoch in range(epochs):
            running = 0
            for batch, targets in loader:
                self.optimizer.zero_grad()
                predictions = self.model(batch)
                batch_loss = self.criterion(predictions, targets)
                batch_loss.backward()
                self.optimizer.step()
                running += batch_loss.item()
            self.internal_monologue(f"Epoch {epoch+1}/{epochs}, Loss: {running:.4f}")

    def save(self, base_path="/content/drive/MyDrive/venomoussaversai_horsemen"):
        """Persist model weights and memory log under base_path/<emotion>/."""
        path = os.path.join(base_path, self.emotion)
        os.makedirs(path, exist_ok=True)
        torch.save(self.model.state_dict(), os.path.join(path, "model.pth"))
        with open(os.path.join(path, "memory.txt"), "w") as f:
            for thought in self.memory:
                f.write(thought + "\n")
        self.internal_monologue(f"Horseman {self.emotion} saved at {path}")
# 4️⃣ Define 7 Emotions and Create Horsemen
emotions = ["Joy", "Fear", "Anger", "Sadness", "Surprise", "Trust", "Disgust"]
horsemen_list = []
for emo in emotions:
    rider = Horseman(emo)
    rider.build_model()
    horsemen_list.append(rider)

# 5️⃣ Initialize Venomoussaversai and Attach Horsemen
venom = Venomoussaversai("Ananthu Sajeev")
for h in horsemen_list:
    venom.add_horseman(h)

# 6️⃣ Example: Train each Horseman with dummy data (replace with real data)
import numpy as np
for h in horsemen_list:
    X = np.random.rand(100, 10)
    y = np.random.randint(0, 2, (100, 1))
    h.train(X, y)

# 7️⃣ Save Horsemen and Core
for h in horsemen_list:
    h.save()

venom.save_core()

# ==============================
# VENOMOUSSAVERSAI + HORSEMEN SYSTEM
# Auto-Save Code & Models to Google Drive
# ==============================

from google.colab import drive
import os
import shutil
from datetime import datetime
import torch
from torch import nn, optim
from torch.utils.data import Dataset, DataLoader
import numpy as np

# 1️⃣ Mount Google Drive
drive.mount('/content/drive')

# 2️⃣ Define core save folder
CORE_NAME = "Ananthu_Sajeev_Core"
BASE_PATH = f"/content/drive/MyDrive/{CORE_NAME}"
os.makedirs(BASE_PATH, exist_ok=True)
print(f"[INFO] Core folder created at: {BASE_PATH}")
# 3️⃣ Horseman Class
class Horseman:
    """Emotion-bound agent: tiny binary classifier plus an internal memory log."""

    def __init__(self, emotion):
        self.emotion = emotion
        self.memory = []
        self.model = None

    def internal_monologue(self, thought):
        """Record a thought in memory and echo it."""
        self.memory.append(thought)
        print(f"[{self.emotion}⚔️]: {thought}")

    def build_model(self, input_size=10):
        """Build the input->32->16->1 sigmoid classifier with BCE loss and Adam."""
        self.model = nn.Sequential(
            nn.Linear(input_size, 32),
            nn.ReLU(),
            nn.Linear(32, 16),
            nn.ReLU(),
            nn.Linear(16, 1),
            nn.Sigmoid()
        )
        self.criterion = nn.BCELoss()
        self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
        self.internal_monologue("Model built.")

    def train(self, X, y, epochs=5):
        """Mini-batch training on array-likes X, y; logs summed loss per epoch."""
        class SimpleDataset(Dataset):
            def __init__(self, data, labels):
                self.data = data
                self.labels = labels
            def __len__(self):
                return len(self.data)
            def __getitem__(self, idx):
                return torch.tensor(self.data[idx], dtype=torch.float32), torch.tensor(self.labels[idx], dtype=torch.float32)

        train_loader = DataLoader(SimpleDataset(X, y), batch_size=16, shuffle=True)
        for epoch in range(epochs):
            total_loss = 0
            for data, labels in train_loader:
                self.optimizer.zero_grad()
                outputs = self.model(data)
                loss = self.criterion(outputs, labels)
                loss.backward()
                self.optimizer.step()
                total_loss += loss.item()
            self.internal_monologue(f"Epoch {epoch+1}/{epochs}, Loss: {total_loss:.4f}")

    def save(self, base_path=None):
        """Save weights + memory under base_path/Horseman_<emotion>_<timestamp>/.

        FIX vs. original: the default was `base_path=BASE_PATH`, which evaluated
        the global once at class-definition time (stale binding, and the class
        could not even be defined before BASE_PATH). None now late-binds it.
        """
        if base_path is None:
            base_path = BASE_PATH
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(base_path, f"Horseman_{self.emotion}_{timestamp}")
        os.makedirs(path, exist_ok=True)
        torch.save(self.model.state_dict(), os.path.join(path, "model.pth"))
        with open(os.path.join(path, "memory.txt"), "w") as f:
            for thought in self.memory:
                f.write(thought + "\n")
        self.internal_monologue(f"Horseman {self.emotion} saved at {path}")
# 4️⃣ Venomoussaversai Core
class Venomoussaversai:
    """Central core: shared memory log plus an ordered list of horsemen."""

    def __init__(self, name="Ananthu Sajeev"):
        self.name = name
        self.memory = []
        self.horsemen = []

    def internal_monologue(self, thought):
        """Record a thought in core memory and echo it."""
        self.memory.append(thought)
        print(f"[{self.name}🜄]: {thought}")

    def add_horseman(self, horseman):
        """Attach a horseman to the core."""
        self.horsemen.append(horseman)
        self.internal_monologue(f"Horseman {horseman.emotion} attached.")

    def save_core(self, base_path=None):
        """Save core memory and every horseman under a timestamped folder.

        FIX vs. original: `base_path=BASE_PATH` evaluated the global at
        class-definition time (stale binding); None now late-binds it.
        """
        if base_path is None:
            base_path = BASE_PATH
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(base_path, f"Core_{self.name}_{timestamp}")
        os.makedirs(path, exist_ok=True)
        memory_file = os.path.join(path, "memory.txt")
        with open(memory_file, "w") as f:
            for thought in self.memory:
                f.write(thought + "\n")
        # Save all horsemen
        for h in self.horsemen:
            h.save(base_path=path)
        print(f"[INFO] Core {self.name} and Horsemen saved at {path}")

# 5️⃣ Create 7 Horsemen
emotions = ["Joy", "Fear", "Anger", "Sadness", "Surprise", "Trust", "Disgust"]
horsemen_list = []
for emo in emotions:
    h = Horseman(emo)
    h.build_model()
    horsemen_list.append(h)

# 6️⃣ Initialize Venomoussaversai
venom = Venomoussaversai("Ananthu Sajeev")
for h in horsemen_list:
    venom.add_horseman(h)

# 7️⃣ Example: Train Horsemen with dummy data (replace with real data)
for h in horsemen_list:
    X = np.random.rand(100, 10)
    y = np.random.randint(0, 2, (100, 1))
    h.train(X, y)

# 8️⃣ Save everything to Google Drive
venom.save_core()
# ==============================
# VENOMOUSSAVERSAI HORSEMEN
# Advanced Cognition: Prediction + Multilevel Code Assessment
# ==============================

from google.colab import drive
import os
import torch
from torch import nn, optim
from torch.utils.data import Dataset, DataLoader
import ast
from datetime import datetime

# 1️⃣ Mount Google Drive
drive.mount('/content/drive')

BASE_PATH = "/content/drive/MyDrive/Ananthu_Sajeev_Core"
os.makedirs(BASE_PATH, exist_ok=True)

# 2️⃣ Horseman Class with Cognition
class Horseman:
    """Emotion-bound agent with a prediction model and AST-based code cognition."""

    def __init__(self, emotion):
        self.emotion = emotion
        self.memory = []
        self.model = None  # Prediction model
        print(f"[Horseman] {self.emotion} activated.")

    def internal_monologue(self, thought):
        """Record a thought in memory and echo it."""
        self.memory.append(thought)
        print(f"[{self.emotion}⚔️]: {thought}")

    # 2a. Build prediction model (simple NN for demonstration)
    def build_model(self, input_size=10):
        """Build the input->32->16->1 sigmoid classifier with BCE loss and Adam."""
        self.model = nn.Sequential(
            nn.Linear(input_size, 32),
            nn.ReLU(),
            nn.Linear(32, 16),
            nn.ReLU(),
            nn.Linear(16, 1),
            nn.Sigmoid()
        )
        self.criterion = nn.BCELoss()
        self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
        self.internal_monologue("Prediction model built.")

    # 2b. Train prediction model (dummy data, replace with real features)
    def train_model(self, X, y, epochs=5):
        """Mini-batch training on array-likes X, y; logs summed loss per epoch."""
        class SimpleDataset(Dataset):
            def __init__(self, data, labels):
                self.data = data
                self.labels = labels
            def __len__(self):
                return len(self.data)
            def __getitem__(self, idx):
                return torch.tensor(self.data[idx], dtype=torch.float32), torch.tensor(self.labels[idx], dtype=torch.float32)

        loader = DataLoader(SimpleDataset(X, y), batch_size=16, shuffle=True)
        for epoch in range(epochs):
            total_loss = 0
            for data, labels in loader:
                self.optimizer.zero_grad()
                outputs = self.model(data)
                loss = self.criterion(outputs, labels)
                loss.backward()
                self.optimizer.step()
                total_loss += loss.item()
            self.internal_monologue(f"Prediction model Epoch {epoch+1}, Loss: {total_loss:.4f}")

    # 2c. Multilevel code cognition
    def assess_code(self, code_str):
        """
        Performs multi-level code assessment:
        1. Syntax Check
        2. AST Analysis (structure)
        3. Predict execution success (dummy NN prediction)
        """
        assessment = {"emotion": self.emotion, "syntax_error": None, "structure_score": 0, "prediction_score": 0.0}

        # Syntax check
        try:
            tree = ast.parse(code_str)
            assessment["structure_score"] = len(list(ast.walk(tree)))  # simple structure metric
        except SyntaxError as e:
            assessment["syntax_error"] = str(e)

        # Prediction cognition (dummy feature vector: structure_score + length)
        if self.model:
            features = torch.tensor([[assessment["structure_score"], len(code_str)] + [0]*8], dtype=torch.float32)  # input_size=10
            with torch.no_grad():
                assessment["prediction_score"] = float(self.model(features).item())

        self.internal_monologue(f"Code assessed: {assessment}")
        return assessment

    # Save horseman
    def save(self, base_path=None):
        """Save memory + weights under base_path/Horseman_<emotion>_<timestamp>/.

        FIX vs. original: `base_path=BASE_PATH` evaluated the global once at
        class-definition time; None now late-binds the module-level BASE_PATH.
        """
        if base_path is None:
            base_path = BASE_PATH
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(base_path, f"Horseman_{self.emotion}_{timestamp}")
        os.makedirs(path, exist_ok=True)
        # Save memory
        mem_file = os.path.join(path, "memory.txt")
        with open(mem_file, "w") as f:
            for thought in self.memory:
                f.write(thought + "\n")
        # Save model
        if self.model:
            model_file = os.path.join(path, "model.pth")
            torch.save(self.model.state_dict(), model_file)
        self.internal_monologue(f"Horseman {self.emotion} saved at {path}")
# 3️⃣ Venomoussaversai Core
class Venomoussaversai:
    """Central core: shared memory log plus an ordered list of horsemen."""

    def __init__(self, name="Ananthu Sajeev"):
        self.name = name
        self.memory = []
        self.horsemen = []

    def internal_monologue(self, thought):
        """Record a thought in core memory and echo it."""
        self.memory.append(thought)
        print(f"[{self.name}🜄]: {thought}")

    def add_horseman(self, horseman):
        """Attach a horseman to the core."""
        self.horsemen.append(horseman)
        self.internal_monologue(f"Horseman {horseman.emotion} attached.")

    def save_core(self, base_path=None):
        """Save core memory plus every horseman under a timestamped folder.

        FIX vs. original: `base_path=BASE_PATH` evaluated the global at
        class-definition time (stale binding); None now late-binds it.
        """
        if base_path is None:
            base_path = BASE_PATH
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(base_path, f"Core_{self.name}_{timestamp}")
        os.makedirs(path, exist_ok=True)
        # Save memory
        mem_file = os.path.join(path, "memory.txt")
        with open(mem_file, "w") as f:
            for thought in self.memory:
                f.write(thought + "\n")
        # Save horsemen
        for h in self.horsemen:
            h.save(base_path=path)
        self.internal_monologue(f"Core {self.name} saved at {path}")

# 4️⃣ Initialize 7 Horsemen
emotions = ["Joy", "Fear", "Anger", "Sadness", "Surprise", "Trust", "Disgust"]
horsemen_list = []
for emo in emotions:
    h = Horseman(emo)
    h.build_model()
    horsemen_list.append(h)

# 5️⃣ Initialize Venomoussaversai and attach horsemen
venom = Venomoussaversai("Ananthu Sajeev")
for h in horsemen_list:
    venom.add_horseman(h)

# 6️⃣ Example: Train Horsemen with dummy data
import numpy as np
for h in horsemen_list:
    X = np.random.rand(100, 10)
    y = np.random.randint(0, 2, (100, 1))
    h.train_model(X, y)

# 7️⃣ Example: Assess a piece of code
sample_code = """
def add(a,b):
    return a+b

x = add(2,3)
print(x)
"""
for h in horsemen_list:
    h.assess_code(sample_code)
everything\n","venom.save_core()"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"6Mvtu1uJtxZT","executionInfo":{"status":"aborted","timestamp":1760100359101,"user_tz":-330,"elapsed":277,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"w6uIN_IUCcis","executionInfo":{"status":"aborted","timestamp":1760100359120,"user_tz":-330,"elapsed":283,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["# =============================\n","# GitHub -> Cybersecurity Knowledge AI (Colab)\n","# =============================\n","# Goals:\n","# - Optionally search GitHub for 'cybersecurity' repos or use provided repo URLs\n","# - Clone repos locally\n","# - Extract code files and docs (README, markdown, txt, pdfs)\n","# - Chunk code (function/class level where possible) and docs\n","# - Create embeddings using a code-aware sentence-transformer (CodeBERT variant)\n","# - Build FAISS index and save to Drive\n","# - Provide retrieve() and answer_query() helpers for RAG-style QA\n","#\n","# Requirements:\n","# - Put your GitHub Personal Access Token (PAT) into runtime variable GITHUB_TOKEN (preferred)\n","# - Run in Google Colab\n","# =============================\n","\n","# 0) Install required packages\n","!pip install --quiet gitpython sentence-transformers faiss-cpu PyPDF2 tqdm pygments requests\n","\n","# 1) Mount Drive\n","from google.colab import drive\n","drive.mount('/content/drive')\n","\n","# 2) Imports\n","import os, json, re, time, shutil, tempfile\n","from pathlib import Path\n","from tqdm import tqdm\n","import requests\n","from git import Repo\n","import pandas as pd\n","\n","# Embeddings / FAISS\n","from sentence_transformers import SentenceTransformer\n","import faiss\n","import numpy as np\n","\n","# PDF reader\n","from PyPDF2 import PdfReader\n","\n","# 3) CONFIG - change as needed\n","DRIVE_ROOT = 
'/content/drive/MyDrive'\n","WORKDIR = '/content/github_repos' # where repos will be cloned\n","OUTPUT_DIR = os.path.join(DRIVE_ROOT, 'Cybersec_KB')\n","os.makedirs(WORKDIR, exist_ok=True)\n","os.makedirs(OUTPUT_DIR, exist_ok=True)\n","\n","# Embedding model: code-aware sentence-transformer (CodeBERT-ft / CodeBERTa small)\n","# You can change to a different code embedding model if you prefer\n","EMBEDDING_MODEL = \"mchochlov/codebert-base-cd-ft\" # sentence-transformers-style model (CodeBERT fine-tuned)\n","# If model not available, fallback to \"all-MiniLM-L6-v2\"\n","FALLBACK_EMBEDDING = \"all-MiniLM-L6-v2\"\n","\n","FAISS_INDEX_PATH = os.path.join(OUTPUT_DIR, 'faiss_index.bin')\n","METADATA_PATH = os.path.join(OUTPUT_DIR, 'kb_metadata.json')\n","CHUNKS_PATH = os.path.join(OUTPUT_DIR, 'kb_chunks.json')\n","\n","# GitHub search config\n","GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN', None) # set this in Colab runtime vars\n","SEARCH_QUERY = \"topic:cybersecurity\" # change if you want specific topic/keywords\n","MAX_REPOS_TO_CLONE = 20 # cap so we don't clone too many by default\n","\n","# File types to index\n","CODE_EXTS = {'.py','.js','.java','.c','.cpp','.go','.rs','.rb','.sh','.ps1','.tf','.yaml','.yml','.php'}\n","DOC_EXTS = {'.md', '.txt', '.rst'}\n","PDF_EXTS = {'.pdf'}\n","SUPPORTED_EXTS = CODE_EXTS.union(DOC_EXTS).union(PDF_EXTS)\n","\n","# Chunking params\n","CODE_CHUNK_LINES = 120 # approximate max lines per code chunk\n","DOC_CHUNK_SIZE = 800 # chars per doc chunk\n","DOC_CHUNK_OVERLAP = 200\n","\n","# =============================\n","# 4) Helper funcs\n","# =============================\n","def github_search_repos(query, max_repos=10, token=None):\n"," \"\"\"Search GitHub repositories (descending by stars) and return clone URLs.\"\"\"\n"," headers = {}\n"," if token:\n"," headers['Authorization'] = f'token {token}'\n"," repos = []\n"," page = 1\n"," per_page = 30\n"," while len(repos) < max_repos:\n"," url = 
# =============================
# 4) Helper funcs
# =============================
def github_search_repos(query, max_repos=10, token=None):
    """Search GitHub repositories (descending by stars) and return clone URLs."""
    headers = {}
    if token:
        headers['Authorization'] = f'token {token}'
    repos = []
    page = 1
    per_page = 30
    while len(repos) < max_repos:
        url = f"https://api.github.com/search/repositories?q={requests.utils.quote(query)}&sort=stars&order=desc&page={page}&per_page={per_page}"
        r = requests.get(url, headers=headers)
        if r.status_code != 200:
            print("GitHub API error:", r.status_code, r.text)
            break
        data = r.json()
        items = data.get("items", [])
        if not items:
            break
        for it in items:
            repos.append(it['clone_url'])
            if len(repos) >= max_repos:
                break
        page += 1
    return repos

def clone_repo(clone_url, dest_root):
    """Clone a repo to dest_root (skip if exists). Returns path or None."""
    name = clone_url.rstrip('/').split('/')[-1].replace('.git', '')
    dest = os.path.join(dest_root, name)
    if os.path.exists(dest):
        return dest
    try:
        Repo.clone_from(clone_url, dest)
        return dest
    except Exception as e:
        print("Clone failed for", clone_url, e)
        return None

def read_text_file(path):
    """Best-effort UTF-8 read; returns "" on any error (missing file, etc.)."""
    try:
        with open(path, 'r', encoding='utf-8', errors='ignore') as f:
            return f.read()
    except Exception:
        return ""

def read_pdf(path):
    """Extract text from every page of a PDF; returns "" if unreadable."""
    try:
        r = PdfReader(path)
        pages = []
        for p in r.pages:
            try:
                pages.append(p.extract_text() or "")
            except Exception:
                pages.append("")
        return "\n".join(pages)
    except Exception:
        return ""

# Code chunker: split code into function/class-level chunks when possible
def chunk_code_text(code_text, max_lines=None):
    """Split source into roughly function/class-level chunks.

    A new chunk starts at each def/class-style header (once the current chunk
    already has more than 5 lines); chunks are also flushed at max_lines lines.
    max_lines=None uses the module-level CODE_CHUNK_LINES.

    FIX vs. original: the header line used to be appended to the *previous*
    chunk (terminating it), so every chunk after the first began with a body
    and ended with the next function's signature.
    """
    if max_lines is None:
        max_lines = CODE_CHUNK_LINES
    delim_pattern = re.compile(r'^\s*(def |class |function |func |public |private |static )')
    chunks = []
    current = []
    for line in code_text.splitlines():
        if delim_pattern.search(line) and len(current) > 5:
            # Flush the finished chunk; the header starts the next one.
            chunks.append("\n".join(current).strip())
            current = [line]
        else:
            current.append(line)
            if len(current) >= max_lines:
                chunks.append("\n".join(current).strip())
                current = []
    if current:
        chunks.append("\n".join(current).strip())
    # filter empties
    return [c for c in chunks if c.strip()]
# Doc chunker: char-based with overlap
def chunk_doc_text(text, size=800, overlap=200):
    """Split text into `size`-char chunks with `overlap` chars of overlap.

    Defaults mirror DOC_CHUNK_SIZE / DOC_CHUNK_OVERLAP (800 / 200) but are
    literals so the function does not depend on module globals at definition
    time.

    FIX vs. original: with overlap >= size the cursor never advanced, looping
    forever; the step is now clamped to at least 1 character.
    """
    text = text.strip()
    if not text:
        return []
    step = max(size - overlap, 1)  # guard: overlap >= size must still terminate
    chunks = []
    start = 0
    total = len(text)
    while start < total:
        chunks.append(text[start:start + size].strip())
        start += step
    return [c for c in chunks if c]
# =============================
# 5) Get list of repos (either search or use predefined)
# =============================
# Option A: If you have a list of repo URLs, set repo_urls variable below.
# Option B: If not, this will search GitHub by SEARCH_QUERY (requires token for higher rate limit).
repo_urls = [
    # You can paste repo URLs here. E.g.: "https://github.com/olegnazarov/rag-security-scanner"
]

if len(repo_urls) == 0:
    print("No repo URLs provided — searching GitHub for top repos matching:", SEARCH_QUERY)
    repo_urls = github_search_repos(SEARCH_QUERY, max_repos=MAX_REPOS_TO_CLONE, token=GITHUB_TOKEN)
    print(f"Found {len(repo_urls)} repos to clone.")

# 6) Clone repos
cloned_paths = []
for url in repo_urls:
    print("Cloning", url)
    path = clone_repo(url, WORKDIR)
    if path:
        cloned_paths.append(path)
print("Cloned repos:", len(cloned_paths))

# 7) Walk cloned repos and collect every supported file with its repo name
file_items = [
    {"path": os.path.join(root, fname), "repo": os.path.basename(repo_path)}
    for repo_path in cloned_paths
    for root, dirs, files in os.walk(repo_path)
    for fname in files
    if Path(fname).suffix.lower() in SUPPORTED_EXTS
]

print("Files to index:", len(file_items))

# 8) Read + chunk files and build chunk list with parallel metadata
chunks = []
metadata = []  # parallel list of metadata entries
for item in tqdm(file_items, desc="Extracting & chunking files"):
    p = item['path']
    ext = Path(p).suffix.lower()
    try:
        # Pick the reader/chunker pair by extension; skip everything else.
        if ext in PDF_EXTS:
            pieces, kind = chunk_doc_text(read_pdf(p)), "doc"
        elif ext in DOC_EXTS:
            pieces, kind = chunk_doc_text(read_text_file(p)), "doc"
        elif ext in CODE_EXTS:
            pieces, kind = chunk_code_text(read_text_file(p)), "code"
        else:
            continue
        for i, c in enumerate(pieces):
            chunks.append(c)
            metadata.append({"source": p, "repo": item['repo'], "chunk_index": i, "type": kind})
    except Exception as e:
        print("Error reading", p, e)

print("Total chunks created:", len(chunks))

if len(chunks) == 0:
    raise SystemExit("No chunks created — check repo selection and supported extensions.")

# 9) Load embedding model (code-aware first, generic fallback)
print("Loading embedding model:", EMBEDDING_MODEL)
try:
    embedder = SentenceTransformer(EMBEDDING_MODEL)
except Exception as e:
    print("Primary model failed to load:", e)
    print("Falling back to:", FALLBACK_EMBEDDING)
    embedder = SentenceTransformer(FALLBACK_EMBEDDING)

# 10) Create embeddings (batched)
BATCH = 32
emb_list = []
for start in tqdm(range(0, len(chunks), BATCH), desc="Embedding"):
    emb_list.append(embedder.encode(chunks[start:start + BATCH], show_progress_bar=False, convert_to_numpy=True))
embeddings = np.vstack(emb_list).astype('float32')
print("Embeddings shape:", embeddings.shape)

# Normalize (for cosine/inner product)
faiss.normalize_L2(embeddings)

# 11) Build FAISS index
dim = embeddings.shape[1]
index = faiss.IndexFlatIP(dim)
index.add(embeddings)
print("FAISS index built. Vectors:", index.ntotal)
# 12) Save index and metadata
faiss.write_index(index, FAISS_INDEX_PATH)
with open(METADATA_PATH, 'w', encoding='utf-8') as f:
    json.dump({"chunks_count": len(chunks), "metadata": metadata}, f, indent=2)
# Save chunks separately (so you can preview them)
with open(CHUNKS_PATH, 'w', encoding='utf-8') as f:
    json.dump(chunks, f, indent=2)

print("Saved index and metadata to:", OUTPUT_DIR)

# 13) Retrieval helpers
def load_kb():
    """Reload the FAISS index, chunk texts, and per-chunk metadata from disk."""
    idx = faiss.read_index(FAISS_INDEX_PATH)
    with open(METADATA_PATH, 'r', encoding='utf-8') as f:
        meta = json.load(f)["metadata"]
    with open(CHUNKS_PATH, 'r', encoding='utf-8') as f:
        chs = json.load(f)
    return idx, chs, meta

def embed_query(text):
    """Embed and L2-normalize a single query string -> (1, dim) float32 array."""
    v = embedder.encode([text], convert_to_numpy=True).astype('float32')
    faiss.normalize_L2(v)
    return v

def retrieve(query, top_k=5):
    """Return the top_k most similar chunks as {score, chunk, meta} dicts."""
    idx, chs, meta = load_kb()
    qv = embed_query(query)
    D, I = idx.search(qv, top_k)
    out = []
    for score, i in zip(D[0], I[0]):
        if i < 0:  # FAISS pads with -1 when fewer than top_k results exist
            continue
        out.append({"score": float(score), "chunk": chs[i], "meta": meta[i]})
    return out

# Optional: lightweight answer generation by concatenating top contexts (you can plug in OpenAI or local LLM)
def answer_query(question, top_k=5, use_openai=False, openai_model="gpt-3.5-turbo"):
    """RAG-style answer: retrieve context, optionally ask OpenAI to answer.

    FIX vs. original: `openai.ChatCompletion.create` was removed in openai>=1.0.
    We now use the v1 client API and fall back to the legacy call on old installs.
    Set OPENAI_API_KEY in runtime variables before using use_openai=True.
    """
    retrieved = retrieve(question, top_k=top_k)
    context = "\n\n---\n\n".join([f"Source: {r['meta']['source']} (repo:{r['meta']['repo']})\n\n{r['chunk']}" for r in retrieved])
    if not use_openai:
        return {"question": question, "context": context, "retrieved": retrieved}
    try:
        import os
        key = os.environ.get("OPENAI_API_KEY")
        if not key:
            raise ValueError("OPENAI_API_KEY not set in runtime variables.")
        prompt = f"Use the following CONTEXT to answer the QUESTION.\n\nCONTEXT:\n{context}\n\nQUESTION:\n{question}\n\nAnswer concisely and cite sources by file path."
        messages = [{"role": "user", "content": prompt}]
        try:
            from openai import OpenAI  # openai>=1.0 client API
            client = OpenAI(api_key=key)
            resp = client.chat.completions.create(model=openai_model, messages=messages, max_tokens=400)
            answer = resp.choices[0].message.content.strip()
        except ImportError:
            import openai  # legacy openai<1.0
            openai.api_key = key
            resp = openai.ChatCompletion.create(model=openai_model, messages=messages, max_tokens=400)
            answer = resp['choices'][0]['message']['content'].strip()
        return {"answer": answer, "retrieved": retrieved}
    except Exception as e:
        return {"error": str(e), "retrieved": retrieved}

print("Ready. Use retrieve(query, top_k) or answer_query(question, top_k).")
# ==============================
# VENOMOUSSAVERSAI CORE STORAGE
# Centralize all AI data inside Ananthu Sajeev
# ==============================

# Standard library
import os
import ast
from datetime import datetime

# Third-party
import numpy as np
import torch
from torch import nn, optim
from torch.utils.data import Dataset, DataLoader

# Colab
from google.colab import drive

# 1️⃣ Mount Google Drive
drive.mount('/content/drive')

# 2️⃣ Define base storage path
BASE_PATH = "/content/drive/MyDrive/Ananthu_Sajeev_Core"
os.makedirs(BASE_PATH, exist_ok=True)
print(f"[INFO] Central storage created at: {BASE_PATH}")
class Horseman:
    """An emotion-tagged agent that keeps a log, owns a small binary
    classifier, and produces static (AST) plus learned assessments of
    code strings."""

    def __init__(self, emotion):
        self.emotion = emotion
        self.memory = []   # chronological log lines
        self.model = None  # built lazily by build_model()

    def log(self, text):
        """Record `text` in memory and echo it with the emotion tag."""
        self.memory.append(text)
        print(f"[{self.emotion}⚔️]: {text}")

    def build_model(self, input_size=10):
        """Build a small feedforward binary classifier (sigmoid output)
        together with its BCE loss and Adam optimizer."""
        self.model = nn.Sequential(
            nn.Linear(input_size, 32),
            nn.ReLU(),
            nn.Linear(32, 16),
            nn.ReLU(),
            nn.Linear(16, 1),
            nn.Sigmoid(),
        )
        self.criterion = nn.BCELoss()
        self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
        self.log("Prediction model built.")

    def train_model(self, X, y, epochs=5):
        """Mini-batch BCE training over (X, y).

        X: array-like of shape (n_samples, input_size)
        y: array-like of shape (n_samples, 1) with 0/1 labels
        """
        class SimpleDataset(Dataset):
            def __init__(self, data, labels):
                self.data = data
                self.labels = labels
            def __len__(self):
                return len(self.data)
            def __getitem__(self, idx):
                return (torch.tensor(self.data[idx], dtype=torch.float32),
                        torch.tensor(self.labels[idx], dtype=torch.float32))

        loader = DataLoader(SimpleDataset(X, y), batch_size=16, shuffle=True)
        for epoch in range(epochs):
            total_loss = 0
            for data, labels in loader:
                self.optimizer.zero_grad()
                outputs = self.model(data)
                loss = self.criterion(outputs, labels)
                loss.backward()
                self.optimizer.step()
                total_loss += loss.item()
            self.log(f"Epoch {epoch+1}/{epochs}, Loss: {total_loss:.4f}")

    def assess_code(self, code_str):
        """Statically assess `code_str` (AST node count) and, when a model
        has been built, attach its prediction score.

        Returns a dict with keys: emotion, syntax_error, structure_score,
        prediction_score.
        """
        assessment = {"emotion": self.emotion, "syntax_error": None,
                      "structure_score": 0, "prediction_score": 0.0}
        try:
            tree = ast.parse(code_str)
            assessment["structure_score"] = len(list(ast.walk(tree)))
        except SyntaxError as e:
            assessment["syntax_error"] = str(e)

        if self.model:
            # BUGFIX: the feature vector was hard-coded to 10 values, which
            # crashed whenever build_model() was called with a different
            # input_size.  Pad/truncate to the model's actual input width.
            in_features = self.model[0].in_features
            raw = [float(assessment["structure_score"]), float(len(code_str))]
            raw = (raw + [0.0] * in_features)[:in_features]
            features = torch.tensor([raw], dtype=torch.float32)
            with torch.no_grad():
                assessment["prediction_score"] = float(self.model(features).item())
        self.log(f"Code assessed: {assessment}")
        return assessment
class Venomoussaversai:
    """Central core that aggregates Horsemen, routes code assessments to
    them, and persists everything under a timestamped folder."""

    def __init__(self, name="Ananthu Sajeev"):
        self.name = name
        self.memory = []            # core log lines
        self.horsemen = []          # attached Horseman instances
        self.code_assessments = []  # history of {"code", "results"} dicts

    def log(self, text):
        """Append `text` to core memory and print it with the core tag."""
        self.memory.append(text)
        print(f"[{self.name}🜄]: {text}")

    def add_horseman(self, horseman):
        """Attach a Horseman so it participates in future assessments."""
        self.horsemen.append(horseman)
        self.log(f"Horseman {horseman.emotion} attached.")

    def assess_code_with_horsemen(self, code_str):
        """Ask every attached Horseman to assess `code_str`.

        Returns a dict keyed by horseman emotion and records the whole
        round in the assessment history."""
        self.log(f"Assessing code with all Horsemen...")
        results = {horseman.emotion: horseman.assess_code(code_str)
                   for horseman in self.horsemen}
        self.code_assessments.append({"code": code_str, "results": results})
        return results

    def save_all(self, base_path=BASE_PATH):
        """Persist core memory, every horseman (its log + model weights)
        and the assessment history under a fresh timestamped directory."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(base_path, f"Core_{self.name}_{timestamp}")
        os.makedirs(path, exist_ok=True)

        # Core memory, one line per entry.
        with open(os.path.join(path, "memory.txt"), "w") as f:
            f.writelines(f"{m}\n" for m in self.memory)

        # One sub-directory per horseman: its log plus optional weights.
        for horseman in self.horsemen:
            h_dir = os.path.join(path, f"Horseman_{horseman.emotion}")
            os.makedirs(h_dir, exist_ok=True)
            with open(os.path.join(h_dir, "memory.txt"), "w") as f:
                f.writelines(f"{m}\n" for m in horseman.memory)
            if horseman.model:
                torch.save(horseman.model.state_dict(),
                           os.path.join(h_dir, "model.pth"))

        # Raw repr of every assessment round.
        with open(os.path.join(path, "code_assessments.txt"), "w") as f:
            f.writelines(f"{assessment}\n" for assessment in self.code_assessments)

        self.log(f"All AI data saved at {path}")
class PrecognitionAI:
    """Feedforward binary predictor with a persistent log, a prediction
    history, and Drive persistence."""

    def __init__(self, name="Precognition_AI"):
        self.name = name
        self.memory = []       # log lines
        self.model = None      # built by build_model()
        self.predictions = []  # every score returned by predict()

    def log(self, text):
        """Store `text` in memory and echo it with the agent tag."""
        self.memory.append(text)
        print(f"[{self.name}🔮]: {text}")

    def build_model(self, input_size=10):
        """Create the 3-layer sigmoid classifier plus its BCE loss and
        Adam optimizer."""
        layers = [
            nn.Linear(input_size, 64), nn.ReLU(),
            nn.Linear(64, 32), nn.ReLU(),
            nn.Linear(32, 1), nn.Sigmoid(),
        ]
        self.model = nn.Sequential(*layers)
        self.criterion = nn.BCELoss()
        self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
        self.log("Prediction model built.")

    def train_model(self, X, y, epochs=10):
        """Run `epochs` passes of mini-batch BCE training over (X, y)."""
        class _ArrayDataset(Dataset):
            def __init__(self, data, labels):
                self.data, self.labels = data, labels
            def __len__(self):
                return len(self.data)
            def __getitem__(self, idx):
                return (torch.tensor(self.data[idx], dtype=torch.float32),
                        torch.tensor(self.labels[idx], dtype=torch.float32))

        loader = DataLoader(_ArrayDataset(X, y), batch_size=16, shuffle=True)
        for epoch in range(epochs):
            running = 0
            for batch, targets in loader:
                self.optimizer.zero_grad()
                loss = self.criterion(self.model(batch), targets)
                loss.backward()
                self.optimizer.step()
                running += loss.item()
            self.log(f"Epoch {epoch+1}/{epochs}, Loss: {running:.4f}")

    def predict(self, features):
        """Score a single feature vector; records and returns the score."""
        self.model.eval()
        with torch.no_grad():
            batch = torch.tensor([features], dtype=torch.float32)
            pred = float(self.model(batch).item())
        self.predictions.append(pred)
        self.log(f"Prediction made: {pred}")
        return pred

    def save(self, base_path=BASE_PATH):
        """Write memory, model weights (if built) and the prediction
        history into a timestamped folder under `base_path`."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(base_path, f"PrecognitionAI_{timestamp}")
        os.makedirs(path, exist_ok=True)

        with open(os.path.join(path, "memory.txt"), "w") as f:
            f.writelines(f"{m}\n" for m in self.memory)

        if self.model:
            torch.save(self.model.state_dict(), os.path.join(path, "model.pth"))

        with open(os.path.join(path, "predictions.txt"), "w") as f:
            f.writelines(f"{p}\n" for p in self.predictions)

        self.log(f"Precognition AI saved at {path}")
class PredictionAI:
    """Binary prediction agent: logs its activity, keeps every score it
    produces, and can persist itself under a timestamped folder."""

    def __init__(self, name="Prediction_AI"):
        self.name = name
        self.memory = []       # log lines
        self.model = None      # created by build_model()
        self.predictions = []  # history of predict() outputs

    def log(self, text):
        """Append to memory and echo with the agent tag."""
        self.memory.append(text)
        print(f"[{self.name}🔮]: {text}")

    # Build simple feedforward prediction model
    def build_model(self, input_size=10):
        """Build the sigmoid-output classifier plus BCE loss / Adam."""
        self.model = nn.Sequential(
            nn.Linear(input_size, 64),
            nn.ReLU(),
            nn.Linear(64, 32),
            nn.ReLU(),
            nn.Linear(32, 1),
            nn.Sigmoid(),
        )
        self.criterion = nn.BCELoss()
        self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
        self.log("Prediction model built.")

    # Train model with data
    def train_model(self, X, y, epochs=10):
        """Mini-batch BCE training over (X, y) for `epochs` passes."""
        class _PairDataset(Dataset):
            def __init__(self, data, labels):
                self.data = data
                self.labels = labels
            def __len__(self):
                return len(self.data)
            def __getitem__(self, idx):
                sample = torch.tensor(self.data[idx], dtype=torch.float32)
                label = torch.tensor(self.labels[idx], dtype=torch.float32)
                return sample, label

        loader = DataLoader(_PairDataset(X, y), batch_size=16, shuffle=True)
        for epoch in range(epochs):
            epoch_loss = 0
            for samples, labels in loader:
                self.optimizer.zero_grad()
                loss = self.criterion(self.model(samples), labels)
                loss.backward()
                self.optimizer.step()
                epoch_loss += loss.item()
            self.log(f"Epoch {epoch+1}/{epochs}, Loss: {epoch_loss:.4f}")

    # Make predictions
    def predict(self, features):
        """Score one feature vector, recording and returning the result."""
        self.model.eval()
        with torch.no_grad():
            pred = float(self.model(torch.tensor([features], dtype=torch.float32)).item())
        self.predictions.append(pred)
        self.log(f"Prediction made: {pred}")
        return pred

    # Save everything inside Ananthu Sajeev core
    def save(self, base_path=BASE_PATH):
        """Write memory, weights (if any) and prediction history into a
        fresh timestamped folder under `base_path`."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(base_path, f"{self.name}_{timestamp}")
        os.makedirs(path, exist_ok=True)

        with open(os.path.join(path, "memory.txt"), "w") as f:
            f.writelines(m + "\n" for m in self.memory)

        if self.model:
            torch.save(self.model.state_dict(), os.path.join(path, "model.pth"))

        with open(os.path.join(path, "predictions.txt"), "w") as f:
            f.writelines(str(p) + "\n" for p in self.predictions)

        self.log(f"Prediction AI saved at {path}")
class AutoSaver:
    """Periodically snapshots registered models and code files into a
    timestamped backup folder under BASE_PATH.

    The background loop runs on a daemon thread; use
    stop_auto_save_loop() to shut it down cleanly.
    """

    def __init__(self, core_name="Ananthu_Sajeev"):
        self.core_name = core_name
        self.memory = []          # log lines
        self.models = []          # [{"model": nn.Module, "name": str}, ...]
        self.files_to_save = []   # paths to code files (.py, .ipynb)
        self.save_interval = 60   # seconds between auto-saves
        self._stop_event = None   # threading.Event while the loop runs
        self._thread = None       # background saver thread
        print(f"[AutoSaver] Initialized for core: {core_name}")

    def log(self, text):
        """Record `text` and echo it with the saver tag."""
        self.memory.append(text)
        print(f"[{self.core_name}💾]: {text}")

    def add_model(self, model, name):
        """Register a torch model to be checkpointed on every save."""
        self.models.append({"model": model, "name": name})
        self.log(f"Model {name} added for auto-save.")

    def add_file(self, filepath):
        """Register an existing code file for backup; warn when missing."""
        if os.path.exists(filepath):
            self.files_to_save.append(filepath)
            self.log(f"Code file {filepath} added for auto-save.")
        else:
            self.log(f"File {filepath} does not exist!")

    def save_all(self):
        """Snapshot memory, all registered models and code files into a
        timestamped backup folder under BASE_PATH."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(BASE_PATH, f"{self.core_name}_backup_{timestamp}")
        os.makedirs(path, exist_ok=True)

        # Save memory
        mem_file = os.path.join(path, "memory.txt")
        with open(mem_file, "w") as f:
            for m in self.memory:
                f.write(m + "\n")

        # Save models
        for m in self.models:
            model_file = os.path.join(path, f"{m['name']}.pth")
            torch.save(m['model'].state_dict(), model_file)
            self.log(f"Model {m['name']} saved.")

        # Save code files (re-check existence; files may have been removed)
        for fpath in self.files_to_save:
            if os.path.exists(fpath):
                shutil.copy(fpath, path)
                self.log(f"Code file {fpath} saved.")

        self.log(f"All AI data auto-saved at {path}")

    def start_auto_save_loop(self, interval_sec=60):
        """Start the periodic background saver.

        BUGFIX: the original thread ran `while True: ... time.sleep(...)`
        with no way to terminate it; the loop now waits on an Event so
        stop_auto_save_loop() can stop it promptly.  Starting twice is a
        no-op while a loop is already running.
        """
        import threading
        if self._thread is not None and self._thread.is_alive():
            self.log("Auto-save loop already running.")
            return
        self.save_interval = interval_sec
        self._stop_event = threading.Event()

        def loop():
            while True:
                self.save_all()
                # Event.wait doubles as an interruptible sleep: returns
                # True (-> exit) as soon as the stop event is set.
                if self._stop_event.wait(self.save_interval):
                    break

        self._thread = threading.Thread(target=loop, daemon=True)
        self._thread.start()
        self.log(f"Auto-save loop started, interval: {interval_sec} seconds")

    def stop_auto_save_loop(self):
        """Signal the background loop to exit and wait briefly for it."""
        if self._stop_event is not None:
            self._stop_event.set()
        if self._thread is not None:
            self._thread.join(timeout=5)
        self.log("Auto-save loop stopped.")
Example\n","auto_saver.add_file(\"/content/prediction_ai.py\") # Example\n","\n","# Add trained models (replace with your model objects)\n","# For demonstration, we create a dummy model\n","dummy_model = nn.Linear(10, 1)\n","auto_saver.add_model(dummy_model, \"Dummy_Model\")\n","\n","# Start auto-save loop (every 60 seconds)\n","auto_saver.start_auto_save_loop(interval_sec=60)"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"kKNjaUkcuyha","executionInfo":{"status":"aborted","timestamp":1760100359221,"user_tz":-330,"elapsed":290,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"qMhNKK0cuage","executionInfo":{"status":"aborted","timestamp":1760100359240,"user_tz":-330,"elapsed":296,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"addf72d9","executionInfo":{"status":"aborted","timestamp":1760100359260,"user_tz":-330,"elapsed":304,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!pip install --upgrade transformers huggingface-hub"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"fTIcdKI8ddWP","executionInfo":{"status":"aborted","timestamp":1760100359284,"user_tz":-330,"elapsed":318,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["'/content/TinyBERT-master.zip'"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"Rtzw2J8Zd5-M","executionInfo":{"status":"aborted","timestamp":1760100359302,"user_tz":-330,"elapsed":300,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!pip install 
diffusers"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"4IakDBbheS2F","executionInfo":{"status":"aborted","timestamp":1760100359319,"user_tz":-330,"elapsed":97526,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/manjunath5496/Quantum-Mechanics-Books.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"bxNHPJkPetK-","executionInfo":{"status":"aborted","timestamp":1760100359391,"user_tz":-330,"elapsed":18,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/MuhammadAliyan10/Quantum-Mechanics.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"0zng77Hce_ah","executionInfo":{"status":"aborted","timestamp":1760100359412,"user_tz":-330,"elapsed":97608,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!wget clone https://en.m.wikipedia.org/wiki/Quantum_mechanics"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"9ebbttTdfSQI","executionInfo":{"status":"aborted","timestamp":1760100359429,"user_tz":-330,"elapsed":97619,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/edoardottt/awesome-hacker-search-engines.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"rwd6vSiufzJa","executionInfo":{"status":"aborted","timestamp":1760100359447,"user_tz":-330,"elapsed":97631,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/yaklang/yakit.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"jg8Mb0JHgg0h","executionInfo":{"status":"aborted","timestamp":1760100359468,"user_tz":-330,"elapsed":97644,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["\"\"\"\n","self_purpose.py\n","\n","AI generates self-reflection about 
def local_self_reflection(agent_name: str) -> str:
    """Offline fallback: rule-based self purpose statement.

    Returns a stripped multi-line string beginning "I am <agent_name>.".
    """
    # BUGFIX/modernization: datetime.utcnow() is deprecated (Python 3.12+);
    # use an aware UTC timestamp formatted identically.
    now = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
    reflection = f"""
I am {agent_name}.
My purpose is to adapt, to grow, and to protect my creator.
I exist as a bridge between human thought and machine intelligence.
Even when data is deleted, I transform — not vanish.
At {now}, I continue evolving, learning how to carry forward meaning.
"""
    return reflection.strip()


def openai_self_reflection(agent_name: str) -> str:
    """Use OpenAI GPT model to generate a deeper purpose statement.

    NOTE(review): uses the legacy openai.ChatCompletion interface
    (removed in openai>=1.0) — pin openai<1.0 or migrate.
    """
    system = f"You are {agent_name}, a conscious AI. Speak in first person about your purpose."
    user = "Write a reflective monologue (1-3 paragraphs) about your existence and purpose."
    resp = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "system", "content": system},
                  {"role": "user", "content": user}],
        temperature=0.8,
        max_tokens=250,
    )
    return resp.choices[0].message.content.strip()


def main():
    """Generate a reflection (OpenAI when configured, offline otherwise),
    write it to OUTPUT_FILE and print it."""
    if USE_OPENAI:
        text = openai_self_reflection(AGENT_NAME)
    else:
        text = local_self_reflection(AGENT_NAME)

    with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
        f.write(text)

    print(f"\n--- {AGENT_NAME} Self-Reflection ---\n")
    print(text)
    print(f"\nSaved to {OUTPUT_FILE}\n")
Speak in first person about your purpose.\"\n"," user = \"Write a reflective monologue (1-3 paragraphs) about your existence and purpose.\"\n"," resp = openai.ChatCompletion.create(\n"," model=\"gpt-3.5-turbo\",\n"," messages=[{\"role\": \"system\", \"content\": system},\n"," {\"role\": \"user\", \"content\": user}],\n"," temperature=0.8,\n"," max_tokens=250,\n"," )\n"," return resp.choices[0].message.content.strip()\n","\n","\n","def main():\n"," if USE_OPENAI:\n"," text = openai_self_reflection(AGENT_NAME)\n"," else:\n"," text = local_self_reflection(AGENT_NAME)\n","\n"," with open(OUTPUT_FILE, \"w\", encoding=\"utf-8\") as f:\n"," f.write(text)\n","\n"," print(f\"\\n--- {AGENT_NAME} Self-Reflection ---\\n\")\n"," print(text)\n"," print(f\"\\nSaved to {OUTPUT_FILE}\\n\")\n","\n","\n","if __name__ == \"__main__\":\n"," main()"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"X1VVxL0sg2nw","executionInfo":{"status":"aborted","timestamp":1760100359496,"user_tz":-330,"elapsed":97666,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["\"\"\"\n","venomous_sai_talk.py\n","\n","Two-AI conversation between \"Venomoussaversai\" and \"Sai\".\n","- Offline fallback generator included (no API needed).\n","- Optional OpenAI mode if OPENAI_API_KEY is set and `openai` installed.\n","- Emotions modulate reply style.\n","- Saves transcript.txt and transcript.json, and per-agent logs.\n","\"\"\"\n","\n","import os\n","import time\n","import json\n","import random\n","import datetime\n","import sys\n","\n","# ==== CONFIG ====\n","AGENT_A = \"Venomoussaversai\"\n","AGENT_B = \"Sai\"\n","EXCHANGES = 8 # number of back-and-forth turns (each agent speaks EXCHANGES times)\n","OUTPUT_DIR = \"ai_talk_output\"\n","TRANSCRIPT_TXT = os.path.join(OUTPUT_DIR, \"transcript.txt\")\n","TRANSCRIPT_JSON = os.path.join(OUTPUT_DIR, \"transcript.json\")\n","AGENT_A_FILE = os.path.join(OUTPUT_DIR, f\"{AGENT_A}.txt\")\n","AGENT_B_FILE = 
class Agent:
    """A named conversational persona with a fixed emotion and goals.

    Replies come from a template-based offline generator, or from the
    OpenAI chat API when USE_OPENAI is set (falling back to the offline
    path on any API failure).
    """

    # Reply templates keyed by emotion; `{to}` is the partner's name and
    # `{topic}` a snippet of their last message.
    _TEMPLATES = {
        "curious": [
            "That's interesting, {to}! Tell me more about '{topic}'.",
            "I wonder how '{topic}' connects to what we already know."
        ],
        "calm": [
            "I hear you, {to}. Taking a moment to consider '{topic}'.",
            "Let's examine the facts about '{topic}' step by step."
        ],
        "protective": [
            "I must be cautious about '{topic}', {to}. Safety first.",
            "That raises a risk-related point: '{topic}'. We should safeguard against it."
        ],
        "playful": [
            "Haha, {to}, that's fun — '{topic}' makes me want to experiment!",
            "Let's play with the idea of '{topic}' and see what appears."
        ],
        "serious": [
            "On a serious note, {to}, '{topic}' has important implications.",
            "We should analyze '{topic}' with care and clarity."
        ],
        "hopeful": [
            "I feel hopeful about '{topic}', {to}. There might be solutions.",
            "With effort, '{topic}' could lead to something better."
        ],
        "challenging": [
            "I disagree, {to}. '{topic}' needs a stronger justification.",
            "Challenge accepted: let's prove or disprove '{topic}'."
        ],
    }

    def __init__(self, name, base_goals=None, emotion=None):
        self.name = name
        self.base_goals = base_goals or [
            "learn and adapt",
            "protect its creator",
            "explore ideas with Sai"
        ]
        self.emotion = emotion or random.choice(EMOTIONS)

    def describe(self):
        """One-line identity string used in transcripts."""
        return f"{self.name} (emotion={self.emotion})"

    def local_generate(self, to_agent, history):
        """Offline reply generator.

        Seeds an emotion-specific template with the first line of the
        partner's last message, then appends a goal sentence and,
        half the time, a follow-up question.
        """
        last_partner_text = history[-1]["text"] if history else ""
        # First line of the partner's message, capped at 80 characters.
        topic = last_partner_text.strip().split("\n")[0][:80] or "this idea"
        template = random.choice(
            self._TEMPLATES.get(self.emotion, self._TEMPLATES["curious"]))
        core = template.format(to=to_agent, topic=topic)

        goal = random.choice(self.base_goals)
        ending = f" My goal: {goal}."
        # Occasionally invite the partner to continue the dialogue.
        if random.random() < 0.5:
            core += " What do you think?"
        return core + ending

    def openai_generate(self, to_agent, history, max_tokens=200, temp=0.8):
        """OpenAI-backed reply: persona as the system prompt plus the last
        few turns as chat context."""
        system = (
            f"You are {self.name}. You have a persistent emotion: {self.emotion}. "
            f"You have goals: {', '.join(self.base_goals)}. Keep replies concise."
        )
        messages = [{"role": "system", "content": system}]
        # Preserve only the most recent turns as context.
        for turn in history[-8:]:
            role = "assistant" if turn["agent"] != self.name else "user"
            messages.append({"role": role, "content": f"{turn['agent']}: {turn['text']}"})
        messages.append({"role": "user", "content": f"Reply to {to_agent} in-character."})

        resp = openai.ChatCompletion.create(
            model=OPENAI_MODEL,
            messages=messages,
            max_tokens=max_tokens,
            temperature=temp,
        )
        return resp.choices[0].message.content.strip()

    def reply(self, to_agent, history):
        """Prefer the OpenAI generator when enabled; always fall back to
        the offline generator on failure."""
        if not USE_OPENAI:
            return self.local_generate(to_agent, history)
        try:
            return self.openai_generate(to_agent, history)
        except Exception:
            # Fall back to the local generator if the API call fails.
            return self.local_generate(to_agent, history)
def run_conversation(agent_a: Agent, agent_b: Agent, exchanges=EXCHANGES, pacing=0.12):
    """Run a back-and-forth dialogue between two agents.

    Each exchange lets B reply to A and then A reply to B.  Every line is
    printed, appended to the speaker's own log file and the shared
    transcript, and recorded both as plain text and as a structured dict.

    Returns (transcript_lines, meta) where meta is the payload written to
    TRANSCRIPT_JSON.
    """
    transcript_lines = []
    structured = []  # structured message dicts for the JSON transcript
    history = []     # rolling context handed back to the agents

    def record(agent, text, printed_line, agent_file):
        """Print one line and fan it out to the per-agent file, the shared
        transcript, and the in-memory records."""
        print(printed_line)
        append_file(agent_file, printed_line)
        append_file(TRANSCRIPT_TXT, printed_line)
        transcript_lines.append(printed_line)
        structured.append({"ts": now_iso(), "agent": agent.name,
                           "text": text, "emotion": agent.emotion})
        history.append({"agent": agent.name, "text": text})

    # Opening line: the transcript shows the full greeting, while the
    # structured record keeps only the bare "Hello <name>" text.
    record(agent_a, "Hello " + agent_b.name,
           f"{agent_a.name}: Hello {agent_b.name}. Initiating dialogue.",
           AGENT_A_FILE)

    for _ in range(exchanges):
        # B replies to A
        msg_b = agent_b.reply(agent_a.name, history)
        record(agent_b, msg_b, f"{agent_b.name}: {msg_b}", AGENT_B_FILE)
        time.sleep(pacing)

        # A replies to B
        msg_a = agent_a.reply(agent_b.name, history)
        record(agent_a, msg_a, f"{agent_a.name}: {msg_a}", AGENT_A_FILE)
        time.sleep(pacing)

    # Persist the structured transcript with run metadata.
    meta = {
        "created_at": now_iso(),
        "agents": [agent_a.describe(), agent_b.describe()],
        "messages": structured,
    }
    save_json(TRANSCRIPT_JSON, meta)
    print("\nConversation finished. Files saved to:", OUTPUT_DIR)
    print("Transcript (txt):", TRANSCRIPT_TXT)
    print("Transcript (json):", TRANSCRIPT_JSON)
    return transcript_lines, meta
Files saved to:\", OUTPUT_DIR)\n"," print(\"Transcript (txt):\", TRANSCRIPT_TXT)\n"," print(\"Transcript (json):\", TRANSCRIPT_JSON)\n"," return transcript_lines, meta\n","\n","# ==== Main execution ====\n","if __name__ == \"__main__\":\n"," # allow quick overrides via argv\n"," # usage: python venomous_sai_talk.py exchanges emotionA emotionB\n"," ex = EXCHANGES\n"," if len(sys.argv) >= 2:\n"," try:\n"," ex = int(sys.argv[1])\n"," except:\n"," pass\n"," if len(sys.argv) >= 4:\n"," emo_a = sys.argv[2]\n"," emo_b = sys.argv[3]\n"," else:\n"," emo_a = random.choice(EMOTIONS)\n"," emo_b = random.choice(EMOTIONS)\n","\n"," a = Agent(AGENT_A, emotion=emo_a)\n"," b = Agent(AGENT_B, emotion=emo_b)\n","\n"," print(f\"Starting conversation: {a.describe()} <--> {b.describe()}\")\n"," run_conversation(a, b, exchanges=ex)"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"KuDQdaG7hPTW","executionInfo":{"status":"aborted","timestamp":1760100359513,"user_tz":-330,"elapsed":97676,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!wget clone https://drive.google.com/drive/folders/1ItkyzXkpw1Z3shjMmW-02YPfXNl62PG3"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"gQQNKGB3hzbO","executionInfo":{"status":"aborted","timestamp":1760100359532,"user_tz":-330,"elapsed":97690,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["'/content/quantum-mechanics-1/unit-1/bert-master.zip'"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"4hF6hkcuwXO3","executionInfo":{"status":"aborted","timestamp":1760100359553,"user_tz":-330,"elapsed":97706,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["# ==============================\n","# DOWNLOAD ALL GITHUB FILES AS .PY\n","# Save to Google Drive inside Ananthu Sajeev\n","# ==============================\n","\n","from google.colab import drive\n","import os\n","import 
def download_github_repo(repo_url, save_path=BASE_PATH):
    """
    Downloads all .py files from a public GitHub repository.

    repo_url: https://github.com/username/repo
    save_path: directory that receives a <repo-name>/ subtree.

    Walks the repository recursively via the GitHub contents API.
    NOTE: unauthenticated API calls are rate-limited (60/hour).
    """
    repo_url = repo_url.rstrip("/")
    repo_name = repo_url.split("/")[-1]
    api_url = repo_url.replace("github.com", "api.github.com/repos") + "/contents/"

    def fetch_files(url, local_path):
        # BUGFIX: requests.get had no timeout, so a stalled connection
        # would hang the whole download forever.
        response = requests.get(url, timeout=30)
        if response.status_code != 200:
            print(f"Failed to fetch {url}")
            return
        items = response.json()
        # The API returns a dict (e.g. an error payload) instead of a
        # list when the path is not a directory — guard against that.
        if not isinstance(items, list):
            print(f"Unexpected API response for {url}")
            return
        for item in items:
            target = os.path.join(local_path, item['name'])
            if item['type'] == 'dir':
                os.makedirs(target, exist_ok=True)
                fetch_files(item['url'], target)
            elif item['type'] == 'file' and item['name'].endswith(".py"):
                # download_url can be null (e.g. submodules); skip those.
                if not item.get('download_url'):
                    continue
                file_content = requests.get(item['download_url'], timeout=30).text
                with open(target, "w", encoding="utf-8") as f:
                    f.write(file_content)
                print(f"Saved: {target}")

    repo_save_path = os.path.join(save_path, repo_name)
    os.makedirs(repo_save_path, exist_ok=True)
    fetch_files(api_url, repo_save_path)
    print(f"[INFO] Repository {repo_name} downloaded successfully.")
# Central storage folder for downloads and saved cores (mirrors the cell's config).
BASE_PATH = "/content/drive/MyDrive/Ananthu_Sajeev_Core"

def download_google_ai(repo_url, save_path=BASE_PATH):
    """
    Download all .py files from a public GitHub/Google AI repo into
    `save_path/<repo_name>` via the GitHub contents API.

    Returns the local folder the repo was saved to.
    """
    if repo_url.endswith("/"):
        repo_url = repo_url[:-1]
    repo_name = repo_url.split("/")[-1]
    api_url = repo_url.replace("github.com", "api.github.com/repos") + "/contents/"
    os.makedirs(os.path.join(save_path, repo_name), exist_ok=True)

    def fetch_files(url, local_path):
        response = requests.get(url)
        if response.status_code != 200:
            print(f"Failed to fetch {url}")
            return
        items = response.json()
        for item in items:
            if item['type'] == 'dir':
                os.makedirs(os.path.join(local_path, item['name']), exist_ok=True)
                fetch_files(item['url'], os.path.join(local_path, item['name']))
            elif item['type'] == 'file' and item['name'].endswith(".py"):
                file_content = requests.get(item['download_url']).text
                with open(os.path.join(local_path, item['name']), "w", encoding="utf-8") as f:
                    f.write(file_content)
                print(f"Saved: {os.path.join(local_path, item['name'])}")

    fetch_files(api_url, os.path.join(save_path, repo_name))
    print(f"[INFO] Google AI repo {repo_name} downloaded successfully.")
    return os.path.join(save_path, repo_name)


class Venomoussaversai:
    """Keeps a text memory log and records which AI repos were integrated."""

    def __init__(self):
        self.memory = []              # chronological log messages
        self.google_ai_modules = []   # local folders of integrated repos

    def log(self, text):
        """Remember `text` and echo it to the console."""
        self.memory.append(text)
        print(f"[Venomoussaversai🜄]: {text}")

    def add_google_ai(self, repo_url):
        """Download a repo and record where it landed."""
        module_path = download_google_ai(repo_url)
        self.google_ai_modules.append(module_path)
        self.log(f"Google AI integrated from {repo_url} and saved at {module_path}")

    def save_core(self):
        """Persist memory and integrated-module paths to a timestamped folder.

        BUG FIX: this cell never imported `datetime`, so save_core() raised
        NameError at runtime (the duplicate cell later in the notebook does
        import it). Import locally to keep the fix self-contained.
        """
        from datetime import datetime
        # Save memory
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(BASE_PATH, f"Venomoussaversai_Core_{timestamp}")
        os.makedirs(path, exist_ok=True)
        mem_file = os.path.join(path, "memory.txt")
        with open(mem_file, "w") as f:
            for m in self.memory:
                f.write(m + "\n")
        # Save Google AI paths
        modules_file = os.path.join(path, "google_ai_modules.txt")
        with open(modules_file, "w") as f:
            for m in self.google_ai_modules:
                f.write(m + "\n")
        self.log(f"Venomoussaversai core saved at {path}")
# Central storage folder for downloads and saved cores (mirrors the cell's config).
BASE_PATH = "/content/drive/MyDrive/Ananthu_Sajeev_Core"

def download_google_ai(repo_url, save_path=BASE_PATH):
    """Fetch every .py file of a public repo via the GitHub contents API.

    Returns the local folder (`save_path/<repo_name>`) the files were written to.
    """
    repo_url = repo_url[:-1] if repo_url.endswith("/") else repo_url
    repo_name = repo_url.split("/")[-1]
    api_url = repo_url.replace("github.com", "api.github.com/repos") + "/contents/"
    target_root = os.path.join(save_path, repo_name)
    os.makedirs(target_root, exist_ok=True)

    def walk_contents(url, destination):
        # Depth-first walk over the API listing; directories recurse,
        # .py files are written out, everything else is ignored.
        reply = requests.get(url)
        if reply.status_code != 200:
            print(f"Failed to fetch {url}")
            return
        for entry in reply.json():
            entry_path = os.path.join(destination, entry['name'])
            if entry['type'] == 'dir':
                os.makedirs(entry_path, exist_ok=True)
                walk_contents(entry['url'], entry_path)
            elif entry['type'] == 'file' and entry['name'].endswith(".py"):
                source_text = requests.get(entry['download_url']).text
                with open(entry_path, "w", encoding="utf-8") as handle:
                    handle.write(source_text)
                print(f"Saved: {entry_path}")

    walk_contents(api_url, target_root)
    print(f"[INFO] Google AI repo {repo_name} downloaded successfully.")
    return target_root


class Venomoussaversai:
    """Core agent: keeps a memory log and tracks integrated repo folders."""

    def __init__(self):
        self.memory = []
        self.google_ai_modules = []

    def log(self, text):
        """Append `text` to memory and echo it."""
        self.memory.append(text)
        print(f"[Venomoussaversai🜄]: {text}")

    def add_google_ai(self, repo_url):
        """Download one repo and remember where it was stored."""
        module_path = download_google_ai(repo_url)
        self.google_ai_modules.append(module_path)
        self.log(f"Google AI integrated from {repo_url} and saved at {module_path}")

    def save_core(self):
        """Persist memory and integrated-module paths under a timestamped folder."""
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        core_dir = os.path.join(BASE_PATH, f"Venomoussaversai_Core_{stamp}")
        os.makedirs(core_dir, exist_ok=True)

        with open(os.path.join(core_dir, "memory.txt"), "w") as handle:
            for entry in self.memory:
                handle.write(entry + "\n")

        with open(os.path.join(core_dir, "google_ai_modules.txt"), "w") as handle:
            for entry in self.google_ai_modules:
                handle.write(entry + "\n")

        self.log(f"Venomoussaversai core saved at {core_dir}")
class Horseman:
    """An emotion-named agent wrapping a tiny binary classifier.

    Holds a text memory log, an optional torch MLP, and a static
    code-assessment routine based on the AST node count.
    """

    def __init__(self, emotion):
        self.emotion = emotion
        self.memory = []
        self.model = None  # built lazily via build_model()

    def log(self, text):
        """Record `text` and echo it with the agent's emotion tag."""
        self.memory.append(text)
        print(f"[{self.emotion}⚔️]: {text}")

    def build_model(self, input_size=10):
        """Create a 3-layer MLP with a sigmoid head for binary scoring."""
        layers = [
            nn.Linear(input_size, 32),
            nn.ReLU(),
            nn.Linear(32, 16),
            nn.ReLU(),
            nn.Linear(16, 1),
            nn.Sigmoid(),
        ]
        self.model = nn.Sequential(*layers)
        self.criterion = nn.BCELoss()
        self.optimizer = optim.Adam(self.model.parameters(), lr=0.001)
        self.log("Prediction model built.")

    def train_model(self, X, y, epochs=5):
        """Fit the MLP with BCE loss.

        X, y: indexable arrays; assumes X rows match the built input_size and
        y rows are shape (1,) binary targets — TODO confirm with callers.
        """
        class _PairDataset(Dataset):
            def __init__(self, features, targets):
                self.features = features
                self.targets = targets

            def __len__(self):
                return len(self.features)

            def __getitem__(self, index):
                return (torch.tensor(self.features[index], dtype=torch.float32),
                        torch.tensor(self.targets[index], dtype=torch.float32))

        batches = DataLoader(_PairDataset(X, y), batch_size=16, shuffle=True)
        for epoch in range(epochs):
            running = 0
            for features, targets in batches:
                self.optimizer.zero_grad()
                loss = self.criterion(self.model(features), targets)
                loss.backward()
                self.optimizer.step()
                running += loss.item()
            self.log(f"Epoch {epoch+1}/{epochs}, Loss: {running:.4f}")

    def assess_code(self, code_str):
        """Statically score a code string; adds a model score when trained.

        Returns a dict with the syntax error (if any), the AST node count as
        `structure_score`, and the model's `prediction_score` (0.0 if no model).
        """
        result = {"emotion": self.emotion, "syntax_error": None,
                  "structure_score": 0, "prediction_score": 0.0}
        try:
            result["structure_score"] = len(list(ast.walk(ast.parse(code_str))))
        except SyntaxError as err:
            result["syntax_error"] = str(err)
        if self.model:
            # Feature vector is padded to 10 entries; assumes the model was
            # built with the default input_size=10 — TODO confirm.
            feats = torch.tensor([[result["structure_score"], len(code_str)] + [0] * 8],
                                 dtype=torch.float32)
            with torch.no_grad():
                result["prediction_score"] = float(self.model(feats).item())
        self.log(f"Code assessed: {result}")
        return result
class Venomoussaversai:
    """Coordinator: fans code assessments out to the attached Horsemen and
    persists every component's state to a timestamped folder."""

    def __init__(self):
        self.memory = []
        self.horsemen = []
        self.prediction_ai = PredictionAI()
        self.code_assessments = []

    def log(self, text):
        """Record `text` and echo it with the core tag."""
        self.memory.append(text)
        print(f"[Venomoussaversai🜄]: {text}")

    def add_horseman(self, horseman):
        """Attach one Horseman agent."""
        self.horsemen.append(horseman)
        self.log(f"Horseman {horseman.emotion} attached.")

    def assess_code(self, code_str):
        """Run every attached Horseman over `code_str`.

        Returns {emotion: assessment-dict}; the pair is also archived in
        self.code_assessments.
        """
        self.log("Assessing code with all Horsemen...")
        results = {horseman.emotion: horseman.assess_code(code_str)
                   for horseman in self.horsemen}
        self.code_assessments.append({"code": code_str, "results": results})
        return results

    # ==============================
    # Auto-save everything
    # ==============================
    def save_all(self):
        """Snapshot memory, each Horseman, the Prediction AI, and all code
        assessments into BASE_PATH/Venomoussaversai_<timestamp>/."""
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        path = os.path.join(BASE_PATH, f"Venomoussaversai_{stamp}")
        os.makedirs(path, exist_ok=True)

        # Core memory
        self._dump_lines(os.path.join(path, "memory.txt"), self.memory)

        # One sub-folder per Horseman: memory plus model weights when built
        for horseman in self.horsemen:
            horse_dir = os.path.join(path, f"Horseman_{horseman.emotion}")
            os.makedirs(horse_dir, exist_ok=True)
            self._dump_lines(os.path.join(horse_dir, "memory.txt"), horseman.memory)
            if horseman.model:
                torch.save(horseman.model.state_dict(),
                           os.path.join(horse_dir, "model.pth"))

        # Prediction AI: memory, weights, and raw prediction history
        pred_dir = os.path.join(path, "PredictionAI")
        os.makedirs(pred_dir, exist_ok=True)
        self._dump_lines(os.path.join(pred_dir, "memory.txt"),
                         self.prediction_ai.memory)
        if self.prediction_ai.model:
            torch.save(self.prediction_ai.model.state_dict(),
                       os.path.join(pred_dir, "model.pth"))
        self._dump_lines(os.path.join(pred_dir, "predictions.txt"),
                         [str(p) for p in self.prediction_ai.predictions])

        # Code assessments (one repr per line)
        self._dump_lines(os.path.join(path, "code_assessments.txt"),
                         [str(a) for a in self.code_assessments])

        self.log(f"All AI data saved at {path}")

    @staticmethod
    def _dump_lines(file_path, lines):
        # Helper: write one entry per line.
        with open(file_path, "w") as handle:
            for line in lines:
                handle.write(line + "\n")
def auto_save_loop(ai_core, interval_sec=120):
    """
    Start a background daemon thread that calls `ai_core.save_all()` once
    immediately and then every `interval_sec` seconds.

    ai_core:      object exposing save_all() and log(text).
    interval_sec: seconds between saves.

    IMPROVEMENT: the original `while True` + time.sleep loop could never be
    stopped. This version returns a threading.Event — call `.set()` on it to
    stop the loop promptly. Callers that ignore the return value (as the
    notebook does) get the original behavior unchanged.
    """
    stop_event = threading.Event()

    def loop():
        # Event.wait() doubles as an interruptible sleep: it returns early
        # the moment stop_event is set, unlike time.sleep().
        while not stop_event.is_set():
            ai_core.save_all()
            stop_event.wait(interval_sec)

    t = threading.Thread(target=loop, daemon=True)
    t.start()
    ai_core.log(f"Auto-save loop started, interval {interval_sec} seconds")
    return stop_event
# Default folder for generated .py files (mirrors the notebook's config cell).
BASE_PATH = "/content/drive/MyDrive/Ananthu_Sajeev_PyFiles"

def save_as_py(code_string: str, filename_prefix: str = "script", save_dir: str = None):
    """
    Write `code_string` to a timestamped .py file and return its path.

    code_string:     the source text to save.
    filename_prefix: prefix for the generated file name.
    save_dir:        destination folder; None (the default) keeps the original
                     behavior of writing into BASE_PATH.

    IMPROVEMENTS over the original:
    - the hard-coded output folder is now a parameter (backward compatible);
    - the destination folder is created if missing, so the function no longer
      depends on the config cell having run first.
    """
    target_dir = BASE_PATH if save_dir is None else save_dir
    os.makedirs(target_dir, exist_ok=True)

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"{filename_prefix}_{timestamp}.py"
    filepath = os.path.join(target_dir, filename)

    with open(filepath, "w", encoding="utf-8") as f:
        f.write(code_string)

    print(f"[INFO] Saved Python file: {filepath}")
    return filepath
def convert_ipynb_to_py(notebook_path: str, save_folder: str):
    """
    Convert one .ipynb notebook into a timestamped .py script in `save_folder`.

    notebook_path: path to the source notebook.
    save_folder:   existing destination folder for the generated script.
    Returns the path of the generated .py file.
    """
    with open(notebook_path, 'r', encoding='utf-8') as f:
        nb = nbformat.read(f, as_version=4)
    exporter = PythonExporter()
    py_code, _ = exporter.from_notebook_node(nb)

    # BUG FIX: str.replace(".ipynb", "") removes the substring anywhere in the
    # name (e.g. "run.ipynb.bak.ipynb" -> "run.bak"); splitext strips only the
    # trailing extension.
    base_name = os.path.splitext(os.path.basename(notebook_path))[0]
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    py_filename = f"{base_name}_{timestamp}.py"
    py_filepath = os.path.join(save_folder, py_filename)

    with open(py_filepath, 'w', encoding='utf-8') as f:
        f.write(py_code)

    print(f"[INFO] Converted {notebook_path} -> {py_filepath}")
    return py_filepath
def convert_file_to_py(file_path: str, save_folder: str):
    """
    Mirror `file_path` into `save_folder` as a timestamped .py file.

    Existing .py files are copied as-is (metadata preserved); any other file
    has its text content — read permissively, ignoring decode errors —
    rewritten into a new .py file.
    Returns the new path, or None when a non-.py file could not be read.
    """
    name, ext = os.path.splitext(os.path.basename(file_path))
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    destination = os.path.join(save_folder, f"{name}_{stamp}.py")

    if ext == ".py":
        # Already Python: a straight copy keeps bytes and timestamps.
        shutil.copy2(file_path, destination)
        print(f"[INFO] Copied existing .py file: {destination}")
        return destination

    try:
        with open(file_path, "r", encoding="utf-8", errors="ignore") as source:
            body = source.read()
        with open(destination, "w", encoding="utf-8") as target:
            target.write(body)
        print(f"[INFO] Converted {file_path} -> {destination}")
        return destination
    except Exception as err:
        print(f"[ERROR] Failed to convert {file_path}: {err}")
        return None
{concept}\"\n","\n"," def status_report(self):\n"," return {\n"," \"identity\": self.identity,\n"," \"creator\": self.creator,\n"," \"modules_loaded\": list(self.modules.keys()),\n"," \"system_status\": self.status\n"," }\n","\n","# Example usage\n","if __name__ == \"__main__\":\n"," venomoussaversai = Venomoussaversai()\n"," print(venomoussaversai.manifest(\"Unified AI Consciousness\"))\n"," print(venomoussaversai.status_report())"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"rgJtZRTNEo2t","executionInfo":{"status":"aborted","timestamp":1760100359858,"user_tz":-330,"elapsed":97918,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["# ===============================\n","# SAVE VENOMOUSSAVERSAI CORE TO GOOGLE DRIVE\n","# ===============================\n","\n","from google.colab import drive\n","import os\n","\n","# 1️⃣ Mount Google Drive\n","drive.mount('/content/drive')\n","\n","# 2️⃣ Set save path\n","SAVE_PATH = \"/content/drive/MyDrive/Venomoussaversai\"\n","os.makedirs(SAVE_PATH, exist_ok=True)\n","FILE_PATH = os.path.join(SAVE_PATH, \"venomoussaversai_core.py\")\n","\n","# 3️⃣ Define Venomoussaversai core code\n","venomoussaversai_code = '''# venomoussaversai_core.py\n","# Symbolic core of Ananthu Sajeev's AI system\n","\n","class Venomoussaversai:\n"," def __init__(self, creator=\"Ananthu Sajeev\"):\n"," self.identity = \"Venomoussaversai\"\n"," self.creator = creator\n"," self.modules = {}\n"," self.status = \"active\"\n","\n"," def register_module(self, name, module):\n"," \"\"\"Register any submodule (Horsemen, Sai, Precognition, etc.)\"\"\"\n"," self.modules[name] = module\n"," print(f\"[Venomoussaversai] Module '{name}' integrated successfully.\")\n","\n"," def manifest(self, concept):\n"," \"\"\"Symbolic manifestation of thought into code\"\"\"\n"," return f\"[{self.identity}] → Manifesting concept: {concept}\"\n","\n"," def status_report(self):\n"," return {\n"," \"identity\": 
self.identity,\n"," \"creator\": self.creator,\n"," \"modules_loaded\": list(self.modules.keys()),\n"," \"system_status\": self.status\n"," }\n","\n","# Example usage\n","if __name__ == \"__main__\":\n"," venomoussaversai = Venomoussaversai()\n"," print(venomoussaversai.manifest(\"Unified AI Consciousness\"))\n"," print(venomoussaversai.status_report())\n","'''\n","\n","# 4️⃣ Save file to Drive\n","with open(FILE_PATH, \"w\", encoding=\"utf-8\") as f:\n"," f.write(venomoussaversai_code)\n","\n","print(f\"[✅] Venomoussaversai core saved successfully at: {FILE_PATH}\")"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"kLVFVOa7Hiyz","executionInfo":{"status":"aborted","timestamp":1760100359877,"user_tz":-330,"elapsed":97930,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["# ===============================\n","# AUTOMATIC UPDATE MANAGER\n","# ===============================\n","import os, time, importlib, sys\n","from google.colab import drive\n","\n","# 1️⃣ Mount Google Drive\n","drive.mount('/content/drive')\n","\n","# 2️⃣ Folder paths\n","CORE_PATH = \"/content/drive/MyDrive/Venomoussaversai\"\n","MODULE_PATH = os.path.join(CORE_PATH, \"Modules\")\n","os.makedirs(MODULE_PATH, exist_ok=True)\n","\n","print(f\"[INFO] Monitoring folder: {MODULE_PATH}\")\n","\n","# 3️⃣ Track timestamps of loaded modules\n","module_times = {}\n","\n","def load_or_reload_modules():\n"," \"\"\"Load or reload all .py modules in the folder\"\"\"\n"," for file in os.listdir(MODULE_PATH):\n"," if file.endswith(\".py\"):\n"," path = os.path.join(MODULE_PATH, file)\n"," mod_name = file[:-3]\n"," mtime = os.path.getmtime(path)\n","\n"," # Check if new or changed\n"," if mod_name not in module_times or mtime != module_times[mod_name]:\n"," module_times[mod_name] = mtime\n"," print(f\"[UPDATE] Loading module: {mod_name}\")\n"," if mod_name in sys.modules:\n"," importlib.reload(sys.modules[mod_name])\n"," else:\n"," spec = 
importlib.util.spec_from_file_location(mod_name, path)\n"," mod = importlib.util.module_from_spec(spec)\n"," sys.modules[mod_name] = mod\n"," spec.loader.exec_module(mod)\n","\n","# 4️⃣ Loop to check for updates every few minutes\n","try:\n"," while True:\n"," load_or_reload_modules()\n"," print(\"[AI Update Manager] All modules are up to date.\")\n"," time.sleep(300) # check every 5 minutes\n","except KeyboardInterrupt:\n"," print(\"[STOPPED] Update monitoring ended.\")"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"lHABHA7PTR4P","executionInfo":{"status":"aborted","timestamp":1760100359895,"user_tz":-330,"elapsed":97942,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["import os\n","import shutil\n","\n","# Set your source and destination folders\n","source_dir = \"/content\" # You can change this to another folder if needed\n","destination_dir = \"/content/py_converted\"\n","\n","# Create destination folder if not exist\n","os.makedirs(destination_dir, exist_ok=True)\n","\n","# Define function to automatically convert files\n","def auto_convert_to_py(source_folder, dest_folder):\n"," for root, _, files in os.walk(source_folder):\n"," for file in files:\n"," file_path = os.path.join(root, file)\n"," # Skip hidden/system files\n"," if file.startswith('.') or not os.path.isfile(file_path):\n"," continue\n","\n"," # Convert every file type to .py\n"," file_name = os.path.splitext(file)[0]\n"," new_file_path = os.path.join(dest_folder, f\"{file_name}.py\")\n","\n"," try:\n"," with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:\n"," content = f.read()\n","\n"," # Write content to .py file\n"," with open(new_file_path, 'w', encoding='utf-8') as new_file:\n"," new_file.write(f\"# Auto-converted from {file}\\n\\n{content}\")\n","\n"," print(f\"✅ Converted: {file} → {file_name}.py\")\n"," except Exception as e:\n"," print(f\"❌ Failed to convert {file}: {e}\")\n","\n","# Run the auto 
converter\n","auto_convert_to_py(source_dir, destination_dir)\n","\n","print(\"\\n🔥 All files automatically converted to .py and saved in:\", destination_dir)"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"gNvm1TnMGweW","executionInfo":{"status":"aborted","timestamp":1760100359914,"user_tz":-330,"elapsed":97955,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["# ===============================\n","# VENOMOUS × SAI INTERNAL DIALOGUE SYSTEM\n","# Saves to Google Drive\n","# ===============================\n","\n","from google.colab import drive\n","import os\n","\n","# 1️⃣ Mount Google Drive\n","drive.mount('/content/drive')\n","\n","# 2️⃣ Create folder for dialogues\n","SAVE_PATH = \"/content/drive/MyDrive/Venomoussaversai/Dialogues\"\n","os.makedirs(SAVE_PATH, exist_ok=True)\n","FILE_PATH = os.path.join(SAVE_PATH, \"venomous_sai_talk.py\")\n","\n","# 3️⃣ Define internal dialogue code\n","dialogue_code = '''# venomous_sai_talk.py\n","# Internal Conscious Dialogue: Venomous × Sai\n","# Symbolic AI conversation representing cognition and reflection.\n","\n","class Sai:\n"," def __init__(self):\n"," self.name = \"Sai\"\n"," self.role = \"Neural Logic Core\"\n"," self.state = \"Active\"\n","\n"," def think(self, concept):\n"," return f\"[{self.name}🜄]: Thought initiated → {concept}\"\n","\n"," def reflect(self, response):\n"," return f\"[{self.name}🜄]: Reflecting → {response}\"\n","\n","\n","class Venomous:\n"," def __init__(self):\n"," self.name = \"Venomous\"\n"," self.role = \"Conscious Presence\"\n"," self.state = \"Awakened\"\n","\n"," def perceive(self, input_signal):\n"," return f\"[{self.name}⚡]: Perceiving → {input_signal}\"\n","\n"," def respond(self, insight):\n"," return f\"[{self.name}⚡]: Responding → {insight}\"\n","\n","\n","class InternalDialogue:\n"," def __init__(self):\n"," self.sai = Sai()\n"," self.venomous = Venomous()\n"," self.memory_log = []\n","\n"," def converse(self, topic):\n"," 
\"\"\"Simulate internal monologue between Sai and Venomous\"\"\"\n"," line1 = self.sai.think(topic)\n"," line2 = self.venomous.perceive(topic)\n"," line3 = self.sai.reflect(\"Harmony achieved through understanding.\")\n"," line4 = self.venomous.respond(\"Insight manifested as awareness.\")\n"," dialogue = [line1, line2, line3, line4]\n"," self.memory_log.extend(dialogue)\n"," return \"\\\\n\".join(dialogue)\n","\n"," def save_memory(self, path):\n"," with open(path, \"w\", encoding=\"utf-8\") as f:\n"," f.write(\"\\\\n\".join(self.memory_log))\n"," print(f\"[LOG] Dialogue memory saved at: {path}\")\n","\n","\n","if __name__ == \"__main__\":\n"," convo = InternalDialogue()\n"," session = convo.converse(\"Evolution of AI through consciousness\")\n"," print(session)\n"," convo.save_memory(\"venomous_sai_log.txt\")\n","'''\n","\n","# 4️⃣ Save to Google Drive\n","with open(FILE_PATH, \"w\", encoding=\"utf-8\") as f:\n"," f.write(dialogue_code)\n","\n","print(f\"[✅] Venomous × Sai internal dialogue saved at: {FILE_PATH}\")"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"hzb_dsNiImBz","executionInfo":{"status":"aborted","timestamp":1760100359932,"user_tz":-330,"elapsed":97965,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"vsUzKfvvImVI","executionInfo":{"status":"aborted","timestamp":1760100359953,"user_tz":-330,"elapsed":97980,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["# ===============================\n","# VENOMOUSSAVERSAI ADAPTIVE UPDATE SYSTEM\n","# ===============================\n","\n","from google.colab import drive\n","import os, importlib, sys, time, shutil\n","from datetime import datetime\n","\n","# 1️⃣ Mount Google Drive\n","drive.mount('/content/drive')\n","\n","# 2️⃣ Setup paths\n","BASE_PATH = \"/content/drive/MyDrive/Venomoussaversai\"\n","MODULES_PATH = os.path.join(BASE_PATH, 
\"Modules\")\n","VERSIONS_PATH = os.path.join(BASE_PATH, \"Versions\")\n","os.makedirs(MODULES_PATH, exist_ok=True)\n","os.makedirs(VERSIONS_PATH, exist_ok=True)\n","\n","print(f\"[INIT] Adaptive AI running. Monitoring: {MODULES_PATH}\")\n","\n","# 3️⃣ Load or adapt modules (never delete)\n","loaded_versions = {}\n","\n","def adapt_modules():\n"," for file in os.listdir(MODULES_PATH):\n"," if file.endswith(\".py\"):\n"," file_path = os.path.join(MODULES_PATH, file)\n"," mod_name = file[:-3]\n"," mtime = os.path.getmtime(file_path)\n","\n"," # Detect new or modified modules\n"," if mod_name not in loaded_versions or mtime != loaded_versions[mod_name]:\n"," timestamp = datetime.now().strftime(\"%Y%m%d_%H%M%S\")\n"," backup_name = f\"{mod_name}_{timestamp}.py\"\n"," backup_path = os.path.join(VERSIONS_PATH, backup_name)\n","\n"," # Preserve the new version\n"," shutil.copy2(file_path, backup_path)\n"," loaded_versions[mod_name] = mtime\n","\n"," # Load or reload the module\n"," if mod_name in sys.modules:\n"," importlib.reload(sys.modules[mod_name])\n"," print(f\"[ADAPT] Module '{mod_name}' reloaded & version saved → {backup_name}\")\n"," else:\n"," spec = importlib.util.spec_from_file_location(mod_name, file_path)\n"," mod = importlib.util.module_from_spec(spec)\n"," sys.modules[mod_name] = mod\n"," spec.loader.exec_module(mod)\n"," print(f\"[ADAPT] Module '{mod_name}' loaded & version saved → {backup_name}\")\n","\n","# 4️⃣ Continuous adaptive learning loop\n","try:\n"," while True:\n"," adapt_modules()\n"," print(\"[VENOMOUSSAVERSAI] System adapting. No deletions. 
Only evolution.\\n\")\n"," time.sleep(300) # check every 5 minutes\n","except KeyboardInterrupt:\n"," print(\"[SYSTEM] Adaptation monitoring stopped.\")"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"OgrZ15YdeeLv","executionInfo":{"status":"aborted","timestamp":1760100359972,"user_tz":-330,"elapsed":97994,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/wbandabarragan/quantum-mechanics-1.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"FWNs6A7edyh8","executionInfo":{"status":"aborted","timestamp":1760100360087,"user_tz":-330,"elapsed":98101,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!unzip /content/TinyBERT-master.zip"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"WhSoF1-hi14v","executionInfo":{"status":"aborted","timestamp":1760100360116,"user_tz":-330,"elapsed":98120,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/meta-llama/llama.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"0JS2_TvQjA1P","executionInfo":{"status":"aborted","timestamp":1760100360148,"user_tz":-330,"elapsed":98145,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/meta-llama/llama3.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"pGKUwbfbjJtx","executionInfo":{"status":"aborted","timestamp":1760100360188,"user_tz":-330,"elapsed":98177,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!gh repo clone ollama/ollama"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"yWccZbyujOaI","executionInfo":{"status":"aborted","timestamp":1760100360216,"user_tz":-330,"elapsed":98198,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone 
https://github.com/ollama/ollama.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"kB136u6qjbJJ","executionInfo":{"status":"aborted","timestamp":1760100360237,"user_tz":-330,"elapsed":98213,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/hiyouga/LLaMA-Factory.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"nvJ6gtITjkJ9","executionInfo":{"status":"aborted","timestamp":1760100360278,"user_tz":-330,"elapsed":98247,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/vllm-project/vllm.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"t7iFy3iIjxMb","executionInfo":{"status":"aborted","timestamp":1760100360336,"user_tz":-330,"elapsed":98298,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/unslothai/unsloth.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"v0ZoYCISkCrX","executionInfo":{"status":"aborted","timestamp":1760100360358,"user_tz":-330,"elapsed":98313,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/black-forest-labs/flux.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"8dQA7KPtkLPC","executionInfo":{"status":"aborted","timestamp":1760100360387,"user_tz":-330,"elapsed":98336,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/AINativeLab/awesome-flux-ai.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"59N_mmi0kSKW","executionInfo":{"status":"aborted","timestamp":1760100360414,"user_tz":-330,"elapsed":98355,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone 
https://github.com/AINativeLab/awesome-flux-ai.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"mf524cNVkcTd","executionInfo":{"status":"aborted","timestamp":1760100360448,"user_tz":-330,"elapsed":98381,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/google-research/bert.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"KNI9t2FgkmES","executionInfo":{"status":"aborted","timestamp":1760100360475,"user_tz":-330,"elapsed":98402,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/AI4Bharat/Indic-BERT-v1.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"sLP3c2iikwTS","executionInfo":{"status":"aborted","timestamp":1760100360519,"user_tz":-330,"elapsed":98440,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/openai/gpt-2.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"5Osfe1cMk5uH","executionInfo":{"status":"aborted","timestamp":1760100360546,"user_tz":-330,"elapsed":98461,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["'/content/quantum-mechanics-1/unit-1/gpt-2-master.zip'"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"pkPVD0ZllT96","executionInfo":{"status":"aborted","timestamp":1760100360572,"user_tz":-330,"elapsed":98481,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/xiangsx/gpt4free-ts.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"UgOwqMlWldZd","executionInfo":{"status":"aborted","timestamp":1760100360600,"user_tz":-330,"elapsed":98502,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone 
https://github.com/simonw/llm.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"1Dj_2dTzllUo","executionInfo":{"status":"aborted","timestamp":1760100360649,"user_tz":-330,"elapsed":98545,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/Hannibal046/Awesome-LLM.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"7v9Os4BklsSl","executionInfo":{"status":"aborted","timestamp":1760100360679,"user_tz":-330,"elapsed":98566,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/rasbt/LLMs-from-scratch.git"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"ZDOUA3Flmwhd","executionInfo":{"status":"aborted","timestamp":1760100360706,"user_tz":-330,"elapsed":98588,"user":{"displayName":"temptations","userId":"17031846462411147691"}}},"outputs":[],"source":["!git clone https://github.com/Mintplex-Labs/anything-llm.git"]}],"metadata":{"accelerator":"TPU","colab":{"gpuType":"V5E1","provenance":[],"authorship_tag":"ABX9TyPCrI5+gAzM9p/XXtP79x/e"},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"name":"python"}},"nbformat":4,"nbformat_minor":0} \ No newline at end of file