Raiff1982 committed on
Commit
8fbbe51
·
1 Parent(s): 346b92c
AICoreAGIX_with_TB.py CHANGED
@@ -12,7 +12,8 @@ from typing import List, Dict, Any
12
  from cryptography.fernet import Fernet
13
  from datetime import datetime
14
  import pyttsx3
15
- import os
 
16
  import hashlib
17
 
18
  from self_trust_core import SelfTrustCore
@@ -34,6 +35,10 @@ from autonomy_engine import AutonomyEngine
34
  from codette_bridge import CodetteBridge
35
 
36
 
 
 
 
 
37
  class AICoreAGIX:
38
  def __init__(self, config_path: str = "config.json"):
39
  self.self_trust_core = SelfTrustCore()
@@ -42,24 +47,73 @@ class AICoreAGIX:
42
  self.config = self._load_config(config_path)
43
  self._load_or_generate_id_lock()
44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
  try:
46
- self.tokenizer = AutoTokenizer.from_pretrained(
47
- self.config["model_name"],
48
  trust_remote_code=True,
49
  use_fast=False
50
  )
51
- except KeyError as e:
52
- logger.warning(f"[Tokenizer Load]: Fallback triggered due to missing config key: {e}")
53
- self.tokenizer = AutoTokenizer.from_pretrained("gpt2")
54
-
 
 
 
 
 
 
 
 
 
55
  try:
56
- self.model = AutoModelForCausalLM.from_pretrained(
57
- self.config["model_name"],
58
  trust_remote_code=True
59
  )
60
  except Exception as e:
61
  logger.warning(f"[Model Load]: Fallback triggered due to model load failure: {e}")
62
- self.model = AutoModelForCausalLM.from_pretrained("gpt2")
 
63
 
64
  self.context_memory = self._initialize_vector_memory()
65
  self.http_session = aiohttp.ClientSession()
 
12
  from cryptography.fernet import Fernet
13
  from datetime import datetime
14
  import pyttsx3
15
+ import os # Make sure this is near the top of your file
16
+ os.environ["CUDA_VISIBLE_DEVICES"] = "-1" # Prevent TensorFlow from trying to use CUDA
17
  import hashlib
18
 
19
  from self_trust_core import SelfTrustCore
 
35
  from codette_bridge import CodetteBridge
36
 
37
 
38
+ import os # Make sure this is near the top of your file
39
+ os.environ["CUDA_VISIBLE_DEVICES"] = "-1" # Prevent TensorFlow from trying to use CUDA
40
+
41
+
42
  class AICoreAGIX:
43
  def __init__(self, config_path: str = "config.json"):
44
  self.self_trust_core = SelfTrustCore()
 
47
  self.config = self._load_config(config_path)
48
  self._load_or_generate_id_lock()
49
 
50
+ # === Safe tokenizer load ===
51
+ self.tokenizer = self._safe_load_tokenizer(self.config["model_name"])
52
+
53
+ # === Safe model load ===
54
+ self.model = self._safe_load_model(self.config["model_name"])
55
+
56
+ self.context_memory = self._initialize_vector_memory()
57
+ self.http_session = aiohttp.ClientSession()
58
+ self.database = Database()
59
+ self.multi_agent_system = MultiAgentSystem()
60
+ self.self_improving_ai = SelfImprovingAI()
61
+ self.neural_symbolic_engine = NeuroSymbolicEngine()
62
+ self.federated_ai = FederatedAI()
63
+ self.ethics_core = EthicsCore()
64
+ self.autonomy = AutonomyEngine()
65
+ self.codette_bridge = CodetteBridge(model_id="ft:gpt-4o-2024-08-06:raiffs-bits:pidette:B9TL")
66
+
67
+ self._codriao_key = self._generate_codriao_key()
68
+ self._fernet_key = Fernet.generate_key()
69
+ self._encrypted_codriao_key = Fernet(self._fernet_key).encrypt(self._codriao_key.encode())
70
+ self._codriao_journal = []
71
+ self._journal_key = Fernet.generate_key()
72
+ self._journal_fernet = Fernet(self._journal_key)
73
+
74
+ self._encryption_key = Fernet.generate_key()
75
+ secure_memory_module = load_secure_memory_module()
76
+ SecureMemorySession = secure_memory_module.SecureMemorySession
77
+ self.secure_memory_loader = SecureMemorySession(self._encryption_key)
78
+
79
+ self.speech_engine = pyttsx3.init()
80
+ self.health_module = CodriaoHealthModule(ai_core=self)
81
+ self.training_memory = []
82
+ self.quarantine_engine = QuarantineEngine()
83
+ self.anomaly_scorer = AnomalyScorer()
84
+ self.lockdown_engaged = False
85
+
86
+ logger.info("[Codriao]: SelfTrustCore initialized. Fear is now filtered by self-consent.")
87
+
88
+ def _safe_load_tokenizer(self, model_name):
89
  try:
90
+ return AutoTokenizer.from_pretrained(
91
+ model_name,
92
  trust_remote_code=True,
93
  use_fast=False
94
  )
95
+ except (ValueError, KeyError) as e:
96
+ logger.warning(f"[Tokenizer Load]: Remote code failed β€” falling back. Reason: {e}")
97
+ try:
98
+ return AutoTokenizer.from_pretrained(
99
+ model_name,
100
+ trust_remote_code=False,
101
+ use_fast=False
102
+ )
103
+ except Exception as e2:
104
+ logger.warning(f"[Tokenizer Load]: Full fallback to gpt2. Reason: {e2}")
105
+ return AutoTokenizer.from_pretrained("gpt2")
106
+
107
+ def _safe_load_model(self, model_name):
108
  try:
109
+ return AutoModelForCausalLM.from_pretrained(
110
+ model_name,
111
  trust_remote_code=True
112
  )
113
  except Exception as e:
114
  logger.warning(f"[Model Load]: Fallback triggered due to model load failure: {e}")
115
+ return AutoModelForCausalLM.from_pretrained("gpt2")
116
+
117
 
118
  self.context_memory = self._initialize_vector_memory()
119
  self.http_session = aiohttp.ClientSession()
__pycache__/AICoreAGIX_with_TB.cpython-311.pyc CHANGED
Binary files a/__pycache__/AICoreAGIX_with_TB.cpython-311.pyc and b/__pycache__/AICoreAGIX_with_TB.cpython-311.pyc differ
 
codette.log CHANGED
@@ -8,3 +8,4 @@
8
  2025-04-10 11:20:25,869 - WARNING - [CodetteFallback] Local fallback unavailable: 'added_tokens'
9
  2025-04-10 11:24:40,252 - WARNING - [CodetteFallback] Local fallback unavailable: 'added_tokens'
10
  2025-04-10 11:40:42,606 - WARNING - [CodetteFallback] Local fallback unavailable: 'added_tokens'
 
 
8
  2025-04-10 11:20:25,869 - WARNING - [CodetteFallback] Local fallback unavailable: 'added_tokens'
9
  2025-04-10 11:24:40,252 - WARNING - [CodetteFallback] Local fallback unavailable: 'added_tokens'
10
  2025-04-10 11:40:42,606 - WARNING - [CodetteFallback] Local fallback unavailable: 'added_tokens'
11
+ 2025-04-10 11:53:12,520 - WARNING - [CodetteFallback] Local fallback unavailable: 'added_tokens'