reaperdoesntknow committed on
Commit
d649a96
·
verified ·
1 Parent(s): b1c6219

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +7 -82
README.md CHANGED
@@ -82,16 +82,14 @@ pip install torch transformers datasets
82
 
83
  ```python
84
  from transformers import AutoTokenizer
85
- from modeling_swarm import SwarmForCausalLM
86
- from configuration_swarm import SwarmConfig
87
 
88
  # Load model and tokenizer
89
- model = SwarmForCausalLM.from_pretrained("your-username/SAGI")
90
- tokenizer = AutoTokenizer.from_pretrained("your-username/SAGI")
91
 
92
  # Generate text
93
  model.eval()
94
- model.reset_swarm_state() # Reset for clean generation
95
 
96
  prompt = "Once upon a time"
97
  inputs = tokenizer(prompt, return_tensors="pt")
@@ -109,51 +107,6 @@ outputs = model.generate(
109
  print(tokenizer.decode(outputs[0], skip_special_tokens=True))
110
  ```
111
 
112
- ### Inspecting Swarm State
113
-
114
- ```python
115
- # Get current swarm state
116
- state = model.get_swarm_state()
117
- print(f"Active agents: {sum(state['active'][0])}")
118
- print(f"Goal stack depth: {state['goal_stack_size']}")
119
-
120
- # Reset swarm state between generations
121
- model.reset_swarm_state()
122
- ```
123
-
124
- ### Training
125
-
126
- ```python
127
- from datasets import load_dataset
128
- from transformers import Trainer, TrainingArguments
129
-
130
- # Load dataset
131
- dataset = load_dataset("roneneldan/TinyStories", split="train[:5%]")
132
-
133
- # Tokenize
134
- def tokenize(examples):
135
- return tokenizer(examples["text"], truncation=True, max_length=256)
136
-
137
- tokenized = dataset.map(tokenize, batched=True, remove_columns=["text"])
138
-
139
- # Train
140
- training_args = TrainingArguments(
141
- output_dir="./sagi-finetuned",
142
- per_device_train_batch_size=4,
143
- learning_rate=3e-4,
144
- num_train_epochs=1,
145
- save_steps=500,
146
- )
147
-
148
- trainer = Trainer(
149
- model=model,
150
- args=training_args,
151
- train_dataset=tokenized,
152
- )
153
-
154
- trainer.train()
155
- ```
156
-
157
  ## Model Architecture Details
158
 
159
  ### Swarm Configuration
@@ -212,38 +165,10 @@ Not intended for:
212
  ## Citation
213
 
214
  ```bibtex
215
- @software{sagi2024,
216
  title={SAGI: Swarm AGI Language Model},
217
- author={},
218
- year={2024},
219
- url={https://huggingface.co/your-username/SAGI}
220
  }
221
  ```
222
-
223
- ## License
224
-
225
- Apache 2.0
226
-
227
- ## Files
228
-
229
- - `config.json` - Model configuration
230
- - `model.safetensors` / `pytorch_model.bin` - Model weights
231
- - `tokenizer.json` - Tokenizer configuration
232
- - `modeling_swarm.py` - Model implementation (required for loading)
233
- - `configuration_swarm.py` - Config class (required for loading)
234
- - `model.py` - Swarm-7 V2.2 core system
235
-
236
- ## Custom Code Notice
237
-
238
- This model requires custom code to run. When loading, use `trust_remote_code=True`:
239
-
240
- ```python
241
- from transformers import AutoModelForCausalLM
242
-
243
- model = AutoModelForCausalLM.from_pretrained(
244
- "your-username/SAGI",
245
- trust_remote_code=True
246
- )
247
- ```
248
-
249
- Or import the classes directly from the model files as shown in the Quick Start section.
 
82
 
83
  ```python
84
  from transformers import AutoTokenizer
85
+ from transformers import AutoModelForCausalLM, AutoConfig
 
86
 
87
  # Load model and tokenizer
88
+ model = AutoModelForCausalLM.from_pretrained("reaperdoesntknow/SAGI")
89
+ tokenizer = AutoTokenizer.from_pretrained("reaperdoesntknow/SAGI")
90
 
91
  # Generate text
92
  model.eval()
 
93
 
94
  prompt = "Once upon a time"
95
  inputs = tokenizer(prompt, return_tensors="pt")
 
107
  print(tokenizer.decode(outputs[0], skip_special_tokens=True))
108
  ```
109
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
110
  ## Model Architecture Details
111
 
112
  ### Swarm Configuration
 
165
  ## Citation
166
 
167
  ```bibtex
168
+ @software{sagi2026,
169
  title={SAGI: Swarm AGI Language Model},
170
+ author={Reaperdoesntknow},
171
+ year={2026},
172
- url={https://huggingface.co/reaperdoesntknow/SAGI}
173
  }
174
  ```