DarkSca committed on
Commit
dc71c91
·
verified ·
1 Parent(s): f04d7aa

Upload folder using huggingface_hub

Browse files
.gitattributes CHANGED
@@ -35,3 +35,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  lora_tokenizer.json filter=lfs diff=lfs merge=lfs -text
37
  qmodel.lora filter=lfs diff=lfs merge=lfs -text
 
 
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  lora_tokenizer.json filter=lfs diff=lfs merge=lfs -text
37
  qmodel.lora filter=lfs diff=lfs merge=lfs -text
38
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
.trillim-quantize-complete ADDED
@@ -0,0 +1 @@
 
 
1
+ ready
README.md CHANGED
@@ -34,7 +34,7 @@ This adapter runs entirely on CPU — no GPU required.
34
  pip install trillim
35
  trillim pull Trillim/BitNet-TRNQ
36
  trillim pull Trillim/BitNet-GenZ-LoRA-TRNQ
37
- trillim chat Trillim/BitNet-TRNQ --lora Trillim/BitNet-GenZ-LoRA-TRNQ
38
  ```
39
 
40
  This starts an interactive CLI chat.
@@ -44,11 +44,12 @@ This starts an interactive CLI chat.
44
  | File | Description |
45
  |---|---|
46
  | `qmodel.lora` | Ternary-quantized LoRA weights in Trillim format |
47
- | `lora_tokenizer.json` | Tokenizer |
48
- | `lora_tokenizer_config.json` | Tokenizer configuration |
49
- | `lora_chat_template.jinja` | Chat template |
50
  | `trillim_config.json` | Trillim metadata |
51
 
52
  ## License
53
 
54
  This adapter is released under the [MIT License](https://opensource.org/licenses/MIT), following the license of the source model.
 
 
34
  pip install trillim
35
  trillim pull Trillim/BitNet-TRNQ
36
  trillim pull Trillim/BitNet-GenZ-LoRA-TRNQ
37
+ trillim chat Trillim/BitNet-TRNQ Trillim/BitNet-GenZ-LoRA-TRNQ
38
  ```
39
 
40
  This starts an interactive CLI chat.
 
44
  | File | Description |
45
  |---|---|
46
  | `qmodel.lora` | Ternary-quantized LoRA weights in Trillim format |
47
+ | `tokenizer.json` | Tokenizer |
48
+ | `tokenizer_config.json` | Tokenizer configuration |
49
+ | `chat_template.jinja` | Chat template |
50
  | `trillim_config.json` | Trillim metadata |
51
 
52
  ## License
53
 
54
  This adapter is released under the [MIT License](https://opensource.org/licenses/MIT), following the license of the source model.
55
+
adapter_config.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alora_invocation_tokens": null,
3
+ "alpha_pattern": {},
4
+ "arrow_config": null,
5
+ "auto_mapping": null,
6
+ "base_model_name_or_path": "microsoft/bitnet-b1.58-2B-4T-bf16",
7
+ "bias": "none",
8
+ "corda_config": null,
9
+ "ensure_weight_tying": false,
10
+ "eva_config": null,
11
+ "exclude_modules": null,
12
+ "fan_in_fan_out": false,
13
+ "inference_mode": true,
14
+ "init_lora_weights": true,
15
+ "layer_replication": null,
16
+ "layers_pattern": null,
17
+ "layers_to_transform": null,
18
+ "loftq_config": {},
19
+ "lora_alpha": 16,
20
+ "lora_bias": false,
21
+ "lora_dropout": 0.05,
22
+ "megatron_config": null,
23
+ "megatron_core": "megatron.core",
24
+ "modules_to_save": null,
25
+ "peft_type": "LORA",
26
+ "peft_version": "0.18.1",
27
+ "qalora_group_size": 16,
28
+ "r": 16,
29
+ "rank_pattern": {},
30
+ "revision": null,
31
+ "target_modules": [
32
+ "v_proj",
33
+ "k_proj",
34
+ "q_proj",
35
+ "o_proj"
36
+ ],
37
+ "target_parameters": null,
38
+ "task_type": "CAUSAL_LM",
39
+ "trainable_token_indices": null,
40
+ "use_dora": false,
41
+ "use_qalora": false,
42
+ "use_rslora": false
43
+ }
chat_template.jinja ADDED
@@ -0,0 +1 @@
 
 
1
+ {% set loop_messages = messages %}{% for message in loop_messages %}{% set content = message['role'] | capitalize + ': '+ message['content'] | trim + '<|eot_id|>' %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ 'Assistant: ' }}{% endif %}
qmodel.lora CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:92c46b14bf88f89fdb043f4f365120c392214a9db85f99b2a17a67a650ac3794
3
  size 15974634
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5df6e2bff9167fdea4ac3f4d97c1d88f323ac99074fa6a4390a040a64a6c3882
3
  size 15974634
tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3c5cf44023714fb39b05e71e425f8d7b92805ff73f7988b083b8c87f0bf87393
3
+ size 17209961
tokenizer_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "bos_token": "<|begin_of_text|>",
3
+ "eos_token": "<|eot_id|>"
4
+ }
trillim_config.json CHANGED
@@ -1,13 +1,14 @@
1
  {
2
- "trillim_version": "0.3.0",
3
- "format_version": 3,
4
- "type": "lora_adapter",
5
- "style": "genz-slang",
6
- "description": "LoRA adapter that changes the model style to speak in GenZ slang",
7
- "quantization": "ternary",
8
- "source_model": "microsoft/bitnet-b1.58-2B-4T-bf16",
9
- "base_model_dir": "BitNet",
10
- "architecture": "bitnet",
11
- "platforms": ["x86_64", "aarch64"],
12
- "base_model_config_hash": "46c46efcb3b6ca1e0e6ce2f822cdb6d211b165b4d4f6359b3581ced1616b01ba"
 
13
  }
 
1
  {
2
+ "trillim_version": "0.6.0",
3
+ "format_version": 4,
4
+ "type": "lora_adapter",
5
+ "quantization": "ternary",
6
+ "source_model": "microsoft/bitnet-b1.58-2B-4T-bf16",
7
+ "architecture": "bitnet",
8
+ "platforms": [
9
+ "x86_64",
10
+ "aarch64"
11
+ ],
12
+ "base_model_config_hash": "f70d9d651af0fcf2a7c89c2194160b1139bf77638d0d7bf7f730770984bc2623",
13
+ "remote_code": false
14
  }