Upload 2 files
- .gitattributes +1 -0
- combo-ner-sa-3b.gguf +3 -0
- config.json +31 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+combo-ner-sa-3b.gguf filter=lfs diff=lfs merge=lfs -text
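The added rule routes the new GGUF file through Git LFS instead of the regular object store. A minimal sketch of the equivalent edit, assuming a checkout at the repo root with an existing .gitattributes (in practice, running git lfs track "combo-ner-sa-3b.gguf" writes the same line for you):

    # hypothetical helper; "git lfs track" performs this edit itself
    from pathlib import Path

    rule = "combo-ner-sa-3b.gguf filter=lfs diff=lfs merge=lfs -text\n"
    attrs = Path(".gitattributes")
    if rule not in attrs.read_text():   # append only if not already tracked
        with attrs.open("a") as f:
            f.write(rule)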
combo-ner-sa-3b.gguf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d7e4caf623710a1ed5227dcc5e21bb9063f5907376634370edbb9c58ce7de35
+size 1708595168
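These three lines are a Git LFS pointer, not the model itself: they record the SHA-256 digest and byte size (about 1.7 GB) of the real file, which LFS fetches on checkout. A quick integrity check after downloading, as a sketch that takes the path, digest, and size straight from the pointer above:

    import hashlib, os

    path = "combo-ner-sa-3b.gguf"
    expected_oid = "6d7e4caf623710a1ed5227dcc5e21bb9063f5907376634370edbb9c58ce7de35"
    expected_size = 1708595168

    assert os.path.getsize(path) == expected_size          # size from the pointer
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):   # hash in 1 MiB chunks
            h.update(chunk)
    assert h.hexdigest() == expected_oid                   # oid from the pointer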
config.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "architectures": [
+    "StableLMEpochForCausalLM"
+  ],
+  "auto_map": {
+    "AutoConfig": "configuration_stablelm_epoch.StableLMEpochConfig",
+    "AutoModelForCausalLM": "modeling_stablelm_epoch.StableLMEpochForCausalLM"
+  },
+  "_name_or_path": "llmware/bling-stable-lm-3b-4e1t-v0",
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "hidden_act": "silu",
+  "hidden_size": 2560,
+  "initializer_range": 0.02,
+  "intermediate_size": 6912,
+  "max_position_embeddings": 4096,
+  "model_type": "stablelm_epoch",
+  "norm_eps": 1e-05,
+  "num_attention_heads": 32,
+  "num_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 32,
+  "rope_pct": 0.25,
+  "rope_theta": 10000,
+  "rotary_scaling_factor": 1.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.33.2",
+  "use_cache": true,
+  "vocab_size": 50304
+}
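A few derived quantities fall straight out of these values; a back-of-the-envelope sketch, assuming rope_pct carries its usual StableLM-Epoch meaning (the fraction of each attention head's dimensions that receive rotary position embeddings):

    # values copied from config.json above
    hidden_size = 2560
    num_attention_heads = 32
    rope_pct = 0.25

    head_dim = hidden_size // num_attention_heads   # 80 dimensions per head
    rotary_dim = int(head_dim * rope_pct)           # 20 of them get RoPE
    print(head_dim, rotary_dim)                     # 80 20

Since num_key_value_heads equals num_attention_heads, this is standard multi-head attention rather than grouped-query attention.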