cam-1000 committed on
Commit
303b73e
·
verified ·
1 Parent(s): fa6b2e1

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +9 -2
config.json CHANGED
@@ -17,14 +17,21 @@
17
  "num_attention_heads": 16,
18
  "num_hidden_layers": 28,
19
  "num_key_value_heads": 8,
 
 
 
 
 
 
 
20
  "rms_norm_eps": 1e-06,
21
  "rope_scaling": null,
22
  "rope_theta": 1000000,
23
  "sliding_window": null,
24
  "tie_word_embeddings": true,
25
  "torch_dtype": "bfloat16",
26
- "transformers_version": "4.52.3",
27
- "use_cache": false,
28
  "use_sliding_window": false,
29
  "vocab_size": 151936
30
  }
 
17
  "num_attention_heads": 16,
18
  "num_hidden_layers": 28,
19
  "num_key_value_heads": 8,
20
+ "rag_config": {
21
+ "embedding_model_name": "sentence-transformers/all-mpnet-base-v2",
22
+ "retrieval_top_k": 5,
23
+ "index_type": "faiss",
24
+ "similarity_function": "cosine",
25
+ "max_input_length": 512
26
+ },
27
  "rms_norm_eps": 1e-06,
28
  "rope_scaling": null,
29
  "rope_theta": 1000000,
30
  "sliding_window": null,
31
  "tie_word_embeddings": true,
32
  "torch_dtype": "bfloat16",
33
+ "transformers_version": "4.51.3",
34
+ "use_cache": true,
35
  "use_sliding_window": false,
36
  "vocab_size": 151936
37
  }