Jamie@TitanML committed on
Commit
4c0cfb5
·
1 Parent(s): 8553f31

Update ggml_cache.json

Browse files
Files changed (1) hide show
  1. ggml_cache.json +42 -6
ggml_cache.json CHANGED
@@ -5,15 +5,21 @@
5
  "model_type": "llama",
6
  "tokenizer": "llama-local"
7
  },
 
 
 
 
 
 
8
  "llama-2-7b-chat": {
9
  "repo": "TheBloke/Llama-2-7B-Chat-GGML",
10
  "filename": "llama-2-7b-chat.ggmlv3.q4_0.bin",
11
  "model_type": "llama",
12
  "tokenizer": "llama-local"
13
  },
14
- "llama-2-7b-qlora": {
15
- "repo": "TheBloke/llama-2-7B-Guanaco-QLoRA-GGML",
16
- "filename": "llama-2-7b-guanaco-qlora.ggmlv3.q4_0.bin",
17
  "model_type": "llama",
18
  "tokenizer": "llama-local"
19
  },
@@ -23,27 +29,45 @@
23
  "model_type": "llama",
24
  "tokenizer": "llama-local"
25
  },
 
 
 
 
 
 
26
  "llama-2-13b-chat": {
27
  "repo": "TheBloke/Llama-2-13B-Chat-GGML",
28
  "filename": "llama-2-13b-chat.ggmlv3.q4_0.bin",
29
  "model_type": "llama",
30
  "tokenizer": "llama-local"
31
  },
 
 
 
 
 
 
32
  "llama-2-70b": {
33
  "repo": "TheBloke/Llama-2-70B-GGML",
34
  "filename": "llama-2-70b.ggmlv3.q4_0.bin",
35
  "model_type": "llama",
36
  "tokenizer": "llama-local"
37
  },
 
 
 
 
 
 
38
  "llama-2-70b-chat": {
39
  "repo": "TheBloke/Llama-2-70B-Chat-GGML",
40
  "filename": "llama-2-70b-chat.ggmlv3.q4_0.bin",
41
  "model_type": "llama",
42
  "tokenizer": "llama-local"
43
  },
44
- "llama-2-13b-qlora": {
45
- "repo": "TheBloke/llama-2-13B-Guanaco-QLoRA-GGML",
46
- "filename": "llama-2-13b-guanaco-qlora.ggmlv3.q4_0.bin",
47
  "model_type": "llama",
48
  "tokenizer": "llama-local"
49
  },
@@ -53,12 +77,24 @@
53
  "model_type": "falcon",
54
  "tokenizer": "tiiuae/falcon-7b"
55
  },
 
 
 
 
 
 
56
  "falcon-40b": {
57
  "repo": "TheBloke/falcon-40b-instruct-GGML",
58
  "filename": "falcon-40b-instruct.ggccv1.q4_0.bin",
59
  "model_type": "falcon",
60
  "tokenizer": "tiiuae/falcon-7b"
61
  },
 
 
 
 
 
 
62
  "gpt2": {
63
  "repo": "marella/gpt-2-ggml",
64
  "filename": "ggml-model.bin",
 
5
  "model_type": "llama",
6
  "tokenizer": "llama-local"
7
  },
8
+ "meta-llama/llama-2-7b": {
9
+ "repo": "TheBloke/Llama-2-7B-GGML",
10
+ "filename": "llama-2-7b.ggmlv3.q4_0.bin",
11
+ "model_type": "llama",
12
+ "tokenizer": "llama-local"
13
+ },
14
  "llama-2-7b-chat": {
15
  "repo": "TheBloke/Llama-2-7B-Chat-GGML",
16
  "filename": "llama-2-7b-chat.ggmlv3.q4_0.bin",
17
  "model_type": "llama",
18
  "tokenizer": "llama-local"
19
  },
20
+ "meta-llama/llama-2-7b-chat": {
21
+ "repo": "TheBloke/Llama-2-7B-Chat-GGML",
22
+ "filename": "llama-2-7b-chat.ggmlv3.q4_0.bin",
23
  "model_type": "llama",
24
  "tokenizer": "llama-local"
25
  },
 
29
  "model_type": "llama",
30
  "tokenizer": "llama-local"
31
  },
32
+ "meta-llama/llama-2-13b": {
33
+ "repo": "TheBloke/Llama-2-13B-GGML",
34
+ "filename": "llama-2-13b.ggmlv3.q4_0.bin",
35
+ "model_type": "llama",
36
+ "tokenizer": "llama-local"
37
+ },
38
  "llama-2-13b-chat": {
39
  "repo": "TheBloke/Llama-2-13B-Chat-GGML",
40
  "filename": "llama-2-13b-chat.ggmlv3.q4_0.bin",
41
  "model_type": "llama",
42
  "tokenizer": "llama-local"
43
  },
44
+ "meta-llama/llama-2-13b-chat": {
45
+ "repo": "TheBloke/Llama-2-13B-Chat-GGML",
46
+ "filename": "llama-2-13b-chat.ggmlv3.q4_0.bin",
47
+ "model_type": "llama",
48
+ "tokenizer": "llama-local"
49
+ },
50
  "llama-2-70b": {
51
  "repo": "TheBloke/Llama-2-70B-GGML",
52
  "filename": "llama-2-70b.ggmlv3.q4_0.bin",
53
  "model_type": "llama",
54
  "tokenizer": "llama-local"
55
  },
56
+ "meta-llama/llama-2-70b": {
57
+ "repo": "TheBloke/Llama-2-70B-GGML",
58
+ "filename": "llama-2-70b.ggmlv3.q4_0.bin",
59
+ "model_type": "llama",
60
+ "tokenizer": "llama-local"
61
+ },
62
  "llama-2-70b-chat": {
63
  "repo": "TheBloke/Llama-2-70B-Chat-GGML",
64
  "filename": "llama-2-70b-chat.ggmlv3.q4_0.bin",
65
  "model_type": "llama",
66
  "tokenizer": "llama-local"
67
  },
68
+ "meta-llama/llama-2-70b-chat": {
69
+ "repo": "TheBloke/Llama-2-70B-Chat-GGML",
70
+ "filename": "llama-2-70b-chat.ggmlv3.q4_0.bin",
71
  "model_type": "llama",
72
  "tokenizer": "llama-local"
73
  },
 
77
  "model_type": "falcon",
78
  "tokenizer": "tiiuae/falcon-7b"
79
  },
80
+ "tiiuae/falcon-7b": {
81
+ "repo": "TheBloke/falcon-7b-instruct-GGML",
82
+ "filename": "falcon-7b-instruct.ggccv1.q4_0.bin",
83
+ "model_type": "falcon",
84
+ "tokenizer": "tiiuae/falcon-7b"
85
+ },
86
  "falcon-40b": {
87
  "repo": "TheBloke/falcon-40b-instruct-GGML",
88
  "filename": "falcon-40b-instruct.ggccv1.q4_0.bin",
89
  "model_type": "falcon",
90
  "tokenizer": "tiiuae/falcon-7b"
91
  },
92
+ "tiiuae/falcon-40b": {
93
+ "repo": "TheBloke/falcon-40b-instruct-GGML",
94
+ "filename": "falcon-40b-instruct.ggccv1.q4_0.bin",
95
+ "model_type": "falcon",
96
+ "tokenizer": "tiiuae/falcon-7b"
97
+ },
98
  "gpt2": {
99
  "repo": "marella/gpt-2-ggml",
100
  "filename": "ggml-model.bin",