Kevin Ripper committed
Commit 35bab69 · 1 Parent(s): dbc87cf

add tokenizer
Files changed:
- .gitattributes +1 -0
- merges.txt +0 -0
- special_tokens_map.json +6 -0
- tokenizer.json +3 -0
- tokenizer_config.json +11 -0
- vocab.json +0 -0
.gitattributes CHANGED
@@ -29,3 +29,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
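The added rule routes tokenizer.json through Git LFS, so the repository keeps only a small pointer file while the large tokenizer payload is stored out of band; it is the same entry that `git lfs track "tokenizer.json"` would append to .gitattributes.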
merges.txt ADDED
The diff for this file is too large to render. See raw diff.
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+{
+  "bos_token": "<|startoftext|>",
+  "eos_token": "<|endoftext|>",
+  "pad_token": "<|pad|>",
+  "unk_token": "<|endoftext|>"
+}
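This map assigns GPT-2 style control tokens to the four standard special-token roles (note that the unknown token reuses the end-of-text token). A minimal sketch of how such a map is typically registered with the Hugging Face transformers API; the base checkpoint name is an assumption, not something this commit specifies:

```python
from transformers import AutoTokenizer

# Hypothetical base checkpoint; the commit does not name one.
tokenizer = AutoTokenizer.from_pretrained("gpt2")

# Register the roles exactly as they appear in special_tokens_map.json.
# add_special_tokens() returns how many genuinely new tokens were added
# to the vocabulary; tokens that already exist are only bound to a role.
num_added = tokenizer.add_special_tokens({
    "bos_token": "<|startoftext|>",
    "eos_token": "<|endoftext|>",
    "pad_token": "<|pad|>",
    "unk_token": "<|endoftext|>",
})
print(num_added, tokenizer.bos_token, tokenizer.pad_token)
```

If new tokens are introduced this way, any model paired with the tokenizer normally needs `model.resize_token_embeddings(len(tokenizer))` so its embedding matrix covers the enlarged vocabulary.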
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:510e560c9624313899b0a23b5a1025c0e5d2ca744868b09210dddf1ef3d37e57
+size 16394089
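Only the Git LFS pointer is committed here: `version` names the LFS pointer spec, `oid` is the SHA-256 of the real tokenizer.json, and `size` is its length in bytes (about 16.4 MB). The full fast-tokenizer file is pulled from LFS storage on checkout, which is exactly what the new .gitattributes rule above arranges.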
tokenizer_config.json ADDED
@@ -0,0 +1,11 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<|startoftext|>",
+  "eos_token": "<|endoftext|>",
+  "model_max_length": 1024,
+  "name_or_path": "/content/drive/MyDrive/Colab Notebooks/ChatbotProject/Review/Tokenizer/code-search-net-tokenizer",
+  "pad_token": "<|pad|>",
+  "special_tokens_map_file": null,
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>"
+}
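With merges.txt, vocab.json, tokenizer.json, tokenizer_config.json and special_tokens_map.json all present, the repository can be loaded directly with transformers. A minimal usage sketch; the repository id below is a placeholder, since the commit page does not show the repo name:

```python
from transformers import AutoTokenizer

# Placeholder repo id (or local clone path); substitute the real repository.
tokenizer = AutoTokenizer.from_pretrained("your-username/code-search-net-tokenizer")

# tokenizer_config.json selects GPT2Tokenizer (byte-level BPE over vocab.json
# and merges.txt) with model_max_length=1024 and add_prefix_space=false;
# tokenizer.json is the equivalent serialization used by the fast (Rust) backend.
ids = tokenizer("def add(a, b): return a + b")["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token)
```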
vocab.json ADDED
The diff for this file is too large to render. See raw diff.