nellopan committed
Commit 5a5a9b5 · verified · 1 Parent(s): c04fe16

Delete models--openai--clip-vit-large-patch14

Files changed (18)
  1. models--openai--clip-vit-large-patch14/.no_exist/32bd64288804d66eefd0ccbe215aa642df71cc41/adapter_config.json +0 -0
  2. models--openai--clip-vit-large-patch14/.no_exist/32bd64288804d66eefd0ccbe215aa642df71cc41/added_tokens.json +0 -0
  3. models--openai--clip-vit-large-patch14/.no_exist/32bd64288804d66eefd0ccbe215aa642df71cc41/processor_config.json +0 -0
  4. models--openai--clip-vit-large-patch14/blobs/2c19f6666e0e163c7954df66cb901353fcad088e +0 -171
  5. models--openai--clip-vit-large-patch14/blobs/4297ea6a8d2bae1fea8f48b45e257814dcb11f69 +0 -0
  6. models--openai--clip-vit-large-patch14/blobs/580c79c6862f31d1f9bd08dd1a415ba0d0502cd9 +0 -0
  7. models--openai--clip-vit-large-patch14/blobs/5a12a1eb250987a4eee0e3e7d7338c4b22724be1 +0 -19
  8. models--openai--clip-vit-large-patch14/blobs/702bb12920b291cade3706cf215c1604d2255d93 +0 -34
  9. models--openai--clip-vit-large-patch14/blobs/76e821f1b6f0a9709293c3b6b51ed90980b3166b +0 -0
  10. models--openai--clip-vit-large-patch14/blobs/9bfb42aa97dcd61e89f279ccaee988bccb4fabae +0 -1
  11. models--openai--clip-vit-large-patch14/refs/main +0 -1
  12. models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/config.json +0 -171
  13. models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/merges.txt +0 -0
  14. models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/preprocessor_config.json +0 -19
  15. models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/special_tokens_map.json +0 -1
  16. models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/tokenizer.json +0 -0
  17. models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/tokenizer_config.json +0 -34
  18. models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/vocab.json +0 -0
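The deleted tree is a standard huggingface_hub cache entry: content-addressed files under blobs/, a refs/main pointer, a snapshots/<commit>/ view of symlinks into blobs/, and .no_exist markers for files the Hub reported as absent. A minimal sketch of performing the same cleanup programmatically with huggingface_hub's cache utilities (the repo id and revision hash are taken from this commit; everything else is illustrative):

```python
from huggingface_hub import scan_cache_dir

# Inspect the local hub cache and locate the cached CLIP repo.
# scan_cache_dir() raises CacheNotFound if no cache directory exists.
cache_info = scan_cache_dir()
for repo in cache_info.repos:
    if repo.repo_id == "openai/clip-vit-large-patch14":
        print(repo.repo_id, repo.size_on_disk_str)

# Queue the cached revision for deletion (hash from refs/main below),
# then execute; this removes the snapshot and any now-unreferenced blobs.
strategy = cache_info.delete_revisions("32bd64288804d66eefd0ccbe215aa642df71cc41")
print("Will free:", strategy.expected_freed_size_str)
strategy.execute()
```
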
models--openai--clip-vit-large-patch14/.no_exist/32bd64288804d66eefd0ccbe215aa642df71cc41/adapter_config.json DELETED
File without changes
models--openai--clip-vit-large-patch14/.no_exist/32bd64288804d66eefd0ccbe215aa642df71cc41/added_tokens.json DELETED
File without changes
models--openai--clip-vit-large-patch14/.no_exist/32bd64288804d66eefd0ccbe215aa642df71cc41/processor_config.json DELETED
File without changes
models--openai--clip-vit-large-patch14/blobs/2c19f6666e0e163c7954df66cb901353fcad088e DELETED
@@ -1,171 +0,0 @@
-{
-  "_name_or_path": "clip-vit-large-patch14/",
-  "architectures": [
-    "CLIPModel"
-  ],
-  "initializer_factor": 1.0,
-  "logit_scale_init_value": 2.6592,
-  "model_type": "clip",
-  "projection_dim": 768,
-  "text_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": 0,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": 2,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 768,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 3072,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "max_position_embeddings": 77,
-    "min_length": 0,
-    "model_type": "clip_text_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 12,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 12,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": 1,
-    "prefix": null,
-    "problem_type": null,
-    "projection_dim": 768,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.16.0.dev0",
-    "use_bfloat16": false,
-    "vocab_size": 49408
-  },
-  "text_config_dict": {
-    "hidden_size": 768,
-    "intermediate_size": 3072,
-    "num_attention_heads": 12,
-    "num_hidden_layers": 12,
-    "projection_dim": 768
-  },
-  "torch_dtype": "float32",
-  "transformers_version": null,
-  "vision_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 1024,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "image_size": 224,
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 4096,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "min_length": 0,
-    "model_type": "clip_vision_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 16,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 24,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": null,
-    "patch_size": 14,
-    "prefix": null,
-    "problem_type": null,
-    "projection_dim": 768,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.16.0.dev0",
-    "use_bfloat16": false
-  },
-  "vision_config_dict": {
-    "hidden_size": 1024,
-    "intermediate_size": 4096,
-    "num_attention_heads": 16,
-    "num_hidden_layers": 24,
-    "patch_size": 14,
-    "projection_dim": 768
-  }
-}
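
This config.json fully specifies the CLIP ViT-L/14 architecture: a 12-layer, 768-wide text encoder and a 24-layer, 1024-wide vision transformer, both projected into a shared 768-dimensional embedding space. A quick sanity check of those fields via the standard transformers API (this fetches the config from the Hub rather than from the deleted cache copy above):

```python
from transformers import CLIPConfig

# Fetch the same config from the Hub and confirm the key dimensions
# recorded in the deleted cached copy.
cfg = CLIPConfig.from_pretrained("openai/clip-vit-large-patch14")
assert cfg.projection_dim == 768
assert cfg.text_config.hidden_size == 768 and cfg.text_config.num_hidden_layers == 12
assert cfg.vision_config.hidden_size == 1024 and cfg.vision_config.num_hidden_layers == 24
assert cfg.vision_config.patch_size == 14 and cfg.vision_config.image_size == 224
# 224 / 14 = 16 patches per side, i.e. 256 image tokens plus the class token.
print(cfg.vision_config.image_size // cfg.vision_config.patch_size, "patches per side")
```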
 
models--openai--clip-vit-large-patch14/blobs/4297ea6a8d2bae1fea8f48b45e257814dcb11f69 DELETED
The diff for this file is too large to render. See raw diff
 
models--openai--clip-vit-large-patch14/blobs/580c79c6862f31d1f9bd08dd1a415ba0d0502cd9 DELETED
The diff for this file is too large to render. See raw diff
 
models--openai--clip-vit-large-patch14/blobs/5a12a1eb250987a4eee0e3e7d7338c4b22724be1 DELETED
@@ -1,19 +0,0 @@
-{
-  "crop_size": 224,
-  "do_center_crop": true,
-  "do_normalize": true,
-  "do_resize": true,
-  "feature_extractor_type": "CLIPFeatureExtractor",
-  "image_mean": [
-    0.48145466,
-    0.4578275,
-    0.40821073
-  ],
-  "image_std": [
-    0.26862954,
-    0.26130258,
-    0.27577711
-  ],
-  "resample": 3,
-  "size": 224
-}
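
A hedged sketch of rebuilding this preprocessing pipeline from the literal values in the deleted preprocessor_config.json, using the current CLIPImageProcessor class (the config names the older CLIPFeatureExtractor; resample=3 is PIL's bicubic filter; assumes torch is installed for return_tensors="pt"):

```python
from PIL import Image
from transformers import CLIPImageProcessor

# Values copied verbatim from the deleted preprocessor_config.json.
processor = CLIPImageProcessor(
    do_resize=True,
    size=224,              # resize shortest edge to 224
    resample=3,            # PIL.Image.BICUBIC
    do_center_crop=True,
    crop_size=224,
    do_normalize=True,
    image_mean=[0.48145466, 0.4578275, 0.40821073],
    image_std=[0.26862954, 0.26130258, 0.27577711],
)
batch = processor(images=Image.new("RGB", (640, 480)), return_tensors="pt")
print(batch.pixel_values.shape)  # torch.Size([1, 3, 224, 224])
```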
 
models--openai--clip-vit-large-patch14/blobs/702bb12920b291cade3706cf215c1604d2255d93 DELETED
@@ -1,34 +0,0 @@
-{
-  "unk_token": {
-    "content": "<|endoftext|>",
-    "single_word": false,
-    "lstrip": false,
-    "rstrip": false,
-    "normalized": true,
-    "__type": "AddedToken"
-  },
-  "bos_token": {
-    "content": "<|startoftext|>",
-    "single_word": false,
-    "lstrip": false,
-    "rstrip": false,
-    "normalized": true,
-    "__type": "AddedToken"
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "single_word": false,
-    "lstrip": false,
-    "rstrip": false,
-    "normalized": true,
-    "__type": "AddedToken"
-  },
-  "pad_token": "<|endoftext|>",
-  "add_prefix_space": false,
-  "errors": "replace",
-  "do_lower_case": true,
-  "name_or_path": "openai/clip-vit-base-patch32",
-  "model_max_length": 77,
-  "special_tokens_map_file": "./special_tokens_map.json",
-  "tokenizer_class": "CLIPTokenizer"
-}
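
This tokenizer_config.json pins CLIP's special tokens and the 77-token context window (note the name_or_path still references clip-vit-base-patch32; the tokenizer is shared across CLIP variants). Reloading the same setup from the Hub with the standard transformers API:

```python
from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")
# padding="max_length" pads to model_max_length, i.e. the 77-token window.
enc = tokenizer("a photo of a cat", padding="max_length", truncation=True)
print(tokenizer.bos_token, tokenizer.eos_token)  # <|startoftext|> <|endoftext|>
print(tokenizer.model_max_length)                # 77
print(len(enc.input_ids))                        # 77
```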
 
models--openai--clip-vit-large-patch14/blobs/76e821f1b6f0a9709293c3b6b51ed90980b3166b DELETED
The diff for this file is too large to render. See raw diff
 
models--openai--clip-vit-large-patch14/blobs/9bfb42aa97dcd61e89f279ccaee988bccb4fabae DELETED
@@ -1 +0,0 @@
-{"bos_token": {"content": "<|startoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "pad_token": "<|endoftext|>"}
 
models--openai--clip-vit-large-patch14/refs/main DELETED
@@ -1 +0,0 @@
-32bd64288804d66eefd0ccbe215aa642df71cc41
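
refs/main stores the commit hash that the "main" revision resolves to; the matching snapshots/<hash>/ directory holds symlinks into blobs/. A minimal sketch of that resolution, assuming the default cache location (the paths below are illustrative and only resolve while the cached copy still exists):

```python
from pathlib import Path

# Default huggingface_hub cache root; adjust if HF_HOME is set differently.
repo = Path.home() / ".cache/huggingface/hub/models--openai--clip-vit-large-patch14"
commit = (repo / "refs" / "main").read_text().strip()
snapshot = repo / "snapshots" / commit
# Files under snapshots/<commit>/ are symlinks into blobs/, keyed by content hash.
print(snapshot / "config.json")
```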
 
models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/config.json DELETED
@@ -1,171 +0,0 @@
-{
-  "_name_or_path": "clip-vit-large-patch14/",
-  "architectures": [
-    "CLIPModel"
-  ],
-  "initializer_factor": 1.0,
-  "logit_scale_init_value": 2.6592,
-  "model_type": "clip",
-  "projection_dim": 768,
-  "text_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": 0,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": 2,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 768,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 3072,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "max_position_embeddings": 77,
-    "min_length": 0,
-    "model_type": "clip_text_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 12,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 12,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": 1,
-    "prefix": null,
-    "problem_type": null,
-    "projection_dim": 768,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.16.0.dev0",
-    "use_bfloat16": false,
-    "vocab_size": 49408
-  },
-  "text_config_dict": {
-    "hidden_size": 768,
-    "intermediate_size": 3072,
-    "num_attention_heads": 12,
-    "num_hidden_layers": 12,
-    "projection_dim": 768
-  },
-  "torch_dtype": "float32",
-  "transformers_version": null,
-  "vision_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 1024,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "image_size": 224,
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 4096,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "min_length": 0,
-    "model_type": "clip_vision_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 16,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 24,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": null,
-    "patch_size": 14,
-    "prefix": null,
-    "problem_type": null,
-    "projection_dim": 768,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.16.0.dev0",
-    "use_bfloat16": false
-  },
-  "vision_config_dict": {
-    "hidden_size": 1024,
-    "intermediate_size": 4096,
-    "num_attention_heads": 16,
-    "num_hidden_layers": 24,
-    "patch_size": 14,
-    "projection_dim": 768
-  }
-}
 
models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/preprocessor_config.json DELETED
@@ -1,19 +0,0 @@
-{
-  "crop_size": 224,
-  "do_center_crop": true,
-  "do_normalize": true,
-  "do_resize": true,
-  "feature_extractor_type": "CLIPFeatureExtractor",
-  "image_mean": [
-    0.48145466,
-    0.4578275,
-    0.40821073
-  ],
-  "image_std": [
-    0.26862954,
-    0.26130258,
-    0.27577711
-  ],
-  "resample": 3,
-  "size": 224
-}
 
models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/special_tokens_map.json DELETED
@@ -1 +0,0 @@
-{"bos_token": {"content": "<|startoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "pad_token": "<|endoftext|>"}
 
models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/tokenizer_config.json DELETED
@@ -1,34 +0,0 @@
-{
-  "unk_token": {
-    "content": "<|endoftext|>",
-    "single_word": false,
-    "lstrip": false,
-    "rstrip": false,
-    "normalized": true,
-    "__type": "AddedToken"
-  },
-  "bos_token": {
-    "content": "<|startoftext|>",
-    "single_word": false,
-    "lstrip": false,
-    "rstrip": false,
-    "normalized": true,
-    "__type": "AddedToken"
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "single_word": false,
-    "lstrip": false,
-    "rstrip": false,
-    "normalized": true,
-    "__type": "AddedToken"
-  },
-  "pad_token": "<|endoftext|>",
-  "add_prefix_space": false,
-  "errors": "replace",
-  "do_lower_case": true,
-  "name_or_path": "openai/clip-vit-base-patch32",
-  "model_max_length": 77,
-  "special_tokens_map_file": "./special_tokens_map.json",
-  "tokenizer_class": "CLIPTokenizer"
-}
 
models--openai--clip-vit-large-patch14/snapshots/32bd64288804d66eefd0ccbe215aa642df71cc41/vocab.json DELETED
The diff for this file is too large to render. See raw diff