YuPeng0214 commited on
Commit
8eac178
·
verified ·
1 Parent(s): 5af741b

Upload README_zh.md with huggingface_hub

Browse files
Files changed (1) hide show
  1. README_zh.md +6 -5
README_zh.md CHANGED
@@ -97,6 +97,7 @@ $$
97
  - Accelerate: 1.3.0
98
  - Datasets: 3.2.0
99
  - Tokenizers: 0.21.2
 
100
  #### 模型加载参数
101
  torch_dtype=torch.bfloat16<br>
102
  attn_implementation='sdpa'<br>
@@ -134,11 +135,11 @@ python3 ./run_mteb_all_v2.py \
134
  ```
135
  from sentence_transformers import SentenceTransformer
136
 
137
- model = SentenceTransformer("QZhou-Embedding")
138
 
139
  model = SentenceTransformer(
140
- "QZhou-Embedding",
141
- model_kwargs={"device_map": "auto", "trust_remote_code": True},
142
  tokenizer_kwargs={"padding_side": "left", "trust_remote_code": True},
143
  trust_remote_code=True
144
  )
@@ -196,8 +197,8 @@ documents = [
196
 
197
  input_texts = queries + documents
198
 
199
- tokenizer = AutoTokenizer.from_pretrained('QZhou-Embedding', padding_side='left', trust_remote_code=True)
200
- model = AutoModel.from_pretrained('QZhou-Embedding', trust_remote_code=True, device_map='auto')
201
 
202
  batch_dict = tokenizer(
203
  input_texts,
 
97
  - Accelerate: 1.3.0
98
  - Datasets: 3.2.0
99
  - Tokenizers: 0.21.2
100
+ - mteb: 1.38.30
101
  #### 模型加载参数
102
  torch_dtype=torch.bfloat16<br>
103
  attn_implementation='sdpa'<br>
 
135
  ```
136
  from sentence_transformers import SentenceTransformer
137
 
138
+ model = SentenceTransformer("Kingsoft-LLM/QZhou-Embedding")
139
 
140
  model = SentenceTransformer(
141
+ "Kingsoft-LLM/QZhou-Embedding",
142
+ model_kwargs={"device_map": "cuda", "trust_remote_code": True},
143
  tokenizer_kwargs={"padding_side": "left", "trust_remote_code": True},
144
  trust_remote_code=True
145
  )
 
197
 
198
  input_texts = queries + documents
199
 
200
+ tokenizer = AutoTokenizer.from_pretrained('Kingsoft-LLM/QZhou-Embedding', padding_side='left', trust_remote_code=True)
201
+ model = AutoModel.from_pretrained('Kingsoft-LLM/QZhou-Embedding', trust_remote_code=True, device_map='cuda')
202
 
203
  batch_dict = tokenizer(
204
  input_texts,