Update README.md
Browse files
README.md
CHANGED
|
@@ -7,6 +7,8 @@
|
|
| 7 |
|
| 8 |
# Introduction
|
| 9 |
|
|
|
|
|
|
|
| 10 |
## C2LLM: Advanced Code Embeddings for Deep Semantic Understanding
|
| 11 |
|
| 12 |
**C2LLM (Code Contrastive Large Language Model)** is a powerful new model for generating code embeddings, designed to capture the deep semantics of source code.
|
|
@@ -34,6 +36,9 @@ model_path = "codefuse-ai/C2LLM-0.5B"
|
|
| 34 |
# Load the model
|
| 35 |
model = AutoModel.from_pretrained(model_path, torch_dtype=torch.bfloat16, trust_remote_code=True)
|
| 36 |
|
|
|
|
|
|
|
|
|
|
| 37 |
# Prepare the data
|
| 38 |
sentences = ['''int r = (int) params >> 8 & 0xff;
|
| 39 |
int p = (int) params & 0xff;
|
|
@@ -61,6 +66,8 @@ return new RangeInfo(inclusive ? tempTo : tempTo + 1, tempFrom + 1, true);
|
|
| 61 |
return new RangeInfo(tempFrom, inclusive ? tempTo + 1 : tempTo, false);
|
| 62 |
}''']
|
| 63 |
|
|
|
|
|
|
|
| 64 |
# Get the embeddings
|
| 65 |
embeddings = model.encode(sentences)
|
| 66 |
```
|
|
@@ -71,7 +78,10 @@ embeddings = model.encode(sentences)
|
|
| 71 |
from sentence_transformers import SentenceTransformer
|
| 72 |
|
| 73 |
# Load the model
|
| 74 |
-
model = SentenceTransformer("codefuse-ai/C2LLM-0.5B", trust_remote_code=True)
|
|
|
|
|
|
|
|
|
|
| 75 |
|
| 76 |
# Prepare the data
|
| 77 |
sentences = ['''int r = (int) params >> 8 & 0xff;
|
|
@@ -100,6 +110,8 @@ return new RangeInfo(inclusive ? tempTo : tempTo + 1, tempFrom + 1, true);
|
|
| 100 |
return new RangeInfo(tempFrom, inclusive ? tempTo + 1 : tempTo, false);
|
| 101 |
}''']
|
| 102 |
|
|
|
|
|
|
|
| 103 |
# Get the embeddings
|
| 104 |
embeddings = model.encode(sentences)
|
| 105 |
```
|
|
@@ -132,7 +144,6 @@ If you find this project helpful, please give it a star. It means a lot to us!
|
|
| 132 |
|
| 133 |
[CodeFuse-Embeddings](https://github.com/codefuse-ai/CodeFuse-Embeddings/tree/main)
|
| 134 |
|
| 135 |
-
|
| 136 |
## Correspondence to
|
| 137 |
|
| 138 |
Jin Qin (qj431428@antgroup.com), Zihan Liao (liaozihan.lzh@antgroup.com), Ziyin Zhang (zhangziying.zzy@antgroup.com), Hang Yu (hyu.hugo@antgroup.com), Peng Di (dipeng.dp@antgroup.com)
|
|
|
|
| 7 |
|
| 8 |
# Introduction
|
| 9 |
|
| 10 |
+
|
| 11 |
+
|
| 12 |
## C2LLM: Advanced Code Embeddings for Deep Semantic Understanding
|
| 13 |
|
| 14 |
**C2LLM (Code Contrastive Large Language Model)** is a powerful new model for generating code embeddings, designed to capture the deep semantics of source code.
|
|
|
|
| 36 |
# Load the model
|
| 37 |
model = AutoModel.from_pretrained(model_path, torch_dtype=torch.bfloat16, trust_remote_code=True)
|
| 38 |
|
| 39 |
+
# Prepare your custom instruction
|
| 40 |
+
instruction = "xxxxx"
|
| 41 |
+
|
| 42 |
# Prepare the data
|
| 43 |
sentences = ['''int r = (int) params >> 8 & 0xff;
|
| 44 |
int p = (int) params & 0xff;
|
|
|
|
| 66 |
return new RangeInfo(tempFrom, inclusive ? tempTo + 1 : tempTo, false);
|
| 67 |
}''']
|
| 68 |
|
| 69 |
+
sentences = [instruction+sentence for sentence in sentences]
|
| 70 |
+
|
| 71 |
# Get the embeddings
|
| 72 |
embeddings = model.encode(sentences)
|
| 73 |
```
|
|
|
|
| 78 |
from sentence_transformers import SentenceTransformer
|
| 79 |
|
| 80 |
# Load the model
|
| 81 |
+
model = SentenceTransformer("codefuse-ai/C2LLM-0.5B", trust_remote_code=True, tokenizer_kwargs={"padding_side":"left"})
|
| 82 |
+
|
| 83 |
+
# Prepare your custom instruction
|
| 84 |
+
instruction = "xxxxx"
|
| 85 |
|
| 86 |
# Prepare the data
|
| 87 |
sentences = ['''int r = (int) params >> 8 & 0xff;
|
|
|
|
| 110 |
return new RangeInfo(tempFrom, inclusive ? tempTo + 1 : tempTo, false);
|
| 111 |
}''']
|
| 112 |
|
| 113 |
+
sentences = [instruction+sentence for sentence in sentences]
|
| 114 |
+
|
| 115 |
# Get the embeddings
|
| 116 |
embeddings = model.encode(sentences)
|
| 117 |
```
|
|
|
|
| 144 |
|
| 145 |
[CodeFuse-Embeddings](https://github.com/codefuse-ai/CodeFuse-Embeddings/tree/main)
|
| 146 |
|
|
|
|
| 147 |
## Correspondence to
|
| 148 |
|
| 149 |
Jin Qin (qj431428@antgroup.com), Zihan Liao (liaozihan.lzh@antgroup.com), Ziyin Zhang (zhangziying.zzy@antgroup.com), Hang Yu (hyu.hugo@antgroup.com), Peng Di (dipeng.dp@antgroup.com)
|