---
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- dense
- generated_from_trainer
- dataset_size:2392064
- loss:CachedMultipleNegativesSymmetricRankingLoss
base_model: Shuu12121/CodeModernBERT-Crow-v1.1
widget:
- source_sentence: |-
Encapsulates the work with test rules.
@param {array} aRules The test rules
@constructor
@private
sentences:
- |-
createImageResizer = (width, height) => (source) => {
const resized = new PNG({ width, height, fill: true });
PNG.bitblt(source, resized, 0, 0, source.width, source.height, 0, 0);
return resized;
}
- "TestRules = function (aRules) {\n\t\t\tthis._aRules = aRules;\n\t\t}"
- |-
function addEventTypeNameToConfig(_ref, isInteractive) {
var topEvent = _ref[0],
event = _ref[1];
var capitalizedEvent = event[0].toUpperCase() + event.slice(1);
var onEvent = 'on' + capitalizedEvent;
var type = {
phasedRegistrationNames: {
bubbled: onEvent,
captured: onEvent + 'Capture'
},
dependencies: [topEvent],
isInteractive: isInteractive
};
eventTypes$4[event] = type;
topLevelEventsToDispatchConfig[topEvent] = type;
}
- source_sentence: >-
Check if a value has one or more properties and that value is not
undefined.
@param {any} obj The value to check.
@returns {boolean} `true` if `obj` has one or more properties that value
is not undefined.
sentences:
- |-
calci = function(hashbuf, sig, pubkey) {
for (var i = 0; i < 4; i++) {
var Qprime;
try {
Qprime = getPublicKey(hashbuf, sig, i);
} catch (e) {
console.error(e);
continue;
}
if (Qprime.point.eq(pubkey.point)) {
sig.i = i;
sig.compressed = pubkey.compressed;
return sig;
}
}
throw new Error('Unable to find valid recovery factor');
}
- "function hasDefinedProperty(obj) {\n\tif (typeof obj === \"object\" && obj !== null) {\n\t\tfor (const key in obj) {\n\t\t\tif (typeof obj[key] !== \"undefined\") {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n}"
- >-
function joinSequenceDiffsByShifting(sequence1, sequence2,
sequenceDiffs) {
if (sequenceDiffs.length === 0) {
return sequenceDiffs;
}
const result = [];
result.push(sequenceDiffs[0]);
// First move them all to the left as much as possible and join them if possible
for (let i = 1; i < sequenceDiffs.length; i++) {
const prevResult = result[result.length - 1];
let cur = sequenceDiffs[i];
if (cur.seq1Range.isEmpty || cur.seq2Range.isEmpty) {
const length = cur.seq1Range.start - prevResult.seq1Range.endExclusive;
let d;
for (d = 1; d <= length; d++) {
if (sequence1.getElement(cur.seq1Range.start - d) !== sequence1.getElement(cur.seq1Range.endExclusive - d) ||
sequence2.getElement(cur.seq2Range.start - d) !== sequence2.getElement(cur.seq2Range.endExclusive - d)) {
break;
}
}
d--;
if (d === length) {
// Merge previous and current diff
result[result.length - 1] = new SequenceDiff(new OffsetRange(prevResult.seq1Range.start, cur.seq1Range.endExclusive - length), new OffsetRange(prevResult.seq2Range.start, cur.seq2Range.endExclusive - length));
continue;
}
cur = cur.delta(-d);
}
result.push(cur);
}
const result2 = [];
// Then move them all to the right and join them again if possible
for (let i = 0; i < result.length - 1; i++) {
const nextResult = result[i + 1];
let cur = result[i];
if (cur.seq1Range.isEmpty || cur.seq2Range.isEmpty) {
const length = nextResult.seq1Range.start - cur.seq1Range.endExclusive;
let d;
for (d = 0; d < length; d++) {
if (!sequence1.isStronglyEqual(cur.seq1Range.start + d, cur.seq1Range.endExclusive + d) ||
!sequence2.isStronglyEqual(cur.seq2Range.start + d, cur.seq2Range.endExclusive + d)) {
break;
}
}
if (d === length) {
// Merge previous and current diff, write to result!
result[i + 1] = new SequenceDiff(new OffsetRange(cur.seq1Range.start + length, nextResult.seq1Range.endExclusive), new OffsetRange(cur.seq2Range.start + length, nextResult.seq2Range.endExclusive));
continue;
}
if (d > 0) {
cur = cur.delta(d);
}
}
result2.push(cur);
}
if (result.length > 0) {
result2.push(result[result.length - 1]);
}
return result2;
}
- source_sentence: |-
Adds two vec2's after scaling the second operand by a scalar value
@param {vec2} out the receiving vector
@param {ReadonlyVec2} a the first operand
@param {ReadonlyVec2} b the second operand
@param {Number} scale the amount to scale b by before adding
@returns {vec2} out
sentences:
- |-
async forceStripeSubscriptionToProduct(data, options) {
if (!this._stripeAPIService.configured) {
throw new DataImportError({
message: tpl(messages.noStripeConnection, {action: 'force subscription to product'})
});
}
// Retrieve customer's existing subscription information
const stripeCustomer = await this._stripeAPIService.getCustomer(data.customer_id);
// Subscription can only be forced if the customer exists
if (!stripeCustomer) {
throw new DataImportError({message: tpl(messages.forceNoCustomer)});
}
// Subscription can only be forced if the customer has an existing subscription
if (stripeCustomer.subscriptions.data.length === 0) {
throw new DataImportError({message: tpl(messages.forceNoExistingSubscription)});
}
// Subscription can only be forced if the customer does not have multiple subscriptions
if (stripeCustomer.subscriptions.data.length > 1) {
throw new DataImportError({message: tpl(messages.forceTooManySubscriptions)});
}
const stripeSubscription = stripeCustomer.subscriptions.data[0];
// Subscription can only be forced if the existing subscription does not have multiple items
if (stripeSubscription.items.data.length > 1) {
throw new DataImportError({message: tpl(messages.forceTooManySubscriptionItems)});
}
const stripeSubscriptionItem = stripeSubscription.items.data[0];
const stripeSubscriptionItemPrice = stripeSubscriptionItem.price;
const stripeSubscriptionItemPriceCurrency = stripeSubscriptionItemPrice.currency;
const stripeSubscriptionItemPriceAmount = stripeSubscriptionItemPrice.unit_amount;
const stripeSubscriptionItemPriceType = stripeSubscriptionItemPrice.type;
const stripeSubscriptionItemPriceInterval = stripeSubscriptionItemPrice.recurring?.interval || null;
// Subscription can only be forced if the existing subscription has a recurring interval
if (!stripeSubscriptionItemPriceInterval) {
throw new DataImportError({message: tpl(messages.forceExistingSubscriptionNotRecurring)});
}
// Retrieve Ghost product
let ghostProduct = await this._productRepository.get(
{id: data.product_id},
{...options, withRelated: ['stripePrices', 'stripeProducts']}
);
if (!ghostProduct) {
throw new DataImportError({message: tpl(messages.productNotFound, {id: data.product_id})});
}
// If there is not a Stripe product associated with the Ghost product, ensure one is created before continuing
if (!ghostProduct.related('stripeProducts').first()) {
// Even though we are not updating any information on the product, calling `ProductRepository.update`
// will ensure that the product gets created in Stripe
ghostProduct = await this._productRepository.update({
id: data.product_id,
name: ghostProduct.get('name'),
// Providing the pricing details will ensure the relevant prices for the Ghost product are created
// on the Stripe product
monthly_price: {
amount: ghostProduct.get('monthly_price'),
currency: ghostProduct.get('currency')
},
yearly_price: {
amount: ghostProduct.get('yearly_price'),
currency: ghostProduct.get('currency')
}
}, options);
}
// Find price on Ghost product matching stripe subscription item price details
const ghostProductPrice = ghostProduct.related('stripePrices').find((price) => {
return price.get('currency') === stripeSubscriptionItemPriceCurrency &&
price.get('amount') === stripeSubscriptionItemPriceAmount &&
price.get('type') === stripeSubscriptionItemPriceType &&
price.get('interval') === stripeSubscriptionItemPriceInterval;
});
let stripePriceId;
let isNewStripePrice = false;
if (!ghostProductPrice) {
// If there is not a matching price, create one on the associated Stripe product using the existing
// subscription item price details and update the stripe subscription to use it
const stripeProduct = ghostProduct.related('stripeProducts').first();
const newStripePrice = await this._stripeAPIService.createPrice({
product: stripeProduct.get('stripe_product_id'),
active: true,
nickname: stripeSubscriptionItemPriceInterval === 'month' ? 'Monthly' : 'Yearly',
currency: stripeSubscriptionItemPriceCurrency,
amount: stripeSubscriptionItemPriceAmount,
type: stripeSubscriptionItemPriceType,
interval: stripeSubscriptionItemPriceInterval
});
await this._stripeAPIService.updateSubscriptionItemPrice(
stripeSubscription.id,
stripeSubscriptionItem.id,
newStripePrice.id,
{prorationBehavior: 'none'}
);
stripePriceId = newStripePrice.id;
isNewStripePrice = true;
} else {
// If there is a matching price, and the subscription is not already using it,
// update the subscription to use it
stripePriceId = ghostProductPrice.get('stripe_price_id');
if (stripeSubscriptionItem.price.id !== stripePriceId) {
await this._stripeAPIService.updateSubscriptionItemPrice(
stripeSubscription.id,
stripeSubscriptionItem.id,
stripePriceId,
{prorationBehavior: 'none'}
);
}
}
// If there is a matching price, and the subscription is already using it, nothing else needs to be done
return {
stripePriceId,
isNewStripePrice
};
}
- |-
getPrefetchedVariantTrack() {
if (!this.prefetchedVariant_) {
return null;
}
return shaka.util.StreamUtils.variantToTrack(this.prefetchedVariant_);
}
- |-
function scaleAndAdd(out, a, b, scale) {
out[0] = a[0] + b[0] * scale;
out[1] = a[1] + b[1] * scale;
return out;
}
- source_sentence: '@returns Has this player been spotted by any others?'
sentences:
- "function includes7d( x, value ) {\n\tvar xbuf;\n\tvar dx0;\n\tvar dx1;\n\tvar dx2;\n\tvar dx3;\n\tvar dx4;\n\tvar dx5;\n\tvar dx6;\n\tvar sh;\n\tvar S0;\n\tvar S1;\n\tvar S2;\n\tvar S3;\n\tvar S4;\n\tvar S5;\n\tvar S6;\n\tvar sx;\n\tvar ix;\n\tvar i0;\n\tvar i1;\n\tvar i2;\n\tvar i3;\n\tvar i4;\n\tvar i5;\n\tvar i6;\n\n\t// Note on variable naming convention: S#, dx#, dy#, i# where # corresponds to the loop number, with `0` being the innermost loop...\n\n\t// Extract loop variables for purposes of loop interchange: dimensions and loop offset (pointer) increments...\n\tsh = x.shape;\n\tsx = x.strides;\n\tif ( strides2order( sx ) === 1 ) {\n\t\t// For row-major ndarrays, the last dimensions have the fastest changing indices...\n\t\tS0 = sh[ 6 ];\n\t\tS1 = sh[ 5 ];\n\t\tS2 = sh[ 4 ];\n\t\tS3 = sh[ 3 ];\n\t\tS4 = sh[ 2 ];\n\t\tS5 = sh[ 1 ];\n\t\tS6 = sh[ 0 ];\n\t\tdx0 = sx[ 6 ]; // offset increment for innermost loop\n\t\tdx1 = sx[ 5 ] - ( S0*sx[6] );\n\t\tdx2 = sx[ 4 ] - ( S1*sx[5] );\n\t\tdx3 = sx[ 3 ] - ( S2*sx[4] );\n\t\tdx4 = sx[ 2 ] - ( S3*sx[3] );\n\t\tdx5 = sx[ 1 ] - ( S4*sx[2] );\n\t\tdx6 = sx[ 0 ] - ( S5*sx[1] ); // offset increment for outermost loop\n\t} else { // order === 'column-major'\n\t\t// For column-major ndarrays, the first dimensions have the fastest changing indices...\n\t\tS0 = sh[ 0 ];\n\t\tS1 = sh[ 1 ];\n\t\tS2 = sh[ 2 ];\n\t\tS3 = sh[ 3 ];\n\t\tS4 = sh[ 4 ];\n\t\tS5 = sh[ 5 ];\n\t\tS6 = sh[ 6 ];\n\t\tdx0 = sx[ 0 ]; // offset increment for innermost loop\n\t\tdx1 = sx[ 1 ] - ( S0*sx[0] );\n\t\tdx2 = sx[ 2 ] - ( S1*sx[1] );\n\t\tdx3 = sx[ 3 ] - ( S2*sx[2] );\n\t\tdx4 = sx[ 4 ] - ( S3*sx[3] );\n\t\tdx5 = sx[ 5 ] - ( S4*sx[4] );\n\t\tdx6 = sx[ 6 ] - ( S5*sx[5] ); // offset increment for outermost loop\n\t}\n\t// Set a pointer to the first indexed element:\n\tix = x.offset;\n\n\t// Cache a reference to the input ndarray buffer:\n\txbuf = x.data;\n\n\t// Iterate over the ndarray dimensions...\n\tfor ( i6 = 0; i6 < S6; i6++ ) {\n\t\tfor ( i5 = 0; i5 < S5; i5++ ) {\n\t\t\tfor ( i4 = 0; i4 < S4; i4++ ) {\n\t\t\t\tfor ( i3 = 0; i3 < S3; i3++ ) {\n\t\t\t\t\tfor ( i2 = 0; i2 < S2; i2++ ) {\n\t\t\t\t\t\tfor ( i1 = 0; i1 < S1; i1++ ) {\n\t\t\t\t\t\t\tfor ( i0 = 0; i0 < S0; i0++ ) {\n\t\t\t\t\t\t\t\tif ( xbuf[ ix ] === value ) {\n\t\t\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tix += dx0;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tix += dx1;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tix += dx2;\n\t\t\t\t\t}\n\t\t\t\t\tix += dx3;\n\t\t\t\t}\n\t\t\t\tix += dx4;\n\t\t\t}\n\t\t\tix += dx5;\n\t\t}\n\t\tix += dx6;\n\t}\n\treturn false;\n}"
- >-
_generateIntegrityFile(lockfile, patterns, flags, workspaceLayout,
artifacts) {
var _this3 = this;
return (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* () {
const result = (0, (_extends2 || _load_extends()).default)({}, INTEGRITY_FILE_DEFAULTS(), {
artifacts
});
result.topLevelPatterns = patterns;
// If using workspaces, we also need to add the workspaces patterns to the top-level, so that we'll know if a
// dependency is added or removed into one of them. We must take care not to read the aggregator (if !loc).
//
// Also note that we can't use of workspaceLayout.workspaces[].manifest._reference.patterns, because when
// doing a "yarn check", the _reference property hasn't yet been properly initialized.
if (workspaceLayout) {
result.topLevelPatterns = result.topLevelPatterns.filter(function (p) {
// $FlowFixMe
return !workspaceLayout.getManifestByPattern(p);
});
for (var _iterator4 = Object.keys(workspaceLayout.workspaces), _isArray4 = Array.isArray(_iterator4), _i4 = 0, _iterator4 = _isArray4 ? _iterator4 : _iterator4[Symbol.iterator]();;) {
var _ref5;
if (_isArray4) {
if (_i4 >= _iterator4.length) break;
_ref5 = _iterator4[_i4++];
} else {
_i4 = _iterator4.next();
if (_i4.done) break;
_ref5 = _i4.value;
}
const name = _ref5;
if (!workspaceLayout.workspaces[name].loc) {
continue;
}
const manifest = workspaceLayout.workspaces[name].manifest;
if (manifest) {
for (var _iterator5 = (_constants || _load_constants()).DEPENDENCY_TYPES, _isArray5 = Array.isArray(_iterator5), _i5 = 0, _iterator5 = _isArray5 ? _iterator5 : _iterator5[Symbol.iterator]();;) {
var _ref6;
if (_isArray5) {
if (_i5 >= _iterator5.length) break;
_ref6 = _iterator5[_i5++];
} else {
_i5 = _iterator5.next();
if (_i5.done) break;
_ref6 = _i5.value;
}
const dependencyType = _ref6;
const dependencies = manifest[dependencyType];
if (!dependencies) {
continue;
}
for (var _iterator6 = Object.keys(dependencies), _isArray6 = Array.isArray(_iterator6), _i6 = 0, _iterator6 = _isArray6 ? _iterator6 : _iterator6[Symbol.iterator]();;) {
var _ref7;
if (_isArray6) {
if (_i6 >= _iterator6.length) break;
_ref7 = _iterator6[_i6++];
} else {
_i6 = _iterator6.next();
if (_i6.done) break;
_ref7 = _i6.value;
}
const dep = _ref7;
result.topLevelPatterns.push(`${dep}@${dependencies[dep]}`);
}
}
}
}
}
result.topLevelPatterns.sort((_misc || _load_misc()).sortAlpha);
if (flags.checkFiles) {
result.flags.push('checkFiles');
}
if (flags.flat) {
result.flags.push('flat');
}
if (_this3.config.ignoreScripts) {
result.flags.push('ignoreScripts');
}
if (_this3.config.focus) {
result.flags.push('focus: ' + _this3.config.focusedWorkspaceName);
}
if (_this3.config.production) {
result.flags.push('production');
}
if (_this3.config.plugnplayEnabled) {
result.flags.push('plugnplay');
}
const linkedModules = _this3.config.linkedModules;
if (linkedModules.length) {
result.linkedModules = linkedModules.sort((_misc || _load_misc()).sortAlpha);
}
for (var _iterator7 = Object.keys(lockfile), _isArray7 = Array.isArray(_iterator7), _i7 = 0, _iterator7 = _isArray7 ? _iterator7 : _iterator7[Symbol.iterator]();;) {
var _ref8;
if (_isArray7) {
if (_i7 >= _iterator7.length) break;
_ref8 = _iterator7[_i7++];
} else {
_i7 = _iterator7.next();
if (_i7.done) break;
_ref8 = _i7.value;
}
const key = _ref8;
result.lockfileEntries[key] = lockfile[key].resolved || '';
}
for (var _iterator8 = _this3._getModulesFolders({ workspaceLayout }), _isArray8 = Array.isArray(_iterator8), _i8 = 0, _iterator8 = _isArray8 ? _iterator8 : _iterator8[Symbol.iterator]();;) {
var _ref9;
if (_isArray8) {
if (_i8 >= _iterator8.length) break;
_ref9 = _iterator8[_i8++];
} else {
_i8 = _iterator8.next();
if (_i8.done) break;
_ref9 = _i8.value;
}
const modulesFolder = _ref9;
if (yield (_fs || _load_fs()).exists(modulesFolder)) {
result.modulesFolders.push(path.relative(_this3.config.lockfileFolder, modulesFolder));
}
}
if (flags.checkFiles) {
const modulesRoot = _this3._getModulesRootFolder();
result.files = (yield _this3._getIntegrityListing({ workspaceLayout })).map(function (entry) {
return path.relative(modulesRoot, entry);
}).sort((_misc || _load_misc()).sortAlpha);
}
return result;
})();
}
- |-
get isSpotted() {
return this.getProp("DT_BaseEntity", "m_bSpotted");
}
- source_sentence: >-
The toggle content, if left empty it will render the default toggle (seen
above).
sentences:
- "update = () => {\n\n\t const timerId = window.requestAnimationFrame( update );\n\t const elapsed = performance.now() - timestamp;\n\t const progress = elapsed / duration;\n\t const opacity = 1.0 - progress > 0 ? 1.0 - progress : 0;\n\t const radius = progress * canvasWidth * 0.5 / dpr;\n\n\t context.clearRect( 0, 0, canvasWidth, canvasHeight );\n\t context.beginPath();\n\t context.arc( x, y, radius, 0, Math.PI * 2 );\n\t context.fillStyle = `rgba(${color.r * 255}, ${color.g * 255}, ${color.b * 255}, ${opacity})`;\n\t context.fill();\n\t context.closePath();\n\n\t if ( progress >= 1.0 ) {\n\n\t window.cancelAnimationFrame( timerId );\n\t this.updateCanvasArcByProgress( 0 );\n\n\t /**\n\t * Reticle ripple end event\n\t * @type {object}\n\t * @event Reticle#reticle-ripple-end\n\t */\n\t this.dispatchEvent( { type: 'reticle-ripple-end' } );\n\n\t }\n\n\t material.map.needsUpdate = true;\n\n\t }"
- |-
static _headersDictToHeadersArray(headersDict) {
const result = [];
for (const name of Object.keys(headersDict)) {
const values = headersDict[name].split('\n');
for (let i = 0; i < values.length; ++i) {
result.push({name: name, value: values[i]});
}
}
return result;
}
- "function NavbarToggle() {\n\t (0, _classCallCheck3['default'])(this, NavbarToggle);\n\t return (0, _possibleConstructorReturn3['default'])(this, _React$Component.apply(this, arguments));\n\t }"
pipeline_tag: sentence-similarity
library_name: sentence-transformers
---

# SentenceTransformer based on Shuu12121/CodeModernBERT-Crow-v1.1

This is a [sentence-transformers](https://www.sbert.net) model finetuned from [Shuu12121/CodeModernBERT-Crow-v1.1](https://huggingface.co/Shuu12121/CodeModernBERT-Crow-v1.1). It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
## Model Details

### Model Description

- **Model Type:** Sentence Transformer
- **Base model:** [Shuu12121/CodeModernBERT-Crow-v1.1](https://huggingface.co/Shuu12121/CodeModernBERT-Crow-v1.1)
- **Maximum Sequence Length:** 1024 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 1024, 'do_lower_case': False, 'architecture': 'ModernBertModel'})
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```
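
For reference, the same two-module stack can be assembled by hand with the `models` API. This is a minimal sketch, not how the checkpoint itself was built; it assumes access to the base model on the Hub:

```python
# Minimal sketch: the Transformer + CLS-pooling stack shown above,
# assembled manually with the sentence-transformers `models` API.
from sentence_transformers import SentenceTransformer, models

# ModernBERT encoder, truncating inputs at 1024 tokens
word_embedding = models.Transformer(
    "Shuu12121/CodeModernBERT-Crow-v1.1", max_seq_length=1024
)
# CLS pooling: the 768-dim [CLS] token embedding becomes the sentence vector
pooling = models.Pooling(
    word_embedding.get_word_embedding_dimension(), pooling_mode="cls"
)
model = SentenceTransformer(modules=[word_embedding, pooling])
```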
## Usage

### Direct Usage (Sentence Transformers)
First install the Sentence Transformers library:
```bash
pip install -U sentence-transformers
```
Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("sentence_transformers_model_id")
# Run inference
sentences = [
    'The toggle content, if left empty it will render the default toggle (seen above).',
    "function NavbarToggle() {\n\t (0, _classCallCheck3['default'])(this, NavbarToggle);\n\t return (0, _possibleConstructorReturn3['default'])(this, _React$Component.apply(this, arguments));\n\t }",
    "update = () => {\n\n\t const timerId = window.requestAnimationFrame( update );\n\t const elapsed = performance.now() - timestamp;\n\t const progress = elapsed / duration;\n\t const opacity = 1.0 - progress > 0 ? 1.0 - progress : 0;\n\t const radius = progress * canvasWidth * 0.5 / dpr;\n\n\t context.clearRect( 0, 0, canvasWidth, canvasHeight );\n\t context.beginPath();\n\t context.arc( x, y, radius, 0, Math.PI * 2 );\n\t context.fillStyle = `rgba(${color.r * 255}, ${color.g * 255}, ${color.b * 255}, ${opacity})`;\n\t context.fill();\n\t context.closePath();\n\n\t if ( progress >= 1.0 ) {\n\n\t window.cancelAnimationFrame( timerId );\n\t this.updateCanvasArcByProgress( 0 );\n\n\t /**\n\t * Reticle ripple end event\n\t * @type {object}\n\t * @event Reticle#reticle-ripple-end\n\t */\n\t this.dispatchEvent( { type: 'reticle-ripple-end' } );\n\n\t }\n\n\t material.map.needsUpdate = true;\n\n\t }",
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities)
# tensor([[ 1.0000,  0.6778, -0.0447],
#         [ 0.6778,  1.0000,  0.0303],
#         [-0.0447,  0.0303,  1.0000]])
```
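
Since the training pairs are docstrings matched to code, a natural downstream use is semantic code search: embed a natural-language query and a pool of code snippets, then rank by cosine similarity. A small sketch, where the query and candidate snippets are taken from the widget examples above and `sentence_transformers_model_id` is the same placeholder used earlier:

```python
# Sketch: docstring-to-code retrieval with this model.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence_transformers_model_id")

query = "Adds two vec2's after scaling the second operand by a scalar value"
candidates = [
    "function scaleAndAdd(out, a, b, scale) { out[0] = a[0] + b[0] * scale; out[1] = a[1] + b[1] * scale; return out; }",
    "function hasDefinedProperty(obj) { for (const key in obj) { if (obj[key] !== undefined) return true; } return false; }",
]

query_embedding = model.encode([query])
candidate_embeddings = model.encode(candidates)

# Cosine similarity of the query against every candidate; higher is better
scores = model.similarity(query_embedding, candidate_embeddings)[0]
best = int(scores.argmax())
print(candidates[best], float(scores[best]))
```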
## Training Details

### Training Dataset

#### Unnamed Dataset
- Size: 2,392,064 training samples
- Columns: `sentence_0`, `sentence_1`, and `label`
- Approximate statistics based on the first 1000 samples:

  |         | sentence_0                                          | sentence_1                                            | label                         |
  |:--------|:----------------------------------------------------|:------------------------------------------------------|:------------------------------|
  | type    | string                                              | string                                                | float                         |
  | details | min: 8 tokens, mean: 74.35 tokens, max: 1024 tokens | min: 11 tokens, mean: 182.37 tokens, max: 1024 tokens | min: 1.0, mean: 1.0, max: 1.0 |
- Samples:
  | sentence_0 | sentence_1 | label |
  |:-----------|:-----------|:------|
  | <code>Set the column title<br>@param column - column number (first column is: 0)<br>@param title - new column title</code> | <code>setHeader = function(column, newValue) {<br>    const obj = this;<br>    if (obj.headers[column]) {<br>        const oldValue = obj.headers[column].textContent;<br>        const onchangeheaderOldValue = (obj.options.columns && obj.options.columns[column] && obj.options.columns[column].title) \|\| '';<br>        if (! newValue) {<br>            newValue = getColumnName(column);<br>        }<br>        obj.headers[column].textContent = newValue;<br>        // Keep the title property<br>        obj.headers[column].setAttribute('title', newValue);<br>        // Update title<br>        if (!obj.options.columns) {<br>            obj.options.columns = [];<br>        }<br>        if (!obj.options.columns[column]) {<br>            obj.options.columns[column] = {};<br>        }<br>        obj.options.columns[column].title = newValue;<br>        setHistory.call(obj, {<br>            action: 'setHeader',<br>            column: column,<br>            oldValue: oldValue,<br>            newValue: newValue<br>        });<br>        // On onchange header<br>        dispatch.c...</code> | <code>1.0</code> |
  | <code>Elsewhere this is known as a "Weak Value Map". Whereas a std JS WeakMap<br>is weak on its keys, this map is weak on its values. It does not retain these<br>values strongly. If a given value disappears, then the entries for it<br>disappear from every weak-value-map that holds it as a value.<br>Just as a WeakMap only allows gc-able values as keys, a weak-value-map<br>only allows gc-able values as values.<br>Unlike a WeakMap, a weak-value-map unavoidably exposes the non-determinism of<br>gc to its clients. Thus, both the ability to create one, as well as each<br>created one, must be treated as dangerous capabilities that must be closely<br>held. A program with access to these can read side channels though gc that do<br>*not* rely on the ability to measure duration. This is a separate, and bad,<br>timing-independent side channel.<br>This non-determinism also enables code to escape deterministic replay. In a<br>blockchain context, this could cause validators to differ from each other,<br>preventing consensus, and thus preventing ...</code> | <code>makeFinalizingMap = (finalizer, opts) => {<br>  const { weakValues = false } = opts \|\| {};<br>  if (!weakValues \|\| !WeakRef \|\| !FinalizationRegistry) {<br>    /** @type Map */<br>    const keyToVal = new Map();<br>    return Far('fakeFinalizingMap', {<br>      clearWithoutFinalizing: keyToVal.clear.bind(keyToVal),<br>      get: keyToVal.get.bind(keyToVal),<br>      has: keyToVal.has.bind(keyToVal),<br>      set: (key, val) => {<br>        keyToVal.set(key, val);<br>      },<br>      delete: keyToVal.delete.bind(keyToVal),<br>      getSize: () => keyToVal.size,<br>    });<br>  }<br>  /* @type Map> */<br>  const keyToRef = new Map();<br>  const registry = new FinalizationRegistry(key => {<br>    // Because this will delete the current binding of `key`, we need to<br>    // be sure that it is not called because a previous binding was collected.<br>    // We do this with the `unregister` in `set` below, assuming that<br>    // `unregister` immediately suppresses the finalization of the thing<br>    // it unregisters. TODO If this is...</code> | <code>1.0</code> |
  | <code>Creates a function that memoizes the result of `func`. If `resolver` is<br>provided, it determines the cache key for storing the result based on the<br>arguments provided to the memoized function. By default, the first argument<br>provided to the memoized function is used as the map cache key. The `func`<br>is invoked with the `this` binding of the memoized function.<br>Note: The cache is exposed as the `cache` property on the memoized<br>function. Its creation may be customized by replacing the `_.memoize.Cache`<br>constructor with one whose instances implement the `Map`<br>method interface of `delete`, `get`, `has`, and `set`.<br>@static<br>@memberOf _<br>@since 0.1.0<br>@category Function<br>@param {Function} func The function to have its output memoized.<br>@param {Function} [resolver] The function to resolve the cache key.<br>@returns {Function} Returns the new memoized function.<br>@example<br>var object = { 'a': 1, 'b': 2 };<br>var othe...</code> | <code>function memoize(func, resolver) {<br>  if (typeof func != 'function' \|\| (resolver && typeof resolver != 'function')) {<br>    throw new TypeError(FUNC_ERROR_TEXT);<br>  }<br>  var memoized = function() {<br>    var args = arguments,<br>        key = resolver ? resolver.apply(this, args) : args[0],<br>        cache = memoized.cache;<br>    if (cache.has(key)) {<br>      return cache.get(key);<br>    }<br>    var result = func.apply(this, args);<br>    memoized.cache = cache.set(key, result);<br>    return result;<br>  };<br>  memoized.cache = new (memoize.Cache \|\| MapCache);<br>  return memoized;<br>}</code> | <code>1.0</code> |
- Loss: `CachedMultipleNegativesSymmetricRankingLoss` with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim",
      "mini_batch_size": 128,
      "gather_across_devices": false
  }
  ```
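
In training code, a loss configured this way would be constructed roughly as follows. A minimal sketch, assuming one positive (docstring, code) pair per row: the symmetric formulation scores both the text-to-code and code-to-text directions against in-batch negatives, and the cached variant chunks each large batch into mini-batches of 128 so it fits in memory:

```python
# Sketch: instantiating the loss with the parameters listed above.
from sentence_transformers import SentenceTransformer
from sentence_transformers.losses import CachedMultipleNegativesSymmetricRankingLoss
from sentence_transformers.util import cos_sim

model = SentenceTransformer("Shuu12121/CodeModernBERT-Crow-v1.1")
loss = CachedMultipleNegativesSymmetricRankingLoss(
    model,
    scale=20.0,              # multiplier applied to the cosine similarities
    similarity_fct=cos_sim,  # the "cos_sim" named in the parameter dump above
    mini_batch_size=128,     # gradient-cache chunk size within each batch
)
```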
### Training Hyperparameters

#### Non-Default Hyperparameters

- `per_device_train_batch_size`: 2048
- `per_device_eval_batch_size`: 2048
- `fp16`: True
- `multi_dataset_batch_sampler`: round_robin
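
Expressed as training arguments, these non-default values map onto `SentenceTransformerTrainingArguments` roughly like this (a sketch; `output_dir` is illustrative and not recorded in this card):

```python
# Sketch: the non-default hyperparameters as trainer arguments.
from sentence_transformers import SentenceTransformerTrainingArguments
from sentence_transformers.training_args import MultiDatasetBatchSamplers

args = SentenceTransformerTrainingArguments(
    output_dir="output",  # illustrative placeholder
    per_device_train_batch_size=2048,
    per_device_eval_batch_size=2048,
    fp16=True,
    multi_dataset_batch_sampler=MultiDatasetBatchSamplers.ROUND_ROBIN,
)
```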
#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: no
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 2048
- `per_device_eval_batch_size`: 2048
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 5e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1
- `num_train_epochs`: 3
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.0
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: True
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: None
- `hub_always_push`: False
- `hub_revision`: None
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `include_for_metrics`: []
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`: 
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `use_liger_kernel`: False
- `liger_kernel_config`: None
- `eval_use_gather_object`: False
- `average_tokens_across_devices`: False
- `prompts`: None
- `batch_sampler`: batch_sampler
- `multi_dataset_batch_sampler`: round_robin
- `router_mapping`: {}
- `learning_rate_mapping`: {}

</details>
### Training Logs

| Epoch | Step | Training Loss |
|---|---|---|
| 0.4281 | 500 | 0.3784 |
| 0.8562 | 1000 | 0.1367 |
| 1.2842 | 1500 | 0.0707 |
| 1.7123 | 2000 | 0.0456 |
| 2.1404 | 2500 | 0.0344 |
| 2.5685 | 3000 | 0.0143 |
| 2.9966 | 3500 | 0.0136 |
### Framework Versions

- Python: 3.10.12
- Sentence Transformers: 5.1.0
- Transformers: 4.55.3
- PyTorch: 2.7.0+cu128
- Accelerate: 1.7.0
- Datasets: 3.6.0
- Tokenizers: 0.21.4
## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```