function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def test_get_lang(self):
self.get_lang(self.map1) | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_get_404(self):
self.get_404() | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_get_info(self):
body, locale = self.get_info(self.map1, 'en')
self.assertEqual(locale.get('lang'), 'en') | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_post_error(self):
body = self.post_error({}, user='moderator')
errors = body.get('errors')
self.assertEqual(len(errors), 2)
self.assertCorniceRequired(errors[0], 'locales')
self.assertCorniceRequired(errors[1], 'geometry') | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_post_non_whitelisted_attribute(self):
body = {
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'protected': True,
'geometry': {
'id': 5678, 'version': 6789,
'geom_detail': '{"type":"Polygon","coordinates":[[[668519.249382151,5728802.39591739],[668518.249382151,5745465.66808356],[689156.247019149,5745465.66808356],[689156.247019149,5728802.39591739],[668519.249382151,5728802.39591739]]]}' # noqa
},
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy'}
]
}
self.post_non_whitelisted_attribute(body, user='moderator') | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_post_success(self):
body = {
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'geometry': {
'id': 5678, 'version': 6789,
'geom_detail': '{"type":"Polygon","coordinates":[[[668518.249382151,5728802.39591739],[668518.249382151,5745465.66808356],[689156.247019149,5745465.66808356],[689156.247019149,5728802.39591739],[668518.249382151,5728802.39591739]]]}' # noqa
},
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy'}
]
}
body, doc = self.post_success(body, user='moderator')
self.assertIsNotNone(body['geometry'].get('geom_detail'))
version = doc.versions[0]
archive_map = version.document_archive
self.assertEqual(archive_map.editor, 'IGN')
self.assertEqual(archive_map.scale, '25000')
self.assertEqual(archive_map.code, '3432OT')
archive_locale = version.document_locales_archive
self.assertEqual(archive_locale.lang, 'en')
self.assertEqual(archive_locale.title, 'Lac d\'Annecy')
archive_geometry = version.document_geometry_archive
self.assertEqual(archive_geometry.version, doc.geometry.version)
self.assertIsNotNone(archive_geometry.geom_detail)
self.assertIsNotNone(archive_geometry.geom_detail)
# check that a link for intersecting documents is created
links = self.session.query(TopoMapAssociation). \
filter(
TopoMapAssociation.topo_map_id == doc.document_id). \
order_by(TopoMapAssociation.document_id). \
all()
self.assertEqual(len(links), 2)
self.assertEqual(links[0].document_id, self.waypoint1.document_id)
self.check_cache_version(self.waypoint1.document_id, 2)
self.assertEqual(links[1].document_id, self.route.document_id)
self.check_cache_version(self.route.document_id, 2) | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_put_wrong_document_version(self):
body = {
'document': {
'document_id': self.map1.document_id,
'version': -9999,
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy',
'version': self.locale_en.version}
]
}
}
self.put_wrong_version(body, self.map1.document_id, user='moderator') | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_put_wrong_ids(self):
body = {
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy',
'version': self.locale_en.version}
]
}
}
self.put_wrong_ids(body, self.map1.document_id, user='moderator') | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_put_success_all(self):
body = {
'message': 'Update',
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'quality': quality_types[1],
'editor': 'IGN',
'scale': '25000',
'code': '3433OT',
'geometry': {
'version': self.map1.geometry.version,
'geom_detail': '{"type":"Polygon","coordinates":[[[668519.249382151,5728802.39591739],[668518.249382151,5745465.66808356],[689156.247019149,5745465.66808356],[689156.247019149,5728802.39591739],[668519.249382151,5728802.39591739]]]}' # noqa
},
'locales': [
{'lang': 'en', 'title': 'New title',
'version': self.locale_en.version}
]
}
}
(body, map1) = self.put_success_all(body, self.map1, user='moderator')
self.assertEqual(map1.code, '3433OT')
locale_en = map1.get_locale('en')
self.assertEqual(locale_en.title, 'New title')
# version with lang 'en'
versions = map1.versions
version_en = self.get_latest_version('en', versions)
archive_locale = version_en.document_locales_archive
self.assertEqual(archive_locale.title, 'New title')
archive_document_en = version_en.document_archive
self.assertEqual(archive_document_en.scale, '25000')
self.assertEqual(archive_document_en.code, '3433OT')
archive_geometry_en = version_en.document_geometry_archive
self.assertEqual(archive_geometry_en.version, 2)
# version with lang 'fr'
version_fr = self.get_latest_version('fr', versions)
archive_locale = version_fr.document_locales_archive
self.assertEqual(archive_locale.title, 'Lac d\'Annecy')
# check that the links to intersecting documents are updated
links = self.session.query(TopoMapAssociation). \
filter(
TopoMapAssociation.topo_map_id == self.map1.document_id). \
all()
self.assertEqual(len(links), 2)
self.assertEqual(links[0].document_id, self.waypoint1.document_id)
self.check_cache_version(self.waypoint1.document_id, 2)
self.assertEqual(links[1].document_id, self.route.document_id)
self.check_cache_version(self.route.document_id, 2)
# waypoint 2 is no longer associated, the cache key was incremented
self.check_cache_version(self.waypoint2.document_id, 2) | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def test_put_success_lang_only(self):
body = {
'message': 'Changing lang',
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'quality': quality_types[1],
'editor': 'IGN',
'scale': '25000',
'code': '3431OT',
'locales': [
{'lang': 'en', 'title': 'New title',
'version': self.locale_en.version}
]
}
}
(body, map1) = self.put_success_lang_only(
body, self.map1, user='moderator')
self.assertEqual(
map1.get_locale('en').title, 'New title') | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def _assert_geometry(self, body):
self.assertIsNotNone(body.get('geometry'))
geometry = body.get('geometry')
self.assertIsNotNone(geometry.get('version'))
self.assertIsNotNone(geometry.get('geom_detail'))
geom = geometry.get('geom_detail')
polygon = shape(json.loads(geom))
self.assertIsInstance(polygon, Polygon) | c2corg/v6_api | [
21,
18,
21,
89,
1439983299
] |
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : ``dict``
Parameters
"""
self.param = param | ndparker/tdi | [
8,
2,
8,
2,
1381778054
] |
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
if name in self.param:
return [self.param[name]]
return [] | ndparker/tdi | [
8,
2,
8,
2,
1381778054
] |
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : dict of sequences
Parameters. Empty sequences act as if the key was not present.
Otherwise ``getfirst`` will return the first element and
``getlist`` will return a shallow copy of the sequence as a
``list``.
"""
self.param = param | ndparker/tdi | [
8,
2,
8,
2,
1381778054
] |
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
try:
result = self.param[name]
except KeyError:
pass
else:
return list(result)
return [] | ndparker/tdi | [
8,
2,
8,
2,
1381778054
] |
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : multidict
Parameters. The object is expected to provide a getall() method
"""
self.param = param | ndparker/tdi | [
8,
2,
8,
2,
1381778054
] |
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
return self.param.getall(name) | ndparker/tdi | [
8,
2,
8,
2,
1381778054
] |
def getlist(self, name):
""" :See: `ParameterAdapterInterface.getlist` """
# pylint: disable = unused-argument
return [] | ndparker/tdi | [
8,
2,
8,
2,
1381778054
] |
def __init__(self, newPersionName):
#self.name = newPersionName; | onehao/opensource | [
1,
1,
1,
1,
1414656394
] |
def sayYourName(self):
#此处,之所以没有像之前一样出现:
#AttributeError: Person instance has no attribute 'name'
#那是因为,虽然当前的实例self中,没有在__init__中初始化对应的name变量,实例self中没有对应的name变量
#但是由于实例所对应的类Person,有对应的name变量,所以也是可以正常执行代码的
#对应的,此处的self.name,实际上是Person.name
print 'My name is %s'%(self.name); # -> class global name
print 'name within class Person is actually the global name: %s'%(name); #-> whole global name
print "only access Person's name via Person.name=%s"%(Person.name); # -> class global name | onehao/opensource | [
1,
1,
1,
1,
1414656394
] |
def changeGlobalName(self,newName):
global name
name = "new class global name" | onehao/opensource | [
1,
1,
1,
1,
1414656394
] |
def say(self):
#此处,之所以没有像之前一样出现:
#AttributeError: Person instance has no attribute 'name'
#那是因为,虽然当前的实例self中,没有在__init__中初始化对应的name变量,实例self中没有对应的name变量
#但是由于实例所对应的类Person,有对应的name变量,所以也是可以正常执行代码的
#对应的,此处的self.name,实际上是Person.name
print 'My name is %s'%(self.name); # -> class global name
print 'name within class Person is actually the global name: %s'%(name); #-> whole global name
print "only access Person's name via Person.name=%s"%(Person.name); # -> class global name | onehao/opensource | [
1,
1,
1,
1,
1414656394
] |
def selfAndInitDemo():
persionInstance = Person("crifan");
persionInstance.sayYourName();
personInstance2 = Person("michael")
personInstance2.sayYourName()
personInstance2.changeGlobalName("newName")
personInstance2.sayYourName()
print "whole global name is %s"%(name); # -> whole global name
child = Child('child')
child.say() | onehao/opensource | [
1,
1,
1,
1,
1414656394
] |
def _compute_xent_loss_helper(
predictions: NestedMap, input_batch: NestedMap,
return_predictions: bool) -> Tuple[Metrics, Dict[str, Any]]:
"""Helper for computing the xent loss for Language model and Sequence model.
Args:
predictions: A `.NestedMap` containing the keys `per_example_argmax`,
`total_loss`, `avg_xent`, `aux_loss`, `total_weight` which corresponds to
the output of the Softmax layer.
input_batch: A `.NestedMap` object containing input tensors which contains
the keys `labels` and `weights` which corresponds to the labels and the
`weights` for each token in the sequence.
return_predictions: Whether to return predictions, which can be more
expensive.
Returns:
- A dict or NestedMap containing str keys and (metric, weight) pairs as
values, where one of the entries is expected to correspond to the loss.
- A dict containing arbitrary tensors describing something about each
training example, where the first dimension of each tensor is the batch
index. The base class just returns an empty dict.
"""
if 'tgt' in input_batch:
labels = input_batch.tgt.labels
if 'paddings' in input_batch.tgt:
weights = 1.0 - input_batch.tgt.paddings
else:
weights = jnp.not_equal(input_batch.tgt.segment_ids, 0)
weights = weights.astype(labels.dtype)
else:
labels = input_batch.labels
weights = input_batch.weights
predicted_labels = predictions.per_example_argmax.astype(labels.dtype)
num_preds = predictions.total_weight
mean_acc = jnp.sum(
(labels == predicted_labels) * weights) / jnp.maximum(num_preds, 1)
metric_weight = jnp.array(num_preds, predictions.avg_xent.dtype)
if hasattr(predictions, 'avg_xent_weight'):
avg_xent_weight = predictions.avg_xent_weight
else:
avg_xent_weight = metric_weight
metrics = NestedMap(
total_loss=(predictions.total_loss, metric_weight),
avg_xent=(predictions.avg_xent, avg_xent_weight),
aux_loss=(predictions.aux_loss, jnp.array(1.0,
predictions.aux_loss.dtype)),
log_pplx=(predictions.avg_xent, avg_xent_weight),
fraction_of_correct_next_step_preds=(mean_acc, metric_weight),
num_predictions=(num_preds, jnp.array(1.0, num_preds.dtype)),
)
per_example_output = NestedMap()
if return_predictions:
per_example_output = predictions
return metrics, per_example_output | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def cond_func(val):
"""Whether the while loop should continue."""
# We continue the greedy search iff both:
# (1) We have yet to exceed the max steps set by p.decoder.seqlen, AND;
# (2) At least one row in the batch has not terminated.
length_ok = val.step < seq_len - 1
all_rows_done = jnp.all(val.done)
return jnp.logical_and(length_ok, jnp.logical_not(all_rows_done)) | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def compute_predictions(self, input_batch: NestedMap) -> Predictions:
"""Computes predictions for `input_batch`.
This method must be defined in a concrete derived class.
The output can be in the form of probablistic distributions, e.g., softmax
logits for discrete outputs, mixture of logistics for continuous values, or
regression values.
For training/evaluation, the output will be used for computing loss and
gradient updates, including comparing predicted distributions between
teacher and student for distillation. During inference the output can be
used to compute final outputs, perhaps with sampling.
Args:
input_batch: A `.NestedMap` object containing input tensors.
Returns:
Predictions, either a single Tensor, a `.NestedMap`, or a namedtuple.
"""
raise NotImplementedError('Abstract method') | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def fprop(self, input_batch: NestedMap) -> Tuple[Metrics, Dict[str, Any]]:
"""Forward propagation through one tower of the model.
Args:
input_batch: A `.NestedMap` object containing input tensors to this tower.
Returns:
(dict, dict):
- A dict containing str keys and (metric, weight) pairs as values, where
one of the keys is expected to be 'loss'.
- A dict containing arbitrary tensors describing something about each
training example, where the first dimension of each tensor is the batch
index.
"""
with py_utils.AuxLossContext():
predictions = self.compute_predictions(input_batch)
return self.compute_loss(predictions, input_batch) | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def process_decode_out(
self, input_obj: base_input.BaseInput,
decode_out: NestedMap) -> Tuple[NestedMap, Sequence[Tuple[str, Any]]]:
"""Processes one batch of decoded outputs.
Args:
input_obj: The input object where a tokenizer is accessible.
decode_out: The output from decode(). May have an extra leading axis.
Returns:
- metrics, a NestedMap containing str keys and (metric, weight) pairs for
the current batch (a tuple of two scalars).
- A list of tuples where each element corresponds to a row in the batch.
Each tuple is a key value pair.
"""
raise NotImplementedError('Abstract method') | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def Params(cls) -> InstantiableParams:
p = super().Params()
p.Define('mlp_tpl', layers.linears.MLPBlock.Params(),
'MLP model parameters.')
p.Define('softmax_tpl', layers.SingleShardSharedEmbeddingSoftmax.Params(),
'Input softmax embedding lookup layer.')
return p | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def compute_predictions(self, input_batch: NestedMap) -> Predictions:
input_emb = self.softmax.emb_lookup(input_batch.ids)
output = self.mlp_layers.fprop(input_emb)
predictions = self.softmax.fprop(
inputs=output,
class_weights=input_batch.weights[:, :, jnp.newaxis],
class_ids=input_batch.ids[:, :, jnp.newaxis])
return predictions | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def Params(cls) -> InstantiableParams:
p = super().Params()
p.Define('lm', layers.TransformerLm.Params(), 'LM layer.')
p.Define(
'return_predictions', False, 'Whether to return predictions during'
'eval. Returning predictions is more expensive, but may be useful'
'for debugging.')
greedy_search_p = py_utils.Params()
greedy_search_p.Define('seqlen', 0, 'Maximum output sequence length.')
greedy_search_p.Define(
'min_prefix_len', 5,
'Minimum number of tokens picked to be used as decoding prefix.')
greedy_search_p.Define(
'eos_id', 2,
'The id of EOS token indicating the termination of greedy search.')
greedy_search_p.Define(
'max_decode_steps', None,
'If not None, the max decode steps for each example. If None, this '
'is set to `seqlen`, which contains prefix.')
p.Define('decoder', greedy_search_p, 'Decoder param.')
return p | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def compute_predictions(self, input_batch: NestedMap) -> Predictions:
"""Computes predictions for `input_batch`."""
p = self.params
if 'tgt' in input_batch:
input_batch = input_batch.tgt
if 'paddings' in input_batch:
paddings = input_batch.paddings
else:
paddings = jnp.equal(input_batch.segment_ids, 0).astype(self.fprop_dtype)
if 'weights' in input_batch:
weights = input_batch.weights
else:
weights = 1.0 - paddings
weights = weights.astype(self.fprop_dtype)
input_batch.weights = weights
inputs = input_batch.ids
labels = NestedMap(class_ids=input_batch.labels, class_weights=weights)
if p.lm.packed_input:
packed_input_kwargs = {
'segment_ids': input_batch.segment_ids,
'segment_pos': input_batch.segment_pos,
}
else:
packed_input_kwargs = {}
return self.lm.fprop(
inputs=inputs,
paddings=paddings,
labels=labels,
**packed_input_kwargs) | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def decode(self, input_batch: NestedMap) -> Tuple[NestedMap, NestedMap]:
"""Greedy decodes the input_batch.
Args:
input_batch: The input batch, with fields like `.ids`.
Returns:
- metrics, a NestedMap containing str keys and (metrics, weight) pairs.
- A NestedMap like `input_batch`, with `.prefix_lengths` (vector of
randomly generated ints indicating the lengths of prefixes for each
row), and `.output_ids` (matrix of int ids with the decoded output).
"""
p = self.params
if p.decoder.seqlen <= 0:
raise ValueError('Must set p.decoder.seqlen > 0, current value = '
f'{p.decoder.seqlen}')
batch_size = input_batch.ids.shape[0]
maxval = jnp.sum(1 - input_batch.paddings, axis=1).astype(jnp.int32)
minval = jnp.minimum(maxval, p.decoder.min_prefix_len)
prefix_lengths = jax.random.randint(base_layer.next_prng_key(),
[batch_size], minval, maxval + 1,
input_batch.ids.dtype)
decoder_state = self.lm.init_states(
target_batch_size=batch_size,
target_max_length=p.decoder.seqlen)
global_step = base_layer.cur_global_step()
lm_theta = self.lm.local_theta()
def extend_step_fn(states, ids):
with base_layer.JaxContext.new_context(
prng_key=base_layer.next_prng_key(),
global_step=global_step) as jax_context:
jax_context.bind(self.lm, self.lm.vars_to_flax_vars(lm_theta),
[base_layer.SCOPE_AUX_LOSS])
new_states, xent = self.lm.extend_step(states, ids)
return new_states, xent.logits
result = greedy_decode(
extend_step_fn,
decoder_state,
input_batch.ids,
input_batch.paddings,
p.decoder.seqlen,
max_decode_steps=p.decoder.max_decode_steps,
prefix_lengths=prefix_lengths,
eos_id=p.decoder.eos_id)
result.update(input_batch)
metrics = NestedMap(
num_decoded=(jnp.array(0.0, jnp.float32),
jnp.array(batch_size, jnp.float32)))
return metrics, result | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def Params(cls) -> InstantiableParams:
p = super().Params()
p.Define('model', layers.TransformerEncoderDecoder.Params(),
'Sequence model layer for this task.')
p.Define(
'return_predictions', False, 'Whether to return predictions during'
'eval. Returning predictions is more expensive, but may be useful'
'for debugging.')
decoder_p = py_utils.Params()
decoder_p.Define('seqlen', 0, 'Maximum output sequence length.')
decoder_p.Define(
'eos_id', 2,
'The id of EOS token indicating the termination of decoding.')
p.Define('decoder', decoder_p, 'Decoder params.')
p.Define(
'label_smoothing_prob', 0.0,
'If > 0.0, smooth out one-hot prob by spreading this amount of'
' prob mass to all other tokens.')
return p | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def compute_predictions(self, input_batch):
"""Computes predictions for `input_batch`."""
p = self.params
if p.model.packed_input:
packed_input_kwargs = {
'input_segment_ids': input_batch.src.segment_ids,
'input_segment_pos': input_batch.src.segment_pos,
'target_segment_ids': input_batch.tgt.segment_ids,
'target_segment_pos': input_batch.tgt.segment_pos,
}
else:
packed_input_kwargs = {}
labels = NestedMap(
class_ids=input_batch.tgt.labels, class_weights=input_batch.tgt.weights)
if p.label_smoothing_prob > 0.0:
vocab_size = p.model.softmax_tpl.num_classes
class_probabilities = jax.nn.one_hot(labels.class_ids, vocab_size)
fill_prob = p.label_smoothing_prob / (vocab_size - 1)
class_probabilities = (
(1.0 - p.label_smoothing_prob) * class_probabilities + fill_prob *
(1.0 - class_probabilities)).astype(self.fprop_dtype)
labels.class_probabilities = class_probabilities
return self.model.fprop(
inputs=input_batch.src.ids,
input_paddings=input_batch.src.paddings,
targets=input_batch.tgt.ids,
target_paddings=input_batch.tgt.paddings,
labels=labels,
**packed_input_kwargs) | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def decode(self, input_batch: NestedMap) -> Tuple[NestedMap, NestedMap]:
"""Decodes input_batch.
Args:
input_batch: The input batch, with a field `.src` and `.tgt` corresponding
to source and target, which itself contains the `.ids` and `.paddings.`
Returns:
- metrics, a nestedmap of metrics.
- results, a NestedMap like `input_batch`, with `.output_ids` (matrix of
int ids with the decoded output) as well as the decoded length.
"""
p = self.params
model_theta = self.model.local_theta()
if p.decoder.seqlen <= 0:
raise ValueError('Must set p.decoder.seqlen > 0, current value = '
f'{p.decoder.seqlen}')
batch_size = input_batch.tgt.ids.shape[0]
decoder_state = self.model.init_states(
inputs=input_batch.src.ids,
input_paddings=input_batch.src.paddings,
target_batch_size=batch_size,
target_max_length=p.decoder.seqlen)
global_step = base_layer.cur_global_step()
def extend_step_fn(states, ids):
with base_layer.JaxContext.new_context(
prng_key=base_layer.next_prng_key(),
global_step=global_step) as jax_context:
jax_context.bind(self.model, self.model.vars_to_flax_vars(model_theta),
[base_layer.SCOPE_AUX_LOSS])
new_states, xent = self.model.extend_step(states, ids)
return new_states, xent.logits
result = greedy_decode(
extend_step_fn,
decoder_state,
input_batch.tgt.ids,
input_batch.tgt.paddings,
p.decoder.seqlen,
eos_id=p.decoder.eos_id)
# Prefix lengths are not needed for sequence model decoding.
del result.prefix_lengths
result.update(input_batch)
metrics = NestedMap(
num_decoded=(jnp.array(0.0, jnp.float32),
jnp.array(batch_size, jnp.float32)))
return metrics, result | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def Params(cls) -> InstantiableParams:
p = super().Params()
p.Define('network', layers.ResNet.Params(),
'The classifier network, which is ResNet-50 by default.')
p.Define('softmax', layers.SingleShardFullSoftmax.Params(),
'The softmax layer used for the classification.')
p.Define(
'input_field', 'image',
'The input field which contains the image or video features to'
'pass to the classification network.')
return p | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def compute_predictions(self, input_batch: NestedMap) -> Predictions:
"""Computes predictions for `input_batch`.
Args:
input_batch: A `.NestedMap` object containing input tensors to this tower.
Returns:
- A NestedMap containing str keys and features, softmax output and the
class weights as values.
"""
p = self.params
inputs = input_batch.Get(p.input_field)
features = self.network.fprop(inputs)
batch_size = inputs.shape[0]
example_weights = jnp.ones([batch_size])
if 'weight' in input_batch:
example_weights = input_batch.weight
if example_weights.shape != (batch_size,):
raise ValueError(
f'Shape of example weights should be ({batch_size},), but instead'
f'is {example_weights.shape}')
# Softmax expects weights to be of shape [..., 1].
softmax_output = self.softmax.fprop(
inputs=features,
class_weights=example_weights[:, jnp.newaxis],
class_probabilities=input_batch.label_probs)
return NestedMap(
features=features,
softmax_output=softmax_output,
example_weights=example_weights) | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def Params(cls) -> InstantiableParams:
p = super().Params()
p.Define('lm', layers.TransformerLm.Params(), 'Bert lm layer.')
p.Define(
'label_smoothing_prob', 0.0,
'If > 0.0, smooth out one-hot prob by spreading this amount of'
' prob mass to all other tokens.')
p.Define('mask_token_id', 0, 'Mask token id')
return p | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def compute_predictions(self, input_batch: NestedMap) -> Predictions:
"""Computes predictions for `input_batch`."""
p = self.params
assert p.lm.packed_input
segment_ids = input_batch.segment_ids
segment_pos = input_batch.segment_pos
paddings = input_batch.paddings
# Note that internal BertTransformer uses input_batch.ids instead.
labels = input_batch.labels
if 'masked_ids' in input_batch:
# Input data already has masking done.
augmented_labels = input_batch.masked_ids
augmented_pos = input_batch.masked_pos
else:
augmented_labels, augmented_pos = self.mlm_augmenter.fprop(
labels, paddings)
if p.label_smoothing_prob > 0.0:
class_probabilities = jax.nn.one_hot(labels, p.lm.vocab_size)
fill_prob = p.label_smoothing_prob / (p.lm.vocab_size - 1)
class_probabilities = (
(1.0 - p.label_smoothing_prob) * class_probabilities + fill_prob *
(1.0 - class_probabilities)).astype(self.fprop_dtype)
# Only compute loss on masked pos.
labels = NestedMap(
class_probabilities=class_probabilities, class_weights=augmented_pos)
else:
# Only compute loss on masked pos.
labels = NestedMap(class_ids=labels, class_weights=augmented_pos)
lm_out = self.lm.fprop(
inputs=augmented_labels,
paddings=paddings,
labels=labels,
segment_ids=segment_ids,
segment_pos=segment_pos)
lm_out.augmented_labels = augmented_labels
lm_out.augmented_pos = augmented_pos
return lm_out | tensorflow/lingvo | [
2689,
429,
2689,
115,
1532471428
] |
def setUpTestData(cls):
regions = (
Region(name='Region 1', slug='region-1', description='A'),
Region(name='Region 2', slug='region-2', description='B'),
Region(name='Region 3', slug='region-3', description='C'),
)
for region in regions:
region.save()
child_regions = (
Region(name='Region 1A', slug='region-1a', parent=regions[0]),
Region(name='Region 1B', slug='region-1b', parent=regions[0]),
Region(name='Region 2A', slug='region-2a', parent=regions[1]),
Region(name='Region 2B', slug='region-2b', parent=regions[1]),
Region(name='Region 3A', slug='region-3a', parent=regions[2]),
Region(name='Region 3B', slug='region-3b', parent=regions[2]),
)
for region in child_regions:
region.save() | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
params = {'name': ['Region 1', 'Region 2']}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_description(self):
params = {'description': ['A', 'B']}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
regions = (
Region(name='Region 1', slug='region-1'),
Region(name='Region 2', slug='region-2'),
Region(name='Region 3', slug='region-3'),
)
for region in regions:
region.save()
tenant_groups = (
TenantGroup(name='Tenant group 1', slug='tenant-group-1'),
TenantGroup(name='Tenant group 2', slug='tenant-group-2'),
TenantGroup(name='Tenant group 3', slug='tenant-group-3'),
)
for tenantgroup in tenant_groups:
tenantgroup.save()
tenants = (
Tenant(name='Tenant 1', slug='tenant-1', group=tenant_groups[0]),
Tenant(name='Tenant 2', slug='tenant-2', group=tenant_groups[1]),
Tenant(name='Tenant 3', slug='tenant-3', group=tenant_groups[2]),
)
Tenant.objects.bulk_create(tenants)
sites = (
Site(name='Site 1', slug='site-1', region=regions[0], tenant=tenants[0], status=SiteStatusChoices.STATUS_ACTIVE, facility='Facility 1', asn=65001, latitude=10, longitude=10, contact_name='Contact 1', contact_phone='123-555-0001', contact_email='contact1@example.com'),
Site(name='Site 2', slug='site-2', region=regions[1], tenant=tenants[1], status=SiteStatusChoices.STATUS_PLANNED, facility='Facility 2', asn=65002, latitude=20, longitude=20, contact_name='Contact 2', contact_phone='123-555-0002', contact_email='contact2@example.com'),
Site(name='Site 3', slug='site-3', region=regions[2], tenant=tenants[2], status=SiteStatusChoices.STATUS_RETIRED, facility='Facility 3', asn=65003, latitude=30, longitude=30, contact_name='Contact 3', contact_phone='123-555-0003', contact_email='contact3@example.com'),
)
Site.objects.bulk_create(sites) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
params = {'name': ['Site 1', 'Site 2']}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_facility(self):
params = {'facility': ['Facility 1', 'Facility 2']}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_latitude(self):
params = {'latitude': [10, 20]}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_contact_name(self):
params = {'contact_name': ['Contact 1', 'Contact 2']}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_contact_email(self):
params = {'contact_email': ['contact1@example.com', 'contact2@example.com']}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_region(self):
regions = Region.objects.all()[:2]
params = {'region_id': [regions[0].pk, regions[1].pk]}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
params = {'region': [regions[0].slug, regions[1].slug]}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_tenant_group(self):
tenant_groups = TenantGroup.objects.all()[:2]
params = {'tenant_group_id': [tenant_groups[0].pk, tenant_groups[1].pk]}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
params = {'tenant_group': [tenant_groups[0].slug, tenant_groups[1].slug]}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
regions = (
Region(name='Region 1', slug='region-1'),
Region(name='Region 2', slug='region-2'),
Region(name='Region 3', slug='region-3'),
)
for region in regions:
region.save()
sites = (
Site(name='Site 1', slug='site-1', region=regions[0]),
Site(name='Site 2', slug='site-2', region=regions[1]),
Site(name='Site 3', slug='site-3', region=regions[2]),
)
Site.objects.bulk_create(sites)
parent_rack_groups = (
RackGroup(name='Parent Rack Group 1', slug='parent-rack-group-1', site=sites[0]),
RackGroup(name='Parent Rack Group 2', slug='parent-rack-group-2', site=sites[1]),
RackGroup(name='Parent Rack Group 3', slug='parent-rack-group-3', site=sites[2]),
)
for rackgroup in parent_rack_groups:
rackgroup.save()
rack_groups = (
RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0], parent=parent_rack_groups[0], description='A'),
RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1], parent=parent_rack_groups[1], description='B'),
RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2], parent=parent_rack_groups[2], description='C'),
)
for rackgroup in rack_groups:
rackgroup.save() | digitalocean/netbox | [
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two rack-group names should match exactly two objects."""
    qs = self.filterset({'name': ['Rack Group 1', 'Rack Group 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_description(self):
    """Filtering on two descriptions should match exactly two objects."""
    qs = self.filterset({'description': ['A', 'B']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_site(self):
    """Filtering by site PK or slug matches four groups (parents and children)."""
    sites = Site.objects.all()[:2]
    for params in (
        {'site_id': [sites[0].pk, sites[1].pk]},
        {'site': [sites[0].slug, sites[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three rack roles with distinct colors for filter tests."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    rack_roles = (
        RackRole(name='Rack Role 1', slug='rack-role-1', color='ff0000'),
        RackRole(name='Rack Role 2', slug='rack-role-2', color='00ff00'),
        RackRole(name='Rack Role 3', slug='rack-role-3', color='0000ff'),
    )
    RackRole.objects.bulk_create(rack_roles)
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two rack-role names should match exactly two objects."""
    qs = self.filterset({'name': ['Rack Role 1', 'Rack Role 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_color(self):
    """Filtering on two color values should match exactly two objects."""
    qs = self.filterset({'color': ['ff0000', '00ff00']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create regions, sites, rack groups, roles, tenants, and three fully
    populated racks whose fields differ pairwise for the Rack filter tests."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    regions = (
        Region(name='Region 1', slug='region-1'),
        Region(name='Region 2', slug='region-2'),
        Region(name='Region 3', slug='region-3'),
    )
    # Saved individually (not bulk_create) — presumably so per-instance save()
    # logic runs; verify against the model.
    for region in regions:
        region.save()
    sites = (
        Site(name='Site 1', slug='site-1', region=regions[0]),
        Site(name='Site 2', slug='site-2', region=regions[1]),
        Site(name='Site 3', slug='site-3', region=regions[2]),
    )
    Site.objects.bulk_create(sites)
    rack_groups = (
        RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0]),
        RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1]),
        RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2]),
    )
    for rackgroup in rack_groups:
        rackgroup.save()
    rack_roles = (
        RackRole(name='Rack Role 1', slug='rack-role-1'),
        RackRole(name='Rack Role 2', slug='rack-role-2'),
        RackRole(name='Rack Role 3', slug='rack-role-3'),
    )
    RackRole.objects.bulk_create(rack_roles)
    tenant_groups = (
        TenantGroup(name='Tenant group 1', slug='tenant-group-1'),
        TenantGroup(name='Tenant group 2', slug='tenant-group-2'),
        TenantGroup(name='Tenant group 3', slug='tenant-group-3'),
    )
    for tenantgroup in tenant_groups:
        tenantgroup.save()
    tenants = (
        Tenant(name='Tenant 1', slug='tenant-1', group=tenant_groups[0]),
        Tenant(name='Tenant 2', slug='tenant-2', group=tenant_groups[1]),
        Tenant(name='Tenant 3', slug='tenant-3', group=tenant_groups[2]),
    )
    Tenant.objects.bulk_create(tenants)
    # Every filterable Rack field is populated and distinct per rack so each
    # filter test can select a known subset.
    racks = (
        Rack(name='Rack 1', facility_id='rack-1', site=sites[0], group=rack_groups[0], tenant=tenants[0], status=RackStatusChoices.STATUS_ACTIVE, role=rack_roles[0], serial='ABC', asset_tag='1001', type=RackTypeChoices.TYPE_2POST, width=RackWidthChoices.WIDTH_19IN, u_height=42, desc_units=False, outer_width=100, outer_depth=100, outer_unit=RackDimensionUnitChoices.UNIT_MILLIMETER),
        Rack(name='Rack 2', facility_id='rack-2', site=sites[1], group=rack_groups[1], tenant=tenants[1], status=RackStatusChoices.STATUS_PLANNED, role=rack_roles[1], serial='DEF', asset_tag='1002', type=RackTypeChoices.TYPE_4POST, width=RackWidthChoices.WIDTH_21IN, u_height=43, desc_units=False, outer_width=200, outer_depth=200, outer_unit=RackDimensionUnitChoices.UNIT_MILLIMETER),
        Rack(name='Rack 3', facility_id='rack-3', site=sites[2], group=rack_groups[2], tenant=tenants[2], status=RackStatusChoices.STATUS_RESERVED, role=rack_roles[2], serial='GHI', asset_tag='1003', type=RackTypeChoices.TYPE_CABINET, width=RackWidthChoices.WIDTH_23IN, u_height=44, desc_units=True, outer_width=300, outer_depth=300, outer_unit=RackDimensionUnitChoices.UNIT_INCH),
    )
    Rack.objects.bulk_create(racks)
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two rack names should match exactly two objects."""
    qs = self.filterset({'name': ['Rack 1', 'Rack 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_asset_tag(self):
    """Filtering on two asset tags should match exactly two objects."""
    qs = self.filterset({'asset_tag': ['1001', '1002']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_width(self):
    """Filtering on two width choices should match exactly two objects."""
    qs = self.filterset(
        {'width': [RackWidthChoices.WIDTH_19IN, RackWidthChoices.WIDTH_21IN]},
        self.queryset,
    ).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_desc_units(self):
    """One rack has descending units; the other two do not."""
    for value, expected in (('true', 1), ('false', 2)):
        self.assertEqual(
            self.filterset({'desc_units': value}, self.queryset).qs.count(),
            expected,
        )
12158,
2099,
12158,
303,
1456755346
] |
def test_outer_depth(self):
    """Filtering on two outer-depth values should match exactly two objects."""
    qs = self.filterset({'outer_depth': [100, 200]}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_region(self):
    """Filtering by region PK or slug should each match two objects."""
    regions = Region.objects.all()[:2]
    for params in (
        {'region_id': [regions[0].pk, regions[1].pk]},
        {'region': [regions[0].slug, regions[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_group(self):
    """Filtering by rack-group PK or slug should each match two objects."""
    groups = RackGroup.objects.all()[:2]
    for params in (
        {'group_id': [groups[0].pk, groups[1].pk]},
        {'group': [groups[0].slug, groups[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_role(self):
    """Filtering by rack-role PK or slug should each match two objects."""
    roles = RackRole.objects.all()[:2]
    for params in (
        {'role_id': [roles[0].pk, roles[1].pk]},
        {'role': [roles[0].slug, roles[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_tenant(self):
    """Filtering by tenant PK or slug should each match two objects."""
    tenants = Tenant.objects.all()[:2]
    for params in (
        {'tenant_id': [tenants[0].pk, tenants[1].pk]},
        {'tenant': [tenants[0].slug, tenants[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create sites, rack groups, racks, users, and tenants, then one rack
    reservation per rack for the RackReservation filter tests."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    sites = (
        Site(name='Site 1', slug='site-1'),
        Site(name='Site 2', slug='site-2'),
        Site(name='Site 3', slug='site-3'),
    )
    Site.objects.bulk_create(sites)
    # Saved individually (not bulk_create) — presumably so per-instance save()
    # logic runs; verify against the model.
    rack_groups = (
        RackGroup(name='Rack Group 1', slug='rack-group-1', site=sites[0]),
        RackGroup(name='Rack Group 2', slug='rack-group-2', site=sites[1]),
        RackGroup(name='Rack Group 3', slug='rack-group-3', site=sites[2]),
    )
    for rackgroup in rack_groups:
        rackgroup.save()
    racks = (
        Rack(name='Rack 1', site=sites[0], group=rack_groups[0]),
        Rack(name='Rack 2', site=sites[1], group=rack_groups[1]),
        Rack(name='Rack 3', site=sites[2], group=rack_groups[2]),
    )
    Rack.objects.bulk_create(racks)
    users = (
        User(username='User 1'),
        User(username='User 2'),
        User(username='User 3'),
    )
    User.objects.bulk_create(users)
    tenant_groups = (
        TenantGroup(name='Tenant group 1', slug='tenant-group-1'),
        TenantGroup(name='Tenant group 2', slug='tenant-group-2'),
        TenantGroup(name='Tenant group 3', slug='tenant-group-3'),
    )
    for tenantgroup in tenant_groups:
        tenantgroup.save()
    tenants = (
        Tenant(name='Tenant 1', slug='tenant-1', group=tenant_groups[0]),
        Tenant(name='Tenant 2', slug='tenant-2', group=tenant_groups[1]),
        Tenant(name='Tenant 3', slug='tenant-3', group=tenant_groups[2]),
    )
    Tenant.objects.bulk_create(tenants)
    # Each reservation claims a disjoint unit range on its own rack.
    reservations = (
        RackReservation(rack=racks[0], units=[1, 2, 3], user=users[0], tenant=tenants[0]),
        RackReservation(rack=racks[1], units=[4, 5, 6], user=users[1], tenant=tenants[1]),
        RackReservation(rack=racks[2], units=[7, 8, 9], user=users[2], tenant=tenants[2]),
    )
    RackReservation.objects.bulk_create(reservations)
12158,
2099,
12158,
303,
1456755346
] |
def test_site(self):
    """Filtering by site PK or slug should each match two objects."""
    sites = Site.objects.all()[:2]
    for params in (
        {'site_id': [sites[0].pk, sites[1].pk]},
        {'site': [sites[0].slug, sites[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_user(self):
    """Filtering by user PK or username should each match two objects."""
    users = User.objects.all()[:2]
    for params in (
        {'user_id': [users[0].pk, users[1].pk]},
        {'user': [users[0].username, users[1].username]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_tenant_group(self):
    """Filtering by tenant-group PK or slug should each match two objects."""
    tenant_groups = TenantGroup.objects.all()[:2]
    for params in (
        {'tenant_group_id': [tenant_groups[0].pk, tenant_groups[1].pk]},
        {'tenant_group': [tenant_groups[0].slug, tenant_groups[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three manufacturers with distinct descriptions for filter tests."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturers = (
        Manufacturer(name='Manufacturer 1', slug='manufacturer-1', description='A'),
        Manufacturer(name='Manufacturer 2', slug='manufacturer-2', description='B'),
        Manufacturer(name='Manufacturer 3', slug='manufacturer-3', description='C'),
    )
    Manufacturer.objects.bulk_create(manufacturers)
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two manufacturer names should match exactly two objects."""
    qs = self.filterset({'name': ['Manufacturer 1', 'Manufacturer 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_description(self):
    """Filtering on two descriptions should match exactly two objects."""
    qs = self.filterset({'description': ['A', 'B']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types with varied attributes, plus one component
    template of each kind on the first two types, for DeviceType filter tests."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturers = (
        Manufacturer(name='Manufacturer 1', slug='manufacturer-1'),
        Manufacturer(name='Manufacturer 2', slug='manufacturer-2'),
        Manufacturer(name='Manufacturer 3', slug='manufacturer-3'),
    )
    Manufacturer.objects.bulk_create(manufacturers)
    # Device type 3 differs on is_full_depth and subdevice_role so boolean/choice
    # filters can distinguish it; type 1 has no subdevice_role at all.
    device_types = (
        DeviceType(manufacturer=manufacturers[0], model='Model 1', slug='model-1', part_number='Part Number 1', u_height=1, is_full_depth=True),
        DeviceType(manufacturer=manufacturers[1], model='Model 2', slug='model-2', part_number='Part Number 2', u_height=2, is_full_depth=True, subdevice_role=SubdeviceRoleChoices.ROLE_PARENT),
        DeviceType(manufacturer=manufacturers[2], model='Model 3', slug='model-3', part_number='Part Number 3', u_height=3, is_full_depth=False, subdevice_role=SubdeviceRoleChoices.ROLE_CHILD),
    )
    DeviceType.objects.bulk_create(device_types)
    # Add component templates for filtering
    # Only device types 1 and 2 get components, so has-component filters
    # ('console_ports': true/false, etc.) split the set 2 vs 1.
    ConsolePortTemplate.objects.bulk_create((
        ConsolePortTemplate(device_type=device_types[0], name='Console Port 1'),
        ConsolePortTemplate(device_type=device_types[1], name='Console Port 2'),
    ))
    ConsoleServerPortTemplate.objects.bulk_create((
        ConsoleServerPortTemplate(device_type=device_types[0], name='Console Server Port 1'),
        ConsoleServerPortTemplate(device_type=device_types[1], name='Console Server Port 2'),
    ))
    PowerPortTemplate.objects.bulk_create((
        PowerPortTemplate(device_type=device_types[0], name='Power Port 1'),
        PowerPortTemplate(device_type=device_types[1], name='Power Port 2'),
    ))
    PowerOutletTemplate.objects.bulk_create((
        PowerOutletTemplate(device_type=device_types[0], name='Power Outlet 1'),
        PowerOutletTemplate(device_type=device_types[1], name='Power Outlet 2'),
    ))
    InterfaceTemplate.objects.bulk_create((
        InterfaceTemplate(device_type=device_types[0], name='Interface 1'),
        InterfaceTemplate(device_type=device_types[1], name='Interface 2'),
    ))
    # Rear ports are created first because each front port references one.
    rear_ports = (
        RearPortTemplate(device_type=device_types[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C),
        RearPortTemplate(device_type=device_types[1], name='Rear Port 2', type=PortTypeChoices.TYPE_8P8C),
    )
    RearPortTemplate.objects.bulk_create(rear_ports)
    FrontPortTemplate.objects.bulk_create((
        FrontPortTemplate(device_type=device_types[0], name='Front Port 1', type=PortTypeChoices.TYPE_8P8C, rear_port=rear_ports[0]),
        FrontPortTemplate(device_type=device_types[1], name='Front Port 2', type=PortTypeChoices.TYPE_8P8C, rear_port=rear_ports[1]),
    ))
    DeviceBayTemplate.objects.bulk_create((
        DeviceBayTemplate(device_type=device_types[0], name='Device Bay 1'),
        DeviceBayTemplate(device_type=device_types[1], name='Device Bay 2'),
    ))
12158,
2099,
12158,
303,
1456755346
] |
def test_model(self):
    """Filtering on two model names should match exactly two objects."""
    qs = self.filterset({'model': ['Model 1', 'Model 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_part_number(self):
    """Filtering on two part numbers should match exactly two objects."""
    qs = self.filterset({'part_number': ['Part Number 1', 'Part Number 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_is_full_depth(self):
    """Two device types are full depth; one is not."""
    for value, expected in (('true', 2), ('false', 1)):
        self.assertEqual(
            self.filterset({'is_full_depth': value}, self.queryset).qs.count(),
            expected,
        )
12158,
2099,
12158,
303,
1456755346
] |
def test_manufacturer(self):
    """Filtering by manufacturer PK or slug should each match two objects."""
    manufacturers = Manufacturer.objects.all()[:2]
    for params in (
        {'manufacturer_id': [manufacturers[0].pk, manufacturers[1].pk]},
        {'manufacturer': [manufacturers[0].slug, manufacturers[1].slug]},
    ):
        self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_console_server_ports(self):
    """Two device types have console server port templates; one does not."""
    for value, expected in (('true', 2), ('false', 1)):
        self.assertEqual(
            self.filterset({'console_server_ports': value}, self.queryset).qs.count(),
            expected,
        )
12158,
2099,
12158,
303,
1456755346
] |
def test_power_outlets(self):
    """Two device types have power outlet templates; one does not."""
    for value, expected in (('true', 2), ('false', 1)):
        self.assertEqual(
            self.filterset({'power_outlets': value}, self.queryset).qs.count(),
            expected,
        )
12158,
2099,
12158,
303,
1456755346
] |
def test_pass_through_ports(self):
    """Two device types have front/rear port templates; one does not."""
    for value, expected in (('true', 2), ('false', 1)):
        self.assertEqual(
            self.filterset({'pass_through_ports': value}, self.queryset).qs.count(),
            expected,
        )
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types, each with one console port template."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
    device_types = (
        DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'),
        DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'),
        DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'),
    )
    DeviceType.objects.bulk_create(device_types)
    ConsolePortTemplate.objects.bulk_create((
        ConsolePortTemplate(device_type=device_types[0], name='Console Port 1'),
        ConsolePortTemplate(device_type=device_types[1], name='Console Port 2'),
        ConsolePortTemplate(device_type=device_types[2], name='Console Port 3'),
    ))
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two console port names should match exactly two objects."""
    qs = self.filterset({'name': ['Console Port 1', 'Console Port 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types, each with one console server port template."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
    device_types = (
        DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'),
        DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'),
        DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'),
    )
    DeviceType.objects.bulk_create(device_types)
    ConsoleServerPortTemplate.objects.bulk_create((
        ConsoleServerPortTemplate(device_type=device_types[0], name='Console Server Port 1'),
        ConsoleServerPortTemplate(device_type=device_types[1], name='Console Server Port 2'),
        ConsoleServerPortTemplate(device_type=device_types[2], name='Console Server Port 3'),
    ))
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two console server port names should match exactly two objects."""
    qs = self.filterset({'name': ['Console Server Port 1', 'Console Server Port 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types, each with one power port template whose
    draw values differ for the numeric filter tests."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
    device_types = (
        DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'),
        DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'),
        DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'),
    )
    DeviceType.objects.bulk_create(device_types)
    PowerPortTemplate.objects.bulk_create((
        PowerPortTemplate(device_type=device_types[0], name='Power Port 1', maximum_draw=100, allocated_draw=50),
        PowerPortTemplate(device_type=device_types[1], name='Power Port 2', maximum_draw=200, allocated_draw=100),
        PowerPortTemplate(device_type=device_types[2], name='Power Port 3', maximum_draw=300, allocated_draw=150),
    ))
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two power port names should match exactly two objects."""
    qs = self.filterset({'name': ['Power Port 1', 'Power Port 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_maximum_draw(self):
    """Filtering on two maximum-draw values should match exactly two objects."""
    qs = self.filterset({'maximum_draw': [100, 200]}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types, each with one power outlet template on a
    different feed leg for the feed_leg filter test."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
    device_types = (
        DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'),
        DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'),
        DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'),
    )
    DeviceType.objects.bulk_create(device_types)
    PowerOutletTemplate.objects.bulk_create((
        PowerOutletTemplate(device_type=device_types[0], name='Power Outlet 1', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_A),
        PowerOutletTemplate(device_type=device_types[1], name='Power Outlet 2', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_B),
        PowerOutletTemplate(device_type=device_types[2], name='Power Outlet 3', feed_leg=PowerOutletFeedLegChoices.FEED_LEG_C),
    ))
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two power outlet names should match exactly two objects."""
    qs = self.filterset({'name': ['Power Outlet 1', 'Power Outlet 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_feed_leg(self):
    """A single feed-leg value matches exactly one outlet template."""
    # TODO: Support filtering for multiple values
    qs = self.filterset({'feed_leg': PowerOutletFeedLegChoices.FEED_LEG_A}, self.queryset).qs
    self.assertEqual(qs.count(), 1)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types, each with one interface template; types and
    mgmt_only flags differ so each filter can select a known subset."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
    device_types = (
        DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'),
        DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'),
        DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'),
    )
    DeviceType.objects.bulk_create(device_types)
    InterfaceTemplate.objects.bulk_create((
        InterfaceTemplate(device_type=device_types[0], name='Interface 1', type=InterfaceTypeChoices.TYPE_1GE_FIXED, mgmt_only=True),
        InterfaceTemplate(device_type=device_types[1], name='Interface 2', type=InterfaceTypeChoices.TYPE_1GE_GBIC, mgmt_only=False),
        InterfaceTemplate(device_type=device_types[2], name='Interface 3', type=InterfaceTypeChoices.TYPE_1GE_SFP, mgmt_only=False),
    ))
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two interface names should match exactly two objects."""
    qs = self.filterset({'name': ['Interface 1', 'Interface 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_type(self):
    """A single interface-type value matches exactly one template."""
    # TODO: Support filtering for multiple values
    qs = self.filterset({'type': InterfaceTypeChoices.TYPE_1GE_FIXED}, self.queryset).qs
    self.assertEqual(qs.count(), 1)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types, each with a rear port and a front port
    template; front port types differ for the type filter test."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
    device_types = (
        DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'),
        DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'),
        DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'),
    )
    DeviceType.objects.bulk_create(device_types)
    # Rear ports first: each front port template references one.
    rear_ports = (
        RearPortTemplate(device_type=device_types[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C),
        RearPortTemplate(device_type=device_types[1], name='Rear Port 2', type=PortTypeChoices.TYPE_8P8C),
        RearPortTemplate(device_type=device_types[2], name='Rear Port 3', type=PortTypeChoices.TYPE_8P8C),
    )
    RearPortTemplate.objects.bulk_create(rear_ports)
    FrontPortTemplate.objects.bulk_create((
        FrontPortTemplate(device_type=device_types[0], name='Front Port 1', rear_port=rear_ports[0], type=PortTypeChoices.TYPE_8P8C),
        FrontPortTemplate(device_type=device_types[1], name='Front Port 2', rear_port=rear_ports[1], type=PortTypeChoices.TYPE_110_PUNCH),
        FrontPortTemplate(device_type=device_types[2], name='Front Port 3', rear_port=rear_ports[2], type=PortTypeChoices.TYPE_BNC),
    ))
12158,
2099,
12158,
303,
1456755346
] |
def test_name(self):
    """Filtering on two front port names should match exactly two objects."""
    qs = self.filterset({'name': ['Front Port 1', 'Front Port 2']}, self.queryset).qs
    self.assertEqual(qs.count(), 2)
12158,
2099,
12158,
303,
1456755346
] |
def test_type(self):
    """A single port-type value matches exactly one front port template."""
    # TODO: Support filtering for multiple values
    qs = self.filterset({'type': PortTypeChoices.TYPE_8P8C}, self.queryset).qs
    self.assertEqual(qs.count(), 1)
12158,
2099,
12158,
303,
1456755346
] |
def setUpTestData(cls):
    """Create three device types, each with one rear port template; type and
    positions differ per template for the respective filter tests."""
    # NOTE(review): takes `cls` — presumably decorated with @classmethod above this
    # chunk; confirm against the full file.
    manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
    device_types = (
        DeviceType(manufacturer=manufacturer, model='Model 1', slug='model-1'),
        DeviceType(manufacturer=manufacturer, model='Model 2', slug='model-2'),
        DeviceType(manufacturer=manufacturer, model='Model 3', slug='model-3'),
    )
    DeviceType.objects.bulk_create(device_types)
    RearPortTemplate.objects.bulk_create((
        RearPortTemplate(device_type=device_types[0], name='Rear Port 1', type=PortTypeChoices.TYPE_8P8C, positions=1),
        RearPortTemplate(device_type=device_types[1], name='Rear Port 2', type=PortTypeChoices.TYPE_110_PUNCH, positions=2),
        RearPortTemplate(device_type=device_types[2], name='Rear Port 3', type=PortTypeChoices.TYPE_BNC, positions=3),
    ))
12158,
2099,
12158,
303,
1456755346
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.