function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def test_search_dirs_sysroot(self):
    """search_dirs honours an explicit sysroot argument."""
    with mock.patch('bfg9000.shell.execute', mock_execute):
        linker = LdLinker(None, self.env, ['ld'], 'version')
        dirs = linker.search_dirs(sysroot='/sysroot')
        self.assertEqual(dirs, [abspath('/dir1'), abspath('/sysroot/dir2')])
68,
20,
68,
13,
1424839632
] |
def mock_bad_execute(*args, **kwargs):
    """Stand-in for shell.execute that always fails with OSError."""
    error = OSError()
    raise error
68,
20,
68,
13,
1424839632
] |
def taylor(fx, xs, order, x_range=(0, 1), n=200):
    """Sample fx and its truncated Taylor expansion of the given order.

    Returns (x, fy, ty): the sample grid over x_range, the exact values,
    and the Taylor-series values.
    """
    lo, hi = x_range
    x = np.linspace(float(lo), float(hi), n)
    fy = sy.lambdify(xs, fx, modules=['numpy'])(x)
    series = fx.series(xs, n=order).removeO()
    if series.is_Number:
        # A constant expansion would lambdify to a scalar; broadcast it.
        ty = np.full_like(x, float(series))
    else:
        ty = sy.lambdify(xs, series, modules=['numpy'])(x)
    return x, fy, ty
17326,
4066,
17326,
698,
1332776401
] |
def update():
    """Recompute the Taylor-expansion plot from the current widget state.

    Bokeh callback: reads ``text.value`` and ``slider.value`` and updates
    the data source, plot title, and legend in place.
    """
    try:
        expr = sy.sympify(text.value, dict(x=xs))
    except Exception as exception:
        # Surface the parse error to the user instead of raising.
        errbox.text = str(exception)
    else:
        errbox.text = ""
        # NOTE(review): indentation reconstructed — the refresh below is
        # assumed to run only after a successful parse, since `expr` would
        # otherwise be unbound; confirm against the upstream example.
        x, fy, ty = taylor(expr, xs, slider.value, (-2*sy.pi, 2*sy.pi), 200)
        p.title.text = "Taylor (n=%d) expansion comparison for: %s" % (slider.value, expr)
        legend.items[0].label = value(f"{expr}")
        legend.items[1].label = value(f"taylor({expr})")
        source.data = dict(x=x, fy=fy, ty=ty) | bokeh/bokeh | [
17326,
4066,
17326,
698,
1332776401
] |
def setUp(self):
    # Mock the device
    self.mock_device = mock.Mock(spec=device_utils.DeviceUtils)
    # 'blueline' is a product with a known thermal-device configuration.
    self.mock_device.build_product = 'blueline'
    self.mock_device.adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
    # Pretend the thermal sysfs files exist so init succeeds.
    self.mock_device.FileExists.return_value = True
    self.cpu_temp = cpu_temperature.CpuTemperature(self.mock_device)
    self.cpu_temp.InitThermalDeviceInformation() | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def testInitWithDeviceUtil(self):
    """CpuTemperature keeps a reference to the device it was built with."""
    device = mock.Mock(spec=device_utils.DeviceUtils)
    device.build_product = 'blueline'
    cpu_temp = cpu_temperature.CpuTemperature(device)
    self.assertEqual(device, cpu_temp.GetDeviceForTesting())
21,
16,
21,
3,
1435959644
] |
def testGetThermalDeviceInformation_noneWhenIncorrectLabel(self):
    """Unknown product names yield no thermal-device information."""
    device = mock.Mock(spec=device_utils.DeviceUtils)
    device.build_product = 'invalid_name'
    cpu_temp = cpu_temperature.CpuTemperature(device)
    cpu_temp.InitThermalDeviceInformation()
    self.assertEqual(cpu_temp.GetDeviceInfoForTesting(), None)
21,
16,
21,
3,
1435959644
] |
def testIsSupported_returnsTrue(self):
    """Supported when the product is known and the sysfs files exist."""
    device = mock.Mock(spec=device_utils.DeviceUtils)
    device.build_product = 'blueline'
    device.FileExists.return_value = True
    cpu_temp = cpu_temperature.CpuTemperature(device)
    self.assertTrue(cpu_temp.IsSupported())
21,
16,
21,
3,
1435959644
] |
def testIsSupported_returnsFalse(self):
    """Not supported when the thermal sysfs files are missing."""
    device = mock.Mock(spec=device_utils.DeviceUtils)
    device.build_product = 'blueline'
    device.FileExists.return_value = False
    cpu_temp = cpu_temperature.CpuTemperature(device)
    self.assertFalse(cpu_temp.IsSupported())
21,
16,
21,
3,
1435959644
] |
def testLetBatteryCoolToTemperature_coolWithin24Calls(self):
    """Cooling profile 0 reaches the target within 24 temperature reads."""
    self.mock_device.ReadFile = mock.Mock(side_effect=self.cooling_down0)
    self.cpu_temp.LetCpuCoolToTemperature(42)
    self.mock_device.ReadFile.assert_called()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.mock_device.ReadFile.call_count, 24)
21,
16,
21,
3,
1435959644
] |
def testLetBatteryCoolToTemperature_coolWithin16Calls(self):
    """Cooling profile 1 reaches the target within 16 temperature reads."""
    self.mock_device.ReadFile = mock.Mock(side_effect=self.cooling_down1)
    self.cpu_temp.LetCpuCoolToTemperature(42)
    self.mock_device.ReadFile.assert_called()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.mock_device.ReadFile.call_count, 16)
21,
16,
21,
3,
1435959644
] |
def testLetBatteryCoolToTemperature_timeoutAfterThree(self):
    """A temperature that never drops gives up after the read budget (24)."""
    self.mock_device.ReadFile = mock.Mock(side_effect=self.constant_temp)
    self.cpu_temp.LetCpuCoolToTemperature(42)
    self.mock_device.ReadFile.assert_called()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.mock_device.ReadFile.call_count, 24)
21,
16,
21,
3,
1435959644
] |
def __init__(self, expression, separator, distinct=False, ordering=None, **extra):
    """MySQL GROUP_CONCAT aggregate.

    Args:
        expression: column/expression whose values are concatenated.
        separator: string placed between values; interpolated into the SQL
            template as ``SEPARATOR "<separator>"``.
        distinct: emit DISTINCT before the expression when True.
        ordering: optional body of an ORDER BY clause inside the aggregate.
        **extra: forwarded to the parent aggregate class.
    """
    self.separator = separator
    # NOTE(review): separator and ordering are %-interpolated straight into
    # SQL — safe only for trusted, hard-coded values; confirm no caller
    # passes user-controlled input here.
    super(Sql_GroupConcat, self).__init__(expression,
                                          distinct='DISTINCT ' if distinct else '',
                                          ordering=' ORDER BY %s' % ordering if ordering is not None else '',
                                          separator=' SEPARATOR "%s"' % separator,
                                          output_field=CharField(),
                                          **extra) | rackerlabs/django-DefectDojo | [
2681,
1254,
2681,
272,
1424368427
] |
def extractKoreanovelsCom(item):
    """Parser for 'koreanovels.com'."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items with neither a chapter nor a volume number.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    is_link = item['title'].startswith("Link ")
    if is_link and item['tags'] == ['RSS']:
        return buildReleaseMessageWithType(
            item, 'Level 1 Skeleton', vol, chp,
            frag=frag, postfix=postfix, tl_type='translated')
191,
16,
191,
3,
1437712243
] |
def _make_random_patches(x, y, patch_size, permutations, size=3):
    """Crop a size x size grid of jigsaw patches from x at random offsets
    and reorder them per example according to the permutation indexed by y.

    Args:
        x: image batch tensor; static shape [N, H, W, C] is required.
        y: per-example indices into `permutations`.
        patch_size: side length of each square patch.
        permutations: table of 1-based patch orderings.
        size: grid dimension (size**2 patches per image).

    Returns:
        Tensor of patches flattened over (batch, patch), gathered into
        permuted order.
    """
    batch_size = x.get_shape().as_list()[0]
    crop_size = x.get_shape().as_list()[1]
    perm_idx = tf.expand_dims(y, 1)
    perm = tf.gather_nd(permutations, perm_idx)
    # Each grid cell spans WINDOW_SIZE pixels; a patch is sampled inside it.
    WINDOW_SIZE = crop_size // size
    N = x.get_shape().as_list()[0]
    C = x.get_shape().as_list()[3]
    patches = []
    for i, j in dd.multi_range(size, size):
        #tf.slice(x, [
        # M = number of valid top-left offsets within one grid cell.
        M = WINDOW_SIZE - patch_size + 1
        assert M > 0, f'Jigsaw: Window size ({WINDOW_SIZE}) and patch size ({patch_size}) not compatible'
        limit = np.array([1, M, M, 1])
        # Random spatial offset inside cell (i, j); the modulo keeps the
        # batch and channel components of the offset at 0.
        offset = np.array([0, i * WINDOW_SIZE, j * WINDOW_SIZE, 0]) + tf.random_uniform(
            [4], dtype=tf.int32,
            maxval=M,
        ) % limit
        patch = tf.slice(x, offset, [N, patch_size, patch_size, C])
        patches.append(patch)
    patches1 = tf.stack(patches, axis=1)
    # Flat indices selecting each example's patches in permuted order
    # (perm is 1-based, hence the -1).
    xyz = np.arange(batch_size)[:, np.newaxis] * size**2 + (perm - 1)
    #import ipdb
    #ipdb.set_trace()
    perm0 = tf.reshape(xyz, [-1])
    patches_flat = tf.reshape(patches1, [-1] + patches1.get_shape().as_list()[2:])
    #import ipdb
    ##ipdb.set_trace()
    patches2 = tf.gather(patches_flat, perm0)
    #return tf.reshape(patches2, [-1, PATCH_SIZE, PATCH_SIZE, C])
    return patches2 | gustavla/self-supervision | [
29,
5,
29,
7,
1492017767
] |
def __init__(self, name, basenet, loader, patch_size=75, size=3,
             reduce_channels=128, use_scalers=False):
    """Jigsaw self-supervision method.

    Args:
        name: method name used for logging/scoping.
        basenet: backbone network producing patch features.
        loader: data loader (must expose batch_size).
        patch_size: side length of each cropped patch.
        size: jigsaw grid dimension; only 2 and 3 are supported.
        reduce_channels: width of the per-patch reduction layer.
        use_scalers: whether to insert learned feature scalers.
    """
    self.name = name
    self.basenet = basenet
    self._size = size
    self._patch_size = patch_size
    self._loader = loader
    self._reduce_channels = reduce_channels
    if size == 3:
        self._permutations = PERMUTATIONS
    elif size == 2:
        # They are 1-based due to the permutations file
        self._permutations = 1 + np.array(list(itertools.permutations(range(size**2))))
    else:
        # Previously fell through silently, leaving _permutations unset and
        # failing later with AttributeError; fail fast instead.
        raise ValueError('size must be 2 or 3, got %r' % (size,))
    self._use_scalers = use_scalers
29,
5,
29,
7,
1492017767
] |
def basenet_settings(self):
    """Settings handed to the backbone: jigsaw uses fixed-size patch input."""
    settings = dict(convolutional=False)
    return settings
29,
5,
29,
7,
1492017767
] |
def build_network(self, network, extra, phase_test, global_step):
    """Attach the jigsaw classification head and losses to the base network.

    Takes pool5 (size == 3) or 'top' activations, reduces each patch
    embedding with an FC layer, concatenates the per-example patch
    embeddings, and classifies which permutation was applied.

    Returns:
        The populated selfsup info dict.
    """
    info = selfsup.info.create(scale_summary=True)
    if self._size == 3:
        z = network['activations']['pool5']
    else:
        z = network['activations']['top']
    #z = tf.squeeze(z, [1, 2])
    # Flatten all spatial/channel dims per patch.
    z = tf.reshape(z, (z.get_shape().as_list()[0], -1))
    if self._use_scalers:
        z = selfsup.ops.scale(z, name='scale')
    #W_init = tf.contrib.layers.variance_scaling_initializer()
    W_init = tf.random_normal_initializer(0.0, 0.0001)
    b_init = tf.constant_initializer(0.0)
    reduce_ch = self._reduce_channels
    with tf.variable_scope('reduction'):
        c_o = reduce_ch
        reduce_W = tf.get_variable('weights', [z.get_shape().as_list()[1], c_o], dtype=tf.float32,
                                   initializer=W_init)
        reduce_b = tf.get_variable('biases', [c_o], dtype=tf.float32,
                                   initializer=b_init)
        z = tf.nn.xw_plus_b(z, reduce_W, reduce_b)
        z = tf.nn.relu(z)
    # Regroup per example and concatenate the patch embeddings.
    z = tf.reshape(z, [self._loader.batch_size, -1, z.get_shape().as_list()[-1]])
    z = tf.concat(tf.unstack(z, axis=1), 1)
    with tf.variable_scope('jigsaw'):
        # One logit per permutation in the lookup table.
        c_o = len(self._permutations)
        jigsaw_W = tf.get_variable('weights', [z.get_shape().as_list()[1], c_o], dtype=tf.float32,
                                   initializer=W_init)
        jigsaw_b = tf.get_variable('biases', [c_o], dtype=tf.float32,
                                   initializer=b_init)
        z = tf.nn.xw_plus_b(z, jigsaw_W, jigsaw_b)
    with tf.variable_scope('primary_loss'):
        # NOTE(review): assumes self._y holds the permutation labels,
        # set elsewhere in this class — confirm.
        loss_each = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self._y, logits=z)
        primary_loss = tf.reduce_mean(loss_each)
    with tf.name_scope('weight_decay'):
        wd = 1e-6
        l2_loss = tf.nn.l2_loss(reduce_W) + tf.nn.l2_loss(jigsaw_W)
        weight_decay = wd * l2_loss
    with tf.name_scope('loss'):
        loss = weight_decay + primary_loss
    variables = info['vars']
    self.losses = OrderedDict([
        ('main', primary_loss),
        ('+weight_decay', weight_decay),
    ])
    self.primary_loss = primary_loss
    self.loss = loss
    self.feedback_variables = []
    info['activations']['primary_loss'] = primary_loss
    info['activations']['loss'] = loss
    info['activations']['weight_decay'] = weight_decay
    return info | gustavla/self-supervision | [
29,
5,
29,
7,
1492017767
] |
def create(kernel):
    """Build the shared reinforcement-core weapon-component template."""
    obj = Tangible()
    obj.template = "object/tangible/component/weapon/shared_reinforcement_core.iff"
    obj.attribute_template_id = -1
    obj.stfName("craft_weapon_ingredients_n", "reinforcement_core")
62,
37,
62,
37,
1297996365
] |
def register_in(router):
    """Register the booking viewsets on the given DRF router."""
    registrations = (
        (r'booking-resources', views.ResourceViewSet, 'booking-resource'),
        (r'booking-offerings', views.OfferingViewSet, 'booking-offering'),
    )
    for prefix, viewset, basename in registrations:
        router.register(prefix, viewset, basename=basename)
39,
35,
39,
3,
1484854426
] |
def create(kernel):
    """Build the chemical-recycler loot-schematic template object."""
    obj = Tangible()
    obj.template = "object/tangible/loot/loot_schematic/shared_chemical_recycler_schematic.iff"
    obj.attribute_template_id = -1
    obj.stfName("craft_item_ingredients_n", "chemical_recycler")
62,
37,
62,
37,
1297996365
] |
def reset():
    # Drop the cached Sqoop configuration so the next get_props() reloads it.
    global _PROPERTIES_DICT
    _PROPERTIES_DICT = None | cloudera/hue | [
804,
271,
804,
38,
1277149611
] |
def has_sqoop_has_security():
    """Return True when Sqoop authentication is configured as KERBEROS."""
    auth_type = get_props().get(_CONF_SQOOP_AUTHENTICATION_TYPE, 'SIMPLE')
    return auth_type.upper() == 'KERBEROS'
804,
271,
804,
38,
1277149611
] |
def ReturnNumbersAsDecimal(cursor, name, defaultType, size, precision, scale):
    """cx_Oracle output-type handler mapping NUMBER columns to Decimal.

    Returns None for all other types so the default handling applies.
    """
    if defaultType != cx_Oracle.NUMBER:
        return None
    return cursor.var(str, 9, cursor.arraysize, outconverter=decimal.Decimal)
804,
271,
804,
38,
1277149611
] |
def test_oserror(self, macos_version, lenient_run_command_output):
    """A failed codesign invocation (all-None output) means no --force."""
    lenient_run_command_output.return_value = (None, None, None)
    needs_force = signing._linker_signed_arm64_needs_force(None)
    self.assertFalse(needs_force)
    lenient_run_command_output.assert_called_once()
131,
27,
131,
3,
1490828945
] |
def test_not_linker_signed(self, macos_version, lenient_run_command_output):
lenient_run_command_output.return_value = (0, b'', b'''Executable=test | ric2b/Vivaldi-browser | [
131,
27,
131,
3,
1490828945
] |
def test_linker_signed_10_15(self, macos_version,
lenient_run_command_output):
lenient_run_command_output.return_value = (0, b'', b'''Executable=test | ric2b/Vivaldi-browser | [
131,
27,
131,
3,
1490828945
] |
def test_linker_signed_10_16(self, macos_version,
lenient_run_command_output):
# 10.16 is what a Python built against an SDK < 11.0 will see 11.0 as.
macos_version.return_value = [10, 16]
lenient_run_command_output.return_value = (0, b'', b'''Executable=test | ric2b/Vivaldi-browser | [
131,
27,
131,
3,
1490828945
] |
def test_linker_signed_11_0(self, macos_version,
lenient_run_command_output):
macos_version.return_value = [11, 0]
lenient_run_command_output.return_value = (0, b'', b'''Executable=test | ric2b/Vivaldi-browser | [
131,
27,
131,
3,
1490828945
] |
def setUp(self):
    # Placeholder input/output/work directories and a stub signing config
    # shared by the sign_part tests below.
    self.paths = model.Paths('/$I', '/$O', '/$W')
    self.config = test_config.TestConfig() | ric2b/Vivaldi-browser | [
131,
27,
131,
3,
1490828945
] |
def test_sign_part_needs_force(self, run_command,
                               linker_signed_arm64_needs_force):
    """--force is added when the existing linker signature requires it."""
    linker_signed_arm64_needs_force.return_value = True
    product = model.CodeSignedProduct('Test.app', 'test.signing.app')
    signing.sign_part(self.paths, self.config, product)
    expected = [
        'codesign', '--sign', '[IDENTITY]', '--force', '--timestamp',
        '--requirements', '=designated => identifier "test.signing.app"',
        '/$W/Test.app'
    ]
    run_command.assert_called_once_with(expected)
131,
27,
131,
3,
1490828945
] |
def test_sign_part_no_identifier_requirement(
        self, run_command, linker_signed_arm64_needs_force):
    """No designated requirement is emitted when identifier_requirement is off."""
    product = model.CodeSignedProduct(
        'Test.app', 'test.signing.app', identifier_requirement=False)
    signing.sign_part(self.paths, self.config, product)
    expected = ['codesign', '--sign', '[IDENTITY]', '--timestamp', '/$W/Test.app']
    run_command.assert_called_once_with(expected)
131,
27,
131,
3,
1490828945
] |
def test_sign_with_identifier_no_requirement(
        self, run_command, linker_signed_arm64_needs_force):
    """--identifier is passed while the designated requirement is omitted."""
    product = model.CodeSignedProduct(
        'Test.app',
        'test.signing.app',
        sign_with_identifier=True,
        identifier_requirement=False)
    signing.sign_part(self.paths, self.config, product)
    expected = [
        'codesign', '--sign', '[IDENTITY]', '--timestamp', '--identifier',
        'test.signing.app', '/$W/Test.app'
    ]
    run_command.assert_called_once_with(expected)
131,
27,
131,
3,
1490828945
] |
def test_sign_part_with_entitlements(self, run_command,
                                     linker_signed_arm64_needs_force):
    """An entitlements file is forwarded to codesign from the work dir."""
    product = model.CodeSignedProduct(
        'Test.app',
        'test.signing.app',
        entitlements='entitlements.plist',
        identifier_requirement=False)
    signing.sign_part(self.paths, self.config, product)
    expected = [
        'codesign', '--sign', '[IDENTITY]', '--timestamp', '--entitlements',
        '/$W/entitlements.plist', '/$W/Test.app'
    ]
    run_command.assert_called_once_with(expected)
131,
27,
131,
3,
1490828945
] |
def url_for_version(self, ver):
    """Every version maps to the same rolling upstream Linux binary URL."""
    url = "https://ftp.ncbi.nih.gov/toolbox/ncbi_tools/converters/by_program/tbl2asn/linux.tbl2asn.gz"
    return url
3244,
1839,
3244,
2847,
1389172932
] |
def freeze_graph_with_def_protos(
    input_graph_def,
    input_saver_def,
    input_checkpoint,
    output_node_names,
    restore_op_name,
    filename_tensor_name,
    clear_devices,
    initializer_nodes,
    variable_names_blacklist=''):
  """Converts all variables in a graph and checkpoint into constants."""
  del restore_op_name, filename_tensor_name  # Unused by updated loading code.
  # 'input_checkpoint' may be a prefix if we're using Saver V2 format
  if not saver_lib.checkpoint_exists(input_checkpoint):
    raise ValueError(
        'Input checkpoint "' + input_checkpoint + '" does not exist!')
  if not output_node_names:
    raise ValueError(
        'You must supply the name of a node to --output_node_names.')
  # Remove all the explicit device specifications for this node. This helps to
  # make the graph more portable.
  if clear_devices:
    for node in input_graph_def.node:
      node.device = ''
  _ = importer.import_graph_def(input_graph_def, name='')
  with session.Session() as sess:
    if input_saver_def:
      # A saver definition was supplied; restore with it directly.
      saver = saver_lib.Saver(saver_def=input_saver_def)
      saver.restore(sess, input_checkpoint)
    else:
      # No saver def: build a var list by matching checkpoint keys to
      # tensors in the imported graph.
      var_list = {}
      reader = pywrap_tensorflow.NewCheckpointReader(input_checkpoint)
      var_to_shape_map = reader.get_variable_to_shape_map()
      for key in var_to_shape_map:
        try:
          tensor = sess.graph.get_tensor_by_name(key + ':0')
        except KeyError:
          # This tensor doesn't exist in the graph (for example it's
          # 'global_step' or a similar housekeeping element) so skip it.
          continue
        var_list[key] = tensor
      saver = saver_lib.Saver(var_list=var_list)
      saver.restore(sess, input_checkpoint)
    if initializer_nodes:
      sess.run(initializer_nodes)
    # Optional comma-separated list of variables to keep as variables.
    variable_names_blacklist = (variable_names_blacklist.split(',') if
                                variable_names_blacklist else None)
    output_graph_def = graph_util.convert_variables_to_constants(
        sess,
        input_graph_def,
        output_node_names.split(','),
        variable_names_blacklist=variable_names_blacklist)
  return output_graph_def | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def _tf_example_input_placeholder():
    """Scalar string placeholder holding a serialized tf.Example; returns the
    decoded image expanded to a batch of one."""
    serialized_example = tf.placeholder(tf.string, shape=[], name='tf_example')
    decoded = tf_example_decoder.TfExampleDecoder().decode(serialized_example)
    image = decoded[fields.InputDataFields.image]
    return tf.expand_dims(image, axis=0)
1,
5,
1,
10,
1496432585
] |
def _encoded_image_string_tensor_input_placeholder():
    """Scalar string placeholder for an encoded image; returns the decoded
    3-channel image expanded to a batch of one."""
    encoded = tf.placeholder(dtype=tf.string,
                             shape=[],
                             name='encoded_image_string_tensor')
    decoded = tf.image.decode_image(encoded, channels=3)
    decoded.set_shape((None, None, 3))
    return tf.expand_dims(decoded, axis=0)
1,
5,
1,
10,
1496432585
] |
def _add_output_tensor_nodes(postprocessed_tensors):
    """Adds named output nodes for detection boxes, scores and classes.

    Args:
        postprocessed_tensors: dict with 'detection_boxes'
            [batch, max_detections, 4], 'detection_scores'
            [batch, max_detections], 'detection_classes'
            [batch, max_detections], optional 'detection_masks'
            [batch, max_detections, mask_height, mask_width], and
            'num_detections' [batch].

    Returns:
        A dict of identity-wrapped output tensors. Class predictions are
        shifted by a label id offset of 1.
    """
    label_id_offset = 1
    outputs = {}
    outputs['detection_boxes'] = tf.identity(
        postprocessed_tensors.get('detection_boxes'), name='detection_boxes')
    outputs['detection_scores'] = tf.identity(
        postprocessed_tensors.get('detection_scores'), name='detection_scores')
    outputs['detection_classes'] = tf.identity(
        postprocessed_tensors.get('detection_classes') + label_id_offset,
        name='detection_classes')
    outputs['num_detections'] = tf.identity(
        postprocessed_tensors.get('num_detections'), name='num_detections')
    masks = postprocessed_tensors.get('detection_masks')
    if masks is not None:
        outputs['detection_masks'] = tf.identity(masks, name='detection_masks')
    return outputs
1,
5,
1,
10,
1496432585
] |
def _write_saved_model(inference_graph_path, inputs, outputs,
                       checkpoint_path=None, use_moving_averages=False):
  """Writes SavedModel to disk.

  If checkpoint_path is not None bakes the weights into the graph thereby
  eliminating the need of checkpoint files during inference. If the model
  was trained with moving averages, setting use_moving_averages to true
  restores the moving averages, otherwise the original set of variables
  is restored.

  Args:
    inference_graph_path: Path to write inference graph.
    inputs: The input image tensor to use for detection.
    outputs: A tensor dictionary containing the outputs of a DetectionModel.
    checkpoint_path: Optional path to the checkpoint file.
    use_moving_averages: Whether to export the original or the moving averages
      of the trainable variables from the checkpoint.
  """
  inference_graph_def = tf.get_default_graph().as_graph_def()
  checkpoint_graph_def = None
  if checkpoint_path:
    # Freeze the checkpoint weights into the graph def.
    output_node_names = ','.join(outputs.keys())
    checkpoint_graph_def = get_frozen_graph_def(
        inference_graph_def=inference_graph_def,
        use_moving_averages=use_moving_averages,
        input_checkpoint=checkpoint_path,
        output_node_names=output_node_names
    )
  # Build the SavedModel in a fresh graph so only the frozen def is exported.
  # NOTE(review): when checkpoint_path is falsy, checkpoint_graph_def stays
  # None and import_graph_def below would fail — confirm callers always pass
  # a checkpoint.
  with tf.Graph().as_default():
    with session.Session() as sess:
      tf.import_graph_def(checkpoint_graph_def)
      builder = tf.saved_model.builder.SavedModelBuilder(inference_graph_path)
      tensor_info_inputs = {
          'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
      tensor_info_outputs = {}
      for k, v in outputs.items():
        tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v)
      # Single PREDICT signature mapping the image input to all outputs.
      detection_signature = (
          tf.saved_model.signature_def_utils.build_signature_def(
              inputs=tensor_info_inputs,
              outputs=tensor_info_outputs,
              method_name=signature_constants.PREDICT_METHOD_NAME))
      builder.add_meta_graph_and_variables(
          sess, [tf.saved_model.tag_constants.SERVING],
          signature_def_map={
              signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                  detection_signature,
          },
      )
      builder.save() | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def create_reports_service(self, user_email):
    """Build and return an Admin SDK Reports service object authorized with
    the service account acting on behalf of the given user.

    Args:
        user_email: The email of the user. Needs permissions to access
            the Admin APIs.

    Returns:
        Admin SDK reports service object.
    """
    local_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = os.path.join(local_dir, 'service_accountkey.json')
    credentials = service_account.Credentials.from_service_account_file(
        file_path, scopes=self.SCOPES)
    # BUG FIX: google-auth service-account credentials implement domain-wide
    # delegation via with_subject(); create_delegated() is the old
    # oauth2client API and does not exist on this credentials class.
    delegated_creds = credentials.with_subject(user_email)
    return build('admin', 'reports_v1', credentials=delegated_creds)
14247,
5365,
14247,
62,
1517864132
] |
def __init__(self, message, is_infra_error=False):
    """Base error type.

    Args:
        message: human-readable error description.
        is_infra_error: True when the failure is an infrastructure problem
            rather than a product/test failure.
    """
    super(BaseError, self).__init__(message)
    self._is_infra_error = is_infra_error
    # Stored explicitly: BaseException.message was removed in Python 3.
    self.message = message | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __ne__(self, other):
    """Inverse of equality, keeping != consistent with ==."""
    equal = (self == other)
    return not equal
1835,
570,
1835,
1039,
1429033745
] |
def line2d(p0, p1):
    """Return coefficients (a, b, c) of the 2D line a*x + b*y + c = 0
    passing through p0 and p1.

    Only the first two coordinates of each point are used, so 3D points
    are accepted.
    """
    x0, y0 = p0[:2]
    x1, y1 = p1[:2]
    if x0 == x1:
        # Vertical line: -x + x1 = 0
        a = -1
        b = 0
        c = x1
    elif y0 == y1:
        # Horizontal line: y - y1 = 0
        a = 0
        b = 1
        c = -y1
    else:
        # BUG FIX: the original used b = (x0 - x1), whose sign is wrong —
        # the defining points then fail a*x + b*y + c = 0. The correct
        # normal component is (x1 - x0).
        a = (y0 - y1)
        b = (x1 - x0)
        c = (x0 * y1 - x1 * y0)
    return a, b, c
1,
5,
1,
10,
1496432585
] |
def line2d_seg_dist(p1, p2, p0):
    """Distance(s) from the segment p1-p2 to point(s) p0.

    p0[0] / p0[1] may be arrays of x / y coordinates. The projection
    parameter u is clamped to [0, 1], so distances are measured to the
    segment rather than to the infinite line.
    """
    dx = p2[0] - p1[0]
    dy = p2[1] - p1[1]
    px = np.asarray(p0[0]) - p1[0]
    py = np.asarray(p0[1]) - p1[1]
    u = (px * dx + py * dy) / float(abs(dx**2 + dy**2))
    u = np.clip(u, 0, 1)
    return np.sqrt((px - u * dx)**2 + (py - u * dy)**2)
1,
5,
1,
10,
1496432585
] |
def mod(v):
    """Euclidean length of a 3D vector."""
    x, y, z = v[0], v[1], v[2]
    return np.sqrt(x * x + y * y + z * z)
1,
5,
1,
10,
1496432585
] |
def test_world():
    """Smoke-test world_transformation over a sample axis-aligned box."""
    bounds = (100, 120, -100, 100, 0.1, 0.2)
    M = world_transformation(*bounds)
    print(M)
1,
5,
1,
10,
1496432585
] |
def persp_transformation(zfront, zback):
    """Return the 4x4 perspective projection matrix for near plane zfront
    and far plane zback."""
    depth = zfront - zback
    a = (zfront + zback) / depth
    b = -2 * (zfront * zback) / depth
    M = np.zeros((4, 4))
    M[0, 0] = 1.0
    M[1, 1] = 1.0
    M[2, 2] = a
    M[2, 3] = b
    M[3, 2] = -1.0
    return M
1,
5,
1,
10,
1496432585
] |
def proj_transform_vec_clip(vec, M):
    """Project homogeneous column vectors *vec* (shape (4, N)) with M.

    Returns (txs, tys, tzs, tis), where tis is a clip mask.
    """
    vecw = np.dot(M, vec)
    w = vecw[3]
    # Perspective divide.
    txs, tys, tzs = vecw[0] / w, vecw[1] / w, vecw[2] / w
    # NOTE(review): the mask tests the *unprojected* coordinates against
    # [0, 1]; preserved verbatim from the original implementation.
    tis = (vecw[0] >= 0) * (vecw[0] <= 1) * (vecw[1] >= 0) * (vecw[1] <= 1)
    # BUG FIX: np.sometrue was deprecated and removed from NumPy; np.any is
    # the documented replacement with identical semantics.
    if np.any(tis):
        tis = vecw[1] < 1
    return txs, tys, tzs, tis
1,
5,
1,
10,
1496432585
] |
def vec_pad_ones(xs, ys, zs):
    """Stack xs, ys, zs into homogeneous coordinates by appending ones.

    Accepts ndarrays, sequences, or scalars.
    """
    try:
        try:
            pad = np.ones(xs.shape)
        except (AttributeError, TypeError):
            pad = np.ones((len(xs)))
        vec = np.array([xs, ys, zs, pad])
    except TypeError:
        # Scalar inputs: a plain 4-vector with w = 1.
        vec = np.array([xs, ys, zs, 1])
    return vec
1,
5,
1,
10,
1496432585
] |
def proj_transform_clip(xs, ys, zs, M):
    """Project the given coordinates with matrix M, including clipping.

    Returns (txs, tys, tzs, tis) where tis is the clip mask.
    """
    homogeneous = vec_pad_ones(xs, ys, zs)
    return proj_transform_vec_clip(homogeneous, M)
1,
5,
1,
10,
1496432585
] |
def proj_points(points, M):
    """Project a sequence of points with M and return a list of tuples."""
    transformed = proj_trans_points(points, M)
    return list(zip(*transformed))
1,
5,
1,
10,
1496432585
] |
def proj_trans_clip_points(points, M):
    """Unpack (x, y, z) point tuples and project them with clipping."""
    xs, ys, zs = zip(*points)
    return proj_transform_clip(xs, ys, zs, M)
1,
5,
1,
10,
1496432585
] |
def test_proj_make_M(E=None):
    """Build a combined view + perspective projection matrix for testing.

    Args:
        E: optional eye point (3-vector); defaults to (1000, -1000, 2000).
    """
    # BUG FIX: `E = E or np.array(...)` raises ValueError for a passed-in
    # multi-element ndarray (ambiguous truth value); compare against None.
    if E is None:
        E = np.array([1, -1, 2]) * 1000
    #E = np.array([20,10,20])
    R = np.array([1, 1, 1]) * 100  # view center
    V = np.array([0, 0, 1])        # up vector
    viewM = view_transformation(E, R, V)
    perspM = persp_transformation(100, -100)
    M = np.dot(perspM, viewM)
    return M
1,
5,
1,
10,
1496432585
] |
def rot_x(V, alpha):
    """Rotate homogeneous vector(s) V about the x-axis by alpha radians."""
    cosa, sina = np.cos(alpha), np.sin(alpha)
    # BUG FIX: the bottom-right entry of a homogeneous rotation matrix must
    # be 1; the original 0 zeroed the w component of every result.
    M1 = np.array([[1, 0, 0, 0],
                   [0, cosa, -sina, 0],
                   [0, sina, cosa, 0],
                   [0, 0, 0, 1]])
    return np.dot(M1, V)
1,
5,
1,
10,
1496432585
] |
def __init__(self, machine: "MachineController") -> None:
    """Initialise Fadecandy.

    Args:
    ----
        machine: The main ``MachineController`` object.
    """
    super().__init__(machine)
    # Dedicated logger for this hardware platform.
    self.log = logging.getLogger("FadeCandy")
    self.log.debug("Configuring FadeCandy hardware interface.") | missionpinball/mpf | [
] |
def __init__(self, machine, config):
    """Initialise Fadecandy client.

    Args:
    ----
        machine: The main ``MachineController`` instance.
        config: Dictionary which contains configuration settings for the
            OPC client.
    """
    super().__init__(machine, config)
    self.log = logging.getLogger('FadeCandyClient')
    self.update_every_tick = True
    # Validate the 'fadecandy' section of the machine config.
    self.config = self.machine.config_validator.validate_config('fadecandy',
                                                                self.machine.config['fadecandy'])
    self.gamma = self.config['gamma']
    # Whitepoint arrives as a string list; coerce its RGB entries to float.
    self.whitepoint = Util.string_to_event_list(self.config['whitepoint'])
    self.whitepoint[0] = float(self.whitepoint[0])
    self.whitepoint[1] = float(self.whitepoint[1])
    self.whitepoint[2] = float(self.whitepoint[2])
    self.linear_slope = self.config['linear_slope']
    self.linear_cutoff = self.config['linear_cutoff']
    self.keyframe_interpolation = self.config['keyframe_interpolation']
    self.dithering = self.config['dithering']
    # Without keyframe interpolation there is no need to push updates
    # every machine tick.
    if not self.keyframe_interpolation:
        self.update_every_tick = False | missionpinball/mpf | [
176,
127,
176,
108,
1403853986
] |
def __repr__(self):
    """Return the short identifier used in debug logs."""
    return '<Platform.FadeCandyOPClient>' | missionpinball/mpf | [
176,
127,
176,
108,
1403853986
] |
def _ls(item, recursive=False, groups=False, level=0):
    """Collect HDF5 object names under `item`.

    Lists dataset names by default; with groups=True lists group names
    instead. Descends past the first level only when recursive=True.
    """
    names = []
    if isinstance(item, h5.Group):
        if groups and level > 0:
            names.append(item.name)
        if level == 0 or recursive:
            for child in item.keys():
                names.extend(_ls(item[child], recursive, groups, level + 1))
    elif not groups:
        names.append(item.name)
    return names
130,
68,
130,
18,
1474306651
] |
def write_data(data, filename):
    """Write data in dict `data` to HDF5 file.

    `filename` may be a path (a new file is created and closed) or an
    already-open group, in which case the caller owns the handle. Nested
    dicts become nested groups.
    """
    is_root = isinstance(filename, str)
    group = h5.File(filename, 'w') if is_root else filename
    for key, value in six.iteritems(data):
        if isinstance(value, dict):
            write_data(value, group.create_group(key))
        else:
            group[key] = value
    if is_root:
        group.close()
130,
68,
130,
18,
1474306651
] |
def reader(data_files, names, batch_size=128, nb_sample=None, shuffle=False,
           loop=False):
    """Yield batches of the named datasets from a list of HDF5 files.

    Args:
        data_files: HDF5 file path(s) to read from.
        names: dataset names (list, or dict convertible via hnames_to_names).
        batch_size: maximum number of samples per yielded batch.
        nb_sample: optional cap on the total number of samples read.
        shuffle: shuffle the file order and the samples within each file.
        loop: restart from the first file after exhausting the data.

    Yields:
        dict mapping each name to a batch array.
    """
    if isinstance(names, dict):
        names = hnames_to_names(names)
    else:
        names = to_list(names)
    # Copy, since list will be changed if shuffle=True
    data_files = list(to_list(data_files))
    # Check if names exist
    h5_file = h5.File(data_files[0], 'r')
    for name in names:
        if name not in h5_file:
            raise ValueError('%s does not exist!' % name)
    h5_file.close()
    if nb_sample:
        # Select the first k files s.t. the total sample size is at least
        # nb_sample. Only these files will be shuffled.
        _data_files = []
        nb_seen = 0
        for data_file in data_files:
            h5_file = h5.File(data_file, 'r')
            nb_seen += len(h5_file[names[0]])
            h5_file.close()
            _data_files.append(data_file)
            if nb_seen >= nb_sample:
                break
        data_files = _data_files
    else:
        # No cap: np.inf makes the limit checks below vacuous.
        nb_sample = np.inf
    file_idx = 0
    nb_seen = 0
    while True:
        if shuffle and file_idx == 0:
            # Re-shuffle the file order at the start of every pass.
            np.random.shuffle(data_files)
        h5_file = h5.File(data_files[file_idx], 'r')
        data_file = dict()
        for name in names:
            data_file[name] = h5_file[name]
        nb_sample_file = len(list(data_file.values())[0])
        if shuffle:
            # Shuffle data within the entire file, which requires reading
            # the entire file into memory
            idx = np.arange(nb_sample_file)
            np.random.shuffle(idx)
            for name, value in six.iteritems(data_file):
                data_file[name] = value[:len(idx)][idx]
        nb_batch = int(np.ceil(nb_sample_file / batch_size))
        for batch in range(nb_batch):
            batch_start = batch * batch_size
            # Trim the batch so the nb_sample cap is never exceeded.
            nb_read = min(nb_sample - nb_seen, batch_size)
            batch_end = min(nb_sample_file, batch_start + nb_read)
            _batch_size = batch_end - batch_start
            if _batch_size == 0:
                break
            data_batch = dict()
            for name in names:
                data_batch[name] = data_file[name][batch_start:batch_end]
            yield data_batch
            nb_seen += _batch_size
            if nb_seen >= nb_sample:
                break
        h5_file.close()
        file_idx += 1
        assert nb_seen <= nb_sample
        if nb_sample == nb_seen or file_idx == len(data_files):
            if loop:
                # Start the next pass over the files.
                file_idx = 0
                nb_seen = 0
            else:
                break | cangermueller/deepcpg | [
130,
68,
130,
18,
1474306651
] |
def read_from(reader, nb_sample=None):
    """Drain a batch reader and stack its batches into whole arrays.

    Args:
        reader: iterable yielding batches (dicts, or sequences which are
            converted via _to_dict and returned as a list again).
        nb_sample: optional cap; stop once this many samples were read and
            truncate each output to exactly nb_sample.

    Returns:
        dict (or list, matching the input batch type) of stacked arrays.
    """
    from .utils import stack_dict
    data = dict()
    nb_seen = 0
    is_dict = True
    for data_batch in reader:
        if not isinstance(data_batch, dict):
            # Remember the original type so the result can be converted back.
            data_batch = _to_dict(data_batch)
            is_dict = False
        for key, value in six.iteritems(data_batch):
            values = data.setdefault(key, [])
            values.append(value)
        # All values in a batch share the same length.
        nb_seen += len(list(data_batch.values())[0])
        if nb_sample and nb_seen >= nb_sample:
            break
    data = stack_dict(data)
    if nb_sample:
        for key, value in six.iteritems(data):
            data[key] = value[:nb_sample]
    if not is_dict:
        # Convert the integer-keyed dict back into a positional list.
        data = [data[i] for i in range(len(data))]
    return data | cangermueller/deepcpg | [
130,
68,
130,
18,
1474306651
] |
def __init__(self, plotly_name="delta", parent_name="indicator", **kwargs):
super(DeltaValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Delta"),
data_docs=kwargs.pop(
"data_docs",
"""
decreasing
:class:`plotly.graph_objects.indicator.delta.De
creasing` instance or dict with compatible
properties
font
Set the font used to display the delta
increasing
:class:`plotly.graph_objects.indicator.delta.In
creasing` instance or dict with compatible
properties
position
Sets the position of delta with respect to the
number.
reference
Sets the reference value to compute the delta.
By default, it is set to the current value.
relative
Show relative change
valueformat
Sets the value formatting rule using d3
formatting mini-language which is similar to
those of Python. See
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format | plotly/python-api | [
13052,
2308,
13052,
1319,
1385013188
] |
def test_distribute(tmp_path):
    """Check that the scripts to compute a trajectory are generated correctly."""
    commands = (
        "distribute_jobs.py -i test/test_files/input_test_distribute_derivative_couplings.yml",
        "distribute_jobs.py -i test/test_files/input_test_distribute_absorption_spectrum.yml",
    )
    for command in commands:
        print("testing: ", command)
        call_distribute(tmp_path, command)
7,
10,
7,
9,
1465559023
] |
def check_scripts():
    """Verify that each chunk* directory contains the distribution scripts.

    Raises RuntimeError on the first missing file.
    """
    chunk_dirs = fnmatch.filter(os.listdir('.'), "chunk*")
    expected = ("launch.sh", "chunk_xyz*", "input.yml")
    for chunk in chunk_dirs:
        chunk_path = Path(chunk)
        for pattern in expected:
            try:
                next(chunk_path.glob(pattern))
            except StopIteration:
                msg = f"There is not file: {pattern}"
                print(msg)
                raise RuntimeError(msg)
7,
10,
7,
9,
1465559023
] |
def id(self):
    """Member identifier for the user or group.

    :rtype: int or None
    """
    return self.properties.get('Id')
816,
246,
816,
181,
1454945091
] |
def title(self):
    """Display name of the principal.

    :rtype: str or None
    """
    return self.properties.get('Title')
816,
246,
816,
181,
1454945091
] |
def title(self, value):
    """Set the principal's display name (writes the 'Title' property)."""
    self.set_property('Title', value) | vgrem/Office365-REST-Python-Client | [
816,
246,
816,
181,
1454945091
] |
def login_name(self):
    """Login name of the principal.

    :rtype: str or None
    """
    return self.properties.get('LoginName')
816,
246,
816,
181,
1454945091
] |
def user_principal_name(self):
    """User principal name (UPN) of the principal.

    :rtype: str or None
    """
    return self.properties.get('UserPrincipalName')
816,
246,
816,
181,
1454945091
] |
def is_hidden_in_ui(self):
        """Gets whether the principal is hidden in the user interface.
        :rtype: bool or None
        """
        return self.properties.get('IsHiddenInUI', None)
816,
246,
816,
181,
1454945091
] |
def principal_type(self):
        """Gets the type of the principal (user, group, etc.) as an enum value.
        :rtype: int or None
        """
        return self.properties.get('PrincipalType', None)
816,
246,
816,
181,
1454945091
] |
def __init__(self, *args):
        # type: (Any) -> None
        """__init__(o, sentinel=None)"""
        self._iterable = iter(*args)  # type: Iterable
        self._cache = collections.deque()  # type: collections.deque
        # A two-argument call supplies an explicit sentinel; otherwise use a
        # unique object that no iterator value can ever compare equal to.
        self.sentinel = args[1] if len(args) == 2 else object()
154,
12,
154,
3,
1505258255
] |
def __next__(self, n=None):
        # type: (int) -> Any
        """Advance the iterator, returning the next item (or ``n`` items).

        Delegates to ``self.next``; see that method for details.
        """
        # note: prevent 2to3 to transform self.next() in next(self) which
        # causes an infinite loop !
        return getattr(self, 'next')(n)
154,
12,
154,
3,
1505258255
] |
def has_next(self):
        # type: () -> bool
        """Determine if the iterator has more items.

        Returns True while more items remain, False once exhausted.
        Never raises :exc:`StopIteration`.
        """
        upcoming = self.peek()
        return upcoming != self.sentinel
154,
12,
154,
3,
1505258255
] |
def peek(self, n=None):
        # type: (int) -> Any
        """Preview the next item (or next ``n`` items) without advancing.

        With ``n is None`` the single next item is returned.  With an int
        ``n`` a list of the next ``n`` items is returned (``n == 0`` gives an
        empty list); if the iterator is exhausted, the sentinel appears as
        the last item.  Never raises :exc:`StopIteration`.
        """
        self._fillcache(n)
        if n is None:
            return self._cache[0]
        return [self._cache[index] for index in range(n)]
154,
12,
154,
3,
1505258255
] |
def __init__(self, *args, **kwargs):
        # type: (Any, Any) -> None
        """__init__(o, sentinel=None, modifier=lambda x: x)"""
        # The modifier may be passed either as a keyword argument or as an
        # optional third positional argument; the keyword form wins.
        if 'modifier' in kwargs:
            self.modifier = kwargs['modifier']
        elif len(args) > 2:
            self.modifier = args[2]
            # Strip the modifier before forwarding the remaining positional
            # arguments (iterable, optional sentinel) to peek_iter.__init__.
            args = args[:2]
        else:
            # Default: items pass through unmodified.
            self.modifier = lambda x: x
        if not callable(self.modifier):
            raise TypeError('modify_iter(o, modifier): '
                            'modifier must be callable')
        super(modify_iter, self).__init__(*args)
154,
12,
154,
3,
1505258255
] |
def test___doc__(self):
self.assertEqual(
ctds.Cursor.fetchmany.__doc__,
'''\ | zillow/ctds | [
79,
13,
79,
21,
1457558644
] |
def test_closed(self):
        """Calling .fetchmany() on a closed cursor raises InterfaceError."""
        with self.connect() as connection:
            cursor = connection.cursor()
            cursor.close()
            with self.assertRaises(ctds.InterfaceError) as ctx:
                cursor.fetchmany()
            self.assertEqual(str(ctx.exception), 'cursor closed')
79,
13,
79,
21,
1457558644
] |
def test_invalid_size(self):
        """Passing a non-int ``size`` raises TypeError."""
        with self.connect() as connection:
            with connection.cursor() as cursor:
                with self.assertRaises(TypeError):
                    cursor.fetchmany(size='123')
79,
13,
79,
21,
1457558644
] |
def test_fetchmany(self):
        """.fetchmany() honors ``arraysize``/``size`` across result sets."""
        with self.connect() as connection:
            with connection.cursor() as cursor:
                def rows(size=None):
                    # Fetch the next batch, normalizing rows to plain tuples.
                    batch = cursor.fetchmany() if size is None else cursor.fetchmany(size)
                    return [tuple(row) for row in batch]

                cursor.execute(
                    '''
                    DECLARE @{0} TABLE(i INT);
                    INSERT INTO @{0}(i) VALUES (1),(2),(3);
                    SELECT * FROM @{0};
                    SELECT i * 2 FROM @{0};
                    '''.format(self.test_fetchmany.__name__)
                )
                # Default arraysize is 1: one row per call.
                self.assertEqual(rows(), [(1,)])
                self.assertEqual(rows(), [(2,)])
                self.assertEqual(rows(), [(3,)])
                self.assertEqual(rows(), [])
                self.assertEqual(cursor.nextset(), True)
                self.assertEqual(rows(), [(2,)])
                self.assertEqual(rows(), [(4,)])
                self.assertEqual(rows(), [(6,)])
                self.assertEqual(rows(), [])
                self.assertEqual(cursor.nextset(), None)
                self.assertRaises(ctds.InterfaceError, cursor.fetchmany)

                # An explicit size (or arraysize) fetches whole batches.
                cursor.arraysize = 3
                cursor.execute(
                    '''
                    DECLARE @{0} TABLE(i INT);
                    INSERT INTO @{0}(i) VALUES (1),(2),(3);
                    SELECT * FROM @{0};
                    SELECT i * 2 FROM @{0};
                    '''.format(self.test_fetchmany.__name__)
                )
                self.assertEqual(rows(3), [(1,), (2,), (3,)])
                self.assertEqual(rows(), [])
                self.assertEqual(cursor.nextset(), True)
                self.assertEqual(rows(3), [(2,), (4,), (6,)])
                self.assertEqual(rows(), [])
                self.assertEqual(cursor.nextset(), None)
                self.assertRaises(ctds.InterfaceError, cursor.fetchmany)
79,
13,
79,
21,
1457558644
] |
def test_empty_resultset(self):
        """.fetchmany() yields nothing for a result set with zero rows."""
        with self.connect() as connection:
            with connection.cursor() as cursor:
                cursor.execute(
                    '''
                    DECLARE @{0} TABLE(i INT);
                    INSERT INTO @{0}(i) VALUES (1),(2),(3);
                    SELECT i FROM @{0} WHERE i < 0;
                    '''.format(self.test_empty_resultset.__name__)
                )
                fetched = list(cursor.fetchmany())
                self.assertEqual(fetched, [])
                self.assertIsNone(cursor.nextset())
79,
13,
79,
21,
1457558644
] |
def __init__(self, phaseSide1="C", phaseSide2="C", normalOpen=False, Switch=None, *args, **kw_args):
        """Initialises a new 'SwitchPhase' instance.
        @param phaseSide1: Phase of this SwitchPhase on the “from” (Switch.Terminal.sequenceNumber=1) side. Should be a phase contained in that terminal’s Terminal.phases attribute. Values are: "C", "N", "s1", "B", "s2", "A"
        @param phaseSide2: Phase of this SwitchPhase on the “to” (Switch.Terminal.sequenceNumber=2) side. Should be a phase contained in that terminal’s Terminal.phases attribute. Values are: "C", "N", "s1", "B", "s2", "A"
        @param normalOpen: Used in cases when no Measurement for the status value is present. If the SwitchPhase has a status measurement the Discrete.normalValue is expected to match with this value.
        @param Switch:
        """
        #: Phase of this SwitchPhase on the “from” (Switch.Terminal.sequenceNumber=1) side. Should be a phase contained in that terminal’s Terminal.phases attribute. Values are: "C", "N", "s1", "B", "s2", "A"
        self.phaseSide1 = phaseSide1
        #: Phase of this SwitchPhase on the “to” (Switch.Terminal.sequenceNumber=2) side. Should be a phase contained in that terminal’s Terminal.phases attribute. Values are: "C", "N", "s1", "B", "s2", "A"
        self.phaseSide2 = phaseSide2
        #: Used in cases when no Measurement for the status value is present. If the SwitchPhase has a status measurement the Discrete.normalValue is expected to match with this value.
        self.normalOpen = normalOpen
        # Assign through the Switch property so the reverse reference
        # (Switch._SwitchPhases) is kept in sync by the setter.
        self._Switch = None
        self.Switch = Switch
        super(SwitchPhase, self).__init__(*args, **kw_args)
68,
33,
68,
7,
1238978196
] |
def getSwitch(self): | rwl/PyCIM | [
68,
33,
68,
7,
1238978196
] |
def setSwitch(self, value):
        """Rewire the bidirectional association to the owning Switch.

        Detaches this phase from the previous switch's SwitchPhases list
        (if any), then appends it to the new switch's list.
        """
        if self._Switch is not None:
            # Drop self from the old switch's phase list.
            remaining = [phase for phase in self.Switch.SwitchPhases if phase != self]
            self._Switch._SwitchPhases = remaining
        self._Switch = value
        if self._Switch is not None and self not in self._Switch._SwitchPhases:
            self._Switch._SwitchPhases.append(self)
68,
33,
68,
7,
1238978196
] |
def __init__(self, path, map_size=1000000000, code_size=8, writeonly=False):
        """Open (or create) an LMDB-backed store of hash codes plus payloads.

        :param path: directory for the LMDB environment; created if missing
        :param map_size: maximum size, in bytes, of the LMDB memory map
        :param code_size: size in bytes of each hash code; must be a multiple
            of 8 and must agree with an already-initialized database
        :param writeonly: when True, skip maintaining the in-memory code cache
        :raises ValueError: if ``code_size`` conflicts with an existing
            database, is None for a fresh one, or is not a multiple of 8
        """
        super(HammingDb, self).__init__()
        self.writeonly = writeonly
        if not os.path.exists(path):
            os.makedirs(path)
        self.path = path
        self.env = lmdb.open(
            self.path,
            max_dbs=10,
            map_size=map_size,
            writemap=True,
            map_async=True,
            metasync=True)
        self.env.reader_check()
        self.metadata = self.env.open_db(b'metadata')
        try:
            # An existing database records its code size; int(None) raises
            # TypeError below when the key is absent (fresh database).
            self.code_size = int(self.get_metadata(b'codesize'))
            if code_size and code_size != self.code_size:
                # Fixed: the stored size and the passed size were swapped in
                # the original message.
                raise ValueError(
                    'Database is already initialized with code size {self.code_size}'
                    ', but {code_size} was passed to __init__'
                    .format(**locals()))
        except TypeError:
            # Fresh database: validate and persist the requested code size.
            if code_size is None:
                raise ValueError(
                    'You must supply a code size for an uninitialized database')
            if code_size % 8:
                raise ValueError('code_size must be a multiple of 8')
            self.set_metadata(b'codesize', str(code_size).encode())
            self.code_size = code_size
        self.index = self.env.open_db(b'index')
        # Scratch buffers reused by append/search to avoid reallocation.
        self._append_buffer = self._recarray(1)
        self._code_bytearray = bytearray(b'a' * self.code_size)
        self._code_buffer = np.frombuffer(self._code_bytearray, dtype=np.uint64)
        self._codes = None
        self._ids = set()
        self._catch_up_on_in_memory_store()
        self._thread_count = cpu_count()
        self._pool = ThreadPool(processes=self._thread_count)
22,
6,
22,
26,
1458698104
] |
def __del__(self):
        # Best-effort cleanup when garbage collected; close() releases
        # the underlying resources.
        self.close()
22,
6,
22,
26,
1458698104
] |
def __exit__(self, exc_type, exc_val, exc_tb):
        # Context-manager exit: always close; returning None lets any
        # in-flight exception propagate.
        self.close()
22,
6,
22,
26,
1458698104
] |
def get_metadata(self, key):
        """Return the raw bytes stored under ``key`` in the metadata
        sub-database, or None when the key is absent."""
        with self.env.begin() as txn:
            value = txn.get(key, db=self.metadata)
        return value
22,
6,
22,
26,
1458698104
] |
def __len__(self):
        """Number of entries stored in the index sub-database."""
        with self.env.begin() as txn:
            entry_count = txn.stat(self.index)['entries']
        return entry_count if entry_count else 0
return lmdb_size | JohnVinyard/zounds | [
22,
6,
22,
26,
1458698104
] |
def _initialize_in_memory_store(self):
        """Lazily allocate the in-memory code cache.

        Skipped for write-only databases and when the cache already exists.
        """
        if self.writeonly or self._codes is not None:
            return
        # Reserve room for at least a million codes, or the current
        # database size if that is larger.
        initial_size = max(int(1e6), len(self))
        self._codes = Growable(self._recarray(initial_size))
22,
6,
22,
26,
1458698104
] |
def _validate_code_size(self, code):
        """Raise ValueError unless ``code`` has exactly ``code_size`` bytes."""
        code_len = len(code)
        if code_len == self.code_size:
            return
        fmt = '''code must be equal to code_size
            ({self.code_size}), but was {code_len}'''
        raise ValueError(fmt.format(**locals()))
22,
6,
22,
26,
1458698104
] |
def _check_for_external_modifications(self):
        # If another writer appended entries since we last looked, the LMDB
        # entry count will disagree with the in-memory cache; re-sync it.
        if self.__len__() != self._codes.logical_size:
            self._catch_up_on_in_memory_store()
22,
6,
22,
26,
1458698104
] |
def append(self, code, data):
        """Persist ``code + data`` under a newly generated id, then index
        the code in the in-memory store.

        ``code`` and ``data`` may each be str or bytes; str is UTF-8 encoded.
        """
        def to_bytes(value):
            # Accept str or bytes transparently (EAFP, as elsewhere here).
            try:
                return value.encode()
            except AttributeError:
                return value

        self._validate_code_size(code)
        self._initialize_in_memory_store()
        with self.env.begin(write=True) as txn:
            new_id = self._new_id()
            code = to_bytes(code)
            txn.put(new_id, code + to_bytes(data), db=self.index)
            self._add_code(new_id, code)
22,
6,
22,
26,
1458698104
] |
def random_search(self, n_results, multithreaded=False, sort=False):
        """Search using a randomly drawn code.

        Returns a ``(code, results)`` pair so the caller also learns which
        code was queried.
        """
        query = self._random_code()
        matches = self.search(query, n_results, multithreaded, sort=sort)
        return query, matches
22,
6,
22,
26,
1458698104
] |
def __init__(self, parent, color):
        # Plain bordered wx window filled with a solid background colour.
        wx.Window.__init__(self, parent, -1, style = wx.SIMPLE_BORDER)
        self.SetBackgroundColour(color)
        # NOTE(review): GTK-only tweak — presumably needed for correct
        # background painting on wxGTK; confirm against wx docs.
        if wx.Platform == '__WXGTK__':
            self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
1,
4,
1,
11,
1501510345
] |
def __init__(self, data):
        # Keep the raw input string to (un)obfuscate later.
        self._string = data
1884,
686,
1884,
41,
1500923597
] |
def unobfuscate(self):
""" Reverse of obfuscation """
out = ""
data = self._string | ActiveState/code | [
1884,
686,
1884,
41,
1500923597
] |
def main(): | ActiveState/code | [
1884,
686,
1884,
41,
1500923597
] |
def __init__(self, plotly_name="hoverlabel", parent_name="scattermapbox", **kwargs):
super(HoverlabelValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Hoverlabel"),
data_docs=kwargs.pop(
"data_docs",
"""
align
Sets the horizontal alignment of the text
content within hover label box. Has an effect
only if the hover label text spans more two or
more lines
alignsrc
Sets the source reference on Chart Studio Cloud
for align .
bgcolor
Sets the background color of the hover labels
for this trace
bgcolorsrc
Sets the source reference on Chart Studio Cloud
for bgcolor .
bordercolor
Sets the border color of the hover labels for
this trace.
bordercolorsrc
Sets the source reference on Chart Studio Cloud
for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of
characters) of the trace name in the hover
labels for all traces. -1 shows the whole name
regardless of length. 0-3 shows the first 0-3
characters, and an integer >3 will show the
whole name if it is less than that many
characters, but if it is longer, will truncate
to `namelength - 3` characters and add an
ellipsis.
namelengthsrc
Sets the source reference on Chart Studio Cloud
for namelength . | plotly/python-api | [
13052,
2308,
13052,
1319,
1385013188
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.