function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def configuration():
    """Build the WebDriver connection settings from the environment.

    WD_HOST / WD_PORT / WD_CAPABILITIES override the module defaults;
    capabilities are parsed from a JSON string (empty object when unset).
    """
    return {
        "host": os.environ.get("WD_HOST", defaults.DRIVER_HOST),
        "port": int(os.environ.get("WD_PORT", str(defaults.DRIVER_PORT))),
        "capabilities": json.loads(os.environ.get("WD_CAPABILITIES", "{}")),
    }
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def current_session():
    # Return the module-global WebDriver session (may be None before setup).
    return _current_session
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def url(server_config):
    """Return a URL builder bound to *server_config*.

    The returned callable assembles an absolute URL for *path* from the
    configured domain table and the first port registered for *protocol*.
    """
    def url(path, protocol="http", domain="", subdomain="", query="", fragment=""):
        fqdn = server_config["domains"][domain][subdomain]
        port = server_config["ports"][protocol][0]
        netloc = "%s:%s" % (fqdn, port)
        return urlunsplit((protocol, netloc, path, query, fragment))
    return url
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def inline(url):
    """Take a source extract and produces well-formed documents.

    Based on the desired document type, the extract is embedded with
    predefined boilerplate in order to produce well-formed documents. The
    media type and character set may also be individually configured.

    This helper function originally used data URLs, but since these are not
    universally supported (or indeed standardised!) across browsers, it now
    delegates the serving of the document to wptserve. This file also acts
    as a wptserve handler (see the main function below) which configures the
    HTTP response using query parameters.

    This function returns a URL to the wptserve handler, which in turn will
    serve an HTTP response with the requested source extract inlined in a
    well-formed document, and the Content-Type header optionally configured
    using the desired media type and character set.

    Any additional keyword arguments are passed on to the build_url function,
    which comes from the url fixture.
    """
    def inline(src, **kwargs):
        # Delegate to the module-level helper, binding the url fixture.
        return build_inline(url, src, **kwargs)
    return inline
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def trueDummy(tx):
    """Replace the NULLDUMMY element of tx's first input with OP_TRUE (0x51).

    Under BIP147 the extra stack element consumed by CHECKMULTISIG must be
    empty; substituting a 0x51 push makes the transaction violate NULLDUMMY
    while remaining otherwise valid.
    """
    rebuilt = []
    for element in CScript(tx.vin[0].scriptSig):
        if not rebuilt:
            # First element is expected to be the empty dummy push.
            assert len(element) == 0
            rebuilt.append(b'\x51')
        else:
            rebuilt.append(element)
    tx.vin[0].scriptSig = CScript(rebuilt)
    tx.rehash()
JeremyRubin/bitcoin
[ 10, 7, 10, 2, 1457071573 ]
def set_test_params(self):
    """Configure a single clean-chain node with delayed segwit activation."""
    self.num_nodes = 1
    self.setup_clean_chain = True
    # This script tests NULLDUMMY activation, which is part of the 'segwit'
    # deployment, so we go through normal segwit activation here (and don't
    # use the default always-on behaviour).
    self.extra_args = [[
        f'-segwitheight={COINBASE_MATURITY + 5}',
        '-addresstype=legacy',
    ]]
JeremyRubin/bitcoin
[ 10, 7, 10, 2, 1457071573 ]
def run_test(self):
    """Exercise NULLDUMMY consensus enforcement before and after segwit activation."""
    # Watch-only multisig wallet plus the default wallet for keys/addresses.
    self.nodes[0].createwallet(wallet_name='wmulti', disable_private_keys=True)
    wmulti = self.nodes[0].get_wallet_rpc('wmulti')
    w0 = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
    self.address = w0.getnewaddress()
    self.pubkey = w0.getaddressinfo(self.address)['pubkey']
    self.ms_address = wmulti.addmultisigaddress(1, [self.pubkey])['address']
    self.wit_address = w0.getnewaddress(address_type='p2sh-segwit')
    self.wit_ms_address = wmulti.addmultisigaddress(1, [self.pubkey], '', 'p2sh-segwit')['address']
    if not self.options.descriptors:
        # Legacy wallets need to import these so that they are watched by the
        # wallet. This is unnecessary (and does not need to be tested) for
        # descriptor wallets.
        wmulti.importaddress(self.ms_address)
        wmulti.importaddress(self.wit_ms_address)
    self.coinbase_blocks = self.nodes[0].generate(2)  # block height = 2
    coinbase_txid = []
    for i in self.coinbase_blocks:
        coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
    # Mature the coinbases so they can be spent below.
    self.nodes[0].generate(COINBASE_MATURITY)  # block height = COINBASE_MATURITY + 2
    self.lastblockhash = self.nodes[0].getbestblockhash()
    self.lastblockheight = COINBASE_MATURITY + 2
    self.lastblocktime = int(time.time()) + self.lastblockheight
    self.log.info(f"Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [{COINBASE_MATURITY + 3}]")
    test1txs = [create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, amount=49)]
    txid1 = self.nodes[0].sendrawtransaction(test1txs[0].serialize_with_witness().hex(), 0)
    test1txs.append(create_transaction(self.nodes[0], txid1, self.ms_address, amount=48))
    txid2 = self.nodes[0].sendrawtransaction(test1txs[1].serialize_with_witness().hex(), 0)
    test1txs.append(create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, amount=49))
    txid3 = self.nodes[0].sendrawtransaction(test1txs[2].serialize_with_witness().hex(), 0)
    self.block_submit(self.nodes[0], test1txs, False, True)
    self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
    test2tx = create_transaction(self.nodes[0], txid2, self.ms_address, amount=47)
    # Corrupt the dummy element so the transaction violates NULLDUMMY policy.
    trueDummy(test2tx)
    assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test2tx.serialize_with_witness().hex(), 0)
    self.log.info(f"Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [{COINBASE_MATURITY + 4}]")
    self.block_submit(self.nodes[0], [test2tx], False, True)
    self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
    test4tx = create_transaction(self.nodes[0], test2tx.hash, self.address, amount=46)
    # Keep an uncorrupted copy for Test 6 before breaking the dummy element.
    test6txs = [CTransaction(test4tx)]
    trueDummy(test4tx)
    assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test4tx.serialize_with_witness().hex(), 0)
    self.block_submit(self.nodes[0], [test4tx])
    self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
    test5tx = create_transaction(self.nodes[0], txid3, self.wit_address, amount=48)
    test6txs.append(CTransaction(test5tx))
    # Corrupt the witness dummy element directly.
    test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
    assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test5tx.serialize_with_witness().hex(), 0)
    self.block_submit(self.nodes[0], [test5tx], True)
    self.log.info(f"Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [{COINBASE_MATURITY + 5}]")
    for i in test6txs:
        self.nodes[0].sendrawtransaction(i.serialize_with_witness().hex(), 0)
    self.block_submit(self.nodes[0], test6txs, True, True)
JeremyRubin/bitcoin
[ 10, 7, 10, 2, 1457071573 ]
def _format_value(self, value):
    """Format *value* for embedding in a literal; subclasses must override."""
    raise NotImplementedError()
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def process(value):
    # Apply the subclass-specific formatting first, then fall through to the
    # base string literal processor when one is available.
    # NOTE(review): `self` and `super_proc` are free variables — this is a
    # closure fragment from literal_processor; confirm against the full file.
    value = self._format_value(value)
    if super_proc:
        value = super_proc(value)
    return value
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def literal_processor(self, dialect):
    """Return a literal processor that formats values before quoting.

    Each value is passed through ``_format_value`` first and then, when the
    parent type provides a string literal processor, through that as well.
    """
    quote = self.string_literal_processor(dialect)

    def process(value):
        formatted = self._format_value(value)
        return quote(formatted) if quote else formatted

    return process
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def _format_value(self, value):
    """Build a JSON path fragment: $[i] for integer indexes, $."k" for keys."""
    if isinstance(value, int):
        return "$[%s]" % value
    return '$."%s"' % value
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def draw(self, context):
    # Vertex group specials menu: sorting, copying, mirroring, bulk removal,
    # and lock management for the active object's vertex groups.
    layout = self.layout

    layout.operator("object.vertex_group_sort", icon='SORTALPHA', text="Sort by Name").sort_type = 'NAME'
    layout.operator("object.vertex_group_sort", icon='ARMATURE_DATA', text="Sort by Bone Hierarchy").sort_type = 'BONE_HIERARCHY'
    layout.operator("object.vertex_group_copy", icon='COPY_ID')
    layout.operator("object.vertex_group_copy_to_linked", icon='LINK_AREA')
    layout.operator("object.vertex_group_copy_to_selected", icon='LINK_AREA')
    layout.operator("object.vertex_group_mirror", icon='ARROW_LEFTRIGHT').use_topology = False
    layout.operator("object.vertex_group_mirror", text="Mirror Vertex Group (Topology)", icon='ARROW_LEFTRIGHT').use_topology = True
    layout.operator("object.vertex_group_remove_from", icon='X', text="Remove from All Groups").use_all_groups = True
    layout.operator("object.vertex_group_remove_from", icon='X', text="Clear Active Group").use_all_verts = True
    layout.operator("object.vertex_group_remove", icon='X', text="Delete All Groups").all = True

    layout.separator()

    # Lock state management for all groups at once.
    layout.operator("object.vertex_group_lock", icon='LOCKED', text="Lock All").action = 'LOCK'
    layout.operator("object.vertex_group_lock", icon='UNLOCKED', text="UnLock All").action = 'UNLOCK'
    layout.operator("object.vertex_group_lock", icon='LOCKED', text="Lock Invert All").action = 'INVERT'
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw(self, context):
    # Shape key specials menu: transfer/join, mirroring, and bulk operations.
    layout = self.layout

    layout.operator("object.shape_key_transfer", icon='COPY_ID')  # icon is not ideal
    layout.operator("object.join_shapes", icon='COPY_ID')  # icon is not ideal
    layout.operator("object.shape_key_mirror", icon='ARROW_LEFTRIGHT').use_topology = False
    layout.operator("object.shape_key_mirror", text="Mirror Shape Key (Topology)", icon='ARROW_LEFTRIGHT').use_topology = True
    layout.operator("object.shape_key_add", icon='ZOOMIN', text="New Shape From Mix").from_mix = True
    layout.operator("object.shape_key_remove", icon='X', text="Delete All Shapes").all = True
    layout.operator("object.shape_key_move", icon='TRIA_UP_BAR', text="Move To Top").type = 'TOP'
    layout.operator("object.shape_key_move", icon='TRIA_DOWN_BAR', text="Move To Bottom").type = 'BOTTOM'
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
    """Draw one vertex-group entry in a UIList: name plus lock toggle."""
    # assert(isinstance(item, bpy.types.VertexGroup))
    vgroup = item
    if self.layout_type in {'DEFAULT', 'COMPACT'}:
        layout.prop(vgroup, "name", text="", emboss=False, icon_value=icon)
        lock_icon = 'LOCKED' if vgroup.lock_weight else 'UNLOCKED'
        layout.prop(vgroup, "lock_weight", text="", icon=lock_icon, emboss=False)
    elif self.layout_type == 'GRID':
        layout.alignment = 'CENTER'
        layout.label(text="", icon_value=icon)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
    # Draw one shape-key entry: name on the left, value/frame + mute on the right.
    # assert(isinstance(item, bpy.types.ShapeKey))
    obj = active_data
    # key = data
    key_block = item
    if self.layout_type in {'DEFAULT', 'COMPACT'}:
        split = layout.split(0.66, False)
        split.prop(key_block, "name", text="", emboss=False, icon_value=icon)
        row = split.row(align=True)
        # Grey the row out when the key is muted, or when edit mode suppresses
        # shape key evaluation for this object type.
        if key_block.mute or (obj.mode == 'EDIT' and not (obj.use_shape_key_edit_mode and obj.type == 'MESH')):
            row.active = False
        if not item.id_data.use_relative:
            # Absolute shape keys expose a frame instead of a blend value.
            row.prop(key_block, "frame", text="", emboss=False)
        elif index > 0:
            row.prop(key_block, "value", text="", emboss=False)
        else:
            # The basis key (index 0) has no editable value.
            row.label(text="")
        row.prop(key_block, "mute", text="", emboss=False)
    elif self.layout_type == 'GRID':
        layout.alignment = 'CENTER'
        layout.label(text="", icon_value=icon)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
    """Draw one UV-map / vertex-color layer entry: name plus render toggle."""
    # assert(isinstance(item, (bpy.types.MeshTexturePolyLayer, bpy.types.MeshLoopColorLayer)))
    if self.layout_type in {'DEFAULT', 'COMPACT'}:
        layout.prop(item, "name", text="", emboss=False, icon_value=icon)
        render_icon = 'RESTRICT_RENDER_OFF' if item.active_render else 'RESTRICT_RENDER_ON'
        layout.prop(item, "active_render", text="", icon=render_icon, emboss=False)
    elif self.layout_type == 'GRID':
        layout.alignment = 'CENTER'
        layout.label(text="", icon_value=icon)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def poll(cls, context):
    """Panel is available only when a mesh is active in a compatible engine."""
    return context.mesh and (context.scene.render.engine in cls.COMPAT_ENGINES)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw(self, context):
    # Datablock selector: edit the active object's mesh, or the pinned mesh
    # when no object is active.
    layout = self.layout

    ob = context.object
    mesh = context.mesh
    space = context.space_data

    if ob:
        layout.template_ID(ob, "data")
    elif mesh:
        layout.template_ID(space, "pin_id")
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw(self, context):
    # Normals panel: angle-based auto smoothing and double-sided lighting.
    layout = self.layout

    mesh = context.mesh

    split = layout.split()

    col = split.column()
    col.prop(mesh, "use_auto_smooth")
    sub = col.column()
    # Custom split normals take precedence over the auto-smooth angle.
    sub.active = mesh.use_auto_smooth and not mesh.has_custom_normals
    sub.prop(mesh, "auto_smooth_angle", text="Angle")

    split.prop(mesh, "show_double_sided")
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw(self, context):
    # Texture Space panel: texture mesh plus auto/manual texture space.
    layout = self.layout

    mesh = context.mesh

    layout.prop(mesh, "texture_mesh")

    layout.separator()

    layout.prop(mesh, "use_auto_texspace")
    row = layout.row()
    row.column().prop(mesh, "texspace_location", text="Location")
    row.column().prop(mesh, "texspace_size", text="Size")
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def poll(cls, context):
    """Available for mesh or lattice objects under a compatible engine."""
    obj = context.object
    engine = context.scene.render.engine
    return (obj and obj.type in {'MESH', 'LATTICE'} and (engine in cls.COMPAT_ENGINES))
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def poll(cls, context):
    """Available for mesh, lattice, curve or surface objects under a compatible engine."""
    obj = context.object
    engine = context.scene.render.engine
    return (obj and obj.type in {'MESH', 'LATTICE', 'CURVE', 'SURFACE'} and (engine in cls.COMPAT_ENGINES))
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw(self, context):
    # UV map list with add/remove controls.
    layout = self.layout

    me = context.mesh
    row = layout.row()
    col = row.column()

    col.template_list("MESH_UL_uvmaps_vcols", "uvmaps", me, "uv_textures", me.uv_textures, "active_index", rows=1)

    col = row.column(align=True)
    col.operator("mesh.uv_texture_add", icon='ZOOMIN', text="")
    col.operator("mesh.uv_texture_remove", icon='ZOOMOUT', text="")
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw(self, context):
    # Vertex color layer list with add/remove controls.
    layout = self.layout

    me = context.mesh
    row = layout.row()
    col = row.column()

    col.template_list("MESH_UL_uvmaps_vcols", "vcols", me, "vertex_colors", me.vertex_colors, "active_index", rows=1)

    col = row.column(align=True)
    col.operator("mesh.vertex_color_add", icon='ZOOMIN', text="")
    col.operator("mesh.vertex_color_remove", icon='ZOOMOUT', text="")
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def draw(self, context):
    # Geometry Data panel: clear/add optional custom-data layers and toggle
    # per-element custom-data storage.
    layout = self.layout

    obj = context.object
    me = context.mesh
    col = layout.column()
    col.operator("mesh.customdata_mask_clear", icon='X')
    col.operator("mesh.customdata_skin_clear", icon='X')

    if me.has_custom_normals:
        col.operator("mesh.customdata_custom_splitnormals_clear", icon='X')
    else:
        col.operator("mesh.customdata_custom_splitnormals_add", icon='ZOOMIN')

    col = layout.column()
    # These toggles must not be changed while the object is in edit mode.
    col.enabled = (obj.mode != 'EDIT')
    col.prop(me, "use_customdata_vertex_bevel")
    col.prop(me, "use_customdata_edge_bevel")
    col.prop(me, "use_customdata_edge_crease")
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry):
    """Seed entry.options with the fallback flag from entry.data, once.

    No-op when the option is already present.
    """
    if CONF_FALLBACK in entry.options:
        return
    options = {**entry.options, CONF_FALLBACK: entry.data.get(CONF_FALLBACK, True)}
    hass.config_entries.async_update_entry(entry, options=options)
pschmitt/home-assistant
[ 1, 1, 1, 6, 1485261624 ]
def __init__(self, hass, username, password, fallback):
    """Initialize Tado Connector."""
    self.hass = hass
    self._username = username
    self._password = password
    # Whether to fall back to the Smart Schedule after manual overlays.
    self._fallback = fallback
    # Populated after login / first refresh.
    self.device_id = None
    self.tado = None
    self.zones = None
    self.devices = None
    # Latest sensor state keyed by zone id / device id.
    self.data = {
        "zone": {},
        "device": {},
    }
pschmitt/home-assistant
[ 1, 1, 1, 6, 1485261624 ]
def fallback(self):
    """Return fallback flag to Smart Schedule."""
    # Read-only view of the value passed at construction time.
    return self._fallback
pschmitt/home-assistant
[ 1, 1, 1, 6, 1485261624 ]
def update(self):
    """Refresh sensor data for every known zone, then every known device."""
    for kind, items in (("zone", self.zones), ("device", self.devices)):
        for item in items:
            self.update_sensor(kind, item["id"])
pschmitt/home-assistant
[ 1, 1, 1, 6, 1485261624 ]
def get_capabilities(self, zone_id):
    """Return the capabilities of the devices."""
    # Delegates to the PyTado client for the given zone.
    return self.tado.getCapabilities(zone_id)
pschmitt/home-assistant
[ 1, 1, 1, 6, 1485261624 ]
def set_presence( self, presence=PRESET_HOME,
pschmitt/home-assistant
[ 1, 1, 1, 6, 1485261624 ]
def set_zone_overlay( self, zone_id=None, overlay_mode=None, temperature=None, duration=None, device_type="HEATING", mode=None, fan_speed=None, swing=None,
pschmitt/home-assistant
[ 1, 1, 1, 6, 1485261624 ]
def _LoadNotificationUrl():
    # The test notification URL comes from the user's boto config
    # ([GSUtil] test_notification_url); returns None when unset.
    return boto.config.get_value('GSUtil', 'test_notification_url')
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def test_watch_bucket(self):
    """Tests creating a notification channel on a bucket."""
    bucket_uri = self.CreateBucket()
    self.RunGsUtil(
        ['notification', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)])
    identifier = str(uuid.uuid4())
    token = str(uuid.uuid4())
    # A second watch with an explicit channel id/token should echo them back
    # in the command's stderr output.
    stderr = self.RunGsUtil([
        'notification', 'watchbucket', '-i', identifier, '-t', token,
        NOTIFICATION_URL, suri(bucket_uri)
    ], return_stderr=True)
    self.assertIn('token: %s' % token, stderr)
    self.assertIn('identifier: %s' % identifier, stderr)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def test_stop_channel(self):
    """Tests stopping a notification channel on a bucket."""
    bucket_uri = self.CreateBucket()
    stderr = self.RunGsUtil(
        ['notification', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)],
        return_stderr=True)
    # watchbucket prints both identifiers; both are required by stopchannel.
    channel_id = re.findall(r'channel identifier: (?P<id>.*)', stderr)
    self.assertEqual(len(channel_id), 1)
    resource_id = re.findall(r'resource identifier: (?P<id>.*)', stderr)
    self.assertEqual(len(resource_id), 1)
    channel_id = channel_id[0]
    resource_id = resource_id[0]
    self.RunGsUtil(['notification', 'stopchannel', channel_id, resource_id])
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def test_list_one_channel(self):
    """Tests listing notification channel on a bucket."""
    # TODO(b/132277269): Re-enable these once the service-side bug is fixed.
    # The previous `return unittest.skip(...)` only returned a decorator
    # object, so the test passed silently; skipTest() records a real skip.
    self.skipTest('Functionality has been disabled due to b/132277269')
    bucket_uri = self.CreateBucket()
    # Set up an OCN (object change notification) on the newly created bucket.
    self.RunGsUtil(
        ['notification', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)],
        return_stderr=False)
    # The OCN listing in the service is eventually consistent. In initial
    # tests, it almost never was ready immediately after calling WatchBucket
    # above, so we A) sleep for a few seconds before the first OCN listing
    # attempt, and B) wrap the OCN listing attempt in retry logic in case
    # it raises a BucketNotFoundException (note that RunGsUtil will raise this
    # as an AssertionError due to the exit status not being 0).
    @Retry(AssertionError, tries=3, timeout_secs=5)
    def _ListObjectChangeNotifications():
        stderr = self.RunGsUtil(['notification', 'list', '-o', suri(bucket_uri)],
                                return_stderr=True)
        return stderr

    time.sleep(5)
    stderr = _ListObjectChangeNotifications()
    channel_id = re.findall(r'Channel identifier: (?P<id>.*)', stderr)
    self.assertEqual(len(channel_id), 1)
    resource_id = re.findall(r'Resource identifier: (?P<id>.*)', stderr)
    self.assertEqual(len(resource_id), 1)
    push_url = re.findall(r'Application URL: (?P<id>.*)', stderr)
    self.assertEqual(len(push_url), 1)
    subscriber_email = re.findall(r'Created by: (?P<id>.*)', stderr)
    self.assertEqual(len(subscriber_email), 1)
    creation_time = re.findall(r'Creation time: (?P<id>.*)', stderr)
    self.assertEqual(len(creation_time), 1)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def add_default_enable(apps, schema_editor):
    """Create an enabled ForumsConfig row when none exists yet.

    By default we want the comment client enabled, but this is *not* enabling
    discussions themselves by default, as in showing the Discussions tab, or
    inline discussions, etc. It just allows the underlying service client to
    work.
    """
    ForumsConfig = apps.get_model("django_comment_common", "ForumsConfig")
    if ForumsConfig.objects.count() == 0:
        ForumsConfig(enabled=True).save()
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def curves( self, basis=IECore.CubicBasisf.linear(), periodic=False, numCurves=4 ) : vertsPerCurve = IECore.IntVectorData() pData = IECore.V3fVectorData() pData.setInterpretation( IECore.GeometricData.Interpretation.Point )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def emptySop( self ) : obj = hou.node( "/obj" ) geo = obj.createNode( "geo", run_init_scripts=False ) null = geo.createNode( "null" )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def curveSop( self, order=2, periodic=False, parent=None, coordIndex=0 ) :
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def curvesSop( self, numCurves=4, order=2, periodic=False ) :
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def comparePrimAndSop( self, prim, sop ) :
    # Verify constant-interpolated primitive variables round-trip onto
    # Houdini detail attributes with equal values.
    geo = sop.geometry()
    for key in [ "floatDetail", "intDetail", "stringDetail" ] :
        self.assertEqual( prim[key].data.value, geo.attribValue( key ) )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def compareOpenSplinePrimAndSop( self, prim, sop ) :
    # Same detail-attribute checks as comparePrimAndSop, for open splines.
    geo = sop.geometry()
    for key in [ "floatDetail", "intDetail", "stringDetail" ] :
        self.assertEqual( prim[key].data.value, geo.attribValue( key ) )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def comparePrimAndAppendedSop( self, prim, sop, origSopPrim, multipleConversions=False ) :
    # Detail-attribute checks when the prim was appended to existing geometry.
    geo = sop.geometry()
    for key in [ "floatDetail", "intDetail", "stringDetail" ] :
        self.assertEqual( prim[key].data.value, geo.attribValue( key ) )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testCreateConverter( self ) :
    """Creating a converter for a CurvesPrimitive yields a ToHoudiniCurvesConverter."""
    converter = IECoreHoudini.ToHoudiniCurvesConverter( self.curves() )
    # assert_ is a deprecated unittest alias; assertTrue is the supported name.
    self.assertTrue( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.ToHoudiniCurvesConverter ) ) )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testLinearConversion( self ) : sop = self.emptySop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testSplineConversion( self ) : sop = self.emptySop() spline = IECore.CubicBasisf.bSpline()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testConversionIntoExistingSop( self ) : curves = self.curves() sop = self.curvesSop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testAppendingIntoExistingSop( self ) : curves = self.curves() curvesNumPoints = curves.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) sop = self.curvesSop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testAppendingIntoLockedSop( self ) : curves = self.curves() curvesNumPoints = curves.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) sop = self.curvesSop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testSaveLoad( self ) : hou.hipFile.clear( suppress_save_prompt=True )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testSaveLoadWithLockedSop( self ) : hou.hipFile.clear( suppress_save_prompt=True )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testMultipleConversions( self ) : curves = self.curves() curvesNumPoints = curves.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) sop = self.curvesSop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testObjectWasDeleted( self ) : curves = self.curves() sop = self.curvesSop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testWithUnacceptablePrimVars( self ) : curves = self.curves() curves["badDetail"] = IECore.PrimitiveVariable( IECore.PrimitiveVariable.Interpolation.Constant, IECore.TransformationMatrixfData() ) curves["badPoint"] = IECore.PrimitiveVariable( IECore.PrimitiveVariable.Interpolation.Vertex, IECore.DoubleVectorData( [ 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5, 10.5, 11.5, 12.5 ] ) ) curves["badPrim"] = IECore.PrimitiveVariable( IECore.PrimitiveVariable.Interpolation.Uniform, IECore.DoubleVectorData( [ 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5, 10.5, 11.5, 12.5 ] ) ) curves["badVert"] = IECore.PrimitiveVariable( IECore.PrimitiveVariable.Interpolation.FaceVarying, IECore.DoubleVectorData( [ 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5, 10.5, 11.5, 12.5 ] ) ) sop = self.emptySop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testConvertingOverExistingAttribs( self ) : curves = self.curves() sop = self.emptySop() detailAttr = sop.createOutputNode( "attribcreate", exact_type_name=True ) detailAttr.parm( "name" ).set( "floatDetail" ) detailAttr.parm( "class" ).set( 0 ) # detail detailAttr.parm( "type" ).set( 0 ) # float detailAttr.parm( "size" ).set( 1 ) # 1 element detailAttr.parm( "value1" ).set( 123.456 )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testConvertingOverExistingAttribsWithDifferentTypes( self ) : curves = self.curves() sop = self.emptySop() detailAttr = sop.createOutputNode( "attribcreate", exact_type_name=True ) detailAttr.parm( "name" ).set( "floatDetail" ) detailAttr.parm( "class" ).set( 0 ) # detail detailAttr.parm( "type" ).set( 1 ) # int detailAttr.parm( "size" ).set( 3 ) # 3 elements detailAttr.parm( "value1" ).set( 10 ) detailAttr.parm( "value2" ).set( 11 ) detailAttr.parm( "value3" ).set( 12 )
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testVertAttribsCantBeConverted( self ) : curves = self.curves() curves["floatVert"] = IECore.PrimitiveVariable( IECore.PrimitiveVariable.Interpolation.FaceVarying, IECore.FloatVectorData( 1 ) ) sop = self.emptySop()
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testBadCurve( self ) :
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testName( self ) :
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testAttributeFilter( self ) :
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def testStandardAttributeConversion( self ) :
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def tearDown( self ) :
code-google-com/cortex-vfx
[ 4, 3, 4, 21, 1426431447 ]
def __init__(self, subject):
    # The object whose members will be looked up and tested by the
    # test_* methods below.
    self.subject = subject
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def test_logic(self, member, logic, operator='eq'): """Find the type of test to run on member, and perform that test""" if type(logic) is dict: return self.test_dict(member, logic) elif type(logic) is Logic_AND: return self.test_and(member, logic) elif type(logic) is Logic_OR: return self.test_or(member, logic) elif type(logic) is Logic_Operator: return self.test_operator(member, logic) else: # compare the value, I think using Logic_Operator() here # allows completeness in test_operator(), but I can't put # my finger on why for the minute return self.test_operator(member, Logic_Operator({operator: logic}))
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def test_or(self, member, logic): """Member is a value, logic is a set of values, ANY of which can be True """ result = False for test in logic: result |= self.test_logic(member, test) return result
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def test_dict(self, member, logic): """Member is a value, logic is a dict of other members to compare to. All other member tests must be True """ result = True for other_member, test in logic.items(): result &= self.test_logic(self.get_member(other_member), test) return result
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def _GetAllCliques(root_node):
    """Return all cliques in the |root_node| tree."""
    return [clique for node in root_node for clique in node.GetCliques()]
chromium/chromium
[ 14247, 5365, 14247, 62, 1517864132 ]
def testGetPath(self):
    """ToRealPath resolves a translation file path against base_dir."""
    root = misc.GritNode()
    root.StartParsing(u'grit', None)
    root.HandleAttribute(u'latest_public_release', u'0')
    root.HandleAttribute(u'current_release', u'1')
    root.HandleAttribute(u'base_dir', r'..\resource')
    translations = empty.TranslationsNode()
    translations.StartParsing(u'translations', root)
    root.AddChild(translations)
    file_node = node_io.FileNode()
    file_node.StartParsing(u'file', translations)
    file_node.HandleAttribute(u'path', r'flugel\kugel.pdf')
    translations.AddChild(file_node)
    root.EndParsing()
    # assertEqual replaces the deprecated failUnless and reports both
    # values on mismatch.
    self.assertEqual(
        root.ToRealPath(file_node.GetInputPath()),
        util.normpath(os.path.join(r'../resource', r'flugel/kugel.pdf')))
chromium/chromium
[ 14247, 5365, 14247, 62, 1517864132 ]
def testLoadTranslations(self):
    # An xtb file listed under <translations> should supply the French
    # strings for both messages alongside the English source strings.
    xml = '''<?xml version="1.0" encoding="UTF-8"?>
      <grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
        <translations>
          <file path="generated_resources_fr.xtb" lang="fr" />
        </translations>
        <release seq="3">
          <messages>
            <message name="ID_HELLO">Hello!</message>
            <message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex>Joi</ex></ph></message>
          </messages>
        </release>
      </grit>'''
    grd = grd_reader.Parse(StringIO(xml), util.PathFromRoot('grit/testdata'))
    grd.SetOutputLanguage('en')
    grd.RunGatherers()
    self.VerifyCliquesContainEnglishAndFrenchAndNothingElse(_GetAllCliques(grd))
chromium/chromium
[ 14247, 5365, 14247, 62, 1517864132 ]
def testConditionalLoadTranslations(self):
    # <if> conditions inside <translations> are honoured: the fr file is
    # loaded, the de file (false branch, nonexistent) is ignored.
    xml = '''<?xml version="1.0" encoding="UTF-8"?>
      <grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
        <translations>
          <if expr="True">
            <file path="generated_resources_fr.xtb" lang="fr" />
          </if>
          <if expr="False">
            <file path="no_such_file.xtb" lang="de" />
          </if>
        </translations>
        <release seq="3">
          <messages>
            <message name="ID_HELLO">Hello!</message>
            <message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex> Joi</ex></ph></message>
          </messages>
        </release>
      </grit>'''
    grd = grd_reader.Parse(StringIO(xml), util.PathFromRoot('grit/testdata'))
    grd.SetOutputLanguage('en')
    grd.RunGatherers()
    self.VerifyCliquesContainEnglishAndFrenchAndNothingElse(_GetAllCliques(grd))
chromium/chromium
[ 14247, 5365, 14247, 62, 1517864132 ]
def testLangCodeMapping(self):
    # Translation files using remapped language codes (no->nb, iw->he)
    # parse without error; with no messages there are no cliques.
    grd = grd_reader.Parse(StringIO('''<?xml version="1.0" encoding="UTF-8"?>
      <grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
        <translations>
          <file path="generated_resources_no.xtb" lang="nb" />
          <file path="generated_resources_iw.xtb" lang="he" />
        </translations>
        <release seq="3">
          <messages></messages>
        </release>
      </grit>'''), util.PathFromRoot('grit/testdata'))
    grd.SetOutputLanguage('en')
    grd.RunGatherers()
    self.assertEqual([], _GetAllCliques(grd))
chromium/chromium
[ 14247, 5365, 14247, 62, 1517864132 ]
def __init__(self, stddev, **kwargs):
    # stddev: standard deviation of the additive noise distribution.
    super(GaussianNoise, self).__init__(**kwargs)
    self.supports_masking = True
    self.stddev = stddev
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def noised():
    # Additive zero-mean Gaussian noise matching the input's shape.
    # NOTE(review): `inputs` and `self` are free variables — this is a
    # closure fragment from GaussianNoise.call; confirm against the full file.
    return inputs + K.random_normal(
        shape=K.shape(inputs),
        mean=0.,
        stddev=self.stddev)
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def get_config(self):
    """Serialize the layer configuration, including this layer's stddev."""
    merged = dict(super(GaussianNoise, self).get_config())
    merged['stddev'] = self.stddev
    return merged
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def __init__(self, rate, **kwargs):
    # rate: drop probability used to derive the multiplicative noise stddev.
    super(GaussianDropout, self).__init__(**kwargs)
    self.supports_masking = True
    self.rate = rate
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def noised():
    # Multiplicative one-mean Gaussian noise; stddev sqrt(rate / (1 - rate))
    # matches the variance of ordinary dropout at the same rate.
    # NOTE(review): `inputs` and `self` are free variables — this is a
    # closure fragment from GaussianDropout.call; confirm against the full file.
    stddev = np.sqrt(self.rate / (1.0 - self.rate))
    return inputs * K.random_normal(
        shape=K.shape(inputs),
        mean=1.0,
        stddev=stddev)
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def get_config(self):
    """Serialize the layer configuration, including this layer's rate."""
    merged = dict(super(GaussianDropout, self).get_config())
    merged['rate'] = self.rate
    return merged
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
    # rate: drop probability; noise_shape: optional mask shape; seed: RNG seed.
    super(AlphaDropout, self).__init__(**kwargs)
    self.rate = rate
    self.noise_shape = noise_shape
    self.seed = seed
    self.supports_masking = True
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def call(self, inputs, training=None):
    # Alpha dropout: drops units to the negative SELU saturation value and
    # applies an affine correction so activations keep zero mean / unit
    # variance (Klambauer et al., "Self-Normalizing Neural Networks").
    if 0. < self.rate < 1.:
        noise_shape = self._get_noise_shape(inputs)
        # SELU constants.
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946

        def dropped_inputs(inputs=inputs, rate=self.rate, seed=self.seed):
            alpha_p = -alpha * scale
            # Bernoulli keep-mask with probability (1 - rate).
            kept_idx = K.greater_equal(K.random_uniform(noise_shape, seed=seed), rate)
            kept_idx = K.cast(kept_idx, K.floatx())
            # Affine parameters restoring the mean and variance.
            a = ((1 - rate) * (1 + rate * alpha_p ** 2)) ** -0.5
            b = -a * alpha_p * rate
            x = inputs * kept_idx + alpha_p * (1 - kept_idx)
            return a * x + b

        # Noise only during training; identity at inference time.
        return K.in_train_phase(dropped_inputs, inputs, training=training)
    return inputs
npuichigo/ttsflow
[ 16, 6, 16, 1, 1500635633 ]
def run_perfstress_cmd():
    """Entry point: run the perf-stress runner to completion on the event loop."""
    runner = _PerfStressRunner()
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(runner.start())
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, plotly_name="colorsrc", parent_name="scattercarpet.marker", **kwargs
plotly/plotly.py
[ 13052, 2308, 13052, 1319, 1385013188 ]
def get_numeric_value(value, func):
    """Convert a spreadsheet cell value with ``func`` when it is present.

    :param value: raw cell value; missing cells arrive as ``None`` or a
        float ``NaN`` (pandas' missing-value marker).
    :param func: conversion callable applied to present values (e.g. ``int``).
    :returns: ``func(value)`` if the value is present, otherwise ``None``.

    Bug fix: the previous truthiness test (``if value and ...``) silently
    treated a legitimate value of ``0`` as missing, and ``np.isnan`` would
    raise for non-float inputs such as strings.
    """
    if value is None:
        return None
    # pandas represents empty spreadsheet cells as float NaN.
    if isinstance(value, float) and np.isnan(value):
        return None
    return func(value)
hasadna/anyway
[ 69, 235, 69, 293, 1386619033 ]
def get_schools_with_description(schools_description_filepath, schools_coordinates_filepath):
    """Read school descriptions and coordinates from two Excel files and join them.

    Returns a list of dicts (one per unique school), filtered to a whitelist of
    institution types; duplicates by (name, x, y) are skipped.

    NOTE(review): assumes `school_fields` maps logical names to the Excel column
    headers, and that `coordinates_converter.convert` takes ITM x/y and returns
    (longitude, latitude) — confirm against the module-level definitions.
    """
    logging.info("\tReading schools description data from '%s'..." % schools_description_filepath)
    df_schools = pd.read_excel(schools_description_filepath)
    logging.info("\tReading schools coordinates data from '%s'..." % schools_coordinates_filepath)
    df_coordinates = pd.read_excel(schools_coordinates_filepath)
    schools = []
    # get school_id
    df_schools = df_schools.drop_duplicates(school_fields["school_id"])
    # sort by school_id
    df_schools = df_schools.sort_values(school_fields["school_id"], ascending=True)
    all_schools_tuples = []
    for _, school in df_schools.iterrows():
        school_id = get_numeric_value(school[school_fields["school_id"]], int)
        # Names are sometimes quoted in the source sheet; strip the quotes.
        school_name = get_str_value(school[school_fields["school_name"]]).strip('"')
        # Look up this school's coordinates row (if any) by school_id.
        if school_id in list(df_coordinates[school_fields["school_id"]].values):
            x_coord = df_coordinates.loc[
                df_coordinates[school_fields["school_id"]] == school_id, school_fields["x"]
            ].values[0]
            y_coord = df_coordinates.loc[
                df_coordinates[school_fields["school_id"]] == school_id, school_fields["y"]
            ].values[0]
            location_accuracy = get_str_value(
                df_coordinates.loc[
                    df_coordinates[school_fields["school_id"]] == school_id,
                    school_fields["location_accuracy"],
                ].values[0]
            )
        else:
            x_coord = None
            y_coord = None
            location_accuracy = None
        # Convert to lon/lat only when both coordinates are present and not NaN.
        if x_coord and not math.isnan(x_coord) and y_coord and not math.isnan(y_coord):
            longitude, latitude = coordinates_converter.convert(x_coord, y_coord)
        else:
            longitude, latitude = (
                None,
                None,
            )  # otherwise yield will produce: UnboundLocalError: local variable referenced before assignment
        # Don't insert duplicates of 'school_name','x', 'y'
        school_tuple = (school_name, x_coord, y_coord)
        if school_tuple in all_schools_tuples:
            continue
        else:
            all_schools_tuples.append(school_tuple)
        # NOTE(review): `school` is rebound here from the row Series to the
        # output dict; the Series is no longer needed past this point.
        school = {
            "school_id": get_numeric_value(school[school_fields["school_id"]], int),
            "school_name": school_name,
            "municipality_name": get_str_value(school[school_fields["municipality_name"]]),
            "yishuv_name": get_str_value(school[school_fields["yishuv_name"]]),
            "institution_type": get_str_value(school[school_fields["institution_type"]]),
            "lowest_grade": get_str_value(school[school_fields["lowest_grade"]]),
            "highest_grade": get_str_value(school[school_fields["highest_grade"]]),
            "location_accuracy": location_accuracy,
            "longitude": longitude,
            "latitude": latitude,
            "x": x_coord,
            "y": y_coord,
        }
        # Keep only whitelisted institution types (schools, yeshivas, etc.).
        if school["institution_type"] in [
            "בית ספר",
            "תלמוד תורה",
            "ישיבה קטנה",
            'בי"ס תורני',
            "ישיבה תיכונית",
            'בי"ס חקלאי',
            'בי"ס רפואי',
            'בי"ס כנסייתי',
            "אולפנה",
            'בי"ס אקסטרני',
            'בי"ס קיבוצי',
            "תלמוד תורה ליד מעיין חינוך התורני",
            'בי"ס מושבי',
        ]:
            schools.append(school)
    return schools
hasadna/anyway
[ 69, 235, 69, 293, 1386619033 ]
def import_to_datastore(schools_description_filepath, schools_coordinates_filepath, batch_size):
    """Load schools from the two Excel files into the database in batches.

    :param schools_description_filepath: path to the descriptions workbook.
    :param schools_coordinates_filepath: path to the coordinates workbook.
    :param batch_size: positive number of rows per bulk insert.
    :returns: number of inserted rows.
    :raises Exception: wrapping any underlying failure, with a progress count.
    """
    # Initialized before the try block: the except clause reads it, and a
    # failure before the first insert (e.g. the batch_size assert) previously
    # raised UnboundLocalError instead of the intended error message.
    new_items = 0
    try:
        assert batch_size > 0
        started = datetime.now()
        schools = get_schools_with_description(
            schools_description_filepath, schools_coordinates_filepath
        )
        # Replace the existing table contents with the fresh import.
        truncate_schools_with_description()
        logging.info("inserting " + str(len(schools)) + " new schools")
        for schools_chunk in chunks(schools, batch_size):
            db.session.bulk_insert_mappings(SchoolWithDescription2020, schools_chunk)
            db.session.commit()
            # Count the rows actually inserted in this chunk (not the whole
            # list), so the progress/partial-failure count is accurate.
            new_items += len(schools_chunk)
        logging.info(f"\t{new_items} items in {time_delta(started)}")
        return new_items
    except Exception as exception:
        error = f"Schools import succeeded partially with {new_items} schools. Got exception : {exception}"
        # Chain the cause so the original traceback is preserved.
        raise Exception(error) from exception
hasadna/anyway
[ 69, 235, 69, 293, 1386619033 ]
def hotel_cost(nights, nightly_rate=140):
    """Return the total hotel cost for a stay.

    :param nights: number of nights stayed.
    :param nightly_rate: price per night; defaults to 140 so existing
        callers keep the original behavior.
    :returns: total cost for the stay.
    """
    return nightly_rate * nights
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def rental_car_cost(days):
    """Return the rental cost at $40/day, with a $50 discount for a week
    or longer and a $20 discount for three days or longer."""
    base = 40 * days
    if days >= 7:
        discount = 50
    elif days >= 3:
        discount = 20
    else:
        discount = 0
    return base - discount
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def test_list_repository_names(self, containerregistry_endpoint):
    """Listing repository names yields a non-empty ItemPaged of strings,
    with no immediately repeated name."""
    client = self.create_registry_client(containerregistry_endpoint)
    repositories = client.list_repository_names()
    assert isinstance(repositories, ItemPaged)
    seen = 0
    previous = None
    for name in repositories:
        seen += 1
        assert isinstance(name, six.string_types)
        assert name != previous
        previous = name
    assert seen > 0
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_list_repository_names_by_page(self, containerregistry_endpoint):
    """Paged repository listing honors results_per_page and yields at least
    one page of string names."""
    client = self.create_registry_client(containerregistry_endpoint)
    results_per_page = 2
    pages_seen = 0
    previous = None
    for page in client.list_repository_names(results_per_page=results_per_page).by_page():
        names_on_page = 0
        for name in page:
            assert isinstance(name, six.string_types)
            assert name != previous
            previous = name
            names_on_page += 1
        assert names_on_page <= results_per_page
        pages_seen += 1
    assert pages_seen >= 1
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_delete_repository(self, containerregistry_endpoint, containerregistry_resource_group):
    """After delete_repository, the repository no longer appears when listing."""
    self.import_image(containerregistry_endpoint, HELLO_WORLD, [TO_BE_DELETED])
    client = self.create_registry_client(containerregistry_endpoint)
    client.delete_repository(TO_BE_DELETED)
    if any(name == TO_BE_DELETED for name in client.list_repository_names()):
        raise ValueError("Repository not deleted")
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_delete_repository_does_not_exist(self, containerregistry_endpoint):
    """Deleting a repository that does not exist must complete without raising."""
    registry_client = self.create_registry_client(containerregistry_endpoint)
    registry_client.delete_repository("not_real_repo")
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_get_repository_properties(self, containerregistry_endpoint):
    """get_repository_properties returns typed properties for the ALPINE repo."""
    client = self.create_registry_client(containerregistry_endpoint)
    props = client.get_repository_properties(ALPINE)
    assert isinstance(props, RepositoryProperties)
    assert props.name == ALPINE
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_update_repository_properties(self, containerregistry_endpoint):
    # Work on a freshly imported repository so the permission changes made
    # here are isolated from shared fixtures.
    repository = self.get_resource_name("repo")
    tag_identifier = self.get_resource_name("tag")
    self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repository, tag_identifier)])
    client = self.create_registry_client(containerregistry_endpoint)
    properties = client.get_repository_properties(repository)
    # Disable all four permission flags and verify the service echoes them back.
    properties.can_delete = False
    properties.can_read = False
    properties.can_list = False
    properties.can_write = False
    new_properties = client.update_repository_properties(repository, properties)
    assert properties.can_delete == new_properties.can_delete
    assert properties.can_read == new_properties.can_read
    assert properties.can_list == new_properties.can_list
    assert properties.can_write == new_properties.can_write
    # Re-enable everything so the repository remains usable afterwards,
    # and verify the round trip again.
    new_properties.can_delete = True
    new_properties.can_read = True
    new_properties.can_list = True
    new_properties.can_write = True
    new_properties = client.update_repository_properties(repository, new_properties)
    assert new_properties.can_delete == True
    assert new_properties.can_read == True
    assert new_properties.can_list == True
    assert new_properties.can_write == True
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_update_repository_properties_kwargs(self, containerregistry_endpoint):
    # Seed a dedicated repository so permission toggles don't leak into
    # other tests.
    repo = self.get_resource_name("repo")
    tag = self.get_resource_name("tag")
    self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
    client = self.create_registry_client(containerregistry_endpoint)
    properties = client.get_repository_properties(repo)
    # Start from all-permissions-enabled.
    properties = self.set_all_properties(properties, True)
    received = client.update_repository_properties(repo, properties)
    self.assert_all_properties(properties, True)
    # Flip each permission off one at a time via keyword arguments and check
    # that each update changes only the requested flag (earlier flips persist).
    received = client.update_repository_properties(repo, can_delete=False)
    assert received.can_delete == False
    assert received.can_list == True
    assert received.can_read == True
    assert received.can_write == True
    received = client.update_repository_properties(repo, can_read=False)
    assert received.can_delete == False
    assert received.can_list == True
    assert received.can_read == False
    assert received.can_write == True
    received = client.update_repository_properties(repo, can_write=False)
    assert received.can_delete == False
    assert received.can_list == True
    assert received.can_read == False
    assert received.can_write == False
    received = client.update_repository_properties(repo, can_list=False)
    assert received.can_delete == False
    assert received.can_list == False
    assert received.can_read == False
    assert received.can_write == False
    # Restore all permissions in a single call so the repository is usable.
    received = client.update_repository_properties(
        repo,
        can_delete=True,
        can_read=True,
        can_write=True,
        can_list=True,
    )
    self.assert_all_properties(received, True)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_list_registry_artifacts(self, containerregistry_endpoint):
    """Each BUSYBOX manifest has typed timestamps, the right repository name,
    and a fully qualified reference containing its digest."""
    client = self.create_registry_client(containerregistry_endpoint)
    seen = 0
    for manifest in client.list_manifest_properties(BUSYBOX):
        seen += 1
        assert isinstance(manifest, ArtifactManifestProperties)
        assert isinstance(manifest.created_on, datetime)
        assert isinstance(manifest.last_updated_on, datetime)
        assert manifest.repository_name == BUSYBOX
        expected = self.create_fully_qualified_reference(containerregistry_endpoint, BUSYBOX, manifest.digest)
        assert manifest.fully_qualified_reference in expected
    assert seen > 0
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_list_registry_artifacts_by_page(self, containerregistry_endpoint):
    """Paged manifest listing honors the page size and yields >= 1 page."""
    client = self.create_registry_client(containerregistry_endpoint)
    results_per_page = 2
    manifest_pages = client.list_manifest_properties(BUSYBOX, results_per_page=results_per_page)
    pages_seen = 0
    for page in manifest_pages.by_page():
        items_on_page = sum(1 for _ in page)
        assert items_on_page <= results_per_page
        pages_seen += 1
    assert pages_seen >= 1
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_list_registry_artifacts_descending(self, containerregistry_endpoint):
    """Descending order works via both the enum and the raw string value."""
    client = self.create_registry_client(containerregistry_endpoint)

    def check_descending(order_by):
        # Timestamps must strictly decrease across the listing.
        previous = None
        seen = 0
        for artifact in client.list_manifest_properties(BUSYBOX, order_by=order_by):
            if previous:
                assert artifact.last_updated_on < previous
            previous = artifact.last_updated_on
            seen += 1
        assert seen > 0

    check_descending(ArtifactManifestOrder.LAST_UPDATED_ON_DESCENDING)
    check_descending("timedesc")
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_list_registry_artifacts_ascending(self, containerregistry_endpoint):
    """Ascending order works via both the enum and the raw string value."""
    client = self.create_registry_client(containerregistry_endpoint)

    def check_ascending(order_by):
        # Timestamps must strictly increase across the listing.
        previous = None
        seen = 0
        for artifact in client.list_manifest_properties(BUSYBOX, order_by=order_by):
            if previous:
                assert artifact.last_updated_on > previous
            previous = artifact.last_updated_on
            seen += 1
        assert seen > 0

    check_ascending(ArtifactManifestOrder.LAST_UPDATED_ON_ASCENDING)
    check_ascending("timeasc")
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_get_manifest_properties(self, containerregistry_endpoint):
    """Looking up a manifest by tag returns typed properties with the right
    repository name and a digest-based fully qualified reference."""
    repo = self.get_resource_name("repo")
    tag = self.get_resource_name("tag")
    self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
    client = self.create_registry_client(containerregistry_endpoint)
    props = client.get_manifest_properties(repo, tag)
    assert isinstance(props, ArtifactManifestProperties)
    assert props.repository_name == repo
    expected = self.create_fully_qualified_reference(containerregistry_endpoint, repo, props.digest)
    assert props.fully_qualified_reference in expected
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_get_manifest_properties_does_not_exist(self, containerregistry_endpoint):
    """Looking up a well-formed but nonexistent digest raises ResourceNotFoundError."""
    repo = self.get_resource_name("repo")
    tag = self.get_resource_name("tag")
    self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
    client = self.create_registry_client(containerregistry_endpoint)
    real_digest = client.get_manifest_properties(repo, tag).digest
    # Keep the digest shape valid but mangle the last ten characters.
    bogus_digest = real_digest[:-10] + u"a" * 10
    with pytest.raises(ResourceNotFoundError):
        client.get_manifest_properties(repo, bogus_digest)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]