function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def go(self):
    """Queue opkg install/remove commands for this plugin and ask the user to confirm."""
    attrs = self.attributes
    if "package" in attrs:
        self.packagefiles = attrs["package"]
    if "needsRestart" in attrs:
        self.restartRequired = True
    self.cmdList = []
    if self.pluginstate in ('installed', 'remove'):
        # Plugin present: queue one remove command per package file.
        if self.packagefiles:
            for entry in self.packagefiles[:]:
                self.cmdList.append((IpkgComponent.CMD_REMOVE, { "package": entry["name"] }))
        if self.cmdList:
            self.session.openWithCallback(self.runRemove, MessageBox, _("Do you want to remove the package:\n") + self.pluginname + "\n" + self.oktext)
    elif iSoftwareTools.NetworkConnectionAvailable:
        # Not installed: queue installs, but only when the network is up.
        if self.packagefiles:
            for entry in self.packagefiles[:]:
                self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": entry["name"] }))
        if self.cmdList:
            self.session.openWithCallback(self.runUpgrade, MessageBox, _("Do you want to install the package:\n") + self.pluginname + "\n" + self.oktext)
def runUpgradeFinished(self):
    """After an install completes, offer a reboot when one is required."""
    self.reloadPluginlist()
    needs_restart = plugins.restartRequired or self.restartRequired
    if needs_restart:
        self.session.openWithCallback(self.UpgradeReboot, MessageBox, _("Installation has completed.") + "\n" + _("Do you want to reboot your receiver?"), MessageBox.TYPE_YESNO)
    else:
        self.close(True)
def runRemove(self, result):
    """MessageBox callback: start the queued remove commands when confirmed."""
    if not result:
        return
    self.session.openWithCallback(self.runRemoveFinished, Ipkg, cmdList = self.cmdList)
def reloadPluginlist(self):
    """Re-scan the plugin directory so newly (un)installed plugins show up."""
    plugin_dir = resolveFilename(SCOPE_PLUGINS)
    plugins.readPluginList(plugin_dir)
def __init__(self, session, plugin_path):
    """File-browser screen for picking an ipkg feed file to edit."""
    Screen.__init__(self, session)
    self.skin_path = plugin_path
    # Button labels.
    self["key_red"] = StaticText(_("Close"))
    self["key_green"] = StaticText(_("Edit"))
    # Browser state.
    self.sel = []
    self.val = []
    self.entry = False
    self.exe = False
    self.path = ""
    setup_keys = {
        "ok": self.KeyOk,
        "cancel": self.keyCancel
    }
    self["actions"] = NumberActionMap(["SetupActions"], setup_keys, -1)
    color_keys = {
        "red": self.keyCancel,
        "green": self.KeyOk,
    }
    self["shortcuts"] = ActionMap(["ShortcutActions"], color_keys)
    self["filelist"] = MenuList([])
    self.fill_list()
    self.onLayoutFinish.append(self.layoutFinished)
def setWindowTitle(self):
    """Set the caption of this screen."""
    caption = _("Select upgrade source to edit.")
    self.setTitle(caption)
def KeyOk(self):
    """Open the selected feed file in the IPKGSource editor."""
    # Ignore the key while executing or when nothing is selectable.
    if self.exe or not self.entry:
        return
    self.sel = self["filelist"].getCurrent()
    self.val = self.path + self.sel
    self.session.open(IPKGSource, self.val)
def Exit(self):
    """Close this screen without any further action."""
    self.close()
def __init__(self, session, configfile = None):
    """Editor screen for a single ipkg feed configuration file.

    session -- enigma2 session used to open child screens.
    configfile -- path of the feed file; its first line is preloaded
    into the input field when the file is readable.
    """
    Screen.__init__(self, session)
    self.session = session
    self.configfile = configfile
    text = ""
    if self.configfile:
        try:
            # Fix: the original used the py2-only 'file' builtin and only
            # closed the handle when the file was non-empty; 'open' plus
            # 'with' closes it on every path.
            with open(configfile, 'r') as fp:
                sources = fp.readlines()
            if sources:
                text = sources[0]
        except IOError:
            # Unreadable feed file: start with an empty input field.
            pass
    desk = getDesktop(0)
    x = int(desk.size().width())
    y = int(desk.size().height())
    self["key_red"] = StaticText(_("Cancel"))
    self["key_green"] = StaticText(_("Save"))
    # On small screens constrain the visible width of the input field.
    if (y >= 720):
        self["text"] = Input(text, maxSize=False, type=Input.TEXT)
    else:
        self["text"] = Input(text, maxSize=False, visible_width = 55, type=Input.TEXT)
    self["actions"] = NumberActionMap(["WizardActions", "InputActions", "TextEntryActions", "KeyboardInputActions","ShortcutActions"],
    {
        "ok": self.go,
        "back": self.close,
        "red": self.close,
        "green": self.go,
        "left": self.keyLeft,
        "right": self.keyRight,
        "home": self.keyHome,
        "end": self.keyEnd,
        "deleteForward": self.keyDeleteForward,
        "deleteBackward": self.keyDeleteBackward,
        "1": self.keyNumberGlobal,
        "2": self.keyNumberGlobal,
        "3": self.keyNumberGlobal,
        "4": self.keyNumberGlobal,
        "5": self.keyNumberGlobal,
        "6": self.keyNumberGlobal,
        "7": self.keyNumberGlobal,
        "8": self.keyNumberGlobal,
        "9": self.keyNumberGlobal,
        "0": self.keyNumberGlobal
    }, -1)
    self.onLayoutFinish.append(self.layoutFinished)
def setWindowTitle(self):
    """Set the caption of this screen."""
    caption = _("Edit upgrade source url.")
    self.setTitle(caption)
def keyLeft(self):
    """Move the input cursor one position to the left."""
    widget = self["text"]
    widget.left()
def keyHome(self):
    """Jump the input cursor to the start of the line."""
    widget = self["text"]
    widget.home()
def keyDeleteForward(self):
    """Delete the character under the cursor."""
    widget = self["text"]
    widget.delete()
def keyNumberGlobal(self, number):
    """Forward a numeric remote-control key to the input widget."""
    widget = self["text"]
    widget.number(number)
def __init__(self, session, plugin_path, args = None):
    """Package manager screen: lists packages with install/upgrade state.

    session -- enigma2 session; plugin_path -- skin path for this plugin;
    args -- optional extra arguments (unused here).
    """
    Screen.__init__(self, session)
    NumericalTextInput.__init__(self)
    self.session = session
    self.skin_path = plugin_path
    # Restrict SMS-style text input to characters useful in package names.
    self.setUseableChars(u'1234567890abcdefghijklmnopqrstuvwxyz')
    self["shortcuts"] = NumberActionMap(["ShortcutActions", "WizardActions", "NumberActions", "InputActions", "InputAsciiActions", "KeyboardInputActions" ],
    {
        "ok": self.go,
        "back": self.exit,
        "red": self.exit,
        "green": self.reload,
        "gotAsciiCode": self.keyGotAscii,
        "1": self.keyNumberGlobal,
        "2": self.keyNumberGlobal,
        "3": self.keyNumberGlobal,
        "4": self.keyNumberGlobal,
        "5": self.keyNumberGlobal,
        "6": self.keyNumberGlobal,
        "7": self.keyNumberGlobal,
        "8": self.keyNumberGlobal,
        "9": self.keyNumberGlobal,
        "0": self.keyNumberGlobal
    }, -1)
    self.list = []
    self.statuslist = []
    self["list"] = List(self.list)
    self["key_red"] = StaticText(_("Close"))
    self["key_green"] = StaticText(_("Reload"))
    # Bookkeeping for the asynchronous opkg list queries.
    self.list_updating = True
    self.packetlist = []
    self.installed_packetlist = {}
    self.upgradeable_packages = {}
    self.Console = Console()
    self.cmdList = []
    self.cachelist = []
    self.cache_ttl = 86400 #600 is default, 0 disables, Seconds cache is considered valid (24h should be ok for caching ipkgs)
    self.cache_file = eEnv.resolve('${libdir}/enigma2/python/Plugins/SystemPlugins/SoftwareManager/packetmanager.cache') #Path to cache directory
    self.oktext = _("\nAfter pressing OK, please wait!")
    # Package name suffixes (plus busybox) hidden from the user.
    self.unwanted_extensions = ('-dbg', '-dev', '-doc', '-staticdev', '-src', 'busybox')
    self.ipkg = IpkgComponent()
    self.ipkg.addCallback(self.ipkgCallback)
    self.onShown.append(self.setWindowTitle)
    self.onLayoutFinish.append(self.rebuildList)
    # Switch the remote control into ASCII mode for quick list filtering.
    rcinput = eRCInput.getInstance()
    rcinput.setKeyboardMode(rcinput.kmAscii)
def keyGotAscii(self):
    """Jump the list to the first entry matching the typed ASCII key."""
    code = getPrevAsciiCode()
    keyvalue = unichr(code).encode("utf-8")
    # Multi-byte characters cannot match a package's first letter.
    if len(keyvalue) == 1:
        self.setNextIdx(keyvalue[0])
def getNextIdx(self, char):
    """Return the index of the first list row whose name starts with char.

    Falls through (returns None) when no row matches.
    """
    for position, row in enumerate(self["list"].list):
        if row[0] and row[0][0] == char:
            return position
def reload(self):
    """Drop the on-disk package cache and rebuild the list from scratch."""
    if os.path.exists(self.cache_file):
        os.unlink(self.cache_file)
    self.list_updating = True
    self.rebuildList()
def setStatus(self, status = None):
    """Show a transient status row ('update' progress or 'error') in the list."""
    if not status:
        return
    self.statuslist = []
    divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/div-h.png"))
    if status == 'update':
        statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgrade.png"))
        self.statuslist.append(( _("Package list update"), '', _("Trying to download a new packetlist. Please wait..." ),'',statuspng, divpng ))
        self['list'].setList(self.statuslist)
    elif status == 'error':
        statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/remove.png"))
        self.statuslist.append(( _("Error"), '', _("An error occurred while downloading the packetlist. Please try again." ),'',statuspng, divpng ))
        self['list'].setList(self.statuslist)
def go(self, returnValue = None):
    """Ask for confirmation to install/upgrade/remove the highlighted package."""
    cur = self["list"].getCurrent()
    if not cur:
        return
    status = cur[3]
    package = cur[0]
    self.cmdList = []
    if status == 'installed':
        self.cmdList.append((IpkgComponent.CMD_REMOVE, { "package": package }))
        if self.cmdList:
            self.session.openWithCallback(self.runRemove, MessageBox, _("Do you want to remove the package:\n") + package + "\n" + self.oktext)
    elif status == 'upgradeable':
        self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": package }))
        if self.cmdList:
            self.session.openWithCallback(self.runUpgrade, MessageBox, _("Do you want to upgrade the package:\n") + package + "\n" + self.oktext)
    elif status == "installable":
        self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": package }))
        if self.cmdList:
            self.session.openWithCallback(self.runUpgrade, MessageBox, _("Do you want to install the package:\n") + package + "\n" + self.oktext)
def runRemoveFinished(self):
    """After a removal completes, ask whether to reboot the receiver."""
    message = _("Removal has completed.") + "\n" + _("Do you want to reboot your receiver?")
    self.session.openWithCallback(self.RemoveReboot, MessageBox, message, MessageBox.TYPE_YESNO)
def runUpgrade(self, result):
    """MessageBox callback: start the queued install commands when confirmed."""
    if not result:
        return
    self.session.openWithCallback(self.runUpgradeFinished, Ipkg, cmdList = self.cmdList)
def UpgradeReboot(self, result):
    """Yes/no callback after an upgrade: reboot, or mark the row installed."""
    if result is None:
        return
    if result is False:
        # User declined the reboot: refresh row + cache in place.
        cur = self["list"].getCurrent()
        if cur:
            item = self['list'].getIndex()
            refreshed = self.buildEntryComponent(cur[0], cur[1], cur[2], 'installed')
            self.list[item] = refreshed
            self.cachelist[item] = [cur[0], cur[1], cur[2], 'installed']
            self['list'].setList(self.list)
            write_cache(self.cache_file, self.cachelist)
            self.reloadPluginlist()
    if result:
        self.session.open(TryQuitMainloop, retvalue=3)
def IpkgList_Finished(self, result, retval, extra_args = None):
    """Parse 'opkg list' output into self.packetlist, then query installed packages.

    result -- raw stdout; useful lines look like 'name - version - description'.
    Lines without ' - ' are continuations of the previous description.
    Packages ending in an unwanted suffix are skipped, as are duplicate
    names (the first feed wins).
    """
    if result:
        self.packetlist = []
        last_name = ""
        for line in result.splitlines():
            if ' - ' in line:
                tokens = line.split(' - ')
                name = tokens[0].strip()
                # Renamed comprehension variable: it shadowed the loop var.
                if name and not any(name.endswith(ext) for ext in self.unwanted_extensions):
                    count = len(tokens)
                    version = count > 1 and tokens[1].strip() or ""
                    descr = count > 2 and tokens[2].strip() or ""
                    if name == last_name:
                        continue
                    last_name = name
                    self.packetlist.append([name, version, descr])
            elif len(self.packetlist) > 0:
                # No ' - ' in the text, assume that this is the description
                # therefore add this text to the last packet description.
                # Fix: removed a dead 'self.packetlist[:-1] + last_packet'
                # statement whose result was discarded.
                self.packetlist[-1][2] += line
    if not self.Console:
        self.Console = Console()
    cmd = self.ipkg.ipkg + " list_installed"
    self.Console.ePopen(cmd, self.IpkgListInstalled_Finished)
def OpkgListUpgradeable_Finished(self, result, retval, extra_args = None):
    """Parse 'opkg list-upgradable' output into self.upgradeable_packages.

    Each useful line is 'name - installed - available'; the available
    version is stored keyed by package name.  Always finishes by
    rebuilding the visible packet list.
    """
    if result:
        self.upgradeable_packages = {}
        for line in result.splitlines():
            tokens = line.split(' - ')
            name = tokens[0].strip()
            if not any(name.endswith(ext) for ext in self.unwanted_extensions):
                count = len(tokens)
                version = count > 2 and tokens[2].strip() or ""
                self.upgradeable_packages[name] = version
    self.buildPacketList()
def buildPacketList(self):
    """Build the visible package list, preferring the on-disk cache.

    When caching is enabled (cache_ttl > 0) and still valid, rows come
    from the cache file; otherwise the list is rebuilt from the parsed
    opkg results and the cache file is rewritten.
    """
    self.list = []
    self.cachelist = []
    # NOTE(review): self.vc (cache validity) and self.inv_cache are set
    # elsewhere in the class - not visible in this chunk; confirm.
    if self.cache_ttl > 0 and self.vc != 0:
        print 'Loading packagelist cache from ',self.cache_file
        try:
            self.cachelist = load_cache(self.cache_file)
            if len(self.cachelist) > 0:
                for x in self.cachelist:
                    self.list.append(self.buildEntryComponent(x[0], x[1], x[2], x[3]))
                self['list'].setList(self.list)
        except:
            # Any problem reading the cache invalidates it so the
            # fresh-rebuild branch below runs.
            self.inv_cache = 1
    if self.cache_ttl == 0 or self.inv_cache == 1 or self.vc == 0:
        print 'rebuilding fresh package list'
        for x in self.packetlist:
            # Classify each known package against the installed and
            # upgradeable sets.
            status = ""
            if x[0] in self.installed_packetlist:
                if x[0] in self.upgradeable_packages:
                    status = "upgradeable"
                else:
                    status = "installed"
            else:
                status = "installable"
            self.list.append(self.buildEntryComponent(x[0], x[1], x[2], status))
            self.cachelist.append([x[0], x[1], x[2], status])
        write_cache(self.cache_file, self.cachelist)
        self['list'].setList(self.list)
def __init__(self, session, list):
    """Selection screen listing installable ipk files from one directory."""
    Screen.__init__(self, session)
    self.list = SelectionList()
    self["list"] = self.list
    p = 0
    if len(list):
        # All entries share a directory; show it in the title and strip
        # it from the per-row labels.
        p = list[0].rfind("/")
        title = list[0][:p]
        self.title = ("%s %s %s") % (_("Install extensions"), _("from"), title)
    for idx, path in enumerate(list):
        self.list.addSelection(path[p+1:], path, idx, False)
    self.list.sort()
    self["key_red"] = StaticText(_("Close"))
    self["key_green"] = StaticText(_("Install"))
    self["key_yellow"] = StaticText()
    self["key_blue"] = StaticText(_("Invert"))
    self["introduction"] = StaticText(_("Press OK to toggle the selection."))
    key_map = {
        "ok": self.list.toggleSelection,
        "cancel": self.close,
        "red": self.close,
        "green": self.install,
        "blue": self.list.toggleAllSelection
    }
    self["actions"] = ActionMap(["OkCancelActions", "ColorActions"], key_map, -1)
def filescan_open(list, session, **kwargs):
    """Filescan hook: open the installer with the paths of all scanned files."""
    paths = [entry.path for entry in list]
    session.open(IpkgInstaller, paths)
def UpgradeMain(session, **kwargs):
    """Plugin entry point: open the software-update menu."""
    session.open(UpdatePluginMenu)
def test_ef_connect():
    """Two edges of the same (last) face must be marked as connected."""
    data, specs = three_faces_sheet()
    sheet = Sheet("test", data, specs)
    ef_connect = connectivity.edge_in_face_connectivity(sheet)
    last_face_edges = sheet.edge_df.query(f"face == {sheet.Nf-1}").index
    assert ef_connect[last_face_edges[0], last_face_edges[1]]
def test_cell_cell_connectivity():
    """Three extruded cells share 36 connections pairwise, none with themselves."""
    data, _ = three_faces_sheet()
    mono = Monolayer("test", extrude(data), bulk_spec())
    ccc = connectivity.cell_cell_connectivity(mono)
    expected = np.full((3, 3), 36)
    np.fill_diagonal(expected, 0)
    np.testing.assert_array_equal(ccc, expected)
def test_verts_in_face_connectivity():
    """Vertex 0 must have exactly three co-face links of weight 2."""
    data, specs = three_faces_sheet()
    sheet = Sheet("test", data, specs)
    vfc = connectivity.verts_in_face_connectivity(sheet)
    row = vfc[0]
    assert row[row == 2].shape == (3,)
def __init__(self, server=None):
    """Wire up the server-selection window and initial application state."""
    self.server = server
    selector = ServerSelectionWindow(self.server)
    selector.connect('server-selected', self.on_server_selected)
    self.app = GDanmakuApp(self)
    self.thread_sub = None
    self.enabled = True
    self.options = load_config()
    # Maps id(danmaku) -> live Danmaku window.
    self.live_danmakus = {}
def new_danmaku(self, dm_opts):
    """Create a Danmaku window for each option dict, skipping bad entries."""
    if not self.enabled:
        return
    for opt in dm_opts:
        try:
            dm = Danmaku(**opt)
            dm.connect('delete-event', self.on_danmaku_delete)
        except Exception as e:
            # A malformed option set is logged and skipped, not fatal.
            print(e)
        else:
            self.live_danmakus[id(dm)] = dm
def toggle_danmaku(self):
    """Flip the global danmaku on/off switch.

    When switching off, hide and tear down every live danmaku window.
    """
    self.enabled = not self.enabled
    if not self.enabled:
        # Fix: dict.iteritems() does not exist on Python 3 (this module
        # already uses py3-style print()); the keys were unused anyway,
        # so iterate the values.
        for dm in self.live_danmakus.values():
            dm.hide()
            dm._clean_exit()
def run(self):
    """Enter the GTK main loop (with GObject threading enabled)."""
    GObject.threads_init()
    Gtk.main()
def main():
    """CLI entry point: parse arguments, install signal handlers, run the app."""
    options = load_config()
    parser = argparse.ArgumentParser(prog="gdanmaku")
    parser.add_argument(
        "--server", type=str,
        default=options["http_stream_server"],
        help="danmaku stream server")
    parser.add_argument(
        '--config', action="store_true",
        help="run configuration window")
    args = parser.parse_args()
    # Let Ctrl-C / TERM terminate the GTK loop with default behaviour.
    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, signal.SIG_DFL)
    if args.config:
        app_config()
    else:
        main_app = Main(args.server)
        main_app.run()
def __init__(self, backend):
    """Keep references to the owning backend and its configuration."""
    self.backend = backend
    self.config = backend.config
def env_path(self):
return self.__module__ | openwisp/netjsonconfig | [
358,
71,
358,
37,
1438272990
] |
def template_env(self):
    """Jinja2 environment that loads templates packaged with this module."""
    loader = PackageLoader(self.env_path, 'templates')
    return Environment(loader=loader, trim_blocks=True)
def get_name(cls):
    """
    Returns the name of the render class without its prefix
    """
    class_name = cls.__name__
    return class_name.replace('Renderer', '').lower()
def __init__(self, attrs=None, options=None, *args, **kwargs):
    """Widget constructor.

    attrs -- standard Django widget attributes, passed to the parent.
    options -- autocomplete options mapping; a fresh dict is used when
    omitted.  Fixes the mutable-default bug (a literal {} default is
    shared between every instance) and removes a leftover debug
    print(self.__dict__).
    """
    super(TagAutocompleteWidget, self).__init__(attrs)
    self.options = {} if options is None else options
def __init__(self, kt=None, vt=None, data=None):
    """This is a typed dict implementation that optionally enforces given
    types on contained values on assignment."""
    self._kt = kt
    self._vt = vt
    # Pick the cheapest checker for the requested key/value constraints.
    if kt is None and vt is None:
        self.check = self._check_noop
    elif vt is None:
        self.check = self._check_k
    elif kt is None:
        self.check = self._check_v
    else:
        self.check = self._check_kv
    if data is not None:
        self.update(data)
def _check_k(self, key, _):
    """Raise TypeError unless key is an instance of the declared key type."""
    if isinstance(key, self._kt):
        return
    raise TypeError(repr(key))
def _check_kv(self, key, value):
    """Raise TypeError unless key and value both match their declared types.

    The key is checked first, matching the single-sided checkers.
    """
    for obj, expected in ((key, self._kt), (value, self._vt)):
        if not isinstance(obj, expected):
            raise TypeError(repr(obj))
def update(self, E=None, **F):
    """dict.update() work-alike that funnels every pair through __setitem__.

    E may be a mapping (its items() are used) or an iterable of
    key/value pairs; keyword arguments are applied afterwards.
    """
    try:
        pairs = chain(E.items(), F.items())
    except AttributeError:
        # E has no items(): assume an iterable of (key, value) tuples.
        pairs = chain(E, F)
    for key, value in pairs:
        self[key] = value
def fromkeys(cls, S, v=None):
    """Build a tdict keyed like S, every key mapped to v.

    The key type is inferred from S (which must be homogeneous); the
    value type from v when given.
    """
    kt = vt = None
    if len(S) > 0:
        # Unpacking the one-element set asserts S holds a single type.
        kt, = set(type(s) for s in S)
    if v is not None:
        vt = type(v)
    retval = tdict(kt, vt)
    for s in S:
        retval[s] = v
    return retval
def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    target = execution_path('.')
    os.chdir(target)
def test_topojson_init():
    """Envelope of the escaped.topojson fixture collapses to a single point."""
    # topojson tests/data/json/escaped.geojson -o tests/data/topojson/escaped.topojson --properties
    # topojson version 1.4.2
    ds = mapnik.Datasource(
        type='topojson',
        file='../data/topojson/escaped.topojson')
    e = ds.envelope()
    expected_x = -81.705583
    expected_y = 41.480573
    assert_almost_equal(e.minx, expected_x, places=7)
    assert_almost_equal(e.miny, expected_y, places=6)
    assert_almost_equal(e.maxx, expected_x, places=5)
    assert_almost_equal(e.maxy, expected_y, places=3)
def test_geojson_from_in_memory_string():
    """Inline topojson must expose the same fields/values as the file fixture.

    Fix: the fixture file handle was opened inline and never closed;
    a 'with' block now closes it deterministically.
    """
    with open('../data/topojson/escaped.topojson', 'r') as fh:
        inline_data = fh.read()
    ds = mapnik.Datasource(type='topojson', inline=inline_data)
    f = list(ds.all_features())[0]
    eq_(len(ds.fields()), 11)
    desc = ds.describe()
    eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
    eq_(f['name'], u'Test')
    eq_(f['int'], 1)
    eq_(f['description'], u'Test: \u005C')
    eq_(f['spaces'], u'this has spaces')
    eq_(f['double'], 1.1)
    eq_(f['boolean'], True)
    eq_(f['NOM_FR'], u'Qu\xe9bec')
    eq_(f['NOM_FR'], u'Québec')
def test_that_nonexistant_query_field_throws(**kwargs):
    """Field names and types reported by the datasource match the fixture."""
    ds = mapnik.Datasource(
        type='topojson',
        file='../data/topojson/escaped.topojson')
    eq_(len(ds.fields()), 11)
    # TODO - this sorting is messed up
    expected_fields = ['name', 'int', 'description',
                       'spaces', 'double', 'boolean', 'NOM_FR',
                       'object', 'array', 'empty_array', 'empty_object']
    eq_(ds.fields(), expected_fields)
    expected_types = ['str', 'int',
                      'str', 'str', 'float', 'bool', 'str',
                      'str', 'str', 'str', 'str']
    eq_(ds.field_types(), expected_types)
def GetDataClient(mocked=False):
    """Returns a BigQueryClient with default credentials in the testing env."""
    client_cls = (mock_big_query_client.MockBigQueryClient if mocked
                  else big_query_client.BigQueryClient)
    return client_cls(
        credential_file=credentials_lib.DEFAULT_CREDENTIALS,
        env=data_source_config.Environments.TESTING)
def setUp(self):
    """Create a small fixture graph: one author, two blogs, two entries and
    a comment, plus 50 factory-generated comments/entries so page-size-25
    requests in the query-count tests have plenty of rows."""
    self.author = Author.objects.create(
        name="Super powerful superhero", email="i.am@lost.com"
    )
    self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog")
    self.other_blog = Blog.objects.create(
        name="Other blog", tagline="It's another blog"
    )
    self.first_entry = Entry.objects.create(
        blog=self.blog,
        headline="headline one",
        body_text="body_text two",
        pub_date=timezone.now(),
        mod_date=timezone.now(),
        n_comments=0,
        n_pingbacks=0,
        rating=3,
    )
    self.second_entry = Entry.objects.create(
        blog=self.blog,
        headline="headline two",
        body_text="body_text one",
        pub_date=timezone.now(),
        mod_date=timezone.now(),
        n_comments=0,
        n_pingbacks=0,
        rating=1,
    )
    self.comment = Comment.objects.create(entry=self.first_entry)
    # Bulk data for pagination.
    CommentFactory.create_batch(50)
    EntryFactory.create_batch(50)
def test_query_count_include_author(self):
    """We expect a list view with an include have five queries:
    1. Primary resource COUNT query
    2. Primary resource SELECT
    3. Authors prefetched
    4. Author types prefetched
    5. Entries prefetched
    """
    url = "/comments?include=author&page[size]=25"
    with self.assertNumQueries(5):
        response = self.client.get(url)
        self.assertEqual(len(response.data["results"]), 25)
def test_query_prefetch_uses_included_resources(self):
    """We expect a list view with `included_resources` to have three queries:
    1. Primary resource COUNT query
    2. Primary resource SELECT
    3. Comments prefetched
    """
    url = "/entries?fields[entries]=comments&page[size]=25"
    with self.assertNumQueries(3):
        response = self.client.get(url)
        self.assertEqual(len(response.data["results"]), 25)
def test_parse_result(self):
    """_parse_result rejects mismatched IDs and missing results, passes data."""
    parse = webuntis.utils.remote._parse_result
    request = {'id': 2}
    response = {'id': 3}
    self.assertRaisesRegex(webuntis.errors.RemoteError,
                           'Request ID', parse, request, response)
    request = response = {'id': 2}
    self.assertRaisesRegex(webuntis.errors.RemoteError,
                           'no information', parse, request, response)
    request = {'id': 2}
    response = {'id': 2, 'result': 'YESSIR'}
    assert parse(request, response) == 'YESSIR'
def RunCommand_Mock(self, result, *args, **kwargs):
    """Return a mox stub whose .output is the canned result string."""
    stub = self.mox.CreateMockAnything()
    stub.output = result
    return stub
def testInternalRepoCheckout(self):
    """Test we detect internal checkouts properly."""
    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
    internal_urls = (
        'ssh://gerrit-int.chromium.org:29419/chromeos/manifest-internal.git',
        'ssh://gerrit-int.chromium.org:29419/chromeos/manifest-internal',
        'ssh://gerrit.chromium.org:29418/chromeos/manifest-internal',
        'test@abcdef.bla.com:39291/bla/manifest-internal.git',
    )
    for url in internal_urls:
        # Make RunCommand report the URL as the remote of the checkout.
        cros_build_lib.RunCommand = functools.partial(self.RunCommand_Mock, url)
        self.assertTrue(repository.IsInternalRepoCheckout('.'))
def _Initialize(self, branch='master'):
    """Create and initialize a RepoRepository in the test's temp dir."""
    repo = repository.RepoRepository(constants.MANIFEST_URL, self.tempdir,
                                     branch=branch)
    repo.Initialize()
def setUp(self):
    """Force the git author/committer identity to the bot account.

    Fix: os.putenv bypasses the os.environ mapping, so later reads of
    os.environ would not see these values; assigning os.environ updates
    both the mapping and the process environment.
    """
    os.environ['GIT_COMMITTER_EMAIL'] = 'chrome-bot@chromium.org'
    os.environ['GIT_AUTHOR_EMAIL'] = 'chrome-bot@chromium.org'
def find_nuget_unix():
    """Locate the 'nuget' executable on a Unix-like system.

    Search order: $NUGET_PATH (as a file, then as a directory), the
    well-known Mono install directories, then every directory on $PATH.
    Returns the executable path or None.
    """
    import os

    def _is_executable(path):
        return os.path.isfile(path) and os.access(path, os.X_OK)

    if 'NUGET_PATH' in os.environ:
        candidate = os.environ['NUGET_PATH']
        if _is_executable(candidate):
            return candidate
        candidate = os.path.join(candidate, 'nuget')
        if _is_executable(candidate):
            return candidate
    import os.path
    import sys
    mono_dirs = ['/opt/novell/mono/bin']
    if sys.platform == 'darwin':
        mono_dirs = ['/Library/Frameworks/Mono.framework/Versions/Current/bin', '/usr/local/var/homebrew/linked/mono/bin'] + mono_dirs
    # Mono install dirs are trusted: only existence is checked here.
    for directory in mono_dirs:
        candidate = os.path.join(directory, 'nuget')
        if os.path.isfile(candidate):
            return candidate
        if os.path.isfile(candidate + '.exe'):
            return candidate + '.exe'
    for directory in os.environ['PATH'].split(os.pathsep):
        candidate = os.path.join(directory.strip('"'), 'nuget')
        if _is_executable(candidate):
            return candidate
        if _is_executable(candidate + '.exe'):
            return candidate + '.exe'
    return None
def find_msbuild_unix(filename):
    """Locate a build tool (msbuild/xbuild) on a Unix-like system.

    Checks the well-known Mono install directories first, then every
    directory on $PATH; '.exe' variants are also accepted.  Returns the
    path or None.
    """
    import os.path
    import sys
    mono_dirs = ['/opt/novell/mono/bin']
    if sys.platform == 'darwin':
        mono_dirs = ['/Library/Frameworks/Mono.framework/Versions/Current/bin', '/usr/local/var/homebrew/linked/mono/bin'] + mono_dirs
    # Mono install dirs are trusted: only existence is checked here.
    for directory in mono_dirs:
        candidate = os.path.join(directory, filename)
        if os.path.isfile(candidate):
            return candidate
        if os.path.isfile(candidate + '.exe'):
            return candidate + '.exe'
    for directory in os.environ['PATH'].split(os.pathsep):
        candidate = os.path.join(directory.strip('"'), filename)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
        if os.path.isfile(candidate + '.exe') and os.access(candidate + '.exe', os.X_OK):
            return candidate + '.exe'
    return None
def mono_build_solution(source, target, env):
    """SCons builder: restore, build and stage the GodotSharpTools solution.

    source -- [solution file]; target -- [destination assembly path];
    env -- SCons environment (reads 'xbuild_fallback' on non-Windows).
    Raises RuntimeError when MSBuild/NuGet cannot be found or a build
    step fails.
    """
    import subprocess
    from shutil import copyfile
    sln_path = os.path.abspath(str(source[0]))
    target_path = os.path.abspath(str(target[0]))
    framework_path = ''
    msbuild_env = os.environ.copy()
    # Needed when running from Developer Command Prompt for VS
    if 'PLATFORM' in msbuild_env:
        del msbuild_env['PLATFORM']
    # Find MSBuild
    if os.name == 'nt':
        msbuild_info = find_msbuild_windows(env)
        if msbuild_info is None:
            raise RuntimeError('Cannot find MSBuild executable')
        msbuild_path = msbuild_info[0]
        framework_path = msbuild_info[1]
        msbuild_env.update(msbuild_info[2])
    else:
        msbuild_path = find_msbuild_unix('msbuild')
        if msbuild_path is None:
            # Optionally fall back to the deprecated xbuild tool.
            xbuild_fallback = env['xbuild_fallback']
            if xbuild_fallback and os.name == 'nt':
                print('Option \'xbuild_fallback\' not supported on Windows')
                xbuild_fallback = False
            if xbuild_fallback:
                print('Cannot find MSBuild executable, trying with xbuild')
                print('Warning: xbuild is deprecated')
                msbuild_path = find_msbuild_unix('xbuild')
                if msbuild_path is None:
                    raise RuntimeError('Cannot find xbuild executable')
            else:
                raise RuntimeError('Cannot find MSBuild executable')
    print('MSBuild path: ' + msbuild_path)
    # Find NuGet
    nuget_path = find_nuget_windows(env) if os.name == 'nt' else find_nuget_unix()
    if nuget_path is None:
        raise RuntimeError('Cannot find NuGet executable')
    print('NuGet path: ' + nuget_path)
    # Do NuGet restore
    try:
        subprocess.check_call([nuget_path, 'restore', sln_path])
    except subprocess.CalledProcessError:
        raise RuntimeError('GodotSharpTools: NuGet restore failed')
    # Build solution
    build_config = 'Release'
    msbuild_args = [
        msbuild_path,
        sln_path,
        '/p:Configuration=' + build_config,
    ]
    if framework_path:
        msbuild_args += ['/p:FrameworkPathOverride=' + framework_path]
    try:
        subprocess.check_call(msbuild_args, env=msbuild_env)
    except subprocess.CalledProcessError:
        raise RuntimeError('GodotSharpTools: Build failed')
    # Copy files from the solution's bin/Release to the target directory.
    src_dir = os.path.abspath(os.path.join(sln_path, os.pardir, 'bin', build_config))
    dst_dir = os.path.abspath(os.path.join(target_path, os.pardir))
    asm_file = 'GodotSharpTools.dll'
    if not os.path.isdir(dst_dir):
        if os.path.exists(dst_dir):
            raise RuntimeError('Target directory is a file')
        os.makedirs(dst_dir)
    copyfile(os.path.join(src_dir, asm_file), os.path.join(dst_dir, asm_file))
    # Dependencies
    copyfile(os.path.join(src_dir, "DotNet.Glob.dll"), os.path.join(dst_dir, "DotNet.Glob.dll"))
def checkrefval(mod, errs):
    """Validate the reference/value fp_text fields of a parsed KiCad module.

    Appends human-readable problems to errs: both fields must be on
    F.Fab, the reference must read REF**, and the value must match the
    module's name.
    """
    for field in (node for node in mod if node[0] == "fp_text"):
        kind = field[1]
        if kind not in ("reference", "value"):
            continue
        layer = [n for n in field if n[0] == "layer"][0]
        if layer[1] != "F.Fab":
            errs.append("Value and Reference fields must be on F.Fab")
        if kind == "reference" and field[2] != "REF**":
            errs.append("Reference field must contain REF**")
        if kind == "value" and not mod[1].startswith(field[2]):
            errs.append("Value field must contain module name")
def checklines(mod, errs, check_layers, check_width):
    """Check that graphic primitives on check_layers have exactly check_width.

    mod -- parsed s-expression module; errs -- list extended with error
    strings; check_width -- required width in mm (string, compared via
    Decimal so '0.15' == '0.150').
    """
    primitives = ("fp_line", "fp_circle", "fp_arc", "fp_poly", "fp_curve")
    for shape in mod:
        if shape[0] not in primitives:
            continue
        layer = [n for n in shape if n[0] == "layer"][0]
        width = [n for n in shape if n[0] == "width"][0]
        if layer[1] in check_layers and Decimal(width[1]) != Decimal(check_width):
            errs.append("Lines on {} must be {}mm wide"
                        .format(check_layers, check_width))
def checkmod(path, verbose=False):
    """Run every module check on one footprint file; print and return pass/fail."""
    errs = []
    with open(path) as f:
        mod = sexp_parse(f.read())
    checkrefval(mod, errs)
    checkfont(mod, errs)
    checklines(mod, errs, ("F.SilkS", "B.SilkS"), "0.15")
    checklines(mod, errs, ("F.Fab", "B.Fab"), "0.01")
    checkctyd(mod, errs)
    if errs:
        print("Checked '{}': Error:".format(path), file=sys.stderr)
        for err in errs:
            print(" " + err, file=sys.stderr)
        print("", file=sys.stderr)
        return False
    if verbose:
        print("Checked '{}': OK".format(path))
    return True
def scanRF(LowestAmp, HighestAmp, step, numScans):
    """Step the rf1 synth amplitude from LowestAmp to HighestAmp and take
    numScans scans at each point; each step's data is saved with the
    measured RF amplitude embedded in the file name.

    NOTE(review): relies on globals hc (hardware controller), sm (scan
    master), Environs and System provided by the hosting IronPython
    environment - confirm against the EDMSuite script loader.
    """
    # setup
    AmpList = []
    fileSystem = Environs.FileSystem
    file = \
        fileSystem.GetDataDirectory(\
        fileSystem.Paths["scanMasterDataPath"])\
        + fileSystem.GenerateNextDataFileName()
    print("Saving as " + file + "_" + "MeasuredRF1Amp" + "*.zip")
    print("")
    # start looping
    # Work in integer tenths to avoid float accumulation in range().
    r = range(int(10*LowestAmp), int(10*HighestAmp), int(10*step))
    for i in range(len(r)):
        print "hc:rf1 Amplitude -> " + str(float(r[i])/10)
        hc.SetGreenSynthAmp(float(r[i])/10)
        # hc.GreenSynthOnAmplitude = double(r[i]/10)
        # Toggle the synth so the new amplitude takes effect.
        hc.EnableGreenSynth( False )
        hc.EnableGreenSynth( True )
        hc.UpdateRFPowerMonitor()
        rfAmpMeasured = hc.RF1PowerCentre
        hc.StepTarget(2)
        System.Threading.Thread.Sleep(500)
        sm.AcquireAndWait(numScans)
        scanPath = file + "_" + str(i) + "_" + str(rfAmpMeasured) + ".zip"
        sm.SaveData(scanPath)
        AmpList.append(str(rfAmpMeasured))
    print "List of Measured Amplitudes =" + str(AmpList).strip('[]')
def __init__(self, x):
    """dict-subclass init that also keeps the original mapping around as .x."""
    dict.__init__(self, x)
    self.x = x
def set_test_params(self):
    """Configure a clean three-node chain, all nodes using legacy addresses."""
    self.setup_clean_chain = True
    self.num_nodes = 3
    legacy = ["-addresstype=legacy"]
    self.extra_args = [legacy[:] for _ in range(self.num_nodes)]
def setup_network(self, split=False):
    """Build the default topology, then additionally connect nodes 0 and 2."""
    super().setup_network()
    connect_nodes_bi(self.nodes, 0, 2)
def execute(f, iterable, stop_flag=None, workers=10, timeout=30):
    """Map f over iterable on a thread pool, yielding results as they finish.

    stop_flag -- optional threading.Event; it is set after `timeout`
    seconds and checked between results so the generator stops early.
    Also stops when Kodi (xbmc) requests shutdown.

    Fixes: the original created threading.Timer but never start()ed it,
    so the timeout never fired, and it dereferenced stop_flag.set even
    when stop_flag was None.
    """
    with Executor(max_workers=workers) as executor:
        timer = None
        if stop_flag is not None:
            timer = threading.Timer(timeout, stop_flag.set)
            timer.daemon = True
            timer.start()
        try:
            for future in _batched_pool_runner(executor, workers, f,
                                               iterable, timeout):
                if xbmc.abortRequested:
                    break
                if stop_flag and stop_flag.isSet():
                    break
                yield future.result()
        finally:
            # Don't leave the timeout thread running after early exit.
            if timer is not None:
                timer.cancel()
def format_selection(format_line):
    "Apply a formatting function to all of the selected lines."
    @wraps(format_line)
    def apply(self, event=None):
        head, tail, chars, lines = self.formatter.get_region()
        # The last element is the fragment after the selection; skip it.
        for pos, line in enumerate(lines[:-1]):
            lines[pos] = format_line(self, line)
        self.formatter.set_region(head, tail, chars, lines)
        return 'break'
    return apply
def __init__(self, editwin):
    "Initialize the settings for this extension."
    # Keep the editor window plus the two collaborators the handlers use.
    self.editwin = editwin
    self.formatter = editwin.fregion
    self.text = editwin.text
310,
62,
310,
74,
1280013973
] |
def reload(cls):
    "Load class variables from config."
    # Refresh the configurable insertion text from the user's IDLE config.
    cls.ztext = idleConf.GetOption('extensions', 'ZzDummy', 'z-text')
310,
62,
310,
74,
1280013973
] |
def z_in_event(self, line):
    """Insert text at the beginning of each selected line.
    This is bound to the <<z-in>> virtual event when the extensions
    are loaded.
    """
    return self.ztext + line
310,
62,
310,
74,
1280013973
] |
def z_out_event(self, line):
    """Remove specific text from the beginning of each selected line.
    This is bound to the <<z-out>> virtual event when the extensions
    are loaded.
    """
    prefix = self.ztext
    if line.startswith(prefix):
        return line[len(prefix):]
    return line
310,
62,
310,
74,
1280013973
] |
def get_testfile_path(name):
    """Return the path of *name* inside the PySafety_test_files directory
    that lives next to this test module."""
    test_dir = os.path.join(os.path.dirname(__file__), 'PySafety_test_files')
    return os.path.join(test_dir, name)
291,
584,
291,
703,
1455441911
] |
def setUp(self):
    # Fresh Section/queue pair per test so results never leak across tests.
    self.section = Section('name')
    self.uut = PySafetyBear(self.section, Queue())
291,
584,
291,
703,
1455441911
] |
def test_with_vulnerability(self):
    # bottle 0.10.1 is a known-vulnerable pin, so the bear must flag it.
    self.check_invalidity(self.uut, ['bottle==0.10.1'])
291,
584,
291,
703,
1455441911
] |
def test_without_cve_vulnerability(self):
    # Feed only the locustio<0.7 line of the fixture and expect exactly
    # one pyup.io (non-CVE) result with a precise character range.
    file_name = 'requirement.txt'
    file_contents = load_testfile(file_name)
    file_contents = [file_contents[1]]  # second line: the locustio pin
    self.check_results(
        self.uut,
        file_contents,
        [Result.from_values('PySafetyBear',
                            'locustio<0.7 is vulnerable to pyup.io-25878 '
                            'and your project is using 0.5.1.',
                            file=get_testfile_path(file_name),
                            line=1,
                            column=11,
                            end_line=1,
                            end_column=16,
                            severity=RESULT_SEVERITY.NORMAL,
                            additional_info='locustio before '
                                            '0.7 uses pickle.',
                            )],
        filename=get_testfile_path(file_name))
291,
584,
291,
703,
1455441911
] |
def test_with_no_requirements(self):
    # An empty requirements list has nothing to flag.
    self.check_validity(self.uut, [])
291,
584,
291,
703,
1455441911
] |
def import_modules(self):
    """Modules to import when sanity-checking the installation; the
    C-backed yaml.cyaml module only exists with the +libyaml variant."""
    if '+libyaml' in self.spec:
        return ['yaml', 'yaml.cyaml']
    return ['yaml']
3244,
1839,
3244,
2847,
1389172932
] |
def build_ext_args(self, spec, prefix):
    """Extra arguments for build_ext: point the compiler at libyaml when
    a pre-6 release is built with the +libyaml variant."""
    if not spec.satisfies('@:5 +libyaml'):
        return []
    libyaml = spec['libyaml']
    return [
        libyaml.libs.search_flags,
        libyaml.headers.include_flags,
    ]
3244,
1839,
3244,
2847,
1389172932
] |
def main(config_file):
    """Boot the cyclone application: log to stdout and listen on $PORT
    (default 8888) until the reactor stops."""
    log.startLogging(sys.stdout)
    application = web.Application(config_file)
    port = int(os.environ.get("PORT", 8888))
    reactor.listenTCP(port, application)
    reactor.run()
699,
99,
699,
8,
1253582104
] |
def say(self, chan, words):
    # Send an IRC PRIVMSG to `chan`; IRC protocol lines end with CRLF.
    # NOTE(review): relies on the module-level `sock` connection.
    send = 'PRIVMSG %s :%s\r\n' % (chan, words)
    sock.send(send)
13,
3,
13,
2,
1348148442
] |
def run(self):
    """Read user input and relay each line to the channel until QUIT."""
    global done
    while not done:
        msg = raw_input('IRC > ')
        if msg == 'QUIT':
            # Bug fix: terminate the command with CRLF as IRC requires,
            # and do not also echo "QUIT" as a PRIVMSG after quitting
            # (the original fell through to self.say unconditionally).
            sock.send('QUIT\r\n')
            done = True
        else:
            self.say(channel, msg)
13,
3,
13,
2,
1348148442
] |
def checkForMessages(self):
    # Non-blocking poll (timeout 0) of the IRC socket; when data is
    # waiting, read up to 8000 bytes and hand them to printOutput.
    read_list, write_list, error_list = \
        select.select([sock], [], [sock], 0)
    if sock in read_list:
        raw = sock.recv(8000)
        self.printOutput(raw)
13,
3,
13,
2,
1348148442
] |
def printOutput(self, text):
if '\r' in text:
text = text.replace('\r', '\n')
lines = text.split('\n')
for one_line in lines:
if len(one_line) > 0:
print self.formatLine(one_line) | sparkslabs/kamaelia_ | [
13,
3,
13,
2,
1348148442
] |
def __init__(self, request_validator):
    # Bearer verifier with no token-creation hooks: this endpoint only
    # validates incoming access tokens, it never issues them.
    self.bearer = BearerToken(request_validator, None, None, None)
    self.request_validator = request_validator
    BaseEndpoint.__init__(self)
2555,
477,
2555,
82,
1321744131
] |
def create_userinfo_response(self, uri, http_method='GET', body=None, headers=None):
    """Validate BearerToken and return userinfo from RequestValidator

    The UserInfo Endpoint MUST return a
    content-type header to indicate which format is being returned. The
    content-type of the HTTP response MUST be application/json if the
    response body is a text JSON object; the response body SHOULD be encoded
    using UTF-8.

    Returns a (headers, body, status) triple.  dict claims are serialized
    as JSON; str claims are passed through as an already-serialized JWT.
    Raises ServerError (HTTP 500) when the validator returns no claims,
    when JSON claims lack the mandatory "sub", or on an unknown type.
    """
    request = Request(uri, http_method, body, headers)
    # The userinfo endpoint implicitly requires the "openid" scope.
    request.scopes = ["openid"]
    self.validate_userinfo_request(request)
    claims = self.request_validator.get_userinfo_claims(request)
    if claims is None:
        log.error('Userinfo MUST have claims for %r.', request)
        raise errors.ServerError(status_code=500)
    if isinstance(claims, dict):
        resp_headers = {
            'Content-Type': 'application/json'
        }
        # The "sub" claim is mandatory in a JSON userinfo response.
        if "sub" not in claims:
            log.error('Userinfo MUST have "sub" for %r.', request)
            raise errors.ServerError(status_code=500)
        body = json.dumps(claims)
    elif isinstance(claims, str):
        # A str is assumed to already be a signed/encrypted JWT.
        resp_headers = {
            'Content-Type': 'application/jwt'
        }
        body = claims
    else:
        log.error('Userinfo return unknown response for %r.', request)
        raise errors.ServerError(status_code=500)
    log.debug('Userinfo access valid for %r.', request)
    return resp_headers, body, 200
2555,
477,
2555,
82,
1321744131
] |
def encode(self,input,errors='strict'):
    # Map each character through this codec's table-driven encoding_map.
    return codecs.charmap_encode(input,errors,encoding_map)
3,
2,
3,
4,
1476164728
] |
def decode(self,input,errors='strict'):
    # Map each byte through this codec's table-driven decoding_map.
    return codecs.charmap_decode(input,errors,decoding_map)
3,
2,
3,
4,
1476164728
] |
def getregentry():
    # Legacy codec-registry hook: (encoder, decoder, reader, writer).
    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
3,
2,
3,
4,
1476164728
] |
def seperate_warnings_by_location(warnings_data):
    """
    Warnings originate from multiple locations; tag each warning row's
    high_location field with the area ("python", "lms", ...) whose path
    regex matches its filename, or "other" when nothing matches.
    """
    # Path regexes keyed to the high-level location they identify.
    warnings_locations = {
        r".*/python\d\.\d/site-packages/.*\.py": "python",
        r".*/edx-platform/lms/.*\.py": "lms",
        r".*/edx-platform/openedx/.*\.py": "openedx",
        r".*/edx-platform/cms/.*\.py": "cms",
        r".*/edx-platform/common/.*\.py": "common",
    }
    for warnings_object in warnings_data:
        filename = warnings_object[columns_index_dict["filename"]]
        for pattern, location in warnings_locations.items():
            if re.search(pattern, filename):
                warnings_object[columns_index_dict["high_location"]] = location
                break
        else:
            # No pattern matched: the file is outside the known trees.
            warnings_object[columns_index_dict["high_location"]] = "other"
    return warnings_data
12,
19,
12,
9,
1391522577
] |
def read_warning_data(dir_path):
    """
    Aggregate the warnings from every pytest warnings JSON file found
    directly inside *dir_path* into one list of warning rows.  Files that
    lack a "warnings" key are reported on stdout and skipped.
    """
    dir_path = os.path.expanduser(dir_path)
    # Warning files are named pytest_warnings.json / pytest_warnings_<n>.json.
    name_pattern = re.compile(r"pytest_warnings_?\d*\.json")
    warnings_files = [
        entry
        for entry in os.listdir(dir_path)
        if os.path.isfile(os.path.join(dir_path, entry))
        and name_pattern.search(entry)
    ]
    warnings_data = []
    for temp_file in warnings_files:
        with io.open(os.path.expanduser(dir_path + "/" + temp_file), "r") as read_file:
            json_input = json.load(read_file)
        if "warnings" in json_input:
            warnings_data.extend(
                convert_warning_dict_to_list(warning_dict)
                for warning_dict in json_input["warnings"]
            )
        else:
            print(temp_file)
    return warnings_data
12,
19,
12,
9,
1391522577
] |
def process_warnings_json(dir_path):
    """
    Master function: aggregate every warnings file under *dir_path* into
    one compressed, categorised structure.

    flow:
    - Aggregate data from all warning files
    - Mark each warning as deprecated when its message contains the word
    - Tag each warning by originating location, then merge similar ones
    """
    warnings_data = read_warning_data(dir_path)
    message_index = columns_index_dict["message"]
    deprecated_index = columns_index_dict["deprecated"]
    for warnings_object in warnings_data:
        warnings_object[deprecated_index] = (
            "deprecated" in warnings_object[message_index]
        )
    warnings_data = seperate_warnings_by_location(warnings_data)
    return compress_similar_warnings(warnings_data)
12,
19,
12,
9,
1391522577
] |
def write_html_report(warnings_dataframe, html_path):
    """
    converts from panda dataframe to our html

    Writes a collapsible outline: category > location > warning message >
    individual file, with occurrence counts at every level.
    """
    html_path = os.path.expanduser(html_path)
    # Create the destination directory when the path contains one.
    if "/" in html_path:
        location_of_last_dir = html_path.rfind("/")
        dir_path = html_path[:location_of_last_dir]
        os.makedirs(dir_path, exist_ok=True)
    with io.open(html_path, "w") as fout:
        html_writer = HtmlOutlineWriter(fout)
        # Outer level: warning categories, largest total count first.
        category_sorted_by_count = group_and_sort_by_sumof(
            warnings_dataframe, "category", "num"
        )
        for category, group_in_category, category_count in category_sorted_by_count:
            # xss-lint: disable=python-wrap-html
            html = u'<span class="count">{category}, count: {count}</span> '.format(
                category=category, count=category_count
            )
            html_writer.start_section(html, klass=u"category")
            # Second level: high-level source location inside the category.
            locations_sorted_by_count = group_and_sort_by_sumof(
                group_in_category, "high_location", "num"
            )
            for (
                location,
                group_in_location,
                location_count,
            ) in locations_sorted_by_count:
                # xss-lint: disable=python-wrap-html
                html = u'<span class="count">{location}, count: {count}</span> '.format(
                    location=location, count=location_count
                )
                html_writer.start_section(html, klass=u"location")
                # Third level: distinct warning messages at this location.
                message_group_sorted_by_count = group_and_sort_by_sumof(
                    group_in_location, "message", "num"
                )
                for (
                    message,
                    message_group,
                    message_count,
                ) in message_group_sorted_by_count:
                    # xss-lint: disable=python-wrap-html
                    html = u'<span class="count">{warning_text}, count: {count}</span> '.format(
                        warning_text=message, count=message_count
                    )
                    html_writer.start_section(html, klass=u"warning_text")
                    # warnings_object[location][warning_text] is a list
                    # Innermost level: each individual file that raised it.
                    for _, warning in message_group.iterrows():
                        # xss-lint: disable=python-wrap-html
                        html = u'<span class="count">{warning_file_path}</span> '.format(
                            warning_file_path=warning["filename"]
                        )
                        html_writer.start_section(html, klass=u"warning")
                        # xss-lint: disable=python-wrap-html
                        html = u'<p class="lineno">lineno: {lineno}</p> '.format(
                            lineno=warning["lineno"]
                        )
                        html_writer.write(html)
                        # xss-lint: disable=python-wrap-html
                        html = u'<p class="num">num_occur: {num}</p> '.format(
                            num=warning["num"]
                        )
                        html_writer.write(html)
                        html_writer.end_section()
                    html_writer.end_section()
                html_writer.end_section()
            html_writer.end_section()
12,
19,
12,
9,
1391522577
] |
def _recursive_apply(tensors, apply_fn):
    """Helper method to recursively apply a function to structure of tensors.

    The structure of the tensors should take the form similar to fetches in
    `tf.compat.v1.Session` and includes single `Tensor`, `list`, nested
    `list`, `tuple`, `namedtuple`, or `dict`.

    Args:
      tensors: Single `Tensor`, `list`, nested `list, `tuple`, `namedtuple`,
        or `dict`.
      apply_fn: Function to apply to each `Tensor` and should return a
        `Tensor`.

    Returns:
      The modified tensors with the same structure.

    Raises:
      `TypeError` if undefined type in the tensors structure.
    """
    tensors_type = type(tensors)
    if tensors_type is ops.Tensor:
        return apply_fn(tensors)
    if isinstance(tensors, variables.Variable):
        return apply_fn(tensors.value())
    if isinstance(tensors, (list, tuple)):
        mapped = [_recursive_apply(t, apply_fn) for t in tensors]
        if tensors_type is list:
            return mapped
        if tensors_type is tuple:
            return tuple(mapped)
        return tensors_type(*mapped)  # collections.namedtuple
    if tensors_type is dict:
        return {k: _recursive_apply(v, apply_fn) for k, v in tensors.items()}
    raise TypeError(f'_recursive_apply argument {tensors!r} has invalid type '
                    f'{tensors_type!r}')
171949,
87931,
171949,
2300,
1446859160
] |
def __init__(self):
    # Memoised control-output maps, keyed by the graph they were
    # computed for.
    self.cache = {}
171949,
87931,
171949,
2300,
1446859160
] |
def get_control_outputs(self, op):
    """Return the control outputs for a given op.

    The per-graph control-output map is computed lazily on first access
    and memoised in self.cache.

    Args:
      op: The op to fetch control outputs for.

    Returns:
      Iterable of control output ops.
    """
    try:
        control_outputs = self.cache[op.graph]
    except KeyError:
        control_outputs = self.calc_control_outputs(op.graph)
        self.cache[op.graph] = control_outputs
    return control_outputs.get(op, [])
171949,
87931,
171949,
2300,
1446859160
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.