function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def DialogProgressBG_Create(self, *args, **kwargs):
    """Create a background progress dialog and return its handle (the object id)."""
    dialog = xbmcgui.DialogProgressBG()
    handle = id(dialog)
    # Keep a reference so the dialog survives until explicitly closed.
    self._objects[handle] = dialog
    dialog.create(*args, **kwargs)
    return handle
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def DialogProgressBG_Update(self, hwnd, *args, **kwargs):
    """Forward an update() call to the dialog registered under *hwnd*."""
    dialog = self._objects[hwnd]
    return dialog.update(*args, **kwargs)
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def OverlayStatus_Create(self):
    """Create an OverlayText widget, register it, and return its handle."""
    widget = OverlayText()
    handle = id(widget)
    self._objects[handle] = widget
    return handle
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def OverlayStatus_Show(self, hwnd):
    """Show the overlay registered under *hwnd*."""
    overlay = self._objects[hwnd]
    overlay.show()
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def OverlayStatus_Close(self, hwnd):
    """Hide and forget the overlay registered under *hwnd*."""
    # pop() both retrieves and unregisters the overlay in one step.
    self._objects.pop(hwnd).hide()
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def check_rec(self, rec, file):
    """Store *rec* and the text read from *file*, then run the attribute checks."""
    self.rec = rec
    self.txt = file.read()
    self.check_attrs()
thinkle/gourmet
[ 341, 141, 341, 274, 1355706416 ]
def get_multiple_exporter(self, args):
    """Build a gourmet-XML exporter for the recipe table described in *args*."""
    rd = args['rd']
    rv = args['rv']
    outfile = args['file']
    return gxml2_exporter.recipe_table_to_xml(rd, rv, outfile)
thinkle/gourmet
[ 341, 141, 341, 274, 1355706416 ]
def run_extra_prefs_dialog(self):
    """Hook for exporter-specific preference dialogs; this exporter has none."""
    return None
thinkle/gourmet
[ 341, 141, 341, 274, 1355706416 ]
def main(argv):
    """Parse galeranotify options and e-mail a Galera cluster-change notification.

    argv: the argument vector (sys.argv[1:]).  Exits 0 on success, 2 on a
    usage error, 1 when the notification e-mail cannot be sent.
    NOTE(review): Python 2 syntax throughout (print statement, `except ..., e`).
    """
    str_status = ''
    str_uuid = ''
    str_primary = ''
    str_members = ''
    str_index = ''
    message = ''
    # NOTE(review): the str_* locals and `message` above are never used below.
    usage = "Usage: " + os.path.basename(sys.argv[0]) + " --status <status str>"
    usage += " --uuid <state UUID> --primary <yes/no> --members <comma-seperated"
    usage += " list of the component member UUIDs> --index <n>"
    try:
        opts, args = getopt.getopt(argv, "h", ["status=","uuid=",'primary=','members=','index='])
    except getopt.GetoptError:
        print usage
        sys.exit(2)
    if(len(opts) > 0):
        # Accumulate every reported field on one status object, then mail it.
        message_obj = GaleraStatus(THIS_SERVER)
        for opt, arg in opts:
            if opt == '-h':
                print usage
                sys.exit()
            elif opt in ("--status"):
                # NOTE(review): ("--status") is a plain string, so `in` is a
                # substring test rather than tuple membership; works, but fragile.
                message_obj.set_status(arg)
            elif opt in ("--uuid"):
                message_obj.set_uuid(arg)
            elif opt in ("--primary"):
                message_obj.set_primary(arg)
            elif opt in ("--members"):
                message_obj.set_members(arg)
            elif opt in ("--index"):
                message_obj.set_index(arg)
        try:
            send_notification(MAIL_FROM, MAIL_TO, 'Galera Notification: ' + THIS_SERVER, DATE,
                str(message_obj), SMTP_SERVER, SMTP_PORT, SMTP_SSL, SMTP_AUTH,
                SMTP_USERNAME, SMTP_PASSWORD)
        except Exception, e:
            print "Unable to send notification: %s" % e
            sys.exit(1)
    else:
        print usage
        sys.exit(2)
    sys.exit(0)
gguillen/galeranotify
[ 45, 18, 45, 2, 1380219192 ]
def __init__(self, server):
    """Hold the Galera cluster-change fields reported for *server*."""
    self._server = server
    self._status = ""
    self._uuid = ""
    self._primary = ""
    self._members = ""
    self._index = ""
    # Number of set_* calls recorded; used to pluralize "change(s)" in __str__.
    self._count = 0
gguillen/galeranotify
[ 45, 18, 45, 2, 1380219192 ]
def set_uuid(self, uuid):
    """Record the cluster state UUID and count it as one reported change."""
    self._count += 1
    self._uuid = uuid
gguillen/galeranotify
[ 45, 18, 45, 2, 1380219192 ]
def set_members(self, members):
    """Split the comma-separated member list and count it as one change."""
    self._count += 1
    self._members = members.split(',')
gguillen/galeranotify
[ 45, 18, 45, 2, 1380219192 ]
def __str__(self):
    """Render the accumulated cluster changes as a human-readable e-mail body."""
    message = "Galera running on " + self._server + " has reported the following"
    message += " cluster membership change"
    # Pluralize when more than one set_* call was recorded.
    if(self._count > 1):
        message += "s"
    message += ":\n\n"
    if(self._status):
        message += "Status of this node: " + self._status + "\n\n"
    if(self._uuid):
        message += "Cluster state UUID: " + self._uuid + "\n\n"
    if(self._primary):
        message += "Current cluster component is primary: " + self._primary + "\n\n"
    if(self._members):
        message += "Current members of the component:\n"
        if(self._index):
            # Mark this node's own entry with "->", all others with "--".
            for i in range(len(self._members)):
                if(i == int(self._index)):
                    message += "-> "
                else:
                    message += "-- "
                message += self._members[i] + "\n"
        else:
            message += "\n".join((" " + str(x)) for x in self._members)
        message += "\n"
    if(self._index):
        message += "Index of this node in the member list: " + self._index + "\n"
    return message
gguillen/galeranotify
[ 45, 18, 45, 2, 1380219192 ]
def __init__(self, *args, **kwargs):
    # Disable unittest's diff truncation so large event-list mismatches are
    # shown in full, then delegate to the TestCase constructor.
    self.maxDiff = None
    super(TestInotify, self).__init__(*args, **kwargs)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def test__international_naming_python3(self):
    """Non-ASCII directory names must round-trip through inotify events (Py3)."""
    with inotify.test_support.temp_path() as path:
        inner_path = os.path.join(path, '新增資料夾')
        os.mkdir(inner_path)
        i = inotify.adapters.Inotify()
        i.add_watch(inner_path)
        with open(os.path.join(inner_path, 'filename'), 'w'):
            pass
        events = self.__read_all_events(i)
        expected = [
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=256, cookie=0, len=16), ['IN_CREATE'], inner_path, 'filename'),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=32, cookie=0, len=16), ['IN_OPEN'], inner_path, 'filename'),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=8, cookie=0, len=16), ['IN_CLOSE_WRITE'], inner_path, 'filename'),
        ]
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(events, expected)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def test__international_naming_python2(self):
    """Non-ASCII unicode names must round-trip through inotify events (Py2)."""
    with inotify.test_support.temp_path() as path:
        inner_path = os.path.join(unicode(path), u'新增資料夾')
        os.mkdir(inner_path)
        i = inotify.adapters.Inotify()
        i.add_watch(inner_path)
        with open(os.path.join(inner_path, u'filename料夾'), 'w'):
            pass
        events = self.__read_all_events(i)
        expected = [
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=256, cookie=0, len=16), ['IN_CREATE'], inner_path, u'filename料夾'),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=32, cookie=0, len=16), ['IN_OPEN'], inner_path, u'filename料夾'),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=8, cookie=0, len=16), ['IN_CLOSE_WRITE'], inner_path, u'filename料夾'),
        ]
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(events, expected)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def _open_write_close(*args):
    """Create (touch) the file at the joined path components."""
    target = os.path.join(*args)
    with open(target, 'w'):
        pass
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def _make_temp_path(*args):
    """Join the components, create the directory, and return its path."""
    new_dir = os.path.join(*args)
    os.mkdir(new_dir)
    return new_dir
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def _event_general(wd, mask, type_name, path, filename):
    """Build the expected (header, type-names, path, filename) event tuple."""
    header = inotify.adapters._INOTIFY_EVENT(wd=wd, mask=mask, cookie=0, len=16)
    return (header, [type_name], path, filename)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def _event_create(wd, path, filename):
    """Expected IN_CREATE event tuple (mask 0x100 == 256)."""
    return TestInotify._event_general(wd, 0x100, 'IN_CREATE', path, filename)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def _event_open(wd, path, filename):
    """Expected IN_OPEN event tuple (mask 0x20 == 32)."""
    return TestInotify._event_general(wd, 0x20, 'IN_OPEN', path, filename)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def _event_close_write(wd, path, filename):
    """Expected IN_CLOSE_WRITE event tuple (mask 0x8)."""
    return TestInotify._event_general(wd, 0x8, 'IN_CLOSE_WRITE', path, filename)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def test__error_on_watch_nonexistent_folder(self):
    """add_watch() on a path that cannot be a directory must raise InotifyError."""
    watcher = inotify.adapters.Inotify()
    with self.assertRaises(inotify.calls.InotifyError):
        watcher.add_watch('/dev/null/foo')
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def __init__(self, *args, **kwargs):
    # Disable unittest's diff truncation for the large expected-event lists,
    # then delegate to the TestCase constructor.
    self.maxDiff = None
    super(TestInotifyTree, self).__init__(*args, **kwargs)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def test__cycle(self):
    """A full create/delete cycle under InotifyTree yields the expected events."""
    with inotify.test_support.temp_path() as path:
        path1 = os.path.join(path, 'aa')
        os.mkdir(path1)
        path2 = os.path.join(path, 'bb')
        os.mkdir(path2)
        i = inotify.adapters.InotifyTree(path)
        # BUG FIX: the first file must be created inside *path* (it is removed
        # from there below and its events are expected against *path*); the
        # original opened it in the current working directory.
        with open(os.path.join(path, 'seen_new_file1'), 'w'):
            pass
        with open(os.path.join(path1, 'seen_new_file2'), 'w'):
            pass
        with open(os.path.join(path2, 'seen_new_file3'), 'w'):
            pass
        os.remove(os.path.join(path, 'seen_new_file1'))
        os.remove(os.path.join(path1, 'seen_new_file2'))
        os.remove(os.path.join(path2, 'seen_new_file3'))
        os.rmdir(path1)
        os.rmdir(path2)
        events = self.__read_all_events(i)
        expected = [
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=256, cookie=0, len=16), ['IN_CREATE'], path, 'seen_new_file1'),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=32, cookie=0, len=16), ['IN_OPEN'], path, 'seen_new_file1'),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=8, cookie=0, len=16), ['IN_CLOSE_WRITE'], path, 'seen_new_file1'),
            (inotify.adapters._INOTIFY_EVENT(wd=2, mask=256, cookie=0, len=16), ['IN_CREATE'], path1, 'seen_new_file2'),
            (inotify.adapters._INOTIFY_EVENT(wd=2, mask=32, cookie=0, len=16), ['IN_OPEN'], path1, 'seen_new_file2'),
            (inotify.adapters._INOTIFY_EVENT(wd=2, mask=8, cookie=0, len=16), ['IN_CLOSE_WRITE'], path1, 'seen_new_file2'),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=256, cookie=0, len=16), ['IN_CREATE'], path2, 'seen_new_file3'),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=32, cookie=0, len=16), ['IN_OPEN'], path2, 'seen_new_file3'),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=8, cookie=0, len=16), ['IN_CLOSE_WRITE'], path2, 'seen_new_file3'),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=512, cookie=0, len=16), ['IN_DELETE'], path, 'seen_new_file1'),
            (inotify.adapters._INOTIFY_EVENT(wd=2, mask=512, cookie=0, len=16), ['IN_DELETE'], path1, 'seen_new_file2'),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=512, cookie=0, len=16), ['IN_DELETE'], path2, 'seen_new_file3'),
            (inotify.adapters._INOTIFY_EVENT(wd=2, mask=1024, cookie=0, len=0), ['IN_DELETE_SELF'], path1, ''),
            (inotify.adapters._INOTIFY_EVENT(wd=2, mask=32768, cookie=0, len=0), ['IN_IGNORED'], path1, ''),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=1073742336, cookie=0, len=16), ['IN_ISDIR', 'IN_DELETE'], path, 'aa'),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=1024, cookie=0, len=0), ['IN_DELETE_SELF'], path2, ''),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=32768, cookie=0, len=0), ['IN_IGNORED'], path2, ''),
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=1073742336, cookie=0, len=16), ['IN_ISDIR', 'IN_DELETE'], path, 'bb'),
        ]
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(events, expected)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def test__automatic_new_watches_on_new_paths(self):
    # Tests that watches are actively established as new folders are
    # created.
    with inotify.test_support.temp_path() as path:
        i = inotify.adapters.InotifyTree(path)
        path1 = os.path.join(path, 'folder1')
        path2 = os.path.join(path1, 'folder2')
        os.mkdir(path1)
        events = self.__read_all_events(i)
        expected = [
            (inotify.adapters._INOTIFY_EVENT(wd=1, mask=1073742080, cookie=0, len=16), ['IN_ISDIR', 'IN_CREATE'], path, 'folder1'),
        ]
        # assertEquals is a deprecated alias; use assertEqual throughout.
        self.assertEqual(events, expected)
        os.mkdir(path2)
        events = self.__read_all_events(i)
        expected = [
            (inotify.adapters._INOTIFY_EVENT(wd=2, mask=1073742080, cookie=0, len=16), ['IN_ISDIR', 'IN_CREATE'], path1, 'folder2'),
        ]
        self.assertEqual(events, expected)
        with open(os.path.join(path2, 'filename'), 'w'):
            pass
        events = self.__read_all_events(i)
        expected = [
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=256, cookie=0, len=16), ['IN_CREATE'], path2, 'filename'),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=32, cookie=0, len=16), ['IN_OPEN'], path2, 'filename'),
            (inotify.adapters._INOTIFY_EVENT(wd=3, mask=8, cookie=0, len=16), ['IN_CLOSE_WRITE'], path2, 'filename'),
        ]
        self.assertEqual(events, expected)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def __init__(self, *args, **kwargs):
    # Disable unittest's diff truncation, then delegate to TestCase.
    self.maxDiff = None
    super(TestInotifyTrees, self).__init__(*args, **kwargs)
dsoprea/PyInotify
[ 220, 69, 220, 38, 1401343364 ]
def _pln_pdf(x, alpha, nu, tau2):
    """Density of the PLN (power-law x lognormal) distribution at *x*."""
    amplitude = np.exp(alpha * nu + alpha ** 2 * tau2 / 2)
    power_tail = alpha * amplitude * x ** (-alpha - 1)
    z = (np.log(x) - nu - alpha * tau2) / np.sqrt(tau2)
    return power_tail * norm.cdf(z)
Astroua/plndist
[ 3, 2, 3, 1, 1406562833 ]
def _pln_logpdf(x, alpha, nu, tau2):
    """Log-density of the PLN distribution (log of _pln_pdf)."""
    z = (np.log(x) - nu - alpha * tau2) / np.sqrt(tau2)
    log_tail = (np.log(alpha) + alpha * nu + alpha * tau2 / 2
                - (alpha + 1) * np.log(x))
    return log_tail + norm.logcdf(z)
Astroua/plndist
[ 3, 2, 3, 1, 1406562833 ]
def _pdf(self, x, alpha, nu, tau2):
    # Delegate to the module-level PLN density; presumably the rv_continuous
    # _pdf hook of a scipy.stats distribution subclass — confirm with the class.
    return _pln_pdf(x, alpha, nu, tau2)
Astroua/plndist
[ 3, 2, 3, 1, 1406562833 ]
def _cdf(self, x, alpha, nu, tau2):
    # Delegate to the module-level PLN CDF (defined elsewhere in this module).
    return _pln_cdf(x, alpha, nu, tau2)
Astroua/plndist
[ 3, 2, 3, 1, 1406562833 ]
def writePolys(pl, f):
    """Serialize a list of polygons to file *f* as nested JSON-style arrays.

    Output form: [[[ra1, de1],[ra2, de2],...], [[ra1, de1],...]] with
    8-decimal fixed-point coordinates.
    """
    serialized_polys = []
    for poly in pl:
        verts = ', '.join('[%.8f, %.8f]' % (v[0], v[1]) for v in poly)
        serialized_polys.append('[' + verts + ']')
    f.write('[' + ', '.join(serialized_polys) + ']')
Stellarium/stellarium
[ 4600, 642, 4600, 345, 1397356916 ]
def __init__(self):
    """Credit fields, all unset (None) until filled in by the caller."""
    self.short = None    # short credit line
    self.full = None     # full credit text
    self.infoUrl = None  # URL pointing at more information
    return
Stellarium/stellarium
[ 4600, 642, 4600, 345, 1397356916 ]
def __init__(self):
    """Tile description: empty sub-tile list, credit structs, unset image fields."""
    self.subTiles = []
    self.imageCredits = StructCredits()
    self.serverCredits = StructCredits()
    self.imageInfo = StructCredits()
    self.imageUrl = None
    self.alphaBlend = None
    self.maxBrightness = None
    return
Stellarium/stellarium
[ 4600, 642, 4600, 345, 1397356916 ]
def config_get(config, section, option, desc):
    """Fetch *section*.*option* from *config*, failing loudly when it is absent.

    desc: human-readable description used in the error message.
    Raises AssertionError when the option is not configured.
    """
    val = config.get(section, option, fallback=None)
    if val is None:
        # An `assert` statement would be stripped under `python -O`; raise the
        # same exception type explicitly so the check always runs.
        raise AssertionError(
            "Configuration not specified for %s.%s (%s)" % (section, option, desc))
    return val
ducky64/labelmaker
[ 6, 4, 6, 2, 1446095708 ]
def __init__(self, option, name=None, parent=None, url=None, api=None):
    """Wiki stats group: connect an XML-RPC proxy at *url* + *api* endpoint."""
    self.url = url
    self.api = api or DEFAULT_API
    self.changes = 0  # number of page changes counted so far
    self.proxy = xmlrpc.client.ServerProxy("{0}{1}".format(url, self.api))
    Stats.__init__(self, option, name, parent)
psss/did
[ 202, 83, 202, 59, 1429523658 ]
def header(self):
    """Show the wiki summary header: "<name>: x changes of y pages"."""
    change_count = self.changes
    page_count = len(self.stats)
    summary = "{0}: {1} change{2} of {3} page{4}".format(
        self.name,
        change_count, "" if change_count == 1 else "s",
        page_count, "" if page_count == 1 else "s")
    item(summary, level=0, options=self.options)
psss/did
[ 202, 83, 202, 59, 1429523658 ]
def hosts(request):
    """List the NFS host names known to DeepSea."""
    host_names = DeepSea.instance().nfs_get_hosts()
    return Response({'hosts': host_names})
openattic/openattic
[ 55, 9, 55, 5, 1498814953 ]
def fsals(request):
    """Return the NFS FSALs that are actually usable on this cluster.

    Starts from DeepSea's available FSALs and removes CEPH / RGW when their
    backing services are unavailable or not usable with our credentials.
    """
    def without(lst, name):
        # Drop one FSAL from the candidate list.
        return [f for f in lst if f != name]

    res = DeepSea.instance().nfs_get_fsals_available()
    if 'CEPH' in res:
        if not CephFSUtil:
            res = without(res, 'CEPH')
        else:
            cluster = FsidContext(request=request, module_name='ceph_nfs').cluster
            try:
                if not CephFSUtil.instance(cluster).status():
                    res = without(res, 'CEPH')
            except libcephfs.PermissionError:
                res = without(res, 'CEPH')
    if 'RGW' in res:
        try:
            if not RGWClient.admin_instance().is_service_online():
                res = without(res, 'RGW')
            if not RGWClient.admin_instance().is_system_user():
                res = without(res, 'RGW')
        except (RGWClient.NoCredentialsException, RequestException):
            res = without(res, 'RGW')
    return Response({'fsals': res})
openattic/openattic
[ 55, 9, 55, 5, 1498814953 ]
def status(request):
    """Return the current NFS export status as reported by DeepSea."""
    exports_status = DeepSea.instance().nfs_status_exports()
    return Response(exports_status)
openattic/openattic
[ 55, 9, 55, 5, 1498814953 ]
def deploy(request):
    """Schedule an asynchronous deployment of NFS exports, optionally per host."""
    if 'host' in request.DATA:
        my_task = tasks.async_deploy_exports.delay(request.DATA['host'])
    else:
        my_task = tasks.async_deploy_exports.delay()
    logger.info("Scheduled deploy of NFS exports: taskqueue_id=%s", my_task.id)
    return Response({'taskqueue_id': my_task.id})
openattic/openattic
[ 55, 9, 55, 5, 1498814953 ]
def stop(request):
    """Schedule an asynchronous stop of NFS exports, optionally per host."""
    if 'host' in request.DATA:
        target_host = request.DATA['host']
        my_task = tasks.async_stop_exports.delay(target_host)
        logger.info("Scheduled stop of NFS exports for host=%s: taskqueue_id=%s",
                    target_host, my_task.id)
    else:
        my_task = tasks.async_stop_exports.delay()
        logger.info("Scheduled stop of NFS exports: taskqueue_id=%s", my_task.id)
    return Response({'taskqueue_id': my_task.id})
openattic/openattic
[ 55, 9, 55, 5, 1498814953 ]
def ls_dir(request):
    """List CephFS directories under GET['root_dir'] down to GET['depth'] levels.

    depth defaults to 1 and is clamped to 5.  Returns {'paths': [...]};
    an empty list when the root is missing or unreadable.
    """
    if 'root_dir' in request.GET:
        root = request.GET['root_dir']
    else:
        root = "/"
    if 'depth' in request.GET:
        depth = int(request.GET['depth'])
    else:
        depth = 1
    if depth > 5:
        logger.warning("Limiting depth to maximum value of 5: input depth=%s", depth)
        depth = 5
    # Normalize to a trailing slash so get_dir_list treats it as a directory.
    root = '{}/'.format(root) if not root.endswith('/') else root
    try:
        cluster = FsidContext(request=request, module_name='ceph_nfs').cluster
        paths = CephFSUtil.instance(cluster).get_dir_list(root, depth)
        paths = [p[:-1] for p in paths if p != root]
        return Response({'paths': paths})
    # BUG FIX: the Python-2 form `except A, B:` caught only ObjectNotFound and
    # *rebound* libcephfs.PermissionError to the exception instance; a tuple
    # is needed to catch both exception types.
    except (libcephfs.ObjectNotFound, libcephfs.PermissionError):
        return Response({'paths': []})
openattic/openattic
[ 55, 9, 55, 5, 1498814953 ]
def init_monster(self):
    """Populate the level list with its inhabitant: a rank-1 Beast."""
    beast = monsters.base.Beast(1, self)
    self.levels.append(beast)
jwvhewitt/dmeternal
[ 54, 12, 54, 15, 1381198371 ]
def custom_init(self, nart):
    """Stock a room with a hostile Azer, a healing fountain and a stocked chest."""
    scene = self.elements.get("LOCALE")
    guard_team = teams.Team(default_reaction=-999, rank=self.rank,
                            strength=0, habitat=None)
    lair = randmaps.rooms.FuzzyRoom()
    lair.contents.append(guard_team)
    lair.contents.append(monsters.ignan.Azer(guard_team))
    lair.contents.append(waypoints.HealingFountain())
    chest = waypoints.MediumChest()
    chest.stock(20)
    lair.contents.append(chest)
    self.register_element("_ROOM", lair, dident="LOCALE")
    return True
jwvhewitt/dmeternal
[ 54, 12, 54, 15, 1381198371 ]
def matches(self, pstate):
    """Requires a LOCALE scene tagged as a dungeon map."""
    locale = pstate.elements.get("LOCALE")
    return locale and context.MAP_DUNGEON in locale.desctags
jwvhewitt/dmeternal
[ 54, 12, 54, 15, 1381198371 ]
def matches(self, pstate):
    """Requires a LOCALE scene tagged as a dungeon map."""
    locale = pstate.elements.get("LOCALE")
    return locale and context.MAP_DUNGEON in locale.desctags
jwvhewitt/dmeternal
[ 54, 12, 54, 15, 1381198371 ]
def matches(self, pstate):
    """Requires a dungeon LOCALE scene and a plot rank above 1."""
    locale = pstate.elements.get("LOCALE")
    return (locale and pstate.rank > 1
            and context.MAP_DUNGEON in locale.desctags)
jwvhewitt/dmeternal
[ 54, 12, 54, 15, 1381198371 ]
def matches(self, pstate):
    """Requires a LOCALE scene tagged as a wilderness map."""
    locale = pstate.elements.get("LOCALE")
    return locale and context.MAP_WILDERNESS in locale.desctags
jwvhewitt/dmeternal
[ 54, 12, 54, 15, 1381198371 ]
def matches(self, pstate):
    """Requires the LOCALE scene to exist and be tagged wilderness."""
    locale = pstate.elements.get("LOCALE")
    return locale and context.MAP_WILDERNESS in locale.desctags
jwvhewitt/dmeternal
[ 54, 12, 54, 15, 1381198371 ]
def __init__(self, get_response):
    # Standard middleware constructor: keep the next handler in the chain.
    self.get_response = get_response
briot/geneapro
[ 31, 8, 31, 10, 1230083345 ]
def get_settings():
    """Parse and return the command-line settings for date-range filtering."""
    argp = argparse.ArgumentParser(
        description='Filter text corpus by date range. Only updates the metadata file.')
    argp.add_argument('-i', dest='input', required=True,
                      help='Input CSV of metadata describing files')
    argp.add_argument('-o', dest='output', required=True,
                      help='Output CSV for filtered results')
    argp.add_argument('-s', '--start', dest='start',
                      help='Start date, YYYY-MM-DD format')
    argp.add_argument('-e', '--end', dest='end',
                      help='End date, YYYY-MM-DD format')
    return argp.parse_args()
mwidner/WebArchiveTextTools
[ 3, 2, 3, 3, 1447367018 ]
def main():
    """Read the metadata CSV, filter rows by the configured dates, write results."""
    global settings
    settings = get_settings()
    rows = metadata.read_csv(settings.input)
    kept = filter_dates(rows, settings.start, settings.end)
    metadata.write_csv(settings.output, kept)
mwidner/WebArchiveTextTools
[ 3, 2, 3, 3, 1447367018 ]
def read_gr(regno):
    """Return (value, NaT bit) of general register *regno* on the current CPU."""
    cpu = SIM_current_processor()
    value = SIM_read_register(cpu, r0 + regno)
    nat = SIM_read_register(cpu, nat0 + regno)
    return value, nat
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def sread_register(reg): val = read_register(reg) if val & 0x8000000000000000L: val -= 0x10000000000000000L return val
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def linux_read_bytes(cpu, address, size):
    # Simulator memory-space slice: [[start, end]] appears to return the
    # inclusive physical byte range — confirm against the simulator API.
    return cpu.physical_memory.memory[[address, address + size - 1]]
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def linux_read_word(cpu, address):
    """Read a little-endian 32-bit word from physical memory at *address*."""
    raw = linux_read_bytes(cpu, address, 4)
    return sum(raw[i] << (8 * i) for i in range(4))
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def linux_read_string(cpu, address, maxlen):
    """Read a NUL-terminated string at virtual *address*, up to *maxlen* chars.

    Appends "..." when truncated at maxlen and "???" when the address cannot
    be translated or read.
    """
    s = ""
    try:
        while len(s) < maxlen:
            p = SIM_logical_to_physical(cpu, 1, address)
            c = SIM_read_phys_memory(cpu, p, 1)
            if c == 0:
                return s
            # BUG FIX: the original called the undefined name `char()`.
            s += chr(c)
            # BUG FIX: the original never advanced the address, so it re-read
            # the same byte until maxlen.
            address += 1
        s += "..."
    except:
        s += "???"
    return s
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def format_stringbuf(regno):
    """Format a (pointer, length) syscall argument pair starting at r<regno>.

    Renders up to ~64 bytes of the buffer as `0xADDR = "..."`, escaping
    control characters; stops early when a page cannot be translated.
    """
    cpu = SIM_current_processor()
    va = SIM_read_register(cpu, r0 + regno)
    # NOTE(review): `len` shadows the builtin; kept as-is here.
    len = SIM_read_register(cpu, r0 + regno + 1)
    s = "0x%x = \"" % va
    for i in xrange(0, len):
        # Cap the dump at 64 characters.
        if i > 64:
            return s + "\" ..."
        try:
            pa = SIM_logical_to_physical(cpu, 1, va + i)
        except:
            # Page not mapped: stop here rather than fault.
            return s + "\" ..."
        b = linux_read_byte(cpu, pa)
        if b == 9:
            s += "\\t"
        elif b == 10:
            s += "\\n"
        elif b == 13:
            s += "\\r"
        elif b == 92:
            s += "\\\\"
        elif b >= 32 and b < 127:
            s += chr(b)
        else:
            s += "<%02x>" % b
    return s + "\""
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def fmt_wait4_ret(ignored_regno):
    """Format the return of wait4(): the pid plus a decoded *status word."""
    try:
        cpu = SIM_current_processor()
        s = "%d" % SIM_read_register(cpu, r0 + 8)
        statusp = SIM_read_register(cpu, r0 + 33)
        # NOTE(review): rusagep is read but never used below.
        rusagep = SIM_read_register(cpu, r0 + 35)
        if statusp != 0:
            try:
                statusp = SIM_logical_to_physical(cpu, 1, statusp)
                status = SIM_read_phys_memory(cpu, statusp, 2)
                # High byte of the wait status is the exit code.
                s += " status: %d" % ((status & 0xff00) >> 8)
                if status & 0xf7:
                    # NOTE(review): the conventional wait-status signal mask is
                    # 0x7f; 0xf7 here may be a transposition typo — confirm.
                    s += " signal(%d)" % (status & 0xf7)
            except:
                s += " status: <not in tlb>"
        return s
    except:
        traceback.print_exc()
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def fmt_swapflags(regno):
    """Format swap flags from register *regno*: priority plus optional PREFER."""
    cpu = SIM_current_processor()
    flags = SIM_read_register(cpu, regno)
    text = "%d" % (flags & 0x7fff)
    if flags & 0x8000:
        text += "|PREFER"
    return text
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def current_task(cpu):
    """Return the task pointer kept in register ar.k6 (used as the task id here)."""
    task_ptr = SIM_read_register(cpu, ar_kr6)
    return task_ptr
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def current_process(cpu, task=None):
    """Return (pid, comm) of *task* (default: the current task), or (None, None).

    Falls back to (None, None) when the task struct is not in readable memory.
    """
    if not task:
        task = SIM_read_register(cpu, ar_kr6)
    try:
        pid = SIM_read_phys_memory(cpu, task + task_pid_offset, 4)
        raw_name = linux_read_bytes(cpu, task + task_name_offset, 16)
        chars = []
        for byte in raw_name:
            if byte == 0:
                # comm is NUL-terminated within its 16-byte field.
                break
            chars.append(chr(byte))
        return pid, "".join(chars)
    except sim_core.SimExc_Memory:
        return None, None
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def string_argument(regno):
    """Format general register *regno* as a C-string syscall argument.

    Follows the pointer and escapes control bytes; returns the raw pointer
    value when the memory is unmapped, or "NaT" for a NaT register.
    """
    cpu = SIM_current_processor()
    va, nat = read_gr(regno)
    if nat:
        return "NaT"
    s = "\""
    for i in xrange(0, 64):
        try:
            pa = SIM_logical_to_physical(cpu, 1, va + i)
        except:
            return "0x%x" % va
        b = linux_read_byte(cpu, pa)
        if b == 0:
            return s + "\""
        elif b == 9:
            s += "\\t"
        elif b == 10:
            s += "\\n"
        elif b == 13:
            s += "\\r"
        elif b >= 32:
            s += chr(b)
        else:
            # BUG FIX: the original appended the literal "<%02x>" without
            # interpolating the byte value.
            s += "<%02x>" % b
    return s + "\""
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def uint_argument(regno):
    """Format general register *regno* as an unsigned decimal, or "NaT"."""
    value, nat = read_gr(regno)
    return "NaT" if nat else "%d" % value
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def format_reg(regno, fmt):
    """Format register *regno* per *fmt*: 'd'/'u'/'x'/'s' or a callable formatter.

    Returns a placeholder string when the register index is unreadable;
    raises ValueError for an unknown format element.
    """
    try:
        if fmt == 'd':
            return int_argument(regno)
        if fmt == 'u':
            return uint_argument(regno)
        if fmt == 'x':
            return hex_argument(regno)
        if fmt == 's':
            return string_argument(regno)
        return fmt(regno)
    except sim_core.SimExc_Index:
        return "<can't read r%d>" % regno
    except TypeError:
        traceback.print_exc()
        # BUG FIX: raising a plain string is invalid on Python >= 2.6 (it
        # itself raises TypeError); raise a proper exception instead.
        raise ValueError("Unknown format element: %s" % fmt)
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def post_syscall_hap(sc, obj, type, bp_id, dummy1, dummy2):
    """Breakpoint hap fired at a syscall's return: print the traced call.

    sc: (name, params, retfmt, task) tuple captured at syscall entry.
    The one-shot breakpoint and this callback are torn down on the first hit
    by the task that installed them.
    """
    name, params, retfmt, task = sc
    cpu = SIM_current_processor()
    # Same context?
    if task != current_task(cpu):
        # A different task reached the return address; keep waiting for ours.
        return
    # print "post_syscall_hap(%s, %s, %s, %s, %s)" % (sc, type, bp_id, dummy1, dummy2)
    SIM_delete_breakpoint(bp_id)
    SIM_hap_delete_callback("Core_Breakpoint", post_syscall_hap, sc);
    del hap_data[bp_id]
    if not retfmt:
        ret = "??"
    else:
        # Return value convention here: register 8 holds the result.
        ret = format_reg(8, retfmt)
    pid, comm = current_process(cpu)
    print "[%s] %d [%d:%s] %s(%s) -> %s" % (cpu.name, SIM_cycle_count(cpu), pid, comm, name, format_params(params), ret)
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def install_syscall_callback(syscall, fn):
    """Register *fn* as an extra callback for *syscall* in the trace table.

    Slot 2 of a syscall entry holds the callback list; it is created on
    first use.
    """
    entry = linux_syscalls[syscall]
    if len(entry) > 2:
        entry[2].append(fn)
    else:
        entry.append([fn])
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def syscall_trace_cmd(mode, incl, excl):
    """Configure syscall tracing.

    mode: "off", "enter", "exit" or "both" — which side(s) of a syscall to trace.
    incl/excl: comma-separated pid lists to include or exclude.
    """
    global pre_syscall, post_syscall, pids, pid_default
    if mode == "enter" or mode == "both":
        pre_syscall = 1
    else:
        pre_syscall = 0
    if mode == "exit" or mode == "both":
        post_syscall = 1
    else:
        post_syscall = 0
    # pids maps pid -> 1 (trace) / 0 (skip); pid_default covers unlisted pids.
    pids = {}
    try:
        if incl:
            for k in incl.split(","):
                pids[int(k)] = 1
        if excl:
            for k in excl.split(","):
                pids[int(k)] = 0
    except:
        print "Bad pid list"
    if incl and excl:
        print "Redundant use of incl"
    # With an include list, unlisted pids default to not-traced.
    if incl:
        pid_default = 0
    else:
        pid_default = 1
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def syscall_mode_expander(comp):
    """Tab-completion helper offering the valid trace mode values."""
    valid_modes = ["off", "enter", "exit", "both"]
    return get_completions(comp, valid_modes)
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def ptwalk(addr):
    """Compute the 3-level page-table indices for virtual address *addr*.

    Uses the region register's page-size field to derive the index widths.
    NOTE(review): the pgd/pmd/ptd indices are computed but never dereferenced
    or returned — the function looks unfinished.
    """
    cpu,_ = get_cpu()
    # Top 3 address bits select the virtual region number.
    vrn = addr >> 61
    rr = cpu.rr[vrn]
    # Bits [7:2] of the region register hold the log2 page size.
    rr_ps = (rr>>2) & 0x3f
    pt_entries = 1L << (rr_ps - 3)
    pgd = cpu.ar[7]
    print "rr_ps: 0x%x" % rr_ps
    ptd_index = (addr >> rr_ps) & (pt_entries - 1)
    pmd_index = (addr >> (rr_ps + rr_ps-3)) & (pt_entries - 1)
    pgd_index = ((addr >> (rr_ps + rr_ps-3 + rr_ps-3)) & ((pt_entries>>3) - 1) | (vrn << (rr_ps - 6)))
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def exception_hap(data, cpu, exception):
    """Exception hap: stop on illegal instructions, decode break traps.

    Returns 0 so the simulator continues normal hap processing.
    """
    if exception == 33:
        print "Illegal instruction exception"
        SIM_break_simulation("Illegal instruction")
    elif exception == 35:
        # Delegate break-instruction faults to the dedicated handler.
        break_instruction()
    return 0
iniverno/RnR-LLC
[ 3, 1, 3, 1, 1333487040 ]
def create_installer_config(path):
    """Create a basic installation configuration file"""
    # Template pointer consumed by ister on boot.
    config = u"template=file:///etc/ister.json\n"
    # Fixed three-partition layout (EFI / swap / root) for a vda disk.
    jconfig = u'{"DestinationType" : "physical", "PartitionLayout" : \
[{"disk" : "vda", "partition" : 1, "size" : "512M", "type" : "EFI"}, \
{"disk" : "vda", "partition" : 2, \
"size" : "512M", "type" : "swap"}, {"disk" : "vda", "partition" : 3, \
"size" : "rest", "type" : "linux"}], \
"FilesystemTypes" : \
[{"disk" : "vda", "partition" : 1, "type" : "vfat"}, \
{"disk" : "vda", "partition" : 2, "type" : "swap"}, \
{"disk" : "vda", "partition" : 3, "type" : "ext4"}], \
"PartitionMountPoints" : \
[{"disk" : "vda", "partition" : 1, "mount" : "/boot"}, \
{"disk" : "vda", "partition" : 3, "mount" : "/"}], \
"Version" : 0, "Bundles" : ["kernel-native", "telemetrics", "os-core", "os-core-update"]}\n'
    if not os.path.isdir("{}/etc".format(path)):
        os.mkdir("{}/etc".format(path))
    with open("{}/etc/ister.conf".format(path), "w") as cfile:
        cfile.write(config)
    with open("{}/etc/ister.json".format(path), "w") as jfile:
        # Stamp the placeholder version with the real installer version.
        jfile.write(jconfig.replace('"Version" : 0', '"Version" : ' + INSTALLER_VERSION))
bryteise/ister
[ 26, 16, 26, 6, 1404248072 ]
def disable_tty1_getty(path):
    """Mask systemd's tty1 getty by symlinking its unit file to /dev/null."""
    wants_dir = path + "/etc/systemd/system/getty.target.wants"
    os.makedirs(wants_dir)
    os.symlink("/dev/null", wants_dir + "/getty@tty1.service")
bryteise/ister
[ 26, 16, 26, 6, 1404248072 ]
def __init__(self, mainloop, speaker, config, screen_w, screen_h):
    # Build the level logic (10 x 8 layout), then initialize the base board
    # game with a 15 x 9 board grid.
    self.level = lc.Level(self, mainloop, 10, 8)
    gd.BoardGame.__init__(self, mainloop, speaker, config, screen_w, screen_h, 15, 9)
imiolek-ireneusz/pysiogame
[ 3, 2, 3, 2, 1387212502 ]
def handle(self, event):
    """Handle a pygame event; on mouse-up, bring door sprites to the front."""
    gd.BoardGame.handle(self, event)  # send event handling up
    if event.type == pygame.MOUSEBUTTONUP:
        for unit in self.board.units:
            if unit.is_door is True:
                self.board.all_sprites_list.move_to_front(unit)
imiolek-ireneusz/pysiogame
[ 3, 2, 3, 2, 1387212502 ]
def __init__(self, iface=None):
    """Initialize the UDP layer: data buffer, lock, interface and addresses.

    iface: network interface to use; defaults to the interface routing
    0.0.0.0 (the default route).
    """
    Layer.__init__(self)
    self.data = []
    self.mutex = Lock()
    self.connectionID = None
    # BUG FIX: the original pattern `^(\d{1,3}.){3}.\d{1,3}$` used unescaped
    # dots plus a stray extra `.`, and failed to match addresses whose last
    # octet has a single digit (e.g. "10.0.0.1"), sending them down the DNS
    # path.  Match a dotted quad properly.
    self.ipregex = re.compile(r"^(\d{1,3}\.){3}\d{1,3}$")
    self.interface = iface if iface else conf.route.route("0.0.0.0")[0]
    self.localIP = get_if_addr(self.interface)
    self.remoteIP = None
    self.localPort = random.randrange(0, (2**16) - 1)
    self.remotePort = None
RobinDavid/pystack
[ 23, 12, 23, 2, 1381095119 ]
def packet_received(self, packet, **kwargs):
    """Queue a (dst-ip, dst-port, payload) record under the buffer lock."""
    record = (kwargs["IP"]["dst"], kwargs["UDP"]["dport"], packet.load)
    with self.mutex:
        self.data.append(record)
RobinDavid/pystack
[ 23, 12, 23, 2, 1381095119 ]
def connect(self, ip, port):
    """Record the remote endpoint; resolve *ip* through DNS when it is a name.

    For UDP, "connect" only registers the peer address — no packets are sent.
    Raises Exception when a hostname cannot be resolved.
    """
    if re.match(self.ipregex, ip):
        self.remoteIP = ip
    else:
        # Not a dotted quad: resolve through the DNS layer.
        realip = transversal_layer_access["DNS"].nslookup(ip)
        if realip:
            self.remoteIP = realip
        else:
            raise Exception("[Errno -5] No address associated with hostname")
    # BUG FIX: the original accepted *port* but never stored it, leaving
    # remotePort as None for get_conn_addr() and outgoing traffic.
    self.remotePort = port
RobinDavid/pystack
[ 23, 12, 23, 2, 1381095119 ]
def bind(self, port, app=None, fork=None): #App and fork are just here to be generic with the tcp bind from the pysocket point of view
    """Register this UDP endpoint on *port* with the layer below.

    Also suppresses the host's outgoing ICMP port-unreachable replies so the
    user-space stack can answer instead of the kernel.
    """
    self.localPort = port
    block_icmp_port_unreachable()
    #block_outgoing_packets("udp", self.localIP, self.localPort, None, None)
    # (local IP, local port) identifies this endpoint to the lower layer.
    self.connectionID = (self.localIP, self.localPort)
    self.lowerLayers['default'].register_upper_layer(self.connectionID, self)
RobinDavid/pystack
[ 23, 12, 23, 2, 1381095119 ]
def close(self):
    """Unregister from the lower layer and remove the ICMP iptables rule."""
    self.firstpacket = True
    unblock_icmp_port_unreachable()
    self.lowerLayers['default'].unregister_upper_layer(self.connectionID)
RobinDavid/pystack
[ 23, 12, 23, 2, 1381095119 ]
def fetch_data_from(self, size=None):
    """Pop buffered datagram data, socket-recv style.

    Returns a (src_ip, src_port, data) tuple, or None when the buffer is
    empty.  When *size* is given and smaller than the first chunk, only the
    first *size* bytes are consumed and the remainder stays queued.
    """
    self.mutex.acquire()
    try:
        # BUG FIX: the original popped from an empty list (IndexError) while
        # still holding the mutex, leaving it locked forever; return None and
        # always release via finally.
        if not self.data:
            return None
        if size is not None and size < len(self.data[0][2]):
            src_ip, src_port, payload = self.data[0]
            self.data[0] = (src_ip, src_port, payload[size:])
            return (src_ip, src_port, payload[:size])
        return self.data.pop(0)
    finally:
        self.mutex.release()
RobinDavid/pystack
[ 23, 12, 23, 2, 1381095119 ]
def get_conn_addr(self):
    """Return the (remote IP, remote port) pair of this connection."""
    return self.remoteIP, self.remotePort
RobinDavid/pystack
[ 23, 12, 23, 2, 1381095119 ]
def __init__( self, *args, **kwargs ):
    """Encoder seeded with either positional or keyword payload, never both."""
    JSONEncoder.__init__( self )
    if args and kwargs:
        raise ValueError( "Please provide either args or kwargs, not both." )
    self.__args = args
    self.__kwargs = kwargs
osborne6/luminotes
[ 3, 2, 3, 1, 1406346319 ]
def init_board_permissions(sender, **kwargs):
    """Create a BoardPermissions row for each newly created board.

    Signal receiver (presumably post_save, given the "created"/"instance"
    kwargs — confirm at the connect() site).
    """
    instance = kwargs["instance"]
    if kwargs["created"]:
        BoardPermissions.objects.create(board=instance)
twschiller/open-synthesis
[ 181, 53, 181, 67, 1472061113 ]
def get_method():
    """Return the RST description of Method 1 (ontological mapping)."""
    parts = [
        'Method 1 - Ontological Mapping\n',
        '---------------------------------------\n\n',
        'Using an ontology, map the columns in raw data to a set of standard tables\n\n',
    ]
    return ''.join(parts)
acutesoftware/AIKIF
[ 54, 13, 54, 10, 1378213987 ]
def get_results(fname, dat):
    """Append a progress note to *fname*, then parse and print each row of *dat*."""
    append_rst(fname, '\nMethod 1: running source data ' + dat + ' .... \n')
    with open(dat, 'r') as src:
        for row in src:
            parsed = parse_csv(row)
            print(parsed)
acutesoftware/AIKIF
[ 54, 13, 54, 10, 1378213987 ]
def __str__(self):
    # Human-readable representation: the object's name field.
    return self.name
Compizfox/Inschrijflijst
[ 1, 2, 1, 4, 1478983231 ]
def is_published(self):
    """True while the current time lies between the publish and end dates."""
    now = timezone.now()
    return self.published_at < now < self.end_at
Compizfox/Inschrijflijst
[ 1, 2, 1, 4, 1478983231 ]
def is_full(self):
    """Return true if there are no free places left.

    Caches get_free_places() locally so the (potentially costly) lookup runs
    once instead of twice.
    """
    free = self.get_free_places()
    return free is not None and free <= 0
Compizfox/Inschrijflijst
[ 1, 2, 1, 4, 1478983231 ]
def get_active_registrations_count(self):
    """Count the registrations that have not been withdrawn."""
    active = self.registration_set.filter(withdrawn_at__isnull=True)
    return active.count()
Compizfox/Inschrijflijst
[ 1, 2, 1, 4, 1478983231 ]
def get_note_field_options(self):
    """Build select choices: a placeholder label, then each option doubled."""
    choices = [('', self.note_field + ':')]
    for option in self.note_field_options:
        choices.append((option, option))
    return choices
Compizfox/Inschrijflijst
[ 1, 2, 1, 4, 1478983231 ]
def __init__(self, tree):
    """Describe iteration exhaustion for the search between *tree*'s endpoints."""
    message = (
        "Maximum number of iterations reached while searching "
        "for a path\n\t source: {0}\n\t target: {1}".format(tree.source, tree.target)
    )
    super(BasicPathfinder.MaxItersReachedException, self).__init__(message)
lich-uct/molpher-lib
[ 14, 7, 14, 4, 1463582736 ]
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    Utility used to pull the README into the package description.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    # Close the handle deterministically instead of leaking it until GC.
    with open(path) as fh:
        return fh.read()
grahamking/lintswitch
[ 26, 5, 26, 4, 1291506173 ]
def make_inst(g, addr, dest, op, *args): def make_arg(a): if a is None: return None if isinstance(a, int): return VALUE(a) if isinstance(a, str): return REG(a) return a b = BBlock(addr) args = [make_arg(a) for a in args] b.add(Inst(make_arg(dest), op, args, addr)) g.add_node(addr, val=b)
pfalcon/ScratchABlock
[ 87, 21, 87, 15, 1433364405 ]
def __init__(self, conf, trainingSet=None, testSet=None, fold='[1]'): super(CoFactor, self).__init__(conf, trainingSet, testSet, fold)
recq-cse/RecQ
[ 1378, 391, 1378, 1, 1474729835 ]
def printAlgorConfig(self): super(CoFactor, self).printAlgorConfig() print('Specified Arguments of', self.config['model.name'] + ':') print('k: %d' % self.negCount) print('regR: %.5f' %self.regR) print('filter: %d' %self.filter) print('=' * 80)
recq-cse/RecQ
[ 1378, 391, 1378, 1, 1474729835 ]
def buildModel(self):
    """Train CoFactor by alternating closed-form updates.

    Jointly factorizes the weighted implicit-feedback user-item matrix
    (WMF-style, confidence weight 10*r_ui) and an item-context SPPMI
    co-occurrence matrix that shares the item factors Y.  Each epoch:
      1. with Y fixed, solve every user vector X[u] in closed form;
      2. with X fixed, solve every item vector Y[i] (its normal equations
         augmented by the context Gram matrix), then update the context
         embeddings G and the item/context bias terms w and c.

    NOTE(review): the *10 factor scaling P/Q and the /10 initialisation
    scale look like fixed hyper-parameters baked into this implementation
    — confirm before changing.
    """
    self.X=self.P*10 #Theta
    self.Y=self.Q*10 #Beta
    self.w = np.random.rand(self.num_items) / 10  # bias value of item
    self.c = np.random.rand(self.num_items) / 10  # bias value of context
    self.G = np.random.rand(self.num_items, self.emb_size) / 10  # context embedding
    print('training...')
    epoch = 0
    while epoch < self.maxEpoch:
        self.loss = 0
        # Cache Y^T Y once per epoch: it is shared by every user's normal equations.
        YtY = self.Y.T.dot(self.Y)
        # --- user step: solve each X[u] with Y fixed -----------------------
        for user in self.data.user:
            # C_u = np.ones(self.data.getSize(self.recType))
            H = np.ones(self.num_items)        # per-item confidence weights (1 + 10*r_ui)
            val, pos = [],[]                   # diagonal entries for sparse (C_u - I)
            P_u = np.zeros(self.num_items)     # binary preference indicator for this user
            uid = self.data.user[user]
            for item in self.data.trainSet_u[user]:
                iid = self.data.item[item]
                r_ui = float(self.data.trainSet_u[user][item])
                pos.append(iid)
                val.append(10 * r_ui)
                H[iid] += 10 * r_ui
                P_u[iid] = 1
                # squared reconstruction error, accumulated over observed entries only
                error = (P_u[iid] - self.X[uid].dot(self.Y[iid]))
                self.loss += pow(error, 2)
            # sparse matrix
            C_u = coo_matrix((val, (pos, pos)), shape=(self.num_items, self.num_items))
            # normal equations: (Y^T C_u Y + regU*I) x_u = Y^T H p_u
            A = (YtY + np.dot(self.Y.T, C_u.dot(self.Y)) + self.regU * np.eye(self.emb_size))
            self.X[uid] = np.dot(np.linalg.inv(A), (self.Y.T * H).dot(P_u))
        # Cache X^T X for the item step, mirroring YtY above.
        XtX = self.X.T.dot(self.X)
        # --- item step: solve Y[i], G[i], w[i], c[i] with X fixed ----------
        for item in self.data.item:
            P_i = np.zeros(self.num_users)
            iid = self.data.item[item]
            H = np.ones(self.num_users)
            val,pos = [],[]
            for user in self.data.trainSet_i[item]:
                uid = self.data.user[user]
                r_ui = float(self.data.trainSet_i[item][user])
                pos.append(uid)
                val.append(10 * r_ui)
                H[uid] += 10 * r_ui
                P_i[uid] = 1
            # Accumulators for the SPPMI (context) part of the objective.
            matrix_g1 = np.zeros((self.emb_size, self.emb_size))
            matrix_g2 = np.zeros((self.emb_size, self.emb_size))
            vector_m1 = np.zeros(self.emb_size)
            vector_m2 = np.zeros(self.emb_size)
            update_w = 0
            update_c = 0
            if len(self.SPPMI[item])>0:
                for context in self.SPPMI[item]:
                    cid = self.data.item[context]
                    gamma = self.G[cid]   # context embedding of the co-occurring item
                    beta = self.Y[cid]    # item factor of the co-occurring item
                    matrix_g1 += gamma.reshape(self.emb_size, 1).dot(gamma.reshape(1, self.emb_size))
                    vector_m1 += (self.SPPMI[item][context]-self.w[iid]- self.c[cid])*gamma
                    matrix_g2 += beta.reshape(self.emb_size, 1).dot(beta.reshape(1, self.emb_size))
                    vector_m2 += (self.SPPMI[item][context] - self.w[cid] - self.c[iid]) * beta
                    update_w += self.SPPMI[item][context]-self.Y[iid].dot(gamma)-self.c[cid]
                    update_c += self.SPPMI[item][context]-beta.dot(self.G[iid])-self.w[cid]
            C_i = coo_matrix((val, (pos, pos)), shape=(self.num_users, self.num_users))
            # Item normal equations include the context Gram matrix_g1.
            A = (XtX + np.dot(self.X.T, C_i.dot(self.X)) + self.regU * np.eye(self.emb_size) + matrix_g1)
            self.Y[iid] = np.dot(np.linalg.inv(A), (self.X.T * H).dot(P_i)+vector_m1)
            if len(self.SPPMI[item]) > 0:
                # Closed-form context-embedding update, then mean-residual bias updates.
                self.G[iid] = np.dot(np.linalg.inv(matrix_g2 + self.regR * np.eye(self.emb_size)), vector_m2)
                self.w[iid] = update_w/len(self.SPPMI[item])
                self.c[iid] = update_c/len(self.SPPMI[item])
        epoch += 1
        print('epoch:', epoch, 'loss:', self.loss)
recq-cse/RecQ
[ 1378, 391, 1378, 1, 1474729835 ]
def __init__(self, parent, graphic_view=None): self.peak = [] super().__init__(parent) self.scene = GridScene(parent=graphic_view) self.scene.setBackgroundBrush(settings.BGCOLOR) self.peak_item = self.scene.addPath(QPainterPath(), QPen(settings.PEAK_COLOR, 0)) # type: QGraphicsPathItem
jopohl/urh
[ 9230, 807, 9230, 35, 1459539787 ]
def init_scene(self, draw_grid=True): self.scene.draw_grid = draw_grid self.peak = self.plot_data if len(self.peak) < self.num_samples else np.maximum(self.peak, self.plot_data) self.scene.setSceneRect(0, -5, self.num_samples, 10)
jopohl/urh
[ 9230, 807, 9230, 35, 1459539787 ]