code
stringlengths
281
23.7M
# NOTE(review): the source shows a mangled leading token ".django_db" -- the
# "@pytest.mark" prefix was evidently truncated.  Restored the standard
# pytest-django marker so the test gets database access; confirm against the
# original repository.
@pytest.mark.django_db
def test_sort_order_values(client, create_idv_test_data):
    """Verify the endpoint honors asc/desc ordering and rejects bogus values."""
    # Descending order: trailing sort columns come back highest-first.
    _test_post(client, {'award_id': 1, 'order': 'desc'},
               (None, None, 1, False, False, False, 5, 4, 3))
    # Ascending order reverses them.
    _test_post(client, {'award_id': 1, 'order': 'asc'},
               (None, None, 1, False, False, False, 3, 4, 5))
    # An unrecognized order value must be a client error.
    _test_post(client, {'award_id': 1, 'order': 'BOGUS ORDER'},
               expected_status_code=status.HTTP_400_BAD_REQUEST)
def _discover_coreutils_tests() -> List[Tuple[(pathlib.Path, str)]]:
    """Build pytest params of (binary path, function name) from functions.txt.

    Each non-empty line of tests/coreutils/functions.txt is expected to hold a
    binary name followed by a function name.
    """
    listing = pathlib.Path('tests/coreutils/functions.txt')
    with listing.open('r', encoding='utf-8') as handle:
        lines = handle.readlines()
    params = []
    for line in lines:
        fields = line.split()
        binary = pathlib.Path(f'tests/coreutils/binaries/{fields[0]}')
        params.append(pytest.param((binary, fields[1]), id=f'{fields[0]}:{fields[1]}'))
    return params
class MinerPayment(BaseModel):
    """Record of value flowing to a block's miner for one transaction."""
    block_number: int
    transaction_hash: str
    transaction_index: int
    miner_address: str
    coinbase_transfer: int  # direct transfer to the miner (presumably wei -- confirm upstream)
    base_fee_per_gas: int
    gas_price: int
    gas_price_with_coinbase_transfer: int  # effective price including the coinbase transfer
    gas_used: int
    # Sender/recipient may be absent (e.g. contract-creation has no to-address).
    transaction_to_address: Optional[str]
    transaction_from_address: Optional[str]
class Uploadr():
    """Uploads new images from IMAGE_DIR to Flickr, tracking history in a shelve.

    Legacy Python 2 code: relies on raw_input, urllib2, mimetools,
    dict.has_key and md5 over str.  NOTE(review): not runnable on Python 3
    as written.
    """

    token = None        # cached Flickr auth token (string) or None
    perms = ''          # permission level granted with the token
    TOKEN_FILE = os.path.join(IMAGE_DIR, '.flickrToken')  # on-disk token cache

    def __init__(self):
        self.token = self.getCachedToken()

    def signCall(self, data):
        # Sign an API call: md5 over secret + key material + sorted k/v pairs.
        keys = data.keys()
        keys.sort()  # Python 2: dict.keys() returns a list
        foo = ''
        for a in keys:
            foo += (a + data[a])
        # NOTE(review): the bare name `api.key` is concatenated between the
        # secret and the key *value* -- looks suspicious; confirm against the
        # Flickr signing specification.
        f = (((FLICKR[api.secret] + api.key) + FLICKR[api.key]) + foo)
        return hashlib.md5(f).hexdigest()

    def urlGen(self, base, data, sig):
        # Build a signed request URL: base?k=v&...&api_key=...&api_sig=sig
        foo = (base + '?')
        for d in data:
            foo += (((d + '=') + data[d]) + '&')
        return (((((((foo + api.key) + '=') + FLICKR[api.key]) + '&') + api.sig) + '=') + sig)

    def authenticate(self):
        # Full interactive auth flow: frob -> browser approval -> token.
        print('Getting new token')
        self.getFrob()
        self.getAuthKey()
        self.getToken()
        self.cacheToken()

    def getFrob(self):
        # Request a one-time frob used to start authentication.
        d = {api.method: 'flickr.auth.getFrob'}
        sig = self.signCall(d)
        url = self.urlGen(api.rest, d, sig)
        try:
            response = self.getResponse(url)
            if self.isGood(response):
                FLICKR[api.frob] = str(response.frob)
            else:
                self.reportError(response)
        except:
            print(('Error getting frob:' + str(sys.exc_info())))

    def getAuthKey(self):
        # Open a browser so the user can grant write permission for the frob.
        d = {api.frob: FLICKR[api.frob], api.perms: 'write'}
        sig = self.signCall(d)
        url = self.urlGen(api.auth, d, sig)
        ans = ''
        try:
            webbrowser.open(url)
            # NOTE(review): the prompt string contained a raw line break in the
            # dumped source; reconstructed as '\n'.
            ans = raw_input('Have you authenticated this application? \n(Y/N): ')
        except:
            print(str(sys.exc_info()))
        if (ans.lower() == 'n'):
            print('You need to allow this program to access your Flickr site.')
            print('A web browser should pop open with instructions.')
            print('After you have allowed access restart uploadr.py')
            sys.exit()

    def getToken(self):
        # Exchange the approved frob for a long-lived token.
        d = {api.method: 'flickr.auth.getToken', api.frob: str(FLICKR[api.frob])}
        sig = self.signCall(d)
        url = self.urlGen(api.rest, d, sig)
        try:
            res = self.getResponse(url)
            if self.isGood(res):
                self.token = str(res.auth.token)
                self.perms = str(res.auth.perms)
                self.cacheToken()
            else:
                self.reportError(res)
        except:
            print(str(sys.exc_info()))

    def getCachedToken(self):
        # Return the token cached on disk, or None when absent.
        if os.path.exists(self.TOKEN_FILE):
            return open(self.TOKEN_FILE).read()
        else:
            return None

    def cacheToken(self):
        # Best-effort write of the current token to the on-disk cache.
        try:
            open(self.TOKEN_FILE, 'w').write(str(self.token))
        except:
            print('Issue writing token to local cache ', str(sys.exc_info()))

    def checkToken(self):
        # Validate the cached token against the API; refresh perms on success.
        if (self.token == None):
            return False
        else:
            d = {api.token: str(self.token), api.method: 'flickr.auth.checkToken'}
            sig = self.signCall(d)
            url = self.urlGen(api.rest, d, sig)
            try:
                res = self.getResponse(url)
                if self.isGood(res):
                    self.token = res.auth.token
                    self.perms = res.auth.perms
                    return True
                else:
                    self.reportError(res)
            except:
                print(str(sys.exc_info()))
            return False

    def upload(self):
        # Upload every new image, optionally pausing DRIP_TIME between files.
        newImages = self.grabNewImages()
        if (not self.checkToken()):
            self.authenticate()
        self.uploaded = shelve.open(HISTORY_FILE)
        for (i, image) in enumerate(newImages):
            success = self.uploadImage(image)
            if (args.drip_feed and success and (i != (len(newImages) - 1))):
                print((('Waiting ' + str(DRIP_TIME)) + ' seconds before next upload'))
                time.sleep(DRIP_TIME)
        self.uploaded.close()

    def grabNewImages(self):
        # Walk IMAGE_DIR and collect jpg/gif/png paths, sorted.
        images = []
        foo = os.walk(IMAGE_DIR)
        for data in foo:
            (dirpath, dirnames, filenames) = data
            for f in filenames:
                ext = f.lower().split('.')[(- 1)]
                if ((ext == 'jpg') or (ext == 'gif') or (ext == 'png')):
                    images.append(os.path.normpath(((dirpath + '/') + f)))
        images.sort()
        return images

    def uploadImage(self, image):
        # Upload one file unless already recorded in the history shelve.
        success = False
        if (not self.uploaded.has_key(image)):  # Python 2 shelve/dict API
            print((('Uploading ' + image) + '...'))
            try:
                photo = ('photo', image, open(image, 'rb').read())
                if args.title:
                    FLICKR['title'] = args.title
                if args.description:
                    FLICKR['description'] = args.description
                if args.tags:
                    # NOTE(review): appends to the shared FLICKR['tags'] on
                    # every upload, so tags accumulate across files.
                    FLICKR['tags'] += ((' ' + args.tags) + ' ')
                d = {api.token: str(self.token), api.perms: str(self.perms), 'title': str(FLICKR['title']), 'description': str(FLICKR['description']), 'tags': str(FLICKR['tags']), 'is_public': str(FLICKR['is_public']), 'is_friend': str(FLICKR['is_friend']), 'is_family': str(FLICKR['is_family'])}
                sig = self.signCall(d)
                d[api.sig] = sig
                d[api.key] = FLICKR[api.key]
                url = self.build_request(api.upload, d, (photo,))
                xml = urllib2.urlopen(url).read()
                res = xmltramp.parse(xml)
                if self.isGood(res):
                    print('Success.')
                    self.logUpload(res.photoid, image)
                    success = True
                else:
                    print('Problem:')
                    self.reportError(res)
            except:
                print(str(sys.exc_info()))
        return success

    def logUpload(self, photoID, imageName):
        # Record the mapping in both directions (name -> id and id -> name).
        photoID = str(photoID)
        imageName = str(imageName)
        self.uploaded[imageName] = photoID
        self.uploaded[photoID] = imageName

    def build_request(self, theurl, fields, files, txheaders=None):
        # Wrap the multipart body in a urllib2.Request with proper headers.
        (content_type, body) = self.encode_multipart_formdata(fields, files)
        if (not txheaders):
            txheaders = {}
        txheaders['Content-type'] = content_type
        txheaders['Content-length'] = str(len(body))
        return urllib2.Request(theurl, body, txheaders)

    def encode_multipart_formdata(self, fields, files, BOUNDARY=(('-----' + mimetools.choose_boundary()) + '-----')):
        # Build a multipart/form-data body from (key, value) fields and
        # (key, filename, value) file tuples.
        # NOTE(review): the BOUNDARY default is evaluated once at class
        # definition, so every call shares the same boundary -- a classic
        # computed-default gotcha, though harmless here.
        CRLF = '\r\n'
        L = []
        if isinstance(fields, dict):
            fields = fields.items()
        for (key, value) in fields:
            L.append(('--' + BOUNDARY))
            L.append(('Content-Disposition: form-data; name="%s"' % key))
            L.append('')
            L.append(value)
        for (key, filename, value) in files:
            filetype = (mimetypes.guess_type(filename)[0] or 'application/octet-stream')
            L.append(('--' + BOUNDARY))
            L.append(('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)))
            L.append(('Content-Type: %s' % filetype))
            L.append('')
            L.append(value)
        L.append((('--' + BOUNDARY) + '--'))
        L.append('')
        body = CRLF.join(L)
        content_type = ('multipart/form-data; boundary=%s' % BOUNDARY)
        return (content_type, body)

    def isGood(self, res):
        # xmltramp response: res('stat') reads the XML 'stat' attribute.
        if ((not (res == '')) and (res('stat') == 'ok')):
            return True
        else:
            return False

    def reportError(self, res):
        # Print the API error code/message; fall back to the raw response.
        try:
            print(('Error: ' + str(((res.err('code') + ' ') + res.err('msg')))))
        except:
            print(('Error: ' + str(res)))

    def getResponse(self, url):
        # Fetch the URL and parse the XML reply with xmltramp.
        xml = urllib2.urlopen(url).read()
        return xmltramp.parse(xml)

    def run(self):
        # Poll forever: upload new images, then sleep SLEEP_TIME seconds.
        while True:
            self.upload()
            print(('Last check: ' + str(time.asctime(time.localtime()))))
            time.sleep(SLEEP_TIME)
class OptionSeriesPictorialSonificationDefaultspeechoptionsMappingRate(Options):
    """Speech-rate mapping options: mapFunction/mapTo/max/min/within.

    Each option is a read/write property backed by the shared ``Options``
    configuration store (``_config_get`` for reads, ``_config`` for writes).

    NOTE(review): the original (apparently auto-generated/decompiled) source
    declared each getter and its setter as two plain methods with the same
    name, so the setter definition shadowed the getter and the getter was
    unreachable.  Restored the conventional @property/@setter pairing; the
    stored values and setter behavior are unchanged.
    """

    @property
    def mapFunction(self):
        # Custom mapping callback; None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Name of the data property this option maps to; None when unset.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsColumnSonificationTracksMappingFrequency(Options):
    """Sonification track frequency mapping options: mapFunction/mapTo/max/min/within.

    Each option is a read/write property backed by the shared ``Options``
    configuration store (``_config_get`` for reads, ``_config`` for writes).

    NOTE(review): the original (apparently auto-generated/decompiled) source
    declared each getter and its setter as two plain methods with the same
    name, so the setter shadowed the getter.  Restored the conventional
    @property/@setter pairing; stored values and setter behavior unchanged.
    """

    @property
    def mapFunction(self):
        # Custom mapping callback; None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Name of the data property this option maps to; None when unset.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsScatter3dPointEvents(Options):
    """Per-point event handler options for scatter3d series.

    Each event handler is a read/write property backed by the shared
    ``Options`` configuration store (``_config_get``/``_config``).

    NOTE(review): the original (apparently auto-generated/decompiled) source
    declared each getter and its setter as two plain methods with the same
    name, so the setter shadowed the getter.  Restored the conventional
    @property/@setter pairing; stored values and setter behavior unchanged.
    """

    @property
    def click(self):
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dragStart(self):
        return self._config_get(None)

    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drop(self):
        return self._config_get(None)

    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)

    @property
    def select(self):
        return self._config_get(None)

    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)

    @property
    def unselect(self):
        return self._config_get(None)

    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)

    @property
    def update(self):
        return self._config_get(None)

    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False)
class serienRecTimerListScreen(serienRecBaseScreen, Screen, HelpableScreen):
    """Enigma2 screen showing the timers SerienRecorder created.

    Supports sorting, filtering (pending vs. all), per-timer and bulk
    deletion, and cover/picon display.  All UI strings are user-facing
    German text and are preserved verbatim.
    """

    def __init__(self, session):
        serienRecBaseScreen.__init__(self, session)
        Screen.__init__(self, session)
        HelpableScreen.__init__(self)
        self.skin = None
        self.session = session
        self.picload = ePicLoad()
        self.piconLoader = PiconLoader()
        # German weekday abbreviations, indexed by tm_wday (Mon..Sun).
        self.WochenTag = ['Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So']
        self.database = SRDatabase(getDataBaseFilePath())
        self.channelList = STBHelpers.buildSTBChannelList()
        self.lastSelectedFSID = None  # avoids reloading the same cover
        # Key bindings; the second tuple element is the (German) help text.
        self['actions'] = HelpableActionMap(self, 'SerienRecorderActions', {
            'ok': (self.keyOK, 'Liste der erstellten Timer bearbeiten'),
            'cancel': (self.keyCancel, 'Zuruck zur Serienplaner-Ansicht'),
            'left': (self.keyLeft, 'Zur vorherigen Seite blattern'),
            'right': (self.keyRight, 'Zur nachsten Seite blattern'),
            'up': (self.keyUp, 'Eine Zeile nach oben'),
            'down': (self.keyDown, 'Eine Zeile nach unten'),
            'red': (self.keyRed, 'Ausgewahlten Timer loschen'),
            'green': (self.viewChange, 'Sortierung andern'),
            'yellow': (self.keyYellow, 'Umschalten alle/nur aktive Timer anzeigen'),
            'blue': (self.keyBlue, 'Alle noch ausstehenden Timer loschen'),
            'menu': (self.recSetup, 'Menu fur globale Einstellungen offnen'),
            'startTeletext': (self.wunschliste, 'Informationen zur ausgewahlten Serie auf Wunschliste anzeigen'),
            '0': (self.readLogFile, 'Log-File des letzten Suchlaufs anzeigen'),
            '3': (self.showProposalDB, 'Liste der Serien/Staffel-Starts anzeigen'),
            '4': (self.serieInfo, 'Informationen zur ausgewahlten Serie anzeigen'),
            '6': (self.showConflicts, 'Liste der Timer-Konflikte anzeigen'),
            '7': (self.showWishlist, 'Merkzettel (vorgemerkte Folgen) anzeigen'),
            '8': (self.cleanUp, 'Timerliste bereinigen'),
            '9': (self.dropAllTimer, 'Alle Timer aus der Datenbank loschen')}, (- 1))
        self.helpList[0][2].sort()  # sort help entries for display
        self['helpActions'] = ActionMap(['SerienRecorderActions'], {'displayHelp': self.showHelp, 'displayHelp_long': self.showManual}, 0)
        self.setupSkin()
        self.changesMade = False  # reported to the caller on close
        self.filter = True        # True -> show only pending timers
        self.onLayoutFinish.append(self.readTimer)
        self.onClose.append(self.__onClose)
        self.onLayoutFinish.append(self.setSkinProperties)

    def callHelpAction(self, *args):
        HelpableScreen.callHelpAction(self, *args)

    def setSkinProperties(self):
        # Apply this screen's button labels on top of the base-class setup.
        super(self.__class__, self).setSkinProperties()
        self['text_red'].setText('Timer loschen')
        if (config.plugins.serienRec.recordListView.value == 0):
            self['text_green'].setText('Neueste zuerst')
        elif (config.plugins.serienRec.recordListView.value == 1):
            self['text_green'].setText('Alteste zuerst')
        self['text_ok'].setText('Liste bearbeiten')
        self['text_yellow'].setText('Zeige auch alte Timer')
        self['text_blue'].setText('Losche neue Timer')
        self.num_bt_text[3][1] = 'Bereinigen'
        self.num_bt_text[4][1] = 'Datenbank leeren'
        super(self.__class__, self).startDisplayTimer()

    def setupSkin(self):
        # Create the multi-content list widget and show the button widgets.
        InitSkin(self)
        self.chooseMenuList = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self.chooseMenuList.l.setFont(0, gFont('Regular', (20 + int(config.plugins.serienRec.listFontsize.value))))
        self.chooseMenuList.l.setItemHeight(int((56 * skinFactor)))
        self['menu_list'] = self.chooseMenuList
        self['menu_list'].show()
        if config.plugins.serienRec.showCover.value:
            self['cover'].show()
        # NOTE(review): buttons are shown when showAllButtons is *disabled* --
        # looks inverted, but kept byte-identical; confirm intended semantics.
        if (not config.plugins.serienRec.showAllButtons.value):
            self['bt_red'].show()
            self['bt_green'].show()
            self['bt_ok'].show()
            self['bt_yellow'].show()
            self['bt_blue'].show()
            self['bt_exit'].show()
            self['bt_text'].show()
            self['bt_info'].show()
            self['bt_menu'].show()
            self['text_red'].show()
            self['text_green'].show()
            self['text_ok'].show()
            self['text_yellow'].show()
            self['text_blue'].show()
            self['text_0'].show()
            self['text_1'].show()
            self['text_2'].show()
            self['text_3'].show()
            self['text_4'].show()

    def updateMenuKeys(self):
        updateMenuKeys(self)

    def getCurrentSelection(self):
        # Returns (series name, wish-list id, fake-service id) for the
        # selected row, or (None, None, None) when the list is empty.
        if (self['menu_list'].getCurrent() is None):
            return (None, None, None)
        serien_name = self['menu_list'].getCurrent()[0][0]
        serien_fsid = self['menu_list'].getCurrent()[0][10]
        serien_wlid = self.database.getMarkerWLID(serien_fsid)
        return (serien_name, serien_wlid, serien_fsid)

    def serieInfo(self):
        # Open the series-info screen for the current selection.
        (serien_name, serien_wlid, serien_fsid) = self.getCurrentSelection()
        if (serien_name and serien_wlid):
            from .SerienRecorderSeriesInfoScreen import serienRecShowInfo
            self.session.open(serienRecShowInfo, serien_name, serien_wlid, serien_fsid)

    def wunschliste(self):
        # Show Wunschliste info for the selected series.
        (serien_name, serien_wlid, serien_fsid) = self.getCurrentSelection()
        if serien_wlid:
            super(self.__class__, self).wunschliste(serien_fsid)

    def setupClose(self, result):
        # Reload the list when the settings dialog reports changes.
        super(self.__class__, self).setupClose(result)
        if result[1]:
            self.readTimer()

    def viewChange(self):
        # Toggle sort order (newest-first <-> oldest-first) and persist it.
        if (config.plugins.serienRec.recordListView.value == 1):
            config.plugins.serienRec.recordListView.value = 0
            self['text_green'].setText('Neueste zuerst')
        else:
            config.plugins.serienRec.recordListView.value = 1
            self['text_green'].setText('Alteste zuerst')
        config.plugins.serienRec.recordListView.save()
        configfile.save()
        self.readTimer()

    def readTimer(self, showTitle=True):
        # Load timers (in a twisted thread when supported) and fill the list.
        current_time = int(time.time())
        self['title'].setText('Lade Timer-Liste...')

        def loadTimer():
            # Runs off the UI thread: opens its own database connection.
            print('[SerienRecorder] loadAllTimer')
            database = SRDatabase(getDataBaseFilePath())
            return database.getAllTimer((current_time if self.filter else None))

        def onLoadTimerSuccessful(timers):
            completedTimer = 0
            timerList = []
            self['title'].instance.setForegroundColor(parseColor('foreground'))
            for timer in timers:
                (row_id, serie, staffel, episode, title, start_time, stbRef, webChannel, eit, activeTimer, serien_fsid) = timer
                if (int(start_time) < int(current_time)):
                    # Past start time -> completed; eit is zeroed for these.
                    completedTimer += 1
                    timerList.append((serie, staffel, episode, title, start_time, stbRef, webChannel, True, 0, bool(activeTimer), serien_fsid, row_id))
                else:
                    timerList.append((serie, staffel, episode, title, start_time, stbRef, webChannel, False, eit, bool(activeTimer), serien_fsid, row_id))
            if showTitle:
                if self.filter:
                    self['title'].setText(('Timer-Liste: %s ausstehende Timer' % len(timerList)))
                else:
                    self['title'].setText(('Timer-Liste: %s abgeschlossene und %s ausstehende Timer' % (completedTimer, (len(timerList) - completedTimer))))
            # Sort by start time (tuple index 4); view 1 shows newest first.
            if (config.plugins.serienRec.recordListView.value == 0):
                timerList.sort(key=(lambda t: t[4]))
            elif (config.plugins.serienRec.recordListView.value == 1):
                timerList.sort(key=(lambda t: t[4]))
                timerList.reverse()
            self.chooseMenuList.setList(list(map(self.buildList, timerList)))
            if (len(timerList) == 0):
                if showTitle:
                    self['title'].setText('Timer-Liste: 0 ausstehende Timer')
            self.getCover()

        import twisted.python.runtime
        if twisted.python.runtime.platform.supportsThreads():
            from twisted.internet.threads import deferToThread
            deferToThread(loadTimer).addCallback(onLoadTimerSuccessful)
        else:
            timers = loadTimer()
            onLoadTimerSuccessful(timers)

    def buildList(self, entry):
        # Build one list row: picon, channel, time, timer icon, series, title.
        (serie, staffel, episode, title, start_time, serviceRef, webChannel, completed, eit, activeTimer, serien_fsid, row_id) = entry
        xtime = ''
        if (start_time > 0):
            xtime = time.strftime((self.WochenTag[time.localtime(int(start_time)).tm_wday] + ', %d.%m.%Y - %H:%M'), time.localtime(int(start_time)))
        if ((start_time == 0) or (title == 'dump')):
            title = '(Manuell hinzugefugt !!)'
        xtitle = ('S%sE%s - %s' % (str(staffel).zfill(2), str(episode).zfill(2), title))
        imageNone = ('%s/images/black.png' % os.path.dirname(__file__))
        imageTimer = imageNone
        channelName = webChannel
        if serviceRef:
            channelName = STBHelpers.getChannelByRef(self.channelList, serviceRef)
        # Inactive timers are drawn in red.
        if activeTimer:
            SerieColor = None
        else:
            SerieColor = parseColor('red').argb()
        foregroundColor = parseColor('foreground').argb()
        picon = loadPNG(imageNone)
        if (not completed):
            imageTimer = ('%s/images/timer.png' % os.path.dirname(__file__))
        if (serviceRef and (config.plugins.serienRec.showPicons.value != '0')):
            # Picon mode '1' looks up by service reference, otherwise by name.
            piconPath = self.piconLoader.getPicon((serviceRef if (config.plugins.serienRec.showPicons.value == '1') else channelName))
            if piconPath:
                self.picloader = PicLoader((80 * skinFactor), (40 * skinFactor))
                picon = self.picloader.load(piconPath)
                self.picloader.destroy()
        return [entry,
                (eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, (5 * skinFactor), (5 * skinFactor), (80 * skinFactor), (40 * skinFactor), picon),
                (eListboxPythonMultiContent.TYPE_TEXT, (100 * skinFactor), (3 * skinFactor), (250 * skinFactor), (26 * skinFactor), 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), channelName, SerieColor, SerieColor),
                (eListboxPythonMultiContent.TYPE_TEXT, (100 * skinFactor), (27 * skinFactor), (220 * skinFactor), (26 * skinFactor), 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), xtime, foregroundColor, foregroundColor),
                (eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, (315 * skinFactor), (27 * skinFactor), (30 * skinFactor), (22 * skinFactor), loadPNG(imageTimer)),
                (eListboxPythonMultiContent.TYPE_TEXT, (350 * skinFactor), (3 * skinFactor), (500 * skinFactor), (26 * skinFactor), 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), serie, SerieColor, SerieColor),
                (eListboxPythonMultiContent.TYPE_TEXT, (350 * skinFactor), (29 * skinFactor), (500 * skinFactor), (26 * skinFactor), 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), xtitle, foregroundColor, foregroundColor)]

    def keyOK(self):
        # Open the timer-list editor screen.
        from .SerienRecorderEditTimerListScreen import serienRecEditTimerList
        self.session.open(serienRecEditTimerList)

    def callDeleteSelectedTimer(self, answer):
        # MessageBox callback: delete the currently selected timer.
        if answer:
            serien_name = self['menu_list'].getCurrent()[0][0]
            staffel = self['menu_list'].getCurrent()[0][1]
            episode = self['menu_list'].getCurrent()[0][2]
            serien_title = self['menu_list'].getCurrent()[0][3]
            serien_time = self['menu_list'].getCurrent()[0][4]
            serien_channel = self['menu_list'].getCurrent()[0][6]
            serien_eit = self['menu_list'].getCurrent()[0][8]
            serien_fsid = self['menu_list'].getCurrent()[0][10]
            row_id = self['menu_list'].getCurrent()[0][11]
            self.removeTimer(self.database, serien_name, serien_fsid, staffel, episode, serien_title, serien_time, serien_channel, serien_eit, row_id)
            self.changesMade = True
            self.readTimer(False)
            self['title'].instance.setForegroundColor(parseColor('red'))
            self['title'].setText(("Timer '- %s -' geloscht." % serien_name))
        else:
            return

    # NOTE(review): declared without `self` yet always called as a bound
    # method (self.removeTimer(self.database, ...)), which binds the instance
    # to `database`.  Presumably a decompilation artifact (dropped `self` or
    # stripped @staticmethod) -- kept byte-identical; confirm upstream.
    def removeTimer(database, serien_name, serien_fsid, staffel, episode, serien_title, serien_time, serien_channel, serien_eit=0, row_id=None):
        # Remove the matching enigma2 box timer, then the database row(s).
        markerType = database.getMarkerType(serien_fsid)
        if (markerType is None):
            markerType = 1
        else:
            markerType = int(markerType)
        from .SerienRecorderTimer import serienRecTimer
        timerSeriesName = database.getMarkerTimerName(serien_fsid)
        title = serienRecTimer.getTimerName(serien_name, staffel, episode, serien_title, timerSeriesName, markerType)
        from .SerienRecorderTimer import serienRecBoxTimer
        removed = serienRecBoxTimer.removeTimerEntry(title, serien_time, serien_eit)
        if (not removed):
            print('[SerienRecorder] enigma2 NOOOTTT removed')
        else:
            print('[SerienRecorder] enigma2 Timer removed.')
        # Prefer removal by row id; fall back to a field-matched delete.
        if row_id:
            database.removeTimers([row_id])
        else:
            database.removeTimer(serien_fsid, staffel, episode, None, serien_time, serien_channel)
        seasonEpisodeString = ('S%sE%s' % (str(staffel).zfill(2), str(episode).zfill(2)))
        SRLogger.writeLogFilter('timerDebug', ("Timer geloscht: ' %s - %s - %s '" % (serien_name, seasonEpisodeString, serien_title)))

    def keyRed(self):
        # Delete the selected timer, optionally after confirmation.
        if (self['menu_list'].getCurrent() is None):
            print('[SerienRecorder] Angelegte Timer Tabelle leer.')
            return
        serien_name = self['menu_list'].getCurrent()[0][0]
        staffel = self['menu_list'].getCurrent()[0][1]
        episode = self['menu_list'].getCurrent()[0][2]
        serien_title = self['menu_list'].getCurrent()[0][3]
        if config.plugins.serienRec.confirmOnDelete.value:
            # Map placeholder titles to their user-facing descriptions.
            title = re.sub('\\Adump\\Z', '(Manuell hinzugefugt !!)', serien_title)
            title = re.sub('\\Awebdump\\Z', '(Manuell ubers Webinterface hinzugefugt !!)', title)
            self.session.openWithCallback(self.callDeleteSelectedTimer, MessageBox, ("Soll der Timer fur '%s - S%sE%s - %s' wirklich geloscht werden?" % (serien_name, str(staffel).zfill(2), str(episode).zfill(2), title)), MessageBox.TYPE_YESNO, default=False)
        else:
            self.callDeleteSelectedTimer(True)

    def keyYellow(self):
        # Toggle between showing only pending timers and all timers.
        if self.filter:
            self['text_yellow'].setText('Zeige nur neue Timer')
            self.filter = False
        else:
            self['text_yellow'].setText('Zeige auch alte Timer')
            self.filter = True
        self.readTimer()

    def keyBlue(self):
        # Delete all still-pending timers, optionally after confirmation.
        if config.plugins.serienRec.confirmOnDelete.value:
            self.session.openWithCallback(self.removeRemainingTimerFromDB, MessageBox, 'Sollen wirklich alle noch ausstehenden Timer von der Box und aus der Datenbank geloscht werden?', MessageBox.TYPE_YESNO, default=False)
        else:
            self.removeRemainingTimerFromDB(True)

    def removeRemainingTimerFromDB(self, answer):
        # Delete every pending timer from both the box and the database.
        if answer:
            current_time = int(time.time())
            timers = self.database.getAllTimer(current_time)
            for timer in timers:
                (row_id, serie, staffel, episode, title, start_time, stbRef, webChannel, eit, activeTimer, serien_fsid) = timer
                self.removeTimer(self.database, serie, serien_fsid, staffel, episode, title, start_time, webChannel, eit, row_id)
            self.changesMade = True
            self.readTimer(False)
            self['title'].instance.setForegroundColor(parseColor('red'))
            self['title'].setText('Alle noch ausstehenden Timer wurden geloscht.')
        else:
            return

    def removeOldTimerFromDB(self, answer):
        # Drop all already-completed timers and compact the database.
        if answer:
            self.database.removeAllOldTimer()
            self.database.rebuild()
            self.readTimer(False)
            self['title'].instance.setForegroundColor(parseColor('red'))
            self['title'].setText('Alle alten Timer wurden geloscht.')
        else:
            return

    def dropAllTimer(self):
        # Remove old timers, optionally after confirmation.
        if config.plugins.serienRec.confirmOnDelete.value:
            self.session.openWithCallback(self.removeOldTimerFromDB, MessageBox, 'Sollen wirklich alle alten Timer aus der Datenbank geloscht werden?', MessageBox.TYPE_YESNO, default=False)
        else:
            self.removeOldTimerFromDB(True)

    def cleanUp(self):
        # Offer to delete timers whose series marker no longer exists.
        numberOfOrphanTimers = self.database.countOrphanTimers()
        self.session.openWithCallback(self.removeOrphanTimerFromDB, MessageBox, ('Es wurden %d Eintrage in der Timer-Liste gefunden, fur die kein Serien-Marker vorhanden ist, sollen diese Eintrage geloscht werden?' % numberOfOrphanTimers), MessageBox.TYPE_YESNO, default=False)

    def removeOrphanTimerFromDB(self, answer):
        if answer:
            self.database.removeOrphanTimers()
            self.database.rebuild()
        else:
            return

    def getCover(self):
        # Load the series cover only when the selection actually changed.
        (serien_name, serien_wlid, serien_fsid) = self.getCurrentSelection()
        if (serien_name and serien_wlid and serien_fsid and (self.lastSelectedFSID != serien_fsid)):
            getCover(self, serien_name, serien_fsid)
            self.lastSelectedFSID = serien_fsid

    def keyLeft(self):
        self['menu_list'].pageUp()
        self.getCover()

    def keyRight(self):
        self['menu_list'].pageDown()
        self.getCover()

    def keyDown(self):
        self['menu_list'].down()
        self.getCover()

    def keyUp(self):
        self['menu_list'].up()
        self.getCover()

    def __onClose(self):
        self.stopDisplayTimer()

    def keyCancel(self):
        # Close and report whether any timers were changed.
        self.close(self.changesMade)
class PRNG():
    """Deterministic pseudo-random generator driven by a hash chain.

    Bytes are produced by repeatedly hashing the previous digest and drained
    from a FIFO pool.  Same seed -> same sequence.
    """

    def __init__(self, seed):
        self.sha = sha256(seed)      # current link of the hash chain
        self.pool = bytearray()      # bytes not yet handed out

    def get_bytes(self, n):
        """Return the next *n* bytes, extending the hash chain as needed."""
        while len(self.pool) < n:
            self.pool.extend(self.sha)
            self.sha = sha256(self.sha)
        handed_out = self.pool[:n]
        self.pool = self.pool[n:]
        return handed_out

    def randint(self, start, end):
        """Uniform-ish integer in [start, end) built from whole bytes.

        Draws just enough bytes to cover the span, then reduces modulo the
        span (small modulo bias is accepted by design).
        """
        span = end - start
        value = 0
        covered = 1
        while covered < span:
            value = (value << 8) + self.get_bytes(1)[0]
            covered <<= 8
        return start + value % span

    def choice(self, seq):
        """Return a pseudo-random element of *seq*."""
        return seq[self.randint(0, len(seq))]

    def shuffle(self, x):
        """Fisher-Yates shuffle of *x* in place."""
        for i in reversed(range(1, len(x))):
            j = self.randint(0, i + 1)
            x[i], x[j] = x[j], x[i]

    def pluck(self, seq: List):
        """Remove and return a pseudo-random element of *seq*."""
        return seq.pop(self.randint(0, len(seq)))
def bad_words_handler(bot, message):
    """Reply with a warning when the message matches the bad-words filter.

    Returns the handler itself on a match (used by the caller for chaining);
    otherwise returns None.  Does nothing when the feature is toggled off.
    """
    if not config.bad_words_toggle:
        return
    try:
        text = msg_type(message)
        if re.findall(config.regex_filter['bad_words'], text):
            bot.reply_to(message, ' Watch your tongue...')
            return bad_words_handler
    except Exception:
        # Deliberately best-effort: any failure while extracting, matching or
        # replying is silently ignored so the bot keeps running.
        pass
class TestColumnVectorizer():
    """Tests for column-level vectorizers on searchable columns.

    NOTE(review): this looks like decompiled pytest/SQLAlchemy test code with
    decorators stripped: `models` and `Article` were presumably
    @pytest.fixture, and the bare `(Article.content)`, `(HSTORE)` and
    ('some unknown type') expressions were presumably the arguments of a
    @vectorizer(...) decorator applied to the function defined right after
    each of them.  Code kept byte-identical; confirm against the original
    test suite.
    """

    def models(self, Article):
        pass

    def Article(self, Base):
        # Defines the model plus two vectorizers (see class NOTE about the
        # stripped decorators).
        class Article(Base):
            __tablename__ = 'textitem'
            id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
            name = sa.Column(HSTORE)
            search_vector = sa.Column(TSVectorType('name', 'content', regconfig='simple'))
            content = sa.Column(sa.String)

        (Article.content)  # NOTE(review): orphaned decorator argument

        def vectorize_content(column):
            # Rewrites 'bad' to 'good' before indexing.
            return sa.func.replace(column, 'bad', 'good')

        (HSTORE)  # NOTE(review): orphaned decorator argument

        def hstore_vectorizer(column):
            # Indexes only the hstore values, cast to text.
            return sa.cast(sa.func.avals(column), sa.Text)

        return Article

    def test_column_vectorizer_has_priority_over_type_vectorizer(self, Article, session):
        article = Article(name={'fi': 'Joku artikkeli', 'en': 'Some article'}, content='bad')
        session.add(article)
        session.commit()
        session.refresh(article)
        # 'good' (not 'bad') in the vector proves the column vectorizer ran.
        for word in ['article', 'artikkeli', 'good', 'joku', 'some']:
            assert (word in article.search_vector)

    def test_unknown_vectorizable_type(self):
        # Registering a vectorizer for an unknown type must raise TypeError.
        with pytest.raises(TypeError):
            ('some unknown type')  # NOTE(review): orphaned decorator argument

            def my_vectorizer(column):
                pass
def extract_index_mapping_and_settings(client, index_pattern):
    """Collect mappings and persistent settings for indices matching *index_pattern*.

    Indices rejected by is_valid() are skipped (logged at INFO level).
    Returns {index_name: {'mappings': ..., 'settings': {'index': ...}}}.
    """
    log = logging.getLogger(__name__)
    collected = {}
    catalog = client.indices.get(index=index_pattern, params={'expand_wildcards': 'all'})
    for index_name, index_details in catalog.items():
        valid, reason = is_valid(index_name, index_pattern)
        if not valid:
            log.info('Skipping index [%s] (reason: %s).', index_name, reason)
            continue
        # Drop ephemeral settings (UUIDs, versions, ...) and normalize the rest.
        settings = filter_ephemeral_index_settings(index_details['settings']['index'])
        update_index_setting_parameters(settings)
        collected[index_name] = {
            'mappings': index_details['mappings'],
            'settings': {'index': settings},
        }
    return collected
class OptionPlotoptionsCylinderSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options: mapFunction/mapTo/max/min/within.

    Each option is a read/write property backed by the shared ``Options``
    configuration store (``_config_get`` for reads, ``_config`` for writes).

    NOTE(review): the original (apparently auto-generated/decompiled) source
    declared each getter and its setter as two plain methods with the same
    name, so the setter shadowed the getter.  Restored the conventional
    @property/@setter pairing; stored values and setter behavior unchanged.
    """

    @property
    def mapFunction(self):
        # Custom mapping callback; None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Name of the data property this option maps to; None when unset.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def compute_interpolation_error(baseMesh, nref, space, degree):
    """Interpolate a smooth vector field on a mesh hierarchy and record errors.

    For each refinement level, interpolates a fixed analytic expression into
    the requested (space, degree) function space and records the L2 error and
    the natural H(div)/H(curl) error.  Returns an (nref + 1, 2) array.
    """
    hierarchy = MeshHierarchy(baseMesh, nref)
    dim = hierarchy[0].geometric_dimension()
    errors = np.zeros((nref + 1, 2))
    for level, mesh in enumerate(hierarchy):
        if dim == 2:
            x, y = SpatialCoordinate(mesh)
            expression = as_vector([sin(x) * cos(y), exp(x) * y])
        elif dim == 3:
            x, y, z = SpatialCoordinate(mesh)
            expression = as_vector([sin(y) * z * cos(x), cos(x) * z * x, exp(x) * y])
        element = FiniteElement(space, mesh.ufl_cell(), degree, variant='integral')
        V = FunctionSpace(mesh, element)
        f = interpolate(expression, V)
        error_l2 = errornorm(expression, f, 'L2')
        # Nedelec spaces are H(curl)-conforming; everything else uses H(div).
        norm_kind = 'hcurl' if 'Nedelec' in V.ufl_element().family() else 'hdiv'
        error_hD = errornorm(expression, f, norm_kind)
        errors[level] = np.array([error_l2, error_hD])
    return errors
class LDAPGroups():
    """Resolves a user's LDAP group memberships."""

    # NOTE(review): declared without `self` and never uses instance state --
    # presumably a stripped @staticmethod (or the decompiler dropped `self`).
    # Calling it on an instance as written would bind the instance to
    # `username`.  Kept byte-identical; confirm upstream.
    def group_names(username):
        # Query the configured LDAP server and return the `cn` attribute of
        # each group the user belongs to.
        ldap_client = LDAP(app.config['LDAP_URL'], app.config['LDAP_SEARCH_STRING'])
        groups = []
        for group in ldap_client.get_user_groups(username):
            group = group.decode('utf-8')  # entries come back as bytes
            # Parse a DN like 'cn=name,ou=...' into an attribute dict.
            attrs = dict([tuple(x.split('=')) for x in group.split(',')])
            groups.append(attrs['cn'])
        return groups
class RuntimeImages(Schemaspace):
    """Schemaspace for Elyra runtime-image configuration instances."""

    # Stable identifiers under which this schemaspace is registered.
    RUNTIME_IMAGES_SCHEMASPACE_ID = '119c9740-d73f-48c6-a97a-599d3acaf41d'
    RUNTIMES_IMAGES_SCHEMASPACE_NAME = 'runtime-images'
    RUNTIMES_IMAGES_SCHEMASPACE_DISPLAY_NAME = 'Runtime Images'

    def __init__(self, *args, **kwargs):
        # NOTE(review): *args/**kwargs are accepted but not forwarded to the
        # base class -- confirm that Schemaspace.__init__ needs nothing else.
        super().__init__(schemaspace_id=RuntimeImages.RUNTIME_IMAGES_SCHEMASPACE_ID,
                         name=RuntimeImages.RUNTIMES_IMAGES_SCHEMASPACE_NAME,
                         display_name=RuntimeImages.RUNTIMES_IMAGES_SCHEMASPACE_DISPLAY_NAME,
                         description='Schemaspace for instances of Elyra runtime images configurations')
def test_to_tjp_attribute_is_working_properly_for_multiple_work_hour_ranges():
    """to_tjp renders split weekday ranges, a single Saturday range, Sunday off."""
    wh = WorkingHours()
    # Mon-Fri: 09:30-12:00 and 13:00-18:30, expressed in minutes from midnight.
    for weekday in ('mon', 'tue', 'wed', 'thu', 'fri'):
        wh[weekday] = [[570, 720], [780, 1110]]
    wh['sat'] = [[570, 720]]
    wh['sun'] = []
    expected_tjp = (
        ' workinghours mon 09:30 - 12:00, 13:00 - 18:30\n'
        ' workinghours tue 09:30 - 12:00, 13:00 - 18:30\n'
        ' workinghours wed 09:30 - 12:00, 13:00 - 18:30\n'
        ' workinghours thu 09:30 - 12:00, 13:00 - 18:30\n'
        ' workinghours fri 09:30 - 12:00, 13:00 - 18:30\n'
        ' workinghours sat 09:30 - 12:00\n'
        ' workinghours sun off'
    )
    assert (wh.to_tjp == expected_tjp)
class XSensListener(object):
    """Receives XSens MVN network-streamer UDP packets and yields pose data."""

    def listen(self, host='localhost', port=9763, timeout=2):
        # Generator: binds a UDP socket and yields [header, pose_data] for
        # each 'MXTP' package found in every received datagram.
        import struct
        import socket
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.settimeout(timeout)
        s.bind((host, port))
        while True:
            data = s.recv(2048)
            # NOTE(review): splitting datagrams on the str 'MXTP' and joining
            # chunks with ''.join implies Python 2 str semantics; on Python 3
            # `data` is bytes and this would raise TypeError -- confirm the
            # target interpreter.
            packages = data.split('MXTP')
            for package in packages:
                packet_type_id = package[:2]   # two-character packet type code
                raw_data = package[2:]
                header = raw_data[:18]
                if (len(header) < 18):
                    continue  # truncated package; skip it
                header_data = Header._make(struct.unpack(header_data_format, header))
                raw_pose_data = raw_data[18:]
                pose_data = []
                if (packet_type_id == '01'):
                    # MXTP01: Euler pose segments, 28 bytes each.
                    chunks = list(map(''.join, zip(*([iter(raw_pose_data)] * 28))))
                    for chunk in chunks:
                        unpacked_data = struct.unpack(euler_data_format, chunk)
                        euler_data = Euler._make(unpacked_data)
                        pose_data.append(euler_data)
                elif (packet_type_id == '02'):
                    # MXTP02: quaternion pose segments, 32 bytes each.
                    chunks = list(map(''.join, zip(*([iter(raw_pose_data)] * 32))))
                    for chunk in chunks:
                        unpacked_data = struct.unpack(quaternion_data_format, chunk)
                        quaternion_data = Quaternion._make(unpacked_data)
                        pose_data.append(quaternion_data)
                elif (packet_type_id == '25'):
                    # MXTP25: time code payload.
                    pose_data.append(TimeCode._make(raw_pose_data))
                (yield [header_data, pose_data])
def collect(filter_fun):
    """Discover benchmark_* functions and wrap accepted ones in Benchmark records.

    *filter_fun* receives each function's short name (prefix stripped) and
    decides whether it is included.
    """
    # When invoked from inside benchmarks/, repoint sys.path at the parent so
    # 'benchmarks.<module>' imports resolve.
    if sys.path[0].endswith('/benchmarks'):
        bench_dir = sys.path.pop(0)
        sys.path.insert(0, bench_dir.rsplit('/', 1)[0])
    prefix = 'benchmark_'
    found = []
    for module_name in ('hub_timers', 'spawn'):
        module = importlib.import_module('benchmarks.' + module_name)
        for member_name, member in inspect.getmembers(module):
            if not (member_name.startswith(prefix) and inspect.isfunction(member)):
                continue
            short_name = member_name[len(prefix):]
            if filter_fun(short_name):
                found.append(Benchmark(name=short_name, func=member))
    return found
class controller_status_prop(loxi.OFObject):
    """Base class for OpenFlow controller-status properties (generated loxi code).

    `subtypes` maps wire property-type codes to concrete subclasses so that
    unpack() can dispatch to the right class.
    """
    subtypes = {}

    def __init__(self, type=None):
        if (type != None):
            self.type = type
        else:
            self.type = 0
        return

    def pack(self):
        # Serialize as: type (uint16) then length (uint16), with the length
        # back-patched once the total size is known.
        # NOTE(review): ''.join over struct.pack results implies Python 2 str
        # semantics; on Python 3 these are bytes -- confirm the target.
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        # NOTE(review): no `self` and no decorator -- in generated loxi code
        # unpack is a @staticmethod; the decorator appears to have been
        # stripped.  Kept byte-identical.
        (subtype,) = reader.peek('!H', 0)
        subclass = controller_status_prop.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = controller_status_prop()
        obj.type = reader.read('!H')[0]
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this property's payload.
        reader = orig_reader.slice(_length, 4)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.type != other.type):
            return False
        return True

    def pretty_print(self, q):
        q.text('controller_status_prop {')
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')
class Processor(Iface, TProcessor):
    """Thrift asyncio service processor (generated-style code) mapping
    method names to ``process_*`` handlers that dispatch onto the user
    handler, either in an executor thread or as a future on the loop.

    NOTE(review): the bare ``_main(...)`` / ``_method(...)`` calls before
    each ``process_*`` definition look like decorator lines whose leading
    ``@`` and name prefix (e.g. ``@thrift_process_method``) were lost in
    extraction — confirm against the generating template before use.
    """
    # Methods the client does not wait on; no result is written back.
    _onewayMethods = ('reinitialize', 'shutdown')

    def __init__(self, handler, loop=None):
        TProcessor.__init__(self)
        self._handler = handler
        self._loop = (loop or asyncio.get_event_loop())
        self._processMap = {}
        self._processMap['getName'] = Processor.process_getName
        self._processMap['getVersion'] = Processor.process_getVersion
        self._processMap['getStatus'] = Processor.process_getStatus
        self._processMap['getStatusDetails'] = Processor.process_getStatusDetails
        self._processMap['getCounters'] = Processor.process_getCounters
        self._processMap['getCounter'] = Processor.process_getCounter
        self._processMap['setOption'] = Processor.process_setOption
        self._processMap['getOption'] = Processor.process_getOption
        self._processMap['getOptions'] = Processor.process_getOptions
        self._processMap['getCpuProfile'] = Processor.process_getCpuProfile
        self._processMap['aliveSince'] = Processor.process_aliveSince
        self._processMap['reinitialize'] = Processor.process_reinitialize
        self._processMap['shutdown'] = Processor.process_shutdown

    def onewayMethods(self):
        """Return the tuple of oneway (fire-and-forget) method names."""
        l = []
        l.extend(Processor._onewayMethods)
        return tuple(l)

    _main(asyncio=True)
    def process(self):
        pass

    _method(getName_args, oneway=False, asyncio=True)
    def process_getName(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getName_result()
        # Blocking handlers run in the default executor; coroutine-friendly
        # ones are scheduled directly on the loop.
        if should_run_on_thread(self._handler.getName):
            fut = self._loop.run_in_executor(None, self._handler.getName)
        else:
            fut = call_as_future(self._handler.getName, self._loop)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getVersion_args, oneway=False, asyncio=True)
    def process_getVersion(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getVersion_result()
        if should_run_on_thread(self._handler.getVersion):
            fut = self._loop.run_in_executor(None, self._handler.getVersion)
        else:
            fut = call_as_future(self._handler.getVersion, self._loop)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getStatus_args, oneway=False, asyncio=True)
    def process_getStatus(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getStatus_result()
        if should_run_on_thread(self._handler.getStatus):
            fut = self._loop.run_in_executor(None, self._handler.getStatus)
        else:
            fut = call_as_future(self._handler.getStatus, self._loop)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getStatusDetails_args, oneway=False, asyncio=True)
    def process_getStatusDetails(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getStatusDetails_result()
        if should_run_on_thread(self._handler.getStatusDetails):
            fut = self._loop.run_in_executor(None, self._handler.getStatusDetails)
        else:
            fut = call_as_future(self._handler.getStatusDetails, self._loop)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getCounters_args, oneway=False, asyncio=True)
    def process_getCounters(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getCounters_result()
        if should_run_on_thread(self._handler.getCounters):
            fut = self._loop.run_in_executor(None, self._handler.getCounters)
        else:
            fut = call_as_future(self._handler.getCounters, self._loop)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getCounter_args, oneway=False, asyncio=True)
    def process_getCounter(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getCounter_result()
        if should_run_on_thread(self._handler.getCounter):
            fut = self._loop.run_in_executor(None, self._handler.getCounter, args.key)
        else:
            fut = call_as_future(self._handler.getCounter, self._loop, args.key)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(setOption_args, oneway=False, asyncio=True)
    def process_setOption(self, args, handler_ctx, seqid, oprot, fn_name):
        result = setOption_result()
        if should_run_on_thread(self._handler.setOption):
            fut = self._loop.run_in_executor(None, self._handler.setOption, args.key, args.value)
        else:
            fut = call_as_future(self._handler.setOption, self._loop, args.key, args.value)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getOption_args, oneway=False, asyncio=True)
    def process_getOption(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getOption_result()
        if should_run_on_thread(self._handler.getOption):
            fut = self._loop.run_in_executor(None, self._handler.getOption, args.key)
        else:
            fut = call_as_future(self._handler.getOption, self._loop, args.key)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getOptions_args, oneway=False, asyncio=True)
    def process_getOptions(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getOptions_result()
        if should_run_on_thread(self._handler.getOptions):
            fut = self._loop.run_in_executor(None, self._handler.getOptions)
        else:
            fut = call_as_future(self._handler.getOptions, self._loop)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(getCpuProfile_args, oneway=False, asyncio=True)
    def process_getCpuProfile(self, args, handler_ctx, seqid, oprot, fn_name):
        result = getCpuProfile_result()
        if should_run_on_thread(self._handler.getCpuProfile):
            fut = self._loop.run_in_executor(None, self._handler.getCpuProfile, args.profileDurationInSec)
        else:
            fut = call_as_future(self._handler.getCpuProfile, self._loop, args.profileDurationInSec)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(aliveSince_args, oneway=False, asyncio=True)
    def process_aliveSince(self, args, handler_ctx, seqid, oprot, fn_name):
        result = aliveSince_result()
        if should_run_on_thread(self._handler.aliveSince):
            fut = self._loop.run_in_executor(None, self._handler.aliveSince)
        else:
            fut = call_as_future(self._handler.aliveSince, self._loop)
        fut.add_done_callback((lambda f: write_results_after_future(result, self._event_handler, handler_ctx, seqid, oprot, fn_name, {}, f)))
        return fut

    _method(reinitialize_args, oneway=True, asyncio=True)
    def process_reinitialize(self, args, handler_ctx, seqid, oprot, fn_name):
        # Oneway: no result object, nothing written back to the client.
        if should_run_on_thread(self._handler.reinitialize):
            fut = self._loop.run_in_executor(None, self._handler.reinitialize)
        else:
            fut = call_as_future(self._handler.reinitialize, self._loop)
        return fut

    _method(shutdown_args, oneway=True, asyncio=True)
    def process_shutdown(self, args, handler_ctx, seqid, oprot, fn_name):
        # Oneway: no result object, nothing written back to the client.
        if should_run_on_thread(self._handler.shutdown):
            fut = self._loop.run_in_executor(None, self._handler.shutdown)
        else:
            fut = call_as_future(self._handler.shutdown, self._loop)
        return fut
class OptionColoraxisDataclasses(Options):
    """Color-axis data-class options (Highcharts-style options wrapper).

    Each option is exposed as a property pair: the getter reads the stored
    configuration via ``self._config_get`` (with the documented default)
    and the setter writes it via ``self._config``.

    NOTE(review): the original extract had lost the ``@property`` /
    ``@<name>.setter`` decorators (a stray ``_.setter`` fragment remained),
    which made each setter silently shadow its getter. The decorators are
    restored here; the accessor bodies are unchanged.
    """

    @property
    def color(self):
        """Color of the data class; ``None`` when unset."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def from_(self):
        """Lower bound of the data class; ``None`` when unset."""
        return self._config_get(None)

    @from_.setter
    def from_(self, num: float):
        self._config(num, js_type=False)

    @property
    def name(self):
        """Name of the data class; ``None`` when unset."""
        return self._config_get(None)

    @name.setter
    def name(self, text: str):
        self._config(text, js_type=False)

    @property
    def to(self):
        """Upper bound of the data class; ``None`` when unset."""
        return self._config_get(None)

    @to.setter
    def to(self, num: float):
        self._config(num, js_type=False)
class PluginInterface_SmartFetch(object):
    """Fetch plugin exposing a WebGetRobust instance plus site-specific
    "smart" fetch entry points through the ``calls`` dispatch table.
    """
    name = 'SmartWebRequest'
    serialize = False

    def __init__(self, settings=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.settings = (settings if settings else {})
        self.log = logging.getLogger(('Main.%s' % self.name))
        self.log.info('SmartFetcher!')
        # NOTE(review): these two captcha API keys are read but never used
        # in this block — presumably consumed elsewhere or vestigial.
        twocaptcha_api = self.settings.get('captcha_solvers', {}).get('2captcha', {}).get('api_key', None)
        anticaptcha_api = self.settings.get('captcha_solvers', {}).get('anti-captcha', {}).get('api_key', None)
        self.wg = WebRequest.WebGetRobust(auto_waf=False, cloudflare=False)
        self.wg.rules['auto_waf'] = False
        # Name -> callable map consumed by the plugin host.
        self.calls = {'qidianSmartFeedFetch': self.qidianSmartFeedFetch, 'qidianProcessReleaseList': self.qidianProcessReleaseList, 'smartGetItem': self.smartGetItem, 'getpage': self.wg.getpage, 'getItem': self.wg.getItem, 'getHead': self.wg.getHead, 'getFileNameMime': self.wg.getFileNameMime, 'getFileAndName': self.wg.getFileAndName, 'addCookie': self.wg.addCookie, 'chromiumGetRenderedItem': self.wg.chromiumGetRenderedItem, 'getHeadChromium': self.wg.getHeadChromium, 'getHeadTitleChromium': self.wg.getHeadTitleChromium, 'getItemChromium': self.wg.getItemChromium}

    def qidianSmartFeedFetch(self, feed_url: str, meta):
        """Fetch a Qidian feed via the dedicated processor; returns
        ``(content, filename, mimetype)`` with an RSS mimetype."""
        proc = Processor_Qidian.QidianProcessor(wg=self.wg)
        content = proc.qidianProcessFeedUrls(feed_url, meta)
        return (content, '', 'application/rss+xml')

    def qidianProcessReleaseList(self, feed_urls):
        """Run the Qidian release-list processor over *feed_urls*."""
        proc = Processor_Qidian.QidianProcessor(wg=self.wg)
        content = proc.process_release_list(feed_urls)
        return content

    def smartGetItem(self, itemUrl: str, *args, **kwargs):
        """Fetch *itemUrl*, giving preemptive processors a chance to take
        over entirely, and letting post-processors rewrite text content.

        Returns ``(content, filename, mimetype)``.
        """
        lowerspliturl = urllib.parse.urlsplit(itemUrl.lower())
        # A preemptive processor replaces the fetch completely.
        for processor in PREEMPTIVE_PROCESSORS:
            if processor.preemptive_wants_url(lowerspliturl=lowerspliturl):
                self.log.info('Preemptive fetch handler %s wants to modify content', processor)
                return processor.premptive_handle(url=itemUrl, wg=self.wg)
        (content, fileN, mType) = self.wg.getItem(*args, itemUrl=itemUrl, **kwargs)
        if mType.startswith('text'):
            # Decode text bodies; UnicodeDammit guesses the encoding.
            if isinstance(content, bytes):
                content = bs4.UnicodeDammit(content).unicode_markup
        processed = False
        # Post-fetch processors may each rewrite the content in turn.
        for processor in PROCESSORS:
            if processor.wants_url(lowerspliturl=lowerspliturl, mimetype=mType):
                self.log.info('Processor %s wants to modify content', processor)
                processed = True
                content = processor.preprocess(url=itemUrl, lowerspliturl=lowerspliturl, mimeType=mType, content=content, wg=self.wg)
        if processed:
            self.log.info('All preprocessors completed!')
        return (content, fileN, mType)
def extractWinterleaftranslationWordpressCom(item):
    """Map a winterleaftranslation.wordpress.com feed item to a release
    message; None for non-chapter posts, False when no series tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items without chapter/volume info, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    series_map = [
        ('Rebirth Merchant', 'Rebirth Merchant: Wonderful Space Hunting for Military', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, release_type in series_map:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=release_type,
            )
    return False
@patch('evennia.utils.gametime.gametime', new=Mock(return_value=.46))
class TestCustomGameTime(BaseEvenniaTest):
    """Tests for the custom_gametime contrib with a mocked game clock.

    NOTE(review): the original extract had lost the decorator target —
    only ``('evennia...', new=Mock(...))`` remained, which is not valid
    syntax. Restored as ``unittest.mock.patch`` applied to the class; the
    mocked return value ``.46`` (and ``.77`` / ``1700.`` below) look
    truncated by extraction — confirm against the upstream test.
    """

    def tearDown(self):
        # Stop any script created by test_schedule so it does not leak.
        if hasattr(self, 'timescript'):
            self.timescript.stop()

    def test_time_to_tuple(self):
        """Seconds -> custom-unit tuple conversion."""
        self.assertEqual(custom_gametime.time_to_tuple(10000, 34, 2, 4, 6, 1), (294, 2, 0, 0, 0, 0))
        self.assertEqual(custom_gametime.time_to_tuple(10000, 3, 3, 4), (3333, 0, 0, 1))
        self.assertEqual(custom_gametime.time_to_tuple(100000, 239, 24, 3), (418, 4, 0, 2))

    def test_gametime_to_realtime(self):
        self.assertEqual(custom_gametime.gametime_to_realtime(days=2, mins=4), 86520.0)
        self.assertEqual(custom_gametime.gametime_to_realtime(format=True, days=2), (0, 0, 0, 1, 0, 0, 0))

    def test_realtime_to_gametime(self):
        self.assertEqual(custom_gametime.realtime_to_gametime(days=3, mins=34), 349680.0)
        self.assertEqual(custom_gametime.realtime_to_gametime(days=3, mins=34, format=True), (0, 0, 0, 4, 1, 8, 0))
        self.assertEqual(custom_gametime.realtime_to_gametime(format=True, days=3, mins=4), (0, 0, 0, 4, 0, 8, 0))

    def test_custom_gametime(self):
        # Relative and absolute readings agree under the mocked clock.
        self.assertEqual(custom_gametime.custom_gametime(), (102, 5, 2, 6, 21, 8, 18))
        self.assertEqual(custom_gametime.custom_gametime(absolute=True), (102, 5, 2, 6, 21, 8, 18))

    def test_real_seconds_until(self):
        self.assertEqual(custom_gametime.real_seconds_until(year=2300, month=12, day=7), .77)

    def test_schedule(self):
        self.timescript = custom_gametime.schedule(_testcallback, repeat=True, min=5, sec=0)
        self.assertEqual(self.timescript.interval, 1700.)
@router.post(
    SAAS_CONNECTOR_FROM_TEMPLATE,
    dependencies=[Security(verify_oauth_client, scopes=[SAAS_CONNECTION_INSTANTIATE])],
    response_model=SaasConnectionTemplateResponse,
)
def instantiate_connection_from_template(saas_connector_type: str, template_values: SaasConnectionTemplateValues, db: Session = Depends(deps.get_db)) -> SaasConnectionTemplateResponse:
    """Instantiate a SaaS connection from the registered connector
    template identified by *saas_connector_type*.

    NOTE(review): the original extract had lost the decorator target —
    only the argument list remained, which is not valid syntax. Restored
    as ``@router.post(...)`` following FastAPI convention; confirm the
    router object's actual name in this module.
    """
    return instantiate_connection(db, saas_connector_type, template_values)
class DefRefNode(nodes.Element):
    """Docutils element representing a cross-reference to a definition."""

    def __init__(self, kind, source_doc, text):
        # Split the raw text into the visible label and the link target,
        # then store everything as element attributes.
        label, target = parse_target_from_text(text)
        super().__init__(
            ref_kind=kind,
            ref_source_doc=source_doc,
            ref_text=label,
            ref_target=id_from_text(target),
        )

    def astext(self):
        """Return the visible reference text."""
        return self['ref_text']
class NetworkManager():
    """Reads and writes the host's network configuration (dhcpcd or
    ifupdown style) and manages wireless/AP mode state for the devices
    tracked in ``Settings.NetworkDevices``.

    NOTE(review): indentation of this class was reconstructed from a
    whitespace-collapsed extract; nesting of a few parser branches is
    inferred from the surrounding logic.
    """
    interfaces_file_name = '/etc/network/interfaces'
    resolvconf_file_name = '/etc/resolv.conf'
    dhcpcd_file_name = '/etc/dhcpcd.conf'

    def __init__(self):
        self.wpaconfig = ''            # path to wpa_supplicant.conf once discovered
        self.WifiSSID = ''             # first configured network
        self.WifiKey = ''
        self.WifiSSID2 = ''            # second configured network
        self.WifiKey2 = ''
        self.APMode = (- 1)            # AP behavior selector (see setAPconf)
        self.APModeDev = 99            # which device hosts the AP (99 = first wireless)
        self.APModeTime = 30
        self.APStopTime = (- 1)
        self.WifiAPKey = 'configrpi'   # default AP passphrase
        self.WifiAPChannel = 1
        self.WifiDevWatch = (- 1)      # device index watched for connectivity
        self.WifiDevNum = (- 1)        # device index used for AP mode
        self.dhcpcd_inuse = False      # True when /etc/dhcpcd.conf exists

    def networkinit(self):
        """Populate Settings.NetworkDevices from live interface info, then
        parse dhcpcd/interfaces/resolv.conf/wpa_supplicant config files."""
        ipi = getipinfos()
        ni = parseifconfig(ipi)
        realdevs = 0
        if ni:
            if (len(ni) > 0):
                realdevs = 0
                for i in range(len(ni)):
                    # Only interfaces with a MAC are real devices.
                    if (ni[i]['mac'] != ''):
                        if (len(Settings.NetworkDevices) <= realdevs):
                            tarr = NetworkDevice()
                            Settings.NetworkDevices.append(tarr)
                        Settings.NetworkDevices[realdevs].ip = ni[i]['ip']
                        Settings.NetworkDevices[realdevs].mask = ni[i]['mask']
                        Settings.NetworkDevices[realdevs].devicename = ni[i]['name']
                        Settings.NetworkDevices[realdevs].connected = (int(ni[i]['active']) != 0)
                        Settings.NetworkDevices[realdevs].lastconnectiontest = time.time()
                        Settings.NetworkDevices[realdevs].mac = ni[i]['mac']
                        realdevs += 1
        if os.path.exists(self.dhcpcd_file_name):
            self.dhcpcd_inuse = True
        if self.dhcpcd_inuse:
            # dhcpcd.conf: devices default to DHCP unless a static block
            # names them.
            try:
                for i in range(len(Settings.NetworkDevices)):
                    Settings.NetworkDevices[i].dhcp = True
                with open(self.dhcpcd_file_name) as f:
                    detectedcard = (- 1)
                    for line in f:
                        line = line.strip()
                        if ((len(line) > 0) and (line[0] == '#')):
                            line = ''  # comment line -> ignore
                        elif line.lower().startswith('interface'):
                            detectedcard = (- 1)
                            for i in range(len(Settings.NetworkDevices)):
                                if (Settings.NetworkDevices[i].devicename in line):
                                    detectedcard = i
                        elif (('static ip_address' in line) and (detectedcard >= 0)):
                            Settings.NetworkDevices[detectedcard].dhcp = False
                            l1 = line.split('=')
                            if (len(l1) > 1):
                                # value is "ip/prefixlen"
                                l = l1[1].split('/')
                                if (len(l) > 0):
                                    Settings.NetworkDevices[detectedcard].ip = l[0]
                                    Settings.NetworkDevices[detectedcard].mask = cidr_to_netmask(l[1])
                        elif (('static routers' in line.lower()) and (detectedcard >= 0)):
                            if (Settings.NetworkDevices[detectedcard].gw == ''):
                                l = line.split('=')
                                if (len(l) > 0):
                                    Settings.NetworkDevices[detectedcard].gw = l[1]
                        elif (('static domain_name_servers' in line.lower()) and (detectedcard >= 0)):
                            if (Settings.NetworkDevices[detectedcard].dns == ''):
                                l = line.split('=')
                                if (len(l) > 0):
                                    Settings.NetworkDevices[detectedcard].dns = l[1]
            except:
                pass
        else:
            # ifupdown style: /etc/network/interfaces (+ optional dhclient).
            dhclient = isdhclient()
            if dhclient:
                for i in range(len(Settings.NetworkDevices)):
                    Settings.NetworkDevices[i].dhcp = True
            try:
                with open(self.interfaces_file_name) as f:
                    detectedcard = (- 1)
                    for line in f:
                        line = line.strip()
                        if ((len(line) > 0) and (line[0] == '#')):
                            line = ''  # comment line -> ignore
                        elif ('iface ' in line.lower()):
                            detectedcard = (- 1)
                            for i in range(len(Settings.NetworkDevices)):
                                if (Settings.NetworkDevices[i].devicename in line):
                                    detectedcard = i
                            if (('dhcp' in line) and (detectedcard >= 0)):
                                Settings.NetworkDevices[detectedcard].dhcp = True
                            if (('static' in line) and (detectedcard >= 0)):
                                Settings.NetworkDevices[detectedcard].dhcp = False
                        elif (('address ' in line.lower()) and (detectedcard >= 0)):
                            if (Settings.NetworkDevices[detectedcard].ip == ''):
                                l = line.split(' ')
                                if (len(l) > 0):
                                    Settings.NetworkDevices[detectedcard].ip = l[1]
                        elif (('netmask ' in line.lower()) and (detectedcard >= 0)):
                            if (Settings.NetworkDevices[detectedcard].mask == ''):
                                l = line.split(' ')
                                if (len(l) > 0):
                                    Settings.NetworkDevices[detectedcard].mask = l[1]
                        elif (('gateway ' in line.lower()) and (detectedcard >= 0)):
                            if (Settings.NetworkDevices[detectedcard].gw == ''):
                                l = line.split(' ')
                                if (len(l) > 0):
                                    Settings.NetworkDevices[detectedcard].gw = l[1]
                        elif (('dns-nameservers ' in line.lower()) and (detectedcard >= 0)):
                            if (Settings.NetworkDevices[detectedcard].dns == ''):
                                l = line.split(' ')
                                if (len(l) > 0):
                                    # keep every token (index 0 included, per original)
                                    for d in range(len(l)):
                                        Settings.NetworkDevices[detectedcard].dns += (l[d] + ' ')
                        elif ('wpa-conf ' in line.lower()):
                            l = line.split(' ')
                            if (len(l) > 0):
                                self.wpaconfig = l[1]
            except:
                pass
        # Merge nameservers from resolv.conf into every device's DNS list.
        try:
            with open(self.resolvconf_file_name) as f:
                dnsservers = ''
                for line in f:
                    line = line.strip().lower()
                    if line.startswith('nameserver'):
                        dl = line.split(' ')
                        if (len(dl) > 1):
                            fdns = dl[1].strip()
                            for dc in range(len(Settings.NetworkDevices)):
                                if (fdns not in Settings.NetworkDevices[dc].dns):
                                    Settings.NetworkDevices[dc].dns += (' ' + fdns)
        except:
            pass
        # Fall back to the default wpa_supplicant config location.
        if (self.wpaconfig == ''):
            tv = '/etc/wpa_supplicant/wpa_supplicant.conf'
            if os.path.exists(tv):
                self.wpaconfig = tv
        if (self.wpaconfig != ''):
            # Extract SSID/PSK of the first two network blocks.
            try:
                netid = (- 1)
                with open(self.wpaconfig) as f:
                    for line in f:
                        line = line.strip()
                        if ('network=' in line.lower()):
                            netid += 1
                        if line.lower().strip().startswith('ssid='):
                            tstrs = line.split('=')
                            tstr = tstrs[1].replace('"', '').replace("'", '')
                            if (netid == 0):
                                self.WifiSSID = tstr
                            elif (netid == 1):
                                self.WifiSSID2 = tstr
                        if ('psk=' in line.lower()):
                            tstrs = line.split('=')
                            tstr = tstrs[1].replace('"', '').replace("'", '')
                            if (netid == 0):
                                self.WifiKey = tstr
                            elif (netid == 1):
                                self.WifiKey2 = tstr
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, str(e))
        # Final normalization pass per device.
        for dc in range(len(Settings.NetworkDevices)):
            Settings.NetworkDevices[dc].dns = Settings.NetworkDevices[dc].dns.strip()
            if ((Settings.NetworkDevices[dc].gw == '') and (Settings.NetworkDevices[dc].ip != '')):
                Settings.NetworkDevices[dc].gw = getgw(Settings.NetworkDevices[dc].devicename)
            Settings.NetworkDevices[dc].connectiontype = 0
            Settings.NetworkDevices[dc].iswireless()
        # AP mode requires hostapd + dnsmasq to be installed.
        if ((OS.is_command_found('hostapd') == False) or (OS.is_command_found('dnsmasq') == False)):
            self.APMode = (- 1)
        self.setAPconf(True)

    def setAPconf(self, startup=False):
        """Resolve which device hosts the AP / which is watched, based on
        APModeDev and APMode; optionally start/stop the AP at startup."""
        # Backfill attributes that may be missing on settings loaded from
        # older versions (AttributeError -> defaults).
        try:
            if self.APModeDev:
                pass
        except:
            self.APModeDev = 99
            self.APMode = (- 1)
        try:
            if self.APModeTime:
                pass
        except:
            self.APModeTime = 30
        try:
            if self.APStopTime:
                pass
        except:
            self.APStopTime = (- 1)
        if (self.APModeDev == 99):
            self.WifiDevNum = self.getfirstwirelessdevnum()
        elif (self.APModeDev == 0):
            self.WifiDevNum = self.getprimarydevice()
        elif (self.APModeDev == 1):
            self.WifiDevNum = self.getsecondarydevice()
        else:
            return False
        if (self.WifiDevNum >= 0):
            if (startup and (self.APMode > (- 1))):
                # Make sure a previously running AP is stopped first.
                AP_stop(self.WifiDevNum)
                Settings.NetworkDevices[Settings.NetMan.WifiDevNum].apmode = 0
            if (self.APMode == 99):
                self.WifiDevWatch = self.getfirstwirelessdevnum()
            elif (self.APMode == 0):
                self.WifiDevWatch = self.getprimarydevice()
            elif (self.APMode == 1):
                self.WifiDevWatch = self.getsecondarydevice()
            elif (self.APMode == 100):
                # Permanent AP mode.
                if startup:
                    AP_start(self.WifiDevNum)
            elif (self.APMode == (- 1)):
                self.WifiDevWatch = self.getfirstwirelessdevnum()
                if (self.WifiDevWatch == (- 1)):
                    self.WifiDevNum = self.getprimarydevice()

    def getdevicenames(self):
        """Return the list of known device names."""
        rs = []
        if (len(Settings.NetworkDevices) > 0):
            for n in range(len(Settings.NetworkDevices)):
                rs.append(Settings.NetworkDevices[n].devicename)
        return rs

    def getfirstwirelessdev(self):
        """Return the name of the first wireless device, or False."""
        try:
            pd = self.getprimarydevice()
            if Settings.NetworkDevices[pd].iswireless():
                return Settings.NetworkDevices[pd].devicename
            pd = self.getsecondarydevice()
            if Settings.NetworkDevices[pd].iswireless():
                return Settings.NetworkDevices[pd].devicename
        except:
            return False
        return False

    def getfirstwirelessdevnum(self):
        """Return the index of the first wireless device, or -1."""
        try:
            pd = self.getprimarydevice()
            if Settings.NetworkDevices[pd].iswireless():
                return pd
            pd = self.getsecondarydevice()
            if Settings.NetworkDevices[pd].iswireless():
                return pd
        except:
            return (- 1)
        return (- 1)

    def getprimarydevice(self):
        """Index of the device with netdevorder == 0 (default 0)."""
        rs = 0
        if (len(Settings.NetworkDevices) > 0):
            for n in range(len(Settings.NetworkDevices)):
                if (Settings.NetworkDevices[n].netdevorder == 0):
                    rs = n
                    break
        return rs

    def getsecondarydevice(self):
        """Index of the first device with netdevorder > 0 (default 1)."""
        rs = 1
        if (len(Settings.NetworkDevices) > 0):
            for n in range(len(Settings.NetworkDevices)):
                if (Settings.NetworkDevices[n].netdevorder > 0):
                    rs = n
                    break
        return rs

    def setdeviceorder(self, primary, secondary):
        """Mark *primary*/*secondary* device indices; others get -1."""
        if (len(Settings.NetworkDevices) > 0):
            for n in range(len(Settings.NetworkDevices)):
                if (n == primary):
                    Settings.NetworkDevices[n].netdevorder = 0
                elif (n == secondary):
                    Settings.NetworkDevices[n].netdevorder = 1
                else:
                    Settings.NetworkDevices[n].netdevorder = (- 1)

    def saveconfig(self):
        """Write the current device settings back to dhcpcd.conf or
        /etc/network/interfaces, plus resolv.conf and wpa_supplicant.conf
        as needed."""
        if (len(Settings.NetworkDevices) > 0):
            staticused = False
            wifiused = False
            if self.dhcpcd_inuse:
                # Preserve non-interface/static lines, then append our
                # static interface blocks.
                contents = []
                try:
                    with open(self.dhcpcd_file_name) as f:
                        for line in f:
                            line = line.strip()
                            if ((len(line) > 0) and (line[0] == '#')):
                                line = ''
                            elif (line.startswith('interface') or line.startswith('static ')):
                                line = ''
                            if (line != ''):
                                contents.append(line)
                except:
                    pass
                try:
                    with open(self.dhcpcd_file_name, 'w') as f:
                        for c in range(len(contents)):
                            f.write((contents[c] + '\n'))
                        for n in range(len(Settings.NetworkDevices)):
                            if ((Settings.NetworkDevices[n].dhcp == False) and (Settings.NetworkDevices[n].ip.strip() != '')):
                                staticused = True
                                f.write((('interface ' + str(Settings.NetworkDevices[n].devicename)) + '\n'))
                                f.write((((('static ip_address=' + str(Settings.NetworkDevices[n].ip)) + '/') + str(netmask_to_cidr(Settings.NetworkDevices[n].mask))) + '\n'))
                                if (Settings.NetworkDevices[n].gw.strip() != ''):
                                    f.write((('static routers=' + str(Settings.NetworkDevices[n].gw)) + '\n'))
                                if (Settings.NetworkDevices[n].dns.strip() != ''):
                                    f.write((('static domain_name_servers=' + str(Settings.NetworkDevices[n].dns)) + '\n'))
                except:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, (('Can not write ' + self.dhcpcd_file_name) + ' you may have no rights.'))
            else:
                dhclient = isdhclient()
                for i in range(len(Settings.NetworkDevices)):
                    if ((Settings.NetworkDevices[i].dhcp == False) and (Settings.NetworkDevices[i].ip.strip() != '')):
                        staticused = True
                # Rewrite /etc/network/interfaces only when we manage it.
                if ((dhclient == False) or staticused):
                    try:
                        with open(self.interfaces_file_name, 'w') as f:
                            f.write('auto lo\niface lo inet loopback\n\n')
                            for n in range(len(Settings.NetworkDevices)):
                                f.write((('allow-hotplug ' + Settings.NetworkDevices[n].devicename) + '\n'))
                                newentry = False
                                if ((dhclient == False) and (Settings.NetworkDevices[n].dhcp or (Settings.NetworkDevices[n].ip.strip() == ''))):
                                    f.write((('iface ' + Settings.NetworkDevices[n].devicename) + ' inet dhcp\n'))
                                    newentry = True
                                elif ((dhclient == False) or ((Settings.NetworkDevices[n].dhcp == False) and (Settings.NetworkDevices[n].ip.strip() != ''))):
                                    newentry = True
                                    f.write((('iface ' + Settings.NetworkDevices[n].devicename) + ' inet static\n'))
                                    if (len(Settings.NetworkDevices[n].ip) > 0):
                                        f.write(((' address ' + Settings.NetworkDevices[n].ip) + '\n'))
                                    if (len(Settings.NetworkDevices[n].mask) > 0):
                                        f.write(((' netmask ' + Settings.NetworkDevices[n].mask) + '\n'))
                                    if (len(Settings.NetworkDevices[n].gw) > 0):
                                        f.write(((' gateway ' + Settings.NetworkDevices[n].gw) + '\n'))
                                    if (len(Settings.NetworkDevices[n].dns) > 0):
                                        f.write(((' dns-nameservers ' + Settings.NetworkDevices[n].dns) + '\n'))
                                if (newentry and Settings.NetworkDevices[n].iswireless()):
                                    wifiused = True
                                    if (len(self.wpaconfig) < 1):
                                        self.wpaconfig = '/etc/wpa_supplicant/wpa_supplicant.conf'
                                    f.write(((' wpa-conf ' + self.wpaconfig) + '\n'))
                                f.write('\n')
                    except:
                        misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, (('Can not write ' + self.interfaces_file_name) + ' you may have no rights.'))
            if staticused:
                # Collect the union of all devices' DNS servers.
                dnslist = []
                for n in range(len(Settings.NetworkDevices)):
                    dl = Settings.NetworkDevices[n].dns.split(' ')
                    if (len(dl) > 0):
                        for i in range(len(dl)):
                            if (dl[i].strip() not in dnslist):
                                dnslist.append(dl[i].strip())
                try:
                    with open(self.resolvconf_file_name, 'w') as f:
                        for i in range(len(dnslist)):
                            f.write((('nameserver ' + dnslist[i]) + '\n'))
                except:
                    pass
            if wifiused:
                # Keep the existing header of wpa_supplicant.conf (lines
                # before the first network block) and rewrite the blocks.
                wpastart = ''
                headerended = False
                try:
                    with open(self.wpaconfig) as f:
                        for line in f:
                            if ('network=' in line):
                                headerended = True
                                break
                            if (headerended == False):
                                wpastart += (line.strip() + '\n')
                except:
                    pass
                if (wpastart == ''):
                    wpastart = (('ctrl_interface=DIR=/var/run/wpa_supplicant GROUP=netdev\nupdate_config=1\ncountry=' + str(getcountry())) + '\n')
                try:
                    with open(self.wpaconfig, 'w') as f:
                        f.write(wpastart)
                        if (self.WifiSSID != ''):
                            f.write((((('network={\nssid="' + str(self.WifiSSID)) + '"\nscan_ssid=1\npsk="') + str(self.WifiKey)) + '"\nkey_mgmt=WPA-PSK\n}\n'))
                        if (self.WifiSSID2 != ''):
                            f.write((((('network={\nssid="' + str(self.WifiSSID2)) + '"\nscan_ssid=1\npsk="') + str(self.WifiKey2)) + '"\nkey_mgmt=WPA-PSK\n}\n'))
                except:
                    pass
class CustomCrypto(Crypto[EntityClass]):
    """Placeholder Crypto implementation following the AEA ``Crypto``
    interface; every method is an unimplemented stub.

    NOTE(review): the original extract had lost the ``@classmethod`` /
    ``@property`` decorators, and the trailing ``encrypt`` / ``decrypt``
    stubs had no bodies (a SyntaxError). Decorators and ``pass`` bodies
    are restored here per the ``aea.crypto.base.Crypto`` contract.
    """

    @classmethod
    def generate_private_key(cls) -> EntityClass:
        """Generate a fresh private key (stub)."""
        pass

    @classmethod
    def load_private_key_from_path(cls, file_name: str, password: Optional[str] = None) -> Any:
        """Load a private key from *file_name*, optionally decrypting with
        *password* (stub)."""
        pass

    @property
    def public_key(self) -> str:
        """Public key in string form (stub)."""
        pass

    @property
    def address(self) -> str:
        """Ledger address derived from the key pair (stub)."""
        pass

    @property
    def private_key(self) -> str:
        """Private key in string form (stub)."""
        pass

    @classmethod
    def get_address_from_public_key(cls, public_key: str) -> str:
        """Derive an address from *public_key* (stub)."""
        pass

    def sign_message(self, message: bytes, is_deprecated_mode: bool = False) -> str:
        """Sign *message* and return the signature string (stub)."""
        pass

    def sign_transaction(self, transaction: Any) -> Any:
        """Sign *transaction* and return the signed transaction (stub)."""
        pass

    def recover_message(self, message: bytes, signature: str, is_deprecated_mode: bool = False) -> Tuple[Address, ...]:
        """Recover the addresses that produced *signature* (stub)."""
        pass

    def encrypt(self, password: str) -> str:
        """Encrypt the private key into keyfile JSON (stub)."""
        pass

    @classmethod
    def decrypt(cls, keyfile_json: str, password: str) -> str:
        """Decrypt *keyfile_json* back into a private key string (stub)."""
        pass
class OptionSeriesSunburstStatesSelect(Options):
    """Sunburst series ``states.select`` options (Highcharts-style options
    wrapper). Scalar options are property pairs over ``_config_get`` /
    ``_config``; nested option groups are exposed via ``_config_sub_data``.

    NOTE(review): the original extract had lost the ``@property`` /
    ``@<name>.setter`` decorators, making each setter silently shadow its
    getter. They are restored here; accessor bodies are unchanged.
    """

    @property
    def animation(self) -> 'OptionSeriesSunburstStatesSelectAnimation':
        """Nested animation options for the select state."""
        return self._config_sub_data('animation', OptionSeriesSunburstStatesSelectAnimation)

    @property
    def enabled(self):
        """Whether the select state is enabled (default ``True``)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesSunburstStatesSelectHalo':
        """Nested halo options for the select state."""
        return self._config_sub_data('halo', OptionSeriesSunburstStatesSelectHalo)

    @property
    def lineWidth(self):
        """Absolute line width in the select state; ``None`` when unset."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Line-width increment in the select state (default ``1``)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesSunburstStatesSelectMarker':
        """Nested marker options for the select state."""
        return self._config_sub_data('marker', OptionSeriesSunburstStatesSelectMarker)
def test_user_enters_variables_and_iqr_imputation_on_left_tail(df_na):
    """EndTailImputer with left-tail IQR imputation restricted to the
    given variables learns one imputation value per variable."""
    imputer = EndTailImputer(imputation_method='iqr', tail='left', fold=1.5, variables=['Age', 'Marks'])
    imputer.fit(df_na)
    # Learned values for the df_na fixture.
    assert (imputer.imputer_dict_['Age'] == (- 6.5))
    # NOTE(review): the expected value ``0.`` looks truncated in this
    # extract — confirm against the upstream test.
    assert (np.round(imputer.imputer_dict_['Marks'], 3) == np.round(0., 3))
def get_QRuuid(self):
    """Request a fresh QR-login UUID from the WeChat web API.

    Stores the uuid on ``self.uuid`` and returns it, or returns ``None``
    implicitly when the endpoint does not answer with code 200.
    """
    url = ('%s/jslogin' % config.BASE_URL)
    # NOTE(review): the redirect_uri literal was redacted in the original
    # extract (leaving an invalid dict literal); restored to the standard
    # WeChat web-login redirect page used with this appid — confirm.
    params = {
        'appid': 'wx782c26e4c19acffb',
        'fun': 'new',
        'redirect_uri': 'https://wx.qq.com/cgi-bin/mmwebwx-bin/webwxnewloginpage',
        'lang': 'zh_CN',
    }
    headers = {'User-Agent': self.user_agent}
    r = self.s.get(url, params=params, headers=headers)
    # Response body is JS of the form:
    #   window.QRLogin.code = 200; window.QRLogin.uuid = "...";
    regx = 'window.QRLogin.code = (\\d+); window.QRLogin.uuid = "(\\S+?)";'
    data = re.search(regx, r.text)
    if (data and (data.group(1) == '200')):
        self.uuid = data.group(2)
        return self.uuid
class TestSecureDescriptor(Descriptor):
    """GATT descriptor requiring a secure (paired/encrypted) link for both
    reads and writes; serves a static payload."""
    # NOTE(review): this UUID looks truncated in the extract — a full
    # 128-bit UUID normally carries a leading 8-hex-digit group as well.
    TEST_DESC_UUID = '-1234-5678-1234-56789abcdef6'

    def __init__(self, bus, index, characteristic):
        Descriptor.__init__(self, bus, index, self.TEST_DESC_UUID, ['secure-read', 'secure-write'], characteristic)

    def ReadValue(self, options):
        # Static payload: the bytes of "Test".
        return [dbus.Byte('T'), dbus.Byte('e'), dbus.Byte('s'), dbus.Byte('t')]
def test_w3_ens_setter_sets_w3_object_reference_on_ens(w3):
    """Assigning an ENS instance to ``w3.ens`` links both objects and
    mirrors ``strict_bytes_type_checking`` across them."""
    ns = ENS.from_web3(w3)
    w3.ens = ns
    # Both directions of the reference are wired up.
    assert (ns == w3.ens)
    assert (w3 == w3.ens.w3)
    # Flag is on by default and visible through the attached ENS.
    assert w3.strict_bytes_type_checking
    assert w3.ens.strict_bytes_type_checking
    assert w3.ens.w3.strict_bytes_type_checking
    # Flipping the flag on w3 propagates through the linked objects.
    w3.strict_bytes_type_checking = False
    assert (not w3.strict_bytes_type_checking)
    assert (not w3.ens.strict_bytes_type_checking)
    assert (not w3.ens.w3.strict_bytes_type_checking)
class Wrapper(ecmwfapi.ECMWFDataServer):
    """ECMWFDataServer bound to a fixed dataset name."""

    def __init__(self, dataset):
        super().__init__()
        self.dataset = dataset

    def execute(self, request, target):
        """Retrieve *request* into *target*, injecting the bound dataset."""
        payload = dict(**request)  # shallow copy; caller's dict untouched
        payload['dataset'] = self.dataset
        payload['target'] = target
        print(payload)
        return self.retrieve(payload)
def keypaths(d, separator='.', indexes=False):
    """Return the sorted list of key-paths of *d*, each keylist joined
    with *separator* (falls back to ``'.'`` when falsy)."""
    separator = (separator or '.')
    if not type_util.is_string(separator):
        raise ValueError('separator argument must be a (non-empty) string.')
    return sorted(
        separator.join(f'{key}' for key in keylist)
        for keylist in keylists(d, indexes=indexes)
    )
def test_num_mediums():
    """Simulation accepts up to MAX_NUM_MEDIUMS distinct mediums and
    raises a validation error for one more."""
    structures = []
    grid_spec = td.GridSpec.auto(wavelength=1.0)
    for i in range(MAX_NUM_MEDIUMS):
        structures.append(td.Structure(geometry=td.Box(size=(1, 1, 1)), medium=td.Medium(permittivity=(i + 1))))
    # Exactly at the limit: construction succeeds.
    _ = td.Simulation(size=(5, 5, 5), grid_spec=grid_spec, structures=structures, run_time=1e-12, boundary_spec=td.BoundarySpec.all_sides(boundary=td.Periodic()))
    with pytest.raises(pydantic.ValidationError):
        # One medium over the limit (reuses the loop variable i after the loop).
        structures.append(td.Structure(geometry=td.Box(size=(1, 1, 1)), medium=td.Medium(permittivity=(i + 2))))
        _ = td.Simulation(size=(5, 5, 5), grid_spec=grid_spec, structures=structures, run_time=1e-12)
def jumpi(computation: ComputationAPI) -> None:
    """EVM JUMPI: conditional jump to the popped destination.

    Pops (destination, condition); when the condition is non-zero, moves
    the program counter and validates the destination is a JUMPDEST at a
    valid opcode position.
    """
    dest, condition = computation.stack_pop_ints(2)
    if not condition:
        return
    code = computation.code
    code.program_counter = dest
    if code.peek() != JUMPDEST:
        raise InvalidJumpDestination('Invalid Jump Destination')
    if not code.is_valid_opcode(dest):
        raise InvalidInstruction('Jump resulted in invalid instruction')
class RegionRendererTest(TestCase):
    """Tests for RegionRenderer: basic rendering, unregistered plugins,
    subregion grouping, marks, and renderer validation."""

    def prepare(self):
        """Create a page with two RichText and two HTML plugins in the
        'main' region and return it."""
        p = Page.objects.create(page_type='standard')
        RichText.objects.create(parent=p, region='main', ordering=10, text='<p>Hello</p>')
        HTML.objects.create(parent=p, region='main', ordering=20, html='<br>')
        HTML.objects.create(parent=p, region='main', ordering=30, html='<hr>')
        RichText.objects.create(parent=p, region='main', ordering=40, text='<p>World</p>')
        return p

    def test_basic_rendering(self):
        """Template-based renderers produce the expected markup; the
        second render after adding a plugin hits the timeout cache, so
        output is unchanged."""
        p = self.prepare()
        renderer = RegionRenderer()
        renderer.register(RichText, template_renderer('renderer/richtext.html'))
        renderer.register(HTML, template_renderer('renderer/html.html'))
        regions = renderer.regions_from_item(p, timeout=1)
        self.assertHTMLEqual(regions.render('main', Context({'outer': 'x'})), '<div class="rt"><p>Hello</p></div> <br>x <hr>x <div class="rt"><p>World</p></div>')
        # New plugin added, but the cached regions (timeout=1) still
        # return the previous markup.
        RichText.objects.create(parent=p, region='main', ordering=40, text='<p>World</p>')
        regions = renderer.regions_from_item(p, timeout=1)
        self.assertHTMLEqual(regions.render('main', Context({'outer': 'x'})), '<div class="rt"><p>Hello</p></div> <br>x <hr>x <div class="rt"><p>World</p></div>')

    def test_unconfigured_exception(self):
        """Rendering contents with unregistered plugin classes raises."""
        renderer = RegionRenderer()
        contents = contents_for_item(self.prepare(), plugins=[RichText, HTML])
        regions = renderer.regions_from_contents(contents)
        with self.assertRaises(PluginNotRegisteredError):
            regions.render('main', None)
        with self.assertRaises(PluginNotRegisteredError):
            regions.render('does_not_exist', None)

    def test_subregions(self):
        """Consecutive plugins in the 'html' subregion are wrapped in a
        single container by the subregion handler."""
        class HTMLSubregionRenderer(RegionRenderer):
            def handle_html(self, plugins, context):
                return format_html('<div class="html">{}</div>', mark_safe(''.join((self.render_plugin(plugin, context) for plugin in self.takewhile_subregion(plugins, subregion='html')))))
        renderer = HTMLSubregionRenderer()
        renderer.register(RichText, (lambda plugin, context: mark_safe(plugin.text)))
        renderer.register(HTML, (lambda plugin, context: mark_safe(plugin.html)), subregion='html')
        regions = renderer.regions_from_item(self.prepare())
        self.assertHTMLEqual(regions.render('main', None), '<p>Hello</p> <div class="html"><br><hr></div> <p>World</p>')

    def test_marks(self):
        """Plugins are grouped into wrapper divs by their marks, consumed
        in runs via takewhile_mark."""
        class MarksRenderer(RegionRenderer):
            def render_region(self, *, region, contents, context):
                def _render():
                    plugins = deque(contents[region.key])
                    while plugins:
                        # Consume a run of 'html'-marked plugins, then a
                        # run of 'stuff'-marked plugins, until exhausted.
                        if (items := list(self.takewhile_mark(plugins, mark='html'))):
                            (yield format_html('<div class="html">{}</div>', mark_safe(''.join((self.render_plugin(plugin, context) for plugin in items)))))
                        if (items := list(self.takewhile_mark(plugins, mark='stuff'))):
                            (yield format_html('<div class="stuff">{}</div>', mark_safe(''.join((self.render_plugin(plugin, context) for plugin in items)))))
                return mark_safe(''.join((output for output in _render())))
        renderer = MarksRenderer()
        renderer.register(RichText, (lambda plugin, context: mark_safe(plugin.text)), marks={'stuff'})
        # marks may also be a callable evaluated per plugin.
        renderer.register(HTML, (lambda plugin, context: mark_safe(plugin.html)), marks=(lambda plugin: {'html'}))
        regions = renderer.regions_from_item(self.prepare())
        self.assertHTMLEqual(regions.render('main', None), '<div class="stuff"><p>Hello</p></div><div class="html"><br><hr></div><div class="stuff"><p>World</p></div>')

    def test_invalid_renderer(self):
        """register() validates the renderer callable's arity."""
        r = RegionRenderer()
        with self.assertRaisesRegex(ImproperlyConfigured, 'has less than the two required arguments'):
            r.register(1, (lambda plugin: ''))
@pytest.mark.flaky(reruns=MAX_FLAKY_RERUNS)
def test_run_with_install_deps():
    """`aea run --install-deps` installs dependencies and starts the agent.

    Builds a throw-away agent in a temp dir, adds the HTTP client
    connection, then runs the agent under pexpect and interrupts it once
    it reports it is processing messages.

    NOTE(review): the original decorator had lost its ``@pytest.mark``
    prefix (only ``.flaky(...)`` remained) — restored here. Also guards
    the ``finally`` block against ``process`` being unbound when
    ``PexpectWrapper`` construction itself fails.
    """
    runner = CliRunner()
    agent_name = 'myagent'
    cwd = os.getcwd()
    t = tempfile.mkdtemp()
    packages_src = os.path.join(ROOT_DIR, 'packages')
    packages_dst = os.path.join(t, 'packages')
    shutil.copytree(packages_src, packages_dst)
    os.chdir(t)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'init', '--author', AUTHOR])
    assert (result.exit_code == 0)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', agent_name])
    assert (result.exit_code == 0)
    os.chdir(Path(t, agent_name))
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'generate-key', FetchAICrypto.identifier])
    assert (result.exit_code == 0)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'add-key', FetchAICrypto.identifier])
    assert (result.exit_code == 0)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'add', '--local', 'connection', str(HTTP_ClIENT_PUBLIC_ID)])
    assert (result.exit_code == 0)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'config', 'set', 'agent.default_connection', str(HTTP_ClIENT_PUBLIC_ID)])
    assert (result.exit_code == 0)
    process = None  # so the finally block is safe if spawning fails
    try:
        process = PexpectWrapper([sys.executable, '-m', 'aea.cli', '-v', 'DEBUG', 'run', '--install-deps', '--connections', str(HTTP_ClIENT_PUBLIC_ID)], env=os.environ, maxread=10000, encoding='utf-8', logfile=sys.stdout)
        process.expect_all(['Start processing messages...'], timeout=30)
        time.sleep(1.0)
        process.control_c()  # graceful shutdown via SIGINT
        process.wait_to_complete(10)
        assert (process.returncode == 0)
    finally:
        if process is not None:
            process.terminate()
            process.wait_to_complete(10)
        os.chdir(cwd)
        try:
            shutil.rmtree(t)
        except (OSError, IOError):
            pass
def test_conversion_of_ai_standard_to_red_shift_material_refraction_properties(create_pymel, setup_scene):
    """Converting an aiStandard shader must carry every refraction/opacity
    attribute over to the generated RedShift material (within 1e-3)."""
    pm = create_pymel
    (ai_standard, ai_standard_sg) = pm.createSurfaceShader('aiStandard')
    # Source attribute values to round-trip through the converter.
    refraction_color = (1, 0.5, 0)
    refraction_weight = 0.532
    refraction_ior = 1.434
    refraction_abbe = 29.942196
    refraction_roughness = 0.8
    refraction_transmittance = (0.57, 0.34, 0.54)
    opacity_color = (0.5, 0.87, 0.12)
    ai_standard.KtColor.set(refraction_color)
    ai_standard.Kt.set(refraction_weight)
    ai_standard.FresnelUseIOR.set(0)  # use explicit IOR, not Fresnel-derived
    ai_standard.IOR.set(refraction_ior)
    ai_standard.dispersionAbbe.set(refraction_abbe)
    ai_standard.refractionRoughness.set(refraction_roughness)
    ai_standard.transmittance.set(refraction_transmittance)
    ai_standard.opacity.set(opacity_color)
    conversion_man = ai2rs.ConversionManager()
    rs_material = conversion_man.convert(ai_standard)
    # Per-channel comparison with an absolute tolerance for float storage.
    assert (rs_material.refr_color.get()[0] == pytest.approx(refraction_color[0], abs=0.001))
    assert (rs_material.refr_color.get()[1] == pytest.approx(refraction_color[1], abs=0.001))
    assert (rs_material.refr_color.get()[2] == pytest.approx(refraction_color[2], abs=0.001))
    assert (rs_material.refr_weight.get() == pytest.approx(refraction_weight, abs=0.001))
    assert (rs_material.refr_ior.get() == pytest.approx(refraction_ior, abs=0.001))
    assert (rs_material.refr_use_base_IOR.get() == 0)
    assert (rs_material.refr_abbe.get() == pytest.approx(refraction_abbe, abs=0.001))
    assert (rs_material.refr_roughness.get() == pytest.approx(refraction_roughness, abs=0.001))
    assert (rs_material.refr_transmittance.get()[0] == pytest.approx(refraction_transmittance[0], abs=0.001))
    assert (rs_material.refr_transmittance.get()[1] == pytest.approx(refraction_transmittance[1], abs=0.001))
    assert (rs_material.refr_transmittance.get()[2] == pytest.approx(refraction_transmittance[2], abs=0.001))
    assert (rs_material.opacity_color.get()[0] == pytest.approx(opacity_color[0], abs=0.001))
    assert (rs_material.opacity_color.get()[1] == pytest.approx(opacity_color[1], abs=0.001))
    assert (rs_material.opacity_color.get()[2] == pytest.approx(opacity_color[2], abs=0.001))
class Arguments(argparse.ArgumentParser):
    """Command-line argument parser for Gnofract 4D.

    Declares the fractal / output / position / obscure / debug option groups
    and post-processes the parsed namespace (formula search paths, position
    parameter overrides).
    """

    def __init__(self):
        argparse.ArgumentParser.__init__(self, description=('Gnofract 4D %s' % VERSION), epilog='To generate an image non-interactively, use:\n gnofract4d -s myimage.png -q myfractal.fct', formatter_class=argparse.RawDescriptionHelpFormatter)
        self.add_argument('paramfile', nargs='?', metavar='PARAMFILE', help='Use PARAMFILE as a parameter file')
        self.add_argument('-v', '--version', action='version', version=('Gnofract 4D %s' % VERSION), help='Show version info')
        self.add_argument('-q', '--quit', action='store_true', dest='quit_when_done', help='Exit as soon as the image is finished')
        self.add_argument('-X', '--explorer', action='store_true', dest='explore', help='Start in explorer mode')
        fractal = self.add_argument_group('Fractal Settings')
        output = self.add_argument_group('Output Settings')
        position = self.add_argument_group('Position Arguments')
        obscure = self.add_argument_group('Obscure Settings')
        debug = self.add_argument_group('Debugging and Profiling Settings (most only work with --nogui)')
        fractal.add_argument('-P', '--path', metavar='PATH', nargs='*', dest='extra_paths', default=[], help='Add PATH to the formula search path')
        fractal.add_argument('-f', '--formula', type=formula_arg, metavar='F#FUNC', default=Formula(None, None, None), help="Use formula 'FUNC' from file F")
        fractal.add_argument('--inner', type=inner_arg, metavar='F#FUNC', default=Formula(None, None, None), help="Use coloring algorithm 'FUNC' from file F")
        fractal.add_argument('--outer', type=outer_arg, metavar='F#FUNC', default=Formula(None, None, None), help="Use coloring algorithm 'FUNC' from file F")
        fractal.add_argument('--transforms', type=transforms_arg, metavar='F#FUNC1,F2#FUNC2', default=[], help="Apply transforms 'FUNC1' and 'FUNC2'")
        fractal.add_argument('-m', '--maxiter', type=int, metavar='N', default=(-1), help='Use N as maximum number of iterations')
        fractal.add_argument('--map', metavar='FILE', help='Load map file FILE')
        output.add_argument('-s', '--save', metavar='IMAGEFILE', dest='save_filename', help='Save image to IMAGEFILE after calculation')
        output.add_argument('-i', '--width', type=int, metavar='N', help='Make image N pixels wide')
        output.add_argument('-j', '--height', type=int, metavar='N', help='Make image N pixels tall')
        aa_modes = [i.name for i in fract4d_fractal.AntialiasModes]
        output.add_argument('--antialias', metavar='MODE', choices=aa_modes, help=f"Antialiasing MODE (one of {'|'.join(aa_modes)})")
        for p in POSITION_ARGUMENTS:
            # BUG FIX: positions are fractal-space coordinates; type=int rejected
            # fractional values such as '--xcenter 0.5'. float accepts every
            # previously-valid integer input as well.
            position.add_argument(('--%s' % p), type=float, metavar='N')
        obscure.add_argument('--nogui', action='store_true', help="Run with no UI (doesn't require X or GTK+)")
        obscure.add_argument('--threads', type=int, metavar='N', help='Use N threads for calculations')
        obscure.add_argument('--nopreview', action='store_false', dest='preview', help='Use the UI, but no preview window')
        debug.add_argument('--trace', action='store_true', help='Produce voluminous tracing output')
        debug.add_argument('--tracez', action='store_true', help='Print values of #z as loop runs')
        debug.add_argument('--buildonly', metavar='FILE', help='Generate code to FILE and quit')
        debug.add_argument('--usebuilt', metavar='FILE', help='Instead of using compiler, load FILE (from buildonly)')
        debug.add_argument('--singlepoint', action='store_true', help='Generate only a single point many times over (for benchmarking)')
        debug.add_argument('--cflags', dest='flags', help='Pass these FLAGS to C compiler (overrides prefs)')

    def parse_args(self, args=None):
        """Parse *args* (default: ``sys.argv[1:]``) and derive extra fields.

        Returns the argparse namespace, augmented with ``paramchanges``, a
        mapping of fract4d position-parameter indices to override values.
        """
        if args is None:
            # BUG FIX: the old default was the full sys.argv, which made
            # argparse treat the program name (argv[0]) as the PARAMFILE
            # positional argument.
            args = sys.argv[1:]
        opts = argparse.ArgumentParser.parse_args(self, args)
        # BUG FIX: copy before appending — argparse hands back the *same*
        # default list object on every parse, so appending to it directly
        # accumulated paths across successive parse_args() calls.
        opts.extra_paths = list(opts.extra_paths)
        if opts.formula.path:
            opts.extra_paths.append(opts.formula.path)
        if opts.inner.path:
            opts.extra_paths.append(opts.inner.path)
        if opts.outer.path:
            opts.extra_paths.append(opts.outer.path)
        for t in opts.transforms:
            if t.path:
                opts.extra_paths.append(t.path)
        opts.paramchanges = {}
        for a in POSITION_ARGUMENTS:
            val = getattr(opts, a)
            if (val is not None):
                pnum = getattr(fract4d_fractal.T, a.upper())
                opts.paramchanges[pnum] = val
        return opts
def test_not_devcontainer(cookies, tmp_path):
    """Baking with devcontainer disabled must not create any .devcontainer files."""
    with run_within_dir(tmp_path):
        baked = cookies.bake(extra_context={'devcontainer': 'n'})
        assert baked.exit_code == 0
        for leftover in ('devcontainer.json', 'postCreateCommand.sh'):
            assert not os.path.isfile(f'{baked.project_path}/.devcontainer/{leftover}')
.router  # NOTE(review): decorator prefixes lost in extraction — presumably @pytest.mark.router
.asyncio  # NOTE(review): presumably @pytest.mark.asyncio
class TestForgotPassword():
    """Tests for the POST /forgot-password route."""
    # NOTE(review): parameter lists below look mangled by extraction
    # ("test_app_client: user_manager: UserManagerMock" is not valid Python);
    # presumably a comma and the test_app_client type were lost — confirm
    # against the original file.

    async def test_empty_body(self, test_app_client: user_manager: UserManagerMock):
        # Missing email payload → validation error, no reset flow triggered.
        response = (await test_app_client.post('/forgot-password', json={}))
        assert (response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
        assert (user_manager.forgot_password.called is False)

    async def test_not_existing_user(self, test_app_client: user_manager: UserManagerMock):
        # Unknown email must still answer 202 (no user enumeration).
        user_manager.get_by_email.side_effect = UserNotExists()
        json = {'email': ''}
        response = (await test_app_client.post('/forgot-password', json=json))
        assert (response.status_code == status.HTTP_202_ACCEPTED)
        assert (user_manager.forgot_password.called is False)

    async def test_inactive_user(self, test_app_client: user_manager: UserManagerMock):
        # Inactive users are treated the same way: silent 202.
        user_manager.forgot_password.side_effect = UserInactive()
        json = {'email': ''}
        response = (await test_app_client.post('/forgot-password', json=json))
        assert (response.status_code == status.HTTP_202_ACCEPTED)

    async def test_existing_user(self, async_method_mocker: AsyncMethodMocker, test_app_client: user_manager: UserManagerMock):
        async_method_mocker(user_manager, 'forgot_password', return_value=None)
        json = {'email': 'king.'}
        response = (await test_app_client.post('/forgot-password', json=json))
        assert (response.status_code == status.HTTP_202_ACCEPTED)
def test_error_handler(flask_apm_client):
    """A view raising ValueError yields one captured error and an 'HTTP 5xx' transaction."""
    response = flask_apm_client.app.test_client().get('/an-error/')
    assert response.status_code == 500

    error_events = flask_apm_client.client.events[ERROR]
    assert len(error_events) == 1
    event = error_events[0]
    assert 'exception' in event
    exc = event['exception']
    assert exc['type'] == 'ValueError'
    assert exc['message'] == 'ValueError: hello world'
    assert exc['handled'] is False
    assert event['culprit'] == 'tests.contrib.flask.fixtures.an_error'

    transaction = flask_apm_client.client.events[TRANSACTION][0]
    assert transaction['result'] == 'HTTP 5xx'
    assert transaction['name'] == 'GET /an-error/'
def prepare_irfft_output(arr):
    """Build a reikna Transformation that writes a complex array out as
    interleaved real values (used when post-processing an IRFFT).

    The output type keeps ``arr``'s batch dimensions but doubles the last one
    and uses the real counterpart of ``arr``'s complex dtype; each complex
    element's .x/.y components land at last-axis indices 2*i and 2*i+1.
    """
    res = Type(dtypes.real_for(arr.dtype), (arr.shape[:(- 1)] + ((arr.shape[(- 1)] * 2),)))
    # The Mako template iterates batch indices unchanged and fans each complex
    # load out into two adjacent real stores along the last axis.
    return Transformation([Parameter('output', Annotation(res, 'o')), Parameter('input', Annotation(arr, 'i'))], '\n <%\n batch_idxs = " ".join((idx + ", ") for idx in idxs[:-1])\n %>\n ${input.ctype} x = ${input.load_same};\n ${output.store_idx}(${batch_idxs} ${idxs[-1]} * 2, x.x);\n ${output.store_idx}(${batch_idxs} ${idxs[-1]} * 2 + 1, x.y);\n ', connectors=['output'])
def test_communication():
    """Send one Default and four FIPA envelopes through a LocalNode and check
    they arrive in order with the expected protocol ids / performatives."""
    with LocalNode() as node:
        multiplexer1 = Multiplexer([_make_local_connection('multiplexer1', 'multiplexer1_public_key', node)])
        # NOTE(review): this also uses 'multiplexer1_public_key' — looks like a
        # copy-paste slip ('multiplexer2_public_key'?); confirm it is intended.
        multiplexer2 = Multiplexer([_make_local_connection('multiplexer2', 'multiplexer1_public_key', node)])
        multiplexer1.connect()
        multiplexer2.connect()
        # 1: plain BYTES message over the default protocol.
        msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'hello')
        envelope = Envelope(to='multiplexer2', sender='multiplexer1', message=msg)
        multiplexer1.put(envelope)
        # 2-5: FIPA CFP / PROPOSE / ACCEPT / DECLINE.
        msg = FipaMessage(performative=FipaMessage.Performative.CFP, dialogue_reference=(str(0), ''), message_id=1, target=0, query=Query([Constraint('something', ConstraintType('>', 1))]))
        envelope = Envelope(to='multiplexer2', sender='multiplexer1', message=msg)
        multiplexer1.put(envelope)
        msg = FipaMessage(performative=FipaMessage.Performative.PROPOSE, dialogue_reference=(str(0), ''), message_id=2, target=1, proposal=Description({}))
        envelope = Envelope(to='multiplexer2', sender='multiplexer1', message=msg)
        multiplexer1.put(envelope)
        msg = FipaMessage(performative=FipaMessage.Performative.ACCEPT, dialogue_reference=(str(0), ''), message_id=1, target=0)
        envelope = Envelope(to='multiplexer2', sender='multiplexer1', message=msg)
        multiplexer1.put(envelope)
        msg = FipaMessage(performative=FipaMessage.Performative.DECLINE, dialogue_reference=(str(0), ''), message_id=1, target=0)
        envelope = Envelope(to='multiplexer2', sender='multiplexer1', message=msg)
        multiplexer1.put(envelope)
        # Drain the receiving multiplexer in the same order the envelopes were sent.
        envelope = multiplexer2.get(block=True, timeout=1.0)
        msg = envelope.message
        assert (envelope.protocol_specification_id == DefaultMessage.protocol_specification_id)
        assert (msg.content == b'hello')
        envelope = multiplexer2.get(block=True, timeout=1.0)
        msg = envelope.message
        assert (envelope.protocol_specification_id == FipaMessage.protocol_specification_id)
        assert (msg.performative == FipaMessage.Performative.CFP)
        envelope = multiplexer2.get(block=True, timeout=1.0)
        msg = envelope.message
        assert (envelope.protocol_specification_id == FipaMessage.protocol_specification_id)
        assert (msg.performative == FipaMessage.Performative.PROPOSE)
        envelope = multiplexer2.get(block=True, timeout=1.0)
        msg = envelope.message
        assert (envelope.protocol_specification_id == FipaMessage.protocol_specification_id)
        assert (msg.performative == FipaMessage.Performative.ACCEPT)
        envelope = multiplexer2.get(block=True, timeout=1.0)
        msg = envelope.message
        assert (envelope.protocol_specification_id == FipaMessage.protocol_specification_id)
        assert (msg.performative == FipaMessage.Performative.DECLINE)
        multiplexer1.disconnect()
        multiplexer2.disconnect()
def common_gen_profiler(func_attrs, workdir, profiler_filename, dim_info_dict, src_template, problem_args_template, problem_args_template_cutlass_3x=None, bias_ptr_arg=None, extra_code=''):
    """Delegate to ``common.gen_profiler`` with the shared defaults for this op
    family: split-k enabled and an output address calculator strided on
    ``*b_dim0``."""
    addr_calculator = common.DEFAULT_OUTPUT_ADDR_CALCULATOR.render(stride_dim='*b_dim0')
    profiler_kwargs = dict(
        func_attrs=func_attrs,
        workdir=workdir,
        profiler_filename=profiler_filename,
        dim_info_dict=dim_info_dict,
        src_template=src_template,
        problem_args_template=problem_args_template,
        problem_args_template_cutlass_3x=problem_args_template_cutlass_3x,
        args_parser_template=ARGS_PARSER_TEMPLATE,
        support_split_k=True,
        output_addr_calculator=addr_calculator,
        bias_ptr_arg=bias_ptr_arg,
        extra_code=extra_code,
    )
    return common.gen_profiler(**profiler_kwargs)
def cbFun(errorIndication, errorStatus, errorIndex, varBindTable, **context):
    """SNMP walk callback: report errors or print each received varbind row.

    Returns ``context['nextVarBinds']`` (``None`` when absent) so the caller
    can decide whether to keep walking.
    """
    if errorIndication:
        # Transport/engine-level failure.
        print(errorIndication)
    elif errorStatus:
        # PDU-level error; point at the offending varbind when errorIndex is set.
        # BUG FIX: the original referenced the undefined name ``varBinds`` here,
        # raising NameError whenever an SNMP error status was reported.
        print(('%s at %s' % (errorStatus.prettyPrint(), ((errorIndex and varBindTable[(int(errorIndex) - 1)][0]) or '?'))))
    else:
        for varBindRow in varBindTable:
            for varBind in varBindRow:
                print(' = '.join([x.prettyPrint() for x in varBind]))
    return context.get('nextVarBinds')
def vocoder(model, mel):
    """Synthesize a waveform from a mel spectrogram with ``model``'s
    sampling-based eval forward pass, optionally on CUDA."""
    if use_cuda:
        model = model.cuda()
    model.eval()
    # Add a leading batch dimension before feeding the network.
    batch = Variable(torch.from_numpy(np.array(mel))).unsqueeze(0)
    if use_cuda:
        batch = batch.cuda()
    return model.forward_eval_sampling1(batch)
def test_basics():
    """Serialize a two-task workflow and a launch plan; sanity-check the
    resulting protobuf specs."""
    # NOTE(review): t1/t2/my_wf carry no @task/@workflow decorators here, yet
    # get_serializable() is asked for container args and workflow templates —
    # the decorators were presumably lost in extraction; confirm.
    def t1(a: int) -> typing.NamedTuple('OutputsBC', t1_int_output=int, c=str):
        return ((a + 2), 'world')

    def t2(a: str, b: str) -> str:
        return (b + a)

    def my_wf(a: int, b: str) -> (int, str):
        (x, y) = t1(a=a)
        d = t2(a=y, b=b)
        return (x, d)
    wf_spec = get_serializable(OrderedDict(), serialization_settings, my_wf)
    # Two inputs/outputs, one node per task call.
    assert (len(wf_spec.template.interface.inputs) == 2)
    assert (len(wf_spec.template.interface.outputs) == 2)
    assert (len(wf_spec.template.nodes) == 2)
    assert (wf_spec.template.id.resource_type == identifier_models.ResourceType.WORKFLOW)
    # Fast-serialization mode switches the container entrypoint.
    ssettings = serialization_settings.new_builder().with_fast_serialization_settings(FastSerializationSettings(enabled=True)).build()
    task_spec = get_serializable(OrderedDict(), ssettings, t1)
    assert ('pyflyte-execute' in task_spec.template.container.args)
    lp = LaunchPlan.create('testlp', my_wf)
    lp_model = get_serializable(OrderedDict(), serialization_settings, lp)
    assert (lp_model.id.name == 'testlp')
('model')  # NOTE(review): decorator prefix lost in extraction — presumably a checker registration such as @publish.publisher('model')
def check_out_of_space_uvs(progress_controller=None):
    """Fail if any mesh has UVs outside the [0, 10] x [0, ...] UDIM range.

    Skipped for Representation takes. Raises RuntimeError listing (up to
    MAX_NODE_DISPLAY) offending transform nodes, which are also selected.
    """
    if (progress_controller is None):
        progress_controller = ProgressControllerBase()
    v = staging.get('version')
    # Representations are generated data; do not validate their UVs.
    if (v and (Representation.repr_separator in v.take_name)):
        progress_controller.complete()
        return
    all_meshes = pm.ls(type='mesh')
    mesh_count = len(all_meshes)
    progress_controller.maximum = mesh_count
    nodes_with_out_of_space_uvs = []
    try:
        for node in all_meshes:
            # NOTE(review): ``v`` is reassigned here, clobbering the version
            # object above; also only u is sorted while v[0] is checked raw,
            # and v's upper bound is never tested — confirm this is intended.
            (u, v) = node.getUVs()
            u = sorted(u)
            if ((u[0] < 0.0) or (u[(- 1)] > 10.0) or (v[0] < 0.0)):
                nodes_with_out_of_space_uvs.append(node)
            progress_controller.increment()
    except (IndexError, RuntimeError) as e:
        # Surface which node broke the UV query before re-raising.
        print(('node: %s' % node))
        raise RuntimeError(('%s \n node: %s' % (e, node)))
    progress_controller.complete()
    if len(nodes_with_out_of_space_uvs):
        tra_nodes = list(map((lambda x: x.getParent()), nodes_with_out_of_space_uvs))
        pm.select(tra_nodes)
        raise RuntimeError(('There are nodes which have a UV value bigger than <b>10</b>:\n <br><br>%s' % '<br>'.join(list(map((lambda x: x.name()), tra_nodes[:MAX_NODE_DISPLAY])))))
def submit_distances(argv, tdb, distances):
    """Interactively confirm and submit star distances to EDSM, then merge any
    returned coordinates into the local database.

    Returns a list of system names EDSM asked to re-check, or None.
    """
    system = argv.origin
    cmdr = argv.cmdr
    mode = ('TEST' if argv.test else 'Live')
    print()
    print('System:', system)
    print('Database:', mode)
    print('Distances:')
    for ref in distances:
        print(' {}: {:.02f} ly'.format(ref['name'], ref['dist']))
    print()
    # Manual confirmation gate before touching the remote service.
    ok = input('Does this look correct (y/n)? ')
    if (ok != 'y'):
        print('Stopped')
        return
    print()
    print('Submitting {} {}'.format(mode, system))
    sub = StarSubmission(star=system, commander=cmdr, refs=distances, test=argv.test)
    resp = sub.submit()
    result = StarSubmissionResult(star=system, response=resp)
    print(str(result))
    if result.valid:
        trilats = set()
        # For every system EDSM resolved, update or insert local coordinates.
        for sysName in result.systems.keys():
            (code, coord) = result.systems[sysName]
            sysName = sysName.upper()
            if isinstance(coord, (list, tuple)):
                (x, y, z) = coord
                system = tdb.systemByName.get(sysName, None)
                if system:
                    tdb.updateLocalSystem(system, sysName, x, y, z)
                else:
                    tdb.addLocalSystem(sysName, x, y, z)
        if result.recheck:
            return list(result.recheck.keys())
    return None
class Strategy(GenericStrategy):
    """Generic data-collection strategy backed by a local SQLite database.

    On construction it creates the database/table and seeds ten random rows;
    ``collect_from_data_source`` serializes all rows to JSON.
    """

    def __init__(self, **kwargs) -> None:
        self._db_engine = db.create_engine('sqlite:///genericdb.db')
        self._tbl = self.create_database_and_table()
        self.insert_data()
        super().__init__(**kwargs)

    def collect_from_data_source(self) -> Dict[str, str]:
        """Fetch every row and return it as ``{'data': <json list of tuples>}``."""
        connection = self._db_engine.connect()
        try:
            query = db.select([self._tbl])
            result_proxy = connection.execute(query)
            data_points = result_proxy.fetchall()
        finally:
            # BUG FIX: the connection was never closed, leaking a pooled
            # connection per call.
            connection.close()
        return {'data': json.dumps(list(map(tuple, data_points)))}

    def create_database_and_table(self):
        """Create (if missing) and return the ``data`` table."""
        metadata = db.MetaData()
        # NOTE: 'temprature' spelling is kept as-is — it is the persisted
        # column name and part of the stored schema.
        tbl = db.Table('data', metadata, db.Column('timestamp', db.Integer()), db.Column('temprature', db.String(255), nullable=False))
        metadata.create_all(self._db_engine)
        return tbl

    def insert_data(self):
        """Seed the table with ten (now, random 10-24) readings."""
        connection = self._db_engine.connect()
        try:
            for _ in range(10):
                query = db.insert(self._tbl).values(timestamp=time.time(), temprature=str(random.randrange(10, 25)))
                connection.execute(query)
        finally:
            # BUG FIX: close the connection instead of leaking it.
            connection.close()
('/', methods=['POST', 'GET'])  # NOTE(review): decorator prefix lost in extraction — presumably @app.route
def infer_route():
    """Run one model inference for the request's 'x' payload.

    Returns (json {class: score}, 200) on success, 400 on bad input or
    unexpected errors, 500 on an output-arity mismatch. Reloads the model
    every 25 requests (presumably to pick up retrained weights — confirm).
    """
    global reqs, reload_lock, prj, classes, num_outputs
    try:
        xin = get_input(request)
        if (xin is None):
            return ("missing 'x' parameter", 400)
        # The lock serializes prediction against concurrent model reloads.
        with reload_lock:
            x = prj.logic.prepare_input(xin)
            y = prj.model.predict(np.array(x))[0].tolist()
            num_y = len(y)
            resp = {}
            if (num_y != num_outputs):
                return (('expected %d output classes, got inference with %d results' % (num_outputs, num_y)), 500)
            resp = {classes[i]: y[i] for i in range(num_y)}
            reqs += 1
            if (reqs >= 25):
                prj.reload_model()
                reqs = 0
        return (jsonify(resp), 200)
    except Exception as e:
        log.exception('error while running inference')
        return (str(e), 400)
def main():
    """CLI entry point: parse arguments and delegate to ``do_job``."""
    descr = 'A script which processes Ragout\'s debug output and draws some fancy breakpoint graph pictures. It requires a contigs alignment on "true" reference in nucmer coords format. Also, Ragout should be run with --debug key to provide necessary output. Please note, that one should point to debug dir with a chosen synteny block size (for example ragout_debug/5000). This script scipt draws only non-trivial breakpoint graph components.'
    arg_parser = argparse.ArgumentParser(description=descr)
    arg_parser.add_argument('nucmer_coords', metavar='nucmer_coords', help="path to contigs alignment on 'true' reference")
    arg_parser.add_argument('debug_dir', metavar='debug_dir', help='path to debug dir with chosen synteny block size')
    # Both optional flags share the same store-const/off-by-default shape.
    boolean_flags = (
        ('circular', 'indicates that genomes are circular (like bacterial)'),
        ('predicted', 'draw only graph components which have predicted edges'),
    )
    for flag_name, flag_help in boolean_flags:
        arg_parser.add_argument('--' + flag_name, action='store_const', metavar=flag_name, dest=flag_name, default=False, const=True, help=flag_help)
    parsed = arg_parser.parse_args()
    do_job(parsed.nucmer_coords, parsed.debug_dir, parsed.circular, parsed.predicted)
class PollForDecisionTaskResponse():
    """Response payload for a PollForDecisionTask call.

    NOTE(review): these are annotated class attributes defaulting to None; a
    @dataclass-style decorator may have been stripped in extraction — confirm.
    """
    # Opaque token identifying this decision task.
    task_token: bytes = None
    # Execution / type of the workflow the task belongs to.
    workflow_execution: WorkflowExecution = None
    workflow_type: WorkflowType = None
    # History event ids bracketing this decision.
    previous_started_event_id: int = None
    started_event_id: int = None
    attempt: int = None
    backlog_count_hint: int = None
    # Workflow history page plus pagination token for subsequent pages.
    history: History = None
    next_page_token: bytes = None
    query: WorkflowQuery = None
    workflow_execution_task_list: TaskList = None
    # Schedule/start timestamps (units not visible here — confirm epoch nanos).
    scheduled_timestamp: int = None
    started_timestamp: int = None
def as_numpy_func(ds, options=None):
    """Wrap dataset ``ds`` as ``f(i) -> numpy array``, honoring option flags.

    None and callables pass through unchanged. Options (merged over
    ``ds.get_options()``): 'to_numpy_kwargs', 'offset' (index shift),
    'constant' (always sample index 0), 'normalize' (affine a*x + b via
    ``normalize_a_b``).
    """
    if ((ds is None) or callable(ds)):
        return ds

    def _options(new):
        # Copy the dataset's own options, then overlay caller-supplied ones.
        o = {k: v for (k, v) in ds.get_options().items()}
        if new:
            o.update(new)
        return o
    options = _options(options)
    to_numpy_kwargs = options.get('to_numpy_kwargs', {})

    def take_i(i):
        return ds[i].to_numpy(**to_numpy_kwargs)
    if (('offset' in options) and options['offset']):
        offset = options['offset']

        # Rebind take_i so indexing is shifted by the configured offset.
        def take_i(i):
            return ds[(i + offset)].to_numpy(**to_numpy_kwargs)
    func = take_i
    if (('constant' in options) and options['constant']):

        # Constant fields: ignore i and always return the first sample.
        def first(func):

            def wrap(i):
                return func(0)
            return wrap
        func = first(func)
    if ('normalize' in options):
        (a, b) = normalize_a_b(options['normalize'], ds)

        # Apply the affine normalization on top of whatever func is by now.
        def normalize(func):

            def wrap(i):
                return ((a * func(i)) + b)
            return wrap
        func = normalize(func)
    return func
class OptionSeriesSolidgaugeOnpointConnectoroptions(Options):
    """Generated wrapper for solidgauge ``onPoint.connectorOptions`` settings.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped in
    extraction, so as written each setter silently replaces its getter —
    confirm against the generator's other output.
    """

    def dashstyle(self):
        return self._config_get(None)

    def dashstyle(self, text: str):
        self._config(text, js_type=False)

    def stroke(self):
        return self._config_get(None)

    def stroke(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        # Default connector width is 1.
        return self._config_get(1)

    def width(self, num: float):
        self._config(num, js_type=False)
class RequirementType(Enum):
    """Kinds of tracked requirement items; ``as_string`` yields the display label."""
    master_requirement = 1
    initial_requirement = 2
    design_decision = 3
    requirement = 4

    def as_string(self):
        # Every kind renders as 'requirement' except design decisions.
        labels = {
            RequirementType.master_requirement: 'requirement',
            RequirementType.initial_requirement: 'requirement',
            RequirementType.design_decision: 'design decision',
            RequirementType.requirement: 'requirement',
        }
        assert self in labels
        return labels[self]
class OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingRate(Options):
    """Generated wrapper for columnpyramid sonification speech-rate mapping.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @<name>.setter decorators appear stripped in extraction —
    as written each setter silently replaces its getter. Confirm.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def set_subject_field(doc):
    """Build the HTML 'subject' summary for a patient-history document.

    For each configured history field: table fields render as a bold label,
    line break, then the formatted rows; scalar fields render inline. Empty
    fields are skipped. Returns the accumulated HTML string.
    """
    from frappe.utils.formatters import format_value
    meta = frappe.get_meta(doc.doctype)
    subject = ''
    patient_history_fields = get_patient_history_fields(doc)
    for entry in patient_history_fields:
        fieldname = entry.get('fieldname')
        if ((entry.get('fieldtype') == 'Table') and doc.get(fieldname)):
            # Child tables get their own formatter and a line break after the label.
            formatted_value = get_formatted_value_for_table_field(doc.get(fieldname), meta.get_field(fieldname))
            subject += (((frappe.bold((_(entry.get('label')) + ':')) + '<br>') + cstr(formatted_value)) + '<br>')
        elif doc.get(fieldname):
            formatted_value = format_value(doc.get(fieldname), meta.get_field(fieldname), doc)
            subject += ((frappe.bold((_(entry.get('label')) + ':')) + cstr(formatted_value)) + '<br>')
    return subject
class OptionPlotoptionsSankeySonificationTracksMappingTremoloDepth(Options):
    """Generated wrapper for sankey sonification tremolo-depth mapping.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @<name>.setter decorators appear stripped in extraction —
    as written each setter silently replaces its getter. Confirm.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Generated wrapper for sunburst sonification instrument time mapping.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @<name>.setter decorators appear stripped in extraction —
    as written each setter silently replaces its getter. Confirm.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TracingOptions(ctypes.Union):
    """Byte-sized option bitfield, accessible either bit-by-bit (through the
    anonymous ``bit`` struct's fields) or wholesale via ``asByte``."""
    _anonymous_ = ('bit',)
    _fields_ = [('bit', TracingOptions_bits), ('asByte', ctypes.c_uint8)]

    def __init__(self, **kwargs) -> None:
        """Initialize with any named bit fields supplied as keyword arguments."""
        super(TracingOptions, self).__init__()
        for (k, v) in kwargs.items():
            setattr(self, k, v)

    def __eq__(self, other):
        # Equality compares the whole raw byte.
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable — confirm they are never used in sets/dict keys.
        return (self.asByte == other.asByte)
def add_eggs_on_path(working_set, path, on_error=None):
    """Find plugin distributions on *path* and add them to *working_set*.

    Args:
        working_set: a ``pkg_resources.WorkingSet``-like object.
        path: list of directories to scan for eggs.
        on_error: optional callable receiving the list of load errors; when
            omitted, errors raise RuntimeError instead. Loadable
            distributions are added either way.

    Raises:
        RuntimeError: if some distributions failed to load and no
            ``on_error`` handler was supplied.
    """
    environment = pkg_resources.Environment(path)
    distributions, errors = working_set.find_plugins(environment)
    if errors:  # idiomatic truthiness instead of len(errors) > 0
        if on_error:
            on_error(errors)
        else:
            raise RuntimeError('Cannot find eggs %s' % errors)
    for distribution in distributions:
        working_set.add(distribution)
class _RegexMatcher(_Matcher):
    """Matcher that tests values against a compiled regular expression."""

    def __init__(self, tag, content, lower):
        _Matcher.__init__(self, tag, content, lower)
        self._re = re.compile(content)

    def _matches(self, value):
        # Empty/None values never match; non-string values (TypeError from
        # re.search) are treated as non-matches as well.
        if not value:
            return False
        try:
            found = self._re.search(value)
        except TypeError:
            return False
        return found is not None
def push_to_hub(self, *, repo_path_or_name: Optional[str]=None, repo_url: Optional[str]=None, commit_message: Optional[str]='Add model', organization: Optional[str]=None, private: bool=False, api_endpoint: Optional[str]=None, use_auth_token: Optional[Union[(bool, str)]]=None, git_user: Optional[str]=None, git_email: Optional[str]=None, config: Optional[dict]=None, skip_lfs_files: bool=False, repo_id: Optional[str]=None, token: Optional[str]=None, branch: Optional[str]=None, create_pr: Optional[bool]=None, allow_patterns: Optional[Union[(List[str], str)]]=None, ignore_patterns: Optional[Union[(List[str], str)]]=None) -> str:
    """Upload the model (weights, config, and a generated model card) to the
    Hugging Face Hub.

    Two flows are supported:
      * ``repo_id`` given — HTTP snapshot upload via ``HfApi.upload_folder``
        from a temporary save directory; returns the upload-folder result.
      * legacy — git-based flow through ``Repository`` using
        ``repo_path_or_name`` / ``repo_url``; returns the ``git_push`` result.

    Raises:
        ValueError: if neither ``repo_path_or_name`` nor ``repo_url`` is
            given in the legacy flow, or no auth token can be resolved.
    """
    if (repo_id is not None):
        # Modern HTTP flow: save into a temp dir and upload as one snapshot.
        (token, _) = hf_api._validate_or_retrieve_token(token)
        api = HfApi(endpoint=api_endpoint)
        api.create_repo(repo_id=repo_id, repo_type='model', token=token, private=private, exist_ok=True)
        with tempfile.TemporaryDirectory() as tmp:
            saved_path = (Path(tmp) / repo_id)
            self.save_pretrained(saved_path, config=config)
            export_hf_model_card(export_dir=saved_path, labels=self.labels, backbone_config=self.config['backbone'], neck_config=self.config['neck'], preprocessor_config=self.config['preprocessor'], head_config=self.config['head'], total_model_params=self.num_total_params, total_trainable_model_params=self.num_trainable_params)
            return api.upload_folder(repo_id=repo_id, repo_type='model', token=token, folder_path=saved_path, commit_message=commit_message, revision=branch, create_pr=create_pr, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns)
    # Legacy git-based flow.
    if ((repo_path_or_name is None) and (repo_url is None)):
        raise ValueError('You need to specify a `repo_path_or_name` or a `repo_url`.')
    if ((use_auth_token is None) and (repo_url is None)):
        token = HfFolder.get_token()
        if (token is None):
            raise ValueError('You must login to the Hugging Face hub on this computer by typing `huggingface-cli login` and entering your credentials to use `use_auth_token=True`. Alternatively, you can pass your own token as the `use_auth_token` argument.')
    elif isinstance(use_auth_token, str):
        token = use_auth_token
    else:
        token = None
    if (repo_path_or_name is None):
        repo_path_or_name = repo_url.split('/')[(- 1)]
    if ((repo_url is None) and (not os.path.exists(repo_path_or_name))):
        repo_id = Path(repo_path_or_name).name
        if organization:
            repo_id = f'{organization}/{repo_id}'
        repo_url = HfApi(endpoint=api_endpoint).create_repo(repo_id=repo_id, token=token, private=private, repo_type=None, exist_ok=True)
    repo = Repository(repo_path_or_name, clone_from=repo_url, use_auth_token=use_auth_token, git_user=git_user, git_email=git_email, skip_lfs_files=skip_lfs_files)
    repo.git_pull(rebase=True)
    self.save_pretrained(repo_path_or_name, config=config)
    # BUG FIX: the original passed ``saved_path`` here, a name bound only in
    # the repo_id branch above — this legacy path always raised NameError.
    # The card belongs in the local repo checkout being committed.
    export_hf_model_card(export_dir=repo_path_or_name, labels=self.labels, backbone_config=self.config['backbone'], neck_config=self.config['neck'], preprocessor_config=self.config['preprocessor'], head_config=self.config['head'], total_model_params=self.num_total_params, total_trainable_model_params=self.num_trainable_params)
    repo.git_add(auto_lfs_track=True)
    repo.git_commit(commit_message)
    return repo.git_push()
def get_pdf_notes_ordered_by_size(order: str) -> List[SiacNote]:
    """Return all PDF notes, ordered by their page count.

    Args:
        order: SQL sort direction, must be 'asc' or 'desc' (case-insensitive).

    Raises:
        ValueError: for any other ``order`` value — the parameter is
            interpolated into the SQL text, so it is whitelisted here instead
            of being passed through verbatim (injection surface otherwise).
    """
    if order.lower() not in ('asc', 'desc'):
        raise ValueError("order must be 'asc' or 'desc', got %r" % (order,))
    conn = _get_connection()
    try:
        res = conn.execute(f"select notes.* from notes join read on notes.id == read.nid where lower(notes.source) like '%.pdf' group by notes.id order by max(read.pagestotal) {order}").fetchall()
    finally:
        # Close even if the query raises (the original leaked the connection
        # on error).
        conn.close()
    return _to_notes(res)
def _hashimoto(header_hash: bytes, nonce: bytes, dataset_size: int, fetch_dataset_item: Callable[([int], Tuple[(int, ...)])]) -> Dict[(str, bytes)]:
    """Core ethash hashimoto loop.

    Seeds a mix from keccak512(header || reversed nonce), performs ACCESSES
    rounds of FNV-combined dataset fetches, compresses the mix 4 words -> 1,
    and returns {'mix_digest', 'result'} where result =
    keccak256(seed || mix_digest). Exact word order matters throughout — the
    code is kept byte-identical.
    """
    # Number of hash-sized words making up the mix.
    mix_hashes = (MIX_BYTES // HASH_BYTES)
    # The nonce is hashed little-endian.
    nonce_le = bytes(reversed(nonce))
    seed_hash = keccak_512((header_hash + nonce_le))
    seed_head = from_le_bytes(seed_hash[:4])
    rows = (dataset_size // 128)
    # Replicate the seed words across the full mix width.
    mix = (le_bytes_to_uint32_sequence(seed_hash) * mix_hashes)
    for i in range(ACCESSES):
        new_data: Tuple[(int, ...)] = ()
        # FNV of round index and a rotating mix word selects the dataset row.
        parent = (fnv((i ^ seed_head), mix[(i % len(mix))]) % rows)
        for j in range((MIX_BYTES // HASH_BYTES)):
            new_data += fetch_dataset_item(((2 * parent) + j))
        mix = fnv_hash(mix, new_data)
    # Compress: fold every 4 consecutive mix words into one via chained FNV.
    compressed_mix = []
    for i in range(0, len(mix), 4):
        compressed_mix.append(fnv(fnv(fnv(mix[i], mix[(i + 1)]), mix[(i + 2)]), mix[(i + 3)]))
    mix_digest = le_uint32_sequence_to_bytes(compressed_mix)
    result = keccak_256((seed_hash + mix_digest))
    return {'mix_digest': mix_digest, 'result': result}
def extractTongtongdaydreamsWordpressCom(item):
    """Parse a release item from the tongtongdaydreams WordPress feed.

    Returns a release message for known tags, None for non-chapter/preview
    posts, and False when no tag matches.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    # Skip posts without chapter/volume info and skip previews.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    releases = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in releases.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_load_images():
    """Both config formats must resolve to the same fully-qualified image map."""
    here = os.path.dirname(os.path.realpath(__file__))
    expected = {'abc': 'docker.io/abc', 'xyz': 'docker.io/xyz:latest'}
    for relative_path in ('configs/images.config', 'configs/sample.yaml'):
        cfg = get_config_file(os.path.join(here, relative_path))
        assert Images.get_specified_images(cfg) == expected
def test_celery_config_override() -> None:
    """Overrides on the config object must flow through to the Celery app conf."""
    config = get_config()
    overrides = {
        'event_queue_prefix': 'overridden_fides_worker',
        'task_default_queue': 'overridden_fides',
    }
    for key, value in overrides.items():
        config.celery[key] = value
    celery_app = _create_celery(config=config)
    for key, value in overrides.items():
        assert celery_app.conf[key] == value
.skipif((MID_MEMORY > memory), reason='Travis has too less memory to run it.')  # NOTE(review): decorator prefixes lost in extraction — presumably @pytest.mark.skipif / @pytest.mark.parametrize below
.parametrize('matrix', [matrix])
.parametrize('outFileName', [outfile_aggregate_plots])
.parametrize('BED', [BED])
.parametrize('mode', ['intra-chr'])
.parametrize('ran', ['50000:900000'])
.parametrize('BED2', [BED2])
.parametrize('numberOfBins', [30])
.parametrize('transform', sorted(['total-counts', 'z-score', 'obs/exp', 'none']))
.parametrize('operationType', sorted(['sum', 'mean', 'median']))
.parametrize('outFilePrefixMatrix', ['outFilePrefix'])
.parametrize('outFileContactPairs', ['outFileContactPairs'])
.parametrize('diagnosticHeatmapFile', [diagnosticHeatmapFile])
.parametrize('kmeans', [4])
.parametrize('hclust', [4])
.parametrize('howToCluster', sorted(['full', 'center', 'diagonal']))
.parametrize('chromosomes', ['X'])
.parametrize('colorMap', ['RdYlBu_r'])
.parametrize('plotType', sorted(['2d', '3d']))
.parametrize('vMin', [0.01])
.parametrize('vMax', [1.0])
def test_aggregate_contacts_three(capsys, matrix, outFileName, BED, mode, ran, BED2, numberOfBins, transform, operationType, outFilePrefixMatrix, outFileContactPairs, diagnosticHeatmapFile, kmeans, hclust, howToCluster, chromosomes, colorMap, plotType, vMin, vMax):
    """Smoke-test hicAggregateContacts with clustering + diagnostic heatmap.

    NOTE(review): only matrix/BED/mode/BED2/outFileName/diagnosticHeatmapFile
    actually feed the CLI string — the remaining parametrized fixtures only
    multiply the run matrix.
    """
    args = '--matrix {} --BED {} --outFileName {out_agg} --numberOfBins 30 --mode {} --range 50000:900000 --hclust 4 --diagnosticHeatmapFile {out_heat} --howToCluster diagonal --disable_bbox_tight --BED2 {}'.format(matrix, BED, mode, BED2, out_agg=outFileName.name, out_heat=diagnosticHeatmapFile.name)
    # compute() retries the CLI main up to 5 times (flaky plotting backend).
    compute(hicexplorer.hicAggregateContacts.main, args.split(), 5)
    os.remove(outFileName.name)
class HistoryEvent(betterproto.Message):
    """A single event in a workflow execution's history.

    The first five fields are common metadata; every remaining field belongs
    to the proto oneof ``attributes``, so exactly one of them is populated
    depending on ``event_type``.
    """

    # --- common metadata ---
    event_id: int = betterproto.int64_field(1)
    event_time: datetime = betterproto.message_field(2)
    event_type: v1enums.EventType = betterproto.enum_field(3)
    version: int = betterproto.int64_field(4)
    task_id: int = betterproto.int64_field(5)
    # --- oneof "attributes": exactly one is set, matching event_type ---
    workflow_execution_started_event_attributes: 'WorkflowExecutionStartedEventAttributes' = betterproto.message_field(6, group='attributes')
    workflow_execution_completed_event_attributes: 'WorkflowExecutionCompletedEventAttributes' = betterproto.message_field(7, group='attributes')
    workflow_execution_failed_event_attributes: 'WorkflowExecutionFailedEventAttributes' = betterproto.message_field(8, group='attributes')
    workflow_execution_timed_out_event_attributes: 'WorkflowExecutionTimedOutEventAttributes' = betterproto.message_field(9, group='attributes')
    workflow_task_scheduled_event_attributes: 'WorkflowTaskScheduledEventAttributes' = betterproto.message_field(10, group='attributes')
    workflow_task_started_event_attributes: 'WorkflowTaskStartedEventAttributes' = betterproto.message_field(11, group='attributes')
    workflow_task_completed_event_attributes: 'WorkflowTaskCompletedEventAttributes' = betterproto.message_field(12, group='attributes')
    workflow_task_timed_out_event_attributes: 'WorkflowTaskTimedOutEventAttributes' = betterproto.message_field(13, group='attributes')
    workflow_task_failed_event_attributes: 'WorkflowTaskFailedEventAttributes' = betterproto.message_field(14, group='attributes')
    activity_task_scheduled_event_attributes: 'ActivityTaskScheduledEventAttributes' = betterproto.message_field(15, group='attributes')
    activity_task_started_event_attributes: 'ActivityTaskStartedEventAttributes' = betterproto.message_field(16, group='attributes')
    activity_task_completed_event_attributes: 'ActivityTaskCompletedEventAttributes' = betterproto.message_field(17, group='attributes')
    activity_task_failed_event_attributes: 'ActivityTaskFailedEventAttributes' = betterproto.message_field(18, group='attributes')
    activity_task_timed_out_event_attributes: 'ActivityTaskTimedOutEventAttributes' = betterproto.message_field(19, group='attributes')
    timer_started_event_attributes: 'TimerStartedEventAttributes' = betterproto.message_field(20, group='attributes')
    timer_fired_event_attributes: 'TimerFiredEventAttributes' = betterproto.message_field(21, group='attributes')
    activity_task_cancel_requested_event_attributes: 'ActivityTaskCancelRequestedEventAttributes' = betterproto.message_field(22, group='attributes')
    activity_task_canceled_event_attributes: 'ActivityTaskCanceledEventAttributes' = betterproto.message_field(23, group='attributes')
    timer_canceled_event_attributes: 'TimerCanceledEventAttributes' = betterproto.message_field(24, group='attributes')
    marker_recorded_event_attributes: 'MarkerRecordedEventAttributes' = betterproto.message_field(25, group='attributes')
    workflow_execution_signaled_event_attributes: 'WorkflowExecutionSignaledEventAttributes' = betterproto.message_field(26, group='attributes')
    workflow_execution_terminated_event_attributes: 'WorkflowExecutionTerminatedEventAttributes' = betterproto.message_field(27, group='attributes')
    workflow_execution_cancel_requested_event_attributes: 'WorkflowExecutionCancelRequestedEventAttributes' = betterproto.message_field(28, group='attributes')
    workflow_execution_canceled_event_attributes: 'WorkflowExecutionCanceledEventAttributes' = betterproto.message_field(29, group='attributes')
    request_cancel_external_workflow_execution_initiated_event_attributes: 'RequestCancelExternalWorkflowExecutionInitiatedEventAttributes' = betterproto.message_field(30, group='attributes')
    request_cancel_external_workflow_execution_failed_event_attributes: 'RequestCancelExternalWorkflowExecutionFailedEventAttributes' = betterproto.message_field(31, group='attributes')
    external_workflow_execution_cancel_requested_event_attributes: 'ExternalWorkflowExecutionCancelRequestedEventAttributes' = betterproto.message_field(32, group='attributes')
    workflow_execution_continued_as_new_event_attributes: 'WorkflowExecutionContinuedAsNewEventAttributes' = betterproto.message_field(33, group='attributes')
    start_child_workflow_execution_initiated_event_attributes: 'StartChildWorkflowExecutionInitiatedEventAttributes' = betterproto.message_field(34, group='attributes')
    start_child_workflow_execution_failed_event_attributes: 'StartChildWorkflowExecutionFailedEventAttributes' = betterproto.message_field(35, group='attributes')
    child_workflow_execution_started_event_attributes: 'ChildWorkflowExecutionStartedEventAttributes' = betterproto.message_field(36, group='attributes')
    child_workflow_execution_completed_event_attributes: 'ChildWorkflowExecutionCompletedEventAttributes' = betterproto.message_field(37, group='attributes')
    child_workflow_execution_failed_event_attributes: 'ChildWorkflowExecutionFailedEventAttributes' = betterproto.message_field(38, group='attributes')
    child_workflow_execution_canceled_event_attributes: 'ChildWorkflowExecutionCanceledEventAttributes' = betterproto.message_field(39, group='attributes')
    child_workflow_execution_timed_out_event_attributes: 'ChildWorkflowExecutionTimedOutEventAttributes' = betterproto.message_field(40, group='attributes')
    child_workflow_execution_terminated_event_attributes: 'ChildWorkflowExecutionTerminatedEventAttributes' = betterproto.message_field(41, group='attributes')
    signal_external_workflow_execution_initiated_event_attributes: 'SignalExternalWorkflowExecutionInitiatedEventAttributes' = betterproto.message_field(42, group='attributes')
    signal_external_workflow_execution_failed_event_attributes: 'SignalExternalWorkflowExecutionFailedEventAttributes' = betterproto.message_field(43, group='attributes')
    external_workflow_execution_signaled_event_attributes: 'ExternalWorkflowExecutionSignaledEventAttributes' = betterproto.message_field(44, group='attributes')
    upsert_workflow_search_attributes_event_attributes: 'UpsertWorkflowSearchAttributesEventAttributes' = betterproto.message_field(45, group='attributes')
def load_nifti(filename):
    """Load a NIfTI file and return (data, affine, header, dims).

    The image data is reshaped to 2-D (voxels x volumes); a 3-D image gets a
    singleton fourth dimension, so ``dims`` always has length 4.

    Exits the process (sys.exit(1)) when the file cannot be read; raises
    Exception for data that is not 3- or 4-dimensional.
    """
    logger = logging.getLogger(__name__)
    try:
        nifti = nib.load(filename)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; the user-facing behavior is unchanged.
        logger.error('Cannot read {}'.format(filename))
        sys.exit(1)
    # NOTE(review): get_affine/get_header/get_data are removed in nibabel >= 5;
    # kept for compatibility with the pinned version — consider migrating to
    # .affine / .header / .get_fdata().
    affine = nifti.get_affine()
    header = nifti.get_header()
    dims = list(nifti.shape)
    if len(dims) < 3:
        raise Exception('\n Your data has less than 3 dimensions!\n ')
    if len(dims) > 4:
        raise Exception('\n Your data is at least a penteract (over 4 dimensions!)\n ')
    nifti = nifti.get_data()
    if len(dims) == 3:
        dims.append(1)
    # Flatten the spatial axes into one voxel axis: (x*y*z, t).
    nifti = nifti.reshape((dims[0] * dims[1]) * dims[2], dims[3])
    return (nifti, affine, header, dims)
class ControlBox(Gtk.Box, providers.ProviderHandler):
    """Box hosting the Mini Mode playback controls.

    Controls are registered on the 'minimode-controls' provider point and
    enabled via the 'plugin/minimode/selected_controls' setting. The box
    offers dict-like access by control name and rebuilds itself lazily:
    setting changes while hidden only mark it dirty; the rebuild happens on
    the next show.
    """
    __gsignals__ = {'show': 'override'}

    def __init__(self):
        Gtk.Box.__init__(self)
        providers.ProviderHandler.__init__(self, 'minimode-controls')
        self.__dirty = True     # True => control list must be rebuilt on next show
        self.__controls = {}    # control name -> instantiated widget
        event.add_ui_callback(self.on_option_set, 'plugin_minimode_option_set')

    def destroy(self):
        """Destroy all hosted controls.

        NOTE(review): does not chain up to Gtk.Box.destroy — confirm intended.
        """
        for control in self.__controls.values():
            control.destroy()

    def __contains__(self, item):
        # Membership test by control name.
        if (item in self.__controls):
            return True
        return False

    def __getitem__(self, name):
        return self.__controls[name]

    def __setitem__(self, name, control):
        # Replace any existing control of the same name before packing the new one.
        if (name in self.__controls):
            self.remove(self.__controls[name])
            del self.__controls[name]
        self.__controls[name] = control
        self.pack_start(control, False, True, 0)
        control.show_all()

    def __delitem__(self, name):
        self.__controls[name].destroy()
        del self.__controls[name]

    def update(self):
        """Synchronize hosted controls with the user's selection setting."""
        selected_controls = settings.get_option('plugin/minimode/selected_controls', ['previous', 'play_pause', 'next', 'playlist_button', 'progress_bar', 'restore'])
        added_controls = [c for c in selected_controls if (c not in self)]
        for name in added_controls:
            try:
                provider = self.get_provider(name)()
            except Exception:
                # A broken provider is skipped (and dropped from the working
                # selection) instead of taking the whole box down.
                logger.exception('Failed to add control provider "%s"', name)
                selected_controls.remove(name)
            else:
                self[name] = provider
        # NOTE(review): iterating `self` relies on the box yielding its child
        # control widgets (each exposing .name) — confirm.
        removed_controls = [c.name for c in self if (c.name not in selected_controls)]
        for name in removed_controls:
            del self[name]
        # Reorder children to match the configured order.
        for name in selected_controls:
            self.reorder_child(self[name], (- 1))

    def do_show(self):
        # Lazy rebuild: only when something changed while hidden.
        if self.__dirty:
            self.update()
            self.__dirty = False
        Gtk.Box.do_show(self)

    def on_provider_removed(self, provider):
        if (provider.name in self):
            del self[provider.name]

    def on_option_set(self, event, settings, option):
        # Defer the rebuild to the main loop when visible; otherwise just mark dirty.
        if (option == 'plugin/minimode/selected_controls'):
            if self.props.visible:
                GLib.idle_add(self.update)
            else:
                self.__dirty = True
class DefaultDataGenerator(DataGenerator):
    """Materializes operator args/kwargs on a target device from a benchmark
    config. With ``cache=True``, data from the previous call is reused and
    only the entries whose config values changed are re-materialized.
    """

    def __init__(self, cache: bool=False):
        super(DefaultDataGenerator, self).__init__()
        self.cache = cache          # reuse materialized data across calls when True
        self.prev_config = None     # config seen on the previous get_data() call
        self.op_args = []           # cached positional args (cache mode only)
        self.op_kwargs = {}         # cached keyword args (cache mode only)

    def _find_updates(self, config: Dict[(str, Any)]):
        """Return (arg indices, kwarg keys) whose value changed vs. the previous config.

        (None, None) on the first call means "materialize everything".
        NOTE(review): assumes the current config has the same args length and
        kwargs keys as the previous one — confirm the caller guarantees this.
        """
        if (not self.prev_config):
            return (None, None)
        arg_updates = set()
        kwarg_updates = set()
        if ('args' in config):
            for (i, vals) in enumerate(zip(self.prev_config['args'], config['args'])):
                if (vals[0] != vals[1]):
                    arg_updates.add(i)
        if ('kwargs' in config):
            for key in self.prev_config['kwargs']:
                if (self.prev_config['kwargs'][key] != config['kwargs'][key]):
                    kwarg_updates.add(key)
        logger.debug(f' prev: {self.prev_config}')
        logger.debug(f' curr: {config}')
        logger.debug(f' updt: {arg_updates} {kwarg_updates}')
        return (arg_updates, kwarg_updates)

    def _generate_data(self, config: Dict[(str, Any)], device: str, op_args: List[Any], op_kwargs: Dict[(str, Any)], arg_updates: Set[Any], kwarg_updates: Set[Any]):
        """Fill op_args/op_kwargs with materialized data.

        When an update set is provided (cache mode), only its entries are
        rebuilt; otherwise every entry is materialized from scratch.
        """
        if (len(op_args) == 0):
            # First (or non-cached) call: pre-size the positional slot list.
            op_args = ([None] * len(config['args']))
        if ('args' in config):
            for (i, arg) in enumerate(config['args']):
                if arg_updates:
                    if (i in arg_updates):
                        op_args[i] = materialize_arg(arg, device)
                else:
                    op_args[i] = materialize_arg(arg, device)
        if ('kwargs' in config):
            for (key, arg) in config['kwargs'].items():
                if kwarg_updates:
                    if (key in kwarg_updates):
                        op_kwargs[key] = materialize_arg(arg, device)
                else:
                    op_kwargs[key] = materialize_arg(arg, device)
        return (op_args, op_kwargs)

    def get_data(self, config: Dict[(str, Any)], device: str):
        """Return (args, kwargs) for the op described by ``config`` on ``device``."""
        if (not config):
            return ([], {})
        elif self.cache:
            # Diff against the previous config, then refresh the cached data in place.
            (arg_updates, kwarg_updates) = self._find_updates(config)
            self.prev_config = copy.deepcopy(config)
            return self._generate_data(config, device, self.op_args, self.op_kwargs, arg_updates, kwarg_updates)
        else:
            op_args = []
            op_kwargs = {}
            return self._generate_data(config, device, op_args, op_kwargs, None, None)
# NOTE(review): the decorator's "@registry.reg" prefix was missing (the line was
# a bare string in parentheses, which registers nothing); restored per the
# backend registration pattern — confirm the file imports `registry`.
@registry.reg('cuda.gemm_rrr_bias.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate CUDA source for a row-row-row GEMM with a bias epilogue.

    Pulls element types from the op's input/output dtypes, renders the
    problem-args and extra-code snippets, and delegates to the shared
    common.gen_function with split-k support enabled.
    """
    input_addr_calculator = gemm_rrr.get_input_addr_calculator(func_attrs)
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    output_ndims = len(func_attrs['output_accessors'][0].original_shapes)
    backend_spec = CUDASpec()
    elem_input_type = backend_spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_output_type = backend_spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    problem_args = PROBLEM_ARGS_TEMPLATE.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type)
    extra_code = EXTRA_CODE.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type)
    return common.gen_function(
        func_attrs=func_attrs,
        src_template=common_bias.SRC_TEMPLATE,
        exec_cond_template=exec_cond_template,
        problem_args=problem_args,
        input_ndims=input_ndims,
        weight_ndims=weight_ndims,
        output_ndims=output_ndims,
        dim_info_dict=dim_info_dict,
        support_split_k=True,
        input_addr_calculator=input_addr_calculator,
        output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(stride_dim='N', output_accessor=func_attrs['output_accessors'][0]),
        extra_code=extra_code,
    )
class TestValidationErrorCode:
    """Django ValidationError codes must survive conversion into DRF ErrorDetail."""

    # NOTE(review): the "@pytest.mark." prefix of every parametrize decorator was
    # missing (bare ".parametrize(...)" lines do not parse); restored below.

    @pytest.mark.parametrize('use_list', (False, True))
    def test_validationerror_code_with_msg(self, use_list):

        class ExampleSerializer(serializers.Serializer):
            password = serializers.CharField()

            def validate_password(self, obj):
                err = DjangoValidationError('exc_msg %s', code='exc_code', params=('exc_param',))
                if use_list:
                    err = DjangoValidationError([err])
                raise err

        serializer = ExampleSerializer(data={'password': 123})
        serializer.is_valid()
        assert serializer.errors == {'password': ['exc_msg exc_param']}
        assert serializer.errors['password'][0].code == 'exc_code'

    @pytest.mark.parametrize('use_list', (False, True))
    def test_validationerror_code_with_msg_including_percent(self, use_list):

        class ExampleSerializer(serializers.Serializer):
            password = serializers.CharField()

            def validate_password(self, obj):
                err = DjangoValidationError('exc_msg with %', code='exc_code')
                if use_list:
                    err = DjangoValidationError([err])
                raise err

        serializer = ExampleSerializer(data={'password': 123})
        serializer.is_valid()
        assert serializer.errors == {'password': ['exc_msg with %']}
        assert serializer.errors['password'][0].code == 'exc_code'

    @pytest.mark.parametrize('code', (None, 'exc_code'))
    @pytest.mark.parametrize('use_list', (False, True))
    def test_validationerror_code_with_dict(self, use_list, code):

        class ExampleSerializer(serializers.Serializer):

            def validate(self, obj):
                if code is None:
                    err = DjangoValidationError({'email': 'email error'})
                else:
                    err = DjangoValidationError({'email': DjangoValidationError('email error', code=code)})
                if use_list:
                    err = DjangoValidationError([err])
                raise err

        serializer = ExampleSerializer(data={})
        serializer.is_valid()
        expected_code = code if code else 'invalid'
        if use_list:
            # Wrapping in a list loses the field mapping, so the error lands
            # under non_field_errors.
            assert serializer.errors == {'non_field_errors': [exceptions.ErrorDetail(string='email error', code=expected_code)]}
        else:
            assert serializer.errors == {'email': ['email error']}
            assert serializer.errors['email'][0].code == expected_code

    @pytest.mark.parametrize('code', (None, 'exc_code'))
    def test_validationerror_code_with_dict_list_same_code(self, code):

        class ExampleSerializer(serializers.Serializer):

            def validate(self, obj):
                if code is None:
                    raise DjangoValidationError({'email': ['email error 1', 'email error 2']})
                raise DjangoValidationError({'email': [DjangoValidationError('email error 1', code=code), DjangoValidationError('email error 2', code=code)]})

        serializer = ExampleSerializer(data={})
        serializer.is_valid()
        expected_code = code if code else 'invalid'
        assert serializer.errors == {'email': [exceptions.ErrorDetail(string='email error 1', code=expected_code), exceptions.ErrorDetail(string='email error 2', code=expected_code)]}
def generic_activation_jit(op_name: Optional[str]=None) -> Handle:
    """Build a jit op handler that counts one activation per output element.

    If ``op_name`` is given, the count is returned wrapped in a Counter keyed
    by that name; otherwise the raw element count is returned.
    """
    def handler(i: Any, outputs: List[Any]) -> Union[(typing.Counter[str], Number)]:
        element_count = prod(get_shape(outputs[0]))
        if op_name is None:
            return element_count
        return Counter({op_name: element_count})
    return handler
class TestJSONBoundField:
    def test_as_form_fields(self):
        """Malformed JSON submitted via a form must round-trip unchanged into the bound field."""
        class TestSerializer(serializers.Serializer):
            json_field = serializers.JSONField()

        malformed = '{"some": ["json"}'
        form_data = QueryDict(mutable=True)
        form_data.update({'json_field': malformed})
        serializer = TestSerializer(data=form_data)
        assert serializer.is_valid() is False
        assert serializer['json_field'].as_form_field().value == malformed
def gen_function_call(func_attrs, backend_spec, indent=' '):
    """Render the FUNC_CALL_TEMPLATE invocation for this roi-align-style op.

    Pointer names come from the input/output tensors; spatial dimension
    pointers are taken by address from the NHWC shape symbols.
    """
    inputs = func_attrs['inputs']
    x, rois = inputs[0], inputs[1]
    y = func_attrs['outputs'][0]
    x_shape = x._attrs['shape']
    y_shape = y._attrs['shape']

    def addr(dim):
        # C address-of expression for a shape symbol.
        return '&' + dim._attrs['name']

    def cpp_bool(key):
        return 'true' if func_attrs[key] else 'false'

    return FUNC_CALL_TEMPLATE.render(
        func_name=func_attrs['name'],
        in_ptr=x._attrs['name'],
        rois_ptr=rois._attrs['name'],
        out_ptr=y._attrs['name'],
        p_batch=addr(x_shape[0]),
        p_in_ch=addr(x_shape[3]),
        p_in_h=addr(x_shape[1]),
        p_in_w=addr(x_shape[2]),
        p_out_batch=addr(y_shape[0]),
        p_out_h=addr(y_shape[1]),
        p_out_w=addr(y_shape[2]),
        sampling_ratio=func_attrs['sampling_ratio'],
        spatial_scale=func_attrs['spatial_scale'],
        position_sensitive=cpp_bool('position_sensitive'),
        continuous_coordinate=cpp_bool('continuous_coordinate'),
        backend_spec=backend_spec,
        indent=indent,
    )
class CaseList(QWidget):
    """Widget listing the available storage cases, with add/remove controls."""

    def __init__(self, config: ErtConfig, notifier: ErtNotifier, ensemble_size: int):
        self.ert_config = config
        self.ensemble_size = ensemble_size
        self.notifier = notifier
        QWidget.__init__(self)
        layout = QVBoxLayout()
        self._list = QListWidget(self)
        self._default_selection_mode = self._list.selectionMode()
        self._list.setSelectionMode(QAbstractItemView.NoSelection)
        layout.addWidget(QLabel('Available cases:'))
        layout.addWidget(self._list, stretch=1)
        self._addRemoveWidget = AddRemoveWidget(self.addItem, self.removeItem)
        self._addRemoveWidget.enableRemoveButton(False)
        layout.addWidget(self._addRemoveWidget)
        self._title = 'New keyword'
        self._description = 'Enter name of keyword:'
        self.setLayout(layout)
        notifier.ertChanged.connect(self.updateList)
        self.updateList()

    # NOTE(review): restored the @property decorator — addItem/updateList access
    # `self.storage.ensembles` without calling, which only works as a property.
    @property
    def storage(self) -> StorageAccessor:
        """The notifier's storage accessor."""
        return self.notifier.storage

    def addItem(self):
        """Prompt for a case name and create a new experiment + ensemble for it."""
        dialog = ValidatedDialog('New case', 'Enter name of new case:', [x.name for x in self.storage.ensembles], parent=self)
        new_case_name = dialog.showAndTell()
        if new_case_name != '':
            ensemble = self.storage.create_experiment(parameters=self.ert_config.ensemble_config.parameter_configuration, responses=self.ert_config.ensemble_config.response_configuration, observations=self.ert_config.observations).create_ensemble(name=new_case_name, ensemble_size=self.ensemble_size)
            self.notifier.set_current_case(ensemble)
            self.notifier.ertChanged.emit()

    def removeItem(self):
        """Removal is intentionally unsupported; inform the user."""
        message = 'Support for removal of items has not been implemented!'
        QMessageBox.information(self, 'Not implemented!', message)

    def updateList(self):
        """Repopulate the list widget, newest case first."""
        case_list = sorted(self.storage.ensembles, key=(lambda x: x.started_at), reverse=True)
        self._list.clear()
        for case in case_list:
            item = QListWidgetItem(f'{case.name} - {case.started_at} ({case.id})')
            item.setData(Qt.UserRole, case)
            self._list.addItem(item)
class OptionPlotoptionsTreegraphOnpointPosition(Options):
    """Position options for treegraph on-point labels
    (Highcharts ``plotOptions.treegraph.onPoint.position``)."""

    # NOTE(review): each pair below was two undecorated defs with the same name
    # (the second silently shadowed the first, leaving no getter). Restored the
    # @property/@<name>.setter pattern used throughout this options layer.

    @property
    def offsetX(self):
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def _make_fpds_transaction(id, award_id, obligation, action_date, recipient_duns, recipient_name, recipient_hash, piid=None):
    """Create an FPDS TransactionSearch test record via model-bakery."""
    fields = {
        'transaction_id': id,
        'is_fpds': True,
        'award_id': award_id,
        'federal_action_obligation': obligation,
        'generated_pragmatic_obligation': obligation,
        'action_date': action_date,
        'recipient_unique_id': recipient_duns,
        'recipient_name': recipient_name,
        'recipient_hash': recipient_hash,
        'piid': piid,
    }
    baker.make('search.TransactionSearch', **fields)
def extract_with_context(lst, pred, before_context, after_context):
    """Extract runs of elements matching ``pred`` from ``lst``, each padded
    with up to ``before_context`` leading and ``after_context`` trailing
    non-matching elements (adjacent runs merge, grep -B/-A style).

    Returns a list of ``(sublist, usedfirst, usedlast)`` tuples, where the
    flags record whether the sublist touches the start / end of ``lst``.
    """
    rval = []
    start = 0
    length = 0
    while (start < len(lst)):
        usedfirst = False
        usedlast = False
        # Absorb up to before_context leading non-matching elements into the window.
        while (((start + length) < len(lst)) and (length < (before_context + 1)) and (not pred(lst[(start + length)]))):
            length += 1
        # Slide the fixed-size window forward past further non-matching
        # elements until its far edge hits a match (or the list ends).
        while (((start + length) < len(lst)) and (not pred(lst[(start + length)]))):
            start += 1
        if ((start + length) == len(lst)):
            break
        # Consume the run of matches, pulling up to after_context trailing
        # non-matching elements in after each match.
        while (((start + length) < len(lst)) and pred(lst[(start + length)])):
            extendlength = 1
            while ((extendlength < (after_context + 1)) and (((start + length) + extendlength) < len(lst)) and (not pred(lst[((start + length) + extendlength)]))):
                extendlength += 1
            length += extendlength
            # Bridge a short non-matching gap to a following match so that
            # nearby runs merge into one extract.
            if (((start + length) < len(lst)) and (not pred(lst[(start + length)]))):
                extendlength = 1
                while ((extendlength < before_context) and (((start + length) + extendlength) < len(lst)) and (not pred(lst[((start + length) + extendlength)]))):
                    extendlength += 1
                if ((((start + length) + extendlength) < len(lst)) and pred(lst[((start + length) + extendlength)])):
                    length += extendlength
        # Emit the accumulated window with boundary flags.
        if ((length > 0) and ((start + length) <= len(lst))):
            if (start == 0):
                usedfirst = True
            if ((start + length) == len(lst)):
                usedlast = True
            rval.append((lst[start:(start + length)], usedfirst, usedlast))
        start += length
        length = 0
    return rval
class TestAggregator:
    """Aggregator behavior across aggregation types, including multiprocess runs."""

    # NOTE(review): the "@pytest.mark." prefixes were missing from the
    # parametrize decorators (bare ".parametrize(...)" lines do not parse);
    # restored below.

    def test_zero_weights(self) -> None:
        """zero_weights() must reset the accumulated weight sum."""
        model = create_model_with_value(0)
        ag = Aggregator(module=model, aggregation_type=AggregationType.AVERAGE)
        weight = 1.0
        steps = 5
        for _ in range(steps):
            delta = create_model_with_value(1.0)
            ag.apply_weight_to_update(delta=delta, weight=weight)
            ag.add_update(delta=delta, weight=weight)
        assertEqual(ag.sum_weights.item(), (weight * steps))
        ag.zero_weights()
        assertEqual(ag.sum_weights.item(), 0)

    @pytest.mark.parametrize('agg_type,num_process,num_models,expected_value', [(AggregationType.AVERAGE, 4, 10, 1.0), (AggregationType.WEIGHTED_AVERAGE, 4, 10, 1.0), (AggregationType.WEIGHTED_SUM, 4, 10, 55.0), (AggregationType.SUM, 4, 10, 10.0)])
    def test_multiprocess_aggregation(self, agg_type, num_process, num_models, expected_value):
        model = create_model_with_value(0)
        ag = Aggregator(module=model, aggregation_type=agg_type)
        results = run_multiprocess_aggregation_test(ag, num_processes=num_process, num_models=num_models)
        for result in results:
            assertAlmostEqual(result, expected_value, places=5)

    @pytest.mark.parametrize('agg_type,expected_value', [(AggregationType.AVERAGE, 1.0), (AggregationType.WEIGHTED_AVERAGE, 1.0), (AggregationType.WEIGHTED_SUM, 55.0), (AggregationType.SUM, 10.0)])
    def test_aggregate(self, agg_type, expected_value):
        model = create_model_with_value(0)
        ag = Aggregator(module=model, aggregation_type=agg_type)
        ag.zero_weights()
        for i in range(10):
            delta = create_model_with_value(1.0)
            weight = (i + 1)
            ag.apply_weight_to_update(delta=delta, weight=weight)
            ag.add_update(delta=delta, weight=weight)
        model = ag.aggregate()
        error_msg = model_parameters_equal_to_value(model, expected_value)
        assertEmpty(error_msg, msg=error_msg)

    @pytest.mark.parametrize('agg_type,expected_value', [(AggregationType.AVERAGE, 10.0), (AggregationType.WEIGHTED_AVERAGE, 55.0), (AggregationType.WEIGHTED_SUM, 55.0), (AggregationType.SUM, 10.0)])
    def test_add_update(self, agg_type, expected_value):
        model = create_model_with_value(0)
        ag = Aggregator(module=model, aggregation_type=agg_type)
        ag.zero_weights()
        for i in range(10):
            delta = create_model_with_value(1.0)
            weight = (i + 1)
            ag.apply_weight_to_update(delta=delta, weight=weight)
            ag.add_update(delta=delta, weight=weight)
        assertEqual(ag.sum_weights.item(), expected_value)

    @pytest.mark.parametrize('agg_type,dist_op', [(AggregationType.AVERAGE, OperationType.SUM), (AggregationType.WEIGHTED_AVERAGE, OperationType.SUM), (AggregationType.WEIGHTED_SUM, OperationType.SUM), (AggregationType.SUM, OperationType.SUM)])
    def test_distributed_op_aggregation(self, agg_type, dist_op):
        model = create_model_with_value(0)
        ag = Aggregator(module=model, aggregation_type=agg_type)
        results = run_multiprocess_aggregation_test(ag, num_processes=4, num_models=10, distributed_op=dist_op)
        for (r, v) in zip(results, results[1:]):
            assertNotEqual(r, v)
def determine_template(mode: str):
    """Resolve a render mode ('auto', 'inline', 'nbextension') to its HTML template.

    'auto' picks 'inline' inside Google Colab and 'nbextension' elsewhere.
    Raises ValueError for any other mode.
    """
    try:
        from IPython import get_ipython
        is_colab = type(get_ipython()).__module__.startswith('google.colab')
    except ImportError:
        is_colab = False
    render_mode = mode
    if mode == 'auto':
        render_mode = 'inline' if is_colab else 'nbextension'
    if render_mode == 'inline':
        return inline_iframe_html_template
    if render_mode == 'nbextension':
        return inline_template
    raise ValueError(f'Unexpected value {mode}/{render_mode} for mode')
def import_local_module(project_dir, module_name):
    """Import ``module_name`` from ``project_dir``, whether it is a plain
    ``.py`` file or a package directory.

    Returns the imported module, or None if neither a matching file nor
    directory exists. Raises FileNotFoundError when ``project_dir`` is missing.
    """
    if (not os.path.isdir(project_dir)):
        raise FileNotFoundError(('Project dir does not exist: %s' % project_dir))
    pathname_dir = os.path.join(project_dir, module_name)
    pathname_file = (pathname_dir + '.py')
    if os.path.isfile(pathname_file):
        # Single-file module: load it directly from its path.
        spec = importlib.util.spec_from_file_location(module_name, pathname_file)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
    elif os.path.isdir(pathname_dir):
        # Package directory: make the project importable, then import by name.
        # NOTE(review): sys.path is mutated and never restored — confirm acceptable.
        sys.path.insert(0, project_dir)
        try:
            return importlib.import_module(module_name, package='main')
        except ImportError as e:
            # Fallback: retry with just the basename of a nested module path.
            # NOTE(review): inserting `module_name` (a module name, not a
            # directory path) into sys.path looks suspicious — verify intent;
            # possibly `pathname_dir`'s parent was meant.
            sys.path.insert(0, module_name)
            module_name = os.path.basename(module_name)
            return importlib.import_module(module_name, package='main')
    else:
        return None
class ws_listener(tcp_handler.tcp_handler):
    """Listening-socket handler that accepts websocket client connections.

    Subclasses override ws_accept()/ws_release() to handle new connections
    and cleanup.
    """

    def init_func(self, creator, listen, is_ipv6=False):
        """Create, configure and bind the listening socket; return its fileno."""
        if is_ipv6:
            fa = socket.AF_INET6
        else:
            fa = socket.AF_INET
        s = socket.socket(fa, socket.SOCK_STREAM)
        if is_ipv6:
            # Restrict an IPv6 socket to IPv6 only (no v4-mapped addresses).
            s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.set_socket(s)
        self.bind(listen)
        return self.fileno

    def after(self):
        # Start listening and register for read (accept) events.
        self.listen(10)
        self.register(self.fileno)
        self.add_evt_read(self.fileno)

    def tcp_accept(self):
        # Drain the accept queue until it would block.
        while 1:
            try:
                (cs, caddr) = self.accept()
            except BlockingIOError:
                break
            self.ws_accept(cs, caddr)
    ''

    def ws_accept(self, cs, caddr):
        """Hook for subclasses: handle a newly accepted connection."""
        pass

    def tcp_delete(self):
        # Let the subclass clean up first, then deregister and close.
        self.ws_release()
        self.unregister(self.fileno)
        self.close()

    def ws_release(self):
        """Hook for subclasses: clean up before the listener is closed."""
def test_key_file_encryption_decryption(fetchai_private_key_file):
    """Encrypt/decrypt must round-trip; bad passwords and garbage blobs must raise."""
    crypto = FetchAICrypto(fetchai_private_key_file)
    original_key = Path(fetchai_private_key_file).read_text()
    password = uuid4().hex
    encrypted = crypto.encrypt(password)
    assert encrypted != original_key
    assert crypto.decrypt(encrypted, password) == original_key
    with pytest.raises(ValueError, match='Decrypt error! Bad password?'):
        crypto.decrypt(encrypted, 'BaD_PassWord')
    with pytest.raises(ValueError, match='Bad encrypted key format!'):
        crypto.decrypt('some_data' * 16, 'BaD_PassWord')
class CmdHexdump(Cmd):
    """`hexdump` / `hd` command: display a hexdump of a memory region."""

    keywords = ['hexdump', 'hd']
    description = 'Display a hexdump of a specified region in the memory.'
    parser = argparse.ArgumentParser(prog=keywords[0], description=description, epilog=('Aliases: ' + ', '.join(keywords)))
    parser.add_argument('--length', '-l', type=auto_int, default=256, help='Length of the hexdump (default: %(default)s).')
    parser.add_argument('--aligned', '-a', action='store_true', help='Access the memory strictly 4-byte aligned.')
    parser.add_argument('address', type=auto_int, help='Start address of the hexdump.')

    def work(self):
        """Parse args and print the hexdump.

        Returns True on success (or on an argument error, which argparse has
        already reported), False when the memory read fails.
        """
        args = self.getArgs()
        if args is None:  # `is None`, not `== None` (identity check for the sentinel)
            return True
        if args.aligned:
            dump = self.internalblue.readMemAligned(args.address, args.length)
        else:
            dump = self.readMem(args.address, args.length)
        if dump is None:
            return False
        log.hexdump(dump, begin=args.address)
        return True
class TxBytPerQueue(base_tests.SimpleDataPlane):
    """Verify per-queue tx_bytes counters update after forwarding one packet."""

    def runTest(self):
        logging.info('Running TxBytPerQueue test')
        # dict.keys() returns a view with no .sort() on Python 3; sorted()
        # works on both 2 and 3 and gives a deterministic port order.
        of_ports = sorted(config['port_map'].keys())
        self.assertTrue(len(of_ports) > 1, 'Not enough ports for test')
        (queue_stats, p) = get_queuestats(self, ofp.OFPP_ALL, ofp.OFPQ_ALL)
        for idx in range(len(of_ports)):
            ingress_port = of_ports[idx]
            # Pair each port with the next one, wrapping around.
            egress_port = of_ports[(idx + 1) % len(of_ports)]
            queue_id = port_queues_get(self, queue_stats, egress_port)
            for egress_queue_id in queue_id:
                delete_all_flows(self.controller)
                initial_stats = get_queue_stats(self, egress_port, egress_queue_id)
                (pkt, match) = enqueue(self, ingress_port, egress_port, egress_queue_id)
                send_packet(self, pkt, ingress_port, egress_port)
                verify_queue_stats(self, egress_port, egress_queue_id, initial=initial_stats, bytes=len(str(pkt)))
def test_gas_price_strategy_eth_gasstation():
    """The gas-station strategy must convert the reported price into wei."""
    gas_price_strategy = 'fast'
    excepted_result = 10
    callable_ = get_gas_price_strategy(gas_price_strategy, 'api_key')
    with patch.object(requests, 'get', return_value=MagicMock(status_code=200, json=MagicMock(return_value={gas_price_strategy: excepted_result}))):
        result = callable_(Web3, 'tx_params')
    # NOTE(review): the multiplier was garbled in the original ("* )", a syntax
    # error). eth gasstation reports prices in gwei*10, so wei = value / 10 * 10**9.
    # Reconstructed — confirm against the strategy implementation.
    assert result == (excepted_result / 10) * (10 ** 9)
class OptionPlotoptionsDependencywheelSonificationTracks(Options):
    """Sonification track options for dependency-wheel series
    (Highcharts ``plotOptions.dependencywheel.sonification.tracks``)."""

    # NOTE(review): each getter/setter pair below was two undecorated defs with
    # the same name (the later def silently shadowed the earlier one). Restored
    # the @property/@<name>.setter pattern used throughout this options layer.

    @property
    def activeWhen(self) -> 'OptionPlotoptionsDependencywheelSonificationTracksActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsDependencywheelSonificationTracksActivewhen)

    @property
    def instrument(self):
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsDependencywheelSonificationTracksMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsDependencywheelSonificationTracksMapping)

    @property
    def midiName(self):
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsDependencywheelSonificationTracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsDependencywheelSonificationTracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def render() -> None:
    """Create the trim-frame sliders; configured and visible only for a video target."""
    global TRIM_FRAME_START_SLIDER
    global TRIM_FRAME_END_SLIDER
    common_args: Dict[(str, Any)] = {'step': 1, 'minimum': 0, 'maximum': 100, 'visible': False}
    start_args: Dict[(str, Any)] = {'label': wording.get('trim_frame_start_slider_label'), **common_args}
    end_args: Dict[(str, Any)] = {'label': wording.get('trim_frame_end_slider_label'), **common_args}
    if is_video(facefusion.globals.target_path):
        video_frame_total = count_video_frame_total(facefusion.globals.target_path)
        start_args.update(value=(facefusion.globals.trim_frame_start or 0), maximum=video_frame_total, visible=True)
        end_args.update(value=(facefusion.globals.trim_frame_end or video_frame_total), maximum=video_frame_total, visible=True)
    with gradio.Row():
        TRIM_FRAME_START_SLIDER = gradio.Slider(**start_args)
        TRIM_FRAME_END_SLIDER = gradio.Slider(**end_args)
def main():
    """Parse the requested version, write the channel files, and regenerate outputs.

    A beta version updates only the beta channel; a stable version updates both.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('new_version', type=str)
    args = parser.parse_args()
    version = Version.parse(args.new_version)
    assert not version.dev
    print(f'Bumping to {version}')
    channels = ['beta'] if version.beta else ['stable', 'beta']
    for channel in channels:
        write_version(channel, version)
    generate.main(channels)
    return 0
from contextlib import contextmanager  # required by the restored decorator below


# NOTE(review): this generator wraps a bare `yield` in try/except — the classic
# contextlib.contextmanager shape — but the decorator was missing (apparently
# stripped), so `with spinner(...)` would fail. Restored; confirm upstream.
@contextmanager
def spinner(text: str, logger: Logger, quiet=False, debug=False):
    """Context manager printing a '<text>... Done' progress line around a task.

    The text is also logged at info level. If the wrapped block raises, the
    traceback is logged and printed (full traceback when debug=True, just the
    message otherwise) and the exception is suppressed; quiet=True silences
    all console output.
    """
    try:
        logger.info(text)
        if not quiet:
            print(text, end='... ', flush=True)
        yield
        if not quiet:
            print('Done', flush=True)
    except Exception as exception:
        exception_traceback = format_exc()
        logger.error(exception_traceback)
        if not quiet:
            if debug:
                print(exception_traceback)
            else:
                print(str(exception))
def generic_create(evm: Evm, endowment: U256, contract_address: Address, memory_start_position: U256, memory_size: U256, init_code_gas: Uint) -> None:
    """Core logic shared by the CREATE/CREATE2 opcodes: run the init code in a
    child frame and deploy the result at ``contract_address``.

    Pushes the new contract's address on success, or 0 on failure.
    NOTE(review): ``init_code_gas`` is not referenced in this body — confirm
    it is charged by the caller.
    """
    # Imported locally to avoid a circular import with the interpreter module.
    from ...vm.interpreter import MAX_CODE_SIZE, STACK_DEPTH_LIMIT, process_create_message
    # The target address becomes warm regardless of the creation outcome.
    evm.accessed_addresses.add(contract_address)
    create_message_gas = max_message_call_gas(Uint(evm.gas_left))
    evm.gas_left -= create_message_gas
    # Creation is a state change, forbidden in a static call context.
    ensure((not evm.message.is_static), WriteInStaticContext)
    evm.return_data = b''
    sender_address = evm.message.current_target
    sender = get_account(evm.env.state, sender_address)
    # Cheap failure (push 0, refund the child gas): insufficient balance,
    # sender nonce at its maximum, or call-depth limit exceeded.
    if ((sender.balance < endowment) or (sender.nonce == Uint(((2 ** 64) - 1))) or ((evm.message.depth + 1) > STACK_DEPTH_LIMIT)):
        evm.gas_left += create_message_gas
        push(evm.stack, U256(0))
        return
    # Address collision: the target already has code or a nonce. The sender
    # nonce is still incremented but the forwarded gas is not returned.
    if account_has_code_or_nonce(evm.env.state, contract_address):
        increment_nonce(evm.env.state, evm.message.current_target)
        push(evm.stack, U256(0))
        return
    call_data = memory_read_bytes(evm.memory, memory_start_position, memory_size)
    # Init code size limit: at most twice the max deployed-code size.
    ensure((len(call_data) <= (2 * MAX_CODE_SIZE)), OutOfGasError)
    increment_nonce(evm.env.state, evm.message.current_target)
    # Child frame runs the init code as `code` with empty calldata, and
    # inherits snapshots of the warm address/storage-key sets.
    child_message = Message(caller=evm.message.current_target, target=Bytes0(), gas=create_message_gas, value=endowment, data=b'', code=call_data, current_target=contract_address, depth=(evm.message.depth + 1), code_address=None, should_transfer_value=True, is_static=False, accessed_addresses=evm.accessed_addresses.copy(), accessed_storage_keys=evm.accessed_storage_keys.copy(), parent_evm=evm)
    child_evm = process_create_message(child_message, evm.env)
    if child_evm.error:
        # Failed creation: surface the child's output as return data, push 0.
        incorporate_child_on_error(evm, child_evm)
        evm.return_data = child_evm.output
        push(evm.stack, U256(0))
    else:
        # Successful creation: clear return data, push the new address.
        incorporate_child_on_success(evm, child_evm)
        evm.return_data = b''
        push(evm.stack, U256.from_be_bytes(child_evm.message.current_target))