code
stringlengths
281
23.7M
def modules_scan(url, method, headers, body, scanid=None):
    """Run every attack module enabled in the scan policy against the target.

    Each enabled check is dispatched (mostly as a Celery ``.delay`` task)
    through ``handleException`` so one failing module does not abort the scan.
    Exits the process when no scan policy can be read.
    """
    attack = read_scan_policy()
    if attack is None:
        print('Failed to start scan.')
        sys.exit(1)
    if scanid is None:
        scanid = generate_scanid()

    def enabled(name):
        # Policy values are 'Y'/'y' for enabled modules.
        return attack[name] in ('Y', 'y')

    # Seed the scan status with the number of enabled modules.
    count = sum(1 for value in attack.values() if value in ('Y', 'y'))
    update_scan_status(scanid, '', count)

    if enabled('zap'):
        api_scan = zap_scan()
        if zap_start() is True:
            api_scan.start_scan(url, method, headers, body, scanid)

    analyze_attack(url, headers, attack)
    print(attack)

    if enabled('ssrf'):
        handleException(lambda: ssrf_check.delay(url, method, headers, body, scanid), scanid, 'SSRF')
    if enabled('cors'):
        handleException(lambda: cors_main.delay(url, method, headers, body, scanid), scanid, 'CORS')
    if enabled('Broken auth'):
        # Note: auth_check is invoked synchronously (no .delay) in this flow.
        handleException(lambda: auth_check(url, method, headers, body, scanid), scanid, 'Authentication')
    if enabled('Rate limit'):
        handleException(lambda: rate_limit.delay(url, method, headers, body, scanid), scanid, 'Rate limit')
    if enabled('csrf'):
        handleException(lambda: csrf_check.delay(url, method, headers, body, scanid), scanid, 'CSRf')
    if enabled('jwt'):
        handleException(lambda: jwt_check.delay(url, method, headers, body, scanid), scanid, 'JWT')
    if enabled('sqli'):
        handleException(lambda: sqli_check.delay(url, method, headers, body, scanid), scanid, 'SQL injection')
    if enabled('xss'):
        handleException(lambda: xss_check.delay(url, method, headers, body, scanid), scanid, 'XSS')
    if enabled('open-redirection'):
        handleException(lambda: open_redirect_check.delay(url, method, headers, body, scanid), scanid, 'Open redirect')
    if enabled('xxe'):
        xxe = xxe_scan()
        handleException(lambda: xxe.delay(url, method, headers, body, scanid), scanid, 'XXE')
    if enabled('crlf'):
        handleException(lambda: crlf_check.delay(url, method, headers, body, scanid), scanid, 'CRLF')
    if enabled('security_headers'):
        handleException(lambda: security_headers_missing.delay(url, method, headers, body, scanid), scanid, 'security_headers')
# NOTE(review): the two bare call fragments preceding this class were not
# valid Python — they look like ``@mock.patch`` decorators whose prefix was
# lost; restored below. Confirm against upstream before merging.
@mock.patch('importlib.import_module', side_effect=_import_module_mock)
@mock.patch('setuptools.setup')
class TestLedgerIntegration(BasePythonMarkdownDocs):
    """Execute the code blocks of docs/ledger-integration.md and verify them."""

    DOC_PATH = Path(ROOT_DIR, 'docs', 'ledger-integration.md')

    def _assert_isinstance(self, locals_key, cls_or_str, locals_):
        """Assert ``locals_[locals_key]`` exists and has the expected type.

        *cls_or_str* may be a class object, or a class-name string for
        classes that cannot be imported here.
        """
        assert locals_key in locals_
        obj = locals_[locals_key]
        if type(cls_or_str) == type:
            assert isinstance(obj, cls_or_str)
        else:
            assert obj.__class__.__name__ == cls_or_str

    def _assert(self, locals_, *mocks):
        # Check every object the doc's code blocks are expected to create.
        self._assert_isinstance('fetchai_crypto', 'FetchAICrypto', locals_)
        self._assert_isinstance('fetchai_ledger_api', 'FetchAIApi', locals_)
        self._assert_isinstance('fetchai_faucet_api', 'FetchAIFaucetApi', locals_)
        self._assert_isinstance('my_ledger_crypto', MagicMock, locals_)
        self._assert_isinstance('my_ledger_api', MagicMock, locals_)
        self._assert_isinstance('my_faucet_api', MagicMock, locals_)

    def teardown_class(cls):
        """Unregister the custom ledger registered by the doc examples."""
        crypto_registry.specs.pop('my_ledger_id')
        ledger_apis_registry.specs.pop('my_ledger_id')
        faucet_apis_registry.specs.pop('my_ledger_id')
def sys_info():
    """Print platform, Python, and scientific-module version information.

    Writes the report to stdout and returns ``None``.
    """
    ljust = 15
    out = 'SYSTEM \n'
    out += 'Platform:'.ljust(ljust) + platform.platform() + '\n'
    out += '\nPYTHON \n'
    out += 'Version:'.ljust(ljust) + str(sys.version).replace('\n', ' ') + '\n'
    out += 'Executable:'.ljust(ljust) + sys.executable + '\n'
    out += '\nMODULES \n'
    for mod_name in ('numpy', 'scipy', 'matplotlib'):
        out += ('%s:' % mod_name).ljust(ljust)
        try:
            mod = __import__(mod_name)
            version = mod.__version__
        except Exception:
            # BUG FIX: previously the version line was appended even after a
            # failed import, reusing an undefined (first iteration -> NameError)
            # or stale ``version`` value. Only report the version on success.
            out += 'Not found\n'
        else:
            out += '%s\n' % version
    print(out, end='')
class AnaPotBase2D(Calculator):
    """Analytic 2-D potential defined by a sympy expression string.

    The expression is differentiated symbolically once at construction and
    compiled to numpy callables, so energy/forces/Hessian evaluations are
    plain function calls.
    """

    def __init__(self, V_str):
        super(AnaPotBase2D, self).__init__()
        x, y = symbols('x y')
        V = sympify(V_str)
        # Potential and first derivatives (for the forces).
        self.V = lambdify((x, y), V, 'numpy')
        self.dVdx = lambdify((x, y), diff(V, x), 'numpy')
        self.dVdy = lambdify((x, y), diff(V, y), 'numpy')
        # Second derivatives (for the Hessian).
        self.dVdxdx = lambdify((x, y), diff(V, x, x), 'numpy')
        self.dVdxdy = lambdify((x, y), diff(V, x, y), 'numpy')
        self.dVdydy = lambdify((x, y), diff(V, y, y), 'numpy')

    def get_energy(self, atoms, coords):
        x, y = coords
        return {'energy': self.V(x, y)}

    def get_forces(self, atoms, coords):
        x, y = coords
        results = self.get_energy(atoms, coords)
        # Forces are the negative gradient of the potential.
        results['forces'] = -np.array((self.dVdx(x, y), self.dVdy(x, y)))
        return results

    def get_hessian(self, atoms, coords):
        x, y = coords
        results = self.get_forces(atoms, coords)
        dxx = self.dVdxdx(x, y)
        dxy = self.dVdxdy(x, y)
        dyy = self.dVdydy(x, y)
        results['hessian'] = np.array(((dxx, dxy), (dxy, dyy)))
        return results
class AnsiToWin32(object):
    # Wraps a stream, intercepting ANSI escape sequences in written text and
    # either stripping them or converting them into equivalent win32 calls.
    ANSI_RE = re.compile('\\033\\[((?:\\d|;)*)([a-zA-Z])')

    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
        # The underlying stream (normally sys.stdout or sys.stderr).
        self.wrapped = wrapped
        # When True, emit a full reset after every write.
        self.autoreset = autoreset
        # Proxy handed to callers; delegates writes back to this converter.
        self.stream = StreamWrapper(wrapped, self)
        on_windows = sys.platform.startswith('win')
        # Strip ANSI codes by default on Windows consoles.
        if (strip is None):
            strip = on_windows
        self.strip = strip
        # Convert to win32 calls only for an open Windows tty stream.
        if (convert is None):
            convert = (on_windows and (not wrapped.closed) and is_a_tty(wrapped))
        self.convert = convert
        # ANSI code -> (win32 function, *args) dispatch table.
        self.win32_calls = self.get_win32_calls()
        self.on_stderr = (self.wrapped is sys.stderr)

    def should_wrap(self):
        # True when wrapping the stream would change anything at all.
        return (self.convert or self.strip or self.autoreset)

    def get_win32_calls(self):
        # Build the ANSI-code -> win32-call dispatch table (empty when
        # conversion is disabled or the win32 backend is unavailable).
        if (self.convert and winterm):
            return {AnsiStyle.RESET_ALL: (winterm.reset_all,), AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), AnsiFore.RED: (winterm.fore, WinColor.RED), AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), AnsiFore.WHITE: (winterm.fore, WinColor.GREY), AnsiFore.RESET: (winterm.fore,), AnsiBack.BLACK: (winterm.back, WinColor.BLACK), AnsiBack.RED: (winterm.back, WinColor.RED), AnsiBack.GREEN: (winterm.back, WinColor.GREEN), AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), AnsiBack.BLUE: (winterm.back, WinColor.BLUE), AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), AnsiBack.CYAN: (winterm.back, WinColor.CYAN), AnsiBack.WHITE: (winterm.back, WinColor.GREY), AnsiBack.RESET: (winterm.back,)}
        return dict()

    def write(self, text):
        if (self.strip or self.convert):
            self.write_and_convert(text)
        else:
            self.wrapped.write(text)
            self.wrapped.flush()
        if self.autoreset:
            self.reset_all()

    def reset_all(self):
        if self.convert:
            self.call_win32('m', (0,))
        elif ((not self.wrapped.closed) and is_a_tty(self.wrapped)):
            self.wrapped.write(Style.RESET_ALL)

    def write_and_convert(self, text):
        # Write the text, handling each ANSI escape found along the way:
        # plain spans are passed through, escapes are converted/stripped.
        cursor = 0
        for match in self.ANSI_RE.finditer(text):
            (start, end) = match.span()
            self.write_plain_text(text, cursor, start)
            self.convert_ansi(*match.groups())
            cursor = end
        self.write_plain_text(text, cursor, len(text))

    def write_plain_text(self, text, start, end):
        if (start < end):
            self.wrapped.write(text[start:end])
            self.wrapped.flush()

    def convert_ansi(self, paramstring, command):
        if self.convert:
            params = self.extract_params(paramstring)
            self.call_win32(command, params)

    def extract_params(self, paramstring):
        # Parse the ';'-separated numeric parameters into a tuple of ints,
        # skipping empty fields.
        def split(paramstring):
            for p in paramstring.split(';'):
                if (p != ''):
                    (yield int(p))
        return tuple(split(paramstring))

    def call_win32(self, command, params):
        # NOTE(review): extract_params always returns a tuple, so this
        # ``== []`` default can never trigger here — looks like a latent
        # bug; confirm against the upstream colorama implementation.
        if (params == []):
            params = [0]
        if (command == 'm'):
            # SGR: apply each style/color parameter via the dispatch table.
            for param in params:
                if (param in self.win32_calls):
                    func_args = self.win32_calls[param]
                    func = func_args[0]
                    args = func_args[1:]
                    kwargs = dict(on_stderr=self.on_stderr)
                    func(*args, **kwargs)
        elif (command in ('H', 'f')):
            # Cursor position.
            func = winterm.set_cursor_position
            func(params, on_stderr=self.on_stderr)
        elif (command in 'J'):
            # Erase display.
            func = winterm.erase_data
            func(params, on_stderr=self.on_stderr)
        elif (command == 'A'):
            # Cursor up; defaults to one row when no parameter is given.
            if ((params == ()) or (params == None)):
                num_rows = 1
            else:
                num_rows = params[0]
            func = winterm.cursor_up
            func(num_rows, on_stderr=self.on_stderr)
class OptionPlotoptionsPolygonDragdrop(Options):
    # NOTE(review): every accessor here was defined twice with the same name,
    # so the setters silently shadowed the getters (dead code). The
    # @property/@setter decorators appear to have been lost; restored below.

    @property
    def draggableX(self):
        return self._config_get(None)

    @draggableX.setter
    def draggableX(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def draggableY(self):
        return self._config_get(None)

    @draggableY.setter
    def draggableY(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def dragHandle(self) -> 'OptionPlotoptionsPolygonDragdropDraghandle':
        # Sub-option container (read-only accessor).
        return self._config_sub_data('dragHandle', OptionPlotoptionsPolygonDragdropDraghandle)

    @property
    def dragMaxX(self):
        return self._config_get(None)

    @dragMaxX.setter
    def dragMaxX(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragMaxY(self):
        return self._config_get(None)

    @dragMaxY.setter
    def dragMaxY(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragMinX(self):
        return self._config_get(None)

    @dragMinX.setter
    def dragMinX(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragMinY(self):
        return self._config_get(None)

    @dragMinY.setter
    def dragMinY(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragPrecisionX(self):
        return self._config_get(0)

    @dragPrecisionX.setter
    def dragPrecisionX(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragPrecisionY(self):
        return self._config_get(0)

    @dragPrecisionY.setter
    def dragPrecisionY(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragSensitivity(self):
        return self._config_get(2)

    @dragSensitivity.setter
    def dragSensitivity(self, num: float):
        self._config(num, js_type=False)

    @property
    def groupBy(self):
        return self._config_get(None)

    @groupBy.setter
    def groupBy(self, text: str):
        self._config(text, js_type=False)

    @property
    def guideBox(self) -> 'OptionPlotoptionsPolygonDragdropGuidebox':
        # Sub-option container (read-only accessor).
        return self._config_sub_data('guideBox', OptionPlotoptionsPolygonDragdropGuidebox)

    @property
    def liveRedraw(self):
        return self._config_get(True)

    @liveRedraw.setter
    def liveRedraw(self, flag: bool):
        self._config(flag, js_type=False)
class OptionPlotoptionsPyramidOnpointPosition(Options):
    # NOTE(review): each accessor was defined twice with the same name, so
    # the setters shadowed the getters (dead code). The @property/@setter
    # decorators appear to have been lost; restored below.

    @property
    def offsetX(self):
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def extractOmegatranslationsBlogspotCom(item):
    """Parse a feed item from omegatranslations.blogspot.com.

    Returns ``None`` for preview/non-chapter posts, a release message when a
    known tag matches, and ``False`` otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with neither chapter nor volume, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, release_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, release_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Plugin(plugin.PluginProto):
    # GPIO switch input plugin: reports a pin's state either via edge-detect
    # events or a 10/sec polling timer, with optional longpress detection.
    # NOTE(review): reconstructed from whitespace-collapsed source; some
    # nesting (noted inline) is inferred — confirm against upstream rpieasy.
    PLUGIN_ID = 1
    PLUGIN_NAME = 'Input - Switch Device/Generic GPIO'
    PLUGIN_VALUENAME1 = 'State'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_SINGLE
        self.vtype = rpieGlobals.SENSOR_TYPE_SWITCH
        self.valuecount = 1
        self.senddataoption = True
        self.timeroption = True
        self.timeroptional = True
        self.inverselogicoption = True
        self.recdataoption = False

    def plugin_exit(self):
        # Drop the edge-detect callback when one was registered (i.e. when
        # the polling timer is NOT the active mechanism).
        if (self.enabled and (self.timer100ms == False)):
            try:
                gpios.HWPorts.remove_event_detect(int(self.taskdevicepin[0]))
            except:
                pass
        return True

    def plugin_init(self, enableplugin=None):
        # Configure either event-detection or the 10/sec polling fallback.
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.decimals[0] = 0
        self.decimals[1] = 0
        self.decimals[2] = 0
        self.initialized = False
        try:
            gpioinit = (gpios.HWPorts is not None)
        except:
            gpioinit = False
        if ((int(self.taskdevicepin[0]) >= 0) and self.enabled and gpioinit):
            # Publish the current pin state immediately.
            try:
                self.set_value(1, int(gpios.HWPorts.input(int(self.taskdevicepin[0]))), True)
            except:
                pass
            # Default the detection type to BOTH edges when unset/invalid.
            try:
                if (int(self.taskdevicepluginconfig[3]) < 1):
                    self.taskdevicepluginconfig[3] = gpios.BOTH
            except:
                self.taskdevicepluginconfig[3] = gpios.BOTH
            try:
                self.plugin_exit()
                if self.taskdevicepluginconfig[0]:
                    # User forced periodic polling instead of events.
                    misc.addLog(rpieGlobals.LOG_LEVEL_INFO, 'Registering 10/sec timer as asked')
                    self.timer100ms = True
                    self.initialized = True
                    return True
                # Register the edge-detect callback, with optional debounce.
                if (int(self.taskdevicepluginconfig[1]) > 0):
                    gpios.HWPorts.add_event_detect(int(self.taskdevicepin[0]), int(self.taskdevicepluginconfig[3]), self.p001_handler, int(self.taskdevicepluginconfig[1]))
                else:
                    gpios.HWPorts.add_event_detect(int(self.taskdevicepin[0]), int(self.taskdevicepluginconfig[3]), self.p001_handler)
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, ('Event registered to pin ' + str(self.taskdevicepin[0])))
                self.timer100ms = False
                self._lastdataservetime = 0
                if (self.taskdevicepluginconfig[4] > 0):
                    # Longpress detection enabled: expose 3 values
                    # (State, Longpress, PressedTime).
                    self.valuecount = 3
                    self.uservar[1] = (- 1)
                    if (len(self.valuenames) < 3):
                        self.valuenames.append('')
                        self.valuenames.append('')
                    if (self.valuenames[1] == ''):
                        self.valuenames[1] = 'Longpress'
                        self.valuenames[2] = 'PressedTime'
                else:
                    self.valuecount = 1
            except Exception as e:
                # Event registration failed: fall back to polling.
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Event can not be added, register backup timer ' + str(e)))
                self.timer100ms = True
            # NOTE(review): placement after the try/except inferred from the
            # collapsed source — confirm upstream.
            self.initialized = True

    def webform_load(self):
        # Render the plugin's configuration form.
        webserver.addFormNote("Please make sure to select <a href='pinout'>pin configured</a> for input for default (or output to report back its state)!")
        webserver.addFormCheckBox('Force 10/sec periodic checking of pin', 'p001_per', self.taskdevicepluginconfig[0])
        webserver.addFormNote('For output pin, only 10/sec periodic method will work!')
        webserver.addFormNumericBox('De-bounce (ms)', 'p001_debounce', self.taskdevicepluginconfig[1], 0, 1000)
        options = ['Normal Switch', 'Push Button Active Low', 'Push Button Active High']
        optionvalues = [0, 1, 2]
        webserver.addFormSelector('Switch Button Type', 'p001_button', len(optionvalues), options, optionvalues, None, self.taskdevicepluginconfig[2])
        webserver.addFormNote('Use only normal switch for output type, i warned you!')
        try:
            options = ['BOTH', 'RISING', 'FALLING']
            optionvalues = [gpios.BOTH, gpios.RISING, gpios.FALLING]
            webserver.addFormSelector('Event detection type', 'p001_det', len(optionvalues), options, optionvalues, None, self.taskdevicepluginconfig[3])
            webserver.addFormNote('Only valid if event detection activated')
        except:
            pass
        options = ['None', '1-->0', '0-->1', 'Both']
        optionvalues = [0, 1, 2, 3]
        webserver.addFormSelector('Longpress detection', 'p001_long', len(optionvalues), options, optionvalues, None, self.taskdevicepluginconfig[4])
        webserver.addFormNumericBox('Longpress min time (ms)', 'p001_longtime', self.taskdevicepluginconfig[5], 0, 10000)
        return True

    def webform_save(self, params):
        # Persist form values into taskdevicepluginconfig; re-init on change.
        changed = False
        prevval = self.taskdevicepluginconfig[0]
        if (webserver.arg('p001_per', params) == 'on'):
            self.taskdevicepluginconfig[0] = True
        else:
            self.taskdevicepluginconfig[0] = False
        if (prevval != self.taskdevicepluginconfig[0]):
            changed = True
        prevval = self.taskdevicepluginconfig[1]
        par = webserver.arg('p001_debounce', params)
        try:
            self.taskdevicepluginconfig[1] = int(par)
        except:
            self.taskdevicepluginconfig[1] = 0
        if (prevval != self.taskdevicepluginconfig[1]):
            changed = True
        prevval = self.taskdevicepluginconfig[2]
        par = webserver.arg('p001_button', params)
        try:
            self.taskdevicepluginconfig[2] = int(par)
        except:
            self.taskdevicepluginconfig[2] = 0
        if (prevval != self.taskdevicepluginconfig[2]):
            changed = True
        prevval = self.taskdevicepluginconfig[3]
        par = webserver.arg('p001_det', params)
        try:
            self.taskdevicepluginconfig[3] = int(par)
        except:
            self.taskdevicepluginconfig[3] = gpios.BOTH
        if (prevval != self.taskdevicepluginconfig[3]):
            changed = True
        prevval = self.taskdevicepluginconfig[4]
        par = webserver.arg('p001_long', params)
        try:
            self.taskdevicepluginconfig[4] = int(par)
        except:
            self.taskdevicepluginconfig[4] = 0
        if (prevval != self.taskdevicepluginconfig[4]):
            changed = True
        par = webserver.arg('p001_longtime', params)
        try:
            self.taskdevicepluginconfig[5] = int(par)
        except:
            self.taskdevicepluginconfig[5] = 1000
        if changed:
            self.plugin_init()
        return True

    def plugin_read(self):
        # Periodic read: re-publish the last known state.
        result = False
        if self.initialized:
            self.set_value(1, int(float(self.uservar[0])), True)
            self._lastdataservetime = rpieTime.millis()
            result = True
        return result

    def p001_handler(self, channel):
        # GPIO edge-detect callback.
        self.pinstate_check(True)

    def timer_ten_per_second(self):
        # Polling-mode entry point.
        self.pinstate_check()

    def pinstate_check(self, postcheck=False):
        # Read the pin, derive the logical output value from the button
        # type, and publish changes (with optional longpress values).
        if (self.initialized and self.enabled):
            prevval = int(float(self.uservar[0]))
            try:
                inval = gpios.HWPorts.input(int(self.taskdevicepin[0]))
            except Exception as e:
                # Unreadable pin: disable the task to avoid error loops.
                self.enabled = False
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, (('Task now disabled, Pin' + str(self.taskdevicepin[0])) + ' cannot be read!'))
                return False
            if self.pininversed:
                prevval = (1 - int(prevval))
            outval = prevval
            if (int(self.taskdevicepluginconfig[2]) == 0):
                # Normal switch: output mirrors the input.
                outval = int(inval)
            elif (int(self.taskdevicepluginconfig[2]) == 1):
                # Push button active low: toggle on low input.
                if (inval == 0):
                    outval = (1 - int(prevval))
            elif (int(self.taskdevicepluginconfig[2]) == 2):
                # Push button active high: toggle on high input.
                if (inval == 1):
                    outval = (1 - int(prevval))
            if (prevval != outval):
                if ((self.taskdevicepluginconfig[4] > 0) and (self._lastdataservetime > 0)):
                    # Longpress mode: decide whether this transition is one
                    # that longpress detection should evaluate.
                    docheck = False
                    if (self.taskdevicepluginconfig[4] == 3):
                        docheck = True
                    elif ((self.taskdevicepluginconfig[4] == 1) and (int(prevval) == 1) and (int(outval) == 0)):
                        docheck = True
                    elif ((self.taskdevicepluginconfig[4] == 2) and (int(prevval) == 0) and (int(outval) == 1)):
                        docheck = True
                    self.set_value(1, int(outval), False)
                    diff = (rpieTime.millis() - self._lastdataservetime)
                    dolong = False
                    if docheck:
                        if (diff > self.taskdevicepluginconfig[5]):
                            dolong = True
                    if dolong:
                        self.set_value(2, 1, False)
                    else:
                        self.set_value(2, 0, False)
                    if docheck:
                        self.set_value(3, diff, False)
                    else:
                        self.set_value(3, 0, False)
                    self.plugin_senddata()
                else:
                    self.set_value(1, int(outval), True)
                self._lastdataservetime = rpieTime.millis()
                # Software debounce for push buttons in polling mode.
                if ((self.taskdevicepluginconfig[2] > 0) and self.timer100ms):
                    time.sleep((self.taskdevicepluginconfig[1] / 1000))
            if postcheck:
                # Re-verify the state shortly after an event fires.
                rpieTime.addsystemtimer(1, self.postchecker, [int(self.taskdevicepin[0]), int(float(self.uservar[0]))])

    def plugin_write(self, cmd):
        # Delegate recognised GPIO commands to the shared helper.
        res = False
        cmdarr = cmd.split(',')
        cmdarr[0] = cmdarr[0].strip().lower()
        if (cmdarr[0].strip().lower() in gpiohelper.commandlist):
            res = gpiohelper.gpio_commands(cmd)
        return res

    def postchecker(self, timerid, pararray):
        # Deferred re-check scheduled by pinstate_check(postcheck=True).
        if (pararray[0] == int(self.taskdevicepin[0])):
            self.timer_ten_per_second()
class flow_monitor_reply_entry(loxi.OFObject):
    # One entry of an OpenFlow flow-monitor reply. Python-2-era loxi code:
    # pack() joins struct output with '' (str, not bytes).

    def __init__(self, event=None):
        if (event != None):
            self.event = event
        else:
            self.event = 0
        return

    def pack(self):
        # Serialize to wire format: total length (u16) followed by the
        # event code (u16).
        packed = []
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!H', self.event))
        length = sum([len(x) for x in packed])
        # Back-patch the real total length into the first field.
        packed[0] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        # NOTE(review): defined without ``self`` — presumably decorated
        # @staticmethod in the original generated file; confirm upstream.
        obj = flow_monitor_reply_entry()
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain the reader to this entry's bytes; the 2-byte length
        # field itself is accounted for by the second argument.
        reader = orig_reader.slice(_length, 2)
        obj.event = reader.read('!H')[0]
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.event != other.event):
            return False
        return True

    def pretty_print(self, q):
        # Render a human-readable dump onto pretty-printer *q*.
        q.text('flow_monitor_reply_entry {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('event = ')
                # Known OFPFME_* event codes get symbolic names.
                value_name_map = {0: 'OFPFME_INITIAL', 1: 'OFPFME_ADDED', 2: 'OFPFME_REMOVED', 3: 'OFPFME_MODIFIED', 4: 'OFPFME_ABBREV', 5: 'OFPFME_PAUSED', 6: 'OFPFME_RESUMED'}
                if (self.event in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.event], self.event)))
                else:
                    q.text(('%#x' % self.event))
            q.breakable()
        q.text('}')
def extractFivedollarmailBlogspotCom(item):
    """Parse a feed item from fivedollarmail.blogspot.com.

    Returns ``None`` for preview/non-chapter posts, a release message when a
    recognised tag is present, and ``False`` when nothing matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    for tag, release_name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, release_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_data(url, target_datetime, session=None):
    """Download the site's CSV export for *target_datetime* (today if None).

    First GETs the page to harvest the ASP.NET form-state tokens, then POSTs
    the download request with those tokens. Returns the response body text.
    """
    s = (session or requests.Session())
    headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:55.0) Gecko/ Firefox/55.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'}
    # BUG FIX: the initial GET previously used module-level requests.get,
    # bypassing the session used for the POST (losing cookies / the caller's
    # configured session).
    pagereq = s.get(url, headers=headers)
    soup = BeautifulSoup(pagereq.text, 'html.parser')
    # Hidden ASP.NET form-state fields required by the POST.
    viewstategenerator = soup.find('input', attrs={'id': '__VIEWSTATEGENERATOR'})['value']
    viewstate = soup.find('input', attrs={'id': '__VIEWSTATE'})['value']
    eventvalidation = soup.find('input', attrs={'id': '__EVENTVALIDATION'})['value']
    if target_datetime:
        target_date = target_datetime.date()
    else:
        target_date = datetime.now().date()
    month = target_date.month
    day = target_date.day
    year = target_date.year
    # Opaque widget-state strings the server-side date pickers expect.
    FromDatePicker_clientState = ('|0|01%s-%s-%s-0-0-0-0||[[[[]],[],[]],[{%s},[]],"01%s-%s-%s-0-0-0-0"]' % (year, month, day, '', year, month, day))
    ToDatePicker_clientState = ('|0|01%s-%s-%s-0-0-0-0||[[[[]],[],[]],[{%s},[]],"01%s-%s-%s-0-0-0-0"]' % (year, month, day, '', year, month, day))
    btnDownloadCSV = 'Download+CSV'
    ig_def_dp_cal_clientState = ('|0|15,2017,09,2017,%s,%s||[[null,[],null],[{%s},[]],"11,2017,09,2017,%s,%s"]' % (month, day, '', month, day))
    IG_CSS_LINKS_ = 'ig_res/default/ig_monthcalendar.css|ig_res/default/ig_texteditor.css|ig_res/default/ig_shared.css'
    postdata = {'__VIEWSTATE': viewstate, '__VIEWSTATEGENERATOR': viewstategenerator, '__EVENTVALIDATION': eventvalidation, 'FromDatePicker_clientState': FromDatePicker_clientState, 'ToDatePicker_clientState': ToDatePicker_clientState, 'btnDownloadCSV': btnDownloadCSV, '_ig_def_dp_cal_clientState': ig_def_dp_cal_clientState, '_IG_CSS_LINKS_': IG_CSS_LINKS_}
    postheaders = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:55.0) Gecko/ Firefox/55.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Content-Type': 'application/x-www-form-urlencoded'}
    datareq = s.post(url, headers=postheaders, data=postdata)
    return datareq.text
def interpolate(field, xs, vals):
    """Recover the polynomial P with P(xs[i]) = vals[i] over *field*.

    Uses the FFT trick: multiply by the vanishing polynomial Z of the
    complement of ``xs``, move to a shifted coset, divide Z back out there,
    and shift the result home.
    """
    domain_size = 2 ** (log2(max(xs)) + 1)
    assert (domain_size * 2) <= (2 ** field.height)
    domain = list(range(domain_size))
    big_domain = list(range(domain_size * 2))
    # Vanishing polynomial over the points NOT being interpolated.
    z = zpoly(field, [x for x in domain if x not in xs])
    z_values = fft(field, big_domain, z)
    # Evaluations of P*Z on the small domain: zero off xs, v*Z(x) on xs.
    pz_values = [0] * len(domain)
    for v, point in zip(vals, xs):
        pz_values[point] = field.mul(v, z_values[point])
    p_times_z = invfft(field, domain, pz_values)
    # Evaluate P*Z on the shifted coset, where Z is nonzero everywhere,
    # and divide it out pointwise.
    shifted_pz_values = fft(field, big_domain, p_times_z)[domain_size:]
    shifted_p_values = [field.div(a, b) for a, b in zip(shifted_pz_values, z_values[domain_size:])]
    shifted_p = invfft(field, domain, shifted_p_values)
    return shift(field, shifted_p, domain_size)
# NOTE(review): the bare ``.django_db`` fragment lost its ``@pytest.mark``
# prefix in the source; restored here.
@pytest.mark.django_db
def test_spending_by_award_subaward_success(client, mock_tas_data):
    """Subaward spending_by_award search filtered by TAS codes.

    Without an ATA two subawards match; adding the ATA narrows it to one.
    """
    data = {'filters': {'tas_codes': [{'aid': '028', 'main': '8006'}], 'award_type_codes': ['A', 'B', 'C', 'D']}, 'fields': ['Sub-Award ID'], 'subawards': True}
    resp = client.post('/api/v2/search/spending_by_award', content_type='application/json', data=json.dumps(data))
    assert resp.status_code == status.HTTP_200_OK
    assert len(resp.data['results']) == 2
    data = {'filters': {'tas_codes': [{'aid': '028', 'main': '8006', 'ata': '004'}], 'award_type_codes': ['A', 'B', 'C', 'D']}, 'fields': ['Sub-Award ID'], 'subawards': True}
    resp = client.post('/api/v2/search/spending_by_award', content_type='application/json', data=json.dumps(data))
    assert resp.status_code == status.HTTP_200_OK
    assert len(resp.data['results']) == 1
class TestSuite(unittest.TestCase):
    # Exercises Suite: CLI-argument handling, subprocess success/failure
    # paths, timeouts, output teeing and hook invocation order.

    def setUp(self):
        self.suite_config = {'name': 'test', 'description': 'desc', 'args': []}
        self.mock_hook = MagicMock()
        HookFactory.create.return_value = self.mock_hook
        Suite.parse = Mock()

    def test_arg_list(self):
        # Lists pass through unchanged; dicts are flattened to flags
        # (a None value yields a bare flag with no argument).
        self.assertListEqual(['--output-format=json', 'a'], Suite.arg_list(['--output-format=json', 'a']))
        expected = ['--output-format', 'json', '--file']
        actual = Suite.arg_list({'output-format': 'json', 'file': None})
        self.assertCountEqual(expected, actual)
        # The flag's value must immediately follow the flag itself.
        self.assertEqual((actual.index('--output-format') + 1), actual.index('json'))

    def test_run_succeed(self):
        mock_data = '{"key": "hello"}'
        self.suite_config['args'] = [mock_data]
        self.suite_config['metrics'] = ['key']
        self.suite_config['path'] = 'echo'
        suite = Suite(self.suite_config)
        suite.parse = Mock(return_value=[TestCaseResult(name='key', status=TestStatus.PASSED)])
        metrics = suite.run()
        # parse receives (stdout_lines, stderr_lines, returncode).
        suite.parse.assert_called_with([mock_data], [], 0)
        self.assertEqual([TestCaseResult(name='key', status=TestStatus.PASSED)], metrics)

    def test_run_fail(self):
        # A non-zero exit raises CalledProcessError carrying both streams.
        self.suite_config['args'] = ['-c', 'echo "error" >&2; exit 1']
        self.suite_config['path'] = 'sh'
        suite = Suite(self.suite_config)
        with self.assertRaises(subprocess.CalledProcessError) as e:
            suite.run()
        e = e.exception
        self.assertEqual('', e.stdout.strip())
        self.assertEqual('error', e.stderr.strip())

    def test_run_fail_no_check_returncode(self):
        # Non-zero exit must NOT raise when check_returncode is disabled.
        self.suite_config['args'] = ['-c', 'echo "error" >&2; exit 1']
        self.suite_config['path'] = 'sh'
        self.suite_config['check_returncode'] = False
        suite = Suite(self.suite_config)
        suite.run()

    def test_run_no_binary(self):
        self.suite_config['path'] = 'somethingthatdoesntexist'
        self.suite_config['metrics'] = []
        suite = Suite(self.suite_config)
        with self.assertRaises(OSError):
            suite.run()

    def test_run_parser_error(self):
        # Parser exceptions propagate out of run().
        self.suite_config['path'] = 'true'
        self.suite_config['metrics'] = []
        suite = Suite(self.suite_config)
        suite.parse = Mock(side_effect=ValueError(''))
        with self.assertRaises(ValueError):
            suite.run()

    def test_run_timeout(self):
        self.suite_config['timeout'] = 0.1
        self.suite_config['path'] = '/bin/sh'
        self.suite_config['args'] = ['-c', 'yes']
        suite = Suite(self.suite_config)
        with self.assertRaises(subprocess.TimeoutExpired):
            suite.run()

    def test_run_timeout_is_pass(self):
        # A timeout counts as success when timeout_is_pass is set.
        self.suite_config['timeout'] = 0.1
        self.suite_config['timeout_is_pass'] = True
        self.suite_config['path'] = '/bin/sh'
        self.suite_config['args'] = ['-c', 'echo "wow" && echo "err" > /dev/stderr && sleep 2']
        suite = Suite(self.suite_config)
        suite.run()
        suite.parse.assert_called_with(['timed out as expected'], [], 0)

    def test_tee_stdouterr(self):
        mock_data = 'line 1 from echo\nthis is the second line'
        self.suite_config['args'] = [mock_data]
        self.suite_config['metrics'] = ['key']
        self.suite_config['tee_output'] = True
        self.suite_config['path'] = 'echo'
        suite = Suite(self.suite_config)
        # Swap in StringIO streams to capture the teed output.
        (orig_stdout, orig_stderr) = (sys.stdout, sys.stderr)
        sys.stdout = io.StringIO()
        sys.stderr = io.StringIO()
        suite.run()
        expected = 'stdout: line 1 from echo\nstdout: this is the second line\n'
        self.assertEqual(sys.stdout.getvalue(), expected)
        sys.stdout.truncate(0)
        sys.stdout.seek(0)
        # Both streams are teed, each line tagged with its origin.
        self.suite_config['path'] = 'sh'
        self.suite_config['args'] = ['-c', 'echo "error" >&2 && echo "from stdout"']
        self.suite_config['tee_output'] = True
        suite = Suite(self.suite_config)
        suite.run()
        expected = 'stdout: from stdout\nstderr: error\n'
        self.assertEqual(sys.stdout.getvalue(), expected)
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr

    def test_tee_output_file(self):
        # tee_output may also name a file to receive the combined output.
        mock_data = 'line 1 from echo\nthis is the second line'
        self.suite_config['args'] = [mock_data]
        self.suite_config['metrics'] = ['key']
        (fd, teefile) = tempfile.mkstemp()
        os.close(fd)
        self.suite_config['path'] = 'sh'
        self.suite_config['args'] = ['-c', 'echo "error" >&2 && echo "from stdout"']
        self.suite_config['tee_output'] = teefile
        suite = Suite(self.suite_config)
        suite.run()
        expected = 'stdout: from stdout\nstderr: error\n'
        with open(teefile, 'r') as tmp:
            self.assertEqual(tmp.read(), expected)
        os.remove(teefile)

    def test_hooks(self):
        self.suite_config['path'] = 'true'
        self.suite_config['hooks'] = [{'hook': 'first', 'options': {'a': 1}}, {'hook': 'second', 'options': {'b': 1}}]
        mock = MagicMock()
        first = mock.first
        second = mock.second

        def get_mock_hook(name):
            if (name == 'first'):
                return first
            else:
                return second
        HookFactory.create.side_effect = get_mock_hook
        suite = Suite(self.suite_config)
        suite.run()
        # before() hooks run in declared order; after() hooks in reverse.
        self.assertListEqual([call.first.before({'a': 1}, suite), call.second.before({'b': 1}, suite), call.second.after({'b': 1}, suite), call.first.after({'a': 1}, suite)], mock.method_calls)
def get_mock_table_retention(table_data):
    """Return a mock resource fetcher for table-retention tests.

    The returned callable mimics the dao interface: it yields fake
    ``bigquery_table`` resources built from *table_data*, an empty list for
    ``bucket``, and raises ``ValueError`` for any other resource type.
    """
    def _mock_table_retention(_=None, resource_type='bigquery_table'):
        if resource_type == 'bucket':
            return []
        if resource_type != 'bigquery_table':
            # BUG FIX: the message was previously passed as a (fmt, arg)
            # tuple instead of being %-formatted, producing an unreadable
            # error string.
            raise ValueError('unexpected resource type: got %s, expected bigquery_table' % resource_type)
        return [frsd.get_fake_table_resource(data) for data in table_data]
    return _mock_table_retention
class TrimmedEmail(fields.Email):
    """An Email field that trims surrounding whitespace before handing the
    value to the base field, in both serialization directions."""

    @staticmethod
    def _trimmed(value):
        # Leave non-string values (e.g. None) untouched.
        return value.strip() if hasattr(value, 'strip') else value

    def _serialize(self, value, *args, **kwargs):
        return super()._serialize(self._trimmed(value), *args, **kwargs)

    def _deserialize(self, value, *args, **kwargs):
        return super()._deserialize(self._trimmed(value), *args, **kwargs)
# NOTE(review): the bare ``.parametrize`` fragment lost its ``@pytest.mark``
# prefix in the source; restored here.
@pytest.mark.parametrize('pos1,pos2,expected', [(2, 0, 24), (2, 2, 28), (2, (- 1), 26)])
def test_custom_fn_impute(pos1, pos2, expected, mw_data):
    """moving_window with a custom aggregation imputes max(window) * 2."""
    mw_data[(pos1, pos2)] = np.nan
    imputed = impy.moving_window(mw_data, func=(lambda l: (max(l) * 2)))
    return_na_check(imputed)
    assert imputed[(pos1, pos2)] == expected
class GCSPath():
    """A bucket/key pair parsed from a ``gs://bucket/key`` URL."""
    bucket: str
    key: str

    def __init__(self, fileURL: str) -> None:
        (self.bucket, self.key) = self._get_bucket_key(fileURL)

    def __eq__(self, other: 'GCSPath') -> bool:
        return ((self.bucket == other.bucket) and (self.key == other.key))

    def _get_bucket_key(self, fileURL: str) -> Tuple[(str, str)]:
        """Split a gs:// URL into (bucket, key).

        Raises ValueError when *fileURL* is not a gs:// URL.
        """
        # BUG FIX / NOTE(review): the regex literal was corrupted in the
        # source ("'^ fileURL)"); reconstructed as a gs:// matcher from the
        # surrounding error message — confirm against upstream.
        match = re.search(r'^gs://(.+)$', fileURL)
        if (not match):
            raise ValueError(f'Could not parse {fileURL} as an GCSPath')
        (bucket, *rest) = match.group(1).split('/')
        key = '/'.join(rest)
        return (bucket, key)
def test_websocket_endpoint_on_receive_bytes(test_client_factory):
    """A bytes-encoded WebSocketEndpoint echoes bytes and rejects text."""

    class WebSocketApp(WebSocketEndpoint):
        encoding = 'bytes'

        async def on_receive(self, websocket, data):
            await websocket.send_bytes(b'Message bytes was: ' + data)

    client = test_client_factory(WebSocketApp)
    with client.websocket_connect('/ws') as websocket:
        websocket.send_bytes(b'Hello, world!')
        received = websocket.receive_bytes()
        assert received == b'Message bytes was: Hello, world!'
    # Sending text to a bytes-only endpoint must raise.
    with pytest.raises(RuntimeError):
        with client.websocket_connect('/ws') as websocket:
            websocket.send_text('Hello world')
class OptionSeriesVectorSonificationDefaultinstrumentoptions(Options):
    # NOTE(review): each accessor was defined twice with the same name, so
    # the setters shadowed the getters (dead code). The @property/@setter
    # decorators appear to have been lost; restored below.

    @property
    def activeWhen(self) -> 'OptionSeriesVectorSonificationDefaultinstrumentoptionsActivewhen':
        # Sub-option container (read-only accessor).
        return self._config_sub_data('activeWhen', OptionSeriesVectorSonificationDefaultinstrumentoptionsActivewhen)

    @property
    def instrument(self):
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionSeriesVectorSonificationDefaultinstrumentoptionsMapping':
        # Sub-option container (read-only accessor).
        return self._config_sub_data('mapping', OptionSeriesVectorSonificationDefaultinstrumentoptionsMapping)

    @property
    def midiName(self):
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionSeriesVectorSonificationDefaultinstrumentoptionsPointgrouping':
        # Sub-option container (read-only accessor).
        return self._config_sub_data('pointGrouping', OptionSeriesVectorSonificationDefaultinstrumentoptionsPointgrouping)

    @property
    def roundToMusicalNotes(self):
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def test_align_explode_alignments_by_taxon(o_dir, e_dir, request):
    """phyluce_align_explode_alignments --by-taxon output matches fixtures."""
    program = 'bin/align/phyluce_align_explode_alignments'
    output = os.path.join(o_dir, 'mafft-exploded-by-taxon')
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--alignments', os.path.join(e_dir, 'mafft'),
        '--output', output,
        '--input-format', 'fasta',
        '--by-taxon',
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0, print('{}'.format(stderr.decode('utf-8')))
    output_files = glob.glob(os.path.join(output, '*'))
    assert output_files, 'There are no output files'
    # Compare every produced fasta record against the expected fixture.
    for output_file in output_files:
        fname = os.path.basename(output_file)
        expected_file = os.path.join(e_dir, 'mafft-exploded-by-taxon', fname)
        observed_records = SeqIO.to_dict(SeqIO.parse(output_file, 'fasta'))
        expected_records = SeqIO.to_dict(SeqIO.parse(expected_file, 'fasta'))
        for rec_name, record in observed_records.items():
            assert expected_records[rec_name].seq == record.seq
def stuff2icon(s):
    """Map an entity-tag string to its display icon; unknown tags pass through unchanged."""
    # Table lookup replaces the original if/elif ladder; unmapped tags fall
    # back to the tag itself. (Several icons are empty strings in the source.)
    icons = {
        'ORG': 'org',
        'GEO': '',
        'FIRST': '1',
        'LAST': '2',
        'FEMALE': '',
        'MALE': '',
        'CURRENCY': '',
        'MEDIA': '',
        'MISC': '?!',
        'CULTGRP': '',
        'LANGUAGE': '',
        'COUNTRY': '',
        'MEASURE': '',
        'TIME': '',
    }
    return icons.get(s, s)
def list_run_directories(solid_run_dir):
    """Return the sorted sibling SOLiD run directories that match the given run.

    A sibling matches when its name parses as a run with the same instrument
    and datestamp and it contains a <name>_run_definition.txt file. Falls back
    to [solid_run_dir] if the directory itself is not among the matches, or []
    if its name is not a valid run name.
    """
    base_dir = os.path.dirname(os.path.abspath(solid_run_dir))
    run_name = os.path.basename(solid_run_dir.rstrip(os.sep))
    try:
        base_run_info = SolidRunInfo(run_name)
    except Exception:
        logging.error("'%s' not a valid SOLiD run directory name" % solid_run_dir)
        return []
    matches = []
    for entry in os.listdir(base_dir):
        candidate = os.path.join(base_dir, entry)
        if not os.path.isdir(candidate):
            continue
        try:
            # Skip anything whose name doesn't parse, or that belongs to a
            # different instrument/date.
            info = SolidRunInfo(entry)
            if (info.instrument != base_run_info.instrument) or (info.datestamp != base_run_info.datestamp):
                continue
        except Exception:
            continue
        # Require the run definition file as evidence of a real run dir.
        if not os.path.exists(os.path.join(candidate, entry + '_run_definition.txt')):
            continue
        matches.append(candidate)
    if os.path.abspath(solid_run_dir) not in matches:
        matches = [solid_run_dir]
    matches.sort()
    return matches
def _get_cost_savings(measure_values, rollup_by=None, target_percentile=50): all_percentiles = defaultdict(list) all_savings = defaultdict(list) cost_saving_key = str(target_percentile) for mv in measure_values: rollup_id = (getattr(mv, rollup_by) if rollup_by else None) all_percentiles[rollup_id].append(mv.percentile) all_savings[rollup_id].append(mv.cost_savings[cost_saving_key]) total_savings = 0 for (rollup_id, percentiles) in all_percentiles.items(): mean_percentile = round((sum(percentiles) / len(percentiles)), 2) if (mean_percentile > target_percentile): total_savings += sum(all_savings[rollup_id]) return total_savings
class GungnirSkill(TreatAs, WeaponSkill):
    """Gungnir: spend two cards from hand/shown cards to act as an Attack."""

    target = t_OtherOne()
    skill_category = ['equip', 'active']
    range = 3
    treat_as = PhysicalCard.classes['AttackCard']

    def check(self):
        # Valid only when every associated card sits in hand or shown cards,
        # every card is a physical card, and exactly two cards are used.
        # Checks are kept in the original order (zones, physicality, count).
        cards = self.associated_cards
        allowed_zones = ('cards', 'showncards')
        for c in cards:
            if c.resides_in.type not in allowed_zones:
                return False
        for c in cards:
            if not c.is_card(PhysicalCard):
                return False
        return len(cards) == 2
def _handle_compatibility_operator(all_specifiers: List[Specifier], operator_to_specifiers: Dict[str, Set[Specifier]], specifier: Specifier) -> None:
    """Expand a '~=' compatible-release specifier into explicit >= and < bounds.

    Appends both bounds to all_specifiers and registers each under its
    operator in operator_to_specifiers.
    """
    spec_version = Version(specifier.version)
    parts = spec_version.base_version.split('.')
    # The compatible-release upper bound bumps the second-to-last release
    # segment; pre/dev/post releases drop their trailing segment first.
    bump_index = -2
    if spec_version.is_prerelease or spec_version.is_devrelease or spec_version.is_postrelease:
        bump_index += 1
        parts = parts[:-1]
    parts[bump_index] = str(int(parts[bump_index]) + 1)
    upper_version = Version('.'.join(parts))
    lower_bound = Specifier('>=' + str(spec_version))
    upper_bound = Specifier('<' + str(upper_version))
    all_specifiers.extend([lower_bound, upper_bound])
    operator_to_specifiers[lower_bound.operator].add(lower_bound)
    operator_to_specifiers[upper_bound.operator].add(upper_bound)
class OptionPlotoptionsParetoStatesSelectHalo(Options):
    """Generated Highcharts wrapper: plotOptions.pareto.states.select.halo.

    NOTE(review): each getter/setter pair below shares one name; upstream
    these are @property pairs - the decorators appear stripped here.
    """

    def attributes(self):
        # SVG attributes overriding the halo's default styling (default None).
        return self._config_get(None)

    def attributes(self, value: Any):
        self._config(value, js_type=False)

    def opacity(self):
        # Halo opacity (default 0.25).
        return self._config_get(0.25)

    def opacity(self, num: float):
        self._config(num, js_type=False)

    def size(self):
        # Halo size in pixels (default 10).
        return self._config_get(10)

    def size(self, num: float):
        self._config(num, js_type=False)
def action(arguments):
    """Write one sequence identifier per line to the output file.

    Emits the record description when --include-description is set,
    otherwise the bare record id.
    """
    common.exit_on_sigpipe()
    # Fall back to sniffing the format from the handle when not given explicitly.
    source_format = (arguments.input_format or fileformat.from_handle(arguments.sequence_file))
    with arguments.sequence_file:
        sequences = SeqIO.parse(arguments.sequence_file, source_format)
        if arguments.include_description:
            ids = (sequence.description for sequence in sequences)
        else:
            ids = (sequence.id for sequence in sequences)
        # NOTE(review): the generators above are lazy, so the output block is
        # assumed to nest inside the input file's context manager - the
        # collapsed source makes the nesting ambiguous; confirm upstream.
        with arguments.output_file:
            for i in ids:
                print(i, file=arguments.output_file)
def _train_function(train_actors: DistributedActors, algorithm_config: ImpalaAlgorithmConfig) -> IMPALA:
    """Build an IMPALA trainer from the distributed actors and run training.

    Returns the trained IMPALA instance after n_epochs of training.
    """
    policy = _policy(train_actors.env_factory())
    trainer = IMPALA(
        model=policy,
        rollout_generator=train_actors,
        evaluator=algorithm_config.rollout_evaluator,
        algorithm_config=algorithm_config,
        model_selection=None,
    )
    trainer.train(n_epochs=algorithm_config.n_epochs)
    return trainer
class MFContinuous(nn.Module):
    """Matrix factorization over one shared embedding table with continuous targets.

    The score for a pair of products is the dot product of their embeddings;
    predict() additionally squashes the score through a sigmoid. The loss is
    MSE plus an L2 prior on the embedding weights scaled by c_vector.
    """

    def __init__(self, emb_size, emb_dim, c_vector=1e-06):
        super().__init__()
        self.emb_size = emb_size
        self.emb_dim = emb_dim
        self.c_vector = c_vector  # weight of the L2 regularization term
        self.embedding = nn.Embedding(emb_size, emb_dim)
        self.sig = nn.Sigmoid()
        self.mse = nn.MSELoss()
        logger.info('Model initialized: {}'.format(self))

    def _interaction(self, product1, product2):
        # Row-wise dot product of the two products' embeddings.
        emb1 = self.embedding(product1)
        emb2 = self.embedding(product2)
        return torch.sum(emb1 * emb2, dim=1, dtype=torch.float)

    def forward(self, product1, product2):
        return self._interaction(product1, product2)

    def predict(self, product1, product2):
        # Sigmoid-squashed interaction score for prediction.
        return self.sig(self._interaction(product1, product2))

    def loss(self, pred, label):
        mf_loss = self.mse(pred, label)
        product_prior = regularize_l2(self.embedding.weight) * self.c_vector
        return mf_loss + product_prior
class UnitStatus(base._Widget, base.PaddingMixin, base.MarginMixin):
    """Qtile bar widget showing the state of one systemd unit.

    Displays a short text label next to a round indicator whose border and
    fill colours reflect the unit's ActiveState, updated live via D-Bus
    property-change signals.
    """

    orientations = base.ORIENTATION_HORIZONTAL
    defaults = [
        ('bus_name', 'system', "Which bus to use. Accepts 'system' or 'session'."),
        ('font', 'sans', 'Default font'),
        ('fontsize', None, 'Font size'),
        ('foreground', 'ffffff', 'Font colour'),
        ('unitname', 'NetworkManager.service', 'Name of systemd unit.'),
        ('label', 'NM', 'Short text to display next to indicator.'),
        ('colour_active', '00ff00', 'Colour for active indicator'),
        ('colour_inactive', 'ffffff', 'Colour for active indicator'),
        ('colour_failed', 'ff0000', 'Colour for active indicator'),
        ('colour_dead', '666666', 'Colour for dead indicator'),
        ('indicator_size', 10, 'Size of indicator (None = up to margin)'),
        ('state_map', {'active': ('colour_active', 'colour_active'), 'inactive': ('colour_inactive', 'colour_inactive'), 'deactivating': ('colour_inactive', 'colour_active'), 'activating': ('colour_active', 'colour_inactive'), 'failed': ('colour_failed', 'colour_failed'), 'not-found': ('colour_inactive', 'colour_failed'), 'dead': ('colour_dead', 'colour_dead')}, 'Map of indicator colours (border, fill)'),
    ]
    _screenshots = [('widget-unitstatus-screenshot.png', '')]
    _dependencies = ['dbus-next']

    def __init__(self, **config):
        base._Widget.__init__(self, bar.CALCULATED, **config)
        self.add_defaults(UnitStatus.defaults)
        self.add_defaults(base.PaddingMixin.defaults)
        self.add_defaults(base.MarginMixin.defaults)
        # Resolve each state's (border, fill) colour attribute names into values.
        self.colours = {}
        for (state, cols) in self.state_map.items():
            self.colours[state] = tuple((getattr(self, col) for col in cols))
        if (self.bus_name.lower() == 'session'):
            self.bus_type = BusType.SESSION
        else:
            # Anything other than session/system falls back to the system bus.
            if (self.bus_name.lower() not in ['session', 'system']):
                logger.warning('Unknown bus name. Defaulting to system bus.')
            self.bus_type = BusType.SYSTEM
        # Until the unit is found on the bus, show the 'not-found' colours.
        self.state = 'not-found'

    def _configure(self, qtile, bar):
        base._Widget._configure(self, qtile, bar)
        self.layout = self.drawer.textlayout(self.label, self.foreground, self.font, self.fontsize, None, wrap=False)
        if (self.indicator_size is not None):
            # Enforce a minimum so the indicator circle remains drawable.
            self.indicator_size = max(self.indicator_size, 6)
        max_indicator = (self.bar.height - (2 * self.margin))
        # None means "as big as the margins allow"; otherwise clamp to that.
        if (self.indicator_size is None):
            self.indicator_size = max_indicator
        else:
            self.indicator_size = min(max_indicator, self.indicator_size)
        self.layout.width = self.text_width()

    def _config_async(self):
        asyncio.create_task(self._connect_dbus())

    async def _connect_dbus(self):
        # Connect to the configured bus and subscribe to the unit, if present.
        self.bus = (await MessageBus(bus_type=self.bus_type).connect())
        introspection = (await self.bus.introspect('org.freedesktop.systemd1', '/org/freedesktop/systemd1'))
        object = self.bus.get_proxy_object('org.freedesktop.systemd1', '/org/freedesktop/systemd1', introspection)
        self.manager = object.get_interface('org.freedesktop.systemd1.Manager')
        unit_path = (await self.find_unit())
        if (not unit_path):
            return
        (await self._subscribe_unit(unit_path))

    async def find_unit(self):
        # Return the unit's object path, or False when the unit isn't listed.
        units = (await self.manager.call_list_units())
        unit = [x for x in units if (x[0] == self.unitname)]
        if (not unit):
            self.unit = None
            return False
        else:
            # Index 6 of the ListUnits tuple is the unit object path.
            path = unit[0][6]
            return path

    async def _subscribe_unit(self, path):
        # Hook the unit's property-change signal so state updates redraw us.
        introspection = (await self.bus.introspect('org.freedesktop.systemd1', path))
        object = self.bus.get_proxy_object('org.freedesktop.systemd1', path, introspection)
        self.unit = object.get_interface('org.freedesktop.systemd1.Unit')
        props = object.get_interface('org.freedesktop.DBus.Properties')
        self.state = (await self.unit.get_active_state())
        props.on_properties_changed(self._changed)
        self.draw()

    def _changed(self, _interface, changed, _invalidated):
        # D-Bus PropertiesChanged callback: redraw when ActiveState changes.
        state = changed.get('ActiveState')
        if state:
            self.state = state.value
            self.draw()

    def text_width(self):
        (width, _) = self.drawer.max_layout_size([self.label], self.font, self.fontsize)
        return width

    def calculate_length(self):
        # Label width + three paddings (left, between, right) + indicator.
        width = self.text_width()
        width = ((width + (3 * self.padding_x)) + self.indicator_size)
        return width

    def draw(self):
        self.drawer.clear((self.background or self.bar.background))
        # Text is placed to the right of the indicator, vertically centred.
        self.layout.draw(((self.margin * 2) + self.indicator_size), (int(((self.bar.height / 2.0) - (self.layout.height / 2.0))) + 1))
        i_margin = int(((self.bar.height - self.indicator_size) / 2))
        self.draw_indicator(self.margin, i_margin, self.indicator_size, self.indicator_size, 2, self.colours[self.state])
        self.drawer.draw(offsetx=self.offset, offsety=self.offsety, width=self.width)

    def circle(self, x, y, width, height, linewidth):
        # Trace a rounded path with four arcs (clockwise from top-right).
        aspect = 1.0
        corner_radius = (height / 3.0)
        radius = (corner_radius / aspect)
        degrees = (math.pi / 180.0)
        self.drawer.ctx.new_sub_path()
        delta = (radius + (linewidth / 2))
        self.drawer.ctx.arc(((x + width) - delta), (y + delta), radius, ((- 90) * degrees), (0 * degrees))
        self.drawer.ctx.arc(((x + width) - delta), ((y + height) - delta), radius, (0 * degrees), (90 * degrees))
        self.drawer.ctx.arc((x + delta), ((y + height) - delta), radius, (90 * degrees), (180 * degrees))
        self.drawer.ctx.arc((x + delta), (y + delta), radius, (180 * degrees), (270 * degrees))
        self.drawer.ctx.close_path()

    def draw_indicator(self, x, y, width, height, linewidth, statecols):
        # Fill with the state's fill colour, then stroke the border colour.
        self.circle(x, y, width, height, linewidth)
        self.drawer.set_source_rgb(statecols[1])
        self.drawer.ctx.fill()
        self.drawer.set_source_rgb(statecols[0])
        self.circle(x, y, width, height, linewidth)
        self.drawer.ctx.stroke()

    def info(self):
        info = base._Widget.info(self)
        info['unit'] = self.unitname
        info['text'] = self.label
        info['state'] = self.state
        info['bus'] = self.bus_name
        return info
def test_condition_tuple_branches():
    """Conditional workflow returning a tuple: checks the result values and the
    serialized branch node's task reference.

    NOTE(review): sum_sub/math_ops appear here as bare functions - upstream
    they are decorated (e.g. @task/@workflow); the decorators look stripped
    in this extract. Confirm against the original test module.
    """
    def sum_sub(a: int, b: int) -> typing.NamedTuple('Outputs', sum=int, sub=int):
        return ((a + b), (a - b))

    def math_ops(a: int, b: int) -> typing.Tuple[(int, int)]:
        # Fail the workflow unless a > b; otherwise run sum_sub.
        (add, sub) = conditional('noDivByZero').if_((a > b)).then(sum_sub(a=a, b=b)).else_().fail('Only positive results are allowed')
        return (add, sub)

    (x, y) = math_ops(a=3, b=2)
    assert (x == 5)
    assert (y == 1)
    # pytest-xdist workers import test modules under a '__channelexec__.' prefix.
    running_xdist = (os.environ.get('PYTEST_XDIST_WORKER') is not None)
    prefix = ('__channelexec__.' if running_xdist else '')
    wf_spec = get_serializable(OrderedDict(), serialization_settings, math_ops)
    assert (len(wf_spec.template.nodes) == 1)
    assert (wf_spec.template.nodes[0].branch_node.if_else.case.then_node.task_node.reference_id.name == f'{prefix}tests.flytekit.unit.core.test_conditions.sum_sub')
class StadtradHamburgStation(BikeShareStation):
    """One StadtRAD Hamburg station parsed from a GeoJSON feature dict."""

    def __init__(self, info):
        super(StadtradHamburgStation, self).__init__()
        coords = info['geometry']['coordinates']  # GeoJSON order: [lon, lat]
        props = info['properties']
        self.latitude = float(coords[1])
        self.longitude = float(coords[0])
        # NOTE(review): encode() yields bytes on Python 3 - this looks like a
        # Python 2 leftover, kept as-is for behavioural compatibility.
        self.name = props['name'].encode('utf-8')
        self.bikes = int(props['anzahl_raeder'])
        self.extra = {'uid': props['uid']}
class IndexTest(unittest.TestCase):
    """End-to-end checks that BMGInference compiles indexing operations into the
    expected BMG graph (compared as dot-source strings)."""

    def test_index_constant_vector_stochastic_index(self) -> None:
        # Constant vectors of several types indexed by a stochastic (Bernoulli) index.
        self.maxDiff = None
        observed = BMGInference().to_dot([pos_real(), real(), neg_real(), prob(), natural()], {})
        expected = '\ndigraph "graph" {\n N00[label=0.5];\n N01[label=Bernoulli];\n N02[label=Sample];\n N03[label=0.0];\n N04[label="[1.5,2.5]"];\n N05[label=1];\n N06[label=0];\n N07[label=if];\n N08[label=index];\n N09[label=Normal];\n N10[label=Sample];\n N11[label=Query];\n N12[label="[1.5,-1.5]"];\n N13[label=index];\n N14[label=1.0];\n N15[label=Normal];\n N16[label=Sample];\n N17[label=Query];\n N18[label="[-1.5,-2.5]"];\n N19[label=index];\n N20[label=Exp];\n N21[label=Bernoulli];\n N22[label=Sample];\n N23[label=Query];\n N24[label="[0.5,0.25]"];\n N25[label=index];\n N26[label=Bernoulli];\n N27[label=Sample];\n N28[label=Query];\n N29[label="[2,3]"];\n N30[label=index];\n N31[label=0.75];\n N32[label=Binomial];\n N33[label=Sample];\n N34[label=Query];\n N00 -> N01;\n N01 -> N02;\n N02 -> N07;\n N03 -> N09;\n N04 -> N08;\n N05 -> N07;\n N06 -> N07;\n N07 -> N08;\n N07 -> N13;\n N07 -> N19;\n N07 -> N25;\n N07 -> N30;\n N08 -> N09;\n N09 -> N10;\n N10 -> N11;\n N12 -> N13;\n N13 -> N15;\n N14 -> N15;\n N15 -> N16;\n N16 -> N17;\n N18 -> N19;\n N19 -> N20;\n N20 -> N21;\n N21 -> N22;\n N22 -> N23;\n N24 -> N25;\n N25 -> N26;\n N26 -> N27;\n N27 -> N28;\n N29 -> N30;\n N30 -> N32;\n N31 -> N32;\n N32 -> N33;\n N33 -> N34;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())

    def test_index_stochastic_tensor_constant_index(self) -> None:
        # A constant index into a stochastic tensor should be optimized away.
        self.maxDiff = None
        observed = BMGInference().to_dot([optimize_away_index()], {})
        expected = '\ndigraph "graph" {\n N0[label=0.0];\n N1[label=1.0];\n N2[label=Normal];\n N3[label=Sample];\n N4[label=0.0];\n N5[label=HalfCauchy];\n N6[label=Sample];\n N7[label=Normal];\n N8[label=Sample];\n N9[label=Query];\n N0 -> N2;\n N1 -> N2;\n N2 -> N3;\n N3 -> N7;\n N4 -> N5;\n N5 -> N6;\n N6 -> N7;\n N7 -> N8;\n N8 -> N9;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())

    def test_column_index(self) -> None:
        # Column indexing of a stochastic matrix compiles to ColumnIndex + index.
        self.maxDiff = None
        observed = BMGInference().to_dot([column_index()], {})
        expected = '\ndigraph "graph" {\n N00[label=0.0];\n N01[label=1.0];\n N02[label=Normal];\n N03[label=Sample];\n N04[label=0.0];\n N05[label=HalfCauchy];\n N06[label=Sample];\n N07[label=0.5];\n N08[label=Bernoulli];\n N09[label=Sample];\n N10[label=2];\n N11[label=ToReal];\n N12[label=ToMatrix];\n N13[label=1];\n N14[label=0];\n N15[label=if];\n N16[label=ColumnIndex];\n N17[label=index];\n N18[label=Query];\n N00 -> N02;\n N01 -> N02;\n N02 -> N03;\n N03 -> N12;\n N03 -> N12;\n N04 -> N05;\n N05 -> N06;\n N06 -> N11;\n N07 -> N08;\n N08 -> N09;\n N09 -> N15;\n N10 -> N12;\n N10 -> N12;\n N11 -> N12;\n N11 -> N12;\n N12 -> N16;\n N13 -> N15;\n N14 -> N15;\n N15 -> N16;\n N15 -> N17;\n N16 -> N17;\n N17 -> N18;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())

    def test_tuple_index(self) -> None:
        # Several variants of tuple-style (multi-axis) indexing.
        self.maxDiff = None
        observed = BMGInference().to_dot([tuple_index_0()], {})
        expected = '\ndigraph "graph" {\n N0[label=0.5];\n N1[label=Bernoulli];\n N2[label=Sample];\n N3[label=5];\n N4[label=0];\n N5[label=if];\n N6[label=Query];\n N0 -> N1;\n N1 -> N2;\n N2 -> N5;\n N3 -> N5;\n N4 -> N5;\n N5 -> N6;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())
        observed = BMGInference().to_dot([tuple_index_1()], {})
        expected = '\ndigraph "graph" {\n N0[label=0.5];\n N1[label=Bernoulli];\n N2[label=Sample];\n N3[label="[[2.0,3.0],\\\\n[4.0,5.0]]"];\n N4[label=1];\n N5[label=0];\n N6[label=if];\n N7[label=ColumnIndex];\n N8[label=index];\n N9[label=Query];\n N0 -> N1;\n N1 -> N2;\n N2 -> N6;\n N3 -> N7;\n N4 -> N6;\n N5 -> N6;\n N6 -> N7;\n N6 -> N8;\n N7 -> N8;\n N8 -> N9;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())
        observed = BMGInference().to_dot([tuple_index_2()], {})
        expected = '\ndigraph "graph" {\n N0[label=0.0];\n N1[label=1.0];\n N2[label=Normal];\n N3[label=Sample];\n N4[label=0.0];\n N5[label=HalfCauchy];\n N6[label=Sample];\n N7[label=Query];\n N0 -> N2;\n N1 -> N2;\n N2 -> N3;\n N3 -> N7;\n N4 -> N5;\n N5 -> N6;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())
        observed = BMGInference().to_dot([tuple_index_3()], {})
        expected = '\ndigraph "graph" {\n N00[label=0.0];\n N01[label=1.0];\n N02[label=Normal];\n N03[label=Sample];\n N04[label=0.0];\n N05[label=HalfCauchy];\n N06[label=Sample];\n N07[label=0.5];\n N08[label=Bernoulli];\n N09[label=Sample];\n N10[label=2];\n N11[label=ToReal];\n N12[label=ToMatrix];\n N13[label=1];\n N14[label=0];\n N15[label=if];\n N16[label=ColumnIndex];\n N17[label=index];\n N18[label=Query];\n N00 -> N02;\n N01 -> N02;\n N02 -> N03;\n N03 -> N12;\n N03 -> N12;\n N04 -> N05;\n N05 -> N06;\n N06 -> N11;\n N07 -> N08;\n N08 -> N09;\n N09 -> N15;\n N10 -> N12;\n N10 -> N12;\n N11 -> N12;\n N11 -> N12;\n N12 -> N16;\n N13 -> N15;\n N14 -> N15;\n N15 -> N16;\n N15 -> N17;\n N16 -> N17;\n N17 -> N18;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())

    def test_negative_index(self) -> None:
        # Negative constant indices are rejected by the type checker.
        self.maxDiff = None
        with self.assertRaises(ValueError) as ex:
            BMGInference().to_dot([negative_constant_index()], {})
        self.assertEqual('The right of an index is required to be a natural but is a negative real.', str(ex.exception))

    def test_unsupported_slice(self) -> None:
        # Slicing with stochastic bounds is not implemented.
        self.maxDiff = None
        with self.assertRaises(ValueError) as ex:
            BMGInference().to_dot([unsupported_slice_1()], {})
        self.assertEqual('Stochastic slices are not yet implemented.', str(ex.exception))
        with self.assertRaises(ValueError) as ex:
            BMGInference().to_dot([unsupported_slice_2()], {})
        self.assertEqual('Stochastic slices are not yet implemented.', str(ex.exception))
def expression(callable, rule_name, grammar):
    """Turn a plain Python callable into a custom-rule Expression.

    The callable may take (text, pos) for "simple" rules or
    (text, pos, cache, error, grammar) for full rules. Its return value may
    be an int end position, an (end, children) tuple, or anything else
    (returned as-is, e.g. a Node or None for "no match").
    """
    # Unwrap method descriptors so getfullargspec sees the real signature.
    if (ismethoddescriptor(callable) and hasattr(callable, '__func__')):
        callable = callable.__func__
    num_args = len(getfullargspec(callable).args)
    if ismethod(callable):
        # Bound methods receive self implicitly; don't count it.
        num_args -= 1
    if (num_args == 2):
        is_simple = True
    elif (num_args == 5):
        is_simple = False
    else:
        raise RuntimeError(('Custom rule functions must take either 2 or 5 arguments, not %s.' % num_args))

    class AdHocExpression(Expression):
        def _uncached_match(self, text, pos, cache, error):
            result = (callable(text, pos) if is_simple else callable(text, pos, cache, error, grammar))
            if isinstance(result, int):
                (end, children) = (result, None)
            elif isinstance(result, tuple):
                (end, children) = result
            else:
                # Assume the callable built a Node itself (or returned None).
                return result
            return Node(self, text, pos, end, children=children)

        def _as_rhs(self):
            return ('{custom function "%s"}' % callable.__name__)

    return AdHocExpression(name=rule_name)
class ScriptForm(forms.ModelForm):
    """Django admin form for editing Evennia Script entities."""

    # Script identifier shown in listings.
    db_key = forms.CharField(label='Name/Key', help_text='Script identifier, shown in listings etc.')
    # Python path to the typeclass implementing the script's behaviour;
    # choices are loaded lazily from the registered ScriptDB typeclasses.
    db_typeclass_path = forms.ChoiceField(label='Typeclass', help_text='This is the Python-path to the class implementing the actual script functionality. <BR>If your custom class is not found here, it may not be imported into Evennia yet.', choices=(lambda : adminutils.get_and_load_typeclasses(parent=ScriptDB, excluded_parents=['evennia.prototypes.prototypes.DbPrototype'])))
    # In-game lock definition string (optional; defaults apply when empty).
    db_lock_storage = forms.CharField(label='Locks', required=False, widget=forms.Textarea(attrs={'cols': '100', 'rows': '2'}), help_text='In-game lock definition string. If not given, defaults will be used. This string should be on the form <i>type:lockfunction(args);type2:lockfunction2(args);...')
    # Timer settings: repeat interval (0 disables), repeat count (0 = forever),
    # optional startup delay, and whether the timer survives a server reboot.
    db_interval = forms.IntegerField(label='Repeat Interval', help_text="Optional timer component.<BR>How often to call the Script's<BR>`at_repeat` hook, in seconds.<BR>Set to 0 to disable.")
    db_repeats = forms.IntegerField(help_text='Only repeat this many times.<BR>Set to 0 to run indefinitely.')
    db_start_delay = forms.BooleanField(help_text='Wait <B>Interval</B> seconds before first call.')
    db_persistent = forms.BooleanField(label='Survives reboot', help_text='If unset, a server reboot will remove the timer.')
class MasterNodeStatsRecorder():
    """Samples OS-level stats for the elected master node and stores them.

    Args:
        client: Elasticsearch client used to query cluster state and node info.
        metrics_store: store receiving the produced metrics documents.
        sample_interval: sampling period in seconds (used by the caller's loop).
    """

    def __init__(self, client, metrics_store, sample_interval):
        self.client = client
        self.metrics_store = metrics_store
        self.sample_interval = sample_interval
        self.logger = logging.getLogger(__name__)

    def __str__(self):
        return 'master node stats'

    def record(self):
        """Fetch the master node's OS info and put one metrics document.

        Raises:
            exceptions.RallyError: if the Elasticsearch calls fail with a
                transport error.
        """
        import elasticsearch
        try:
            state = self.client.cluster.state(metric='master_node')
            info = self.client.nodes.info(node_id=state['master_node'], metric='os')
        except elasticsearch.TransportError:
            # Bug fix: the original read self.cluster_name, which is never set
            # in __init__, so this handler raised AttributeError instead of
            # the intended RallyError. Fall back to 'unknown' when unset.
            cluster_name = getattr(self, 'cluster_name', 'unknown')
            msg = f'A transport error occurred while collecting master node stats on cluster [{cluster_name}]'
            self.logger.exception(msg)
            raise exceptions.RallyError(msg)
        doc = {'name': 'master-node-stats', 'node': info['nodes'][state['master_node']]['name']}
        self.metrics_store.put_doc(doc, level=MetaInfoScope.cluster)
class TestIndent():
    """Round-trip YAML tests for flow-style collections and indent settings."""

    def test_roundtrip_inline_list(self):
        s = 'a: [a, b, c]\n'
        output = rt(s)
        assert (s == output)

    def test_roundtrip_mapping_of_inline_lists(self):
        s = dedent(' a: [a, b, c]\n j: [k, l, m]\n ')
        output = rt(s)
        assert (s == output)

    def test_roundtrip_mapping_of_inline_lists_comments(self):
        s = dedent(' # comment A\n a: [a, b, c]\n # comment B\n j: [k, l, m]\n ')
        output = rt(s)
        assert (s == output)

    def test_roundtrip_mapping_of_inline_sequence_eol_comments(self):
        s = dedent(' # comment A\n a: [a, b, c] # comment B\n j: [k, l, m] # comment C\n ')
        output = rt(s)
        assert (s == output)

    def test_added_inline_list(self):
        # Setting flow style on a loaded block sequence should dump it inline.
        import srsly.ruamel_yaml
        s1 = dedent('\n a:\n - b\n - c\n - d\n ')
        s = 'a: [b, c, d]\n'
        data = srsly.ruamel_yaml.load(s1, Loader=srsly.ruamel_yaml.RoundTripLoader)
        val = data['a']
        val.fa.set_flow_style()
        output = srsly.ruamel_yaml.dump(data, Dumper=srsly.ruamel_yaml.RoundTripDumper)
        assert (s == output)

    def test_roundtrip_flow_mapping(self):
        import srsly.ruamel_yaml
        s = dedent(' - {a: 1, b: hallo}\n - {j: fka, k: 42}\n ')
        data = srsly.ruamel_yaml.load(s, Loader=srsly.ruamel_yaml.RoundTripLoader)
        output = srsly.ruamel_yaml.dump(data, Dumper=srsly.ruamel_yaml.RoundTripDumper)
        assert (s == output)

    def test_roundtrip_sequence_of_inline_mappings_eol_comments(self):
        s = dedent(' # comment A\n - {a: 1, b: hallo} # comment B\n - {j: fka, k: 42} # comment C\n ')
        output = rt(s)
        assert (s == output)

    def test_indent_top_level(self):
        inp = '\n - a:\n - b\n '
        round_trip(inp, indent=4)

    def test_set_indent_5_block_list_indent_1(self):
        inp = '\n a:\n - b: c\n - 1\n - d:\n - 2\n '
        round_trip(inp, indent=5, block_seq_indent=1)

    def test_set_indent_4_block_list_indent_2(self):
        inp = '\n a:\n - b: c\n - 1\n - d:\n - 2\n '
        round_trip(inp, indent=4, block_seq_indent=2)

    def test_set_indent_3_block_list_indent_0(self):
        inp = '\n a:\n - b: c\n - 1\n - d:\n - 2\n '
        round_trip(inp, indent=3, block_seq_indent=0)

    # Leading 'X' disables this variant (dash on its own line).
    def Xtest_set_indent_3_block_list_indent_2(self):
        inp = '\n a:\n -\n b: c\n -\n 1\n -\n d:\n -\n 2\n '
        round_trip(inp, indent=3, block_seq_indent=2)

    def test_set_indent_3_block_list_indent_2(self):
        inp = '\n a:\n - b: c\n - 1\n - d:\n - 2\n '
        round_trip(inp, indent=3, block_seq_indent=2)

    # Leading 'X' disables this variant (dash on its own line).
    def Xtest_set_indent_2_block_list_indent_2(self):
        inp = '\n a:\n -\n b: c\n -\n 1\n -\n d:\n -\n 2\n '
        round_trip(inp, indent=2, block_seq_indent=2)

    def test_set_indent_2_block_list_indent_2(self):
        inp = '\n a:\n - b: c\n - 1\n - d:\n - 2\n '
        round_trip(inp, indent=2, block_seq_indent=2)

    def test_roundtrip_four_space_indents(self):
        s = 'a:\n- foo\n- bar\n'
        round_trip(s, indent=4)

    def test_roundtrip_four_space_indents_no_fail(self):
        inp = '\n a:\n - foo\n - bar\n '
        exp = '\n a:\n - foo\n - bar\n '
        assert (round_trip_dump(round_trip_load(inp)) == dedent(exp))
class OptionPlotoptionsArearangeSonificationTracksMappingLowpass(Options):
    """Generated Highcharts wrapper: plotOptions.arearange.sonification.tracks.mapping.lowpass."""

    def frequency(self) -> 'OptionPlotoptionsArearangeSonificationTracksMappingLowpassFrequency':
        # Sub-options mapping data to the lowpass filter frequency.
        return self._config_sub_data('frequency', OptionPlotoptionsArearangeSonificationTracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsArearangeSonificationTracksMappingLowpassResonance':
        # Sub-options mapping data to the lowpass filter resonance.
        return self._config_sub_data('resonance', OptionPlotoptionsArearangeSonificationTracksMappingLowpassResonance)
def if_url_param(value: str, py_func) -> JsUtils.jsWrap:
    """Wrap py_func's JavaScript so it runs only when the URL query parameter exists.

    Inside the generated IIFE, `paramValue` holds the parameter's value and is
    passed (as a jsWrap) to py_func to produce the conditional body.
    """
    handler_js = py_func(JsUtils.jsWrap('paramValue'))
    param_js = JsUtils.jsConvertData(value, None)
    template = '(function(param){\nconst queryString = window.location.search; const urlParams = new URLSearchParams(queryString);\nif (urlParams.has(param)){paramValue = urlParams.get(param); %s}; \n})(%s)'
    return JsUtils.jsWrap(template % (handler_js, param_js))
class TestPartitionSeparate():
    """CIS audit tests for the partition-is-separate check on /dev/sda1.

    NOTE(review): the bare tuples before each test are no-op expressions;
    they look like stripped @mock.patch.object(...) decorators that patched
    CISAudit._shellexec with the respective mock. Confirm against the
    original source before relying on these tests.
    """
    test_id = '1.1'
    test_level = 1
    partition = '/dev/sda1'
    test = CISAudit()

    # NOTE(review): likely a stripped decorator - see class docstring.
    (CISAudit, '_shellexec', mock_parition_exists)

    def test_partition_is_separate(self):
        # Expect audit state 0 (pass) when the partition exists.
        state = self.test.audit_partition_is_separate(partition=self.partition)
        assert (state == 0)

    # NOTE(review): likely a stripped decorator - see class docstring.
    (CISAudit, '_shellexec', mock_parititon_not_exists)

    def test_partition_is_not_separate(self):
        # Expect audit state 1 (fail) when the partition does not exist.
        state = self.test.audit_partition_is_separate(partition=self.partition)
        assert (state == 1)
class TestApsEncoder():
    """Validation tests for encoding the APNs `aps` payload.

    NOTE(review): the bare `.parametrize(...)` lines below are missing their
    `@pytest.mark` prefix (syntactically invalid as-is) - the decorators
    appear mangled in this extract. Confirm against the original source.
    """

    def _encode_aps(self, aps):
        # Helper: encode a message carrying the given aps payload.
        return check_encoding(messaging.Message(topic='topic', apns=messaging.APNSConfig(payload=messaging.APNSPayload(aps=aps))))

    .parametrize('data', NON_OBJECT_ARGS)
    def test_invalid_aps(self, data):
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', apns=messaging.APNSConfig(payload=messaging.APNSPayload(aps=data))))
        expected = 'APNSPayload.aps must be an instance of Aps class.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_alert(self, data):
        aps = messaging.Aps(alert=data)
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Aps.alert must be a string or an instance of ApsAlert class.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', [list(), tuple(), dict(), 'foo'])
    def test_invalid_badge(self, data):
        aps = messaging.Aps(badge=data)
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Aps.badge must be a number.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', (NON_STRING_ARGS + ['']))
    def test_invalid_sound(self, data):
        aps = messaging.Aps(sound=data)
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Aps.sound must be a non-empty string or an instance of CriticalSound class.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_category(self, data):
        aps = messaging.Aps(category=data)
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Aps.category must be a string.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_thread_id(self, data):
        aps = messaging.Aps(thread_id=data)
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Aps.thread_id must be a string.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', ['', list(), tuple(), True, False, 1, 0])
    def test_invalid_custom_data_dict(self, data):
        if isinstance(data, dict):
            # dicts are valid custom_data; skip them here.
            return
        aps = messaging.Aps(custom_data=data)
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Aps.custom_data must be a dict.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', [True, False, 1, 0])
    def test_invalid_custom_field_name(self, data):
        aps = messaging.Aps(custom_data={data: 'foo'})
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Aps.custom_data key must be a string.'
        assert (str(excinfo.value) == expected)

    def test_multiple_field_specifications(self):
        # thread_id given both as a field and as a custom key must be rejected.
        aps = messaging.Aps(thread_id='foo', custom_data={'thread-id': 'foo'})
        with pytest.raises(ValueError) as excinfo:
            self._encode_aps(aps)
        expected = 'Multiple specifications for thread-id in Aps.'
        assert (str(excinfo.value) == expected)

    def test_aps(self):
        # Full aps payload round-trips into the expected wire format.
        msg = messaging.Message(topic='topic', apns=messaging.APNSConfig(payload=messaging.APNSPayload(aps=messaging.Aps(alert='alert text', badge=42, sound='s', content_available=True, mutable_content=True, category='c', thread_id='t'))))
        expected = {'topic': 'topic', 'apns': {'payload': {'aps': {'alert': 'alert text', 'badge': 42, 'sound': 's', 'content-available': 1, 'mutable-content': 1, 'category': 'c', 'thread-id': 't'}}}}
        check_encoding(msg, expected)

    def test_aps_custom_data(self):
        # Custom data keys are flattened into the aps dict.
        msg = messaging.Message(topic='topic', apns=messaging.APNSConfig(payload=messaging.APNSPayload(aps=messaging.Aps(alert='alert text', custom_data={'k1': 'v1', 'k2': 1}))))
        expected = {'topic': 'topic', 'apns': {'payload': {'aps': {'alert': 'alert text', 'k1': 'v1', 'k2': 1}}}}
        check_encoding(msg, expected)
def extract_mentions(html):
    """Return mention spans embedded in the HTML as dicts with full_name/email.

    Mentions are <span data-type="mention"> elements; full_name comes from
    data-label and email from data-id. Returns [] for falsy input.
    """
    if not html:
        return []
    soup = BeautifulSoup(html, 'html.parser')
    mention_spans = soup.find_all('span', attrs={'data-type': 'mention'})
    return [
        frappe._dict(full_name=span.get('data-label'), email=span.get('data-id'))
        for span in mention_spans
    ]
def authenticate(request):
    """Validate the request's Bearer JWT and return its username claim.

    Returns None on any failure (missing header, malformed header, wrong
    auth scheme, invalid/expired token); failures are printed, not raised.
    """
    if 'authorization' not in request.headers:
        return None
    try:
        scheme, token = request.headers['authorization'].split(' ')
    except Exception as e:
        print(e)
        return None
    if scheme.lower() != 'bearer':
        print('not bearer')
        return None
    try:
        claims = jwt.decode(token, secret, algorithms=['HS256'])
    except Exception as e:
        print(e)
        return None
    return claims['username']
class LiteDRAMGENSDRPHYCRG(Module):
    """Clock/reset generator for the standalone LiteDRAM SDR PHY core."""

    def __init__(self, platform, core_config):
        # This CRG only supports single-data-rate memories.
        assert (core_config['memtype'] in ['SDR'])
        self.clock_domains.cd_sys = ClockDomain()
        # Drive the system clock domain directly from the external clk pin.
        self.comb += self.cd_sys.clk.eq(platform.request('clk'))
        # Synchronize the external reset into the system clock domain.
        self.specials += AsyncResetSynchronizer(self.cd_sys, platform.request('rst'))
class TestFontTrait(BaseTestMixin, unittest.TestCase):
    """Tests for the Font trait's string normalization under the null toolkit.

    NOTE(review): the bare `_toolkit([ToolkitName.null])` lines are no-op
    expressions; they look like stripped decorators (e.g. a requires-toolkit
    marker). Confirm against the original source.
    """

    def setUp(self):
        BaseTestMixin.setUp(self)

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    # NOTE(review): likely a stripped decorator - see class docstring.
    _toolkit([ToolkitName.null])

    def test_font_trait_default(self):
        class Foo(HasTraits):
            font = Font()
        f = Foo()
        self.assertEqual(f.font, '10 pt Arial')

    # NOTE(review): likely a stripped decorator - see class docstring.
    _toolkit([ToolkitName.null])

    def test_font_trait_examples(self):
        class Foo(HasTraits):
            font = Font
        # Each case exercises how a free-form font string is normalized.
        f = Foo(font='Qwerty 10')
        self.assertEqual(f.font, '10 pt Qwerty')
        f = Foo(font='nothing')
        self.assertEqual(f.font, 'nothing')
        f = Foo(font='swiss family arial')
        self.assertEqual(f.font, 'swiss arial')
        f = Foo(font='12 pt bold italic')
        self.assertEqual(f.font, '12 pt italic bold')
        f = Foo(font='123 Foo bar slant')
        self.assertEqual(f.font, '123 pt slant Foo bar')
        f = Foo(font='123 point Foo family bar slant')
        self.assertEqual(f.font, '123 pt slant Foo bar')
        f = Foo(font='16 xyzzy underline slant')
        self.assertEqual(f.font, '16 pt slant underline xyzzy')
class Test_move(unittest.TestCase):
    """Unit tests for _move stepping a pitch through a reified scale."""

    pitch = 60
    scale = _reify([11])

    def test_none(self):
        # A missing pitch yields no result.
        self.assertIsNone(_move(None, self.scale, 1))

    def test_step_none(self):
        # A missing step yields no result.
        self.assertIsNone(_move(self.pitch, self.scale, None))

    def test_step_0(self):
        # A zero step yields no result.
        self.assertIsNone(_move(self.pitch, self.scale, 0))

    def test_step_minus_1(self):
        # One step down from 60 lands on 59.
        self.assertEqual(_move(self.pitch, self.scale, (- 1)), 59)

    def test_step_1(self):
        # One step up from 60 lands on 71.
        self.assertEqual(_move(self.pitch, self.scale, 1), 71)
def main(maxExamples, includeArchives, shuffle):
    """Validate example documents against their schemas; exit non-zero on failure.

    Optionally extracts archives first (and cleans them up afterwards), then
    loads schemas and examples, validates, reports, and exits with an error
    message if anything failed, was unparseable, or went unchecked.
    """
    extraction_paths = None
    if includeArchives:
        extraction_paths = extractArchives()
    schemas, bad_schema_files = loadSchemas()
    print('Loaded', len(schemas), 'schemas.', flush=True)
    examples, bad_example_files = loadExamples()
    print('Loaded', len(examples), 'examples.', flush=True)
    failures, unchecked, successful_count = validateExamples(examples, schemas, maxExamples, shuffle)
    if includeArchives:
        cleanExtractionPaths(extraction_paths)
    report(unchecked, failures, bad_schema_files, bad_example_files, successful_count)
    problem_count = len(bad_schema_files) + len(bad_example_files) + len(failures) + len(unchecked)
    if problem_count > 0:
        sys.exit('Validation failed.')
class Jump():
    """Helpers building direction sequences and resolving them to (dx, dy) offsets."""

    # Fix: these helpers take no instance/class argument, so they must be
    # @staticmethod to be callable on instances as well as on the class
    # (calling them on the class already worked; this makes both work).
    @staticmethod
    def LEFT(quantity: int=1) -> List[Dir]:
        return ([Dir.LEFT] * quantity)

    @staticmethod
    def RIGHT(quantity: int=1) -> List[Dir]:
        return ([Dir.RIGHT] * quantity)

    @staticmethod
    def UP(quantity: int=1) -> List[Dir]:
        return ([Dir.UP] * quantity)

    @staticmethod
    def DOWN(quantity: int=1) -> List[Dir]:
        return ([Dir.DOWN] * quantity)

    @staticmethod
    def UPLEFT(quantity: int=1) -> List[Dir]:
        return ([Dir.UPLEFT] * quantity)

    @staticmethod
    def UPRIGHT(quantity: int=1) -> List[Dir]:
        return ([Dir.UPRIGHT] * quantity)

    @staticmethod
    def DOWNRIGHT(quantity: int=1) -> List[Dir]:
        return ([Dir.DOWNRIGHT] * quantity)

    @staticmethod
    def DOWNLEFT(quantity: int=1) -> List[Dir]:
        return ([Dir.DOWNLEFT] * quantity)

    @staticmethod
    def eval(translations: List[Dir], radius: float) -> Tuple[(float, float)]:
        """Sum unit translations (scaled by radius) into a (dx, dy) offset."""
        mapping = Dir.translation_map
        assert isinstance(mapping, dict)
        (dx, dy) = (0, 0)
        for direction in translations:
            (i, j) = mapping[direction]
            dx += (i * radius)
            dy += (j * radius)
        return (dx, dy)
def translate_inputs_to_literals(ctx: FlyteContext, incoming_values: Dict[(str, Any)], flyte_interface_types: Dict[(str, _interface_models.Variable)], native_types: Dict[(str, type)]) -> Dict[(str, _literals_models.Literal)]:
    """Convert keyword arguments into Flyte literal values.

    Each incoming value is matched against the declared interface variable
    and its native Python type, resolving attribute paths on Promises first.
    Raises ValueError for a missing/unexpected key and re-raises conversion
    failures annotated with the offending argument name.
    """
    if incoming_values is None:
        raise ValueError('Incoming values cannot be None, must be a dict')

    def _convert(k, v):
        # Per-argument conversion: promise resolution + type-engine lowering.
        var = flyte_interface_types[k]
        t = native_types[k]
        if type(v) is Promise:
            v = resolve_attr_path_in_promise(v)
        return TypeEngine.to_literal(ctx, v, t, var.type)

    result = {}
    for k, v in incoming_values.items():
        if k not in flyte_interface_types:
            raise ValueError(f'Received unexpected keyword argument {k}')
        try:
            result[k] = _convert(k, v)
        except TypeTransformerFailedError as exc:
            raise TypeTransformerFailedError(f"Failed argument '{k}': {exc}") from exc
    return result
def test_update_user(sample_tenant):
    """Updating a tenant-scoped user's email and phone persists both fields."""
    tenant_client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id)
    created = tenant_client.create_user()
    try:
        new_email = _random_email()
        new_phone = _random_phone()
        updated = tenant_client.update_user(created.uid, email=new_email, phone_number=new_phone)
        assert updated.tenant_id == sample_tenant.tenant_id
        assert updated.email == new_email
        assert updated.phone_number == new_phone
    finally:
        # Always remove the fixture user, even when an assertion fails.
        tenant_client.delete_user(created.uid)
class Switcher(wx.Panel):
    """Panel that stacks a SwitcherControl header above a SwitcherPanel body."""

    def __init__(self, parent, id, model, label=None, **kw):
        wx.Panel.__init__(self, parent, id, **kw)
        self.model = model
        self._create_widget(model, label)

    def _create_widget(self, model, label):
        """Build the vertical sizer holding the control and the panel."""
        box = wx.BoxSizer(wx.VERTICAL)
        self.sizer = box
        self.SetSizer(box)
        self.SetAutoLayout(True)

        # Header control: fixed height, stretches horizontally.
        self.control = SwitcherControl(self, (- 1), model, label)
        box.Add(self.control, 0, wx.EXPAND)

        # Body panel: takes all remaining vertical space.
        self.panel = SwitcherPanel(self, (- 1), model, label)
        box.Add(self.panel, 1, wx.EXPAND)

        box.Fit(self)
class StructuredRow(Row):
    # Row that tracks per-attribute changes and keeps plain field storage
    # (`_fields`) separate from ad-hoc extras (the instance __dict__).
    __slots__ = ['_changes', '_compound_rels', '_concrete', '_fields', '_virtuals']

    def _from_engine(cls, data: Dict[(str, Any)]):
        # Alternate constructor hydrating a concrete row from engine data.
        # NOTE(review): takes `cls` but shows no @classmethod decorator in
        # this extract — presumably stripped; confirm against upstream.
        rv = cls(__concrete=True)
        rv._fields.update(data)
        return rv

    def __init__(self, fields: Optional[Dict[(str, Any)]]=None, **extras: Any):
        # object.__setattr__ bypasses this class's change-tracking
        # __setattr__ while wiring up the bookkeeping slots.
        object.__setattr__(self, '_changes', {})
        object.__setattr__(self, '_compound_rels', {})
        object.__setattr__(self, '_concrete', extras.pop('__concrete', False))
        object.__setattr__(self, '_fields', (fields or {}))
        object.__setattr__(self, '_virtuals', {})
        # Remaining keyword extras become plain instance attributes.
        self.__dict__.__init__(**extras)

    def __contains__(self, name):
        return ((name in self._fields) or (name in self.__dict__))

    def __getitem__(self, name):
        # Item access mirrors attribute access.
        return object.__getattribute__(self, name)

    def __setattr__(self, key, value):
        if (key in self.__slots__):
            # Bookkeeping slots are silently ignored (never tracked).
            return
        # Original value = first element of an existing change record,
        # otherwise the current attribute value (None when absent).
        oldv = (self._changes[key][0] if (key in self._changes) else getattr(self, key, None))
        object.__setattr__(self, key, value)
        newv = getattr(self, key, None)
        if (((oldv is None) and (value is not None)) or (oldv != newv)):
            self._changes[key] = (oldv, newv)
        else:
            # Value reverted to its original: drop the change record.
            self._changes.pop(key, None)

    def __setitem__(self, key, value):
        self.__setattr__(key, value)

    def __getstate__(self):
        # Pickle support: fields + extras, with bookkeeping reset to empty.
        return {'__fields': self._fields, '__extras': self.__dict__, '__struct': {'_concrete': self._concrete, '_changes': {}, '_compound_rels': {}, '_virtuals': {}}}

    def __setstate__(self, state):
        self.__dict__.update(state['__extras'])
        object.__setattr__(self, '_fields', state['__fields'])
        for (key, val) in state['__struct'].items():
            object.__setattr__(self, key, val)

    def __bool__(self):
        return (bool(self._fields) or bool(self.__dict__))

    def __eq__(self, other):
        if (not isinstance(other, self.__class__)):
            return False
        return ((self._fields == other._fields) and (self.__dict__ == other.__dict__))

    def __copy__(self):
        return StructuredRow(self._fields, __concrete=self._concrete, **self.__dict__)

    def keys(self):
        # dict-like iteration: fields first, then extras.
        for pool in (self._fields, self.__dict__):
            for item in pool.keys():
                (yield item)

    def values(self):
        for pool in (self._fields, self.__dict__):
            for item in pool.values():
                (yield item)

    def items(self):
        for pool in (self._fields, self.__dict__):
            for item in pool.items():
                (yield item)

    def update(self, *args, **kwargs):
        # dict.update-style bulk assignment routed through __setattr__
        # so every key is change-tracked.
        for arg in args:
            for (key, val) in arg.items():
                self.__setattr__(key, val)
        for (key, val) in kwargs.items():
            self.__setattr__(key, val)

    def changes(self):
        return sdict(self._changes)

    def has_changed(self):
        return bool(self._changes)

    def has_changed_value(self, key):
        return (key in self._changes)

    def get_value_change(self, key):
        # (old, new) tuple for `key`, or None when unchanged.
        return self._changes.get(key, None)

    def clone(self):
        # Copy carrying the ORIGINAL (pre-change) field values.
        fields = {}
        fieldset = set(self._fields.keys())
        changeset = set(self._changes.keys())
        for key in (fieldset & changeset):
            fields[key] = self._changes[key][0]
        for key in (fieldset - changeset):
            fields[key] = self._fields[key]
        return self.__class__(fields, __concrete=self._concrete, **self.__dict__)

    def clone_changed(self):
        # Copy carrying the CURRENT field values (changes applied).
        return self.__class__({**self._fields}, __concrete=self._concrete, **self.__dict__)

    def validation_errors(self):
        return self._model.validate(self)

    def is_valid(self):
        return (not bool(self._model.validate(self)))
class EthSrc(MatchTest):
    """Exact-match on the Ethernet source address (OXM eth_src, no mask)."""

    def runTest(self):
        match = ofp.match([ofp.oxm.eth_src([0, 1, 2, 3, 4, 5])])
        # Packet that must hit the flow.
        matching = {
            'correct': simple_tcp_packet(eth_src='00:01:02:03:04:05'),
        }
        # Packets that must miss: wrong last octet, multicast bit set,
        # locally-administered bit set.
        nonmatching = {
            'incorrect': simple_tcp_packet(eth_src='00:01:02:03:04:06'),
            'multicast': simple_tcp_packet(eth_src='01:01:02:03:04:05'),
            'local': simple_tcp_packet(eth_src='02:01:02:03:04:05'),
        }
        self.verify_match(match, matching, nonmatching)
def _iter_module_files():
    """Yield one on-disk file path for every loaded module.

    Each module's ``__file__`` is walked upward (dirname by dirname) until
    an existing file is found; modules without ``__file__`` (builtins) and
    paths that never resolve to a real file are skipped.
    """
    for mod in list(sys.modules.values()):
        if mod is None:
            continue
        path = getattr(mod, '__file__', None)
        if not path:
            continue
        previous = None
        while not os.path.isfile(path):
            previous = path
            path = os.path.dirname(path)
            if path == previous:
                # dirname reached a fixed point: give up on this module.
                break
        else:
            # Loop exited without break => `path` is an existing file.
            yield path
class OptionPlotoptionsSunburstSonificationContexttracksMappingPitch(Options):
    # Highcharts sonification pitch-mapping options for sunburst context tracks.
    # NOTE(review): each name below is defined twice (getter-style then
    # setter-style); as written the second def shadows the first. Upstream
    # presumably carried @property / @<name>.setter decorators that were
    # lost in this extract — confirm before relying on this block.

    def mapFunction(self):
        # No mapping function configured by default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Default data property mapped to pitch.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Default upper pitch bound (note name).
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Default lower pitch bound (note name).
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Default reference frame for min/max.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
class DelimitedBashReceiver(protocol.Protocol):
    # Twisted protocol driving a remote bash session: each command is
    # followed by `echo <unique delimiter>` so the end of its output can be
    # located in the byte stream. The peer is kept alive with periodic pings.

    def __init__(self):
        self.buffer = BytesIO()          # accumulated, not-yet-consumed output
        self.current_delimiters = []     # FIFO of (delimiter, Deferred) awaiting output
        self.handle = None               # "host:port" id, set in connectionMade
        self.ping_timeout = None
        self.ping_timer = None
        self.timed_out = False

    def command(self, cmd, delimiter=None):
        # Send `cmd`; the returned Deferred fires with its output bytes.
        if (delimiter is None):
            # Unique per call so concurrent commands cannot collide.
            delimiter = bytes('__delimiter__{}__\n'.format(uuid.uuid4()), 'utf-8')
        dfr = defer.Deferred()
        self.current_delimiters.append((delimiter, dfr))
        delimited_command = ((bytes(cmd, 'utf-8') + b'\necho ') + delimiter)
        LOG.d('Sending:', delimited_command)
        self.transport.write(delimited_command)
        return dfr

    def check_for_further_responses(self):
        # Flush any additional complete responses already sitting in the buffer.
        self.buffer.seek(0)
        rest = self.buffer.read()
        while self.current_delimiters:
            (delimiter, dfr) = self.current_delimiters[0]
            try:
                # Raises ValueError unless the delimiter occurs exactly once.
                (response, rest) = rest.split(delimiter)
            except ValueError:
                break
            self.current_delimiters.pop(0)
            dfr.callback(response)
        new_buffer = BytesIO()
        new_buffer.write(rest)
        self.buffer = new_buffer

    def ping(self):
        # Send a keepalive and arm the timeout that fires if no reply comes.
        self.command('ping').addCallback(self.ping_response)
        self.ping_timeout = reactor.callLater(self.factory.ping_timeout, self.ping_timed_out)

    def ping_response(self, _):
        if self.timed_out:
            # Peer answered again after a missed ping: report reconnection.
            LOG.w('Reconnected:', self.handle)
            exchange.client_reconnected(self.handle)
            self.timed_out = False
        else:
            self.ping_timeout.cancel()
        # NOTE(review): indentation was lost in this extract; scheduling the
        # next ping in BOTH branches matches the keepalive intent — confirm.
        self.ping_timer = reactor.callLater(self.factory.ping_interval, self.ping)

    def ping_timed_out(self):
        LOG.w('Ping timeout:', self.handle)
        self.timed_out = True
        exchange.client_timed_out(self.handle)

    def connectionMade(self):
        def forward_connection(_result):
            # Startup commands succeeded: announce the client, start pinging.
            exchange.client_connected(self.handle, self)
            self.ping_timer = reactor.callLater(self.factory.ping_interval, self.ping)

        def kill_connection(error):
            LOG.e('Error: Could not send the startup functions to the client:', error)
            self._loseConnection()

        peer = self.transport.getPeer()
        self.handle = '{}:{}'.format(peer.host, peer.port)
        LOG.w('Connected:', self.handle)
        dfr = self.command(self.factory.startup_commands)
        dfr.addCallback(forward_connection)
        dfr.addErrback(kill_connection)

    def connectionLost(self, reason=protocol.connectionDone):
        LOG.w('Connection lost:', self.handle)
        # Timers may be unset or already fired; best-effort cancellation.
        try:
            self.ping_timeout.cancel()
        except Exception:
            pass
        try:
            self.ping_timer.cancel()
        except Exception:
            pass
        if (self.handle in exchange.known_clients):
            exchange.client_disconnected(self.handle)

    def dataReceived(self, data):
        # Append to the buffer, then search for the oldest outstanding
        # delimiter within the tail (new data plus one delimiter-length of
        # overlap, in case the delimiter straddles two reads).
        self.buffer.seek(0, os.SEEK_END)
        self.buffer.write(data)
        if (not self.current_delimiters):
            return
        (delimiter, dfr) = self.current_delimiters[0]
        LOG.d('Searching for delimiter:', delimiter)
        search_length = (len(data) + len(delimiter))
        self.buffer.seek((- search_length), os.SEEK_END)
        search_str = self.buffer.read()
        search_pos = search_str.find(delimiter)
        if (search_pos != (- 1)):
            LOG.d('Found delimiter:', delimiter)
            # Translate the relative hit into an absolute buffer position,
            # split buffer into response / remainder, fire the Deferred.
            self.buffer.seek((- search_length), os.SEEK_END)
            self.buffer.seek(search_pos, os.SEEK_CUR)
            pos = self.buffer.tell()
            self.buffer.seek(0)
            response = self.buffer.read(pos)
            self.buffer.read(len(delimiter))
            new_buffer = BytesIO()
            new_buffer.write(self.buffer.read())
            self.buffer = new_buffer
            self.current_delimiters.pop(0)
            dfr.callback(response)
            # More than one response may have arrived in this chunk.
            self.check_for_further_responses()
def extractGraintransWordpressCom(item):
    """Parse a graintrans.wordpress.com feed item into a release message.

    Returns None for previews or items without chapter/volume info, a built
    release message for recognised tags, and False when nothing matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    for tagname, series, tl_type in (('PRC', 'PRC', 'translated'),
                                     ('Loiterous', 'Loiterous', 'oel')):
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Options():
    # fal serverless function options: host/environment/gateway config maps.
    # NOTE(review): uses dataclasses.field defaults — a @dataclass decorator
    # was presumably stripped from this extract; confirm against upstream.
    host: BasicConfig = field(default_factory=dict)
    environment: BasicConfig = field(default_factory=dict)
    gateway: BasicConfig = field(default_factory=dict)

    def add_requirements(self, requirements: list[str]):
        # Merge extra pip requirements into the environment definition.
        kind = self.environment['kind']
        if (kind == 'virtualenv'):
            pip_requirements = self.environment.setdefault('requirements', [])
        elif (kind == 'conda'):
            # conda environments carry pip packages under the 'pip' key.
            pip_requirements = self.environment.setdefault('pip', [])
        else:
            raise FalServerlessError('Only conda and virtualenv is supported as environment options')
        if set(pip_requirements).issuperset(set(requirements)):
            # Everything requested is already present.
            return None
        pip_requirements.extend(requirements)

    def get_exposed_port(self) -> (int | None):
        # Serving functions always use the fixed serve port; otherwise use
        # whatever (if anything) the gateway config exposes.
        if self.gateway.get('serve'):
            return _SERVE_PORT
        else:
            return self.gateway.get('exposed_port')
def untypable_node_reporter() -> GraphFixer:
    """Build a graph-fixer pass reporting nodes the lattice typer cannot type.

    A node is reported only when it is itself untypable (or Tensor-typed)
    while every one of its inputs is typable — i.e. it is the root cause
    rather than a downstream casualty of an upstream problem.
    """
    typer = LatticeTyper()

    def get_error(bmg: BMGraphBuilder, node: bn.BMGNode) -> Optional[BMGError]:
        if isinstance(node, bn.ConstantNode):
            # Constants are never reported.
            return None
        node_type = typer[node]
        if node_type != bt.Untypable and node_type != bt.Tensor:
            # Node itself is fine.
            return None
        for inp in node.inputs:
            input_type = typer[inp]
            if input_type == bt.Untypable or input_type == bt.Tensor:
                # Blame the upstream node instead of this one.
                return None
        return UntypableNode(node, bmg.execution_context.node_locations(node))

    return node_error_pass(get_error)
def channels_list(key, part, categoryId=None, forUsername=None, id=None, managedByMe=None, mine=None, mySubscribers=None, hl=None, maxResults=None, onBehalfOfContentOwner=None, pageToken=None):
    """Query the YouTube Data API v3 ``channels.list`` endpoint.

    Validates the ``part`` parameter against the allowed part names and
    requires exactly one channel filter (categoryId / forUsername / id /
    managedByMe / mine / mySubscribers) before delegating to
    ``_combine_requests`` for paginated fetching.

    Raises:
        ValueError: on an unknown ``part`` or when not exactly one filter
            argument is supplied.
    """
    # Must be first: capture only the request parameters.
    args = locals()
    part_params = {'contentDetails', 'id', '(deprecated) localizations', 'snippet', 'auditDetails', 'statistics', 'status', 'invideoPromotion', 'brandingSettings', 'contentOwnerDetails', 'topicDetails'}
    if not set(part.split(',')).issubset(part_params):
        raise ValueError(('make sure your `part` parameter is one or more of ' + str(part_params)))
    filter_args = [categoryId, forUsername, id, managedByMe, mine, mySubscribers]
    if sum(bool(p) for p in filter_args) != 1:
        raise ValueError("make sure you specify exactly one of ['categoryId', 'forUsername', 'id', 'managedByMe', 'mine', 'mySubscribers']")
    # NOTE(review): the URL literal was truncated in this extract; restored
    # to the standard Data API v3 channels endpoint — confirm upstream.
    base_url = 'https://www.googleapis.com/youtube/v3/channels'
    return _combine_requests(args, base_url, count=maxResults, max_allowed=50)
def test_formatter_plugin(tmp_path, monkeypatch):
    """A registered code formatter rewrites fenced blocks for its language."""
    monkeypatch.setitem(CODEFORMATTERS, 'lang', example_formatter)
    md_file = tmp_path / 'test_markdown.md'
    md_file.write_text('```lang\nother\n```\n')
    # Formatter applied in place; exit code 0 means success.
    assert run((str(md_file),)) == 0
    assert md_file.read_text() == '```lang\ndummy\n```\n'
def ethtest_fixtures_as_pytest_fixtures(*test_files: str) -> List[Tuple[(RLP, Bytes)]]:
    """Load ethereum/tests RLP vectors as (native value, expected bytes) pairs.

    Merges the JSON test files under RLPTests/; inputs of the form "#123"
    denote big integers and are decoded before conversion.
    """
    base_path = f'{ETHEREUM_TESTS_PATH}/RLPTests/'
    test_data = {}
    for name in test_files:
        with open(os.path.join(base_path, name), 'r') as handle:
            test_data.update(json.load(handle))

    fixtures = []
    for case in test_data.values():
        raw_in = case['in']
        if isinstance(raw_in, str) and raw_in.startswith('#'):
            # "#<digits>" marks a big-integer input.
            case['in'] = int(raw_in[1:])
        fixtures.append((convert_to_rlp_native(case['in']), hex_to_bytes(case['out'])))
    return fixtures
.parametrize('original_schema,algorithm,expected_fingerprint', [('int', 'CRC-64-AVRO', '8f5c393f1ad57572'), ('int', 'md5', 'ef524ea1b91e73173d938ade36c1db32'), ('int', 'sha256', '3f2b87a9fe7cc9bc3981cd45e3e355309e5090aa0933d7becb6fba45'), ({'type': 'int'}, 'CRC-64-AVRO', '8f5c393f1ad57572'), ({'type': 'int'}, 'md5', 'ef524ea1b91e73173d938ade36c1db32'), ({'type': 'int'}, 'sha256', '3f2b87a9fe7cc9bc3981cd45e3e355309e5090aa0933d7becb6fba45'), ('float', 'CRC-64-AVRO', '90d7a83ecb027c4d'), ('float', 'md5', '50a6b9db85da367a6d2df400a41758a6'), ('float', 'sha256', '1e71f9ec051d663f56b0d8e1fc84d71aa56ccfe9fa93aa20d10547a7abeb5cc0'), ({'type': 'float'}, 'CRC-64-AVRO', '90d7a83ecb027c4d'), ({'type': 'float'}, 'md5', '50a6b9db85da367a6d2df400a41758a6'), ({'type': 'float'}, 'sha256', '1e71f9ec051d663f56b0d8e1fc84d71aa56ccfe9fa93aa20d10547a7abeb5cc0'), ('long', 'CRC-64-AVRO', 'b71df49344e154d0'), ('long', 'md5', 'e1dd9a1ef98b451bb393966b'), ('long', 'sha256', 'c32c497df6730c97fa07362aa5023f37d49a027eccf427965add'), ({'type': 'long'}, 'CRC-64-AVRO', 'b71df49344e154d0'), ({'type': 'long'}, 'md5', 'e1dd9a1ef98b451bb393966b'), ({'type': 'long'}, 'sha256', 'c32c497df6730c97fa07362aa5023f37d49a027eccf427965add'), ('double', 'CRC-64-AVRO', '7e95ab32c035758e'), ('double', 'md5', 'bfc71a62f38b99d6a93690deeb4b3af6'), ('double', 'sha256', '730a9a8c611681d7eef442e03c16c70d13bca3eb8b977bb403eaff52176af254'), ({'type': 'double'}, 'CRC-64-AVRO', '7e95ab32c035758e'), ({'type': 'double'}, 'md5', 'bfc71a62f38b99d6a93690deeb4b3af6'), ({'type': 'double'}, 'sha256', '730a9a8c611681d7eef442e03c16c70d13bca3eb8b977bb403eaff52176af254'), ('bytes', 'CRC-64-AVRO', '651920c3da16c04f'), ('bytes', 'md5', 'b462f06cb909be57ccde6'), ('bytes', 'sha256', '9ae507a9dd39ee5b7c7e285da2c0846521c8ae8d80feeae5504e0c981d53f5fa'), ({'type': 'bytes'}, 'CRC-64-AVRO', '651920c3da16c04f'), ({'type': 'bytes'}, 'md5', 'b462f06cb909be57ccde6'), ({'type': 'bytes'}, 'sha256', 
'9ae507a9dd39ee5b7c7e285da2c0846521c8ae8d80feeae5504e0c981d53f5fa'), ('string', 'CRC-64-AVRO', 'cf'), ('string', 'md5', '095d71cf12556b9d5e330ad575b3df5d'), ('string', 'sha256', 'e9e5c1c9e4f6277339d1bcde0733a59bd42f8731f449da6dc13010a916930d48'), ({'type': 'string'}, 'CRC-64-AVRO', 'cf'), ({'type': 'string'}, 'md5', '095d71cf12556b9d5e330ad575b3df5d'), ({'type': 'string'}, 'sha256', 'e9e5c1c9e4f6277339d1bcde0733a59bd42f8731f449da6dc13010a916930d48'), ('boolean', 'CRC-64-AVRO', '64f7d4a478fc429f'), ('boolean', 'md5', '01f692b30d4a1c8a3e600b1440637f8f'), ('boolean', 'sha256', 'a5b031ab62bc416d720c0410d802ea46b910c4fbe85c50a946ccc658b74e677e'), ({'type': 'boolean'}, 'CRC-64-AVRO', '64f7d4a478fc429f'), ({'type': 'boolean'}, 'md5', '01f692b30d4a1c8a3e600b1440637f8f'), ({'type': 'boolean'}, 'sha256', 'a5b031ab62bc416d720c0410d802ea46b910c4fbe85c50a946ccc658b74e677e'), ('null', 'CRC-64-AVRO', '8a8f25cce724dd63'), ('null', 'md5', '9b41ef67651c18488a8b08bb67c75699'), ('null', 'sha256', 'f072cbec3bf8841871d4284230c5e983dc211a56837aedf947d1a1f'), ({'type': 'null'}, 'CRC-64-AVRO', '8a8f25cce724dd63'), ({'type': 'null'}, 'md5', '9b41ef67651c18488a8b08bb67c75699'), ({'type': 'null'}, 'sha256', 'f072cbec3bf8841871d4284230c5e983dc211a56837aedf947d1a1f'), ({'type': 'fixed', 'name': 'Test', 'size': 1}, 'CRC-64-AVRO', '6869897b4049355b'), ({'type': 'fixed', 'name': 'Test', 'size': 1}, 'md5', 'db01bc515fcfcd2d4be82ed'), ({'type': 'fixed', 'name': 'Test', 'size': 1}, 'sha256', 'f527116a6fe935afc31dc60ad0f95caf35e1d9c9db62edb3ffeb9170'), ({'type': 'fixed', 'name': 'MyFixed', 'namespace': 'org.apache.hadoop.avro', 'size': 1}, 'CRC-64-AVRO', 'fadbd138e85bdf45'), ({'type': 'fixed', 'name': 'MyFixed', 'namespace': 'org.apache.hadoop.avro', 'size': 1}, 'md5', 'd74bc465d49e857b1ba'), ({'type': 'fixed', 'name': 'MyFixed', 'namespace': 'org.apache.hadoop.avro', 'size': 1}, 'sha256', '28e493a44771cecc5deca4bd938cdc3d5a24cfe1f3760bc938fa1057df6334fc'), ({'type': 'enum', 'name': 'Test', 'symbols': 
['A', 'B']}, 'CRC-64-AVRO', '03a2f2c2e27f7a16'), ({'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}, 'md5', 'd883f2a9b16ed085fcc5e4ca6c8f6ed1'), ({'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}, 'sha256', '9bf87ce5aebdc61ca834379effa5a41ce6ac0938630ff246297caca8'), ({'type': 'array', 'items': 'long'}, 'CRC-64-AVRO', '715e2ea28bc91654'), ({'type': 'array', 'items': 'long'}, 'md5', 'c1c387e8d6a58f0df749b698991b1f43'), ({'type': 'array', 'items': 'long'}, 'sha256', 'f78e954167feb23dcb1ce01e8463cebf3408e0a4259e16f24bd38f6d0f1d578b'), ({'type': 'array', 'items': {'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}}, 'CRC-64-AVRO', '10d9ade1fa3a0387'), ({'type': 'array', 'items': {'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}}, 'md5', 'cfc7b861c7cfef082a6effa'), ({'type': 'array', 'items': {'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}}, 'sha256', '0d8edd49d7f7e9553668f133577bc99f842852b55d9f84f1f7511e4961aa685c'), ({'type': 'map', 'values': 'long'}, 'CRC-64-AVRO', '6f74f4e409b1334e'), ({'type': 'map', 'values': 'long'}, 'md5', '32b3f1a3177a0ef00448b56e'), ({'type': 'map', 'values': 'long'}, 'sha256', 'b8fad07d458971ab8a7cf626c86c62fe6bcff7c1b11bc7295de34853'), ({'type': 'map', 'values': {'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}}, 'CRC-64-AVRO', 'df2ab0626f6b812d'), ({'type': 'map', 'values': {'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}}, 'md5', 'c588da6ba99701c41e73fd30d23f994e'), ({'type': 'map', 'values': {'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B']}}, 'sha256', '3886747ed1669a8af476b549e97b34222afb2fed5f18bb27c6f367ea0351a576'), (['string', 'null', 'long'], 'CRC-64-AVRO', '65a5be410d687566'), (['string', 'null', 'long'], 'md5', 'b11cf95f0a55dd55f9ee515a37bf937a'), (['string', 'null', 'long'], 'sha256', 'ed8dbb35e237ad0563cf5432b8c975334bd222c1eed95bb'), ({'type': 'record', 'name': 'Test', 'fields': [{'name': 'f', 'type': 'long'}]}, 'CRC-64-AVRO', 'ed94e5f5e6eb588e'), ({'type': 'record', 'name': 
'Test', 'fields': [{'name': 'f', 'type': 'long'}]}, 'md5', '69531a03db788afe353244cd049b1e6d'), ({'type': 'record', 'name': 'Test', 'fields': [{'name': 'f', 'type': 'long'}]}, 'sha256', '9670f15a8f96d23e92830d00b8bd57275e02e3e173ffef7c253c170b6beabeb8'), ({'type': 'error', 'name': 'Test', 'fields': [{'name': 'f', 'type': 'long'}]}, 'CRC-64-AVRO', 'ed94e5f5e6eb588e'), ({'type': 'error', 'name': 'Test', 'fields': [{'name': 'f', 'type': 'long'}]}, 'md5', '69531a03db788afe353244cd049b1e6d'), ({'type': 'error', 'name': 'Test', 'fields': [{'name': 'f', 'type': 'long'}]}, 'sha256', '9670f15a8f96d23e92830d00b8bd57275e02e3e173ffef7c253c170b6beabeb8'), ({'type': 'record', 'name': 'Node', 'fields': [{'name': 'label', 'type': 'string'}, {'name': 'children', 'type': {'type': 'array', 'items': 'Node'}}]}, 'CRC-64-AVRO', '52cba544c3e756b7'), ({'type': 'record', 'name': 'Node', 'fields': [{'name': 'label', 'type': 'string'}, {'name': 'children', 'type': {'type': 'array', 'items': 'Node'}}]}, 'md5', '99625b0cce89ef66b0f406c9'), ({'type': 'record', 'name': 'Node', 'fields': [{'name': 'label', 'type': 'string'}, {'name': 'children', 'type': {'type': 'array', 'items': 'Node'}}]}, 'sha256', '65d80dc8c95c98a9671d92cf0415edfabfee2cb058dfcd6ae4dc59'), ({'type': 'record', 'name': 'Lisp', 'fields': [{'name': 'value', 'type': ['null', 'string', {'type': 'record', 'name': 'Cons', 'fields': [{'name': 'car', 'type': 'Lisp'}, {'name': 'cdr', 'type': 'Lisp'}]}]}]}, 'CRC-64-AVRO', '68d91a23eda0b306'), ({'type': 'record', 'name': 'Lisp', 'fields': [{'name': 'value', 'type': ['null', 'string', {'type': 'record', 'name': 'Cons', 'fields': [{'name': 'car', 'type': 'Lisp'}, {'name': 'cdr', 'type': 'Lisp'}]}]}]}, 'md5', '9e1d0d15b52789fcb8e3a88b53059d5f'), ({'type': 'record', 'name': 'Lisp', 'fields': [{'name': 'value', 'type': ['null', 'string', {'type': 'record', 'name': 'Cons', 'fields': [{'name': 'car', 'type': 'Lisp'}, {'name': 'cdr', 'type': 'Lisp'}]}]}]}, 'sha256', 
'e5ce4f4a15ce19fa1047cfe16a3b0e13a755db40f00f23284fdd376fc1c7dd21'), ({'type': 'record', 'name': 'HandshakeRequest', 'namespace': 'org.apache.avro.ipc', 'fields': [{'name': 'clientHash', 'type': {'type': 'fixed', 'name': 'MD5', 'size': 16}}, {'name': 'clientProtocol', 'type': ['null', 'string']}, {'name': 'serverHash', 'type': 'MD5'}, {'name': 'meta', 'type': ['null', {'type': 'map', 'values': 'bytes'}]}]}, 'CRC-64-AVRO', 'b96ad79e5a7c5757'), ({'type': 'record', 'name': 'HandshakeRequest', 'namespace': 'org.apache.avro.ipc', 'fields': [{'name': 'clientHash', 'type': {'type': 'fixed', 'name': 'MD5', 'size': 16}}, {'name': 'clientProtocol', 'type': ['null', 'string']}, {'name': 'serverHash', 'type': 'MD5'}, {'name': 'meta', 'type': ['null', {'type': 'map', 'values': 'bytes'}]}]}, 'md5', '4c822af2e17eecdeede97f5b'), ({'type': 'record', 'name': 'HandshakeRequest', 'namespace': 'org.apache.avro.ipc', 'fields': [{'name': 'clientHash', 'type': {'type': 'fixed', 'name': 'MD5', 'size': 16}}, {'name': 'clientProtocol', 'type': ['null', 'string']}, {'name': 'serverHash', 'type': 'MD5'}, {'name': 'meta', 'type': ['null', {'type': 'map', 'values': 'bytes'}]}]}, 'sha256', '2b2f7a9b22991fe0df9134cb6b5ff7355343e797aaea337e0150e20f3a35800e'), ({'type': 'record', 'name': 'HandshakeResponse', 'namespace': 'org.apache.avro.ipc', 'fields': [{'name': 'match', 'type': {'type': 'enum', 'name': 'HandshakeMatch', 'symbols': ['BOTH', 'CLIENT', 'NONE']}}, {'name': 'serverProtocol', 'type': ['null', 'string']}, {'name': 'serverHash', 'type': ['null', {'name': 'MD5', 'size': 16, 'type': 'fixed'}]}, {'name': 'meta', 'type': ['null', {'type': 'map', 'values': 'bytes'}]}]}, 'CRC-64-AVRO', '00feee01de4ea50e'), ({'type': 'record', 'name': 'HandshakeResponse', 'namespace': 'org.apache.avro.ipc', 'fields': [{'name': 'match', 'type': {'type': 'enum', 'name': 'HandshakeMatch', 'symbols': ['BOTH', 'CLIENT', 'NONE']}}, {'name': 'serverProtocol', 'type': ['null', 'string']}, {'name': 'serverHash', 'type': 
['null', {'name': 'MD5', 'size': 16, 'type': 'fixed'}]}, {'name': 'meta', 'type': ['null', {'type': 'map', 'values': 'bytes'}]}]}, 'md5', 'afe529d01132daab7f4e2a6663e7a2f5'), ({'type': 'record', 'name': 'HandshakeResponse', 'namespace': 'org.apache.avro.ipc', 'fields': [{'name': 'match', 'type': {'type': 'enum', 'name': 'HandshakeMatch', 'symbols': ['BOTH', 'CLIENT', 'NONE']}}, {'name': 'serverProtocol', 'type': ['null', 'string']}, {'name': 'serverHash', 'type': ['null', {'name': 'MD5', 'size': 16, 'type': 'fixed'}]}, {'name': 'meta', 'type': ['null', {'type': 'map', 'values': 'bytes'}]}]}, 'sha256', 'a303cbbfe13958f880605d70c521a4b7be34d9265ac5a848f25916a67b11d889'), ({'type': 'record', 'name': 'Interop', 'namespace': 'org.apache.avro', 'fields': [{'name': 'intField', 'type': 'int'}, {'name': 'longField', 'type': 'long'}, {'name': 'stringField', 'type': 'string'}, {'name': 'boolField', 'type': 'boolean'}, {'name': 'floatField', 'type': 'float'}, {'name': 'doubleField', 'type': 'double'}, {'name': 'bytesField', 'type': 'bytes'}, {'name': 'nullField', 'type': 'null'}, {'name': 'arrayField', 'type': {'type': 'array', 'items': 'double'}}, {'name': 'mapField', 'type': {'type': 'map', 'values': {'name': 'Foo', 'type': 'record', 'fields': [{'name': 'label', 'type': 'string'}]}}}, {'name': 'unionField', 'type': ['boolean', 'double', {'type': 'array', 'items': 'bytes'}]}, {'name': 'enumField', 'type': {'type': 'enum', 'name': 'Kind', 'symbols': ['A', 'B', 'C']}}, {'name': 'fixedField', 'type': {'type': 'fixed', 'name': 'MD5', 'size': 16}}, {'name': 'recordField', 'type': {'type': 'record', 'name': 'Node', 'fields': [{'name': 'label', 'type': 'string'}, {'name': 'children', 'type': {'type': 'array', 'items': 'Node'}}]}}]}, 'CRC-64-AVRO', 'e82c0a93a6a0b5a4'), ({'type': 'record', 'name': 'Interop', 'namespace': 'org.apache.avro', 'fields': [{'name': 'intField', 'type': 'int'}, {'name': 'longField', 'type': 'long'}, {'name': 'stringField', 'type': 'string'}, {'name': 
'boolField', 'type': 'boolean'}, {'name': 'floatField', 'type': 'float'}, {'name': 'doubleField', 'type': 'double'}, {'name': 'bytesField', 'type': 'bytes'}, {'name': 'nullField', 'type': 'null'}, {'name': 'arrayField', 'type': {'type': 'array', 'items': 'double'}}, {'name': 'mapField', 'type': {'type': 'map', 'values': {'name': 'Foo', 'type': 'record', 'fields': [{'name': 'label', 'type': 'string'}]}}}, {'name': 'unionField', 'type': ['boolean', 'double', {'type': 'array', 'items': 'bytes'}]}, {'name': 'enumField', 'type': {'type': 'enum', 'name': 'Kind', 'symbols': ['A', 'B', 'C']}}, {'name': 'fixedField', 'type': {'type': 'fixed', 'name': 'MD5', 'size': 16}}, {'name': 'recordField', 'type': {'type': 'record', 'name': 'Node', 'fields': [{'name': 'label', 'type': 'string'}, {'name': 'children', 'type': {'type': 'array', 'items': 'Node'}}]}}]}, 'md5', '994fea1a1be7ff8603cbe40c3bc7e4ca'), ({'type': 'record', 'name': 'Interop', 'namespace': 'org.apache.avro', 'fields': [{'name': 'intField', 'type': 'int'}, {'name': 'longField', 'type': 'long'}, {'name': 'stringField', 'type': 'string'}, {'name': 'boolField', 'type': 'boolean'}, {'name': 'floatField', 'type': 'float'}, {'name': 'doubleField', 'type': 'double'}, {'name': 'bytesField', 'type': 'bytes'}, {'name': 'nullField', 'type': 'null'}, {'name': 'arrayField', 'type': {'type': 'array', 'items': 'double'}}, {'name': 'mapField', 'type': {'type': 'map', 'values': {'name': 'Foo', 'type': 'record', 'fields': [{'name': 'label', 'type': 'string'}]}}}, {'name': 'unionField', 'type': ['boolean', 'double', {'type': 'array', 'items': 'bytes'}]}, {'name': 'enumField', 'type': {'type': 'enum', 'name': 'Kind', 'symbols': ['A', 'B', 'C']}}, {'name': 'fixedField', 'type': {'type': 'fixed', 'name': 'MD5', 'size': 16}}, {'name': 'recordField', 'type': {'type': 'record', 'name': 'Node', 'fields': [{'name': 'label', 'type': 'string'}, {'name': 'children', 'type': {'type': 'array', 'items': 'Node'}}]}}]}, 'sha256', 
'cccfd6e3f917cf53b0f90c206342e6703b0d905071f724a1c1f85b731c74058d'), ({'type': 'record', 'name': 'ipAddr', 'fields': [{'name': 'addr', 'type': [{'name': 'IPv6', 'type': 'fixed', 'size': 16}, {'name': 'IPv4', 'type': 'fixed', 'size': 4}]}]}, 'CRC-64-AVRO', '8d961b4e298a1844'), ({'type': 'record', 'name': 'ipAddr', 'fields': [{'name': 'addr', 'type': [{'name': 'IPv6', 'type': 'fixed', 'size': 16}, {'name': 'IPv4', 'type': 'fixed', 'size': 4}]}]}, 'md5', '45d85c69b353a99b93d7c4f2fcf0c30d'), ({'type': 'record', 'name': 'ipAddr', 'fields': [{'name': 'addr', 'type': [{'name': 'IPv6', 'type': 'fixed', 'size': 16}, {'name': 'IPv4', 'type': 'fixed', 'size': 4}]}]}, 'sha256', '6f6fc8f685a4f07ddd55a8620febea047cf52cb0ac181'), ({'type': 'record', 'name': 'TestDoc', 'doc': 'Doc string', 'fields': [{'name': 'name', 'type': 'string', 'doc': 'Doc String'}]}, 'CRC-64-AVRO', '0e6660f02bcdc109'), ({'type': 'record', 'name': 'TestDoc', 'doc': 'Doc string', 'fields': [{'name': 'name', 'type': 'string', 'doc': 'Doc String'}]}, 'md5', 'f2da75f5131f5abb8beb2'), ({'type': 'record', 'name': 'TestDoc', 'doc': 'Doc string', 'fields': [{'name': 'name', 'type': 'string', 'doc': 'Doc String'}]}, 'sha256', '0b3644f7aa5ca2fc4bad93ca2d3609c12aa9dbda9c15e68b34c120beff08e7b9'), ({'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B'], 'doc': 'Doc String'}, 'CRC-64-AVRO', '03a2f2c2e27f7a16'), ({'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B'], 'doc': 'Doc String'}, 'md5', 'd883f2a9b16ed085fcc5e4ca6c8f6ed1'), ({'type': 'enum', 'name': 'Test', 'symbols': ['A', 'B'], 'doc': 'Doc String'}, 'sha256', '9bf87ce5aebdc61ca834379effa5a41ce6ac0938630ff246297caca8'), ({'type': 'int'}, 'MD5', 'ef524ea1b91e73173d938ade36c1db32'), ({'type': 'int'}, 'SHA-256', '3f2b87a9fe7cc9bc3981cd45e3e355309e5090aa0933d7becb6fba45')]) def test_random_cases(original_schema, algorithm, expected_fingerprint): canonical_form = to_parsing_canonical_form(original_schema) assert (fingerprint(canonical_form, algorithm) == 
expected_fingerprint)
def extractVentifrappeWordpressCom(item):
    """Parse a ventifrappe.wordpress.com feed item into a release message.

    Returns None for previews or items without chapter/volume info, a built
    release message for recognised title prefixes, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    known_titles = (
        ('Doctoring the world Chapter ', 'Doctoring the world', 'translated'),
        ('Doctoring the world: Chapter ', 'Doctoring the world', 'translated'),
        ('Master of Dungeon', 'Master of Dungeon', 'oel'),
    )
    lowered_title = item['title'].lower()
    for prefix, series, tl_type in known_titles:
        if prefix.lower() in lowered_title:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestUnsqueezeConverter(AITTestCase):
    # fx2ait converter tests for torch.unsqueeze / acc_ops.unsqueeze.

    # NOTE(review): the bare expression below is presumably the argument
    # list of a stripped @parameterized.expand decorator for test_simple
    # (case name + dim) — confirm against upstream.
    ([['default', 1], ['negative_dim', (- 1)]])

    def test_simple(self, name: str, dim: int):
        # Static-shape lowering: unsqueeze along `dim` (positive or negative).
        class TestModule(torch.nn.Module):
            def forward(self, x: torch.Tensor) -> torch.Tensor:
                return torch.unsqueeze(x, dim)
        model = TestModule().cuda()
        inputs = [torch.randn(2, 3, 4).half().cuda()]
        self.run_test(model, inputs, expected_ops={acc_ops.unsqueeze})

    def test_simple_dynamic_shape(self):
        # Dynamic-shape lowering: batch dimension ranges from 2 to 20.
        class TestModule(torch.nn.Module):
            def forward(self, x: torch.Tensor) -> torch.Tensor:
                return torch.unsqueeze(x, 1)
        model = TestModule().cuda()
        inputs_spec = TensorSpec.create_spec_from_shapes(inputs_min=[[2, 3, 4]], inputs_max=[[20, 3, 4]], dtype_list=[torch.float16])
        self.run_test_with_dynamic_shape(model, inputs_spec, expected_ops={acc_ops.unsqueeze})
class TargetCaseModel(ValueModel):
    """Value model for the target case name.

    Tracks a default derived from the current case (either a case-format
    template or "<case>_smoother_update") and only keeps a user-entered
    custom value until it is cleared back to the default.
    """

    def __init__(self, analysis_config: AnalysisConfig, notifier: ErtNotifier, format_mode: bool=False):
        self.analysis_config = analysis_config
        self.notifier = notifier
        self._format_mode = format_mode
        self._custom = False
        super().__init__(self.getDefaultValue())
        # Keep the displayed default in sync with ERT/case changes.
        notifier.ertChanged.connect(self.on_current_case_changed)
        notifier.current_case_changed.connect(self.on_current_case_changed)

    def setValue(self, value: str):
        """Store a custom name, or fall back to (and track) the default."""
        if (value is None) or (value.strip() == '') or (value == self.getDefaultValue()):
            self._custom = False
            ValueModel.setValue(self, self.getDefaultValue())
        else:
            self._custom = True
            ValueModel.setValue(self, value)

    def getDefaultValue(self) -> str:
        """Derive the default target case name from the current case."""
        if self._format_mode:
            config = self.analysis_config
            if config.case_format_is_set():
                return config.case_format
            return f'{self.notifier.current_case_name}_%d'
        return f'{self.notifier.current_case_name}_smoother_update'

    def on_current_case_changed(self, *args) -> None:
        # Only follow the moving default while no custom name is set.
        if not self._custom:
            super().setValue(self.getDefaultValue())
@_wrapper('wolfe')
def hager_zhang(x0, p, get_phi_dphi, get_fg, conds, max_cycles, alpha_init=None, alpha_prev=None, f_prev=None, dphi0_prev=None, quad_step=False, eps=1e-06, theta=0.5, gamma=0.5, rho=5, psi_0=0.01, psi_1=0.1, psi_2=2.0, psi_low=0.1, psi_hi=10, Delta=0.7, omega=0.001, max_bisects=10):
    """Hager-Zhang line search (bracketing + double-secant interval updates).

    Returns (alpha, f_new, g_new, dphi0) where alpha satisfies the Wolfe
    condition checked by ``conds['wolfe']`` or was delivered by a
    LineSearchConverged exception raised inside the evaluators.

    NOTE(review): two leftover ``import pdb; pdb.set_trace()`` debug traps
    were removed from this function. The ``@_wrapper('wolfe')`` decorator
    head was restored (its ``@`` was lost in extraction) — confirm upstream.
    Parameters p, f_prev, psi_1, Delta and omega are accepted for interface
    compatibility but unused in this body.
    """
    epsk = eps * abs(get_fg('f', 0.0))
    phi0, dphi0 = get_phi_dphi('fg', 0.0)
    f0, g0 = get_fg('fg', 0.0)
    cond = conds['wolfe']

    def bisect(a, b):
        """Shrink [a, b] until the non-negative-slope condition holds at d."""
        for _ in range(max_bisects):
            d = (1 - theta) * a + theta * b
            dphi_d = get_phi_dphi('g', d)
            if dphi_d >= 0:
                return a, d
            phi_d = get_phi_dphi('f', d)
            if phi_d <= (phi0 + epsk):
                a = d
            else:
                b = d
        raise Exception('Bisect failed!')

    def interval_update(a, b, c):
        """Update bracket [a, b] with trial point c (must lie inside)."""
        if not (a < c < b):
            return a, b
        phi_c, dphi_c = get_phi_dphi('fg', c)
        if dphi_c >= 0:
            return a, c
        if phi_c <= (phi0 + epsk):
            return c, b
        return bisect(a, c)

    def secant(a, b):
        """Secant step from the derivatives at a and b."""
        dphia = get_phi_dphi('g', a)
        dphib = get_phi_dphi('g', b)
        return (a * dphib - b * dphia) / (dphib - dphia)

    def double_secant(a, b):
        """Secant-squared update: one secant, then a refining secant."""
        c = secant(a, b)
        A, B = interval_update(a, b, c)
        cB_close = np.isclose(c, B)
        cA_close = np.isclose(c, A)
        if cB_close:
            c_dash = secant(b, B)
        elif cA_close:
            c_dash = secant(a, A)
        if cB_close or cA_close:
            a_dash, b_dash = interval_update(A, B, c_dash)
        else:
            a_dash, b_dash = A, B
        return a_dash, b_dash

    def bracket(c):
        """Expand c geometrically (factor rho) until a bracket is found."""
        cs = list()
        for j in range(10):
            cs.append(c)
            dphi_j = get_phi_dphi('g', c)
            if (dphi_j >= 0) and (j == 0):
                return 0, c
            phi_j = get_phi_dphi('f', c)
            if dphi_j >= 0:
                # Pick the last expansion point still below the phi threshold.
                phi_inds = np.array([get_fg('f', c_prev) for c_prev in cs[:(- 1)]]) <= (phi0 + epsk)
                ci = len(phi_inds) - phi_inds[::(- 1)].argmax() - 1
                return cs[ci], c
            elif phi_j > (phi0 + epsk):
                return bisect(0, c)
            c *= rho

    def norm_inf(arr):
        return np.linalg.norm(arr, np.inf)

    def initial():
        """Initial step length guess from x0/f0/g0 magnitudes."""
        if (~ np.isclose(x0, np.zeros_like(x0))).any():
            c = psi_0 * norm_inf(x0) / norm_inf(g0)
        elif not np.isclose(f0, 0):
            c = (psi_0 * f0) / (norm_inf(g0) ** 2)
        else:
            c = 1
        return c

    def take_quad_step(alpha, g0_):
        """Attempt an initial quadratic-interpolation step."""
        fact = max(psi_low, g0_ / (dphi0 * psi_2))
        alpha_ = min(fact, psi_hi) * alpha
        phi_ = get_phi_dphi('f', alpha_)
        denom = 2 * (((phi_ - phi0) / alpha_) - dphi0)
        f_temp = get_fg('f', alpha_)
        if denom > 0.0:
            c = ((- dphi0) * alpha_) / denom
            if f_temp > get_fg('f', 0):
                # Guard against a degenerate, vanishing step.
                c = max(c, alpha_ * 1e-10)
        else:
            c = alpha
        return c

    if (alpha_init is None) and alpha_prev:
        alpha_init = alpha_prev
    if (alpha_init is None) and (alpha_prev is None):
        alpha_init = initial()

    try:
        if quad_step:
            g0_ = ((- 2) * abs(get_fg('f', 0) / alpha_init)) if (dphi0_prev is None) else dphi0_prev
            alpha_init = take_quad_step(psi_2 * alpha_init, g0_)
        # Prime the evaluator cache at the starting step.
        _ = get_phi_dphi('fg', alpha_init)
        ak, bk = bracket(alpha_init)
        for k in range(max_cycles):
            if cond(ak):
                break
            a, b = double_secant(ak, bk)
            if (b - a) > (gamma * (bk - ak)):
                # Interval shrank too slowly: bisect once.
                c = (a + b) / 2
                a, b = interval_update(a, b, c)
            ak, bk = a, b
    except LineSearchConverged as lsc:
        # Raised by the evaluators once the Wolfe conditions hold.
        ak = lsc.alpha
    f_new, g_new = get_fg('fg', ak)
    return ak, f_new, g_new, dphi0
class Notification(FlyteIdlEntity):
    """Wrapper for the flyteidl Notification message.

    Fix: the accessors were plain methods while ``to_flyte_idl`` reads them as
    attributes (``self.phases``, ``if self.email``) — a bound method is always
    truthy and has no ``to_flyte_idl``, so serialization would crash.  Restored
    ``@property`` on the accessors and ``@classmethod`` on ``from_flyte_idl``.
    """

    def __init__(self, phases, email: EmailNotification = None,
                 pager_duty: PagerDutyNotification = None,
                 slack: SlackNotification = None):
        self._phases = phases
        self._email = email
        self._pager_duty = pager_duty
        self._slack = slack

    @property
    def phases(self):
        """Workflow phases that trigger this notification."""
        return self._phases

    @property
    def email(self):
        return self._email

    @property
    def pager_duty(self):
        return self._pager_duty

    @property
    def slack(self):
        return self._slack

    def to_flyte_idl(self):
        """Serialize to the protobuf Notification message."""
        return _common_pb2.Notification(
            phases=self.phases,
            email=(self.email.to_flyte_idl() if self.email else None),
            pager_duty=(self.pager_duty.to_flyte_idl() if self.pager_duty else None),
            slack=(self.slack.to_flyte_idl() if self.slack else None))

    @classmethod
    def from_flyte_idl(cls, p):
        """Deserialize from the protobuf Notification message."""
        return cls(
            p.phases,
            email=(EmailNotification.from_flyte_idl(p.email) if p.HasField('email') else None),
            pager_duty=(PagerDutyNotification.from_flyte_idl(p.pager_duty) if p.HasField('pager_duty') else None),
            slack=(SlackNotification.from_flyte_idl(p.slack) if p.HasField('slack') else None))
# NOTE(review): the decorator head was truncated in the source; reconstructed
# as the firedrake marker below — confirm against the original test module.
@pytest.mark.skipcomplexnoslate
def test_multiple_custom_transfer_split():
    """Custom prolongation callbacks must be invoked per-field in a fieldsplit MG solve."""
    mesh = UnitIntervalMesh(2)
    mh = MeshHierarchy(mesh, 2)
    mesh = mh[-1]

    # Counters prove each field used its own transfer operator.
    count_V = 0
    count_Q = 0

    def prolong_V(coarse, fine):
        nonlocal count_V
        prolong(coarse, fine)
        count_V += 1

    def prolong_Q(fine, coarse):
        # Argument names are swapped here on purpose; the arguments are passed
        # straight through to prolong in the order they were received.
        nonlocal count_Q
        prolong(fine, coarse)
        count_Q -= 1

    V = FunctionSpace(mesh, 'CG', 1)
    Q = FunctionSpace(mesh, 'DG', 0)
    W = V * Q
    u, p = TrialFunctions(W)
    v, q = TestFunctions(W)
    a = (inner(u, v) * dx) + (inner(p, q) * dx)
    L = conj(v) * dx
    options = {'ksp_type': 'preonly',
               'pc_type': 'fieldsplit',
               'fieldsplit_pc_type': 'mg',
               'pc_fieldsplit_type': 'additive',
               'fieldsplit_ksp_type': 'preonly',
               'mat_type': 'aij'}
    wh = Function(W)
    transfer = TransferManager(native_transfers={
        V.ufl_element(): (prolong_V, restrict, inject),
        Q.ufl_element(): (prolong_Q, restrict, inject)})
    problem = LinearVariationalProblem(a, L, wh)
    solver = LinearVariationalSolver(problem, solver_parameters=options)
    solver.set_transfer_manager(transfer)
    solver.solve()
    # Two MG levels -> two prolongations per field; Q's callback decrements.
    assert count_V == 2
    assert count_Q == -2
class CloseToTrayPreference(widgets.CheckPreference, widgets.CheckConditional):
    """Checkbox for 'close to tray', only enabled when the tray itself is in use."""

    name = 'gui/close_to_tray'
    default = False
    # Greyed out unless the tray preference is switched on.
    condition_preference_name = 'gui/use_tray'

    def __init__(self, preferences, widget):
        # Both bases need explicit initialization; neither chains to the other.
        widgets.CheckPreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)
class TestOFPGroupDescStats(unittest.TestCase):
    """Tests for OFPGroupDescStats construction and wire-format parsing."""

    # Expected total length: desc-stats header + one bucket + one output action.
    length = ((ofproto.OFP_GROUP_DESC_STATS_SIZE + ofproto.OFP_BUCKET_SIZE) + ofproto.OFP_ACTION_OUTPUT_SIZE)
    type_ = 128
    group_id = 6606
    port = 10976
    max_len = ofproto.OFP_ACTION_OUTPUT_SIZE
    # A single output action reused by every bucket in these tests;
    # serialized once here at class-definition time.
    actions = [OFPActionOutput(port, max_len)]
    buf_actions = bytearray()
    actions[0].serialize(buf_actions, 0)
    weight = 4386
    watch_port = 8006
    watch_group = 3
    buckets = [OFPBucket(weight, watch_port, watch_group, actions)]
    bucket_cnt = 1024

    def test_init(self):
        # Constructor must store type, group_id and buckets verbatim.
        c = OFPGroupDescStats(self.type_, self.group_id, self.buckets)
        eq_(self.type_, c.type)
        eq_(self.group_id, c.group_id)
        eq_(self.buckets, c.buckets)

    def _test_parser(self, type_, group_id, bucket_cnt):
        # Build a raw message containing bucket_cnt buckets, then parse it
        # back and verify every field round-trips.
        length = (ofproto.OFP_GROUP_DESC_STATS_SIZE + ((ofproto.OFP_BUCKET_SIZE + ofproto.OFP_ACTION_OUTPUT_SIZE) * bucket_cnt))
        fmt = ofproto.OFP_GROUP_DESC_STATS_PACK_STR
        buf = pack(fmt, length, type_, group_id)
        buckets = []
        for b in range(bucket_cnt):
            # Bucket fields are derived from the loop index so each differs.
            weight = watch_port = watch_group = b
            bucket = OFPBucket(weight, watch_port, watch_group, self.actions)
            buckets.append(bucket)
            buf_buckets = bytearray()
            buckets[b].serialize(buf_buckets, 0)
            buf += six.binary_type(buf_buckets)
        res = OFPGroupDescStats.parser(buf, 0)
        eq_(type_, res.type)
        eq_(group_id, res.group_id)
        for b in range(bucket_cnt):
            eq_(buckets[b].weight, res.buckets[b].weight)
            eq_(buckets[b].watch_port, res.buckets[b].watch_port)
            eq_(buckets[b].watch_group, res.buckets[b].watch_group)
            eq_(buckets[b].actions[0].port, res.buckets[b].actions[0].port)
            eq_(buckets[b].actions[0].max_len, res.buckets[b].actions[0].max_len)

    def test_parser_mid(self):
        self._test_parser(self.type_, self.group_id, self.bucket_cnt)

    def test_parser_max(self):
        # Upper-bound values.
        group_id = type_ = 255
        bucket_cnt = 2047
        self._test_parser(type_, group_id, bucket_cnt)

    def test_parser_min(self):
        # Lower-bound values, including an empty bucket list.
        group_id = 0
        type_ = ofproto.OFPGT_ALL
        bucket_cnt = 0
        self._test_parser(type_, group_id, bucket_cnt)

    def test_parser_p1(self):
        type_ = ofproto.OFPGT_SELECT
        self._test_parser(type_, self.group_id, self.bucket_cnt)

    def test_parser_p2(self):
        type_ = ofproto.OFPGT_INDIRECT
        self._test_parser(type_, self.group_id, self.bucket_cnt)

    def test_parser_p3(self):
        type_ = ofproto.OFPGT_FF
        self._test_parser(type_, self.group_id, self.bucket_cnt)
# NOTE(review): the decorator head was truncated in the source; reconstructed
# below — confirm the route object name against the original module.
@app.route('/gpustat', methods=['GET'])
def report_gpustat():
    """Return the current gpustat report as a JSON response."""

    def _date_handler(obj):
        # Serialize datetime-like objects via ISO format; reject anything else.
        if hasattr(obj, 'isoformat'):
            return obj.isoformat()
        else:
            raise TypeError(type(obj))

    response.content_type = 'application/json'
    if EXCLUDE_SELF:
        resp = {'error': 'Excluded self!'}
    else:
        resp = core.my_gpustat()
    return json.dumps(resp, default=_date_handler)
def test_correct_response_with_geo_filters(client, monkeypatch, elasticsearch_transaction_index, awards_and_transactions):
    """Run every geo-filter scenario against a freshly indexed ES fixture."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    scenarios = (
        _test_correct_response_for_place_of_performance_county_with_geo_filters,
        _test_correct_response_for_place_of_performance_district_with_geo_filters,
        _test_correct_response_for_place_of_performance_state_with_geo_filters,
        _test_correct_response_for_place_of_perforance_country_with_geo_filters,
        _test_correct_response_for_recipient_location_county_with_geo_filters,
        _test_correct_response_for_recipient_location_district_with_geo_filters,
        _test_correct_response_for_recipient_location_state_with_geo_filters,
        _test_correct_response_for_recipient_location_country_with_geo_filters,
    )
    for scenario in scenarios:
        scenario(client)
def module_transformation(output, param, subtree_parameters, tr_snippet, tr_args):
    """Build the render Module for a transformation attached to *param*."""
    render_kwds = dict(
        output=output,
        name=param.name,
        param_cnames_seq=param_cnames_seq,
        subtree_parameters=subtree_parameters,
        q_indices=index_cnames_seq(param, qualified=True),
        VALUE_NAME=VALUE_NAME,
        nq_params=param_cnames_seq(subtree_parameters),
        nq_indices=index_cnames_seq(param),
        connector_ctype=param.annotation.type.ctype,
        tr_snippet=tr_snippet,
        tr_args=tr_args,
    )
    return Module(_module_transformation, render_kwds=render_kwds)
class OptionSeriesOrganizationDataDatalabelsTextpath(Options):
    """Textpath options for organization-series data labels.

    Fix: each option had two plain ``def``s with the same name (the second
    silently shadowing the first), i.e. stripped ``@property`` /
    ``@<name>.setter`` decorator pairs — restored below.
    """

    @property
    def attributes(self):
        """Attributes applied to the rendered text path."""
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        """Whether the text path rendering is enabled."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class CircuitBreakerError(Exception):
    """Raised when a call is attempted while the associated circuit is open."""

    def __init__(self, circuit_breaker, *args, **kwargs):
        super(CircuitBreakerError, self).__init__(*args, **kwargs)
        # Keep a handle on the breaker so the message can report live state.
        self._circuit_breaker = circuit_breaker

    def __str__(self, *args, **kwargs):
        breaker = self._circuit_breaker
        template = 'Circuit "%s" OPEN until %s (%d failures, %d sec remaining) (last_failure: %r)'
        return template % (
            breaker.name,
            breaker.open_until,
            breaker.failure_count,
            round(breaker.open_remaining),
            breaker.last_failure,
        )
class TestGethostbyname_ex(tests.LimitedTestCase):
    """Tests for greendns.gethostbyname_ex with patched resolver/alias lookups."""

    def _make_mock_getaliases(self):
        # Callable stub whose return value can be tweaked via .aliases.
        class _AliasStub(object):
            aliases = ['cname.example.com']

            def __call__(self, *args, **kwargs):
                return self.aliases

        return _AliasStub()

    def setUp(self):
        # Swap in the mock resolver; remember originals for tearDown.
        self._old_resolve = greendns.resolve
        greendns.resolve = _make_mock_resolve()
        self._old_getaliases = greendns.getaliases

    def tearDown(self):
        greendns.resolve = self._old_resolve
        greendns.getaliases = self._old_getaliases

    def test_ipaddr(self):
        # A literal address is echoed back without a DNS lookup.
        res = greendns.gethostbyname_ex('1.2.3.4')
        assert res == ('1.2.3.4', [], ['1.2.3.4'])

    def test_name(self):
        greendns.resolve.add('host.example.com', '1.2.3.4')
        greendns.getaliases = self._make_mock_getaliases()
        greendns.getaliases.aliases = []
        res = greendns.gethostbyname_ex('host.example.com')
        assert res == ('host.example.com', [], ['1.2.3.4'])

    def test_multiple_addrs(self):
        greendns.resolve.add('host.example.com', '1.2.3.4')
        greendns.resolve.add('host.example.com', '1.2.3.5')
        greendns.getaliases = self._make_mock_getaliases()
        greendns.getaliases.aliases = []
        res = greendns.gethostbyname_ex('host.example.com')
        assert res == ('host.example.com', [], ['1.2.3.4', '1.2.3.5'])
def extractOutspanFoster(item):
    """Map a feed item to a release message for 'The Ascension Chronicle'.

    Returns None for previews/unparseable titles, a release message for
    matching tagged chapters, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    tags = item['tags']
    if 'Chapter' in tags and 'ascension' in tags:
        return buildReleaseMessageWithType(item, 'The Ascension Chronicle', vol, chp,
                                           frag=frag, postfix=postfix, tl_type='oel')
    return False
class WafFirewallsResponseAllOf(ModelNormal):
    """Auto-generated OpenAPI model carrying `data` and `included` of a WAF firewalls response.

    Fix: the ``@cached_property``, ``@classmethod`` and
    ``@convert_js_args_to_python_args`` decorators were stripped in the
    source (leaving bare ``_property`` / ``_js_args_to_python_args`` tokens);
    restored to match the standard openapi-generator python template —
    NOTE(review): confirm exact decorator names against the generator output.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        """Attribute name -> (type,) mapping used for (de)serialization."""
        lazy_import()
        return {'data': ([WafFirewallResponseData],), 'included': (IncludedWithWafFirewall,)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {'data': 'data', 'included': 'included'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw API data; read-only attributes are allowed here."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys that are neither declared nor additional.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Public constructor; raises on attempts to set read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def delete_directory_contents(directory: Path) -> None:
    """Remove every entry inside *directory*, leaving the directory itself in place.

    Files and symlinks are unlinked; real subdirectories are removed
    recursively.  Other entry kinds (e.g. fifos that are neither) are left
    untouched, matching the original behaviour.
    """
    enforce(directory.is_dir(), f"Path '{directory}' must be a directory.")
    for entry in directory.iterdir():
        if entry.is_file() or entry.is_symlink():
            entry.unlink()
        elif entry.is_dir():
            shutil.rmtree(str(entry), ignore_errors=False)
class Fuzzel(Selector):
    """Selector backend driving the `fuzzel` Wayland launcher in dmenu mode.

    Fix: ``supported`` and ``name`` take no ``self`` parameter, so the
    stripped ``@staticmethod`` decorators have been restored — without them
    any call through an instance would fail.
    """

    @staticmethod
    def supported() -> bool:
        # fuzzel is a Wayland-only launcher.
        return (is_wayland() and is_installed('fuzzel'))

    @staticmethod
    def name() -> str:
        return 'fuzzel'

    def show_character_selection(self, characters: Dict[str, str], recent_characters: List[str], prompt: str, show_description: bool, use_icons: bool, keybindings: Dict[Action, str], additional_args: List[str]) -> Tuple[Union[Action, DEFAULT, CANCEL], Union[List[str], Shortcut]]:
        """Show the character picker; returns the chosen character under the DEFAULT action."""
        parameters = ['fuzzel', '--dmenu', '--fuzzy-min-length', '1', '--index', '-p', prompt, *additional_args]
        fuzzel = run(parameters, input='\n'.join(self.basic_format_characters(characters)), capture_output=True, encoding='utf-8')
        # With --index fuzzel prints the selected line number, not its text.
        return (DEFAULT(), [self.extract_char_from_input(list(characters)[int(fuzzel.stdout.strip())])])

    def show_skin_tone_selection(self, tones_emojis: List[str], prompt: str, additional_args: List[str]) -> Tuple[int, str]:
        """Show the skin-tone picker; returns (returncode, raw stdout)."""
        fuzzel = run(['fuzzel', '--dmenu', '--fuzzy-min-length', '1', '-p', prompt, *additional_args], input='\n'.join(tones_emojis), capture_output=True, encoding='utf-8')
        return (fuzzel.returncode, fuzzel.stdout)

    def show_action_menu(self, additional_args: List[str]) -> List[Action]:
        """Show the action menu (all actions except MENU itself)."""
        fuzzel = run(['fuzzel', '--dmenu', *additional_args], input='\n'.join([it.value for it in Action if (it != Action.MENU)]), capture_output=True, encoding='utf-8')
        return [Action(fuzzel.stdout.strip())]
class LookupTable(NamedTuple):
    """Serializable lookup table mapping file IDs to names plus entry data.

    Fix: ``from_json`` takes ``cls`` but was missing its ``@classmethod``
    decorator, so ``LookupTable.from_json(s)`` would have bound ``cls`` to
    the JSON string.
    """

    version: int = TABLE_VERSION
    file_index: Dict[FileID, str] = {}
    entries: LookupEntries = {}

    def to_json(self) -> str:
        """Serialize as a JSON array of (version, file_index, entries)."""
        return json.dumps((self.version, self.file_index, self.entries))

    @classmethod
    def from_json(cls, value: str) -> 'LookupTable':
        """Rebuild a table from `to_json` output."""
        version, file_index, entries = json.loads(value)
        # JSON object keys are always strings; restore the integer file IDs.
        file_index = {int(index): filename for index, filename in file_index.items()}
        return cls(version=version, file_index=file_index, entries=entries)
def update_rules(xkb_root, kb_index):
    """Sync the XKB rules files (base.xml/evdev.xml) with the layouts in kb_index.

    For each locale, every named layout is removed from the variant list and
    re-added with its current description (removal only, when layout is None).
    """
    for rules_name in ('base.xml', 'evdev.xml'):
        filepath = xkb_root / 'rules' / rules_name
        if not filepath.exists():
            continue
        try:
            tree = etree.parse(filepath, etree.XMLParser(remove_blank_text=True))
            for locale, named_layouts in kb_index.items():
                vlist = get_rules_locale(tree, locale).xpath('variantList')
                # Exactly one variantList per locale is expected.
                if len(vlist) != 1:
                    exit(f'Error: unexpected xml format in {filepath}.')
                for name, layout in named_layouts.items():
                    remove_rules_variant(vlist[0], name)
                    if layout is not None:
                        add_rules_variant(vlist[0], name, layout.meta['description'])
            tree.write(filepath, pretty_print=True, xml_declaration=True, encoding='utf-8')
            print(f'... {filepath}')
        except Exception as exc:
            exit_FileNotWritable(exc, filepath)
class FlattenConcatCategoricalStateValueNet(FlattenConcatBaseNet):
    """Flatten-and-concat state-value network with a categorical value head.

    The value is predicted as a distribution over an integer support set
    (distributional-RL style) and converted back to a scalar via
    ``support_to_scalar``.
    """

    def __init__(self, obs_shapes: Dict[(str, Sequence[int])], hidden_units: List[int], non_lin: nn.Module, support_range: Tuple[(int, int)]):
        super().__init__(obs_shapes, hidden_units, non_lin)
        # One output unit per integer in [support_range[0], support_range[1]] (inclusive).
        support_set_size = ((support_range[1] - support_range[0]) + 1)
        self.perception_dict['probabilities'] = LinearOutputBlock(in_keys='latent', out_keys='probabilities', in_shapes=self.perception_dict['latent'].out_shapes(), output_units=support_set_size)

        def _to_scalar(x: torch.Tensor) -> torch.Tensor:
            # Collapse the categorical output back to a scalar value.
            return support_to_scalar(x, support_range=support_range)

        self.perception_dict['value'] = FunctionalBlock(in_keys='probabilities', out_keys='value', in_shapes=self.perception_dict['probabilities'].out_shapes(), func=_to_scalar)
        # Small-std init keeps initial value predictions close to zero.
        module_init = make_module_init_normc(std=0.01)
        self.perception_dict['probabilities'].apply(module_init)
        self.net = InferenceBlock(in_keys=list(obs_shapes.keys()), out_keys=['probabilities', 'value'], in_shapes=list(obs_shapes.values()), perception_blocks=self.perception_dict)

    def forward(self, x):
        """Run the assembled inference block on the observation dict *x*."""
        return self.net(x)
class GethAdmin(Module):
    """web3 module exposing Geth's ``admin_*`` JSON-RPC namespace.

    Fix: two attribute names were garbled in the source (``start_ Method[...]``
    and ``stop_ Method[...]`` — a syntax error); reconstructed as
    ``start_http``/``stop_http`` to pair with ``admin_startHTTP`` /
    ``admin_stopHTTP``, mirroring the ws pair below.
    """

    is_async = False

    add_peer: Method[Callable[[EnodeURI], bool]] = Method(RPC.admin_addPeer, mungers=[default_root_munger])
    datadir: Method[Callable[[], str]] = Method(RPC.admin_datadir, is_property=True)
    node_info: Method[Callable[[], NodeInfo]] = Method(RPC.admin_nodeInfo, is_property=True)
    peers: Method[Callable[[], List[Peer]]] = Method(RPC.admin_peers, is_property=True)
    start_http: Method[ServerConnection] = Method(RPC.admin_startHTTP, mungers=[admin_start_params_munger])
    start_ws: Method[ServerConnection] = Method(RPC.admin_startWS, mungers=[admin_start_params_munger])
    stop_http: Method[Callable[[], bool]] = Method(RPC.admin_stopHTTP, is_property=True)
    stop_ws: Method[Callable[[], bool]] = Method(RPC.admin_stopWS, is_property=True)
def main():
    """Train and evaluate a polynomial-kernel SVM on the two non-setosa iris classes."""
    data = datasets.load_iris()
    # Drop class 0 and relabel the remaining classes as -1 / +1.
    mask = data.target != 0
    X = normalize(data.data[mask])
    y = data.target[mask]
    y[y == 1] = -1
    y[y == 2] = 1

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    clf = SupportVectorMachine(kernel=polynomial_kernel, power=4, coef=1)
    clf.fit(X_train, y_train)
    y_pred = clf.predict(X_test)

    accuracy = accuracy_score(y_test, y_pred)
    print('Accuracy:', accuracy)
    Plot().plot_in_2d(X_test, y_pred, title='Support Vector Machine', accuracy=accuracy)
class FinancialParserObjectDataClass(BaseModel):
    """Normalized result of parsing one financial document (sections grouped by concern)."""

    customer_information: FinancialCustomerInformation
    merchant_information: FinancialMerchantInformation
    payment_information: FinancialPaymentInformation
    financial_document_information: FinancialDocumentInformation
    # presumably locale/currency context — TODO confirm field semantics upstream
    local: FinancialLocalInformation
    bank: FinancialBankInformation
    item_lines: List[FinancialLineItem] = Field(default_factory=list, description='List of line items associated with the document.')
    document_metadata: FinancialDocumentMetadata
class ResponseCacheMiddlware():
    """Middleware that short-circuits requests to the cached test paths.

    (Class name typo 'Middlware' preserved — callers reference it.)
    """

    PROCESS_REQUEST_CACHED_BODY = {'cached': True}
    PROCESS_RESOURCE_CACHED_BODY = {'cached': True, 'resource': True}

    def process_request(self, req, resp):
        """Serve the canned body before routing for the plain cached path."""
        if req.path != '/cached':
            return
        resp.media = self.PROCESS_REQUEST_CACHED_BODY
        resp.complete = True

    def process_resource(self, req, resp, resource, params):
        """Serve the canned body after routing for the resource cached path."""
        if req.path != '/cached/resource':
            return
        resp.media = self.PROCESS_RESOURCE_CACHED_BODY
        resp.complete = True
class CriticViewPreprocessor(Preprocessor):
    """Preprocessor that renders or resolves CriticMarkup annotations.

    In 'view' mode each insertion/deletion/substitution/highlight/comment
    span is replaced with stashed HTML; in 'accept'/'reject' mode the edits
    are applied or discarded textually.
    """

    def __init__(self, critic_stash):
        super(CriticViewPreprocessor, self).__init__()
        # Stash protects the generated raw HTML from later Markdown passes.
        self.critic_stash = critic_stash

    def _ins(self, text):
        # An insertion that is only a block separator becomes a standalone break marker.
        if RE_BLOCK_SEP.match(text):
            return ('\n\n%s\n\n' % self.critic_stash.store('<ins class="critic break">&nbsp;</ins>'))
        return ((self.critic_stash.store('<ins class="critic">') + text) + self.critic_stash.store('</ins>'))

    def _del(self, text):
        # Same break-marker treatment for deletions of a block separator.
        if RE_BLOCK_SEP.match(text):
            return self.critic_stash.store('<del class="critic break">&nbsp;</del>')
        return ((self.critic_stash.store('<del class="critic">') + text) + self.critic_stash.store('</del>'))

    def _mark(self, text):
        return ((self.critic_stash.store('<mark class="critic">') + text) + self.critic_stash.store('</mark>'))

    def _comment(self, text):
        # Comment text is fully escaped so it never renders as HTML.
        return self.critic_stash.store((('<span class="critic comment">' + self.html_escape(text, strip_nl=True)) + '</span>'))

    def critic_view(self, m):
        """Regex callback: wrap the matched CriticMarkup span in display HTML."""
        if m.group('ins_open'):
            return self._ins(m.group('ins_text'))
        elif m.group('del_open'):
            return self._del(m.group('del_text'))
        elif m.group('sub_open'):
            # A substitution renders as deletion followed by insertion.
            return (self._del(m.group('sub_del_text')) + self._ins(m.group('sub_ins_text')))
        elif m.group('mark_open'):
            return self._mark(m.group('mark_text'))
        elif m.group('com_open'):
            return self._comment(m.group('com_text'))

    def critic_parse(self, m):
        """Regex callback: apply ('accept') or discard ('reject') the matched edit."""
        accept = (self.config['mode'] == 'accept')
        if m.group('ins_open'):
            return (m.group('ins_text') if accept else '')
        elif m.group('del_open'):
            return ('' if accept else m.group('del_text'))
        elif m.group('mark_open'):
            # Highlighted text is kept in both modes.
            return m.group('mark_text')
        elif m.group('com_open'):
            # Comments are always dropped.
            return ''
        elif m.group('sub_open'):
            return (m.group('sub_ins_text') if accept else m.group('sub_del_text'))

    def html_escape(self, txt, strip_nl=False):
        """Minimal HTML escape; newlines become <br> unless stripped to spaces."""
        txt = txt.replace('&', '&amp;')
        txt = txt.replace('<', '&lt;')
        txt = txt.replace('>', '&gt;')
        txt = txt.replace('"', '&quot;')
        txt = txt.replace('\n', ('<br>' if (not strip_nl) else ' '))
        return txt

    def run(self, lines):
        """Markdown entry point: process the whole document as one string."""
        if (self.config['mode'] == 'view'):
            processor = self.critic_view
        else:
            processor = self.critic_parse
        text = RE_CRITIC.sub(processor, '\n'.join(lines))
        return text.split('\n')
def test_commit_merges_checkpoint_into_previous(journal_db):
    """Committing a checkpoint folds its changes into the prior record and drops it."""
    checkpoint = journal_db.record()
    journal_db.set(b'1', b'test-a')
    assert journal_db.get(b'1') == b'test-a'

    diff_before_commit = journal_db.diff()
    journal_db.commit(checkpoint)

    # The merged journal is observationally identical, but the checkpoint is gone.
    assert journal_db.diff() == diff_before_commit
    assert journal_db.get(b'1') == b'test-a'
    assert journal_db.has_checkpoint(checkpoint) is False
def are_rules_up_to_date(rules, current_version=VERSION):
    """Return True if the rules text embeds exactly the current rivalcfg version.

    Fix: the regex was matched twice (once in the condition, once to extract
    the group); match once and reuse the result, and use a raw string for the
    pattern.
    """
    version_regexp = re.compile(r'.*rivalcfg\s+v([0-9]+\.[0-9]+\.[0-9]+(.+)?)\s*.*')
    match = version_regexp.match(rules)
    rules_version = match.group(1) if match else None
    return rules_version == current_version
# NOTE(review): decorator head was truncated in the source; reconstructed as
# pytest.mark.parametrize from the visible `.parametrize(...)` fragment.
@pytest.mark.parametrize('dim, degree, quad_degree',
                         [(dim, d, q)
                          for dim in (0, 1)
                          for q in range(1, 8)
                          for d in range(q + 1)])
def test_integrate_triangle(dim, degree, quad_degree):
    """A degree-q Gauss rule must integrate x_dim**d exactly on the reference triangle for d <= q."""
    q = gauss_quadrature(ReferenceTriangle, quad_degree)
    numeric = q.integrate(lambda x: x[dim] ** degree)
    # Analytic value of the monomial integral over the unit reference triangle.
    analytic = (1.0 / (degree + 1)) - (1.0 / (degree + 2))
    assert round(numeric - analytic, 12) == 0
class TestAbstractDataExporter(TestCase):
    """Tests for the abstract data-exporter behaviour via the trivial exporter."""

    def setUp(self):
        # Value-type stub providing both the text and editor-value channels.
        self.value_type = Mock()
        self.value_type.has_text = Mock(return_value=True)
        self.value_type.get_text = Mock(return_value='text')
        self.value_type.has_editor_value = Mock(return_value=True)
        self.value_type.get_editor_value = Mock(return_value=1)
        # Model stub: fixed raw value plus the stub value type above.
        self.model = Mock()
        self.model.get_value = Mock(return_value=0.0)
        self.model.get_value_type = Mock(return_value=self.value_type)

    def test_is_text_default_false(self):
        # Non-text format implies is_text defaults to False.
        exporter = TrivialExporter(format=trivial_format)
        self.assertFalse(exporter.is_text)

    def test_is_text_default_true(self):
        # Text format implies is_text defaults to True.
        exporter = TrivialExporter(format=trivial_text_format)
        self.assertTrue(exporter.is_text)

    def test_add_data(self):
        exporter = TrivialExporter(format=trivial_format)
        data_wrapper = DataWrapper()
        exporter.add_data(data_wrapper, self.model, [((0,), (0,))])
        self.assertTrue(data_wrapper.has_format(trivial_format))
        self.assertEqual(data_wrapper.get_mimedata('null/null'), b'data')

    def test_add_data_fail(self):
        # With an empty index list nothing should be exported.
        exporter = TrivialExporter(format=trivial_format)
        data_wrapper = DataWrapper()
        exporter.add_data(data_wrapper, self.model, [])
        self.assertFalse(data_wrapper.has_format(trivial_format))

    def test_get_value_is_text(self):
        exporter = TrivialExporter(format=trivial_format, is_text=True)
        value = exporter.get_value(self.model, (0,), (0,))
        self.assertEqual(value, 'text')

    def test_get_value_is_text_not_has_text(self):
        # Text requested but unavailable -> empty string.
        self.value_type.has_text = Mock(return_value=False)
        exporter = TrivialExporter(format=trivial_format, is_text=True)
        value = exporter.get_value(self.model, (0,), (0,))
        self.assertEqual(value, '')

    def test_get_value_is_not_text(self):
        # Non-text export prefers the editor value.
        exporter = TrivialExporter(format=trivial_format, is_text=False)
        value = exporter.get_value(self.model, (0,), (0,))
        self.assertEqual(value, 1.0)

    def test_get_value_is_not_text_not_editor_value(self):
        # No editor value -> fall back to the raw model value.
        self.value_type.has_editor_value = Mock(return_value=False)
        exporter = TrivialExporter(format=trivial_format, is_text=False)
        value = exporter.get_value(self.model, (0,), (0,))
        self.assertEqual(value, 0.0)
def get_opt_kwargs(opt_key, layer_ind, thresh):
    """Build optimizer kwargs for the given layer index.

    Layer 0 always uses RFO with convergence checking disabled ('never');
    other layers default to L-BFGS with per-optimizer extra defaults merged in.

    Args:
        opt_key: optimizer name ('lbfgs', 'plbfgs', ...) or None for the default.
        layer_ind: 0-based layer index; 0 selects the fixed RFO setup.
        thresh: convergence threshold key for non-zero layers.

    Returns:
        dict of optimizer keyword arguments.
    """
    if layer_ind == 0:
        return {'type': 'rfo', 'thresh': 'never'}

    opt_defaults = {
        'lbfgs': {'mu_reg': 0.1},
        'plbfgs': {'precon_kind': 'full_fast', 'precon_update': 50},
    }
    if opt_key is None:
        opt_key = 'lbfgs'
    opt_kwargs = {
        'type': opt_key,
        'max_cycles': 1500,
        'thresh': thresh,
        'overachieve_factor': 3,
    }
    # dict.get replaces the original try/except KeyError: optimizers without
    # registered defaults simply get nothing merged in.
    opt_kwargs.update(opt_defaults.get(opt_key, {}))
    return opt_kwargs
class OptionPlotoptionsXrangeSonificationTracksMappingHighpassFrequency(Options):
    """Highpass-frequency mapping options for xrange sonification tracks.

    Fix: each option had two plain ``def``s with the same name (the second
    shadowing the first), i.e. stripped ``@property`` / ``@<name>.setter``
    decorator pairs — restored below.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class DeferredMessage():
    """Holds a message type plus constructor args; builds the message lazily.

    Fix: ``build_message`` checked ``if not self._message`` — a *falsy* but
    constructed message would trigger a second build that crashes on the
    already-deleted ``_args``/``_kwargs``.  Use an identity check against
    None instead.
    """

    def __init__(self, message_type: Type[Message], *args: Any, **kwargs: Any) -> None:
        self._message_type = message_type
        self._args = args
        self._kwargs = kwargs
        self._message: Optional[Message] = None

    def build_message(self) -> Message:
        """Construct (once) and return the wrapped message."""
        if self._message is None:
            if issubclass(self._message_type, TimestampedMessage):
                # Timestamped messages take the creation time as first arg.
                self._args = (time.time(),) + self._args
            self._message = self._message_type(*self._args, **self._kwargs)
            # Drop construction args so payloads are not kept alive twice.
            del self._args
            del self._kwargs
        return self._message
def mock_constructor(target: str, class_name: str, allow_private: bool=False, type_validation: bool=True, **kwargs: Any) -> _MockConstructorDSL:
    """Mock construction of ``class_name`` at ``target`` and return the DSL object.

    The class attribute is replaced by a mocked class whose ``__new__`` is a
    callable mock, so instantiations can be stubbed.  Calling this again for
    the same (target, class_name) reuses the existing mocked class.
    """
    if (not isinstance(class_name, str)):
        raise ValueError('Second argument must be a string with the name of the class.')
    _bail_if_private(class_name, allow_private)
    if isinstance(target, str):
        # Resolve a dotted module path to the module object.
        from testslide import _importer
        target = _importer(target)
    target_class_id = (id(target), class_name)
    if (target_class_id in _mocked_target_classes):
        # Already mocked: reuse it, but detect if the attribute was swapped
        # behind our back since mocking.
        (original_class, mocked_class) = _mocked_target_classes[target_class_id]
        if (not (getattr(target, class_name) is mocked_class)):
            raise AssertionError('The class {} at {} was changed after mock_constructor() mocked it!'.format(class_name, target))
        callable_mock = mocked_class.__new__
    else:
        original_class = getattr(target, class_name)
        # Classes with a custom __new__ cannot be wrapped this way.
        if ('__new__' in original_class.__dict__):
            raise NotImplementedError('Usage with classes that define __new__() is currently not supported.')
        if (not inspect.isclass(original_class)):
            raise ValueError('Target must be a class.')
        elif (not issubclass(original_class, object)):
            raise ValueError('Old style classes are not supported.')
        # Capture the caller's frame info so failures point at the test that
        # set up this mock.
        caller_frame = inspect.currentframe()
        if (caller_frame is not None):
            prev_frame = caller_frame.f_back
            if prev_frame:
                caller_frame_info = inspect.getframeinfo(prev_frame, context=0)
                callable_mock = _CallableMock(original_class, '__new__', caller_frame_info)
                mocked_class = _patch_and_return_mocked_class(target, class_name, target_class_id, original_class, callable_mock, type_validation, **kwargs)
            else:
                raise Exception('Cannot retrieve previous frame for caller frame')
        else:
            raise Exception('Cannot retrieve current frame, cannot create a CallableMock')

    def original_callable(cls: type, *args: Any, **kwargs: Any) -> Any:
        # Stand-in for the real __new__: records the construction args in
        # module globals for later inspection by the DSL.
        global _init_args_from_original_callable, _init_kwargs_from_original_callable
        assert (cls is mocked_class)
        _init_args_from_original_callable = args
        _init_kwargs_from_original_callable = kwargs
        return object.__new__(cls)

    return _MockConstructorDSL(target=mocked_class, method='__new__', cls=mocked_class, callable_mock=callable_mock, original_callable=original_callable)
def get_flaskbb_config(app, config_file):
    """Resolve *config_file* to something usable by app.config.

    Resolution order when a value is given: non-string objects pass through
    unchanged; then an instance-relative path; then an absolute path; finally
    an import string (returning None when it cannot be imported).  When no
    value is given, fall back to ``flaskbb.cfg`` in the instance directory,
    then in the project root.
    """
    if config_file is None:
        # No explicit config: probe the well-known default locations.
        project_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        project_config = os.path.join(project_dir, 'flaskbb.cfg')
        instance_config = os.path.join(app.instance_path, 'flaskbb.cfg')
        if os.path.exists(instance_config):
            return instance_config
        if os.path.exists(project_config):
            return project_config
        return None

    if not isinstance(config_file, str):
        # Already a config object/mapping — hand it back untouched.
        return config_file
    instance_candidate = os.path.join(app.instance_path, config_file)
    if os.path.exists(instance_candidate):
        return instance_candidate
    if os.path.exists(os.path.abspath(config_file)):
        return os.path.join(os.path.abspath(config_file))
    try:
        return import_string(config_file)
    except ImportStringError:
        return None