code
stringlengths
281
23.7M
class Automation(BaseObject):
    """Automation resource wrapper.

    NOTE(review): in this copy ``created`` and ``updated`` were each defined
    twice (getter then setter), so the second ``def`` silently clobbered the
    first and the getters were unreachable.  They look like
    ``@property``/``@x.setter`` pairs whose decorators were stripped;
    restored below — confirm against the upstream module.
    """

    def __init__(self, api=None, actions=None, active=None, conditions=None,
                 created_at=None, id=None, position=None, raw_title=None,
                 title=None, updated_at=None, url=None, **kwargs):
        self.api = api
        self.actions = actions
        self.active = active
        self.conditions = conditions
        self.created_at = created_at
        self.id = id
        self.position = position
        self.raw_title = raw_title
        self.title = title
        self.updated_at = updated_at
        self.url = url
        for key, value in kwargs.items():
            setattr(self, key, value)
        # Attributes left as None were never supplied by the caller, so they
        # must not be tracked as dirty (to_dict/_dirty_attributes come from
        # BaseObject).
        for key in self.to_dict():
            if getattr(self, key) is None:
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue

    @property
    def created(self):
        """``created_at`` parsed into a datetime, or None when unset."""
        if self.created_at:
            return dateutil.parser.parse(self.created_at)

    @created.setter
    def created(self, created):
        if created:
            self.created_at = created

    @property
    def updated(self):
        """``updated_at`` parsed into a datetime, or None when unset."""
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)

    @updated.setter
    def updated(self, updated):
        if updated:
            self.updated_at = updated
('cuda.pad_last_dim.gen_function')  # NOTE(review): residue of a stripped registry decorator — confirm upstream
def gen_function(func_attrs, template_path, shape_eval_template, shape_save_template):
    """Render the CUDA source for a pad-last-dim operator.

    Builds the shape-evaluation and shape-save snippets from the given
    templates, picks the 2-wide vector type matching the input dtype, and
    renders the full kernel source via SRC_TEMPLATE.
    """
    func_name = func_attrs['name']
    backend_spec = CUDASpec()
    elem_input_type = backend_spec.dtype_to_backend_type(
        func_attrs['inputs'][0]._attrs['dtype']
    )
    # Map the scalar backend type to its paired (x2) vector type.
    vector_types = {'half': 'half2', 'float': 'float2', 'bfloat16': 'bfloat16_2'}
    elem_input_type2 = vector_types.get(elem_input_type)
    if elem_input_type2 is None:
        raise NotImplementedError(f'unsupported elem_input_type={elem_input_type!r}')
    ndim = func_attrs['ndim']
    input_dims = [('*x_dim%d' % axis) for axis in range(ndim)]
    shape_eval_func = shape_eval_template.render(
        indent='  ', dtype='int64_t ', shape=input_dims, out_dim='out_dim'
    )
    output_dims = [('*y_dim%d' % axis) for axis in range(ndim - 1)]
    shape_save_func = shape_save_template.render(
        indent='  ', shape=output_dims, last_dim=('*y_dim%d' % (ndim - 1))
    )
    exec_paths = EXEC_TEMPLATE.render(
        elem_input_type=elem_input_type,
        elem_input_type2=elem_input_type2,
        ndim=func_attrs['ndim'],
        indent='  ',
    )
    return SRC_TEMPLATE.render(
        function_name=func_name,
        elem_input_type=elem_input_type,
        shape_function=shape_eval_func + shape_save_func,
        exec_paths=exec_paths,
        ndim=func_attrs['ndim'],
    )
def test_change():
    """Apply didChange edits to two files and check the resulting symbols."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    unknown_file = test_dir / 'subdir' / 'test_unknown.f90'
    request += write_rpc_notification(
        'textDocument/didOpen', {'textDocument': {'uri': str(unknown_file)}}
    )
    request += write_rpc_notification(
        'textDocument/didChange',
        {
            'textDocument': {'uri': str(unknown_file)},
            'contentChanges': [
                {
                    'text': 'module test_unkown\nend module test_unknown\n',
                    'range': {
                        'start': {'line': 0, 'character': 0},
                        'end': {'line': 0, 'character': 0},
                    },
                }
            ],
        },
    )
    request += write_rpc_request(
        2, 'textDocument/documentSymbol', {'textDocument': {'uri': str(unknown_file)}}
    )
    free_file = test_dir / 'subdir' / 'test_free.f90'
    request += write_rpc_notification(
        'textDocument/didChange',
        {
            'textDocument': {'uri': str(free_file)},
            'contentChanges': [
                {
                    'text': ' unicode test',
                    'range': {
                        'start': {'line': 3, 'character': 3},
                        'end': {'line': 3, 'character': 3},
                    },
                },
                {
                    'text': '',
                    'range': {
                        'start': {'line': 6, 'character': 0},
                        'end': {'line': 31, 'character': 0},
                    },
                },
                {
                    'text': '',
                    'range': {
                        'start': {'line': 7, 'character': 0},
                        'end': {'line': 39, 'character': 0},
                    },
                },
            ],
        },
    )
    request += write_rpc_request(
        3, 'textDocument/documentSymbol', {'textDocument': {'uri': str(free_file)}}
    )
    errcode, results = run_request(request, fortls_args=['--disable_diagnostics'])
    assert errcode == 0
    assert len(results) == 3
    assert len(results[1]) == 1
    assert len(results[2]) == 5
class TestOkhsvProperties(util.ColorAsserts, unittest.TestCase):
    """Channel get/set tests for the --okhsv color space."""

    # Shared fixture color: hue 120, 50% saturation, 50% value, full alpha.
    COLOR = 'color(--okhsv 120 50% 50% / 1)'

    def test_names(self):
        # Channel names in declaration order.
        self.assertEqual(Color(self.COLOR)._space.names(), ('h', 's', 'v'))

    def test_hue(self):
        color = Color(self.COLOR)
        self.assertEqual(color['hue'], 120)
        color['hue'] = 110
        self.assertEqual(color['hue'], 110)

    def test_saturation(self):
        color = Color(self.COLOR)
        self.assertEqual(color['saturation'], 0.5)
        color['saturation'] = 0.6
        self.assertEqual(color['saturation'], 0.6)

    def test_value(self):
        color = Color(self.COLOR)
        self.assertEqual(color['value'], 0.5)
        color['value'] = 0.4
        self.assertEqual(color['value'], 0.4)

    def test_alpha(self):
        color = Color(self.COLOR)
        self.assertEqual(color['alpha'], 1)
        color['alpha'] = 0.5
        self.assertEqual(color['alpha'], 0.5)
def from_kg():
    """Convert the kilogram value in ``e2_value`` and refresh output widgets t1..t5.

    Reads the entry once (the original read and parsed it five times) and
    writes gram/pound/ounce/stone/US-ton conversions into the text widgets.
    """
    kg = float(e2_value.get())
    gram = kg * 1000
    pound = kg * 2.20462
    ounce = kg * 35.274
    stone = kg * 0.157473
    # BUG fix: the factor was truncated to ``0.`` (always zero).
    # 1 kg = 0.00110231 US (short) tons.
    uston = kg * 0.00110231
    for widget, value in ((t1, gram), (t2, pound), (t3, ounce), (t4, stone), (t5, uston)):
        widget.delete('1.0', END)
        widget.insert(END, value)
# NOTE(review): the three bare expressions below look like stripped
# @mock.patch(...) decorators (subprocess.check_output raising
# CalledProcessError, plus patched sdlog.error/sdlog.info) — confirm against
# the original test module; as written they are not valid statements.
('subprocess.check_output', side_effect=[subprocess.CalledProcessError(1, cmd='check_output', output=b'')])
('Updater.sdlog.error')
('Updater.sdlog.info')
def test_apply_dom0_state_failure(mocked_info, mocked_error, mocked_subprocess):
    """apply_dom0_state must log both a friendly and the raw error on failure."""
    updater.apply_dom0_state()
    log_error_calls = [
        call('Failed to apply dom0 state. See launcher-detail.log for details.'),
        call("Command 'check_output' returned non-zero exit status 1."),
    ]
    # The state application shells out exactly once via qubesctl.
    mocked_subprocess.assert_called_once_with(['sudo', 'qubesctl', '--show-output', 'state.highstate'])
    mocked_info.assert_called_once_with('Applying dom0 state')
    mocked_error.assert_has_calls(log_error_calls)
class PidEnv(dict):
    """Namespace used to ``exec`` a pid script.

    Unknown names looked up during execution are materialized as ParentPid
    objects; assignments of ChildPid values are recorded under their parent
    in ``spawned``.  SECURITY NOTE: ``exec`` runs arbitrary code from
    ``prog`` — only feed trusted input.
    """

    def __init__(self, prog):
        dict.__init__(self)
        self.killed = set()
        self.spawned = defaultdict(list)
        self.next = {}
        self._vars = set()
        self(prog)

    def __call__(self, prog):
        exec(prog, self)
        # Every killed pid and every spawned child counts as a known variable.
        self._vars.update(self.killed)
        for children in self.spawned.values():
            self._vars.update(children)

    def __getitem__(self, name):
        try:
            return dict.__getitem__(self, name)
        except KeyError:
            # First mention of a name: create a parent pid for it, routed
            # through __setitem__ so bookkeeping stays consistent.
            pid = self[name] = ParentPid(name, self)
            return pid

    def __setitem__(self, name, value):
        if isinstance(value, ChildPid):
            self.spawned[value.parent.name].append(name)
        dict.__setitem__(self, name, value)

    def vars(self):
        # Copy so callers cannot mutate internal bookkeeping.
        return set(self._vars)
class _coconut_tail_call(_coconut_baseclass):
    """Marker object bundling a deferred call (func, args, kwargs)."""

    __slots__ = ('func', 'args', 'kwargs')

    def __init__(self, _coconut_func, *args, **kwargs):
        self.func, self.args, self.kwargs = _coconut_func, args, kwargs

    def __reduce__(self):
        # Pickle support: rebuild an equivalent marker from its three fields.
        return (self.__class__, (self.func, self.args, self.kwargs))
def CheckUpdate(force_appear=True, geekey=None):
    """Compare the running version to the fetched network version and prompt.

    If the background version fetch is still in progress, reschedule this
    check via a short one-shot timer instead of blocking.  ``geekey`` is the
    wx parent window for dialogs; when ``force_appear`` is True, "up to
    date" and failure dialogs are shown too (otherwise only the
    new-version prompt appears).
    """
    if INFO['version_processing']:
        # Fetch still running: rearm the timer and retry shortly.
        if INFO['check_timer']:
            INFO['check_timer'].Stop()
        INFO['check_timer'] = WxCallLater(1, CheckUpdate, force_appear=force_appear, geekey=geekey)
        return
    INFO['check_timer'] = None
    try:
        current_version = INFO['Version']
        new_version = INFO['NetVersion']
        if (not new_version):
            raise Exception('failed to get network version information')
        if (current_version < new_version):
            # Newer version available: offer to open the download page.
            style = ((wx.OK | wx.CANCEL) | wx.ICON_NONE)
            dlg = wx.MessageDialog(geekey, lt('_new_version_detail'), lt('_new_version_available'), style=style)
            dlg.SetSize(800, 500)
            dlg.SetOKCancelLabels(lt('Update'), lt('Cancel'))
            if (dlg.ShowModal() == wx.ID_OK):
                webbrowser.open(INFO['UpdateAddr'])
                pass
            dlg.Destroy()
            pass
        else:
            # Already up to date; only announce it when explicitly requested.
            if force_appear:
                DialogShow(lt('_version_fine'), lt('_version_fine_detail'), geekey=geekey)
                pass
            pass
        pass
    except Exception as e:
        # Version check failed (typically offline); optionally offer homepage.
        log.log(lt('Check update failed for: {0}', e))
        if force_appear:
            style = ((wx.OK | wx.CANCEL) | wx.ICON_NONE)
            dlg = wx.MessageDialog(geekey, lt('_network_unavailable_detail'), lt('_network_unavailable'), style=style)
            dlg.SetSize(800, 500)
            dlg.SetOKCancelLabels(lt('_to_index'), lt('Cancel'))
            if (dlg.ShowModal() == wx.ID_OK):
                webbrowser.open(lt(INFO['HomePage']))
                pass
            pass
        pass
    pass
class OptionPlotoptionsAreasplinerangeSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Note-duration mapping options for areasplinerange sonification.

    NOTE(review): each accessor was defined twice with the same name (getter
    then setter), so the setter clobbered the getter and reads were
    impossible.  These look like ``@property``/``.setter`` pairs whose
    decorators were stripped; restored below — confirm against upstream.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def main() -> None:
    """CLI entry point: convert a diffusers ControlNet checkpoint to Refiners format."""
    parser = argparse.ArgumentParser(description='Convert a diffusers ControlNet model to a Refiners ControlNet model')
    parser.add_argument(
        '--from',
        type=str,
        dest='source_path',
        default='lllyasviel/sd-controlnet-depth',
        help='Can be a path to a .bin, a .safetensors file, or a model identifier from Hugging Face Hub. Defaults to lllyasviel/sd-controlnet-depth',
    )
    parser.add_argument(
        '--to',
        type=str,
        dest='output_path',
        required=False,
        default=None,
        help='Output path (.safetensors) for converted model. If not provided, the output path will be the same as the source path.',
    )
    args = parser.parse_args(namespace=Args())
    # No explicit destination: derive one from the source file's stem.
    if args.output_path is None:
        args.output_path = f'{Path(args.source_path).stem}-controlnet.safetensors'
    tensors = convert(args=args)
    save_to_safetensors(path=args.output_path, tensors=tensors)
def extractHumbugbistroBlogspotCom(item):
    """Map a feed item to a release message.

    Returns None for preview/untitled items, a release message when a known
    tag matches, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # No chapter/volume info, or an explicit preview post: not a release.
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    matched = next(
        ((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']),
        None,
    )
    if matched is None:
        return False
    name, tl_type = matched
    return buildReleaseMessageWithType(
        item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
    )
class CCITTG4Parser(BitParser):
    """CCITT Group 4 (T.6) fax decoder built on BitParser lookup trees.

    Class construction registers the mode/run-length code tables; instances
    then feed bytes bit-by-bit through ``feedbytes`` and reconstruct scan
    lines into ``_curline``.

    NOTE(review): a large number of the code strings below are empty (''),
    which does not match any valid T.4/T.6 code table — this looks like
    truncation in this copy of the file.  Do not use these tables as-is;
    restore them from the original source or the ITU-T T.4 tables.
    """

    # --- mode codes (vertical/horizontal/pass/uncompressed/extensions) ---
    MODE = [None, None]
    BitParser.add(MODE, 0, '1'); BitParser.add(MODE, (+ 1), '011'); BitParser.add(MODE, (- 1), '010')
    BitParser.add(MODE, 'h', '001'); BitParser.add(MODE, 'p', '0001')
    BitParser.add(MODE, (+ 2), '000011'); BitParser.add(MODE, (- 2), '000010')
    BitParser.add(MODE, (+ 3), '0000011'); BitParser.add(MODE, (- 3), '0000010')
    BitParser.add(MODE, 'u', ''); BitParser.add(MODE, 'x1', ''); BitParser.add(MODE, 'x2', '')
    BitParser.add(MODE, 'x3', ''); BitParser.add(MODE, 'x4', ''); BitParser.add(MODE, 'x5', '')
    BitParser.add(MODE, 'x6', ''); BitParser.add(MODE, 'x7', ''); BitParser.add(MODE, 'e', '')

    # --- white run-length codes (terminating 0-63, then makeup codes) ---
    WHITE = [None, None]
    BitParser.add(WHITE, 0, ''); BitParser.add(WHITE, 1, '000111'); BitParser.add(WHITE, 2, '0111')
    BitParser.add(WHITE, 3, '1000'); BitParser.add(WHITE, 4, '1011'); BitParser.add(WHITE, 5, '1100')
    BitParser.add(WHITE, 6, '1110'); BitParser.add(WHITE, 7, '1111'); BitParser.add(WHITE, 8, '10011')
    BitParser.add(WHITE, 9, '10100'); BitParser.add(WHITE, 10, '00111'); BitParser.add(WHITE, 11, '01000')
    BitParser.add(WHITE, 12, '001000'); BitParser.add(WHITE, 13, '000011'); BitParser.add(WHITE, 14, '110100')
    BitParser.add(WHITE, 15, '110101'); BitParser.add(WHITE, 16, '101010'); BitParser.add(WHITE, 17, '101011')
    BitParser.add(WHITE, 18, '0100111'); BitParser.add(WHITE, 19, '0001100'); BitParser.add(WHITE, 20, '0001000')
    BitParser.add(WHITE, 21, '0010111'); BitParser.add(WHITE, 22, '0000011'); BitParser.add(WHITE, 23, '0000100')
    BitParser.add(WHITE, 24, '0101000'); BitParser.add(WHITE, 25, '0101011'); BitParser.add(WHITE, 26, '0010011')
    BitParser.add(WHITE, 27, '0100100'); BitParser.add(WHITE, 28, '0011000')
    BitParser.add(WHITE, 29, ''); BitParser.add(WHITE, 30, ''); BitParser.add(WHITE, 31, '')
    BitParser.add(WHITE, 32, ''); BitParser.add(WHITE, 33, ''); BitParser.add(WHITE, 34, '')
    BitParser.add(WHITE, 35, ''); BitParser.add(WHITE, 36, ''); BitParser.add(WHITE, 37, '')
    BitParser.add(WHITE, 38, ''); BitParser.add(WHITE, 39, ''); BitParser.add(WHITE, 40, '')
    BitParser.add(WHITE, 41, ''); BitParser.add(WHITE, 42, ''); BitParser.add(WHITE, 43, '')
    BitParser.add(WHITE, 44, ''); BitParser.add(WHITE, 45, ''); BitParser.add(WHITE, 46, '')
    BitParser.add(WHITE, 47, ''); BitParser.add(WHITE, 48, ''); BitParser.add(WHITE, 49, '')
    BitParser.add(WHITE, 50, ''); BitParser.add(WHITE, 51, ''); BitParser.add(WHITE, 52, '')
    BitParser.add(WHITE, 53, ''); BitParser.add(WHITE, 54, ''); BitParser.add(WHITE, 55, '')
    BitParser.add(WHITE, 56, ''); BitParser.add(WHITE, 57, ''); BitParser.add(WHITE, 58, '')
    BitParser.add(WHITE, 59, ''); BitParser.add(WHITE, 60, ''); BitParser.add(WHITE, 61, '')
    BitParser.add(WHITE, 62, ''); BitParser.add(WHITE, 63, '')
    # White makeup codes (multiples of 64).
    BitParser.add(WHITE, 64, '11011'); BitParser.add(WHITE, 128, '10010')
    BitParser.add(WHITE, 192, '010111'); BitParser.add(WHITE, 256, '0110111')
    BitParser.add(WHITE, 320, ''); BitParser.add(WHITE, 384, ''); BitParser.add(WHITE, 448, '')
    BitParser.add(WHITE, 512, ''); BitParser.add(WHITE, 576, ''); BitParser.add(WHITE, 640, '')
    BitParser.add(WHITE, 704, ''); BitParser.add(WHITE, 768, ''); BitParser.add(WHITE, 832, '')
    BitParser.add(WHITE, 896, ''); BitParser.add(WHITE, 960, ''); BitParser.add(WHITE, 1024, '')
    BitParser.add(WHITE, 1088, ''); BitParser.add(WHITE, 1152, ''); BitParser.add(WHITE, 1216, '')
    BitParser.add(WHITE, 1280, ''); BitParser.add(WHITE, 1344, ''); BitParser.add(WHITE, 1408, '')
    BitParser.add(WHITE, 1472, ''); BitParser.add(WHITE, 1536, ''); BitParser.add(WHITE, 1600, '')
    BitParser.add(WHITE, 1664, '011000'); BitParser.add(WHITE, 1728, '')
    BitParser.add(WHITE, 1792, ''); BitParser.add(WHITE, 1856, ''); BitParser.add(WHITE, 1920, '')
    BitParser.add(WHITE, 1984, ''); BitParser.add(WHITE, 2048, ''); BitParser.add(WHITE, 2112, '')
    BitParser.add(WHITE, 2176, ''); BitParser.add(WHITE, 2240, ''); BitParser.add(WHITE, 2304, '')
    BitParser.add(WHITE, 2368, ''); BitParser.add(WHITE, 2432, ''); BitParser.add(WHITE, 2496, '')
    BitParser.add(WHITE, 2560, '')

    # --- black run-length codes ---
    BLACK = [None, None]
    BitParser.add(BLACK, 0, ''); BitParser.add(BLACK, 1, '010'); BitParser.add(BLACK, 2, '11')
    BitParser.add(BLACK, 3, '10'); BitParser.add(BLACK, 4, '011'); BitParser.add(BLACK, 5, '0011')
    BitParser.add(BLACK, 6, '0010'); BitParser.add(BLACK, 7, '00011'); BitParser.add(BLACK, 8, '000101')
    BitParser.add(BLACK, 9, '000100'); BitParser.add(BLACK, 10, '0000100'); BitParser.add(BLACK, 11, '0000101')
    BitParser.add(BLACK, 12, '0000111')
    BitParser.add(BLACK, 13, ''); BitParser.add(BLACK, 14, ''); BitParser.add(BLACK, 15, '')
    BitParser.add(BLACK, 16, ''); BitParser.add(BLACK, 17, ''); BitParser.add(BLACK, 18, '')
    BitParser.add(BLACK, 19, ''); BitParser.add(BLACK, 20, ''); BitParser.add(BLACK, 21, '')
    BitParser.add(BLACK, 22, ''); BitParser.add(BLACK, 23, ''); BitParser.add(BLACK, 24, '')
    BitParser.add(BLACK, 25, ''); BitParser.add(BLACK, 26, ''); BitParser.add(BLACK, 27, '')
    BitParser.add(BLACK, 28, ''); BitParser.add(BLACK, 29, ''); BitParser.add(BLACK, 30, '')
    BitParser.add(BLACK, 31, ''); BitParser.add(BLACK, 32, ''); BitParser.add(BLACK, 33, '')
    BitParser.add(BLACK, 34, ''); BitParser.add(BLACK, 35, ''); BitParser.add(BLACK, 36, '')
    BitParser.add(BLACK, 37, ''); BitParser.add(BLACK, 38, ''); BitParser.add(BLACK, 39, '')
    BitParser.add(BLACK, 40, ''); BitParser.add(BLACK, 41, ''); BitParser.add(BLACK, 42, '')
    BitParser.add(BLACK, 43, ''); BitParser.add(BLACK, 44, ''); BitParser.add(BLACK, 45, '')
    BitParser.add(BLACK, 46, ''); BitParser.add(BLACK, 47, ''); BitParser.add(BLACK, 48, '')
    BitParser.add(BLACK, 49, ''); BitParser.add(BLACK, 50, ''); BitParser.add(BLACK, 51, '')
    BitParser.add(BLACK, 52, ''); BitParser.add(BLACK, 53, ''); BitParser.add(BLACK, 54, '')
    BitParser.add(BLACK, 55, ''); BitParser.add(BLACK, 56, ''); BitParser.add(BLACK, 57, '')
    BitParser.add(BLACK, 58, ''); BitParser.add(BLACK, 59, ''); BitParser.add(BLACK, 60, '')
    BitParser.add(BLACK, 61, ''); BitParser.add(BLACK, 62, ''); BitParser.add(BLACK, 63, '')
    # Black makeup codes (multiples of 64).
    BitParser.add(BLACK, 64, ''); BitParser.add(BLACK, 128, ''); BitParser.add(BLACK, 192, '')
    BitParser.add(BLACK, 256, ''); BitParser.add(BLACK, 320, ''); BitParser.add(BLACK, 384, '')
    BitParser.add(BLACK, 448, ''); BitParser.add(BLACK, 512, ''); BitParser.add(BLACK, 576, '')
    BitParser.add(BLACK, 640, ''); BitParser.add(BLACK, 704, ''); BitParser.add(BLACK, 768, '')
    BitParser.add(BLACK, 832, ''); BitParser.add(BLACK, 896, ''); BitParser.add(BLACK, 960, '')
    BitParser.add(BLACK, 1024, ''); BitParser.add(BLACK, 1088, ''); BitParser.add(BLACK, 1152, '')
    BitParser.add(BLACK, 1216, ''); BitParser.add(BLACK, 1280, ''); BitParser.add(BLACK, 1344, '')
    BitParser.add(BLACK, 1408, ''); BitParser.add(BLACK, 1472, ''); BitParser.add(BLACK, 1536, '')
    BitParser.add(BLACK, 1600, ''); BitParser.add(BLACK, 1664, ''); BitParser.add(BLACK, 1728, '')
    BitParser.add(BLACK, 1792, ''); BitParser.add(BLACK, 1856, ''); BitParser.add(BLACK, 1920, '')
    BitParser.add(BLACK, 1984, ''); BitParser.add(BLACK, 2048, ''); BitParser.add(BLACK, 2112, '')
    BitParser.add(BLACK, 2176, ''); BitParser.add(BLACK, 2240, ''); BitParser.add(BLACK, 2304, '')
    BitParser.add(BLACK, 2368, ''); BitParser.add(BLACK, 2432, ''); BitParser.add(BLACK, 2496, '')
    BitParser.add(BLACK, 2560, '')

    # --- uncompressed-mode codes ---
    UNCOMPRESSED = [None, None]
    BitParser.add(UNCOMPRESSED, '1', '1'); BitParser.add(UNCOMPRESSED, '01', '01')
    BitParser.add(UNCOMPRESSED, '001', '001'); BitParser.add(UNCOMPRESSED, '0001', '0001')
    BitParser.add(UNCOMPRESSED, '00001', '00001'); BitParser.add(UNCOMPRESSED, '00000', '000001')
    BitParser.add(UNCOMPRESSED, 'T00', ''); BitParser.add(UNCOMPRESSED, 'T10', '')
    BitParser.add(UNCOMPRESSED, 'T000', ''); BitParser.add(UNCOMPRESSED, 'T100', '')
    BitParser.add(UNCOMPRESSED, 'T0000', ''); BitParser.add(UNCOMPRESSED, 'T1000', '')
    BitParser.add(UNCOMPRESSED, 'T00000', ''); BitParser.add(UNCOMPRESSED, 'T10000', '')

    class EOFB(Exception):
        # Raised internally when the end-of-facsimile-block code is seen.
        pass

    class InvalidData(Exception):
        # Raised on codes that make no sense in the current parser state.
        pass

    class ByteSkip(Exception):
        # Raised after a full line when bytealign is set, to resync on a
        # byte boundary (caught in feedbytes).
        pass

    def __init__(self, width, bytealign=False):
        """width: pixels per scan line; bytealign: resync lines on byte boundaries."""
        BitParser.__init__(self)
        self.width = width
        self.bytealign = bytealign
        self.reset()
        return

    def feedbytes(self, data):
        """Feed raw bytes, MSB first, into the bit parser until EOFB."""
        for b in data:
            try:
                for m in (128, 64, 32, 16, 8, 4, 2, 1):
                    self._parse_bit((b & m))
            except self.ByteSkip:
                # Drop the rest of this byte and restart in mode state.
                self._accept = self._parse_mode
                self._state = self.MODE
            except self.EOFB:
                break
        return

    def _parse_mode(self, mode):
        """Dispatch on a decoded mode code; returns the next lookup table."""
        if (mode == 'p'):
            self._do_pass()
            self._flush_line()
            return self.MODE
        elif (mode == 'h'):
            # Horizontal mode: expect two run lengths (current color, then opposite).
            self._n1 = 0
            self._accept = self._parse_horiz1
            if self._color:
                return self.WHITE
            else:
                return self.BLACK
        elif (mode == 'u'):
            self._accept = self._parse_uncompressed
            return self.UNCOMPRESSED
        elif (mode == 'e'):
            raise self.EOFB
        elif isinstance(mode, int):
            # Vertical mode: int is the offset from the reference transition.
            self._do_vertical(mode)
            self._flush_line()
            return self.MODE
        else:
            raise self.InvalidData(mode)

    def _parse_horiz1(self, n):
        """Accumulate the first horizontal run; makeup codes (>=64) continue."""
        if (n is None):
            raise self.InvalidData
        self._n1 += n
        if (n < 64):
            # Terminating code: switch color and start the second run.
            self._n2 = 0
            self._color = (1 - self._color)
            self._accept = self._parse_horiz2
        if self._color:
            return self.WHITE
        else:
            return self.BLACK

    def _parse_horiz2(self, n):
        """Accumulate the second horizontal run; emit both runs when done."""
        if (n is None):
            raise self.InvalidData
        self._n2 += n
        if (n < 64):
            self._color = (1 - self._color)
            self._accept = self._parse_mode
            self._do_horizontal(self._n1, self._n2)
            self._flush_line()
            return self.MODE
        elif self._color:
            return self.WHITE
        else:
            return self.BLACK

    def _parse_uncompressed(self, bits):
        """Handle uncompressed-mode bit strings; 'T…' codes terminate the mode."""
        if (not bits):
            raise self.InvalidData
        if bits.startswith('T'):
            # Terminator: second char encodes the color to resume with.
            self._accept = self._parse_mode
            self._color = int(bits[1])
            self._do_uncompressed(bits[2:])
            return self.MODE
        else:
            self._do_uncompressed(bits)
            return self.UNCOMPRESSED

    def _get_bits(self):
        # Debug helper: current line up to the write position, as '0'/'1' text.
        return ''.join((str(b) for b in self._curline[:self._curpos]))

    def _get_refline(self, i):
        # Debug helper: reference line with '[]' marking position i.
        if (i < 0):
            return ('[]' + ''.join((str(b) for b in self._refline)))
        elif (len(self._refline) <= i):
            return (''.join((str(b) for b in self._refline)) + '[]')
        else:
            return ((((''.join((str(b) for b in self._refline[:i])) + '[') + str(self._refline[i])) + ']') + ''.join((str(b) for b in self._refline[(i + 1):])))

    def reset(self):
        """Reset decoder state to the top of a new image."""
        self._y = 0
        # Lines start as all-white (1s).
        self._curline = array.array('b', ([1] * self.width))
        self._reset_line()
        self._accept = self._parse_mode
        self._state = self.MODE
        return

    def output_line(self, y, bits):
        # Default sink: print the line; subclasses are expected to override.
        print(y, ''.join((str(b) for b in bits)))
        return

    def _reset_line(self):
        # Current line becomes the reference line for 2-D coding.
        self._refline = self._curline
        self._curline = array.array('b', ([1] * self.width))
        self._curpos = (- 1)
        self._color = 1
        return

    def _flush_line(self):
        """Emit the line once it is complete, then start the next one."""
        if (self.width <= self._curpos):
            self.output_line(self._y, self._curline)
            self._y += 1
            self._reset_line()
            if self.bytealign:
                raise self.ByteSkip
        return

    def _do_vertical(self, dx):
        """Vertical-mode coding: place the next transition dx pixels from b1."""
        # Find b1: first changing element on the reference line to the right
        # of the current position with the opposite color.
        x1 = (self._curpos + 1)
        while 1:
            if (x1 == 0):
                if ((self._color == 1) and (self._refline[x1] != self._color)):
                    break
            elif (x1 == len(self._refline)):
                break
            elif ((self._refline[(x1 - 1)] == self._color) and (self._refline[x1] != self._color)):
                break
            x1 += 1
        x1 += dx
        x0 = max(0, self._curpos)
        x1 = max(0, min(self.width, x1))
        # Fill the run between the old and new positions with current color.
        if (x1 < x0):
            for x in range(x1, x0):
                self._curline[x] = self._color
        elif (x0 < x1):
            for x in range(x0, x1):
                self._curline[x] = self._color
        self._curpos = x1
        self._color = (1 - self._color)
        return

    def _do_pass(self):
        """Pass-mode coding: skip past b2 on the reference line, keeping color."""
        x1 = (self._curpos + 1)
        # Advance to b1 (first opposite-color transition) ...
        while 1:
            if (x1 == 0):
                if ((self._color == 1) and (self._refline[x1] != self._color)):
                    break
            elif (x1 == len(self._refline)):
                break
            elif ((self._refline[(x1 - 1)] == self._color) and (self._refline[x1] != self._color)):
                break
            x1 += 1
        # ... then on to b2 (next transition back to the current color).
        while 1:
            if (x1 == 0):
                if ((self._color == 0) and (self._refline[x1] == self._color)):
                    break
            elif (x1 == len(self._refline)):
                break
            elif ((self._refline[(x1 - 1)] != self._color) and (self._refline[x1] == self._color)):
                break
            x1 += 1
        for x in range(self._curpos, x1):
            self._curline[x] = self._color
        self._curpos = x1
        return

    def _do_horizontal(self, n1, n2):
        """Horizontal-mode coding: write run of n1 current-color then n2 opposite."""
        if (self._curpos < 0):
            self._curpos = 0
        x = self._curpos
        for _ in range(n1):
            if (len(self._curline) <= x):
                break
            self._curline[x] = self._color
            x += 1
        for _ in range(n2):
            if (len(self._curline) <= x):
                break
            self._curline[x] = (1 - self._color)
            x += 1
        self._curpos = x
        return

    def _do_uncompressed(self, bits):
        """Write literal bits from uncompressed mode into the current line."""
        for c in bits:
            self._curline[self._curpos] = int(c)
            self._curpos += 1
            self._flush_line()
        return
('config_type', ['strict'])  # NOTE(review): residue of a stripped parametrize decorator — confirm upstream
def test_missing_envs_not_required_in_strict_mode(config, yaml_config_file_3):
    """Undefined env placeholders resolve to None when envs_required=False."""
    yaml_body = 'section:\n undefined: ${UNDEFINED}\n'
    with open(yaml_config_file_3, 'w') as fh:
        fh.write(yaml_body)
    config.set_yaml_files([yaml_config_file_3])
    config.load(envs_required=False)
    assert config.section.undefined() is None
def fortios_endpoint_control(data, fos, check_mode):
    """Run the endpoint-control profile task and normalise the result.

    Returns the raw response in check mode, otherwise a
    (failed, changed, response, diff) tuple.
    """
    fos.do_member_operation('endpoint-control', 'profile')
    if not data['endpoint_control_profile']:
        # No task body supplied: abort the module run.
        fos._module.fail_json(msg=('missing task body: %s' % 'endpoint_control_profile'))
    else:
        resp = endpoint_control_profile(data, fos, check_mode)
    if check_mode:
        return resp
    # Treat a missing revision flag as "changed".
    if 'revision_changed' in resp:
        revision_changed = resp['revision_changed']
    else:
        revision_changed = True
    return (
        not is_successful_status(resp),
        is_successful_status(resp) and revision_changed,
        resp,
        {},
    )
_os(*metadata.platforms)  # NOTE(review): looks like a stripped @common.requires_os decorator — confirm upstream
def main():
    """Emulate compressing sensitive system files (rule-testing behavior)."""
    common.log('Compressing sensitive files')
    archives = ['totally-legit.tar', 'official-business.zip', 'expense-reports.gz']
    for command in (
        ['tar', '-cvf', archives[0], '/etc/shadow'],
        ['zip', archives[1], '/etc/passwd'],
        ['gzip', '/etc/group', archives[2]],
    ):
        # Best effort: a missing tool must not abort the remaining commands.
        try:
            common.execute(command)
        except OSError as exc:
            common.log(str(exc))
def genPassTable(satellites, qth, howmany=20):
    """Build a prioritized, time-sorted table of upcoming satellite passes.

    For each satellite: use TLE-predicted transits when TLE data exists,
    fall back to a fixed cron schedule, otherwise log a failure.  Adjacent
    passes with different priorities that overlap (within
    priorityTimeMargin) are resolved by dropping the lower-priority one.
    Returns a list of [satellite, start, duration, peak_elev, azimuth] rows.
    """
    passTable = {}
    for satellite in satellites:
        tleData = getTleData(satellite)
        priority = satellitesData[satellite]['priority']
        if tleData:
            czasStart = time.time()
            p = predict.transits(tleData, qth, czasStart)
            # NOTE(review): range(1, howmany) evaluates howmany-1 transits —
            # confirm whether the 1 offset is intentional.
            for i in range(1, howmany):
                transit = next(p)
                # Skip passes that are already (nearly) over after trimming.
                if (not (time.time() > (((transit.start + transit.duration()) - skipLast) - 1))):
                    if (int(transit.peak()['elevation']) >= minElev):
                        # Trim skipFirst/skipLast seconds off both ends of the pass.
                        passTable[transit.start] = [satellite, int((transit.start + skipFirst)), int(((transit.duration() - skipFirst) - skipLast)), int(transit.peak()['elevation']), int(transit.peak()['azimuth']), priority]
        elif ('fixedTime' in satellitesData[satellite]):
            # No TLE: schedule recordings from the configured cron expression.
            cron = satellitesData[satellite]['fixedTime']
            duration = getFixedRecordingTime(satellite)['fixedDuration']
            delta = 0
            for i in range(0, howmany):
                # Accumulate offsets to walk successive cron firings.
                entry = CronTab(cron).next(now=(time.time() + delta), default_utc=False)
                delta += entry
                start = (delta + time.time())
                # Elevation/azimuth unknown for fixed schedules: stored as '0'.
                passTable[start] = [satellite, int(start), int(duration), '0', '0', priority]
        else:
            log((" Can't find TLE data (in keplers) nor fixed time schedule (in config) for " + satellite), style=bc.FAIL)
    # Sort rows chronologically by start time.
    passTableSorted = []
    for start in sorted(passTable):
        passTableSorted.append(passTable[start])
    passTableSortedPrioritized = passTableSorted[:]
    passCount = len(passTableSorted)
    # Resolve overlaps between consecutive passes of differing priority
    # (lower number appears to win — marked entries are blanked, then dropped).
    for i in range(0, (passCount - 1)):
        (satelliteI, startI, durationI, peakI, azimuthI, priorityI) = passTableSorted[i]
        (satelliteJ, startJ, durationJ, peakJ, azimuthJ, priorityJ) = passTableSorted[(i + 1)]
        endTimeI = (startI + durationI)
        if (priorityI != priorityJ):
            if ((startJ + priorityTimeMargin) < endTimeI):
                if (priorityJ < priorityI):
                    log((' 1. discard %s, keep %s' % (satelliteI, satelliteJ)))
                    passTableSortedPrioritized[i] = ''
                elif (priorityJ > priorityI):
                    log((' 2. discard %s, keep %s' % (satelliteJ, satelliteI)))
                    passTableSortedPrioritized[(i + 1)] = ''
    # Drop discarded rows and strip the internal priority column.
    passTableSortedPrioritized = [x[:5] for x in passTableSortedPrioritized if (x != '')]
    return passTableSortedPrioritized
def test_item_getter():
    """ItemGetter stores provider and item name; setters are fluent."""
    source = providers.Object({'foo': 'bar'})
    getter = providers.ItemGetter()
    getter.set_provides(source)
    getter.set_name('foo')
    assert getter.provides is source
    assert getter.name == 'foo'
    # Both setters return the provider itself so calls can be chained.
    assert getter.set_provides(providers.Provider()) is getter
    assert getter.set_name('foo') is getter
('cuda.gemm_rcr_bias.config')  # NOTE(review): residue of a stripped registry decorator — confirm upstream
def gemm_rcr_config(func_attrs, dtype='float16'):
    """Build op instances for gemm_rcr_bias and adapt TMA-epilogue ops."""
    common.make_fproc(func_attrs, RCR, include_cutlass_3x_ops=True)
    import cutlass_lib
    for op in func_attrs['op_instance'].values():
        if not common.has_tma_epilogue(op):
            continue
        # TMA epilogue with bias: C operand is unused (void) and outputs go
        # column-major; swap in the bias-elementwise epilogue schedule.
        op.C.element = cutlass_lib.library.DataType.void
        op.C.layout = cutlass_lib.library.LayoutType.ColumnMajor
        op.D.layout = cutlass_lib.library.LayoutType.ColumnMajor
        op.epilogue_schedule = cutlass_lib.library.EpilogueScheduleBiasElementwiseMapping[op.epilogue_schedule]
class MixamoPropertyGroup(bpy.types.PropertyGroup):
    """Settings for the Mixamo root-motion conversion add-on.

    Fix over the previous revision: corrected user-facing typos in several
    property descriptions ("roation", "Seperate", "Naespaces",
    "transfered", "cleanup to before").  Property names, defaults and
    behavior are unchanged.
    """

    advanced: bpy.props.BoolProperty(name='Advanced Options', description='Display advanced options', default=False)
    experimental: bpy.props.BoolProperty(name='Experimental Options', description='Experimental Options (use with caution, dirty workarounds)', default=False)
    verbose_mode: bpy.props.BoolProperty(name='Verbose Mode', description='Enables verbose output for each step when converting', default=False)
    use_x: bpy.props.BoolProperty(name='Use X', description='If enabled, Horizontal motion is transferred to RootBone', default=True)
    use_y: bpy.props.BoolProperty(name='Use Y', description='If enabled, Horizontal motion is transferred to RootBone', default=True)
    use_z: bpy.props.BoolProperty(name='Use Z', description='If enabled, vertical motion is transferred to RootBone', default=True)
    on_ground: bpy.props.BoolProperty(name='On Ground', description='If enabled, root bone is on ground and only moves up at jumps', default=True)
    use_rotation: bpy.props.BoolProperty(name='Transfer Rotation', description='Whether to transfer rotation to root motion. Should be enabled for curve walking animations. Can be disabled for straight animations with strong hip Motion like Rolling', default=True)
    scale: bpy.props.FloatProperty(name='Scale', description='Scale down the Rig by this factor', default=1.0)
    restoffset: bpy.props.FloatVectorProperty(name='Restpose Offset', description='Offset restpose by this. Use to correct if origin is not on ground', default=(0.0, 0.0, 0.0))
    knee_offset: bpy.props.FloatVectorProperty(name='Knee Offset', description='Offset knee joints by this. Use to fix flipping legs.', default=(0.0, 0.0, 0.0), subtype='TRANSLATION')
    knee_bones: bpy.props.StringProperty(name='Knee Bones', description='Names of knee bones to offset. Separate names with commas.', maxlen=256, default='RightUpLeg,LeftUpLeg', subtype='NONE')
    force_overwrite: bpy.props.BoolProperty(name='Force Overwrite', description='If enabled, overwrites files if output path is the same as input', default=False)
    inpath: bpy.props.StringProperty(name='Input Path', description='Path to mixamorigs', maxlen=256, default='', subtype='DIR_PATH')
    add_leaf_bones: bpy.props.BoolProperty(name='Add Leaf Bones', description='If enabled, adds leaf bones on export when batch converting', default=False)
    outpath: bpy.props.StringProperty(name='Output Path', description='Where Processed rigs should be saved to', maxlen=256, default='', subtype='DIR_PATH')
    ignore_leaf_bones: bpy.props.BoolProperty(name='Ignore Leaf Bones', description='Ignore leaf bones on import', default=False)
    automatic_bone_orientation: bpy.props.BoolProperty(name='Automatic Bone Orientation', description='Try to align the major bone axis with the bone children', default=True)
    hipname: bpy.props.StringProperty(name='Hip Name', description='Additional Hipname to search for if not MixamoRig', maxlen=256, default='', subtype='NONE')
    b_remove_namespace: bpy.props.BoolProperty(name='Remove Namespace', description='Removes Namespaces from objects and bones', default=True)
    b_unreal_bones: bpy.props.BoolProperty(name='Use Unreal Engine bone schema', description='Renames bones to match unreal engine schema', default=False)
    fixbind: bpy.props.BoolProperty(name='Fix Bind', description='If enabled, adds a dummy mesh and binds it, to prevent loss of bindpose when exporting fbx', default=True)
    apply_rotation: bpy.props.BoolProperty(name='Apply Rotation', description='Applies rotation during conversion to prevent rotation and scaling issues', default=True)
    apply_scale: bpy.props.BoolProperty(name='Apply Scale', description='Applies scale during conversion to prevent rotation and scaling issues', default=False)
    quaternion_clean_pre: bpy.props.BoolProperty(name='Quaternion Clean Pre', description='Performs quaternion cleanup before conversion', default=True)
    quaternion_clean_post: bpy.props.BoolProperty(name='Quaternion Clean Post', description='Performs quaternion cleanup after conversion', default=True)
    foot_bone_workaround: bpy.props.BoolProperty(name='Foot Bone Workaround', description='Attempts to fix twisting of the foot bones', default=False)
class BusinessRoleRequest(AbstractCrudObject):
    """Generated Graph-API CRUD wrapper for a business role request node.

    Follows the facebook_business SDK pattern: Field/Role constant classes
    plus api_delete/api_get/api_update builders that either execute
    immediately, return a pending request, or enqueue into a batch.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isBusinessRoleRequest = True
        super(BusinessRoleRequest, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Graph API field names for this node.
        created_by = 'created_by'
        created_time = 'created_time'
        email = 'email'
        expiration_time = 'expiration_time'
        expiry_time = 'expiry_time'
        finance_role = 'finance_role'
        id = 'id'
        invite_link = 'invite_link'
        ip_role = 'ip_role'
        owner = 'owner'
        role = 'role'
        status = 'status'
        updated_by = 'updated_by'
        updated_time = 'updated_time'

    class Role():
        # Enum values accepted by the 'role' parameter.
        admin = 'ADMIN'
        ads_rights_reviewer = 'ADS_RIGHTS_REVIEWER'
        value_default = 'DEFAULT'
        developer = 'DEVELOPER'
        employee = 'EMPLOYEE'
        finance_analyst = 'FINANCE_ANALYST'
        finance_edit = 'FINANCE_EDIT'
        finance_editor = 'FINANCE_EDITOR'
        finance_view = 'FINANCE_VIEW'
        manage = 'MANAGE'
        partner_center_admin = 'PARTNER_CENTER_ADMIN'
        partner_center_analyst = 'PARTNER_CENTER_ANALYST'
        partner_center_education = 'PARTNER_CENTER_EDUCATION'
        partner_center_marketing = 'PARTNER_CENTER_MARKETING'
        partner_center_operations = 'PARTNER_CENTER_OPERATIONS'

    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Issue DELETE on this node; batch/pending variants return the request."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Issue GET on this node; batch/pending variants return the request."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=BusinessRoleRequest, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Issue POST (update) on this node; 'role' is validated against Role."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'role': 'role_enum'}
        enums = {'role_enum': BusinessRoleRequest.Role.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=BusinessRoleRequest, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Field-name -> type mapping used by the SDK's response parsing.
    _field_types = {'created_by': 'Object', 'created_time': 'datetime', 'email': 'string', 'expiration_time': 'datetime', 'expiry_time': 'datetime', 'finance_role': 'string', 'id': 'string', 'invite_link': 'string', 'ip_role': 'string', 'owner': 'Business', 'role': 'string', 'status': 'string', 'updated_by': 'Object', 'updated_time': 'datetime'}

    # NOTE(review): takes ``cls`` but has no @classmethod decorator in this
    # copy — in the generated SDK this is usually a classmethod; confirm.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        field_enum_info['Role'] = BusinessRoleRequest.Role.__dict__.values()
        return field_enum_info
class IsVersionCorrectTestCase(TestCase):
    """Tests for the `_is_version_correct` helper."""

    def test__is_version_correct_positive(self):
        """Returns True when the context's agent config matches the public id."""
        pid = PublicId('author', 'package', '0.1.0')
        ctx = ContextMock(version=pid.version)
        ctx.agent_config.public_id = pid
        self.assertTrue(_is_version_correct(ctx, pid))

    def test__is_version_correct_negative(self):
        """Returns False when the requested version differs from the actual one."""
        requested = PublicId('author', 'package', '0.1.0')
        actual = PublicId('author', 'package', '0.1.1')
        ctx = ContextMock(version=actual.version)
        ctx.agent_config.public_id = actual
        self.assertFalse(_is_version_correct(ctx, requested))
def scrape_linkedin_profile(linkedin_profile_url: str):
    """Fetch a LinkedIn profile through the Proxycurl API and prune empty fields.

    Requires the PROXYCURL_API_KEY environment variable.
    """
    # NOTE(review): the endpoint URL literal was truncated during extraction;
    # restore the full Proxycurl person-profile endpoint URL before use.
    api_endpoint = '
    header_dic = {'Authorization': f"Bearer {os.environ.get('PROXYCURL_API_KEY')}"}
    response = requests.get(api_endpoint, params={'url': linkedin_profile_url}, headers=header_dic)
    data = response.json()
    # Drop empty values and noisy keys. NOTE(review): '' appears twice in the
    # exclusion tuple — harmless but probably one entry was meant to differ.
    data = {k: v for (k, v) in data.items() if ((v not in ([], '', '', None)) and (k not in ['people_also_viewed', 'certifications']))}
    if data.get('groups'):
        for group_dict in data.get('groups'):
            # Strip group avatar URLs to keep the payload small.
            group_dict.pop('profile_pic_url')
    return data
def populate_directory(fargs):
    """Scan one directory and enqueue (filename, location, appname) candidates.

    fargs is a (directory, checkmodes) pair (packed for pool.map-style calling).
    Any exception is logged and swallowed so one bad directory cannot kill the
    worker. Returns the elapsed wall-clock time spent on this directory.
    """
    (directory, checkmodes) = fargs
    start_time = time.time()
    try:
        if (not validate_directory(directory, checkmodes)):
            return (time.time() - start_time)
        logging.debug('Populating: %s', directory)
        for filename in filepaths_in_dir(directory, checkmodes):
            for appname in database.issues:
                for loc in database.locations(appname, with_lists=False):
                    # Suffix match: the known location is the tail of the path.
                    if filename.endswith(loc):
                        queue.put((filename, loc, appname))
    except Exception:
        logging.error(traceback.format_exc())
    return (time.time() - start_time)
def test_gen_noise():
    """gen_noise output is NaN-free and roughly matches the requested level."""
    freqs = gen_freqs([3, 50], 0.5)
    for level in (0.1, 0.5):
        noise = gen_noise(freqs, level)
        assert not np.any(np.isnan(noise))
        assert np.isclose(np.std(noise), level, 0.25)
# NOTE(review): the decorator prefixes were lost in extraction; these four lines
# originally read e.g. @pytest.mark.skipif(...), @pytest.mark.parametrize(...),
# @settings(max_examples=..., deadline=None) and @given(args=draw_lstm_args())
# (hypothesis) — restore before running.
.skipif((platform.machine() == 'aarch64'), reason='Flaky, skip temporarily')
.parametrize('ops', XP_OPS)
(max_examples=MAX_EXAMPLES, deadline=None)
(args=draw_lstm_args())
def test_lstm_forward_training_fuzz(ops, args):
    """Fuzz: each backend's LSTM forward pass must match the reference Ops."""
    (params, H0, C0, X, size_at_t) = args
    reference_ops = Ops()
    reference = reference_ops.lstm_forward_training(params, H0, C0, X, size_at_t)
    (Y, fwd_state) = ops.lstm_forward_training(params, H0, C0, X, size_at_t)
    # Compare intermediate state tensors first, then the output.
    assert_allclose(fwd_state[2], reference[1][2], atol=0.0001, rtol=0.001)
    assert_allclose(fwd_state[1], reference[1][1], atol=0.0001, rtol=0.001)
    assert_allclose(Y, reference[0], atol=0.0001, rtol=0.001)
def test_stereo(audio, multifile_format):
    """Round-trip a stereo signal through stempeg write/read; shape must survive."""
    # Only meaningful for 2-D (samples, channels) fixtures; other shapes no-op.
    if (audio.ndim == 2):
        # delete=False so the file survives for read_stems on platforms where
        # an open NamedTemporaryFile cannot be reopened (e.g. Windows).
        with tmp.NamedTemporaryFile(delete=False, suffix=('.' + multifile_format)) as tempfile:
            stempeg.write_audio(tempfile.name, audio, sample_rate=44100)
            (loaded_audio, rate) = stempeg.read_stems(tempfile.name, always_3d=True)
            assert (audio.shape == loaded_audio.shape)
class SubCategoryBase(str):
    """Base class for string-valued subcategory enumerations.

    NOTE(review): every method takes `cls` and `list_available_type` iterates
    the class itself, so these were almost certainly @classmethods on an Enum
    mixin before extraction stripped the decorators — confirm upstream.
    """
    def list_choices(cls) -> Dict[('SubCategoryBase', List[str])]:
        # Subclasses map each subcategory to its list of accepted label strings.
        raise NotImplementedError
    def list_available_type(cls):
        return [category for category in cls]
    def get_choices(cls, subcategory: 'SubCategoryBase') -> list:
        """Return the label choices for `subcategory`, or raise ValueError."""
        try:
            return cls.list_choices()[subcategory]
        except KeyError:
            raise ValueError(f'Unknown subcategory {subcategory}. Only {cls.list_choices().keys()} are allowed.')
    def choose_label(cls, label: str) -> 'SubCategoryBase':
        """Find the first subcategory whose choices contain `label` (substring,
        case-insensitive); raise ValueError when nothing matches."""
        normalized_label = label.lower()
        for subcategory in cls.list_choices().keys():
            choices: list = list(map((lambda label: (normalized_label in label)), cls.get_choices(subcategory)))
            if (sum(choices) > 0):
                return subcategory
        raise ValueError(f'Unknown label {label}. Only {cls.list_choices().values()} are allowed.')
def _get_trie_nodes(db, node_hash):
    """Yield every raw node reachable from `node_hash` in a binary trie.

    Nodes absent from `db` simply terminate that branch (nothing is yielded);
    an unrecognized node type raises.
    """
    if (node_hash in db):
        node = db[node_hash]
    else:
        # Missing node: stop descending this branch.
        return
    (nodetype, left_child, right_child) = parse_node(node)
    # BUG FIX: the recursive calls referenced the undefined name
    # `get_trie_nodes` (missing leading underscore), raising NameError the
    # first time any non-leaf node was traversed.
    if (nodetype == KV_TYPE):
        # KV node: parse_node stores its single child in the right slot.
        (yield node)
        (yield from _get_trie_nodes(db, right_child))
    elif (nodetype == BRANCH_TYPE):
        (yield node)
        (yield from _get_trie_nodes(db, left_child))
        (yield from _get_trie_nodes(db, right_child))
    elif (nodetype == LEAF_TYPE):
        (yield node)
    else:
        raise Exception('Invariant: unreachable code path')
class PartialTests(unittest.TestCase):
    """Tests for Pypher Partial objects: application, chaining, nesting,
    and bound-parameter bubbling into the parent Pypher instance."""

    def test_can_create_invalid_partial_and_raise_not_implemented_error(self):
        ipar = InvalidPartial()

        def c():
            str(ipar)
        self.assertRaises(NotImplementedError, c)

    def test_can_add_partial_to_pypher(self):
        p = Pypher()
        vp = ValidPartial()
        p.THIS.IS.A.apply_partial(vp)
        exp = 'THIS IS A VALID PARTIAL'
        self.assertEqual(str(p), exp)

    def test_can_create_partial_with_args_and_pass_them_to_main_pypher(self):
        # Random suffix guarantees a unique bound-parameter value per run.
        arg = 'my_arg_{}'.format(random())
        p = Pypher()
        ap = PartialWithArgs(arg)
        p.THIS.PARTIAL.HAS.apply_partial(ap)
        c = str(p)
        params = p.bound_params
        exp = 'THIS PARTIAL HAS toInteger(${})'.format(get_dict_key(params, arg))
        self.assertEqual(c, exp)

    def test_can_chain_multiple_partials(self):
        arg = 'my_arg_{}'.format(random())
        p = Pypher()
        ap = PartialWithArgs(arg)
        vp = ValidPartial()
        p.THIS.IS.A.apply_partial(vp).WITH.AN.apply_partial(ap)
        c = str(p)
        params = p.bound_params
        exp = 'THIS IS A VALID PARTIAL WITH AN toInteger(${})'.format(get_dict_key(params, arg))
        self.assertEqual(c, exp)

    def test_can_pass_partial_as_an_argument(self):
        p = Pypher()
        vp = ValidPartial()
        # Operating on the expression mutates p; the result is rendered below.
        (p.n.__name__ + vp)
        exp = 'n.`name` + VALID PARTIAL'
        self.assertEqual(str(p), exp)

    def test_can_pass_partial_as_an_argument_and_bubble_its_params(self):
        arg = 'my_arg_{}'.format(random())
        p = Pypher()
        ap = PartialWithArgs(arg)
        (p.n.__name__ + ap)
        c = str(p)
        params = p.bound_params
        exp = 'n.`name` + toInteger(${})'.format(get_dict_key(params, arg))
        self.assertEqual(c, exp)
        self.assertEqual(1, len(params))

    def test_can_nest_multiple_partials_as_an_argument_and_bubble_its_params(self):
        arg = 'my_arg_{}'.format(random())
        arg2 = 'my_arg2_{}'.format(random())
        p = Pypher()
        ap2 = PartialWithArgs(arg2)
        ap = PartialWithArgs(arg, ap2)
        (p.n.__name__ + ap)
        c = str(p)
        params = p.bound_params
        exp = 'n.`name` + toInteger(${}, toInteger(${}))'.format(get_dict_key(params, arg), get_dict_key(params, arg2))
        self.assertEqual(c, exp)
        self.assertEqual(2, len(params))
def get_graphic_matching_candidate_page_numbers_for_semantic_content_list(semantic_content_list: Sequence[SemanticContentWrapper], layout_document: Optional[LayoutDocument]=None) -> Sequence[int]:
    """Pages containing the semantic content, plus each immediately following
    page that actually exists in the layout document (graphics often land on
    the next page)."""
    candidate_pages = get_page_numbers_for_semantic_content_list(semantic_content_list)
    if not layout_document:
        return candidate_pages
    known_pages = set(get_all_page_numbers_of_layout_document(layout_document))
    extended = set(candidate_pages)
    for page in candidate_pages:
        if (page + 1) in known_pages:
            extended.add(page + 1)
    return sorted(extended)
def test_fnr_test() -> None:
    """TestFNR(lt=0.8) passes on a tiny binary dataset and renders/serializes."""
    test_dataset = pd.DataFrame({'target': ['a', 'a', 'b', 'b'], 'prediction': ['a', 'b', 'b', 'b']})
    column_mapping = ColumnMapping(pos_label='a')
    suite = TestSuite(tests=[TestFNR(lt=0.8)])
    suite.run(current_data=test_dataset, reference_data=None, column_mapping=column_mapping)
    # Surface any internal execution error instead of a silent failed suite.
    suite._inner_suite.raise_for_error()
    assert suite
    assert suite.show()
    assert suite.json()
class BracketPluginRunCommand(sublime_plugin.TextCommand):
    """Sublime Text command that executes the BracketHighlighter plugin staged
    in the module-level Payload (plugin + args set by the caller)."""
    def run(self, edit):
        try:
            # Inject the live edit token; plugins need it to modify the buffer.
            Payload.args['edit'] = edit
            Payload.plugin.run(**Payload.args)
            Payload.status = True
        except Exception:
            # Report but never propagate: a broken plugin must not break ST.
            print(('BracketHighlighter: Plugin Run Error:\n%s' % str(traceback.format_exc())))
class TestSigmoidConverter(AITTestCase):
    """AIT converter test: torch.sigmoid must lower to acc_ops.sigmoid."""
    def test_sigmoid(self):
        class Sigmoid(nn.Module):
            def forward(self, x):
                return torch.sigmoid(x)
        model = Sigmoid().cuda()
        # fp16 CUDA input matches the backend's expected precision.
        inputs = [torch.randn(1, 2, 3).half().cuda()]
        self.run_test(model, inputs, expected_ops={acc_ops.sigmoid})
class OptionSeriesPyramid3dStatesHoverHalo(Options):
    """Highcharts `states.hover.halo` options for pyramid3d series.

    NOTE(review): each getter/setter pair shares a name — these were
    @property / @<name>.setter pairs whose decorators were stripped during
    extraction; restore the decorators upstream.
    """
    def attributes(self):
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def opacity(self):
        # Highcharts default halo opacity.
        return self._config_get(0.25)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def size(self):
        # Halo radius in pixels.
        return self._config_get(10)
    def size(self, num: float):
        self._config(num, js_type=False)
class Field(DslBase):
    """Base class for elasticsearch-dsl field types.

    Handles (de)serialization of single values and list-like containers,
    multi-value (`multi=True`) and required-value (`required=True`) semantics,
    and sub-field lookup via indexing.
    """
    _type_name = 'field'
    _type_shortcut = staticmethod(construct_field)
    # Sub-fields ('fields') are themselves Field instances, keyed by name.
    _param_defs = {'fields': {'type': 'field', 'hash': True}}
    name = None        # set when the field is attached to a mapping/document
    _coerce = False    # subclasses opt in to value coercion

    def __init__(self, multi=False, required=False, *args, **kwargs):
        self._multi = multi
        self._required = required
        super().__init__(*args, **kwargs)

    def __getitem__(self, subfield):
        # Look up a sub-field by name; raises KeyError if absent.
        return self._params.get('fields', {})[subfield]

    def _serialize(self, data):
        # Hook for subclasses; identity by default.
        return data

    def _deserialize(self, data):
        # Hook for subclasses; identity by default.
        return data

    def _empty(self):
        # Value representing "no data" for a single-valued field.
        return None

    def empty(self):
        """Return the empty value respecting multi-valued fields."""
        if self._multi:
            return AttrList([])
        return self._empty()

    def serialize(self, data):
        """Serialize a value, mapping element-wise over list-like input."""
        if isinstance(data, (list, AttrList, tuple)):
            return list(map(self._serialize, data))
        return self._serialize(data)

    def deserialize(self, data):
        """Deserialize a value; None elements/values pass through unchanged."""
        if isinstance(data, (list, AttrList, tuple)):
            data = [(None if (d is None) else self._deserialize(d)) for d in data]
            return data
        if (data is None):
            return None
        return self._deserialize(data)

    def clean(self, data):
        """Deserialize and validate; raise if a required field is empty."""
        if (data is not None):
            data = self.deserialize(data)
        if ((data in (None, [], {})) and self._required):
            raise ValidationException('Value required for this field.')
        return data

    def to_dict(self):
        """Serialize to the ES mapping form: fold the type name into the body."""
        d = super().to_dict()
        (name, value) = d.popitem()
        value['type'] = name
        return value
# NOTE(review): the decorator prefix was lost in extraction; this line
# originally read @unittest.skipIf(...) — restore before running.
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class SlicePermute021FusionTestCase(unittest.TestCase):
    """AIT fusion test: dynamic_slice followed by permute021 must be fused
    into a single op in the compiled graph and still match PyTorch."""
    def __init__(self, *args, **kwargs):
        super(SlicePermute021FusionTestCase, self).__init__(*args, **kwargs)
        # Monotonic id keeps each compiled test module's dll name unique.
        self._test_id = 0
    def _test_slice_permute021_fusion(self, N, K, slice_input_shape, slice_start_indices, slice_end_indices, dims, test_name, dtype='float16'):
        X = Tensor(shape=slice_input_shape, dtype=dtype, name='input_x', is_input=True)
        slice_op = ops.dynamic_slice()
        tensor_A = slice_op(X, start_indices=slice_start_indices, end_indices=slice_end_indices)
        tensor_A._attrs['name'] = 'slice_output'
        permute_op = ops.permute021()
        Y = permute_op(tensor_A)
        Y._attrs['is_output'] = True
        Y._attrs['name'] = 'output'
        target = detect_target()
        with compile_model(Y, target, './tmp', f'{test_name}_{self._test_id}', dll_name=f'test_{self._test_id}.so') as module:
            self._test_id += 1
            # Fusion check: two tensors (input + output) but only one op left.
            sorted_graph = module.debug_sorted_graph
            self.assertEqual(len(sorted_graph), 2)
            sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
            self.assertEqual(len(sorted_ops), 1)
            # Numerical check against eager PyTorch slice + permute.
            input_pt = get_random_torch_tensor(slice_input_shape, dtype)
            slice_indices = [slice(i, j) for (i, j) in zip(slice_start_indices, slice_end_indices)]
            a_pt = input_pt[slice_indices]
            y_pt = torch.permute(a_pt, dims)
            y = get_torch_empty_tensor(y_pt.size(), dtype)
            module.run_with_tensors([input_pt], [y])
            self.assertTrue(torch.allclose(y, y_pt, atol=0.01, rtol=0.01))
    def test_slice_permute021_fusion(self):
        # Cases cover: aligned/unaligned slices, None end indices (full dim),
        # non-contiguous slice starts, and higher-rank inputs (4-D, 5-D).
        self._test_slice_permute021_fusion(N=2, K=2, slice_input_shape=(2, 2, 8), slice_start_indices=(0, 0, 4), slice_end_indices=(2, 2, 8), dims=(0, 2, 1), test_name='slice_permute021', dtype='float16')
        self._test_slice_permute021_fusion(N=2, K=2, slice_input_shape=(2, 2, 8), slice_start_indices=(0, 1, 0), slice_end_indices=(2, 3, 8), dims=(0, 2, 1), test_name='slice_permute021', dtype='float16')
        self._test_slice_permute021_fusion(N=2, K=2, slice_input_shape=[2, 9, 4], slice_start_indices=[0, 0, 1], slice_end_indices=[None, None, 3], dims=(0, 2, 1), test_name='slice_permute021', dtype='float16')
        self._test_slice_permute021_fusion(N=2, K=2, slice_input_shape=[3, 4, 120], slice_start_indices=[0, 0, 3], slice_end_indices=[None, None, 110], dims=(0, 2, 1), test_name='slice_permute021', dtype='float16')
        self._test_slice_permute021_fusion(N=2, K=2, slice_input_shape=[3, 121, 4], slice_start_indices=[0, 5, 0], slice_end_indices=[None, 115, None], dims=(0, 2, 1), test_name='slice_permute021', dtype='float16')
        self._test_slice_permute021_fusion(N=2, K=2, slice_input_shape=(2, 3, 8, 62), slice_start_indices=(0, 0, 0, 2), slice_end_indices=(2, 3, 8, 50), dims=(0, 1, 3, 2), test_name='slice_permute021', dtype='float16')
        self._test_slice_permute021_fusion(N=2, K=2, slice_input_shape=(2, 3, 4, 4, 8), slice_start_indices=(0, 0, 0, 0, 0), slice_end_indices=(2, 3, 4, 4, 2), dims=(0, 1, 2, 4, 3), test_name='slice_permute021', dtype='float16')
def handle_alternative_result_files(files_is, files_should):
    """Match actual result files against expected ones, resolving alternatives.

    Returns (additional_files, missing_files, files_mapping) where
    additional_files are expected files never matched, missing_files are
    actual files with no match at all, and files_mapping maps each matched
    actual file to its (possibly alternative) expected counterpart.
    """
    missing = set()
    mapping = {}
    matched = set()
    for actual in files_is:
        (direct_match, alternative_matches) = search_matches(actual, files_should)
        if direct_match and alternative_matches:
            # A file must match either directly or via alternatives, not both.
            print('Normal and alternative match: error in test case')
            assert False
        if direct_match:
            mapping[actual] = actual
            matched.add(actual)
        elif alternative_matches:
            alt_ids = extract_alternative_ids(alternative_matches)
            chosen = choose_alternative_id(alt_ids)
            mapping[actual] = alt_ids[chosen]
            matched |= alternative_matches
        else:
            missing.add(actual)
    return ((files_should - matched), missing, mapping)
def main():
    """Find redundant paradigm definitions in Omorfi's lexical data.

    Reads TSV lines of (lemma, paradigm) from --input or stdin, then reports
    every (lemma, paradigm) whose super-paradigm is also listed for the lemma.
    """
    ap = argparse.ArgumentParser(description="Find redundant definitions in Omorfi's lexical data")
    ap.add_argument('--input', '-i', metavar='INFILE', help='read data from INFILE')
    ap.add_argument('--output', '-o', metavar='OUTFILE', help='write output to OUTFILE')
    ap.add_argument('--verbose', '-v', action='store_true', help='Print verbosely while processing')
    ap.add_argument('--version', '-V', action='version')
    args = ap.parse_args()
    if args.input:
        input = open(args.input, 'r', newline='')
    else:
        input = stdin
    paralist = defaultdict(list)
    for line in input:
        fields = line.strip('\n').split('\t')
        if (len(fields) < 2):
            # BUG FIX: was `if len((fields > 0)):`, which raised TypeError by
            # comparing a list to an int; intent is to warn on non-empty
            # too-short lines only.
            if (len(fields) > 0):
                if args.verbose:
                    print('Skipping too short line:', line, file=stderr)
            continue
        paralist[fields[0]].append(fields[1])
    if args.input:
        # Only close files this function opened; never close stdin.
        input.close()
    redulist = list()
    for (lemma, paras) in paralist.items():
        for para in paras:
            # Redundant if the paradigm's super-paradigm is also listed.
            if (superpara[para] in paras):
                redulist.append([lemma, para])
    if args.output:
        output = open(args.output, 'w', newline='')
    else:
        output = stdout
    for lex in redulist:
        print('\t'.join(lex), file=output)
    if args.output:
        # Symmetrically, never close stdout.
        output.close()
    exit()
class MuirGlacierUnsignedTransaction(IstanbulUnsignedTransaction):
    """Unsigned transaction for the Muir Glacier fork."""
    def as_signed_transaction(self, private_key: PrivateKey, chain_id: int=None) -> MuirGlacierTransaction:
        """Sign this transaction with `private_key` and return the signed form.

        chain_id is forwarded to the signature helper (presumably for
        EIP-155 replay protection — confirm in create_transaction_signature).
        """
        (v, r, s) = create_transaction_signature(self, private_key, chain_id=chain_id)
        return MuirGlacierTransaction(nonce=self.nonce, gas_price=self.gas_price, gas=self.gas, to=self.to, value=self.value, data=self.data, v=v, r=r, s=s)
class OptionSeriesVariablepieSonificationTracksMappingLowpassResonance(Options):
    """Highcharts sonification lowpass-resonance mapping options.

    NOTE(review): duplicated method names indicate stripped
    @property / @<name>.setter decorator pairs — restore upstream.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class DjangoTemplateInstrumentation(AbstractInstrumentedModule):
    """Elastic APM instrumentation: wraps django Template.render in a span."""
    name = 'django_template'
    instrument_list = [('django.template', 'Template.render')]
    def call(self, module, method, wrapped, instance, args, kwargs):
        # Span name is the template's file name; anonymous templates get a
        # placeholder so the span is still identifiable.
        name = getattr(instance, 'name', None)
        if (not name):
            name = '<template string>'
        with capture_span(name, span_type='template', span_subtype='django', span_action='render'):
            return wrapped(*args, **kwargs)
def test_serialization():
    """SnowflakeTask and a wrapping workflow serialize with the expected SQL
    statement, dialect, config fields and interface."""
    snowflake_task = SnowflakeTask(name='flytekit.demo.snowflake_task.query', inputs=kwtypes(ds=str), task_config=SnowflakeConfig(account='snowflake', warehouse='my_warehouse', schema='my_schema', database='my_database'), query_template=query_template, output_schema_type=FlyteSchema)

    def my_wf(ds: str) -> FlyteSchema:
        return snowflake_task(ds=ds)
    default_img = Image(name='default', fqn='test', tag='tag')
    serialization_settings = SerializationSettings(project='proj', domain='dom', version='123', image_config=ImageConfig(default_image=default_img, images=[default_img]), env={})
    task_spec = get_serializable(OrderedDict(), serialization_settings, snowflake_task)
    # The rendered SQL keeps the raw-output prefix placeholder and the
    # insert-overwrite wrapper added by the plugin.
    assert ('{{ .rawOutputDataPrefix' in task_spec.template.sql.statement)
    assert ('insert overwrite directory' in task_spec.template.sql.statement)
    assert (task_spec.template.sql.dialect == task_spec.template.sql.Dialect.ANSI)
    assert ('snowflake' == task_spec.template.config['account'])
    assert ('my_warehouse' == task_spec.template.config['warehouse'])
    assert ('my_schema' == task_spec.template.config['schema'])
    assert ('my_database' == task_spec.template.config['database'])
    assert (len(task_spec.template.interface.inputs) == 1)
    assert (len(task_spec.template.interface.outputs) == 1)
    admin_workflow_spec = get_serializable(OrderedDict(), serialization_settings, my_wf)
    # The workflow output o0 must bind to node n0's 'results' output.
    assert (admin_workflow_spec.template.interface.outputs['o0'].type.schema is not None)
    assert (admin_workflow_spec.template.outputs[0].var == 'o0')
    assert (admin_workflow_spec.template.outputs[0].binding.promise.node_id == 'n0')
    assert (admin_workflow_spec.template.outputs[0].binding.promise.var == 'results')
class RequestPipeline(Pipeline):
    """Pipeline composing flow-request-responsible pipes around a handler.

    Wrapping order matters: pipes are applied in reverse so the first pipe in
    the list becomes the outermost wrapper.
    """
    __slots__ = []
    _type_suffix = 'request'

    def _get_proper_wrapper(self, pipe):
        # Pick the cheapest wrapper matching which callbacks the pipe defines.
        if pipe._pipeline_all_methods_.issuperset({'on_pipe_success', 'on_pipe_failure'}):
            rv = _wrap_flow_request_complete
        elif ('on_pipe_success' in pipe._pipeline_all_methods_):
            rv = _wrap_flow_request_success
        elif ('on_pipe_failure' in pipe._pipeline_all_methods_):
            rv = _wrap_flow_request_failure
        else:
            rv = _wrap_flow_request_basic
        return rv

    def __call__(self, f):
        """Wrap handler `f` with every flow-request-responsible Pipe."""
        if (not asyncio.iscoroutinefunction(f)):
            # Pipes expect an awaitable target.
            f = self._awaitable_wrap(f)
        for pipe in reversed(self.pipes):
            if (not isinstance(pipe, Pipe)):
                continue
            if (not pipe._is_flow_request_responsible):
                continue
            wrapper = self._get_proper_wrapper(pipe)
            # Prefer the request-specific hook when the pipe provides one.
            pipe_method = (pipe.pipe_request if ('pipe_request' in pipe._pipeline_all_methods_) else pipe.pipe)
            f = wrapper(pipe_method, pipe.on_pipe_success, pipe.on_pipe_failure, f)
        return f

    def _output_type(self):
        # Last responsible pipe with a declared output wins.
        rv = None
        for pipe in reversed(self.pipes):
            if ((not pipe._is_flow_request_responsible) or (pipe.output is None)):
                continue
            rv = pipe.output
        return rv
class DocsDataStore(object):
    """Tiny key/value store that mirrors every write to an append-only log.

    NOTE(review): `_data` is a class-level dict, so all instances share state.
    NOTE(review): `DocsDataStore.get('filename')` calls `get` on the class with
    a single argument, which only binds correctly if `get` was originally a
    @classmethod/@staticmethod — the decorator was likely lost in extraction.
    """
    _data = {}
    def set(self, key, value):
        self._data[key] = value
        # Append 'docs_data#<key>\n<value>\n\n' to the configured log file.
        handle = open(DocsDataStore.get('filename'), 'a')
        handle.write((((('docs_data#' + key) + '\n') + value) + '\n\n'))
        handle.close()
    def get(self, key):
        return self._data[key]
# NOTE(review): the `@pytest.mark` decorator prefixes were lost in extraction;
# both lines below were @pytest.mark.parametrize(...) — restore before running.
.parametrize('option,values', [('flash_opacity', ['-1', '2', 'foo', (- 1), 2]), ('default_opacity', ['-1', '2', 'foo', (- 1), 2]), ('time', ['0', '-1', 'foo', 0, (- 1)]), ('ntimepoints', ['0', '-1', 'foo', 0, (- 1)]), ('simple', ['foo', '10']), ('flash_lone_windows', ['foo', 'true']), ('flash_fullscreen', ['foo', 3]), ('rules', lazy_fixture('invalid_rules'))])
.parametrize('input_type', ['cli', 'file'])
def test_invalid_param(option: str, values: list, input_type: str, blank_cli_options: dict, default_config: dict) -> None:
    """Every invalid value must make merge_config_sources raise ConfigLoadError,
    whether supplied via CLI or config file."""
    if ((input_type == 'cli') and (option == 'rules')):
        # Rules cannot be passed on the CLI; skip that combination.
        return
    for value in values:
        # Fresh copies: merge_config_sources may mutate its arguments.
        defaults = deepcopy(default_config)
        blanks = deepcopy(blank_cli_options)
        with pytest.raises(ConfigLoadError):
            if (input_type == 'cli'):
                blanks[option] = value
                merge_config_sources(cli_options=blanks, user_config={}, default_config=defaults)
            else:
                config = {option: value}
                merge_config_sources(cli_options=blanks, user_config=config, default_config=defaults)
def extractHinjakuhonyakuCom(item):
    """Map a hinjakuhonyaku.com feed item to a release message via its tags.

    Returns None for non-chapter/preview posts, False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    # (tag, series name, translation type) triples.
    # NOTE(review): "he tale of the teapot hero's revenge" looks like a dropped
    # leading 'T' — but this string is emitted at runtime as the series name,
    # so confirm against the release database before fixing.
    tagmap = [('hellmode', 'hellmode', 'translated'), ('the sole monster tamer in the world', 'the sole monster tamer in the world', 'translated'), ("the tale of the teapot hero's revenge", "he tale of the teapot hero's revenge", 'translated'), ('rrg', 'my reality is a romance game', 'translated'), ('my reality is a romance game', 'my reality is a romance game', 'translated'), ('rose princess of hellrage', 'rose princess of hellrage', 'translated'), ("a maiden's unwanted heroic epic", "a maiden's unwanted heroic epic", 'translated'), ('transition to another world, landmines included', 'transition to another world, landmines included', 'translated'), ('warlords of sigrdrifa', 'Warlords of Sigrdrifa', 'translated'), ('ws', 'Warlords of Sigrdrifa', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class _RegStart():
    """Accessor for the START register of a generated register map.

    NOTE(review): the two `val` methods were a @property getter plus
    @val.setter pair whose decorators were stripped in extraction.
    """
    def __init__(self, rmap):
        self._rmap = rmap
    def val(self):
        # Write-only field: reads always report 0.
        return 0
    def val(self, val):
        # Read-modify-write: clear the field, then OR in the new value.
        rdata = self._rmap._if.read(self._rmap.START_ADDR)
        rdata = (rdata & (~ (self._rmap.START_VAL_MSK << self._rmap.START_VAL_POS)))
        rdata = (rdata | (val << self._rmap.START_VAL_POS))
        self._rmap._if.write(self._rmap.START_ADDR, rdata)
class BigQueryTask(AsyncAgentExecutorMixin, SQLTask[BigQueryConfig]):
    """Flytekit task that runs a SQL query as a BigQuery job."""
    # Registered Flyte task type handled by the BigQuery agent.
    _TASK_TYPE = 'bigquery_query_job_task'

    def __init__(self, name: str, query_template: str, task_config: Optional[BigQueryConfig], inputs: Optional[Dict[(str, Type)]]=None, output_structured_dataset_type: Optional[Type[StructuredDataset]]=None, **kwargs):
        outputs = None
        if (output_structured_dataset_type is not None):
            # Query results surface as a single 'results' structured dataset.
            outputs = {'results': output_structured_dataset_type}
        super().__init__(name=name, task_config=task_config, query_template=query_template, inputs=inputs, outputs=outputs, task_type=self._TASK_TYPE, **kwargs)
        self._output_structured_dataset_type = output_structured_dataset_type

    def get_custom(self, settings: SerializationSettings) -> Dict[(str, Any)]:
        """Serialize BigQuery job config (location, project, query options)."""
        config = {'Location': self.task_config.Location, 'ProjectID': self.task_config.ProjectID}
        if (self.task_config.QueryJobConfig is not None):
            # Flatten the google-cloud QueryJobConfig into the custom dict.
            config.update(self.task_config.QueryJobConfig.to_api_repr()['query'])
        s = Struct()
        s.update(config)
        return json_format.MessageToDict(s)

    def get_sql(self, settings: SerializationSettings) -> Optional[_task_model.Sql]:
        """Expose the query template as an ANSI-dialect Sql model."""
        sql = _task_model.Sql(statement=self.query_template, dialect=_task_model.Sql.Dialect.ANSI)
        return sql
class BaseWeb3():
    """Shared base of the sync/async Web3 entry points (web3.py).

    NOTE(review): extraction stripped decorators throughout this class: the
    bare `(to_bytes)`-style lines are the tails of stacked
    @staticmethod/@wraps(...) (or @combomethod) decorators; `api`, the first
    `strict_bytes_type_checking` and `pm` were @property;
    `_bytes_type_checking.setter` was @strict_bytes_type_checking.setter;
    `_to_return_value(HexBytes)` was @apply_to_return_value(HexBytes);
    `normalize_values`/`solidity_keccak` look like @classmethods. Restore
    against the upstream web3.py source before using this file.
    """
    _strict_bytes_type_checking = True
    # Provider classes re-exported for convenience (w3.HTTPProvider etc.).
    HTTPProvider = HTTPProvider
    IPCProvider = IPCProvider
    EthereumTesterProvider = EthereumTesterProvider
    WebsocketProvider = WebsocketProvider
    AsyncHTTPProvider = AsyncHTTPProvider
    RequestManager = DefaultRequestManager
    # Populated with sync or async module instances depending on the subclass.
    eth: Union[(Eth, AsyncEth)]
    net: Union[(Net, AsyncNet)]
    geth: Union[(Geth, AsyncGeth)]
    # --- eth-utils conversion helpers re-exposed as static methods ---
    (to_bytes)
    def to_bytes(primitive: Primitives=None, hexstr: HexStr=None, text: str=None) -> bytes:
        return to_bytes(primitive, hexstr, text)
    (to_int)
    def to_int(primitive: Primitives=None, hexstr: HexStr=None, text: str=None) -> int:
        return to_int(primitive, hexstr, text)
    (to_hex)
    def to_hex(primitive: Primitives=None, hexstr: HexStr=None, text: str=None) -> HexStr:
        return to_hex(primitive, hexstr, text)
    (to_text)
    def to_text(primitive: Primitives=None, hexstr: HexStr=None, text: str=None) -> str:
        return to_text(primitive, hexstr, text)
    (to_json)
    def to_json(obj: Dict[(Any, Any)]) -> str:
        return to_json(obj)
    # --- currency helpers ---
    (to_wei)
    def to_wei(number: Union[(int, float, str, decimal.Decimal)], unit: str) -> Wei:
        return cast(Wei, to_wei(number, unit))
    (from_wei)
    def from_wei(number: int, unit: str) -> Union[(int, decimal.Decimal)]:
        return from_wei(number, unit)
    # --- address helpers ---
    (is_address)
    def is_address(value: Any) -> bool:
        return is_address(value)
    (is_checksum_address)
    def is_checksum_address(value: Any) -> bool:
        return is_checksum_address(value)
    (to_checksum_address)
    def to_checksum_address(value: Union[(AnyAddress, str, bytes)]) -> ChecksumAddress:
        return to_checksum_address(value)
    def api(self) -> str:
        # Installed web3 package version string.
        from web3 import __version__
        return __version__
    def strict_bytes_type_checking(self) -> bool:
        return self._strict_bytes_type_checking
    _bytes_type_checking.setter
    def strict_bytes_type_checking(self, strict_bytes_type_check: bool) -> None:
        # Swap the ABI codec between strict and lenient bytes handling.
        self.codec = (ABICodec(build_strict_registry()) if strict_bytes_type_check else ABICodec(build_non_strict_registry()))
        self._strict_bytes_type_checking = strict_bytes_type_check
    _to_return_value(HexBytes)
    def keccak(primitive: Optional[Primitives]=None, text: Optional[str]=None, hexstr: Optional[HexStr]=None) -> bytes:
        """Keccak-256 of bytes/text/hex input; rejects ambiguous first args."""
        if isinstance(primitive, (bytes, int, type(None))):
            input_bytes = to_bytes(primitive, hexstr=hexstr, text=text)
            return eth_utils_keccak(input_bytes)
        raise TypeError(f"You called keccak with first arg {primitive!r} and keywords {{'text': {text!r}, 'hexstr': {hexstr!r}}}. You must call it with one of these approaches: keccak(text='txt'), keccak(hexstr='0x747874'), keccak(b'txt'), or keccak(0x747874).")
    def normalize_values(cls, w3: 'BaseWeb3', abi_types: List[TypeStr], values: List[Any]) -> List[Any]:
        # Resolve ENS names in address-typed values before encoding.
        return map_abi_data([abi_ens_resolver(w3)], abi_types, values)
    def solidity_keccak(cls, abi_types: List[TypeStr], values: List[Any]) -> bytes:
        """Keccak of tightly-packed ABI-encoded values (Solidity semantics)."""
        if (len(abi_types) != len(values)):
            raise ValueError(f'Length mismatch between provided abi types and values. Got {len(abi_types)} types and {len(values)} values.')
        # Combomethod behavior: usable from the class (no ENS) or an instance.
        if isinstance(cls, type):
            w3 = None
        else:
            w3 = cls
        normalized_values = cls.normalize_values(w3, abi_types, values)
        hex_string = add_0x_prefix(HexStr(''.join((remove_0x_prefix(hex_encode_abi_type(abi_type, value)) for (abi_type, value) in zip(abi_types, normalized_values)))))
        return cls.keccak(hexstr=hex_string)
    def attach_modules(self, modules: Optional[Dict[(str, Union[(Type[Module], Sequence[Any])])]]) -> None:
        """Attach external module classes (possibly nested) to this instance."""
        _attach_modules(self, modules)
    def is_encodable(self, _type: TypeStr, value: Any) -> bool:
        return self.codec.is_encodable(_type, value)
    def pm(self) -> 'PM':
        # Package management is opt-in; see enable_unstable_package_management_api.
        if hasattr(self, '_pm'):
            return self._pm
        else:
            raise AttributeError('The Package Management feature is disabled by default until its API stabilizes. 
To use these features, please enable them by running `w3.enable_unstable_package_management_api()` and try again.')
    def enable_unstable_package_management_api(self) -> None:
        """Opt in to the deprecated ethPM module (idempotent)."""
        if (not hasattr(self, '_pm')):
            warnings.warn('The ``ethPM`` module is no longer being maintained and will be deprecated with ``web3.py`` version 7', UserWarning)
            from web3.pm import PM
            self.attach_modules({'_pm': PM})
class OptionPlotoptionsBubbleSonificationContexttracksPointgrouping(Options):
    """Highcharts sonification point-grouping options for bubble context tracks.

    NOTE(review): duplicated method names indicate stripped
    @property / @<name>.setter decorator pairs — restore upstream.
    """
    def algorithm(self):
        return self._config_get('minmax')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Grouping window in milliseconds.
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Point property the grouping algorithm operates on.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
class Micaps17Data(Micaps):
    """Reader for MICAPS type-17 (diamond 17) station-information files."""

    def __init__(self, filename, encoding='GBK'):
        super().__init__(filename, encoding=encoding)
        self.stationsum = None  # station count declared in the header
        self.stations = []      # rows: [code, lat, lon, height, iclass, infosum, info[0]]
        self.ReadFromFile()

    def ReadFromFile(self):
        """Parse the whitespace-separated file into self.stations."""
        try:
            all_the_text = self.Read(self.filename, self.encoding)
            if (all_the_text is None):
                print(('Micaps 17 file error: ' + self.filename))
                return None
            contents = re.split('[\\s]+', all_the_text)
            if (len(contents) < 4):
                return
            self.dataflag = contents[0].strip()
            self.style = contents[1].strip()
            self.title = contents[2].strip()
            self.stationsum = int(contents[3].strip())
            if ((self.dataflag == 'diamond') and (self.style == '17')):
                begin = 4
                # `step` accumulates the extra info tokens of previous rows so
                # each record's base offset stays correct despite variable width.
                step = 0
                for i in range(self.stationsum):
                    k = ((step + begin) + (7 * i))
                    code = contents[(k + 0)].strip()
                    lat = self.ChangeLL(contents[(k + 1)].strip())
                    lon = self.ChangeLL(contents[(k + 2)].strip())
                    height = float(contents[(k + 3)].strip())
                    iclass = int(contents[(k + 4)].strip())
                    infosum = int(contents[(k + 5)].strip())
                    info = []
                    for j in range(infosum):
                        info.append(contents[((k + 6) + j)].strip())
                    step += (infosum - 1)
                    self.stations.append([code, lat, lon, height, iclass, infosum, info[0]])
        except Exception as err:
            print('{0}{1}-{2}'.format(self.filename, err, datetime.now()))

    # NOTE(review): called as self.ChangeLL(token) but defined without `self`;
    # it was presumably a @staticmethod whose decorator was stripped — as
    # written, the instance call would bind `self` to `lonlat`. Confirm upstream.
    def ChangeLL(lonlat):
        """Convert a ddmm.m-style coordinate to decimal degrees; values with a
        decimal point are treated as already-decimal degrees."""
        if ('.' in lonlat):
            return float(lonlat)
        else:
            just = math.floor((float(lonlat) / 100))
            return (just + ((((float(lonlat) / 100) - just) * 100) / 60.0))
class OptionPlotoptionsAreaMarkerStatesSelect(Options):
    """Highcharts `marker.states.select` options for area series.

    NOTE(review): duplicated method names indicate stripped
    @property / @<name>.setter decorator pairs — restore upstream.
    """
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def fillColor(self):
        return self._config_get('#cccccc')
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('#000000')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(None)
    def radius(self, num: float):
        self._config(num, js_type=False)
def normalize_hue(color1: Vector, color2: Optional[Vector], index: int, offset: float, hue: str, fallback: Optional[float]) -> Tuple[(Vector, float)]:
    """Normalize the hue channel at `index` for interpolation between two colors.

    `hue` selects the arc-adjustment strategy ('specified' leaves hues alone);
    `offset` is carried through and may be updated by the adjuster; `fallback`
    substitutes for a NaN hue in the first color when available.
    """
    if (hue == 'specified'):
        # No normalization requested: hand back the second color unchanged.
        return ((color2 or color1), offset)
    if (color2 is None):
        # Single-color case: just constrain the hue into range.
        color1[index] = util.constrain_hue(color1[index])
        return (color1, offset)
    adjusters = {
        'shorter': adjust_shorter,
        'longer': adjust_longer,
        'increasing': adjust_increase,
        'decreasing': adjust_decrease,
    }
    if hue not in adjusters:
        raise ValueError("Unknown hue adjuster '{}'".format(hue))
    adjuster = adjusters[hue]
    first = (color1[index] + offset)
    second = (util.constrain_hue(color2[index]) + offset)
    if (not math.isnan(second)):
        if (not math.isnan(first)):
            (second, offset) = adjuster(first, second, offset)
        elif (fallback is not None):
            # First hue undefined: adjust relative to the fallback hue instead.
            (second, offset) = adjuster(fallback, second, offset)
    color2[index] = second
    return (color2, offset)
class ExceptionMiddleware():
    """Starlette-style ASGI middleware routing exceptions to handlers.

    NOTE(review): this block was damaged in extraction — several identifiers
    and string literals are truncated (marked inline). Restore against the
    upstream starlette source before use.
    """
    def __init__(self, app: ASGIApp, handlers: typing.Optional[typing.Mapping[(typing.Any, typing.Callable[([Request, Exception], Response)])]]=None, debug: bool=False) -> None:
        self.app = app
        self.debug = debug
        self._status_handlers: StatusHandlers = {}
        # NOTE(review): truncated — originally
        # {HTTPException: self.http_exception, WebSocketException: self.websocket_exception}.
        self._exception_handlers: ExceptionHandlers = {HTTPException: self.
WebSocketException: self.websocket_exception}
        if (handlers is not None):
            for (key, value) in handlers.items():
                self.add_exception_handler(key, value)
    def add_exception_handler(self, exc_class_or_status_code: typing.Union[(int, typing.Type[Exception])], handler: typing.Callable[([Request, Exception], Response)]) -> None:
        """Register a handler keyed by status code or exception class."""
        if isinstance(exc_class_or_status_code, int):
            self._status_handlers[exc_class_or_status_code] = handler
        else:
            assert issubclass(exc_class_or_status_code, Exception)
            self._exception_handlers[exc_class_or_status_code] = handler
    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        # NOTE(review): truncated — originally ('http', 'websocket').
        if (scope['type'] not in (' 'websocket')):
            (await self.app(scope, receive, send))
            return
        # Expose handlers downstream for wrap_app_handling_exceptions.
        scope['starlette.exception_handlers'] = (self._exception_handlers, self._status_handlers)
        conn: typing.Union[(Request, WebSocket)]
        # NOTE(review): truncated — originally scope['type'] == 'http'.
        if (scope['type'] == '
            conn = Request(scope, receive, send)
        else:
            conn = WebSocket(scope, receive, send)
        (await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send))
    # NOTE(review): truncated — originally
    # def http_exception(self, request: Request, exc: Exception) -> Response:
    def request: Request, exc: Exception) -> Response:
        assert isinstance(exc, HTTPException)
        if (exc.status_code in {204, 304}):
            # No-body status codes must not carry content.
            return Response(status_code=exc.status_code, headers=exc.headers)
        return PlainTextResponse(exc.detail, status_code=exc.status_code, headers=exc.headers)
    async def websocket_exception(self, websocket: WebSocket, exc: Exception) -> None:
        assert isinstance(exc, WebSocketException)
        (await websocket.close(code=exc.code, reason=exc.reason))
def extractKuromin(item):
    """Map a Kuromin feed item to a release message.

    Returns None for preview/non-chapter posts, False when the title does not
    belong to the tracked series.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    has_release_marker = bool(chp or vol or frag)
    if (not has_release_marker) or ('preview' in item['title'].lower()):
        return None
    series = 'Potatoes are the only thing thats needed in this world!'
    if item['title'].startswith(series + ' Chapter'):
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
# NOTE(review): the `@pytest.mark.parametrize(` prefix was lost in extraction;
# restore it before running.
('type_, path', [param(FileConfigSource, 'file://tests/test_apps/config_source_test/dir', id='FileConfigSource'), param(ImportlibResourcesConfigSource, 'pkg://tests.test_apps.config_source_test.dir', id='ImportlibResourcesConfigSource'), param(StructuredConfigSource, 'structured://tests.test_apps.config_source_test.structured', id='StructuredConfigSource')])
class TestCoreConfigSources(ConfigSourceTestSuite):
    """Runs the shared ConfigSourceTestSuite against each core config source."""
    pass
# NOTE(review): the `@mock.patch(` prefixes were lost in extraction; each of
# the five lines below was a @mock.patch decorator — restore before running.
('aea.cli.remove.shutil.rmtree')
('aea.cli.remove.Path.exists', return_value=True)
('aea.cli.remove.ItemRemoveHelper.get_component_directory')
('aea.cli.remove.load_item_config')
('aea.cli.remove.try_to_load_agent_config')
class RemoveItemBadConfigurationTestCase(TestCase):
    """remove_item must surface a ClickException on mismatched configuration."""
    def test_remove_item_item_folder_not_exists(self, *mocks):
        public_id = PublicIdMock.from_str('author/name:0.1.0')
        with pytest.raises(ClickException, match='Error loading .* configuration, author/name do not match: .*'):
            remove_item(ContextMock(protocols=[public_id]), 'protocol', public_id)
def timeStringToSeconds(tstr):
    """Convert a clock string 'H:M' or 'H:M:S' to total seconds.

    Returns None for malformed input, hours outside 0..24, or
    minutes/seconds outside 0..59 (mirrors the original's validation).
    """
    colons = tstr.count(':')
    if colons not in (1, 2):
        return None
    try:
        parts = tstr.split(':')
        h = int(parts[0])
        m = int(parts[1])
        if h > 24 or h < 0 or m < 0 or m > 59:
            return None
        total = h * 3600 + m * 60
        if colons == 2:
            s = int(parts[2])
            if s < 0 or s > 59:
                return None
            total += s
    except ValueError:
        # Narrowed from a bare `except:` — only non-numeric fields can fail here.
        return None
    return total
def check(f):
    """Probabilistic consistency check of the values `f` over DOMAIN,
    evaluated barycentrically at a random field element."""
    r = randint(0, MODULUS)
    r_half = pow(r, WIDTH // 2, MODULUS)
    acc = 0
    for i in range(WIDTH):
        numerator = f[i] * (((-1) ** i) * r_half - 1)
        denominator = DOMAIN[(i * (WIDTH // 2 - 1)) % WIDTH] * (r - DOMAIN[i])
        acc += primefield.div(numerator, denominator)
    return acc % MODULUS
def stream_dbfile(db: Any, name: str):
    """Stream a stored upload back to the client.

    Resolves the table/field from the encoded `name`, then raises the
    appropriate HTTP response (file or stream) — or aborts 403/404.
    """
    match = _re_dbstream.match(name)
    if match is None:
        abort(404)
    table_name = match.group('table')
    field_name = match.group('field')
    try:
        field = db[table_name][field_name]
    except AttributeError:
        abort(404)
    try:
        filename, path_or_stream = field.retrieve(name, nameonly=True)
    except NotAuthorizedException:
        abort(403)
    except (NotFoundException, IOError):
        abort(404)
    # A string means a filesystem path; otherwise it is an open stream.
    if isinstance(path_or_stream, str):
        raise HTTPFile(
            path_or_stream,
            headers=current.response.headers,
            cookies=current.response.cookies)
    raise HTTPIO(
        path_or_stream,
        headers=current.response.headers,
        cookies=current.response.cookies)
def _config_to_hf(cls, curated_config: FalconConfig) -> Dict[str, Any]:
    """Convert a curated Falcon config to Hugging Face format and merge in
    the module-specific extras for the calling class.

    NOTE(review): first parameter is `cls` — this is presumably a
    classmethod whose decorator is not visible here; confirm upstream.
    """
    hf_config = config_to_hf(curated_config, [key for key, _ in HF_CONFIG_KEYS_FALCON])
    if issubclass(cls, DecoderModule):
        return HF_SPECIFIC_CONFIG_DECODER.merge(hf_config)
    return HF_SPECIFIC_CONFIG_CAUSAL_LM.merge(hf_config)
class FolderInput(BaseInput):
    """Input widget mapping for click.Path parameters that accept directories."""

    def is_supported(self):
        """Only directory-capable click.Path parameters are handled here."""
        param_type = self.param.type
        return isinstance(param_type, click.Path) and bool(param_type.dir_okay)

    def type_attrs(self):
        """Build the HTML-ish attribute dict describing this input."""
        attrs = {}
        # Existing paths are read ('r'); to-be-created paths are written ('w').
        mode = 'r' if self.param.type.exists else 'w'
        attrs['click_type'] = f'path[{mode}]'
        if self.param.type.exists:
            # Existing directories are uploaded as a zip archive.
            attrs['accept'] = 'application/zip'
            attrs['type'] = 'file'
        else:
            attrs['type'] = 'hidden'
        return attrs
# NOTE(review): `_mode()` looks like the residue of a stripped decorator,
# most likely `@torch.inference_mode()` — confirm against upstream.
_mode()
def huggingface_chat_generate_stream(model: AutoModelForCausalLM, tokenizer: AutoTokenizer, params, device, context_len=4096):
    """Stream chat-completion text from a Hugging Face causal LM.

    Runs `model.generate` on a worker thread and yields the accumulated
    output string each time the streamer produces a new text chunk.
    """
    prompt = params['prompt']
    temperature = float(params.get('temperature', 0.7))
    top_p = float(params.get('top_p', 1.0))  # NOTE(review): parsed but never passed to generate()
    echo = params.get('echo', False)
    max_new_tokens = int(params.get('max_new_tokens', 2048))
    input_ids = tokenizer(prompt).input_ids
    if model.config.is_encoder_decoder:
        max_src_len = context_len
    else:
        # Reserve room in the context window for the generated tokens.
        max_src_len = ((context_len - max_new_tokens) - 1)
    # Keep only the most recent tokens that fit.
    input_ids = input_ids[(- max_src_len):]
    input_echo_len = len(input_ids)  # NOTE(review): computed but unused
    input_ids = torch.as_tensor([input_ids], device=device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=(not echo), skip_special_tokens=True)
    generate_kwargs = {'input_ids': input_ids, 'max_length': context_len, 'temperature': temperature, 'streamer': streamer}
    # Generation happens on a background thread; the streamer feeds this generator.
    thread = Thread(target=model.generate, kwargs=generate_kwargs)
    thread.start()
    out = ''
    for new_text in streamer:
        out += new_text
        (yield out)
class serienRecFileListScreen(serienRecBaseScreen, Screen, HelpableScreen):
    """Directory-chooser screen for the SerienRecorder Enigma2 plugin.

    Lets the user browse, create, delete and select a recording directory;
    closes with the chosen path ('' means: use the global default).
    """

    def __init__(self, session, initDir, title, seriesName=''):
        serienRecBaseScreen.__init__(self, session)
        Screen.__init__(self, session)
        HelpableScreen.__init__(self)
        self.fullpath = ''          # selected path returned via close()
        self.initDir = initDir      # directory the file list starts in
        self.title = title
        self.seriesNames = seriesName  # pre-filled name for "new folder"
        self.skin = None
        # Key bindings with German help texts (shown in the help screen).
        self['actions'] = HelpableActionMap(self, 'SerienRecorderActions', {
            'cancel': (self.keyCancel, 'Zuruck zur vorherigen Ansicht'),
            'left': (self.keyLeft, 'Zur vorherigen Seite blattern'),
            'right': (self.keyRight, 'Zur nachsten Seite blattern'),
            'up': (self.keyUp, 'Eine Zeile nach oben'),
            'down': (self.keyDown, 'Eine Zeile nach unten'),
            'ok': (self.keyOk, 'In das ausgewahlte Verzeichnis wechseln'),
            'green': (self.keyGreen, 'Ausgewahltes Verzeichnis ubernehmen'),
            'red': (self.keyRed, 'Ausgewahltes Verzeichnis loschen'),
            'yellow': (self.keyYellow, 'Auf globales Aufnahmeverzeichnis zurucksetzen'),
            'blue': (self.keyBlue, 'Neues Verzeichnis anlegen')}, (- 1))
        self.helpList[0][2].sort()
        self['helpActions'] = ActionMap(['SerienRecorderActions'], {
            'displayHelp': self.showHelp,
            'displayHelp_long': self.showManual}, 0)
        self.setupSkin()
        if config.plugins.serienRec.showAllButtons.value:
            setMenuTexts(self)
        self.updateFile()
        self.onLayoutFinish.append(self.setSkinProperties)

    def callHelpAction(self, *args):
        HelpableScreen.callHelpAction(self, *args)

    def setSkinProperties(self):
        # NOTE(review): `super(self.__class__, self)` breaks under further
        # subclassing; kept as-is since this is a doc-only pass.
        super(self.__class__, self).setSkinProperties()
        super(self.__class__, self).startDisplayTimer()

    def setupSkin(self):
        """Build the widget tree: file list, labels and button texts."""
        self.skin = None
        InitSkin(self)
        # Directory-only browser rooted at initDir.
        self['menu_list'] = FileList(self.initDir, inhibitMounts=False, inhibitDirs=False, showMountpoints=False, showFiles=False)
        self['menu_list'].show()
        self['title'].hide()
        self['path'].show()
        self['text_red'].setText('Verzeichnis loschen')
        self['text_green'].setText('Speichern')
        self['text_ok'].setText('Auswahl')
        self['text_yellow'].setText('Zurucksetzen')
        self['text_blue'].setText('Verzeichnis anlegen')
        self.num_bt_text = ([buttonText_na, buttonText_na, 'Abbrechen'],
                            [buttonText_na, buttonText_na, buttonText_na],
                            [buttonText_na, buttonText_na, buttonText_na],
                            [buttonText_na, buttonText_na, 'Hilfe'],
                            [buttonText_na, buttonText_na, buttonText_na])
        # NOTE(review): original indentation was lost in this copy; the block
        # below is assumed to belong to the `if` (confirm against upstream).
        if (not config.plugins.serienRec.showAllButtons.value):
            self['text_0'].setText('Abbrechen')
            self['text_1'].setText('About')
            self['bt_red'].show()
            self['bt_green'].show()
            self['bt_ok'].show()
            self['bt_yellow'].show()
            self['bt_blue'].show()
            self['bt_exit'].show()
            self['bt_text'].show()
            self['text_red'].show()
            self['text_green'].show()
            self['text_ok'].show()
            self['text_yellow'].show()
            self['text_blue'].show()
            self['text_0'].show()
            self['text_1'].show()
            self['text_2'].show()
            self['text_3'].show()
            self['text_4'].show()

    def keyCancel(self):
        # Close without a selection.
        self.close(None)

    def keyRed(self):
        """Ask for confirmation before deleting the highlighted directory."""
        currentDirectory = self['menu_list'].getSelection()[0]
        self.session.openWithCallback(self.confirmDeleteCallback, MessageBox, ("Das Verzeichnis '%s' wirklich loschen?" % currentDirectory), MessageBox.TYPE_YESNO, default=False)

    def confirmDeleteCallback(self, answer):
        if answer:
            directoryToBeDeleted = self['menu_list'].getSelection()[0]
            try:
                # Only removes empty directories; failures are reported below.
                os.rmdir(directoryToBeDeleted)
                self['menu_list'].refresh()
                self.updateFile()
            except OSError as error:
                print(("Das Verzeichnis '%s' konnte nicht geloscht werden. \n%s" % (directoryToBeDeleted, str(error))))
                self.session.open(MessageBox, ('Das Verzeichnis %s konnte nicht geloscht werden.\n\n%s' % (directoryToBeDeleted, error)), MessageBox.TYPE_INFO, timeout=10)

    def keyGreen(self):
        """Accept the current directory ('' if it equals the global default)."""
        currentDirectory = self['menu_list'].getCurrentDirectory()
        if currentDirectory.endswith('/'):
            self.fullpath = currentDirectory
        else:
            self.fullpath = ('%s/' % currentDirectory)
        if (self.fullpath == config.plugins.serienRec.savetopath.value):
            self.fullpath = ''
        self.close(self.fullpath)

    def keyYellow(self):
        # Reset to the global recording directory.
        self.fullpath = ''
        self.close(self.fullpath)

    def keyBlue(self):
        """Open the on-screen keyboard to name a new directory."""
        self.session.openWithCallback(self.newFolderNameCallback, NTIVirtualKeyBoard, title='Verzeichnisname eingeben:', text=self.seriesNames)

    def newFolderNameCallback(self, path_name):
        if path_name:
            path_name = ('%s/%s/' % (self['menu_list'].getCurrentDirectory(), path_name))
            print(path_name)
            if (not os.path.exists(path_name)):
                try:
                    os.makedirs(path_name)
                except:
                    # NOTE(review): failures are silently ignored here.
                    pass
                self['menu_list'].refresh()
                self.updateFile()

    def keyUp(self):
        self['menu_list'].up()
        self.updateFile()

    def keyDown(self):
        self['menu_list'].down()
        self.updateFile()

    def keyLeft(self):
        self['menu_list'].pageUp()
        self.updateFile()

    def keyRight(self):
        self['menu_list'].pageDown()
        self.updateFile()

    def keyOk(self):
        # Descend into the highlighted directory when possible.
        if self['menu_list'].canDescent():
            self['menu_list'].descent()
            self.updateFile()

    def updateFile(self):
        """Refresh the path label with the current directory."""
        currentFolder = self['menu_list'].getCurrentDirectory()
        self['path'].setText(('Auswahl:\n%s' % currentFolder))

    def __onClose(self):
        self.stopDisplayTimer()
def evaluate(base, string, gamut_map):
    """Parse a color preview expression (optionally "foreground background ratio").

    Returns the list of colors to preview: the gamut-fitted foreground, the
    (forced-opaque) background, and — for translucent foregrounds — composites
    over the background, white and black. Returns [] on any error.
    """
    colors = []
    try:
        color = string.strip()
        second = None
        ratio = None
        # First pass: leading color; `more` signals trailing input remains.
        (first, ratio, more) = parse_color(base, color)
        if (first and (more is not None)):
            if (more is False):
                # Trailing garbage: reject the whole expression.
                first = None
            else:
                # Second pass: parse the background after the first token.
                (second, ratio, more) = parse_color(base, color, start=first.end, second=True)
                if ((not second) or (more is False)):
                    first = None
                    second = None
        if first:
            first = first.color
        if second:
            second = second.color
        elif first:
            # No explicit background: pick black or white by luminance.
            second = base(('white' if (first.luminance() < 0.5) else 'black'))
        if first:
            colors.append(first.fit('srgb', method=gamut_map))
            if second:
                if (second[(- 1)] < 1.0):
                    # Backgrounds are always previewed fully opaque.
                    second[(- 1)] = 1.0
                colors.append(second.fit('srgb', method=gamut_map))
            if ratio:
                if (first[(- 1)] < 1.0):
                    # Flatten a translucent foreground before contrast math.
                    first = first.compose(second, space='srgb', out_space=first.space())
                # min-contrast adjustment is performed in HWB space.
                hwb_fg = first.convert('hwb').clip()
                hwb_bg = second.convert('hwb').clip()
                first.update(hwb_fg)
                second.update(hwb_bg)
                colormod = util.import_color('ColorHelper.custom.st_colormod.Color')
                color = colormod('color({} min-contrast({} {}))'.format(hwb_fg.to_string(**util.FULL_PREC), hwb_bg.to_string(**util.FULL_PREC), ratio))
                first.update(base(color))
                colors[0] = first
            if (first[(- 1)] < 1.0):
                # Translucent foreground: preview over bg, white and black.
                colors.append(first.compose(second, space='srgb', out_space=first.space()))
                colors.append(first.compose('white', space='srgb', out_space=first.space()))
                colors.append(first.compose('black', space='srgb', out_space=first.space()))
            else:
                colors.append(first)
    except Exception as e:
        print(e)
        colors = []
    return colors
def log_mask_scat_gsm(num_max_items=4087):
    """Build the SCAT GSM log-mask config covering L1/RR/GPRS plus DSDS codes."""
    gsm_log_codes = (
        diag_log_code_gsm.LOG_GSM_L1_FCCH_ACQUISITION_C,
        diag_log_code_gsm.LOG_GSM_L1_SCH_ACQUISITION_C,
        diag_log_code_gsm.LOG_GSM_L1_NEW_BURST_METRICS_C,
        diag_log_code_gsm.LOG_GSM_L1_BURST_METRICS_C,
        diag_log_code_gsm.LOG_GSM_L1_SCELL_BA_LIST_C,
        diag_log_code_gsm.LOG_GSM_L1_SCELL_AUX_MEASUREMENTS_C,
        diag_log_code_gsm.LOG_GSM_L1_NCELL_AUX_MEASUREMENTS_C,
        diag_log_code_gsm.LOG_GSM_RR_SIGNALING_MESSAGE_C,
        diag_log_code_gsm.LOG_GSM_RR_CELL_INFORMATION_C,
        diag_log_code_gsm.LOG_GPRS_RR_PACKET_SI_1_C,
        diag_log_code_gsm.LOG_GPRS_RR_PACKET_SI_2_C,
        diag_log_code_gsm.LOG_GPRS_RR_PACKET_SI_3_C,
        diag_log_code_gsm.LOG_GPRS_MAC_SIGNALING_MESSACE_C,
        diag_log_code_gsm.LOG_GPRS_SM_GMM_OTA_SIGNALING_MESSAGE_C,
        # Dual-SIM (DSDS) counterparts.
        diag_log_code_gsm.LOG_GSM_DSDS_L1_FCCH_ACQUISITION_C,
        diag_log_code_gsm.LOG_GSM_DSDS_L1_SCH_ACQUISITION_C,
        diag_log_code_gsm.LOG_GSM_DSDS_L1_BURST_METRICS_C,
        diag_log_code_gsm.LOG_GSM_DSDS_L1_SCELL_BA_LIST_C,
        diag_log_code_gsm.LOG_GSM_DSDS_L1_SCELL_AUX_MEASUREMENTS_C,
        diag_log_code_gsm.LOG_GSM_DSDS_L1_NCELL_AUX_MEASUREMENTS_C,
        diag_log_code_gsm.LOG_GSM_DSDS_RR_SIGNALING_MESSAGE_C,
        diag_log_code_gsm.LOG_GSM_DSDS_RR_CELL_INFORMATION_C,
        diag_log_code_gsm.LOG_GPRS_DSDS_RR_PACKET_SI_1_C,
        diag_log_code_gsm.LOG_GPRS_DSDS_RR_PACKET_SI_2_C,
        diag_log_code_gsm.LOG_GPRS_DSDS_RR_PACKET_SI_3_C,
    )
    return create_log_config_set_mask(DIAG_SUBSYS_ID_GSM, num_max_items, *gsm_log_codes)
class Solution:
    """LeetCode 518 'Coin Change II': count coin combinations summing to amount."""

    def change(self, amount: int, coins: List[int]) -> int:
        """Return the number of combinations of `coins` totalling `amount`.

        dp[a][j] = number of ways to form amount `a` using only coins[0..j].
        Recurrence: dp[a][j] = dp[a][j-1] + dp[a - coins[j]][j].
        """
        n = len(coins)
        if n == 0:
            # No coins: only amount 0 is formable (matches original int(n == amount)).
            return int(amount == 0)
        dp = [[0] * n for _ in range(amount + 1)]
        for j in range(n):
            dp[0][j] = 1  # one way to make 0: take nothing
        for a in range(1, amount + 1):
            for j in range(n):
                # Ways without coin j. The original read dp[a][-1] here for
                # j == 0, which was only correct by accident (row still zero);
                # make the base case explicit.
                dp[a][j] = dp[a][j - 1] if j > 0 else 0
                if a - coins[j] >= 0:
                    dp[a][j] += dp[a - coins[j]][j]
        return dp[amount][n - 1]
def package_sqls(sql_path, db_root_path, mode='gpt', data_mode='dev'):
    """Load SQL strings from a file, plus per-query sqlite paths for ground truth.

    mode='gpt': one SQL statement per line.
    mode='gt' : tab-separated "<sql>\\t<db_name>" per line; also builds the
                corresponding "<db_root_path><db_name>/<db_name>.sqlite" paths.
    `data_mode` is accepted for interface compatibility but unused here.
    Returns (clean_sqls, db_path_list).
    """
    clean_sqls = []
    db_path_list = []
    if mode == 'gpt':
        with open(sql_path) as f:
            clean_sqls = [line.strip() for line in f]
    elif mode == 'gt':
        # The original opened this file without ever closing it; use a
        # context manager so the handle is released.
        with open(sql_path) as f:
            for sql_str in f:
                sql, db_name = sql_str.strip().split('\t')
                clean_sqls.append(sql)
                db_path_list.append(db_root_path + db_name + '/' + db_name + '.sqlite')
    return clean_sqls, db_path_list
# NOTE(review): the leading `.parametrize(...)` looks like a stripped
# `@pytest.mark.parametrize` decorator — confirm against upstream.
.parametrize('cls, indices, ref_val', [(CVDistance, (0, 1), 1.0), (CVBend, (0, 1, 2), (np.pi / 2)), (CVTorsion, (0, 1, 2, 3), (- 2.1025204))])
def test_prim_internal_cv(cls, indices, ref_val):
    """Check primitive internal CVs: value against reference, analytic
    gradient against the autograd gradient."""
    # Four reference atoms spanning distance/angle/torsion geometries.
    c3d = np.array(((0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, (- 1.0), 0.0), (2.0, (- 0.5), 1.7)))
    cv = cls(indices)
    agrad_cv = cls(indices, force_agrad=True)
    assert agrad_cv.agrad
    (val, grad) = cv.eval(c3d)
    agrad_grad = agrad_cv.gradient(c3d)
    assert (val == pytest.approx(ref_val))
    np.testing.assert_allclose(grad, agrad_grad, atol=1e-12)
def download_image_from_url(image_url):
    """Download an image into UPLOAD_FOLDER under a UUID filename.

    Returns the local file path, or None when the HTTP request fails.
    """
    filename = UPLOAD_FOLDER + str(uuid.uuid4())
    # Derive an extension from the URL; unknown types keep the bare UUID name.
    if 'png' in image_url:
        filename += '.png'
    elif 'jpg' in image_url:
        filename += '.jpg'
    elif 'gif' in image_url:
        filename += '.gif'
    print('Downloading this {} image'.format(image_url))
    # Close the streamed response on all paths (the original leaked it).
    with requests.get(image_url, stream=True) as r:
        if r.status_code == 200:
            r.raw.decode_content = True
            with open(filename, 'wb') as f:
                shutil.copyfileobj(r.raw, f)
            print('Image sucessfully Downloaded: ', filename)
        else:
            print("Image Couldn't be retreived")
            return None
    return filename
def _build_tree_structure(queryset):
    """Map each parent pk (stringified; 0 for roots) to its ordered child pks.

    Ordering follows the MPTT tree traversal (tree_id, then left edge).
    """
    tree = {}
    mptt_meta = queryset.model._mptt_meta
    pairs = queryset.order_by(mptt_meta.tree_id_attr, mptt_meta.left_attr).values_list(
        'pk', '%s_id' % mptt_meta.parent_attr)
    for pk, parent_pk in pairs:
        bucket = str(parent_pk) if parent_pk else 0
        tree.setdefault(bucket, []).append(pk)
    return tree
def resolve_globs(glob_path: str, root_path: str = None) -> list[str]:
    """Expand a glob pattern into absolute path strings.

    Relative patterns are resolved against `root_path` when given;
    otherwise the pattern is split into its filesystem anchor and a
    relative part so Path.glob can expand it.
    """
    if root_path and not os.path.isabs(glob_path):
        base = Path(root_path).resolve()
        return [str(match.resolve()) for match in base.glob(glob_path)]
    resolved = Path(glob_path).resolve()
    anchor = resolved.anchor
    relative_pattern = str(resolved.relative_to(anchor))
    return [str(match.resolve()) for match in Path(anchor).glob(relative_pattern)]
def _setup_argparser():
    """Build the CLI parser for the signature tester and parse sys.argv."""
    parser = argparse.ArgumentParser(
        description=f'{PROGRAM_NAME} - {PROGRAM_DESCRIPTION}')
    parser.add_argument(
        '-V', '--version', action='version',
        version=f'{PROGRAM_NAME} {PROGRAM_VERSION}')
    parser.add_argument(
        'test_file',
        help='File containing the list of signatures')
    parser.add_argument(
        '--yara_path',
        default='software_signatures/',
        help='File or Folder containing yara signatures (Extension .yara mandatory)')
    return parser.parse_args()
class OptionPlotoptionsErrorbarSonificationTracksMappingPlaydelay(Options):
    """Config accessors for errorbar sonification track-mapping playDelay.

    NOTE(review): each option below is defined twice (getter signature, then
    setter signature). The @property / @<name>.setter decorators appear to be
    stripped in this copy — as written, the second def overwrites the first.
    Confirm against the generated upstream source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class SortFieldAction(PostProcessingAction):
    """Post-processing action that sorts a DataFrame by a single field."""

    def __init__(self, sort_params: Dict[str, str]) -> None:
        """`sort_params`: single-entry mapping of field name -> 'asc'|'desc'.

        Raises ValueError for None input or malformed field/order.
        """
        super().__init__('sort_field')
        if sort_params is None:
            raise ValueError('Expected valid dictionary')
        sort_field, sort_order = list(sort_params.items())[0]
        if not sort_field or sort_order not in ('asc', 'desc'):
            raise ValueError(f"Expected ES sort params dictionary (e.g. {{'_doc': 'desc'}}). Got '{sort_params}'")
        self._sort_field = sort_field
        self._sort_order = SortOrder.from_string(sort_order)

    def resolve_action(self, df: 'pd.DataFrame') -> 'pd.DataFrame':
        """Return `df` sorted by the configured field and direction."""
        # BUG FIX: the boolean was previously passed positionally, landing in
        # sort_values' `axis` parameter; it must be the `ascending` keyword.
        return df.sort_values(self._sort_field, ascending=(self._sort_order == SortOrder.ASC))

    def __repr__(self) -> str:
        return f"('{self.type}': ('sort_field': '{self._sort_field}', 'sort_order': {self._sort_order}))"
def savenetsettings():
    """Persist NetworkDevices and NetMan as JSON to their files.

    Returns 1 when both writes succeed, 0 if either fails (same contract
    as the original).
    """
    global NetworkDevices, NetMan, netdevfile, netmanfile
    success = 1
    for payload, path in ((NetworkDevices, netdevfile), (NetMan, netmanfile)):
        try:
            # Context manager closes the handle (the original leaked both
            # files); `except Exception` replaces the bare except.
            with open(path, 'w', encoding='utf8') as f:
                f.write(jsonpickle.encode(payload))
        except Exception:
            success = 0
    return success
class TimeLogger:
    """Named wall-clock timer that logs colorized phase durations."""

    def __init__(self, name):
        # Phase name -> log color; unknown phases get no color.
        self.colors = {'forward': 'green', 'backward': 'blue'}
        self.name = name
        self.timers = {}

    def start(self, name):
        """Record the start timestamp for phase `name`."""
        self.timers[name] = default_timer()

    def end(self, name):
        """Log the elapsed time since start(name) for phase `name`."""
        elapsed = default_timer() - self.timers[name]
        tag = color(f'{name.upper():<8}', self.colors.get(name), bold=True)
        logger.debug(f'{self.name:<12} | {tag} | {elapsed:.6f}')
# NOTE(review): the two leading expressions look like stripped hypothesis
# decorators — likely `@given(st.integers())` and
# `@settings(deadline=timedelta(seconds=2))`; confirm against upstream.
(st.integers())
(deadline=timedelta(seconds=2))
def test_workflow_with_output_error(correct_input):
    """The workflow must raise TypeError when its output cannot be converted
    to the declared int output type."""
    with pytest.raises(TypeError, match="Encountered error while executing workflow '{}':\\n  Failed to convert output in position 0 of value .+, expected type \\<class 'int'\\>".format(wf_with_output_error.name)):
        wf_with_output_error(a=correct_input)
class _ButtonDirective(SphinxDirective):
    """Base directive for sphinx-design buttons; subclasses provide the
    concrete reference node via create_ref_node()."""
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    has_content = True
    option_spec = {'color': make_choice(SEMANTIC_COLORS), 'outline': directives.flag, 'align': text_align, 'expand': directives.flag, 'click-parent': directives.flag, 'tooltip': directives.unchanged_required, 'shadow': directives.flag, 'ref-type': make_choice(['any', 'ref', 'doc', 'myst']), 'class': directives.class_option}

    def create_ref_node(self, rawtext: str, target: str, explicit_title: bool, classes: List[str]) -> nodes.Node:
        """Build the reference node wrapping the button; subclass hook."""
        raise NotImplementedError

    def run(self) -> List[nodes.Node]:
        """Assemble the button node tree from directive options and content."""
        rawtext = self.arguments[0]
        target = directives.uri(rawtext)
        classes = ['sd-sphinx-override', 'sd-btn', 'sd-text-wrap']
        # Color classes: outline variant vs filled variant.
        if ('color' in self.options):
            if ('outline' in self.options):
                classes.append(f"sd-btn-outline-{self.options['color']}")
            else:
                classes.append(f"sd-btn-{self.options['color']}")
        if ('click-parent' in self.options):
            classes.append('sd-stretched-link')
        if ('shadow' in self.options):
            classes.append('sd-shadow-sm')
        if ('class' in self.options):
            classes.extend(self.options['class'])
        # Body content acts as an explicit title for the reference.
        node = self.create_ref_node(rawtext, target, bool(self.content), classes)
        self.set_source_info(node)
        if ('tooltip' in self.options):
            node['reftitle'] = self.options['tooltip']
        if self.content:
            (textnodes, _) = self.state.inline_text('\n'.join(self.content), (self.lineno + self.content_offset))
            content = nodes.inline('', '')
            content.extend(textnodes)
        else:
            # No content: display the target itself as the label.
            content = nodes.inline(target, target)
        node.append(content)
        if ('expand' in self.options):
            # Grid container stretches the button to full width.
            grid_container = nodes.inline(classes=['sd-d-grid'])
            self.set_source_info(grid_container)
            grid_container += node
            node = grid_container
        container = nodes.paragraph(classes=self.options.get('align', []))
        self.set_source_info(container)
        container += node
        return [container]
class APIIntEnum(enum.IntEnum):
    """IntEnum base class with lenient conversion helpers for API inputs."""

    # NOTE(review): both methods take `cls` as first parameter, so the
    # `@classmethod` decorators appear to have been stripped in this copy;
    # restored here.
    @classmethod
    def convert(cls: type[_T], value: int) -> (_T | None):
        """Return the member matching `value`, or None when invalid."""
        try:
            return cls(value)
        except ValueError:
            return None

    @classmethod
    def convert_list(cls: type[_T], value: list[int]) -> list[_T]:
        """Convert each value to a member, silently dropping invalid entries."""
        members = []
        for item in value:
            try:
                members.append(cls(item))
            except ValueError:
                pass
        return members
class ShortestPath(object):
    """Dijkstra shortest-path search over a weighted graph, backed by a
    priority queue with decrease-key support."""

    def __init__(self, graph):
        if graph is None:
            raise TypeError('graph cannot be None')
        self.graph = graph
        self.previous = {}
        self.path_weight = {}
        self.remaining = PriorityQueue()
        # Every node starts unreached with "infinite" path weight.
        for node_key in self.graph.nodes.keys():
            self.previous[node_key] = None
            self.path_weight[node_key] = sys.maxsize
            self.remaining.insert(PriorityQueueNode(node_key, self.path_weight[node_key]))

    def find_shortest_path(self, start_node_key, end_node_key):
        """Return the list of node keys on the cheapest path start -> end.

        Raises TypeError for None keys, ValueError for unknown keys.
        """
        if start_node_key is None or end_node_key is None:
            raise TypeError('Input node keys cannot be None')
        if start_node_key not in self.graph.nodes or end_node_key not in self.graph.nodes:
            raise ValueError('Invalid start or end node key')
        self.path_weight[start_node_key] = 0
        self.remaining.decrease_key(start_node_key, 0)
        while self.remaining:
            current_key = self.remaining.extract_min().obj
            current = self.graph.nodes[current_key]
            for neighbor_key in current.adj_nodes.keys():
                candidate = current.adj_weights[neighbor_key] + self.path_weight[current_key]
                if candidate < self.path_weight[neighbor_key]:
                    # Relax the edge: record the cheaper route.
                    self.previous[neighbor_key] = current_key
                    self.path_weight[neighbor_key] = candidate
                    self.remaining.decrease_key(neighbor_key, candidate)
        # Walk predecessors back from the end node, then reverse.
        path = []
        key = end_node_key
        while key is not None:
            path.append(key)
            key = self.previous[key]
        return path[::-1]
def test_mapping_exclusion(monkeypatch, tmp_path):
    """Element mappings that fully determine a matrix entry must exclude it
    from the set of simulations to run."""
    _ = make_coupler()
    ports = make_ports()
    EXCLUDE_INDEX = ('right_bot', 0)
    element_mappings = []
    # Map every other (port, mode) entry onto the excluded index.
    for port in ports:
        for mode_index in range(port.mode_spec.num_modes):
            row_index = (port.name, mode_index)
            if row_index == EXCLUDE_INDEX:
                continue
            element_mappings.append(((row_index, row_index), (row_index, EXCLUDE_INDEX), (+ 1)))
    # Also map the diagonal of ('right_bot', 1) onto the excluded diagonal.
    element_mappings.append(
        (((('right_bot', 1), ('right_bot', 1))), (EXCLUDE_INDEX, EXCLUDE_INDEX), (+ 1)))
    modeler = make_component_modeler(element_mappings=element_mappings, path_dir=str(tmp_path))
    assert EXCLUDE_INDEX not in modeler.matrix_indices_run_sim, 'mapping didnt exclude row properly'
    s_matrix = run_component_modeler(monkeypatch, modeler)
    _test_mappings(element_mappings, s_matrix)
def make_endpoint_summary(endpoint, requests_criterion, baseline_requests_criterion):
    """Summarize analysis answers for one endpoint versus its baseline.

    Sets `has_anything_significant` when any question reports significance.
    """
    summary = dict(
        endpoint_id=endpoint.id,
        endpoint_name=endpoint.name,
        answers=[],
        has_anything_significant=False,
    )
    for question in (MedianLatency(), StatusCodeDistribution()):
        answer = question.get_answer(endpoint, requests_criterion, baseline_requests_criterion)
        if answer.is_significant():
            summary['has_anything_significant'] = True
        summary['answers'].append(answer.serialize())
    return summary
class XsltTransformerWrapper:
    """Wrapper around an lxml XSLT template: validates eagerly, compiles
    lazily, and merges default template parameters on each call."""

    def __init__(self, xslt_template: str,
                 xslt_template_parameters: Optional[Mapping[str, Any]] = None):
        self.xslt_template = xslt_template
        if xslt_template_parameters is None:
            xslt_template_parameters = {}
        self.xslt_template_parameters = xslt_template_parameters
        self.__transformer: Optional[etree.XSLT] = None
        # Fail fast on malformed templates instead of at first transform.
        etree.fromstring(self.xslt_template)

    # NOTE(review): these two factories take neither self nor cls — the
    # @staticmethod decorators appear stripped in this copy; restored here.
    @staticmethod
    def from_template_string(xslt_template: str, **kwargs) -> 'XsltTransformerWrapper':
        """Build a wrapper from template source text."""
        return XsltTransformerWrapper(xslt_template, **kwargs)

    @staticmethod
    def from_template_file(xslt_template_file: str, **kwargs) -> 'XsltTransformerWrapper':
        """Build a wrapper from a template file path."""
        return XsltTransformerWrapper.from_template_string(
            etree.tostring(etree.parse(xslt_template_file)), **kwargs)

    def _get_transformer(self) -> etree.XSLT:
        # Compile the XSLT only once, on first use.
        if self.__transformer is None:
            self.__transformer = etree.XSLT(etree.fromstring(self.xslt_template))
        return self.__transformer

    def __call__(self, xslt_input: T_XSLT_Input,
                 xslt_template_parameters: Optional[Mapping[str, Any]] = None):
        """Transform `xslt_input`; call-time parameters override defaults."""
        xslt_template_parameters = {
            **self.xslt_template_parameters,
            **(xslt_template_parameters or {})}
        LOGGER.debug('xslt_input: %r (xslt_template_parameters=%r)',
                     xslt_input, xslt_template_parameters)
        _xslt_transformer = self._get_transformer()
        return _xslt_transformer(xslt_input, **{
            key: etree.XSLT.strparam(value)
            for key, value in xslt_template_parameters.items()})
def _create_feature():
    """Assemble the merged feature pipeline: quarterly, base-company,
    quarterly-diff and daily-aggregate features."""
    quarterly = QuarterlyFeatures(
        data_key='quarterly',
        columns=QUARTER_COLUMNS,
        quarter_counts=QUARTER_COUNTS,
        max_back_quarter=MAX_BACK_QUARTER,
        min_back_quarter=MIN_BACK_QUARTER,
        verbose=VERBOSE)
    base = BaseCompanyFeatures(
        data_key='base',
        cat_columns=CAT_COLUMNS,
        verbose=VERBOSE)
    diff = QuarterlyDiffFeatures(
        data_key='quarterly',
        columns=QUARTER_COLUMNS,
        compare_quarter_idxs=COMPARE_QUARTER_IDXS,
        max_back_quarter=MAX_BACK_QUARTER,
        min_back_quarter=MIN_BACK_QUARTER,
        verbose=VERBOSE)
    daily = DailyAggQuarterFeatures(
        daily_data_key='daily',
        quarterly_data_key='quarterly',
        columns=DAILY_AGG_COLUMNS,
        agg_day_counts=AGG_DAY_COUNTS,
        max_back_quarter=MAX_BACK_QUARTER,
        min_back_quarter=MIN_BACK_QUARTER,
        verbose=VERBOSE)
    # Merge on ticker first, then on (ticker, date) for the dated features.
    feature = FeatureMerger(quarterly, base, on='ticker')
    feature = FeatureMerger(feature, diff, on=['ticker', 'date'])
    return FeatureMerger(feature, daily, on=['ticker', 'date'])
# NOTE(review): `(name=LABEL_RANGE)` looks like a stripped decorator, e.g. a
# config-validator registration such as `@validate(name=LABEL_RANGE)` —
# confirm against upstream.
(name=LABEL_RANGE)
def validate_label_range(label_range):
    """Validate an MPLS (min_label, max_label) pair.

    Both labels must be truthy integers, min >= 17, and min < max;
    raises ConfigValueError otherwise and returns the pair unchanged.
    """
    (min_label, max_label) = label_range
    if ((not min_label) or (not max_label) or (not isinstance(min_label, numbers.Integral)) or (not isinstance(max_label, numbers.Integral)) or (min_label < 17) or (min_label >= max_label)):
        raise ConfigValueError(desc=('Invalid label_range configuration value: (%s).' % label_range))
    return label_range
# NOTE(review): the leading call looks like a stripped
# `@pytest.fixture(params=[...])` decorator — confirm against upstream.
(params=[JournalDB, BatchDB, MemoryDB, AtomicDB, CacheDB, KeyAccessLoggerAtomicDB, KeyAccessLoggerDB])
def db(request):
    """Yield a DB of the parametrized wrapper type around a fresh MemoryDB."""
    base_db = MemoryDB()
    if (request.param is JournalDB):
        (yield JournalDB(base_db))
    elif (request.param is BatchDB):
        (yield BatchDB(base_db))
    elif (request.param is MemoryDB):
        (yield base_db)
    elif (request.param is AtomicDB):
        # Atomic DBs are exercised through an open batch context.
        atomic_db = AtomicDB(base_db)
        with atomic_db.atomic_batch() as batch:
            (yield batch)
    elif (request.param is CacheDB):
        (yield CacheDB(base_db))
    elif (request.param is KeyAccessLoggerAtomicDB):
        atomic_db = AtomicDB(base_db)
        (yield KeyAccessLoggerAtomicDB(atomic_db))
    elif (request.param is KeyAccessLoggerDB):
        (yield KeyAccessLoggerDB(base_db))
    else:
        # Unreachable unless the params list and branches drift apart.
        raise Exception('Invariant')
class BaseEventFilterBuilder():
    """Builder collecting write-once filter parameters (block range, address,
    per-argument topic filters) for an event log filter.

    NOTE(review): paired defs sharing one name (fromBlock/toBlock/address)
    look like @property / @<name>.setter pairs whose decorators were stripped
    in this copy; the bare `_tuple` tokens below look like residue of a
    stripped `@to_tuple`-style decorator. Confirm against upstream (web3.py).
    """
    formatter = None
    _fromBlock = None
    _toBlock = None
    _address = None
    # When True, all setters refuse writes (builder is frozen).
    _immutable = False

    def __init__(self, event_abi: ABIEvent, abi_codec: ABICodec, formatter: Optional[EventData]=None) -> None:
        self.event_abi = event_abi
        self.abi_codec = abi_codec
        self.formatter = formatter
        self.event_topic = initialize_event_topics(self.event_abi)
        # Per-argument match filters, addressable by event input name.
        self.args = AttributeDict(_build_argument_filters_from_event_abi(event_abi, abi_codec))
        self._ordered_arg_names = tuple((arg['name'] for arg in event_abi['inputs']))

    def fromBlock(self) -> BlockIdentifier:
        return self._fromBlock

    def fromBlock(self, value: BlockIdentifier) -> None:
        # Write-once: reject resetting an already-set value or a frozen builder.
        if ((self._fromBlock is None) and (not self._immutable)):
            self._fromBlock = value
        else:
            raise ValueError(f'fromBlock is already set to {self._fromBlock!r}. Resetting filter parameters is not permitted')

    def toBlock(self) -> BlockIdentifier:
        return self._toBlock

    def toBlock(self, value: BlockIdentifier) -> None:
        if ((self._toBlock is None) and (not self._immutable)):
            self._toBlock = value
        else:
            raise ValueError(f'toBlock is already set to {self._toBlock!r}. Resetting filter parameters is not permitted')

    def address(self) -> ChecksumAddress:
        return self._address

    def address(self, value: ChecksumAddress) -> None:
        if ((self._address is None) and (not self._immutable)):
            self._address = value
        else:
            # NOTE(review): this message contains a literal newline in this
            # copy — possibly introduced by the same corruption; confirm.
            raise ValueError(f'address is already set to {self.address!r}. \nResetting filter parameters is not permitted')

    def ordered_args(self) -> Tuple[(Any, ...)]:
        # Argument filters in ABI input order.
        return tuple(map(self.args.__getitem__, self._ordered_arg_names))

    _tuple

    def indexed_args(self) -> Tuple[(Any, ...)]:
        return tuple(filter(is_indexed, self.ordered_args))

    _tuple

    def data_args(self) -> Tuple[(Any, ...)]:
        return tuple(filter(is_not_indexed, self.ordered_args))

    def topics(self) -> List[HexStr]:
        # Topic 0 is the event signature hash; the rest are indexed-arg values.
        arg_topics = tuple((arg.match_values for arg in self.indexed_args))
        return normalize_topic_list(cons(to_hex(self.event_topic), arg_topics))

    def data_argument_values(self) -> Tuple[(Any, ...)]:
        if (self.data_args is not None):
            return tuple((arg.match_values for arg in self.data_args))
        else:
            return (None,)

    def filter_params(self) -> FilterParams:
        # Drop unset (None) entries from the final filter dict.
        params = {'topics': self.topics, 'fromBlock': self.fromBlock, 'toBlock': self.toBlock, 'address': self.address}
        return valfilter((lambda x: (x is not None)), params)
class __cppClass_WavesCharacteristics():
    """Cython-accelerated evaluator of wave-driven velocity, pressure,
    level-set (phi) and VOF fields around the free surface.

    NOTE(review): depends on module-level helpers (cython, smoothedHeaviside,
    old_div, np) and a wave tool exposing u(x, t), eta(x, t) and mwl.
    """

    def __init__(self, waves, vert_axis, wind_speed=None, b_or=None, smoothing=0.0, vof_water=0.0, vof_air=1.0):
        self.WT = waves              # wave tool
        self.vert_axis = vert_axis   # index of the vertical coordinate
        self.zero_vel = np.zeros(3)
        self._b_or = b_or            # boundary orientation (used for pressure flux)
        self.smoothing = smoothing   # Heaviside smoothing length
        self.vof_air = vof_air
        self.vof_water = vof_water
        if (wind_speed is None):
            self.wind_speed = self.zero_vel
        else:
            self.wind_speed = wind_speed

    def __cpp_calculate_velocity(self, x, t):
        """Blend water and wind velocity across the smoothed free surface."""
        cython.declare(u=cython.double[3])
        cython.declare(xx=cython.double[3])
        cython.declare(x_max=cython.double[3])
        xx[0] = x[0]
        xx[1] = x[1]
        xx[2] = x[2]
        phi = self.__cpp_calculate_phi(x, t)
        if (phi <= 0.0):
            # Below the free surface: pure water velocity.
            H = 0
            waterSpeed = self.WT.u(xx, t)
        elif (0 < phi <= self.smoothing):
            # Inside the smoothing band: evaluate water velocity at the
            # surface-projected point and blend with the wind speed.
            H = smoothedHeaviside(old_div(self.smoothing, 2.0), (phi - old_div(self.smoothing, 2.0)))
            x_max[0] = x[0]
            x_max[1] = x[1]
            x_max[2] = x[2]
            x_max[self.vert_axis] = (x[self.vert_axis] - phi)
            waterSpeed = self.WT.u(x_max, t)
        else:
            # Above the band: pure wind speed.
            H = 1.0
            waterSpeed = self.zero_vel
        u[0] = ((H * self.wind_speed[0]) + ((1 - H) * waterSpeed[0]))
        u[1] = ((H * self.wind_speed[1]) + ((1 - H) * waterSpeed[1]))
        u[2] = ((H * self.wind_speed[2]) + ((1 - H) * waterSpeed[2]))
        return u

    def __cpp_calculate_pressure(self, x, t):
        """Project the blended velocity onto the boundary orientation vector."""
        ux = self.__cpp_calculate_velocity(x, t)
        (b0, b1, b2) = (self._b_or[0], self._b_or[1], self._b_or[2])
        (u0, u1, u2) = (ux[0], ux[1], ux[2])
        return (((b0 * u0) + (b1 * u1)) + (b2 * u2))

    def __cpp_calculate_phi(self, x, t):
        """Signed height above the instantaneous free surface (mwl + eta)."""
        cython.declare(xx=cython.double[3])
        xx[0] = x[0]
        xx[1] = x[1]
        xx[2] = x[2]
        level = (self.WT.mwl + self.WT.eta(xx, t))
        return (x[self.vert_axis] - level)

    def __cpp_calculate_vof(self, x, t):
        """Volume-of-fluid fraction: smoothed Heaviside of phi."""
        phi = self.__cpp_calculate_phi(x, t)
        H = self.__cpp_calculate_smoothing_H(phi)
        return H

    def __cpp_calculate_smoothing_H(self, phi):
        """Smoothed Heaviside of phi with half-width `smoothing`.

        NOTE(review): when smoothing == 0 the first and third branches cover
        all phi; with smoothing < 0 (unexpected) H could be unassigned.
        """
        if (phi >= self.smoothing):
            H = 1.0
        elif ((self.smoothing > 0) and ((- self.smoothing) < phi < self.smoothing)):
            H = smoothedHeaviside(self.smoothing, phi)
        elif (phi <= (- self.smoothing)):
            H = 0.0
        return H
class TestDialogues(BaseSkillTestCase):
    """Tests the dialogue classes of the fetchai/ml_train skill: role
    resolution, self addresses, and write-once associated-dialogue fields."""
    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'ml_train')

    def setup(cls):
        # NOTE(review): first parameter is `cls` — presumably a classmethod
        # whose decorator is handled by the base test framework; confirm.
        super().setup()
        cls.default_dialogues = cast(DefaultDialogues, cls._skill.skill_context.default_dialogues)
        cls.ml_dialogues = cast(MlTradeDialogues, cls._skill.skill_context.ml_trade_dialogues)
        cls.ledger_api_dialogues = cast(LedgerApiDialogues, cls._skill.skill_context.ledger_api_dialogues)
        cls.oef_search_dialogues = cast(OefSearchDialogues, cls._skill.skill_context.oef_search_dialogues)
        cls.signing_dialogues = cast(SigningDialogues, cls._skill.skill_context.signing_dialogues)

    def test_default_dialogues(self):
        """A created default dialogue takes the AGENT role and agent address."""
        (_, dialogue) = self.default_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=DefaultMessage.Performative.BYTES, content=b'some_content')
        assert (dialogue.role == DefaultDialogue.Role.AGENT)
        assert (dialogue.self_address == self.skill.skill_context.agent_address)

    def test_ml_dialogue(self):
        """The terms field of an ML dialogue is write-once."""
        ml_dialogue = MlTradeDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=MlTradeDialogue.Role.BUYER)
        with pytest.raises(AEAEnforceError, match='Terms not set!'):
            assert ml_dialogue.terms
        terms = Terms('some_ledger_id', self.skill.skill_context.agent_address, 'counterprty', {'currency_id': 50}, {'good_id': (- 10)}, 'some_nonce')
        ml_dialogue.terms = terms
        with pytest.raises(AEAEnforceError, match='Terms already set!'):
            ml_dialogue.terms = terms
        assert (ml_dialogue.terms == terms)

    def test_ml_dialogues(self):
        """A created ML dialogue takes the BUYER role and agent address."""
        (_, dialogue) = self.ml_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=MlTradeMessage.Performative.CFP, query='some_query')
        assert (dialogue.role == MlTradeDialogue.Role.BUYER)
        assert (dialogue.self_address == self.skill.skill_context.agent_address)

    def test_ledger_api_dialogue(self):
        """The associated ML dialogue of a ledger-api dialogue is write-once."""
        ledger_api_dialogue = LedgerApiDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=LedgerApiDialogue.Role.AGENT)
        with pytest.raises(AEAEnforceError, match='MlTradeDialogue not set!'):
            assert ledger_api_dialogue.associated_ml_trade_dialogue
        ml_dialogue = MlTradeDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=MlTradeDialogue.Role.BUYER)
        ledger_api_dialogue.associated_ml_trade_dialogue = ml_dialogue
        with pytest.raises(AEAEnforceError, match='MlTradeDialogue already set!'):
            ledger_api_dialogue.associated_ml_trade_dialogue = ml_dialogue
        assert (ledger_api_dialogue.associated_ml_trade_dialogue == ml_dialogue)

    def test_ledger_api_dialogues(self):
        """A created ledger-api dialogue takes the AGENT role; its self
        address is the skill's public id."""
        (_, dialogue) = self.ledger_api_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=LedgerApiMessage.Performative.GET_BALANCE, ledger_id='some_ledger_id', address='some_address')
        assert (dialogue.role == LedgerApiDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.public_id))

    def test_oef_search_dialogues(self):
        """A created OEF-search dialogue takes the AGENT role."""
        (_, dialogue) = self.oef_search_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=OefSearchMessage.Performative.SEARCH_SERVICES, query='some_query')
        assert (dialogue.role == OefSearchDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.public_id))

    def test_signing_dialogue(self):
        """The associated ledger-api dialogue of a signing dialogue is write-once."""
        signing_dialogue = SigningDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=SigningDialogue.Role.SKILL)
        with pytest.raises(AEAEnforceError, match='LedgerApiDialogue not set!'):
            assert signing_dialogue.associated_ledger_api_dialogue
        ledger_api_dialogue = LedgerApiDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=LedgerApiDialogue.Role.AGENT)
        signing_dialogue.associated_ledger_api_dialogue = ledger_api_dialogue
        with pytest.raises(AEAEnforceError, match='LedgerApiDialogue already set!'):
            signing_dialogue.associated_ledger_api_dialogue = ledger_api_dialogue
        assert (signing_dialogue.associated_ledger_api_dialogue == ledger_api_dialogue)

    def test_signing_dialogues(self):
        """A created signing dialogue takes the SKILL role; its self address
        is the skill's public id."""
        (_, dialogue) = self.signing_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=SigningMessage.Performative.SIGN_TRANSACTION, terms='some_terms', raw_transaction='some_raw_transaction')
        assert (dialogue.role == SigningDialogue.Role.SKILL)
        assert (dialogue.self_address == str(self.skill.public_id))
class Update(Operation):
    """Operation that applies a transformation to selected documents.

    The transformation is either a callable invoked on each document, or a
    mapping merged into each document via ``dict.update``. Target documents
    are chosen either by a query predicate or by an explicit collection of
    document ids; at least one of the two must be supplied.
    """

    def __init__(self, function: Callable[([Any], Any)], query: Optional[QueryFunction]=None, doc_ids: Optional[Iterable[int]]=None) -> None:
        if query is None and doc_ids is None:
            raise TypeError('query or doc_ids must be specified')
        # A non-callable `function` is interpreted as a mapping to merge in.
        if callable(function):
            self.function = function
        else:
            self.function = lambda doc: doc.update(function)
        self.query = query
        self.doc_ids = doc_ids

    def perform(self, data: Dict[(int, Any)]) -> None:
        """Apply the transformation in place to every matching document."""
        if self.query is None:
            # No predicate given, so doc_ids must have been (see __init__).
            assert self.doc_ids is not None
            for doc_id, document in data.items():
                if doc_id in self.doc_ids:
                    self.function(document)
        else:
            # Query takes precedence when both selectors are provided.
            for document in data.values():
                if self.query(document):
                    self.function(document)
class PrometheusMessage(Message):
    """A protocol message for interacting with a prometheus metrics endpoint.

    NOTE(review): this reads as auto-generated AEA protocol code; the
    accessors below (valid_performatives, dialogue_reference, ...) are
    normally @property-decorated in the generator's output, and the
    decorators appear to have been stripped from this copy — confirm
    against the upstream `fetchai/prometheus` package.
    """

    # Framework identifiers used for routing/serialisation of this protocol.
    protocol_id = PublicId.from_str('fetchai/prometheus:1.1.7')
    protocol_specification_id = PublicId.from_str('fetchai/prometheus:1.0.0')

    class Performative(Message.Performative):
        """Performatives for the prometheus protocol."""

        ADD_METRIC = 'add_metric'
        RESPONSE = 'response'
        UPDATE_METRIC = 'update_metric'

        def __str__(self) -> str:
            """Get the string representation."""
            return str(self.value)

    _performatives = {'add_metric', 'response', 'update_metric'}
    __slots__: Tuple[(str, ...)] = tuple()

    class _SlotsCls():
        # Every content name any performative of this protocol may carry.
        __slots__ = ('callable', 'code', 'description', 'dialogue_reference', 'labels', 'message', 'message_id', 'performative', 'target', 'title', 'type', 'value')

    def __init__(self, performative: Performative, dialogue_reference: Tuple[(str, str)]=('', ''), message_id: int=1, target: int=0, **kwargs: Any):
        """Initialise an instance of PrometheusMessage.

        :param performative: the message performative.
        :param dialogue_reference: the identifier of the dialogue.
        :param message_id: the message id.
        :param target: the message target.
        :param kwargs: performative-specific contents (validated in _is_consistent).
        """
        super().__init__(dialogue_reference=dialogue_reference, message_id=message_id, target=target, performative=PrometheusMessage.Performative(performative), **kwargs)

    def valid_performatives(self) -> Set[str]:
        """Get valid performatives."""
        return self._performatives

    def dialogue_reference(self) -> Tuple[(str, str)]:
        """Get the dialogue_reference of the message."""
        enforce(self.is_set('dialogue_reference'), 'dialogue_reference is not set.')
        return cast(Tuple[(str, str)], self.get('dialogue_reference'))

    def message_id(self) -> int:
        """Get the message_id of the message."""
        enforce(self.is_set('message_id'), 'message_id is not set.')
        return cast(int, self.get('message_id'))

    def performative(self) -> Performative:
        """Get the performative of the message."""
        enforce(self.is_set('performative'), 'performative is not set.')
        return cast(PrometheusMessage.Performative, self.get('performative'))

    def target(self) -> int:
        """Get the target of the message."""
        enforce(self.is_set('target'), 'target is not set.')
        return cast(int, self.get('target'))

    def callable(self) -> str:
        """Get the 'callable' content (update method name) from the message."""
        enforce(self.is_set('callable'), "'callable' content is not set.")
        return cast(str, self.get('callable'))

    def code(self) -> int:
        """Get the 'code' content (response status) from the message."""
        enforce(self.is_set('code'), "'code' content is not set.")
        return cast(int, self.get('code'))

    def description(self) -> str:
        """Get the 'description' content from the message."""
        enforce(self.is_set('description'), "'description' content is not set.")
        return cast(str, self.get('description'))

    def labels(self) -> Dict[(str, str)]:
        """Get the 'labels' content (metric label key/value pairs) from the message."""
        enforce(self.is_set('labels'), "'labels' content is not set.")
        return cast(Dict[(str, str)], self.get('labels'))

    def message(self) -> Optional[str]:
        """Get the optional 'message' content from the message (no enforce: content is optional)."""
        return cast(Optional[str], self.get('message'))

    def title(self) -> str:
        """Get the 'title' content (metric name) from the message."""
        enforce(self.is_set('title'), "'title' content is not set.")
        return cast(str, self.get('title'))

    def type(self) -> str:
        """Get the 'type' content (metric type) from the message."""
        enforce(self.is_set('type'), "'type' content is not set.")
        return cast(str, self.get('type'))

    def value(self) -> float:
        """Get the 'value' content from the message."""
        enforce(self.is_set('value'), "'value' content is not set.")
        return cast(float, self.get('value'))

    def _is_consistent(self) -> bool:
        """Check that the message follows the prometheus protocol.

        Validates framework fields, then the per-performative contents and
        their count; returns False (after logging) on any violation.
        """
        try:
            # Framework-level field checks.
            enforce(isinstance(self.dialogue_reference, tuple), "Invalid type for 'dialogue_reference'. Expected 'tuple'. Found '{}'.".format(type(self.dialogue_reference)))
            enforce(isinstance(self.dialogue_reference[0], str), "Invalid type for 'dialogue_reference[0]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[0])))
            enforce(isinstance(self.dialogue_reference[1], str), "Invalid type for 'dialogue_reference[1]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[1])))
            # `type(...) is int` (not isinstance) deliberately rejects bool here.
            enforce((type(self.message_id) is int), "Invalid type for 'message_id'. Expected 'int'. Found '{}'.".format(type(self.message_id)))
            enforce((type(self.target) is int), "Invalid type for 'target'. Expected 'int'. Found '{}'.".format(type(self.target)))
            enforce(isinstance(self.performative, PrometheusMessage.Performative), "Invalid 'performative'. Expected either of '{}'. Found '{}'.".format(self.valid_performatives, self.performative))
            # Count of performative-specific contents; DEFAULT_BODY_SIZE
            # (defined elsewhere) covers the framework-level body entries.
            actual_nb_of_contents = (len(self._body) - DEFAULT_BODY_SIZE)
            expected_nb_of_contents = 0
            if (self.performative == PrometheusMessage.Performative.ADD_METRIC):
                expected_nb_of_contents = 4
                enforce(isinstance(self.type, str), "Invalid type for content 'type'. Expected 'str'. Found '{}'.".format(type(self.type)))
                enforce(isinstance(self.title, str), "Invalid type for content 'title'. Expected 'str'. Found '{}'.".format(type(self.title)))
                enforce(isinstance(self.description, str), "Invalid type for content 'description'. Expected 'str'. Found '{}'.".format(type(self.description)))
                enforce(isinstance(self.labels, dict), "Invalid type for content 'labels'. Expected 'dict'. Found '{}'.".format(type(self.labels)))
                for (key_of_labels, value_of_labels) in self.labels.items():
                    enforce(isinstance(key_of_labels, str), "Invalid type for dictionary keys in content 'labels'. Expected 'str'. Found '{}'.".format(type(key_of_labels)))
                    enforce(isinstance(value_of_labels, str), "Invalid type for dictionary values in content 'labels'. Expected 'str'. Found '{}'.".format(type(value_of_labels)))
            elif (self.performative == PrometheusMessage.Performative.UPDATE_METRIC):
                expected_nb_of_contents = 4
                enforce(isinstance(self.title, str), "Invalid type for content 'title'. Expected 'str'. Found '{}'.".format(type(self.title)))
                enforce(isinstance(self.callable, str), "Invalid type for content 'callable'. Expected 'str'. Found '{}'.".format(type(self.callable)))
                enforce(isinstance(self.value, float), "Invalid type for content 'value'. Expected 'float'. Found '{}'.".format(type(self.value)))
                enforce(isinstance(self.labels, dict), "Invalid type for content 'labels'. Expected 'dict'. Found '{}'.".format(type(self.labels)))
                for (key_of_labels, value_of_labels) in self.labels.items():
                    enforce(isinstance(key_of_labels, str), "Invalid type for dictionary keys in content 'labels'. Expected 'str'. Found '{}'.".format(type(key_of_labels)))
                    enforce(isinstance(value_of_labels, str), "Invalid type for dictionary values in content 'labels'. Expected 'str'. Found '{}'.".format(type(value_of_labels)))
            elif (self.performative == PrometheusMessage.Performative.RESPONSE):
                expected_nb_of_contents = 1
                enforce((type(self.code) is int), "Invalid type for content 'code'. Expected 'int'. Found '{}'.".format(type(self.code)))
                # 'message' is optional: counted only when present.
                if self.is_set('message'):
                    expected_nb_of_contents += 1
                    message = cast(str, self.message)
                    enforce(isinstance(message, str), "Invalid type for content 'message'. Expected 'str'. Found '{}'.".format(type(message)))
            enforce((expected_nb_of_contents == actual_nb_of_contents), 'Incorrect number of contents. Expected {}. Found {}'.format(expected_nb_of_contents, actual_nb_of_contents))
            # The first message of a dialogue cannot target another message.
            if (self.message_id == 1):
                enforce((self.target == 0), "Invalid 'target'. Expected 0 (because 'message_id' is 1). Found {}.".format(self.target))
        except (AEAEnforceError, ValueError, KeyError) as e:
            _default_logger.error(str(e))
            return False
        return True
# NOTE(review): the bare name below looks like the tail of a stripped
# decorator (e.g. '@..._validator') — confirm against the upstream source.
_validator
def validate_updates(request, **kwargs):
    """Resolve the 'updates' querystring aliases to Update rows.

    Looks each requested alias up in the database; on success replaces
    request.validated['updates'] with the resolved rows, otherwise records
    a querystring validation error listing the unknown aliases.
    """
    updates = request.validated.get('updates')
    # Parameter absent entirely: nothing to validate.
    if (updates is None):
        return
    db = request.db
    bad_updates = []
    validated_updates = []
    for u in updates:
        # Each entry is expected to be an update alias; resolve it to a row.
        update = db.query(Update).filter((Update.alias == u)).first()
        if (not update):
            bad_updates.append(u)
        else:
            validated_updates.append(update)
    if bad_updates:
        request.errors.add('querystring', 'updates', 'Invalid updates specified: {}'.format(', '.join(bad_updates)))
    else:
        request.validated['updates'] = validated_updates
def publish_cell_voltages(client, device_topic, voltages):
    """Publish per-cell voltages and aggregate statistics to MQTT.

    :param client: MQTT client handle passed through to mqtt_single_out.
    :param device_topic: topic prefix; everything is published under
        f'{device_topic}/cell_voltages/...'.
    :param voltages: sequence of cell voltages, presumably in millivolts
        (every published value is divided by 1000) — confirm with the caller.

    Publishes nothing for an empty/None sequence; aggregate topics (min, max,
    delta, average, median) are only published for packs with more than one cell.
    """
    if not voltages:
        return
    # Idiom fix: enumerate instead of a C-style index loop.
    # Per-cell topics are 1-based.
    for cell_number, millivolts in enumerate(voltages, start=1):
        mqtt_single_out(client, f'{device_topic}/cell_voltages/{cell_number}', millivolts / 1000)
    if len(voltages) > 1:
        indices = range(len(voltages))
        high_i = max(indices, key=lambda i: voltages[i])
        low_i = min(indices, key=lambda i: voltages[i])
        mqtt_single_out(client, f'{device_topic}/cell_voltages/min', voltages[low_i] / 1000)
        mqtt_single_out(client, f'{device_topic}/cell_voltages/min_index', low_i + 1)
        mqtt_single_out(client, f'{device_topic}/cell_voltages/max', voltages[high_i] / 1000)
        mqtt_single_out(client, f'{device_topic}/cell_voltages/max_index', high_i + 1)
        mqtt_single_out(client, f'{device_topic}/cell_voltages/delta', (voltages[high_i] - voltages[low_i]) / 1000)
        # round() is applied BEFORE the /1000, i.e. the average is kept to a
        # whole input unit — preserved from the original.
        mqtt_single_out(client, f'{device_topic}/cell_voltages/average', round(sum(voltages) / len(voltages)) / 1000)
        mqtt_single_out(client, f'{device_topic}/cell_voltages/median', statistics.median(voltages) / 1000)
class Quantity(unittest.TestCase):
    """Tests for SI quantity arithmetic and numpy interoperability.

    Review fixes applied:
    - test_matmul: restored the '@' (matmul) operator, which had been lost,
      turning the expression into a call.
    - test_min / test_max: the two bodies were swapped (test_min asserted
      numpy.max and vice versa); each now exercises the function it names.
    """

    def test_fromstring(self):
        F = SI.parse('5kN')
        self.assertEqual(type(F), SI.Force)
        self.assertEqual(F / 'N', 5000)
        v = SI.parse('-864km/24h')
        self.assertEqual(type(v), SI.Velocity)
        self.assertEqual(v / 'm/s', -10)
        v = SI.parse('2m/5cm')
        self.assertEqual(v, 40)

    def test_fromvalue(self):
        F = SI.Force('10N')
        self.assertEqual(type(F), SI.Force)
        self.assertEqual(F / SI.Force('2N'), 5)

    def test_getitem(self):
        F = SI.units.N * numpy.arange(6).reshape(2, 3)
        self.assertEqual(F[0, 0], SI.Force('0N'))
        self.assertEqual(F[0, 1], SI.Force('1N'))
        self.assertEqual(F[0, 2], SI.Force('2N'))
        self.assertEqual(F[1, 0], SI.Force('3N'))
        self.assertEqual(F[1, 1], SI.Force('4N'))
        self.assertEqual(F[1, 2], SI.Force('5N'))

    def test_setitem(self):
        F = SI.units.N * numpy.zeros(3)
        F[0] = SI.Force('1N')
        F[1] = SI.Force('2N')
        # Assigning a dimensionally-incompatible quantity must fail.
        with self.assertRaisesRegex(TypeError, 'cannot assign \\[L2\\] to \\[M\\*L/T2\\]'):
            F[2] = SI.Area('10m2')
        F[2] = SI.Force('3N')
        self.assertTrue(numpy.all(F == SI.units.N * numpy.array([1, 2, 3])))

    def test_iter(self):
        F = SI.units.N * numpy.arange(6).reshape(2, 3)
        for i, Fi in enumerate(F):
            for j, Fij in enumerate(Fi):
                self.assertEqual(Fij, SI.units.N * (i * 3 + j))

    def test_multiply(self):
        self.assertEqual(SI.Mass('2kg') * SI.Acceleration('10m/s2'), SI.Force('20N'))
        self.assertEqual(2 * SI.Acceleration('10m/s2'), SI.Acceleration('20m/s2'))
        self.assertEqual(SI.Mass('2kg') * 10, SI.Mass('20kg'))
        # Dimensions cancel to a plain number.
        self.assertEqual(SI.Time('2s') * SI.Frequency('10/s'), 20)
        self.assertEqual(numpy.multiply(SI.Mass('2kg'), SI.Acceleration('10m/s2')), SI.Force('20N'))

    def test_matmul(self):
        # kg*[2,3] @ (m/s2)*[5,-3] = (10 - 9) kg*m/s2 = 1 N
        self.assertEqual(
            (SI.units.kg * numpy.array([2, 3])) @ (SI.parse('m/s2') * numpy.array([5, -3])),
            SI.Force('1N'))

    def test_divide(self):
        self.assertEqual(SI.Length('2m') / SI.Time('10s'), SI.Velocity('.2m/s'))
        self.assertEqual(2 / SI.Time('10s'), SI.Frequency('.2/s'))
        self.assertEqual(SI.Length('2m') / 10, SI.Length('.2m'))
        # Same dimensions divide to a plain number.
        self.assertEqual(SI.Density('2kg/m3') / SI.Density('10kg/m3'), 0.2)
        self.assertEqual(numpy.divide(SI.Length('2m'), SI.Time('10s')), SI.Velocity('.2m/s'))

    def test_power(self):
        self.assertEqual(SI.Length('3m') ** 2, SI.Area('9m2'))
        self.assertEqual(SI.Length('3m') ** 0, 1)
        self.assertEqual(numpy.power(SI.Length('3m'), 2), SI.Area('9m2'))

    def test_add(self):
        self.assertEqual(SI.Mass('2kg') + SI.Mass('3kg'), SI.Mass('5kg'))
        self.assertEqual(numpy.add(SI.Mass('2kg'), SI.Mass('3kg')), SI.Mass('5kg'))
        with self.assertRaisesRegex(TypeError, 'incompatible arguments for add: \\[M\\], \\[L\\]'):
            SI.Mass('2kg') + SI.Length('3m')

    def test_sub(self):
        self.assertEqual(SI.Mass('2kg') - SI.Mass('3kg'), SI.Mass('-1kg'))
        self.assertEqual(numpy.subtract(SI.Mass('2kg'), SI.Mass('3kg')), SI.Mass('-1kg'))
        with self.assertRaisesRegex(TypeError, 'incompatible arguments for sub: \\[M\\], \\[L\\]'):
            SI.Mass('2kg') - SI.Length('3m')

    def test_hypot(self):
        self.assertEqual(numpy.hypot(SI.Mass('3kg'), SI.Mass('4kg')), SI.Mass('5kg'))
        with self.assertRaisesRegex(TypeError, 'incompatible arguments for hypot: \\[M\\], \\[L\\]'):
            numpy.hypot(SI.Mass('3kg'), SI.Length('4m'))

    def test_neg(self):
        self.assertEqual(-SI.Mass('2kg'), SI.Mass('-2kg'))
        self.assertEqual(numpy.negative(SI.Mass('2kg')), SI.Mass('-2kg'))

    def test_pos(self):
        self.assertEqual(+SI.Mass('2kg'), SI.Mass('2kg'))
        self.assertEqual(numpy.positive(SI.Mass('2kg')), SI.Mass('2kg'))

    def test_abs(self):
        self.assertEqual(numpy.abs(SI.Mass('-2kg')), SI.Mass('2kg'))

    def test_real(self):
        self.assertEqual(numpy.real(SI.ElectricPotential('1V') + 1j * SI.ElectricPotential('2V')), SI.ElectricPotential('1V'))

    def test_imag(self):
        self.assertEqual(numpy.imag(SI.ElectricPotential('1V') + 1j * SI.ElectricPotential('2V')), SI.ElectricPotential('2V'))

    def test_conjugate(self):
        self.assertEqual(
            numpy.conjugate(SI.ElectricPotential('1V') + 1j * SI.ElectricPotential('2V')),
            SI.ElectricPotential('1V') - 1j * SI.ElectricPotential('2V'))

    def test_sqrt(self):
        self.assertEqual(numpy.sqrt(SI.Area('4m2')), SI.Length('2m'))

    def test_sum(self):
        self.assertTrue(numpy.all(numpy.sum(SI.units.kg * numpy.arange(6).reshape(2, 3), 0) == SI.units.kg * numpy.array([3, 5, 7])))
        self.assertTrue(numpy.all(numpy.sum(SI.units.kg * numpy.arange(6).reshape(2, 3), 1) == SI.units.kg * numpy.array([3, 12])))

    def test_mean(self):
        self.assertTrue(numpy.all(numpy.mean(SI.units.kg * numpy.arange(6).reshape(2, 3), 0) == SI.units.kg * numpy.array([1.5, 2.5, 3.5])))
        self.assertTrue(numpy.all(numpy.mean(SI.units.kg * numpy.arange(6).reshape(2, 3), 1) == SI.units.kg * numpy.array([1, 4])))

    def test_broadcast_to(self):
        v = numpy.array([1, 2, 3])
        A = SI.units.kg * v
        B = numpy.broadcast_to(A, (2, 3))
        self.assertEqual(B.unwrap().shape, (2, 3))
        self.assertEqual(B[1, 1], SI.Mass('2kg'))

    def test_trace(self):
        A = SI.units.kg * numpy.arange(18).reshape(3, 2, 3)
        self.assertTrue(numpy.all(numpy.trace(A, axis1=0, axis2=2) == SI.units.kg * numpy.array([21, 30])))

    def test_ptp(self):
        A = SI.units.kg * numpy.array([2, -10, 5, 0])
        self.assertEqual(numpy.ptp(A), SI.Mass('15kg'))

    def test_min(self):
        A = SI.units.kg * numpy.array([2, -10, 5, 0])
        self.assertEqual(numpy.min(A), SI.Mass('-10kg'))

    def test_max(self):
        A = SI.units.kg * numpy.array([2, -10, 5, 0])
        self.assertEqual(numpy.max(A), SI.Mass('5kg'))

    def test_cmp(self):
        A = SI.Mass('2kg')
        B = SI.Mass('3kg')
        self.assertTrue(A < B)
        self.assertTrue(numpy.less(A, B))
        self.assertTrue(A <= B)
        self.assertTrue(numpy.less_equal(A, B))
        self.assertFalse(A > B)
        self.assertFalse(numpy.greater(A, B))
        self.assertFalse(A >= B)
        self.assertFalse(numpy.greater_equal(A, B))
        self.assertFalse(A == B)
        self.assertFalse(numpy.equal(A, B))
        self.assertTrue(A != B)
        self.assertTrue(numpy.not_equal(A, B))

    def test_shape(self):
        A = SI.Mass('2kg')
        self.assertEqual(numpy.shape(A), ())
        A = SI.units.kg * numpy.arange(3)
        self.assertEqual(numpy.shape(A), (3,))
        self.assertEqual(A.unwrap().shape, (3,))

    def test_ndim(self):
        A = SI.Mass('2kg')
        self.assertEqual(numpy.ndim(A), 0)
        A = SI.units.kg * numpy.arange(3)
        self.assertEqual(numpy.ndim(A), 1)
        self.assertEqual(A.unwrap().ndim, 1)

    def test_size(self):
        A = SI.Mass('2kg')
        self.assertEqual(numpy.size(A), 1)
        A = SI.units.kg * numpy.arange(3)
        self.assertEqual(numpy.size(A), 3)
        self.assertEqual(A.unwrap().size, 3)

    def test_isnan(self):
        self.assertTrue(numpy.isnan(SI.units.kg * float('nan')))
        self.assertFalse(numpy.isnan(SI.Mass('2kg')))

    def test_isfinite(self):
        self.assertFalse(numpy.isfinite(SI.units.kg * float('nan')))
        self.assertFalse(numpy.isfinite(SI.units.kg * float('inf')))
        self.assertTrue(numpy.isfinite(SI.Mass('2kg')))

    def test_stack(self):
        A = SI.Mass('2kg')
        B = SI.Mass('3kg')
        C = SI.Mass('4kg')
        D = SI.Time('5s')
        self.assertTrue(numpy.all(numpy.stack([A, B, C]) == SI.units.kg * numpy.array([2, 3, 4])))
        with self.assertRaisesRegex(TypeError, 'incompatible arguments for stack: \\[M\\], \\[M\\], \\[M\\], \\[T\\]'):
            numpy.stack([A, B, C, D])

    def test_concatenate(self):
        A = SI.units.kg * numpy.array([1, 2])
        B = SI.units.kg * numpy.array([3, 4])
        C = SI.units.s * numpy.array([5, 6])
        self.assertTrue(numpy.all(numpy.concatenate([A, B]) == SI.units.kg * numpy.array([1, 2, 3, 4])))
        with self.assertRaisesRegex(TypeError, 'incompatible arguments for concatenate: \\[M\\], \\[M\\], \\[T\\]'):
            numpy.concatenate([A, B, C])

    def test_format(self):
        s = 'velocity: {:.1m/s}'.format(SI.parse('9km/h'))
        self.assertEqual(s, 'velocity: 2.5m/s')

    def test_pickle(self):
        v = SI.Velocity('2m/s')
        s = pickle.dumps(v)
        self.assertEqual(pickle.loads(s), v)

    def test_string_representation(self):
        F = numpy.array([1.0, 2.0]) * SI.units.N
        self.assertEqual(str(F), '[1. 2.][M*L/T2]')
        self.assertEqual(repr(F), 'array([1., 2.])[M*L/T2]')

    def test_wrap_unwrap(self):
        T = SI.Length / SI.Time
        v = T.wrap(5.0)
        self.assertIsInstance(v, T)
        self.assertEqual(v.unwrap(), 5)

    def test_hash(self):
        # Smoke test: quantities must be hashable.
        v = SI.Velocity('2m/s')
        h = hash(v)
class OptionSeriesPyramidStatesHoverHalo(Options):
    """Chart option wrapper for the pyramid series hover-state halo settings.

    NOTE(review): each pair of same-named methods below is a getter (returns
    _config_get with the option's default) followed by a setter (writes via
    _config). The @property / @<name>.setter decorators appear to have been
    stripped from this copy — as written, the setter definition shadows the
    getter. Confirm against the upstream package.
    """

    def attributes(self):
        # No default: returns whatever was configured, else None.
        return self._config_get(None)

    def attributes(self, value: Any):
        self._config(value, js_type=False)

    def opacity(self):
        # Default halo opacity is 0.25.
        return self._config_get(0.25)

    def opacity(self, num: float):
        self._config(num, js_type=False)

    def size(self):
        # Default halo size is 10 (presumably pixels — confirm upstream docs).
        return self._config_get(10)

    def size(self, num: float):
        self._config(num, js_type=False)
class AugAssign(stmt):
    """AST statement node for augmented assignment (e.g. ``target += value``)."""

    _fields = ('target', 'op', 'value')
    _attributes = ('lineno', 'col_offset')

    def __init__(self, target, op, value, lineno=0, col_offset=0, **kwargs):
        stmt.__init__(self, **kwargs)
        # Source location attributes; coerced to int to tolerate e.g. str input.
        self.lineno = int(lineno)
        self.col_offset = int(col_offset)
        # Node fields, in _fields order.
        self.target = target
        self.op = op
        self.value = value
class Scatter3D(Chart):
    """Plotly 3D scatter chart component.

    NOTE(review): chart / layout / data look like @property-decorated lazy
    accessors whose decorators were stripped from this copy — confirm
    against the upstream package.
    """

    def chart(self) -> JsPlotly.Pie:
        # Lazily build the JS chart wrapper on first access.
        if (self._chart is None):
            self._chart = JsPlotly.Pie(page=self.page, component=self, js_code=self.js_code)
        return self._chart

    def layout(self) -> OptPlotly.Layout3D:
        # Lazily build the 3D layout options on first access.
        if (self._layout is None):
            self._layout = OptPlotly.Layout3D(page=self.page, component=self)
        return self._layout

    def data(self):
        # Most recently added trace.
        return self._traces[(- 1)]

    def add_trace(self, data, type='scatter3d', mode='lines'):
        """Append a trace built from `data`; returns self for chaining.

        NOTE(review): the `type` argument only gates whether the key is set —
        the value written is always self.options.type, so the parameter's
        value itself is ignored (unlike `mode`, which is used as a fallback).
        Looks suspicious; confirm intent upstream.
        """
        c_data = dict(data)
        if (type is not None):
            c_data['type'] = self.options.type
        if (mode is not None):
            c_data['mode'] = (self.options.mode or mode)
        self._traces.append(OptPlotly.DataSurface(component=self, page=self.page, attrs=c_data))
        return self

    # Client-side builder: pivots the records into one {x, y, z} dataset per
    # y_column, keyed by x_column, with z taken from options.z_axis.
    # NOTE(review): in the z-fill branch the miss case does
    # 'dataSet.y.push(null)' where 'dataSet.z.push(null)' is presumably
    # intended (would desynchronise y and z lengths on missing z values) —
    # confirm before changing the runtime string.
    _js__builder__ = '\nvar temp = {}; var tempZ = {}; var labels = []; var uniqLabels = {}; var result = [] ;\noptions.y_columns.forEach(function(series){temp[series] = {}});\noptions.y_columns.forEach(function(series){tempZ[series] = {}});\ndata.forEach(function(rec){ \n options.y_columns.forEach(function(name){\n if(rec[name] !== undefined){\n if(!(rec[options.x_column] in uniqLabels)){\n labels.push(rec[options.x_column]); uniqLabels[rec[options.x_column]] = true};\n temp[name][rec[options.x_column]] = rec[name];\n tempZ[name][rec[options.x_column]] = rec[options.z_axis];\n }})});\noptions.y_columns.forEach(function(series){\n dataSet = {x: [], y: [], z: [], name: series, type: options.type, mode: options.mode, marker: {}};\n if(typeof options.attrs !== undefined){ for(var attr in options.attrs){dataSet[attr] = options.attrs[attr]} };\n if(typeof options.marker !== undefined){ \n for(var attr in options.marker){dataSet.marker[attr] = options.marker[attr]} };\n labels.forEach(function(x, i){\n dataSet.x.push(x);\n if(temp[series][x] == undefined){dataSet.y.push(null)} else{dataSet.y.push(temp[series][x])};\n if(tempZ[series][x] == undefined){dataSet.y.push(null)} else{dataSet.z.push(tempZ[series][x])};\n }); result.push(dataSet)});\nreturn result'
class RegexInputFormatter(string.Formatter):
    """string.Formatter with a regex-escaping conversion and index-mix guard.

    Adds the '!e' conversion, which escapes the value via the supplied
    engine's escape() method, and tracks implicit ('{}') versus explicit
    ('{0}') positional field usage so the two styles cannot be mixed.

    Review fixes:
    - 'self.explicit' was never set, so explicit-then-implicit mixing went
      undetected; it is now set when an explicitly-indexed field is seen.
    - typo in the error message ('implict' -> 'implicit').
    """

    def __init__(self, engine):
        # engine must expose an escape(value) method (regex-flavor specific).
        self._engine = engine
        self.implicit = (- 1)  # index of the last implicitly-numbered field
        self.explicit = False  # True once an explicitly-indexed field is seen
        super(RegexInputFormatter, self).__init__()

    def convert_field(self, value, conversion):
        # '!e' -> engine-specific regex escaping; everything else
        # ('s', 'r', 'a', None) is delegated to string.Formatter.
        if (conversion == 'e'):
            return self._engine.escape(value)
        return super(RegexInputFormatter, self).convert_field(value, conversion)

    def get_value(self, key, args, kwargs):
        # NOTE: on Python >= 3.4 string.Formatter resolves '{}' auto-numbering
        # before get_value is called, so key == '' only occurs when _vformat
        # is bypassed; the bookkeeping is kept for that case.
        if (key == ''):
            if (not self.explicit):
                self.implicit += 1
                key = self.implicit
            else:
                raise ValueError('Cannot change from explicit index to implicit!')
        else:
            if isinstance(key, int):
                # Fix: record that explicit positional indexing was used.
                self.explicit = True
            if (self.implicit >= 0):
                raise ValueError('Cannot change from implicit to explicit indexing!')
        return super(RegexInputFormatter, self).get_value(key, args, kwargs)