code
stringlengths
281
23.7M
def _test_get(client, _id, expected_response=None, expected_status_code=status.HTTP_200_OK):
    """GET the detail endpoint for ``_id`` and assert status (and body, when given)."""
    url = ENDPOINT + str(_id) + '/'
    response = client.get(url)
    assert response.status_code == expected_status_code
    if expected_response is None:
        return
    payload = json.loads(response.content.decode('utf-8'))
    assert payload == expected_response
def parse_imports(filepath: str) -> List[ImportedItem]:
    """Parse *filepath* and return an ImportedItem for every top-level import."""
    with open(filepath, 'r') as handle:
        source = handle.read()

    found: List[ImportedItem] = []
    for node in ast.parse(source, filename=filepath).body:
        if isinstance(node, ast.Import):
            # Plain `import a, b` — no "from" component.
            found.extend(ImportedItem(alias.name, None) for alias in node.names)
        elif isinstance(node, ast.ImportFrom):
            # Encode relative imports as leading dots (node.level dots).
            module_name = '.' * (node.level or 0) + (node.module or '')
            found.extend(ImportedItem(module_name, alias.name) for alias in node.names)
    return found
class ESMasterRunner(TrainingRunner, ABC):
    """Training runner for the evolution-strategies master process.

    NOTE(review): the source contained bare ``(TrainingRunner)`` expressions where
    decorators are expected (most likely stripped ``@override(TrainingRunner)``
    markers) — they were no-op expressions and are removed here; confirm upstream.
    """

    # Number of entries in the shared noise table created in setup().
    shared_noise_table_size: int
    # Built lazily in setup(); excluded from the generated dataclass __init__.
    shared_noise: Optional[SharedNoiseTable] = dataclasses.field(default=None, init=False)

    def setup(self, cfg: DictConfig) -> None:
        """Build the shared noise table, the torch policy and the ES trainer."""
        super().setup(cfg)

        print(' Init Shared Noise Table ')
        self.shared_noise = SharedNoiseTable(count=self.shared_noise_table_size)

        # ES evaluates perturbed policies on CPU workers.
        torch_policy = TorchPolicy(networks=self._model_composer.policy.networks,
                                   distribution_mapper=self._model_composer.distribution_mapper,
                                   device='cpu')
        torch_policy.seed(self.maze_seeding.global_seed)

        # Optional policy wrapper (e.g. exploration); must remain a torch model.
        if self._cfg.algorithm.policy_wrapper:
            policy = Factory(Policy).instantiate(self._cfg.algorithm.policy_wrapper,
                                                 torch_policy=torch_policy)
            assert isinstance(policy, Policy) and isinstance(policy, TorchModel)
            torch_policy = policy

        print(' Trainer Setup ')
        self._trainer = ESTrainer(algorithm_config=cfg.algorithm,
                                  torch_policy=torch_policy,
                                  shared_noise=self.shared_noise,
                                  normalization_stats=self._normalization_statistics)

        # Resume training state from a previous run when an input dir is given.
        self._init_trainer_from_input_dir(trainer=self._trainer,
                                          state_dict_dump_file=self.state_dict_dump_file,
                                          input_dir=cfg.input_dir)

        self._model_selection = BestModelSelection(dump_file=self.state_dict_dump_file,
                                                   model=torch_policy,
                                                   dump_interval=self.dump_interval)

    def create_distributed_rollouts(self,
                                    env: Union[StructuredEnv, StructuredEnvSpacesMixin],
                                    shared_noise: SharedNoiseTable,
                                    agent_instance_seed: int) -> ESDistributedRollouts:
        """Hook for subclasses: build the distributed rollout generator.

        NOTE(review): this method had no body in the source (likely lost with a
        stripped ``@abstractmethod`` decorator); since the class derives from ABC
        a docstring-only body is restored — confirm upstream.
        """

    def run(self,
            n_epochs: Optional[int] = None,
            distributed_rollouts: Optional[ESDistributedRollouts] = None,
            model_selection: Optional[ModelSelectionBase] = None) -> None:
        """Run the ES training loop, falling back to configured defaults."""
        print(' Run Trainer ')
        env = self.env_factory()
        env.seed(self.maze_seeding.generate_env_instance_seed())
        self._trainer.train(
            n_epochs=self._cfg.algorithm.n_epochs if n_epochs is None else n_epochs,
            distributed_rollouts=(
                self.create_distributed_rollouts(
                    env=env,
                    shared_noise=self.shared_noise,
                    agent_instance_seed=self.maze_seeding.generate_agent_instance_seed())
                if distributed_rollouts is None else distributed_rollouts),
            model_selection=self._model_selection if model_selection is None else model_selection)
class OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Accessors for the wordcloud sonification default-instrument volume mapping.

    NOTE(review): each option appeared twice as a plain ``def`` in the source, so
    the second definition silently shadowed the first (only setters survived).
    The pairs match the usual @property/@setter layout of generated Options
    classes and are restored as such — confirm upstream.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestCommentedMapMerge():
    """Round-trip tests for YAML merge keys (<<: *anchor) in commented maps.

    NOTE(review): the interior spacing of the YAML literals may have been
    altered by whitespace collapsing in this copy — verify the indentation of
    the embedded documents against the original test file.
    """

    def test_in_operator(self):
        # Merged keys must be reachable both by lookup and by the `in` operator.
        data = round_trip_load('\n x: &base\n a: 1\n b: 2\n c: 3\n y:\n <<: *base\n k: 4\n l: 5\n ')
        assert (data['x']['a'] == 1)
        assert ('a' in data['x'])
        assert (data['y']['a'] == 1)
        assert ('a' in data['y'])

    def test_issue_60(self):
        # A map consisting solely of a merge key still exposes the merged items.
        data = round_trip_load('\n x: &base\n a: 1\n y:\n <<: *base\n ')
        assert (data['x']['a'] == 1)
        assert (data['y']['a'] == 1)
        # repr of ordereddict changed in Python 3.12.
        if (sys.version_info >= (3, 12)):
            assert (str(data['y']) == "ordereddict({'a': 1})")
        else:
            assert (str(data['y']) == "ordereddict([('a', 1)])")

    def test_issue_60_1(self):
        # Explicit keys precede merged keys in iteration order.
        data = round_trip_load('\n x: &base\n a: 1\n y:\n <<: *base\n b: 2\n ')
        assert (data['x']['a'] == 1)
        assert (data['y']['a'] == 1)
        if (sys.version_info >= (3, 12)):
            assert (str(data['y']) == "ordereddict({'b': 2, 'a': 1})")
        else:
            assert (str(data['y']) == "ordereddict([('b', 2), ('a', 1)])")
class HeapMax():
    """Array-backed binary max-heap with a fixed capacity.

    Uses 0-based indexing: children of i are 2*i+1 and 2*i+2.
    """

    def __init__(self, size):
        # nheap: number of occupied slots; heap is pre-allocated to capacity.
        self.nheap = 0
        self.maxheap = size
        self.heap = [0] * size

    def downheap(self):
        """Sift the root down until the max-heap property holds."""
        i = 0
        while True:
            left = (i << 1) + 1
            right = left + 1
            if left >= self.nheap:
                # No children at all — done.
                return
            if right >= self.nheap:
                # Only a left child: one final swap if it is larger.
                if self.heap[i] < self.heap[left]:
                    self.heap[i], self.heap[left] = self.heap[left], self.heap[i]
                return
            if self.heap[i] >= self.heap[left] and self.heap[i] >= self.heap[right]:
                return
            # Swap with the larger child and continue from there.
            if self.heap[left] > self.heap[right]:
                self.heap[i], self.heap[left] = self.heap[left], self.heap[i]
                i = left
            else:
                self.heap[i], self.heap[right] = self.heap[right], self.heap[i]
                i = right

    def get_features(self):
        """Drain the heap, returning all values in descending order."""
        drained = []
        while self.nheap > 0:
            drained.append(self.heap_extract_max())
        return drained

    def heap_extract_max(self):
        """Pop and return the maximum element; asserts the heap is non-empty."""
        assert self.nheap > 0
        top = self.heap[0]
        self.nheap -= 1
        # Move the last element to the root and restore the heap property.
        self.heap[0] = self.heap[self.nheap]
        self.downheap()
        return top

    def upheap(self):
        """Sift the most recently inserted element up to its position."""
        assert self.nheap > 0
        i = self.nheap - 1
        while i > 0:
            parent = (i - 1) >> 1
            if self.heap[parent] >= self.heap[i]:
                return
            self.heap[parent], self.heap[i] = self.heap[i], self.heap[parent]
            i = parent

    def heap_insert(self, v):
        """Insert *v*; asserts there is free capacity."""
        assert self.nheap < self.maxheap
        self.heap[self.nheap] = v
        self.nheap += 1
        self.upheap()
class WebFrontEnd():
    """Top-level wiring of the web front end: database, intercom, status and routes."""

    def __init__(self, db: (FrontendDatabase | None)=None, intercom=None, status_interface=None):
        # Version string exposed to the UI via FilterClass below.
        self.program_version = __VERSION__
        # NOTE(review): 'intercom' is invoked as a factory (intercom()), while 'db'
        # and 'status_interface' are used as ready instances — confirm the asymmetry
        # is intended by the callers.
        self.intercom = (InterComFrontEndBinding() if (intercom is None) else intercom())
        self.db = (FrontendDatabase() if (db is None) else db)
        self.status_interface = (RedisStatusInterface() if (status_interface is None) else status_interface)
        self._setup_app()
        logging.info('Web front end online')

    def _setup_app(self):
        # Create the Flask app and attach user authentication to it.
        self.app = create_app()
        (self.user_db, self.user_datastore) = add_flask_security_to_app(self.app)
        # Shared constructor arguments handed to every route collection.
        base_args = {'app': self.app, 'db': self.db, 'intercom': self.intercom, 'status': self.status_interface}
        AjaxRoutes(**base_args)
        AnalysisRoutes(**base_args)
        CompareRoutes(**base_args)
        DatabaseRoutes(**base_args)
        IORoutes(**base_args)
        MiscellaneousRoutes(**base_args)
        StatisticRoutes(**base_args)
        UserManagementRoutes(**base_args, user_db=self.user_db, user_db_interface=self.user_datastore)
        # REST base first — plugin routes hook into its API object.
        rest_base = RestBase(**base_args)
        PluginRoutes(**base_args, api=rest_base.api)
        # Registers jinja template filters on the app.
        FilterClass(self.app, self.program_version, self.db)
class ThresholdArgs():
    """Container for threshold-related command line options.

    NOTE(review): the original body was garbled — it called the mapping
    (``arguments([])``), assigned most values to dead locals, and re-read a
    key spliced with foreign text (``'--area-thratlas_settingseshold'``).
    Reconstructed to store each option on the instance; confirm the exact
    attribute set against callers.
    """

    def __init__(self, arguments):
        # *arguments* is a docopt-style mapping of option strings to values.
        self.area_threshold = arguments['--area-threshold']
        self.volume_distance = arguments['--volume-distance']
        self.min_threshold = arguments['--min-threshold']
        self.max_threshold = arguments['--max-threshold']
class TestShrinkIndex(): ('elasticsearch.Elasticsearch') ('asyncio.sleep') .asyncio async def test_shrink_index_with_shrink_node(self, sleep, es): es.indices.get = mock.AsyncMock(return_value={'src': {}}) es.cluster.health = mock.AsyncMock(return_value={'status': 'green', 'relocating_shards': 0}) es.indices.put_settings = mock.AsyncMock() es.indices.shrink = mock.AsyncMock() r = runner.ShrinkIndex() params = {'source-index': 'src', 'target-index': 'target', 'target-body': {'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0}}, 'shrink-node': 'rally-node-0'} (await r(es, params)) es.indices.put_settings.assert_awaited_once_with(index='src', body={'settings': {'index.routing.allocation.require._name': 'rally-node-0', 'index.blocks.write': 'true'}}, preserve_existing=True) es.cluster.health.assert_has_awaits([mock.call(index='src', params={'wait_for_no_relocating_shards': 'true'}), mock.call(index='target', params={'wait_for_no_relocating_shards': 'true'})]) es.indices.shrink.assert_awaited_once_with(index='src', target='target', body={'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0, 'index.routing.allocation.require._name': None, 'index.blocks.write': None}}) ('elasticsearch.Elasticsearch') ('asyncio.sleep') .asyncio async def test_shrink_index_derives_shrink_node(self, sleep, es): es.indices.get = mock.AsyncMock(return_value={'src': {}}) es.cluster.health = mock.AsyncMock(return_value={'status': 'green', 'relocating_shards': 0}) es.nodes.info = mock.AsyncMock(return_value={'_nodes': {'total': 3, 'successful': 3, 'failed': 0}, 'cluster_name': 'elasticsearch', 'nodes': {'lsM0-tKnQqKEGVw-OZU5og': {'name': 'node0', 'roles': ['master', 'data', 'ingest']}, 'kxM0-tKnQqKEGVw-OZU5og': {'name': 'node1', 'roles': ['master']}, 'yyM0-tKnQqKEGVw-OZU5og': {'name': 'node0', 'roles': ['ingest']}}}) es.indices.put_settings = mock.AsyncMock() es.indices.shrink = mock.AsyncMock() r = runner.ShrinkIndex() params = {'source-index': 'src', 
'target-index': 'target', 'target-body': {'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0}}} (await r(es, params)) es.indices.put_settings.assert_awaited_once_with(index='src', body={'settings': {'index.routing.allocation.require._name': 'node0', 'index.blocks.write': 'true'}}, preserve_existing=True) es.cluster.health.assert_has_awaits([mock.call(index='src', params={'wait_for_no_relocating_shards': 'true'}), mock.call(index='target', params={'wait_for_no_relocating_shards': 'true'})]) es.indices.shrink.assert_awaited_once_with(index='src', target='target', body={'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0, 'index.routing.allocation.require._name': None, 'index.blocks.write': None}}) ('elasticsearch.Elasticsearch') ('asyncio.sleep') .asyncio async def test_shrink_index_pattern_with_shrink_node(self, sleep, es): es.indices.get = mock.AsyncMock(return_value={'src1': {}, 'src2': {}, 'src-2020': {}}) es.cluster.health = mock.AsyncMock(return_value={'status': 'green', 'relocating_shards': 0}) es.indices.put_settings = mock.AsyncMock() es.indices.shrink = mock.AsyncMock() r = runner.ShrinkIndex() params = {'source-index': 'src*', 'target-index': 'target', 'target-body': {'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0}}, 'shrink-node': 'rally-node-0'} (await r(es, params)) es.indices.put_settings.assert_has_awaits([mock.call(index='src1', body={'settings': {'index.routing.allocation.require._name': 'rally-node-0', 'index.blocks.write': 'true'}}, preserve_existing=True), mock.call(index='src2', body={'settings': {'index.routing.allocation.require._name': 'rally-node-0', 'index.blocks.write': 'true'}}, preserve_existing=True), mock.call(index='src-2020', body={'settings': {'index.routing.allocation.require._name': 'rally-node-0', 'index.blocks.write': 'true'}}, preserve_existing=True)]) es.cluster.health.assert_has_awaits([mock.call(index='src1', params={'wait_for_no_relocating_shards': 'true'}), 
mock.call(index='target1', params={'wait_for_no_relocating_shards': 'true'}), mock.call(index='src2', params={'wait_for_no_relocating_shards': 'true'}), mock.call(index='target2', params={'wait_for_no_relocating_shards': 'true'}), mock.call(index='src-2020', params={'wait_for_no_relocating_shards': 'true'}), mock.call(index='target-2020', params={'wait_for_no_relocating_shards': 'true'})]) es.indices.shrink.assert_has_awaits([mock.call(index='src1', target='target1', body={'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0, 'index.routing.allocation.require._name': None, 'index.blocks.write': None}}), mock.call(index='src2', target='target2', body={'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0, 'index.routing.allocation.require._name': None, 'index.blocks.write': None}}), mock.call(index='src-2020', target='target-2020', body={'settings': {'index.number_of_replicas': 2, 'index.number_of_shards': 0, 'index.routing.allocation.require._name': None, 'index.blocks.write': None}})])
def extractDianilleBlogspotCom(item):
    """Map a 'Dianille blogspot' feed item to a release message, if possible."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no usable numbering, and skip previews outright.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None

    tag_mappings = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, series_name, tl_type in tag_mappings:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    # No known tag matched.
    return False
class Decoder():
    """Lazily builds a JS VM exposing bilibili's image 'decode' routine."""

    def __init__(self):
        # The JS VM; created on first load().
        self.vm = None

    def loaded(self):
        """Return True once load() has created the VM."""
        return bool(self.vm)

    def load(self, bili_js_url):
        """Fetch the bilibili webpack bundle and wrap it so decoding is callable.

        The shim scans the bundle's webpack module table for the factory whose
        source mentions 'Indexer', stubs the async loader so the raw payload is
        surfaced via a thrown marker object, and exposes 'exportDecode' to Python.
        """
        bili_js = grabhtml(bili_js_url)
        js = (('\n\t\tconst window = {};\n\t\tconst self = window;\n\t\t' + bili_js) + '\n\t\tlet decode;\n\t\tlet factory;\n\t\tlet webpack;\n\t\tfor (const key in window) {\n\t\t\tif (key.startsWith("webpack")) {\n\t\t\t\twebpack = window[key];\n\t\t\t}\n\t\t}\n\t\tfor (const fn of Object.values(webpack[0][1])) {\n\t\t if (fn.toString().includes(\'Indexer\')) {\n\t\t\tfactory = fn;\n\t\t\tbreak;\n\t\t }\n\t\t}\n\t\tconst _require = () => ({\n\t\t async loadAsync(data) {\n\t\t\tthrow {\n\t\t\t data,\n\t\t\t message: \'extract data\'\n\t\t\t};\n\t\t }\n\t\t});\n\t\t_require.d = (exports, o) => {\n\t\t const getValue = Object.values(o)[0];\n\t\t decode = getValue();\n\t\t};\n\n\t\tfactory({}, {}, _require);\n\n\t\tvar exportDecode = (seasonId, episodeId, data) => {\n\t\t return decode(seasonId, episodeId, (data))\n\t\t\t.catch(err => {\n\t\t\t\tif (err.message !== "extract data") throw err;\n\t\t\t\treturn Array.from(err.data);\n\t\t\t});\n\t\t};\n\t\t')
        self.vm = VM(js)
        self.vm.create()

    def decode(self, id, ep_id, data):
        """Run the JS decoder on raw bytes and return the decoded bytes."""
        return bytes(self.vm.call('exportDecode', id, ep_id, list(data)))
def instance_norm(x, num_filters):
    """Instance normalization over spatial axes with learned scale/shift (TF1 graph mode)."""
    eps = 0.001
    param_shape = [num_filters]
    scale = tf.Variable(tf.ones(param_shape), name='scale')
    shift = tf.Variable(tf.zeros(param_shape), name='shift')
    # Per-instance, per-channel statistics over height/width (axes 1 and 2).
    mean, var = tf.nn.moments(x, [1, 2], keep_dims=True)
    normalized = tf.div(tf.subtract(x, mean), tf.sqrt(tf.add(var, eps)))
    return scale * normalized + shift
def update_mix():
    """Rebuild the input-by-output mixing matrix from the patch configuration."""
    slope = patch.getfloat('scale', 'mix', default=1.0)
    shift = patch.getfloat('offset', 'mix', default=0.0)
    mix = np.zeros((input_nchans, output_nchans), dtype=float)
    for out_chan in range(output_nchans):
        # Channels are 1-based in the configuration ("mix1", "mix2", ...).
        chanmix = patch.getfloat('output', 'mix%d' % (out_chan + 1), multiple=True)
        for in_chan in range(input_nchans):
            mix[in_chan, out_chan] = EEGsynth.rescale(chanmix[in_chan], slope=slope, offset=shift)
    return mix
class MyIterator():
    """A minimal hand-rolled iterator yielding the items of a sequence in order."""

    def __init__(self, data):
        # Cursor into the underlying sequence.
        self.index = 0
        self.data = data

    def __iter__(self):
        # An iterator is its own iterable.
        return self

    def __next__(self):
        # Exhausted once the cursor passes the end of the sequence.
        if self.index >= len(self.data):
            raise StopIteration
        current = self.data[self.index]
        self.index += 1
        return current
def script_name_for_python_version(name, version, minor=False, default_number=True):
    """Return the version-suffixed script name for *name* under python *version*."""
    if not default_number:
        distro_defaults = settings.DEFAULT_PYTHON_VERSIONS[name_convertor.NameConvertor.distro]
        if version == distro_defaults[0]:
            # The distro's default python keeps the plain, unsuffixed name.
            return name
    if not minor:
        # Major-only suffix, e.g. "foo-2".
        return '{}-{}'.format(name, version[0])
    if len(version) > 1:
        # Dotted major.minor suffix, e.g. "foo-2.6".
        return '{}-{}'.format(name, '.'.join(version))
    # Single-digit version: defer to the RPM macro for the minor part.
    return '{}-%{{python{}_version}}'.format(name, version)
class SettingsDialog(QtWidgets.QDialog):
    """Modal dialog hosting the GPG Sync settings layout."""

    def __init__(self, common):
        super().__init__()
        self.c = common

        # Window chrome: fixed width, bounded height.
        self.setWindowTitle('GPG Sync Settings')
        self.setMinimumSize(425, 200)
        self.setMaximumSize(425, 400)

        # Embed the shared settings layout.
        self.settings = self.c.settings
        self.settings_layout = SettingsLayout(self.settings)

        root = QtWidgets.QVBoxLayout()
        root.addLayout(self.settings_layout)
        self.setLayout(root)
# NOTE(review): this bare expression looks like a stripped decorator — most
# likely '@compiles(CreateSearchFunctionSQL)' from sqlalchemy.ext.compiler;
# confirm against the original file.
(CreateSearchFunctionSQL)
def compile_create_search_function_sql(element, compiler):
    """Render the CREATE FUNCTION statement for the tsvector-refresh trigger."""
    return f'''CREATE FUNCTION {element.search_function_name}() RETURNS TRIGGER AS $$ BEGIN NEW.{element.tsvector_column.name} = {element.search_vector(compiler)}; RETURN NEW; END $$ LANGUAGE 'plpgsql'; '''
# NOTE(review): the leading ".parametrize(...)" in the source is a stripped
# decorator — restored as @pytest.mark.parametrize; confirm upstream.
@pytest.mark.parametrize(
    'calc_cls, start, ref_cycle, ref_coords',
    [
        (AnaPot, (0.667, 1.609, 0.0), 18, (1.941, 3.8543, 0.0)),
        (AnaPot3, (-0.36, 0.93, 0.0), 10, (-1.0, 0.0, 0.0)),
        (AnaPot4, (-0.5, 3.32, 0.0), 11, (-2.2102, 0.3297, 0.0)),
        (AnaPotCBM, (-0.32, 0.71, 0.0), 10, (-1.0, 0.0, 0.0)),
        (CerjanMiller, (-0.46, 1.48, 0.0), 10, (0.0, 0.0, 0.0)),
        (FourWellAnaPot, (1.45, 0.04, 0.0), 10, (1.1241, -1.4853, 0.0)),
        (LEPSBase, (1.31, 0.82, 0.0), 28, (0., 7., 0.0)),
        (MullerBrownPot, (-0.69, 0.55, 0.0), 12, (-0.05, 0.4667, 0.0)),
        (NFK, (2.0, -1.0, 0.0), 10, (2., -0., 0.0)),
        (Rosenbrock, (-1.0, 1.0, 0.0), 43, (1.0, 1.0, 0.0)),
        (VRIPot, (-0.125, 0, 0.0), 8, (-1.0, 0.0, 0.0)),
    ])
def test_rfoptimizer(calc_cls, start, ref_cycle, ref_coords):
    """RFOptimizer must converge to the reference minimum in the reference cycle count."""
    geom = calc_cls.get_geom(start)
    print('', calc_cls)

    opt_kwargs = {'thresh': 'gau_tight', 'dump': True, 'overachieve_factor': 4.0}
    opt = RFOptimizer(geom, **opt_kwargs)
    opt.run()

    assert opt.is_converged
    assert opt.cur_cycle == ref_cycle

    # Converged geometry must coincide with the known minimum.
    ref_coords = np.array(ref_coords)
    diff = ref_coords - geom.coords
    diff_norm = np.linalg.norm(diff)
    print(f' norm(diff)={diff_norm:.8f}')
    assert diff_norm < 6e-05
    print('\tFinal coords', geom.coords)
# NOTE(review): '_admin_required' here is a bare name where a decorator is
# expected — it looks like a stripped '@..._admin_required' decorator whose
# prefix was lost; confirm against the original file.
_admin_required
def BookingSendReceipt(request, location_slug, booking_id):
    """Admin view: (re)send the payment receipt email for a booking."""
    # Only POST is accepted; anything else is bounced to the 404 redirect.
    if (not (request.method == 'POST')):
        return HttpResponseRedirect('/404')
    location = get_object_or_404(Location, slug=location_slug)
    booking = Booking.objects.get(id=booking_id)
    if booking.is_paid():
        status = send_booking_receipt(booking)
        if (status is not False):
            messages.add_message(request, messages.INFO, 'The receipt was sent.')
        else:
            messages.add_message(request, messages.INFO, 'Hmm, there was a problem and the receipt was not sent. Please contact an administrator.')
    else:
        messages.add_message(request, messages.INFO, 'This booking has not been paid, so the receipt was not sent.')
    # Send the admin back to the page they came from (manage vs. detail view).
    if ('manage' in request.META.get('HTTP_REFERER')):
        return HttpResponseRedirect(reverse('booking_manage', args=(location.slug, booking_id)))
    else:
        return HttpResponseRedirect(reverse('booking_detail', args=(location.slug, booking_id)))
class OFPQueueProp(OFPQueuePropHeader):
    """Base class for queue properties; subclasses register by property id.

    NOTE(review): 'register_property' and 'parser' carried no decorators in the
    source although 'parser' takes 'cls'; restored as @staticmethod/@classmethod
    per the usual ofproto parser layout — confirm upstream.
    """

    # Registry mapping property id -> concrete subclass.
    _QUEUE_PROP_PROPERTIES = {}

    @staticmethod
    def register_property(property_, len_):
        """Class decorator: register *cls* under *property_* with wire length *len_*."""
        def _register_property(cls):
            cls.cls_property = property_
            cls.cls_len = len_
            OFPQueueProp._QUEUE_PROP_PROPERTIES[cls.cls_property] = cls
            return cls
        return _register_property

    def __init__(self):
        # Concrete subclasses supply cls_property/cls_len via register_property.
        cls = self.__class__
        super(OFPQueueProp, self).__init__(cls.cls_property, cls.cls_len)

    @classmethod
    def parser(cls, buf, offset):
        """Parse the property header and dispatch to the registered subclass parser."""
        (property_, len_) = struct.unpack_from(ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR, buf, offset)
        cls_ = cls._QUEUE_PROP_PROPERTIES.get(property_)
        offset += ofproto.OFP_QUEUE_PROP_HEADER_SIZE
        return cls_.parser(buf, offset)
def fill_state_test(filler: Dict[str, Any]) -> Dict[str, Dict[str, Any]]:
    """Normalize a state-test filler and compute per-network post-state hashes."""
    test_name = get_test_name(filler)
    test = filler[test_name]

    environment = normalize_environment(test['env'])
    pre_state = normalize_state(test['pre'])
    transaction_group = normalize_transaction_group(test['transaction'])

    post: Dict[int, List[Dict[str, str]]] = defaultdict(list)
    for expect in test['expect']:
        indexes = expect['indexes']
        networks = normalize_networks(expect['networks'])
        result = normalize_state(expect['result'])
        # Expected post state = pre state overlaid with the result diff.
        post_state = deep_merge(pre_state, result)
        for network in networks:
            state_class = STATE_CLASSES[network]
            root = calc_state_root(post_state, state_class)
            post[network].append({'hash': encode_hex(root), 'indexes': indexes})

    return {
        test_name: {
            'env': environment,
            'pre': pre_state,
            'transaction': transaction_group,
            'post': post,
        }
    }
class TestSessionValidation(OpenEventTestCase):
    """Validation tests for SessionSchema."""

    def test_date_db_populate(self):
        # Run inside a request context so DB-backed validation can execute.
        with self.app.test_request_context():
            session_schema = SessionSchema()
            SessionFactory()
            original_data = {'data': {'id': 1}}
            new_data = {}
            SessionSchema.validate_fields(session_schema, new_data, original_data)
def test_tensor_spatialcoordinate_interpolation(parentmesh, vertexcoords):
    """Interpolating 2*as_tensor([x]*gdim) at vertices reproduces doubled coordinates."""
    if parentmesh.name == 'immersedsphere':
        vertexcoords = immersed_sphere_vertexcoords(parentmesh, vertexcoords)
    vm = VertexOnlyMesh(parentmesh, vertexcoords, missing_points_behaviour=None)
    # Use the coordinates the vertex-only mesh actually kept.
    vertexcoords = vm.coordinates.dat.data_ro

    W = TensorFunctionSpace(vm, 'DG', 0)
    x = SpatialCoordinate(parentmesh)
    gdim = parentmesh.geometric_dimension()
    expr = 2 * as_tensor([x] * gdim)
    assert W.shape == expr.ufl_shape

    w_expr = interpolate(expr, W)
    expected = 2 * np.asarray([[vc] * gdim for vc in vertexcoords])
    if len(expected) == 0:
        # Empty vertex set: give the array the right trailing tensor shape anyway.
        expected = expected.reshape(vertexcoords.shape + (gdim,))
    assert np.allclose(w_expr.dat.data_ro.reshape(expected.shape), expected)
class KthLargest(object):
    """Tracks the k-th largest element of a stream using a size-k min-heap.

    The heap holds the k largest values seen so far; its root is the answer.
    """

    def __init__(self, k, nums):
        self.k = k
        self.hp = nums
        heapq.heapify(self.hp)
        # Shrink down to the k largest elements.
        while len(self.hp) > k:
            heapq.heappop(self.hp)

    def add(self, val):
        """Insert *val* and return the current k-th largest value."""
        heapq.heappush(self.hp, val)
        if len(self.hp) > self.k:
            # Discard the smallest to keep exactly k elements.
            heapq.heappop(self.hp)
        return self.hp[0]
class Plugin(plugin.PluginProto):
    """RPIEasy plugin P019: digital I/O through a PCF8574 I2C port expander.

    NOTE(review): reconstructed from whitespace-collapsed source; the binding of
    a few if/else branches and the exact trailing whitespace of some log strings
    could not be recovered with certainty — verify against the original file.
    """

    PLUGIN_ID = 19
    PLUGIN_NAME = 'Extra IO - PCF8574'
    PLUGIN_VALUENAME1 = 'State'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_I2C
        self.vtype = rpieGlobals.SENSOR_TYPE_SWITCH
        self.ports = 0
        self.readinprogress = 0
        self.valuecount = 1
        self.senddataoption = True
        self.timeroption = True
        self.timeroptional = True
        self.inverselogicoption = True
        self.recdataoption = True
        # PCF device handle and addressing; -1 means "not assigned yet".
        self.pcf = None
        self.i2ca = 0
        self.rpin = (- 1)
        self.i2cport = (- 1)
        self.timer100ms = False

    def plugin_init(self, enableplugin=None):
        """(Re)initialize the PCF device, interrupt callback and polling timers."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.decimals[0] = 0
        self.initialized = False
        if self.enabled:
            try:
                i2cport = gpios.HWPorts.geti2clist()
            except:
                i2cport = []
            if (len(i2cport) > 0):
                try:
                    pinnum = int(self.taskdevicepluginconfig[0])
                except:
                    pinnum = 0
                try:
                    # Resolve the logical port number to an I2C address + pin.
                    (i2ca, self.rpin) = lib_pcfrouter.get_pcf_pin_address(pinnum)
                    self.pcf = lib_pcfrouter.request_pcf_device(pinnum)
                except Exception as e:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('PCF device requesting failed: ' + str(e)))
                    self.pcf = None
                if ((self.pcf is None) or (self.pcf.initialized == False)):
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'PCF can not be initialized! ')
                else:
                    self.initialized = True
                    intok = False
                    try:
                        self.uservar[0] = self.pcf.readpin(self.rpin)
                        # Interrupt pin only makes sense for inputs (type != 2).
                        if ((int(self.taskdevicepin[0]) > 0) and (int(self.taskdevicepluginconfig[1]) != 2)):
                            self.pcf.setexternalint(int(self.taskdevicepin[0]))
                            self.pcf.setcallback(self.rpin, self.p019_handler)
                            intok = True
                    except Exception as e:
                        misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('PCF interrupt configuration failed:' + str(e)))
                        intok = False
                    if (int(self.taskdevicepluginconfig[1]) == 2):
                        # Output mode: drive the pin low initially.
                        self.pcf.writepin(self.rpin, 0)
                    if intok:
                        # Interrupts work — no polling needed.
                        self.timer100ms = False
                        misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'PCF 1/10s timer disabled')
                    elif (int(self.interval) == 0):
                        if (int(self.taskdevicepluginconfig[1]) != 2):
                            self.timer100ms = True
                            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'PCF 1/10s timer enabled')
                    else:
                        # NOTE(review): branch binding inferred — 1/s sync timer
                        # when an interval is configured; confirm upstream.
                        self.timer1s = True
                        misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'PCF 1/s timer enabled for sync')
                    try:
                        self.ports = str(self.taskdevicepluginconfig[0])
                    except:
                        self.ports = 0
        else:
            # Disabled: clear timers and detach the callback.
            self.ports = 0
            self.timer100ms = False
            self.timer1s = False
            self.pcf.setcallback(self.rpin, None)

    def plugin_exit(self):
        """Detach the interrupt callback before the task is torn down."""
        try:
            if (self.rpin > (- 1)):
                self.pcf.setcallback(self.rpin, None)
        except:
            pass
        plugin.PluginProto.plugin_exit(self)

    def webform_load(self):
        """Render the task configuration web form."""
        webserver.addFormNote('I2C Line is not selectable currently!')
        try:
            # Pre-fill the interrupt pin if the device already has one set.
            if self.pcf.externalintsetted:
                self.taskdevicepin[0] = self.pcf.extinta
        except Exception as e:
            pass
        webserver.addFormPinSelect('PCF interrupt', 'taskdevicepin0', self.taskdevicepin[0])
        webserver.addFormNote('Add one RPI INPUT-PULLUP pin to handle input changes immediately - not needed for interval input reading and output using')
        webserver.addFormNumericBox('Port', 'p019_pnum', self.taskdevicepluginconfig[0], 1, 128)
        webserver.addFormNote('First extender 1-8 (0x20), Second 9-16 (0x21)...')
        choice2 = self.taskdevicepluginconfig[1]
        options = ['Input', 'Output']
        optionvalues = [0, 2]
        webserver.addFormSelector('Type', 'p019_ptype', len(optionvalues), options, optionvalues, None, int(choice2))
        return True

    def webform_save(self, params):
        """Persist the submitted form values; reinitialize when pin/port changed."""
        p1 = self.taskdevicepin[0]
        p2 = self.taskdevicepluginconfig[0]
        par = webserver.arg('p019_ptype', params)
        try:
            self.taskdevicepluginconfig[1] = int(par)
        except:
            self.taskdevicepluginconfig[1] = 0
        par = webserver.arg('p019_pnum', params)
        try:
            self.taskdevicepluginconfig[0] = int(par)
        except:
            self.taskdevicepluginconfig[0] = 0
        try:
            self.taskdevicepin[0] = webserver.arg('taskdevicepin0', params)
        except:
            self.taskdevicepin[0] = (- 1)
        if ((p1 != self.taskdevicepin[0]) or (p2 != self.taskdevicepluginconfig[0])):
            self.plugin_init()
        return True

    def p019_handler(self, pin, val):
        """Interrupt callback: publish the new value when our pin changed."""
        if (pin == self.rpin):
            try:
                if (float(val) != float(self.uservar[0])):
                    self.set_value(1, val, True)
                    self._lastdataservetime = rpieTime.millis()
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, str(e))

    def timer_ten_per_second(self):
        """Polling fallback (10 Hz) when no interrupt pin is available."""
        if (self.initialized and self.enabled):
            try:
                val = self.pcf.readpin(self.rpin)
                if (float(val) != float(self.uservar[0])):
                    self.set_value(1, val, True)
                    self._lastdataservetime = rpieTime.millis()
            except:
                pass
        return self.timer100ms

    def timer_once_per_second(self):
        """1 Hz sync poll; delegates to the 10 Hz reader when that timer is off."""
        if (self.initialized and self.enabled):
            if (self.timer100ms == False):
                self.timer_ten_per_second()
        return self.timer1s

    def plugin_read(self):
        """Interval read: fetch the pin state and publish it."""
        result = False
        if (self.initialized and self.enabled and (self.readinprogress == 0)):
            self.readinprogress = 1
            try:
                result = self.pcf.readpin(self.rpin)
                self.set_value(1, result, True)
                self._lastdataservetime = rpieTime.millis()
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, str(e))
            self.readinprogress = 0
            result = True
        return result

    def plugin_write(self, cmd):
        """Handle 'pcfgpio', 'pcfpulse' and 'pcflongpulse' console/rule commands."""
        res = False
        cmdarr = cmd.split(',')
        cmdarr[0] = cmdarr[0].strip().lower()
        if (cmdarr[0] == 'pcfgpio'):
            # pcfgpio,<pin>,<0|1>: set an output pin directly.
            pin = (- 1)
            val = (- 1)
            try:
                pin = int(cmdarr[1].strip())
                (ti2ca, trpin) = lib_pcfrouter.get_pcf_pin_address(pin)
                val = int(cmdarr[2].strip())
            except:
                pin = (- 1)
                trpin = (- 1)
            if ((pin > (- 1)) and (val in [0, 1]) and (trpin > (- 1))):
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, ((('PCFGPIO' + str(pin)) + ' set to ') + str(val)))
                try:
                    tmcp = lib_pcfrouter.request_pcf_device(int(pin))
                    tmcp.writepin(trpin, val)
                    res = True
                    self.syncvalue(pin, val)
                except Exception as e:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ((('PCFGPIO' + str(pin)) + ': ') + str(e)))
            return res
        elif (cmdarr[0] == 'pcfpulse'):
            # pcfpulse,<pin>,<0|1>,<ms>: short blocking pulse, then invert back.
            pin = (- 1)
            val = (- 1)
            try:
                pin = int(cmdarr[1].strip())
                (ti2ca, trpin) = lib_pcfrouter.get_pcf_pin_address(pin)
                val = int(cmdarr[2].strip())
            except:
                pin = (- 1)
                trpin = (- 1)
            dur = 100
            try:
                dur = float(cmdarr[3].strip())
            except:
                dur = 100
            if ((pin > (- 1)) and (val in [0, 1]) and (trpin > (- 1))):
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, (('PCFGPIO' + str(pin)) + ': Pulse started'))
                try:
                    self.syncvalue(pin, val)
                    tmcp = lib_pcfrouter.request_pcf_device(int(pin))
                    tmcp.writepin(trpin, val)
                    s = float((dur / 1000))
                    time.sleep(s)
                    tmcp.writepin(trpin, (1 - val))
                    res = True
                    self.syncvalue(pin, (1 - val))
                except Exception as e:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ((('PCFGPIO' + str(pin)) + ': ') + str(e)))
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, (('PCFGPIO' + str(pin)) + ': Pulse ended'))
            return res
        elif (cmdarr[0] == 'pcflongpulse'):
            # pcflongpulse,<pin>,<0|1>,<s>: non-blocking pulse via system timer.
            pin = (- 1)
            val = (- 1)
            try:
                pin = int(cmdarr[1].strip())
                (ti2ca, trpin) = lib_pcfrouter.get_pcf_pin_address(pin)
                val = int(cmdarr[2].strip())
            except:
                pin = (- 1)
                trpin = (- 1)
            dur = 2
            try:
                dur = float(cmdarr[3].strip())
            except:
                dur = 2
            if ((pin > (- 1)) and (val in [0, 1]) and (trpin > (- 1))):
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, (('PCFGPIO' + str(pin)) + ': LongPulse started'))
                try:
                    tmcp = lib_pcfrouter.request_pcf_device(int(pin))
                    tmcp.writepin(trpin, val)
                    res = True
                except Exception as e:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ((('PCFGPIO' + str(pin)) + ': ') + str(e)))
                # Schedule the inverse write after *dur* seconds.
                rarr = [int(pin), (1 - val)]
                rpieTime.addsystemtimer(dur, self.p019_timercb, rarr)
            return res

    def p019_timercb(self, stimerid, ioarray):
        """System-timer callback ending a long pulse: write the stored inverse value."""
        if (ioarray[0] > (- 1)):
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, (('PCFGPIO' + str(ioarray[0])) + ': LongPulse ended'))
            try:
                tmcp = lib_pcfrouter.request_pcf_device(int(ioarray[0]))
                (ti2ca, trpin) = lib_pcfrouter.get_pcf_pin_address(int(ioarray[0]))
                tmcp.writepin(trpin, int(ioarray[1]))
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ((('PCFGPIO' + str(ioarray[0])) + ': ') + str(e)))

    def syncvalue(self, epin, value):
        """Mirror an externally written pin value into the matching P019 task."""
        for x in range(0, len(Settings.Tasks)):
            if (Settings.Tasks[x] and (type(Settings.Tasks[x]) is not bool)):
                if Settings.Tasks[x].enabled:
                    try:
                        if ((Settings.Tasks[x].pluginid == 19) and (int(Settings.Tasks[x].taskdevicepluginconfig[0]) == epin)):
                            Settings.Tasks[x].uservar[0] = value
                            if (Settings.Tasks[x].valuenames[0] != ''):
                                commands.rulesProcessing(((((Settings.Tasks[x].taskname + '#') + Settings.Tasks[x].valuenames[0]) + '=') + str(value)), rpieGlobals.RULE_USER)
                            Settings.Tasks[x].plugin_senddata()
                            break
                    except:
                        pass

    def set_value(self, valuenum, value, publish=True, suserssi=(- 1), susebattery=(- 1)):
        """Store/publish the value; in output mode also drive the physical pin."""
        if self.initialized:
            if (self.taskdevicepluginconfig[1] == 2):
                # Accept 'on'/'1' (case-insensitive) as logical high.
                if (('on' in str(value).lower()) or (str(value) == '1')):
                    val = 1
                else:
                    val = 0
                try:
                    self.pcf.writepin(self.rpin, val)
                except Exception as e:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('PCF output error ' + str(e)))
        plugin.PluginProto.set_value(self, valuenum, value, publish, suserssi, susebattery)

    def plugin_receivedata(self, data):
        """Apply a value received from a remote node without republishing it."""
        if ((len(data) > 0) and self.initialized and self.enabled):
            if (('on' in str(data[0]).lower()) or (str(data[0]) == '1')):
                val = 1
            else:
                val = 0
            self.set_value(1, val, False)
# NOTE(review): the leading ".usefixtures('use_tmpdir')" in the source is a
# stripped decorator — restored as @pytest.mark.usefixtures; confirm upstream.
@pytest.mark.usefixtures('use_tmpdir')
def test_queue_option_max_running_non_int():
    """A non-integer MAX_RUNNING queue option must produce a located config error."""
    assert_that_config_leads_to_error(
        config_file_contents=dedent('\n NUM_REALIZATIONS 1\n QUEUE_SYSTEM LOCAL\n QUEUE_OPTION LOCAL MAX_RUNNING s\n '),
        expected_error=ExpectedErrorInfo(
            line=4,
            column=32,
            end_column=33,
            match="'s' for MAX_RUNNING is not a valid positive integer.",
        ),
    )
class ModelTransformer():
    """Base class for converting a third-party model into a serialized form.

    Subclasses implement transform() and model_type().
    """

    def __init__(self,
                 model: Any,
                 feature_names: Sequence[str],
                 classification_labels: Optional[Sequence[str]] = None,
                 classification_weights: Optional[Sequence[float]] = None):
        # Configuration consumed by subclasses during transform().
        self._model = model
        self._feature_names = feature_names
        self._classification_labels = classification_labels
        self._classification_weights = classification_weights

    def transform(self) -> ModelSerializer:
        """Build and return the serialized model; must be overridden."""
        raise NotImplementedError()

    def model_type(self) -> str:
        """Report the model type identifier; must be overridden."""
        raise NotImplementedError()
# NOTE(review): the leading ".parametrize(...)" in the source is a stripped
# decorator — restored as @pytest.mark.parametrize; confirm upstream.
@pytest.mark.parametrize('node,description', [(OBJ, '```TypeScript\n\nobject: { // Object Description\n number: number // Number Description\n text: string // Text Description\n selection: "blue" // Selection Description\n selection2: Array<"blue" | "red"> // Selection2 Description\n bool: boolean // Bool Description\n}\n```\n')])
def test_typescript_description(node: Object, description: str) -> None:
    """TypeScriptDescriptor.describe must render the expected markdown code block."""
    assert TypeScriptDescriptor().describe(node) == description
class FileAttachment(Attachment):
    """Represents a file sent as a message attachment.

    NOTE(review): a bare '_default' expression preceded this class in the source —
    it looks like a stripped class decorator (name ending in '_default', e.g.
    '@attrs_default'); removed as a no-op here, confirm upstream.
    """

    # URL where the file can be downloaded.
    url = attr.ib(None, type=Optional[str])
    # File size in bytes.
    size = attr.ib(None, type=Optional[int])
    # Original file name.
    name = attr.ib(None, type=Optional[str])
    # Whether the service flagged the file as potentially harmful.
    is_malicious = attr.ib(None, type=Optional[bool])

    @classmethod
    def _from_graphql(cls, data, size=None):
        # Alternate constructor from a GraphQL payload. The 'cls' first parameter
        # indicates the stripped decorator here was '@classmethod' — restored.
        return cls(
            url=data.get('url'),
            size=size,
            name=data.get('filename'),
            is_malicious=data.get('is_malicious'),
            id=data.get('message_file_fbid'),
        )
# NOTE(review): the leading "(stop_max_attempt_number=5, wait_fixed=5000)" in the
# source is a stripped decorator; the kwargs match the 'retrying' library's
# @retry — restored as such, confirm upstream.
@retry(stop_max_attempt_number=5, wait_fixed=5000)
def get_image_build(image_name, version, image_offset, with_timestamp):
    """Fetch one build record of an image version from the images API.

    Raises the API-specific exception for any non-200 response; retried up to
    five times with a fixed five-second wait by the decorator.
    """
    url = ((((base_images_url + '/') + image_name) + '/versions/') + version) + '/builds'
    response = get_session().get(
        url,
        params={'image_offset': image_offset, 'with_timestamp': with_timestamp},
        timeout=3.5)
    if response.status_code != 200:
        raise get_exception(response)
    return response.json()
class TestDataFrameCount(TestData):
    """count() parity checks between the pandas and eland frames."""

    # Columns used for the flights comparison.
    filter_data = ['AvgTicketPrice', 'Cancelled', 'dayOfWeek', 'timestamp', 'DestCountry']

    def test_count(self, df):
        # Smoke test: count() must run on the ecommerce dataset.
        df.load_dataset('ecommerce')
        df.count()

    def test_count_flights(self):
        pd_flights = self.pd_flights().filter(self.filter_data)
        ed_flights = self.ed_flights().filter(self.filter_data)
        assert_series_equal(pd_flights.count(), ed_flights.count())
def _validate_parameter(code: ExampleFunction, name: str, index: int, allow_async: bool=True) -> None:
    """Validate that *code*'s parameter at *index* is named *name*.

    Raises ValueError when the parameter is missing or misnamed, and
    RuntimeError when *code* is a coroutine function but async is forbidden.
    """
    parameters = list(inspect.signature(code).parameters.keys())
    name_matches = bool(parameters) and (parameters[index] == name)
    if not name_matches:
        raise ValueError(f"Function must receive parameter #{(index + 1)} named '{name}', but given function has parameters: {parameters}.")
    if (not allow_async) and inspect.iscoroutinefunction(code):
        raise RuntimeError(f'TestSlide DSL context function `{code.__name__}` can not be async!')
class FBPrintAsCurl(fb.FBCommand):
    """Chisel command: print an NSURLRequest as an equivalent `curl` command."""

    def name(self):
        return 'pcurl'

    def description(self):
        return 'Print the NSURLRequest (HTTP) as curl command.'

    def options(self):
        return [fb.FBCommandArgument(short='-e', long='--embed-data', arg='embed', boolean=True, default=False, help='Embed request data as base64.')]

    def args(self):
        return [fb.FBCommandArgument(arg='request', type='NSURLRequest*/NSMutableURLRequest*', help='The request to convert to the curl command.')]

    def generateTmpFilePath(self):
        # Unique-ish temp path derived from the current Core Foundation timestamp.
        return '/tmp/curl_data_{}'.format(fb.evaluateExpression('(NSTimeInterval)[NSDate timeIntervalSinceReferenceDate]'))

    def run(self, arguments, options):
        request = fb.evaluateInputExpression(arguments[0])
        # NOTE: variable name keeps the original's "Sring" typo.
        HTTPHeaderSring = ''
        HTTPMethod = fb.evaluateExpressionValue('(id)[{} HTTPMethod]'.format(request)).GetObjectDescription()
        URL = fb.evaluateExpressionValue('(id)[{} URL]'.format(request)).GetObjectDescription()
        timeout = fb.evaluateExpression('(NSTimeInterval)[{} timeoutInterval]'.format(request))
        # Collect every HTTP header key/value pair into `-H "k: v"` flags.
        HTTPHeaders = fb.evaluateObjectExpression('(id)[{} allHTTPHeaderFields]'.format(request))
        HTTPHeadersCount = fb.evaluateIntegerExpression('[{} count]'.format(HTTPHeaders))
        allHTTPKeys = fb.evaluateObjectExpression('[{} allKeys]'.format(HTTPHeaders))
        for index in range(0, HTTPHeadersCount):
            key = fb.evaluateObjectExpression('[{} objectAtIndex:{}]'.format(allHTTPKeys, index))
            keyDescription = fb.evaluateExpressionValue('(id){}'.format(key)).GetObjectDescription()
            value = fb.evaluateExpressionValue('(id)[(id){} objectForKey:{}]'.format(HTTPHeaders, key)).GetObjectDescription()
            if (len(HTTPHeaderSring) > 0):
                HTTPHeaderSring += ' '
            HTTPHeaderSring += '-H "{}: {}"'.format(keyDescription, value)
        HTTPData = fb.evaluateObjectExpression('[{} HTTPBody]'.format(request))
        dataFile = None
        dataAsString = None
        if (fb.evaluateIntegerExpression('[{} length]'.format(HTTPData)) > 0):
            # The request has a body: either embed it as base64 or dump it to a file.
            if options.embed:
                if fb.evaluateIntegerExpression('[{} respondsToSelector:(base64EncodedStringWithOptions:)]'.format(HTTPData)):
                    dataAsString = fb.evaluateExpressionValue('(id)[(id){} base64EncodedStringWithOptions:0]'.format(HTTPData)).GetObjectDescription()
                else:
                    print("This version of OS doesn't supports base64 data encoding")
                    return False
            elif (not runtimeHelpers.isIOSDevice()):
                # Simulator / macOS: write the body to a shared temp file.
                dataFile = self.generateTmpFilePath()
                if (not fb.evaluateBooleanExpression('(BOOL)[{} writeToFile:"{}" atomically:NO]'.format(HTTPData, dataFile))):
                    print("Can't write data to file {}".format(dataFile))
                    return False
            else:
                print('HTTPBody data for iOS Device is supported only with "--embed-data" flag')
                return False
        commandString = ''
        if ((dataAsString is not None) and (len(dataAsString) > 0)):
            # Decode the embedded base64 into a temp file before invoking curl.
            dataFile = self.generateTmpFilePath()
            commandString += 'echo "{}" | base64 -D -o "{}" && '.format(dataAsString, dataFile)
        commandString += 'curl -X {} --connect-timeout {}'.format(HTTPMethod, timeout)
        if (len(HTTPHeaderSring) > 0):
            commandString += (' ' + HTTPHeaderSring)
        if (dataFile is not None):
            commandString += ' --data-binary "{}"'.format(dataFile)
        commandString += ' "{}"'.format(URL)
        print(commandString)
class ButtonsWidget(QWidget):
    """Widget that overlays a set of tool buttons on an edit widget.

    Button placement depends on `buttons_mode`:
    - INTERNAL: buttons drawn inside the widget, top-right, flowing left.
    - TOOLBAR_RIGHT: buttons stacked in a right-hand margin column.
    - TOOLBAR_BOTTOM: buttons laid out in a bottom margin row.
    """

    buttons_mode = ButtonsMode.INTERNAL
    # Extra stylesheet text appended after the computed margin rule.
    qt_css_extra = ''

    def __init__(self):
        super().__init__()
        # Fixed: addButton() appends/inserts, so this is a mutable list
        # (was annotated as the read-only Iterable).
        self.buttons: list = []

    def resizeButtons(self) -> None:
        """Reposition all managed buttons for the current geometry."""
        frame_width = self.style().pixelMetric(QStyle.PM_DefaultFrameWidth)
        if (self.buttons_mode == ButtonsMode.INTERNAL):
            # Stack right-to-left inside the top-right corner.
            x = (self.rect().right() - frame_width)
            y = (self.rect().top() + frame_width)
            for button in self.buttons:
                sz = button.sizeHint()
                x -= sz.width()
                button.move(x, y)
        elif (self.buttons_mode == ButtonsMode.TOOLBAR_RIGHT):
            # Stack top-to-bottom in the right margin.
            x = (self.rect().right() - frame_width)
            y = (self.rect().top() - frame_width)
            for (i, button) in enumerate(self.buttons):
                sz = button.sizeHint()
                if (i > 0):
                    y += sz.height()
                button.move((x - sz.width()), y)
        elif (self.buttons_mode == ButtonsMode.TOOLBAR_BOTTOM):
            # Lay out left-to-right in the bottom margin.
            x = (self.rect().left() - frame_width)
            y = (self.rect().bottom() + frame_width)
            for (i, button) in enumerate(self.buttons):
                sz = button.sizeHint()
                if (i > 0):
                    x += sz.width()
                button.move(x, (y - sz.height()))

    def addButton(self, icon_name: str, on_click: Callable[[], None], tooltip: str, insert: bool=False) -> QAbstractButton:
        """Create, register and return a tool button.

        Fixed: the method has always returned the created button (it is used
        by addCopyButton), but was incorrectly annotated as returning None.
        """
        button = QToolButton(self)
        button.setIcon(read_QIcon(icon_name))
        if (self.buttons_mode == ButtonsMode.INTERNAL):
            button.setStyleSheet('QToolButton { border: none; hover {border: 1px} pressed {border: 1px} padding: 0px; }')
        button.setVisible(True)
        button.setToolTip(tooltip)
        button.setCursor(QCursor(Qt.PointingHandCursor))
        button.clicked.connect(on_click)
        if insert:
            self.buttons.insert(0, button)
        else:
            self.buttons.append(button)
        # Reserve margin space for toolbar-style buttons via the stylesheet.
        frame_width = self.style().pixelMetric(QStyle.PM_DefaultFrameWidth)
        if (self.buttons_mode == ButtonsMode.TOOLBAR_RIGHT):
            self.button_padding = (max((button.sizeHint().width() for button in self.buttons)) + 4)
            self.setStyleSheet(((((self.qt_css_class + ' { margin-right: ') + str(self.button_padding)) + 'px; }') + self.qt_css_extra))
        elif (self.buttons_mode == ButtonsMode.TOOLBAR_BOTTOM):
            self.button_padding = (max((button.sizeHint().height() for button in self.buttons)) + frame_width)
            self.setStyleSheet(((((self.qt_css_class + ' { margin-bottom: ') + str(self.button_padding)) + 'px; }') + self.qt_css_extra))
        return button

    def addCopyButton(self, app, tooltipText: Optional[str]=None) -> QAbstractButton:
        """Add a button that copies this widget's text() to the clipboard."""
        if (tooltipText is None):
            tooltipText = _('Copy to clipboard')
        self.app = app
        return self.addButton('icons8-copy-to-clipboard-32.png', self._on_copy, tooltipText)

    def _on_copy(self) -> None:
        # Copy the widget's current text and show transient feedback.
        self.app.clipboard().setText(self.text())
        QToolTip.showText(QCursor.pos(), _('Text copied to clipboard'), self)
def dipole_magnetic(coordinates, dipoles, magnetic_moments, parallel=True, dtype='float64', disable_checks=False):
    """Compute the (easting, northing, upward) magnetic field of point dipoles.

    Observation points in ``coordinates`` are broadcast to a common shape;
    the returned components are reshaped back to that shape.
    """
    # Broadcast observation-point coordinates to a common shape.
    cast = np.broadcast(*coordinates[:3])
    (b_e, b_n, b_u) = tuple((np.zeros(cast.size, dtype=dtype) for _ in range(3)))
    # Flatten inputs for the jit-compiled kernels.
    coordinates = tuple((np.atleast_1d(i).ravel() for i in coordinates[:3]))
    dipoles = tuple((np.atleast_1d(i).ravel() for i in dipoles[:3]))
    magnetic_moments = np.atleast_2d(magnetic_moments)
    if (not disable_checks):
        _check_dipoles_and_magnetic_moments(dipoles, magnetic_moments)
    # Accumulate each dipole's contribution into b_e/b_n/b_u in place.
    if parallel:
        _jit_dipole_magnetic_field_cartesian_parallel(coordinates, dipoles, magnetic_moments, b_e, b_n, b_u)
    else:
        _jit_dipole_magnetic_field_cartesian_serial(coordinates, dipoles, magnetic_moments, b_e, b_n, b_u)
    # NOTE(review): multiplying by .0 zeroes every component, so the function
    # always returns zeros.  This looks like a corrupted unit-conversion
    # factor (e.g. 1e9 for T -> nT) — confirm against the upstream source.
    b_e *= .0
    b_n *= .0
    b_u *= .0
    return (b_e.reshape(cast.shape), b_n.reshape(cast.shape), b_u.reshape(cast.shape))
class multi_level_roi_align(roi_ops_base):
    """Multi-level (FPN-style) ROI-align op over feature maps p2..p5."""

    def __init__(self, num_rois, pooled_size, sampling_ratio, spatial_scale, position_sensitive, continuous_coordinate, im_shape) -> None:
        super().__init__(num_rois, pooled_size, sampling_ratio, spatial_scale, position_sensitive, continuous_coordinate)
        self._attrs['op'] = 'multi_level_roi_align'
        # Input image shape, needed to map ROIs onto pyramid levels.
        self._attrs['im_shape'] = im_shape

    def _infer_shape(self, x: List[int]):
        # Render the shape-inference template and exec the generated snippet
        # to compute the output dims.  `exec` runs internally generated
        # template code, not external input.
        eval_func = self.shape_eval_template.render(indent='', dtype='', div='//', x_dim0=x[0], x_dim1=x[1], x_dim2=x[2], x_dim3=x[3], num_rois=(self._attrs['num_rois'] * x[0]), pooled_size=self._attrs['pooled_size'], position_sensitive=self._attrs['position_sensitive'])
        output = {}
        exec(eval_func, output)
        # The snippet defines NO/HO/WO/CO in its namespace.
        return [int(output['NO']), int(output['HO']), int(output['WO']), int(output['CO'])]

    # NOTE(review): annotated as List[Tensor] but a single Tensor is
    # returned — confirm the intended annotation.
    def __call__(self, p2: Tensor, p3: Tensor, p4: Tensor, p5: Tensor, rois: Tensor) -> List[Tensor]:
        self._attrs['inputs'] = [p2, p3, p4, p5, rois]
        # Shape inference is driven off the finest level (p2).
        x = p2
        self._set_depth()
        self._extract_exec_path(x)
        output_shape = self._infer_shapes(x)
        output = Tensor(output_shape, src_ops={self}, dtype=x._attrs['dtype'])
        self._attrs['outputs'] = [output]
        return output

    def _get_op_attributes(self):
        # Extend the base attributes with this op's im_shape.
        attr = super()._get_op_attributes()
        attr['im_shape'] = self._attrs['im_shape']
        return attr
# NOTE(review): the decorator below was mangled by extraction — it is most
# likely `@pytest.mark.usefixtures('migrate_db')`; confirm upstream.
.usefixtures('migrate_db')
class CandidatesTestCase(common.BaseTestCase):
    """Integration tests checking candidate counts agree across API views."""

    def setUp(self):
        super().setUp()
        self.longMessage = True
        self.maxDiff = None
        # Push a Flask request context so rest.api.url_for works inside tests.
        self.request_context = rest.app.test_request_context()
        self.request_context.push()
        self.connection = db.engine.connect()

    def _response(self, qry):
        # GET the query, assert 200 + non-empty body + API version, and
        # return the parsed JSON document.
        response = self.app.get(qry)
        self.assertEqual(response.status_code, 200)
        result = json.loads(codecs.decode(response.data))
        self.assertNotEqual(result, [], 'Empty response!')
        self.assertEqual(result['api_version'], __API_VERSION__)
        return result

    def _results(self, qry):
        # Shortcut returning only the 'results' payload of a response.
        response = self._response(qry)
        return response['results']

    def test_candidate_counts_house(self):
        # Seed three house candidates for MD district 01 in the 2020 cycle.
        cand_valid_fec_yr_data = [{'cand_valid_yr_id': 1, 'cand_id': 'H', 'fec_election_yr': 2020, 'cand_election_yr': 2020, 'cand_status': 'A', 'cand_office': 'H', 'cand_office_st': 'MD', 'cand_office_district': '01', 'date_entered': 'now()'}, {'cand_valid_yr_id': 2, 'cand_id': 'H', 'fec_election_yr': 2020, 'cand_election_yr': 2020, 'cand_status': 'A', 'cand_office': 'H', 'cand_office_st': 'MD', 'cand_office_district': '01', 'date_entered': 'now()'}, {'cand_valid_yr_id': 3, 'cand_id': 'H', 'fec_election_yr': 2020, 'cand_election_yr': 2020, 'cand_status': 'A', 'cand_office': 'H', 'cand_office_st': 'MD', 'cand_office_district': '01', 'date_entered': 'now()'}]
        election_year = 2020
        self.create_cand_valid(cand_valid_fec_yr_data)
        # Link the candidates to their principal committees.
        cand_cmte_linkage_data = [{'linkage_id': 2, 'cand_id': 'H', 'fec_election_yr': 2020, 'cand_election_yr': 2020, 'cmte_id': '2', 'cmte_count_cand_yr': 1, 'cmte_tp': 'H', 'cmte_dsgn': 'P', 'linkage_type': 'P', 'date_entered': 'now()'}, {'linkage_id': 4, 'cand_id': 'H', 'fec_election_yr': 2020, 'cand_election_yr': 2020, 'cmte_id': '3', 'cmte_count_cand_yr': 1, 'cmte_tp': 'H', 'cmte_dsgn': 'P', 'linkage_type': 'P', 'date_entered': 'now()'}, {'linkage_id': 6, 'cand_id': 'H', 'fec_election_yr': 2020, 'cand_election_yr': 2020, 'cmte_id': '3', 'cmte_count_cand_yr': 1, 'cmte_tp': 'H', 'cmte_dsgn': 'P', 'linkage_type': 'P', 'date_entered': 'now()'}]
        self.create_cand_cmte_linkage(cand_cmte_linkage_data)
        # Rebuild materialized views so the API sees the seeded rows.
        manage.refresh_materialized(concurrent=False)
        # Count rows directly in the source table for comparison.
        sql_extract = ('SELECT * from disclosure.cand_valid_fec_yr ' + 'WHERE cand_election_yr in ({}, {})'.format((election_year - 1), election_year))
        results_tab = self.connection.execute(sql_extract).fetchall()
        candidate_params = {'election_year': election_year, 'cycle': election_year, 'district': '01', 'state': 'MD'}
        election_params = {'cycle': election_year, 'election_full': True, 'district': '01', 'state': 'MD'}
        total_params = {'cycle': election_year, 'election_full': True, 'district': '01', 'state': 'MD', 'election_year': election_year}
        candidates_api = self._results(rest.api.url_for(CandidateList, **candidate_params))
        candidates_totals_api = self._results(rest.api.url_for(TotalsCandidateView, **total_params))
        elections_api = self._results(rest.api.url_for(ElectionView, office='house', **election_params))
        # All four data sources must agree on the number of candidates.
        assert (len(results_tab) == len(candidates_api) == len(candidates_totals_api) == len(elections_api))

    def create_cand_valid(self, candidate_data):
        # Bulk insert rows into disclosure.cand_valid_fec_yr.
        sql_insert = 'INSERT INTO disclosure.cand_valid_fec_yr (cand_valid_yr_id, cand_id, fec_election_yr, cand_election_yr, cand_status, cand_office, cand_office_st, cand_office_district, date_entered) VALUES (%(cand_valid_yr_id)s, %(cand_id)s, %(fec_election_yr)s, %(cand_election_yr)s, %(cand_status)s, %(cand_office)s, %(cand_office_st)s, %(cand_office_district)s, %(date_entered)s)'
        self.connection.execute(sql_insert, candidate_data)

    def create_cand_cmte_linkage(self, linkage_data):
        # Bulk insert rows into disclosure.cand_cmte_linkage.
        sql_insert = 'INSERT INTO disclosure.cand_cmte_linkage (linkage_id, cand_id, fec_election_yr, cand_election_yr, cmte_id, cmte_count_cand_yr, cmte_tp, cmte_dsgn, linkage_type, date_entered) VALUES (%(linkage_id)s, %(cand_id)s, %(fec_election_yr)s, %(cand_election_yr)s, %(cmte_id)s, %(cmte_count_cand_yr)s, %(cmte_tp)s, %(cmte_dsgn)s, %(linkage_type)s, %(date_entered)s)'
        self.connection.execute(sql_insert, linkage_data)
# NOTE(review): the decorator below was mangled by extraction — given the
# injected `mock_click_echo` argument it is most likely
# `@mock.patch('click.echo')`; confirm against the original file.
('click.echo')
class TestNothingToUpgrade(AEATestCaseEmpty):
    """`aea upgrade` on an already up-to-date project should be a no-op."""

    def test_nothing_to_upgrade(self, mock_click_echo):
        agent_config = self.load_agent_config(self.agent_name)
        result = self.run_cli_command('upgrade', cwd=self._get_cwd())
        assert (result.exit_code == 0)
        # Verify the expected progress messages were echoed.
        mock_click_echo.assert_any_call('Starting project upgrade...')
        mock_click_echo.assert_any_call(f"Checking if there is a newer remote version of agent package '{agent_config.public_id}'...")
        mock_click_echo.assert_any_call('Package not found, continuing with normal upgrade.')
        mock_click_echo.assert_any_call('Everything is already up to date!')
class SystemStatus():
    """Aggregates per-key StatusInfo records for system health reporting."""

    def __init__(self) -> None:
        # Lazily populated mapping from status key to its StatusInfo record.
        self.status: Dict[(str, StatusInfo)] = {}

    def failure(self, key: str, message: str) -> None:
        """Record a failure message under *key*."""
        self.info_for_key(key).failure(message)

    def OK(self, key: str, message: str) -> None:
        """Record a success message under *key*."""
        self.info_for_key(key).OK(message)

    def info_for_key(self, key) -> StatusInfo:
        """Return the StatusInfo for *key*, creating it on first access."""
        info = self.status.get(key)
        if info is None:
            info = StatusInfo()
            self.status[key] = info
        return info

    def to_dict(self) -> Dict[(str, Dict[(str, Union[(bool, List[Tuple[(bool, str)]])])])]:
        """Serialize each StatusInfo via its own to_dict()."""
        return {key: info.to_dict() for (key, info) in self.status.items()}
class ValidatedTuple(BaseTuple):
    """A Tuple trait with an optional custom validation hook.

    Metadata:
        fvalidate: callable applied to the validated tuple; a falsy result
            rejects the value.
        fvalidate_info: short text describing the custom validation, shown
            in error messages.
    """

    def __init__(self, *types, **metadata):
        # Ensure the custom-validation metadata always exists.
        metadata.setdefault('fvalidate', None)
        metadata.setdefault('fvalidate_info', '')
        super().__init__(*types, **metadata)

    def validate(self, object, name, value):
        """Run the base tuple validation, then the custom fvalidate hook."""
        values = super().validate(object, name, value)
        passes = (self.fvalidate is None) or self.fvalidate(values)
        if passes:
            return values
        self.error(object, name, value)

    def full_info(self, object, name, value):
        """Describe the expected tuple shape for error messages."""
        types_info = ', '.join(t.full_info(object, name, value) for t in self.types)
        if self.fvalidate_info is not None:
            suffix = ': {0}'.format(self.fvalidate_info)
        else:
            suffix = ''
        return 'a tuple of the form: ({0}) that passes custom validation{1}'.format(types_info, suffix)
def check_event_user_ticket_holders(order, data, element):
    """Forbid updates to an order's event, user or ticket_holders fields."""
    if element in ['event', 'user']:
        # The submitted id must match the order's current related object.
        if data[element] != str(getattr(order, element, None).id):
            raise ForbiddenError({'pointer': f'data/{element}'}, f'You cannot update {element} of an order')
    if element == 'ticket_holders':
        current_holders = [str(holder.id) for holder in order.ticket_holders]
        # Changing the holder list is only allowed for updatable fields.
        if (data[element] != current_holders) and (element not in get_updatable_fields()):
            raise ForbiddenError({'pointer': f'data/{element}'}, f'You cannot update {element} of an order')
class MultiNetworkIPForm(SerializerForm):
    """Form for bulk-deleting IP addresses of a network (DC GUI)."""

    _api_call = net_ip_list
    template = 'gui/dc/network_ips_form.html'
    # Hidden field carrying the list of selected IP addresses.
    ips = ArrayField(required=True, widget=forms.HiddenInput())

    def __init__(self, request, net, ip, *args, **kwargs):
        self.net = net
        super(MultiNetworkIPForm, self).__init__(request, ip, *args, **kwargs)

    # NOTE(review): no `self` parameter and no @staticmethod decorator is
    # visible — the decorator may have been stripped by extraction; confirm.
    def api_call_args(net_name):
        return (net_name,)

    def get_action_message(self):
        """Return a pluralized success message for the delete action."""
        assert (self.action == 'delete'), 'Unknown action'
        count = len(self.cleaned_data.get('ips', ()))
        return (ungettext_lazy('IP address was successfully deleted', '%(count)d IP addresses were successfully deleted', count) % {'count': count})
class TraitsTool(BaseTool):
    """Tool that opens a Traits editor when a plot component is double-clicked."""

    # This tool draws nothing and is invisible in the layout.
    draw_mode = 'none'
    visible = False
    # Component classes (besides Containers) that respond to the edit gesture.
    classes = List([PlotAxis, ColorBar])
    # Optional mapping from component class to a custom Traits view.
    views = Dict
    # Pointer event that triggers editing.
    event = Str('left_dclick')

    def _dispatch_stateful_event(self, event, suffix):
        # Only react to the configured event type.
        if (suffix != self.event):
            return
        x = event.x
        y = event.y
        # Find the first nested component under the cursor.
        candidates = get_nested_components(self.component, ([Container] + self.classes))
        item = None
        for (candidate, offset) in candidates:
            if candidate.is_in((x - offset[0]), (y - offset[1])):
                item = candidate
                break
        if (item is not None):
            # Make ourselves the active tool while the modal editor is up.
            self.component.active_tool = self
            if (item.__class__ in self.views):
                item.edit_traits(kind='livemodal', view=self.views[item.__class__], parent=event.window.control)
            else:
                item.edit_traits(kind='livemodal', parent=event.window.control)
            event.handled = True
            self.component.active_tool = None
            item.request_redraw()
class LocalBinaryPath(BinaryPath):
    """Path to a binary located inside a local executables folder."""

    def __init__(self, exe_folder: str, binary_info: BinaryInfo) -> None:
        self.exe_folder: str = exe_folder
        # Fall back to the last path segment of the package when no explicit
        # binary name is provided.
        name = binary_info.binary
        if not name:
            name = binary_info.package.rsplit('/')[(- 1)]
        self.binary: str = name

    def _stringify(self) -> str:
        """Concatenate folder and binary name."""
        return f'{self.exe_folder}{self.binary}'
class OptionPlotoptionsParetoSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Generated options wrapper for sonification tremolo-depth mapping.

    NOTE(review): each getter/setter pair below shares one name — the
    `@property` / `@<name>.setter` decorators were most likely stripped by
    extraction; confirm against the generated original.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the decorator below was mangled by extraction — most likely
# `@flaky(max_runs=3)`; confirm against the original file.
(max_runs=3)
def test_miner_set_extra(web3_empty, wait_for_block):
    """geth miner.set_extra must change extraData of newly mined blocks."""
    web3 = web3_empty
    initial_extra = decode_hex(web3.eth.get_block(web3.eth.block_number)['extraData'])
    # Payload is exactly 32 bytes, as the name says.
    new_extra_data = b'-this-is-32-bytes-of-extra-data-'
    assert (initial_extra != new_extra_data)
    web3.geth.miner.set_extra(new_extra_data)
    # Poll (up to 60 s) until a block carrying the new extra data appears.
    with Timeout(60) as timeout:
        while True:
            extra_data = decode_hex(web3.eth.get_block(web3.eth.block_number)['extraData'])
            if (extra_data == new_extra_data):
                break
            timeout.sleep(random.random())
    after_extra = decode_hex(web3.eth.get_block(web3.eth.block_number)['extraData'])
    assert (after_extra == new_extra_data)
class Logger():
    """Logger with message suppression, once-only logging and structured
    warning capture tied to a nested model hierarchy."""

    # Messages already emitted with log_once=True (shared across instances).
    _static_cache = set()

    def __init__(self):
        self.handlers = {}
        # When True, entering the logger as a context manager opens a
        # suppression window that counts repeats instead of emitting them.
        self.suppression = True
        self._counts = None
        # Stack of capture frames (one per nested begin_capture call).
        self._stack = None
        self._capture = False
        self._captured_warnings = []

    def set_capture(self, capture: bool):
        # Enable/disable structured warning capture.
        self._capture = capture

    def captured_warnings(self):
        # Return warnings captured so far and reset the buffer.
        captured_warnings = self._captured_warnings
        self._captured_warnings = []
        return captured_warnings

    def __enter__(self):
        # Open a suppression window (only if enabled and none is open yet).
        if (self.suppression and (self._counts is None)):
            self._counts = {}
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Close the suppression window and emit a one-line summary of the
        # messages that were swallowed during it.
        if (self._counts is not None):
            total = sum((v for v in self._counts.values()))
            if (total > 0):
                max_level = max((k for (k, v) in self._counts.items() if (v > 0)))
                counts = [f'{v} {_level_name[k]}' for (k, v) in self._counts.items() if (v > 0)]
            self._counts = None
            if (total > 0):
                noun = (' messages.' if (total > 1) else ' message.')
                # Detach the capture stack so the summary itself is not captured.
                stack = self._stack
                self._stack = None
                self.log(max_level, (('Suppressed ' + ', '.join(counts)) + noun))
                self._stack = stack
        return False

    def begin_capture(self):
        # Open a new capture frame for the model about to be validated.
        if (not self._capture):
            return
        stack_item = {'messages': [], 'children': {}}
        if self._stack:
            self._stack.append(stack_item)
        else:
            self._stack = [stack_item]

    def end_capture(self, model):
        # Close the current frame; either attach it to its parent frame, or —
        # at the root — turn captured warnings into location-tagged entries.
        if (not self._stack):
            return
        stack_item = self._stack.pop()
        if (len(self._stack) == 0):
            self._stack = None
        if ((len(stack_item['messages']) > 0) or (len(stack_item['children']) > 0)):
            stack_item['type'] = model.__class__.__name__
            model_fields = model.get_submodels_by_hash()
            # Remember through which fields each child model is reachable.
            for (child_hash, child_dict) in stack_item['children'].items():
                child_dict['parent_fields'] = model_fields.get(child_hash, [])
            if (self._stack is None):
                self._parse_warning_capture(current_loc=[], stack_item=stack_item)
            else:
                hash_ = hash(model)
                self._stack[(- 1)]['children'][hash_] = stack_item

    def _parse_warning_capture(self, current_loc, stack_item):
        # Recursively walk the capture tree, expanding each parent field into
        # a concrete location path for every captured WARNING.
        if ('parent_fields' in stack_item):
            for field in stack_item['parent_fields']:
                if isinstance(field, tuple):
                    new_loc = (current_loc + list(field))
                else:
                    new_loc = (current_loc + [field])
                for (level, msg, custom_loc) in stack_item['messages']:
                    if (level == 'WARNING'):
                        self._captured_warnings.append({'loc': (new_loc + custom_loc), 'msg': msg})
                for child_stack in stack_item['children'].values():
                    self._parse_warning_capture(current_loc=new_loc, stack_item=child_stack)
        else:
            for (level, msg, custom_loc) in stack_item['messages']:
                if (level == 'WARNING'):
                    self._captured_warnings.append({'loc': (current_loc + custom_loc), 'msg': msg})
            for child_stack in stack_item['children'].values():
                self._parse_warning_capture(current_loc=current_loc, stack_item=child_stack)

    def _log(self, level: int, level_name: str, message: str, *args, log_once: bool=False, custom_loc: List=None, capture: bool=True) -> None:
        """Compose one message (printf-style % args) and dispatch it."""
        if (len(args) > 0):
            try:
                composed_message = (str(message) % args)
            except Exception as e:
                # Fall back to showing the raw template when % formatting fails.
                composed_message = f'''{message} % {args} {e}'''
        else:
            composed_message = str(message)
        if (self._stack and capture):
            # Record into the innermost open capture frame.
            if (custom_loc is None):
                custom_loc = []
            self._stack[(- 1)]['messages'].append((level_name, composed_message, custom_loc))
        if log_once:
            # Emit at most once per unique message template, process-wide.
            if (message in self._static_cache):
                return
            self._static_cache.add(message)
        if (self._counts is not None):
            # Suppression window: let the first message through (and start
            # counting), swallow and count all subsequent ones.
            if (len(self._counts) > 0):
                self._counts[level] = (1 + self._counts.get(level, 0))
                return
            self._counts[level] = 0
        for handler in self.handlers.values():
            handler.handle(level, level_name, composed_message)

    def log(self, level: LogValue, message: str, *args, log_once: bool=False) -> None:
        """Log at a level given either by name or by numeric value."""
        if isinstance(level, str):
            level_name = level
            level = _get_level_int(level)
        else:
            level_name = _level_name.get(level, 'unknown')
        self._log(level, level_name, message, *args, log_once=log_once)

    def debug(self, message: str, *args, log_once: bool=False) -> None:
        """Log at DEBUG level."""
        self._log(_level_value['DEBUG'], 'DEBUG', message, *args, log_once=log_once)

    def info(self, message: str, *args, log_once: bool=False) -> None:
        """Log at INFO level."""
        self._log(_level_value['INFO'], 'INFO', message, *args, log_once=log_once)

    def warning(self, message: str, *args, log_once: bool=False, custom_loc: List=None, capture: bool=True) -> None:
        """Log at WARNING level, optionally tagged with a custom location."""
        self._log(_level_value['WARNING'], 'WARNING', message, *args, log_once=log_once, custom_loc=custom_loc, capture=capture)

    def error(self, message: str, *args, log_once: bool=False) -> None:
        """Log at ERROR level."""
        self._log(_level_value['ERROR'], 'ERROR', message, *args, log_once=log_once)

    def critical(self, message: str, *args, log_once: bool=False) -> None:
        """Log at CRITICAL level."""
        self._log(_level_value['CRITICAL'], 'CRITICAL', message, *args, log_once=log_once)
class VideoUploader():
    """Consumes (event, video) pairs from the upload queue, streams each video
    to the rclone destination and records the backup in the database."""

    def __init__(self, protect: ProtectApiClient, upload_queue: VideoQueue, rclone_destination: str, rclone_args: str, file_structure_format: str, db: aiosqlite.Connection, color_logging: bool):
        self._protect: ProtectApiClient = protect
        self.upload_queue: VideoQueue = upload_queue
        self._rclone_destination: str = rclone_destination
        self._rclone_args: str = rclone_args
        self._file_structure_format: str = file_structure_format
        self._db: aiosqlite.Connection = db
        # Event currently being uploaded (None when idle).
        self.current_event = None
        self.base_logger = logging.getLogger(__name__)
        setup_event_logger(self.base_logger, color_logging)
        self.logger = logging.LoggerAdapter(self.base_logger, {'event': ''})

    async def start(self):
        """Run forever: pop queued videos, upload them, record the backup."""
        self.logger.info('Starting Uploader')
        while True:
            try:
                (event, video) = (await self.upload_queue.get())
                self.current_event = event
                # Tag every log line with the event id while processing it.
                self.logger = logging.LoggerAdapter(self.base_logger, {'event': f' [{event.id}]'})
                self.logger.info(f'Uploading event: {event.id}')
                self.logger.debug(f' Remaining Upload Queue: {self.upload_queue.qsize_files()} ({human_readable_size(self.upload_queue.qsize())})')
                destination = (await self._generate_file_path(event))
                self.logger.debug(f' Destination: {destination}')
                try:
                    (await self._upload_video(video, destination, self._rclone_args))
                    (await self._update_database(event, destination))
                    self.logger.debug('Uploaded')
                except SubprocessException:
                    self.logger.error(f" Failed to upload file: '{destination}'")
                self.current_event = None
            except Exception as e:
                # Never let one bad event kill the upload loop.
                self.logger.error(f'Unexpected exception occurred, abandoning event {event.id}:', exc_info=e)

    async def _upload_video(self, video: bytes, destination: pathlib.Path, rclone_args: str):
        """Stream the video bytes into `rclone rcat` at *destination*.

        Raises SubprocessException when rclone exits non-zero.
        """
        (returncode, stdout, stderr) = (await run_command(f'rclone rcat -vv {rclone_args} "{destination}"', video))
        if (returncode != 0):
            raise SubprocessException(stdout, stderr, returncode)

    async def _update_database(self, event: Event, destination: str):
        """Record the uploaded event and its backup location.

        Fixed: values are now bound as query parameters instead of being
        f-string-interpolated into the SQL text, so event fields containing
        quotes can no longer break (or inject into) the statements.
        Timestamps are still stored as strings, matching the previous rows.
        """
        assert isinstance(event.start, datetime)
        assert isinstance(event.end, datetime)
        (await self._db.execute('INSERT INTO events VALUES (?, ?, ?, ?, ?)', (event.id, event.type.value, event.camera_id, str(event.start.timestamp()), str(event.end.timestamp()))))
        # destination is "<remote>:<path>".
        (remote, file_path) = str(destination).split(':')
        (await self._db.execute('INSERT INTO backups VALUES (?, ?, ?)', (event.id, remote, file_path)))
        (await self._db.commit())

    async def _generate_file_path(self, event: Event) -> pathlib.Path:
        """Render the configured file-structure format for *event*."""
        assert isinstance(event.camera_id, str)
        assert isinstance(event.start, datetime)
        assert isinstance(event.end, datetime)
        format_context = {'event': event, 'duration_seconds': (event.end - event.start).total_seconds(), 'detection_type': (f"{event.type.value} ({' '.join(event.smart_detect_types)})" if event.smart_detect_types else f'{event.type.value}'), 'camera_name': (await get_camera_name(self._protect, event.camera_id))}
        file_path = self._file_structure_format.format(**format_context)
        # Strip characters that are unsafe in remote file paths.
        file_path = re.sub('[^\\w\\-_\\.\\(\\)/ ]', '', file_path)
        return pathlib.Path(f'{self._rclone_destination}/{file_path}')
def _build_token_prices(coingecko_price_data, token_address) -> List[Price]:
    """Convert a CoinGecko 'prices' time series into Price records.

    Each series entry is a [unix_timestamp_in_ms, usd_price] pair.
    """
    return [
        Price(
            timestamp=datetime.fromtimestamp((entry[0] / 1000)),
            usd_price=entry[1],
            token_address=token_address,
        )
        for entry in coingecko_price_data['prices']
    ]
class ACEScc(sRGB):
    """The ACEScc color space (log-encoded, based on ACEScg)."""

    BASE = 'acescg'
    NAME = 'acescc'
    SERIALIZE = ('--acescc',)
    WHITE = (0.32168, 0.33767)
    # All three channels share the same bounded [CC_MIN, CC_MAX] range.
    CHANNELS = tuple(Channel(name, CC_MIN, CC_MAX, bound=True, nans=CC_MIN) for name in ('r', 'g', 'b'))
    DYNAMIC_RANGE = 'hdr'

    def to_base(self, coords: Vector) -> Vector:
        """Decode ACEScc values to the ACEScg base space."""
        return acescc_to_acescg(coords)

    def from_base(self, coords: Vector) -> Vector:
        """Encode ACEScg base-space values to ACEScc."""
        return acescg_to_acescc(coords)
# NOTE(review): several decorators in this generated class were mangled by
# extraction: the bare `_property` lines are most likely `@cached_property`
# and `_js_args_to_python_args` is most likely
# `@convert_js_args_to_python_args` from the generated client runtime —
# confirm against the code-generator output.
class RelationshipMemberWafRule(ModelNormal):
    """Generated OpenAPI model: relationship member referencing a WAF rule."""

    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Lazily import referenced models to avoid import cycles.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        lazy_import()
        return {'type': (TypeWafRule,), 'id': (str,)}

    _property
    def discriminator():
        return None

    attribute_map = {'type': 'type', 'id': 'id'}
    read_only_vars = {'id'}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data into an instance (read-only attrs allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct an instance from kwargs (read-only attrs rejected)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only vars may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def extractMagnogartenBlogspotCom(item):
    """Map a parsed feed item to a release message, or reject it."""
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info as well as preview posts.
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # Known tags mapped to (series name, translation type).
    for (tagname, name, tl_type) in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # No recognized tag: not a tracked release.
    return False
class DBStorageConversationItemAdapter(StorageItemAdapter[(StorageConversation, ChatHistoryEntity)]):
    """Adapter between StorageConversation objects and ChatHistoryEntity rows."""

    def to_storage_format(self, item: StorageConversation) -> ChatHistoryEntity:
        """Serialize a conversation to a database row.

        Messages are embedded in the row only in legacy mode (when they are
        not stored independently).
        """
        message_ids = ','.join(item.message_ids)
        messages = None
        if ((not item.save_message_independent) and item.messages):
            # Legacy layout: store the whole conversation as JSON in the row.
            messages = _conversation_to_dict(item)
        return ChatHistoryEntity(conv_uid=item.conv_uid, chat_mode=item.chat_mode, summary=(item.summary or item.get_latest_user_message().content), user_name=item.user_name, messages=messages, message_ids=message_ids, sys_code=item.sys_code)

    def from_storage_format(self, model: ChatHistoryEntity) -> StorageConversation:
        """Deserialize a row, supporting both the legacy (embedded messages)
        and the independent-message layouts."""
        message_ids = (model.message_ids.split(',') if model.message_ids else [])
        # Legacy rows embed conversations (with their messages) as JSON.
        old_conversations: List[Dict] = (json.loads(model.messages) if model.messages else [])
        old_messages = []
        save_message_independent = True
        if old_conversations:
            # Flatten all embedded messages; their presence marks legacy mode.
            old_messages_dict = []
            for old_conversation in old_conversations:
                old_messages_dict.extend((old_conversation['messages'] if ('messages' in old_conversation) else []))
            save_message_independent = False
            old_messages: List[BaseMessage] = _messages_from_dict(old_messages_dict)
        return StorageConversation(conv_uid=model.conv_uid, chat_mode=model.chat_mode, summary=model.summary, user_name=model.user_name, message_ids=message_ids, sys_code=model.sys_code, save_message_independent=save_message_independent, messages=old_messages)

    def get_query_for_identifier(self, storage_format: Type[ChatHistoryEntity], resource_id: ConversationIdentifier, **kwargs):
        """Build a SQLAlchemy query selecting the row for *resource_id*.

        Requires a ``session`` keyword argument.
        """
        session: Session = kwargs.get('session')
        if (session is None):
            raise Exception('session is None')
        return session.query(ChatHistoryEntity).filter((ChatHistoryEntity.conv_uid == resource_id.conv_uid))
class OptionSeriesHistogramSonificationContexttracksMapping(Options):
    """Generated accessors for histogram sonification context-track mappings.

    BUGFIX: the recovered source defined ``text`` twice (getter and setter)
    with no decorators, so the setter silently shadowed the getter, and every
    sub-option accessor was a plain method despite following the library's
    ``@property`` getter/setter convention.  Decorators restored here.
    """

    @property
    def frequency(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionSeriesHistogramSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesHistogramSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesHistogramSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesHistogramSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesHistogramSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionSeriesHistogramSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesHistogramSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesHistogramSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionSeriesHistogramSonificationContexttracksMappingRate)

    @property
    def text(self):
        """Text description of the track, read from the configuration."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionSeriesHistogramSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesHistogramSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionSeriesHistogramSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionSeriesHistogramSonificationContexttracksMappingVolume)
def consolidate_data_for_date(date):
    """Merge every downloaded CSV shard for *date* into one sorted prescribing file."""
    shard_pattern = '{}*.csv.gz'.format(local_storage_prefix_for_date(date))
    input_files = glob.glob(shard_pattern)
    target_file = get_prescribing_filename(date)
    temp_file = get_temp_filename(target_file)
    logger.info('Consolidating %s data files into %s', len(input_files), target_file)
    # Merge into a temp file first so a crash never leaves a half-written target,
    # then atomically move it into place.
    sort_and_merge_gzipped_csv_files(
        input_files, temp_file, ('bnf_code', 'practice', 'month'))
    os.rename(temp_file, target_file)
def flatten(inputs: List[Optional[typing.Union[(bn.BMGNode, List[bn.BMGNode])]]]) -> List[bn.BMGNode]:
    """Flatten a mixed list of nodes and node-lists, dropping None entries."""
    parents = []
    for entry in inputs:
        if entry is None:
            # None placeholders carry no parent node.
            continue
        if isinstance(entry, List):
            parents.extend(entry)
        else:
            parents.append(entry)
    return parents
def check_repo_clean(root: Path, main: DecoratedMain):
    """Abort via fatal() unless the git repository is clean.

    Dirty files living under the grid package directory are tolerated; any
    other dirty file is reported.
    """
    out = run_command(['git', 'status', '--porcelain'])
    filtered = []
    grid_name = main.dora.grid_package
    if grid_name is None:
        grid_name = main.package + '.grids'
    spec = importlib.util.find_spec(grid_name)
    grid_path: tp.Optional[Path] = None
    if spec is not None:
        assert spec.origin is not None
        grid_path = Path(spec.origin).resolve().parent
    for line in out.split('\n'):
        if not line:
            continue
        parts = shlex.split(line)
        paths: tp.List[str] = []
        if len(parts) == 2:
            # Ordinary entry: "<status> <path>".
            paths.append(parts[1])
        elif len(parts) == 4:
            # Rename-style entry; both sides are checked.
            assert parts[3] == '->'
            paths += [parts[1], parts[2]]
        else:
            # BUGFIX: the original `assert 'Invalid parts', parts` asserted a
            # truthy string literal and could never fire, silently ignoring
            # malformed status lines.  Fail loudly instead.
            raise ValueError(f'Invalid parts: {parts}')
        line_clean = True
        for path in paths:
            if grid_path is None:
                line_clean = False
                break
            rpath = (root / path).resolve()
            try:
                # relative_to() raises when rpath is outside the grid package.
                rpath.relative_to(grid_path)
            except ValueError:
                line_clean = False
        if not line_clean:
            filtered.append(line)
    if filtered:
        files = '\n'.join(filtered)
        fatal(f'''Repository is not clean! The following files should be commited or git ignored: {files}''')
def _discover_test_functions_in_sample_code(sample: pathlib.Path) -> List[str]:
    """Scan a sample source file for function definitions named ``test<N>``."""
    # Matches e.g. "void test3(...)": return type, space, testN, open paren.
    pattern = re.compile('\\w+ (?P<test_name>test\\d+)\\(.*\\)')
    names: List[str] = []
    with sample.open('r', encoding='utf-8') as handle:
        for line in handle:
            found = pattern.match(line)
            if found is not None:
                names.append(found.group('test_name'))
    return names
def check_terminal_encoding():
    """Warn on stderr when stdout is a TTY but stream encodings are undetermined.

    gitinspector may emit non-ASCII output; an unset encoding could then crash.
    """
    # BUGFIX: compare to None with `is`, not `==` (PEP 8 idiom).
    if sys.stdout.isatty() and ((sys.stdout.encoding is None) or (sys.stdin.encoding is None)):
        print(_("WARNING: The terminal encoding is not correctly configured. gitinspector might malfunction. The encoding can be configured with the environment variable 'PYTHONIOENCODING'."), file=sys.stderr)
def get_stats(paths):
    # Collect per-file cyclomatic-complexity and raw (SLOC) metrics via radon.
    cc = CCHarvester(paths, cc_config)
    raw = RawHarvester(paths, cc_config)
    cc.run()
    raw.run()
    # NOTE(review): header lists 'Median CC' and '#Intercepts', but the data
    # below stores the median under 'Med CC' and never fills '#Intercepts' —
    # confirm which naming downstream consumers expect.
    header = ['Filename', 'SLOC', '#Functions', '#Intercepts', 'Max CC', 'Ave CC', 'Median CC', 'Min CC']
    data = {}
    for file_data in cc.results:
        (filename, cc_results) = file_data
        # Only methods are counted (objects exposing is_method == True).
        complexity = [x.complexity for x in cc_results if (hasattr(x, 'is_method') and x.is_method)]
        if (len(complexity) > 0):
            print('Getting Complexity for:', filename)
            data[filename] = {}
            data[filename]['Filename'] = filename
            data[filename]['Max CC'] = max(complexity)
            data[filename]['Min CC'] = min(complexity)
            data[filename]['Med CC'] = np.median(complexity)
            data[filename]['Ave CC'] = np.mean(complexity)
            data[filename]['#Functions'] = len(complexity)
        else:
            print('Skipping ', filename)
    for file_data in raw.results:
        (filename, results) = file_data
        # Attach SLOC only to files that already have complexity entries.
        if (filename in data):
            data[filename]['SLOC'] = results['sloc']
        else:
            print('Skipping ', filename)
    return data
class LedgerDialogues(ContractApiDialogues):
    """Dialogue collection in which this agent always plays the LEDGER role."""

    def __init__(self, self_address: Address) -> None:
        """Initialize dialogues tracked for *self_address*."""

        def _ledger_role(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # Every contract-api dialogue is entered as the ledger side.
            return ContractApiDialogue.Role.LEDGER

        ContractApiDialogues.__init__(
            self,
            self_address=self_address,
            role_from_first_message=_ledger_role,
            dialogue_class=LedgerDialogue,
        )
def additionals(utils):
    # NOTE(review): takes a `utils` argument and yields a context object —
    # this looks like a pytest fixture whose @pytest.fixture decorator was
    # lost during extraction; confirm against the upstream test suite.
    # A bare lambda is (ab)used here as a cheap attribute bag.
    obj = (lambda : None)
    obj.maxDiff = None
    obj.emptyAnalyserName = 'empty'
    obj.WHITELIST = ['sel1/an1', 'sel1/an2', 'sel2']
    obj.sel1 = 'sel1'
    obj.sel2 = 'sel2'
    obj.sel1_elements = ['el1', 'el2']
    obj.sel2_elements = ['el4', 'el5', 'el6']
    # Create two scaffolded selectors; sel1 also gets two analysers.
    utils.scaffold_empty(obj.sel1, elements=obj.sel1_elements, analysers=['an1', 'an2'])
    utils.scaffold_empty(obj.sel2, elements=obj.sel2_elements)
    # Remove one analyser dir so tests cover a partially-populated layout.
    os.rmdir(utils.get_element_path(obj.sel1, 'el1', analyser='an2'))
    obj.config = {'elements_in': obj.WHITELIST, 'dev': True}
    obj.emptyAnalyser = EmptyAnalyser(obj.config, obj.emptyAnalyserName, storage=LocalStorage(folder=utils.TEMP_ELEMENT_DIR))
    utils.setup()
    # Yield the context to the test, then tear everything down.
    (yield obj)
    utils.cleanup()
class TestTimezoneField():
    """Validation tests for TimezoneField (known vs. unknown zone names)."""

    class TimezoneForm(Form):
        timezone = TimezoneField()

    # BUGFIX: the decorators below were truncated to bare `.parametrize(...)`
    # expressions (a syntax error); restored as pytest parametrizations.
    # Assumes the module imports pytest — confirm at top of file.
    @pytest.mark.parametrize('timezone', ['Europe/Nantes', 'US/Paris'])
    def test_invalid(self, timezone: str):
        """Unknown zone names must fail validation with exactly one error."""
        form = TestTimezoneField.TimezoneForm(FormData({'timezone': timezone}))
        assert (form.validate() is False)
        assert (len(form.timezone.errors) == 1)

    @pytest.mark.parametrize('timezone', ['Europe/Paris', 'US/Eastern'])
    def test_valid(self, timezone: str):
        """Real zone names validate and round-trip through .data."""
        form = TestTimezoneField.TimezoneForm(FormData({'timezone': timezone}))
        assert (form.validate() is True)
        assert (form.timezone.data == timezone)
@_abort.register_cause_code
@_error.register_cause_code
class cause_missing_param(cause):
    """SCTP 'Missing Mandatory Parameter' error cause.

    BUGFIX: the two ``register_cause_code`` registrations and the
    ``cause_code``/``parser`` definitions lost their ``@`` decorators in the
    recovered source — both methods take ``cls`` and are restored as
    classmethods, and the class is re-registered with the abort/error chunks.
    """
    _PACK_STR = '!HHI'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    @classmethod
    def cause_code(cls):
        return CCODE_MISSING_PARAM

    def __init__(self, types=None, num=0, length=0):
        super(cause_missing_param, self).__init__(length)
        types = types or []
        assert isinstance(types, list)
        for one in types:
            assert isinstance(one, int)
        self.types = types
        self.num = num

    @classmethod
    def parser(cls, buf):
        (_, length, num) = struct.unpack_from(cls._PACK_STR, buf)
        types = []
        offset = cls._MIN_LEN
        # Each missing parameter type is a 16-bit value following the header.
        for count in range(num):
            offset = cls._MIN_LEN + struct.calcsize('!H') * count
            (one,) = struct.unpack_from('!H', buf, offset)
            types.append(one)
        return cls(types, num, length)

    def serialize(self):
        buf = bytearray(struct.pack(self._PACK_STR, self.cause_code(), self.length, self.num))
        for one in self.types:
            buf.extend(struct.pack('!H', one))
        # Fill in num/length retroactively when the caller left them zero.
        if 0 == self.num:
            self.num = len(self.types)
            struct.pack_into('!I', buf, 4, self.num)
        if 0 == self.length:
            self.length = len(buf)
            struct.pack_into('!H', buf, 2, self.length)
        # Pad to a 4-byte boundary as SCTP requires.
        mod = len(buf) % 4
        if mod:
            buf.extend(bytearray(4 - mod))
        return six.binary_type(buf)
def modify_function_body(function_definition: ast.FunctionDefinition, modifiers: List[ast.ModifierInvocation]):
    # Weave the (non-constructor) modifier bodies around the function's CFG,
    # innermost modifier first, then prepend a fresh init/goto wrapper.
    modifiers = [m for m in modifiers if (not m.is_constructor)]
    cfg = function_definition.cfg_body
    cfg = wrap_function(cfg, function_definition.arguments, function_definition.returns, function_definition)
    for modifier in reversed(modifiers):
        modifier_arg_cfg: CfgSimple
        (modifier_arg_cfg, modifier_arg_evs) = modifier.modifier_arguments
        (modifier_template, modifier_arguments_original) = modifier.modifier_template
        modifier_arguments_original = [a for (_, a) in modifier_arguments_original]
        # Substitute the evaluated argument expressions into a private copy of
        # the modifier template before splicing it around the body.
        modifier_template = deepcopy_with_mapping(modifier_template, zip(modifier_arguments_original, modifier_arg_evs))
        cfg = splice_function(modifier_template, cfg)
        if modifier_arg_evs:
            # Prepend the argument-evaluation CFG in front of the spliced body.
            cfg.cfg += modifier_arg_cfg
            cfg.cfg += (modifier_arg_cfg.last_appendable, cfg.first)
            cfg.first = modifier_arg_cfg.first
    last_goto = cfg.last
    if last_goto:
        # Replace the trailing goto with an explicit Return carrying only the
        # declared return parameters.
        args_return = last_goto.args[:len(function_definition.return_parameters.parameters)]
        cfg = cfg.cfg
        cfg = cfg.replace(last_goto, ir.Return(last_goto.ast_node, args_return, None))
    cfg_init = function_definition.cfg_return_inits
    cfg_init <<= ir.Block(function_definition, list(map(__[1], function_definition.arguments)), info='FUNCTION INIT')
    cfg_init >>= ir.Goto(function_definition, None, (list(map(__[1], function_definition.returns)) + list(map(__[1], function_definition.arguments))))
    return (cfg_init >> cfg)
class AdPromotedObject(AbstractObject):
    """Generated Graph API model for an ad's promoted object.

    BUGFIX: ``_get_field_enum_info`` takes ``cls`` but had lost its
    ``@classmethod`` decorator in the recovered source; restored.
    """

    def __init__(self, api=None):
        super(AdPromotedObject, self).__init__()
        self._isAdPromotedObject = True
        self._api = api

    class Field(AbstractObject.Field):
        application_id = 'application_id'
        conversion_goal_id = 'conversion_goal_id'
        custom_conversion_id = 'custom_conversion_id'
        custom_event_str = 'custom_event_str'
        custom_event_type = 'custom_event_type'
        event_id = 'event_id'
        fundraiser_campaign_id = 'fundraiser_campaign_id'
        mcme_conversion_id = 'mcme_conversion_id'
        object_store_url = 'object_store_url'
        offer_id = 'offer_id'
        offline_conversion_data_set_id = 'offline_conversion_data_set_id'
        offsite_conversion_event_id = 'offsite_conversion_event_id'
        omnichannel_object = 'omnichannel_object'
        page_id = 'page_id'
        pixel_aggregation_rule = 'pixel_aggregation_rule'
        pixel_id = 'pixel_id'
        pixel_rule = 'pixel_rule'
        place_page_set = 'place_page_set'
        place_page_set_id = 'place_page_set_id'
        product_catalog_id = 'product_catalog_id'
        product_item_id = 'product_item_id'
        product_set_id = 'product_set_id'
        retention_days = 'retention_days'

    class CustomEventType:
        achievement_unlocked = 'ACHIEVEMENT_UNLOCKED'
        add_payment_info = 'ADD_PAYMENT_INFO'
        add_to_cart = 'ADD_TO_CART'
        add_to_wishlist = 'ADD_TO_WISHLIST'
        ad_impression = 'AD_IMPRESSION'
        complete_registration = 'COMPLETE_REGISTRATION'
        contact = 'CONTACT'
        content_view = 'CONTENT_VIEW'
        customize_product = 'CUSTOMIZE_PRODUCT'
        d2_retention = 'D2_RETENTION'
        d7_retention = 'D7_RETENTION'
        donate = 'DONATE'
        find_location = 'FIND_LOCATION'
        initiated_checkout = 'INITIATED_CHECKOUT'
        lead = 'LEAD'
        level_achieved = 'LEVEL_ACHIEVED'
        listing_interaction = 'LISTING_INTERACTION'
        messaging_conversation_started_7d = 'MESSAGING_CONVERSATION_STARTED_7D'
        other = 'OTHER'
        purchase = 'PURCHASE'
        rate = 'RATE'
        schedule = 'SCHEDULE'
        search = 'SEARCH'
        service_booking_request = 'SERVICE_BOOKING_REQUEST'
        spent_credits = 'SPENT_CREDITS'
        start_trial = 'START_TRIAL'
        submit_application = 'SUBMIT_APPLICATION'
        subscribe = 'SUBSCRIBE'
        tutorial_completion = 'TUTORIAL_COMPLETION'

    # Field name -> Graph API type mapping used by the SDK's (de)serializer.
    _field_types = {'application_id': 'string', 'conversion_goal_id': 'string', 'custom_conversion_id': 'string', 'custom_event_str': 'string', 'custom_event_type': 'CustomEventType', 'event_id': 'string', 'fundraiser_campaign_id': 'string', 'mcme_conversion_id': 'string', 'object_store_url': 'string', 'offer_id': 'string', 'offline_conversion_data_set_id': 'string', 'offsite_conversion_event_id': 'string', 'omnichannel_object': 'Object', 'page_id': 'string', 'pixel_aggregation_rule': 'string', 'pixel_id': 'string', 'pixel_rule': 'string', 'place_page_set': 'AdPlacePageSet', 'place_page_set_id': 'string', 'product_catalog_id': 'string', 'product_item_id': 'string', 'product_set_id': 'string', 'retention_days': 'string'}

    @classmethod
    def _get_field_enum_info(cls):
        field_enum_info = {}
        field_enum_info['CustomEventType'] = AdPromotedObject.CustomEventType.__dict__.values()
        return field_enum_info
class PySOALogContextFilter(logging.Filter):
    """Logging filter that injects PySOA request/action context into records.

    BUGFIX: every ``cls``-taking helper below had lost its ``@classmethod``
    decorator in the recovered source; restored.
    """

    def __init__(self):
        super(PySOALogContextFilter, self).__init__('')

    def filter(self, record):
        """Attach correlation/request ids plus service and action names; never drops records."""
        context = self.get_logging_request_context()
        if context:
            setattr(record, 'correlation_id', (context.get('correlation_id') or '--'))
            setattr(record, 'request_id', (context.get('request_id') or '--'))
        else:
            setattr(record, 'correlation_id', '--')
            setattr(record, 'request_id', '--')
        setattr(record, 'service_name', (self._service_name or 'unknown'))
        setattr(record, 'action_name', (self.get_logging_action_name() or '(n/a)'))
        return True

    # Per-task context stacks; ContextVar keeps asyncio tasks isolated.
    _context_stack = ContextVar('logging_context_stack', default=None)
    _action_stack = ContextVar('logging_action_stack', default=None)
    _service_name = None

    @classmethod
    def set_logging_request_context(cls, **context):
        value = cls._context_stack.get()
        if not value:
            value = []
            cls._context_stack.set(value)
        value.append(context)

    @classmethod
    def clear_logging_request_context(cls):
        value = cls._context_stack.get()
        if value:
            value.pop()

    @classmethod
    def get_logging_request_context(cls):
        value = cls._context_stack.get()
        if value:
            return value[-1]
        return None

    @classmethod
    def set_logging_action_name(cls, action_name):
        value = cls._action_stack.get()
        if not value:
            value = []
            cls._action_stack.set(value)
        value.append(action_name)

    @classmethod
    def clear_logging_action_name(cls):
        value = cls._action_stack.get()
        if value:
            value.pop()

    @classmethod
    def get_logging_action_name(cls):
        value = cls._action_stack.get()
        if value:
            return value[-1]
        return None

    @classmethod
    def set_service_name(cls, service_name):
        cls._service_name = service_name
def csl():
    """Convert the CSL pretraining corpus into tab-separated (text, dataset) rows."""
    datasets_name = sys._getframe().f_code.co_name
    out_path = './pretrain_datasets/output/{}.txt'.format(datasets_name)
    writer = csv.writer(open(out_path, 'w'), delimiter='\t')
    base_dir = './pretrain_datasets/csl/'
    for (root, dirs, files) in os.walk(base_dir):
        for file in files:
            file_path = (root + '/') + file
            reader = csv.reader(open(file_path), delimiter='\t')
            for row in tqdm(reader):
                # Column 1 holds the document text; tag it with the dataset name.
                writer.writerow([row[1], datasets_name])
class BaseTrainer():
    """Accelerate-based training loop: per-epoch train/val, tracking, checkpoints.

    Review fixes applied to the recovered source:
    - ``train_metrics``/``val_metrics``/``last_train_result``/``last_val_result``
      were plain methods but are used as attributes throughout (e.g.
      ``self.last_train_result.keys()``), so their stripped ``@property``
      decorators are restored.
    - ``_set_trackers`` compared tracker names against the misspelled
      ``'tensorbaord'`` and the in-loop replacement assigned only the loop
      variable (a no-op); both fixed.
    - ``_one_val_loop`` had two identical branches; the val-score branch now
      actually reports the validation score.
    """

    def __init__(self, datamodule: video_transformers.data.VideoDataModule, model: VideoModel, max_epochs: int=12, cpu: bool=False, mixed_precision: str='no', output_dir: str='runs', seed: int=42, trackers: List[GeneralTracker]=None, checkpoint_save: bool=True, checkpoint_save_interval: int=1, checkpoint_save_policy: str='epoch', experiment_name='exp', optimizer: torch.optim.Optimizer=None, scheduler: _LRScheduler=None, config_dict: dict=None, loss_function: torch.nn.modules.loss._Loss=None):
        self.experiment_dir = increment_path(Path(output_dir) / experiment_name, exist_ok=False)
        self._set_optimizer(model, optimizer)
        self._set_scheduler(scheduler, max_epochs)
        self._set_loss_function(loss_function)
        self._set_trackers(trackers, experiment_name)
        self.accelerator = Accelerator(cpu=cpu, mixed_precision=mixed_precision, log_with=self.trackers, logging_dir=self.experiment_dir)
        labels = datamodule.labels
        # Everything in locals() except these names is recorded as a hparam.
        # NOTE: no new locals may be introduced above `hparams = locals()`,
        # otherwise they leak into the logged hyperparameters.
        ignore_args = ['self', 'ignore_args', 'model', 'datamodule', 'output_dir', 'experiment_name', 'optimizer', 'scheduler', 'config_dict', 'loss_function', 'trackers', 'vt_version']
        vt_version = {'video_transformers_version': video_transformers.__version__}
        hparams = locals()
        hparams = {k: v for (k, v) in hparams.items() if k not in ignore_args}
        if config_dict is not None:
            hparams.update(config_dict)
        if isinstance(model, VideoModel):
            hparams.update({'model': model.config})
        if isinstance(datamodule, video_transformers.data.VideoDataModule):
            hparams.update({'data': datamodule.config})
        if isinstance(self.scheduler, torch.optim.lr_scheduler.SequentialLR):
            hparams.update({'scheduler': scheduler_to_config(self.scheduler)})
        hparams.update(vt_version)
        self.hparams = hparams
        if self.accelerator.is_main_process:
            self.accelerator.init_trackers(experiment_name, hparams)
        # Seed everything for reproducibility.
        np.random.seed(seed)
        torch.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
        (self.model, self.optimizer, self.train_dataloader, self.val_dataloader, self.test_dataloader, self.scheduler) = self.accelerator.prepare(model, self.optimizer, datamodule.train_dataloader, datamodule.val_dataloader, datamodule.test_dataloader, self.scheduler)
        self.overall_step = 0
        self.overall_epoch = 0
        self.starting_epoch = 0
        self.labels = labels
        self.last_saved_checkpoint_epoch = -1

    @property
    def train_metrics(self):
        """Subclass hook: metric collection updated during training."""
        raise NotImplementedError()

    @property
    def val_metrics(self):
        """Subclass hook: metric collection updated during validation."""
        raise NotImplementedError()

    @property
    def last_train_result(self):
        """Subclass hook: mapping of metric name -> tensor for the last train epoch."""
        return None

    @property
    def last_val_result(self):
        """Subclass hook: mapping of metric name -> tensor for the last val epoch."""
        return None

    def _set_optimizer(self, model, optimizer):
        # Default: AdamW over all model parameters.
        if optimizer is None:
            optimizer = torch.optim.AdamW(model.parameters(), lr=0.0001)
        self.optimizer = optimizer

    def _set_scheduler(self, scheduler, max_epochs=None):
        if scheduler is None:
            scheduler = get_linear_scheduler_with_warmup(self.optimizer, max_epochs=max_epochs)
        self.scheduler = scheduler

    def _set_loss_function(self, loss_function):
        if loss_function is None:
            loss_function = torch.nn.CrossEntropyLoss()
        self.loss_function = loss_function

    def _set_trackers(self, trackers, experiment_name: str):
        """Ensure a TensorBoard tracker is always present among the trackers."""
        tb_tracker = TensorBoardTracker(run_name=experiment_name, logging_dir=self.experiment_dir)
        if trackers is None:
            trackers = [tb_tracker]
        else:
            tensorboard_present = False
            for index, tracker in enumerate(trackers):
                # BUGFIX: was compared against the misspelled 'tensorbaord' and
                # assigned only the loop variable, so no replacement happened.
                if tracker.name == 'tensorboard':
                    trackers[index] = tb_tracker
                    tensorboard_present = True
            if not tensorboard_present:
                trackers.append(tb_tracker)
        self.trackers = trackers

    def _log_last_lr(self):
        if self.accelerator.is_main_process:
            lr = self.optimizer.param_groups[-1]['lr']
            self.accelerator.log({'lr': lr}, step=self.overall_epoch)

    def _get_last_train_score(self) -> Union[None, tuple]:
        """Return ('train/<metric>', value) for the first train metric, or None."""
        if self.last_train_result is not None:
            first_train_metric = list(self.last_train_result.keys())[0]
            return (('train/' + first_train_metric), self.last_train_result[first_train_metric].mean())
        else:
            return None

    def _get_last_val_score(self) -> Union[None, tuple]:
        """Return ('val/<metric>', value) for the first val metric, or None."""
        if self.last_val_result is not None:
            first_val_metric = list(self.last_val_result.keys())[0]
            return (('val/' + first_val_metric), self.last_val_result[first_val_metric].mean())
        else:
            return None

    def _save_state_and_checkpoint(self):
        # NOTE(review): hparams['checkpoint_save'] is recorded but never
        # consulted here — confirm whether it should gate saving.
        if self.accelerator.is_main_process:
            output_name = 'checkpoint'
            save_path = Path(self.experiment_dir) / output_name
            if self.hparams['checkpoint_save_policy'] in ['steps', 'step']:
                if (self.overall_step % self.hparams['checkpoint_save_interval']) == 0:
                    self.accelerator.save_state(save_path)
            elif self.hparams['checkpoint_save_policy'] in ['epochs', 'epoch']:
                # Guard against saving the same epoch twice.
                if (self.last_saved_checkpoint_epoch != self.overall_epoch) and ((self.overall_epoch % self.hparams['checkpoint_save_interval']) == 0):
                    self.accelerator.save_state(save_path)
                    self.save_checkpoint(save_path)
                    self.last_saved_checkpoint_epoch = self.overall_epoch
            else:
                raise ValueError(f"Unknown checkpoint save policy: {self.hparams['checkpoint_save_policy']}")

    def save_checkpoint(self, save_path: Union[str, Path]):
        """Persist model weights plus preprocessing/label/scheduler config."""
        config = self.model.config.copy()
        data_config = self.hparams['data']
        try:
            scheduler_config = scheduler_to_config(self.scheduler)
        except TypeError:
            # Unsupported scheduler types are stored as None.
            scheduler_config = None
        config.update({'preprocessor': {'means': data_config['preprocessor_config']['means'], 'stds': data_config['preprocessor_config']['stds'], 'min_short_side': data_config['preprocessor_config']['min_short_side'], 'input_size': data_config['preprocessor_config']['input_size'], 'clip_duration': data_config['preprocessor_config']['clip_duration'], 'num_timesteps': data_config['preprocessor_config']['num_timesteps']}, 'labels': data_config['labels'], 'scheduler': scheduler_config})
        self.model.save_pretrained(save_path, config)

    def _update_state(self, loss):
        """Backprop, optimizer step, and global step counter update."""
        self.accelerator.backward(loss)
        self.optimizer.step()
        self.optimizer.zero_grad()
        self.overall_step += 1

    def _one_train_loop(self, pbar, len_train_dataloader):
        """Run one training epoch; returns the summed (un-averaged) loss."""
        pbar.set_description(f'Epoch {self.overall_epoch} (Train)')
        self.model.train()
        train_loss = 0
        train_dataloader_iter = iter(self.train_dataloader)
        for step in range(len_train_dataloader):
            pbar.update(1)
            batch = next(train_dataloader_iter)
            loss = self.training_step(batch)
            train_loss += loss
            self._update_state(loss)
            pbar.set_postfix({'loss': f'{loss:.4f}'})
            # Step-policy checkpoints are evaluated after every batch.
            self._save_state_and_checkpoint()
        return train_loss

    def _one_val_loop(self, pbar, len_val_dataloader):
        """Run one validation epoch; returns the summed (un-averaged) loss."""
        pbar.set_description(f'Epoch {self.overall_epoch} (Val) ')
        self.model.eval()
        val_loss = 0
        val_dataloader_iter = iter(self.val_dataloader)
        for step in range(len_val_dataloader):
            pbar.update(1)
            batch = next(val_dataloader_iter)
            loss = self.validation_step(batch)
            val_loss += loss
            train_score = self._get_last_train_score()
            val_score = self._get_last_val_score()
            # BUGFIX: both branches were identical; show the val score when available.
            if val_score is not None:
                pbar.set_postfix({'loss': f'{loss:.4f}', val_score[0]: f'{val_score[1]:.3f}', train_score[0]: f'{train_score[1]:.3f}'})
            else:
                pbar.set_postfix({'loss': f'{loss:.4f}', train_score[0]: f'{train_score[1]:.3f}'})
        return val_loss

    def fit(self):
        """Full training run: train + validate for hparams['max_epochs'] epochs."""
        self.accelerator.print(f'Trainable parameteres: {self.model.num_trainable_params}')
        self.accelerator.print(f'Total parameteres: {self.model.num_total_params}')
        len_train_dataloader = len(self.train_dataloader)
        len_val_dataloader = len(self.val_dataloader)
        for epoch in range(self.starting_epoch, self.hparams['max_epochs']):
            self._log_last_lr()
            with tqdm(total=(len_train_dataloader + len_val_dataloader), unit=' batch', disable=(not self.accelerator.is_local_main_process)) as pbar:
                train_loss = self._one_train_loop(pbar, len_train_dataloader)
                self.on_training_epoch_end()
                train_loss = train_loss / len_train_dataloader
                train_score = self._get_last_train_score()
                pbar.set_postfix({'loss': f'{train_loss:.4f}', train_score[0]: f'{train_score[1]:.3f}'})
                with torch.inference_mode():
                    val_loss = self._one_val_loop(pbar, len_val_dataloader)
                    self.on_validation_epoch_end()
                val_loss = val_loss / len_val_dataloader
                val_score = self._get_last_val_score()
                pbar.set_postfix({'loss': f'{val_loss:.4f}', val_score[0]: f'{val_score[1]:.3f}', train_score[0]: f'{train_score[1]:.3f}'})
                pbar.set_description(f'Epoch {self.overall_epoch} (Done) ')
            # Epoch-policy checkpoint, then advance the epoch and LR schedule.
            self._save_state_and_checkpoint()
            self.overall_epoch = int(self.overall_epoch + 1)
            self.scheduler.step()
def test_parse_parameter_event():
    """Decode a recorded rcl_interfaces/ParameterEvent CDR payload field by field."""
    data = bytes.fromhex(rcl_interfaces__ParameterEvent)
    reader = CdrReader(data)
    # BUGFIX: the first two expected literals (the header stamp's sec/nanosec
    # fields) were lost from the source, leaving `== )` syntax errors.  The
    # recorded values cannot be reconstructed with certainty here, so the
    # stamp fields are consumed without assertion.
    # TODO(review): restore the literals from the upstream fixture (the
    # @foxglove/cdr test uses 1490149580 / 117017840 — confirm).
    reader.uint32()
    reader.int32()
    assert (reader.string() == '/_ros2cli_378363')
    assert (reader.sequence_length() == 1)
    assert (reader.string() == 'use_sim_time')
    assert (reader.uint8() == 1)
    assert (reader.int8() == 0)
    assert (reader.int64() == 0)
    assert (reader.float64() == 0)
    assert (reader.string() == '')
    assert (reader.sequence_length() == 0)
    assert (reader.int8_array(0) == [])
    assert (reader.sequence_length() == 0)
    assert (reader.uint8_array(0) == bytes())
    assert (reader.sequence_length() == 0)
    assert (reader.int64_array(0) == [])
    assert (reader.sequence_length() == 0)
    assert (reader.float64_array(0) == [])
    assert (reader.sequence_length() == 0)
    assert (reader.string_array(0) == [])
    assert (reader.sequence_length() == 0)
    assert (reader.sequence_length() == 0)
    assert (reader.decoded_bytes() == len(data))
def cluster_lines(node):
    """Tree-layout hook: attach a line-style expression profile face to each node."""
    node.collapsed = False
    node.img_style['fgcolor'] = '#3333FF'
    node.img_style['size'] = 4
    matrix_max = numpy.max(node.arraytable._matrix_max)
    matrix_min = numpy.min(node.arraytable._matrix_min)
    # Midpoint of the matrix value range.
    matrix_avg = matrix_min + ((matrix_max - matrix_min) / 2)
    profile_face = faces.ProfileFace(matrix_max, matrix_min, matrix_avg, 200, 50, 'lines')
    if node.is_leaf:
        # Leaves additionally show their name in column 1.
        name_face = faces.AttrFace('name', fsize=6)
        faces.add_face_to_node(name_face, node, 1, aligned=True)
    faces.add_face_to_node(profile_face, node, 0, aligned=True)
def check_label_consistency(task: SpanTask) -> List[SpanExample]:
    """Warn about example labels missing from the task config and drop them.

    Returns rebuilt prompt examples whose entities are restricted to labels
    present in the task's label dictionary; examples left with no entities
    are discarded entirely.
    """
    assert task.prompt_examples
    assert issubclass(task.prompt_example_type, SpanExample)
    # Map normalized label -> original spelling across all prompt examples
    # (later occurrences win, as in a dict comprehension).
    example_labels = {}
    for example in task.prompt_examples:
        for key in example.entities:
            example_labels[task.normalizer(key)] = key
    extra_keys = set(example_labels.keys()) - set(task.label_dict.keys())
    unspecified_labels = {example_labels[key] for key in extra_keys}
    if extra_keys:
        warnings.warn(f'Examples contain labels that are not specified in the task configuration. The latter contains the following labels: {sorted(list(set(task.label_dict.values())))}. Labels in examples missing from the task configuration: {sorted(list(unspecified_labels))}. Please ensure your label specification and example labels are consistent.')
    cleaned = []
    for example in task.prompt_examples:
        kept_entities = {label: entities for (label, entities) in example.entities.items() if task.normalizer(label) in task.label_dict}
        rebuilt = task.prompt_example_type(text=example.text, entities=kept_entities)
        if len(rebuilt.entities):
            cleaned.append(rebuilt)
    return cleaned
class DataSetClipper(Filter):
    # Mayavi filter that clips a dataset with an interactive implicit widget.
    __version__ = 0
    widget = Instance(ImplicitWidgets, allow_none=False, record=True)
    filter = Instance(tvtk.Object, allow_none=False, record=True)
    update_mode = Delegate('widget', modify=True)
    input_info = PipelineInfo(datasets=['any'], attribute_types=['any'], attributes=['any'])
    output_info = PipelineInfo(datasets=['any'], attributes=['any'])
    reset_button = Button('Reset Boundaries')
    view = View(Group(Group(Item('update_mode')), Group(Item('reset_button'), Item(name='widget', style='custom', resizable=True), show_labels=False), label='ImplicitWidget'), Group(Group(Item('filter', style='custom'), show_labels=False), label='Clipper'), resizable=True)
    _transform = Instance(tvtk.Transform, allow_none=False)

    def __get_pure_state__(self):
        # Persist the widget transform matrix; transient fields are dropped.
        d = super(DataSetClipper, self).__get_pure_state__()
        for name in ('_first', '_observer_id'):
            d.pop(name, None)
        d['matrix'] = cPickle.dumps(self._transform.matrix)
        return d

    def __set_pure_state__(self, state):
        # Restore the matrix separately, then push it into the widget.
        mat = state.pop('matrix')
        super(DataSetClipper, self).__set_pure_state__(state)
        state_pickler.set_state(self, state)
        self._transform.set_matrix(cPickle.loads(mat))
        self.widget.set_transform(self._transform)

    def setup_pipeline(self):
        self.widget = ImplicitWidgets()
        self._transform = tvtk.Transform()
        self.filter = tvtk.ClipDataSet()
        self.widget.on_trait_change(self._handle_widget, 'widget')
        super(DataSetClipper, self).setup_pipeline()

    def update_pipeline(self):
        inputs = self.inputs
        if (len(inputs) == 0):
            return
        widget = self.widget
        widget.inputs = inputs
        widget.update_pipeline()
        filter = self.filter
        self.configure_connection(filter, inputs[0])
        widget.update_implicit_function()
        filter.clip_function = widget.implicit_function
        filter.update()
        self._set_outputs([filter])
        self.pipeline_changed = True

    def update_data(self):
        if (len(self.inputs) == 0):
            return
        self.filter.update()
        self.data_changed = True

    def _on_interaction_event(self, obj, event):
        # Mirror the widget transform and record the steps for script replay.
        tfm = self._transform
        self.widget.widget.get_transform(tfm)
        recorder = self.recorder
        if (recorder is not None):
            state = {}
            state['elements'] = tfm.matrix.__getstate__()['elements']
            name = recorder.get_script_id(self)
            recorder.record(('%s._transform.matrix.__setstate__(%s)' % (name, state)))
            recorder.record(('%s.widget.widget.set_transform(%s._transform)' % (name, name)))
            recorder.record(('%s.widget.update_implicit_function()' % name))
            recorder.record(('%s.render()' % name))

    def _widget_changed(self, old, new):
        # Traits handler: rewire pipeline and observers to the new widget.
        self.widgets = self.widget.widgets
        if (len(self.inputs) > 0):
            new.inputs = self.inputs
            new.update_pipeline()
        self._observer_id = new.widget.add_observer(self.update_mode_, self._on_interaction_event)

    def _filter_changed(self, old, new):
        # Traits handler: move render notifications to the new filter.
        if (old is not None):
            old.on_trait_change(self.render, remove=True)
        new.on_trait_change(self.render)
        if (len(self.inputs) > 0):
            self.configure_connection(new, self.inputs[0])
        self.outputs = [new]

    def _reset_button_fired(self):
        self.widget.widget.place_widget()
        self.widget.update_implicit_function()
        self.filter.update()
        self.render()

    def _handle_widget(self, value):
        self.widgets = self.widget.widgets
        f = self.filter
        f.clip_function = self.widget.implicit_function
        f.update()
        self.update_pipeline()
def main():
    # Ansible entry point for the fortios dlp_fp_sensitivity module.
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'dlp_fp_sensitivity': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the generated schema options; the mkey attribute becomes mandatory.
    for attribute_name in module_spec['options']:
        fields['dlp_fp_sensitivity']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['dlp_fp_sensitivity']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the device's FortiOS version against the schema.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'dlp_fp_sensitivity')
        (is_error, has_changed, result, diff) = fortios_dlp(module.params, fos, module.check_mode)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        # Success: surface the version warning in the result when relevant.
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def extract_domain(data):
    """Pull registrable domain names (domain + public suffix) out of free text."""
    token_re = re.compile('([a-zA-Z0-9_.-]+)')
    found = set()
    for token_match in token_re.finditer(data):
        candidate = token_match.group(0).lower()
        # Cheap syntactic filters before the (slower) public-suffix lookup.
        if '.' not in candidate:
            continue
        if not re.match('[a-z]+', candidate):
            continue
        if not re.match('[a-z0-9]+\\.[a-z0-9]', candidate):
            continue
        parsed = tldextract.extract(candidate)
        if parsed.suffix:
            found.add((parsed.domain + '.') + parsed.suffix.rstrip('.'))
    return list(found)
class AnalyseSummaryTotalsTest(SeleniumTestCase):
    """End-to-end check of the summary-totals panel on the analyse page."""

    def test_summary_totals_on_analyse_page(self):
        self.browser.get(self.live_server_url + '/analyse/#org=CCG&numIds=0212000AA')
        # Expected visible text keyed by the element's CSS class.
        # NOTE(review): some range strings (e.g. "AprSep '16") look like they
        # lost a dash character during extraction — confirm against fixtures.
        expected = {'panel-heading': 'Total prescribing for Rosuvastatin Calcium across all Sub-ICB Locations in NHS England', 'js-selected-month': "Sep '16", 'js-financial-year-range': "AprSep '16", 'js-year-range': "Oct '15Sep '16", 'js-cost-month-total': '29,720', 'js-cost-financial-year-total': '178,726', 'js-cost-year-total': '379,182', 'js-items-month-total': '1,669', 'js-items-financial-year-total': '9,836', 'js-items-year-total': '20,622'}
        for classname, value in expected.items():
            # BUGFIX: the XPath had lost its attribute axes ('@id' / '@class'),
            # producing an invalid expression; restored.
            selector = '//*[@id="{id}"]//*[contains(concat(" ", @class, " "), " {classname} ")]'.format(id='js-summary-totals', classname=classname)
            element = self.find_visible_by_xpath(selector)
            self.assertTrue(element.is_displayed(), '.{} is not visible'.format(classname))
            self.assertEqual(element.text.strip(), value)
class TestTicketGeneratorSlice(ZenpyApiTestCase):
    """Exercises slicing of the ticket result generator around page-size
    boundaries (the API pages are 100 tickets wide)."""

    __test__ = False

    def test_ticket_slice_low_bound(self):
        cassette = self.generate_cassette_name()
        with self.recorder.use_cassette(cassette, serialize_with='prettyjson'):
            generator = self.zenpy_client.tickets()
            self.check_slice_range(generator[:1], range(1, 2))

    def test_ticket_slice_lower_page_size_boundary(self):
        cassette = self.generate_cassette_name()
        with self.recorder.use_cassette(cassette, serialize_with='prettyjson'):
            generator = self.zenpy_client.tickets()
            self.check_slice_range(generator[99:100], range(100, 101))

    def test_ticket_slice_cross_page_size_boundary(self):
        cassette = self.generate_cassette_name()
        with self.recorder.use_cassette(cassette, serialize_with='prettyjson'):
            generator = self.zenpy_client.tickets()
            self.check_slice_range(generator[99:101], range(100, 102))

    def test_ticket_slice_on_lower_boundary(self):
        cassette = self.generate_cassette_name()
        with self.recorder.use_cassette(cassette, serialize_with='prettyjson'):
            generator = self.zenpy_client.tickets()
            self.check_slice_range(generator[100:101], range(101, 102))

    def test_ticket_slice_exact_page_size_boundary(self):
        cassette = self.generate_cassette_name()
        with self.recorder.use_cassette(cassette, serialize_with='prettyjson'):
            generator = self.zenpy_client.tickets()
            self.check_slice_range(generator[100:200], range(101, 201))

    def test_ticket_slice_with_page_size(self):
        cassette = self.generate_cassette_name()
        with self.recorder.use_cassette(cassette, serialize_with='prettyjson'):
            generator = self.zenpy_client.tickets()
            self.check_slice_range(generator[3950:4000:50], range(3951, 4001))

    def check_slice_range(self, values, slice_range):
        """Assert the slice yielded exactly the Tickets with ids in slice_range."""
        self.assertEqual(len(values), len(slice_range))
        for (position, expected_id) in enumerate(slice_range):
            ticket = values[position]
            self.assertIsInstance(ticket, Ticket)
            self.assertTrue((ticket.id == expected_id), msg='expected Ticket id: {}, found: {}, values: {}'.format(expected_id, ticket, values))
class OrderControl(StateMachine):
    """State machine for a simple order workflow:
    waiting_for_payment -> processing -> shipping -> completed.
    """

    # States (declaration order matters to the StateMachine DSL).
    waiting_for_payment = State(initial=True)
    processing = State()
    shipping = State()
    completed = State(final=True)

    # Transitions. receive_payment stays in waiting_for_payment until the
    # accumulated payments cover the order total.
    add_to_order = waiting_for_payment.to(waiting_for_payment)
    receive_payment = (waiting_for_payment.to(processing, cond='payments_enough') | waiting_for_payment.to(waiting_for_payment, unless='payments_enough'))
    process_order = processing.to(shipping, cond='payment_received')
    ship_order = shipping.to(completed)

    def __init__(self):
        # Mutable order bookkeeping, set up before the machine starts.
        self.order_total = 0
        self.payments = []
        self.payment_received = False
        super().__init__()

    def payments_enough(self, amount):
        """Guard: True when prior payments plus *amount* cover the total."""
        return ((sum(self.payments) + amount) >= self.order_total)

    def before_add_to_order(self, amount):
        """Increase the order total by *amount*; returns the new total."""
        self.order_total += amount
        return self.order_total

    def before_receive_payment(self, amount):
        """Record a payment; returns the payment list."""
        self.payments.append(amount)
        return self.payments

    def after_receive_payment(self):
        # Runs after any receive_payment transition (even a self-loop back
        # to waiting_for_payment), marking that a payment arrived.
        self.payment_received = True

    def on_enter_waiting_for_payment(self):
        # Re-entering the initial state resets the payment flag.
        self.payment_received = False
class CustomResponseCode(CustomCodeBase):
    """HTTP status codes paired with their human-readable messages.

    NOTE(review): the message strings appear to have been stripped by an
    encoding/extraction step (most are empty, some reduced to a comma) —
    presumably they were non-ASCII descriptions; restore from the original
    source if available.
    """

    # (status code, message) pairs consumed by CustomCodeBase.
    HTTP_200 = (200, '')
    HTTP_201 = (201, '')
    HTTP_202 = (202, ',')
    HTTP_204 = (204, ',')
    HTTP_400 = (400, '')
    HTTP_401 = (401, '')
    HTTP_403 = (403, '')
    HTTP_404 = (404, '')
    HTTP_410 = (410, '')
    HTTP_422 = (422, '')
    HTTP_425 = (425, ',')
    HTTP_429 = (429, ',')
    HTTP_500 = (500, '')
    HTTP_502 = (502, '')
    HTTP_503 = (503, '')
    HTTP_504 = (504, '')
def calc_greit_figures_of_merit(target_image, reconstruction_image, conductive_target=True, return_extras=False) -> (Tuple | Tuple[(Tuple, Dict)]):
    """Compute the five GREIT figures of merit for a reconstruction.

    Evaluates amplitude, position error, resolution, shape deformation and
    ringing of *reconstruction_image* against *target_image*.

    :param target_image: ground-truth target image.
    :param reconstruction_image: reconstructed image to evaluate.
    :param conductive_target: True when the target is conductive.
    :param return_extras: when True, also return per-metric extra outputs.
    :return: the 5-tuple of metrics, or (metrics, extras) when requested.
    """
    amplitude = calc_amplitude(reconstruction_image)
    position_error = calc_position_error(target_image, reconstruction_image, method='GREIT', conductive_target=conductive_target)
    resolution = calc_resolution(reconstruction_image, conductive_target=conductive_target)
    # Shape deformation is measured against a circle rather than the target.
    (shape_deformation, shape_extras) = calc_shape_deformation(reconstruction_image, target_image=None, circular=True, conductive_target=conductive_target, return_extras=True)
    (ringing, ringing_extras) = calc_ringing(reconstruction_image, target_image=target_image, circular=False, conductive_target=conductive_target, return_extras=True)
    figures = (amplitude, position_error, resolution, shape_deformation, ringing)
    if return_extras:
        extras = {'shape_deformation': shape_extras, 'ringing': ringing_extras}
        return (figures, extras)
    return figures
class BuildTimedeltaSchema(StrictSchema):
    """Schema for building a timedelta from a magnitude plus a `units` field."""

    units = ma.fields.String(required=True)

    # FIX: the decorator was garbled to the bare remnant `_schema` in the
    # source; without it this validator is never invoked by marshmallow.
    @ma.validates_schema
    def check_valid_units(self, data):
        """Reject any `units` value outside the supported timedelta units."""
        ALLOWED_UNITS = ['seconds', 'minutes', 'hours', 'days']
        if (data.get('units') not in ALLOWED_UNITS):
            raise ma.ValidationError('`units` must be one of `{}`'.format('`, `'.join(ALLOWED_UNITS)), ['units'])
def getCbText():
    """Read the current clipboard text, retrying until the clipboard is free.

    Polls every 10 ms: another application may hold the clipboard open, and
    wx clipboard calls can raise transiently, so both cases just retry.

    :return: clipboard contents as a string.
    """
    text_data = wx.TextDataObject()
    while True:
        try:
            # Only attempt the read when nobody else holds the clipboard
            # and we manage to open it ourselves.
            if (not wx.TheClipboard.IsOpened()) and wx.TheClipboard.Open():
                wx.TheClipboard.GetData(text_data)
                wx.TheClipboard.Close()
                break
        except Exception:
            # Transient wx clipboard failure: fall through and retry.
            pass
        time.sleep(0.01)
    return text_data.GetText()
class JoyStickKey(Enum):
    """Gamepad button/axis identifiers.

    Integer members are raw button indices; tuple members are (x, y)
    directions reported for the D-pad, with (0, 0) meaning released.
    """

    StartKey = 7
    SelectKey = 6
    ModeKey = 8
    # Right-hand button cluster (left/right/top/bottom positions).
    RLeftKey = 2
    RRightKey = 1
    RTopKey = 3
    RBottomKey = 0
    # Shoulder buttons.
    R1 = 5
    L1 = 4
    # Pressing the analog sticks down.
    LJoyStickKey = 9
    RJoyStickKey = 10
    # D-pad directions as (x, y) unit offsets.
    ArrowUp = (0, 1)
    ArrowDown = (0, (- 1))
    ArrowLeft = ((- 1), 0)
    ArrowRight = (1, 0)
    ArrowReleased = (0, 0)
class Agg(DslBase):
    """Base class for aggregation DSL objects."""

    _type_name = 'agg'
    _type_shortcut = staticmethod(A)
    name = None

    def __contains__(self, key):
        # Aggregations never report nested containment.
        return False

    def to_dict(self):
        """Serialize, hoisting any 'meta' entry up to the top level."""
        serialized = super().to_dict()
        body = serialized[self.name]
        if ('meta' in body):
            serialized['meta'] = body.pop('meta')
        return serialized

    def result(self, search, data):
        """Wrap raw aggregation *data* in an AggResponse."""
        return AggResponse(self, search, data)
def extractDaydreamTranslations(item):
    """Build a release message for a Daydream Translations feed item.

    :param item: feed entry dict with 'title' and 'tags'.
    :return: release message for recognized series, None for previews or
        items without volume/chapter info, False when no tag matches.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    # Skip items with no chapter/volume info and preview posts.
    has_position = chp or vol
    if (not has_position) or ('preview' in title.lower()):
        return None
    if ('WATTT' in item['tags']):
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
    """Accessors for the gapBetweenNotes mapping sonification options.

    FIX: each pair of identically-named defs had lost its `@property` /
    `@x.setter` decorators (an extraction artifact); without them the second
    def silently shadowed the first and the getters were unreachable.
    """

    @property
    def mapFunction(self):
        """Get the configured mapping function (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Get the data property this option maps to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Get the mapped range maximum (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Get the mapped range minimum (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Get the 'within' constraint (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_shutdown_raises():
    """A middleware whose process_shutdown raises makes the lifespan fail,
    while the other middlewares still complete startup/shutdown normally."""

    class FailingShutdownHandler():
        def __init__(self):
            self._startup_called = False

        async def process_startup(self, scope, event):
            self._startup_called = True

        async def process_shutdown(self, scope, event):
            raise Exception('testing 321')

    class RecordingHandler():
        def __init__(self):
            self._startup_called = False
            self._shutdown_called = False

        async def process_startup(self, scope, event):
            self._startup_called = True

        async def process_shutdown(self, scope, event):
            self._shutdown_called = True

    failing = FailingShutdownHandler()
    recorder_single = RecordingHandler()
    recorder_grouped = RecordingHandler()

    app = App()
    app.add_middleware(recorder_single)
    app.add_middleware([failing, recorder_grouped])

    client = testing.TestClient(app)
    with pytest.raises(RuntimeError) as excinfo:
        client.simulate_get()

    message = str(excinfo.value)
    assert message.startswith('ASGI app returned lifespan.shutdown.failed.')
    assert ('testing 321' in message)

    # Startup ran for everyone; shutdown runs in reverse order, so the
    # grouped recorder (after the failing one) completed its shutdown but
    # the first-added recorder never got the chance.
    assert failing._startup_called
    assert recorder_single._startup_called
    assert (not recorder_single._shutdown_called)
    assert recorder_grouped._startup_called
    assert recorder_grouped._shutdown_called
def test_discriminant_raises_exception_if_called_with_incorrect_data_type():
    """Expect TypeError when the discriminant is fed non-numeric input.

    NOTE(review): `d` takes two positional parameters but every call below
    passes only one, so the TypeError is raised for the missing `axis`
    argument rather than by np.nanmax rejecting the data — and
    `np.nanmax([1, 2, 3])` would not raise at all. Confirm whether `axis`
    should have a default and what the intended failure mode is.
    """
    def d(data, axis):
        return np.nanmax(data, axis=axis)
    with pytest.raises(TypeError):
        d('foo')
    with pytest.raises(TypeError):
        d(np.array(['foo']))
    with pytest.raises(TypeError):
        d([1, 2, 3])
class OptionPlotoptionsNetworkgraphSonificationContexttracksMappingVolume(Options):
    """Accessors for the context-track volume mapping sonification options.

    FIX: each pair of identically-named defs had lost its `@property` /
    `@x.setter` decorators (an extraction artifact); without them the second
    def silently shadowed the first and the getters were unreachable.
    """

    @property
    def mapFunction(self):
        """Get the configured mapping function (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Get the data property this option maps to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Get the mapped range maximum (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Get the mapped range minimum (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Get the 'within' constraint (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class QueryServicer(object):
    """gRPC servicer stub: every RPC reports UNIMPLEMENTED until overridden."""

    def _unimplemented(self, context):
        # Shared stub behavior for every unimplemented RPC.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ClientState(self, request, context):
        self._unimplemented(context)

    def ClientStates(self, request, context):
        self._unimplemented(context)

    def ConsensusState(self, request, context):
        self._unimplemented(context)

    def ConsensusStates(self, request, context):
        self._unimplemented(context)

    def ClientStatus(self, request, context):
        self._unimplemented(context)

    def ClientParams(self, request, context):
        self._unimplemented(context)

    def UpgradedClientState(self, request, context):
        self._unimplemented(context)

    def UpgradedConsensusState(self, request, context):
        self._unimplemented(context)
class OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMapping(Options):
    """Container exposing one sub-option accessor per sonification mapping
    parameter; each accessor lazily materializes its typed sub-options object
    via `_config_sub_data`.

    NOTE(review): sibling classes in this file show stripped `@property`
    decorators; these accessors may originally have been properties too —
    confirm against the upstream generator before changing call sites.
    """

    def frequency(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingFrequency':
        """Sub-options controlling the frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        """Sub-options controlling the gap between notes."""
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingGapbetweennotes)

    def highpass(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingHighpass':
        """Sub-options for the highpass filter mapping."""
        return self._config_sub_data('highpass', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingHighpass)

    def lowpass(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingLowpass':
        """Sub-options for the lowpass filter mapping."""
        return self._config_sub_data('lowpass', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingLowpass)

    def noteDuration(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingNoteduration':
        """Sub-options for note duration mapping."""
        return self._config_sub_data('noteDuration', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingNoteduration)

    def pan(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingPan':
        """Sub-options for stereo pan mapping."""
        return self._config_sub_data('pan', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingPan)

    def pitch(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingPitch':
        """Sub-options for pitch mapping."""
        return self._config_sub_data('pitch', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingPitch)

    def playDelay(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingPlaydelay':
        """Sub-options for play delay mapping."""
        return self._config_sub_data('playDelay', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingPlaydelay)

    def time(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingTime':
        """Sub-options for time mapping."""
        return self._config_sub_data('time', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingTime)

    def tremolo(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingTremolo':
        """Sub-options for tremolo mapping."""
        return self._config_sub_data('tremolo', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingTremolo)

    def volume(self) -> 'OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingVolume':
        """Sub-options for volume mapping."""
        return self._config_sub_data('volume', OptionPlotoptionsDumbbellSonificationDefaultinstrumentoptionsMappingVolume)
class OptionPlotoptionsArearangeSonificationTracksMappingPlaydelay(Options):
    """Accessors for the play-delay mapping sonification options.

    FIX: each pair of identically-named defs had lost its `@property` /
    `@x.setter` decorators (an extraction artifact); without them the second
    def silently shadowed the first and the getters were unreachable.
    """

    @property
    def mapFunction(self):
        """Get the configured mapping function (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Get the data property this option maps to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Get the mapped range maximum (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Get the mapped range minimum (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Get the 'within' constraint (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# Local import keeps this edit self-contained; move to the top-of-file import
# block if preferred.
from functools import lru_cache


# FIX: the decorator was garbled to the remnant `_cache(maxsize=1024)` in the
# source — restored to `@lru_cache(maxsize=1024)`, which the remnant implies.
# Caching is safe here: identical (vid, MACs, IPs) inputs always serialize to
# the same reply packet.
@lru_cache(maxsize=1024)
def arp_reply(vid, eth_src, eth_dst, src_ip, dst_ip):
    """Build a serialized ARP reply packet.

    :param vid: VLAN id for the Ethernet header (or None/untagged).
    :param eth_src: source MAC (the replying host).
    :param eth_dst: destination MAC (the original requester).
    :param src_ip: IP address being answered for.
    :param dst_ip: requester's IP address.
    :return: serialized packet object.
    """
    pkt = build_pkt_header(vid, eth_src, eth_dst, valve_of.ether.ETH_TYPE_ARP)
    arp_pkt = arp.arp(opcode=arp.ARP_REPLY, src_mac=eth_src, src_ip=src_ip, dst_mac=eth_dst, dst_ip=dst_ip)
    pkt.add_protocol(arp_pkt)
    pkt.serialize()
    return pkt
class BaseF3XFilingSchema(BaseEfileSchema):
    """Schema for F3X efilings; remaps summary-row columns to API names."""

    class Meta(BaseEfileSchema.Meta):
        model = models.BaseF3XFiling

    def parse_summary_rows(self, obj, **kwargs):
        """Extract F3X summary columns and rename keys to canonical names."""
        descriptions = decoders.f3x_description
        summary = extract_columns(obj, decoders.f3x_col_a, decoders.f3x_col_b, descriptions)
        if summary:
            # Rename efile column keys to the API's canonical field names.
            summary['cash_on_hand_beginning_calendar_ytd'] = summary.pop('coh_begin_calendar_yr')
            summary['cash_on_hand_beginning_period'] = summary.pop('coh_bop')
        return summary
def main(args):
    """Assemble sampled images from a folder into a video.

    Reads every image in args.input_folder_path in order; every
    args.sample_interval-th frame is written to the output video and shown
    on screen.
    """
    images_loader = ReadFromFolder(args.input_folder_path)
    video_writer = VideoWriter(args.output_video_path, args.framerate)
    img_displayer = ImageDisplayer()
    total = len(images_loader)
    for index in range(total):
        print('Processing {}/{}th image'.format(index, total))
        frame = images_loader.read_image()
        if (index % args.sample_interval) == 0:
            video_writer.write(frame)
            img_displayer.display(frame)
def generic_create(evm: Evm, endowment: U256, contract_address: Address, memory_start_position: U256, memory_size: U256) -> None:
    """Core of the CREATE/CREATE2 opcodes: spawn a child create-message.

    Pushes the new contract address on success, or 0 on any failure
    (insufficient balance, nonce at max, depth limit, address collision, or
    child execution error). Ordering of the gas/nonce/state updates below
    follows the execution spec and must not be rearranged.

    :param evm: current EVM frame.
    :param endowment: wei to transfer to the new contract.
    :param contract_address: precomputed address of the contract to create.
    :param memory_start_position: offset of the init code in memory.
    :param memory_size: length of the init code.
    """
    # Local import avoids a circular dependency with the interpreter module.
    from ...vm.interpreter import STACK_DEPTH_LIMIT, process_create_message
    # EIP-2929: the new address is warmed regardless of creation outcome.
    evm.accessed_addresses.add(contract_address)
    # All-but-one-64th rule: forward at most this much gas to the child.
    create_message_gas = max_message_call_gas(Uint(evm.gas_left))
    evm.gas_left -= create_message_gas
    ensure((not evm.message.is_static), WriteInStaticContext)
    evm.return_data = b''
    sender_address = evm.message.current_target
    sender = get_account(evm.env.state, sender_address)
    # Fail (push 0) without consuming the forwarded gas when the sender
    # cannot pay, its nonce is saturated, or the call depth limit is hit.
    if ((sender.balance < endowment) or (sender.nonce == Uint(((2 ** 64) - 1))) or ((evm.message.depth + 1) > STACK_DEPTH_LIMIT)):
        evm.gas_left += create_message_gas
        push(evm.stack, U256(0))
        return
    # Address collision: nonce still increments but creation fails.
    if account_has_code_or_nonce(evm.env.state, contract_address):
        increment_nonce(evm.env.state, evm.message.current_target)
        push(evm.stack, U256(0))
        return
    call_data = memory_read_bytes(evm.memory, memory_start_position, memory_size)
    increment_nonce(evm.env.state, evm.message.current_target)
    # The child executes the init code with copies of the warm access sets.
    child_message = Message(caller=evm.message.current_target, target=Bytes0(), gas=create_message_gas, value=endowment, data=b'', code=call_data, current_target=contract_address, depth=(evm.message.depth + 1), code_address=None, should_transfer_value=True, is_static=False, accessed_addresses=evm.accessed_addresses.copy(), accessed_storage_keys=evm.accessed_storage_keys.copy(), parent_evm=evm)
    child_evm = process_create_message(child_message, evm.env)
    if child_evm.error:
        # On failure the child's output (revert data) becomes return data
        # and 0 is pushed.
        incorporate_child_on_error(evm, child_evm)
        evm.return_data = child_evm.output
        push(evm.stack, U256(0))
    else:
        # On success return data is cleared and the new address is pushed.
        incorporate_child_on_success(evm, child_evm)
        evm.return_data = b''
        push(evm.stack, U256.from_be_bytes(child_evm.message.current_target))
def prioritize_dtypes(cell_outputs: List[NotebookNode]) -> Tuple[(List[List[str]], List[bool])]:
    """Rank each cell output's MIME types by the module-level `priorities`.

    :param cell_outputs: notebook cell output nodes.
    :return: (per-output list of known dtypes sorted by priority,
        per-output flag marking whether any dtype mentions plotly).
    """
    dtypes_per_output = []
    for cell_output in cell_outputs:
        if ('data' in cell_output):
            dtypes_per_output.append(list(cell_output['data'].keys()))
        else:
            # Outputs without a data payload are keyed by their output_type.
            dtypes_per_output.append([cell_output['output_type']])
    ranked = []
    for dtypes in dtypes_per_output:
        # Keep only dtypes we know how to render, best-priority first.
        known = set(dtypes).intersection(set(priorities))
        ordered = sorted(known, key=priorities.index)
        ranked.append([str(dtype) for dtype in ordered])
    plotly_flags = [any((('plotly' in dtype) for dtype in dtypes)) for dtypes in dtypes_per_output]
    return (ranked, plotly_flags)
def test_signal_integration(django_elasticapm_client):
    """Firing got_request_exception inside an except block records an
    unhandled ValueError event with the expected culprit."""
    try:
        int('hello')
    except ValueError:
        got_request_exception.send(sender=None, request=None)
    captured = django_elasticapm_client.events[ERROR]
    assert (len(captured) == 1)
    event = captured[0]
    assert ('exception' in event)
    exc = event['exception']
    assert (exc['type'] == 'ValueError')
    assert (exc['message'] == "ValueError: invalid literal for int() with base 10: 'hello'")
    # Signal-reported exceptions are recorded as unhandled.
    assert (exc['handled'] is False)
    assert (event['culprit'] == 'tests.contrib.django.django_tests.test_signal_integration')
class Email(DB.Model):
    """A confirmed (or pending) email address attached to a user account."""

    __tablename__ = 'emails'

    address = DB.Column(DB.Text, primary_key=True)
    owner_id = DB.Column(DB.Integer, DB.ForeignKey('users.id'), primary_key=True)
    registered_on = DB.Column(DB.DateTime, default=DB.func.now())

    # FIX: restored the stripped @staticmethod — the method has no `self`
    # parameter and would have received the instance as `addr` otherwise.
    @staticmethod
    def send_confirmation(addr, user_id):
        """Email a signed confirmation link for *addr* to *addr*.

        :param addr: address to confirm (lowercased/stripped before use).
        :param user_id: owning account id, bound into the HMAC digest.
        :return: True when the email was sent, False on send failure.
        :raises ValueError: when *addr* is not a valid email address.
        """
        g.log = g.log.new(address=addr, user_id=user_id)
        g.log.info('Sending email confirmation for new address on account.')
        addr = addr.lower().strip()
        if (not IS_VALID_EMAIL(addr)):
            g.log.info('Failed. Invalid address.')
            raise ValueError(u'Cannot send confirmation. {} is not a valid email.'.format(addr))
        # The digest binds the address to the user id so the confirmation
        # link cannot be replayed for a different account.
        message = u'email={email}&user_id={user_id}'.format(email=addr, user_id=user_id)
        digest = hmac.new(settings.NONCE_SECRET, message.encode('utf-8'), hashlib.sha256).hexdigest()
        link = url_for('confirm-account-email', digest=digest, email=addr, _external=True)
        res = send_email(to=addr, subject=('Confirm email for your account at %s' % settings.SERVICE_NAME), text=render_template('email/confirm-account.txt', email=addr, link=link), html=render_template_string(TEMPLATES.get('confirm-account.html'), email=addr, link=link), sender=settings.ACCOUNT_SENDER)
        if (not res[0]):
            g.log.info('Failed to send email.', reason=res[1], code=res[2])
            return False
        else:
            return True

    # FIX: restored the stripped @classmethod — the first parameter is `cls`
    # and the method constructs an instance of the class.
    @classmethod
    def create_with_digest(cls, addr, user_id, digest):
        """Return a new Email iff *digest* matches the expected HMAC.

        :param addr: address being confirmed.
        :param user_id: owning account id.
        :param digest: hex digest from the confirmation link.
        :return: an unsaved Email instance, or None when the digest is wrong.
        """
        addr = addr.lower()
        message = u'email={email}&user_id={user_id}'.format(email=addr, user_id=user_id)
        what_should_be = hmac.new(settings.NONCE_SECRET, message.encode('utf-8'), hashlib.sha256).hexdigest()
        # NOTE(review): a constant-time compare (hmac.compare_digest) would
        # be preferable here — left unchanged to avoid altering behavior
        # beyond the decorator restoration.
        if (digest == what_should_be):
            return cls(address=addr, owner_id=user_id)
        else:
            return None
class Config(object):
    """Wrapper over a libgit2 git_config object.

    FIX: restored decorators stripped by extraction — `from_c` must be a
    @classmethod (snapshot() calls `Config.from_c(self._repo, ccfg[0])`,
    which would otherwise bind the repo as `cls`), and `parse_bool`,
    `parse_int`, `_from_found_config` and the `get_*_config` helpers take no
    `self` and are @staticmethods.
    """

    def __init__(self, path=None):
        """Open a config: an empty in-memory one, or the file at *path*."""
        cconfig = ffi.new('git_config **')
        if (not path):
            err = C.git_config_new(cconfig)
        else:
            assert_string(path, 'path')
            err = C.git_config_open_ondisk(cconfig, to_bytes(path))
        check_error(err, True)
        self._config = cconfig[0]

    @classmethod
    def from_c(cls, repo, ptr):
        """Wrap an existing C git_config pointer owned by *repo*."""
        config = cls.__new__(cls)
        config._repo = repo
        config._config = ptr
        return config

    def __del__(self):
        C.git_config_free(self._config)

    def _get(self, key):
        # Raw lookup; returns (error code, C string out-param).
        assert_string(key, 'key')
        cstr = ffi.new('char **')
        err = C.git_config_get_string(cstr, self._config, to_bytes(key))
        return (err, cstr)

    def _get_string(self, key):
        """Lookup *key*, raising KeyError when absent."""
        (err, cstr) = self._get(key)
        if (err == C.GIT_ENOTFOUND):
            raise KeyError(key)
        check_error(err)
        return cstr[0]

    def __contains__(self, key):
        (err, cstr) = self._get(key)
        if (err == C.GIT_ENOTFOUND):
            return False
        check_error(err)
        return True

    def __getitem__(self, key):
        val = self._get_string(key)
        return ffi.string(val).decode('utf-8')

    def __setitem__(self, key, value):
        """Set *key*; bool/int/str values map to the typed libgit2 setters."""
        assert_string(key, 'key')
        err = 0
        if isinstance(value, bool):
            err = C.git_config_set_bool(self._config, to_bytes(key), value)
        elif isinstance(value, int):
            err = C.git_config_set_int64(self._config, to_bytes(key), value)
        else:
            err = C.git_config_set_string(self._config, to_bytes(key), to_bytes(value))
        check_error(err)

    def __delitem__(self, key):
        assert_string(key, 'key')
        err = C.git_config_delete_entry(self._config, to_bytes(key))
        check_error(err)

    def __iter__(self):
        citer = ffi.new('git_config_iterator **')
        err = C.git_config_iterator_new(citer, self._config)
        check_error(err)
        return ConfigIterator(self, citer[0])

    def get_multivar(self, name, regex=None):
        """Iterate all values of multivar *name*, optionally regex-filtered."""
        assert_string(name, 'name')
        citer = ffi.new('git_config_iterator **')
        err = C.git_config_multivar_iterator_new(citer, self._config, to_bytes(name), to_bytes(regex))
        check_error(err)
        return ConfigMultivarIterator(self, citer[0])

    def set_multivar(self, name, regex, value):
        """Set all values of multivar *name* matching *regex* to *value*."""
        assert_string(name, 'name')
        assert_string(regex, 'regex')
        assert_string(value, 'value')
        err = C.git_config_set_multivar(self._config, to_bytes(name), to_bytes(regex), to_bytes(value))
        check_error(err)

    def get_bool(self, key):
        """Lookup *key* and parse it with git's bool semantics."""
        val = self._get_string(key)
        res = ffi.new('int *')
        err = C.git_config_parse_bool(res, val)
        check_error(err)
        return (res[0] != 0)

    def get_int(self, key):
        """Lookup *key* and parse it with git's int semantics (k/m/g suffixes)."""
        val = self._get_string(key)
        res = ffi.new('int64_t *')
        err = C.git_config_parse_int64(res, val)
        check_error(err)
        return res[0]

    def add_file(self, path, level=0, force=0):
        """Add another on-disk config file at the given priority level."""
        err = C.git_config_add_file_ondisk(self._config, to_bytes(path), level, force)
        check_error(err)

    def snapshot(self):
        """Return a read-only consistent snapshot of this config."""
        ccfg = ffi.new('git_config **')
        err = C.git_config_snapshot(ccfg, self._config)
        check_error(err)
        return Config.from_c(self._repo, ccfg[0])

    @staticmethod
    def parse_bool(text):
        res = ffi.new('int *')
        err = C.git_config_parse_bool(res, to_bytes(text))
        check_error(err)
        return (res[0] != 0)

    @staticmethod
    def parse_int(text):
        res = ffi.new('int64_t *')
        err = C.git_config_parse_int64(res, to_bytes(text))
        check_error(err)
        return res[0]

    @staticmethod
    def _from_found_config(fn):
        # *fn* is one of libgit2's git_config_find_* functions; it fills a
        # git_buf with the located path.
        buf = ffi.new('git_buf *', (ffi.NULL, 0))
        err = fn(buf)
        check_error(err, True)
        cpath = ffi.string(buf.ptr).decode()
        C.git_buf_free(buf)
        return Config(cpath)

    @staticmethod
    def get_system_config():
        """Open the system-wide config file."""
        return Config._from_found_config(C.git_config_find_system)

    @staticmethod
    def get_global_config():
        """Open the user's global config file."""
        return Config._from_found_config(C.git_config_find_global)

    @staticmethod
    def get_xdg_config():
        """Open the XDG config file."""
        return Config._from_found_config(C.git_config_find_xdg)