@pytest.mark.skipif(not has_cupy_gpu, reason='needs CuPy GPU')
def test_model_gpu():
    pytest.importorskip('ml_datasets')
    import ml_datasets
    with use_ops('cupy'):
        n_hidden = 32
        dropout = 0.2
        (train_X, train_Y), (dev_X, dev_Y) = ml_datasets.mnist()
        model = chain(Relu(nO=n_hidden, dropout=dropout), Relu(nO=n_hidden, dropout=dropout), Softmax())
        train_X = model.ops.asarray(train_X)
        train_Y = model.ops.asarray(train_Y)
        dev_X = model.ops.asarray(dev_X)
        dev_Y = model.ops.asarray(dev_Y)
        model.initialize(X=train_X[:5], Y=train_Y[:5])
        optimizer = Adam(0.001)
        batch_size = 128
        for i in range(2):
            batches = model.ops.multibatch(batch_size, train_X, train_Y, shuffle=True)
            for X, Y in batches:
                Yh, backprop = model.begin_update(X)
                backprop(Yh - Y)
                model.finish_update(optimizer)
        correct = 0
        total = 0
        for X, Y in model.ops.multibatch(batch_size, dev_X, dev_Y):
            Yh = model.predict(X)
            correct += (Yh.argmax(axis=1) == Y.argmax(axis=1)).sum()
            total += Yh.shape[0]

class ParallelPreprocessor(Preprocessor):
    def __init__(self, *args, **kwargs):
        self.tasks = mp.cpu_count()
        super(ParallelPreprocessor, self).__init__(*args, **kwargs)

    def on_result(self, result):
        self.results.append(result)

    def transform(self, fileids=None, categories=None):
        if not os.path.exists(self.target):
            os.makedirs(self.target)
        self.replicate(self.corpus.root)
        self.results = []
        pool = mp.Pool(processes=self.tasks)
        tasks = [
            pool.apply_async(self.process, (idx, fileid), callback=self.on_result)
            for idx, fileid in enumerate(self.fileids(fileids, categories))
        ]
        pool.close()
        pool.join()
        return self.results

def test_ordered_set():
    ordered_set = OrderedSet([1, 3, 1, 2])
    assert list(ordered_set) == [1, 3, 2]
    assert 1 in ordered_set
    assert bool(ordered_set)
    ordered_set.add(1)
    assert 1 in ordered_set
    ordered_set.remove(1)
    assert 1 not in ordered_set
    for i in range(4):
        ordered_set.discard(i)
    assert not bool(ordered_set)

class UnsignedShortType(Type):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.name = UNSIGNED_SHORT
        self.byte_size = 2

    def debug_info(self):
        bs = bytearray()
        bs.append(ENUM_ABBREV_CODE['BASE_TYPE_WITH_ENCODING'])
        bs.append(self.byte_size)
        bs.append(ENUM_DW_ATE['DW_ATE_unsigned'])
        bs.extend(map(ord, self.name))
        bs.append(0)
        return bs

# The route decorator name was stripped in the source; '@router.get' is
# assumed here from the GET semantics and the paginated response model.
@router.get(
    urls.PRIVACY_NOTICE,
    status_code=HTTP_200_OK,
    response_model=Page[schemas.PrivacyNoticeResponse],
    dependencies=[Security(verify_oauth_client, scopes=[scope_registry.PRIVACY_NOTICE_READ])],
)
def get_privacy_notice_list(
    *,
    db: Session = Depends(deps.get_db),
    params: Params = Depends(),
    show_disabled: Optional[bool] = True,
    region: Optional[PrivacyNoticeRegion] = None,
    systems_applicable: Optional[bool] = False,
    request: Request,
) -> AbstractPage[PrivacyNotice]:
    logger.info("Finding all PrivacyNotices with pagination params '{}'", params)
    notice_query = generate_notice_query(db=db, show_disabled=show_disabled, systems_applicable=systems_applicable, region=region)
    should_unescape = request.headers.get(UNESCAPE_SAFESTR_HEADER)
    privacy_notices = notice_query.order_by(PrivacyNotice.created_at.desc())
    return paginate(
        [
            transform_fields(transformation=unescape, model=notice, fields=PRIVACY_NOTICE_ESCAPE_FIELDS)
            if should_unescape
            else notice
            for notice in privacy_notices
        ],
        params=params,
    )

class OptionPlotoptionsArcdiagramSonificationTracksMapping(Options):
    # The '@property' / '@<name>.setter' decorators appear to have been
    # stripped from this generated options class (the paired 'text' defs
    # would otherwise shadow each other); they are restored here.

    @property
    def frequency(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsArcdiagramSonificationTracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsArcdiagramSonificationTracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsArcdiagramSonificationTracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsArcdiagramSonificationTracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsArcdiagramSonificationTracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsArcdiagramSonificationTracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsArcdiagramSonificationTracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsArcdiagramSonificationTracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsArcdiagramSonificationTracksMappingRate)

    @property
    def text(self):
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsArcdiagramSonificationTracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsArcdiagramSonificationTracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsArcdiagramSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsArcdiagramSonificationTracksMappingVolume)

@_api.route('/v1/check/images/<path:image_name>', methods=['POST'])
def check_docker_by_image_name(image_name):
    if not image_name:
        return json.dumps({'err': 400, 'msg': 'Bad image name'}, sort_keys=True), 400
    try:
        pulled = False
        if not InternalServer.get_docker_driver().is_docker_image(image_name):
            if ':' in image_name:
                tmp = image_name.split(':')[0]
                tag = image_name.split(':')[1]
                msg = 'Error: image library/' + image_name + ':' + tag + ' not found'
                output = InternalServer.get_docker_driver().docker_pull(tmp, tag=tag)
            else:
                msg = 'Error: image library/' + image_name + ':latest not found'
                output = InternalServer.get_docker_driver().docker_pull(image_name)
            if 'errorDetail' in output:
                DagdaLogger.get_logger().error(msg)
                raise DagdaError(msg)
            pulled = True
    except Exception as ex:
        message = 'Unexpected exception of type {0} occurred while pulling the docker image: {1!r}'.format(
            type(ex).__name__,
            ex.get_message() if type(ex).__name__ == 'DagdaError' else ex.args)
        DagdaLogger.get_logger().error(message)
        return json.dumps({'err': 404, 'msg': 'Image name not found'}, sort_keys=True), 404
    data = {}
    data['image_name'] = image_name
    data['timestamp'] = datetime.datetime.now().timestamp()
    data['status'] = 'Analyzing'
    id = InternalServer.get_mongodb_driver().insert_docker_image_scan_result_to_history(data)
    InternalServer.get_dagda_edn().put({'msg': 'check_image', 'image_name': image_name, '_id': str(id), 'pulled': pulled})
    output = {}
    output['id'] = str(id)
    output['msg'] = 'Accepted the analysis of <' + image_name + '>'
    return json.dumps(output, sort_keys=True), 202

class TestTransformerModelConverter(AITTestCase):
    def test_transformer_encoder(self):
        torch.manual_seed(0)

        class EncoderBlock(torch.nn.Module):
            def __init__(self, input_dim, num_heads, dim_feedforward, dropout=0.0):
                super().__init__()
                self.attn = torch.nn.MultiheadAttention(embed_dim=input_dim, num_heads=num_heads, batch_first=True)
                self.linear_net = torch.nn.Sequential(
                    torch.nn.Linear(input_dim, dim_feedforward),
                    torch.nn.Dropout(dropout),
                    torch.nn.ReLU(inplace=True),
                    torch.nn.Linear(dim_feedforward, input_dim),
                )
                self.norm1 = torch.nn.LayerNorm(input_dim)
                self.norm2 = torch.nn.LayerNorm(input_dim)
                self.dropout = torch.nn.Dropout(dropout)

            def forward(self, x):
                attn_out, _ = self.attn(query=x, key=x, value=x)
                x = x + self.dropout(attn_out)
                x = self.norm1(x)
                linear_out = self.linear_net(x)
                x = x + self.dropout(linear_out)
                x = self.norm2(x)
                return x

        model = EncoderBlock(input_dim=512, num_heads=16, dim_feedforward=12).cuda().half()
        inputs = [torch.randn(10, 32, 512).half().cuda()]
        self.run_test(model, inputs, expected_ops={})

class UserSettingsHandler(BaseHandler):
    def initialize(self, app):
        self.user_settings = app.user_settings

    def get(self, path):
        if path == 'style.css':
            self.set_status(200)
            self.set_header('Content-type', 'text/css')
            self.write(self.user_settings['user_css'])

class ConnectorRunner:
    def __init__(self, db, cache: FidesopsRedis, connector_type: str, secrets: Dict[str, Any],
                 external_references: Optional[Dict[str, Any]] = None,
                 erasure_external_references: Optional[Dict[str, Any]] = None):
        self.db = db
        self.cache = cache
        self.connector_type = connector_type
        self.external_references = external_references
        self.erasure_external_references = erasure_external_references
        self.config = _config(connector_type)
        self.dataset = _dataset(connector_type)
        for external_reference in self.config.get('external_references', []):
            external_reference_name = external_reference['name']
            secrets[external_reference_name] = {
                'dataset': f'{connector_type}_external_dataset',
                'field': f'{connector_type}_external_collection.{external_reference_name}',
                'direction': 'from',
            }
        self.connection_config = _connection_config(db, self.config, secrets)
        self.dataset_config = dataset_config(db, self.connection_config, self.dataset)

    def test_connection(self):
        get_connector(self.connection_config).test_connection()

    async def access_request(self, access_policy: Policy, identities: Dict[str, Any]) -> Dict[str, List[Row]]:
        fides_key = self.connection_config.key
        privacy_request = PrivacyRequest(id=f'test_{fides_key}_access_request_{random.randint(0, 1000)}')
        identity = Identity(**identities)
        privacy_request.cache_identity(identity)
        if self.external_references:
            self.cache.set_encoded_object(
                f'{privacy_request.id}__access_request__{self.connector_type}_external_dataset:{self.connector_type}_external_collection',
                [self.external_references])
        graph_list = [self.dataset_config.get_graph()]
        connection_config_list = [self.connection_config]
        _process_external_references(self.db, graph_list, connection_config_list)
        dataset_graph = DatasetGraph(*graph_list)
        access_results = await graph_task.run_access_request(privacy_request, access_policy, dataset_graph, connection_config_list, identities, self.db)
        for collection in self.dataset['collections']:
            assert len(access_results[f"{fides_key}:{collection['name']}"]), f"No rows returned for collection '{collection['name']}'"
        return access_results

    async def strict_erasure_request(self, access_policy: Policy, erasure_policy: Policy, identities: Dict[str, Any]) -> Tuple[Dict, Dict]:
        masking_strict = CONFIG.execution.masking_strict
        CONFIG.execution.masking_strict = True
        access_results, erasure_results = await self._base_erasure_request(access_policy, erasure_policy, identities)
        CONFIG.execution.masking_strict = masking_strict
        return access_results, erasure_results

    async def non_strict_erasure_request(self, access_policy: Policy, erasure_policy: Policy, identities: Dict[str, Any]) -> Tuple[Dict, Dict]:
        masking_strict = CONFIG.execution.masking_strict
        CONFIG.execution.masking_strict = False
        access_results, erasure_results = await self._base_erasure_request(access_policy, erasure_policy, identities)
        CONFIG.execution.masking_strict = masking_strict
        return access_results, erasure_results

    async def _base_erasure_request(self, access_policy: Policy, erasure_policy: Policy, identities: Dict[str, Any]) -> Tuple[Dict, Dict]:
        fides_key = self.connection_config.key
        privacy_request = PrivacyRequest(id=f'test_{fides_key}_access_request_{random.randint(0, 1000)}')
        identity = Identity(**identities)
        privacy_request.cache_identity(identity)
        if self.erasure_external_references:
            self.cache.set_encoded_object(
                f'{privacy_request.id}__access_request__{self.connector_type}_external_dataset:{self.connector_type}_external_collection',
                [self.erasure_external_references])
        graph_list = [self.dataset_config.get_graph()]
        connection_config_list = [self.connection_config]
        _process_external_references(self.db, graph_list, connection_config_list)
        dataset_graph = DatasetGraph(*graph_list)
        access_results = await graph_task.run_access_request(privacy_request, access_policy, dataset_graph, connection_config_list, identities, self.db)
        for collection in self.dataset['collections']:
            assert len(access_results[f"{fides_key}:{collection['name']}"]), f"No rows returned for collection '{collection['name']}'"
        erasure_results = await graph_task.run_erasure(privacy_request, erasure_policy, dataset_graph, connection_config_list, identities, get_cached_data_for_erasures(privacy_request.id), self.db)
        return access_results, erasure_results

def freq_from_hps(signal, fs):
    signal = asarray(signal) + 0.0
    N = len(signal)
    signal -= mean(signal)
    windowed = signal * kaiser(N, 100)
    # Harmonic product spectrum: sum log-magnitude spectrum with its decimations
    X = log(abs(rfft(windowed)))
    X -= mean(X)
    hps = copy(X)
    for h in range(2, 9):
        dec = decimate(X, h, zero_phase=True)
        hps[:len(dec)] += dec
    i_peak = argmax(hps[:len(dec)])
    i_interp = parabolic(hps, i_peak)[0]
    return (fs * i_interp) / N

# Decorator names were partially stripped in the source; '@login_required'
# (flask-login) is assumed for the '_required' remnant.
@_routes.route('/<int:stream_id>/join')
@login_required
def join_stream(stream_id: int):
    stream = VideoStream.query.get_or_404(stream_id)
    if not stream.user_can_access:
        raise NotFoundError({'source': ''}, 'Video Stream Not Found')
    if (not stream.channel) or (stream.channel.provider != 'bbb'):
        raise BadRequestError({'param': 'stream_id'}, 'Join action is not applicable on this stream provider')
    options = stream.extra.get('bbb_options') or stream.extra.get('jitsi_options') or default_options
    params = dict(name=stream.name,
                  meetingID=stream.extra['response']['meetingID'],
                  moderatorPW=stream.extra['response']['moderatorPW'],
                  attendeePW=stream.extra['response']['attendeePW'],
                  **options)
    channel = stream.channel
    bbb = BigBlueButton(channel.api_url, channel.api_key)
    result = bbb.request('create', params)
    if result.success and result.data:
        stream.extra = {**result.data, 'bbb_options': options}
        db.session.commit()
    elif result.data and (result.data.get('response', {}).get('messageKey') == 'idNotUnique'):
        pass
    else:
        logger.error('Error creating BBB Meeting: %s', result)
        raise BadRequestError('', 'Cannot create Meeting on BigBlueButton')
    join_url = bbb.build_url('join', {
        'fullName': current_user.public_name or current_user.full_name or current_user.anonymous_name,
        'join_via_html5': 'true',
        'meetingID': params['meetingID'],
        'password': params['moderatorPW' if stream.user_is_moderator else 'attendeePW'],
    })
    return jsonify(url=join_url)

class OptionPlotoptionsBubbleTooltip(Options):
    # '@property' / '@<name>.setter' decorators appear to have been stripped
    # from this generated options class (each paired getter/setter would
    # otherwise shadow its twin); they are restored here.

    @property
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsBubbleTooltipDatetimelabelformats':
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsBubbleTooltipDatetimelabelformats)

    @property
    def distance(self):
        return self._config_get(16)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        return self._config_get('<span style="color:{point.color}"></span> <span style="font-size: 0.8em"> {series.name}</span><br/>')

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        return self._config_get('({point.x}, {point.y}), Size: {point.z}')

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)

class OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    # '@property' / setter decorators appear stripped; restored here.

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)

class OptionPlotoptionsDumbbellSonificationContexttracksMapping(Options):
    # '@property' / '@text.setter' decorators appear stripped (the paired
    # 'text' defs would otherwise shadow each other); restored here.

    @property
    def frequency(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsDumbbellSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsDumbbellSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsDumbbellSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsDumbbellSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsDumbbellSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsDumbbellSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsDumbbellSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsDumbbellSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsDumbbellSonificationContexttracksMappingRate)

    @property
    def text(self):
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsDumbbellSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsDumbbellSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsDumbbellSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsDumbbellSonificationContexttracksMappingVolume)

class FromCrypto:
    env_int_def_from_crypto: int = '${spock.crypto:gAAAAABigpYHrKffEQ203V6L5YEikgAfuzOU6i0xigLinKlXeR7seWHji4aHyoQ-H9IGaXcCns65AZq-cSyXcUFtQ_9w43RUraUM-tqDdCXeiDygeA_BEC0=}'
    env_float_def_from_crypto: float = '${spock.crypto:gAAAAABigpYHuJndgXM8wQ17uDblBfgm256VzXNjCiblpPfL08LndRWSG4E8v7rSPB7AmfoUwmvTW91b1qn1O1UL2aTNdNz-pmkmf6ZrOpxNnSgOF7TSpE8=}'
    env_bool_def_from_crypto: bool = '${spock.crypto:gAAAAABigpYHfzExxlvyFcIjzOMn25Gj-2luN0tGQ1dpDb8lInCY3C5PNTlaV4xLxekQ6x2SJli37dpaRNB4vXBqE1MLU5V9Rth9dlu6olmEuomIzx8V_Nw=}'
    env_str_def_from_crypto: str = '${spock.crypto:gAAAAABigpYH8mqVr8LCATnJBHyTAhnoO6nDXAjzyVlxiXSPSqlmYMp9h4i2S552DC_xQHgUiN11dbyD2psroKUxF_uPDRzhPfvG9mkZvbTEpMpb5JPqJxs=}'

def safe_save_model_for_hf_trainer(trainer: transformers.Trainer, output_dir: str):
    state_dict = trainer.model.state_dict()
    if trainer.args.should_save:
        cpu_state_dict = {}
        for key in state_dict.keys():
            if 'teacher' in key:
                continue
            cpu_state_dict[key] = state_dict[key]
        del state_dict
        trainer._save(output_dir, state_dict=cpu_state_dict)

def get(obj):
    if not isinstance(obj, bytes):
        raise TypeError('object type must be bytes')
    info = {'type': dict(), 'extension': dict(), 'mime': dict()}
    # The original '{:02}' (decimal) would break the offset arithmetic below,
    # which assumes two hex digits plus one space per byte; hex formatting is
    # assumed here.
    stream = ' '.join(['{:02x}'.format(byte) for byte in obj])
    for element in data:
        for signature in element['signature']:
            offset = (element['offset'] * 2) + element['offset']
            if signature == stream[offset:len(signature) + offset]:
                for key in ['type', 'extension', 'mime']:
                    info[key][element[key]] = len(signature)
    for key in ['type', 'extension', 'mime']:
        info[key] = [element for element in sorted(info[key], key=info[key].get, reverse=True)]
    return Info(info['type'], info['extension'], info['mime'])

def test_grid_from_hdf_warns(tmp_path, any_grid):
    if version.parse(xtgeo_version) < version.parse('2.16'):
        pytest.skip()
    any_grid.to_hdf(tmp_path / 'grid.hdf')
    with pytest.warns(DeprecationWarning, match='from_hdf is deprecated'):
        any_grid.from_hdf(tmp_path / 'grid.hdf')

# Decorator names were partially stripped in the source; '@login_required'
# (flask-login) is assumed for the '_required' remnant.
@_routes.route('/<int:stream_id>/chat-token')
@login_required
def get_chat_token(stream_id: int):
    stream = VideoStream.query.get_or_404(stream_id)
    event = stream.event
    if not stream.user_can_access:
        raise NotFoundError({'source': ''}, 'Video Stream Not Found')
    if not event.is_chat_enabled:
        raise NotFoundError({'source': ''}, 'Chat Not Enabled')
    try:
        data = get_rocket_chat_token(current_user, event)
        return jsonify({'success': True, 'token': data['token']})
    except RocketChatException as rce:
        if rce.code == RocketChatException.CODES.DISABLED:
            return jsonify({'success': False, 'code': rce.code})
        else:
            return jsonify({'success': False, 'code': rce.code,
                            'response': (rce.response is not None) and rce.response.json()})

class MockTaskExecutor(TaskExecutorBase):
    def __init__(self):
        self.command_queue: PersistentQueue = None
        self.started_task = []
        self.stopped_task = []

    def start_task_execution(self, key: TaskExecutionKey):
        self.started_task.append(key)

    def stop_task_execution(self, key: TaskExecutionKey):
        self.stopped_task.append(key)

    def start(self):
        self.command_queue = PersistentQueue(maxsize=100)

    def stop(self):
        if not self.command_queue:
            raise AIFlowException('The executor should be started first!')

class Thunderbird(Backend):
    _unread = set()

    def init(self):
        dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
        bus = dbus.SessionBus()
        bus.add_signal_receiver(self.new_msg, dbus_interface='org.mozilla.thunderbird.DBus', signal_name='NewMessageSignal')
        bus.add_signal_receiver(self.changed_msg, dbus_interface='org.mozilla.thunderbird.DBus', signal_name='ChangedMessageSignal')
        loop = GObject.MainLoop()
        dbus.mainloop.glib.threads_init()
        self.context = loop.get_context()
        self.run = partial(self.context.iteration, False)

    def new_msg(self, id, author, subject):
        if id not in self._unread:
            self._unread.add(id)

    def changed_msg(self, id, event):
        if (event == 'read') and (id in self._unread):
            self._unread.remove(id)

    def unread(self):
        self.run()
        return len(self._unread)

def downgrade():
    op.drop_column('ctl_data_uses', 'replaced_by')
    op.drop_column('ctl_data_uses', 'version_deprecated')
    op.drop_column('ctl_data_uses', 'version_added')
    op.drop_column('ctl_data_subjects', 'replaced_by')
    op.drop_column('ctl_data_subjects', 'version_deprecated')
    op.drop_column('ctl_data_subjects', 'version_added')
    op.drop_column('ctl_data_qualifiers', 'replaced_by')
    op.drop_column('ctl_data_qualifiers', 'version_deprecated')
    op.drop_column('ctl_data_qualifiers', 'version_added')
    op.drop_column('ctl_data_categories', 'replaced_by')
    op.drop_column('ctl_data_categories', 'version_deprecated')
    op.drop_column('ctl_data_categories', 'version_added')

class UserProfileView(APIView):
    serializer = UserProfileSerializer
    dc_bound = False
    order_by_default = ('user__username',)
    order_by_field_map = {'created': 'user_id', 'username': 'user__username'}

    def __init__(self, request, username, data, many=False):
        super(UserProfileView, self).__init__(request)
        self.username = username
        self.data = data
        self.many = many
        if username:
            self.user = get_user(request, username, sr=('userprofile',), exists_ok=True, noexists_fail=True)
            self.profile = self.user.userprofile
        else:
            self._full = True
            self.profile = get_user_profiles(request, data=data, sr=('user',), pr=(), order_by=self.order_by)

    def get(self):
        return self._get(self.profile, self.data, self.many, field_name='user__username')

    def post(self):
        raise OperationNotSupported

    def put(self):
        profile = self.profile
        ser = self.serializer(self.request, profile, data=self.data, partial=True)
        if not ser.is_valid():
            return FailureTaskResponse(self.request, ser.errors, obj=profile, dc_bound=False)
        ser.save()
        res = SuccessTaskResponse(self.request, ser.data, obj=self.user, detail_dict=ser.detail_dict(),
                                  owner=ser.object.user, msg=LOG_PROFILE_UPDATE, dc_bound=False)
        task_id = res.data.get('task_id')
        connection.on_commit(lambda: user_profile_changed.send(task_id, user_name=self.user.username))
        return res

    def delete(self):
        raise OperationNotSupported

def prepare_df_for_time_index_plot(df: DataFrame, column_name: str, datetime_name: Optional[str]) -> Tuple[pd.DataFrame, Optional[str]]:
    if datetime_name is not None:
        prefix, pattern, freq = choose_agg_period(df, None, datetime_name)
        date_col = sf.col(datetime_name)
        if pattern == 'week':
            period_col = sf.concat(sf.year(date_col), '-', sf.weekofyear(date_col)).alias(PERIOD_COL)
        else:
            period_col = sf.date_format(date_col, pattern).alias(PERIOD_COL)
        plot_df = df.select(column_name, period_col).groupby(PERIOD_COL).agg(
            sf.mean(column_name).alias('mean'), sf.stddev_pop(column_name).alias('std'))
        if pattern == 'week':
            split = sf.split(PERIOD_COL, '-')
            week = split.getItem(1)
            year = sf.to_timestamp(split.getItem(0), 'y')
            week_start_diff = sf.date_format(year, 'F')
            plot_df = plot_df.select('*', sf.date_add(year, (week * 7) - week_start_diff).alias(PERIOD_COL))
        return plot_df.toPandas(), prefix
    ptp = df.count() - 1
    schema = StructType(fields=[StructField('_1', dataType=df.schema), StructField('_2', dataType=LongType())])
    plot_df = df.rdd.zipWithIndex().toDF(schema=schema).select(
        sf.col('_1').getItem(column_name).alias(column_name),
        sf.floor(sf.col('_2') / (ptp / (OPTIMAL_POINTS - 1))).alias(PERIOD_COL))
    plot_df = plot_df.groupby(PERIOD_COL).agg(
        sf.mean(column_name).alias('mean'), sf.stddev(column_name).alias('std')).toPandas().sort_values(PERIOD_COL)
    return plot_df, None

def LMQLOp(name):
    def class_transformer(cls):
        cls_name = cls.__name__
        if cls.__module__ != 'lmql.ops.ops':
            cls_name = cls
        if type(name) is list:
            for n in name:
                lmql_operation_registry[n] = cls_name
            return cls
        lmql_operation_registry[name] = cls_name
        return cls
    return class_transformer

# The decorator name was stripped in the source; numba's '@jit(nopython=True)'
# is assumed from the surviving '(nopython=True)' remnant.
@jit(nopython=True)
def kernel_g_z_spherical(longitude, cosphi, sinphi, radius, longitude_p, cosphi_p, sinphi_p, radius_p):
    distance, cospsi, _ = distance_spherical_core(longitude, cosphi, sinphi, radius, longitude_p, cosphi_p, sinphi_p, radius_p)
    delta_z = radius - (radius_p * cospsi)
    return delta_z / (distance ** 3)

class TestEmailLogin(BaseEvenniaCommandTest):
    # The email addresses originally embedded in the command arguments and
    # expected strings (e.g. in "The email '' does not match any accounts.")
    # appear to have been stripped from the source; they are left as-is.
    def test_connect(self):
        self.call(email_login.CmdUnconnectedConnect(), ' test', "The email '' does not match any accounts.", inputs=['Y'])
        self.call(email_login.CmdUnconnectedCreate(), '"mytest" test11111', "A new account 'mytest' was created. Welcome!", inputs=['Y'])
        self.call(email_login.CmdUnconnectedConnect(), ' test11111', '', caller=self.account.sessions.get()[0], inputs=['Y'])

    def test_quit(self):
        self.call(email_login.CmdUnconnectedQuit(), '', '', caller=self.account.sessions.get()[0])

    def test_unconnectedlook(self):
        self.call(email_login.CmdUnconnectedLook(), '', '')

    def test_unconnectedhelp(self):
        self.call(email_login.CmdUnconnectedHelp(), '', 'You are not yet logged into the game.')

class PParallelInpOut32(object):
    def __init__(self, address=888):
        from ctypes import windll
        if isinstance(address, str) and address.startswith('0x'):
            self.base = int(address, 16)
        else:
            self.base = address
        try:
            self.port = windll.inpout32
        except Exception:
            self.port = windll.inpoutx64
        BYTEMODEMASK = ((1 << 5) | (1 << 6) | (1 << 7)) & 255
        # Put the port into byte mode via the extended control register
        self.port.Out32(self.base + 1026, int((self.port.Inp32(self.base + 1026) & ((~BYTEMODEMASK) & 255)) | (1 << 5)))
        self.reverse = False
        self.status = None

    # '@property' / '@reverse.setter' decorators appear to have been stripped
    # in the source (the paired defs would otherwise shadow each other).
    @property
    def reverse(self):
        return self._reverse

    @reverse.setter
    def reverse(self, value):
        if value:
            self.port.Out32(self.base + 2, int(self.port.Inp32(self.base + 2) | ((1 << 5) & 255)))
            self._reverse = True
        else:
            self.port.Out32(self.base + 2, int(self.port.Inp32(self.base + 2) & ((~(1 << 5)) & 255)))
            self._reverse = False

    def setData(self, data):
        self.port.Out32(self.base, data)

    def setPin(self, pinNumber, state):
        if state:
            if pinNumber in range(2, 10):
                self.port.Out32(self.base, self.port.Inp32(self.base) | (2 ** (pinNumber - 2)))
            elif pinNumber == 1:
                self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) | 1)
            elif pinNumber == 14:
                self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) | 2)
            elif pinNumber == 16:
                self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) | 4)
            elif pinNumber == 17:
                self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) | 8)
        elif pinNumber in range(2, 10):
            self.port.Out32(self.base, self.port.Inp32(self.base) & (255 ^ (2 ** (pinNumber - 2))))
        elif pinNumber == 1:
            self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) & (255 ^ 1))
        elif pinNumber == 14:
            self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) & (255 ^ 2))
        elif pinNumber == 16:
            self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) & (255 ^ 4))
        elif pinNumber == 17:
            self.port.Out32(self.base + 2, self.port.Inp32(self.base + 2) & (255 ^ 8))

    def readData(self):
        return self.port.Inp32(self.base)

    def readPin(self, pinNumber):
        if pinNumber == 10:
            return (self.port.Inp32(self.base + 1) >> 6) & 1
        elif pinNumber == 11:
            return (self.port.Inp32(self.base + 1) >> 7) & 1
        elif pinNumber == 12:
            return (self.port.Inp32(self.base + 1) >> 5) & 1
        elif pinNumber == 13:
            return (self.port.Inp32(self.base + 1) >> 4) & 1
        elif pinNumber == 15:
            return (self.port.Inp32(self.base + 1) >> 3) & 1
        elif (pinNumber >= 2) and (pinNumber <= 9):
            return (self.port.Inp32(self.base) >> (pinNumber - 2)) & 1
        elif pinNumber == 1:
            return (self.port.Inp32(self.base + 2) >> 0) & 1
        elif pinNumber == 14:
            return (self.port.Inp32(self.base + 2) >> 1) & 1
        elif pinNumber == 16:
            return (self.port.Inp32(self.base + 2) >> 2) & 1
        elif pinNumber == 17:
            return (self.port.Inp32(self.base + 2) >> 3) & 1
        else:
            print('Pin %i cannot be read (by the PParallelInpOut32.readPin() yet)' % pinNumber)

def get_releases(metadata: DictConfig) -> List[Version]:
    ret: List[Version] = []
    for ver, files in metadata.releases.items():
        for file in files:
            if (file.packagetype == 'bdist_wheel') and (file.yanked is not True):
                v = parse_version(ver)
                ret.append(v)
    return sorted(ret)

class BlockUidHistory(ReprMixIn):
    def __init__(self) -> None:
        self._history: Dict[int, Dict[int, SparseBitfield]] = {}

    def add(self, storage_id: int, block_uid: BlockUid) -> None:
        assert (block_uid.left is not None) and (block_uid.right is not None)
        history = self._history
        if storage_id not in history:
            history[storage_id] = {}
        if block_uid.left not in history[storage_id]:
            history[storage_id][block_uid.left] = SparseBitfield()
        history[storage_id][block_uid.left].add(block_uid.right)

    def seen(self, storage_id: int, block_uid: BlockUid) -> bool:
        history = self._history
        if storage_id not in history:
            return False
        if block_uid.left not in history[storage_id]:
            return False
        return block_uid.right in history[storage_id][block_uid.left]

class LondonVM(BerlinVM):
    fork = 'london'
    block_class: Type[BaseBlock] = LondonBlock
    _state_class: Type[BaseState] = LondonState

    create_header_from_parent = staticmethod(create_london_header_from_parent(compute_london_difficulty))
    compute_difficulty = staticmethod(compute_london_difficulty)
    configure_header = configure_london_header

    # Both methods below take 'cls'; their '@classmethod' decorators appear
    # to have been stripped in the source and are restored here.
    @classmethod
    def validate_gas(cls, header: BlockHeaderAPI, parent_header: BlockHeaderAPI) -> None:
        if hasattr(parent_header, 'base_fee_per_gas'):
            parent_gas_limit = parent_header.gas_limit
        else:
            # The first London block multiplies the parent (pre-London) gas limit
            parent_gas_limit = parent_header.gas_limit * ELASTICITY_MULTIPLIER
        validate_gas_limit(header.gas_limit, parent_gas_limit)
        expected_base_fee_per_gas = calculate_expected_base_fee_per_gas(parent_header)
        if expected_base_fee_per_gas != header.base_fee_per_gas:
            raise ValidationError(f'Header has invalid base fee per gas (has {header.base_fee_per_gas}, expected {expected_base_fee_per_gas})')

    @classmethod
    def calculate_net_gas_refund(cls, consumed_gas: int, gross_refund: int) -> int:
        max_refund = consumed_gas // EIP3529_MAX_REFUND_QUOTIENT
        return min(max_refund, gross_refund)

@pytest.mark.django_db
def test_alternate_agency(client, monkeypatch, transaction_search_1, elasticsearch_transaction_index):
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    resp = client.get(url.format(toptier_code='002', filter='?fiscal_year=2021'))
    assert resp.status_code == status.HTTP_200_OK
    expected_results = {
        'total_aggregated_amount': 400.0,
        'results': [
            {'category': 'contracts', 'aggregated_amount': 0.0},
            {'category': 'direct_payments', 'aggregated_amount': 0.0},
            {'category': 'grants', 'aggregated_amount': 0.0},
            {'category': 'idvs', 'aggregated_amount': 400.0},
            {'category': 'loans', 'aggregated_amount': 0.0},
            {'category': 'other', 'aggregated_amount': 0.0},
        ],
    }
    assert resp.json() == expected_results

class CyclicBehaviour(SimpleBehaviour, ABC):
    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self._number_of_executions = 0

    # A '@property' decorator was likely stripped here (the method just
    # exposes the backing attribute); restored on that assumption.
    @property
    def number_of_executions(self) -> int:
        return self._number_of_executions

    def act_wrapper(self) -> None:
        if not self.is_done():
            super().act_wrapper()
            self._number_of_executions += 1

    def is_done(self) -> bool:
        return False

def fetch_production(zone_key: str = 'PE', session: Session | None = None,
                     target_datetime: datetime | None = None,
                     logger: Logger = getLogger(__name__)) -> list:
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    r = session or Session()
    url = ''  # the COES endpoint URL was stripped from the source; left blank here
    current_date = arrow.now(tz=tz)
    today = current_date.format('DD/MM/YYYY')
    yesterday = current_date.shift(days=-1).format('DD/MM/YYYY')
    end_date = current_date.shift(days=+1).format('DD/MM/YYYY')
    response_today = r.post(url, data={'fechaInicial': today, 'fechaFinal': end_date, 'indicador': 0})
    response_yesterday = r.post(url, data={'fechaInicial': yesterday, 'fechaFinal': today, 'indicador': 0})
    data_today = response_today.json()['GraficoTipoCombustible']['Series']
    data_yesterday = response_yesterday.json()['GraficoTipoCombustible']['Series']
    raw_data = data_today + data_yesterday
    interval_hours = (parse_date(raw_data[0]['Data'][1]) - parse_date(raw_data[0]['Data'][0])).total_seconds() / 3600
    data = []
    datetimes = []
    for series in raw_data:
        k = series['Name']
        if k not in MAP_GENERATION:
            logger.warning(f'Unknown production type "{k}" for Peru')
            continue
        for v in series['Data']:
            dt = parse_date(v)
            try:
                i = datetimes.index(dt)
            except ValueError:
                i = len(datetimes)
                datetimes.append(dt)
                data.append({'zoneKey': zone_key, 'datetime': dt.datetime, 'production': {}, 'source': 'coes.org.pe'})
            data[i]['production'][MAP_GENERATION[k]] = data[i]['production'].get(MAP_GENERATION[k], 0) + (v['Valor'] / interval_hours)
    data = sorted(data, key=lambda d: d['datetime'])
    total_production_per_datapoint = list(map(lambda d: sum(d['production'].values()), data))
    mean_production = sum(total_production_per_datapoint) / len(total_production_per_datapoint)
    if (total_production_per_datapoint[-1] < (mean_production * 0.9)) and (target_datetime is None):
        logger.warning('Dropping last datapoint as it is probably incomplete. Total production is less than 90% of the mean.')
        data = data[:-1]
    return list(filter(lambda x: validate(x, logger, required=['gas'], expected_range={'gas': (100, 6000)}, floor=0.0) is not None, data))

def test_transition_enum():
    r = ft.AnimatedSwitcher()
    assert r.transition is None
    assert r._get_attr('transition') is None
    r = ft.AnimatedSwitcher(transition=ft.AnimatedSwitcherTransition.FADE)
    assert isinstance(r.transition, ft.AnimatedSwitcherTransition)
    assert r.transition == ft.AnimatedSwitcherTransition.FADE
    assert r._get_attr('transition') == 'fade'
    r = ft.AnimatedSwitcher(transition='scale')
    assert isinstance(r.transition, str)
    assert r._get_attr('transition') == 'scale'

def set_t_ids_val(unit_drops_stats: list[int], data: dict[str, Any], user_t_ids: list[int]) -> list[int]:
    for t_id in user_t_ids:
        if t_id in data['t_ids']:
            index = data['t_ids'].index(t_id)
            save_index = data['indexes'][index]
            unit_drops_stats[save_index] = 1
    return unit_drops_stats

def attach_modules(parent_module: Union['BaseWeb3', 'Module'],
                   module_definitions: Dict[str, Any],
                   w3: Optional[Union['BaseWeb3', 'Module']] = None) -> None:
    for module_name, module_info in module_definitions.items():
        module_info_is_list_like = isinstance(module_info, Sequence)
        module_class = module_info[0] if module_info_is_list_like else module_info
        if hasattr(parent_module, module_name):
            raise AttributeError(f"Cannot set {parent_module} module named '{module_name}'. The web3 object already has an attribute with that name")
        if w3 is None:
            from web3 import AsyncWeb3, Web3
            if isinstance(parent_module, Web3) or isinstance(parent_module, AsyncWeb3):
                w3 = parent_module
        module_init_params = _validate_init_params_and_return_if_found(module_class)
        if len(module_init_params) == 1:
            setattr(parent_module, module_name, module_class(w3))
        else:
            setattr(parent_module, module_name, module_class())
        if module_info_is_list_like:
            if len(module_info) == 2:
                submodule_definitions = module_info[1]
                module = getattr(parent_module, module_name)
                attach_modules(module, submodule_definitions, w3)
            elif len(module_info) != 1:
                raise Web3ValidationError('Module definitions can only have 1 or 2 elements.')

def transform_interface_to_typed_interface(interface: typing.Optional[Interface]) -> typing.Optional[_interface_models.TypedInterface]:
    if interface is None:
        return None
    if interface.docstring is None:
        input_descriptions = output_descriptions = {}
    else:
        input_descriptions = interface.docstring.input_descriptions
        output_descriptions = remap_shared_output_descriptions(interface.docstring.output_descriptions, interface.outputs)
    inputs_map = transform_variable_map(interface.inputs, input_descriptions)
    outputs_map = transform_variable_map(interface.outputs, output_descriptions)
    return _interface_models.TypedInterface(inputs_map, outputs_map)

# The decorator name was partially stripped in the source;
# '@handle_bad_request' is assumed for the '_bad_request' remnant.
@handle_bad_request
def ghost_generics_for_entity(request, code, entity_type):
    date = _specified_or_last_date(request, 'prescribing')
    entity = _get_entity(entity_type, code)
    measure_for_entity_url = reverse('measure_for_one_{}'.format(entity_type.lower()),
                                     kwargs={'measure': 'ghost_generic_measure', 'entity_code': code})
    context = {
        'entity': entity,
        'entity_name': entity.cased_name,
        'entity_name_and_status': entity.name_and_status,
        'entity_type': entity_type,
        'highlight': entity.code,
        'highlight_name': entity.cased_name,
        'date': date,
        'measure_for_entity_url': measure_for_entity_url,
    }
    if entity_type == 'practice':
        context['by_practice'] = True
    elif entity_type == 'CCG':
        context['by_ccg'] = True
    else:
        raise ValueError('Unhandled entity_type: {}'.format(entity_type))
    return render(request, 'ghost_generics.html', context)

class Migration(migrations.Migration):
    dependencies = [('home', '0002_create_homepage')]

    operations = [
        migrations.AddField(model_name='homepage', name='how_body', field=wagtail.core.fields.RichTextField(default='Blah blah')),
        migrations.AddField(model_name='homepage', name='how_header', field=models.CharField(default='Switching to HTTPS is easier than ever', max_length=50)),
        migrations.AddField(model_name='homepage', name='main_title', field=models.CharField(default='Every news site should be secure.', max_length=50)),
        migrations.AddField(model_name='homepage', name='sub_title', field=models.CharField(default="It's critical for both journalists and readers.", max_length=50)),
        migrations.AddField(model_name='homepage', name='why_body', field=wagtail.core.fields.RichTextField(default='Blah blah')),
        migrations.AddField(model_name='homepage', name='why_header', field=models.CharField(default='Encryption protects your readers', max_length=50)),
    ]

class OptionSeriesLollipopAccessibility(Options):
    # '@property' / setter decorators appear stripped; restored here.

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormat(self):
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def exposeAsGroupOnly(self):
        return self._config_get(None)

    @exposeAsGroupOnly.setter
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keyboardNavigation(self) -> 'OptionSeriesLollipopAccessibilityKeyboardnavigation':
        return self._config_sub_data('keyboardNavigation', OptionSeriesLollipopAccessibilityKeyboardnavigation)

    @property
    def point(self) -> 'OptionSeriesLollipopAccessibilityPoint':
        return self._config_sub_data('point', OptionSeriesLollipopAccessibilityPoint)

class TwigSecuredTest(unittest.TestCase, BaseTest):
    expected_data = {
        'language': 'php',
        'engine': 'twig',
        'trailer': '{{%(trailer)s}}',
        'header': '{{%(header)s}}',
        'render': '{{%(code)s}}',
        'prefix': '',
        'suffix': '',
    }
    # The target URLs were stripped from the source and are left blank here.
    url = ''
    url_blind = ''
    plugin = Twig
    blind_tests = []
    reflection_tests = [
        (0, 0, '%s', {}),
        (0, 0, 'AAA%sAAA', {}),
        (1, 0, '{{ %s }}', {'prefix': '1}}', 'suffix': '{{1'}),
        (0, 0, '{% block title %}%s{% endblock %}', {}),
        (1, 0, "{% set foo = '%s' %}", {'prefix': "1' %}", 'suffix': ''}),
        (5, 2, '{% set %s = 1 %}', {'prefix': 'a = 1 %}', 'suffix': ''}),
        (5, 1, '{% for item in %s %}{% endfor %}', {'prefix': '1 %}{% endfor %}{% for a in [1] %}', 'suffix': ''}),
        (1, 0, '{% if %s == 1 %}{% endif %}', {'prefix': '1 %}', 'suffix': ''}),
        (1, 2, '{% if 1 in %s %}{% endif %}', {'prefix': '"1" %}', 'suffix': ''}),
        (1, 3, '{% if 1 in [%s] %}{% endif %}', {'prefix': '1] %}', 'suffix': ''}),
    ]

    def test_download(self):
        pass

    def test_upload(self):
        pass

    def test_upload_blind(self):
        pass

def create_sdm_item_selection(item_count, *items):
    ret = b''
    if item_count == 255:
        return b'\xff'
    elif item_count == 0:
        return b'\x00'
    ret += struct.pack('<B', item_count)
    for item in items:
        ret += struct.pack('<BB', item[0], 1 if item[1] else 0)
    return ret

# Decorators were stripped in this snippet: '@pytest.mark.skipif' is restored
# from the '.skipif' remnant, and flytekit's '@task(cache=True,
# cache_version="42")' from the bare '(cache=True, ...)' remnant. The other
# inner defs most likely carried bare '@task' decorators (and t1 possibly a
# '@workflow'); '@task' is restored on that assumption, t1 is left plain.
@pytest.mark.skipif('pandas' not in sys.modules, reason='Pandas is not installed.')
def test_pass_annotated_to_downstream_tasks():
    import pandas as pd

    def hash_pandas_dataframe(df: pd.DataFrame) -> str:
        return str(pd.util.hash_pandas_object(df))

    @task
    def t0(a: int) -> Annotated[int, HashMethod(function=str)]:
        return a + 1

    @task
    def annotated_return_task() -> Annotated[pd.DataFrame, HashMethod(hash_pandas_dataframe)]:
        return pd.DataFrame({'column_1': [1, 2, 3]})

    @task(cache=True, cache_version='42')
    def downstream_t(a: int, df: pd.DataFrame) -> int:
        return (a + 2) + len(df)

    def t1(a: int) -> int:
        v = t0(a=a)
        df = annotated_return_task()
        v_1 = downstream_t(a=v, df=df)
        downstream_t(a=v, df=df)
        return v_1

    assert t1(a=3) == 9

def _get_resources(resources: List[Type[Resource]]):
    ret = []
    for resource in resources:
        item = {'icon': resource.icon, 'label': resource.label}
        if issubclass(resource, Link):
            item['type'] = 'link'
            item['url'] = resource.url
            item['target'] = resource.target
        elif issubclass(resource, Model):
            item['type'] = 'model'
            item['model'] = resource.model.__name__.lower()
        elif issubclass(resource, Dropdown):
            item['type'] = 'dropdown'
            item['resources'] = _get_resources(resource.resources)
        else:
            raise InvalidResource('Should be subclass of Resource')
        ret.append(item)
    return ret

def inject_attrs(cls, base, names=None, *, exclude=None, fail=True):
    if not isinstance(cls, type):
        raise TypeError(f'cls must be a type, got {cls!r}')
    ns = _resolve_base_namespace(base, names, exclude)
    if not _check_matching_attrs(cls, ns, base, fail):
        return None
    # vars(cls) returns a read-only mappingproxy, so the original
    # 'vars(cls).update(ns)' would raise; attributes are set individually.
    for attr_name, attr_value in ns.items():
        setattr(cls, attr_name, attr_value)
    return cls

class SGDGraftingTest(unittest.TestCase):
    def _setup_test(self) -> Tuple[SGDGrafting, torch.Tensor]:
        param = torch.tensor([1.0, 2.0])
        return SGDGrafting(param), param

    def test_init(self):
        grafting, _ = self._setup_test()
        self.assertEqual(grafting.parameter_count, 0)

    def test_precondition(self):
        grafting, grad = self._setup_test()
        torch.testing.assert_close(grafting.precondition(grad, 1), grad)

    def test_direction_norm(self):
        grafting, grad = self._setup_test()
        torch.testing.assert_close(grafting.direction_norm(grad, 1), torch.sqrt(torch.tensor(5.0)))

@pytest.mark.parametrize(
    'contents, expected_errors',
    [
        (
            # The in-string indentation appears to have been collapsed to
            # single spaces by extraction; the escaped form is kept verbatim.
            dedent('\n INCLUDE does_not_exist0\n NUM_REALIZATIONS 1\n INCLUDE does_not_exist1\n INCLUDE does_not_exist2\n INCLUDE does_not_exist3\n JOBNAME my_name%d\n INCLUDE does_not_exist4\n INCLUDE does_not_exist5\n INCLUDE does_not_exist6\n INCLUDE does_not_exist7\n '),
            [
                ExpectedErrorInfo(line=line, column=9, end_column=24,
                                  match=f'INCLUDE file:.*does_not_exist{i} not found')
                for i, line in enumerate([2, 4, 5, 6, 8, 9, 10, 11])
            ],
        )
    ],
)
@pytest.mark.usefixtures('use_tmpdir')
def test_multiple_include_non_existing_files_are_located(contents, expected_errors):
    for expected_error in expected_errors:
        assert_that_config_leads_to_error(config_file_contents=contents, expected_error=expected_error)

def get_latest_anomaly_test_metrics(dbt_project: DbtProject, test_id: str):
    results = dbt_project.run_query(ANOMALY_TEST_POINTS_QUERY.format(test_id=test_id))
    result_rows = [json.loads(result['result_row']) for result in results]
    return {
        (dateutil.parser.parse(result['bucket_start']).replace(tzinfo=None),
         dateutil.parser.parse(result['bucket_end']).replace(tzinfo=None)): result['metric_value']
        for result in result_rows
    }

class PlayerRole(Generic[T], GameViralContext):
    _role: T

    def __init__(self, typ: Type[T]):
        self._typ = typ
        self._role = typ(0)

    def __str__(self) -> str:
        return self._role.name

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, self._typ):
            return False
        return self._role == other

    def dump(self) -> Any:
        return self._role.value

    def sync(self, data) -> None:
        self._role = self._typ(data)

    # Disabled in the original source as a bare string literal:
    """
    def is_type(self, t: Enum) -> bool:
        g = self.game
        pl = g.players
        return sync_primitive(self.identity == t, pl)
    """

    def set(self, t: T) -> None:
        assert isinstance(t, self._typ)
        if self.game.is_server_side():
            self._role = self._typ(t)

    def get(self) -> T:
        return self._role

def _start():
    global patch, name, path, monitor
    global list_input, list_output, list1, list2, list3, i, j, lock, trigger, key1, key2, key3, this, thread, context, socket

    list_input = patch.config.items('input')
    list_output = patch.config.items('output')
    list1 = []
    list2 = []
    list3 = []
    for i in range(len(list_input)):
        for j in range(len(list_output)):
            if list_input[i][0] == list_output[j][0]:
                list1.append(list_input[i][0])
                list2.append(list_input[i][1])
                list3.append(list_output[j][1])
    lock = threading.Lock()
    trigger = []
    for key1, key2, key3 in zip(list1, list2, list3):
        this = TriggerThread(key2, key1, key3)
        trigger.append(this)
        monitor.debug(key1 + ' trigger configured')
    for thread in trigger:
        thread.start()
    try:
        context = zmq.Context()
        socket = context.socket(zmq.PUB)
        socket.bind('tcp://*:%i' % config.getint('zeromq', 'port'))
        socket.send_string('Hello')
    except Exception:
        raise RuntimeError('cannot connect to ZeroMQ')
    if len(locals()):
        print('LOCALS: ' + ', '.join(locals().keys()))

class TaskModelOutputMetaData(TaskOutputMetaData):
    def __init__(self, o_sequence, o_label, model_md):
        super(TaskModelOutputMetaData, self).__init__(o_sequence, o_label, elmdpenum.TaskOutputMetaDataTypes.MODEL_OUTPUT.value)
        self.model_md = model_md

    def to_dict(self):
        meta_data = super(TaskModelOutputMetaData, self).to_dict()
        meta_data.update(self.model_md)
        return meta_data

    def to_json(self):
        return json.dumps(self.to_dict())

class _IndexedRadioEditor(BaseSourceWithLocation):
    source_class = RadioEditor
    locator_class = Index
    handlers = [
        (MouseClick,
         lambda wrapper, _: _interaction_helpers.mouse_click_radiobutton_child_in_panel(
             control=wrapper._target.source.control,
             index=convert_index(source=wrapper._target.source, index=wrapper._target.location.index),
             delay=wrapper.delay)),
    ]

def test_firedrake_integral_sphere_high_order_netgen():
    try:
        from netgen.csg import CSGeometry, Pnt, Sphere
        import netgen
    except ImportError:
        pytest.skip(reason='Netgen unavailable, skipping Netgen test.')
    comm = COMM_WORLD
    if comm.Get_rank() == 0:
        geo = CSGeometry()
        geo.Add(Sphere(Pnt(0, 0, 0), 1).bc('sphere'))
        ngmesh = geo.GenerateMesh(maxh=0.1)
    else:
        ngmesh = netgen.libngpy._meshing.Mesh(3)
    msh = Mesh(ngmesh)
    homsh = Mesh(msh.curve_field(4))
    V = FunctionSpace(homsh, 'CG', 4)
    x, y, z = SpatialCoordinate(homsh)
    f = Function(V).interpolate(1 + (0 * x))
    assert abs(assemble(f * dx) - ((4 / 3) * np.pi)) < 0.0001

class Test(unittest.TestCase):
    def tearDown(self):
        if hasattr(self, 'ie'):
            self.ie.Quit()
            del self.ie

    def test_mycomobject(self):
        o = MyComObject()
        p = comtypes2pywin(o, IDispatch)
        disp = win32com.client.Dispatch(p)
        self.assertEqual(repr(disp), '<COMObject <unknown>>')

    def test_ie(self):
        ie = self.ie = CreateObject('InternetExplorer.Application')
        self.assertEqual(comtypes_get_refcount(ie), 1)
        self.assertEqual(ie.Visible, False)
        p = comtypes2pywin(ie, interface=IDispatch)
        self.assertEqual(comtypes_get_refcount(ie), 2)
        disp = win32com.client.Dispatch(p)
        self.assertEqual(comtypes_get_refcount(ie), 2)
        self.assertEqual(disp.Visible, False)
        del p, disp
        self.assertEqual(comtypes_get_refcount(ie), 1)

class TestPackageModel(BasePyTestCase):
    def test_two_package_different_types(self):
        package1 = model.Package(name='python-requests')
        package2 = model.RpmPackage(name='python-requests')
        self.db.add(package1)
        self.db.add(package2)
        self.db.flush()

    def test_two_package_same_type(self):
        package1 = model.RpmPackage(name='python-requests')
        package2 = model.RpmPackage(name='python-requests')
        self.db.add(package1)
        self.db.add(package2)
        pytest.raises(IntegrityError, self.db.flush)

    @pytest.mark.parametrize('exists', (False, True))
    def test_package_existence(self, exists):
        if exists:
            package1 = model.RpmPackage(name='python-requests')
        else:
            package1 = model.ModulePackage(name='python-requests')
        self.db.add(package1)
        self.db.flush()
        koji = buildsys.get_session()
        kbuildinfo = koji.getBuild('python-requests-1.0-1.fc36')
        rbuildinfo = {'info': kbuildinfo, 'nvr': kbuildinfo['nvr'].rsplit('-', 2)}
        assert model.Package.check_existence(rbuildinfo) is exists

def is_directly_connected(node, node_tree, wire1, wire2):
    if 'wires' in node_tree:
        node_tree_wires = node_tree['wires']
    elif (len(node_tree['edges']) == 1) and (len(node_tree['joins']) == 0):
        node_tree_wires = node_tree['edges'][0]
    else:
        return None
    if wire1 not in node_tree_wires:
        return None
    if wire2 not in node_tree_wires:
        return None
    for edge in node_tree['edges']:
        idx1 = None
        idx2 = None
        try:
            idx1 = edge.index(wire1)
        except ValueError:
            pass
        try:
            idx2 = edge.index(wire2)
        except ValueError:
            pass
        if (idx1 is not None) and (idx2 is not None):
            return abs(idx1 - idx2) == 1
        if (idx1 is not None) and (idx1 != 0) and (idx1 != (len(edge) - 1)):
            return False
        if (idx2 is not None) and (idx2 != 0) and (idx2 != (len(edge) - 1)):
            return False
    if wire1 in node_tree['joins']:
        return wire2 in node_tree['joins'][wire1]
    if wire2 in node_tree['joins']:
        assert wire1 not in node_tree['joins'][wire2]
    return None

class OptionPlotoptionsNetworkgraphOnpointPosition(Options):
    # '@property' / setter decorators appear stripped; restored here.

    @property
    def offsetX(self):
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

class JaggedDim(Node):
    def __init__(self, min_value: IntVar, max_value: IntVar):
        if isinstance(min_value, int):
            min_value = IntImm(min_value)
        if isinstance(max_value, int):
            max_value = IntImm(max_value)
        if min_value.lower_bound() < 0:
            raise ValueError(f'min_value={min_value!r}, but must be non-negative.')
        if min_value.lower_bound() > max_value.upper_bound():
            raise ValueError(f"min_value={min_value!r} can't be larger than max_value={max_value!r}.")
        super().__init__()
        self._attrs['values'] = [min_value, max_value]
        self._attrs['offsets'] = None

    def __eq__(self, another: 'JaggedDim') -> bool:
        return (isinstance(another, JaggedDim)
                and (self.min_value() == another.min_value())
                and (self.max_value() == another.max_value())
                and (self.offsets() == another.offsets()))

    def __str__(self) -> str:
        attrs = dict(self._attrs)
        if self._attrs['offsets'] is not None:
            attrs['offsets'] = {'name': self._attrs['offsets']._attrs['name']}
        return str(attrs)

    def min_value(self) -> IntVar:
        return self._attrs['values'][0]

    def max_value(self) -> IntVar:
        return self._attrs['values'][1]

    def offsets(self) -> Optional[Tensor]:
        return self._attrs['offsets']

    def pseudo_code(self, with_shape=False) -> str:
        return f"JaggedDim({str(self._attrs['values'])})"

# The decorator was partially stripped in the source; Flask's context
# processor registration ('@app.context_processor') is assumed from the
# '_processor' remnant.
@app.context_processor
def inject_variables():
    if get_authed():
        mod_links = [
            ('boards', ['mod.mod_boards', 'mod.mod_board_log']),
            ('reports', ['mod.mod_report']),
        ]
        if request_has_role(roles.ROLE_ADMIN):
            mod_links += [
                ('bans', ['mod.mod_bans']),
                ('moderators', ['mod.mod_moderators']),
                ('pages', ['mod.mod_pages']),
                ('site', ['mod.mod_site']),
            ]
        mod_links += [
            ('account', ['mod.mod_self']),
            ('logout', ['mod.mod_auth']),
        ]
        with_current_and_url = []
        for mod_link in mod_links:
            current = any(i.startswith(request.endpoint) for i in mod_link[1])
            with_current_and_url.append((mod_link[0], url_for(mod_link[1][0]), current))
        return dict(mod_links=with_current_and_url, is_authed=True)
    else:
        return {}

class Restore(object):
    _MODULE_NAME = 'fledge_restore_common'
    SCHEDULE_RESTORE_ON_DEMAND = '8d4d3ca0-de80-11e7-80c1-9a214cf093ae'
    _MESSAGES_LIST = {
        'i000000': 'general information',
        'i000001': 'On demand restore successfully launched.',
        'e000000': 'general error',
        'e000001': 'cannot launch on demand restore - error details |{0}|',
    }
    _logger = None

    def __init__(self, _storage):
        self._storage = _storage
        if not Restore._logger:
            Restore._logger = logger.setup(self._MODULE_NAME, destination=_LOGGER_DESTINATION, level=logging.INFO)

    async def restore_backup(self, backup_id: int):
        self._logger.debug('{func} - backup id |{backup_id}|'.format(func='restore_backup', backup_id=backup_id))
        try:
            await server.Server.scheduler.queue_task(uuid.UUID(Restore.SCHEDULE_RESTORE_ON_DEMAND))
            server.Server.scheduler._restore_backup_id = backup_id
            self._logger.debug('Scheduler restore_backup_id: {}'.format(server.Server.scheduler._restore_backup_id))
            _message = self._MESSAGES_LIST['i000001']
            Restore._logger.info('{0}'.format(_message))
            status = 'running'
        except Exception as _ex:
            _message = self._MESSAGES_LIST['e000001'].format(_ex)
            Restore._logger.error('{0}'.format(_message))
            status = 'failed'
        return status

class OtherVarNode(GivElm):
    total = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.name = kwargs['name']

    def __repr__(self):
        return self.name

    def __str__(self):
        return repr(self)

    def str_noindex(self):
        return self.name

    def stat(self):
        super().stat()
        OtherVarNode.total += 1

class Core(core.Core):
    core_type = 'S'

    def __init__(self, **options: Dict[str, Any]) -> None:
        super().__init__()
        self.options = Options(options)
        self.events = Events(self)
        disables = self.options.disables
        if 'serve' not in disables:
            self.serve = parts.serve.Serve(self)
        if 'auth' not in disables:
            self.auth = parts.auth.Auth(self)
        if 'lobby' not in disables:
            self.lobby = parts.lobby.Lobby(self)
        if 'room' not in disables:
            self.room = parts.room.Room(self)
        if 'game' not in disables:
            self.game = parts.game.GamePart(self)
        if 'matching' not in disables:
            self.matching = parts.matching.Matching(self)
        if 'observe' not in disables:
            self.observe = parts.observe.Observe(self)
        if 'invite' not in disables:
            self.invite = parts.invite.Invite(self)
        if 'item' not in disables:
            self.item = parts.item.Item(self)
        if 'contest' not in disables:
            self.contest = parts.contest.Contest(self)
        if 'admin' not in disables:
            self.admin = parts.admin.Admin(self)
        if 'hooks' not in disables:
            self.hooks = parts.hooks.Hooks(self)
        if 'connect' not in disables:
            self.connect = parts.connect.Connect(self)
        if 'backend' not in disables:
            self.backend = parts.backend.Backend(self)
        if 'log' not in disables:
            self.log = parts.log.Log(self)
        if 'stats' not in disables:
            self.stats = parts.stats.Stats(self)
        if 'view' not in disables:
            self.view = parts.view.View(self)
        self.events.core_initialized.emit(self)

@cmd('build', 'build a given project')  # decorator name "cmd" is an assumption; only its arguments survived in the source
class BuildCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        if args.clean:
            clean_dirs(loader.build_opts)
        print(('Building on %s' % loader.ctx_gen.get_context(args.project)))
        projects = loader.manifests_in_dependency_order()
        cache = (cache_module.create_cache() if args.use_build_cache else None)
        install_dirs = []
        for m in projects:
            fetcher = loader.create_fetcher(m)
            if isinstance(fetcher, SystemPackageFetcher):
                continue
            if args.clean:
                fetcher.clean()
            build_dir = loader.get_project_build_dir(m)
            inst_dir = loader.get_project_install_dir(m)
            if (((m == manifest) and (not args.only_deps)) or ((m != manifest) and (not args.no_deps))):
                print(('Assessing %s...' % m.name))
                project_hash = loader.get_project_hash(m)
                ctx = loader.ctx_gen.get_context(m.name)
                built_marker = os.path.join(inst_dir, '.built-by-getdeps')
                cached_project = CachedProject(cache, loader, m)
                (reconfigure, sources_changed) = self.compute_source_change_status(cached_project, fetcher, m, built_marker, project_hash)
                if (os.path.exists(built_marker) and (not cached_project.was_cached())):
                    (dep_reconfigure, dep_build) = self.compute_dep_change_status(m, built_marker, loader)
                    if dep_reconfigure:
                        reconfigure = True
                    if dep_build:
                        sources_changed = True
                extra_cmake_defines = (json.loads(args.extra_cmake_defines) if args.extra_cmake_defines else {})
                extra_b2_args = (args.extra_b2_args or [])
                if (sources_changed or reconfigure or (not os.path.exists(built_marker))):
                    if os.path.exists(built_marker):
                        os.unlink(built_marker)
                    src_dir = fetcher.get_src_dir()
                    prepare_builders = m.create_prepare_builders(loader.build_opts, ctx, src_dir, build_dir, inst_dir, loader)
                    for preparer in prepare_builders:
                        preparer.prepare(install_dirs, reconfigure=reconfigure)
                    builder = m.create_builder(loader.build_opts, src_dir, build_dir, inst_dir, ctx, loader, final_install_prefix=loader.get_project_install_prefix(m), extra_cmake_defines=extra_cmake_defines, extra_b2_args=extra_b2_args)
                    builder.build(install_dirs, reconfigure=reconfigure)
                    with open(built_marker, 'w') as f:
                        f.write(project_hash)
                    if (args.schedule_type == 'continuous'):
                        cached_project.upload()
                elif args.verbose:
                    print(('found good %s' % built_marker))
            install_dirs.insert(0, inst_dir)

    def compute_dep_change_status(self, m, built_marker, loader):
        reconfigure = False
        sources_changed = False
        st = os.lstat(built_marker)
        ctx = loader.ctx_gen.get_context(m.name)
        dep_list = m.get_dependencies(ctx)
        for dep in dep_list:
            if (reconfigure and sources_changed):
                break
            dep_manifest = loader.load_manifest(dep)
            dep_root = loader.get_project_install_dir(dep_manifest)
            for dep_file in list_files_under_dir_newer_than_timestamp(dep_root, st.st_mtime):
                if (os.path.basename(dep_file) == '.built-by-getdeps'):
                    continue
                if file_name_is_cmake_file(dep_file):
                    if (not reconfigure):
                        reconfigure = True
                        print(f'Will reconfigure cmake because {dep_file} is newer than {built_marker}')
                elif (not sources_changed):
                    sources_changed = True
                    print(f'Will run build because {dep_file} is newer than {built_marker}')
                if (reconfigure and sources_changed):
                    break
        return (reconfigure, sources_changed)

    def compute_source_change_status(self, cached_project, fetcher, m, built_marker, project_hash):
        reconfigure = False
        sources_changed = False
        if (not cached_project.download()):
            check_fetcher = True
            if os.path.exists(built_marker):
                check_fetcher = False
                with open(built_marker, 'r') as f:
                    built_hash = f.read().strip()
                if (built_hash == project_hash):
                    if cached_project.is_cacheable():
                        reconfigure = False
                        sources_changed = False
                    else:
                        check_fetcher = True
                else:
                    os.unlink(built_marker)
                    reconfigure = True
                    sources_changed = True
                    fetcher.update()
            if check_fetcher:
                change_status = fetcher.update()
                reconfigure = change_status.build_changed()
                sources_changed = change_status.sources_changed()
        return (reconfigure, sources_changed)

    def setup_project_cmd_parser(self, parser):
        parser.add_argument('--clean', action='store_true', default=False, help='Clean up the build and installation area prior to building, causing the projects to be built from scratch')
        parser.add_argument('--no-deps', action='store_true', default=False, help="Only build the named project, not its deps. This is most useful after you've built all of the deps, and helps to avoid waiting for relatively slow up-to-date-ness checks")
        parser.add_argument('--only-deps', action='store_true', default=False, help="Only build the named project's deps. This is most useful when you want to separate out building of all of the deps and your project")
        parser.add_argument('--no-build-cache', action='store_false', default=True, dest='use_build_cache', help='Do not attempt to use the build cache.')
        parser.add_argument('--schedule-type', help='Indicates how the build was activated')
        parser.add_argument('--extra-cmake-defines', help='Input json map that contains extra cmake defines to be used when compiling the current project and all its deps. e.g: \'{"CMAKE_CXX_FLAGS": "--bla"}\'')
        parser.add_argument('--extra-b2-args', help="Repeatable argument that contains extra arguments to pass to b2, which compiles boost. e.g.: 'cxxflags=-fPIC' 'cflags=-fPIC'", action='append')
        parser.add_argument('--shared-libs', help='Build shared libraries if possible', action='store_true', default=False)
def _get_file_kind(filename): try: st = os.lstat(filename) except FileNotFoundError: return None kinds = [] if _stat.S_ISDIR(st.st_mode): kinds.append('dir') elif _stat.S_ISREG(st.st_mode): kinds.append('file') else: kinds.append('other') if _stat.S_ISLNK(st.st_mode): kinds.append('symlink') return (' '.join(kinds), st)
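# A minimal usage sketch for _get_file_kind above (not from the source; it assumes the
# function's own imports, os and stat-as-_stat, at module level). Note that a symlink is
# reported as 'other symlink' because lstat() describes the link itself, not its target.
import os
import stat as _stat

os.makedirs('demo_dir', exist_ok=True)
if not os.path.lexists('demo_link'):
    os.symlink('demo_dir', 'demo_link')
print(_get_file_kind('demo_dir'))    # ('dir', <os.stat_result>)
print(_get_file_kind('demo_link'))   # ('other symlink', <os.stat_result>)
print(_get_file_kind('missing'))     # None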
def test_dependency_getter(tmp_path: Path) -> None:
    # the "dependencies = [" key was garbled in the source ("# See = ["), apparently when a
    # URL was stripped from the preceding comment; it is restored here since the getter
    # reads the PEP 621 [project] dependencies list
    fake_pyproject_toml = '[project]\n# PEP 621 project metadata\ndependencies = [\n "qux",\n "bar>=20.9",\n "optional-foo[option]>=0.12.11",\n "conditional-bar>=1.1.0; python_version < \'3.11\'",\n "fox-python", # top level module is called "fox"\n]\n\n[project.optional-dependencies]\ngroup1 = [\n "foobar",\n "barfoo",\n]\ngroup2 = [\n "dep",\n]\n'
    with run_within_dir(tmp_path):
        with Path('pyproject.toml').open('w') as f:
            f.write(fake_pyproject_toml)
        getter = PEP621DependencyGetter(config=Path('pyproject.toml'), package_module_name_map={'fox-python': ('fox',)})
        dependencies = getter.get().dependencies
        assert (len(dependencies) == 8)
        assert (dependencies[0].name == 'qux')
        assert (not dependencies[0].is_conditional)
        assert (not dependencies[0].is_optional)
        assert ('qux' in dependencies[0].top_levels)
        assert (dependencies[1].name == 'bar')
        assert (not dependencies[1].is_conditional)
        assert (not dependencies[1].is_optional)
        assert ('bar' in dependencies[1].top_levels)
        assert (dependencies[2].name == 'optional-foo')
        assert (not dependencies[2].is_conditional)
        assert dependencies[2].is_optional
        assert ('optional_foo' in dependencies[2].top_levels)
        assert (dependencies[3].name == 'conditional-bar')
        assert dependencies[3].is_conditional
        assert (not dependencies[3].is_optional)
        assert ('conditional_bar' in dependencies[3].top_levels)
        assert (dependencies[4].name == 'fox-python')
        assert (not dependencies[4].is_conditional)
        assert (not dependencies[4].is_optional)
        assert ('fox' in dependencies[4].top_levels)
        assert (dependencies[5].name == 'foobar')
        assert (not dependencies[5].is_conditional)
        assert (not dependencies[5].is_optional)
        assert ('foobar' in dependencies[5].top_levels)
        assert (dependencies[6].name == 'barfoo')
        assert (not dependencies[6].is_conditional)
        assert (not dependencies[6].is_optional)
        assert ('barfoo' in dependencies[6].top_levels)
        assert (dependencies[7].name == 'dep')
        assert (not dependencies[7].is_conditional)
        assert (not dependencies[7].is_optional)
        assert ('dep' in dependencies[7].top_levels)
def rewrite_nn_resize_op(is_quantized=False): input_pattern = graph_matcher.OpTypePattern(('FakeQuantWithMinMaxVars' if is_quantized else '*')) reshape_1_pattern = graph_matcher.OpTypePattern('Reshape', inputs=[input_pattern, 'Const'], ordered_inputs=False) mul_pattern = graph_matcher.OpTypePattern('Mul', inputs=[reshape_1_pattern, 'Const'], ordered_inputs=False) fake_quant_pattern = graph_matcher.OpTypePattern('FakeQuantWithMinMaxVars', inputs=[mul_pattern, 'Identity', 'Identity'], ordered_inputs=False) reshape_2_pattern = graph_matcher.OpTypePattern('Reshape', inputs=[graph_matcher.OneofPattern([fake_quant_pattern, mul_pattern]), 'Const'], ordered_inputs=False) add_pattern = graph_matcher.OpTypePattern('Add', inputs=[reshape_2_pattern, '*'], ordered_inputs=False) matcher = graph_matcher.GraphMatcher(add_pattern) for match in matcher.match_graph(tf.get_default_graph()): projection_op = match.get_op(input_pattern) reshape_2_op = match.get_op(reshape_2_pattern) add_op = match.get_op(add_pattern) nn_resize = tf.image.resize_nearest_neighbor(projection_op.outputs[0], add_op.outputs[0].shape.dims[1:3], align_corners=False, name=(os.path.split(reshape_2_op.name)[0] + '/resize_nearest_neighbor')) for (index, op_input) in enumerate(add_op.inputs): if (op_input == reshape_2_op.outputs[0]): add_op._update_input(index, nn_resize) break
class OptionPlotoptionsVariablepieSonificationContexttracksMappingLowpassFrequency(Options):
    # property/setter decorators reconstructed below; they were stripped from the source,
    # leaving each getter silently shadowed by its setter
    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@pytest.mark.parametrize('elasticapm_client', [{'breakdown_metrics': False}], indirect=True)  # "@pytest.mark" prefix restored; it was truncated in the source
def test_disable_breakdowns(elasticapm_client):
    with pytest.raises(LookupError):
        elasticapm_client.metrics.get_metricset('elasticapm.metrics.sets.breakdown.BreakdownMetricSet')
    with mock.patch('elasticapm.traces.BaseSpan.child_started') as mock_child_started, mock.patch('elasticapm.traces.Transaction.track_span_duration') as mock_track_span_duration:
        transaction = elasticapm_client.begin_transaction('test')
        assert (transaction._breakdown is None)
        with elasticapm.capture_span('test', span_type='template', span_subtype='django', duration=5):
            pass
        elasticapm_client.end_transaction('test', 'OK', duration=5)
        assert (mock_child_started.call_count == 0)
        assert (mock_track_span_duration.call_count == 0)
class AbstractWhoosheer(object):
    auto_update = True

    @classmethod  # classmethod decorators reconstructed: both methods take cls, but the decorators were stripped from the source
    def search(cls, search_string, values_of='', group=whoosh.qparser.OrGroup, match_substrings=True, limit=None):
        index = Whooshee.get_or_create_index(_get_app(cls), cls)
        prepped_string = cls.prep_search_string(search_string, match_substrings)
        with index.searcher() as searcher:
            parser = whoosh.qparser.MultifieldParser(cls.schema.names(), index.schema, group=group)
            query = parser.parse(prepped_string)
            results = searcher.search(query, limit=limit)
            if values_of:
                return [x[values_of] for x in results]
            return results

    @classmethod
    def prep_search_string(cls, search_string, match_substrings):
        if ((sys.version < '3') and (not isinstance(search_string, unicode))):
            search_string = search_string.decode('utf-8')
        s = search_string.strip()
        s = s.replace('*', '')
        min_len = _get_config(cls)['search_string_min_len']
        if (len(s) < min_len):
            # the error message previously hardcoded "3 characters" regardless of the configured minimum
            raise ValueError('Search string must have at least {0} characters'.format(min_len))
        if match_substrings:
            s = u'*{0}*'.format(re.sub('[\\s]+', '* *', s))
        return s
def test_named_schema_with_logical_type_in_union(): schema = [{'name': 'named_schema_with_logical_type', 'namespace': 'com.example', 'type': 'record', 'fields': [{'name': 'item', 'type': {'type': 'int', 'logicalType': 'date'}}]}, {'type': 'record', 'name': 'test_named_schema_with_logical_type', 'fields': [{'name': 'item', 'type': ['null', 'com.example.named_schema_with_logical_type']}]}] records = [{'item': None}, {'item': {'item': '2019-05-06'}}] expected = [{'item': None}, {'item': {'item': datetime.date(2019, 5, 6)}}] assert (expected == roundtrip(schema, records))
def _create_firmware_directory(): logging.info('Creating firmware directory') data_dir_name = config.backend.firmware_file_storage_directory mkdir_process = subprocess.run(f'sudo mkdir -p --mode=0744 {data_dir_name}', shell=True, stdout=PIPE, stderr=STDOUT, text=True) chown_process = subprocess.run(f'sudo chown {os.getuid()}:{os.getgid()} {data_dir_name}', shell=True, stdout=PIPE, stderr=STDOUT, text=True) if (not all(((code == 0) for code in (mkdir_process.returncode, chown_process.returncode)))): raise InstallationError(f'''Failed to create directories for binary storage {mkdir_process.stdout} {chown_process.stdout}''')
@given(type_strs)  # decorator reconstructed as hypothesis's @given: only "(type_strs)" survived in the source, and the test body uses hypothesis's event()
def test_has_arrlist_has_expected_behavior_for_parsable_types(type_str):
    if type_str.startswith('('):
        assert (not has_arrlist(type_str))
        event('No match for tuple type')
    elif ARRAY_RE.search(type_str):
        assert has_arrlist(type_str)
        event('Match for array type')
    else:
        assert (not has_arrlist(type_str))
        event('No match for non-array type')
def test_arbitrary_encoding_automatically_find_variables_ignore_format(df_enc_numeric): encoder = OrdinalEncoder(encoding_method='arbitrary', variables=None, ignore_format=True) X = encoder.fit_transform(df_enc_numeric[['var_A', 'var_B']]) transf_df = df_enc_numeric[['var_A', 'var_B']].copy() transf_df['var_A'] = [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2] transf_df['var_B'] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2] assert (encoder.encoding_method == 'arbitrary') assert (encoder.variables is None) assert (encoder.variables_ == ['var_A', 'var_B']) assert (encoder.encoder_dict_ == {'var_A': {1: 0, 2: 1, 3: 2}, 'var_B': {1: 0, 2: 1, 3: 2}}) assert (encoder.n_features_in_ == 2) pd.testing.assert_frame_equal(X, transf_df)
class TestOneDockerServiceAsync(IsolatedAsyncioTestCase):
    @patch('fbpcp.service.container.ContainerService')  # "@patch" restored; the decorator name was truncated in the source
    def setUp(self, MockContainerService):
        self.container_svc = MockContainerService()
        self.insights = AsyncMock()
        self.onedocker_svc = OneDockerService(container_svc=self.container_svc, task_definition=TEST_TASK_DEF, insights=self.insights)

    async def test_waiting_for_pending_container(self):
        pending_containers = _get_pending_container_instances()
        running_containers = _get_running_container_instances()
        self.onedocker_svc.get_containers = MagicMock(return_value=running_containers)
        expected_container = (await self.onedocker_svc.wait_for_pending_container(pending_containers[0].instance_id))
        self.assertEqual(expected_container, running_containers[0])

    async def test_waiting_for_pending_containers(self):
        pending_containers = _get_pending_container_instances()
        running_containers = _get_running_container_instances()
        self.onedocker_svc.get_containers = MagicMock(side_effect=([running_containers[0]], [running_containers[1]]))
        expected_containers = (await self.onedocker_svc.wait_for_pending_containers([container.instance_id for container in pending_containers]))
        self.assertEqual(expected_containers, running_containers)

    @patch('time.time', MagicMock(return_value=TEST_TIME))  # "@patch" restored; the decorator name was truncated in the source
    async def test_insights_emit_async(self):
        running_containers = _get_running_container_instances()
        pending_containers = _get_pending_container_instances()
        self.onedocker_svc.get_containers = MagicMock(side_effect=([running_containers[0]], [running_containers[1]]))
        self.container_svc.get_cluster.return_value = TEST_CLUSTER_STR
        calls = [call(json.dumps({'time': TEST_TIME, 'cluster_name': TEST_CLUSTER_STR, 'instance_id': TEST_INSTANCE_ID_1, 'status': 'STARTED', 'exit_code': None, 'class_name': TEST_CLASS_NAME})), call(json.dumps({'time': TEST_TIME, 'cluster_name': TEST_CLUSTER_STR, 'instance_id': TEST_INSTANCE_ID_2, 'status': 'STARTED', 'exit_code': None, 'class_name': TEST_CLASS_NAME}))]
        (await self.onedocker_svc.wait_for_pending_containers([container.instance_id for container in pending_containers]))
        self.insights.emit_async.assert_has_calls(calls)
class ArgmaxTestCase(unittest.TestCase):
    def _test_argmax(self, batch_size=1, shape=(2, 6), dim=0, test_name='argmax', copy_op=False, dtype='float16'):
        o_shape = list(shape)[:(- 1)]
        X1 = Tensor(shape=shape, dtype=dtype, name='X', is_input=True)
        X4_op = ops.argmax(dim=dim)
        if copy_op:
            X4_op = ops.argmax(**X4_op._get_op_attributes())
        X4 = X4_op(X1)
        X4._attrs['is_output'] = True
        X4._attrs['name'] = 'output'
        target = detect_target()
        module = compile_model(X4, target, './tmp', test_name)
        scores = get_random_torch_tensor(shape, dtype=dtype)
        y_pt = torch.argmax(scores, dim=dim)
        y = torch.empty_like(y_pt, dtype=torch.int64)
        module.run_with_tensors([scores], [y])
        y_reshape = y.reshape(o_shape)
        self.assertTrue(torch.allclose(y_pt, y_reshape, atol=0.01, rtol=0.01))

    def test_fp16(self):
        self._test_argmax(shape=(300, 80), dim=1, test_name='argmax_fp16', dtype='float16')
        self._test_argmax(shape=(300, 80), dim=1, test_name='argmax_fp16_copy_op', copy_op=True, dtype='float16')

    @unittest.skipIf((detect_target().name() == 'rocm'), 'float32 not supported in ROCm')  # "@unittest.skipIf" restored; the decorator name was truncated in the source
    def test_fp32(self):
        self._test_argmax(shape=(300, 80), dim=1, test_name='argmax_fp32', dtype='float32')
        self._test_argmax(shape=(300, 80), dim=1, test_name='argmax_fp32_copy_op', copy_op=True, dtype='float32')
@webhooks_ns.route('/bitbucket/<int:copr_id>/<uuid>/', methods=['POST'])  # blueprint name "webhooks_ns" is an assumption: only the "_ns" fragment survived in the source
@webhooks_ns.route('/bitbucket/<int:copr_id>/<uuid>/<string:pkg_name>/', methods=['POST'])
def webhooks_bitbucket_push(copr_id, uuid, pkg_name: Optional[str]=None):
    copr = ComplexLogic.get_copr_by_id(copr_id)
    if (copr.webhook_secret != uuid):
        raise AccessRestricted('This webhook is not valid')
    try:
        payload = flask.request.json
        api_url = payload['repository']['links']['self']['href']
        clone_url = payload['repository']['links']['html']['href']
        commits = []
        ref_type = payload['push']['changes'][0]['new']['type']
        ref = payload['push']['changes'][0]['new']['name']
        try:
            actor = payload['actor']['links']['html']['href']
        except KeyError:
            actor = None
        if (ref_type == 'tag'):
            committish = ref
        else:
            committish = payload['push']['changes'][0]['new']['target']['hash']
    except KeyError:
        return ('Bad Request', 400)
    packages = PackagesLogic.get_for_webhook_rebuild(copr_id, uuid, clone_url, commits, ref_type, ref, pkg_name)
    for package in packages:
        BuildsLogic.rebuild_package(package, {'committish': committish}, submitted_by=actor)
    db.session.commit()
    return ('OK', 200)
def test_default_param_filler():
    # the original URL literal was stripped from the source; a clearly fake placeholder is
    # used in both places so the config and the assertion still agree
    url_placeholder = 'https://example.invalid/items'
    generator_config = {'name': 'do_snapshot_copiers', 'type': 'requests_json_generator', 'target': 'do_something_with_items', 'requires_resources': ['dataproc-cluster'], 'properties': {'url': url_placeholder, 'list_json_key': 'my-key'}}
    g = plugins.manager.generators(generator_config)
    assert (g.type == NodeTypes.GENERATOR)
    assert (g.resolve_properties(ExecutionContext(referrer=None, resources={})).values == {'url': url_placeholder, 'list_json_key': 'my-key', 'timeout_sec': 5, 'headers': {}})
class DummyObserver(): def __init__(self, notify=True, observables=(), next_objects=(), notifier=None, maintainer=None, extra_graphs=()): if (notifier is None): notifier = DummyNotifier() if (maintainer is None): maintainer = DummyNotifier() self.notify = notify self.observables = observables self.next_objects = next_objects self.notifier = notifier self.maintainer = maintainer self.extra_graphs = extra_graphs def __eq__(self, other): return (other is self) def __hash__(self): return 1 def iter_observables(self, object): (yield from self.observables) def iter_objects(self, object): (yield from self.next_objects) def get_notifier(self, handler, target, dispatcher): return self.notifier def get_maintainer(self, graph, handler, target, dispatcher): return self.maintainer def iter_extra_graphs(self, graph): (yield from self.extra_graphs)
def from_csv(file, name='name', id_ft='id_ft', id_fld='id_fld', name_fld=None):
    sites = []
    with open(file) as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            params = [row[name], row[id_ft], row[id_fld]]
            if name_fld:
                # fixed: the original wrote "params = params.append(...) if name_fld else params",
                # but list.append returns None, which clobbered the params list
                params.append(row[name_fld])
            site = (row[name], Site(*params))
            sites.append(site)
    return dict(sites)
@patch('ciftify.bidsapp.fmriprep_ciftify.run')  # "@patch" restored; the decorator name was truncated in the source
def test_ux21_synth_will_run_from_derivatives(mock_run):
    uargs = [synth_bids, os.path.join(synth_bids, 'derivatives'), 'participant', '--participant_label=02', '--task=nback', '--surf-reg', 'FS']
    ret = simple_main_run(uargs)
    call_list = parse_call_list_into_strings(mock_run.call_args_list)
    print(call_list)
    assert (count_calls_to('fmriprep', call_list, call_contains='--anat-only') == 1)
    assert (count_calls_to('fmriprep', call_list) == 1)
    assert (count_calls_to('ciftify_recon_all', call_list) == 1)
    assert (count_calls_to('ciftify_subject_fmri', call_list, call_contains='fmriprep') == 16)
def format_continuation_lexicon_xml(tsvparts): xmlstring = ' <e>' if (tsvparts[1] != ''): xmlstring += '<a>' for anal in tsvparts[1].split('|'): if (anal in Ftb3Formatter.stuff2ftb3): anal = make_xmlid(anal) xmlstring += (('<s mcs="' + anal) + '"/>') else: xmlstring += xml_escape(anal) xmlstring += '</a>' else: xmlstring += '<a/>' xmlstring += (('<i>' + xml_escape(tsvparts[2])) + '</i>') xmlstring += (('<cont lexica="' + ' '.join(tsvparts[3:]).replace('#', '_END')) + '"/></e>\n') return xmlstring
def get_otp_url(kp_entry): otp_url = '' if hasattr(kp_entry, 'otp'): otp_url = kp_entry.deref('otp') else: otp_url = kp_entry.get_custom_property('otp') if otp_url: return otp_url otp_url_format = 'otpauth://totp/Entry?secret={}&period={}&digits={}&algorithm={}' (digits, period, algorithm) = (6, 30, 'sha1') seed = kp_entry.get_custom_property('TOTP Seed') if seed: settings = (kp_entry.get_custom_property('TOTP Settings') or '') try: (period, digits) = settings.split(';') except ValueError: pass return otp_url_format.format(seed, period, digits, algorithm) seed = kp_entry.get_custom_property('TimeOtp-Secret-Base32') if seed: period = int((kp_entry.get_custom_property('TimeOtp-Period') or period)) digits = int((kp_entry.get_custom_property('TimeOtp-Length') or digits)) algorithm = (kp_entry.get_custom_property('TimeOtp-Algorithm') or algorithm) algo_map = {'hmac-sha-1': 'sha1', 'hmac-sha-256': 'sha256', 'hmac-sha-512': 'sha512'} algorithm = algo_map.get(algorithm.lower(), 'sha1') return otp_url_format.format(seed, period, digits, algorithm) return otp_url
class TestPyQtFont(unittest.TestCase): def test_create_traitsfont(self): expected_outcomes = {} expected_outcomes[''] = TraitsFont() for (weight, qt_weight) in font_weights.items(): expected_outcomes[weight] = TraitsFont() expected_outcomes[weight].setWeight(qt_weight) for (style, qt_style) in font_styles.items(): expected_outcomes[style] = TraitsFont() expected_outcomes[style].setStyle(qt_style) expected_outcomes['underline'] = TraitsFont() expected_outcomes['underline'].setUnderline(True) expected_outcomes['18'] = TraitsFont() expected_outcomes['18'].setPointSize(18) expected_outcomes['18 pt'] = TraitsFont() expected_outcomes['18 pt'].setPointSize(18) expected_outcomes['18 point'] = TraitsFont() expected_outcomes['18 point'].setPointSize(18) for (family, qt_style_hint) in font_families.items(): expected_outcomes[family] = TraitsFont() expected_outcomes[family].setStyleHint(qt_style_hint) default_size = QApplication.font().pointSize() expected_outcomes['Courier'] = TraitsFont('Courier', default_size) expected_outcomes['Comic Sans'] = TraitsFont('Comic Sans', default_size) expected_outcomes['18 pt Bold Oblique Underline Comic Sans script'] = TraitsFont('Comic Sans', 18, QFont.Weight.Bold, False) expected_outcomes['18 pt Bold Oblique Underline Comic Sans script'].setStyleHint(QFont.StyleHint.Cursive) expected_outcomes['18 pt Bold Oblique Underline Comic Sans script'].setStyle(QFont.Style.StyleOblique) expected_outcomes['18 pt Bold Oblique Underline Comic Sans script'].setUnderline(True) for (name, expected) in expected_outcomes.items(): with self.subTest(name=name): result = create_traitsfont(name) self.assertIsInstance(result, TraitsFont) self.assert_qfont_equal(result, expected) result_2 = create_traitsfont(font_to_str(result)) self.assert_qfont_equal(result, result_2) def test_create_traitsfont_qfont(self): font = QFont('Comic Sans', 18, QFont.Weight.Bold, False) traits_font = create_traitsfont(font) self.assertIsInstance(traits_font, TraitsFont) self.assert_qfont_equal(traits_font, font) def test_create_traitsfont_pyface_font(self): args = simple_parser('18 pt Bold Oblique Underline Courier') font = PyfaceFont(**args) traits_font = create_traitsfont(font) self.assertIsInstance(traits_font, TraitsFont) self.assert_qfont_equal(traits_font, font.to_toolkit()) def test_font_trait_default(self): obj = FontExample() self.assertIsInstance(obj.font, TraitsFont) self.assert_qfont_equal(obj.font, TraitsFont()) def test_font_trait_str(self): obj = FontExample(font='18 pt Bold Oblique Underline Comic Sans script') qfont = TraitsFont('Comic Sans', 18, QFont.Weight.Bold, False) qfont.setStyleHint(QFont.StyleHint.Cursive) qfont.setStyle(QFont.Style.StyleOblique) qfont.setUnderline(True) self.assertIsInstance(obj.font, TraitsFont) self.assert_qfont_equal(obj.font, qfont) def test_font_trait_qfont(self): qfont = TraitsFont('Comic Sans', 18, QFont.Weight.Bold, False) qfont.setStyleHint(QFont.StyleHint.Cursive) qfont.setStyle(QFont.Style.StyleOblique) qfont.setUnderline(True) obj = FontExample(font=qfont) self.assertIsInstance(obj.font, TraitsFont) self.assert_qfont_equal(obj.font, qfont) def test_font_trait_pyface_font(self): args = simple_parser('18 pt Bold Oblique Underline Courier typewriter') font = PyfaceFont(**args) obj = FontExample(font=font) self.assertIsInstance(obj.font, TraitsFont) self.assert_qfont_equal(obj.font, font.to_toolkit()) def test_font_trait_none(self): obj = FontExample(font=None) self.assertIsNone(obj.font) def test_font_trait_bad(self): with self.assertRaises(TraitError): obj = 
FontExample(font=1)

    def test_traits_font_reduce(self):
        traits_font = TraitsFont('Comic Sans', 18, QFont.Weight.Bold, False)
        traits_font.setStyleHint(QFont.StyleHint.Cursive)
        traits_font.setStyle(QFont.Style.StyleOblique)
        traits_font.setUnderline(True)
        result = traits_font.__reduce_ex__(None)
        self.assertEqual(result, (create_traitsfont, ('18 point Comic Sans Oblique Bold underline',)))

    def test_traits_font_str(self):
        traits_font = TraitsFont('Comic Sans', 18, QFont.Weight.Bold, False)
        traits_font.setStyleHint(QFont.StyleHint.Cursive)
        traits_font.setStyle(QFont.Style.StyleOblique)
        traits_font.setUnderline(True)
        result = str(traits_font)
        self.assertEqual(result, '18 point Comic Sans Oblique Bold underline')

    def assert_qfont_equal(self, font, other):
        self.assertIsInstance(font, QFont)
        self.assertIsInstance(other, QFont)
        self.assertEqual(font.family(), other.family())
        self.assertEqual(font.styleHint(), other.styleHint())  # fixed: the source compared font.styleHint() with itself
        self.assertEqual(font.pointSize(), other.pointSize())
        self.assertEqual(font.style(), other.style())
        self.assertEqual(font.weight(), other.weight())
        self.assertEqual(font.underline(), other.underline())
class TestOldAPI():
    @pytest.mark.skipif((sys.version_info >= (3, 0)), reason='ok on Py3')  # "@pytest.mark" prefix restored; it was truncated in the source
    def test_duplicate_keys_02(self):
        from srsly.ruamel_yaml import safe_load
        from srsly.ruamel_yaml.constructor import DuplicateKeyError
        with pytest.raises(DuplicateKeyError):
            safe_load('type: Domestica\ntype: International')
def heading(state: StateBlock, startLine: int, endLine: int, silent: bool) -> bool:
    LOGGER.debug('entering heading: %s, %s, %s, %s', state, startLine, endLine, silent)
    pos = (state.bMarks[startLine] + state.tShift[startLine])
    maximum = state.eMarks[startLine]
    if state.is_code_block(startLine):
        return False
    ch: (str | None) = state.src[pos]
    if ((ch != '#') or (pos >= maximum)):
        return False
    level = 1
    pos += 1
    try:
        ch = state.src[pos]
    except IndexError:
        ch = None
    while ((ch == '#') and (pos < maximum) and (level <= 6)):
        level += 1
        pos += 1
        try:
            ch = state.src[pos]
        except IndexError:
            ch = None
    if ((level > 6) or ((pos < maximum) and (not isStrSpace(ch)))):
        return False
    if silent:
        return True
    maximum = state.skipSpacesBack(maximum, pos)
    tmp = state.skipCharsStrBack(maximum, '#', pos)
    if ((tmp > pos) and isStrSpace(state.src[(tmp - 1)])):
        maximum = tmp
    state.line = (startLine + 1)
    token = state.push('heading_open', ('h' + str(level)), 1)
    token.markup = '########'[:level]  # the literal '#' run was stripped from the source ("''[:level]"); restored so markup matches the heading level
    token.map = [startLine, state.line]
    token = state.push('inline', '', 0)
    token.content = state.src[pos:maximum].strip()
    token.map = [startLine, state.line]
    token.children = []
    token = state.push('heading_close', ('h' + str(level)), (- 1))
    token.markup = '########'[:level]
    return True
@app.route('/api/user/create', methods=['POST'])  # "@app.route" prefix restored; it was truncated in the source
@admin_secure  # decorator reconstructed from the "_secure" fragment; the "admin_" prefix is an assumption
def user_create():
    username = request.form['username']
    password = request.form['password']
    password2 = request.form['password2']
    is_admin = (request.form['is_admin'] == 'true')
    if (password != password2):
        return (jsonify({'message': "Passwords don't match."}), BAD_REQUEST_STATUS)
    with session_scope() as session:
        try:
            user = User(username=username, is_admin=is_admin)
            user.set_password(password=password)
            session.add(user)
            session.commit()
        except IntegrityError:
            return (jsonify({'message': 'Username already exists.'}), BAD_REQUEST_STATUS)
        except Exception as e:
            return (jsonify({'message': str(e)}), BAD_REQUEST_STATUS)
    return 'OK'
class AccountTable(BaseWalletStore): LOGGER_NAME = 'db-table-account' CREATE_SQL = 'INSERT INTO Accounts (account_id, default_masterkey_id, default_script_type, account_name, date_created, date_updated) VALUES (?, ?, ?, ?, ?, ?)' READ_SQL = 'SELECT account_id, default_masterkey_id, default_script_type, account_name FROM Accounts' UPDATE_MASTERKEY_SQL = 'UPDATE Accounts SET date_updated=?, default_masterkey_id=?, default_script_type=? WHERE account_id=?' UPDATE_NAME_SQL = 'UPDATE Accounts SET date_updated=?, account_name=? WHERE account_id=?' UPDATE_SCRIPT_TYPE_SQL = 'UPDATE Accounts SET date_updated=?, default_script_type=? WHERE account_id=?' DELETE_SQL = 'DELETE FROM Accounts WHERE account_id=?' def create(self, entries: Iterable[AccountRow], completion_callback: Optional[CompletionCallbackType]=None) -> None: timestamp = self._get_current_timestamp() datas = [(*t, timestamp, timestamp) for t in entries] def _write(db: sqlite3.Connection): db.executemany(self.CREATE_SQL, datas) self._db_context.queue_write(_write, completion_callback) def read(self) -> List[AccountRow]: cursor = self._db.execute(self.READ_SQL) rows = cursor.fetchall() cursor.close() return [AccountRow(*t) for t in rows] def update_masterkey(self, entries: Iterable[Tuple[(Optional[int], ScriptType, int)]], date_updated: Optional[int]=None, completion_callback: Optional[CompletionCallbackType]=None) -> None: date_updated = (self._get_current_timestamp() if (date_updated is None) else date_updated) datas = [] for (masterkey_id, script_type, account_id) in entries: datas.append((date_updated, masterkey_id, script_type, account_id)) def _write(db: sqlite3.Connection): db.executemany(self.UPDATE_MASTERKEY_SQL, datas) self._db_context.queue_write(_write, completion_callback) def update_name(self, entries: Iterable[Tuple[(int, str)]], date_updated: Optional[int]=None, completion_callback: Optional[CompletionCallbackType]=None) -> None: date_updated = (self._get_current_timestamp() if (date_updated is None) else date_updated) datas = [] for (account_id, account_name) in entries: datas.append((date_updated, account_name, account_id)) def _write(db: sqlite3.Connection): db.executemany(self.UPDATE_NAME_SQL, datas) self._db_context.queue_write(_write, completion_callback) def update_script_type(self, entries: Iterable[Tuple[(ScriptType, int)]], date_updated: Optional[int]=None, completion_callback: Optional[CompletionCallbackType]=None) -> None: date_updated = (self._get_current_timestamp() if (date_updated is None) else date_updated) datas = [] for entry in entries: datas.append((date_updated, *entry)) def _write(db: sqlite3.Connection): db.executemany(self.UPDATE_SCRIPT_TYPE_SQL, datas) self._db_context.queue_write(_write, completion_callback) def delete(self, account_ids: Iterable[int], completion_callback: Optional[CompletionCallbackType]=None) -> None: manyparams = [(account_id,) for account_id in account_ids] def _write(db: sqlite3.Connection): db.executemany(self.DELETE_SQL, manyparams) self._db_context.queue_write(_write, completion_callback)
class _ContainerProjectsLocationsRepository(_base_repository.GCPRepository): def __init__(self, **kwargs): super(_ContainerProjectsLocationsRepository, self).__init__(component='projects.locations', **kwargs) def get_serverconfig(self, project_id, location, fields=None, **kwargs): name = 'projects/{}/locations/{}'.format(project_id, location) arguments = {'name': name, 'fields': fields} if kwargs: arguments.update(kwargs) return self.execute_query(verb='getServerConfig', verb_arguments=arguments)
def long_field(data):
    str_or_bytes = (str(data) if (not isinstance(data, (str, bytes))) else data)
    if (len(str_or_bytes) > LONG_FIELD_MAX_LENGTH):
        if isinstance(str_or_bytes, bytes):
            return (str_or_bytes[:(LONG_FIELD_MAX_LENGTH - 3)] + b'...')
        else:
            # truncate to the same cap as the bytes branch; the source sliced to
            # (LONG_FIELD_MAX_LENGTH - 1), which overshot the cap once '...' was appended
            return (str_or_bytes[:(LONG_FIELD_MAX_LENGTH - 3)] + '...')
    else:
        return data
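# Hedged demo of long_field above (not from the source). LONG_FIELD_MAX_LENGTH is assumed
# to be a module-level constant; 60 is chosen here only to make the sketch self-contained.
LONG_FIELD_MAX_LENGTH = 60

print(long_field('x' * 10))    # short values pass through unchanged
print(long_field('x' * 100))   # long str is truncated and suffixed with '...'
print(long_field(b'y' * 100))  # long bytes is truncated and suffixed with b'...'
print(long_field(12345))       # non-str/bytes values are measured via str(data)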
@coprs_ns.route('/<username>/new-fedora-review/', methods=['GET', 'POST'])  # blueprint "coprs_ns" inferred from the redirect target below; only "_ns" survived in the source
@login_required  # decorator reconstructed from the "_required" fragment
def copr_add_fedora_review(username):
    delete_after_days = app.config['DELETE_AFTER_DAYS']
    if (username != flask.g.user.username):
        flask.flash("You can not add projects for '{}' user".format(username), 'error')
        return flask.redirect('/')
    form = forms.CoprFedoraReviewForm(user=flask.g.user)
    if (flask.request.method == 'POST'):
        if form.validate_on_submit():
            copr = coprs_logic.CoprsLogic.add(flask.g.user, name=form.name.data, selected_chroots=['fedora-rawhide-x86_64'], description='Project was created only to execute Fedora Review tool for all builds.', instructions='You should ask the project owner before installing anything from this project.', unlisted_on_hp=True, delete_after_days=delete_after_days, follow_fedora_branching=False, fedora_review=True)
            db.session.commit()
            flask.flash('New review project has been created successfully, will be removed after {} days (if not prolonged)'.format(delete_after_days), 'success')
            return flask.redirect(helpers.copr_url('coprs_ns.copr_add_build', copr))
        flask.flash('Error in project config', 'error')
    return flask.render_template('coprs/add_fedora_review.html', form=form)
class ElasticsearchAccountDisasterBase(DisasterBase):
    agg_group_name: str = 'group_by_agg_key'
    agg_key: str
    bucket_count: int
    filter_query: ES_Q
    has_children: bool = False
    nested_nonzero_fields: List[str] = []  # annotation fixed: the source annotated Dict[(str, str)] but assigned a list
    query_fields: List[str]
    sub_agg_group_name: str = 'sub_group_by_sub_agg_key'
    sub_agg_key: str = None
    sub_top_hits_fields: List[str]
    top_hits_fields: List[str]
    pagination: Pagination

    @cache_response()  # decorator reconstructed from the "_response()" fragment; the "cache_" prefix is an assumption
    def post(self, request):
        return Response(self.perform_elasticsearch_search())

    def perform_elasticsearch_search(self, loans=False) -> Response:
        filters = {f'nested_{key}': val for (key, val) in self.filters.items() if (key != 'award_type_codes')}
        if (self.filters.get('award_type_codes') is not None):
            filters['award_type_codes'] = self.filters['award_type_codes']
        query = filters.pop('nested_query', None)
        if query:
            filters['nested_query'] = {'text': query, 'fields': self.query_fields}
        filters['nonzero_fields'] = ['obligated_sum', 'outlay_sum']
        self.filter_query = QueryWithFilters.generate_accounts_elasticsearch_query(filters)
        self.bucket_count = 1000
        messages = []
        if (self.pagination.sort_key in ('id', 'code')):
            messages.append(f"Notice! API Request to sort on '{self.pagination.sort_key}' field isn't fully implemented. Results were actually sorted using 'description' field.")
        response = self.query_elasticsearch(loans)
        response['page_metadata'] = get_pagination_metadata(len(response['results']), self.pagination.limit, self.pagination.page)
        response['results'] = response['results'][self.pagination.lower_limit:self.pagination.upper_limit]
        if messages:
            response['messages'] = messages
        return Response(response)

    def build_elasticsearch_result(self, info_buckets: List[dict]) -> List[dict]:
        pass

    def build_elasticsearch_search_with_aggregations(self) -> Optional[AccountSearch]:
        if (self.bucket_count == 0):
            return None
        search = AccountSearch().filter(self.filter_query)
        financial_accounts_agg = A('nested', path='financial_accounts_by_award')
        if ('query' in self.filters):
            terms = ES_Q('terms', **{'financial_accounts_by_award.disaster_emergency_fund_code': self.filters.get('def_codes')})
            query = ES_Q('multi_match', query=self.filters['query'], type='phrase_prefix', fields=[f'financial_accounts_by_award.{field}' for field in self.query_fields])  # comprehension variable renamed from "query", which shadowed the outer name
            filter_agg_query = ES_Q('bool', should=[terms, query], minimum_should_match=2)
        else:
            filter_agg_query = ES_Q('terms', **{'financial_accounts_by_award.disaster_emergency_fund_code': self.filters.get('def_codes')})
        filtered_aggs = A('filter', filter_agg_query)
        group_by_dim_agg = A('terms', field=self.agg_key, size=self.bucket_count)
        dim_metadata = A('top_hits', size=1, sort=[{'financial_accounts_by_award.update_date': {'order': 'desc'}}], _source={'includes': self.top_hits_fields})
        sum_covid_outlay = A('sum', script="doc['financial_accounts_by_award.is_final_balances_for_fy'].value ? (\n ( doc['financial_accounts_by_award.gross_outlay_amount_by_award_cpe'].size() > 0 ? doc['financial_accounts_by_award.gross_outlay_amount_by_award_cpe'].value : 0)\n + (doc['financial_accounts_by_award.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe'].size() > 0 ? doc['financial_accounts_by_award.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe'].value : 0)\n + (doc['financial_accounts_by_award.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'].size() > 0 ? doc['financial_accounts_by_award.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'].value : 0) ) : 0")
        sum_covid_obligation = A('sum', field='financial_accounts_by_award.transaction_obligated_amount')
        count_awards_by_dim = A('reverse_nested', **{})
        award_count = A('value_count', field='financial_account_distinct_award_key')
        loan_value = A('sum', field='total_loan_value')
        search.aggs.bucket(self.agg_group_name, financial_accounts_agg).bucket('filtered_aggs', filtered_aggs).bucket('group_by_dim_agg', group_by_dim_agg).metric('dim_metadata', dim_metadata).metric('sum_transaction_obligated_amount', sum_covid_obligation).metric('sum_gross_outlay_amount_by_award_cpe', sum_covid_outlay).bucket('count_awards_by_dim', count_awards_by_dim).metric('award_count', award_count).metric('sum_loan_value', loan_value)
        if self.sub_agg_key:
            self.extend_elasticsearch_search_with_sub_aggregation(search)
        search.update_from_dict({'size': 0})
        return search

    def extend_elasticsearch_search_with_sub_aggregation(self, search: AccountSearch):
        sub_bucket_count = 1000
        size = sub_bucket_count
        shard_size = (sub_bucket_count + 100)
        if (shard_size > 10000):
            raise ForbiddenException('Current filters return too many unique items. Narrow filters to return results or use downloads.')
        sub_group_by_sub_agg_key_values = {'field': self.sub_agg_key, 'size': size, 'shard_size': shard_size}
        sub_group_by_sub_agg_key = A('terms', **sub_group_by_sub_agg_key_values)
        sub_dim_metadata = A('top_hits', size=1, sort=[{'financial_accounts_by_award.update_date': {'order': 'desc'}}], _source={'includes': self.sub_top_hits_fields})
        sub_sum_covid_outlay = A('sum', script="doc['financial_accounts_by_award.is_final_balances_for_fy'].value ? ( ( doc['financial_accounts_by_award.gross_outlay_amount_by_award_cpe'].size() > 0 ? doc['financial_accounts_by_award.gross_outlay_amount_by_award_cpe'].value : 0)\n + (doc['financial_accounts_by_award.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe'].size() > 0 ? doc['financial_accounts_by_award.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe'].value : 0)\n + (doc['financial_accounts_by_award.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'].size() > 0 ? doc['financial_accounts_by_award.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'].value : 0) ) : 0")
        sub_sum_covid_obligation = A('sum', field='financial_accounts_by_award.transaction_obligated_amount')
        sub_count_awards_by_dim = A('reverse_nested', **{})
        sub_award_count = A('value_count', field='financial_account_distinct_award_key')
        loan_value = A('sum', field='total_loan_value')
        sub_group_by_sub_agg_key.metric('dim_metadata', sub_dim_metadata).metric('sum_transaction_obligated_amount', sub_sum_covid_obligation).metric('sum_gross_outlay_amount_by_award_cpe', sub_sum_covid_outlay).bucket('count_awards_by_dim', sub_count_awards_by_dim).metric('award_count', sub_award_count).metric('sum_loan_value', loan_value)
        search.aggs[self.agg_group_name]['group_by_dim_agg'].bucket(self.sub_agg_group_name, sub_group_by_sub_agg_key)

    def build_totals(self, response: List[dict], loans: bool=False) -> dict:
        obligations = 0
        outlays = 0
        award_count = 0
        loan_sum = 0
        for item in response:
            obligations += item['obligation']
            outlays += item['outlay']
            award_count += item['award_count']
            if loans:
                loan_sum += item['face_value_of_loan']
        retval = {'obligation': round(obligations, 2), 'outlay': round(outlays, 2), 'award_count': award_count}
        if loans:
            retval['face_value_of_loan'] = loan_sum
        return retval

    def query_elasticsearch(self, loans) -> dict:
        search = self.build_elasticsearch_search_with_aggregations()
        if (search is None):
            totals = self.build_totals(response=[], loans=loans)
            return {'totals': totals, 'results': []}
        response = search.handle_execute()
        response = response.aggs.to_dict()
        buckets = response.get(self.agg_group_name, {}).get('filtered_aggs', {}).get('group_by_dim_agg', {}).get('buckets', [])
        results = self.build_elasticsearch_result(buckets)
        totals = self.build_totals(results, loans)
        sorted_results = self.sort_results(results)
        return {'totals': totals, 'results': sorted_results}

    def sort_results(self, results: List[dict]) -> List[dict]:
        sorted_parents = sorted(results, key=(lambda val: val.get(self.pagination.sort_key, 'id')), reverse=(self.pagination.sort_order == 'desc'))
        if self.has_children:
            for parent in sorted_parents:
                parent['children'] = sorted(parent.get('children', []), key=(lambda val: val.get(self.pagination.sort_key, 'id')), reverse=(self.pagination.sort_order == 'desc'))
        return sorted_parents
class OptionPlotoptionsVectorOnpoint(Options):
    def connectorOptions(self) -> 'OptionPlotoptionsVectorOnpointConnectoroptions':
        return self._config_sub_data('connectorOptions', OptionPlotoptionsVectorOnpointConnectoroptions)

    @property  # property/setter pair reconstructed: the decorators were stripped from the source, so the setter silently shadowed the getter
    def id(self):
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    def position(self) -> 'OptionPlotoptionsVectorOnpointPosition':
        return self._config_sub_data('position', OptionPlotoptionsVectorOnpointPosition)
class TopicSetWCI(object): def __init__(self, topic_set, commit_info): self.__topic_set = topic_set self.__commit_info = commit_info def get_topic_set(self): return self.__topic_set def get_commit_info(self): return self.__commit_info def execute(self, executor, func_prefix): tracer.debug('Calling pre.') FuncCall.pcall(executor, (func_prefix + 'topic_set_pre'), self) tracer.debug('Calling sub topic.') self.__topic_set.execute(executor, func_prefix) tracer.debug('Calling post.') FuncCall.pcall(executor, (func_prefix + 'topic_set_post'), self) tracer.debug('Finished.') def create_makefile_name(self, name, topicn): return self.__topic_set.create_makefile_name(name, topicn) def get_master_topic(self): return self.__topic_set.get_master_topic() def get_requirement_set(self): return self.__topic_set.get_requirement_set()
class _TabWidget(QtGui.QTabWidget): _active_icon = None _spinner_data = None def __init__(self, root, *args): QtGui.QTabWidget.__init__(self, *args) if (sys.platform == 'darwin'): self.setDocumentMode(True) self._root = root self.setTabBar(_DragableTabBar(self._root, self)) self.setTabsClosable(True) self.tabCloseRequested.connect(self._close_tab) if (not _TabWidget._spinner_data): _TabWidget._spinner_data = ImageResource('spinner.gif') def show_button(self, index): lbl = QtGui.QLabel(self) movie = QtGui.QMovie(_TabWidget._spinner_data.absolute_path, parent=lbl) movie.setCacheMode(QtGui.QMovie.CacheMode.CacheAll) movie.setScaledSize(QtCore.QSize(16, 16)) lbl.setMovie(movie) movie.start() self.tabBar().setTabButton(index, QtGui.QTabBar.ButtonPosition.LeftSide, lbl) def hide_button(self, index): curr = self.tabBar().tabButton(index, QtGui.QTabBar.ButtonPosition.LeftSide) if curr: curr.close() self.tabBar().setTabButton(index, QtGui.QTabBar.ButtonPosition.LeftSide, None) def active_icon(self): if (_TabWidget._active_icon is None): start = QtGui.QColor(0, 255, 0) stop = QtGui.QColor(0, 63, 0) size = self.iconSize() width = size.width() height = size.height() pm = QtGui.QPixmap(size) p = QtGui.QPainter() p.begin(pm) p.initFrom(self.tabBar()) p.fillRect(0, 0, width, height, p.background()) rg = QtGui.QRadialGradient((width / 2), (height / 2), width) rg.setColorAt(0.0, start) rg.setColorAt(1.0, stop) p.setBrush(rg) p.setPen(QtCore.Qt.PenStyle.NoPen) p.setRenderHint(QtGui.QPainter.RenderHint.Antialiasing) p.drawEllipse(0, 0, width, height) p.end() _TabWidget._active_icon = QtGui.QIcon(pm) return _TabWidget._active_icon def _still_needed(self): if (self.count() == 0): prune = self parent = prune.parent() while ((parent is not self._root) and (parent.count() == 1)): prune = parent parent = prune.parent() prune.hide() prune.deleteLater() def tabRemoved(self, idx): self._still_needed() if ((self._root._current_tab_w is self) and (self._root._current_tab_idx == idx)): self._root._current_tab_w = None def _close_tab(self, index): self._root._close_tab_request(self.widget(index))
def test_append_to_recent_files_working_properly(test_data): test_data['version'].id = 234 test_data['external_env'].append_to_recent_files(test_data['version']) path = test_data['external_env'].get_settings_file_path() with open(path, 'r') as f: vid = f.read() assert (vid == str(234))
class RegisterWithArgChecks(object): def __init__(self, name, req_args=None, opt_args=None): self._name = name if (not req_args): req_args = [] self._req_args = req_args if (not opt_args): opt_args = [] self._opt_args = opt_args self._all_args = (set(self._req_args) | set(self._opt_args)) def __call__(self, func): def wrapped_fun(**kwargs): if ((not kwargs) and (len(self._req_args) > 0)): raise MissingRequiredConf(desc='Missing all required attributes.') given_args = set(kwargs.keys()) unknown_attrs = (given_args - set(self._all_args)) if unknown_attrs: raise RuntimeConfigError(desc=('Unknown attributes %r' % unknown_attrs)) missing_req_args = (set(self._req_args) - given_args) if missing_req_args: conf_name = ', '.join(missing_req_args) raise MissingRequiredConf(conf_name=conf_name) req_values = [] for req_arg in self._req_args: req_value = kwargs.get(req_arg) validator = get_validator(req_arg) if (not validator): raise ValueError(('No validator registered for function=%s and arg=%s' % (func, req_arg))) validator(req_value) req_values.append(req_value) opt_items = {} for (opt_arg, opt_value) in kwargs.items(): if (opt_arg in self._opt_args): validator = get_validator(opt_arg) if validator: validator(opt_value) opt_items[opt_arg] = opt_value return func(*req_values, **opt_items) _CALL_REGISTRY[self._name] = wrapped_fun return func
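# Hedged usage sketch for RegisterWithArgChecks above (not from the source). It assumes the
# surrounding module provides get_validator() for each argument name and that callers invoke
# the validating wrapper through _CALL_REGISTRY; the names below are hypothetical.
@RegisterWithArgChecks(name='neighbor.create', req_args=['ip_address'], opt_args=['description'])
def create_neighbor(ip_address, description=''):
    return {'ip_address': ip_address, 'description': description}

# The decorator returns the original function unwrapped; the keyword-only, validated entry
# point is what lands in the registry:
#   _CALL_REGISTRY['neighbor.create'](ip_address='192.0.2.1')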
class RepresentationTests(TestCase): def test_unicode_crash(self): grammar = Grammar('string = ~r"\\S+"u') str(grammar.parse('')) def test_unicode(self): str(rule_grammar) def test_unicode_keep_parens(self): self.assertEqual(str(Grammar('foo = "bar" ("baz" "eggs")* "spam"')), "foo = 'bar' ('baz' 'eggs')* 'spam'") self.assertEqual(str(Grammar('foo = "bar" ("baz" "eggs"){2,4} "spam"')), "foo = 'bar' ('baz' 'eggs'){2,4} 'spam'") self.assertEqual(str(Grammar('foo = "bar" ("baz" "eggs"){2,} "spam"')), "foo = 'bar' ('baz' 'eggs'){2,} 'spam'") self.assertEqual(str(Grammar('foo = "bar" ("baz" "eggs"){1,} "spam"')), "foo = 'bar' ('baz' 'eggs')+ 'spam'") self.assertEqual(str(Grammar('foo = "bar" ("baz" "eggs"){,4} "spam"')), "foo = 'bar' ('baz' 'eggs'){,4} 'spam'") self.assertEqual(str(Grammar('foo = "bar" ("baz" "eggs"){0,1} "spam"')), "foo = 'bar' ('baz' 'eggs')? 'spam'") self.assertEqual(str(Grammar('foo = "bar" ("baz" "eggs"){0,} "spam"')), "foo = 'bar' ('baz' 'eggs')* 'spam'") self.assertEqual(str(Grammar('foo = "bar" ("baz" / "eggs") "spam"')), "foo = 'bar' ('baz' / 'eggs') 'spam'") self.assertEqual(str(Grammar('foo = "bar" &("baz" "eggs") "spam"')), "foo = 'bar' &('baz' 'eggs') 'spam'") self.assertEqual(str(Grammar('foo = ("bar" "baz") / ("baff" "bam")')), "foo = ('bar' 'baz') / ('baff' 'bam')") def test_unicode_surrounding_parens(self): self.assertEqual(str(Grammar('foo = ("foo" ("bar" "baz"))')), "foo = 'foo' ('bar' 'baz')")
@pytest.mark.usefixtures('use_tmpdir')  # "@pytest.mark" prefix restored; it was truncated in the source
def test_rms_job_script_parser(monkeypatch, source_root):
    with open('rms_config.yml', 'w', encoding='utf-8') as f:
        json.dump({'executable': os.path.realpath('bin/rms'), 'env': {'10.1.3': {'PATH': ''}}}, f)
    monkeypatch.setenv('RMS_TEST_VAR', 'fdsgfdgfdsgfds')
    os.mkdir('run_path')
    os.mkdir('bin')
    os.mkdir('project')
    shutil.copy(os.path.join(source_root, 'tests/unit_tests/shared/share/rms'), 'bin')
    monkeypatch.setenv('RMS_SITE_CONFIG', 'rms_config.yml')
    action = {'exit_status': 0}
    with open('run_path/action.json', 'w', encoding='utf-8') as f:
        f.write(json.dumps(action))
    rms_exec = (_get_ert_shared_dir() + '/share/ert/forward-models/res/script/rms.py')
    subprocess.check_call([rms_exec, '--run-path', 'run_path', '0', '--version', '10.1.3', 'project', '--import-path', './', '--export-path', './', 'workflow', ''])
    subprocess.check_call([rms_exec, '--run-path', 'run_path', '0', '--version', '10.1.3', 'project', 'workflow', '-a'])