code
stringlengths
281
23.7M
def test_invalid_type(client):
    """An unknown ``type`` value must be rejected with HTTP 400 and a descriptive message."""
    request_url = url.format(toptier_code='043', fiscal_year=2020, fiscal_period=8, type='procurementS')
    response = client.get(request_url)
    assert response.status_code == status.HTTP_400_BAD_REQUEST
    body = response.json()
    assert body['detail'] == "Field 'type' is outside valid values ['assistance', 'procurement']"
class Function_Signature(Node):
    # AST node for a function signature: a (possibly dotted) name plus lists
    # of input and output parameter identifiers.
    # NOTE(review): ICE/Message_Handler/Config suggest this belongs to a
    # MATLAB-style linter (miss_hit-like) — confirm against the project.

    def __init__(self):
        super().__init__()
        self.n_name = None            # Name node; set via set_name()
        self.l_inputs = None          # list of Identifier nodes; set via set_inputs()
        self.l_outputs = None         # list of Identifier nodes; set via set_outputs()
        self.is_constructor = False   # flagged externally for class constructors

    def loc(self):
        # Error locations come from the name node; a blank signature has
        # nowhere to attach an error, which is an internal compiler error.
        if self.n_name:
            return self.n_name.loc()
        else:
            raise ICE('cannot attach error to blank signature')

    def set_name(self, n_name):
        # Attach the signature name (must be a simple dotted name).
        assert isinstance(n_name, Name)
        assert n_name.is_simple_dotted_name()
        self.n_name = n_name
        self.n_name.set_parent(self)

    def set_inputs(self, l_inputs):
        # Attach input parameters; each must be an Identifier.
        assert isinstance(l_inputs, list)
        for n in l_inputs:
            assert isinstance(n, Identifier), (str(n) + (' is %s and not an Identifier' % n.__class__.__name__))
        self.l_inputs = l_inputs
        for n_input in self.l_inputs:
            n_input.set_parent(self)

    def set_outputs(self, l_outputs):
        # Attach output parameters; each must be an Identifier.
        assert isinstance(l_outputs, list)
        for n in l_outputs:
            assert isinstance(n, Identifier)
        self.l_outputs = l_outputs
        for n_output in self.l_outputs:
            n_output.set_parent(self)

    def set_parent(self, n_parent):
        # Signatures live in a function definition or a special block
        # (e.g. a methods block with forward declarations).
        assert isinstance(n_parent, (Function_Definition, Special_Block))
        super().set_parent(n_parent)

    def visit(self, parent, function, relation):
        # Visitor traversal order: name, then inputs, then outputs.
        self._visit(parent, function, relation)
        self.n_name.visit(self, function, 'Name')
        self._visit_list(self.l_inputs, function, 'Inputs')
        self._visit_list(self.l_outputs, function, 'Outputs')
        self._visit_end(parent, function, relation)

    def sty_check_naming(self, mh, cfg):
        # Style check: parameter naming, then the function name itself using
        # the rule that matches the kind of function this signature names.
        assert isinstance(mh, Message_Handler)
        assert isinstance(cfg, Config)
        if cfg.active('naming_parameters'):
            for param in (self.l_inputs + self.l_outputs):
                param.sty_check_naming(mh, cfg, 'parameter', 'naming_parameters')
        if (not cfg.active('naming_functions')):
            return
        n_fdef = self.n_parent
        if self.is_constructor:
            # Constructors are checked against the class naming rule.
            if (not isinstance(self.n_name, Identifier)):
                raise ICE(('class constructor with %s node as name' % self.n_name.__class__.__name__))
            self.n_name.sty_check_naming(mh, cfg, 'class', 'naming_functions')
        elif isinstance(n_fdef, Special_Block):
            # Forward declaration inside a methods block.
            if (not isinstance(self.n_name, Identifier)):
                raise ICE(('forward declaration with %s node as name' % self.n_name.__class__.__name__))
            self.n_name.sty_check_naming(mh, cfg, 'method', 'naming_functions')
        elif isinstance(n_fdef.n_parent, Function_Definition):
            # Function nested inside another function.
            if (not isinstance(self.n_name, Identifier)):
                raise ICE(('nested function with %s node as name' % self.n_name.__class__.__name__))
            self.n_name.sty_check_naming(mh, cfg, 'nested', 'naming_functions')
        elif n_fdef.is_class_method():
            # Class methods may be named by a plain identifier or by a
            # selection (e.g. obj.method); anything else is an internal error.
            if isinstance(self.n_name, Identifier):
                self.n_name.sty_check_naming(mh, cfg, 'method', 'naming_functions')
            elif (not isinstance(self.n_name, Selection)):
                raise ICE(('class method with %s node as name' % self.n_name.__class__.__name__))
            else:
                self.n_name.n_field.sty_check_naming(mh, cfg, 'method', 'naming_functions')
        else:
            # Ordinary free function.
            if (not isinstance(self.n_name, Identifier)):
                raise ICE(('ordinary function with %s node as name' % self.n_name.__class__.__name__))
            self.n_name.sty_check_naming(mh, cfg, 'function', 'naming_functions')
_arg_type(1)  # NOTE(review): looks like a decorator stripped of its '@' prefix — confirm upstream
def apply_formatters_to_sequence(formatters: List[Any], sequence: List[Any]) -> Generator[Any, None, None]:
    """Lazily apply each formatter to the item at the same position in *sequence*.

    Args:
        formatters: callables, one per element of *sequence*.
        sequence: values to be formatted.

    Yields:
        ``formatter(item)`` for each aligned pair.

    Raises:
        IndexError: when the number of formatters does not match the sequence length.
    """
    # Fix: the original annotated the return as Generator[(List[Any], None, None)],
    # a malformed one-argument subscription whose yield type (List[Any]) was also
    # wrong — each yielded value is a single formatted item.
    if len(formatters) > len(sequence):
        raise IndexError(f'Too many formatters for sequence: {len(formatters)} formatters for {repr(sequence)}')
    if len(formatters) < len(sequence):
        raise IndexError(f'Too few formatters for sequence: {len(formatters)} formatters for {repr(sequence)}')
    for formatter, item in zip(formatters, sequence):
        yield formatter(item)
_os(*metadata.platforms)
(TARGET_APP, common.CMD_PATH)
# NOTE(review): the two lines above look like decorators/metadata stripped of
# their '@' prefix (e.g. platform gating and binary dependencies) — confirm
# against the original emulation script.
def main():
    # Emulation step: copy executables into the Recycle Bin and execute them
    # from there, then clean up.
    common.log('Execute files from the Recycle Bin')
    target_dir = None
    for recycle_path in RECYCLE_PATHS:
        if Path(recycle_path).exists():
            target_dir = common.find_writeable_directory(recycle_path)
            if target_dir:
                break
    else:
        # for/else: only reached when no candidate yielded a writable dir.
        # NOTE(review): the flattened source makes this 'else' binding
        # ambiguous (for-else vs if-else) — for-else is assumed; confirm.
        common.log('Could not find a writeable directory in the recycle bin')
        exit(1)
    commands = [[TARGET_APP], [common.CMD_PATH, '/c', 'echo hello world']]
    common.log(('Running commands from recycle bin in %s' % target_dir))
    for command in commands:
        source_path = command[0]
        arguments = command[1:]
        # Each binary is staged under a fixed name inside the recycle bin.
        target_path = (Path(target_dir) / 'recycled_process.exe')
        common.copy_file(source_path, target_path)
        arguments.insert(0, target_path)
        common.execute(arguments)
        time.sleep(0.5)
        common.remove_file(target_path)
class SimsetMixin(MixinMeta):
    # Admin configuration commands for the simulation-league cog.
    #
    # NOTE(review): throughout this class, bare expression statements such as
    # '()', '(autohelp=True)', '(name="all")', '_or_permissions(...)' and
    # '_has_permissions(...)' look like discord.py command/permission
    # decorators whose '@object.' prefix was stripped in this dump (some are
    # not even valid Python as-is) — confirm against the original cog.

    _or_permissions(manage_guild=True)
    (autohelp=True)
    async def simset(self, ctx):
        # Settings group: with no subcommand, print the current guild config.
        if (ctx.invoked_subcommand is not None):
            return
        guild = ctx.guild
        gametime = (await self.config.guild(guild).gametime())
        htbreak = (await self.config.guild(guild).htbreak())
        results = (await self.config.guild(guild).resultchannel())
        bettoggle = (await self.config.guild(guild).bettoggle())
        maxplayers = (await self.config.guild(guild).maxplayers())
        redcardmodif = (await self.config.guild(guild).redcardmodifier())
        transfers = (await self.config.guild(guild).transferwindow())
        mentions = (await self.config.guild(guild).mentions())
        msg = ''
        msg += 'Game Time: 1m for every {}s.\n'.format(gametime)
        msg += 'Team Limit: {} players.\n'.format(maxplayers)
        msg += 'HT Break: {}s.\n'.format(htbreak)
        msg += 'Red Card Modifier: {}% loss per red card.\n'.format(redcardmodif)
        msg += 'Posting Results: {}.\n'.format(('Yes' if results else 'No'))
        msg += 'Transfer Window: {}.\n'.format(('Open' if transfers else 'Closed'))
        msg += 'Accepting Bets: {}.\n'.format(('Yes' if bettoggle else 'No'))
        msg += 'Mentions on game start: {}.\n'.format(('Yes' if mentions else 'No'))
        if bettoggle:
            # Betting-specific settings are only shown when betting is on.
            bettime = (await self.config.guild(guild).bettime())
            betmax = (await self.config.guild(guild).betmax())
            betmin = (await self.config.guild(guild).betmin())
            msg += 'Bet Time: {}s.\n'.format(bettime)
            msg += 'Max Bet: {}.\n'.format(betmax)
            msg += 'Min Bet: {}.\n'.format(betmin)
        (await ctx.send(box(msg)))

    _or_permissions(manage_guild=True)
    (autohelp=True)
    async def bet(self, ctx):
        """Betting settings group (body appears stripped in this dump)."""

    ()
    (autohelp=True, hidden=True)
    async def cupmode(self, ctx, bool: bool):
        # Toggle cup mode for the guild.
        if bool:
            (await ctx.send('Cup mode is now active.'))
        else:
            (await ctx.send('Cup mode is now disabled.'))
        (await self.config.guild(ctx.guild).cupmode.set(bool))

    ()
    (autohelp=True, hidden=True)
    async def probability(self, ctx):
        # Probability-tuning group; warn when invoked without a subcommand.
        if (ctx.invoked_subcommand is None):
            (await ctx.send(box('This has the chance to break the game completely, no support is offered.')))

    ()
    async def goals(self, ctx, amount: int=96):
        # Set the goal chance (1-100).
        if ((amount > 100) or (amount < 1)):
            return (await ctx.send('Amount must be greater than 0 and less than 100.'))
        async with self.config.guild(ctx.guild).probability() as probability:
            probability['goalchance'] = amount
        (await ctx.tick())

    ()
    async def yellow(self, ctx, amount: int=98):
        # Set the yellow-card chance (1-100).
        if ((amount > 100) or (amount < 1)):
            return (await ctx.send('Amount must be greater than 0 and less than 100.'))
        async with self.config.guild(ctx.guild).probability() as probability:
            probability['yellowchance'] = amount
        (await ctx.tick())

    ()
    ()
    async def maxplayers(self, ctx, amount: int):
        # Set the per-team player limit (3-7).
        if ((amount < 3) or (amount > 7)):
            return (await ctx.send('Amount must be between 3 and 7.'))
        (await self.config.guild(ctx.guild).maxplayers.set(amount))
        (await ctx.tick())

    ()
    async def redcardmodifier(self, ctx, amount: int):
        # Set the % penalty applied per red card (1-30).
        if ((amount < 1) or (amount > 30)):
            return (await ctx.send('Amount must be between 1 and 30.'))
        (await self.config.guild(ctx.guild).redcardmodifier.set(amount))
        (await ctx.tick())

    ()
    async def red(self, ctx, amount: int=398):
        # Set the red-card chance (1-400).
        if ((amount > 400) or (amount < 1)):
            return (await ctx.send('Amount must be greater than 0 and less than 400.'))
        async with self.config.guild(ctx.guild).probability() as probability:
            probability['redchance'] = amount
        (await ctx.tick())

    ()
    async def penalty(self, ctx, amount: int=249):
        # Set the penalty chance (1-250).
        if ((amount > 250) or (amount < 1)):
            return (await ctx.send('Amount must be greater than 0 and less than 250.'))
        async with self.config.guild(ctx.guild).probability() as probability:
            probability['penaltychance'] = amount
        (await ctx.tick())

    ()
    async def penaltyblock(self, ctx, amount: float=0.6):
        # Set the penalty-block probability (0-1).
        if ((amount > 1) or (amount < 0)):
            return (await ctx.send('Amount must be greater than 0 and less than 1.'))
        async with self.config.guild(ctx.guild).probability() as probability:
            probability['penaltyblock'] = amount
        (await ctx.tick())

    ()
    async def time(self, ctx, time: int=180):
        # Set the betting window in seconds; out-of-range falls back to 180.
        if ((time < 0) or (time > 600)):
            time = 180
        (await self.config.guild(ctx.guild).bettime.set(time))
        (await ctx.tick())

    ()
    async def max(self, ctx, amount: int):
        # Set the maximum bet.
        if (amount < 1):
            return (await ctx.send('Amount must be greater than 0.'))
        (await self.config.guild(ctx.guild).betmax.set(amount))
        (await ctx.tick())

    ()
    async def min(self, ctx, amount: int):
        # Set the minimum bet.
        if (amount < 1):
            return (await ctx.send('Amount must be greater than 0.'))
        (await self.config.guild(ctx.guild).betmin.set(amount))
        (await ctx.tick())

    ()
    async def toggle(self, ctx, toggle: bool):
        # Enable/disable betting.
        (await self.config.guild(ctx.guild).bettoggle.set(toggle))
        (await ctx.tick())

    ()
    async def gametime(self, ctx, time: float=1):
        # Set seconds of real time per in-game minute.
        # NOTE(review): the fallback of 90 lies outside the accepted 0-5
        # range — looks like a latent bug; confirm intended default.
        if ((time < 0) or (time > 5)):
            time = 90
        (await self.config.guild(ctx.guild).gametime.set(time))
        (await ctx.tick())

    ()
    async def halftimebreak(self, ctx, time: int=1):
        # Set half-time break length in seconds (0-20, fallback 5).
        if ((time < 0) or (time > 20)):
            time = 5
        (await self.config.guild(ctx.guild).htbreak.set(time))
        (await ctx.tick())

    ()
    async def resultchannel(self, ctx, channel: discord.TextChannel):
        # Add a channel to the list of result-posting channels.
        async with self.config.guild(ctx.guild).resultchannel() as channels:
            if (channel.id in channels):
                (await ctx.send('Results are already posted in this channel'))
                return
            channels.append(channel.id)
        (await ctx.tick())

    ()
    async def resultchannels(self, ctx, option: str):
        # 'clear' empties the result-channel list; 'show' lists channel names.
        if (option == 'clear'):
            (await self.config.guild(ctx.guild).resultchannel.set([]))
            (await ctx.tick())
        elif (option == 'show'):
            async with self.config.guild(ctx.guild).resultchannel() as result:
                a = []
                for res in result:
                    channel = ctx.guild.get_channel(res)
                    if (channel is not None):
                        a.append(channel.name)
                # NOTE(review): the colour value was lost in this dump — the
                # line below is a syntax error as written; restore the colour.
                embed = discord.Embed(title='Result channels', description='\n'.join(a), colour=)
                (await ctx.send(embed=embed))
        else:
            (await ctx.send("No parameter for resultchannels, you must choose 'show' or 'clear'"))

    ()
    async def window(self, ctx, status: str):
        # Open or close the transfer window.
        # NOTE(review): validation lowercases *status* but the comparison
        # below does not, so 'Open' would pass validation yet close the
        # window — looks like a latent bug; confirm.
        if (status.lower() not in ['open', 'close']):
            return (await ctx.send("You must specify either 'open' or 'close'."))
        if (status == 'open'):
            (await self.config.guild(ctx.guild).transferwindow.set(True))
            (await ctx.send('Window is now open.'))
        else:
            (await self.config.guild(ctx.guild).transferwindow.set(False))
            (await ctx.send('Window is now closed.'))

    ()
    async def mentions(self, ctx, bool: bool):
        # Toggle mentions on game start.
        if bool:
            (await self.config.guild(ctx.guild).mentions.set(True))
        else:
            (await self.config.guild(ctx.guild).mentions.set(False))

    (name='updatecache')
    async def levels_updatecache(self, ctx):
        # Rebuild the cached level data for the guild.
        async with ctx.typing():
            (await self.updatecacheall(ctx.guild))
        (await ctx.tick())

    ()
    _has_permissions(manage_roles=True)
    async def createroles(self, ctx):
        # Create one guild role per team that does not have one yet.
        async with self.config.guild(ctx.guild).teams() as teams:
            for team in teams:
                if (teams[team]['role'] is not None):
                    continue
                role = (await ctx.guild.create_role(name=team))
                teams[team]['role'] = role.id
        (await ctx.tick())

    ()
    _has_permissions(manage_roles=True)
    async def updateroles(self, ctx):
        # Assign each team's role to all of its members.
        teams = (await self.config.guild(ctx.guild).teams())
        for team in teams:
            if (teams[team]['role'] is None):
                self.log.debug(f'Skipping {team}, no role found.')
                continue
            role = ctx.guild.get_role(teams[team]['role'])
            for user in teams[team]['members']:
                member = ctx.guild.get_member(int(user))
                (await member.add_roles(role))
        (await ctx.tick())

    ()
    async def createfixtures(self, ctx):
        # Generate a double round-robin schedule (circle method): first legs
        # interleaved into the first half, return legs appended.
        teams = (await self.config.guild(ctx.guild).teams())
        teams = list(teams.keys())
        if (len(teams) % 2):
            # Odd team count: pad with a bye.
            teams.append('DAY OFF')
        n = len(teams)
        matchs = []
        fixtures = []
        return_matchs = []
        for _ in range(1, n):
            for i in range((n // 2)):
                matchs.append((teams[i], teams[((n - 1) - i)]))
                return_matchs.append((teams[((n - 1) - i)], teams[i]))
            # Rotate all teams except the first (circle method).
            teams.insert(1, teams.pop())
            fixtures.insert((len(fixtures) // 2), matchs)
            fixtures.append(return_matchs)
            matchs = []
            return_matchs = []
        a = []
        for (k, fixture) in enumerate(fixtures, 1):
            a.append(f'''Week {k}
''')
            for (i, game) in enumerate(fixture, 1):
                a.append(f'Game {i}: {game[0]} vs {game[1]}')
            a.append('')
        (await self.config.guild(ctx.guild).fixtures.set(fixtures))
        (await ctx.tick())

    ()
    ()
    async def clear(self, ctx):
        """Clear stored league data (command group; body appears stripped in this dump)."""

    (name='all')
    async def clear_all(self, ctx):
        # Wipe all guild config, standings and stats.
        (await self.config.guild(ctx.guild).clear())
        (await self.config.guild(ctx.guild).standings.set({}))
        (await self.config.guild(ctx.guild).stats.set({}))
        (await ctx.tick())

    (name='stats')
    async def clear_stats(self, ctx):
        # Reset standings to zeroed rows for every team and wipe stats.
        (await self.config.guild(ctx.guild).standings.set({}))
        teams = (await self.config.guild(ctx.guild).teams())
        async with self.config.guild(ctx.guild).standings() as standings:
            for team in teams:
                standings[team] = {'played': 0, 'wins': 0, 'losses': 0, 'points': 0, 'gd': 0, 'gf': 0, 'ga': 0, 'draws': 0}
        (await self.config.guild(ctx.guild).stats.set({}))
        (await ctx.tick())
def except_handle(tokens):
    """Rebuild an ``except`` clause header from parsed tokens.

    *tokens* is ``[keyword, errs]`` or ``[keyword, errs, asname]``; when a
    ``'list'`` marker appears among the tokens the error expression is
    wrapped in parentheses.
    """
    if len(tokens) == 2:
        except_kwd, errs = tokens
        asname = None
    elif len(tokens) == 3:
        except_kwd, errs, asname = tokens
    else:
        raise CoconutInternalException('invalid except tokens', tokens)
    errs_out = '(' + errs + ')' if 'list' in tokens else errs
    pieces = [except_kwd, ' ', errs_out]
    if asname is not None:
        pieces.append(' as ')
        pieces.append(asname)
    return ''.join(pieces)
def uses_ancestry(query):
    """Return True when *query* (an EQL node or analytic) contains a named subquery."""
    from . import ast
    if isinstance(query, ast.EqlAnalytic):
        # Unwrap analytics down to their underlying query node.
        query = query.query
    elif not isinstance(query, ast.EqlNode):
        raise TypeError('unsupported type {} to eql.utils.uses_ancestry. Expected {}'.format(type(query), ast.EqlNode))
    for node in query:
        if isinstance(node, ast.NamedSubquery):
            return True
    return False
def extractWwwVeratalesCom(item):
    """Map a veratales.com feed item to a release message.

    Returns None for previews/untagged chapters, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
.network .skipif((paramiko is None), reason='requires paramiko to run SFTP')
# NOTE(review): the line above looks like stripped pytest marks
# ('@pytest.mark.network', '@pytest.mark.skipif(...)') — it is not valid
# Python as written; confirm against the original test file.
def test_sftp_downloader_fail_if_file_object():
    # SFTPDownloader must reject an already-open file object with TypeError
    # (it expects a path, not a file handle).
    with TemporaryDirectory() as local_store:
        downloader = SFTPDownloader(username='demo', password='password')
        url = 'sftp://test.rebex.net/pub/example/pocketftp.png'
        outfile = os.path.join(local_store, 'pocketftp.png')
        with open(outfile, 'wb') as outfile_obj:
            with pytest.raises(TypeError):
                downloader(url, outfile_obj, None)
class OptionSeriesBarSonificationTracksMappingLowpass(Options):
    """Accessors for the lowpass-filter mapping options of bar sonification tracks."""

    def frequency(self) -> 'OptionSeriesBarSonificationTracksMappingLowpassFrequency':
        """Sub-configuration controlling the filter frequency."""
        sub_config = self._config_sub_data('frequency', OptionSeriesBarSonificationTracksMappingLowpassFrequency)
        return sub_config

    def resonance(self) -> 'OptionSeriesBarSonificationTracksMappingLowpassResonance':
        """Sub-configuration controlling the filter resonance."""
        sub_config = self._config_sub_data('resonance', OptionSeriesBarSonificationTracksMappingLowpassResonance)
        return sub_config
def as_completed(jobs: tp.Sequence[core.Job[core.R]], timeout: tp.Optional[tp.Union[(int, float)]]=None, poll_frequency: float=10) -> tp.Iterator[core.Job[core.R]]:
    """Yield jobs from *jobs* as each one reports done.

    Polls every *poll_frequency* seconds; raises TimeoutError if more than
    *timeout* seconds elapse before every job has completed.
    """
    start_time = time.time()
    completed: tp.Set[int] = set()
    while True:
        # Timeout is checked once per polling round, before scanning.
        if timeout is not None and (time.time() - start_time) > timeout:
            raise TimeoutError
        for index, job in enumerate(jobs):
            if index in completed:
                continue
            if job.done():
                completed.add(index)
                yield job
        if len(completed) == len(jobs):
            return
        time.sleep(poll_frequency)
def test_source_on_clone():
    """_source settings survive cloning and combine with filters."""
    chained = search.Search().source(includes=['foo.bar.*']).source(excludes=['foo.one']).filter('term', title='python')
    assert chained.to_dict() == {
        '_source': {'includes': ['foo.bar.*'], 'excludes': ['foo.one']},
        'query': {'bool': {'filter': [{'term': {'title': 'python'}}]}},
    }
    disabled = search.Search().source(False).filter('term', title='python')
    assert disabled.to_dict() == {
        '_source': False,
        'query': {'bool': {'filter': [{'term': {'title': 'python'}}]}},
    }
class _RegData():
    # Accessor object for the DATA register of a register map.
    #
    # NOTE(review): 'fifo' is defined twice — consistent with a stripped
    # '@property' / '@fifo.setter' pair; as plain defs the second definition
    # shadows the first. Confirm against the (likely generated) original.

    def __init__(self, rmap):
        self._rmap = rmap  # parent register map providing the bus interface (_if)

    def fifo(self):
        # Read the FIFO field (presumably the property getter; see note above).
        rdata = self._rmap._if.read(self._rmap.DATA_ADDR)
        return ((rdata >> self._rmap.DATA_FIFO_POS) & self._rmap.DATA_FIFO_MSK)

    def fifo(self, val):
        # Read-modify-write of the FIFO field (presumably the setter).
        rdata = self._rmap._if.read(self._rmap.DATA_ADDR)
        rdata = (rdata & (~ (self._rmap.DATA_FIFO_MSK << self._rmap.DATA_FIFO_POS)))
        rdata = (rdata | (val << self._rmap.DATA_FIFO_POS))
        self._rmap._if.write(self._rmap.DATA_ADDR, rdata)

    def ferr(self):
        # Read the FERR field (presumably a framing-error flag — confirm).
        rdata = self._rmap._if.read(self._rmap.DATA_ADDR)
        return ((rdata >> self._rmap.DATA_FERR_POS) & self._rmap.DATA_FERR_MSK)

    def perr(self):
        # Read the PERR field (presumably a parity-error flag — confirm).
        rdata = self._rmap._if.read(self._rmap.DATA_ADDR)
        return ((rdata >> self._rmap.DATA_PERR_POS) & self._rmap.DATA_PERR_MSK)
class AdgroupReviewFeedback(AbstractObject):
    # API object holding review feedback for an ad group.

    def __init__(self, api=None):
        super(AdgroupReviewFeedback, self).__init__()
        self._isAdgroupReviewFeedback = True
        self._api = api  # optional API session object

    class Field(AbstractObject.Field):
        # API field names; 'global' is a Python keyword, hence field_global.
        field_global = 'global'
        placement_specific = 'placement_specific'

    # Field name -> API type string, used for (de)serialization.
    _field_types = {'global': 'map<string, string>', 'placement_specific': 'AdgroupPlacementSpecificReviewFeedback'}

    def _get_field_enum_info(cls):
        # No enum-typed fields on this object, so the mapping is empty.
        # NOTE(review): takes 'cls' — looks like a stripped '@classmethod'; confirm.
        field_enum_info = {}
        return field_enum_info
def test_create_default_project_workspace_mel_already_exists(create_test_data, trash_bin):
    """Re-creating the default project where it already exists must not raise."""
    data = create_test_data  # fixture result; kept for its setup side effects
    archiver = Archiver()
    base_dir = tempfile.gettempdir()
    project_path = archiver.create_default_project(base_dir)
    trash_bin.append(project_path)
    # Subsequent creations hit the already-exists path and must succeed.
    project_path = archiver.create_default_project(base_dir)
    project_path = archiver.create_default_project(base_dir)
def test_multichannel_containers(audio, nb_channels, multichannel_format):
    """Round trip: stems written as channels read back with an unchanged shape."""
    suffix = '.' + multichannel_format
    with tmp.NamedTemporaryFile(delete=False, suffix=suffix) as tempfile:
        stempeg.write_stems(tempfile.name, audio, sample_rate=44100, writer=ChannelsWriter())
        loaded_audio, rate = stempeg.read_stems(tempfile.name, always_3d=True, reader=stempeg.ChannelsReader(nb_channels=nb_channels))
        assert audio.shape == loaded_audio.shape
class ApertureSetLever():
    # Camera aperture-setting lever coupled to an exposure control system.
    #
    # NOTE(review): 'aperture' is defined twice — consistent with a stripped
    # '@property' / '@aperture.setter' pair; as plain defs the second
    # definition shadows the first, and 'value > self.aperture' below only
    # makes sense with the property in place. Confirm against the original.

    def __init__(self, exposure_control_system=None, aperture=16):
        self.exposure_control_system = exposure_control_system
        self._aperture = aperture  # f-number currently selected on the lever

    def aperture(self):
        # Presumed getter for the selected aperture.
        return self._aperture

    def aperture(self, value):
        # Presumed setter: in Manual mode the iris follows immediately when
        # the shutter is cocked, or when the new value stops the lens down
        # further (larger f-number); the lever position is always recorded.
        if (self.exposure_control_system.mode == 'Manual'):
            if self.exposure_control_system.shutter.cocked:
                self.exposure_control_system.iris.aperture = value
            elif (value > self.aperture):
                self.exposure_control_system.iris.aperture = value
        # NOTE(review): the flattened source makes the indentation of this
        # assignment ambiguous; unconditional assignment is assumed.
        self._aperture = value
class TriggerWordsPresence(FeatureDescriptor):
    # Descriptor for a text feature that flags presence of given trigger words.
    # NOTE(review): the bare class-level annotations suggest a stripped
    # dataclass/model decorator — confirm against the original source.

    words_list: Tuple  # trigger words to look for
    lemmatize: bool = True  # lemmatize tokens before matching

    def feature(self, column_name: str) -> GeneratedFeature:
        # Build the concrete generated feature for *column_name*.
        return trigger_words_presence_feature.TriggerWordsPresent(column_name, self.words_list, self.lemmatize, self.display_name)

    def for_column(self, column_name: str):
        # Name of the generated feature column for *column_name*.
        return trigger_words_presence_feature.TriggerWordsPresent(column_name, self.words_list, self.lemmatize, self.display_name).feature_name()
class OptionSeriesBellcurveSonificationDefaultinstrumentoptionsMappingPan(Options):
    # Auto-generated accessor class for the 'pan' sonification mapping options.
    # NOTE(review): every name is defined twice — consistent with stripped
    # '@property' / setter decorator pairs; as plain defs each setter shadows
    # its getter. Confirm against the generator output.

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
('/callers/manage/<int:call_no>', methods=['GET', 'POST'])
# NOTE(review): the tuple above looks like a stripped Flask route decorator
# ('@app.route(...)' or '@bp.route(...)') — confirm against the original app.
def callers_manage(call_no):
    # Manage whitelist/blacklist membership for the caller behind *call_no*.
    # POST applies the requested add/remove action, then the page is rendered
    # from a fresh database lookup either way.
    post_count = None
    if (request.method == 'POST'):
        number = transform_number(request.form['phone_no'])
        if (request.form['action'] == 'add-permit'):
            caller = {}
            caller['NMBR'] = number
            caller['NAME'] = request.form['name']
            print(((' >> Adding ' + caller['NAME']) + ' to whitelist'))
            whitelist = Whitelist(get_db(), current_app.config)
            whitelist.add_caller(caller, request.form['reason'])
        elif (request.form['action'] == 'remove-permit'):
            print(((' >> Removing ' + number) + ' from whitelist'))
            whitelist = Whitelist(get_db(), current_app.config)
            whitelist.remove_number(number)
        elif (request.form['action'] == 'add-block'):
            caller = {}
            caller['NMBR'] = number
            caller['NAME'] = request.form['name']
            print(((' >> Adding ' + caller['NAME']) + ' to blacklist'))
            blacklist = Blacklist(get_db(), current_app.config)
            blacklist.add_caller(caller, request.form['reason'])
        elif (request.form['action'] == 'remove-block'):
            print(((' >> Removing ' + number) + ' from blacklist'))
            blacklist = Blacklist(get_db(), current_app.config)
            blacklist.remove_number(number)
        # Echo an incremented post counter back to the template.
        post_count = int(request.form['post_count'])
        post_count += 1
    else:
        post_count = 0
    # Fetch the call record plus list-membership flags and reasons.
    query = "SELECT\n a.CallLogID,\n a.Name,\n a.Number,\n CASE WHEN b.PhoneNo IS NULL THEN 'N' ELSE 'Y' END Whitelisted,\n CASE WHEN c.PhoneNo IS NULL THEN 'N' ELSE 'Y' END Blacklisted,\n CASE WHEN b.PhoneNo IS NOT NULL THEN b.Reason ELSE '' END WhitelistReason,\n CASE WHEN c.PhoneNo IS NOT NULL THEN c.Reason ELSE '' END BlacklistReason\n FROM calllog AS a\n LEFT JOIN whitelist AS b ON a.Number = b.PhoneNo\n LEFT JOIN blacklist AS c ON a.Number = c.PhoneNo\n WHERE a.CallLogID=:call_log_id"
    arguments = {'call_log_id': call_no}
    result_set = query_db(get_db(), query, arguments)
    caller = {}
    if (len(result_set) > 0):
        record = result_set[0]
        number = record[2]
        caller.update(dict(call_no=record[0], phone_no=format_phone_no(number), name=record[1], whitelisted=record[3], blacklisted=record[4], whitelist_reason=record[5], blacklist_reason=record[6]))
    else:
        # Unknown call id: render a placeholder caller.
        caller.update(dict(call_no=call_no, phone_no='Number Not Found', name='', whitelisted='N', blacklisted='N', whitelist_reason='', blacklist_reason=''))
    return render_template('callers_manage.html', caller=caller, post_count=post_count)
class OptionPlotoptionsArcdiagramSonificationDefaultinstrumentoptionsMappingTime(Options):
    # Auto-generated accessor class for the 'time' sonification mapping options.
    # NOTE(review): every name is defined twice — consistent with stripped
    # '@property' / setter decorator pairs; as plain defs each setter shadows
    # its getter. Confirm against the generator output.

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def bitwise_sar(evm: Evm) -> None:
    """SAR opcode: arithmetic (sign-extending) right shift.

    Pops the shift amount and the value, charges gas, pushes the shifted
    result, and advances the program counter.
    """
    shift = pop(evm.stack)
    signed_value = pop(evm.stack).to_signed()
    charge_gas(evm, GAS_VERY_LOW)
    if shift < 256:
        result = U256.from_signed(signed_value >> shift)
    else:
        # Shifting out every bit leaves only the sign: zero for non-negative
        # values, all ones for negative ones.
        result = U256(0) if signed_value >= 0 else U256.MAX_VALUE
    push(evm.stack, result)
    evm.pc += 1
def test_deepcopy_overridden():
    """Deep-copying a Self provider clones both it and its overriding provider."""
    original = providers.Self()
    override = providers.Provider()
    original.override(override)
    clone = providers.deepcopy(original)
    override_clone = clone.overridden[0]
    assert original is not clone
    assert isinstance(original, providers.Self)
    assert override is not override_clone
    assert isinstance(override_clone, providers.Provider)
def parse_production_per_units(xml_text: str) -> (Any | None):
    """Parse ENTSO-E per-unit production XML into per-(unit, datetime) records.

    Args:
        xml_text: raw XML response body; falsy input yields None.

    Returns:
        A view of dicts with datetime, production, productionType, unitKey
        and unitName, or None for empty input.
    """
    values = {}
    if not xml_text:
        return None
    soup = BeautifulSoup(xml_text, 'html.parser')
    for timeseries in soup.find_all('timeseries'):
        resolution = str(timeseries.find_all('resolution')[0].contents[0])
        datetime_start: arrow.Arrow = arrow.get(timeseries.find_all('start')[0].contents[0])
        # A series is production-side when it has an inBiddingZone domain.
        is_production = len(timeseries.find_all('inBiddingZone_Domain.mRID'.lower())) > 0
        psr_type = str(timeseries.find_all('mktpsrtype')[0].find_all('psrtype')[0].contents[0])
        resources = timeseries.find_all('mktpsrtype')[0].find_all('powersystemresources')[0]
        unit_key = str(resources.find_all('mrid')[0].contents[0])
        unit_name = str(resources.find_all('name')[0].contents[0])
        if not is_production:
            # Consumption-side series are skipped entirely.
            continue
        for entry in timeseries.find_all('point'):
            quantity = float(entry.find_all('quantity')[0].contents[0])
            position = int(entry.find_all('position')[0].contents[0])
            datetime = datetime_from_position(datetime_start, position, resolution)
            key = (unit_key, datetime)
            if key in values:
                # Only production series reach this point (see `continue`
                # above), so quantities are always accumulated; the original
                # `production -= quantity` branch was unreachable dead code.
                values[key]['production'] += quantity
            else:
                values[key] = {'datetime': datetime, 'production': quantity, 'productionType': ENTSOE_PARAMETER_BY_GROUP[psr_type], 'unitKey': unit_key, 'unitName': unit_name}
    return values.values()
class NameSpace(dict):
    """Track how names are used within a scope.

    Each key maps either to an int marker (1 = locally defined,
    2 = declared nonlocal, 3 = declared global, 4 = leaked from a
    sub-scope) or to a set of "how" markers for names used before
    being defined.
    """

    _pscript_overload = True

    def set_nonlocal(self, key):
        """Mark *key* as a nonlocal declaration."""
        self[key] = 2

    def set_global(self, key):
        """Mark *key* as a global declaration."""
        self[key] = 3

    def use(self, key, how):
        """Record a use of *key*; only undefined names accumulate 'how' markers."""
        entry = self.setdefault(key, set())
        if isinstance(entry, set):
            entry.add(how)

    def add(self, key):
        """Mark *key* as locally defined unless it is nonlocal/global."""
        if self.get(key, 0) not in (2, 3):
            self[key] = 1

    def discard(self, key):
        """Forget *key* if present."""
        self.pop(key, None)

    def leak_stack(self, sub):
        """Absorb a sub-scope's globals and undefined names into this scope."""
        for name in sub.get_globals():
            sub.discard(name)
            if name not in self:
                self[name] = 4  # leaked global
        for name, hows in sub.get_undefined():
            sub.discard(name)
            for how in hows:
                self.use(name, how)

    def is_known(self, name):
        """True when *name* is defined, nonlocal, or global here."""
        return self.get(name, 0) in (1, 2, 3)

    def get_defined(self):
        """Set of names defined locally."""
        return {name for name, val in self.items() if val == 1}

    def get_globals(self):
        """Set of names that are global (declared or leaked)."""
        return {name for name, val in self.items() if val in (3, 4)}

    def get_undefined(self):
        """List of (name, hows) pairs for names used but never defined."""
        return [(name, val) for name, val in self.items() if isinstance(val, set)]
class TestDataAvailabilityBasedNotificationRuleOnEgress():
    # End-to-end check that the DataAvailability notification rule fires on an
    # asset-code match and the data reaches the north egress (EDS).

    def test_data_availability_north(self, check_eds_installed, reset_fledge, start_notification, reset_eds, start_north, fledge_url, wait_time, skip_verify_north_interface, add_south, retries):
        # Point the rule at the south asset (audit code left empty).
        put_url = '/fledge/category/ruletest #1'
        data = {'auditCode': '', 'assetCode': SOUTH_ASSET_NAME}
        utils.put_request(fledge_url, urllib.parse.quote(put_url), data)
        # Start a sinusoid south service producing that asset.
        south_plugin = 'sinusoid'
        config = {'assetName': {'value': SOUTH_ASSET_NAME}}
        add_south(south_plugin, None, fledge_url, service_name='sine-test', installation_type='package', config=config)
        # Sample the NTFSN (notification sent) audit count before and after a
        # wait; the count must grow while data flows.
        get_url = '/fledge/audit?source=NTFSN'
        resp1 = utils.get_request(fledge_url, get_url)
        time.sleep(wait_time)
        get_url = '/fledge/audit?source=NTFSN'
        resp2 = utils.get_request(fledge_url, get_url)
        assert (len(resp2['audit']) > len(resp1['audit'])), 'ERROR: NTFSN not triggered properly with asset code'
        time.sleep(wait_time)
        verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
        # Finally confirm the datapoint made it into EDS with a fresh timestamp.
        r = verify_eds_data()
        assert (SOUTH_DP_NAME in r), 'Data in EDS not found!'
        ts = r.get('Time')
        # find() != -1 means today's date appears in the timestamp string.
        assert (ts.find(datetime.now().strftime('%Y-%m-%d')) != (- 1)), 'Latest data not found in EDS!'
class TestLightSettingsEncoder():
    # Validation tests for messaging.LightSettings encoding.
    #
    # NOTE(review): the bare '.parametrize(...)' lines below look like
    # '@pytest.mark.parametrize' decorators stripped of their prefix (they are
    # not valid Python as written), and several literals in
    # test_light_settings ('red': 0., 'green': 0., '0.s') appear truncated in
    # this dump — confirm against the original test file.

    def _check_light_settings(self, light_settings):
        # Helper: encoding a message with *light_settings* must raise
        # ValueError; returns the ExceptionInfo for message assertions.
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(notification=messaging.AndroidNotification(light_settings=light_settings))))
        return excinfo

    .parametrize('data', NON_OBJECT_ARGS)
    def test_invalid_light_settings(self, data):
        # Non-LightSettings values are rejected outright.
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(notification=messaging.AndroidNotification(light_settings=data))))
        expected = 'AndroidNotification.light_settings must be an instance of LightSettings class.'
        assert (str(excinfo.value) == expected)

    def test_no_color(self):
        light_settings = messaging.LightSettings(color=None, light_on_duration_millis=200, light_off_duration_millis=200)
        excinfo = self._check_light_settings(light_settings)
        expected = 'LightSettings.color is required.'
        assert (str(excinfo.value) == expected)

    def test_no_light_on_duration_millis(self):
        light_settings = messaging.LightSettings(color='#aabbcc', light_on_duration_millis=None, light_off_duration_millis=200)
        excinfo = self._check_light_settings(light_settings)
        expected = 'LightSettings.light_on_duration_millis is required.'
        assert (str(excinfo.value) == expected)

    def test_no_light_off_duration_millis(self):
        light_settings = messaging.LightSettings(color='#aabbcc', light_on_duration_millis=200, light_off_duration_millis=None)
        excinfo = self._check_light_settings(light_settings)
        expected = 'LightSettings.light_off_duration_millis is required.'
        assert (str(excinfo.value) == expected)

    .parametrize('data', NON_UINT_ARGS)
    def test_invalid_light_off_duration_millis(self, data):
        # Negative numbers and non-number/non-timedelta values get distinct messages.
        light_settings = messaging.LightSettings(color='#aabbcc', light_on_duration_millis=200, light_off_duration_millis=data)
        excinfo = self._check_light_settings(light_settings)
        if isinstance(data, numbers.Number):
            assert (str(excinfo.value) == 'LightSettings.light_off_duration_millis must not be negative.')
        else:
            assert (str(excinfo.value) == 'LightSettings.light_off_duration_millis must be a duration in milliseconds or an instance of datetime.timedelta.')

    .parametrize('data', NON_UINT_ARGS)
    def test_invalid_light_on_duration_millis(self, data):
        light_settings = messaging.LightSettings(color='#aabbcc', light_on_duration_millis=data, light_off_duration_millis=200)
        excinfo = self._check_light_settings(light_settings)
        if isinstance(data, numbers.Number):
            assert (str(excinfo.value) == 'LightSettings.light_on_duration_millis must not be negative.')
        else:
            assert (str(excinfo.value) == 'LightSettings.light_on_duration_millis must be a duration in milliseconds or an instance of datetime.timedelta.')

    .parametrize('data', (NON_STRING_ARGS + ['foo', '#xxyyzz', '112233', '#11223']))
    def test_invalid_color(self, data):
        # Malformed strings vs non-strings produce different messages.
        notification = messaging.LightSettings(color=data, light_on_duration_millis=300, light_off_duration_millis=200)
        excinfo = self._check_light_settings(notification)
        if isinstance(data, str):
            assert (str(excinfo.value) == 'LightSettings.color must be in the form #RRGGBB or #RRGGBBAA.')
        else:
            assert (str(excinfo.value) == 'LightSettings.color must be a non-empty string.')

    def test_light_settings(self):
        # Happy path: a fully specified LightSettings encodes to the expected dict.
        msg = messaging.Message(topic='topic', android=messaging.AndroidConfig(notification=messaging.AndroidNotification(light_settings=messaging.LightSettings(color='#aabbcc', light_on_duration_millis=200, light_off_duration_millis=300))))
        # NOTE(review): several values below look truncated in this dump
        # (red/green components of #aabbcc and the '0.s' durations); blue
        # (0xcc/255 == 0.8) survived intact. Restore from the original file.
        expected = {'topic': 'topic', 'android': {'notification': {'light_settings': {'color': {'red': 0., 'green': 0., 'blue': 0.8, 'alpha': 1}, 'light_on_duration': '0.s', 'light_off_duration': '0.s'}}}}
        check_encoding(msg, expected)
class ResourceUtilTest(ForsetiTestCase):
    """Tests for the resource_util factory and pluralization helpers."""

    def test_create_resource_is_ok(self):
        """create_resource builds the right concrete type for known resource types."""
        expected_org = Organization(12345)
        actual_org = resource_util.create_resource(12345, ResourceType.ORGANIZATION)
        self.assertEqual(expected_org, actual_org)
        expected_project = Project('abcd', project_number=54321)
        actual_project = resource_util.create_resource('abcd', ResourceType.PROJECT, project_number=54321)
        self.assertEqual(expected_project, actual_project)
        self.assertEqual(expected_project.project_number, actual_project.project_number)

    def test_create_nonexist_resource_returns_None(self):
        """Unknown resource types yield None rather than raising."""
        self.assertIsNone(resource_util.create_resource('fake-id', 'nonexist'))

    def test_plural_is_correct(self):
        """pluralize maps known types to their plural display names."""
        self.assertEqual('Organizations', resource_util.pluralize(ResourceType.ORGANIZATION))
        self.assertEqual('Projects', resource_util.pluralize(ResourceType.PROJECT))

    def test_plural_nonexist_resource_returns_none(self):
        """pluralize on an unknown type yields None."""
        self.assertIsNone(resource_util.pluralize('nonexistent'))

    def test_can_create_from_json(self):
        """Every creatable resource class must expose from_json."""
        for resource in resource_util._RESOURCE_TYPE_MAP.values():
            if resource['can_create_resource']:
                self.assertTrue(hasattr(resource['class'], 'from_json'), msg='%s missing from_json' % resource['class'])
class ClassificationDummyMetricResults(MetricResult):
    # Result container for the dummy-model classification quality metric.

    class Config():
        # Exclude the (potentially large) per-class metrics matrix from
        # dict and pandas serialization.
        dict_exclude_fields = {'metrics_matrix'}
        pd_exclude_fields = {'metrics_matrix'}

    # Quality of the dummy (baseline) model on the current data.
    dummy: DatasetClassificationQuality
    # Dummy-model quality computed against the reference data, if present.
    by_reference_dummy: Optional[DatasetClassificationQuality]
    # Quality of the actual model, if a model was evaluated.
    model_quality: Optional[DatasetClassificationQuality]
    # Per-class metrics (excluded from serialization, see Config).
    metrics_matrix: ClassesMetrics
# NOTE(review): the decorator name was lost in extraction; the surviving
# ``(scope='function')`` fragment plus the yield-style body indicate a
# pytest fixture, restored below -- confirm against the original source.
@pytest.fixture(scope='function')
def fullstory_postgres_db(postgres_integration_session):
    """Seed the integration DB with the fullstory dataset, then drop it."""
    postgres_integration_session = seed_postgres_data(
        postgres_integration_session,
        './tests/fixtures/saas/external_datasets/fullstory.sql')
    yield postgres_integration_session
    # Teardown: remove the seeded database after the test completes.
    drop_database(postgres_integration_session.bind.url)
class TestControllerGenerator():
    """Tests for the fastapi-mvc controller generator CLI command."""

    # NOTE(review): the decorator prefix was lost in extraction; this
    # yield-style setup/teardown is clearly a fixture, so ``@pytest.fixture``
    # has been restored -- confirm against the original source.
    @pytest.fixture
    def controller(self):
        """Yield a ControllerGenerator copy with copier and router-import patched."""
        controller = copy.deepcopy(ControllerGenerator)
        copier_patch = mock.patch('fastapi_mvc.generators.controller.copier')
        insert_patch = mock.patch('fastapi_mvc.generators.controller.insert_router_import')
        controller.copier = copier_patch.start()
        controller.insert_router_import = insert_patch.start()
        yield controller
        copier_patch.stop()
        insert_patch.stop()
        del controller

    def test_should_exit_zero_when_invoked_with_help(self, controller, cli_runner):
        result = cli_runner.invoke(controller, ['--help'])
        assert result.exit_code == 0

    def test_should_exit_error_when_invoked_with_invalid_option(self, controller, cli_runner):
        result = cli_runner.invoke(controller, ['--not_exists'])
        assert result.exit_code == 2

    def test_should_call_copier_using_default_values(self, controller, monkeypatch, fake_project, cli_runner):
        """Defaults: snake_cased controller name, no endpoints, import inserted."""
        monkeypatch.chdir(fake_project['root'])
        result = cli_runner.invoke(controller, ['fake-controller'])
        assert result.exit_code == 0
        controller.copier.run_copy.assert_called_once_with(
            src_path=COPIER_CONTROLLER.template,
            vcs_ref=COPIER_CONTROLLER.vcs_ref,
            dst_path=str(fake_project['root']),
            answers_file=ANSWERS_FILE,
            data={'project_name': 'fake-project', 'controller': 'fake_controller', 'endpoints': {}})
        controller.insert_router_import.assert_called_once_with('fake_project', 'fake_controller')

    def test_should_call_copier_with_parsed_arguments(self, controller, monkeypatch, fake_project, cli_runner):
        """Endpoint specs ``name:method`` are parsed; bare names default to get."""
        monkeypatch.chdir(fake_project['root'])
        result = cli_runner.invoke(
            controller,
            ['--skip-routes', 'STOCK-MARKET', 'ticker', 'buy:post', 'sell:delete'])
        assert result.exit_code == 0
        controller.copier.run_copy.assert_called_once_with(
            src_path=COPIER_CONTROLLER.template,
            vcs_ref=COPIER_CONTROLLER.vcs_ref,
            dst_path=str(fake_project['root']),
            answers_file=ANSWERS_FILE,
            data={'project_name': 'fake-project', 'controller': 'stock_market',
                  'endpoints': {'ticker': 'get', 'buy': 'post', 'sell': 'delete'}})

    def test_should_skip_router_import_insert(self, controller, monkeypatch, fake_project, cli_runner):
        """--skip-routes suppresses the router import insertion."""
        monkeypatch.chdir(fake_project['root'])
        result = cli_runner.invoke(controller, ['fake-controller', '--skip-routes'])
        assert result.exit_code == 0
        controller.copier.run_copy.assert_called_once_with(
            src_path=COPIER_CONTROLLER.template,
            vcs_ref=COPIER_CONTROLLER.vcs_ref,
            dst_path=str(fake_project['root']),
            answers_file=ANSWERS_FILE,
            data={'project_name': 'fake-project', 'controller': 'fake_controller', 'endpoints': {}})
        controller.insert_router_import.assert_not_called()

    def test_should_exit_error_when_not_in_fastapi_mvc_project(self, controller, cli_runner, caplog):
        result = cli_runner.invoke(controller, ['fake-controller'])
        assert result.exit_code == 1
        msg = "Not a fastapi-mvc project. Try 'fastapi-mvc new --help' for details how to create one."
        assert msg in caplog.text
def test_receipt_processing_with_ignore_flag(event_contract, indexed_event_contract,
                                             dup_txn_receipt, wait_for_transaction):
    """With errors=IGNORE both logs come back, each annotated with its error."""
    logs = indexed_event_contract.events.LogSingleWithIndex().process_receipt(
        dup_txn_receipt, errors=IGNORE)
    assert len(logs) == 2

    # Each entry carries the matching-failure description in its ``errors`` field.
    assert re.search('Expected 1 log topics. Got 0', str(logs[0].errors)) is not None
    assert re.search('The event signature did not match the provided ABI',
                     str(logs[1].errors)) is not None

    # Apart from the added ``errors`` entry the logs are unchanged and belong
    # to the event contract.
    for entry in logs:
        assert dissoc(dict(entry), 'errors') in dup_txn_receipt['logs']
        assert is_same_address(entry['address'], event_contract.address)
# NOTE(review): the ``@pytest.mark`` prefix was lost in extraction and has
# been restored in front of the surviving ``.parametrize`` fragment.
@pytest.mark.parametrize('content,expected', (
    ('a', [{'body': (['a'], 0)}]),
    ('---\na', [{'body': (['a'], 1)}]),
    ('a\n^^^', [{'body': ([], 2), 'header': (['a'], 0)}]),
    ('a\n+++', [{'body': (['a'], 0), 'footer': ([], 1)}]),
    ('a\n^^^\nb\n+++\nc', [{'body': (['b'], 2), 'footer': (['c'], 3), 'header': (['a'], 0)}]),
    ('---\n:card: a', [{'body': ([], 2), 'classes': {'card': ['a']}}]),
    ('a\n---\nb', [{'body': (['a'], 0)}, {'body': (['b'], 2)}]),
))
def test_parse_panels(content, expected):
    """parse_panels splits marked-up content into header/body/footer panels."""
    output = parse_panels(content, content_offset=0, default_classes={})
    assert output == expected
class TestMd5sums(unittest.TestCase):
    """Tests for md5sum computation and verification utilities."""

    def setUp(self):
        """Build an example directory plus its reference checksum listing."""
        self.dir = ExampleDirScooby()
        self.dir.create_directory()
        self.md5sum_dir = tempfile.mkdtemp()
        self.checksum_file = os.path.join(self.md5sum_dir, 'checksums')
        self.reference_checksums = []
        for f in self.dir.filelist(full_path=False):
            self.reference_checksums.append('%s %s' % (self.dir.checksum_for_file(f), f))
        self.reference_checksums.append('')
        self.reference_checksums = '\n'.join(self.reference_checksums)
        # Run the checks from inside the example directory.
        self.pwd = os.getcwd()
        os.chdir(self.dir.dirn)

    def tearDown(self):
        os.chdir(self.pwd)
        self.dir.delete_directory()
        shutil.rmtree(self.md5sum_dir)

    def test_compute_md5sums(self):
        """compute_md5sums writes one matching checksum line per file."""
        compute_md5sums('.', output_file=self.checksum_file, relative=True)
        # FIX: read via a context manager so the handle is always closed
        # (previously ``io.open(...).read()`` leaked the file object).
        with io.open(self.checksum_file, 'rt') as fp:
            checksums = fp.read()
        reference_checksums = self.reference_checksums.split('\n')
        reference_checksums.sort()
        checksums = checksums.split('\n')
        checksums.sort()
        for l1, l2 in zip(reference_checksums, checksums):
            self.assertEqual(l1, l2)

    def test_verify_md5sums(self):
        """verify_md5sums returns 0 when all checksums match."""
        # FIX: write via a context manager instead of explicit open/close.
        with io.open(self.checksum_file, 'wt') as fp:
            fp.write(self.reference_checksums)
        self.assertEqual(verify_md5sums(self.checksum_file), 0)

    def test_compute_md5sum_for_file(self):
        """compute_md5sum_for_file writes the checksum of a single file."""
        compute_md5sum_for_file('test.txt', output_file=self.checksum_file)
        with io.open(self.checksum_file, 'rt') as fp:
            checksum = fp.read()
        self.assertEqual('0b26e313ed4a7ca6904b0e9369e5b957 test.txt\n', checksum)

    def test_broken_links(self):
        """Checksumming a directory containing a broken symlink must not raise."""
        self.dir.add_link('broken', 'missing.txt')
        compute_md5sums('.', output_file=self.checksum_file, relative=True)
class HeterodoxyAction(UserAction):
    # Launches the first card attached to the Heterodoxy skill on behalf of
    # the first target ("victim"); the remaining targets receive the card.

    def apply_action(self):
        g = self.game
        sk = self.associated_card
        assert isinstance(sk, Heterodoxy)
        card = sk.associated_cards[0]  # the card hidden under the skill
        src = self.source
        victim = self.target           # first target acts as the launcher
        tgts = self.target_list[1:]    # remaining targets receive the card
        g.players.reveal(card)
        if card.is_card(AttackCard):
            # Attacking costs one vitality unless the handler disables that.
            if (not AttackCardVitalityHandler.is_disabled(src)):
                ttags(src)['vitality'] -= 1
        lc = LaunchCard(victim, tgts, card)
        g = self.game  # NOTE(review): re-fetched; same object as ``g`` above
        g.process_action(lc)
        return True

    def is_valid(self):
        src = self.source
        sk = self.associated_card
        assert isinstance(sk, Heterodoxy)
        card = sk.associated_cards[0]
        # An attack is only legal while the source still has vitality
        # (unless vitality accounting is disabled for him).
        if (card.is_card(AttackCard) and (ttags(src)['vitality'] < 1)):
            if (not AttackCardVitalityHandler.is_disabled(src)):
                return False
        # Only cards meant to be launched can be proxied through this skill.
        if (card.usage != 'launch'):
            return False
        victim = self.target
        tgts = self.target_list[1:]
        # Delegate target validation to the launch the victim would perform.
        lc = LaunchCard(victim, tgts, card)
        return lc.can_fire()
def _flag_level(name: str, default=None):
    """Register a new level under the named flag and return its unique key.

    All registrations of the same flag must agree on the default value;
    otherwise a FalArgsError is raised.
    """
    level = uuid4()
    flag = LEVEL_FLAGS.setdefault(name, _LevelFlag(default))
    flag.levels.append(level)
    if default != flag.default:
        raise FalArgsError(
            f"Different defaults '{default}' and '{flag.default}' for flag '{name}'")
    return f'{name}_{level}'
# NOTE(review): this block was corrupted in extraction -- substrings
# (apparently "http"-related identifiers, e.g. something like
# ``_issue_http_request``, ``httplib2.Http`` patch targets, and the
# ``httplib2`` module/response names) were stripped, leaving invalid
# syntax.  Restore the block from the original source before use; the
# text below is preserved verbatim and must not be executed as-is.
class MetadataServerTest(ForsetiTestCase): .object( 'request', autospec=True) def test_issue_ mock_req): mock_req.side_effect = _MockHttpError('Unreachable') with self.assertRaises(errors.MetadataServerHttpError): metadata_server._issue_ '', {}) def test_obtain_ returned_object = metadata_server._obtain_ self.assertIsInstance(returned_object, .object(metadata_server, '_issue_ autospec=True) def test_get_value_for_attribute_with_exception(self, mock_meta_req): mock_meta_req.side_effect = _MockMetadataServerHttpError('Unreachable') actual_response = metadata_server.get_value_for_attribute('') self.assertIsNone(actual_response) .object(metadata_server, '_issue_ autospec=True) def test_get_value_for_attribute_with_a_present_attribute(self, mock_meta_req): mock_response = 'expected_response' with mock.patch(' mock.mock_open(read_data=mock_response)) as mock_ mock_ = mock_meta_req.side_effect = mock_ actual_response = metadata_server.get_value_for_attribute('') self.assertEqual(actual_response, mock_response) .object(metadata_server, '_issue_ autospec=True) def test_get_project_id_with_exception(self, mock_meta_req): mock_meta_req.side_effect = _MockMetadataServerHttpError('Unreachable') actual_response = metadata_server.get_project_id() self.assertIsNone(actual_response) .object(metadata_server, '_issue_ autospec=True) def test_get_project_id(self, mock_meta_req): mock_response = 'test-project' with mock.patch(' mock.mock_open(read_data=mock_response)) as mock_ mock_ = mock_meta_req.side_effect = mock_ actual_response = metadata_server.get_project_id() self.assertEqual(actual_response, mock_response) .object(metadata_server, '_obtain_ autospec=True) def test_can_reach_metadata_server(self, mock_client): mock_ = mock.Mock(spec= mock_ = mock_ = metadata_server._METADATA_FLAVOR_VALUE mock_client.return_value.getresponse.side_effect = mock_ self.assertTrue(metadata_server.can_reach_metadata_server()) .object( 'request', autospec=True) def 
test_can_reach_metadata_server_timeout(self, mock_req): mock_req.side_effect = self.assertFalse(metadata_server.can_reach_metadata_server())
def test_multi_index_slicing_block_single():
    """Selecting index 0 along the last dim keeps a singleton dimension."""
    source = torch.rand([2, 1, 4])
    block = MultiIndexSlicingBlock(
        in_keys='mask', out_keys='selected',
        in_shapes=(source.shape[-1],),
        select_dim=-1, select_idxs=[0])
    out = block({'mask': source})
    assert out['selected'].shape == torch.Size([2, 1, 1])
def test_longitude_continuity():
    """longitude_continuity normalizes coordinates/region to [0, 360) or [-180, 180).

    Refactors the original copy-pasted scenarios into a single helper; the
    asserted values are unchanged.
    """
    longitude_360 = np.linspace(0, 350, 36)
    longitude_180 = np.hstack((longitude_360[:18], longitude_360[18:] - 360))
    latitude = np.linspace(-90, 90, 36)
    s, n = -90, 90

    def check(w, e, expected_w, expected_e, expected_longitude):
        # Both input conventions must converge to the same output convention.
        for longitude in (longitude_360, longitude_180):
            coordinates = [longitude, latitude]
            coordinates_new, region_new = longitude_continuity(coordinates, (w, e, s, n))
            w_new, e_new = region_new[:2]
            assert w_new == expected_w
            assert e_new == expected_e
            npt.assert_allclose(coordinates_new[0], expected_longitude)

    # Region entirely in [0, 360): keep the 0-360 convention.
    check(10.5, 20.3, 10.5, 20.3, longitude_360)
    # Region crossing zero with negative west: use the -180/180 convention.
    check(-20, 20, -20, 20, longitude_180)
    # Global regions normalize to (0, 360).
    for w, e in [(0, 360), (-180, 180), (-20, 340)]:
        check(w, e, 0, 360, longitude_360)
    # Degenerate (w == e) region.
    check(20, 20, 20, 20, longitude_360)
    # Wide positive region stays in the 0-360 convention.
    check(0, 200, 0, 200, longitude_360)
    # Symmetric region around zero uses the -180/180 convention.
    check(-160, 160, -160, 160, longitude_180)
def test_reset_settings():
    """reset() restores the default value of a previously changed setting."""
    try:
        settings = Settings(organization='normcap_TEST')
        settings.setValue('mode', 'raw')
        assert settings.value('mode') == 'raw'
        settings.reset()
        # 'parse' is the shipped default for the 'mode' setting.
        assert settings.value('mode') == 'parse'
    finally:
        # Always drop the test organization's stored settings.
        settings.clear()
class cached_property():
    """Descriptor caching the wrapped function's result per instance.

    The computed value is stored in the instance ``__dict__`` under the
    attribute name, so subsequent lookups bypass the descriptor entirely.
    An ``RLock`` guards the first computation (double-checked locking) so
    concurrent first accesses invoke ``func`` only once.
    """

    def __init__(self, func: Callable) -> None:
        self.func = func
        self.attrname = None  # set later by __set_name__
        self.__doc__ = func.__doc__
        self.lock = RLock()

    def __set_name__(self, _: Any, name: Any) -> None:
        # Record the attribute name once; reusing the same descriptor under a
        # second name would corrupt the cache, so that is rejected.
        if (self.attrname is None):
            self.attrname = name
        elif (name != self.attrname):
            raise TypeError(f'Cannot assign the same cached_property to two different names ({self.attrname!r} and {name!r}).')

    def __get__(self, instance: Any, _: Optional[Any]=None) -> Any:
        # Class-level access returns the descriptor itself.
        if (instance is None):
            return self
        if (self.attrname is None):
            raise TypeError('Cannot use cached_property instance without calling __set_name__ on it.')
        try:
            cache = instance.__dict__
        except AttributeError:
            # Instances with __slots__ (no __dict__) cannot host the cache.
            msg = f"No '__dict__' attribute on {type(instance).__name__!r} instance to cache {self.attrname!r} property."
            raise TypeError(msg) from None
        val = cache.get(self.attrname, _NOT_FOUND)
        if (val is _NOT_FOUND):
            with self.lock:
                # Double-check: another thread may have populated the cache
                # while we were waiting for the lock.
                val = cache.get(self.attrname, _NOT_FOUND)
                if (val is _NOT_FOUND):
                    val = self.func(instance)
                    try:
                        cache[self.attrname] = val
                    except TypeError:
                        # e.g. a read-only mapping proxy as __dict__.
                        msg = f"The '__dict__' attribute on {type(instance).__name__!r} instance does not support item assignment for caching {self.attrname!r} property."
                        raise TypeError(msg) from None
        return val
def user_remove_moderator(moderator: ModeratorModel, board: BoardModel, username: str):
    """Remove a moderator from a board; returns True when removing oneself."""
    member = find_moderator_username(username)
    if not member:
        raise ArgumentError('Moderator not found')
    if has_any_of_board_roles(member, board, [roles.BOARD_ROLE_CREATOR]):
        raise ArgumentError('Cannot remove creator')

    removing_self = moderator.id == member.id
    if removing_self:
        action = action_authorizer.ModeratorBoardAction.MODERATOR_REMOVE_SELF
        log_text = 'Removed self'
    else:
        action = action_authorizer.ModeratorBoardAction.MODERATOR_REMOVE
        log_text = 'Removed {}'.format(member.username)

    # Authorization is checked before any mutation happens.
    action_authorizer.authorize_board_action(moderator, board, action)
    board_service.remove_moderator(board, member)
    log(ModeratorLogType.MODERATOR_REMOVE, moderator, board, log_text)
    return removing_self
class VRRPStatistics(object):
    """Mutable VRRP protocol counters for a single virtual router."""

    # Integer counters exported by get_stats(), all starting at zero.
    _COUNTER_NAMES = (
        'tx_vrrp_packets',
        'rx_vrrp_packets',
        'rx_vrrp_zero_prio_packets',
        'tx_vrrp_zero_prio_packets',
        'rx_vrrp_invalid_packets',
        'rx_vrrp_bad_auth',
        'idle_to_master_transitions',
        'idle_to_backup_transitions',
        'backup_to_master_transitions',
        'master_to_backup_transitions',
    )

    def __init__(self, name, resource_id, statistics_interval):
        self.name = name
        self.resource_id = resource_id
        self.statistics_interval = statistics_interval
        for counter in self._COUNTER_NAMES:
            setattr(self, counter, 0)

    def get_stats(self):
        """Return a snapshot dict of all counters plus a local timestamp."""
        stats = {counter: getattr(self, counter) for counter in self._COUNTER_NAMES}
        stats['timestamp'] = time.strftime('%Y-%m-%dT%H:%M:%S')
        stats['resource_id'] = self.resource_id
        return stats
class RejectRequestsWithEscapedSlashesDisabled(AmbassadorTest):
    """Requests with escaped slashes (%2F) are rejected (404) by default."""

    target: ServiceType

    def init(self):
        self.target = HTTPBin()

    def config(self) -> Generator[Union[str, Tuple[Node, str]], None, None]:
        yield (self, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.name}\nhostname: "*"\nprefix: /{self.name}/status/\nrewrite: /status/\nservice: {self.target.path.fqdn}\n'))

    def queries(self):
        # A %2F in the path must be rejected with a 404 by default.
        yield Query(self.url(self.name + '/status/%2F200'), expected=404)

    def check(self):
        # FIX: print() does not interpolate %-format strings; the original
        # passed the format string and value as two separate arguments
        # (logger-style), printing the tuple instead of the headers.
        print('headers=%s' % repr(self.results[0].headers))
        assert 'X-Envoy-Upstream-Service-Time' in self.results[0].headers
def typeof(obj):
    """Return the typing annotation fully describing *obj*.

    Recurses into tuples, lists, dicts, sets, and numpy arrays.

    Raises:
        ValueError: for empty or type-heterogeneous containers.
        NotImplementedError: for unsupported object types.
    """
    if isinstance(obj, _simple_types):
        return type(obj)
    if isinstance(obj, tuple):
        # Tuples are typed element-wise.
        return Tuple[tuple(typeof(elem) for elem in obj)]
    if isinstance(obj, (list, dict, set)) and not obj:
        raise ValueError(f'Cannot determine the full type of an empty {type(obj)}')
    if isinstance(obj, list):
        type_elem = type(obj[0])
        if not all(isinstance(elem, type_elem) for elem in obj):
            raise ValueError(f'The list {obj} is not homogeneous in type')
        return List[typeof(obj[0])]
    if isinstance(obj, (dict, set)):
        key = next(iter(obj))
        type_key = type(key)
        if not all(isinstance(key, type_key) for key in obj):
            # FIX: message previously lacked the f-prefix (printed the literal
            # '{obj}') and always said "dict" even for sets.
            raise ValueError(f'The {type(obj).__name__} {obj} is not homogeneous in type')
        if isinstance(obj, dict):
            value = next(iter(obj.values()))
            type_value = type(value)
            if not all(isinstance(value, type_value) for value in obj.values()):
                # FIX: message previously lacked the f-prefix.
                raise ValueError(f'The dict {obj} is not homogeneous in type')
            return Dict[typeof(key), typeof(value)]
        else:
            return Set[typeof(key)]
    # NOTE: a second, unreachable ``isinstance(obj, tuple)`` branch was
    # removed here (tuples are handled above).
    if isinstance(obj, np.ndarray):
        if np.isscalar(obj):
            return obj.dtype.type
        return Array[obj.dtype, f'{obj.ndim}d']
    if isinstance(obj, np.generic):
        return type(obj)
    raise NotImplementedError(f'Not able to determine the full type of {obj} (of type {type(obj)})')
def test_chaindb_get_score(chaindb):
    """Scores accumulate header difficulty along the chain."""

    def stored_score(header_hash):
        # Decode the score directly from the underlying key/value store.
        key = SchemaV1.make_block_hash_to_score_lookup_key(header_hash)
        return rlp.decode(chaindb.db.get(key), sedes=rlp.sedes.big_endian_int)

    genesis = BlockHeader(difficulty=1, block_number=0, gas_limit=0)
    chaindb.persist_header(genesis)
    assert stored_score(genesis.hash) == 1
    assert chaindb.get_score(genesis.hash) == 1

    block1 = BlockHeader(difficulty=10, block_number=1, gas_limit=0,
                         parent_hash=genesis.hash,
                         timestamp=genesis.timestamp + 1)
    chaindb.persist_header(block1)
    # Child score = parent score (1) + own difficulty (10).
    assert stored_score(block1.hash) == 11
    assert chaindb.get_score(block1.hash) == 11
# NOTE(review): the decorator name was truncated by extraction -- this is
# presumably ``@provides(IDialog)`` (pyface interface registration); restore
# from the original source before use.
(IDialog)
class Dialog(MDialog, Window):
    # Qt implementation of a (modal or nonmodal) dialog window.

    cancel_label = Str()
    help_id = Str()
    help_label = Str()
    ok_label = Str()
    resizeable = Bool(True)
    return_code = Int(OK)
    style = Enum('modal', 'nonmodal')
    title = Str('Dialog')

    # (signal, handler) pairs tracked so they can be disconnected on destroy.
    _connections_to_remove = List(Tuple(Any, Callable))

    def _create_buttons(self, parent):
        """Create the OK/Cancel(/Help) button box wired to the dialog."""
        buttons = QtGui.QDialogButtonBox()
        # OK button: custom label if configured, standard button otherwise.
        if self.ok_label:
            btn = buttons.addButton(self.ok_label, QtGui.QDialogButtonBox.ButtonRole.AcceptRole)
        else:
            btn = buttons.addButton(QtGui.QDialogButtonBox.StandardButton.Ok)
        btn.setDefault(True)
        btn.clicked.connect(self.control.accept)
        self._connections_to_remove.append((btn.clicked, self.control.accept))
        # Cancel button.
        if self.cancel_label:
            btn = buttons.addButton(self.cancel_label, QtGui.QDialogButtonBox.ButtonRole.RejectRole)
        else:
            btn = buttons.addButton(QtGui.QDialogButtonBox.StandardButton.Cancel)
        btn.clicked.connect(self.control.reject)
        self._connections_to_remove.append((btn.clicked, self.control.reject))
        # Help button only when a help id is configured; note it is not
        # connected to any handler here.
        if (len(self.help_id) > 0):
            if self.help_label:
                buttons.addButton(self.help_label, QtGui.QDialogButtonBox.ButtonRole.HelpRole)
            else:
                buttons.addButton(QtGui.QDialogButtonBox.StandardButton.Help)
        return buttons

    def _create_contents(self, parent):
        """Lay out the dialog area above the button box."""
        layout = QtGui.QVBoxLayout()
        if (not self.resizeable):
            layout.setSizeConstraint(QtGui.QLayout.SizeConstraint.SetFixedSize)
        layout.addWidget(self._create_dialog_area(parent))
        layout.addWidget(self._create_buttons(parent))
        parent.setLayout(layout)

    def _create_dialog_area(self, parent):
        """Create the placeholder content panel (red fill, 100x200 minimum)."""
        panel = QtGui.QWidget(parent)
        panel.setMinimumSize(QtCore.QSize(100, 200))
        palette = panel.palette()
        palette.setColor(QtGui.QPalette.ColorRole.Window, QtGui.QColor('red'))
        panel.setPalette(palette)
        panel.setAutoFillBackground(True)
        return panel

    def _show_modal(self):
        """Run the dialog modally and map the Qt result to a pyface code."""
        dialog = self.control
        dialog.setWindowModality(QtCore.Qt.WindowModality.ApplicationModal)
        dialog.setWindowFlags((dialog.windowFlags() & (~ QtCore.Qt.WindowType.WindowContextHelpButtonHint)))
        # Prefer exec() when available, falling back to exec_().
        if hasattr(self.control, 'exec'):
            retval = self.control.exec()
        else:
            retval = self.control.exec_()
        return _RESULT_MAP[retval]

    def destroy(self):
        """Disconnect all tracked signal handlers, then destroy the window."""
        while self._connections_to_remove:
            (signal, handler) = self._connections_to_remove.pop()
            signal.disconnect(handler)
        super().destroy()

    def _create_control(self, parent):
        """Create the QDialog and apply style/size/position/title settings."""
        dlg = QtGui.QDialog(parent)
        # Nonmodal dialogs report their result through the finished signal.
        if (self.style == 'nonmodal'):
            dlg.finished.connect(self._finished_fired)
            self._connections_to_remove.append((dlg.finished, self._finished_fired))
        # (-1, -1) means "use the default" for both size and position.
        if (self.size != ((- 1), (- 1))):
            dlg.resize(*self.size)
        if (self.position != ((- 1), (- 1))):
            dlg.move(*self.position)
        dlg.setWindowTitle(self.title)
        return dlg

    def _finished_fired(self, result):
        """Nonmodal completion handler: record the return code and close."""
        self.return_code = _RESULT_MAP[result]
        self.close()
def _ocr_tables_standarize_cell(cell) -> Cell:
    """Convert a provider OCR table cell into the standardized Cell model."""
    is_header = 'COLUMN_HEADER' in cell.entityTypes
    # FIX: row_index/col_index were crossed (row_index took columnIndex and
    # vice versa) while the spans were mapped straight -- now both axes map
    # consistently: row <- rowIndex/rowSpan, col <- columnIndex/columnSpan.
    return Cell(
        text=cell.mergedText,
        row_index=cell.rowIndex,
        col_index=cell.columnIndex,
        row_span=cell.rowSpan,
        col_span=cell.columnSpan,
        confidence=cell.confidence,
        is_header=is_header,
        bounding_box=BoundixBoxOCRTable(
            left=cell.geometry.boundingBox.left,
            top=cell.geometry.boundingBox.top,
            width=cell.geometry.boundingBox.width,
            height=cell.geometry.boundingBox.height))
class TestColumnValueMean(BaseFeatureDataQualityMetricsTest):
    """Checks that the column mean stays close to the reference mean."""

    name: ClassVar = 'Mean Value'

    def get_stat(self, current: NumericCharacteristics):
        """Return the mean from precomputed characteristics."""
        return current.mean

    def get_condition_from_reference(self, reference: Optional[ColumnCharacteristics]) -> TestValueCondition:
        """Derive the condition (mean within 10% of reference) from reference stats."""
        if reference is None:
            raise ValueError('Neither required test parameters nor reference data has been provided.')
        if not isinstance(reference, NumericCharacteristics):
            raise ValueError(f'{self.column_name} should be numerical or bool')
        return TestValueCondition(eq=approx(reference.mean, 0.1))

    def calculate_value_for_test(self) -> Optional[Numeric]:
        """Return the current mean, validating the column is numeric."""
        stats = self.metric.get_result().current_characteristics
        if not isinstance(stats, NumericCharacteristics):
            raise ValueError(f'{self.column_name} should be numerical or bool')
        return stats.mean

    def get_description(self, value: Numeric) -> str:
        """Human-readable summary of the tested value and threshold."""
        return f'The mean value of the column **{self.column_name}** is {value:.3g}. The test threshold is {self.get_condition()}.'
class TestMap8(_MapTest):
    """Pathfinding and visual-range tests on the MAP8 fixture."""

    map_data = {'map': MAP8, 'zcoord': 'map8'}
    map_display = MAP8_DISPLAY

    def test_str_output(self):
        """The map's str() rendering matches the expected display."""
        stripped_map = '\n'.join((line.rstrip() for line in str(self.map).split('\n')))
        self.assertEqual(MAP8_DISPLAY, stripped_map.replace('||', '|'))

    # NOTE(review): the decorator names below were lost in extraction; the
    # surviving argument lists match ``@parameterized.expand`` (each tuple is
    # unpacked into the test's positional parameters) -- confirm against the
    # original source.
    @parameterized.expand([((2, 0), (2, 2), ('n',)), ((0, 0), (5, 3), ('e', 'e')), ((5, 1), (0, 3), ('w', 'w', 'n', 'w')), ((1, 1), (2, 2), ('n', 'w', 's')), ((5, 3), (5, 3), ()), ((5, 3), (0, 4), ('s', 'n', 'w', 'n')), ((1, 4), (3, 3), ('e', 'w', 'e'))])
    def test_shortest_path(self, startcoord, endcoord, expected_directions):
        """Shortest paths between node coordinates follow the expected steps."""
        directions, _ = self.map.get_shortest_path(startcoord, endcoord)
        self.assertEqual(expected_directions, tuple(directions))

    @parameterized.expand([((2, 2), 1, None, ' #-o \n | \n# o \n| | \no-o--#\n | \n o \n | \n # ')])
    def test_get_visual_range__nodes__character(self, coord, dist, max_size, expected):
        """Visual range rendering in 'nodes' mode around a character."""
        mapstr = self.map.get_visual_range(coord, dist=dist, mode='nodes', character='', max_size=max_size)
        self.assertEqual(expected, mapstr.replace('||', '|'))

    @parameterized.expand([((2, 2), (3, 2), 1, None, ' #-o \n | \n# o \n| | \no-o-.\n | \n o \n | \n # '), ((2, 2), (5, 3), 1, None, ' #-o \n | \n# o \n| | \no-o--#\n . \n . \n . \n ...'), ((2, 2), (5, 3), 2, None, '#-#-o \n| \\| \n#-o-o-# .\n| |\\ .\no-o--# .\n . . \n . . \n . . \n#---... '), ((5, 3), (2, 2), 2, (13, 7), ' o-o\n | |\n o-\n .\n. .\n. . '), ((5, 3), (1, 1), 2, None, ' o-o\n | |\n o-\n. .\n..... .\n . . \n . . \n . . \n#---... ')])
    def test_get_visual_range_with_path(self, coord, target, dist, max_size, expected):
        """Visual range rendering that also draws the path to a target."""
        mapstr = self.map.get_visual_range(coord, dist=dist, mode='nodes', target=target, target_path_style='.', character='', max_size=max_size)
        self.assertEqual(expected, mapstr.replace('||', '|'))

    def test_spawn(self):
        """Spawning the grid creates the expected rooms and exits."""
        self.grid.spawn()
        self.assertEqual(xyzroom.XYZRoom.objects.all().count(), 12)
        self.assertEqual(xyzroom.XYZExit.objects.all().count(), 28)
# NOTE(review): three decorator names were truncated to ``_required`` by
# extraction (view guards, e.g. login/permission/ajax checks) -- restore
# them from the original source before use.
_required
_required
_required
def dc_node_list(request):
    # Datacenter node list view: collects DcNode rows plus, for staff users,
    # the add/edit form and the option to also list nodes from other DCs.
    context = collect_view_data(request, 'dc_node_list', mb_addon=SIZE_FIELD_MB_ADDON)
    context['can_edit'] = can_edit = request.user.is_staff
    # 'all' is only honoured for staff: include nodes outside the current DC.
    context['all'] = _all = (can_edit and request.GET.get('all', False))
    context['qs'] = get_query_string(request, all=_all).urlencode()
    context['dc_nodes'] = get_dc_nodes(request, prefetch_dc=_all, prefetch_vms_count=True)
    if can_edit:
        context['form'] = DcNodeForm(request, None, initial={'strategy': DcNode.SHARED, 'priority': DcNode.PRIORITY})
        # NOTE(review): with _all the queryset itself is stored (truthy when
        # non-empty), without _all only its .exists() boolean -- presumably
        # the template iterates the former; confirm intent.
        if _all:
            context['can_add'] = Node.objects.exclude(dc=request.dc)
        else:
            context['can_add'] = Node.objects.exclude(dc=request.dc).exists()
    return render(request, 'gui/dc/node_list.html', context)
# NOTE(review): the ``@pytest.mark`` prefix was lost in extraction and has
# been restored in front of the surviving ``.django_db`` fragment.
@pytest.mark.django_db
def test_contract_pricing(award_data_fixture, elasticsearch_award_index):
    """type_of_contract_pricing keyword queries hit only matching awards."""
    elasticsearch_award_index.update_index()
    client = elasticsearch_award_index.client

    # Exactly one indexed award has pricing type '2' ...
    query = create_query({'match': {'type_of_contract_pricing.keyword': '2'}})
    response = client.search(index=elasticsearch_award_index.index_name, body=query)
    assert response['hits']['total']['value'] == 1

    # ... and none have pricing type '1'.
    query = create_query({'match': {'type_of_contract_pricing.keyword': '1'}})
    response = client.search(index=elasticsearch_award_index.index_name, body=query)
    assert response['hits']['total']['value'] == 0
class NullableUUIDForeignKeySourceSerializer(serializers.ModelSerializer):
    # Serializes the FK target by its UUID primary key; null is permitted.
    target = serializers.PrimaryKeyRelatedField(pk_field=serializers.UUIDField(), queryset=UUIDForeignKeyTarget.objects.all(), allow_null=True)

    class Meta():
        model = NullableUUIDForeignKeySource
        fields = ('id', 'name', 'target')
def get_stattest(reference_data: SparkSeries, current_data: SparkSeries, feature_type: Union[(ColumnType, str)], stattest_func: Optional[PossibleStatTestType]) -> StatTest:
    """Resolve the statistical test to use for a Spark column.

    Falls back to the registered default for the column type when no
    explicit test function is given.
    """
    if isinstance(feature_type, str):
        feature_type = ColumnType(feature_type)
    if stattest_func is not None:
        return get_registered_stattest(stattest_func, feature_type, engine=SparkEngine)
    return _get_default_stattest(reference_data, current_data, feature_type)
class LegacyWafRule(ModelNormal):
    # NOTE(review): OpenAPI-generator model boilerplate.  Several decorator
    # names were truncated by extraction: ``_property`` is presumably
    # ``@cached_property`` and ``_js_args_to_python_args`` presumably
    # ``@convert_js_args_to_python_args`` (``_from_openapi_data`` is likely
    # also missing ``@classmethod``) -- restore from the generated original
    # before use; this text is preserved verbatim.

    allowed_values = {}

    validations = {}

    _property
    def additional_properties_type():
        # Accepted types for properties not listed in ``openapi_types``.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Declared property names mapped to their accepted type tuples.
        return {'message': (str,), 'rule_id': (str,), 'severity': (int,), 'source': (str,), 'vcl': (str,)}

    _property
    def discriminator():
        return None

    # JSON key -> python attribute mapping (identity here).
    attribute_map = {'message': 'message', 'rule_id': 'rule_id', 'severity': 'severity', 'source': 'source', 'vcl': 'vcl'}

    read_only_vars = {}

    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        # Alternate constructor used when deserializing API payloads.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration asks for it.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        # Public constructor; rejects positional args and read-only attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class BaseInfoTarget():
    """Target builder copying one column of a base-info dataset as ``y``."""

    def __init__(self, data_key: str, col: str):
        # data_key: entry of the ``data`` mapping to load from.
        # col: source column that becomes the target column ``y``.
        self.data_key = data_key
        self.col = col

    def calculate(self, data, index: pd.DataFrame) -> pd.DataFrame:
        """Return a ticker-indexed frame with the chosen column renamed to ``y``.

        Missing tickers get NaN via the left merge.
        """
        loaded = data[self.data_key].load(index['ticker'].values)
        base = loaded[['ticker', self.col]]
        merged = pd.merge(index, base, on='ticker', how='left')
        merged = merged.rename({self.col: 'y'}, axis=1)
        return merged[['ticker', 'y']].set_index(['ticker'])
def profile_callable(func: Callable, cache_flush_slab: torch.Tensor, n_iter: int) -> Tuple[(List[int], List[int])]:
    """Profile ``func`` on CUDA over ``n_iter`` iterations.

    Returns:
        (device_times, wall_times): per-iteration CUDA kernel time sums and
        first-to-last event wall spans, from the profiler's event records.
    """
    if (n_iter <= 0):
        return ([], [])
    # Warm-up iterations outside the profiled region.
    for _ in range(5):
        func()
    with torch.profiler.profile(activities=[torch.profiler.ProfilerActivity.CUDA], record_shapes=True) as prof:
        for _ in range(n_iter):
            # Write the slab before each iteration (cache-flush between runs).
            cache_flush_slab.fill_(3.7)
            func()
    results = prof.key_averages().table(sort_by='self_cuda_time_total', max_name_column_width=120, row_limit=(- 1))
    logger.info(results)
    # Keep only events with nonzero CUDA time, ordered by start timestamp.
    events = [{'name': e.name, 'cuda_time': e.cuda_time, 'start': e.time_range.start, 'end': e.time_range.end} for e in prof.events() if (e.cuda_time != 0)]
    sorted_events = sorted(filter((lambda e: (e['name'] != 'Context Sync')), events), key=itemgetter('start'))
    # Every iteration must have produced the same number of events.
    assert (0 == (len(sorted_events) % n_iter))
    n_groups = (len(sorted_events) // n_iter)
    # Chunk events into per-iteration groups; ``g[1:]`` drops each group's
    # first event (presumably the cache-flush fill kernel -- verify).
    event_groups = [g[1:] for g in zip(*([iter(sorted_events)] * n_groups))]
    logger.info(f"First kernel sequence: {list(map(itemgetter('name'), event_groups[0]))}")
    device_times = [sum(map(itemgetter('cuda_time'), g)) for g in event_groups]
    wall_times = [((g[(- 1)]['end'] - g[0]['start']) if (len(g) > 0) else 0) for g in event_groups]
    return (device_times, wall_times)
class _CoreManager(Activity):
    # Activity that owns the BGP CoreService and its configuration holders.
    # NOTE(review): ``common_conf``/``neighbors_conf``/``vrfs_conf`` read like
    # accessors that were probably ``@property``-decorated in the original
    # (the decorators appear stripped by extraction) -- confirm upstream;
    # as written here they are plain methods.

    def __init__(self):
        self._common_conf = None
        self._neighbors_conf = None
        self._vrfs_conf = None
        self._core_service = None
        super(_CoreManager, self).__init__()

    def _run(self, *args, **kwargs):
        # Build configuration, start the core service and block until done.
        self._common_conf = kwargs.pop('common_conf')
        self._neighbors_conf = NeighborsConf()
        self._vrfs_conf = VrfsConf()
        # Imported here, at call time rather than module import time.
        from ryu.services.protocols.bgp.core import CoreService
        self._core_service = CoreService(self._common_conf, self._neighbors_conf, self._vrfs_conf)
        waiter = kwargs.pop('waiter')
        core_activity = self._spawn_activity(self._core_service, waiter=waiter)
        core_activity.wait()

    def get_core_service(self):
        self._check_started()
        return self._core_service

    def _check_started(self):
        # Accessors are only valid once _run has populated the fields.
        if (not self.started):
            raise ActivityException('Cannot access any property before activity has started')

    def common_conf(self):
        self._check_started()
        return self._common_conf

    def neighbors_conf(self):
        self._check_started()
        return self._neighbors_conf

    def vrfs_conf(self):
        self._check_started()
        return self._vrfs_conf
def extractAvert(item):
    """Map an Avert feed item to a release message for a known series.

    Returns the built release message, or False when the title carries no
    volume/chapter/fragment information or matches no known series.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (vol or chp or frag):
        return False

    lowered = item['title'].lower()

    if 'rokujouma' in lowered:
        return buildReleaseMessageWithType(item, 'Rokujouma no Shinryakusha!', vol, chp, frag=frag, postfix=postfix)

    fuyo_title = 'Boku wa Isekai de Fuyo Mahou to Shoukan Mahou wo Tenbin ni Kakeru'
    if ('fuyo shoukan mahou' in lowered) or ('fuyo shoukan mahou' in item['tags']) or ('fuyou shoukan mahou' in lowered):
        return buildReleaseMessageWithType(item, fuyo_title, vol, chp, frag=frag, postfix=postfix)

    if ('regarding reincarnated to slime chapter' in lowered) or ('Tensei Shitara Slime Datta Ken' in item['tags']):
        return buildReleaseMessageWithType(item, 'Tensei Shitara Slime Datta Ken', vol, chp, frag=frag, postfix=postfix)

    # Fallback: release-style titles that spell out volume and chapter.
    if re.match('^Release:? fuyo shoukan mahou? vol ?\\d+ chapter \\d+', item['title'], re.IGNORECASE):
        return buildReleaseMessageWithType(item, fuyo_title, vol, chp, frag=frag, postfix=postfix)

    return False
class op(bpy.types.Operator):
    """Blender operator: stitch other UV islands to the current selection."""

    bl_idname = 'uv.textools_stitch'
    bl_label = 'Stitch'
    bl_description = 'Stitch other Islands to the selection'
    bl_options = {'REGISTER', 'UNDO'}

    # NOTE(review): takes `cls` — presumably declared with @classmethod in the
    # original (Blender's poll convention); the decorator is not visible here.
    def poll(cls, context):
        """Operator is available only in the UV editor, on a mesh in edit
        mode, with UV sync selection off and at least one UV layer."""
        if (bpy.context.area.ui_type != 'UV'):
            return False
        if (not bpy.context.active_object):
            return False
        if (bpy.context.active_object.mode != 'EDIT'):
            return False
        if (bpy.context.active_object.type != 'MESH'):
            return False
        if context.scene.tool_settings.use_uv_select_sync:
            return False
        if (not bpy.context.object.data.uv_layers):
            return False
        return True

    def execute(self, context):
        """Run `main` once per selected object (multi-object edit support)."""
        utilities_uv.multi_object_loop(main, self, context)
        return {'FINISHED'}
class SelectFwdSingle(GroupTest):
    """OpenFlow conformance test: a SELECT group with a single bucket must
    forward matching packets out of that bucket's output port."""

    def runTest(self):
        (port1, port2) = openflow_ports(2)
        # SELECT group 1 with one weighted bucket -> output to port2.
        msg = ofp.message.group_add(group_type=ofp.OFPGT_SELECT, group_id=1, buckets=[create_bucket(weight=1, actions=[ofp.action.output(port2)])])
        self.controller.message_send(msg)
        do_barrier(self.controller)
        # Catch-all flow directing every packet to group 1.
        msg = ofp.message.flow_add(buffer_id=ofp.OFP_NO_BUFFER, instructions=[ofp.instruction.apply_actions([ofp.action.group(1)])])
        self.controller.message_send(msg)
        do_barrier(self.controller)
        verify_no_errors(self.controller)
        pkt = simple_tcp_packet()
        self.dataplane.send(port1, str(pkt))
        # The packet must appear on the bucket's port and nowhere else.
        verify_packets(self, pkt, [port2])
class Migration(migrations.Migration):
    """Mark the MPTT bookkeeping columns (level/lft/rght) on Forum as
    non-editable; no schema change beyond field options."""

    dependencies = [('forum', '0010_auto__1401')]

    operations = [migrations.AlterField(model_name='forum', name='level', field=models.PositiveIntegerField(editable=False)), migrations.AlterField(model_name='forum', name='lft', field=models.PositiveIntegerField(editable=False)), migrations.AlterField(model_name='forum', name='rght', field=models.PositiveIntegerField(editable=False))]
def load_video_transcript(url, audio_url, page_id='', data_folder='', run_id='', audio2text=True, enable_cache=True):
    """Load a video transcript, trying in order: Redis cache, YouTube
    subtitles per configured language, then audio transcription fallback.

    Returns a (transcript, metadata) tuple. `metadata` is the loader
    document metadata when subtitles were found, otherwise an empty dict.
    """
    loader = LLMYoutubeLoader()
    transcript_langs = os.getenv('YOUTUBE_TRANSCRIPT_LANGS', 'en')
    langs = transcript_langs.split(',')
    print(f'Loading Youtube transcript, supported language list: {langs}, video_url: {url}, audio_url: {audio_url}, page_id: {page_id}, audio2text: {audio2text}, enable_cache: {enable_cache}')
    # Sites we cannot extract transcripts from at all.
    excluded_list = ['twitch.tv']
    for excluded_site in excluded_list:
        if (excluded_site in url):
            print(f"[WARN] Doesn't support load video transcript from {excluded_site}, SKIP and RETURN")
            return ('', {})
    client = DBClient()
    redis_key_expire_time = os.getenv('BOT_REDIS_KEY_EXPIRE_TIME', 604800)
    if enable_cache:
        transcript = client.get_notion_summary_item_id('reddit_transcript', 'default', page_id)
        if transcript:
            transcript = bytes2str(transcript)
            print(f'[[utils.load_video_transcript]] Found cached video transcript: {transcript[:200]}...')
            return (transcript, {})
        else:
            print(f'[utils.load_video_transcript] cannot find cached transcript, will load it from original video, url: {url}, page_id: {page_id}')
    docs = []
    transcript = ''
    metadata = {}
    # Try subtitle languages in preference order; stop at the first hit.
    for lang in langs:
        print(f'Loading Youtube transcript with language {lang} ...')
        docs = loader.load(url, language=lang)
        if (len(docs) > 0):
            print(f'Found transcript for language {lang}, number of docs returned: {len(docs)}')
            break
    if (not docs):
        print(f'[WARN] Transcipt not found for language list: {langs}')
        if audio2text:
            # No subtitles anywhere: download the audio and run speech-to-text.
            st = time.time()
            print(f'Audio2Text enabled, transcribe it, page_id: {page_id}, url: {url}, audio_url: {audio_url} ...')
            op_a2t = OperatorAudioToText(model_name='base')
            audio_file = op_a2t.extract_audio(page_id, audio_url, data_folder, run_id)
            print(f'Extracted audio file: {audio_file}')
            audio_text = op_a2t.transcribe(audio_file)
            print(f'Transcribed audio text (total {(time.time() - st):.2f}s): {audio_text}')
            transcript = (audio_text.get('text') or '')
    else:
        # Concatenate all subtitle documents; keep the last doc's metadata.
        for doc in docs:
            transcript += doc.page_content
            transcript += '\n'
            metadata = doc.metadata
    if enable_cache:
        # NOTE(review): cache key namespace is 'reddit_transcript' even though
        # this loads YouTube content — looks intentional/shared; confirm.
        client.set_notion_summary_item_id('reddit_transcript', 'default', page_id, transcript, expired_time=int(redis_key_expire_time))
    return (transcript, metadata)
class BatchResponse():
    """Aggregate of the per-message responses of one batch send operation.

    Each element of `responses` must expose a boolean `success` attribute.
    """

    def __init__(self, responses):
        self._responses = responses
        # Count successes once, up front, so the accessors stay O(1).
        self._success_count = sum(1 for resp in responses if resp.success)

    def responses(self):
        """Return the list of individual responses."""
        return self._responses

    def success_count(self):
        """Return the number of successful responses."""
        return self._success_count

    def failure_count(self):
        """Return the number of failed responses.

        Bug fix: the original computed `len(self.responses) - self.success_count`,
        i.e. it operated on the bound methods themselves rather than their
        results, which raises TypeError at runtime. Use the underlying
        attributes directly.
        """
        return (len(self._responses) - self._success_count)
# NOTE(review): the three statements below look like remnants of decorator
# invocations on the class (Ryu's @_set_stats_type / @_set_msg_type
# registration pattern); confirm against the original source.
_stats_type()
_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats)
_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)

class OFPTableFeaturesStatsReply(OFPMultipartReply):
    """OpenFlow OFPMP_TABLE_FEATURES multipart reply message."""

    def __init__(self, datapath, type_=None, **kwargs):
        # `type_` is accepted for interface symmetry with other multipart
        # replies but is not forwarded; the parent fills it in.
        super(OFPTableFeaturesStatsReply, self).__init__(datapath, **kwargs)
class Solution():
    """LeetCode 1706 "Where Will the Ball Fall".

    Each grid cell holds +1 (board redirecting right) or -1 (board
    redirecting left). For every ball dropped into a top column, report the
    exit column at the bottom, or -1 if the ball gets stuck.
    """

    def findBall(self, grid: List[List[int]]) -> List[int]:
        rows, cols = len(grid), len(grid[0])
        results = []
        for start in range(cols):
            col = start
            for row in range(rows):
                nxt = col + grid[row][col]
                # Stuck against a wall, or in a "V" formed by opposing boards.
                if nxt < 0 or nxt >= cols or grid[row][nxt] != grid[row][col]:
                    col = -1
                    break
                col = nxt
            results.append(col)
        return results
def test_example_app(tmpdir: Path) -> None:
    """End-to-end check: the Ax sweeper example app runs in multirun mode and
    logs the parameter ranges parsed from the commandline overrides."""
    cmd = ['example/banana.py', '-m', ('hydra.run.dir=' + str(tmpdir)), 'hydra.job.chdir=True', 'banana.x=int(interval(-5, 5))', 'banana.y=interval(-5, 10.1)', 'hydra.sweeper.ax_config.max_trials=2']
    (result, _) = run_python_script(cmd)
    # int(interval(...)) must be reported as an integer range, the plain
    # interval as a float range.
    assert ('banana.x: range=[-5, 5]' in result)
    assert ('banana.y: range=[-5.0, 10.1]' in result)
class ResetAllPerspectivesAction(WorkbenchAction):
    """Workbench action that restores every perspective to its default layout."""

    # Unique action identifier within the workbench.
    id = 'pyface.workbench.action.reset_all_perspectives'
    # Label shown in menus/toolbars.
    name = 'Reset All Perspectives'

    def perform(self, event):
        """Ask the user for confirmation, then reset all perspectives."""
        if self.window.confirm(MESSAGE) != YES:
            return
        self.window.reset_all_perspectives()
class Serializable(ABC):
    """Base class for objects serialized through a pluggable Serializer."""

    # Serializer used by `serialize`; None until `set_serializer` is called.
    serializer: 'Serializer' = None

    def to_dict(self) -> Dict:
        """Return a dict representation of this object.

        NOTE(review): the body is empty in this extract — presumably an
        abstract/override point for subclasses; confirm against the original
        source.
        """

    def serialize(self) -> bytes:
        """Serialize this object with the configured serializer.

        Raises:
            ValueError: if no serializer has been set.
        """
        if (self.serializer is None):
            raise ValueError('Serializer is not set. Please set the serializer before serialization.')
        return self.serializer.serialize(self)

    def set_serializer(self, serializer: 'Serializer') -> None:
        """Attach the serializer used by `serialize`."""
        self.serializer = serializer
class OptionSeriesColumnSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Generated Highcharts option wrapper for
    series.column.sonification.defaultSpeechOptions.mapping.playDelay.

    NOTE(review): each option appears twice (getter then setter with the
    same name) — presumably @property / @<name>.setter pairs in the
    generated original; decorators are not visible in this extract.
    """

    def mapFunction(self):
        # Getter: current mapFunction value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a raw value, not emitted as a JS function.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data point property the mapping reads from.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit the min/max range is defined within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def convert_composable_query_to_string(sql, model=Award, cursor=None):
    """Render a psycopg2 Composable into SQL text; pass anything else through.

    When no cursor is supplied, a short-lived cursor is opened against the
    connection associated with `model` purely to obtain its connection for
    rendering.
    """
    if not isinstance(sql, Composable):
        return sql
    if cursor is not None:
        return sql.as_string(cursor.connection)
    connection = get_connection(model)
    with connection.cursor() as scratch_cursor:
        return sql.as_string(scratch_cursor.connection)
# NOTE(review): `_module()` looks like the remnant of a registry decorator on
# the class (e.g. @DATASETS.register_module()); confirm against the original.
_module()

class NaiveSVCDataset(NaiveDataset):
    """Naive singing-voice-conversion dataset.

    The declarative pipelines below describe per-item preprocessing and
    batch collation; they are interpreted by the NaiveDataset machinery.
    """

    # Per item: keep only the needed keys, then swap the first two axes of
    # mel and contents.
    processing_pipeline = [dict(type='PickKeys', keys=['path', 'time_stretch', 'mel', 'contents', 'pitches', 'key_shift', 'speaker']), dict(type='Transpose', keys=[('mel', 1, 0), ('contents', 1, 0)])]
    # Per batch: list-of-dicts -> dict-of-lists, pad+stack the variable-length
    # arrays, tensorize the scalars, and add trailing singleton dimensions.
    collating_pipeline = [dict(type='ListToDict'), dict(type='PadStack', keys=[('mel', (- 2)), ('contents', (- 2)), ('pitches', (- 1))]), dict(type='ToTensor', keys=[('time_stretch', torch.float32), ('key_shift', torch.float32), ('speaker', torch.int64)]), dict(type='UnSqueeze', keys=[('pitches', (- 1)), ('time_stretch', (- 1)), ('key_shift', (- 1))])]
def embedding_attention_decoder(decoder_inputs, initial_state, attention_states, cell, num_symbols, embedding_size, num_heads=1, output_size=None, output_projection=None, feed_previous=False, update_embedding_for_previous=True, dtype=tf.float32, scope=None, initial_state_attention=False, attn_num_hidden=128):
    """RNN decoder with symbol embedding and attention (TF1-style seq2seq).

    Embeds each symbol id in `decoder_inputs` and delegates decoding to
    `attention_decoder`. With feed_previous=True, the previous step's argmax
    prediction is embedded and fed back instead of the ground-truth input
    (used at inference time).
    """
    if (output_size is None):
        output_size = cell.output_size
    if (output_projection is not None):
        # Sanity check: the projection bias must match the vocabulary size.
        proj_biases = tf.convert_to_tensor(output_projection[1], dtype=dtype)
        proj_biases.get_shape().assert_is_compatible_with([num_symbols])
    with tf.compat.v1.variable_scope((scope or 'embedding_attention_decoder')):
        # Embedding table pinned to CPU — presumably to keep the large lookup
        # table out of device memory; TODO confirm.
        with tf.device('/cpu:0'):
            embedding = tf.compat.v1.get_variable('embedding', [num_symbols, embedding_size])
        loop_function = (_extract_argmax_and_embed(embedding, output_projection, update_embedding_for_previous) if feed_previous else None)
        emb_inp = [tf.nn.embedding_lookup(embedding, i) for i in decoder_inputs]
        return attention_decoder(emb_inp, initial_state, attention_states, cell, output_size=output_size, num_heads=num_heads, loop_function=loop_function, initial_state_attention=initial_state_attention, attn_num_hidden=attn_num_hidden)
class OptionPlotoptionsPyramidSonificationContexttracksMappingRate(Options):
    """Generated Highcharts option wrapper for
    plotOptions.pyramid.sonification.contextTracks.mapping.rate.

    NOTE(review): each option appears twice (getter then setter with the
    same name) — presumably @property / @<name>.setter pairs in the
    generated original; decorators are not visible in this extract.
    """

    def mapFunction(self):
        # Getter: current mapFunction value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a raw value, not emitted as a JS function.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data point property the mapping reads from.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit the min/max range is defined within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class WarProcessor(AbstractGamestateDataProcessor):
    """Extracts wars, their participants and their battles from the gamestate
    and mirrors them into the database models."""

    ID = 'wars'
    DEPENDENCIES = [RulerEventProcessor.ID, CountryProcessor.ID, SystemProcessor.ID, PlanetProcessor.ID]

    def __init__(self):
        super().__init__()
        # Lookup tables filled in from the dependency processors.
        self._ruler_dict = None
        self._countries_dict = None
        self._system_models_dict = None
        self._planet_models_dict = None
        # Wars present in the current gamestate, keyed by in-game war id.
        self.active_wars: Dict[(int, datamodel.War)] = None

    def initialize_data(self):
        self.active_wars = {}

    def data(self) -> Any:
        return dict(active_wars=self.active_wars)

    def extract_data_from_gamestate(self, dependencies):
        """Walk the gamestate 'war' section, updating/creating DB war models."""
        self._ruler_dict = dependencies[RulerEventProcessor.ID]
        self._countries_dict = dependencies[CountryProcessor.ID]
        self._system_models_dict = dependencies[SystemProcessor.ID]['systems_by_ingame_id']
        self._planet_models_dict = dependencies[PlanetProcessor.ID]
        wars_dict = self._gamestate_dict.get('war', {})
        if (not isinstance(wars_dict, dict)):
            return
        for (war_id, war_dict) in wars_dict.items():
            war_model = self._update_war(war_id, war_dict)
            if (war_model is None):
                continue
            self.active_wars[war_id] = war_model
            self.update_war_participants(war_dict, war_model)
            self._extract_combat_victories(war_dict, war_model)

    def _update_war(self, war_id: int, war_dict):
        """Create or refresh the War row for `war_id`.

        Returns None for malformed entries and for wars already concluded in
        the database.
        """
        if (not isinstance(war_dict, dict)):
            return
        war_model = self._session.query(datamodel.War).order_by(datamodel.War.start_date_days.desc()).filter_by(war_id_in_game=war_id).first()
        if (war_model is None):
            start_date_days = datamodel.date_to_days(war_dict.get('start_date'))
            war_model = datamodel.War(war_id_in_game=war_id, game=self._db_game, start_date_days=start_date_days, end_date_days=self._basic_info.date_in_days, outcome=datamodel.WarOutcome.in_progress)
        elif (war_model.outcome != datamodel.WarOutcome.in_progress):
            # War already resolved in the DB; do not touch it again.
            return
        war_model.attacker_war_exhaustion = war_dict.get('attacker_war_exhaustion', 0.0)
        war_model.defender_war_exhaustion = war_dict.get('defender_war_exhaustion', 0.0)
        # Keep the end date trailing the current date while the war is live.
        war_model.end_date_days = (self._basic_info.date_in_days - 1)
        self._session.add(war_model)
        return war_model

    def update_war_participants(self, war_dict, war_model):
        """Sync WarParticipant rows (and 'joined war' events) for both sides."""
        war_goal_attacker = war_dict.get('attacker_war_goal', {}).get('type')
        # The defender goal may be a dict, a bare value, or the string 'none'.
        war_goal_defender = war_dict.get('defender_war_goal', {})
        if isinstance(war_goal_defender, dict):
            war_goal_defender = war_goal_defender.get('type')
        elif ((not war_goal_defender) or (war_goal_defender == 'none')):
            war_goal_defender = None
        attackers = {p['country'] for p in war_dict['attackers']}
        for war_party_info in itertools.chain(war_dict.get('attackers', []), war_dict.get('defenders', [])):
            if (not isinstance(war_party_info, dict)):
                continue
            country_id_ingame = war_party_info.get('country')
            db_country = self._countries_dict.get(country_id_ingame)
            if (db_country is None):
                logger.warning(f'{self._basic_info.logger_str} Could not find country matching war participant {war_party_info}')
                continue
            call_type = war_party_info.get('call_type', 'unknown')
            caller = None
            if (war_party_info.get('caller') in self._countries_dict):
                caller = self._countries_dict[war_party_info.get('caller')]
            is_attacker = (country_id_ingame in attackers)
            war_participant = self._session.query(datamodel.WarParticipant).filter_by(war=war_model, country=db_country).one_or_none()
            if (war_participant is None):
                # First time this country shows up in this war: create the row
                # plus a historical "joined war" event.
                war_goal = (war_goal_attacker if is_attacker else war_goal_defender)
                war_participant = datamodel.WarParticipant(war=war_model, war_goal=war_goal, country=db_country, caller_country=caller, call_type=call_type, is_attacker=is_attacker)
                self._session.add(datamodel.HistoricalEvent(event_type=datamodel.HistoricalEventType.war, country=war_participant.country, target_country=war_participant.caller_country, leader=self._ruler_dict.get(country_id_ingame), start_date_days=self._basic_info.date_in_days, end_date_days=self._basic_info.date_in_days, war=war_model, event_is_known_to_player=war_participant.country.has_met_player(), db_description=self._get_or_add_shared_description(call_type)))
            if (war_participant.war_goal is None):
                war_participant.war_goal = war_goal_defender
            self._session.add(war_participant)

    def _extract_combat_victories(self, war_dict, war: datamodel.War):
        """Record this war's decisive battles as Combat rows, with their
        participants and a combat historical event."""
        battles = war_dict.get('battles', [])
        # A single battle is stored as a bare dict rather than a list.
        if (not isinstance(battles, list)):
            battles = [battles]
        for battle_dict in battles:
            if (not isinstance(battle_dict, dict)):
                continue
            battle_attackers = battle_dict.get('attackers')
            battle_defenders = battle_dict.get('defenders')
            if ((not battle_attackers) or (not battle_defenders)):
                continue
            # Only battles with a recorded outcome are of interest.
            if (battle_dict.get('attacker_victory') not in {'yes', 'no'}):
                continue
            attacker_victory = (battle_dict.get('attacker_victory') == 'yes')
            planet_model = self._planet_models_dict.get(battle_dict.get('planet'))
            if (planet_model is None):
                # Space battle: resolve the system directly from the dict.
                system_id_in_game = battle_dict.get('system')
                system = self._system_models_dict.get(system_id_in_game)
                if (system is None):
                    continue
            else:
                system = planet_model.system
            combat_type = datamodel.CombatType.__members__.get(battle_dict.get('type'), datamodel.CombatType.other)
            date_str = battle_dict.get('date')
            date_in_days = datamodel.date_to_days(date_str)
            if (date_in_days < 0):
                date_in_days = self._basic_info.date_in_days
            attacker_exhaustion = battle_dict.get('attacker_war_exhaustion', 0.0)
            defender_exhaustion = battle_dict.get('defender_war_exhaustion', 0.0)
            # Skip battles with no war-exhaustion impact, except ground combat.
            if (((defender_exhaustion + attacker_exhaustion) <= 0.001) and (combat_type != datamodel.CombatType.armies)):
                continue
            # Deduplicate: an identical combat already stored means we have
            # processed this battle in an earlier pass.
            combat = self._session.query(datamodel.Combat).filter_by(war=war, system=(system if (system is not None) else planet_model.system), planet=planet_model, combat_type=combat_type, attacker_victory=attacker_victory, attacker_war_exhaustion=attacker_exhaustion, defender_war_exhaustion=defender_exhaustion).order_by(datamodel.Combat.date.desc()).first()
            if (combat is not None):
                continue
            combat = datamodel.Combat(war=war, date=date_in_days, attacker_war_exhaustion=attacker_exhaustion, defender_war_exhaustion=defender_exhaustion, system=system, planet=planet_model, combat_type=combat_type, attacker_victory=attacker_victory)
            self._session.add(combat)
            is_known_to_player = False
            for country_id in itertools.chain(battle_attackers, battle_defenders):
                db_country = self._countries_dict.get(country_id)
                if (db_country is None):
                    logger.warning(f'{self._basic_info.logger_str} Could not find country with ID {country_id} when processing battle {battle_dict}')
                    continue
                # The event is visible if any participant has met the player.
                is_known_to_player |= db_country.has_met_player()
                war_participant = self._session.query(datamodel.WarParticipant).filter_by(war=war, country=db_country).one_or_none()
                if (war_participant is None):
                    logger.info(f'{self._basic_info.logger_str} Could not find War participant matching country {db_country.country_name} and war {war.name}.')
                    continue
                self._session.add(datamodel.CombatParticipant(combat=combat, war_participant=war_participant, is_attacker=(country_id in battle_attackers)))
            event_type = (datamodel.HistoricalEventType.army_combat if (combat_type == datamodel.CombatType.armies) else datamodel.HistoricalEventType.fleet_combat)
            self._session.add(datamodel.HistoricalEvent(event_type=event_type, combat=combat, system=system, planet=planet_model, war=war, start_date_days=date_in_days, event_is_known_to_player=is_known_to_player))
class OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Generated Highcharts option wrapper for
    series.bubble.sonification.defaultInstrumentOptions.mapping.pan.

    NOTE(review): each option appears twice (getter then setter with the
    same name) — presumably @property / @<name>.setter pairs in the
    generated original; decorators are not visible in this extract.
    """

    def mapFunction(self):
        # Getter: current mapFunction value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a raw value, not emitted as a JS function.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data point property the mapping reads from.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit the min/max range is defined within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def prepare_fixed_decimal(data, schema):
    """Encode a decimal.Decimal for the Avro `fixed` decimal logical type.

    The value is written as a big-endian two's-complement integer,
    sign-extended to exactly ``schema['size']`` bytes. The unscaled integer
    is ``data * 10**scale``. Non-Decimal inputs are returned unchanged.

    Raises:
        ValueError: if the value has more digits than the schema precision,
            needs more fractional digits than the schema scale, or does not
            fit into the fixed size. (The original hand-rolled bit twiddling
            silently produced corrupt bytes on overflow.)
    """
    if (not isinstance(data, decimal.Decimal)):
        return data
    scale = schema.get('scale', 0)
    size = schema['size']
    precision = schema['precision']

    (sign, digits, exp) = data.as_tuple()
    if (len(digits) > precision):
        raise ValueError('The decimal precision is bigger than allowed by schema')
    if ((- exp) > scale):
        raise ValueError('Scale provided in schema does not match the decimal')

    # Unscaled integer value: shift the digits so there are exactly `scale`
    # fractional digits, e.g. Decimal('123.45') with scale 2 -> 12345.
    unscaled_datum = 0
    for digit in digits:
        unscaled_datum = ((unscaled_datum * 10) + digit)
    unscaled_datum *= 10 ** (exp + scale)
    if sign:
        unscaled_datum = -unscaled_datum

    # int.to_bytes performs the big-endian two's-complement sign extension
    # the original implemented by hand with masks and manual byte writes.
    try:
        return unscaled_datum.to_bytes(size, byteorder='big', signed=True)
    except OverflowError:
        raise ValueError('The decimal does not fit the schema fixed size') from None
def test_import_objects_DuplicateObject(raw_user):
    """Importing the same raw object twice must deduplicate: exactly one SID
    entry, with SID and DN maps pointing at consistent records."""
    expected_sid = 'S-1-5-21----500'
    expected_dn = 'CN=ADMINISTRATOR,CN=USERS,DC=TEST,DC=LAB'
    adds = ADDS()
    adds.import_objects([raw_user, raw_user])
    sid_map_object = adds.SID_MAP[expected_sid]
    dn_map_object = adds.DN_MAP[expected_dn]
    assert (len(adds.SID_MAP) == 1)
    # Cross-check both maps reference the same logical object.
    assert (sid_map_object.Properties[ADDS.AT_DISTINGUISHEDNAME] == expected_dn)
    assert (dn_map_object.ObjectIdentifier == expected_sid)
def prepare_experiment_dir(experiment_dir, cfg, rank):
    """Create/validate the experiment directory and return the checkpoint to
    resume from, or None for a fresh run.

    When resuming, the previously saved config must match the current one on
    the training-relevant sections; only rank 0 logs and writes files.
    """
    config_path = osp.join(experiment_dir, 'config.yaml')
    last_checkpoint_path = find_last_checkpoint_path(experiment_dir)
    if (last_checkpoint_path is not None):
        if (rank == 0):
            logger.info('Resuming the training from checkpoint %s', last_checkpoint_path)
        if osp.exists(config_path):
            with open(config_path, 'r') as fp:
                cfg_prev = OmegaConf.create(fp.read())
            # Refuse to resume if any section that affects training changed.
            compare_keys = ['experiment', 'data', 'model', 'training']
            if (OmegaConf.masked_copy(cfg, compare_keys) != OmegaConf.masked_copy(cfg_prev, compare_keys)):
                raise ValueError(f'Attempting to resume training with a different config: {OmegaConf.masked_copy(cfg, compare_keys)} vs {OmegaConf.masked_copy(cfg_prev, compare_keys)}')
    if (rank == 0):
        # Persist the (possibly new) config for future resume validation.
        Path(experiment_dir).mkdir(exist_ok=True, parents=True)
        with open(config_path, 'w') as fp:
            OmegaConf.save(cfg, fp)
    return last_checkpoint_path
def test_try_extract_random_fail():
    """Random invalid image files must yield an empty kernel-config extraction."""
    for fp in glob.glob(str((TEST_DATA_DIR / 'random_invalid/*.image'))):
        test_file = FileObject(file_path=fp)
        # Minimal processed_analysis stub so the file looks like a kernel
        # image candidate to the plugin.
        test_file.processed_analysis['file_type'] = {'result': {'mime': 'application/octet-stream'}}
        test_file.processed_analysis['software_components'] = {'summary': ['Linux Kernel']}
        assert (AnalysisPlugin.try_object_extract_ikconfig(test_file.binary) == b'')
def format_capacity(df: pd.DataFrame, target_datetime: datetime) -> dict[(str, Any)]:
    """Aggregate nameplate capacity (MW) of operating plants per production mode.

    Returns a mapping of mode -> {value, source, datetime}, where the datetime
    is `target_datetime` formatted as YYYY-MM-DD.
    """
    operating = df.copy()
    operating = operating.loc[operating['statusDescription'] == 'Operating']
    # Translate raw technology labels into our production modes.
    operating['mode'] = operating['technology'].map(TECHNOLOGY_TO_MODE)
    per_mode = operating.groupby(['mode'])[['nameplate-capacity-mw']].sum().reset_index()

    date_str = target_datetime.strftime('%Y-%m-%d')
    capacity_dict = {}
    for mode in per_mode['mode'].unique():
        total_mw = float(per_mode.loc[per_mode['mode'] == mode]['nameplate-capacity-mw'].sum())
        capacity_dict[mode] = {'value': total_mw, 'source': SOURCE, 'datetime': date_str}
    return capacity_dict
class Migration(migrations.Migration):
    """Create the gtas_sf133_balances table: GTAS SF-133 balance snapshots,
    unique per (fiscal_year, fiscal_period, disaster_emergency_fund_code)."""

    dependencies = [('references', '0045_disasteremergencyfundcode_url')]

    operations = [migrations.CreateModel(name='GTASSF133Balances', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('fiscal_year', models.IntegerField()), ('fiscal_period', models.IntegerField()), ('obligations_incurred_total_cpe', models.DecimalField(decimal_places=2, max_digits=23)), ('budget_authority_appropriation_amount_cpe', models.DecimalField(decimal_places=2, max_digits=23)), ('other_budgetary_resources_amount_cpe', models.DecimalField(decimal_places=2, max_digits=23)), ('gross_outlay_amount_by_tas_cpe', models.DecimalField(decimal_places=2, max_digits=23)), ('unobligated_balance_cpe', models.DecimalField(decimal_places=2, max_digits=23)), ('disaster_emergency_fund_code', models.TextField(null=True)), ('create_date', models.DateTimeField(auto_now_add=True)), ('update_date', models.DateTimeField(auto_now=True))], options={'db_table': 'gtas_sf133_balances', 'managed': True, 'unique_together': {('fiscal_year', 'fiscal_period', 'disaster_emergency_fund_code')}})]
class BaseSkillTestCase():
    """Test harness for AEA skills.

    Loads the skill under `path_to_skill` against a stub agent context with an
    in-memory multiplexer/outbox, and provides helpers to build incoming
    messages, prepare dialogues, and inspect the outbox / decision-maker queue.

    NOTE(review): several defs below take `cls` or no `self`-style first
    argument, suggesting stripped @property/@staticmethod/@classmethod
    decorators in the original source (e.g. `skill`, `message_has_attributes`,
    `_provide_unspecified_fields`, `setup`); confirm call style before use.
    """

    # Path to the skill under test; subclasses override this.
    path_to_skill: Path = Path('.')
    # Whether built messages default to agent-to-agent addressing.
    is_agent_to_agent_messages: bool = True
    _skill: Skill
    _multiplexer: AsyncMultiplexer
    _outbox: OutBox

    def skill(self) -> Skill:
        """Return the skill under test; raises if setup has not run."""
        try:
            value = self._skill
        except AttributeError:
            raise ValueError('Ensure skill is set during setup_class.')
        return value

    def get_quantity_in_outbox(self) -> int:
        """Number of envelopes currently queued in the outbox."""
        return self._multiplexer.out_queue.qsize()

    def get_message_from_outbox(self) -> Optional[Message]:
        """Pop and return the next outgoing message, or None when empty."""
        if self._outbox.empty():
            return None
        envelope = self._multiplexer.out_queue.get_nowait()
        return envelope.message

    def drop_messages_from_outbox(self, number: int=1) -> None:
        """Discard up to `number` messages from the outbox."""
        while ((not self._outbox.empty()) and (number != 0)):
            self._multiplexer.out_queue.get_nowait()
            number -= 1

    def get_quantity_in_decision_maker_inbox(self) -> int:
        """Number of messages queued for the decision maker."""
        return self._skill.skill_context.decision_maker_message_queue.qsize()

    def get_message_from_decision_maker_inbox(self) -> Optional[Message]:
        """Pop and return the next decision-maker message, or None when empty."""
        if self._skill.skill_context.decision_maker_message_queue.empty():
            return None
        return self._skill.skill_context.decision_maker_message_queue.get_nowait()

    def drop_messages_from_decision_maker_inbox(self, number: int=1) -> None:
        """Discard up to `number` messages from the decision-maker queue."""
        while ((not self._skill.skill_context.decision_maker_message_queue.empty()) and (number != 0)):
            self._skill.skill_context.decision_maker_message_queue.get_nowait()
            number -= 1

    def assert_quantity_in_outbox(self, expected_quantity: int) -> None:
        """Assert the outbox holds exactly `expected_quantity` messages."""
        quantity = self.get_quantity_in_outbox()
        assert (quantity == expected_quantity), f'Invalid number of messages in outbox. Expected {expected_quantity}. Found {quantity}.'

    def assert_quantity_in_decision_making_queue(self, expected_quantity: int) -> None:
        """Assert the decision-maker queue holds exactly `expected_quantity` messages."""
        quantity = self.get_quantity_in_decision_maker_inbox()
        assert (quantity == expected_quantity), f'Invalid number of messages in decision maker queue. Expected {expected_quantity}. Found {quantity}.'

    def message_has_attributes(actual_message: Message, message_type: Type[Message], **kwargs: Any) -> Tuple[(bool, str)]:
        """Check a message's exact type and attribute values.

        Returns (True, ok-text) when everything matches, else (False, reason).
        """
        if (type(actual_message) != message_type):
            return (False, 'The message types do not match. Actual type: {}. Expected type: {}'.format(type(actual_message), message_type))
        for (attribute_name, expected_value) in kwargs.items():
            actual_value = getattr(actual_message, attribute_name)
            if (actual_value != expected_value):
                return (False, f"The '{attribute_name}' fields do not match. Actual '{attribute_name}': {actual_value}. Expected '{attribute_name}': {expected_value}")
        return (True, 'The message has the provided expected attributes.')

    def build_incoming_message(self, message_type: Type[Message], performative: Message.Performative, dialogue_reference: Optional[Tuple[(str, str)]]=None, message_id: Optional[int]=None, target: Optional[int]=None, to: Optional[Address]=None, sender: Optional[Address]=None, is_agent_to_agent_messages: Optional[bool]=None, **kwargs: Any) -> Message:
        """Construct a message as if received from a counterparty.

        Unspecified addressing fields default to the harness counterparty
        (agent or skill address depending on `is_agent_to_agent_messages`).
        """
        if (is_agent_to_agent_messages is None):
            is_agent_to_agent_messages = self.is_agent_to_agent_messages
        if (sender is None):
            sender = (COUNTERPARTY_AGENT_ADDRESS if is_agent_to_agent_messages else COUNTERPARTY_SKILL_ADDRESS)
        message_attributes = {}
        default_dialogue_reference = Dialogues.new_self_initiated_dialogue_reference()
        dialogue_reference = (default_dialogue_reference if (dialogue_reference is None) else dialogue_reference)
        message_attributes['dialogue_reference'] = dialogue_reference
        if (message_id is not None):
            message_attributes['message_id'] = message_id
        if (target is not None):
            message_attributes['target'] = target
        message_attributes['performative'] = performative
        message_attributes.update(kwargs)
        incoming_message = message_type(**message_attributes)
        incoming_message.sender = sender
        default_to = (self.skill.skill_context.agent_address if is_agent_to_agent_messages else str(self.skill.public_id))
        incoming_message.to = (default_to if (to is None) else to)
        return incoming_message

    def build_incoming_message_for_skill_dialogue(self, dialogue: Dialogue, performative: Message.Performative, message_type: Optional[Type[Message]]=None, dialogue_reference: Optional[Tuple[(str, str)]]=None, message_id: Optional[int]=None, target: Optional[int]=None, to: Optional[Address]=None, sender: Optional[Address]=None, **kwargs: Any) -> Message:
        """Construct an incoming message consistent with an existing dialogue,
        deriving every unspecified field from the dialogue's state."""
        if (dialogue is None):
            raise AEAEnforceError('dialogue cannot be None.')
        if (dialogue.last_message is None):
            raise AEAEnforceError('dialogue cannot be empty.')
        message_type = (message_type if (message_type is not None) else dialogue.message_class)
        dialogue_reference = (dialogue_reference if (dialogue_reference is not None) else dialogue.dialogue_label.dialogue_reference)
        message_id = (message_id if (message_id is not None) else dialogue.get_incoming_next_message_id())
        target = (target if (target is not None) else dialogue.last_message.message_id)
        to = (to if (to is not None) else dialogue.self_address)
        sender = (sender if (sender is not None) else dialogue.dialogue_label.dialogue_opponent_addr)
        incoming_message = self.build_incoming_message(message_type=message_type, performative=performative, dialogue_reference=dialogue_reference, message_id=message_id, target=target, to=to, sender=sender, **kwargs)
        return incoming_message

    def _provide_unspecified_fields(message: DialogueMessage, last_is_incoming: Optional[bool]) -> Tuple[(bool, Optional[int])]:
        """Fill in a DialogueMessage's optional is_incoming/target fields,
        defaulting direction to the opposite of the previous message."""
        default_is_incoming = (not last_is_incoming)
        is_incoming = (default_is_incoming if (message[2] is None) else message[2])
        default_target = None
        target = (default_target if (message[3] is None) else message[3])
        return (is_incoming, target)

    def _non_initial_incoming_message_dialogue_reference(dialogue: Dialogue) -> Tuple[(str, str)]:
        """Dialogue reference for a follow-up incoming message: keep the first
        half, assign a nonce for the second half if still unassigned."""
        dialogue_reference = (dialogue.dialogue_label.dialogue_reference[0], (Dialogues._generate_dialogue_nonce() if (dialogue.dialogue_label.dialogue_reference[1] == Dialogue.UNASSIGNED_DIALOGUE_REFERENCE) else dialogue.dialogue_label.dialogue_reference[1]))
        return dialogue_reference

    def _extract_message_fields(self, message: DialogueMessage, index: int, last_is_incoming: bool) -> Tuple[(Message.Performative, Dict, int, bool, Optional[int])]:
        """Unpack one DialogueMessage tuple into its concrete message fields."""
        performative = message[0]
        contents = message[1]
        message_id = (index + 1)
        (is_incoming, target) = self._provide_unspecified_fields(message, last_is_incoming=last_is_incoming)
        return (performative, contents, message_id, is_incoming, target)

    def prepare_skill_dialogue(self, dialogues: Dialogues, messages: Tuple[(DialogueMessage, ...)], counterparty: Optional[Address]=None, is_agent_to_agent_messages: Optional[bool]=None) -> Dialogue:
        """Replay a scripted sequence of DialogueMessages into `dialogues`,
        alternating incoming/outgoing, and return the resulting dialogue."""
        if (is_agent_to_agent_messages is None):
            is_agent_to_agent_messages = self.is_agent_to_agent_messages
        if (counterparty is None):
            counterparty = (COUNTERPARTY_AGENT_ADDRESS if is_agent_to_agent_messages else COUNTERPARTY_SKILL_ADDRESS)
        if (len(messages) == 0):
            raise AEAEnforceError('the list of messages must be positive.')
        (performative, contents, message_id, is_incoming, target) = self._extract_message_fields(messages[0], index=0, last_is_incoming=True)
        if is_incoming:
            # Counterparty opens the dialogue: build and feed the first message.
            dialogue_reference = dialogues.new_self_initiated_dialogue_reference()
            message = self.build_incoming_message(message_type=dialogues.message_class, dialogue_reference=dialogue_reference, message_id=Dialogue.STARTING_MESSAGE_ID, target=(target or Dialogue.STARTING_TARGET), performative=performative, to=dialogues.self_address, sender=counterparty, is_agent_to_agent_messages=is_agent_to_agent_messages, **contents)
            dialogue = cast(Dialogue, dialogues.update(message))
            if (dialogue is None):
                raise AEAEnforceError('Cannot update the dialogue with message number {}'.format(message_id))
        else:
            # We open the dialogue ourselves.
            (_, dialogue) = dialogues.create(counterparty=counterparty, performative=performative, **contents)
        for (idx, dialogue_message) in enumerate(messages[1:]):
            (performative, contents, message_id, is_incoming, target) = self._extract_message_fields(dialogue_message, (idx + 1), is_incoming)
            if (target is None):
                target = cast(Message, dialogue.last_message).message_id
            if is_incoming:
                dialogue_reference = self._non_initial_incoming_message_dialogue_reference(dialogue)
                message_id = dialogue.get_incoming_next_message_id()
                message = self.build_incoming_message(message_type=dialogues.message_class, dialogue_reference=dialogue_reference, message_id=message_id, target=target, performative=performative, to=dialogues.self_address, sender=counterparty, is_agent_to_agent_messages=is_agent_to_agent_messages, **contents)
                dialogue = cast(Dialogue, dialogues.update(message))
                if (dialogue is None):
                    raise AEAEnforceError('Cannot update the dialogue with message number {}'.format(message_id))
            else:
                dialogue.reply(performative=performative, target=target, **contents)
        return dialogue

    def setup(cls, **kwargs: Any) -> None:
        """Load the skill under test into a freshly stubbed agent context.

        Supported kwargs: `shared_state` (dict merged into the context's
        shared state), `config_overrides` (skill.yaml overrides),
        `dm_context_kwargs` (attributes of the stub decision-maker context).
        """
        identity = Identity('test_agent_name', 'test_agent_address', 'test_agent_public_key')
        cls._multiplexer = AsyncMultiplexer()
        # Replace the out queue so tests can inspect envelopes synchronously.
        cls._multiplexer._out_queue = asyncio.Queue()
        cls._outbox = OutBox(cast(Multiplexer, cls._multiplexer))
        _shared_state = cast(Optional[Dict[(str, Any)]], kwargs.pop('shared_state', None))
        _skill_config_overrides = cast(Optional[Dict[(str, Any)]], kwargs.pop('config_overrides', None))
        _dm_context_kwargs = cast(Dict[(str, Any)], kwargs.pop('dm_context_kwargs', dict()))
        agent_context = AgentContext(identity=identity, connection_status=cls._multiplexer.connection_status, outbox=cls._outbox, decision_maker_message_queue=Queue(), decision_maker_handler_context=SimpleNamespace(**_dm_context_kwargs), task_manager=TaskManager(), default_ledger_id=identity.default_address_key, currency_denominations=DEFAULT_CURRENCY_DENOMINATIONS, default_connection=None, default_routing={}, search_service_address='dummy_author/dummy_search_skill:0.1.0', decision_maker_address='dummy_decision_maker_address', data_dir=os.getcwd())
        if (_shared_state is not None):
            for (key, value) in _shared_state.items():
                agent_context.shared_state[key] = value
        skill_configuration_file_path: Path = Path(cls.path_to_skill, 'skill.yaml')
        loader = ConfigLoaders.from_package_type(PackageType.SKILL)
        with open_file(skill_configuration_file_path) as fp:
            skill_config: SkillConfig = loader.load(fp)
        if (_skill_config_overrides is not None):
            skill_config.update(_skill_config_overrides)
        skill_config.directory = cls.path_to_skill
        cls._skill = Skill.from_config(skill_config, agent_context)
class NetworkDiscoveryDialog(QDialog, threading.Thread):
    """Dialog that listens for ROS master-discovery heartbeats on a range of
    multicast ports and shows which networks/hosts they came from.

    The object is both a Qt dialog and a worker thread: the thread drains the
    discovery sockets, while Qt signals marshal all widget updates back onto
    the GUI thread.
    """

    # Socket receive timeout in seconds.
    TIMEOUT = 0.1

    # Signals used to update widgets safely from the worker thread.
    display_clear_signal = Signal()
    display_append_signal = Signal(str)
    status_text_signal = Signal(str)
    # Emitted with the network index when the user clicks a "join" link.
    network_join_request = Signal(int)

    def __init__(self, default_mcast_group, default_port, networks_count, parent=None):
        """Build the UI, open one discovery socket per network (ports
        default_port .. default_port + networks_count - 1) and immediately
        start the listener thread.

        :param default_mcast_group: multicast group to listen on
        :param default_port: first UDP port of the scanned port range
        :param networks_count: number of consecutive ports to scan
        """
        QDialog.__init__(self, parent=parent)
        threading.Thread.__init__(self)
        self.default_port = default_port
        self.setObjectName('NetworkDiscoveryDialog')
        self.setAttribute(Qt.WA_DeleteOnClose, True)
        self.setWindowFlags(Qt.Window)
        self.setWindowTitle('Network Discovery')
        self.resize(728, 512)
        self.verticalLayout = QVBoxLayout(self)
        self.verticalLayout.setObjectName('verticalLayout')
        self.verticalLayout.setContentsMargins(1, 1, 1, 1)
        # Read-only HTML view listing the discovered networks; anchor clicks
        # trigger the join request.
        self.display = QTextBrowser(self)
        self.display.setReadOnly(True)
        self.verticalLayout.addWidget(self.display)
        self.display_clear_signal.connect(self.display.clear)
        self.display_append_signal.connect(self.display.append)
        self.display.anchorClicked.connect(self.on_anchorClicked)
        self.status_label = QLabel('0 messages', self)
        self.verticalLayout.addWidget(self.status_label)
        self.status_text_signal.connect(self.status_label.setText)
        # hostname -> number of heartbeats received from that host
        self._msg_counts = dict()
        self._networks_count = networks_count
        self._running = True
        self._received_msgs = 0
        # network index -> {sender address: (hostname, last-seen timestamp)}
        self._discovered = dict()
        # IP address -> resolved hostname cache
        self._hosts = dict()
        # Guards sockets and the dictionaries above (GUI thread vs. worker).
        self.mutex = threading.RLock()
        self.sockets = []
        with self.mutex:
            try:
                # One socket per scanned network, on consecutive ports.
                for p in range(networks_count):
                    msock = DiscoverSocket((default_port + p), default_mcast_group)
                    self.sockets.append(msock)
                    msock.settimeout(self.TIMEOUT)
            except Exception as e:
                # Show socket setup errors directly in the dialog.
                self.display.setText(utf8(e))
        # NOTE(review): setDaemon() is a deprecated alias of `daemon = True`.
        self.setDaemon(True)
        self.start()

    def on_heartbeat_received(self, msg, address, is_multicast):
        """Record one heartbeat datagram.

        :param msg: raw heartbeat payload (empty messages are ignored)
        :param address: (ip, port) of the sender; the port determines which
            scanned network the heartbeat belongs to
        :param is_multicast: whether the message arrived via multicast
        """
        if (len(msg) == 0):
            return
        force_update = False
        with self.mutex:
            try:
                hostname = self._hosts[address[0]]
            except Exception:
                # First heartbeat from this IP: resolve and cache the hostname.
                self.status_text_signal.emit(('resolve %s' % address[0]))
                hostname = nm.nameres().hostname(utf8(address[0]), resolve=True)
                self._hosts[address[0]] = hostname
            try:
                # Validate the payload; raises if it is not a master state msg.
                (_version, _msg_tuple) = Discoverer.msg2masterState(msg, address)
                # Map the receive port back to the scanned network index.
                index = (address[1] - self.default_port)
                if (index not in self._discovered):
                    self._discovered[index] = dict()
                self._discovered[index][address] = (hostname, time.time())
                if (hostname not in self._msg_counts):
                    self._msg_counts[hostname] = 0
                self._msg_counts[hostname] += 1
                self._received_msgs += 1
                force_update = True
            except Exception:
                print(traceback.format_exc(1))
        if force_update:
            self._updateDisplay()

    def run(self):
        """Worker loop: drain every discovery socket and feed the heartbeats
        to on_heartbeat_received() until stopped or ROS shuts down."""
        self.parent().masterlist_service.refresh(self.parent().getMasteruri(), False)
        while ((not rospy.is_shutdown()) and self._running):
            with self.mutex:
                for msock in self.sockets:
                    received = True
                    while received:
                        try:
                            # Non-blocking get; queue.Empty ends the drain.
                            recv_item = msock.receive_queue.get(False)
                            self._received_msgs += 1
                            self.on_heartbeat_received(recv_item.msg, recv_item.sender_addr, (recv_item.via == QueueReceiveItem.MULTICAST))
                        except queue.Empty:
                            received = False
            status_text = ('received messages: %d' % self._received_msgs)
            self.status_text_signal.emit(status_text)
            time.sleep(3)

    def closeEvent(self, event):
        """Stop the worker thread before the dialog is closed."""
        self.stop()
        QDialog.closeEvent(self, event)

    def stop(self):
        """Stop the listener loop and close all discovery sockets."""
        self._running = False
        with self.mutex:
            for p in range(len(self.sockets)):
                try:
                    self.sockets[p].close()
                except Exception:
                    # Best effort: a socket may already be closed.
                    pass

    def _updateDisplay(self):
        """Rebuild the HTML overview of all discovered networks and hosts
        (runs via signals so it is safe to call from the worker thread)."""
        self.display_clear_signal.emit()
        text = '<div style="font-family:Fixedsys,Courier,monospace; padding:10px;">\n'
        for (index, addr_dict) in self._discovered.items():
            # The anchor href carries the network index for on_anchorClicked.
            text += ('Network <b>%s</b>: <a href="%s">join</a><dl>' % (utf8(index), utf8(index)))
            for (addr, (hostname, ts)) in addr_dict.items():
                text += ('<dt>%s <b><u>%s</u></b> %s, received messages: %s</dt>\n' % (self._getTsStr(ts), utf8(hostname), utf8(addr), str(self._msg_counts[hostname])))
            text += '</dl><br>'
        text += '</div>'
        self.display_append_signal.emit(text)

    def _getTsStr(self, timestamp):
        """Return 'HH:MM:SS (<age>)' for the given last-seen timestamp.

        NOTE(review): the age is formatted by feeding the raw difference into
        datetime.fromtimestamp(), which is timezone-dependent — confirm this
        is the intended behaviour.
        """
        dt = datetime.fromtimestamp(timestamp)
        diff = (time.time() - timestamp)
        diff_dt = datetime.fromtimestamp(diff)
        before = '0 sec'
        if (diff < 60):
            before = diff_dt.strftime('%S sec')
        elif (diff < 3600):
            before = diff_dt.strftime('%M:%S min')
        elif (diff < 86400):
            before = diff_dt.strftime('%H:%M:%S std')
        else:
            before = diff_dt.strftime('%d Day(s) %H:%M:%S')
        return ('%s (%s)' % (dt.strftime('%H:%M:%S'), before))

    def on_anchorClicked(self, url):
        """Handle a click on a 'join' link: refresh the view and emit the
        network index encoded in the URL."""
        self._updateDisplay()
        try:
            self.network_join_request.emit(int(url.toString()))
        except Exception:
            print(traceback.format_exc(1))
def get_args():
    """Parse and return the command-line arguments for CRF-model training.

    Returns:
        argparse.Namespace with all training options (paths, worker count,
        Nice2Predict settings).
    """
    arg_parser = argparse.ArgumentParser('Debin to hack binaries. This script is used to train CRF model with Nice2Predict. Make sure you have enough disk space.')
    # Required inputs.
    arg_parser.add_argument('--bin_list', dest='bin_list', type=str, required=True, help='list of binaries to train.')
    arg_parser.add_argument('--bin_dir', dest='bin_dir', type=str, required=True, help='directory of the stripped binaries.')
    arg_parser.add_argument('--debug_dir', dest='debug_dir', type=str, required=True, help='directory of debug information files.')
    # Optional caching / parallelism knobs.
    arg_parser.add_argument('--bap_dir', dest='bap_dir', type=str, default='', help='directory of cached BAP-IR files.')
    arg_parser.add_argument('--workers', dest='workers', type=int, default=1, help='number of workers (i.e., parallization).')
    # Outputs and Nice2Predict configuration.
    arg_parser.add_argument('--out_model', dest='out_model', type=str, required=True, help='prefix of the output model.')
    arg_parser.add_argument('--bin_to_graph', dest='bin_to_graph', type=str, default='py/bin_to_graph.py', help='path to bin_to_graph.py script')
    arg_parser.add_argument('--n2p_train', dest='n2p_train', type=str, required=True, help='Nice2Predict train executable.')
    arg_parser.add_argument('--max_labels_z', dest='max_labels_z', type=int, default=8, help='max_labels_z parameter of Nice2Predict')
    arg_parser.add_argument('--log_dir', dest='log_dir', type=str, required=True, help='log directory')
    arg_parser.add_argument('--valid_labels', dest='valid_labels', type=str, required=True, help='valid_label file of Nice2Predict.')
    return arg_parser.parse_args()
def _assemble_and_send_email(location_slug, post):
    """Compose an email from POSTed form data and send it from the address
    associated with the location identified by ``location_slug``.

    Raises Http404 (via get_object_or_404) when the slug matches no Location.
    """
    location = get_object_or_404(Location, slug=location_slug)
    email_subject = post.get('subject')
    # Body and footer are concatenated with a blank line in between.
    email_body = post.get('body') + '\n\n' + post.get('footer')
    recipients = [post.get('recipient')]
    send_from_location_address(email_subject, email_body, None, recipients, location)
class PatchEncoder(fl.Chain):
    """Chain that splits an image into non-overlapping patches and embeds
    each patch into a ``dim``-dimensional vector via a strided convolution."""

    def __init__(self, in_channels: int = 3, dim: int = 128, patch_size: int = 16) -> None:
        # Attributes are assigned before super().__init__, preserving the
        # original initialization order relative to fl.Chain.
        self.in_channels = in_channels
        self.dim = dim
        self.patch_size = patch_size
        # kernel_size == stride == patch_size yields non-overlapping patches.
        patch_projection = fl.Conv2d(
            in_channels=in_channels,
            out_channels=dim,
            kernel_size=patch_size,
            stride=patch_size,
        )
        flatten_patches = fl.Reshape(-1, dim)
        super().__init__(patch_projection, flatten_patches)
class TimestampAligner(Aligner):
    """Aligner that releases messages in timestamp order once they are older
    than ``lag`` seconds, dispatching them to per-stream callbacks.

    Fix: ``pq`` is used everywhere as an attribute (``self.pq[0]``,
    ``self.pq.pop()``, ``while self.pq``) but was declared as a plain
    method; it must be a lazily-initialized property.
    """

    def __init__(self, lag: float) -> None:
        # Heap is created lazily via the `pq` property.
        self._pq: Optional[TimestampedHeap] = None
        # Messages are held back until they are at least `lag` seconds old.
        self.lag: float = lag
        # stream_id -> callbacks to invoke for that stream's messages.
        self.callbacks: Dict[(str, List[LabgraphCallback])] = collections.defaultdict(list)
        self.active: bool = True
        self.terminate: bool = False

    def register(self, stream_id: str, callback: LabgraphCallback) -> None:
        """Register *callback* to receive aligned messages for *stream_id*."""
        self.callbacks[stream_id].append(callback)

    def push(self, params: LabgraphCallbackParams[TimestampedMessage]) -> None:
        """Queue a message for aligned delivery.

        Raises:
            LabgraphError: if *params* carries no stream id.
        """
        message = params.message
        if (params.stream_id is None):
            raise LabgraphError('TimestampAligner::push expected stream id, but got None.')
        # The heap count breaks timestamp ties, keeping insertion order stable.
        heap_entry: TimestampedHeapEntry = (message.timestamp, self.pq.count, params.stream_id, params)
        self.pq.push(heap_entry)

    async def get_aligned(self) -> None:
        """Pop every message older than `lag` seconds and dispatch it to the
        callbacks registered for its stream."""
        now = time.time()
        while (self.pq and ((self.pq[0][0] + self.lag) < now)):
            (_, _, stream_id, next_params) = self.pq.pop()
            for callback in self.callbacks[stream_id]:
                callback(next_params.message)

    def wait_for_completion(self) -> None:
        """Stop accepting work; run() drains the heap and then exits."""
        self.active = False

    def stop(self) -> None:
        """Terminate run() immediately, even if messages remain queued."""
        self.terminate = True

    async def run(self) -> None:
        """Main loop: periodically flush aligned messages until terminated,
        or until deactivated and the heap is empty."""
        while ((not self.terminate) and (self.active or self.pq)):
            (await asyncio.sleep(0.001))
            (await self.get_aligned())

    @property
    def pq(self) -> TimestampedHeap:
        """Lazily created priority heap of pending messages."""
        if (self._pq is None):
            self._pq = TimestampedHeap()
        return self._pq
# Fix: the route registrations and login requirement were bare expressions
# (stripped decorators) and therefore never applied to the view function.
@_api.route(api_url + 'uploads/', methods=['GET'])
@_api.route(api_url + 'uploads/<int:page>/', methods=['GET'])
@_auth.login_required
def get_uploads(page=1):
    """Return a paginated JSON collection of all uploads.

    The page size comes from the `per_page` query argument (defaulting to the
    app's `posts_per_page` setting) and is capped at 100.
    """
    uploads = FlicketUploads.query
    per_page = min(request.args.get('per_page', app.config['posts_per_page'], type=int), 100)
    data = FlicketUploads.to_collection_dict(uploads, page, per_page, 'bp_api.get_uploads')
    return jsonify(data)
class OptionPlotoptionsTilemapSonificationContexttracksMappingRate(Options):
    """Mapping options for the sonification context-track rate.

    Fix: each pair of identically named methods was a stripped
    @property/@setter pair — without the decorators the second definition
    silently shadowed the first.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the value."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Target property the value is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range constraint for the mapping."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# Fix: the bare tuple preceding the test was a stripped parametrize
# decorator. NOTE(review): bare `param(...)` suggests the file imports
# `from pytest import mark, param` — confirm against the import block.
@mark.parametrize(
    'config_name,overrides,expected',
    [
        param(
            'with_missing',
            [],
            DefaultsTreeNode(
                node=ConfigDefault(path='with_missing'),
                children=[GroupDefault(group='db', value='???'), ConfigDefault(path='_self_')],
            ),
            id='with_missing',
        )
    ],
)
def test_with_missing_and_skip_missing_flag(config_name: str, overrides: List[str], expected: DefaultsTreeNode) -> None:
    """A '???' group default must be tolerated when skip_missing is set."""
    _test_defaults_tree_impl(config_name=config_name, input_overrides=overrides, expected=expected, skip_missing=True)
def run_pylint(file_path):
    """Run pylint on *file_path* and return its relevant warnings.

    Returns an empty list when pylint's output cannot be parsed as JSON.
    """
    # Fix: pass the command as an argument list with the default shell=False,
    # so a path containing spaces or shell metacharacters is neither split
    # nor interpreted by a shell (quoting bug / injection hazard).
    pylint_process = subprocess.run(
        ['pylint', '--output-format=json', file_path],
        stdout=PIPE, stderr=STDOUT, check=False, text=True)
    try:
        pylint_json = json.loads(pylint_process.stdout)
    except json.JSONDecodeError:
        logging.warning(f'Failed to execute pylint: {pylint_process.stdout}')
        return []
    return _pylint_extract_relevant_warnings(pylint_json)
class TestAdaptationOffer(unittest.TestCase):
    """Tests for AdaptationOffer: lazy symbol loading and string repr."""

    def test_lazy_loading(self):
        """Protocols and factory given as dotted strings must only be
        imported when the corresponding attribute is first accessed."""
        LAZY_EXAMPLES = 'traits.adaptation.tests.lazy_examples'
        # Start from a clean slate: the module must not be cached yet.
        if (LAZY_EXAMPLES in sys.modules):
            del sys.modules[LAZY_EXAMPLES]
        offer = AdaptationOffer(factory=(LAZY_EXAMPLES + '.IBarToIFoo'), from_protocol=(LAZY_EXAMPLES + '.IBar'), to_protocol=(LAZY_EXAMPLES + '.IFoo'))
        # Creating the offer alone must not import the module...
        self.assertNotIn(LAZY_EXAMPLES, sys.modules)
        factory = offer.factory
        # ...but accessing `factory` must.
        self.assertIn(LAZY_EXAMPLES, sys.modules)
        from traits.adaptation.tests.lazy_examples import IBarToIFoo
        self.assertIs(factory, IBarToIFoo)
        # Evict the module again to prove `from_protocol` re-imports it.
        del sys.modules[LAZY_EXAMPLES]
        from_protocol = offer.from_protocol
        from traits.adaptation.tests.lazy_examples import IBar
        self.assertIs(from_protocol, IBar)
        # Same check for `to_protocol`.
        del sys.modules[LAZY_EXAMPLES]
        to_protocol = offer.to_protocol
        from traits.adaptation.tests.lazy_examples import IFoo
        self.assertIs(to_protocol, IFoo)

    def test_adaptation_offer_str_representation(self):
        """str() and repr() must both use the '<AdaptationOffer: ...>' form
        built from the protocol names."""
        class Foo():
            pass

        class Bar():
            pass

        adaptation_offer = AdaptationOffer(from_protocol=Foo, to_protocol=Bar)
        desired_repr = "<AdaptationOffer: '{}' -> '{}'>".format(adaptation_offer.from_protocol_name, adaptation_offer.to_protocol_name)
        self.assertEqual(desired_repr, str(adaptation_offer))
        self.assertEqual(desired_repr, repr(adaptation_offer))
def convert_theme(value, level=3):
    """Convert *value* into a Theme.

    Non-string values are returned unchanged. A library image reference of
    the form '@library:name' is resolved through the ImageLibrary and its
    associated theme is returned when available; any other string is treated
    as an image and wrapped in a new Theme.
    """
    if not isinstance(value, str):
        return value
    # Fix: the original compared value[:1] to '' which can never be true
    # together with find(':') >= 2 (dead branch); '@' is the library-image
    # prefix used by convert_image, so that is the intended check.
    if (value[:1] == '@') and (value.find(':') >= 2):
        try:
            from .image.image import ImageLibrary
            info = ImageLibrary.image_info(value)
        except Exception:
            # Fix: narrowed the bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; lookup failures still fall through.
            info = None
        if info is not None:
            return info.theme
    from .theme import Theme
    return Theme(image=convert_image(value, (level + 1)))
def _get_all_summuries(root_log_dir: str): summary_dir = os.path.join(root_log_dir, 'summaries') summary_names = [] if os.path.exists(summary_dir): filenames = os.listdir(summary_dir) for filename in filenames: if filename.endswith('.summary'): summary_names.append(os.path.splitext(filename)[0]) return summary_names
from contextlib import contextmanager


# Fix: this one-shot generator wraps its yield in redirect contexts — the
# @contextmanager decorator was stripped; without it the function cannot be
# used in a `with` statement.
@contextmanager
def _capture_task_logs():
    """Context manager that routes the root logger and stdout/stderr through
    the task logger: stdout lines are logged at INFO, stderr at WARNING."""
    # Mirror the task logger's level and handlers onto the root logger so
    # library log records end up in the task log as well.
    root_logger = logging.getLogger()
    root_logger.setLevel(logger.level)
    root_logger.handlers[:] = logger.handlers
    info_writer = StreamLogWriter(logger, logging.INFO)
    warning_writer = StreamLogWriter(logger, logging.WARNING)
    with redirect_stdout(info_writer), redirect_stderr(warning_writer):
        (yield)
class ExecutionSpec(_common_models.FlyteIdlEntity):
    """Python model of the flyteidl ExecutionSpec message.

    Fix: the accessor methods were stripped @property declarations (they are
    read as attributes in to_flyte_idl, e.g. ``self.launch_plan.to_flyte_idl()``)
    and ``from_flyte_idl`` was a stripped @classmethod (it calls ``cls(...)``).
    """

    def __init__(self, launch_plan, metadata, notifications=None, disable_all=None,
                 labels=None, annotations=None, auth_role=None, raw_output_data_config=None,
                 max_parallelism: Optional[int] = None,
                 security_context: Optional[security.SecurityContext] = None,
                 overwrite_cache: Optional[bool] = None,
                 envs: Optional[_common_models.Envs] = None,
                 tags: Optional[typing.List[str]] = None,
                 cluster_assignment: Optional[ClusterAssignment] = None):
        """Store the execution configuration; labels, annotations and
        auth_role default to empty model objects when not provided."""
        self._launch_plan = launch_plan
        self._metadata = metadata
        self._notifications = notifications
        self._disable_all = disable_all
        self._labels = (labels or _common_models.Labels({}))
        self._annotations = (annotations or _common_models.Annotations({}))
        self._auth_role = (auth_role or _common_models.AuthRole())
        self._raw_output_data_config = raw_output_data_config
        self._max_parallelism = max_parallelism
        self._security_context = security_context
        self._overwrite_cache = overwrite_cache
        self._envs = envs
        self._tags = tags
        self._cluster_assignment = cluster_assignment

    @property
    def launch_plan(self):
        """Identifier of the launch plan to execute."""
        return self._launch_plan

    @property
    def metadata(self):
        """ExecutionMetadata for this execution."""
        return self._metadata

    @property
    def notifications(self):
        """Optional NotificationList."""
        return self._notifications

    @property
    def disable_all(self):
        """Optional flag disabling all notifications."""
        return self._disable_all

    @property
    def labels(self):
        """Labels attached to the execution (never None)."""
        return self._labels

    @property
    def annotations(self):
        """Annotations attached to the execution (never None)."""
        return self._annotations

    @property
    def auth_role(self):
        """AuthRole for the execution (never None)."""
        return self._auth_role

    @property
    def raw_output_data_config(self):
        """Optional RawOutputDataConfig override."""
        return self._raw_output_data_config

    @property
    def max_parallelism(self) -> int:
        return self._max_parallelism

    @property
    def security_context(self) -> typing.Optional[security.SecurityContext]:
        return self._security_context

    @property
    def overwrite_cache(self) -> Optional[bool]:
        return self._overwrite_cache

    @property
    def envs(self) -> Optional[_common_models.Envs]:
        return self._envs

    @property
    def tags(self) -> Optional[typing.List[str]]:
        return self._tags

    @property
    def cluster_assignment(self) -> Optional[ClusterAssignment]:
        return self._cluster_assignment

    def to_flyte_idl(self):
        """Serialize this model to its protobuf representation."""
        return _execution_pb2.ExecutionSpec(
            launch_plan=self.launch_plan.to_flyte_idl(),
            metadata=self.metadata.to_flyte_idl(),
            notifications=(self.notifications.to_flyte_idl() if self.notifications else None),
            disable_all=self.disable_all,
            labels=self.labels.to_flyte_idl(),
            annotations=self.annotations.to_flyte_idl(),
            auth_role=(self._auth_role.to_flyte_idl() if self.auth_role else None),
            raw_output_data_config=(self._raw_output_data_config.to_flyte_idl() if self._raw_output_data_config else None),
            max_parallelism=self.max_parallelism,
            security_context=(self.security_context.to_flyte_idl() if self.security_context else None),
            overwrite_cache=self.overwrite_cache,
            envs=(self.envs.to_flyte_idl() if self.envs else None),
            tags=self.tags,
            cluster_assignment=(self._cluster_assignment.to_flyte_idl() if self._cluster_assignment else None),
        )

    @classmethod
    def from_flyte_idl(cls, p):
        """Deserialize an ExecutionSpec protobuf into this model."""
        return cls(
            launch_plan=_identifier.Identifier.from_flyte_idl(p.launch_plan),
            metadata=ExecutionMetadata.from_flyte_idl(p.metadata),
            notifications=(NotificationList.from_flyte_idl(p.notifications) if p.HasField('notifications') else None),
            disable_all=(p.disable_all if p.HasField('disable_all') else None),
            labels=_common_models.Labels.from_flyte_idl(p.labels),
            annotations=_common_models.Annotations.from_flyte_idl(p.annotations),
            auth_role=_common_models.AuthRole.from_flyte_idl(p.auth_role),
            raw_output_data_config=(_common_models.RawOutputDataConfig.from_flyte_idl(p.raw_output_data_config) if p.HasField('raw_output_data_config') else None),
            max_parallelism=p.max_parallelism,
            security_context=(security.SecurityContext.from_flyte_idl(p.security_context) if p.security_context else None),
            overwrite_cache=p.overwrite_cache,
            envs=(_common_models.Envs.from_flyte_idl(p.envs) if p.HasField('envs') else None),
            tags=p.tags,
            cluster_assignment=(ClusterAssignment.from_flyte_idl(p.cluster_assignment) if p.HasField('cluster_assignment') else None),
        )
def get_trees_from_file(filename, fileobject=None):
    """Read phylogenetic trees from *filename*, dispatching on its extension
    (.zip, .tar, .tar.gz/.tgz, .gz, .bz2, or plain nexus/newick).

    If *fileobject* is given it is read instead of opening *filename*; a
    file object opened by this function is closed before returning (fix:
    the original leaked the file and archive handles).
    """
    opened_here = fileobject is None
    fileobject = (fileobject or open(filename, 'rb'))
    trees = []

    def extend(btext, fname):
        # One entry per contained file, named after its basename sans extension.
        name = os.path.splitext(os.path.basename(fname))[0]
        trees.extend(get_trees_from_nexus_or_newick(btext, name))

    try:
        if filename.endswith('.zip'):
            # ZipFile.close() leaves an externally supplied file object open.
            with zipfile.ZipFile(fileobject) as zf:
                for fname in zf.namelist():
                    extend(zf.read(fname), fname)
        elif filename.endswith('.tar'):
            tf = tarfile.TarFile(fileobj=fileobject)
            try:
                for fname in tf.getnames():
                    extend(tf.extractfile(fname).read(), fname)
            finally:
                tf.close()
        elif (filename.endswith('.tar.gz') or filename.endswith('.tgz')):
            gz = gzip.GzipFile(fileobj=fileobject)
            try:
                tf = tarfile.TarFile(fileobj=gz)
                try:
                    for fname in tf.getnames():
                        extend(tf.extractfile(fname).read(), fname)
                finally:
                    tf.close()
            finally:
                gz.close()
        elif filename.endswith('.gz'):
            with gzip.GzipFile(fileobj=fileobject) as gz:
                extend(gz.read(), filename)
        elif filename.endswith('.bz2'):
            with bz2.BZ2File(fileobject) as bf:
                extend(bf.read(), filename)
        else:
            extend(fileobject.read(), filename)
    finally:
        # Only close handles we opened ourselves; caller-supplied file
        # objects remain the caller's responsibility.
        if opened_here:
            fileobject.close()
    return trees
class TestPlayable(TestIntegrationBase):
    """Integration tests for playable-file media-format detection."""

    module = player

    def setUp(self):
        super(TestPlayable, self).setUp()
        # Register the playable-mimetype detector with a fresh plugin manager.
        self.manager = self.manager_module.MimetypeActionPluginManager(self.app)
        self.manager.register_mimetype_function(self.player_module.detect_playable_mimetype)

    def test_playablefile(self):
        """Each known audio extension maps to its matching media format."""
        for extension, expected_format in (('mp3', 'mp3'), ('wav', 'wav'), ('ogg', 'ogg')):
            playable = self.module.PlayableFile(path=('asdf.%s' % extension), app=self.app)
            self.assertEqual(playable.media_format, expected_format)
class Workspace(UUIDModel, CreatedUpdatedAt, MainBase):
    """A Fief tenant workspace, optionally backed by its own database.

    Fix: ``is_byod``, ``use_schema`` and ``schema_name`` were plain methods
    but are read as attributes (``if self.is_byod:``, ``self.schema_name``);
    a bound method is always truthy, so every workspace was treated as BYOD.
    They are restored as properties.
    """

    __tablename__ = 'workspaces'

    name: Mapped[str] = mapped_column(String(length=255), nullable=False)
    domain: Mapped[str] = mapped_column(String(length=255), nullable=False)
    # None means the workspace uses the main (shared) database.
    database_type: Mapped[(DatabaseType | None)] = mapped_column(Enum(DatabaseType), nullable=True)
    database_use_schema: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    database_table_prefix: Mapped[str] = mapped_column(String(length=255), default='fief_', nullable=False)
    # Connection credentials are stored encrypted at rest.
    database_host: Mapped[(str | None)] = mapped_column(StringEncryptedType(Text, settings.encryption_key, FernetEngine), nullable=True)
    database_port: Mapped[(str | None)] = mapped_column(StringEncryptedType(Text, settings.encryption_key, FernetEngine), nullable=True)
    database_username: Mapped[(str | None)] = mapped_column(StringEncryptedType(Text, settings.encryption_key, FernetEngine), nullable=True)
    database_password: Mapped[(str | None)] = mapped_column(StringEncryptedType(Text, settings.encryption_key, FernetEngine), nullable=True)
    database_name: Mapped[(str | None)] = mapped_column(StringEncryptedType(Text, settings.encryption_key, FernetEngine), nullable=True)
    database_ssl_mode: Mapped[(str | None)] = mapped_column(StringEncryptedType(Text, settings.encryption_key, FernetEngine), nullable=True)
    alembic_revision: Mapped[(str | None)] = mapped_column(String(length=255), nullable=True, index=True)
    users_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0, server_default='0')
    workspace_users: Mapped[list['WorkspaceUser']] = relationship('WorkspaceUser', back_populates='workspace', cascade='all, delete')

    def get_database_connection_parameters(self, asyncio=True) -> DatabaseConnectionParameters:
        """Return the connection parameters for this workspace's database:
        the shared main database (with a per-workspace schema) when no
        dedicated database is configured, otherwise the workspace's own."""
        if self.is_byod:
            url = settings.get_database_connection_parameters(asyncio, schema=self.schema_name)
        else:
            url = create_database_connection_parameters(
                cast(DatabaseType, self.database_type),
                asyncio=asyncio,
                username=self.database_username,
                password=self.database_password,
                host=self.database_host,
                port=(int(self.database_port) if self.database_port else None),
                database=self.database_name,
                path=settings.database_location,
                schema=self.schema_name,
                ssl_mode=self.database_ssl_mode,
            )
        return url

    @property
    def is_byod(self) -> bool:
        """True when the workspace has no dedicated database configured."""
        return (self.database_type is None)

    @property
    def use_schema(self) -> bool:
        """Whether tables live in a dedicated schema; always True for BYOD."""
        if self.is_byod:
            return True
        return self.database_use_schema

    @property
    def schema_name(self) -> (str | None):
        """Schema name (the workspace id) when schemas are used, else None."""
        if self.use_schema:
            return str(self.id)
        return None