code stringlengths 281 23.7M |
|---|
class PokerGame():
    """Base driver for one poker game.

    Holds the injected collaborators (players, event dispatcher, deck
    factory, score detector) and the shared phase helpers (card assignment,
    pot/score creation, winner detection, showdown).  Concrete variants
    implement :meth:`play_hand`.
    """

    # Extra seconds of grace past BET_TIMEOUT before a bet times out.
    TIMEOUT_TOLERANCE = 2
    # Seconds a player has to place a bet.
    BET_TIMEOUT = 30
    # Pacing pauses (seconds) between game phases, for client-side animation.
    WAIT_AFTER_CARDS_ASSIGNMENT = 1
    WAIT_AFTER_BET_ROUND = 1
    WAIT_AFTER_SHOWDOWN = 2
    WAIT_AFTER_WINNER_DESIGNATION = 5

    def __init__(self, id: str, game_players: GamePlayers, event_dispatcher: GameEventDispatcher, deck_factory: DeckFactory, score_detector: ScoreDetector):
        self._id: str = id
        self._game_players: GamePlayers = game_players
        self._event_dispatcher: GameEventDispatcher = event_dispatcher
        self._deck_factory: DeckFactory = deck_factory
        self._score_detector: ScoreDetector = score_detector
        self._bet_handler: GameBetHandler = self._create_bet_handler()
        self._winners_detector: GameWinnersDetector = self._create_winners_detector()

    def event_dispatcher(self) -> GameEventDispatcher:
        """Accessor for the game's event dispatcher."""
        return self._event_dispatcher

    def play_hand(self, dealer_id: str):
        """Play a single hand; must be overridden by concrete game variants."""
        # BUG FIX: the original raised ``NotImplemented`` (a constant, not an
        # exception), which itself raises TypeError; the correct abstract-method
        # idiom is NotImplementedError.
        raise NotImplementedError

    def _create_bet_handler(self) -> GameBetHandler:
        """Build the bet handler wired to this game's players and dispatcher."""
        return GameBetHandler(game_players=self._game_players, bet_rounder=GameBetRounder(self._game_players), event_dispatcher=self._event_dispatcher, bet_timeout=self.BET_TIMEOUT, timeout_tolerance=self.TIMEOUT_TOLERANCE, wait_after_round=self.WAIT_AFTER_BET_ROUND)

    def _create_winners_detector(self) -> GameWinnersDetector:
        return GameWinnersDetector(self._game_players)

    def _create_pots(self) -> GamePots:
        return GamePots(self._game_players)

    def _create_scores(self) -> GameScores:
        return GameScores(self._score_detector)

    def _assign_cards(self, number_of_cards: int, dealer_id: str, deck: Deck, scores: GameScores):
        """Deal *number_of_cards* to every player starting after the dealer,
        notifying each player of their own cards and score."""
        for player in self._game_players.round(dealer_id):
            scores.assign_cards(player.id, deck.pop_cards(number_of_cards))
            self._send_player_score(player, scores)
        gevent.sleep(self.WAIT_AFTER_CARDS_ASSIGNMENT)

    def _send_player_score(self, player: Player, scores: GameScores):
        """Send a player their private cards and score."""
        self._event_dispatcher.cards_assignment_event(player=player, cards=scores.player_cards(player.id), score=scores.player_score(player.id))

    def _game_over_detection(self):
        """Abort the game when fewer than two active players remain."""
        if (self._game_players.count_active() < 2):
            raise EndGameException

    def _detect_winners(self, pots: GamePots, scores: GameScores):
        """Resolve every pot (side pots first), split the money among the
        winners of each pot and broadcast the designation events.

        Raises GameError when a pot has no eligible players left.
        """
        for (i, pot) in enumerate(reversed(pots)):
            winners = self._winners_detector.get_winners(pot.players, scores)
            try:
                money_split = round((pot.money / len(winners)))
            except ZeroDivisionError:
                raise GameError('No players left')
            else:
                for winner in winners:
                    winner.add_money(money_split)
                self._event_dispatcher.winner_designation_event(players=self._game_players.active, pot=pot, winners=winners, money_split=money_split, upcoming_pots=pots[(i + 1):])
                gevent.sleep(self.WAIT_AFTER_WINNER_DESIGNATION)

    def _showdown(self, scores: GameScores):
        """Reveal the active players' scores, then pause for the clients."""
        self._event_dispatcher.showdown_event(self._game_players.active, scores)
        gevent.sleep(self.WAIT_AFTER_SHOWDOWN)
def encode_data(primary_type, types, data):
    """EIP-712 style struct encoding: hash of the type signature followed by
    each field of *data* encoded according to its declared type."""
    abi_types = ['bytes32']
    abi_values = [hash_struct_type(primary_type, types)]
    for field in types[primary_type]:
        field_type, field_value = encode_field(
            types, field['name'], field['type'], data[field['name']]
        )
        abi_types.append(field_type)
        abi_values.append(field_value)
    return encode(abi_types, abi_values)
class UpdateOtherFieldHook(UpdateGenericHook):
    """Generic hook that stores the result of *update_function* on a
    different attribute (*other_field*) of the triggering object."""

    def __init__(self, other_field: str, update_function: Callable[([T], Any)], update_condition: Optional[Callable[([T], bool)]]=None, only_trigger_on_change: bool=True, triggers: Optional[Iterable[HookEventType]]=None) -> None:
        # Wrap the caller's function so its result lands on ``other_field``.
        def _write_other_field(obj):
            setattr(obj, other_field, update_function(obj))

        super().__init__(
            update_function=_write_other_field,
            update_condition=update_condition,
            only_trigger_on_change=only_trigger_on_change,
            triggers=triggers,
        )
class FinancialInformationSubCategoryType(SubCategoryBase, Enum):
    """Sub-categories of financial information detected by the scanner."""

    CreditCard = 'CreditCard'
    CardExpiry = 'CardExpiry'
    BankAccountNumber = 'BankAccountNumber'
    BankRoutingNumber = 'BankRoutingNumber'
    SwiftCode = 'SwiftCode'
    TaxIdentificationNumber = 'TaxIdentificationNumber'

    @classmethod
    def list_choices(cls) -> Dict[('SubCategoryBase', List[str])]:
        """Map each member to the pattern list used to detect it."""
        # FIX: the method takes ``cls`` and references members through it, but
        # the extracted source had lost the @classmethod decorator.
        return {cls.CreditCard: SubCategoryPattern.FinancialInformation.CREDIT_CARD, cls.CardExpiry: SubCategoryPattern.FinancialInformation.CARD_EXPIRY, cls.BankAccountNumber: SubCategoryPattern.FinancialInformation.BANK_ACCOUNT_NUMBER, cls.BankRoutingNumber: SubCategoryPattern.FinancialInformation.BANK_ROUTING_NUMBER, cls.SwiftCode: SubCategoryPattern.FinancialInformation.SWIFT_CODE, cls.TaxIdentificationNumber: SubCategoryPattern.FinancialInformation.TAX_ID}
def test_dualperm_wg_fractured_sgas_property(dual_poro_dual_perm_wg_run):
    """Check fracture SGAS values read back from the restart of a
    dual-porosity/dual-permeability water-gas run."""
    # NOTE(review): the extracted source read ``date=,`` (a syntax error —
    # the value was lost).  Restored as date='last'; confirm against the
    # original test before trusting this value.
    sgas = dual_poro_dual_perm_wg_run.get_property_from_restart('SGAS', date='last', fracture=True)
    assert sgas.values[(3, 0, 0)] == pytest.approx(0.0)
    assert sgas.values[(0, 1, 0)] == pytest.approx(0.0)
    assert sgas.values[(4, 2, 0)] == pytest.approx(0.178411)
def comment_mismatches_in_file(to_comment_lines, fqp):
    """Comment out every line of file *fqp* containing any substring in
    *to_comment_lines*, unless the line is already a comment.

    A matched line is replaced by ``"\\t# "`` plus the line with its leading
    whitespace stripped.  The file is rewritten in place only when at least
    one line actually changed.
    """
    with open(fqp) as fp:
        contents = fp.readlines()
    changed = False
    for idx, line in enumerate(contents):
        # Generator instead of a throwaway list inside any().
        if any(marker in line for marker in to_comment_lines):
            # Leave lines that are already commented out untouched.
            if not line.strip().startswith('#'):
                changed = True
                contents[idx] = '\t# ' + contents[idx].lstrip()
    if changed:
        print('Should rewrite file!')
        with open(fqp, 'w') as fp:
            fp.write(''.join(contents))
class OptionSeriesAreasplineSonificationTracksMappingFrequency(Options):
    """Highcharts option wrapper: sonification track frequency mapping.

    FIX: the extracted source had duplicate ``def`` pairs with the
    @property/@<name>.setter decorators stripped, so each setter silently
    replaced its getter.  Restored as standard property pairs; the external
    read/write interface (attribute access) is what callers of the Options
    framework expect.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def _get_twistd_cmdline(pprofiler, sprofiler):
    """Assemble the twistd command lines for the Portal and Server processes.

    Returns a ``(portal_cmd, server_cmd)`` pair of argument lists.  pidfiles
    are added on POSIX only; profiler flags are added when requested.
    """
    portal_cmd = [
        TWISTED_BINARY,
        f'--python={PORTAL_PY_FILE}',
        '--logger=evennia.utils.logger.GetPortalLogObserver',
    ]
    server_cmd = [
        TWISTED_BINARY,
        f'--python={SERVER_PY_FILE}',
        '--logger=evennia.utils.logger.GetServerLogObserver',
    ]
    if os.name != 'nt':
        # Windows has no pidfile handling in twistd.
        portal_cmd.append(f'--pidfile={PORTAL_PIDFILE}')
        server_cmd.append(f'--pidfile={SERVER_PIDFILE}')
    if pprofiler:
        portal_cmd += ['--savestats', '--profiler=cprofile', f'--profile={PPROFILER_LOGFILE}']
    if sprofiler:
        server_cmd += ['--savestats', '--profiler=cprofile', f'--profile={SPROFILER_LOGFILE}']
    return portal_cmd, server_cmd
def information_based_similarity(x, y, n):
    """Information-based similarity (IBS) between two 1-D series *x* and *y*.

    Both series are first reduced to binary symbol sequences (1 where the
    series increases, 0 otherwise), then sliced into overlapping words of
    length *n*.  Word-frequency ranks and Shannon-entropy weights of the two
    sequences are compared; the weighted average rank difference over words
    that occur in both sequences is returned.
    """
    # Build the list of all binary words of length n.
    Wordlist = []
    Space = [[0, 0], [0, 1], [1, 0], [1, 1]]
    Sample = [0, 1]
    if (n == 1):
        Wordlist = Sample
    if (n == 2):
        Wordlist = Space
    elif (n > 1):
        # Grow words one symbol at a time, starting from the length-2 space.
        Wordlist = Space
        Buff = []
        for k in range(0, (n - 2)):
            Buff = []
            for i in range(0, len(Wordlist)):
                Buff.append(tuple(Wordlist[i]))
            Buff = tuple(Buff)
            Wordlist = []
            for i in range(0, len(Buff)):
                for j in range(0, len(Sample)):
                    Wordlist.append(list(Buff[i]))
                    Wordlist[(len(Wordlist) - 1)].append(Sample[j])
    Wordlist.sort()
    Input = [[], []]
    Input[0] = x
    Input[1] = y
    # Symbolize each input: 1 for a positive first difference, else 0.
    SymbolicSeq = [[], []]
    for i in range(0, 2):
        Encoder = numpy.diff(Input[i])
        for j in range(0, (len(Input[i]) - 1)):
            if (Encoder[j] > 0):
                SymbolicSeq[i].append(1)
            else:
                SymbolicSeq[i].append(0)
    # Embed each symbol sequence into overlapping words of length n.
    Wm = []
    Wm.append(embed_seq(SymbolicSeq[0], 1, n).tolist())
    Wm.append(embed_seq(SymbolicSeq[1], 1, n).tolist())
    # Count occurrences of every possible word in each sequence.
    Count = [[], []]
    for i in range(0, 2):
        for k in range(0, len(Wordlist)):
            Count[i].append(Wm[i].count(Wordlist[k]))
    # Normalize counts to probabilities.
    Prob = [[], []]
    for i in range(0, 2):
        Sigma = 0
        for j in range(0, len(Wordlist)):
            Sigma += Count[i][j]
        for k in range(0, len(Wordlist)):
            Prob[i].append(numpy.true_divide(Count[i][k], Sigma))
    # Per-word entropy contribution p*log2(p) (0 for unseen words).
    Entropy = [[], []]
    for i in range(0, 2):
        for k in range(0, len(Wordlist)):
            if (Prob[i][k] == 0):
                Entropy[i].append(0)
            else:
                Entropy[i].append((Prob[i][k] * numpy.log2(Prob[i][k])))
    # Rank words by frequency (most frequent -> rank 0); the -1 sentinel
    # prevents a tied count from matching the same rank twice.
    Rank = [[], []]
    Buff = [[], []]
    Buff[0] = tuple(Count[0])
    Buff[1] = tuple(Count[1])
    for i in range(0, 2):
        Count[i].sort()
        Count[i].reverse()
        for k in range(0, len(Wordlist)):
            Rank[i].append(Count[i].index(Buff[i][k]))
            Count[i][Count[i].index(Buff[i][k])] = (- 1)
    # Entropy-weighted average of rank differences over shared words.
    IBS = 0
    Z = 0
    n = 0
    for k in range(0, len(Wordlist)):
        if ((Buff[0][k] != 0) & (Buff[1][k] != 0)):
            F = ((- Entropy[0][k]) - Entropy[1][k])
            IBS += numpy.multiply(numpy.absolute((Rank[0][k] - Rank[1][k])), F)
            Z += F
        else:
            # Word absent from at least one sequence: excluded from the average.
            n += 1
    IBS = numpy.true_divide(IBS, Z)
    IBS = numpy.true_divide(IBS, (len(Wordlist) - n))
    return IBS
class ClientInformation():
    """GTK dialog showing the properties of a managed client (type, alias,
    hostname, MAC, IP, user, CPU/RAM/VGA fetched from the client itself)."""

    def __init__(self, parent):
        builder = Gtk.Builder()
        builder.add_from_file(locate_resource('client_information.ui'))
        builder.connect_signals(self)
        self.dialog = builder.get_object('dlg_client_information')
        self.dialog.set_transient_for(parent)
        self.btn_edit_alias = builder.get_object('btn_edit_alias')
        self.dlg_edit_alias = builder.get_object('dlg_edit_alias')
        self.ent_alias = builder.get_object('ent_alias')
        self.lbl_type = builder.get_object('lbl_type')
        self.lbl_alias = builder.get_object('lbl_alias')
        self.lbl_hostname = builder.get_object('lbl_hostname')
        self.lbl_mac = builder.get_object('lbl_mac')
        self.lbl_ip = builder.get_object('lbl_ip')
        self.lbl_user = builder.get_object('lbl_user')
        self.lbl_cpu = builder.get_object('lbl_cpu')
        self.lbl_ram = builder.get_object('lbl_ram')
        self.lbl_vga = builder.get_object('lbl_vga')
        self.client = None

    def run(self, client, execute):
        """Populate the dialog from *client* and show it modally.

        *execute* is a callable ``(handle, command) -> Deferred`` used to run
        shell commands on the client; CPU/RAM/VGA labels are filled in
        asynchronously from its results.
        """
        self.client = client
        inst = client[C_INSTANCE]
        handle = (inst.hsystem or client[C_SESSION_HANDLE])
        self.lbl_type.set_text(inst.type)
        self.lbl_alias.set_text(inst.alias)
        self.lbl_hostname.set_text(inst.hostname)
        self.lbl_mac.set_text(inst.mac)
        self.lbl_ip.set_text(handle.split(':')[0])
        if client[C_SESSION_HANDLE]:
            (uname, realname) = inst.users[client[C_SESSION_HANDLE]].values()
            if realname:
                user = '{} ({})'.format(uname, realname)
            else:
                user = uname
        else:
            user = ''
        self.lbl_user.set_text(user)
        # Clear the async fields first; they are filled when results arrive.
        self.lbl_cpu.set_text('')
        self.lbl_ram.set_text('')
        self.lbl_vga.set_text('')
        if handle:
            execute(handle, 'echo "$CPU"').addCallback(self.cb_set_text, self.lbl_cpu)
            execute(handle, 'echo "$RAM MiB"').addCallback(self.cb_set_text, self.lbl_ram)
            execute(handle, 'echo "$VGA"').addCallback(self.cb_set_text, self.lbl_vga)
        self.dialog.set_title((_('Properties of %s') % inst.get_name()))
        self.dialog.run()
        self.dialog.hide()

    def cb_set_text(self, result, widget):
        """Deferred callback: write the command output into *widget*.

        BUG FIX: the original was ``def cb_set_text(result, widget)`` without
        ``self``; since it is registered as the bound method
        ``self.cb_set_text``, every callback would have raised TypeError.
        """
        widget.set_text(result.decode().strip())

    def on_edit_alias_clicked(self, _widget):
        """Show the alias editor; on confirm (reply == 1) store the new alias."""
        inst = self.client[C_INSTANCE]
        self.ent_alias.set_text(inst.alias)
        reply = self.dlg_edit_alias.run()
        if (reply == 1):
            inst.set_name(self.ent_alias.get_text().strip())
            self.lbl_alias.set_text(inst.alias.strip())
        self.dlg_edit_alias.hide()
class Cpu_interface(Peripherical_interface, metaclass=ABCMeta):
    # NOTE(review): this block looks like decompiled output.  The paired
    # ``model``/``model`` and ``temp``/``temp`` definitions below were almost
    # certainly @property / @<name>.setter pairs whose decorators were
    # stripped; as written, each later ``def`` silently replaces the earlier
    # one.  Confirm against the original source before changing behavior.

    # CPU vendor string (e.g. used as a key into the id table in get_id).
    vendor: str
    # Backing field for the (presumed) ``model`` property.
    _model: str
    info: str

    def model(self) -> str:
        # Getter half of the presumed ``model`` property.
        return self._model

    def model(self, value: str):
        # Setter half: direct assignment is rejected.
        raise NotImplementedError

    def temp(self) -> float:
        # Getter half of the presumed ``temp`` property: refresh the cached
        # value from the concrete get_temp() implementation, then return it.
        try:
            self._temp = self.get_temp()
        except NotImplementedError as e:
            # Decompiler artifact: re-raise while explicitly dropping the
            # local reference to the exception object.
            try:
                raise e
            finally:
                e = None
                del e
        else:
            return self._temp

    def temp(self, value: float):
        # Setter half of the presumed ``temp`` property.
        self._temp = value

    def __init__(self, os, vendor, model):
        super().__init__(os)
        self.vendor = vendor
        # NOTE(review): with the property decorators stripped, this assigns a
        # plain instance attribute that shadows the ``model`` methods above.
        self.model = model

    def get_temp(self) -> float:
        # Hook for concrete CPU implementations; abstract by convention.
        raise NotImplementedError

    def get_id(self, cpu_list: Dict[(str, str)]) -> str:
        # Look up this CPU's id by vendor and lower-cased model name,
        # falling back to the 'unknown' table entry.
        if (self.model.lower() in cpu_list[self.vendor]):
            return cpu_list[self.vendor][self.model.lower()]
        else:
            return cpu_list['unknown']
class OptionPlotoptionsWaterfallSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Highcharts option wrapper: lowpass-filter frequency mapping for the
    waterfall sonification default instrument.

    FIX: the extracted source had duplicate ``def`` pairs with the
    @property/@<name>.setter decorators stripped, so each setter silently
    replaced its getter.  Restored as standard property pairs.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def debug_server_general(args, settings):
    """Drive a LangServer instance through the debug requests selected in
    *args*, printing human-readable (or, with ``debug_full_result``, raw
    JSON) results for each.

    A server is created over an in-process pipe; each enabled section below
    initializes/saves the target document as needed, issues one LSP request
    and prints its outcome.
    """
    # In-process pipe standing in for the client<->server transport.
    (prb, pwb) = os.pipe()
    tmpin = os.fdopen(prb, 'rb')
    tmpout = os.fdopen(pwb, 'wb')
    s = LangServer(conn=JSONRPC2Connection(ReadWriter(tmpin, tmpout)), settings=settings)
    # --- "initialize" -----------------------------------------------------
    if args.debug_rootpath:
        dir_exists = os.path.isdir(args.debug_rootpath)
        if (dir_exists is False):
            error_exit("Specified 'debug_rootpath' does not exist or is not a directory")
        print('\nTesting "initialize" request:')
        print('  Root = "{}"'.format(args.debug_rootpath))
        s.serve_initialize({'params': {'rootPath': args.debug_rootpath}})
        if (len(s.post_messages) == 0):
            print('  Successful!')
        else:
            print('  Successful with errors:')
            for message in s.post_messages:
                print('    {}'.format(message[1]))
        print('\n  Source directories:')
        for source_dir in s.source_dirs:
            print('    {}'.format(source_dir))
    # --- "textDocument/publishDiagnostics" --------------------------------
    if args.debug_diagnostics:
        print('\nTesting "textDocument/publishDiagnostics" notification:')
        check_request_params(args, loc_needed=False)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        (diag_results, _) = s.get_diagnostics(args.debug_filepath)
        if (diag_results is not None):
            if args.debug_full_result:
                print(json.dumps(diag_results, indent=2))
            else:
                # LSP severities are 1-based; index with severity - 1.
                sev_map = ['ERROR', 'WARNING', 'INFO']
                if (len(diag_results) == 0):
                    print('\nNo errors or warnings')
                else:
                    print('\nReported errors or warnings:')
                for diag in diag_results:
                    sline = diag['range']['start']['line']
                    message = diag['message']
                    sev = sev_map[(diag['severity'] - 1)]
                    print('  {:5d}:{}  "{}"'.format(sline, sev, message))
    # --- "textDocument/documentSymbol" ------------------------------------
    if args.debug_symbols:
        print('\nTesting "textDocument/documentSymbol" request:')
        check_request_params(args, loc_needed=False)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        symbol_results = s.serve_document_symbols({'params': {'textDocument': {'uri': args.debug_filepath}}})
        if args.debug_full_result:
            print(json.dumps(symbol_results, indent=2))
        else:
            for symbol in symbol_results:
                sline = symbol['location']['range']['start']['line']
                if ('containerName' in symbol):
                    parent = symbol['containerName']
                else:
                    parent = 'null'
                print('  line {2:5d}  symbol -> {1:3d}:{0:30} parent = {3}'.format(symbol['name'], symbol['kind'], sline, parent))
    # --- "workspace/symbol" -----------------------------------------------
    if (args.debug_workspace_symbols is not None):
        print('\nTesting "workspace/symbol" request:')
        if (args.debug_rootpath is None):
            error_exit("'debug_rootpath' not specified for debug request")
        symbol_results = s.serve_workspace_symbol({'params': {'query': args.debug_workspace_symbols}})
        if args.debug_full_result:
            print(json.dumps(symbol_results, indent=2))
        else:
            for symbol in symbol_results:
                path = path_from_uri(symbol['location']['uri'])
                sline = symbol['location']['range']['start']['line']
                if ('containerName' in symbol):
                    parent = symbol['containerName']
                else:
                    parent = 'null'
                print('  {2}::{3:d}  symbol -> {1:3d}:{0:30} parent = {4}'.format(symbol['name'], symbol['kind'], os.path.relpath(path, args.debug_rootpath), sline, parent))
    # --- "textDocument/completion" ----------------------------------------
    if args.debug_completion:
        print('\nTesting "textDocument/completion" request:')
        check_request_params(args)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        # LSP positions are 0-based; CLI line/char arguments are 1-based.
        completion_results = s.serve_autocomplete({'params': {'textDocument': {'uri': args.debug_filepath}, 'position': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}}})
        if (completion_results is None):
            print('  No results!')
        else:
            print('  Results:')
            if args.debug_full_result:
                print(json.dumps(completion_results, indent=2))
            else:
                for obj in completion_results:
                    print('    {}: {} -> {}'.format(obj['kind'], obj['label'], obj['detail']))
    # --- "textDocument/signatureHelp" -------------------------------------
    if args.debug_signature:
        print('\nTesting "textDocument/signatureHelp" request:')
        check_request_params(args)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        signature_results = s.serve_signature({'params': {'textDocument': {'uri': args.debug_filepath}, 'position': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}}})
        if (signature_results is None):
            print('  No Results!')
        else:
            print('  Results:')
            if args.debug_full_result:
                print(json.dumps(signature_results, indent=2))
            else:
                active_param = signature_results.get('activeParameter', 0)
                print('  Active param = {}'.format(active_param))
                active_signature = signature_results.get('activeSignature', 0)
                print('  Active sig   = {}'.format(active_signature))
                for (i, signature) in enumerate(signature_results['signatures']):
                    print('    {}'.format(signature['label']))
                    for (j, obj) in enumerate(signature['parameters']):
                        # Mark the currently-active parameter with '*'.
                        if ((i == active_signature) and (j == active_param)):
                            active_mark = '*'
                        else:
                            active_mark = ' '
                        arg_desc = obj.get('documentation')
                        if (arg_desc is not None):
                            print('{2}     {0} :: {1}'.format(arg_desc, obj['label'], active_mark))
                        else:
                            print('{1}     {0}'.format(obj['label'], active_mark))
    # --- "textDocument/definition" / "textDocument/implementation" --------
    if (args.debug_definition or args.debug_implementation):
        if args.debug_definition:
            print('\nTesting "textDocument/definition" request:')
        elif args.debug_implementation:
            print('\nTesting "textDocument/implementation" request:')
        check_request_params(args)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        if args.debug_definition:
            definition_results = s.serve_definition({'params': {'textDocument': {'uri': args.debug_filepath}, 'position': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}}})
        elif args.debug_implementation:
            definition_results = s.serve_implementation({'params': {'textDocument': {'uri': args.debug_filepath}, 'position': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}}})
        print('  Result:')
        if (definition_results is None):
            print('    No result found!')
        elif args.debug_full_result:
            print(json.dumps(definition_results, indent=2))
        else:
            print('    URI  = "{}"'.format(definition_results['uri']))
            print('    Line = {}'.format((definition_results['range']['start']['line'] + 1)))
            print('    Char = {}'.format((definition_results['range']['start']['character'] + 1)))
    # --- "textDocument/hover" ---------------------------------------------
    if args.debug_hover:
        print('\nTesting "textDocument/hover" request:')
        check_request_params(args)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        hover_results = s.serve_hover({'params': {'textDocument': {'uri': args.debug_filepath}, 'position': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}}})
        print('  Result:')
        if (hover_results is None):
            print('    No result found!')
        elif args.debug_full_result:
            print(json.dumps(hover_results, indent=2))
        else:
            # Hover contents may be a MarkupContent dict or a bare string.
            contents = hover_results['contents']
            print('=======')
            if isinstance(contents, dict):
                print(contents['value'])
            else:
                print(contents)
            print('=======')
    # --- "textDocument/references" ----------------------------------------
    if args.debug_references:
        print('\nTesting "textDocument/references" request:')
        check_request_params(args)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        ref_results = s.serve_references({'params': {'textDocument': {'uri': args.debug_filepath}, 'position': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}}})
        print('  Result:')
        if (ref_results is None):
            print('    No result found!')
        elif args.debug_full_result:
            print(json.dumps(ref_results, indent=2))
        else:
            print('=======')
            for result in ref_results:
                print('  {}  ({}, {})'.format(result['uri'], (result['range']['start']['line'] + 1), (result['range']['start']['character'] + 1)))
            print('=======')
    # --- "textDocument/rename" --------------------------------------------
    if (args.debug_rename is not None):
        print('\nTesting "textDocument/rename" request:')
        check_request_params(args)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        ref_results = s.serve_rename({'params': {'textDocument': {'uri': args.debug_filepath}, 'position': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}, 'newName': args.debug_rename}})
        print('  Result:')
        if (ref_results is None):
            print('    No changes found!')
        elif args.debug_full_result:
            print(json.dumps(ref_results, indent=2))
        else:
            print('=======')
            for (uri, result) in ref_results['changes'].items():
                path = path_from_uri(uri)
                print('File: "{}"'.format(path))
                file_obj = s.workspace.get(path)
                if (file_obj is not None):
                    file_contents = file_obj.contents_split
                    for change in result:
                        start_line = change['range']['start']['line']
                        end_line = change['range']['end']['line']
                        start_col = change['range']['start']['character']
                        end_col = change['range']['end']['character']
                        print('  {}, {}'.format((start_line + 1), (end_line + 1)))
                        # Show the changed span as a -/+ diff by splicing the
                        # new text into the first/last affected lines.
                        new_contents = []
                        for i in range(start_line, (end_line + 1)):
                            line = file_contents[i]
                            print('  - {}'.format(line))
                            if (i == start_line):
                                new_contents.append((line[:start_col] + change['newText']))
                            if (i == end_line):
                                new_contents[(- 1)] += line[end_col:]
                        for line in new_contents:
                            print('  + {}'.format(line))
                        print()
                else:
                    print('Unknown file: "{}"'.format(path))
            print('=======')
    # --- "textDocument/getActions" ----------------------------------------
    if args.debug_actions:
        pp = pprint.PrettyPrinter(indent=2, width=120)
        print('\nTesting "textDocument/getActions" request:')
        check_request_params(args)
        s.serve_onSave({'params': {'textDocument': {'uri': args.debug_filepath}}})
        action_results = s.serve_codeActions({'params': {'textDocument': {'uri': args.debug_filepath}, 'range': {'start': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}, 'end': {'line': (args.debug_line - 1), 'character': (args.debug_char - 1)}}}})
        if args.debug_full_result:
            print(json.dumps(action_results, indent=2))
        else:
            for result in action_results:
                print("Kind = '{}', Title = '{}'".format(result['kind'], result['title']))
                for (editUri, editChange) in result['edit']['changes'].items():
                    print("\nChange: URI = '{}'".format(editUri))
                    pp.pprint(editChange)
                    print()
    tmpout.close()
    tmpin.close()
def extractBrniWordpressCom(item):
    """Parse a feed item from brni.wordpress.com into a release message.

    Returns None for previews or items without a volume/chapter, a release
    message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip preview posts and titles carrying neither a chapter nor a volume.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _branch_highlights(coverage_eval, coverage_map):
    """Build per-path highlight entries ``[start, stop, color, '']`` for every
    branch in *coverage_map*, colored according to *coverage_eval*."""
    highlights = {path: [] for path in coverage_map}
    for path, fn_map in coverage_map.items():
        for fn in fn_map:
            for branch_id, offset in fn_map[fn].items():
                color = _branch_color(int(branch_id), coverage_eval, path, offset[2])
                highlights[path].append(list(offset[:2]) + [color, ''])
    return highlights
class Kaptan(object):
    """Configuration manager that loads, queries and exports configuration
    data through pluggable format handlers (json/dict/yaml/python-file/ini).
    """

    # Handler key -> handler class.
    HANDLER_MAP = {'json': JsonHandler, 'dict': DictHandler, 'yaml': YamlHandler, 'file': PyFileHandler, 'ini': IniHandler}

    def __init__(self, handler=None):
        self.configuration_data = dict()
        self.handler = None
        if handler:
            self.handler = self.HANDLER_MAP[handler]()

    def upsert(self, key, value):
        """Insert or update a top-level *key*; returns self for chaining."""
        self.configuration_data.update({key: value})
        return self

    def _is_python_file(self, value):
        """True when *value* names a python file (by extension or on disk)."""
        ext = os.path.splitext(value)[1][1:]
        if ((ext == 'py') or os.path.isfile((value + '.py'))):
            return True
        return False

    def import_config(self, value):
        """Load configuration from a dict, a config file, a python file, or a
        raw string, choosing a handler automatically when none is set.

        Raises RuntimeError when no handler can be determined and IOError
        when a referenced python file does not exist.  Returns self.
        """
        if isinstance(value, dict):
            self.handler = self.HANDLER_MAP['dict']()
            data = value
        elif (os.path.isfile(value) and (not self._is_python_file(value))):
            if (not self.handler):
                # FIX: was a bare ``except:`` which also swallowed
                # SystemExit/KeyboardInterrupt; only handler-resolution
                # failures should become RuntimeError.
                try:
                    key = HANDLER_EXT.get(os.path.splitext(value)[1][1:], None)
                    self.handler = self.HANDLER_MAP[key]()
                except Exception:
                    raise RuntimeError('Unable to determine handler')
            with open(value) as f:
                data = f.read()
        elif self._is_python_file(value):
            self.handler = self.HANDLER_MAP[HANDLER_EXT['py']]()
            if (not value.endswith('.py')):
                value += '.py'
            data = os.path.abspath(os.path.expanduser(value))
            if (not os.path.isfile(data)):
                raise IOError('File {0} not found.'.format(data))
        else:
            if (not self.handler):
                raise RuntimeError('Unable to determine handler')
            data = value
        self.configuration_data = self.handler.load(data)
        return self

    def _get(self, key):
        """Resolve a dotted *key* path through nested mappings/sequences."""
        current_data = self.configuration_data
        for chunk in key.split('.'):
            if isinstance(current_data, collections_abc.Mapping):
                current_data = current_data[chunk]
            elif isinstance(current_data, collections_abc.Sequence):
                chunk = int(chunk)
                current_data = current_data[chunk]
            else:
                # Scalar reached before the path was exhausted.
                return current_data
        return current_data

    def get(self, key=None, default=SENTINEL):
        """Return the value at dotted *key* (whole config when key is falsy).

        Lookup failures re-raise with a clarified message unless *default*
        was supplied, in which case *default* is returned.
        """
        if (not key):
            return self.export('dict')
        try:
            try:
                return self._get(key)
            except KeyError:
                raise KeyError(key)
            except ValueError:
                raise ValueError('Sequence index not an integer')
            except IndexError:
                raise IndexError('Sequence index out of range')
        except (KeyError, ValueError, IndexError):
            if (default is not SENTINEL):
                return default
            raise

    def export(self, handler=None, **kwargs):
        """Serialize the configuration with *handler* (or the current one)."""
        if (not handler):
            handler_class = self.handler
        else:
            handler_class = self.HANDLER_MAP[handler]()
        return handler_class.dump(self.configuration_data, **kwargs)

    def __handle_default_value(self, key, default):
        # FIX: compare the sentinel by identity (``is``), consistent with
        # ``get`` above; ``==`` on an arbitrary default could misfire.
        if (default is SENTINEL):
            raise KeyError(key)
        return default
def test_async_ens_strict_bytes_type_checking_is_distinct_from_w3_instance(local_async_w3):
    """``strict_bytes_type_checking`` on an AsyncENS instance must be
    independent of (not shared with) the flag on the web3 instance it was
    created from."""
    ns = AsyncENS.from_web3(local_async_w3)
    # from_web3 gives the ENS object its own w3 instance, shared with its
    # resolver contract but distinct from the source instance.
    assert (ns.w3 != local_async_w3)
    assert (ns.w3 == ns._resolver_contract.w3)
    assert local_async_w3.strict_bytes_type_checking
    assert ns.strict_bytes_type_checking
    # Flipping the flag on the source w3 must not leak into the ENS instance.
    local_async_w3.strict_bytes_type_checking = False
    assert (not local_async_w3.strict_bytes_type_checking)
    assert ns.strict_bytes_type_checking
    assert ns.w3.strict_bytes_type_checking
    assert ns._resolver_contract.w3.strict_bytes_type_checking
    local_async_w3.strict_bytes_type_checking = True
    assert local_async_w3.strict_bytes_type_checking
    # Flipping the flag on the ENS instance propagates to its own w3 and
    # resolver contract, but not back to the source instance.
    ns.strict_bytes_type_checking = False
    assert (not ns.strict_bytes_type_checking)
    assert (not ns.w3.strict_bytes_type_checking)
    assert (not ns._resolver_contract.w3.strict_bytes_type_checking)
class EventHandler(object):
    """Chrome DevTools event handler that prints the current tab to a PDF
    file once its top-level frame finishes loading."""

    # Serializes printToPDF calls across tabs (shared by all instances).
    pdf_lock = threading.Lock()

    def __init__(self, browser, tab):
        self.browser = browser
        self.tab = tab
        self.start_frame = None

    def frame_started_loading(self, frameId):
        # Remember only the first frame that starts loading (the top frame).
        if not self.start_frame:
            self.start_frame = frameId

    def frame_stopped_loading(self, frameId):
        """When the remembered top frame stops loading, stop the page, render
        it to ``<timestamp>.pdf`` and stop the tab."""
        if self.start_frame != frameId:
            return
        self.tab.Page.stopLoading()
        with self.pdf_lock:
            print(self.browser.activate_tab(self.tab.id))
            try:
                data = self.tab.Page.printToPDF()
                out_name = '%s.pdf' % time.time()
                with open(out_name, 'wb') as fd:
                    fd.write(base64.b64decode(data['data']))
            finally:
                # Always release the tab, even if printing failed.
                self.tab.stop()
class ErrStyle(Style):
    """Pygments style used when rendering error pages: dark text on a pale
    yellow background."""

    background_color = '#ffffcc'
    default_style = ''
    styles = {
        Whitespace: '#3e4349',
        Comment: '#3f6b5b',
        Keyword: 'bold #f06f00',
        Operator: '#3e4349',
        Name: '#3e4349',
        Name.Builtin: '#007020',
        Name.Function: 'bold #3e4349',
        Name.Class: 'bold #3e4349',
        Name.Variable: 'underline #8a2be2',
        Name.Constant: 'underline #b91f49',
        Name.Entity: 'bold #330000',
        Name.Tag: 'bold #f06f00',
        Name.Decorator: 'bold italic #3e4349',
        String: '#9a5151',
        String.Doc: 'italic #3f6b5b',
        Number: 'underline #9a5151',
        Generic: '#3e4349',
        Generic.Heading: 'bold #1014ad',
        Generic.Subheading: 'bold #1014ad',
        Generic.Deleted: 'bg:#c8f2ea #2020ff',
        Generic.Inserted: '#3e4349',
    }
# FIX: the extracted source had only ``(scope='class', autouse=True)`` — an
# invalid bare expression; the decorator name was clearly stripped.  A
# class-scoped autouse fixture is the evident original (``request`` and
# ``tmp_path_factory`` are pytest-native); ``pytest`` must be imported at
# the top of this test module.
@pytest.fixture(scope='class', autouse=True)
def setup(request, tmp_path_factory):
    """Per-class fixture: build a local git repo with tags/branches and a
    'remote' repo it tracks, exposing paths and revisions on the test class."""
    cls = request.cls
    cls.local_remote_name = 'remote_repo'
    cls.local_branch = 'rally-unit-test-local-only-branch'
    cls.remote_branch = 'rally-unit-test-remote-only-branch'
    cls.rebase_branch = 'rally-unit-test-rebase-branch'
    cls.local_tmp_src_dir = str(tmp_path_factory.mktemp('rally-unit-test-local-dir'))
    cls.remote_tmp_src_dir = str(tmp_path_factory.mktemp('rally-unit-test-remote-dir'))
    cls.tmp_clone_dir = str(tmp_path_factory.mktemp('rally-unit-test-clone-dir'))
    # Older git/GitPython versions do not support ``initial_branch``.
    try:
        cls.local_repo = Repo.init(cls.local_tmp_src_dir, initial_branch='master')
    except Exception:
        cls.local_repo = Repo.init(cls.local_tmp_src_dir)
    commit(cls.local_repo)
    cls.local_revision = cls.local_repo.heads['master'].commit.hexsha
    cls.local_repo.create_tag('local-tag-1', 'HEAD')
    cls.local_repo.create_tag('local-tag-2', 'HEAD')
    cls.local_repo.create_head(cls.local_branch, 'HEAD')
    try:
        cls.remote_repo = Repo.init(cls.remote_tmp_src_dir, initial_branch='master')
    except Exception:
        cls.remote_repo = Repo.init(cls.remote_tmp_src_dir)
    # One dated "old" commit plus a fresh tip, so tests can address both.
    commit(cls.remote_repo, date='2016-01-01 00:00:00+0000')
    cls.old_revision = cls.remote_repo.heads['master'].commit.hexsha
    commit(cls.remote_repo)
    cls.remote_branch_hash = cls.remote_repo.heads['master'].commit.hexsha
    cls.remote_repo.create_head(cls.remote_branch, 'HEAD')
    cls.local_repo.create_remote(cls.local_remote_name, cls.remote_tmp_src_dir)
    cls.local_repo.remotes[cls.local_remote_name].fetch()
def kernel_tensorized(a, x, b, y, blur=0.05, kernel=None, name=None, potentials=False, **kwargs):
    """Kernel norm / MMD between weighted point clouds (a, x) and (b, y).

    NOTE(review): the extracted source had the weight parameters stripped
    (``def kernel_tensorized(, x, , y, ...)`` with bare ``.detach()`` calls)
    — almost certainly non-ASCII identifiers (alpha/beta) lost in extraction.
    They are restored here as ``a`` and ``b``; confirm against the original.

    Args:
        a: weights of the first measure, shape (B, N).
        b: weights of the second measure, shape (B, M).
        x, y: point clouds, shapes (B, N, D) and (B, M, D).
        blur: kernel bandwidth, forwarded to the kernel routine.
        kernel: kernel callable; looked up in kernel_routines[name] when None.
        potentials: when True, return the dual potentials instead of the cost.
    """
    (B, N, D) = x.shape
    (_, M, _) = y.shape
    if (kernel is None):
        kernel = kernel_routines[name]
    # double_grad keeps second-order gradients flowing through the kernel.
    K_xx = kernel(double_grad(x), x.detach(), blur=blur)
    K_yy = kernel(double_grad(y), y.detach(), blur=blur)
    K_xy = kernel(x, y, blur=blur)
    a_x = torch.matmul(K_xx, a.detach().unsqueeze(-1)).squeeze(-1)
    b_y = torch.matmul(K_yy, b.detach().unsqueeze(-1)).squeeze(-1)
    b_x = torch.matmul(K_xy, b.unsqueeze(-1)).squeeze(-1)
    if potentials:
        a_y = torch.matmul(K_xy.transpose(1, 2), a.unsqueeze(-1)).squeeze(-1)
        return ((a_x - b_x), (b_y - a_y))
    else:
        # 0.5 <a, K_xx a> + 0.5 <b, K_yy b> - <a, K_xy b>
        return (((0.5 * (double_grad(a) * a_x).sum(1)) + (0.5 * (double_grad(b) * b_y).sum(1))) - (a * b_x).sum(1))
def ovlp3d_13(ax, da, A, bx, db, B):
    """Auto-generated Gaussian overlap integral block (p-type x f-type shell
    pair in 3D) for primitives with exponents *ax*, *bx*, contraction
    coefficients *da*, *db* and centers *A*, *B* (length-3 arrays).

    Returns a (3, 10) array of summed primitive contributions.

    NOTE(review): several coefficients below look truncated by the text
    extraction — ``5. * da``, ``0. * x12``, ``0. * x18``, ``1.`` (x18) and
    similar bare ``0.``/``5.`` literals would normally be full generated
    constants; as written, every term multiplied by ``x13``, ``x19``, ``x27``
    or ``x34`` is identically zero.  Verify against the code generator's
    output before trusting these values.
    """
    result = numpy.zeros((3, 10), dtype=float)
    # x0: 1/(ax+bx); x1: Gaussian product center (x component).
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + B[0])
    x4 = (x3 ** 2)
    x5 = (3.0 * x0)
    x6 = (x2 + A[0])
    x7 = (x3 * x6)
    x8 = ((x0 * ((((- 2.0) * x1) + A[0]) + B[0])) + (x3 * (x0 + (2.0 * x7))))
    # x9/x10: reduced exponent and the shared Gaussian prefactor
    # exp(-mu * |A-B|^2) times the contraction coefficients.
    x9 = ((ax * bx) * x0)
    x10 = (((5. * da) * db) * numpy.exp(((- x9) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x11 = ((x0 ** 1.5) * x10)
    x12 = (3. * x11)
    x13 = (0. * x12)
    # y-direction intermediates.
    x14 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x15 = (- x14)
    x16 = (x15 + B[1])
    x17 = (x11 * x16)
    x18 = 1.
    x19 = (0. * x18)
    x20 = (x19 * x8)
    # z-direction intermediates.
    x21 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x22 = (- x21)
    x23 = (x22 + B[2])
    x24 = (x11 * x23)
    x25 = (x16 ** 2)
    x26 = (0.5 * x0)
    x27 = (0. * x18)
    x28 = (x27 * (x25 + x26))
    x29 = (x26 + x7)
    x30 = ((x0 ** 1.5) * x10)
    x31 = (x29 * x30)
    x32 = (x23 ** 2)
    x33 = (x27 * (x26 + x32))
    x34 = (0. * x12)
    x35 = (x34 * x6)
    x36 = (1.5 * x0)
    x37 = (x16 * (x25 + x36))
    x38 = (x23 * (x32 + x36))
    x39 = (x15 + A[1])
    x40 = (x34 * x39)
    x41 = (x3 * (x36 + x4))
    x42 = (x16 * x39)
    x43 = (x26 + x42)
    x44 = (x30 * x43)
    x45 = (x27 * (x26 + x4))
    x46 = ((x0 * ((((- 2.0) * x14) + A[1]) + B[1])) + (x16 * (x0 + (2.0 * x42))))
    x47 = (x19 * x46)
    x48 = (x11 * x3)
    x49 = (x22 + A[2])
    x50 = (x34 * x49)
    x51 = (x23 * x49)
    x52 = (x26 + x51)
    x53 = (x30 * x52)
    x54 = ((x0 * ((((- 2.0) * x21) + A[2]) + B[2])) + (x23 * (x0 + (2.0 * x51))))
    x55 = (x19 * x54)
    # Assemble the (angular momentum component, cartesian function) matrix.
    result[(0, 0)] = numpy.sum((x13 * ((x0 * (((2.0 * x4) + x5) + (4.0 * x7))) + ((2.0 * x3) * x8))))
    result[(0, 1)] = numpy.sum((x17 * x20))
    result[(0, 2)] = numpy.sum((x20 * x24))
    result[(0, 3)] = numpy.sum((x28 * x31))
    result[(0, 4)] = numpy.sum(((x16 * x24) * x29))
    result[(0, 5)] = numpy.sum((x31 * x33))
    result[(0, 6)] = numpy.sum((x35 * x37))
    result[(0, 7)] = numpy.sum(((x24 * x28) * x6))
    result[(0, 8)] = numpy.sum(((x17 * x33) * x6))
    result[(0, 9)] = numpy.sum((x35 * x38))
    result[(1, 0)] = numpy.sum((x40 * x41))
    result[(1, 1)] = numpy.sum((x44 * x45))
    result[(1, 2)] = numpy.sum(((x24 * x39) * x45))
    result[(1, 3)] = numpy.sum((x47 * x48))
    result[(1, 4)] = numpy.sum(((x24 * x3) * x43))
    result[(1, 5)] = numpy.sum(((x33 * x39) * x48))
    result[(1, 6)] = numpy.sum((x13 * ((x0 * (((2.0 * x25) + (4.0 * x42)) + x5)) + ((2.0 * x16) * x46))))
    result[(1, 7)] = numpy.sum((x24 * x47))
    result[(1, 8)] = numpy.sum((x33 * x44))
    result[(1, 9)] = numpy.sum((x38 * x40))
    result[(2, 0)] = numpy.sum((x41 * x50))
    result[(2, 1)] = numpy.sum(((x17 * x45) * x49))
    result[(2, 2)] = numpy.sum((x45 * x53))
    result[(2, 3)] = numpy.sum(((x28 * x48) * x49))
    result[(2, 4)] = numpy.sum(((x17 * x3) * x52))
    result[(2, 5)] = numpy.sum((x48 * x55))
    result[(2, 6)] = numpy.sum((x37 * x50))
    result[(2, 7)] = numpy.sum((x28 * x53))
    result[(2, 8)] = numpy.sum((x17 * x55))
    result[(2, 9)] = numpy.sum((x13 * ((x0 * (((2.0 * x32) + x5) + (4.0 * x51))) + ((2.0 * x23) * x54))))
    return result
def is_program_info(name, f):
    """Return True when filename *f* is the ``.programs`` info file belonging
    to source *name* (matched by basename with either '.' or '_' separator)."""
    underscore_prefix = os.path.basename(strip_ngs_extensions(name)) + '_'
    dot_prefix = os.path.basename(utils.rootname(name)) + '.'
    # startswith accepts a tuple: one call covers both accepted prefixes.
    return f.endswith('.programs') and f.startswith((dot_prefix, underscore_prefix))
class Config():
    """Global behaviour configuration options.

    Every option is exposed as a validated property. The extracted source had
    lost the ``@property`` / ``@<name>.setter`` decorators (leaving residue
    such as ``_error.setter`` and duplicate same-name ``def`` pairs); they are
    restored here so the setter validation actually runs and so that
    ``__init__``'s attribute assignments go through the setters.
    """
    # Optional Config instance whose settings are copied wholesale.
    template = None

    def __init__(self, **kwargs):
        """Initialize every option from *kwargs*, falling back to defaults.

        Assignments go through the property setters, so an invalid value
        raises ValueError immediately. If a ``template`` Config is supplied
        (directly or via the class attribute), its settings replace
        everything else.
        """
        self.max_error = kwargs.pop('max_error', (1 / (2 ** 63)))
        self.n_word_max = kwargs.pop('n_word_max', 64)
        self.overflow = kwargs.pop('overflow', 'saturate')
        self.rounding = kwargs.pop('rounding', 'trunc')
        self.shifting = kwargs.pop('shifting', 'expand')
        self.op_method = kwargs.pop('op_method', 'raw')
        self.op_input_size = kwargs.pop('op_input_size', 'same')
        self.op_out = kwargs.pop('op_out', None)
        self.op_out_like = kwargs.pop('op_out_like', None)
        self.op_sizing = kwargs.pop('op_sizing', 'optimal')
        self.const_op_sizing = kwargs.pop('const_op_sizing', 'same')
        self.array_output_type = kwargs.pop('array_output_type', 'fxp')
        self.array_op_out = kwargs.pop('array_op_out', None)
        self.array_op_out_like = kwargs.pop('array_op_out_like', None)
        self.array_op_method = kwargs.pop('array_op_method', 'repr')
        self.dtype_notation = kwargs.pop('dtype_notation', 'fxp')
        if 'template' in kwargs:
            self.template = kwargs.pop('template')
        if self.template is not None:
            if isinstance(self.template, Config):
                # Template wins: replace the attribute dict with a deep copy.
                self.__dict__ = copy.deepcopy(self.template.__dict__)

    @property
    def max_error(self):
        """Maximum tolerated representation error (must be > 0)."""
        return self._max_error

    @max_error.setter
    def max_error(self, val):
        if val > 0:
            self._max_error = val
        else:
            raise ValueError('max_error must be greater than 0!')

    @property
    def n_word_max(self):
        """Upper bound for word size in bits (positive int)."""
        return self._n_word_max

    @n_word_max.setter
    def n_word_max(self, val):
        if isinstance(val, int) and (val > 0):
            self._n_word_max = val
        else:
            raise ValueError('n_word_max must be int type greater than 0!')

    @property
    def _overflow_list(self):
        # Valid values for `overflow`.
        return ['saturate', 'wrap']

    @property
    def overflow(self):
        """Overflow behaviour; one of `_overflow_list`."""
        return self._overflow

    @overflow.setter
    def overflow(self, val):
        if isinstance(val, str) and (val in self._overflow_list):
            self._overflow = val
        else:
            raise ValueError('overflow must be str type with following valid values: {}'.format(self._overflow_list))

    @property
    def _rounding_list(self):
        # Valid values for `rounding`.
        return ['around', 'floor', 'ceil', 'fix', 'trunc']

    @property
    def rounding(self):
        """Rounding method; one of `_rounding_list`."""
        return self._rounding

    @rounding.setter
    def rounding(self, val):
        if isinstance(val, str) and (val in self._rounding_list):
            self._rounding = val
        else:
            raise ValueError('rounding must be str type with following valid values: {}'.format(self._rounding_list))

    @property
    def _shifting_list(self):
        # Valid values for `shifting`.
        return ['expand', 'trunc', 'keep']

    @property
    def shifting(self):
        """Shifting behaviour; one of `_shifting_list`."""
        return self._shifting

    @shifting.setter
    def shifting(self, val):
        if isinstance(val, str) and (val in self._shifting_list):
            self._shifting = val
        else:
            raise ValueError('shifting must be str type with following valid values: {}'.format(self._shifting_list))

    @property
    def _op_input_size_list(self):
        # Valid values for `op_input_size`.
        return ['same', 'best']

    @property
    def op_input_size(self):
        """Input sizing strategy for operations; one of `_op_input_size_list`."""
        return self._op_input_size

    @op_input_size.setter
    def op_input_size(self, val):
        if isinstance(val, str) and (val in self._op_input_size_list):
            self._op_input_size = val
        else:
            raise ValueError('op_input_size must be str type with following valid values: {}'.format(self._op_input_size_list))

    @property
    def op_out(self):
        """Output Fxp object for operations, or None."""
        return self._op_out

    @op_out.setter
    def op_out(self, val):
        if (val is None) or isinstance(val, Fxp):
            self._op_out = val
        else:
            raise ValueError('op_out must be a Fxp object or None!')

    @property
    def op_out_like(self):
        """Template Fxp object for operation outputs, or None."""
        return self._op_out_like

    @op_out_like.setter
    def op_out_like(self, val):
        if (val is None) or isinstance(val, Fxp):
            self._op_out_like = val
        else:
            raise ValueError('op_out_like must be a Fxp object or None!')

    @property
    def _op_sizing_list(self):
        # Valid values for `op_sizing`.
        return ['optimal', 'same', 'fit', 'largest', 'smallest']

    @property
    def op_sizing(self):
        """Result sizing strategy for operations; one of `_op_sizing_list`."""
        return self._op_sizing

    @op_sizing.setter
    def op_sizing(self, val):
        if isinstance(val, str) and (val in self._op_sizing_list):
            self._op_sizing = val
        else:
            raise ValueError('op_sizing must be str type with following valid values: {}'.format(self._op_sizing_list))

    @property
    def _op_method_list(self):
        # Valid values for `op_method`.
        return ['raw', 'repr']

    @property
    def op_method(self):
        """Operation method; one of `_op_method_list`."""
        return self._op_method

    @op_method.setter
    def op_method(self, val):
        if isinstance(val, str) and (val in self._op_method_list):
            self._op_method = val
        else:
            raise ValueError('op_method must be str type with following valid values: {}'.format(self._op_method_list))

    @property
    def _const_op_sizing_list(self):
        # Valid values for `const_op_sizing`.
        return ['optimal', 'same', 'fit', 'largest', 'smallest']

    @property
    def const_op_sizing(self):
        """Result sizing strategy for operations with constants."""
        return self._const_op_sizing

    @const_op_sizing.setter
    def const_op_sizing(self, val):
        if isinstance(val, str) and (val in self._const_op_sizing_list):
            self._const_op_sizing = val
        else:
            # Fixed: the original message said 'op_sizing', masking which
            # option actually failed validation.
            raise ValueError('const_op_sizing must be str type with following valid values: {}'.format(self._const_op_sizing_list))

    @property
    def _array_output_type_list(self):
        # Valid values for `array_output_type`.
        return ['fxp', 'array']

    @property
    def array_output_type(self):
        """Output type for array operations; one of `_array_output_type_list`."""
        return self._array_output_type

    @array_output_type.setter
    def array_output_type(self, val):
        if isinstance(val, str) and (val in self._array_output_type_list):
            self._array_output_type = val
        else:
            raise ValueError('array_output_type must be str type with following valid values: {}'.format(self._array_output_type_list))

    @property
    def array_op_out(self):
        """Output Fxp object for array operations, or None."""
        return self._array_op_out

    @array_op_out.setter
    def array_op_out(self, val):
        if (val is None) or isinstance(val, Fxp):
            self._array_op_out = val
        else:
            raise ValueError('array_op_out must be a Fxp object or None!')

    @property
    def array_op_out_like(self):
        """Template Fxp object for array operation outputs, or None."""
        return self._array_op_out_like

    @array_op_out_like.setter
    def array_op_out_like(self, val):
        if (val is None) or isinstance(val, Fxp):
            self._array_op_out_like = val
        else:
            raise ValueError('array_op_out_like must be a Fxp object or None!')

    @property
    def _array_op_method_list(self):
        # Valid values for `array_op_method`.
        return ['raw', 'repr']

    @property
    def array_op_method(self):
        """Array operation method; one of `_array_op_method_list`."""
        return self._array_op_method

    @array_op_method.setter
    def array_op_method(self, val):
        if isinstance(val, str) and (val in self._array_op_method_list):
            self._array_op_method = val
        else:
            raise ValueError('array_op_method must be str type with following valid values: {}'.format(self._array_op_method_list))

    @property
    def _dtype_notation_list(self):
        # Valid values for `dtype_notation`.
        return ['fxp', 'Q']

    @property
    def dtype_notation(self):
        """Notation used when rendering dtypes; one of `_dtype_notation_list`."""
        return self._dtype_notation

    @dtype_notation.setter
    def dtype_notation(self, val):
        if isinstance(val, str) and (val in self._dtype_notation_list):
            self._dtype_notation = val
        else:
            raise ValueError('dtype_notation must be str type with following valid values: {}'.format(self._dtype_notation_list))

    def print(self):
        """Print every stored option (public name) and its current value."""
        for (k, v) in self.__dict__.items():
            print('\t{: <24}:\t{}'.format(k.strip('_'), v))

    def update(self, **kwargs):
        """Set any existing options given as keyword arguments."""
        for (k, v) in kwargs.items():
            if hasattr(self, k):
                setattr(self, k, v)

    def copy(self):
        """Return a shallow copy of this Config."""
        return copy.copy(self)

    def deepcopy(self):
        """Return a deep copy of this Config."""
        return copy.deepcopy(self)
def test_read_request_stream_in_dispatch_after_app_calls_body(test_client_factory: Callable[([ASGIApp], TestClient)]) -> None:
    """After the endpoint consumes the body, re-reading the stream must raise."""
    async def homepage(request: Request):
        # The endpoint drains the request body first.
        body = await request.body()
        assert body == b'a'
        return PlainTextResponse('Homepage')

    class ConsumingMiddleware(BaseHTTPMiddleware):
        async def dispatch(self, request: Request, call_next: RequestResponseEndpoint):
            response = await call_next(request)
            # The stream was already consumed by the app; iterating it again
            # must fail immediately, before yielding anything.
            with pytest.raises(RuntimeError, match='Stream consumed'):
                async for _ in request.stream():
                    raise AssertionError('should not be called')
            return response

    routes = [Route('/', homepage, methods=['POST'])]
    app = Starlette(routes=routes, middleware=[Middleware(ConsumingMiddleware)])
    client: TestClient = test_client_factory(app)
    assert client.post('/', content=b'a').status_code == 200
def test_staticfiles_unhandled_os_error_returns_500(tmpdir, test_client_factory, monkeypatch):
    """An unexpected OS-level error from the lookup must surface as a 500."""
    def mock_timeout(*args, **kwargs):
        # Simulate an OSError subclass StaticFiles does not handle specially.
        raise TimeoutError

    file_path = os.path.join(tmpdir, 'example.txt')
    with open(file_path, 'w') as handle:
        handle.write('<file content>')
    app = Starlette(routes=[Mount('/', app=StaticFiles(directory=tmpdir), name='static')])
    client = test_client_factory(app, raise_server_exceptions=False)
    monkeypatch.setattr('starlette.staticfiles.StaticFiles.lookup_path', mock_timeout)
    response = client.get('/example.txt')
    assert response.status_code == 500
    assert response.text == 'Internal Server Error'
def test_namespace_collision(tester, build):
    """An ABI function named like a container attribute triggers a warning."""
    # 'bytecode' collides with an existing attribute on the contract object.
    colliding_fn = {
        'constant': False,
        'inputs': [
            {'name': '_to', 'type': 'address'},
            {'name': '_value', 'type': 'uint256'},
            {'name': '_test', 'type': 'uint256'},
        ],
        'name': 'bytecode',
        'outputs': [{'name': '', 'type': 'bool'}],
        'payable': False,
        'stateMutability': 'nonpayable',
        'type': 'function',
    }
    build['abi'].append(colliding_fn)
    with pytest.warns(BrownieEnvironmentWarning):
        Contract.from_abi(None, tester.address, build['abi'])
class Requester(object):
    """Parse a raw HTTP request from a file and replay it with injected values.

    The raw file is expected to look like a captured request: a request line
    ('METHOD /path HTTP/1.1'), header lines, and (for POST) the body on the
    last line.
    """
    # NOTE: the extracted source had truncated string literals here
    # ("protocol = '"); 'http'/'https' restored from the ssl flag semantics.
    protocol = 'http'
    host = ''
    method = ''
    action = ''

    def __init__(self, path, uagent, ssl, proxies):
        """Load and parse the raw request file.

        :param path: path to the raw request file.
        :param uagent: optional User-Agent override (None keeps the file's).
        :param ssl: truthy to use https instead of http.
        :param proxies: proxies mapping passed through to `requests`.
        """
        # Fresh per-instance containers: the original class-level dicts were
        # shared by every Requester instance and leaked headers/data.
        self.headers = {}
        self.data = {}
        try:
            with open(path, 'r') as f:
                content = f.read().strip()
        except IOError:
            logging.error('File not found')
            exit()
        try:
            lines = content.split('\n')
            regex = re.compile('(.*) (.*) HTTP')
            (self.method, self.action) = regex.findall(lines[0])[0]
            for header in lines[1:]:
                (name, _, value) = header.partition(': ')
                if (not name) or (not value):
                    continue
                self.headers[name] = value
            self.host = self.headers['Host']
            if uagent is not None:
                self.headers['User-Agent'] = uagent
            # The body, if any, is the last line of the file.
            self.data_to_dict(lines[-1])
            if ssl:
                self.protocol = 'https'
            self.proxies = proxies
        except Exception:
            logging.warning('Bad Format or Raw data !')

    def data_to_dict(self, data):
        """Parse the POST body into self.data according to Content-Type.

        JSON bodies are decoded; XML bodies are stored verbatim under
        '__xml__'; anything else is treated as form-urlencoded.
        """
        if self.method != 'POST':
            return
        # .get() avoids a KeyError when the capture has no Content-Type.
        content_type = self.headers.get('Content-Type')
        if content_type and ('application/json' in content_type):
            self.data = json.loads(data)
        elif content_type and ('application/xml' in content_type):
            self.data['__xml__'] = data
        else:
            for arg in data.split('&'):
                regex = re.compile('(.*)=(.*)')
                for (name, value) in regex.findall(arg):
                    name = urllib.parse.unquote(name)
                    value = urllib.parse.unquote(value)
                    self.data[name] = value

    def do_request(self, param, value, timeout=3, stream=False):
        """Send the stored request with *param* replaced by *value*.

        For POST, the value is substituted into the parsed body (or at the
        '*FUZZ*' marker for XML); for GET it replaces the query parameter in
        the URL. Returns the `requests` response, or None on transport error.
        """
        try:
            base_url = (self.protocol + '://') + self.host
            if self.method == 'POST':
                content_type = self.headers.get('Content-Type')
                data_injected = self.data.copy()
                if param in str(data_injected):
                    data_injected[param] = value
                    if content_type and ('application/json' in content_type):
                        r = requests.post(base_url + self.action, headers=self.headers, json=data_injected, timeout=timeout, stream=stream, verify=False, proxies=self.proxies)
                    else:
                        r = requests.post(base_url + self.action, headers=self.headers, data=data_injected, timeout=timeout, stream=stream, verify=False, proxies=self.proxies)
                elif content_type and ('application/xml' in content_type):
                    if '*FUZZ*' in data_injected['__xml__']:
                        data_xml = data_injected['__xml__'].replace('*FUZZ*', value)
                        r = requests.post(base_url + self.action, headers=self.headers, data=data_xml, timeout=timeout, stream=stream, verify=False, proxies=self.proxies)
                    else:
                        logging.error('No injection point found ! (use -p)')
                        exit(1)
                else:
                    logging.error('No injection point found ! (use -p)')
                    exit(1)
            else:
                regex = re.compile(param + '=(\\w+)')
                value = urllib.parse.quote(value, safe='')
                data_injected = re.sub(regex, (param + '=') + value, self.action)
                r = requests.get(base_url + data_injected, headers=self.headers, timeout=timeout, stream=stream, verify=False, proxies=self.proxies)
        except Exception as e:
            logging.error(e)
            return None
        return r

    def __str__(self):
        """Render the request back in raw HTTP form (trailing '&' stripped)."""
        text = self.method + ' '
        text += self.action + ' HTTP/1.1\n'
        for header in self.headers:
            text += ((header + ': ') + self.headers[header]) + '\n'
        text += '\n\n'
        for data in self.data:
            text += ((data + '=') + self.data[data]) + '&'
        return text[:-1]
def extractNightbreeze(item):
    """Map a Nightbreeze feed item onto a known series release, or False."""
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    known_series = ['Transcending The Nine Heavens', 'Stellar Transformation', 'Stellar Transformations']
    tags = item['tags']
    # First matching tag wins, mirroring the series list order.
    for series in known_series:
        if series not in tags:
            continue
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def read_raw_node_data(pool, root_dir):
    """Read every node JSON5 file under *root_dir* in parallel.

    :param pool: a multiprocessing-style pool providing imap_unordered.
    :param root_dir: directory whose root CSV lists the node files.
    :returns: list of parsed node payloads (arbitrary completion order).
    """
    (_, nodes) = lib.read_root_csv(root_dir)
    raw_node_data = []
    with progressbar.ProgressBar(max_value=len(nodes)) as bar:
        # Explicit counter: the original reused the loop variable after the
        # loop, which raised NameError when `nodes` was empty.
        completed = 0
        for node in pool.imap_unordered(read_json5, nodes, chunksize=20):
            bar.update(completed)
            raw_node_data.append(node)
            completed += 1
        bar.update(completed)
    return raw_node_data
def test_certification_nulls(client, setup_test_data):
    """Null certification dates sort last on desc and first on asc."""
    endpoint = url.format(agency='222', fy=2020, period=12)
    newest_first = [
        {'certification_date': '2021-02-16T14:17:00.729315Z', 'publication_date': '2021-02-16T14:16:00.729315Z'},
        {'certification_date': '2021-02-14T14:17:00.729315Z', 'publication_date': '2021-02-14T14:16:00.729315Z'},
        {'certification_date': None, 'publication_date': '2020-01-17T18:37:21.605023Z'},
        {'certification_date': None, 'publication_date': '2020-01-14T14:17:00.729315Z'},
    ]
    # The ascending expectation is exactly the reversed descending one.
    for order, expected in (('desc', newest_first), ('asc', list(reversed(newest_first)))):
        resp = client.get(endpoint + '?sort=certification_date&order=' + order)
        assert resp.status_code == status.HTTP_200_OK
        payload = resp.json()
        assert len(payload['results']) == 4
        assert payload['results'] == expected
def create_recipient_object(db_row_dict: dict) -> OrderedDict:
    """Assemble the 'recipient' section of a response row from DB columns."""
    row = db_row_dict
    # Recipient location sub-object (domestic fields with foreign fallbacks).
    location = OrderedDict([
        ('location_country_code', row['_rl_location_country_code']),
        ('country_name', row['_rl_country_name']),
        ('state_code', row['_rl_state_code']),
        ('state_name', row['_rl_state_name']),
        ('city_name', row['_rl_city_name'] or row.get('_rl_foreign_city')),
        ('county_code', row['_rl_county_code']),
        ('county_name', row['_rl_county_name']),
        ('address_line1', row['_rl_address_line1']),
        ('address_line2', row['_rl_address_line2']),
        ('address_line3', row['_rl_address_line3']),
        ('congressional_code', row['_rl_congressional_code_current']),
        ('zip4', row.get('_rl_zip_last_4') or row.get('_rl_zip4')),
        ('zip5', row['_rl_zip5']),
        ('foreign_postal_code', row.get('_rl_foreign_postal_code')),
        ('foreign_province', row.get('_rl_foreign_province')),
    ])
    recipient_hash = obtain_recipient_uri(row['_recipient_name'], row['_recipient_uei'], row['_parent_recipient_uei'], row['_recipient_unique_id'], row['_parent_recipient_unique_id'])
    parent_recipient_hash = obtain_recipient_uri(row['_parent_recipient_name'], row['_parent_recipient_uei'], None, row['_parent_recipient_unique_id'], None, True)
    business_categories = get_business_category_display_names(fetch_business_categories_by_transaction_id(row['_transaction_id']))
    return OrderedDict([
        ('recipient_hash', recipient_hash),
        ('recipient_name', row['_recipient_name']),
        ('recipient_uei', row['_recipient_uei']),
        ('recipient_unique_id', row['_recipient_unique_id']),
        ('parent_recipient_hash', parent_recipient_hash),
        ('parent_recipient_name', row['_parent_recipient_name']),
        ('parent_recipient_uei', row['_parent_recipient_uei']),
        ('parent_recipient_unique_id', row['_parent_recipient_unique_id']),
        ('business_categories', business_categories),
        ('location', location),
    ])
# NOTE(review): this bare string is almost certainly the residue of a stripped
# registration decorator (e.g. @registry.reg('cuda.fused_elementwise.gen_function'));
# as written it is a no-op expression — confirm against the upstream source
# before relying on this function being auto-registered.
('cuda.fused_elementwise.gen_function')
def fused_elementwise_gen_function(func_attrs: Dict[(str, Any)]) -> str:
    """Generate source code for a fused elementwise op.

    Loads the custom math header that lives next to this module, then
    delegates the actual code generation to
    ``elementwise_common.fused_elementwise_gen_function``.
    """
    custom_libs = Target.current().get_custom_libs(os.path.dirname(__file__), 'custom_math.cuh')
    return elementwise_common.fused_elementwise_gen_function(func_attrs=func_attrs, custom_libs=custom_libs, head_template=HEAD_TEMPLATE, backend_spec=CUDASpec())
class PeerID(Struct):
    """Binary 'peer_id' struct: a header of 'flags' ('I') and 'length' ('H')
    fields followed by the payload b'<base36 identifier>+<name>\x00'.
    """

    # NOTE(review): no `self` parameter — this reads as a @staticmethod whose
    # decorator was lost during extraction; `PeerID.build(...)` still works
    # as written under Python 3.
    def build(identifier: str, name: str, flags: int=6) -> PeerID:
        """Create a PeerID from its parts, precomputing the length field."""
        peer_id = PeerID()
        peer_id._identifier = identifier
        peer_id._name = name
        peer_id._cfields['flags'] = flags
        peer_id._cfields['length'] = len(peer_id.peer_string())
        return peer_id

    def __init__(self):
        # Header layout registered with the base Struct: 'flags' ('I'),
        # 'length' ('H').
        super(PeerID, self).__init__('peer_id', (('flags', 'I'), ('length', 'H')))
        self._identifier = None
        self._name = None

    # NOTE(review): the simple accessors below may originally have been
    # @property getters (decorators are stripped elsewhere in this file) —
    # check call sites before assuming either form.
    def identifier(self) -> str:
        """Return the base36 identifier string."""
        return self._identifier

    def numeric_identifier(self) -> int:
        """Return the identifier decoded from base36 to an int."""
        return base36.loads(self._identifier)

    def name(self) -> str:
        """Return the peer name."""
        return self._name

    def display(self) -> str:
        """Return a one-line human-readable summary of the peer."""
        return ('peer(id=%s(0x%x) name=%s)' % (self._identifier, base36.loads(self._identifier), self._name))

    def peer_string(self) -> bytes:
        """Return the wire payload: identifier '+' name, NUL-terminated."""
        return (((str.encode(self._identifier) + b'+') + str.encode(self._name)) + b'\x00')

    def set_identifier(self, ident: str) -> None:
        """Replace the identifier and refresh the cached payload length."""
        self._identifier = ident
        self._cfields['length'] = len(self.peer_string())

    def set_name(self, name: str) -> None:
        """Replace the name and refresh the cached payload length."""
        self._name = name
        self._cfields['length'] = len(self.peer_string())

    def parse_raw_data(self, data: bytes) -> None:
        """Parse the header via the base Struct, then split the payload on '+'."""
        super(PeerID, self).parse_raw_data(data)
        try:
            # Payload starts after the fixed-size header (self._csize).
            (self._identifier, self._name) = data[self._csize:].decode('utf-8').rstrip('\x00').split('+')
        except Exception as e:
            print(("can't decode peer_id name data:\n\n%s\n" % hexdump(data, 'return')))
            raise e

    def to_raw_data(self) -> bytes:
        """Serialize the header plus the peer payload."""
        return (super(PeerID, self).to_raw_data() + self.peer_string())

    def to_bplist_encoding(self) -> bytes:
        """Encode as 8-byte big-endian numeric id, 1-byte name length, name."""
        b36_id = self._identifier
        num_id = base36.loads(b36_id)
        name = self._name.encode('raw_unicode_escape')
        # Name length is truncated to one byte (& 255).
        return bytes(((list(num_id.to_bytes(8, 'big')) + [(len(name) & 255)]) + list(name)))

    def identifier_as_32bit(self):
        """Return the low 32 bits of the numeric identifier."""
        as_64 = self.numeric_identifier()
        tmp = ((as_64 >> 32) << 32)
        return (as_64 - tmp)

    # NOTE(review): no `self` — presumably a stripped @staticmethod, the
    # inverse of to_bplist_encoding().
    def from_bplist_encoding(encoded) -> PeerID:
        """Decode the bplist form produced by to_bplist_encoding()."""
        num_id = int.from_bytes(encoded[:8], 'big')
        b36_id = base36.dumps(num_id)
        name_len = encoded[8]
        name = encoded[9:(9 + name_len)].decode('raw_unicode_escape')
        return PeerID.build(b36_id, name)

    def print(self) -> None:
        """Dump the struct header and decoded fields to stdout."""
        print(' _id')
        super(PeerID, self).print()
        id_as_int = base36.loads(self._identifier)
        id_as_bytes = id_as_int.to_bytes(8, 'big').hex()
        print((' identifier : %s [ base36(%d), raw=0x%s ]' % (self._identifier, id_as_int, id_as_bytes)))
        print((' name : %s' % self._name))
def make_pausing_beam_chain(vm_config: VMConfiguration, chain_id: int, consensus_context_class: Type[ConsensusContextAPI], db: AtomicDatabaseAPI, event_bus: EndpointAPI, metrics_registry: MetricsRegistry, loop: asyncio.AbstractEventLoop, urgent: bool=True) -> BeamChain:
    """Build a BeamChain whose VMs are wrapped with the pausing decorator."""
    # Keep each (activation block, VM) pairing, only replacing the VM class.
    decorated_config = []
    for starting_block, vm in vm_config:
        decorated_config.append((starting_block, pausing_vm_decorator(vm, event_bus, metrics_registry, loop, urgent=urgent)))
    chain_class = BeamChain.configure(consensus_context_class=consensus_context_class, vm_configuration=tuple(decorated_config), chain_id=chain_id)
    return chain_class(db)
class OptionSeriesVariablepieSonificationTracksMappingTremolo(Options):
    """Container for the 'tremolo' mapping sub-options ('depth' and 'speed')."""

    # NOTE(review): sibling Option classes in this file show stripped
    # @property decorators; these accessors were likely properties too —
    # confirm against the code generator before calling them as methods.
    def depth(self) -> 'OptionSeriesVariablepieSonificationTracksMappingTremoloDepth':
        """Return (creating on first access) the 'depth' sub-configuration."""
        return self._config_sub_data('depth', OptionSeriesVariablepieSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesVariablepieSonificationTracksMappingTremoloSpeed':
        """Return (creating on first access) the 'speed' sub-configuration."""
        return self._config_sub_data('speed', OptionSeriesVariablepieSonificationTracksMappingTremoloSpeed)
def make_clean(fips_dir, proj_dir, cfg_name):
    """Clean the build directories of every config matching *cfg_name*.

    :param fips_dir: absolute path of the fips directory.
    :param proj_dir: absolute path of the project directory.
    :param cfg_name: config name or pattern to clean.
    :returns: True if all matching configs were cleaned, False otherwise.
    """
    proj_name = util.get_project_name_from_dir(proj_dir)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    if not configs:
        log.error("No valid configs found for '{}'".format(cfg_name))
        # The original fell through and evaluated len(configs) on a possibly
        # empty/None result; bail out explicitly instead.
        return False
    num_valid_configs = 0
    for cfg in configs:
        (config_valid, _) = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True)
        if not config_valid:
            log.error("Config '{}' not valid in this environment".format(cfg['name']))
            continue
        log.colored(log.YELLOW, '=== cleaning: {}'.format(cfg['name']))
        build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
        if cmake.run_clean(fips_dir, build_dir):
            num_valid_configs += 1
        else:
            log.error("Failed to clean config '{}' of project '{}'".format(cfg['name'], proj_name))
    if num_valid_configs != len(configs):
        log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
        return False
    log.colored(log.GREEN, '{} configs cleaned'.format(num_valid_configs))
    return True
class OptionSeriesGaugeSonificationDefaultinstrumentoptionsActivewhen(Options):
    """'activeWhen' options for the default instrument of a sonification.

    The extracted source contained duplicate same-name getter/setter ``def``
    pairs, proving these were ``@property``/``@<name>.setter`` pairs whose
    decorators were stripped; they are restored here.
    """

    @property
    def crossingDown(self):
        """Value configured for the 'crossingDown' option (None if unset)."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """Value configured for the 'crossingUp' option (None if unset)."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """Value configured for the 'max' option (None if unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Value configured for the 'min' option (None if unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Value configured for the 'prop' option (None if unset)."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class ResourceRulesEngine(base_rules_engine.BaseRulesEngine):
    """Rules engine that builds a ResourceRuleBook and checks resources with it."""

    def __init__(self, rules_file_path, snapshot_timestamp=None):
        """Store the rules file path; the rule book itself is built lazily."""
        super(ResourceRulesEngine, self).__init__(rules_file_path=rules_file_path)
        self.rule_book = None

    def build_rule_book(self, global_configs=None):
        """(Re)create the rule book from the loaded rule definitions."""
        self.rule_book = ResourceRuleBook(self._load_rule_definitions())

    def find_violations(self, resources, force_rebuild=False):
        """Return the violations found in *resources*, building the book first if needed."""
        if force_rebuild or (self.rule_book is None):
            self.build_rule_book()
        return self.rule_book.find_violations(resources)

    def add_rules(self, rule_defs):
        """Forward additional rule definitions to an already-built rule book."""
        if self.rule_book is not None:
            self.rule_book.add_rules(rule_defs)
class CommitteeDetail(BaseConcreteCommittee):
    """Detailed committee record mapped onto the ofec_committee_detail_mv view."""
    __table_args__ = {'extend_existing': True}
    __tablename__ = 'ofec_committee_detail_mv'
    # Committee contact and classification fields.
    email = db.Column(db.String(50), doc=docs.COMMITTEE_EMAIL)
    fax = db.Column(db.String(10), doc=docs.COMMITTEE_FAX)
    website = db.Column(db.String(50), doc=docs.COMMITTEE_WEBSITE)
    form_type = db.Column(db.String(3), doc=docs.FORM_TYPE)
    leadership_pac = db.Column(db.String(50), doc=docs.LEADERSHIP_PAC_INDICATE)
    lobbyist_registrant_pac = db.Column(db.String(1), doc=docs.LOBBIST_REGISTRANT_PAC_INDICATE)
    party_type = db.Column(db.String(3), doc=docs.PARTY_TYPE)
    party_type_full = db.Column(db.String(15), doc=docs.PARTY_TYPE_FULL)
    # Committee mailing address.
    street_1 = db.Column(db.String(50), doc=docs.COMMITTEE_STREET_1)
    street_2 = db.Column(db.String(50), doc=docs.COMMITTEE_STREET_2)
    city = db.Column(db.String(50), doc=docs.COMMITTEE_CITY)
    state_full = db.Column(db.String(50), doc=docs.COMMITTEE_STATE_FULL)
    zip = db.Column(db.String(9), doc=docs.COMMITTEE_ZIP)
    # Treasurer name, address, and contact fields.
    treasurer_city = db.Column(db.String(50), doc=docs.TREASURER_CITY)
    treasurer_name_1 = db.Column(db.String(50), doc=docs.TREASURER_NAME_1)
    treasurer_name_2 = db.Column(db.String(50), doc=docs.TREASURER_NAME_2)
    treasurer_name_middle = db.Column(db.String(50), doc=docs.TREASURER_NAME_MIDDLE)
    treasurer_name_prefix = db.Column(db.String(50), doc=docs.TREASURER_NAME_PREFIX)
    treasurer_name_suffix = db.Column(db.String(50), doc=docs.TREASURER_NAME_SUFFIX)
    treasurer_phone = db.Column(db.String(15), doc=docs.TREASURER_PHONE)
    treasurer_state = db.Column(db.String(50), doc=docs.TREASURER_STATE)
    treasurer_street_1 = db.Column(db.String(50), doc=docs.TREASURER_STREET_1)
    treasurer_street_2 = db.Column(db.String(50), doc=docs.TREASURER_STREET_2)
    treasurer_name_title = db.Column(db.String(50), doc=docs.TREASURER_NAME_TITLE)
    treasurer_zip = db.Column(db.String(9), doc=docs.TREASURER_ZIP)
    # Custodian of records name, address, and contact fields.
    custodian_city = db.Column(db.String(50), doc=docs.CUSTODIAN_CITY)
    custodian_name_1 = db.Column(db.String(50), doc=docs.CUSTODIAN_NAME1)
    custodian_name_2 = db.Column(db.String(50), doc=docs.CUSTODIAN_NAME2)
    custodian_name_middle = db.Column(db.String(50), doc=docs.CUSTODIAN_MIDDLE_NAME)
    custodian_name_full = db.Column(db.String(100), doc=docs.CUSTODIAN_NAME_FULL)
    custodian_phone = db.Column(db.String(15), doc=docs.CUSTODIAN_PHONE)
    custodian_name_prefix = db.Column(db.String(50), doc=docs.CUSTODIAN_NAME_PREFIX)
    custodian_state = db.Column(db.String(2), doc=docs.CUSTODIAN_STATE)
    custodian_street_1 = db.Column(db.String(50), doc=docs.CUSTODIAN_STREET_1)
    custodian_street_2 = db.Column(db.String(50), doc=docs.CUSTODIAN_STREET_2)
    custodian_name_suffix = db.Column(db.String(50), doc=docs.CUSTODIAN_NAME_SUFFIX)
    custodian_name_title = db.Column(db.String(50), doc=docs.CUSTODIAN_NAME_TITLE)
    custodian_zip = db.Column(db.String(9), doc=docs.CUSTODIAN_ZIP)
def mockproject(newproject, mocker):
    """Populate a fresh project with sample sources, compile once, and spy on _compile."""
    # Write each fixture source file into the project tree (order preserved).
    sources = [
        ('contracts/Foo.sol', CONTRACT),
        ('contracts/BaseFoo.sol', BASE_CONTRACT),
        ('contracts/FooLib.sol', LIBRARY),
        ('interfaces/IFoo.sol', INTERFACE),
    ]
    for relpath, code in sources:
        with newproject._path.joinpath(relpath).open('w') as handle:
            handle.write(code)
    # Load (compiling the sources) and close so later loads hit the cache.
    newproject.load()
    newproject.close()
    mocker.spy(newproject, '_compile')
    yield newproject
class OptionSeriesTreegraphSonificationContexttracksMappingRate(Options):
    """'rate' mapping options for a treegraph sonification context track.

    Duplicate same-name getter/setter ``def`` pairs in the extracted source
    show these were ``@property``/``@<name>.setter`` pairs with the
    decorators stripped; restored here.
    """

    @property
    def mapFunction(self):
        """Value configured for the 'mapFunction' option (None if unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Value configured for the 'mapTo' option (None if unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Value configured for the 'max' option (None if unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Value configured for the 'min' option (None if unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Value configured for the 'within' option (None if unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesPictorialData(Options):
    """Per-point data options for the pictorial series.

    Duplicate same-name getter/setter ``def`` pairs in the extracted source
    show the ``@property``/``@<name>.setter`` decorators were stripped; they
    are restored here (sub-option accessors included, for consistency with
    the proven pairs).
    """

    @property
    def accessibility(self) -> 'OptionSeriesPictorialDataAccessibility':
        """Accessibility sub-options for the point."""
        return self._config_sub_data('accessibility', OptionSeriesPictorialDataAccessibility)

    @property
    def className(self):
        """Value configured for 'className' (None if unset)."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Value configured for 'color' (None if unset)."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorIndex(self):
        """Value configured for 'colorIndex' (None if unset)."""
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def custom(self):
        """Value configured for 'custom' (None if unset)."""
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dataLabels(self) -> 'OptionSeriesPictorialDataDatalabels':
        """Data-label sub-options for the point."""
        return self._config_sub_data('dataLabels', OptionSeriesPictorialDataDatalabels)

    @property
    def description(self):
        """Value configured for 'description' (None if unset)."""
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def drilldown(self):
        """Value configured for 'drilldown' (None if unset)."""
        return self._config_get(None)

    @drilldown.setter
    def drilldown(self, text: str):
        self._config(text, js_type=False)

    @property
    def events(self) -> 'OptionSeriesPictorialDataEvents':
        """Event sub-options for the point."""
        return self._config_sub_data('events', OptionSeriesPictorialDataEvents)

    @property
    def id(self):
        """Value configured for 'id' (None if unset)."""
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def labelrank(self):
        """Value configured for 'labelrank' (None if unset)."""
        return self._config_get(None)

    @labelrank.setter
    def labelrank(self, num: float):
        self._config(num, js_type=False)

    @property
    def name(self):
        """Value configured for 'name' (None if unset)."""
        return self._config_get(None)

    @name.setter
    def name(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointWidth(self):
        """Value configured for 'pointWidth' (default 'undefined')."""
        return self._config_get('undefined')

    @pointWidth.setter
    def pointWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def selected(self):
        """Value configured for 'selected' (default False)."""
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def x(self):
        """Value configured for 'x' (None if unset)."""
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Value configured for 'y' (None if unset)."""
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsTreegraphSonificationTracksMappingPlaydelay(Options):
    """'playDelay' mapping options for a treegraph sonification track.

    Duplicate same-name getter/setter ``def`` pairs in the extracted source
    show these were ``@property``/``@<name>.setter`` pairs with the
    decorators stripped; restored here.
    """

    @property
    def mapFunction(self):
        """Value configured for the 'mapFunction' option (None if unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Value configured for the 'mapTo' option (None if unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Value configured for the 'max' option (None if unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Value configured for the 'min' option (None if unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Value configured for the 'within' option (None if unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_group():
    """Report grouping places nodes under dbt tree, owners, and tags buckets."""
    report_data = get_report_data()
    groups = report_data['groups']

    def model_entry(node_id):
        # Shorthand for the {'node_id': ..., 'resource_type': 'model'} shape.
        return {'node_id': node_id, 'resource_type': 'model'}

    dbt_root = groups['dbt']['elementary_integration_tests']
    assert model_entry('model.elementary_integration_tests.error_model') in dbt_root['models']['__files__']
    assert model_entry('model.elementary_integration_tests.nested') in dbt_root['models']['nested']['models']['tree']['__files__']
    assert {'node_id': 'source.elementary_integration_tests.training.any_type_column_anomalies_training', 'resource_type': 'source'} in dbt_root['sources']['__files__']
    any_type_model = model_entry('model.elementary_integration_tests.any_type_column_anomalies')
    assert any_type_model in groups['owners']['']
    assert any_type_model not in groups['owners']['No owners']
    string_model = model_entry('model.elementary_integration_tests.string_column_anomalies')
    assert string_model in groups['tags']['marketing']
    assert string_model not in groups['tags']['No tags']
class EODRetriever(FileSource):
    """File source that downloads its data via the ECMWF open-data client."""
    sphinxdoc = '\n EODRetriever\n '

    def __init__(self, source='ecmwf', *args, **kwargs):
        """Normalize the request, create the client, and fetch the file once."""
        # A single positional dict may stand in for keyword arguments.
        if args:
            assert len(args) == 1
            assert isinstance(args[0], dict)
            assert not kwargs
            kwargs = args[0]
        self.source_kwargs = self.request(**kwargs)
        self.client = ecmwf.opendata.Client(source=source, preserve_request_order=True)
        self.path = self._retrieve(self.source_kwargs)

    def connect_to_mirror(self, mirror):
        """Delegate mirror connection to the mirror's EOD-specific hook."""
        return mirror.connection_for_eod(self)

    def _retrieve(self, request):
        """Download (or reuse a cached copy of) the file for *request*."""
        def fetch(target, request):
            self.client.retrieve(request, target)
        return self.cache_file(fetch, request)

    def request(self, **request):
        """Hook for subclasses to adjust the raw request; identity by default."""
        return request
def setup_authentication(app, get_pw_callback):
    """Wire HTTP digest authentication into *app*.

    Registers a before_request hook that forces authentication on every
    request and logs the authenticated user.

    :param app: the Flask application to protect.
    :param get_pw_callback: callback returning the password for a username.
    """
    auth = HTTPDigestAuth()

    # The extracted source had a bare `_required` here — the residue of this
    # decorator, without which the hook never actually enforces auth.
    @auth.login_required
    def _assert_auth_before_request():
        app.logger.info(f'User: {auth.username()}')
        return None
    app.logger.info(f'Setting up {app} to require login...')
    auth.get_password(get_pw_callback)
    app.before_request(_assert_auth_before_request)
def _migrate_gen_kw(ensemble: EnsembleAccessor, data_file: DataFile, ens_config: EnsembleConfig) -> None:
    """Copy every GEN_KW block from *data_file* into the ensemble storage."""
    for block in data_file.blocks(Kind.GEN_KW):
        config = ens_config[block.name]
        assert isinstance(config, GenKwConfig)
        priors = config.get_priors()
        values = data_file.load(block, len(priors))
        parameter_names = [prior['key'] for prior in priors]
        # Store both the raw and transformed values, indexed by prior name.
        dataset = xr.Dataset(
            {
                'values': ('names', values),
                'transformed_values': ('names', config.transform(values)),
                'names': parameter_names,
            }
        )
        ensemble.save_parameters(block.name, block.realization_index, dataset)
def download_datasets(data_dir):
    """Download and extract the roxford5k / rparis6k images and ground truth.

    Creates <data_dir>/datasets/<dataset>/jpg with the images and places
    gnd_<dataset>.pkl next to it. The ground-truth download is skipped when
    the file already exists. Relies on external `wget`, `tar`, `find` etc.
    via os.system, so it is Unix-only.
    """
    if not os.path.isdir(data_dir):
        os.mkdir(data_dir)
    datasets_dir = os.path.join(data_dir, 'datasets')
    if not os.path.isdir(datasets_dir):
        os.mkdir(datasets_dir)
    for dataset in ['roxford5k', 'rparis6k']:
        # NOTE(review): the source URLs were truncated in the extracted file
        # ("src_dir = '"); reconstructed as the canonical revisited
        # Oxford/Paris hosts — confirm against the upstream revisitop
        # download script.
        if dataset == 'roxford5k':
            src_dir = 'https://www.robots.ox.ac.uk/~vgg/data/oxbuildings'
            dl_files = ['oxbuild_images-v1.tgz']
        elif dataset == 'rparis6k':
            src_dir = 'https://www.robots.ox.ac.uk/~vgg/data/parisbuildings'
            dl_files = ['paris_1-v1.tgz', 'paris_2-v1.tgz']
        else:
            raise ValueError('Unknown dataset: {}!'.format(dataset))
        dst_dir = os.path.join(data_dir, 'datasets', dataset, 'jpg')
        if not os.path.isdir(dst_dir):
            print('>> Dataset {} directory does not exist. Creating: {}'.format(dataset, dst_dir))
            os.makedirs(dst_dir)
        for dl_file in dl_files:
            src_file = os.path.join(src_dir, dl_file)
            dst_file = os.path.join(dst_dir, dl_file)
            print('>> Downloading dataset {} archive {}...'.format(dataset, dl_file))
            os.system('wget {} -O {}'.format(src_file, dst_file))
            print('>> Extracting dataset {} archive {}...'.format(dataset, dl_file))
            # Extract into a temp dir, then flatten every file into dst_dir.
            dst_dir_tmp = os.path.join(dst_dir, 'tmp')
            os.system('mkdir {}'.format(dst_dir_tmp))
            os.system('tar -zxf {} -C {}'.format(dst_file, dst_dir_tmp))
            os.system('find {} -type f -exec mv -i {{}} {} \\;'.format(dst_dir_tmp, dst_dir))
            os.system('rm -rf {}'.format(dst_dir_tmp))
            print('>> Extracted, deleting dataset {} archive {}...'.format(dataset, dl_file))
            os.system('rm {}'.format(dst_file))
        # Ground-truth pickle (truncated host reconstructed as above).
        gnd_src_dir = os.path.join('http://cmp.felk.cvut.cz/revisitop/data', 'datasets', dataset)
        gnd_dst_dir = os.path.join(data_dir, 'datasets', dataset)
        gnd_dl_file = 'gnd_{}.pkl'.format(dataset)
        gnd_src_file = os.path.join(gnd_src_dir, gnd_dl_file)
        gnd_dst_file = os.path.join(gnd_dst_dir, gnd_dl_file)
        if not os.path.exists(gnd_dst_file):
            print('>> Downloading dataset {} ground truth file...'.format(dataset))
            os.system('wget {} -O {}'.format(gnd_src_file, gnd_dst_file))
def _patch_info_in_place(fsize, patch_size, compression, compression_info, memory_size, segment_size, from_shift_size, from_size, to_size, segments):
    """Print a human-readable summary of an in-place patch, followed by a
    per-segment breakdown.

    `fsize` formats a byte count for display; `segments` is a sequence of
    (dfpatch_size, data_format, sequential_info) tuples.
    """
    ratio = _format_ratio(patch_size, to_size)
    compression = _format_compression(compression, compression_info)
    print('Type: in-place')
    print(f'Patch size: {fsize(patch_size)}')
    print(f'Memory size: {fsize(memory_size)}')
    print(f'Segment size: {fsize(segment_size)}')
    print(f'From shift size: {fsize(from_shift_size)}')
    print(f'From size: {fsize(from_size)}')
    print(f'To size: {fsize(to_size)}')
    print(f'Patch/to ratio: {ratio} % (lower is better)')
    print(f'Number of segments: {len(segments)}')
    print(f'Compression: {compression}')
    print()
    for index, (dfpatch_size, data_format, sequential_info) in enumerate(segments):
        # Each segment reads from a shifted window of the "from" image and
        # writes one segment-sized slice of the "to" image.
        begin_from = max(segment_size * (index + 1) - from_shift_size, 0)
        end_from = min(from_size, memory_size - from_shift_size)
        begin_to = segment_size * index
        end_to = min(begin_to + segment_size, to_size)
        _patch_info_in_place_segment(fsize, index + 1, begin_from, end_from, begin_to, end_to, dfpatch_size, data_format, *sequential_info)
def fetch_exchange(zone_key1: str, zone_key2: str, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> dict:
    """Fetch the latest net power flow between two zones from enteoperador.org.

    Raises NotImplementedError for historical queries or unmapped exchanges.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    # Zone pairs are keyed in canonical sorted "A->B" form.
    sorted_zones = '->'.join(sorted([zone_key1, zone_key2]))
    if sorted_zones not in JSON_MAPPING:
        raise NotImplementedError('This exchange is not implemented.')
    http = session or Session()
    payload = http.get(DATA_URL).json()
    net_flow = round(extract_exchange(payload, sorted_zones), 1)
    # Source reports in UTC-6; truncate to the minute.
    now = arrow.now('UTC-6').floor('minute')
    return {'sortedZoneKeys': sorted_zones, 'datetime': now.datetime, 'netFlow': net_flow, 'source': 'enteoperador.org'}
class Bug(db.Model):
    # SQLAlchemy model for a submitted bug entry awaiting review.
    id = db.Column(db.Integer, primary_key=True)  # surrogate primary key
    title = db.Column(db.String(100), nullable=False)  # short title, max 100 chars
    body = db.Column(db.Text, nullable=False)  # full description text
    link = db.Column(db.String(100), nullable=False)  # associated URL, max 100 chars
    owner_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)  # FK to the submitting user
    reviewed = db.Column(db.Boolean, default=False)  # False until reviewed
class FBPrintCountersCommand(fb.FBCommand):
    """Chisel command that dumps the global `counters` mapping, one per line."""

    def name(self):
        return 'printcounters'

    def description(self):
        return 'Prints all the counters sorted by the keys.'

    def run(self, arguments, options):
        # Emit "<key>: <value>" lines in ascending key order.
        for counter_key in sorted(counters.keys()):
            print('{}: {}'.format(counter_key, counters[counter_key]))
class MegaDB():
    """Thin wrapper that opens a MongoDB connection from `Common` settings."""

    def __init__(self):
        # Atlas connection strings already embed credentials; plain hosts
        # need explicit username/password.
        if Common().is_atlas:
            client = pymongo.MongoClient(Common().db_host)
        else:
            client = pymongo.MongoClient(Common().db_host, username=Common().db_username, password=Common().db_password)
        self.db_client = client
        self.db = self.db_client[Common().db_name]
def test_app_middleware_argument(test_client_factory):
    """The `middleware` argument to Starlette must install CustomMiddleware,
    which stamps responses with a Custom-Header."""
    def homepage(request):
        return PlainTextResponse('Homepage')

    application = Starlette(routes=[Route('/', homepage)], middleware=[Middleware(CustomMiddleware)])
    res = test_client_factory(application).get('/')
    assert res.headers['Custom-Header'] == 'Example'
def rotate_island(island, uv_layer=None, angle=0, center_x=0, center_y=0):
    """Rotate the UV coordinates of an island of faces around a pivot, in place.

    island: iterable of BMesh faces whose loops' UVs are rotated.
    uv_layer: UV layer key; when None it is looked up from the active
        edit-mesh object (requires Blender edit mode).
    angle: rotation angle in radians (counter-clockwise).
    center_x, center_y: pivot point in UV space.
    """
    if uv_layer is None:
        # Fall back to the active object's UV layer (edit-mode only).
        me = bpy.context.active_object.data
        bm = bmesh.from_edit_mesh(me)
        uv_layer = bm.loops.layers.uv.verify()
    # Hoist the trig out of the loop: the angle is constant for the island.
    cos_a = math.cos(angle)
    sin_a = math.sin(angle)
    for face in island:
        for loop in face.loops:
            (x, y) = loop[uv_layer].uv
            # Translate to the pivot, rotate, translate back.
            xt = x - center_x
            yt = y - center_y
            loop[uv_layer].uv.x = (xt * cos_a) - (yt * sin_a) + center_x
            loop[uv_layer].uv.y = (xt * sin_a) + (yt * cos_a) + center_y
def get_next_ball_pocket_collision(shot: System, solver: QuarticSolver=QuarticSolver.HYBRID) -> Event:
    """Return the earliest upcoming ball/pocket collision event for `shot`.

    When no translating ball can reach a pocket, a dummy event scheduled
    infinitely far in the future is returned.
    """
    pair_ids = []
    coeffs = []
    for ball in shot.balls.values():
        # Non-translating balls can never fall into a pocket.
        if ball.state.s in const.nontranslating:
            continue
        state, params = ball.state, ball.params
        friction = params.u_s if state.s == const.sliding else params.u_r
        for pocket in shot.table.pockets.values():
            coeffs.append(solve.ball_pocket_collision_coeffs(rvw=state.rvw, s=state.s, a=pocket.a, b=pocket.b, r=pocket.radius, mu=friction, m=params.m, g=params.g, R=params.R))
            pair_ids.append((ball.id, pocket.id))
    if not coeffs:
        # Nothing can be pocketed: emit a dummy event at t + infinity.
        return ball_pocket_collision(Ball.dummy(), Pocket.dummy(), shot.t + np.inf)
    (dtau_E, index) = ptmath.roots.quartic.minimum_quartic_root(ps=np.array(coeffs), solver=solver)
    (ball_id, pocket_id) = pair_ids[index]
    return ball_pocket_collision(shot.balls[ball_id], shot.table.pockets[pocket_id], shot.t + dtau_E)
class OptionSeriesVectorStatesHoverMarker(Options):
    """Highcharts `series.vector.states.hover.marker` option group.

    Each option is a property pair: the getter returns the configured value
    (falling back to the default passed to `_config_get`), the setter stores
    the value via `_config`.

    NOTE(review): in the original every getter/setter pair consisted of two
    plain methods with the same name, so each setter silently shadowed its
    getter; the `@property`/`@<name>.setter` decorators restore the intended
    accessor pattern — confirm against the upstream source.
    """

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
def exposed_test_all_rss():
    """Fetch every configured RSS feed (8 worker threads) with RSS debugging on."""
    print('fetching and debugging RSS feeds')
    rules = WebMirror.rules.load_rules()
    # Flatten the per-rule feed-url lists into one flat list of urls.
    feed_urls = [url for item in rules for url in item['feedurls']]
    flags.RSS_DEBUG = True
    with ThreadPoolExecutor(max_workers=8) as executor:
        for feed_url in feed_urls:
            try:
                executor.submit(common.management.WebMirrorManage.exposed_fetch, feed_url, debug=False)
            except common.Exceptions.DownloadException:
                print('failure downloading page!')
            except urllib.error.URLError:
                print('failure downloading page!')
def test_serialize_deserialize():
    """A StorageItem must round-trip through serialize()/deserialize()."""
    original = StorageItem.build_from(b'key_hash', b'key_data', b'value_data')
    restored = StorageItem.deserialize(original.serialize())
    # Every field, including the derived length, must survive the round trip.
    assert restored.key_hash == original.key_hash
    assert restored.key_data == original.key_data
    assert restored.value_data == original.value_data
    assert restored.length == original.length
class UNet2DConditionModel(nn.Module):
    """Conditional 2D U-Net: denoises `sample` given `timesteps` and
    cross-attends to `encoder_hidden_states` (e.g. text embeddings).

    Structure: conv_in -> down blocks (collecting skip activations) ->
    cross-attention mid block -> up blocks (consuming skips) ->
    group-norm (with swish) + conv_out.
    """
    def __init__(self, sample_size: Optional[int]=None, in_channels: int=4, out_channels: int=4, center_input_sample: bool=False, flip_sin_to_cos: bool=True, freq_shift: int=0, down_block_types: Tuple[str]=('CrossAttnDownBlock2D', 'CrossAttnDownBlock2D', 'CrossAttnDownBlock2D', 'DownBlock2D'), up_block_types: Tuple[str]=('UpBlock2D', 'CrossAttnUpBlock2D', 'CrossAttnUpBlock2D', 'CrossAttnUpBlock2D'), block_out_channels: Tuple[int]=(320, 640, 1280, 1280), layers_per_block: int=2, downsample_padding: int=1, mid_block_scale_factor: float=1, act_fn: str='silu', norm_num_groups: int=32, norm_eps: float=1e-05, cross_attention_dim: int=1280, attention_head_dim: Union[(int, Tuple[int])]=8, use_linear_projection: bool=False):
        """Build the down/mid/up block stacks from the *_block_types specs."""
        super().__init__()
        self.center_input_sample = center_input_sample
        self.sample_size = sample_size
        # Time embedding width is 4x the first block's channel count.
        time_embed_dim = (block_out_channels[0] * 4)
        self.conv_in = nn.Conv2dBias(in_channels, block_out_channels[0], 3, 1, 1)
        self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift)
        timestep_input_dim = block_out_channels[0]
        self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim)
        self.down_blocks = nn.ModuleList([])
        self.up_blocks = nn.ModuleList([])
        # A single int means the same head dim for every down block.
        if isinstance(attention_head_dim, int):
            attention_head_dim = ((attention_head_dim,) * len(down_block_types))
        output_channel = block_out_channels[0]
        for (i, down_block_type) in enumerate(down_block_types):
            input_channel = output_channel
            output_channel = block_out_channels[i]
            # The final down block performs no further downsampling.
            is_final_block = (i == (len(block_out_channels) - 1))
            down_block = get_down_block(down_block_type, num_layers=layers_per_block, in_channels=input_channel, out_channels=output_channel, temb_channels=time_embed_dim, add_downsample=(not is_final_block), resnet_eps=norm_eps, resnet_act_fn=act_fn, attn_num_head_channels=attention_head_dim[i], cross_attention_dim=cross_attention_dim, downsample_padding=downsample_padding, use_linear_projection=use_linear_projection)
            self.down_blocks.append(down_block)
        self.mid_block = UNetMidBlock2DCrossAttn(in_channels=block_out_channels[(- 1)], temb_channels=time_embed_dim, resnet_eps=norm_eps, resnet_act_fn=act_fn, output_scale_factor=mid_block_scale_factor, resnet_time_scale_shift='default', cross_attention_dim=cross_attention_dim, attn_num_head_channels=attention_head_dim[(- 1)], resnet_groups=norm_num_groups, use_linear_projection=use_linear_projection)
        # The up path mirrors the down path, so channel lists are reversed.
        reversed_block_out_channels = list(reversed(block_out_channels))
        reversed_attention_head_dim = list(reversed(attention_head_dim))
        output_channel = reversed_block_out_channels[0]
        for (i, up_block_type) in enumerate(up_block_types):
            prev_output_channel = output_channel
            output_channel = reversed_block_out_channels[i]
            input_channel = reversed_block_out_channels[min((i + 1), (len(block_out_channels) - 1))]
            is_final_block = (i == (len(block_out_channels) - 1))
            # Up blocks get one extra layer to consume the skip connections.
            up_block = get_up_block(up_block_type, num_layers=(layers_per_block + 1), in_channels=input_channel, out_channels=output_channel, prev_output_channel=prev_output_channel, temb_channels=time_embed_dim, add_upsample=(not is_final_block), resnet_eps=norm_eps, resnet_act_fn=act_fn, attn_num_head_channels=reversed_attention_head_dim[i], cross_attention_dim=cross_attention_dim, use_linear_projection=use_linear_projection)
            self.up_blocks.append(up_block)
            prev_output_channel = output_channel
        self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps, use_swish=True)
        self.conv_out = nn.Conv2dBias(block_out_channels[0], out_channels, 3, 1, 1)
    def forward(self, sample, timesteps, encoder_hidden_states, down_block_additional_residuals: Optional[Tuple[Tensor]]=None, mid_block_additional_residual: Optional[Tensor]=None, return_dict: bool=True):
        """Denoise `sample` conditioned on `timesteps` and encoder states.

        The *_additional_residual arguments (e.g. supplied by a ControlNet)
        are added to the matching skip/mid activations when given.
        NOTE(review): `return_dict` is accepted but unused here — the raw
        tensor is always returned; confirm callers expect that.
        """
        # Embed the timestep, then lift the sample into feature space.
        t_emb = self.time_proj(timesteps)
        emb = self.time_embedding(t_emb)
        sample = self.conv_in(sample)
        # Down path: collect skip activations for the up path.
        down_block_res_samples = (sample,)
        for downsample_block in self.down_blocks:
            if (hasattr(downsample_block, 'attentions') and (downsample_block.attentions is not None)):
                (sample, res_samples) = downsample_block(hidden_states=sample, temb=emb, encoder_hidden_states=encoder_hidden_states)
            else:
                (sample, res_samples) = downsample_block(hidden_states=sample, temb=emb)
            down_block_res_samples += res_samples
        # Add externally supplied residuals (e.g. ControlNet) to the skips.
        if (down_block_additional_residuals is not None):
            new_down_block_res_samples = ()
            for (down_block_res_sample, down_block_additional_residual) in zip(down_block_res_samples, down_block_additional_residuals):
                down_block_res_sample += down_block_additional_residual
                new_down_block_res_samples += (down_block_res_sample,)
            down_block_res_samples = new_down_block_res_samples
        sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states)
        if (mid_block_additional_residual is not None):
            sample += mid_block_additional_residual
        # Up path: pop skips from the tail, matching each block's resnets.
        for upsample_block in self.up_blocks:
            res_samples = down_block_res_samples[(- len(upsample_block.resnets)):]
            down_block_res_samples = down_block_res_samples[:(- len(upsample_block.resnets))]
            if (hasattr(upsample_block, 'attentions') and (upsample_block.attentions is not None)):
                sample = upsample_block(hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, encoder_hidden_states=encoder_hidden_states)
            else:
                sample = upsample_block(hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples)
        sample = self.conv_norm_out(sample)
        sample = self.conv_out(sample)
        return sample
def build_parser() -> argparse.ArgumentParser:
    """Construct the command-line parser for the beacon tool.

    Options are organised into communication, sleep, metadata, metadata-flag
    and output groups; the single positional argument selects the beacon
    configuration to use.
    """
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('beacon', metavar='BEACON', help='beacon to use as configuration')

    comm_group = parser.add_argument_group('beacon communication')
    comm_group.add_argument('-d', '--domain', help='override the domain configured in the beacon')
    comm_group.add_argument('-p', '--port', type=int, help='override the port configured in the beacon')

    sleep_group = parser.add_argument_group('beacon sleep options')
    sleep_group.add_argument('--sleeptime', type=int, help='override sleeptime settings (in milliseconds)')
    sleep_group.add_argument('--jitter', type=int, help='override jitter settings (in percentage)')

    meta_group = parser.add_argument_group('beacon metadata')
    meta_group.add_argument('-c', '--computer', default=None, help='computer name (None = random)')
    meta_group.add_argument('-u', '--user', default=None, help='user name (None = random)')
    meta_group.add_argument('-P', '--process', default=None, help='process name (None = random)')
    meta_group.add_argument('-i', '--beacon-id', required=False, type=int, help='beacon id (None = random)')
    meta_group.add_argument('-I', '--internal-ip', help='internal ip (None = random)')

    flag_group = parser.add_argument_group('beacon metadata flags')
    flag_group.add_argument('--arch', choices=['x86', 'x64'], default=None, help='system architecture (None = random)')
    flag_group.add_argument('--barch', choices=['x86', 'x64'], default=None, help='beacon architecture (None = random)')
    flag_group.add_argument('--high-integrity', action='store_true', default=False, help='set high integrity flag')

    parser.add_argument('-n', '--dry-run', action='store_true', default=False, help='show settings and exit')

    out_group = parser.add_argument_group('output options')
    out_group.add_argument('-w', '--writer', help='record writer')
    out_group.add_argument('-v', '--verbose', action='count', default=0, help='verbosity level (-v for INFO, -vv for DEBUG)')
    out_group.add_argument('-s', '--silent', action='store_true', default=False, help='suppress empty task messages')
    return parser
def GetPythonDependencies():
    """Return sorted relative paths of all loaded Python modules that live
    under DIR_SOURCE_ROOT.

    Bytecode (.pyc) paths are mapped back to their .py source files.
    """
    _ForceLazyModulesToLoad()
    assert os.path.isabs(DIR_SOURCE_ROOT)
    paths = set()
    for module in sys.modules.values():
        if module is None or not hasattr(module, '__file__'):
            continue
        path = module.__file__
        if path is None:
            continue
        path = os.path.abspath(path)
        if not path.startswith(DIR_SOURCE_ROOT):
            # System / site-packages module: not a source dependency.
            continue
        if path.endswith('.pyc'):
            # Report the .py source rather than the bytecode cache file.
            path = path[:(- 1)]
        paths.add(os.path.relpath(path))
    return sorted(paths)
def dircmp_recursive(dircmp_obj: filecmp.dircmp) -> Tuple[(Set[str], Set[str], Set[str])]:
    """Recursively collect the differences recorded in a filecmp.dircmp.

    Returns three sets of relative paths: entries only on the left side,
    entries only on the right side, and files present on both sides whose
    contents differ.
    """
    left: Set[str] = set()
    right: Set[str] = set()
    changed: Set[str] = set()

    def walk(node: filecmp.dircmp, prefix: str) -> None:
        # Record this level's differences, prefixed with the relative path.
        left.update(os.path.join(prefix, name) for name in node.left_only)
        right.update(os.path.join(prefix, name) for name in node.right_only)
        changed.update(os.path.join(prefix, name) for name in node.diff_files)
        # Descend into directories common to both sides.
        for child_name, child in node.subdirs.items():
            walk(child, os.path.join(prefix, child_name))

    walk(dircmp_obj, '')
    return (left, right, changed)
def parse(code='', mode='sys', state=False, keep_internal_state=None):
    """Compile `code` using the parser selected by `mode`.

    When `keep_internal_state` is None it defaults to the truthiness of
    `state`. Raises CoconutException for an unknown mode.
    """
    if keep_internal_state is None:
        keep_internal_state = bool(state)
    command = get_state(state)
    if command.comp is None:
        # Lazily initialise the underlying compiler on first use.
        command.setup()
    if mode not in PARSERS:
        valid = ', '.join(PARSERS)
        raise CoconutException('invalid parse mode ' + repr(mode), extra='valid modes are ' + valid)
    return PARSERS[mode](command.comp)(code, keep_state=keep_internal_state)
('cuda.gemm_rrr_small_nk.func_call')
# NOTE(review): the bare string expression above is almost certainly a
# stripped registration decorator (e.g.
# `@registry.reg("cuda.gemm_rrr_small_nk.func_call")`); as written it is a
# no-op — confirm and restore against the upstream source.
def gen_function_call(func_attrs, indent='  '):
    """Render the function call for a small-NK row/row/row GEMM.

    Pulls the A/B input and C output tensors from ``func_attrs`` and renders
    FUNC_CALL_TEMPLATE with their data pointers and shape-dimension
    addresses.
    """
    a = func_attrs['inputs'][0]
    ashape = a._attrs['shape']
    # Each dynamic dim is passed by address ("&name") into the template.
    adims = [('&' + dim._attrs['name']) for dim in ashape]
    b = func_attrs['inputs'][1]
    bshape = b._attrs['shape']
    bdims = [('&' + dim._attrs['name']) for dim in bshape]
    c = func_attrs['outputs'][0]
    cshape = c._attrs['shape']
    cdims = [('&' + dim._attrs['name']) for dim in cshape]
    use_fp16_acc = False
    if ('use_fp16_acc' in Target.current()._kwargs):
        # Honour the target-level accumulator-precision override.
        use_fp16_acc = Target.current()._kwargs['use_fp16_acc']
    return FUNC_CALL_TEMPLATE.render(func_name=func_attrs['name'], a_ptr=a._attrs['name'], b_ptr=b._attrs['name'], c_ptr=c._attrs['name'], adims=adims, bdims=bdims, cdims=cdims, use_fp16_acc=('true' if use_fp16_acc else 'false'), indent=indent)
class CustomerTypeTests(unittest.TestCase):
    """Unit tests for the CustomerType QBO object."""

    def test_unicode(self):
        # str() of a CustomerType should be its Name.
        obj = CustomerType()
        obj.Name = 'test'
        self.assertEqual(str(obj), 'test')

    def test_valid_object_name(self):
        # The object's qbo_object_name must be recognised by the client.
        obj = CustomerType()
        qb_client = QuickBooks()
        self.assertTrue(qb_client.isvalid_object_name(obj.qbo_object_name))
def _unicorn_hook_block(uc: Uc, address: int, _size: int, user_data: Tuple[(ProcessController, int)]) -> None:
    """Unicorn basic-block hook used while emulating toward an API call.

    Stops the emulation when a known exported function is reached with a
    return address matching the expected call site (or the synthetic stack
    sentinel), or when a no-return API is hit; "bogus" APIs are simulated
    and skipped. On stop, the reached API's address is placed in the
    result register (EAX/RAX).
    """
    (process_controller, stop_on_ret_addr) = user_data
    ptr_size = process_controller.pointer_size
    arch = process_controller.architecture
    # Select the register set for the emulated architecture.
    if (arch == Architecture.X86_32):
        pc_register = UC_X86_REG_EIP
        sp_register = UC_X86_REG_ESP
        result_register = UC_X86_REG_EAX
    elif (arch == Architecture.X86_64):
        pc_register = UC_X86_REG_RIP
        sp_register = UC_X86_REG_RSP
        result_register = UC_X86_REG_RAX
    else:
        raise NotImplementedError(f'Unsupported architecture: {arch}')
    exports_dict = process_controller.enumerate_exported_functions()
    if (address in exports_dict):
        # We just entered an exported function: read the return address
        # from the top of the emulated stack.
        sp = uc.reg_read(sp_register)
        assert isinstance(sp, int)
        ret_addr_data = uc.mem_read(sp, ptr_size)
        ret_addr = struct.unpack(pointer_size_to_fmt(ptr_size), ret_addr_data)[0]
        api_name = exports_dict[address]['name']
        LOG.debug("Reached API '%s'", api_name)
        # Stop if we came from the expected call site (the +1 variant is
        # also accepted — presumably to tolerate a 1-byte offset; confirm)
        # or from the synthetic stack sentinel.
        if ((ret_addr == stop_on_ret_addr) or (ret_addr == (stop_on_ret_addr + 1)) or (ret_addr == STACK_MAGIC_RET_ADDR)):
            uc.reg_write(result_register, address)
            uc.emu_stop()
            return
        if _is_no_return_api(api_name):
            # APIs that never return: stop here as well.
            LOG.debug('Reached noreturn API, stopping emulation')
            uc.reg_write(result_register, address)
            uc.emu_stop()
            return
        if _is_bogus_api(api_name):
            # Fake the API: write a simulated result and return to caller.
            LOG.debug('Reached bogus API call, skipping')
            (result, arg_count) = _simulate_bogus_api(api_name)
            uc.reg_write(result_register, result)
            if (arch == Architecture.X86_32):
                # Pop the return address plus all stack-passed arguments
                # (stdcall-style cleanup — NOTE(review): confirm convention).
                uc.reg_write(sp_register, (sp + (ptr_size * (1 + arg_count))))
            elif (arch == Architecture.X86_64):
                # First 4 args are in registers; only the rest are on the stack.
                stack_arg_count = max(0, (arg_count - 4))
                uc.reg_write(sp_register, (sp + (ptr_size * (1 + stack_arg_count))))
            uc.reg_write(pc_register, ret_addr)
            return
(help={'serve': 'Build the docs watching for changes', 'open_browser': 'Open the docs in the web browser'})
# NOTE(review): the expression above is the argument list of a stripped
# decorator (most likely invoke's `@task(help=...)`); as written it is not
# valid Python — restore the decorator from the upstream source.
def docs(c, serve=False, open_browser=False):
    """Build the Sphinx docs; optionally open them and/or rebuild on change."""
    _run(c, f'sphinx-apidoc -o {DOCS_DIR} {SOURCE_DIR}')
    build_docs = f'sphinx-build -b html {DOCS_DIR} {DOCS_BUILD_DIR}'
    _run(c, build_docs)
    if open_browser:
        webbrowser.open(DOCS_INDEX.absolute().as_uri())
    if serve:
        # Watch .rst/.md files and re-run the build command on changes.
        _run(c, f"poetry run watchmedo shell-command -p '*.rst;*.md' -c '{build_docs}' -R -D .")
def find_config_path(path: Path) -> Tuple[(Path, bool)]:
    """Walk upwards from `path` looking for a directory containing `_config.yml`.

    Returns (directory, True) for the closest ancestor — starting at `path`
    itself, or its parent when `path` is a file — that holds `_config.yml`.
    If none is found, returns (`path`, False) when `path` is a directory,
    otherwise (`path.parent`, False).

    Fixes two defects in the original: the start directory was tested twice,
    and the filesystem root was never tested (the loop stopped just before
    reaching it).
    """
    current_dir = path if path.is_dir() else path.parent
    while True:
        if (current_dir / '_config.yml').is_file():
            return (current_dir, True)
        if current_dir == current_dir.parent:
            # Reached the filesystem root without finding a config file.
            break
        current_dir = current_dir.parent
    if not path.is_dir():
        return (path.parent, False)
    return (path, False)
(tags=['electioneering'], description=docs.ELECTIONEERING_AGGREGATE_BY_CANDIDATE)
# NOTE(review): the line above is the argument list of a stripped decorator
# (most likely flask-apispec's `@doc(...)`); as written it is not valid
# Python — restore the decorator from the upstream source.
class ElectioneeringByCandidateView(CandidateAggregateResource):
    """Aggregated electioneering communications, grouped by candidate."""
    model = models.ElectioneeringByCandidate  # SQLAlchemy model backing the view
    schema = schemas.ElectioneeringByCandidateSchema  # single-row serializer
    page_schema = schemas.ElectioneeringByCandidatePageSchema  # paginated serializer
    query_args = utils.extend(args.elections, args.electioneering_by_candidate)  # accepted query params
    filter_multi_fields = [('candidate_id', models.ElectioneeringByCandidate.candidate_id)]  # multi-value filters
.parallel(nprocs=2)
# NOTE(review): the dangling `.parallel(nprocs=2)` above is the tail of a
# stripped decorator (most likely `@pytest.mark.parallel(nprocs=2)`); as
# written it is not valid Python — restore the decorator from upstream.
def test_assign_with_valid_halo_and_subset_sets_halo_values(cg1):
    """Assigning via a subset must keep the halo valid and set exactly the
    subset's entries (the first owned dof and the first halo dof) to 1."""
    u = Function(cg1)
    assert u.dat.halo_valid
    subset = make_subset(cg1)
    u.assign(1, subset=subset)
    # Expected flat data: zeros except index 0 (owned) and index
    # dataset.size (first halo entry).
    expected = ([0] * u.dat.dataset.total_size)
    expected[0] = 1
    expected[u.dat.dataset.size] = 1
    assert u.dat.halo_valid
    assert np.allclose(u.dat._data, expected)
class TestUserUtilitiesHelper(OpenEventTestCase):
    """Tests for the email mangling applied when deleting/restoring users."""

    def test_modify_email_for_user_to_be_deleted(self):
        # Deleting a user tags the stored email with the '.deleted' suffix.
        with self.app.test_request_context():
            account = create_user(email='test_', password='testpass')
            save_to_db(account)
            updated = modify_email_for_user_to_be_deleted(account)
            assert updated.email == 'test_.deleted'

    def test_modify_email_for_user_to_be_restored(self):
        with self.app.test_request_context():
            account = create_user(email='test_.deleted', password='testpass')
            save_to_db(account)
            updated = modify_email_for_user_to_be_restored(account)
            assert updated.email == 'test_'
            # Restoring must be refused when the undeleted address is taken.
            taken = create_user(email='test_', password='testpass')
            save_to_db(taken)
            other = create_user(email='test_.deleted', password='testpass')
            save_to_db(other)
            with pytest.raises(ForbiddenError):
                modify_email_for_user_to_be_restored(other)
class S7LPDDR5PHY(LPDDR5PHY, S7Common):
    """LPDDR5 PHY for Xilinx 7-series FPGAs.

    Serializes clock/command/address/data with OSERDESE2 and deserializes
    reads with ISERDESE2. With `with_odelay`, outputs additionally pass
    through ODELAYE2 for fine write/clock delay calibration; read paths
    always use IDELAYE2. `csr_cdc`, when given, wraps CSR strobes so they
    can cross into the sys clock domain.
    """
    def __init__(self, pads, *, iodelay_clk_freq, with_odelay, ddr_clk=None, csr_cdc=None, **kwargs):
        self.iodelay_clk_freq = iodelay_clk_freq
        super().__init__(pads, ser_latency=Latency(sys=1), des_latency=Latency(sys=2), phytype=self.__class__.__name__, **kwargs)
        self.settings.delays = 32
        self.settings.write_leveling = True
        self.settings.write_latency_calibration = True
        self.settings.write_dq_dqs_training = True
        self.settings.read_leveling = True
        # IDELAYCTRL reference clock on 7-series must be 200/300/400 MHz.
        # NOTE(review): the original literals were corrupted ([.0, .0, .0],
        # which would also divide by zero below); restored from the
        # IDELAYE2/IDELAYCTRL specification — confirm against upstream.
        assert (iodelay_clk_freq in [200e6, 300e6, 400e6])
        # One IDELAY tap = 1 / (2 * 32 * ref_clk); derive taps for a
        # quarter-period (90 degree) shift of the WCK period.
        iodelay_tap_average = (1 / ((2 * 32) * iodelay_clk_freq))
        half_sys4x_taps = math.floor((self.twck / (4 * iodelay_tap_average)))
        assert (half_sys4x_taps < 32), 'Exceeded ODELAYE2 max value: {} >= 32'.format(half_sys4x_taps)
        self._half_sys8x_taps = CSRStorage(5, reset=half_sys4x_taps)
        # Read-path delay controls (applied per byte via get_rst/get_inc).
        self._rdly_dq_rst = CSR()
        self._rdly_dq_inc = CSR()
        self._rdly_dqs_rst = CSR()
        self._rdly_dqs_inc = CSR()
        if with_odelay:
            # Write-path and clock delay controls exist only with ODELAYE2.
            self._cdly_rst = CSR()
            self._cdly_inc = CSR()
            self._wdly_dq_rst = CSR()
            self._wdly_dq_inc = CSR()
            self._wdly_dqs_rst = CSR()
            self._wdly_dqs_inc = CSR()
        def cdc(i):
            # Optionally cross a CSR strobe into the sys domain.
            if (csr_cdc is None):
                return i
            return csr_cdc(i)
        rdly_dq_rst = cdc(self._rdly_dq_rst.re)
        rdly_dq_inc = cdc(self._rdly_dq_inc.re)
        rdly_dqs_rst = cdc(self._rdly_dqs_rst.re)
        rdly_dqs_inc = cdc(self._rdly_dqs_inc.re)
        if with_odelay:
            cdly_rst = (cdc(self._cdly_rst.re) | self._rst.storage)
            cdly_inc = cdc(self._cdly_inc.re)
            wdly_dq_rst = cdc(self._wdly_dq_rst.re)
            wdly_dq_inc = cdc(self._wdly_dq_inc.re)
            wdly_dqs_rst = cdc(self._wdly_dqs_rst.re)
            wdly_dqs_inc = cdc(self._wdly_dqs_inc.re)
        def oe_delay_data(oe):
            # Stretch the data output-enable over 3 sys cycles (OR of taps).
            oe_d = Signal()
            delay = TappedDelayLine(oe, 3)
            self.submodules += delay
            self.comb += oe_d.eq(reduce(or_, delay.taps))
            return oe_d
        def oe_delay_dqs(oe):
            # DQS output-enable is delayed by 2 sys cycles.
            delay = TappedDelayLine(oe, 2)
            self.submodules += delay
            return delay.output
        # CK output, optionally through ODELAYE2, to differential pads.
        ck_dly = Signal()
        ck_ser = Signal()
        self.oserdese2_sdr(din=self.out.ck, dout=(ck_ser if with_odelay else ck_dly), clk='sys4x', clkdiv='sys')
        if with_odelay:
            self.odelaye2(din=ck_ser, dout=ck_dly, rst=cdly_rst, inc=cdly_inc, clk='sys')
        self.obufds(din=ck_dly, dout=self.pads.ck_p, dout_b=self.pads.ck_n)
        # Single-bit commands: replicate to 2 bits, bitslip by 1, serialize.
        for cmd in ['cs', 'reset_n']:
            cmd_i = getattr(self.out, cmd)
            cmd_o = getattr(self.pads, cmd)
            cmd_ser = Signal()
            assert (len(cmd_i) == 1)
            cmd_2bit_i = Signal(2)
            cmd_2bit_o = Signal(2)
            self.comb += cmd_2bit_i.eq(Replicate(cmd_i, 2))
            self.submodules += ConstBitSlip(dw=2, slp=1, cycles=1, register=False, i=cmd_2bit_i, o=cmd_2bit_o)
            self.oserdese2_sdr(din=cmd_2bit_o, dout=(cmd_ser if with_odelay else cmd_o), clk='sys4x', clkdiv='sys')
            if with_odelay:
                self.odelaye2(din=cmd_ser, dout=cmd_o, rst=cdly_rst, inc=cdly_inc, clk='sys')
        # Command/address bus: widen each 2-bit CA lane to 4 bits, bitslip by 3.
        for bit in range(len(self.out.ca)):
            ca_i = self.out.ca[bit]
            ca_ser = Signal()
            ca_dly = self.pads.ca[bit]
            assert (len(ca_i) == 2)
            ca_4bit_i = Signal(4)
            ca_4bit_o = Signal(4)
            # FIX: the original read `cmd_4bit_i.eq(Cat([Replicate(bit, 2) for bit in cmd_i]))`,
            # referencing the undefined `cmd_4bit_i` and leaking `cmd_i` from
            # the command loop above (while also shadowing the outer `bit`);
            # it must duplicate each bit of this CA lane instead.
            self.comb += ca_4bit_i.eq(Cat([Replicate(ca_bit, 2) for ca_bit in ca_i]))
            self.submodules += ConstBitSlip(dw=4, slp=3, cycles=1, register=False, i=ca_4bit_i, o=ca_4bit_o)
            self.oserdese2_sdr(din=ca_4bit_o, dout=(ca_ser if with_odelay else ca_dly), clk='sys4x', clkdiv='sys')
            if with_odelay:
                self.odelaye2(din=ca_ser, dout=ca_dly, rst=cdly_rst, inc=cdly_inc, clk='sys')
        # Data-path (de)serializers: SDR for WCK:CK = 2, DDR otherwise.
        data_ser = (self.oserdese2_sdr if (self.settings.wck_ck_ratio == 2) else self.oserdese2_ddr)
        data_des = (self.iserdese2_sdr if (self.settings.wck_ck_ratio == 2) else self.iserdese2_ddr)
        # WCK per byte, optionally delayed, to differential pads.
        for byte in range((self.databits // 8)):
            wck_ser = Signal()
            wck_dly = Signal()
            data_ser(din=self.out.wck[byte], dout=(wck_ser if with_odelay else wck_dly), clk='sys4x', clkdiv='sys')
            if with_odelay:
                self.odelaye2(din=wck_ser, dout=wck_dly, rst=cdly_rst, inc=cdly_inc, clk='sys')
            self.obufds(din=wck_dly, dout=self.pads.wck_p[byte], dout_b=self.pads.wck_n[byte])
        # RDQS: bidirectional strobe per byte (ODELAYE2 out, IDELAYE2 in).
        for byte in range((self.databits // 8)):
            dqs_t = Signal()
            dqs_ser = Signal()
            dqs_dly = Signal()
            dqs_i = Signal()
            dqs_i_dly = Signal()
            dqs_din = self.out.rdqs_o[byte]
            if (not with_odelay):
                # Without ODELAYE2 add one sys cycle of latency instead.
                dqs_din_d = Signal.like(dqs_din)
                self.sync += dqs_din_d.eq(dqs_din)
                dqs_din = dqs_din_d
            data_ser(din=dqs_din, **(dict(dout_fb=dqs_ser) if with_odelay else dict(dout=dqs_dly)), tin=(~ oe_delay_dqs(self.out.rdqs_oe)), tout=dqs_t, clk=('sys4x' if with_odelay else 'sys4x_90'), clkdiv='sys')
            if with_odelay:
                self.odelaye2(din=dqs_ser, dout=dqs_dly, rst=self.get_rst(byte, wdly_dqs_rst), inc=self.get_inc(byte, wdly_dqs_inc), init=half_sys4x_taps, clk='sys')
            self.iobufds(din=dqs_dly, dout=dqs_i, tin=dqs_t, dinout=self.pads.rdqs_p[byte], dinout_b=self.pads.rdqs_n[byte])
            self.idelaye2(din=dqs_i, dout=dqs_i_dly, rst=self.get_rst(byte, rdly_dqs_rst), inc=self.get_inc(byte, rdly_dqs_inc), clk='sys')
            data_des(din=dqs_i_dly, dout=self.out.rdqs_i[byte], clk='sys4x', clkdiv='sys')
        # DMI: output-only per byte (input side discarded via dout=Signal()).
        for byte in range((self.databits // 8)):
            dmi_t = Signal()
            dmi_ser = Signal()
            dmi_dly = Signal()
            data_ser(din=self.out.dmi_o[byte], **(dict(dout_fb=dmi_ser) if with_odelay else dict(dout=dmi_dly)), tin=(~ oe_delay_data(self.out.dmi_oe)), tout=dmi_t, clk='sys4x', clkdiv='sys')
            if with_odelay:
                self.odelaye2(din=dmi_ser, dout=dmi_dly, rst=self.get_rst(byte, wdly_dq_rst), inc=self.get_inc(byte, wdly_dq_inc), clk='sys')
            self.iobuf(din=dmi_dly, dout=Signal(), tin=dmi_t, dinout=self.pads.dmi[byte])
        # DQ: bidirectional per bit; write/read delays are per byte (bit // 8).
        for bit in range(self.databits):
            dq_t = Signal()
            dq_ser = Signal()
            dq_dly = Signal()
            dq_i = Signal()
            dq_i_dly = Signal()
            data_ser(din=self.out.dq_o[bit], **(dict(dout_fb=dq_ser) if with_odelay else dict(dout=dq_dly)), tin=(~ oe_delay_data(self.out.dmi_oe)), tout=dq_t, clk='sys4x', clkdiv='sys')
            if with_odelay:
                self.odelaye2(din=dq_ser, dout=dq_dly, rst=self.get_rst((bit // 8), wdly_dq_rst), inc=self.get_inc((bit // 8), wdly_dq_inc), clk='sys')
            self.iobuf(din=dq_dly, dout=dq_i, dinout=self.pads.dq[bit], tin=dq_t)
            self.idelaye2(din=dq_i, dout=dq_i_dly, rst=self.get_rst((bit // 8), rdly_dq_rst), inc=self.get_inc((bit // 8), rdly_dq_inc), clk='sys')
            data_des(din=dq_i_dly, dout=self.out.dq_i[bit], clk='sys4x', clkdiv='sys')
class Attribute():
    """Description of a single attribute of a data model.

    Wraps name/type/required/description and converts to and from the
    protobuf representation (`models_pb2.Query.Attribute`).
    """

    # Mapping from Python attribute types to their protobuf enum values.
    _attribute_type_to_pb = {bool: models_pb2.Query.Attribute.BOOL, int: models_pb2.Query.Attribute.INT, float: models_pb2.Query.Attribute.DOUBLE, str: models_pb2.Query.Attribute.STRING, Location: models_pb2.Query.Attribute.LOCATION}
    __slots__ = ('name', 'type', 'is_required', 'description')

    def __init__(self, name: str, type_: Type[ATTRIBUTE_TYPES], is_required: bool, description: str='') -> None:
        self.name = name
        self.type = type_
        self.is_required = is_required
        self.description = description

    def __eq__(self, other: Any) -> bool:
        # NOTE: description is deliberately excluded from equality.
        return (isinstance(other, Attribute) and (self.name == other.name) and (self.type == other.type) and (self.is_required == other.is_required))

    def __str__(self) -> str:
        return 'Attribute(name={},type={},is_required={})'.format(self.name, self.type, self.is_required)

    def encode(self) -> models_pb2.Query.Attribute:
        """Serialise this attribute to its protobuf message."""
        attribute = models_pb2.Query.Attribute()
        attribute.name = self.name
        attribute.type = self._attribute_type_to_pb[self.type]
        attribute.required = self.is_required
        if (self.description is not None):
            attribute.description = self.description
        return attribute

    @classmethod
    def decode(cls, attribute_pb: models_pb2.Query.Attribute) -> 'Attribute':
        """Build an Attribute from its protobuf message.

        FIX: the method takes `cls` and constructs via `cls(...)` but was
        missing the `@classmethod` decorator (likely stripped); restored.
        """
        _pb_to_attribute_type = {v: k for (k, v) in cls._attribute_type_to_pb.items()}
        return cls(attribute_pb.name, _pb_to_attribute_type[attribute_pb.type], attribute_pb.required, (attribute_pb.description if attribute_pb.description else None))
class ViewManager(GObject.Object):
    """Manages the coverart browser's pluggable views (icon, flow, list,
    queue, artist) and switches between them when `view_name` changes.

    Emits 'new-view' with the view name after a switch completes.

    FIX: `current_view` is accessed attribute-style throughout this class
    (e.g. `self.current_view.use_plugin_window`), but was defined as a plain
    method — the `@property` decorator (likely stripped) is restored.
    """
    __gsignals__ = {'new-view': (GObject.SIGNAL_RUN_LAST, None, (str,))}
    view_name = GObject.property(type=str, default=CoverIconView.name)
    def __init__(self, source, window):
        super(ViewManager, self).__init__()
        self.source = source
        self.window = window
        self._views = {}
        ui = Gtk.Builder()
        # Instantiate every available view; the artist view comes from UI XML.
        self._views[CoverIconView.name] = CoverIconView()
        self._views[CoverFlowView.name] = CoverFlowView()
        self._views[ListView.name] = ListView()
        self._views[QueueView.name] = QueueView()
        ui.add_from_file(rb.find_plugin_file(source.plugin, 'ui/coverart_artistview.ui'))
        self._views[ArtistView.name] = ui.get_object('artist_view')
        self._lastview = None
        self.controller = ViewController(source.shell, self)
        self._connect_signals()
        self._connect_properties()
        self._lastview = self.view_name
        if self.current_view.use_plugin_window:
            window.add(self.current_view.view)
            window.show_all()
    @property
    def current_view(self):
        # The view object selected by the bound `view_name` property.
        return self._views[self.view_name]
    def get_view(self, view_name):
        return self._views[view_name]
    def _connect_signals(self):
        self.connect('notify::view-name', self.on_notify_view_name)
    def _connect_properties(self):
        # Keep `view_name` in sync with the plugin's GSettings key.
        gs = GSetting()
        setting = gs.get_setting(gs.Path.PLUGIN)
        setting.bind(gs.PluginKey.VIEW_NAME, self, 'view_name', Gio.SettingsBindFlags.DEFAULT)
    def on_notify_view_name(self, *args):
        """Swap the visible view, carrying over the current album selection."""
        if (self._lastview and (self.view_name != self._lastview)):
            # Remember what was selected so the new view can show it too.
            selected = self._views[self._lastview].get_selected_objects()
            current_album = None
            if (len(selected) > 0):
                current_album = self._views[self._lastview].get_selected_objects()[0]
            if self._views[self.view_name].use_plugin_window:
                # Replace the widget hosted in the plugin window.
                child = self.window.get_child()
                if child:
                    self.window.remove(child)
                self.window.add(self._views[self.view_name].view)
                self.window.show_all()
            self.click_count = 0
            self._views[self._lastview].panedposition = self.source.paned.get_expansion_status()
            self._views[self.view_name].switch_to_view(self.source, current_album)
            self._views[self.view_name].emit('update-toolbar')
            self._views[self.view_name].get_default_manager().emit('sort', None)
            if self._views[self.view_name].use_plugin_window:
                self.source.paned.expand(self._views[self.view_name].panedposition)
            self.current_view.set_popup_menu(self.source.popup_menu)
            self.source.album_manager.current_view = self.current_view
            # Only persist views that live in the plugin window.
            if self._views[self.view_name].use_plugin_window:
                saved_view = self.view_name
            else:
                saved_view = self._lastview
            self._lastview = self.view_name
            gs = GSetting()
            setting = gs.get_setting(gs.Path.PLUGIN)
            setting[gs.PluginKey.VIEW_NAME] = saved_view
            self.emit('new-view', self.view_name)
    def get_view_icon_name(self, view_name):
        return self._views[view_name].get_view_icon_name()
    def get_selection_colour(self):
        """Return the theme's selection colour as '#rrggbb' (blue fallback)."""
        try:
            colour = self._views[CoverIconView.name].view.get_style_context().get_background_color(Gtk.StateFlags.SELECTED)
            colour = ('#%s%s%s' % (str(hex(int((colour.red * 255)))).replace('0x', ''), str(hex(int((colour.green * 255)))).replace('0x', ''), str(hex(int((colour.blue * 255)))).replace('0x', '')))
        except:
            colour = '#0000FF'
        return colour
class AutoPausePlugin(plugin.Plugin):
    """Plugin that pauses all playing Playerctl media players when the
    session ends."""
    # NOTE(review): the two bare expressions below look like remnants of
    # stripped decorators/attributes (an error-collection marker and an
    # event subscription binding `on_session_end` to Events.SESSION_END);
    # as written they are no-ops (and `_errors` will raise NameError at
    # class-creation time if undefined) — confirm against upstream.
    _errors
    (Events.SESSION_END)
    def on_session_end(self, **_):
        """Event handler: pause all players when the session ends."""
        self.pause()
    def pause(self) -> None:
        """Pause every player currently in the PLAYING state.

        GLib errors raised by Playerctl are logged and swallowed.
        """
        try:
            for player in Playerctl.list_players():
                instance = Playerctl.Player.new_from_name(player)
                if (instance.props.playback_status != Playerctl.PlaybackStatus.PLAYING):
                    # Not playing: nothing to pause for this player.
                    logger.debug('action=ignored player=%s status=%s', player.name, instance.props.playback_status)
                    continue
                instance.pause()
                logger.debug('action=paused player=%s', player.name)
        except GLib.Error as err:
            logger.error("action=failed error='%s'", err)
def add_args(subparsers):
    """Register the 'subscribe' sub-command and its options on *subparsers*.

    The sub-command listens to an MQTT message stream; the selected handler
    is stored via set_defaults so the caller can dispatch on it.
    """
    parser = subparsers.add_parser('subscribe', formatter_class=argparse.ArgumentDefaultsHelpFormatter, description=__doc__, help='Listen to a stream of messages')
    # Default client id embeds the pid so concurrent listeners don't collide.
    # Fixed help-text typo: 'listner' -> 'listener'.
    parser.add_argument('-c', '--clientid', default=('beem.listr-%d' % os.getpid()), help='Set the client id of the listener, can be useful for acls\n Default has pid information appended.\n ')
    parser.add_argument('-H', '--host', default='localhost', help='MQTT host to connect to')
    parser.add_argument('-p', '--port', type=int, default=1883, help='Port for remote MQTT host')
    parser.add_argument('-q', '--qos', type=int, choices=[0, 1, 2], help='set the mqtt qos for subscription', default=1)
    parser.add_argument('-n', '--msg_count', type=int, default=10, help='How many messages to expect')
    parser.add_argument('-N', '--client_count', type=int, default=1, help='How many clients to expect. See docs for examples\n of how this works')
    parser.add_argument('-t', '--topic', default='mqtt-malaria/+/data/#', help="Topic to subscribe to, will be sorted into clients by the\n '+' symbol")
    parser.add_argument('--json', type=str, default=None, help='Dump the collected stats into the given JSON file.')
    parser.set_defaults(handler=run)
def strOfSize(size):
    """Format a byte count as '<whole>.<rem> <unit>' (B/KB/MB/GB/TB/PB).

    The fractional part is the remainder of the last division by 1024,
    zero-padded to at least three digits, mirroring the original output.
    Counts beyond PB wrap to the last unit.
    """
    units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
    whole, frac, depth = size, 0, 0
    # Iterative equivalent of the original tail recursion.
    while whole >= 1024:
        frac = whole % 1024
        whole //= 1024
        depth += 1
    if depth + 1 > len(units):
        # Out of units: fall back to the largest one.
        depth = -1
    return '{}.{:>03d} {}'.format(whole, frac, units[depth])
class OptionPlotoptionsAreasplinerangeLowmarkerStates(Options):
    # Accessors for the 'states' sub-options of the areasplinerange low
    # marker (hover / normal / select).
    # NOTE(review): in similar generated option classes these accessors are
    # @property-decorated; the decorators may have been stripped during
    # extraction — confirm against the generator template.
    def hover(self) -> 'OptionPlotoptionsAreasplinerangeLowmarkerStatesHover':
        # Lazily create/return the 'hover' sub-configuration object.
        return self._config_sub_data('hover', OptionPlotoptionsAreasplinerangeLowmarkerStatesHover)
    def normal(self) -> 'OptionPlotoptionsAreasplinerangeLowmarkerStatesNormal':
        # Lazily create/return the 'normal' sub-configuration object.
        return self._config_sub_data('normal', OptionPlotoptionsAreasplinerangeLowmarkerStatesNormal)
    def select(self) -> 'OptionPlotoptionsAreasplinerangeLowmarkerStatesSelect':
        # Lazily create/return the 'select' sub-configuration object.
        return self._config_sub_data('select', OptionPlotoptionsAreasplinerangeLowmarkerStatesSelect)
def log_task(task):
    """Log a received task's timestamp, command, size and payload at INFO level."""
    logger.info('Received Task:')
    # reprlib truncates oversized payloads so the log stays bounded.
    payload = reprlib.repr(task.data)
    # Task epochs are treated as UTC seconds; render an aware datetime.
    stamp = datetime.datetime.fromtimestamp(task.epoch, tz=datetime.timezone.utc)
    logger.info(f' - stamp: {stamp} ({task.epoch:#04x})')
    logger.info(f' - task: {task.command} ({task.command.value}, {task.command.value:#04x})')
    logger.info(f' - size: {task.size}')
    logger.info(f' - data: {payload}')
def speed_test_no_column():
    """Benchmark remap_ids_v2 against premap_ids on random keys and print timings."""
    remap = remap_ids_v2(mapper)
    premap = premap_ids(mapper)
    keys = np.random.randint(0, N_symbols, N_tokens)

    def _time_100(fn):
        # Run the mapper 100x over the same keys; return the elapsed time.
        with time_context() as elapsed:
            for _ in range(100):
                fn(keys, False)
        return elapsed.elapsed

    remaptime = _time_100(remap)
    premaptime = _time_100(premap)
    print('remap', remaptime)
    print('premap', premaptime)
    print('speedup', (remaptime / premaptime))
def generate_random_factored_numbers_with_multiplicative_group(bits, procs, count):
    """Generate *count* random factored numbers up to 2**bits using *procs* workers.

    The work is split evenly across subprocesses; the remainder
    (count % procs) is produced in the current process. Results are read
    from the module-level 'output' queue.
    """
    count_per_proc = (count // procs)
    # Each worker gets an independent random value as its second argument —
    # presumably an RNG seed so the streams don't collide; confirm against
    # the _mp worker's signature.
    processes = [mp.Process(target=generate_random_factored_numbers_with_multiplicative_group_mp, args=(gmpy2.mpz((2 ** bits)), random.randint(1, (10 ** 10)), count_per_proc)) for x in range(procs)]
    for p in processes:
        p.start()
    # Produce the leftover items inline while the workers run.
    remaining_num = (count % procs)
    generate_random_factored_numbers_with_multiplicative_group_mp(gmpy2.mpz((2 ** bits)), random.randint(1, (10 ** 10)), remaining_num)
    results = []
    # Drain the queue before joining — joining first could deadlock if the
    # queue's buffer fills while workers are still producing.
    for i in range(count):
        results.append(output.get())
    for p in processes:
        p.join()
    return results
def extractAshialafineWordpressCom(item):
    """Build a release message for ashialafine.wordpress.com feed items.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('The Daughter of the Albert House Wishes for Ruin', 'The Daughter of the Albert House Wishes for Ruin', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_param_convention_mars_2():
    # NOTE(review): the bare tuple below is almost certainly a stripped
    # normalisation decorator for values_mars — e.g.
    # "@normalize('parameter', 'variable-list(mars)')". Without it,
    # values_mars is an identity function and the assertions cannot pass.
    # Confirm against version control.
    ('parameter', 'variable-list(mars)')
    def values_mars(parameter):
        # With the decorator applied, returns the MARS-normalised name list.
        return parameter
    # Scalars are wrapped into lists; aliases (t2m) map to MARS names (2t);
    # unknown names pass through unchanged.
    assert (values_mars(parameter='tp') == ['tp'])
    assert (values_mars(parameter='2t') == ['2t'])
    assert (values_mars(parameter='t2m') == ['2t'])
    assert (values_mars(parameter=['t2m', 'tp']) == ['2t', 'tp'])
    assert (values_mars(parameter='whatever') == ['whatever'])
class OptionSeriesStreamgraphSonificationContexttracksMappingFrequency(Options):
    # Frequency-mapping options for streamgraph sonification context tracks.
    # NOTE(review): every option below appears as a getter/setter pair with
    # the same name; the original almost certainly used @property /
    # @<name>.setter decorators that were stripped during extraction. As
    # written, each later def silently shadows the earlier one — confirm
    # against the generator template.
    def mapFunction(self):
        # Getter: configured mapping function (None when unset).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store the mapping function (kept as a Python value).
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: target property to map to (None when unset).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: store the mapping target name.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range (None when unset).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: store the upper bound.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range (None when unset).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: store the lower bound.
        self._config(num, js_type=False)
    def within(self):
        # Getter: 'within' constraint (None when unset).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: store the 'within' constraint.
        self._config(value, js_type=False)
class TypeGraph(DiGraph):
    """Directed graph over expressions and variables used for type propagation.

    Nodes are variable names (for Variable expressions) or object ids (for
    all other expressions); edges are labelled either 'assignment' or
    'subexpression'.
    """

    class EdgeType(Enum):
        """Edge label: assignment between lhs/rhs, or subexpression nesting."""
        assignment = 0
        subexpression = 1

    def __init__(self, **attr):
        super().__init__(**attr)
        # Maps a node key to the set of instructions using that expression.
        self._usages: DefaultDict[(Expression, Set)] = defaultdict(set)

    @classmethod
    def from_cfg(cls, cfg: ControlFlowGraph) -> TypeGraph:
        """Build a TypeGraph from every instruction in *cfg*.

        NOTE(review): the 'cls' first parameter shows this was a classmethod;
        the stripped decorator has been restored.
        """
        graph = cls()
        for instruction in cfg.instructions:
            graph.add_instruction(instruction)
        return graph

    def add_instruction(self, instruction: Instruction) -> None:
        """Add all expressions of *instruction*; link assignment lhs -> rhs."""
        for top_level_expression in instruction:
            self.add_expression(top_level_expression, instruction)
        if isinstance(instruction, BaseAssignment):
            self.add_edge(self._make_node(instruction.destination), self._make_node(instruction.value), label=self.EdgeType.assignment)

    def add_expression(self, expression: Expression, parent: Instruction):
        """Add *expression* and all sub-expressions, recording *parent* as a usage."""
        todo = [expression]
        while todo:
            head = todo.pop()
            # Store the expression object itself as node data, keyed by id so
            # distinct objects with the same node key are all retained.
            self.add_node(self._make_node(head), **{str(id(head)): head})
            self._usages[self._make_node(head)].add(parent)
            children = list(head)
            todo.extend(children)
            for sub_expression in children:
                self.add_edge(self._make_node(sub_expression), self._make_node(head), label=self.EdgeType.subexpression)

    def iter_equivalence_groups(self) -> Iterator[List[Expression]]:
        """Yield groups of expressions connected through assignment edges."""
        equivalence_edges = [(start, end) for (start, end, data) in self.edges(data=True) if (data['label'] == self.EdgeType.assignment)]
        equivalence_subgraph = self._undirected_edge_subgraph(equivalence_edges)
        for equivalence_group in connected_components(equivalence_subgraph):
            (yield list(chain.from_iterable((self.nodes[node].values() for node in equivalence_group))))

    def _undirected_edge_subgraph(self, edges: List[Tuple[(Expression, Expression)]]) -> Graph:
        """Return an undirected graph containing exactly *edges*."""
        graph = Graph()
        graph.add_edges_from(edges)
        return graph

    def __iter__(self) -> Iterator[List[Expression]]:
        """Yield the expression objects stored on each node.

        Fix: the original iterated 'for node in self', which calls this very
        method and recurses infinitely; iterate the base class instead.
        Also materialise the dict view as a list to match the annotation.
        """
        for node in super().__iter__():
            (yield list(self.nodes[node].values()))

    @staticmethod
    def _make_node(expression: Expression):
        """Map an expression to its node key: Variables by name, others by id.

        NOTE(review): no 'self'/'cls' parameter — this was a staticmethod
        whose stripped decorator has been restored (without it, instance
        calls like self._make_node(x) would raise TypeError).
        """
        if isinstance(expression, Variable):
            return expression.name
        return id(expression)
class Crunchyroll():
    """Wraps yt-dlp to list new episodes of a Crunchyroll channel.

    Tracks the last fetched episode index in a per-channel text file so
    subsequent runs only request new content.
    """

    def __init__(self, channel=False):
        if channel:
            # Strip spaces from the channel path.
            # NOTE(review): the original line was damaged ("replace(' '')");
            # restored to replace(' ', '') — the clear intent.
            self.channel = channel.replace(' ', '')
            # TODO(review): the original URL template was truncated during
            # extraction; this is a best-guess reconstruction — verify.
            self.channel_url = 'https://www.crunchyroll.com/{}'.format(self.channel)
            self.channel_folder = self.channel.split('/')[(- 1)]
            # Per-channel marker file holding the last fetched episode index.
            self.last_episode_file = '{}/{}/{}.{}'.format(media_folder, sanitize('{}'.format(self.channel_folder)), 'last_episode', 'txt')
            self.new_content = False
            self.last_episode = self.get_start_episode()
            self.videos = self.get_videos()

    def get_videos(self):
        """Build and run the yt-dlp listing command; return its piped output."""
        command = ['yt-dlp', '--print', '%(season_number)s;%(season)s;%(episode_number)s;%(episode)s;%(webpage_url)s;%(playlist_autonumber)s', '--no-download', '--no-warnings', '--match-filter', 'language={}'.format(audio_language), '--extractor-args', 'crunchyrollbeta:hardsub={}'.format(subtitle_language), '{}'.format(self.channel_url), '--replace-in-metadata', '"season,episode"', '"[;/]"', '"-"']
        self.set_auth(command)
        self.set_proxy(command)
        self.set_start_episode(command)
        print(' '.join(command))
        return w.worker(command).pipe()

    def get_start_episode(self):
        """Return the stored start episode; create the marker file if absent."""
        last_episode = 0
        if (not os.path.isfile(self.last_episode_file)):
            self.new_content = True
            f.folders().write_file(self.last_episode_file, '0')
        else:
            # 'with' closes the file; the original's explicit fl.close()
            # inside the block was redundant and has been dropped.
            with open(self.last_episode_file) as fl:
                last_episode = fl.readlines()
            last_episode = last_episode[0]
        return last_episode

    def set_start_episode(self, command):
        """Append --playlist-start so yt-dlp skips already-fetched episodes."""
        if (not self.new_content):
            try:
                next_episode = int(self.last_episode)
            except (TypeError, ValueError):
                # Narrowed from a bare 'except': only conversion failures
                # are expected from int() here.
                next_episode = 1
            if (next_episode < 1):
                next_episode = 1
            command.append('--playlist-start')
            command.append('{}'.format(next_episode))

    def set_last_episode(self, playlist_count):
        """Persist the highest fetched episode index.

        NOTE(review): the two branches differ only by str(); presumably
        playlist_count is already a string for new content — confirm callers.
        """
        if self.new_content:
            f.folders().write_file(self.last_episode_file, playlist_count)
        else:
            f.folders().write_file(self.last_episode_file, str(playlist_count))

    def set_auth(self, command, quotes=False):
        """Append yt-dlp auth flags per the configured crunchyroll_auth mode."""
        if (config['crunchyroll_auth'] == 'browser'):
            command.append('--cookies-from-browser')
            if quotes:
                command.append('"{}"'.format(config['crunchyroll_browser']))
            else:
                command.append(config['crunchyroll_browser'])
        if (config['crunchyroll_auth'] == 'cookies'):
            command.append('--cookies')
            command.append(config['crunchyroll_cookies_file'])
        if (config['crunchyroll_auth'] == 'login'):
            command.append('--username')
            command.append(config['crunchyroll_username'])
            command.append('--password')
            command.append(config['crunchyroll_password'])
        # A user agent is always sent, regardless of auth mode.
        command.append('--user-agent')
        if quotes:
            command.append('"{}"'.format(config['crunchyroll_useragent']))
        else:
            command.append('{}'.format(config['crunchyroll_useragent']))

    def set_proxy(self, command):
        """Append the proxy flag when a proxy URL is configured."""
        if proxy:
            if (proxy_url != ''):
                command.append('--proxy')
                command.append(proxy_url)
class Solution(object):
    def uniqueMorseRepresentations(self, words):
        """Return how many distinct Morse-code transformations *words* produce.

        Each lowercase letter maps to its Morse code; a word's transformation
        is the concatenation of its letters' codes.
        """
        morse = ['.-', '-...', '-.-.', '-..', '.', '..-.', '--.', '....', '..', '.---', '-.-', '.-..', '--', '-.', '---', '.--.', '--.-', '.-.', '...', '-', '..-', '...-', '.--', '-..-', '-.--', '--..']
        # Set comprehension deduplicates the transformations directly.
        transformations = {
            ''.join(morse[ord(letter) - ord('a')] for letter in word)
            for word in words
        }
        return len(transformations)
def preprocess_jap(text):
    """Convert Japanese text into a list of phoneme tokens, keeping punctuation.

    NOTE(review): the original docstring was truncated to "'Reference" by
    extraction (a syntax error); restored as a valid docstring. The original
    reference link is lost — recover it from version control if needed.
    """
    text = symbols_to_japanese(text)
    # Split on punctuation marks but also collect the marks themselves so
    # they can be re-inserted between phoneme chunks below.
    sentences = re.split(_japanese_marks, text)
    marks = re.findall(_japanese_marks, text)
    text = []
    for (i, sentence) in enumerate(sentences):
        if re.match(_japanese_characters, sentence):
            # g2p returns a space-separated phoneme string.
            p = pyopenjtalk.g2p(sentence)
            text += p.split(' ')
        if (i < len(marks)):
            # Re-insert the punctuation that followed this sentence,
            # with any internal spaces removed.
            text += [marks[i].replace(' ', '')]
    return text
class Invalid(Exception):
    """Raised when a value fails validation.

    Carries the failing value, the validator state and, for compound
    validators, child errors as either a list (sequence validators) or a
    dict (schema validators) — never both.
    """

    def __init__(self, msg, value, state, error_list=None, error_dict=None):
        Exception.__init__(self, msg, value, state, error_list, error_dict)
        self.msg = msg
        self.value = value
        self.state = state
        self.error_list = error_list
        self.error_dict = error_dict
        # An error may aggregate children as a list OR a dict, not both.
        assert ((not self.error_list) or (not self.error_dict)), ("Errors shouldn't have both error dicts and lists (error %s has %s and %s)" % (self, self.error_list, self.error_dict))

    def __str__(self):
        return self.msg

    def unpack_errors(self, encode_variables=False, dict_char='.', list_char='-'):
        """Return the error messages as plain lists/dicts/strings.

        When encode_variables is true (dict errors only), the nested result
        is flattened into variable-encoded keys using dict_char/list_char
        and empty entries are dropped.
        """
        if self.error_list:
            assert (not encode_variables), 'You can only encode dictionary errors'
            assert (not self.error_dict)
            # None entries mark valid items in the sequence; keep them as-is.
            return [(item.unpack_errors() if item else item) for item in self.error_list]
        if self.error_dict:
            result = {}
            for (name, item) in self.error_dict.items():
                # Leaf messages are plain strings; children are Invalid errors.
                result[name] = (item if isinstance(item, str) else item.unpack_errors())
            if encode_variables:
                from . import variabledecode
                result = variabledecode.variable_encode(result, add_repetitions=False, dict_char=dict_char, list_char=list_char)
                for key in list(result.keys()):
                    if (not result[key]):
                        del result[key]
            return result
        # No child errors: the message itself is the result.
        assert (not encode_variables), 'You can only encode dictionary errors'
        return self.msg
def main():
    """CLI entry point: build, load and test the LiteDRAM bench on the XCU1525."""
    parser = argparse.ArgumentParser(description='LiteDRAM Bench on XCU1525')
    parser.add_argument('--uart', default='crossover', help='Selected UART: crossover (default) or serial')
    parser.add_argument('--build', action='store_true', help='Build bitstream')
    parser.add_argument('--channel', default='0', help='DDRAM channel 0 (default), 1, 2 or 3')
    parser.add_argument('--with-bist', action='store_true', help='Add BIST Generator/Checker')
    parser.add_argument('--with-analyzer', action='store_true', help='Add Analyzer')
    parser.add_argument('--load', action='store_true', help='Load bitstream')
    parser.add_argument('--load-bios', action='store_true', help='Load BIOS')
    parser.add_argument('--sys-clk-freq', default=None, help='Set sys_clk_freq')
    parser.add_argument('--test', action='store_true', help='Run Full Bench')
    args = parser.parse_args()
    # int(x, 0) accepts decimal, hex (0x..) and octal channel notations.
    soc = BenchSoC(uart=args.uart, channel=int(args.channel, 0), with_bist=args.with_bist, with_analyzer=args.with_analyzer)
    builder = Builder(soc, output_dir=f'build/sqrl_xcu1525_ch{args.channel}', csr_csv='csr.csv')
    builder.build(run=args.build)
    if args.load:
        prog = soc.platform.create_programmer()
        prog.load_bitstream(os.path.join(builder.gateware_dir, (soc.build_name + '.bit')))
    if args.load_bios:
        from common import load_bios
        load_bios(f'build/sqrl_xcu1525_ch{args.channel}/software/bios/bios.bin')
    if (args.sys_clk_freq is not None):
        from common import us_set_sys_clk
        us_set_sys_clk(clk_freq=float(args.sys_clk_freq), vco_freq=soc.crg.main_pll.compute_config()['vco'])
    if args.test:
        from common import us_bench_test
        # NOTE(review): this path reads soc.crg.pll while the sys-clk path
        # above reads soc.crg.main_pll — confirm which attribute exists.
        # Also freq_min and freq_max are both 0.0, which looks odd for a
        # frequency sweep — confirm intended bounds.
        us_bench_test(freq_min=.0, freq_max=.0, freq_step=1000000.0, vco_freq=soc.crg.pll.compute_config()['vco'], bios_filename=f'build/sqrl_xcu1525_ch{args.channel}/software/bios/bios.bin')
class RandomResizeCrop():
    # Augmentation: pad the image with white, jitter the crop origin and
    # randomise the output width before resizing.
    # NOTE(review): assumes a PIL image and torchvision's functional API;
    # height is preserved while width varies by up to +/- ratio — confirm
    # this asymmetry is intended.
    def __init__(self, jitter=10, ratio=0.5):
        # jitter: padding size and maximum random offset (pixels).
        self.jitter = jitter
        # ratio: relative half-range for the random width scale.
        self.ratio = ratio
    def __call__(self, img):
        (w, h) = img.size
        # Pad all sides with white (255) so jittered crops stay in-bounds.
        img = transforms.functional.pad(img, self.jitter, fill=255)
        # Crop origin jittered around the original top-left (offset by padding).
        x = (self.jitter + random.randint((- self.jitter), self.jitter))
        y = (self.jitter + random.randint((- self.jitter), self.jitter))
        size_w = (w * random.uniform((1 - self.ratio), (1 + self.ratio)))
        size = (h, int(size_w))
        # Crop an h x w region at (top=y, left=x), then resize to (h, size_w).
        img = transforms.functional.resized_crop(img, y, x, h, w, size)
        return img
class DefaultOverride(EditorFactory):
    """Editor factory that builds a trait's default editor with options overridden.

    Keyword arguments given at construction are applied as attribute
    overrides on the trait's own default editor factory before the editor
    is created.
    """

    # Option-name -> value overrides applied to the default factory.
    _overrides = Dict()

    def __init__(self, *args, **overrides):
        EditorFactory.__init__(self, *args)
        self._overrides = overrides

    def _customise_default(self, editor_kind, ui, object, name, description, parent):
        """Create the trait's default editor of *editor_kind* with overrides applied."""
        trait = object.trait(name)
        factory = trait.trait_type.create_editor()
        for option, value in self._overrides.items():
            setattr(factory, option, value)
        make_editor = getattr(factory, editor_kind)
        return make_editor(ui, object, name, description, parent)

    def simple_editor(self, ui, object, name, description, parent):
        return self._customise_default('simple_editor', ui, object, name, description, parent)

    def custom_editor(self, ui, object, name, description, parent):
        return self._customise_default('custom_editor', ui, object, name, description, parent)

    def text_editor(self, ui, object, name, description, parent):
        return self._customise_default('text_editor', ui, object, name, description, parent)

    def readonly_editor(self, ui, object, name, description, parent):
        return self._customise_default('readonly_editor', ui, object, name, description, parent)
def write_render(ints_Ls, args, name, doc_func, out_dir, comment='', py_kwargs=None, c=True, c_kwargs=None):
    """Render Python (and optionally C) functions and write them under *out_dir*.

    Writes '<name>.py' always; when *c* is true, also writes '<name>.c' and
    '<name>.h'. py_kwargs/c_kwargs are forwarded to the renderers.
    """
    # Avoid mutable-default pitfalls by normalising None to fresh dicts.
    py_kwargs = {} if py_kwargs is None else py_kwargs
    c_kwargs = {} if c_kwargs is None else c_kwargs
    # Materialise once so both renderers see the same sequence.
    ints_Ls = list(ints_Ls)
    py_rendered = render_py_funcs(ints_Ls, args, name, doc_func, comment=comment, **py_kwargs)
    write_file(out_dir, f'{name}.py', py_rendered)
    if not c:
        return
    c_rendered, h_rendered = render_c_funcs(ints_Ls, args, name, doc_func, comment=comment, **c_kwargs)
    write_file(out_dir, f'{name}.c', c_rendered)
    write_file(out_dir, f'{name}.h', h_rendered)
@dataclass(frozen=True)
class CompilationState(object):
    """Immutable compilation context carrying prefix, mode, resolver and nodes.

    NOTE(review): the decorator line was damaged in the source to a bare
    '(frozen=True)'; the use of field(default_factory=list) shows this is a
    frozen dataclass, so '@dataclass(frozen=True)' is restored here.
    """

    prefix: str
    # Compilation mode; 1 is the default.
    mode: int = 1
    # Optional resolver used to look up tasks during compilation.
    task_resolver: Optional[TaskResolverMixin] = None
    # Collected nodes; a fresh list per instance.
    nodes: List = field(default_factory=list)

    def add_node(self, n: Node):
        # List mutation is allowed even on a frozen dataclass (only
        # attribute re-binding is forbidden).
        self.nodes.append(n)

    def with_params(self, prefix: str, mode: Optional[int]=None, resolver: Optional[TaskResolverMixin]=None, nodes: Optional[List]=None) -> CompilationState:
        """Return a copy with the given fields replaced.

        Note: 'nodes' defaults to an empty list (not self.nodes) when omitted.
        """
        return CompilationState(prefix=(prefix if prefix else ''), mode=(mode if mode else self.mode), task_resolver=(resolver if resolver else self.task_resolver), nodes=(nodes if nodes else []))
def trace_and_save_torchscript(model: nn.Module, inputs: Optional[Tuple[Any]], output_path: str, torchscript_filename: str='model.jit', mobile_optimization: Optional[MobileOptimizationConfig]=None, _extra_files: Optional[Dict[(str, bytes)]]=None):
    """Trace *model* with *inputs* and save the TorchScript under *output_path*.

    Thin wrapper around export_optimize_and_save_torchscript with
    jit_mode='trace'; all other arguments are forwarded unchanged.
    """
    return export_optimize_and_save_torchscript(model, inputs, output_path, jit_mode='trace', torchscript_filename=torchscript_filename, mobile_optimization=mobile_optimization, _extra_files=_extra_files)
def add_name(font, string, nameID):
    """Append a Macintosh-platform name record to *font*'s name table.

    Creates the 'name' table if the font has none. The record is encoded as
    mac_roman with platformID=1, platEncID=0, langID=0 (Mac / Roman / English).
    """
    name_table = font.get('name')
    if name_table is None:
        # Font has no name table yet; create an empty one.
        name_table = font['name'] = table__n_a_m_e()
        name_table.names = []
    record = NameRecord()
    record.nameID = nameID
    record.string = string.encode('mac_roman')
    record.platformID = 1
    record.platEncID = 0
    record.langID = 0
    name_table.names.append(record)
class TabsExtraDocCommand(sublime_plugin.WindowCommand):
    """Window command that displays a TabsExtra documentation page.

    Renders the page as an mdpopups phantom when the environment supports
    it; otherwise falls back to opening the raw file.
    """

    # Matches the leading 'Packages' path component for ${packages} expansion.
    re_pkgs = re.compile('^Packages')

    def on_navigate(self, href):
        """Link handler: 'sub://Packages' links open package files, others a browser."""
        if href.startswith('sub://Packages'):
            # href[6:] strips the 'sub://' scheme prefix.
            sublime.run_command('open_file', {'file': self.re_pkgs.sub('${packages}', href[6:])})
        else:
            webbrowser.open_new_tab(href)

    def run(self, page):
        """Show *page*, preferring a rendered scratch view with a phantom."""
        try:
            import mdpopups
            # Phantom API requires mdpopups >= 1.10.0 and ST build >= 3124.
            has_phantom_support = ((mdpopups.version() >= (1, 10, 0)) and (int(sublime.version()) >= 3124))
        except Exception:
            has_phantom_support = False
        if (not has_phantom_support):
            sublime.run_command('open_file', {'file': page})
        else:
            text = sublime.load_resource(page.replace('${packages}', 'Packages'))
            view = self.window.new_file()
            # NOTE(review): the tab is always titled 'Quick Start' even though
            # any page can be passed in — confirm whether that's intended.
            view.set_name('TabsExtra - Quick Start')
            view.settings().set('gutter', False)
            view.settings().set('word_wrap', False)
            if has_phantom_support:
                mdpopups.add_phantom(view, 'quickstart', sublime.Region(0), text, sublime.LAYOUT_INLINE, css=CSS, wrapper_class='tabs-extra', on_navigate=self.on_navigate)
            else:
                view.run_command('insert', {'characters': text})
            view.set_read_only(True)
            view.set_scratch(True)
class VideoUploader(object):
    """Runs video upload sessions, allowing only one active session at a time."""

    def __init__(self):
        # Currently-active upload session, if any.
        self._session = None

    def upload(self, video, wait_for_encoding=False):
        """Upload *video* in a new session and return the session result.

        Raises FacebookError if a session is already in progress.
        """
        if self._session:
            raise FacebookError('There is already an upload session for this video uploader')
        self._session = VideoUploadSession(video, wait_for_encoding)
        try:
            result = self._session.start()
        finally:
            # Clear the session even when start() raises; the original only
            # cleared it on success, so one failed upload permanently
            # blocked this uploader with the 'already an upload session'
            # error on every later call.
            self._session = None
        return result
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.