code stringlengths 281 23.7M |
|---|
class PrivateKey(Signer):
    """ECDSA private key (SECP256k1 / SHA-256) wrapping ``ecdsa.SigningKey``."""

    curve: Curve = ecdsa.SECP256k1
    hash_function: Callable = hashlib.sha256

    def __init__(self, private_key: Optional[Union[(bytes, str)]]=None):
        """Create a key.

        :param private_key: ``None`` to generate a fresh key, raw key ``bytes``,
            or a base64-encoded ``str``.
        :raises RuntimeError: if the input is none of the above.
        """
        if (private_key is None):
            self._signing_key = ecdsa.SigningKey.generate(curve=self.curve, hashfunc=self.hash_function)
        elif isinstance(private_key, bytes):
            self._signing_key = ecdsa.SigningKey.from_string(private_key, curve=self.curve, hashfunc=self.hash_function)
        elif isinstance(private_key, str):
            raw_private_key = _base64_decode(private_key)
            self._signing_key = ecdsa.SigningKey.from_string(raw_private_key, curve=self.curve, hashfunc=self.hash_function)
        else:
            raise RuntimeError('Unable to load private key from input')
        # Cache the raw bytes and their base64 form for cheap repeated access.
        self._private_key_bytes = self._signing_key.to_string()
        self._private_key = base64.b64encode(self._private_key_bytes).decode()

    def private_key(self) -> str:
        """Return the private key as a base64-encoded string."""
        return self._private_key

    def private_key_hex(self) -> str:
        """Return the private key as a hex string."""
        # BUG FIX: ``self.private_key_bytes`` is a method, so the original
        # ``self.private_key_bytes.hex()`` raised AttributeError; read the
        # cached bytes attribute instead.
        return self._private_key_bytes.hex()

    def private_key_bytes(self) -> bytes:
        """Return the raw private key bytes."""
        return self._private_key_bytes

    def public_key(self) -> PublicKey:
        """Return the corresponding public key."""
        return PublicKey(self._signing_key.get_verifying_key())

    def sign(self, message: bytes, deterministic: bool=True, canonicalise: bool=True) -> bytes:
        """Sign *message* (hashed internally); RFC 6979 deterministic by default."""
        sigencode = (sigencode_string_canonize if canonicalise else sigencode_string)
        sign_fnc = (self._signing_key.sign_deterministic if deterministic else self._signing_key.sign)
        return sign_fnc(message, sigencode=sigencode)

    def sign_digest(self, digest: bytes, deterministic=True, canonicalise: bool=True) -> bytes:
        """Sign a pre-computed *digest*; RFC 6979 deterministic by default."""
        sigencode = (sigencode_string_canonize if canonicalise else sigencode_string)
        sign_fnc = (self._signing_key.sign_digest_deterministic if deterministic else self._signing_key.sign_digest)
        return sign_fnc(digest, sigencode=sigencode)
class bsn_table_checksum_stats_request(bsn_stats_request):
    """Big Switch Networks table-checksum stats request (loxigen-generated style).

    NOTE(review): ``pack`` joins ``struct.pack`` results with ``''.join`` and a
    str padding literal, i.e. this is Python-2-era code; under Python 3 the
    str/bytes mix would fail. Confirm the target interpreter before modifying.
    """
    # Fixed wire-format constants for this message type.
    version = 5
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 11

    def __init__(self, xid=None, flags=None):
        """Initialise with optional transaction id and stats flags (default 0)."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        return

    def pack(self):
        """Serialise the message; the length field (bytes 2-3) is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, fixed up below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4 bytes of padding
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        # NOTE(review): no ``self`` parameter -- presumably decorated
        # ``@staticmethod`` in the original source (decorator lost in
        # extraction); confirm upstream.
        """Parse a message from *reader*, asserting all fixed header fields."""
        obj = bsn_table_checksum_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Re-scope the reader to this message's body using the decoded length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # padding
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 11)
        return obj

    def __eq__(self, other):
        """Messages are equal when their type, xid and flags all match."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        return True

    def pretty_print(self, q):
        """Pretty-print via a pprint-style writer *q*."""
        q.text('bsn_table_checksum_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
            q.breakable()
        q.text('}')
@pytest.mark.django_db
def test_award_types(client, monkeypatch, sub_agency_data_1, elasticsearch_transaction_index):
    """Filtering by award_type_codes=[A] returns the single matching sub-agency/office."""
    # BUG FIX: the marker line was garbled to ``.django_db`` (a syntax error);
    # restored to @pytest.mark.django_db. Also dropped a duplicated
    # status-code assertion.
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    resp = client.get(url.format(toptier_code='001', filter='?fiscal_year=2021&award_type_codes=[A]'))
    expected_results = {'toptier_code': '001', 'fiscal_year': 2021, 'sub_agency_count': 1, 'office_count': 1, 'messages': []}
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == expected_results
@pytest.mark.parametrize('elasticapm_client', [{'transaction_max_spans': 1}], indirect=True)
def test_dropped_spans(tracer):
    """With transaction_max_spans=1, only the first span is recorded; the second is dropped."""
    # BUG FIX: the marker line was garbled to ``.parametrize(...)`` (a syntax
    # error); restored to @pytest.mark.parametrize.
    assert tracer._agent.config.transaction_max_spans == 1
    with tracer.start_active_span('transaction') as ot_scope_t:
        with tracer.start_active_span('span') as ot_scope_s:
            s = ot_scope_s.span
            s.set_tag('db.type', 'sql')
        with tracer.start_active_span('span') as ot_scope_s:
            s = ot_scope_s.span
            s.set_tag('db.type', 'sql')
    client = tracer._agent
    spans = client.events[constants.SPAN]
    assert len(spans) == 1
def _add_before_scripts(model: DbtModel, downstream_fal_node_unique_id: str, faldbt: FalDbt, graph: nx.DiGraph, nodeLookup: Dict[(str, FalFlowNode)]) -> Tuple[(nx.DiGraph, Dict[(str, FalFlowNode)])]:
    """Insert the model's *before* (pre-hook) scripts into the graph.

    Each before-script becomes a FAL_SCRIPT node wired upstream of the given
    downstream node. Returns the (mutated) graph and lookup for convenience.
    """
    for script_path in model.get_scripts(before=True):
        script = FalScript(faldbt, model, script_path, timing_type=TimingType.PRE)
        script_node = ScriptNode(_script_id_from_path(script.path, model.name, 'BEFORE'), script)
        graph.add_node(script_node.unique_id, kind=NodeKind.FAL_SCRIPT)
        nodeLookup[script_node.unique_id] = script_node
        graph.add_edge(script_node.unique_id, downstream_fal_node_unique_id)
    return (graph, nodeLookup)
def image_callback2(viz, env, args):
    """Open a solid-color image window whose color is adjusted with the arrow keys."""

    def draw(color, win=None):
        # Paint a uniform 3x256x256 image at the given intensity.
        img = np.full([3, 256, 256], color, dtype=float)
        return viz.image(img, opts=dict(title='Colors', caption='Press arrows to alter color.'), win=win, env=env)

    # Initial render uses the current module-level ``image_color`` value.
    win = draw(image_color)

    def on_key(event):
        global image_color
        if event['event_type'] != 'KeyPress':
            return
        if event['key'] == 'ArrowRight':
            image_color = min(image_color + 0.2, 1)
        if event['key'] == 'ArrowLeft':
            image_color = max(image_color - 0.2, 0)
        draw(image_color, win)

    viz.register_event_handler(on_key, win)
# NOTE(review): the three lines below look like decorator residue from the
# original source -- presumably a click ``@cli.command()`` /
# ``@click.option('--card', ...)`` pair plus a project decorator whose name was
# lost. Confirm against the upstream repository before relying on this block.
()
('--card', default=None, type=str, help='ID of the card to unblock. Omitting this will unblock all cards.')
_decorator
def card_unblock(card: str):
    """Unblock the given card id, or every card known to the API when no id is given."""
    if card:
        card_ids = [card]
    else:
        # No id supplied: collect the ids of all cards from the API.
        card_ids = [card['id'] for card in API_CLIENT.get_cards()]
    for card_id in card_ids:
        API_CLIENT.unblock_card(card_id)
        click.echo(('Unblocked card: ' + card_id))
class LastfmSource(DynamicSource):
    """Similar-artist suggestions backed by the Last.fm ``artist.getSimilar`` API."""

    name = 'lastfm'

    def __init__(self):
        DynamicSource.__init__(self)

    def get_results(self, artist):
        """Return ``(match_score, artist_name)`` tuples for artists similar to *artist*.

        Returns an empty list on any network error.
        """
        ar = urllib.parse.quote_plus(artist.encode('utf-8'))
        # BUG FIX: the endpoint literal was truncated to "(' + API_KEY)" -- a
        # syntax error. Restored the artist.getsimilar endpoint; confirm the
        # exact URL against the upstream source.
        url = ('https://ws.audioscrobbler.com/2.0/?method=artist.getsimilar&artist=%s&api_key=' + API_KEY)
        try:
            f = urllib.request.urlopen((url % ar)).read()
        except IOError:
            logger.exception('Error retrieving results')
            return []
        retlist = []
        xml = ETree.fromstring(f)
        # BUG FIX: Element.getiterator() was removed in Python 3.9;
        # Element.iter() is the long-standing equivalent.
        for e in xml.iter('artist'):
            retlist.append((float(e.find('match').text), e.find('name').text))
        return retlist
class AppEngineRepositoryClient(_base_repository.BaseRepositoryClient):
    """Client exposing lazily-created repositories for the App Engine Admin API (v1)."""

    def __init__(self, quota_max_calls=None, quota_period=1.0, use_rate_limiter=True, cache_discovery=False, cache=None):
        # A missing/zero quota disables rate limiting entirely.
        if not quota_max_calls:
            use_rate_limiter = False
        self._apps = None
        self._app_services = None
        self._service_versions = None
        self._version_instances = None
        super(AppEngineRepositoryClient, self).__init__(
            API_NAME,
            versions=['v1'],
            quota_max_calls=quota_max_calls,
            quota_period=quota_period,
            use_rate_limiter=use_rate_limiter,
            cache_discovery=cache_discovery,
            cache=cache)

    def _memoized_repository(self, attr_name, repository_class):
        """Create the repository on first use and cache it on *attr_name*."""
        if not getattr(self, attr_name):
            setattr(self, attr_name, self._init_repository(repository_class))
        return getattr(self, attr_name)

    def apps(self):
        """Repository for App Engine applications."""
        return self._memoized_repository('_apps', _AppEngineAppsRepository)

    def app_services(self):
        """Repository for application services."""
        return self._memoized_repository('_app_services', _AppEngineAppsServicesRepository)

    def service_versions(self):
        """Repository for service versions."""
        return self._memoized_repository('_service_versions', _AppEngineAppsServicesVersionsRepository)

    def version_instances(self):
        """Repository for version instances."""
        return self._memoized_repository('_version_instances', _AppEngineAppsServicesVersionsInstancesRepository)
class VclDiff(ModelNormal):
    """Generated OpenAPI model for a VCL diff (from/to versions, format, diff body).

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args`` lines
    below look like stripped decorators (``@cached_property`` and
    ``@convert_js_args_to_python_args`` in typical openapi-generator output);
    confirm against the generator templates before relying on this class.
    """

    # 'format' may only take one of these wire values.
    allowed_values = {('format',): {'TEXT': 'text', 'HTML': 'html', 'HTML_SIMPLE': 'html_simple'}}
    validations = {}

    _property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    _property
    def openapi_types():
        """Attribute name -> tuple of accepted types."""
        return {'_from': (int,), 'to': (int,), 'format': (str,), 'diff': (str,)}

    _property
    def discriminator():
        return None

    # Python attribute -> JSON key ('from' is a Python keyword, hence '_from').
    attribute_map = {'_from': 'from', 'to': 'to', 'format': 'format', 'diff': 'diff'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from raw API data; presumably a classmethod originally."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says to.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes settable even though they are not model properties.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build an instance from keyword args; positional args are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class SelfAttentionSeqBlock(ShapeNormalizationBlock):
    """Multi-head self-attention over a sequence with a learnable residual gate.

    Takes one or two in_keys (input tensor, optional attention mask) and one or
    two out_keys (output tensor, optional attention weights).
    """

    def __init__(self, in_keys: Union[(str, List[str])], out_keys: Union[(str, List[str])], in_shapes: Union[(Sequence[int], List[Sequence[int]])], num_heads: int, dropout: Optional[float], add_input_to_output: bool, bias: bool):
        """Set up the nn.MultiheadAttention sub-module and layout adapters.

        :param in_keys: one or two keys (input tensor [+ attention mask]).
        :param out_keys: one or two keys (output [+ attention weights]).
        :param in_shapes: shapes without the batch dimension (1D or 2D).
        :param num_heads: number of attention heads.
        :param dropout: attention dropout probability; None means 0.0.
        :param add_input_to_output: add the gated attention output to the input.
        :param bias: whether the attention projections carry bias terms.
        """
        # Normalise single values to lists.
        in_keys = (in_keys if isinstance(in_keys, List) else [in_keys])
        out_keys = (out_keys if isinstance(out_keys, List) else [out_keys])
        in_shapes = (in_shapes if isinstance(in_shapes, List) else [in_shapes])
        # NOTE(review): the isinstance(..., str) arms are always False after the
        # normalisation above, so effectively only the length checks apply.
        assert (isinstance(in_keys, str) or (len(in_keys) in (1, 2))), f'but got {in_keys}'
        assert (isinstance(out_keys, str) or (len(out_keys) in (1, 2))), f'but got {out_keys}'
        assert (len(in_shapes[0]) in (1, 2)), 'Input shape has to be 1 or 2 dimensional (without batch)'
        in_num_dims = [(len(in_shape) + 1) for in_shape in in_shapes]
        out_num_dims = [in_num_dims[0]]
        if (isinstance(out_keys, list) and (len(out_keys) > 1)):
            out_num_dims.append(out_num_dims[0])
        super().__init__(in_keys=in_keys, out_keys=out_keys, in_shapes=in_shapes, in_num_dims=in_num_dims, out_num_dims=out_num_dims)
        # Embedding dim is the last axis of the first input shape.
        embed_dim = self.in_shapes[0][(- 1)]
        self.add_input_to_output = add_input_to_output
        # Adapters converting between the block's layout and the layout
        # expected by nn.MultiheadAttention.
        self.preprocess = None
        self.postprocess = None
        if (len(self.in_shapes[0]) == 1):
            self.preprocess = (lambda x: torch.unsqueeze(x, 0))
            self.postprocess = (lambda x: torch.squeeze(x, 0))
        elif (len(self.in_shapes[0]) == 2):
            self.preprocess = (lambda x: x.transpose(1, 0))
            self.postprocess = (lambda x: x.transpose(0, 1))
        self.num_heads = num_heads
        self.self_attn = nn.MultiheadAttention(embed_dim=embed_dim, num_heads=num_heads, dropout=(dropout if (dropout is not None) else 0.0), bias=bias)
        # Residual gate, initialised to zero so the attention contribution
        # starts at zero and is learned.
        self.gamma = nn.Parameter(torch.zeros(1, dtype=torch.float32))

    # NOTE(review): the bare expression below looks like a stripped decorator,
    # presumably ``@override(ShapeNormalizationBlock)`` -- confirm upstream.
    (ShapeNormalizationBlock)
    def normalized_forward(self, block_input: Dict[(str, torch.Tensor)]) -> Dict[(str, torch.Tensor)]:
        """Apply self-attention to the first in_key, optionally masked by the second."""
        input_tensor = block_input[self.in_keys[0]]
        attn_mask = (block_input[self.in_keys[1]] if (len(self.in_keys) > 1) else None)
        if (attn_mask is not None):
            if (self.num_heads > 1):
                # Repeat the mask once per attention head along dim 0.
                attn_mask = attn_mask.repeat([self.num_heads, *[1 for _ in attn_mask.shape[1:]]])
            # Invert the mask: entries equal to 1 become attendable (False = keep).
            attn_mask = (~ torch.eq(attn_mask, torch.tensor(1).to(attn_mask.device)))
            # Always allow attending to position 0 so no row is fully masked.
            attn_mask[(..., 0)] = False
        if (self.preprocess is not None):
            input_tensor = self.preprocess(input_tensor)
        # Attention weights are only computed when a second out_key requests them.
        (out, attention) = self.self_attn(input_tensor, input_tensor, input_tensor, need_weights=(len(self.out_keys) == 2), attn_mask=attn_mask)
        if (self.postprocess is not None):
            out = self.postprocess(out)
            input_tensor = self.postprocess(input_tensor)
        out = (self.gamma * out)
        if self.add_input_to_output:
            out = (out + input_tensor)
        out_dict = dict({self.out_keys[0]: out})
        if (len(self.out_keys) == 2):
            out_dict[self.out_keys[1]] = attention
        return out_dict

    def __repr__(self):
        """Human-readable summary of the block's configuration."""
        txt = f'{self.__class__.__name__}'
        # NOTE(review): the inner indentation of these multi-line f-strings may
        # have been lost in extraction; the literal text below is as captured.
        txt += f'''
num_heads: {self.self_attn.num_heads}'''
        txt += f'''
embed_dim: {self.self_attn.embed_dim}'''
        txt += f'''
dropout: {self.self_attn.dropout}'''
        txt += f'''
bias: {(self.self_attn.in_proj_bias is not None)}'''
        txt += f'''
add_input_to_output: {self.add_input_to_output}'''
        txt += f'''
use_attn_mask: {(len(self.in_keys) > 1)}'''
        txt += f'''
Out Shapes: {self.out_shapes()}'''
        return txt
def test_answer_call_greeting(app):
    """answer_call in greeting mode must play audio without recording or opening the menu."""
    global play_audio_called
    global record_message_called
    global voice_messaging_menu_called
    # Reset the module-level spy flags before exercising the call.
    play_audio_called = False
    record_message_called = False
    voice_messaging_menu_called = False
    app.answer_call(('answer', 'greeting'), 'greeting.wav', 2, caller2)
    assert play_audio_called
    assert not record_message_called
    assert not voice_messaging_menu_called
def send_message(msg: 'Message') -> Optional['Message']:
    """Run *msg* through every middleware, then deliver to master or a slave channel.

    Returns the delivered message, or None when the input is None or a
    middleware swallows it. Raises EFBChannelNotFound for unknown targets.
    """
    global middlewares, master, slaves
    if msg is None:
        return None
    for middleware in middlewares:
        processed = middleware.process_message(msg)
        if processed is None:
            # Middleware chose to drop the message entirely.
            return None
        msg = processed
    msg.verify()
    destination = msg.deliver_to.channel_id
    if destination == master.channel_id:
        return master.send_message(msg)
    if destination in slaves:
        return slaves[destination].send_message(msg)
    raise EFBChannelNotFound()
def main():
    """Testnet AMM trading loop: swap between atestfet and a CW20 token whenever
    the pool price crosses the configured bounds.

    Runs forever and performs network and wallet I/O on every iteration.
    """
    args = _parse_commandline()
    wallet = LocalWallet.generate()
    ledger = LedgerClient(NetworkConfig.latest_stable_testnet())
    faucet_api = FaucetApi(NetworkConfig.latest_stable_testnet())
    wallet_balance = ledger.query_bank_balance(wallet.address())
    # Top up from the faucet until the wallet holds at least 10**18 base units.
    while (wallet_balance < (10 ** 18)):
        print('Providing wealth to wallet...')
        faucet_api.get_wealth(wallet.address())
        wallet_balance = ledger.query_bank_balance(wallet.address())
    # Hard-coded testnet addresses for the CW20 token and its pair (pool) contract.
    token_contract_address = 'fetch1qr8ysysnfxmqzu7cu7cq7dsq5g2r0kvkg5e2wl2fnlkqss60hcjsxtljxl'
    pair_contract_address = 'fetch1vgnx2d46uvyxrg9pc5mktkcvkp4uflyp3j86v68pq4jxdc8j4y0s6ulf2a'
    token_contract = LedgerContract(path=None, client=ledger, address=token_contract_address)
    pair_contract = LedgerContract(path=None, client=ledger, address=pair_contract_address)
    currency = 'atestfet'
    tokens = args.trading_wallet
    upper_bound = args.upper_bound
    lower_bound = args.lower_bound
    commission = args.commission
    interval = args.interval_time
    while True:
        # Pool reserves: assets[1] holds the native side, assets[0] the CW20 side.
        pool = pair_contract.query({'pool': {}})
        native_amount = int(pool['assets'][1]['amount'])
        cw20_amount = int(pool['assets'][0]['amount'])
        if (currency == 'atestfet'):
            # Constant-product output estimate, net of the swap commission.
            tokens_out = round((((cw20_amount * tokens) / (native_amount + tokens)) * (1 - commission)))
            sell_price = (tokens / tokens_out)
            print('atestfet sell price: ', sell_price)
            if (sell_price <= lower_bound):
                # Price fell below the band: sell native for CW20 and re-read balance.
                swap_native_for_cw20(tokens, pair_contract, wallet)
                tokens = int(token_contract.query({'balance': {'address': str(wallet.address())}})['balance'])
                currency = 'CW20'
        else:
            tokens_out = round((((native_amount * tokens) / (cw20_amount + tokens)) * (1 - commission)))
            buy_price = (tokens_out / tokens)
            print('atestfet buy price: ', buy_price)
            if (buy_price >= upper_bound):
                # Price rose above the band: swap CW20 back to native.
                swap_cw20_for_native(tokens, pair_contract_address, token_contract, wallet)
                tokens = tokens_out
                currency = 'atestfet'
        sleep(interval)
@pytest.mark.skipcomplex
def test_interpolate_with_arguments():
    """Taylor-test the reduced functional of J = assemble(u**2*dx), u interpolated CG1 -> CG2."""
    # BUG FIX: the marker line was garbled to ``.skipcomplex`` (a syntax error);
    # restored to @pytest.mark.skipcomplex (firedrake's complex-mode skip marker).
    from firedrake.adjoint import ReducedFunctional, Control, taylor_test
    mesh = UnitSquareMesh(10, 10)
    V1 = FunctionSpace(mesh, 'CG', 1)
    V2 = FunctionSpace(mesh, 'CG', 2)
    (x, y) = SpatialCoordinate(mesh)
    expr = (x + y)
    f = interpolate(expr, V1)
    interpolator = Interpolator(TestFunction(V1), V2)
    u = interpolator.interpolate(f)
    J = assemble(((u ** 2) * dx))
    rf = ReducedFunctional(J, Control(f))
    h = Function(V1)
    h.vector()[:] = rand(V1.dim())
    # Taylor rate close to 2 confirms a correct first derivative.
    assert (taylor_test(rf, f, h) > 1.9)
def extract_executable(filename):
    """Return the token following the ``EXECUTABLE`` keyword in *filename*.

    Lines are whitespace-split; a line matches only when its first token is
    exactly ``EXECUTABLE`` and a second token exists. Returns None when no
    line matches.
    """
    with open(filename, 'r', encoding='utf-8') as filehandle:
        # Iterate the file lazily instead of materialising readlines().
        for line in filehandle:
            splitline = line.strip().split()
            if len(splitline) > 1 and splitline[0] == 'EXECUTABLE':
                return splitline[1]
    return None
@app.callback(Output('sleep-exclamation', 'style'), [Input('sleep-date', 'children')])
def show_sleep_exclamation(dummy):
    """Show the exclamation icon when the Oura sleep feed lags the newest report date.

    BUG FIX: the decorator was garbled to a bare ``(Output(...), [Input(...)])``
    tuple; restored to the dash ``@app.callback`` form the arguments imply.
    """
    show = {'display': 'inline-block', 'fontSize': '1rem', 'color': orange, 'paddingLeft': '1%'}
    hide = {'display': 'none'}
    max_sleep_date = app.session.query(func.max(ouraSleepSummary.report_date)).first()[0]
    max_readiness_date = app.session.query(func.max(ouraReadinessSummary.report_date)).first()[0]
    max_activity_date = app.session.query(func.max(ouraActivitySummary.summary_date)).first()[0]
    app.session.remove()
    # The freshest of the three feeds defines "up to date".
    max_date = max([max_sleep_date, max_readiness_date, max_activity_date])
    sleep_style = (show if (max_sleep_date != max_date) else hide)
    return sleep_style
def build_amg_index_sets(L_sizes):
    """Build PETSc index sets [velocity (u+v interleaved), u-only, v-only].

    Assumes DOFs interleaved with stride 3 starting at offsets 1 (u) and 2 (v)
    -- TODO confirm against the discretisation that produces L_sizes.
    """
    neqns = L_sizes[0][0]
    velocityDOF = []
    # Offsets 1 and 2 of each stride-3 group are the velocity components.
    for start in range(1, 3):
        velocityDOF.append(np.arange(start=start, stop=(1 + neqns), step=3, dtype='i'))
    # Interleave the u and v streams back into DOF order.
    velocityDOF_full = np.vstack(velocityDOF).transpose().flatten()
    velocity_u_DOF = []
    velocity_u_DOF.append(np.arange(start=1, stop=(1 + neqns), step=3, dtype='i'))
    velocity_u_DOF_full = np.vstack(velocity_u_DOF).transpose().flatten()
    velocity_v_DOF = []
    # NOTE(review): stop is (2 + neqns) here but (1 + neqns) in the combined
    # loop above for the same start=2 stream; confirm which bound is intended.
    velocity_v_DOF.append(np.arange(start=2, stop=(2 + neqns), step=3, dtype='i'))
    velocity_v_DOF_full = np.vstack(velocity_v_DOF).transpose().flatten()
    isvelocity = PETSc.IS()
    isvelocity.createGeneral(velocityDOF_full)
    isu = PETSc.IS()
    isu.createGeneral(velocity_u_DOF_full)
    isv = PETSc.IS()
    isv.createGeneral(velocity_v_DOF_full)
    return [isvelocity, isu, isv]
def train_ppo_for_beginners(args):
    """Train the 'PPO for Beginners' implementation on the environment named in *args*."""
    from ppo_for_beginners.ppo import PPO
    from ppo_for_beginners.network import FeedForwardNN
    # Hyperparameters shared by every supported environment.
    # NOTE(review): save_freq of .0 (0.0) looks suspicious -- confirm upstream.
    common = {'clip': 0.2, 'save_freq': .0, 'seed': args.seed}
    # Per-environment overrides and total training budget.
    presets = {
        'Pendulum-v0': ({'timesteps_per_batch': 2048, 'max_timesteps_per_episode': 200, 'gamma': 0.99, 'n_updates_per_iteration': 10, 'lr': 0.0003}, 1005000),
        'BipedalWalker-v3': ({'timesteps_per_batch': 2048, 'max_timesteps_per_episode': 1600, 'gamma': 0.99, 'n_updates_per_iteration': 10, 'lr': 0.00025}, 1405000),
        'LunarLanderContinuous-v2': ({'timesteps_per_batch': 1024, 'max_timesteps_per_episode': 1000, 'gamma': 0.999, 'n_updates_per_iteration': 4, 'lr': 0.00025}, 1005000),
        'MountainCarContinuous-v0': ({'timesteps_per_batch': 256, 'max_timesteps_per_episode': 1000, 'gamma': 0.99, 'n_updates_per_iteration': 4, 'lr': 0.005}, 405000),
    }
    overrides, total_timesteps = presets.get(args.env, (None, 0))
    # Unknown environments keep the original behaviour: no hyperparameters at all.
    hyperparameters = dict(overrides, **common) if overrides is not None else {}
    env = gym.make(args.env)
    model = PPO(FeedForwardNN, env, **hyperparameters)
    model.learn(total_timesteps)
@pytest.mark.parametrize('decay', [0.995, 0.9])
@pytest.mark.parametrize('use_num_updates', [True, False])
@pytest.mark.parametrize('explicit_params', [True, False])
def test_val_error(decay, use_num_updates, explicit_params):
    """EMA weights should beat the raw weights on validation loss, and restore()
    must bring the raw weights back exactly.

    BUG FIX: the three marker lines were garbled to bare ``.parametrize(...)``
    calls (syntax errors); restored to @pytest.mark.parametrize. Also merged
    the consecutive duplicate ``if explicit_params`` store/copy_to branches.
    """
    torch.manual_seed(0)
    x_train = torch.rand((100, 10))
    y_train = torch.rand(100).round().long()
    x_val = torch.rand((100, 10))
    y_val = torch.rand(100).round().long()
    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
    ema = ExponentialMovingAverage(model.parameters(), decay=decay, use_num_updates=use_num_updates)
    # Short training loop, updating the EMA after each optimiser step.
    model.train()
    for _ in range(20):
        logits = model(x_train)
        loss = torch.nn.functional.cross_entropy(logits, y_train)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        if explicit_params:
            ema.update(model.parameters())
        else:
            ema.update()
    model.eval()
    logits = model(x_val)
    loss_orig = torch.nn.functional.cross_entropy(logits, y_val)
    print(f'Original loss: {loss_orig}')
    # Stash the raw weights, then swap in the EMA weights.
    if explicit_params:
        ema.store(model.parameters())
        ema.copy_to(model.parameters())
    else:
        ema.store()
        ema.copy_to()
    logits = model(x_val)
    loss_ema = torch.nn.functional.cross_entropy(logits, y_val)
    print(f'EMA loss: {loss_ema}')
    assert (loss_ema < loss_orig), "EMA loss wasn't lower"
    # Restoring must reproduce the pre-EMA validation loss exactly.
    if explicit_params:
        ema.restore(model.parameters())
    else:
        ema.restore()
    model.eval()
    logits = model(x_val)
    loss_orig2 = torch.nn.functional.cross_entropy(logits, y_val)
    assert torch.allclose(loss_orig, loss_orig2), "Restored model wasn't the same as stored model"
class OptionSeriesTilemapSonificationContexttracksMappingTime(Options):
    """Generated accessor class for tilemap sonification time-mapping options.

    BUG FIX: the extraction dropped the decorators, leaving duplicate ``def``
    pairs in which the setter silently overwrote the getter; restored the
    @property / @<name>.setter pairs the paired signatures imply.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def define_def_map() -> Tuple[(List, DefMap)]:
    """Build a small instruction list plus a DefMap from each definition to its instruction."""
    def_map = DefMap()
    instruction_list = [
        Assignment(Variable('v', Integer.int32_t(), 1), Variable('u', Integer.int32_t())),
        Assignment(Variable('v', Integer.int32_t(), 3), Constant(4)),
        Assignment(Variable('w', Integer.int32_t(), 1), Variable('v', Integer.int32_t(), 3)),
        Assignment(Variable('u', Integer.int32_t()), Constant(2)),
    ]
    # Every variable an instruction defines maps back to that instruction.
    def_map._map.update({definition: instruction
                         for instruction in instruction_list
                         for definition in instruction.definitions})
    return (instruction_list, def_map)
def extractAdammentranslatesWordpressCom(item):
    """Parse a feed item into a release message.

    Returns None for previews or items with no chapter/volume, the built
    release message for a matching tag, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag in feed, series name, translation type)
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.mark.parametrize('y_true, y_pred', [(uniform_regression_inputs.target, uniform_regression_inputs.preds), (normal_regression_inputs.target, normal_regression_inputs.preds)])
def test_mae(y_true, y_pred):
    """MeanAbsoluteError must agree with sklearn's mean_absolute_error.

    BUG FIX: the marker line was garbled to ``.parametrize(...)`` (a syntax
    error); restored to @pytest.mark.parametrize.
    """
    sk_preds = y_pred.numpy()
    sk_target = y_true.numpy()
    sk_score = mean_absolute_error(y_true=sk_target, y_pred=sk_preds)
    torch_metric = MeanAbsoluteError()
    tm_score = torch_metric(y_pred, y_true)
    assert_allclose(sk_score, tm_score)
class DuckInterface(SqliteInterface):
    """SQLite-style database interface backed by DuckDB."""

    target = duck
    supports_foreign_key = False
    requires_subquery_name = True

    def _create_connection(self):
        """Connect to the configured file, or an in-memory database when unset."""
        # Deferred import: duckdb is only required when this interface is used.
        import duckdb
        database = self._filename or ':memory:'
        return duckdb.connect(database)

    def quote_name(self, name):
        """DuckDB quotes identifiers with double quotes."""
        return '"{}"'.format(name)

    def rollback(self):
        # Intentionally a no-op for DuckDB.
        pass
class OptionSeriesTilemapData(Options):
    """Generated per-point data options for a Highcharts tilemap series.

    BUG FIX: the extraction dropped the decorators, leaving duplicate ``def``
    pairs in which the setter silently overwrote the getter; restored the
    @property / @<name>.setter pairs the paired signatures imply. Sub-option
    accessors (no setter twin) are plain read-only properties.
    """

    @property
    def accessibility(self) -> 'OptionSeriesTilemapDataAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesTilemapDataAccessibility)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorIndex(self):
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def custom(self):
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dataLabels(self) -> 'OptionSeriesTilemapDataDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesTilemapDataDatalabels)

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def dragDrop(self) -> 'OptionSeriesTilemapDataDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesTilemapDataDragdrop)

    @property
    def drilldown(self):
        return self._config_get(None)

    @drilldown.setter
    def drilldown(self, text: str):
        self._config(text, js_type=False)

    @property
    def events(self) -> 'OptionSeriesTilemapDataEvents':
        return self._config_sub_data('events', OptionSeriesTilemapDataEvents)

    @property
    def id(self):
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def labelrank(self):
        return self._config_get(None)

    @labelrank.setter
    def labelrank(self, num: float):
        self._config(num, js_type=False)

    @property
    def name(self):
        return self._config_get(None)

    @name.setter
    def name(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointPadding(self):
        return self._config_get(None)

    @pointPadding.setter
    def pointPadding(self, num: float):
        self._config(num, js_type=False)

    @property
    def selected(self):
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def test_localisedNames(tmpdir):
    """Round-trip a designspace with localised family/style names: writing,
    reading back and re-writing must produce byte-identical XML."""
    tmpdir = str(tmpdir)
    testDocPath = os.path.join(tmpdir, 'testLocalisedNames.designspace')
    testDocPath2 = os.path.join(tmpdir, 'testLocalisedNames_roundtrip.designspace')
    masterPath1 = os.path.join(tmpdir, 'masters', 'masterTest1.ufo')
    masterPath2 = os.path.join(tmpdir, 'masters', 'masterTest2.ufo')
    instancePath1 = os.path.join(tmpdir, 'instances', 'instanceTest1.ufo')
    # NOTE(review): instancePath2 is defined but never used below.
    instancePath2 = os.path.join(tmpdir, 'instances', 'instanceTest2.ufo')
    doc = DesignSpaceDocument()
    # -- sources: two masters at the weight extremes -------------------------
    s1 = SourceDescriptor()
    s1.filename = os.path.relpath(masterPath1, os.path.dirname(testDocPath))
    s1.name = 'master.ufo1'
    s1.copyInfo = True
    s1.location = dict(weight=0)
    doc.addSource(s1)
    s2 = SourceDescriptor()
    s2.filename = os.path.relpath(masterPath2, os.path.dirname(testDocPath))
    s2.name = 'master.ufo2'
    s2.location = dict(weight=1000)
    doc.addSource(s2)
    # -- instance with localised names in several languages ------------------
    i1 = InstanceDescriptor()
    i1.filename = os.path.relpath(instancePath1, os.path.dirname(testDocPath))
    i1.familyName = 'Montserrat'
    i1.styleName = 'SemiBold'
    i1.styleMapFamilyName = 'Montserrat SemiBold'
    i1.styleMapStyleName = 'Regular'
    i1.setFamilyName('Montserrat', 'fr')
    i1.setFamilyName('', 'ja')
    i1.setStyleName('Demigras', 'fr')
    i1.setStyleName('', 'ja')
    i1.setStyleMapStyleName('Standard', 'de')
    i1.setStyleMapFamilyName('Montserrat Halbfett', 'de')
    i1.setStyleMapFamilyName(' SemiBold', 'ja')
    i1.name = 'instance.ufo1'
    i1.location = dict(weight=500, spooky=666)
    i1.postScriptFontName = 'InstancePostscriptName'
    glyphData = dict(name='arrow', mute=True, unicodes=[291])
    i1.glyphs['arrow'] = glyphData
    doc.addInstance(i1)
    # -- axes with localised label names -------------------------------------
    doc.axes = []
    a1 = AxisDescriptor()
    a1.minimum = 0
    a1.maximum = 1000
    a1.default = 0
    a1.name = 'weight'
    a1.tag = 'wght'
    a1.labelNames['fa-IR'] = ''
    a1.labelNames['en'] = 'Weight'
    doc.addAxis(a1)
    a2 = AxisDescriptor()
    a2.minimum = 0
    a2.maximum = 1000
    a2.default = 0
    a2.name = 'width'
    a2.tag = 'wdth'
    a2.map = [(0.0, 10.0), (401.0, 66.0), (1000.0, 990.0)]
    a2.labelNames['fr'] = 'Poids'
    doc.addAxis(a2)
    # NOTE(review): a3 is configured but never passed to doc.addAxis --
    # presumably deliberate (an axis left out of the document); confirm.
    a3 = AxisDescriptor()
    a3.minimum = 333
    a3.maximum = 666
    a3.default = 444
    a3.name = 'spooky'
    a3.tag = 'spok'
    a3.map = [(0.0, 10.0), (401.0, 66.0), (1000.0, 990.0)]
    # -- a substitution rule --------------------------------------------------
    r1 = RuleDescriptor()
    r1.name = 'named.rule.1'
    r1.conditionSets.append([dict(name='weight', minimum=200, maximum=500), dict(name='width', minimum=0, maximum=150)])
    r1.subs.append(('a', 'a.alt'))
    doc.addRule(r1)
    # -- write, read back, write again, compare ------------------------------
    doc.write(testDocPath)
    assert os.path.exists(testDocPath)
    new = DesignSpaceDocument()
    new.read(testDocPath)
    new.write(testDocPath2)
    with open(testDocPath, 'r', encoding='utf-8') as f1:
        t1 = f1.read()
    with open(testDocPath2, 'r', encoding='utf-8') as f2:
        t2 = f2.read()
    assert (t1 == t2)
class TestPlaying(util.TestCase):
    """Selector tests for the ``:playing`` pseudo-class on a <video> element."""

    MARKUP = '\n    <!DOCTYPE html>\n    <html>\n    <body>\n\n    <video id="vid" width="320" height="240" controls>\n      <source src="movie.mp4" type="video/mp4">\n      <source src="movie.ogg" type="video/ogg">\n      Your browser does not support the video tag.\n    </video>\n\n    </body>\n    </html>\n    '

    def test_playing(self):
        """No element matches ``:playing`` in static markup."""
        markup = self.MARKUP
        self.assert_selector(markup, 'video:playing', [], flags=util.HTML)

    def test_not_playing(self):
        """Every video matches ``:not(:playing)`` in static markup."""
        markup = self.MARKUP
        self.assert_selector(markup, 'video:not(:playing)', ['vid'], flags=util.HTML)
class OptionPlotoptionsSunburstSonificationTracksMappingTime(Options):
    """Generated accessor class for sunburst sonification time-mapping options.

    BUG FIX: the extraction dropped the decorators, leaving duplicate ``def``
    pairs in which the setter silently overwrote the getter; restored the
    @property / @<name>.setter pairs the paired signatures imply.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class SchedulerServiceServicer(object):
    """Skeleton servicer for the Scheduler gRPC service.

    Every RPC handler is a stub: it marks the call as UNIMPLEMENTED on the
    gRPC context and raises ``NotImplementedError``. Concrete servicers
    subclass this and override the handlers they support.
    """

    def _unimplemented(self, context):
        # Common stub behaviour shared by every handler below: flag the RPC
        # as unimplemented on the context, then raise.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def addNamespace(self, request, context):
        self._unimplemented(context)

    def getNamespace(self, request, context):
        self._unimplemented(context)

    def updateNamespace(self, request, context):
        self._unimplemented(context)

    def listNamespaces(self, request, context):
        self._unimplemented(context)

    def deleteNamespace(self, request, context):
        self._unimplemented(context)

    def addWorkflow(self, request, context):
        self._unimplemented(context)

    def updateWorkflow(self, request, context):
        self._unimplemented(context)

    def getWorkflow(self, request, context):
        self._unimplemented(context)

    def deleteWorkflow(self, request, context):
        self._unimplemented(context)

    def disableWorkflow(self, request, context):
        self._unimplemented(context)

    def enableWorkflow(self, request, context):
        self._unimplemented(context)

    def listWorkflows(self, request, context):
        self._unimplemented(context)

    def addWorkflowSnapshot(self, request, context):
        self._unimplemented(context)

    def getWorkflowSnapshot(self, request, context):
        self._unimplemented(context)

    def listWorkflowSnapshots(self, request, context):
        self._unimplemented(context)

    def deleteWorkflowSnapshot(self, request, context):
        self._unimplemented(context)

    def deleteWorkflowSnapshots(self, request, context):
        self._unimplemented(context)

    def startWorkflowExecution(self, request, context):
        self._unimplemented(context)

    def stopWorkflowExecution(self, request, context):
        self._unimplemented(context)

    def stopWorkflowExecutions(self, request, context):
        self._unimplemented(context)

    def deleteWorkflowExecution(self, request, context):
        self._unimplemented(context)

    def getWorkflowExecution(self, request, context):
        self._unimplemented(context)

    def listWorkflowExecutions(self, request, context):
        self._unimplemented(context)

    def startTaskExecution(self, request, context):
        self._unimplemented(context)

    def stopTaskExecution(self, request, context):
        self._unimplemented(context)

    def getTaskExecution(self, request, context):
        self._unimplemented(context)

    def listTaskExecutions(self, request, context):
        self._unimplemented(context)

    def addWorkflowSchedule(self, request, context):
        self._unimplemented(context)

    def getWorkflowSchedule(self, request, context):
        self._unimplemented(context)

    def listWorkflowSchedules(self, request, context):
        self._unimplemented(context)

    def deleteWorkflowSchedule(self, request, context):
        self._unimplemented(context)

    def deleteWorkflowSchedules(self, request, context):
        self._unimplemented(context)

    def pauseWorkflowSchedule(self, request, context):
        self._unimplemented(context)

    def resumeWorkflowSchedule(self, request, context):
        self._unimplemented(context)

    def addWorkflowTrigger(self, request, context):
        self._unimplemented(context)

    def getWorkflowTrigger(self, request, context):
        self._unimplemented(context)

    def listWorkflowTriggers(self, request, context):
        self._unimplemented(context)

    def deleteWorkflowTrigger(self, request, context):
        self._unimplemented(context)

    def deleteWorkflowTriggers(self, request, context):
        self._unimplemented(context)

    def pauseWorkflowTrigger(self, request, context):
        self._unimplemented(context)

    def resumeWorkflowTrigger(self, request, context):
        self._unimplemented(context)
class UnreadTopicsView(LoginRequiredMixin, ListView):
    """Lists the forum topics the current user has not read yet."""

    context_object_name = 'topics'
    paginate_by = machina_settings.FORUM_TOPICS_NUMBER_PER_PAGE
    template_name = 'forum_tracking/unread_topic_list.html'

    def get_queryset(self):
        """Return approved unread topics visible to the requesting user, newest first."""
        readable_forums = self.request.forum_permission_handler.get_readable_forums(
            Forum.objects.all(), self.request.user)
        candidate_topics = Topic.objects.filter(forum__in=readable_forums)
        unread = track_handler.get_unread_topics(candidate_topics, self.request.user)
        unread_pks = [topic.pk for topic in unread]
        return Topic.approved_objects.filter(pk__in=unread_pks).order_by('-last_post_on')
class TestPublishUnpublish:
    """Tests for ml.publish_model / ml.unpublish_model.

    BUG FIX: the bare ``.parametrize(...)`` lines were mangled/stripped
    ``@pytest.mark.parametrize`` decorators (syntax errors as written);
    restored them, plus the ``@classmethod`` / ``@staticmethod`` markers
    the call sites imply.
    """

    # (function under test, expected value of state.published in the PATCH body)
    PUBLISH_UNPUBLISH_WITH_ARGS = [(ml.publish_model, True), (ml.unpublish_model, False)]
    PUBLISH_UNPUBLISH_FUNCS = [item[0] for item in PUBLISH_UNPUBLISH_WITH_ARGS]

    @classmethod
    def setup_class(cls):
        # Initialise a default app with mock credentials and speed up
        # operation polling so the "not done" tests run quickly.
        cred = testutils.MockCredential()
        firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID})
        ml._MLService.POLL_BASE_WAIT_TIME_SECONDS = 0.1

    @classmethod
    def teardown_class(cls):
        testutils.cleanup_apps()

    @staticmethod
    def _update_url(project_id, model_id):
        update_url = 'projects/{0}/models/{1}?updateMask=state.published'.format(project_id, model_id)
        return BASE_URL + update_url

    @staticmethod
    def _get_url(project_id, model_id):
        return BASE_URL + 'projects/{0}/models/{1}'.format(project_id, model_id)

    @staticmethod
    def _op_url(project_id):
        return BASE_URL + 'projects/{0}/operations/123'.format(project_id)

    @pytest.mark.parametrize('publish_function, published', PUBLISH_UNPUBLISH_WITH_ARGS)
    def test_immediate_done(self, publish_function, published):
        recorder = instrument_ml_service(status=200, payload=OPERATION_DONE_RESPONSE)
        model = publish_function(MODEL_ID_1)
        assert model == CREATED_UPDATED_MODEL_1
        assert len(recorder) == 1
        assert recorder[0].method == 'PATCH'
        assert recorder[0].url == TestPublishUnpublish._update_url(PROJECT_ID, MODEL_ID_1)
        assert recorder[0].headers[HEADER_CLIENT_KEY] == HEADER_CLIENT_VALUE
        body = json.loads(recorder[0].body.decode())
        # `is` comparison pins the exact boolean sent to the backend.
        assert body.get('state', {}).get('published', None) is published

    @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS)
    def test_returns_locked(self, publish_function):
        # When the operation is not done, the SDK falls back to a GET of the
        # (locked) model resource.
        recorder = instrument_ml_service(status=[200, 200], payload=[OPERATION_NOT_DONE_RESPONSE, LOCKED_MODEL_2_RESPONSE])
        expected_model = ml.Model.from_dict(LOCKED_MODEL_JSON_2)
        model = publish_function(MODEL_ID_1)
        assert model == expected_model
        assert len(recorder) == 2
        assert recorder[0].method == 'PATCH'
        assert recorder[0].url == TestPublishUnpublish._update_url(PROJECT_ID, MODEL_ID_1)
        assert recorder[0].headers[HEADER_CLIENT_KEY] == HEADER_CLIENT_VALUE
        assert recorder[1].method == 'GET'
        assert recorder[1].url == TestPublishUnpublish._get_url(PROJECT_ID, MODEL_ID_1)
        assert recorder[1].headers[HEADER_CLIENT_KEY] == HEADER_CLIENT_VALUE

    @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS)
    def test_operation_error(self, publish_function):
        instrument_ml_service(status=200, payload=OPERATION_ERROR_RESPONSE)
        with pytest.raises(Exception) as excinfo:
            publish_function(MODEL_ID_1)
        check_operation_error(excinfo, OPERATION_ERROR_EXPECTED_STATUS, OPERATION_ERROR_MSG)

    @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS)
    def test_malformed_operation(self, publish_function):
        instrument_ml_service(status=200, payload=OPERATION_MALFORMED_RESPONSE)
        with pytest.raises(Exception) as excinfo:
            publish_function(MODEL_ID_1)
        check_error(excinfo, exceptions.UnknownError, 'Internal Error: Malformed Operation.')

    @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS)
    def test_rpc_error(self, publish_function):
        create_recorder = instrument_ml_service(status=400, payload=ERROR_RESPONSE_BAD_REQUEST)
        with pytest.raises(Exception) as excinfo:
            publish_function(MODEL_ID_1)
        check_firebase_error(excinfo, ERROR_STATUS_BAD_REQUEST, ERROR_CODE_BAD_REQUEST, ERROR_MSG_BAD_REQUEST)
        assert len(create_recorder) == 1
class StripePaymentsManager:
    """Helpers for talking to Stripe on behalf of the platform and of events."""

    @staticmethod
    def get_credentials(event=None):
        """Return Stripe API credentials as {'SECRET_KEY', 'PUBLISHABLE_KEY'}.

        With no ``event``: platform-level keys from admin settings (test keys
        when app_environment == 'development'). With an event id (int-like) or
        event object: the event's connected-account authorization. Returns
        ``None`` when nothing is configured.
        """
        if not event:
            settings = get_settings(from_db=True)
            if ((settings['app_environment'] == 'development') and settings['stripe_test_secret_key'] and settings['stripe_test_publishable_key']):
                return {'SECRET_KEY': settings['stripe_test_secret_key'], 'PUBLISHABLE_KEY': settings['stripe_test_publishable_key']}
            if settings['stripe_secret_key'] and settings['stripe_publishable_key']:
                return {'SECRET_KEY': settings['stripe_secret_key'], 'PUBLISHABLE_KEY': settings['stripe_publishable_key']}
            return None
        if represents_int(event):
            authorization = StripeAuthorization.query.filter_by(event_id=event).first()
        else:
            authorization = event.stripe_authorization
        if authorization:
            return {'SECRET_KEY': authorization.stripe_secret_key, 'PUBLISHABLE_KEY': authorization.stripe_publishable_key}
        return None

    @staticmethod
    def get_event_organizer_credentials_from_stripe(stripe_auth_code):
        """Exchange a Stripe Connect OAuth code for the organizer's tokens.

        Raises ForbiddenError when platform Stripe keys are missing.
        """
        credentials = StripePaymentsManager.get_credentials()
        if not credentials:
            raise ForbiddenError({'pointer': ''}, "Stripe payment isn't configured properly for the Platform")
        data = {'client_secret': credentials['SECRET_KEY'], 'code': stripe_auth_code, 'grant_type': 'authorization_code'}
        # BUG FIX: the endpoint URL was missing from this call; this is
        # Stripe's Connect OAuth token endpoint.
        response = requests.post('https://connect.stripe.com/oauth/token', data=data)
        return json.loads(response.text)

    @staticmethod
    def get_payment_intent_stripe(order_invoice, currency=None, credentials=None):
        """Create a Stripe PaymentIntent for an order invoice.

        Amount is converted to the smallest currency unit (cents). Falls back
        to the event's payment currency, then 'USD'. Raises ConflictError on
        missing credentials or any Stripe failure.
        """
        if not credentials:
            credentials = StripePaymentsManager.get_credentials(order_invoice.event)
        if not credentials:
            raise ConflictError({'pointer': ''}, 'Stripe credentials not found for the event.')
        # NOTE(review): assigning stripe.api_key mutates module-wide state —
        # confirm this is acceptable under concurrent requests.
        stripe.api_key = credentials['SECRET_KEY']
        if not currency:
            currency = order_invoice.event.payment_currency
        if (not currency) or (currency == ''):
            currency = 'USD'
        event_name = order_invoice.event.name
        try:
            payment_intent = stripe.PaymentIntent.create(amount=int(order_invoice.amount * 100), currency=currency.lower(), metadata={'order_id': order_invoice.identifier}, automatic_payment_methods={'enabled': True}, description=f'Eventyay {event_name}')
            return payment_intent
        except Exception as e:
            raise ConflictError({'pointer': ''}, str(e))

    @staticmethod
    def retrieve_session(event_id, stripe_session_id):
        """Fetch a Stripe Checkout session using the event's credentials."""
        credentials = StripePaymentsManager.get_credentials(event_id)
        if not credentials:
            raise ConflictError({'pointer': ''}, 'Stripe credentials not found for the event.')
        stripe.api_key = credentials['SECRET_KEY']
        session = stripe.checkout.Session.retrieve(stripe_session_id)
        return session

    @staticmethod
    def retrieve_payment_intent(event_id, payment_intent_id):
        """Fetch a Stripe PaymentIntent using the event's credentials."""
        credentials = StripePaymentsManager.get_credentials(event_id)
        if not credentials:
            raise ConflictError({'pointer': ''}, 'Stripe credentials not found for the event.')
        stripe.api_key = credentials['SECRET_KEY']
        payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id)
        return payment_intent
class QResolver:
    """Minimal DNS resolver speaking raw RFC 1035 wire format over UDP."""

    # Resource record types this resolver understands.
    RDTYPE_A = 1
    RDTYPE_NS = 2
    RDTYPE_CNAME = 5
    RDTYPE_PTR = 12
    # DNS header RCODE values we map to specific exceptions.
    RCODE_SERVFAIL = 2
    RCODE_NXDOMAIN = 3
    RCODE_REFUSED = 5
    SOCKET_TIMEOUT = 3

    @staticmethod
    def _build_query(fqdn, rdtype=RDTYPE_A):
        """Build a query packet for *fqdn*: fixed id 0x1986, RD set, one
        question, plus an EDNS0 OPT record advertising a 1232-byte payload."""
        qname = b''.join([(len(x).to_bytes(1, 'big') + x) for x in fqdn.encode('idna').split(b'.')])
        return b''.join([b'\x19\x86\x01\x00\x00\x01\x00\x00\x00\x00\x00\x01', qname, b'\x00', rdtype.to_bytes(2, 'big'), b'\x00\x01\x00\x00)\x04\xd0\x00\x00\x00\x00\x00\x00'])

    @classmethod
    def _decompress_name(cls, data, packet):
        """Decode a domain name from *data*, following RFC 1035 compression
        pointers into the full *packet* when encountered."""
        labels = []
        i = 0
        while i < len(data):
            b = data[i]
            if not b:
                # Root label: end of name.
                break
            if b & 192:
                # Compression pointer: 14-bit offset into the packet.
                pointer = ((b & 63) << 8) + data[i + 1]
                nul = packet.find(b'\x00', pointer, pointer + 253)
                d = packet[pointer:nul]
                label = cls._decompress_name(d, packet)
                labels.append(label)
                break
            else:
                label = data[(i + 1):((i + 1) + b)]
                labels.append(label.decode('idna'))
                i += b
            i += 1
        return '.'.join(labels)

    @classmethod
    def _parse_response(cls, response, qname):
        """Parse *response*, validate it answers *qname*, and collect answers.

        Returns a dict mapping rdtype names ('a', 'ns', 'cname', 'ptr') to
        lists of decoded values. Raises NXDOMAIN / SERVFAIL / REFUSED /
        DNSException according to the RCODE.
        """
        to_int = lambda b: int.from_bytes(b, 'big')
        reader = BuffReader(response)
        header = reader.read(12)
        rn = []
        while True:
            rl = reader.read(1)[0]
            if rl:
                rn.append(reader.read(rl))
            else:
                # End of QNAME: skip QTYPE + QCLASS.
                reader.skip(4)
                break
        rname = b'.'.join(rn).decode('idna')
        if qname != rname:
            raise DNSException('Inconsistent DNS query and response: {} <> {}'.format(qname, rname))
        flags = to_int(header[2:4])
        # BUG FIX: RCODE is the low 4 bits of the flags word. The previous
        # mask (flags & 3) could never yield RCODE_REFUSED (5) and mis-mapped
        # other codes (e.g. 5 -> 1).
        rcode = flags & 15
        answer_rr = to_int(header[6:8])
        if rcode == cls.RCODE_NXDOMAIN:
            if answer_rr == 0:
                raise NXDOMAIN('Domain name does not exist: {}'.format(qname))
        elif rcode == cls.RCODE_SERVFAIL:
            raise SERVFAIL('Server failed to complete request: {}'.format(qname))
        elif rcode == cls.RCODE_REFUSED:
            raise REFUSED('Server refused to answer: {}'.format(qname))
        elif rcode > 0:
            raise DNSException('DNS resolution error code: 0x{:x}'.format(rcode))
        rr = {}
        rdtype_name = {cls.RDTYPE_A: 'a', cls.RDTYPE_NS: 'ns', cls.RDTYPE_CNAME: 'cname', cls.RDTYPE_PTR: 'ptr'}
        for _ in range(answer_rr):
            # Skip the (usually compressed) answer name.
            reader.skip(2)
            type_id = to_int(reader.read(2))
            type_name = rdtype_name.get(type_id, 'type_{}'.format(type_id))
            # Skip CLASS (2) and TTL (4).
            reader.skip(6)
            dlen = reader.read(2)
            data = reader.read(to_int(dlen))
            if type_id == cls.RDTYPE_A:
                item = socket._socket.inet_ntoa(data)
            elif type_id in (cls.RDTYPE_CNAME, cls.RDTYPE_NS, cls.RDTYPE_PTR):
                item = cls._decompress_name(data, response)
            else:
                # Unhandled rdtype: ignore the record.
                continue
            rr.setdefault(type_name, []).append(item)
        return rr

    @classmethod
    def resolve(cls, resolver, fqdn, rdtype=RDTYPE_A, timeout=SOCKET_TIMEOUT):
        """Send a query for *fqdn* to *resolver* (UDP port 53) and parse the reply."""
        query = cls._build_query(fqdn, rdtype)
        sock = socket._socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.settimeout(timeout)
        try:
            sock.sendto(query, (resolver, 53))
            resp = sock.recv(1232)
        finally:
            sock.close()
        return cls._parse_response(resp, fqdn)
class DumpRecord(abstract.AbstractRecord):
    """Record handler for the 'dump' snapshot file format."""

    grammar = dump.DumpGrammar()
    ext = 'dump'

    def evaluate_oid(self, oid):
        # Convert the textual OID into a pyasn1 ObjectIdentifier.
        return univ.ObjectIdentifier(oid)

    def evaluate_value(self, oid, tag, value, **context):
        """Coerce *value* via the grammar's tag map; on lookup-style requests
        that cannot be satisfied, report the caller's error status instead."""
        try:
            value = self.grammar.TAG_MAP[tag](value)
        except Exception as err:
            raise SnmpsimError('value evaluation error for tag %r, value %r: %s' % (tag, value, err))
        try:
            missed_lookup = (not context['nextFlag']) and (not context['exactMatch'])
            if missed_lookup or context['setFlag']:
                return context['origOid'], tag, context['errorStatus']
        except KeyError:
            # Context keys absent: fall through to the normal result.
            pass
        return oid, tag, value

    def evaluate(self, line, **context):
        """Parse one dump line into an (oid, value) pair."""
        oid, tag, value = self.grammar.parse(line)
        oid = self.evaluate_oid(oid)
        if context.get('oidOnly'):
            value = None
        else:
            try:
                oid, tag, value = self.evaluate_value(oid, tag, value, **context)
            except PyAsn1Error as err:
                raise SnmpsimError('value evaluation for %s = %r failed: %s\r\n' % (oid, value, err))
        return oid, value

    def format_oid(self, oid):
        return univ.ObjectIdentifier(oid).prettyPrint()

    def format_value(self, oid, value, **context):
        """Render (oid, tag, value) as the string triple the grammar expects."""
        return self.format_oid(oid), self.grammar.get_tag_by_type(value), str(value)

    def format(self, oid, value, **context):
        formatted = self.format_value(oid, value, **context)
        return self.grammar.build(*formatted)
def main():
    """Ansible module entry point for the FortiOS log.webtrends settings table."""
    module_spec = schema_to_module_spec(versioned_schema)
    # This endpoint is a singleton configuration table, so there is no mkey.
    mkeyname = None
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'log_webtrends_setting': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every attribute from the generated schema into the module's
    # option spec; the mkey (if any) becomes a required option.
    for attribute_name in module_spec['options']:
        fields['log_webtrends_setting']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['log_webtrends_setting']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Warn (below) if the playbook options do not match the target FortiOS version.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'log_webtrends_setting')
        (is_error, has_changed, result, diff) = fortios_log_webtrends(module.params, fos)
    else:
        # No persistent connection available (httpapi plugin not in use).
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
@cli.command()
@edm_option
@click.option('--runtime', default='3.8', help='Python version to use')
@click.option('--toolkit', default='pyside6', help='Toolkit and API to use')
@click.option('--environment', default=None, help='EDM environment to use')
@click.option('--no-environment-vars', is_flag=True, help='Do not set ETS_TOOLKIT and QT_API')
def test(edm, runtime, toolkit, environment, no_environment_vars=False):
    """Run the test suite in a given environment with the specified toolkit.

    BUG FIX: the decorator lines had been mangled to bare tuples / '()' /
    '_option' (syntax errors); restored the click command/option decorators
    they imply.
    """
    parameters = get_parameters(edm, runtime, toolkit, environment)
    # Exclude dependencies for the toolkits that are NOT under test.
    if toolkit == 'wx':
        parameters['exclude'] = 'qt'
    elif toolkit in {'pyqt5', 'pyside2', 'pyside6'}:
        parameters['exclude'] = 'wx'
    else:
        parameters['exclude'] = '(wx|qt)'
    if no_environment_vars:
        environ = {}
    else:
        environ = environment_vars.get(toolkit, {}).copy()
    environ['PYTHONUNBUFFERED'] = '1'
    if toolkit == 'wx':
        environ['EXCLUDE_TESTS'] = 'qt'
    elif toolkit in {'pyqt5', 'pyside2', 'pyside6'}:
        environ['EXCLUDE_TESTS'] = 'wx'
    else:
        environ['EXCLUDE_TESTS'] = '(wx|qt)'
    commands = ['{edm} run -e {environment} -- python -Xfaulthandler -m coverage run -p -m unittest discover -v pyface']
    click.echo("Running tests in '{environment}'".format(**parameters))
    # Run from a temp dir so only the coverage output files are captured back.
    with do_in_tempdir(files=['.coveragerc'], capture_files=[os.path.join('.', '.coverage*')]):
        os.environ.update(environ)
        execute(commands, parameters)
    click.echo('Done test')
def build_image(path):
    """Build a 2x2 JPEG collage from artwork thumbnails referenced by *path*.

    *path* is a request path whose component after the leading '/' is a
    base64-encoded source URL. Returns the JPEG bytes, or [] for favicon
    requests.
    """
    # Stdlib HTTP client used to fetch each thumbnail below.
    from http.client import HTTPConnection

    log.debug('build_image()')
    log.debug('Request Path : {0}', path)
    request_path = path[1:]
    if request_path == 'favicon.ico':
        return []
    decoded_url = base64.b64decode(request_path).decode('utf-8')
    log.debug('decoded_url : {0}', decoded_url)
    image_urls = get_image_links(decoded_url)
    (width, height) = (500, 750)
    collage = Image.new('RGB', (width, height), (5, 5, 5))
    cols = 2
    rows = 2
    thumbnail_width = int(width / cols)
    thumbnail_height = int(height / rows)
    size = (thumbnail_width, thumbnail_height)
    image_count = 0
    for art in image_urls:
        thumb_url = art.get('thumb')
        if thumb_url:
            url_bits = urlparse(thumb_url.strip())
            host_name = url_bits.hostname
            port = url_bits.port
            url_path = url_bits.path
            url_query = url_bits.query
            server = '%s:%s' % (host_name, port)
            url_full_path = (url_path + '?') + url_query
            log.debug('Loading image from : {0} {1} {2}', image_count, server, url_full_path)
            try:
                # BUG FIX: the connection object was never constructed here
                # (the line was truncated to 'conn =').
                conn = HTTPConnection(server)
                conn.request('GET', url_full_path)
                image_responce = conn.getresponse()
                image_data = image_responce.read()
                loaded_image = Image.open(io.BytesIO(image_data))
                # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 —
                # confirm the bundled Pillow still provides it.
                image = ImageOps.fit(loaded_image, size, method=Image.ANTIALIAS, bleed=0.0, centering=(0.5, 0.5))
                x = int(image_count % cols) * thumbnail_width
                y = int(image_count / cols) * thumbnail_height
                collage.paste(image, (x, y))
                del loaded_image
                del image
                del image_data
            except Exception as con_err:
                # Best effort: a failed thumbnail leaves its cell dark.
                log.debug('Error loading image : {0}', str(con_err))
            image_count += 1
            if image_count == (cols * rows):
                break
    del image_urls
    img_byte_arr = io.BytesIO()
    collage.save(img_byte_arr, format='JPEG')
    image_bytes = img_byte_arr.getvalue()
    return image_bytes
class DictTransform():
    """Applies a per-node transform to dictionaries of values keyed by node."""

    def __init__(self, transforms: Dict[(RVIdentifier, dist.Transform)]):
        # Mapping from node identifier to the transform used for that node.
        self.transforms = transforms

    def __call__(self, node_vals: RVDict) -> RVDict:
        """Apply each node's forward transform to its value."""
        transformed = {}
        for node, val in node_vals.items():
            transformed[node] = self.transforms[node](val)
        return transformed

    def inv(self, node_vals: RVDict) -> RVDict:
        """Apply each node's inverse transform to its value."""
        return {node: self.transforms[node].inv(value) for node, value in node_vals.items()}

    def log_abs_det_jacobian(self, untransformed_vals: RVDict, transformed_vals: RVDict) -> torch.Tensor:
        """Sum of log|det J| over all nodes for the forward transforms."""
        total = torch.tensor(0.0)
        for node, raw_val in untransformed_vals.items():
            total = total + self.transforms[node].log_abs_det_jacobian(raw_val, transformed_vals[node]).sum()
        return total
def find_top_similar_results(df: pd.DataFrame, query: str, n: int):
    """Return up to *n* unique-text rows of *df* most similar to *query*.

    Similarity is cosine similarity between the query embedding and each
    row's precomputed 'ada_search' embedding.
    """
    limit = min(n, len(df.index))
    query_embedding = create_embedding(query)
    scored = df.copy()
    scored['similarities'] = scored['ada_search'].apply(
        lambda row_embedding: cosine_similarity(row_embedding, query_embedding))
    top = scored.sort_values('similarities', ascending=False).head(limit)
    return top.drop(['similarities', 'ada_search'], axis=1).drop_duplicates(subset=['text'])
def main():
    """Ansible module entry point for the FortiOS firewall.multicast-address6 table."""
    module_spec = schema_to_module_spec(versioned_schema)
    # 'name' is the table's mkey and is therefore marked required below.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_multicast_address6': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every attribute from the generated schema into the module's
    # option spec; the mkey becomes a required option.
    for attribute_name in module_spec['options']:
        fields['firewall_multicast_address6']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_multicast_address6']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Warn (below) if the playbook options do not match the target FortiOS version.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_multicast_address6')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos, module.check_mode)
    else:
        # No persistent connection available (httpapi plugin not in use).
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class IDName():
    """Gives an object a unique id and an optional human-readable name."""

    def __init__(self):
        self.id = uuid1()
        self.name = None

    def __str__(self):
        # Prefer the human-readable name; fall back to the UUID.
        return self.name if self.name else str(self.id)

    def pack_message(self, msg):
        """Bundle *msg* together with this sender for transport."""
        return (self, msg)

    def unpack_message(self, msg):
        """Render a packed (sender, message) pair relative to this receiver."""
        sender, message = msg
        if sender.id == self.id:
            prefix = 'you said'
        else:
            prefix = '%s says' % sender
        return '%s: %s' % (prefix, message)
def make_versioned(mapper=sa.orm.mapper, session=sa.orm.session.Session, manager=versioning_manager, plugins=None, options=None, user_cls='User', cares_about_checker=None):
    """Wire a versioning manager into SQLAlchemy's mapper/session/engine events.

    :param mapper: mapper (or mapper factory) to instrument
    :param session: session class to track
    :param manager: the VersioningManager to configure (defaults to the global one)
    :param plugins: optional plugin list to install on the manager
    :param options: optional dict merged into the manager's options
    :param user_cls: class (or name) used for version ownership
    :param cares_about_checker: optional predicate limiting what gets versioned
    """
    if plugins is not None:
        manager.plugins = plugins
    if options is not None:
        manager.options.update(options)
    if cares_about_checker:
        # BUG FIX: previously this was set on the global ``versioning_manager``
        # even when a custom ``manager`` was supplied, so the checker was
        # silently attached to the wrong object.
        manager.set_check_func(cares_about_checker)
    manager.user_cls = user_cls
    manager.apply_class_configuration_listeners(mapper)
    manager.track_operations(mapper)
    manager.track_session(session)
    sa.event.listen(sa.engine.Engine, 'before_cursor_execute', manager.track_association_operations)
    sa.event.listen(sa.engine.Engine, 'rollback', manager.clear_connection)
    sa.event.listen(sa.engine.Engine, 'set_connection_execution_options', manager.track_cloned_connections)
@hooks.before('Favourite Sessions > Favourite Sessions Collection > Create a Favourite Session')
def favourite_sessions_list_post(transaction):
    """Dredd before-hook: seed a session (with event and track) so the
    favourite-session POST transaction has something to reference.

    BUG FIX: the decorator line had been mangled to a bare string tuple;
    restored the dredd ``@hooks.before(...)`` decorator it implies.
    """
    with stash['app'].app_context():
        event = EventFactoryBasic()
        track = TrackFactoryBase()
        session = SessionSubFactory(event=event, track=track)
        db.session.add(session)
        db.session.commit()
def test_color_set(session):
    """A 'change_thread_theme' delta payload parses into the expected ColorSet event."""
    # Raw delta payload as delivered for a thread-theme change.
    data = {'irisSeqId': '1111111', 'irisTags': ['DeltaAdminTextMessage', 'is_from_iris_fanout'], 'messageMetadata': {'actorFbId': '1234', 'adminText': 'You changed the chat theme to Orange.', 'folderId': {'systemFolderId': 'INBOX'}, 'messageId': 'mid.$XYZ', 'offlineThreadingId': '', 'skipBumpThread': False, 'tags': ['source:titan:web', 'no_push'], 'threadKey': {'threadFbId': '4321'}, 'threadReadStateEffect': 'MARK_UNREAD', 'timestamp': '', 'unsendType': 'deny_log_message'}, 'participants': ['1234', '2345', '3456'], 'requestContext': {'apiArgs': {}}, 'tqSeqId': '1111', 'type': 'change_thread_theme', 'untypedData': {'should_show_icon': '1', 'theme_color': 'FFFF7E29', 'accessibility_label': 'Orange'}, 'class': 'AdminTextMessage'}
    expected = ColorSet(
        author=User(session=session, id='1234'),
        thread=Group(session=session, id='4321'),
        color='#ff7e29',
        at=datetime.datetime(2017, 7, 14, 2, 40, tzinfo=datetime.timezone.utc),
    )
    assert expected == parse_admin_message(session, data)
class Solution():
    def minSubArrayLen(self, s: int, nums: List[int]) -> int:
        """Length of the shortest contiguous subarray with sum >= s (0 if none).

        Sliding-window approach: grow the window to the right, shrink from the
        left while the window sum still meets the target.
        """
        invalid = len(nums) + 1
        best = invalid
        left = 0
        window_sum = 0
        for right, value in enumerate(nums):
            window_sum += value
            while window_sum >= s:
                best = min(best, right - left + 1)
                window_sum -= nums[left]
                left += 1
        return 0 if best == invalid else best
def Rotate(kwargs: dict) -> OutgoingMessage:
    """Build the outgoing 'Rotate' command message.

    Requires 'id' (int) and 'rotation' (3 float components) in *kwargs*.
    """
    utility.CheckKwargs(kwargs, ['id', 'rotation'])
    msg = OutgoingMessage()
    msg.write_int32(kwargs['id'])
    msg.write_string('Rotate')
    rotation = kwargs['rotation']
    # Serialize the three rotation components in order.
    for axis in range(3):
        msg.write_float32(rotation[axis])
    return msg
def resetRawInProgress():
    """Reset stalled raw_web_pages rows ('fetching'/'processing') back to 'new'.

    Walks the id range in fixed-size chunks so each UPDATE stays small, and
    commits either every `commit_interval` changed rows or every
    `commit_every` seconds, whichever comes first.
    """
    print('Resetting any stalled downloads from the previous session.')
    commit_interval = 50000  # changed-row threshold that forces a commit
    step = 50000  # id-range width of each UPDATE chunk
    commit_every = 30  # seconds between timed commits
    last_commit = time.time()
    # 30-minute statement timeout: the min/max scans and updates can be slow.
    with db.session_context(override_timeout_ms=((60 * 1000) * 30)) as sess:
        try:
            print('Getting minimum row in need or update..')
            start = sess.execute('SELECT min(id) FROM raw_web_pages')
            start = list(start)[0][0]
            if (start is None):
                # Empty table: nothing to do.
                print('No rows to reset!')
                return
            print('Minimum row ID:', start, 'getting maximum row...')
            stop = sess.execute('SELECT max(id) FROM raw_web_pages')
            stop = list(stop)[0][0]
            print('Maximum row ID: ', stop)
            print(('Need to fix rows from %s to %s' % (start, stop)))
            # Align the starting point down to a step boundary.
            start = (start - (start % step))
            changed = 0
            tot_changed = 0
            for idx in tqdm.tqdm(range(start, stop, step), desc='Resetting raw URLs'):
                try:
                    # .format() is safe here: idx/step are ints, not user input.
                    have = sess.execute("UPDATE\n\t\t\t\t\t\t\t\t\t\t\t\traw_web_pages\n\t\t\t\t\t\t\t\t\t\t\tSET\n\t\t\t\t\t\t\t\t\t\t\t\tstate = 'new'\n\t\t\t\t\t\t\t\t\t\t\tWHERE\n\t\t\t\t\t\t\t\t\t\t\t\t(state = 'fetching' OR state = 'processing')\n\t\t\t\t\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\t\t\t\t\tid > {}\n\t\t\t\t\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\t\t\t\t\tid <= {};".format(idx, (idx + step)))
                    changed += have.rowcount
                    tot_changed += have.rowcount
                    if (changed > commit_interval):
                        print(('Committing (%s changed rows)....' % changed), end=' ')
                        sess.commit()
                        print('done')
                        changed = 0
                        last_commit = time.time()
                    if (time.time() > (last_commit + commit_every)):
                        # Timed commit keeps transactions short even when few rows change.
                        last_commit = time.time()
                        print(('Committing (%s changed rows, timed out)....' % changed), end=' ')
                        sess.commit()
                        print('done')
                        changed = 0
                except sqlalchemy.exc.OperationalError:
                    # e.g. lock/serialization trouble: drop this chunk and continue.
                    sess.rollback()
                except sqlalchemy.exc.InvalidRequestError:
                    sess.rollback()
            sess.commit()
        finally:
            pass
def extractToffeedragontlCom(item):
    """Parser for 'toffeedragontl.com' release titles.

    Returns a release message for recognised tags, None when the title has no
    volume/chapter info or is a preview, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_block_bodies_request_with_extra_unrequested_bodies():
    """Validator must reject a response that includes bodies for headers that
    were never requested.

    Cleanup: removed the first, immediately-overwritten
    ``validator = GetBlockBodiesValidator(headers)`` assignment (dead code).
    """
    headers_bundle = mk_headers((2, 3), (8, 4), (0, 1), (0, 0))
    (headers, bodies, transactions_roots, trie_data_dicts, uncles_hashes) = zip(*headers_bundle)
    transactions_bundles = tuple(zip(transactions_roots, trie_data_dicts))
    bodies_bundle = tuple(zip(bodies, transactions_bundles, uncles_hashes))
    # A second set of headers/bodies that the validator was NOT primed with.
    wrong_headers_bundle = mk_headers((3, 2), (4, 8), (1, 0), (0, 0))
    (w_headers, w_bodies, w_transactions_roots, w_trie_data_dicts, w_uncles_hashes) = zip(*wrong_headers_bundle)
    w_transactions_bundles = tuple(zip(w_transactions_roots, w_trie_data_dicts))
    w_bodies_bundle = tuple(zip(w_bodies, w_transactions_bundles, w_uncles_hashes))
    validator = GetBlockBodiesValidator(headers)
    with pytest.raises(ValidationError):
        validator.validate_result(bodies_bundle + w_bodies_bundle)
@pytest.fixture(scope='function')
def async_w3(async_w3_base, async_result_generator_middleware):
    """Async Web3 fixture with result-generator, attrdict and local-filter
    middlewares installed.

    BUG FIX: the decorator line had been mangled to a bare
    ``(scope='function')`` tuple; restored ``@pytest.fixture(...)``.
    """
    async_w3_base.middleware_onion.add(async_result_generator_middleware)
    async_w3_base.middleware_onion.add(async_attrdict_middleware)
    async_w3_base.middleware_onion.add(async_local_filter_middleware)
    return async_w3_base
class OptionChartParallelaxesAccessibility(Options):
    """Accessibility options for parallel axes.

    NOTE(review): in the original source each getter/setter pair was a
    `@property` plus setter; the decorators were stripped, leaving duplicate
    method names where the setter silently shadowed the getter. Restored here.
    """

    @property
    def description(self):
        """Axis description announced to screen readers (None if unset)."""
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether axis accessibility features are enabled (None if unset)."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def rangeDescription(self):
        """Description of the axis value range (None if unset)."""
        return self._config_get(None)

    @rangeDescription.setter
    def rangeDescription(self, text: str):
        self._config(text, js_type=False)
class TestIntConversions(TestCase):
    """Round-trip checks for utils.int2str and utils.str2int."""

    def test_int2str(self):
        # (number, as_adjective, expected text)
        expectations = [
            (3, False, 'three'),
            (3, True, '3rd'),
            (5, True, '5th'),
            (15, False, '15'),
        ]
        for number, as_adjective, expected in expectations:
            with self.subTest(number=number, adjective=as_adjective):
                if as_adjective:
                    actual = utils.int2str(number, adjective=True)
                else:
                    actual = utils.int2str(number)
                self.assertEqual(expected, actual)

    def test_str2int(self):
        # Digits, spelled-out cardinals, and ordinals all parse.
        expectations = [
            ('5', 5),
            ('three', 3),
            ('twenty', 20),
            ('two thousand, three hundred and forty-five', 2345),
            ('1st', 1),
            ('first', 1),
            ('fourth', 4),
            ('fifth', 5),
            ('twentieth', 20),
        ]
        for text, expected in expectations:
            with self.subTest(text=text):
                self.assertEqual(expected, utils.str2int(text))
        with self.assertRaises(ValueError):
            utils.str2int('not a number')
@csrf_exempt  # NOTE(review): reconstructed from truncated "_exempt" decorator residue -- confirm against upstream
def RoomsAvailableOnDates(request, location_slug):
    """Return per-room availability for a POSTed arrive/depart date range.

    Expects 'arrive' and 'depart' date strings in request.POST; responds with
    JSON mapping room name -> {'available': bool, 'id': room id}.
    """
    location = get_object_or_404(Location, slug=location_slug)
    arrive = maya.parse(request.POST['arrive']).date
    depart = maya.parse(request.POST['depart']).date
    free_rooms = location.rooms_free(arrive, depart)
    # A room with future capacity is available iff it is free for the range.
    rooms_capacity = {
        room.name: {'available': room in free_rooms, 'id': room.id}
        for room in location.rooms_with_future_capacity()
    }
    return JsonResponse({'rooms_capacity': rooms_capacity})
class OptionSeriesVariwideSonificationTracksMappingLowpassResonance(Options):
    """Lowpass-filter resonance mapping options for sonification tracks.

    NOTE(review): in the original source each getter/setter pair was a
    `@property` plus setter; the decorators were stripped, leaving duplicate
    method names where the setter silently shadowed the getter. Restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the data values (None if unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the resonance is mapped to (None if unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):  # noqa: A003 - mirrors the Highcharts option name
        """Upper bound of the mapped range (None if unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):  # noqa: A003 - mirrors the Highcharts option name
        """Lower bound of the mapped range (None if unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within (None if unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_tx_tooltip(status: TxStatus, conf: int) -> str:
    """Build the tooltip text for a transaction: confirmation count plus status."""
    plural = '' if conf == 1 else 's'
    lines = [f'{conf} confirmation{plural}']
    if status == TxStatus.UNMATURED:
        lines.append(_('Unmatured'))
    elif status in TX_STATUS:
        lines.append(TX_STATUS[status])
    return '\n'.join(lines)
class SchemaHandler(HttpErrorMixin, APIHandler):
    """Serve every schema of a given schemaspace as a JSON document."""

    async def get(self, schemaspace):
        schemaspace = url_unescape(schemaspace)
        try:
            manager = SchemaManager.instance()
            space_schemas = manager.get_schemaspace_schemas(schemaspace)
        except (ValidationError, ValueError, SchemaNotFoundError) as err:
            # Unknown schemaspace or bad name -> 404 with the original message.
            raise web.HTTPError(404, str(err)) from err
        except Exception as err:
            raise web.HTTPError(500, repr(err)) from err
        self.set_header('Content-Type', 'application/json')
        self.finish({schemaspace: list(space_schemas.values())})

    def write_error(self, status_code, **kwargs):
        # Delegate error rendering to the mixin's JSON-aware implementation.
        HttpErrorMixin.write_error(self, status_code, **kwargs)
def print_clients(player_number):
    """Log the state of the client matching `player_number`.

    NOTE(review): the bare `return` below disables this function entirely --
    everything after it is dead code, presumably left in place so the debug
    printout can be re-enabled by deleting one line. Confirm this is intended.
    """
    return
    for c in p.cl.clients:
        if (c.player_number == player_number):
            logging.info('Player {}: {} {} BPM Pitch {:.2f}% ({:.2f}%) Beat {} Beatcnt {} pos {:.6f}'.format(c.player_number, c.model, c.bpm, ((c.pitch - 1) * 100), ((c.actual_pitch - 1) * 100), c.beat, c.beat_count, (c.position if (c.position is not None) else 0)))
def get_kernel_data_files(argv):
    """Return setup() data_files for the custom kernel, installing it as a side effect.

    bdist-style commands reference the generic 'python' executable so the
    built artifact is portable; install commands pin sys.executable. Any
    other command yields no kernel data files.
    """
    if any(arg.startswith('bdist') for arg in argv):
        executable = 'python'
    elif any(arg.startswith('install') for arg in argv):
        executable = sys.executable
    else:
        return []
    install_custom_kernel(executable)
    return [
        (icoconut_custom_kernel_install_loc, [icoconut_custom_kernel_file_loc]),
    ]
def upload_save_data_body_v2(save_data: bytes, save_key_data: dict[str, Any]) -> tuple[bytes, bytes]:
    """Build a multipart/form-data body for the v2 save-data upload endpoint.

    Args:
        save_data: Raw save-file bytes, sent as the trailing "file" part.
        save_key_data: Upload credentials; must contain every key listed in
            `text_fields` below ("key", "policy", and the x-amz-* values).

    Returns:
        (body, boundary): the encoded multipart body and the boundary bytes
        (without the leading "--").
    """
    # Order matters: the server expects the credential fields before the file.
    text_fields = (
        'key',
        'policy',
        'x-amz-signature',
        'x-amz-credential',
        'x-amz-algorithm',
        'x-amz-date',
        'x-amz-security-token',
    )
    boundary = f'__{random_digit_string(9)}-'.encode('utf-8')

    def _part(headers: list[bytes], payload: bytes) -> bytes:
        # One multipart section: boundary line, headers, blank line, payload.
        chunks = [b'--' + boundary + b'\r\n']
        chunks.extend(header + b'\r\n' for header in headers)
        chunks.append(b'\r\n')
        chunks.append(payload + b'\r\n')
        return b''.join(chunks)

    parts = [
        _part(
            [
                b'Content-Disposition: form-data; name="' + name.encode('utf-8') + b'"',
                b'Content-Type: text/plain',
            ],
            save_key_data[name].encode('utf-8'),
        )
        for name in text_fields
    ]
    parts.append(
        _part(
            [
                b'Content-Disposition: form-data; name="file"; filename="file.sav"',
                b'Content-Type: application/octet-stream',
            ],
            save_data,
        )
    )
    # Closing boundary marker.
    parts.append(b'--' + boundary + b'--\r\n')
    return (b''.join(parts), boundary)
def extractEssentialvillainessWordpressCom(item):
    """Parse an essentialvillainess.wordpress.com feed item into a release message.

    Returns None for previews or items without chapter/volume info,
    False when no known group tag matches, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, group, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, group, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def multisubset2(numbers, subsets, adder=(lambda x, y: (x + y)), zero=0):
    """Compute, for every subset of indices, the `adder`-fold of the selected numbers.

    The numbers are split into chunks of `partition_size`; for each chunk all
    2**partition_size partial sums are precomputed once, so each subset sum
    then costs one table lookup per chunk instead of one addition per element.

    Args:
        numbers: sequence of values (not mutated; padded copy is used).
        subsets: iterable of index sets into `numbers`.
        adder: associative combine function; defaults to addition.
        zero: identity element for `adder`.

    Returns:
        List of combined values, one per subset, in order.
    """
    partition_size = 1 + int(math.log(len(subsets) + 1))
    padded = numbers[:]
    # Pad with identity elements so the list splits into whole chunks.
    while len(padded) % partition_size != 0:
        padded.append(zero)
    # chunk_tables[c][mask] = fold of the chunk-c elements selected by `mask`.
    chunk_tables = []
    for start in range(0, len(padded), partition_size):
        table = [zero]
        for value in padded[start:start + partition_size]:
            table += [adder(prev, value) for prev in table]
        chunk_tables.append(table)
    results = []
    for subset in subsets:
        acc = zero
        for chunk_idx, table in enumerate(chunk_tables):
            mask = 0
            for offset in range(partition_size):
                if chunk_idx * partition_size + offset in subset:
                    mask += 1 << offset
            acc = adder(acc, table[mask])
        results.append(acc)
    return results
class Pot:
    """Tracks each player's chip contribution to the pot for one hand.

    NOTE(review): `__repr__` reads `self.total` without calling it, so `total`
    (and by the same file-wide decorator-stripping, `uid` and `side_pots`)
    must originally have been properties; restored here.
    """

    def __init__(self):
        self._pot = collections.Counter()
        self._uid = str(uuid.uuid4().hex)

    def __repr__(self):
        return f'<Pot n_chips={self.total}>'

    def __getitem__(self, player: Player):
        """Return the given player's contribution; index only with a Player."""
        if not isinstance(player, Player):
            raise ValueError('Index the pot with the player to get the contribution.')
        return self._pot[player]

    def add_chips(self, player: Player, n_chips: int):
        """Add `n_chips` from `player` to the pot."""
        self._pot[player] += n_chips

    def reset(self):
        """Empty the pot for a new hand."""
        self._pot = collections.Counter()

    @property
    def side_pots(self):
        """Split the pot into side pots.

        Repeatedly takes the smallest remaining stake from every contributing
        player into a new side pot, dropping players whose stake is exhausted.
        Returns a list of {player: chips} dicts (empty list for an empty pot).
        """
        side_pots = []
        if not len(self._pot):
            return []
        pot = {k: v for (k, v) in self._pot.items()}
        while len(pot):
            side_pots.append({})
            min_n_chips = min(pot.values())
            players_to_pop = []
            for (player, n_chips) in pot.items():
                side_pots[-1][player] = min_n_chips
                pot[player] -= min_n_chips
                if pot[player] == 0:
                    players_to_pop.append(player)
            for player in players_to_pop:
                pot.pop(player)
        return side_pots

    @property
    def uid(self):
        """Stable unique identifier for this pot instance."""
        return self._uid

    @property
    def total(self):
        """Total number of chips currently in the pot."""
        return sum(self._pot.values())
def test_dataset_correlation_metric_success() -> None:
    """DatasetCorrelationsMetric on current-only data yields current stats and no reference."""
    frame = pd.DataFrame(
        {
            'numerical_feature_1': [0, 2, 2, 2, 0],
            'numerical_feature_2': [0, 2, 2, 2, 0],
            'category_feature': [1, 2, 4, 2, 1],
            'target': [0.0, 2.0, 2.0, 2.0, 0.0],
            'prediction': [0.0, 2.0, 2.0, 2.0, 0.0],
        }
    )
    mapping = ColumnMapping(task='regression')
    metric = DatasetCorrelationsMetric()
    report = Report(metrics=[metric])
    report.run(current_data=frame, reference_data=None, column_mapping=mapping)
    outcome = metric.get_result()
    assert outcome is not None
    assert outcome.current is not None
    assert outcome.current.stats is not None
    assert outcome.reference is None
class PullRequest(Node):
    """GraphQL PullRequest node; field names mirror the GitHub GraphQL schema."""
    baseRef: Optional[Ref]  # base branch ref, or None if the ref was deleted
    baseRefName: str
    body: str
    closed: bool
    headRef: Optional[Ref]  # head branch ref, or None if the ref was deleted
    headRefName: str
    number: GitHubNumber
    _repository: GraphQLId  # owning repository id; resolved lazily via repository()
    title: str
    url: str
    def repository(self, info: GraphQLResolveInfo) -> Repository:
        # Resolver: look the owning repository up in the per-request state store.
        return github_state(info).repositories[self._repository]
def main():
    """Generate the LiteSATA standalone core from a YAML config file."""
    parser = argparse.ArgumentParser(description='LiteSATA standalone core generator')
    parser.add_argument('config', help='YAML config file')
    args = parser.parse_args()
    # Use a context manager so the config file handle is closed (the original
    # leaked it via open(...).read()).
    with open(args.config) as config_file:
        core_config = yaml.load(config_file.read(), Loader=yaml.Loader)
    # YAML values may arrive as the literal strings 'False'/'True'/'None';
    # normalize them to real Python values.
    replaces = {'False': False, 'True': True, 'None': None}
    for key, value in core_config.items():
        if isinstance(value, str) and value in replaces:
            core_config[key] = replaces[value]
    if core_config['phy'] == 'A7SATAPHY':
        from litex.build.xilinx import XilinxPlatform
        platform = XilinxPlatform('xc7a', io=[])
    elif core_config['phy'] == 'K7SATAPHY':
        from litex.build.xilinx import XilinxPlatform
        platform = XilinxPlatform('xc7k', io=[])
    else:
        raise ValueError('Unsupported SATA PHY: {}'.format(core_config['phy']))
    soc = _LiteSATACore(platform, core_config)
    builder = Builder(soc, output_dir='build', compile_gateware=False)
    builder.build(build_name='litesata_core', regular_comb=True)
def _select_features_for_corr(dataset: pd.DataFrame, data_definition: DataDefinition) -> tuple:
    """Pick the numerical and categorical columns usable for correlation.

    A column qualifies only if it has more than one distinct non-null value;
    constant or empty columns would make the correlation undefined.

    Returns:
        (num_for_corr, cat_for_corr): two lists of column names.
    """
    def _usable(columns) -> list:
        # Shared filter for both column kinds (the original duplicated this loop).
        selected = []
        for column in columns:
            unique_count = dataset[column.column_name].nunique()
            if unique_count and unique_count > 1:
                selected.append(column.column_name)
        return selected

    num_for_corr = _usable(data_definition.get_columns(ColumnType.Numerical))
    cat_for_corr = _usable(data_definition.get_columns(ColumnType.Categorical))
    return (num_for_corr, cat_for_corr)
class BanForm(CSRFForm):
    """Moderation form for adding an IP ban (single IPv4 address or a range).

    Posted to the `.mod_bans` endpoint. A duration of 0 hours means the ban
    is permanent; an optional board code restricts the ban to one board.
    """
    name = 'Add ban'
    action = '.mod_bans'
    # Single banned address, or the start of a range if the end field is set.
    ban_ip4 = StringField('IPv4 address', [DataRequired(), IPAddress(ipv4=True, ipv6=False)], description='IPv4 address to ban.', render_kw={'placeholder': '123.123.123.123'})
    # Optional end of an inclusive IPv4 range.
    ban_ip4_end = StringField('IPv4 address end range', [Optional(), IPAddress(ipv4=True, ipv6=False)], description='If specified then IPv4 range from start to end will be banned.', render_kw={'placeholder': '123.123.123.123'})
    board = StringField('Board code', [Optional(), BoardNameValidator()], description='If specified then the ban will be restricted to the given board, otherwise the ban is for all boards.', render_kw={'placeholder': 'a'})
    duration = IntegerField('Ban duration', [InputRequired(), NumberRange(min=0, max=None)], default=24, description='Ban duration in hours. Use 0 for a permanent ban.', render_kw={'placeholder': '24'})
    reason = TextAreaField('Ban reason', [Length(max=ban_service.MAX_REASON_LENGTH)], description='This will be shown to the user on the banned page.', render_kw={'cols': 60, 'rows': 6, 'placeholder': 'Banned!'})
    submit = SubmitField('Ban')
def test_generic_search_lt_gt(frontend_db, backend_db):
    """$lt and $gt comparisons on 'size' select the expected file objects."""
    for uid, size in [('uid_1', 10), ('uid_2', 20), ('uid_3', 30)]:
        insert_test_fo(backend_db, uid, size=size)
    below_25 = set(frontend_db.generic_search({'size': {'$lt': 25}}))
    above_15 = set(frontend_db.generic_search({'size': {'$gt': 15}}))
    assert below_25 == {'uid_1', 'uid_2'}
    assert above_15 == {'uid_2', 'uid_3'}
def test_slate_hybridization_jacobi_prec_A00():
    """Hybridized 3D Poisson solve with a Jacobi-preconditioned A00 local solve.

    Solves once with a nested local Schur-complement solve (Jacobi on the
    A00 block), checks the builder picked up exactly those options, then
    compares against a plain (non-nested) hybridized solve of the same
    problem; both solution components must agree to tight tolerance.
    """
    (a, L, W) = setup_poisson_3D()
    w = Function(W)
    # Nested local solve: fieldsplit Schur with Jacobi-preconditioned A00.
    params = {'mat_type': 'matfree', 'ksp_type': 'preonly', 'pc_type': 'python', 'pc_python_type': 'firedrake.HybridizationPC', 'hybridization': {'ksp_type': 'cg', 'pc_type': 'none', 'ksp_rtol': 1e-12, 'mat_type': 'matfree', 'localsolve': {'ksp_type': 'preonly', 'pc_type': 'fieldsplit', 'pc_fieldsplit_type': 'schur', 'fieldsplit_0': {'ksp_type': 'default', 'pc_type': 'jacobi'}}}}
    eq = (a == L)
    problem = LinearVariationalProblem(eq.lhs, eq.rhs, w)
    solver = LinearVariationalSolver(problem, solver_parameters=params)
    solver.solve()
    # Only jacobi_A00 (plus nesting) should be active in the Schur builder.
    expected = {'nested': True, 'preonly_A00': False, 'jacobi_A00': True, 'schur_approx': False, 'preonly_Shat': False, 'jacobi_Shat': False, 'preonly_S': False, 'jacobi_S': False}
    builder = solver.snes.ksp.pc.getPythonContext().getSchurComplementBuilder()
    assert options_check(builder, expected), 'Some solver options have not ended up in the PC as wanted.'
    (sigma_h, u_h) = w.subfunctions
    # Reference solve without the nested local-solve options.
    w2 = Function(W)
    solve((a == L), w2, solver_parameters={'mat_type': 'matfree', 'ksp_type': 'preonly', 'pc_type': 'python', 'pc_python_type': 'firedrake.HybridizationPC', 'hybridization': {'ksp_type': 'cg', 'pc_type': 'none', 'ksp_rtol': 1e-08, 'mat_type': 'matfree'}})
    (nh_sigma, nh_u) = w2.subfunctions
    sigma_err = errornorm(sigma_h, nh_sigma)
    u_err = errornorm(u_h, nh_u)
    assert (sigma_err < 1e-08)
    assert (u_err < 1e-08)
def read_associate_def(line: str):
    """Parse a Fortran ASSOCIATE statement into its association list.

    Returns ('assoc', names) on a match -- with an empty list when the
    parenthesis is unbalanced -- or None when the line does not start an
    ASSOCIATE construct.
    """
    match = FRegex.ASSOCIATE.match(line)
    if match is None:
        return None
    remainder = line[match.end(0):]
    close_pos = find_paren_match(remainder)
    if close_pos < 0:
        # Unbalanced parentheses: report the construct with no bindings.
        return ('assoc', [])
    return ('assoc', separate_def_list(remainder[:close_pos].strip()))
class MySQLVersion:
    """Parse and compare MySQL version strings such as '8.0.28-fb-build1'.

    The raw string is split on '-' into version ('8.0.28'), fork ('fb') and
    build ('build1'); missing trailing segments default to ''.

    NOTE(review): the accessors are used without call parentheses throughout
    (`self.major > other.major`, `self.fork == FB_FORK_NAME`), so they must
    originally have been `@property` methods -- as plain methods every
    comparison would compare bound-method objects and raise TypeError on
    Python 3. The decorators are restored here.
    """

    def __init__(self, version_str):
        self._version_str = version_str
        self._version = ''
        self._fork = ''
        self._build = ''
        self.parse_str()

    def parse_str(self):
        """Split the raw string into version / fork / build segments."""
        segments = self._version_str.split('-')
        self._version = segments[0]
        if len(segments) > 1:
            self._fork = segments[1]
        if len(segments) > 2:
            self._build = segments[2]

    @property
    def major(self):
        return int(self._version.split('.')[0])

    @property
    def minor(self):
        return int(self._version.split('.')[1])

    @property
    def release(self):
        return int(self._version.split('.')[2])

    @property
    def build(self):
        return self._build

    @property
    def fork(self):
        return self._fork

    @property
    def is_fb(self):
        return self.fork == FB_FORK_NAME

    @property
    def is_mysql8(self):
        return self.major == 8

    def _key(self):
        # (major, minor, release) gives the same lexicographic ordering as the
        # original nested if/elif cascades.
        return (self.major, self.minor, self.release)

    def __gt__(self, other):
        return self._key() > other._key()

    def __lt__(self, other):
        return self._key() < other._key()

    def __ge__(self, other):
        return self._key() >= other._key()

    def __le__(self, other):
        return self._key() <= other._key()
class Polling(threading.Thread):
    """Worker thread that repeatedly polls one ROS master and reports its state.

    Each cycle invokes the supplied callback with the master info; a non-None
    result updates the shared master list, None marks the master offline.
    The polling frequency adapts to the CPU cost of a cycle (see run()).
    The thread starts itself in __init__ and exits after stop() or when
    rospy shuts down.
    """

    def __init__(self, master_list, master_info, callback, update_hz):
        # callback(master_info) -> master object, or None if unreachable.
        threading.Thread.__init__(self)
        self.masterList = master_list
        self.masterInfo = master_info
        self.__callback = callback
        self.__update_hz = update_hz
        self.start()

    def stop(self):
        # Clearing the callback makes run()'s loop condition fail, ending the thread.
        self.__callback = None

    def run(self):
        self.current_check_hz = self.__update_hz
        while ((self.__callback is not None) and (not rospy.is_shutdown())):
            # Measure user+system CPU time consumed by one poll cycle.
            cputimes = os.times()
            cputime_init = (cputimes[0] + cputimes[1])
            master = self.__callback(self.masterInfo)
            if (master is not None):
                self.masterList.updateMaster(master)
            else:
                # No answer: mark this master offline by name.
                self.masterList.setMasterOnline(self.masterInfo.name, False)
            cputimes = os.times()
            cputime = ((cputimes[0] + cputimes[1]) - cputime_init)
            # Adaptive rate: halve the frequency when polling would consume
            # more than ~20% CPU at the current rate; double it again (capped
            # at the requested update_hz) when it would cost under ~10%.
            if ((self.current_check_hz * cputime) > 0.2):
                self.current_check_hz = (float(self.current_check_hz) / 2.0)
            elif (((self.current_check_hz * cputime) < 0.1) and (self.current_check_hz < self.__update_hz)):
                self.current_check_hz = (float(self.current_check_hz) * 2.0)
            time.sleep((1.0 / self.current_check_hz))
class SearchSpider(Spider, SeleniumSpiderMixin):
    """Scrapy+Selenium spider that walks LinkedIn search result pages.

    For each result it extracts the profile, optionally sends a connection
    request (with an LLM-generated message when OPENAI_API_KEY is set), and
    yields a LinkedinUser item. Crawling stops when MAX_PROFILES_TO_SCRAPE
    is reached.

    NOTE(review): several XPath literals in this class look truncated/garbled
    (e.g. "//*[='global-nav']/div" and "contains(, '...')" missing the first
    argument) -- presumably "@id"/"@class" was lost; confirm against upstream
    before relying on them.
    """
    allowed_domains = ('linkedin.com',)

    def __init__(self, start_url, driver=None, name=None, *args, **kwargs):
        super().__init__(*args, name=name, **kwargs)
        self.start_url = start_url
        self.driver = (driver or build_driver())
        self.user_profile = None
        # Counters driving the stop conditions in should_stop().
        self.profile_counter = 0
        self.connections_sent_counter = 0
        # LLM is only needed when connection requests are enabled.
        self.llm = (OpenAI(max_tokens=90, model_name='text-davinci-003', openai_api_key=OPENAI_API_KEY) if SEND_CONNECTION_REQUESTS else None)

    def wait_page_completion(self, driver):
        # Wait for the global nav bar as a proxy for "page loaded".
        # NOTE(review): XPath looks garbled -- likely //*[@id='global-nav']/div.
        get_by_xpath_or_none(driver, "//*[='global-nav']/div", wait_timeout=5)

    def parse_search_list(self, response):
        """Process one search-results page: yield users, maybe connect, paginate."""
        continue_scrape = True
        driver = self.get_driver_from_response(response)
        if self.check_if_no_results_found(driver):
            logger.warning('No results found. Stopping crawl.')
            return
        for user_container in self.iterate_containers(driver):
            # Dismiss the "your network is growing" overlay if it appeared.
            if is_your_network_is_growing_present(driver):
                press_exit(driver)
            user_profile_url = extract_user_url(user_container)
            if (user_profile_url is None):
                continue
            logger.debug(f'Found user URL:{user_profile_url}')
            self.user_profile = extract_profile_from_url(user_profile_url, driver.get_cookies())
            if self.should_stop(response):
                continue_scrape = False
                break
            connect_button = extract_connect_button(user_container)
            if skip_profile(self.user_profile):
                logger.info(f'Skipped profile: {user_profile_url}')
            else:
                # Personalized message only when an OpenAI key is configured.
                message = (generate_connection_message(self.llm, self.user_profile) if OPENAI_API_KEY else DEFAULT_CONNECTION_MESSAGE)
                self.user_profile['connection_msg'] = (message if OPENAI_API_KEY else None)
                if skip_connection_request(connect_button):
                    logger.info(f'Skipped connection request: {user_profile_url}')
                else:
                    click(driver, connect_button)
                    # LinkedIn sometimes interposes an email-verification dialog.
                    if is_email_verifier_present(driver):
                        press_exit(driver)
                    else:
                        conn_sent = send_connection_request(driver, message=message)
                        (logger.info(f'''Connection request sent to {user_profile_url}
{message}''') if conn_sent else None)
                        self.connections_sent_counter += 1
            (yield LinkedinUser(linkedinUrl=user_profile_url, **self.user_profile))
            self.profile_counter += 1
        if continue_scrape:
            next_url = self.get_next_url(response)
            (yield self.create_next_request(next_url, response))

    def get_driver_from_response(self, response):
        # The Selenium middleware stashes the driver in response.meta.
        return response.meta.pop('driver')

    def check_if_no_results_found(self, driver):
        # NOTE(review): XPath looks garbled -- likely contains(@class, ...).
        no_result_found_xpath = "//div[contains(, 'search-reusable-search-no-results')]"
        return (get_by_xpath_or_none(driver=driver, xpath=no_result_found_xpath, wait_timeout=3) is not None)

    def get_next_url(self, response):
        (index, next_url) = increment_index_at_end_url(response)
        return next_url

    def create_next_request(self, next_url, response):
        # Low priority so in-flight pages finish before pagination continues.
        return Request(url=next_url, priority=(- 1), callback=self.parse_search_list, meta=response.meta)

    def iterate_containers(self, driver):
        """Yield up to 10 result containers, scrolling each into view first."""
        for i in range(1, 11):
            # NOTE(review): XPath looks garbled -- likely contains(@class, ...).
            container_xpath = f"//li[contains(, 'result-container')][{i}]"
            container_elem = get_by_xpath_or_none(driver, container_xpath, wait_timeout=2)
            if container_elem:
                logger.debug(f'Loading {i}th user')
                # Scroll into view to trigger LinkedIn's lazy loading.
                driver.execute_script('arguments[0].scrollIntoView();', container_elem)
                self.sleep()
                (yield container_elem)

    def should_stop(self, response):
        # NOTE(review): max_num_connections is computed and logged but not
        # included in the return value -- only the profile cap actually stops
        # the crawl. Possibly a bug; confirm intended behavior.
        max_num_profiles = (self.profile_counter >= MAX_PROFILES_TO_SCRAPE)
        if max_num_profiles:
            logger.info('Stopping Reached maximum number of profiles to scrape. Stopping crawl.')
        max_num_connections = (self.connections_sent_counter >= MAX_PROFILES_TO_CONNECT)
        if max_num_connections:
            logger.info('Stopping Reached maximum number of profiles to connect. Stopping crawl.')
        return max_num_profiles
@mayavi2.standalone  # NOTE(review): reconstructed from truncated ".standalone" decorator residue -- confirm against upstream
def main():
    """Build a random 1000-point cloud PolyData and display it with outline + surface."""
    pd = tvtk.PolyData()
    pd.points = np.random.random((1000, 3))
    # One single-point vertex cell per point so the points actually render.
    verts = np.arange(0, 1000, 1)
    verts.shape = (1000, 1)
    pd.verts = verts
    pd.point_data.scalars = np.random.random(1000)
    pd.point_data.scalars.name = 'scalars'
    from mayavi.sources.vtk_data_source import VTKDataSource
    from mayavi.modules.outline import Outline
    from mayavi.modules.surface import Surface
    mayavi.new_scene()
    d = VTKDataSource()
    d.data = pd
    mayavi.add_source(d)
    mayavi.add_module(Outline())
    s = Surface()
    mayavi.add_module(s)
    # Render as points ('p') of size 2 rather than a filled surface.
    s.actor.property.trait_set(representation='p', point_size=2)
class Generator(AbstractODSGenerator):
MIN_ROWS: int = 40
MAX_COLUMNS: int = 40
OUTPUT_FILE: str = 'rp2_full_report.ods'
TEMPLATE_SHEETS_TO_KEEP: Set[str] = {'__Summary'}
__in_out_sheet_transaction_2_row: Dict[(AbstractTransaction, int)] = {}
__tax_sheet_year_2_row: Dict[(_AssetAndYear, int)] = {}
__legend: List[List[str]] = []
__yearly_gain_loss_summary_header_names_row_1: List[str] = []
__yearly_gain_loss_summary_header_names_row_2: List[str] = []
__in_header_names_row_1: List[str] = []
__in_header_names_row_2: List[str] = []
__out_header_names_row_1: List[str] = []
__out_header_names_row_2: List[str] = []
__intra_header_names_row_1: List[str] = []
__intra_header_names_row_2: List[str] = []
__balance_header_names_row_1: List[str] = []
__balance_header_names_row_2: List[str] = []
__gain_loss_summary_header_names_row_1: List[str] = []
__gain_loss_summary_header_names_row_2: List[str] = []
__gain_loss_detail_header_names_row_1: List[str] = []
__gain_loss_detail_header_names_row_2: List[str] = []
def _setup_text_data(self, country: AbstractCountry) -> None:
currency_code: str = country.currency_iso_code.upper()
self.__legend: List[List[str]] = [[_('General')], [_('Accounting Method')], [_('From Date Filter')], [_('To Date Filter')], [''], [_('Sheet Types')], [_('<Crypto> In-Out'), _('Captures all transactions coming in (IN), going out (OUT) and transferring across accounts (INTRA)')], [_('<Crypto> Tax'), _('Computation of balances and gain / loss')], [''], [_('Table Types')], [_('In-Flow Detail'), _('Transactions that added new crypto to accounts (e.g. buy, etc.). Only EARN-typed transactions are taxable events (interest received on crypto)')], [_('Out-Flow Detail'), _('Transactions that removed some crypto from accounts (e.g. sell, send as gift, etc.). These are taxable events')], [_('Intra-Flow Detail'), _('Movements across accounts without increasing/decreasing total value of crypto owned. The amount transferred is non-taxable but the transfer fee is considered a crypto sale and it is a taxable event')], [_('Gain / Loss Summary'), _('Computed gain and loss for the given cryptocurrency, organized by year and capital gains type (LONG or SHORT)')], [_('Account Balances'), _('Computed balances of all accounts. Useful to double-check that the input transactions have been entered correctly. If values dont match actual balances some data is missing or wrong')], [_('Average Price'), _('Average price at which the crypto was acquired')], [_('Gain / Loss Detail'), _('Detailed computation of gain and loss: each lot is divided into fractions, which are used to calculate the cost basis and the gain/loss')], [''], [_('In-Flow Detail')], [_('Sent/Sold'), _('Lots that have been sent or sold, according to the order defined by the Accounting Method (see General section)')], [_('Timestamp'), _('Time at which the transaction occurred')], [_('Asset'), _('Which cryptocurrency was transacted (e.g. BTC, ETH, etc.)')], [_('Exchange'), _('Exchange or wallet on which the transaction occurred (e.g. 
Coinbase, Coinbase Pro, BlockFi, etc.)')], [_('Holder'), _('Exchange account or wallet owner')], [_('Transaction Type'), _('Type of the transaction (BUY, GIFT, INTEREST, STAKING, etc.)')], [_('Spot Price'), _('Value of 1 unit of the given cryptocurrency at the time the transaction occurred')], [_('Crypto In'), _('How much of the given cryptocurrency was acquired with the transaction')], [_('Crypto In Running Sum'), _('Running sum of crypto received')], [_('{} Fee').format(currency_code), _('{} value of the fees').format(currency_code)], [_('{} In No Fee').format(currency_code), _('{} value of the transaction without fees').format(currency_code)], [_('{} In With Fee').format(currency_code), _('{} value of the transaction with fees').format(currency_code)], [_('Taxable Event'), _('Does the transaction contain a taxable event? If so the taxable amount is highlighted in yellow')], [_('Unique Id'), _('Hash or exchange-specific unique identifier for the transaction')], [_('Notes'), _('Description of the transaction')], [''], [_('Out-Flow Detail')], [_('Timestamp'), _('Time at which the transaction occurred')], [_('Asset'), _('Which cryptocurrency was transacted (e.g. BTC, ETH, etc.)')], [_('Exchange'), _('Exchange or wallet on which the transaction occurred (e.g. 
Coinbase, Coinbase Pro, BlockFi, etc.)')], [_('Holder'), _('Exchange account or wallet owner')], [_('Transaction Type'), _('Type of the transaction (DONATE, GIFT, SELL, etc.)')], [_('Spot Price'), _('Value of 1 unit of the given cryptocurrency at the time the transaction occurred')], [_('Crypto Out'), _('How much of the given cryptocurrency was sent with the transaction')], [_('Crypto Fee'), _('Crypto value of the fees')], [_('Crypto Out Running Sum'), _('Running sum of crypto sent')], [_('Crypto Fee Running Sum'), _('Running sum of crypto fees')], [_('{} Out').format(currency_code), _('{} value of the transaction without fees').format(currency_code)], [_('{} Fee').format(currency_code), _('{} value of the fees').format(currency_code)], [_('Taxable Event'), _('Does the transaction contain a taxable event? If so the taxable amount is highlighted in yellow')], [_('Unique Id'), _('Hash or exchange-specific unique identifier for the transaction')], [_('Notes'), _('Description of the transaction')], [''], [_('Intra-Flow Detail')], [_('Timestamp'), _('Time at which the transaction occurred')], [_('Asset'), _('Which cryptocurrency was transacted (e.g. BTC, ETH, etc.)')], [_('From Exchange'), _('Exchange or wallet from which the transfer of crypto occurred (e.g. Coinbase, Coinbase Pro, BlockFi, etc.)')], [_('From Holder'), _('Owner of the exchange account or wallet from which the transfer of crypto occurred')], [_('To Exchange'), _('Exchange or wallet to which the transfer of crypto occurred (e.g. 
Coinbase, Coinbase Pro, BlockFi, etc.)')], [_('To Holder'), _('Owner of the exchange account or wallet to which the transfer of crypto occurred')], [_('Spot Price'), _('Value of 1 unit of the given cryptocurrency at the time the transaction occurred')], [_('Crypto Sent'), _('How much of the given cryptocurrency was sent with the transaction')], [_('Crypto Received'), _('How much of the given cryptocurrency was received with the transaction')], [_('Crypto Fee'), _('Crypto value of the fees')], [_('Crypto Fee Running Sum'), _('Running sum of crypto fees')], [_('{} Fee').format(currency_code), _('{} value of the fees').format(currency_code)], [_('Taxable Event'), _('Does the transaction contain a taxable event? If so the taxable amount is highlighted in yellow')], [_('Unique Id'), _('Hash of the transaction')], [_('Notes'), _('Description of the transaction')], [''], [_('Gain / Loss Summary')], [_('Year'), _('Summary year')], [_('Asset'), _('Which cryptocurrency (e.g. BTC, ETH, etc.)')], [_('Capital Gains'), _('Sum of all capital gains in {} for transactions of the given capital gains type').format(currency_code)], [_('Capital Gains Type'), _('LONG (> 1 year) or SHORT (< 1 year)')], [_('Transaction Type'), _('EARN (crypto earned through interest, etc.), GIFT (crypto given), SOLD (crypto sold) OR INTRA (fees for transferring crypto across accounts)')], [_('Crypto Taxable Total'), _('Sum of all taxable events in crypto for transactions of the given capital gains type')], [_('{} Taxable Total').format(currency_code), _('Sum of all taxable events in {} for transactions of the given capital gains type').format(currency_code)], [_('{} Total Cost Basis').format(currency_code), _('Sum of all cost bases in {} for transactions of the given capital gains type').format(currency_code)], [''], [_('Account Balances')], [_('Exchange'), _('Exchange or wallet on which the transaction occurred (e.g. 
Coinbase, Coinbase Pro, BlockFi, etc.)')], [_('Holder'), _('Name of the exchange or wallet account holder')], [_('Asset'), _('Which cryptocurrency was transacted (e.g. BTC, ETH, etc.)')], [_('Acquired Balance'), _('Balance of all the BUY and EARN transactions for a given exchange and holder')], [_('Sent Balance'), _('Balance of all the SEND and SOLD transactions for which the given exchange and holder are sender')], [_('Received Balance'), _('Balance of all the SEND and SOLD transactions for which the given exchange and holder are receiver')], [_('Final Balance'), _('Final balance of all transactions for a given exchange and holder')], [''], [_('Average Price')], [_('Average Price Paid Per 1 crypto'), _('Average price at which the crypto was acquired, across all transactions')], [''], [_('Gain / Loss Detail')], [_('Crypto Amount'), _('Crypto amount for the given taxable event fraction')], [_('Asset'), _('Which cryptocurrency (e.g. BTC, ETH, etc.)')], [_('Crypto Amt Running Sum'), _('Running sum of crypto amount')], [_('Capital Gains'), _('Sum of all capital gains in {} for transactions of the given capital gains type').format(currency_code)], [_('Capital Gains Type'), _('LONG (> 1 year) or SHORT (< 1 year)')], [_('Taxable Event Timestamp'), _('Time at which the taxable event occurred')], [_('Taxable Event Direction/Type'), _('Direction (IN/OUT/INTRA) and type (BUY, SOLD, etc) of the taxable event')], [_('Taxable Event Fraction %'), _('Percentage of the taxable event')], [_('Taxable Event {} Amount Fraction').format(currency_code), _('{} amount of this taxable event fraction').format(currency_code)], [_('Taxable Event Spot Price'), _('Value of 1 unit of the given cryptocurrency at the time the taxable event occurred')], [_('Taxable Event Fraction Description'), _('English description of this taxable event fraction')], [_('Acquired Lot Timestamp'), _('Time at which the in-transaction occurred')], [_('Acquired Lot Fraction %'), _('Percentage of the in-transaction')], 
[_('Acquired Lot {} Amount Fraction').format(currency_code), _('{} amount of this in-transaction fraction').format(currency_code)], [_('Acquired Lot {} Fee Fraction').format(currency_code), _('{} fee of this in-transaction fraction').format(currency_code)], [_('Acquired Lot {} Cost Basis').format(currency_code), _('{} cost basis of this in-transaction fraction').format(currency_code)], [_('Acquired Lot Spot Price'), _('Value of 1 unit of the given cryptocurrency at the time the in-transaction occurred')], [_('Acquired Lot Fraction Description'), _('English description of this in-transaction fraction')]]
self.__yearly_gain_loss_summary_header_names_row_1: List[str] = ['', '', _('Capital'), _('Capital'), _('Transaction'), _('Crypto'), _('USD'), _('USD Total')]
self.__yearly_gain_loss_summary_header_names_row_2: List[str] = [_('Year'), _('Asset'), _('Gains'), _('Gains Type'), _('Type'), _('Taxable Total'), _('Taxable Total'), _('Cost Basis')]
self.__in_header_names_row_1: List[str] = ['', '', '', '', '', _('Transaction'), '', _('Crypto'), _('Crypto In'), '', _('{} In').format(currency_code), _('{} In').format(currency_code), _('Taxable'), '', '', '']
self.__in_header_names_row_2: List[str] = [_('Sent/Sold'), _('Timestamp'), _('Asset'), _('Exchange'), _('Holder'), _('Type'), _('Spot Price'), _('In'), _('Running Sum'), _('{} Fee').format(currency_code), _('No Fee'), _('With Fee'), _('Event'), _('N/A'), _('Unique Id'), _('Notes')]
self.__out_header_names_row_1: List[str] = ['', '', '', '', _('Transaction'), '', '', '', _('Crypto Out'), _('Crypto Fee'), '', '', _('Taxable'), '', '']
self.__out_header_names_row_2: List[str] = [_('Timestamp'), _('Asset'), _('Exchange'), _('Holder'), _('Type'), _('Spot Price'), _('Crypto Out'), _('Crypto Fee'), _('Running Sum'), _('Running Sum'), _('{} Out').format(currency_code), _('{} Fee').format(currency_code), _('Event'), _('Unique Id'), _('Notes')]
self.__intra_header_names_row_1: List[str] = ['', '', _('From'), _('From'), '', '', '', '', _('Crypto'), '', _('Crypto Fee'), '', _('Taxable'), '', '']
self.__intra_header_names_row_2: List[str] = [_('Timestamp'), _('Asset'), _('Exchange'), _('Holder'), _('To Exchange'), _('To Holder'), _('Spot Price'), _('Crypto Sent'), _('Received'), _('Crypto Fee'), _('Running Sum'), _('{} Fee').format(currency_code), _('Event'), _('Unique Id'), _('Notes')]
self.__balance_header_names_row_1: List[str] = ['', '', '', _('Acquired'), _('Sent'), _('Received'), _('Final')]
self.__balance_header_names_row_2: List[str] = [_('Exchange'), _('Holder'), _('Asset'), _('Balance'), _('Balance'), _('Balance'), _('Balance')]
self.__gain_loss_summary_header_names_row_1: List[str] = ['', '', _('Capital'), _('Capital'), _('Transaction'), _('Crypto'), _('{}').format(currency_code), _('{} Total').format(currency_code)]
self.__gain_loss_summary_header_names_row_2: List[str] = [_('Year'), _('Asset'), _('Gains'), _('Gains Type'), _('Type'), _('Taxable Total'), _('Taxable Total'), _('Cost Basis')]
self.__gain_loss_detail_header_names_row_1: List[str] = [_('Crypto'), '', _('Crypto Amt'), _('Capital'), _('Capital'), _('Taxable Event'), _('Taxable Event'), _('Taxable Event'), _('Taxable Event {}').format(currency_code), _('Taxable Event'), '', _('Taxable Event'), _('Acquired Lot'), _('Acquired Lot'), _('Acquired Lot {}').format(currency_code), _('Acquired Lot {}').format(currency_code), _('Acquired Lot {}').format(currency_code), _('Acquired Lot'), '', _('Acquired Lot Fraction')]
self.__gain_loss_detail_header_names_row_2: List[str] = [_('Amount'), _('Asset'), _('Running Sum'), _('Gains'), _('Gains Type'), _('Timestamp'), _('Direction/Type'), _('Fraction %'), _('Amount Fraction'), _('Spot Price'), _('Unique Id'), _('Fraction Description'), _('Timestamp'), _('Fraction %'), _('Amount Fraction'), _('Fee Fraction'), _('Cost Basis'), _('Spot Price'), _('Unique Id'), _('Description')]
def generate(self, country: AbstractCountry, years_2_accounting_method_names: Dict[(int, str)], asset_to_computed_data: Dict[(str, ComputedData)], output_dir_path: str, output_file_prefix: str, from_date: date, to_date: date, generation_language: str) -> None:
if (not isinstance(asset_to_computed_data, Dict)):
raise RP2TypeError(f"Parameter 'asset_to_computed_data' has non-Dict value {asset_to_computed_data}")
self._setup_text_data(country)
template_path: str = self._get_template_path('rp2_full_report', country, generation_language)
output_file: Any
output_file = self._initialize_output_file(country=country, legend_data=self.__legend, years_2_accounting_method_names=years_2_accounting_method_names, output_dir_path=output_dir_path, output_file_prefix=output_file_prefix, output_file_name=self.OUTPUT_FILE, template_path=template_path, template_sheets_to_keep=self.TEMPLATE_SHEETS_TO_KEEP, from_date=from_date, to_date=to_date)
asset: str
computed_data: ComputedData
summary_sheet = output_file.sheets['Summary']
summary_row_index: int = self._fill_header(_('Yearly Gain / Loss Summary'), self.__yearly_gain_loss_summary_header_names_row_1, self.__yearly_gain_loss_summary_header_names_row_2, summary_sheet, 0, 0)
for (asset, computed_data) in asset_to_computed_data.items():
if (not isinstance(asset, str)):
raise RP2TypeError(f"Parameter 'asset' has non-string value {asset}")
ComputedData.type_check('computed_data', computed_data)
summary_row_index = self.__generate_asset(computed_data, output_file, summary_row_index)
summary_sheet.name = _('Summary')
output_file.save()
LOGGER.info("Plugin '%s' output: %s", __name__, Path(output_file.docname).resolve())
def get_in_out_sheet_name(asset: str) -> str:
return _('{} In-Out').format(asset)
def get_tax_sheet_name(asset: str) -> str:
return _('{} Tax').format(asset)
def __get_number_of_rows_in_transaction_sheet(self, computed_data: ComputedData) -> int:
return (((self.MIN_ROWS + computed_data.in_transaction_set.count) + computed_data.out_transaction_set.count) + computed_data.intra_transaction_set.count)
def __get_number_of_rows_in_output_sheet(self, computed_data: ComputedData) -> int:
return (((self.MIN_ROWS + len(computed_data.yearly_gain_loss_list)) + computed_data.balance_set.count) + computed_data.gain_loss_set.count)
def __generate_asset(self, computed_data: ComputedData, output_file: Any, summary_row_index: int) -> int:
asset: str = computed_data.asset
transaction_sheet_name: str = self.get_in_out_sheet_name(asset)
output_sheet_name: str = self.get_tax_sheet_name(asset)
transaction_sheet: Any = ezodf.Table(transaction_sheet_name)
output_sheet: Any = ezodf.Table(output_sheet_name)
summary_sheet: Any = output_file.sheets['Summary']
output_file.sheets += transaction_sheet
output_file.sheets += output_sheet
transaction_sheet.reset(size=(self.__get_number_of_rows_in_transaction_sheet(computed_data), self.MAX_COLUMNS))
output_sheet.reset(size=(self.__get_number_of_rows_in_output_sheet(computed_data), self.MAX_COLUMNS))
new_lines: int = len(computed_data.yearly_gain_loss_list)
if new_lines:
summary_sheet.append_rows(new_lines)
row_index: int = 0
row_index = self.__generate_in_table(transaction_sheet, computed_data, row_index)
row_index = self.__generate_out_table(transaction_sheet, computed_data, (row_index + 2))
row_index = self.__generate_intra_table(transaction_sheet, computed_data, (row_index + 2))
row_index = 0
row_index = self.__generate_gain_loss_summary(output_sheet, computed_data.yearly_gain_loss_list, row_index)
row_index = self.__generate_account_balances(output_sheet, computed_data.balance_set, (row_index + 2))
row_index = self.__generate_average_price_per_unit(output_sheet, asset, computed_data.price_per_unit, (row_index + 2))
row_index = self.__generate_gain_loss_detail(output_sheet, asset, computed_data, (row_index + 2))
return self.__generate_yearly_gain_loss_summary(summary_sheet, asset, computed_data.yearly_gain_loss_list, summary_row_index)
def __get_transaction_visual_style(transaction: AbstractTransaction, year: int) -> _TransactionVisualStyle:
visual_style: str = 'transparent'
highlighted_style: str = 'transparent'
if transaction.is_taxable():
visual_style = 'taxable_event'
highlighted_style = 'highlighted'
if (year == 0):
year = transaction.timestamp.year
if (transaction.timestamp.year != year):
visual_style = f'{visual_style}_border'
highlighted_style = f'{highlighted_style}_border'
year = transaction.timestamp.year
return _TransactionVisualStyle(year, visual_style, highlighted_style)
def __get_border_style(current_year: int, year: int) -> _BorderStyle:
border_suffix: str = ''
if (year == 0):
year = current_year
if (current_year != year):
border_suffix = '_border'
year = current_year
return _BorderStyle(year, border_suffix)
    def __generate_in_table(self, sheet: Any, computed_data: ComputedData, row_index: int) -> int:
        """Render the In-Flow Detail table starting at ``row_index``.

        Writes one row per in-transaction (16 columns) and records each
        transaction's row (offset by one from the 0-based index) in
        __in_out_sheet_transaction_2_row for later hyperlinking (see
        __get_hyperlinked_transaction_value). Returns the row index after
        the last written row.
        """
        row_index = self._fill_header(_('In-Flow Detail'), self.__in_header_names_row_1, self.__in_header_names_row_2, sheet, row_index, 0)
        in_transaction_set: TransactionSet = computed_data.in_transaction_set
        entry: AbstractEntry
        year: int = 0
        visual_style: str
        previous_transaction: Optional[InTransaction] = None
        border_style: _BorderStyle
        border_suffix: str = ''
        for entry in in_transaction_set:
            transaction: InTransaction = cast(InTransaction, entry)
            highlighted_style: str
            transaction_visual_style: _TransactionVisualStyle = self.__get_transaction_visual_style(transaction, year)
            year = transaction_visual_style.year
            border_style = self.__get_border_style(entry.timestamp.year, year)
            border_suffix = border_style.border_suffix
            visual_style = transaction_visual_style.visual_style
            highlighted_style = transaction_visual_style.highlighted_style
            in_lot_sold_percentage: Optional[RP2Decimal] = computed_data.get_in_lot_sold_percentage(transaction)
            # After the first row, a 0% sold lot is shown as a blank cell
            # (None) instead of a zero percentage.
            if ((in_lot_sold_percentage == _ZERO) and (previous_transaction is not None)):
                in_lot_sold_percentage = None
            self._fill_cell(sheet, row_index, 0, (in_lot_sold_percentage if (in_lot_sold_percentage is not None) else ''), data_style='percent', visual_style=(('acquired_lot' + border_suffix) if (in_lot_sold_percentage is not None) else 'transparent'))
            self._fill_cell(sheet, row_index, 1, transaction.timestamp, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 2, transaction.asset, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 3, transaction.exchange, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 4, transaction.holder, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 5, transaction.transaction_type.get_translation().upper(), visual_style=visual_style)
            self._fill_cell(sheet, row_index, 6, transaction.spot_price, data_style='fiat', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 7, transaction.crypto_in, data_style='crypto', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 8, computed_data.get_crypto_in_running_sum(transaction), data_style='crypto', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 9, transaction.fiat_fee, data_style='fiat', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 10, transaction.fiat_in_no_fee, data_style='fiat', visual_style=visual_style)
            # The with-fee amount is the cell highlighted for taxable events.
            self._fill_cell(sheet, row_index, 11, transaction.fiat_in_with_fee, data_style='fiat', visual_style=highlighted_style)
            self._fill_cell(sheet, row_index, 12, (_('YES') if transaction.is_taxable() else _('NO')), data_style='fiat', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 13, '', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 14, transaction.unique_id, visual_style='transparent')
            self._fill_cell(sheet, row_index, 15, transaction.notes, visual_style='transparent')
            self.__in_out_sheet_transaction_2_row[transaction] = (row_index + 1)
            previous_transaction = transaction
            row_index += 1
        return row_index
    def __generate_out_table(self, sheet: Any, computed_data: ComputedData, row_index: int) -> int:
        """Render the Out-Flow Detail table starting at ``row_index``.

        Writes one row per out-transaction (16 columns, the first left blank)
        and records each transaction's row in __in_out_sheet_transaction_2_row
        for later hyperlinking. Returns the row index after the last row.
        """
        row_index = self._fill_header(_('Out-Flow Detail'), self.__out_header_names_row_1, self.__out_header_names_row_2, sheet, row_index, 1)
        out_transaction_set: TransactionSet = computed_data.out_transaction_set
        entry: AbstractEntry
        year: int = 0
        for entry in out_transaction_set:
            transaction: OutTransaction = cast(OutTransaction, entry)
            visual_style: str
            highlighted_style: str
            transaction_visual_style: _TransactionVisualStyle = self.__get_transaction_visual_style(transaction, year)
            year = transaction_visual_style.year
            visual_style = transaction_visual_style.visual_style
            highlighted_style = transaction_visual_style.highlighted_style
            # Column 0 is left blank (it holds the sold-percentage in the
            # In-Flow table; out rows have no equivalent).
            self._fill_cell(sheet, row_index, 0, '', visual_style='transparent')
            self._fill_cell(sheet, row_index, 1, transaction.timestamp, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 2, transaction.asset, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 3, transaction.exchange, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 4, transaction.holder, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 5, transaction.transaction_type.get_translation().upper(), visual_style=visual_style)
            self._fill_cell(sheet, row_index, 6, transaction.spot_price, visual_style=visual_style, data_style='fiat')
            self._fill_cell(sheet, row_index, 7, transaction.crypto_out_no_fee, visual_style=visual_style, data_style='crypto')
            self._fill_cell(sheet, row_index, 8, transaction.crypto_fee, visual_style=visual_style, data_style='crypto')
            self._fill_cell(sheet, row_index, 9, computed_data.get_crypto_out_running_sum(transaction), data_style='crypto', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 10, computed_data.get_crypto_out_fee_running_sum(transaction), data_style='crypto', visual_style=visual_style)
            # Fiat amount and fee are highlighted only when strictly positive.
            self._fill_cell(sheet, row_index, 11, transaction.fiat_out_no_fee, visual_style=(highlighted_style if (transaction.fiat_out_no_fee > ZERO) else visual_style), data_style='fiat')
            self._fill_cell(sheet, row_index, 12, transaction.fiat_fee, visual_style=(highlighted_style if (transaction.fiat_fee > ZERO) else visual_style), data_style='fiat')
            self._fill_cell(sheet, row_index, 13, (_('YES') if transaction.is_taxable() else _('NO')), data_style='fiat', visual_style=visual_style)
            self._fill_cell(sheet, row_index, 14, transaction.unique_id, visual_style='transparent')
            self._fill_cell(sheet, row_index, 15, transaction.notes, visual_style='transparent')
            self.__in_out_sheet_transaction_2_row[transaction] = (row_index + 1)
            row_index += 1
        return row_index
    def __generate_intra_table(self, sheet: Any, computed_data: ComputedData, row_index: int) -> int:
        """Render the Intra-Flow Detail table starting at ``row_index``.

        Writes one row per intra-transaction (16 columns, the first left
        blank) and records each transaction's row in
        __in_out_sheet_transaction_2_row for later hyperlinking. Returns the
        row index after the last row.
        """
        row_index = self._fill_header(_('Intra-Flow Detail'), self.__intra_header_names_row_1, self.__intra_header_names_row_2, sheet, row_index, 1)
        intra_transaction_set: TransactionSet = computed_data.intra_transaction_set
        entry: AbstractEntry
        year: int = 0
        for entry in intra_transaction_set:
            transaction: IntraTransaction = cast(IntraTransaction, entry)
            visual_style: str
            highlighted_style: str
            transaction_visual_style: _TransactionVisualStyle = self.__get_transaction_visual_style(transaction, year)
            year = transaction_visual_style.year
            visual_style = transaction_visual_style.visual_style
            highlighted_style = transaction_visual_style.highlighted_style
            self._fill_cell(sheet, row_index, 0, '', visual_style='transparent')
            self._fill_cell(sheet, row_index, 1, transaction.timestamp, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 2, transaction.asset, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 3, transaction.from_exchange, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 4, transaction.from_holder, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 5, transaction.to_exchange, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 6, transaction.to_holder, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 7, transaction.spot_price, visual_style=visual_style, data_style='fiat')
            self._fill_cell(sheet, row_index, 8, transaction.crypto_sent, visual_style=visual_style, data_style='crypto')
            self._fill_cell(sheet, row_index, 9, transaction.crypto_received, visual_style=visual_style, data_style='crypto')
            self._fill_cell(sheet, row_index, 10, transaction.crypto_fee, visual_style=visual_style, data_style='crypto')
            self._fill_cell(sheet, row_index, 11, computed_data.get_crypto_intra_fee_running_sum(transaction), data_style='crypto', visual_style=visual_style)
            # The fiat fee is the taxable portion of an intra transfer, hence
            # the highlighted style.
            self._fill_cell(sheet, row_index, 12, transaction.fiat_fee, visual_style=highlighted_style, data_style='fiat')
            self._fill_cell(sheet, row_index, 13, (_('YES') if transaction.is_taxable() else _('NO')), data_style='fiat', visual_style=visual_style)
            # NOTE(review): the unique-id cell uses visual_style here, whereas
            # the in/out tables use 'transparent' — confirm this is intended.
            self._fill_cell(sheet, row_index, 14, transaction.unique_id, visual_style=visual_style)
            self._fill_cell(sheet, row_index, 15, transaction.notes, visual_style='transparent')
            self.__in_out_sheet_transaction_2_row[transaction] = (row_index + 1)
            row_index += 1
        return row_index
    def __generate_gain_loss_summary(self, sheet: Any, yearly_gain_loss_list: List[YearlyGainLoss], row_index: int) -> int:
        """Render the per-asset Gain / Loss Summary table starting at
        ``row_index``.

        Writes one row per YearlyGainLoss entry; rows where the year changes
        get a '_border' style suffix. Returns the row index after the last
        written row.
        """
        row_index = self._fill_header(_('Gain / Loss Summary'), self.__gain_loss_summary_header_names_row_1, self.__gain_loss_summary_header_names_row_2, sheet, row_index, 0)
        year: int = 0
        for yearly_gain_loss in yearly_gain_loss_list:
            border_suffix: str = ''
            capital_gains_type: str = (_('LONG') if yearly_gain_loss.is_long_term_capital_gains else _('SHORT'))
            border_style: _BorderStyle = self.__get_border_style(yearly_gain_loss.year, year)
            year = border_style.year
            border_suffix = border_style.border_suffix
            self._fill_cell(sheet, row_index, 0, yearly_gain_loss.year, visual_style=('transparent' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 1, yearly_gain_loss.asset, visual_style=('transparent' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 2, yearly_gain_loss.fiat_gain_loss, visual_style=('bold' + border_suffix), data_style='fiat')
            self._fill_cell(sheet, row_index, 3, capital_gains_type, visual_style=('bold' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 4, yearly_gain_loss.transaction_type.get_translation().upper(), visual_style=('bold' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 5, yearly_gain_loss.crypto_amount, visual_style=('transparent' + border_suffix), data_style='crypto')
            self._fill_cell(sheet, row_index, 6, yearly_gain_loss.fiat_amount, visual_style=('taxable_event' + border_suffix), data_style='fiat')
            self._fill_cell(sheet, row_index, 7, yearly_gain_loss.fiat_cost_basis, visual_style=('acquired_lot' + border_suffix), data_style='fiat')
            row_index += 1
        return row_index
    def __generate_account_balances(self, sheet: Any, balance_set: BalanceSet, row_index: int) -> int:
        """Render the Account Balances table starting at ``row_index``.

        Writes one row per balance, accumulating each holder's final balance,
        then appends one 'Total' row per holder (sorted by holder name); the
        first total row gets a '_border' style suffix. Returns the row index
        after the last written row.
        """
        row_index = self._fill_header(_('Account Balances'), self.__balance_header_names_row_1, self.__balance_header_names_row_2, sheet, row_index, 0)
        # holder -> sum of final balances across that holder's accounts.
        totals: Dict[(str, RP2Decimal)] = {}
        value: RP2Decimal
        for balance in balance_set:
            self._fill_cell(sheet, row_index, 0, balance.exchange, visual_style='bold', data_style='default')
            self._fill_cell(sheet, row_index, 1, balance.holder, visual_style='bold', data_style='default')
            self._fill_cell(sheet, row_index, 2, balance.asset)
            self._fill_cell(sheet, row_index, 3, balance.acquired_balance, data_style='crypto')
            self._fill_cell(sheet, row_index, 4, balance.sent_balance, data_style='crypto')
            self._fill_cell(sheet, row_index, 5, balance.received_balance, data_style='crypto')
            self._fill_cell(sheet, row_index, 6, balance.final_balance, visual_style='bold', data_style='crypto')
            value = totals.setdefault(balance.holder, _ZERO)
            value += balance.final_balance
            totals[balance.holder] = value
            row_index += 1
        holder: str
        # Only the first total row is drawn with a top border, separating the
        # totals from the per-account rows above.
        border_drawn: bool = False
        for (holder, value) in sorted(totals.items()):
            border_suffix: str = ''
            if (not border_drawn):
                border_suffix = '_border'
                border_drawn = True
            self._fill_cell(sheet, row_index, 0, _('Total'), visual_style=('bold' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 1, holder, visual_style=('bold' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 2, '', visual_style=('transparent' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 3, '', visual_style=('transparent' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 4, '', visual_style=('transparent' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 5, '', visual_style=('transparent' + border_suffix), data_style='default')
            self._fill_cell(sheet, row_index, 6, value, visual_style=('bold' + border_suffix), data_style='crypto')
            row_index += 1
        return row_index
def __generate_average_price_per_unit(self, sheet: Any, asset: str, price_per_unit: RP2Decimal, row_index: int) -> int:
self._fill_cell(sheet, row_index, 0, _('Average Price'), visual_style='title')
self._fill_cell(sheet, (row_index + 1), 0, _('Average Price'), visual_style='header')
self._fill_cell(sheet, (row_index + 2), 0, _('Paid Per 1 {}').format(asset), visual_style='header')
self._fill_cell(sheet, (row_index + 3), 0, price_per_unit, visual_style='transparent', data_style='fiat')
return (row_index + 4)
def __get_hyperlinked_transaction_value(self, transaction: AbstractTransaction, value: Any) -> Any:
row: Optional[int] = self.__get_in_out_sheet_row(transaction)
if (not row):
return value
if isinstance(value, (RP2Decimal, int, float)):
return f'=HYPERLINK("#{self.get_in_out_sheet_name(transaction.asset)}.a{row}:z{row}"; {value})'
return f'=HYPERLINK("#{self.get_in_out_sheet_name(transaction.asset)}.a{row}:z{row}"; "{value}")'
def __get_hyperlinked_summary_value(self, asset: str, value: Any, year: int) -> Any:
row: int = self.__tax_sheet_year_2_row[_AssetAndYear(asset, year)]
if isinstance(value, (RP2Decimal, int, float)):
return f'=HYPERLINK("#{self.get_tax_sheet_name(asset)}.a{row}:z{row}"; {value})'
return f'=HYPERLINK("#{self.get_tax_sheet_name(asset)}.a{row}:z{row}"; "{value}")'
def __get_in_out_sheet_row(self, transaction: AbstractTransaction) -> Optional[int]:
if (transaction not in self.__in_out_sheet_transaction_2_row):
return None
return self.__in_out_sheet_transaction_2_row[transaction]
def __generate_gain_loss_detail(self, sheet: Any, asset: str, computed_data: ComputedData, row_index: int) -> int:
# Render the per-fraction Gain/Loss Detail table for one asset and return the
# next free row index.  Taxable-event columns (5-11) and acquired-lot columns
# (12-19) alternate background styles per transaction; a border suffix marks
# year boundaries.
row_index = self._fill_header(_('Gain / Loss Detail'), self.__gain_loss_detail_header_names_row_1, self.__gain_loss_detail_header_names_row_2, sheet, row_index, 0)
gain_loss_set: GainLossSet = computed_data.gain_loss_set
# Alternating '' / '_alt' style suffixes for the two column groups.
taxable_event_style_modifier: str = ''
acquired_lot_style_modifier: str = '_alt'
year: int = 0
border_style: _BorderStyle
previous_acquired_lot: Optional[InTransaction] = None
for entry in gain_loss_set:
gain_loss: GainLoss = cast(GainLoss, entry)
border_suffix: str = ''
border_style = self.__get_border_style(gain_loss.taxable_event.timestamp.year, year)
if (gain_loss.taxable_event.timestamp.year != year):
# First row of a new year: remember it so summary cells can hyperlink here.
self.__tax_sheet_year_2_row[_AssetAndYear(asset, gain_loss.taxable_event.timestamp.year)] = (row_index + 1)
year = border_style.year
border_suffix = border_style.border_suffix
transparent_style: str = f'transparent{border_suffix}'
taxable_event_style: str = f'taxable_event{taxable_event_style_modifier}{border_suffix}'
highlighted_style: str = f'highlighted{border_suffix}'
# Fractions are 0-based internally; display as 1-based "i/n".
current_taxable_event_fraction: int = (gain_loss_set.get_taxable_event_fraction(gain_loss) + 1)
total_taxable_event_fractions: int = gain_loss_set.get_taxable_event_number_of_fractions(gain_loss.taxable_event)
transaction_type: str = f'{self._get_table_type_from_transaction(gain_loss.taxable_event)} / {gain_loss.taxable_event.transaction_type.get_translation().upper()}'
taxable_event_note: str = f'{current_taxable_event_fraction}/{total_taxable_event_fractions}: {gain_loss.crypto_amount:.8f} of {gain_loss.taxable_event.crypto_balance_change:.8f} {asset}'
acquired_lot_style: str
# Columns 0-4: amount, asset, running sum, fiat gain, long/short flag.
self._fill_cell(sheet, row_index, 0, gain_loss.crypto_amount, visual_style=transparent_style, data_style='crypto')
self._fill_cell(sheet, row_index, 1, gain_loss.asset, visual_style=transparent_style)
self._fill_cell(sheet, row_index, 2, computed_data.get_crypto_gain_loss_running_sum(gain_loss), visual_style=transparent_style, data_style='crypto')
self._fill_cell(sheet, row_index, 3, gain_loss.fiat_gain, visual_style=transparent_style, data_style='fiat')
self._fill_cell(sheet, row_index, 4, (_('LONG') if gain_loss.is_long_term_capital_gains() else _('SHORT')), visual_style=transparent_style)
# Columns 5-11: taxable-event details, each hyperlinked to the in/out sheet.
self._fill_cell(sheet, row_index, 5, self.__get_hyperlinked_transaction_value(gain_loss.taxable_event, gain_loss.taxable_event.timestamp), visual_style=taxable_event_style)
self._fill_cell(sheet, row_index, 6, self.__get_hyperlinked_transaction_value(gain_loss.taxable_event, transaction_type), visual_style=taxable_event_style)
self._fill_cell(sheet, row_index, 7, self.__get_hyperlinked_transaction_value(gain_loss.taxable_event, gain_loss.taxable_event_fraction_percentage), visual_style=taxable_event_style, data_style='percent')
self._fill_cell(sheet, row_index, 8, self.__get_hyperlinked_transaction_value(gain_loss.taxable_event, gain_loss.taxable_event_fiat_amount_with_fee_fraction), visual_style=highlighted_style, data_style='fiat')
self._fill_cell(sheet, row_index, 9, self.__get_hyperlinked_transaction_value(gain_loss.taxable_event, gain_loss.taxable_event.spot_price), visual_style=taxable_event_style, data_style='fiat')
self._fill_cell(sheet, row_index, 10, self.__get_hyperlinked_transaction_value(gain_loss.taxable_event, gain_loss.taxable_event.unique_id), visual_style=taxable_event_style, data_style='fiat')
self._fill_cell(sheet, row_index, 11, self.__get_hyperlinked_transaction_value(gain_loss.taxable_event, taxable_event_note), visual_style=f'taxable_event_note{border_suffix}')
if (current_taxable_event_fraction == total_taxable_event_fractions):
# Last fraction of this taxable event: flip the alternating style.
taxable_event_style_modifier = ('' if (taxable_event_style_modifier == '_alt') else '_alt')
if gain_loss.acquired_lot:
if (gain_loss.acquired_lot != previous_acquired_lot):
# New acquired lot: flip its alternating style.
acquired_lot_style_modifier = ('' if (acquired_lot_style_modifier == '_alt') else '_alt')
acquired_lot_style = f'acquired_lot{acquired_lot_style_modifier}{border_suffix}'
current_acquired_lot_fraction: int = (gain_loss_set.get_acquired_lot_fraction(gain_loss) + 1)
total_acquired_lot_fractions: int = gain_loss_set.get_acquired_lot_number_of_fractions(gain_loss.acquired_lot)
acquired_lot_note: str = f'{current_acquired_lot_fraction}/{total_acquired_lot_fractions}: {gain_loss.crypto_amount:.8f} of {gain_loss.acquired_lot.crypto_balance_change:.8f} {asset}'
# Columns 12-19: acquired-lot details, hyperlinked to the in/out sheet.
self._fill_cell(sheet, row_index, 12, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, gain_loss.acquired_lot.timestamp), visual_style=acquired_lot_style)
self._fill_cell(sheet, row_index, 13, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, gain_loss.acquired_lot_fraction_percentage), visual_style=acquired_lot_style, data_style='percent')
self._fill_cell(sheet, row_index, 14, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, gain_loss.acquired_lot_fiat_amount_with_fee_fraction), visual_style=acquired_lot_style, data_style='fiat')
fiat_fee_fraction: RP2Decimal = (gain_loss.acquired_lot.fiat_fee * gain_loss.acquired_lot_fraction_percentage)
self._fill_cell(sheet, row_index, 15, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, fiat_fee_fraction), visual_style=acquired_lot_style, data_style='fiat')
self._fill_cell(sheet, row_index, 16, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, gain_loss.fiat_cost_basis), visual_style=highlighted_style, data_style='fiat')
self._fill_cell(sheet, row_index, 17, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, gain_loss.acquired_lot.spot_price), visual_style=acquired_lot_style, data_style='fiat')
self._fill_cell(sheet, row_index, 18, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, gain_loss.acquired_lot.unique_id), visual_style=acquired_lot_style, data_style='fiat')
self._fill_cell(sheet, row_index, 19, self.__get_hyperlinked_transaction_value(gain_loss.acquired_lot, acquired_lot_note), visual_style=f'acquired_lot_note{border_suffix}')
previous_acquired_lot = gain_loss.acquired_lot
else:
# No acquired lot (e.g. fee-only events): blank out the lot columns.
# NOTE(review): range(12, 19) blanks columns 12-18 only, leaving the
# note column 19 untouched, while the populated branch writes 12-19 —
# confirm whether range(12, 20) was intended.
acquired_lot_style = f'acquired_lot{acquired_lot_style_modifier}{border_suffix}'
for i in range(12, 19):
self._fill_cell(sheet, row_index, i, '', visual_style=f'{acquired_lot_style}')
row_index += 1
return row_index
def __generate_yearly_gain_loss_summary(self, sheet: Any, asset: str, yearly_gain_loss_list: List[YearlyGainLoss], row_index: int) -> int:
    """Write one summary row per YearlyGainLoss entry, every cell hyperlinked
    to the first tax-sheet row of its year, and return the next free row."""
    for yearly_entry in yearly_gain_loss_list:
        visual_style: str = 'transparent'
        gains_type: str = _('LONG') if yearly_entry.is_long_term_capital_gains else _('SHORT')
        entry_year: int = yearly_entry.year
        # (value, data_style) per column, in column order 0..7.
        columns = [
            (entry_year, None),
            (asset, None),
            (yearly_entry.fiat_gain_loss, 'fiat'),
            (gains_type, None),
            (yearly_entry.transaction_type.get_translation().upper(), None),
            (yearly_entry.crypto_amount, 'crypto'),
            (yearly_entry.fiat_amount, 'fiat'),
            (yearly_entry.fiat_cost_basis, 'fiat'),
        ]
        for column, (value, data_style) in enumerate(columns):
            hyperlinked = self.__get_hyperlinked_summary_value(asset, value, entry_year)
            if data_style is None:
                self._fill_cell(sheet, row_index, column, hyperlinked, visual_style=visual_style)
            else:
                self._fill_cell(sheet, row_index, column, hyperlinked, visual_style=visual_style, data_style=data_style)
        row_index += 1
    return row_index
class TweetView(ModelView):
# Flask-Admin model view over the MongoDB 'tweet' collection (pymongo backend).
column_list = ('name', 'user_name', 'text')
column_sortable_list = ('name', 'text')
column_filters = (filters.FilterEqual('name', 'Name'), filters.FilterNotEqual('name', 'Name'), filters.FilterLike('name', 'Name'), filters.FilterNotLike('name', 'Name'), filters.BooleanEqualFilter('testie', 'Testie'))
form = TweetForm
def get_list(self, *args, **kwargs):
# Augment each listed tweet with the owning user's name.
# NOTE(review): this issues one db.user lookup per row — fine for demo
# data, O(n) queries on large pages.
(count, data) = super(TweetView, self).get_list(*args, **kwargs)
for item in data:
item['user_name'] = db.user.find_one({'_id': item['user_id']})['name']
return (count, data)
def _feed_user_choices(self, form):
# Populate the user_id select with (stringified id, name) pairs so the
# form can reference users from the separate 'user' collection.
users = db.user.find(fields=('name',))
form.user_id.choices = [(str(x['_id']), x['name']) for x in users]
return form
def create_form(self):
form = super(TweetView, self).create_form()
return self._feed_user_choices(form)
def edit_form(self, obj):
form = super(TweetView, self).edit_form(obj)
return self._feed_user_choices(form)
def on_model_change(self, form, model):
# NOTE(review): as written this is a no-op round-trip of 'user_id'.
# pymongo examples normally convert the posted string to an ObjectId here
# (model['user_id'] = ObjectId(user_id)) — confirm against the original.
user_id = model.get('user_id')
model['user_id'] = user_id
return model
def get_settings_folder():
    """Return the Expyriment settings directory for the current user.

    Home resolution order: %USERPROFILE% (Windows); the Android sdcard root
    when running under Android; $HOME; finally ``os.path.expanduser('~')``.
    Inside a virtualenv the venv prefix overrides all of the above.  Prefers
    an existing legacy ``~expyriment`` directory, else ``.expyriment``.
    """
    home = os.getenv('USERPROFILE')
    if home is None:
        if android is not None:
            home = '/storage/sdcard0/'
        else:
            # $HOME can legitimately be unset (daemons, stripped envs);
            # previously os.path.join(None, ...) would raise TypeError here.
            home = os.getenv('HOME') or os.path.expanduser('~')
    if is_venv():
        home = sys.prefix
    legacy_folder = os.path.join(home, '~expyriment')
    if os.path.isdir(legacy_folder):
        return legacy_folder
    return os.path.join(home, '.expyriment')
class AccountWizard(BaseWizard, MessageBoxMixin):
# Wizard dialog for adding an account to the active wallet.  Gathers either
# imported text (seed words/keys), a multisig configuration, or a hardware
# wallet, and applies the result to the wallet according to self.flags.
HELP_DIRNAME = 'account-wizard'
_last_page_id: Optional[AccountPage] = None
# Hardware device picked on the find-device page, if any.
_selected_device: Optional[Tuple[(str, DeviceInfo)]] = None
_keystore: Optional[KeyStore] = None
_keystore_type = ResultType.UNKNOWN
def __init__(self, main_window: ElectrumWindow, flags: WizardFlags=DEFAULT_WIZARD_FLAGS, parent: Optional[QWidget]=None) -> None:
# Default the Qt parent to the owning main window.
if (parent is None):
parent = main_window
super().__init__(parent)
self.flags = flags
self._main_window = main_window
self._wallet: Wallet = main_window._wallet
self._text_import_type: Optional[KeystoreTextType] = None
self._text_import_matches: Optional[KeystoreMatchType] = None
self.set_subtitle('')
self.setModal(True)
self.setMinimumSize(600, 600)
# Custom button starts hidden; individual pages enable it as needed.
self.setOption(QWizard.HaveCustomButton1, True)
self.button(QWizard.CustomButton1).setVisible(False)
# Register all pages up front; navigation is by AccountPage id.
self.setPage(AccountPage.ADD_ACCOUNT_MENU, AddAccountWizardPage(self))
self.setPage(AccountPage.IMPORT_ACCOUNT_TEXT, ImportWalletTextPage(self))
self.setPage(AccountPage.IMPORT_ACCOUNT_TEXT_CUSTOM, ImportWalletTextCustomPage(self))
self.setPage(AccountPage.CREATE_MULTISIG_ACCOUNT, CreateMultisigAccountPage(self))
self.setPage(AccountPage.CREATE_MULTISIG_ACCOUNT_CUSTOM, CreateMultisigAccountCustomPage(self))
self.setPage(AccountPage.CREATE_MULTISIG_ACCOUNT_COSIGNERS, MultisigAccountCosignerListPage(self))
self.setPage(AccountPage.FIND_HARDWARE_WALLET, FindHardwareWalletAccountPage(self))
self.setPage(AccountPage.SETUP_HARDWARE_WALLET, SetupHardwareWalletAccountPage(self))
self.setStartId(AccountPage.ADD_ACCOUNT_MENU)
def query_choice(self, msg: str, choices: Iterable[str]) -> Optional[int]:
# Thin wrapper over the module-level query_choice dialog helper.
return query_choice(self, msg, choices)
def set_subtitle(self, subtitle: str) -> None:
# Window title is 'ElectrumSV', optionally suffixed with ' - <subtitle>'.
suffix = (f' - {subtitle}' if len(subtitle) else '')
self.setWindowTitle(f'ElectrumSV{suffix}')
def set_selected_device(self, device: Optional[Tuple[(str, DeviceInfo)]]) -> None:
self._selected_device = device
def get_selected_device(self) -> Optional[Tuple[(str, DeviceInfo)]]:
return self._selected_device
def get_main_window(self) -> ElectrumWindow:
return self._main_window
def get_wallet(self) -> Wallet:
return self._wallet
def set_text_import_matches(self, text_type: KeystoreTextType, text_matches: KeystoreMatchType) -> None:
# Stash parsed text-import candidates for the custom import page to use.
self._text_import_type = text_type
self._text_import_matches = text_matches
def get_text_import_type(self) -> Optional[KeystoreTextType]:
return self._text_import_type
def get_text_import_matches(self) -> Optional[KeystoreMatchType]:
return self._text_import_matches
def has_result(self) -> bool:
# True once some page stored a result via set_keystore_result /
# set_text_entry_account_result.
return (self._keystore_type != ResultType.UNKNOWN)
def get_keystore(self) -> KeyStore:
# NOTE(review): may still be None before a result was set, despite the
# non-Optional annotation — callers should check has_result() first.
return self._keystore
def set_keystore_result(self, result_type: ResultType, keystore: Optional[KeyStore]) -> None:
# Record the outcome; in account-creation mode the account is created
# immediately from the keystore.
self._keystore_type = result_type
self._keystore = keystore
if (keystore is None):
return
if (self.flags & WizardFlags.ACCOUNT_RESULT):
self._wallet.create_account_from_keystore(keystore)
def set_text_entry_account_result(self, result_type: ResultType, text_type: KeystoreTextType, script_type: ScriptType, text_matches: KeystoreMatchType, password: Optional[str]) -> None:
# Text-entry results are only meaningful in account-creation mode: there
# is no keystore object to hand back for other wizard flows.
self._keystore_type = result_type
if (self.flags & WizardFlags.ACCOUNT_RESULT):
assert (password is not None)
self._wallet.create_account_from_text_entries(text_type, script_type, text_matches, password)
else:
raise NotImplementedError('Invalid attempt to generate keyless keystore data')
def test_hyperparameter_job_config():
    """Round-trip a HyperparameterTuningJobConfig through its flyte IDL form."""
    original = hpo_job.HyperparameterTuningJobConfig(
        tuning_strategy=hpo_job.HyperparameterTuningStrategy.BAYESIAN,
        tuning_objective=hpo_job.HyperparameterTuningObjective(
            objective_type=hpo_job.HyperparameterTuningObjectiveType.MAXIMIZE,
            metric_name='test_metric',
        ),
        training_job_early_stopping_type=hpo_job.TrainingJobEarlyStoppingType.AUTO,
    )
    restored = hpo_job.HyperparameterTuningJobConfig.from_flyte_idl(original.to_flyte_idl())
    for attribute in ('tuning_strategy', 'tuning_objective', 'training_job_early_stopping_type'):
        assert getattr(restored, attribute) == getattr(original, attribute)
def test_select_subselect_with_alias():
    """Signature extraction should reduce an aliased sub-select to its inner table."""
    sql_statement = '\n SELECT count(*)\n FROM (\n SELECT count(id) AS some_alias, some_column\n FROM mytable\n GROUP BY some_colun\n HAVING count(id) > 1\n ) AS foo\n '
    assert extract_signature(sql_statement) == 'SELECT FROM mytable'
class OptionPlotoptionsPieSonificationDefaultspeechoptionsMapping(Options):
# Generated wrapper for the Highcharts option group
# plotOptions.pie.sonification.defaultSpeechOptions.mapping.
def pitch(self) -> 'OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingPitch':
# Sub-configuration accessor for 'pitch'.
return self._config_sub_data('pitch', OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingPitch)
def playDelay(self) -> 'OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingPlaydelay':
# Sub-configuration accessor for 'playDelay'.
return self._config_sub_data('playDelay', OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingPlaydelay)
def rate(self) -> 'OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingRate':
# Sub-configuration accessor for 'rate'.
return self._config_sub_data('rate', OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingRate)
def text(self):
# NOTE(review): getter/setter pair sharing one name — the original almost
# certainly carried @property / @text.setter decorators that were stripped
# during extraction; as written the def below shadows this one.  Confirm
# against VCS before changing.
return self._config_get(None)
def text(self, text: str):
self._config(text, js_type=False)
def time(self) -> 'OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingTime':
# Sub-configuration accessor for 'time'.
return self._config_sub_data('time', OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingTime)
def volume(self) -> 'OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingVolume':
# Sub-configuration accessor for 'volume'.
return self._config_sub_data('volume', OptionPlotoptionsPieSonificationDefaultspeechoptionsMappingVolume)
def test_content_transfer_encoding_header(client):
    """A multipart part carrying the deprecated Content-Transfer-Encoding header
    must be rejected with 400 and a descriptive error body."""
    body = b'--BOUNDARY\r\nContent-Disposition: form-data; name="file"; filename="bytes"\r\nContent-Transfer-Encoding: Base64Content-Type: application/x-falcon\r\n\r\nUGVyZWdyaW5lIEZhbGNvbiADLgA=\r\n--BOUNDARY\r\nContent-Disposition: form-data; name="empty"\r\nContent-Type: text/plain\r\n\r\n\r\n--BOUNDARY--\r\n'
    headers = {'Content-Type': 'multipart/form-data; boundary=BOUNDARY'}
    response = client.simulate_post('/submit', headers=headers, body=body)
    assert response.status_code == 400
    assert response.json == {
        'description': 'the deprecated Content-Transfer-Encoding header field is unsupported',
        'title': 'Malformed multipart/form-data request media',
    }
class OptionSeriesWaterfallSonificationContexttracksMappingLowpassResonance(Options):
# Generated wrapper for the Highcharts option group
# series.waterfall.sonification.contextTracks.mapping.lowpass.resonance.
# NOTE(review): every getter/setter pair below shares one name — the original
# almost certainly carried @property / @<name>.setter decorators that were
# stripped during extraction; as written each setter shadows its getter.
# Confirm against VCS before changing.
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False)
@pytest.mark.parametrize('request_1__group_by', ['42'])
@pytest.mark.parametrize('request_2__group_by', ['something else'])
@pytest.mark.usefixtures('request_1', 'request_2')
def test_users(dashboard_user, endpoint, session):
    """Each seeded request must appear as a single hit attributed to its user.

    NOTE(review): the bare '.parametrize(...)' lines in the original were
    decorator argument lists whose '@pytest.mark' prefix was lost; restored.
    The original assertions '(row['user'] == '42') or 'something else'' were
    always true (a non-empty string is truthy) — replaced with membership
    checks so they actually test the payload.
    """
    response = dashboard_user.get('dashboard/api/users/{0}'.format(endpoint.id))
    assert response.status_code == 200
    row1, row2 = response.json
    expected_users = ('42', 'something else')
    assert row1['hits'] == 1
    assert row1['user'] in expected_users
    assert row2['hits'] == 1
    assert row2['user'] in expected_users
class TestsAchromatic(util.ColorAsserts, unittest.TestCase):
    """Achromatic detection checks for the Okhsv color space."""

    def test_achromatic(self):
        """Zero value, (near-)zero saturation, or NaN channels read as achromatic;
        a genuinely saturated, bright color does not."""
        expectations = [
            ([270, 0.5, 0], True),
            ([270, 0, 0.5], True),
            ([270, 1e-06, 0.5], True),
            ([270, NaN, 0], True),
            ([270, 0.0, NaN], True),
            ([270, 0.5, 1], False),
            ([270, NaN, 1], True),
            ([270, 0.5, NaN], True),
            ([270, NaN, NaN], True),
        ]
        for coords, expected in expectations:
            self.assertEqual(Color('okhsv', coords).is_achromatic(), expected)
class BlockDeclList(Block):
# Attribute-grammar AST node: a declaration followed by the remainder of the
# block.  NOTE(review): the '-> (Decl.env decl)' style annotations below are
# not valid plain Python — this file appears to rely on a DSL/preprocessor
# that reads them as attribute equations (which child attribute each method
# computes); left byte-identical.
decl: Decl
block: Block
def decl_env(self) -> (Decl.env decl):
# The declaration is checked in this node's inherited environment.
return self.env
def block_same(self, decl) -> (Block.same block):
# Names declared at this level: the enclosing 'same' set plus decl's name.
return (self.same + [decl.new])
def block_env(self, decl) -> (Block.env block):
# Environment for the rest of the block includes the new declaration.
return append_dict(self.env, decl.new)
def ok(self, decl, block):
# Well-formed iff the name is not redeclared at this level and both
# children are themselves ok.
return ((decl.new not in self.same) and (decl.ok and block.ok))
def procs(self, decl, block):
# Collect procedure declarations (type 'void()') from this block.
if (decl.new[1] == 'void()'):
return append_dict(block.procs, decl.new)
else:
return block.procs
def ast_dict(self):
# Serializable representation of the subtree for debugging/output.
return {'decl': self.decl.ast_dict(), 'block': self.block.ast_dict(), 'class': self.__class__.__name__}
def extractNotsofriendlytranslationsWordpressCom(item):
    """Parser for 'Not So Friendly Translations' WordPress release items.

    Returns a release-message dict on a match, None for previews or posts
    without a chapter/volume, and False when nothing matched (the feed
    framework's convention for "not handled").
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    if item['tags'] == ['Uncategorized']:
        # Untagged posts are matched by title substring.  Exact-duplicate
        # entries from the original list were removed: only the first
        # occurrence of each (substring, series) pair was ever reachable.
        titlemap = [
            ('Venom Tongue c', 'I Quit the Going-Home Club for a Girl with a Venomous Tongue', 'Translated'),
            ('Villainess Father c', 'Since Ive Reincarnated as the Villainess Father, Ill Shower My Wife and Daughter in Love', 'Translated'),
            ('Kage Ga Usui c', 'Hazure Skill Kage ga Usui o Motsu Guild Shokuin ga, Jitsuha Densetsu no Ansatsusha', 'Translated'),
            ('Aristocrat Assassin c', 'The Best Assassin, Incarnated into a Different Worlds Aristocrat', 'Translated'),
        ]
        for titlecomponent, name, tl_type in titlemap:
            if titlecomponent.lower() in item['title'].lower():
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class PurgeInventoryTest(ForsetiTestCase):
    """Tests for InventoryApi.purge() retention behaviour.

    NOTE(review): the bare ``('google.cloud...', autospec=True)`` lines in the
    original were decorator argument lists whose ``@mock.patch`` prefix was
    lost during extraction (each test takes a ``mock_date_time`` argument);
    the decorators are restored below.
    """

    def setUp(self):
        ForsetiTestCase.setUp(self)

    def tearDown(self):
        ForsetiTestCase.tearDown(self)

    def populate_data(self):
        """Seed three inventory indices (1, 7 and 9 days before 2010-12-31),
        each with two inventory resources, and return an open session."""
        self.engine = create_test_engine()
        initialize(self.engine)
        self.scoped_sessionmaker = db.create_scoped_sessionmaker(self.engine)
        with self.scoped_sessionmaker() as session:
            inventory_indices = [
                InventoryIndex(id='one_day_old', created_at_datetime=datetime(2010, 12, 30, 8, 0, 0)),
                InventoryIndex(id='seven_days_old', created_at_datetime=datetime(2010, 12, 24, 8, 0, 0)),
                InventoryIndex(id='nine_days_old', created_at_datetime=datetime(2010, 12, 22, 8, 0, 0)),
            ]
            for i in inventory_indices:
                session.add(i)
            session.commit()
            session.expunge_all()
            inventory_resources = [
                Inventory(id=1, full_name=1, inventory_index_id='one_day_old'),
                Inventory(id=2, full_name=2, inventory_index_id='one_day_old'),
                Inventory(id=3, full_name=3, inventory_index_id='seven_days_old'),
                Inventory(id=4, full_name=4, inventory_index_id='seven_days_old'),
                Inventory(id=5, full_name=5, inventory_index_id='nine_days_old'),
                Inventory(id=6, full_name=6, inventory_index_id='nine_days_old'),
            ]
            for i in inventory_resources:
                session.add(i)
            session.commit()
            session.expunge_all()
            return session

    def get_inventory_api(self):
        """Return an InventoryApi wired to the test engine via a mock config."""
        mock_config = mock.MagicMock()
        mock_config.get_engine.return_value = self.engine
        mock_config.scoped_session.return_value = self.scoped_sessionmaker()
        return InventoryApi(mock_config)

    @mock.patch('google.cloud.forseti.services.inventory.inventory.date_time', autospec=True)
    def test_all_inventory_are_purged(self, mock_date_time):
        """retention_days='0' purges every index and resource."""
        session = self.populate_data()
        mock_date_time.get_utc_now_datetime.return_value = datetime(2010, 12, 31)
        inventory_api = self.get_inventory_api()
        inventory_api.purge(retention_days='0')
        inventory_indices = session.query(InventoryIndex).all()
        self.assertEqual(0, len(inventory_indices))
        resources = session.query(Inventory).all()
        self.assertEqual(0, len(resources))

    @mock.patch('google.cloud.forseti.services.inventory.inventory.date_time', autospec=True)
    def test_purge_is_disabled(self, mock_date_time):
        """retention_days='-1' disables purging entirely."""
        session = self.populate_data()
        mock_date_time.get_utc_now_datetime.return_value = datetime(2010, 12, 31)
        inventory_api = self.get_inventory_api()
        inventory_api.purge(retention_days='-1')
        inventory_indices = session.query(InventoryIndex).all()
        self.assertEqual(3, len(inventory_indices))
        resources = session.query(Inventory).all()
        self.assertEqual(6, len(resources))

    @mock.patch('google.cloud.forseti.services.inventory.inventory.date_time', autospec=True)
    def test_purge_uses_configuration_value(self, mock_date_time):
        """With retention_days=None, the configured retention value applies."""
        session = self.populate_data()
        mock_date_time.get_utc_now_datetime.return_value = datetime(2010, 12, 31)
        inventory_api = self.get_inventory_api()
        inventory_api.config.inventory_config.retention_days = (- 1)
        inventory_api.purge(retention_days=None)
        inventory_indices = session.query(InventoryIndex).all()
        self.assertEqual(3, len(inventory_indices))
        resources = session.query(Inventory).all()
        self.assertEqual(6, len(resources))

    @mock.patch('google.cloud.forseti.services.inventory.inventory.date_time', autospec=True)
    def test_no_inventory_is_purged(self, mock_date_time):
        """A retention window longer than the oldest index purges nothing."""
        session = self.populate_data()
        mock_date_time.get_utc_now_datetime.return_value = datetime(2010, 12, 31)
        inventory_api = self.get_inventory_api()
        inventory_api.purge(retention_days='30')
        inventory_indices = session.query(InventoryIndex).all()
        self.assertEqual(3, len(inventory_indices))
        resources = session.query(Inventory).all()
        self.assertEqual(6, len(resources))

    @mock.patch('google.cloud.forseti.services.inventory.inventory.date_time', autospec=True)
    def test_inventory_older_than_retention_days_are_purged(self, mock_date_time):
        """retention_days='5' keeps only the one-day-old index and its resources."""
        session = self.populate_data()
        mock_date_time.get_utc_now_datetime.return_value = datetime(2010, 12, 31)
        inventory_api = self.get_inventory_api()
        inventory_api.purge(retention_days='5')
        inventory_indices = session.query(InventoryIndex).all()
        self.assertEqual(1, len(inventory_indices))
        for i in inventory_indices:
            self.assertEqual('one_day_old', i.id)
        resources = session.query(Inventory).all()
        self.assertEqual(2, len(resources))
        for i in resources:
            self.assertEqual('one_day_old', i.inventory_index_id)
class OperatorObsidian():
    """Pipeline operator that converts Notion pages into Obsidian inbox notes.

    NOTE(review): several f-strings in the original contained the literal
    text '(unknown)' exactly where '{filename}' belongs (extraction damage);
    they are restored below — without this, _save_ob_page wrote every page
    to the same '(unknown)' path.
    """

    def dedup(self, pages):
        """Drop pages whose id has already been pushed to the Obsidian inbox."""
        print('')
        print('# Dedup Obsidian pages')
        print('')
        client = DBClient()
        deduped_pages = []
        for (page_id, page) in pages.items():
            name = page['name']
            print(f'Dedupping page, title: {name}')
            if client.get_obsidian_inbox_item_id('obsidian', 'default', page_id):
                print(f'Duplicated obsidian found, skip. page_id: {page_id}')
            else:
                deduped_pages.append(page)
        print(f'Pages after dedup: {len(deduped_pages)}')
        return deduped_pages

    def filters(self, pages: list, **kwargs):
        """Keep only pages whose 'user_rating' is at least min_rating (default 4)."""
        print('')
        print('# Filtering Obsidian pages')
        print('')
        min_rating = kwargs.setdefault('min_rating', 4)
        print(f'min_rating: {min_rating}, type: {type(min_rating)}')
        filtered_pages = []
        tot = 0
        skipped = 0
        for page in pages:
            name = page['name']
            user_rating = page['user_rating']
            tot += 1
            if (user_rating < min_rating):
                print(f'[INFO] Skip low quality content, name: {name}, user_rating: {user_rating}')
                skipped += 1
                continue
            filtered_pages.append(page)
        print(f'[INFO] Finished, total {tot}, skipped: {skipped}')
        return filtered_pages

    def push(self, pages, **kwargs):
        """Render every page to markdown and save it under the Obsidian data folder.

        The folder comes from kwargs['data_folder'] or $OBSIDIAN_FOLDER; pages
        are only marked visited after a successful save so failures are
        retried on the next run.
        """
        print('')
        print('# Push Obsidian pages')
        print('')
        data_folder = (kwargs.setdefault('data_folder', '') or os.getenv('OBSIDIAN_FOLDER'))
        if (not data_folder):
            print('[ERROR] Data folder path is invalid, skip pushing')
            return
        print(f'Data folder: {data_folder}, total pages: {len(pages)}')
        client = DBClient()
        notion_agent = NotionAgent()
        tot = 0
        err = 0
        skipped = 0
        for page in pages:
            page_id = page['id']
            tot += 1
            try:
                (filename, content) = self._gen_ob_page(page, notion_agent=notion_agent)
                if self._save_ob_page(data_folder, filename, content):
                    # '(unknown)' restored to {filename} (extraction damage).
                    print(f'[INFO] Gen obsidian page, filename: {filename}')
                    print(f'[INFO] Gen obsidian body, content: {content}')
                    self.markVisisted(page_id, db_client=client)
                else:
                    skipped += 1
            except Exception as e:
                print(f'[ERROR] Failed to push obsidian md: {e}')
                traceback.print_exc()
                err += 1
        print(f'[INFO] Finished, total {tot}, skipped: {skipped}, errors: {err}')

    def markVisisted(self, page_id, db_client=None):
        """Record page_id as pushed.  (The 'Visisted' typo is kept — the name is
        part of the public interface and is called elsewhere.)"""
        client = (db_client or DBClient())
        client.set_obsidian_inbox_item_id('obsidian', 'default', page_id)

    def _gen_ob_page(self, page, notion_agent: NotionAgent = None):
        """Build (filename, markdown content) for one Notion page.

        NOTE(review): assumes page['properties']['properties'] carries the raw
        Notion property payload — confirm against NotionAgent's output shape.
        """
        tpl_title = tpl_obsidian.TEMPLATE_OBSIDIAN_INBOX_FILE
        tpl_body = tpl_obsidian.TEMPLATE_OBSIDIAN_INBOX_BODY
        page_id = page['id']
        name = page['name']
        props = page['properties']['properties']
        source = (page.get('source') or props['Source']['select']['name'])
        created_at = page['created_at']
        rating = props['Rating']['number']
        user_rating = props['User Rating']['select']['name']
        alias = name
        to = notion_agent.extractRichText(props['To']['rich_text'])
        list_name = notion_agent.extractMultiSelect(props['List Name'])
        notion_url = page['notion_url']
        take_aways = notion_agent.extractRichText(props['Take Aways']['rich_text'])
        topic = notion_agent.extractMultiSelect(props['Topic'])
        category = notion_agent.extractMultiSelect(props['Category'])
        body = notion_agent.concatBlocksText(page['blocks'], separator='\n')
        filename = tpl_title.format(source, 'default', page_id)
        content = tpl_body.format(created_at, rating, user_rating, alias, to, source, list_name, notion_url, topic, category, take_aways, body)
        return (filename, content)

    def _save_ob_page(self, data_path, filename, content):
        """Write content to $WORKDIR/data_path/filename.

        Returns True on success, False when the folder is missing or the file
        already exists (so push() counts it as skipped, not an error).
        """
        workdir = os.getenv('WORKDIR')
        topdir = f'{workdir}/{data_path}'
        # '(unknown)' restored to {filename} (extraction damage).
        full_path = f'{topdir}/{filename}'
        print(f'[INFO] Obsidian data path: {topdir}, filename: {filename}, full_path: {full_path}')
        if (not os.path.exists(topdir)):
            print(f'[ERROR] Not found Obsidian folder, skip to save: {topdir}')
            return False
        if os.path.exists(full_path):
            print('[INFO] the file exists, skip')
            return False
        with open(full_path, 'w') as file:
            file.write(content)
        print(f'File saved: {full_path}')
        return True
def parse_web_form_error(html_text, variant='a'):
    """Extract the error message(s) from a rendered form page.

    variant 'a': plain bootstrap alert — returns the list of <li> texts.
    variant 'b': dismissable alert — returns the alert's stripped text.
    Exactly one matching alert div must be present (asserted).

    Raises:
        ValueError: for an unknown variant (previously this fell through and
            raised a confusing NameError on the unbound 'classes' variable).
    """
    soup = BeautifulSoup(html_text, 'html.parser')
    if (variant == 'a'):
        classes = 'alert alert-danger'
    elif (variant == 'b'):
        classes = 'alert alert-danger alert-dismissable'
    else:
        raise ValueError('unknown variant: {!r}'.format(variant))
    alerts = soup.findAll('div', class_=classes)
    assert (len(alerts) == 1)
    div = alerts[0]
    if (variant == 'a'):
        return [li.text for li in div.find_all('li')]
    return div.text.strip()
class OptionSeriesBarSonificationTracksMappingFrequency(Options):
# Generated wrapper for the Highcharts option group
# series.bar.sonification.tracks.mapping.frequency.
# NOTE(review): every getter/setter pair below shares one name — the original
# almost certainly carried @property / @<name>.setter decorators that were
# stripped during extraction; as written each setter shadows its getter.
# Confirm against VCS before changing.
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False)
def main(page: ft.Page):
# Flet demo: a responsive four-column grid plus a row of text fields, with a
# live page-width read-out pinned to the bottom-right corner via the overlay.
def page_resize(e):
# Refresh the width read-out whenever the window is resized.
pw.value = f'{page.width} px'
pw.update()
page.on_resize = page_resize
pw = ft.Text(bottom=50, right=50, style='displaySmall')
page.overlay.append(pw)
page.add(ft.ResponsiveRow([ft.Container(ft.Text('Column 1'), padding=5, bgcolor=ft.colors.YELLOW, col={'sm': 6, 'md': 4, 'xl': 2}), ft.Container(ft.Text('Column 2'), padding=5, bgcolor=ft.colors.GREEN, col={'sm': 6, 'md': 4, 'xl': 2}), ft.Container(ft.Text('Column 3'), padding=5, bgcolor=ft.colors.BLUE, col={'sm': 6, 'md': 4, 'xl': 2}), ft.Container(ft.Text('Column 4'), padding=5, bgcolor=ft.colors.PINK_300, col={'sm': 6, 'md': 4, 'xl': 2})]), ft.ResponsiveRow([ft.TextField(label='TextField 1', col={'md': 4}), ft.TextField(label='TextField 2', col={'md': 4}), ft.TextField(label='TextField 3', col={'md': 4})], run_spacing={'xs': 10}))
# Seed the read-out with the initial page width.
page_resize(None)
# NOTE(review): the bare ('Setup crossenv - {arch}') line below looks like a
# decorator argument list whose '@<name>' prefix was lost during extraction
# (a build-step/description decorator); confirm against VCS.
('Setup crossenv - {arch}')
def setup_crossenv(python: Command, pip: Command, arch: str='linux-x86_64') -> Command:
# Build a crossenv on top of the target CPython and return a pip command
# that installs packages directly into the target's site-packages.
cpython = ProjectPaths('cpython', arch)
crossenv = ProjectPaths('crossenv', arch)
crossenv.clean()
pip.install('crossenv')
# Create the crossenv against the target interpreter.
python('-m', 'crossenv', ((cpython.install / 'bin') / 'python3'), crossenv.install)
# Build-side pip: install build-time helpers (cffi) for the host.
build_pip = Command((((crossenv.install / 'build') / 'bin') / 'pip'))
build_pip.install('cffi')
base = ((crossenv.install / 'cross') / 'bin')
# Rebind to the cross python from here on.
python = Command((base / 'python3'))
# Locate the target interpreter's site-packages as the install target.
site = next(cpython.install.glob('**/site-packages'))
pip_install = Command((base / 'pip3'), 'install', '--target', site)
return pip_install
def test_scenario_a():
    """Players 0 and 1 split the pot; player 2 does not win."""
    def card(rank, suit):
        return Card(rank=rank, suit=suit)

    player_cards = [
        [card('ace', 'hearts'), card('8', 'diamonds')],
        [card('ace', 'spades'), card('3', 'diamonds')],
        [card('2', 'spades'), card('4', 'diamonds')],
    ]
    board_cards = [
        card('ace', 'clubs'),
        card('king', 'diamonds'),
        card('king', 'hearts'),
        card('8', 'spades'),
        card('3', 'spades'),
    ]
    _scenario_helper(winner_indices=[0, 1], player_cards=player_cards, board_cards=board_cards)
class TestHorizontalTypePropagation():
# Tests that TypePropagation spreads known types across an assignment in
# both directions, without overwriting already-known primitive types.
def test_backwards(self):
# A typed constant on the right should type the unknown variable on the left.
cfg = ControlFlowGraph()
cfg.add_node(BasicBlock(0, instructions=[Assignment((x := Variable('x', UnknownType(), ssa_label=0)), Constant(170, Integer.char()))]))
TypePropagation().propagate(TypeGraph.from_cfg(cfg))
assert (x.type == Integer.char())
def test_forward(self):
# A typed variable on the left should type the unknown constant on the right.
cfg = ControlFlowGraph()
cfg.add_node(BasicBlock(0, instructions=[Assignment(Variable('x', Integer.char(), ssa_label=0), (c := Constant(170, UnknownType())))]))
TypePropagation().propagate(TypeGraph.from_cfg(cfg))
assert (c.type == Integer.char())
def test_ignore_primitives(self):
# An already-known primitive type (float) must not be overwritten by the
# constant's integer type.
cfg = ControlFlowGraph()
cfg.add_node(BasicBlock(0, instructions=[Assignment((x := Variable('x', Float.float(), ssa_label=0)), Constant(170, Integer.char()))]))
TypePropagation().propagate(TypeGraph.from_cfg(cfg))
assert (x.type == Float.float())
def test_fallback_to_primitive(self):
# Conflicting integer widths: the variable keeps its own declared type.
cfg = ControlFlowGraph()
cfg.add_node(BasicBlock(0, instructions=[Assignment((x := Variable('x', Integer.uint8_t(), ssa_label=0)), Constant(170, Integer.int32_t()))]))
TypePropagation().propagate(TypeGraph.from_cfg(cfg))
assert (x.type == Integer.uint8_t())
def test_propagation(self):
# The propagated type must reach other occurrences of the same SSA variable
# (here: the use inside the Return).
cfg = ControlFlowGraph()
cfg.add_node(BasicBlock(0, instructions=[Assignment((x0 := Variable('x', UnknownType(), ssa_label=0)), Constant(170, Integer.char())), Return([(x := Variable('x', UnknownType(), ssa_label=0))])]))
TypePropagation().propagate(TypeGraph.from_cfg(cfg))
assert (x0.type == Integer.char())
assert (x.type == Integer.char())
def test_constants_identical_value_different_types(self):
# Two constants with the same value (0) but different types must not be
# merged into one typed node.
cfg = ControlFlowGraph()
cfg.add_node(BasicBlock(0, instructions=[Assignment((x := Variable('x', UnknownType(), ssa_label=0)), Constant(0, Integer.int64_t())), Assignment((y := Variable('y', UnknownType(), ssa_label=0)), Constant(0, Integer.int32_t()))]))
TypePropagation().propagate(TypeGraph.from_cfg(cfg))
assert (x.type == Integer.int64_t())
assert (y.type == Integer.int32_t())
def test_casted_constants_identical_value_different_types(self):
# Same-value constants behind casts: each variable keeps its declared type.
cfg = ControlFlowGraph()
cfg.add_node(BasicBlock(0, instructions=[Assignment((x := Variable('x', Integer.int32_t(), ssa_label=0)), UnaryOperation(OperationType.cast, [Constant(0, Integer.int8_t())], vartype=Integer.int32_t())), Assignment((y := Variable('y', Integer.int64_t(), ssa_label=0)), UnaryOperation(OperationType.cast, [Constant(0, Integer.int64_t())], vartype=Integer.int32_t()))]))
TypePropagation().propagate(TypeGraph.from_cfg(cfg))
assert (x.type == Integer.int32_t())
assert (y.type == Integer.int64_t())
# NOTE(review): the original line here was the bare no-op expression
# "(IBodyProducer)" — restored as the standard zope.interface decorator;
# confirm `implementer` is imported at module top.
@implementer(IBodyProducer)
class StringProducer():
    """Twisted ``IBodyProducer`` that supplies a fixed in-memory string body.

    The body is UTF-8 encoded once at construction and delivered to the
    consumer in a single synchronous write.
    """

    def __init__(self, body):
        """Encode *body* (a ``str``) to UTF-8 and record its byte length."""
        self.body = bytes(body, 'utf-8')
        # BUGFIX: `length` must be the *encoded* byte count, not the number
        # of characters — for non-ASCII input len(body) != len(self.body),
        # which would advertise a wrong Content-Length.
        self.length = len(self.body)

    def startProducing(self, consumer):
        """Write the whole body immediately and report completion."""
        consumer.write(self.body)
        return defer.succeed(None)

    def pauseProducing(self):
        """No-op: production completes synchronously, nothing to pause."""
        pass

    def stopProducing(self):
        """No-op: production has already finished by the time this is called."""
        pass
class LEDDemoHandler(Handler):
    """TraitsUI Handler that drives the demo's counters from a background thread.

    A worker thread started in ``init`` increments the model's counters until
    ``closed`` signals shutdown via the ``running`` flag, then waits for the
    worker to acknowledge via ``alive`` before letting the UI tear down.
    """
    # UIInfo captured in init() so the worker thread can reach the model
    # object (info.object).
    info = Instance(UIInfo)
    # Polled by the worker loop; cleared in closed() to request shutdown.
    running = Bool(True)
    # Set False by the worker on exit; closed() spins until it drops.
    alive = Bool(True)
    def init(self, info):
        """Called when the UI is created: store info and start the worker thread."""
        self.info = info
        # NOTE(review): the thread is non-daemon; clean process exit relies on
        # closed() being invoked so the worker loop terminates.
        Thread(target=self._update_counter).start()
        return True
    def closed(self, info, is_ok):
        """Called when the UI closes: stop the worker and wait for it to finish."""
        self.running = False
        # Busy-wait until the worker observes running == False and flips
        # `alive`, guaranteeing it no longer touches the now-closed UI.
        while self.alive:
            sleep(0.05)
    def _update_counter(self):
        # Worker loop: bump the model's counters roughly 100x/second.
        while self.running:
            self.info.object.counter1 += 1
            self.info.object.counter2 += 0.001
            sleep(0.01)
        # Acknowledge shutdown so closed() can return.
        self.alive = False
# NOTE(review): the two bare expression statements "('pyscf')" and
# "('thermoanalysis')" above this test were no-ops — almost certainly
# stripped `@using(...)` availability markers; restored below. Confirm
# that `using` is imported at module top.
@using('pyscf')
@using('thermoanalysis')
def test_opt_h2o_do_hess():
    """Optimize H2O (PySCF, STO-3G) with a final Hessian, then thermoanalyse.

    With ``do_hess: True`` the optimizer computes a final Hessian, so
    ``get_thermoanalysis`` can derive thermochemical quantities at T; the
    resulting dG is expected to match the 0.0 reference value.
    """
    T = 398.15
    run_dict = {
        'geom': {'type': 'redund', 'fn': 'lib:h2o.xyz'},
        'calc': {'type': 'pyscf', 'basis': 'sto3g', 'pal': 2, 'verbose': 0},
        'opt': {'thresh': 'gau', 'do_hess': True, 'T': T},
    }
    run_result = run_from_dict(run_dict)
    thermo = run_result.opt_geom.get_thermoanalysis(T=T)
    assert thermo.dG == pytest.approx(-0.0)
# NOTE(review): the original first line was the bare fragment
# ".xfail(raises=...)" — restored as the standard pytest marker; confirm
# `pytest` is imported at module top.
@pytest.mark.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
def test_average_regions_plot_log_vmin_vmax_colormap():
    """Plot averaged regions with vMin/vMax and the plasma colormap, then compare images."""
    outfile = NamedTemporaryFile(suffix='.png', prefix='average_region_log1p', delete=False)
    matrix = (ROOT + 'hicAverageRegions/result_range_100000.npz')
    args = '--matrix {} -o {} --vMax 20 --vMin 10 --colorMap plasma'.format(matrix, outfile.name).split()
    compute(hicPlotAverageRegions.main, args, 5)
    # tol=40 tolerates minor rendering differences between matplotlib versions.
    res = compare_images((ROOT + '/hicPlotAverageRegions/defaults_log_vmin_vmax.png'), outfile.name, tol=40)
    assert (res is None), res
    os.remove(outfile.name)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.