code stringlengths 281 23.7M |
|---|
class VonKries(CAT, metaclass=VonKriesMeta):
    """Von Kries chromatic adaptation transform."""

    NAME = 'von-kries'

    # Forward von Kries adaptation matrix (XYZ -> LMS-like cone space).
    MATRIX = [
        [0.40024, 0.7076, -0.08081],
        [-0.2263, 1.16532, 0.0457],
        [0.0, 0.0, 0.91822]
    ]

    def adapt(self, w1: Tuple[float, float], w2: Tuple[float, float], xyz: VectorLike) -> Vector:
        """Adapt the XYZ coordinates from white point ``w1`` to white point ``w2``."""
        if w1 == w2:
            # Identical white points: nothing to do.
            return list(xyz)
        # Matrices are cached per *sorted* white-point pair, so pick the
        # forward or inverse matrix depending on the adaptation direction.
        first, second = sorted([w1, w2])
        forward, inverse = cast(Type['VonKries'], self).get_adaptation_matrices(first, second)
        matrix = forward if first == w2 else inverse
        return alg.dot(matrix, xyz, dims=alg.D2_D1)
# NOTE(review): the '@pytest.mark' prefix of this marker was lost in
# extraction ('.parallel(nprocs=3)' is a syntax error on its own) and has
# been restored; requires `import pytest` at module scope.
@pytest.mark.parallel(nprocs=3)
def test_extruded_periodic_annulus():
    """Check that two constructions of the same annulus agree.

    Mesh 0: a circle manifold mesh extruded radially.
    Mesh 1: an interval mesh extruded periodically in the angular direction,
    then mapped to Cartesian annulus coordinates.

    The two meshes must agree (to ~1e-12) on: total volume, RTCF
    projections/integrals, and the solution of a mixed Poisson-type solve
    with Dirichlet conditions on the inner/outer boundaries.
    """
    m = 5
    n = 7
    # Mesh 0: radial extrusion of a circle -> annulus, inner radius 1, outer 2.
    mesh = CircleManifoldMesh(n)
    mesh0 = ExtrudedMesh(mesh, layers=m, layer_height=(1.0 / m), extrusion_type='radial')
    # Mesh 1: interval [1, 2] extruded periodically over the full angle 2*pi.
    mesh = IntervalMesh(m, 1.0, 2.0)
    mesh1 = ExtrudedMesh(mesh, layers=n, layer_height=((2 * pi) / n), extrusion_type='uniform', periodic=True)
    elem1 = mesh1.coordinates.ufl_element()
    coordV1 = FunctionSpace(mesh1, elem1)
    (x1, y1) = SpatialCoordinate(mesh1)
    # Map (r, theta) coordinates onto the Cartesian annulus.
    coord1 = Function(coordV1).interpolate(as_vector([(x1 * cos(y1)), (x1 * sin(y1))]))
    mesh1 = make_mesh_from_coordinates(coord1.topological, 'annulus')
    mesh1._base_mesh = mesh
    (x0, y0) = SpatialCoordinate(mesh0)
    (x1, y1) = SpatialCoordinate(mesh1)
    # Same domain => same volume.
    vol0 = assemble((Constant(1) * dx(domain=mesh0)))
    vol1 = assemble((Constant(1) * dx(domain=mesh1)))
    assert (abs((vol1 - vol0)) < 1e-12)
    # RTCF projections of the same smooth field must integrate identically.
    RTCF0 = FunctionSpace(mesh0, 'RTCF', 3)
    RTCF1 = FunctionSpace(mesh1, 'RTCF', 3)
    f0 = Function(RTCF0).project(as_vector([(sin(x0) + 2.0), (cos(y0) + 3.0)]), solver_parameters={'ksp_rtol': 1e-13})
    f1 = Function(RTCF1).project(as_vector([(sin(x1) + 2.0), (cos(y1) + 3.0)]), solver_parameters={'ksp_rtol': 1e-13})
    int0 = assemble((inner(f0, as_vector([(x0 + 5.0), (y0 + 7.0)])) * dx))
    int1 = assemble((inner(f1, as_vector([(x1 + 5.0), (y1 + 7.0)])) * dx))
    assert (abs((int1 - int0)) < 1e-12)
    # Boundary labels differ between the two extrusion types: the radial
    # extrusion uses 'bottom'/'top', the mapped one uses subdomain ids 1/2.
    inner_boun_id0 = 'bottom'
    outer_boun_id0 = 'top'
    inner_boun_id1 = 1
    outer_boun_id1 = 2
    # Mixed solve on mesh 0.
    DG0 = FunctionSpace(mesh0, 'DG', 2)
    W0 = (RTCF0 * DG0)
    (u0, p0) = TrialFunctions(W0)
    (v0, q0) = TestFunctions(W0)
    u0_ = f0
    p0_ = Function(DG0).interpolate(((x0 + y0) + 1.0))
    a0 = (((inner(u0, v0) + inner(p0, div(v0))) + inner(div(u0), q0)) * dx)
    L0 = (conj(q0) * dx)
    bcs0 = [DirichletBC(W0.sub(0), u0_, outer_boun_id0), DirichletBC(W0.sub(1), p0_, inner_boun_id0)]
    w0 = Function(W0)
    solve((a0 == L0), w0, bcs=bcs0)
    # Identical mixed solve on mesh 1.
    DG1 = FunctionSpace(mesh1, 'DG', 2)
    W1 = (RTCF1 * DG1)
    (u1, p1) = TrialFunctions(W1)
    (v1, q1) = TestFunctions(W1)
    u1_ = f1
    p1_ = Function(DG1).interpolate(((x1 + y1) + 1.0))
    a1 = (((inner(u1, v1) + inner(p1, div(v1))) + inner(div(u1), q1)) * dx)
    L1 = (conj(q1) * dx)
    bcs1 = [DirichletBC(W1.sub(0), u1_, outer_boun_id1), DirichletBC(W1.sub(1), p1_, inner_boun_id1)]
    w1 = Function(W1)
    solve((a1 == L1), w1, bcs=bcs1)
    # Compare both solution components via weighted integrals.
    uint0 = assemble((inner(w0.sub(0), as_vector([(sin(x0) + 0.2), (cos(y0) + 0.3)])) * dx))
    uint1 = assemble((inner(w1.sub(0), as_vector([(sin(x1) + 0.2), (cos(y1) + 0.3)])) * dx))
    assert (abs((uint1 - uint0)) < 1e-12)
    pint0 = assemble((inner(w0.sub(1), ((x0 * y0) + 2.0)) * dx))
    pint1 = assemble((inner(w1.sub(1), ((x1 * y1) + 2.0)) * dx))
    assert (abs((pint1 - pint0)) < 1e-12)
class ToptierAgency(models.Model):
    """A top-tier federal agency record, keyed by its unique toptier code."""

    toptier_agency_id = models.AutoField(primary_key=True)
    # Row bookkeeping; maintained automatically by Django on insert/update.
    create_date = models.DateTimeField(auto_now_add=True)
    update_date = models.DateTimeField(auto_now=True)
    # Canonical agency code; unique and indexed for fast lookups.
    toptier_code = models.TextField(db_index=True, unique=True)
    abbreviation = models.TextField(blank=True, null=True)
    name = models.TextField(db_index=True)
    # Optional descriptive/presentation fields.
    mission = models.TextField(blank=True, null=True)
    about_agency_data = models.TextField(blank=True, null=True)
    website = models.URLField(blank=True, null=True)
    justification = models.URLField(blank=True, null=True)
    icon_filename = models.TextField(blank=True, null=True)
    # Custom manager (defined elsewhere in this module).
    objects = ToptierAgencyManager()

    class Meta():
        db_table = 'toptier_agency'
def test_flatten_is_working_properly_with_no_references(create_test_data, trash_bin):
    """Flattening a scene with no references yields a project containing the
    workspace definition and the scene file itself."""
    fixture_data = create_test_data
    archiver = Archiver()
    version = fixture_data['asset2_model_main_v001']
    flattened_project = archiver.flatten([version.absolute_full_path])
    # Register for cleanup regardless of assertion outcome.
    trash_bin.append(flattened_project)
    assert os.path.exists(flattened_project)
    assert os.path.exists(os.path.join(flattened_project, 'workspace.mel'))
    assert os.path.exists(os.path.join(flattened_project, 'scenes', version.filename))
class AccountService(object):
    """Simulated trading account for a back-testing bot, persisted in Elasticsearch.

    Tracks cash and per-security positions for a single bot, applying buy/sell
    costs and slippage when orders execute. A bot name maps to exactly one
    account history; re-running a bot deletes its previous records.
    """

    def __init__(self, bot_name, timestamp, base_capital=1000000, buy_cost=0.001, sell_cost=0.001, slippage=0.001, stock_fuquan='hfq'):
        self.logger = logging.getLogger(__name__)
        self.base_capital = base_capital
        self.buy_cost = buy_cost
        self.sell_cost = sell_cost
        self.slippage = slippage
        self.stock_fuquan = stock_fuquan
        self.bot_name = bot_name
        # Wipe any prior run for this bot before starting fresh.
        account = es_get_latest_record(index='account', query={'term': {'botName': bot_name}})
        if account:
            self.logger.warning('bot:{} has run before,old result would be deleted'.format(bot_name))
            es_delete(index='account', query={'term': {'botName': bot_name}})
        es_index_mapping('account', Account)
        self.account = Account()
        self.account.botName = bot_name
        self.account.cash = self.base_capital
        self.account.positions = []
        self.account.value = self.base_capital
        self.account.timestamp = timestamp
        self.account.save()

    def get_account(self, refresh=True):
        """Return the account, optionally re-reading the latest document from ES."""
        if refresh:
            account_json = es_get_latest_record(index='account', query={'term': {'botName': self.bot_name}})
            self.account = Account()
            fill_doc_type(self.account, account_json)
        return self.account

    def get_current_position(self, security_id):
        """Return the held position for *security_id*, or a fresh empty one."""
        account = self.get_account()
        for position in account.positions:
            if position.securityId == security_id:
                return position
        return Position(security_id=security_id)

    def calculate_closing_account(self, the_date):
        """Mark all positions to the closing price of *the_date*.

        Unlocks T+1 amounts, revalues positions and settles the day's short
        P&L into cash. NOTE(review): the result is not persisted here —
        callers appear to be expected to invoke save_account(); confirm.
        """
        account = self.get_account()
        for position in account.positions:
            kdata = esapi.es_get_kdata(security_item=position['securityId'], the_date=the_date)
            closing_price = kdata['hfqClose']
            # After the close, everything becomes available for next-day trading.
            position.availableLong = position.longAmount
            position.availableShort = position.shortAmount
            position.value = ((position.longAmount * closing_price) + (position.shortAmount * closing_price))
            # Shorts gain when price falls; settle the day's move into cash.
            account.cash += (2 * (position.shortAmount * (position.averageShortPrice - closing_price)))
            position.averageShortPrice = closing_price
            position.averageLongPrice = closing_price

    def save_account(self):
        """Persist the in-memory account document to ES."""
        self.account.save()

    def update_account(self, security_id, new_position):
        """Replace the position for *security_id* with *new_position* and persist."""
        positions = [position for position in self.account.positions if position.securityId != security_id]
        positions.append(new_position)
        self.account.positions = positions
        self.account.save()

    def update_position(self, current_position, order_amount, current_price, order_type):
        """Fold a fill of *order_amount* at *current_price* into the position.

        Recomputes the volume-weighted average entry price of the side being
        increased. (Bug fix: the original multiplied current_price by itself
        instead of by order_amount.)
        """
        if order_type == ORDER_TYPE_LONG:
            long_amount = current_position.longAmount + order_amount
            current_position.averageLongPrice = (((current_position.averageLongPrice * current_position.longAmount) + (current_price * order_amount)) / long_amount)
            current_position.longAmount = long_amount
            # T+0 instruments are immediately available for closing.
            if current_position.tradingT == 0:
                current_position.availableLong += order_amount
        elif order_type == ORDER_TYPE_SHORT:
            short_amount = current_position.shortAmount + order_amount
            current_position.averageShortPrice = (((current_position.averageShortPrice * current_position.shortAmount) + (current_price * order_amount)) / short_amount)
            current_position.shortAmount = short_amount
            if current_position.tradingT == 0:
                current_position.availableShort += order_amount

    def buy(self, security_id, current_price, order_amount=0, order_pct=1.0, order_price=0):
        """Open or add to a long position."""
        self.order(security_id, current_price, order_amount, order_pct, order_price, order_type=ORDER_TYPE_LONG)

    def sell(self, security_id, current_price, order_amount=0, order_pct=1.0, order_price=0):
        """Open or add to a short position."""
        self.order(security_id, current_price, order_amount, order_pct, order_price, order_type=ORDER_TYPE_SHORT)

    def close_long(self, security_id, current_price, order_amount=0, order_pct=1.0, order_price=0):
        """Reduce/close a long position."""
        self.order(security_id, current_price, order_amount, order_pct, order_price, order_type=ORDER_TYPE_CLOSE_LONG)

    def close_short(self, security_id, current_price, order_amount=0, order_pct=1.0, order_price=0):
        """Reduce/close a short position."""
        self.order(security_id, current_price, order_amount, order_pct, order_price, order_type=ORDER_TYPE_CLOSE_SHORT)

    def order(self, security_id, current_price, order_amount=0, order_pct=1.0, order_price=0, order_type=ORDER_TYPE_LONG):
        """Execute a market order either by absolute amount (order_amount > 0)
        or as a percentage of cash/position (order_pct in (0, 1]).

        Raises:
            Exception: when cash or available position is insufficient.
        """
        if order_price == 0:
            # Market-order path only; limit orders (order_price != 0) are not implemented.
            current_position = self.get_current_position(security_id=security_id)
            if order_amount > 0:
                if order_type == ORDER_TYPE_LONG:
                    # Slippage and commission are charged on top of the notional.
                    need_money = ((order_amount * current_price) * ((1 + self.slippage) + self.buy_cost))
                    if self.account.cash >= need_money:
                        self.account.cash -= need_money
                        self.update_position(current_position, order_amount, current_price, order_type)
                    else:
                        raise Exception('not enough money')
                elif order_type == ORDER_TYPE_SHORT:
                    need_money = ((order_amount * current_price) * ((1 + self.slippage) + self.buy_cost))
                    if self.account.cash >= need_money:
                        self.account.cash -= need_money
                        self.update_position(current_position, order_amount, current_price, order_type)
                    else:
                        raise Exception('not enough money')
                elif order_type == ORDER_TYPE_CLOSE_LONG:
                    if current_position.availableLong >= order_amount:
                        self.account.cash += (order_amount * current_price)
                        current_position.availableLong -= order_amount
                        current_position.longAmount -= order_amount
                    else:
                        raise Exception('not enough position')
                elif order_type == ORDER_TYPE_CLOSE_SHORT:
                    if current_position.availableShort >= order_amount:
                        self.account.cash += (order_amount * current_price)
                        current_position.availableShort -= order_amount
                        current_position.shortAmount -= order_amount
                    else:
                        raise Exception('not enough position')
            elif 0 < order_pct <= 1:
                if order_type == ORDER_TYPE_LONG:
                    # Spend order_pct of cash; the integer number of shares is
                    # want_buy // cost, so charge want_buy minus the remainder.
                    cost = (current_price * ((1 + self.slippage) + self.buy_cost))
                    want_buy = (self.account.cash * order_pct)
                    if want_buy >= cost:
                        order_amount = (want_buy // cost)
                        self.account.cash -= (want_buy - (want_buy % cost))
                        self.update_position(current_position, order_amount, current_price, order_type)
                    else:
                        raise Exception('not enough money')
                elif order_type == ORDER_TYPE_SHORT:
                    # NOTE(review): order_amount is 0 on this pct path, so
                    # need_money is 0 and nothing is shorted — looks like a
                    # missing pct-based sizing; confirm intended behaviour.
                    need_money = ((order_amount * current_price) * ((1 + self.slippage) + self.buy_cost))
                    if self.account.cash >= need_money:
                        self.account.cash -= need_money
                        self.update_position(current_position, order_amount, current_price, order_type)
                    else:
                        raise Exception('not enough money')
                elif order_type == ORDER_TYPE_CLOSE_LONG:
                    if current_position.availableLong > 1:
                        order_amount = math.floor((current_position.availableLong * order_pct))
                        if order_amount != 0:
                            self.account.cash += (order_amount * current_price)
                            current_position.availableLong -= order_amount
                            current_position.longAmount -= order_amount
                        else:
                            # Bug fix: brace placeholders were passed with lazy
                            # args; use %-style so the message actually formats.
                            self.logger.warning('%s availableLong:%s order_pct:%s order_amount:%s', security_id, current_position.availableLong, order_pct, order_amount)
                    else:
                        raise Exception('not enough position')
                elif order_type == ORDER_TYPE_CLOSE_SHORT:
                    if current_position.availableShort > 1:
                        order_amount = math.floor((current_position.availableShort * order_pct))
                        if order_amount != 0:
                            self.account.cash += (order_amount * current_price)
                            # Bug fix: this branch decremented the *long* side
                            # of the position instead of the short side.
                            current_position.availableShort -= order_amount
                            current_position.shortAmount -= order_amount
                        else:
                            self.logger.warning('%s availableShort:%s order_pct:%s order_amount:%s', security_id, current_position.availableShort, order_pct, order_amount)
                    else:
                        raise Exception('not enough position')
            self.update_account(security_id, current_position)
class TestMACCore(unittest.TestCase):
    """Drive the MAC core DUT simulation with its streamer/logger generators."""

    def test(self):
        dut = DUT()
        # Generators per clock domain: the sys domain runs the main stimulus
        # plus the streamer/logger (and their randomizers); the eth domains
        # run the PHY model ends.
        sys_generators = [
            main_generator(dut),
            dut.streamer.generator(),
            dut.streamer_randomizer.generator(),
            dut.logger_randomizer.generator(),
            dut.logger.generator(),
        ]
        generators = {
            'sys': sys_generators,
            'eth_tx': [dut.phy_model.phy_sink.generator(), dut.phy_model.generator()],
            'eth_rx': dut.phy_model.phy_source.generator(),
        }
        # All domains run at the same 10 ns period.
        clocks = {'sys': 10, 'eth_rx': 10, 'eth_tx': 10}
        run_simulation(dut, generators, clocks, vcd_name='sim.vcd')
class TestFixEpoch(TestCase):
    """Tests for fix_epoch (normalising over-precise epoch timestamps to seconds)."""

    def test_fix_epoch(self):
        # NOTE(review): the original literal pairs were destroyed by
        # extraction garbling ('(, )' — a syntax error). These vectors are
        # reconstructed so each long epoch is the 10-digit second epoch with
        # extra precision digits appended (deci…nano variants); confirm
        # against the upstream test suite.
        for (long_epoch, epoch) in [
            (1459287636, 1459287636),
            (14592876369, 1459287636),
            (145928763699, 1459287636),
            (1459287636999, 1459287636),
            (14592876369999, 1459287636),
            (1459287636999999, 1459287636),
            (14592876369999999, 1459287636),
            (1459287636999999999, 1459287636),
        ]:
            assert (epoch == fix_epoch(long_epoch))

    def test_fix_epoch_raise(self):
        # Non-numeric input must be rejected explicitly.
        with pytest.raises(ValueError):
            fix_epoch(None)
class User(GraphObject):
    """py2neo OGM wrapper around a ``User`` node, keyed by username."""

    __primarykey__ = 'username'

    username = Property()

    def __init__(self, username):
        self.username = username

    def find(self):
        """Return the matching User object from the graph, or None."""
        user = self.match(graph, self.username).first()
        return user

    def register(self, password):
        """Create the user node if the username is free.

        Returns:
            True when the node was created, False when the username exists.
        """
        if (not self.find()):
            # Bug fix: the password argument was accepted but never stored,
            # leaving users without credentials.
            # NOTE(review): store a *hash* (e.g. bcrypt) rather than the raw
            # password before using this in production.
            user = Node('User', username=self.username, password=password)
            graph.create(user)
            return True
        else:
            return False
def get_serializable_launch_plan(entity_mapping: OrderedDict, settings: SerializationSettings, entity: LaunchPlan, recurse_downstream: bool=True, options: Optional[Options]=None) -> _launch_plan_models.LaunchPlan:
    """Serialize a flytekit LaunchPlan into its admin (IDL) model.

    Args:
        entity_mapping: Cache of already-serialized entities; mutated by
            get_serializable when recursing into the workflow.
        settings: Supplies project/domain/version for all identifiers built here.
        entity: The launch plan to serialize.
        recurse_downstream: When True, serialize the underlying workflow as
            well; when False, only build an identifier reference to it.
        options: Optional per-registration overrides (labels, annotations,
            notifications, raw output config, ...); each field falls back to
            the entity's own value when unset.

    Returns:
        The fully populated LaunchPlan model (spec plus an empty closure).
    """
    if recurse_downstream:
        # Serialize the workflow too and reuse the id of the produced spec.
        wf_spec = get_serializable(entity_mapping, settings, entity.workflow, options)
        wf_id = wf_spec.template.id
    else:
        # Reference-only: assume the workflow is registered separately.
        wf_id = _identifier_model.Identifier(resource_type=_identifier_model.ResourceType.WORKFLOW, project=settings.project, domain=settings.domain, name=entity.workflow.name, version=settings.version)
    if (not options):
        options = Options()
    # Raw output prefix: explicit option wins, then the entity's own config,
    # then an empty-location default.
    if (options and options.raw_output_data_config):
        raw_prefix_config = options.raw_output_data_config
    else:
        raw_prefix_config = (entity.raw_output_data_config or _common_models.RawOutputDataConfig(''))
    # Build the spec; every overridable field prefers options over the entity.
    lps = _launch_plan_models.LaunchPlanSpec(workflow_id=wf_id, entity_metadata=_launch_plan_models.LaunchPlanMetadata(schedule=entity.schedule, notifications=(options.notifications or entity.notifications)), default_inputs=entity.parameters, fixed_inputs=entity.fixed_inputs, labels=(options.labels or entity.labels or _common_models.Labels({})), annotations=(options.annotations or entity.annotations or _common_models.Annotations({})), auth_role=None, raw_output_data_config=raw_prefix_config, max_parallelism=(options.max_parallelism or entity.max_parallelism), security_context=(options.security_context or entity.security_context))
    lp_id = _identifier_model.Identifier(resource_type=_identifier_model.ResourceType.LAUNCH_PLAN, project=settings.project, domain=settings.domain, name=entity.name, version=settings.version)
    # Closure starts empty; the control plane fills in state/inputs/outputs.
    lp_model = _launch_plan_models.LaunchPlan(id=lp_id, spec=lps, closure=_launch_plan_models.LaunchPlanClosure(state=None, expected_inputs=interface_models.ParameterMap({}), expected_outputs=interface_models.VariableMap({})))
    return lp_model
def extract_concrete_prefix_index(bv):
    """Binary-search for the split index between the concrete high-bit prefix
    of a z3 bitvector and its symbolic remainder.

    Returns -1 when the vector is fully concrete or when no clean
    concrete/symbolic split exists; otherwise the bit index of the boundary.
    """
    try:
        # Fully concrete value: no symbolic suffix at all — signal with -1.
        get_concrete_int(bv)
        return (- 1)
    except AttributeError as e:
        # Not concrete — fall through to the bisection.
        # NOTE(review): assumes get_concrete_int raises AttributeError on
        # symbolic values; any other exception type would escape here.
        pass
    hi = (bv.size() - 1)
    lo = 0
    while (hi > lo):
        mid = (lo + ((hi - lo) // 2))
        # Split the bit range into a high half [hi..mid] and low half [mid-1..lo].
        hi_bv = z3.Extract(hi, mid, bv)
        lo_bv = z3.Extract((mid - 1), lo, bv)
        hi_concrete = is_bv_concrete(hi_bv)
        lo_concrete = is_bv_concrete(lo_bv)
        if (hi_concrete and (not lo_concrete)):
            # Boundary lies strictly below mid: narrow to the low half.
            hi = (mid - 1)
        elif ((not hi_concrete) and (not lo_concrete)):
            # Both halves symbolic: the boundary, if any, is above mid.
            # NOTE(review): when hi == lo + 1 this sets lo = mid == lo, making
            # no progress (and Extract(mid-1, lo) is invalid for lo == 0) —
            # confirm inputs exclude these cases.
            lo = mid
        else:
            # Mixed pattern with no clean split: give up.
            return (- 1)
    # NOTE(review): if the loop never runs (bv.size() == 1), `mid` is unbound
    # and this raises UnboundLocalError — TODO confirm callers guarantee
    # bv.size() > 1.
    return mid
def common_top_matter(out, name):
    """Write the license header, autogeneration banner and, for .h files, the
    opening of the include guard, to the generated-file stream *out*."""
    loxi_utils.gen_c_copy_license(out)
    # Bug fix: the C block-comment delimiters were lost ('/' instead of
    # '/**' ... '**/'), so the emitted banner was not a valid C comment.
    out.write(('\n/**\n * File: %s\n *\n * DO NOT EDIT\n *\n * This file is automatically generated\n *\n **/\n\n' % name))
    if (name[(- 2):] == '.h'):
        # Open the include guard; the matching #endif is emitted elsewhere.
        out.write(('\n#if !defined(%(h)s)\n#define %(h)s\n\n' % dict(h=h_file_to_define(name))))
class GPIO(GenericGPIO):
    """GPIO provider backed by an SMBus (I2C) device, addressing pins as
    per-register byte writes/reads."""

    def setup_module(self) -> None:
        """Open the configured I2C bus and remember the device address."""
        import smbus as gpio
        addr = self.config['dev_addr']
        bus = self.config['i2c_bus_num']
        self.bus = gpio.SMBus(bus)
        self.address = addr

    def setup_pin(self, pin: PinType, direction: PinDirection, pullup: PinPUD, pin_config: ConfigType, initial: Optional[str]=None) -> None:
        """Optionally drive the pin to its configured initial level.

        Direction/pullup are accepted but not used by this bus-level driver.
        """
        print(('setup_pin(pin=%r, direction=%r, pullup=%r, pin_config=%r, initial=%r)' % (pin, direction, pullup, pin_config, initial)))
        if (initial == 'high'):
            self.set_pin(pin, True)
        elif (initial == 'low'):
            self.set_pin(pin, False)

    def set_pin(self, pin: PinType, value: bool) -> None:
        """Write ON/OFF to the pin's register on the device."""
        self.bus.write_byte_data(self.address, pin, (ON if value else OFF))

    def get_pin(self, pin: PinType) -> bool:
        """Read the pin's register and report it as a boolean.

        Bug fix: read_byte_data is a method and must be *called*; the
        original subscripted it (read_byte_data[(addr, pin)]), which raises
        TypeError at runtime.
        """
        return bool(self.bus.read_byte_data(self.address, pin))

    def cleanup(self) -> None:
        """Release the I2C bus handle."""
        self.bus.close()
def valid_ovsdb_addr(addr):
    """Validate an OVSDB connection address.

    Accepts 'unix:<path>' (path must be an existing file) or
    '(tcp|ssl):<host>:<port>' with an IPv4 or bracketed IPv6 host.
    Returns True when valid, False otherwise.
    """
    unix_match = re.match('unix:(\\S+)', addr)
    if unix_match:
        # Unix-domain socket: valid only if the path exists as a file.
        return os.path.isfile(unix_match.group(1))
    net_match = re.match('(tcp|ssl):(\\S+):(\\d+)', addr)
    if not net_match:
        return False
    host = net_match.group(2)
    port = net_match.group(3)
    if '[' not in host:
        return (ip.valid_ipv4(host) and port.isdigit())
    # Bracketed IPv6 literal, e.g. tcp:[::1]:6640 — strip the brackets.
    host = host.strip('[').strip(']')
    return (ip.valid_ipv6(host) and port.isdigit())
class ArgumentParser():
    """Abstract stand-in mirroring argparse.ArgumentParser's parsing surface;
    concrete subclasses must provide the implementations."""

    def parse_args(self, args: Optional[Sequence[str]]=None, namespace: Optional[Namespace]=None) -> Namespace:
        """Parse *args* into a Namespace (erroring on unrecognized arguments).

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError

    def parse_known_args(self, args: Optional[Sequence[str]]=None, namespace: Optional[Namespace]=None) -> Tuple[(Namespace, List[str])]:
        """Parse *args*, returning the Namespace plus any leftover argument strings.

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError
def extractTimelesspavilionCom(item):
    """Map a timelesspavilion.com feed item to a release message.

    Returns None for previews or items without a volume/chapter number,
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items that carry neither a chapter nor a volume.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Util():
    """Stateless helpers; all methods are static.

    Bug fix: the methods took no ``self`` and were not declared static, so
    calling them on an *instance* raised TypeError. ``@staticmethod`` keeps
    ``Util.method(...)`` working and additionally fixes instance access.
    """

    @staticmethod
    def async_requests_available():
        """True when the interpreter supports the async-requests path (>= 3.5.3)."""
        return (sys.version_info >= (3, 5, 3))

    @staticmethod
    def ca_bundle_path():
        """Path to the bundled Facebook CA chain, relative to this module."""
        return os.path.join(os.path.dirname(__file__), '..', '..', 'fb_ca_chain_bundle.crt')

    @staticmethod
    def appsecret_proof(appsecret, access_token):
        """HMAC-SHA256 hex digest of the access token keyed by the app secret."""
        hmac_object = hmac.new(appsecret.encode('utf-8'), msg=access_token.encode('utf-8'), digestmod=hashlib.sha256)
        return hmac_object.hexdigest()
def upgrade():
    """Alembic migration: move users/groups and their permission tables from
    name-based keys to integer surrogate ids.

    Steps: add an autoincrement ``groups.id``; add *_id foreign-key columns
    to every permission/membership table; backfill them from the legacy
    name columns; rebuild primary keys on the id columns; then drop the
    legacy name columns. MySQL needs special-casing because it cannot add a
    column with an inline FK or autoincrement in all the same places.
    """
    c = get_context()
    insp = sa.inspect(c.connection.engine)
    # Introspect the real primary-key constraint names — they differ per
    # backend (e.g. MySQL names every PK 'PRIMARY'). The original code also
    # assigned literal defaults first, but those were dead: immediately
    # overwritten by these introspected values.
    groups_permissions_pkey = insp.get_pk_constraint('groups_permissions')['name']
    groups_pkey = insp.get_pk_constraint('groups')['name']
    groups_resources_permissions_pkey = insp.get_pk_constraint('groups_resources_permissions')['name']
    users_groups_pkey = insp.get_pk_constraint('users_groups')['name']
    users_permissions_pkey = insp.get_pk_constraint('users_permissions')['name']
    users_resources_permissions_pkey = insp.get_pk_constraint('users_resources_permissions')['name']
    # Bug fix: drop the *introspected* constraint name, not the hard-coded
    # literal 'groups_pkey' (which does not exist on MySQL).
    op.drop_constraint(groups_pkey, 'groups', type_='primary')
    # Add the surrogate id to groups; MySQL cannot create the column as
    # autoincrement before it is the PK, so flip the flag afterwards.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column('groups', sa.Column('id', sa.Integer, primary_key=True, autoincrement=False))
        op.create_primary_key(groups_pkey, 'groups', columns=['id'])
        op.alter_column('groups', 'id', type_=sa.Integer, existing_type=sa.Integer, autoincrement=True, existing_autoincrement=False, nullable=False)
    else:
        op.add_column('groups', sa.Column('id', sa.Integer, primary_key=True, autoincrement=True))
        op.create_primary_key(groups_pkey, 'groups', columns=['id'])
    # MySQL: drop legacy name-based foreign keys up front so the referenced
    # columns can be dropped later.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        for t in ['groups_permissions', 'groups_resources_permissions', 'users_groups']:
            for constraint in insp.get_foreign_keys(t):
                if (constraint['referred_columns'] == ['group_name']):
                    op.drop_constraint(constraint['name'], t, type_='foreignkey')
        for t in ['users_resources_permissions', 'users_permissions', 'users_groups']:
            for constraint in insp.get_foreign_keys(t):
                if (constraint['referred_columns'] == ['user_name']):
                    op.drop_constraint(constraint['name'], t, type_='foreignkey')
        for constraint in insp.get_foreign_keys('resources'):
            if (constraint['referred_columns'] in [['user_name'], ['group_name']]):
                op.drop_constraint(constraint['name'], 'resources', type_='foreignkey')
    # resources: add id-based owner columns and backfill from the name columns.
    op.add_column('resources', sa.Column('owner_user_id', sa.Integer(), sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='SET NULL')))
    op.add_column('resources', sa.Column('owner_group_id', sa.Integer(), sa.ForeignKey('groups.id', onupdate='CASCADE', ondelete='SET NULL')))
    resources_table = sa.Table('resources', sa.MetaData(), autoload=True, autoload_with=c.connection)
    users_table = sa.Table('users', sa.MetaData(), autoload=True, autoload_with=c.connection)
    groups_table = sa.Table('groups', sa.MetaData(), autoload=True, autoload_with=c.connection)
    stmt = resources_table.update().values(owner_user_id=users_table.c.id).where((users_table.c.user_name == resources_table.c.owner_user_name))
    op.execute(stmt)
    stmt = resources_table.update().values(owner_group_id=groups_table.c.id).where((groups_table.c.group_name == resources_table.c.owner_group_name))
    op.execute(stmt)
    # groups_permissions: add group_id, backfill, re-key the PK.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column('groups_permissions', sa.Column('group_id', sa.Integer()))
    else:
        op.add_column('groups_permissions', sa.Column('group_id', sa.Integer(), sa.ForeignKey('groups.id', onupdate='CASCADE', ondelete='CASCADE')))
    groups_permissions_table = sa.Table('groups_permissions', sa.MetaData(), autoload=True, autoload_with=c.connection)
    stmt = groups_permissions_table.update().values(group_id=groups_table.c.id).where((groups_table.c.group_name == groups_permissions_table.c.group_name))
    op.execute(stmt)
    op.drop_constraint(groups_permissions_pkey, 'groups_permissions', type_='primary')
    op.create_primary_key(groups_permissions_pkey, 'groups_permissions', columns=['group_id', 'perm_name'])
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(None, 'groups_permissions', 'groups', remote_cols=['id'], local_cols=['group_id'], onupdate='CASCADE', ondelete='CASCADE')
    # groups_resources_permissions: same treatment.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column('groups_resources_permissions', sa.Column('group_id', sa.Integer()))
    else:
        op.add_column('groups_resources_permissions', sa.Column('group_id', sa.Integer(), sa.ForeignKey('groups.id', onupdate='CASCADE', ondelete='CASCADE')))
    groups_resources_permissions_table = sa.Table('groups_resources_permissions', sa.MetaData(), autoload=True, autoload_with=c.connection)
    stmt = groups_resources_permissions_table.update().values(group_id=groups_table.c.id).where((groups_table.c.group_name == groups_resources_permissions_table.c.group_name))
    op.execute(stmt)
    op.drop_constraint(groups_resources_permissions_pkey, 'groups_resources_permissions', type_='primary')
    op.create_primary_key(groups_resources_permissions_pkey, 'groups_resources_permissions', columns=['group_id', 'resource_id', 'perm_name'])
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(None, 'groups_resources_permissions', 'groups', remote_cols=['id'], local_cols=['group_id'], onupdate='CASCADE', ondelete='CASCADE')
    # users_groups: needs both group_id and user_id before its PK is rebuilt.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column('users_groups', sa.Column('group_id', sa.Integer()))
    else:
        op.add_column('users_groups', sa.Column('group_id', sa.Integer(), sa.ForeignKey('groups.id', onupdate='CASCADE', ondelete='CASCADE')))
    users_groups_table = sa.Table('users_groups', sa.MetaData(), autoload=True, autoload_with=c.connection)
    stmt = users_groups_table.update().values(group_id=groups_table.c.id).where((groups_table.c.group_name == users_groups_table.c.group_name))
    op.execute(stmt)
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column('users_groups', sa.Column('user_id', sa.Integer()))
    else:
        op.add_column('users_groups', sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE')))
    users_groups_table = sa.Table('users_groups', sa.MetaData(), autoload=True, autoload_with=c.connection)
    stmt = users_groups_table.update().values(user_id=users_table.c.id).where((users_table.c.user_name == users_groups_table.c.user_name))
    op.execute(stmt)
    op.drop_constraint(users_groups_pkey, 'users_groups', type_='primary')
    op.create_primary_key(users_groups_pkey, 'users_groups', columns=['user_id', 'group_id'])
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(None, 'users_groups', 'groups', remote_cols=['id'], local_cols=['group_id'], onupdate='CASCADE', ondelete='CASCADE')
        op.create_foreign_key(None, 'users_groups', 'users', remote_cols=['id'], local_cols=['user_id'], onupdate='CASCADE', ondelete='CASCADE')
    # users_permissions.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column('users_permissions', sa.Column('user_id', sa.Integer()))
    else:
        op.add_column('users_permissions', sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE')))
    users_permissions_table = sa.Table('users_permissions', sa.MetaData(), autoload=True, autoload_with=c.connection)
    stmt = users_permissions_table.update().values(user_id=users_table.c.id).where((users_table.c.user_name == users_permissions_table.c.user_name))
    op.execute(stmt)
    op.drop_constraint(users_permissions_pkey, 'users_permissions', type_='primary')
    op.create_primary_key(users_permissions_pkey, 'users_permissions', columns=['user_id', 'perm_name'])
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(None, 'users_permissions', 'users', remote_cols=['id'], local_cols=['user_id'], onupdate='CASCADE', ondelete='CASCADE')
    # users_resources_permissions.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column('users_resources_permissions', sa.Column('user_id', sa.Integer()))
    else:
        op.add_column('users_resources_permissions', sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE')))
    users_resources_permissions_table = sa.Table('users_resources_permissions', sa.MetaData(), autoload=True, autoload_with=c.connection)
    stmt = users_resources_permissions_table.update().values(user_id=users_table.c.id).where((users_table.c.user_name == users_resources_permissions_table.c.user_name))
    op.execute(stmt)
    op.drop_constraint(users_resources_permissions_pkey, 'users_resources_permissions', type_='primary')
    op.create_primary_key(users_resources_permissions_pkey, 'users_resources_permissions', columns=['user_id', 'resource_id', 'perm_name'])
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(None, 'users_resources_permissions', 'users', remote_cols=['id'], local_cols=['user_id'], onupdate='CASCADE', ondelete='CASCADE')
    # Finally, drop every legacy name-based column.
    op.drop_column('resources', 'owner_user_name')
    op.drop_column('resources', 'owner_group_name')
    op.drop_column('groups_permissions', 'group_name')
    op.drop_column('groups_resources_permissions', 'group_name')
    op.drop_column('users_resources_permissions', 'user_name')
    op.drop_column('users_groups', 'group_name')
    op.drop_column('users_groups', 'user_name')
    op.drop_column('users_permissions', 'user_name')
def defaults_from_env(f):
    """Return *f* with parameter defaults overridden from the environment.

    For every parameter of ``f`` that has both an annotation and a default,
    the variable ``NUTILS_<PARAM>`` (upper-cased name) is parsed via
    ``stringly.loads`` against the annotation; on success it replaces the
    default (parse failures are reported as warnings and ignored). When no
    variable applies, ``f`` is returned unchanged.
    """
    # Local import: the garbled '(f)' line in the original was the
    # '@functools.wraps(f)' decorator, restored below.
    import functools
    sig = inspect.signature(f)
    params = []
    changed = False
    for param in sig.parameters.values():
        envname = f'NUTILS_{param.name.upper()}'
        if ((envname in os.environ) and (param.annotation != param.empty) and (param.default != param.empty)):
            try:
                v = stringly.loads(param.annotation, os.environ[envname])
            except Exception as e:
                warnings.warn(f'ignoring environment variable {envname}: {e}')
            else:
                param = param.replace(default=v)
                changed = True
        params.append(param)
    if (not changed):
        # Nothing overridden: hand back the original function untouched.
        return f
    sig = sig.replace(parameters=params)

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        # Bind against the patched signature so the new defaults apply, then
        # forward everything to the original function.
        bound = sig.bind(*args, **kwargs)
        bound.apply_defaults()
        return f(*bound.args, **bound.kwargs)
    wrapper.__signature__ = sig
    return wrapper
class ProductionOps():
    """Mixin of tree-navigation helpers for parsed production (AST) nodes.

    Relies on attributes literally named '__production', '__root' and
    '__parent' being set on instances. NOTE(review): because access goes
    through string literals, Python name mangling does NOT apply — the
    attributes must be stored under exactly these names.
    """

    def production_info(self) -> ProductionInfo:
        """Return this node's production metadata (AttributeError if unset)."""
        return getattr(self, '__production')

    def root(self):
        """Return the tree root, or None when not linked."""
        return getattr(self, '__root', None)

    def parent(self):
        """Return the parent node, or None (e.g. for the root)."""
        return getattr(self, '__parent', None)

    def children(self):
        """Yield each non-None direct child, flattening list-valued children."""
        children_info = self.production_info().children
        for name in children_info:
            child = getattr(self, name)
            if (child is None):
                continue
            # List-valued children are yielded element by element.
            for c in (child if children_info[name].is_list else [child]):
                (yield c)

    def descendants(self):
        """Yield all descendants depth-first (each child before its subtree).

        Called unbound (ProductionOps.children/descendants) so it also works
        on nodes that do not inherit from this mixin.
        """
        for child in ProductionOps.children(self):
            (yield child)
            if (child is not None):
                (yield from ProductionOps.descendants(child))

    def ancestors(self):
        """Yield the parent chain from the immediate parent up to the root."""
        ancestor = ProductionOps.parent(self)
        while (ancestor is not None):
            (yield ancestor)
            ancestor = ProductionOps.parent(ancestor)

    def find_ancestor_of_type(self, types: Type[T]) -> Optional[T]:
        """Return the nearest ancestor matching *types*, or None."""
        for ancestor in self.ancestors():
            if isinstance(ancestor, types):
                return ancestor

    def find_descendants_of_type(self, types: Type[T]) -> Iterator[T]:
        """Yield every descendant matching *types* (depth-first order)."""
        for descendant in self.descendants():
            if isinstance(descendant, types):
                (yield descendant)

    def find_children_of_type(self, types: Type[T]) -> Iterator[T]:
        """Yield every direct child matching *types*."""
        for child in self.children():
            if isinstance(child, types):
                (yield child)
class Delaunay2D(FilterBase):
    """Mayavi filter wrapping tvtk.Delaunay2D, which triangulates scattered
    points into a 2D surface."""

    # Persistence/versioning marker for the trait layout.
    __version__ = 0
    # The wrapped VTK filter instance; recorded so UI changes are scriptable.
    filter = Instance(tvtk.Delaunay2D, args=(), allow_none=False, record=True)
    # Accepts any attributes on these input dataset types.
    input_info = PipelineInfo(datasets=['structured_grid', 'poly_data', 'unstructured_grid'], attribute_types=['any'], attributes=['any'])
    # Always produces poly_data output.
    output_info = PipelineInfo(datasets=['poly_data'], attribute_types=['any'], attributes=['any'])
def test_namedtuple():
    """A namedtuple serialized through a default hook round-trips as a plain dict."""
    Pair = namedtuple('T', 'foo bar')

    def encode_hook(obj):
        # Convert our namedtuple to a dict; refuse anything else.
        if not isinstance(obj, Pair):
            raise TypeError(('Unsupported type %s' % (type(obj),)))
        return dict(obj._asdict())

    payload = packb(Pair(1, 42), strict_types=True, use_bin_type=True, default=encode_hook)
    assert unpackb(payload, raw=False) == {'foo': 1, 'bar': 42}
class OptionPlotoptionsPyramid3dSonificationTracksMappingPitch(Options):
    """Pitch-mapping options for pyramid3d sonification tracks.

    Bug fix: each option was defined as two plain methods with the same
    name, so the getter was silently discarded by the setter definition.
    The '@property' / '@<name>.setter' decorators (the convention for these
    generated Options classes) are restored so both accessors survive.
    """

    @property
    def mapFunction(self):
        """Mapping function for the parameter; default None."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property to map to; default 'y'."""
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Highest note; default 'c6'."""
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """Lowest note; default 'c2'."""
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        """Musical scale to constrain notes to; default None."""
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        """Extremes-evaluation context; default 'yAxis'."""
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
def zone_chart(activity_id=None, sport='run', metrics=None, chart_id='zone-chart', days=90, height=400, intensity='all'):
    """Build a horizontal bar chart of time spent in power/HR training zones.

    Samples come either from one activity (*activity_id*), from the last
    *days* days of the given *sport*, or additionally filtered to a workout
    *intensity*. Returns a dash dcc.Graph.

    Note: the default for *metrics* was a mutable list; it is now a None
    sentinel expanded to the same ('power_zone', 'hr_zone') pair.
    """
    if metrics is None:
        metrics = ('power_zone', 'hr_zone')
    if activity_id:
        df_samples = pd.read_sql(sql=app.session.query(stravaSamples).filter((stravaSamples.activity_id == activity_id)).statement, con=engine, index_col=['timestamp_local'])
    elif (intensity == 'all'):
        df_samples = pd.read_sql(sql=app.session.query(stravaSamples).filter(stravaSamples.type.like(sport), (stravaSamples.timestamp_local >= (datetime.now() - timedelta(days=days)))).statement, con=engine, index_col=['timestamp_local'])
    else:
        # Same window as above, then join the summary to filter on intensity.
        df_samples = pd.read_sql(sql=app.session.query(stravaSamples).filter(stravaSamples.type.like(sport), (stravaSamples.timestamp_local >= (datetime.now() - timedelta(days=days)))).statement, con=engine, index_col=['timestamp_local'])
        df_samples = df_samples.merge(pd.read_sql(sql=app.session.query(stravaSummary.activity_id, stravaSummary.workout_intensity).statement, con=engine), how='left', left_on='activity_id', right_on='activity_id')
        df_samples = df_samples[(df_samples['workout_intensity'] == intensity)]
    app.session.remove()
    data = []
    for metric in metrics:
        # One row per zone; each sample is one second of activity.
        zone_df = df_samples.groupby(metric).size().reset_index(name='counts')
        zone_df['seconds'] = zone_df['counts']
        zone_df['Percent of Total'] = (zone_df['seconds'] / zone_df['seconds'].sum())
        zone_map = {1: 'Zone 1', 2: 'Zone 2', 3: 'Zone 3', 4: 'Zone 4', 5: 'Zone 5', 6: 'Zone 6', 7: 'Zone 7'}
        zone_df[metric] = zone_df[metric].map(zone_map)
        zone_df = zone_df.sort_values(by=metric, ascending=False)
        # Hover labels: absolute time plus percentage of total.
        label = [((((('Time: ' + '<b>{}</b>'.format(timedelta(seconds=seconds))) + '<br>') + '% of Total: ') + '<b>{0:.0f}'.format((percentage * 100))) + '%') for (seconds, percentage) in zip(list(zone_df['seconds']), list(zone_df['Percent of Total']))]
        # "Low" share: zones 1-2 (1-3 for cycling power, whose zone model differs).
        per_low = zone_df[zone_df[metric].isin((['Zone 1', 'Zone 2', 'Zone 3'] if ((sport == 'Ride') and (metric == 'power_zone')) else ['Zone 1', 'Zone 2']))]['Percent of Total'].sum()
        # NOTE(review): colors is only assigned for hr_zone/power_zone; any
        # other metric would hit a NameError below — confirm callers never
        # pass other metrics.
        if (metric == 'hr_zone'):
            colors = ['rgb(174, 18, 58)', 'rgb(204, 35, 60)', 'rgb(227, 62, 67)', 'rgb(242, 98, 80)', 'rgb(248, 130, 107)', 'rgb(252, 160, 142)', 'rgb(255, 190, 178)']
        elif (metric == 'power_zone'):
            colors = ['rgb(44, 89, 113)', 'rgb(49, 112, 151)', 'rgb(53, 137, 169)', 'rgb(69, 162, 185)', 'rgb(110, 184, 197)', 'rgb(147, 205, 207)', 'rgb(188, 228, 216)']
        data.append(go.Bar(name=('HR ({:.0f}% Low)'.format((per_low * 100)) if (metric == 'hr_zone') else 'Power ({:.0f}% Low)'.format((per_low * 100))), y=zone_df[metric], x=zone_df['Percent of Total'], orientation='h', text=[('{0:.0f}'.format((percentage * 100)) + '%') for percentage in list(zone_df['Percent of Total'])], hovertext=label, hoverinfo='text', textposition='auto', width=0.4, marker={'color': colors}))
    return dcc.Graph(
        id=chart_id,
        style={'height': '100%'},
        config={'displayModeBar': False},
        figure={
            'data': data,
            'layout': go.Layout(
                title=('Time in Zones' if (height < 400) else ''),
                font=dict(size=10, color=white),
                height=height,
                autosize=True,
                xaxis=dict(hoverformat='.1%', tickformat='%', showgrid=False),
                yaxis=dict(autorange='reversed', showgrid=False, categoryarray=(['Zone 5', 'Zone 4', 'Zone 3', 'Zone 2', 'Zone 1'] if (sport == 'run') else ['Zone 7', 'Zone 6', 'Zone 5', 'Zone 4', 'Zone 3', 'Zone 2', 'Zone 1'])),
                showlegend=True,
                hovermode='closest',
                legend=(dict(x=0.5, y=1.1, bgcolor='rgba(127, 127, 127, 0)', xanchor='center', orientation='h') if (height >= 400) else dict(x=0.85, bgcolor='rgba(127, 127, 127, 0)', xanchor='center')),
                margin={'l': 45, 'b': 0, 't': 20, 'r': 0},
            ),
        },
    )
def test_explode_get_fastas_file_by_taxon(o_dir, e_dir, request):
    """Explode a monolithic FASTA by taxon and compare every output file
    against the pre-computed expected sequences."""
    program = 'bin/assembly/phyluce_assembly_explode_get_fastas_file'
    output = os.path.join(o_dir, 'exploded-by-taxa')
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--input', os.path.join(e_dir, 'taxon-set.complete.fasta'),
        '--output', output,
        '--by-taxon',
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    # Surface the program's stderr when it exits non-zero.
    assert (proc.returncode == 0), print('{}'.format(stderr.decode('utf-8')))
    expected = os.path.join(e_dir, 'exploded-by-taxa')
    taxa = ('alligator-mississippiensis', 'gallus-gallus', 'peromyscus-maniculatus', 'rana-sphenocephafa')
    for taxon in taxa:
        fname = '{}.unaligned.fasta'.format(taxon)
        observed = SeqIO.to_dict(SeqIO.parse(os.path.join(output, fname), 'fasta'))
        reference = SeqIO.to_dict(SeqIO.parse(os.path.join(expected, fname), 'fasta'))
        for name, record in observed.items():
            # Record names embed the taxon with underscores instead of dashes.
            assert taxon.replace('-', '_') in name
            assert record.seq == reference[name].seq
def test_extend_memory_doesnt_increase_until_32_bytes_are_used(computation):
    """Memory grows in whole 32-byte words, not byte-by-byte."""
    def memory_size():
        return len(computation._memory._bytes)
    # Two one-byte extensions both fit inside the first 32-byte word.
    computation.extend_memory(0, 1)
    computation.extend_memory(1, 1)
    assert memory_size() == 32
    # Touching bytes past the first word allocates a second full word.
    computation.extend_memory(2, 32)
    assert memory_size() == 64
    assert computation._gas_meter.gas_remaining == 94
class Foo(HasTraits):
    # Three plain float traits used to exercise trait change notification.
    bar = Float
    baz = Float
    fuz = Float
    def _bar_changed(self):
        # Static notification handler (traits naming convention _<name>_changed).
        pass
    # NOTE(review): the bare `_trait_change(...)` calls below look like
    # decorator lines that lost their leading '@' in transcription (i.e.
    # probably `@_trait_change('bar')` applied to the following method) —
    # confirm against the original source.
    _trait_change('bar')
    def _on_bar_change_notification(self):
        pass
    _trait_change('baz')
    def _on_baz_change_notification(self):
        # Mutating `bar` from inside a handler triggers a notification cascade.
        self.bar += 1
    _trait_change('fuz')
    def _on_fuz_change_notification(self):
        self.bar += 1
        # Raising from a handler exercises the framework's exception handling.
        raise FuzException('method')
class TestAsyncAggregator():
    """Tests for asynchronous and FedBuff federated-learning aggregation.

    NOTE(review): the bare `.parametrize(...)` lines before two methods
    below look like pytest decorators that lost their '@pytest.mark'
    prefix in transcription — confirm against the original source.
    """
    def _test_one_step(self, param_after_local_training: float, param_after_global_training: float, weight: float, config: AsyncAggregatorConfig) -> None:
        # Drive one client-training-end step on a 1-D quadratic toy model and
        # compare the global parameter with the closed-form expectation.
        init_val = 1
        global_model = MockQuadratic1DFL(Quadratic1D())
        async_aggregator = instantiate(config, global_model=global_model)
        local_model = FLModelParamUtils.clone(global_model)
        delta = FLModelParamUtils.clone(global_model)
        local_model.fl_get_module().x.data = torch.Tensor([param_after_local_training])
        global_model.fl_get_module().x.data = torch.Tensor([param_after_global_training])
        # Client delta = initial value minus value after local training.
        delta.fl_get_module().x.data = torch.Tensor([(init_val - param_after_local_training)])
        async_aggregator.on_client_training_end(client_delta=delta, final_local_model=local_model, weight=weight)
        if (config.aggregation_type == AsyncAggregationType.fed_buff_aggregation):
            # FedBuff: global parameter moves by weight * (local change).
            global_model_expected = (param_after_global_training + (weight * (param_after_local_training - init_val)))
        else:
            # FedAsync: weighted average of global and local parameters.
            global_model_expected = (((1 - weight) * param_after_global_training) + (weight * param_after_local_training))
        assertTrue(torch.allclose(global_model.fl_get_module().x.data, torch.Tensor([global_model_expected]), atol=1e-07))
    def test_fed_buff_aggregation(self) -> None:
        """FedBuff step matches the closed form for random values and weights."""
        num_random_values = 5
        num_random_weights = 5
        for config in [FedAvgWithLRAsyncAggregatorConfig(aggregation_type=AsyncAggregationType.fed_buff_aggregation, lr=1.0), FedAvgWithLRFedBuffAggregatorConfig(aggregation_type=AsyncAggregationType.fed_buff_aggregation, lr=1.0, buffer_size=1)]:
            for _ in range(num_random_values):
                for _ in range(num_random_weights):
                    self._test_one_step(param_after_local_training=np.random.random_sample(), param_after_global_training=np.random.random_sample(), weight=np.random.random_sample(), config=config)
    def test_fed_async_aggregation(self) -> None:
        """FedAsync step matches the closed form for random values and weights."""
        num_random_values = 5
        num_random_weights = 5
        for _ in range(num_random_values):
            for _ in range(num_random_weights):
                self._test_one_step(param_after_local_training=np.random.random_sample(), param_after_global_training=np.random.random_sample(), weight=np.random.random_sample(), config=FedAvgWithLRAsyncAggregatorConfig(aggregation_type=AsyncAggregationType.fed_async_aggregation, lr=1.0))
    def _create_n_clients(self, num_clients):
        # Each mock client gets fresh delta/after-train nets and a random weight.
        return [MockClientModel(delta=SampleNet(TwoFC()), after_train=SampleNet(TwoFC()), weight=np.random.random_sample()) for _ in range(num_clients)]
    def _symmetry_test(self, num_users, fedbuff_config) -> str:
        # Aggregate the same clients in two different orders; returns the name
        # of any mismatched parameter ('' when the two results agree).
        fedbuff_global_model_1 = SampleNet(TwoFC())
        fedbuff_global_model_2 = FLModelParamUtils.clone(fedbuff_global_model_1)
        fedbuff_aggregator_1 = instantiate(fedbuff_config, global_model=fedbuff_global_model_1)
        fedbuff_aggregator_2 = instantiate(fedbuff_config, global_model=fedbuff_global_model_2)
        client_models = self._create_n_clients(num_users)
        for client_model in client_models:
            fedbuff_aggregator_1.zero_grad()
            fedbuff_aggregator_1.on_client_training_end(client_model.delta, client_model.after_train, weight=client_model.weight)
        random.shuffle(client_models)
        for client_model in client_models:
            fedbuff_aggregator_2.zero_grad()
            fedbuff_aggregator_2.on_client_training_end(client_model.delta, client_model.after_train, weight=client_model.weight)
        return FLModelParamUtils.get_mismatched_param(models=[fedbuff_global_model_1.fl_get_module(), fedbuff_global_model_2.fl_get_module()], rel_epsilon=1e-06, abs_epsilon=1e-06)
    def _equivalence_test(self, num_users, fedbuff_config, async_config) -> str:
        # FedBuff with buffer_size == num_users should produce the same global
        # model as pure async aggregation over the same client sequence.
        async_global_model = SampleNet(TwoFC())
        fedbuff_global_model = FLModelParamUtils.clone(async_global_model)
        async_aggregator = instantiate(async_config, global_model=async_global_model)
        fedbuff_aggregator = instantiate(fedbuff_config, global_model=fedbuff_global_model)
        client_models = self._create_n_clients(num_users)
        for client_model in client_models:
            async_aggregator.zero_grad()
            async_aggregator.on_client_training_end(client_model.delta, client_model.after_train, weight=client_model.weight)
        for client_model in client_models:
            fedbuff_aggregator.zero_grad()
            fedbuff_aggregator.on_client_training_end(client_model.delta, client_model.after_train, weight=client_model.weight)
        return FLModelParamUtils.get_mismatched_param(models=[async_global_model.fl_get_module(), fedbuff_global_model.fl_get_module()], rel_epsilon=1e-06, abs_epsilon=1e-06)
    def test_fedbuff_async_symmetry(self) -> None:
        """Client order must not change the FedBuff result (buffer_size=1)."""
        num_users = 10
        global_lr = 1.0
        fedbuff_config = FedAvgWithLRFedBuffAggregatorConfig(lr=global_lr, buffer_size=1)
        error_msg = self._symmetry_test(num_users=num_users, fedbuff_config=fedbuff_config)
        assertEmpty(error_msg, msg=error_msg)
    def test_fedbuff_async_equivalence(self) -> None:
        """FedBuff with buffer_size == num_users equals async aggregation."""
        num_users = 10
        global_lr = 1.0
        async_config = FedAvgWithLRAsyncAggregatorConfig(lr=global_lr)
        fedbuff_config = FedAvgWithLRFedBuffAggregatorConfig(lr=global_lr, buffer_size=10)
        error_msg = self._equivalence_test(num_users=num_users, fedbuff_config=fedbuff_config, async_config=async_config)
        assertEmpty(error_msg, msg=error_msg)
    def test_global_update(self) -> None:
        """The global model is updated exactly every `buffer_size` clients."""
        num_epochs = 5
        for _ in range(num_epochs):
            num_total_users = np.random.randint(1, 20)
            buffer_size = np.random.randint(1, (num_total_users + 1))
            fedbuff_config = FedAvgWithLRFedBuffAggregatorConfig(lr=1.0, buffer_size=buffer_size)
            global_model = SampleNet(TwoFC())
            fedbuff_aggregator = instantiate(fedbuff_config, global_model=global_model)
            client_models = self._create_n_clients(num_total_users)
            for (client_num, client) in enumerate(client_models):
                is_global_model_updated = fedbuff_aggregator.on_client_training_end(client.delta, client.after_train, weight=1)
                should_update_global_model = (((client_num + 1) % buffer_size) == 0)
                assertEqual(is_global_model_updated, should_update_global_model)
    def train_async_with_zero_weight(self, initial_model: IFLModel, client_models: List[MockClientModel], num_epochs: int, num_total_users: int, momentum: float, train_with_zero_weight_in_middle: bool) -> IFLModel:
        """Train for `num_epochs` epochs with momentum, optionally inserting a
        middle phase of zero-weight updates, and return the global model."""
        assert ((num_epochs % 2) == 0), 'Training must be over even number of epochs'
        config = FedAvgWithLRWithMomentumAsyncAggregatorConfig(lr=1.0, momentum=momentum)
        aggregator = instantiate(config, global_model=initial_model)
        half_epochs = int((num_epochs / 2))
        def print_debug(prefix: str):
            # Dump only the first optimizer-state entry, for debugging.
            for (key, value) in aggregator.optimizer.state.items():
                print(f'{prefix}: {key}:{value}')
                break
        for _ in range(half_epochs):
            for client in client_models:
                aggregator.on_client_training_end(client.delta, client.after_train, weight=1)
        print_debug('After first loop')
        if train_with_zero_weight_in_middle:
            # Zero-weight updates should be no-ops, including on momentum state.
            for _ in range(half_epochs):
                for client in client_models:
                    aggregator.on_client_training_end(client.delta, client.after_train, weight=0)
            print_debug('After second loop')
        for _ in range(half_epochs):
            for client in client_models:
                aggregator.on_client_training_end(client.delta, client.after_train, weight=1)
        print_debug('After third loop')
        return aggregator.global_model
    # NOTE(review): probably '@pytest.mark.parametrize(...)' originally.
    .parametrize('num_total_users,num_epochs, momentum', [(1, 2, 0.5), (10, 10, 0.5), (10, 10, 0)])
    def test_momentum_implementation_zero_weight(self, num_total_users: int, num_epochs: int, momentum: float) -> None:
        """Zero-weight training rounds must leave the model unchanged."""
        initial_model = SampleNet(TwoFC())
        client_models = self._create_n_clients(num_total_users)
        torch.manual_seed(1)
        np.random.seed(1)
        global_model_trained1 = self.train_async_with_zero_weight(initial_model=FLModelParamUtils.clone(initial_model), client_models=client_models, num_epochs=num_epochs, num_total_users=num_total_users, momentum=momentum, train_with_zero_weight_in_middle=False)
        torch.manual_seed(1)
        np.random.seed(1)
        global_model_trained2 = self.train_async_with_zero_weight(initial_model=FLModelParamUtils.clone(initial_model), client_models=client_models, num_epochs=num_epochs, num_total_users=num_total_users, momentum=momentum, train_with_zero_weight_in_middle=True)
        error_msg = verify_models_equivalent_after_training(global_model_trained1, global_model_trained2, initial_model, rel_epsilon=1e-06, abs_epsilon=1e-06)
        assertEqual(error_msg, '')
    # NOTE(review): probably '@pytest.mark.parametrize(...)' originally.
    .parametrize('num_total_users,num_epochs, momentum, lr', [(1, 2, 0.5, 10), (10, 10, 0.5, 10), (10, 10, 0, 10)])
    def test_momentum_implementation_one_weight(self, num_total_users: int, num_epochs: int, momentum: float, lr: float) -> None:
        """Weight-1 momentum aggregation must match plain torch SGD+momentum."""
        # NOTE(review): the parametrized arguments are overwritten below, which
        # defeats the parametrization — confirm whether this is intentional.
        momentum = 0.5
        num_epochs = 10
        num_total_users = 10
        lr = 1.0
        initial_model = SampleNet(TwoFC())
        client_models = self._create_n_clients(num_total_users)
        torch.manual_seed(1)
        np.random.seed(1)
        config = FedAvgWithLRWithMomentumAsyncAggregatorConfig(lr=lr, momentum=momentum)
        aggregator = instantiate(config, global_model=FLModelParamUtils.clone(initial_model))
        for _ in range(num_epochs):
            for client in client_models:
                aggregator.on_client_training_end(client.delta, client.after_train, weight=1)
        torch.manual_seed(1)
        np.random.seed(1)
        sgd_model = FLModelParamUtils.clone(initial_model)
        sgd_optimizer = torch.optim.SGD(sgd_model.fl_get_module().parameters(), lr=lr, momentum=momentum)
        for _ in range(num_epochs):
            for client in client_models:
                # Use the client delta as the gradient and take one SGD step.
                FLModelParamUtils.set_gradient(model=sgd_model.fl_get_module(), reference_gradient=client.delta.fl_get_module())
                sgd_optimizer.step()
        error_msg = verify_models_equivalent_after_training(aggregator.global_model, sgd_model, initial_model, rel_epsilon=1e-06, abs_epsilon=1e-06)
        assertEqual(error_msg, '')
def format_mr0(bl, cl, wr, dll_reset):
    """Encode DDR4 Mode Register 0 from burst length, CAS latency,
    write recovery and the DLL-reset flag.

    Relies on the module-level ``cl_to_mr0`` table for the CAS-latency
    bit encoding.
    """
    burst_bits = {4: 2, 8: 0}[bl]
    write_recovery_bits = {10: 0, 12: 1, 14: 2, 16: 3, 18: 4, 20: 5, 24: 6, 22: 7, 26: 8, 28: 9}[wr]
    cas_bits = cl_to_mr0[cl]
    # Assemble the register: CL is split across bits 2, 4-6 and 12;
    # WR across bits 9-11 and 13; DLL reset sits at bit 8.
    return (
        burst_bits
        | ((cas_bits & 1) << 2)
        | (((cas_bits >> 1) & 7) << 4)
        | (((cas_bits >> 4) & 1) << 12)
        | (dll_reset << 8)
        | ((write_recovery_bits & 7) << 9)
        | ((write_recovery_bits >> 3) << 13)
    )
class VideoRecordingList(ResourceList):
    """List endpoint for video recordings, optionally scoped to a stream.

    For BigBlueButton-backed streams, recordings are synced from the BBB
    `getRecordings` API into the database before the list is served.
    """
    def before_get(self, args, kwargs):
        # When scoped to a stream, enforce organizer access for BBB channels
        # and refresh the stored recordings from the BBB API.
        if kwargs.get('video_stream_id'):
            stream = safe_query_kwargs(VideoStream, kwargs, 'video_stream_id', 'id')
            if (stream.channel and (stream.channel.provider == 'bbb')):
                if (not has_access('is_organizer', event_id=stream.event_id)):
                    raise ForbiddenError({'pointer': 'event_id'}, 'You need to be the event organizer to access video recordings.')
                if (stream.extra is not None):
                    # `extra` holds the original BBB create-meeting response.
                    params = dict(meetingID=stream.extra['response']['meetingID'])
                    channel = stream.channel
                    bbb = BigBlueButton(channel.api_url, channel.api_key)
                    result = bbb.request('getRecordings', params)
                    if result.data['response']['recordings']:
                        recordings = []
                        # BBB returns a single dict when there is exactly one
                        # recording, otherwise a list — normalize to a list.
                        if (type(result.data['response']['recordings']['recording']) is list):
                            recordings = result.data['response']['recordings']['recording']
                        else:
                            recordings.append(result.data['response']['recordings']['recording'])
                        for recording in recordings:
                            # BBB timestamps are milliseconds since the epoch.
                            get_or_create(VideoRecording, bbb_record_id=recording['recordID'], participants=recording['participants'], url=recording['playback']['format']['url'], start_time=datetime.fromtimestamp(int((int(recording['startTime']) / 1000))), end_time=datetime.fromtimestamp(int((int(recording['endTime']) / 1000))), video_stream=stream)
    def query(self, view_kwargs):
        # Restrict to one stream's recordings, or require admin for the
        # unscoped listing.
        query_ = VideoRecording.query
        if view_kwargs.get('video_stream_id'):
            stream = safe_query_kwargs(VideoStream, view_kwargs, 'video_stream_id')
            query_ = VideoRecording.query.filter((VideoRecording.video_stream_id == stream.id))
        elif (not has_access('is_admin')):
            raise ForbiddenError({'pointer': 'user'}, 'You need to be the admin to access video recordings.')
        return query_
    # Read-only resource; auth enforced by JWT plus the checks above.
    methods = ['GET']
    view_kwargs = True
    decorators = (jwt_required,)
    schema = VideoRecordingSchema
    data_layer = {'session': db.session, 'model': VideoRecording, 'methods': {'query': query, 'before_get': before_get}}
def extractGourmetscansNet(item):
    """Map a GourmetScans feed item to a release message.

    Returns None for items without chapter/volume info or previews,
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or ('preview' in item['title'].lower()):
        return None
    # (tag to look for, series name to emit, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    matched = next(((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']), None)
    if matched is None:
        return False
    name, tl_type = matched
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
def get_dbt_command_list(args: argparse.Namespace, models_list: List[str]) -> List[str]:
    """Build the argument list for a single-threaded ``dbt run`` invocation.

    Args:
        args: Parsed CLI namespace; recognised attributes are ``debug``,
            ``project_dir``, ``profiles_dir``, ``defer``, ``state``,
            ``full_refresh``, ``target`` and ``vars``.
        models_list: dbt model selectors; appended after ``--select``
            when non-empty.

    Returns:
        The dbt command arguments, all coerced to strings.
    """
    command_list = []
    if args.debug:
        command_list += ['--debug']
    command_list += ['run']
    # Force a single thread so model execution order is deterministic.
    command_list += ['--threads', '1']
    if args.project_dir:
        command_list += ['--project-dir', args.project_dir]
    if args.profiles_dir:
        command_list += ['--profiles-dir', args.profiles_dir]
    if args.defer:
        command_list += ['--defer']
    if args.state:
        command_list += ['--state', args.state]
    if args.full_refresh:
        command_list += ['--full-refresh']
    if args.target:
        command_list += ['--target', args.target]
    # Skip the empty-vars payload: '{}' is equivalent to passing no vars.
    if args.vars is not None and args.vars != '{}':
        command_list += ['--vars', args.vars]
    if models_list:
        command_list += ['--select'] + models_list
    # Defensive stringification in case a namespace attribute is not a str.
    return list(map(str, command_list))
def get_columns(table_name: str, schema_name: Optional[str]=None, dblink_name: Optional[str]=None) -> List[str]:
    """Return the ordered column names of *table_name*, queried from
    ``pg_attribute`` (optionally through a dblink)."""
    query = "\n select attname as column_name\n from pg_attribute\n where attnum > 0 and attisdropped is false and attrelid = '{table}'::regclass\n order by attnum\n "
    records = _get_whatever(table_name, schema_name, dblink_name, query, _columns)
    return [record['column_name'] for record in records]
def evaluate_policy_rule(taxonomy: Taxonomy, policy_rule: PolicyRule, data_subjects: List[str], data_categories: List[str], data_use: str, declaration_violation_message: str) -> List[Violation]:
    """Evaluate one policy rule against a declaration's categories, use and
    subjects; return a single-element violation list on a match, otherwise
    an empty list."""
    # Expand declared categories into their fides-key ancestor hierarchies so
    # rule matching considers parents as well.
    category_hierarchies = [get_fides_key_parent_hierarchy(taxonomy=taxonomy, fides_key=declaration_category) for declaration_category in data_categories]
    category_matches = compare_rule_to_declaration(rule_types=policy_rule.data_categories.values, declaration_type_hierarchies=category_hierarchies, rule_match=policy_rule.data_categories.matches)
    use_matches = compare_rule_to_declaration(rule_types=policy_rule.data_uses.values, declaration_type_hierarchies=[get_fides_key_parent_hierarchy(taxonomy=taxonomy, fides_key=data_use)], rule_match=policy_rule.data_uses.matches)
    # Subjects have no hierarchy; wrap each key as a single-element chain.
    subject_matches = compare_rule_to_declaration(rule_types=policy_rule.data_subjects.values, declaration_type_hierarchies=[[FidesKey(data_subject)] for data_subject in data_subjects], rule_match=policy_rule.data_subjects.matches)
    # A rule is violated only when all three dimensions matched.
    if not (category_matches and use_matches and subject_matches):
        return []
    detail = '{}. Violated usage of data categories ({}) for data uses ({}) and subjects ({})'.format(declaration_violation_message, ','.join(category_matches), ','.join(use_matches), ','.join(subject_matches))
    return [Violation(detail=detail, violating_attributes=ViolationAttributes(data_categories=list(category_matches), data_uses=list(use_matches), data_subjects=list(subject_matches)))]
class CuratorTestCase(TestCase):
    """Integration-test base: provisions an Elasticsearch client, temp
    repository/config directories, and helpers for creating indices,
    snapshots and invoking the curator CLI."""
    def setUp(self):
        super(CuratorTestCase, self).setUp()
        self.logger = logging.getLogger('CuratorTestCase.setUp')
        self.client = get_client()
        args = {}
        args['HOST'] = HOST
        args['time_unit'] = 'days'
        args['prefix'] = 'logstash-'
        self.args = args
        # Use the node's configured path.repo for snapshots when available,
        # otherwise fall back to a random temp directory.
        nodesinfo = self.client.nodes.info()
        nodename = list(nodesinfo['nodes'].keys())[0]
        if ('repo' in nodesinfo['nodes'][nodename]['settings']['path']):
            if isinstance(nodesinfo['nodes'][nodename]['settings']['path']['repo'], list):
                self.args['location'] = nodesinfo['nodes'][nodename]['settings']['path']['repo'][0]
            else:
                self.args['location'] = nodesinfo['nodes'][nodename]['settings']['path']['repo']
        else:
            self.logger.warning('path.repo is not configured!')
            self.args['location'] = random_directory()
        self.args['configdir'] = random_directory()
        self.args['configfile'] = os.path.join(self.args['configdir'], 'curator.yml')
        self.args['actionfile'] = os.path.join(self.args['configdir'], 'actions.yml')
        self.args['repository'] = 'test_repository'
        self.logger.debug('setUp completed...')
        self.runner = clicktest.CliRunner()
        self.runner_args = ['--config', self.args['configfile'], self.args['actionfile']]
        self.result = None
    def tearDown(self):
        """Re-enable shard allocation, then delete all repositories,
        snapshots, indices and temp directories created by the test."""
        self.logger = logging.getLogger('CuratorTestCase.tearDown')
        self.logger.debug('tearDown initiated...')
        # Null value resets the transient setting to the cluster default.
        enable_allocation = json.loads('{"cluster.routing.allocation.enable":null}')
        self.client.cluster.put_settings(transient=enable_allocation)
        self.delete_repositories()
        indices = list(self.client.indices.get(index='*', expand_wildcards='open,closed').keys())
        if (len(indices) > 0):
            self.client.indices.delete(index=','.join(indices))
        for path_arg in ['location', 'configdir']:
            if os.path.exists(self.args[path_arg]):
                shutil.rmtree(self.args[path_arg])
    def parse_args(self):
        # Wrap the arg dict in the project's Args helper.
        return Args(self.args)
    def create_indices(self, count, unit=None, ilm_policy=None):
        """Create `count` date-suffixed indices, stepping back one `unit`
        per index (defaults to self.args['time_unit'])."""
        now = datetime.utcnow()
        unit = (unit if unit else self.args['time_unit'])
        fmt = DATEMAP[unit]
        if (not (unit == 'months')):
            step = timedelta(**{unit: 1})
            for _ in range(count):
                self.create_index((self.args['prefix'] + now.strftime(fmt)), wait_for_yellow=False, ilm_policy=ilm_policy)
                now -= step
        else:
            # timedelta has no 'months' keyword; walk month-by-month manually.
            now = date.today()
            d = date(now.year, now.month, 1)
            self.create_index((self.args['prefix'] + now.strftime(fmt)), wait_for_yellow=False, ilm_policy=ilm_policy)
            for _ in range(1, count):
                if (d.month == 1):
                    d = date((d.year - 1), 12, 1)
                else:
                    d = date(d.year, (d.month - 1), 1)
                self.create_index((self.args['prefix'] + datetime(d.year, d.month, 1).strftime(fmt)), wait_for_yellow=False, ilm_policy=ilm_policy)
        self.client.cluster.health(wait_for_status='yellow')
    def wfy(self):
        # Block until the cluster reaches at least yellow health.
        self.client.cluster.health(wait_for_status='yellow')
    def create_index(self, name, shards=1, wait_for_yellow=True, ilm_policy=None, wait_for_active_shards=1):
        """Create one index with no replicas, optionally attaching an ILM policy."""
        request_body = {'index': {'number_of_shards': shards, 'number_of_replicas': 0}}
        if (ilm_policy is not None):
            request_body['index']['lifecycle'] = {'name': ilm_policy}
        self.client.indices.create(index=name, settings=request_body, wait_for_active_shards=wait_for_active_shards)
        if wait_for_yellow:
            self.wfy()
    def add_docs(self, idx):
        # Index three trivial documents, flushing each, then refresh so
        # they are visible to searches.
        for i in ['1', '2', '3']:
            self.client.create(index=idx, id=i, document={('doc' + i): 'TEST DOCUMENT'})
            self.client.indices.flush(index=idx, force=True)
        self.client.indices.refresh(index=idx)
    def create_snapshot(self, name, csv_indices):
        """Snapshot the given comma-separated indices into the test repository."""
        self.create_repository()
        self.client.snapshot.create(repository=self.args['repository'], snapshot=name, ignore_unavailable=False, include_global_state=True, partial=False, indices=csv_indices, wait_for_completion=True)
    def delete_snapshot(self, name):
        self.client.snapshot.delete(repository=self.args['repository'], snapshot=name)
    def create_repository(self):
        # Filesystem repository rooted at the resolved snapshot location.
        args = {'location': self.args['location']}
        self.client.snapshot.create_repository(name=self.args['repository'], type='fs', settings=args)
    def delete_repositories(self):
        """Delete every repository, removing its snapshots first."""
        result = self.client.snapshot.get_repository(name='*')
        for repo in result:
            try:
                cleanup = self.client.snapshot.get(repository=repo, snapshot='*')
            except Exception:
                # Repository unreadable; treat as having no snapshots.
                cleanup = {'snapshots': []}
            for listitem in cleanup['snapshots']:
                self.delete_snapshot(listitem['snapshot'])
            self.client.snapshot.delete_repository(name=repo)
    def close_index(self, name):
        self.client.indices.close(index=name)
    def write_config(self, fname, data):
        # Overwrite `fname` with `data` (UTF-8).
        with open(fname, 'w', encoding='utf-8') as fhandle:
            fhandle.write(data)
    def get_runner_args(self):
        """Write the client config and return the singleton-runner command prefix."""
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        runner = os.path.join(os.getcwd(), 'run_singleton.py')
        return [sys.executable, runner]
    def run_subprocess(self, args, logname='subprocess'):
        """Run `args` as a subprocess, log its output, and return the exit code."""
        local_logger = logging.getLogger(logname)
        p = Popen(args, stderr=PIPE, stdout=PIPE)
        (stdout, stderr) = p.communicate()
        local_logger.debug('STDOUT = %s', stdout.decode('utf-8'))
        local_logger.debug('STDERR = %s', stderr.decode('utf-8'))
        return p.returncode
    def invoke_runner(self, dry_run=False):
        """Invoke the curator CLI via click's test runner, optionally dry-run."""
        if dry_run:
            self.result = self.runner.invoke(cli, ['--config', self.args['configfile'], '--dry-run', self.args['actionfile']])
            return
        self.result = self.runner.invoke(cli, self.runner_args)
    def invoke_runner_alt(self, **kwargs):
        """Invoke the CLI with arbitrary --key value options plus the action file."""
        myargs = []
        if kwargs:
            for (key, value) in kwargs.items():
                myargs.append(f'--{key}')
                myargs.append(value)
        myargs.append(self.args['actionfile'])
        self.result = self.runner.invoke(cli, myargs)
# NOTE(review): the bare "(scope='function')" line below appears to be a
# pytest fixture decorator that lost its '@pytest.fixture' prefix in
# transcription (as written it is not valid Python) — confirm against the
# original source.
(scope='function')
def fullstory_secrets(saas_config):
    # Prefer values from the saas_config blob, falling back to the module's
    # `secrets` mapping. The fullstory_user_id entry is a dataset field
    # reference (dataset/field/direction), not a literal secret.
    return {'domain': (pydash.get(saas_config, 'fullstory.domain') or secrets['domain']), 'api_key': (pydash.get(saas_config, 'fullstory.api_key') or secrets['api_key']), 'fullstory_user_id': {'dataset': 'fullstory_postgres', 'field': 'fullstory_users.fullstory_user_id', 'direction': 'from'}}
# NOTE(review): the two bare `register_cause_code` attribute accesses below
# look like class decorators that lost their leading '@' in transcription
# (probably '@_abort.register_cause_code' / '@_error.register_cause_code');
# likewise `cause_code` and `parser` take `cls` and were presumably
# '@classmethod's. Confirm against the original source.
_abort.register_cause_code
_error.register_cause_code
class cause_invalid_stream_id(cause_with_value):
    """SCTP 'Invalid Stream Identifier' error cause (carries the stream id)."""
    # Big-endian: cause code (H), length (H), value (H), 2 padding bytes.
    _PACK_STR = '!HHH2x'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    def cause_code(cls):
        return CCODE_INVALID_STREAM_ID
    def __init__(self, value=0, length=0):
        super(cause_invalid_stream_id, self).__init__(value, length)
    def parser(cls, buf):
        # The cause code field is read but discarded; the registry already
        # dispatched on it.
        (_, length, value) = struct.unpack_from(cls._PACK_STR, buf)
        return cls(value, length)
    def serialize(self):
        # A zero length means "not yet computed": default to the fixed size.
        if (0 == self.length):
            self.length = self._MIN_LEN
        buf = struct.pack(self._PACK_STR, self.cause_code(), self.length, self.value)
        return buf
def iamdb_pieces(args):
    """Train a sentencepiece piece model on IAMDB text lines that are NOT in
    any of the standard splits, then save the pieces and vocabulary."""
    iamdb = module_from_file('iamdb', os.path.join(root_dir, 'datasets/iamdb.py'))
    forms = iamdb.load_metadata(args.data_dir, '')
    # Collect the line keys of every official split so they can be excluded.
    ds_keys = set()
    for (_, v) in iamdb.SPLITS.items():
        for ds in v:
            with open(os.path.join(args.data_dir, f'{ds}.txt'), 'r') as fid:
                ds_keys.update((l.strip() for l in fid))
    text = [l['text'] for (_, lines) in forms.items() for l in lines if (l['key'] not in ds_keys)]
    num_pieces = args.num_pieces
    # One extra piece is reserved for the '/' user symbol.
    sp = train_spm_model(iter(text), (num_pieces + 1), user_symbols=['/'])
    # NOTE(review): str.split('') raises ValueError (empty separator) — the
    # word delimiter was almost certainly lost in transcription (perhaps
    # ' ' or '|'); confirm against the original source before relying on this.
    vocab = sorted(set((w for t in text for w in t.split('') if w)))
    assert ('MOVE' in vocab)
    save_pieces(sp, num_pieces, args.output_prefix, vocab)
def parse_note(note_str):
    """Parse an RTTTL-style note token (e.g. '8c#5.') into its components.

    Returns:
        dict with keys 'duration', 'pitch', 'octave', 'dot' on success,
        or False when *note_str* does not match the note grammar or an
        element parser rejects its input.
    """
    try:
        # Groups: optional 1-2 duration digits, pitch (p/b/e/h or
        # c/d/f/g/a with optional '#'), optional octave digit, optional dot.
        elements = re.findall('^(\\d{0,2})([pbeh]|[cdfga]#?)(\\d?)(\\.?)$', note_str)[0]
        funcs = (parse_duration, parse_pitch, parse_octave, has_dot)
        elements = [func(element) for (func, element) in zip(funcs, elements)]
    except Exception:
        # No regex match raises IndexError on [0]; element parsers may raise
        # too. A bare `except:` here would also have swallowed
        # KeyboardInterrupt/SystemExit, so catch Exception explicitly.
        return False
    keys = ('duration', 'pitch', 'octave', 'dot')
    return dict(zip(keys, elements))
def serialise_cat_upgrades(save_data: list[int], cat_upgrades: dict[(str, list[int])]) -> list[int]:
    """Append interleaved plus/base cat upgrade levels to the save buffer."""
    plus_levels = cat_upgrades['Plus']
    base_levels = cat_upgrades['Base']
    count = len(base_levels)
    # Interleave as (plus, base) pairs per cat, matching the on-disk order.
    flattened: list[int] = [level for cat_id in range(count) for level in (plus_levels[cat_id], base_levels[cat_id])]
    write_length_data(save_data, flattened, 4, 2, True, count)
    return save_data
class StateCondition(Condition):
    """Condition met on every second event, counted via keyed value state."""

    def is_met(self, event: Event, context: Context) -> bool:
        # Pull the per-key counter, treating a missing value as zero.
        counter: ValueState = context.get_state(ValueStateDescriptor(name='count'))
        seen = counter.value()
        seen = 1 if seen is None else seen + 1
        counter.update(seen)
        # Fire only on even counts.
        return seen % 2 == 0
def get_messages_positions(path):
    """Yield (offset, length) for each GRIB message in the file at *path*.

    Scans byte-by-byte for the 'GRIB' indicator, then decodes the total
    message length according to the edition (GRIB1 or GRIB2).
    """
    fd = os.open(path, os.O_RDONLY)
    try:
        def get(count):
            # Read exactly `count` bytes as a big-endian unsigned integer.
            buf = os.read(fd, count)
            assert (len(buf) == count)
            return int.from_bytes(buf, byteorder='big', signed=False)
        offset = 0
        while True:
            code = os.read(fd, 4)
            if (len(code) < 4):
                # End of file.
                break
            if (code != b'GRIB'):
                # Not a message start: advance one byte and keep scanning.
                offset = os.lseek(fd, (offset + 1), os.SEEK_SET)
                continue
            length = get(3)
            edition = get(1)
            if (edition == 1):
                # GRIB1: 0x800000 set in the 24-bit length marks a "large"
                # message whose true length must be recomputed from the
                # section lengths (presumably the ECMWF large-GRIB
                # convention — confirm against the spec).
                if (length & 8388608):
                    sec1len = get(3)
                    os.lseek(fd, 4, os.SEEK_CUR)
                    flags = get(1)
                    os.lseek(fd, (sec1len - 8), os.SEEK_CUR)
                    if (flags & (1 << 7)):
                        # Section 2 (GDS) present: skip it.
                        sec2len = get(3)
                        os.lseek(fd, (sec2len - 3), os.SEEK_CUR)
                    if (flags & (1 << 6)):
                        # Section 3 (BMS) present: skip it.
                        sec3len = get(3)
                        os.lseek(fd, (sec3len - 3), os.SEEK_CUR)
                    sec4len = get(3)
                    if (sec4len < 120):
                        # Recover the true length: clear the flag bit, scale
                        # by 120, and adjust by the section-4 length.
                        length &= 8388607
                        length *= 120
                        length -= sec4len
                        length += 4
            if (edition == 2):
                # GRIB2: the total length is the 64-bit integer that follows
                # the edition byte.
                length = get(8)
            (yield (offset, length))
            # Jump to the end of this message and continue scanning.
            offset = os.lseek(fd, (offset + length), os.SEEK_SET)
    finally:
        os.close(fd)
def _dataIsString(data):
    """Return True when *data* (an expression yielding NSData) decodes as a
    UTF-8 NSString in the debugged process (encoding 4 = NSUTF8StringEncoding)."""
    expression = '(NSString*)[[NSString alloc] initWithData:(' + data + ') encoding:4]'
    result = fb.evaluateExpressionValue(expression)
    error = result.GetError()
    if error is not None and str(error) != 'success':
        # The expression failed to evaluate; treat as "not a string".
        return False
    # A nil NSString pointer means the bytes were not valid UTF-8.
    return result.GetValueAsUnsigned() != 0
class OptionPlotoptionsStreamgraphSonificationContexttracks(Options):
    """Accessors for streamgraph sonification context-track options
    (generated Highcharts option-tree wrapper).

    NOTE(review): every option below appears twice — a getter and a setter
    with the same name. The original source was almost certainly decorated
    with '@property' / '@<name>.setter'; those decorators were lost in
    transcription, so as written each later definition shadows the earlier
    one. Confirm against the original source.
    """
    def activeWhen(self) -> 'OptionPlotoptionsStreamgraphSonificationContexttracksActivewhen':
        # Nested option sub-object.
        return self._config_sub_data('activeWhen', OptionPlotoptionsStreamgraphSonificationContexttracksActivewhen)
    def instrument(self):
        # Default: 'piano'.
        return self._config_get('piano')
    def instrument(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionPlotoptionsStreamgraphSonificationContexttracksMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsStreamgraphSonificationContexttracksMapping)
    def midiName(self):
        # No default.
        return self._config_get(None)
    def midiName(self, text: str):
        self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionPlotoptionsStreamgraphSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsStreamgraphSonificationContexttracksPointgrouping)
    def roundToMusicalNotes(self):
        # Default: True.
        return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)
    def showPlayMarker(self):
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def timeInterval(self):
        return self._config_get(None)
    def timeInterval(self, num: float):
        self._config(num, js_type=False)
    def type(self):
        # Default track type: 'instrument'.
        return self._config_get('instrument')
    def type(self, text: str):
        self._config(text, js_type=False)
    def valueInterval(self):
        return self._config_get(None)
    def valueInterval(self, num: float):
        self._config(num, js_type=False)
    def valueMapFunction(self):
        # Default mapping function: 'linear'.
        return self._config_get('linear')
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)
    def valueProp(self):
        # Default property read from each point: "x" (JS-quoted default).
        return self._config_get('"x"')
    def valueProp(self, text: str):
        self._config(text, js_type=False)
class ConfigFile():
    """Resolve source and target paths for a Rally .ini configuration file.

    The target lives under ``<RALLY_HOME>/.rally`` (``~/.rally`` unless the
    RALLY_HOME environment variable overrides the home directory); the
    bundled template of the same name is read from the package's
    ``resources`` directory.
    """

    def __init__(self, config_name):
        self.user_home = os.getenv('RALLY_HOME', os.path.expanduser('~'))
        self.rally_home = os.path.join(self.user_home, '.rally')
        # A named configuration maps to 'rally-<name>.ini'; None selects the default file.
        self.config_file_name = 'rally.ini' if config_name is None else f'rally-{config_name}.ini'
        self.source_path = os.path.join(os.path.dirname(__file__), 'resources', self.config_file_name)
        self.target_path = os.path.join(self.rally_home, self.config_file_name)
class ColormappedSelectionOverlay(AbstractOverlay):
    """Overlay that fades the unselected points of a ColormappedScatterPlot
    and redraws the selected subset fully opaque with an outline."""
    # The decorated plot; falls back to `component` unless explicitly set.
    plot = Property
    # Alpha applied to the unselected (faded) points.
    fade_alpha = Float(0.15)
    # Minimum relative width of a range selection below which nothing draws.
    minimum_delta = Float(0.01)
    selected_outline_width = Float(1.0)
    unselected_outline_width = Float(0.0)
    # 'range' reads metadata['selections']; 'mask' reads metadata['selection_masks'].
    selection_type = Enum('range', 'mask')
    _plot = Instance(ColormappedScatterPlot)
    _visible = Bool(False)
    # Saved plot styling, restored when the selection is cleared.
    _old_alpha = Float
    _old_outline_color = Any
    _old_line_width = Float(0.0)
    def __init__(self, component=None, **kw):
        super().__init__(**kw)
        self.component = component
    def overlay(self, component, gc, view_bounds=None, mode='normal'):
        """Draw the selected points opaque on top of the faded plot."""
        if (not self._visible):
            return
        plot = self.plot
        datasource = plot.color_data
        if (self.selection_type == 'range'):
            selections = datasource.metadata['selections']
            # NOTE(review): with `and`, a None selections value falls through
            # to the tuple-unpack below (which would raise); `or` may have
            # been intended — confirm against the original source.
            if ((selections is not None) and (len(selections) == 0)):
                return
            (low, high) = selections
            # Skip selections narrower than minimum_delta (relative width).
            if ((abs((high - low)) / abs((high + low))) < self.minimum_delta):
                return
            data_pts = datasource.get_data()
            mask = ((data_pts >= low) & (data_pts <= high))
        elif (self.selection_type == 'mask'):
            # AND together all masks; require at least two selected points.
            mask = functools.reduce(logical_and, datasource.metadata['selection_masks'])
            if (sum(mask) < 2):
                return
        datasource.set_mask(mask)
        # Temporarily switch the plot to "selected" styling, draw the masked
        # subset, then restore the faded styling.
        fade_outline_color = plot.outline_color_
        plot.outline_color = (list(self._old_outline_color[:3]) + [1.0])
        plot.fill_alpha = 1.0
        plot.line_width = self.selected_outline_width
        plot._draw_plot(gc, view_bounds, mode)
        plot.fill_alpha = self.fade_alpha
        plot.outline_color = fade_outline_color
        plot.line_width = self.unselected_outline_width
        datasource.remove_mask()
    def _component_changed(self, old, new):
        # Re-wire the color_data observer when the underlying component changes
        # and snapshot the new component's styling.
        if old:
            old.observe(self.datasource_change_handler, 'color_data', remove=True)
        if new:
            new.observe(self.datasource_change_handler, 'color_data')
            self._old_alpha = new.fill_alpha
            self._old_outline_color = new.outline_color
            self._old_line_width = new.line_width
            self.datasource_change_handler(TraitChangeEvent(object=new, name='color_data', old=None, new=new.color_data))
    def datasource_change_handler(self, event):
        # Follow the plot's color_data source, re-wiring the metadata observer.
        (old, new) = (event.old, event.new)
        if old:
            old.observe(self.selection_change_handler, 'metadata_changed', remove=True)
        if new:
            new.observe(self.selection_change_handler, 'metadata_changed')
            self.selection_change_handler(TraitChangeEvent(object=new, name='metadata_changed', old=None, new=new.metadata))
    def selection_change_handler(self, event):
        """Fade the plot when a selection appears; restore it when cleared."""
        new = event.new
        if (self.selection_type == 'range'):
            selection_key = 'selections'
        elif (self.selection_type == 'mask'):
            selection_key = 'selection_masks'
        if ((type(new) == dict) and (new.get(selection_key, None) is not None) and (len(new[selection_key]) > 0)):
            if (not self._visible):
                # First selection: save current styling, then fade the plot.
                plot = self.plot
                self._old_line_width = plot.line_width
                plot.line_width = self.unselected_outline_width
                self._old_outline_color = plot.outline_color_
                outline_color = list(plot.outline_color_)
                # Append or overwrite the alpha channel with the fade alpha.
                if (len(outline_color) == 3):
                    outline_color += [self.fade_alpha]
                else:
                    outline_color[3] = self.fade_alpha
                plot.outline_color = outline_color
                self._old_alpha = plot.fill_alpha
                plot.fill_alpha = self.fade_alpha
                self.plot.invalidate_draw()
            self._visible = True
        else:
            # Selection cleared: restore the saved styling.
            self.plot.fill_alpha = self._old_alpha
            self.plot.outline_color = self._old_outline_color
            self.plot.line_width = self._old_line_width
            self.plot.invalidate_draw()
            self._visible = False
        self.plot.request_redraw()
    def _get_plot(self):
        # Property getter: explicit _plot wins, else the overlaid component.
        if (self._plot is not None):
            return self._plot
        else:
            return self.component
    def _set_plot(self, val):
        self._plot = val
class Dependencies():
    """Read-only mapping-like view over a package's build dependencies."""

    def __init__(self, build_dependencies: Dict[(str, 'Package')]) -> None:
        self.build_dependencies = build_dependencies

    def __getitem__(self, key: str) -> 'Package':
        # Mirrors dict.get: unknown keys yield None rather than KeyError.
        return self.build_dependencies.get(key)

    def __contains__(self, key: str) -> bool:
        return key in self.build_dependencies

    def _validate_name(self, name: str) -> None:
        # Rejects malformed names, then names absent from the dependency set.
        validate_package_name(name)
        if name not in self.build_dependencies:
            raise KeyError(f'Package name: {name} not found in build dependencies.')

    def items(self) -> Tuple[(Tuple[(str, 'Package')], ...)]:
        """Return (name, package) pairs as a tuple."""
        return tuple((name, self.build_dependencies.get(name)) for name in self.build_dependencies)

    def values(self) -> List['Package']:
        """Return the dependency packages, in insertion order."""
        return [self.build_dependencies.get(name) for name in self.build_dependencies]

    def get_dependency_package(self, package_name: str) -> 'Package':
        """Return the package for *package_name* after validating the name."""
        self._validate_name(package_name)
        return self.build_dependencies.get(package_name)
def compile_unet(pt_mod, batch_size=2, height=64, width=64, dim=320, hidden_dim=1024, use_fp16_acc=False, convert_conv_to_gemm=False, attention_head_dim=None, model_name='UNet2DConditionModel', use_linear_projection=False):
    """Compile a PyTorch UNet2DConditionModel into an AIT module under ./tmp.

    Args:
        pt_mod: source PyTorch UNet whose weights are mapped into the AIT graph.
        batch_size, height, width: latent input dimensions (NHWC, 4 channels).
        dim: base channel width used when mapping parameters.
        hidden_dim: cross-attention (text embedding) width.
        use_fp16_acc, convert_conv_to_gemm: backend codegen options.
        attention_head_dim: per-block attention head dims; defaults to
            [5, 10, 20, 20] when None.
        model_name: name of the compiled artifact.
        use_linear_projection: forwarded to the AIT UNet definition.
    """
    # BUG FIX: the former list default ([5, 10, 20, 20]) was a mutable default
    # argument shared across calls; use a None sentinel and materialise the
    # historical default per call instead.
    if attention_head_dim is None:
        attention_head_dim = [5, 10, 20, 20]
    ait_mod = ait_UNet2DConditionModel(sample_size=64, cross_attention_dim=hidden_dim, attention_head_dim=attention_head_dim, use_linear_projection=use_linear_projection)
    ait_mod.name_parameter_tensor()
    pt_mod = pt_mod.eval()
    params_ait = map_unet_params(pt_mod, dim)
    # AIT graph inputs: NHWC latents, one timestep per sample, CLIP embeddings.
    latent_model_input_ait = Tensor([batch_size, height, width, 4], name='input0', is_input=True)
    timesteps_ait = Tensor([batch_size], name='input1', is_input=True)
    text_embeddings_pt_ait = Tensor([batch_size, 77, hidden_dim], name='input2', is_input=True)
    # No ControlNet residuals in this build.
    mid_block_additional_residual = None
    down_block_additional_residuals = None
    Y = ait_mod(latent_model_input_ait, timesteps_ait, text_embeddings_pt_ait, down_block_additional_residuals, mid_block_additional_residual)
    mark_output(Y)
    target = detect_target(use_fp16_acc=use_fp16_acc, convert_conv_to_gemm=convert_conv_to_gemm)
    compile_model(Y, target, './tmp', model_name, constants=params_ait)
def get_generator_and_config_args(tool):
    """Return the CMake generator name and extra configure args for *tool*."""
    extra_args = []
    generator = CMAKE_GENERATOR
    modern_vs = ('Visual Studio 16' in CMAKE_GENERATOR) or ('Visual Studio 17' in CMAKE_GENERATOR)
    if modern_vs:
        # VS 2019/2022 generators select the target architecture via -A.
        extra_args.extend(['-A', cmake_target_platform(tool)])
        extra_args.append('-Thost=' + cmake_host_platform())
    elif ('Visual Studio' in CMAKE_GENERATOR) and (tool.bitness == 64):
        # Older VS generators encode 64-bit in the generator name itself.
        generator = generator + ' Win64'
        extra_args.append('-Thost=x64')
    return (generator, extra_args)
class TestFilePathFieldRequired():
    """A non-required FilePathField must offer a blank choice and skip empty input."""

    def test_required_passed_to_both_django_file_path_field_and_base(self):
        here = os.path.dirname(__file__)
        path_field = serializers.FilePathField(path=os.path.abspath(here), required=False)
        # The optional field exposes the empty choice...
        assert ('' in path_field.choices)
        assert (path_field.required is False)
        # ...and validating the `empty` sentinel raises SkipField instead of erroring.
        with pytest.raises(SkipField):
            path_field.run_validation(empty)
def test_client_create_access_code_jwe(oauth_client, config):
    """The JWE access code must decrypt to a payload describing the client."""
    key = config.security.app_encryption_key
    jwe = oauth_client.create_access_code_jwe(key)
    payload = json.loads(extract_payload(jwe, key))
    assert payload[JWE_PAYLOAD_CLIENT_ID] == oauth_client.id
    assert payload[JWE_PAYLOAD_SCOPES] == oauth_client.scopes
    assert payload[JWE_ISSUED_AT] is not None
    # A freshly issued client code carries no roles or systems.
    assert payload[JWE_PAYLOAD_ROLES] == []
    assert payload[JWE_PAYLOAD_SYSTEMS] == []
def test_init_raw_transaction():
    """RawTransaction stores its fields, renders via str(), and is self-equal."""
    expected_ledger = 'some_ledger'
    expected_body = {'body': 'value'}
    tx = RawTransaction(expected_ledger, expected_body)
    assert tx.ledger_id == expected_ledger
    assert tx.body == expected_body
    assert str(tx) == "RawTransaction: ledger_id=some_ledger, body={'body': 'value'}"
    assert tx == tx
class Module02(Digraph.Node):
    """Input-module stub: depends on Module01 and handles the reqtag type."""

    depends_on = ['Module01']

    def __init__(self, config):
        # The configuration is accepted for interface compatibility but unused.
        Digraph.Node.__init__(self, 'Module02')

    def get_type_set(self):
        return {InputModuleTypes.reqtag}

    def set_modules(self, mods):
        # No cross-module wiring needed for this stub.
        pass

    def rewrite(self, rid, reqs):
        # Every requirement rewrites to the same tag with no payload.
        return ('SameTag', None)
def check_create_keys(wallet: Wallet, account_script_type: ScriptType) -> None:
    """Exercise deterministic key creation on *wallet*'s single account.

    Walks three creation APIs -- ``create_keys``, ``create_keys_until`` and
    ``get_fresh_keys`` -- asserting that derivation indices advance
    contiguously, that no keyinstance id is ever reused, and that "fresh"
    (reserved but unused) keys are only produced by ``get_fresh_keys``.
    """
    def check_rows(rows: List[KeyInstanceRow], script_type: ScriptType) -> None:
        # Every returned row must belong to this account's sole BIP32 masterkey
        # and carry the expected script type with no description set.
        for row in rows:
            assert isinstance(row.keyinstance_id, int)
            assert (account.get_id() == row.account_id)
            assert (1 == row.masterkey_id)
            assert (script_type == row.script_type)
            assert (DerivationType.BIP32_SUBPATH == row.derivation_type)
            assert (None is row.description)
    accounts = wallet.get_accounts()
    assert (len(accounts) == 1)
    account = accounts[0]
    # A brand-new account has no fresh (pre-allocated) keys on either chain.
    assert ([] == account.get_existing_fresh_keys(RECEIVING_SUBPATH))
    assert ([] == account.get_existing_fresh_keys(CHANGE_SUBPATH))
    assert (account_script_type == account.get_default_script_type())
    keyinstances: List[KeyInstanceRow] = []
    keyinstance_ids: Set[int] = set()
    # Phase 1 -- create_keys: allocate explicit batches; ids must never repeat
    # and created keys must not count as "fresh".
    for count in (0, 1, 5):
        new_keyinstances = account.create_keys(count, RECEIVING_SUBPATH)
        assert (count == len(new_keyinstances))
        check_rows(new_keyinstances, account_script_type)
        keyinstance_ids |= set((keyinstance.keyinstance_id for keyinstance in new_keyinstances))
        keyinstances.extend(new_keyinstances)
        assert (len(keyinstance_ids) == len(keyinstances))
        assert ([] == account.get_existing_fresh_keys(RECEIVING_SUBPATH))
    # Phase 2 -- create_keys_until: extend up to a target derivation index;
    # asking for zero new keys is expected to fail with AssertionError.
    for count in (0, 1, 5):
        last_row = keyinstances[(- 1)]
        last_index = account.get_derivation_path(last_row.keyinstance_id)[(- 1)]
        next_index = account.get_next_derivation_index(RECEIVING_SUBPATH)
        # Derivation indices must be contiguous with everything created so far.
        assert (next_index == (last_index + 1))
        try:
            new_keyinstances = account.create_keys_until((RECEIVING_SUBPATH + (((next_index + count) - 1),)))
        except AssertionError:
            # Only the zero-count request may be rejected.
            assert (0 == count)
            continue
        assert (0 != count)
        assert (count == len(new_keyinstances))
        check_rows(new_keyinstances, account_script_type)
        keyinstance_ids |= set((keyinstance.keyinstance_id for keyinstance in new_keyinstances))
        keyinstances.extend(new_keyinstances)
        assert (len(keyinstance_ids) == len(keyinstances))
        assert ([] == account.get_existing_fresh_keys(RECEIVING_SUBPATH))
    # Phase 3 -- get_fresh_keys: reserved keys have no script type yet, and
    # each larger batch must extend (be a superset prefix-wise of) the last.
    keyinstance_batches: List[List[KeyInstanceRow]] = []
    for count in (0, 1, 5):
        new_keyinstances = account.get_fresh_keys(RECEIVING_SUBPATH, count)
        assert (count == len(new_keyinstances))
        assert (new_keyinstances == account.get_existing_fresh_keys(RECEIVING_SUBPATH))
        check_rows(new_keyinstances, ScriptType.NONE)
        if (len(keyinstance_batches) > 0):
            last_keyinstances = keyinstance_batches[(- 1)]
            assert (last_keyinstances == new_keyinstances[:len(last_keyinstances)])
        keyinstance_batches.append(new_keyinstances)
def extractKudarajin(item):
    """Map a Kudarajin feed *item* to a release message.

    Returns None for previews/untagged chapters, a release message for a
    recognized series, or False when no series matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol or frag)) or ('preview' in item['title'].lower()):
        return None
    # (series name, optional title prefix that also identifies the series)
    series_table = [
        ('I Appear to have been Reincarnated as a Love Interest in an Otome Game',
         'I Appear to have been Reincarnated as a Love Interest in an Otome Game: '),
        ('Hokuou Kizoku to Moukinzuma no Yukiguni Karigurashi',
         'Hokuou Kizoku to Moukinzuma no Yukiguni Karigurashi:'),
        ('Miniature Garden Chemister', None),
        ('Tensei Shite Inaka de Slowlife wo\xa0Okuritai', None),
    ]
    for series, title_prefix in series_table:
        tagged = series in item['tags']
        prefixed = (title_prefix is not None) and item['title'].startswith(title_prefix)
        if tagged or prefixed:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
class Translator(object):
    """Translate a concrete parse tree (``ParseTree``) into a Python ``ast`` tree.

    Each ``do_<symbol>`` method handles one grammar symbol and returns the
    corresponding AST node (or, for statement symbols, a list of statement
    nodes).  The class-level operator dicts (``_unary``, ``_binary``,
    ``_boolean``, ``_comparison``) map operator tokens to AST operator
    classes; they are re-bound per instance in ``__init__`` so a subclass
    that swaps in its own ``ST`` module gets matching node classes.
    """
    ParseTree = ParseTree
    parser = parser
    # ST is the AST module used to build nodes; subclasses may override it.
    ST = ast

    def __init__(self, st):
        # Expose token kinds (NAME, NUMBER, STRING, NEWLINE, ...) as attributes.
        for (value, name) in self.parser.tokenizer.tok_name.items():
            setattr(self, name, value)

        def isdict(obj):
            return isinstance(obj, dict)
        # Re-bind every class-level operator dict against self.ST so lookups
        # yield this translator's node classes (ignoring names ST lacks).
        for (name, d) in inspect.getmembers(self.__class__, isdict):
            d = d.copy()
            for (key, val) in d.items():
                try:
                    d[key] = getattr(self.ST, val.__name__)
                except AttributeError:
                    pass
            setattr(self, name, d)
        # Translate immediately; the resulting AST is stored on self.ast.
        self.ast = self.do(self.ParseTree(st))

    def do(self, st, ctx=None):
        """Dispatch *st* to its ``do_<symbol>`` handler; default ctx is Load."""
        if (ctx is None):
            ctx = self.ST.Load
        name = st.symbol
        meth = getattr(self, ('do_' + name))
        tree = meth(st, ctx)
        try:
            # Remember the originating parse node for error reporting.
            tree.st = st
        except AttributeError:
            # Some handlers return plain tuples/lists, not AST nodes.
            pass
        return tree

    # Unary operator token -> AST operator class.
    _unary = {'not': ast.Not, '+': ast.UAdd, '-': ast.USub, '~': ast.Invert}

    def _do_unary(self, st, ctx):
        # A single child means no operator: recurse transparently.
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            return self.ST.UnaryOp(lineno=st.srow, col_offset=st.scol, op=self._unary[st[0].text](lineno=st[0].srow, col_offset=st[0].scol), operand=self.do(st[1], ctx))

    # Binary operator token -> AST operator class.
    _binary = {'+': ast.Add, '-': ast.Sub, '*': ast.Mult, '/': ast.Div, '%': ast.Mod, '&': ast.BitAnd, '|': ast.BitOr, '^': ast.BitXor, '<<': ast.LShift, '>>': ast.RShift, '**': ast.Pow, '//': ast.FloorDiv}

    def _do_binary(self, st, ctx):
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            # Children alternate operand/operator; left-fold "a op b op c"
            # into nested BinOp nodes.
            values = [self.do(child, ctx) for child in st[::2]]
            ops = [(self._binary[child.text], child) for child in st[1::2]]
            while (len(values) > 1):
                left = values.pop(0)
                right = values.pop(0)
                (operator, node) = ops.pop(0)
                values.insert(0, self.ST.BinOp(lineno=st.srow, col_offset=st.scol, left=left, op=operator(lineno=node.srow, col_offset=node.scol), right=right))
            return values[0]

    # Boolean operator token -> AST operator class.
    _boolean = {'and': ast.And, 'or': ast.Or}

    def _do_boolean(self, st, ctx):
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            # BoolOp holds all operands flat; the grammar guarantees a single
            # operator kind per node.
            return self.ST.BoolOp(lineno=st.srow, col_offset=st.scol, op=self._boolean[st[1].text](lineno=st[1].srow, col_offset=st[1].scol), values=[self.do(child, ctx) for child in st[::2]])

    def do_file_input(self, st, ctx):
        # Concatenate all statement lists, skipping NEWLINE/ENDMARKER tokens.
        body = reduce(operator.add, (self.do(child, ctx) for child in st if (child.kind not in (self.NEWLINE, self.ENDMARKER))), [])
        return self.ST.Module(lineno=st.srow, col_offset=st.scol, body=body)

    def do_decorator(self, st, ctx):
        # Build a dotted Name/Attribute chain for the decorator target.
        names = self.do(st[1], ctx).split('.')
        obj = self.ST.Name(lineno=st[1].srow, col_offset=st[1].scol, id=names.pop(0), ctx=ctx())
        while names:
            obj = self.ST.Attribute(lineno=st[1].srow, col_offset=st[1].scol, value=obj, attr=names.pop(0), ctx=ctx())
        if (len(st) == 3):
            # "@name" -- bare decorator, no call.
            return obj
        elif (len(st) == 5):
            # "@name()" -- call with no arguments.
            return self.ST.Call(lineno=st[1].srow, col_offset=st[1].scol, func=obj, args=[], keywords=[], starargs=None, kwargs=None)
        else:
            # "@name(args)" -- call with an argument list.
            (args, keywords, starargs, kwargs) = self.do(st[3], ctx)
            return self.ST.Call(lineno=st[1].srow, col_offset=st[1].scol, func=obj, args=args, keywords=keywords, starargs=starargs, kwargs=kwargs)

    def do_decorators(self, st, ctx):
        return [self.do(child, ctx) for child in st]

    def do_decorated(self, st, ctx):
        # Attach the decorator list to the decorated def/class node.
        child = self.do(st[1], ctx)
        child.decorator_list.extend(self.do(st[0], ctx))
        return child

    def do_funcdef(self, st, ctx):
        # Five children: "def name params : suite"; more means a "-> returns"
        # annotation is present.
        if (len(st) == 5):
            return self.ST.FunctionDef(lineno=st.srow, col_offset=st.scol, name=st[1].text, args=self.do(st[2], ctx), returns=None, body=self.do(st[(- 1)], ctx), decorator_list=[])
        else:
            return self.ST.FunctionDef(lineno=st.srow, col_offset=st.scol, name=st[1].text, args=self.do(st[2], ctx), returns=self.do(st[5], ctx), body=self.do(st[(- 1)], ctx), decorator_list=[])

    def do_parameters(self, st, ctx):
        # "()" -- empty parameter list.
        if (len(st) == 2):
            return self.ST.arguments(lineno=st.srow, col_offset=st.scol, args=[], vararg=None, varargannotation=None, kwonlyargs=[], kwarg=None, kwargannotation=None, defaults=[], kw_defaults=[])
        else:
            return self.do(st[1], ctx)

    def do_typedargslist(self, st, ctx):
        """Parse a (possibly annotated) argument list into an ST.arguments node."""
        args = []
        vararg = None
        varargannotation = None
        # Tracks whether we've passed a bare "*" (subsequent args are kw-only).
        star = False
        kwonlyargs = []
        kwarg = None
        kwargannotation = None
        defaults = []
        kw_defaults = []
        nodes = list(st)
        while nodes:
            first = nodes.pop(0)
            if (first.text == ','):
                pass
            elif (first.text == '*'):
                star = True
                if (nodes and (nodes[0].text != ',')):
                    # "*args" (with optional annotation) rather than bare "*".
                    (vararg, varargannotation) = self.do(nodes.pop(0), ctx)
            elif (first.text == '**'):
                (kwarg, kwargannotation) = self.do(nodes.pop(0), ctx)
            else:
                # Plain parameter, possibly followed by "= default".
                (n, a) = self.do(first, ctx)
                arg = self.ST.arg(lineno=first.srow, col_offset=first.scol, arg=n, annotation=a)
                if (nodes and (nodes[0].text == '=')):
                    del nodes[0]
                    d = self.do(nodes.pop(0), ctx)
                else:
                    d = None
                if star:
                    # After "*": keyword-only; kw_defaults keeps a slot (None
                    # allowed) for every kw-only arg.
                    kwonlyargs.append(arg)
                    kw_defaults.append(d)
                else:
                    args.append(arg)
                    if (d is not None):
                        defaults.append(d)
        return self.ST.arguments(lineno=st.srow, col_offset=st.scol, args=args, vararg=vararg, varargannotation=varargannotation, kwonlyargs=kwonlyargs, kwarg=kwarg, kwargannotation=kwargannotation, defaults=defaults, kw_defaults=kw_defaults)

    def do_tfpdef(self, st, ctx):
        # Typed parameter: "name" or "name : annotation".
        if (len(st) == 1):
            return (st[0].text, None)
        else:
            return (st[0].text, self.do(st[2], ctx))

    def do_varargslist(self, st, ctx):
        # Lambda argument lists share the typedargslist shape (no annotations).
        tree = self.do_typedargslist(st, ctx)
        tree.st = st
        return tree

    def do_vfpdef(self, st, ctx):
        return (st[0].text, None)

    def do_stmt(self, st, ctx):
        # Normalize: statement handlers always return a list of nodes.
        child = self.do(st[0], ctx)
        if isinstance(child, self.ST.AST):
            return [child]
        else:
            return child

    def do_simple_stmt(self, st, ctx):
        # Semicolon-separated small statements on one line.
        return [self.do(child, ctx) for child in st[::2] if (child.kind != self.NEWLINE)]

    def do_small_stmt(self, st, ctx):
        return self.do(st[0], ctx)

    def do_expr_stmt(self, st, ctx):
        if (len(st) == 1):
            # Bare expression statement.
            return self.ST.Expr(lineno=st.srow, col_offset=st.scol, value=self.do(st[0], ctx))
        elif (st[1].symbol == 'augassign'):
            # "target op= value"; tuple targets are illegal here.
            target = self.do(st[0], self.ST.Store)
            if isinstance(target, self.ST.Tuple):
                raise ParseError(st[0].text, reason='illegal expression for augmented assignment')
            return self.ST.AugAssign(lineno=st.srow, col_offset=st.scol, target=target, op=self.do(st[1], ctx), value=self.do(st[2], ctx))
        else:
            # Chained assignment "a = b = value": all but the last child are
            # Store targets.
            return self.ST.Assign(lineno=st.srow, col_offset=st.scol, targets=[self.do(child, ast.Store) for child in st[:(- 1):2]], value=self.do(st[(- 1)], ctx))

    def do_augassign(self, st, ctx):
        # Strip the trailing "=" to look up the underlying binary operator.
        return self._binary[st[0].text[:(- 1)]](lineno=st.srow, col_offset=st.scol)

    def do_del_stmt(self, st, ctx):
        # "del a, b" produces a tuple target; flatten it into a target list.
        targets = self.do(st[1], ctx=self.ST.Del)
        if isinstance(targets, self.ST.Tuple):
            targets = targets.elts
        else:
            targets = [targets]
        return self.ST.Delete(lineno=st.srow, col_offset=st.scol, targets=targets)

    def do_pass_stmt(self, st, ctx):
        return self.ST.Pass(lineno=st.srow, col_offset=st.scol)

    def do_flow_stmt(self, st, ctx):
        return self.do(st[0], ctx)

    def do_break_stmt(self, st, ctx):
        return self.ST.Break(lineno=st.srow, col_offset=st.scol)

    def do_continue_stmt(self, st, ctx):
        return self.ST.Continue(lineno=st.srow, col_offset=st.scol)

    def do_return_stmt(self, st, ctx):
        if (len(st) == 1):
            return self.ST.Return(lineno=st.srow, col_offset=st.scol, value=None)
        else:
            return self.ST.Return(lineno=st.srow, col_offset=st.scol, value=self.do(st[1], ctx))

    def do_yield_stmt(self, st, ctx):
        # A yield used as a statement is wrapped in Expr.
        return self.ST.Expr(lineno=st.srow, col_offset=st.scol, value=self.do(st[0], ctx))

    def do_raise_stmt(self, st, ctx):
        # "raise" / "raise exc" / "raise exc from cause".
        count = len(st)
        if (count == 1):
            return self.ST.Raise(lineno=st.srow, col_offset=st.scol, exc=None, cause=None)
        elif (count == 2):
            return self.ST.Raise(lineno=st.srow, col_offset=st.scol, exc=self.do(st[1], ctx), cause=None)
        else:
            return self.ST.Raise(lineno=st.srow, col_offset=st.scol, exc=self.do(st[1], ctx), cause=self.do(st[3], ctx))

    def do_import_stmt(self, st, ctx):
        return self.do(st[0], ctx)

    def do_import_name(self, st, ctx):
        return self.ST.Import(lineno=st.srow, col_offset=st.scol, names=self.do(st[1], ctx))

    def do_import_from(self, st, ctx):
        # Count leading dots to determine the relative-import level; 'next'
        # ends up indexing the module name (if any) after the dots.
        level = 0
        next = 1
        for (i, child) in enumerate(st[1:]):
            text = child.text
            if (text not in ('.', '...')):
                next = (i + 1)
                break
            level += len(text)
        if (text == 'import'):
            # "from . import x" -- purely relative, no module name.
            module = ''
            next += 1
        else:
            module = self.do(st[next], ctx)
            next += 2
        text = st[next].text
        if (text == '*'):
            names = [self.ST.alias(lineno=st[next].srow, col_offset=st[next].scol, name='*', asname=None)]
        elif (text == '('):
            # Parenthesized import list.
            names = self.do(st[(next + 1)], ctx)
        else:
            names = self.do(st[next], ctx)
        return self.ST.ImportFrom(lineno=st.srow, col_offset=st.scol, module=module, names=names, level=level)

    def do_import_as_name(self, st, ctx):
        if (len(st) == 1):
            return self.ST.alias(lineno=st.srow, col_offset=st.scol, name=st[0].text, asname=None)
        else:
            return self.ST.alias(lineno=st.srow, col_offset=st.scol, name=st[0].text, asname=st[2].text)

    def do_dotted_as_name(self, st, ctx):
        if (len(st) == 1):
            return self.ST.alias(lineno=st.srow, col_offset=st.scol, name=self.do(st[0], ctx), asname=None)
        else:
            return self.ST.alias(lineno=st.srow, col_offset=st.scol, name=self.do(st[0], ctx), asname=st[2].text)

    def do_import_as_names(self, st, ctx):
        return [self.do(child, ctx) for child in st[::2]]

    def do_dotted_as_names(self, st, ctx):
        return [self.do(child, ctx) for child in st[::2]]

    def do_dotted_name(self, st, ctx):
        # Returns the dotted path as a plain string, not an AST node.
        return '.'.join((child.text for child in st[::2]))

    def do_global_stmt(self, st, ctx):
        return self.ST.Global(lineno=st.srow, col_offset=st.scol, names=[child.text for child in st[1::2]])

    def do_nonlocal_stmt(self, st, ctx):
        return self.ST.Nonlocal(lineno=st.srow, col_offset=st.scol, names=[child.text for child in st[1::2]])

    def do_assert_stmt(self, st, ctx):
        if (len(st) == 2):
            return self.ST.Assert(lineno=st.srow, col_offset=st.scol, test=self.do(st[1], ctx), msg=None)
        else:
            return self.ST.Assert(lineno=st.srow, col_offset=st.scol, test=self.do(st[1], ctx), msg=self.do(st[3], ctx))

    def do_compound_stmt(self, st, ctx):
        return self.do(st[0], ctx)

    def do_if_stmt(self, st, ctx):
        # Walk "if/elif/else" clauses, chaining each elif as the orelse of
        # the previous If node; a trailing 3-token group is the else suite.
        nodes = list(st)
        first = None
        last = None
        while nodes:
            if (len(nodes) == 3):
                # "else : suite"
                last.orelse.extend(self.do(nodes[2], ctx))
                del nodes[:3]
            else:
                # "if/elif test : suite"
                next = self.ST.If(lineno=nodes[0].srow, col_offset=nodes[0].scol, test=self.do(nodes[1], ctx), body=self.do(nodes[3], ctx), orelse=[])
                if (first is None):
                    first = next
                if (last is not None):
                    last.orelse.append(next)
                last = next
                del nodes[:4]
        return first

    def do_while_stmt(self, st, ctx):
        if (len(st) == 4):
            return self.ST.While(lineno=st.srow, col_offset=st.scol, test=self.do(st[1], ctx), body=self.do(st[3], ctx), orelse=[])
        else:
            # Includes a "while ... else:" suite.
            return self.ST.While(lineno=st.srow, col_offset=st.scol, test=self.do(st[1], ctx), body=self.do(st[3], ctx), orelse=self.do(st[6], ctx))

    def do_for_stmt(self, st, ctx):
        if (len(st) == 6):
            return self.ST.For(lineno=st.srow, col_offset=st.scol, target=self.do(st[1], ast.Store), iter=self.do(st[3], ctx), body=self.do(st[5], ctx), orelse=[])
        else:
            # Includes a "for ... else:" suite.
            return self.ST.For(lineno=st.srow, col_offset=st.scol, target=self.do(st[1], ast.Store), iter=self.do(st[3], ctx), body=self.do(st[5], ctx), orelse=self.do(st[8], ctx))

    def do_try_stmt(self, st, ctx):
        # Collect except/else/finally clauses in 3-token groups after "try : suite".
        handlers = []
        finalbody = None
        orelse = []
        nodes = st[3:]
        while nodes:
            if (nodes[0].text == 'else'):
                orelse.extend(self.do(nodes[2], ctx))
            elif (nodes[0].text == 'finally'):
                finalbody = self.do(nodes[2], ctx)
            else:
                (t, n) = self.do(nodes[0], ctx)
                handlers.append(self.ST.ExceptHandler(lineno=nodes[0].srow, col_offset=nodes[0].scol, type=t, name=n, body=self.do(nodes[2], ctx)))
            del nodes[:3]
        # Emit TryExcept, wrapped in TryFinally if a finally clause exists
        # (pre-3.3 AST shape where these were separate node types).
        stmt = self.ST.TryExcept(lineno=st.srow, col_offset=st.scol, body=self.do(st[2], ctx), handlers=handlers, orelse=orelse)
        if (finalbody is None):
            return stmt
        else:
            return self.ST.TryFinally(lineno=st.srow, col_offset=st.scol, body=[stmt], finalbody=finalbody)

    def do_with_stmt(self, st, ctx):
        if (len(st) == 5):
            # "with expr as var : suite"
            return self.ST.With(lineno=st.srow, col_offset=st.scol, context_expr=self.do(st[1], ctx), optional_vars=self.do(st[2], self.ST.Store), body=self.do(st[4], ctx))
        else:
            # "with expr : suite"
            return self.ST.With(lineno=st.srow, col_offset=st.scol, context_expr=self.do(st[1], ctx), optional_vars=None, body=self.do(st[3], ctx))

    def do_with_var(self, st, ctx):
        return self.do(st[1], ctx)

    def do_except_clause(self, st, ctx):
        # Returns (exception-type, bound-name) for the handler.
        if (len(st) == 1):
            return (None, None)
        elif (len(st) == 2):
            return (self.do(st[1], ctx), None)
        else:
            return (self.do(st[1], ctx), self.ST.Name(lineno=st[3].srow, col_offset=st[3].scol, id=st[3].text, ctx=self.ST.Store()))

    def do_suite(self, st, ctx):
        if (len(st) == 1):
            # Inline suite on the same line.
            return self.do(st[0], ctx)
        else:
            # Indented block: concatenate statements between NEWLINE/INDENT
            # and DEDENT.
            return reduce(operator.add, (self.do(child, ctx) for child in st[2:(- 1)]), [])

    def do_test(self, st, ctx):
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            # Conditional expression "body if test else orelse".
            return self.ST.IfExp(lineno=st.srow, col_offset=st.scol, test=self.do(st[2], ctx), body=self.do(st[0], ctx), orelse=self.do(st[4], ctx))

    def do_test_nocond(self, st, ctx):
        return self.do(st[0], ctx)

    def do_lambdef(self, st, ctx):
        if (len(st) == 3):
            # "lambda : body" -- empty argument list.
            return self.ST.Lambda(lineno=st.srow, col_offset=st.scol, args=self.ST.arguments(lineno=st.srow, col_offset=st.scol, args=[], vararg=None, varargannotation=None, kwonlyargs=[], kwarg=None, kwargannotation=None, defaults=[], kw_defaults=[]), body=self.do(st[(- 1)], ctx))
        else:
            return self.ST.Lambda(lineno=st.srow, col_offset=st.scol, args=self.do(st[1], ctx), body=self.do(st[(- 1)], ctx))

    def do_lambdef_nocond(self, st, ctx):
        tree = self.do_lambdef(st, ctx)
        tree.st = st
        return tree

    def do_or_test(self, st, ctx):
        return self._do_boolean(st, ctx)

    def do_and_test(self, st, ctx):
        return self._do_boolean(st, ctx)

    def do_not_test(self, st, ctx):
        return self._do_unary(st, ctx)

    def do_comparison(self, st, ctx):
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            # Chained comparison "a < b < c": one Compare node with parallel
            # ops/comparators lists.
            return self.ST.Compare(lineno=st.srow, col_offset=st.scol, left=self.do(st[0], ctx), ops=[self.do(child, ctx) for child in st[1::2]], comparators=[self.do(child, ctx) for child in st[2::2]])

    # Comparison operator text -> AST operator class (incl. two-word forms).
    _comparison = {'<': ast.Lt, '>': ast.Gt, '==': ast.Eq, '>=': ast.GtE, '<=': ast.LtE, '!=': ast.NotEq, '<>': ast.NotEq, 'in': ast.In, 'not in': ast.NotIn, 'is': ast.Is, 'is not': ast.IsNot}

    def do_comp_op(self, st, ctx):
        # Join multi-token operators ("not in", "is not") with a space.
        text = ' '.join((child.text for child in st))
        return self._comparison[text](lineno=st.srow, col_offset=st.scol)

    def do_star_expr(self, st, ctx):
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            return self.ST.Starred(lineno=st.srow, col_offset=st.scol, value=self.do(st[1], ctx), ctx=ctx())

    def do_expr(self, st, ctx):
        return self._do_binary(st, ctx)

    def do_xor_expr(self, st, ctx):
        return self._do_binary(st, ctx)

    def do_and_expr(self, st, ctx):
        return self._do_binary(st, ctx)

    def do_shift_expr(self, st, ctx):
        return self._do_binary(st, ctx)

    def do_arith_expr(self, st, ctx):
        return self._do_binary(st, ctx)

    def do_term(self, st, ctx):
        return self._do_binary(st, ctx)

    def do_factor(self, st, ctx):
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            tree = self._do_unary(st, ctx)
            # Constant-fold "-<number>" into a negative Num literal.
            if (isinstance(tree.op, self.ST.USub) and isinstance(tree.operand, self.ST.Num) and (tree.operand.n > 0)):
                tree = self.ST.Num(lineno=st.srow, col_offset=st.scol, n=(- tree.operand.n))
            return tree

    def do_power(self, st, ctx):
        if (len(st) == 1):
            return self.do(st[0], ctx)
        else:
            # Apply trailers (call / attribute / subscript) left to right;
            # a trailing "** exponent" binds around the accumulated result.
            left = self.do(st[0], ctx)
            power = None
            for child in st[1:]:
                if (child.text == '**'):
                    power = self.do(st[(- 1)], ctx)
                    break
                # Each trailer handler returns a closure applied to the tree.
                trailer = self.do(child, ctx)
                left = trailer(left, st.srow, st.scol)
            if power:
                return self.ST.BinOp(lineno=st.srow, col_offset=st.scol, left=left, op=self.ST.Pow(lineno=st[(- 2)].srow, col_offset=st[(- 2)].scol), right=power)
            else:
                return left

    def do_atom(self, st, ctx):
        # Dispatch on the first token: literal, name, or bracketed construct.
        (kind, text) = (st[0].kind, st[0].text)
        if (kind == self.NUMBER):
            return self.ST.Num(lineno=st.srow, col_offset=st.scol, n=self.ST.literal_eval(text))
        elif (kind == self.NAME):
            return self.ST.Name(lineno=st.srow, col_offset=st.scol, id=text, ctx=ctx())
        elif (kind == self.STRING):
            # Adjacent string literals are implicitly concatenated.
            return self.ST.Str(lineno=st.srow, col_offset=st.scol, s=''.join((self.ST.literal_eval(child.text) for child in st)))
        elif (text == '...'):
            return self.ST.Ellipsis(lineno=st.srow, col_offset=st.scol)
        elif (text == '['):
            if (len(st) == 2):
                return self.ST.List(lineno=st.srow, col_offset=st.scol, elts=[], ctx=ctx())
            else:
                # testlist_comp distinguishes list display vs comprehension.
                (loop, elts, atom) = self.do(st[1], ctx)
                if (atom is not None):
                    elts = [atom]
                if (loop is None):
                    return self.ST.List(lineno=st.srow, col_offset=st.scol, elts=elts, ctx=ctx())
                else:
                    return self.ST.ListComp(lineno=st.srow, col_offset=st.scol, elt=loop, generators=elts)
        elif (text == '('):
            if (len(st) == 2):
                return self.ST.Tuple(lineno=st.srow, col_offset=st.scol, elts=[], ctx=ctx())
            elif (st[1].symbol == 'yield_expr'):
                return self.do(st[1], ctx)
            else:
                # Parenthesized expression, tuple display, or generator expr.
                (loop, elts, atom) = self.do(st[1], ctx)
                if (atom is not None):
                    return atom
                elif (loop is None):
                    return self.ST.Tuple(lineno=st.srow, col_offset=st.scol, elts=elts, ctx=ctx())
                else:
                    return self.ST.GeneratorExp(lineno=st.srow, col_offset=st.scol, elt=loop, generators=elts)
        elif (len(st) == 2):
            # "{}" -- empty dict display.
            return self.ST.Dict(lineno=st.srow, col_offset=st.scol, keys=[], values=[])
        else:
            return self.do(st[1], ctx)

    def do_testlist_comp(self, st, ctx):
        # Returns (comprehension-elt, elements-or-generators, single-atom);
        # exactly one "shape" is populated per case.
        if (len(st) == 1):
            return (None, None, self.do(st[0]))
        elif (st[1].text == ','):
            return (None, [self.do(child, ctx) for child in st[::2]], None)
        else:
            return (self.do(st[0], ctx), self.do(st[1], ctx)[0], None)

    def do_trailer(self, st, ctx):
        # Returns a closure that, given the expression built so far, wraps it
        # in the appropriate Attribute / Subscript / Call node.
        hd = st[0].text
        if (hd == '.'):
            def trail(tree, lineno, col_offset):
                return self.ST.Attribute(lineno=lineno, col_offset=col_offset, value=tree, attr=st[1].text, ctx=ctx())
        elif (hd == '['):
            subscript = self.do(st[1], ctx)
            if (len(subscript) == 1):
                subscript = subscript[0]
            else:
                # Multiple comma-separated subscripts form an extended slice.
                subscript = self.ST.ExtSlice(lineno=st[1].srow, col_offset=st[1].scol, dims=subscript, ctx=ctx())
            def trail(tree, lineno, col_offset):
                return self.ST.Subscript(lineno=lineno, col_offset=col_offset, value=tree, slice=subscript, ctx=ctx())
        elif (len(st) == 2):
            # "()" -- call with no arguments.
            def trail(tree, lineno, col_offset):
                return self.ST.Call(lineno=lineno, col_offset=col_offset, func=tree, args=[], keywords=[], starargs=None, kwargs=None)
        else:
            def trail(tree, lineno, col_offset):
                (args, keywords, starargs, kwargs) = self.do(st[1], ctx)
                return self.ST.Call(lineno=lineno, col_offset=col_offset, func=tree, args=args, keywords=keywords, starargs=starargs, kwargs=kwargs)
        return trail

    def do_subscriptlist(self, st, ctx):
        return [self.do(child, ctx) for child in st[::2]]

    def do_subscript(self, st, ctx):
        # Disambiguate the many "lower : upper : step" slice spellings by
        # child count and the positions of ':' / sliceop tokens.
        count = len(st)
        if ((count == 1) and (st[0].text == ':')):
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=None, upper=None, step=None)
        elif (count == 1):
            return self.ST.Index(lineno=st.srow, col_offset=st.scol, value=self.do(st[0], ctx))
        elif (count == 4):
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=self.do(st[0], ctx), upper=self.do(st[2], ctx), step=self.do(st[3], ctx))
        elif ((count == 3) and (st[(- 1)].symbol == 'test')):
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=self.do(st[0], ctx), upper=self.do(st[2], ctx), step=None)
        elif ((count == 3) and (st[0].text == ':')):
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=None, upper=self.do(st[1], ctx), step=self.do(st[2], ctx))
        elif (count == 3):
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=self.do(st[0], ctx), upper=None, step=self.do(st[2], ctx))
        elif ((count == 2) and (st[(- 1)].symbol == 'sliceop')):
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=None, upper=None, step=self.do(st[1], ctx))
        elif ((count == 2) and (st[0].text == ':')):
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=None, upper=self.do(st[1], ctx), step=None)
        else:
            return self.ST.Slice(lineno=st.srow, col_offset=st.scol, lower=self.do(st[0], ctx), upper=None, step=None)

    def do_sliceop(self, st, ctx):
        if (len(st) == 1):
            # A bare trailing ':' -- step defaults to the name None.
            return self.ST.Name(lineno=st.srow, col_offset=st.scol, id='None', ctx=ctx())
        else:
            return self.do(st[1], ctx)

    def do_exprlist(self, st, ctx):
        tree = self.do_testlist(st, ctx)
        tree.st = st
        return tree

    def do_testlist(self, st, ctx):
        # One element passes through; several become a Tuple.
        lst = [self.do(child, ctx) for child in st[::2]]
        if (len(lst) == 1):
            return lst[0]
        else:
            return self.ST.Tuple(lineno=st.srow, col_offset=st.scol, elts=lst, ctx=ctx())

    def do_dictorsetmaker(self, st, ctx):
        # "{...}" interior: set display, dict display, dict comprehension,
        # or set comprehension, distinguished by the ':' token and length.
        if (len(st) == 1):
            return self.ST.Set(lineno=st.srow, col_offset=st.scol, elts=[self.do(st[0], ctx)])
        elif (st[1].text == ':'):
            if ((len(st) < 4) or (st[3].text == ',')):
                # Dict display: keys/values alternate every 4 tokens.
                return self.ST.Dict(lineno=st.srow, col_offset=st.scol, keys=[self.do(child, ctx) for child in st[::4]], values=[self.do(child, ctx) for child in st[2::4]])
            else:
                return self.ST.DictComp(lineno=st.srow, col_offset=st.scol, key=self.do(st[0], ctx), value=self.do(st[2], ctx), generators=self.do(st[3], ctx)[0])
        else:
            (loop, elts, atom) = self.do_testlist_comp(st, ctx)
            if (loop is None):
                return self.ST.Set(lineno=st.srow, col_offset=st.scol, elts=elts)
            else:
                return self.ST.SetComp(lineno=st.srow, col_offset=st.scol, elt=loop, generators=elts)

    def do_classdef(self, st, ctx):
        if (len(st) <= 6):
            # "class Name:" or "class Name():" -- no bases.
            return self.ST.ClassDef(lineno=st.srow, col_offset=st.scol, name=st[1].text, bases=[], keywords=[], starargs=None, kwargs=None, body=self.do(st[(- 1)], ctx), decorator_list=[])
        else:
            (args, keywords, starargs, kwargs) = self.do(st[3], ctx)
            return self.ST.ClassDef(lineno=st.srow, col_offset=st.scol, name=st[1].text, bases=args, keywords=keywords, starargs=starargs, kwargs=kwargs, body=self.do(st[(- 1)], ctx), decorator_list=[])

    def do_arglist(self, st, ctx):
        """Parse a call argument list into (args, keywords, starargs, kwargs)."""
        args = []
        keywords = []
        # Keyword names seen so far, to reject duplicates.
        allkw = set()
        starargs = None
        kwargs = None
        nodes = [n for n in st if (n.text != ',')]
        while nodes:
            if (nodes[0].text == '*'):
                starargs = self.do(nodes[1], ctx)
                del nodes[:2]
            elif (nodes[0].text == '**'):
                kwargs = self.do(nodes[1], ctx)
                del nodes[:2]
            else:
                arg = self.do(nodes[0], ctx)
                if isinstance(arg, self.ST.keyword):
                    if (arg.arg in allkw):
                        raise ParseError(nodes[0].text, reason='keyword argument repeated')
                    keywords.append(arg)
                    allkw.add(arg.arg)
                elif (starargs is not None):
                    # Positional args may not follow a *expression.
                    raise ParseError(nodes[0].text, reason='only named arguments may follow *expression')
                else:
                    args.append(arg)
                del nodes[0]
        return (args, keywords, starargs, kwargs)

    def do_argument(self, st, ctx):
        test = self.do(st[0], ctx)
        if (len(st) == 1):
            return test
        elif (len(st) == 3):
            # "name = value" keyword argument; the left side must be a Name.
            if (not isinstance(test, self.ST.Name)):
                raise ParseError(st[0].text, reason="keyword can't be an expression")
            return self.ST.keyword(lineno=st.srow, col_offset=st.scol, arg=test.id, value=self.do(st[2], ctx))
        else:
            # Bare generator-expression argument: "f(x for x in xs)".
            (comp, ifs) = self.do(st[1], ctx)
            return self.ST.GeneratorExp(lineno=st.srow, col_offset=st.scol, elt=test, generators=comp)

    def do_comp_iter(self, st, ctx):
        return self.do(st[0], ctx)

    def do_comp_for(self, st, ctx):
        # Returns (comprehensions, pending-ifs); nested comp_for/comp_if
        # clauses accumulate recursively.
        if (len(st) == 4):
            return ([self.ST.comprehension(lineno=st.srow, col_offset=st.scol, target=self.do(st[1], ast.Store), iter=self.do(st[3], ctx), ifs=[])], [])
        else:
            (comp, ifs) = self.do(st[4], ctx)
            return (([self.ST.comprehension(lineno=st.srow, col_offset=st.scol, target=self.do(st[1], ast.Store), iter=self.do(st[3], ctx), ifs=ifs)] + comp), [])

    def do_comp_if(self, st, ctx):
        if (len(st) == 2):
            return ([], [self.do(st[1], ctx)])
        else:
            (comp, ifs) = self.do(st[2], ctx)
            return (comp, ([self.do(st[1], ctx)] + ifs))

    def do_yield_expr(self, st, ctx):
        if (len(st) == 2):
            return self.ST.Yield(lineno=st.srow, col_offset=st.scol, value=self.do(st[1], ctx))
        else:
            return self.ST.Yield(lineno=st.srow, col_offset=st.scol, value=None)

    def parse(cls, expr, mode='exec', filename='<string>'):
        """Parse *expr* and return an AST shaped for the given compile *mode*.

        'exec' returns a Module, 'eval' an Expression (the source must be a
        single expression), and 'single' an Interactive node.
        """
        tree = cls(cls.parser.parseString((expr.strip() + '\n'), filename=filename)).ast
        if (mode == 'exec'):
            return tree
        elif (mode == 'eval'):
            if ((len(tree.body) > 1) or (not isinstance(tree.body[0], cls.ST.Expr))):
                raise ParseError(None, reason='invalid syntax')
            return cls.ST.Expression(body=tree.body[0].value)
        elif (mode == 'single'):
            return cls.ST.Interactive(body=tree.body)
        else:
            raise ValueError("arg 2 must be 'exec', 'eval' or 'single'")
def test_bitly_total_clicks():
    """total_clicks() sums the click counts reported by the Bitly API."""
    response_body = json.dumps({'link_clicks': [{'clicks': 20}]})
    auth_headers = {'Authorization': f'Bearer {token}'}
    # Rebuild the bitlink id (netloc + path) from the shortened URL.
    bitlink = ''.join(urlparse(shorten)[1:3])
    endpoint = f'{bitly.api_url}/bitlinks/{bitlink}/clicks'
    responses.add(responses.GET, endpoint, headers=auth_headers, body=response_body, match_querystring=True)
    assert bitly.total_clicks(shorten) == 20
def map_fun(context):
    """Flink-on-TF node entry point: run a small distributed TF1 graph.

    PS nodes block forever serving parameters; worker nodes repeatedly bump
    a global step and stream a CSV-encoded record batch back to Flink.
    """
    tf_context = TFContext(context)
    job_name = tf_context.get_node_type()
    index = tf_context.get_index()
    cluster_json = tf_context.get_tf_cluster_config()
    print(cluster_json)
    sys.stdout.flush()
    cluster = tf.train.ClusterSpec(cluster=cluster_json)
    server = tf.train.Server(cluster, job_name=job_name, task_index=index)
    # Restrict device visibility to the PS jobs plus this worker task only.
    sess_config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False, device_filters=['/job:ps', ('/job:worker/task:%d' % index)])
    if ('ps' == job_name):
        # Parameter servers never return; they serve until externally torn down.
        from time import sleep
        while True:
            sleep(1)
    else:
        with tf.device(tf.train.replica_device_setter(worker_device=('/job:worker/task:' + str(index)), cluster=cluster)):
            global_step = tf.contrib.framework.get_or_create_global_step()
            global_step_inc = tf.assign_add(global_step, 1)
            # Fixed sample batch: one int, one float and one string column.
            input_records = [tf.constant([1, 2, 3]), tf.constant([1.0, 2.0, 3.0]), tf.constant(['1.0', '2.0', '3.0'])]
            out = tff_ops.encode_csv(input_list=input_records, field_delim='|')
            # Stream each encoded row back to the Flink side of the bridge.
            fw = tff_ops.FlinkTFRecordWriter(address=context.toFlink())
            w = fw.write([out])
            is_chief = (index == 0)
            t = time.time()
            try:
                # Stop after 50 global steps; checkpoints go to a per-run dir.
                hooks = [tf.train.StopAtStepHook(last_step=50)]
                with tf.train.MonitoredTrainingSession(master=server.target, config=sess_config, is_chief=is_chief, checkpoint_dir=('./target/tmp/with_output/' + str(t)), hooks=hooks) as mon_sess:
                    while (not mon_sess.should_stop()):
                        print(index, mon_sess.run([global_step_inc, w]))
                        sys.stdout.flush()
                        time.sleep(1)
            finally:
                # Drop cached summary writers so a rerun starts clean.
                SummaryWriterCache.clear()
_eh
class LaevateinHandler(THBEventHandler):
    """Laevatein skill trigger: after an attack is grazed (dodged), the
    attacker may drop two cards to force the attack through anyway."""
    interested = ['attack_aftergraze']
    # Cards chosen through this handler are dropped as the skill's cost.
    card_usage = 'drop'

    def handle(self, evt_type, arg):
        if (evt_type == 'attack_aftergraze'):
            (act, succeed) = arg
            assert isinstance(act, basic.BaseAttack)
            if succeed:
                # The attack already hit; the skill has nothing to do.
                return arg
            src = act.source
            tgt = act.target
            if ((not src) or (not src.has_skill(LaevateinSkill))):
                return arg
            g = self.game
            # Ask the attacker to pay the cost from hand/shown/equip areas.
            cards = user_choose_cards(self, src, ('cards', 'showncards', 'equips'))
            if (not cards):
                return arg
            g.process_action(DropCards(src, src, cards))
            g.process_action(Laevatein(src, tgt))
            # Report the attack as having succeeded after all.
            return (act, True)
        return arg

    def cond(self, cards):
        # Cost validation: exactly two cards from the allowed zones; the
        # equipped Laevatein card itself and skill-cards cannot pay the cost.
        if (not (len(cards) == 2)):
            return False
        from thb.cards.definition import LaevateinCard
        for c in cards:
            t = c.resides_in.type
            if (t not in ('cards', 'showncards', 'equips')):
                return False
            elif ((t == 'equips') and c.is_card(LaevateinCard)):
                return False
            elif c.is_card(Skill):
                return False
        return True
def test_api_version_ord():
    """Ordering on APIVersion follows (major, minor) precedence."""
    base = APIVersion(1, 0)
    # Equality and same-major ordering.
    assert base == APIVersion(1, 0)
    assert base < APIVersion(1, 1)
    assert APIVersion(1, 1) <= APIVersion(1, 1)
    # The major component dominates the minor one.
    assert base < APIVersion(2, 0)
    assert not (APIVersion(2, 1) <= APIVersion(2, 0))
    assert APIVersion(2, 1) > APIVersion(2, 0)
class GraphAdjacencyMethods():
    """Mixin-style helpers for building and normalising graph adjacency tensors."""

    def _dummy_symmetric_adj_tensor_factory(cls, in_shape: Sequence[int]) -> Callable[([], torch.Tensor)]:
        """Return a factory producing random binary symmetric adjacency tensors
        of shape ``(1, *in_shape[-2:])`` style (a batch dim of 1 is prepended)."""
        def _make() -> torch.Tensor:
            raw = torch.from_numpy(np.random.randint(0, 2, size=in_shape).astype(np.float32))
            # Symmetrise by adding the transpose, then clamp positives back to 1.
            sym = raw + torch.transpose(raw, dim0=(- 2), dim1=(- 1))
            sym[sym > 0] = 1
            return sym.unsqueeze(dim=0)
        return _make

    def preprocess_adj_to_adj_bar(cls, adj: torch.Tensor) -> Union[(torch.Tensor, np.ndarray)]:
        """Add self-loops: ``adj_bar = adj + I``, accepting batched or unbatched input."""
        batched = len(adj.shape) > 2
        if not batched:
            adj = adj.unsqueeze(0)
        assert adj.shape[(- 1)] == adj.shape[(- 2)], 'The adj matrix should be a square matrix'
        identity = torch.eye(adj.shape[(- 2)], device=adj.device).repeat(adj.shape[0], 1, 1)
        adj_bar = adj + identity
        # Drop the synthetic batch dim we added for unbatched input.
        return adj_bar if batched else adj_bar.squeeze(0)

    def preprocess_adj_to_adj_hat(cls, adj: torch.Tensor, self_importance_scalar: Optional[torch.Tensor]=torch.tensor(1)) -> Union[(torch.Tensor, np.ndarray)]:
        """GCN-style symmetric normalisation: ``D^-1/2 (A + s*I) D^-1/2``,
        where ``s`` is ``self_importance_scalar``."""
        batched = len(adj.shape) > 2
        if not batched:
            adj = adj.unsqueeze(0)
        assert adj.shape[(- 1)] == adj.shape[(- 2)], 'The adj matrix should be a square matrix'
        identity = torch.eye(adj.shape[(- 2)], device=adj.device).repeat(adj.shape[0], 1, 1)
        adj_bar = adj + (self_importance_scalar * identity)
        # Inverse square root of the degree vector; isolated nodes map to 0.
        deg_inv_sqrt = torch.pow(adj_bar.sum((- 1)), (- 0.5))
        deg_inv_sqrt[torch.isinf(deg_inv_sqrt)] = 0.0
        d_mat = torch.diag_embed(deg_inv_sqrt)
        row_scaled = torch.matmul(adj_bar, d_mat)
        adj_hat = torch.matmul(torch.transpose(row_scaled, dim0=(- 2), dim1=(- 1)), d_mat)
        return adj_hat if batched else adj_hat.squeeze(0)
def upgrade():
    """Alembic upgrade: make the listed datetime columns timezone-aware and
    reinterpret their existing naive values as UTC.

    ``tables_to_update`` maps table name -> iterable of column names.
    """
    for table_name in tables_to_update:
        logger.info(('upgrading table: %s' % table_name))
        # batch_alter_table so the ALTERs also work on SQLite.
        with op.batch_alter_table(table_name) as batch_op:
            for column_name in tables_to_update[table_name]:
                logger.info(('altering column: %s' % column_name))
                batch_op.alter_column(column_name, type_=sa.DateTime(timezone=True))
        # Build one UPDATE statement per table covering every converted column.
        sql = '\n -- Add the time zone offset\n UPDATE\n "{table_name}"\n SET\n '.format(table_name=table_name)
        for (i, column_name) in enumerate(tables_to_update[table_name]):
            if (i > 0):
                sql = '{sql},\n'.format(sql=sql)
            # Each column is rewritten via a correlated subquery so the cast
            # "::timestamp at time zone 'utc'" sees the pre-ALTER value.
            sql = '{sql}\n "{column_name}" = (\n SELECT\n aliased_table.{column_name}::timestamp at time zone \'utc\'\n FROM "{table_name}" as aliased_table\n where aliased_table.id = "{table_name}".id\n )'.format(sql=sql, column_name=column_name, table_name=table_name)
        op.execute(sql)
        logger.info(('done upgrading table: %s' % table_name))
# NOTE(review): the bare string expression below was almost certainly
# `@patch('ecs_deploy.cli.get_client')` before the decorator marker was lost
# in extraction; as written it is a no-op.
('ecs_deploy.cli.get_client')
def test_update_task_empty_environment_variable_again(get_client, runner):
    """Re-setting an env var that is already '' should create a new revision
    without reporting any environment change."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.update, (TASK_DEFINITION_ARN_1, '-e', 'webserver', 'empty', ''))
    assert (result.exit_code == 0)
    assert (not result.exception)
    assert (u'Update task definition based on: test-task:1' in result.output)
    # No diff output: the value is unchanged.
    assert (u'Updating task definition' not in result.output)
    assert (u'Changed environment' not in result.output)
    assert (u'Successfully created revision: 2' in result.output)
def test_boundaryspec_classmethods():
    """The pml/pec/pmc/all_sides constructors install the expected boundary
    type on each axis (the non-selected axes fall back to defaults)."""

    def check(spec, expected_per_axis):
        # expected_per_axis gives one expected class per (x, y, z) axis;
        # both the minus- and plus-side boundaries must match it.
        boundary_list = spec.to_list
        assert all(isinstance(b, expected) for axis, expected in zip(boundary_list, expected_per_axis) for b in axis)

    check(BoundarySpec.pml(x=False, y=True, z=True), [Periodic, PML, PML])
    check(BoundarySpec.pec(x=True, z=True), [PECBoundary, PML, PECBoundary])
    check(BoundarySpec.pmc(y=True), [PML, PMCBoundary, PML])
    check(BoundarySpec.all_sides(boundary=PML()), [PML, PML, PML])
def patch_len(fit_generator):
    """Wrap a Keras ``fit_generator``-style callable so the train/validation
    sequences report ``steps_per_epoch`` / ``validation_steps`` from ``len()``
    while fitting runs.
    """
    # NOTE(review): the bare `(fit_generator)` below was likely
    # `@functools.wraps(fit_generator)` before the decorator marker was lost
    # in extraction; as written it is a no-op expression.
    (fit_generator)
    def fit_generator_patch_len(*args, **kwargs):
        # Assumes args[1] is the generator/sequence argument — TODO confirm against callers.
        generator = args[1]
        steps_per_epoch = kwargs.get('steps_per_epoch', len(generator))
        # Patch __len__ at class level so Keras' internal len() calls see the override.
        patch_train_sequence_len = patch.object(generator.__class__, '__len__', return_value=steps_per_epoch)
        validation_data = kwargs.get('validation_data', [])
        validation_steps = kwargs.get('validation_steps', len(validation_data))
        patch_val_sequence_len = patch.object(validation_data.__class__, '__len__', return_value=validation_steps)
        patch_train_sequence_len.start()
        if validation_steps:
            patch_val_sequence_len.start()
        history = fit_generator(*args, **kwargs)
        # NOTE(review): if fit_generator raises, the patches are never stopped;
        # consider wrapping in try/finally.
        patch_train_sequence_len.stop()
        if validation_steps:
            patch_val_sequence_len.stop()
        return history
    return fit_generator_patch_len
class Ops(enum.Enum):
    """EVM opcodes keyed by their one-byte instruction values (written in hex
    to match the usual opcode tables); member order is significant and is
    preserved from the original definition."""
    # 0x01-0x0B: arithmetic
    ADD = 0x01
    MUL = 0x02
    SUB = 0x03
    DIV = 0x04
    SDIV = 0x05
    MOD = 0x06
    SMOD = 0x07
    ADDMOD = 0x08
    MULMOD = 0x09
    EXP = 0x0A
    SIGNEXTEND = 0x0B
    # 0x10-0x1D: comparison & bitwise logic
    LT = 0x10
    GT = 0x11
    SLT = 0x12
    SGT = 0x13
    EQ = 0x14
    ISZERO = 0x15
    AND = 0x16
    OR = 0x17
    XOR = 0x18
    NOT = 0x19
    BYTE = 0x1A
    SHL = 0x1B
    SHR = 0x1C
    SAR = 0x1D
    # 0x20: hashing
    KECCAK = 0x20
    # 0x30-0x3F: environment information
    ADDRESS = 0x30
    BALANCE = 0x31
    ORIGIN = 0x32
    CALLER = 0x33
    CALLVALUE = 0x34
    CALLDATALOAD = 0x35
    CALLDATASIZE = 0x36
    CALLDATACOPY = 0x37
    CODESIZE = 0x38
    CODECOPY = 0x39
    GASPRICE = 0x3A
    EXTCODESIZE = 0x3B
    EXTCODECOPY = 0x3C
    RETURNDATASIZE = 0x3D
    RETURNDATACOPY = 0x3E
    EXTCODEHASH = 0x3F
    # 0x40-0x48: block information
    BLOCKHASH = 0x40
    COINBASE = 0x41
    TIMESTAMP = 0x42
    NUMBER = 0x43
    PREVRANDAO = 0x44
    GASLIMIT = 0x45
    CHAINID = 0x46
    SELFBALANCE = 0x47
    BASEFEE = 0x48
    # control flow & storage
    STOP = 0x00
    JUMP = 0x56
    JUMPI = 0x57
    PC = 0x58
    GAS = 0x5A
    JUMPDEST = 0x5B
    SLOAD = 0x54
    SSTORE = 0x55
    POP = 0x50
    # 0x60-0x7F: PUSH1..PUSH32
    PUSH1 = 0x60
    PUSH2 = 0x61
    PUSH3 = 0x62
    PUSH4 = 0x63
    PUSH5 = 0x64
    PUSH6 = 0x65
    PUSH7 = 0x66
    PUSH8 = 0x67
    PUSH9 = 0x68
    PUSH10 = 0x69
    PUSH11 = 0x6A
    PUSH12 = 0x6B
    PUSH13 = 0x6C
    PUSH14 = 0x6D
    PUSH15 = 0x6E
    PUSH16 = 0x6F
    PUSH17 = 0x70
    PUSH18 = 0x71
    PUSH19 = 0x72
    PUSH20 = 0x73
    PUSH21 = 0x74
    PUSH22 = 0x75
    PUSH23 = 0x76
    PUSH24 = 0x77
    PUSH25 = 0x78
    PUSH26 = 0x79
    PUSH27 = 0x7A
    PUSH28 = 0x7B
    PUSH29 = 0x7C
    PUSH30 = 0x7D
    PUSH31 = 0x7E
    PUSH32 = 0x7F
    # 0x80-0x8F: DUP1..DUP16
    DUP1 = 0x80
    DUP2 = 0x81
    DUP3 = 0x82
    DUP4 = 0x83
    DUP5 = 0x84
    DUP6 = 0x85
    DUP7 = 0x86
    DUP8 = 0x87
    DUP9 = 0x88
    DUP10 = 0x89
    DUP11 = 0x8A
    DUP12 = 0x8B
    DUP13 = 0x8C
    DUP14 = 0x8D
    DUP15 = 0x8E
    DUP16 = 0x8F
    # 0x90-0x9F: SWAP1..SWAP16
    SWAP1 = 0x90
    SWAP2 = 0x91
    SWAP3 = 0x92
    SWAP4 = 0x93
    SWAP5 = 0x94
    SWAP6 = 0x95
    SWAP7 = 0x96
    SWAP8 = 0x97
    SWAP9 = 0x98
    SWAP10 = 0x99
    SWAP11 = 0x9A
    SWAP12 = 0x9B
    SWAP13 = 0x9C
    SWAP14 = 0x9D
    SWAP15 = 0x9E
    SWAP16 = 0x9F
    # memory
    MLOAD = 0x51
    MSTORE = 0x52
    MSTORE8 = 0x53
    MSIZE = 0x59
    # 0xA0-0xA4: logging
    LOG0 = 0xA0
    LOG1 = 0xA1
    LOG2 = 0xA2
    LOG3 = 0xA3
    LOG4 = 0xA4
    # 0xF0s: system operations
    CREATE = 0xF0
    RETURN = 0xF3
    CALL = 0xF1
    CALLCODE = 0xF2
    DELEGATECALL = 0xF4
    STATICCALL = 0xFA
    REVERT = 0xFD
    SELFDESTRUCT = 0xFF
    CREATE2 = 0xF5
class SwapFacePipelineOptions():
    """Command-line options for the E4S face-swap pipeline."""

    def __init__(self):
        self.parser = ArgumentParser()
        self.initialize()

    @staticmethod
    def _str2bool(value):
        """Parse a boolean CLI value.

        BUG FIX: the original used ``type=bool``, under which any non-empty
        string — including ``'False'`` — parses as True.  This parser accepts
        the usual truthy spellings and treats everything else as False.
        """
        if isinstance(value, bool):
            return value
        return value.strip().lower() in ('1', 'true', 'yes', 'y', 't')

    def initialize(self):
        """Register all pipeline arguments (defaults unchanged from before)."""
        self.parser.add_argument('--num_seg_cls', type=int, default=12, help='Segmentation mask class number')
        self.parser.add_argument('--train_G', default=True, type=self._str2bool, help='Whether to train the model')
        self.parser.add_argument('--device', default='cuda:0', type=str, help='Which GPU(s) to use')
        self.parser.add_argument('--lap_bld', action='store_true', help='Whether to use Laplacian multi-band blending')
        self.parser.add_argument('--out_size', type=int, default=1024, help='output image size')
        self.parser.add_argument('--fsencoder_type', type=str, default='psp', help='FS Encode type')
        self.parser.add_argument('--remaining_layer_idx', type=int, default=13, help='mask-guided style injection, i.e., K in paper')
        self.parser.add_argument('--outer_dilation', type=int, default=15, help='dilation width')
        self.parser.add_argument('--erode_radius', type=int, default=3, help='erode width')
        self.parser.add_argument('--learn_in_w', action='store_true', help='Whether to learn in w space instead of w+')
        self.parser.add_argument('--start_from_latent_avg', action='store_true', default=True, help='Whether to add average latent vector to generate codes from encoder.')
        self.parser.add_argument('--output_size', default=1024, type=int, help='Output size of generator')
        self.parser.add_argument('--n_styles', default=18, type=int, help='StyleGAN')
        self.parser.add_argument('--checkpoint_path', default='./pretrained_ckpts/e4s/iteration_300000.pt', type=str, help='Path to E4S pre-trained model checkpoint')
        self.parser.add_argument('--faceParser_name', default='default', type=str, help='face parser name, [ default | segnext] is currently supported.')
        self.parser.add_argument('--source', type=str, default='example/input/faceswap/source.jpg', help='Path to the source image')
        self.parser.add_argument('--target', type=str, default='example/input/faceswap/target.jpg', help='Path to the target image')
        self.parser.add_argument('--target_mask', type=str, default='', help='Path to the target mask')
        self.parser.add_argument('--verbose', default=False, type=self._str2bool, help='Whether to show the intermediate results')
        self.parser.add_argument('--output_dir', type=str, default='example/output/faceswap', help='Path to the target mask')

    def parse(self):
        """Parse sys.argv and return the options namespace."""
        opts = self.parser.parse_args()
        return opts
def test_raises_field_errors_unexpected_only(invalid_event_id_field_error, unknown_event_id_field_error):
    """With only=True, an extra (unexpected) field error must make the
    raises_field_errors context manager fail its assertion."""
    errors = [invalid_event_id_field_error, unknown_event_id_field_error]
    # BUG FIX: `pytest.raises(pytest.raises.Exception)` referenced a
    # non-existent attribute (AttributeError at runtime); the assertion helper
    # signals a mismatch by raising AssertionError.
    with pytest.raises(AssertionError):
        with raises_field_errors({'event_id': ['UNKNOWN']}, only=True):
            raise Client.CallActionError(actions=[ActionResponse(action='', errors=errors)])
# NOTE(review): the leading `.skip(...)` was presumably
# `@pytest.mark.skip('require proper tests case')` before the
# '@pytest.mark' prefix was lost in extraction.
.skip('require proper tests case')
def test_data_quality_test_target_features_correlation() -> None:
    """TestTargetFeaturesCorrelations should pass by default, fail with gt=1
    (correlation can never exceed 1) and pass with lt=1."""
    test_dataset = pd.DataFrame({'feature1': [0, 1, 2, 3], 'target': [0, 0, 0, 1]})
    column_mapping = ColumnMapping(task='regression')
    suite = TestSuite(tests=[TestTargetFeaturesCorrelations()])
    suite.run(current_data=test_dataset, reference_data=test_dataset, column_mapping=column_mapping)
    assert suite
    suite = TestSuite(tests=[TestTargetFeaturesCorrelations(gt=1)])
    suite.run(current_data=test_dataset, reference_data=None, column_mapping=column_mapping)
    assert (not suite)
    suite = TestSuite(tests=[TestTargetFeaturesCorrelations(lt=1)])
    suite.run(current_data=test_dataset, reference_data=None, column_mapping=column_mapping)
    assert suite
    # Rendering and serialisation must not crash.
    assert suite.show()
    assert suite.json()
class Solution(object):
    """LeetCode 40 — Combination Sum II."""

    def combinationSum2(self, candidates, target):
        """Return the unique combinations (as tuples) of candidates summing to
        target, each candidate usable at most once; duplicates are removed via
        a set of tuples."""

        def backtrack(pool, start, remaining, path, found):
            # Ran off the end with a positive remainder: dead branch.
            if (start >= len(pool)) and (remaining > 0):
                return
            if remaining == 0:
                found.add(tuple(path))
                return
            if remaining < 0:
                return
            # Pool is sorted ascending, so a remainder smaller than the next
            # candidate can never be completed.
            if remaining < pool[start]:
                return
            # Branch 1: skip pool[start].
            backtrack(pool, start + 1, remaining, path, found)
            # Branch 2: take pool[start].
            path.append(pool[start])
            backtrack(pool, start + 1, remaining - pool[start], path, found)
            path.pop()

        candidates.sort()
        found = set()
        backtrack(candidates, 0, target, [], found)
        return list(found)
# NOTE(review): `_module()` is presumably the tail of a registry decorator
# (e.g. `@DATASETS.register_module()`) mangled during extraction.
_module()
class SampleDataset(Dataset):
    """Wrap a dataset and expose a fixed number of randomly-drawn samples.

    ``__getitem__`` ignores the requested index and returns a uniformly random
    element of the wrapped dataset, so epochs are ``num_samples`` long
    regardless of the underlying dataset size.
    """

    def __init__(self, dataset: Union[(dict, Dataset)], num_samples: int=8, collate_fn=None) -> None:
        super().__init__()
        self.num_samples = num_samples
        self.collate_fn = collate_fn
        # A dict is treated as a registry config and built lazily.
        if isinstance(dataset, dict):
            self.dataset = DATASETS.build(dataset)
        else:
            self.dataset = dataset

    def __len__(self):
        # Reported length is the configured sample count, not the real size.
        return self.num_samples

    def __getitem__(self, idx):
        # The incoming idx is deliberately discarded.
        idx = random.randint(0, (len(self.dataset) - 1))
        return self.dataset[idx]
# NOTE(review): each pair of identically-named methods below was almost
# certainly a `@property` getter plus `@<name>.setter` whose decorators were
# lost in extraction; as written, each second definition shadows the first.
class OptionSeriesColumnpyramidDatasorting(Options):
    """Data-sorting options for a columnpyramid series (getter/setter pairs
    delegating to the shared Options config store)."""

    def enabled(self):
        # Getter: whether data sorting is enabled (no configured default).
        return self._config_get(None)

    def enabled(self, flag: bool):
        # Setter: enable/disable data sorting.
        self._config(flag, js_type=False)

    def matchByName(self):
        # Getter: whether points are matched by name instead of index.
        return self._config_get(None)

    def matchByName(self, flag: bool):
        # Setter: toggle name-based point matching.
        self._config(flag, js_type=False)

    def sortKey(self):
        # Getter: the point property used as the sort key (default 'y').
        return self._config_get('y')

    def sortKey(self, text: str):
        # Setter: change the sort key property.
        self._config(text, js_type=False)
# NOTE(review): `_bad_request` looks like a decorator whose '@' (and possibly a
# prefix, e.g. `@handle_bad_request`) was lost in extraction.
_bad_request
def practice_price_per_unit(request, code):
    """Render the price-per-unit page for one practice at the latest (or the
    explicitly requested) prescribing date."""
    date = _specified_or_last_date(request, 'prescribing')
    practice = get_object_or_404(Practice, code=code)
    context = {'entity': practice, 'entity_name': practice.cased_name, 'entity_name_and_status': practice.name_and_status, 'highlight': practice.code, 'highlight_name': practice.cased_name, 'date': date, 'by_practice': True}
    return render(request, 'price_per_unit.html', context)
def create_multiple_website_clicks_ads(account, name, country, titles, bodies, urls, image_paths, bid_strategy, daily_budget=None, lifetime_budget=None, start_time=None, end_time=None, age_min=None, age_max=None, genders=None, campaign=None, paused=False):
    """Create a website-clicks campaign, one ad set, and one ad per
    (title, body, url, image) combination, batched through the Ads API.

    Exactly one of ``daily_budget`` / ``lifetime_budget`` must be given;
    a lifetime budget additionally requires ``end_time``.

    Returns the list of created Ad objects.
    Raises TypeError on an invalid budget combination.
    """
    if (daily_budget is None):
        if (lifetime_budget is None):
            raise TypeError('One of daily_budget or lifetime_budget must be defined.')
        elif (end_time is None):
            raise TypeError('If lifetime_budget is defined, end_time must be defined.')
    # Reuse a caller-supplied campaign, or create a fresh one.
    if (not campaign):
        campaign = AdCampaign(parent_id=account.get_id_assured())
        campaign[AdCampaign.Field.name] = (name + ' Campaign')
        campaign[AdCampaign.Field.objective] = AdCampaign.Objective.website_clicks
        campaign[AdCampaign.Field.status] = (AdCampaign.Status.active if (not paused) else AdCampaign.Status.paused)
        campaign.remote_create()
    ad_set = AdSet(parent_id=account.get_id_assured())
    ad_set[AdSet.Field.campaign_group_id] = campaign.get_id_assured()
    ad_set[AdSet.Field.name] = (name + ' AdSet')
    # BUG FIX: the original assigned the undefined name `bid_type` here
    # (NameError at runtime) while the `bid_strategy` parameter went unused.
    ad_set[AdSet.Field.bid_type] = bid_strategy
    if daily_budget:
        ad_set[AdSet.Field.daily_budget] = daily_budget
    else:
        ad_set[AdSet.Field.lifetime_budget] = lifetime_budget
    if end_time:
        ad_set[AdSet.Field.end_time] = end_time
    if start_time:
        ad_set[AdSet.Field.start_time] = start_time
    # Targeting: mandatory country plus optional demographics.
    targeting = {}
    targeting[TargetingSpecsField.geo_locations] = {'countries': [country]}
    if age_max:
        targeting[TargetingSpecsField.age_max] = age_max
    if age_min:
        targeting[TargetingSpecsField.age_min] = age_min
    if genders:
        targeting[TargetingSpecsField.genders] = genders
    ad_set[AdSet.Field.targeting] = targeting
    ad_set.remote_create()
    # Upload each image once and keep its hash for the creatives.
    image_hashes = []
    for image_path in image_paths:
        img = AdImage(parent_id=account.get_id_assured())
        img[AdImage.Field.filename] = image_path
        img.remote_create()
        image_hashes.append(img.get_hash())
    ADGROUP_BATCH_CREATE_LIMIT = 10
    ad_groups_created = []

    def callback_failure(response):
        # Abort on the first ad that fails to create within a batch.
        raise response.error()

    # One ad per creative combination, created in API batches of at most 10.
    for creative_info_batch in generate_batches(itertools.product(titles, bodies, urls, image_hashes), ADGROUP_BATCH_CREATE_LIMIT):
        api_batch = account.get_api_assured().new_batch()
        for (title, body, url, image_hash) in creative_info_batch:
            ad = Ad(parent_id=account.get_id_assured())
            ad[Ad.Field.name] = (name + ' Ad')
            ad[Ad.Field.campaign_id] = ad_set.get_id_assured()
            ad[Ad.Field.creative] = {AdCreative.Field.title: title, AdCreative.Field.body: body, AdCreative.Field.object_url: url, AdCreative.Field.image_hash: image_hash}
            ad.remote_create(batch=api_batch, failure=callback_failure)
            ad_groups_created.append(ad)
        api_batch.execute()
    return ad_groups_created
def fetch_dag_data(dag_dump_dir: str, epoch_seed: bytes) -> Tuple[(bytes, ...)]:
    """Load an ethash DAG dump and split it into HASH_BYTES-sized items.

    The dump file name embeds the first 8 bytes of the epoch seed (hex); the
    first 8 bytes of the file are a header and are discarded.
    """
    path = f'{dag_dump_dir}/full-R23-{epoch_seed.hex()[:16]}'
    with open(path, 'rb') as handle:
        payload = handle.read()[8:]
    return tuple(payload[off:(off + HASH_BYTES)] for off in range(0, len(payload), HASH_BYTES))
# NOTE(review): each pair of identically-named methods below was almost
# certainly a `@property` getter plus `@<name>.setter` whose decorators were
# lost in extraction; as written, each second definition shadows the first.
class Language(Options):
    """Localisation options (getter/setter pairs backed by the shared Options
    config store)."""

    def decimal(self):
        # Getter: the decimal separator character.
        return self._config_get()

    def decimal(self, val):
        # Setter: change the decimal separator.
        self._config(val)

    def url(self):
        # Getter: the configured URL.
        return self._config_get()

    def url(self, val):
        # Setter: change the URL.
        self._config(val)

    def thousands(self):
        # Getter: the thousands separator character.
        return self._config_get()

    def thousands(self, val):
        # Setter: change the thousands separator.
        self._config(val)
class CmdUnconnectedCreate(COMMAND_DEFAULT_CLASS):
    """Create a new account from the pre-login screen.

    Usage (without <>): create <name> <password>
    Quote the name or password if it contains spaces.
    """
    key = 'create'
    aliases = ['cre', 'cr']
    locks = 'cmd:all()'
    arg_regex = '\\s.*?|$'

    def at_pre_cmd(self):
        # Returning True aborts command execution when registration is disabled.
        if (not settings.NEW_ACCOUNT_REGISTRATION_ENABLED):
            self.msg('Registration is currently disabled.')
            return True
        return super().at_pre_cmd()

    def func(self):
        """Parse '<name> <password>', confirm interactively (via yield), then
        create the account and report the result."""
        session = self.caller
        args = self.args.strip()
        address = session.address
        Account = class_from_module(settings.BASE_ACCOUNT_TYPECLASS)
        # Split on double quotes first so quoted names/passwords survive;
        # fall back to a single whitespace split for the unquoted form.
        parts = [part.strip() for part in re.split('\\"', args) if part.strip()]
        if (len(parts) == 1):
            parts = parts[0].split(None, 1)
        if (len(parts) != 2):
            string = '\n Usage (without <>): create <name> <password>\nIf <name> or <password> contains spaces, enclose it in double quotes.'
            session.msg(string)
            return
        (username, password) = parts
        non_normalized_username = username
        username = Account.normalize_username(username)
        if (non_normalized_username != username):
            session.msg('Note: your username was normalized to strip spaces and remove characters that could be visually confusing.')
        # `yield` suspends the command until the player answers the prompt.
        answer = (yield f'''You want to create an account '{username}' with password '{password}'.
Is this what you intended? [Y]/N?''')
        if (answer.lower() in ('n', 'no')):
            session.msg('Aborted. If your user name contains spaces, surround it by quotes.')
            return
        (account, errors) = Account.create(username=username, password=password, ip=address, session=session)
        if account:
            string = "A new account '%s' was created. Welcome!"
            # Spaces in the name require the quoted connect syntax.
            if (' ' in username):
                string += '\n\nYou can now log in with the command \'connect "%s" <your password>\'.'
            else:
                string += "\n\nYou can now log with the command 'connect %s <your password>'."
            session.msg((string % (username, username)))
        else:
            session.msg(('|R%s|n' % '\n'.join(errors)))
class GCRARCArchive():
    """Parser for a GameCube/Wii RARC (.arc) archive, with optional Yaz0/Yay0
    decompression, able to re-emit its contents to the filesystem together
    with an ``arcinfo.yaml`` description."""

    # Big-endian magic words of the two Nintendo compression wrappers
    # (the ASCII bytes 'Yaz0' and 'Yay0').
    YAZ0_MAGIC = 0x59617A30
    YAY0_MAGIC = 0x59617930

    def __init__(self, file_path: Path, file_bytes):
        self.file_path = file_path
        self.compression = 'none'
        file_bytes = self.try_decompress_archive(file_bytes)
        # RARC header: all fields are big-endian u32.
        self.magic = struct.unpack_from('>I', file_bytes, 0)[0]
        self.file_size = struct.unpack_from('>I', file_bytes, 4)[0]
        self.data_header_offset = struct.unpack_from('>I', file_bytes, 8)[0]
        # File data offsets are relative to the end of the 32-byte header.
        self.file_data_offset = (struct.unpack_from('>I', file_bytes, 12)[0] + 32)
        self.total_file_data_size = struct.unpack_from('>I', file_bytes, 16)[0]
        self.mram_preload_size = struct.unpack_from('>I', file_bytes, 20)[0]
        self.aram_preload_size = struct.unpack_from('>I', file_bytes, 24)[0]
        self.data_header = GCRARCDataHeader(self.data_header_offset, file_bytes)
        self.nodes: List[GCRARCNode] = []

    def try_decompress_archive(self, file_bytes):
        """Detect a Yaz0/Yay0 wrapper by its magic word; return decompressed
        bytes (or the input unchanged when uncompressed)."""
        compression_scheme = struct.unpack_from('>I', file_bytes, 0)[0]
        # BUG FIX: the magic constants were missing entirely (empty `==`
        # comparisons, a syntax error), and the Yaz0 branch returned the
        # still-compressed bytes, which would break the header parse above.
        if (compression_scheme == self.YAZ0_MAGIC):
            self.compression = 'yaz0'
            return crunch64.yaz0.decompress(file_bytes)
        elif (compression_scheme == self.YAY0_MAGIC):
            self.compression = 'yay0'
            return crunch64.yay0.decompress(file_bytes)
        else:
            return file_bytes

    def build_hierarchy(self, file_bytes):
        """Parse every directory node and wire up parent/child links."""
        string_table_offset = self.data_header.string_table_offset
        string_table_size = self.data_header.string_table_size
        string_table_bytes = file_bytes[string_table_offset:(string_table_offset + string_table_size)]
        # Nodes are 16 bytes each, laid out sequentially.
        for i in range(self.data_header.node_count):
            offset = (self.data_header.node_offset + (i * 16))
            new_node = GCRARCNode(offset, file_bytes, string_table_bytes)
            new_node.get_entries(self.data_header.file_entry_offset, file_bytes, string_table_bytes)
            self.nodes.append(new_node)
        # Directory entries carry the child node index in data_offset.
        for n in self.nodes:
            for e in n.entries:
                if ((e.flags & int(GCRARCFlags.IS_FILE)) != 0):
                    continue
                if ((e.name == '.') or (e.name == '..')):
                    continue
                dir_node = self.nodes[e.data_offset]
                dir_node.parent = n
                n.children.append(dir_node)

    def emit(self, file_bytes):
        """Extract the archive under the asset path mirroring its location
        beneath <filesystem>/files, then write the arcinfo.yaml config."""
        assert (options.opts.filesystem_path is not None)
        rel_path = self.file_path.relative_to((options.opts.filesystem_path / 'files'))
        arc_root_path = (options.opts.asset_path / rel_path.with_suffix(''))
        self.nodes[0].emit_to_filesystem_recursive(arc_root_path, self.file_data_offset, file_bytes)
        self.emit_config(arc_root_path)

    def emit_config(self, config_path: Path):
        """Write arcinfo.yaml describing the archive and its root directory."""
        lines = []
        lines.append(f'name: "{self.file_path.name}"\n')
        if (self.compression != 'none'):
            lines.append(f'compression: {self.compression}\n')
        lines.append(f'next_file_id: 0x{self.data_header.next_free_file_id:04X}\n')
        lines.append(f'sync_file_ids_to_indices: {self.data_header.sync_file_ids_to_indices}\n')
        root_node = self.nodes[0]
        lines.append('root_dir:\n')
        lines.append(f'  res_type: "{root_node.resource_type}"\n')
        lines.append(f'  name: "{root_node.name}"\n')
        if (len(root_node.entries) != 0):
            lines.append('  entries:\n')
            for e in root_node.entries:
                entry_config = e.emit_config(2)
                if (entry_config is not None):
                    lines.extend(entry_config)
        if (len(root_node.children) != 0):
            lines.append('  subdirs:\n')
            for n in root_node.children:
                node_config = n.emit_config(2)
                if (node_config is not None):
                    lines.extend(node_config)
        with open((config_path / 'arcinfo.yaml'), 'w', newline='\n') as f:
            f.writelines(lines)
class WafFirewallResponseDataAllOf(ModelNormal):
    """Auto-generated OpenAPI model: the allOf composition of a WAF firewall
    response's ``data`` object (id + attributes + relationships).

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args`` lines
    below are decorators (presumably ``@cached_property`` and
    ``@convert_js_args_to_python_args``) whose markers were mangled during
    extraction — confirm against the generator template.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types permitted for keys not present in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared attribute name -> (type,) mapping; lazy to avoid import cycles.
        lazy_import()
        return {'id': (str,), 'attributes': (WafFirewallResponseDataAttributes,), 'relationships': (RelationshipWafFirewallVersions,)}
    _property
    def discriminator():
        return None
    attribute_map = {'id': 'id', 'attributes': 'attributes', 'relationships': 'relationships'}
    read_only_vars = {'id'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Server-side deserialisation constructor: read-only attributes are allowed."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor: read-only attributes are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the read-only check runs AFTER setattr, so the value
            # is stored before the error is raised — upstream generators check first.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def _test_success_with_all_filters_recipient_location_state(client):
    """Smoke test: spending_by_geography accepts the full non-legacy filter
    set with recipient_location scope at the state layer.  The leading
    underscore keeps it out of default pytest collection."""
    resp = client.post('/api/v2/search/spending_by_geography', content_type='application/json', data=json.dumps({'scope': 'recipient_location', 'geo_layer': 'state', 'filters': non_legacy_filters()}))
    assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response'
class TestNTESMO(unittest.TestCase):
    """Tests for the AU-NT (NTESMO) parser against a fully mocked HTTP transport."""

    def setUp(self):
        self.session = Session()
        self.adapter = Adapter()
        # BUG FIX: the mount-prefix string literal was destroyed during
        # extraction (`mount(' self.adapter)`), leaving a syntax error.  The
        # adapter is registered for ANY method/URL below, so mounting on
        # 'https://' routes all parser traffic through the mock.
        self.session.mount('https://', self.adapter)
        # Serve the spreadsheet fixture for every request by default
        # (and close the fixture file instead of leaking the handle).
        with open('parsers/test/mocks/AU/NTESMO.xlsx', 'rb') as data:
            self.adapter.register_uri(ANY, ANY, content=data.read())
        # NOTE(review): the href/xmlns URLs inside this fixture were stripped
        # during extraction; the '01 December 2022' title text is what the
        # parser scrapes from the index page.
        index_page = '<div class="smp-tiles-article__item">\n <a href=" <div class="smp-tiles-article__title">01 December 2022</div>\n\n <div class="smp-tiles-article__lower d-flex flex-nowrap justify-content-between align-content-center align-items-center">\n <div class="col-9 no-padding">\n <strong>Download</strong>\n <span>MS Excel Document (115.5 KB)</span>\n </div>\n <div class="col-3 no-padding d-flex justify-content-end">\n <svg xmlns=" width="33" height="34" viewBox="0 0 33 34">\n <path fill="currentColor" d="M-1223.7-1933.8h.2l.6.6.6-.6h.2v-.2l8.6-8.5-1.2-1.2-7.4 7.5v-22.6h-1.6v22.6l-7.4-7.5-1.2 1.2 8.6 8.5z" transform="translate(1239 1959)"></path>\n <path fill="currentColor" class="st0" d="M-1207.8-1938.1v11.3h-29.4v-11.3h-1.6v12.9h32.6v-12.9z" transform="translate(1239 1959)"></path>\n </svg>\n </div>\n </div>\n </a>\n </div>'
        self.adapter.register_uri(ANY, NTESMO.INDEX_URL.format(2022), text=index_page)

    def test_fetch_production(self):
        """First two production rows come back with the expected mix."""
        data_list = NTESMO.fetch_production_mix('AU-NT', self.session, target_datetime=datetime(year=2022, month=12, day=1))[:2]
        self.assertIsNotNone(data_list)
        expected_data = [{'production': {'gas': 96, 'biomass': 13, 'unknown': 0}, 'storage': {}}, {'production': {'gas': 96, 'biomass': 13, 'unknown': 0}, 'storage': {}}]
        self.assertEqual(len(data_list), len(expected_data))
        for (index, actual) in enumerate(data_list):
            self.assertEqual(actual['zoneKey'], 'AU-NT')
            self.assertEqual(actual['source'], 'ntesmo.com.au')
            for (production_type, production) in actual['production'].items():
                self.assertEqual(production, expected_data[index]['production'][production_type])

    def test_fetch_price(self):
        """48 half-hourly price rows, flat at 500 AUD, spanning 04:30 to 04:00 next day."""
        data_list = NTESMO.fetch_price('AU-NT', self.session, target_datetime=datetime(year=2022, month=12, day=1))
        self.assertIsNotNone(data_list)
        expected_data = ([{'price': 500, 'currency': 'AUD'}] * 48)
        self.assertEqual(len(data_list), len(expected_data))
        for (index, actual) in enumerate(data_list):
            self.assertEqual(actual['zoneKey'], 'AU-NT')
            self.assertEqual(actual['source'], 'ntesmo.com.au')
            self.assertEqual(actual['price'], expected_data[index]['price'])
            self.assertEqual(actual['currency'], expected_data[index]['currency'])
        self.assertEqual(data_list[0]['datetime'], datetime(year=2022, month=12, day=1, hour=4, minute=30).replace(tzinfo=australia))
        self.assertEqual(data_list[(- 1)]['datetime'], datetime(year=2022, month=12, day=2, hour=4, minute=0).replace(tzinfo=australia))

    def test_fetch_consumption(self):
        """48 half-hourly consumption rows, flat at 30, with the same time span."""
        data_list = NTESMO.fetch_consumption('AU-NT', self.session, target_datetime=datetime(year=2022, month=12, day=1))
        self.assertIsNotNone(data_list)
        expected_data = ([{'consumption': 30}] * 48)
        self.assertEqual(len(data_list), len(expected_data))
        for (index, actual) in enumerate(data_list):
            self.assertEqual(actual['zoneKey'], 'AU-NT')
            self.assertEqual(actual['source'], 'ntesmo.com.au')
            self.assertEqual(actual['consumption'], expected_data[index]['consumption'])
        self.assertEqual(data_list[0]['datetime'], datetime(year=2022, month=12, day=1, hour=4, minute=30).replace(tzinfo=australia))
        self.assertEqual(data_list[(- 1)]['datetime'], datetime(year=2022, month=12, day=2, hour=4, minute=0).replace(tzinfo=australia))
class BreakScreen(Subscriber):
    """Fullscreen GTK overlay shown on one monitor during a break, with a
    countdown label and an optional Skip button.

    NOTE(review): the bare ``(Events.XXX)`` lines before several methods below
    are event-subscription decorators (e.g. ``@on(Events.SESSION_START)``)
    whose markers were mangled during extraction.
    """

    def __init__(self, monitor: Monitor, session: Session, config: Config):
        logger.debug('action=init_screen monitor=%s', monitor)
        self.monitor = monitor
        self.session = session
        self.options = self.create_options(config)
        self.countdown = Gtk.Label(label='00:00', name='countdown')
        self.skip_button = self.create_button()
        content = self.create_content_area(self.countdown, self.skip_button)
        self.widget = self.create_window(self.monitor, content)

    def create_options(self, config) -> Dict[(str, bool)]:
        """Read the skip-break and auto-start flags from the config section."""
        return {SKIP_BREAK_OPTION: config.get_bool(SECTION_NAME, SKIP_BREAK_OPTION, fallback=False), AUTO_START_OPTION: config.get_bool(SECTION_NAME, AUTO_START_OPTION, fallback=False)}

    def create_button(self) -> Gtk.Button:
        """Build the Skip button; visibility follows the skip-break option."""
        logger.debug('action=create_skip_button visibile=%s', self.options[SKIP_BREAK_OPTION])
        button = Gtk.Button(label=_('Skip'), name='skip', visible=self.options[SKIP_BREAK_OPTION], no_show_all=True)
        button.connect('clicked', self.skip_break)
        button.grab_focus()
        return button

    def skip_break(self, _) -> None:
        """Abort the current break and switch straight to a pomodoro."""
        logger.debug('action=skip_break')
        self.session.stop()
        self.session.change(SessionType.POMODORO)

    def create_content_area(self, countdown: Gtk.Label, skip_button: Gtk.Button) -> Gtk.Box:
        """Stack countdown + button vertically, centred in the window."""
        content = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)
        content.pack_start(countdown, False, False, 0)
        content.pack_start(skip_button, False, False, 0)
        space = Gtk.Box(halign=Gtk.Align.CENTER, valign=Gtk.Align.CENTER)
        space.pack_start(content, True, True, 0)
        return space

    def create_window(self, monitor: Monitor, box: Gtk.Box) -> Gtk.Window:
        """Create the undecorated, always-on-top, fullscreen window for *monitor*."""
        window = Gtk.Window(can_focus=False, decorated=False, deletable=False, focus_on_map=False, gravity=Gdk.Gravity.CENTER, name='breakscreen', skip_taskbar_hint=True, urgency_hint=True)
        window.set_visual(window.get_screen().get_rgba_visual())
        window.stick()
        window.set_keep_above(True)
        window.fullscreen()
        # Place and size the window to exactly cover the target monitor.
        window.move(monitor.x, monitor.y)
        window.resize(monitor.width, monitor.height)
        window.add(box)
        return window

    (Events.SESSION_START)
    def on_session_start(self, payload=SessionPayload) -> None:
        # NOTE(review): `payload=SessionPayload` defaults to the CLASS itself;
        # this was presumably the annotation `payload: SessionPayload`.
        logger.debug('action=session_start monitor=%d session=%s', self.monitor.number, payload.type)
        # Show the overlay for breaks only, never for work sessions.
        if (payload.type != SessionType.POMODORO):
            self.countdown.set_text(payload.countdown)
            self.widget.show_all()

    (Events.SESSION_INTERRUPT)
    def on_session_interrupt(self, **__) -> None:
        logger.debug('action=session_start monitor=%d', self.monitor.number)
        self.widget.hide()

    (Events.SESSION_END)
    def on_session_end(self, payload: SessionPayload) -> None:
        """Hide the overlay, or auto-start the next session after a pomodoro."""
        logger.debug('action=session_end monitor=%d auto_start=%s session_type=%s', self.monitor.number, self.auto_start, payload.type)
        # NOTE(review): `auto_start` below is defined as a plain method, so
        # `self.auto_start` is a bound method and always truthy — it was
        # presumably decorated with @property. Confirm against VCS.
        if ((payload.type == SessionType.POMODORO) and self.auto_start):
            GLib.timeout_add_seconds(Timer.ONE_SECOND, self._start_session)
        else:
            self.widget.hide()

    def _start_session(self) -> bool:
        # Returning False removes the GLib timeout after one invocation.
        self.session.start()
        return False

    def auto_start(self) -> bool:
        return self.options[AUTO_START_OPTION]

    (Events.TIMER_UPDATE)
    def on_timer_update(self, payload: TimerPayload) -> None:
        logger.debug('action=update_countdown monitor=%s countdown=%s', payload.countdown, self.monitor.number)
        self.countdown.set_text(payload.countdown)

    (Events.CONFIG_CHANGE)
    def on_settings_change(self, payload: ConfigPayload) -> None:
        """Track option changes for our section and refresh Skip visibility."""
        if (payload.section != SECTION_NAME):
            return
        logger.debug('action=change_option monitor=%d config=%s option=%s', self.monitor.number, payload.action, payload.option)
        self.options[payload.option] = (payload.action == 'set')
        self.skip_button.props.visible = self.options[SKIP_BREAK_OPTION]
# NOTE(review): `('/models')` was presumably a router decorator (e.g.
# `@app.put('/models')`) whose prefix was lost in extraction.
('/models')
def update_all_models(models: Optional[List[Model]]=None, connector: Connector=Depends(get_connection)):
    """Persist the supplied models (if any) and return them together with
    every model already stored.

    Maps storage failures to HTTP 500 and validation failures to HTTP 400.
    """
    try:
        if (models is None):
            models = []
        connector.save_models(models)
        # Append the previously-stored models to the response payload.
        models += connector.load_models()
    except exc.CannotSaveModel:
        raise HTTPException(status_code=500, detail='Cannot save model')
    except exc.CannotConnectToDatabase as ex:
        raise HTTPException(status_code=500, detail=ex.message)
    except exc.ValidationError as ex:
        raise HTTPException(status_code=400, detail=ex.message)
    logger.info(f'Inserted models {models}')
    return models
class RadiusPenalty(Penalty):
    # Penalizes path segments whose radius of curvature falls below
    # min_radius; the penalty approaches `alpha` well below the threshold and
    # decays sigmoidally (steepness `kappa`) above it.
    min_radius: float = 0.15
    alpha: float = 1.0
    kappa: float = 10.0
    # NOTE(review): `wrap` is not referenced in evaluate() below — presumably
    # consumed elsewhere (e.g. to treat the path as closed); confirm.
    wrap: bool = False
    def evaluate(self, points: ArrayFloat2D) -> float:
        """Return the mean sigmoid penalty over the curvature radii of *points*.

        Each interior point is treated as the midpoint of a quadratic Bezier
        through its neighbors; the radius of curvature is evaluated at t=0.5.
        """
        def quad_fit(p0, pc, p2):
            # Quadratic Bezier through p0 and p2 whose t=0.5 value is pc:
            # solving B(0.5) = pc for the control point gives
            # p1 = 2*pc - p0/2 - p2/2.
            p1 = (((2 * pc) - (p0 / 2)) - (p2 / 2))
            def p(t):
                # B(t) = (1-t)^2*p0 + 2t(1-t)*p1 + t^2*p2, rearranged as
                # (1-t)^2*(p0-p1) + p1 + t^2*(p2-p1).
                term0 = (((1 - t) ** 2) * (p0 - p1))
                term1 = p1
                term2 = ((t ** 2) * (p2 - p1))
                return ((term0 + term1) + term2)
            def d_p(t):
                # First derivative: 2(1-t)(p1-p0) + 2t(p2-p1).
                d_term0 = ((2 * (1 - t)) * (p1 - p0))
                d_term2 = ((2 * t) * (p2 - p1))
                return (d_term0 + d_term2)
            def d2_p(t):
                # Second derivative is constant: 2(p0 - 2*p1 + p2).
                d2_term0 = (2 * p0)
                d2_term1 = ((- 4) * p1)
                d2_term2 = (2 * p2)
                return ((d2_term0 + d2_term1) + d2_term2)
            return (p, d_p, d2_p)
        def get_fit_vals(xs, ys):
            # Stack into (N, 2) points and form all interior neighbor triples.
            ps = jnp.stack((xs, ys), axis=1)
            p0 = ps[:(- 2)]
            pc = ps[1:(- 1)]
            p2 = ps[2:]
            (p, d_p, d_2p) = quad_fit(p0, pc, p2)
            # Evaluate position and derivatives at the curve midpoint.
            ps = p(0.5)
            dps = d_p(0.5)
            d2ps = d_2p(0.5)
            return (ps.T, dps.T, d2ps.T)
        def get_radii_curvature(xs, ys):
            (_, dps, d2ps) = get_fit_vals(xs, ys)
            (xp, yp) = dps
            (xp2, yp2) = d2ps
            # Radius of curvature R = (x'^2 + y'^2)^(3/2) / |x'y'' - y'x''|.
            num = (((xp ** 2) + (yp ** 2)) ** (3.0 / 2.0))
            den = abs(((xp * yp2) - (yp * xp2)))
            return (num / den)
        def penalty_fn(radius):
            # Logistic falloff: ~alpha below min_radius, ~0 well above it.
            arg = (self.kappa * (radius - self.min_radius))
            exp_arg = jnp.exp((- arg))
            return (self.alpha * (exp_arg / (1 + exp_arg)))
        (xs, ys) = jnp.array(points).T
        rs = get_radii_curvature(xs, ys)
        # Average per-point penalty (rs has len(points) - 2 entries).
        return (jnp.sum(penalty_fn(rs)) / len(rs))
class Statement(BaseElement):
    """A single policy statement: an Effect applied to Actions over Resources."""

    def __init__(self, Action, Effect, Resource):
        self.Action = Action
        self.Effect = Effect
        self.Resource = Resource

    def json_repr(self):
        """Return the JSON-serializable mapping for this statement."""
        return {'Action': self.Action, 'Effect': self.Effect, 'Resource': self.Resource}

    def merge(self, other):
        """Combine two same-effect statements, deduplicating and sorting both
        the actions (by their JSON representation) and the resources."""
        if self.Effect != other.Effect:
            raise ValueError(f'Trying to combine two statements with differing effects: {self.Effect} {other.Effect}')
        merged_actions = sorted(set(self.Action + other.Action), key=lambda action: action.json_repr())
        merged_resources = sorted(set(self.Resource + other.Resource))
        return Statement(Effect=self.Effect, Action=merged_actions, Resource=merged_resources)

    def __action_list_strings(self):
        # Stable string form of the action list, used only for ordering.
        return '-'.join(a.json_repr() for a in self.Action)

    def __lt__(self, other):
        """Order statements by effect, then actions, then resources."""
        if self.Effect != other.Effect:
            return self.Effect < other.Effect
        if self.Action == other.Action:
            return ''.join(self.Resource) < ''.join(other.Resource)
        return self.__action_list_strings() < other.__action_list_strings()
class Command(object):
    """Coconut command-line interface.

    Parses CLI arguments, drives compilation of files/folders (optionally
    across a process pool), and hosts the interpreter prompt, MyPy
    integration, Jupyter kernels, and the file watcher.

    NOTE(review): several generator methods here (handling_exceptions,
    running_jobs) are used as context managers, and several methods
    (using_jobs, mypy) are read as attributes — presumably their
    @contextmanager/@property decorators were lost upstream; confirm.
    """
    comp = None  # Compiler instance, created lazily by setup()
    runner = None  # Runner used to execute compiled code
    executor = None  # ProcessPoolExecutor while --jobs compilation is active
    exit_code = 0  # exit status accumulated across operations
    errmsg = None  # accumulated error message shown on exit
    display = False  # whether to print compiled code (--display)
    jobs = 0  # number of worker processes (0 = in-process)
    mypy_args = None  # None disables MyPy; a list enables it with these args
    argv_args = None  # arguments to install as sys.argv for executed code
    stack_size = 0  # nonzero runs compilation in a thread with this stack size
    use_cache = USE_CACHE
    fail_fast = False  # exit on first error inside handling_exceptions
    prompt = Prompt()
    def start(self, run=False):
        """Main entry point; *run* selects coconut-run behavior, which splits
        sys.argv into compiler args and program argv at the source file."""
        if run:
            (args, argv) = ([], [])
            source = None
            for i in range(1, len(sys.argv)):
                arg = sys.argv[i]
                # First non-flag arg (once the flags so far parse) is the source.
                if ((not arg.startswith('-')) and can_parse(arguments, args)):
                    source = arg
                    argv = sys.argv[(i + 1):]
                    break
                else:
                    args.append(arg)
            args = proc_run_args(args)
            if ('--run' in args):
                logger.warn('extraneous --run argument passed; coconut-run implies --run')
            else:
                args.append('--run')
            dest = None
            if (source is not None):
                source = fixpath(source)
                args.append(source)
                if default_use_cache_dir:
                    # Compile into a cache dir alongside the source.
                    if os.path.isfile(source):
                        dest = os.path.join(os.path.dirname(source), coconut_cache_dir)
                    else:
                        dest = os.path.join(source, coconut_cache_dir)
            self.cmd_sys(args, argv=argv, use_dest=dest)
        else:
            self.cmd()
    def cmd_sys(self, *args, **in_kwargs):
        """Run cmd() with the coconut_sys_kwargs defaults applied."""
        out_kwargs = coconut_sys_kwargs.copy()
        out_kwargs.update(in_kwargs)
        return self.cmd(*args, **out_kwargs)
    def cmd(self, args=None, argv=None, interact=True, default_target=None, default_jobs=None, use_dest=None):
        """Parse *args* (or sys.argv) and execute them; returns execute_args's result."""
        result = None
        with self.handling_exceptions(exit_on_error=True):
            if (args is None):
                parsed_args = arguments.parse_args()
            else:
                parsed_args = arguments.parse_args(args)
            if (argv is not None):
                if (parsed_args.argv is not None):
                    raise CoconutException('cannot pass --argv/--args when using coconut-run (coconut-run interprets any arguments after the source file as --argv/--args)')
                parsed_args.argv = argv
            if (parsed_args.target is None):
                parsed_args.target = default_target
            if (parsed_args.jobs is None):
                parsed_args.jobs = default_jobs
            if ((use_dest is not None) and (not parsed_args.no_write)):
                internal_assert((parsed_args.dest is None), 'coconut-run got passed a dest', parsed_args)
                parsed_args.dest = use_dest
            self.exit_code = 0
            self.stack_size = parsed_args.stack_size
            result = self.run_with_stack_size(self.execute_args, parsed_args, interact, original_args=args)
        return result
    def run_with_stack_size(self, func, *args, **kwargs):
        """Call *func*, optionally inside a thread with the requested stack size."""
        if self.stack_size:
            return run_with_stack_size(self.stack_size, func, *args, **kwargs)
        else:
            return func(*args, **kwargs)
    def setup(self, *args, **kwargs):
        """Create the Compiler on first use, or re-configure the existing one."""
        if (self.comp is None):
            self.comp = Compiler(*args, **kwargs)
        else:
            self.comp.setup(*args, **kwargs)
    def parse_block(self, code):
        """Compile a block of Coconut code, preserving interpreter state."""
        return self.comp.parse_block(code, keep_state=True)
    def exit_on_error(self):
        """If an error was recorded, report it, kill workers, and sys.exit."""
        if self.exit_code:
            if (self.errmsg is not None):
                logger.show(('Coconut exiting with error: ' + self.errmsg), color=error_color_code)
                self.errmsg = None
            if self.using_jobs:
                kill_children()
            sys.exit(self.exit_code)
    def execute_args(self, args, interact=True, original_args=None):
        """Carry out all parsed-argument actions: validation, setup, compilation,
        stdin/code execution, Jupyter, the prompt, and watching; returns the
        list of compiled file paths."""
        with self.handling_exceptions():
            if (not DEVELOP):
                args.trace = args.profile = False
            logger.setup(quiet=args.quiet, verbose=args.verbose, tracing=args.trace)
            if (args.trace or args.profile):
                unset_fast_pyparsing_reprs()
            if args.profile:
                start_profiling()
            logger.enable_colors()
            logger.log(cli_version)
            if (original_args is not None):
                logger.log('Directly passed args:', original_args)
            logger.log('Parsed args:', args)
            # Argument validation and mutually-exclusive option checks:
            if (args.stack_size and ((args.stack_size % 4) != 0)):
                logger.warn('--stack-size should generally be a multiple of 4, not {stack_size} (to support 4 KB pages)'.format(stack_size=args.stack_size))
            if ((args.mypy is not None) and args.no_line_numbers):
                logger.warn("using --mypy running with --no-line-numbers is not recommended; mypy error messages won't include Coconut line numbers")
            if (args.line_numbers and args.no_line_numbers):
                raise CoconutException('cannot compile with both --line-numbers and --no-line-numbers')
            if (args.site_install and args.site_uninstall):
                raise CoconutException('cannot --site-install and --site-uninstall simultaneously')
            for and_args in (getattr(args, 'and') or []):
                if (len(and_args) > 2):
                    raise CoconutException('--and accepts at most two arguments, source and dest ({n} given: {args!r})'.format(n=len(and_args), args=and_args))
            self.set_jobs(args.jobs, args.profile)
            if (args.recursion_limit is not None):
                set_recursion_limit(args.recursion_limit)
            self.fail_fast = args.fail_fast
            self.display = args.display
            self.prompt.vi_mode = args.vi_mode
            if (args.style is not None):
                self.prompt.set_style(args.style)
            if (args.argv is not None):
                self.argv_args = list(args.argv)
            if args.no_cache:
                self.use_cache = False
            # Side-effect-only actions:
            if args.docs:
                launch_documentation()
            if args.tutorial:
                launch_tutorial()
            if args.site_uninstall:
                self.site_uninstall()
            if args.site_install:
                self.site_install()
            if args.line_numbers:
                line_numbers = True
            elif args.no_line_numbers:
                line_numbers = False
            else:
                # Default: keep line numbers unless minifying without MyPy.
                line_numbers = ((not args.minify) or (args.mypy is not None))
            self.setup(target=args.target, strict=args.strict, minify=args.minify, line_numbers=line_numbers, keep_lines=args.keep_lines, no_tco=args.no_tco, no_wrap=args.no_wrap_types)
            self.comp.warm_up(streamline=((not self.using_jobs) and (args.watch or args.profile)), enable_incremental_mode=((not self.using_jobs) and args.watch), set_debug_names=(args.verbose or args.trace or args.profile))
            if (args.mypy is not None):
                self.set_mypy_args(args.mypy)
            logger.log_compiler_stats(self.comp)
            filepaths = []
            if (args.source is not None):
                if (args.interact and args.run):
                    logger.warn('extraneous --run argument passed; --interact implies --run')
                if (args.package and self.mypy):
                    logger.warn('extraneous --package argument passed; --mypy implies --package')
                if (args.standalone and args.package):
                    raise CoconutException('cannot compile as both --package and --standalone')
                if (args.standalone and self.mypy):
                    raise CoconutException('cannot compile as both --package (implied by --mypy) and --standalone')
                if (args.no_write and self.mypy):
                    raise CoconutException('cannot compile with --no-write when using --mypy')
                # Collect (source, dest, package?) triples for the main pair
                # plus any extra --and pairs.
                src_dest_package_triples = []
                for and_args in ([(args.source, args.dest)] + (getattr(args, 'and') or [])):
                    if (len(and_args) == 1):
                        (src,) = and_args
                        dest = None
                    else:
                        (src, dest) = and_args
                    src_dest_package_triples.append(self.process_source_dest(src, dest, args))
                # With a single plain file there is nothing to parallelize.
                if ((len(src_dest_package_triples) <= 1) and (not any((os.path.isdir(source) for (source, dest, package) in src_dest_package_triples)))):
                    self.disable_jobs()
                with self.running_jobs(exit_on_error=(not (args.watch or args.profile))):
                    for (source, dest, package) in src_dest_package_triples:
                        filepaths += self.compile_path(source, dest, package, run=(args.run or args.interact), force=args.force)
                self.run_mypy(filepaths)
            elif (args.run or args.no_write or args.force or args.package or args.standalone or args.watch or args.jobs):
                raise CoconutException('a source file/folder must be specified when options that depend on the source are enabled')
            elif getattr(args, 'and'):
                raise CoconutException('--and should only be used for extra source/dest pairs, not the first source/dest pair')
            if (args.code is not None):
                self.execute(self.parse_block(args.code))
            got_stdin = False
            if (args.jupyter is not None):
                self.start_jupyter(args.jupyter)
            elif stdin_readable():
                logger.log('Reading piped input from stdin...')
                read_stdin = sys.stdin.read()
                if read_stdin:
                    self.execute(self.parse_block(read_stdin))
                    got_stdin = True
            # Fall through to the interactive prompt when nothing else ran.
            if (args.interact or (interact and (not (got_stdin or args.source or args.code or args.tutorial or args.docs or args.watch or args.site_uninstall or args.site_install or (args.jupyter is not None) or (args.mypy == [mypy_install_arg]))))):
                self.start_prompt()
            if args.watch:
                self.watch(src_dest_package_triples, args.run, args.force)
            if args.profile:
                print_profiling_results()
            return filepaths
    def process_source_dest(self, source, dest, args):
        """Validate and normalize one source/dest pair; returns
        (processed_source, processed_dest, package?) where processed_dest may
        be True (derive from source) or False (no write)."""
        processed_source = fixpath(source)
        if ((args.run or args.interact) and os.path.isdir(processed_source)):
            if args.run:
                raise CoconutException(('source path %r must point to file not directory when --run is enabled' % (source,)))
            if args.interact:
                raise CoconutException(('source path %r must point to file not directory when --run (implied by --interact) is enabled' % (source,)))
        if (args.watch and os.path.isfile(processed_source)):
            raise CoconutException(('source path %r must point to directory not file when --watch is enabled' % (source,)))
        if (dest is None):
            if args.no_write:
                processed_dest = False
            else:
                processed_dest = True
        elif args.no_write:
            raise CoconutException('destination path cannot be given when --no-write is enabled')
        else:
            processed_dest = dest
        if (args.package or self.mypy):
            package = True
        elif args.standalone:
            package = False
        elif os.path.isfile(processed_source):
            # Default: files compile standalone, directories as packages.
            package = False
        elif os.path.isdir(processed_source):
            package = True
        else:
            raise CoconutException('could not find source path', source)
        return (processed_source, processed_dest, package)
    def register_exit_code(self, code=1, errmsg=None, err=None):
        """Record an exit code and (one of) an error message or exception
        for later reporting by exit_on_error."""
        if (err is not None):
            internal_assert((errmsg is None), 'register_exit_code accepts only one of errmsg or err')
            if logger.verbose:
                errmsg = format_error(err)
            else:
                errmsg = err.__class__.__name__
        if (errmsg is not None):
            if (self.errmsg is None):
                self.errmsg = errmsg
            elif (errmsg not in self.errmsg):
                if logger.verbose:
                    self.errmsg += ('\nAnd error: ' + errmsg)
                else:
                    self.errmsg += ('; ' + errmsg)
        if (code is not None):
            self.exit_code = (code or self.exit_code)
    def handling_exceptions(self, exit_on_error=None, on_keyboard_interrupt=None):
        """Context manager (generator) that converts exceptions raised in its
        body into recorded exit codes, optionally exiting afterwards.

        NOTE(review): presumably decorated with @contextmanager upstream.
        """
        if (exit_on_error is None):
            exit_on_error = self.fail_fast
        try:
            if self.using_jobs:
                with handling_broken_process_pool():
                    (yield)
            else:
                (yield)
        except SystemExit as err:
            self.register_exit_code(err.code)
        except GeneratorExit:
            raise
        except BaseException as err:
            if isinstance(err, CoconutException):
                logger.print_exc()
            elif isinstance(err, KeyboardInterrupt):
                if (on_keyboard_interrupt is not None):
                    on_keyboard_interrupt()
            else:
                logger.print_exc()
                logger.printerr(report_this_text)
            self.register_exit_code(err=err)
        if exit_on_error:
            self.exit_on_error()
    def compile_path(self, path, write=True, package=True, handling_exceptions_kwargs={}, **kwargs):
        """Compile a file or directory at *path*; returns the list of written paths."""
        if (not isinstance(write, bool)):
            write = fixpath(write)
        if os.path.isfile(path):
            destpath = self.compile_file(path, write, package, **kwargs)
            return ([destpath] if (destpath is not None) else [])
        elif os.path.isdir(path):
            return self.compile_folder(path, write, package, handling_exceptions_kwargs=handling_exceptions_kwargs, **kwargs)
        else:
            raise CoconutException('could not find source path', path)
    def compile_folder(self, directory, write=True, package=True, handling_exceptions_kwargs={}, **kwargs):
        """Recursively compile every Coconut file under *directory*, skipping
        hidden subdirectories; returns the list of written paths."""
        if ((not isinstance(write, bool)) and os.path.isfile(write)):
            raise CoconutException('destination path cannot point to a file when compiling a directory')
        filepaths = []
        for (dirpath, dirnames, filenames) in os.walk(directory):
            if isinstance(write, bool):
                writedir = write
            else:
                # Mirror the source tree structure under the dest root.
                writedir = os.path.join(write, os.path.relpath(dirpath, directory))
            for filename in filenames:
                if (os.path.splitext(filename)[1] in code_exts):
                    with self.handling_exceptions(**handling_exceptions_kwargs):
                        destpath = self.compile_file(os.path.join(dirpath, filename), writedir, package, **kwargs)
                        if (destpath is not None):
                            filepaths.append(destpath)
            for name in dirnames[:]:
                if ((not is_special_dir(name)) and name.startswith('.')):
                    if logger.verbose:
                        logger.show_tabulated('Skipped directory', name, '(explicitly pass as source to override).')
                    # Pruning dirnames in place stops os.walk from descending.
                    dirnames.remove(name)
        return filepaths
    def compile_file(self, filepath, write=True, package=False, force=False, **kwargs):
        """Compile a single file; resolves the destination path (fixing the
        extension when needed) and returns it, or None when not writing."""
        set_ext = False
        if (write is False):
            destpath = None
        elif (write is True):
            destpath = filepath
            set_ext = True
        elif os.path.splitext(write)[1]:
            # write is a file path with an extension
            destpath = write
        else:
            # write is a dir
            destpath = os.path.join(write, os.path.basename(filepath))
            set_ext = True
        if set_ext:
            (base, ext) = os.path.splitext(os.path.splitext(destpath)[0])
            if (not ext):
                ext = comp_ext
            destpath = fixpath((base + ext))
        if (filepath == destpath):
            raise CoconutException((('cannot compile ' + showpath(filepath)) + ' to itself'), extra='incorrect file extension')
        if (destpath is not None):
            dest_ext = os.path.splitext(destpath)[1]
            if (dest_ext in code_exts):
                if force:
                    logger.warn((('found destination path with ' + dest_ext) + ' extension; compiling anyway due to --force'))
                else:
                    raise CoconutException((('found destination path with ' + dest_ext) + ' extension; aborting compilation'), extra='pass --force to override')
        self.compile(filepath, destpath, package, force=force, **kwargs)
        return destpath
    def compile(self, codepath, destpath=None, package=False, run=False, force=False, show_unchanged=True):
        """Compile *codepath* to *destpath*, skipping work when an up-to-date
        hash is found; submits the parse as a job when using --jobs."""
        with univ_open(codepath, 'r') as opened:
            code = readfile(opened)
        package_level = (- 1)
        if (destpath is not None):
            destpath = fixpath(destpath)
            destdir = os.path.dirname(destpath)
            ensure_dir(destdir, logger=logger)
            if (package is True):
                package_level = self.get_package_level(codepath)
                if (package_level == 0):
                    self.create_package(destdir)
        foundhash = (None if force else self.has_hash_of(destpath, code, package_level))
        if foundhash:
            # Destination already contains a compile of this exact source.
            if show_unchanged:
                logger.show_tabulated('Left unchanged', showpath(destpath), '(pass --force to overwrite).')
            if self.display:
                logger.print(foundhash)
            if run:
                self.execute_file(destpath, argv_source_path=codepath)
        else:
            logger.show_tabulated('Compiling', showpath(codepath), '...')
            def callback(compiled):
                # Runs (possibly on job completion) with the compiled source.
                if (destpath is None):
                    logger.show_tabulated('Compiled', showpath(codepath), 'without writing to file.')
                else:
                    with univ_open(destpath, 'w') as opened:
                        writefile(opened, compiled)
                    logger.show_tabulated('Compiled to', showpath(destpath), '.')
                if self.display:
                    logger.print(compiled)
                if run:
                    if (destpath is None):
                        self.execute(compiled, path=codepath, allow_show=False)
                    else:
                        self.execute_file(destpath, argv_source_path=codepath)
            parse_kwargs = dict(codepath=codepath, use_cache=self.use_cache)
            if (package is True):
                self.submit_comp_job(codepath, callback, 'parse_package', code, package_level=package_level, **parse_kwargs)
            elif (package is False):
                self.submit_comp_job(codepath, callback, 'parse_file', code, **parse_kwargs)
            else:
                raise CoconutInternalException('invalid value for package', package)
    def get_package_level(self, codepath):
        """Count how many nested __init__ files enclose *codepath* (0 when none)."""
        package_level = (- 1)
        check_dir = os.path.dirname(os.path.abspath(codepath))
        while check_dir:
            has_init = False
            for ext in code_exts:
                init_file = os.path.join(check_dir, ('__init__' + ext))
                if os.path.exists(init_file):
                    has_init = True
                    break
            if has_init:
                package_level += 1
                check_dir = os.path.dirname(check_dir)
            else:
                break
        if (package_level < 0):
            if self.comp.strict:
                logger.warn((('missing __init__' + code_exts[0]) + ' in package'), check_dir, extra='remove --strict to dismiss')
            package_level = 0
        return package_level
    def create_package(self, dirpath, retries_left=create_package_retries):
        """Write the __coconut__.py header into *dirpath*, retrying on OSError."""
        filepath = os.path.join(dirpath, '__coconut__.py')
        try:
            with univ_open(filepath, 'w') as opened:
                writefile(opened, self.comp.getheader('__coconut__'))
        except OSError:
            logger.log_exc()
            if (retries_left <= 0):
                logger.warn('Failed to write header file at', filepath)
            else:
                # Random backoff to avoid colliding with parallel writers.
                time.sleep((random.random() / 10))
                self.create_package(dirpath, (retries_left - 1))
    def submit_comp_job(self, path, callback, method, *args, **kwargs):
        """Run a compiler *method* in-process, or submit it to the executor
        and attach *callback* to the resulting future."""
        if (self.executor is None):
            with self.handling_exceptions():
                callback(getattr(self.comp, method)(*args, **kwargs))
        else:
            path = showpath(path)
            with logger.in_path(path):
                future = self.executor.submit(multiprocess_wrapper(self.comp, method), *args, **kwargs)
                def callback_wrapper(completed_future):
                    # Re-enter the logging path context when the job finishes.
                    with logger.in_path(path):
                        with self.handling_exceptions():
                            result = completed_future.result()
                            callback(result)
                future.add_done_callback(callback_wrapper)
    def set_jobs(self, jobs, profile=False):
        """Validate and store the --jobs setting (int >= 0, 'sys', or None)."""
        if (jobs in (None, 'sys')):
            self.jobs = jobs
        else:
            try:
                jobs = int(jobs)
            except ValueError:
                jobs = (- 1)
            if (jobs < 0):
                raise CoconutException("--jobs must be an integer >= 0 or 'sys'")
            self.jobs = jobs
        logger.log('Jobs:', self.jobs)
        if (profile and (self.jobs != 0)):
            raise CoconutException('--profile incompatible with --jobs {jobs}'.format(jobs=jobs))
    def disable_jobs(self):
        """Force in-process compilation, warning if --jobs was explicitly set."""
        if (self.jobs not in (0, 1, None)):
            logger.warn('got --jobs {jobs} but only compiling one file; disabling --jobs'.format(jobs=self.jobs))
        self.jobs = 0
        logger.log('Jobs:', self.jobs)
    def get_max_workers(self):
        """Return the worker-count for the pool (None means use the default)."""
        jobs = (self.jobs if (self.jobs is not None) else base_default_jobs)
        if (jobs == 'sys'):
            return None
        else:
            return jobs
    def using_jobs(self):
        # True when compilation runs in worker processes.
        # NOTE(review): read as `self.using_jobs` (no call) elsewhere —
        # presumably decorated with @property upstream; confirm.
        max_workers = self.get_max_workers()
        return ((max_workers is None) or (max_workers > 1))
    def running_jobs(self, exit_on_error=True):
        """Context manager (generator) that provides a process pool while jobs
        are enabled.  NOTE(review): presumably @contextmanager upstream."""
        with self.handling_exceptions(exit_on_error=exit_on_error):
            if self.using_jobs:
                from concurrent.futures import ProcessPoolExecutor
                try:
                    with ProcessPoolExecutor(self.get_max_workers()) as self.executor:
                        (yield)
                finally:
                    self.executor = None
            else:
                (yield)
    def has_hash_of(self, destpath, code, package_level):
        """Return True when *destpath* already holds a compile of *code*."""
        if ((destpath is not None) and os.path.isfile(destpath)):
            with univ_open(destpath, 'r') as opened:
                compiled = readfile(opened)
            hashash = gethash(compiled)
            if (hashash is not None):
                newhash = self.comp.genhash(code, package_level)
                if (hashash == newhash):
                    return True
                logger.log('old __coconut_hash__', hashash, '!= new __coconut_hash__', newhash)
        return False
    def get_input(self, more=False):
        """Prompt for a line of interpreter input; returns None on interrupt,
        EOF, or an exit command (exiting the runner in the latter two cases)."""
        received = None
        try:
            received = self.prompt.input(more)
        except KeyboardInterrupt:
            logger.printerr('\nKeyboardInterrupt')
        except EOFError:
            logger.print()
            self.exit_runner()
        else:
            if received.startswith(exit_chars):
                self.exit_runner()
                received = None
        return received
    def start_running(self):
        """Prepare the compiler and runner for interactive execution."""
        self.comp.warm_up(enable_incremental_mode=interpreter_uses_incremental)
        self.check_runner()
        self.running = True
        logger.log((('Time till prompt: ' + str((get_clock_time() - first_import_time))) + ' secs'))
    def start_prompt(self):
        """Run the interactive read-compile-execute loop until exit."""
        logger.show('Coconut Interpreter v{co_ver} (Python {py_ver}):'.format(co_ver=VERSION, py_ver='.'.join((str(v) for v in sys.version_info[:2]))))
        logger.show("(enter 'exit()' or press Ctrl-D to end)")
        self.start_running()
        while self.running:
            try:
                code = self.get_input()
                if code:
                    compiled = self.handle_input(code)
                    if compiled:
                        self.execute(compiled, use_eval=None)
            except KeyboardInterrupt:
                logger.printerr('\nKeyboardInterrupt')
    def exit_runner(self, exit_code=0):
        """Stop the interactive loop, recording *exit_code*."""
        self.register_exit_code(exit_code)
        self.running = False
    def handle_input(self, code):
        """Compile one interpreter entry, reading continuation lines when the
        input needs indentation; returns compiled code or None."""
        if (not self.prompt.multiline):
            if (not should_indent(code)):
                try:
                    return self.parse_block(code)
                except CoconutException:
                    pass
            # Gather continuation lines until a blank line ends the block.
            while True:
                line = self.get_input(more=True)
                if (line is None):
                    return None
                elif line:
                    code += ('\n' + line)
                else:
                    break
        try:
            return self.parse_block(code)
        except CoconutException:
            logger.print_exc()
        return None
    def execute(self, compiled=None, path=None, use_eval=False, allow_show=True):
        """Execute compiled code in the runner, then run MyPy over it."""
        self.check_runner()
        if (compiled is not None):
            if (allow_show and self.display):
                logger.print(compiled)
            if (path is None):
                if (not self.mypy):
                    # Warn when interpreter input uses mypy-only built-ins.
                    no_str_code = self.comp.remove_strs(compiled)
                    if (no_str_code is not None):
                        result = mypy_builtin_regex.search(no_str_code)
                        if result:
                            logger.warn((('found mypy-only built-in ' + repr(result.group(0))) + '; pass --mypy to use mypy-only built-ins at the interpreter'))
            else:
                compiled = rem_encoding(compiled)
            self.runner.run(compiled, use_eval=use_eval, path=path, all_errors_exit=(path is not None))
            self.run_mypy(code=self.runner.was_run_code())
    def execute_file(self, destpath, **kwargs):
        """Execute an already-compiled file via the runner."""
        self.check_runner(**kwargs)
        self.runner.run_file(destpath)
    def check_runner(self, set_sys_vars=True, argv_source_path=''):
        """Ensure the Runner exists, optionally installing cwd on sys.path
        and the stored argv as sys.argv."""
        if set_sys_vars:
            if (os.getcwd() not in sys.path):
                sys.path.append(os.getcwd())
            if (self.argv_args is not None):
                sys.argv = ([argv_source_path] + self.argv_args)
        if (self.runner is None):
            self.runner = Runner(self.comp, exit=self.exit_runner, store=self.mypy)
            self.prompt.set_runner(self.runner)
    def mypy(self):
        # True when MyPy checking is enabled.
        # NOTE(review): read as `self.mypy` (no call) elsewhere — presumably
        # decorated with @property upstream; confirm.
        return (self.mypy_args is not None)
    def set_mypy_args(self, mypy_args=None):
        """Configure (or disable, or install stubs for) MyPy integration."""
        if (mypy_args is None):
            self.mypy_args = None
        elif (mypy_install_arg in mypy_args):
            if (mypy_args != [mypy_install_arg]):
                raise CoconutException("'--mypy install' cannot be used alongside other --mypy arguments")
            stub_dir = set_mypy_path()
            logger.show_sig(('Successfully installed MyPy stubs into ' + repr(stub_dir)))
            self.mypy_args = None
        else:
            self.mypy_args = list(mypy_args)
            # Fill in defaults the user did not override:
            if (not any((arg.startswith('--python-version') for arg in self.mypy_args))):
                self.mypy_args += ['--python-version', ver_tuple_to_str(get_target_info_smart(self.comp.target, mode='highest'))]
            if (not any((arg.startswith('--python-executable') for arg in self.mypy_args))):
                self.mypy_args += ['--python-executable', sys.executable]
            add_mypy_args = (default_mypy_args + (verbose_mypy_args if logger.verbose else ()))
            for arg in add_mypy_args:
                no_arg = invert_mypy_arg(arg)
                arg_prefixes = ((arg,) + ((no_arg,) if (no_arg is not None) else ()))
                if (not any((arg.startswith(arg_prefixes) for arg in self.mypy_args))):
                    self.mypy_args.append(arg)
            logger.log('MyPy args:', self.mypy_args)
            self.mypy_errs = []
    def run_mypy(self, paths=(), code=None):
        """Run MyPy over *paths* or a code string, reporting and recording errors."""
        if self.mypy:
            set_mypy_path()
            from coconut.command.mypy import mypy_run
            args = (list(paths) + self.mypy_args)
            if (code is not None):
                args += ['-c', code]
            for (line, is_err) in mypy_run(args):
                line = line.rstrip()
                logger.log('[MyPy:{std}]'.format(std=('err' if is_err else 'out')), line)
                if line.startswith(mypy_silent_err_prefixes):
                    if (code is None):
                        logger.printerr(line)
                        self.register_exit_code(errmsg='MyPy error')
                elif line.startswith(mypy_silent_non_err_prefixes):
                    if (code is None):
                        logger.print('MyPy', line)
                else:
                    if (code is None):
                        logger.printerr(line)
                        if any(((infix in line) for infix in mypy_err_infixes)):
                            self.register_exit_code(errmsg='MyPy error')
                    if (line not in self.mypy_errs):
                        if (code is not None):
                            logger.printerr(line)
                        self.mypy_errs.append(line)
    def run_silent_cmd(self, *args):
        """Run a shell command, showing output only in verbose mode."""
        return run_cmd(*args, show_output=logger.verbose)
    def install_jupyter_kernel(self, jupyter, kernel_dir, install_args=[]):
        """Install a Jupyter kernelspec, retrying with --user; returns success."""
        install_args = ((jupyter + ['kernelspec', 'install', kernel_dir, '--replace']) + install_args)
        try:
            self.run_silent_cmd(install_args)
        except CalledProcessError:
            user_install_args = (install_args + ['--user'])
            try:
                self.run_silent_cmd(user_install_args)
            except CalledProcessError:
                logger.warn('kernel install failed on command', ' '.join(install_args))
                self.register_exit_code(errmsg='Jupyter kernel error')
                return False
        return True
    def remove_jupyter_kernel(self, jupyter, kernel_name):
        """Remove a Jupyter kernelspec; returns success."""
        remove_args = (jupyter + ['kernelspec', 'remove', kernel_name, '-f'])
        try:
            self.run_silent_cmd(remove_args)
        except CalledProcessError:
            logger.warn('kernel removal failed on command', ' '.join(remove_args))
            self.register_exit_code(errmsg='Jupyter kernel error')
            return False
        return True
    def install_default_jupyter_kernels(self, jupyter, kernel_list, install_args=[]):
        """Install the default Coconut kernels (removing obsolete ones);
        returns the names installed, or [] on any failure."""
        logger.show_sig((("Installing Jupyter kernels '" + "', '".join(icoconut_default_kernel_names)) + "'..."))
        overall_success = True
        for old_kernel_name in icoconut_old_kernel_names:
            if (old_kernel_name in kernel_list):
                success = self.remove_jupyter_kernel(jupyter, old_kernel_name)
                overall_success = (overall_success and success)
        for kernel_dir in icoconut_default_kernel_dirs:
            success = self.install_jupyter_kernel(jupyter, kernel_dir, install_args)
            overall_success = (overall_success and success)
        if overall_success:
            return icoconut_default_kernel_names
        else:
            return []
    def get_jupyter_kernels(self, jupyter):
        """Return the names of the installed Jupyter kernels."""
        raw_kernel_list = run_cmd((jupyter + ['kernelspec', 'list']), show_output=False, raise_errs=False)
        kernel_list = []
        for line in raw_kernel_list.splitlines():
            kernel_list.append(line.split()[0])
        return kernel_list
    def get_jupyter_command(self):
        """Find a working Jupyter (or IPython) command-line invocation."""
        for jupyter in ([sys.executable, '-m', 'jupyter'], [sys.executable, '-m', 'ipython']):
            if PY35:
                # Just assume the first option works on modern Pythons.
                break
            try:
                self.run_silent_cmd((jupyter + ['--help']))
            except CalledProcessError:
                logger.warn(('failed to find Jupyter command at ' + repr(' '.join(jupyter))))
            else:
                break
        else:
            raise CoconutException("'coconut --jupyter' requires Jupyter (run 'pip install coconut[jupyter]' to fix)")
        return jupyter
    def start_jupyter(self, args):
        """Install Coconut kernels as needed and optionally launch Jupyter
        with the remaining *args*."""
        jupyter = self.get_jupyter_command()
        kernel_list = self.get_jupyter_kernels(jupyter)
        newly_installed_kernels = []
        if (not args):
            just_install = True
        elif args[0].startswith('-'):
            just_install = True
        elif (args[0] == jupyter_install_arg):
            just_install = True
            args = args[1:]
        else:
            just_install = False
        install_args = (args if just_install else [])
        custom_kernel_dir = install_custom_kernel(logger=logger)
        if ((custom_kernel_dir is not None) and ((icoconut_custom_kernel_name not in kernel_list) or just_install)):
            logger.show_sig('Installing Jupyter kernel {name!r}...'.format(name=icoconut_custom_kernel_name))
            if self.install_jupyter_kernel(jupyter, custom_kernel_dir, install_args):
                newly_installed_kernels.append(icoconut_custom_kernel_name)
        if just_install:
            newly_installed_kernels += self.install_default_jupyter_kernels(jupyter, kernel_list)
            run_args = None
        else:
            if ((icoconut_custom_kernel_name in kernel_list) or (icoconut_custom_kernel_name in newly_installed_kernels)):
                kernel = icoconut_custom_kernel_name
            else:
                # Fall back to a version-matched default kernel.
                ver = ('2' if PY2 else '3')
                try:
                    self.run_silent_cmd([('python' + ver), '-m', 'coconut.main', '--version'])
                except CalledProcessError:
                    kernel = 'coconut_py'
                else:
                    kernel = ('coconut_py' + ver)
                if (kernel not in kernel_list):
                    newly_installed_kernels += self.install_default_jupyter_kernels(jupyter, kernel_list, install_args)
                logger.warn('could not find {name!r} kernel; using {kernel!r} kernel instead'.format(name=icoconut_custom_kernel_name, kernel=kernel))
            if (args[0] in jupyter_console_commands):
                if any((a.startswith('--kernel') for a in args)):
                    logger.warn((("unable to specify Coconut kernel in 'jupyter " + args[0]) + "' command as --kernel was already specified in the given arguments"))
                else:
                    args += ['--kernel', kernel]
            run_args = (jupyter + args)
        if newly_installed_kernels:
            logger.show_sig((("Successfully installed Jupyter kernels: '" + "', '".join(newly_installed_kernels)) + "'"))
        if (run_args is not None):
            self.register_exit_code(run_cmd(run_args, raise_errs=False), errmsg='Jupyter error')
    def watch(self, src_dest_package_triples, run=False, force=False):
        """Watch the given source trees and recompile files as they change,
        until interrupted."""
        from coconut.command.watch import Observer, RecompilationWatcher
        for (src, _, _) in src_dest_package_triples:
            logger.show()
            logger.show_tabulated('Watching', showpath(src), '(press Ctrl-C to end)...')
        # Mutable flag so the nested closures can signal an interrupt.
        interrupted = [False]
        def interrupt():
            interrupted[0] = True
        def recompile(path, src, dest, package):
            path = fixpath(path)
            if (os.path.isfile(path) and (os.path.splitext(path)[1] in code_exts)):
                with self.handling_exceptions(on_keyboard_interrupt=interrupt):
                    if ((dest is True) or (dest is None)):
                        writedir = dest
                    else:
                        # Mirror the changed file's position under dest.
                        dirpath = os.path.dirname(path)
                        writedir = os.path.join(dest, os.path.relpath(dirpath, src))
                    filepaths = self.compile_path(path, writedir, package, run=run, force=force, show_unchanged=False, handling_exceptions_kwargs=dict(on_keyboard_interrupt=interrupt))
                    self.run_mypy(filepaths)
        observer = Observer()
        watchers = []
        for (src, dest, package) in src_dest_package_triples:
            watcher = RecompilationWatcher(recompile, src, dest, package)
            observer.schedule(watcher, src, recursive=True)
            watchers.append(watcher)
        with self.running_jobs():
            observer.start()
            try:
                while (not interrupted[0]):
                    time.sleep(watch_interval)
                    for wcher in watchers:
                        wcher.keep_watching()
            except KeyboardInterrupt:
                interrupt()
            finally:
                if interrupted[0]:
                    logger.show_sig('Got KeyboardInterrupt; stopping watcher.')
                observer.stop()
                observer.join()
    def site_install(self):
        """Copy the Coconut .pth file into site-packages."""
        python_lib = get_python_lib()
        shutil.copy(coconut_pth_file, python_lib)
        logger.show_sig(('Added %s to %s' % (os.path.basename(coconut_pth_file), python_lib)))
    def site_uninstall(self):
        """Remove the Coconut .pth file from site-packages, erroring if absent."""
        python_lib = get_python_lib()
        pth_file = os.path.join(python_lib, os.path.basename(coconut_pth_file))
        if os.path.isfile(pth_file):
            os.remove(pth_file)
            logger.show_sig(('Removed %s from %s' % (os.path.basename(coconut_pth_file), python_lib)))
        else:
            raise CoconutException(('failed to find %s file to remove' % (os.path.basename(coconut_pth_file),)))
def lazy_import():
    """Import the fastly logging-common response models on demand and expose
    them in this module's global namespace."""
    from fastly.model.logging_common_response_all_of import LoggingCommonResponseAllOf
    from fastly.model.logging_common_response_all_of1 import LoggingCommonResponseAllOf1
    module_scope = globals()
    module_scope['LoggingCommonResponseAllOf'] = LoggingCommonResponseAllOf
    module_scope['LoggingCommonResponseAllOf1'] = LoggingCommonResponseAllOf1
def make_testcase(contract_address):
    """Return a test method that checks a CFG can be built for the contract
    deployed at *contract_address*."""
    def test_robustness(self):
        # Fetch the on-chain source and normalize its pragma before analysis.
        source = get_contract_from_blockchain(contract_address, '../../../api_key.txt')
        source = fix_pragma(source)
        config = AnalysisConfiguration(
            ast_compiler=(lambda t: solidity_ast_compiler.compile_ast(t.source_file)),
            cfg_compiler=(lambda t: solidity_cfg_compiler.compile_cfg(t.ast).cfg),
        )
        context = AnalysisContext(config=config, source_file=source)
        assert context.cfg
    return test_robustness
def handle(editor, cmd: str) -> bool:
    """Dispatch a PDF-reader highlight command string.

    Fixes: renamed locals that shadowed the builtins ``id``, ``type``, and
    ``all``, and hoisted the repeated ``cmd.split()`` calls.

    Args:
        editor: the editor instance (currently unused, kept for the caller's
            dispatch signature).
        cmd: the raw command string from the webview.

    Returns:
        True if *cmd* was recognized and handled, False otherwise.
    """
    if cmd.startswith('siac-hl-clicked '):
        parts = cmd.split()
        # First token after the command is the highlight type id; the rest is
        # the (possibly space-containing) color.
        Reader.highlight_color = ' '.join(parts[2:])
        Reader.highlight_type = int(parts[1])
        return True
    elif cmd.startswith('siac-pdf-page-loaded '):
        page = int(cmd.split()[1])
        Reader.show_highlights_for_page(page)
        return True
    elif cmd.startswith('siac-hl-new '):
        # Note: this branch splits on single spaces on purpose; the tail
        # after '#' is the highlight's free text.
        parts = cmd.split(' ')
        page = int(parts[1])
        group = int(parts[2])
        hl_type = int(parts[3])
        nid = Reader.note_id
        rows = []
        text = cmd[(cmd.index('#') + 1):]
        # Coordinates arrive as flat x0 y0 x1 y1 groups, terminated by '#';
        # each value is truncated to 10 characters before parsing.
        for (ix, token) in enumerate(parts[4:]):
            if (token == '#'):
                break
            if ((ix % 4) == 0):
                x0 = float(token[:10])
            elif ((ix % 4) == 1):
                y0 = float(token[:10])
            elif ((ix % 4) == 2):
                x1 = float(token[:10])
            else:
                y1 = float(token[:10])
                rows.append((nid, page, group, hl_type, text, x0, y0, x1, y1))
        insert_highlights(rows)
        Reader.show_highlights_for_page(page)
        return True
    elif cmd.startswith('siac-hl-del '):
        delete_highlight(int(cmd.split()[1]))
        return True
    elif cmd.startswith('siac-hl-text-update-coords '):
        parts = cmd.split()
        hl_id = int(parts[1])
        x0 = float(parts[2])
        y0 = float(parts[3])
        x1 = float(parts[4])
        y1 = float(parts[5])
        update_text_comment_coords(hl_id, x0, y0, x1, y1)
        return True
    elif cmd.startswith('siac-hl-text-update-text '):
        parts = cmd.split()
        hl_id = int(parts[1])
        page = int(parts[2])
        # Rebuild the comment text from the space-split tail.
        text = ' '.join(cmd.split(' ')[3:])
        update_text_comment_text(hl_id, text)
        Reader.show_highlights_for_page(page)
        return True
    return False
class TestAlgebra(unittest.TestCase):
 def test_cross(self):
  """Cross products of 2-D/3-D vectors, matrix broadcasting, and N-D stacks.

  2-D inputs yield only the scalar z-component (as a 1-element list); a
  2-D vector crossed with a 3-D one behaves as if zero-padded to 3-D.
  Vectors shorter than 2 components, or scalar operands, raise ValueError.
  """
  self.assertEqual(alg.cross([1, 2, 3], [4, 5, 6]), [(- 3), 6, (- 3)])
  self.assertEqual(alg.cross([1, 2], [4, 5]), [(- 3)])
  self.assertEqual(alg.cross([1, 2, 3], [4, 5]), [(- 15), 12, (- 3)])
  self.assertEqual(alg.cross([1, 2], [4, 5, 6]), [12, (- 6), (- 3)])
  # Matrix-with-vector broadcasting (row-wise application).
  self.assertEqual(alg.cross([[1, 2, 3], [4, 5, 6]], [7, 8, 9]), [[(- 6), 12, (- 6)], [(- 3), 6, (- 3)]])
  self.assertEqual(alg.cross([7, 8, 9], [[1, 2, 3], [4, 5, 6]]), [[6, (- 12), 6], [3, (- 6), 3]])
  self.assertEqual(alg.cross([[1, 2], [4, 5]], [7, 8, 9]), [[18, (- 9), (- 6)], [45, (- 36), (- 3)]])
  self.assertEqual(alg.cross([7, 8, 9], [[1, 2], [4, 5]]), [[(- 18), 9, 6], [(- 45), 36, 3]])
  self.assertEqual(alg.cross([7, 8], [[1, 2], [4, 5]]), [[6], [3]])
  # Higher-dimensional stacks broadcast element-wise.
  self.assertEqual(alg.cross([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]], [1, 2, 3]), [[[0, 0, 0], [3, (- 6), 3]], [[6, (- 12), 6], [9, (- 18), 9]]])
  self.assertEqual(alg.cross([1, 2, 3], [[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]), [[[0, 0, 0], [(- 3), 6, (- 3)]], [[(- 6), 12, (- 6)], [(- 9), 18, (- 9)]]])
  self.assertEqual(alg.cross([1, 2, 3], [[[1, 2], [4, 5]], [[7, 8], [10, 11]]]), [[[(- 6), 3, 0], [(- 15), 12, (- 3)]], [[(- 24), 21, (- 6)], [(- 33), 30, (- 9)]]])
  self.assertEqual(alg.cross([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]], [[[4, 8, 9], [2, 7, 6]], [[1, 4, 3], [2, 3, 1]]]), [[[(- 6), 3, 0], [(- 12), (- 12), 18]], [[(- 12), (- 12), 20], [(- 25), 14, 8]]])
  self.assertEqual(alg.cross([[[1, 2, 3], [4, 5, 6]]], [[[4, 8, 9], [2, 7, 6]], [[1, 4, 3], [2, 3, 1]]]), [[[(- 6), 3, 0], [(- 12), (- 12), 18]], [[(- 6), 0, 2], [(- 13), 8, 2]]])
  with self.assertRaises(ValueError):
   alg.cross([[[1], [4]]], [[[4, 8, 9], [2, 7, 6]], [[1, 4, 3], [2, 3, 1]]])
  with self.assertRaises(ValueError):
   alg.cross(3, 4)
 def test_outer(self):
  """Outer products of scalars and vectors; matrix inputs are flattened
  first (numpy.outer semantics), as the 4-row results below show."""
  self.assertEqual(alg.outer(3, 4), [[12]])
  self.assertEqual(alg.outer(4, [1, 2, 3]), [[4, 8, 12]])
  self.assertEqual(alg.outer([1, 2, 3], 4), [[4], [8], [12]])
  self.assertEqual(alg.outer([1, 2, 3], [4, 5, 6]), [[4, 5, 6], [8, 10, 12], [12, 15, 18]])
  self.assertEqual(alg.outer([[1, 2], [4, 5]], [4, 5, 6]), [[4, 5, 6], [8, 10, 12], [16, 20, 24], [20, 25, 30]])
  self.assertEqual(alg.outer([4, 5, 6], [[1, 2], [4, 5]]), [[4, 8, 16, 20], [5, 10, 20, 25], [6, 12, 24, 30]])
  self.assertEqual(alg.outer([[1, 2], [3, 4]], [[5, 6], [7, 8]]), [[5, 6, 7, 8], [10, 12, 14, 16], [15, 18, 21, 24], [20, 24, 28, 32]])
 def test_inner(self):
  """Inner products across scalar, vector, matrix, and N-D operands;
  last-axis length mismatch raises ValueError."""
  self.assertEqual(alg.inner(3, 4), 12)
  self.assertEqual(alg.inner(3, [1, 2, 3]), [3, 6, 9])
  self.assertEqual(alg.inner(3, [[1, 2], [3, 4]]), [[3, 6], [9, 12]])
  self.assertEqual(alg.inner(3, [[[1, 2], [3, 4], [5, 6]], [[7, 8], [9, 10], [11, 12]]]), [[[3, 6], [9, 12], [15, 18]], [[21, 24], [27, 30], [33, 36]]])
  self.assertEqual(alg.inner([1, 2, 3], [4, 5, 6]), 32)
  self.assertEqual(alg.inner([1, 2], [[1, 2], [3, 4]]), [5, 11])
  self.assertEqual(alg.inner([[1, 2], [3, 4]], [[5, 6], [7, 8]]), [[17, 23], [39, 53]])
  self.assertEqual(alg.inner([[1, 2], [3, 4], [3, 4]], [[5, 6], [7, 8]]), [[17, 23], [39, 53], [39, 53]])
  self.assertEqual(alg.inner([[[1, 2], [3, 4], [5, 6]], [[7, 8], [9, 10], [11, 12]]], [[[13, 14], [15, 16], [17, 18]], [[19, 20], [21, 22], [23, 24]]]), [[[[41, 47, 53], [59, 65, 71]], [[95, 109, 123], [137, 151, 165]], [[149, 171, 193], [215, 237, 259]]], [[[203, 233, 263], [293, 323, 353]], [[257, 295, 333], [371, 409, 447]], [[311, 357, 403], [449, 495, 541]]]])
  with self.assertRaises(ValueError):
   alg.inner([1, 2, 3], [1, 2])
 def test_inv(self):
  """Matrix inversion, including batched 2x2 blocks; non-square and
  singular matrices raise ValueError.

  NOTE(review): several float literals below (e.g. bare `0.`/`(- 0.)`)
  look truncated by extraction — verify expected values upstream.
  """
  self.assertEqual(alg.inv([[8, 9], [4, 2]]), [[(- 0.1), 0.45], [0.2, (- 0.4)]])
  # Batched inversion: each trailing 2x2 block is inverted independently.
  self.assertEqual(alg.inv([[[[8, 9], [4, 2]], [[6, 2], [7, 1]]], [[[7, 3], [6, 1]], [[6, 4], [2, 2]]]]), [[[[(- 0.1), 0.45], [0.2, (- 0.4)]], [[(- 0.125), 0.25], [0.875, (- 0.75)]]], [[[(- 0.), 0.], [0., (- 0.)]], [[0.5, (- 0.)], [(- 0.), 1.]]]])
  self.assertEqual(alg.inv([[0., (- 0.), (- 0.)], [(- 0.), 0., (- 0.)], [(- 0.), (- 0.), 0.]]), [[0., (- 1.), (- 1.0)], [(- 0.), 0., (- 1.0)], [(- 0.), (- 0.), 0.0]])
  with self.assertRaises(ValueError):
   alg.inv([[8, 9, 1], [4, 2, 1]])
  with self.assertRaises(ValueError):
   alg.inv([[0, 0], [0, 0]])
  with self.assertRaises(ValueError):
   alg.inv([[1, 1], [1, 1]])
 def test_vstack(self):
  """Vertical stacking: scalars become rows, matrices concatenate along
  axis 0; inconsistent row widths or an empty input raise ValueError."""
  self.assertEqual(alg.vstack((1, 2, 3, 4)), [[1], [2], [3], [4]])
  self.assertEqual(alg.vstack(([1, 2], [3, 4])), [[1, 2], [3, 4]])
  self.assertEqual(alg.vstack([[[1, 2], [2, 3]], [[3, 4], [4, 5]], [[5, 6], [6, 7]]]), [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7]])
  # Zero-width rows stack fine as long as widths agree.
  self.assertEqual(alg.vstack([[[], []], [[], []], [[], []]]), [[], [], [], [], [], []])
  self.assertEqual(alg.vstack(([[[[1, 2], [3, 4], [5, 6], [7, 8]], [[9, 10], [11, 12], [13, 14], [15, 16]]], [[[17, 18], [19, 19], [21, 22], [23, 24]], [[25, 26], [27, 28], [29, 30], [31, 32]]]], [[[[33, 34], [35, 36], [37, 38], [39, 40]], [[41, 42], [43, 44], [45, 46], [47, 48]]], [[[49, 50], [51, 52], [53, 54], [55, 56]], [[57, 58], [59, 60], [61, 62], [63, 64]]]])), [[[[1, 2], [3, 4], [5, 6], [7, 8]], [[9, 10], [11, 12], [13, 14], [15, 16]]], [[[17, 18], [19, 19], [21, 22], [23, 24]], [[25, 26], [27, 28], [29, 30], [31, 32]]], [[[33, 34], [35, 36], [37, 38], [39, 40]], [[41, 42], [43, 44], [45, 46], [47, 48]]], [[[49, 50], [51, 52], [53, 54], [55, 56]], [[57, 58], [59, 60], [61, 62], [63, 64]]]])
  with self.assertRaises(ValueError):
   alg.vstack(([1, 2, 3], [], [4, 5, 6]))
  with self.assertRaises(ValueError):
   alg.vstack(([1, 2, 3], 1, [4, 5, 6]))
  with self.assertRaises(ValueError):
   alg.vstack([])
 def test_hstack(self):
  """Horizontal stacking: 1-D inputs concatenate flat (scalars allowed);
  2-D inputs must agree on row count; empty argument list raises."""
  self.assertEqual(alg.hstack((1, 2, 3, 4)), [1, 2, 3, 4])
  self.assertEqual(alg.hstack(([1, 2], [3, 4])), [1, 2, 3, 4])
  # Empty 1-D pieces and bare scalars are fine in the flat case.
  self.assertEqual(alg.hstack(([1, 2, 3], [], [4, 5, 6])), [1, 2, 3, 4, 5, 6])
  self.assertEqual(alg.hstack([[1, 2, 3], 1, [4, 5, 6]]), [1, 2, 3, 1, 4, 5, 6])
  self.assertEqual(alg.hstack([[[1, 2], [2, 3]], [[3, 4], [4, 5]], [[5, 6], [6, 7]]]), [[1, 2, 3, 4, 5, 6], [2, 3, 4, 5, 6, 7]])
  self.assertEqual(alg.hstack([[[1, 2], [3, 4]], [[], []], [[4, 5], [6, 7]]]), [[1, 2, 4, 5], [3, 4, 6, 7]])
  self.assertEqual(alg.hstack([[[], []], [[], []], [[], []]]), [[], []])
  self.assertEqual(alg.hstack(([[[[1, 2], [3, 4], [5, 6], [7, 8]], [[9, 10], [11, 12], [13, 14], [15, 16]]], [[[17, 18], [19, 19], [21, 22], [23, 24]], [[25, 26], [27, 28], [29, 30], [31, 32]]]], [[[[33, 34], [35, 36], [37, 38], [39, 40]], [[41, 42], [43, 44], [45, 46], [47, 48]], [[41, 42], [43, 44], [45, 46], [47, 48]]], [[[49, 50], [51, 52], [53, 54], [55, 56]], [[57, 58], [59, 60], [61, 62], [63, 64]], [[41, 42], [43, 44], [45, 46], [47, 48]]]])), [[[[1, 2], [3, 4], [5, 6], [7, 8]], [[9, 10], [11, 12], [13, 14], [15, 16]], [[33, 34], [35, 36], [37, 38], [39, 40]], [[41, 42], [43, 44], [45, 46], [47, 48]], [[41, 42], [43, 44], [45, 46], [47, 48]]], [[[17, 18], [19, 19], [21, 22], [23, 24]], [[25, 26], [27, 28], [29, 30], [31, 32]], [[49, 50], [51, 52], [53, 54], [55, 56]], [[57, 58], [59, 60], [61, 62], [63, 64]], [[41, 42], [43, 44], [45, 46], [47, 48]]]])
  # In the 2-D case, scalar or mismatched-row pieces are errors.
  with self.assertRaises(ValueError):
   alg.hstack([[[1, 2], [3, 4]], 1, [[4, 5], [6, 7]]])
  with self.assertRaises(ValueError):
   alg.hstack([[[1, 2], [3, 4]], [], [[4, 5], [6, 7]]])
  with self.assertRaises(ValueError):
   alg.hstack([])
 def test_diag(self):
  """`diag`: a vector builds a (shifted) diagonal matrix; a matrix
  extracts its k-th diagonal. Non-2-D/non-1-D input raises ValueError."""
  self.assertEqual(alg.diag([1, 2, 3]), [[1, 0, 0], [0, 2, 0], [0, 0, 3]])
  # Positive k shifts above the main diagonal, negative k below.
  self.assertEqual(alg.diag([1, 2, 3], 1), [[0, 1, 0, 0], [0, 0, 2, 0], [0, 0, 0, 3], [0, 0, 0, 0]])
  self.assertEqual(alg.diag([1, 2, 3], (- 1)), [[0, 0, 0, 0], [1, 0, 0, 0], [0, 2, 0, 0], [0, 0, 3, 0]])
  self.assertEqual(alg.diag(alg.reshape(alg.arange(16), (4, 4))), [0, 5, 10, 15])
  self.assertEqual(alg.diag(alg.reshape(alg.arange(16), (4, 4)), 1), [1, 6, 11])
  self.assertEqual(alg.diag(alg.reshape(alg.arange(16), (4, 4)), (- 1)), [4, 9, 14])
  # Non-square matrices truncate the diagonal at the shorter dimension.
  self.assertEqual(alg.diag(alg.reshape(alg.arange(16), (8, 2))), [0, 3])
  self.assertEqual(alg.diag(alg.reshape(alg.arange(16), (8, 2)), 1), [1])
  self.assertEqual(alg.diag(alg.reshape(alg.arange(16), (8, 2)), (- 1)), [2, 5])
  with self.assertRaises(ValueError):
   alg.diag(alg.reshape(alg.arange(16), (4, 2, 2)))
  with self.assertRaises(ValueError):
   alg.diag(3)
def test_broadcast_reset(self):
x = [[[0, 1]], [[2, 3]], [[4, 5]]]
y = [[0], [1], [(- 1)]]
b = alg.broadcast(x, y)
self.assertEqual(next(b), (0, 0))
self.assertEqual(next(b), (1, 0))
b.reset()
self.assertEqual(list(b), [(0, 0), (1, 0), (0, 1), (1, 1), (0, (- 1)), (1, (- 1)), (2, 0), (3, 0), (2, 1), (3, 1), (2, (- 1)), (3, (- 1)), (4, 0), (5, 0), (4, 1), (5, 1), (4, (- 1)), (5, (- 1))])
 def test_broadcast(self):
  """`broadcast` yields element tuples over the broadcast shape and
  exposes the resulting `shape`; incompatible shapes raise on iteration."""
  self.assertEqual(b.shape, ()) if False else None
  b = alg.broadcast(5, 8)
  self.assertEqual(list(b), [(5, 8)])
  self.assertEqual(b.shape, ())
  b = alg.broadcast([3], [1, 2, 3])
  self.assertEqual(list(b), [(3, 1), (3, 2), (3, 3)])
  self.assertEqual(b.shape, (3,))
  b = alg.broadcast(3, [1, 2, 3])
  self.assertEqual(list(b), [(3, 1), (3, 2), (3, 3)])
  self.assertEqual(b.shape, (3,))
  b = alg.broadcast([1, 2, 3], 3)
  self.assertEqual(list(b), [(1, 3), (2, 3), (3, 3)])
  self.assertEqual(b.shape, (3,))
  # A zero-length trailing axis yields no elements but a defined shape.
  b = alg.broadcast([[1], [2], [3]], [[], [], []])
  self.assertEqual(list(b), [])
  self.assertEqual(b.shape, (3, 0))
  b = alg.broadcast([[1], [2], [3]], [[], [], []], [[4], [5], [6]])
  self.assertEqual(list(b), [])
  self.assertEqual(b.shape, (3, 0))
  # Zero arguments: empty iterator, scalar shape.
  b = alg.broadcast()
  self.assertEqual(list(b), [])
  self.assertEqual(b.shape, ())
  b = alg.broadcast([[1, 2, 3], [4, 5, 6]], [[7], [8]])
  self.assertEqual(list(b), [(1, 7), (2, 7), (3, 7), (4, 8), (5, 8), (6, 8)])
  self.assertEqual(b.shape, (2, 3))
  b = alg.broadcast([[1, 2], [3, 4]], 5)
  self.assertEqual(list(b), [(1, 5), (2, 5), (3, 5), (4, 5)])
  self.assertEqual(b.shape, (2, 2))
  b = alg.broadcast(5, [[1, 2], [3, 4]])
  self.assertEqual(list(b), [(5, 1), (5, 2), (5, 3), (5, 4)])
  self.assertEqual(b.shape, (2, 2))
  b = alg.broadcast([[1, 2], [3, 4]], [5, 6])
  self.assertEqual(list(b), [(1, 5), (2, 6), (3, 5), (4, 6)])
  self.assertEqual(b.shape, (2, 2))
  b = alg.broadcast([5, 6], [[1, 2], [3, 4]])
  self.assertEqual(list(b), [(5, 1), (6, 2), (5, 3), (6, 4)])
  self.assertEqual(b.shape, (2, 2))
  with self.assertRaises(ValueError):
   list(alg.broadcast([[3, 3], [3, 3]], [[3, 3, 3], [3, 3, 3]]))
def test_broacast_to(self):
self.assertEqual(alg.broadcast_to(3, (3, 2)), [[3, 3], [3, 3], [3, 3]])
self.assertEqual(alg.broadcast_to(3, 3), [3, 3, 3])
with self.assertRaises(ValueError):
alg.broadcast_to([[3, 3, 3], [3, 3, 3]], (3,))
with self.assertRaises(ValueError):
alg.broadcast_to([[3, 3], [3, 3]], (2, 3))
 def test_shape(self):
  """`shape` mirrors numpy: scalars are (), nesting depth gives rank,
  and any ragged nesting raises ValueError. Empty lists have a 0 axis."""
  self.assertEqual(alg.shape(3), ())
  self.assertEqual(alg.shape([1, 2]), (2,))
  self.assertEqual(alg.shape([[1, 2], [1, 2], [1, 2]]), (3, 2))
  self.assertEqual(alg.shape([[[2, 2, 2, 2], [2, 2, 2, 2]], [[2, 2, 2, 2], [2, 2, 2, 2]], [[2, 2, 2, 2], [2, 2, 2, 2]]]), (3, 2, 4))
  # Ragged rows/mixed scalar-and-list nesting are rejected.
  with self.assertRaises(ValueError):
   alg.shape([[[2, 2, 2, 2], [2, 2, 2, 2]], [[2, 2, 2, 2, 2], [2, 2, 2, 2]], [[2, 2, 2, 2], [2, 2, 2, 2]]])
  with self.assertRaises(ValueError):
   alg.shape([3, [3], 3])
  with self.assertRaises(ValueError):
   alg.shape([[1, 2], [1, 2, 3], [1, 2]])
  with self.assertRaises(ValueError):
   alg.shape([[1, 2], [], [1, 2]])
  self.assertEqual(alg.shape([]), (0,))
  self.assertEqual(alg.shape([[]]), (1, 0))
def test_ones(self):
self.assertEqual(alg.ones((3, 2)), [[1.0, 1.0], [1.0, 1.0], [1.0, 1.0]])
def test_zeros(self):
self.assertEqual(alg.zeros((2, 3)), [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
 def test_eye(self):
  """`eye`: identity-like matrices, optionally rectangular (rows, cols)
  and with the ones shifted onto the k-th diagonal."""
  self.assertEqual(alg.eye(2), [[1.0, 0.0], [0.0, 1.0]])
  # k shifts the unit diagonal; shifting fully out of range gives zeros.
  self.assertEqual(alg.eye(2, k=1), [[0.0, 1.0], [0.0, 0.0]])
  self.assertEqual(alg.eye(2, k=2), [[0.0, 0.0], [0.0, 0.0]])
  self.assertEqual(alg.eye(2, k=(- 1)), [[0.0, 0.0], [1.0, 0.0]])
  self.assertEqual(alg.eye(2, k=(- 2)), [[0.0, 0.0], [0.0, 0.0]])
  # Rectangular variants.
  self.assertEqual(alg.eye(2, 3), [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
  self.assertEqual(alg.eye(2, 3, k=2), [[0.0, 0.0, 1.0], [0.0, 0.0, 0.0]])
  self.assertEqual(alg.eye(2, 3, k=(- 1)), [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
  self.assertEqual(alg.eye(3, 2), [[1.0, 0.0], [0.0, 1.0], [0.0, 0.0]])
  self.assertEqual(alg.eye(3, 2, k=1), [[0.0, 1.0], [0.0, 0.0], [0.0, 0.0]])
  self.assertEqual(alg.eye(3, 2, k=(- 1)), [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
def test_identity(self):
self.assertEqual(alg.identity(4), [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])
 def test_reshape(self):
  """`reshape`: element count must match the target shape; () unwraps a
  single element; zero-length axes produce nested empty lists."""
  self.assertEqual(alg.reshape([0, 1, 2, 3, 4, 5], (3, 2)), [[0, 1], [2, 3], [4, 5]])
  with self.assertRaises(ValueError):
   alg.reshape([0, 1, 2, 3, 4, 5], (4, 2))
  # An integer shape is shorthand for a 1-D shape.
  self.assertEqual(alg.reshape([0, 1, 2, 3, 4, 5], 6), [0, 1, 2, 3, 4, 5])
  self.assertEqual(alg.reshape([1], ()), 1)
  self.assertEqual(alg.reshape(5, (1,)), [5])
  with self.assertRaises(ValueError):
   alg.reshape([1, 2], ())
  self.assertEqual(alg.reshape([], ()), [])
  self.assertEqual(alg.reshape([], (0,)), [])
  self.assertEqual(alg.reshape([], (2, 0)), [[], []])
  self.assertEqual(alg.reshape([], (2, 3, 0)), [[[], [], []], [[], [], []]])
 def test_transpose(self):
  """`transpose` reverses all axes; 1-D arrays and scalars pass through
  unchanged (numpy semantics)."""
  self.assertEqual(alg.transpose([[[5, 6, 7, 8, 9], [1, 2, 3, 4, 5]], [[9, 8, 7, 6, 5], [6, 5, 4, 3, 2]]]), [[[5, 9], [1, 6]], [[6, 8], [2, 5]], [[7, 7], [3, 4]], [[8, 6], [4, 3]], [[9, 5], [5, 2]]])
  # Zero-length axes transpose too: (3, 2, 0) -> (0, 2, 3) shown as nesting.
  self.assertEqual(alg.transpose([[[], []], [[], []], [[], []]]), [[[], [], []], [[], [], []]])
  self.assertEqual(alg.transpose([1, 2, 3, 4]), [1, 2, 3, 4])
  self.assertEqual(alg.transpose(1), 1)
 def test_arange(self):
  """`arange` with integer and fractional (including negative) steps.

  NOTE(review): several expected float literals below are bare `0.` /
  `(- 0.)` where a longer decimal (e.g. 0.3, 0.8) is clearly intended —
  this looks like extraction damage; verify against the upstream file.
  """
  self.assertEqual(alg.reshape(alg.arange(6), (3, 2)), [[0, 1], [2, 3], [4, 5]])
  self.assertEqual(alg.arange(0.0, 1.0, 0.1), [0.0, 0.1, 0.2, 0., 0.4, 0.5, 0.6, 0.7, 0., 0.])
  self.assertEqual(alg.arange(1.0, 0.0, (- 0.1)), [1.0, 0.9, 0.8, 0., 0., 0., 0., 0., 0., 0.])
  self.assertEqual(alg.arange(0.2, (- 2.0), (- 0.2)), [0.2, 0.0, (- 0.2), (- 0.4), (- 0.), (- 0.8), (- 1.0), (- 1.2), (- 1.4), (- 1.), (- 1.)])
def test_flatiter(self):
self.assertEqual(list(alg.flatiter([[1, 2, 3], [4, 5, 6], [7, 8, 9]])), [1, 2, 3, 4, 5, 6, 7, 8, 9])
with self.assertRaises(ValueError):
list(alg.flatiter([[1, 2], []]))
with self.assertRaises(ValueError):
list(alg.flatiter([[[1, 2], [1, 2, 3]], [1, 2]]))
 def test_full(self):
  """`full` fills a shape from a scalar, a full-shape array, or a
  broadcastable sub-array."""
  self.assertEqual(alg.full((3, 2, 4), 2), [[[2, 2, 2, 2], [2, 2, 2, 2]], [[2, 2, 2, 2], [2, 2, 2, 2]], [[2, 2, 2, 2], [2, 2, 2, 2]]])
  self.assertEqual(alg.full((3, 2, 4), [[[0, 1, 2, 3], [4, 5, 6, 7]], [[0, 1, 2, 3], [4, 5, 6, 7]], [[0, 1, 2, 3], [4, 5, 6, 7]]]), [[[0, 1, 2, 3], [4, 5, 6, 7]], [[0, 1, 2, 3], [4, 5, 6, 7]], [[0, 1, 2, 3], [4, 5, 6, 7]]])
  # A (2, 4) fill value broadcasts across the leading axis of (3, 2, 4).
  self.assertEqual(alg.full((3, 2, 4), [[0, 1, 2, 3], [4, 5, 6, 7]]), [[[0, 1, 2, 3], [4, 5, 6, 7]], [[0, 1, 2, 3], [4, 5, 6, 7]], [[0, 1, 2, 3], [4, 5, 6, 7]]])
 def test_fill_diagonal(self):
  """`fill_diagonal` mutates a matrix in place: scalar or cycled sequence
  values, optional wrap-around for tall matrices, and N-D hypercubes.
  1-D input or non-cubic N-D input raises ValueError."""
  m1 = alg.zeros((3, 3))
  alg.fill_diagonal(m1, 3)
  self.assertEqual(m1, [[3, 0, 0], [0, 3, 0], [0, 0, 3]])
  # A short sequence cycles along the diagonal.
  seq = [4, 5]
  m1 = alg.zeros((3, 3))
  alg.fill_diagonal(m1, seq)
  self.assertEqual(m1, [[4, 0, 0], [0, 5, 0], [0, 0, 4]])
  m1 = alg.zeros((6, 3))
  alg.fill_diagonal(m1, 3)
  self.assertEqual(m1, [[3.0, 0.0, 0.0], [0.0, 3.0, 0.0], [0.0, 0.0, 3.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
  # wrap=True restarts the diagonal after it runs off a tall matrix.
  m1 = alg.zeros((6, 3))
  alg.fill_diagonal(m1, 3, wrap=True)
  self.assertEqual(m1, [[3.0, 0.0, 0.0], [0.0, 3.0, 0.0], [0.0, 0.0, 3.0], [0.0, 0.0, 0.0], [3.0, 0.0, 0.0], [0.0, 3.0, 0.0]])
  # N-D case: only the main hyper-diagonal of a cube is filled.
  m1 = alg.zeros((3, 3, 3))
  alg.fill_diagonal(m1, 1)
  self.assertEqual(m1, [[[1, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], [[0.0, 0.0, 0.0], [0.0, 1, 0.0], [0.0, 0.0, 0.0]], [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 1]]])
  with self.assertRaises(ValueError):
   alg.fill_diagonal([0, 0, 0], 3)
  with self.assertRaises(ValueError):
   alg.fill_diagonal(alg.zeros((3, 2, 4)), 3)
def test_no_nan(self):
with pytest.warns(DeprecationWarning):
self.assertEqual(alg.no_nan(math.nan), 0)
self.assertEqual(alg.no_nans([0, 1, 2, math.nan]), [0, 1, 2, 0])
def test_is_nan(self):
with pytest.warns(DeprecationWarning):
self.assertTrue(alg.is_nan(math.nan))
self.assertTrue(math.nan)
self.assertFalse(alg.is_nan(3))
self.assertFalse(alg.is_nan(0))
 def test_isnan(self):
  """`isnan` maps element-wise over scalars, vectors, and matrices; the
  optional `dims` hint selects the dimension fast path explicitly."""
  self.assertTrue(alg.isnan(math.nan))
  self.assertEqual(alg.isnan([2, math.nan, 1]), [False, True, False])
  self.assertEqual(alg.isnan([[2, math.nan], [math.nan, 1]]), [[False, True], [True, False]])
  # Same results when the caller pre-declares the dimensionality.
  self.assertTrue(alg.isnan(math.nan, dims=alg.SC))
  self.assertEqual(alg.isnan([2, math.nan, 1], dims=alg.D1), [False, True, False])
  self.assertEqual(alg.isnan([[2, math.nan], [math.nan, 1]], dims=alg.D2), [[False, True], [True, False]])
  self.assertEqual(alg.isnan([[[2, math.nan], [math.nan, 1]], [[2, math.nan], [math.nan, 1]]]), [[[False, True], [True, False]], [[False, True], [True, False]]])
def test_round_to_inf(self):
self.assertEqual(alg.round_to(math.inf, 2), math.inf)
def test_round_to_full(self):
self.assertEqual(alg.round_to((1 / 3), (- 1)), 0.)
def test_round_to_zero(self):
self.assertEqual(alg.round_to(4.567, 0), 5)
def test_round_to_num(self):
self.assertEqual(alg.round_to(4.567, 2), 4.6)
def test_round(self):
self.assertEqual(alg.round_half_up(3.3), 3)
self.assertEqual(alg.round_half_up(3.5), 4)
self.assertEqual(alg.round_half_up(3.9), 4)
self.assertEqual(alg.round_half_up(4), 4)
def test_scale(self):
self.assertEqual(alg.round_half_up(3.345, 1), 3.3)
self.assertEqual(alg.round_half_up(3.345, 2), 3.35)
self.assertEqual(alg.round_half_up(3.345, 3), 3.345)
self.assertEqual(alg.round_half_up(333, (- 2)), 300)
def test_cbrt(self):
self.assertEqual(alg.cbrt(27), 3)
self.assertEqual(alg.cbrt((- 27)), (- 3))
def test_clamp(self):
self.assertEqual(alg.clamp(3, None, None), 3)
self.assertEqual(alg.clamp(3, 4, None), 4)
self.assertEqual(alg.clamp(4, 4, None), 4)
self.assertEqual(alg.clamp(4, None, 4), 4)
self.assertEqual(alg.clamp(5, None, 4), 4)
self.assertEqual(alg.clamp(3, 4, 6), 4)
self.assertEqual(alg.clamp(7, 4, 6), 6)
self.assertEqual(alg.clamp(4, 4, 6), 4)
self.assertEqual(alg.clamp(6, 4, 6), 6)
 def test_matmul(self):
  """`matmul` over vectors, matrices, and batched N-D stacks; dimension
  mismatches (and scalar operands) raise ValueError."""
  self.assertEqual(alg.matmul([1, 2, 3], [4, 5, 6]), 32)
  self.assertEqual(alg.matmul([4, 5, 6], [1, 2, 3]), 32)
  self.assertEqual(alg.matmul([[1, 2, 3], [4, 5, 6], [7, 8, 9]], [1, 2, 3]), [14, 32, 50])
  self.assertEqual(alg.matmul([1, 2, 3], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [30, 36, 42])
  self.assertEqual(alg.matmul([[4, 4, 4], [1, 0, 1], [2, 3, 4]], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [[48, 60, 72], [8, 10, 12], [42, 51, 60]])
  # Batched (stacked) matrix multiplication with broadcasting.
  m1 = [[[[1, 2, 3, 4], [5, 6, 7, 8]], [[10, 20, 30, 40], [50, 60, 70, 80]], [[15, 25, 35, 45], [55, 65, 75, 85]]]]
  m2 = [[[[11, 21], [31, 41], [51, 61], [71, 81]], [[21, 11], [41, 12], [51, 13], [81, 14]], [[2, 17], [2, 2], [9, 8], [3, 4]]], [[[5, 1], [5, 41], [5, 61], [5, 81]], [[21, 3], [41, 3], [51, 3], [81, 3]], [[4, 9], [6, 7], [1, 2], [1, 5]]]]
  e = [[[[510.0, 610.0], [1166.0, 1426.0]], [[5800.0, 1300.0], [13560.0, 3300.0]], [[530.0, 765.0], [1170.0, 2005.0]]], [[[50.0, 590.0], [130.0, 1326.0]], [[5800.0, 300.0], [13560.0, 780.0]], [[290.0, 605.0], [770.0, 1525.0]]]]
  self.assertEqual(alg.matmul(m1, m2), e)
  # Vectors broadcast against batched matrices on either side.
  m1 = [[[[11, 21], [31, 41], [51, 61], [71, 81]], [[21, 11], [41, 12], [51, 13], [81, 14]]], [[[5, 21], [5, 41], [5, 61], [5, 81]], [[21, 3], [41, 3], [51, 3], [81, 3]]]]
  self.assertEqual(alg.matmul([40, 0.3, 12, 9], m1), [[[1700.3, 2313.3], [2193.3, 725.6]], [[306.5, 2313.3], [2193.3, 183.9]]])
  self.assertEqual(alg.matmul(m1, [40, 12]), [[[692, 1732, 2772, 3812], [972, 1784, 2196, 3408]], [[452, 692, 932, 1172], [876, 1676, 2076, 3276]]])
  with self.assertRaises(ValueError):
   alg.matmul([1, 2, 3], [4, 5, 6, 7], dims=alg.D1)
  m1 = [[[[1, 2, 3, 4], [5, 6, 7, 8]], [[10, 20, 30, 40], [50, 60, 70, 80]], [[15, 25, 35, 45], [55, 65, 75, 85]]]]
  # Unlike dot/multiply, matmul rejects scalar operands outright.
  with self.assertRaises(ValueError):
   alg.matmul(m1, 3)
  with self.assertRaises(ValueError):
   alg.matmul(3, m1)
  m1 = [[[[1, 2, 3, 4, 2], [5, 6, 7, 8, 4]], [[10, 20, 30, 40, 12], [50, 60, 70, 80, 1]], [[15, 25, 35, 45, 5], [55, 65, 75, 85, 7]]]]
  m2 = [[[[11, 21], [31, 41], [51, 61], [71, 81]], [[21, 11], [41, 12], [51, 13], [81, 14]]], [[[5, 21], [5, 41], [5, 61], [5, 81]], [[21, 3], [41, 3], [51, 3], [81, 3]]]]
  with self.assertRaises(ValueError):
   alg.matmul(m1, m2)
def test_dot(self):
self.assertEqual(alg.dot(2, 2), 4)
self.assertEqual(alg.dot([1, 2, 3], 2), [2, 4, 6])
self.assertEqual(alg.dot(2, [1, 2, 3]), [2, 4, 6])
self.assertEqual(alg.dot([1, 2, 3], [4, 5, 6]), 32)
self.assertEqual(alg.dot([4, 5, 6], [1, 2, 3]), 32)
self.assertEqual(alg.dot([[1, 2, 3], [4, 5, 6], [7, 8, 9]], [1, 2, 3]), [14, 32, 50])
self.assertEqual(alg.dot([1, 2, 3], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [30, 36, 42])
self.assertEqual(alg.dot([[4, 4, 4], [1, 0, 1], [2, 3, 4]], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [[48, 60, 72], [8, 10, 12], [42, 51, 60]])
m1 = [[[[1, 2, 3, 4], [5, 6, 7, 8]], [[10, 20, 30, 40], [50, 60, 70, 80]], [[15, 25, 35, 45], [55, 65, 75, 85]]]]
m2 = [[[[11, 21], [31, 41], [51, 61], [71, 81]], [[21, 11], [41, 12], [51, 13], [81, 14]]], [[[5, 21], [5, 41], [5, 61], [5, 81]], [[21, 3], [41, 3], [51, 3], [81, 3]]]]
self.assertEqual(alg.dot(m1, m2), [[[[[[510, 610], [580, 130]], [[50, 610], [580, 30]]], [[[1166, 1426], [1356, 330]], [[130, 1426], [1356, 78]]]], [[[[5100, 6100], [5800, 1300]], [[500, 6100], [5800, 300]]], [[[11660, 14260], [13560, 3300]], [[1300, 14260], [13560, 780]]]], [[[[5920, 7120], [6770, 1550]], [[600, 7120], [6770, 360]]], [[[12480, 15280], [14530, 3550]], [[1400, 15280], [14530, 840]]]]]])
self.assertEqual(alg.dot(2, m1), [[[[2, 4, 6, 8], [10, 12, 14, 16]], [[20, 40, 60, 80], [100, 120, 140, 160]], [[30, 50, 70, 90], [110, 130, 150, 170]]]])
(self.assertEqual(alg.dot(m1, 2), [[[[2, 4, 6, 8], [10, 12, 14, 16]], [[20, 40, 60, 80], [100, 120, 140, 160]], [[30, 50, 70, 90], [110, 130, 150, 170]]]]),)
self.assertEqual(alg.dot([40, 0.3, 12, 9], m2), [[[1700.3, 2313.3], [2193.3, 725.6]], [[306.5, 2313.3], [2193.3, 183.9]]])
self.assertEqual(alg.dot(m2, [40, 12]), [[[692, 1732, 2772, 3812], [972, 1784, 2196, 3408]], [[452, 692, 932, 1172], [876, 1676, 2076, 3276]]])
with self.assertRaises(ValueError):
alg.dot([1, 2, 3], [4, 5, 6, 7], dims=alg.D1)
 def test_multi_dot(self):
  """`multi_dot` chains matrix products with optimal ordering; it needs
  at least two operands, and only the first/last may be 1-D vectors."""
  a = alg.reshape(alg.arange((10 * 30)), (10, 30))
  b = alg.reshape(alg.arange((30 * 5)), (30, 5))
  c = alg.reshape(alg.arange((5 * 60)), (5, 60))
  d = alg.reshape(alg.arange((60 * 5)), (60, 5))
  # A single operand is an error.
  with self.assertRaises(ValueError):
   alg.multi_dot([[1, 2, 3]])
  self.assertEqual(alg.multi_dot(([[4, 4, 4], [1, 0, 1], [2, 3, 4]], [[1, 2, 3], [4, 5, 6], [7, 8, 9]])), [[48, 60, 72], [8, 10, 12], [42, 51, 60]])
  # The chained product must equal the naive right-to-left evaluation.
  self.assertEqual(alg.multi_dot((a, b, c)), alg.dot(a, alg.dot(b, c)))
  self.assertEqual(alg.multi_dot((a, b, c, d)), alg.dot(a, alg.dot(b, alg.dot(c, d))))
  with self.assertRaises(ValueError):
   alg.multi_dot((a, alg.zeros((2,)), c))
  # 1-D vectors are accepted at either end (or both).
  self.assertEqual(alg.multi_dot(([1, 2, 3], alg.full((3, 3), 1), alg.full((3, 3), 2))), [36, 36, 36])
  self.assertEqual(alg.multi_dot((alg.full((3, 3), 2), alg.full((3, 3), 1), [1, 2, 3])), [36, 36, 36])
  self.assertEqual(alg.multi_dot(([1, 2, 3], alg.full((3, 3), 1), [1, 2, 3])), 36)
 def test_multiply(self):
  """Element-wise multiplication with numpy-style broadcasting; the
  operation is symmetric, so each case is checked in both orders."""
  self.assertEqual(alg.multiply(2, 2), 4)
  self.assertEqual(alg.multiply([1, 2, 3], 2), [2, 4, 6])
  self.assertEqual(alg.multiply(2, [1, 2, 3]), [2, 4, 6])
  self.assertEqual(alg.multiply([1, 2, 3], [4, 5, 6]), [4, 10, 18])
  self.assertEqual(alg.multiply([4, 5, 6], [1, 2, 3]), [4, 10, 18])
  self.assertEqual(alg.multiply([[1, 2, 3], [4, 5, 6], [7, 8, 9]], [1, 2, 3]), [[1, 4, 9], [4, 10, 18], [7, 16, 27]])
  self.assertEqual(alg.multiply([1, 2, 3], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [[1, 4, 9], [4, 10, 18], [7, 16, 27]])
  self.assertEqual(alg.multiply([[4, 4, 4], [1, 0, 1], [2, 3, 4]], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [[4, 8, 12], [4, 0, 6], [14, 24, 36]])
  self.assertEqual(alg.multiply([[1, 2, 3], [4, 5, 6], [7, 8, 9]], [[4, 4, 4], [1, 0, 1], [2, 3, 4]]), [[4, 8, 12], [4, 0, 6], [14, 24, 36]])
  self.assertEqual(alg.multiply([[4, 4, 4], [1, 0, 1], [2, 3, 4]], 2), [[8, 8, 8], [2, 0, 2], [4, 6, 8]])
  self.assertEqual(alg.multiply(2, [[4, 4, 4], [1, 0, 1], [2, 3, 4]]), [[8, 8, 8], [2, 0, 2], [4, 6, 8]])
  # 4-D stacks broadcast against vectors, other stacks, and scalars.
  m1 = [[[[1, 2, 3, 4], [5, 6, 7, 8]], [[10, 20, 30, 40], [50, 60, 70, 80]], [[15, 25, 35, 45], [55, 65, 75, 85]]]]
  m2 = [[[[50, 60, 70, 80], [15, 25, 35, 45]], [[10, 20, 30, 40], [5, 6, 7, 8]], [[1, 2, 3, 4], [55, 65, 75, 85]]]]
  self.assertEqual(alg.multiply(m1, [1, 2, 3, 4]), [[[[1, 4, 9, 16], [5, 12, 21, 32]], [[10, 40, 90, 160], [50, 120, 210, 320]], [[15, 50, 105, 180], [55, 130, 225, 340]]]])
  self.assertEqual(alg.multiply([1, 2, 3, 4], m1), [[[[1, 4, 9, 16], [5, 12, 21, 32]], [[10, 40, 90, 160], [50, 120, 210, 320]], [[15, 50, 105, 180], [55, 130, 225, 340]]]])
  self.assertEqual(alg.multiply(m1, m2), [[[[50, 120, 210, 320], [75, 150, 245, 360]], [[100, 400, 900, 1600], [250, 360, 490, 640]], [[15, 50, 105, 180], [3025, 4225, 5625, 7225]]]])
  self.assertEqual(alg.multiply(m2, m1), [[[[50, 120, 210, 320], [75, 150, 245, 360]], [[100, 400, 900, 1600], [250, 360, 490, 640]], [[15, 50, 105, 180], [3025, 4225, 5625, 7225]]]])
  self.assertEqual(alg.multiply(m1, 3), [[[[3, 6, 9, 12], [15, 18, 21, 24]], [[30, 60, 90, 120], [150, 180, 210, 240]], [[45, 75, 105, 135], [165, 195, 225, 255]]]])
  self.assertEqual(alg.multiply(3, m1), [[[[3, 6, 9, 12], [15, 18, 21, 24]], [[30, 60, 90, 120], [150, 180, 210, 240]], [[45, 75, 105, 135], [165, 195, 225, 255]]]])
 def test_divide(self):
  """Element-wise true division with broadcasting.

  NOTE(review): several expected float literals below are bare `0.`
  (e.g. where 1/3 or 2/6 is computed) and look truncated by extraction;
  verify against the upstream file.
  """
  self.assertEqual(alg.divide(4, 2), 2)
  self.assertEqual(alg.divide([2, 4, 6], 2), [1, 2, 3])
  self.assertEqual(alg.divide(2, [2, 4, 6]), [1.0, 0.5, 0.])
  self.assertEqual(alg.divide([4, 10, 18], [4, 5, 6]), [1, 2, 3])
  self.assertEqual(alg.divide([4, 10, 18], [1, 2, 3]), [4, 5, 6])
  self.assertEqual(alg.divide([[1, 4, 9], [4, 10, 18], [7, 16, 27]], [1, 2, 3]), [[1, 2, 3], [4, 5, 6], [7, 8, 9]])
  self.assertEqual(alg.divide([1, 2, 3], [[1, 4, 9], [4, 10, 18], [7, 16, 27]]), [[1.0, 0.5, 0.], [0.25, 0.2, 0.], [0., 0.125, 0.]])
  # Column vectors broadcast across rows.
  self.assertEqual(alg.divide([[1], [2], [3]], [[1, 4, 9], [4, 10, 18], [7, 16, 27]]), [[1.0, 0.25, 0.], [0.5, 0.2, 0.], [0., 0.1875, 0.]])
  self.assertEqual(alg.divide([[1, 4, 9], [4, 10, 18], [7, 16, 27]], [[1], [2], [3]]), [[1.0, 4.0, 9.0], [2.0, 5.0, 9.0], [2., 5., 9.0]])
  self.assertEqual(alg.divide([[1, 4, 9], [4, 10, 18], [7, 16, 27]], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [[1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 2.0, 3.0]])
  self.assertEqual(alg.divide([[4, 8, 12], [4, 0, 6], [14, 24, 36]], [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), [[4, 4, 4], [1, 0, 1], [2, 3, 4]])
  self.assertEqual(alg.divide([[4, 8, 12], [4, 0, 6], [14, 24, 36]], 2), [[2.0, 4.0, 6.0], [2.0, 0.0, 3.0], [7.0, 12.0, 18.0]])
  self.assertEqual(alg.divide(2, [[4, 8, 12], [4, 1, 6], [14, 24, 36]]), [[0.5, 0.25, 0.], [0.5, 2.0, 0.], [0., 0., 0.]])
  self.assertEqual(alg.divide(8, 4), 2)
def test_add(self):
self.assertEqual(alg.add(alg.reshape(alg.arange(9.0), (3, 3)), alg.arange(3.0)), [[0.0, 2.0, 4.0], [3.0, 5.0, 7.0], [6.0, 8.0, 10.0]])
def test_subtraction(self):
self.assertEqual(alg.subtract(alg.reshape(alg.arange(9.0), (3, 3)), alg.arange(3.0)), [[0.0, 0.0, 0.0], [3.0, 3.0, 3.0], [6.0, 6.0, 6.0]])
def test_interpolate(self):
self.assertEqual(alg.interpolate([[3, 4], [6, 8], [9, 2]]).steps(5), [[3.0, 4.0], [4.5, 6.0], [6.0, 8.0], [7.5, 5.0], [9.0, 2.0]])
def test_interpolate_natural(self):
self.assertEqual(alg.interpolate([[3, 4], [6, 8], [9, 2]], method='natural').steps(5), [[3.0, 4.0], [4.5, 6.9375], [6.0, 8.0], [7.5, 5.9375], [9.0, 2.0]])
def test_interpolate_extrapolate(self):
i = alg.interpolate([[3, 4], [6, 8], [9, 2]], method='natural')
self.assertEqual(i(1.2), [9.6, 0.])
self.assertEqual(i((- 0.2)), [2.4, 2.7])
 def test_vectorize(self):
  """`vectorize` maps a scalar function over sequence inputs; `excluded`
  leaves positional indices or keyword names un-vectorized."""
  cbrt = alg.vectorize((lambda x: alg.nth_root(x, 3)))
  self.assertEqual(cbrt([8, 27]), [2, 3])
  log = alg.vectorize(math.log)
  # NOTE(review): expected `[2., 4.]` for log([10, 100]) looks like
  # extraction-truncated literals (natural log of 10 is not 2); verify.
  self.assertEqual(log([10, 100]), [2., 4.])
  self.assertEqual(log([10, 100], 10), [1.0, 2.0])
  # Zero-argument functions pass through unvectorized.
  pi = alg.vectorize((lambda : math.pi))
  self.assertEqual(pi(), math.pi)
  cbrt2 = alg.vectorize((lambda x: alg.nth_root(x, 3)), excluded=[0])
  self.assertEqual(cbrt2(27), 3)
  # Excluded keyword arguments must stay scalar.
  log = alg.vectorize((lambda x, *, base=math.e: math.log(x, base)), excluded=['base'])
  self.assertEqual(log([10, 100], base=10), [1.0, 2.0])
  with self.assertRaises(TypeError):
   log([10, 100], base=[10, math.e])
 def test_vectorize2(self):
  """`vectorize2` is the two-argument variant: a unary function rejects a
  second argument, and both arguments may be sequences."""
  cbrt = alg.vectorize2((lambda x: alg.nth_root(x, 3)))
  self.assertEqual(cbrt([8, 27]), [2, 3])
  with self.assertRaises(TypeError):
   cbrt([8, 27], 4)
  log = alg.vectorize2(math.log)
  # NOTE(review): expected `[2., 4.]` looks extraction-truncated; verify.
  self.assertEqual(log([10, 100]), [2., 4.])
  self.assertEqual(log([10, 100], 10), [1.0, 2.0])
  self.assertEqual(log([10, 100], [10, math.e]), [1.0, 4.])
def test_apply_two_inputs(self):
with pytest.warns(DeprecationWarning):
self.assertEqual(alg.apply(alg.npow, [[1, 2, 3], [4, 5, 6]], 2), [[1, 4, 9], [16, 25, 36]])
def test_apply_one_input(self):
with pytest.warns(DeprecationWarning):
self.assertEqual(alg.apply(math.sqrt, [[1, 4, 9], [16, 25, 36]]), [[1, 2, 3], [4, 5, 6]])
 def test_linspace(self):
  """`linspace` with scalar and vector endpoints (broadcast against each
  other), optional `endpoint=False`, and zero-sample edge cases.

  NOTE(review): the endpoint=False expectation contains bare `0.`/`1.`
  literals that look truncated by extraction; verify upstream.
  """
  self.assertEqual(alg.linspace(0, 10, 11), [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
  self.assertEqual(alg.linspace(0, 10, 11, endpoint=False), [0.0, 0., 1., 2., 3., 4., 5., 6., 7., 8., 9.])
  # Scalar/vector endpoints broadcast like numpy.
  self.assertEqual(alg.linspace(0, [5, 10], 3), [[0.0, 0.0], [2.5, 5.0], [5.0, 10.0]])
  self.assertEqual(alg.linspace([0], [5, 10], 3), [[0.0, 0.0], [2.5, 5.0], [5.0, 10.0]])
  self.assertEqual(alg.linspace([0, 0], 10, 3), [[0.0, 0.0], [5.0, 5.0], [10.0, 10.0]])
  self.assertEqual(alg.linspace([0, 0], [10], 3), [[0.0, 0.0], [5.0, 5.0], [10.0, 10.0]])
  self.assertEqual(alg.linspace([0, 1], [1, 2], 3), [[0.0, 1.0], [0.5, 1.5], [1.0, 2.0]])
  self.assertEqual(alg.linspace([[0, 1], [2, 3]], [[4, 5], [6, 7]], 3), [[[0.0, 1.0], [2.0, 3.0]], [[2.0, 3.0], [4.0, 5.0]], [[4.0, 5.0], [6.0, 7.0]]])
  # Zero samples yield empty output shaped like the endpoints.
  self.assertEqual(alg.linspace(0, 1, 0), [])
  self.assertEqual(alg.linspace([0, 1], [1, 2], 0), [[], []])
  with self.assertRaises(ValueError):
   alg.linspace(0, 1, (- 1))
  with self.assertRaises(ValueError):
   alg.linspace([0, 0], [1, 1, 1], 3)
def test_ilerp(self):
t = 0.5
v = alg.lerp(0.2, 1.8, t)
self.assertEqual(round(alg.ilerp(0.2, 1.8, v), 5), t)
 def test_lerp2d(self):
  """Bilinear interpolation inside a quad given by four corner points;
  the corners themselves are reproduced exactly at (0,0)..(1,1)."""
  m = [[0.1, 0.0], [1.0, 0.0], [0.0, 0.95], [1, 1]]
  self.assertEqual  # noqa placeholder comment removed
  v = alg.lerp2d(alg.transpose(m), [0.5, 0.5])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [0.525, 0.4875])]
  v = alg.lerp2d(alg.transpose(m), [0.0, 0.0])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [0.1, 0.0])]
  v = alg.lerp2d(alg.transpose(m), [1.0, 0.0])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [1.0, 0.0])]
  v = alg.lerp2d(alg.transpose(m), [0.0, 1.0])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [0.0, 0.95])]
  v = alg.lerp2d(alg.transpose(m), [1.0, 1.0])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [1, 1])]
 def test_ilerp2d(self):
  """Inverse bilinear interpolation: maps quad-space points back to the
  unit square, inverting the cases checked in test_lerp2d."""
  m = [[0.1, 0.0], [1.0, 0.0], [0.0, 0.95], [1, 1]]
  v = alg.ilerp2d(alg.transpose(m), [0.525, 0.4875])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [0.5, 0.5])]
  v = alg.ilerp2d(alg.transpose(m), [0.1, 0.0])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [0.0, 0.0])]
  v = alg.ilerp2d(alg.transpose(m), [1.0, 0.0])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [1.0, 0.0])]
  v = alg.ilerp2d(alg.transpose(m), [0.0, 0.95])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [0.0, 1.0])]
  v = alg.ilerp2d(alg.transpose(m), [1, 1])
  [self.assertAlmostEqual(a, b) for (a, b) in zip(v, [1.0, 1.0])]
    def test_solve(self):
        """Exercise `alg.solve` (linear system solver) across input shapes.

        Covers: vector RHS, matrix RHS, stacked (N-D) coefficient matrices
        and RHS, plus the error paths (singular matrix, shape mismatches,
        incompatible stacking).
        """
        # 4x4 system with a single right-hand-side vector.
        m = [[1, 3, 4, 7], [(- 8), 27, (- 36), 0], [28, (- 5), 0, 2], [4, 2, 8, (- 1)]]
        s = [(- 12), 2, 5, (- 2)]
        self.assertEqual(alg.solve(m, s), [0., (- 0.), (- 0.), (- 1.)])
        # Same coefficient matrix with a matrix right-hand side.
        sm = [[(- 12), 2, 5, (- 2)], [2, 5, (- 2), 3], [20, (- 19), 1, 5], [0, 0, 3, 0]]
        self.assertEqual(alg.solve(m, sm), [[0., (- 0.), 0., 0.], [(- 0.), 0., 0., (- 0.)], [(- 0.), 0., 0., (- 0.)], [(- 1.), 0., 0., (- 0.)]])
        # 3x3 system with a leading zero pivot (forces row pivoting).
        m = [[0, 4, 8], [7, 2, 6], [1, 5, 3]]
        b = [0, 2, 1]
        e = [0., 0., (- 0.)]
        self.assertEqual(alg.solve(m, b), e)
        # 3x3 matrix RHS.
        m = [[7, 5, 8], [1, 2, 0], [6, 3, 4]]
        b = [[8, 6, 2], [4, 7, 0], [1, 5, 3]]
        e = [[(- 1.), 0., 0.], [2., 3., (- 0.)], [0., (- 1.), (- 0.)]]
        self.assertEqual(alg.solve(m, b), e)
        # 3x3 matrix with a wide (3x8) RHS.
        m = [[1, 3, 4], [8, 5, 6], [7, 2, 0]]
        b = [[9, 20, 18, 12, 4, 13, 21, 11], [3, 10, 22, 0, 15, 23, 5, 1], [16, 8, 17, 7, 6, 19, 2, 14]]
        e = [[(- 3.), (- 4.), (- 1.), (- 4.), 1., (- 0.), (- 5.), (- 4.0)], [18., 20., 15., 18., (- 2.), 10., 20., 21.0], [(- 11.0), (- 9.0), (- 6.5), (- 9.5), 2.5, (- 4.5), (- 9.0), (- 12.0)]]
        self.assertEqual(alg.solve(m, b), e)
        # One 3x3 matrix broadcast against a stack of four 3x3 RHS matrices.
        m = [[8, 0, 2], [5, 1, 6], [3, 4, 7]]
        b = [[[15, 4, 23], [33, 7, 34], [1, 11, 0]], [[17, 35, 6], [14, 9, 8], [5, 32, 26]], [[16, 19, 20], [2, 3, 13], [29, 10, 25]], [[22, 12, 24], [18, 31, 28], [21, 30, 27]]]
        e = [[[(- 0.), 0., 1.], [(- 13.), 1., (- 12.)], [7., 0., 6.]], [[1., 5., 0.], [(- 2.), 13., 6.], [1., (- 5.), (- 0.)]], [[3., 3., 2.], [12., 5., 6.], [(- 4.), (- 3.), (- 1.)]], [[2., 0., 2.], [2., (- 2.), 0.], [0., 5., 2.]]]
        self.assertEqual(alg.solve(m, b), e)
        # One 3x3 matrix broadcast against a stack of four 3x2 RHS matrices.
        m = [[5, 1, 3], [4, 7, 6], [8, 0, 2]]
        b = [[[18, 13], [2, 11], [19, 5]], [[3, 17], [7, 6], [12, 1]], [[4, 0], [8, 15], [14, 10]], [[20, 22], [16, 23], [21, 9]]]
        e = [[[0., (- 1.)], [(- 6.), (- 4.)], [6., 8.]], [[2., (- 3.)], [3., (- 9.)], [(- 4.), 15.]], [[2., 3.], [3., 6.], [(- 4.), (- 7.)]], [[1., (- 2.)], [(- 3.), (- 6.)], [5., 13.]]]
        self.assertEqual(alg.solve(m, b), e)
        # Stack of four coefficient matrices against a stack of four vectors.
        m = [[[3, 6, 25], [18, 31, 4], [1, 22, 8]], [[13, 30, 24], [33, 16, 27], [19, 28, 7]], [[15, 17, 32], [9, 23, 14], [2, 12, 0]], [[34, 29, 10], [11, 26, 35], [5, 21, 20]]]
        b = [[1, 6, 4], [3, 10, 8], [2, 7, 11], [0, 9, 5]]
        e = [[0., 0., (- 0.)], [0., 0., (- 0.)], [52., (- 7.), (- 20.)], [(- 0.), (- 0.), 0.]]
        self.assertEqual(alg.solve(m, b), e)
        # Stack of three coefficient matrices and three RHS vectors.
        m = [[[14, 9, 1], [18, 21, 5], [24, 3, 22]], [[26, 15, 10], [23, 16, 4], [0, 20, 25]], [[6, 7, 17], [13, 8, 19], [12, 11, 2]]]
        b = [[4, 1, 7], [0, 3, 5], [6, 8, 2]]
        e = [[0., (- 0.), (- 0.)], [(- 0.), 0., (- 0.)], [0., (- 0.), 0.]]
        self.assertEqual(alg.solve(m, b), e)
        # Singular matrix (all-zero row) must raise.
        with self.assertRaises(ValueError):
            alg.solve([[2, 4, 5, 6], [0, 0, 0, 0], [1, (- 4), 0, 3], [2, 9, 9, 2]], [3, 5, 6, 1])
        # Non-square coefficient matrix must raise.
        with self.assertRaises(ValueError):
            alg.solve([[1, 2, 3], [4, 5, 6]], [0, 1])
        # RHS vector length not matching the matrix must raise.
        with self.assertRaises(ValueError):
            m = [[5, 1, 3], [4, 7, 6], [8, 0, 2]]
            b = [1, 2, 3, 4]
            alg.solve(m, b)
        # RHS matrix with the wrong number of rows must raise.
        with self.assertRaises(ValueError):
            m = [[7, 5, 8], [1, 2, 0], [6, 3, 4]]
            b = [[8, 6, 2], [4, 7, 0], [1, 5, 3], [4, 5, 6]]
            alg.solve(m, b)
        # Ragged RHS (inconsistent row lengths) must raise.
        with self.assertRaises(ValueError):
            m = [[7, 5, 8], [1, 2, 0], [6, 3, 4]]
            b = [[8, 6, 2], [4, 7, 0, 3], [1, 5, 3]]
            alg.solve(m, b)
        # 2-D matrix cannot be solved against a 3-D RHS stack of mismatched depth.
        with self.assertRaises(ValueError):
            m = alg.reshape(([1] * 9), (3, 3))
            b = alg.reshape(([1] * 36), (4, 3, 3))
            alg.solve(m, b)
        # 3-D matrix stack against a plain 2-D RHS must raise.
        with self.assertRaises(ValueError):
            m = alg.reshape(([1] * 27), (3, 3, 3))
            b = alg.reshape(([1] * 9), (3, 3))
            alg.solve(m, b)
        # Stacked shapes that do not align must raise.
        with self.assertRaises(ValueError):
            m = alg.reshape(([1] * 27), (3, 3, 3))
            b = alg.reshape(([1] * 24), (3, 4, 2))
            alg.solve(m, b)
    def test_lu(self):
        """Exercise `alg.lu` (LU decomposition with partial pivoting).

        For every decomposition the defining identity `P @ M == L @ U` is
        checked, including non-square and stacked (N-D) inputs, plus the
        `p_indices` and `permute_l` output variants.
        """
        # Basic square matrix: P @ M must equal L @ U.
        m = [[1, 0, 1], [4, 0, 3], [1, 2, (- 1)]]
        (p, l, u) = alg.lu(m)
        self.assertEqual(alg.dot(p, m), alg.dot(l, u))
        # Lower-triangular input.
        m = [[1, 0, 0], [3, 2, 0], [1, 0, 1]]
        (p, l, u) = alg.lu(m)
        self.assertEqual(alg.dot(p, m), alg.dot(l, u))
        # Singular matrix (zero row) still decomposes.
        m = [[2, 4, 5, 6], [0, 0, 0, 0], [1, (- 4), 0, 3], [2, 9, 9, 2]]
        (p, l, u) = alg.lu(m)
        self.assertEqual(alg.dot(p, m), alg.dot(l, u))
        # Tall (5x3) non-square matrix.
        m = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12], [13, 14, 15]]
        (p, l, u) = alg.lu(m)
        self.assertEqual(alg.dot(p, m), alg.dot(l, u))
        # Wide (3x5) non-square matrix.
        m = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15]]
        (p, l, u) = alg.lu(m)
        self.assertEqual(alg.dot(p, m), alg.dot(l, u))
        # p_indices=True returns row indices instead of a permutation matrix.
        m = [[2, 4, 5, 6], [0, 0, 0, 0], [1, (- 4), 0, 3], [2, 9, 9, 2]]
        (p, l, u) = alg.lu(m, p_indices=True)
        self.assertEqual([m[idx] for idx in p], alg.dot(l, u))
        # permute_l=True folds the permutation into L, so M == L @ U directly.
        m = [[2, 4, 5, 6], [0, 0, 0, 0], [1, (- 4), 0, 3], [2, 9, 9, 2]]
        (l, u) = alg.lu(m, permute_l=True)
        self.assertEqual(m, alg.dot(l, u))
        # Stacked input: a 4x3x2 array decomposes each 3x2 matrix separately.
        m = [[[18, 13], [2, 11], [19, 5]], [[3, 17], [7, 6], [12, 1]], [[4, 0], [8, 15], [14, 10]], [[20, 22], [16, 23], [21, 9]]]
        (p, l, u) = alg.lu(m)
        sm = alg.shape(m)
        (sp, sl, su) = (alg.shape(p), alg.shape(l), alg.shape(u))
        # Flatten each stacked result back into rows, then re-chunk into the
        # individual matrices so each P/M/L/U quadruple can be checked alone.
        rm = list(alg._extract_rows(m, sm))
        (rp, rl, ru) = (list(alg._extract_rows(p, sp)), list(alg._extract_rows(l, sl)), list(alg._extract_rows(u, su)))
        mm = [rm[r:(r + sm[(- 2)])] for r in range(0, len(rm), sm[(- 2)])]
        mp = [rp[r:(r + sp[(- 2)])] for r in range(0, len(rp), sp[(- 2)])]
        ml = [rl[r:(r + sl[(- 2)])] for r in range(0, len(rl), sl[(- 2)])]
        mu = [ru[r:(r + su[(- 2)])] for r in range(0, len(ru), su[(- 2)])]
        for (_m, _p, _l, _u) in zip(mm, mp, ml, mu):
            # Float round-off makes exact equality unreliable here.
            self.assertTrue(alg.allclose(alg.dot(_p, _m), alg.dot(_l, _u), rel_tol=1e-12, abs_tol=1e-12))
        # 1-D input is not a matrix and must raise.
        with self.assertRaises(ValueError):
            alg.lu([1, 2, 3])
def test_det(self):
self.assertEqual(alg.det([[1, 0, 1], [4, 0, 3], [1, 2, (- 1)]]), 2.0)
m = [[[[8, 9], [4, 2]], [[6, 2], [7, 1]]], [[[7, 3], [6, 1]], [[6, 4], [2, 2]]]]
self.assertEqual(alg.det(m), [(- 20.0), (- 8.0), (- 10.), 4.0])
with self.assertRaises(ValueError):
alg.det([[1, 2, 3], [4, 5, 6]])
def test_any(self):
self.assertTrue(alg.any([False, True, False]))
self.assertFalse(alg.any([False, False, False]))
def test_all(self):
self.assertTrue(alg.all([True, True, True]))
self.assertFalse(alg.all([False, True, False]))
def test_extract_columns(self):
v = [1, 2, 3]
self.assertEqual(list(alg._extract_cols(v, alg.shape(v))), [[1, 2, 3]])
def test_zdiv(self):
self.assertEqual(alg.zdiv(4, 0), 0)
def test_order(self):
self.assertEqual(alg.order(0), 0)
self.assertEqual(alg.order(20), 1)
self.assertEqual(alg.order(2), 0)
self.assertEqual(alg.order(0.002), (- 3)) |
def extractTamingwangxianWordpressCom(item):
    """Parse a release item from tamingwangxian.wordpress.com.

    Returns a release message for recognized series tags, None when the
    title carries no chapter/volume info or is a preview, and False when
    no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume numbering and preview posts.
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag on the post, series display name, translation type)
    tagmap = [
        ('mdzs', 'Grandmaster of Demonic Cultivation', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, series, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def JoinArgs(args):
    """Render a sequence of heterogeneous values into one space-separated string.

    Matplotlib-like figures (anything with a callable ``savefig``) are
    embedded as backtick-wrapped base64 PNG data URIs; table dicts
    (``type == 'table'`` with a ``cols`` entry) are embedded as
    backtick-wrapped JSON; everything else is stringified with ``str``.
    """
    rendered = []
    for value in args:
        if hasattr(value, 'savefig') and callable(value.savefig):
            # Serialize the figure to PNG in memory and inline it as a data URI.
            buf = io.BytesIO()
            value.savefig(buf, format='png')
            encoded = base64.b64encode(buf.getvalue()).decode('utf-8')
            rendered.append('`data:image/png;base64,%s`' % encoded)
        elif isinstance(value, dict) and value.get('type') == 'table' and value.get('cols'):
            rendered.append('`%s`' % json.dumps(value))
        else:
            rendered.append(str(value))
    return safe_str(' '.join(rendered))
class MainWindow(QtWidgets.QMainWindow):
_default_window_width = 1200
_default_window_height = 800
_sidemenu_width = 400
    def __init__(self, onnx_model_path='', parent=None):
        """Build the main window and optionally load a model file.

        Args:
            onnx_model_path: Optional path to a ``.onnx`` or ``.json`` model
                to open at startup; other extensions are silently ignored.
            parent: Optional Qt parent widget.
        """
        super(MainWindow, self).__init__(parent)
        self.graph: ONNXNodeGraph = None
        # With no arguments load_graph creates the initial empty graph,
        # which init_ui below requires (it embeds self.graph.widget).
        self.load_graph()
        self.graph_widget: NodeGraphWidget = self.graph.widget
        self.properties_bin: CustomPropertiesBinWidget = None
        self.init_ui()
        ext = os.path.splitext(onnx_model_path)[(- 1)]
        if (ext == '.onnx'):
            self.load_graph(onnx_model_path=onnx_model_path, clear_undo_stack=True, push_undo=False)
        elif (ext == '.json'):
            # JSON models are converted back to an ONNX ModelProto first.
            onnx_graph = onnx_tools_json2onnx(input_json_path=onnx_model_path)
            self.load_graph(onnx_model=onnx_graph, clear_undo_stack=True, push_undo=False)
        self.update_graph()
    def init_ui(self):
        """Assemble the window: menu bar, graph view, and the side-menu panel.

        Layout: a horizontal split with the node-graph view on the left and a
        fixed-width side menu on the right; the side menu stacks the graph
        property labels above a grid of onnx-tool buttons.
        """
        self.setGeometry(0, 0, self._default_window_width, self._default_window_height)
        icon_dir = os.path.join(os.path.dirname(__file__), 'data/icon')
        window_icon = QtGui.QIcon(os.path.join(icon_dir, 'icon.png'))
        self.setWindowIcon(window_icon)
        # Menu bar: File / View / Tools, each entry bound to a handler method.
        menu_list = [Menu('File (&F)', [SubMenu('Open', self.btnOpenONNX_clicked, None), SubMenu('Export', self.btnExportONNX_clicked, None), SubMenu('Export PNG', self.btnExportPNG_clicked, None), Separator(), SubMenu('Exit', self.exit, None)]), Menu('View (&V)', [SubMenu('&Search', self.btnSearch_clicked, None), SubMenu('Auto &Layout', self.btnAutoLayout_clicked, None)]), Menu('Tools (&T)', [SubMenu('Inference Test', self.btnInferenceTest_clicked, None)])]
        self.menu_bar = MenuBarWidget(menu_list=menu_list)
        set_font(self, BASE_FONT_SIZE)
        for (key, action) in self.menu_bar.menu_actions.items():
            set_font(action, BASE_FONT_SIZE)
        self.setMenuBar(self.menu_bar)
        self.search_widget = NodeSearchWidget(self.graph, parent=self)
        # Left pane: the node-graph widget inside a stacked layout.
        self.layout_graph = QtWidgets.QStackedLayout()
        self.layout_graph.addWidget(self.graph.widget)
        self.layout_base = QtWidgets.QHBoxLayout()
        self.setAcceptDrops(True)
        # Route drops landing on the graph viewer to this window's dropEvent.
        self.graph._viewer.dropEvent = self.dropEvent
        # Right pane: fixed-width side menu (properties on top, buttons below).
        self.widget_sidemenu = QtWidgets.QWidget()
        self.widget_sidemenu.setFixedWidth(self._sidemenu_width)
        self.layout_sidemenu = QtWidgets.QVBoxLayout(self.widget_sidemenu)
        self.layout_main_properties = QtWidgets.QVBoxLayout()
        self.layout_node_properties = QtWidgets.QVBoxLayout()
        self.layout_sidemenu.addLayout(self.layout_main_properties)
        self.layout_sidemenu.addSpacerItem(QtWidgets.QSpacerItem(self._sidemenu_width, 10))
        self.layout_sidemenu.addLayout(self.layout_node_properties)
        self.layout_base.addLayout(self.layout_graph)
        self.layout_base.addWidget(self.widget_sidemenu)
        central_widget = QtWidgets.QWidget()
        central_widget.setLayout(self.layout_base)
        self.setCentralWidget(central_widget)
        # Graph metadata labels (name/opset/ir_version/doc_string); the
        # value labels are filled in later by update_graph().
        layout_lbl = QtWidgets.QFormLayout()
        lbl_graph_opset_ = QtWidgets.QLabel('opset')
        lbl_graph_name_ = QtWidgets.QLabel('name')
        lbl_graph_doc_string_ = QtWidgets.QLabel('doc_string')
        lbl_graph_ir_version_ = QtWidgets.QLabel('ir_version')
        self.set_font_bold(lbl_graph_opset_)
        self.set_font_bold(lbl_graph_name_)
        self.set_font_bold(lbl_graph_doc_string_)
        self.set_font_bold(lbl_graph_ir_version_)
        self.lbl_graph_opset = QtWidgets.QLabel()
        self.lbl_graph_name = QtWidgets.QLabel()
        self.lbl_graph_doc_string = QtWidgets.QLabel()
        self.lbl_graph_ir_version = QtWidgets.QLabel()
        layout_lbl.addRow(lbl_graph_name_, self.lbl_graph_name)
        layout_lbl.addRow(lbl_graph_opset_, self.lbl_graph_opset)
        layout_lbl.addRow(lbl_graph_ir_version_, self.lbl_graph_ir_version)
        layout_lbl.addRow(lbl_graph_doc_string_, self.lbl_graph_doc_string)
        self.layout_main_properties.addLayout(layout_lbl)
        # Grid of tool buttons, one per simple-onnx-processing-tools utility.
        layout_operator_btn = QtWidgets.QGridLayout()
        for i in range(6):
            layout_operator_btn.setRowMinimumHeight(i, 45)
        self.btnCombineNetwork = createIconButton('Combine Network\n(snc4onnx)', os.path.join(icon_dir, 'snc4onnx.png'))
        self.btnCombineNetwork.clicked.connect(self.btnCombineNetwork_clicked)
        self.btnExtractNetwork = createIconButton('Extract Network\n(sne4onnx)', os.path.join(icon_dir, 'sne4onnx.png'))
        self.btnExtractNetwork.clicked.connect(self.btnExtractNetwork_clicked)
        self.btnDelNode = createIconButton('Delete Node\n(snd4onnx)', os.path.join(icon_dir, 'snd4onnx.png'))
        self.btnDelNode.clicked.connect(self.btnDelNode_clicked)
        self.btnConstShrink = createIconButton('Const Shrink\n(scs4onnx)', os.path.join(icon_dir, 'scs4onnx.png'))
        self.btnConstShrink.clicked.connect(self.btnConstShrink_clicked)
        self.btnGenerateOperator = createIconButton('Generate Operator\n(sog4onnx)', os.path.join(icon_dir, 'sog4onnx.png'))
        self.btnGenerateOperator.clicked.connect(self.btnGenerateOperator_clicked)
        self.btnModifyAttrConst = createIconButton('Mod Attr and Const\n(sam4onnx)', os.path.join(icon_dir, 'sam4onnx.png'))
        self.btnModifyAttrConst.clicked.connect(self.btnModifyAttrConst_clicked)
        self.btnChangeOpset = createIconButton('Change Opset\n(soc4onnx)', os.path.join(icon_dir, 'soc4onnx.png'))
        self.btnChangeOpset.clicked.connect(self.btnChangeOpset_clicked)
        self.btnChannelConvert = createIconButton('Channel Convert\n(scc4onnx)', os.path.join(icon_dir, 'scc4onnx.png'))
        self.btnChannelConvert.clicked.connect(self.btnChannelConvert_clicked)
        self.btnAddNode = createIconButton('Add Node\n(sna4onnx)', os.path.join(icon_dir, 'sna4onnx.png'))
        self.btnAddNode.clicked.connect(self.btnAddNode_clicked)
        self.btnInitializeBatchSize = createIconButton('Initialize Batchsize\n(sbi4onnx)', os.path.join(icon_dir, 'sbi4onnx.png'))
        self.btnInitializeBatchSize.clicked.connect(self.btnInitializeBatchSize_clicked)
        self.btnRenameOp = createIconButton('Rename Op\n(sor4onnx)', os.path.join(icon_dir, 'sor4onnx.png'))
        self.btnRenameOp.clicked.connect(self.btnRenameOp_clicked)
        self.btnIOChange = createIconButton('IO Change\n(sio4onnx)', os.path.join(icon_dir, 'sio4onnx.png'))
        self.btnIOChange.clicked.connect(self.btnIOChange_clicked)
        # NOTE(review): only the first two buttons specify a grid cell; the
        # rest are added without coordinates — presumably relying on Qt's
        # default placement. Confirm the intended grid positions.
        layout_operator_btn.addWidget(self.btnGenerateOperator, 0, 0)
        layout_operator_btn.addWidget(self.btnAddNode, 0, 1)
        layout_operator_btn.addWidget(self.btnCombineNetwork)
        layout_operator_btn.addWidget(self.btnExtractNetwork)
        layout_operator_btn.addWidget(self.btnRenameOp)
        layout_operator_btn.addWidget(self.btnModifyAttrConst)
        layout_operator_btn.addWidget(self.btnInitializeBatchSize)
        layout_operator_btn.addWidget(self.btnIOChange)
        layout_operator_btn.addWidget(self.btnChannelConvert)
        layout_operator_btn.addWidget(self.btnChangeOpset)
        layout_operator_btn.addWidget(self.btnConstShrink)
        layout_operator_btn.addWidget(self.btnDelNode)
        self.layout_main_properties.addSpacerItem(QtWidgets.QSpacerItem(self._sidemenu_width, 10))
        self.layout_main_properties.addLayout(layout_operator_btn)
    def update_graph(self, update_layout=True):
        """Refresh the UI after the underlying graph model changed.

        Re-runs auto layout, updates the metadata labels, rebuilds the
        node-properties panel, and re-enables the side-menu buttons.

        Args:
            update_layout: When True, also re-fit the viewport to show the
                whole graph (select-all, fit, deselect).
        """
        t0 = time.time()
        self.set_cursor_busy()
        self.graph.update_pipe_paint()
        # Layout refresh here is not undoable; explicit undo is only pushed
        # from the Auto Layout menu action.
        self.graph.auto_layout(push_undo=False)
        if update_layout:
            # fit_to_selection frames the selected nodes, so select
            # everything, fit, then restore an empty selection.
            self.graph.reset_selection()
            self.graph.fit_to_selection()
            self.graph.reset_selection()
        self.lbl_graph_opset.setText(f'{self.graph.opset}')
        self.lbl_graph_ir_version.setText(f'{self.graph.ir_version}')
        self.lbl_graph_name.setText(f'{self.graph.name}')
        self.lbl_graph_doc_string.setText(f'{self.graph.doc_string}')
        # The properties bin is bound to a graph instance, so it is rebuilt
        # from scratch rather than updated in place.
        if (self.properties_bin is not None):
            self.properties_bin.hide()
            self.layout_node_properties.removeWidget(self.properties_bin)
            del self.properties_bin
        self.properties_bin = self.create_properties_bin(self.graph)
        self.layout_node_properties.addWidget(self.properties_bin)
        self.set_sidemenu_buttons_enabled(True)
        self.search_widget.update(self.graph)
        self.set_cursor_arrow()
        dt0 = (time.time() - t0)
        print(f'update graph: {dt0}s')
def set_cursor_busy(self):
cursor = self.cursor()
cursor.setShape(QtCore.Qt.BusyCursor)
self.setCursor(cursor)
def set_cursor_arrow(self):
cursor = self.cursor()
cursor.setShape(QtCore.Qt.ArrowCursor)
self.setCursor(cursor)
    def set_font_bold(self, widget: QtWidgets.QWidget, bold=True):
        """Toggle bold rendering on *widget* via the shared set_font helper."""
        set_font(widget, bold=bold)
def set_sidemenu_buttons_enabled(self, enable=True, current_button: QtWidgets.QPushButton=None):
if enable:
self.menu_bar.menu_actions['Open'].setEnabled(True)
self.btnCombineNetwork.setEnabled(True)
self.btnGenerateOperator.setEnabled(True)
self.btnAddNode.setEnabled(True)
if (self.graph.node_count() > 0):
self.menu_bar.menu_actions['Export'].setEnabled(True)
self.menu_bar.menu_actions['Export PNG'].setEnabled(True)
self.btnExtractNetwork.setEnabled(True)
self.btnDelNode.setEnabled(True)
self.btnConstShrink.setEnabled(True)
self.btnModifyAttrConst.setEnabled(True)
self.btnChangeOpset.setEnabled(True)
self.btnChannelConvert.setEnabled(True)
self.btnInitializeBatchSize.setEnabled(True)
self.btnRenameOp.setEnabled(True)
self.btnIOChange.setEnabled(True)
else:
self.menu_bar.menu_actions['Export'].setEnabled(False)
self.menu_bar.menu_actions['Export PNG'].setEnabled(False)
self.btnExtractNetwork.setEnabled(False)
self.btnDelNode.setEnabled(False)
self.btnConstShrink.setEnabled(False)
self.btnModifyAttrConst.setEnabled(False)
self.btnChangeOpset.setEnabled(False)
self.btnChannelConvert.setEnabled(False)
self.btnInitializeBatchSize.setEnabled(False)
self.btnRenameOp.setEnabled(False)
self.btnIOChange.setEnabled(False)
else:
self.menu_bar.menu_actions['Open'].setEnabled(False)
self.menu_bar.menu_actions['Export'].setEnabled(False)
self.menu_bar.menu_actions['Export PNG'].setEnabled(False)
self.btnCombineNetwork.setEnabled(False)
self.btnExtractNetwork.setEnabled(False)
self.btnDelNode.setEnabled(False)
self.btnConstShrink.setEnabled(False)
self.btnModifyAttrConst.setEnabled(False)
self.btnChangeOpset.setEnabled(False)
self.btnChannelConvert.setEnabled(False)
self.btnInitializeBatchSize.setEnabled(False)
self.btnRenameOp.setEnabled(False)
self.btnGenerateOperator.setEnabled(False)
self.btnAddNode.setEnabled(False)
self.btnIOChange.setEnabled(False)
self.current_button = current_button
if current_button:
current_button.setEnabled(True)
    def load_graph(self, onnx_model: onnx.ModelProto=None, onnx_model_path: str=None, model_name: str=None, clear_undo_stack=False, push_undo=False):
        """Load an ONNX model into the node graph (replacing its contents).

        Args:
            onnx_model: An in-memory ModelProto to load. Takes precedence
                over *onnx_model_path* when both are given.
            onnx_model_path: Path to a ``.onnx`` file to load from disk.
            model_name: Window title; defaults to the file's basename, or
                'new graph' for in-memory models.
            clear_undo_stack: Drop all undo history before loading.
            push_undo: Record the remove/load operations on the undo stack.

        When neither *onnx_model* nor *onnx_model_path* is given, this only
        ensures a default empty graph exists and returns.
        """
        t0 = time.time()
        self.set_cursor_busy()
        if ((not onnx_model) and (not onnx_model_path)):
            if (self.graph is None):
                # First call from __init__: create the initial empty graph.
                self.graph = ONNXNodeGraph(name='onnx_graph_qt', opset=DEFAULT_OPSET, doc_string='', import_domains=None, producer_name='onnx_graph_qt', producer_version=0, ir_version=8, model_version=0)
            return
        onnx_graph = None
        if onnx_model:
            onnx_graph = gs.import_onnx(onnx_model)
        elif onnx_model_path:
            onnx_model = onnx.load(onnx_model_path)
            onnx_graph = gs.import_onnx(onnx_model)
        if (model_name is None):
            if onnx_model_path:
                model_name = os.path.basename(onnx_model_path)
            else:
                model_name = 'new graph'
        self.setWindowTitle(model_name)
        if (self.graph is None):
            self.graph = ONNXNodeGraph(name=onnx_graph.name, opset=onnx_graph.opset, doc_string=onnx_graph.doc_string, import_domains=onnx_graph.import_domains, producer_name=onnx_model.producer_name, producer_version=onnx_model.producer_version, ir_version=onnx_model.ir_version, model_version=onnx_model.model_version)
        else:
            # Reuse the existing graph widget; just overwrite its metadata.
            self.graph.name = onnx_graph.name
            self.graph.opset = onnx_graph.opset
            self.graph.doc_string = onnx_graph.doc_string
            self.graph.import_domains = onnx_graph.import_domains
            self.graph.producer_name = onnx_model.producer_name
            self.graph.producer_version = onnx_model.producer_version
            self.graph.ir_version = onnx_model.ir_version
            self.graph.model_version = onnx_model.model_version
        if clear_undo_stack:
            self.graph.clear_undo_stack()
        # Replace the old node set with the newly imported graph.
        self.graph.remove_all_nodes(push_undo=push_undo)
        self.graph.load_onnx_graph(onnx_graph, push_undo=push_undo)
        if onnx_model_path:
            (op_num, model_size) = structure_check(onnx_graph=onnx_model)
            print(op_num)
            print(f'{model_size} bytes')
        self.set_cursor_arrow()
        dt0 = (time.time() - t0)
        print(f'load graph: {dt0}s')
def create_properties_bin(self, graph: ONNXNodeGraph):
properties_bin = CustomPropertiesBinWidget(node_graph=graph)
return properties_bin
def open_onnx(self, file_name: str):
if (not os.path.exists(file_name)):
MessageBox.warn(f'not found {file_name}.', title='open')
return
ext = os.path.splitext(file_name)[(- 1)]
model_name = os.path.basename(file_name)
if (ext == '.onnx'):
self.load_graph(onnx_model_path=file_name, model_name=model_name, clear_undo_stack=True, push_undo=False)
self.update_graph()
elif (ext == '.json'):
onnx_graph = onnx_tools_json2onnx(input_json_path=file_name)
self.load_graph(onnx_model=onnx_graph, model_name=model_name, clear_undo_stack=True, push_undo=False)
self.update_graph()
else:
MessageBox.warn(f'no supported format ({ext}).', title='open')
def dragEnterEvent(self, event: QtGui.QDragEnterEvent):
mime = event.mimeData()
if (mime.hasUrls() == True):
event.accept()
else:
event.ignore()
def dragMoveEvent(self, event):
if event.mimeData().hasUrls:
event.setDropAction(QtCore.Qt.CopyAction)
event.accept()
else:
event.ignore()
def dropEvent(self, event):
mimedata = event.mimeData()
if mimedata.hasUrls():
urls = mimedata.urls()
files = [url.path() for url in urls]
for file in files:
ext = os.path.splitext(file)[(- 1)]
if (ext in ['.onnx', '.json']):
self.open_onnx(file)
break
def btnOpenONNX_clicked(self):
self.set_sidemenu_buttons_enabled(False)
(file_name, filter) = QtWidgets.QFileDialog.getOpenFileName(self, caption='Open ONNX Model File', filter='*.onnx *.json')
if (not file_name):
self.set_sidemenu_buttons_enabled(True)
return
print(f'Open: {file_name}')
self.open_onnx(file_name)
self.set_sidemenu_buttons_enabled(True)
    def btnExportONNX_clicked(self):
        """'File > Export': save the current graph as ``.onnx`` or ``.json``.

        The chosen dialog filter (not the typed extension) decides the
        format; the matching extension is appended when missing.
        """
        self.set_sidemenu_buttons_enabled(False)
        (file_name, filter) = QtWidgets.QFileDialog.getSaveFileName(self, caption='Export ONNX Model File', directory=os.path.abspath(os.curdir), filter='*.onnx;;*.json')
        if (not file_name):
            # Dialog cancelled.
            self.set_sidemenu_buttons_enabled(True)
            return
        ext = os.path.splitext(file_name)[(- 1)]
        if (filter == '*.onnx'):
            if (ext != '.onnx'):
                file_name += '.onnx'
            if os.path.exists(file_name):
                ret = MessageBox.question([f'{file_name} is already exist.', 'overwrite?'], 'export')
                if (ret == MessageBox.No):
                    self.set_sidemenu_buttons_enabled(True)
                    return
            self.graph.export(file_name)
        elif (filter == '*.json'):
            # NOTE(review): unlike the .onnx branch, the JSON path has no
            # overwrite confirmation — confirm whether that is intentional.
            if (ext != '.json'):
                file_name += '.json'
            onnx_tools_onnx2json(onnx_graph=self.graph.to_onnx(), output_json_path=file_name, json_indent=2)
        print(f'Export: {file_name}.')
        MessageBox.info(['Success.', f'Export to {file_name}.'], 'Export ONNX', parent=self)
        self.set_sidemenu_buttons_enabled(True)
def btnExportPNG_clicked(self):
self.set_sidemenu_buttons_enabled(False)
default_file_name = 'screenshot.png'
dialog = QtWidgets.QFileDialog(self, caption='Export Graph Image', directory=os.path.abspath(os.curdir), filter='*.png')
dialog.setAcceptMode(QtWidgets.QFileDialog.AcceptMode.AcceptSave)
dialog.selectFile(default_file_name)
ret = dialog.exec_()
if (ret == 0):
self.set_sidemenu_buttons_enabled(True)
return
file_name = dialog.selectedFiles()[0]
if (not file_name):
self.set_sidemenu_buttons_enabled(True)
return
self.graph.export_to_png(file_name)
MessageBox.info(['Success.', f'Export to {file_name}.'], 'Export PNG', parent=self)
self.set_sidemenu_buttons_enabled(True)
    def btnAutoLayout_clicked(self):
        """'View > Auto Layout': re-run node layout as an undoable action."""
        self.set_cursor_busy()
        self.set_sidemenu_buttons_enabled(False)
        # Unlike update_graph's internal layout, this one is undoable.
        self.graph.auto_layout(push_undo=True)
        self.set_sidemenu_buttons_enabled(True)
        self.set_cursor_arrow()
    def btnSearch_clicked(self):
        """'View > Search': show the node-search widget."""
        self.search_widget.show()
def btnInferenceTest_clicked(self):
w = InferenceTestWidgets(self.graph.to_onnx(), parent=self)
w.show()
    def btnCombineNetwork_clicked(self):
        """'Combine Network' tool (snc4onnx): merge models into the current graph.

        Clicking the button while its dialog is already open closes the
        dialog instead (toggle behavior, via self.current_button). The
        dialog is re-shown in a loop so the user can retry after an error.
        """
        btn = self.btnCombineNetwork
        if (self.current_button is btn):
            # Second click on the active tool: close its dialog and bail out.
            self.current_widgets.close()
            return
        self.set_font_bold(btn, True)
        self.set_sidemenu_buttons_enabled(False, btn)
        msg_title = 'Combine Network'
        self.current_widgets = CombineNetworkWidgets(graph=self.graph.to_data(), parent=self)
        while True:
            self.current_widgets.show()
            if self.current_widgets.exec_():
                onnx_tool_error = False
                print_msg = ''
                props = self.current_widgets.get_properties()
                try:
                    onnx_graph = self.graph.to_onnx(non_verbose=True)
                    onnx_graphs = []
                    if props.combine_with_current_graph:
                        onnx_graphs.append(onnx_graph)
                    for onnx_file in props.input_onnx_file_paths:
                        graph = onnx.load(onnx_file)
                        onnx_graphs.append(graph)
                    exception = None
                    try:
                        # Capture the tool's console output by temporarily
                        # redirecting stdout into a StringIO buffer.
                        f = io.StringIO()
                        sys.stdout = f
                        onnx_model: onnx.ModelProto = onnx_tools_combine(srcop_destop=props.srcop_destop, op_prefixes_after_merging=props.op_prefixes_after_merging, input_onnx_file_paths=[], onnx_graphs=onnx_graphs, output_of_onnx_file_in_the_process_of_fusion=props.output_of_onnx_file_in_the_process_of_fusion, non_verbose=False)
                    except BaseException as e:
                        exception = e
                        onnx_tool_error = True
                        raise e
                    finally:
                        # Always restore stdout before touching the UI again.
                        sys.stdout = sys.__stdout__
                        print_msg = f.getvalue()
                        print(print_msg)
                        print_msg = remove_PrintColor(print_msg)
                        # Truncate so the message box stays readable.
                        print_msg = print_msg[:1000]
                        f.close()
                except BaseException as e:
                    # Errors are reported below via MessageBox; just log here.
                    print(e)
                if onnx_tool_error:
                    if print_msg:
                        MessageBox.error(print_msg, msg_title, parent=self)
                    else:
                        MessageBox.error(str(exception), msg_title, parent=self)
                    # Re-open the dialog so the user can correct the inputs.
                    continue
                # Surface any non-trivial tool output that isn't the plain
                # success message.
                if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                    MessageBox.warn(print_msg, msg_title, parent=self)
                model_name = self.windowTitle()
                self.load_graph(onnx_model=onnx_model, model_name=model_name)
                self.update_graph(update_layout=True)
                MessageBox.info(f'complete.', msg_title, parent=self)
                break
            else:
                # Dialog cancelled.
                break
        self.current_widgets = None
        self.set_sidemenu_buttons_enabled(True)
        self.set_font_bold(btn, False)
    def btnExtractNetwork_clicked(self, e: bool):
        """'Extract Network' tool (sne4onnx): extract a sub-network of the model.

        Same toggle/retry dialog pattern as btnCombineNetwork_clicked:
        a second click closes the open dialog; errors re-open it.

        Args:
            e: Checked state from the Qt clicked signal (unused).
        """
        btn = self.btnExtractNetwork
        if (self.current_button is btn):
            self.current_widgets.close()
            return
        self.set_font_bold(btn, True)
        self.set_sidemenu_buttons_enabled(False, btn)
        msg_title = 'Extract Network'
        self.current_widgets = ExtractNetworkWidgets(graph=self.graph.to_data(), parent=self)
        while True:
            self.current_widgets.show()
            if self.current_widgets.exec_():
                onnx_tool_error = False
                print_msg = ''
                exception = None
                props = self.current_widgets.get_properties()
                try:
                    onnx_graph = self.graph.to_onnx(non_verbose=True)
                    try:
                        # Capture the tool's console output via stdout redirect.
                        f = io.StringIO()
                        sys.stdout = f
                        onnx_model: onnx.ModelProto = onnx_tools_extraction(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                    except BaseException as e:
                        exception = e
                        onnx_tool_error = True
                        raise e
                    finally:
                        sys.stdout = sys.__stdout__
                        print_msg = f.getvalue()
                        print(print_msg)
                        print_msg = remove_PrintColor(print_msg)
                        # Truncated so the message box stays readable.
                        print_msg = print_msg[:1000]
                        f.close()
                except BaseException as e:
                    print(e)
                if onnx_tool_error:
                    if print_msg:
                        MessageBox.error(print_msg, msg_title, parent=self)
                    else:
                        MessageBox.error(str(exception), msg_title, parent=self)
                    continue
                if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                    MessageBox.warn(print_msg, msg_title, parent=self)
                model_name = self.windowTitle()
                self.load_graph(onnx_model=onnx_model, model_name=model_name)
                self.update_graph(update_layout=True)
                MessageBox.info(f'complete.', msg_title, parent=self)
                break
            else:
                break
        self.current_widgets = None
        self.set_sidemenu_buttons_enabled(True)
        self.set_font_bold(btn, False)
    def btnDelNode_clicked(self, e: bool):
        """'Delete Node' tool (snd4onnx): remove nodes from the model.

        Pre-fills the dialog with the currently selected nodes. Uses the
        same toggle/retry dialog pattern as the other tool buttons, but
        wraps the graph reload in an undo transaction so the deletion is
        undoable in one step.

        Args:
            e: Checked state from the Qt clicked signal (unused).
        """
        btn = self.btnDelNode
        if (self.current_button is btn):
            self.current_widgets.close()
            return
        self.set_font_bold(btn, True)
        self.set_sidemenu_buttons_enabled(False, btn)
        msg_title = 'Delete Node'
        selected_nodes = self.graph.get_selected_node_names()
        self.current_widgets = DeleteNodeWidgets(parent=self, graph=self.graph.to_data(), selected_nodes=selected_nodes)
        while True:
            self.current_widgets.show()
            if self.current_widgets.exec_():
                onnx_tool_error = False
                print_msg = ''
                exception = None
                props = self.current_widgets.get_properties()
                try:
                    onnx_graph = self.graph.to_onnx(non_verbose=True)
                    try:
                        # Capture the tool's console output via stdout redirect.
                        f = io.StringIO()
                        sys.stdout = f
                        onnx_model: onnx.ModelProto = onnx_tools_deletion(onnx_graph=onnx_graph, **props._asdict())
                    except BaseException as e:
                        onnx_tool_error = True
                        raise e
                    finally:
                        sys.stdout = sys.__stdout__
                        print_msg = f.getvalue()
                        print(print_msg)
                        print_msg = remove_PrintColor(print_msg)
                        print_msg = print_msg[:1000]
                        f.close()
                except BaseException as e:
                    exception = e
                    print(e)
                if onnx_tool_error:
                    if print_msg:
                        MessageBox.error(print_msg, msg_title, parent=self)
                    else:
                        MessageBox.error(str(exception), msg_title, parent=self)
                    continue
                if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                    MessageBox.warn(print_msg, msg_title, parent=self)
                model_name = self.windowTitle()
                # Group the reload into a single undoable operation.
                self.graph.begin_undo(msg_title)
                self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
                self.graph.end_undo()
                self.update_graph(update_layout=False)
                MessageBox.info(f'complete.', msg_title, parent=self)
                break
            else:
                break
        self.current_widgets = None
        self.set_sidemenu_buttons_enabled(True)
        self.set_font_bold(btn, False)
    def btnConstShrink_clicked(self, e: bool):
        """'Const Shrink' tool (scs4onnx): deduplicate/shrink constants.

        Same toggle/retry dialog pattern as the other tool buttons; the
        reload is wrapped in an undo transaction.

        Args:
            e: Checked state from the Qt clicked signal (unused).
        """
        btn = self.btnConstShrink
        if (self.current_button is btn):
            self.current_widgets.close()
            return
        self.set_font_bold(btn, True)
        self.set_sidemenu_buttons_enabled(False, btn)
        msg_title = 'Const Shrink'
        self.current_widgets = ConstantShrinkWidgets(parent=self)
        while True:
            self.current_widgets.show()
            if self.current_widgets.exec_():
                onnx_tool_error = False
                print_msg = ''
                exception = None
                props = self.current_widgets.get_properties()
                try:
                    onnx_graph = self.graph.to_onnx(non_verbose=True)
                    try:
                        # Capture the tool's console output via stdout redirect.
                        f = io.StringIO()
                        sys.stdout = f
                        # The tool returns (model, extracted constants); only
                        # the model is used here.
                        (onnx_model, _) = onnx_tools_shrinking(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                    except BaseException as e:
                        onnx_tool_error = True
                        raise e
                    finally:
                        sys.stdout = sys.__stdout__
                        print_msg = f.getvalue()
                        print(print_msg)
                        print_msg = remove_PrintColor(print_msg)
                        print_msg = print_msg[:1000]
                        f.close()
                except BaseException as e:
                    exception = e
                    print(e)
                if onnx_tool_error:
                    if print_msg:
                        MessageBox.error(print_msg, msg_title, parent=self)
                    else:
                        MessageBox.error(str(exception), msg_title, parent=self)
                    continue
                if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                    MessageBox.warn(print_msg, msg_title, parent=self)
                model_name = self.windowTitle()
                self.graph.begin_undo(msg_title)
                self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
                self.graph.end_undo()
                self.update_graph(update_layout=False)
                MessageBox.info(f'complete.', msg_title, parent=self)
                break
            else:
                break
        self.current_widgets = None
        self.set_sidemenu_buttons_enabled(True)
        self.set_font_bold(btn, False)
    def btnGenerateOperator_clicked(self, e: bool):
        """'Generate Operator' tool (sog4onnx): create a new single-op model.

        Same toggle/retry dialog pattern as the other tool buttons. Unlike
        them, the tool needs no current graph as input, so there is no outer
        try wrapper — the except clause here swallows the error (``pass``)
        and reports it via the message box instead of re-raising.

        Args:
            e: Checked state from the Qt clicked signal (unused).
        """
        btn = self.btnGenerateOperator
        if (self.current_button is btn):
            self.current_widgets.close()
            return
        self.set_font_bold(btn, True)
        self.set_sidemenu_buttons_enabled(False, btn)
        msg_title = 'Generate Operator'
        self.current_widgets = GenerateOperatorWidgets(opset=self.graph.opset, parent=self)
        while True:
            self.current_widgets.show()
            if self.current_widgets.exec_():
                onnx_tool_error = False
                print_msg = ''
                exception = None
                props = self.current_widgets.get_properties()
                try:
                    # Capture the tool's console output via stdout redirect.
                    f = io.StringIO()
                    sys.stdout = f
                    onnx_model: onnx.ModelProto = onnx_tools_generate(non_verbose=False, **props._asdict())
                except BaseException as e:
                    onnx_tool_error = True
                    exception = e
                    pass
                finally:
                    sys.stdout = sys.__stdout__
                    print_msg = f.getvalue()
                    print(print_msg)
                    print_msg = remove_PrintColor(print_msg)
                    print_msg = print_msg[:1000]
                    f.close()
                if onnx_tool_error:
                    if print_msg:
                        MessageBox.error(print_msg, msg_title, parent=self)
                    else:
                        MessageBox.error(str(exception), msg_title, parent=self)
                    continue
                if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                    MessageBox.warn(print_msg, msg_title, parent=self)
                model_name = self.windowTitle()
                self.graph.begin_undo(msg_title)
                self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
                self.graph.end_undo()
                self.update_graph(update_layout=True)
                MessageBox.info(f'complete.', msg_title, parent=self)
                break
            else:
                break
        self.current_widgets = None
        self.set_sidemenu_buttons_enabled(True)
        self.set_font_bold(btn, False)
    def btnModifyAttrConst_clicked(self, e: bool):
        """'Mod Attr and Const' tool (sam4onnx): edit attributes/constants.

        Pre-fills the dialog with the first currently selected node. Same
        toggle/retry dialog pattern as the other tool buttons; the reload
        is wrapped in an undo transaction.

        Args:
            e: Checked state from the Qt clicked signal (unused).
        """
        btn = self.btnModifyAttrConst
        if (self.current_button is btn):
            self.current_widgets.close()
            return
        self.set_font_bold(btn, True)
        self.set_sidemenu_buttons_enabled(False, btn)
        msg_title = 'Modify Attributes and Constants'
        selected_nodes = self.graph.selected_nodes()
        selected_node = ''
        if (len(selected_nodes) > 0):
            selected_node = selected_nodes[0].node_name
        self.current_widgets = ModifyAttrsWidgets(parent=self, graph=self.graph.to_data(), selected_node=selected_node)
        while True:
            self.current_widgets.show()
            if self.current_widgets.exec_():
                onnx_tool_error = False
                print_msg = ''
                exception = None
                props = self.current_widgets.get_properties()
                try:
                    onnx_graph = self.graph.to_onnx(non_verbose=True)
                    try:
                        # Capture the tool's console output via stdout redirect.
                        f = io.StringIO()
                        sys.stdout = f
                        onnx_model: onnx.ModelProto = onnx_tools_modify(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                    except BaseException as e:
                        onnx_tool_error = True
                        raise e
                    finally:
                        sys.stdout = sys.__stdout__
                        print_msg = f.getvalue()
                        print(print_msg)
                        print_msg = remove_PrintColor(print_msg)
                        print_msg = print_msg[:1000]
                        f.close()
                except BaseException as e:
                    exception = e
                    print(e)
                if onnx_tool_error:
                    if print_msg:
                        MessageBox.error(print_msg, msg_title, parent=self)
                    else:
                        MessageBox.error(str(exception), msg_title, parent=self)
                    continue
                if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                    MessageBox.warn(print_msg, msg_title, parent=self)
                model_name = self.windowTitle()
                self.graph.begin_undo(msg_title)
                self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
                self.graph.end_undo()
                self.update_graph(update_layout=False)
                MessageBox.info(f'complete.', msg_title, parent=self)
                break
            else:
                break
        self.current_widgets = None
        self.set_sidemenu_buttons_enabled(True)
        self.set_font_bold(btn, False)
def btnChangeOpset_clicked(self, e: bool):
    """Side-menu handler: open the 'Change Opset' dialog.

    Re-clicking the active button closes the dialog. On acceptance the
    requested opset conversion is run via the onnx tool and the graph is
    reloaded as one undoable operation.
    """
    btn = self.btnChangeOpset
    # Toggle behavior: re-click closes the currently open dialog.
    if (self.current_button is btn):
        self.current_widgets.close()
        return
    self.set_font_bold(btn, True)
    self.set_sidemenu_buttons_enabled(False, btn)
    msg_title = 'Change Opset'
    self.current_widgets = ChangeOpsetWidget(parent=self, current_opset=self.graph.opset)
    old_opset = self.graph.opset
    while True:
        self.current_widgets.show()
        if self.current_widgets.exec_():  # dialog accepted
            onnx_tool_error = False
            print_msg = ''
            exception = None
            props = self.current_widgets.get_properties()
            try:
                new_opset = int(props.opset)
                # No-op conversion: warn and re-open the dialog.
                if (old_opset == new_opset):
                    MessageBox.warn(f'opset num is same. not change.', msg_title, parent=self)
                    self.set_sidemenu_buttons_enabled(True)
                    continue
                onnx_graph = self.graph.to_onnx(non_verbose=True)
                try:
                    # Capture the tool's console output for later display.
                    f = io.StringIO()
                    sys.stdout = f
                    onnx_model: onnx.ModelProto = onnx_tools_op_change(opset=new_opset, onnx_graph=onnx_graph, non_verbose=False)
                except BaseException as e:
                    onnx_tool_error = True
                    raise e
                finally:
                    # Always restore stdout before using the captured text.
                    sys.stdout = sys.__stdout__
                    print_msg = f.getvalue()
                    print(print_msg)
                    print_msg = remove_PrintColor(print_msg)
                    print_msg = print_msg[:1000]  # keep the message box readable
                    f.close()
            except BaseException as e:
                exception = e
                print(e)
            if onnx_tool_error:
                # Show log or exception text, then let the user retry.
                if print_msg:
                    MessageBox.error(print_msg, msg_title, parent=self)
                else:
                    MessageBox.error(str(exception), msg_title, parent=self)
                continue
            if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                MessageBox.warn(print_msg, msg_title, parent=self)
            # Reload the converted model as a single undoable operation.
            model_name = self.windowTitle()
            self.graph.begin_undo(msg_title)
            self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
            self.graph.end_undo()
            self.update_graph(update_layout=False)
            MessageBox.info(f'Change opset {old_opset} to {new_opset}.', msg_title, parent=self)
            break
        else:
            # Dialog cancelled.
            break
    self.current_widgets = None
    self.set_sidemenu_buttons_enabled(True)
    self.set_font_bold(btn, False)
def btnChannelConvert_clicked(self, e: bool):
    """Side-menu handler: open the 'Channel Convert' dialog.

    Re-clicking the active button closes the dialog. On acceptance the
    channel-order conversion is run via the onnx tool and the graph is
    reloaded as one undoable operation.
    """
    btn = self.btnChannelConvert
    # Toggle behavior: re-click closes the currently open dialog.
    if (self.current_button is btn):
        self.current_widgets.close()
        return
    self.set_font_bold(btn, True)
    self.set_sidemenu_buttons_enabled(False, btn)
    msg_title = 'Channel Convert'
    self.current_widgets = ChangeChannelWidgets(graph=self.graph.to_data(), parent=self)
    while True:
        self.current_widgets.show()
        if self.current_widgets.exec_():  # dialog accepted
            onnx_tool_error = False
            print_msg = ''
            # BUG FIX: was `exception = e`, which stored the button-click
            # event argument; every sibling handler initializes to None.
            exception = None
            props = self.current_widgets.get_properties()
            try:
                onnx_graph = self.graph.to_onnx(non_verbose=True)
                try:
                    # Capture the tool's console output for later display.
                    f = io.StringIO()
                    sys.stdout = f
                    onnx_model: onnx.ModelProto = onnx_tools_order_conversion(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                except BaseException as e:
                    onnx_tool_error = True
                    raise e
                finally:
                    # Always restore stdout before using the captured text.
                    sys.stdout = sys.__stdout__
                    print_msg = f.getvalue()
                    print(print_msg)
                    print_msg = remove_PrintColor(print_msg)
                    print_msg = print_msg[:1000]  # keep the message box readable
                    f.close()
            except BaseException as e:
                exception = e
                print(e)
            if onnx_tool_error:
                # Show log or exception text, then let the user retry.
                if print_msg:
                    MessageBox.error(print_msg, msg_title, parent=self)
                else:
                    MessageBox.error(str(exception), msg_title, parent=self)
                continue
            if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                MessageBox.warn(print_msg, msg_title, parent=self)
            # Reload the converted model as a single undoable operation.
            model_name = self.windowTitle()
            self.graph.begin_undo(msg_title)
            self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
            self.graph.end_undo()
            self.update_graph(update_layout=False)
            MessageBox.info(f'complete.', msg_title, parent=self)
            break
        else:
            # Dialog cancelled.
            break
    self.current_widgets = None
    self.set_sidemenu_buttons_enabled(True)
    self.set_font_bold(btn, False)
def btnAddNode_clicked(self, e: bool):
    """Side-menu handler: open the 'Add Node' dialog.

    Re-clicking the active button closes the dialog. On acceptance the
    node-add operation is run via the onnx tool and the graph is reloaded
    as one undoable operation.
    """
    btn = self.btnAddNode
    # Toggle behavior: re-click closes the currently open dialog.
    if (self.current_button is btn):
        self.current_widgets.close()
        return
    self.set_font_bold(btn, True)
    self.set_sidemenu_buttons_enabled(False, btn)
    msg_title = 'Add Node'
    self.current_widgets = AddNodeWidgets(current_opset=self.graph.opset, graph=self.graph.to_data(), parent=self)
    while True:
        self.current_widgets.show()
        if self.current_widgets.exec_():  # dialog accepted
            onnx_tool_error = False
            print_msg = ''
            exception = None
            props = self.current_widgets.get_properties()
            try:
                onnx_graph = self.graph.to_onnx(non_verbose=True)
                # NOTE(review): unlike the sibling handlers, stdout is
                # redirected before entering the inner try; it is still
                # restored by the finally block below.
                f = io.StringIO()
                sys.stdout = f
                try:
                    onnx_model: onnx.ModelProto = onnx_tools_add(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                except BaseException as e:
                    onnx_tool_error = True
                    raise e
                finally:
                    # Always restore stdout before using the captured text.
                    sys.stdout = sys.__stdout__
                    print_msg = f.getvalue()
                    print(print_msg)
                    print_msg = remove_PrintColor(print_msg)
                    print_msg = print_msg[:1000]  # keep the message box readable
                    f.close()
            except BaseException as e:
                exception = e
                print(e)
            if onnx_tool_error:
                # Show log or exception text, then let the user retry.
                if print_msg:
                    MessageBox.error(print_msg, msg_title, parent=self)
                else:
                    MessageBox.error(str(exception), msg_title, parent=self)
                continue
            if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                MessageBox.warn(print_msg, msg_title, parent=self)
            # Reload the updated model as a single undoable operation.
            model_name = self.windowTitle()
            self.graph.begin_undo(msg_title)
            self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
            self.graph.end_undo()
            self.update_graph(update_layout=False)
            MessageBox.info(f'complete.', msg_title, parent=self)
            break
        else:
            # Dialog cancelled.
            break
    self.current_widgets = None
    self.set_sidemenu_buttons_enabled(True)
    self.set_font_bold(btn, False)
def btnInitializeBatchSize_clicked(self, e: bool):
    """Side-menu handler: open the 'Initialize Batchsize' dialog.

    Seeds the dialog with the first graph input's leading dimension,
    then runs the batch-size initialization via the onnx tool and
    reloads the graph as one undoable operation.
    """
    btn = self.btnInitializeBatchSize
    # Toggle behavior: re-click closes the currently open dialog.
    if (self.current_button is btn):
        self.current_widgets.close()
        return
    self.set_font_bold(btn, True)
    self.set_sidemenu_buttons_enabled(False, btn)
    msg_title = 'Initialize Batchsize'
    d = self.graph.to_data()
    # Take the first input's leading dimension as the current batch size;
    # '-1' (dynamic) when the graph has no inputs.
    current_batchsize = '-1'
    for (key, inp) in d.inputs.items():
        current_batchsize = inp.shape[0]
        break
    self.current_widgets = InitializeBatchsizeWidget(current_batchsize=current_batchsize, parent=self)
    while True:
        self.current_widgets.show()
        if self.current_widgets.exec_():  # dialog accepted
            onnx_tool_error = False
            print_msg = ''
            exception = None
            props = self.current_widgets.get_properties()
            try:
                onnx_graph = self.graph.to_onnx(non_verbose=True)
                try:
                    # Capture the tool's console output for later display.
                    f = io.StringIO()
                    sys.stdout = f
                    onnx_model: onnx.ModelProto = onnx_tools_batchsize_initialize(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                except BaseException as e:
                    onnx_tool_error = True
                    raise e
                finally:
                    # Always restore stdout before using the captured text.
                    sys.stdout = sys.__stdout__
                    print_msg = f.getvalue()
                    print(print_msg)
                    print_msg = remove_PrintColor(print_msg)
                    print_msg = print_msg[:1000]  # keep the message box readable
                    f.close()
            except BaseException as e:
                exception = e
                print(e)
            if onnx_tool_error:
                # Show log or exception text, then let the user retry.
                if print_msg:
                    MessageBox.error(print_msg, msg_title, parent=self)
                else:
                    MessageBox.error(str(exception), msg_title, parent=self)
                continue
            if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                MessageBox.warn(print_msg, msg_title, parent=self)
            # Reload the updated model as a single undoable operation.
            model_name = self.windowTitle()
            self.graph.begin_undo(msg_title)
            self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
            self.graph.end_undo()
            self.update_graph(update_layout=False)
            MessageBox.info(f'complete.', msg_title, parent=self)
            break
        else:
            # Dialog cancelled.
            break
    self.current_widgets = None
    self.set_sidemenu_buttons_enabled(True)
    self.set_font_bold(btn, False)
def btnIOChange_clicked(self, e: bool):
    """Side-menu handler: open the 'IO Change' dialog.

    Re-clicking the active button closes the dialog. On acceptance the
    input/output shape change is run via the onnx tool and the graph is
    reloaded as one undoable operation.
    """
    btn = self.btnIOChange
    # Toggle behavior: re-click closes the currently open dialog.
    if (self.current_button is btn):
        self.current_widgets.close()
        return
    self.set_font_bold(btn, True)
    self.set_sidemenu_buttons_enabled(False, btn)
    msg_title = 'IO Change'
    d = self.graph.to_data()
    self.current_widgets = ChangeInputOutputShapeWidget(graph=d, parent=self)
    while True:
        self.current_widgets.show()
        if self.current_widgets.exec_():  # dialog accepted
            onnx_tool_error = False
            print_msg = ''
            exception = None
            # get_properties() returns a pair here; the second element is
            # unused by this handler.
            (props, _) = self.current_widgets.get_properties()
            try:
                onnx_graph = self.graph.to_onnx(non_verbose=True)
                try:
                    # Capture the tool's console output for later display.
                    f = io.StringIO()
                    sys.stdout = f
                    onnx_model: onnx.ModelProto = onnx_tools_io_change(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                except BaseException as e:
                    onnx_tool_error = True
                    raise e
                finally:
                    # Always restore stdout before using the captured text.
                    sys.stdout = sys.__stdout__
                    print_msg = f.getvalue()
                    print(print_msg)
                    print_msg = remove_PrintColor(print_msg)
                    print_msg = print_msg[:1000]  # keep the message box readable
                    f.close()
            except BaseException as e:
                exception = e
                print(e)
            if onnx_tool_error:
                # Show log or exception text, then let the user retry.
                if print_msg:
                    MessageBox.error(print_msg, msg_title, parent=self)
                else:
                    MessageBox.error(str(exception), msg_title, parent=self)
                continue
            if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                MessageBox.warn(print_msg, msg_title, parent=self)
            # Reload the updated model as a single undoable operation.
            model_name = self.windowTitle()
            self.graph.begin_undo(msg_title)
            self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
            self.graph.end_undo()
            self.update_graph(update_layout=False)
            MessageBox.info(f'complete.', msg_title, parent=self)
            break
        else:
            # Dialog cancelled.
            break
    self.current_widgets = None
    self.set_sidemenu_buttons_enabled(True)
    self.set_font_bold(btn, False)
def btnRenameOp_clicked(self, e: bool):
    """Side-menu handler: open the 'Rename Op' dialog.

    Re-clicking the active button closes the dialog. On acceptance the
    rename operation is run via the onnx tool and the graph is reloaded
    as one undoable operation.
    """
    btn = self.btnRenameOp
    # Toggle behavior: re-click closes the currently open dialog.
    if (self.current_button is btn):
        self.current_widgets.close()
        return
    self.set_font_bold(btn, True)
    self.set_sidemenu_buttons_enabled(False, btn)
    msg_title = 'Rename Op'
    self.current_widgets = RenameOpWidget(parent=self)
    while True:
        self.current_widgets.show()
        if self.current_widgets.exec_():  # dialog accepted
            onnx_tool_error = False
            print_msg = ''
            exception = None
            props = self.current_widgets.get_properties()
            try:
                onnx_graph = self.graph.to_onnx(non_verbose=True)
                try:
                    # Capture the tool's console output for later display.
                    f = io.StringIO()
                    sys.stdout = f
                    onnx_model: onnx.ModelProto = onnx_tools_rename(onnx_graph=onnx_graph, non_verbose=False, **props._asdict())
                except BaseException as e:
                    onnx_tool_error = True
                    raise e
                finally:
                    # Always restore stdout before using the captured text.
                    sys.stdout = sys.__stdout__
                    print_msg = f.getvalue()
                    print(print_msg)
                    print_msg = remove_PrintColor(print_msg)
                    print_msg = print_msg[:1000]  # keep the message box readable
                    f.close()
            except BaseException as e:
                exception = e
                print(e)
            if onnx_tool_error:
                # Show log or exception text, then let the user retry.
                if print_msg:
                    MessageBox.error(print_msg, msg_title, parent=self)
                else:
                    MessageBox.error(str(exception), msg_title, parent=self)
                continue
            if (print_msg.strip() and (print_msg != 'INFO: Finish!\n')):
                MessageBox.warn(print_msg, msg_title, parent=self)
            # Reload the updated model as a single undoable operation.
            model_name = self.windowTitle()
            self.graph.begin_undo(msg_title)
            self.load_graph(onnx_model=onnx_model, model_name=model_name, clear_undo_stack=False, push_undo=True)
            self.graph.end_undo()
            self.update_graph(update_layout=False)
            MessageBox.info(f'complete.', msg_title, parent=self)
            break
        else:
            # Dialog cancelled.
            break
    self.current_widgets = None
    self.set_sidemenu_buttons_enabled(True)
    self.set_font_bold(btn, False)
def exit(self):
    """Close the main window and terminate the interpreter process."""
    self.close()
    sys.exit(0)
def ait_register_custom_acc_mapper_fn(op_and_target: Tuple[(str, Union[(str, Callable)])], arg_replacement_tuples: List[Union[(Tuple[(Union[(str, Tuple[(str, ...)])], str)], Tuple[(Union[(str, Tuple[(str, ...)])], str, bool)])]], needs_shapes_for_normalization=False, allow_normalize_from_torch_package=False):
    """Decorator factory that registers a custom AIT acc-op mapper.

    The returned decorator records the wrapped mapping function (together
    with its argument-replacement spec and normalization flags) under the
    ``op_and_target`` key and hands the function back unchanged.
    """
    def register(custom_mapping_fn: Callable):
        # Build the mapper record first, then store it under its key.
        mapper = CustomAitAccOpMapper(
            custom_mapping_fn=custom_mapping_fn,
            arg_replacement_tuples=arg_replacement_tuples,
            needs_shapes_for_normalization=needs_shapes_for_normalization,
            allow_normalize_from_torch_package=allow_normalize_from_torch_package,
        )
        _CUSTOM_AIT_ACC_OP_MAPPERS[op_and_target] = mapper
        return custom_mapping_fn
    return register
# NOTE(review): the original line read `(scope='session')` — a bare
# parenthesized keyword expression, which is a SyntaxError. The decorator
# name was evidently lost; restored as a session-scoped pytest fixture,
# matching the surrounding fixture pattern.
@pytest.fixture(scope='session')
def rollbar_secrets(saas_config):
    """Session-scoped fixture with Rollbar connector credentials.

    Each value prefers the corresponding ``saas_config`` entry and falls
    back to the shared ``secrets`` mapping.
    """
    return {
        'domain': (pydash.get(saas_config, 'rollbar.domain') or secrets['domain']),
        'read_access_token': (pydash.get(saas_config, 'rollbar.read_access_token') or secrets['read_access_token']),
        'write_access_token': (pydash.get(saas_config, 'rollbar.write_access_token') or secrets['write_access_token']),
        'page_limit': (pydash.get(saas_config, 'rollbar.page_limit') or secrets['page_limit']),
    }
class bsn_generic_stats_reply(bsn_stats_reply):
    """Big Switch Networks generic experimenter stats reply.

    Generated-style OpenFlow message wrapper: fixed header constants plus
    a list of generic stats entries. NOTE(review): the str-based packing
    (``'\\x00' * 4``, ``''.join``) indicates this is Python 2 era code.
    """
    version = 4  # OpenFlow wire version (1.3)
    type = 19  # stats/multipart reply message type
    stats_type = 65535  # experimenter stats (0xffff)
    experimenter = 6035143  # Big Switch Networks experimenter id
    subtype = 16

    def __init__(self, xid=None, flags=None, entries=None):
        # Defaults: xid stays None (filled in by the transport), flags 0,
        # entries an empty list.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize this message to its wire format string."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4 bytes of padding
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        # Patch the real total length into the placeholder slot.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # BUG FIX: `unpack` takes a reader (not self) and is invoked as
    # `bsn_generic_stats_reply.unpack(reader)` by the parser dispatch, so it
    # must be a staticmethod; the decorator was missing in the original.
    @staticmethod
    def unpack(reader):
        """Parse a message of this type from *reader*, validating constants."""
        obj = bsn_generic_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # padding
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 16)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_generic_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        # Equal iff same concrete type and all mutable fields match.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering of this message to printer *q*."""
        q.text('bsn_generic_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
def _get_base_model_descriptions(model_cls: 'BaseModel') -> List[ParameterDescription]:
    """Build a ParameterDescription for every field of a pydantic model.

    Supports both pydantic v1 and v2 by branching on the installed major
    version for schema access, field access, and default extraction.
    """
    from dbgpt._private import pydantic
    version = int(pydantic.VERSION.split('.')[0])
    schema = (model_cls.model_json_schema() if (version >= 2) else model_cls.schema())
    required_fields = set(schema.get('required', []))
    param_descs = []
    for (field_name, field_schema) in schema.get('properties', {}).items():
        # BUG FIX: `model_fields` exists only in pydantic v2; v1 exposes
        # fields via `__fields__` (the version branch was missing here).
        if (version >= 2):
            field = model_cls.model_fields[field_name]
        else:
            field = model_cls.__fields__[field_name]
        param_type = field_schema.get('type')
        if ((not param_type) and ('anyOf' in field_schema)):
            # Optional[X] is emitted as anyOf [X, null]; take the first
            # non-null member. BUG FIX: use .get() — `$ref` members of
            # anyOf carry no 'type' key and would raise KeyError.
            for any_of in field_schema['anyOf']:
                if (any_of.get('type') and (any_of['type'] != 'null')):
                    param_type = any_of['type']
                    break
        if (version >= 2):
            # v2 marks unset defaults with the PydanticUndefined sentinel.
            default_value = (field.default if (hasattr(field, 'default') and (str(field.default) != 'PydanticUndefined')) else None)
        else:
            default_value = (field.default if (not field.allow_none) else (field.default_factory() if callable(field.default_factory) else None))
        description = field_schema.get('description', '')
        is_required = (field_name in required_fields)
        valid_values = None
        ext_metadata = None
        if hasattr(field, 'field_info'):
            # pydantic v1 keeps choices/extra metadata on field_info.
            valid_values = (list(field.field_info.choices) if hasattr(field.field_info, 'choices') else None)
            ext_metadata = (field.field_info.extra if hasattr(field.field_info, 'extra') else None)
        # BUG FIX: a stray trailing comma made param_class a 1-tuple;
        # it is the dotted class path string.
        param_class = f'{model_cls.__module__}.{model_cls.__name__}'
        param_desc = ParameterDescription(param_class=param_class, param_name=field_name, param_type=param_type, default_value=default_value, description=description, required=is_required, valid_values=valid_values, ext_metadata=ext_metadata)
        param_descs.append(param_desc)
    return param_descs
# NOTE(review): the two leading lines were bare tuples with keyword
# arguments — invalid outside a call — i.e. decorators whose names were
# stripped. Restored as the standard click command/option pair used by
# fandogh CLI commands.
@click.command('request-certificate', cls=FandoghCommand)
@click.option('--name', 'name', prompt='Domain name', help='The domain name')
def request_certificate(name):
    """Submit a certificate request for *name* and print a follow-up hint."""
    create_certificate(name)
    command = format_text('fandogh domain details --name {}'.format(name), TextStyle.OKBLUE)
    click.echo("Your request has been submitted and we are trying to get a certificate from Let's Encrypt for yourdomain, it might get a few minutes to complete.\nyou can follow up your request using {}".format(command))
def load(file, enforce_filetype=True):
    """Load a pickled transformer, waiting for a concurrent writer if needed.

    Tries an immediate unpickle; on failure (file missing or partially
    written by another worker) polls every ``s`` seconds until the file
    exists, aborting after ``lim`` seconds, then retries the load.

    :param file: path to the pickle file.
    :param enforce_filetype: if True, normalize the path via ``pickled``.
    :raises ParallelProcessingError: if the file never appears within the
        time limit.
    """
    (s, lim) = get_ivals()  # poll interval and time limit
    if enforce_filetype:
        file = pickled(file)
    try:
        return pickle_load(file)
    except (EOFError, OSError, IOError) as exc:
        msg = str(exc)
        warnings.warn(('Could not load transformer at %s. Will check every %.1f seconds for %i seconds before aborting. ' % (file, s, lim)), ParallelProcessingWarning)
        ts = time()
        while (not os.path.exists(file)):
            sleep(s)
            if ((time() - ts) > lim):
                # BUG FIX: the original interpolated the builtin `dir`
                # instead of the target path `file` into this message.
                raise ParallelProcessingError(('Could not load transformer at %s\nDetails:\n%r' % (file, msg)))
        # File now exists; retry (may still raise if the write is torn).
        return pickle_load(file)
class Test_HotStart_rans3p(object):
    """Hot-start regression tests for the rans3p solver.

    Each test runs a short simulation, then re-runs it as a hot start from
    the saved solution and compares the final field against stored CSVs.
    """

    def setup_class(cls):
        # Make this test directory importable for the solver modules.
        cls._scriptdir = os.path.dirname(os.path.abspath(__file__))
        sys.path.insert(0, cls._scriptdir)

    def teardown_class(cls):
        sys.path.remove(cls._scriptdir)
        pass

    def setup_method(self, method):
        self.aux_names = []

    def teardown_method(self, method):
        pass

    def test_hotstart_p1(self):
        # P1 velocity space: cold run, then hot start from t=0.1.
        self.compare_name = 'T01P1_hotstart'
        self.example_setting('T=0.1 vspaceOrder=1 onlySaveFinalSolution=True', h5_filename='solution_p1')
        self.example_setting('T=0.1 vspaceOrder=1 onlySaveFinalSolution=True isHotStart=True', h5_filename='solution_p1', check_result=True, isHotstart=True, hotstart_t=0.1)

    def test_hotstart_p2(self):
        # P2 velocity space: cold run, then hot start from t=0.1.
        self.compare_name = 'T01P2_hotstart'
        self.example_setting('T=0.1 vspaceOrder=2 onlySaveFinalSolution=True', h5_filename='solution_p2')
        self.example_setting('T=0.1 vspaceOrder=2 onlySaveFinalSolution=True isHotStart=True', h5_filename='solution_p2', check_result=True, isHotstart=True, hotstart_t=0.1)

    def example_setting(self, pre_setting, h5_filename, check_result=False, isHotstart=False, hotstart_t=0.0):
        """Run one simulation configured by *pre_setting*.

        :param pre_setting: context options string passed to proteus.
        :param h5_filename: basename for the solution archive.
        :param check_result: compare the final field against reference CSVs.
        :param isHotstart: restart from a previously saved solution.
        :param hotstart_t: time to hot-start from.
        """
        Context.contextOptionsString = pre_setting
        from . import NS_hotstart_so as my_so
        # Re-import so the module picks up the new context options.
        reload(my_so)
        opts.profile = False
        opts.gatherArchive = True
        opts.hotStart = isHotstart
        opts.hotStartTime = hotstart_t
        pList = []
        nList = []
        sList = []
        # Load each physics/numerics module pair declared by the split-op module.
        for (pModule, nModule) in my_so.pnList:
            pList.append(importlib.import_module(('.' + pModule), 'proteus.tests.HotStart_3P'))
            nList.append(importlib.import_module(('.' + nModule), 'proteus.tests.HotStart_3P'))
            if (pList[(- 1)].name == None):
                pList[(- 1)].name = pModule
            reload(pList[(- 1)])
            reload(nList[(- 1)])
        if (my_so.sList == []):
            # Fall back to the default simulation settings per model.
            for i in range(len(my_so.pnList)):
                s = default_s
                sList.append(s)
        else:
            sList = my_so.sList
        my_so.name = h5_filename
        ns = proteus.NumericalSolution.NS_base(my_so, pList, nList, sList, opts)
        self.aux_names.append(ns.modelList[0].name)
        ns.calculateSolution(my_so.name)
        if check_result:
            # Compare the flattened final solution against the reference CSV.
            actual = tables.open_file((my_so.name + '.h5'))
            expected_path = ((('comparison_files/' + 'comparison_') + self.compare_name) + '_u_t2.csv')
            np.testing.assert_almost_equal(np.fromfile(os.path.join(self._scriptdir, expected_path), sep=','), np.array(actual.root.u_t2).flatten(), decimal=10)
class AdCreativeLinkData(AbstractObject):
    """Generated-style wrapper for the AdCreativeLinkData Graph API object.

    Holds the field-name constants, the field-type map used for
    deserialization, and the FormatOption enum values.
    """

    def __init__(self, api=None):
        super(AdCreativeLinkData, self).__init__()
        self._isAdCreativeLinkData = True
        self._api = api

    class Field(AbstractObject.Field):
        # API field names, exposed as string constants.
        ad_context = 'ad_context'
        additional_image_index = 'additional_image_index'
        app_link_spec = 'app_link_spec'
        attachment_style = 'attachment_style'
        automated_product_tags = 'automated_product_tags'
        branded_content_shared_to_sponsor_status = 'branded_content_shared_to_sponsor_status'
        branded_content_sponsor_page_id = 'branded_content_sponsor_page_id'
        call_to_action = 'call_to_action'
        caption = 'caption'
        child_attachments = 'child_attachments'
        collection_thumbnails = 'collection_thumbnails'
        customization_rules_spec = 'customization_rules_spec'
        description = 'description'
        event_id = 'event_id'
        force_single_link = 'force_single_link'
        format_option = 'format_option'
        image_crops = 'image_crops'
        image_hash = 'image_hash'
        image_layer_specs = 'image_layer_specs'
        image_overlay_spec = 'image_overlay_spec'
        link = 'link'
        message = 'message'
        multi_share_end_card = 'multi_share_end_card'
        multi_share_optimized = 'multi_share_optimized'
        name = 'name'
        offer_id = 'offer_id'
        page_welcome_message = 'page_welcome_message'
        picture = 'picture'
        post_click_configuration = 'post_click_configuration'
        preferred_image_tags = 'preferred_image_tags'
        retailer_item_ids = 'retailer_item_ids'
        show_multiple_images = 'show_multiple_images'
        static_fallback_spec = 'static_fallback_spec'
        use_flexible_image_aspect_ratio = 'use_flexible_image_aspect_ratio'

    class FormatOption():
        # Valid enum values for the format_option field.
        carousel_ar_effects = 'carousel_ar_effects'
        carousel_images_multi_items = 'carousel_images_multi_items'
        carousel_images_single_item = 'carousel_images_single_item'
        carousel_slideshows = 'carousel_slideshows'
        single_image = 'single_image'

    # Maps field name -> declared API type, used for deserialization.
    _field_types = {'ad_context': 'string', 'additional_image_index': 'int', 'app_link_spec': 'AdCreativeLinkDataAppLinkSpec', 'attachment_style': 'string', 'automated_product_tags': 'bool', 'branded_content_shared_to_sponsor_status': 'string', 'branded_content_sponsor_page_id': 'string', 'call_to_action': 'AdCreativeLinkDataCallToAction', 'caption': 'string', 'child_attachments': 'list<AdCreativeLinkDataChildAttachment>', 'collection_thumbnails': 'list<AdCreativeCollectionThumbnailInfo>', 'customization_rules_spec': 'list<AdCustomizationRuleSpec>', 'description': 'string', 'event_id': 'string', 'force_single_link': 'bool', 'format_option': 'FormatOption', 'image_crops': 'AdsImageCrops', 'image_hash': 'string', 'image_layer_specs': 'list<AdCreativeLinkDataImageLayerSpec>', 'image_overlay_spec': 'AdCreativeLinkDataImageOverlaySpec', 'link': 'string', 'message': 'string', 'multi_share_end_card': 'bool', 'multi_share_optimized': 'bool', 'name': 'string', 'offer_id': 'string', 'page_welcome_message': 'string', 'picture': 'string', 'post_click_configuration': 'AdCreativePostClickConfiguration', 'preferred_image_tags': 'list<string>', 'retailer_item_ids': 'list<string>', 'show_multiple_images': 'bool', 'static_fallback_spec': 'AdCreativeStaticFallbackSpec', 'use_flexible_image_aspect_ratio': 'bool'}

    # BUG FIX: this method takes `cls` and is called on the class, but the
    # @classmethod decorator was missing in the original.
    @classmethod
    def _get_field_enum_info(cls):
        """Return the enum-valued fields and their allowed values."""
        field_enum_info = {}
        field_enum_info['FormatOption'] = AdCreativeLinkData.FormatOption.__dict__.values()
        return field_enum_info
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.