code stringlengths 281 23.7M |
|---|
# NOTE(review): the line below looks like a truncated decorator — presumably
# '@pytest.mark' was stripped from '@pytest.mark.parametrize(...)' in transit.
.parametrize('restrictions', [(('+',), ('+',)), (('+',), ('-',)), (('-',), ('+',)), (('-', '+'), ('+', '+')), (('-', '+'), ('-', '+')), (('-', '+'), ('+', '-')), (('-', '+'), ('-', '-')), (('+', '+'), ('+', '+')), (('+', '+'), ('-', '+')), (('+', '+'), ('+', '-')), (('+', '+'), ('-', '-')), (('-', '-'), ('+', '+')), (('-', '-'), ('-', '+')), (('-', '-'), ('+', '-')), (('-', '-'), ('-', '-')), (('+', '-'), ('+', '+')), (('+', '-'), ('-', '+')), (('+', '-'), ('+', '-')), (('+', '-'), ('-', '-')), (('+', '+', '-', '-'), ('+', '-', '+', '-'))])
def test_bilinear_interior_facet_integral(dg_trial_test, restrictions):
    """Assemble a sum of restricted inner(u, v)*dS terms and check the
    resulting 2x2 interior-facet matrix entry-by-entry.

    Each (u_r, v_r) restriction pair adds one term to the form and one
    expected contribution at matrix position (idx[v_r], idx[u_r]).
    NOTE(review): sqrt(2) is presumably the measure of the single interior
    facet of the fixture's mesh — confirm against the dg_trial_test fixture.
    """
    (u, v) = dg_trial_test
    (trial_r, test_r) = restrictions
    # Map restriction symbol to a matrix index: '+' -> row/col 0, '-' -> 1.
    idx = {'+': 0, '-': 1}
    exact = np.zeros((2, 2), dtype=float)
    form = 0
    for (u_r, v_r) in zip(trial_r, test_r):
        # One facet integral per restricted trial/test pairing.
        form = (form + (inner(u(u_r), v(v_r)) * dS))
        exact[(idx[v_r], idx[u_r])] += sqrt(2)
    # Assemble to a dense matrix and compare against the expected entries.
    interior_facet = assemble(form).M.values
    assert np.allclose((interior_facet - exact), 0.0)
def parse_config(file):
    """Parse a cvehound ini-style configuration file into a plain dict.

    The file itself has no section header, so a synthetic ``[cvehound]``
    section is prepended before handing the text to ConfigParser.

    Post-processing:
      * cve, exclude, cwe, files, ignore_files -> whitespace-split lists
      * verbose                                -> int
      * check_strict, all_files, exploit       -> bool (y/n, t/f, 1/0,
        yes/no, true/false, case-insensitive)

    :param file: path of the configuration file to read
    :return: dict of option name -> parsed value
    :raises ValueError: if ``verbose`` is not an integer or a boolean
        option has an unrecognized value
    """
    parser = ConfigParser()
    with open(file, 'rt') as fh:
        parser.read_string('[cvehound]\n' + fh.read())
    config = dict(parser['cvehound'])
    # Whitespace-separated list options.
    for key in ('cve', 'exclude', 'cwe', 'files', 'ignore_files'):
        if key in config:
            config[key] = config[key].split()
    if 'verbose' in config:
        try:
            config['verbose'] = int(config['verbose'])
        except ValueError:
            # ValueError (a subclass of Exception) keeps old callers working
            # while being the idiomatic type for a bad value.
            raise ValueError('"verbose" should be an integer')
    truthy = {'y', 't', '1', 'yes', 'true'}
    falsy = {'n', 'f', '0', 'no', 'false'}
    for key in ('check_strict', 'all_files', 'exploit'):
        if key not in config:
            continue
        value = config[key].lower()
        if value in truthy:
            config[key] = True
        elif value in falsy:
            config[key] = False
        else:
            raise ValueError("Can't parse boolean argument " + key)
    return config
def test_create_multi_namespace():
    """A function registered via the returned registry must be visible
    through the top-level catalogue helpers under the ('x', 'y') namespace.
    """
    test_registry = catalogue.create('x', 'y')

    # NOTE(review): the original text had a bare `_registry.register('z')`
    # statement before `def z()` — an undefined name that registered nothing.
    # Restored as the decorator form used throughout catalogue's test suite.
    @test_registry.register('z')
    def z():
        pass

    items = test_registry.get_all()
    assert len(items) == 1
    assert items['z'] == z
    assert catalogue.check_exists('x', 'y', 'z')
    assert catalogue._get(('x', 'y', 'z')) == z
def extractMuchadoaboutbluebottlesBlogspotCom(item):
    """Build a release message for items from the
    'Much ado about bluebottles' blogspot feed.

    Returns None for previews / unparseable titles, a release message for
    recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (tag to look for, release name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsPointgrouping(Options):
    """Auto-generated Highcharts option wrapper for
    plotOptions.windbarb.sonification.defaultInstrumentOptions.pointGrouping.

    NOTE(review): every option appears twice below under the same name
    (getter form, then setter form).  As written the second ``def`` simply
    replaces the first in the class namespace; presumably ``@property`` and
    ``@<name>.setter`` decorators were stripped in transit — confirm against
    the code generator's output.
    """

    def algorithm(self):
        # Getter: grouping algorithm, default 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        # Setter: store the grouping algorithm name.
        self._config(text, js_type=False)

    def enabled(self):
        # Getter: point grouping defaults to enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Getter: grouping timespan, default 15 (units per Highcharts docs —
        # TODO confirm).
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Getter: point property used for grouping, default 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class TestCmdSet(BaseEvenniaTest):
    """Exercise CmdSet removal and lookup by command key."""

    def test_cmdset_remove_by_key(self):
        """Removing by key drops the matching command class from the set."""
        cmdset = _CmdSetTest()
        cmdset.remove('another command')
        self.assertNotIn(_CmdTest2, cmdset.commands)

    def test_cmdset_gets_by_key(self):
        """get() by key returns an instance of the matching command."""
        cmdset = _CmdSetTest()
        fetched = cmdset.get('another command')
        self.assertIsInstance(fetched, _CmdTest2)
class TestParseRankList(unittest.TestCase):
    """Tests for comms_utils.parseRankList covering its three input forms:
    comma-separated, colon range, and a single rank."""

    def test_comma_separated(self):
        """'a,b,c,d' yields exactly the listed ranks, in order."""
        info = bootstrap_info_test()
        info.world_size = 8
        ranks = comms_utils.parseRankList('0,2,4,6')
        self.assertEqual(4, len(ranks))
        for position, expected in enumerate([0, 2, 4, 6]):
            self.assertEqual(expected, ranks[position])

    def test_range_ranks(self):
        """'lo:hi' expands to the inclusive range of ranks."""
        info = bootstrap_info_test()
        info.world_size = 8
        ranks = comms_utils.parseRankList('0:7')
        self.assertEqual(8, len(ranks))
        for position in range(8):
            self.assertEqual(position, ranks[position])

    def test_single_rank(self):
        """A bare integer parses to a one-element list."""
        info = bootstrap_info_test()
        info.world_size = 8
        ranks = comms_utils.parseRankList('5')
        self.assertEqual(1, len(ranks))
        self.assertEqual(5, ranks[0])
class TestArraySourceAttributes(unittest.TestCase):
    """Tests for ArraySource add/remove/rename_attribute."""

    def setUp(self):
        scalars = numpy.ones((2, 2))
        self.src = ArraySource(scalar_data=scalars, scalar_name='s1')

    def _sample_attributes(self):
        """Return scalar/vector/tensor sample arrays keyed by name
        (insertion order s2, v1, t1)."""
        return {
            's2': self.src.scalar_data.ravel() + 1.0,
            'v1': numpy.ones((4, 3)),
            't1': numpy.ones((4, 9)),
        }

    def test_add_attribute_works_for_point_data(self):
        src = self.src
        samples = self._sample_attributes()
        for name, arr in samples.items():
            src.add_attribute(arr, name)
        for name, arr in samples.items():
            stored = src.image_data.point_data.get_array(name).to_array()
            self.assertTrue(numpy.allclose(stored, arr))

    def test_add_attribute_works_for_cell_data(self):
        src = self.src
        samples = self._sample_attributes()
        for name, arr in samples.items():
            src.add_attribute(arr, name, category='cell')
        for name, arr in samples.items():
            stored = src.image_data.cell_data.get_array(name).to_array()
            self.assertTrue(numpy.allclose(stored, arr))

    def test_add_attribute_raises_errors(self):
        src = self.src
        # Wrong rank and wrong width must both be rejected.
        for bad in (numpy.ones((4, 3, 3)), numpy.ones((4, 5))):
            self.assertRaises(AssertionError, src.add_attribute, bad, 's2')

    def test_remove_attribute(self):
        src = self.src
        s1 = src.scalar_data
        src.add_attribute(s1.ravel() + 1.0, 's2')
        src.remove_attribute('s2')
        self.assertEqual(src.image_data.point_data.get_array('s2'), None)
        # Removing s2 must not disturb the original scalars.
        self.assertTrue(numpy.allclose(src.image_data.point_data.get_array('s1').to_array(), s1.ravel()))

    def test_rename_attribute(self):
        src = self.src
        s2 = src.scalar_data.ravel() + 1.0
        src.add_attribute(s2, 's2')
        src.rename_attribute('s2', 's3')
        self.assertTrue(numpy.all(src.image_data.point_data.get_array('s3') == s2))
        self.assertEqual(src.image_data.point_data.get_array('s2'), None)
class VectorCutPlane(Module):
    """Mayavi module that cuts the input dataset with an implicit plane and
    draws glyphs for the vector data on the resulting cut.

    Pipeline: source -> ImplicitPlane -> Cutter -> Glyph -> Actor.
    The trait-change handlers below do the wiring whenever a component is
    (re)assigned.
    """

    # Version number of this class (used by the persistence machinery).
    __version__ = 0

    # The interactive plane widget that defines the cut.
    implicit_plane = Instance(ImplicitPlane, allow_none=False, record=True)

    # Performs the actual cut using the plane's implicit function.
    cutter = Instance(Cutter, allow_none=False, record=True)

    # Renders vector glyphs at the cut points.
    glyph = Instance(Glyph, allow_none=False, record=True)

    actor = Instance(Actor, allow_none=False, record=True)

    # Accepts any dataset that carries vector attributes.
    input_info = PipelineInfo(datasets=['any'], attribute_types=['any'], attributes=['vectors'])

    view = View(Group(Item(name='implicit_plane', style='custom'), label='ImplicitPlane', show_labels=False), Group(Item(name='glyph', style='custom', resizable=True), label='Glyph', show_labels=False), Group(Item(name='actor', style='custom'), label='Actor', show_labels=False))

    def setup_pipeline(self):
        """Create the pipeline components.

        Note: inputs are not connected here; assigning the traits fires the
        *_changed handlers below, which perform the wiring.
        """
        self.implicit_plane = ImplicitPlane()
        self.cutter = Cutter()
        self.glyph = Glyph(module=self, scale_mode='scale_by_vector', color_mode='color_by_vector', show_scale_mode=False)
        self.glyph.glyph_source.glyph_position = 'tail'
        actor = self.actor = Actor()
        actor.mapper.scalar_visibility = 1
        actor.property.trait_set(line_width=2, backface_culling=False, frontface_culling=False)

    def update_pipeline(self):
        """Re-wire the pipeline when the module manager / input changes."""
        mm = self.module_manager
        if (mm is None):
            return
        self.implicit_plane.inputs = [mm.source]
        # Re-apply LUT wiring for whatever color mode is currently active.
        self._color_mode_changed(self.glyph.color_mode)
        self.pipeline_changed = True

    def update_data(self):
        """Propagate a data-changed notification downstream."""
        self.data_changed = True

    def _color_mode_changed(self, value):
        """Point the actor at the scalar or vector LUT to match `value`."""
        actor = self.actor
        if (value == 'color_by_scalar'):
            actor.mapper.scalar_visibility = 1
            lut_mgr = self.module_manager.scalar_lut_manager
            actor.set_lut(lut_mgr.lut)
        elif (value == 'color_by_vector'):
            lut_mgr = self.module_manager.vector_lut_manager
            actor.set_lut(lut_mgr.lut)
        else:
            # Unknown mode: render without scalar coloring.
            actor.mapper.scalar_visibility = 0
        self.render()

    def _implicit_plane_changed(self, old, new):
        """Connect a newly assigned plane to the cutter."""
        cutter = self.cutter
        if (cutter is not None):
            cutter.cut_function = new.plane
            cutter.inputs = [new]
        self._change_components(old, new)

    def _cutter_changed(self, old, new):
        """Insert a newly assigned cutter between plane and glyph."""
        ip = self.implicit_plane
        if (ip is not None):
            new.cut_function = ip.plane
            new.inputs = [ip]
        g = self.glyph
        if (g is not None):
            g.inputs = [new]
        self._change_components(old, new)

    def _glyph_changed(self, old, new):
        """Swap the glyph, moving the color_mode listener to the new one."""
        if (old is not None):
            old.on_trait_change(self._color_mode_changed, 'color_mode', remove=True)
        new.module = self
        cutter = self.cutter
        if cutter:
            new.inputs = [cutter]
        new.on_trait_change(self._color_mode_changed, 'color_mode')
        self._change_components(old, new)

    def _actor_changed(self, old, new):
        """Attach a newly assigned actor to the scene and the glyph output."""
        new.scene = self.scene
        glyph = self.glyph
        if (glyph is not None):
            new.inputs = [glyph]
        self._change_components(old, new)
def _run_expression_propagation(cfg: ControlFlowGraph) -> None:
    """Run ExpressionPropagationFunctionCall on `cfg` with every complexity
    limit set to 10."""
    options = Options()
    prefix = 'expression-propagation-function-call.maximum_'
    for kind in ('instruction', 'branch', 'call', 'assignment'):
        options.set(prefix + kind + '_complexity', 10)
    task = DecompilerTask('test', cfg, options=options)
    ExpressionPropagationFunctionCall().run(task)
class FlicketUser(PaginatedAPIMixin, UserMixin, Base):
    """Flicket user account model (authentication, API tokens, profile)."""

    __tablename__ = 'flicket_users'

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(user_field_size['username_max']), index=True, unique=True)
    name = db.Column(db.String(user_field_size['name_max']))
    password = db.Column(db.LargeBinary(user_field_size['password_max']))
    email = db.Column(db.String(user_field_size['email_max']), unique=True)
    date_added = db.Column(db.DateTime)
    date_modified = db.Column(db.DateTime, onupdate=datetime.now)
    job_title = db.Column(db.String(user_field_size['job_title']))
    avatar = db.Column(db.String(user_field_size['avatar']))
    total_posts = db.Column(db.Integer, default=0)
    total_assigned = db.Column(db.Integer, default=0)
    token = db.Column(db.String(32), index=True, unique=True)
    token_expiration = db.Column(db.DateTime)
    locale = db.Column(db.String(10))
    disabled = db.Column(db.Boolean, default=False)

    def __init__(self, username, name, email, password, date_added, job_title=None, locale='en', disabled=False):
        """Create a user record; `password` is the bcrypt hash (bytes)."""
        self.username = username
        self.name = name
        self.password = password
        self.email = email
        self.job_title = job_title
        self.date_added = date_added
        self.locale = locale
        self.disabled = disabled

    def is_admin(self):
        """Return True if the user belongs to the admin group.

        Fixed: the original loop's else-clause made membership depend only
        on the first group examined; any() checks every group.
        """
        user = FlicketUser.query.filter_by(id=self.id).first()
        return any(g.group_name == app.config['ADMIN_GROUP_NAME'] for g in user.flicket_groups)

    def is_super_user(self):
        """Return True if the user belongs to the super-user group."""
        user = FlicketUser.query.filter_by(id=self.id).first()
        return any(g.group_name == app.config['SUPER_USER_GROUP_NAME'] for g in user.flicket_groups)

    def check_password(self, password):
        """Verify `password` against the stored bcrypt hash.

        Returns False for unknown or disabled users.
        NOTE(review): the bcrypt comparison below is not constant-time;
        consider bcrypt.checkpw() if the installed bcrypt provides it.
        """
        users = FlicketUser.query.filter_by(username=self.username)
        if users.count() == 0:
            return False
        user = users.first()
        if user.disabled:
            return False
        if bcrypt.hashpw(password.encode('utf-8'), user.password) != user.password:
            return False
        return True

    @staticmethod
    def check_token(token):
        """Return the active user owning `token`, or None if the token is
        unknown, expired, or the account is disabled.

        (Declared @staticmethod: it never used `self`; callers invoke it as
        FlicketUser.check_token(token), which keeps working.)
        """
        user = FlicketUser.query.filter_by(token=token).first()
        if (user is None) or (user.token_expiration < datetime.utcnow()):
            return None
        if user.disabled:
            return None
        return user

    @staticmethod
    def generate_password():
        """Return a random 12-character password (letters + digits).

        Uses the OS CSPRNG (SystemRandom) rather than the default seeded
        PRNG, since this value is security-sensitive.
        """
        characters = string.ascii_letters + string.digits
        return ''.join(random.SystemRandom().sample(characters, 12))

    def get_token(self, expires_in=36000):
        """Return a (possibly pre-existing) API token valid for
        `expires_in` seconds; the caller is expected to commit the session."""
        now = datetime.utcnow()
        # Reuse the current token while it still has over a minute left.
        if self.token and (self.token_expiration > (now + timedelta(seconds=60))):
            return self.token
        self.token = base64.b64encode(os.urandom(24)).decode('utf-8')
        self.token_expiration = now + timedelta(seconds=expires_in)
        db.session.add(self)
        return self.token

    def revoke_token(self):
        """Invalidate the current token by expiring it in the past."""
        self.token_expiration = datetime.utcnow() - timedelta(seconds=1)

    def to_dict(self):
        """Serialize the user for the JSON API (with avatar + HATEOAS links)."""
        avatar_url = (app.config['base_url'] + url_for('flicket_bp.static', filename='flicket_avatars/{}'.format('__default_profile.png')))
        if self.avatar:
            avatar_url = (app.config['base_url'] + url_for('flicket_bp.static', filename='flicket_avatars/{}'.format(self.avatar)))
        data = {'id': self.id, 'avatar': avatar_url, 'email': self.email, 'job_title': (self.job_title if self.job_title else 'unknown'), 'name': self.name, 'username': self.username, 'total_posts': self.total_posts, 'links': {'self': (app.config['base_url'] + url_for('bp_api.get_user', id=self.id)), 'users': (app.config['base_url'] + url_for('bp_api.get_users'))}}
        return data

    def __repr__(self):
        return '<User: id={}, username={}, email={}>'.format(self.id, self.username, self.email)
class OptionSeriesParetoSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Auto-generated Highcharts option wrapper for
    series.pareto.sonification.defaultInstrumentOptions.activeWhen.

    All options default to None (unset).

    NOTE(review): each option is defined twice under the same name (getter
    form, then setter form); as written the second ``def`` overwrites the
    first — presumably ``@property`` / ``@<name>.setter`` decorators were
    stripped in transit.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class Not(UnaryOp):
    """Logical negation node (coconut-compiled propositional-logic AST).

    NOTE(review): the bare ``_coconut_tco`` expression statements below are
    presumably stripped ``@_coconut_tco`` decorators (coconut's tail-call
    optimization); likewise ``neg`` reads like it was a ``@property`` —
    ``self.neg`` is compared/dispatched on as a value throughout.  Confirm
    against the original .coco source.
    """
    __slots__ = ()
    __hash__ = Expr.__hash__
    # Operator symbol used when pretty-printing.
    opstr = not_sym

    def neg(self):
        # The negated sub-expression.
        return self.elem

    _coconut_tco
    def simplify(self, **kwargs):
        """Push the negation inward (De Morgan, quantifier duality, etc.)
        and simplify the result."""
        if (self.neg == top):
            return bot
        elif (self.neg == bot):
            return top
        elif isinstance(self.neg, Not):
            # Double negation elimination.
            return _coconut_tail_call(self.neg.neg.simplify, **kwargs)
        elif isinstance(self.neg, And):
            # De Morgan: ~(a & b) -> ~a | ~b
            return _coconut_tail_call(Or(*map(Not, self.neg.ands)).simplify, **kwargs)
        elif isinstance(self.neg, Or):
            # De Morgan: ~(a | b) -> ~a & ~b
            return _coconut_tail_call(And(*map(Not, self.neg.ors)).simplify, **kwargs)
        elif isinstance(self.neg, Imp):
            # ~(a -> b) -> a & ~b
            ands = (self.neg.conds + (Not(self.neg.concl),))
            return _coconut_tail_call(And(*ands).simplify, **kwargs)
        elif isinstance(self.neg, Exists):
            # ~Ex.P(x) -> Ax.~P(x)
            return _coconut_tail_call(ForAll, self.neg.var, Not(self.neg.elem).simplify(**kwargs))
        elif isinstance(self.neg, ForAll):
            # ~Ax.P(x) -> Ex.~P(x)
            return _coconut_tail_call(Exists, self.neg.var, Not(self.neg.elem).simplify(**kwargs))
        else:
            return _coconut_tail_call(Not, self.neg.simplify(**kwargs))

    def contradicts(self, other, **kwargs):
        # ~P contradicts exactly P.
        return (self.neg == other)

    _coconut_tco
    def resolve_against(self, other, **kwargs):
        """Resolution step: delegate to Or/Eq, or derive bottom when the
        negated expression unifies with `other`."""
        if isinstance(other, (Or, Eq)):
            return _coconut_tail_call(other.resolve_against, self, **kwargs)
        elif (self.neg.find_unification(other, **kwargs) is not None):
            return bot
        else:
            return None

    _coconut_tco
    def admits_empty_universe(self):
        """Whether this formula is satisfiable in an empty universe."""
        if isinstance(self.neg, Atom):
            return _coconut_tail_call(self.neg.admits_empty_universe)
        else:
            return (not self.neg.admits_empty_universe())
from unittest.mock import MagicMock, patch


# NOTE(review): the '@patch(...)' decorators below were reconstructed from
# bare '((API + ...), MagicMock(...))' tuples in the original text — the '@'
# and 'patch' tokens were evidently stripped in transit.  The bottom-most
# patch (upload_dataset_files, no new= argument) injects its mock as the
# test's first parameter, matching the original signature.
@patch(API + '.get_dataset_identity', MagicMock(return_value={'dataset_rid': DATASET_RID, 'dataset_path': DATASET_PATH, 'last_transaction_rid': TRANSACTION_RID}))
@patch(API + '.get_dataset', MagicMock())
@patch(API + '.get_branch', MagicMock())
@patch(API + '.is_dataset_in_trash', MagicMock(return_value=False))
@patch(API + '.open_transaction', MagicMock(return_value=TRANSACTION_RID))
@patch(API + '.commit_transaction', MagicMock())
@patch(API + '.infer_dataset_schema', MagicMock())
@patch(API + '.upload_dataset_schema', MagicMock())
@patch(API + '.upload_dataset_files')
def test_save_spark(upload_dataset_files):
    """save_dataset on a Spark DataFrame should upload the dataframe's files
    into the opened transaction and return the dataset rid / transaction id."""
    fdt = CachedFoundryClient()
    df = SparkSession.builder.master('local[*]').getOrCreate().createDataFrame([[1, 2]], 'a:string, b: string')
    (dataset_rid, transaction_id) = fdt.save_dataset(df, dataset_path_or_rid=DATASET_PATH, branch='master', exists_ok=True, mode='SNAPSHOT')
    args = upload_dataset_files.call_args[0]
    assert args[0] == DATASET_RID
    assert args[1] == TRANSACTION_RID
    # A spark write produces at least a _SUCCESS marker plus one part file.
    assert len(args[2]) >= 2
    assert dataset_rid == 'ri.foundry.main.dataset.12345de3-b916-46ba-b097-c4326ea4342e'
    assert transaction_id == 'transaction1'
class OptionSeriesWindbarbDataEvents(Options):
    """Auto-generated Highcharts option wrapper for series.windbarb.data.events
    (per-point event callbacks: click, drag, drop, hover, select, ...).

    All options default to None (unset).

    NOTE(review): each option is defined twice under the same name (getter
    form, then setter form); as written the second ``def`` overwrites the
    first — presumably ``@property`` / ``@<name>.setter`` decorators were
    stripped in transit.
    """

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def drag(self):
        return self._config_get(None)

    def drag(self, value: Any):
        self._config(value, js_type=False)

    def dragStart(self):
        return self._config_get(None)

    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    def drop(self):
        return self._config_get(None)

    def drop(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def remove(self):
        return self._config_get(None)

    def remove(self, value: Any):
        self._config(value, js_type=False)

    def select(self):
        return self._config_get(None)

    def select(self, value: Any):
        self._config(value, js_type=False)

    def unselect(self):
        return self._config_get(None)

    def unselect(self, value: Any):
        self._config(value, js_type=False)

    def update(self):
        return self._config_get(None)

    def update(self, value: Any):
        self._config(value, js_type=False)
class HelloWithBody(base_tests.SimpleDataPlane):
    """Send a HELLO carrying a data payload and expect the switch to answer
    with a v1.0 HELLO whose data field is empty."""

    def runTest(self):
        logging.info('Running Hello test')
        logging.info('Sending Hello...')
        hello = ofp.message.hello()
        hello.data = 'OpenFlow Will Rule The World'
        self.controller.message_send(hello)
        logging.info('Waiting for a Hello on the control plane with same xid,version--1.0.0 and data field empty')
        response, _ = self.controller.poll(exp_msg=ofp.OFPT_HELLO, timeout=1)
        # The switch must reply at all, with no body, speaking version 1.
        self.assertTrue((response is not None), 'Switch did not exchange hello message in return')
        self.assertEqual(len(response.data), 0, 'Response data field non-empty')
        self.assertTrue((response.version == 1), 'Openflow-version field is not 1.0.0')
def verify_zip(zip_file: ZipFile, max_file_size: Optional[int]=None) -> None:
    """Validate that no member of `zip_file` decompresses to more than
    `max_file_size` bytes (module default MAX_FILE_SIZE when None).

    The size is measured by actually streaming the decompressed data in
    CHUNK_SIZE chunks rather than trusting the (forgeable) header size.
    The check runs inside the read loop so a zip bomb is aborted as soon
    as it crosses the cap, instead of being fully inflated first.

    :raises ValueError: as soon as any member exceeds the limit.
    """
    limit = MAX_FILE_SIZE if max_file_size is None else max_file_size
    for file_info in zip_file.infolist():
        seen = 0
        with zip_file.open(file_info) as member:
            for chunk in iter(lambda m=member: m.read(CHUNK_SIZE), b''):
                seen += len(chunk)
                if seen > limit:
                    # Fail fast: stop decompressing immediately.
                    raise ValueError('File size exceeds maximum allowed size')
class Metric_Limit(Config_Item):
    """Configuration entry that enables, disables, or limits a single metric
    (or all metrics at once when the name is the wildcard '*')."""

    def __init__(self, metric_name, enabled, limit=None):
        super().__init__()
        assert isinstance(metric_name, str)
        assert metric_name == '*' or metric_name in METRICS
        assert isinstance(enabled, bool)
        assert limit is None or isinstance(limit, int)
        self.metric_name = metric_name
        self.enabled = enabled
        self.limit = limit

    def dump(self):
        """Print a human-readable description of this entry."""
        print(('  Metric limit for %s' % self.metric_name))
        if (not self.enabled):
            print('  Metric: ignore')
        elif (self.limit is not None):
            print(('  Metric: limit to %i' % self.limit))
        else:
            print('  Metric: report')

    def evaluate(self, mh, config):
        """Apply this entry to `config`'s enabled_metrics / metric_limits."""
        assert isinstance(mh, Message_Handler)
        assert isinstance(config, Config)
        if self.metric_name == '*':
            # Wildcard: reset the entire metric configuration.
            config.enabled_metrics = set(METRICS) if self.enabled else set()
            config.metric_limits = {}
            return
        if not self.enabled:
            # Disable: drop from the enabled set and forget any limit.
            config.enabled_metrics.discard(self.metric_name)
            config.metric_limits.pop(self.metric_name, None)
            return
        config.enabled_metrics.add(self.metric_name)
        if self.limit is None:
            # Plain 'report': clear a previously configured limit.
            config.metric_limits.pop(self.metric_name, None)
        else:
            config.metric_limits[self.metric_name] = self.limit
class AdPlacement(AbstractCrudObject):
    """Auto-generated Facebook Marketing API node wrapper for an AdPlacement.

    NOTE(review): SDK-generated code — regenerate from the API spec rather
    than hand-editing semantics.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        # Marker attribute the SDK uses to identify this node type.
        self._isAdPlacement = True
        super(AdPlacement, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # String constants naming the fields available on this node.
        bundle_id = 'bundle_id'
        display_format = 'display_format'
        external_placement_id = 'external_placement_id'
        google_display_format = 'google_display_format'
        id = 'id'
        name = 'name'
        placement_group = 'placement_group'
        platform = 'platform'
        status = 'status'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET this node from the API.

        Returns the executed result by default; returns the FacebookRequest
        itself when added to a batch or when `pending` is True.
        """
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            # Callbacks are meaningless outside a batch call — warn, don't fail.
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdPlacement, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Field-name -> type-string map used by the SDK's type checker.
    _field_types = {'bundle_id': 'string', 'display_format': 'string', 'external_placement_id': 'string', 'google_display_format': 'string', 'id': 'string', 'name': 'string', 'placement_group': 'Object', 'platform': 'string', 'status': 'string'}

    def _get_field_enum_info(cls):
        # NOTE(review): takes 'cls' but carries no @classmethod decorator —
        # presumably stripped in transit; confirm against the SDK generator.
        field_enum_info = {}
        return field_enum_info
class Temperature(IntervalModule, ColorRangeModule):
    """Status-bar module showing the CPU temperature, read either from a
    sysfs thermal-zone file or (when lm_sensors_enabled) from lm_sensors."""

    settings = (('format', 'format string used for output. {temp} is the temperature in degrees celsius'), ('display_if', 'snippet that gets evaluated. if true, displays the module output'), ('lm_sensors_enabled', 'whether or not lm_sensors should be used for obtaining CPU temperature information'), ('urgent_on', 'whether to flag as urgent when temperature exceeds urgent value or critical value (requires lm_sensors_enabled)'), ('dynamic_color', 'whether to set the color dynamically (overrides alert_color)'), 'color', 'file', 'alert_temp', 'alert_color')

    # Defaults for the settings declared above.
    format = '{temp} C'
    color = '#FFFFFF'
    file = '/sys/class/thermal/thermal_zone0/temp'
    alert_temp = 90
    alert_color = '#FF0000'
    display_if = 'True'
    lm_sensors_enabled = False
    dynamic_color = False
    urgent_on = 'warning'

    def init(self):
        # Pango markup is only emitted when the bar asked for it via hints.
        self.pango_enabled = (self.hints.get('markup', False) and (self.hints['markup'] == 'pango'))
        # Pre-compute a 100-step color gradient for dynamic coloring.
        self.colors = self.get_hex_color_range(self.start_color, self.end_color, 100)

    def run(self):
        # SECURITY NOTE(review): display_if is eval()'d verbatim; it must only
        # ever come from the user's own configuration, never untrusted input.
        if eval(self.display_if):
            if self.lm_sensors_enabled:
                self.output = self.get_output_sensors()
            else:
                self.output = self.get_output_original()

    def get_output_original(self):
        """Read the sysfs file (millidegrees C) and format the output dict."""
        with open(self.file, 'r') as f:
            temp = (float(f.read().strip()) / 1000)
        if self.dynamic_color:
            # Scale against alert_temp to pick a gradient color.
            perc = int(self.percentage(int(temp), self.alert_temp))
            color = self.get_colour(perc)
        else:
            color = (self.color if (temp < self.alert_temp) else self.alert_color)
        return {'full_text': self.format.format(temp=temp), 'color': color}

    def get_output_sensors(self):
        """Build the output dict from lm_sensors readings.

        Exposes one '{name}' and '{name}_bar' key per sensor, plus 'temp'
        (the hottest sensor's reading).
        """
        data = dict()
        found_sensors = get_sensors()
        if (len(found_sensors) == 0):
            raise Exception('No sensors detected! Ensure lm-sensors is installed and check the output of the `sensors` command.')
        for sensor in found_sensors:
            data[sensor.name] = self.format_sensor(sensor)
            data['{}_bar'.format(sensor.name)] = self.format_sensor_bar(sensor)
        data['temp'] = max((s.current for s in found_sensors))
        return {'full_text': self.format.format(**data), 'urgent': self.get_urgent(found_sensors), 'color': (self.color if (not self.dynamic_color) else None)}

    def get_urgent(self, sensors):
        """Return True when any sensor crosses the configured urgency level."""
        if (self.urgent_on not in ('warning', 'critical')):
            raise Exception('urgent_on must be one of (warning, critical)')
        for sensor in sensors:
            if ((self.urgent_on == 'warning') and sensor.is_warning()):
                return True
            elif ((self.urgent_on == 'critical') and sensor.is_critical()):
                return True
        return False

    def format_sensor(self, sensor):
        """Format one sensor reading, pango-colored if enabled."""
        current_val = sensor.current
        if self.pango_enabled:
            percentage = self.percentage(sensor.current, sensor.critical)
            if self.dynamic_color:
                color = self.get_colour(percentage)
                return self.format_pango(color, current_val)
        return current_val

    def format_sensor_bar(self, sensor):
        """Render one sensor as a vertical bar, pango-colored if enabled."""
        percentage = self.percentage(sensor.current, sensor.critical)
        bar = make_vertical_bar(int(percentage))
        if self.pango_enabled:
            if self.dynamic_color:
                color = self.get_colour(percentage)
                return self.format_pango(color, bar)
        return bar

    def format_pango(self, color, value):
        """Wrap `value` in a pango <span> carrying `color`."""
        return '<span color="{}">{}</span>'.format(color, value)

    def get_colour(self, percentage):
        """Map a 0-100 percentage onto the precomputed gradient, clamping
        overshoot to the last (hottest) color."""
        index = ((- 1) if (int(percentage) > (len(self.colors) - 1)) else int(percentage))
        return self.colors[index]
class DistributedActors():
    """Abstract interface for a set of distributed rollout actors feeding a
    central learner; concrete subclasses implement the transport."""

    def __init__(self, env_factory: Callable[([], Union[(StructuredEnv, StructuredEnvSpacesMixin, LogStatsEnv)])], policy: TorchPolicy, n_rollout_steps: int, n_actors: int, batch_size: int):
        """
        :param env_factory: zero-arg callable producing a fresh environment
        :param policy: the policy the actors roll out
        :param n_rollout_steps: steps collected per rollout
        :param n_actors: number of parallel actors
        :param batch_size: number of rollouts per collected batch
        """
        self.env_factory = env_factory
        self.policy = policy
        self.n_rollout_steps = n_rollout_steps
        self.n_actors = n_actors
        self.batch_size = batch_size
        # Epoch-level statistics, forwarded to the 'train' stats logger.
        self.epoch_stats = LogStatsAggregator(LogStatsLevel.EPOCH)
        self.epoch_stats.register_consumer(get_stats_logger('train'))

    def start(self) -> None:
        """Launch the actors (subclass responsibility)."""
        raise NotImplementedError

    def stop(self) -> None:
        """Shut the actors down (subclass responsibility)."""
        raise NotImplementedError

    def broadcast_updated_policy(self, state_dict: Dict) -> None:
        """Push new policy weights to all actors (subclass responsibility)."""
        raise NotImplementedError

    def collect_outputs(self, learner_device: str) -> Tuple[(StructuredSpacesRecord, float, float, float)]:
        """Collect a batch of actor outputs onto `learner_device`
        (subclass responsibility)."""
        raise NotImplementedError

    def get_epoch_stats_aggregator(self) -> LogStatsAggregator:
        """Return the epoch-level statistics aggregator."""
        return self.epoch_stats

    def get_stats_value(self, event: Callable, level: LogStatsLevel, name: Optional[str]=None) -> LogStatsValue:
        """Look up a value from the last epoch's statistics.

        Only EPOCH-level queries are supported.
        """
        assert (level == LogStatsLevel.EPOCH)
        return self.epoch_stats.last_stats[(event, name, None)]
class FlowRemoveAll(base_tests.SimpleProtocol):
    """Install several flows, delete them all, and verify the flow table
    reports empty stats afterwards."""

    def runTest(self):
        # Install four flows with distinct priorities (1000..4000).
        for index in range(1, 5):
            logging.debug('Adding flow %d', index)
            add_msg = ofp.message.flow_add(buffer_id=ofp.OFP_NO_BUFFER, priority=(index * 1000))
            self.controller.message_send(add_msg)
        do_barrier(self.controller)
        delete_all_flows(self.controller)
        logging.info('Sending flow stats request')
        stats = get_flow_stats(self, ofp.match())
        self.assertEqual(len(stats), 0, 'Expected empty flow stats reply')
class MarkerHandler():
    """Records and replays mutable markers for deterministic workflow replay.

    NOTE(review): the bare annotated attributes plus ``field(default_factory=...)``
    strongly suggest this was declared as a ``@dataclass`` and the decorator
    was stripped in transit — confirm upstream.
    """
    decision_context: DecisionContext
    marker_name: str
    # Marker id -> latest recorded result.
    mutable_marker_results: Dict[(str, MarkerResult)] = field(default_factory=dict)

    def record_mutable_marker(self, id: str, event_id: int, data: Dict[(str, Payloads)], access_count: int):
        """Record marker `data` as a decision and cache it locally.

        If the id was already seen, the cached result is just flagged as
        replayed instead of being overwritten.
        """
        marker = MarkerData.create(id=id, event_id=event_id, data=data, access_count=access_count)
        if (id in self.mutable_marker_results):
            self.mutable_marker_results[id].replayed = True
        else:
            self.mutable_marker_results[id] = MarkerResult(data=data)
        self.decision_context.record_marker(self.marker_name, marker.get_header(), data)

    def set_data(self, id, data: Dict[(str, Payloads)]):
        """Overwrite the cached result for marker `id`."""
        self.mutable_marker_results[id] = MarkerResult(data=data)

    def mark_replayed(self, id):
        """Flag marker `id` as already replayed."""
        self.mutable_marker_results[id].replayed = True

    def handle(self, id: str, func) -> Dict[(str, Payloads)]:
        """Return marker data for `id`, computing and recording it via
        `func()` only when not replaying and no cached result exists.

        During replay a cached-but-not-yet-replayed result is re-recorded so
        the decision stream matches history.
        """
        event_id = self.decision_context.decider.next_decision_event_id
        result: MarkerResult = self.mutable_marker_results.get(id)
        if (result or self.decision_context.is_replaying()):
            if result:
                if (self.decision_context.is_replaying() and (not result.replayed)):
                    self.record_mutable_marker(id, event_id, result.data, 0)
                return result.data
            else:
                return None
        else:
            to_store = func()
            if to_store:
                data = to_store
                self.record_mutable_marker(id, event_id, data, 0)
                return to_store
            else:
                return None

    def get_marker_data_from_history(self, event_id: int, marker_id: str, expected_access_count: int) -> Dict[(str, Payloads)]:
        """Fetch marker data from the history event at `event_id`.

        Returns None unless the event is a MarkerRecorded event carrying this
        handler's marker name, the expected marker id, and an access count
        no greater than `expected_access_count`.
        """
        event: HistoryEvent = self.decision_context.decider.get_optional_decision_event(event_id)
        if ((not event) or (event.event_type != EventType.EVENT_TYPE_MARKER_RECORDED)):
            return None
        attributes: MarkerRecordedEventAttributes = event.marker_recorded_event_attributes
        name = attributes.marker_name
        if (self.marker_name != name):
            return None
        marker_data = MarkerInterface.from_event_attributes(attributes)
        if ((marker_id != marker_data.get_id()) or (marker_data.get_access_count() > expected_access_count)):
            return None
        return marker_data.get_data()
class Comment(Instruction):
    """Pseudo-instruction holding a source-level comment; contributes no
    expressions and zero complexity to the surrounding dataflow."""

    # comment_style -> (opening delimiter, closing delimiter)
    STYLES = {'C': ('/*', '*/'), 'html': ('todo', 'todo'), 'debug': ('##', '##')}
    DEFAULT_STYLE = 'C'

    def __init__(self, comment: str, comment_style: str='C', tags: Optional[Tuple[(Tag, ...)]]=None):
        """Store the comment text; unknown styles fall back to DEFAULT_STYLE."""
        super().__init__(tags)
        self._comment = comment
        self._comment_style = comment_style
        (self._open_comment, self._close_comment) = self.STYLES.get(comment_style, self.STYLES[self.DEFAULT_STYLE])

    def __repr__(self) -> str:
        return f'{self._open_comment} {self._comment} {self._close_comment}'

    def __str__(self) -> str:
        return f'{self._open_comment} {self._comment} {self._close_comment}'

    def __iter__(self) -> Iterator[Expression]:
        # Empty generator: 'return' before the unreachable 'yield' makes this
        # yield nothing, while the bare 'yield' marks it as a generator.
        return
        (yield)

    def complexity(self) -> int:
        # Comments never count toward instruction complexity.
        return 0

    def copy(self) -> Comment:
        """Return an independent copy with the same text, style and tags."""
        return Comment(self._comment, self._comment_style, self.tags)

    def substitute(self, replacee: 'Expression', replacement: 'Expression') -> None:
        # Nothing to substitute: a comment contains no expressions.
        pass

    def accept(self, visitor: DataflowObjectVisitorInterface[T]) -> T:
        return visitor.visit_comment(self)
def multi_lastz_runner(log, output, cores, target, query, huge, coverage=83, identity=92.5, size=None):
    """Run lastz of `query` against the target's chromosomes (or, for huge
    genomes, against chunked scaffolds), optionally in parallel, and
    concatenate the per-chromosome results into `output`.

    :param huge: when True, split the target with chunk_scaffolds(size)
        and clean the chunks up afterwards.
    :param size: chunk size passed to chunk_scaffolds when huge.
        NOTE(review): the original signature ended in a bare ``size=``
        (a syntax error); None is used here — confirm the intended default.
    """
    if (not huge):
        # Enumerate chromosomes from the target's 2bit file.
        with open(target, 'rb') as f:
            tb = twobit.TwoBitFile(f)
            tb_keys = [i.decode('utf-8') for i in tb.keys()]
        chromos = [os.path.join(target, c) for c in tb_keys]
    else:
        chromos = chunk_scaffolds(log, target, size)
    work = [[chromo, query, coverage, identity] for chromo in chromos]
    log.info(('Running the targets against %s queries...' % len(chromos)))
    if (cores == 1):
        results = list(map(run_lastz, work))
    else:
        pool = multiprocessing.Pool(cores)
        try:
            results = pool.map(run_lastz, work)
        finally:
            # Always release the worker processes.
            pool.close()
            pool.join()
    print('')
    log.info('Writing the results file...')
    # Concatenate per-chromosome temp files, deleting each as it is merged.
    with open(output, 'wb') as outp:
        for t_file in results:
            if (t_file is not None):
                sys.stdout.write('.')
                sys.stdout.flush()
                with open(t_file, 'rb') as fh:
                    outp.write(fh.read())
                os.remove(t_file)
    print('')
    if huge:
        log.info('Cleaning up the chunked files...')
        for t_file in chromos:
            os.remove(t_file)
class MyApp(App):
    """Small kivy demo: a label, a text input and a button stacked in a
    vertical layout (the button lives in a nested horizontal box)."""

    def build(self):
        root = BoxLayout(orientation='vertical')
        button_row = BoxLayout()
        root.add_widget(button_row)

        greeting = Label(text='Olar Mundo')
        greeting.font_size = 50

        entry = TextInput()

        button = Button(text='Butaum', on_press=on_press, on_release=on_release)
        button.font_size = 50

        root.add_widget(greeting)
        root.add_widget(entry)
        button_row.add_widget(button)
        return root
class Client(object):
    """Thin wrapper around a google-cloud-storage client bound to the
    BQ_PROJECT bucket; any attribute not defined here is delegated to the
    underlying gcs client."""

    def __init__(self):
        self.gcs_client = gcs.Client(project=settings.BQ_PROJECT)

    def bucket(self):
        # Bucket handle named after the project (via gcs_client.bucket).
        return self.gcs_client.bucket(settings.BQ_PROJECT)

    def get_bucket(self):
        # Same bucket via gcs_client.get_bucket (presumably validates
        # existence server-side — confirm against the gcs client docs).
        return self.gcs_client.get_bucket(settings.BQ_PROJECT)

    def __getattr__(self, name):
        # Fall through to the wrapped client for everything else.
        return getattr(self.gcs_client, name)
# NOTE(review): the bare "(scope='session', autouse=True)" line below is
# presumably the argument list of a stripped '@pytest.fixture' decorator —
# confirm against the original test module.
(scope='session', autouse=True)
def fideslang_data_categories(db):
    """Session-scoped fixture: persist every default-taxonomy data category,
    yield them to the tests, then best-effort delete them on teardown."""
    cats = []
    for obj in DEFAULT_TAXONOMY.data_category:
        try:
            cats.append(DataCategoryDbModel.from_fideslang_obj(obj).save(db))
        except IntegrityError:
            # Category already present (seeded elsewhere) — keep going.
            pass
    (yield cats)
    for cat in cats:
        try:
            cat.delete(db)
        except ObjectDeletedError:
            # Already gone (deleted by a test) — nothing to clean up.
            pass
class CharacterCmdSet(CmdSet):
    """Default command set for in-game Character objects: general gameplay,
    help, system, admin, building and batch-processing commands."""

    key = 'DefaultCharacter'
    priority = 0

    def at_cmdset_creation(self):
        """Populate the cmdset (order preserved from the original listing)."""
        command_classes = (
            # General gameplay
            general.CmdLook, general.CmdHome, general.CmdInventory,
            general.CmdPose, general.CmdNick, general.CmdSetDesc,
            general.CmdGet, general.CmdDrop, general.CmdGive,
            general.CmdSay, general.CmdWhisper, general.CmdAccess,
            # Help system
            help.CmdHelp, help.CmdSetHelp,
            # System/inspection
            system.CmdPy, system.CmdAccounts, system.CmdService,
            system.CmdAbout, system.CmdTime, system.CmdServerLoad,
            system.CmdTickers, system.CmdTasks,
            # Admin
            admin.CmdBoot, admin.CmdBan, admin.CmdUnban, admin.CmdEmit,
            admin.CmdPerm, admin.CmdWall, admin.CmdForce,
            # Building
            building.CmdTeleport, building.CmdSetObjAlias,
            building.CmdListCmdSets, building.CmdWipe,
            building.CmdSetAttribute, building.CmdName, building.CmdDesc,
            building.CmdCpAttr, building.CmdMvAttr, building.CmdCopy,
            building.CmdFind, building.CmdOpen, building.CmdLink,
            building.CmdUnLink, building.CmdCreate, building.CmdDig,
            building.CmdTunnel, building.CmdDestroy, building.CmdExamine,
            building.CmdTypeclass, building.CmdLock, building.CmdSetHome,
            building.CmdTag, building.CmdSpawn, building.CmdScripts,
            building.CmdObjects,
            # Batch processing
            batchprocess.CmdBatchCommands, batchprocess.CmdBatchCode,
        )
        for command_class in command_classes:
            self.add(command_class())
class MacToPort(collections.defaultdict):
    """Map a MAC address to the set of MacPort(dpid, port_no) entries
    on which that address has been observed."""

    def __init__(self):
        # Unknown MAC addresses start out with an empty port set.
        super(MacToPort, self).__init__(set)

    def add_port(self, dpid, port_no, mac_address):
        """Record that *mac_address* was seen on (dpid, port_no)."""
        self[mac_address].add(MacPort(dpid, port_no))

    def remove_port(self, dpid, port_no, mac_address):
        """Forget (dpid, port_no) for *mac_address*; drop the whole key
        once no ports remain."""
        remaining = self[mac_address]
        remaining.discard(MacPort(dpid, port_no))
        if not remaining:
            del self[mac_address]

    def get_ports(self, mac_address):
        """Return the (possibly empty) set of ports for *mac_address*."""
        return self[mac_address]
class TestLoggingUsingAdminPermissions(CoprsTestCase):
    """Admin actions on foreign projects must be audit-logged.

    NOTE(review): the `@mock.patch(...)` / `@pytest.mark.usefixtures(...)`
    decorator prefixes were stripped from this file; restored below.
    """

    @mock.patch('coprs.app.logger', return_value=MagicMock())
    @pytest.mark.usefixtures('f_users', 'f_coprs', 'f_db')
    def test_update_copr(self, log):
        CoprsLogic.update(self.u1, self.c2)
        log.info.assert_called_with("Admin '%s' using their permissions to update project '%s' settings", 'user1', 'user2/foocopr')

    @mock.patch('coprs.app.logger', return_value=MagicMock())
    @pytest.mark.usefixtures('f_users', 'f_coprs', 'f_mock_chroots', 'f_db')
    def test_update_copr_chroot(self, log):
        CoprChrootsLogic.update_chroot(self.u1, self.c2.copr_chroots[0])
        log.info.assert_called_with("Admin '%s' using their permissions to update chroot '%s'", 'user1', 'user2/foocopr/fedora-17-x86_64')

    @mock.patch('coprs.app.logger', return_value=MagicMock())
    @pytest.mark.usefixtures('f_users', 'f_u1_ts_client', 'f_coprs', 'f_builds', 'f_db')
    def test_update_package_webui(self, log):
        url = '/coprs/{0}/package/{1}/edit/scm'.format(self.c2.full_name, self.p2.name)
        # NOTE(review): the original clone_url literal was mangled; any valid
        # URL exercises the admin-logging path — TODO confirm against upstream.
        data = {'clone_url': 'https://example.com/test.git', 'package_name': self.p2.name}
        self.tc.post(url, headers=self.auth_header, data=data)
        log.info.assert_called_with("Admin '%s' using their permissions to update package '%s' in project '%s'", 'user1', 'whatsupthere-world', 'user2/foocopr')

    @mock.patch('coprs.app.logger', return_value=MagicMock())
    @pytest.mark.usefixtures('f_users', 'f_u1_ts_client', 'f_coprs', 'f_builds', 'f_db')
    def test_update_package_apiv3(self, log):
        url = '/api_3/package/edit/{0}/{1}/scm'.format(self.c2.full_name, self.p2.name)
        # NOTE(review): clone_url literal mangled in the original — see above.
        data = {'clone_url': 'https://example.com/test.git', 'package_name': self.p2.name}
        self.post_api3_with_auth(url, data, self.u1)
        log.info.assert_called_with("Admin '%s' using their permissions to update package '%s' in project '%s'", 'user1', 'whatsupthere-world', 'user2/foocopr')
@register_deserializable
class OpenSearchDB(BaseVectorDB):
    """Vector database backed by an OpenSearch cluster.

    NOTE(review): the class decorator had been stripped to `_deserializable`;
    restored as `@register_deserializable`.
    """

    # Number of documents sent per bulk-insert request.
    BATCH_SIZE = 100

    def __init__(self, config: OpenSearchDBConfig):
        """Connect to OpenSearch; *config* is mandatory."""
        if config is None:
            raise ValueError('OpenSearchDBConfig is required')
        self.config = config
        self.client = OpenSearch(hosts=[self.config.opensearch_url], **self.config.extra_params)
        info = self.client.info()
        logging.info(f"Connected to {info['version']['distribution']}. Version: {info['version']['number']}")
        super().__init__(config=self.config)

    def _initialize(self):
        """Create the knn-enabled index for this collection if missing."""
        logging.info(self.client.info())
        index_name = self._get_index()
        if self.client.indices.exists(index=index_name):
            print(f"Index '{index_name}' already exists.")
            return
        # 'index': False on embeddings: vectors are stored but scored via
        # script_scoring (see query()), not an ANN graph.
        index_body = {'settings': {'knn': True}, 'mappings': {'properties': {'text': {'type': 'text'}, 'embeddings': {'type': 'knn_vector', 'index': False, 'dimension': self.config.vector_dimension}}}}
        self.client.indices.create(index_name, body=index_body)
        print(self.client.indices.get(index_name))

    def _get_or_create_db(self):
        """The OpenSearch client doubles as the "database" handle."""
        return self.client

    def _get_or_create_collection(self, name):
        """Nothing to do: collections map to indexes, created in _initialize().

        NOTE(review): the original def had no body (lost in mangling);
        restored as an explicit no-op.
        """

    def get(self, ids: Optional[List[str]] = None, where: Optional[Dict[str, any]] = None, limit: Optional[int] = None) -> Set[str]:
        """Return existing document ids (and their metadata doc_id) that
        match *ids* and/or the optional app_id filter in *where*."""
        query = {}
        if ids:
            query['query'] = {'bool': {'must': [{'ids': {'values': ids}}]}}
        else:
            query['query'] = {'bool': {'must': []}}
        # Fixed: *where* defaults to None — guard before membership test.
        if where and ('app_id' in where):
            app_id = where['app_id']
            query['query']['bool']['must'].append({'term': {'metadata.app_id.keyword': app_id}})
        response = self.client.search(index=self._get_index(), body=query, _source=True, size=limit)
        docs = response['hits']['hits']
        ids = [doc['_id'] for doc in docs]
        doc_ids = [doc['_source']['metadata']['doc_id'] for doc in docs]
        result = {'ids': ids, 'metadatas': []}
        for doc_id in doc_ids:
            result['metadatas'].append({'doc_id': doc_id})
        return result

    def add(self, embeddings: List[List[str]], documents: List[str], metadatas: List[object], ids: List[str], **kwargs: Optional[Dict[str, any]]):
        """Bulk-insert documents with their embeddings, BATCH_SIZE at a time."""
        for batch_start in tqdm(range(0, len(documents), self.BATCH_SIZE), desc='Inserting batches in opensearch'):
            batch_end = batch_start + self.BATCH_SIZE
            batch_documents = documents[batch_start:batch_end]
            batch_embeddings = embeddings[batch_start:batch_end]
            batch_entries = [{'_index': self._get_index(), '_id': doc_id, '_source': {'text': text, 'metadata': metadata, 'embeddings': embedding}} for (doc_id, text, metadata, embedding) in zip(ids[batch_start:batch_end], batch_documents, metadatas[batch_start:batch_end], batch_embeddings)]
            bulk(self.client, batch_entries, **kwargs)
            # Make the batch searchable right away; brief pause between bulks.
            self.client.indices.refresh(index=self._get_index())
            time.sleep(0.1)

    def query(self, input_query: List[str], n_results: int, where: Dict[str, any], citations: bool = False, **kwargs: Optional[Dict[str, Any]]) -> Union[List[Tuple[str, Dict]], List[str]]:
        """Similarity-search the index; return contexts, optionally with
        (context, metadata-with-score) tuples when *citations* is set."""
        embeddings = OpenAIEmbeddings()
        docsearch = OpenSearchVectorSearch(index_name=self._get_index(), embedding_function=embeddings, opensearch_url=f'{self.config.opensearch_url}', use_ssl=(hasattr(self.config, 'use_ssl') and self.config.use_ssl), verify_certs=(hasattr(self.config, 'verify_certs') and self.config.verify_certs))
        pre_filter = {'match_all': {}}
        if 'app_id' in where:
            app_id = where['app_id']
            pre_filter = {'bool': {'must': [{'term': {'metadata.app_id.keyword': app_id}}]}}
        docs = docsearch.similarity_search_with_score(input_query, search_type='script_scoring', space_type='cosinesimil', vector_field='embeddings', text_field='text', metadata_field='metadata', pre_filter=pre_filter, k=n_results, **kwargs)
        contexts = []
        for (doc, score) in docs:
            context = doc.page_content
            if citations:
                metadata = doc.metadata
                metadata['score'] = score
                contexts.append(tuple((context, metadata)))
            else:
                contexts.append(context)
        return contexts

    def set_collection_name(self, name: str):
        """Switch the active collection (index) name."""
        if not isinstance(name, str):
            raise TypeError('Collection name must be a string')
        self.config.collection_name = name

    def count(self) -> int:
        """Number of documents in the current index."""
        query = {'query': {'match_all': {}}}
        response = self.client.count(index=self._get_index(), body=query)
        doc_count = response['count']
        return doc_count

    def reset(self):
        """Drop the current index entirely (if it exists)."""
        if self.client.indices.exists(index=self._get_index()):
            self.client.indices.delete(index=self._get_index())

    def delete(self, where):
        """Delete all documents whose metadata.doc_id matches *where*."""
        if 'doc_id' not in where:
            raise ValueError('doc_id is required to delete a document')
        query = {'query': {'bool': {'must': [{'term': {'metadata.doc_id': where['doc_id']}}]}}}
        self.client.delete_by_query(index=self._get_index(), body=query)

    def _get_index(self) -> str:
        """The index name equals the configured collection name."""
        return self.config.collection_name
def gif_query(query, limit=3):
    """Search giphy through the configured host; return the parsed JSON
    response, or None when the request fails.

    Fix: *query* is now URL-encoded, so searches containing spaces or
    reserved characters build a valid URL.
    """
    from urllib.parse import quote_plus  # local import: module imports not visible here
    try:
        url = config.host + ('/giphy/search?limit=%s&query=%s' % (limit, quote_plus(str(query))))
        r = requests.get(url, headers=headers)
        return r.json()
    except requests.exceptions.RequestException as e:
        print('Something went wrong. Could not get your gifs:', e)
        return None  # explicit: callers get None on failure
class OptionPlotoptionsDependencywheelTooltipDatetimelabelformats(Options):
    """Datetime label formats for dependency-wheel tooltips.

    NOTE(review): the stripped `@property` / `@<name>.setter` decorators were
    restored — without them each setter def silently shadowed its getter.
    Each getter returns the Highcharts default shown in `_config_get`.
    """

    @property
    def day(self):
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesItemSonificationContexttracksMappingPitch(Options):
    """Pitch-mapping options for sonification context tracks.

    NOTE(review): restored the stripped `@property` / `@<name>.setter`
    decorators — without them each setter def shadowed its getter.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
@_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class ONFFlowMonitorStatsRequest(OFPExperimenterStatsRequestBase):
    """ONF flow-monitor multipart (experimenter) request.

    NOTE(review): the leading `@` of the two registration decorators had been
    stripped, turning them into no-op module-level calls; restored.
    """

    def __init__(self, datapath, flags, body=None, type_=None, experimenter=None, exp_type=None):
        # type_/experimenter/exp_type are accepted only for signature
        # compatibility; the real values are fixed to the ONF experimenter id
        # and the flow-monitor exp_type below.
        body = body if body else []
        super(ONFFlowMonitorStatsRequest, self).__init__(datapath, flags, experimenter=ofproto_common.ONF_EXPERIMENTER_ID, exp_type=ofproto.ONFMP_FLOW_MONITOR)
        self.body = body

    def _serialize_stats_body(self):
        """Serialize each monitor entry, then wrap them in an
        OFPExperimenterMultipart body appended to the message buffer."""
        data = bytearray()
        for i in self.body:
            data += i.serialize()
        body = OFPExperimenterMultipart(experimenter=self.experimenter, exp_type=self.exp_type, data=data)
        self.buf += body.serialize()
def extractEternalDreamlandTranslation(item):
    """Parse 'Eternal Dreamland Translation' release-feed items.

    Returns a release message for the known series tags, None when the title
    carries no chapter/volume/fragment info (or is a preview), False when no
    tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    known_series = (
        'Amorous Slave Girl',
        'Dragon Blood Warrior',
        'Love Affair With Sister-In-Law',
        'Peerless Demonic Lord',
    )
    for series in known_series:
        if series in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
@pytest.mark.network
def test_pooch_download_retry_fails_eventually(monkeypatch):
    """A persistently failing hash check must still raise once the single
    allowed retry is exhausted, after logging the download and retry notice.

    NOTE(review): restored the stripped `@pytest.mark.network` marker, and
    nested the blocks so the log assertions are reachable (they sit after
    the `pytest.raises` block — inside it they would never run, since
    fetch() raises).
    """
    with TemporaryDirectory() as local_store:
        # Force the hash check to fail 3 times — more than the 1 retry allowed.
        monkeypatch.setattr(core, 'hash_matches', FakeHashMatches(3).hash_matches)
        path = Path(local_store)
        pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY, retry_if_failed=1)
        with pytest.raises(ValueError) as error:
            with capture_log() as log_file:
                pup.fetch('tiny-data.txt')
        logs = log_file.getvalue().strip().split('\n')
        # One "Downloading ..." line plus one retry notice.
        assert len(logs) == 2
        assert logs[0].startswith('Downloading')
        assert logs[0].endswith(f"'{path}'.")
        assert 'Failed to download' in logs[1]
        assert 'download again 1 more time.' in logs[1]
        assert 'does not match the known hash' in str(error)
class ResonanceHandler(THBEventHandler):
    """After a successful Attack by a Resonance holder, lets the attacker
    pick a third player and fires a ResonanceAction through them."""

    interested = ['action_done']

    def handle(self, evt_type, act):
        # Only completed Attack actions are interesting.
        if evt_type != 'action_done' or not isinstance(act, Attack):
            return act
        src, tgt = act.source, act.target
        if act.cancelled or src.dead or tgt.dead:
            return act
        if not src.has_skill(Resonance):
            return act
        g = self.game
        # Candidates: living players other than the attacker and the victim.
        candidates = [p for p in g.players if not p.dead and p not in (src, tgt)]
        if not candidates:
            return act
        chosen = user_choose_players(self, src, candidates)
        if not chosen:
            return act
        g.process_action(ResonanceAction(src, chosen[0], tgt))
        return act

    def choose_player_target(self, tl):
        """Trim the selection down to the last-picked player."""
        if not tl:
            return tl, False
        return tl[-1:], True
def test_form_args():
    """form_args validators must reach both the create and the edit form
    (i.e. they are not consumed by the first form instantiation)."""
    app, db, admin = setup()

    class BaseModel(peewee.Model):
        class Meta():
            database = db

    class Model(BaseModel):
        test = peewee.CharField(null=False)

    Model.create_table()

    form_args = {'test': {'validators': [validators.Regexp('test')]}}
    view = CustomModelView(Model, form_args=form_args)
    admin.add_view(view)

    # Regexp validator plus the implicit required-field validator.
    create_form = view.create_form()
    assert len(create_form.test.validators) == 2
    edit_form = view.edit_form()
    assert len(edit_form.test.validators) == 2
def test_list_tenants(sample_tenant):
    """The freshly created tenant must be discoverable via list_tenants()."""
    page = tenant_mgt.list_tenants()
    found = next(
        (t for t in page.iterate_all() if t.tenant_id == sample_tenant.tenant_id),
        None,
    )
    assert isinstance(found, tenant_mgt.Tenant)
    assert found.tenant_id == sample_tenant.tenant_id
    assert found.display_name == 'admin-python-tenant'
    assert found.allow_password_sign_up is True
    assert found.enable_email_link_sign_in is True
@pytest.mark.parametrize('fork_fn,vm_class', ((frontier_at, FrontierVM), (homestead_at, HomesteadVM), (tangerine_whistle_at, TangerineWhistleVM), (spurious_dragon_at, SpuriousDragonVM), (byzantium_at, ByzantiumVM), (constantinople_at, ConstantinopleVM), (petersburg_at, PetersburgVM), (istanbul_at, IstanbulVM), (muir_glacier_at, MuirGlacierVM), (berlin_at, BerlinVM), (london_at, LondonVM), (arrow_glacier_at, ArrowGlacierVM), (gray_glacier_at, GrayGlacierVM), (paris_at, ParisVM), (shanghai_at, ShanghaiVM), (latest_mainnet_at, ShanghaiVM)))
def test_chain_builder_construct_chain_fork_specific_helpers(fork_fn, vm_class):
    """Each `<fork>_at(n)` helper must append its VM at block number n while
    preserving the existing configuration.

    NOTE(review): restored the stripped `@pytest.mark.parametrize` marker.
    """
    class DummyVM(FrontierVM):
        pass

    class ChainForTest(MiningChain):
        vm_configuration = ((0, DummyVM),)

    chain = build(ChainForTest, fork_fn(12))
    assert issubclass(chain, MiningChain)
    assert len(chain.vm_configuration) == 2
    assert chain.vm_configuration[0][0] == 0
    assert chain.vm_configuration[0][1] is DummyVM
    assert chain.vm_configuration[1][0] == 12
    assert chain.vm_configuration[1][1] is vm_class
def test_omega():
    """omega() at a MAR equal to the break-even threshold should be ~0,
    for both ndarray and DataFrame input."""
    returns = np.array([0.0089, 0.0012, -0.002, 0.01, -0.0002, 0.02, 0.03, 0.01, -0.003, 0.01, 0.0102, -0.01])
    mar = 0.01
    for sample in (returns, pd.DataFrame(returns)):
        assert np.isclose(perf.omega(sample, target_rtn=mar, log=True), 0.)
def get_color_balls_to_be_potted(shot: System, legal: bool, ball_call: str) -> List[str]:
    """Colour balls still to be potted: every colour worth at least as many
    points as the lowest currently pottable ball, plus the called ball when
    it was pocketed on an illegal shot."""
    remaining: List[str] = []
    for ball_id, info in ball_infos_dict.items():
        if info.color and info.points >= ball_info(get_lowest_pottable(shot)).points:
            remaining.append(ball_id)
    if not legal and is_ball_pocketed(shot, ball_call):
        remaining.append(ball_call)
    return remaining
class JFCCommittee(BaseModel):
    """Joint fundraising committee relationship, read from the
    fec_form_1s_vw database view (Form 1-S filings)."""
    __tablename__ = 'fec_form_1s_vw'
    # Submission id of the underlying Form 1-S filing (primary key).
    idx = db.Column('sub_id', db.Integer, primary_key=True)
    committee_id = db.Column('cmte_id', db.String, doc=docs.COMMITTEE_ID)
    joint_committee_id = db.Column('joint_cmte_id', db.String, doc=docs.COMMITTEE_ID)
    joint_committee_name = db.Column('joint_cmte_nm', db.String(100), doc=docs.COMMITTEE_NAME)
    # Single-character Y/N-style flag — column name matches the attribute.
    most_recent_filing_flag = db.Column(db.String(1), doc=docs.MOST_RECENT)
def extractNegativeInserts(item):
    """Parse 'Negative Inserts' release-feed items; None for non-chapter or
    preview titles, False when no known tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
def test_check_flags():
    """_check_flags must expand fields='all' to the full column list and pass
    explicit flags through; note that '4and6' overrides the '4'/'6' flags,
    leaving ipv4_only/ipv6_only False."""
    parser = Parser()
    # fields='all' expands to every known display column.
    (valid, fields, sort_by, max_width, unique, nonzero, output_format, ipv4_only, ipv6_only, ipv4_and_ipv6) = parser._check_flags({'fields': 'all'}, '')
    assert (fields == ['ID', 'MAC Address', 'Switch', 'Port', 'VLAN', 'IPv4', 'IPv4 Subnet', 'IPv6', 'IPv6 Subnet', 'Ethernet Vendor', 'Ignored', 'State', 'Next State', 'First Seen', 'Last Seen', 'Previous States', 'IPv4 OS\n(p0f)', 'IPv6 OS\n(p0f)', 'Previous IPv4 OSes\n(p0f)', 'Previous IPv6 OSes\n(p0f)', 'Role\n(NetworkML)', 'Role Confidence\n(NetworkML)', 'Previous Roles\n(NetworkML)', 'Previous Role Confidences\n(NetworkML)', 'IPv4 rDNS', 'IPv6 rDNS', 'SDN Controller Type', 'SDN Controller URI', 'History', 'ACL History', 'Pcap labels'])
    expected_fields = ['ID', 'MAC Address', 'Switch', 'Port', 'VLAN', 'IPv4']
    # Explicit field list plus every other supported flag at once.
    (valid, fields, sort_by, max_width, unique, nonzero, output_format, ipv4_only, ipv6_only, ipv4_and_ipv6) = parser._check_flags({'fields': expected_fields, 'sort_by': 1, 'max_width': 100, 'unique': True, 'nonzero': True, 'output_format': 'csv', '4': True, '6': True, '4and6': True}, '')
    assert (fields == expected_fields)
    assert (sort_by == 1)
    assert (max_width == 100)
    assert (unique == True)
    assert (nonzero == True)
    assert (output_format == 'csv')
    # '4and6' wins over the individual address-family flags.
    assert (ipv4_only == False)
    assert (ipv6_only == False)
    assert (ipv4_and_ipv6 == True)
def generate_withdrawals_dict(n: Optional[int]=None):
    """Yield synthetic withdrawal events for testing.

    Args:
        n: number of events to yield; ``None`` streams forever.

    Yields:
        dict with 'user' (one of 500 users), 'amount' (uniform 0-25000),
        'country' (weight-skewed: ~90% 'country_0') and an ISO-8601 UTC
        timestamp under 'date'.
    """
    num_countries = 5
    countries = [f'country_{i}' for i in range(num_countries)]
    # First country dominates; random.choices accepts unnormalized weights.
    country_dist = [0.9] + [0.1 / num_countries] * (num_countries - 1)
    num_users = 500
    users = [f'user_{i}' for i in range(num_users)]
    iterator = range(n) if n is not None else count()
    for _ in iterator:
        yield {
            'user': random.choice(users),
            'amount': random.uniform(0, 25000),
            'country': random.choices(countries, country_dist)[0],
            # Timezone-aware "now"; replaces the deprecated
            # datetime.utcnow().replace(tzinfo=...) pattern (same instant).
            'date': datetime.now(timezone.utc).isoformat(),
        }
def test_aliased_problems_8():
    """IdentityElimination on a CFG mixing aliased (y) and non-aliased (x)
    variables: the x-copy chain must collapse to x1, while the aliased
    y assignments/Phi survive untouched."""
    # Four SSA versions of non-aliased x, three of aliased y.
    (x0, x1, x2, x3) = [Variable('x', Integer.int32_t(), i) for i in range(4)]
    (y0, y1, y2) = [Variable('y', Integer.int32_t(), i, is_aliased=True) for i in range(3)]
    cfg = ControlFlowGraph()
    # start: scanf(&y0); y1 = y0 — loop_body: Phi + x-copies + branch — end: return x2.
    cfg.add_nodes_from([(start := BasicBlock(0, instructions=[Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [UnaryOperation(OperationType.address, [y0.copy()])])), Assignment(y1.copy(), y0.copy())])), (loop_body := BasicBlock(1, instructions=[Phi(y2.copy(), [Constant(1), y2.copy()]), Assignment(x3.copy(), x1.copy()), Assignment(x2.copy(), x3.copy()), Branch(Condition(OperationType.greater, [x2.copy(), Constant(20)]))])), (end := BasicBlock(2, instructions=[Return([x2.copy()])]))])
    cfg.add_edges_from([UnconditionalEdge(start, loop_body), TrueCase(loop_body, end), FalseCase(loop_body, loop_body)])
    IdentityElimination().run(DecompilerTask('test', cfg))
    # The aliased y1 = y0 copy is removed in start; x2/x3 collapse to x1 elsewhere.
    assert (start.instructions == [Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [UnaryOperation(OperationType.address, [y0.copy()])]))])
    assert (loop_body.instructions == [Phi(y2.copy(), [Constant(1), y2.copy()]), Branch(Condition(OperationType.greater, [x1.copy(), Constant(20)]))])
    assert (end.instructions == [Return([x1.copy()])])
def test_no_horiz_jump():
    """Tensor assembly of a horizontal-facet jump form must match plain
    assembly on an extruded triangle mesh."""
    base_mesh = UnitTriangleMesh()
    extruded = ExtrudedMesh(base_mesh, 1)
    space = FunctionSpace(extruded, 'DG', 0)
    test_fn = TestFunction(space)
    _, _, z = SpatialCoordinate(extruded)
    form = jump(z * test_fn) * dS_h
    assert np.allclose(assemble(Tensor(form)).dat.data, assemble(form).dat.data)
def _handle_items(trees, notify):
    """Build the combined items matcher; *trees* must be empty here."""
    if trees:
        raise ValueError('Unexpected tree: {!r}'.format(trees))
    alternatives = (
        expression_module.trait('items', notify=notify, optional=True),
        expression_module.dict_items(notify=notify, optional=True),
        expression_module.list_items(notify=notify, optional=True),
        expression_module.set_items(notify=notify, optional=True),
    )
    # Left-associative or-chain, identical to a | b | c | d.
    combined = alternatives[0]
    for alternative in alternatives[1:]:
        combined = combined | alternative
    return combined
def pct_to_log_return(pct_returns, fillna=True):
    """Convert simple (percentage) returns to log returns.

    NaNs are optionally replaced with 0 first (pandas vs ndarray paths use
    the matching fill helper); a 1e-08 epsilon guards log() near -100%.
    """
    if _is_pandas(pct_returns):
        if fillna:
            pct_returns = pct_returns.fillna(0)
    elif fillna:
        pct_returns = np.nan_to_num(pct_returns)
    # Both input kinds share the same final formula.
    return np.log(1 + pct_returns + 1e-08)
class Worker(QRunnable):
    """QRunnable that decompiles a single function and reports progress,
    results and errors through Qt signals."""

    class Signals(QObject):
        # task_name: emitted with the function name when work starts.
        task_name = Signal(str)
        # result: (function name, decompiled code string).
        result = Signal(tuple)
        # error: (function name, exc type, value, formatted traceback).
        error = Signal(tuple)
        finished = Signal()

    def __init__(self, binary_view: BinaryView, function: Function):
        super(Worker, self).__init__()
        self.binary_view = binary_view
        self.function = function
        self.signals = self.Signals()

    # NOTE(review): a stray bare `()` expression stood here — almost
    # certainly a stripped `@Slot()` decorator; removed rather than guessed.
    def run(self):
        """Decompile self.function; emit result or error, then finished."""
        try:
            self.signals.task_name.emit(self.function.name)
            code = self.decompile_for_widget(self.binary_view, self.function)
        except:
            traceback.print_exc()
            (exctype, value) = sys.exc_info()[:2]
            self.signals.error.emit((self.function.name, exctype, value, traceback.format_exc()))
        else:
            self.signals.result.emit((self.function.name, code))
        finally:
            self.signals.finished.emit()

    @staticmethod
    def decompile_for_widget(binary_view: BinaryView, function: Function):
        """Run a full decompilation of *function* and return formatted code.

        Restored the stripped `@staticmethod`: without it, the
        `self.decompile_for_widget(self.binary_view, self.function)` call in
        run() would bind `self` to `binary_view` and fail with too many args.
        """
        configure_logging()
        decompiler = Decompiler.from_raw(binary_view)
        options = Options.from_gui()
        task = decompiler.decompile(function, options)
        return DecoratedCode.formatted_plain(task.code)
class PurePath(pathlib.PurePath):
    """pathlib.PurePath subclass adding wcmatch glob-style matching."""
    __slots__ = ()

    def __new__(cls, *args: str) -> 'PurePath':
        # Dispatch the generic class to the platform-specific subclass.
        if (cls is PurePath):
            cls = (PureWindowsPath if (os.name == 'nt') else PurePosixPath)
        if (not util.PY312):
            # Pre-3.12 pathlib builds instances via the private _from_parts API.
            return cls._from_parts(args)
        else:
            return object.__new__(cls)

    def _translate_flags(self, flags: int) -> int:
        """Sanitize user flags and force the platform-appropriate
        _FORCEWIN/_FORCEUNIX mode; raises ValueError on a mismatch."""
        flags = ((flags & FLAG_MASK) | _PATHNAME)
        if (flags & REALPATH):
            flags |= (_FORCEWIN if (os.name == 'nt') else _FORCEUNIX)
        if isinstance(self, PureWindowsPath):
            if (flags & _FORCEUNIX):
                raise ValueError('Windows pathlike objects cannot be forced to behave like a Posix path')
            flags |= _FORCEWIN
        elif isinstance(self, PurePosixPath):
            if (flags & _FORCEWIN):
                raise ValueError('Posix pathlike objects cannot be forced to behave like a Windows path')
            flags |= _FORCEUNIX
        return flags

    def _translate_path(self) -> str:
        """Stringify the path, appending a trailing separator for concrete
        directories so directory-only patterns can match."""
        sep = ''
        name = str(self)
        if (isinstance(self, Path) and name and self.is_dir()):
            sep = self._flavour.sep
        return (name + sep)

    def match(self, patterns: (str | Sequence[str]), *, flags: int=0, limit: int=_wcparse.PATTERN_LIMIT, exclude: ((str | Sequence[str]) | None)=None) -> bool:
        """Like globmatch(), but matching right-to-left (relative match)."""
        return self.globmatch(patterns, flags=(flags | _RTL), limit=limit, exclude=exclude)

    def globmatch(self, patterns: (str | Sequence[str]), *, flags: int=0, limit: int=_wcparse.PATTERN_LIMIT, exclude: ((str | Sequence[str]) | None)=None) -> bool:
        """Match the full path against one or more glob patterns."""
        return glob.globmatch(self._translate_path(), patterns, flags=self._translate_flags(flags), limit=limit, exclude=exclude)
def load_results_manually(qtbot, gui, case_name='default'):
    """Drive the GUI 'Load results manually' tool end-to-end in a test.

    Schedules nested QTimer callbacks: the outer one fills in and submits
    the load-results dialog once it opens; the inner one dismisses the
    success message box that the load triggers.
    """
    def handle_load_results_dialog():
        # Runs ~1s after the tool is triggered, once the dialog exists.
        dialog = wait_for_child(gui, qtbot, ClosableDialog)
        panel = get_child(dialog, LoadResultsPanel)
        case_selector = get_child(panel, CaseSelector)
        index = case_selector.findText(case_name, Qt.MatchFlag.MatchContains)
        assert (index != (- 1))
        case_selector.setCurrentIndex(index)
        load_button = get_child(panel.parent(), QPushButton, name='Load')
        def handle_popup_dialog():
            # Runs after the Load click, once the modal popup is up.
            messagebox = QApplication.activeModalWidget()
            assert isinstance(messagebox, QMessageBox)
            assert (messagebox.text() == 'Successfully loaded all realisations')
            ok_button = messagebox.button(QMessageBox.Ok)
            qtbot.mouseClick(ok_button, Qt.LeftButton)
        QTimer.singleShot(2000, handle_popup_dialog)
        qtbot.mouseClick(load_button, Qt.LeftButton)
        dialog.close()
    QTimer.singleShot(1000, handle_load_results_dialog)
    load_results_tool = gui.tools['Load results manually']
    load_results_tool.trigger()
def reachable_nodes(adjacency_list: Dict[T, Set[T]], starting_nodes: Set[T]) -> Dict[T, Set[T]]:
    """Walk the graph from *starting_nodes* and return the successor sets of
    every reachable node that has an adjacency entry; starting nodes always
    appear in the result, even with no successors."""
    # Collect the full node universe (keys plus every successor) to validate
    # the starting nodes against.
    known_nodes = set(adjacency_list)
    for succ_set in adjacency_list.values():
        known_nodes.update(succ_set)
    enforce(all((s in known_nodes) for s in starting_nodes), f'These starting nodes are not in the set of nodes: {starting_nodes.difference(known_nodes)}')
    seen: Set[T] = set()
    reachable: Dict[T, Set[T]] = {node: set() for node in starting_nodes}
    pending: Deque[T] = deque()
    pending.extend(starting_nodes)
    while pending:
        node = pending.pop()
        # Skip already-processed nodes and sinks without adjacency entries.
        if node in seen or node not in adjacency_list:
            continue
        successors = adjacency_list.get(node, set())
        reachable.setdefault(node, set()).update(successors)
        pending.extendleft(successors)
        seen.add(node)
    return reachable
def test_simple_model_roundtrip_bytes_serializable_attrs():
    """Custom attrs registered with serialize_attr/deserialize_attr must
    round-trip through model.to_bytes()/from_bytes().

    NOTE(review): restored the stripped `@serialize_attr.register` /
    `@deserialize_attr.register` decorators (the leftover `_attr.register`
    fragments matched this pattern).
    """
    fwd = (lambda model, X, is_train: (X, (lambda dY: dY)))
    attr = SerializableAttr()
    assert (attr.value == 'foo')
    assert (attr.to_bytes() == b'foo')
    model = Model('test', fwd, attrs={'test': attr})
    model.initialize()

    @serialize_attr.register(SerializableAttr)
    def serialize_attr_custom(_, value, name, model):
        return value.to_bytes()

    @deserialize_attr.register(SerializableAttr)
    def deserialize_attr_custom(_, value, name, model):
        return SerializableAttr().from_bytes(value)

    model_bytes = model.to_bytes()
    model = model.from_bytes(model_bytes)
    assert ('test' in model.attrs)
    # The custom deserializer appends ' from bytes' via SerializableAttr.
    assert (model.attrs['test'].value == 'foo from bytes')
class BaseEyeTracker():
    """Abstract eye-tracker interface: concrete backends override the no-op
    methods below. Only log_var() and the set_draw_*_func() hooks carry
    behavior in the base class."""
    def __init__(self):
        pass
    def calibrate(self):
        pass
    def close(self):
        pass
    def connected(self):
        pass
    def drift_correction(self, pos=None, fix_triggered=False):
        pass
    def fix_triggered_drift_correction(self, pos=None, min_samples=30, max_dev=60, reset_threshold=10):
        pass
    def get_eyetracker_clock_async(self):
        pass
    def log(self, msg):
        pass
    def log_var(self, var, val):
        # Convenience wrapper: logs "var <name> <value>" via log().
        self.log(u'var {} {}'.format(safe_decode(var), safe_decode(val)))
    def pupil_size(self):
        pass
    def sample(self):
        pass
    def send_command(self, cmd):
        pass
    def set_eye_used(self):
        pass
    def draw_drift_correction_target(self, x, y):
        pass
    def draw_calibration_target(self, x, y):
        pass
    def set_draw_calibration_target_func(self, func):
        # Replace the calibration-target drawing routine on this instance.
        self.draw_calibration_target = func
    def set_draw_drift_correction_target_func(self, func):
        # Replace the drift-correction-target drawing routine on this instance.
        self.draw_drift_correction_target = func
    def start_recording(self):
        pass
    def status_msg(self, msg):
        pass
    def stop_recording(self):
        pass
    def set_detection_type(self, eventdetection):
        pass
    def wait_for_event(self, event):
        pass
    def wait_for_blink_end(self):
        pass
    def wait_for_blink_start(self):
        pass
    def wait_for_fixation_end(self):
        pass
    def wait_for_fixation_start(self):
        pass
    def wait_for_saccade_end(self):
        pass
    def wait_for_saccade_start(self):
        pass
def with_plugins(plugins: Iterable[pkg_resources.EntryPoint]) -> Callable:
    """Decorator factory: attach each entry-point command to a click.Group,
    substituting a BrokenCommand for any plugin that fails to load."""
    def attach(group: click.Group) -> click.Group:
        if not isinstance(group, click.Group):
            raise TypeError('Plugins can only be attached to an instance of click.Group()')
        for ep in (plugins or ()):
            try:
                group.add_command(ep.load())
            except Exception:
                # A broken plugin must not take the whole CLI down.
                group.add_command(BrokenCommand(ep.name))
        return group
    return attach
def ovlp3d_04(ax, da, A, bx, db, B):
    """Machine-generated 3-D Gaussian overlap integral block.

    Auto-generated common-subexpression code (x0..x46) — do not hand-edit
    the algebra. Takes two primitive exponents/coefficients (ax/da, bx/db)
    and centers A, B; returns a (1, 15) array of summed overlap components.
    """
    result = numpy.zeros((1, 15), dtype=float)
    # Common subexpressions over the Gaussian product theorem terms.
    x0 = (0.5 / (ax + bx))
    x1 = ((ax + bx) ** (- 1.0))
    x2 = ((x1 * ((ax * A[0]) + (bx * B[0]))) - B[0])
    x3 = ((ax * bx) * x1)
    x4 = numpy.exp(((- x3) * ((A[0] - B[0]) ** 2)))
    x5 = (1. * numpy.sqrt(x1))
    x6 = (x4 * x5)
    x7 = ((x2 ** 2) * x6)
    x8 = (x0 * x6)
    x9 = (x7 + x8)
    x10 = (x2 * ((2.0 * x8) + x9))
    x11 = numpy.exp(((- x3) * ((A[1] - B[1]) ** 2)))
    x12 = (da * db)
    x13 = numpy.exp(((- x3) * ((A[2] - B[2]) ** 2)))
    x14 = ((3. * x1) * x13)
    x15 = (x12 * x14)
    x16 = (x11 * x15)
    x17 = 0.
    x18 = ((x1 * ((ax * A[1]) + (bx * B[1]))) - B[1])
    x19 = 0.
    x20 = (x18 * x19)
    x21 = (x10 * x16)
    x22 = ((x1 * ((ax * A[2]) + (bx * B[2]))) - B[2])
    x23 = (x19 * x22)
    x24 = (x11 * x5)
    x25 = ((x18 ** 2) * x24)
    x26 = (x0 * x24)
    x27 = (x25 + x26)
    x28 = (x13 * x5)
    x29 = (0. * x12)
    x30 = (x29 * x9)
    x31 = 1.
    x32 = (x18 * x31)
    x33 = (x14 * x22)
    x34 = ((x22 ** 2) * x28)
    x35 = (x0 * x28)
    x36 = (x34 + x35)
    x37 = (x2 * x4)
    x38 = (x19 * x37)
    x39 = (x18 * ((2.0 * x26) + x27))
    x40 = (x15 * x39)
    x41 = (x27 * x29)
    x42 = ((3. * x1) * x11)
    x43 = (x22 * ((2.0 * x35) + x36))
    x44 = (x12 * x42)
    x45 = (x43 * x44)
    x46 = (x17 * x4)
    # Fill the 15 angular-momentum components of the result row.
    result[(0, 0)] = numpy.sum(((x16 * x17) * (((3.0 * x0) * (x7 + x8)) + (x10 * x2))))
    result[(0, 1)] = numpy.sum((x20 * x21))
    result[(0, 2)] = numpy.sum((x21 * x23))
    result[(0, 3)] = numpy.sum(((x27 * x28) * x30))
    result[(0, 4)] = numpy.sum((((x11 * x30) * x32) * x33))
    result[(0, 5)] = numpy.sum(((x24 * x30) * x36))
    result[(0, 6)] = numpy.sum((x38 * x40))
    result[(0, 7)] = numpy.sum((((x31 * x33) * x37) * x41))
    result[(0, 8)] = numpy.sum(((((x29 * x32) * x36) * x37) * x42))
    result[(0, 9)] = numpy.sum((x38 * x45))
    result[(0, 10)] = numpy.sum(((x15 * x46) * (((3.0 * x0) * (x25 + x26)) + (x18 * x39))))
    result[(0, 11)] = numpy.sum(((x23 * x4) * x40))
    result[(0, 12)] = numpy.sum(((x36 * x41) * x6))
    result[(0, 13)] = numpy.sum(((x20 * x4) * x45))
    result[(0, 14)] = numpy.sum(((x44 * x46) * (((3.0 * x0) * (x34 + x35)) + (x22 * x43))))
    return result
class OptionSeriesWordcloudRotation(Options):
    """Rotation options for wordcloud series.

    NOTE(review): restored the stripped `@property` / `@<name>.setter`
    decorators — the leftover `_.setter` fragment confirms the original
    `@from_.setter` pattern.
    """

    @property
    def from_(self):
        """Minimum rotation angle (default 0)."""
        return self._config_get(0)

    @from_.setter
    def from_(self, num: float):
        self._config(num, js_type=False)

    @property
    def orientations(self):
        """Number of allowed orientations (default 2)."""
        return self._config_get(2)

    @orientations.setter
    def orientations(self, num: float):
        self._config(num, js_type=False)

    @property
    def to(self):
        """Maximum rotation angle (default 90)."""
        return self._config_get(90)

    @to.setter
    def to(self, num: float):
        self._config(num, js_type=False)
class ChooseIndividualCardInputlet(Inputlet):
    """Inputlet that lets a player pick exactly one card from *cards*."""

    def __init__(self, initiator: Any, cards: List[Card]):
        self.initiator = initiator
        self.cards = cards
        self.selected: Optional[Card] = None

    def parse(self, data):
        """Validate client data (a card sync id) and resolve it to a card;
        returns None on any validation failure."""
        try:
            cid = data
            check(isinstance(cid, int))
            matches = [c for c in self.cards if (c.sync_id == cid)]
            check(len(matches))
            return matches[0]
        except CheckFailed:
            return None

    def data(self):
        """Sync id of the selected card, or None when nothing is selected."""
        return self.selected.sync_id if self.selected else None

    def set_card(self, c):
        """Select *c*; it must be one of the offered cards."""
        assert (c in self.cards)
        self.selected = c

    def set_card_sid(self, sid):
        """Select by sync id, resolving through the game deck."""
        g: Any = self.game
        self.set_card(g.deck.lookup(sid))

    def post_process(self, actor, card):
        """Detach the chosen card from its original location."""
        if card:
            log.debug('ChooseIndividualCardInputlet: detaching %r!', card)
            card.detach()
        return card
class SatisfactionRatingRequest(BaseZendeskRequest):
    """Request handler for ticket satisfaction ratings (create-only API)."""

    def post(self, ticket_id, satisfaction_rating):
        """Create a satisfaction rating on the given ticket."""
        payload = self.build_payload(satisfaction_rating)
        url = self.api._build_url(EndpointFactory('satisfaction_ratings').create(id=ticket_id))
        return self.api._post(url, payload)

    def put(self, api_objects, *args, **kwargs):
        raise NotImplementedError('PUT is not implemented for SatisfactionRequest!')

    def delete(self, api_objects, *args, **kwargs):
        # Fixed typo in the original message ('fpr' -> 'for').
        raise NotImplementedError('DELETE is not implemented for SatisfactionRequest!')
def process_shot(shot: Shot, info: dict, image_path: Path, output_dir: Path, cfg: DictConfig) -> List[Shot]:
    """Undistort one shot's image into perspective image(s) written to
    *output_dir*.

    Returns the undistorted shots, or None when the source image file is
    missing (note: despite the List[Shot] annotation).

    Raises NotImplementedError for unsupported camera projection types.
    """
    if not image_path.exists():
        return None
    image_orig = cv2.imread(str(image_path))
    max_size = cfg.max_image_size
    pano_offset = None
    camera = shot.camera
    # shape is (rows, cols); reversed gives (width, height).
    (camera.width, camera.height) = image_orig.shape[:2][::(- 1)]
    if camera.is_panorama(camera.projection_type):
        camera_new = perspective_camera_from_pano(camera, max_size)
        undistorter = PanoramaUndistorter(camera, camera_new)
        pano_offset = get_pano_offset(info, cfg.do_legacy_pano_offset)
    elif camera.projection_type in ['fisheye', 'perspective']:
        if camera.projection_type == 'fisheye':
            camera_new = perspective_camera_from_fisheye(camera)
        else:
            camera_new = perspective_camera_from_perspective(camera)
        camera_new = scale_camera(camera_new, max_size)
        camera_new.id = camera.id + '_undistorted'
        undistorter = CameraUndistorter(camera, camera_new)
    else:
        raise NotImplementedError(camera.projection_type)
    (shots_undist, images_undist) = undistort_shot(image_orig, shot, undistorter, pano_offset)
    # Fixed: the loop variable used to shadow the `shot` parameter.
    for (shot_undist, image_undist) in zip(shots_undist, images_undist):
        cv2.imwrite(str(output_dir / f'{shot_undist.id}.jpg'), image_undist)
    return shots_undist
def test_process_connection_ids_not_specified():
    """_process_connection_ids must reject connection ids that are absent
    from the agent configuration."""
    builder = AEABuilder()
    builder.set_name('aea_1')
    builder.add_private_key('fetchai')
    undeclared_id = ConnectionConfig('conn', 'author', '0.1.0').public_id
    with pytest.raises(ValueError, match='Connection ids .* not declared in the configuration file.'):
        builder._process_connection_ids([undeclared_id])
@pytest.mark.skipif((not has_mxnet), reason='needs MXNet')
def test_mxnet_wrapper_train_overfits(model: Model[(Array2d, Array2d)], X: Array2d, Y: Array1d, answer: int):
    """100 Adam steps on a tiny problem must drive the wrapped MXNet model
    to the known answer.

    NOTE(review): restored the stripped `@pytest.mark.skipif` marker.
    """
    optimizer = Adam()
    for i in range(100):
        (guesses, backprop) = model(X, is_train=True)
        # Mean-squared-error gradient wrt the predictions.
        d_guesses = ((guesses - Y) / guesses.shape[0])
        backprop(d_guesses)
        model.finish_update(optimizer)
    predicted = model.predict(X).argmax()
    assert (predicted == answer)
class sRGB(RGBish, Space):
    """The gamma-encoded sRGB color space."""

    BASE = 'srgb-linear'
    NAME = 'srgb'
    CHANNELS = (Channel('r', 0.0, 1.0, bound=True, flags=FLG_OPT_PERCENT), Channel('g', 0.0, 1.0, bound=True, flags=FLG_OPT_PERCENT), Channel('b', 0.0, 1.0, bound=True, flags=FLG_OPT_PERCENT))
    CHANNEL_ALIASES = {'red': 'r', 'green': 'g', 'blue': 'b'}
    WHITE = WHITES['2deg']['D65']
    EXTENDED_RANGE = True

    def is_achromatic(self, coords: Vector) -> bool:
        """Achromatic iff *coords* is parallel to white (1, 1, 1): every
        cross-product component must be ~0."""
        return all(
            math.isclose(0.0, component, abs_tol=1e-05)
            for component in alg.vcross(coords, [1, 1, 1])
        )

    def from_base(self, coords: Vector) -> Vector:
        """Gamma-encode linear-light sRGB."""
        return gam_srgb(coords)

    def to_base(self, coords: Vector) -> Vector:
        """Linearize gamma-encoded sRGB."""
        return lin_srgb(coords)
def test_account_db_update_then_make_root_then_read(account_db):
    """A storage write must survive both state-root computation and persisting."""
    assert account_db.get_storage(ADDRESS, 1) == 0
    account_db.set_storage(ADDRESS, 1, 2)
    # Re-read after each lifecycle step: no-op, make_state_root, persist.
    for finalize in (None, account_db.make_state_root, account_db.persist):
        if finalize is not None:
            finalize()
        assert account_db.get_storage(ADDRESS, 1) == 2
(array=arrays(np.float32, shape=array_shapes(min_dims=3, max_dims=3)), name=st.text(ascii_letters, min_size=8, max_size=8))
(suppress_health_check=[HealthCheck.function_scoped_fixture])
def test_that_binary_export_and_import_are_inverses(array, name, tmp_path):
    """Round-tripping a masked array through binary GRDECL must be lossless."""
    masked = np.ma.masked_invalid(array)
    target = tmp_path / 'test.bgrdecl'
    export_grdecl(masked, target, name, binary=True)
    restored = import_bgrdecl(target, name, dimensions=array.shape)
    # Masked (invalid) entries compare as masked; treat those as equal.
    assert (restored == masked).filled(True).all()
_argument_doc
def parse_return_annotation(return_annotation: type, deconstruct_dataclass_return_type: bool, single_return_param_name: str) -> Tuple[(Optional[Dict[(str, type)]], bool)]:
    """Map a callable's return annotation to named output types.

    Returns a ``(return_types, unpack_return)`` pair: ``return_types`` maps
    output names to types (``None`` when there is no usable annotation), and
    ``unpack_return`` says whether the returned object should be split into
    its individually annotated fields.
    """
    if return_annotation is Parameter.empty or return_annotation is None:
        # Unannotated, or explicitly annotated as returning None.
        return (None, False)
    if deconstruct_dataclass_return_type and hasattr(return_annotation, '__annotations__'):
        # Dataclass-like return type: expose each annotated field separately.
        return (dict(return_annotation.__annotations__), True)
    # Plain annotation: a single output under the caller-chosen name.
    return ({single_return_param_name: return_annotation}, False)
def extractWhitemoonxblacksunCa(item):
    """Parse a feed item's title and map known tags to a release message.

    Returns None for previews or titles without chapter/volume info, and
    False when no known tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestSpeakersCallValidation(OpenEventTestCase):
    """Validation tests for the speakers-call schema."""

    def test_date_db_populate(self):
        """validate_date must accept empty data backed by an existing DB record."""
        with self.app.test_request_context():
            schema = SpeakersCallSchema()
            SpeakersCallFactory()
            SpeakersCallSchema.validate_date(schema, {}, {'data': {'id': 1}})
class BytesField(CharField[bytes]):
    """A character field whose values are stored as ``bytes``.

    ``encoding`` and ``errors`` control how ``str`` input is encoded; both
    default to the class-level values unless overridden per instance.
    """

    encoding: str = sys.getdefaultencoding()
    errors: str = 'strict'

    def __init__(self, *, encoding: Optional[str]=None, errors: Optional[str]=None, **kwargs: Any) -> None:
        if encoding is not None:
            self.encoding = encoding
        if errors is not None:
            self.errors = errors
        super().__init__(encoding=self.encoding, errors=self.errors, **kwargs)

    def prepare_value(self, value: Any, *, coerce: Optional[bool]=None) -> Optional[bytes]:
        """Encode *value* to bytes, optionally trimming surrounding whitespace."""
        if not self.should_coerce(value, coerce):
            return value
        # NOTE(review): str input is encoded with self.encoding but
        # self.errors is not passed to encode() — confirm this is intended.
        prepared = value if isinstance(value, bytes) else cast(str, value).encode(encoding=self.encoding)
        return prepared.strip() if self.trim_whitespace else prepared
(init=True, repr=True, eq=True, frozen=True)
class Config(object):
    # Top-level SDK configuration bundle.
    # NOTE(review): the bare tuple above and the un-decorated
    # auto/for_sandbox/for_endpoint methods (which take `cls`) look like
    # stripped @dataclass(...) / @classmethod decorators — restore from VCS.
    platform: PlatformConfig = PlatformConfig()
    secrets: SecretsConfig = SecretsConfig()
    stats: StatsConfig = StatsConfig()
    data_config: DataConfig = DataConfig()
    # Scratch directory for local execution artifacts.
    local_sandbox_path: str = tempfile.mkdtemp(prefix='flyte')
    def with_params(self, platform: PlatformConfig=None, secrets: SecretsConfig=None, stats: StatsConfig=None, data_config: DataConfig=None, local_sandbox_path: str=None) -> Config:
        """Return a copy of this config with any supplied sections replaced."""
        return Config(platform=(platform or self.platform), secrets=(secrets or self.secrets), stats=(stats or self.stats), data_config=(data_config or self.data_config), local_sandbox_path=(local_sandbox_path or self.local_sandbox_path))
    def auto(cls, config_file: typing.Union[(str, ConfigFile, None)]=None) -> Config:
        """Build a Config by reading each section from *config_file*."""
        config_file = get_config_file(config_file)
        kwargs = {}
        set_if_exists(kwargs, 'local_sandbox_path', _internal.LocalSDK.LOCAL_SANDBOX.read(cfg=config_file))
        return Config(platform=PlatformConfig.auto(config_file), secrets=SecretsConfig.auto(config_file), stats=StatsConfig.auto(config_file), data_config=DataConfig.auto(config_file), **kwargs)
    def for_sandbox(cls) -> Config:
        """Preset pointing at the local sandbox cluster."""
        # NOTE(review): the s3 endpoint string literal below appears
        # truncated (a URL seems to have been lost before
        # " access_key_id=") and is not syntactically valid as written —
        # restore the original line from VCS.
        return Config(platform=PlatformConfig(endpoint='localhost:30080', auth_mode='Pkce', insecure=True), data_config=DataConfig(s3=S3Config(endpoint=' access_key_id='minio', secret_access_key='miniostorage')))
    def for_endpoint(cls, endpoint: str, insecure: bool=False, data_config: typing.Optional[DataConfig]=None, config_file: typing.Union[(str, ConfigFile)]=None) -> Config:
        """Preset for an arbitrary admin endpoint, layered over auto()."""
        c = cls.auto(config_file)
        return c.with_params(platform=PlatformConfig.for_endpoint(endpoint, insecure), data_config=data_config)
def _dnsify(value: str) -> str:
res = ''
MAX = 63
HASH_LEN = 10
if (len(value) >= MAX):
h = _sha224(value.encode('utf-8')).hexdigest()[:HASH_LEN]
value = '{}-{}'.format(h, value[(- ((MAX - HASH_LEN) - 1)):])
for ch in value:
if ((ch == '_') or (ch == '-') or (ch == '.')):
if ((res != '') and (len(res) < 62)):
res += '-'
elif (not ch.isalnum()):
pass
elif (ch.islower() or ch.isdigit()):
res += ch
else:
if ((res != '') and (res[(- 1)] != '-') and (len(res) < 62)):
res += '-'
res += ch.lower()
if ((len(res) > 0) and (res[(- 1)] == '-')):
res = res[:(len(res) - 1)]
return res |
class OptionPlotoptionsBarSonificationTracksMappingTremolo(Options):
    """Tremolo mapping options for bar-series sonification tracks."""

    def depth(self) -> 'OptionPlotoptionsBarSonificationTracksMappingTremoloDepth':
        """Sub-configuration for the tremolo depth mapping."""
        sub_cls = OptionPlotoptionsBarSonificationTracksMappingTremoloDepth
        return self._config_sub_data('depth', sub_cls)

    def speed(self) -> 'OptionPlotoptionsBarSonificationTracksMappingTremoloSpeed':
        """Sub-configuration for the tremolo speed mapping."""
        sub_cls = OptionPlotoptionsBarSonificationTracksMappingTremoloSpeed
        return self._config_sub_data('speed', sub_cls)
class Message(BaseMessage):
    """AMQP message wrapper with optional lazy UTF-8 decoding of content.

    NOTE(review): several lines in this class look like stripped
    decorators ("_id.setter", "_encoding.setter", "_type.setter",
    "_mode.setter", "_to.setter", repeated getter/setter def pairs, and
    ``create``/``_try_decode_list``/``_try_decode_tuple`` taking no
    self/cls).  They were presumably @property / @<name>.setter /
    @staticmethod originally — restore from VCS; as written, the second
    def of each pair shadows the first.
    """
    __slots__ = ['_auto_decode', '_decode_cache']
    def __init__(self, channel, auto_decode=True, **message):
        """Wrap message content delivered on *channel*.

        auto_decode: when True, body/method/properties are decoded to str
        on first access and memoised in ``_decode_cache``.
        """
        super(Message, self).__init__(channel, **message)
        self._decode_cache = dict()
        self._auto_decode = auto_decode
    def create(channel, body, properties=None):
        # Factory for outgoing messages; fills correlation_id, message_id
        # and timestamp defaults.  Takes no self — presumably a stripped
        # @staticmethod.  auto_decode=False keeps outgoing bodies as given.
        properties = (properties or {})
        if ('correlation_id' not in properties):
            properties['correlation_id'] = str(uuid.uuid4())
        if ('message_id' not in properties):
            properties['message_id'] = str(uuid.uuid4())
        if ('timestamp' not in properties):
            # Naive UTC timestamp (datetime.utcnow).
            properties['timestamp'] = datetime.utcnow()
        return Message(channel, auto_decode=False, body=body, properties=properties)
    def body(self):
        """Message payload; UTF-8-decoded and cached when auto_decode is on."""
        if (not self._auto_decode):
            return self._body
        if ('body' in self._decode_cache):
            return self._decode_cache['body']
        body = try_utf8_decode(self._body)
        self._decode_cache['body'] = body
        return body
    def channel(self):
        # Owning channel (presumably a stripped @property).
        return self._channel
    def method(self):
        # Delivery method frame, decoded on demand and memoised.
        return self._try_decode_utf8_content(self._method, 'method')
    def properties(self):
        # AMQP message properties, decoded on demand and memoised.
        return self._try_decode_utf8_content(self._properties, 'properties')
    def ack(self):
        """Acknowledge this message; only valid for incoming messages."""
        if (not self._method):
            raise AMQPMessageError('Message.ack only available on incoming messages')
        self._channel.basic.ack(delivery_tag=self._method['delivery_tag'])
    def nack(self, requeue=True):
        """Negatively acknowledge this message; only valid for incoming ones."""
        if (not self._method):
            raise AMQPMessageError('Message.nack only available on incoming messages')
        self._channel.basic.nack(delivery_tag=self._method['delivery_tag'], requeue=requeue)
    def reject(self, requeue=True):
        """Reject this message; only valid for incoming messages."""
        if (not self._method):
            raise AMQPMessageError('Message.reject only available on incoming messages')
        self._channel.basic.reject(delivery_tag=self._method['delivery_tag'], requeue=requeue)
    def publish(self, routing_key, exchange='', mandatory=False, immediate=False):
        """Re-publish this message's raw body and properties to *routing_key*."""
        return self._channel.basic.publish(body=self._body, routing_key=routing_key, exchange=exchange, properties=self._properties, mandatory=mandatory, immediate=immediate)
    # --- property-style accessors (decorators stripped, see class note):
    #     getters read from the properties dict, setters write through
    #     _update_properties so the decode cache stays consistent. ---
    def app_id(self):
        return self.properties.get('app_id')
    _id.setter
    def app_id(self, value):
        self._update_properties('app_id', value)
    def message_id(self):
        return self.properties.get('message_id')
    _id.setter
    def message_id(self, value):
        self._update_properties('message_id', value)
    def content_encoding(self):
        return self.properties.get('content_encoding')
    _encoding.setter
    def content_encoding(self, value):
        self._update_properties('content_encoding', value)
    def content_type(self):
        return self.properties.get('content_type')
    _type.setter
    def content_type(self, value):
        self._update_properties('content_type', value)
    def correlation_id(self):
        return self.properties.get('correlation_id')
    _id.setter
    def correlation_id(self, value):
        self._update_properties('correlation_id', value)
    def delivery_mode(self):
        return self.properties.get('delivery_mode')
    _mode.setter
    def delivery_mode(self, value):
        self._update_properties('delivery_mode', value)
    def timestamp(self):
        return self.properties.get('timestamp')
    def timestamp(self, value):
        self._update_properties('timestamp', value)
    def priority(self):
        return self.properties.get('priority')
    def priority(self, value):
        self._update_properties('priority', value)
    def reply_to(self):
        return self.properties.get('reply_to')
    _to.setter
    def reply_to(self, value):
        self._update_properties('reply_to', value)
    def json(self):
        """Deserialize the (decoded) body as JSON."""
        return json.loads(self.body)
    def _update_properties(self, name, value):
        """Set a property on the raw dict, keeping the decode cache in sync."""
        if (self._auto_decode and ('properties' in self._decode_cache)):
            self._decode_cache['properties'][name] = value
        self._properties[name] = value
    def _try_decode_utf8_content(self, content, content_type):
        """Decode *content* (scalar or dict) to str, memoised per content_type."""
        if ((not self._auto_decode) or (not content)):
            return content
        if (content_type in self._decode_cache):
            return self._decode_cache[content_type]
        if isinstance(content, dict):
            content = self._try_decode_dict(content)
        else:
            content = try_utf8_decode(content)
        self._decode_cache[content_type] = content
        return content
    def _try_decode_dict(self, content):
        """Recursively UTF-8-decode a dict's keys and values."""
        result = dict()
        for (key, value) in content.items():
            key = try_utf8_decode(key)
            if isinstance(value, dict):
                result[key] = self._try_decode_dict(value)
            elif isinstance(value, list):
                result[key] = self._try_decode_list(value)
            elif isinstance(value, tuple):
                result[key] = self._try_decode_tuple(value)
            else:
                result[key] = try_utf8_decode(value)
        return result
    def _try_decode_list(content):
        # NOTE(review): no self — presumably a stripped @staticmethod; as
        # written, self._try_decode_list(value) above would mis-bind.
        result = list()
        for value in content:
            result.append(try_utf8_decode(value))
        return result
    def _try_decode_tuple(content):
        # NOTE(review): no self — presumably a stripped @staticmethod.
        return tuple(Message._try_decode_list(content))
class CtlUnary(form):
    """AST-style form node: a unary CTL operator applied to one child."""

    _fields = ('op', 'child')
    _attributes = ('lineno', 'col_offset')

    def __init__(self, op, child, lineno=0, col_offset=0, **ARGS):
        form.__init__(self, **ARGS)
        # Operator and operand, mirroring _fields.
        (self.op, self.child) = (op, child)
        # Source position, mirroring _attributes; coerced to int.
        (self.lineno, self.col_offset) = (int(lineno), int(col_offset))
def test_print_coverage(plugintester, mocker):
    """Coverage output is built only when --coverage or -C is passed."""
    mocker.spy(output, '_build_coverage_output')
    expected_calls = 0
    for (args, builds_output) in ((('--coverage',), True), ((), False), (('-C',), True)):
        plugintester.runpytest(*args)
        expected_calls += int(builds_output)
        assert output._build_coverage_output.call_count == expected_calls
class VoiceSettings(API):
    """Per-voice synthesis settings fetched from the voices API.

    NOTE(review): from_voice_id/from_default take ``cls`` — presumably
    stripped @classmethod decorators.
    """

    stability: float = Field(..., ge=0.0, le=1.0)
    similarity_boost: float = Field(..., ge=0.0, le=1.0)
    style: Optional[float] = Field(0.0, ge=0.0, le=1.0)
    use_speaker_boost: Optional[bool] = False

    def from_voice_id(cls, voice_id: str) -> VoiceSettings:
        """Fetch the settings currently attached to *voice_id*."""
        response = API.get(f'{api_base_url_v1}/voices/{voice_id}/settings')
        return cls(**response.json())

    def from_default(cls) -> VoiceSettings:
        """Fetch the account-wide default voice settings."""
        response = API.get(f'{api_base_url_v1}/voices/settings/default')
        return cls(**response.json())
class OptionPlotoptionsPieSonificationDefaultspeechoptionsPointgrouping(Options):
    """Point-grouping options for pie sonification default speech tracks.

    NOTE(review): each option appears as a getter (returning its default
    via _config_get) immediately followed by a setter (writing via
    _config).  The @property/@<name>.setter decorators were likely
    stripped; as written the second def shadows the first.  Restore from
    VCS.
    """
    def algorithm(self):
        # Default grouping algorithm: 'last'.
        return self._config_get('last')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Grouping enabled by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Default timespan (ms) over which points are grouped.
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Point property used for grouping; defaults to 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
def _validate_saml_provider_id(provider_id):
if (not isinstance(provider_id, str)):
raise ValueError('Invalid SAML provider ID: {0}. Provider ID must be a non-empty string.'.format(provider_id))
if (not provider_id.startswith('saml.')):
raise ValueError('Invalid SAML provider ID: {0}.'.format(provider_id))
return provider_id |
class EmmetRenameTag(sublime_plugin.TextCommand):
    """Replace the current selections with the editable tag-name regions
    of the HTML tag under each caret (open tag name and, when present,
    the matching close tag name)."""

    def run(self, edit, **kw):
        selection = self.view.sel()
        caret_regions = list(selection)
        cleared = False
        for region in caret_regions:
            pos = region.begin()
            syntax_name = syntax.from_pos(self.view, pos)
            if not syntax.is_html(syntax_name):
                continue
            ctx = emmet_sublime.get_tag_context(self.view, pos, syntax.is_xml(syntax_name))
            if not ctx:
                continue
            if not cleared:
                # Drop the original carets only once we know there is at
                # least one tag to rename.
                selection.clear()
                cleared = True
            # Name starts one character after '<' in the open tag.
            open_start = ctx['open'].begin() + 1
            selection.add(sublime.Region(open_start, open_start + len(ctx['name'])))
            if 'close' in ctx:
                # Name sits between '</' and '>' in the close tag.
                selection.add(sublime.Region(ctx['close'].begin() + 2, ctx['close'].end() - 1))
        if cleared:
            self.view.show(selection)
            track_action('Rename Tag')
class TestLoggingUsersLogic(CoprsTestCase):
    """Assert that UsersLogic operations emit the expected log lines.

    NOTE(review): the bare tuples like ('coprs.app.logger', ...) and the
    '.usefixtures(...)' lines look like stripped @mock.patch /
    @pytest.mark.usefixtures decorators (the patch supplies the ``log``
    argument) — restore from VCS.
    """
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_db')
    def test_user_get(self, log):
        UsersLogic.get('somebody')
        log.info.assert_called_once_with("Querying user '%s' by username", 'somebody')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_coprs', 'f_db')
    def test_raise_if_cant_update_copr(self, log):
        UsersLogic.raise_if_cant_update_copr(self.u2, self.c2, None)
        log.info.assert_called_once_with("User '%s' allowed to update project '%s'", 'user2', 'user2/foocopr')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_coprs', 'f_db')
    def test_raise_if_cant_build_in_copr(self, log):
        UsersLogic.raise_if_cant_build_in_copr(self.u2, self.c2, None)
        log.info.assert_called_once_with("User '%s' allowed to build in project '%s'", 'user2', 'user2/foocopr')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_coprs', 'f_groups', 'f_db')
    def test_raise_if_not_in_group(self, log):
        UsersLogic.raise_if_not_in_group(self.u1, self.g1)
        log.info.assert_called_once_with("User '%s' allowed to access group '%s' (fas_name='%s')", 'user1', 'group1', 'fas_1')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_db')
    def test_delete_user_data(self, log):
        UsersLogic.delete_user_data(self.u2)
        log.info.assert_called_once_with("Deleting user '%s' data", 'user2')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_db')
    def test_create_user_wrapper(self, log):
        UsersLogic.create_user_wrapper('somebody', '')
        log.info.assert_called_once_with("Creating user '%s <%s>'", 'somebody', '')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_db')
    def test_user_data_dumper(self, log):
        dumper = UserDataDumper(self.u2)
        dumper.dumps()
        log.info.assert_called_once_with("Dumping all user data for '%s'", 'user2')
def __ranking_metrics_heuristic(topology, od_pairs=None):
    """Order OD pairs by (min capacity, min degree[, -max NFUR]).

    The NFUR tie-breaker is computed only when two pairs share the same
    (capacity, degree) signature, because it is expensive.
    """
    if od_pairs is None:
        od_pairs = od_pairs_from_topology(topology)
    (fan_in, fan_out) = fan_in_out_capacities(topology)
    degree = topology.degree()
    min_capacity = {pair: min(fan_out[pair[0]], fan_in[pair[1]]) for pair in od_pairs}
    min_degree = {pair: min(degree[pair[0]], degree[pair[1]]) for pair in od_pairs}
    # NFUR is only needed to break ties on (capacity, degree).
    signature_counts = Counter((min_capacity[pair], min_degree[pair]) for pair in od_pairs)
    if all(count <= 1 for count in signature_counts.values()):
        return sorted(od_pairs, key=lambda pair: (min_capacity[pair], min_degree[pair]))
    # Bigger topologies get progressively cheaper NFUR computation modes.
    n_edges = topology.number_of_edges()
    nfur = __calc_nfur(topology, n_edges > 300, n_edges > 100)
    max_inv_nfur = {pair: -max(nfur[pair[0]], nfur[pair[1]]) for pair in od_pairs}
    return sorted(od_pairs, key=lambda pair: (min_capacity[pair], min_degree[pair], max_inv_nfur[pair]))
.parametrize('sender,nonce,expected', (('0x39fA8c5f2793459D6622857E7D9FbB4BD91766d3', 8, '0xc083e9947Cf02b8FfC7D3090AE9AEA72DF98FD47'), ('0x39fa8c5f2793459d6622857e7d9fbb4bd91766d3', 8, '0xc083e9947Cf02b8FfC7D3090AE9AEA72DF98FD47'), ('0x18dd4e0eb8699ea4fee238de41ecfb95e32272f8', 0, '0x3845badAde8e6dFFd1F14bD3903a5d0')))
def test_address_get_create_address(sender, nonce, expected):
    """get_create_address must derive the expected CREATE contract address.

    NOTE(review): the leading '.parametrize' line looks like a stripped
    @pytest.mark.parametrize decorator, and the third expected address
    appears truncated (31 hex characters instead of 40) — restore both
    from VCS.
    """
    actual = get_create_address(sender, nonce)
    assert (actual == expected)
def get_reports_mapping(period_dir: Text) -> Dict[(Text, Path)]:
    """Map display names to report paths found directly under *period_dir*.

    Hidden entries (dot-prefixed) are skipped; paths are sorted so the
    mapping is deterministic.  Directory entries get a ' (folder)' suffix
    on their display name.
    """
    paths: List[Path] = []
    for filename in os.listdir(period_dir):
        # Skip hidden files and directories.
        if not filename.startswith('.'):
            # BUG FIX: the listed entry's name must be part of the path;
            # previously a literal placeholder was appended, so every
            # entry produced the same (nonexistent) path.
            paths.append(Path(f'{period_dir}/{filename}'))
    paths.sort()
    names: List[Text] = []
    for path in paths:
        name: Text = get_report_name(path)
        if path.is_dir():
            name += ' (folder)'
        names.append(name)
    return dict(zip(names, paths))
class OptionPlotoptionsScatterDatalabels(Options):
    """Data-label options for scatter series.

    Each scalar option is exposed as a getter (returning its default via
    _config_get) followed by a setter (writing via _config); nested option
    groups are exposed via _config_sub_data.

    NOTE(review): the duplicated def names strongly suggest stripped
    @property/@<name>.setter decorators — as written the setter def
    shadows the getter.  Restore from VCS.
    """
    def align(self):
        return self._config_get('center')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionPlotoptionsScatterDatalabelsAnimation':
        # Nested animation options.
        return self._config_sub_data('animation', OptionPlotoptionsScatterDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(True)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        # Data labels are disabled by default for scatter series.
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionPlotoptionsScatterDatalabelsFilter':
        # Nested filter options.
        return self._config_sub_data('filter', OptionPlotoptionsScatterDatalabelsFilter)
    def format(self):
        # Default label format string.
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionPlotoptionsScatterDatalabelsTextpath':
        # Nested text-path options.
        return self._config_sub_data('textPath', OptionPlotoptionsScatterDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('bottom')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(0)
    def y(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesBarSonificationTracksMappingHighpass(Options):
    """High-pass filter mapping options for bar-series sonification tracks."""

    def frequency(self) -> 'OptionSeriesBarSonificationTracksMappingHighpassFrequency':
        """Sub-configuration for the filter cutoff frequency mapping."""
        sub_cls = OptionSeriesBarSonificationTracksMappingHighpassFrequency
        return self._config_sub_data('frequency', sub_cls)

    def resonance(self) -> 'OptionSeriesBarSonificationTracksMappingHighpassResonance':
        """Sub-configuration for the filter resonance mapping."""
        sub_cls = OptionSeriesBarSonificationTracksMappingHighpassResonance
        return self._config_sub_data('resonance', sub_cls)
def get_scheme_names(is_jp: bool, scheme_data: list[list[int]]) -> dict[(int, str)]:
    """Resolve scheme IDs to display names from the game's localizable.tsv."""
    file_data = game_data_getter.get_file_latest('resLocal', 'localizable.tsv', is_jp)
    if file_data is None:
        helper.error_text('Failed to get scheme names')
        return {}
    localizable = csv_handler.parse_csv(file_data.decode('utf-8'), delimeter='\t')
    names: dict[(int, str)] = {}
    # First row of scheme_data is a header; skip it.
    for scheme in scheme_data[1:]:
        scheme_id = scheme[0]
        wanted_key = f'scheme_popup_{scheme_id}'
        for row in localizable:
            if row[0] != wanted_key:
                continue
            # Strip the <flash> markup surrounding the display name.
            names[scheme_id] = row[1].replace('<flash>', '').replace('</flash>', '')
            break
    return names
class OptionSeriesPyramidPointEvents(Options):
    """Point event handler options for pyramid series.

    Every handler defaults to None; each appears as a getter (via
    _config_get) followed by a setter (via _config).

    NOTE(review): the duplicated def names strongly suggest stripped
    @property/@<name>.setter decorators — as written the setter def
    shadows the getter.  Restore from VCS.
    """
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def drag(self):
        return self._config_get(None)
    def drag(self, value: Any):
        self._config(value, js_type=False)
    def dragStart(self):
        return self._config_get(None)
    def dragStart(self, value: Any):
        self._config(value, js_type=False)
    def drop(self):
        return self._config_get(None)
    def drop(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def remove(self):
        return self._config_get(None)
    def remove(self, value: Any):
        self._config(value, js_type=False)
    def select(self):
        return self._config_get(None)
    def select(self, value: Any):
        self._config(value, js_type=False)
    def unselect(self):
        return self._config_get(None)
    def unselect(self, value: Any):
        self._config(value, js_type=False)
    def update(self):
        return self._config_get(None)
    def update(self, value: Any):
        self._config(value, js_type=False)
def extractGa7AxyTranslationsBlogspotCom(item):
    """Map a parsed feed item to a release message via its tags.

    Returns None when the title has no chapter/volume info or is a
    preview; False when no known tag matches.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if (not (chp or vol)) or ('preview' in title.lower()):
        return None
    for (tagname, name, tl_type) in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def getAspect(obj1, obj2, aspList):
    """Return the Aspect between two objects, or a NO_ASPECT placeholder."""
    roles = _getActivePassive(obj1, obj2)
    (active, passive) = (roles['active'], roles['passive'])
    aspDict = _aspectDict(active, passive, aspList)
    if not aspDict:
        # No aspect within orb: synthesise an empty aspect record.
        aspDict = {'type': const.NO_ASPECT, 'orb': 0, 'separation': 0}
    return Aspect(_aspectProperties(active, passive, aspDict))
def example_text(NLP):
    """Return a short example text for *NLP*'s language.

    Uses the language's bundled example sentences when available, falling
    back to two generic English sentences.  Sentences that end in an
    alphanumeric character get a sentence terminator appended; for
    Japanese and Chinese the ideographic full stop is used and sentences
    are joined without spaces.
    """
    try:
        examples = importlib.import_module(('spacy.lang.' + NLP.lang) + '.examples')
        sentences = examples.sentences
    except Exception:
        sentences = ['This is a sentence.', 'This is another sentence.']
    punct_fixed = []
    sent_sep = ' '
    for sent in sentences:
        if sent[-1].isalnum():
            if NLP.lang in ['ja', 'zh']:
                # BUG FIX: this previously appended an empty string — the
                # ideographic full stop appears to have been lost in a
                # non-ASCII-stripping pass.
                sent += '。'
                sent_sep = ''
            else:
                sent += '.'
        punct_fixed.append(sent)
    return sent_sep.join(punct_fixed)
class TestWebaruba(TestCase):
    """Snapshot test for the Aruba (AW) production parser."""
    def setUp(self) -> None:
        # Session backed by a mock transport adapter so no real HTTP happens.
        self.session = Session()
        self.adapter = Adapter()
        # NOTE(review): the mount prefix string below is truncated — a URL
        # scheme prefix (e.g. 'https://') seems to have been lost, and as
        # written the line is not syntactically valid.  Restore from VCS.
        self.session.mount(' self.adapter)
    def test_fetch_production(self):
        """fetch_production output must match the stored snapshot."""
        self.adapter.register_uri(GET, PRODUCTION_URL, json=json.loads(resources.files('parsers.test.mocks.AW').joinpath('production.json').read_text()))
        production = fetch_production(zone_key=ZoneKey('AW'), session=self.session)
        self.assertMatchSnapshot([{'datetime': element['datetime'].isoformat(), 'production': element['production'], 'storage': element['storage'], 'source': element['source'], 'zoneKey': element['zoneKey'], 'sourceType': element['sourceType'].value} for element in production])
class DockSection(DockGroup):
is_row = Bool(True)
splitters = List(DockSplitter)
dock_window = Instance('pyface.dock.dock_window.DockWindow')
modified = Property
_property
def _get_owner(self):
if (self.dock_window is not None):
return self.dock_window
if (self.parent is None):
return None
return self.parent.owner
    def calc_min(self, use_size=False):
        """Compute this section's minimum (width, height).

        Sums child minima along the section's major axis and takes the max
        along the minor axis.  Resizable sections reserve 10px per
        splitter; fixed sections reserve 3px margins instead.  Also
        initialises self.width/height when they are still unset (< 0).
        """
        tdx = tdy = 0
        contents = self.visible_contents
        n = len(contents)
        if self.is_row:
            # Horizontal layout: widths add up, height is the tallest child.
            sdx = 10
            for item in contents:
                (dx, dy) = item.calc_min(use_size)
                tdx += dx
                tdy = max(tdy, dy)
            if self.resizable:
                # One splitter bar between each adjacent pair.
                tdx += ((n - 1) * sdx)
            else:
                # Fixed layout: 3px margins around/between children.
                tdx += ((n + 1) * 3)
                tdy += 6
        else:
            # Vertical layout: heights add up, width is the widest child.
            sdy = 10
            for item in contents:
                (dx, dy) = item.calc_min(use_size)
                tdx = max(tdx, dx)
                tdy += dy
            if self.resizable:
                tdy += ((n - 1) * sdy)
            else:
                tdx += 6
                tdy += ((n + 1) * 3)
        if (self.width < 0):
            # First measurement: adopt the computed minimum as our size.
            self.width = tdx
            self.height = tdy
        return (tdx, tdy)
    def initial_recalc_sizes(self, x, y, dx, dy):
        """First layout pass: distribute (dx, dy) among visible children.

        Children whose dockable elements request a size get it (values
        below 1 are treated as fractions of the total); leftover space is
        split evenly among unsized children.  Splitter bars are created
        between adjacent children.
        """
        self.width = dx = max(0, dx)
        self.height = dy = max(0, dy)
        self.bounds = (x, y, dx, dy)
        # Non-resizable sections use the simpler fixed layout.
        if (not self.resizable):
            self.recalc_sizes_fixed(x, y, dx, dy)
            return
        contents = self.visible_contents
        n = (len(contents) - 1)
        splitters = []
        splitter_size = 10
        sizes = []
        # Space remaining after reserving room for the n splitter bars.
        if self.is_row:
            total = (dx - (n * splitter_size))
        else:
            total = (dy - (n * splitter_size))
        # Collect each child's requested size along the major axis
        # (-1.0 means no request).
        for item in contents:
            size = (- 1.0)
            for dock_control in item.get_controls():
                dockable = dock_control.dockable
                if ((dockable is not None) and (dockable.element is not None)):
                    if self.is_row:
                        size = max(size, dockable.element.width)
                    else:
                        size = max(size, dockable.element.height)
            sizes.append(size)
        # Grant explicit requests first; values below 1 scale the total.
        avail = total
        remain = 0
        for (i, sz) in enumerate(sizes):
            if (avail <= 0):
                break
            if (sz >= 0):
                if (sz >= 1):
                    sz = min(sz, avail)
                else:
                    sz *= total
                sz = int(sz)
                sizes[i] = sz
                avail -= sz
            else:
                remain += 1
        # Split leftover space evenly among unsized children, or hand any
        # excess to the last child when every child was sized.
        if (remain > 0):
            remain = int((avail / remain))
            for (i, sz) in enumerate(sizes):
                if (sz < 0):
                    sizes[i] = remain
        else:
            sizes[(- 1)] += avail
        # Lay children out along the major axis, inserting a splitter bar
        # after each child but the last.
        if self.is_row:
            for (i, item) in enumerate(contents):
                idx = int(sizes[i])
                item.recalc_sizes(x, y, idx, dy)
                x += idx
                if (i < n):
                    splitters.append(DockSplitter(bounds=(x, y, splitter_size, dy), style='vertical', parent=self, index=i))
                    x += splitter_size
        else:
            for (i, item) in enumerate(contents):
                idy = int(sizes[i])
                item.recalc_sizes(x, y, dx, idy)
                y += idy
                if (i < n):
                    splitters.append(DockSplitter(bounds=(x, y, dx, splitter_size), style='horizontal', parent=self, index=i))
                    y += splitter_size
        # Carry remembered drag positions over to the new splitter objects.
        cur_splitters = self.splitters
        for i in range(min(len(splitters), len(cur_splitters))):
            splitters[i]._last_bounds = cur_splitters[i]._last_bounds
        self.splitters = splitters
        self._set_visibility()
    def recalc_sizes(self, x, y, dx, dy):
        """Re-layout pass: rescale children proportionally to fill (dx, dy).

        The very first call delegates to initial_recalc_sizes; afterwards
        the delta between the new size and the children's current total is
        distributed proportionally to each child's current size, with the
        last child absorbing rounding leftovers.
        """
        if (not self.initialized):
            self.initial_recalc_sizes(x, y, dx, dy)
            self.initialized = True
            return
        self.width = dx = max(0, dx)
        self.height = dy = max(0, dy)
        self.bounds = (x, y, dx, dy)
        # Non-resizable sections use the simpler fixed layout.
        if (not self.resizable):
            self.recalc_sizes_fixed(x, y, dx, dy)
            return
        contents = self.visible_contents
        n = (len(contents) - 1)
        splitters = []
        if self.is_row:
            sdx = 10
            # Width available to children after the splitter bars.
            dx -= (n * sdx)
            # Children's current total width (guarded against zero).
            cdx = 0
            for item in contents:
                cdx += item.width
            cdx = max(1, cdx)
            # Distribute the size change proportionally.
            delta = remaining = (dx - cdx)
            for (i, item) in enumerate(contents):
                if (i < n):
                    idx = int(round((float((item.width * delta)) / cdx)))
                else:
                    # Last child takes whatever rounding left over.
                    idx = remaining
                remaining -= idx
                idx += item.width
                item.recalc_sizes(x, y, idx, dy)
                x += idx
                if (i < n):
                    splitters.append(DockSplitter(bounds=(x, y, sdx, dy), style='vertical', parent=self, index=i))
                    x += sdx
        else:
            sdy = 10
            dy -= (n * sdy)
            # Same scheme as above, along the vertical axis.
            cdy = 0
            for item in contents:
                cdy += item.height
            cdy = max(1, cdy)
            delta = remaining = (dy - cdy)
            for (i, item) in enumerate(contents):
                if (i < n):
                    idy = int(round((float((item.height * delta)) / cdy)))
                else:
                    idy = remaining
                remaining -= idy
                idy += item.height
                item.recalc_sizes(x, y, dx, idy)
                y += idy
                if (i < n):
                    splitters.append(DockSplitter(bounds=(x, y, dx, sdy), style='horizontal', parent=self, index=i))
                    y += sdy
        # Carry remembered drag positions over to the new splitter objects.
        cur_splitters = self.splitters
        for i in range(min(len(splitters), len(cur_splitters))):
            splitters[i]._last_bounds = cur_splitters[i]._last_bounds
        self.splitters = splitters
        self._set_visibility()
    def recalc_sizes_fixed(self, x, y, dx, dy):
        """Layout for non-resizable sections.

        Gives each visible child its minimum size (clipped to the space
        left), separated by 3-pixel margins; no splitter bars are created.
        """
        self.splitters = []
        # Leading 3px margin on both axes.
        x += 3
        y += 3
        dx = max(0, (dx - 3))
        dy = max(0, (dy - 3))
        if self.is_row:
            for item in self.visible_contents:
                (idx, idy) = item.calc_min()
                # Clip the child's minimum to what is still available.
                idx = min(dx, idx)
                idy = min(dy, idy)
                dx = max(0, ((dx - idx) - 3))
                item.recalc_sizes(x, y, idx, idy)
                x += (idx + 3)
        else:
            for item in self.visible_contents:
                (idx, idy) = item.calc_min()
                idx = min(dx, idx)
                idy = min(dy, idy)
                dy = max(0, ((dy - idy) - 3))
                item.recalc_sizes(x, y, idx, idy)
                y += (idy + 3)
        self._set_visibility()
def draw(self, dc):
if (self._visible is not False):
contents = self.visible_contents
(x, y, dx, dy) = self.bounds
self.fill_bg_color(dc, x, y, dx, dy)
for item in contents:
item.draw(dc)
self.begin_draw(dc)
for item in self.splitters:
item.draw(dc)
self.end_draw(dc)
def object_at(self, x, y, force=False):
if (self._visible is not False):
for item in self.splitters:
if item.is_at(x, y):
return item
for item in self.visible_contents:
object = item.object_at(x, y)
if (object is not None):
return object
if (force and self.is_at(x, y)):
return self
return None
    def dock_info_at(self, x, y, tdx, is_control, force=False):
        """Return docking info for point (x, y), or None.

        Checks, in order: the base class, splitter bars, children, then —
        only when *force* — this section's own edges, choosing the nearest
        edge (or DOCK_EXPORT when the point lies well outside the bounds).
        """
        info = super().dock_info_at(x, y, tdx, is_control)
        if (info is not None):
            return info
        if (self._visible is False):
            return None
        for item in self.splitters:
            if item.is_at(x, y):
                return DockInfo(kind=DOCK_SPLITTER)
        for item in self.visible_contents:
            object = item.dock_info_at(x, y, tdx, is_control)
            if (object is not None):
                return object
        if (not force):
            return None
        (lx, ty, dx, dy) = self.bounds
        # Signed distance outside each edge (positive = outside).
        left = (lx - x)
        right = (((x - lx) - dx) + 1)
        top = (ty - y)
        bottom = (((y - ty) - dy) + 1)
        # More than 20 pixels outside any edge: drag out of the window.
        if (max(left, right, top, bottom) > 20):
            return DockInfo(kind=DOCK_EXPORT)
        # Otherwise dock on the nearest edge, claiming a third of the
        # section along that edge.
        left = abs(left)
        right = abs(right)
        top = abs(top)
        bottom = abs(bottom)
        choice = min(left, right, top, bottom)
        mdx = (dx // 3)
        mdy = (dy // 3)
        if (choice == left):
            return DockInfo(kind=DOCK_LEFT, bounds=(lx, ty, mdx, dy))
        if (choice == right):
            return DockInfo(kind=DOCK_RIGHT, bounds=(((lx + dx) - mdx), ty, mdx, dy))
        if (choice == top):
            return DockInfo(kind=DOCK_TOP, bounds=(lx, ty, dx, mdy))
        return DockInfo(kind=DOCK_BOTTOM, bounds=(lx, ((ty + dy) - mdy), dx, mdy))
    def add(self, control, region, kind):
        """Dock *control* next to *region* according to *kind*.

        A drop along this section's axis inserts the new region as a
        sibling of *region*; a drop perpendicular to the axis wraps
        *region* and the new region in a nested DockSection of the
        opposite orientation.
        """
        contents = self.contents
        new_region = control
        # Bare controls are wrapped in a region first.
        if (not isinstance(control, DockRegion)):
            new_region = DockRegion(contents=[control])
        i = contents.index(region)
        if self.is_row:
            if ((kind == DOCK_TOP) or (kind == DOCK_BOTTOM)):
                # Perpendicular drop: wrap target + new region in a column.
                if (kind == DOCK_TOP):
                    new_contents = [new_region, region]
                else:
                    new_contents = [region, new_region]
                contents[i] = DockSection(is_row=False).trait_set(contents=new_contents)
            else:
                # In-axis drop: insert as a sibling; re-index in case the
                # region was already one of our own children.
                if (new_region.parent is self):
                    contents.remove(new_region)
                    i = contents.index(region)
                if (kind == DOCK_RIGHT):
                    i += 1
                contents.insert(i, new_region)
        elif ((kind == DOCK_LEFT) or (kind == DOCK_RIGHT)):
            # Perpendicular drop in a column: wrap in a nested row.
            if (kind == DOCK_LEFT):
                new_contents = [new_region, region]
            else:
                new_contents = [region, new_region]
            contents[i] = DockSection(is_row=True).trait_set(contents=new_contents)
        else:
            # In-axis drop in a column.
            if (new_region.parent is self):
                contents.remove(new_region)
                i = contents.index(region)
            if (kind == DOCK_BOTTOM):
                i += 1
            contents.insert(i, new_region)
    def remove(self, item):
        """Remove *item* from this section, collapsing degenerate groups.

        A child group left with one element is replaced in place by that
        element.  If this section itself drops to at most one child it is
        collapsed into its parent; an empty parentless section notifies
        its DockWindow instead.
        """
        contents = self.contents
        if (isinstance(item, DockGroup) and (len(item.contents) == 1)):
            # Collapse: splice the group's lone child into our contents.
            contents[contents.index(item)] = item.contents[0]
        else:
            contents.remove(item)
        if (self.parent is not None):
            if (len(contents) <= 1):
                self.parent.remove(self)
        elif ((len(contents) == 0) and (self.dock_window is not None)):
            self.dock_window.dock_window_empty()
def set_visibility(self, visible):
    """Sets this section's visibility and recursively that of its children."""
    self._visible = visible
    for child in self.contents:
        child.set_visibility(visible)
def get_structure(self):
    """Returns a copy of the section's layout structure (no live contents)."""
    # Clone only the layout traits, then attach structural copies of the
    # children and splitter bars.
    skeleton = self.clone_traits(['is_row', 'width', 'height'])
    child_structs = [child.get_structure() for child in self.contents]
    splitter_structs = [bar.get_structure() for bar in self.splitters]
    return skeleton.trait_set(contents=child_structs, splitters=splitter_structs)
def get_splitter_bounds(self, splitter):
    """Returns *splitter*'s bounds stretched along the layout axis so it
    spans from the start of the item before it to the end of the item
    after it."""
    x, y, dx, dy = splitter.bounds
    index = self.splitters.index(splitter)
    visible = self.visible_contents
    before = visible[index]
    after = visible[index + 1]
    ax, ay, adx, ady = after.bounds
    if self.is_row:
        x = before.bounds[0]
        dx = (ax + adx) - x
    else:
        y = before.bounds[1]
        dy = (ay + ady) - y
    return (x, y, dx, dy)
def update_splitter(self, splitter, window):
    """Re-lays out the two items adjacent to *splitter* after the bar has
    moved, then repaints the affected area of *window*.
    """
    # Post-drag splitter bounds and the two neighbouring visible items:
    (x, y, dx, dy) = splitter.bounds
    i = self.splitters.index(splitter)
    contents = self.visible_contents
    item1 = contents[i]
    item2 = contents[(i + 1)]
    (ix1, iy1, idx1, idy1) = item1.bounds
    (ix2, iy2, idx2, idy2) = item2.bounds
    # Freeze/Thaw brackets the relayout to avoid intermediate repaints.
    window.Freeze()
    if self.is_row:
        # Horizontal layout: item1 keeps everything left of the bar,
        # item2 everything right of it (heights unchanged).
        item1.recalc_sizes(ix1, iy1, (x - ix1), idy1)
        item2.recalc_sizes((x + dx), iy2, (((ix2 + idx2) - x) - dx), idy2)
    else:
        # Vertical layout: split above/below the bar (widths unchanged).
        item1.recalc_sizes(ix1, iy1, idx1, (y - iy1))
        item2.recalc_sizes(ix2, (y + dy), idx2, (((iy2 + idy2) - y) - dy))
    window.Thaw()
    # dx/dy are reused below as refresh padding: keep the component along
    # the splitter's travel axis, zero the perpendicular one.
    if (splitter.style == 'horizontal'):
        dx = 0
    else:
        dy = 0
    # Repaint the union of both items, padded by the splitter thickness.
    window.RefreshRect(wx.Rect((ix1 - dx), (iy1 - dy), (((ix2 + idx2) - ix1) + (2 * dx)), (((iy2 + idy2) - iy1) + (2 * dy))))
def dump(self, indent=0):
    """Prints this section and its children (indented) for debugging."""
    pad = ' ' * indent
    line = '%sSection( %08X, is_row = %s, width = %d, height = %d )' % (
        pad, id(self), self.is_row, self.width, self.height)
    print(line)
    for child in self.contents:
        child.dump(indent + 3)
def _set_visibility(self):
for item in self.contents:
item.set_visibility(item.visible)
@observe('contents')
def _contents_updated(self, event):
    """Handles the 'contents' trait being replaced: re-parents the new
    children and invalidates the cached layout.

    Fix: the line above this handler was the bare no-op expression
    ('contents') — almost certainly a mangled @observe('contents')
    decorator (without it the handler never fires).  'observe' is the
    traits.api decorator; confirm the exact original form against VCS.
    """
    for item in self.contents:
        item.parent = self
    self.calc_min(True)
    self.modified = True
@observe('contents:items')
def _contents_items_updated(self, event):
    """Handles items being added to 'contents': re-parents only the added
    items and invalidates the cached layout.

    Fix: the line above this handler was the bare no-op expression
    ('contents:items') — a mangled @observe('contents:items') decorator
    (without it the handler never fires).  Confirm against VCS.
    """
    for item in event.added:
        item.parent = self
    self.calc_min(True)
    self.modified = True
@observe('splitters')
def _splitters_updated(self, event):
    """Handles the 'splitters' trait being replaced: re-parents the bars.

    Fix: the line above this handler was the bare no-op expression
    ('splitters') — a mangled @observe('splitters') decorator (without
    it the handler never fires).  Confirm against VCS.
    """
    for item in self.splitters:
        item.parent = self
@observe('splitters:items')
def _splitters_items_updated(self, event):
    """Handles items being added to 'splitters': re-parents the added bars.

    Fix: the line above this handler was the bare no-op expression
    ('splitters:items') — a mangled @observe('splitters:items')
    decorator (without it the handler never fires).  Confirm against VCS.
    """
    for item in event.added:
        item.parent = self
def _set_modified(self, value):
self._resizable = None
if (self.parent is not None):
self.parent.modified = True |
class DewolfNotifications(UIContextNotification):
    """Binary Ninja UI notification hook that keeps *widget* up to date.

    Registers itself with the UIContext on construction and unregisters
    itself when the widget is destroyed; any view or address change in
    the UI triggers a state refresh on the widget.
    """

    def __init__(self, widget):
        UIContextNotification.__init__(self)
        self.widget = widget
        # Unregister automatically when the Qt widget is torn down.
        self.widget.destroyed.connect(self.destroyed)
        UIContext.registerNotification(self)

    def destroyed(self):
        # Qt 'destroyed' slot: stop receiving UI notifications.
        UIContext.unregisterNotification(self)

    def OnViewChange(self, context, frame, type):
        # UIContextNotification callback: active view changed.
        self.widget.updateState()

    def OnAddressChange(self, context, frame, view, location):
        # UIContextNotification callback: current address changed.
        self.widget.updateState()
def extractHornytranslationsWordpressCom(item):
    """Release-feed parser: maps a tagged post to a release message.

    Returns None for previews / untitled chapters, False when no known
    series tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('Junior High School Sex Slave Runa', 'Junior High School Sex Slave Runa', 'translated'),
        ("Women's Dormitory Manager", "Women's Dormitory Manager", 'translated'),
        ('Valhalla - The Penis Mansion', 'Valhalla - The Penis Mansion', 'translated'),
        ('The Day My Sister Became an Exclusive Meat Toilet', 'The Day My Sister Became an Exclusive Meat Toilet', 'translated'),
        ('I Have The Only Ero Knowledge In The World, So I Decided To Cum Inside Pretty Girls', 'I Have The Only Ero Knowledge In The World, So I Decided To Cum Inside Pretty Girls', 'translated'),
        ('The Record of My Sex Life in a Different World', 'The Record of My Sex Life in a Different World', 'translated'),
        ('My Elder Sister Fell in Love with Me and Transformed into a Yandere', 'My Elder Sister Fell in Love with Me and Transformed into a Yandere', 'translated'),
        ('Chronicles of a Creative Different World Reincarnation', 'Chronicles of a Creative Different World Reincarnation', 'translated'),
        ('The Duo Who Hunt Women', 'The Duo Who Hunt Women', 'translated'),
        ('Fate Comes with Time', 'Fate Comes with Time', 'translated'),
        ('World Class Prostitution ring', 'World Class Prostitution ring', 'translated'),
        ('Serena The Futanari Princess', 'Serena The Futanari Princess', 'translated'),
        ('Princess Insult', 'Princess Insult', 'translated'),
        ('Marriage Insult', 'Marriage Insult', 'translated'),
        ('Hypnotized Harem', 'Hypnotized Harem', 'translated'),
        ('International Sex Slave Law', 'International Sex Slave Law', 'translated'),
        ('Books to Dominate Married Women', 'Books to Dominate Married Women', 'translated'),
        ('Beautiful Females in the Underground Prison', 'Beautiful Females in the Underground Prison', 'translated'),
        ('A World Where All Women Are Managed By Men', 'A World Where All Women Are Managed By Men', 'translated'),
        ('The Training Record of a Married Woman', 'The Training Record of a Married Woman', 'translated'),
    ]
    # First matching tag wins, mirroring the original early-return loop.
    hit = next(
        ((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']),
        None,
    )
    if hit is None:
        return False
    name, tl_type = hit
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class PluginStore(CRUDMixin, db.Model):
    """Per-plugin key/value settings storage."""

    id = db.Column(db.Integer, primary_key=True)
    key = db.Column(db.Unicode(255), nullable=False)
    value = db.Column(db.PickleType, nullable=False)
    value_type = db.Column(db.Enum(SettingValueType), nullable=False)
    extra = db.Column(db.PickleType, nullable=True)
    plugin_id = db.Column(db.Integer, db.ForeignKey('plugin_registry.id', ondelete='CASCADE'))
    name = db.Column(db.Unicode(255), nullable=False)
    description = db.Column(db.Text, nullable=True)
    # A plugin may not define the same setting key twice.
    __table_args__ = (UniqueConstraint('key', 'plugin_id', name='plugin_kv_uniq'),)

    def __repr__(self):
        return '<PluginSetting plugin={} key={} value={}>'.format(self.plugin.name, self.key, self.value)

    @classmethod
    def get_or_create(cls, plugin_id, key):
        """Returns the stored setting for (plugin_id, key), or a fresh
        unsaved PluginStore instance if none exists yet.

        Fix: the method takes ``cls`` and uses ``cls.query`` but was
        missing the @classmethod decorator, so calling it on the class
        (``PluginStore.get_or_create(pid, key)``) would have bound the
        first argument incorrectly.
        """
        obj = cls.query.filter_by(plugin_id=plugin_id, key=key).first()
        if obj is not None:
            return obj
        return PluginStore()
def test_resolve_prefilled_values():
    """resolve() must pass pre-instantiated (non-JSON) values, such as the
    Language object, straight through to the registered function.

    Fixes: the registration line was the bare expression
    ``_registry.optimizers('prefilled.v1')`` (a mangled decorator, so the
    function was never registered), and the config used an empty-string
    key where the '@optimizers' reference key belongs — as written the
    test could not resolve anything.
    """
    class Language(object):
        def __init__(self):
            ...

    @my_registry.optimizers('prefilled.v1')
    def prefilled(nlp: Language, value: int = 10):
        return (nlp, value)

    # 'nlp' is already an instance; resolve() must not try to interpret it.
    config = {'test': {'@optimizers': 'prefilled.v1', 'nlp': Language(), 'value': 50}}
    resolved = my_registry.resolve(config, validate=True)
    result = resolved['test']
    assert isinstance(result[0], Language)
    assert (result[1] == 50)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.