code stringlengths 281 23.7M |
|---|
_deserializable  # NOTE(review): looks like a stripped @register_deserializable decorator -- confirm upstream
class PoeBot(BaseBot, PoeBot):
    """Poe bot adapter: routes incoming Poe queries to the underlying bot.

    NOTE(review): the base list `(BaseBot, PoeBot)` shadows this class's own
    name -- presumably the second base is the Poe SDK class of the same name
    imported elsewhere in the file; confirm.
    """

    def __init__(self):
        # Number of prior messages forwarded as conversational context.
        self.history_length = 5
        super().__init__()

    async def get_response(self, query):
        """Yield a text event answering the latest message in *query*."""
        last_message = query.query[-1].content
        # Fix: `history` must exist even when building it fails below,
        # otherwise handle_message() raises NameError instead of degrading
        # gracefully to a history-less message.
        history = None
        try:
            history = (
                [f'{m.role}: {m.content}'
                 for m in query.query[-(self.history_length + 1):-1]]
                if len(query.query) > 0
                else None
            )
        except Exception as e:
            logging.error(f'Error when processing the chat history. Message is being sent without history. Error: {e}')
        answer = self.handle_message(last_message, history)
        yield self.text_event(answer)

    def handle_message(self, message, history: Optional[List[str]] = None):
        """Dispatch `/add ` commands to data ingestion, everything else to the bot."""
        if message.startswith('/add '):
            response = self.add_data(message)
        else:
            response = self.ask_bot(message, history)
        return response

    def ask_bot(self, message, history: List[str]):
        """Query the underlying LLM with *history* as context; never raises."""
        try:
            self.app.llm.set_history(history=history)
            response = self.query(message)
        except Exception:
            logging.exception(f'Failed to query {message}.')
            response = 'An error occurred. Please try again!'
        return response

    def start(self):
        start_command()
class TestExample(TestDSLBase):
    """Tests for the context/example DSL (nested context functions containing
    example methods).

    NOTE(review): bare expressions below such as `(name)`, `(skip=True)` and
    `('same name')` look like the argument lists of stripped decorators
    (@context/@example/@xexample variants); the decorator names appear to
    have been lost during extraction -- confirm against upstream.
    """
    def test_can_be_named_from_decorator(self):
        # The decorator argument supplies the example's display name.
        name = 'example name'
        def top_context(context):
            (name)
            def whatever(self):
                pass
        self.assertEqual(str(Context.all_top_level_contexts[0].examples[0]), name)
    def test_can_be_named_from_function(self):
        # Without an explicit name, the function name (underscores become
        # spaces) is used as the display name.
        def top_context(context):
            def Example_name(self):
                pass
        self.assertEqual(str(Context.all_top_level_contexts[0].examples[0]), 'Example name')
    def test_cant_create_example_outside_context(self):
        # Defining an example with no enclosing context must fail loudly.
        with self.assertRaisesRegex(TypeError, 'Can not create example without a parent context'):
            def whatever(self):
                pass
    def test_skip_with_xexample(self):
        # Every spelling of "skip" should mark the example as skipped.
        def top_context(context):
            def skip_with_xexample(self):
                pass
            (skip=True)
            def skip_with_skip_arg(self):
                pass
            ('skip_with_name_and_skip_arg', skip=True)
            def skip_with_name_and_skip_arg(self):
                pass
            (skip_unless=False)
            def skip_with_skip_unless_arg(self):
                pass
            ('skip_with_name_and_skip_unless_arg', skip_unless=False)
            def skip_with_name_and_skip_unless_arg(self):
                pass
        self.assertTrue(Context.all_top_level_contexts[0].examples)
        for example in Context.all_top_level_contexts[0].examples:
            self.assertTrue(example.skip)
    def test_inherits_skip_from_xcontext(self):
        # Examples inside a skipped context are skipped too.
        def skipped_context(context):
            def also_skipped(self):
                pass
        self.assertTrue(Context.all_top_level_contexts[0].examples[0].skip)
    def test_focus_with_fexample(self):
        def top_context(context):
            def focused(self):
                pass
        self.assertTrue(Context.all_top_level_contexts[0].examples[0].focus)
    def test_inherits_focus_from_fcontext(self):
        # Examples inside a focused context are focused too.
        def focused_context(context):
            def also_focused(self):
                pass
        self.assertTrue(Context.all_top_level_contexts[0].examples[0].focus)
    def test_cant_call_example_function(self):
        # Example functions are collected by the DSL, never called directly.
        with self.assertRaisesRegex(BaseException, 'This function should not be called outside test code.'):
            def top_context(context):
                def not_callable(self):
                    pass
                not_callable(None)
    def test_cant_create_two_with_same_name(self):
        with self.assertRaisesRegex(RuntimeError, "An example with the same name 'same name' is already defined"):
            def top_context(context):
                def same_name(self):
                    pass
                ('same name')
                def whatever(self):
                    pass
    def test_can_call_unittest_assert_methods(self):
        # Examples run with unittest's assert* helpers available on self.
        def unittest_assert_methods(context):
            def has_assert_true(self):
                self.assertTrue(True)
        self.run_first_context_first_example()
    def test_can_define_sub_examples(self):
        # Failing sub-examples do not abort the example; all failures are
        # aggregated (here: two sub failures plus the final one).
        ex1 = AssertionError('Sub failure 1')
        ex2 = AssertionError('Sub failure 2')
        exfinal = RuntimeError('Final failure')
        def sub_examples(context):
            def can_define_sub_examples(self):
                with self.sub_example():
                    assert True
                with self.sub_example():
                    raise ex1
                with self.sub_example():
                    raise ex2
                raise exfinal
        try:
            self.run_first_context_first_example()
        except AggregatedExceptions as e:
            self.assertEqual(len(e.exceptions), 3)
            self.assertTrue((ex1 in e.exceptions))
            self.assertTrue((ex2 in e.exceptions))
            self.assertTrue((exfinal in e.exceptions))
        else:
            raise AssertionError('Expected test to fail') |
def verify_asset(fledge_url, total_assets, count, wait_time):
    """Poll GET /fledge/asset until `total_assets` assets exist.

    Retries up to `count` times, sleeping `wait_time * 6` seconds between
    attempts; asserts on the last observed asset count if the target is
    never reached.
    """
    get_url = '/fledge/asset'
    # Fix: `result` was unbound when count == 0, so the final assert raised
    # NameError instead of a clean assertion failure.
    result = []
    for attempt in range(count):
        result = utils.get_request(fledge_url, get_url)
        if total_assets == len(result):
            print('Total {} asset created'.format(len(result)))
            return
        if attempt < (count - 1):
            # Fix: don't sleep after the final attempt -- nothing follows it.
            time.sleep(wait_time * 6)
    assert total_assets == len(result)
class MyEpochType(sqlalchemy.types.TypeDecorator):
    """Stores ``datetime.date`` values as integer days since the Unix epoch."""

    impl = sqlalchemy.Integer
    epoch = datetime.date(1970, 1, 1)

    def process_bind_param(self, value, dialect):
        # date -> int: whole days elapsed since 1970-01-01
        delta = value - self.epoch
        return delta.days

    def process_result_value(self, value, dialect):
        # int -> date: shift the epoch forward by the stored day count
        offset = datetime.timedelta(days=value)
        return self.epoch + offset
class EmailSubjectLoader():
    """Resolves an email template name to its subject line.

    Entries from *templates_overrides* win over those built from *templates*.
    """

    def __init__(self, templates: list['EmailTemplate'], *, templates_overrides: (dict[(EmailTemplateType, 'EmailTemplate')] | None)=None):
        self.templates_map = _templates_list_to_map(templates)
        if templates_overrides:
            # Overlay the overrides on top of the base mapping.
            self.templates_map.update(templates_overrides)

    def __call__(self, name: str) -> str:
        template = self.templates_map[name]
        return template.subject
class CmdTalk(EvAdventureCommand):
    """In-game `talk` command: start a conversation with a talkative NPC."""

    key = 'talk'

    def func(self):
        target = self.caller.search(self.args)
        if not target:
            # .search() already reported the lookup failure to the caller.
            return
        if inherits_from(target, EvAdventureTalkativeNPC):
            target.at_talk(self.caller)
            return
        self.caller.msg(f'{target.get_display_name(looker=self.caller)} does not seem very talkative.')
()
# NOTE(review): the bare `()` above looks like a stripped decorator
# (possibly @pytest.fixture()); confirm against upstream.
def graph_phi_fct_in_head1(variable_u, variable_v) -> Tuple[(List[BasicBlock], ControlFlowGraph)]:
    """Build a single-block CFG with a self-loop whose head holds two Phi
    functions followed by an assignment; returns ([block], cfg).

    The Phi origin maps use ``None`` for the value entering from outside the
    loop and the block itself for the value carried around the back edge.
    """
    instructions = [Phi(variable_v[1], [variable_v[0], variable_u[1]]), Phi(variable_u[1], [variable_v[0], variable_u[2]]), Assignment(variable_u[2], BinaryOperation(OperationType.plus, [variable_v[1], Constant(10)]))]
    node = BasicBlock(0, instructions[:])
    # Record which predecessor each phi operand comes from (None = entry edge).
    node.instructions[0]._origin_block = {None: variable_v[0], node: variable_u[1]}
    node.instructions[1]._origin_block = {None: variable_v[0], node: variable_u[2]}
    cfg = ControlFlowGraph()
    cfg.add_node(node)
    # Self-loop: the block is its own successor.
    cfg.add_edges_from([UnconditionalEdge(node, node)])
    return ([node], cfg) |
def test_cursor_outside_of_data_path_not_found(response_with_body):
    """Cursor pagination: when the configured cursor field ('meta.next') is
    absent from the response data path, no next request must be produced."""
    config = CursorPaginationConfiguration(cursor_param='after', field='meta.next')
    request_params: SaaSRequestParams = SaaSRequestParams(method=HTTPMethod.GET, path='/conversations')
    paginator = CursorPaginationStrategy(config)
    next_request: SaaSRequestParams = paginator.get_next_request(request_params, {}, response_with_body, 'conversations')
    assert (next_request is None) |
(base=RequestContextTask, name='send.after.event.mail')
# NOTE(review): the line above looks like the argument list of a stripped
# task decorator (e.g. @celery.task(...)); confirm against upstream.
def send_after_event_mail():
    """Send 'after event' mails for published events that ended within the
    last day: one mail per unique organizer/owner address and one per unique
    (non-overridden) speaker address."""
    current_time = datetime.datetime.now()
    # Published events that have already ended, but less than one day ago.
    events = Event.query.filter_by(state='published', deleted_at=None).filter((Event.ends_at < current_time), ((current_time - Event.ends_at) < datetime.timedelta(days=1))).all()
    for event in events:
        organizers = get_user_event_roles_by_role_name(event.id, 'organizer')
        speakers = Speaker.query.filter_by(event_id=event.id, deleted_at=None).all()
        owner = get_user_event_roles_by_role_name(event.id, 'owner').first()
        unique_emails = set()           # organizers + owner
        unique_emails_speakers = set()  # speakers, deduplicated
        for speaker in speakers:
            # Speakers with an overridden email address are excluded.
            if (not speaker.is_email_overridden):
                unique_emails_speakers.add(speaker.user.email)
        for organizer in organizers:
            unique_emails.add(organizer.user.email)
        if owner:
            unique_emails.add(owner.user.email)
        for email in unique_emails:
            send_email_after_event(email, event.name)
        for email in unique_emails_speakers:
            send_email_after_event_speaker(email, event.name) |
def get_limb_direction(arm, closest_degrees=45):
    """Return the angle (in degrees) of the limb from joint arm[0] to
    arm[2], snapped to the nearest multiple of *closest_degrees*.

    Joints are mappings with 'x' and 'y' keys.  270 degrees is normalized
    to -90 so results stay in a (-90, 270) style range.
    """
    dy = arm[2]['y'] - arm[0]['y']
    dx = arm[2]['x'] - arm[0]['x']
    if dx == 0:
        # Fix: atan(dy / dx) raised ZeroDivisionError for vertical limbs.
        # Use the limit of atan(dy/dx) as dx -> 0: +90 or -90.
        angle = 90.0 if dy >= 0 else -90.0
    else:
        angle = degrees(atan(dy / dx))
        if dx < 0:
            # atan only covers (-90, 90); shift into the left half-plane.
            angle += 180
    # Snap to the nearest multiple of closest_degrees.
    mod_close = angle % closest_degrees
    angle -= mod_close
    if mod_close > (closest_degrees / 2):
        angle += closest_degrees
    angle = int(angle)
    if angle == 270:
        angle = -90
    return angle
class AutoReconnect(threading.Thread):
    """Background thread that pings an AMI client and transparently re-logs
    in when the connection drops.

    The client's ``login``/``logoff`` methods are wrapped on construction;
    the first successful login starts the monitor loop, and ``logoff`` stops
    it and restores the original methods.
    """

    def __init__(self, ami_client, delay=0.5, on_disconnect=(lambda *args: None), on_reconnect=(lambda *args: None)):
        super(AutoReconnect, self).__init__()
        self.on_reconnect = on_reconnect
        self.on_disconnect = on_disconnect
        self.delay = delay
        # Created lazily on the first successful login (see _login_wrapper).
        self.finished = None
        self._ami_client = ami_client
        self._login_args = None
        self._login = None
        self._logoff = None
        self._prepare_client()

    def _prepare_client(self):
        # Keep the real methods and substitute our wrappers on the client.
        self._login = self._ami_client.login
        self._logoff = self._ami_client.logoff
        self._ami_client.login = self._login_wrapper
        self._ami_client.logoff = self._logoff_wrapper

    def _rollback_client(self):
        # Restore the client's original login/logoff methods.
        self._ami_client.login = self._login
        self._ami_client.logoff = self._logoff

    def _login_wrapper(self, *args, **kwargs):
        callback = kwargs.pop('callback', None) or (lambda *a, **k: None)

        def on_login(response, *a, **k):
            if not response.is_error():
                if self._login_args is None:
                    # First successful login: start the monitor thread.
                    self.finished = threading.Event()
                    self.start()
                # Remember the credentials for future reconnect attempts.
                self._login_args = (args, kwargs)
            callback(response, *a, **k)

        kwargs['callback'] = on_login
        return self._login(*args, **kwargs)

    def _logoff_wrapper(self, *args, **kwargs):
        # Fix: logoff can be called before any successful login, in which
        # case the monitor never started and `self.finished` is still None;
        # the unconditional .set() raised AttributeError.
        if self.finished is not None:
            self.finished.set()
        self._rollback_client()
        return self._logoff(*args, **kwargs)

    def ping(self):
        """Return True if the client answers a Ping action; otherwise call
        on_disconnect and return False."""
        try:
            f = self._ami_client.send_action(Action('Ping'))
            response = f.response
            if (response is not None) and (not response.is_error()):
                return True
            self.on_disconnect(self._ami_client, response)
        except Exception as ex:
            self.on_disconnect(self._ami_client, ex)
        return False

    def try_reconnect(self):
        """Attempt a login with the remembered credentials; True on success."""
        try:
            f = self._login(*self._login_args[0], **self._login_args[1])
            response = f.response
            if (response is not None) and (not response.is_error()):
                self.on_reconnect(self._ami_client, response)
                return True
        except Exception:
            # Fix: was a bare `except:`, which would also swallow
            # SystemExit/KeyboardInterrupt.
            pass
        return False

    def run(self):
        # Periodically ping; on failure, try to re-login until stopped.
        self.finished.wait(self.delay)
        while not self.finished.is_set():
            if not self.ping():
                self.try_reconnect()
            self.finished.wait(self.delay)

    def __del__(self):
        self._rollback_client()
class _DummyModelBase(type):
def __new__(mcs, name, bases, attrs):
meta = attrs.pop('Meta', None)
new_class = type.__new__(mcs, name, bases, attrs)
if meta:
meta.model_name = name.lower()
meta.concrete_model = new_class
setattr(new_class, '_meta', meta)
else:
raise AttributeError(('Class %s has no "class Meta" definition' % name))
return new_class |
def BuildTree(records):
    """Build a tree of Node objects from flat (record_id, parent_id) records.

    Record ids must form the contiguous range 0..n-1; record 0 is the root.
    Raises ValueError when an id is missing, duplicated or out of range.
    """
    parents = {}
    nodes = {}
    sorted_ids = sorted(record.record_id for record in records)
    for record in records:
        validate_record(record)
        parents[record.record_id] = record.parent_id
        nodes[record.record_id] = Node(record.record_id)
    root = None
    for expected, record_id in enumerate(sorted_ids):
        # Ids must be exactly 0, 1, 2, ... with no gaps or duplicates.
        if record_id != expected:
            raise ValueError('Record id is invalid or out of order.')
        if record_id == 0:
            root = nodes[record_id]
        else:
            nodes[parents[record_id]].children.append(nodes[record_id])
    return root
def StockCutter(child_rects, parent_rects, output_json=True):
    """Pack all child rectangles into the first parent sheet with CP-SAT.

    child_rects: (width, height) pairs to place.  parent_rects: the first
    entry is used as the sheet size ("horizon").  Returns a JSON string of
    the solution dict when output_json is True, else the raw solution list.
    """
    model = cp_model.CpModel()
    # Only the first parent sheet is used as the placement area.
    horizon = parent_rects[0]
    total_parent_area = (horizon[0] * horizon[1])
    sheet_type = collections.namedtuple('sheet_type', 'x1 y1 x2 y2 x_interval y_interval is_extra')
    all_vars = {}
    total_child_area = 0
    x_intervals = []
    y_intervals = []
    for (rect_id, rect) in enumerate(child_rects):
        width = rect[0]
        height = rect[1]
        area = (width * height)
        total_child_area += area
        suffix = ('_%i_%i' % (width, height))
        # Interval variables enforce x2 = x1 + width (resp. y2 = y1 + height).
        x1_var = model.NewIntVar(0, horizon[0], ('x1' + suffix))
        x2_var = model.NewIntVar(0, horizon[0], ('x2' + suffix))
        x_interval_var = model.NewIntervalVar(x1_var, width, x2_var, ('x_interval' + suffix))
        y1_var = model.NewIntVar(0, horizon[1], ('y1' + suffix))
        y2_var = model.NewIntVar(0, horizon[1], ('y2' + suffix))
        y_interval_var = model.NewIntervalVar(y1_var, height, y2_var, ('y_interval' + suffix))
        x_intervals.append(x_interval_var)
        y_intervals.append(y_interval_var)
        all_vars[rect_id] = sheet_type(x1=x1_var, y1=y1_var, x2=x2_var, y2=y2_var, x_interval=x_interval_var, y_interval=y_interval_var, is_extra=False)
    '\n FIXME: experiment\n Experment: treat the remaining area as small units of 1x1 rectangles. Push these rects to higher x,y.\n '
    # No two child rectangles may overlap in 2D.
    model.AddNoOverlap2D(x_intervals, y_intervals)
    solver = cp_model.CpSolver()
    status = solver.Solve(model)
    # Only a single solution is extracted, even if more exist.
    singleSolution = getSingleSolution(solver, all_vars)
    int_solutions = [singleSolution]
    output = {'statusName': solver.StatusName(status), 'numSolutions': '1', 'numUniqueSolutions': '1', 'solutions': int_solutions}
    print('Time:', solver.WallTime())
    print('Status:', output['statusName'])
    print('Solutions found :', output['numSolutions'])
    print('Unique solutions: ', output['numUniqueSolutions'])
    if output_json:
        return json.dumps(output)
    else:
        return int_solutions |
class GaussianTimeOutSimulatorConfig(TimeOutSimulatorConfig):
    # Config for GaussianTimeOutSimulator (structured-config style).
    _target_: str = fullclassname(GaussianTimeOutSimulator)
    # Wall-clock timeout budget per training round.
    timeout_wall_per_round: float = 1.0
    # Total wall-clock time after which FL training stops.
    fl_stopping_time: float = 1.0
    # NOTE(review): a class-level instance default is shared by all configs;
    # if this base is a dataclass this likely needs field(default_factory=...)
    # -- confirm against how the config framework instantiates it.
    duration_distribution_generator: PerExampleGaussianDurationDistributionConfig = PerExampleGaussianDurationDistributionConfig() |
def key_201_CosSin_2012():
    """Return the 201-point cosine/sine digital linear filter (Key, 2012).

    NOTE(review): the coefficient arrays are reproduced verbatim; many
    entries render as plain `0.` here, which looks like precision lost
    during extraction -- confirm against the published filter before use.
    """
    dlf = DigitalFilter('Key 201 CosSin (2012)', 'key_201_CosSin_2012')
    # Filter abscissae (base), scaling factor, and cos/sin weights.
    dlf.base = np.array([9.e-07, 1.e-06, 1.e-06, 1.e-06, 1.e-06, 1.e-06, 2.e-06, 2.e-06, 2.e-06, 3.e-06, 3.e-06, 4.e-06, 4.e-06, 5.e-06, 6.e-06, 7.e-06, 8.e-06, 9.e-06, 1.e-05, 1.e-05, 1.e-05, 1.e-05, 1.e-05, 2.e-05, 2.e-05, 2.e-05, 3.e-05, 3.e-05, 4.e-05, 5.e-05, 5.e-05, 6.e-05, 7.e-05, 9.e-05, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1.0, 1., 1., 1., 1., 2., 2., 2., 3., 3., 4., 4., 5., 6., 7., 8., 9., 10., 12., 14., 16., 18., 21., 24., 28., 32., 37., 42., 49., 56., 64., 74., 85., 98., 112., 129., 149., 171., 196., 226., 259., 298., 343., 394., 453., 520., 598., 687., 789., 907., 1043., 1198., 1377., 1582., 1818., 2090., 2401., 2760., 3171., 3644., 4188., 4812., 5530., 6355., 7302., 8391., 9643., 11081., 12733., 14632., 16814., 19322., 22203., 25514., 29319., 33691., 38715., 44489., 51123., 58747., 67507., 77574., 89143., 102436., 117712., 135266., 155437., 178617., 205253., 235861., 271034., 311451., 357896., 411267., 472597., 543073., 624058., 717121., 824061., 946948., 1088161.])
    dlf.factor = np.array([1.])
    dlf.cos = np.array([0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., 3.e-05, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 0., (- 0.), (- 1.), (- 0.), 1., 0., (- 1.), 2., (- 1.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 9.e-05), 8.e-05, (- 7.e-05), 6.e-05, (- 5.e-05), 5.e-05, (- 4.e-05), 3.e-05, (- 3.e-05), 2.e-05, (- 2.e-05), 1.e-05, (- 1.e-05), 8.e-06, (- 5.e-06), 2.e-06, (- 1.e-06), 3.e-07, (- 6.e-08)])
    dlf.sin = np.array([(- 5.e-10), 4.e-09, (- 1.e-08), 5.e-08, (- 1.e-07), 2.e-07, (- 3.e-07), 4.e-07, (- 6.e-07), 7.e-07, (- 9.e-07), 1.e-06, (- 1.e-06), 1.e-06, (- 1.e-06), 1.e-06, (- 2.e-06), 2.e-06, (- 2.e-06), 2.e-06, (- 3.e-06), 3.e-06, (- 3.e-06), 4.e-06, (- 4.e-06), 5.e-06, (- 5.e-06), 6.e-06, (- 7.e-06), 8.e-06, (- 8.e-06), 9.e-06, (- 1.e-05), 1.e-05, (- 1.e-05), 1.e-05, (- 1.e-05), 1.e-05, (- 1.e-05), 1.e-05, (- 2.e-05), 2.e-05, (- 2.e-05), 2.e-05, (- 2.e-05), 2.e-05, (- 3.e-05), 3.e-05, (- 3.e-05), 3.e-05, (- 4.e-05), 4.e-05, (- 4.e-05), 5.e-05, (- 5.e-05), 5.e-05, (- 6.e-05), 6.e-05, (- 7.e-05), 7.e-05, (- 8.e-05), 9.e-05, (- 9.e-05), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 8.e-05), 0., 4.e-05, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 1., 0., (- 1.), (- 0.), 2., (- 1.), 0., 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 0.), 0., (- 8.e-05), 7.e-05, (- 6.e-05), 5.e-05, (- 4.e-05), 4.e-05, (- 3.e-05), 3.e-05, (- 2.e-05), 2.e-05, (- 2.e-05), 1.e-05, (- 1.e-05), 1.e-05, (- 1.e-05), 1.e-05, (- 9.e-06), 8.e-06, (- 7.e-06), 6.e-06, (- 5.e-06), 5.e-06, (- 4.e-06), 3.e-06, (- 3.e-06), 2.e-06, (- 2.e-06), 1.e-06, (- 1.e-06), 8.e-07, (- 5.e-07), 3.e-07, (- 1.e-07), 6.e-08, (- 1.e-08), 3.e-09])
    return dlf |
def test_all_uids_found_in_database(backend_db, common_db):
    """all_uids_found_in_database is True only when every listed uid exists."""
    backend_db.insert_object(TEST_FW)
    assert (common_db.all_uids_found_in_database([TEST_FW.uid]) is True)
    # TEST_FO not inserted yet, so the combined lookup must fail.
    assert (common_db.all_uids_found_in_database([TEST_FW.uid, TEST_FO.uid]) is False)
    backend_db.insert_object(TEST_FO)
    assert (common_db.all_uids_found_in_database([TEST_FW.uid, TEST_FO.uid]) is True) |
def test_should_merge_deny_statments():
    # NOTE(review): "statments" is a typo for "statements"; kept because test
    # names may be selected by name in CI.
    """Merging two Deny statements concatenates their Action lists."""
    statement1 = Statement(Effect='Deny', Action=[Action('some-service', 'some-action')], Resource=['*'])
    statement2 = Statement(Effect='Deny', Action=[Action('some-service', 'some-other-action')], Resource=['*'])
    merged = Statement(Effect='Deny', Action=[Action('some-service', 'some-action'), Action('some-service', 'some-other-action')], Resource=['*'])
    assert (statement1.merge(statement2) == merged) |
class OptMark(Options):
    """Option wrapper exposing mark configuration values.

    NOTE(review): every attribute below appears twice -- a getter and a
    setter with the same name.  The original almost certainly used
    @property / @<name>.setter decorators that were stripped during
    extraction; as written, each later def simply overrides the earlier
    one.  Confirm against upstream before relying on this block.
    """
    def type(self):
        return self._config_get()
    def type(self, text):
        self._config(text)
    def interactive(self):
        return self._config_get()
    def interactive(self, flag):
        self._config(flag)
    def name(self):
        return self._config_get()
    def name(self, text):
        self._config(text)
    def role(self):
        return self._config_get()
    def role(self, text):
        self._config(text)
    def style(self):
        return self._config_get()
    def style(self, text):
        self._config(text)
    def zindex(self):
        return self._config_get()
    def zindex(self, num):
        self._config(num)
    def align(self):
        return self._config_get()
    def align(self, text):
        self._config(text)
    def dx(self):
        return self._config_get()
    def dx(self, text):
        self._config(text) |
def get_pseudo_3D_cylinder_box_domain(x0=(0.0, 0.0, 0.0), L=(1.0, 1.0, 1.0), x2=None, x3=None, radius=0.1, center=(0.5, 0.5), n_points_on_obstacle=((2 * 21) - 2), cross_section=circular_cross_section, thetaOffset=0.0, he=1.0, he2=None, he3=None):
    """Build a pseudo-3D (extruded 2D) box domain containing a cylindrical
    obstacle and an inner refinement box, as a piecewise linear complex.

    Returns (domain, boundaryTags) where boundaryTags maps the six outer
    boundary names to integer flags; he/he2/he3 set the mesh size constraint
    in the outer region, obstacle region and inner box respectively.
    """
    # Fix: identity comparison with None (was `== None`) for all defaults.
    if (he2 is None):
        he2 = he
    if (he3 is None):
        he3 = he2
    x1 = ((x0[0] + L[0]), (x0[1] + L[1]), (x0[2] + L[2]))
    # Default inner box spans the middle half of the domain footprint.
    if (x2 is None):
        x2 = ((0.25 * (x0[0] + x1[0])), (0.25 * (x0[1] + x1[1])))
    if (x3 is None):
        x3 = ((0.75 * (x0[0] + x1[0])), (0.75 * (x0[1] + x1[1])))
    boundaries = ['left', 'right', 'bottom', 'top', 'front', 'back']
    boundaryTags = dict([(key, (i + 1)) for (i, key) in enumerate(boundaries)])
    vertexKeys = ['left_bottom', 'right_bottom', 'right_top', 'left_top']
    vertices = [[x0[0], x0[1]], [x1[0], x0[1]], [x1[0], x1[1]], [x0[0], x1[1]]]
    vertexFlags = [boundaryTags['bottom'], boundaryTags['bottom'], boundaryTags['top'], boundaryTags['top']]
    # Discretize the obstacle cross-section into n_points_on_obstacle points.
    theta = thetaOffset
    pb = cross_section(center, radius, theta)
    vertices.append([pb[0], pb[1]])
    vertexKeys.append(('obstacle_' + repr(0)))
    vertexFlags.append(boundaryTags['back'])
    for gb in range(1, n_points_on_obstacle):
        theta = ((((float(gb) / float(n_points_on_obstacle)) * 2.0) * math.pi) + thetaOffset)
        pb = cross_section(center, radius, theta)
        vertexKeys.append(('obstacle_' + repr(gb)))
        vertices.append([pb[0], pb[1]])
        vertexFlags.append(boundaryTags['back'])
    vertexKeys.extend(['box_1', 'box_2', 'box_3', 'box_4'])
    vertices.extend([[x2[0], x2[1]], [x3[0], x2[1]], [x3[0], x3[1]], [x2[0], x3[1]]])
    vertexFlags.extend([boundaryTags['back'], boundaryTags['back'], boundaryTags['back'], boundaryTags['back']])
    # Extrude the 2D vertex set to two z-planes: back = x0[2], front = x1[2].
    vertices3dDict = {}
    vertices3d = []
    vertexFlags3d = []
    facets3d = []
    facetFlags3d = []
    facetHoles3d = []
    front_cylinder = []
    back_cylinder = []
    front_box = []
    back_box = []
    for (vN, v) in enumerate(vertices):
        vertices3dDict[(vertexKeys[vN] + '_back')] = vN
        vertices3d.append([v[0], v[1], x0[2]])
        vertexFlags3d.append(boundaryTags['back'])
        if ('obstacle' in vertexKeys[vN]):
            back_cylinder.append(vN)
        if ('box' in vertexKeys[vN]):
            back_box.append(vN)
    for (vN, v) in enumerate(vertices):
        vertices3dDict[(vertexKeys[vN] + '_front')] = (vN + len(vertices))
        vertices3d.append([v[0], v[1], x1[2]])
        vertexFlags3d.append(boundaryTags['front'])
        if ('obstacle' in vertexKeys[vN]):
            front_cylinder.append((vN + len(vertices)))
        if ('box' in vertexKeys[vN]):
            front_box.append((vN + len(vertices)))
    # Six outer facets; the front/back facets carry the cylinder and box
    # vertex loops as inner boundaries.
    facets3d.append([[vertices3dDict['left_bottom_front'], vertices3dDict['left_bottom_back'], vertices3dDict['left_top_back'], vertices3dDict['left_top_front']]])
    facetFlags3d.append(boundaryTags['left'])
    facetHoles3d.append([])
    facets3d.append([[vertices3dDict['right_bottom_front'], vertices3dDict['right_bottom_back'], vertices3dDict['right_top_back'], vertices3dDict['right_top_front']]])
    facetFlags3d.append(boundaryTags['right'])
    facetHoles3d.append([])
    facets3d.append([[vertices3dDict['left_top_front'], vertices3dDict['right_top_front'], vertices3dDict['right_top_back'], vertices3dDict['left_top_back']]])
    facetFlags3d.append(boundaryTags['top'])
    facetHoles3d.append([])
    facets3d.append([[vertices3dDict['left_bottom_front'], vertices3dDict['right_bottom_front'], vertices3dDict['right_bottom_back'], vertices3dDict['left_bottom_back']]])
    facetFlags3d.append(boundaryTags['bottom'])
    facetHoles3d.append([])
    facets3d.append([[vertices3dDict['left_bottom_front'], vertices3dDict['right_bottom_front'], vertices3dDict['right_top_front'], vertices3dDict['left_top_front']], front_cylinder, front_box])
    facetFlags3d.append(boundaryTags['front'])
    facetHoles3d.append([])
    facets3d.append([[vertices3dDict['left_bottom_back'], vertices3dDict['right_bottom_back'], vertices3dDict['right_top_back'], vertices3dDict['left_top_back']], back_cylinder, back_box])
    facetFlags3d.append(boundaryTags['back'])
    facetHoles3d.append([])
    # Interior side facets of the extruded cylinder (flag 0 = interior).
    for fN in range((n_points_on_obstacle - 1)):
        facets3d.append([[front_cylinder[fN], back_cylinder[fN], back_cylinder[(fN + 1)], front_cylinder[(fN + 1)]]])
        facetFlags3d.append(0)
        facetHoles3d.append([])
    facets3d.append([[front_cylinder[(- 1)], back_cylinder[(- 1)], back_cylinder[0], front_cylinder[0]]])
    facetFlags3d.append(0)
    facetHoles3d.append([])
    # Interior side facets of the extruded refinement box.
    for fN in range(3):
        facets3d.append([[front_box[fN], back_box[fN], back_box[(fN + 1)], front_box[(fN + 1)]]])
        facetFlags3d.append(0)
        facetHoles3d.append([])
    facets3d.append([[front_box[(- 1)], back_box[(- 1)], back_box[0], front_box[0]]])
    facetFlags3d.append(0)
    facetHoles3d.append([])
    # One region seed per mesh-size zone: obstacle, inner box, outer domain.
    regions = [(center[0], center[1], (0.5 * (x0[2] + x1[2]))), (((0.1 * x2[0]) + (0.9 * x3[0])), ((0.1 * x2[1]) + (0.9 * x3[1])), (0.5 * (x0[2] + x1[2]))), (((0.1 * x0[0]) + (0.9 * x1[0])), ((0.1 * x0[1]) + (0.9 * x1[1])), (0.5 * (x0[2] + x1[2])))]
    regionFlags = [1, 2, 3]
    regionConstraints = [(0.5 * (he2 ** 2)), (0.5 * (he3 ** 2)), (0.5 * (he ** 2))]
    domain = Domain.PiecewiseLinearComplexDomain(vertices=vertices3d, vertexFlags=vertexFlags3d, facets=facets3d, facetFlags=facetFlags3d, facetHoles=facetHoles3d, regions=regions, regionFlags=regionFlags, regionConstraints=regionConstraints)
    domain.boundaryTags = boundaryTags
    return (domain, boundaryTags)
class BgpProcessor(Activity):
    """Activity that drains queued BGP destinations, processing them in
    bounded batches per scheduling cycle."""
    # Upper bound on destinations processed in one cycle (fairness cap).
    MAX_DEST_PROCESSED_PER_CYCLE = 100
    # Intrusive circular-list type shared by both work queues.
    _DestQueue = circlist.CircularListType(next_attr_name='next_dest_to_process', prev_attr_name='prev_dest_to_process')
    def __init__(self, core_service, work_units_per_cycle=None):
        Activity.__init__(self)
        self._core_service = core_service
        # Regular destinations and RT NLRI destinations are queued separately.
        self._dest_queue = BgpProcessor._DestQueue()
        self._rtdest_queue = BgpProcessor._DestQueue()
        # Signalled whenever work is enqueued; cleared when the queue drains.
        self.dest_que_evt = EventletIOFactory.create_custom_event()
        self.work_units_per_cycle = (work_units_per_cycle or BgpProcessor.MAX_DEST_PROCESSED_PER_CYCLE)
    def _run(self, *args, **kwargs):
        # Main loop: RT destinations first (they may change RT filters),
        # then a bounded batch of regular destinations; block when idle.
        while True:
            LOG.debug('Starting new processing run...')
            self._process_rtdest()
            self._process_dest()
            if self._dest_queue.is_empty():
                self.dest_que_evt.clear()
                self.dest_que_evt.wait()
            else:
                # Yield to other greenthreads before the next batch.
                self.pause(0)
    def _process_dest(self):
        # Process at most work_units_per_cycle regular destinations.
        dest_processed = 0
        LOG.debug('Processing destination...')
        while ((dest_processed < self.work_units_per_cycle) and (not self._dest_queue.is_empty())):
            next_dest = self._dest_queue.pop_first()
            if next_dest:
                next_dest.process()
                dest_processed += 1
    def _process_rtdest(self):
        # Drain all queued RT NLRI destinations (no per-cycle cap).
        LOG.debug('Processing RT NLRI destination...')
        if self._rtdest_queue.is_empty():
            return
        else:
            processed_any = False
            while (not self._rtdest_queue.is_empty()):
                next_dest = self._rtdest_queue.pop_first()
                if next_dest:
                    next_dest.process()
                    processed_any = True
            if processed_any:
                # RT NLRI changes can invalidate the current RT filters.
                self._core_service.update_rtfilters()
    def enqueue(self, destination):
        """Queue *destination* for processing and wake the worker loop."""
        if (not destination):
            raise BgpProcessorError(('Invalid destination %s.' % destination))
        dest_queue = self._dest_queue
        # RT constraint routes go to the dedicated RT queue.
        if (destination.route_family == RF_RTC_UC):
            dest_queue = self._rtdest_queue
        # Avoid double-queueing a destination already awaiting processing.
        if (not dest_queue.is_on_list(destination)):
            dest_queue.append(destination)
        self.dest_que_evt.set() |
_os(*metadata.platforms)
# NOTE(review): the line above looks like a stripped platform-restriction
# decorator (e.g. @common.requires_os(...)); confirm against upstream.
def main():
    """Emulate suspicious modification of a macOS calendar (.ics) file for
    detection testing, then remove the created directories."""
    cal_dir = Path(f'{Path.home()}/Library/Calendars/')
    cal_calendar = cal_dir.joinpath('test.calendar', 'Events')
    cal_calendar.mkdir(parents=True, exist_ok=True)
    cal_path = str(cal_calendar.joinpath('test.ics'))
    common.log(f'Executing file modification on {cal_path} to mimic suspicious calendar file modification')
    common.temporary_file_helper('testing', file_name=cal_path)
    # Clean up everything created above.
    common.remove_directory(str(cal_calendar))
    common.remove_directory(str(cal_dir)) |
_deserializable  # NOTE(review): looks like a stripped @register_deserializable decorator -- confirm upstream
class SlackChunker(BaseChunker):
    """Chunker for Slack content backed by a recursive character splitter."""

    def __init__(self, config: Optional[ChunkerConfig]=None):
        # Default: 1000-character chunks, no overlap, plain len() as measure.
        effective_config = config if config is not None else ChunkerConfig(
            chunk_size=1000, chunk_overlap=0, length_function=len)
        splitter = RecursiveCharacterTextSplitter(
            chunk_size=effective_config.chunk_size,
            chunk_overlap=effective_config.chunk_overlap,
            length_function=effective_config.length_function)
        super().__init__(splitter)
('cuda.gemm_rcr_softmax.func_decl')
# NOTE(review): the line above looks like the argument of a stripped
# registry decorator (e.g. @registry.reg(...)); confirm against upstream.
def gen_function_decl(func_attrs, **kwargs):
    """Render the function declaration for a CUDA gemm_rcr_softmax op.

    func_attrs: op attribute dict providing 'name' and two input accessors
    whose original shapes determine the input/weight rank.
    """
    func_name = func_attrs['name']
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    return common_softmax.FUNC_DECL_TEMPLATE.render(func_name=func_name, input_ndims=input_ndims, weight_ndims=weight_ndims, **kwargs) |
def test_int_promotion():
    """Schema resolution: a value written as int must be readable as long,
    float and double, with the value preserved and the Python type following
    the reader schema."""
    int_schema = {'type': 'int'}
    long_schema = {'type': 'long'}
    result = roundtrip(1, int_schema, long_schema)
    assert (result == 1)
    assert isinstance(result, int)
    float_schema = {'type': 'float'}
    result = roundtrip(1, int_schema, float_schema)
    assert (result == 1.0)
    assert isinstance(result, float)
    double_schema = {'type': 'double'}
    result = roundtrip(1, int_schema, double_schema)
    assert (result == 1.0)
    assert isinstance(result, float) |
.skipif((not path_data_tests.exists()), reason='no data tests')
.skipif((nb_proc > 1), reason='No dist in MPI')
# NOTE(review): the two lines above look like stripped @pytest.mark
# decorators; confirm against upstream.
def test_detect_backend_extensions():
    """After generating backend files for the sample sources, the number of
    detected transonic extensions must equal the number of sources minus the
    two that are not transonized."""
    # Start from a clean backend cache directory.
    shutil.rmtree((path_data_tests / f'__{backend_default}__'), ignore_errors=True)
    names = ['assign_func_boost.py', 'assign_func_jit.py', 'block_fluidsim.py', 'blocks_type_hints.py', 'boosted_func_use_import.py', 'class_blocks.py', 'classic.py', 'mixed_classic_type_hint.py', 'type_hint_notemplate.py', 'no_pythran_.py']
    make_backend_files(((path_data_tests / name) for name in names))
    ext_names = detect_transonic_extensions(path_data_tests)
    if can_import_accelerator():
        # The meson test package is handled separately; exclude it here.
        ext_names = [name for name in ext_names if ('package_for_test_meson' not in name)]
    number_not_transonized = 2
    if (len(ext_names) != (len(names) - number_not_transonized)):
        # Print both lists to ease debugging before failing.
        print('ext_names:\n', pformat(sorted(ext_names)), sep='')
        print('names:\n', pformat(sorted(names)), sep='')
        raise RuntimeError
    shutil.rmtree((path_data_tests / f'__{backend_default}__'), ignore_errors=True) |
class TestDeclEnumType(BasePyTestCase):
    """Tests for DeclEnumType's DDL helpers and value conversion."""
    def test_create_does_not_raise_exception(self):
        t = model.DeclEnumType(model.UpdateStatus)
        t.create(self.engine)
    def test_drop_does_not_raise_exception(self):
        t = model.DeclEnumType(model.UpdateStatus)
        t.drop(self.engine)
    def test_process_bind_param_None(self):
        # None passes through unchanged on the way to the database.
        t = model.DeclEnumType(model.UpdateStatus)
        assert (t.process_bind_param(None, self.engine.dialect) is None)
    def test_process_bind_param_truthy_value(self):
        # Enum members are stored by their string value.
        t = model.DeclEnumType(model.UpdateStatus)
        assert (t.process_bind_param(model.UpdateStatus.stable, self.engine.dialect) == 'stable')
    def test_process_result_value_None(self):
        t = model.DeclEnumType(model.UpdateStatus)
        assert (t.process_result_value(None, self.engine.dialect) is None)
    def test_process_result_value_truthy_value(self):
        # Stored strings are converted back into enum members.
        t = model.DeclEnumType(model.UpdateStatus)
        assert (t.process_result_value('testing', self.engine.dialect) == model.UpdateStatus.testing) |
class OptionSeriesVectorSonificationTracksMappingPitch(Options):
    """Option wrapper for the sonification track pitch mapping.

    NOTE(review): every attribute below appears twice -- a getter and a
    setter with the same name.  The original almost certainly used
    @property / @<name>.setter decorators that were stripped during
    extraction; as written, each later def overrides the earlier one.
    Confirm against upstream before relying on this block.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False) |
class DecoratorsTests(TestCase):
    """Tests for the social-auth resolver decorators.

    NOTE(review): bare expressions `_auth`, `_auth_mock`, `_settings(...)`
    and `('social_core...')` below look like stripped decorators (the
    social-auth decorator, mock patches and override_settings); decorator
    names appear to have been lost during extraction -- confirm upstream.
    Indentation was also lost; docstring-only bodies were added to the
    decorated `wrapped` stubs where Python requires a body.
    """
    def test_psa_missing_backend(self):
        _auth
        def wrapped(cls, root, info, provider, *args):
            """Decorated resolver under test."""
        # An unknown provider must surface as GraphQLSocialAuthError.
        with self.assertRaises(exceptions.GraphQLSocialAuthError):
            wrapped(self, None, self.info(), 'unknown', 'token')
    _auth_mock
    _settings(SOCIAL_AUTH_PIPELINE=[])
    def test_psa_invalid_token(self, *args):
        _auth
        def wrapped(cls, root, info, provider, *args):
            """Decorated resolver under test."""
        with self.assertRaises(exceptions.InvalidTokenError):
            wrapped(self, None, self.info(), 'google-oauth2', 'token')
    _auth_mock
    ('social_core.backends.oauth.BaseOAuth2.do_auth')
    def test_psa_do_auth_error(self, *args):
        _auth
        def wrapped(cls, root, info, provider, *args):
            """Decorated resolver under test."""
        with self.assertRaises(exceptions.DoAuthError):
            wrapped(self, None, self.info(), 'google-oauth2', 'token')
    _auth_mock
    def test_social_auth_thenable(self, *args):
        _auth
        def wrapped(cls, root, info, provider, *args):
            return Promise()
        # A resolver returning a Promise must stay thenable after decoration.
        result = wrapped(TestCase, None, self.info(), 'google-oauth2', 'token')
        self.assertTrue(is_thenable(result)) |
class almostequal64(TestCase):
    """Tests for assertAlmostEqual64: comparing an array against a base64
    encoded reference blob."""
    maxDiff = 5000
    actual = numpy.arange(16).reshape(2, 4, 2)
    # Base64-encoded reference that matches `actual`.
    desired = 'eNpjYAgyX2tRbsluVW51yUreOsl6rvVFa0YbXZtQmyqbOTYAmAYJvQ=='
    def test_equal(self):
        self.assertAlmostEqual64(self.actual, self.desired)
    def test_notequal(self):
        # A mismatching reference must produce a detailed per-element report
        # ending with the correct base64 string to paste in.
        with self.assertRaises(AssertionError) as cm:
            self.assertAlmostEqual64(self.actual, 'eNpjYFhrwW51ySrJ+qK1rk2VzR6b7zY6tjG27bbrba/YfrcFALIODB0=')
        self.assertEqual(str(cm.exception), ('15/16 values do not match up to atol=2.00e-15, rtol=2.00e-03:\n[0, 0, 1] desired: +2.0014e+00, actual: +1.0000e+00, spacing: 4.0e-03\n[0, 1, 0] desired: +3.9981e+00, actual: +2.0000e+00, spacing: 8.0e-03\n[0, 1, 1] desired: +6.0004e+00, actual: +3.0000e+00, spacing: 1.2e-02\n[0, 2, 0] desired: +8.0031e+00, actual: +4.0000e+00, spacing: 1.6e-02\n[0, 2, 1] desired: +9.9925e+00, actual: +5.0000e+00, spacing: 2.0e-02\n...\n[1, 1, 1] desired: +2.2017e+01, actual: +1.1000e+01, spacing: 4.4e-02\n[1, 2, 0] desired: +2.3995e+01, actual: +1.2000e+01, spacing: 4.8e-02\n[1, 2, 1] desired: +2.5993e+01, actual: +1.3000e+01, spacing: 5.2e-02\n[1, 3, 0] desired: +2.7990e+01, actual: +1.4000e+01, spacing: 5.6e-02\n[1, 3, 1] desired: +3.0019e+01, actual: +1.5000e+01, spacing: 6.0e-02\nIf this is expected, update the base64 string to:\n' + self.desired))
    def test_fail(self):
        # Undecodable input must fail with a decode-error message.
        with self.assertRaises(AssertionError) as cm:
            self.assertAlmostEqual64(self.actual, 'invalid')
        self.assertEqual(str(cm.exception), ('failed to decode data: Incorrect padding\nIf this is expected, update the base64 string to:\n' + self.desired)) |
def _cmd_metrics(args):
    """CLI entry point for the ``metrics`` command.

    Loads the coverage arrays (and optional segment files), computes the
    metrics table and writes it to ``args.output``.

    Raises:
        ValueError: when several coverage files and several segment files are
            given but their counts do not line up one-to-one.
    """
    n_coverage = len(args.cnarrays)
    n_segments = len(args.segments) if args.segments else 0
    # Multiple coverage files may share a single segment file; otherwise the
    # two lists must pair up exactly.
    if n_coverage > 1 and n_segments > 1 and n_coverage != n_segments:
        raise ValueError('Number of coverage/segment filenames given must be equal, if more than 1 segment file is given.')
    cnarrs = (read_cna(fname) for fname in args.cnarrays)
    if args.segments:
        args.segments = (read_cna(fname) for fname in args.segments)
    table = metrics.do_metrics(cnarrs, args.segments, args.drop_low_coverage)
    write_dataframe(args.output, table)
class TorchDeltaStateCritic(TorchStateCritic):
    """State critic that predicts the first sub-step's value directly and
    models every subsequent sub-step as a learned *delta* on top of the
    previous (detached) value prediction.

    NOTE(review): the bare ``(StateCritic)`` / ``(TorchStateCritic)``
    expression statements below look like decorators whose ``@override``
    wrapper was lost during extraction -- confirm against the original source.
    """

    (StateCritic)
    def predict_values(self, critic_input: StateCriticInput) -> StateCriticOutput:
        """Predict values for all sub-steps of *critic_input*.

        The first sub-step is a plain value prediction; each later sub-step
        receives the previous detached value as an extra ``prev_value``
        observation and predicts a delta that is added onto it.
        """
        critic_output = StateCriticOutput()
        # First sub-step: direct value prediction.
        key_0 = critic_input[0].actor_id.step_key
        value_0 = self.networks[key_0](critic_input[0].tensor_dict)['value'][..., 0]
        critic_output.append(StateCriticStepOutput(value_0, detached_values=value_0.detach(), actor_id=critic_input[0].actor_id))
        # Remaining sub-steps: value = previous detached value + predicted delta.
        for step_critic_input in critic_input.substep_inputs[1:]:
            prev_values = critic_output.detached_values[-1]
            obs = step_critic_input.tensor_dict.copy()
            obs['prev_value'] = prev_values.unsqueeze(-1)
            value_delta = self.networks[step_critic_input.actor_id.step_key](obs)['value'][..., 0]
            next_values = critic_output.detached_values[-1] + value_delta
            critic_output.append(StateCriticStepOutput(next_values, detached_values=next_values.detach(), actor_id=step_critic_input.actor_id))
        return critic_output

    (StateCritic)
    def predict_value(self, observation: ObservationType, critic_id: Union[int, str]) -> torch.Tensor:
        """Single-step value prediction is not supported by the delta critic.

        BUG FIX: the original did ``raise NotImplemented`` -- ``NotImplemented``
        is a constant, not an exception, so that line raised a confusing
        ``TypeError`` instead of ``NotImplementedError``.
        """
        raise NotImplementedError

    (TorchStateCritic)
    def num_critics(self) -> int:
        """Return the number of critic networks.

        NOTE(review): looks like a ``@property`` whose decorator was stripped.
        """
        return self._num_critics

    (TorchStateCritic)
    def compute_structured_return(self, gamma: float, gae_lambda: float, rewards: List[torch.Tensor], values: List[torch.Tensor], dones: torch.Tensor) -> List[torch.Tensor]:
        """Compute a single shared return from the summed sub-step rewards and
        replicate it for every sub-step (all sub-steps share one return)."""
        shared_rewards = torch.stack(rewards).sum(dim=0)
        sub_step_return = self.compute_return(gamma=gamma, gae_lambda=gae_lambda, rewards=shared_rewards, values=values[-1], dones=dones)
        return [sub_step_return for _ in values]
def main(page: ft.Page):
    """Render an interactive seven-day bar chart on *page*."""

    def on_chart_event(e: ft.BarChartEvent):
        # Mark exactly the rod under the cursor as hovered; clear all others.
        for group_index, group in enumerate(chart.bar_groups):
            for rod_index, rod in enumerate(group.bar_rods):
                rod.hovered = e.group_index == group_index and e.rod_index == rod_index
        chart.update()

    # One value and one axis label per weekday (Monday..Sunday).
    day_values = [5, 6.5, 5, 7.5, 9, 11.5, 6]
    day_labels = ['M', 'T', 'W', 'T', 'F', 'S', 'S']
    chart = ft.BarChart(
        bar_groups=[
            ft.BarChartGroup(x=index, bar_rods=[SampleRod(value)])
            for index, value in enumerate(day_values)
        ],
        bottom_axis=ft.ChartAxis(
            labels=[
                ft.ChartAxisLabel(value=index, label=ft.Text(day))
                for index, day in enumerate(day_labels)
            ]
        ),
        on_chart_event=on_chart_event,
        interactive=True,
    )
    page.add(ft.Container(chart, bgcolor=ft.colors.GREEN_200, padding=10, border_radius=5, expand=True))
def RunCommand(args, timeout=None, logfile=None):
    """Spawn *args* (argv list) under pexpect, wait for it to finish, and
    return 0 on success.

    Raises:
        RuntimeError: when the child exits with a non-zero status; the
            message includes the child's captured output.
    """
    child = pexpect.spawn(args[0], args=args[1:], timeout=timeout, logfile=logfile)
    child.expect(pexpect.EOF)
    child.close()
    if not child.exitstatus:
        return 0
    # Echo the failing command line before raising so it shows up in logs.
    print(args)
    raise RuntimeError('Error: {}\nProblem running command. Exit status: {}'.format(child.before, child.exitstatus))
def resnet(conf, arch=None):
    """Build a ResNet model for the dataset named in ``conf.data``.

    The depth is parsed from the architecture name (e.g. ``'resnet20'`` -> 20),
    taken from *arch* when given, otherwise from ``conf.arch``.

    Raises:
        NotImplementedError: for datasets other than cifar*/svhn*/imagenet*.
    """
    arch_name = arch if arch is not None else conf.arch
    resnet_size = int(arch_name.replace('resnet', ''))
    dataset = conf.data
    if 'cifar' in dataset or 'svhn' in dataset:
        return ResNet_cifar(dataset=dataset, resnet_size=resnet_size, freeze_bn=conf.freeze_bn, freeze_bn_affine=conf.freeze_bn_affine, group_norm_num_groups=conf.group_norm_num_groups)
    if 'imagenet' in dataset:
        # Downsampled imagenet variants (names longer than 'imagenet') use the
        # cifar-style network with scaling; plain imagenet uses the full model.
        if len(dataset) > 8:
            return ResNet_cifar(dataset=dataset, resnet_size=resnet_size, scaling=4, group_norm_num_groups=conf.group_norm_num_groups, freeze_bn=conf.freeze_bn, freeze_bn_affine=conf.freeze_bn_affine)
        return ResNet_imagenet(dataset=dataset, resnet_size=resnet_size, group_norm_num_groups=conf.group_norm_num_groups, freeze_bn=conf.freeze_bn, freeze_bn_affine=conf.freeze_bn_affine)
    raise NotImplementedError
class TestParserAndGrammarErrors(unittest.TestCase):
    """Error-path tests for ServiceTestPlanFixtureParser: missing files,
    syntax errors, bad data types, missing/duplicate directives, and invalid
    freeze-time values.  Each test checks the FixtureSyntaxError's message,
    filename, line number, offending text and column offset."""

    def test_no_file(self):
        """A nonexistent fixture path raises FixtureLoadError."""
        parser = ServiceTestPlanFixtureParser('/path/to/fake/fixture', 'test_no_file')
        with self.assertRaises(FixtureLoadError):
            parser.parse_test_fixture()

    def test_empty_file(self):
        """A fixture containing only a newline raises FixtureLoadError."""
        with _temp_fixture_file_name_context('\n') as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_empty_file')
            with self.assertRaises(FixtureLoadError):
                parser.parse_test_fixture()

    def test_general_syntax_error(self):
        """An unparsable directive line reports file, line, text and offset."""
        fixture = 'test name: some_test\ntest description: Some description\nget_user: input int: user_id: 123\nget_user: expect no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_general_syntax_error')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Failed to parse line', error_context.exception.msg)
            self.assertIn('get_user: expect no errors', error_context.exception.msg)
            self.assertIn(file_name, error_context.exception.msg)
            self.assertIn('Expected end of text', error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(4, error_context.exception.lineno)
            self.assertEqual('get_user: expect no errors', error_context.exception.text)
            self.assertEqual(1, error_context.exception.offset)

    def test_data_type_conversion_error(self):
        """A non-integer value for an int input reports a conversion error."""
        fixture = 'test name: some_test\ntest description: Some description\nget_user: input int: user_id: abc123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_data_type_conversion_error')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Data type conversion error', error_context.exception.msg)
            self.assertIn(file_name, error_context.exception.msg)
            self.assertIn('invalid literal', error_context.exception.msg)
            self.assertIn('with base 10', error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(3, error_context.exception.lineno)
            self.assertEqual('get_user: input int: user_id: abc123', error_context.exception.text)
            self.assertEqual(36, error_context.exception.offset)

    def test_test_case_without_name(self):
        """A test case missing the 'test name' directive is rejected."""
        fixture = 'test description: Some description\nget_user: input int: user_id: 123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_test_case_without_name')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Test case without name', error_context.exception.msg)
            self.assertIn(file_name, error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(3, error_context.exception.lineno)
            self.assertEqual('get_user: expect: no errors', error_context.exception.text)
            self.assertEqual(27, error_context.exception.offset)

    def test_test_case_without_description(self):
        """A test case missing the 'test description' directive is rejected."""
        fixture = 'test name: some_test\nget_user: input int: user_id: 123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_test_case_without_description')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Test case without description', error_context.exception.msg)
            self.assertIn(file_name, error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(3, error_context.exception.lineno)
            self.assertEqual('get_user: expect: no errors', error_context.exception.text)
            self.assertEqual(27, error_context.exception.offset)

    def test_empty_test_case(self):
        """A test case with only name/description directives is rejected."""
        fixture = 'test name: some_test\ntest description: Some description\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_empty_test_case')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Empty test case', error_context.exception.msg)
            self.assertIn(file_name, error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(2, error_context.exception.lineno)
            self.assertEqual('test description: Some description', error_context.exception.text)
            self.assertEqual(34, error_context.exception.offset)

    def test_duplicate_test_name_directive_same_name(self):
        """Repeating 'test name' (same value) is a syntax error."""
        fixture = 'test name: some_test\ntest name: some_test\ntest description: Some description\nget_user: input int: user_id: 123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_duplicate_test_name')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Duplicate test name directive for test case', error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(2, error_context.exception.lineno)
            self.assertEqual('test name: some_test', error_context.exception.text)
            self.assertEqual(20, error_context.exception.offset)

    def test_duplicate_test_name_directive_different_name(self):
        """Repeating 'test name' (different value) is also a syntax error."""
        fixture = 'test name: some_test\ntest name: another_test\ntest description: Some description\nget_user: input int: user_id: 123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_duplicate_test_name')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Duplicate test name directive for test case', error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(2, error_context.exception.lineno)
            self.assertEqual('test name: another_test', error_context.exception.text)
            self.assertEqual(23, error_context.exception.offset)

    def test_duplicate_test_description_directive_same_description(self):
        """Repeating 'test description' (same value) is a syntax error."""
        fixture = 'test name: some_test\ntest description: Some description\ntest description: Some description\nget_user: input int: user_id: 123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_duplicate_test_description')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Duplicate test description directive for test case', error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(3, error_context.exception.lineno)
            self.assertEqual('test description: Some description', error_context.exception.text)
            self.assertEqual(34, error_context.exception.offset)

    def test_duplicate_test_description_directive_different_description(self):
        """Repeating 'test description' (different value) is also an error."""
        fixture = 'test name: some_test\ntest description: Some description\ntest description: A different description\nget_user: input int: user_id: 123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_duplicate_test_description')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Duplicate test description directive for test case', error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(3, error_context.exception.lineno)
            self.assertEqual('test description: A different description', error_context.exception.text)
            self.assertEqual(41, error_context.exception.offset)

    def test_invalid_freeze_time_syntax(self):
        """An unparsable 'freeze time' datetime value is a syntax error."""
        fixture = 'test name: some_test\ntest description: Some description\nget_user: freeze time: not a valid date time string\nget_user: input int: user_id: 123\nget_user: expect: no errors\n'
        with _temp_fixture_file_name_context(fixture) as file_name:
            parser = ServiceTestPlanFixtureParser(file_name, 'test_invalid_freeze_time_syntax')
            with self.assertRaises(FixtureSyntaxError) as error_context:
                parser.parse_test_fixture()
            self.assertIn('Could not parse datetime value for time freeze', error_context.exception.msg)
            self.assertEqual(file_name, error_context.exception.filename)
            self.assertEqual(3, error_context.exception.lineno)
            self.assertEqual('get_user: freeze time: not a valid date time string', error_context.exception.text)
            self.assertEqual(51, error_context.exception.offset)
def serve(sock, handle, concurrency=1000):
    """Accept connections on *sock* forever, dispatching each to *handle*
    on a green thread from a pool of size *concurrency*.

    Returns when a StopServe exception is raised (typically by sock.accept()
    after the server is asked to stop).
    """
    worker_pool = greenpool.GreenPool(concurrency)
    server_thread = greenthread.getcurrent()
    while True:
        try:
            conn, addr = sock.accept()
            worker = worker_pool.spawn(handle, conn, addr)
            worker.link(_stop_checker, server_thread, conn)
            # Drop our references immediately so the connection objects can
            # be reclaimed as soon as the worker finishes with them.
            conn = addr = worker = None
        except StopServe:
            return
class Solution(object):
    def findTilt(self, root):
        """Return the sum of tilts over every node of the binary tree.

        A node's tilt is the absolute difference between the sums of its
        left and right subtrees; None counts as an empty subtree with sum 0.
        """
        total_tilt = 0

        def subtree_sum(node):
            # Post-order walk: returns the sum of the subtree rooted at
            # *node* while accumulating tilts into total_tilt.
            nonlocal total_tilt
            if node is None:
                return 0
            left_sum = subtree_sum(node.left)
            right_sum = subtree_sum(node.right)
            total_tilt += abs(left_sum - right_sum)
            return left_sum + right_sum + node.val

        subtree_sum(root)
        return total_tilt
class Screen():
    """Curses UI for browsing markets/stocks via the KiwoomOpenApiPlus
    entrypoint: a reverse-video header shows the current search query, a
    scrollable pad lists entries, and a footer shows log output.

    NOTE(review): the indentation of this extract was flattened; the nesting
    below (especially inside show_entries) was reconstructed from the control
    flow and should be verified against the original source.
    """

    def __init__(self, screen):
        """Set up curses windows, wire log output to the footer, and connect
        the Kiwoom entrypoint.

        :param screen: the curses standard screen passed by curses.wrapper.
        """
        self._screen = screen
        curses.start_color()
        curses.init_pair(1, curses.COLOR_BLACK, curses.COLOR_WHITE)
        self._screen.clear()
        self._screen.refresh()
        (self._height, self._width) = self._screen.getmaxyx()
        # One-line reverse-video header that echoes the search query.
        self._header_bar = curses.newwin(1, self._width, 0, 0)
        self._header_bar.attron(curses.A_REVERSE)
        self._header_bar_prefix = '>>> '
        self._header_bar_text = self._header_bar_prefix
        # Scrollable pad holding the entry list; resized per redraw.
        self._pad = curses.newpad(0, self._width)
        # One-line reverse-video footer used for log messages.
        self._footer_bar = curses.newwin(1, self._width, (self._height - 1), 0)
        self._footer_bar.attron(curses.A_REVERSE)
        self._footer_text = ''
        self._exit_key = ord('q')
        self.refresh_header_bar()
        self.set_footer_bar('Logs will appear here...')
        # Route koapy log records into the footer bar.
        self._handler = StatusBarHandler(self)
        self._logger = Logging.get_logger('koapy')
        self._logger.addHandler(self._handler)
        from koapy import KiwoomOpenApiPlusEntrypoint
        self._entrypoint = KiwoomOpenApiPlusEntrypoint()
        self._entrypoint.EnsureConnected()

    def refresh_header_bar(self):
        """Redraw the header bar, re-fitting it to the current terminal width
        and moving the cursor to the end of the query text."""
        (self._height, self._width) = self._screen.getmaxyx()
        self._header_bar.resize(1, self._width)
        self._header_bar.mvwin(0, 0)
        self._header_bar.addstr(0, 0, self._header_bar_text.ljust((self._width - 1)))
        self._screen.move(0, screen_len(self._header_bar_text))
        self._header_bar.refresh()

    def set_header_bar(self, text):
        """Set the header text (prefixed with '>>> ') and redraw it."""
        self._header_bar_text = (self._header_bar_prefix + text)
        self.refresh_header_bar()

    def refresh_footer_bar(self):
        """Redraw the footer bar at the bottom row for the current size."""
        (self._height, self._width) = self._screen.getmaxyx()
        self._footer_bar.resize(1, self._width)
        self._footer_bar.mvwin((self._height - 1), 0)
        self._footer_bar.addstr(0, 0, self._footer_text.ljust((self._width - 1)))
        self._footer_bar.refresh()

    def set_footer_bar(self, text):
        """Set the footer text and redraw it."""
        self._footer_text = text
        self.refresh_footer_bar()

    def show_entries(self, entries):
        """Interactive list view over *entries*.

        Keys: up/down move the cursor, PgUp/PgDn scroll a page, right enters
        a market's stock list (loading it lazily, then recursing), backspace
        edits the search query, printable characters extend it, and
        ESC / left exits back to the caller.
        """
        # Prefix-search index: entries are reachable by code or by name.
        trie = pygtrie.CharTrie()
        for entry in entries:
            trie[str(entry.code)] = entry
            trie[entry.name] = entry
        original_entries = entries
        current_entries = original_entries
        k = 0
        cursor_y = 0
        scroll_y = 0
        query = ''
        while (k not in [curses.ascii.ESC, curses.KEY_LEFT]):
            self.set_header_bar(query)
            self.refresh_footer_bar()
            (self._height, self._width) = self._screen.getmaxyx()
            # Two rows are reserved for the header and footer bars.
            pad_height = (self._height - 2)
            pad_width = self._width
            self._pad.resize(len(current_entries), pad_width)
            self._pad.clear()
            if (k == curses.KEY_DOWN):
                cursor_y = (cursor_y + 1)
            elif (k == curses.KEY_UP):
                cursor_y = (cursor_y - 1)
            elif (k == curses.KEY_PPAGE):
                # Page up: scroll a full page and keep the cursor in place
                # relative to the visible window.
                prev_scroll_y = scroll_y
                scroll_y = max((scroll_y - pad_height), 0)
                cursor_y -= (prev_scroll_y - scroll_y)
            elif (k == curses.KEY_NPAGE):
                prev_scroll_y = scroll_y
                scroll_y = min((scroll_y + pad_height), ((len(current_entries) - 1) - pad_height))
                scroll_y = max(0, scroll_y)
                cursor_y += (scroll_y - prev_scroll_y)
            elif (k == curses.KEY_RIGHT):
                # Drill into the selected market's stock list (lazy-loaded).
                if (len(current_entries) > 0):
                    entry = current_entries[cursor_y]
                    if hasattr(entry, 'stocks'):
                        market = entry
                        if (len(market.stocks) == 0):
                            stock_codes = self._entrypoint.GetCodeListByMarketAsList(market.code)
                            stock_names = [self._entrypoint.GetMasterCodeName(code) for code in stock_codes]
                            stocks = [StockEntry(code, name) for (code, name) in zip(stock_codes, stock_names)]
                            market.stocks = stocks
                        self._pad.clear()
                        self._pad.refresh(0, 0, 1, 0, pad_height, (pad_width - 1))
                        self.show_entries(market.stocks)
                        self._pad.clear()
                        self._pad.refresh(0, 0, 1, 0, pad_height, (pad_width - 1))
                k = 0
                continue
            elif (k in [curses.KEY_BACKSPACE, 8]):
                # Backspace (curses key or ASCII BS): shrink the query and
                # re-filter; an empty query shows the full list again.
                if (len(query) > 0):
                    query = query[:(- 1)]
                    self.set_header_bar(query)
                if (len(query) > 0):
                    if (trie.has_key(query) or trie.has_subtrie(query)):
                        current_entries = trie.values(query)
                else:
                    current_entries = original_entries
                self._pad.clear()
                self._pad.refresh(0, 0, 1, 0, pad_height, (pad_width - 1))
                k = 0
                continue
            elif (unicodedata.category(chr(k))[0] != 'C'):
                # Any printable (non-control) character extends the query.
                query += chr(k)
                self.set_header_bar(query)
                if (trie.has_key(query) or trie.has_subtrie(query)):
                    current_entries = trie.values(query)
                else:
                    current_entries = []
                self._pad.clear()
                self._pad.refresh(0, 0, 1, 0, pad_height, (pad_width - 1))
                k = 0
                continue
            # Clamp the cursor to the current list.
            cursor_y = min((len(current_entries) - 1), cursor_y)
            cursor_y = max(0, cursor_y)
            # Keep a small margin (about 9 rows) between the cursor and the
            # window edge while scrolling.
            if (k == curses.KEY_DOWN):
                if (cursor_y == ((scroll_y + pad_height) - 9)):
                    scroll_y = ((cursor_y - pad_height) + 10)
                elif (cursor_y > ((scroll_y + pad_height) - 2)):
                    scroll_y = ((cursor_y - pad_height) + 1)
            elif (k == curses.KEY_UP):
                if (cursor_y == (scroll_y + 8)):
                    scroll_y = (cursor_y - 9)
                elif (cursor_y < scroll_y):
                    scroll_y = cursor_y
            scroll_y = min((len(current_entries) - 1), scroll_y)
            scroll_y = max(0, scroll_y)
            # Draw every entry; the cursor row is underlined.
            for (i, market) in enumerate(current_entries):
                attr = 0
                if (i == cursor_y):
                    attr = curses.A_UNDERLINE
                self._pad.addstr(i, 0, market.to_string().ljust((pad_width - 1)), attr)
            self._pad.refresh(scroll_y, 0, 1, 0, pad_height, (pad_width - 1))
            k = self._screen.getch()
class OptionSeriesErrorbarOnpointConnectoroptions(Options):
    """Generated wrapper for Highcharts errorbar on-point connector options.

    NOTE(review): every option below appears twice -- a getter followed by a
    same-named setter. In the original generated source these are almost
    certainly decorated with @property / @<name>.setter; the decorators look
    to have been stripped during extraction. As written, the later definition
    silently replaces the earlier one -- restore the decorators before use.
    """

    def dashstyle(self):
        # Getter; no configured default (None).
        return self._config_get(None)

    def dashstyle(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def stroke(self):
        # Getter; no configured default (None).
        return self._config_get(None)

    def stroke(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def width(self):
        # Getter; default width is 1.
        return self._config_get(1)

    def width(self, num: float):
        # Setter.
        self._config(num, js_type=False)
def test_cli_with_pdm(pdm_venv_factory: PDMVenvFactory) -> None:
    """End-to-end check: run deptry inside a PDM-managed virtualenv and
    compare the emitted JSON issue report against the expected issues
    (unused deps DEP002, misplaced dev deps DEP004, missing dep DEP001)."""
    with pdm_venv_factory('project_with_pdm') as virtual_env:
        # Unique report filename so parallel runs don't collide.
        issue_report = f'{uuid.uuid4()}.json'
        result = virtual_env.run(f'deptry . -o {issue_report}')
        # deptry exits non-zero when it finds issues.
        assert (result.returncode == 1)
        assert (get_issues_report(Path(issue_report)) == [{'error': {'code': 'DEP002', 'message': "'isort' defined as a dependency but not used in the codebase"}, 'module': 'isort', 'location': {'file': str(Path('pyproject.toml')), 'line': None, 'column': None}}, {'error': {'code': 'DEP002', 'message': "'requests' defined as a dependency but not used in the codebase"}, 'module': 'requests', 'location': {'file': str(Path('pyproject.toml')), 'line': None, 'column': None}}, {'error': {'code': 'DEP004', 'message': "'black' imported but declared as a dev dependency"}, 'module': 'black', 'location': {'file': str(Path('src/main.py')), 'line': 4, 'column': 0}}, {'error': {'code': 'DEP004', 'message': "'mypy' imported but declared as a dev dependency"}, 'module': 'mypy', 'location': {'file': str(Path('src/main.py')), 'line': 6, 'column': 0}}, {'error': {'code': 'DEP004', 'message': "'pytest' imported but declared as a dev dependency"}, 'module': 'pytest', 'location': {'file': str(Path('src/main.py')), 'line': 7, 'column': 0}}, {'error': {'code': 'DEP004', 'message': "'pytest_cov' imported but declared as a dev dependency"}, 'module': 'pytest_cov', 'location': {'file': str(Path('src/main.py')), 'line': 8, 'column': 0}}, {'error': {'code': 'DEP001', 'message': "'white' imported but missing from the dependency definitions"}, 'module': 'white', 'location': {'file': str(Path('src/main.py')), 'line': 9, 'column': 0}}])
def extractTriangleNovels(item):
    """Map a feed *item* to a release message for Triangle Novels series.

    Returns False for previews, items with neither volume nor chapter, or
    items whose tags match no known series.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return False
    # Tag -> series-name lookup; first matching tag wins.
    series_by_tag = (
        ('WFTT', 'Waiting For The Train'),
        ('AFGITMOLFM', 'AFGITMOLFM'),
    )
    for tag, series_name in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix)
    return False
def test_control_doc_parse():
    """Write a job-control document to a temp file, load it via
    job_ctrl.load_control_doc, and validate the resulting object.

    NOTE(review): the embedded document literal appears garbled --
    `SRCID_NIGHTHAWK` and `testVersion` are unquoted and the `"source_url"`
    value looks truncated. It is runtime data, so it is preserved
    byte-for-byte here; verify it against the original fixture.
    """
    control_json = '\n    {\n      "remote": true,\n      "scavengingBenchmark": true,\n      "source": [\n        {\n          "identity": SRCID_NIGHTHAWK,\n          "source_url": "          "branch": "master"\n        },\n        {\n          "identity": SRCID_ENVOY,\n          "source_path": "/home/ubuntu/envoy",\n          "branch": "master",\n          "commit_hash": "random_commit_hash_string"\n        }\n      ],\n      "images": {\n        "reuseNhImages": true,\n        "nighthawkBenchmarkImage": "envoyproxy/nighthawk-benchmark-dev:latest",\n        "nighthawkBinaryImage": "envoyproxy/nighthawk-dev:latest",\n        "envoyImage": "envoyproxy/envoy-dev:f61b096f6a2dd3a9c74b9a9369a6ea398dbe1f0f"\n      },\n      "environment": {\n        testVersion: IPV_V4ONLY,\n        "envoyPath": "envoy",\n        "outputDir": "/home/ubuntu/nighthawk_output",\n        "testDir": "/home/ubuntu/nighthawk_tests",\n        "variables": {\n          "TMP_DIR": "/home/ubuntu/nighthawk_output"\n        }\n      }\n    }\n    '
    job_control = None
    # delete=False so the file survives the explicit close() and can be
    # re-opened by load_control_doc; we unlink it manually afterwards.
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp:
        tmp.write(control_json)
        tmp.close()
        job_control = job_ctrl.load_control_doc(tmp.name)
        os.unlink(tmp.name)
    _validate_job_control_object(job_control)
class Monitor(Thread):
    """Daemon thread that supervises a download task's worker threads.

    It collects per-error "votes" from workers, sweeps out dead/zombie
    workers, reports progress to the log and the terminal title, detects
    stuck tasks, and finalizes the task when all images are downloaded.
    """

    def __init__(self, req, proxy, logger, task, exit_check=None, ignored_errors=None):
        """Create a monitor for *task*.

        :param exit_check: optional callable(reason) -> bool signalling that
            the whole program is shutting down.
        :param ignored_errors: iterable of error codes whose votes are
            pre-cleared.  BUG FIX: this used to default to a mutable ``[]``
            (the classic shared-default pitfall); ``None`` is now treated as
            empty, which is backward compatible.
        """
        Thread.__init__(self, name=('monitor%s' % task.guid))
        # Modern spelling of the deprecated Thread.setDaemon(self, True).
        self.daemon = True
        self.vote_result = {}
        self.vote_cleared = set().union(ignored_errors or ())
        self.thread_last_seen = {}
        self.dctlock = RLock()
        self.votelock = RLock()
        self.thread_ref = {}
        self.thread_zombie = set()
        self.req = req
        self.proxy = proxy
        self.logger = logger
        self.task = task
        self._exit = exit_check if exit_check else (lambda x: False)
        self._cleaning_up = False
        self.download_speed = 0
        # Platform-specific terminal-title setter (PY3K/CODEPAGE handle the
        # legacy Python 2 bytes encoding path).
        if os.name == 'nt':
            self.set_title = lambda s: os.system('TITLE %s' % (s if PY3K else s.encode(CODEPAGE, 'replace')))
        elif os.name == 'posix':
            import sys
            self.set_title = lambda s: sys.stdout.write('\x1b]2;%s\x07' % (s if PY3K else s.encode(CODEPAGE, 'replace')))

    def set_vote_ns(self, tnames):
        """Register the worker thread names and mark them all as just seen."""
        t = time.time()
        self.thread_last_seen = {k: t for k in tnames}

    def vote(self, tname, code):
        """Record an error vote *code* from worker *tname* (thread-safe)."""
        self.votelock.acquire()
        if code != ERR_NO_ERROR:
            self.logger.verbose('t-%s vote:%s' % (tname, code))
        if code not in self.vote_result:
            self.vote_result[code] = 1
        else:
            self.vote_result[code] += 1
        self.votelock.release()

    def wrk_keepalive(self, wrk_thread, _exit=False):
        """Heartbeat from a worker; returns True when the worker should stop.

        A worker that should stop (task done/failed, global exit, or its own
        death) is removed from the bookkeeping maps; otherwise its last-seen
        timestamp is refreshed and its reference recorded.
        """
        tname = wrk_thread.name
        if tname in self.thread_zombie:
            self.thread_zombie.remove(tname)
        _ = ((self.task.meta['finished'] == self.task.meta['total']) or
             (self.task.state in (TASK_STATE_FINISHED, TASK_STATE_FAILED)) or
             self._exit('mon') or _exit)
        if _ or (not wrk_thread.is_alive()):
            self.dctlock.acquire()
            if tname in self.thread_last_seen:
                del self.thread_last_seen[tname]
            if tname in self.thread_ref:
                del self.thread_ref[tname]
            self.dctlock.release()
        else:
            self.thread_last_seen[tname] = time.time()
            if tname not in self.thread_ref:
                self.thread_ref[tname] = wrk_thread
        return _

    def _check_vote(self):
        """Fail the task when every live worker voted quota-exceeded."""
        if ((ERR_QUOTA_EXCEEDED in self.vote_result) and
                (ERR_QUOTA_EXCEEDED not in self.vote_cleared) and
                (self.vote_result[ERR_QUOTA_EXCEEDED] >= len(self.thread_last_seen))):
            self.logger.error(i18n.TASK_STOP_QUOTA_EXCEEDED % self.task.guid)
            self.task.state = TASK_STATE_FAILED

    def run(self):
        """Main supervision loop; exits when no workers remain or the task
        is stuck, then finalizes the task."""
        CHECK_INTERVAL = 10   # report progress every N loop iterations
        STUCK_INTERVAL = 90   # seconds without progress before stuck handling
        intv = 0
        self.set_title(i18n.TASK_START % self.task.guid)
        last_change = time.time()
        last_finished = -1
        while len(self.thread_last_seen) > 0:
            intv += 1
            self._check_vote()
            total_speed = 0
            for k in list(self.thread_last_seen.keys()):
                _zombie_threshold = (self.thread_ref[k].zombie_threshold
                                     if k in self.thread_ref else 30)
                if (time.time() - self.thread_last_seen[k]) > _zombie_threshold:
                    # BUG FIX: the original tested `k not in self.thread_ref`
                    # here and then immediately indexed self.thread_ref[k],
                    # which would raise KeyError; a known-and-alive-but-silent
                    # worker is the zombie case, anything else is swept out.
                    if (k in self.thread_ref) and self.thread_ref[k].is_alive():
                        self.logger.warning(i18n.THREAD_MAY_BECOME_ZOMBIE % k)
                        self.thread_zombie.add(k)
                    else:
                        self.logger.warning(i18n.THREAD_SWEEP_OUT % k)
                        del self.thread_last_seen[k]
                elif (k in self.thread_ref) and self.thread_ref[k].stream_speed:
                    total_speed += self.thread_ref[k].stream_speed.calc()
            self.download_speed = total_speed
            if intv == CHECK_INTERVAL:
                _ = ('%s %dR/%dZ, %s %dR/%dD, %s/s' % (
                    i18n.THREAD, len(self.thread_last_seen), len(self.thread_zombie),
                    i18n.QUEUE, self.task.img_q.qsize(), self.task.meta['finished'],
                    util.human_size(total_speed)))
                self.logger.info(_)
                self.set_title(_)
                intv = 0
            if last_finished != self.task.meta['finished']:
                # Progress was made; reset the stuck timer.
                last_change = time.time()
                last_finished = self.task.meta['finished']
            elif (time.time() - last_change) > STUCK_INTERVAL:
                self.logger.info(i18n.TASK_UNFINISHED % (self.task.guid, self.task.get_fid_unfinished()))
                if total_speed > 0:
                    # Data is still flowing, just slowly -- keep waiting.
                    last_change = time.time()
                    self.logger.warning(i18n.TASK_SLOW % self.task.guid)
                else:
                    self.logger.warning(i18n.TASK_STUCK % self.task.guid)
                    break
            time.sleep(0.5)
        if self.task.meta['finished'] == self.task.meta['total']:
            _err = self.task.rename_fname()
            if _err:
                self.logger.warning(i18n.XEH_RENAME_HAS_ERRORS % '\n'.join(
                    map(lambda x: ('%s => %s : %s' % x), _err)))
            self.set_title(i18n.TASK_FINISHED % self.task.guid)
            self.logger.info(i18n.TASK_FINISHED % self.task.guid)
            self.task.state = TASK_STATE_FINISHED
        # NOTE(review): flattened indentation makes it ambiguous whether
        # cleanup() was conditional on a finished task; it is kept
        # unconditional here -- confirm against the original source.
        self.task.cleanup()
class Example():
    """Plain data holder: stores four optional constructor arguments under
    the same names, plus two attributes that start out unset (None)."""

    def __init__(self, init_arg1=None, init_arg2=None, init_arg3=None, init_arg4=None):
        # Mirror each constructor argument onto the instance.
        self.init_arg1, self.init_arg2 = init_arg1, init_arg2
        self.init_arg3, self.init_arg4 = init_arg3, init_arg4
        # Not settable through the constructor; populated later by callers.
        self.attribute1 = self.attribute2 = None
# NOTE(review): the `.django_db` line below looks like the tail of a mangled
# `@pytest.mark.django_db` decorator -- confirm against the original source.
.django_db
def test_failure_with_invalid_group(client, monkeypatch, elasticsearch_transaction_index):
    """spending_over_time must 400 on an invalid 'group' value and 422 when
    'group' is missing entirely."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    # Invalid group value -> 400 with an enumerated-values message.
    resp = client.post('/api/v2/search/spending_over_time', content_type='application/json', data=json.dumps({'group': 'not a valid group', 'filters': {'keywords': ['test', 'testing']}}))
    assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
    assert (resp.json().get('detail') == "Field 'group' is outside valid values ['quarter', 'q', 'fiscal_year', 'fy', 'month', 'm']"), 'Expected to fail with invalid group'
    # Missing group -> 422 required-field error.
    resp = client.post('/api/v2/search/spending_over_time', content_type='application/json', data=json.dumps({'filters': {'keywords': ['test', 'testing']}}))
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
    assert (resp.json().get('detail') == "Missing value: 'group' is a required field"), 'Expected to fail with no group'
def restart_systemd_processes(bench_path='.', web_workers=False, _raise=True):
    """Restart every systemd unit required by the bench's target by stopping
    and then starting them all.

    ``web_workers`` is accepted for signature compatibility but unused here.
    """
    bench_name = get_bench_name(bench_path)
    # Shell substitution expanding to all units the bench target requires.
    required_units = f'$(systemctl show -p Requires {bench_name}.target | cut -d= -f2)'
    for action in ('stop', 'start'):
        exec_cmd(f'sudo systemctl {action} -- {required_units}', _raise=_raise)
def results_store(cfg):
    """Build the results store selected by the ``reporting``/``datastore.type``
    config option: Elasticsearch-backed when set to 'elasticsearch',
    otherwise a no-op store."""
    logger = logging.getLogger(__name__)
    wants_elasticsearch = cfg.opts('reporting', 'datastore.type') == 'elasticsearch'
    if wants_elasticsearch:
        logger.info('Creating ES results store')
        return EsResultsStore(cfg)
    logger.info('Creating no-op results store')
    return NoopResultsStore()
def reorg(address):
    """Make sure the primary state entry for *address* is active, swapping in
    an active alternate state if necessary.

    Raises:
        Exception: when neither the primary nor any alternate state is active.
    """
    if is_account_state_active(state[address]):
        # Already active -- nothing to do.
        return
    for index, candidate in enumerate(alt_states[address]):
        if not is_account_state_active(candidate):
            continue
        # Swap the active alternate into the primary slot.
        state[address], alt_states[address][index] = candidate, state[address]
        return
    raise Exception('wtf m8')
class structured_sparsity(func):
    """Structured-sparsity (group lasso) penalty: a weighted sum of the
    Euclidean norms of index groups of the input, scaled by ``lambda_``.

    BUG FIX: ``groups`` and ``weights`` used to default to the mutable
    literals ``[[]]`` and ``[0]``, which would be shared between every
    instance constructed with the defaults; they now default to ``None``
    and are replaced by fresh equivalents, which is backward compatible.
    """

    def __init__(self, lambda_=1, groups=None, weights=None, **kwargs):
        """
        :param lambda_: non-negative scaling factor of the penalty.
        :param groups: list of index lists defining the groups.
        :param weights: per-group weights; must match ``groups`` in length.
        """
        super(structured_sparsity, self).__init__(**kwargs)
        if groups is None:
            groups = [[]]
        if weights is None:
            weights = [0]
        if lambda_ < 0:
            raise ValueError('The scaling factor must be non-negative.')
        self.lambda_ = lambda_
        if not isinstance(groups, list):
            raise TypeError('The groups must be defined as a list of lists.')
        self.groups = groups
        if len(weights) != len(groups):
            raise ValueError('Length of weights must be equal to number of groups.')
        self.weights = weights

    def _eval(self, x):
        """Evaluate the penalty: lambda_ * sum_g w_g * ||x[g]||_2."""
        costs = [(w * np.linalg.norm(x[g])) for (g, w) in zip(self.groups, self.weights)]
        return (self.lambda_ * np.sum(costs))

    def _prox(self, x, T):
        """Proximal operator: per-group soft-thresholding of the norms.

        Each group is shrunk toward zero by gamma * w_g; groups whose norm
        falls below the threshold are zeroed out entirely.
        """
        gamma = self.lambda_ * T
        v = x.copy()
        for (g, w) in zip(self.groups, self.weights):
            xn = np.linalg.norm(v[g])
            r = gamma * w
            if xn > r:
                # Shrink the group: v[g] * (1 - r/xn).
                v[g] -= (v[g] * r) / xn
            else:
                v[g] = 0
        return v
def _nested_set(configuration_obj: PackageConfiguration, keys: List, value: Any) -> None:
    """Set a (possibly nested) configuration value in-place on
    *configuration_obj*, addressed by the path *keys*.

    Skill configurations get special handling for their nested component
    fields; CRUD collections and pypi dependencies each have their own
    update path; everything else is set as a plain attribute.

    :raises ValueError: when the key path is invalid for the target object.
    """

    def get_nested_ordered_dict_from_dict(input_dict: Dict) -> Dict:
        # Recursively convert nested plain dicts into OrderedDicts.
        _dic = {}
        for (_key, _value) in input_dict.items():
            if isinstance(_value, dict):
                _dic[_key] = OrderedDict(get_nested_ordered_dict_from_dict(_value))
            else:
                _dic[_key] = _value
        return _dic

    def get_nested_ordered_dict_from_keys_and_value(keys: List[str], value: Any) -> Dict:
        # Wrap *value* in OrderedDicts keyed by *keys*, innermost-first.
        _dic = (OrderedDict(get_nested_ordered_dict_from_dict(value)) if isinstance(value, dict) else value)
        for key in keys[::(- 1)]:
            _dic = OrderedDict({key: _dic})
        return _dic

    root_key = keys[0]
    if (isinstance(configuration_obj, SkillConfig) and (root_key in SkillConfig.FIELDS_WITH_NESTED_FIELDS)):
        # Skill component fields: path must be <field>.<component_id>.<nested>...
        root_attr = getattr(configuration_obj, root_key)
        length = len(keys)
        if ((length < 3) or (keys[2] not in SkillConfig.NESTED_FIELDS_ALLOWED_TO_UPDATE)):
            raise ValueError(f'Invalid keys={keys}.')
        skill_component_id = keys[1]
        skill_component_config = root_attr.read(skill_component_id)
        if ((length == 3) and isinstance(value, dict)):
            # Replace the whole args mapping.
            skill_component_config.args = get_nested_ordered_dict_from_dict(value)
        elif (len(keys) >= 4):
            # Merge a deeper path into the existing args.
            dic = get_nested_ordered_dict_from_keys_and_value(keys[3:], value)
            skill_component_config.args.update(dic)
        else:
            raise ValueError(f'Invalid keys={keys} and values={value}.')
        root_attr.update(skill_component_id, skill_component_config)
    else:
        root_attr = getattr(configuration_obj, root_key)
        if isinstance(root_attr, CRUDCollection):
            if (isinstance(value, dict) and (len(keys) == 1)):
                # Update each item of the collection from the given mapping.
                for (_key, _value) in value.items():
                    dic = get_nested_ordered_dict_from_keys_and_value([_key], _value)
                    root_attr.update(_key, dic[_key])
            elif (len(keys) >= 2):
                dic = get_nested_ordered_dict_from_keys_and_value(keys[1:], value)
                root_attr.update(keys[1], dic[keys[1]])
            else:
                raise ValueError(f'Invalid keys={keys} and values={value}.')
        elif (root_key == 'dependencies'):
            # Only component configurations carry pypi dependencies.
            enforce(isinstance(configuration_obj, ComponentConfiguration), 'Cannot only set dependencies to ComponentConfiguration instances.')
            configuration_obj = cast(ComponentConfiguration, configuration_obj)
            new_pypi_dependencies = dependencies_from_json(value)
            configuration_obj.pypi_dependencies = new_pypi_dependencies
        else:
            # Plain attribute assignment.
            dic = get_nested_ordered_dict_from_keys_and_value(keys, value)
            setattr(configuration_obj, root_key, dic[root_key])
class queue_op_failed_error_msg(error_msg):
    """OpenFlow 'queue operation failed' error message (generated,
    loxigen-style wire codec).

    NOTE(review): this looks like Python-2-era generated code --
    ``''.join(packed)`` over ``struct.pack`` results and
    ``str(reader.read_all())`` assume str==bytes and would misbehave on
    Python 3; ``unpack`` also takes ``reader`` (not ``self``) and is
    presumably meant to carry a stripped ``@staticmethod`` decorator.
    Confirm against the original generated source.
    """

    # Wire-header constants for this message type.
    version = 1
    type = 1
    err_type = 5

    def __init__(self, xid=None, code=None, data=None):
        """Initialize with optional transaction id, error code and payload."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize the message; the length field (index 2) is back-patched
        once the total size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a message from *reader*, asserting the fixed header fields."""
        obj = queue_op_failed_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 1)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 5)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Field-wise equality against another message of the same type."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump of the message via pretty-printer *q*."""
        q.text('queue_op_failed_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Show the symbolic OFPQOFC_* name when the code is known.
                value_name_map = {0: 'OFPQOFC_BAD_PORT', 1: 'OFPQOFC_BAD_QUEUE', 2: 'OFPQOFC_EPERM'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
class OptionSeriesOrganizationSonificationTracksPointgrouping(Options):
    """Point-grouping options for organization-series sonification tracks.

    NOTE(review): each pair of same-named methods below reads like a
    property getter/setter pair whose ``@property``/``@x.setter``
    decorators are missing from this copy -- confirm against upstream.
    """
    def algorithm(self):
        # Getter: grouping algorithm name (default 'minmax').
        return self._config_get('minmax')
    def algorithm(self, text: str):
        # Setter: grouping algorithm name (plain value, not JS code).
        self._config(text, js_type=False)
    def enabled(self):
        # Getter: whether point grouping is enabled (default True).
        return self._config_get(True)
    def enabled(self, flag: bool):
        # Setter: enable/disable point grouping.
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Getter: grouping window length (default 15).
        return self._config_get(15)
    def groupTimespan(self, num: float):
        # Setter: grouping window length.
        self._config(num, js_type=False)
    def prop(self):
        # Getter: point property used for grouping (default 'y').
        return self._config_get('y')
    def prop(self, text: str):
        # Setter: point property used for grouping.
        self._config(text, js_type=False)
def _str_2_callable(val: str, **kwargs):
str_field = str(val)
(module, fn) = str_field.rsplit('.', 1)
try:
call_ref = getattr(importlib.import_module(module), fn)
except Exception as e:
raise _SpockValueError(f'Attempted to import module {module} and callable {fn} however it could not be found on the current python path: {e}')
return call_ref |
class Solution():
    """LeetCode 152: maximum product contiguous subarray."""

    def maxProduct(self, nums: List[int]) -> int:
        """Return the largest product over all contiguous subarrays of *nums*.

        Tracks the running maximum and minimum products ending at the
        current element; a track is restarted at 1 once it can no longer
        contribute to a larger future product.
        """
        best = None
        running_max, running_min = 1, 1
        for value in nums:
            candidates = (running_max * value, running_min * value)
            running_max = max(candidates)
            running_min = min(candidates)
            if best is None or best < running_max:
                best = running_max
            if best is None or best < running_min:
                best = running_min
            # A non-positive max (or non-negative min) cannot improve any
            # later product, so restart that track at the next element.
            if running_max <= 0:
                running_max = 1
            if running_min >= 0:
                running_min = 1
        return best
def initialise(pkg, lib_file, map_file):
    """Register bgfx include paths and reflection info with cppyy for *pkg*.

    Adds every header subtree under the package directory, loads the
    compiled reflection library, then injects the mapped types into the
    package module's namespaces.
    """
    pkg_dir = os.path.dirname(__file__)
    pkg_module = sys.modules[pkg]
    # All include subtrees the reflection headers may reference.
    include_subdirs = (
        '/include',
        '/include/bx',
        '/include/bimg',
        '/include/bgfx',
        '/include/imgui',
        '/include/extras',
        '/include/examples/common',
    )
    for subdir in include_subdirs:
        cppyy.add_include_path(pkg_dir + subdir)
    files = fix_map_file_paths(pkg_dir, map_file)
    cppyy.load_reflection_info(os.path.join(pkg_dir, lib_file))
    add_types_to_namespaces(files, pkg, pkg_module)
class BetterEmExtension(Extension):
    """Markdown extension replacing the stock strong/emphasis handling
    with smarter parsing of connected words ("better em")."""

    def __init__(self, *args, **kwargs):
        self.config = {'smart_enable': ['underscore', 'Treat connected words intelligently - Default: underscore']}
        super().__init__(*args, **kwargs)

    def extendMarkdown(self, md):
        md.registerExtension(self)
        self.make_better(md)

    def make_better(self, md):
        """Swap Markdown's default emphasis patterns for the improved ones."""
        enabled = self.getConfigs()['smart_enable']
        smart_all = enabled == 'all'
        smart_under = smart_all or enabled == 'underscore'
        smart_star = smart_all or enabled == 'asterisk'
        # Drop every stock strong/emphasis pattern before registering ours.
        for stock_name in ('not_strong', 'strong_em', 'em_strong', 'em_strong2',
                           'strong', 'emphasis', 'strong2', 'emphasis2'):
            md.inlinePatterns.deregister(stock_name, False)
        md.inlinePatterns.register(SimpleTextInlineProcessor(NOT_STRONG), 'not_strong', 70)
        star_processor = SmartAsteriskProcessor('\\*') if smart_star else AsteriskProcessor('\\*')
        md.inlinePatterns.register(star_processor, 'strong_em', 50)
        under_processor = SmartUnderscoreProcessor('_') if smart_under else UnderscoreProcessor('_')
        md.inlinePatterns.register(under_processor, 'strong_em2', 40)
class OptionSeriesPieDataAccessibility(Options):
    """Accessibility options for pie-series data points.

    NOTE(review): each pair of same-named methods below reads like a
    property getter/setter pair whose ``@property``/``@x.setter``
    decorators are missing from this copy -- confirm against upstream.
    """
    def description(self):
        # Getter: current description (default None).
        return self._config_get(None)
    def description(self, text: str):
        # Setter: description text (plain value, not JS code).
        self._config(text, js_type=False)
    def enabled(self):
        # Getter: whether accessibility is enabled (default None).
        return self._config_get(None)
    def enabled(self, flag: bool):
        # Setter: enable/disable accessibility for this point.
        self._config(flag, js_type=False)
class OptionPlotoptionsScatter3dSonificationTracksMappingLowpass(Options):
    """Lowpass-filter mapping options for scatter3d sonification tracks."""
    def frequency(self) -> 'OptionPlotoptionsScatter3dSonificationTracksMappingLowpassFrequency':
        # Nested sub-option object for the filter cutoff frequency.
        return self._config_sub_data('frequency', OptionPlotoptionsScatter3dSonificationTracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsScatter3dSonificationTracksMappingLowpassResonance':
        # Nested sub-option object for the filter resonance.
        return self._config_sub_data('resonance', OptionPlotoptionsScatter3dSonificationTracksMappingLowpassResonance)
class TestSoftTargetCrossEntropyLoss(testslide.TestCase):
    """Tests for SoftTargetCrossEntropyLoss over reductions, target
    formats (multi-hot / integer labels), normalization, and copying."""

    def _get_outputs(self) -> torch.Tensor:
        # Single-sample logits fixture (1 x 5).
        return torch.tensor([[1.0, 7.0, 0.0, 0.0, 2.0]])

    def _get_targets(self) -> torch.Tensor:
        # Multi-hot soft-target fixture matching _get_outputs.
        return torch.tensor([[1, 0, 0, 0, 1]])

    def _get_loss(self) -> float:
        # Expected mean loss for the fixtures above.
        return 5.

    def test_soft_target_cross_entropy(self) -> None:
        # Mean reduction over the single fixture sample.
        crit = SoftTargetCrossEntropyLoss(reduction='mean')
        outputs = self._get_outputs()
        targets = self._get_targets()
        self.assertAlmostEqual(crit(outputs, targets).item(), self._get_loss())

    def test_soft_target_cross_entropy_none_reduction(self) -> None:
        # With reduction='none' the loss keeps one value per sample.
        crit = SoftTargetCrossEntropyLoss(reduction='none')
        outputs = torch.tensor([[1.0, 7.0, 0.0, 0.0, 2.0], [4.0, 2.0, 1.0, 6.0, 0.5]])
        targets = torch.tensor([[1, 0, 0, 0, 1], [0, 1, 0, 1, 0]])
        loss = crit(outputs, targets)
        self.assertEqual(loss.numel(), outputs.size(0))

    def test_soft_target_cross_entropy_integer_label(self) -> None:
        # Integer class labels must be accepted alongside soft targets.
        crit = SoftTargetCrossEntropyLoss(reduction='mean')
        outputs = self._get_outputs()
        targets = torch.tensor([4])
        self.assertAlmostEqual(crit(outputs, targets).item(), 5.)

    def test_unnormalized_soft_target_cross_entropy(self) -> None:
        # Without target normalization the multi-hot loss is larger.
        crit = SoftTargetCrossEntropyLoss(reduction='none', normalize_targets=False)
        outputs = self._get_outputs()
        targets = self._get_targets()
        self.assertAlmostEqual(crit(outputs, targets).item(), 11.0219593)

    def test_deep_copy(self) -> None:
        # A deep-copied criterion must produce the same loss value.
        crit = SoftTargetCrossEntropyLoss(reduction='mean')
        outputs = self._get_outputs()
        targets = self._get_targets()
        crit(outputs, targets)
        crit2 = copy.deepcopy(crit)
        self.assertAlmostEqual(crit2(outputs, targets).item(), self._get_loss())
class JobQueues():
    """Read-only collection of per-worker JobQueue objects.

    Backed by a JobsConfig (which supplies the worker ids) and a
    JobQueuesFS (which resolves each queue's on-disk location).
    NOTE(review): ``from_config`` takes ``cls`` but carries no
    ``@classmethod`` decorator in this copy -- likely stripped; confirm.
    """
    def from_config(cls, cfg: 'JobsConfig', fs: JobQueuesFS):
        # Alternate constructor from a jobs config plus filesystem helper.
        return cls(cfg, fs)
    def __init__(self, cfg: 'JobsConfig', fs: JobQueuesFS):
        self._cfg = cfg  # jobs configuration (provides the .workers mapping)
        self._fs = fs    # filesystem-layout helper for queue files
    def __len__(self):
        # One queue per configured worker.
        return len(self._cfg.workers)
    def __iter__(self) -> Iterator[JobQueue]:
        # Yield a freshly-loaded JobQueue for each configured worker id.
        for queueid in self._cfg.workers.keys():
            (yield JobQueue.from_fstree(self._fs.resolve_queue(queueid), queueid))
    def __getitem__(self, queueid: str) -> JobQueue:
        # Load the queue for a known worker id; unknown ids are a caller bug.
        assert (queueid in self._cfg.workers), queueid
        return JobQueue.from_fstree(self._fs.resolve_queue(queueid), queueid)
class FuseBmmCrrAddCase(unittest.TestCase):
    """Tests that bmm_crr + elementwise-add is fused into bmm_crr_add
    by the compiler, and that the fused graph matches PyTorch output."""

    def _test_bmm_crr_add(self, Bs, M, N, K, testname, dtype='float16', do_not_fuse=False):
        """Build bmm_crr followed by two adds, compile, and verify.

        Args:
            Bs: candidate batch sizes (dynamic batch when more than one).
            M, N, K: GEMM problem dimensions (inputs are column-major: K x M and K x N).
            testname: unique name for the compiled module directory.
            dtype: tensor dtype for inputs/outputs.
            do_not_fuse: build a broadcastable (1 x N) add operand so the
                fusion must NOT trigger; skips the numeric check.
        """
        batch_dim = shape_utils.gen_int_var_min_max(Bs, name='batch_size')
        A_shape = [batch_dim, K, M]
        B_shape = [batch_dim, K, N]
        if do_not_fuse:
            # Broadcast add operand blocks the fusion (requires M != 1).
            assert (M != 1)
            D0_shape = [batch_dim, 1, N]
        else:
            D0_shape = [batch_dim, M, N]
        input_0 = Tensor(shape=A_shape, dtype=dtype, name='input_0', is_input=True)
        input_1 = Tensor(shape=B_shape, dtype=dtype, name='input_1', is_input=True)
        input_2 = Tensor(shape=D0_shape, dtype=dtype, name='input_2', is_input=True)
        # bmm_crr(A, B) + D0 + D0 -- first add should fuse into the bmm.
        bmm_tensor = ops.gemm_universal.bmm_crr()(input_0, input_1)
        add_tensor = ops.elementwise(FuncEnum.ADD)(bmm_tensor, input_2)
        add_tensor._attrs['name'] = 'add_tensor'
        output = ops.elementwise(FuncEnum.ADD)(add_tensor, input_2)
        output._attrs['name'] = 'output_0'
        output._attrs['is_output'] = True
        target = detect_target()
        module = compile_model(output, target, './tmp', testname)
        # Inspect the compiled graph: find the tensor produced by the bmm op
        # and check whether fusion happened as expected.
        check_tensor = None
        for tensor in module.debug_sorted_graph:
            src_ops = list(tensor.src_ops())
            if (len(src_ops) != 1):
                continue
            if src_ops[0]._attrs['op'].startswith('bmm'):
                check_tensor = tensor
                if do_not_fuse:
                    self.assertEqual(src_ops[0]._attrs['op'], 'bmm_crr')
                else:
                    self.assertEqual(src_ops[0]._attrs['op'], 'bmm_crr_add')
                break
        self.assertIsNotNone(check_tensor)
        if do_not_fuse:
            return
        # Numeric check against PyTorch for every batch size.
        for B in Bs:
            X_pt = get_random_torch_tensor([B, K, M], dtype)
            W_pt = get_random_torch_tensor([B, K, N], dtype)
            D0_pt = get_random_torch_tensor([B, M, N], dtype)
            # bmm_crr computes A^T @ B with column-major A.
            Y_pt = ((torch.bmm(X_pt.transpose(2, 1), W_pt) + D0_pt) + D0_pt)
            input_name_to_index = module.get_input_name_to_index_map()
            inputs = ([None] * 3)
            inputs[input_name_to_index['input_0']] = X_pt
            inputs[input_name_to_index['input_1']] = W_pt
            inputs[input_name_to_index['input_2']] = D0_pt
            y = get_torch_empty_tensor([B, M, N], dtype)
            module.run_with_tensors(inputs, [y])
            self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))

    def test_bmm_crr_add(self):
        # Static, dynamic, unaligned, and no-fuse variants (fp16).
        self._test_bmm_crr_add([8], 32, 16, 8, 'bmm_crr_add_basic')
        self._test_bmm_crr_add([8, 32], 32, 16, 8, 'bmm_crr_add_dynamic')
        self._test_bmm_crr_add([8], 7, 13, 3, 'bmm_crr_add_need_align')
        self._test_bmm_crr_add([8], 32, 16, 8, 'bmm_crr_add_do_not_fuse', do_not_fuse=True)

    # NOTE(review): the bare expression below looks like the argument list
    # of a stripped ``@unittest.skipIf(...)`` decorator -- confirm upstream.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_bmm_crr_add_float_sm80(self):
        # Same variants in float32 (sm80+ only; not supported on ROCM).
        self._test_bmm_crr_add([8, 32], 32, 16, 8, 'bmm_crr_add_dynamic_float', dtype='float')
        self._test_bmm_crr_add([8], 7, 13, 3, 'bmm_crr_add_need_align_float', dtype='float')
        self._test_bmm_crr_add([8], 32, 16, 8, 'bmm_crr_add_do_not_fuse', dtype='float', do_not_fuse=True)
def test_save_and_load(conversation: StorageConversation, conv_storage, message_storage):
    """A completed round survives a save/reload cycle with content,
    message types, and round indices intact."""
    # Write one user/AI exchange and close the round (persists it).
    conversation.start_new_round()
    conversation.add_user_message('hello')
    conversation.add_ai_message('hi')
    conversation.end_current_round()
    # Re-load the same conversation id from the storage backends.
    saved_conversation = StorageConversation(conv_uid=conversation.conv_uid, conv_storage=conv_storage, message_storage=message_storage)
    assert (saved_conversation.conv_uid == conversation.conv_uid)
    assert (len(saved_conversation.messages) == 2)
    assert isinstance(saved_conversation.messages[0], HumanMessage)
    assert isinstance(saved_conversation.messages[1], AIMessage)
    assert (saved_conversation.messages[0].content == 'hello')
    assert (saved_conversation.messages[0].round_index == 1)
    assert (saved_conversation.messages[1].content == 'hi')
    assert (saved_conversation.messages[1].round_index == 1)
# NOTE(review): the bare ``.integration()`` below looks like the tail of a
# stripped pytest marker decorator (e.g. ``@pytest.mark.integration()``) --
# confirm against the upstream file.
.integration()
def test_schema_inference(client):
    """End-to-end schema inference: infer, upload, read back, and warn on
    a file whose delimiter cannot be determined."""
    # Unique dataset path so parallel runs don't collide.
    rnd = ''.join((choice(ascii_uppercase) for i in range(5)))
    dataset_path = os.fspath((INTEGRATION_TEST_COMPASS_ROOT_PATH / f'test_api_schema_{rnd}'))
    ds = client.create_dataset(dataset_path)
    client.create_branch(ds['rid'], 'master')
    # Commit a well-formed CSV.
    transaction_rid = client.open_transaction(ds['rid'], 'SNAPSHOT', 'master')
    client.upload_dataset_file(ds['rid'], transaction_rid, io.StringIO('col1,col2\n1,2'), 'test.csv')
    client.commit_transaction(ds['rid'], transaction_rid)
    # No schema exists until one is uploaded.
    with pytest.raises(DatasetHasNoSchemaError):
        client.get_dataset_schema(ds['rid'], transaction_rid, 'master')
    inferred_schema = client.infer_dataset_schema(ds['rid'], 'master')
    client.upload_dataset_schema(ds['rid'], transaction_rid, inferred_schema, 'master')
    returned_schema = client.get_dataset_schema(ds['rid'], transaction_rid, 'master')
    # The service adds primaryKey=None; align before comparing.
    inferred_schema['primaryKey'] = None
    assert (inferred_schema == returned_schema)
    # Commit a malformed CSV: inference should succeed but warn.
    transaction_rid = client.open_transaction(ds['rid'], 'SNAPSHOT', 'master')
    client.upload_dataset_file(ds['rid'], transaction_rid, io.StringIO('col1,co"""l2\n1,2,3'), 'test.csv')
    client.commit_transaction(ds['rid'], transaction_rid)
    with pytest.warns(UserWarning) as warning:
        client.infer_dataset_schema(ds['rid'], 'master')
    assert (str(warning[0].message) == 'Foundry Schema inference completed with status \'WARN\' and message \'No column delimiters found. The delimiter (Comma ",") was our best guess.\'.')
    client.delete_dataset(ds['rid'])
class Solution():
    """LeetCode 714: best stock profit with a per-transaction fee."""

    def maxProfit(self, prices: List[int], fee: int) -> int:
        """Return the maximum profit from unlimited buy/sell transactions,
        paying *fee* on each sell.

        Args:
            prices: daily share prices.
            fee: flat fee charged per sell.

        Returns:
            Maximum achievable profit (0 for empty input).
        """
        cash = 0              # best profit while holding no share
        hold = float('-inf')  # best profit while holding a share;
                              # -inf means "have not bought yet"
        for price in prices:
            # Either sell today (pay the fee) or stay as-is; either buy
            # today or keep the current holding.  Tuple assignment uses
            # the previous day's values on both sides.
            cash, hold = (max(cash, hold + price - fee),
                          max(hold, cash - price))
        return cash
# NOTE(review): the bare ``.parametrize(...)`` below looks like the tail of
# a stripped ``@pytest.mark.parametrize`` decorator -- confirm upstream.
.parametrize('dtype', SUPPORTED_DTYPES)
def test_feature_quantizer_simple(dtype):
    """Quantizer round-trip: transform produces the requested dtype and
    inverse_transform recovers the input within dtype-dependent precision."""
    rng = numpy.random.default_rng()
    a = rng.normal(size=(10, 3))
    f = Quantizer(dtype=dtype, max_value=10.0)
    f.fit(a)
    out = f.transform(a)
    assert (out.dtype == numpy.dtype(dtype))
    oo = f.inverse_transform(out)
    # 8-bit dtypes have far coarser resolution, so relax the tolerance.
    expected_decimals_correct = 3
    if ('8' in dtype):
        expected_decimals_correct = 1
    assert_almost_equal(a, oo, decimal=expected_decimals_correct)
class ErrorResponse(JsonApiException):
    """JSON:API exception that can render itself as a Flask response."""

    headers = {'Content-Type': 'application/vnd.api+json'}

    def __init__(self, source: Union[(dict, str)], detail=None, title=None, status=None):
        # Shorthand form: a lone string is treated as the error detail.
        shorthand = isinstance(source, str) and detail is None
        if shorthand:
            super().__init__(None, source)
        else:
            super().__init__(source, detail, title, status)

    def respond(self):
        """Build a Flask response carrying this error as a JSON:API payload."""
        payload = jsonapi_errors([self.to_dict()])
        return make_response(json.dumps(payload), self.status, self.headers)
class AmazonSPAPISettings(Document):
    """Frappe doctype controller for Amazon SP-API integration settings.

    Validates the field map and credentials, migrates legacy data on save,
    and enqueues background order fetching.
    """

    def before_validate(self):
        # Seed the Amazon->Item field map on first save.
        if (not self.amazon_fields_map):
            self.set_default_fields_map()

    def validate(self):
        self.validate_amazon_fields_map()
        self.validate_after_date()
        if (self.is_active == 1):
            # Active integrations must have working credentials and the
            # custom fields installed.
            self.validate_credentials()
            setup_custom_fields()
        else:
            # Inactive settings may not keep syncing.
            self.enable_sync = 0
        # Clamp retry limit to the range 1..5.
        if (not self.max_retry_limit):
            self.max_retry_limit = 1
        elif (self.max_retry_limit and (self.max_retry_limit > 5)):
            frappe.throw(frappe._('Value for <b>Max Retry Limit</b> must be less than or equal to 5.'))

    def save(self):
        super(AmazonSPAPISettings, self).save()
        # One-time migration of pre-SP-API records, flagged in the DB so it
        # never runs twice.
        if (not self.is_old_data_migrated):
            migrate_old_data()
            self.db_set('is_old_data_migrated', 1)

    def validate_amazon_fields_map(self):
        """Ensure mapped Item fields exist and exactly one unique field is
        marked as the item-code lookup."""
        count = 0
        for field_map in self.amazon_fields_map:
            item_meta = frappe.get_meta('Item')
            field_meta = item_meta.get_field(field_map.item_field)
            if (field_map.item_field and (not field_meta)):
                frappe.throw(_('Row #{0}: Item Field {1} does not exist.').format(field_map.idx, frappe.bold(field_map.item_field)))
            if field_map.use_to_find_item_code:
                if (not field_map.item_field):
                    frappe.throw(_('Row #{0}: Item Field is required.').format(field_map.idx))
                elif (not field_meta.unique):
                    # Lookups must be unambiguous, so the field must be unique.
                    frappe.throw(_('Row #{0}: Item Field {1} must be unique.').format(field_map.idx, frappe.bold(field_map.item_field)))
                count += 1
        if (count == 0):
            frappe.throw(_('At least one field must be selected to find the item code.'))
        elif (count > 1):
            frappe.throw(_('Only one field can be selected to find the item code.'))

    def validate_after_date(self):
        # Amazon only serves recent orders; reject cutoffs older than 30 days.
        if (datetime.strptime(add_days(today(), (- 30)), '%Y-%m-%d') > datetime.strptime(self.after_date, '%Y-%m-%d')):
            frappe.throw(_('The date must be within the last 30 days.'))

    def validate_credentials(self):
        # Imported lazily to avoid a circular import with the repository module.
        from ecommerce_integrations.amazon.doctype.amazon_sp_api_settings.amazon_repository import validate_amazon_sp_api_credentials
        validate_amazon_sp_api_credentials(iam_arn=self.get('iam_arn'), client_id=self.get('client_id'), client_secret=self.get_password('client_secret'), refresh_token=self.get('refresh_token'), aws_access_key=self.get('aws_access_key'), aws_secret_key=self.get_password('aws_secret_key'), country=self.get('country'))

    # NOTE(review): the bare ``()`` below looks like the残 remains of a
    # stripped decorator such as ``@frappe.whitelist()`` -- confirm upstream.
    ()
    def set_default_fields_map(self):
        """Populate the default ASIN/SellerSKU/Title field map (ASIN is the
        item-code lookup)."""
        for field_map in [{'amazon_field': 'ASIN', 'item_field': 'item_code', 'use_to_find_item_code': 1}, {'amazon_field': 'SellerSKU', 'item_field': None, 'use_to_find_item_code': 0}, {'amazon_field': 'Title', 'item_field': None, 'use_to_find_item_code': 0}]:
            self.append('amazon_fields_map', field_map)

    # NOTE(review): the bare ``()`` below looks like a stripped decorator
    # (e.g. ``@frappe.whitelist()``) -- confirm upstream.
    ()
    def get_order_details(self):
        """Enqueue a background job that pulls orders created after
        ``after_date``; refuses to double-enqueue a running job."""
        from ecommerce_integrations.amazon.doctype.amazon_sp_api_settings.amazon_repository import get_orders
        if (self.is_active == 1):
            job_name = f'Get Amazon Orders - {self.name}'
            if frappe.db.get_all('RQ Job', {'job_name': job_name, 'status': ['in', ['queued', 'started']]}):
                return frappe.msgprint(_('The order details are currently being fetched in the background.'))
            frappe.enqueue(job_name=job_name, method=get_orders, amz_setting_name=self.name, created_after=self.after_date, timeout=4000, now=frappe.flags.in_test)
            frappe.msgprint(_('Order details will be fetched in the background.'))
        else:
            frappe.msgprint(_('Please enable the Amazon SP API Settings {0}.').format(frappe.bold(self.name)))
class ModifyChrootForm(ChrootForm):
    """WTForms form for editing an existing chroot's build configuration."""
    # Extra packages always installed into the minimal buildroot.
    buildroot_pkgs = wtforms.StringField('Additional packages to be always present in minimal buildroot')
    # Extra repo URLs, validated and normalized into a list.
    repos = wtforms.TextAreaField('Additional repos to be used for builds in chroot', validators=[UrlRepoListValidator(), wtforms.validators.Optional()], filters=[StringListFilter()])
    # comps content is handled via upload/delete below, not edited inline.
    comps = None
    upload_comps = FileField('Upload comps.xml')
    delete_comps = wtforms.BooleanField('Delete comps.xml', false_values=FALSE_VALUES)
    # Comma/space separated field names to restore to their defaults.
    reset_fields = wtforms.StringField('Reset these fields to their defaults')
class TestGetConnections():
    """API tests for listing connection configs: auth, scopes, masking of
    secrets, and role/system-manager based access."""

    def test_get_connections_not_authenticated(self, api_client: TestClient, generate_auth_header, connection_config, url) -> None:
        # No auth header at all -> 401.
        resp = api_client.get(url, headers={})
        assert (resp.status_code == HTTP_401_UNAUTHORIZED)

    def test_get_connections_with_invalid_system(self, api_client: TestClient, generate_auth_header, url_invalid_system):
        # Unknown system in the path -> 404 with an explanatory detail.
        auth_header = generate_auth_header(scopes=[CONNECTION_READ])
        resp = api_client.get(url_invalid_system, headers=auth_header)
        assert (resp.json()['detail'] == 'The specified system was not found. Please provide a valid system for the requested operation.')
        assert (resp.status_code == HTTP_404_NOT_FOUND)

    def test_get_connections_wrong_scope(self, api_client: TestClient, generate_auth_header, connection_config, url) -> None:
        # A token lacking CONNECTION_READ -> 403.
        auth_header = generate_auth_header(scopes=[STORAGE_DELETE])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == HTTP_403_FORBIDDEN)

    def test_get_connection_configs(self, api_client: TestClient, generate_auth_header, connection_config, url, connections, db: Session) -> None:
        """Created connections come back paginated with the full key set."""
        # Seed three connections, then list them with a read-scoped token.
        auth_header = generate_auth_header(scopes=[CONNECTION_CREATE_OR_UPDATE])
        api_client.patch(url, headers=auth_header, json=connections)
        auth_header = generate_auth_header(scopes=[CONNECTION_READ])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == HTTP_200_OK)
        response_body = json.loads(resp.text)
        assert (len(response_body['items']) == 3)
        connection = response_body['items'][0]
        # Exact response schema for a connection item.
        assert (set(connection.keys()) == {'connection_type', 'access', 'updated_at', 'saas_config', 'secrets', 'name', 'last_test_timestamp', 'last_test_succeeded', 'key', 'created_at', 'disabled', 'description', 'authorized', 'enabled_actions'})
        connection_keys = [connection['key'] for connection in connections]
        assert (response_body['items'][0]['key'] in connection_keys)
        assert (response_body['items'][1]['key'] in connection_keys)
        assert (response_body['items'][2]['key'] in connection_keys)
        assert (response_body['total'] == 3)
        assert (response_body['page'] == 1)
        assert (response_body['size'] == page_size)

    def test_get_connection_configs_masks_secrets(self, api_client: TestClient, generate_auth_header, connection_config, url, connections, db: Session) -> None:
        """Sensitive secret values are masked (or absent) in list responses."""
        auth_header = generate_auth_header(scopes=[CONNECTION_CREATE_OR_UPDATE])
        api_client.patch(url, headers=auth_header, json=connections)
        auth_header = generate_auth_header(scopes=[CONNECTION_READ])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == HTTP_200_OK)
        response_body = json.loads(resp.text)
        assert (len(response_body['items']) == 3)
        connection_1 = response_body['items'][0]['secrets']
        connection_2 = response_body['items'][1]['secrets']
        connection_3 = response_body['items'][2]['secrets']
        assert (connection_1 == {'api_key': '', 'domain': 'test_mailchimp_domain', 'username': 'test_mailchimp_username'})
        # NOTE(review): the literal below appears garbled (likely a redacted
        # host/password value broke the dict syntax) -- restore from upstream.
        assert (connection_2 == {'db_schema': 'test', 'dbname': 'test', 'host': ' 'password': '', 'port': 5432, 'username': 'test'})
        assert (connection_3 == None)

    # NOTE(review): the bare ``.parametrize(...)`` below looks like a stripped
    # ``@pytest.mark.parametrize`` decorator -- confirm upstream.
    .parametrize('acting_user_role, expected_status_code, assign_system', [('viewer_user', HTTP_200_OK, False), ('viewer_user', HTTP_200_OK, True), ('viewer_and_approver_user', HTTP_200_OK, False), ('viewer_and_approver_user', HTTP_200_OK, True)])
    def test_get_connection_configs_role_viewer(self, api_client: TestClient, generate_auth_header, generate_system_manager_header, connection_config, connections, acting_user_role, expected_status_code, assign_system, system, request, db: Session) -> None:
        """Viewer-class roles can list a system's connections, either via
        role permissions or via system-manager assignment."""
        url = (V1_URL_PREFIX + f'/system/{system.fides_key}/connection')
        patch_auth_header = generate_auth_header(scopes=[CONNECTION_CREATE_OR_UPDATE])
        api_client.patch(url, headers=patch_auth_header, json=connections)
        acting_user_role = request.getfixturevalue(acting_user_role)
        if assign_system:
            # Make the user a manager of this system, then act with a
            # system-manager token instead of a role token.
            assign_url = (V1_URL_PREFIX + f'/user/{acting_user_role.id}/system-manager')
            system_manager_auth_header = generate_auth_header(scopes=[SYSTEM_MANAGER_UPDATE])
            api_client.put(assign_url, headers=system_manager_auth_header, json=[system.fides_key])
            auth_header = generate_system_manager_header([system.id])
        else:
            auth_header = generate_role_header_for_user(acting_user_role, roles=acting_user_role.permissions.roles)
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == expected_status_code)

    # NOTE(review): the bare ``.parametrize(...)`` below looks like a stripped
    # ``@pytest.mark.parametrize`` decorator -- confirm upstream.
    .parametrize('acting_user_role, expected_status_code', [('owner_user', HTTP_200_OK), ('contributor_user', HTTP_200_OK), ('approver_user', HTTP_403_FORBIDDEN)])
    def test_get_connection_configs_role(self, api_client: TestClient, generate_auth_header, connection_config, connections, acting_user_role, expected_status_code, system, request, db: Session) -> None:
        """Owner/contributor roles may list; approver role is forbidden."""
        url = (V1_URL_PREFIX + f'/system/{system.fides_key}/connection')
        patch_auth_header = generate_auth_header(scopes=[CONNECTION_CREATE_OR_UPDATE])
        api_client.patch(url, headers=patch_auth_header, json=connections)
        acting_user_role = request.getfixturevalue(acting_user_role)
        auth_header = generate_role_header_for_user(acting_user_role, roles=acting_user_role.permissions.roles)
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == expected_status_code)
# NOTE(review): the bare call below looks like the tail of a stripped
# decorator (e.g. ``@common.requires_os(*metadata.platforms)``) -- confirm
# against the upstream emulation-test file.
_os(*metadata.platforms)
def main():
    """Emulate dropping a file into the Startup folder via PowerShell,
    then clean it up."""
    powershell = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
    # Unquoted path for mkdir; argpath quotes 'Start Menu' for the shell.
    path = 'C:\\Users\\Public\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup'
    argpath = "C:\\Users\\Public\\AppData\\Roaming\\Microsoft\\Windows\\'Start Menu'\\Programs\\Startup"
    Path(path).mkdir(parents=True, exist_ok=True)
    file = (argpath + '\\file.exe')
    # Write the marker file through PowerShell, then remove it.
    common.execute([powershell, '/c', f'echo AAAAAAAA | Out-File {file}'], timeout=10, kill=True)
    common.remove_files(file)
def get_event_filter() -> 'EventFilter':
    """Build an EventFilter from the current request's query parameters.

    Missing id parameters default to None; a missing ``min_date``
    defaults to negative infinity (no lower bound).
    """
    args = request.args
    return EventFilter(
        min_date=args.get('min_date', float('-inf')),
        country_filter=args.get('country', None),
        war_filter=args.get('war', None),
        leader_filter=args.get('leader', None),
        system_filter=args.get('system', None),
        planet_filter=args.get('planet', None),
    )
class OptionSeriesColumnpyramidSonificationTracksMappingTime(Options):
    """Time-mapping options for columnpyramid-series sonification tracks.

    NOTE(review): each pair of same-named methods below reads like a
    property getter/setter pair whose ``@property``/``@x.setter``
    decorators are missing from this copy -- confirm against upstream.
    """
    def mapFunction(self):
        # Getter: mapping function (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: mapping function.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property the time is mapped to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: data property the time is mapped to.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)
    def within(self):
        # Getter: reference scope for the mapping (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: reference scope for the mapping.
        self._config(value, js_type=False)
def test_slave_message_get_node_text():
    """get_node_text returns a node's text, or the fallback when the node
    is missing or has no text."""
    xml = '<?xml version="1.0" encoding="UTF-8"?>\n    <root>\n        <item>\n            <child>Text</child>\n        </item>\n        <emptyItem/>\n    </root>\n    '
    root = ETree.fromstring(xml)
    # Existing node with text -> its text.
    assert (SlaveMessageManager.get_node_text(root, './item/child', '') == 'Text')
    # Missing node -> fallback value.
    assert (SlaveMessageManager.get_node_text(root, './item/non_existing', 'fallback') == 'fallback')
    # Present but empty node -> fallback as well.
    assert (SlaveMessageManager.get_node_text(root, './emptyItem', 'fallback') == 'fallback')
class Point(object):
    """Swagger-generated model for a GeoJSON-style Point.

    NOTE(review): the duplicate ``coordinates``/``type`` method pairs below
    read like ``@property``/setter pairs whose decorators were stripped from
    this copy -- confirm against the generated upstream source.
    """
    # attribute name -> swagger type, and attribute -> JSON key mappings.
    swagger_types = {'coordinates': 'Vect2D', 'type': 'str'}
    attribute_map = {'coordinates': 'coordinates', 'type': 'type'}
    def __init__(self, coordinates=None, type='Point'):
        self._coordinates = None
        self._type = None
        self.discriminator = None
        if (coordinates is not None):
            self.coordinates = coordinates
        if (type is not None):
            self.type = type
    def coordinates(self):
        # Getter: the point's Vect2D coordinates.
        return self._coordinates
    def coordinates(self, coordinates):
        # Setter: no validation is applied.
        self._coordinates = coordinates
    def type(self):
        # Getter: the GeoJSON type string.
        return self._type
    def type(self, type):
        # Setter: only 'Point' is accepted.
        allowed_values = ['Point']
        if (type not in allowed_values):
            raise ValueError('Invalid value for `type` ({0}), must be one of {1}'.format(type, allowed_values))
        self._type = type
    def to_dict(self):
        """Recursively convert this model (and nested models/collections)
        to a plain dict."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        # Include dict entries if the generated model subclasses dict.
        if issubclass(Point, dict):
            for (key, value) in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Return a pretty-printed string of the model's dict form."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        return self.to_str()
    def __eq__(self, other):
        # Equality compares full attribute dicts of same-typed models.
        if (not isinstance(other, Point)):
            return False
        return (self.__dict__ == other.__dict__)
    def __ne__(self, other):
        return (not (self == other))
class DummyIPFSBackend(BaseIPFSBackend):
    """Test double for an IPFS backend, serving files from a local
    fixture directory instead of the network."""
    # NOTE(review): class-level dict, so the asset cache is shared across
    # all instances -- presumably intentional for tests; confirm.
    _assets: Dict = {}
    _path = Path('./tests/data/ipfs-cache-mock').resolve()
    def fetch_uri_contents(self, ipfs_uri: str) -> bytes:
        """Return the cached contents for *ipfs_uri*, loading the matching
        fixture file on first access."""
        ipfs_uri = ipfs_uri.replace('ipfs://', '')
        if (ipfs_uri not in self._assets):
            with self._path.joinpath(ipfs_uri).open() as fp:
                self._assets[ipfs_uri] = fp.read()
        return self._assets[ipfs_uri].encode()
    def pin_assets(self, file_or_dir_path: Path) -> List:
        """Simulate pinning: cache file contents and return dummy pin data.

        Raises:
            FileNotFoundError: if the path is neither a file nor a directory.
        """
        if file_or_dir_path.is_dir():
            # Cache every file in the directory under its filename.
            for path in file_or_dir_path.glob('*'):
                with path.open() as fp:
                    self._assets[path.name] = fp.read()
            asset_data = [dummy_ipfs_pin(path) for path in file_or_dir_path.glob('*')]
        elif file_or_dir_path.is_file():
            asset_data = [dummy_ipfs_pin(file_or_dir_path)]
            with file_or_dir_path.open() as fp:
                self._assets[file_or_dir_path.name] = fp.read()
            # Also index the content under its dummy pin hash.
            self._assets[asset_data[0]['Hash']] = self._assets[file_or_dir_path.name]
        else:
            raise FileNotFoundError(f'{file_or_dir_path} is not a valid file or directory path.')
        return asset_data
def count_pairpos_bytes(font: TTFont) -> int:
    """Return the total compiled size, in bytes, of the GPOS PairPos lookups.

    Counts direct PairPos lookups (type 2) and Extension lookups (type 9)
    whose subtables wrap PairPos.

    Args:
        font: a fontTools TTFont containing a GPOS table.

    Returns:
        Sum of the serialized byte lengths of the matching lookups.
    """
    total = 0  # renamed from ``bytes`` to avoid shadowing the builtin
    gpos = font['GPOS']

    def _compiled_size(lookup) -> int:
        # Compile the lookup in isolation and measure its serialized size.
        writer = OTTableWriter(tableTag=gpos.tableTag)
        lookup.compile(writer, font)
        return len(writer.getAllData())

    for lookup in gpos.table.LookupList.Lookup:
        if lookup.LookupType == 2:
            total += _compiled_size(lookup)
        elif lookup.LookupType == 9:
            # Extension lookup: count only if it wraps PairPos subtables.
            if any(subtable.ExtensionLookupType == 2 for subtable in lookup.SubTable):
                total += _compiled_size(lookup)
    return total
class TraversedPartialPath(Exception):
    """Raised when a trie traversal stops partway into a leaf or extension
    node, with some requested nibbles left untraversed.

    Also precomputes a "simulated" node: the node as it would look if the
    untraversed nibbles had been consumed (key/extension trimmed).
    NOTE(review): the argument-accessor methods below take only ``self``
    and return ``self.args[...]`` -- they read like ``@property`` methods
    whose decorators were stripped from this copy; confirm upstream.
    """
    def __init__(self, nibbles_traversed: NibblesInput, node: HexaryTrieNode, untraversed_tail: NibblesInput, *args) -> None:
        # Store normalized Nibbles in self.args so the accessors below work.
        super().__init__(Nibbles(nibbles_traversed), node, Nibbles(untraversed_tail), *args)
        self._simulated_node = self._make_simulated_node()
    def __repr__(self) -> str:
        return f'TraversedPartialPath({self.nibbles_traversed}, {self.node}, {self.untraversed_tail})'
    def __str__(self) -> str:
        return f'Could not traverse through {self.node} at {self.nibbles_traversed}, only partially traversed with: {self.untraversed_tail}'
    def nibbles_traversed(self) -> Nibbles:
        # Nibbles consumed before traversal stopped.
        return self.args[0]
    def node(self) -> HexaryTrieNode:
        # The node at which traversal stopped.
        return self.args[1]
    def untraversed_tail(self) -> Nibbles:
        # Requested nibbles that were not consumed.
        return self.args[2]
    def simulated_node(self) -> HexaryTrieNode:
        # Node as it would appear after consuming the untraversed tail.
        return self._simulated_node
    def _make_simulated_node(self) -> HexaryTrieNode:
        """Build the node that traversal *would* have produced by trimming
        the untraversed tail off the leaf suffix or extension key."""
        # Imported here, presumably to avoid a circular import -- confirm.
        from trie.utils.nodes import compute_extension_key, compute_leaf_key, key_starts_with
        actual_node = self.node
        key_tail = self.untraversed_tail
        actual_sub_segments = actual_node.sub_segments
        if (len(key_tail) == 0):
            raise ValueError('Can only raise a TraversedPartialPath when some series of nibbles was untraversed')
        if (len(actual_sub_segments) == 0):
            # Leaf node: trim the consumed tail from its suffix.
            if (not key_starts_with(actual_node.suffix, key_tail)):
                raise ValidationError(f'Internal traverse bug: {actual_node.suffix} does not start with {key_tail}')
            else:
                trimmed_suffix = Nibbles(actual_node.suffix[len(key_tail):])
                return HexaryTrieNode((), actual_node.value, trimmed_suffix, [compute_leaf_key(trimmed_suffix), actual_node.raw[1]], NodeType(NODE_TYPE_LEAF))
        elif (len(actual_sub_segments) == 1):
            # Extension node: trim the consumed tail from its key segment.
            extension = actual_sub_segments[0]
            if (not key_starts_with(extension, key_tail)):
                raise ValidationError(f'Internal traverse bug: extension {extension} does not start with {key_tail}')
            elif (len(key_tail) == len(extension)):
                # A fully-consumed extension should not have raised at all.
                raise ValidationError(f'Internal traverse bug: {key_tail} should not equal {extension}')
            else:
                trimmed_extension = Nibbles(extension[len(key_tail):])
                return HexaryTrieNode((trimmed_extension,), actual_node.value, actual_node.suffix, [compute_extension_key(trimmed_extension), actual_node.raw[1]], NodeType(NODE_TYPE_EXTENSION))
        else:
            # Branch nodes never produce a partial traversal.
            raise ValidationError(f'Can only partially traverse into leaf or extension, got {actual_node}')
class RegexValidator(object):
    """Validate string values against a regular expression (anchored at
    the start, via ``re.match``)."""

    def __init__(self, regex, verbose_pattern=None) -> None:
        self.regex = regex
        # verbose_pattern is the human-readable form used in error
        # messages; it falls back to the raw regex when not given.
        self.verbose_pattern = regex if not verbose_pattern else verbose_pattern

    def __call__(self, value, field_name):
        """Return ``str(value)`` if it matches, else raise ConfigurationError."""
        value = str(value)
        if re.match(self.regex, value) is not None:
            return value
        raise ConfigurationError('{} does not match pattern {}'.format(value, self.verbose_pattern), field_name)
def upgrade():
    """Alembic migration: add billing-state and invoice-scheduling columns
    to ``settings`` and a billing-state column to ``users``."""
    op.add_column('settings', sa.Column('admin_billing_state', sa.String(), nullable=True))
    # Server defaults keep existing rows valid for the NOT NULL columns.
    op.add_column('settings', sa.Column('invoice_sending_day', sa.Integer(), server_default='1', nullable=False))
    op.add_column('settings', sa.Column('invoice_sending_timezone', sa.String(), server_default='UTC', nullable=False))
    op.add_column('users', sa.Column('billing_state', sa.String(), nullable=True))
def test_websocket_iter_text(test_client_factory):
    """An echo server built on ``WebSocket.iter_text`` round-trips a text
    frame with a prefix."""
    async def app(scope: Scope, receive: Receive, send: Send) -> None:
        # Minimal ASGI app: accept, then echo every text message back.
        websocket = WebSocket(scope, receive=receive, send=send)
        (await websocket.accept())
        async for data in websocket.iter_text():
            (await websocket.send_text(('Message was: ' + data)))
    client = test_client_factory(app)
    with client.websocket_connect('/') as websocket:
        websocket.send_text('Hello, world!')
        data = websocket.receive_text()
        assert (data == 'Message was: Hello, world!')
def _start():
    """Read display/input settings from the patch config and start the Qt
    application with a 10 ms polling timer driving ``_loop_once``."""
    # Module-level state shared with _stop/_loop_once and the Window class.
    global patch, name, path, monitor
    global delay, winx, winy, winwidth, winheight, input_name, input_variable, variable, app, window, timer
    delay = patch.getfloat('general', 'delay')
    # Window geometry from the [display] section.
    winx = patch.getint('display', 'xpos')
    winy = patch.getint('display', 'ypos')
    winwidth = patch.getint('display', 'width')
    winheight = patch.getint('display', 'height')
    # Parallel tuples of configured input names and their variables.
    (input_name, input_variable) = list(zip(*patch.config.items('input')))
    for (name, variable) in zip(input_name, input_variable):
        monitor.info(('%s = %s' % (name, variable)))
    app = QApplication(sys.argv)
    app.setWindowIcon(QtGui.QIcon(os.path.join(path, '../../doc/figures/logo-128.ico')))
    # Clean shutdown on window close and on Ctrl-C.
    app.aboutToQuit.connect(_stop)
    signal.signal(signal.SIGINT, _stop)
    window = Window()
    window.show()
    # Poll the processing loop every 10 ms; start(...) re-applies the
    # configured delay as the interval.
    timer = QtCore.QTimer()
    timer.timeout.connect(_loop_once)
    timer.setInterval(10)
    timer.start(int((delay * 1000)))
    # NOTE(review): debug leftover -- everything above is declared global,
    # so this prints any locals accidentally leaked; confirm it can be removed.
    if len(locals()):
        print(('LOCALS: ' + ', '.join(locals().keys())))
class OptionPlotoptionsBarSonificationContexttracksMappingVolume(Options):
    """Volume-mapping options for bar-series sonification context tracks.

    NOTE(review): each pair of same-named methods below reads like a
    property getter/setter pair whose ``@property``/``@x.setter``
    decorators are missing from this copy -- confirm against upstream.
    """
    def mapFunction(self):
        # Getter: mapping function (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: mapping function.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property the volume is mapped to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: data property the volume is mapped to.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)
    def within(self):
        # Getter: reference scope for the mapping (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: reference scope for the mapping.
        self._config(value, js_type=False)
class BlockWitnessHashesExchange(BaseExchange[(GetBlockWitnessHashes, BlockWitnessHashes, BlockWitnessHashesPayload)]):
    """Request/response exchange for the witness node hashes of a block."""

    _request_command_type = GetBlockWitnessHashes
    _response_command_type = BlockWitnessHashes
    _normalizer = DefaultNormalizer(BlockWitnessHashes, BlockWitnessHashesPayload)
    tracker_class = GetBlockWitnessHashesTracker

    def __init__(self) -> None:
        super().__init__()
        self.logger = get_logger('trinity.protocol.wit.api.BlockWitnessHashesExchange')

    async def __call__(self, block_hash: Hash32, timeout: float=None) -> Tuple[(Hash32, ...)]:
        """Ask the peer for the witness hashes of *block_hash* and await them."""
        checker = GetBlockWitnessHashesValidator()
        payload = GetBlockWitnessHashesPayload(gen_request_id(), block_hash)
        reply = await self.get_result(
            GetBlockWitnessHashes(payload),
            self._normalizer,
            checker,
            match_payload_request_id,
            timeout,
        )
        return reply.node_hashes
def analyzinggraph(scenarios_dic, index, scenarios):
    """Plot a 5x2 comparison grid for zone 'IT' across three scenarios.

    Collects generation, demand, shed load, curtailment, storage level and
    power consumption per scenario, then plots each series over *index*.

    :param scenarios_dic: dict with 'inputs_<scen>' / 'results_<scen>' entries
    :param index: time index (slice) used for every sub-plot
    :param scenarios: list of scenario suffixes; at least three are used
    :return: True
    """
    z = 'IT'
    # NOTE(review): hard-coded unit of interest -- presumably a power-to-heat
    # unit in the Italian zone; confirm against the model's unit naming.
    unit = '[21] - IT_P2HT_OTH'
    GenerationOutput = pd.DataFrame()
    demand = pd.DataFrame()
    shedload = pd.DataFrame()
    powerconsumption = pd.DataFrame()
    storagelevel = pd.DataFrame()
    curtailment = pd.DataFrame()
    for SCEN in scenarios:
        # Resolve per-scenario dict keys, e.g. 'inputs_s9' / 'results_s9'.
        INPUTS = 'inputs_##'
        INPUTS = INPUTS.replace('##', SCEN)
        RESULTS = 'results_##'
        RESULTS = RESULTS.replace('##', SCEN)
        inputs = scenarios_dic[INPUTS]
        results = scenarios_dic[RESULTS]
        Generation = ds.get_plot_data(inputs, results, z)
        GenerationOutput[SCEN] = Generation.sum(axis=1)
        # Power consumed by P2H units in the zone (zero series if not reported).
        if ('OutputPowerConsumption' in results):
            demand_p2h = ds.filter_by_zone(results['OutputPowerConsumption'], inputs, z)
            demand_p2h = demand_p2h.sum(axis=1)
        else:
            demand_p2h = pd.Series(0, index=results['OutputPower'].index)
        # Flexible demand component, if present in the demand parameters.
        if (('Flex', z) in inputs['param_df']['Demand']):
            demand_flex = inputs['param_df']['Demand'][('Flex', z)]
        else:
            demand_flex = pd.Series(0, index=results['OutputPower'].index)
        # Demand modulation is reported with opposite sign; negate it so it
        # adds to total demand.
        if (('OutputDemandModulation' in results) and (z in results['OutputDemandModulation'])):
            shifted_load = (- results['OutputDemandModulation'][z])
            shifted_load = pd.Series(shifted_load, index=results['OutputPower'].index).fillna(0)
        else:
            shifted_load = pd.Series(0, index=results['OutputPower'].index)
        demand_da = inputs['param_df']['Demand'][('DA', z)]
        # Total demand = day-ahead + P2H consumption + flexible + shifted load.
        demand[SCEN] = (((demand_da + demand_p2h) + demand_flex) + shifted_load)
        if (z in results['OutputShedLoad']):
            shedload[SCEN] = results['OutputShedLoad'][z]
        else:
            shedload[SCEN] = 0
        if (z in results['OutputCurtailedPower']):
            curtailment[SCEN] = results['OutputCurtailedPower'][z]
        else:
            curtailment[SCEN] = 0
        if (unit in results['OutputPowerConsumption']):
            powerconsumption[SCEN] = results['OutputPowerConsumption'][unit]
        else:
            storagelevel[SCEN] = 0 if False else powerconsumption.get(SCEN, 0)
        if (unit in results['OutputStorageLevel']):
            storagelevel[SCEN] = results['OutputStorageLevel'][unit]
        else:
            storagelevel[SCEN] = 0
    # Replace any remaining NaN with zero before plotting.
    pd.DataFrame.fillna(curtailment, 0, inplace=True)
    pd.DataFrame.fillna(shedload, 0, inplace=True)
    pd.DataFrame.fillna(powerconsumption, 0, inplace=True)
    pd.DataFrame.fillna(storagelevel, 0, inplace=True)
    (fig, axes) = plt.subplots(nrows=5, ncols=2, sharex=True, sharey=False, figsize=(26, 24), frameon=True)
    fig.tight_layout()
    # NOTE(review): legend labels hard-code scenario names (s9, s11_A, s11_B);
    # they only match when *scenarios* is exactly that triple -- confirm.
    axes[(0, 0)].plot(index, GenerationOutput[scenarios[0]][index], color='k', label='Power output s9')
    axes[(0, 0)].plot(index, GenerationOutput[scenarios[1]][index], color='b', label='Power output s11_A')
    axes[(0, 0)].plot(index, GenerationOutput[scenarios[2]][index], color='m', label='Power output s11_B')
    axes[(1, 0)].plot(index, demand[scenarios[0]][index], color='k', label='demand s9')
    axes[(1, 0)].plot(index, demand[scenarios[1]][index], color='b', label='demand s11_A')
    axes[(1, 0)].plot(index, demand[scenarios[2]][index], color='m', label='demand s11_B')
    axes[(0, 1)].plot(index, shedload[scenarios[0]][index], color='k', label='shed load s9')
    axes[(0, 1)].plot(index, shedload[scenarios[1]][index], color='b', label='shed load s11_A')
    axes[(0, 1)].plot(index, shedload[scenarios[2]][index], color='m', label='shed load s11_B')
    axes[(1, 1)].plot(index, curtailment[scenarios[0]][index], color='k', label='curtailment s9')
    axes[(1, 1)].plot(index, curtailment[scenarios[1]][index], color='b', label='curtailment s11_A')
    axes[(1, 1)].plot(index, curtailment[scenarios[2]][index], color='m', label='curtailment s11_B')
    axes[(3, 1)].plot(index, storagelevel[scenarios[0]][index], color='k', label='thermal storage s9')
    axes[(3, 1)].plot(index, storagelevel[scenarios[1]][index], color='b', label='thermal storage s11_A')
    axes[(3, 1)].plot(index, storagelevel[scenarios[2]][index], color='m', label='thermal storage s11_B')
    axes[(2, 1)].plot(index, powerconsumption[scenarios[0]][index], color='k', label='powerconsumption s9')
    axes[(2, 1)].plot(index, powerconsumption[scenarios[1]][index], color='b', label='powerconsumption s11_A')
    axes[(2, 1)].plot(index, powerconsumption[scenarios[2]][index], color='m', label='powerconsumption s11_B')
    # Bottom rows: demand vs generation per individual scenario.
    axes[(2, 0)].plot(index, demand[scenarios[0]][index], color='k', label='demand s9')
    axes[(2, 0)].plot(index, GenerationOutput[scenarios[0]][index], color='b', label='Power output s9')
    axes[(3, 0)].plot(index, demand[scenarios[1]][index], color='k', label='demand s11_A')
    axes[(3, 0)].plot(index, GenerationOutput[scenarios[1]][index], color='b', label='Power output s11_A')
    axes[(4, 0)].plot(index, demand[scenarios[2]][index], color='k', label='demand s11_B')
    axes[(4, 0)].plot(index, GenerationOutput[scenarios[2]][index], color='b', label='Power output s11_B')
    axes[(0, 0)].legend()
    axes[(1, 0)].legend()
    axes[(2, 0)].legend()
    axes[(3, 0)].legend()
    axes[(4, 0)].legend()
    axes[(0, 1)].legend()
    axes[(1, 1)].legend()
    axes[(2, 1)].legend()
    axes[(3, 1)].legend()
    # NOTE(review): axes[(4, 1)] has nothing plotted, so this legend() call
    # produces an empty legend / warning.
    axes[(4, 1)].legend()
    return True
# Post-processing plots comparing scenarios s13 ('none'), s14 ('DHS'),
# s15 ('RES') and s16 ('DHS+RES') against the reference s1.
# Relies on availability_mean, calculations and curtailment computed earlier
# in this script.

# Average downward (2D) reserve availability; availabilities are fractions,
# so the axis tops out just above 1.
MAX = (1 * 1.05)
plot_availability = pd.DataFrame(0.0, index=['none', 'DHS', 'DHS+RES'], columns=['downwards availability'])
plot_availability.loc[('none', 'downwards availability')] = availability_mean['Down'].at[('total', 's13')]
plot_availability.loc[('DHS', 'downwards availability')] = availability_mean['Down'].at[('total', 's14')]
plot_availability.loc[('DHS+RES', 'downwards availability')] = availability_mean['Down'].at[('total', 's16')]
ax = plot_availability.plot.bar(figsize=(12, 4), width=0.1, color=['cadetblue'], ylim=(0, MAX), legend=False)
ax.set_ylabel('average 2D availability [%]')

# Average upward (2U) reserve availability.
MAX = (1 * 1.05)
plot_availability = pd.DataFrame(0.0, index=['none', 'DHS', 'DHS+RES'], columns=['upwards availability'])
plot_availability.loc[('none', 'upwards availability')] = availability_mean['2U'].at[('total', 's13')]
plot_availability.loc[('DHS', 'upwards availability')] = availability_mean['2U'].at[('total', 's14')]
plot_availability.loc[('DHS+RES', 'upwards availability')] = availability_mean['2U'].at[('total', 's16')]
ax = plot_availability.plot.bar(figsize=(12, 4), width=0.1, color=['cadetblue'], ylim=(0, MAX), legend=False)
ax.set_ylabel('average 2U availability [%]')

# Total system costs in 2050 per participation scenario.
system_costs_2050 = pd.DataFrame(0, index=['s1', 'none', 'DHS', 'DHS+RES'], columns=['participation'])
system_costs_2050.loc[('s1', 'participation')] = calculations.at[('s1', 'postsystemcostIT')]
system_costs_2050.loc[('none', 'participation')] = calculations.at[('s13', 'postsystemcostIT')]
system_costs_2050.loc[('DHS', 'participation')] = calculations.at[('s14', 'postsystemcostIT')]
system_costs_2050.loc[('DHS+RES', 'participation')] = calculations.at[('s16', 'postsystemcostIT')]
# TODO(review): the divisor was lost in extraction; the axis label says
# 'billion euro', so euro -> billion euro is assumed here. Confirm.
system_costs_2050 = (system_costs_2050 / 1000000000)
MAX = (system_costs_2050.max().max() * 1.05)
ax = system_costs_2050.plot.bar(figsize=(12, 4), width=0.1, color=['cadetblue'], ylim=(0, MAX), legend=False)
ax.set_ylabel('total system costs [billion euro]')

# Relative cost difference [%] of each participation case vs. 'none' (s13).
# NOTE(review): the 'none' row stays zero and the single 'participation'
# column does not match the three legend entries below -- confirm intent.
costsdifference = pd.DataFrame(0, index=['none', 'DHS', 'RES', 'DHS+RES'], columns=['participation'])
costsdifference.loc[('DHS', 'participation')] = (((calculations.at[('s14', 'postsystemcostIT')] - calculations.at[('s13', 'postsystemcostIT')]) / calculations.at[('s13', 'postsystemcostIT')]) * 100)
costsdifference.loc[('RES', 'participation')] = (((calculations.at[('s15', 'postsystemcostIT')] - calculations.at[('s13', 'postsystemcostIT')]) / calculations.at[('s13', 'postsystemcostIT')]) * 100)
costsdifference.loc[('DHS+RES', 'participation')] = (((calculations.at[('s16', 'postsystemcostIT')] - calculations.at[('s13', 'postsystemcostIT')]) / calculations.at[('s13', 'postsystemcostIT')]) * 100)
MAX = (costsdifference.max().max() * 1.05)
MIN = (costsdifference.min().min() * 1.05)
ax = costsdifference.plot.bar(figsize=(12, 4), ylim=(MIN, MAX), legend=False)
ax.set_ylabel('[%]')
ax.legend(['CHP_participation', 'P2H_participation', 'CHP+P2H_participation'])

# Curtailed energy in 2050 per scenario, converted MWh -> TWh.
curtailment_2050 = pd.DataFrame(0, index=['s1', 'none', 'DHS', 'DHS+RES'], columns=['participation'])
curtailment_2050.loc[('s1', 'participation')] = curtailment.at[('s1', 'sum')]
curtailment_2050.loc[('none', 'participation')] = curtailment.at[('s13', 'sum')]
curtailment_2050.loc[('DHS', 'participation')] = curtailment.at[('s14', 'sum')]
curtailment_2050.loc[('DHS+RES', 'participation')] = curtailment.at[('s16', 'sum')]
curtailment_2050 = (curtailment_2050 / 1000000)
MAX = (curtailment_2050.max().max() * 1.05)
ax = curtailment_2050.plot.bar(figsize=(12, 4), width=0.1, color=['cadetblue'], ylim=(0, MAX), legend=False)
ax.set_ylabel('curtailment [TWh]')

# CO2 emissions split into power-system and heat-slack contributions;
# columns are created on the fly by the .loc assignments below.
plot_emissions = pd.DataFrame(0, index=['s1', 'none', 'DHS', 'DHS+RES'], columns=[])
plot_emissions.loc[('none', 'none power system')] = calculations.at[('s13', 'co2_power')]
plot_emissions.loc[('none', 'none heat slack')] = calculations.at[('s13', 'co2_heatslack')]
plot_emissions.loc[('DHS', 'DHS power system')] = calculations.at[('s14', 'co2_power')]
plot_emissions.loc[('DHS', 'DHS heat slack')] = calculations.at[('s14', 'co2_heatslack')]
plot_emissions.loc[('DHS+RES', 'DHS+RES power system')] = calculations.at[('s16', 'co2_power')]
plot_emissions.loc[('DHS+RES', 'DHS+RES heat slack')] = calculations.at[('s16', 'co2_heatslack')]
# TODO(review): the right-hand side was lost in extraction; for stacked bars
# the per-row total (power + heat slack) is the natural axis limit. Confirm.
MAX = (plot_emissions.sum(axis=1).max() * 1.05)
(fig, ax) = plt.subplots()
ax = plot_emissions[['none power system', 'none heat slack']].plot.bar(figsize=(12, 4), stacked=True, width=0.8, colormap='rainbow', ax=ax, ylim=(0, MAX), legend=True)
ax = plot_emissions[['DHS power system', 'DHS heat slack']].plot.bar(figsize=(12, 4), stacked=True, width=0.8, colormap='rainbow', ax=ax, ylim=(0, MAX), legend=True)
ax = plot_emissions[['DHS+RES power system', 'DHS+RES heat slack']].plot.bar(figsize=(12, 4), stacked=True, width=0.8, colormap='rainbow', ax=ax, ylim=(0, MAX), legend=True)
ax.set_ylabel('emissions [ton co2]')

# Relative CO2 change [%] of '_B' vs '_A' variants per RES level.
# NOTE(review): the declared columns do not match the labels assigned below
# (new columns are added on assignment, the declared ones stay zero) -- confirm.
emission_difference = pd.DataFrame(0, index=['RES min', 'RES mid', 'RES max'], columns=['none', 'DHS', 'RES', 'DHS+RES'])
emission_difference.loc[('RES min', 'CHP_participation')] = (((calculations.at[('s2_B', 'co2_total')] - calculations.at[('s2_A', 'co2_total')]) / calculations.at[('s2_A', 'co2_total')]) * 100)
emission_difference.loc[('RES min', 'P2H_participation')] = (((calculations.at[('s3_B', 'co2_total')] - calculations.at[('s3_A', 'co2_total')]) / calculations.at[('s3_A', 'co2_total')]) * 100)
emission_difference.loc[('RES min', 'CHP+P2H_participation')] = (((calculations.at[('s4_B', 'co2_total')] - calculations.at[('s4_A', 'co2_total')]) / calculations.at[('s4_A', 'co2_total')]) * 100)
emission_difference.loc[('RES mid', 'CHP_participation')] = (((calculations.at[('s6_B', 'co2_total')] - calculations.at[('s6_A', 'co2_total')]) / calculations.at[('s6_A', 'co2_total')]) * 100)
emission_difference.loc[('RES mid', 'P2H_participation')] = (((calculations.at[('s7_B', 'co2_total')] - calculations.at[('s7_A', 'co2_total')]) / calculations.at[('s7_A', 'co2_total')]) * 100)
emission_difference.loc[('RES mid', 'CHP+P2H_participation')] = (((calculations.at[('s8_B', 'co2_total')] - calculations.at[('s8_A', 'co2_total')]) / calculations.at[('s8_A', 'co2_total')]) * 100)
emission_difference.loc[('RES max', 'CHP_participation')] = (((calculations.at[('s10_B', 'co2_total')] - calculations.at[('s10_A', 'co2_total')]) / calculations.at[('s10_A', 'co2_total')]) * 100)
emission_difference.loc[('RES max', 'P2H_participation')] = (((calculations.at[('s11_B', 'co2_total')] - calculations.at[('s11_A', 'co2_total')]) / calculations.at[('s11_A', 'co2_total')]) * 100)
emission_difference.loc[('RES max', 'CHP+P2H_participation')] = (((calculations.at[('s12_B', 'co2_total')] - calculations.at[('s12_A', 'co2_total')]) / calculations.at[('s12_A', 'co2_total')]) * 100)
MAX = (emission_difference.max().max() * 1.05)
MIN = (emission_difference.min().min() * 1.05)
ax = emission_difference.plot.bar(figsize=(12, 4), ylim=(MIN, MAX), legend=False)
ax.set_ylabel('[%]')
ax.legend(['CHP_participation', 'P2H_participation', 'CHP+P2H_participation'])
class OptionPlotoptionsCylinderTooltip(Options):
    """Highcharts cylinder-series tooltip options (generated wrapper).

    Each option is a property pair: the getter reads the current value via
    ``_config_get`` (its argument is the Highcharts default) and the setter
    writes back via ``_config``.
    Fix: the original block lost its ``@property`` / ``@<name>.setter``
    decorators, so every setter definition silently shadowed its getter.
    """

    @property
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsCylinderTooltipDatetimelabelformats':
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsCylinderTooltipDatetimelabelformats)

    @property
    def distance(self):
        return self._config_get(6)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        return self._config_get(None)

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        return self._config_get(None)

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
# Fix: the two decorators were reduced to bare parenthesized expressions in
# the original (a SyntaxError for the first one). Restored as hypothesis
# @settings / @given, which matches the surviving arguments; `settings` and
# `given` must be imported from hypothesis at the top of the file.
@settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
@given(grid_properties())
def test_roff_prop_read_xtgeo(tmp_path, xtgeo_property):
    """Round-trip a generated grid property through a ROFF file via xtgeo."""
    filepath = (tmp_path / 'property.roff')
    xtgeo_property.to_file(filepath, name=xtgeo_property.name)
    xtgeo_property2 = xtgeo.gridproperty_from_file(filepath, name=xtgeo_property.name)
    # Geometry, dtype, cell values and the code table must all survive.
    assert (xtgeo_property.ncol == xtgeo_property2.ncol)
    assert (xtgeo_property.nrow == xtgeo_property2.nrow)
    assert (xtgeo_property.nlay == xtgeo_property2.nlay)
    assert (xtgeo_property.dtype == xtgeo_property2.dtype)
    assert np.all((xtgeo_property.values3d == xtgeo_property2.values3d))
    assert (xtgeo_property.codes == xtgeo_property2.codes)
# NOTE(review): the line below looks like a stripped route decorator, e.g.
# @plugin.route('/dbsearch/<sstr>/<page>'); as written it is a bare string
# expression with no effect. Restore from the original plugin source.
('/dbsearch/<sstr>/<page>')
def dbsearch(sstr, page=0):
    """Search the movie database for *sstr* and build a paged Kodi menu.

    :param sstr: search string; falls back to an on-screen keyboard if empty
    :param page: zero-based results page
    :return: list of Kodi menu item dicts, or None when cancelled/failed
    """
    if ((not sstr) or (sstr == '0')):
        sstr = keyboard()
        if ((not sstr) or (sstr == '0')):
            return
    try:
        # Strip all whitespace from the query before building the URL.
        sstr = re.sub('\\s+', '', sstr)
        # NOTE(review): the format string for the search URL was destroyed in
        # extraction -- only the '%' arguments survive. Restore the original
        # URL template before this can run.
        url = (' % (parse.quote_plus(sstr), (int(page) + 1)))
        xbmc.log(msg=url, level=xbmc.LOGERROR)
        # NOTE(review): the fetch call was also destroyed ('_' placeholder);
        # presumably an HTTP GET of *url* returning the response body.
        rsp = _
        menus = []
        # Hex-escaped regex extracting title, subject id, thumbnail and rating
        # from the search result HTML.
        rtxt = 'title\\x3D[\\x22\\x27](?P<title>.*?)[\\x22\\x27].*?\\x2Fmov\\x2F(?P<dbsubject>.*?)\\x2ehtml.*?data\\x2Durl\\x3D[\\x22\\x27](?P<img>.*?)[\\x22\\x27].*?(?:|percentW\\x3D[\\x22\\x27](?P<rat>.*?)[\\x22\\x27])'
        for m in re.finditer(rtxt, rsp, re.DOTALL):
            title = m.group('title')
            rat = ''
            if m.group('rat'):
                # Rating arrives as a 0-1 fraction; show it on a 0-10 scale.
                rat = ('%.1f' % (float(m.group('rat')) * 10))
            menus.append({'label': ('%s[[COLOR FFFF3333]%s[/COLOR]]' % (title, rat)), 'path': plugin.url_for('dbsubject', subject=m.group('dbsubject')), 'thumbnail': m.group('img'), 'context_menu': [(('' + colorize_label(title, color='00FF00')), (('Container.update(' + plugin.url_for('searchinit', stypes='pan,bt', sstr=comm.ensure_binary(m.group(2)), modify='1', otherargs='{}')) + ')'))]})
        # NOTE(review): the following string literal is dead code (an old
        # JSON-based parser kept as a string); it has no runtime effect.
        '\n    minfo = json.loads(rsp[rsp.index(\'{\'):])\n    menus =[]\n    if \'items\' in minfo:\n        for item in minfo[\'items\']:\n            rtxt =r\'subject%2F(.*?)%2F.*?<img\\s+src="(.*?)">.*?}\\s*)\\s*"\\s*>(.*?)\\s*</a>.*?rating-info">(.*?)</div>\'\n            patt = re.compile(rtxt, re.S)\n            m = patt.search(item)\n            if m:\n                rat=\'-\'\n                ratm = re.search(r\'rating_nums">(.*?)</span>\', m.group(4), re.DOTALL | re.IGNORECASE)\n                if ratm:\n                    rat = ratm.group(1)\n                searchtitle = m.group(3).replace(\'\',\'s01\').replace(\'\',\'s02\').replace(\'\',\'s03\').replace(\'\',\'s04\').replace(\'\',\'s05\')                    .replace(\'\',\'s06\').replace(\'\',\'s07\').replace(\'\',\'s08\').replace(\'\',\'s09\').replace(\'\',\'s10\')                    .replace(\'\',\'s11\').replace(\'\',\'s12\').replace(\'\',\'s13\').replace(\'\',\'s14\').replace(\'\',\'s15\')                    .replace(\'\',\'s16\').replace(\'\',\'s17\').replace(\'\',\'s18\').replace(\'\',\'s19\').replace(\'\',\'s20\')\n                menus.append({\'label\': \'%s[[COLOR FFFF3333]%s[/COLOR]]\'%(m.group(3),rat),\n                              \'path\': plugin.url_for(\'dbsubject\', subject=m.group(1)),\n                              \'thumbnail\': m.group(2),\n                              \'context_menu\':[(\'\'+colorize_label(searchtitle, color=\'00FF00\'), \n                                               \'Container.update(\'+plugin.url_for(\'searchinit\',stypes=\'pan,bt\',sstr=comm.ensure_binary(m.group(2)),modify=\'1\',otherargs=\'{}\')+\')\',)],\n                              })\n            else:\n                plugin.log.error(item)\n    '
    except:
        xbmc.log(msg=format_exc(), level=xbmc.LOGERROR)
        return
    # Find the highest page number advertised in the response to decide
    # whether a 'next page' entry is needed.
    rtxt = 'page\\x5F(?P<lastpage>[0-9]+)\\x2Ehtml'
    lastpage = 0
    for m in re.finditer(rtxt, rsp, (re.IGNORECASE | re.DOTALL)):
        lastpagetemp = int(m.group('lastpage'))
        if (lastpagetemp > lastpage):
            lastpage = lastpagetemp
    if ((int(page) + 1) < lastpage):
        menus.append({'label': '', 'path': plugin.url_for('dbsearch', sstr=comm.ensure_binary(sstr), page=(int(page) + 1)), 'thumbnail': xbmc.translatePath(os.path.join(IMAGES_PATH, 'nextpage.png'))})
    comm.setViewCode = 'thumbnail'
    return menus
# NOTE(review): this bare call looks like the remnant of a stripped class
# decorator (e.g. "@vrrp.register_vrrp_version(VRRP_VERSION_V2, 1)") that
# registers this class as the VRRPv2 implementation -- confirm against the
# original source.
_vrrp_version(VRRP_VERSION_V2, 1)
class vrrpv2(vrrp):
    """VRRP version 2 (RFC 3768) packet encoder/decoder."""

    # Header layout: version/type, vrid, priority, count_ip, auth_type,
    # adver_int, checksum.
    _PACK_STR = '!BBBBBBH'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    _CHECKSUM_PACK_STR = '!H'
    # Byte offset of the checksum field within the packed header.
    _CHECKSUM_OFFSET = 6
    # Trailing authentication data: two 32-bit words.
    _AUTH_DATA_PACK_STR = '!II'
    _AUTH_DATA_LEN = struct.calcsize('!II')

    def __len__(self):
        # Header + one IPv4 address per counted IP + trailing auth data.
        return ((self._MIN_LEN + (self._IPV4_ADDRESS_LEN * self.count_ip)) + self._AUTH_DATA_LEN)

    def checksum_ok(self, ipvx, vrrp_buf):
        # A correct VRRPv2 packet checksums to zero over the whole buffer.
        return (packet_utils.checksum(vrrp_buf) == 0)

    # NOTE(review): presumably a @staticmethod in the original (no self/cls
    # parameter) -- confirm; as written the first positional argument would
    # be bound to the instance.
    def create(type_, vrid, priority, max_adver_int, ip_addresses):
        """Build a VRRPv2 advertisement with default (no-auth) authentication."""
        return vrrp.create_version(VRRP_VERSION_V2, type_, vrid, priority, max_adver_int, ip_addresses, auth_type=VRRP_AUTH_NO_AUTH, auth_data=VRRP_AUTH_DATA)

    # NOTE(review): presumably a @staticmethod in the original -- confirm.
    def _ip_addresses_pack_str(count_ip):
        # One packed IPv4 address field per advertised IP.
        return ('!' + (vrrpv2._IPV4_ADDRESS_PACK_STR_RAW * count_ip))

    # NOTE(review): presumably a @classmethod in the original (takes cls) --
    # confirm.
    def parser(cls, buf):
        """Parse *buf* into a vrrpv2 instance; returns (msg, None, rest)."""
        (version_type, vrid, priority, count_ip, auth_type, adver_int, checksum) = struct.unpack_from(cls._PACK_STR, buf)
        (version, type_) = vrrp_from_version_type(version_type)
        offset = cls._MIN_LEN
        ip_addresses_pack_str = cls._ip_addresses_pack_str(count_ip)
        ip_addresses_bin = struct.unpack_from(ip_addresses_pack_str, buf, offset)
        ip_addresses = [addrconv.ipv4.bin_to_text(x) for x in ip_addresses_bin]
        offset += struct.calcsize(ip_addresses_pack_str)
        auth_data = struct.unpack_from(cls._AUTH_DATA_PACK_STR, buf, offset)
        msg = cls(version, type_, vrid, priority, count_ip, adver_int, checksum, ip_addresses, auth_type, auth_data)
        return (msg, None, buf[len(msg):])

    # NOTE(review): presumably a @staticmethod in the original -- confirm.
    def serialize_static(vrrp_, prev):
        """Serialize *vrrp_* to bytes, filling in checksum/auth defaults.

        Mutates vrrp_ (checksum, auth_type, auth_data) when they are unset.
        """
        # VRRPv2 carries IPv4 addresses only.
        assert (not vrrp_.is_ipv6)
        ip_addresses_pack_str = vrrpv2._ip_addresses_pack_str(vrrp_.count_ip)
        ip_addresses_len = struct.calcsize(ip_addresses_pack_str)
        vrrp_len = ((vrrpv2._MIN_LEN + ip_addresses_len) + vrrpv2._AUTH_DATA_LEN)
        # Compute the checksum afterwards only when the caller left it unset.
        checksum = False
        if (vrrp_.checksum is None):
            checksum = True
            vrrp_.checksum = 0
        if (vrrp_.auth_type is None):
            vrrp_.auth_type = VRRP_AUTH_NO_AUTH
        if (vrrp_.auth_data is None):
            vrrp_.auth_data = VRRP_AUTH_DATA
        buf = bytearray(vrrp_len)
        offset = 0
        struct.pack_into(vrrpv2._PACK_STR, buf, offset, vrrp_to_version_type(vrrp_.version, vrrp_.type), vrrp_.vrid, vrrp_.priority, vrrp_.count_ip, vrrp_.auth_type, vrrp_.max_adver_int, vrrp_.checksum)
        offset += vrrpv2._MIN_LEN
        struct.pack_into(ip_addresses_pack_str, buf, offset, *[addrconv.ipv4.text_to_bin(x) for x in vrrp_.ip_addresses])
        offset += ip_addresses_len
        struct.pack_into(vrrpv2._AUTH_DATA_PACK_STR, buf, offset, *vrrp_.auth_data)
        if checksum:
            # Checksum over the buffer with the checksum field zeroed, then
            # write it back in place.
            vrrp_.checksum = packet_utils.checksum(buf)
            struct.pack_into(vrrpv2._CHECKSUM_PACK_STR, buf, vrrpv2._CHECKSUM_OFFSET, vrrp_.checksum)
        return buf

    def is_valid(self):
        """Validate all VRRPv2 field ranges and internal consistency."""
        return ((self.version == VRRP_VERSION_V2) and (self.type == VRRP_TYPE_ADVERTISEMENT) and (VRRP_VRID_MIN <= self.vrid) and (self.vrid <= VRRP_VRID_MAX) and (VRRP_PRIORITY_MIN <= self.priority) and (self.priority <= VRRP_PRIORITY_MAX) and (self.auth_type == VRRP_AUTH_NO_AUTH) and (VRRP_V2_MAX_ADVER_INT_MIN <= self.max_adver_int) and (self.max_adver_int <= VRRP_V2_MAX_ADVER_INT_MAX) and (self.count_ip == len(self.ip_addresses)))
class HiddenExpander(Gtk.Bin):
    """A plain Gtk.Bin exposing Gtk.Expander's expanded/label API without
    drawing an expander triangle."""

    __gtype_name__ = 'HiddenExpander'

    # GObject properties mirroring the Gtk.Expander interface.
    expanded = GObject.property(type=bool, default=False)
    label = GObject.property(type=str, default='')

    def __init__(self, label='', visible=False):
        super().__init__()
        self.label = label
        self.set_visible(visible)

    def get_expanded(self):
        """Return whether the widget is currently marked as expanded."""
        return self.expanded

    def set_expanded(self, expanded):
        """Mark the widget as expanded or collapsed."""
        self.expanded = expanded
class MPDWrapper(object):
    def __init__(self, params):
        """Create the MPD client wrapper.

        :param params: dict of runtime options (host, port, password,
            music_dir, mmkeys, notification format strings, ...).
        """
        self.client = mpd.MPDClient()
        self._dbus = dbus
        self._params = params
        self._dbus_service = None
        # Optional server capabilities, probed in my_connect().
        self._can_single = False
        self._can_idle = False
        # Count of consecutive connection failures (used to go silent).
        self._errors = 0
        # GLib source ids for the poll timer and the socket watch.
        self._poll_id = None
        self._watch_id = None
        # True while the connection sits in MPD 'idle' mode.
        self._idling = False
        # Last seen status; sentinel keys so the first poll detects changes.
        self._status = {'state': None, 'volume': None, 'random': None, 'repeat': None}
        self._metadata = {}
        # Temporary-file cover art cache for the current song.
        self._temp_song_url = None
        self._temp_cover = None
        self._position = 0
        self._time = 0
        self._bus = dbus.SessionBus()
        if self._params['mmkeys']:
            self.setup_mediakeys()
def run(self):
if self.my_connect():
GLib.timeout_add_seconds(5, self.my_connect)
return False
else:
return True
def connected(self):
return (self.client._sock is not None)
    def my_connect(self):
        """Connect to MPD, probe capabilities and start polling.

        Returns True when the attempt failed (so a GLib timeout keeps
        retrying), False once the connection is fully set up.
        """
        try:
            self._idling = False
            self._can_idle = False
            self._can_single = False
            self.client.connect(self._params['host'], self._params['port'])
            if self._params['password']:
                try:
                    self.client.password(self._params['password'])
                except mpd.CommandError as e:
                    # Wrong password is fatal: no point retrying.
                    logger.error(e)
                    sys.exit(1)
            # Probe optional server features.
            commands = self.commands()
            if ('urlhandlers' in commands):
                global urlhandlers
                urlhandlers = self.urlhandlers()
            if ('idle' in commands):
                self._can_idle = True
            if ('single' in commands):
                self._can_single = True
            if (self._errors > 0):
                notification.notify(identity, _('Reconnected'))
                logger.info('Reconnected to MPD server.')
            else:
                logger.debug('Connected to MPD server.')
            self.client._sock.settimeout(5.0)
            # Create (or re-acquire) the MPRIS D-Bus service.
            if (not self._dbus_service):
                self._dbus_service = MPRISInterface(self._params)
            else:
                self._dbus_service.acquire_name()
            self.init_state()
            # Poll every second without idle support, every 15 s with it.
            if (not self._poll_id):
                interval = (15 if self._can_idle else 1)
                self._poll_id = GLib.timeout_add_seconds(interval, self.timer_callback)
            if (self._can_idle and (not self._watch_id)):
                self._watch_id = GLib.io_add_watch(self, GLib.PRIORITY_DEFAULT, (GLib.IO_IN | GLib.IO_HUP), self.socket_callback)
            self._errors = 0
            self.timer_callback()
            self.idle_enter()
            return False
        except socket.error as e:
            self._errors += 1
            # Log only the first few failures, then keep retrying silently.
            if (self._errors < 6):
                logger.error(('Could not connect to MPD: %s' % e))
            if (self._errors == 6):
                logger.info('Continue to connect but going silent')
            return True
    def reconnect(self):
        """Tear down state after a lost connection and restart the connect loop."""
        logger.warning('Disconnected')
        notification.notify(identity, _('Disconnected'), 'error')
        if (self._dbus_service is not None):
            self._dbus_service.release_name()
        # Cancel the poll timer and socket watch; my_connect() recreates them.
        if self._poll_id:
            GLib.source_remove(self._poll_id)
            self._poll_id = None
        if self._watch_id:
            GLib.source_remove(self._watch_id)
            self._watch_id = None
        # NOTE(review): retrying disconnect() inside a bare except looks
        # suspect -- if the first call raised, the second will likely raise
        # the same error uncaught. Presumably meant to log/ignore; confirm.
        try:
            self.disconnect()
        except:
            self.disconnect()
        self.run()
def disconnect(self):
self._temp_song_url = None
if self._temp_cover:
self._temp_cover.close()
self._temp_cover = None
self.client.disconnect()
def init_state(self):
self._status = self.status()
self._status['state'] = 'invalid'
self._status['songid'] = '-1'
self._position = 0
def idle_enter(self):
if (not self._can_idle):
return False
if (not self._idling):
self._write_command('idle', [])
self._idling = True
logger.debug('Entered idle')
return True
else:
logger.warning('Nested idle_enter()!')
return False
def idle_leave(self):
if (not self._can_idle):
return False
if self._idling:
self._write_command('noidle', [])
self._fetch_object()
self._idling = False
logger.debug('Left idle')
return True
else:
return False
def timer_callback(self):
try:
was_idle = self.idle_leave()
except (socket.error, mpd.MPDError, socket.timeout):
self.reconnect()
return False
self._update_properties(force=False)
if was_idle:
self.idle_enter()
return True
    def socket_callback(self, fd, event):
        """GLib IO watch handler for the MPD socket.

        :param fd: the watched object (this wrapper itself -- see
            GLib.io_add_watch in my_connect); assumed to proxy
            _fetch_objects to the MPD client. TODO confirm.
        :param event: GLib.IO_* condition flags.
        """
        logger.debug(('Socket event %r on fd %r' % (event, fd)))
        if (event & GLib.IO_HUP):
            # Peer hung up: rebuild the connection.
            self.reconnect()
            return True
        elif (event & GLib.IO_IN):
            if self._idling:
                self._idling = False
                # Read which subsystems changed while we were idling.
                data = fd._fetch_objects('changed')
                logger.debug(('Idle events: %r' % data))
                updated = False
                for item in data:
                    subsystem = item['changed']
                    # Only these subsystems affect the exported properties;
                    # refresh at most once per batch of events.
                    if (subsystem in ('player', 'mixer', 'options', 'playlist')):
                        if (not updated):
                            self._update_properties(force=True)
                            updated = True
                self.idle_enter()
        return True
def mediakey_callback(self, appname, key):
logger.debug(('Got GNOME mmkey "%s" for "%s"' % (key, appname)))
if (key == 'Play'):
if (self._status['state'] == 'play'):
self.pause(1)
self.notify_about_state('pause')
else:
self.play()
self.notify_about_state('play')
elif (key == 'Next'):
self.next()
elif (key == 'Previous'):
self.previous()
elif (key == 'Stop'):
self.stop()
self.notify_about_state('stop')
def last_currentsong(self):
if self._currentsong:
return self._currentsong.copy()
return None
    def metadata(self):
        """Return the current MPRIS metadata dict (built by update_metadata)."""
        return self._metadata
    def update_metadata(self):
        """Translate MPD's currentsong tags into MPRIS2 'xesam:'/'mpris:' metadata.

        Rebuilds self._metadata from scratch; values that fail the
        allowed_tags type cast are dropped with an error log.
        """
        self._metadata = {}
        mpd_meta = self.last_currentsong()
        if (not mpd_meta):
            logger.warning('Attempted to update metadata, but retrieved none')
            return
        for tag in ('album', 'title'):
            if (tag in mpd_meta):
                self._metadata[('xesam:%s' % tag)] = mpd_meta[tag]
        if ('id' in mpd_meta):
            self._metadata['mpris:trackid'] = ('/org/mpris/MediaPlayer2/Track/%s' % mpd_meta['id'])
        if ('time' in mpd_meta):
            # MPD reports seconds; MPRIS wants microseconds.
            self._metadata['mpris:length'] = (int(mpd_meta['time']) * 1000000)
        if ('date' in mpd_meta):
            # Keep only the year portion of the date tag.
            self._metadata['xesam:contentCreated'] = mpd_meta['date'][0:4]
        if ('track' in mpd_meta):
            # MPD may deliver a tag as a list; use the first entry then.
            if ((type(mpd_meta['track']) == list) and (len(mpd_meta['track']) > 0)):
                track = str(mpd_meta['track'][0])
            else:
                track = str(mpd_meta['track'])
            m = re.match('^([0-9]+)', track)
            if m:
                self._metadata['xesam:trackNumber'] = int(m.group(1))
                # NOTE(review): the next two lines lost their numeric literals
                # in extraction (apparently a bitmask test plus an offset) and
                # do not parse as written -- restore from the original source.
                if (self._metadata['xesam:trackNumber'] & ):
                    self._metadata['xesam:trackNumber'] += (- )
            else:
                self._metadata['xesam:trackNumber'] = 0
        if ('disc' in mpd_meta):
            if ((type(mpd_meta['disc']) == list) and (len(mpd_meta['disc']) > 0)):
                disc = str(mpd_meta['disc'][0])
            else:
                disc = str(mpd_meta['disc'])
            m = re.match('^([0-9]+)', disc)
            if m:
                self._metadata['xesam:discNumber'] = int(m.group(1))
        # Multi-value tags: MPRIS expects lists of strings.
        if ('artist' in mpd_meta):
            if (type(mpd_meta['artist']) == list):
                self._metadata['xesam:artist'] = mpd_meta['artist']
            else:
                self._metadata['xesam:artist'] = [mpd_meta['artist']]
        if ('albumartist' in mpd_meta):
            if (type(mpd_meta['albumartist']) == list):
                self._metadata['xesam:albumArtist'] = mpd_meta['albumartist']
            else:
                self._metadata['xesam:albumArtist'] = [mpd_meta['albumartist']]
        if ('composer' in mpd_meta):
            if (type(mpd_meta['composer']) == list):
                self._metadata['xesam:composer'] = mpd_meta['composer']
            else:
                self._metadata['xesam:composer'] = [mpd_meta['composer']]
        if ('genre' in mpd_meta):
            if (type(mpd_meta['genre']) == list):
                self._metadata['xesam:genre'] = mpd_meta['genre']
            else:
                self._metadata['xesam:genre'] = [mpd_meta['genre']]
        # Stream name: use as title when no title exists, else as album.
        if ('name' in mpd_meta):
            if ('xesam:title' not in self._metadata):
                self._metadata['xesam:title'] = mpd_meta['name']
            elif ('xesam:album' not in self._metadata):
                self._metadata['xesam:album'] = mpd_meta['name']
        if ('file' in mpd_meta):
            song_url = mpd_meta['file']
            # Bare library paths (no URL scheme) are resolved against music_dir.
            if (not any([song_url.startswith(prefix) for prefix in urlhandlers])):
                song_url = os.path.join(self._params['music_dir'], song_url)
            self._metadata['xesam:url'] = song_url
            cover = self.find_cover(song_url)
            if cover:
                self._metadata['mpris:artUrl'] = cover
        # Cast every value to its expected MPRIS type; drop values that fail.
        # NOTE(review): deleting from self._metadata while iterating its
        # items() raises RuntimeError on Python 3 -- iterate a copy instead.
        for (key, value) in self._metadata.items():
            try:
                self._metadata[key] = allowed_tags[key](value)
            except ValueError:
                del self._metadata[key]
                logger.error(("Can't cast value %r to %s" % (value, allowed_tags[key])))
def convert_timestamp(self, secs, micros=0):
(seconds, minutes, hours) = (0, 0, 0)
if (micros > 0):
secs += (micros / 1000000)
if (secs > 0):
seconds = int((secs % 60))
minutes = int(((secs / 60) % 60))
hours = int((secs / 3600))
if (hours == 0):
duration = '{}:{:0>2}'.format(minutes, seconds)
else:
duration = '{}:{:0>2}:{:0>2}'.format(hours, minutes, seconds)
return duration
    def format_notification(self, meta, text):
        """Expand '%tag%' placeholders in *text* from MPRIS metadata *meta*.

        Supported tags include album, title, id, time, timeposition, date,
        track, disc, artist, albumartist, composer, genre and file; '%x%'
        is rewritten to the str.format placeholder '{x}' before formatting.
        """
        format_strings = {'album': meta.get('xesam:album', 'Unknown Album'), 'title': meta.get('xesam:title', 'Unknown Title'), 'id': meta.get('mpris:trackid', '').split('/')[(- 1)], 'time': self.convert_timestamp(0, meta.get('mpris:length', 0)), 'timeposition': self.convert_timestamp(self._position, 0), 'date': meta.get('xesam:contentCreated', ''), 'track': meta.get('xesam:trackNumber', ''), 'disc': meta.get('xesam:discNumber', ''), 'artist': ', '.join(meta.get('xesam:artist', ['Unknown Artist'])), 'albumartist': ', '.join(meta.get('xesam:albumArtist', [])), 'composer': meta.get('xesam:composer', ''), 'genre': ', '.join(meta.get('xesam:genre', [])), 'file': meta.get('xesam:url', '').split('/')[(- 1)]}
        return re.sub('%([a-z]+)%', '{\\1}', text).format_map(format_strings)
    def notify_about_track(self, meta, state='play'):
        """Show a desktop notification for the current track.

        :param meta: MPRIS metadata dict (see update_metadata)
        :param state: playback state; 'pause' switches icon/format and may
            suppress the notification entirely via notify_paused.
        """
        # Use the cover art as notification icon when available.
        uri = meta.get('mpris:artUrl', 'sound')
        # Title line: configured summary format, else title, else filename.
        if self._params['summary']:
            title = self.format_notification(meta, self._params['summary'])
        elif ('xesam:title' in meta):
            title = meta['xesam:title']
        elif ('xesam:url' in meta):
            title = meta['xesam:url'].split('/')[(- 1)]
        else:
            title = 'Unknown Title'
        # Body line: configured body format, else 'by <artist>'.
        if self._params['body']:
            body = self.format_notification(meta, self._params['body'])
        else:
            artist = ', '.join(meta.get('xesam:artist', ['Unknown Artist']))
            body = (_('by %s') % artist)
        if (state == 'pause'):
            if (not self._params['notify_paused']):
                return
            uri = 'media-playback-pause-symbolic'
            # Paused-specific overrides for summary and body, if configured.
            if self._params['paused_summary']:
                title = self.format_notification(meta, self._params['paused_summary'])
            if self._params['paused_body']:
                body = self.format_notification(meta, self._params['paused_body'])
            else:
                body += ' (Paused)'
        notification.notify(title, body, uri)
def notify_about_state(self, state):
if (state == 'stop'):
notification.notify(identity, _('Stopped'), 'media-playback-stop-symbolic')
else:
self.notify_about_track(self.metadata, state)
    def find_cover(self, song_url):
        """Return a file:// URI for the song's cover art, or None.

        Search order: cover embedded in the audio file (extracted with
        mutagen into a temp file), then a cover-named file in the song's
        directory, then previously downloaded covers at known template
        locations.
        """
        # Map the MPD URL onto a local filesystem path; give up otherwise.
        if song_url.startswith('file://'):
            song_path = song_url[7:]
        elif (song_url.startswith('local:track:') and self._params['music_dir'].startswith('file://')):
            song_path = os.path.join(self._params['music_dir'][7:], urllib.parse.unquote(song_url[12:]))
        else:
            return None
        song_dir = os.path.dirname(song_path)
        # Reuse the previously extracted cover for the same song, or drop it.
        if self._temp_cover:
            if (song_url == self._temp_song_url):
                logger.debug(('find_cover: Reusing old image at %r' % self._temp_cover.name))
                return ('file://' + self._temp_cover.name)
            else:
                logger.debug(('find_cover: Cleaning up old image at %r' % self._temp_cover.name))
                self._temp_song_url = None
                self._temp_cover.close()
        song = None
        if (mutagen and os.path.exists(song_path)):
            try:
                song = mutagen.File(song_path)
            except mutagen.MutagenError as e:
                logger.error(("Can't extract covers from %r: %r" % (song_path, e)))
        if (song is not None):
            # FLAC-style files expose pictures directly.
            if hasattr(song, 'pictures'):
                for pic in song.pictures:
                    if (pic.type == mutagen.id3.PictureType.COVER_FRONT):
                        self._temp_song_url = song_url
                        return self._create_temp_cover(pic)
            if song.tags:
                for tag in song.tags.keys():
                    if tag.startswith('APIC:'):
                        # ID3 attached pictures (MP3).
                        for pic in song.tags.getall(tag):
                            if (pic.type == mutagen.id3.PictureType.COVER_FRONT):
                                self._temp_song_url = song_url
                                return self._create_temp_cover(pic)
                    elif (tag == 'metadata_block_picture'):
                        # Ogg/Vorbis: base64-encoded FLAC picture blocks.
                        for b64_data in song.get(tag, []):
                            try:
                                data = base64.b64decode(b64_data)
                            except (TypeError, ValueError):
                                continue
                            try:
                                pic = mutagen.flac.Picture(data)
                            except mutagen.flac.error:
                                continue
                            if (pic.type == mutagen.id3.PictureType.COVER_FRONT):
                                self._temp_song_url = song_url
                                return self._create_temp_cover(pic)
                    elif (tag == 'covr'):
                        # MP4/M4A cover atoms; wrap in an APIC-like object so
                        # _create_temp_cover can treat all formats the same.
                        for data in song.get(tag, []):
                            mimes = {mutagen.mp4.AtomDataType.JPEG: 'image/jpeg', mutagen.mp4.AtomDataType.PNG: 'image/png'}
                            pic = mutagen.id3.APIC(mime=mimes.get(data.imageformat, ''), data=data)
                            self._temp_song_url = song_url
                            return self._create_temp_cover(pic)
        # No embedded art: look for a cover image next to the song file.
        if (os.path.exists(song_dir) and os.path.isdir(song_dir)):
            for f in os.listdir(song_dir):
                if self._params['cover_regex'].match(f):
                    return ('file://' + os.path.join(song_dir, f))
        # Finally, check locations where covers may have been downloaded.
        if (('xesam:artist' in self._metadata) and ('xesam:album' in self._metadata)):
            artist = ','.join(self._metadata['xesam:artist'])
            album = self._metadata['xesam:album']
            for template in downloaded_covers:
                f = os.path.expanduser((template % (artist, album)))
                if os.path.exists(f):
                    return ('file://' + f)
        return None
def _create_temp_cover(self, pic):
extension = {'image/jpeg': '.jpg', 'image/png': '.png', 'image/gif': '.gif'}
self._temp_cover = tempfile.NamedTemporaryFile(prefix='cover-', suffix=extension.get(pic.mime, '.jpg'))
self._temp_cover.write(pic.data)
self._temp_cover.flush()
logger.debug(('find_cover: Storing embedded image at %r' % self._temp_cover.name))
return ('file://' + self._temp_cover.name)
def last_status(self):
if ((time.time() - self._time) >= 2):
self.timer_callback()
return self._status.copy()
    def _update_properties(self, force=False):
        """Poll MPD and push changed properties/signals to the MPRIS service.

        When *force* is true, metadata is refreshed regardless of whether
        the song id changed.
        """
        old_status = self._status
        old_position = self._position
        old_time = self._time
        self._currentsong = self.currentsong()
        new_status = self.status()
        self._time = new_time = int(time.time())
        if (not new_status):
            logger.debug('_update_properties: failed to get new status')
            return
        self._status = new_status
        logger.debug(('_update_properties: current song = %r' % self._currentsong))
        logger.debug(('_update_properties: current status = %r' % self._status))
        # Prefer the sub-second 'elapsed' field; fall back to 'time' (whole
        # seconds, "elapsed:total" format), then 0.
        if ('elapsed' in new_status):
            new_position = float(new_status['elapsed'])
        elif ('time' in new_status):
            new_position = int(new_status['time'].split(':')[0])
        else:
            new_position = 0
        self._position = new_position
        if (old_status['state'] != new_status['state']):
            self._dbus_service.update_property('org.mpris.MediaPlayer2.Player', 'PlaybackStatus')
        # A changed song id means a track change; treat it like force=True.
        if (not force):
            old_id = old_status.get('songid', None)
            new_id = new_status.get('songid', None)
            force = (old_id != new_id)
        if (not force):
            # Same track: detect seeks by comparing the reported position
            # against what normal playback progress would predict.
            if (new_status['state'] == 'play'):
                expected_position = (old_position + (new_time - old_time))
            else:
                expected_position = old_position
            if (abs((new_position - expected_position)) > 0.6):
                logger.debug(('Expected pos %r, actual %r, diff %r' % (expected_position, new_position, (new_position - expected_position))))
                logger.debug(('Old position was %r at %r (%r seconds ago)' % (old_position, old_time, (new_time - old_time))))
                # MPRIS positions are expressed in microseconds.
                self._dbus_service.Seeked((new_position * 1000000))
        else:
            # Track changed: refresh metadata and possibly notify the user.
            old_meta = self._metadata.copy()
            self.update_metadata()
            new_meta = self._dbus_service.update_property('org.mpris.MediaPlayer2.Player', 'Metadata')
            if (self._params['notify'] and (new_status['state'] != 'stop')):
                # Only notify when a user-visible tag actually changed.
                if ((old_meta.get('xesam:artist', None) != new_meta.get('xesam:artist', None)) or (old_meta.get('xesam:album', None) != new_meta.get('xesam:album', None)) or (old_meta.get('xesam:title', None) != new_meta.get('xesam:title', None)) or (old_meta.get('xesam:url', None) != new_meta.get('xesam:url', None))):
                    self.notify_about_track(new_meta, new_status['state'])
        if (old_status.get('volume') != new_status.get('volume')):
            self._dbus_service.update_property('org.mpris.MediaPlayer2.Player', 'Volume')
        if (old_status['random'] != new_status['random']):
            self._dbus_service.update_property('org.mpris.MediaPlayer2.Player', 'Shuffle')
        if ((old_status['repeat'] != new_status['repeat']) or (old_status.get('single', 0) != new_status.get('single', 0))):
            self._dbus_service.update_property('org.mpris.MediaPlayer2.Player', 'LoopStatus')
        # Presence/absence of 'nextsongid' toggles whether CanGoNext holds.
        if (('nextsongid' in old_status) != ('nextsongid' in new_status)):
            self._dbus_service.update_property('org.mpris.MediaPlayer2.Player', 'CanGoNext')
def setup_mediakeys(self):
self.register_mediakeys()
self._dbus_obj = self._bus.get_object('org.freedesktop.DBus', '/org/freedesktop/DBus')
self._dbus_obj.connect_to_signal('NameOwnerChanged', self.gsd_name_owner_changed_callback, arg0='org.gnome.SettingsDaemon')
def register_mediakeys(self):
try:
try:
gsd_object = self._bus.get_object('org.gnome.SettingsDaemon.MediaKeys', '/org/gnome/SettingsDaemon/MediaKeys')
except:
gsd_object = self._bus.get_object('org.gnome.SettingsDaemon', '/org/gnome/SettingsDaemon/MediaKeys')
gsd_object.GrabMediaPlayerKeys('mpDris2', 0, dbus_interface='org.gnome.SettingsDaemon.MediaKeys')
except:
logger.warning("Failed to connect to GNOME Settings Daemon. Media keys won't work.")
else:
self._bus.remove_signal_receiver(self.mediakey_callback)
gsd_object.connect_to_signal('MediaPlayerKeyPressed', self.mediakey_callback)
def gsd_name_owner_changed_callback(self, bus_name, old_owner, new_owner):
if ((bus_name == 'org.gnome.SettingsDaemon') and (new_owner != '')):
def reregister():
logger.debug(('Re-registering with GNOME Settings Daemon (owner %s)' % new_owner))
self.register_mediakeys()
return False
GLib.timeout_add(600, reregister)
    # Compatibility shims: python-mpd's private API changed across releases,
    # so pick whichever implementation the installed MPDClient supports.
    if hasattr(mpd.MPDClient, 'fileno'):
        def fileno(self):
            # Newer python-mpd exposes the socket fd directly.
            return self.client.fileno()
    else:
        def fileno(self):
            # Older releases: reach into the private socket object.
            if (not self.connected):
                raise mpd.ConnectionError('Not connected')
            return self.client._sock.fileno()
    if hasattr(mpd.MPDClient, '_write_command'):
        def _write_command(self, *args):
            return self.client._write_command(*args)
    else:
        raise Exception('Could not find the _write_command method in MPDClient')
    if hasattr(mpd.MPDClient, '_parse_objects_direct'):
        def _fetch_object(self):
            # Emulate _fetch_object on top of the newer direct-parse API.
            objs = self._fetch_objects()
            if (not objs):
                return {}
            return objs[0]
    elif hasattr(mpd.MPDClient, '_fetch_object'):
        def _fetch_object(self):
            return self.client._fetch_object()
    else:
        raise Exception('Could not find the _fetch_object method in MPDClient')
    if hasattr(mpd.MPDClient, '_parse_objects_direct'):
        def _fetch_objects(self, *args):
            return list(self.client._parse_objects_direct(self.client._read_lines(), *args))
    elif hasattr(mpd.MPDClient, '_fetch_objects'):
        def _fetch_objects(self, *args):
            return self.client._fetch_objects(*args)
    else:
        raise Exception('Could not find the _fetch_objects method in MPDClient')
def __getattr__(self, attr):
if (attr[0] == '_'):
raise AttributeError(attr)
return (lambda *a, **kw: self.call(attr, *a, **kw))
def previous(self):
if (self._params['cdprev'] and (self._position >= 3)):
self.seekid(int(self._status['songid']), 0)
else:
self.call('previous')
    def call(self, command, *args):
        """Send an MPD command, leaving/re-entering idle mode around it.

        Returns the command's result, or False when the connection failed
        (a reconnect is attempted first).
        """
        fn = getattr(self.client, command)
        try:
            was_idle = self.idle_leave()
            logger.debug(('Sending command %r (was idle? %r)' % (command, was_idle)))
            r = fn(*args)
            # Only re-enter idle mode if we interrupted it ourselves.
            if was_idle:
                self.idle_enter()
            return r
        except (socket.error, mpd.MPDError, socket.timeout) as ex:
            # Connection-level failure: re-establish and signal failure.
            logger.debug(('Trying to reconnect, got %r' % ex))
            self.reconnect()
            return False
class port_status(message):
    """OFPT_PORT_STATUS async message (OpenFlow wire version 6).

    Sent by the switch when a port is added, removed or modified.
    Generated-serializer style: pack()/unpack() mirror the wire layout
    (version, type, length, xid, reason, 7 pad bytes, port_desc).

    NOTE(review): __eq__ is defined without __hash__, so on Python 3
    instances are unhashable — confirm that is intended.
    """
    version = 6  # wire protocol version this class serializes
    type = 12  # OFPT_PORT_STATUS
    def __init__(self, xid=None, reason=None, desc=None):
        # xid: transaction id (None until assigned).
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        # reason: OFPPR_* code; defaults to 0 (OFPPR_ADD).
        if (reason != None):
            self.reason = reason
        else:
            self.reason = 0
        # desc: description of the affected port.
        if (desc != None):
            self.desc = desc
        else:
            self.desc = ofp.port_desc()
        return
    def pack(self):
        """Serialize to the wire format; the length field is back-patched.

        NOTE(review): the pieces are joined as str (including the 7-byte
        pad and ''.join), which matches Python 2 where struct.pack returns
        str; on Python 3 struct.pack returns bytes and this would raise
        TypeError.  Confirm the target interpreter before changing.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the 16-bit message length, patched below.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!B', self.reason))
        # 7 bytes of padding per the OpenFlow spec.
        packed.append(('\x00' * 7))
        packed.append(self.desc.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a port_status message from *reader*.

        Invoked through the class (port_status.unpack(reader)); note there
        is no @staticmethod decorator here — fine on Python 3, but would
        fail as an unbound method on Python 2.  Verify against the rest of
        the generated module.
        """
        obj = port_status()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 12)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message (4 header bytes consumed).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.reason = reader.read('!B')[0]
        reader.skip(7)
        obj.desc = ofp.port_desc.unpack(reader)
        return obj
    def __eq__(self, other):
        # Field-wise equality against another port_status instance.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.reason != other.reason):
            return False
        if (self.desc != other.desc):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump through the pretty-printer *q*."""
        q.text('port_status {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('reason = ')
                # Translate known OFPPR_* codes to their symbolic names.
                value_name_map = {0: 'OFPPR_ADD', 1: 'OFPPR_DELETE', 2: 'OFPPR_MODIFY'}
                if (self.reason in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.reason], self.reason)))
                else:
                    q.text(('%#x' % self.reason))
                q.text(',')
                q.breakable()
                q.text('desc = ')
                q.pp(self.desc)
            q.breakable()
        q.text('}')
def f(fs):
    """Build a Function on *fs* whose components follow the x-coordinate.

    Scalar subfunctions are set to x, vector ones to (x, ..., x), and
    rank-2 (tensor) ones to a matrix with every entry equal to x.
    """
    f = Function(fs, name='f')
    f_split = f.subfunctions
    # First spatial coordinate of the mesh underlying the function space.
    x = SpatialCoordinate(fs.mesh())[0]
    for fi in f_split:
        fs_i = fi.function_space()
        if (fs_i.rank == 1):
            # Vector space: replicate x across all value components.
            fi.interpolate(as_vector(((x,) * fs_i.value_size)))
        elif (fs_i.rank == 2):
            # Tensor space: columns span the geometric dimension.
            # NOTE(review): the row count uses fs_i.rank (always 2 here)
            # rather than the geometric dimension — confirm the intended
            # tensor shape for meshes where they differ.
            fi.interpolate(as_tensor([[x for i in range(fs_i.mesh().geometric_dimension())] for j in range(fs_i.rank)]))
        else:
            fi.interpolate(x)
    return f
def sample_IHDP(fn_data, test_frac=0.2):
    """Draw the IHDP dataset and split it into train/test partitions.

    Parameters
    ----------
    fn_data :
        Passed through to ``draw_ihdp_data``.
    test_frac : float, optional
        Fraction of samples held out for testing (default 0.2).

    Returns
    -------
    tuple
        ``(train_data, test_data)``, each a 7-tuple of numpy arrays
        ``(X, W, Y, Y_0, Y_1, Y_cf, T_true)``.
    """
    dataset = draw_ihdp_data(fn_data)
    num_samples = len(dataset)
    train_size = int(np.floor(num_samples * (1 - test_frac)))
    # Random train subset without replacement; the remainder is the test set.
    train_index = list(np.random.choice(range(num_samples), train_size, replace=False))
    test_index = list(set(range(num_samples)) - set(train_index))

    # Covariate columns X1..X25.
    feat_cols = ['X%d' % i for i in range(1, 26)]

    def _extract(frame):
        # Pull the (X, W, Y, Y_0, Y_1, Y_cf, T_true) tuple from one split.
        X = np.array(frame[feat_cols])
        W = np.array(frame['Treatment'])
        Y = np.array(frame['Response'])
        T_true = np.array(frame['TE'])
        # Counterfactual outcome: the potential outcome NOT observed
        # (Y_0 for treated units, Y_1 for controls).
        Y_cf = np.array((frame['Treatment'] * frame['Y_0'])
                        + ((1 - frame['Treatment']) * frame['Y_1']))
        Y_0 = np.array(frame['Y_0'])
        Y_1 = np.array(frame['Y_1'])
        return (X, W, Y, Y_0, Y_1, Y_cf, T_true)

    train_data = _extract(dataset.loc[dataset.index[train_index]])
    test_data = _extract(dataset.loc[dataset.index[test_index]])
    return (train_data, test_data)
class _QueryBuilder():
    """Compiles textual filter expressions into SQLAlchemy SELECT queries.

    The grammar (built once per instance) supports comparisons between
    Version attributes and literals, ``labels['name']`` lookups, ``like``
    matching and ``and``/``or``/``not`` combinators.
    """

    def __init__(self) -> None:
        self._parser = self._define_parser()

    @staticmethod
    def _define_parser() -> Any:
        """Build the pyparsing grammar for filter expressions.

        Fix: this method was declared with no parameters but invoked as
        ``self._define_parser()``, which raised TypeError at construction
        time; it uses no instance state, so it is now a @staticmethod.
        """
        pyparsing.ParserElement.enablePackrat()

        class Buildable():
            # Base for anything that lowers to a SQLAlchemy column expression.
            def build(self) -> sqlalchemy.sql.ColumnElement:
                raise NotImplementedError()

        class Token(Buildable):
            pass

        class IdentifierToken(Token):
            # Reference to a Version column or composite attribute.
            def __init__(self, name: str) -> None:
                self.name = name

            def op(self, op: Callable[[Any, Any], sqlalchemy.sql.elements.BinaryExpression], other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                if isinstance(other, IdentifierToken):
                    return op(getattr(Version, self.name), getattr(Version, other.name))
                elif isinstance(other, Token):
                    raise TypeError('Comparing identifiers to labels is not supported.')
                else:
                    return op(getattr(Version, self.name), other)

            def __eq__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.eq, other)

            def __ne__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.ne, other)

            def __lt__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.lt, other)

            def __le__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.le, other)

            def __gt__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.gt, other)

            def __ge__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.ge, other)

            def like(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(sqlalchemy.sql.operators.like_op, other)

            def build(self) -> sqlalchemy.sql.elements.BinaryExpression:
                # A bare identifier means "attribute is non-empty".
                return (getattr(Version, self.name) != '')

        class LabelToken(Token):
            # Reference to a labels['name'] entry, resolved via a subquery.
            def __init__(self, name: str) -> None:
                self.name = name

            def op(self, op, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                if isinstance(other, Token):
                    raise TypeError('Comparing labels to labels or labels to identifiers is not supported.')
                label_query = select(Label.version_id).filter(((Label.name == self.name) & op(Label.value, str(other))))
                return Version.id.in_(label_query)

            def __eq__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.eq, other)

            def __ne__(self, other: Any) -> sqlalchemy.sql.elements.BinaryExpression:
                return self.op(operator.ne, other)

            def build(self) -> sqlalchemy.sql.elements.BinaryExpression:
                # A bare label reference means "label exists".
                label_query = select(Label.version_id).filter((Label.name == self.name))
                return Version.id.in_(label_query)

        # Collect every queryable Version attribute for the identifier regex.
        attributes = []
        for attribute in sqlalchemy.inspect(Version).mapper.composites:
            attributes.append(attribute.key)
        for attribute in sqlalchemy.inspect(Version).mapper.column_attrs:
            attributes.append(attribute.key)
        identifier = pyparsing.Regex('|'.join(attributes)).setParseAction((lambda s, l, t: IdentifierToken(t[0])))
        integer = pyparsing.pyparsing_common.signed_integer
        string = pyparsing.quotedString().setParseAction(pyparsing.removeQuotes)
        bool_true = pyparsing.Keyword('True').setParseAction(pyparsing.replaceWith(True))
        bool_false = pyparsing.Keyword('False').setParseAction(pyparsing.replaceWith(False))
        label = (((pyparsing.Literal('labels') + pyparsing.Literal('[')) + string) + pyparsing.Literal(']')).setParseAction((lambda s, l, t: LabelToken(t[2])))
        atom = (((((identifier | integer) | string) | bool_true) | bool_false) | label)

        class BinaryOp(Buildable):
            op: Optional[Callable[[Any, Any], sqlalchemy.sql.elements.BooleanClauseList]] = None

            def __init__(self, t) -> None:
                assert (len(t[0]) == 3)
                # Keep the operands, dropping the operator literal in between.
                self.args = t[0][0::2]

            def build(self) -> sqlalchemy.sql.elements.BooleanClauseList:
                assert (self.op is not None)
                return self.op(*self.args)

        class EqOp(BinaryOp):
            op = operator.eq

        class NeOp(BinaryOp):
            op = operator.ne

        class LeOp(BinaryOp):
            op = operator.le

        class GeOp(BinaryOp):
            op = operator.ge

        class LtOp(BinaryOp):
            op = operator.lt

        class GtOp(BinaryOp):
            op = operator.gt

        class LikeOp(Buildable):
            def __init__(self, t) -> None:
                assert (len(t[0]) == 3)
                self.args = t[0][0::2]

            def build(self) -> sqlalchemy.sql.elements.BooleanClauseList:
                return self.args[0].like(self.args[1])

        class MultiaryOp(Buildable):
            op: Any = None

            def __init__(self, t) -> None:
                args = t[0][0::2]
                for token in args:
                    if (not isinstance(token, Buildable)):
                        raise pyparsing.ParseFatalException('Operands of boolean and must be expressions, identifier or label references.')
                self.args = args

            def build(self) -> sqlalchemy.sql.elements.BooleanClauseList:
                assert (self.op is not None)
                # op is stored as a bound class attribute; __func__ unwraps it.
                return self.op.__func__(*map((lambda token: token.build()), self.args))

        class AndOp(MultiaryOp):
            op = sqlalchemy.and_

        class OrOp(MultiaryOp):
            op = sqlalchemy.or_

        class NotOp(Buildable):
            def __init__(self, t) -> None:
                self.args = [t[0][1]]

            def build(self) -> sqlalchemy.sql.elements.BooleanClauseList:
                return sqlalchemy.not_(self.args[0].build())

        return pyparsing.infixNotation(atom, [('==', 2, pyparsing.opAssoc.LEFT, EqOp), ('!=', 2, pyparsing.opAssoc.LEFT, NeOp), ('<=', 2, pyparsing.opAssoc.LEFT, LeOp), ('>=', 2, pyparsing.opAssoc.LEFT, GeOp), ('<', 2, pyparsing.opAssoc.LEFT, LtOp), ('>', 2, pyparsing.opAssoc.LEFT, GtOp), ('like', 2, pyparsing.opAssoc.LEFT, LikeOp), ('not', 1, pyparsing.opAssoc.RIGHT, NotOp), ('and', 2, pyparsing.opAssoc.LEFT, AndOp), ('or', 2, pyparsing.opAssoc.LEFT, OrOp)])

    def build(self, filter_expression: Optional[str], columns: Optional[Sequence[Any]] = None) -> Any:
        """Return a SELECT over Version (or *columns*) filtered by the expression.

        Raises:
            UsageError: if the expression cannot be parsed or does not
                reduce to a SQL boolean clause.
        """
        if (columns is not None):
            query = select(*columns)
        else:
            query = select(Version)
        if filter_expression:
            try:
                parsed_filter_expression = self._parser.parseString(filter_expression, parseAll=True)[0]
            except (pyparsing.ParseException, pyparsing.ParseFatalException) as exception:
                raise UsageError('Invalid filter expression {}.'.format(filter_expression)) from exception
            try:
                filter_result = parsed_filter_expression.build()
            except (AttributeError, TypeError) as exception:
                raise UsageError('Invalid filter expression {} (2).'.format(filter_expression)) from exception
            if (not isinstance(filter_result, sqlalchemy.sql.ColumnElement)):
                raise UsageError('Invalid filter expression {} (3).'.format(filter_expression))
            query = query.filter(filter_result)
        return query
def test_procurement_success(setup_test_data, client):
    """File C / File D linkage counts for procurement awards, FY2020 period 8."""
    endpoint = url.format(toptier_code='043', fiscal_year=2020, fiscal_period=8, type='procurement')
    resp = client.get(endpoint)
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == {
        'unlinked_file_c_award_count': 14,
        'unlinked_file_d_award_count': 28,
        'total_linked_award_count': 7,
    }
class ShadowIGMediaBuilder(AbstractCrudObject):
    """Graph API node wrapper for an Instagram media builder (SDK-generated style)."""

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isShadowIGMediaBuilder = True
        super(ShadowIGMediaBuilder, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Field name constants for this node type.
        copyright_check_status = 'copyright_check_status'
        id = 'id'
        status = 'status'
        status_code = 'status_code'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Read this node via ``GET /<id>``; supports batch and pending modes."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ShadowIGMediaBuilder, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            # Caller will execute the request later.
            return request
        else:
            self.assure_call()
            return request.execute()

    _field_types = {'copyright_check_status': 'IGVideoCopyrightCheckStatus', 'id': 'string', 'status': 'string', 'status_code': 'string'}

    @classmethod
    def _get_field_enum_info(cls):
        """Return the enum info map for this node's fields (empty here).

        Fix: restored the ``@classmethod`` decorator — the SDK invokes this
        as ``cls._get_field_enum_info()`` with no explicit argument, which
        fails on a plain function taking ``cls``.
        """
        field_enum_info = {}
        return field_enum_info
def test___setitem__checks_the_given_data_6():
    """Assigning malformed working-hours data raises a descriptive RuntimeError."""
    wh = WorkingHours()
    bad_value = [['no proper data']]
    with pytest.raises(RuntimeError) as cm:
        wh['sun'] = bad_value
    expected = "WorkingHours.working_hours value should be a list of lists of two integers between and the range of integers should be 0-1440, not [['no proper data']]"
    assert str(cm.value) == expected
def upgrade():
    """Create the ``fidescloud`` table and its id index."""
    op.create_table(
        'fidescloud',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('config', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_fidescloud_id'), 'fidescloud', ['id'], unique=False)
class Test(Testbase.ClassSetup):
    """Exercises the Fract4dc calculation controller lifecycle."""

    @classmethod
    def setUpClass(cls):
        # Fix: restored @classmethod — unittest invokes cls.setUpClass()
        # with no arguments, so a plain function taking ``cls`` fails.
        super().setUpClass()
        cls.mandelbrot_compiled_formula = MandelbrotCompiledFormula(cls.g_comp)

    @classmethod
    def tearDownClass(cls):
        # Fix: restored @classmethod (see setUpClass).
        super().tearDownClass()
        del cls.mandelbrot_compiled_formula

    def setUp(self):
        library_path = self.mandelbrot_compiled_formula.get_library_path()
        formula_params = self.mandelbrot_compiled_formula.get_formula_params()
        self.controller = Fract4dc.create_controller(library_path, formula_params, LOCATION_PARAMS)

    def tearDown(self):
        del self.controller

    def _start_calculation(self, message_handler):
        # Shared kick-off used by the start/stop tests below.
        self.controller.set_message_handler(message_handler)
        color_map = Fract4dc.cmap_create(COLOR_MAP)
        image = ImageWrapper(TILE_SIZE, TILE_SIZE).get_img()
        self.controller.start_calculating(params=LOCATION_PARAMS, antialias=0, maxiter=100, yflip=0, nthreads=1, cmap=color_map, auto_deepen=0, periodicity=1, render_type=0, image=image)

    def test_set_message_handler(self):
        message_handler = MockMessageHandler()
        try:
            self.controller.set_message_handler(message_handler)
        except Exception:
            self.fail('set_message_handler() raised Exception')

    def test_start_calculating(self):
        message_handler = MockMessageHandler()
        initial_statuses_history = message_handler.get_statuses_history().copy()
        self._start_calculation(message_handler)
        first_status = message_handler.get_statuses_history()[0]
        # A fresh handler has no history; calculation starts with status 1.
        self.assertEqual(0, len(initial_statuses_history))
        self.assertEqual(1, first_status)

    def test_stop_calculating(self):
        message_handler = MockMessageHandler()
        initial_statuses_history = message_handler.get_statuses_history().copy()
        self._start_calculation(message_handler)
        first_status = message_handler.get_statuses_history()[0]
        self.controller.stop_calculating()
        last_status = message_handler.get_statuses_history()[-1]
        self.assertEqual(0, len(initial_statuses_history))
        self.assertEqual(1, first_status)
        # Stopping drives the status back to 0 (idle).
        self.assertEqual(0, last_status)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.