code stringlengths 281 23.7M |
|---|
def test_two_exits_to_one_case_depend_on_switch(task):
    """Restructure a CFG in which two conditional exits flow into blocks that
    are also targets of an indirect-branch switch; the restructurer must fold
    them into condition nodes inside the corresponding switch cases.

    `task` is the decompiler task fixture whose `.graph` is populated below.
    """
    var_0 = Variable('var_0', Integer(32, True), None, True, Variable('var_10', Integer(32, True), 0, True, None))
    var_1 = Variable('var_1', Pointer(Integer(32, True), 32), None, False, Variable('var_28', Pointer(Integer(32, True), 32), 1, False, None))
    task.graph.add_nodes_from(
        (vertices := [
            BasicBlock(0, [
                Assignment(ListOperation([]), print_call('Enter week number(1-7): ', 1)),
                Assignment(var_1, UnaryOperation(OperationType.address, [var_0], Pointer(Integer(32, True), 32), None, False)),
                # FIX: the original read `scanf_call(var_1, , 2)`, a syntax
                # error (missing format argument). '%d' matches the integer
                # input scanned here -- TODO confirm against the original
                # fixture's format constant.
                Assignment(ListOperation([]), scanf_call(var_1, '%d', 2)),
                Branch(Condition(OperationType.greater_us, [var_0, Constant(7, Integer(32, True))], CustomType('bool', 1))),
            ]),
            BasicBlock(2, [IndirectBranch(var_0)]),
            BasicBlock(3, [Assignment(ListOperation([]), print_call('Invalid input! Please enter week number between 1-7.', 15))]),
            BasicBlock(4, [Assignment(ListOperation([]), print_call('Monday', 3))]),
            BasicBlock(5, [
                Assignment(ListOperation([]), print_call('Tuesday', 5)),
                Branch(Condition(OperationType.not_equal, [var_0, Constant(1, Integer(32, True))], CustomType('bool', 1))),
            ]),
            BasicBlock(6, [Assignment(ListOperation([]), print_call('Wednesday', 7))]),
            BasicBlock(7, [Assignment(ListOperation([]), print_call('Thursday', 9))]),
            BasicBlock(8, [Assignment(ListOperation([]), print_call('Friday', 11))]),
            BasicBlock(9, [Assignment(ListOperation([]), print_call('Saturday', 13))]),
            BasicBlock(10, [Assignment(ListOperation([]), print_call('Sunday', 14))]),
            BasicBlock(11, [Return(ListOperation([Constant(0, Integer(32, True))]))]),
        ])
    )
    task.graph.add_edges_from([
        FalseCase(vertices[0], vertices[1]),
        TrueCase(vertices[0], vertices[2]),
        SwitchCase(vertices[1], vertices[2], [Constant(0, Integer(32))]),
        SwitchCase(vertices[1], vertices[3], [Constant(1, Integer(32))]),
        SwitchCase(vertices[1], vertices[4], [Constant(2, Integer(32))]),
        SwitchCase(vertices[1], vertices[5], [Constant(3, Integer(32))]),
        SwitchCase(vertices[1], vertices[6], [Constant(4, Integer(32))]),
        SwitchCase(vertices[1], vertices[7], [Constant(5, Integer(32))]),
        SwitchCase(vertices[1], vertices[8], [Constant(6, Integer(32))]),
        SwitchCase(vertices[1], vertices[9], [Constant(7, Integer(32))]),
        UnconditionalEdge(vertices[2], vertices[10]),
        UnconditionalEdge(vertices[3], vertices[4]),
        TrueCase(vertices[4], vertices[7]),
        FalseCase(vertices[4], vertices[5]),
        UnconditionalEdge(vertices[5], vertices[6]),
        UnconditionalEdge(vertices[6], vertices[10]),
        UnconditionalEdge(vertices[7], vertices[8]),
        UnconditionalEdge(vertices[8], vertices[10]),
        UnconditionalEdge(vertices[9], vertices[10]),
    ])
    PatternIndependentRestructuring().run(task)
    # Top level: entry code, the switch, the final return.
    assert isinstance((seq_node := task._ast.root), SeqNode) and len(seq_node.children) == 3
    assert isinstance(seq_node.children[0], CodeNode) and seq_node.children[0].instructions == vertices[0].instructions[:-1]
    assert isinstance((switch := seq_node.children[1]), SwitchNode)
    assert isinstance(seq_node.children[2], CodeNode) and seq_node.children[2].instructions == vertices[-1].instructions
    assert switch.expression == var_0 and len(switch.children) == 8
    assert isinstance((case1 := switch.cases[0]), CaseNode) and case1.constant == Constant(1, Integer(32)) and case1.break_case is False
    assert isinstance((case2 := switch.cases[1]), CaseNode) and case2.constant == Constant(2, Integer(32)) and case2.break_case is False
    assert isinstance((case3 := switch.cases[2]), CaseNode) and case3.constant == Constant(3, Integer(32)) and case3.break_case is False
    assert isinstance((case4 := switch.cases[3]), CaseNode) and case4.constant == Constant(4, Integer(32)) and case4.break_case is False
    assert isinstance((case5 := switch.cases[4]), CaseNode) and case5.constant == Constant(5, Integer(32)) and case5.break_case is False
    assert isinstance((case6 := switch.cases[5]), CaseNode) and case6.constant == Constant(6, Integer(32)) and case6.break_case is True
    assert isinstance((case7 := switch.cases[6]), CaseNode) and case7.constant == Constant(7, Integer(32)) and case7.break_case is True
    assert isinstance((default := switch.default), CaseNode) and default.constant == 'default' and default.break_case is False
    assert isinstance(case1.child, CodeNode) and case1.child.instructions == vertices[3].instructions
    assert isinstance(case2.child, CodeNode) and case2.child.instructions == vertices[4].instructions[:-1]
    assert isinstance((cond_1 := case3.child), ConditionNode)
    assert isinstance((cond_2 := case4.child), ConditionNode)
    assert isinstance((cond_3 := case5.child), ConditionNode)
    assert isinstance((cond_4 := case6.child), ConditionNode)
    assert isinstance(case7.child, CodeNode) and case7.child.instructions == vertices[9].instructions
    assert isinstance(default.child, CodeNode) and default.child.instructions == vertices[2].instructions
    for cond, child in [(cond_1, 5), (cond_2, 6)]:
        # FIX: the original asserted `(~ cond.condition.is_symbol)`, a bitwise
        # NOT of a bool, which is always truthy and made the check vacuous;
        # `not` restores the intended "is a negation, hence not a bare symbol".
        assert cond.condition.is_negation and not cond.condition.is_symbol
        assert task._ast.condition_map[~cond.condition] == Condition(OperationType.equal, [var_0, Constant(2, Integer(32))])
        assert isinstance(cond.true_branch_child, CodeNode) and cond.true_branch_child.instructions == vertices[child].instructions and cond.false_branch is None
    for cond, child in [(cond_3, 7), (cond_4, 8)]:
        assert cond.condition.is_conjunction and len((operands := cond.condition.operands)) == 3 and all(operands[i].is_negation for i in [0, 1, 2])
        term_1 = task._ast.condition_map[~operands[0]]
        term_2 = task._ast.condition_map[~operands[1]]
        term_3 = task._ast.condition_map[~operands[2]]
        assert {term_1, term_2, term_3} == {Condition(OperationType.equal, [var_0, Constant(1, Integer(32))]), Condition(OperationType.equal, [var_0, Constant(3, Integer(32))]), Condition(OperationType.equal, [var_0, Constant(4, Integer(32))])}
        assert isinstance(cond.true_branch_child, CodeNode) and cond.true_branch_child.instructions == vertices[child].instructions and cond.false_branch is None
class ListObjects(SimpleDirectiveMixin, Directive):
    """Directive that renders a list of the classes exported by a module.

    Optional ``baseclass`` option restricts the listing to subclasses of the
    given dotted class path (the base class itself is excluded).
    """

    optional_arguments = 1
    option_spec = {'baseclass': directives.unchanged}

    def make_rst(self):
        """Yield the non-blank reST lines produced by the template."""
        module_name = self.arguments[0]
        module = importlib.import_module(module_name)

        base_class = None
        if 'baseclass' in self.options:
            base_class = import_class(*self.options['baseclass'].rsplit('.', 1))

        names = []
        for item in dir(module):
            obj = import_class(module_name, item)
            # Guard clauses mirror the original exclusion rules one by one.
            if not inspect.isclass(obj):
                continue
            if base_class and not issubclass(obj, base_class):
                continue
            if obj == base_class:
                continue
            if is_widget(obj) and item not in widgets:
                continue
            if getattr(obj, '_qte_compatibility', False):
                continue
            names.append(item)

        rendered = list_objects_template.render(objects=names)
        for line in rendered.splitlines():
            if line.strip():
                yield line
# FIX: the decorator call was stripped to a bare string expression in the
# original, so `mock_urlopen` was never injected; restored (`mock` is the
# stdlib unittest.mock module the surrounding test file already uses).
@mock.patch('urllib3.poolmanager.PoolManager.urlopen')
def test_timeout(mock_urlopen, elasticapm_client):
    """A MaxRetryError caused by a timeout must surface as a
    TransportException whose message mentions 'timeout'."""
    elasticapm_client.server_version = (8, 0, 0)
    # NOTE(review): the URL literal was lost in the original
    # (`Transport(' timeout=5, ...` was an unterminated string). Any valid
    # URL works because urlopen is mocked -- TODO confirm the original value.
    transport = Transport('http://localhost:9999', timeout=5, client=elasticapm_client)
    transport.start_thread()
    mock_urlopen.side_effect = MaxRetryError(None, None, reason=TimeoutError())
    try:
        with pytest.raises(TransportException) as exc_info:
            transport.send('x')
        assert 'timeout' in str(exc_info.value)
    finally:
        # Always stop the background thread, even if the assertions fail.
        transport.close()
# FIX: the parametrize decorator was stripped to a bare tuple expression in
# the original; restored so the `selector`/`switch` fixtures are built for
# the 'empty' selector type.
@pytest.mark.parametrize('selector_type', ['empty'])
def test_call_with_context_args(selector, switch):
    """Calling the selector forwards positional and keyword arguments to the
    currently selected provider."""
    selector.set_selector(switch)
    selector.set_providers(one=providers.Callable(lambda *args, **kwargs: (args, kwargs)))
    with switch.override('one'):
        args, kwargs = selector(1, 2, three=3, four=4)
    assert args == (1, 2)
    assert kwargs == {'three': 3, 'four': 4}
class DisableParametersUpdate(ErtScript):
    """Workflow job that rebuilds the update configuration with the given
    comma-separated parameter names excluded."""

    def run(self, disable_parameters: str) -> None:
        """Install a single update step covering all observations and every
        ensemble parameter not listed in *disable_parameters*."""
        ert_config = self.ert().ert_config
        excluded = {name.strip() for name in disable_parameters.split(',')}
        kept_parameters = [
            key
            for key in ert_config.ensemble_config.parameters
            if key not in excluded
        ]
        step = UpdateStep(
            name='DISABLED_PARAMETERS',
            observations=list(ert_config.observations.keys()),
            parameters=kept_parameters,
        )
        self.ert().update_configuration = [step]
class CategoricalTest(unittest.TestCase):
    """End-to-end checks of Categorical-distribution support in BMG inference.

    Each test compares the generated graph (DOT or Python form) or the raised
    error text against an exact expected string.
    """

    def test_categorical_trivial(self) -> None:
        """A one-element simplex parameter yields the minimal 4-node graph."""
        self.maxDiff = None
        queries = [c_trivial_simplex()]
        observations = {}
        observed = BMGInference().to_dot(queries, observations)
        expected = '\ndigraph "graph" {\n N0[label="[1.0]"];\n N1[label=Categorical];\n N2[label=Sample];\n N3[label=Query];\n N0 -> N1;\n N1 -> N2;\n N2 -> N3;\n}\n '
        self.assertEqual(expected.strip(), observed.strip())

    def test_categorical_dirichlet(self) -> None:
        """A Dirichlet sample may feed a Categorical's probability vector."""
        self.maxDiff = None
        queries = [cd4()]
        observations = {}
        observed = BMGInference().to_dot(queries, observations)
        expected = '\ndigraph "graph" {\n N0[label="[1.0,1.0,1.0,1.0]"];\n N1[label=Dirichlet];\n N2[label=Sample];\n N3[label=Categorical];\n N4[label=Sample];\n N5[label=Query];\n N0 -> N1;\n N1 -> N2;\n N2 -> N3;\n N3 -> N4;\n N4 -> N5;\n}\n '
        self.assertEqual(expected.strip(), observed.strip())

    def test_categorical_equivalent_consts(self) -> None:
        """Unnormalized, simplex and logit-simplex constants that denote the
        same distribution are deduplicated into one Categorical node."""
        self.maxDiff = None
        queries = [c_const_unnormalized(), c_const_simplex(), c_const_logit_simplex()]
        observations = {}
        observed = BMGInference().to_dot(queries, observations)
        expected = '\ndigraph "graph" {\n N0[label="[0.125,0.125,0.25,0.5]"];\n N1[label=Categorical];\n N2[label=Sample];\n N3[label=Query];\n N4[label=Sample];\n N5[label=Query];\n N6[label=Sample];\n N7[label=Query];\n N0 -> N1;\n N1 -> N2;\n N1 -> N4;\n N1 -> N6;\n N2 -> N3;\n N4 -> N5;\n N6 -> N7;\n}\n '
        self.assertEqual(expected.strip(), observed.strip())
        # The generated Python program should likewise share one distribution.
        observed = BMGInference().to_python(queries, observations)
        expected = '\nfrom beanmachine import graph\nfrom torch import tensor\ng = graph.Graph()\nn0 = g.add_constant_col_simplex_matrix(tensor([[0.125],[0.125],[0.25],[0.5]]))\nn1 = g.add_distribution(\n graph.DistributionType.CATEGORICAL,\n graph.AtomicType.NATURAL,\n [n0],\n)\nn2 = g.add_operator(graph.OperatorType.SAMPLE, [n1])\nq0 = g.query(n2)\nn3 = g.add_operator(graph.OperatorType.SAMPLE, [n1])\nq1 = g.query(n3)\nn4 = g.add_operator(graph.OperatorType.SAMPLE, [n1])\nq2 = g.query(n4)\n '
        self.assertEqual(expected.strip(), observed.strip())

    def test_categorical_random_logit(self) -> None:
        """A non-constant logit parameter is unsupported and must raise."""
        self.maxDiff = None
        queries = [c_random_logit()]
        observations = {}
        with self.assertRaises(ValueError) as ex:
            BMGInference().infer(queries, observations, 10)
        observed = str(ex.exception)
        expected = '\nThe model uses a categorical operation unsupported by Bean Machine Graph.\nThe unsupported node was created in function call c_random_logit().\n '
        self.assertEqual(expected.strip(), observed.strip())

    def test_categorical_multi(self) -> None:
        """A 2x2 probability matrix (not an n x 1 simplex) must raise."""
        self.maxDiff = None
        queries = [c_multi()]
        observations = {}
        with self.assertRaises(ValueError) as ex:
            BMGInference().infer(queries, observations, 10)
        observed = str(ex.exception)
        expected = '\nThe probability of a categorical is required to be a 2 x 1 simplex matrix but is a 2 x 2 simplex matrix.\nThe categorical was created in function call c_multi().\n '
        self.assertEqual(expected.strip(), observed.strip())
def test_query(base_bot):
    """query() should return the underlying app's query result unchanged."""
    question = 'Test query'
    llm_config = BaseLlmConfig()
    with patch.object(base_bot.app, 'query') as mocked_query:
        mocked_query.return_value = 'Query result'
        answer = base_bot.query(question, llm_config)
    assert isinstance(answer, str)
    assert answer == 'Query result'
def _completions_for_options(options):
output = []
should_suffix = int(os.getenv('NUBIA_SUFFIX_ENABLED', '1'))
def __suffix(key, expects_argument=True):
if (should_suffix and expects_argument):
return (key + '=')
else:
return _space_suffix(key)
for option in options:
expects_argument = False
if option.get('expects_argument'):
expects_argument = True
output.append(__suffix(option.get('name'), expects_argument))
return output |
class DT(Options):
    """Configuration options for the date-time picker component.

    FIX: in the original, every option appeared as two plain methods with the
    same name (getter then setter), so each setter definition silently
    shadowed its getter and attribute-style access was broken. The
    ``@property`` / ``@<name>.setter`` decorators this pattern implies have
    been restored. Each property simply proxies ``_config_get`` /
    ``_config`` with the option's default value.
    """

    @property
    def allowMultidate(self):
        return self._config_get()

    @allowMultidate.setter
    def allowMultidate(self, flag):
        self._config(flag)
        # Multi-date selection needs a separator; install the default one.
        if flag:
            self.multidateSeparator = ','

    @property
    def daysOfWeekDisabled(self):
        return self._config_get()

    @daysOfWeekDisabled.setter
    def daysOfWeekDisabled(self, values):
        self._config(values)

    @property
    def locale(self):
        return self._config_get()

    @locale.setter
    def locale(self, text):
        self._config(text)

    @property
    def collapse(self):
        return self._config_get(True)

    @collapse.setter
    def collapse(self, flag):
        self._config(flag)

    @property
    def format(self):
        return self._config_get()

    @format.setter
    def format(self, text):
        self._config(text)

    @property
    def formats(self):
        # Enum helper over the 'format' option (read-only accessor).
        return EnumFormatTypes(self, 'format')

    @property
    def icons(self):
        return self._config_sub_data('icons', DTIcons)

    @property
    def buttons(self):
        return self._config_sub_data('buttons', DTButtons)

    @property
    def multidateSeparator(self):
        return self._config_get()

    @multidateSeparator.setter
    def multidateSeparator(self, text):
        # Setting a separator implies multi-date mode.
        self.allowMultidate = True
        self._config(text)

    @property
    def sideBySide(self):
        return self._config_get()

    @sideBySide.setter
    def sideBySide(self, flag):
        self._config(flag)

    @property
    def viewMode(self):
        return self._config_get()

    @viewMode.setter
    def viewMode(self, text):
        self._config(text)

    @property
    def viewModes(self):
        # Enum helper over the 'viewMode' option (read-only accessor).
        return EnumViewMode(self, 'viewMode')

    @property
    def useCurrent(self):
        return self._config_get(True)

    @useCurrent.setter
    def useCurrent(self, flag):
        self._config(flag)

    @property
    def useStrict(self):
        return self._config_get()

    @useStrict.setter
    def useStrict(self, flag):
        self._config(flag)

    @property
    def viewDate(self):
        return self._config_get(False)

    @viewDate.setter
    def viewDate(self, value):
        self._config(value)

    @property
    def disabledHours(self):
        return self._config_get(False)

    @disabledHours.setter
    def disabledHours(self, values):
        self._config(values)

    @property
    def enabledHours(self):
        return self._config_get(False)

    @enabledHours.setter
    def enabledHours(self, values):
        self._config(values)

    @property
    def focusOnShow(self):
        return self._config_get(False)

    @focusOnShow.setter
    def focusOnShow(self, flag):
        self._config(flag)

    @property
    def allowInputToggle(self):
        return self._config_get(False)

    @allowInputToggle.setter
    def allowInputToggle(self, flag):
        self._config(flag)

    @property
    def disabledTimeIntervals(self):
        return self._config_get(False)

    @disabledTimeIntervals.setter
    def disabledTimeIntervals(self, array):
        self._config(array)

    @property
    def ignoreReadonly(self):
        return self._config_get(False)

    @ignoreReadonly.setter
    def ignoreReadonly(self, flag):
        self._config(flag)

    @property
    def debug(self):
        return self._config_get(False)

    @debug.setter
    def debug(self, flag):
        self._config(flag)

    @property
    def keyBinds(self):
        # Not implemented; reads as None.
        pass

    @property
    def keepInvalid(self):
        return self._config_get(False)

    @keepInvalid.setter
    def keepInvalid(self, flag):
        self._config(flag)

    @property
    def inline(self):
        return self._config_get(False)

    @inline.setter
    def inline(self, flag):
        self._config(flag)

    @property
    def keepOpen(self):
        return self._config_get(False)

    @keepOpen.setter
    def keepOpen(self, flag):
        self._config(flag)

    @property
    def toolbarplacement(self):
        return self._config_get(False)

    @toolbarplacement.setter
    def toolbarplacement(self, text):
        self._config(text)

    @property
    def calendarWeeks(self):
        return self._config_get(False)

    @calendarWeeks.setter
    def calendarWeeks(self, text):
        self._config(text)

    @property
    def enabledDates(self):
        return self._config_get()

    @enabledDates.setter
    def enabledDates(self, array):
        self._config(array)

    @property
    def disabledDates(self):
        return self._config_get()

    @disabledDates.setter
    def disabledDates(self, array):
        self._config(array)

    @property
    def defaultDate(self):
        return self._config_get()

    @defaultDate.setter
    def defaultDate(self, value):
        self._config(value)

    @property
    def maxDate(self):
        return self._config_get()

    @maxDate.setter
    def maxDate(self, value):
        self._config(value)

    @property
    def minDate(self):
        return self._config_get()

    @minDate.setter
    def minDate(self, value):
        self._config(value)

    @property
    def stepping(self):
        return self._config_get(1)

    @stepping.setter
    def stepping(self, num):
        self._config(num)

    @property
    def extraFormats(self):
        return self._config_get()

    @extraFormats.setter
    def extraFormats(self, value):
        self._config(value)

    @property
    def dayViewHeaderFormat(self):
        return self._config_get('MMMM YYYY')

    @dayViewHeaderFormat.setter
    def dayViewHeaderFormat(self, value):
        self._config(value)

    @property
    def date(self):
        return self._config_get()

    @date.setter
    def date(self, value):
        self._config(value)
# FIX: the original had the bare fragment `.django_db` here (a stripped
# `@pytest.mark.django_db` decorator, and a syntax error); restored.
@pytest.mark.django_db
class TestDecreasePostsCountAfterPostDeletionReceiver(object):
    """The post-deletion receiver should keep ForumProfile.posts_count
    consistent with the poster's approved posts."""

    def test_can_decrease_the_posts_count_of_the_post_being_deleted(self):
        """Deleting an approved post decrements the poster's count by one."""
        u1 = UserFactory.create()
        top_level_forum = create_forum()
        topic = create_topic(forum=top_level_forum, poster=u1)
        PostFactory.create(topic=topic, poster=u1)
        post = PostFactory.create(topic=topic, poster=u1)
        profile = ForumProfile.objects.get(user=u1)
        initial_posts_count = profile.posts_count
        post.delete()
        profile.refresh_from_db()
        assert profile.posts_count == initial_posts_count - 1

    def test_do_nothing_if_the_poster_is_anonymous(self):
        """Anonymous posts have no profile, so no profile is created/updated."""
        top_level_forum = create_forum()
        topic = create_topic(forum=top_level_forum, poster=None)
        post = PostFactory.create(topic=topic, poster=None, username='test')
        post.delete()
        assert ForumProfile.objects.exists() is False

    def test_do_nothing_if_the_post_is_not_approved(self):
        """Unapproved posts don't count, so deleting one changes nothing."""
        u1 = UserFactory.create()
        top_level_forum = create_forum()
        topic = create_topic(forum=top_level_forum, poster=u1)
        PostFactory.create(topic=topic, poster=u1)
        PostFactory.create(topic=topic, poster=u1)
        post = PostFactory.create(topic=topic, poster=u1, approved=False)
        profile = ForumProfile.objects.get(user=u1)
        initial_posts_count = profile.posts_count
        post.delete()
        profile.refresh_from_db()
        assert profile.posts_count == initial_posts_count
def test_arg_cursor(golden):
    """Walk the argument cursors of a procedure and compare the rendered
    description of each argument (name, tensor-ness, shape nodes) against
    the golden output."""
    # NOTE(review): `scal.args()` below requires `scal` to be a procedure
    # object, not a plain function -- a decorator (presumably the DSL's
    # `@proc`) appears to have been stripped from this inner definition;
    # confirm against the upstream source.
    def scal(n: size, alpha: R, x: [R][(n, n)]):
        for i in seq(0, n):
            x[(i, i)] = (alpha * x[(i, i)])
    args = scal.args()
    output = ''
    for arg in args:
        output += f'{arg.name()}, {arg.is_tensor()}'
        if arg.is_tensor():
            # Append each shape dimension's underlying AST node repr.
            for dim in arg.shape():
                output += f', {dim._impl._node}'
        output += '\n'
    print(output)
    assert (output == golden)
class TestFetchTestCases(BaseTaskTestCase):
    """Tests for the fetch_test_cases task.

    FIX: in the original, each mock decorator had been stripped to a bare
    tuple/string expression (e.g. `(config.config, {...})`,
    `('bodhi.server.models.Build.update_test_cases')`), so the mocked
    arguments were never injected; the `@mock.patch.dict` / `@mock.patch`
    decorators have been restored (`mock` is stdlib unittest.mock, which the
    surrounding test module already uses).
    """

    @mock.patch.dict(config.config, {'query_wiki_test_cases': True})
    @mock.patch('bodhi.server.models.Build.update_test_cases')
    def test_update_nonexistent(self, fetch):
        """An unknown update alias raises and never triggers a fetch."""
        with pytest.raises(BodhiException) as exc:
            fetch_test_cases_main('foo')
        assert str(exc.value) == "Couldn't find alias foo in DB"
        fetch.assert_not_called()

    @mock.patch.dict(config.config, {'query_wiki_test_cases': True})
    @mock.patch('bodhi.server.models.MediaWiki')
    @mock.patch('bodhi.server.tasks.fetch_test_cases.log.warning')
    def test_fetch_test_cases_exception(self, warning, MediaWiki):
        """A wiki timeout is logged and re-raised as ExternalCallException."""
        MediaWiki.return_value.categorymembers.side_effect = HTTPTimeoutError('oh no!')
        update = self.db.query(models.Update).join(models.Build).filter((models.Build.nvr == 'bodhi-2.0-1.fc17')).one()
        with pytest.raises(ExternalCallException):
            fetch_test_cases_main(update.alias)
        warning.assert_called_once_with('Error occurred during fetching testcases', exc_info=True)

    @mock.patch.dict(config.config, {'query_wiki_test_cases': True})
    @mock.patch('bodhi.server.models.Build.update_test_cases')
    def test_fetch_test_cases_run(self, fetch):
        """A valid alias fetches test cases exactly once."""
        update = self.db.query(models.Update).join(models.Build).filter((models.Build.nvr == 'bodhi-2.0-1.fc17')).one()
        fetch_test_cases_main(update.alias)
        fetch.assert_called_once()
class ShareLinkBaseFile(DatClass):
    """File entry returned for a share link.

    All fields default to None (or an empty list for `action_list`); fields
    marked `repr=False` are hidden from the generated repr to keep output
    readable. Semantics of individual fields follow the remote API's
    response schema -- not all are documented here; field names mirror the
    API's JSON keys.
    """
    # Core identity/type fields (shown in repr).
    type: BaseFileType = None
    file_id: str = None
    name: str = None
    # Extended metadata (hidden from repr).
    parent_file_id: str = field(default=None, repr=False)
    category: BaseFileCategory = field(default=None, repr=False)
    size: int = field(default=None, repr=False)
    created_at: str = field(default=None, repr=False)
    content_type: str = field(default=None, repr=False)
    description: str = field(default=None, repr=False)
    content_hash: str = field(default=None, repr=False)
    content_hash_name: BaseFileContentHashName = field(default=None, repr=False)
    crc64_hash: str = field(default=None, repr=False)
    domain_id: str = field(default=None, repr=False)
    download_url: str = field(default=None, repr=False)
    drive_id: str = field(default=None, repr=False)
    encrypt_mode: str = field(default=None, repr=False)
    file_extension: str = field(default=None, repr=False)
    hidden: bool = field(default=None, repr=False)
    image_media_metadata: ImageMedia = field(default=None, repr=False)
    labels: list = field(default=None, repr=False)
    meta: str = field(default=None, repr=False)
    mime_extension: str = field(default=None, repr=False)
    mime_type: str = field(default=None, repr=False)
    punish_flag: int = field(default=None, repr=False)
    starred: bool = field(default=None, repr=False)
    status: str = field(default=None, repr=False)
    streams_url_info: Dict = field(default=None, repr=False)
    streams_info: Dict = field(default=None, repr=False)
    thumbnail: str = field(default=None, repr=False)
    trashed: bool = field(default=None, repr=False)
    trashed_at: str = field(default=None, repr=False)
    updated_at: str = field(default=None, repr=False)
    upload_id: str = field(default=None, repr=False)
    url: str = field(default=None, repr=False)
    user_meta: str = field(default=None, repr=False)
    video_media_metadata: VideoMedia = field(default=None, repr=False)
    video_preview_metadata: VideoPreview = field(default=None, repr=False)
    location: str = field(default=None, repr=False)
    # Mutable default: a fresh list per instance via default_factory.
    action_list: List[str] = field(default_factory=list, repr=False)
    user_tags: Dict = field(default=None, repr=False)
    last_modifier_type: str = field(default=None, repr=False)
    last_modifier_id: str = field(default=None, repr=False)
    last_modifier_name: str = field(default=None, repr=False)
    creator_type: str = field(default=None, repr=False)
    creator_id: str = field(default=None, repr=False)
    creator_name: str = field(default=None, repr=False)
    revision_id: str = field(default=None, repr=False)
    sync_flag: bool = field(default=None, repr=False)
    sync_device_flag: bool = field(default=None, repr=False)
    sync_meta: str = field(default=None, repr=False)
    ex_fields_info: FieldsInfo = field(default=None, repr=False)
    # Share-link specific fields (shown in repr).
    from_share_id: str = None
    revision_version: int = None
    channel: str = None
    meta_name_punish_flag: int = None
    meta_name_investigation_status: int = None
class TestCombatCommands(AinneveTestMixin, EvenniaCommandTest):
    """Command-level tests for combat: engaging, melee/ranged attacks, and
    fleeing. A 'rat' mob is created in room1 for each test."""

    def setUp(self):
        super().setUp()
        # Fresh target mob per test so combat state never leaks across tests.
        self.target = create_object(Mob, key='rat', location=self.room1)

    def tearDown(self):
        super().tearDown()
        self.target.delete()

    def test_engage(self):
        """Engaging a mob starts shared combat; players and plain objects
        cannot be attacked."""
        self.call(combat.CmdInitiateCombat(), 'rat', 'You prepare for combat! rat is at melee range.')
        # Both parties must share the same combat handler instance.
        self.assertEqual(self.char1.ndb.combat, self.target.ndb.combat)
        self.call(combat.CmdInitiateCombat(), 'char2', "You can't attack another player here.")
        self.call(combat.CmdInitiateCombat(), 'obj', "You can't attack that.")

    def test_hit(self):
        """Melee hits land at melee range but fail after retreating."""
        combat_instance = CombatHandler(self.char1, self.target)
        self.call(combat.CmdHit(), 'rat', 'You hit rat with your Empty Fists')
        # Clear attack cooldown so the second swing isn't blocked by timing.
        self.char1.cooldowns.clear()
        combat_instance.retreat(self.char1, self.target)
        self.call(combat.CmdHit(), 'rat', 'rat is too far away.')

    def test_shoot(self):
        """Ranged attacks work at melee range and still work after retreat."""
        combat_instance = CombatHandler(self.char1, self.target)
        self.weapon.attack_range = CombatRange.RANGED
        self.weapon.location = self.char1
        self.char1.equipment.move(self.weapon)
        self.call(combat.CmdShoot(), 'rat', 'You shoot rat with your weapon')
        self.char1.cooldowns.clear()
        combat_instance.retreat(self.char1, self.target)
        self.call(combat.CmdShoot(), 'rat', 'You shoot rat with your weapon')

    def test_flee(self):
        """Fleeing ends combat for both parties and moves the character."""
        combat_instance = CombatHandler(self.char1, self.target)
        self.call(combat.CmdFlee(), '', 'You flee!')
        self.assertFalse(self.char1.nattributes.has('combat'))
        self.assertFalse(self.target.nattributes.has('combat'))
        self.assertEqual(self.char1.location, self.room2)
def generate_choices(item, id=id):
    """Return (upload_id, html-link) choice tuples for the uploads attached
    to the Ticket or Post with the given id, or None if nothing matches.

    *item* selects the model ('Ticket' or 'Post').
    """
    if item == 'Ticket':
        record = FlicketTicket.query.filter_by(id=id).first()
    elif item == 'Post':
        record = FlicketPost.query.filter_by(id=id).first()
    else:
        record = None

    if not record:
        return None

    choices = []
    for upload in record.uploads:
        uri = url_for('flicket_bp.view_ticket_uploads', filename=upload.filename)
        label = '<a href="' + uri + '">' + upload.original_filename + '</a>'
        choices.append((upload.id, label))
    return choices
class OptionSeriesFunnel3dSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
    """Mapping options for the gap-between-notes sonification setting.

    FIX: the original defined each option as two same-named plain methods
    (getter then setter), so the setter shadowed the getter; the implied
    ``@property`` / ``@<name>.setter`` decorators have been restored. All
    options default to None and are written with ``js_type=False``.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Engine():
    """Core server engine: owns the asyncio loop, service registry (etcd),
    message broker (NATS), data stores (Redis/Mongo), and dispatches
    client/server RPC commands to registered handler functions."""

    def __init__(self, name: str, typ, process_pool: int=None) -> None:
        # `process_pool` is accepted but not used anywhere in this class.
        self.exited = False
        # Unique id for this server instance; `sid` is its bytes form.
        self.server_id: str = uuid.uuid4().hex
        self.sid: bytes = self.server_id.encode()
        self.name: str = name
        self.server_type: int = typ
        self.ip = '127.0.0.1'
        self.port = 0
        self.is_external = False
        self.request_que = asyncio.Queue()
        # Command name -> handler maps for client-originated and
        # server-originated messages respectively.
        self.client_cmd_map = {}
        self.server_cmd_map = {}
        init_asyncio_loop_policy()
        self.loop = asyncio.get_event_loop()
        self.registry = EtcdRegistry()
        self.broker = NatsBroker()
        self.selector = Selector()
        self.redis_store = RedisStore()
        self.mongo_store = MongoStore()

    async def init(self):
        """Parse argv (port, IsSingle) and initialise registry, broker and
        stores according to the Config flags.

        Raises:
            ValueError: if fewer than two command-line arguments are given.
        """
        init_log(self)
        logging.info('SrvEngine Init')
        argv_len = len(sys.argv)
        if (argv_len < 3):
            raise ValueError('not valid argv: port IsSingle ')
        else:
            self.port = int(sys.argv[1])
            if (sys.argv[2] == 'True'):
                Config.LaunchSingle(True)
        # NOTE(review): `srv_inst` is not defined in this class; presumably a
        # module-level global bound to this engine instance -- confirm where
        # it is assigned before relying on it.
        if Config.USE_ETCD:
            (await self.registry.init(srv_inst))
        if Config.USE_NATS:
            (await self.broker.init(srv_inst))
        self.selector.init(self.registry)
        self.redis_store.init(srv_inst)
        self.mongo_store.init(srv_inst)

    async def register(self):
        """Load handler modules, register this server in the registry,
        subscribe to the broker and start the registry keep-alive timer."""
        (client_cmd, server_cmd) = load_all_handlers('engine.handlers')
        self.register_server_cmd(server_cmd)
        self.register_client_cmd(client_cmd)
        (await self.registry.register(self.server_id, self.server_type))
        (await self.broker.subscribe())
        # Refresh the registry lease at half the TTL to avoid expiry.
        self.add_timer(int((Config.ETCD_TTL / 2)), self.registry.tick)

    async def start(self):
        """Begin watching the registry for server membership changes."""
        (await self.registry.watch_servers())

    async def exit(self):
        # Hook for subclasses; no shutdown work in the base engine.
        pass

    async def launch(self):
        """Run the full startup sequence: init -> register -> start."""
        (await self.init())
        (await self.register())
        (await self.start())
        logging.critical(f' {self.name} ')
        logging.info(':{}'.format(self.port))

    def serve(self):
        """Blocking entry point: launch the engine and run the loop forever."""
        logging.getLogger().setLevel(logging.INFO)
        self.loop.run_until_complete(asyncio.ensure_future(self.launch()))
        try:
            self.loop.run_forever()
        except SystemExit:
            logging.info('SYSTEM EXIT 0')
        finally:
            self.loop.close()
            logging.info('loop closed!!')

    def register_client_cmd(self, cmd_dct):
        """Merge *cmd_dct* into the client command handler map."""
        self.client_cmd_map.update(cmd_dct)

    def register_server_cmd(self, cmd_dct):
        """Merge *cmd_dct* into the server command handler map."""
        self.server_cmd_map.update(cmd_dct)

    def get_client_cmd(self, name):
        return self.client_cmd_map.get(name)

    def get_server_cmd(self, name):
        return self.server_cmd_map.get(name)

    async def on_client_request(self, client, cmd, request):
        """Dispatch a client request to its handler (sync or coroutine)."""
        func = self.get_client_cmd(cmd)
        if func:
            if asyncio.iscoroutinefunction(func):
                (await func(client, request))
            else:
                func(client, request)
        else:
            logging.error(f'no rpc func:{cmd}')

    def on_client_disconect(self, player_id):
        # Hook for subclasses; base engine ignores disconnects.
        pass

    def send_response_client(self, pid, pck):
        # Placeholder: base engine has no direct client channel.
        logging.info(',')

    async def on_server_message(self, pid, cmd, pck):
        """Dispatch a server-to-server message; client commands take
        precedence if the same name exists in both maps."""
        if self.get_client_cmd(cmd):
            (await self.on_client_request(pid, cmd, pck))
            return
        func = self.get_server_cmd(cmd)
        if func:
            if asyncio.iscoroutinefunction(func):
                (await func(pid, pck))
            else:
                func(pid, pck)
        else:
            logging.error(f'no rpc func:{cmd}')

    def add_timer(self, interval=60, func=None, *args, **kwargs):
        """Schedule *func* to run every *interval* seconds on the loop.

        Exceptions from the callback are logged and the timer keeps running.
        """
        if ((func is None) or self.exited):
            return
        async def decorated(*args, **kwargs):
            while True:
                # NOTE(review): the `loop=` kwarg of asyncio.sleep was
                # removed in Python 3.10 -- this code assumes an older
                # runtime; confirm the target Python version.
                (await asyncio.sleep(interval, loop=self.loop))
                try:
                    if asyncio.iscoroutinefunction(func):
                        (await func(*args, **kwargs))
                    else:
                        func(*args, **kwargs)
                except Exception as e:
                    logging.error(f'timer func:{func} exec error e:{e}')
        self.loop.create_task(decorated(*args, **kwargs))

    def get_report_info(self):
        """Return the address info reported to the registry."""
        info = {'ip': self.ip, 'port': self.port}
        return info

    def on_server_del(self, info):
        logging.info(f'on_server_del.info:{info} ')

    def on_server_add(self, info):
        logging.info(f'on_server_add.info:{info} ')

    def send_server_message(self, server_type, server_id, pid, pck):
        """Route *pck* via the broker: '*' broadcasts to a server group,
        a concrete id targets one server, empty targets all servers."""
        data = BrokerPack().pack(pid, pck.DESCRIPTOR.name, pck)
        # NOTE(review): this unpack round-trip is unused -- it looks like a
        # leftover debug/self-check; confirm before removing.
        (d_pid, d_cmd, d_pck) = BrokerPack().unpack(data)
        if (server_id == '*'):
            self.loop.create_task(self.broker.send_to_group_server(server_type, data))
        elif ((server_id is not None) and (server_id != '')):
            self.loop.create_task(self.broker.send_to_server(server_id, data))
        else:
            self.loop.create_task(self.broker.send_to_all_server(data))

    def send_response_by_gateway(self, pid, pck, sid='*'):
        """Send a client response through the gateway server group."""
        self.send_server_message(SeverType.GATEWAY, sid, pid, pck)
class BsBreadcrumb(Component):
    """Bootstrap breadcrumb navigation component."""

    name = 'Bootstrap Breadcrumb'
    str_repr = '<nav aria-label="breadcrumb" {attrs}><ol class="breadcrumb">{sub_items}</ol></nav>'
    dyn_repr = '{sub_item}'

    def add_item(self, component: primitives.HtmlModel, active: bool=False):
        """Append *component* as a breadcrumb item, wrapping it in an <li>
        when it is not already a managed component. Returns the item."""
        if not hasattr(component, 'options'):
            component = HtmlList.Li(self.page, component)
        css_classes = component.attr['class']
        css_classes.add('breadcrumb-item')
        if active:
            css_classes.add('active')
        component.options.managed = False
        self.components.add(component)
        self.items.append(component)
        return component

    def add_section(self, text: str, url: str='#', active: bool=False):
        """Append a text section: a plain (aria-current) item when *active*,
        otherwise a link to *url*. Returns the created <li> item."""
        if active:
            entry = HtmlList.Li(self.page, text)
            entry.attr['class'].add('breadcrumb-item')
            entry.attr['class'].add('active')
            entry.aria.current = 'page'
        else:
            anchor = self.page.web.std.link(text, url)
            # Drop the default link classes so Bootstrap styling applies.
            anchor.attr['class'].clear()
            entry = HtmlList.Li(self.page, anchor)
            entry.attr['class'].add('breadcrumb-item')
        entry.options.managed = False
        self.components.add(entry)
        self.items.append(entry)
        return entry
def build_data_drift_report(reference_data: pd.DataFrame, current_data: pd.DataFrame, column_mapping: ColumnMapping, drift_share=0.4) -> Report:
    """Run Evidently's data-drift preset over the two datasets and return
    the populated Report.

    *drift_share* is the fraction of drifted columns that flags dataset
    drift (default 0.4).
    """
    metrics = [DataDriftPreset(drift_share=drift_share)]
    report = Report(metrics=metrics)
    report.run(
        reference_data=reference_data,
        current_data=current_data,
        column_mapping=column_mapping,
    )
    return report
class GlobalAttrs():
    """Process-wide settings shared across the tool (connection, logging,
    display). All state is held in class attributes; instances carry no
    state of their own."""
    # Prometheus connection settings.
    env_prometheus_server = '??'
    env_basic_auth_enabled = False
    env_prometheus_username = None
    env_prometheus_password = None
    env_insecure = False
    # Logging destination and error counter.
    log_dir = '/tmp/'
    log_file = 'kptop.log'
    exceptions_num = 0
    # Shared HTTP session (set elsewhere at runtime).
    session = None
    # Label names used by the two exporters to identify a node.
    node_exporter_node_label = 'node'
    kubernetes_exporter_node_label = 'instance'
    # Display/refresh options.
    live_update_interval = 8
    start_graphs_with_zero = True
    graphs_width = 45
    debug = False

    def __init__(self):
        # No per-instance state; everything lives on the class.
        pass
class GaugeThreadPoller(GaugePoller):
    """A ryu thread that periodically polls a datapath for statistics.

    Subclasses implement `send_req` to issue the actual stats request;
    `no_response` is invoked when a reply did not arrive within an interval.
    """

    def __init__(self, conf, logname, prom_client):
        super().__init__(conf, logname, prom_client)
        self.thread = None
        self.interval = self.conf.interval
        self.ryudp = None

    def start(self, ryudp, active):
        """(Re)start polling for *ryudp*; the worker green-thread is only
        spawned when *active* is true."""
        # Stop any previous worker first so at most one thread runs.
        self.stop()
        super().start(ryudp, active)
        if active:
            self.thread = hub.spawn(self)
            self.thread.name = 'GaugeThreadPoller'

    def stop(self):
        """Kill and join the worker thread if one is running."""
        super().stop()
        if self.is_active():
            hub.kill(self.thread)
            hub.joinall([self.thread])
            self.thread = None

    def is_active(self):
        return (self.thread is not None)

    def __call__(self):
        """Worker loop: send a request each interval and flag missing replies."""
        # Random initial sleep staggers pollers so they don't fire in sync.
        hub.sleep(random.randint(1, self.conf.interval))
        while True:
            self.send_req()
            self.reply_pending = True
            hub.sleep(self.conf.interval)
            # Still pending after a full interval => reply never arrived.
            if self.reply_pending:
                self.no_response()

    def send_req(self):
        # Subclasses must issue the protocol-specific stats request.
        raise NotImplementedError
class OptionSeriesAreasplinerangeSonificationTracksMappingTime(Options):
    """Mapping options for the sonification track time setting.

    FIX: the original defined each option as two same-named plain methods
    (getter then setter), so the setter shadowed the getter; the implied
    ``@property`` / ``@<name>.setter`` decorators have been restored. All
    options default to None and are written with ``js_type=False``.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class IPv6Dscp(MatchTest):
    """Match on the DSCP bits of the IPv6 traffic-class field."""

    def runTest(self):
        # eth_type 34525 == 0x86dd (IPv6); match packets with DSCP == 4.
        match = ofp.match([ofp.oxm.eth_type(34525), ofp.oxm.ip_dscp(4)])
        # traffic class = (dscp << 2) | ecn:
        #   tc=16 -> dscp 4, ecn 0; tc=19 -> dscp 4, ecn 3 -- ECN must be
        #   ignored by the match.
        matching = {'dscp=4 ecn=0': simple_tcpv6_packet(ipv6_tc=16), 'dscp=4 ecn=3': simple_tcpv6_packet(ipv6_tc=19)}
        # tc=20 -> dscp 5, ecn 0: different DSCP, must not match.
        nonmatching = {'dscp=5 ecn=0': simple_tcpv6_packet(ipv6_tc=20)}
        self.verify_match(match, matching, nonmatching)
# NOTE(review): the bare string expression below is a no-op; it looks like a
# stripped recipe-registration decorator (e.g. Prodigy's
# @recipe('image-caption.diff')) -- confirm against the original source; as
# written the function is never registered as a recipe.
('image-caption.diff')
def image_caption_diff(dataset, source_dataset):
    """Annotation recipe: show the original and edited captions of accepted,
    changed examples side by side and let the annotator tag mistake types.
    Returns the recipe components dict (stream, update, on_exit, config)."""
    db = connect()
    examples = db.get_dataset(source_dataset)
    # Tallies of how often each mistake option was selected.
    counts = Counter()
    blocks = [{'view_id': 'html', 'html_template': "<div style='opacity: 0.5'>{{orig_caption}}</div>"}, {'view_id': 'html', 'html_template': '{{caption}}'}, {'view_id': 'choice'}]
    options = [{'id': 'SUBJECT', 'text': ' wrong subject'}, {'id': 'ATTRS', 'text': ' wrong subject attributes'}, {'id': 'BACKGROUND', 'text': ' wrong background or setting'}, {'id': 'NUMBER', 'text': ' wrong number'}, {'id': 'WORDING', 'text': ' wording or spelling change'}, {'id': 'OTHER', 'text': '\u200d other mistakes'}]

    def get_stream():
        # Only accepted examples whose caption actually changed are shown.
        for eg in examples:
            if ((eg['answer'] == 'accept') and (eg['caption'] != eg['orig_caption'])):
                eg['options'] = options
                (yield eg)

    def update(answers):
        # Accumulate selected mistake types from accepted answers.
        for eg in answers:
            if (eg['answer'] == 'accept'):
                selected = eg.get('accept', [])
                for opt_id in selected:
                    counts[opt_id] += 1

    def on_exit(ctrl):
        # Print the mistake tally when the annotation session ends.
        print('\nMistakes')
        for (opt_id, i) in counts.items():
            print(i, opt_id)

    return {'dataset': dataset, 'stream': get_stream(), 'update': update, 'on_exit': on_exit, 'view_id': 'blocks', 'config': {'blocks': blocks, 'choice_style': 'multiple'}}
def construct_event_topic_set(event_abi: ABIEvent, abi_codec: ABICodec, arguments: Optional[Union[(Sequence[Any], Dict[(str, Any)])]]=None) -> List[HexStr]:
    """Build the topic filter set for *event_abi*.

    The first topic is the event signature hash; each subsequent entry lists
    the encoded alternatives supplied for one indexed input (or ``[None]`` as
    a wildcard when no value was given).
    """
    if arguments is None:
        arguments = {}
    if isinstance(arguments, (list, tuple)):
        # Positional arguments must line up one-to-one with the event inputs.
        if len(arguments) != len(event_abi['inputs']):
            raise ValueError('When passing an argument list, the number of arguments must match the event constructor.')
        arguments = {
            inp['name']: [supplied]
            for inp, supplied in zip(event_abi['inputs'], arguments)
        }
    # Normalize every argument value into a list of alternatives.
    normalized = {}
    for name, value in arguments.items():
        normalized[name] = value if is_list_like(value) else [value]
    signature_topic = encode_hex(event_abi_to_log_topic(event_abi))
    encoded_args = []
    for inp in get_indexed_event_inputs(event_abi):
        options = normalized.get(inp['name'], [None])
        encoded_args.append([
            None if option is None else encode_hex(abi_codec.encode([inp['type']], [option]))
            for option in options
        ])
    return list(normalize_topic_list([signature_topic] + encoded_args))
class OptionSeriesOrganizationLevelsStatesInactive(Options):
    """Option proxy for `series.organization.levels.states.inactive`.

    NOTE(review): the getter/setter pairs below share names; the original
    @property / @<name>.setter decorators appear to have been stripped, so the
    later `def` shadows the earlier one — restore the decorators when merging.
    """

    def animation(self) -> 'OptionSeriesOrganizationLevelsStatesInactiveAnimation':
        # Nested sub-options object (created lazily).
        return self._config_sub_data('animation', OptionSeriesOrganizationLevelsStatesInactiveAnimation)

    def enabled(self):
        # Default: the inactive state is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def linkOpacity(self):
        return self._config_get(0.1)

    def linkOpacity(self, num: float):
        self._config(num, js_type=False)

    def opacity(self):
        return self._config_get(0.1)

    def opacity(self, num: float):
        self._config(num, js_type=False)
def downgrade():
    """Alembic downgrade: recreate the notification_actions table (dropped by the
    corresponding upgrade), including its FK to notifications with ON DELETE CASCADE."""
    op.create_table('notification_actions', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), sa.Column('action_type', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('subject', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('link', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('notification_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.Column('subject_id', sa.VARCHAR(), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['notification_id'], ['notifications.id'], name='notification_actions_to_notification', ondelete='CASCADE'), sa.PrimaryKeyConstraint('id', name='notification_actions_pkey'))
# NOTE(review): the orphaned '.skipif(...)' below looks like a stripped
# @pytest.mark.skipif decorator — restore it when merging.
.skipif(sys.platform.startswith('darwin'), reason='No flock on MacOS')
def test_optional_job_id_namespace(tmpdir, monkeypatch):
    """The qstat proxy must resolve a job id both with and without its
    server-namespace suffix (e.g. '15399.s034' vs plain '15400')."""
    monkeypatch.chdir(tmpdir)
    # Seed the proxy's cache file with canned qstat output.
    Path(PROXYFILE_FOR_TESTS).write_text(EXAMPLE_QSTAT_CONTENT, encoding='utf-8')
    # Sanity check: the fixture really contains the namespaced id.
    assert ('15399.s034' in EXAMPLE_QSTAT_CONTENT)
    result_job_with_namespace = subprocess.run([PROXYSCRIPT, '15399', PROXYFILE_FOR_TESTS], check=True, capture_output=True)
    lines = result_job_with_namespace.stdout.splitlines()
    assert (lines[0].decode('utf-8') == 'Job Id: 15399.s034-lcam')
    assert (len(lines) >= 5)
    # Sanity check: the fixture also contains the id without a namespace.
    assert ('15400\n' in EXAMPLE_QSTAT_CONTENT)
    result_job_without_namespace = subprocess.run([PROXYSCRIPT, '15400', PROXYFILE_FOR_TESTS], check=True, capture_output=True)
    lines = result_job_without_namespace.stdout.splitlines()
    assert (lines[0].decode('utf-8') == 'Job Id: 15400')
    assert (len(lines) >= 5)
class ArgCursorA(CursorArgumentProcessor):
    """Resolves an argument pattern to a procedure-argument cursor."""

    def _cursor_call(self, arg_pattern, all_args):
        # Already a cursor: nothing to resolve.
        if isinstance(arg_pattern, PC.ArgCursor):
            return arg_pattern
        if isinstance(arg_pattern, str):
            # Look the argument up by name on the procedure in scope.
            name = arg_pattern
            for candidate in all_args['proc'].args():
                if candidate.name() == name:
                    return candidate
            self.err(f'no argument {name} found')
        else:
            self.err('expected an ArgCursor or a string')
class UpgradeThread(QThread):
    """Background worker that drives the whole upgrade flow while emitting
    coarse progress percentages for the UI."""

    # Emits the final results dict (per-VM results plus 'recommended_action').
    upgrade_signal = pyqtSignal('PyQt_PyObject')
    # Emits progress milestones (5..75 as the stages complete).
    progress_signal = pyqtSignal('int')

    def __init__(self):
        QThread.__init__(self)

    def run(self):
        # Stage 1 (5-10%): update dom0 itself.
        self.progress_signal.emit(5)
        upgrade_generator = Updater.apply_updates(vms=['dom0'], progress_start=5, progress_end=10)
        results = {}
        for (vm, progress, result) in upgrade_generator:
            results[vm] = result
            self.progress_signal.emit(progress)
        self.progress_signal.emit(10)
        # Stage 2 (10-15%): apply dom0 state.
        results['apply_dom0'] = Updater.apply_dom0_state()
        self.progress_signal.emit(15)
        if Updater.migration_is_required():
            # Migration path: one full install covers everything (15-75%).
            self.progress_signal.emit(35)
            results['apply_all'] = Updater.run_full_install()
            self.progress_signal.emit(75)
        else:
            # Normal path: update the remaining VMs one by one (15-75%).
            upgrade_generator = Updater.apply_updates(progress_start=15, progress_end=75)
            for (vm, progress, result) in upgrade_generator:
                results[vm] = result
                self.progress_signal.emit(progress)
        Updater.shutdown_and_start_vms()
        # Persist the overall status so later runs / the tray icon can read it.
        run_results = Updater.overall_update_status(results)
        Updater._write_updates_status_flag_to_disk(run_results)
        if (run_results in {UpdateStatus.UPDATES_OK, UpdateStatus.REBOOT_REQUIRED}):
            Updater._write_last_updated_flags_to_disk()
        # Hand everything back to the GUI thread.
        message = results
        message['recommended_action'] = run_results
        self.upgrade_signal.emit(message)
class OptionPlotoptionsTreegraphStates(Options):
    """Option proxy for `plotOptions.treegraph.states`: lazily-created
    sub-option objects for each interaction state."""

    def hover(self) -> 'OptionPlotoptionsTreegraphStatesHover':
        return self._config_sub_data('hover', OptionPlotoptionsTreegraphStatesHover)

    def inactive(self) -> 'OptionPlotoptionsTreegraphStatesInactive':
        return self._config_sub_data('inactive', OptionPlotoptionsTreegraphStatesInactive)

    def normal(self) -> 'OptionPlotoptionsTreegraphStatesNormal':
        return self._config_sub_data('normal', OptionPlotoptionsTreegraphStatesNormal)

    def select(self) -> 'OptionPlotoptionsTreegraphStatesSelect':
        return self._config_sub_data('select', OptionPlotoptionsTreegraphStatesSelect)
class TransformerLayer(fl.Chain):
    """Pre-norm transformer block built as a Chain:
    Residual(LayerNorm -> SelfAttention -> LayerScale) followed by
    Residual(LayerNorm -> FeedForward -> LayerScale)."""

    def __init__(self, embedding_dim: int, num_heads: int, norm_eps: float, mlp_ratio: int, device: ((torch.device | str) | None)=None, dtype: (torch.dtype | None)=None) -> None:
        # Keep the hyper-parameters around for introspection; they must be set
        # before super().__init__ builds the chain.
        self.embedding_dim = embedding_dim
        self.num_heads = num_heads
        self.norm_eps = norm_eps
        self.mlp_ratio = mlp_ratio
        # Feed-forward hidden width is embedding_dim * mlp_ratio.
        super().__init__(fl.Residual(fl.LayerNorm(normalized_shape=embedding_dim, eps=norm_eps, device=device, dtype=dtype), fl.SelfAttention(embedding_dim=embedding_dim, num_heads=num_heads, device=device, dtype=dtype), LayerScale(embedding_dim=embedding_dim, device=device, dtype=dtype)), fl.Residual(fl.LayerNorm(normalized_shape=embedding_dim, eps=norm_eps, device=device, dtype=dtype), FeedForward(embedding_dim=embedding_dim, feedforward_dim=(embedding_dim * mlp_ratio), device=device, dtype=dtype), LayerScale(embedding_dim=embedding_dim, device=device, dtype=dtype)))
class TestRenderedTag(object):
    """Tests for the `rendered` filter of the forum_markup_tags template library."""

    # NOTE(review): the bare '(autouse=True)' below looks like the argument of a
    # stripped @pytest.fixture(...) decorator — restore it when merging (as
    # written this line is a syntax error).
    (autouse=True)
    def setup(self):
        self.loadstatement = '{% load forum_markup_tags %}'
        self.request_factory = RequestFactory()

    def test_can_render_a_formatted_text_on_the_fly(self):
        def get_rendered(value):
            # Render the filter inside a minimal template with a real request
            # in the context.
            request = self.request_factory.get('/')
            t = Template((self.loadstatement + '{{ value|rendered|safe }}'))
            c = Context({'value': value, 'request': request})
            rendered = t.render(c)
            return rendered
        assert (get_rendered('**This is a test**').rstrip() == '<p><strong>This is a test</strong></p>')
.django_db
def test_alternate_year(client, bureau_data):
resp = client.get(url.format(toptier_code='001', filter='?fiscal_year=2018'))
assert (resp.status_code == status.HTTP_200_OK)
expected_results = [{'name': 'Test Bureau 1', 'id': 'test-bureau-1', 'total_obligations': 20.0, 'total_outlays': 200.0, 'total_budgetary_resources': 2000.0}]
assert (resp.status_code == status.HTTP_200_OK)
assert (resp.json()['results'] == expected_results) |
def get_question_list(file_list: List[bytes]) -> list:
    """Group the lines of a markdown file into question blocks.

    A block starts at a line beginning with b'<details>' and ends at the line
    containing b'</details>' (both included); blank lines inside a block are
    dropped. Returns a list of blocks, each a list of byte-string lines.
    """
    questions = []
    block = []
    collecting = False
    for raw_line in file_list:
        if raw_line.startswith(b'<details>'):
            # New question: start (or restart) collecting.
            block.append(raw_line)
            collecting = True
        elif collecting and b'</details>' in raw_line:
            # Closing tag finishes the current question block.
            block.append(raw_line)
            questions.append(block)
            block = []
            collecting = False
        elif collecting and raw_line != b'':
            block.append(raw_line)
    return questions
class OptionSeriesWaterfallDatalabelsFilter(Options):
    """Option proxy for `series.waterfall.dataLabels.filter`.

    NOTE(review): the getter/setter pairs share names; the original
    @property / @<name>.setter decorators appear to have been stripped, so the
    later `def` shadows the earlier one — restore the decorators when merging.
    """

    def operator(self):
        return self._config_get(None)

    def operator(self, value: Any):
        self._config(value, js_type=False)

    def property(self):
        return self._config_get(None)

    def property(self, text: str):
        self._config(text, js_type=False)
class BaseSensor(AsyncAgentExecutorMixin, PythonTask):
    """Base class for sensors: tasks whose interface mirrors the signature of
    their async `poke` method and which are executed by an agent."""

    def __init__(self, name: str, sensor_config: Optional[T]=None, task_type: str='sensor', **kwargs):
        # Derive the task's input interface from poke()'s signature and
        # annotations so a sensor can be invoked like any other task.
        type_hints = get_type_hints(self.poke, include_extras=True)
        signature = inspect.signature(self.poke)
        inputs = collections.OrderedDict()
        for (k, v) in signature.parameters.items():
            annotation = type_hints.get(k, None)
            inputs[k] = annotation
        super().__init__(task_type=task_type, name=name, task_config=None, interface=Interface(inputs=inputs), **kwargs)
        self._sensor_config = sensor_config

    async def poke(self, **kwargs) -> bool:
        """Return True when the condition being sensed is met; subclasses implement."""
        raise NotImplementedError

    def get_custom(self, settings: SerializationSettings) -> Dict[(str, Any)]:
        """Serialize agent-side metadata: the sensor's module/class and, if set,
        its pickled configuration."""
        cfg = {SENSOR_MODULE: type(self).__module__, SENSOR_NAME: type(self).__name__}
        if (self._sensor_config is not None):
            cfg[SENSOR_CONFIG_PKL] = jsonpickle.encode(self._sensor_config)
        return cfg
def normalize_string(string, charset=None, replacing=False):
    """Best-effort clean-up of a possibly mis-encoded feed string.

    Decodes byte strings (quoted-printable sniffing, JSON unescaping, several
    fallbacks), then strips control characters, CDATA markers and HTML
    entities, and lower-cases the result. Errors are deliberately swallowed:
    the function always returns *something*.

    NOTE(review): written for Python 2/3 compatibility — `unicode`,
    `py2_decode`, `PY3` and `HTMLParser` come from the surrounding module.
    """
    if (not isinstance(string, unicode)):
        try:
            # '=41'-style escapes suggest quoted-printable encoding.
            if re.search(u'=[0-9a-fA-F]{2}', string):
                string = py2_decode(string, 'Quoted-printable')
            string = json.loads((u'%s' % string), encoding=charset)
        except ValueError:
            try:
                # NOTE(review): eval() on feed data is dangerous; kept as-is,
                # but consider ast.literal_eval upstream.
                string = unicode(eval(string), 'raw_unicode_escape')
            except (SyntaxError, NameError):
                string = py2_decode(string, 'latin-1')
                pass
            except TypeError:
                string = unicode(string, errors='ignore')
                pass
        except LookupError:
            # Unknown charset: give up and return an empty string.
            return u''
        except TypeError:
            string = unicode(string, errors='ignore')
            pass
    try:
        string = remove_control_chars(string)
        string = fix_bad_unicode(string)
        string = unquote(string)
        string = string.replace(u'<![CDATA[', u'').replace(u']]', u'')
        if PY3:
            string = html.unescape(string)
        else:
            string = HTMLParser().unescape(string)
        if replacing:
            string = string.replace(u"'", '')
        string = string.lower()
    except:
        # Deliberate best-effort: return whatever we managed to produce.
        pass
    return string
class Sent140Dataset(Dataset):
    """Sentiment140 dataset keyed by user id (LEAF federated-learning format).

    Loads a LEAF-style JSON file mapping user ids to raw tweets ('x') and
    labels ('y'), and encodes each tweet as a fixed-length sequence of
    printable-ASCII character indices (UNK-padded/truncated).
    """

    def __init__(self, data_root, max_seq_len):
        self.data_root = data_root
        self.max_seq_len = max_seq_len
        # Character vocabulary: every printable ASCII char maps to its index;
        # the index one past the end doubles as the unknown/padding token.
        self.all_letters = {c: i for (i, c) in enumerate(string.printable)}
        self.num_letters = len(self.all_letters)
        self.UNK = self.num_letters
        # Read-only access suffices; 'r+' (previous revision) needlessly
        # required write permission on the data file.
        with open(data_root, 'r') as f:
            self.dataset = json.load(f)
        self.data = {}
        self.targets = {}
        self.num_classes = 2
        for (user_id, user_data) in self.dataset['user_data'].items():
            self.data[user_id] = self.process_x(list(user_data['x']))
            self.targets[user_id] = self.process_y(list(user_data['y']))

    def __len__(self):
        # Number of users, not number of tweets.
        return len(self.data)

    def __iter__(self):
        for user_id in self.data.keys():
            (yield self.__getitem__(user_id))

    def __getitem__(self, user_id: str):
        """Return the (inputs, targets) pair for one user.

        Raises IndexError for unknown user ids.
        """
        if ((user_id not in self.data) or (user_id not in self.targets)):
            raise IndexError(f'User {user_id} is not in dataset')
        return (self.data[user_id], self.targets[user_id])

    def unicodeToAscii(self, s):
        # Strip diacritics (NFD normalize, drop combining marks) and any
        # character outside the printable-ASCII vocabulary.
        return ''.join((c for c in unicodedata.normalize('NFD', s) if ((unicodedata.category(c) != 'Mn') and (c in self.all_letters))))

    def line_to_indices(self, line: str, max_seq_len: int):
        """Encode a tweet as exactly max_seq_len character indices."""
        line_list = self.split_line(line)
        chars = self.flatten_list([list(word) for word in line_list])
        # Fix: truncate by slicing instead of filtering an enumeration of the
        # whole character list (and drop the former no-op self-assignment).
        indices = [self.all_letters.get(letter, self.UNK) for letter in chars[:max_seq_len]]
        indices = indices + ([self.UNK] * (max_seq_len - len(indices)))
        return indices

    def process_x(self, raw_x_batch):
        """Encode a batch of raw LEAF records into a LongTensor of indices."""
        # Column 4 of each LEAF record holds the tweet text.
        x_batch = [e[4] for e in raw_x_batch]
        x_batch = [self.line_to_indices(e, self.max_seq_len) for e in x_batch]
        return torch.LongTensor(x_batch)

    def process_y(self, raw_y_batch):
        """Convert raw labels to ints (binary sentiment)."""
        return [int(e) for e in raw_y_batch]

    def split_line(self, line):
        """Split a line into word tokens and single punctuation tokens."""
        return re.findall("[\\w']+|[.,!?;]", line)

    def flatten_list(self, nested_list):
        return list(itertools.chain.from_iterable(nested_list))
class UnicommercePackageType(Document):
    """DocType controller: keeps the title in sync with the package dimensions
    and rejects non-positive sizes."""

    def validate(self):
        # Refresh the title first, then enforce positive dimensions.
        self.__update_title()
        self.__validate_sizes()

    def __update_title(self):
        # Title mirrors "<type>: LxWxH".
        self.title = f'{self.package_type}: {self.length}x{self.width}x{self.height}'

    def __validate_sizes(self):
        for dimension in ('length', 'width', 'height'):
            if cint(self.get(dimension)) <= 0:
                frappe.throw(frappe._('Positive value required for {}').format(dimension))
class ExcelMathModel(object):
    """Builds Excel formula strings from operand expressions.

    Empty aggregations collapse to their identity element ('0' for sums,
    '1' for products).
    """

    def __init__(self):
        pass

    def sum(self, elems):
        """Join *elems* with ' + ', parenthesized; '0' when empty."""
        joined = '(' + ' + '.join(elems) + ')'
        return '0' if joined == '()' else joined

    def product(self, elems):
        """Join *elems* with ' * ', parenthesized; '1' when empty."""
        joined = '(' + ' * '.join(elems) + ')'
        return '1' if joined == '()' else joined

    def plus(self, a, b):
        return self.sum((a, b))

    def divide(self, a, b):
        return f'({a}/{b})'

    def multiply(self, a, b):
        return f'({a}*{b})'

    def eval(self, elem):
        # Prefix with '=' so Excel treats the cell content as a formula.
        return '=' + elem
class TensorOperation(enum.Enum):
    """Enumerates the element-wise tensor/epilogue operations supported by the
    kernel generator; names describe the fused op chain (e.g. AddReluAdd =
    add, then ReLU, then add)."""
    PassThrough = auto()
    Add = auto()
    AddAdd = auto()
    AddMul = auto()
    AddMulTanh = auto()
    AlphaBetaAdd = auto()
    AddRelu = auto()
    AddFastGelu = auto()
    AddTanh = auto()
    AddHardswish = auto()
    AddSwish = auto()
    AddSigmoid = auto()
    AddReluAdd = auto()
    AddAddRelu = auto()
    AddSigmoidMul = auto()
    AddSigmoidMulTanh = auto()
    AddHardswishAdd = auto()
    UnaryIdentic = auto()
    UnarySquare = auto()
    UnaryAbs = auto()
    UnarySqrt = auto()
    AddMulAdd = auto()
    AddAddAdd = auto()
    AddAddAddRelu = auto()
    Bilinear = auto()
    CausalMask = auto()
def update_internals(new_coords3d, old_internals, primitives, dihedral_inds, rotation_inds, bend_inds, check_dihedrals=False, check_bends=False, bend_min_deg=BEND_MIN_DEG, bend_max_deg=LB_MIN_DEG, rotation_thresh=0.9, logger=None):
    """Re-evaluate primitive internal coordinates at new Cartesian coordinates.

    Dihedral values are corrected for 2*pi periodicity jumps relative to
    *old_internals*. Raises NeedNewInternalsException when a rotation
    coordinate approaches +-pi, or when (optionally checked) dihedrals/bends
    became invalid, so the caller can rebuild the coordinate set.

    Returns the list of evaluated primitive internals.
    """
    prim_internals = eval_primitives(new_coords3d, primitives)
    new_internals = np.array([prim_int.val for prim_int in prim_internals])
    internal_diffs = new_internals - old_internals

    # Rotation coordinates are ill-defined near +-pi; request a rebuild.
    new_rotations = new_internals[rotation_inds]
    if (np.abs(new_rotations / np.pi) >= rotation_thresh).any():
        raise NeedNewInternalsException(new_coords3d)

    dihedrals = [prim_internals[i] for i in dihedral_inds]
    dihedral_diffs = internal_diffs[dihedral_inds]
    # A difference of ~2*pi means the dihedral wrapped around; undo the jump.
    shifted_by_2pi = np.abs(np.abs(dihedral_diffs) - 2 * np.pi) < np.pi / 2
    new_dihedrals = np.array([dihed.val for dihed in dihedrals])
    if any(shifted_by_2pi):
        new_dihedrals[shifted_by_2pi] -= 2 * np.pi * np.sign(dihedral_diffs[shifted_by_2pi])
        # Write the corrected values back (no-ops for unshifted dihedrals).
        for dihed, new_val in zip(dihedrals, new_dihedrals):
            dihed.val = new_val

    invalid_inds = list()
    if check_dihedrals:
        are_valid = [dihedral_valid(new_coords3d, prim.inds) for prim in dihedrals]
        try:
            first_dihedral = dihedral_inds[0]
        except IndexError:
            first_dihedral = 0
        invalid_inds = [i + first_dihedral for i, is_valid in enumerate(are_valid) if not is_valid]
    if check_bends and (len(bend_inds) > 0):
        bends = [prim_internals[i] for i in bend_inds]
        are_valid = [bend_valid(new_coords3d, prim.inds, bend_min_deg, bend_max_deg) for prim in bends]
        first_bend = bend_inds[0]
        # Fix: accumulate instead of overwrite, so invalid dihedrals found
        # above are not silently discarded when bends are also checked.
        invalid_inds += [i + first_bend for i, is_valid in enumerate(are_valid) if not is_valid]
    if len(invalid_inds) > 0:
        invalid_prims = [primitives[i] for i in invalid_inds]
        invalid_msg = ', '.join([str(tp) for tp in invalid_prims])
        log(logger, 'Internal coordinate(s) became invalid! Need new internal coordinates!')
        log(logger, f'Invalid primitives: {invalid_msg}')
        raise NeedNewInternalsException(new_coords3d, invalid_inds=invalid_inds, invalid_prims=invalid_prims)
    return prim_internals
def get_first_mouse():
    """Return a mouse object for the first plugged supported device, or None.

    The magic profile '0000:0000' in RIVALCFG_PROFILE forces the "no device"
    path (used by the test-suite).
    """
    if os.environ.get('RIVALCFG_PROFILE') == '0000:0000':
        return None
    plugged = list(devices.list_plugged_devices())
    if not plugged:
        return None
    first = plugged[0]
    return mouse.get_mouse(vendor_id=first['vendor_id'], product_id=first['product_id'])
def test_cube_thinning(tmpdir, loadsfile1):
    """Thin a cube, round-trip it through SEGY and re-import the result."""
    logger.info('Import SEGY format via SEGYIO')
    incube = loadsfile1
    logger.info(incube)
    # Keep every 2nd inline/xline, every sample along Z.
    incube.do_thinning(2, 2, 1)
    logger.info(incube)
    incube.to_file(join(tmpdir, 'cube_thinned.segy'))
    # Re-import to verify the written file is readable.
    incube2 = xtgeo.cube_from_file(join(tmpdir, 'cube_thinned.segy'))
    logger.info(incube2)
def new_user_record_list() -> List[auth.UserRecord]:
    """Fixture-style generator: create three random users, yield them, then
    delete them on teardown.

    NOTE(review): despite the annotation, this is a generator — the annotation
    describes the yielded value (pytest yield-fixture convention).
    """
    (uid1, email1) = _random_id()
    (uid2, email2) = _random_id()
    (uid3, email3) = _random_id()
    users = [auth.create_user(uid=uid1, email=email1, password='password', phone_number=_random_phone()), auth.create_user(uid=uid2, email=email2, password='password', phone_number=_random_phone()), auth.create_user(uid=uid3, email=email3, password='password', phone_number=_random_phone())]
    (yield users)
    # Teardown: remove the users created above.
    for user in users:
        auth.delete_user(user.uid)
def extractTaniandrisWordpressCom(item):
    """Map a feed item from taniandris.wordpress.com to a release message.

    Returns None for non-chapter/preview posts, a release message when a known
    tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts without chapter/volume info and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ToneSandhi():
    """Mandarin tone-sandhi post-processing for pinyin finals.

    Applies neutral-tone, "bu" (不), "yi" (一) and third-tone sandhi rules to
    the finals of a segmented word, plus segment-merging helpers that run
    before the rules.

    NOTE(review): every CJK string literal in this class appears to have been
    stripped to '' by whatever extracted this file (e.g. `word.find('')`,
    `word[(- 1)] in ''`), which makes many conditions degenerate as written
    (`x in ''` is always False, `word.find('')` is always 0). The original
    hanzi literals must be restored from the upstream implementation before
    this code behaves correctly; the code below is kept byte-identical apart
    from comments.
    """

    def __init__(self):
        # Words whose final syllable must take the neutral tone ('5').
        # NOTE(review): contents were CJK words; stripped to '' (see class note).
        self.must_neural_tone_words = {'', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''}
        # Words that must NOT be forced to the neutral tone.
        self.must_not_neural_tone_words = {'', '', '', '', '', '', '', '', '', '', ''}
        # Punctuation that blocks certain sandhi changes.
        self.punc = ":,;?!':,;.?!"

    def _neural_sandhi(self, word: str, pos: str, finals: List[str]) -> List[str]:
        """Apply neutral-tone rules; tone '5' marks the neutral tone."""
        # Reduplicated n/v/a syllables take the neutral tone on the repeat.
        for (j, item) in enumerate(word):
            if (((j - 1) >= 0) and (item == word[(j - 1)]) and (pos[0] in {'n', 'v', 'a'}) and (word not in self.must_not_neural_tone_words)):
                finals[j] = (finals[j][:(- 1)] + '5')
        ge_idx = word.find('')
        # Final-particle / suffix rules (literal character classes stripped).
        if ((len(word) >= 1) and (word[(- 1)] in '')):
            finals[(- 1)] = (finals[(- 1)][:(- 1)] + '5')
        elif ((len(word) >= 1) and (word[(- 1)] in '')):
            finals[(- 1)] = (finals[(- 1)][:(- 1)] + '5')
        elif ((len(word) > 1) and (word[(- 1)] in '') and (pos in {'r', 'n'}) and (word not in self.must_not_neural_tone_words)):
            finals[(- 1)] = (finals[(- 1)][:(- 1)] + '5')
        elif ((len(word) > 1) and (word[(- 1)] in '') and (pos in {'s', 'l', 'f'})):
            finals[(- 1)] = (finals[(- 1)][:(- 1)] + '5')
        elif ((len(word) > 1) and (word[(- 1)] in '') and (word[(- 2)] in '')):
            finals[(- 1)] = (finals[(- 1)][:(- 1)] + '5')
        elif (((ge_idx >= 1) and (word[(ge_idx - 1)].isnumeric() or (word[(ge_idx - 1)] in ''))) or (word == '')):
            finals[ge_idx] = (finals[ge_idx][:(- 1)] + '5')
        elif ((word in self.must_neural_tone_words) or (word[(- 2):] in self.must_neural_tone_words)):
            finals[(- 1)] = (finals[(- 1)][:(- 1)] + '5')
        # Re-check after splitting the word into two sub-words.
        word_list = self._split_word(word)
        finals_list = [finals[:len(word_list[0])], finals[len(word_list[0]):]]
        for (i, word) in enumerate(word_list):
            if ((word in self.must_neural_tone_words) or (word[(- 2):] in self.must_neural_tone_words)):
                finals_list[i][(- 1)] = (finals_list[i][(- 1)][:(- 1)] + '5')
        finals = sum(finals_list, [])
        return finals

    def _bu_sandhi(self, word: str, finals: List[str]) -> List[str]:
        """Tone sandhi for 不 (literal stripped to '')."""
        # "X不X" pattern: the middle 不 goes neutral.
        if ((len(word) == 3) and (word[1] == '')):
            finals[1] = (finals[1][:(- 1)] + '5')
        else:
            # 不 before a 4th tone becomes 2nd tone.
            for (i, char) in enumerate(word):
                if ((char == '') and ((i + 1) < len(word)) and (finals[(i + 1)][(- 1)] == '4')):
                    finals[i] = (finals[i][:(- 1)] + '2')
        return finals

    def _yi_sandhi(self, word: str, finals: List[str]) -> List[str]:
        """Tone sandhi for 一 (literal stripped to '')."""
        # 一 inside a pure number keeps its citation tone.
        if ((word.find('') != (- 1)) and all([item.isnumeric() for item in word if (item != '')])):
            return finals
        # "X一X" reduplication: 一 goes neutral.
        elif ((len(word) == 3) and (word[1] == '') and (word[0] == word[(- 1)])):
            finals[1] = (finals[1][:(- 1)] + '5')
        # Ordinal prefix (第一): 一 keeps the 1st tone.
        elif word.startswith(''):
            finals[1] = (finals[1][:(- 1)] + '1')
        else:
            for (i, char) in enumerate(word):
                if ((char == '') and ((i + 1) < len(word))):
                    # 2nd tone before a 4th tone, otherwise 4th tone
                    # (unless followed by punctuation).
                    if (finals[(i + 1)][(- 1)] == '4'):
                        finals[i] = (finals[i][:(- 1)] + '2')
                    elif (word[(i + 1)] not in self.punc):
                        finals[i] = (finals[i][:(- 1)] + '4')
        return finals

    def _split_word(self, word: str) -> List[str]:
        """Split a word into two sub-words using jieba's shortest candidate."""
        word_list = jieba.cut_for_search(word)
        word_list = sorted(word_list, key=(lambda i: len(i)), reverse=False)
        first_subword = word_list[0]
        first_begin_idx = word.find(first_subword)
        if (first_begin_idx == 0):
            second_subword = word[len(first_subword):]
            new_word_list = [first_subword, second_subword]
        else:
            second_subword = word[:(- len(first_subword))]
            new_word_list = [second_subword, first_subword]
        return new_word_list

    def _three_sandhi(self, word: str, finals: List[str]) -> List[str]:
        """Third-tone sandhi: 3rd tones before 3rd tones become 2nd tones."""
        if ((len(word) == 2) and self._all_tone_three(finals)):
            finals[0] = (finals[0][:(- 1)] + '2')
        elif (len(word) == 3):
            word_list = self._split_word(word)
            if self._all_tone_three(finals):
                # Disyllabic + monosyllabic: change the first two syllables;
                # monosyllabic + disyllabic: change only the middle one.
                if (len(word_list[0]) == 2):
                    finals[0] = (finals[0][:(- 1)] + '2')
                    finals[1] = (finals[1][:(- 1)] + '2')
                elif (len(word_list[0]) == 1):
                    finals[1] = (finals[1][:(- 1)] + '2')
            else:
                finals_list = [finals[:len(word_list[0])], finals[len(word_list[0]):]]
                if (len(finals_list) == 2):
                    for (i, sub) in enumerate(finals_list):
                        # All-3rd-tone disyllable inside the word.
                        if (self._all_tone_three(sub) and (len(sub) == 2)):
                            finals_list[i][0] = (finals_list[i][0][:(- 1)] + '2')
                        # 3rd tone meeting a following 3rd tone across the split.
                        elif ((i == 1) and (not self._all_tone_three(sub)) and (finals_list[i][0][(- 1)] == '3') and (finals_list[0][(- 1)][(- 1)] == '3')):
                            finals_list[0][(- 1)] = (finals_list[0][(- 1)][:(- 1)] + '2')
                    finals = sum(finals_list, [])
        elif (len(word) == 4):
            # Four-character words: treat as two disyllables.
            finals_list = [finals[:2], finals[2:]]
            finals = []
            for sub in finals_list:
                if self._all_tone_three(sub):
                    sub[0] = (sub[0][:(- 1)] + '2')
                finals += sub
        return finals

    def _all_tone_three(self, finals: List[str]) -> bool:
        # A final's last character is its tone digit.
        return all(((x[(- 1)] == '3') for x in finals))

    def _merge_bu(self, seg: List[Tuple[(str, str)]]) -> List[Tuple[(str, str)]]:
        """Merge a standalone 不 with the following segment (literal stripped)."""
        new_seg = []
        last_word = ''
        for (word, pos) in seg:
            if (last_word == ''):
                word = (last_word + word)
            if (word != ''):
                new_seg.append((word, pos))
            last_word = word[:]
        if (last_word == ''):
            new_seg.append((last_word, 'd'))
            last_word = ''
        return new_seg

    def _merge_yi(self, seg: List[Tuple[(str, str)]]) -> List[Tuple[(str, str)]]:
        """Merge 一 patterns such as "X一X" into one segment (literal stripped).

        NOTE(review): `[] * len(seg)` is just []; the indexed write below can
        raise, and the `continue` branch does not advance `i`, risking an
        infinite loop — the caller wraps this in try/except (see
        pre_merge_for_modify), which matches the known upstream quirk.
        """
        new_seg = ([] * len(seg))
        i = 0
        while (i < len(seg)):
            (word, pos) = seg[i]
            if (((i - 1) >= 0) and (word == '') and ((i + 1) < len(seg)) and (seg[(i - 1)][0] == seg[(i + 1)][0]) and (seg[(i - 1)][1] == 'v')):
                new_seg[(i - 1)][0] = ((new_seg[(i - 1)][0] + '') + new_seg[(i - 1)][0])
                i += 2
            else:
                if (((i - 2) >= 0) and (seg[(i - 1)][0] == '') and (seg[(i - 2)][0] == word) and (pos == 'v')):
                    continue
                else:
                    new_seg.append([word, pos])
                i += 1
        seg = [i for i in new_seg if (len(i) > 0)]
        new_seg = []
        # Second pass: glue a trailing 一 onto the next segment.
        for (i, (word, pos)) in enumerate(seg):
            if (new_seg and (new_seg[(- 1)][0] == '')):
                new_seg[(- 1)][0] = (new_seg[(- 1)][0] + word)
            else:
                new_seg.append([word, pos])
        return new_seg

    def _merge_continuous_three_tones(self, seg: List[Tuple[(str, str)]]) -> List[Tuple[(str, str)]]:
        """Merge adjacent all-3rd-tone segments (max merged length 3, no reduplications)."""
        new_seg = []
        sub_finals_list = [lazy_pinyin(word, neutral_tone_with_five=True, style=Style.FINALS_TONE3) for (word, pos) in seg]
        assert (len(sub_finals_list) == len(seg))
        merge_last = ([False] * len(seg))
        for (i, (word, pos)) in enumerate(seg):
            if (((i - 1) >= 0) and self._all_tone_three(sub_finals_list[(i - 1)]) and self._all_tone_three(sub_finals_list[i]) and (not merge_last[(i - 1)])):
                # Avoid merging reduplications and overly long results.
                if ((not self._is_reduplication(seg[(i - 1)][0])) and ((len(seg[(i - 1)][0]) + len(seg[i][0])) <= 3)):
                    new_seg[(- 1)][0] = (new_seg[(- 1)][0] + seg[i][0])
                    merge_last[i] = True
                else:
                    new_seg.append([word, pos])
            else:
                new_seg.append([word, pos])
        return new_seg

    def _is_reduplication(self, word: str) -> bool:
        # E.g. a two-character word with both characters equal.
        return ((len(word) == 2) and (word[0] == word[1]))

    def _merge_continuous_three_tones_2(self, seg: List[Tuple[(str, str)]]) -> List[Tuple[(str, str)]]:
        """Merge segments where a trailing 3rd tone meets a leading 3rd tone."""
        new_seg = []
        sub_finals_list = [lazy_pinyin(word, neutral_tone_with_five=True, style=Style.FINALS_TONE3) for (word, pos) in seg]
        assert (len(sub_finals_list) == len(seg))
        merge_last = ([False] * len(seg))
        for (i, (word, pos)) in enumerate(seg):
            if (((i - 1) >= 0) and (sub_finals_list[(i - 1)][(- 1)][(- 1)] == '3') and (sub_finals_list[i][0][(- 1)] == '3') and (not merge_last[(i - 1)])):
                if ((not self._is_reduplication(seg[(i - 1)][0])) and ((len(seg[(i - 1)][0]) + len(seg[i][0])) <= 3)):
                    new_seg[(- 1)][0] = (new_seg[(- 1)][0] + seg[i][0])
                    merge_last[i] = True
                else:
                    new_seg.append([word, pos])
            else:
                new_seg.append([word, pos])
        return new_seg

    def _merge_er(self, seg: List[Tuple[(str, str)]]) -> List[Tuple[(str, str)]]:
        """Merge the erhua suffix 儿 into the preceding segment (literal stripped)."""
        new_seg = []
        for (i, (word, pos)) in enumerate(seg):
            if (((i - 1) >= 0) and (word == '') and (seg[(i - 1)][0] != '#')):
                new_seg[(- 1)][0] = (new_seg[(- 1)][0] + seg[i][0])
            else:
                new_seg.append([word, pos])
        return new_seg

    def _merge_reduplication(self, seg: List[Tuple[(str, str)]]) -> List[Tuple[(str, str)]]:
        """Merge a segment that repeats the previous one (e.g. reduplications)."""
        new_seg = []
        for (i, (word, pos)) in enumerate(seg):
            if (new_seg and (word == new_seg[(- 1)][0])):
                new_seg[(- 1)][0] = (new_seg[(- 1)][0] + seg[i][0])
            else:
                new_seg.append([word, pos])
        return new_seg

    def pre_merge_for_modify(self, seg: List[Tuple[(str, str)]]) -> List[Tuple[(str, str)]]:
        """Run all merge passes; order matters (bu -> yi -> reduplication -> 3-tone merges -> er)."""
        seg = self._merge_bu(seg)
        try:
            seg = self._merge_yi(seg)
        except:
            # _merge_yi has known failure modes (see its docstring).
            print('_merge_yi failed')
        seg = self._merge_reduplication(seg)
        seg = self._merge_continuous_three_tones(seg)
        seg = self._merge_continuous_three_tones_2(seg)
        seg = self._merge_er(seg)
        return seg

    def modified_tone(self, word: str, pos: str, finals: List[str]) -> List[str]:
        """Apply all sandhi rules to one word's finals; order matters."""
        finals = self._bu_sandhi(word, finals)
        finals = self._yi_sandhi(word, finals)
        finals = self._neural_sandhi(word, pos, finals)
        finals = self._three_sandhi(word, finals)
        return finals
def cd_to_project_root():
    """Return a context manager that chdirs into this file's directory.

    On entry the project root is also pushed onto sys.path; on exit both the
    working directory and sys.path are restored.
    """
    root_dir = os.path.split(os.path.realpath(__file__))[0]

    class _ProjectRootCwd():
        def __enter__(self):
            # Remember where we were so __exit__ can restore it.
            self.old_path = os.getcwd()
            os.chdir(root_dir)
            sys.path.insert(0, root_dir)

        def __exit__(self, *args):
            # Undo the sys.path push and the chdir, in reverse order.
            del sys.path[0]
            os.chdir(self.old_path)

    return _ProjectRootCwd()
def upgrade():
    """Alembic upgrade: convert the integer email-notification flag columns to
    booleans (1 -> TRUE, anything else -> FALSE) via USING CASE casts."""
    op.execute('ALTER TABLE email_notifications ALTER after_ticket_purchase TYPE boolean USING CASE after_ticket_purchase WHEN 1 THEN TRUE ELSE FALSE END', execution_options=None)
    op.execute('ALTER TABLE email_notifications ALTER new_paper TYPE boolean USING CASE new_paper WHEN 1 THEN TRUE ELSE FALSE END', execution_options=None)
    op.execute('ALTER TABLE email_notifications ALTER next_event TYPE boolean USING CASE next_event WHEN 1 THEN TRUE ELSE FALSE END', execution_options=None)
    op.execute('ALTER TABLE email_notifications ALTER session_accept_reject TYPE boolean USING CASE session_accept_reject WHEN 1 THEN TRUE ELSE FALSE END', execution_options=None)
    op.execute('ALTER TABLE email_notifications ALTER session_schedule TYPE boolean USING CASE session_schedule WHEN 1 THEN TRUE ELSE FALSE END', execution_options=None)
class TestSerializerValidationWithCompiledRegexField():
    """RegexField must accept a pre-compiled pattern, not only a string."""

    def setup_method(self):
        # Fresh serializer class per test; the field requires one digit.
        class ExampleSerializer(serializers.Serializer):
            name = serializers.RegexField(re.compile('\\d'), required=True)
        self.Serializer = ExampleSerializer

    def test_validation_success(self):
        serializer = self.Serializer(data={'name': '2'})
        # is_valid() must run before validated_data/errors are accessed.
        assert serializer.is_valid()
        assert (serializer.validated_data == {'name': '2'})
        assert (serializer.errors == {})
# NOTE(review): the orphaned '.flaky(...)' line below, and the bare argument
# tuple before test_correct_output_default_registry, look like stripped
# decorators (@pytest.mark.flaky and @mock.patch respectively) — restore them
# when merging; as written these lines are syntax errors.
.flaky(reruns=MAX_FLAKY_RERUNS)
class TestSearchSkillsLocal():
    """End-to-end check of `aea search --local skills` via the CliRunner."""

    def setup_class(cls):
        cls.runner = CliRunner()

    ('aea.cli.search.format_items', return_value=FORMAT_ITEMS_SAMPLE_OUTPUT)
    def test_correct_output_default_registry(self, _):
        # The '_' parameter would receive the mock from the stripped patch
        # decorator above.
        self.result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'search', '--local', 'skills'], standalone_mode=False)
        assert (self.result.output == 'Searching for ""...\nSkills found:\n\n{}\n'.format(FORMAT_ITEMS_SAMPLE_OUTPUT))
def clean_up_queue(upload_queue_files):
    """Delete the image and thumbnail files of a processed upload-queue entry.

    Removal failures are logged and otherwise ignored, so a missing file does
    not abort the clean-up.
    """
    targets = (
        (upload_queue_files.image_output, 'Error removing upload queue image'),
        (upload_queue_files.thumbnail_output, 'Error removing upload queue thumbnail'),
    )
    for path, failure_msg in targets:
        try:
            os.remove(path)
        except OSError:
            logger.exception(failure_msg)
class DatasetMissingValues(MetricResult):
    """Aggregated missing-value statistics for a dataset, overall and per column."""

    # Count per distinct missing-value marker (e.g. None, NaN, '').
    different_missing_values: Dict[(MissingValue, int)]
    number_of_different_missing_values: int
    # Same breakdown, per column.
    different_missing_values_by_column: Dict[(str, Dict[(MissingValue, int)])]
    number_of_different_missing_values_by_column: Dict[(str, int)]
    # Totals across the whole dataset.
    number_of_missing_values: int
    share_of_missing_values: float
    # Per-column totals and shares.
    number_of_missing_values_by_column: Dict[(str, int)]
    share_of_missing_values_by_column: Dict[(str, float)]
    # Row-level statistics.
    number_of_rows: int
    number_of_rows_with_missing_values: int
    share_of_rows_with_missing_values: float
    # Column-level statistics.
    number_of_columns: int
    columns_with_missing_values: List[str]
    number_of_columns_with_missing_values: int
    share_of_columns_with_missing_values: float
class TestConfig(unittest.TestCase):
    """Tests for the case-insensitive application config mapping."""

    pwd = os.path.dirname(os.path.abspath(__file__))
    module = browsepy.appconfig

    def test_case_insensitivity(self):
        """Every mapping operation must treat keys case-insensitively."""
        cfg = self.module.Config(self.pwd, defaults={'prop': 2})
        self.assertEqual(cfg['prop'], cfg['PROP'])
        # pop removes the entry regardless of the casing used.
        self.assertEqual(cfg['pRoP'], cfg.pop('prop'))
        cfg.update(prop=1)
        self.assertEqual(cfg['PROP'], 1)
        self.assertEqual(cfg.get('pRop'), 1)
        # Keys are canonicalized (stored upper-case).
        self.assertEqual(cfg.popitem(), ('PROP', 1))
        self.assertRaises(KeyError, cfg.pop, 'prop')
        cfg.update(prop=1)
        del cfg['PrOp']
        self.assertRaises(KeyError, cfg.__delitem__, 'prop')
        self.assertIsNone(cfg.pop('prop', None))
        self.assertIsNone(cfg.get('prop'))
def run(client_args, other_args, action_file, dry_run=False):
    """Execute every enabled action from *action_file* in index order.

    A fresh Elasticsearch client is created per action (each action may
    override the request timeout). Per-action failures are delegated to
    exception_handler; a failed client connection aborts the whole run.
    """
    logger = logging.getLogger(__name__)
    logger.debug('action_file: %s', action_file)
    all_actions = ActionsFile(action_file)
    for idx in sorted(list(all_actions.actions.keys())):
        action_def = all_actions.actions[idx]
        if action_def.disabled:
            logger.info('Action ID: %s: "%s" not performed because "disable_action" is set to True', idx, action_def.action)
            continue
        logger.info('Preparing Action ID: %s, "%s"', idx, action_def.action)
        # Per-action timeout override takes effect on the client built below.
        if action_def.timeout_override:
            client_args.request_timeout = action_def.timeout_override
        logger.info('Creating client object and testing connection')
        try:
            client = get_client(configdict={'elasticsearch': {'client': prune_nones(client_args.asdict()), 'other_settings': prune_nones(other_args.asdict())}})
        except ClientException as exc:
            # Without a working client nothing can proceed.
            click.echo('Unable to establish client connection to Elasticsearch!')
            click.echo(f'Exception: {exc}')
            sys.exit(1)
        # Skip actions that ILM already manages.
        if ilm_action_skip(client, action_def):
            continue
        msg = f'Trying Action ID: {idx}, "{action_def.action}": {action_def.description}'
        try:
            logger.info(msg)
            process_action(client, action_def, dry_run=dry_run)
        except Exception as err:
            exception_handler(action_def, err)
        logger.info('Action ID: %s, "%s" completed.', idx, action_def.action)
    logger.info('All actions completed.')
def serialize_and_package(pkgs: typing.List[str], settings: SerializationSettings, source: str='.', output: str='./flyte-package.tgz', fast: bool=False, deref_symlinks: bool=False, options: typing.Optional[Options]=None):
    """Serialize the entities found under ``pkgs`` and bundle them into a package archive.

    :param pkgs: dotted package names to scan for serializable entities.
    :param settings: serialization settings forwarded to ``serialize``.
    :param source: root directory of the sources being packaged.
    :param output: path of the resulting package archive.
    :param fast: enable fast-registration packaging.
    :param deref_symlinks: follow symlinks while archiving.
    :param options: optional extra serialization options.
    """
    entities = serialize(pkgs, settings, source, options=options)
    package(entities, source, output, fast, deref_symlinks)
class ValveDot1xACLSmokeTestCase(ValveDot1xSmokeTestCase):
    """Smoke test for 802.1x with per-port auth ACLs and a dot1x-assigned VLAN."""
    # ACLs applied on authentication outcome: auth_acl allows traffic,
    # noauth_acl drops it.
    ACL_CONFIG = '\nacls:\n auth_acl:\n - rule:\n actions:\n allow: 1\n noauth_acl:\n - rule:\n actions:\n allow: 0\n'
    # DP config: p1 runs dot1x with the ACLs above; the 'student' VLAN is
    # assignable by dot1x (dot1x_assigned). DOT1X_ACL_CONFIG is expected at
    # module level (class bodies cannot see inherited attributes here).
    CONFIG = '\n{}\ndps:\n s1:\n{}\n interfaces:\n p1:\n number: 1\n native_vlan: v100\n dot1x: true\n dot1x_acl: True\n p2:\n number: 2\n output_only: True\nvlans:\n v100:\n vid: 0x100\n student:\n vid: 0x200\n dot1x_assigned: True\n'.format(ACL_CONFIG, DOT1X_ACL_CONFIG)
class CmdIC(COMMAND_DEFAULT_CLASS):
    """
    Go in-character (IC) by puppeting a character.

    Usage:
      ic <character>

    With no argument, the last puppeted character is re-used.
    """

    key = 'ic'
    locks = 'cmd:all()'
    aliases = 'puppet'
    help_category = 'General'
    # Run on the Account level even while currently puppeting an object.
    account_caller = True

    def func(self):
        """Resolve the requested character and puppet it."""
        account = self.account
        session = self.session
        new_character = None
        character_candidates = []
        if not self.args:
            # No argument: default to the last puppeted character, if any.
            character_candidates = [account.db._last_puppet] if account.db._last_puppet else []
            if not character_candidates:
                self.msg('Usage: ic <character>')
                return
        else:
            if (playables := account.characters):
                # Search the account's own playable characters first.
                character_candidates.extend(utils.make_iter(account.search(self.args, candidates=playables, search_object=True, quiet=True)))
            if account.locks.check_lockstring(account, 'perm(Builder)'):
                # Builders may puppet any object passing the 'puppet' lock.
                if session.puppet:
                    # Prefer a local search from the current puppet's position.
                    character_candidates = [char for char in session.puppet.search(self.args, quiet=True) if char.access(account, 'puppet')]
                if not character_candidates:
                    # Fall back to a global object search.
                    character_candidates.extend([char for char in search.object_search(self.args) if char.access(account, 'puppet')])
        # Bugfix: candidate resolution must run for BOTH branches. It was
        # previously nested inside the else-branch, so the no-argument path
        # reached puppet_object() with new_character still None.
        if not character_candidates:
            self.msg('That is not a valid character choice.')
            return
        if len(character_candidates) > 1:
            self.msg('Multiple targets with the same name:\n %s' % ', '.join(('%s(#%s)' % (obj.key, obj.id)) for obj in character_candidates))
            return
        new_character = character_candidates[0]
        try:
            account.puppet_object(session, new_character)
            account.db._last_puppet = new_character
            logger.log_sec(f'Puppet Success: (Caller: {account}, Target: {new_character}, IP: {self.session.address}).')
        except RuntimeError as exc:
            self.msg(f'|rYou cannot become |C{new_character.name}|n: {exc}')
            # Bugfix: the original f-string left a literal '%s' in the logged
            # message; interpolate the actual exception instead.
            logger.log_sec(f'Puppet Failed: {exc} (Caller: {account}, Target: {new_character}, IP: {self.session.address}).')
class Packetizer(LiteXModule):
    """Prepend an encoded header to a data stream.

    Consumes payload packets on ``sink`` and produces them on ``source``
    with the serialized ``header`` in front. When the header length is not a
    multiple of the datapath width, the leftover header bytes are merged with
    shifted payload data (the "unaligned" path).
    """
    def __init__(self, sink_description, source_description, header):
        self.sink = sink = stream.Endpoint(sink_description)
        self.source = source = stream.Endpoint(source_description)
        self.header = Signal((header.length * 8))
        # Datapath geometry.
        data_width = len(self.sink.data)
        bytes_per_clk = (data_width // 8)
        header_words = ((header.length * 8) // data_width)
        # Header bytes that do not fill a whole datapath word (0 => aligned).
        header_leftover = (header.length % bytes_per_clk)
        aligned = (header_leftover == 0)
        # Shift register holding the encoded header while it is emitted.
        sr = Signal((header.length * 8), reset_less=True)
        sr_load = Signal()
        sr_shift = Signal()
        count = Signal(max=max(header_words, 2))
        # One-cycle delayed copy of sink, used on the unaligned path.
        sink_d = stream.Endpoint(sink_description)
        self.comb += header.encode(sink, self.header)
        self.sync += If(sr_load, sr.eq(self.header))
        if (header_words != 1):
            self.sync += If(sr_shift, sr.eq(sr[data_width:]))
        # source.last is driven either directly (a) or by the delayed
        # last_be path (b); source_last_s selects between them (see bottom).
        source_last_a = Signal()
        source_last_b = Signal()
        source_last_s = Signal()
        self.fsm = fsm = FSM(reset_state='IDLE')
        fsm_from_idle = Signal()
        # IDLE: wait for a packet and emit the first header word immediately.
        fsm.act('IDLE', sink.ready.eq(1), NextValue(count, 1), If(sink.valid, sink.ready.eq(0), source.valid.eq(1), source_last_a.eq(0), source.data.eq(self.header[:data_width]), If((source.valid & source.ready), sr_load.eq(1), NextValue(fsm_from_idle, 1), If((header_words == 1), NextState(('ALIGNED-DATA-COPY' if aligned else 'UNALIGNED-DATA-COPY'))).Else(NextState('HEADER-SEND')))))
        # HEADER-SEND: emit the remaining header words from the shift register.
        fsm.act('HEADER-SEND', source.valid.eq(1), source_last_a.eq(0), source.data.eq(sr[min(data_width, (len(sr) - 1)):]), If((source.valid & source.ready), sr_shift.eq(1), If((count == (header_words - 1)), sr_shift.eq(0), NextState(('ALIGNED-DATA-COPY' if aligned else 'UNALIGNED-DATA-COPY')), NextValue(count, (count + 1))).Else(NextValue(count, (count + 1)))))
        # ALIGNED-DATA-COPY: header filled whole words; stream payload through.
        fsm.act('ALIGNED-DATA-COPY', source.valid.eq(sink.valid), source_last_a.eq(sink.last), source.data.eq(sink.data), If((source.valid & source.ready), sink.ready.eq(1), If(source.last, NextState('IDLE'))))
        if (not aligned):
            header_offset_multiplier = (1 if (header_words == 1) else 2)
            self.sync += If((source.valid & source.ready), sink_d.eq(sink))
            # UNALIGNED-DATA-COPY: first beat mixes the leftover header bytes
            # (from sr) with payload; later beats mix delayed and fresh payload.
            fsm.act('UNALIGNED-DATA-COPY', source.valid.eq((sink.valid | sink_d.last)), source_last_a.eq((sink.last | sink_d.last)), If(fsm_from_idle, source.data[:max((header_leftover * 8), 1)].eq(sr[min((header_offset_multiplier * data_width), (len(sr) - 1)):])).Else(source.data[:max((header_leftover * 8), 1)].eq(sink_d.data[min(((bytes_per_clk - header_leftover) * 8), (data_width - 1)):])), source.data[(header_leftover * 8):].eq(sink.data), If((source.valid & source.ready), sink.ready.eq(((~ source.last) | sink.last)), NextValue(fsm_from_idle, 0), If(source.last, NextState('IDLE'))))
        if (hasattr(sink, 'last_be') and hasattr(source, 'last_be')):
            # Optional last_be handling: rotate the valid-byte marker by the
            # header leftover, and delay 'last' one beat when the rotated
            # marker would otherwise point past the current word.
            if (len(sink.last_be) == 1):
                sink_last_be = Signal.like(sink.last_be)
                self.comb += [sink_last_be.eq(sink.last)]
            else:
                sink_last_be = sink.last_be
            right_rot_by = header_leftover
            new_last_be = Signal.like(sink_last_be)
            self.comb += [new_last_be.eq(Cat([sink_last_be[((i - right_rot_by) % bytes_per_clk)] for i in range(bytes_per_clk)]))]
            delayed_last_be = Signal.like(sink_last_be)
            self.last_be_fsm = last_be_fsm = FSM(reset_state='DEFAULT')
            in_data_copy = Signal()
            self.comb += [in_data_copy.eq((self.fsm.ongoing('ALIGNED-DATA-COPY') | self.fsm.ongoing('UNALIGNED-DATA-COPY')))]
            self.last_be_fsm.act('DEFAULT', If(((in_data_copy & sink.last) & (sink_last_be > new_last_be)), source_last_b.eq(0), source_last_s.eq(1), source.last_be.eq(0), If((source.ready & source.valid), NextValue(delayed_last_be, new_last_be), NextState('DELAYED'))).Elif(in_data_copy, source_last_b.eq(sink.last), source_last_s.eq(1), source.last_be.eq(new_last_be)), If(in_data_copy, sink.ready.eq(source.ready)).Elif(self.fsm.ongoing('IDLE'), sink.ready.eq((~ sink.valid))))
            self.last_be_fsm.act('DELAYED', source_last_b.eq(1), source_last_s.eq(1), source.last_be.eq(delayed_last_be), sink.ready.eq(0), If(source.ready, NextState('DEFAULT')))
        self.comb += [If(source_last_s, source.last.eq(source_last_b)).Else(source.last.eq(source_last_a))]
        if (hasattr(sink, 'error') and hasattr(source, 'error')):
            self.comb += source.error.eq(sink.error)
.skipif((not hasattr(_writer, 'CYTHON_MODULE')), reason='Cython-specific test')
def test_regular_vs_ordered_dict_record_typeerror():
    """Cython writer: plain dict and OrderedDict records take different code paths.

    A plain dict is written through the ``d_datum_value`` fast path while an
    OrderedDict falls back to the generic ``datum_value`` path; both must
    raise TypeError for a non-int field value, with the corresponding
    ``write_record`` source line appearing in the traceback.
    """
    schema = {'type': 'record', 'name': 'Test', 'namespace': 'test', 'fields': [{'name': 'field', 'type': {'type': 'int'}}]}
    # 'foobar' is not an int, so writing must raise TypeError.
    test_records = [{'field': 'foobar'}]
    record = OrderedDict()
    record['field'] = 'foobar'
    test_records.append(record)
    # One expected traceback excerpt per record flavor (dict vs OrderedDict).
    expected_write_record_stack_traces = [['write_data(fo, d_datum_value, field_type, named_schemas, name, options)'], ['write_data(fo, datum_value, field_type, named_schemas, name, options)']]
    for (test_record, expected_write_record_stack_trace) in zip(test_records, expected_write_record_stack_traces):
        new_file = BytesIO()
        records = [test_record]
        try:
            fastavro.writer(new_file, schema, records)
            assert False, "Should've raised TypeError"
        except TypeError:
            (_, _, tb) = sys.exc_info()
            stack = traceback.extract_tb(tb)
            # Keep only the source lines of frames inside write_record.
            filtered_stack = [frame[3] for frame in stack if ('write_record' in frame[2])]
            assert (filtered_stack == expected_write_record_stack_trace)
class MarkdownRenderer():
    """Accumulates Markdown fragments and renders common Markdown constructs."""

    def __init__(self, no_emoji: bool=False):
        # Fragments added via add(); joined by blank lines in text().
        self.data: List = []
        self.no_emoji = no_emoji

    def text(self) -> str:
        """Return all added fragments joined by blank lines."""
        return '\n\n'.join(self.data)

    def add(self, content: str):
        """Append one already-rendered fragment to the document."""
        self.data.append(content)

    def table(self, data: Iterable[Iterable[str]], header: Sequence[str], aligns: Optional[Sequence[Literal[('r', 'c', 'l')]]]=None) -> str:
        """Render a Markdown table; ``aligns`` defaults to left-aligned columns.

        :raises ValueError: if ``aligns`` length does not match ``header``.
        """
        if aligns is None:
            aligns = ['l'] * len(header)
        if len(aligns) != len(header):
            raise ValueError(f'Invalid aligns: {aligns} (header length: {len(header)})')
        # Markdown divider cells per alignment; left alignment is the default.
        divider_for = {'c': ':---:', 'r': '---:'}
        head = f"| {' | '.join(header)} |"
        divider = f"| {' | '.join(divider_for.get(align, '---') for align in aligns)} |"
        body = '\n'.join(f"| {' | '.join(row)} |" for row in data)
        return f'{head}\n{divider}\n{body}'

    def title(self, level: int, text: str, emoji: Optional[str]=None) -> str:
        """Render a heading of the given level, optionally prefixed by an emoji."""
        if emoji and not self.no_emoji:
            return f"{'#' * level} {emoji} {text}"
        return f"{'#' * level} {text}"

    def list(self, items: Iterable[str], numbered: bool=False) -> str:
        """Render a bulleted (default) or numbered list."""
        lines = []
        for index, item in enumerate(items, start=1):
            marker = f'{index}.' if numbered else '-'
            lines.append(f'{marker} {item}')
        return '\n'.join(lines)

    def link(self, text: str, url: str) -> str:
        """Render an inline link."""
        return f'[{text}]({url})'

    def code_block(self, text: str, lang: str='') -> str:
        """Render a fenced code block with an optional language tag."""
        return f'```{lang}\n{text}\n```'

    def code(self, text: str) -> str:
        """Render inline code."""
        return self._wrap(text, '`')

    def bold(self, text: str) -> str:
        """Render bold text."""
        return self._wrap(text, '**')

    def italic(self, text: str):
        """Render italic text."""
        return self._wrap(text, '_')

    def _wrap(self, text, marker):
        # Surround text with the same marker on both sides.
        return f'{marker}{text}{marker}'
_db(transaction=True)
def test_load_table_to_from_delta_for_transaction_search(spark, s3_unittest_data_bucket, populate_usas_data_and_recipients_from_broker, hive_unittest_metastore_db):
    """Load source Delta tables, then verify transaction_search builds via load_query_to_delta."""
    # Every table the transaction_search query depends on must exist first.
    tables_to_load = ['awards', 'financial_accounts_by_awards', 'recipient_lookup', 'recipient_profile', 'sam_recipient', 'transaction_current_cd_lookup', 'transaction_fabs', 'transaction_fpds', 'transaction_normalized', 'zips']
    create_and_load_all_delta_tables(spark, s3_unittest_data_bucket, tables_to_load)
    # Update timestamps differ per run, so exclude them from the comparison.
    verify_delta_table_loaded_to_delta(spark, 'transaction_search', s3_unittest_data_bucket, load_command='load_query_to_delta', ignore_fields=['award_update_date', 'etl_update_date'])
class ConfigLoader(Generic[T], BaseConfigLoader):
    """Parse, validate, load and dump AEA package configuration files."""

    def __init__(self, schema_filename: str, configuration_class: Type[T], skip_aea_validation: bool=True) -> None:
        """
        Initialize the parser for configuration files.

        :param schema_filename: JSON schema the configuration must satisfy.
        :param configuration_class: configuration class produced by load().
        :param skip_aea_validation: if True (default), skip checking that the
            running AEA version matches the config's 'aea_version' specifier.
        """
        super().__init__(schema_filename)
        self._configuration_class = configuration_class
        self._skip_aea_validation = skip_aea_validation

    @property
    def configuration_class(self) -> Type[T]:
        """Get the configuration class of the loader.

        Must be a property: the rest of the class accesses it without calling
        (e.g. ``self.configuration_class.package_type``), which would fail on
        a plain method.
        """
        return self._configuration_class

    def validate(self, json_data: Dict) -> None:
        """Validate the given JSON object against the loader's schema.

        When AEA validation is enabled, also enforce that the running AEA
        version satisfies the configuration's 'aea_version' specifier set.
        """
        if not self._skip_aea_validation:
            aea_version_specifier_set = AgentConfig.parse_aea_version_specifier(json_data['aea_version'])
            aea_version = aea.__version__
            enforce(aea_version_specifier_set.contains(aea_version), f"AEA version in use '{aea_version}' is not compatible with the specifier set '{aea_version_specifier_set}'.")
        super().validate(json_data)

    def load_protocol_specification(self, file_pointer: TextIO) -> ProtocolSpecification:
        """Load a protocol specification from a (multi-document) YAML file.

        The file may hold 1-3 YAML documents: the specification itself,
        optionally protobuf snippets, and optionally a dialogue configuration.

        :param file_pointer: the file pointer to the protocol specification.
        :return: the parsed ProtocolSpecification object.
        :raises ValueError: if the file holds more than three YAML documents.
        """
        yaml_data = yaml.safe_load_all(file_pointer)
        yaml_documents = list(yaml_data)
        configuration_file_json = yaml_documents[0]
        if len(yaml_documents) == 1:
            protobuf_snippets_json = {}
            dialogue_configuration = {}
        elif len(yaml_documents) == 2:
            # The second document is either protobuf snippets or a dialogue
            # configuration; the 'initiation' key tells them apart.
            protobuf_snippets_json = {} if 'initiation' in yaml_documents[1] else yaml_documents[1]
            dialogue_configuration = yaml_documents[1] if 'initiation' in yaml_documents[1] else {}
        elif len(yaml_documents) == 3:
            protobuf_snippets_json = yaml_documents[1]
            dialogue_configuration = yaml_documents[2]
        else:
            raise ValueError('Incorrect number of Yaml documents in the protocol specification.')
        self.validate(configuration_file_json)
        protocol_specification = cast(ProtocolSpecification, self.configuration_class.from_json(configuration_file_json))
        protocol_specification.protobuf_snippets = protobuf_snippets_json
        protocol_specification.dialogue_config = dialogue_configuration
        return protocol_specification

    def load(self, file_pointer: TextIO) -> T:
        """Load a configuration object from a file pointer (agent or component)."""
        if self.configuration_class.package_type == PackageType.AGENT:
            return cast(T, self._load_agent_config(file_pointer))
        return self._load_component_config(file_pointer)

    def dump(self, configuration: T, file_pointer: TextIO) -> None:
        """Dump a configuration object to a file pointer (agent or component)."""
        if self.configuration_class.package_type == PackageType.AGENT:
            self._dump_agent_config(cast(AgentConfig, configuration), file_pointer)
        else:
            self._dump_component_config(configuration, file_pointer)

    @classmethod
    def from_configuration_type(cls, configuration_type: Union[(PackageType, str)], **kwargs: Any) -> 'ConfigLoader':
        """Get a loader from a configuration/package type.

        Restored as a classmethod: it takes ``cls`` and is meant to be called
        on the class itself.
        """
        configuration_type = PackageType(configuration_type)
        return ConfigLoaders.from_package_type(configuration_type, **kwargs)

    def _load_component_config(self, file_pointer: TextIO) -> T:
        """Load a single-document component configuration."""
        configuration_file_json = yaml_load(file_pointer)
        return self._load_from_json(configuration_file_json)

    def _load_from_json(self, configuration_file_json: Dict) -> T:
        """Validate and build a configuration object from a JSON object."""
        self.validate(configuration_file_json)
        # Remember the original key order so dumps stay stable.
        key_order = list(configuration_file_json.keys())
        configuration_obj = cast(T, self.configuration_class.from_json(configuration_file_json))
        configuration_obj._key_order = key_order
        return configuration_obj

    def load_agent_config_from_json(self, configuration_json: List[Dict], validate: bool=True) -> AgentConfig:
        """Build an AgentConfig from a list of JSON documents.

        The first document is the agent configuration proper; the remaining
        documents are per-component configuration overrides.

        :raises ValueError: if the list is empty.
        """
        if len(configuration_json) == 0:
            raise ValueError('Agent configuration file was empty.')
        agent_config_json = configuration_json[0]
        if validate:
            self.validate(agent_config_json)
        key_order = list(agent_config_json.keys())
        agent_configuration_obj = cast(AgentConfig, self.configuration_class.from_json(agent_config_json))
        agent_configuration_obj._key_order = key_order
        component_configurations = self._get_component_configurations(configuration_json)
        agent_configuration_obj.component_configurations = component_configurations
        return agent_configuration_obj

    def _get_component_configurations(self, configuration_file_jsons: List[Dict]) -> Dict[(ComponentId, Dict)]:
        """Extract and validate the per-component override sections.

        :raises ValueError: if the same component is configured twice.
        """
        component_configurations: Dict[(ComponentId, Dict)] = {}
        for (i, component_configuration_json) in enumerate(configuration_file_jsons[_STARTING_INDEX_CUSTOM_CONFIGS:]):
            component_id = self._process_component_section(i, component_configuration_json)
            if component_id in component_configurations:
                raise ValueError(f'Configuration of component {component_id} occurs more than once.')
            component_configurations[component_id] = component_configuration_json
        return component_configurations

    def _load_agent_config(self, file_pointer: TextIO) -> AgentConfig:
        """Load a multi-document agent configuration file."""
        configuration_file_jsons = yaml_load_all(file_pointer)
        return self.load_agent_config_from_json(configuration_file_jsons)

    def _dump_agent_config(self, configuration: AgentConfig, file_pointer: TextIO) -> None:
        """Dump an agent configuration plus its component overrides."""
        agent_config_part = configuration.ordered_json
        self.validate(agent_config_part)
        # Component overrides are dumped as separate YAML documents.
        agent_config_part.pop('component_configurations')
        result = [agent_config_part] + configuration.component_configurations_json()
        yaml_dump_all(result, file_pointer)

    def _dump_component_config(self, configuration: T, file_pointer: TextIO) -> None:
        """Dump a single component configuration."""
        result = configuration.ordered_json
        self.validate(result)
        yaml_dump(result, file_pointer)

    def _process_component_section(self, component_index: int, component_configuration_json: Dict) -> ComponentId:
        """Split and validate one component override section, returning its id."""
        component_id = self.validator.split_component_id_and_config(component_index, component_configuration_json)
        self.validator.validate_component_configuration(component_id, component_configuration_json)
        return component_id
class JsCountAll():
    """JavaScript aggregator: counts occurrences of each value for the given keys."""
    # Input parameters expected by the JS snippet below.
    params = ('keys',)
    # JS body: tallies "column#value" pairs in first-seen order, then emits
    # {column, value, count, distinct} records into `result`.
    value = '\n var temp = {}; var order= [];\n data.forEach(function(rec){ \n keys.forEach(function(k){\n var aggKey = k +"#"+ rec[k]; if(!(aggKey in temp)){order.push(aggKey); temp[aggKey] = 1} else{temp[aggKey] += 1}})}); \n order.forEach(function(label){\n var keys = label.split("#"); var rec = {\'column\': keys[0], \'value\': keys[1], \'count\': temp[label], \'distinct\': false};\n result.push(rec)})'
def test_overflow_uint():
    """to_int accepts values up to 2**255 - 1 and overflows beyond, for both dtypes."""
    # (2 ** 256) // 2 - 1 == 2**255 - 1, the largest accepted value.
    max_value = ((2 ** 256) // 2) - 1
    assert to_int(max_value) == max_value
    with pytest.raises(OverflowError):
        to_int(max_value + 1)
    assert to_int(max_value, 'int') == max_value
    with pytest.raises(OverflowError):
        to_int(max_value + 1, 'int')
class SGETracker(IntervalModule):
    """Track the state of jobs in a Sun Grid Engine queue via qstat over SSH."""

    interval = 60
    settings = (('ssh', 'The SSH connection address. Can be or user: or -p PORT etc.'), 'color', 'format')
    required = ('ssh',)
    format = 'SGE qw: {queued} / r: {running} / Eqw: {error}'
    on_leftclick = None
    color = '#ffffff'

    def parse_qstat_xml(self):
        """Run qstat remotely and tally jobs by state (qw / Eqw / r)."""
        xml = subprocess.check_output('ssh {0} "qstat -xml"'.format(self.ssh), stderr=subprocess.STDOUT, shell=True)
        root = etree.fromstring(xml)
        counts = {'qw': 0, 'Eqw': 0, 'r': 0}
        # Pending jobs live under job_info/job_info, running ones under
        # job_info/queue_info.
        for job_list_xpath in ('//job_info/job_info/job_list', '//job_info/queue_info/job_list'):
            for job in root.xpath(job_list_xpath):
                counts[job.find('state').text] += 1
        return counts

    def run(self):
        """Refresh the status-bar output from the current queue counts."""
        counts = self.parse_qstat_xml()
        values = {'queued': counts['qw'], 'error': counts['Eqw'], 'running': counts['r']}
        self.data = values
        self.output = {'full_text': self.format.format(**values).strip(), 'color': self.color}
def generate_html(offers: Sequence[Offer], file: Path, author_name: str, author_mail: str, author_web: str, feed_id_prefix: str, source: (Source | None)=None, type_: (OfferType | None)=None, duration: (OfferDuration | None)=None) -> None:
    """Render the given offers into an HTML page written to ``file``.

    Offers never seen, or whose validity only started after they were last
    seen, are skipped. Entries are sorted newest-first by their "valid from"
    timestamp and rendered through the module-level Jinja TEMPLATE_STR.

    :param offers: offers to render.
    :param file: output path, overwritten with UTF-8 HTML.
    :param author_name: feed author name passed to the template.
    :param author_mail: feed author e-mail passed to the template.
    :param author_web: feed author URL passed to the template.
    :param feed_id_prefix: prefix prepended to each entry id.
    :param source: optional source filter used only for the page title.
    :param type_: optional offer-type filter used only for the page title.
    :param duration: optional duration filter used only for the page title.
    """
    # Most recent "updated" timestamp across all entries, used as feed date.
    latest_date: (datetime | None) = None
    entries = []
    for offer in offers:
        if ((not offer.seen_first) and (not offer.seen_last)):
            continue
        # Skip offers that became valid only after they disappeared.
        # NOTE(review): assumes seen_last is set whenever valid_from is —
        # confirm; a seen_first-only offer would raise here otherwise.
        if (offer.valid_from and (offer.valid_from > offer.seen_last)):
            continue
        # "Updated" is the later of valid_from and seen_first.
        if (offer.valid_from and offer.seen_first and (offer.valid_from > offer.seen_first)):
            updated: (datetime | None) = offer.valid_from
        else:
            updated = offer.seen_first
        if ((not latest_date) or (updated and (updated > latest_date))):
            latest_date = updated
        entry = {}
        entry['id'] = f'{feed_id_prefix}{int(offer.id)}'
        # NOTE(review): additional_info is built but never stored in the
        # entry — possibly dead code; confirm against the template.
        additional_info = offer.type.value
        if (offer.duration != OfferDuration.CLAIMABLE):
            additional_info += f', {offer.duration.value}'
        entry['title'] = offer.title
        game: (Game | None) = offer.game
        # Prefer the offer's own image, falling back to the Steam image.
        if offer.img_url:
            entry['img_url'] = html.escape(offer.img_url)
        elif (game and game.steam_info and game.steam_info.image_url):
            entry['img_url'] = html.escape(game.steam_info.image_url)
        # Fall back to the first-seen date when no explicit validity start.
        valid_from = (offer.valid_from.strftime(TIMESTAMP_SHORT) if offer.valid_from else offer.seen_first.strftime(TIMESTAMP_SHORT))
        entry['valid_from'] = valid_from
        if offer.valid_to:
            entry['valid_to'] = offer.valid_to.strftime(TIMESTAMP_SHORT)
        if offer.url:
            entry['source'] = html.escape(offer.source.value)
            entry['url'] = html.escape(offer.url)
        if (offer.valid_to and (offer.valid_to < datetime.now(tz=timezone.utc))):
            entry['is_expired'] = True
        else:
            entry['is_expired'] = False
        # Enrich the entry with IGDB/Steam metadata when a game is linked.
        if game:
            entry['has_game'] = True
            if (game.igdb_info and game.igdb_info.name):
                entry['game_name'] = html.escape(game.igdb_info.name)
            elif (game.steam_info and game.steam_info.name):
                entry['game_name'] = html.escape(game.steam_info.name)
            if (game.steam_info and game.steam_info.metacritic_score):
                entry['metacritic_score'] = game.steam_info.metacritic_score
                if game.steam_info.metacritic_url:
                    entry['metacritic_url'] = html.escape(game.steam_info.metacritic_url)
            if (game.steam_info and game.steam_info.percent and game.steam_info.score and game.steam_info.recommendations):
                entry['steam_percent'] = game.steam_info.percent
                entry['steam_score'] = game.steam_info.score
                entry['steam_recommendations'] = game.steam_info.recommendations
                entry['steam_url'] = html.escape(game.steam_info.url)
            if (game.igdb_info and game.igdb_info.meta_ratings and game.igdb_info.meta_score):
                entry['igdb_meta_score'] = game.igdb_info.meta_score
                entry['igdb_meta_ratings'] = game.igdb_info.meta_ratings
                entry['igdb_url'] = html.escape(game.igdb_info.url)
            if (game.igdb_info and game.igdb_info.user_ratings and game.igdb_info.user_score):
                entry['igdb_user_score'] = game.igdb_info.user_score
                entry['igdb_user_ratings'] = game.igdb_info.user_ratings
            # Release date: prefer IGDB over Steam.
            if (game.igdb_info and game.igdb_info.release_date):
                entry['release_date'] = game.igdb_info.release_date.strftime(TIMESTAMP_SHORT)
            elif (game.steam_info and game.steam_info.release_date):
                entry['release_date'] = game.steam_info.release_date.strftime(TIMESTAMP_SHORT)
            if (game.steam_info and game.steam_info.recommended_price_eur):
                entry['recommended_price'] = game.steam_info.recommended_price_eur
            # Description: prefer IGDB over Steam.
            if (game.igdb_info and game.igdb_info.short_description):
                entry['description'] = html.escape(game.igdb_info.short_description)
            elif (game.steam_info and game.steam_info.short_description):
                entry['description'] = html.escape(game.steam_info.short_description)
            if (game.steam_info and game.steam_info.genres):
                entry['genres'] = html.escape(game.steam_info.genres)
        entries.append(entry)
    # Newest offers first.
    entries.sort(key=(lambda x: x['valid_from']), reverse=True)
    feed = {'author_name': author_name, 'author_email': author_mail, 'author_uri': author_web, 'title': get_title(source, type_, duration), 'updated': latest_date}
    template = Template(TEMPLATE_STR)
    rendered_html = template.render(entries=entries, feed=feed)
    with file.open('w', encoding='utf-8') as f:
        f.write(rendered_html)
class OptionSeriesVariablepieDataDragdropGuideboxDefault(Options):
    """Style options for the guide box shown while drag/dropping points.

    Restored the @property/@setter pairs: the original had duplicate plain
    ``def`` statements for each name, so each setter silently shadowed its
    getter and the getters were unreachable.
    """

    @property
    def className(self):
        """CSS class name of the guide box; default 'highcharts-drag-box-default'."""
        return self._config_get('highcharts-drag-box-default')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Guide box fill color; default 'rgba(0, 0, 0, 0.1)'."""
        return self._config_get('rgba(0, 0, 0, 0.1)')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        """Mouse cursor shown over the guide box; default 'move'."""
        return self._config_get('move')

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Guide box border color; default '#888'."""
        return self._config_get('#888')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Guide box border width in pixels; default 1."""
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """Guide box z-index; default 900."""
        return self._config_get(900)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def write_ram_ext_tags(segmk, tile_param):
    """Emit site tags describing the RAM_EXTENSION_{A,B} cascade settings.

    For each port, tags whether the extension is LOWER and, complementarily,
    whether it is NONE or UPPER.
    """
    site = tile_param['site']
    for param in ('RAM_EXTENSION_A', 'RAM_EXTENSION_B'):
        value = tile_param[param]
        for option in ('LOWER',):
            segmk.add_site_tag(site, f'{param}_{option}', value == option)
        segmk.add_site_tag(site, f'{param}_NONE_OR_UPPER', value != 'LOWER')
def test_msgpack_custom_encoder_decoder():
    """A custom type fails to serialize before hook registration and round-trips after."""
    class CustomObject():
        # Minimal wrapper type that msgpack cannot serialize natively.
        def __init__(self, value):
            self.value = value
    def serialize_obj(obj, chain=None):
        # Encoder hook: tag CustomObject instances; defer others to `chain`.
        if isinstance(obj, CustomObject):
            return {'__custom__': obj.value}
        return (obj if (chain is None) else chain(obj))
    def deserialize_obj(obj, chain=None):
        # Decoder hook: rebuild CustomObject from the tagged dict.
        if ('__custom__' in obj):
            return CustomObject(obj['__custom__'])
        return (obj if (chain is None) else chain(obj))
    data = {'a': 123, 'b': CustomObject({'foo': 'bar'})}
    # Without registered hooks serialization must fail.
    with pytest.raises(TypeError):
        msgpack_dumps(data)
    # NOTE(review): these registrations are global and persist after this
    # test; confirm that is acceptable for the surrounding test session.
    msgpack_encoders.register('custom_object', func=serialize_obj)
    msgpack_decoders.register('custom_object', func=deserialize_obj)
    bytes_data = msgpack_dumps(data)
    new_data = msgpack_loads(bytes_data)
    assert (new_data['a'] == 123)
    assert isinstance(new_data['b'], CustomObject)
    assert (new_data['b'].value == {'foo': 'bar'})
    # Chaining: numpy arrays are still handled by the pre-existing encoders.
    data = {'a': numpy.zeros((1, 2, 3)), 'b': CustomObject({'foo': 'bar'})}
    bytes_data = msgpack_dumps(data)
    new_data = msgpack_loads(bytes_data)
    assert isinstance(new_data['a'], numpy.ndarray)
    assert isinstance(new_data['b'], CustomObject)
    assert (new_data['b'].value == {'foo': 'bar'})
class TestInfraConfig(unittest.TestCase):
    """Tests for InfraConfig.is_tls_enabled feature gating."""

    # Imported in the class body so the restored @patch decorator below
    # resolves even if the module top does not import it.
    from unittest.mock import patch

    def test_stage_flow(self) -> None:
        """Placeholder: stage-flow coverage not implemented yet."""
        pass

    def test_is_stage_flow_completed(self) -> None:
        """Placeholder: stage-flow-completion coverage not implemented yet."""
        pass

    def test_is_tls_enabled_when_flag_missing(self):
        """TLS is off when the PCF_TLS feature flag is absent."""
        config = self._create_config(PrivateComputationGameType.LIFT, set())
        is_tls_enabled = config.is_tls_enabled
        self.assertFalse(is_tls_enabled)

    def test_is_tls_enabled_when_not_supported(self):
        """TLS is off for game types that do not support it, even with the flag set."""
        config = self._create_config(PrivateComputationGameType.ATTRIBUTION, {PCSFeature.PCF_TLS})
        is_tls_enabled = config.is_tls_enabled
        self.assertFalse(is_tls_enabled)

    # Bugfix: the @patch decorator had been stripped, leaving a bare string
    # expression and an unused injected parameter, which made the test fail
    # at invocation time (no value supplied for mock_infra_config).
    @patch('fbpcs.private_computation.entity.infra_config.InfraConfig')
    def test_is_tls_enabled_when_supported_and_flag_present(self, mock_infra_config):
        """TLS is on when the game type supports it and the flag is present."""
        config = self._create_config(PrivateComputationGameType.LIFT, {PCSFeature.PCF_TLS})
        is_tls_enabled = config.is_tls_enabled
        self.assertTrue(is_tls_enabled)

    def _create_config(self, game_type, pcs_features):
        """Build a minimal InfraConfig with the given game type and feature set."""
        return InfraConfig(instance_id='test-instance-id', role=PrivateComputationRole.PARTNER, status=PrivateComputationInstanceStatus.CREATED, status_update_ts=1, instances=[], game_type=game_type, num_pid_containers=1, num_mpc_containers=1, num_files_per_mpc_container=1, status_updates=[], pcs_features=pcs_features)
def admin_daily_update(location):
    """Email a location's admins a digest of today's arrivals, departures, events and billing.

    Returns the mailgun send result, None when there are no admins, and
    nothing at all when there is nothing to report.
    """
    today = timezone.localtime(timezone.now()).date()
    arriving_today = Use.objects.filter(location=location).filter(arrive=today).filter(status='confirmed')
    maybe_arriving_today = Use.objects.filter(location=location).filter(arrive=today).filter(status='approved')
    pending_now = Use.objects.filter(location=location).filter(status='pending')
    approved_now = Use.objects.filter(location=location).filter(status='approved')
    departing_today = Use.objects.filter(location=location).filter(depart=today).filter(status='confirmed')
    events_today = published_events_today_local(location=location)
    pending_or_feedback = events_pending(location=location)
    subscriptions_ready = Subscription.objects.ready_for_billing(location, target_date=today)
    # Skip the email entirely when there is nothing to report.
    if not (arriving_today or departing_today or events_today or maybe_arriving_today or pending_now or approved_now or subscriptions_ready):
        logger.debug('Nothing happening today at %s, skipping daily email' % location.name)
        return
    subject = '[%s] %s Events and Guests' % (location.email_subject_prefix, today)
    # De-duplicate admin addresses while preserving house_admins order.
    admins_emails = list(dict.fromkeys(admin.email for admin in location.house_admins.all()))
    if not admins_emails:
        logger.debug('%s: No admins to send to' % location.slug)
        return None
    context = {'today': today, 'domain': Site.objects.get_current().domain, 'location': location, 'arriving': arriving_today, 'maybe_arriving': maybe_arriving_today, 'pending_now': pending_now, 'approved_now': approved_now, 'departing': departing_today, 'events_today': events_today, 'events_pending': pending_or_feedback['pending'], 'events_feedback': pending_or_feedback['feedback'], 'subscriptions_ready': subscriptions_ready}
    (text_content, html_content) = render_templates(context, location, LocationEmailTemplate.ADMIN_DAILY)
    mailgun_data = {'from': location.from_email(), 'to': admins_emails, 'subject': subject, 'text': text_content}
    if html_content:
        mailgun_data['html'] = html_content
    return mailgun_send(mailgun_data)
class CLISerializer(AbstractSerializer):
    """Serializer that decodes CLI-style argument strings into a dict."""

    # Matches keys that are followed by a value (e.g. "--name value").
    regex_keys_with_values = '-+\\w+(?=\\s[^\\s-])'
    # Matches every key, with or without a value.
    regex_all_keys = '-+\\w+'

    def __init__(self):
        super().__init__(extensions=['cli'])

    # Bugfix: restored @staticmethod — this was defined without `self` but
    # called as self.parse_keys(regex, string), which would raise TypeError.
    @staticmethod
    def parse_keys(regex, string):
        """Helper method, returns a list of --keys based on the regex used"""
        return [match.group(0) for match in finditer(regex, string)]

    # Bugfix: restored @staticmethod for the same reason as parse_keys.
    @staticmethod
    def _get_parser(options):
        """Build an ArgumentParser from the given keyword options."""
        return ArgumentParser(**options)

    def decode(self, s=None, **kwargs):
        """Parse a CLI string into a dict of key -> value(s).

        Keys with a following value become stored/appended arguments; bare
        keys become booleans (or counts when repeated). Single-element value
        lists are unwrapped to scalars.

        :param s: the CLI string to parse.
        :param kwargs: options forwarded to the ArgumentParser constructor.
        :raises ValueError: on any parser construction or parsing failure.
        """
        parser = self._get_parser(options=kwargs)
        keys_with_values = set(self.parse_keys(self.regex_keys_with_values, s))
        all_keys = Counter(self.parse_keys(self.regex_all_keys, s))
        for key, count in all_keys.items():
            try:
                if key in keys_with_values:
                    if count == 1:
                        parser.add_argument(key, nargs='*', required=False)
                    else:
                        # Repeated keys with values accumulate into a list.
                        parser.add_argument(key, action='append', required=False)
                elif count <= 1:
                    parser.add_argument(key, action='store_true', required=False)
                else:
                    # Repeated bare keys become an occurrence count.
                    parser.add_argument(key, action='count', required=False)
            except ArgumentError as error:
                raise ValueError from error
        try:
            args = parser.parse_args(s.split())
        except BaseException as error:
            # argparse exits via SystemExit on errors; normalize to ValueError.
            raise ValueError from error
        # Renamed from `dict`, which shadowed the builtin.
        parsed = vars(args)
        for key in parsed:
            value = parsed[key]
            # Unwrap single-element lists produced by nargs='*'.
            if type_util.is_list(value) and (len(value) == 1):
                parsed[key] = value[0]
        return parsed

    def encode(self, d, **kwargs):
        """Encoding back to a CLI string is not supported."""
        raise NotImplementedError
class OptionPlotoptionsAreaZones(Options):
    """Zone options for area plots.

    Restored the @property/@setter pairs: the original had duplicate plain
    ``def`` statements for each name, so each setter silently shadowed its
    getter and the getters were unreachable.
    """

    @property
    def className(self):
        """CSS class name applied to the zone; no default."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Series color within the zone; no default."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def dashStyle(self):
        """Dash style of the graph line within the zone; no default."""
        return self._config_get(None)

    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def fillColor(self):
        """Area fill color within the zone; no default."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)
def is_hidden(path: AnyStr) -> bool:
    """Return True if the file at ``path`` is hidden.

    A leading dot in the basename always counts as hidden; additionally the
    Windows hidden attribute and the macOS UF_HIDDEN flag are consulted.

    :param path: file path as str or bytes.
    """
    name = os.path.basename(path)
    # Dotfile convention works for both str and bytes paths.
    if name[:1] in ('.', b'.'):
        return True
    if sys.platform == 'win32':
        FILE_ATTRIBUTE_HIDDEN = 2
        attributes = os.lstat(path)
        return bool(attributes.st_file_attributes & FILE_ATTRIBUTE_HIDDEN)
    if sys.platform == 'darwin':
        attributes = os.lstat(path)
        return bool(attributes.st_flags & stat.UF_HIDDEN)
    return False
class WebSocketRPCClient(RPCClient):
    """JSON-RPC client speaking over an established WebSocket connection."""

    def __init__(self, ws):
        self.ws = ws
        # Incoming frames are handed to the transport through this queue.
        self.queue = hub.Queue()
        transport = WebSocketClientTransport(ws, self.queue)
        super().__init__(JSONRPCProtocol(), transport)

    def serve_forever(self):
        """Pump frames from the socket into the queue until the peer closes."""
        while (msg := self.ws.wait()) is not None:
            self.queue.put(msg)
class TestProxyEnv(GymTestCase):
is_agent_to_agent_messages = False
    def test__init__(self):
        """Verify the initial state produced by the environment's __init__."""
        assert (self.proxy_env._is_rl_agent_trained is False)
        assert (self.proxy_env._step_count == 0)
        assert (self.proxy_env._active_dialogue is None)
    def test_properties(self):
        """Verify the environment's public properties and their guards."""
        assert (self.proxy_env.gym_dialogues == self.gym_dialogues)
        # Accessing the active dialogue before any reset must fail.
        with pytest.raises(ValueError, match='GymDialogue not set yet.'):
            assert self.proxy_env.active_gym_dialogue
        self.proxy_env._active_dialogue = self.dummy_gym_dialogue
        assert (self.proxy_env.active_gym_dialogue == self.dummy_gym_dialogue)
        assert (self.proxy_env.queue == self.proxy_env._queue)
        assert (self.proxy_env.is_rl_agent_trained is False)
    def test_step_i(self):
        """step() sends an ACT message and returns the percept's contents."""
        action = 'some_action'
        gym_dialogue = cast(GymDialogue, self.prepare_skill_dialogue(dialogues=self.gym_dialogues, messages=self.list_of_gym_messages[:2]))
        self.proxy_env._active_dialogue = gym_dialogue
        # A valid percept reply: step_id one ahead of the current count.
        percept_msg = self.build_incoming_message(message_type=GymMessage, performative=GymMessage.Performative.PERCEPT, step_id=(self.proxy_env._step_count + 1), observation=self.mocked_observation, reward=self.mocked_reward, done=True, info=self.mocked_info)
        with patch.object(self.proxy_env._queue, 'get', return_value=percept_msg) as mocked_q_get:
            (actual_observation, actual_reward, actual_done, actual_info) = self.proxy_env.step(action)
        # The ACT message must have been sent to the dialogue opponent.
        self.assert_quantity_in_outbox(1)
        message = self.get_message_from_outbox()
        (has_attributes, error_str) = self.message_has_attributes(actual_message=message, message_type=GymMessage, performative=GymMessage.Performative.ACT, to=gym_dialogue.dialogue_label.dialogue_opponent_addr, sender=str(self.skill.skill_context.skill_id), action=GymMessage.AnyObject(action), step_id=self.proxy_env._step_count)
        assert has_attributes, error_str
        mocked_q_get.assert_called_with(block=True, timeout=None)
        # The returned tuple mirrors the mocked percept content.
        assert (actual_observation == self.mocked_observation.any)
        assert (actual_reward == self.mocked_reward)
        assert (actual_done is True)
        assert (actual_info == self.mocked_info.any)
    def test_step_ii(self):
        """step() raises when the reply has an unexpected performative."""
        action = 'some_action'
        gym_dialogue = cast(GymDialogue, self.prepare_skill_dialogue(dialogues=self.gym_dialogues, messages=self.list_of_gym_messages[:2]))
        self.proxy_env._active_dialogue = gym_dialogue
        # RESET instead of the expected PERCEPT performative.
        invalid_percept_msg = self.build_incoming_message(message_type=GymMessage, performative=GymMessage.Performative.RESET)
        with patch.object(self.proxy_env._queue, 'get', return_value=invalid_percept_msg) as mocked_q_get:
            with pytest.raises(ValueError, match=f'Unexpected performative. Expected={GymMessage.Performative.PERCEPT} got={invalid_percept_msg.performative}'):
                self.proxy_env.step(action)
        # The ACT message is still sent before the reply is validated.
        self.assert_quantity_in_outbox(1)
        message = self.get_message_from_outbox()
        (has_attributes, error_str) = self.message_has_attributes(actual_message=message, message_type=GymMessage, performative=GymMessage.Performative.ACT, to=gym_dialogue.dialogue_label.dialogue_opponent_addr, sender=str(self.skill.skill_context.skill_id), action=GymMessage.AnyObject(action), step_id=self.proxy_env._step_count)
        assert has_attributes, error_str
        mocked_q_get.assert_called_with(block=True, timeout=None)
def test_step_iii(self):
    """step() raises when the PERCEPT reply carries a stale step id.

    The expected-vs-actual values in the match string show step() increments
    the counter before validating the reply; the reply here echoes the old id.
    """
    action = 'some_action'
    gym_dialogue = cast(GymDialogue, self.prepare_skill_dialogue(dialogues=self.gym_dialogues, messages=self.list_of_gym_messages[:2]))
    self.proxy_env._active_dialogue = gym_dialogue
    # PERCEPT with the current (un-incremented) step id — stale from step()'s view.
    invalid_percept_msg = self.build_incoming_message(message_type=GymMessage, performative=GymMessage.Performative.PERCEPT, step_id=self.proxy_env._step_count, observation=self.mocked_observation, reward=self.mocked_reward, done=True, info=self.mocked_info)
    with patch.object(self.proxy_env._queue, 'get', return_value=invalid_percept_msg) as mocked_q_get:
        with pytest.raises(ValueError, match=f'Unexpected step id! expected={(self.proxy_env._step_count + 1)}, actual={self.proxy_env._step_count}'):
            self.proxy_env.step(action)
    # The ACT message goes out before the reply is validated.
    self.assert_quantity_in_outbox(1)
    message = self.get_message_from_outbox()
    (has_attributes, error_str) = self.message_has_attributes(actual_message=message, message_type=GymMessage, performative=GymMessage.Performative.ACT, to=gym_dialogue.dialogue_label.dialogue_opponent_addr, sender=str(self.skill.skill_context.skill_id), action=GymMessage.AnyObject(action), step_id=self.proxy_env._step_count)
    assert has_attributes, error_str
    # step() blocks indefinitely on the queue for the reply.
    mocked_q_get.assert_called_with(block=True, timeout=None)
def test_reset_i(self):
    """reset() succeeds when the queued reply is a STATUS message."""
    status_reply = self.build_incoming_message(message_type=GymMessage, performative=GymMessage.Performative.STATUS, content=self.mocked_status_content)
    with patch.object(self.proxy_env._queue, 'get', return_value=status_reply) as queue_get_mock:
        self.proxy_env.reset()
    # Environment state after a successful reset.
    assert self.proxy_env._step_count == 0
    assert self.proxy_env._is_rl_agent_trained is False
    assert self.proxy_env._active_dialogue is not None
    # Exactly one RESET message was sent to the gym.
    self.assert_quantity_in_outbox(1)
    outgoing = self.get_message_from_outbox()
    attrs_ok, failure = self.message_has_attributes(actual_message=outgoing, message_type=GymMessage, performative=GymMessage.Performative.RESET, to=self.proxy_env.gym_address, sender=str(self.skill.skill_context.skill_id))
    assert attrs_ok, failure
    queue_get_mock.assert_called_with(block=True, timeout=None)
def test_reset_ii(self):
    """reset() raises when the queued reply is not a STATUS message."""
    invalid_msg = self.build_incoming_message(message_type=GymMessage, performative=GymMessage.Performative.RESET)
    with patch.object(self.proxy_env._queue, 'get', return_value=invalid_msg) as queue_get_mock:
        with pytest.raises(ValueError, match=f'Unexpected performative. Expected={GymMessage.Performative.STATUS} got={invalid_msg.performative}'):
            self.proxy_env.reset()
    # State is initialized even though the reply was rejected.
    assert self.proxy_env._step_count == 0
    assert self.proxy_env._is_rl_agent_trained is False
    assert self.proxy_env._active_dialogue is not None
    # The RESET request was sent out before the reply was validated.
    self.assert_quantity_in_outbox(1)
    outgoing = self.get_message_from_outbox()
    attrs_ok, failure = self.message_has_attributes(actual_message=outgoing, message_type=GymMessage, performative=GymMessage.Performative.RESET, to=self.proxy_env.gym_address, sender=str(self.skill.skill_context.skill_id))
    assert attrs_ok, failure
    queue_get_mock.assert_called_with(block=True, timeout=None)
def test_close_i(self):
    """close() sends a CLOSE message on the active dialogue once training is done."""
    self.proxy_env._is_rl_agent_trained = True
    active = cast(GymDialogue, self.prepare_skill_dialogue(dialogues=self.gym_dialogues, messages=self.list_of_gym_messages[:4]))
    self.proxy_env._active_dialogue = active
    self.proxy_env.close()
    # Exactly one CLOSE message addressed to the dialogue counterparty.
    self.assert_quantity_in_outbox(1)
    outgoing = self.get_message_from_outbox()
    attrs_ok, failure = self.message_has_attributes(actual_message=outgoing, message_type=GymMessage, performative=GymMessage.Performative.CLOSE, to=active.dialogue_label.dialogue_opponent_addr, sender=str(self.skill.skill_context.skill_id))
    assert attrs_ok, failure
# NOTE(review): the original first line was the bare residue
# '(params=[2, 3], ids=['Rectangle', 'Box'])' — a syntax error left by a
# stripped decorator. Reconstructed as @pytest.fixture; confirm upstream.
@pytest.fixture(params=[2, 3], ids=['Rectangle', 'Box'])
def tp_mesh(request):
    """Tensor-product mesh fixture: a quad unit square (param 2) or its extrusion (param 3).

    Returns the mesh with its coordinates remapped through acos(1 - 2x)/pi,
    producing a non-uniform coordinate field on [0, 1].
    """
    nx = 4
    # Overlap parameters are needed so the mesh works under distribution.
    distribution = {'overlap_type': (DistributedMeshOverlapType.VERTEX, 1)}
    m = UnitSquareMesh(nx, nx, quadrilateral=True, distribution_parameters=distribution)
    if request.param == 3:
        # Extrude the square into a box for the 3D case.
        m = ExtrudedMesh(m, nx)
    x = SpatialCoordinate(m)
    xnew = as_vector([acos(1 - 2 * xj) / pi for xj in x])
    m.coordinates.interpolate(xnew)
    return m
class Card(ft.GestureDetector):
    """A draggable colored rectangle that can be dropped onto a slot."""

    def __init__(self, solitaire, color):
        super().__init__()
        # Gesture wiring: pan events implement drag & drop.
        self.mouse_cursor = ft.MouseCursor.MOVE
        self.drag_interval = 5
        self.on_pan_start = self.start_drag
        self.on_pan_update = self.drag
        self.on_pan_end = self.drop
        # Placement state.
        self.slot = None
        self.left = None
        self.top = None
        self.solitaire = solitaire
        self.color = color
        self.content = ft.Container(bgcolor=self.color, width=CARD_WIDTH, height=CARD_HEIGTH)

    def move_on_top(self):
        """Re-append this card so it renders above every other control."""
        controls = self.solitaire.controls
        controls.remove(self)
        controls.append(self)
        self.solitaire.update()

    def bounce_back(self):
        """Return to the slot this card was last placed on."""
        self.top = self.slot.top
        self.left = self.slot.left
        self.update()

    def place(self, slot):
        """Snap onto *slot* and remember it."""
        self.top = slot.top
        self.left = slot.left
        self.slot = slot

    def start_drag(self, e: ft.DragStartEvent):
        self.move_on_top()
        self.update()

    def drag(self, e: ft.DragUpdateEvent):
        # Follow the pointer, clamped so the card cannot leave the top/left edges.
        self.top = max(0, self.top + e.delta_y)
        self.left = max(0, self.left + e.delta_x)
        self.update()

    def drop(self, e: ft.DragEndEvent):
        """Settle on the first slot within DROP_PROXIMITY, else bounce back."""
        for candidate in self.solitaire.slots:
            near_y = abs(self.top - candidate.top) < DROP_PROXIMITY
            near_x = abs(self.left - candidate.left) < DROP_PROXIMITY
            if near_y and near_x:
                self.place(candidate)
                self.update()
                return
        self.bounce_back()
        self.update()
def encode_sequence(raw_sequence: Sequence[RLP]) -> Bytes:
    """RLP-encode a sequence: a length prefix followed by the joined item encodings."""
    body = get_joined_encodings(raw_sequence)
    body_length = Uint(len(body))
    if body_length < 56:
        # Short form: single prefix byte 0xC0 (192) + length.
        return Bytes([192 + body_length]) + body
    # Long form: 0xF7 (247) + number of length bytes, then the big-endian
    # length itself, then the body.
    length_bytes = body_length.to_be_bytes()
    return Bytes([247 + len(length_bytes)]) + length_bytes + body
class SpotifyAudiobook(SpotifyBase):
    """Audiobook endpoints of the Spotify Web API.

    NOTE(review): the bare '()' / '_and_process(...)' / '_limit(50)' lines
    look like residue of stripped decorators (scopes / send-and-process /
    chunking helpers) — confirm against the original source; they are kept
    verbatim here.
    """
    ()
    _and_process(single(FullAudiobook))
    def audiobook(self, audiobook_id: str, market: str=None) -> FullAudiobook:
        """Get a single audiobook by its Spotify ID."""
        return self._get('audiobooks/' + audiobook_id, market=market)
    ()
    ('audiobook_ids', 1, 50, join_lists)
    _and_process(model_list(FullAudiobook, 'audiobooks'))
    def audiobooks(self, audiobook_ids: list, market: str=None) -> List[FullAudiobook]:
        """Get up to 50 audiobooks in one request.

        Fix: the bulk endpoint is 'audiobooks' (plural), matching the
        single-item endpoint above and the Spotify Web API; it previously
        read 'audiobook/?ids='.
        """
        return self._get('audiobooks/?ids=' + ','.join(audiobook_ids), market=market)
    ()
    _and_process(single(SimpleChapterPaging))
    _limit(50)
    def audiobook_chapters(self, audiobook_id: str, market: str=None, limit: int=20, offset: int=0) -> SimpleChapterPaging:
        """Get one page of an audiobook's chapters."""
        return self._get(f'audiobooks/{audiobook_id}/chapters', market=market, limit=limit, offset=offset)
()
('type_', metavar='TYPE', type=click.Choice(list(crypto_registry.supported_ids)), required=True)
_option()
_context
_aea_project
def get_address(click_context: click.Context, type_: str, password: Optional[str]) -> None:
ctx = cast(Context, click_context.obj)
address = _try_get_address(ctx, type_, password)
click.echo(address) |
class Geoform():
def __init__(self, hexes, geotype):
self.type = geotype
self.hexes = hexes
self.size = len(hexes)
self.id = uuid.uuid4()
self.neighbors = set()
self.to_delete = False
for h in hexes:
h.geoform = self
def to_dict(self):
return {'id': self.id.hex, 'type': self.type.name, 'size': self.size}
def neighbor_of_type(self, other_type):
result = []
for n in self.neighbors:
if (n.type is other_type):
result.append(n)
return result
def neighbor_of_types(self, other_types):
result = []
for t in other_types:
result.extend(self.neighbor_of_type(t))
return result
def merge(self, other):
self.hexes.update(other.hexes)
self.size += len(other.hexes)
for h in other.hexes:
h.geoform = self
other.hexes = set()
other.size = 0
other.to_delete = True
def is_geotype(self, geotype):
return (self.type is geotype)
def __eq__(self, other):
return (self.id == other.id)
def __key(self):
return self.id
def __hash__(self):
return hash(self.__key())
def __str__(self):
return '<Geoform: type: {}, size: {}, id: {}>'.format(self.type.title, self.size, self.id) |
class Lock(Base, ReprMixIn):
    """Advisory database lock: one row per held lock, keyed by lock name."""
    __tablename__ = 'locks'
    # Attributes listed first when ReprMixIn renders the repr; note 'reason'
    # is deliberately (presumably) not in this list — confirm with ReprMixIn.
    REPR_SQL_ATTR_SORT_FIRST = ['lock_name', 'host', 'process_id', 'date']
    lock_name = sqlalchemy.Column(sqlalchemy.String(255), nullable=False, primary_key=True)  # lock identity
    host = sqlalchemy.Column(sqlalchemy.String(255), nullable=False)  # holder's host name
    process_id = sqlalchemy.Column(sqlalchemy.String(255), nullable=False)  # holder's process id, stored as a string
    reason = sqlalchemy.Column(sqlalchemy.String(255), nullable=False)  # human-readable reason for the lock
    date = sqlalchemy.Column(BenjiDateTime, nullable=False)  # timestamp — presumably acquisition time; confirm
# NOTE(review): the original lines '_test_windows' and '.asyncio' were syntax-
# error residue of stripped decorators; reconstructed as @skip_test_windows /
# @pytest.mark.asyncio from that residue — confirm against the original source.
@skip_test_windows
@pytest.mark.asyncio
class TestAEAHelperPosixNamedPipeChannel():
    """Communication tests for the POSIX named-pipe channel."""

    @pytest.mark.asyncio
    async def test_connection_communication(self):
        """A message written to the pipe is echoed back unchanged."""
        pipe = PosixNamedPipeChannel()
        assert pipe.in_path is not None and pipe.out_path is not None, 'PosixNamedPipeChannel not properly setup'
        # Start connecting before the client end is opened by the echo thread.
        connected = asyncio.ensure_future(pipe.connect())
        client_pipe = PosixNamedPipeChannelClient(pipe.out_path, pipe.in_path)
        client = Thread(target=_run_echo_service, args=[client_pipe])
        client.start()
        # Fix: dropped the original 'except Exception: raise' — a no-op
        # re-raise; the finally block alone gives identical cleanup behavior.
        try:
            assert (await connected), 'Failed to connect pipe'
            message = b'hello'
            await pipe.write(message)
            received = await pipe.read()
            assert received == message, 'Echoed message differs'
        finally:
            await pipe.close()
            client.join()
def get_pdf_notes_last_added_first(limit: int=None) -> List[SiacNote]:
    """Fetch all notes whose source is a PDF, newest (highest id) first.

    ``limit`` optionally caps the number of rows; falsy values mean no limit.
    """
    # limit is an int (or falsy) per the signature, so interpolating it into
    # the SQL is safe here — TODO confirm no caller passes a string.
    limit = f'limit {limit}' if limit else ''
    conn = _get_connection()
    res = conn.execute(f"select * from notes where lower(source) like '%.pdf' order by id desc {limit}").fetchall()
    conn.close()
    return _to_notes(res)
def import_roff(filelike: Union[(TextIO, BinaryIO, _PathLike)], name: Optional[str]=None) -> np.ma.MaskedArray[(Any, np.dtype[np.float32])]:
    """Read one float field parameter (and the grid dimensions) from a roff file.

    Lazily scans tags, keeping only the parameter whose name matches *name*
    (any parameter when *name* is None). Returns the data reshaped to
    (nX, nY, nZ), flipped along the last axis, masked at RMS_UNDEFINED_FLOAT.
    Raises ValueError for missing parameters/dimensions, discrete (byte or
    integer) data, or a size/dimension mismatch.
    """
    # Accumulator for the two tags of interest; entries stay None until found.
    looking_for = {'dimensions': {'nX': None, 'nY': None, 'nZ': None}, 'parameter': {'name': None, 'data': None}}
    def reset_parameter() -> None:
        # Discard a partially-read parameter that turned out not to match *name*.
        looking_for['parameter'] = {'name': None, 'data': None}
    def all_set() -> bool:
        # True once all dimensions and the parameter name+data are filled in.
        return all(((val is not None) for v in looking_for.values() for val in v.values()))
    def should_skip_parameter(key: Tuple[(str, str)]) -> bool:
        # A 'name' key that does not match the requested name means: skip this tag.
        if (key[0] == 'name'):
            if ((name is None) or (key[1] == name)):
                return False
            return True
        return False
    with roffio.lazy_read(filelike) as tag_generator:
        for (tag, keys) in tag_generator:
            # Stop scanning as soon as everything we need has been collected.
            if all_set():
                break
            if (tag in looking_for):
                for key in keys:
                    if should_skip_parameter(key):
                        reset_parameter()
                        break
                    if (key[0] in looking_for[tag]):
                        looking_for[tag][key[0]] = key[1]
    data = looking_for['parameter']['data']
    if (data is None):
        raise ValueError(f'Could not find roff parameter {name!r} in {filelike}')
    if (not all_set()):
        raise ValueError(f'Could not find dimensions for roff parameter {name!r} in {filelike}')
    # bytes / uint8 / integer data indicate a discrete parameter, which is rejected.
    if (isinstance(data, bytes) or np.issubdtype(data.dtype, np.uint8)):
        raise ValueError('Ert does not support discrete roff field parameters')
    if np.issubdtype(data.dtype, np.integer):
        raise ValueError('Ert does not support discrete roff field parameters')
    if np.issubdtype(data.dtype, np.floating):
        # Normalize to single precision.
        if (data.dtype == np.float64):
            data = data.astype(np.float32)
        dim = looking_for['dimensions']
        if (((dim['nX'] * dim['nY']) * dim['nZ']) != data.size):
            raise ValueError(f'Field parameter {name!r} does not have correct number of elements for given dimensions {dim} in {filelike}')
        # Flip along the last axis — presumably roff stores Z reversed relative
        # to the expected layout; confirm against roff format docs.
        data = np.flip(data.reshape((dim['nX'], dim['nY'], dim['nZ'])), (- 1))
        return np.ma.masked_values(data, RMS_UNDEFINED_FLOAT)
    raise ValueError(f'Unexpected type of roff parameter {name} in {filelike}: {type(data)}')
def write_until_byte_equals(offset, byte):
    """Repeatedly re-key and encrypt until the leaked byte at *offset* equals *byte*.

    Prints per-attempt and total timings; returns the final leaked byte.
    """
    current = None
    before = time()
    while current != byte:
        attempt_start = time()
        set_key('B' * offset + '\x00')
        encrypt_message('A' * 256)
        current = leak_offset_byte(offset)
        print('attempt: ' + str(time() - attempt_start))
    print('FINAL: ' + str(time() - before))
    return current
def extractCrazymoonlightCom(item):
    """Map a crazymoonlight.com feed item to a release message.

    Returns None for previews or items with no volume/chapter, False when no
    known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items carrying neither a chapter nor a volume number.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [("i'm just this 'sue'", 'Im Just This Sue', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    matched = next((entry for entry in tagmap if entry[0] in item['tags']), None)
    if matched is None:
        return False
    _, name, tl_type = matched
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class HighlightTreeprocessor(Treeprocessor):
    """Tree processor that syntax-highlights <pre><code> blocks."""

    def __init__(self, md, ext):
        self.ext = ext
        super().__init__(md)

    def code_unescape(self, text):
        """Unescape the HTML entities Markdown escaped inside code blocks.

        Fix: the entity names had been mangled into their literal characters
        (e.g. ``replace('<', '<')``), turning every replacement into a no-op;
        restored '&lt;', '&gt;' and '&amp;'.
        """
        text = text.replace('&lt;', '<')
        text = text.replace('&gt;', '>')
        # Escaped '&' must be replaced last so '&amp;lt;' round-trips correctly.
        text = text.replace('&amp;', '&')
        return text

    def run(self, root):
        """Replace each <pre> with a single <code> child by highlighted, stashed HTML."""
        blocks = root.iter('pre')
        for block in blocks:
            if len(block) == 1 and block[0].tag == 'code':
                self.ext.pygments_code_block += 1
                code = Highlight(guess_lang=self.config['guess_lang'], pygments_style=self.config['pygments_style'], use_pygments=self.config['use_pygments'], noclasses=self.config['noclasses'], linenums=self.config['linenums'], linenums_style=self.config['linenums_style'], linenums_special=self.config['linenums_special'], linenums_class=self.config['linenums_class'], extend_pygments_lang=self.config['extend_pygments_lang'], language_prefix=self.config['language_prefix'], code_attr_on_pre=self.config['code_attr_on_pre'], auto_title=self.config['auto_title'], auto_title_map=self.config['auto_title_map'], pygments_lang_class=self.config['pygments_lang_class'], stripnl=self.config['stripnl'], default_lang=self.config['default_lang'])
                placeholder = self.md.htmlStash.store(code.highlight(self.code_unescape(block[0].text).rstrip('\n'), '', self.config['css_class'], code_block_count=self.ext.pygments_code_block))
                # Swap the <pre> for a paragraph holding the stash placeholder.
                block.clear()
                block.tag = 'p'
                block.text = placeholder
class OptionSeriesTreegraphSonificationTracksMappingHighpassResonance(Options):
    """Highpass-resonance mapping options.

    Fix: each getter/setter pair shared a name with no decorators, so the
    second ``def`` silently shadowed the first and the getters were
    unreachable — apparently stripped '@property'/setter decorators;
    restored. Confirm against the original source.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestSkillDocs():
    """Verify the code samples embedded in docs/skill.md."""

    def setup_class(cls):
        """Parse skill.md once and collect its fenced code blocks."""
        parser = mistune.create_markdown(renderer=mistune.AstRenderer())
        tokens = parser(Path(ROOT_DIR, 'docs', 'skill.md').read_text())
        cls.code_blocks = [token for token in tokens if token['type'] == 'block_code']

    def test_context(self):
        """The first code block shows putting a message on the outbox."""
        first = self.code_blocks[0]
        assert first['text'].strip() == 'self.context.outbox.put_message(message=reply)'
        assert first['info'].strip() == 'python'

    def test_hello_world_behaviour(self):
        """The behaviour snippets compile and define a OneShotBehaviour subclass."""
        offset = 2
        snippet = self.code_blocks[offset]['text']
        # The class-definition snippet must actually execute.
        compiled = compile(snippet, 'fakemodule', 'exec')
        namespace = {}
        exec(compiled, globals(), namespace)
        HelloWorldBehaviour = namespace['HelloWorldBehaviour']
        assert issubclass(HelloWorldBehaviour, OneShotBehaviour)
        registration = self.code_blocks[offset + 1]['text']
        assert registration.strip() == 'self.context.new_behaviours.put(HelloWorldBehaviour(name="hello_world", skill_context=self.context))'
        one_shot = self.code_blocks[offset + 2]
        assert one_shot['text'] == 'def hello():\n print("Hello, World!")\n\nself.context.new_behaviours.put(OneShotBehaviour(act=hello, name="hello_world", skill_context=self.context))\n'

    def test_task(self):
        """The task snippet defines nth_prime_number and a Task subclass."""
        offset = 5
        namespace = compile_and_exec(self.code_blocks[offset]['text'])
        nth_prime_number = namespace['nth_prime_number']
        # The first few primes sanity-check the helper.
        for n, prime in ((1, 2), (2, 3), (3, 5), (4, 7)):
            assert nth_prime_number(n) == prime
        LongTask = namespace['LongTask']
        assert issubclass(LongTask, Task)
        # Must be instantiable with no arguments.
        LongTask()
class FipaDialogues(Model, BaseFipaDialogues):
    """FIPA dialogues model: validates the opening CFP and derives our role."""

    def __init__(self, **kwargs: Any) -> None:
        Model.__init__(self, **kwargs)

        def role_from_first_message(message: Message, receiver_address: Address) -> Dialogue.Role:
            """Determine seller/buyer role from the first CFP's query data model."""
            fipa_message = cast(FipaMessage, message)
            if fipa_message.performative != FipaMessage.Performative.CFP:
                raise ValueError('First message must be a CFP!')
            query = fipa_message.query
            if query.model is None:
                raise ValueError('Query must have a data model!')
            if query.model.name not in [SUPPLY_DATAMODEL_NAME, DEMAND_DATAMODEL_NAME]:
                raise ValueError('Query data model name must be in [{},{}]'.format(SUPPLY_DATAMODEL_NAME, DEMAND_DATAMODEL_NAME))
            # If we sent the CFP ourselves, a demand query makes us the seller;
            # if the counterparty sent it, a supply query makes us the seller.
            we_sent_it = message.sender == receiver_address
            if we_sent_it:
                is_seller = query.model.name == DEMAND_DATAMODEL_NAME
            else:
                is_seller = query.model.name == SUPPLY_DATAMODEL_NAME
            return FipaDialogue.Role.SELLER if is_seller else FipaDialogue.Role.BUYER

        BaseFipaDialogues.__init__(self, self_address=self.context.agent_address, role_from_first_message=role_from_first_message, dialogue_class=FipaDialogue)
class TestDifferenceToFetchedAgent(BaseAEATestCase):
    """Check difference_to_fetched_agent reports a diff after a config change."""

    _mock_called = False
    original_function = yaml.safe_load_all
    test_agent_name: str

    def setup_class(cls) -> None:
        """Fetch the reference agent and tweak one skill configuration value."""
        super().setup_class()
        cls.test_agent_name = 'test_agent'
        cls.fetch_agent(str(MY_FIRST_AEA_PUBLIC_ID), cls.test_agent_name)
        cls.set_agent_context(cls.test_agent_name)
        cls.run_cli_command('config', 'set', 'vendor.fetchai.skills.echo.behaviours.echo.args.tick_interval', '2.0', cwd=cls._get_cwd())

    def _safe_load_all_side_effect(self, file):
        """Wrap yaml.safe_load_all, appending one fake override document on the first call only."""
        documents = list(self.original_function(file))
        if not self._mock_called:
            # Only tamper with the first YAML stream parsed under the patch.
            self._mock_called = True
            documents.append({})
        return iter(documents)

    def test_difference_to_fetched_agent(self, *_mocks):
        """The injected override must surface as a file difference."""
        with mock.patch('yaml.safe_load_all', side_effect=self._safe_load_all_side_effect):
            diff = self.difference_to_fetched_agent(str(MY_FIRST_AEA_PUBLIC_ID), self.test_agent_name)
        assert diff
def test_interference_graph_of_group_first_graph_c():
    """Subgraph on {v_1, v_2, x_1} keeps exactly the interferences among those nodes."""
    interference_graph = construct_graph(1)
    sub_graph = interference_graph.get_subgraph_of(InsertionOrderedSet([v_1, v_2, x_1]))
    assert InsertionOrderedSet(sub_graph.nodes) == InsertionOrderedSet([v_1, v_2, x_1])
    # v_1-v_2 and v_2-x_1 interfere; v_1-x_1 does not.
    assert sub_graph.are_interfering(v_1, v_2)
    assert sub_graph.are_interfering(v_2, x_1)
    assert not sub_graph.are_interfering(v_1, x_1)
class VRRPInterfaceMonitor(app_manager.RyuApp):
    """Base Ryu application monitoring one interface for VRRP traffic.

    Concrete per-interface-type monitors register themselves via register()
    and are created through factory().
    """

    # Maps an interface class to the monitor class that handles it.
    _CONSTRUCTORS = {}

    # NOTE(review): no 'self' parameter — likely residue of a stripped
    # @staticmethod decorator; confirm against the original source.
    def register(interface_cls):
        """Return a class decorator registering a monitor class for *interface_cls*."""
        def _register(cls):
            VRRPInterfaceMonitor._CONSTRUCTORS[interface_cls] = cls
            return cls
        return _register

    # NOTE(review): no 'self' parameter — likely a stripped @staticmethod.
    def factory(interface, config, router_name, statistics, *args, **kwargs):
        """Instantiate the registered monitor app for *interface*'s class."""
        cls = VRRPInterfaceMonitor._CONSTRUCTORS[interface.__class__]
        app_mgr = app_manager.AppManager.get_instance()
        # Copy so the caller's kwargs are not mutated.
        kwargs = kwargs.copy()
        kwargs['router_name'] = router_name
        kwargs['vrrp_config'] = config
        kwargs['vrrp_interface'] = interface
        kwargs['vrrp_statistics'] = statistics
        app = app_mgr.instantiate(cls, *args, **kwargs)
        return app

    # NOTE(review): first parameter is named 'cls' with no decorator — likely a
    # stripped @classmethod (it is also called as self.instance_name(...) in
    # __init__ below); confirm against the original source.
    def instance_name(cls, interface, vrid):
        """Unique app name built from the class name, the interface and the VRID."""
        return ('%s-%s-%d' % (cls.__name__, str(interface), vrid))

    def __init__(self, *args, **kwargs):
        super(VRRPInterfaceMonitor, self).__init__(*args, **kwargs)
        # All four settings are injected by factory() via kwargs.
        self.config = kwargs['vrrp_config']
        self.interface = kwargs['vrrp_interface']
        self.router_name = kwargs['router_name']
        self.statistics = kwargs['vrrp_statistics']
        self.name = self.instance_name(self.interface, self.config.vrid)

    def _parse_received_packet(self, packet_data):
        """Validate *packet_data* as a VRRP packet matching this monitor's config.

        Returns True (after sending EventVRRPReceived to the router) for a
        valid matching packet; returns None (implicitly) otherwise.
        """
        packet_ = packet.Packet(packet_data)
        protocols = packet_.protocols
        # Need at least two parsed protocol layers before VRRP can be present.
        if (len(protocols) < 2):
            self.logger.debug('len(protocols) %d', len(protocols))
            return
        # VLAN tagging presence must match the interface configuration ...
        vlan_vid = self.interface.vlan_id
        may_vlan = protocols[1]
        if ((vlan_vid is not None) != isinstance(may_vlan, vlan.vlan)):
            self.logger.debug('vlan_vid: %s %s', vlan_vid, type(may_vlan))
            return
        # ... and so must the VLAN id itself.
        if ((vlan_vid is not None) and (vlan_vid != may_vlan.vid)):
            self.logger.debug('vlan_vid: %s vlan %s', vlan_vid, type(may_vlan))
            return
        (may_ip, may_vrrp) = vrrp.vrrp.get_payload(packet_)
        if ((not may_ip) or (not may_vrrp)):
            return
        # Drop packets failing the vrrp library's TTL validity check.
        if (not vrrp.vrrp.is_valid_ttl(may_ip)):
            self.logger.debug('valid_ttl')
            return
        if (may_vrrp.version != self.config.version):
            self.logger.debug('vrrp version %d %d', may_vrrp.version, self.config.version)
            return
        if (not may_vrrp.is_valid()):
            self.logger.debug('valid vrrp')
            return
        # Compute the byte offset of the VRRP header to verify its checksum
        # against the raw packet bytes.
        offset = 0
        for proto in packet_.protocols:
            if (proto == may_vrrp):
                break
            offset += len(proto)
        if (not may_vrrp.checksum_ok(may_ip, packet_.data[offset:(offset + len(may_vrrp))])):
            self.logger.debug('bad checksum')
            return
        if (may_vrrp.vrid != self.config.vrid):
            self.logger.debug('vrid %d %d', may_vrrp.vrid, self.config.vrid)
            return
        if (may_vrrp.is_ipv6 != self.config.is_ipv6):
            self.logger.debug('is_ipv6 %s %s', may_vrrp.is_ipv6, self.config.is_ipv6)
            return
        # Zero-priority packets are counted separately before being forwarded.
        if (may_vrrp.priority == 0):
            self.statistics.rx_vrrp_zero_prio_packets += 1
        vrrp_received = vrrp_event.EventVRRPReceived(self.interface, packet_)
        self.send_event(self.router_name, vrrp_received)
        return True

    def _send_vrrp_packet_received(self, packet_data):
        """Parse an incoming packet and update the rx statistics accordingly."""
        valid = self._parse_received_packet(packet_data)
        if (valid is True):
            self.statistics.rx_vrrp_packets += 1
        else:
            self.statistics.rx_vrrp_invalid_packets += 1

    # NOTE(review): bare call line — likely residue of a stripped event-handler
    # decorator for the method below; confirm against the original source.
    _ev_handler(vrrp_event.EventVRRPTransmitRequest)
    def vrrp_transmit_request_handler(self, ev):
        # Subclass hook: transmit a VRRP packet on request.
        raise NotImplementedError()

    def _initialize(self):
        # Subclass hook: start monitoring the interface.
        raise NotImplementedError()

    def _shutdown(self):
        # Subclass hook: stop monitoring the interface.
        raise NotImplementedError()

    # NOTE(review): bare call line — likely residue of a stripped event-handler
    # decorator for the method below; confirm against the original source.
    _ev_handler(vrrp_event.EventVRRPStateChanged)
    def vrrp_state_changed_handler(self, ev):
        """Start/stop monitoring on state changes and update transition counters."""
        assert (ev.interface == self.interface)
        if (ev.new_state == vrrp_event.VRRP_STATE_INITIALIZE):
            # A falsy old_state means startup -> initialize; otherwise shutdown.
            if ev.old_state:
                self._shutdown()
            else:
                self._initialize()
        elif (ev.new_state in [vrrp_event.VRRP_STATE_BACKUP, vrrp_event.VRRP_STATE_MASTER]):
            if (ev.old_state == vrrp_event.VRRP_STATE_INITIALIZE):
                if (ev.new_state == vrrp_event.VRRP_STATE_MASTER):
                    self.statistics.idle_to_master_transitions += 1
                else:
                    self.statistics.idle_to_backup_transitions += 1
            elif (ev.old_state == vrrp_event.VRRP_STATE_MASTER):
                self.statistics.master_to_backup_transitions += 1
            else:
                self.statistics.backup_to_master_transitions += 1
        else:
            raise RuntimeError(('unknown vrrp state %s' % ev.new_state))
class LiteEthPHYXGMIICRG(Module, AutoCSR):
    """Clock/reset generator driving the eth_rx and eth_tx clock domains."""

    def __init__(self, clock_pads, model=False):
        self._reset = CSRStorage()
        self.clock_domains.cd_eth_rx = ClockDomain()
        self.clock_domains.cd_eth_tx = ClockDomain()
        if model:
            # Simulation model: both domains run off the system clock.
            rx_clk = ClockSignal()
            tx_clk = ClockSignal()
        else:
            # Hardware: clocks come from the XGMII pads.
            rx_clk = clock_pads.rx
            tx_clk = clock_pads.tx
        self.comb += [self.cd_eth_rx.clk.eq(rx_clk), self.cd_eth_tx.clk.eq(tx_clk)]
class DatasetGraph():
    """Traversal graph over dataset collections.

    Nodes are (dataset, collection) pairs; edges come from declared field
    references between collections.
    """

    def __init__(self, *datasets: GraphDataset) -> None:
        # One node per (dataset, collection) pair, indexed by collection address.
        nodes = [Node(dr, ds) for dr in datasets for ds in dr.collections]
        self.nodes: dict[(CollectionAddress, Node)] = {node.address: node for node in nodes}
        self.edges: Set[Edge] = set()
        # Build edges from every declared field reference; a reference to a
        # collection that is not part of this graph is a validation error.
        for (node_address, node) in self.nodes.items():
            for (field_path, ref_list) in node.collection.references().items():
                source_field_address: FieldAddress = FieldAddress(node_address.dataset, node_address.collection, *field_path.levels)
                for (dest_field_address, direction) in ref_list:
                    if (dest_field_address.collection_address() not in self.nodes):
                        logger.warning('Referenced object {} does not exist', dest_field_address)
                        raise ValidationError(f'Referred to object {dest_field_address} does not exist')
                    self.edges.add(Edge.create_edge(source_field_address, dest_field_address, direction))
        # Fields seeded from identity data, keyed by their full field address.
        self.identity_keys: dict[(FieldAddress, SeedAddress)] = {FieldAddress(node.address.dataset, node.address.collection, *field_path.levels): seed_address for node in nodes for (field_path, seed_address) in node.collection.identities().items()}

    def data_category_field_mapping(self) -> DataCategoryFieldMapping:
        """Map each collection address to its field paths grouped by data category."""
        mapping: Dict[(CollectionAddress, Dict[(FidesKey, List[FieldPath])])] = defaultdict((lambda : defaultdict(list)))
        for (node_address, node) in self.nodes.items():
            mapping[node_address] = node.collection.field_paths_by_category
        return mapping

    def __repr__(self) -> str:
        return f'Graph: nodes = {self.nodes.keys()}'
class filenamesDataset(object):
    """Dataset over a text file containing one filename per line."""

    def __init__(self, fnames):
        """Read every line of the list file *fnames*, stripping the newline.

        Fix: the file handle was opened and never closed (resource leak);
        it is now managed with a context manager.
        """
        self.fnames = fnames
        self.fnames_array = []
        with open(self.fnames) as f:
            for line in f:
                # Keep everything before the newline (same as the original split).
                self.fnames_array.append(line.split('\n')[0])

    def __getitem__(self, idx):
        return self.fnames_array[idx]

    def __len__(self):
        return len(self.fnames_array)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.