code stringlengths 281 23.7M |
|---|
class MonitorCLIParserTestCase(unittest.TestCase):
    """Tests for the `dagda monitor` sub-command CLI parsing."""

    def test_empty_args(self):
        # Neither start nor stop requested: verify_args reports status 1.
        args = generate_args('69dbf26ab368', False, False)
        self.assertEqual(MonitorCLIParser.verify_args(args), 1)

    def test_all_args(self):
        # Both flags set at once: verify_args reports status 2
        # (presumably the flags are mutually exclusive — see parser).
        args = generate_args('69dbf26ab368', True, True)
        self.assertEqual(MonitorCLIParser.verify_args(args), 2)

    def test_check_full_happy_path(self):
        # A well-formed command line parses and exposes the container id.
        sys.argv = ['dagda.py', 'monitor', '69dbf26ab368', '--start']
        parser = MonitorCLIParser()
        self.assertEqual(parser.get_container_id(), '69dbf26ab368')

    def test_check_exit_1(self):
        # Missing action flag makes the parser exit with code 1.
        sys.argv = ['dagda.py', 'monitor', '69dbf26ab368']
        with self.assertRaises(SystemExit) as ctx:
            MonitorCLIParser()
        self.assertEqual(ctx.exception.code, 1)

    def test_DagdaMonitorParser_exit_2(self):
        # argparse-style error() exits with code 2.
        with self.assertRaises(SystemExit) as ctx:
            DagdaMonitorParser().error('fail')
        self.assertEqual(ctx.exception.code, 2)

    def test_DagdaMonitorParser_format_help(self):
        # Help output must match the canned reference text.
        self.assertEqual(DagdaMonitorParser().format_help(), monitor_parser_text)
class OptionPlotoptionsAreaSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Highcharts sonification pan-mapping options for area series.

    NOTE(review): the extracted source defined each name twice — a
    getter-shaped and a setter-shaped `def` — which means the decorators
    were stripped and the second definition silently shadowed the first.
    Restored here as `@property` / setter pairs so attribute access works.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsVariablepieSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Highcharts sonification lowpass-frequency mapping options for
    variable-pie series.

    NOTE(review): each name was defined twice (getter + setter shapes) in
    the extracted source — stripped `@property` / setter decorators have
    been restored so the second definition no longer shadows the first.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_get_download_url():
    """Single and batch download-URL endpoints return typed responses
    for the known test file id."""
    ali = Aligo()
    file = ali.get_download_url(file_id=test_file)
    assert isinstance(file, GetDownloadUrlResponse)
    # NOTE(review): the original asserted `file.size == <number>` but the
    # expected constant was lost in extraction (the line was a syntax
    # error). Assert the field is populated instead; restore the exact
    # expected size if it can be recovered.
    assert file.size is not None
    batch = ali.batch_download_url(file_id_list=[test_file])
    for i in batch:
        assert isinstance(i, BatchSubResponse)
        assert isinstance(i.body, GetDownloadUrlResponse)
def extractLadykekeWordpressCom(item):
    """Parse a ladykeke.wordpress.com feed item into a release message.

    Returns None for previews / items with no chapter-volume info,
    a release message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag name -> (series name, translation type)
    release_map = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in release_map.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class WebAppLink(AbstractObject):
    """Graph API `WebAppLink` node with `should_fallback` / `url` fields."""

    def __init__(self, api=None):
        super(WebAppLink, self).__init__()
        self._isWebAppLink = True
        self._api = api

    class Field(AbstractObject.Field):
        # Graph API field name constants.
        should_fallback = 'should_fallback'
        url = 'url'

    # Field name -> Graph API type, used for (de)serialization.
    _field_types = {'should_fallback': 'bool', 'url': 'string'}

    @classmethod
    def _get_field_enum_info(cls):
        """Return the enum metadata for this node's fields (none here).

        The method takes `cls` but had no decorator in the extracted
        source — restored as a classmethod, matching the SDK convention.
        """
        field_enum_info = {}
        return field_enum_info
# NOTE(review): the original began with a bare `.parametrize(...)` line —
# the `@pytest.mark` prefix was lost in extraction, leaving a syntax
# error. Restored as a standard parametrized test.
@pytest.mark.parametrize('value,expected', [
    ((lambda: None), False),
    (3, False),
    (None, False),
    ('3', False),
    ('0x3', False),
    ([], False),
    ([3], False),
    ([3, 3], False),
    ([None], False),
    ([tuple()], False),
    ([(1, 2)], False),
    (tuple(), True),
    ((3,), True),
    ((3, 3), True),
    ((None,), True),
    ((tuple(),), True),
    (((1, 2),), True),
])
def test_is_tuple(value, expected):
    """is_tuple accepts tuples of anything and rejects every other type."""
    assert (is_tuple(value) == expected)
class ObjectListWithSelection(HasTraits):
    """Model exposing a list of items plus one trait per selection mode.

    NOTE(review): the single/multiple, row/column/cell selection semantics
    are inferred from the trait names — confirm against the editor
    configuration that binds to these traits.
    """

    # The rows displayed.
    values = List(Instance(ListItem))
    # Single / multiple row selection, by object.
    selected = Instance(ListItem)
    selections = List(Instance(ListItem))
    # Single / multiple row selection, by index.
    selected_index = Int()
    selected_indices = List(Int)
    # Single / multiple column selection, by column name.
    selected_column = Str()
    selected_columns = List(Str)
    # Cell selection as (row object, column name) pairs.
    selected_cell = Tuple(Instance(ListItem), Str)
    selected_cells = List(Tuple(Instance(ListItem), Str))
    # Cell selection as (row index, column index) pairs.
    selected_cell_index = Tuple(Int, Int)
    selected_cell_indices = List(Tuple(Int, Int))
class DoordashAuthenticationStrategy(AuthenticationStrategy):
    """Signs outgoing requests with a DoorDash JWT (DD-JWT-V1 header scheme)."""

    # Registry name and configuration model for this strategy.
    name = 'doordash'
    configuration_model = DoordashAuthenticationConfiguration

    def __init__(self, configuration: DoordashAuthenticationConfiguration):
        # Raw, possibly placeholder-bearing, credential fields from config.
        self.developer_id = configuration.developer_id
        self.key_id = configuration.key_id
        self.signing_secret = configuration.signing_secret

    def add_authentication(self, request: PreparedRequest, connection_config: ConnectionConfig) -> PreparedRequest:
        """Attach `Authorization: Bearer <JWT>` to `request` and return it.

        The token is HS256-signed with the base64url-decoded signing
        secret and carries `exp` 60 seconds after `iat` (both as strings).
        """
        secrets: Optional[Dict[(str, Any)]] = connection_config.secrets
        # NOTE(review): `signing_secret` is passed through assign_placeholders
        # even when `secrets` is None, unlike `iss`/`kid` above which fall
        # back to None — confirm assign_placeholders tolerates a None map.
        token = jwt.encode({'aud': 'doordash', 'iss': (assign_placeholders(self.developer_id, secrets) if secrets else None), 'kid': (assign_placeholders(self.key_id, secrets) if secrets else None), 'exp': str(math.floor((time.time() + 60))), 'iat': str(math.floor(time.time()))}, jwt.utils.base64url_decode(assign_placeholders(self.signing_secret, secrets)), algorithm='HS256', headers={'dd-ver': 'DD-JWT-V1'})
        request.headers['Authorization'] = f'Bearer {token}'
        return request
def get_operations(source, target):
    """Search for a shortest chain of transpositions rewriting `source`
    into `target`.

    Returns a tuple of (b_start, c_start) argument pairs for
    `transpose_axes`, or None if no chain is found.
    """
    # States already expanded; seeded so the start is never revisited.
    visited = set([source])
    # Every transposition applicable to a sequence of this length.
    actions = list(possible_transposes(len(source)))
    def traverse(node, breadcrumbs, current_best):
        # Depth-first search with pruning: abandon any branch already as
        # long as the best known solution.
        if ((current_best is not None) and (len(breadcrumbs) >= len(current_best))):
            return current_best
        for (b_start, c_start) in actions:
            result = transpose_axes(node, b_start, c_start)
            # Skip previously-seen states — except the target itself, which
            # must still be allowed to complete a (possibly shorter) chain.
            # NOTE(review): `visited` is never unwound between branches, so
            # alternative paths through shared intermediate states are
            # pruned — confirm the shortest-path guarantee if it matters.
            if ((result in visited) and (result != target)):
                continue
            visited.add(result)
            new_breadcrumbs = (breadcrumbs + ((b_start, c_start),))
            if (result == target):
                # Found a strictly better chain: return it immediately.
                if ((current_best is None) or (len(current_best) > len(new_breadcrumbs))):
                    return new_breadcrumbs
            current_best = traverse(result, new_breadcrumbs, current_best)
        return current_best
    return traverse(source, tuple(), None)
class D3Request(JsPackage):
    """Fluent wrapper generating JavaScript calls for the d3-request API."""

    def header(self, key, value):
        """Set an HTTP request header (d3 `request.header(name, value)`)."""
        key = JsUtils.jsConvertData(key, None)
        value = JsUtils.jsConvertData(value, None)
        return self.fnc(('header(%s, %s)' % (key, value)))

    def timeout(self, value):
        """Set the request timeout in milliseconds."""
        value = JsUtils.jsConvertData(value, None)
        return self.fnc(('timeout(%s)' % value))

    def mimeType(self, mine_type):
        """Set the request MIME type.

        (`mine_type` is a typo for `mime_type`, but it is part of the
        public signature, so it is kept for caller compatibility.)
        """
        mine_type = JsUtils.jsConvertData(mine_type, None)
        return self.fnc(('mimeType(%s)' % mine_type))

    def response(self, js_funcs: Union[(list, str)], profile: Union[(dict, bool)]=False):
        """Register a raw-response callback; `js_funcs` run with `data`
        bound to the XHR response text.

        Bug fix: the emitted JS read `xhr.responseTex`, an undefined
        property — corrected to `xhr.responseText`.
        """
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        return self.fnc(('response(function(xhr) {var data = xhr.responseText; %s})' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)))

    def get(self, js_funcs: Union[(list, str)], profile: Union[(dict, bool)]=False):
        """Issue the GET and run `js_funcs` with the parsed `data`."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        return self.fnc(('get(function(data) {%s})' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)))
def md_sub_render(src='', language='', class_name=None, options=None, md='', **kwargs):
    """Render `src` as HTML via python-markdown.

    Extensions and their configs come from the document's front matter
    (keys 'extensions' / 'extension_configs'). Any failure is printed
    with its traceback and re-raised.
    """
    try:
        frontmatter, body = get_frontmatter(src)
        rendered = markdown.markdown(
            body,
            extensions=frontmatter.get('extensions', []),
            extension_configs=frontmatter.get('extension_configs', {}),
        )
        return rendered
    except Exception:
        import traceback
        print(traceback.format_exc())
        raise
(wraparound=False, boundscheck=False, cdivision=True, nonecheck=False)
def _dilate(image: A, selem: A, out: Optional[A]=None, shift_x: np.int8=0, shift_y: np.int8=0):
rows: np.intp = image.shape[0]
cols: np.intp = image.shape[1]
srows: np.intp = selem.shape[0]
scols: np.intp = selem.shape[1]
centre_r: np.intp = (int((srows / 2)) - shift_y)
centre_c: np.intp = (int((scols / 2)) - shift_x)
image = np.ascontiguousarray(image)
if (out is None):
out = np.zeros((rows, cols), dtype=np.uint8)
selem_num: np.intp = np.sum((np.asarray(selem) != 0))
sr: A1d = np.empty(selem_num, dtype=np.intp)
sc: A1d = np.empty(selem_num, dtype=np.intp)
s: np.intp = 0
r: np.intp
c: np.intp
for r in range(srows):
for c in range(scols):
if (selem[(r, c)] != 0):
sr[s] = (r - centre_r)
sc[s] = (c - centre_c)
s += 1
local_max: np.uint8
value: np.uint8
rr: np.intp
cc: np.intp
for r in range(rows):
for c in range(cols):
local_max = 0
for s in range(selem_num):
rr = (r + sr[s])
cc = (c + sc[s])
if ((0 <= rr < rows) and (0 <= cc < cols)):
value = image[(rr, cc)]
if (value > local_max):
local_max = value
out[(r, c)] = local_max
return np.asarray(out) |
def mask_and_resample(map_name, subject, hemisphere, src_mesh, dest_mesh, surf_reg_name):
    """Mask a metric to the cortical ROI, resample it onto another mesh,
    then re-mask on the destination mesh (all via wb_command)."""
    # Resolve all input/output paths up front (the helpers are path builders).
    src_metric = func_gii_file(subject, map_name, hemisphere, src_mesh)
    dest_metric = func_gii_file(subject, map_name, hemisphere, dest_mesh)
    src_roi = medial_wall_roi_file(subject, hemisphere, src_mesh)
    dest_roi = medial_wall_roi_file(subject, hemisphere, dest_mesh)
    reg_sphere = surf_file(subject, 'sphere.{}'.format(surf_reg_name), hemisphere, src_mesh)
    dest_sphere = surf_file(subject, 'sphere', hemisphere, dest_mesh)
    src_mid = surf_file(subject, 'midthickness', hemisphere, src_mesh)
    dest_mid = surf_file(subject, 'midthickness', hemisphere, dest_mesh)
    # 1) zero out the medial wall in place on the source mesh,
    # 2) area-adaptive barycentric resample to the destination mesh,
    # 3) re-mask on the destination mesh.
    run(['wb_command', '-metric-mask', src_metric, src_roi, src_metric])
    run(['wb_command', '-metric-resample', src_metric, reg_sphere, dest_sphere,
         'ADAP_BARY_AREA', dest_metric, '-area-surfs', src_mid, dest_mid,
         '-current-roi', src_roi])
    run(['wb_command', '-metric-mask', dest_metric, dest_roi, dest_metric])
def test_enum_type():
    """Round-trip the Color enum through the Flyte type engine."""
    literal_type = TypeEngine.to_literal_type(Color)
    assert (literal_type is not None)
    assert (literal_type.enum_type is not None)
    assert literal_type.enum_type.values
    assert (literal_type.enum_type.values == [c.value for c in Color])
    # Guessing the Python type back must reproduce the same member values.
    guessed = TypeEngine.guess_python_type(literal_type)
    assert ([e.value for e in guessed] == [e.value for e in Color])
    context = FlyteContextManager.current_context()
    literal_value = TypeEngine.to_literal(context, Color.RED, Color, TypeEngine.to_literal_type(Color))
    assert literal_value
    assert literal_value.scalar
    assert (literal_value.scalar.primitive.string_value == 'red')
    # Back to the enum member...
    as_enum = TypeEngine.to_python_value(context, literal_value, Color)
    assert as_enum
    assert (as_enum == Color.RED)
    # ...and to the raw string value.
    as_str = TypeEngine.to_python_value(context, literal_value, str)
    assert as_str
    assert (as_str == 'red')
    # str(Color.RED) is 'Color.RED', not 'red' — conversion must reject it,
    # just like an arbitrary non-member string.
    with pytest.raises(ValueError):
        TypeEngine.to_python_value(context, Literal(scalar=Scalar(primitive=Primitive(string_value=str(Color.RED)))), Color)
    with pytest.raises(ValueError):
        TypeEngine.to_python_value(context, Literal(scalar=Scalar(primitive=Primitive(string_value='bad'))), Color)
    with pytest.raises(AssertionError):
        TypeEngine.to_literal_type(UnsupportedEnumValues)
class UltraJSONTests(unittest.TestCase):
def test_encodeDecimal(self):
sut = decimal.Decimal('1337.1337')
encoded = ujson.encode(sut)
decoded = ujson.decode(encoded)
self.assertEqual(decoded, 1337.1337)
def test_encodeStringConversion(self):
input = 'A string \\ / \x08 \x0c \n \r \t </script> &'
not_html_encoded = '"A string \\\\ \\/ \\b \\f \\n \\r \\t <\\/script> &"'
html_encoded = '"A string \\\\ \\/ \\b \\f \\n \\r \\t \\u003c\\/script\\u003e \\u0026"'
not_slashes_escaped = '"A string \\\\ / \\b \\f \\n \\r \\t </script> &"'
def helper(expected_output, **encode_kwargs):
output = ujson.encode(input, **encode_kwargs)
self.assertEqual(output, expected_output)
if encode_kwargs.get('escape_forward_slashes', True):
self.assertEqual(input, json.loads(output))
self.assertEqual(input, ujson.decode(output))
helper(not_html_encoded, ensure_ascii=True)
helper(not_html_encoded, ensure_ascii=False)
helper(not_html_encoded, ensure_ascii=True, encode_html_chars=False)
helper(not_html_encoded, ensure_ascii=False, encode_html_chars=False)
helper(html_encoded, ensure_ascii=True, encode_html_chars=True)
helper(html_encoded, ensure_ascii=False, encode_html_chars=True)
helper(not_slashes_escaped, escape_forward_slashes=False)
def testWriteEscapedString(self):
self.assertEqual('"\\u003cimg src=\'\\u0026amp;\'\\/\\u003e"', ujson.dumps("<img src='&'/>", encode_html_chars=True))
def test_doubleLongIssue(self):
sut = {'a': (- )}
encoded = json.dumps(sut)
decoded = json.loads(encoded)
self.assertEqual(sut, decoded)
encoded = ujson.encode(sut)
decoded = ujson.decode(encoded)
self.assertEqual(sut, decoded)
def test_doubleLongDecimalIssue(self):
sut = {'a': (- .568)}
encoded = json.dumps(sut)
decoded = json.loads(encoded)
self.assertEqual(sut, decoded)
encoded = ujson.encode(sut)
decoded = ujson.decode(encoded)
self.assertEqual(sut, decoded)
def test_encodeDecodeLongDecimal(self):
sut = {'a': (- .4399388)}
encoded = ujson.dumps(sut)
ujson.decode(encoded)
def test_decimalDecodeTest(self):
sut = {'a': 4.56}
encoded = ujson.encode(sut)
decoded = ujson.decode(encoded)
self.assertAlmostEqual(sut[u'a'], decoded[u'a'])
def test_encodeDictWithUnicodeKeys(self):
input = {'key1': 'value1', 'key1': 'value1', 'key1': 'value1', 'key1': 'value1', 'key1': 'value1', 'key1': 'value1'}
ujson.encode(input)
input = {'': 'value1', '': 'value1', '': 'value1', '': 'value1', '': 'value1', '': 'value1', '': 'value1'}
ujson.encode(input)
def test_encodeDoubleConversion(self):
input = math.pi
output = ujson.encode(input)
self.assertEqual(round(input, 5), round(json.loads(output), 5))
self.assertEqual(round(input, 5), round(ujson.decode(output), 5))
def test_encodeWithDecimal(self):
input = 1.0
output = ujson.encode(input)
self.assertEqual(output, '1.0')
def test_encodeDoubleNegConversion(self):
input = (- math.pi)
output = ujson.encode(input)
self.assertEqual(round(input, 5), round(json.loads(output), 5))
self.assertEqual(round(input, 5), round(ujson.decode(output), 5))
def test_encodeArrayOfNestedArrays(self):
input = ([[[[]]]] * 20)
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(input, ujson.decode(output))
def test_encodeArrayOfDoubles(self):
input = ([31337.31337, 31337.31337, 31337.31337, 31337.31337] * 10)
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(input, ujson.decode(output))
def test_encodeStringConversion2(self):
input = 'A string \\ / \x08 \x0c \n \r \t'
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, '"A string \\\\ \\/ \\b \\f \\n \\r \\t"')
self.assertEqual(input, ujson.decode(output))
def test_decodeUnicodeConversion(self):
pass
def test_encodeUnicodeConversion1(self):
input = 'Raksmorgas '
enc = ujson.encode(input)
dec = ujson.decode(enc)
self.assertEqual(enc, json_unicode(input))
self.assertEqual(dec, json.loads(enc))
def test_encodeControlEscaping(self):
input = '\x19'
enc = ujson.encode(input)
dec = ujson.decode(enc)
self.assertEqual(input, dec)
self.assertEqual(enc, json_unicode(input))
def test_encodeUnicodeConversion2(self):
input = '\x97N\x88'
enc = ujson.encode(input)
dec = ujson.decode(enc)
self.assertEqual(enc, json_unicode(input))
self.assertEqual(dec, json.loads(enc))
def test_encodeUnicodeSurrogatePair(self):
input = '\x90\x8d\x86'
enc = ujson.encode(input)
dec = ujson.decode(enc)
self.assertEqual(enc, json_unicode(input))
self.assertEqual(dec, json.loads(enc))
def test_encodeUnicode4BytesUTF8(self):
input = '\x91\x80TRAILINGNORMAL'
enc = ujson.encode(input)
dec = ujson.decode(enc)
self.assertEqual(enc, json_unicode(input))
self.assertEqual(dec, json.loads(enc))
def test_encodeUnicode4BytesUTF8Highest(self):
input = 'oTRAILINGNORMAL'
enc = ujson.encode(input)
dec = ujson.decode(enc)
self.assertEqual(enc, json_unicode(input))
self.assertEqual(dec, json.loads(enc))
def testEncodeUnicodeBMP(self):
s = ''
encoded = ujson.dumps(s)
encoded_json = json.dumps(s)
if (len(s) == 4):
self.assertEqual(len(encoded), ((len(s) * 12) + 2))
else:
self.assertEqual(len(encoded), ((len(s) * 6) + 2))
self.assertEqual(encoded, encoded_json)
decoded = ujson.loads(encoded)
self.assertEqual(s, decoded)
encoded = ujson.dumps(s, ensure_ascii=False)
encoded_json = json.dumps(s, ensure_ascii=False)
self.assertEqual(len(encoded), (len(s) + 2))
self.assertEqual(encoded, encoded_json)
decoded = ujson.loads(encoded)
self.assertEqual(s, decoded)
def testEncodeSymbols(self):
s = ''
encoded = ujson.dumps(s)
encoded_json = json.dumps(s)
self.assertEqual(len(encoded), ((len(s) * 6) + 2))
self.assertEqual(encoded, encoded_json)
decoded = ujson.loads(encoded)
self.assertEqual(s, decoded)
encoded = ujson.dumps(s, ensure_ascii=False)
encoded_json = json.dumps(s, ensure_ascii=False)
self.assertEqual(len(encoded), (len(s) + 2))
self.assertEqual(encoded, encoded_json)
decoded = ujson.loads(encoded)
self.assertEqual(s, decoded)
def test_encodeArrayInArray(self):
input = [[[[]]]]
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeIntConversion(self):
input = 31337
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeIntNegConversion(self):
input = (- 31337)
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeLongNegConversion(self):
input = (- )
output = ujson.encode(input)
json.loads(output)
ujson.decode(output)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeListConversion(self):
input = [1, 2, 3, 4]
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(input, ujson.decode(output))
def test_encodeDictConversion(self):
input = {'k1': 1, 'k2': 2, 'k3': 3, 'k4': 4}
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(input, ujson.decode(output))
self.assertEqual(input, ujson.decode(output))
def test_encodeNoneConversion(self):
input = None
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeTrueConversion(self):
input = True
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeFalseConversion(self):
input = False
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeToUTF8(self):
input = b'\xe6\x97\xa5\xd1\x88'
input = input.decode('utf-8')
enc = ujson.encode(input, ensure_ascii=False)
dec = ujson.decode(enc)
self.assertEqual(enc, json.dumps(input, ensure_ascii=False))
self.assertEqual(dec, json.loads(enc))
def test_decodeFromUnicode(self):
input = '{"obj": 31337}'
dec1 = ujson.decode(input)
dec2 = ujson.decode(str(input))
self.assertEqual(dec1, dec2)
def test_encodeRecursionMax(self):
class O2():
member = 0
def toDict(self):
return {'member': self.member}
class O1():
member = 0
def toDict(self):
return {'member': self.member}
input = O1()
input.member = O2()
input.member.member = input
self.assertRaises(OverflowError, ujson.encode, input)
def test_encodeDoubleNan(self):
input = float('nan')
self.assertRaises(OverflowError, ujson.encode, input)
def test_encodeDoubleInf(self):
input = float('inf')
self.assertRaises(OverflowError, ujson.encode, input)
def test_encodeDoubleNegInf(self):
input = (- float('inf'))
self.assertRaises(OverflowError, ujson.encode, input)
def test_encodeOrderedDict(self):
from collections import OrderedDict
input = OrderedDict([(1, 1), (0, 0), (8, 8), (2, 2)])
self.assertEqual('{"1":1,"0":0,"8":8,"2":2}', ujson.encode(input))
def test_decodeJibberish(self):
input = 'fdsa sda v9sa fdsa'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeBrokenArrayStart(self):
input = '['
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeBrokenObjectStart(self):
input = '{'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeBrokenArrayEnd(self):
input = ']'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeArrayDepthTooBig(self):
input = ('[' * (1024 * 1024))
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeBrokenObjectEnd(self):
input = '}'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeObjectTrailingCommaFail(self):
input = '{"one":1,}'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeObjectDepthTooBig(self):
input = ('{' * (1024 * 1024))
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeStringUnterminated(self):
input = '"TESTING'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeStringUntermEscapeSequence(self):
input = '"TESTING\\"'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeStringBadEscape(self):
input = '"TESTING\\"'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeTrueBroken(self):
input = 'tru'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeFalseBroken(self):
input = 'fa'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeNullBroken(self):
input = 'n'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeBrokenDictKeyTypeLeakTest(self):
input = '{{1337:""}}'
for x in range(1000):
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeBrokenDictLeakTest(self):
input = '{{"key":"}'
for x in range(1000):
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeBrokenListLeakTest(self):
input = '[[[true'
for x in range(1000):
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeDictWithNoKey(self):
input = '{{{{31337}}}}'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeDictWithNoColonOrValue(self):
input = '{{{{"key"}}}}'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeDictWithNoValue(self):
input = '{{{{"key":}}}}'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeNumericIntPos(self):
input = '31337'
self.assertEqual(31337, ujson.decode(input))
def test_decodeNumericIntNeg(self):
input = '-31337'
self.assertEqual((- 31337), ujson.decode(input))
def test_encodeUnicode4BytesUTF8Fail(self):
input = b'\xfd\xbf\xbf\xbf\xbf\xbf'
self.assertRaises(OverflowError, ujson.encode, input)
def test_encodeNullCharacter(self):
input = '31337 \x00 1337'
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
input = '\x00'
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
self.assertEqual('" \\u0000\\r\\n "', ujson.dumps(' \x00\r\n '))
def test_decodeNullCharacter(self):
input = '"31337 \\u0000 31337"'
self.assertEqual(ujson.decode(input), json.loads(input))
def test_encodeListLongConversion(self):
input = [, , , , , ]
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(input, ujson.decode(output))
def test_encodeListLongUnsignedConversion(self):
input = [, , ]
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(input, ujson.decode(output))
def test_encodeLongConversion(self):
input =
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_encodeLongUnsignedConversion(self):
input =
output = ujson.encode(input)
self.assertEqual(input, json.loads(output))
self.assertEqual(output, json.dumps(input))
self.assertEqual(input, ujson.decode(output))
def test_numericIntExp(self):
input = '1337E40'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_numericIntFrcExp(self):
input = '1.337E40'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_decodeNumericIntExpEPLUS(self):
input = '1337E+9'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_decodeNumericIntExpePLUS(self):
input = '1.337e+40'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_decodeNumericIntExpE(self):
input = '1337E40'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_decodeNumericIntExpe(self):
input = '1337e40'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_decodeNumericIntExpEMinus(self):
input = '1.337E-4'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_decodeNumericIntExpeMinus(self):
input = '1.337e-4'
output = ujson.decode(input)
self.assertEqual(output, json.loads(input))
def test_dumpToFile(self):
f = StringIO()
ujson.dump([1, 2, 3], f)
self.assertEqual('[1,2,3]', f.getvalue())
def test_dumpToFileLikeObject(self):
class filelike():
def __init__(self):
self.bytes = ''
def write(self, bytes):
self.bytes += bytes
f = filelike()
ujson.dump([1, 2, 3], f)
self.assertEqual('[1,2,3]', f.bytes)
def test_dumpFileArgsError(self):
self.assertRaises(TypeError, ujson.dump, [], '')
def test_loadFile(self):
f = StringIO('[1,2,3,4]')
self.assertEqual([1, 2, 3, 4], ujson.load(f))
def test_loadFileLikeObject(self):
class filelike():
def read(self):
try:
self.end
except AttributeError:
self.end = True
return '[1,2,3,4]'
f = filelike()
self.assertEqual([1, 2, 3, 4], ujson.load(f))
def test_loadFileArgsError(self):
self.assertRaises(TypeError, ujson.load, '[]')
def test_encodeNumericOverflow(self):
self.assertRaises(OverflowError, ujson.encode, )
def test_decodeNumberWith32bitSignBit(self):
docs = ('{"id": }', ('{"id": %s}' % (2 ** 31)), ('{"id": %s}' % (2 ** 32)), ('{"id": %s}' % ((2 ** 32) - 1)))
results = (, (2 ** 31), (2 ** 32), ((2 ** 32) - 1))
for (doc, result) in zip(docs, results):
self.assertEqual(ujson.decode(doc)['id'], result)
def test_encodeBigEscape(self):
for x in range(10):
base = 'a'.encode('utf-8')
input = (((base * 1024) * 1024) * 2)
ujson.encode(input)
def test_decodeBigEscape(self):
for x in range(10):
base = 'a'.encode('utf-8')
quote = '"'.encode()
input = ((quote + (((base * 1024) * 1024) * 2)) + quote)
ujson.decode(input)
def test_toDict(self):
d = {'key': 31337}
class DictTest():
def toDict(self):
return d
def __json__(self):
return '"json defined"'
o = DictTest()
output = ujson.encode(o)
dec = ujson.decode(output)
self.assertEqual(dec, d)
def test_object_with_json(self):
output_text = 'this is the correct output'
class JSONTest():
def __json__(self):
return (('"' + output_text) + '"')
d = {u'key': JSONTest()}
output = ujson.encode(d)
dec = ujson.decode(output)
self.assertEqual(dec, {u'key': output_text})
def test_object_with_json_unicode(self):
output_text = u'this is the correct output'
class JSONTest():
def __json__(self):
return ((u'"' + output_text) + u'"')
d = {u'key': JSONTest()}
output = ujson.encode(d)
dec = ujson.decode(output)
self.assertEqual(dec, {u'key': output_text})
def test_object_with_complex_json(self):
obj = {u'foo': [u'bar', u'baz']}
class JSONTest():
def __json__(self):
return ujson.encode(obj)
d = {u'key': JSONTest()}
output = ujson.encode(d)
dec = ujson.decode(output)
self.assertEqual(dec, {u'key': obj})
def test_object_with_json_type_error(self):
for return_value in (None, 1234, 12.34, True, {}):
class JSONTest():
def __json__(self):
return return_value
d = {u'key': JSONTest()}
self.assertRaises(TypeError, ujson.encode, d)
def test_object_with_json_attribute_error(self):
class JSONTest():
def __json__(self):
raise AttributeError
d = {u'key': JSONTest()}
self.assertRaises(AttributeError, ujson.encode, d)
def test_decodeArrayTrailingCommaFail(self):
input = '[31337,]'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeArrayLeadingCommaFail(self):
input = '[,31337]'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeArrayOnlyCommaFail(self):
input = '[,]'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeArrayUnmatchedBracketFail(self):
input = '[]]'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeArrayEmpty(self):
input = '[]'
obj = ujson.decode(input)
self.assertEqual([], obj)
def test_decodeArrayOneItem(self):
input = '[31337]'
ujson.decode(input)
def test_decodeLongUnsignedValue(self):
input = ''
ujson.decode(input)
def test_decodeBigValue(self):
input = ''
ujson.decode(input)
def test_decodeSmallValue(self):
input = '-'
ujson.decode(input)
def test_decodeTooBigValue(self):
input = ''
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeTooSmallValue(self):
input = '-'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeVeryTooBigValue(self):
input = ''
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeVeryTooSmallValue(self):
input = '-'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeWithTrailingWhitespaces(self):
input = '{}\n\t '
ujson.decode(input)
def test_decodeWithTrailingNonWhitespaces(self):
input = '{}\n\t a'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeArrayWithBigInt(self):
input = '[]'
self.assertRaises(ValueError, ujson.decode, input)
def test_decodeFloatingPointAdditionalTests(self):
self.assertAlmostEqual((- 1.), ujson.loads('-1.'))
self.assertAlmostEqual((- 1.), ujson.loads('-1.'))
self.assertAlmostEqual((- 1.), ujson.loads('-1.'))
self.assertAlmostEqual((- 1.4567893), ujson.loads('-1.4567893'))
self.assertAlmostEqual((- 1.567893), ujson.loads('-1.567893'))
self.assertAlmostEqual((- 1.67893), ujson.loads('-1.67893'))
self.assertAlmostEqual((- 1.7894), ujson.loads('-1.7894'))
self.assertAlmostEqual((- 1.893), ujson.loads('-1.893'))
self.assertAlmostEqual((- 1.3), ujson.loads('-1.3'))
self.assertAlmostEqual(1., ujson.loads('1.'))
self.assertAlmostEqual(1., ujson.loads('1.'))
self.assertAlmostEqual(1., ujson.loads('1.'))
self.assertAlmostEqual(1.4567893, ujson.loads('1.4567893'))
self.assertAlmostEqual(1.567893, ujson.loads('1.567893'))
self.assertAlmostEqual(1.67893, ujson.loads('1.67893'))
self.assertAlmostEqual(1.7894, ujson.loads('1.7894'))
self.assertAlmostEqual(1.893, ujson.loads('1.893'))
self.assertAlmostEqual(1.3, ujson.loads('1.3'))
def test_ReadBadObjectSyntax(self):
input = '{"age", 44}'
self.assertRaises(ValueError, ujson.decode, input)
def test_ReadTrue(self):
self.assertEqual(True, ujson.loads('true'))
def test_ReadFalse(self):
self.assertEqual(False, ujson.loads('false'))
def test_ReadNull(self):
self.assertEqual(None, ujson.loads('null'))
def test_WriteTrue(self):
self.assertEqual('true', ujson.dumps(True))
def test_WriteFalse(self):
self.assertEqual('false', ujson.dumps(False))
def test_WriteNull(self):
self.assertEqual('null', ujson.dumps(None))
def test_ReadArrayOfSymbols(self):
self.assertEqual([True, False, None], ujson.loads(' [ true, false,null] '))
def test_WriteArrayOfSymbolsFromList(self):
self.assertEqual('[true,false,null]', ujson.dumps([True, False, None]))
def test_WriteArrayOfSymbolsFromTuple(self):
self.assertEqual('[true,false,null]', ujson.dumps((True, False, None)))
def test_encodingInvalidUnicodeCharacter(self):
s = '\udc7f'
self.assertRaises(UnicodeEncodeError, ujson.dumps, s)
def test_sortKeys(self):
data = {'a': 1, 'c': 1, 'b': 1, 'e': 1, 'f': 1, 'd': 1}
sortedKeys = ujson.dumps(data, sort_keys=True)
self.assertEqual(sortedKeys, '{"a":1,"b":1,"c":1,"d":1,"e":1,"f":1}')
((not (hasattr(sys, 'getrefcount') == True)), reason='test requires sys.refcount')
def test_does_not_leak_dictionary_values(self):
import gc
gc.collect()
value = ['abc']
data = {'1': value}
ref_count = sys.getrefcount(value)
ujson.dumps(data)
self.assertEqual(ref_count, sys.getrefcount(value))
((not (hasattr(sys, 'getrefcount') == True)), reason='test requires sys.refcount')
def test_does_not_leak_dictionary_keys(self):
import gc
gc.collect()
key1 = '1'
key2 = '1'
value1 = ['abc']
value2 = [1, 2, 3]
data = {key1: value1, key2: value2}
ref_count1 = sys.getrefcount(key1)
ref_count2 = sys.getrefcount(key2)
ujson.dumps(data)
self.assertEqual(ref_count1, sys.getrefcount(key1))
self.assertEqual(ref_count2, sys.getrefcount(key2))
# NOTE(review): stripped skipif decorator residue restored as unittest.skipIf.
@unittest.skipIf(not hasattr(sys, 'getrefcount'), 'test requires sys.getrefcount')
def test_does_not_leak_dictionary_string_key(self):
    """Serialising a dict with a str key must not leak a key reference."""
    import gc
    gc.collect()
    key1 = '1'
    value1 = 1
    data = {key1: value1}
    ref_count1 = sys.getrefcount(key1)
    ujson.dumps(data)
    self.assertEqual(ref_count1, sys.getrefcount(key1))
# NOTE(review): stripped skipif decorator residue restored as unittest.skipIf.
@unittest.skipIf(not hasattr(sys, 'getrefcount'), 'test requires sys.getrefcount')
def test_does_not_leak_dictionary_tuple_key(self):
    """Serialising a dict with a tuple key must not leak a key reference."""
    import gc
    gc.collect()
    key1 = ('a',)
    value1 = 1
    data = {key1: value1}
    ref_count1 = sys.getrefcount(key1)
    ujson.dumps(data)
    self.assertEqual(ref_count1, sys.getrefcount(key1))
# NOTE(review): stripped skipif decorator residue restored as unittest.skipIf.
@unittest.skipIf(not hasattr(sys, 'getrefcount'), 'test requires sys.getrefcount')
def test_does_not_leak_dictionary_bytes_key(self):
    """Serialising a dict with a bytes key must not leak a key reference."""
    import gc
    gc.collect()
    key1 = b'1'
    value1 = 1
    data = {key1: value1}
    ref_count1 = sys.getrefcount(key1)
    ujson.dumps(data)
    self.assertEqual(ref_count1, sys.getrefcount(key1))
# NOTE(review): stripped skipif decorator residue restored as unittest.skipIf.
@unittest.skipIf(not hasattr(sys, 'getrefcount'), 'test requires sys.getrefcount')
def test_does_not_leak_dictionary_None_key(self):
    """Serialising a dict with a None key must not leak a key reference."""
    import gc
    gc.collect()
    key1 = None
    value1 = 1
    data = {key1: value1}
    ref_count1 = sys.getrefcount(key1)
    ujson.dumps(data)
    self.assertEqual(ref_count1, sys.getrefcount(key1))
class OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Generated Highcharts wrapper for
    plotOptions.bar.sonification.defaultInstrumentOptions.mapping.pan.

    Each option is a getter/setter pair sharing the same name; getters call
    ``_config_get`` with the option's default, setters write via ``_config``.
    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped (decompiled source), so as written each second ``def``
    shadows the first -- confirm against the code generator's output.
    """

    def mapFunction(self):
        # Getter; no documented default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def tools_logs(response):
    """Render the site debug log on the tools page (best effort)."""
    try:
        with open(f'{settings.MEDIA_ROOT}/debug.log', 'r') as logfile:
            log_text = logfile.read()
    except Exception:
        # Deliberate best-effort: show a placeholder rather than fail the page.
        log_text = "Error: can't fetch log file. Maybe nothing got logged yet?"
        logger.exception("Error: can't fetch log file.")
    return render(response, 'main/tools_logs.html', {'logs': log_text})
def main():
    """Build the spock config, run the demo network on random data, print it."""
    description = 'spock Basic Tutorial'
    config = (
        SpockBuilder(ModelConfig, desc=description, create_save_path=True)
        .save(file_extension='.toml')
        .generate()
    )
    basic_nn = BasicNet(model_config=config.ModelConfig)
    test_data = torch.rand(10, config.ModelConfig.n_features)
    print(basic_nn(test_data))
class Translate():
    """Thin client for Google Translate's unofficial mobile endpoints.

    Wraps a ``requests.Session`` carrying a randomised GoogleTranslate-app
    User-Agent and exposes language detection, translation and text-to-speech.
    Every network call retries up to three times with a linear back-off when
    the service answers HTTP 429 (rate limited).
    """

    def __init__(self, target: str='zh-CN', source: str='auto', fmt='html', user_agent: str=None, domain: str='com', proxies: Dict=None, timeout: int=None):
        self.target = target      # default target language code
        self.source = source      # default source language ('auto' = detect)
        self.fmt = fmt            # 'html' or 'text'
        self.timeout = timeout    # default per-request timeout
        if user_agent is None:
            # NOTE(review): this f-string was garbled in the source
            # (``random.randint(, )`` with no arguments and doubled braces);
            # rebuilt to emit a randomised mobile-app UA -- TODO confirm the
            # exact numeric ranges against the upstream project.
            user_agent = (
                f'GoogleTranslate/6.{random.randint(10, 100)}.0.06.{random.randint(111111111, 999999999)}'
                f' (Linux; U; Android {random.randint(5, 11)};'
                f' {base64.b64encode(str(random.random())[2:].encode()).decode()}) '
            )
        self.session = requests.Session()
        self.session.headers = {'User-Agent': user_agent}
        # NOTE(review): original read ``(' + domain)`` -- the host literal was
        # truncated in this source; reconstructed as the translate host plus
        # the configurable top-level domain. Verify scheme/host upstream.
        self.BASE_URL: str = ('https://translate.google.' + domain)
        self.LANGUAGE_URL: str = f'{self.BASE_URL}/translate_a/l'
        self.DETECT_URL: str = f'{self.BASE_URL}/translate_a/single'
        self.TRANSLATE_URL: str = f'{self.BASE_URL}/translate_a/t'
        self.TTS_URL: str = f'{self.BASE_URL}/translate_tts'
        if proxies is not None:
            # Route everything through the explicit proxies, ignoring env vars.
            self.session.trust_env = False
            self.session.proxies = proxies

    def detect(self, q: str, timeout=...) -> Union[(DetectResponse, Null)]:
        """Detect the language of ``q``.

        Returns a ``DetectResponse`` on success or ``Null`` wrapping the
        failed HTTP response.
        """
        if timeout is ...:
            timeout = self.timeout
        for attempt in range(1, 4):
            response = self.session.post(self.DETECT_URL, params={'dj': 1, 'sl': 'auto', 'ie': 'UTF-8', 'oe': 'UTF-8', 'client': 'at'}, data={'q': q}, timeout=timeout)
            if response.status_code == 429:
                time.sleep(5 * attempt)
                continue
            break
        if response.status_code != 200:
            return Null(response)
        rt = response.json()
        return DetectResponse(language=rt['src'], confidence=rt['confidence'])

    # NOTE(review): two body-less ``def translate`` stubs preceded this method
    # in the source -- residue of stripped ``@overload`` declarations (invalid
    # syntax as written). They were declaration-only and are dropped here.
    def translate(self, q: Union[(str, List[str])], target: str=None, source: str=None, fmt: str=None, timeout=...) -> Union[(TranslateResponse, List[TranslateResponse], Null)]:
        """Translate a string or a list of strings.

        Returns a single ``TranslateResponse`` for ``str`` input, a list of
        them for list input, or ``Null`` wrapping the failed response.
        """
        # Empty-input fast paths. The original tested ``not q`` first, which
        # made the empty-string branch unreachable; check the string case
        # first so '' yields an empty TranslateResponse as intended.
        if isinstance(q, str):
            if q == '':
                return TranslateResponse('')
        elif not q:
            return []
        if timeout is ...:
            timeout = self.timeout
        for attempt in range(1, 4):
            response = self.__translate(q=q, target=target, source=source, fmt=fmt, v='1.0', timeout=timeout)
            if response.status_code == 429:
                time.sleep(5 * attempt)
                continue
            break
        if response.status_code == 200:
            results = [TranslateResponse(translatedText=item) for item in response.json()]
            if isinstance(q, str):
                return results[0]
            return results
        return Null(response)

    def __translate(self, q: Union[(str, List[str])], target: str=None, source: str=None, fmt: str=None, v: str=None, timeout=...):
        """POST to the translate endpoint, retrying on 429; returns the raw response."""
        if target is None:
            target = self.target
        if source is None:
            source = self.source
        if fmt is None:
            fmt = self.fmt
        if timeout is ...:
            timeout = self.timeout
        for attempt in range(1, 4):
            response = self.session.post(self.TRANSLATE_URL, params={'tl': target, 'sl': source, 'ie': 'UTF-8', 'oe': 'UTF-8', 'client': 'at', 'dj': '1', 'format': fmt, 'v': v}, data={'q': q}, timeout=timeout)
            if response.status_code == 429:
                time.sleep(5 * attempt)
                continue
            break
        return response

    def tts(self, q: str, target: str=None, timeout=...) -> Union[(bytes, Null)]:
        """Fetch text-to-speech audio bytes for ``q``; ``Null`` on failure."""
        if target is None:
            target = self.target
        if timeout is ...:
            timeout = self.timeout
        for attempt in range(1, 4):
            response = self.session.get(self.TTS_URL, params={'ie': 'UTF-8', 'client': 'at', 'tl': target, 'q': q}, timeout=timeout)
            if response.status_code == 429:
                time.sleep(5 * attempt)
                continue
            break
        if response.status_code == 200:
            return response.content
        return Null(response)
def upgrade():
    """Give each ``*_version`` table an owned sequence for transaction_id.

    For every table: create the sequence, seed it from the current maximum
    transaction_id, then make it the column default.
    """
    versioned_tables = (
        'raw_web_pages_version',
        'web_pages_version',
        'rss_parser_feed_name_lut_version',
        'rss_parser_funcs_version',
    )
    for table in versioned_tables:
        seq = f'{table}_transaction_id_seq'
        op.execute(f'CREATE SEQUENCE {seq} OWNED BY {table}.transaction_id;')
        op.execute(f"SELECT setval('{seq}', (SELECT max(transaction_id) FROM {table}));")
        op.execute(f"ALTER TABLE {table} ALTER COLUMN transaction_id SET DEFAULT nextval('{seq}');")
def param_iter(params):
    """Yield ``params`` repeatedly with its single Stepper advanced each time.

    A nested list of parameters is flattened one level first. At most one
    Stepper is allowed; iteration stops when it is exhausted.
    """
    if isinstance(params[0], list):
        # Flatten one level of nesting.
        params = [element for group in params for element in group]
    stepper_positions = [pos for pos, candidate in enumerate(params)
                         if isinstance(candidate, Stepper)]
    if len(stepper_positions) > 1:
        raise ValueError('Iteration is only supported across one parameter at a time.')
    # With no Stepper present the original fell back to index 0; preserved.
    position = stepper_positions[-1] if stepper_positions else 0
    stepper = params[position]
    while True:
        try:
            params[position] = next(stepper)
            yield params
        except StopIteration:
            return
def _find_contract(address: Any) -> Any:
    """Resolve ``address`` and return a known contract for it, if any.

    Checks the in-memory contract map first; if a chainid is configured,
    falls back to constructing a Contract, swallowing resolution failures.
    """
    if address is None:
        return None
    resolved = _resolve_address(address)
    if resolved in _contract_map:
        return _contract_map[resolved]
    if 'chainid' in CONFIG.active_network:
        try:
            from brownie.network.contract import Contract
            return Contract(resolved)
        except (ValueError, CompilerError):
            # Unknown / unverifiable contract: fall through to None.
            pass
    return None
class OptionSeriesHeatmap(Options):
    """Generated Highcharts wrapper for the ``series.heatmap`` options.

    Scalar options are getter/setter pairs sharing one name (getters call
    ``_config_get`` with the Highcharts default, setters write through
    ``_config``); nested option groups return sub-data wrappers via
    ``_config_sub_data``.
    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped (decompiled source), so as written each second ``def``
    shadows the first -- confirm against the code generator's output.
    """

    def accessibility(self) -> 'OptionSeriesHeatmapAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesHeatmapAccessibility)

    def allowPointSelect(self):
        return self._config_get(False)

    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)

    def animation(self):
        return self._config_get(False)

    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    def animationLimit(self):
        return self._config_get(None)

    def animationLimit(self, num: float):
        self._config(num, js_type=False)

    def boostBlending(self):
        return self._config_get('undefined')

    def boostBlending(self, value: Any):
        self._config(value, js_type=False)

    def boostThreshold(self):
        return self._config_get(5000)

    def boostThreshold(self, num: float):
        self._config(num, js_type=False)

    def borderRadius(self):
        return self._config_get(0)

    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def clip(self):
        return self._config_get(True)

    def clip(self, flag: bool):
        self._config(flag, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorAxis(self):
        return self._config_get(0)

    def colorAxis(self, num: float):
        self._config(num, js_type=False)

    def colorIndex(self):
        return self._config_get(None)

    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    def colorKey(self):
        return self._config_get('value')

    def colorKey(self, text: str):
        self._config(text, js_type=False)

    def colsize(self):
        return self._config_get(1)

    def colsize(self, num: float):
        self._config(num, js_type=False)

    def connectEnds(self):
        return self._config_get(None)

    def connectEnds(self, flag: bool):
        self._config(flag, js_type=False)

    def connectNulls(self):
        return self._config_get(False)

    def connectNulls(self, flag: bool):
        self._config(flag, js_type=False)

    def crisp(self):
        return self._config_get(True)

    def crisp(self, flag: bool):
        self._config(flag, js_type=False)

    def cursor(self):
        return self._config_get(None)

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def custom(self):
        return self._config_get(None)

    def custom(self, value: Any):
        self._config(value, js_type=False)

    def dashStyle(self):
        return self._config_get('Solid')

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def data(self) -> 'OptionSeriesHeatmapData':
        return self._config_sub_data('data', OptionSeriesHeatmapData)

    def dataLabels(self) -> 'OptionSeriesHeatmapDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesHeatmapDatalabels)

    def dataSorting(self) -> 'OptionSeriesHeatmapDatasorting':
        return self._config_sub_data('dataSorting', OptionSeriesHeatmapDatasorting)

    def description(self):
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def enableMouseTracking(self):
        return self._config_get(True)

    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)

    def events(self) -> 'OptionSeriesHeatmapEvents':
        return self._config_sub_data('events', OptionSeriesHeatmapEvents)

    def findNearestPointBy(self):
        return self._config_get('x')

    def findNearestPointBy(self, text: str):
        self._config(text, js_type=False)

    def getExtremesFromAll(self):
        return self._config_get(False)

    def getExtremesFromAll(self, flag: bool):
        self._config(flag, js_type=False)

    def id(self):
        return self._config_get(None)

    def id(self, text: str):
        self._config(text, js_type=False)

    def inactiveOtherPoints(self):
        return self._config_get(False)

    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)

    def includeInDataExport(self):
        return self._config_get(None)

    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)

    def index(self):
        return self._config_get(None)

    def index(self, num: float):
        self._config(num, js_type=False)

    def interpolation(self):
        return self._config_get(False)

    def interpolation(self, flag: bool):
        self._config(flag, js_type=False)

    def keys(self):
        return self._config_get(None)

    def keys(self, value: Any):
        self._config(value, js_type=False)

    def label(self) -> 'OptionSeriesHeatmapLabel':
        return self._config_sub_data('label', OptionSeriesHeatmapLabel)

    def legendIndex(self):
        return self._config_get(None)

    def legendIndex(self, num: float):
        self._config(num, js_type=False)

    def legendSymbol(self):
        return self._config_get('rectangle')

    def legendSymbol(self, text: str):
        self._config(text, js_type=False)

    def linecap(self):
        # BUG FIX: the source passed the builtin ``round`` as the default;
        # the Highcharts default for series.linecap is the string 'round'.
        return self._config_get('round')

    def linecap(self, value: Any):
        self._config(value, js_type=False)

    def lineWidth(self):
        return self._config_get(2)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def linkedTo(self):
        return self._config_get(None)

    def linkedTo(self, text: str):
        self._config(text, js_type=False)

    def marker(self) -> 'OptionSeriesHeatmapMarker':
        return self._config_sub_data('marker', OptionSeriesHeatmapMarker)

    def name(self):
        return self._config_get(None)

    def name(self, text: str):
        self._config(text, js_type=False)

    def negativeColor(self):
        return self._config_get(None)

    def negativeColor(self, text: str):
        self._config(text, js_type=False)

    def nullColor(self):
        return self._config_get('#f7f7f7')

    def nullColor(self, text: str):
        self._config(text, js_type=False)

    def onPoint(self) -> 'OptionSeriesHeatmapOnpoint':
        return self._config_sub_data('onPoint', OptionSeriesHeatmapOnpoint)

    def opacity(self):
        return self._config_get(1)

    def opacity(self, num: float):
        self._config(num, js_type=False)

    def point(self) -> 'OptionSeriesHeatmapPoint':
        return self._config_sub_data('point', OptionSeriesHeatmapPoint)

    def pointDescriptionFormat(self):
        return self._config_get(None)

    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)

    def pointDescriptionFormatter(self):
        return self._config_get(None)

    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    def pointInterval(self):
        return self._config_get(1)

    def pointInterval(self, num: float):
        self._config(num, js_type=False)

    def pointIntervalUnit(self):
        return self._config_get(None)

    def pointIntervalUnit(self, value: Any):
        self._config(value, js_type=False)

    def pointPadding(self):
        return self._config_get(0)

    def pointPadding(self, num: float):
        self._config(num, js_type=False)

    def pointPlacement(self):
        return self._config_get(None)

    def pointPlacement(self, text: str):
        self._config(text, js_type=False)

    def pointStart(self):
        return self._config_get(0)

    def pointStart(self, num: float):
        self._config(num, js_type=False)

    def relativeXValue(self):
        return self._config_get(False)

    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)

    def rowsize(self):
        return self._config_get(1)

    def rowsize(self, num: float):
        self._config(num, js_type=False)

    def selected(self):
        return self._config_get(False)

    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    def shadow(self):
        return self._config_get(False)

    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    def showCheckbox(self):
        return self._config_get(False)

    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)

    def showInLegend(self):
        return self._config_get(None)

    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)

    def skipKeyboardNavigation(self):
        return self._config_get(None)

    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)

    def softThreshold(self):
        return self._config_get(True)

    def softThreshold(self, flag: bool):
        self._config(flag, js_type=False)

    def sonification(self) -> 'OptionSeriesHeatmapSonification':
        return self._config_sub_data('sonification', OptionSeriesHeatmapSonification)

    def stacking(self):
        return self._config_get(None)

    def stacking(self, text: str):
        self._config(text, js_type=False)

    def states(self) -> 'OptionSeriesHeatmapStates':
        return self._config_sub_data('states', OptionSeriesHeatmapStates)

    def step(self):
        return self._config_get(None)

    def step(self, value: Any):
        self._config(value, js_type=False)

    def stickyTracking(self):
        return self._config_get(False)

    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)

    def threshold(self):
        return self._config_get(0)

    def threshold(self, num: float):
        self._config(num, js_type=False)

    def tooltip(self) -> 'OptionSeriesHeatmapTooltip':
        return self._config_sub_data('tooltip', OptionSeriesHeatmapTooltip)

    def turboThreshold(self):
        return self._config_get(1000)

    def turboThreshold(self, num: float):
        self._config(num, js_type=False)

    def type(self):
        return self._config_get(None)

    def type(self, text: str):
        self._config(text, js_type=False)

    def visible(self):
        return self._config_get(True)

    def visible(self, flag: bool):
        self._config(flag, js_type=False)

    def xAxis(self):
        return self._config_get(0)

    def xAxis(self, num: float):
        self._config(num, js_type=False)

    def yAxis(self):
        return self._config_get(0)

    def yAxis(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(None)

    def zIndex(self, num: float):
        self._config(num, js_type=False)

    def zoneAxis(self):
        return self._config_get('y')

    def zoneAxis(self, text: str):
        self._config(text, js_type=False)

    def zones(self) -> 'OptionSeriesHeatmapZones':
        return self._config_sub_data('zones', OptionSeriesHeatmapZones)
class PipedQuery(EqlNode):
    """AST node for a base query followed by zero or more pipes."""

    __slots__ = ('first', 'pipes')

    def __init__(self, first, pipes=None):
        self.first = first
        # A falsy ``pipes`` argument (None, empty list) becomes a fresh list.
        self.pipes = pipes if pipes else []

    def _render(self):
        # Render the base query and each pipe, joined by newline + "| ".
        segments = [self.first]
        segments.extend(self.pipes)
        return '\n| '.join(segment.render() for segment in segments)
class GradeSchoolTest(unittest.TestCase):
    """Grade-school roster tests, driven through a small enrolment helper."""

    @staticmethod
    def _enrol(*entries):
        """Return a School with each (name, grade) pair added in order."""
        school = School()
        for name, grade in entries:
            school.add_student(name=name, grade=grade)
        return school

    def test_roster_is_empty_when_no_student_is_added(self):
        self.assertEqual(self._enrol().roster(), [])

    def test_add_a_student(self):
        self.assertEqual(self._enrol(('Aimee', 2)).added(), [True])

    def test_student_is_added_to_the_roster(self):
        self.assertEqual(self._enrol(('Aimee', 2)).roster(), ['Aimee'])

    def test_adding_multiple_students_in_the_same_grade_in_the_roster(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('Paul', 2))
        self.assertEqual(school.added(), [True, True, True])

    def test_multiple_students_in_the_same_grade_are_added_to_the_roster(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('Paul', 2))
        self.assertEqual(school.roster(), ['Blair', 'James', 'Paul'])

    def test_cannot_add_student_to_same_grade_in_the_roster_more_than_once(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('James', 2), ('Paul', 2))
        self.assertEqual(school.added(), [True, True, False, True])

    def test_student_not_added_to_same_grade_in_the_roster_more_than_once(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('James', 2), ('Paul', 2))
        self.assertEqual(school.roster(), ['Blair', 'James', 'Paul'])

    def test_adding_students_in_multiple_grades(self):
        school = self._enrol(('Chelsea', 3), ('Logan', 7))
        self.assertEqual(school.added(), [True, True])

    def test_students_in_multiple_grades_are_added_to_the_roster(self):
        school = self._enrol(('Chelsea', 3), ('Logan', 7))
        self.assertEqual(school.roster(), ['Chelsea', 'Logan'])

    def test_cannot_add_same_student_to_multiple_grades_in_the_roster(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('James', 3), ('Paul', 3))
        self.assertEqual(school.added(), [True, True, False, True])

    def test_student_not_added_to_multiple_grades_in_the_roster(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('James', 3), ('Paul', 3))
        self.assertEqual(school.roster(), ['Blair', 'James', 'Paul'])

    def test_students_are_sorted_by_grades_in_the_roster(self):
        school = self._enrol(('Jim', 3), ('Peter', 2), ('Anna', 1))
        self.assertEqual(school.roster(), ['Anna', 'Peter', 'Jim'])

    def test_students_are_sorted_by_name_in_the_roster(self):
        school = self._enrol(('Peter', 2), ('Zoe', 2), ('Alex', 2))
        self.assertEqual(school.roster(), ['Alex', 'Peter', 'Zoe'])

    def test_students_are_sorted_by_grades_and_then_by_name_in_the_roster(self):
        school = self._enrol(('Peter', 2), ('Anna', 1), ('Barb', 1), ('Zoe', 2),
                             ('Alex', 2), ('Jim', 3), ('Charlie', 1))
        self.assertEqual(school.roster(),
                         ['Anna', 'Barb', 'Charlie', 'Alex', 'Peter', 'Zoe', 'Jim'])

    def test_grade_is_empty_if_no_students_in_the_roster(self):
        self.assertEqual(self._enrol().grade(1), [])

    def test_grade_is_empty_if_no_students_in_that_grade(self):
        school = self._enrol(('Peter', 2), ('Zoe', 2), ('Alex', 2), ('Jim', 3))
        self.assertEqual(school.grade(1), [])

    def test_student_not_added_to_same_grade_more_than_once(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('James', 2), ('Paul', 2))
        self.assertEqual(school.grade(2), ['Blair', 'James', 'Paul'])

    def test_student_not_added_to_multiple_grades(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('James', 3), ('Paul', 3))
        self.assertEqual(school.grade(2), ['Blair', 'James'])

    def test_student_not_added_to_other_grade_for_multiple_grades(self):
        school = self._enrol(('Blair', 2), ('James', 2), ('James', 3), ('Paul', 3))
        self.assertEqual(school.grade(3), ['Paul'])

    def test_students_are_sorted_by_name_in_a_grade(self):
        school = self._enrol(('Franklin', 5), ('Bradley', 5), ('Jeff', 1))
        self.assertEqual(school.grade(5), ['Bradley', 'Franklin'])
def test_firedrake_helmholtz_scalar_convergence_on_quadrilaterals():
    """Check helmholtz L2-error convergence rates on quadrilateral meshes.

    NOTE(review): the line above this function was stripped decorator residue
    (``.parametrize(...)`` -- almost certainly ``@pytest.mark.parametrize``);
    the parameter cases are folded into an in-function loop so the file does
    not gain a pytest import.
    """
    cases = [((1, (4, 6)), 1.9), ((2, (3, 6)), 2.9), ((3, (2, 4)), 3.9), ((4, (2, 4)), 4.7)]
    for testcase, convrate in cases:
        (degree, (start, end)) = testcase
        l2err = np.zeros(end - start)
        for ii in range(start, end):
            l2err[ii - start] = helmholtz(ii, quadrilateral=True, degree=degree)[0]
        # Observed convergence rate between successive refinements.
        rates = np.array([np.log2(l2err[i] / l2err[i + 1]) for i in range(len(l2err) - 1)])
        assert (rates > convrate).all()
class OptionSeriesSankeyDataDatalabelsFilter(Options):
    """Generated Highcharts wrapper for series.sankey.data.dataLabels.filter.

    Each option is a getter/setter pair sharing the same name.
    NOTE(review): the ``@property`` / setter decorators appear stripped
    (decompiled source); the second ``def`` shadows the first as written --
    confirm against the code generator's output.
    """

    def operator(self):
        # Getter; no documented default.
        return self._config_get(None)

    def operator(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def property(self):
        return self._config_get(None)

    def property(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the next three lines look like decorator residue with the '@'
# (and, for the first, the decorator target such as a blueprint's ``.route``)
# stripped during decompilation. As written the first is a syntax error
# (keyword argument inside a parenthesised expression) and the other two
# execute once at import time instead of wrapping the view -- restore the
# original decorators from the upstream project.
('/mod_moderator/<moderator:moderator>/role_add', methods=['POST'])
_role_restrict(roles.ROLE_ADMIN)
_token()
def mod_moderator_role_add(moderator):
    """Admin view: attach a role to *moderator*, then redirect back."""
    role = request.form['role']
    if (not moderator_service.role_exists(role)):
        flash('That role does not exist')
    else:
        try:
            moderator_service.add_role(moderator, role)
            flash('Role added')
            # Audit-log the role grant.
            mod_log('add role {} to {}'.format(role, moderator.username))
        except ArgumentError as e:
            flash(e.message)
    return redirect(url_for('.mod_moderator', moderator=moderator))
class DemoUCR(object):
    """Demonstrate under-colour removal: hex -> rgb -> linear RGB -> CMY ->
    CMYK -> companded cmyk, with a printable summary of every stage."""

    def __init__(self, hex_triple):
        self.hex_triple = hex_triple

    def calculate(self):
        """Populate every intermediate colour representation in order."""
        self.rgb = self.get_rgb()
        self.RGB = self.get_RGB()
        self.CMY = self.get_CMY()
        self.CMYK = self.get_CMYK()
        self.cmyk = self.get_cmyk()

    def get_rgb(self):
        return hex2rgb(self.hex_triple)

    def get_RGB(self):
        # Linearise each companded channel.
        return tuple(uncompand(channel) for channel in self.rgb)

    def get_CMY(self):
        cyan, magenta, yellow = (1.0 - channel for channel in self.RGB)
        return (cyan, magenta, yellow)

    def get_CMYK(self):
        # Black is the smallest CMY component, capped at 1.0.
        black = min(*self.CMY, 1.0)
        cyan, magenta, yellow = self.CMY
        if black == 1:
            cyan = magenta = yellow = 0
        else:
            scale = 1 - black
            cyan = (cyan - black) / scale
            magenta = (magenta - black) / scale
            yellow = (yellow - black) / scale
        return (cyan, magenta, yellow, black)

    def get_cmyk(self):
        # Re-compand each channel for display.
        return tuple(compand(channel) for channel in self.CMYK)

    def stringify_demo_values(self):
        self.calculate()
        return ('\n \n %(hex_triple)s > rgb%(rgb)s\n > RGB%(RGB)s\n > CMY%(CMY)s\n > CMYK%(CMYK)s\n > cmyk%(cmyk)s\n \n ' % self.__dict__)

    def __str__(self):
        return self.stringify_demo_values()
def evaluate_model(model, dev_X, dev_Y, batch_size):
    """Return accuracy: the fraction of dev examples whose argmax prediction
    matches the argmax of the gold label."""
    n_correct = 0.0
    n_total = 0.0
    for batch_X, batch_Y in model.ops.multibatch(batch_size, dev_X, dev_Y):
        predictions = model.predict(batch_X)
        for guess, truth in zip(predictions, batch_Y):
            # Bool comparison result accumulates as 0.0/1.0.
            n_correct += (guess.argmax(axis=0) == truth.argmax(axis=0))
        n_total += len(batch_Y)
    return float(n_correct / n_total)
def test_mesh_tolerance():
    """A point just outside the unit square is still located once the parent
    mesh tolerance is raised."""
    parent_mesh = UnitSquareMesh(100, 100, quadrilateral=True)
    sample_points = [[0.1, 0.1], [0.2, 0.2], [1.1, 1.0]]
    print(parent_mesh.tolerance)
    parent_mesh.tolerance = 20.0
    vom = VertexOnlyMesh(parent_mesh, sample_points)
    space = FunctionSpace(parent_mesh, 'CG', 2)
    # Point-evaluation at the out-of-domain point must not raise.
    Function(space).at((1.1, 1.0))
    assert vom
class OptionSeriesVennOnpointPosition(Options):
    """Generated Highcharts wrapper for series.venn.onPoint.position.

    Each option is a getter/setter pair sharing the same name.
    NOTE(review): the ``@property`` / setter decorators appear stripped
    (decompiled source); the second ``def`` shadows the first as written --
    confirm against the code generator's output.
    """

    def offsetX(self):
        # Getter; no documented default.
        return self._config_get(None)

    def offsetX(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def offsetY(self):
        return self._config_get(None)

    def offsetY(self, num: float):
        self._config(num, js_type=False)

    def x(self):
        return self._config_get(None)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get(None)

    def y(self, num: float):
        self._config(num, js_type=False)
class Account():
_keys = keys
_default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
_use_unaudited_hdwallet_features = False
def enable_unaudited_hdwallet_features(cls):
cls._use_unaudited_hdwallet_features = True
def create(self, extra_entropy=''):
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak((os.urandom(32) + extra_key_bytes))
return self.from_key(key_bytes)
def decrypt(keyfile_json, password):
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError('The keyfile should be supplied as a JSON string, or a dictionary.')
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
def encrypt(cls, private_key, password, kdf=None, iterations=None):
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if (kdf is None):
kdf = cls._default_kdf
password_bytes = text_if_str(to_bytes, password)
assert (len(key_bytes) == 32)
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
def from_key(self, private_key):
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
def from_mnemonic(self, mnemonic: str, passphrase: str='', account_path: str=ETHEREUM_DEFAULT_PATH) -> LocalAccount:
if (not self._use_unaudited_hdwallet_features):
raise AttributeError('The use of the Mnemonic features of Account is disabled by default until its API stabilizes. To use these features, please enable them by running `Account.enable_unaudited_hdwallet_features()` and try again.')
seed = seed_from_mnemonic(mnemonic, passphrase)
private_key = key_from_seed(seed, account_path)
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
def create_with_mnemonic(self, passphrase: str='', num_words: int=12, language: str='english', account_path: str=ETHEREUM_DEFAULT_PATH) -> Tuple[(LocalAccount, str)]:
if (not self._use_unaudited_hdwallet_features):
raise AttributeError('The use of the Mnemonic features of Account is disabled by default until its API stabilizes. To use these features, please enable them by running `Account.enable_unaudited_hdwallet_features()` and try again.')
mnemonic = generate_mnemonic(num_words, language)
return (self.from_mnemonic(mnemonic, passphrase, account_path), mnemonic)
def recover_message(self, signable_message: SignableMessage, vrs: Optional[Tuple[(VRS, VRS, VRS)]]=None, signature: bytes=None) -> ChecksumAddress:
message_hash = _hash_eip191_message(signable_message)
return cast(ChecksumAddress, self._recover_hash(message_hash, vrs, signature))
def _recover_hash(self, message_hash: Hash32, vrs: Optional[Tuple[(VRS, VRS, VRS)]]=None, signature: bytes=None) -> ChecksumAddress:
hash_bytes = HexBytes(message_hash)
if (len(hash_bytes) != 32):
raise ValueError('The message hash must be exactly 32-bytes')
if (vrs is not None):
(v, r, s) = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif (signature is not None):
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError('You must supply the vrs tuple or the signature bytes')
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return cast(ChecksumAddress, pubkey.to_checksum_address())
def recover_transaction(self, serialized_transaction):
txn_bytes = HexBytes(serialized_transaction)
if ((len(txn_bytes) > 0) and (txn_bytes[0] <= 127)):
typed_transaction = TypedTransaction.from_bytes(txn_bytes)
msg_hash = typed_transaction.hash()
vrs = typed_transaction.vrs()
return self._recover_hash(msg_hash, vrs=vrs)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self._recover_hash(msg_hash, vrs=vrs_from(txn))
def set_key_backend(self, backend):
self._keys = KeyAPI(backend)
def sign_message(self, signable_message: SignableMessage, private_key: Union[(bytes, HexStr, int, keys.PrivateKey)]) -> SignedMessage:
message_hash = _hash_eip191_message(signable_message)
return cast(SignedMessage, self._sign_hash(message_hash, private_key))
def signHash(self, message_hash, private_key):
warnings.warn('signHash is deprecated in favor of sign_message', category=DeprecationWarning, stacklevel=2)
return self._sign_hash(message_hash, private_key)
def _sign_hash(self, message_hash: Hash32, private_key: Union[(bytes, HexStr, int, keys.PrivateKey)]) -> SignedMessage:
msg_hash_bytes = HexBytes(message_hash)
if (len(msg_hash_bytes) != 32):
raise ValueError('The message hash must be exactly 32-bytes')
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return SignedMessage(messageHash=msg_hash_bytes, r=r, s=s, v=v, signature=HexBytes(eth_signature_bytes))
def sign_transaction(self, transaction_dict, private_key):
    """Sign a transaction dict and return the encoded, signed transaction.

    Raises:
        TypeError: if ``transaction_dict`` is not a mapping, or its 'from'
            field does not match the key's address.
    """
    if not isinstance(transaction_dict, Mapping):
        raise TypeError(f'transaction_dict must be dict-like, got {repr(transaction_dict)}')
    account = self.from_key(private_key)
    # An explicit 'from' must agree with the signing key, and is stripped
    # before signing either way.
    if 'from' not in transaction_dict:
        tx = transaction_dict
    elif transaction_dict['from'] == account.address:
        tx = dissoc(transaction_dict, 'from')
    else:
        raise TypeError(f"from field must match key's {account.address}, but it was {transaction_dict['from']}")
    v, r, s, encoded = sign_transaction_dict(account._key_obj, tx)
    return SignedTransaction(
        rawTransaction=HexBytes(encoded),
        hash=HexBytes(keccak(encoded)),
        r=r,
        s=s,
        v=v,
    )
def _parsePrivateKey(self, key):
    """Coerce ``key`` (PrivateKey, bytes, hex str, or int) into a PrivateKey.

    Raises:
        ValueError: if the key cannot be interpreted as exactly 32 bytes.
    """
    if isinstance(key, self._keys.PrivateKey):
        return key
    try:
        return self._keys.PrivateKey(HexBytes(key))
    except ValidationError as original_exception:
        # Bug fix: ``len(key)`` raised TypeError (masking this ValueError)
        # when ``key`` was an int, which callers' type hints allow.
        size = len(key) if hasattr(key, '__len__') else 'unknown'
        raise ValueError(f'The private key must be exactly 32 bytes long, instead of {size} bytes.') from original_exception
def sign_typed_data(self, private_key: Union[(bytes, HexStr, int, keys.PrivateKey)], domain_data: Dict[(str, Any)]=None, message_types: Dict[(str, Any)]=None, message_data: Dict[(str, Any)]=None, full_message: Dict[(str, Any)]=None) -> SignedMessage:
    """Sign EIP-712 structured data.

    Either supply ``domain_data``/``message_types``/``message_data`` or a
    single ``full_message`` dict; ``encode_typed_data`` accepts both forms.
    """
    signable_message = encode_typed_data(domain_data, message_types, message_data, full_message)
    message_hash = _hash_eip191_message(signable_message)
    return cast(SignedMessage, self._sign_hash(message_hash, private_key))
def generate_peft_config(train_config, kwargs):
    """Build the peft config matching ``train_config.peft_method``.

    The local config dataclass (lora/llama_adapter/prefix) is updated from
    ``kwargs`` and its fields are forwarded to the corresponding ``peft``
    config class.
    """
    configs = (lora_config, llama_adapter_config, prefix_config)
    peft_configs = (LoraConfig, AdaptionPromptConfig, PrefixTuningConfig)
    # Bug fix: str.rstrip('_config') strips a *character set*, not the
    # suffix, so any config whose stem ends in one of "_config"'s letters
    # would be mangled (e.g. 'foo_config' -> 'f').  Remove the suffix exactly.
    suffix = '_config'
    names = tuple(
        (c.__name__[:-len(suffix)] if c.__name__.endswith(suffix) else c.__name__)
        for c in configs
    )
    assert train_config.peft_method in names, f'Peft config not found: {train_config.peft_method}'
    index = names.index(train_config.peft_method)
    config = configs[index]()
    update_config(config, **kwargs)
    params = asdict(config)
    peft_config = peft_configs[index](**params)
    return peft_config
def test_message_type_equality() -> None:
    """Message types 1 and 2 compare equal; types 3 and 4 differ from every other type."""
    assert (MyNumpyMessage == MyNumpyMessage2)
    assert (MyNumpyMessage != MyNumpyMessage3)
    assert (MyNumpyMessage != MyNumpyMessage4)
    assert (MyNumpyMessage2 != MyNumpyMessage3)
    assert (MyNumpyMessage2 != MyNumpyMessage4)
    assert (MyNumpyMessage3 != MyNumpyMessage4)
class CommonSegDatabin(CommonSegTextbin):
    """A raw-binary segment emitted into the .data section."""

    def is_text(self) -> bool:
        # Bug fix: these predicates were missing ``self`` and would raise
        # TypeError when called on an instance.
        return False

    def is_data(self) -> bool:
        return True

    def get_linker_section(self) -> str:
        return '.data'

    def get_section_flags(self) -> Optional[str]:
        # ELF section flags: writable + allocatable.
        return 'wa'

    def split(self, rom_bytes):
        """Write the binary blob and, when there is no sibling segment, a wrapper .s file."""
        if self.rom_end is None:
            log.error(f'segment {self.name} needs to know where it ends; add a position marker [0xDEADBEEF] after it')
        self.write_bin(rom_bytes)
        if self.sibling is None:
            # No sibling segment owns the .s file, so emit it here.
            s_path = self.out_path()
            assert s_path is not None
            s_path.parent.mkdir(parents=True, exist_ok=True)
            with s_path.open('w') as f:
                f.write('.include "macro.inc"\n\n')
                preamble = options.opts.generated_s_preamble
                if preamble:
                    f.write(preamble + '\n')
                self.write_asm_contents(rom_bytes, f)
class Widget(HasTraits):
    """A two-part widget whose ``cost`` tracks the sum of its parts' costs."""

    part1 = Instance(Part)
    part2 = Instance(Part)
    cost = Float(0.0)

    def __init__(self):
        # Bug fix: HasTraits.__init__ was never invoked, skipping the traits
        # machinery's own initialization.
        super().__init__()
        self.part1 = Part()
        self.part2 = Part()
        # Recompute the total whenever either part's cost changes.
        self.part1.on_trait_change(self.update_cost, 'cost')
        self.part2.on_trait_change(self.update_cost, 'cost')

    def update_cost(self):
        """Refresh ``cost`` from the two parts."""
        self.cost = (self.part1.cost + self.part2.cost)
class OptionPlotoptionsTimelineSonificationPointgrouping(Options):
    """Generated option wrapper for timeline sonification point grouping.

    NOTE(review): every option below is a getter/setter pair sharing one
    name; without ``@property``/``@<name>.setter`` decorators the second
    ``def`` shadows the first, leaving only the setter bound.  The decorators
    were presumably stripped during extraction — confirm against the
    generator before relying on attribute-style access.
    """
    def algorithm(self):
        # Default grouping algorithm: 'minmax'.
        return self._config_get('minmax')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Grouping enabled by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Default grouping window (units not shown here — TODO confirm).
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Default point property used for grouping: 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
# Fix: the decorator had lost its "@pytest.mark" prefix during extraction —
# a bare ".parametrize(...)" line is a syntax error.
@pytest.mark.parametrize('test_dataset, reference_dataset, test_object, expected_success', ((pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [0, 1, 2, 5], 'target': [0, 0, 0, 1]}), None, TestColumnValueMin(column_name='numerical_feature', gte=10), False), (pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [0, 1, 2, 5], 'target': [0, 0, 0, 1]}), None, TestColumnValueMin(column_name='numerical_feature', eq=0), True), (pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [0.4, 0.1, (- 1.45), 5], 'target': [0, 0, 0, 1]}), None, TestColumnValueMin(column_name='numerical_feature', eq=approx((- 1), absolute=0.5)), True), (pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [10, 7, 5.1, 4.9], 'target': [0, 0, 0, 1]}), None, TestColumnValueMin(column_name='numerical_feature', lt=approx(10, relative=0.5)), True), (pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [10, 7, 5.1, 5], 'target': [0, 0, 0, 1]}), None, TestColumnValueMin(column_name='numerical_feature', lt=approx(10, relative=0.5)), False)))
def test_data_quality_test_min(test_dataset: pd.DataFrame, reference_dataset: pd.DataFrame, test_object: TestColumnValueMin, expected_success: bool) -> None:
    """TestColumnValueMin should succeed/fail according to the parametrized bound."""
    suite = TestSuite(tests=[test_object])
    mapping = ColumnMapping(categorical_features=['category_feature'], numerical_features=['numerical_feature'])
    suite.run(current_data=test_dataset, reference_data=reference_dataset, column_mapping=mapping)
    if expected_success:
        # Surface the underlying error instead of a bare False on failure.
        suite._inner_suite.raise_for_error()
    assert (bool(suite) is expected_success)
class TestGenerateKeyWhenAlreadyExists():
    """CLI ``generate-key``: with an existing key file, answering 'n' keeps it
    and 'y' overwrites it with a fresh, loadable key."""
    def setup_class(cls):
        # Run everything from a scratch temp directory.
        cls.runner = CliRunner()
        cls.agent_name = 'myagent'
        cls.cwd = os.getcwd()
        cls.t = tempfile.mkdtemp()
        os.chdir(cls.t)
    def test_fetchai(self):
        # First invocation creates the key file.
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'generate-key', FetchAICrypto.identifier])
        assert (result.exit_code == 0)
        assert Path(FETCHAI_PRIVATE_KEY_FILE).exists()
        # The generated file must hold a loadable key.
        make_crypto(FetchAICrypto.identifier, private_key_path=FETCHAI_PRIVATE_KEY_FILE)
        content = Path(FETCHAI_PRIVATE_KEY_FILE).read_bytes()
        # Declining the overwrite prompt ('n') leaves the file untouched.
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'generate-key', FetchAICrypto.identifier], input='n')
        assert (result.exit_code == 0)
        assert (Path(FETCHAI_PRIVATE_KEY_FILE).read_bytes() == content)
        # Accepting the prompt ('y') replaces the key.
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'generate-key', FetchAICrypto.identifier], input='y')
        assert (result.exit_code == 0)
        assert (Path(FETCHAI_PRIVATE_KEY_FILE).read_bytes() != content)
        make_crypto(FetchAICrypto.identifier, private_key_path=FETCHAI_PRIVATE_KEY_FILE)
    def teardown_class(cls):
        os.chdir(cls.cwd)
        shutil.rmtree(cls.t)
class NotebookJsCell():
    """JavaScript snippets targeting a single Jupyter notebook cell by index."""

    def __init__(self, cell_ref):
        # Index of the cell these snippets operate on.
        self._cell_ref = cell_ref

    def _wrap(self, template):
        # Fill the cell reference into the JS template and wrap it for emission.
        return JsUtils.jsWrap(template % self._cell_ref)

    def run(self):
        """JS that executes the cell."""
        return self._wrap('window.Jupyter.notebook.execute_cells([%s])')

    def hide(self):
        """JS that hides the cell's DOM element."""
        return self._wrap('$(Jupyter.notebook.get_cell(%s).element).hide()')

    def show(self):
        """JS that shows the cell's DOM element."""
        return self._wrap('$(Jupyter.notebook.get_cell(%s).element).show()')
def test_gauss_edge3():
    """Edge quadrature on a cubic: once the order is high enough the result
    must stop changing, so orders 3 and 4 agree."""
    print('3rd Order Polynomial')
    print('Edge')
    gaussEdge.setOrder(1)
    int0_f3 = dot(f3(gaussEdge.points), gaussEdge.weights)
    print(int0_f3)
    gaussEdge.setOrder(2)
    int1_f3 = dot(f3(gaussEdge.points), gaussEdge.weights)
    print(int1_f3)
    gaussEdge.setOrder(3)
    int2_f3 = dot(f3(gaussEdge.points), gaussEdge.weights)
    print(int2_f3)
    gaussEdge.setOrder(4)
    int3_f3 = dot(f3(gaussEdge.points), gaussEdge.weights)
    print(int3_f3)
    # Orders 3 and 4 should both integrate the cubic exactly.
    npt.assert_almost_equal(int2_f3, int3_f3)
class FiltersPanel():
    """Modal 'Filters' form: interface/method text inputs, a type checklist,
    and an exclude toggle.  Tab / Shift-Tab cycle focus between fields."""
    def __init__(self) -> None:
        # Whether the panel is currently shown (toggled externally).
        self.visible = False
        self.interface_textarea = TextArea(multiline=False, style='class:dialog.textarea')
        self.method_textarea = TextArea(multiline=False, style='class:dialog.textarea')
        self.exclude = Checkbox()
        self.type_filter_checkboxes = TypeCheckboxlist()
        # Fixed-width labelled rows inside a padded box.
        float_frame = Box(padding_top=1, padding_left=2, padding_right=2, body=HSplit(padding=1, width=50, align=VerticalAlign.TOP, children=[VSplit(children=[Label('Interface', width=10), self.interface_textarea]), VSplit(children=[Label('Method', width=10), self.method_textarea]), VSplit(children=[Label('Type', width=10, dont_extend_height=False), self.type_filter_checkboxes]), VSplit(children=[Label('Exclude', width=10), self.exclude])]))
        kb = KeyBindings()
        kb.add('tab')(focus_next)
        kb.add('s-tab')(focus_previous)
        self.container = Frame(title='Filters', body=float_frame, style='class:dialog.background', modal=True, key_bindings=kb)
    def filter(self) -> Filter:
        """Build a Filter from the current widget state (exclude is inverted)."""
        return Filter(self.interface_textarea.text, self.method_textarea.text, self.type_filter_checkboxes.current_values, (not self.exclude.checked))
    def __pt_container__(self) -> AnyContainer:
        # prompt_toolkit integration point.
        return self.container
def get_faas_data(context: func.Context, cold_start: bool, trigger_type: Optional[str]) -> dict:
    """Assemble the FaaS metadata dict for an Azure Functions invocation."""
    subscription = get_account_id()
    resource_group = os.environ.get('WEBSITE_RESOURCE_GROUP', None)
    site = os.environ.get('WEBSITE_SITE_NAME', None)
    function_name = context.function_name
    data = {'coldstart': cold_start, 'execution': context.invocation_id}
    if trigger_type:
        data['trigger'] = {'type': trigger_type}
    # The fully-qualified ARM resource id is emitted only when every
    # component is available.
    if subscription and resource_group and site and function_name:
        data['id'] = (
            f'/subscriptions/{subscription}/resourceGroups/{resource_group}/providers/Microsoft.Web/sites/{site}/'
            + f'functions/{function_name}'
        )
    if site and function_name:
        data['name'] = f'{site}/{function_name}'
    return data
class OptionPlotoptionsItemSonificationTracksMappingHighpassResonance(Options):
    """Generated option wrapper for the highpass-filter resonance mapping.

    NOTE(review): each option below is a getter/setter pair sharing one name;
    without ``@property``/``@<name>.setter`` decorators the second ``def``
    shadows the first, leaving only the setter bound.  Decorators were
    presumably stripped during extraction — confirm against the generator.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# Fix: the decorator had lost its "@pytest.mark" prefix during extraction —
# a bare ".parametrize(...)" line is a syntax error.
@pytest.mark.parametrize(('full_type', 'expected_output'), [('foobar 123', False), ('Linux make config build file, ASCII text', True), ('Linux make config build file (old)', True)])
def test_foo1(full_type, expected_output):
    """has_kconfig_type should key off the 'full' string of the file-type analysis."""
    test_file = FileObject()
    test_file.processed_analysis['file_type'] = {'result': {'full': full_type}}
    assert (AnalysisPlugin.has_kconfig_type(test_file) == expected_output)
def fetch_exchange(zone_key1: str, zone_key2: str, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> dict:
    """Compute the live net flow between two zones from SSEN's Orkney feed."""
    sorted_zone_keys = '->'.join(sorted([zone_key1, zone_key2]))
    payload = get_json_data()
    dt = get_datetime()
    # Net flow is the demand not covered by local ANM + non-ANM generation.
    local_generation = payload['Orkney ANM'] + payload['Non-ANM Renewable Generation']
    return {
        'netFlow': payload['Live Demand'] - local_generation,
        'datetime': dt,
        'sortedZoneKeys': sorted_zone_keys,
        'source': 'ssen.co.uk',
    }
class HidePost(MethodView):
    """Moderator action: hide a post; hiding a topic's first post hides the topic."""
    decorators = [login_required]
    def post(self, post_id):
        post = Post.query.filter((Post.id == post_id)).first_or_404()
        if (not Permission(Has('makehidden'), IsAtleastModeratorInForum(forum=post.topic.forum))):
            flash(_('You do not have permission to hide this post'), 'danger')
            return redirect(post.topic.url)
        if post.hidden:
            flash(_('Post is already hidden'), 'warning')
            return redirect(post.topic.url)
        # Capture before hiding so the flash message reflects what happened.
        first_post = post.first_post
        post.hide(current_user)
        post.save()
        if first_post:
            flash(_('Topic hidden'), 'success')
        else:
            flash(_('Post hidden'), 'success')
        # Users without 'viewhidden' can no longer see the topic itself.
        if (post.first_post and (not Permission(Has('viewhidden')))):
            return redirect(post.topic.forum.url)
        return redirect(post.topic.url)
def zernike_radial(n, m, r, cache=None):
    """Evaluate the Zernike radial polynomial R_n^|m|(r).

    Uses direct forms for n == m and n - m == 2 and otherwise the three-term
    recurrence in the azimuthal order that expresses R_p^{q-4} through
    R_p^q and R_p^{q-2}.  ``cache`` (if given) memoizes results under the
    key ('rad', n, m).
    """
    m = abs(m)
    if cache is not None and ('rad', n, m) in cache:
        return cache[('rad', n, m)]
    if n == m:
        # R_n^n(r) = r^n
        value = r ** n
    elif n - m == 2:
        # R_n^{n-2} = n*R_n^n - (n-1)*R_{n-2}^{n-2}
        value = (n * zernike_radial(n, n, r, cache)
                 - (n - 1) * zernike_radial(n - 2, n - 2, r, cache))
    else:
        # General recurrence step: compute R_n^m as R_p^{q-4} with q = m + 4.
        p, q = n, m + 4
        h3 = (-4 * (q - 2) * (q - 3)) / float((p + q - 2) * (p - q + 4))
        h2 = (h3 * (p + q) * (p - q + 2)) / float(4 * (q - 1)) + (q - 2)
        h1 = (q * (q - 1)) / 2.0 - q * h2 + (h3 * (p + q + 2) * (p - q)) / 8.0
        r_squared = zernike_radial(2, 2, r, cache)
        value = (h1 * zernike_radial(p, q, r, cache)
                 + (h2 + h3 / r_squared) * zernike_radial(n, q - 2, r, cache))
    if cache is not None:
        cache[('rad', n, m)] = value
    return value
def main():
    """Build, load, or simulate the Arty design depending on argv."""
    build_dir = 'gateware'
    platform = arty.Platform(variant='a7-35', toolchain='vivado')
    from litex.build.generic_platform import Pins, IOStandard
    # Extra debug output pin on B7.
    platform.add_extension([('do', 0, Pins('B7'), IOStandard('LVCMOS33'))])
    if ('load' in sys.argv[1:]):
        prog = platform.create_programmer()
        prog.load_bitstream((build_dir + '/top.bit'))
        exit()
    if ('sim' in sys.argv[1:]):
        ring = RingSerialCtrl()
        # NOTE(review): `.0 / .0` is 0.0/0.0 and raises ZeroDivisionError at
        # runtime — the 'sys' clock period constant looks mangled; recover
        # the intended value from upstream history before running 'sim'.
        run_simulation(ring, test(), clocks={'sys': (.0 / .0)}, vcd_name='sim.vcd')
        exit()
    design = Tuto(platform)
    platform.build(design, build_dir=build_dir)
class TestSpew(tests.LimitedTestCase):
    """Tests for eventlet.debug.Spew: installation/removal of the trace hook
    and the formatting of traced 'line' events."""
    def setUp(self):
        # Intercept sys.settrace so spew()/unspew() register with this test
        # instead of the real tracer.
        self.orig_trace = sys.settrace
        sys.settrace = self._settrace
        self.tracer = None
    def tearDown(self):
        sys.settrace = self.orig_trace
        sys.stdout = sys.__stdout__
    def _settrace(self, cb):
        # Stand-in for sys.settrace: just record the callback.
        self.tracer = cb
    def test_spew(self):
        debug.spew()
        assert isinstance(self.tracer, debug.Spew)
    def test_unspew(self):
        debug.spew()
        debug.unspew()
        assert (self.tracer is None)
    def test_line(self):
        frame_str = 'f=<frame at'
        sys.stdout = io.StringIO()
        s = debug.Spew()
        f = sys._getframe()
        s(f, 'line', None)
        # The call to s() is one line below where the frame was captured.
        lineno = (f.f_lineno - 1)
        output = sys.stdout.getvalue()
        assert (('%s:%i' % (__name__, lineno)) in output), ("Didn't find line %i in %s" % (lineno, output))
        assert (frame_str in output)
    def test_line_nofile(self):
        # Frames whose globals lack __file__ should print '[unknown]'.
        sys.stdout = io.StringIO()
        s = debug.Spew()
        g = globals().copy()
        del g['__file__']
        f = eval('sys._getframe()', g)
        lineno = f.f_lineno
        s(f, 'line', None)
        output = sys.stdout.getvalue()
        assert (('[unknown]:%i' % lineno) in output), ("Didn't find [unknown]:%i in %s" % (lineno, output))
        assert ('VM instruction #' in output), output
    def test_line_global(self):
        # Globals referenced on the traced line should be shown by name.
        frame_str = 'f=<frame at'
        global GLOBAL_VAR
        sys.stdout = io.StringIO()
        GLOBAL_VAR = debug.Spew()
        f = sys._getframe()
        GLOBAL_VAR(f, 'line', None)
        lineno = (f.f_lineno - 1)
        output = sys.stdout.getvalue()
        assert (('%s:%i' % (__name__, lineno)) in output), ("Didn't find line %i in %s" % (lineno, output))
        assert (frame_str in output)
        assert ('GLOBAL_VAR' in f.f_globals)
        assert ('GLOBAL_VAR=<eventlet.debug.Spew object at' in output)
        del GLOBAL_VAR
    def test_line_novalue(self):
        # show_values=False suppresses the variable dump.
        sys.stdout = io.StringIO()
        s = debug.Spew(show_values=False)
        f = sys._getframe()
        s(f, 'line', None)
        lineno = (f.f_lineno - 1)
        output = sys.stdout.getvalue()
        assert (('%s:%i' % (__name__, lineno)) in output), ("Didn't find line %i in %s" % (lineno, output))
        assert ('f=<frame object at' not in output)
    def test_line_nooutput(self):
        # A trace_names filter that doesn't match this module prints nothing.
        sys.stdout = io.StringIO()
        s = debug.Spew(trace_names=['foo'])
        f = sys._getframe()
        s(f, 'line', None)
        output = sys.stdout.getvalue()
        assert (output == '')
def print_pattern_matches(analysis_context, matches, skip_compliant=False, include_contracts='all', exclude_contracts=None):
    """Pretty-print pattern matches grouped by pattern, colour-coded by type.

    Matches are grouped by pattern, sorted by pattern name, and within a
    pattern sorted by match type; consecutive duplicate messages are
    suppressed.  ``skip_compliant`` drops COMPLIANT matches entirely.
    """
    # Bug fix: the original used a mutable default argument ([]), shared
    # across calls.
    if exclude_contracts is None:
        exclude_contracts = []
    # Hoisted out of the loop: importing per-iteration was wasteful.
    from securify.analyses.patterns.abstract_pattern import PatternMatch, MatchType
    # Colour per match type; unknown types print nothing (as before).
    type_colors = {
        MatchType.COMPLIANT: Color.GREEN,
        MatchType.WARNING: Color.YELLOW,
        MatchType.VIOLATION: Color.RED,
        MatchType.CONFLICT: Color.PURPLE,
    }
    grouped_matches = [(a, list(b)) for (a, b) in groupby(matches, (lambda t: t.pattern))]
    for (pattern, pattern_matches) in sorted(grouped_matches, key=(lambda p: p[0].name)):
        if skip_compliant:
            pattern_matches = [m for m in pattern_matches if (m.type != MatchType.COMPLIANT)]
        if (not pattern_matches):
            continue
        prev_msg = None
        match: PatternMatch
        for match in sorted(pattern_matches, key=(lambda x: x.type.value)):
            msg = format_match(analysis_context, pattern, match, include_contracts=include_contracts, exclude_contracts=exclude_contracts)
            if (msg == prev_msg):
                continue
            color = type_colors.get(match.type)
            if color is not None:
                print_styled(msg, color)
            prev_msg = msg
def push(url: str, taxonomy: Taxonomy, headers: Dict[(str, str)], dry: bool=False, diff: bool=False) -> None:
    """Push every resource type in ``taxonomy`` to the server at ``url``.

    Warns about datasets not referenced by any system.  With ``dry`` set only
    the would-be creates/updates are reported; with ``diff`` set the local
    resources are compared against the server's current state first.
    """
    missing_datasets = get_orphan_datasets(taxonomy)
    if (len(missing_datasets) > 0):
        echo_red('Orphan Dataset Warning: The following datasets are not found referenced on a System')
        for dataset in missing_datasets:
            print(dataset)
    for resource_type in taxonomy.__fields_set__:
        print(('-' * 10))
        print(f'Processing {resource_type} resource(s)...')
        resource_list = getattr(taxonomy, resource_type)
        if (diff or dry):
            # Fetch the server-side versions to classify creates vs updates.
            existing_keys = [resource.fides_key for resource in resource_list]
            server_resource_list = get_server_resources(url, resource_type, existing_keys, headers)
            (create_list, update_list) = sort_create_update(resource_list, server_resource_list, diff)
            if dry:
                echo_results('would create', resource_type, len(create_list))
                echo_results('would update', resource_type, len(update_list))
                continue
        handle_cli_response(api.upsert(headers=headers, resource_type=resource_type, url=url, resources=[loads(resource.json()) for resource in resource_list]), verbose=False)
        echo_results('pushed', resource_type, len(resource_list))
    print(('-' * 10))
def ge(val: Union[(float, int, None)], bound: Union[(float, int)], allow_optional: bool=True) -> None:
    """Validate that ``val`` is greater than or equal to ``bound``.

    A ``None`` value is accepted when ``allow_optional`` is True and rejected
    otherwise; any other value must be at least ``bound``.

    Raises:
        _SpockValueError: on a disallowed ``None`` or a value below ``bound``.
    """
    if val is None:
        if not allow_optional:
            raise _SpockValueError(f'Set value is None and allow_optional is `{allow_optional}`')
        return
    if val < bound:
        raise _SpockValueError(f'Set value `{val}` is not >= given bound value `{bound}`')
class DrawerCirc(Drawer):
    """Drawer that renders the tree in circular (polar) layout.

    Boxes are (r, a, dr, da): inner radius, start angle, and their extents;
    drawable angles are clipped to (-pi, pi).
    """
    TYPE = 'circ'
    def __init__(self, tree, viewport=None, panel=0, zoom=(1, 1), limits=None, collapsed_ids=None, active=None, selected=None, searches=None, layouts=None, tree_style=None, include_props=None, exclude_props=None):
        super().__init__(tree, viewport, panel, zoom, limits, collapsed_ids, active, selected, searches, layouts, tree_style, include_props=include_props, exclude_props=exclude_props)
        # Circular drawing assumes isotropic zoom.
        assert (self.zoom[0] == self.zoom[1]), 'zoom must be equal in x and y'
        if (not limits):
            (self.ymin, self.ymax) = ((- pi), pi)
        # Conversion factor from tree y-units to angle (radians).
        self.dy2da = ((self.ymax - self.ymin) / self.tree.size[1])
    def in_viewport(self, box, pos=None):
        """Return True if ``box`` may be visible in the current viewport."""
        if (not self.viewport):
            return dh.intersects_segment(((- pi), (+ pi)), dh.get_ys(box))
        if ((self.panel == 0) and (pos != 'aligned')):
            return (dh.intersects_box(self.viewport, dh.circumrect(box)) and dh.intersects_segment(((- pi), (+ pi)), dh.get_ys(box)))
        else:
            # Aligned panels only care about the angular overlap.
            return dh.intersects_angles(self.viewport, box)
    def flush_outline(self, minimum_dr=0):
        """Flush the pending outline, clipping its angular span to (-pi, pi)."""
        (r, a, dr, da) = super().flush_outline(minimum_dr)
        (a1, a2) = dh.clip_angles(a, (a + da))
        return Box(r, a1, dr, (a2 - a1))
    def node_size(self, node):
        return Size(node.size[0], (node.size[1] * self.dy2da))
    def content_size(self, node):
        return Size(dist(node), (node.size[1] * self.dy2da))
    def is_small(self, box):
        # Small = the outer arc length, scaled by zoom, is below the
        # collapse threshold.
        z = self.zoom[0]
        (r, a, dr, da) = box
        return ((((r + dr) * da) * z) < self.COLLAPSE_SIZE)
    def get_box(self, element):
        return get_asec(element, self.zoom)
    def draw_lengthline(self, p1, p2, parent_of, style):
        # Only draw when the start angle is within the representable range.
        if ((- pi) <= p1[1] < pi):
            (yield dh.draw_line(dh.cartesian(p1), dh.cartesian(p2), 'lengthline', parent_of, style))
    def draw_childrenline(self, p1, p2, style):
        """Draw the arc joining the first and last child of a node."""
        ((r1, a1), (r2, a2)) = (p1, p2)
        (a1, a2) = dh.clip_angles(a1, a2)
        if (a1 < a2):
            # SVG-style large-arc flag for spans over half a turn.
            is_large = ((a2 - a1) > pi)
            (yield dh.draw_arc(dh.cartesian((r1, a1)), dh.cartesian((r2, a2)), is_large, 'childrenline', style=style))
    def draw_nodedot(self, center, max_size, active_node, style):
        """Draw a node's dot (circle or square) at polar position ``center``."""
        (r, a) = center
        size = min(max_size, style['size'])
        if active_node:
            # Active nodes get a minimum visible size.
            size = max(min(max_size, 4), size)
        if (((- pi) < a < pi) and (size > 0)):
            fill = style['fill']
            nodedot_style = {'fill': fill, 'opacity': style['opacity']}
            if (style['shape'] == 'circle'):
                (yield dh.draw_circle(center, radius=size, circle_type=('nodedot ' + active_node), style=nodedot_style))
            elif (style['shape'] == 'square'):
                # Convert the pixel size into (dr, da) extents at this radius.
                z = self.zoom[0]
                (dr, da) = (((2 * size) / z), ((2 * size) / (z * r)))
                box = Box((r - (dr / 2)), (a - (da / 2)), dr, da)
                (yield dh.draw_rect(box, rect_type=('nodedot ' + active_node), style=nodedot_style))
    def draw_nodebox(self, node, node_id, box, searched_by, style=None):
        (r, a, dr, da) = box
        (a1, a2) = dh.clip_angles(a, (a + da))
        if (a1 < a2):
            (yield dh.draw_nodebox(Box(r, a1, dr, (a2 - a1)), node.name, self.get_popup_props(node), node_id, searched_by, style))
    def draw_collapsed(self, collapsed_node, active_children=TreeActive(0, 0), selected_children=[]):
        """Represent a collapsed subtree as a radial line through the middle
        of its angular span.

        NOTE(review): ``selected_children=[]`` is a shared mutable default —
        it appears unused in this body, but confirm before relying on it.
        """
        (r, a, dr, da) = self.outline
        p1 = (r, (a + (da / 2)))
        p2 = ((r + dr), (a + (da / 2)))
        (yield dh.draw_line(dh.cartesian(p1), dh.cartesian(p2), 'lengthline'))
def main():
    """CLI entry point: regenerate fontTools.unicodedata tables from UCD files."""
    import argparse
    parser = argparse.ArgumentParser(description='Generate fontTools.unicodedata from UCD data files')
    parser.add_argument('--ucd-path', help='Path to local folder containing UCD data files')
    parser.add_argument('-q', '--quiet', action='store_true')
    options = parser.parse_args()
    # Quiet mode suppresses the INFO-level progress messages only.
    log_level = 'INFO' if not options.quiet else 'WARNING'
    logging.basicConfig(level=log_level, format='%(message)s')
    build_ranges('Blocks.txt', local_ucd=options.ucd_path, default='No_Block')
    # Script names are canonicalized through their property-value aliases.
    script_aliases = parse_property_value_aliases('sc', options.ucd_path)
    build_ranges('Scripts.txt', local_ucd=options.ucd_path, default='Unknown', aliases=script_aliases)
    build_ranges('ScriptExtensions.txt', local_ucd=options.ucd_path, is_set=True)
class MdsrModel(PreTrainedModel):
    """MDSR super-resolution network wrapped as a transformers PreTrainedModel.

    A shared residual trunk with per-scale pre-process and upsample branches;
    only ``args.scale`` is used here, so each branch dict holds one entry.
    """
    config_class = MdsrConfig
    def __init__(self, args):
        super(MdsrModel, self).__init__(args)
        self.scale_list = [args.scale]
        self.scale = args.scale
        bam = args.bam
        input_channel = 3
        output_channel = 3
        num_block = 32
        inp = 64
        rgb_range = 255
        res_scale = 0.1
        act = nn.ReLU(True)
        self.head = nn.Sequential(conv(3, inp, input_channel))
        # Per-scale pre-processing: two residual blocks per scale.
        self.pre_process = nn.ModuleDict(([str(scale), nn.Sequential(ResBlock(inp, bam, bias=True, act=act, res_scale=res_scale), ResBlock(inp, bam, bias=True, act=act, res_scale=res_scale))] for scale in self.scale_list))
        # Shared trunk: num_block residual blocks plus a closing conv.
        self.body = nn.Sequential(*[ResBlock(inp, bam, bias=True, act=act, res_scale=res_scale) for _ in range(num_block)])
        self.body.add_module(str(num_block), conv(inp, inp, 3))
        self.upsample = nn.ModuleDict(([str(scale), Upsampler(scale, inp, act=False)] for scale in self.scale_list))
        self.tail = nn.Sequential(conv(inp, 3, output_channel))
        self.sub_mean = MeanShift(rgb_range, sign=(- 1))
        self.add_mean = MeanShift(rgb_range, sign=1)
    def forward(self, x):
        """Run the scale-specific branches around the shared residual trunk."""
        scale_id = str(self.scale)
        x = self.head(x)
        x = self.pre_process[scale_id](x)
        res = self.body(x)
        # Global residual connection around the trunk.
        res += x
        x = self.upsample[scale_id](res)
        x = self.tail(x)
        return x
    def load_state_dict(self, state_dict, strict=True):
        """Copy matching parameters into this model; mismatches and unknown
        keys are tolerated for 'tail' layers only."""
        own_state = self.state_dict()
        for (name, param) in state_dict.items():
            if (name in own_state):
                if isinstance(param, nn.Parameter):
                    param = param.data
                try:
                    own_state[name].copy_(param)
                except Exception:
                    # Size mismatches are only acceptable in the tail.
                    if (name.find('tail') == (- 1)):
                        raise RuntimeError(f'While copying the parameter named {name}, whose dimensions in the model are {own_state[name].size()} and whose dimensions in the checkpoint are {param.size()}.')
            elif strict:
                if (name.find('tail') == (- 1)):
                    raise KeyError(f'unexpected key "{name}" in state_dict')
def load_kubernetes_config(config_file_path: Text=None):
    """Load a kubernetes client configuration, trying in order:

    1. an explicit ``config_file_path`` (failures propagate),
    2. the default kubeconfig,
    3. in-cluster service-account credentials.

    Sets the module-level ``kubernetes_cluster_available`` flag on success
    and leaves it unchanged when no configuration can be loaded (deliberate
    best-effort so callers may run without cluster access).
    """
    global kubernetes_cluster_available
    if config_file_path is not None:
        config.load_kube_config(config_file=config_file_path)
        kubernetes_cluster_available = True
        return
    try:
        config.load_kube_config()
    except Exception:
        # Fall back to in-cluster config; swallow that failure too.
        try:
            config.load_incluster_config()
        except Exception:
            return
    kubernetes_cluster_available = True
# NOTE(review): the two bare string-expression lines below look like mock
# decorators (e.g. "@patch('shutil.rmtree')") whose "@patch"-style prefix
# was lost during extraction — the mock_run/mock_delete parameters match
# them in bottom-up decorator order.  Restore from upstream history.
('shutil.rmtree')
('ciftify.bidsapp.fmriprep_ciftify.run')
def test_ux19_will_ciftify_subject_fmri_will_rerun_if_failed_for_ds005(mock_run, mock_delete, outputdir):
    """An incomplete ciftify_subject_fmri run should be deleted and rerun once."""
    participant_label = '14'
    fmriname = 'task-mixedgamblestask_run-01_desc-preproc'
    # Fake a prior run whose fmri log indicates failure.
    results_dir = fake_complete_ciftify_subject_fmri(outputdir, participant_label, fmriname, fmri_logtxt=incomplete_log_tail, recon_all_logtxt=complete_log_tail)
    uargs = [ds005_bids, outputdir, 'participant', '--participant_label={}'.format(participant_label), '--read-from-derivatives', ds005_derivs, '--rerun-if-incomplete', '--surf-reg', 'FS']
    ret = simple_main_run(uargs)
    call_list = parse_call_list_into_strings(mock_run.call_args_list)
    assert (count_calls_to('ciftify_subject_fmri', call_list, call_contains=fmriname) == 1)
    # The stale results directory must have been removed first.
    assert (mock_delete.call_args_list[0][0][0] == results_dir)
# NOTE(review): the two bare expressions below appear to be view/permission
# decorators whose "@" prefix and leading name were lost during extraction;
# restore them from upstream history before use.
_view(('GET', 'DELETE'))
_data(permissions=(IsSuperAdminOrReadOnly, IsUserTask))
def task_details(request, task_id=None, data=None):
    """Return (GET) or delete (DELETE) a single user task, scoped to the
    request's datacenter."""
    # user_id is currently unused; kept for the unpacking contract.
    (user_id, owner_id, dc_id) = user_owner_dc_ids_from_task_id(task_id)
    res = UserTasks(owner_id).get(task_id)
    # The task must exist and belong to the request's datacenter.
    if ((not res) or (int(dc_id) != request.dc.id)):
        return TaskFailureResponse(request, 'Task does not exist', status=status.HTTP_404_NOT_FOUND)
    if (request.method == 'GET'):
        apiview = res.get('apiview', {})
        apiview['task_id'] = task_id
        return TaskSuccessResponse(request, apiview)
    elif (request.method == 'DELETE'):
        force = ForceSerializer(data=data, default=False).is_true()
        (tid, err) = delete_task(task_id, force=force)
        if err:
            return TaskFailureResponse(request, err, status=status.HTTP_406_NOT_ACCEPTABLE)
        else:
            return TaskSuccessResponse(request, 'Delete pending')
def test_wf1_with_fast_dynamic():
    """Serialize a dynamic sub-workflow in fast-register mode and check the
    generated dynamic job spec and its pyflyte-fast-execute command line.

    NOTE(review): t1/my_subwf/my_wf are used like flytekit tasks/workflows
    (see my_subwf.dispatch_execute below), so their @task/@dynamic/@workflow
    decorators were presumably stripped during extraction — restore from
    upstream before running.
    """
    def t1(a: int) -> str:
        a = (a + 2)
        return ('fast-' + str(a))
    def my_subwf(a: int) -> typing.List[str]:
        s = []
        for i in range(a):
            s.append(t1(a=i))
        return s
    def my_wf(a: int) -> typing.List[str]:
        v = my_subwf(a=a)
        return v
    # Fast-serialization settings point at a pre-uploaded code distribution.
    with context_manager.FlyteContextManager.with_context(context_manager.FlyteContextManager.current_context().with_serialization_settings(flytekit.configuration.SerializationSettings(project='test_proj', domain='test_domain', version='abc', image_config=ImageConfig(Image(name='name', fqn='image', tag='name')), env={}, fast_serialization_settings=FastSerializationSettings(enabled=True, destination_dir='/User/flyte/workflows', distribution_location='s3://my-s3-bucket/fast/123')))) as ctx:
        with context_manager.FlyteContextManager.with_context(ctx.with_execution_state(ctx.execution_state.with_params(mode=ExecutionState.Mode.TASK_EXECUTION))) as ctx:
            input_literal_map = TypeEngine.dict_to_literal_map(ctx, {'a': 5})
            dynamic_job_spec = my_subwf.dispatch_execute(ctx, input_literal_map)
            # a=5 -> five nodes, all backed by the single task t1.
            assert (len(dynamic_job_spec._nodes) == 5)
            assert (len(dynamic_job_spec.tasks) == 1)
            args = ' '.join(dynamic_job_spec.tasks[0].container.args)
            assert args.startswith('pyflyte-fast-execute --additional-distribution s3://my-s3-bucket/fast/123 --dest-dir /User/flyte/workflows')
    # The context stack must be back to its base state.
    assert (context_manager.FlyteContextManager.size() == 1)
def extractSetsunatranslationWordpressCom(item):
    """Map a setsunatranslation.wordpress.com feed item to a release message.

    Returns None for previews/unparseable titles, a release message for
    recognized series, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Tag-based series mapping takes precedence.
    tagmap = [('Harem', 'My Sister is Jealous of Another World Girls~Fun Harem Store Management', 'translated')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    if item['tags'] != ['Light Novel']:
        return False
    # Generic 'Light Novel' posts fall back to title-prefix matching.
    chp_prefixes = [('Harem ', 'My Sister is Jealous of Another World Girls~Fun Harem Store Management', 'translated'), ('Cat ', 'Me and My Beloved Cat (Girlfriend)', 'translated')]
    title_lower = item['title'].lower()
    for prefix, series, tl_type in chp_prefixes:
        if title_lower.startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# Fix: the decorator had lost its "@pytest.mark" prefix during extraction —
# a bare ".parametrize(...)" line is a syntax error (the body already uses
# pytest.raises, so pytest is in scope).
@pytest.mark.parametrize('old, new', [('x = 0.0', 'x = 0.0'), ('x = 1.0', None), ('x = 2.0', None), ('x = 3.0', None), ('x = 4.0', 'x = 4.0'), ('x = 5.0', 'x = 5.0')])
def test_block_replace_forward_node(proc_bar, old, new):
    """After replacing statements 1..3 of the j-loop body, cursors to the
    replaced statements must fail to forward while others still resolve."""
    for_j = _find_stmt(proc_bar, 'for j in _: _').body()
    (bar_new, fwd) = for_j[1:4]._replace([LoopIR.Pass(None, for_j.parent()._node.srcinfo), LoopIR.Pass(None, for_j.parent()._node.srcinfo)])
    old_c = _find_stmt(proc_bar, old)
    if (new is None):
        with pytest.raises(InvalidCursorError, match='node no longer exists'):
            fwd(old_c)
    else:
        bar_new = Cursor.create(bar_new)
        assert (fwd(old_c) == match_pattern(bar_new, new)[0][0])
def check_gen_password(save_stats: dict[(str, Any)]) -> tuple[(dict[(str, Any)], str)]:
    """Return ``(save_stats, password)`` for the save's inquiry code.

    Attempts in order: cached password, the password-refresh endpoint, a
    fresh password request.  When the fresh request also fails, a new
    inquiry code is generated and the whole procedure retries recursively.
    """
    inquiry_code = save_stats['inquiry_code']
    password_refresh_token = save_stats['token']
    info = user_info.UserInfo(inquiry_code)
    password = info.get_password()
    if password:
        # Locally cached password is still usable.
        return (save_stats, password)
    password_refresh_data = get_password_refresh(inquiry_code, password_refresh_token)
    if (password_refresh_data is not None):
        # Refresh succeeded: persist the new password and rotation token.
        info.set_password(password_refresh_data['password'])
        save_stats['token'] = password_refresh_data['passwordRefreshToken']
        return (save_stats, password_refresh_data['password'])
    password_refresh_data = get_password(inquiry_code)
    if (password_refresh_data is None):
        # Inquiry code rejected: generate a new one and start over.
        inquiry_code = get_inquiry_code()
        save_stats['inquiry_code'] = inquiry_code
        return check_gen_password(save_stats)
    if ('accountCode' in password_refresh_data):
        # Server reassigned the account code; adopt it for future calls.
        save_stats['inquiry_code'] = password_refresh_data['accountCode']
        info = user_info.UserInfo(password_refresh_data['accountCode'])
    password_refresh_token = password_refresh_data['passwordRefreshToken']
    save_stats['token'] = password_refresh_token
    info.set_password(password_refresh_data['password'])
    return (save_stats, password_refresh_data['password'])
def test_check_relayed_addr():
    """check_relayed_addr: same-class loopback/private pairs pass, while
    private/loopback, reserved, and unspecified targets are rejected for a
    public sender."""
    public_host = Address('8.8.8.8', 80, 80)
    local_host = Address('127.0.0.1', 80, 80)
    assert check_relayed_addr(local_host, local_host)
    assert (not check_relayed_addr(public_host, local_host))
    private = Address('192.168.1.1', 80, 80)
    assert check_relayed_addr(private, private)
    assert (not check_relayed_addr(public_host, private))
    # 240.0.0.0/4 is a reserved range.
    reserved = Address('240.0.0.1', 80, 80)
    assert (not check_relayed_addr(local_host, reserved))
    assert (not check_relayed_addr(public_host, reserved))
    # 0.0.0.0 is never a valid relayed address.
    unspecified = Address('0.0.0.0', 80, 80)
    assert (not check_relayed_addr(local_host, unspecified))
    assert (not check_relayed_addr(public_host, unspecified))
class OptionPlotoptionsXrangeSonificationTracksActivewhen(Options):
    """Generated option wrapper for the track 'activeWhen' conditions.

    NOTE(review): each option below is a getter/setter pair sharing one name;
    without ``@property``/``@<name>.setter`` decorators the second ``def``
    shadows the first, leaving only the setter bound.  Decorators were
    presumably stripped during extraction — confirm against the generator.
    """
    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
# Fix: the decorator had lost its "@pytest.mark" prefix during extraction —
# a bare ".usefixtures(...)" line is a syntax error.
@pytest.mark.usefixtures('use_tmpdir')
def test_workflow_success():
    """Run a two-job wait workflow to completion and verify its artifacts."""
    WorkflowCommon.createWaitJob()
    external_job = WorkflowJob.from_file(name='EXTERNAL_WAIT', config_file='external_wait_job')
    wait_job = WorkflowJob.from_file(name='WAIT', config_file='wait_job')
    workflow = Workflow.from_file('fast_wait_workflow', SubstitutionList(), {'WAIT': wait_job, 'EXTERNAL_WAIT': external_job})
    assert (len(workflow) == 2)
    workflow_runner = WorkflowRunner(workflow, ert=None)
    assert (not workflow_runner.isRunning())
    with workflow_runner:
        workflow_runner.wait()
    # Both jobs must have started and finished without being cancelled.
    assert os.path.exists('wait_started_0')
    assert (not os.path.exists('wait_cancelled_0'))
    assert os.path.exists('wait_finished_0')
    assert os.path.exists('wait_started_1')
    assert (not os.path.exists('wait_cancelled_1'))
    assert os.path.exists('wait_finished_1')
    assert workflow_runner.workflowResult()
class Outputable(object):
    """Base class for report sections; subclasses override the per-format
    output methods they support."""

    def _unsupported(self, message):
        # Compose the localized "not yet supported" error naming this subclass.
        return NotImplementedError(((message + ' "') + self.__class__.__name__) + '".')

    def output_html(self):
        raise self._unsupported(_('HTML output not yet supported in'))

    def output_json(self):
        raise self._unsupported(_('JSON output not yet supported in'))

    def output_text(self):
        raise self._unsupported(_('Text output not yet supported in'))

    def output_xml(self):
        raise self._unsupported(_('XML output not yet supported in'))
def test_external_pod_security_policy():
    """With PSP creation disabled, the chart still renders RBAC objects granting 'use' on the named external PSP."""
    config = '\nrbac:\n create: true\n serviceAccountName: ""\n\npodSecurityPolicy:\n create: false\n name: "customPodSecurityPolicy"\n'
    rendered = helm_template(config)
    # Both RBAC kinds must be present in the rendered manifests.
    for kind in ('role', 'rolebinding'):
        assert kind in rendered
    expected_rule = {'apiGroups': ['extensions'], 'verbs': ['use'], 'resources': ['podsecuritypolicies'], 'resourceNames': ['customPodSecurityPolicy']}
    # `name` is a module-level constant defined elsewhere in this test module.
    assert rendered['role'][name]['rules'][0] == expected_rule
# NOTE(review): this line looks like a truncated invoke decorator
# (presumably `@task(name='format', help={...})`) — confirm against VCS.
(name='format', help={'check': 'Checks if source is formatted without applying changes'})
def format_(c, check=False):
    """Run isort and black over the project sources.

    When *check* is true both tools only report differences instead of
    rewriting files.
    """
    isort_options = (['--check-only', '--diff'] if check else [])
    _run(c, f"poetry run isort {' '.join(isort_options)} {PYTHON_TARGETS_STR}")
    black_options = (['--diff', '--check'] if check else ['--quiet'])
    _run(c, f"poetry run black {' '.join(black_options)} {PYTHON_TARGETS_STR}")
def load_token() -> tuple[((str | None), (str | None))]:
    """Load the cached (refresh_token, access_token) pair from the token file.

    Returns ``(None, None)`` when the file is missing or empty; the access
    token is ``None`` when only a refresh token was stored.
    """
    lines = _read_token_file(_check_dir_exist() / _TOKEN_FILE)
    if not lines:
        return (None, None)
    access_token = lines[1] if len(lines) > 1 else None
    return (lines[0], access_token)
class PatternMatch():
    """A concrete occurrence of *pattern* in scanned input.

    NOTE(review): `name`/`description`/`severity`/`tags` read like `@property`
    accessors, and the annotated class attributes suggest a dataclass — the
    decorators appear to have been lost in extraction; confirm against VCS.
    """
    # The pattern that matched, how it matched, and arbitrary extra info objects.
    pattern: AbstractPattern
    type: MatchType
    info: list
    def name(self):
        """Name of the underlying pattern."""
        return self.pattern.name
    def description(self):
        """Human-readable description of the underlying pattern."""
        return self.pattern.description
    def severity(self):
        """Severity level of the underlying pattern."""
        return self.pattern.severity
    def tags(self):
        """Tags attached to the underlying pattern."""
        return self.pattern.tags
    def with_info(self, *info):
        """Return a shallow copy of this match with *info* appended (original unchanged)."""
        new = copy.copy(self)
        new.info = (new.info + list(info))
        return new
    def find_info(self, types: Type[T]) -> Iterator[T]:
        """Iterate the attached info objects of the given type(s)."""
        # `of_type` is a project helper performing the isinstance filtering.
        return of_type[types](self.info)
class TestRawUnpacker(TestUnpackerBase):
    """Integration tests for the 'data/raw' carving unpacker plugin."""
    def test_unpacker_selection(self):
        """The RAW plugin is selected for the data/raw MIME type."""
        self.check_unpacker_selection('data/raw', 'RAW')
    def test_extraction(self):
        """Carving raw.bin yields 4 files, incl. one LZMA stream, at the expected offsets."""
        input_file = Path(TEST_DATA_DIR, 'raw.bin')
        (unpacked_files, meta_data) = self.unpacker.extract_files_from_file(str(input_file), self.tmp_dir.name)
        # NOTE: 'seperated' [sic] matches the key the plugin actually emits.
        assert (meta_data['padding seperated sections'] == 3)
        assert (meta_data['LZMA'] == 1)
        assert (len(unpacked_files) == 4)
        # Carved files are named after their byte offset in the input.
        assert (f'{self.tmp_dir.name}/0x2f' in unpacked_files)
        assert (f'{self.tmp_dir.name}/0x8d.lzma' in unpacked_files)
    def test_extraction_encoded(self):
        """Intel-Hex and S-Record sections are recognized and carved out."""
        input_file = Path(TEST_DATA_DIR, 'encoded.bin')
        (unpacked_files, meta_data) = self.unpacker._extract_files_from_file_using_specific_unpacker(str(input_file), self.tmp_dir.name, self.unpacker.unpacker_plugins['data/raw'])
        assert (meta_data['Intel Hex'] == 1)
        assert (meta_data['Motorola S-Record'] == 1)
        assert (f'{self.tmp_dir.name}/0x6.ihex' in unpacked_files)
        assert (f'{self.tmp_dir.name}/0x291f.srec' in unpacked_files)
        assert (len(unpacked_files) == 2)
    def test_extraction_nothing_included(self):
        """A file with no recognizable content produces no carved files."""
        input_file = Path(TEST_DATA_DIR, 'nothing.bin')
        (unpacked_files, _) = self.unpacker.extract_files_from_file(str(input_file), self.tmp_dir.name)
        assert (len(unpacked_files) == 0)
def test_import_unique_trust(raw_trust, raw_domain):
    """Importing one domain plus one trust yields exactly one of each after processing."""
    expected_domain_count = 1
    expected_trust_count = 1
    # Fix: ADDS() was instantiated twice; the first instance was immediately discarded.
    adds = ADDS()
    adds.import_objects([raw_domain, raw_trust])
    adds.process()
    assert (len(adds.domains) == expected_domain_count)
    assert (len(adds.domains[0].Trusts) == expected_trust_count)
def extractCloversNook(item):
    """Parse a Clovers Nook feed item into a release message.

    Returns None for previews / items without volume or chapter info,
    a release message for recognized series, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_series = (
        'A mistaken marriage match: A generation of military counselor',
        'A mistaken marriage match: Record of washed grievances',
        'Three Marriages',
    )
    for series in known_series:
        if series in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
class PadBorderDivisible(aug.Augmentation):
    """Pad the bottom/right of an image so both sides become multiples of a divisor."""

    def __init__(self, size_divisibility: int, pad_mode: str='constant'):
        super().__init__()
        self.size_divisibility = size_divisibility
        self.pad_mode = pad_mode

    def get_transform(self, image: np.ndarray) -> Transform:
        """Return a PadTransform extending (H, W) up to the next multiples of the divisor."""
        assert ((len(image.shape) == 3) and (image.shape[2] in [1, 3])), f'Invalid image shape {image.shape}'
        height, width = image.shape[:2]
        divisor = self.size_divisibility
        # Round each side up to the nearest multiple of the divisor.
        padded_h = int(math.ceil(height / divisor) * divisor)
        padded_w = int(math.ceil(width / divisor) * divisor)
        return PadTransform(0, 0, width, height, padded_w, padded_h, pad_mode=self.pad_mode)
class FileChannel(ChannelInterface):
    """gRPC file-manager channel with per-URL caches and Qt result signals.

    The ``*_threaded`` variants run in the shared thread pool (``self._threads``
    from the base class) and publish results through the signals below.
    """
    # (url, path, result list) after a directory listing.
    listed_path = Signal(str, str, list)
    # (file path, mtime) for each remotely changed file.
    changed_file = Signal(str, float)
    # (url, {grpc package path: package name}) once packages are listed.
    packages = Signal(str, dict)
    # (url) when the package cache for that URL has been filled.
    packages_available = Signal(str)
    # (path, size, mtime, content) after a file read.
    file_content = Signal(str, int, float, str)
    def __init__(self):
        ChannelInterface.__init__(self)
        # Caches keyed by gRPC path/URL; invalidated via clear_cache()/clear_package_cache().
        self._cache_file_content = {}
        self._cache_packages = {}
        self._cache_path = {}
    def clear_cache(self, grpc_path=''):
        """Drop cached entries for *grpc_path*, or all caches when it is empty."""
        if grpc_path:
            try:
                del self._cache_file_content[grpc_path]
            except Exception:
                pass
            try:
                del self._cache_path[grpc_path]
            except Exception:
                pass
            try:
                del self._cache_packages[grpc_path]
            except Exception:
                pass
        else:
            self._cache_file_content.clear()
            self._cache_packages.clear()
            self._cache_path.clear()
    def get_file_manager(self, uri='localhost:12321'):
        """Return (FileStub, channel) for the daemon at *uri*; caller closes the channel."""
        channel = self.get_insecure_channel(uri)
        return (fstub.FileStub(channel), channel)
    def package_name(self, grpc_path):
        """Resolve (package name, package path) for *grpc_path* from the package cache.

        Walks up the directory tree until a cached package root is found;
        returns (None, None) when nothing matches or the cache is cold.
        """
        (uri, _path) = nmdurl.split(grpc_path, with_scheme=True)
        path = grpc_path
        try:
            pl = self._cache_packages[uri]
            while (path and (path != os.path.sep)):
                if (path in pl):
                    return (pl[path], path)
                path = os.path.dirname(path).rstrip(os.path.sep)
        except Exception:
            pass
        return (None, None)
    def get_packages(self, url=''):
        """Return the cached package dict for *url*, or all caches when *url* is empty."""
        if url:
            grpc_url = nmdurl.nmduri(url)
            if (grpc_url in self._cache_packages):
                return self._cache_packages[grpc_url]
            return {}
        return self._cache_packages
    def clear_package_cache(self, url):
        """Forget cached packages for *url* (no-op when *url* is empty or uncached)."""
        if url:
            grpc_url = nmdurl.nmduri(url)
            try:
                del self._cache_packages[grpc_url]
                rospy.logdebug("cache for packages from '%s' removed", grpc_url)
            except KeyError:
                pass
    def list_path_threaded(self, grpc_path='grpc://localhost:12321', clear_cache=False):
        """Asynchronously list *grpc_path*; result arrives via `listed_path`."""
        self._threads.start_thread(('lpt_%s' % grpc_path), target=self._list_path_threaded, args=(grpc_path, clear_cache))
    def _list_path_threaded(self, grpc_path='grpc://localhost:12321', clear_cache=False):
        """Worker: serve the listing from cache or fetch it from the remote daemon."""
        (uri, path) = nmdurl.split(grpc_path)
        rospy.logdebug(("[thread] list path: %s, '%s'" % (uri, path)))
        (fm, channel) = self.get_file_manager(uri)
        result = None
        try:
            if (not clear_cache):
                result = self._cache_path[grpc_path]
            else:
                # NOTE(review): lookup of '' appears to deliberately raise KeyError
                # so the fetch branch below always runs on clear_cache — confirm.
                self._cache_path['']
        except KeyError:
            try:
                result = fm.list_path(path)
                if (uri not in self._cache_packages):
                    self.list_packages_threaded(grpc_path, clear_cache)
            except Exception as e:
                self.error.emit('list_path', ('grpc://%s' % uri), path, e)
        if (result is not None):
            self._cache_path[grpc_path] = result
            self.listed_path.emit(('grpc://%s' % uri), path, result)
        self.close_channel(channel, uri)
        if hasattr(self, '_threads'):
            self._threads.finished(('lpt_%s' % grpc_path))
    def check_for_changed_files_threaded(self, grpc_path_dict):
        """Group {grpc path: mtime} by host and poll each host in its own thread."""
        dests = {}
        for (grpc_path, mtime) in grpc_path_dict.items():
            (uri, path) = nmdurl.split(grpc_path, with_scheme=True)
            if (uri not in dests):
                dests[uri] = {}
            dests[uri][path] = mtime
        for (uri, paths) in dests.items():
            self._threads.start_thread(('cft_%s' % uri), target=self._check_for_changed_files_threaded, args=(uri, paths))
    def _check_for_changed_files_threaded(self, grpc_url, path_dict):
        """Worker: emit `changed_file` for every file whose remote mtime differs."""
        rospy.logdebug(('[thread] check_for_changed_files_threaded: with %d files on %s' % (len(path_dict), grpc_url)))
        (uri, _path) = nmdurl.split(grpc_url, with_scheme=False)
        (fm, channel) = self.get_file_manager(uri)
        try:
            response = fm.changed_files(path_dict)
            for item in response:
                self.changed_file.emit(nmdurl.join(grpc_url, item.path), item.mtime)
        except Exception as e:
            self.error.emit('changed_files', ('grpc://%s' % uri), '', e)
        (url, _path) = nmdurl.split(grpc_url, with_scheme=True)
        self.close_channel(channel, uri)
        if hasattr(self, '_threads'):
            self._threads.finished(('cft_%s' % url))
    def list_packages_threaded(self, grpc_url_or_path='grpc://localhost:12321', clear_ros_cache=False):
        """Asynchronously list packages; results arrive via `packages`/`packages_available`."""
        self._threads.start_thread(('gmt_%s_%d' % (grpc_url_or_path, clear_ros_cache)), target=self._list_packages, args=(grpc_url_or_path, clear_ros_cache))
    def _list_packages(self, grpc_url_or_path='grpc://localhost:12321', clear_ros_cache=False):
        """Worker: serve packages from cache or fetch and re-key them by gRPC URL."""
        (uri, path) = nmdurl.split(grpc_url_or_path)
        grpc_url = ('grpc://%s' % uri)
        result = {}
        try:
            if (not clear_ros_cache):
                result = self._cache_packages[grpc_url]
            else:
                # NOTE(review): same deliberate-KeyError idiom as in _list_path_threaded.
                self._cache_packages['']
        except KeyError:
            rospy.logdebug(('[thread] get packages %s' % grpc_url))
            (fm, channel) = self.get_file_manager(uri)
            try:
                result = fm.list_packages(clear_ros_cache)
                fixed_result = {nmdurl.join(grpc_url, path): name for (path, name) in result.items()}
                self._cache_packages[grpc_url] = fixed_result
                self.packages.emit(grpc_url, fixed_result)
                self.packages_available.emit(grpc_url)
            except Exception as err:
                self.error.emit('_list_packages', ('grpc://%s' % uri), path, err)
            finally:
                self.close_channel(channel, uri)
        if hasattr(self, '_threads'):
            self._threads.finished(('gmt_%s_%d' % (grpc_url_or_path, clear_ros_cache)))
    def get_file_content_threaded(self, grpc_path='grpc://localhost:12321', force=False):
        """Asynchronously read a file; result arrives via `file_content`."""
        self._threads.start_thread(('gfc_%s_%d' % (grpc_path, force)), target=self.get_file_content, args=(grpc_path, force))
    def get_file_content(self, grpc_path='grpc://localhost:12321', force=False):
        """Return (size, mtime, content) for *grpc_path*, cached unless *force*.

        Also emits `file_content`; re-raises fetch errors after emitting `error`.
        """
        (file_size, file_mtime, file_content) = (0, 0, '')
        try:
            if force:
                del self._cache_file_content[grpc_path]
            (file_size, file_mtime, file_content) = self._cache_file_content[grpc_path]
        except KeyError:
            rospy.logdebug(('get file content for %s:' % grpc_path))
            (uri, path) = nmdurl.split(grpc_path)
            (fm, channel) = self.get_file_manager(uri)
            try:
                (file_size, file_mtime, file_content) = fm.get_file_content(path)
                file_content = utf8(file_content)
                self._cache_file_content[grpc_path] = (file_size, file_mtime, file_content)
            except Exception as e:
                self.error.emit('get_file_content', ('grpc://%s' % uri), grpc_path, e)
                raise e
            finally:
                self.close_channel(channel, uri)
        if hasattr(self, '_threads'):
            self._threads.finished(('gfc_%s_%d' % (grpc_path, force)))
        self.file_content.emit(grpc_path, file_size, file_mtime, file_content)
        return (file_size, file_mtime, file_content)
    def save_file(self, grpc_path, content, mtime):
        """Save *content* remotely; return the new mtime on ack, 0 otherwise."""
        rospy.logdebug(('save_file_content: %s' % grpc_path))
        (uri, path) = nmdurl.split(grpc_path)
        (fm, channel) = self.get_file_manager(uri)
        result = fm.save_file_content(path, content, mtime)
        for ack in result:
            if ((ack.path == path) and (ack.mtime != 0)):
                # Successful save: invalidate stale cached content for this path.
                self.clear_cache(grpc_path)
                self.close_channel(channel, uri)
                return ack.mtime
        self.close_channel(channel, uri)
        return 0
    def rename(self, grpc_path_old='grpc://localhost:12321', grpc_path_new='grpc://localhost:12321'):
        """Rename a remote path; both paths must be on the same host."""
        (uri, old) = nmdurl.split(grpc_path_old)
        (_, new) = nmdurl.split(grpc_path_new)
        rospy.logdebug(('rename path on %s' % uri))
        (fm, channel) = self.get_file_manager(uri)
        result = fm.rename(old, new)
        self.close_channel(channel, uri)
        return result
    def copy(self, grpc_path='grpc://localhost:12321', grpc_dest='grpc://localhost:12321'):
        """Copy a remote path to *grpc_dest* (destination given as full gRPC URL)."""
        (uri, path) = nmdurl.split(grpc_path)
        rospy.logdebug(("copy '%s' to '%s'" % (grpc_path, grpc_dest)))
        (fm, channel) = self.get_file_manager(uri)
        fm.copy(path, grpc_dest)
        self.close_channel(channel, uri)
    def get_package_binaries(self, pkgname, grpc_url='grpc://localhost:12321'):
        """Return {grpc binary path: mtime} for all binaries of *pkgname*."""
        (uri, _path) = nmdurl.split(grpc_url)
        rospy.logdebug(("get_package_binaries for '%s' from '%s'" % (pkgname, uri)))
        (fm, channel) = self.get_file_manager(uri)
        response = fm.get_package_binaries(pkgname)
        (url, _) = nmdurl.split(grpc_url, with_scheme=True)
        result = {}
        for item in response:
            result[nmdurl.join(url, item.path)] = item.mtime
        self.close_channel(channel, uri)
        return result
    def delete(self, grpc_path='grpc://localhost:12321'):
        """Delete a remote path; returns the daemon's result."""
        (uri, path) = nmdurl.split(grpc_path)
        rospy.logdebug(("delete '%s' %s" % (path, uri)))
        (fm, channel) = self.get_file_manager(uri)
        result = fm.delete(path)
        self.close_channel(channel, uri)
        return result
    def new(self, grpc_path='grpc://localhost:12321', path_type=0):
        """Create a new remote file/directory (*path_type* selects which)."""
        (uri, path) = nmdurl.split(grpc_path)
        rospy.logdebug(("create new '%s' %s" % (path, uri)))
        (fm, channel) = self.get_file_manager(uri)
        result = fm.new(path, path_type)
        self.close_channel(channel, uri)
        return result
class JWTStrategy(Strategy[(models.UP, models.ID)], Generic[(models.UP, models.ID)]):
    """Authentication strategy storing the user id in a signed JWT.

    NOTE(review): `encode_key`/`decode_key` are used without calling them
    (see `read_token`), so they are presumably `@property` accessors whose
    decorators were lost in extraction — confirm against VCS.
    """
    def __init__(self, secret: SecretType, lifetime_seconds: Optional[int], token_audience: Optional[List[str]]=None, algorithm: str='HS256', public_key: Optional[SecretType]=None):
        self.secret = secret
        self.lifetime_seconds = lifetime_seconds
        # Fix: avoid a shared mutable default argument; the historical default is preserved.
        self.token_audience = (['fastapi-users:auth'] if (token_audience is None) else token_audience)
        self.algorithm = algorithm
        self.public_key = public_key
    def encode_key(self) -> SecretType:
        """Key used to sign tokens."""
        return self.secret
    def decode_key(self) -> SecretType:
        """Key used to verify tokens (public key for asymmetric algorithms)."""
        return (self.public_key or self.secret)
    async def read_token(self, token: Optional[str], user_manager: BaseUserManager[(models.UP, models.ID)]) -> Optional[models.UP]:
        """Resolve *token* to a user, or None on any decode/lookup failure."""
        if (token is None):
            return None
        try:
            data = decode_jwt(token, self.decode_key, self.token_audience, algorithms=[self.algorithm])
            user_id = data.get('sub')
            if (user_id is None):
                return None
        except jwt.PyJWTError:
            return None
        try:
            parsed_id = user_manager.parse_id(user_id)
            return (await user_manager.get(parsed_id))
        except (exceptions.UserNotExists, exceptions.InvalidID):
            return None
    async def write_token(self, user: models.UP) -> str:
        """Issue a JWT whose subject is the user's id."""
        data = {'sub': str(user.id), 'aud': self.token_audience}
        return generate_jwt(data, self.encode_key, self.lifetime_seconds, algorithm=self.algorithm)
    async def destroy_token(self, token: str, user: models.UP) -> None:
        """JWTs are stateless and cannot be revoked before expiry."""
        raise StrategyDestroyNotSupportedError("A JWT can't be invalidated: it's valid until it expires.")
def test_plain_marker_data_from_event_attributes():
    """A JSON details payload deserializes into PlainMarkerData keeping its id."""
    attributes = MarkerRecordedEventAttributes()
    attributes.details = b'{"id": "test-id"}'
    parsed = MarkerInterface.from_event_attributes(attributes)
    assert isinstance(parsed, PlainMarkerData)
    assert parsed.id == 'test-id'
class Migration(migrations.Migration):
    """Order import logs newest-first and add a db index on the category column."""

    dependencies = [
        ('frontend', '0006_importlog_populate'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='importlog',
            options={'ordering': ['-current_at']},
        ),
        migrations.AlterField(
            model_name='importlog',
            name='category',
            field=models.CharField(db_index=True, max_length=50),
        ),
    ]
class Client(object):
    """End-to-end-encrypted messaging client talking to a simple key/message server.

    Messages are encrypted once ("inner") and stored on the server; the
    per-recipient key material is wrapped ("outer") with each recipient's
    public key.
    """
    def __init__(self, server):
        self.server = server
        # Long-term RSA identity key; public part is registered hex-encoded.
        self._priv_key = crypto.generate_rsa_key()
        self.uid = self.server.register(binascii.hexlify(crypto.get_pubkey_bytes(self._priv_key.public_key())))
        # All received messages per sender, kept for later reporting.
        self._all_messages = defaultdict(list)
    def list(self):
        """List the users registered on the server."""
        return self.server.list_users()
    def send(self, msg, *users):
        """Encrypt *msg* once, store it, and send its key material to each recipient."""
        (km, cm) = crypto.encrypt_inner(msg)
        mid = self.server.put_msg(binascii.hexlify(cm))
        # Recipients verify this hash against the ciphertext they fetch.
        hcm = crypto.hash(cm)
        for user in users:
            self._send_ctxt(mid, km, hcm, user)
    def _send_ctxt(self, mid, km, hcm, user):
        """Wrap (message id, key, ciphertext hash) for *user* and deliver it."""
        out_msg = ((mid + km) + hcm)
        pubkey = binascii.unhexlify(self.server.get_user(user))
        (ctxt, com) = crypto.encrypt_outer(out_msg, pubkey)
        out = self.server.send(self.uid, user, binascii.hexlify((ctxt + com)))
        assert (out == b'sent'), out
    def recv(self):
        """Fetch, decrypt and verify pending messages; return [(sender, mid, plaintext)]."""
        lines = self.server.recv(self.uid)
        msgs = []
        for line in lines:
            (who, ts, msg, fbtag) = line.split(b' ')
            msgs.append((who, int(ts), binascii.unhexlify(msg), binascii.unhexlify(fbtag)))
        out = []
        for (who, ts, ctxt, fbtag) in msgs:
            msg = crypto.decrypt_outer(ctxt, self._priv_key)
            (mid, km, hcm, _) = crypto.split_outer_message(msg)
            cm = binascii.unhexlify(self.server.get_msg(mid))
            # Integrity check: stored ciphertext must match the hash the sender wrapped.
            assert (crypto.hash(cm) == hcm), 'bad message hash'
            m = crypto.decrypt_inner(km, cm)
            self._all_messages[who].append((mid, ts, ctxt, msg, fbtag))
            out.append((who, mid, m))
        return out
    def report(self, who, mid):
        """Report message *mid* from *who* to the server, including the franking tag."""
        (_, ts, ctxt, msg, fbtag) = [x for x in self._all_messages[who] if (x[0] == mid)][0]
        return self.server.report(self.uid, who, str(ts).encode('utf-8'), binascii.hexlify(ctxt), binascii.hexlify(fbtag), binascii.hexlify(msg))
def test_generic_search_parent(frontend_db, backend_db):
    """generic_search returns the matching FO itself, or its parent FW when only_fo_parent_firmware is set."""
    child_fo, parent_fw = create_fw_with_child_fo()
    parent_fw.file_name = 'fw.image'
    child_fo.file_name = 'foo.bar'
    child_fo.processed_analysis = {'plugin': generate_analysis_entry(analysis_result={'foo': 'bar', 'list': ['a', 'b']})}
    backend_db.insert_multiple_objects(parent_fw, child_fo)
    insert_test_fw(backend_db, 'some_other_fw', vendor='foo123')
    unrelated_fo = create_test_file_object()
    unrelated_fo.uid = 'some_other_fo'
    backend_db.insert_object(unrelated_fo)
    # Plain search returns the file object; parent mode maps matches to the firmware.
    assert frontend_db.generic_search({'file_name': 'foo.bar'}) == [child_fo.uid]
    assert frontend_db.generic_search({'file_name': 'foo.bar'}, only_fo_parent_firmware=True) == [parent_fw.uid]
    assert frontend_db.generic_search({'processed_analysis.plugin.foo': 'bar'}, only_fo_parent_firmware=True) == [parent_fw.uid]
    # A firmware matching directly is returned as-is in parent mode.
    assert frontend_db.generic_search({'file_name': 'fw.image'}, only_fo_parent_firmware=True) == [parent_fw.uid]
    assert frontend_db.generic_search({'vendor': 'foo123'}, only_fo_parent_firmware=True) == ['some_other_fw']
def build_data_provider(data_config):
    """Build a DataProvider over the LEAF CelebA split.

    The test split is reused for both eval and test loaders.
    """
    image_size = 32
    preprocess = transforms.Compose([
        transforms.Resize(image_size),
        transforms.CenterCrop(image_size),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
    ])
    train_dataset = CelebaDataset(
        data_root='leaf/data/celeba/data/train/all_data_0_0_keep_0_train_9.json',
        image_root='leaf/data/celeba/data/raw/',
        transform=preprocess,
    )
    test_dataset = CelebaDataset(
        data_root='leaf/data/celeba/data/test/all_data_0_0_keep_0_test_9.json',
        transform=preprocess,
        image_root=train_dataset.image_root,
    )
    print(f'Created datasets with {len(train_dataset)} train users and {len(test_dataset)} test users')
    dataloader = LEAFDataLoader(
        train_dataset,
        test_dataset,
        test_dataset,
        batch_size=data_config.local_batch_size,
        drop_last=data_config.drop_last,
    )
    data_provider = DataProvider(dataloader)
    print(f'Training clients in total: {data_provider.num_train_users()}')
    return data_provider
def test_countable_list():
    """RLP CountableList: round-trips, nesting, and max_length enforcement."""
    # Flat list of ints: valid tuples round-trip, non-int contents fail.
    l1 = CountableList(big_endian_int)
    serializable = [(), (1, 2), tuple(range(500))]
    for s in serializable:
        r = l1.serialize(s)
        assert (l1.deserialize(r) == s)
    not_serializable = ([1, 'asdf'], ['asdf'], [1, [2]], [[]])
    for n in not_serializable:
        with pytest.raises(SerializationError):
            l1.serialize(n)
    # Nested CountableList: lists of lists of ints.
    l2 = CountableList(CountableList(big_endian_int))
    serializable = ((), ((),), ((1, 2, 3), (4,)), ((5,), (6, 7, 8)), ((), (), (9, 0)))
    for s in serializable:
        r = l2.serialize(s)
        assert (l2.deserialize(r) == s)
    not_serializable = ([[[]]], [1, 2], [1, ['asdf'], ['fdsa']])
    for n in not_serializable:
        with pytest.raises(SerializationError):
            l2.serialize(n)
    # max_length=3: serialization matches the unbounded list up to 3 elements...
    l3 = CountableList(big_endian_int, max_length=3)
    serializable = [(), (1,), (1, 2), (1, 2, 3)]
    for s in serializable:
        r = l3.serialize(s)
        assert (r == l1.serialize(s))
        assert (l3.deserialize(r) == s)
    # ...while longer inputs fail to serialize AND to deserialize.
    not_serializable = [(1, 2, 3, 4), (1, 2, 3, 4, 5, 6, 7), range(500)]
    for s in not_serializable:
        with pytest.raises(SerializationError):
            l3.serialize(s)
        r = l1.serialize(s)
        with pytest.raises(DeserializationError):
            l3.deserialize(r)
        ll = rlp.decode_lazy(rlp.encode(r))
        with pytest.raises(DeserializationError):
            l3.deserialize(ll)
        # Lazy deserialization stops after max_length + 1 elements.
        assert (len(ll._elements) == (3 + 1))
class Node(object):
    """Abstract base for serializable AST-like nodes.

    Subclasses declare their fields via ``__slots__`` (names ending in
    ``_node``/``_nodes`` hold child nodes) and round-trip through JSON via
    tojson()/fromjson().

    NOTE(review): ``fromjson``/``_fromdict`` take ``cls`` and read like
    classmethods whose decorators were lost in extraction — confirm via VCS.
    """
    # Every node carries its source position.
    __slots__ = ['lineno', 'col_offset']
    class OPS():
        # Canonical names of unary/binary/boolean operators (mirrors ast).
        UAdd = 'UAdd'
        USub = 'USub'
        Not = 'Not'
        Invert = 'Invert'
        Add = 'Add'
        Sub = 'Sub'
        Mult = 'Mult'
        Div = 'Div'
        FloorDiv = 'FloorDiv'
        Mod = 'Mod'
        Pow = 'Pow'
        LShift = 'LShift'
        RShift = 'RShift'
        BitOr = 'BitOr'
        BitXor = 'BitXor'
        BitAnd = 'BitAnd'
        And = 'And'
        Or = 'Or'
    class COMP():
        # Canonical names of comparison operators (mirrors ast).
        Eq = 'Eq'
        NotEq = 'NotEq'
        Lt = 'Lt'
        LtE = 'LtE'
        Gt = 'Gt'
        GtE = 'GtE'
        Is = 'Is'
        IsNot = 'IsNot'
        In = 'In'
        NotIn = 'NotIn'
    def __init__(self, *args):
        """Assign *args* positionally to __slots__, validating them when `docheck` is on."""
        names = self.__slots__
        assert (len(args) == len(names))
        if docheck:
            assert (not hasattr(self, '__dict__')), 'Nodes must have __slots__'
            assert (self.__class__ is not Node), 'Node is an abstract class'
            # Field-name suffix encodes the expected value kind.
            for (name, val) in zip(names, args):
                assert (not isinstance(val, ast.AST))
                if (name == 'name'):
                    assert isinstance(val, (basestring, NoneType)), 'name not a string'
                elif (name == 'op'):
                    assert ((val in Node.OPS.__dict__) or (val in Node.COMP.__dict__))
                elif name.endswith('_node'):
                    assert isinstance(val, (Node, NoneType)), ('%r is not a Node' % name)
                elif name.endswith('_nodes'):
                    islistofnodes = (isinstance(val, list) and all((isinstance(n, Node) for n in val)))
                    assert islistofnodes, ('%r is not a list of nodes' % name)
                else:
                    assert (not isinstance(val, Node)), ('%r should not be a Node' % name)
                    assert (not (isinstance(val, list) and all((isinstance(n, Node) for n in val))))
        for (name, val) in zip(names, args):
            setattr(self, name, val)
    def tojson(self, indent=2):
        """Serialize this node tree to a JSON string."""
        return json.dumps(self._todict(), indent=indent, sort_keys=True)
    def fromjson(cls, text):
        """Reconstruct a node tree from tojson() output."""
        return Node._fromdict(json.loads(text))
    def _fromdict(cls, d):
        """Build a node from its dict form, decoding BYTES:/COMPLEX: tagged strings."""
        assert ('_type' in d)
        Cls = globals()[d['_type']]
        args = []
        for name in Cls.__slots__:
            val = d[name]
            if (val is None):
                pass
            elif name.endswith('_node'):
                val = Node._fromdict(val)
            elif name.endswith('_nodes'):
                val = [Node._fromdict(x) for x in val]
            elif isinstance(val, basestring):
                if val.startswith('BYTES:'):
                    val = decodebytes(val[6:].encode('utf-8'))
                elif val.startswith('COMPLEX:'):
                    val = complex(val[8:])
                elif (pyversion < (3,)):
                    # Python 2: normalize str to unicode.
                    val = unicode(val)
            args.append(val)
        return Cls(*args)
    def _todict(self):
        """Dict form of this node; bytes/complex values become tagged strings."""
        d = {}
        d['_type'] = self.__class__.__name__
        for name in self.__slots__:
            val = getattr(self, name)
            if (val is None):
                pass
            elif name.endswith('_node'):
                val = val._todict()
            elif name.endswith('_nodes'):
                val = [x._todict() for x in val]
            elif (isinstance(self, Bytes) and isinstance(val, bytes)):
                val = ('BYTES:' + encodebytes(val).decode('utf-8').rstrip())
            elif (isinstance(self, Num) and isinstance(val, complex)):
                val = ('COMPLEX:' + repr(val))
            d[name] = val
        return d
    def __eq__(self, other):
        """Structural equality via the dict form; comparing to non-nodes raises."""
        if (not isinstance(other, Node)):
            raise ValueError('Can only compare nodes to other nodes.')
        return (self._todict() == other._todict())
    def __repr__(self):
        names = ', '.join([repr(x) for x in self.__slots__])
        return ('<%s with %s at 0x%x>' % (self.__class__.__name__, names, id(self)))
    def __str__(self):
        return self.tojson()
def test_password():
    """Password inputs are filled from defaults only when skip_passwords is left off."""
    field_with_value = '<input name="password" type="password" value="">'
    field_without_value = '<input name="password" type="password">'
    secret = {'password': 'secure passwd'}
    filled = '<input name="password" type="password" value="secure passwd">'
    assert htmlfill.render(field_with_value, defaults=secret) == filled
    assert htmlfill.render(field_with_value, defaults=secret, skip_passwords=True) == field_with_value
    assert htmlfill.render(field_without_value, defaults=secret) == filled
    assert htmlfill.render(field_without_value, defaults=secret, skip_passwords=True) == field_without_value
def test_schema_migration_writer_union():
    """Records written with a union-typed field read back under a narrowed schema."""
    writer_schema = {
        'type': 'record',
        'name': 'test_schema_migration_writer_union',
        'fields': [{'name': 'test', 'type': ['string', 'int']}],
    }
    reader_schema = {
        'type': 'record',
        'name': 'test_schema_migration_writer_union',
        'fields': [{'name': 'test', 'type': 'int'}],
    }
    records = [{'test': 1}]
    buffer = BytesIO()
    fastavro.writer(buffer, writer_schema, records)
    buffer.seek(0)
    assert list(fastavro.reader(buffer, reader_schema)) == records
class DataGen(object):
    """Bucketed batch generator over a TFRecord OCR dataset.

    NOTE(review): `set_full_ascii_charmap` and `_parse_record` take no `self`
    and read like @staticmethods whose decorators were lost — confirm via VCS.
    """
    # Special token ids for the decoder sequence.
    GO_ID = 1
    EOS_ID = 2
    IMAGE_HEIGHT = 32
    # Index 0-2 are reserved (padding/GO/EOS slots); rest is the character set.
    CHARMAP = (['', '', ''] + list('ABCDEFGHIJKLMNOPQRSTUVWXYZ'))
    def set_full_ascii_charmap():
        """Switch the shared charmap to the full printable-ASCII range."""
        DataGen.CHARMAP = (['', '', ''] + [chr(i) for i in range(32, 127)])
    def __init__(self, annotation_fn, buckets, epochs=1000, max_width=None):
        """Build the shuffled, repeated TFRecord pipeline for *annotation_fn*."""
        self.epochs = epochs
        self.max_width = max_width
        self.bucket_specs = buckets
        self.bucket_data = BucketData()
        dataset = TFRecordDataset([annotation_fn])
        dataset = dataset.map(self._parse_record)
        dataset = dataset.shuffle(buffer_size=10000)
        self.dataset = dataset.repeat(self.epochs)
    def clear(self):
        """Reset the in-progress bucket accumulator."""
        self.bucket_data = BucketData()
    def gen(self, batch_size):
        """Yield bucketed batches of (image, label) pairs of size *batch_size*."""
        dataset = self.dataset.batch(batch_size)
        iterator = tf.compat.v1.data.make_one_shot_iterator(dataset)
        (images, labels, comments) = iterator.get_next()
        with tf.compat.v1.Session(config=tf.compat.v1.ConfigProto(allow_soft_placement=True)) as sess:
            while True:
                try:
                    (raw_images, raw_labels, raw_comments) = sess.run([images, labels, comments])
                    for (img, lex, comment) in zip(raw_images, raw_labels, raw_comments):
                        # Skip images wider than max_width (when configured).
                        if (self.max_width and (Image.open(IO(img)).size[0] <= self.max_width)):
                            word = self.convert_lex(lex)
                            bucket_size = self.bucket_data.append(img, word, lex, comment)
                            if (bucket_size >= batch_size):
                                bucket = self.bucket_data.flush_out(self.bucket_specs, go_shift=1)
                                (yield bucket)
                except tf.errors.OutOfRangeError:
                    break
        self.clear()
    def convert_lex(self, lex):
        """Encode a label string as [GO, char ids..., EOS] int32 array."""
        if (sys.version_info >= (3,)):
            lex = lex.decode('iso-8859-1')
        assert (len(lex) < self.bucket_specs[(- 1)][1])
        return np.array((([self.GO_ID] + [self.CHARMAP.index(char) for char in lex]) + [self.EOS_ID]), dtype=np.int32)
    def _parse_record(example_proto):
        """Parse one TFRecord example into (image, label, comment) tensors."""
        features = tf.io.parse_single_example(example_proto, features={'image': tf.io.FixedLenFeature([], tf.string), 'label': tf.io.FixedLenFeature([], tf.string), 'comment': tf.io.FixedLenFeature([], tf.string, default_value='')})
        return (features['image'], features['label'], features['comment'])
def handle_azurefile(request, hostname, path, query_params, service, service_type, context):
    """Classify an Azure File Storage HTTP request into an operation name.

    Fills the destination service fields in *context* and returns a
    HandlerInfo whose signature is ``AzureFile <operation> <resource>``.
    The operation is derived from the HTTP method combined with the
    ``comp``/``restype`` query parameters and the copy headers, mirroring
    the Azure Files REST API. Branch order matters: earlier matches win.
    """
    account_name = hostname.split('.')[0]
    method = request.method
    # Resource is the path below the account, or the account itself for root calls.
    resource_name = (path.split('/', 1)[1] if ('/' in path) else account_name)
    headers = request.headers
    context['destination']['service'] = {'name': service, 'resource': '{}/{}'.format(service, account_name), 'type': service_type}
    operation_name = 'Unknown'
    # GET: downloads, listings, and property/metadata/acl reads.
    if (method.lower() == 'get'):
        operation_name = 'Download'
        if ('list' in query_params.get('comp', [])):
            operation_name = 'List'
        elif ('properties' in query_params.get('comp', [])):
            operation_name = 'GetProperties'
        elif ('share' in query_params.get('restype', [])):
            operation_name = 'GetProperties'
        elif ('metadata' in query_params.get('comp', [])):
            operation_name = 'GetMetadata'
        elif ('acl' in query_params.get('comp', [])):
            operation_name = 'GetAcl'
        elif ('stats' in query_params.get('comp', [])):
            operation_name = 'Stats'
        elif ('filepermission' in query_params.get('comp', [])):
            operation_name = 'GetPermission'
        elif ('listhandles' in query_params.get('comp', [])):
            operation_name = 'ListHandles'
        elif ('rangelist' in query_params.get('comp', [])):
            operation_name = 'ListRanges'
    # PUT: creations, writes, copies, and property/metadata/acl updates.
    elif (method.lower() == 'put'):
        operation_name = 'Create'
        if ('properties' in query_params.get('comp', [])):
            operation_name = 'SetProperties'
        # NOTE(review): separate `if` (not elif) — 'share' restype overrides the
        # previous result and anchors the elif chain below; confirm intended.
        if ('share' in query_params.get('restype', [])):
            operation_name = 'SetProperties'
        elif ('snapshot' in query_params.get('comp', [])):
            operation_name = 'Snapshot'
        elif ('metadata' in query_params.get('comp', [])):
            operation_name = 'SetMetadata'
        elif ('undelete' in query_params.get('comp', [])):
            operation_name = 'Undelete'
        elif ('acl' in query_params.get('comp', [])):
            operation_name = 'SetAcl'
        elif ('filepermission' in query_params.get('comp', [])):
            operation_name = 'SetPermission'
        elif ('directory' in query_params.get('restype', [])):
            operation_name = 'Create'
        elif ('forceclosehandles' in query_params.get('comp', [])):
            operation_name = 'CloseHandles'
        elif ('range' in query_params.get('comp', [])):
            operation_name = 'Upload'
        elif ('x-ms-copy-source' in headers):
            operation_name = 'Copy'
        elif (('x-ms-copy-action' in headers) and (headers['x-ms-copy-action'] == 'abort')):
            operation_name = 'Abort'
        elif ('lease' in query_params.get('comp', [])):
            operation_name = 'Lease'
    # OPTIONS: CORS preflight.
    elif (method.lower() == 'options'):
        operation_name = 'Preflight'
    # HEAD: property/metadata/acl reads without a body.
    elif (method.lower() == 'head'):
        operation_name = 'GetProperties'
        if ('share' in query_params.get('restype', [])):
            operation_name = 'GetProperties'
        elif ('metadata' in query_params.get('comp', [])):
            operation_name = 'GetMetadata'
        elif ('acl' in query_params.get('comp', [])):
            operation_name = 'GetAcl'
    elif (method.lower() == 'delete'):
        operation_name = 'Delete'
    signature = 'AzureFile {} {}'.format(operation_name, resource_name)
    return HandlerInfo(signature, service_type, service, operation_name, context)
def check_if_chain_matches_chain_uri(w3: 'Web3', blockchain_uri: URI) -> bool:
    """Return True iff the chain behind *w3* matches the BIP122 blockchain URI.

    The genesis hash must equal the URI's chain id and the referenced block
    must exist with the stated hash. Raises ValueError for resource types
    other than a block.
    """
    chain_id, resource_type, resource_hash = parse_BIP122_uri(blockchain_uri)
    genesis_block = w3.eth.get_block('earliest')
    # Wrong chain entirely: genesis hash does not match the URI's chain id.
    if encode_hex(genesis_block['hash']) != chain_id:
        return False
    if resource_type != BLOCK:
        raise ValueError(f'Unsupported resource type: {resource_type}')
    resource = w3.eth.get_block(resource_hash)
    return encode_hex(resource['hash']) == resource_hash
class OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Generated wrapper for the sonification highpass-filter mapping options."""
    def frequency(self) -> 'OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Sub-options controlling the highpass cutoff frequency mapping."""
        return self._config_sub_data('frequency', OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    def resonance(self) -> 'OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Sub-options controlling the highpass resonance mapping."""
        return self._config_sub_data('resonance', OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingHighpassResonance)
class TestBool(unittest.TestCase):
    """Tests for a Bool trait on class A (foo = Bool()).

    NOTE(review): the bare `_numpy` lines below look like truncated skip
    decorators (e.g. `@requires_numpy`) lost in extraction — confirm via VCS.
    As written they are no-op name lookups.
    """
    def test_default_value(self):
        """Unset Bool trait defaults to a real bool False."""
        a = A()
        self.assertEqual(type(a.foo), bool)
        self.assertFalse(a.foo)
    def test_accepts_bool(self):
        a = A()
        a.foo = True
        self.assertTrue(a.foo)
        a.foo = False
        self.assertFalse(a.foo)
    def test_does_not_accept_int_or_float(self):
        """Non-bool assignments raise TraitError and leave the value unchanged."""
        a = A()
        bad_values = [(- 1), 'a string', 1.0]
        for bad_value in bad_values:
            with self.assertRaises(TraitError):
                a.foo = bad_value
            self.assertEqual(type(a.foo), bool)
            self.assertFalse(a.foo)
    _numpy
    def test_accepts_numpy_bool(self):
        a = A()
        a.foo = numpy.bool_(True)
        self.assertTrue(a.foo)
    _numpy
    def test_numpy_bool_retrieved_as_bool(self):
        """numpy.bool_ assignments are stored/returned as builtin bool."""
        a = A()
        a.foo = numpy.bool_(True)
        self.assertIsInstance(a.foo, bool)
        a.foo = numpy.bool_(False)
        self.assertIsInstance(a.foo, bool)
    _numpy
    def test_numpy_bool_accepted_as_dict_value(self):
        class HasBoolDict(HasTraits):
            foo = Dict(Int, Bool)
        has_bool_dict = HasBoolDict()
        has_bool_dict.foo[1] = numpy.bool_(True)
        self.assertTrue(has_bool_dict.foo[1])
    _numpy
    def test_numpy_bool_accepted_as_dict_key(self):
        class HasBoolDict(HasTraits):
            foo = Dict(Bool, Int)
        has_bool_dict = HasBoolDict()
        key = numpy.bool_(True)
        has_bool_dict.foo[key] = 1
        self.assertEqual(has_bool_dict.foo[key], 1)
class KeyTable(Options):
    """Options wrapper for the DataTables KeyTable extension.

    Each option is a same-named getter/setter pair.
    NOTE(review): the second `def` of each pair shadows the first — the
    `@property`/setter decorators look like they were lost in extraction;
    confirm against VCS.
    """
    def activate(self):
        """Enable KeyTable on all columns except the first; returns self for chaining."""
        self.component.options.columns = ':not(:first-child)'
        return self
    def blurable(self):
        """Whether the table loses focus when clicking outside it."""
        return self._config_get()
    def blurable(self, val):
        self._config(val)
    def className(self):
        """CSS class applied to the focused cell."""
        return self._config_get()
    def className(self, val):
        self._config(val)
    def clipboard(self):
        """Whether clipboard interaction is enabled."""
        return self._config_get()
    def clipboard(self, val):
        self._config(val)
    def clipboardOrthogonal(self):
        """Orthogonal data point used for clipboard operations."""
        return self._config_get()
    def clipboardOrthogonal(self, val):
        self._config(val)
    def columns(self):
        """Selector for the columns KeyTable operates on."""
        return self._config_get()
    def columns(self, val):
        self._config(val)
    def focus(self):
        """Cell to receive initial focus."""
        return self._config_get()
    def focus(self, val):
        self._config(val)
    def keys(self):
        """Key codes KeyTable listens for."""
        return self._config_get()
    def keys(self, val):
        self._config(val)
    def tabIndex(self):
        """Tab index of the table for document-order focus."""
        return self._config_get()
    def tabIndex(self, val):
        self._config(val)
# NOTE(review): looks like a truncated decorator (presumably `@csrf_exempt`)
# lost in extraction — confirm against VCS.
_exempt
def login(request):
    """POST username/password login endpoint.

    On success logs the user in, records the login, and returns the
    serialized user; returns JSON ``null`` when authentication fails or the
    account is inactive.
    """
    username = request.POST['username']
    password = request.POST['password']
    user = auth.authenticate(username=username, password=password)
    user_dict = None
    if (user is not None):
        if user.is_active:
            auth.login(request, user)
            log_svc.log_login(request.user)
            user_dict = _user2dict(user)
    # safe=False allows serializing the None case as JSON null.
    return JsonResponse(user_dict, safe=False)
class table_stats_request(stats_request):
    """OpenFlow (wire version 3 / OF 1.2) table-stats request message.

    Generated-style (loxi) code, apparently targeting Python 2: packing joins
    `str` segments. NOTE(review): `unpack` takes `reader` without `self` and is
    presumably a @staticmethod whose decorator was lost — confirm via VCS.
    """
    version = 3
    type = 18
    stats_type = 3
    def __init__(self, xid=None, flags=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        return
    def pack(self):
        """Serialize the message; the length field is backpatched after packing."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the total length, fixed up below.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of padding (str on Python 2).
        packed.append(('\x00' * 4))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a table_stats_request from *reader*, validating the fixed fields."""
        obj = table_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 3)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 3)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        return obj
    def __eq__(self, other):
        """Messages are equal when type, xid and flags all match."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump of the message into pretty-printer *q*."""
        q.text('table_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.breakable()
        q.text('}')
def test_form_rules():
    """Form rules reorder rendered fields and can drop unlisted ones.

    With form_rules=('test2', 'test1', Field('test4')) the create form must
    render Test2 before Test1, Test1 before Test4, and omit Test3 entirely.
    """
    app, db, admin = setup()
    with app.app_context():
        Model1, _ = create_models(db)
        db.create_all()
        view = CustomModelView(
            Model1,
            db.session,
            form_rules=('test2', 'test1', rules.Field('test4')),
        )
        admin.add_view(view)

        client = app.test_client()
        rv = client.get('/admin/model1/new/')
        assert rv.status_code == 200

        body = rv.data.decode('utf-8')
        pos = {label: body.find(label) for label in ('Test1', 'Test2', 'Test3', 'Test4')}
        assert pos['Test1'] > pos['Test2']
        assert pos['Test4'] > pos['Test1']
        assert pos['Test3'] == -1
class ResultField(QtWidgets.QLabel):
    """Read-only, selectable label showing a value in binary, decimal/float/int
    and hexadecimal form (or an error message)."""

    def __init__(self):
        super().__init__()
        self.setAlignment(QtCore.Qt.AlignmentFlag.AlignRight)
        self.result = ''  # last rendered text, kept for external inspection
        self.setFont(monospace())
        # Allow the user to select/copy the result with the mouse.
        self.setTextInteractionFlags(QtCore.Qt.TextInteractionFlag.TextSelectableByMouse)
        self.setSizePolicy(
            QtWidgets.QSizePolicy.Policy.MinimumExpanding,
            QtWidgets.QSizePolicy.Policy.MinimumExpanding,
        )
        self.setFrameStyle(QtWidgets.QFrame.Shape.StyledPanel)

    def set_result(self, as_unsigned=None, as_int=None, as_flt=None, error_message=None):
        """Render the value (binary line, then float/int/unsigned line, then hex)
        or, if error_message is given, show it verbatim instead."""
        if error_message is not None:
            text = error_message
        else:
            segments = [f'0b{as_unsigned:b}']
            if as_flt is not None:
                literal, string = to_float(as_unsigned)
                segments.append(string + f' = {literal}')
            elif as_int is not None:
                segments.append(f'{as_int}')
            else:
                segments.append(f'{as_unsigned}')
            segments.append(f'0x{as_unsigned:x}')
            text = '\n'.join(segments)
        self.result = text
        self.setText(text)
class InformationBasedSimilarityTests(unittest.TestCase):
    """Regression test for the information_based_similarity feature."""

    def test_information_based_similarity(self):
        # Two fixed 100-sample series; with word length 8 the similarity is
        # expected to be exactly 0.0 (kept verbatim as a regression fixture).
        x_axis_data = [0., 0., (- 1.), 0., 0., (- 0.), (- 1.), (- 1.), (- 0.), (- 0.), 0., (- 0.), (- 0.5643706), (- 1.), 1., (- 0.), 0., 1., (- 1.), (- 0.7751325), 0., (- 1.), 0., 2., (- 0.), (- 0.), 0.5917969, 0., 0., (- 1.), 1., 0., (- 0.), 0., (- 0.), (- 1.), (- 0.), 1., (- 0.), (- 1.), (- 0.), 0., 1., (- 0.), 1., (- 0.1920458), 1., 1., (- 0.), 1., 1., 0., 1., 0., (- 1.), (- 1.), (- 1.), (- 0.3166042), 0., (- 0.), 0.7890322, 1.5252607, (- 0.), (- 1.), (- 0.), (- 0.), (- 1.9678474), 0., (- 0.), (- 0.), (- 0.), (- 1.), (- 0.3542932), (- 0.), (- 0.), 0., (- 0.), (- 0.), (- 2.), 0., (- 0.), (- 0.), (- 0.), 0., 0., 0., (- 0.7760291), (- 1.), 0., (- 0.), (- 1.), (- 1.), 0., (- 0.), 1., (- 1.), (- 0.), (- 0.), (- 0.), 1.]
        y_axis_data = [(- 0.), (- 0.), (- 0.), (- 0.), 1., (- 1.), 1., 0., 0., (- 2.9507694), (- 1.), (- 0.), (- 2.), (- 0.5247534), (- 0.), 1., (- 2.), 1., 0., 0., 1., (- 0.), 0., 0., (- 0.), 1., (- 0.), 0., (- 0.), (- 0.), (- 0.), 0., 1., 0., (- 1.), (- 1.), 1., 0., (- 0.), 0., (- 0.), 0., (- 0.5710966), 0., (- 0.), 0., 0., (- 0.), 0., (- 2.), 0., (- 0.), (- 0.), (- 0.), 1., (- 0.), (- 0.), (- 0.7150911), 0., (- 0.), 0., 1., 0., (- 0.), 0., 1., (- 0.), 2., (- 0.), (- 1.), 0., 0.2709552, (- 0.), (- 2.4057175), (- 1.), 0., 0., (- 0.3481339), (- 0.), 0.8221449, 0., (- 1.), (- 0.), 0., 0., (- 0.), (- 0.), 0., 1., (- 1.), 1., (- 1.), (- 0.), 0., 0., 0.3705322, (- 0.), 0., (- 0.), 0.]
        # Exact equality against 0.0 — presumably the implementation returns
        # an exact float here; if it ever becomes approximate, switch to
        # assertAlmostEqual.
        self.assertEqual(information_based_similarity(x_axis_data, y_axis_data, 8), 0.)
class Interface(ShowBase):
    """Top-level panda3d application window: sets up rendering, input, modes
    and per-frame bookkeeping for the simulation UI."""

    def __init__(self, config: ShowBaseConfig):
        # NOTE(review): super().__init__(self, ...) passes the instance a
        # second time as ShowBase.__init__'s first positional argument —
        # confirm this is intentional and not a leftover from an unbound
        # ShowBase.__init__(self, ...) call style.
        super().__init__(self, windowType=config.window_type)
        self.openMainWindow(fbprops=config.fb_prop, size=config.window_size)
        Global.base.setBackgroundColor(0.04, 0.04, 0.04)
        simplepbr.init(enable_shadows=ani.settings['graphics']['shadows'], max_lights=13)
        # Mouse/camera handlers only make sense for a real on-screen window.
        if isinstance(self.win, GraphicsWindow):
            mouse.init()
            cam.init()
        if (not ani.settings['graphics']['shader']):
            Global.render.set_shader_off()
        # Cap the frame rate at the configured FPS.
        Global.clock.setMode(ClockObject.MLimited)
        Global.clock.setFrameRate(ani.settings['graphics']['fps'])
        Global.register_mode_mgr(ModeManager(all_modes))
        assert (Global.mode_mgr is not None)
        Global.mode_mgr.init_modes()
        self.frame = 0  # frame counter, advanced by increment_frame each tick
        tasks.add(self.increment_frame, 'increment_frame')
        if config.monitor:
            # Optional per-frame debug dump (see monitor()).
            tasks.add(self.monitor, 'monitor')
        self.listen_constant_events()
        self.stdout = terminal.Run()

    def listen_constant_events(self):
        """Register event handlers that stay active for the app's lifetime."""
        tasks.register_event('window-event', window_task)
        tasks.register_event('close-scene', self.close_scene)
        tasks.register_event('toggle-help', hud.toggle_help)

    def close_scene(self):
        """Tear down every visual/environment component and reset scene state."""
        visual.teardown()
        environment.unload_room()
        environment.unload_lights()
        hud.destroy()
        multisystem.reset()
        # Drop camera fixation so the next scene starts from a clean state.
        cam.fixation = None
        cam.fixation_object = None
        cam.fixated = False
        gc.collect()

    def create_scene(self):
        """Build the scene graph for the active system and aim the camera."""
        Global.render.attachNewNode('scene')
        visual.attach_system(multisystem.active)
        visual.buildup()
        environment.init(multisystem.active.table)
        # Fixate on the table center at the height of the largest ball radius.
        R = max([ball.params.R for ball in multisystem.active.balls.values()])
        cam.fixate(pos=((multisystem.active.table.w / 2), (multisystem.active.table.l / 2), R), node=visual.table.get_node('table'))

    def monitor(self, task):
        """Per-frame debug task: print mode, tasks, memory and input state."""
        # Skip while transitioning between modes (or before any mode is set).
        if ((Global.mode_mgr.mode == Mode.purgatory) or (Global.mode_mgr.mode is None)):
            return task.cont
        keymap = Global.mode_mgr.get_keymap()
        header = partial(self.stdout.warning, '', lc='green', nl_before=1, nl_after=0)
        header(header=f'Frame {self.frame}')
        self.stdout.info('Mode', Global.mode_mgr.mode)
        self.stdout.info('Last', Global.mode_mgr.last_mode)
        self.stdout.info('Tasks', [task.name for task in Global.task_mgr.getAllTasks()])
        self.stdout.info('Memory', get_total_memory_usage())
        self.stdout.info('Actions', [k for k in keymap if keymap[k]])
        self.stdout.info('Keymap', Global.mode_mgr.get_keymap())
        self.stdout.info('Frame', self.frame)
        return task.cont

    def increment_frame(self, task):
        """Task that advances the frame counter once per rendered frame."""
        self.frame += 1
        return task.cont

    def finalizeExit(self):
        # ShowBase exit hook: delegate to stop().
        self.stop()

    def stop(self):
        """Terminate the application."""
        sys.exit()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.