code stringlengths 281 23.7M |
|---|
def cache_web_image(name, url):
    """Download *url* and store it under ``images/<slug>.<ext>``.

    The slug is *name* with whitespace removed and lower-cased, run through
    punycode so non-ASCII characters map to a filesystem-safe ASCII form.
    The extension is taken from the response's Content-Type header.
    """
    slug = ''.join(name.split()).lower().encode('punycode').decode()
    # punycode appends a trailing '-' for pure-ASCII input; drop that one dash.
    if slug[-1] == '-':
        slug = slug[:-1]
    with urlopen(url) as response:
        content_type = response.getheader('Content-Type')
        ext = content_type.split('/')[1]
        ext = 'jpg' if ext == 'jpeg' else ext
        with open(f'images/{slug}.{ext}', 'wb') as fh:
            fh.write(response.read())
def gf_mult(x, y):
    """Multiply two BLOCK_SIZE-byte blocks in GF(2^128), GCM bit order.

    Classic shift-and-add: walk the 128 bits of ``y``, accumulating the
    progressively right-shifted copy of ``x`` whenever the bit is set,
    reducing by the GCM polynomial (GF_MULT_R) when a bit shifts out.
    """
    assert len(x) == BLOCK_SIZE
    assert len(y) == BLOCK_SIZE
    product = encode_int(0, BLOCK_SIZE)
    addend = copy_buf(x)
    for bit in range(128):
        # Accumulate the current multiple of x when this bit of y is set.
        if get_bit(y, bit):
            product = xor_block(product, addend)
        # Shift right; fold the modulus back in when the low bit falls off.
        if get_bit(addend, 127):
            addend = xor_block(shift_right_block(addend), GF_MULT_R)
        else:
            addend = shift_right_block(addend)
    return product
def lazy_import():
    """Import the billing models on first use and publish them into this
    module's namespace (defers imports to avoid circular-import issues at
    module load time)."""
    from fastly.model.billing import Billing
    from fastly.model.billing_response_item_items_data import BillingResponseItemItemsData
    from fastly.model.billing_response_line_items import BillingResponseLineItems
    from fastly.model.billing_status import BillingStatus
    from fastly.model.billing_total import BillingTotal
    globals().update(
        Billing=Billing,
        BillingResponseItemItemsData=BillingResponseItemItemsData,
        BillingResponseLineItems=BillingResponseLineItems,
        BillingStatus=BillingStatus,
        BillingTotal=BillingTotal,
    )
class OdysseyError(CommandLineError):
    """Raised when the --odyssey filter value is not a combination of Y/N/?."""

    def __init__(self, value):
        # Same guidance text as before, built with an f-string.
        super().__init__(
            f"Invalid --odyssey '{value}': Use a combination of one or more "
            "from 'Y' for Yes, 'N' for No or '?' for unknown, e.g. 'YN?' "
            "matches any station while 'Y?' matches yes or unknown, or 'N' "
            "matches only non-odyssey stations."
        )
def test_initialize_ctx_with_absolute_dir(hydra_restore_singletons: Any, tmpdir: Any) -> None:
    """Hydra's initialize() requires a *relative* config_path, so passing the
    absolute tmpdir must raise HydraException with the documented message."""
    with raises(HydraException, match=re.escape('config_path in initialize() must be relative')):
        with initialize(version_base=None, config_path=str(tmpdir)):
            compose(overrides=['+test_group=test']) |
# NOTE(review): the two bare expressions in the original were decorator
# arguments whose '@name(' prefix was lost in extraction — most likely a
# skip condition on the class and a @parameterized.expand([...]) on
# test_cast. Preserved as comments; restore the real decorators upstream:
#   (((detect_target().name() == 'cuda') and (int(detect_target()._arch) < 80)), 'Not supported by CUDA < SM80.')
class TestCastConverter(AITTestCase):
    #   [('half_to_float', torch.half, torch.float), ('float_to_half', torch.float, torch.half),
    #    ('half_to_bf16', torch.half, torch.bfloat16), ('bool_to_half', torch.bool, torch.half)]
    def test_cast(self, name, dtype, cast_dtype):
        """Lower a cast round-trip module and check acc_ops.to_dtype is hit."""

        class Cast(nn.Module):
            def __init__(self, dtype):
                super().__init__()
                self.cast_ty = dtype

            def forward(self, x):
                y = x.to(self.cast_ty)
                return (x.to(y.dtype) + y)

        model = Cast(cast_dtype).cuda()
        x = torch.randn(3, 4, 5)
        if dtype == torch.bool:
            x = (x < 0.5)
        else:
            # BUG FIX: Tensor.to() is not in-place; the original discarded
            # the converted tensor and always fed float32 input.
            x = x.to(dtype)
        inputs = [x]
        self.run_test(model, inputs, expected_ops={acc_ops.to_dtype})
def filter_impulse_response(sos_or_fir_coef: np.ndarray, N: int = 2048, fs: Optional[float] = None, sos: bool = True) -> 'Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]':
    """Compute the first *N* samples of a digital filter's impulse response.

    Parameters
    ----------
    sos_or_fir_coef : second-order sections (when ``sos`` is True) or FIR
        numerator taps (when ``sos`` is False).
    N : number of response samples to compute.
    fs : sampling rate in Hz; when given, a matching time vector is also
        returned.
    sos : interpret the coefficients as SOS (True) or FIR (False).

    Returns
    -------
    ``response`` alone when ``fs`` is None, otherwise ``(response, time)``.
    The asymmetric return shape is kept for backward compatibility.
    (Annotations fixed: ``fs`` was declared plain ``float`` with a None
    default, and the return type claimed a tuple unconditionally.)
    """
    if sos:
        response = sosfilt(sos_or_fir_coef, unit_impulse(N))
    else:
        response = lfilter(b=sos_or_fir_coef, a=1, x=unit_impulse(N))
    if fs is not None:
        time = np.arange(N) / fs
        return (response, time)
    return response
class TestGreenIoLong(tests.LimitedTestCase):
    """Longer-running green-socket tests."""
    # Generous budget: this test shuttles 20 chunks through two readers.
    TEST_TIMEOUT = 10
    def test_multiple_readers(self):
        """With the hub's multiple-readers guard disabled, two green threads
        may recv() on the same socket and both must receive some data."""
        # Allow more than one greenlet to read the same fd for this test.
        debug.hub_prevent_multiple_readers(False)
        recvsize = (2 * min_buf_size())
        sendsize = (10 * recvsize)
        def reader(sock, results):
            # Drain the socket until EOF, collecting each chunk received.
            while True:
                data = sock.recv(recvsize)
                if (not data):
                    break
                results.append(data)
        results1 = []
        results2 = []
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        listener.bind(('127.0.0.1', 0))
        listener.listen(50)
        def server():
            (sock, addr) = listener.accept()
            sock = bufsized(sock)
            try:
                # Two concurrent readers on the same accepted socket.
                c1 = eventlet.spawn(reader, sock, results1)
                c2 = eventlet.spawn(reader, sock, results2)
                try:
                    c1.wait()
                    c2.wait()
                finally:
                    c1.kill()
                    c2.kill()
            finally:
                sock.close()
        server_coro = eventlet.spawn(server)
        client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client.connect(('127.0.0.1', listener.getsockname()[1]))
        bufsized(client, size=sendsize)
        # Dribble the payload in 20 pieces, yielding between sends so both
        # reader greenlets get scheduled.
        for i in range(20):
            eventlet.sleep(0.001)
            client.sendall((b'*' * (sendsize // 20)))
        client.close()
        server_coro.wait()
        listener.close()
        assert (len(results1) > 0)
        assert (len(results2) > 0)
        # Restore the default guard for subsequent tests.
        debug.hub_prevent_multiple_readers() |
class EntryStatsFragmented(BaseGenTableTest):
    """Add enough gentable entries that the entry-stats reply is fragmented
    across multiple messages, then verify every VLAN id appears exactly once."""

    def runTest(self):
        # 4095 entries (VLAN ids 0..4094) forces a multi-part stats reply.
        for i in range(0, 4095):
            # NOTE(review): the ipv4 argument's value was lost in extraction
            # ('ipv4=,' did not parse); 0x12345678 is a placeholder —
            # restore the real value from the original test.
            self.do_add(vlan_vid=i, ipv4=0x12345678, mac=(0, 1, 2, 3, 4, 5))
        do_barrier(self.controller)
        verify_no_errors(self.controller)
        entries = self.do_entry_stats()
        seen = set()
        for entry in entries:
            key = tlv_dict(entry.key)
            vlan_vid = key[ofp.bsn_tlv.vlan_vid]
            # Each key must be unique across all stats fragments.
            self.assertNotIn(vlan_vid, seen)
            seen.add(vlan_vid)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(seen, set(range(0, 4095)))
class MakingTheGradeTest(unittest.TestCase):
    """Exercism 'Making the Grade' exercise tests.

    NOTE(review): the bare '.task(taskno=N)' lines in the extracted source
    were decorators with their '@pytest.mark' prefix stripped (the standard
    exercism marker); restored below — the file must import pytest.
    """

    @pytest.mark.task(taskno=1)
    def test_round_scores(self):
        test_data = [tuple(), (0.5,), (1.5,), (90.33, 40.5, 55.44, 70.05, 30.55, 25.45, 80.45, 95.3, 38.7, 40.3), (50, 36.03, 76.92, 40.7, 43, 78.29, 63.58, 91, 28.6, 88.0)]
        result_data = [[], [0], [2], [90, 40, 55, 70, 31, 25, 80, 95, 39, 40], [50, 36, 77, 41, 43, 78, 64, 91, 29, 88]]
        for (variant, (student_scores, expected)) in enumerate(zip(test_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', student_scores=student_scores, expected=expected):
                actual_result = round_scores(list(student_scores))
                error_message = f'Called round_scores({list(student_scores)}). The function returned {sorted(actual_result)} after sorting, but the tests expected {sorted(expected)} after sorting. One or more scores were rounded incorrectly.'
                self.assertEqual(sorted(actual_result), sorted(expected), msg=error_message)

    @pytest.mark.task(taskno=2)
    def test_count_failed_students(self):
        test_data = [[89, 85, 42, 57, 90, 100, 95, 48, 70, 96], [40, 40, 35, 70, 30, 41, 90]]
        result_data = [0, 4]
        for (variant, (student_scores, expected)) in enumerate(zip(test_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', student_scores=student_scores, expected=expected):
                actual_result = count_failed_students(student_scores)
                error_message = f'Called count_failed_students({student_scores}). The function returned {actual_result}, but the tests expected {expected} for the number of students who failed.'
                self.assertEqual(actual_result, expected, msg=error_message)

    @pytest.mark.task(taskno=3)
    def test_above_threshold(self):
        test_data = [([40, 39, 95, 80, 25, 31, 70, 55, 40, 90], 98), ([88, 29, 91, 64, 78, 43, 41, 77, 36, 50], 80), ([100, 89], 100), ([88, 29, 91, 64, 78, 43, 41, 77, 36, 50], 78), ([], 80)]
        result_data = [[], [88, 91], [100], [88, 91, 78], []]
        for (variant, (params, expected)) in enumerate(zip(test_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', params=params, expected=expected):
                actual_result = above_threshold(*params)
                error_message = f'Called above_threshold{params}. The function returned {actual_result}, but the tests expected {expected} for the scores that are above the threshold.'
                self.assertEqual(actual_result, expected, msg=error_message)

    @pytest.mark.task(taskno=4)
    def test_letter_grades(self):
        test_data = [100, 97, 85, 92, 81]
        result_data = [[41, 56, 71, 86], [41, 55, 69, 83], [41, 52, 63, 74], [41, 54, 67, 80], [41, 51, 61, 71]]
        for (variant, (highest, expected)) in enumerate(zip(test_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', highest=highest, expected=expected):
                actual_result = letter_grades(highest)
                error_message = f'Called letter_grades({highest}). The function returned {actual_result}, but the tests expected {expected} for the letter grade cutoffs.'
                self.assertEqual(actual_result, expected, msg=error_message)

    @pytest.mark.task(taskno=5)
    def test_student_ranking(self):
        test_data = [([82], ['Betty']), ([88, 73], ['Paul', 'Ernest']), ([100, 98, 92, 86, 70, 68, 67, 60], ['Rui', 'Betty', 'Joci', 'Yoshi', 'Kora', 'Bern', 'Jan', 'Rose'])]
        result_data = [['1. Betty: 82'], ['1. Paul: 88', '2. Ernest: 73'], ['1. Rui: 100', '2. Betty: 98', '3. Joci: 92', '4. Yoshi: 86', '5. Kora: 70', '6. Bern: 68', '7. Jan: 67', '8. Rose: 60']]
        for (variant, (params, expected)) in enumerate(zip(test_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', params=params, expected=expected):
                actual_result = student_ranking(*params)
                error_message = f'Called student_ranking{params}. The function returned {actual_result}, but the tests expected {expected} for the student rankings.'
                self.assertEqual(actual_result, expected, msg=error_message)

    @pytest.mark.task(taskno=6)
    def test_perfect_score(self):
        test_data = [[['Joci', 100], ['Vlad', 100], ['Raiana', 100], ['Alessandro', 100]], [['Jill', 30], ['Paul', 73]], [], [['Rui', 60], ['Joci', 58], ['Sara', 91], ['Kora', 93], ['Alex', 42], ['Jan', 81], ['Lilliana', 40], ['John', 60], ['Bern', 28], ['Vlad', 55]], [['Yoshi', 52], ['Jan', 86], ['Raiana', 100], ['Betty', 60], ['Joci', 100], ['Kora', 81], ['Bern', 41], ['Rose', 94]]]
        result_data = [['Joci', 100], [], [], [], ['Raiana', 100]]
        for (variant, (student_info, expected)) in enumerate(zip(test_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', student_info=student_info, expected=expected):
                actual_result = perfect_score(student_info)
                error_message = f'Called perfect_score({student_info}). The function returned {actual_result}, but the tests expected {expected} for the first "perfect" score.'
                self.assertEqual(actual_result, expected, msg=error_message)
def test_export_airflow_format_option(airflow_runtime_instance):
    """CLI pipeline export: an unsupported --format value must fail with
    exit code 2 and report the valid Airflow export formats."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        cwd = Path.cwd().resolve()
        resource_dir = ((Path(__file__).parent / 'resources') / 'pipelines')
        copy_to_work_dir(str(cwd), [(resource_dir / 'airflow.pipeline'), (resource_dir / 'hello.ipynb')])
        pipeline_file = 'airflow.pipeline'
        pipeline_file_path = (cwd / pipeline_file)
        assert (pipeline_file_path.is_file() is True)
        # Remove any stale export artifacts before invoking the CLI.
        # NOTE(review): supported_export_format_value is always truthy here,
        # so the else branch is dead code — confirm intent upstream.
        for supported_export_format_value in ['yaml', 'py']:
            if supported_export_format_value:
                expected_output_file = pipeline_file_path.with_suffix(f'.{supported_export_format_value}')
            else:
                expected_output_file = pipeline_file_path.with_suffix('.py')
            if expected_output_file.is_file():
                expected_output_file.unlink()
        # Invalid formats must be rejected by option validation (exit code 2).
        for invalid_export_format_value in ['humpty', 'dumpty']:
            options = ['export', str(pipeline_file_path), '--runtime-config', airflow_runtime_instance, '--format', invalid_export_format_value]
            result = runner.invoke(pipeline, options)
            assert (result.exit_code == 2), result.output
            assert ("Invalid value for --format: Valid export formats are ['py']." in result.output), result.output |
class TestPrototypeStorage(BaseEvenniaTest):
    """Storage, update and search of DB-backed spawner prototypes."""
    def setUp(self):
        """Build three prototypes sharing the 'foo1' meta tag but with
        distinct keys/descriptions, derived from self.obj1."""
        super().setUp()
        self.maxDiff = None
        self.prot1 = spawner.prototype_from_object(self.obj1)
        self.prot1['prototype_key'] = 'testprototype1'
        self.prot1['prototype_desc'] = 'testdesc1'
        self.prot1['prototype_tags'] = [('foo1', _PROTOTYPE_TAG_META_CATEGORY)]
        self.prot2 = self.prot1.copy()
        self.prot2['prototype_key'] = 'testprototype2'
        self.prot2['prototype_desc'] = 'testdesc2'
        self.prot2['prototype_tags'] = [('foo1', _PROTOTYPE_TAG_META_CATEGORY)]
        self.prot3 = self.prot2.copy()
        self.prot3['prototype_key'] = 'testprototype3'
        self.prot3['prototype_desc'] = 'testdesc3'
        self.prot3['prototype_tags'] = [('foo1', _PROTOTYPE_TAG_META_CATEGORY)]
    def test_prototype_storage(self):
        """Create, re-create (update) and search prototypes by key and tag."""
        prot1 = protlib.create_prototype(self.prot1)
        self.assertTrue(bool(prot1))
        self.assertEqual(prot1, self.prot1)
        self.assertEqual(prot1['prototype_desc'], 'testdesc1')
        self.assertEqual(prot1['prototype_tags'], [('foo1', _PROTOTYPE_TAG_META_CATEGORY)])
        self.assertEqual(protlib.DbPrototype.objects.get_by_tag('foo1', _PROTOTYPE_TAG_META_CATEGORY)[0].db.prototype, prot1)
        prot2 = protlib.create_prototype(self.prot2)
        self.assertEqual([pobj.db.prototype for pobj in protlib.DbPrototype.objects.get_by_tag('foo1', _PROTOTYPE_TAG_META_CATEGORY)], [prot1, prot2])
        # Re-creating with an existing key replaces that stored prototype.
        prot1b = protlib.create_prototype({'prototype_key': 'testprototype1', 'foo': 'bar', 'prototype_tags': ['foo2']})
        self.assertEqual([pobj.db.prototype for pobj in protlib.DbPrototype.objects.get_by_tag('foo2', _PROTOTYPE_TAG_META_CATEGORY)], [prot1b])
        self.assertEqual(list(protlib.search_prototype('testprototype2')), [prot2])
        # The original prot1 was replaced by prot1b under the same key.
        self.assertNotEqual(list(protlib.search_prototype('testprototype1')), [prot1])
        self.assertEqual(list(protlib.search_prototype('testprototype1')), [prot1b])
        prot3 = protlib.create_prototype(self.prot3)
        # Hide module-defined prototypes so only DB prototypes are searched.
        with mock.patch('evennia.prototypes.prototypes._MODULE_PROTOTYPES', {}):
            self.assertCountEqual(protlib.search_prototype('prot'), [prot1b, prot2, prot3])
            self.assertCountEqual(protlib.search_prototype(tags='foo1'), [prot1b, prot2, prot3])
        self.assertTrue(str(str(protlib.list_prototypes(self.char1)))) |
class TestFocusVisible(util.TestCase):
    """':focus-visible' selector tests against a static snapshot, where no
    element can have focus."""
    MARKUP = '\n    <form id="form">\n    <input type="text">\n    </form>\n    '
    def test_focus_visible(self):
        """Nothing is focused in a static document, so no matches."""
        self.assert_selector(self.MARKUP, 'form:focus-visible', [], flags=util.HTML)
    def test_not_focus_visible(self):
        """The complement therefore matches the form element."""
        self.assert_selector(self.MARKUP, 'form:not(:focus-visible)', ['form'], flags=util.HTML) |
def ajax_staff_required(view_func):
    """Decorator for AJAX views: run *view_func* only for staff users;
    otherwise answer 401 with a small JSON body instead of a login redirect."""
    from functools import wraps

    # NOTE(review): the original had a bare '(view_func)' line here — a
    # decorator garbled in extraction, almost certainly functools.wraps
    # (preserves the wrapped view's name/docstring); restored.
    @wraps(view_func)
    def wrapper(request, *args, **kwargs):
        if request.user.is_staff:
            return view_func(request, *args, **kwargs)
        resp = json.dumps({'not_authenticated': True})
        return HttpResponse(resp, content_type='application/json', status=401)
    return wrapper
def validate_custom_claims(custom_claims, required=False):
    """Validate developer-supplied custom claims for a token.

    Returns the claims serialized as a string, or ``None`` when absent and
    not required. Raises ``ValueError`` when the payload is too large, is
    not valid JSON, is not a JSON object, or uses reserved claim names.
    """
    if custom_claims is None and not required:
        return None
    claims_str = str(custom_claims)
    if len(claims_str) > MAX_CLAIMS_PAYLOAD_SIZE:
        raise ValueError('Custom claims payload must not exceed {0} characters.'.format(MAX_CLAIMS_PAYLOAD_SIZE))
    try:
        parsed = json.loads(claims_str)
    except ValueError as error:
        # json.JSONDecodeError is a ValueError subclass, and claims_str is
        # always a str here — the previous broad 'except Exception' could
        # mask genuine programming errors. Chain the cause for debugging.
        raise ValueError('Failed to parse custom claims string as JSON.') from error
    if not isinstance(parsed, dict):
        raise ValueError('Custom claims must be parseable as a JSON object.')
    invalid_claims = RESERVED_CLAIMS.intersection(set(parsed.keys()))
    if len(invalid_claims) > 1:
        joined = ', '.join(sorted(invalid_claims))
        raise ValueError('Claims "{0}" are reserved, and must not be set.'.format(joined))
    if len(invalid_claims) == 1:
        raise ValueError('Claim "{0}" is reserved, and must not be set.'.format(invalid_claims.pop()))
    return claims_str
class FBDelay(fb.FBCommand):
    """Chisel command 'zzz': resume the process, then re-interrupt it after a
    delay and run another lldb command."""
    def name(self):
        return 'zzz'
    def description(self):
        return 'Executes specified lldb command after delay.'
    def args(self):
        return [fb.FBCommandArgument(arg='delay in seconds', type='float', help='time to wait before executing specified command'), fb.FBCommandArgument(arg='lldb command', type='string', help='another lldb command to execute after specified delay', default='process interrupt')]
    def run(self, arguments, options):
        # Async mode lets the process keep running while the timer waits.
        lldb.debugger.SetAsync(True)
        lldb.debugger.HandleCommand('process continue')
        delay = float(arguments[0])
        command = str(arguments[1])
        # Fire runDelayed on a background timer thread after `delay` seconds.
        t = Timer(delay, (lambda : self.runDelayed(command)))
        t.start()
    def runDelayed(self, command):
        # Stop the process first so the queued command runs on a paused target.
        lldb.debugger.HandleCommand('process interrupt')
        lldb.debugger.HandleCommand(command) |
# NOTE(review): this block is Vyper (Casper FFG contract), not Python, and
# it is garbled — the literal values for 'dynasty_end:' and
# 'withdrawal_epoch:' were lost in extraction (upstream they are "never"
# sentinel constants), so the validator-record line does not parse as-is.
# Restore the missing values from the original contract source.
def deposit(validation_addr: address, withdrawal_addr: address):
    # Deposits are only accepted exactly at the current epoch.
    assert (self.current_epoch == (block.number / self.epoch_length))
    # The validation code must pass the purity checker (static call must
    # return a nonzero word).
    assert (extract32(raw_call(self.purity_checker, concat('\x90>', as_bytes32(validation_addr)), gas=500000, outsize=32), 0) != as_bytes32(0))
    # New validator becomes active two dynasties from now.
    self.validators[self.nextValidatorIndex] = {deposit: msg.value, dynasty_start: (self.dynasty + 2), original_dynasty_start: (self.dynasty + 2), dynasty_end: , withdrawal_epoch: , addr: validation_addr, withdrawal_addr: withdrawal_addr, prev_commit_epoch: 0}
    self.nextValidatorIndex += 1
    self.second_next_dynasty_wei_delta += msg.value |
class HagerZhang(LineSearch):
    """Hager–Zhang line search (the algorithm used by CG_DESCENT), built on
    bracket / secant² / bisection interval refinement.

    NOTE(review): parameter names (psi_*, theta, gamma, rho, Delta, omega)
    presumably follow the Hager–Zhang paper's notation — confirm against the
    reference implementation this was derived from.
    """
    def __init__(self, *args, alpha_prev=None, f_prev=None, dphi0_prev=None, quad_step=False, eps=1e-06, theta=0.5, gamma=0.5, rho=5, psi_0=0.01, psi_1=0.1, psi_2=2.0, psi_low=0.1, psi_hi=10, Delta=0.7, omega=0.001, max_bisects=10, **kwargs):
        """Store algorithm constants; alpha_prev/f_prev/dphi0_prev carry
        state over from a previous line search call, if any."""
        # This line search always checks the (approximate) Wolfe conditions.
        kwargs['cond'] = 'wolfe'
        super().__init__(*args, **kwargs)
        self.alpha_prev = alpha_prev
        self.f_prev = f_prev
        self.dphi0_prev = dphi0_prev
        self.quad_step = quad_step
        self.eps = eps
        self.theta = theta
        self.gamma = gamma
        self.rho = rho
        self.psi_0 = psi_0
        self.psi_1 = psi_1
        self.psi_2 = psi_2
        self.psi_low = psi_low
        self.psi_hi = psi_hi
        self.Delta = Delta
        self.omega = omega
        self.max_bisects = max_bisects
    def prepare_line_search(self):
        """Set epsk, the error tolerance scaled by |f(0)|."""
        super().prepare_line_search()
        self.epsk = (self.eps * abs(self.get_fg('f', 0.0)))
    def bisect(self, a, b):
        """theta-weighted bisection of [a, b] until the trial point has
        non-negative slope; returns the refined bracket (a, d)."""
        for i in range(self.max_bisects):
            d = (((1 - self.theta) * a) + (self.theta * b))
            dphi_d = self.get_phi_dphi('g', d)
            # Non-negative slope at d closes the opposite-slope bracket.
            if (dphi_d >= 0):
                return (a, d)
            phi_d = self.get_phi_dphi('f', d)
            # Keep d as the low end while phi stays under phi0 + epsk.
            if (phi_d <= (self.phi0 + self.epsk)):
                a = d
            elif (phi_d > (self.phi0 + self.epsk)):
                b = d
        raise Exception('Bisect failed!')
    def interval_update(self, a, b, c):
        """Update the bracketing interval [a, b] given trial point c."""
        # Trial point outside the interval: keep the bracket unchanged.
        if (not (a < c < b)):
            return (a, b)
        (phi_c, dphi_c) = self.get_phi_dphi('fg', c)
        if (dphi_c >= 0):
            return (a, c)
        elif (phi_c <= (self.phi0 + self.epsk)):
            return (c, b)
        # phi rose but slope is still negative: fall back to bisection.
        return self.bisect(a, c)
    def secant(self, a, b):
        """Secant step from the derivative values at a and b."""
        dphia = self.get_phi_dphi('g', a)
        dphib = self.get_phi_dphi('g', b)
        return (((a * dphib) - (b * dphia)) / (dphib - dphia))
    def double_secant(self, a, b):
        """The 'secant²' step: one secant step plus a corrective secant when
        the first step lands on an interval endpoint."""
        c = self.secant(a, b)
        (A, B) = self.interval_update(a, b, c)
        cB_close = np.isclose(c, B)
        cA_close = np.isclose(c, A)
        if cB_close:
            c_dash = self.secant(b, B)
        elif cA_close:
            c_dash = self.secant(a, A)
        if (cB_close or cA_close):
            (a_dash, b_dash) = self.interval_update(A, B, c_dash)
        else:
            (a_dash, b_dash) = (A, B)
        return (a_dash, b_dash)
    def bracket(self, c):
        """Grow c geometrically (factor rho) until an opposite-slope or
        high-value bracket around a minimizer is found.

        NOTE(review): if all 10 expansions fail this falls off the loop and
        implicitly returns None, which the caller then unpacks — confirm
        whether an explicit error is intended here.
        """
        cs = list()
        p0epsk = (self.phi0 + self.epsk)
        for j in range(10):
            cs.append(c)
            dphi_j = self.get_phi_dphi('g', c)
            if ((dphi_j >= 0) and (j == 0)):
                return (0, c)
            phi_j = self.get_phi_dphi('f', c)
            if (dphi_j >= 0):
                # Slope turned non-negative: pair c with the most recent
                # earlier point whose value stayed below phi0 + epsk.
                phi_inds = (np.array([self.get_fg('f', c) for c in cs[:(- 1)]]) <= p0epsk)
                ci = ((len(phi_inds) - phi_inds[::(- 1)].argmax()) - 1)
                return (cs[ci], c)
            elif (phi_j > p0epsk):
                return self.bisect(0, c)
            c *= self.rho
    def norm_inf(self, arr):
        """Infinity norm helper."""
        return np.linalg.norm(arr, np.inf)
    def initial(self):
        """Choose the very first trial step length (the 'I0' heuristics)."""
        if (~ np.isclose(self.x0, np.zeros_like(self.x0))).any():
            c = ((self.psi_0 * self.norm_inf(self.x0)) / self.norm_inf(self.g0))
        elif (not np.isclose(self.f0, 0)):
            c = ((self.psi_0 * self.f0) / (self.norm_inf(self.g0) ** 2))
        else:
            c = 1
        return c
    def take_quad_step(self, alpha, g0_):
        """Trial step from a quadratic interpolant of phi; falls back to
        alpha when the interpolant is not convex (denom <= 0)."""
        fact = max(self.psi_low, (g0_ / (self.dphi0 * self.psi_2)))
        alpha_ = (min(fact, self.psi_hi) * alpha)
        phi_ = self.get_phi_dphi('f', alpha_)
        denom = (2 * (((phi_ - self.phi0) / alpha_) - self.dphi0))
        f_temp = self.get_fg('f', alpha_)
        if (denom > 0.0):
            c = (((- self.dphi0) * alpha_) / denom)
            if (f_temp > self.get_fg('f', 0)):
                # Guard against a vanishing step when f increased.
                c = max(c, (alpha_ * 1e-10))
        else:
            c = alpha
        return c
    def run_line_search(self):
        """Main loop: pick an initial step, bracket, then shrink the bracket
        with secant²/bisection until the Wolfe condition holds at ak."""
        if ((self.alpha_init is None) and self.alpha_prev):
            alpha_init = self.alpha_prev
        elif ((self.alpha_init is None) and (self.alpha_prev is None)):
            alpha_init = self.initial()
        else:
            alpha_init = self.alpha_init
        if self.quad_step:
            g0_ = (((- 2) * abs((self.get_fg('f', 0) / alpha_init))) if (self.dphi0_prev is None) else self.dphi0_prev)
            alpha_init = self.take_quad_step((self.psi_2 * alpha_init), g0_)
        # Prime the phi/dphi cache at the initial step.
        _ = self.get_phi_dphi('fg', alpha_init)
        (ak, bk) = self.bracket(alpha_init)
        for k in range(self.max_cycles):
            if self.cond_func(ak):
                break
            (a, b) = self.double_secant(ak, bk)
            # If the interval did not shrink enough, force a bisection step.
            if ((b - a) > (self.gamma * (bk - ak))):
                c = ((a + b) / 2)
                (a, b) = self.interval_update(a, b, c)
            (ak, bk) = (a, b)
        else:
            raise LineSearchNotConverged
        return ak |
# NOTE(review): this is a pytest fixture (it monkeypatches and yields) whose
# @pytest.fixture decorator appears to have been lost in extraction.
def msolc(monkeypatch):
    """Fake solcx version management: yields the mutable 'installed' list;
    install_solc is patched to append the requested version to it."""
    installed = [Version('0.5.8'), Version('0.5.7'), Version('0.4.23'), Version('0.4.22'), Version('0.4.6')]
    monkeypatch.setattr('solcx.get_installed_solc_versions', (lambda : installed))
    monkeypatch.setattr('solcx.install_solc', (lambda k, **z: installed.append(k)))
    monkeypatch.setattr('solcx.get_installable_solc_versions', (lambda : [Version('0.6.7'), Version('0.6.2'), Version('0.6.0'), Version('0.5.15'), Version('0.5.8'), Version('0.5.7'), Version('0.5.0'), Version('0.4.25'), Version('0.4.24'), Version('0.4.22')]))
    (yield installed) |
class ErrorCode(Enum):
    """Signing error codes carried inside the protocol's protobuf messages.

    NOTE(review): the @staticmethod/@classmethod decorators were missing in
    the extracted source; restored — encode takes no self/cls and decode's
    first parameter is cls, so the intent is unambiguous.
    """

    UNSUCCESSFUL_MESSAGE_SIGNING = 0
    UNSUCCESSFUL_TRANSACTION_SIGNING = 1

    @staticmethod
    def encode(error_code_protobuf_object: Any, error_code_object: 'ErrorCode') -> None:
        """Write error_code_object's value onto the protobuf object."""
        error_code_protobuf_object.error_code = error_code_object.value

    @classmethod
    def decode(cls, error_code_protobuf_object: Any) -> 'ErrorCode':
        """Read an ErrorCode back out of the protobuf object."""
        enum_value_from_pb2 = error_code_protobuf_object.error_code
        return ErrorCode(enum_value_from_pb2)
class InlineResponse2005(ModelNormal):
    """Auto-generated OpenAPI client model.

    NOTE(review): several decorators were stripped to bare names in the
    extracted source ('_property', '_js_args_to_python_args'); restored
    below to the standard openapi-generator forms (@cached_property,
    @classmethod + @convert_js_args_to_python_args) — confirm against the
    generated original.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        """Map attribute name -> (type,) tuple, resolved lazily to avoid
        import cycles."""
        lazy_import()
        return {'data': ([SecretStoreResponse],), 'meta': (PaginationCursorMeta,)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {'data': 'data', 'meta': 'meta'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw server data (read-only attributes allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys the schema does not know about.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user-supplied data; read-only attributes raise."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class Plain(AmbassadorTest):
    """Baseline Ambassador test running in a dedicated 'plain-namespace'.

    NOTE(review): the manifest strings below contain '{self.path.k8s}'
    placeholders but are NOT f-strings, and several YAML keys run together
    ('name: plain-simplemapping- namespace:', 'service: ambassador_id:',
    '- name: protocol:') — this block was garbled in extraction; restore the
    original (presumably f-string) manifests from upstream.
    """
    single_namespace = True
    namespace = 'plain-namespace'
    # NOTE(review): variants takes cls — presumably originally @classmethod.
    def variants(cls) -> Generator[(Node, None, None)]:
        (yield cls(variants(MappingTest)))
    def manifests(self) -> str:
        # Two namespaces plus a Service whose annotations carry a Mapping and
        # a cleartext Host resource.
        m = ((namespace_manifest('plain-namespace') + namespace_manifest('evil-namespace')) + '\n---\nkind: Service\napiVersion: v1\nmetadata:\n name: plain-simplemapping- namespace: evil-namespace\n annotations:\n getambassador.io/config: |\n ---\n apiVersion: getambassador.io/v3alpha1\n kind: Mapping\n name: SimpleMapping-HTTP-all\n hostname: "*"\n prefix: /SimpleMapping-HTTP-all/\n service: ambassador_id: [plain]\n ---\n apiVersion: getambassador.io/v3alpha1\n kind: Host\n name: cleartext-host-{self.path.k8s}\n ambassador_id: [ "plain" ]\n hostname: "*"\n mappingSelector:\n matchLabels:\n hostname: {self.path.k8s}\n acmeProvider:\n authority: none\n requestPolicy:\n insecure:\n action: Route\n # additionalPort: 8080\n labels:\n scope: AmbassadorTest\nspec:\n selector:\n backend: plain-simplemapping- ports:\n - name: protocol: TCP\n port: 80\n targetPort: 8080\n - name: protocol: TCP\n port: 443\n targetPort: 8443\n')
        if EDGE_STACK:
            m += '\n---\nkind: Service\napiVersion: v1\nmetadata:\n name: cleartext-host-{self.path.k8s}\n namespace: plain-namespace\n annotations:\n getambassador.io/config: |\n ---\n apiVersion: getambassador.io/v3alpha1\n kind: Host\n name: cleartext-host-{self.path.k8s}\n ambassador_id: [ "plain" ]\n hostname: "*"\n mappingSelector:\n matchLabels:\n hostname: {self.path.k8s}\n acmeProvider:\n authority: none\n requestPolicy:\n insecure:\n action: Route\n # Since this is cleartext already, additionalPort: 8080 is technically\n # an error. Leave it in to make sure it\'s a harmless no-op error.\n additionalPort: 8080\n labels:\n scope: AmbassadorTest\nspec:\n selector:\n backend: plain-simplemapping- ports:\n - name: protocol: TCP\n port: 80\n targetPort: 8080\n - name: protocol: TCP\n port: 443\n targetPort: 8443\n'
        return (m + super().manifests())
    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        (yield (self, '\n---\napiVersion: getambassador.io/v3alpha1\nkind: Module\nname: ambassador\nconfig: {}\n'))
    def queries(self):
        # Ask the diag endpoint for any configuration errors.
        (yield Query(self.url('ambassador/v0/diag/?json=true&filter=errors'), phase=2))
    def check(self):
        """Fail on missing-CRD or Ingress-resource errors in the diag output."""
        errors = self.results[0].json
        for (source, error) in errors:
            if (('could not find' in error) and ('CRD definitions' in error)):
                assert False, f'Missing CRDs: {error}'
            if ('Ingress resources' in error):
                assert False, f'Ingress resource error: {error}' |
# NOTE(review): the two argument lists below were hypothesis decorators whose
# '@settings(' / '@given(' prefixes were lost in extraction (the bare
# '(max_examples=250)' did not even parse); restored — the file must import
# `settings`, `given` and `strategies as st` from hypothesis.
@settings(max_examples=250)
@given(value=st.one_of(st.integers(), st.decimals(), st.none()), value_bit_size=st.integers(min_value=1, max_value=32).map((lambda v: (v * 8))), frac_places=st.integers(min_value=1, max_value=80), data_byte_size=st.integers(min_value=0, max_value=32))
def test_encode_signed_fixed(value, value_bit_size, frac_places, data_byte_size):
    """Property test: SignedFixedEncoder must reject oversized layouts,
    non-numbers, NaN/Inf, out-of-bounds values and sub-precision residues,
    and encode everything else."""
    # Layout error: the value cannot fit in the data slot at all.
    if (value_bit_size > (data_byte_size * 8)):
        pattern = 'Value byte size exceeds data size'
        with pytest.raises(ValueError, match=pattern):
            SignedFixedEncoder(value_bit_size=value_bit_size, frac_places=frac_places, data_byte_size=data_byte_size)
        return
    encoder = SignedFixedEncoder(value_bit_size=value_bit_size, frac_places=frac_places, data_byte_size=data_byte_size)
    # Non-numeric input (None) must be rejected by type.
    if (not is_number(value)):
        pattern = 'Value `None` of type .*NoneType.* cannot be encoded by SignedFixedEncoder'
        with pytest.raises(EncodingTypeError, match=pattern):
            encoder(value)
        return
    # NaN / +-Infinity are representable Decimals but illegal fixed values.
    if SignedFixedEncoder.illegal_value_fn(value):
        pattern = 'Value .*(NaN|Infinity|-Infinity).* cannot be encoded by SignedFixedEncoder'
        with pytest.raises(IllegalValue, match=pattern):
            encoder(value)
        return
    # Range check against the signed fixed-point bounds.
    (lower, upper) = compute_signed_fixed_bounds(value_bit_size, frac_places)
    if ((value < lower) or (value > upper)):
        pattern = 'Value .* cannot be encoded by SignedFixedEncoder: Cannot be encoded in .* bits'
        with pytest.raises(ValueOutOfBounds, match=pattern):
            encoder(value)
        return
    # Values with precision finer than frac_places leave a residue.
    with decimal.localcontext(abi_decimal_context):
        residue = (value % (TEN ** (- frac_places)))
    if (residue > 0):
        pattern = 'Value .* cannot be encoded by SignedFixedEncoder: residue .* outside allowed'
        with pytest.raises(IllegalValue, match=pattern):
            encoder(value)
        return
    # Everything else must encode without raising.
    encoder(value)
class ThermalMonitor():
def __init__(self, log_handle: List[str], adb: ADB, thermal_monitor_config: Dict[(str, str)], pattern: str, delay: float=10.0, lead_in_delay: float=15.0):
self.log_handle = log_handle
self.thermal_monitor_config = thermal_monitor_config
self.adb = adb
self.initialized = False
self.thermal_monitor_script_dst = '/data/local/tmp/thermal_monitor.sh'
if thermal_monitor_config:
try:
self.log(f'Thermal config found for device {adb.device}.')
self.trip_temp_expr = self.thermal_monitor_config['trip_temp_expr']
self.temp_probe = self.thermal_monitor_config['temp_probe']
self.push_thermal_script()
self.initialized = True
except Exception:
self.log(f'WARNING! Thermal monitoring was not properly initialized for device {self.adb.device}.', logging.CRITICAL, exc_info=True)
self.pattern = pattern
self.delay = delay
self.lead_in_delay = lead_in_delay
self.thermal_trip_pattern = 'THERMAL LIMIT EXCEEDED! STOPPING PROCESSES'
self.no_matching_process_pattern = 'No processes running matching pattern found. Stopping.'
self.monitor = None
self.running = False
def __enter__(self):
if self.initialized:
self.running = True
self.monitor = threading.Thread(target=self.start_thermal_monitoring)
self.monitor.start()
self.log(f'Thermal monitoring started. self.trip_temp_expr={self.trip_temp_expr!r} self.temp_probe={self.temp_probe!r} self.pattern={self.pattern!r} self.delay={self.delay!r}')
def __exit__(self, type, value, traceback):
if self.initialized:
if (self.running is True):
self.running = False
self.monitor.join(timeout=5.0)
self.log('Exiting thermal monitor.')
def thermal_stop_action(self):
setRunKilled(True)
def push_thermal_script(self):
thermal_monitor_script = self.thermal_monitor_config.get('script')
thermal_monitor_script_loc = pkg_resources.resource_filename('aibench', thermal_monitor_script)
self.adb.push(thermal_monitor_script_loc, self.thermal_monitor_script_dst)
self.adb.shell(['chmod', '+x', self.thermal_monitor_script_dst], silent=True)
def start_thermal_monitoring(self):
lead_in = True
while self.running:
if lead_in:
time.sleep(self.lead_in_delay)
lead_in = False
cmd = f'export TRIP_TEMP="{self.trip_temp_expr}" && export TEMP_PROBE="{self.temp_probe}" && export PROG_PATTERN="{self.pattern}" && export THERMAL_TRIP_PATTERN="{self.thermal_trip_pattern}" && export NO_MATCHING_PROCESS_PATTERN="{self.no_matching_process_pattern}" && {self.thermal_monitor_script_dst}'.split(' ')
result = self.adb.shell(cmd, silent=True)
if result:
self.log('\n'.join(result))
if (self.no_matching_process_pattern in result):
self.running = False
self.log('Process pattern not matched, exiting thermal monitor.')
break
elif (self.thermal_trip_pattern in result):
self.log('\n\n## CRITICAL TEMPERATURE REACHED ON DEVICE! PROCESSES HAVE BEEN SHUT DOWN. ##\n\n')
self.thermal_stop_action()
time.sleep(self.delay)
def log(self, msg: str, level=logging.INFO, exc_info=False):
getLogger().log(level, '\n'.join([('Thermal monitoring:\t' + string) for string in msg.split('\n')]), exc_info=exc_info)
self.log_handle += [('Thermal monitoring:\t' + string) for string in msg] |
class HttpSerializer(Serializer):
    """Serializer for the AEA 'http' protocol (protobuf-backed).

    NOTE(review): this block is badly garbled by extraction — several
    right-hand sides are missing (the bare '=' / 'performative =' /
    'method =' lines below), so it does not parse as-is. In the generated
    original these lines construct and read http_pb2 protobuf objects;
    restore them from the generated protocol source. encode/decode also
    presumably carried @staticmethod decorators.
    """
    def encode(msg: Message) -> bytes:
        """Serialize an HttpMessage into bytes via its protobuf envelope."""
        msg = cast(HttpMessage, msg)
        message_pb = ProtobufMessage()
        dialogue_message_pb = DialogueMessage()
        # NOTE(review): missing RHS — originally the http_pb2 message object.
        =
        dialogue_message_pb.message_id = msg.message_id
        dialogue_reference = msg.dialogue_reference
        dialogue_message_pb.dialogue_starter_reference = dialogue_reference[0]
        dialogue_message_pb.dialogue_responder_reference = dialogue_reference[1]
        dialogue_message_pb.target = msg.target
        performative_id = msg.performative
        if (performative_id == HttpMessage.Performative.REQUEST):
            # NOTE(review): missing RHS — the Request performative pb object.
            performative =
            method = msg.method
            performative.method = method
            url = msg.url
            performative.url = url
            version = msg.version
            performative.version = version
            headers = msg.headers
            performative.headers = headers
            body = msg.body
            performative.body = body
        elif (performative_id == HttpMessage.Performative.RESPONSE):
            # NOTE(review): missing RHS — the Response performative pb object.
            performative =
            version = msg.version
            performative.version = version
            status_code = msg.status_code
            performative.status_code = status_code
            status_text = msg.status_text
            performative.status_text = status_text
            headers = msg.headers
            performative.headers = headers
            body = msg.body
            performative.body = body
        else:
            raise ValueError('Performative not valid: {}'.format(performative_id))
        # NOTE(review): missing RHS — the serialized http pb message bytes.
        dialogue_message_pb.content =
        message_pb.dialogue_message.CopyFrom(dialogue_message_pb)
        message_bytes = message_pb.SerializeToString()
        return message_bytes
    def decode(obj: bytes) -> Message:
        """Deserialize bytes back into an HttpMessage."""
        message_pb = ProtobufMessage()
        # NOTE(review): missing RHS — originally the http_pb2 message object.
        =
        message_pb.ParseFromString(obj)
        message_id = message_pb.dialogue_message.message_id
        dialogue_reference = (message_pb.dialogue_message.dialogue_starter_reference, message_pb.dialogue_message.dialogue_responder_reference)
        target = message_pb.dialogue_message.target
        # NOTE(review): missing RHS — the performative read from the pb body.
        performative =
        performative_id = HttpMessage.Performative(str(performative))
        performative_content = {}
        if (performative_id == HttpMessage.Performative.REQUEST):
            # NOTE(review): the missing RHS values below are the fields of
            # the Request performative pb object.
            method =
            performative_content['method'] = method
            url =
            performative_content['url'] = url
            version =
            performative_content['version'] = version
            headers =
            performative_content['headers'] = headers
            body =
            performative_content['body'] = body
        elif (performative_id == HttpMessage.Performative.RESPONSE):
            # NOTE(review): likewise for the Response performative fields.
            version =
            performative_content['version'] = version
            status_code =
            performative_content['status_code'] = status_code
            status_text =
            performative_content['status_text'] = status_text
            headers =
            performative_content['headers'] = headers
            body =
            performative_content['body'] = body
        else:
            raise ValueError('Performative not valid: {}.'.format(performative_id))
        return HttpMessage(message_id=message_id, dialogue_reference=dialogue_reference, target=target, performative=performative, **performative_content) |
def test_multipart_rewinds_files():
    """Posting the same open file object twice must succeed both times,
    i.e. the multipart encoder rewinds the file position between requests.

    NOTE(review): several right-hand sides (``transport =``, ``client =``)
    and the URL string literals in ``client.post(...)`` are truncated in
    this source; restore them from upstream history before running.
    """
    with tempfile.TemporaryFile() as upload:
        upload.write(b'Hello, world!')
        # NOTE(review): truncated — presumably a mock transport echoing the
        # request body, and a client built on it.  TODO confirm.
        transport =
        client =
        files = {'file': upload}
        # First upload: file content must appear in the multipart body.
        response = client.post(' files=files)
        assert (response.status_code == 200)
        assert (b'\r\nHello, world!\r\n' in response.content)
        # Second upload of the *same* object: only succeeds if the encoder
        # rewound the file — otherwise the body would be empty.
        files = {'file': upload}
        response = client.post(' files=files)
        assert (response.status_code == 200)
        assert (b'\r\nHello, world!\r\n' in response.content)
def _transform_1x1_conv_gemm_rcr(sorted_graph: List[Tensor]) -> List[Tensor]:
    """Rewrite 1x1 conv2d (+ fused epilogue) ops as equivalent gemm_rcr ops.

    A conv2d whose kernel is 1x1 with stride 1, padding 0, dilation 1 and a
    single group is pointwise over the spatial dims, so it can be computed
    as a matmul of the flattened input (N*H*W, CI) against the squeezed
    weight (CO, CI).  Returns the transformed sorted graph.
    """
    # Each supported conv2d variant maps to the gemm_rcr op carrying the
    # same fused epilogue.
    conv_to_gemm = {'conv2d': ops.gemm_rcr, 'conv2d_bias': ops.gemm_rcr_bias, 'conv2d_bias_relu': ops.gemm_rcr_bias_relu, 'conv2d_bias_sigmoid': ops.gemm_rcr_bias_sigmoid, 'conv2d_bias_hardswish': ops.gemm_rcr_bias_hardswish, 'conv2d_bias_add_relu': ops.gemm_rcr_bias_add_relu}
    def match_func(tensor: Tensor) -> bool:
        """Return True iff *tensor* is produced by a rewritable 1x1 conv."""
        src_ops = tensor._attrs['src_ops']
        if ((src_ops is None) or (len(src_ops) != 1)):
            return False
        src_op = list(src_ops)[0]
        if (src_op._attrs['op'] not in conv_to_gemm):
            return False
        # Attributes may be stored as scalars or per-dimension tuples.
        valid_pad = ((src_op._attrs['pad'] == 0) or (src_op._attrs['pad'] == (0, 0)))
        valid_dilate = ((src_op._attrs['dilate'] == 1) or (src_op._attrs['dilate'] == (1, 1)))
        valid_stride = ((src_op._attrs['stride'] == 1) or (src_op._attrs['stride'] == (1, 1)))
        valid_group = (src_op._attrs['group'] == 1)
        if ((not valid_pad) or (not valid_dilate) or (not valid_stride) or (not valid_group)):
            return False
        # Weight layout is (CO, KH, KW, CI); require KH == KW == 1.
        w_shape = src_op._attrs['inputs'][1]._attrs['shape']
        if ((not is_singleton_dimension(w_shape[1])) or (not is_singleton_dimension(w_shape[2]))):
            return False
        return True
    def replace_func(old_tensor: Tensor) -> List[Tensor]:
        """Build the gemm replacement subgraph for *old_tensor*."""
        src_op = list(old_tensor._attrs['src_ops'])[0]
        inputs = src_op._attrs['inputs']
        X = inputs[0]
        W = inputs[1]
        # Squeeze the two singleton kernel dims: (CO, 1, 1, CI) -> (CO, CI).
        W_squeeze_0 = ops.squeeze(1)(W)
        W_squeeze_1 = ops.squeeze(1)(W_squeeze_0)
        (batch, HH, WW, CI) = ops.size()(X)
        CO = ops.size()(W, dim=0)
        # Flatten NHWC input to a 2-D (N*H*W, CI) matrix for the gemm.
        X_reshape = ops.reshape()(X, ((- 1), CI))
        new_op = conv_to_gemm[src_op._attrs['op']]
        # Extra inputs (bias, residual add) are forwarded positionally.
        if (len(inputs) == 2):
            new_tensor = new_op()(X_reshape, W_squeeze_1)
        elif (len(inputs) == 3):
            new_tensor = new_op()(X_reshape, W_squeeze_1, inputs[2])
        elif (len(inputs) == 4):
            new_tensor = new_op()(X_reshape, W_squeeze_1, inputs[2], inputs[3])
        else:
            raise NotImplementedError(f'Unsupported number of inputs: {len(inputs)}')
        # Restore the original NHWC output shape (CO becomes the channel dim).
        new_tensor_reshape = ops.reshape()(new_tensor, (batch, HH, WW, CO))
        copy_tensor_attributes(new_tensor_reshape, old_tensor)
        copy_src_op_attributes(new_tensor_reshape, old_tensor)
        # Detach the replaced conv op so the old subgraph can be collected.
        remove_dst_op_from_tensor(inputs, src_op)
        replace_tensor(old_tensor, new_tensor_reshape)
        return [W_squeeze_0, W_squeeze_1, batch, HH, WW, CI, CO, X_reshape, new_tensor, new_tensor_reshape]
    return _simple_transform_with_constraint(sorted_graph, match_func, replace_func)
def test_warning_if_transform_df_contains_categories_not_present_in_fit_df(df_enc, df_enc_rare):
    """Unseen categories warn with unseen='ignore' and raise with unseen='raise'."""
    msg = 'During the encoding, NaN values were introduced in the feature(s) var_A.'
    # unseen='ignore': transforming data with new categories only warns.
    with pytest.warns(UserWarning) as captured:
        ignoring_encoder = MeanEncoder(unseen='ignore')
        ignoring_encoder.fit(df_enc[['var_A', 'var_B']], df_enc['target'])
        ignoring_encoder.transform(df_enc_rare[['var_A', 'var_B']])
    assert len(captured) == 1
    assert captured[0].message.args[0] == msg
    # unseen='raise': the same situation is a hard error.
    with pytest.raises(ValueError) as excinfo:
        raising_encoder = MeanEncoder(unseen='raise')
        raising_encoder.fit(df_enc[['var_A', 'var_B']], df_enc['target'])
        raising_encoder.transform(df_enc_rare[['var_A', 'var_B']])
    assert str(excinfo.value) == msg
class OptionPlotoptionsStreamgraphSonificationTracksMappingHighpassResonance(Options):
    """Mapping options for the highpass filter resonance of a sonification track.

    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped in the source (each getter/setter pair shared a name,
    so the setter silently shadowed the getter); restored below per the
    surrounding ``_config_get`` / ``_config`` accessor convention.
    """

    @property
    def mapFunction(self):
        """Custom mapping function, or None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the resonance is mapped to, or None when unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value, or None when unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value, or None when unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within, or None when unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class KeyattrDict(BaseDict):
    """Dict whose keys are also reachable/settable as attributes.

    ``keyattr_enabled`` toggles attribute-style access; ``keyattr_dynamic``
    additionally auto-creates empty nested dicts for missing attributes.

    NOTE(review): the ``@property`` / ``@keyattr_*.setter`` decorators were
    stripped in the source (the bare ``_enabled.setter`` / ``_dynamic.setter``
    remnants would raise NameError at class creation); restored below.
    """

    # Class-level defaults so __getattr__/__setattr__ work before __init__.
    _keyattr_enabled = None
    _keyattr_dynamic = None

    def __init__(self, *args, **kwargs):
        self._keyattr_enabled = kwargs.pop('keyattr_enabled', True)
        self._keyattr_dynamic = kwargs.pop('keyattr_dynamic', False)
        super().__init__(*args, **kwargs)

    @property
    def keyattr_enabled(self):
        """Whether attribute-style key access is enabled."""
        return self._keyattr_enabled

    @keyattr_enabled.setter
    def keyattr_enabled(self, value):
        self._keyattr_enabled = value

    @property
    def keyattr_dynamic(self):
        """Whether missing attributes are auto-created as empty dicts."""
        return self._keyattr_dynamic

    @keyattr_dynamic.setter
    def keyattr_dynamic(self, value):
        self._keyattr_dynamic = value

    def __getattr__(self, attr):
        # Only called when normal attribute lookup fails: fall back to keys.
        attr_message = f'{self.__class__.__name__!r} object has no attribute {attr!r}'
        if (not self._keyattr_enabled):
            raise AttributeError(attr_message)
        try:
            return self.__getitem__(attr)
        except KeyError:
            # Never auto-create dunder/private names (breaks copy/pickle).
            if attr.startswith('_'):
                raise AttributeError(attr_message) from None
            if (not self._keyattr_dynamic):
                raise AttributeError(attr_message) from None
            # Dynamic mode: materialize an empty nested mapping on demand.
            self.__setitem__(attr, {})
            return self.__getitem__(attr)

    def __setattr__(self, attr, value):
        attr_message = f'{self.__class__.__name__!r} object has no attribute {attr!r}'
        if (attr in self):
            # Existing key: route the assignment into the mapping.
            if (not self._keyattr_enabled):
                raise AttributeError(attr_message)
            self.__setitem__(attr, value)
        elif hasattr(self.__class__, attr):
            # Real class attribute (e.g. a property setter): normal path.
            super().__setattr__(attr, value)
        else:
            if (not self._keyattr_enabled):
                raise AttributeError(attr_message)
            self.__setitem__(attr, value)

    def __setstate__(self, state):
        # Restore the toggles explicitly; they live outside the mapping.
        super().__setstate__(state)
        self._keyattr_enabled = state['_keyattr_enabled']
        self._keyattr_dynamic = state['_keyattr_dynamic']
def exposed_process_nu_pages(transmit=True):
    """Re-run the NovelUpdates series-page filter over pages cached in the DB.

    Loads every cached www.novelupdates.com ``/series/`` page from the
    WebPages table and feeds it through NUSeriesPageFilter.  When
    *transmit* is True the results are aggregated/transmitted via a
    RunManager crawler; otherwise they go to a throwaway local queue.
    """
    wg = WebRequest.WebGetRobust()
    with db.session_context() as sess:
        if (transmit == True):
            print('Transmitting processed results')
            rm = common.RunManager.Crawler(1, 1)
            message_q = rm.start_aggregator()
        else:
            print('Not translating processed results')
            # Dummy sink: messages are produced but never consumed.
            message_q = queue.Queue()
        pages = []
        print('Beginning DB retreival')
        for row in sess.query(db.WebPages).filter((db.WebPages.netloc == 'www.novelupdates.com')).filter(db.WebPages.url.ilike('%/series/%')).yield_per(50).all():
            rowtmp = {'pageUrl': row.url, 'pgContent': row.content, 'type': row.mimetype, 'wg': wg, 'message_q': message_q}
            pages.append(rowtmp)
            # Progress indicator only; fires every 100 loaded rows.
            if ((len(pages) % 100) == 0):
                print(('Loaded %s pages...' % len(pages)))
        # Release row locks before the (slow) processing pass.
        sess.flush()
        sess.commit()
        for row in pages:
            try:
                if (row['pgContent'] and NuSeriesPageFilter.NUSeriesPageFilter.wantsUrl(row['pageUrl'])):
                    proc = NuSeriesPageFilter.NUSeriesPageFilter(db_sess=sess, **row)
                    proc.extractContent()
            except Exception:
                # Best effort: log the traceback and continue with the
                # remaining pages.
                print('')
                print('ERROR!')
                for line in traceback.format_exc().split('\n'):
                    print(line.rstrip())
                print('')
            except KeyboardInterrupt:
                # Reachable despite the clause above: KeyboardInterrupt is
                # not a subclass of Exception in Python 3.
                break
        # Signal any background run-state watchers that processing is done.
        runStatus.run_state.value = 0
        if (transmit == True):
            rm.join_aggregator()
class BstBalance(Bst):
    """Bst extension that can report whether the tree is height-balanced."""

    def _check_balance(self, node):
        """Return the height of *node*'s subtree, or -1 if any subtree is
        unbalanced (sibling heights differing by more than one)."""
        if node is None:
            return 0
        left_h = self._check_balance(node.left)
        if left_h == -1:
            # Short-circuit: no need to look right once imbalance is found.
            return -1
        right_h = self._check_balance(node.right)
        if right_h == -1:
            return -1
        if abs(left_h - right_h) > 1:
            return -1
        return 1 + max(left_h, right_h)

    def check_balance(self):
        """Return True iff the whole tree is balanced; raise on empty tree."""
        if self.root is None:
            raise TypeError('root cannot be None')
        return self._check_balance(self.root) != -1
class TwoConv(nn.Module):
    """Tiny fixture network: two stacked 8->8 Conv2d layers (2x2 kernel,
    stride 2), with a helper to set every conv parameter to a constant."""

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(8, 8, 2, 2)
        self.conv2 = nn.Conv2d(8, 8, 2, 2)

    def forward(self, x):
        # Plain feed-forward composition of the two convolutions.
        return self.conv2(self.conv1(x))

    def fill_all(self, value):
        """Fill the weights and biases of every Conv2d layer with *value*."""
        def _fill_conv(module):
            # Only exact Conv2d instances are touched (matches original
            # `type(...) == nn.Conv2d` semantics).
            if type(module) == nn.Conv2d:
                module.bias.data.fill_(value)
                module.weight.data.fill_(value)
        self.apply(_fill_conv)
def _decorator_factory(level: LogStatsLevel, reduce_function: Callable, input_name: Optional[str], output_name: Optional[str], group_by: Optional[str], cumulative: bool=False) -> Callable:
    """Build a decorator that registers *reduce_function* on the decorated
    object under the given stats *level*.

    The registry is a ``defaultdict(list)`` stored as an attribute named
    after ``level.name``; each entry maps an input name to a list of
    ``(reducer, output_name, group_by, cumulative)`` tuples.
    """
    def decorator(func_obj) -> Callable:
        # Fetch (or lazily create) the per-level registry on the target.
        reducers = getattr(func_obj, level.name, None)
        if not reducers:
            reducers = defaultdict(list)
            setattr(func_obj, level.name, reducers)
        _check_input_name(level, func_obj, input_name)
        resolved_output = _check_output_name(input_name=input_name, output_name=output_name, input_to_reducers=reducers)
        reducers[input_name].append((reduce_function, resolved_output, group_by, cumulative))
        return func_obj
    return decorator
class RawTrace(Trace):
    """Trace accessor returning raw sample data as numpy arrays."""

    def __init__(self, *args):
        super(RawTrace, self).__init__(*args)

    def __getitem__(self, i):
        """Read trace *i* (int) or a strided run of traces (slice).

        Integer access returns a 1-D array of ``self.shape`` samples;
        slice access returns a 2-D ``(length, self.shape)`` array read in
        one bulk ``gettr`` call.  Raises TypeError for other index types.
        """
        try:
            # wrapindex raises TypeError for non-int input (e.g. a slice),
            # which routes slices to the bulk path below.
            i = self.wrapindex(i)
            buf = np.zeros(self.shape, dtype=self.dtype)
            # NOTE(review): gettr argument meaning (start, step, count,
            # offset, samples, ...) inferred from the slice call below —
            # confirm against the segyio file-handle API.
            return self.filehandle.gettr(buf, i, 1, 1, 0, self.shape, 1, self.shape)
        except TypeError:
            try:
                indices = i.indices(len(self))
            except AttributeError:
                msg = 'trace indices must be integers or slices, not {}'
                raise TypeError(msg.format(type(i).__name__))
            (start, _, step) = indices
            length = len(range(*indices))
            # np.empty is safe here: gettr fills the whole buffer.
            buf = np.empty((length, self.shape), dtype=self.dtype)
            return self.filehandle.gettr(buf, start, step, length, 0, self.shape, 1, self.shape)
class Data(models.Model):
    """Last known MQTT payload for a (client, topic) pair, publishable
    back to the broker via :meth:`update_remote`."""

    client = models.ForeignKey(Client, on_delete=models.CASCADE)
    topic = models.ForeignKey(Topic, on_delete=models.CASCADE)
    qos = models.IntegerField(choices=PROTO_MQTT_QoS, default=0)
    payload = models.TextField(blank=True, null=True)
    retain = models.BooleanField(default=False)
    datetime = models.DateTimeField(auto_now=True)  # updated on every save

    class Meta():
        # One Data row per client/topic combination.
        unique_together = ['client', 'topic']

    def __str__(self):
        return ('%s - %s - %s' % (self.payload, self.topic, self.client))

    def __unicode__(self):
        # Python 2 compatibility; mirrors __str__.
        return ('%s - %s - %s' % (self.payload, self.topic, self.client))

    def update_remote(self):
        """Publish this payload to the broker and persist connection status.

        Connects, publishes (emitting the mqtt_* signals around each step),
        records the broker return code on the server, and backfills a
        client_id for non-clean sessions that lack one.
        """
        cli = self.client.get_mqtt_client(empty_client_id=(self.client.client_id is None))
        try:
            # NOTE(review): sender=Server.__class__ passes the metaclass,
            # not the model class — looks suspicious but is kept as-is;
            # receivers presumably match it.  TODO confirm.
            mqtt_connect.send(sender=Server.__class__, client=self.client)
            cli.connect(self.client.server.host, self.client.server.port, self.client.keepalive)
            mqtt_pre_publish.send(sender=Data.__class__, client=self.client, topic=self.topic, payload=self.payload, qos=self.qos, retain=self.retain)
            (rc, mid) = cli.publish(self.topic.name, payload=self.payload, qos=self.qos, retain=self.retain)
            self.client.server.status = rc
            self.client.server.save()
            mqtt_publish.send(sender=Client.__class__, client=self.client, userdata=cli._userdata, mid=mid)
            # Flush the outgoing publish before disconnecting.
            cli.loop_write()
            if ((not self.client.clean_session) and (not self.client.client_id)):
                # Persistent session without a stored id: adopt the id the
                # broker/library assigned (last path segment), else random.
                if (hasattr(cli, '_client_id') and cli._client_id):
                    if isinstance(cli._client_id, str):
                        name = cli._client_id.split('/')[(- 1)]
                    elif isinstance(cli._client_id, bytes):
                        name = cli._client_id.split(b'/')[(- 1)].decode('utf8')
                    else:
                        name = get_random_string(length=20)
                else:
                    name = get_random_string(length=20)
                (cli_id, is_new) = ClientId.objects.get_or_create(name=name)
                self.client.client_id = cli_id
                self.client.save()
            cli.disconnect()
            mqtt_disconnect.send(sender=Server.__class__, client=self.client, userdata=cli._userdata, rc=rc)
        except socket.gaierror as ex:
            # 11004 is the Windows WSANO_DATA/host-not-found errno.
            if (ex.errno == 11004):
                self.client.server.status = PROTO_MQTT_CONN_ERROR_ADDR_FAILED
            else:
                self.client.server.status = PROTO_MQTT_CONN_ERROR_GENERIC
            self.client.server.save()
        except IOError as ex:
            # Map known socket errnos to protocol status codes.
            if (ex.errno in IO_ERROR_MAP):
                self.client.server.status = IO_ERROR_MAP[ex.errno]
            else:
                self.client.server.status = PROTO_MQTT_CONN_ERROR_GENERIC
            self.client.server.save()
def check_sequence_name___fix():
    """Rename the scene's sequencer to '<sequence>_<scene>' derived from the
    current task.

    Uses the first non-referenced shot node to reach the sequencer, then
    sets its sequence name from the task's sequence and scene names.

    Fix: the original dereferenced ``shot`` without checking the search
    succeeded, raising AttributeError when every shot is referenced; now
    raises PublishError (consistent with the sequencer check below).
    """
    shots = pm.ls(type='shot')
    shot = None
    for s in shots:
        # Only shots local to this scene (not from a referenced file).
        if s.referenceFile() is None:
            shot = s
            break
    if shot is None:
        raise PublishError('There are no non-referenced shots in the scene!')
    sequencers = shot.outputs(type='sequencer')
    if not sequencers:
        raise PublishError('There are no sequencers in the scene!')
    sequencer = sequencers[0]
    from anima.dcc import mayaEnv
    m = mayaEnv.Maya()
    v = m.get_current_version()
    task = v.task
    sequence_name = get_seq_name_from_task(task)
    scene_name = get_scene_name_from_task(task)
    name = ('%s_%s' % (sequence_name, scene_name))
    sequencer.set_sequence_name(name)
class TestCreateIndexRunner():
    """Tests for the CreateIndex runner.

    NOTE(review): the ``@mock.patch('elasticsearch.Elasticsearch')`` and
    ``@pytest.mark.asyncio`` decorators were stripped in the source (the
    bare ``('elasticsearch.Elasticsearch')`` and ``.asyncio`` remnants are
    no-ops / syntax errors); restored below on every test.
    """

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_creates_multiple_indices(self, es):
        """Each listed index yields one create call; weight counts them."""
        es.indices.create = mock.AsyncMock()
        r = runner.CreateIndex()
        request_params = {'wait_for_active_shards': 'true'}
        params = {'indices': [('indexA', {'settings': {}}), ('indexB', {'settings': {}})], 'request-params': request_params}
        result = (await r(es, params))
        assert (result == {'weight': 2, 'unit': 'ops', 'success': True})
        es.indices.create.assert_has_awaits([mock.call(index='indexA', body={'settings': {}}, params=request_params), mock.call(index='indexB', body={'settings': {}}, params=request_params)])

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_create_with_timeout_and_headers(self, es):
        """request-timeout, headers and opaque-id are forwarded to the client."""
        es.indices.create = mock.AsyncMock()
        create_index_runner = runner.CreateIndex()
        request_params = {'wait_for_active_shards': 'true'}
        params = {'indices': [('indexA', {'settings': {}})], 'request-timeout': 3.0, 'headers': {'header1': 'value1'}, 'opaque-id': 'test-id1', 'request-params': request_params}
        result = (await create_index_runner(es, params))
        assert (result == {'weight': 1, 'unit': 'ops', 'success': True})
        es.indices.create.assert_awaited_once_with(index='indexA', body={'settings': {}}, headers={'header1': 'value1'}, opaque_id='test-id1', params={'wait_for_active_shards': 'true'}, request_timeout=3.0)

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_ignore_invalid_params(self, es):
        """Stray 'index'/'body' top-level params must not leak into the call."""
        es.indices.create = mock.AsyncMock()
        r = runner.CreateIndex()
        request_params = {'wait_for_active_shards': 'true'}
        params = {'indices': [('indexA', {'settings': {}})], 'index': 'SHOULD-NOT-BE-PASSED', 'body': 'SHOULD-NOT-BE-PASSED', 'request-params': request_params}
        result = (await r(es, params))
        assert (result == {'weight': 1, 'unit': 'ops', 'success': True})
        es.indices.create.assert_awaited_once_with(index='indexA', body={'settings': {}}, params={'wait_for_active_shards': 'true'})

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_param_indices_mandatory(self, es):
        """Missing 'indices' raises DataError and performs no create calls."""
        es.indices.create = mock.AsyncMock()
        r = runner.CreateIndex()
        params = {}
        with pytest.raises(exceptions.DataError, match="Parameter source for operation 'create-index' did not provide the mandatory parameter 'indices'. Add it to your parameter source and try again."):
            (await r(es, params))
        assert (es.indices.create.await_count == 0)
class OptionPlotoptionsWaterfallSonificationContexttracksMappingHighpassFrequency(Options):
    """Mapping options for the highpass filter frequency of a context track.

    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped in the source (each getter/setter pair shared a name,
    so the setter silently shadowed the getter); restored below per the
    surrounding ``_config_get`` / ``_config`` accessor convention.
    """

    @property
    def mapFunction(self):
        """Custom mapping function, or None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the frequency is mapped to, or None when unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value, or None when unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value, or None when unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within, or None when unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_is_valid_ip_address():
    """is_valid_ip_address accepts well-formed IPv4/IPv6 and rejects junk."""
    # Plain IPv4 and a non-address string.
    assert is_valid_ip_address('127.0.0.1') is True
    assert is_valid_ip_address('unknown') is False
    # IPv4-mapped IPv6 is valid; an out-of-range IPv4 octet is not.
    assert is_valid_ip_address('::ffff:192.168.0.1') is True
    assert is_valid_ip_address('192.168.256.1') is False
    # Compressed IPv6 is valid; a double '::' is malformed.
    assert is_valid_ip_address('fd40:363d:ee85::') is True
    assert is_valid_ip_address('fd40:363d:ee85::1::') is False
class OptionSeriesArcdiagramDataAccessibility(Options):
    """Accessibility options for an arc-diagram data point.

    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear
    to have been stripped in the source (setter defs shadowed the getters);
    restored below per the surrounding accessor convention.
    """

    @property
    def description(self):
        """Accessibility description text, or None when unset."""
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether accessibility is enabled for this point, or None."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class _GridTableBase(GridTableBase):
    """Adapter forwarding the wx.grid table protocol to a model object.

    Every table-protocol call is delegated verbatim to ``self.model``,
    which must expose the same method names.
    """

    def __init__(self, model):
        super().__init__()
        self.model = model

    # --- dimensions -------------------------------------------------
    def GetNumberRows(self):
        return self.model.GetNumberRows()

    def GetNumberCols(self):
        return self.model.GetNumberCols()

    # --- cell values ------------------------------------------------
    def IsEmptyCell(self, row, col):
        return self.model.IsEmptyCell(row, col)

    def GetValue(self, row, col):
        return self.model.GetValue(row, col)

    def SetValue(self, row, col, value):
        return self.model.SetValue(row, col, value)

    # --- labels -----------------------------------------------------
    def GetRowLabelValue(self, row):
        return self.model.GetRowLabelValue(row)

    def GetColLabelValue(self, col):
        return self.model.GetColLabelValue(col)

    # --- typed access -----------------------------------------------
    def GetTypeName(self, row, col):
        return self.model.GetTypeName(row, col)

    def CanGetValueAs(self, row, col, type_name):
        return self.model.CanGetValueAs(row, col, type_name)

    def CanSetValueAs(self, row, col, type_name):
        return self.model.CanSetValueAs(row, col, type_name)

    # --- mutation ---------------------------------------------------
    def DeleteRows(self, pos, num_rows):
        return self.model.DeleteRows(pos, num_rows)
class OptionPlotoptionsBarSonificationContexttracksMappingPitch(Options):
    """Pitch mapping options for a bar-series sonification context track.

    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped in the source (setter defs shadowed the getters);
    restored below per the surrounding accessor convention.
    """

    @property
    def mapFunction(self):
        """Custom mapping function, or None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the pitch is mapped to; defaults to 'y'."""
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Highest mapped note; defaults to 'c6'."""
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """Lowest mapped note; defaults to 'c2'."""
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        """Musical scale to constrain notes to, or None when unset."""
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within; defaults to 'yAxis'."""
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesWindbarbSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Tremolo mapping options (depth and speed sub-option groups).

    NOTE(review): the ``@property`` decorators appear to have been stripped
    in the source; restored below per the surrounding accessor convention.
    """

    @property
    def depth(self) -> 'OptionSeriesWindbarbSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        """Sub-options controlling tremolo depth mapping."""
        return self._config_sub_data('depth', OptionSeriesWindbarbSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    @property
    def speed(self) -> 'OptionSeriesWindbarbSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        """Sub-options controlling tremolo speed mapping."""
        return self._config_sub_data('speed', OptionSeriesWindbarbSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
class RedisOps(object):
    """Redis-backed storage for inboxes, their items, and pending requests.

    All multi-step updates use WATCH/MULTI/EXEC optimistic transactions and
    retry on ``redis.WatchError``.

    Fixes vs. original:
    - 'No such inbox: %s' messages used ``+ id`` (string concatenation onto
      the literal '%s') instead of ``% id`` formatting — corrected at every
      raise site.
    - ``_get_redis`` leaked ``self.lock`` if the client constructor raised;
      now uses ``with self.lock``.
    - ``@staticmethod`` restored on the three id/time helpers (they take no
      ``self`` and are called as ``RedisOps._x(...)``).
    """

    def __init__(self):
        self.host = _setting('REDIS_HOST', 'localhost')
        self.port = _setting('REDIS_PORT', 6379)
        self.db = _setting('REDIS_DB', 0)
        self.prefix = _setting('WHINBOX_REDIS_PREFIX', 'wi-')
        self.item_max = _setting('WHINBOX_ITEM_MAX', 100)
        self.item_burst_time = _setting('WHINBOX_ITEM_BURST_TIME', 120)
        self.item_burst_max = _setting('WHINBOX_ITEM_BURST_MAX', 1200)
        self.lock = threading.Lock()
        self.redis = None

    def _get_redis(self):
        """Lazily create and return the shared Redis client (thread-safe)."""
        with self.lock:
            if (not self.redis):
                self.redis = redis.Redis(host=self.host, port=self.port, db=self.db, decode_responses=True)
        return self.redis

    @staticmethod
    def _gen_id():
        """Return a random 8-character alphanumeric id."""
        return ''.join((random.choice((string.ascii_letters + string.digits)) for n in range(8)))

    @staticmethod
    def _validate_id(id):
        """Raise InvalidId unless *id* contains only [A-Za-z0-9_-]."""
        for c in id:
            if ((c not in string.ascii_letters) and (c not in string.digits) and (c not in '_-')):
                raise InvalidId(('id contains invalid character: %s' % c))

    @staticmethod
    def _timestamp_utcnow():
        """Current UTC time as an integer Unix timestamp."""
        return calendar.timegm(datetime.utcnow().utctimetuple())

    def inbox_create(self, id, ttl, response_mode):
        """Create an inbox (random id when *id* is None); return its id."""
        if (id is not None):
            RedisOps._validate_id(id)
        assert isinstance(ttl, int)
        r = self._get_redis()
        val = dict()
        val['ttl'] = ttl
        val['response_mode'] = response_mode
        set_key = (self.prefix + 'inbox')
        exp_key = (self.prefix + 'inbox-exp')
        now = RedisOps._timestamp_utcnow()
        while True:
            with r.pipeline() as pipe:
                try:
                    if (id is not None):
                        try_id = id
                    else:
                        try_id = RedisOps._gen_id()
                    key = ((self.prefix + 'inbox-') + try_id)
                    pipe.watch(key)
                    pipe.watch(exp_key)
                    if pipe.exists(key):
                        # Explicit id collision is an error; random ids retry.
                        if (id is not None):
                            raise ObjectExists()
                        else:
                            continue
                    exp_time = (now + ttl)
                    pipe.multi()
                    pipe.set(key, json.dumps(val))
                    pipe.sadd(set_key, try_id)
                    pipe.zadd(exp_key, {try_id: exp_time})
                    pipe.execute()
                    return try_id
                except redis.WatchError:
                    continue

    def inbox_delete(self, id):
        """Delete inbox *id* and all of its item bookkeeping keys."""
        RedisOps._validate_id(id)
        r = self._get_redis()
        key = ((self.prefix + 'inbox-') + id)
        set_key = (self.prefix + 'inbox')
        exp_key = (self.prefix + 'inbox-exp')
        items_key = ((self.prefix + 'inbox-items-') + id)
        items_baseindex_key = ((self.prefix + 'inbox-items-baseindex-') + id)
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(key)
                    if (not pipe.exists(key)):
                        raise ObjectDoesNotExist(('No such inbox: %s' % id))
                    pipe.multi()
                    pipe.delete(key)
                    pipe.srem(set_key, id)
                    pipe.zrem(exp_key, id)
                    pipe.delete(items_key)
                    pipe.delete(items_baseindex_key)
                    pipe.execute()
                    break
                except redis.WatchError:
                    continue

    def inbox_get(self, id):
        """Return the stored inbox metadata dict for *id*."""
        RedisOps._validate_id(id)
        r = self._get_redis()
        key = ((self.prefix + 'inbox-') + id)
        val_json = r.get(key)
        if (val_json is None):
            raise ObjectDoesNotExist(('No such inbox: %s' % id))
        return json.loads(val_json)

    def inbox_refresh(self, id, newttl=None):
        """Push the inbox's expiry forward (optionally replacing its ttl)."""
        assert ((not newttl) or isinstance(newttl, int))
        RedisOps._validate_id(id)
        r = self._get_redis()
        key = ((self.prefix + 'inbox-') + id)
        exp_key = (self.prefix + 'inbox-exp')
        now = RedisOps._timestamp_utcnow()
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(key)
                    pipe.watch(exp_key)
                    val_json = pipe.get(key)
                    if (val_json is None):
                        raise ObjectDoesNotExist(('No such inbox: %s' % id))
                    val = json.loads(val_json)
                    if (newttl is not None):
                        val['ttl'] = newttl
                    exp_time = (now + val['ttl'])
                    pipe.multi()
                    pipe.set(key, json.dumps(val))
                    pipe.zadd(exp_key, {id: exp_time})
                    pipe.execute()
                    break
                except redis.WatchError:
                    continue

    def inbox_next_expiration(self):
        """Return the earliest inbox expiry timestamp, or None if none."""
        r = self._get_redis()
        exp_key = (self.prefix + 'inbox-exp')
        items = r.zrange(exp_key, 0, 0, withscores=True)
        if (len(items) > 0):
            return int(items[0][1])
        else:
            return None

    def inbox_take_expired(self):
        """Remove all expired inboxes; return their metadata (with 'id')."""
        out = list()
        r = self._get_redis()
        set_key = (self.prefix + 'inbox')
        exp_key = (self.prefix + 'inbox-exp')
        now = RedisOps._timestamp_utcnow()
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(exp_key)
                    items = pipe.zrange(exp_key, 0, 0, withscores=True)
                    if (len(items) == 0):
                        break
                    if (int(items[0][1]) > now):
                        # Earliest entry not yet expired: nothing left to do.
                        break
                    id = items[0][0]
                    key = ((self.prefix + 'inbox-') + id)
                    items_key = ((self.prefix + 'inbox-items-') + id)
                    items_baseindex_key = ((self.prefix + 'inbox-items-baseindex-') + id)
                    val = json.loads(pipe.get(key))
                    pipe.multi()
                    pipe.delete(key)
                    pipe.srem(set_key, id)
                    pipe.zrem(exp_key, id)
                    pipe.delete(items_key)
                    pipe.delete(items_baseindex_key)
                    pipe.execute()
                    val['id'] = id
                    out.append(val)
                except redis.WatchError:
                    continue
        return out

    def inbox_get_all(self):
        """Return the set of all inbox ids."""
        r = self._get_redis()
        set_key = (self.prefix + 'inbox')
        return set(r.smembers(set_key))

    def inbox_append_item(self, id, item):
        """Append *item* (stamped with 'created') to inbox *id*.

        Returns (new_item_id, previous_item_id_or_empty, created_ts).
        """
        RedisOps._validate_id(id)
        r = self._get_redis()
        key = ((self.prefix + 'inbox-') + id)
        items_key = ((self.prefix + 'inbox-items-') + id)
        items_baseindex_key = ((self.prefix + 'inbox-items-baseindex-') + id)
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(key)
                    pipe.watch(items_key)
                    pipe.watch(items_baseindex_key)
                    if (not pipe.exists(key)):
                        raise ObjectDoesNotExist(('No such inbox: %s' % id))
                    end_pos = pipe.llen(items_key)
                    baseindex = pipe.get(items_baseindex_key)
                    if (baseindex is not None):
                        baseindex = int(baseindex)
                    else:
                        baseindex = 0
                    item['created'] = RedisOps._timestamp_utcnow()
                    pipe.multi()
                    pipe.rpush(items_key, json.dumps(item))
                    pipe.execute()
                    # Item ids are logical positions: baseindex + list index.
                    prev_pos = ((baseindex + end_pos) - 1)
                    if (prev_pos != (- 1)):
                        return (str((baseindex + end_pos)), str(prev_pos), item['created'])
                    else:
                        return (str((baseindex + end_pos)), '', item['created'])
                except redis.WatchError:
                    continue

    def inbox_get_items_after(self, id, item_id, item_max):
        """Return up to *item_max* items newer than *item_id*.

        Returns (items, last_item_id_in_window).
        """
        RedisOps._validate_id(id)
        assert ((not item_max) or (item_max > 0))
        r = self._get_redis()
        if ((item_id is not None) and (len(item_id) > 0)):
            item_pos = (int(item_id) + 1)
        else:
            item_pos = (- 1)
        key = ((self.prefix + 'inbox-') + id)
        items_key = ((self.prefix + 'inbox-items-') + id)
        items_baseindex_key = ((self.prefix + 'inbox-items-baseindex-') + id)
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(key)
                    pipe.watch(items_key)
                    pipe.watch(items_baseindex_key)
                    if (not pipe.exists(key)):
                        raise ObjectDoesNotExist(('No such inbox: %s' % id))
                    count = pipe.llen(items_key)
                    if (count == 0):
                        return (list(), '')
                    baseindex = pipe.get(items_baseindex_key)
                    if (baseindex is not None):
                        baseindex = int(baseindex)
                    else:
                        baseindex = 0
                    # Translate the logical id into a current list position.
                    if (item_pos != (- 1)):
                        start_pos = (item_pos - baseindex)
                    else:
                        start_pos = 0
                    if item_max:
                        end_pos = ((start_pos + item_max) - 1)
                        if (end_pos > (count - 1)):
                            end_pos = (count - 1)
                    else:
                        end_pos = (count - 1)
                    if (start_pos > end_pos):
                        return (list(), str((baseindex + end_pos)))
                    pipe.multi()
                    pipe.lrange(items_key, start_pos, end_pos)
                    ret = pipe.execute()
                    items_json = ret[0]
                    items = list()
                    for (n, i) in enumerate(items_json):
                        item = json.loads(i)
                        item['id'] = str(((baseindex + start_pos) + n))
                        items.append(item)
                    return (items, str((baseindex + end_pos)))
                except redis.WatchError:
                    continue

    def inbox_get_items_before(self, id, item_id, item_max):
        """Return up to *item_max* items older than *item_id*, newest-first.

        Returns (items, first_item_id_in_window, reached_oldest).
        """
        RedisOps._validate_id(id)
        assert ((not item_max) or (item_max > 0))
        r = self._get_redis()
        if ((item_id is not None) and (len(item_id) > 0)):
            item_pos = (int(item_id) - 1)
            if (item_pos < 0):
                return (list(), '', True)
        else:
            item_pos = (- 1)
        key = ((self.prefix + 'inbox-') + id)
        items_key = ((self.prefix + 'inbox-items-') + id)
        items_baseindex_key = ((self.prefix + 'inbox-items-baseindex-') + id)
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(key)
                    pipe.watch(items_key)
                    pipe.watch(items_baseindex_key)
                    if (not pipe.exists(key)):
                        raise ObjectDoesNotExist(('No such inbox: %s' % id))
                    count = pipe.llen(items_key)
                    if (count == 0):
                        return (list(), '', True)
                    baseindex = pipe.get(items_baseindex_key)
                    if (baseindex is not None):
                        baseindex = int(baseindex)
                    else:
                        baseindex = 0
                    if (item_pos != (- 1)):
                        end_pos = (item_pos - baseindex)
                    else:
                        end_pos = (count - 1)
                    if item_max:
                        start_pos = (end_pos - (item_max - 1))
                        if (start_pos < 0):
                            start_pos = 0
                    else:
                        start_pos = 0
                    if (start_pos > end_pos):
                        return (list(), str((baseindex + start_pos)), (start_pos == 0))
                    pipe.multi()
                    pipe.lrange(items_key, start_pos, end_pos)
                    ret = pipe.execute()
                    items_json = ret[0]
                    items = list()
                    for (n, i) in enumerate(items_json):
                        item = json.loads(i)
                        item['id'] = str(((baseindex + start_pos) + n))
                        # Newest first: prepend as we walk forward.
                        items.insert(0, item)
                    return (items, str((baseindex + start_pos)), (start_pos == 0))
                except redis.WatchError:
                    continue

    def inbox_get_newest_id(self, id):
        """Return the id of the newest item in inbox *id*, or ''."""
        RedisOps._validate_id(id)
        r = self._get_redis()
        key = ((self.prefix + 'inbox-') + id)
        items_key = ((self.prefix + 'inbox-items-') + id)
        items_baseindex_key = ((self.prefix + 'inbox-items-baseindex-') + id)
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(key)
                    pipe.watch(items_key)
                    pipe.watch(items_baseindex_key)
                    if (not pipe.exists(key)):
                        raise ObjectDoesNotExist(('No such inbox: %s' % id))
                    count = pipe.llen(items_key)
                    if (count == 0):
                        return ''
                    baseindex = pipe.get(items_baseindex_key)
                    if (baseindex is not None):
                        baseindex = int(baseindex)
                    else:
                        baseindex = 0
                    last_pos = (count - 1)
                    # Empty transaction: commits the watch-consistent read.
                    pipe.multi()
                    pipe.execute()
                    return str((baseindex + last_pos))
                except redis.WatchError:
                    continue

    def inbox_clear_expired_items(self, id):
        """Trim items that exceed the burst/steady-state caps; return count."""
        RedisOps._validate_id(id)
        r = self._get_redis()
        items_key = ((self.prefix + 'inbox-items-') + id)
        items_baseindex_key = ((self.prefix + 'inbox-items-baseindex-') + id)
        now = RedisOps._timestamp_utcnow()
        total = 0
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(items_key)
                    pipe.watch(items_baseindex_key)
                    items = pipe.lrange(items_key, 0, 0)
                    if (not items):
                        break
                    item = json.loads(items[0])
                    count = pipe.llen(items_key)
                    item_pos = 0
                    item_time = item['created']
                    expire = False
                    # Recent items get the larger burst cap; older ones the
                    # steady-state cap.
                    if (item_time > (now - self.item_burst_time)):
                        if (item_pos < (count - self.item_burst_max)):
                            expire = True
                    elif (item_pos < (count - self.item_max)):
                        expire = True
                    if (not expire):
                        break
                    pipe.multi()
                    pipe.lpop(items_key)
                    # Keep logical ids stable as the head moves forward.
                    pipe.incr(items_baseindex_key)
                    pipe.execute()
                    total += 1
                except redis.WatchError:
                    continue
        return total

    def request_add_pending(self, inbox_id, item_id):
        """Mark (inbox_id, item_id) as a pending request (20s expiry)."""
        r = self._get_redis()
        req_id = ((inbox_id + '-') + item_id)
        req_key = ((self.prefix + 'req-item-') + req_id)
        req_exp_key = (self.prefix + 'req-exp')
        now = RedisOps._timestamp_utcnow()
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(req_key)
                    pipe.watch(req_exp_key)
                    if pipe.exists(req_key):
                        raise ObjectExists()
                    exp_time = (now + 20)
                    pipe.multi()
                    pipe.set(req_key, json.dumps([inbox_id, item_id]))
                    pipe.zadd(req_exp_key, {req_id: exp_time})
                    pipe.execute()
                    break
                except redis.WatchError:
                    continue

    def request_remove_pending(self, inbox_id, item_id):
        """Remove a pending-request marker; raise if it does not exist."""
        r = self._get_redis()
        req_id = ((inbox_id + '-') + item_id)
        req_key = ((self.prefix + 'req-item-') + req_id)
        req_exp_key = (self.prefix + 'req-exp')
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(req_key)
                    pipe.watch(req_exp_key)
                    if (not pipe.exists(req_key)):
                        raise ObjectDoesNotExist()
                    pipe.multi()
                    pipe.delete(req_key)
                    pipe.zrem(req_exp_key, req_id)
                    pipe.execute()
                    break
                except redis.WatchError:
                    continue

    def request_is_pending(self, inbox_id, item_id):
        """Return truthy iff (inbox_id, item_id) has a pending marker."""
        r = self._get_redis()
        req_id = ((inbox_id + '-') + item_id)
        req_key = ((self.prefix + 'req-item-') + req_id)
        return r.exists(req_key)

    def request_take_expired(self):
        """Remove all expired pending requests; return their (inbox, item) ids."""
        out = list()
        r = self._get_redis()
        req_exp_key = (self.prefix + 'req-exp')
        now = RedisOps._timestamp_utcnow()
        while True:
            with r.pipeline() as pipe:
                try:
                    pipe.watch(req_exp_key)
                    items = pipe.zrange(req_exp_key, 0, 0, withscores=True)
                    if (len(items) == 0):
                        break
                    if (int(items[0][1]) > now):
                        break
                    req_id = items[0][0]
                    req_key = ((self.prefix + 'req-item-') + req_id)
                    pipe.watch(req_key)
                    val_raw = pipe.get(req_key)
                    if (not val_raw):
                        # Marker vanished between reads: retry from the top.
                        continue
                    val = json.loads(val_raw)
                    inbox_id = val[0]
                    item_id = val[1]
                    pipe.multi()
                    pipe.delete(req_key)
                    pipe.zrem(req_exp_key, req_id)
                    pipe.execute()
                    out.append((inbox_id, item_id))
                except redis.WatchError:
                    continue
        return out
def check_xss_impact(res_headers):
    """Classify the XSS impact of a response from its Content-Type header.

    JSON and plain-text responses are not rendered as HTML by browsers, so
    a reflected payload there is 'Low' impact; any other content type
    (e.g. text/html) is 'High'.  A missing or empty Content-Type is 'Low'.

    Fix: the original indexed ``res_headers['Content-Type']`` directly and
    raised KeyError when the header was absent; ``.get()`` handles that.
    """
    content_type = res_headers.get('Content-Type')
    if not content_type:
        return 'Low'
    if ('application/json' in content_type) or ('text/plain' in content_type):
        return 'Low'
    return 'High'
class DrawTestCase(unittest.TestCase):
    """Tests rendering of each GeoJSON geometry type to SVG strings."""
    # Shared fixture data: feature properties, sources for CSS classes,
    # and two coordinate rings used to build the geometries below.
    properties = {'cat': u'meow', 'dog': 'woof'}
    classes = [u'foo', 'cat']
    lis1 = [[(- 110.6), 35.3], [(- 110.7), 35.5], [(- 110.3), 35.5], [(- 110.2), 35.1], [(- 110.2), 35.8], [(- 110.3), 35.2], [(- 110.1), 35.8], [(- 110.8), 35.5], [(- 110.7), 35.7], [(- 110.1), 35.4], [(- 110.7), 35.1], [(- 110.6), 35.3]]
    lis2 = [[(- 110.8), 35.3], [(- 110.6), 35.4], [(- 110.1), 35.5], [(- 110.1), 35.5], [(- 110.4), 35.2], [(- 110.5), 35.1], [(- 110.5), 35.1], [(- 110.9), 35.8], [(- 110.5), 35.1], [(- 110.8), 35.3]]
    def setUp(self):
        """Build one feature dict per GeoJSON geometry type."""
        self.multipolygon = {'properties': self.properties, 'geometry': {'type': 'MultiPolygon', 'id': 'MultiPolygon', 'coordinates': [[self.lis1], [self.lis2]]}}
        self.polygon = {'properties': self.properties, 'geometry': {'type': 'Polygon', 'id': 'Polygon', 'coordinates': [self.lis1]}}
        self.multilinestring = {'properties': self.properties, 'geometry': {'type': 'MultiLineString', 'id': 'MultiLineString', 'coordinates': [self.lis2, self.lis2]}}
        self.linestring = {'properties': self.properties, 'geometry': {'coordinates': self.lis2, 'type': 'LineString', 'id': 'LineString'}}
        self.point = {'properties': self.properties, 'geometry': {'coordinates': (0.0, 0), 'type': 'Point', 'id': 'Point'}}
        self.obj = svgis.SVGIS([])
    def testDrawPoint(self):
        """A point feature renders to a string carrying property classes."""
        feat = self.obj.feature(self.point, [], classes=self.classes, id_field=None)
        assert isinstance(feat, six.string_types)
        self.assertIn('cat_meow', feat)
    def testDrawLine(self):
        """LineString renders both via draw.lines and via SVGIS.feature."""
        line = draw.lines(self.linestring['geometry'])
        assert isinstance(line, six.string_types)
        feat = self.obj.feature(self.linestring, [], classes=self.classes, id_field=None)
        assert isinstance(feat, six.string_types)
        assert ('cat_meow' in feat)
    def testDrawMultiLine(self):
        """MultiLineString renders via both the specific and generic helpers."""
        mls1 = draw.multilinestring(self.multilinestring['geometry']['coordinates'])
        mls2 = draw.lines(self.multilinestring['geometry'])
        assert isinstance(mls1, six.string_types)
        assert isinstance(mls2, six.string_types)
        grp = self.obj.feature(self.multilinestring, [], classes=self.classes, id_field=None)
        assert isinstance(grp, six.string_types)
        assert ('cat_meow' in grp)
    def testDrawPolygon(self):
        """Polygon output contains its coordinates and the evenodd fill rule."""
        drawn = draw.polygon(self.polygon['geometry']['coordinates'])
        assert ('{},{}'.format(*self.lis1[0]) in drawn)
        feat = self.obj.feature(self.polygon, [], classes=self.classes, id_field=None)
        assert ('fill-rule="evenodd"' in feat)
        assert ('cat_meow' in feat)
    def testDrawMultiPolygon(self):
        """MultiPolygon renders to a string."""
        drawn = draw.multipolygon(self.multipolygon['geometry']['coordinates'])
        assert isinstance(drawn, six.string_types)
    def testDrawMultiPoint(self):
        """MultiPoint renders circles with cx/cy attributes inside an id'd group."""
        points = draw.multipoint(self.lis1, id='foo')
        self.assertIn('cy="35.1"', points)
        self.assertIn('cx="-110.6"', points)
        assert re.search('<g[^>]*id="foo"', points)
    def testAddClass(self):
        """Keyword args (e.g. class) pass through to the drawn element."""
        geom = {'coordinates': (0, 0), 'type': 'Point'}
        kwargs = {'class': 'boston'}
        point = draw.points(geom, **kwargs)
        self.assertIsInstance(point, six.string_types)
        point = draw.points(geom, **kwargs)
        assert isinstance(point, six.string_types)
    def testDrawPolygonComplicated(self):
        """Polygon with a hole keeps the 'polygon' class and appends extras."""
        coordinates = [[(0.0, 0.0), (10.0, 0.0), (10.0, 10.0), (0.0, 10.0), (0.0, 0.0)], [(4.0, 4.0), (4.0, 5.0), (5.0, 5.0), (5.0, 4.0), (4.0, 4.0)]]
        polygon = draw.polygon(coordinates)
        self.assertIsInstance(polygon, six.string_types)
        assert ('class="polygon"' in polygon)
        kw = {'class': 'a'}
        assert ('polygon a' in draw.polygon(coordinates, **kw))
    def testUnkownGeometry(self):
        """Unknown geometry types raise SvgisError."""
        with self.assertRaises(errors.SvgisError):
            draw.geometry({'type': 'FooBar', 'coordinates': []})
    def testGeometryCollection(self):
        """GeometryCollection renders all members into one id'd group."""
        gc = {'type': 'GeometryCollection', 'id': 'GC', 'geometries': [self.polygon['geometry'], self.linestring['geometry'], self.point['geometry'], self.multipolygon['geometry'], self.multilinestring['geometry']]}
        a = draw.geometry(gc, id='cats')
        assert isinstance(a, six.string_types)
        assert ('id="cats"' in a)
    def testDrawAndConvertToString(self):
        """Smoke test: every geometry type draws without raising."""
        draw.geometry(self.linestring['geometry'])
        draw.geometry(self.multilinestring['geometry'])
        draw.geometry(self.polygon['geometry'])
        draw.geometry(self.multipolygon['geometry'])
class GenericAgent():
    """Base class for segment-to-segment simultaneous agents.

    Subclasses declare ``source_type`` / ``target_type`` (segment type
    names) and implement :meth:`policy`, which decides on each step whether
    to read more source or to emit target content.
    """
    source_type = None
    target_type = None

    def __init__(self, args: Optional[Namespace]=None) -> None:
        if (args is not None):
            self.args = args
        # Subclasses must declare the segment types they consume/produce.
        assert self.source_type
        assert self.target_type
        self.device = 'cpu'
        self.states = self.build_states()
        self.reset()

    def build_states(self) -> AgentStates:
        """Build the agent's state container; override for custom states."""
        return AgentStates()

    def reset(self) -> None:
        """Reset internal states (e.g. at sentence boundaries)."""
        self.states.reset()

    def policy(self, states: Optional[AgentStates]=None) -> Action:
        """Decide the next Action (read or write). Must be overridden.

        Fixed: the original ``assert NotImplementedError`` was a no-op
        (asserting a truthy class object) -- it must raise instead.
        """
        raise NotImplementedError

    def push(self, source_segment: Segment, states: Optional[AgentStates]=None, upstream_states: Optional[List[AgentStates]]=None) -> None:
        """Feed a new source segment (and upstream context) into the states."""
        if (states is None):
            states = self.states
        if (upstream_states is None):
            upstream_states = []
        states.upstream_states = upstream_states
        states.update_config(source_segment.config)
        states.update_source(source_segment)

    def pop(self, states: Optional[AgentStates]=None) -> Segment:
        """Run the policy once and return the produced target segment.

        Returns an ``EmptySegment`` when the policy decides to read (or the
        target is already finished).
        """
        # A zero-argument policy keeps its own state (stateful) and must not
        # be fed external states; a policy taking `states` is stateless.
        if (len(signature(self.policy).parameters) == 0):
            is_stateless = False
            if states:
                raise RuntimeError('Feeding states to stateful agents.')
        else:
            is_stateless = True
        if (states is None):
            states = self.states
        if states.target_finished:
            return EmptySegment(finished=True)
        if is_stateless:
            action = self.policy(states)
        else:
            action = self.policy()
        if (not isinstance(action, Action)):
            raise RuntimeError(f'The return value of {self.policy.__qualname__} is not an {Action.__qualname__} instance')
        if action.is_read():
            return EmptySegment()
        else:
            # Ready-made segments are passed through without touching states.
            if isinstance(action.content, Segment):
                return action.content
            segment = SEGMENT_TYPE_DICT[self.target_type](index=0, content=action.content, finished=action.finished)
            states.update_target(segment)
            return segment

    def pushpop(self, segment: Segment, states: Optional[AgentStates]=None, upstream_states: Optional[List[AgentStates]]=None) -> Segment:
        """Convenience wrapper: push a source segment, then pop a target one."""
        self.push(segment, states, upstream_states)
        return self.pop(states)

    @staticmethod
    def add_args(parser: ArgumentParser):
        """Hook for subclasses to register command-line arguments.

        Fixed: missing ``@staticmethod`` -- the original received ``parser``
        in the implicit ``self`` slot when called on an instance.
        """
        pass

    @classmethod
    def from_args(cls, args):
        """Alternate constructor from parsed command-line arguments.

        Fixed: missing ``@classmethod`` -- ``cls`` was a plain first
        parameter, so ``AgentClass.from_args(args)`` bound ``args`` to it.
        """
        return cls(args)

    def to(self, device: str, *args, **kwargs) -> None:
        """Move the agent to a device; a no-op for the generic base class."""
        pass

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}[{self.source_type} -> {self.target_type}]'

    def __str__(self) -> str:
        return self.__repr__()
class SmartPlaylistManager(PlaylistManager):
    """Playlist manager whose playlists are backed by a media collection."""

    def __init__(self, playlist_dir, playlist_class=SmartPlaylist, collection=None):
        # Store the collection before the base initializer runs, because it
        # may already create playlists through _create_playlist().
        self.collection = collection
        PlaylistManager.__init__(self, playlist_dir=playlist_dir, playlist_class=playlist_class)

    def _create_playlist(self, name):
        """Instantiate a playlist bound to this manager's collection."""
        playlist_cls = self.playlist_class
        return playlist_cls(name=name, collection=self.collection)
def inspect(mt, device, baudrate):
    """Query an Xsens MT device for its settings and print a report.

    Each query is attempted independently so that settings unsupported by
    a particular device are reported inline instead of aborting the dump.

    :param mt: connected MT device driver object (provides the Get* calls).
    :param device: device path/name, used only for the report header.
    :param baudrate: serial baudrate, used only for the report header.
    """
    def config_fmt(config):
        """Format an output-configuration list of (mode, freq) pairs."""
        return ('[%s]' % ', '.join((('(0x%04X, %d)' % (mode, freq)) for (mode, freq) in config)))
    def hex_fmt(size=4):
        """Return a formatter printing an integer as 0x-hex of `size` bytes."""
        fmt = ('0x%%0%dX' % (2 * size))
        def f(value):
            return (fmt % value)
        return f
    def sync_fmt(settings):
        """Format a list of synchronization-setting tuples."""
        return ('[%s]' % ', '.join((('(0x%02X, 0x%02X, 0x%02X, 0x%02X, 0x%04X, 0x%04X, 0x%04X, 0x%04X)' % s) for s in settings)))
    def try_message(m, f, formater=None, *args, **kwargs):
        """Print label `m` and the result of `f(*args, **kwargs)`.

        Timeouts and 'unsupported message' device errors are reported
        inline; any other device error propagates.
        """
        print((' %s ' % m), end=' ')
        try:
            if (formater is not None):
                print(formater(f(*args, **kwargs)))
            else:
                pprint.pprint(f(*args, **kwargs), indent=4)
        except MTTimeoutException as e:
            print('timeout: might be unsupported by your device.')
        except MTErrorMessage as e:
            # Error code 4 means the message is not supported by the device.
            if (e.code == 4):
                print('message unsupported by your device.')
            else:
                raise e
    print(('Device: %s at %d Bd:' % (device, baudrate)))
    try_message('device ID:', mt.GetDeviceID, hex_fmt(4))
    try_message('product code:', mt.GetProductCode)
    try_message('hardware version:', mt.GetHardwareVersion)
    try_message('firmware revision:', mt.GetFirmwareRev)
    try_message('baudrate:', mt.GetBaudrate)
    try_message('error mode:', mt.GetErrorMode, hex_fmt(2))
    try_message('option flags:', mt.GetOptionFlags, hex_fmt(4))
    try_message('location ID:', mt.GetLocationID, hex_fmt(2))
    try_message('transmit delay:', mt.GetTransmitDelay)
    try_message('synchronization settings:', mt.GetSyncSettings, sync_fmt)
    try_message('general configuration:', mt.GetConfiguration)
    try_message('output configuration (mark IV devices):', mt.GetOutputConfiguration, config_fmt)
    try_message('string output type:', mt.GetStringOutputType)
    try_message('period:', mt.GetPeriod)
    try_message('alignment rotation sensor:', mt.GetAlignmentRotation, parameter=0)
    try_message('alignment rotation local:', mt.GetAlignmentRotation, parameter=1)
    try_message('output mode:', mt.GetOutputMode, hex_fmt(2))
    try_message('extended output mode:', mt.GetExtOutputMode, hex_fmt(2))
    try_message('output settings:', mt.GetOutputSettings, hex_fmt(4))
    try_message('GPS coordinates (lat, lon, alt):', mt.GetLatLonAlt)
    try_message('GNSS platform:', mt.GetGnssPlatform)
    try_message('available scenarios:', mt.GetAvailableScenarios)
    try_message('current scenario ID:', mt.GetCurrentScenario)
    try_message('UTC time:', mt.GetUTCTime)
def _transpose(pitch_motif: PitchLine, scale: List[Pitch], step: int, error: bool=True) -> PitchLine:
    """Transpose the pitches of `pitch_motif` by `step` within `scale`.

    On any failure the original fall-back of returning an empty motif is
    kept, but the handler is narrowed from a bare ``except`` to
    ``except Exception`` so KeyboardInterrupt/SystemExit still propagate.

    :param pitch_motif: motif whose pitches are transposed.
    :param scale: scale within which the transposition happens.
    :param step: number of scale steps to move each pitch.
    :param error: forwarded to ``_move`` (presumably controls strictness
        on out-of-scale pitches -- TODO confirm against ``_move``).
    :returns: the transposed motif, or ``[]`` if transposition failed.
    """
    try:
        pitches = _extract(pitch_motif)
        shifted = [_move(pitch, scale, step, error) for pitch in pitches]
        motif = _replace(pitch_motif, shifted)
    except Exception:
        motif = []
    return motif
def extractBoxnovelCom(item):
    """Parse a boxnovel.com feed item into a release message.

    Returns None when the title has no volume/chapter info or is a preview,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower():
        return None
    if not (chp or vol):
        return None
    # (tag to look for, release name, translation type)
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, release_name, tl_type in known_tags:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, release_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class UserEmailSchema(SoftDeletionSchema):
    """JSON:API schema for a user's alternate email address."""
    class Meta():
        # JSON:API resource configuration for the user-email endpoint.
        type_ = 'user-email'
        self_view = 'v1.user_emails_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize
    # Primary key; never accepted from clients.
    id = fields.Str(dump_only=True)
    # The address itself; trimmed and validated by TrimmedEmail.
    email_address = TrimmedEmail(allow_none=False)
    # Category of the address, restricted to a fixed choice list.
    type = fields.Str(allow_none=False, validate=validate.OneOf(choices=['home', 'work', 'business', 'office', 'other']))
    user_id = fields.Integer(allow_none=False)
    # JSON:API relationship back to the owning user.
    user = Relationship(self_view='v1.user_emails_user', self_view_kwargs={'id': '<id>'}, related_view='v1.user_detail', related_view_kwargs={'user_email_id': '<id>'}, schema='UserSchema', type_='user')
def normalize_prim_input(prim_inp):
    """Normalize a primitive spec into a list of (PrimTypes, *indices) tuples.

    ``prim_inp`` is ``None`` or a sequence whose head names a primitive type
    (a PrimTypes member, its integer value, its name, or a shortcut key) and
    whose tail holds integer indices. An unresolvable head raises KeyError.
    """
    if prim_inp is None:
        return []
    prim_type, *raw_indices = prim_inp
    indices = [int(index) for index in raw_indices]
    # Already a resolved PrimTypes member: pass the input through unchanged.
    if isinstance(prim_type, PrimTypes):
        return [prim_inp]
    # Try the head as an integer enum value.
    try:
        resolved = PrimTypes(int(prim_type))
    except ValueError:
        pass
    else:
        return [tuple([resolved] + indices)]
    # Try the head as an enum member name (case-insensitive).
    try:
        resolved = getattr(PrimTypes, str(prim_type).upper())
    except AttributeError:
        pass
    else:
        return [tuple([resolved] + indices)]
    # Finally, try the head as a shortcut that expands to several types.
    try:
        resolved_types = PrimTypeShortcuts[str(prim_type).upper()]
    except KeyError as error:
        print(f"Could not normalize 'prim_inp'={prim_inp}!")
        raise error
    return [tuple([resolved] + indices) for resolved in resolved_types]
class OptionSeriesPieMarkerStatesHover(Options):
    """Hover-state marker options for pie series (Highcharts config wrapper).

    NOTE(review): each option appears as a getter/setter pair of same-named
    methods; in this form the later (setter) definition overrides the getter,
    which suggests ``@property`` / ``@<name>.setter`` decorators were
    stripped upstream -- confirm against the generator's output.
    """
    def animation(self) -> 'OptionSeriesPieMarkerStatesHoverAnimation':
        """Nested ``animation`` sub-options object."""
        return self._config_sub_data('animation', OptionSeriesPieMarkerStatesHoverAnimation)
    def enabled(self):
        """Whether the hover state is enabled (default True)."""
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def fillColor(self):
        """Marker fill color while hovered (default None = inherit)."""
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        """Marker border color while hovered."""
        return self._config_get(None)
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        """Marker border width while hovered."""
        return self._config_get(None)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def lineWidthPlus(self):
        """Pixels added to the line width on hover (default 1)."""
        return self._config_get(1)
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        """Marker radius while hovered."""
        return self._config_get(None)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def radiusPlus(self):
        """Pixels added to the radius on hover (default 2)."""
        return self._config_get(2)
    def radiusPlus(self, num: float):
        self._config(num, js_type=False)
@pytest.mark.parametrize('input_', [pytest.param('\x0b'), pytest.param('\t##'), pytest.param('a\n\n\xa0\n\nb'), pytest.param('\xa0\n\n# heading'), pytest.param('```\na\n```\n\u2003\n# A\n', marks=pytest.mark.xfail())])
def test_output_is_equal(input_):
    """Formatting must preserve document semantics for tricky whitespace.

    Fixed: the decorator line began with a bare ``.parametrize(`` (a syntax
    error); restored the stripped ``@pytest.mark`` prefix, which matches the
    ``pytest.param`` / ``pytest.mark.xfail`` usage on the same line.
    """
    output = mdformat.text(input_)
    assert is_md_equal(input_, output)
# NOTE(review): the next two bare string expressions are no-ops; they look
# like stripped ``@mock.patch('foremast.utils.credentials.gate_request')``
# and ``@mock.patch('foremast.utils.templates.TEMPLATES_PATH', None)``
# decorators (the test signature receives ``gate_request``) -- confirm
# against the upstream source before relying on this test.
('foremast.utils.credentials.gate_request')
('foremast.utils.templates.TEMPLATES_PATH', None)
def test_iam_construct_policy(gate_request, get_base_settings):
    """construct_policy returns None without services, JSON with them."""
    settings = get_base_settings
    # No services configured -> no policy document.
    policy_json = construct_policy(pipeline_settings=settings)
    assert (policy_json is None)
    # One service -> a JSON policy document is produced.
    settings.update({'services': {'s3': True}})
    policy_json = construct_policy(app='unicornforrest', env='stage', group='forrest', pipeline_settings=settings)
    assert (type(json.loads(policy_json)) == dict)
    # Multiple dynamodb tables still yield parseable JSON.
    settings.update({'services': {'dynamodb': ['coreforrest', 'edgeforrest', 'attendantdevops']}})
    policy_json = construct_policy(pipeline_settings=settings)
    policy = json.loads(policy_json)
def test_post_three_images(client, png_image):
    """POST three PNGs, then GET the listing and check paths and sizes."""
    for _ in range(3):
        client.simulate_post('/images', body=png_image)
    resp = client.simulate_get('/images')
    # Collect (path, [width, height]) pairs from the JSON listing.
    images = [(item['image'], item['size']) for item in resp.json]
    # NOTE(review): the first expected filename '-48e5-...' is missing its
    # leading UUID segment (compare the other two full UUIDs) -- looks
    # truncated; confirm against the fixture that fixes the generated names.
    assert (images == [('/images/-48e5-4a61-be67-e426b11821ed.jpeg', [640, 360]), ('/images/3bc731ac-8cd8-4f39-b6fe-1a195d3b4e74.jpeg', [640, 360]), ('/images/ba1c4951-73bc-45a4-a1f6-aa2b958dafa4.jpeg', [640, 360])])
@pytest.mark.parametrize('compiled', [True, False])
def test_flag_read(compiled):
    """Flags of 16/24/32-bit widths parse correctly, compiled or not.

    Fixed: the decorator line began with a bare ``.parametrize(`` (a syntax
    error); restored the stripped ``@pytest.mark`` prefix.
    """
    d = '\n    flag Test16 : uint16 {\n        A = 0x1,\n        B = 0x2\n    };\n\n    flag Test24 : uint24 {\n        A = 0x1,\n        B = 0x2\n    };\n\n    flag Test32 : uint32 {\n        A = 0x1,\n        B = 0x2\n    };\n\n    struct test {\n        Test16  a16;\n        Test16  b16;\n        Test24  a24;\n        Test24  b24;\n        Test32  a32;\n        Test32  b32;\n        Test16  l[2];\n        Test16  c16;\n    };\n    '
    c = cstruct.cstruct()
    c.load(d, compiled=compiled)
    a = c.test(b'\x01\x00\x02\x00\x01\x00\x00\x02\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\x00\x02\x00\x03\x00')
    assert ((a.a16.enum == c.Test16) and (a.a16.value == c.Test16.A))
    assert ((a.b16.enum == c.Test16) and (a.b16.value == c.Test16.B))
    assert ((a.a24.enum == c.Test24) and (a.a24.value == c.Test24.A))
    assert ((a.b24.enum == c.Test24) and (a.b24.value == c.Test24.B))
    assert ((a.a32.enum == c.Test32) and (a.a32.value == c.Test32.A))
    assert ((a.b32.enum == c.Test32) and (a.b32.value == c.Test32.B))
    assert (len(a.l) == 2)
    assert ((a.l[0].enum == c.Test16) and (a.l[0].value == c.Test16.A))
    assert ((a.l[1].enum == c.Test16) and (a.l[1].value == c.Test16.B))
    # c16 = 0x3 combines both flag bits.
    assert (a.c16 == (c.Test16.A | c.Test16.B))
    assert (a.c16 & c.Test16.A)
    assert (str(a.c16) == 'Test16.B|A')
def test_init_terms_strict_negative():
    """Strict Terms construction must reject this fixture with AEAEnforceError."""
    terms_kwargs = dict(
        ledger_id=DEFAULT_LEDGER,
        sender_address='SenderAddress',
        counterparty_address='CounterpartyAddress',
        amount_by_currency_id={'FET': 10},
        quantities_by_good_id={'good_1': 20},
        is_sender_payable_tx_fee=True,
        nonce='somestring',
        is_strict=True,
    )
    with pytest.raises(AEAEnforceError):
        Terms(**terms_kwargs)
class _Dispatch(object):
    """Dynamic wrapper around a COM ``IDispatch`` pointer.

    Attribute names are resolved lazily to DISPIDs (cached in ``_ids``) and
    invoked via ``IDispatch::Invoke``.  Names flagged through
    :meth:`_FlagAsMethod` are always treated as methods rather than
    properties.
    """
    def __init__(self, comobj):
        # Assign via __dict__ to bypass our own __setattr__, which would
        # otherwise try to set COM properties on the wrapped object.
        self.__dict__['_comobj'] = comobj
        self.__dict__['_ids'] = {}
        self.__dict__['_methods'] = set()
    def __enum(self):
        """Return the collection's enumerator (DISPID_NEWENUM == -4)."""
        e = self._comobj.Invoke((- 4))
        return e.QueryInterface(comtypes.automation.IEnumVARIANT)
    def __cmp__(self, other):
        # NOTE(review): __cmp__ and the cmp() built-in exist only on
        # Python 2; on Python 3 this is dead code and cmp is undefined --
        # confirm the intended interpreter support.
        if (not isinstance(other, _Dispatch)):
            return 1
        return cmp(self._comobj, other._comobj)
    def __hash__(self):
        return hash(self._comobj)
    def __getitem__(self, index):
        """Fetch the item at `index` via a fresh enumerator (forward-only)."""
        enum = self.__enum()
        if (index > 0):
            if (0 != enum.Skip(index)):
                raise IndexError('index out of range')
        (item, fetched) = enum.Next(1)
        if (not fetched):
            raise IndexError('index out of range')
        return item
    def QueryInterface(self, *args):
        """Forward QueryInterface to the wrapped COM object."""
        return self._comobj.QueryInterface(*args)
    def _FlagAsMethod(self, *names):
        """Force the given attribute names to be dispatched as methods."""
        self._methods.update(names)
    def __getattr__(self, name):
        """Resolve `name` on the COM object: property get, or a MethodCaller.

        Names that fail a property get with a context error (or that were
        flagged via _FlagAsMethod) are cached as bound MethodCaller objects.
        """
        if (name.startswith('__') and name.endswith('__')):
            raise AttributeError(name)
        dispid = self._ids.get(name)
        if (not dispid):
            dispid = self._comobj.GetIDsOfNames(name)[0]
            self._ids[name] = dispid
        if (name in self._methods):
            result = MethodCaller(dispid, self)
            # Cache in the instance dict so __getattr__ is skipped next time.
            self.__dict__[name] = result
            return result
        flags = comtypes.automation.DISPATCH_PROPERTYGET
        try:
            result = self._comobj.Invoke(dispid, _invkind=flags)
        except COMError as err:
            (hresult, text, details) = err.args
            # Some servers reject a property get on methods; fall back to
            # treating the name as a method in that case.
            if (hresult in ERRORS_BAD_CONTEXT):
                result = MethodCaller(dispid, self)
                self.__dict__[name] = result
            else:
                raise
        except:
            raise
        return result
    def __setattr__(self, name, value):
        """Set a COM property; PROPERTYPUTREF (8) for objects, PUT (4) else."""
        dispid = self._ids.get(name)
        if (not dispid):
            dispid = self._comobj.GetIDsOfNames(name)[0]
            self._ids[name] = dispid
        flags = (8 if _is_object(value) else 4)
        return self._comobj.Invoke(dispid, value, _invkind=flags)
    def __iter__(self):
        """Iterate the COM collection via IEnumVARIANT."""
        return _Collection(self.__enum())
def test_mark_entities_mesh_mark_entities_1d():
    """mark_entities on a 1D mesh labels exactly the expected plex point."""
    label_name = 'test_label'
    label_value = 999
    mesh = UnitIntervalMesh(2)
    (x,) = SpatialCoordinate(mesh)
    V = FunctionSpace(mesh, 'P', 1)
    # Indicator function: 1.0 only where x < 0.25 (a single vertex region).
    f = Function(V).interpolate(conditional((x < 0.25), 1.0, 0.0))
    mesh.mark_entities(f, label_value, label_name=label_name)
    plex = mesh.topology.topology_dm
    label = plex.getLabel(label_name)
    # Exactly one plex entity (point index 2) should carry the label.
    assert (label.getStratumIS(label_value).getSize() == 1)
    assert all((label.getStratumIS(label_value).getIndices() == [2]))
class SSHNetconf(SSHCommandSession):
    """SSH session speaking NETCONF 1.0 framing (]]>]]>-delimited messages).

    On connect it captures the server's <hello>, sends our own hello, and
    verifies that the two ends share at least one base capability.
    """
    TERM_TYPE: typing.Optional[str] = None
    # NETCONF 1.0 end-of-message delimiter; also serves as the "prompt".
    DELIM: bytes = b']]>]]>'
    PROMPT: re.Pattern = re.compile(DELIM)
    HELLO_MESSAGE: bytes = b'<?xml version="1.0" encoding="UTF-8" ?>\n<hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">\n  <capabilities>\n    <capability>urn:ietf:params:netconf:base:1.0</capability>\n  </capabilities>\n</hello>\n'

    def __init__(self, service: 'FcrServiceBase', devinfo: 'DeviceInfo', options: typing.Dict[(str, typing.Any)], loop: asyncio.AbstractEventLoop) -> None:
        super().__init__(service, devinfo, options, loop)
        # Raw <hello> captured from the device; attached to the next result.
        self.server_hello: typing.Optional[str] = None

    @classmethod
    def register_counter(cls, counters: Counters):
        """Register the counters this session type emits.

        Fixed: the method takes ``cls`` but lacked ``@classmethod``.
        NOTE(review): the name differs from the ``register_counters`` it
        delegates to via super() -- looks like a typo; confirm what the
        framework actually invokes before renaming.
        """
        super().register_counters(counters)
        stats = ['sum', 'avg']
        counters.add_stats_counter('netconf_capability_construction.error', stats)
        counters.add_stats_counter('netconf_capability_construction.all', stats)

    async def _setup_connection(self) -> None:
        """Capture the server hello, answer with ours, and validate caps."""
        resp = (await self.wait_prompt(self.PROMPT))
        self.server_hello = resp.data.strip()
        self._send_command(self.HELLO_MESSAGE)
        self._validate_netconf_capabilities()

    def _send_command(self, cmd: bytes) -> None:
        """Write one NETCONF message terminated by the frame delimiter."""
        self._stream_writer.write((((b'\n' + cmd) + self.DELIM) + b'\n'))

    def _format_output(self, cmd: bytes, resp: ResponseMatch) -> bytes:
        return resp.data.strip()

    def build_result(self, output: str, status: str, command: str) -> CommandResult:
        """Build a result; attach the server hello once, then clear it."""
        result = super().build_result(output, status, command)
        if self.server_hello:
            result.capabilities = self.server_hello
            self.server_hello = None
        return result

    def _validate_netconf_capabilities(self) -> None:
        """Close the session unless both ends share a base capability."""
        assert self.server_hello, "We haven't received hello message from Device yet!"
        self.inc_counter('netconf_capability_construction.all')
        try:
            remote_host_netconf_base_capabilities_set = construct_netconf_capability_set(self.server_hello)
            local_netconf_base_capabilities_set = construct_netconf_capability_set(self.HELLO_MESSAGE)
        except Exception:
            # Best effort: a parse failure is counted, not fatal.
            self.logger.exception("Failed at constructing remote host's capability set")
            self.inc_counter('netconf_capability_construction.error')
            return
        if (not (remote_host_netconf_base_capabilities_set & local_netconf_base_capabilities_set)):
            super().close()
            raise UnsupportedDeviceErrorException(f'''Remote host and FCR do not share common Netconf base capabilities!
 Current FCR supported Netconf base capabilities: {local_netconf_base_capabilities_set}''')

    async def _run_command(self, command: bytes, timeout: typing.Optional[int]=None, prompt_re: typing.Optional[typing.Pattern]=None) -> bytes:
        """Send one command and wait for a delimited response."""
        try:
            self.logger.info(f'Sending command to device. Command: {command}')
            self._send_command(command)
            # NOTE(review): the explicit loop= argument to asyncio.wait_for
            # was deprecated in 3.8 and removed in 3.10 -- confirm the
            # supported Python version before cleaning this up.
            resp = (await asyncio.wait_for(self.wait_prompt(self.PROMPT), (timeout or self._devinfo.vendor_data.cmd_timeout_sec), loop=self._loop))
            return self._format_output(command, resp)
        except asyncio.TimeoutError:
            self.logger.error('Timeout waiting for command response')
            data = (await self._stream_reader.drain())
            # Include the last 200 bytes of whatever the device did send.
            raise CommandExecutionTimeoutErrorException('Command Response Timeout', data[(- 200):])

    async def _connect(self, subsystem: typing.Optional[str]=None, exec_command: typing.Optional[str]=None) -> None:
        """Connect via the device's configured subsystem or exec command."""
        command = None
        device = self._opts.get('device')
        subsystem = (device.session_data.subsystem if device.session_data else None)
        if (not subsystem):
            command = (device.session_data.exec_command if device.session_data else None)
            if (not command):
                raise ValidationErrorException('either subsystem or exec_command must be specified for netconf session')
        return (await super()._connect(subsystem=subsystem, exec_command=command))
def convert_vertex_colors(self, context):
    """Bake each material slot's ID color into the mesh's vertex colors.

    For every material slot, selects the faces using that slot in edit
    mode, then paints them (with a selection mask) in vertex-paint mode
    using the gamma-corrected slot color.
    """
    obj = bpy.context.active_object
    for i in range(len(obj.material_slots)):
        slot = obj.material_slots[i]
        if slot.material:
            # Select only the faces assigned to this material slot.
            bpy.ops.object.mode_set(mode='EDIT')
            bpy.ops.mesh.select_all(action='DESELECT')
            bm = bmesh.from_edit_mesh(bpy.context.active_object.data)
            for face in bm.faces:
                if (face.material_index == i):
                    face.select = True
            # Convert the slot color to linear space before painting.
            # (`gamma` is a module-level constant not visible here --
            # presumably 2.2; confirm at the top of the file.)
            color = utilities_color.get_color(i).copy()
            color[0] = pow(color[0], (1 / gamma))
            color[1] = pow(color[1], (1 / gamma))
            color[2] = pow(color[2], (1 / gamma))
            utilities_bake.assign_vertex_color(obj)
            # Paint the selected faces only (use_paint_mask masks to selection).
            bpy.ops.object.mode_set(mode='VERTEX_PAINT')
            bpy.context.tool_settings.vertex_paint.brush.color = color
            bpy.context.object.data.use_paint_mask = True
            bpy.ops.paint.vertex_color_set()
            bpy.ops.object.mode_set(mode='VERTEX_PAINT')
            bpy.context.object.data.use_paint_mask = False
    # Switch the properties editor to the DATA tab so the result is visible.
    for area in bpy.context.screen.areas:
        if (area.type == 'PROPERTIES'):
            for space in area.spaces:
                if (space.type == 'PROPERTIES'):
                    space.context = 'DATA'
    # Force solid shading in the 3D viewport so vertex colors show up.
    for area in bpy.context.screen.areas:
        if (area.type == 'VIEW_3D'):
            for space in area.spaces:
                if (space.type == 'VIEW_3D'):
                    space.shading.type = 'SOLID'
    bpy.ops.ui.textools_popup('INVOKE_DEFAULT', message='Vertex colors assigned')
class GithubRest(requests.Session):
    """requests.Session wrapper for the GitHub REST API.

    Adds token authentication, rate-limit tracking from the
    ``X-RateLimit-*`` response headers, and an optional wait-and-retry
    when a 403 indicates the rate limit was hit.
    """
    GET = 'GET'
    POST = 'POST'
    PUT = 'PUT'
    DELETE = 'DELETE'
    # Fixed: this literal was truncated to an unterminated string
    # (`base_url = '`), a syntax error; restored the GitHub API base URL.
    base_url = 'https://api.github.com'
    repos_url = (base_url + '/repos')

    def __init__(self, token: str, wait_til_limits: bool=True):
        """
        :param token: GitHub personal access token used for Authorization.
        :param wait_til_limits: when True, sleep until the rate-limit reset
            time and retry once after a 403.
        """
        super().__init__()
        self.token = token
        self.limits = RequestsLimit(None, None, None)
        self.wait_til_limits = wait_til_limits

    def request(self, method, url, **kwargs) -> requests.Response:
        """Issue a request, optionally waiting out a hit rate limit once."""
        resp = self.__make_request(method=method, url=url, **kwargs)
        if (self.wait_til_limits and (resp.status_code == 403) and (self.limits.limit_reset_time is not None)):
            wait: datetime.timedelta = (self.limits.limit_reset_time - datetime.datetime.now())
            log.warning(f'{method} response [{resp.status_code}]remaining_retries={self.limits.requests_remaining} Wait til {self.limits.limit_reset_time} ({wait})url=`{url}`')
            time.sleep(wait.total_seconds())
            log.debug(f'Retry making request to Github API method={method}, url={url}, kwargs={kwargs} after reset limits')
            resp = self.__make_request(method=method, url=url, **kwargs)
        return resp

    def __make_request(self, method, url, **kwargs) -> requests.Response:
        """Perform one authenticated request and record the rate limits."""
        log.debug(f'Make request to Github API method={method}, url={url}, kwargs={kwargs}')
        kwargs['headers'] = {**kwargs.get('headers', {}), **{'Authorization': f'token {self.token}'}}
        resp = super().request(method=method, url=url, **kwargs)
        self.limits = self.__get_limits(resp.headers)
        log.debug(f'Get response[{resp.status_code}] from Github API method={method}, url={url}, kwargs={kwargs}')
        return resp

    def _get_repo_url(self, repo_name):
        """URL of a repository resource (repo_name is 'owner/repo')."""
        return f'{self.repos_url}/{repo_name}'

    def _get_repo_events_url(self, repo_name):
        """URL of a repository's events listing."""
        return f'{self._get_repo_url(repo_name)}/events'

    @staticmethod
    def __get_limits(headers) -> RequestsLimit:
        """Parse the X-RateLimit-* response headers into a RequestsLimit.

        Fixed: missing ``@staticmethod`` -- the instance call
        ``self.__get_limits(resp.headers)`` previously passed ``self`` into
        the single ``headers`` parameter and raised TypeError.
        """
        def __parse_int(value: str) -> Optional[int]:
            try:
                return int(value)
            except Exception as ex:
                log.warning(f'Error parse `{value}` to int. Exception: {ex}')
                return

        def __parse_datetime(value: str) -> Optional[datetime.datetime]:
            # X-RateLimit-Reset is a Unix timestamp in seconds.
            value = __parse_int(value)
            if (value is None):
                return
            try:
                return datetime.datetime.fromtimestamp(value)
            except Exception as ex:
                log.warning(f'Error parse `{value}` to datetime. Exception: {ex}')
                return

        return RequestsLimit(requests_limit=__parse_int(headers.get('X-RateLimit-Limit')), requests_remaining=__parse_int(headers.get('X-RateLimit-Remaining')), limit_reset_time=__parse_datetime(headers.get('X-RateLimit-Reset')))

    def get_repository_events(self, repo_name: str) -> List[dict]:
        """Return the repository's events, or [] on failure."""
        log.info(f'Get events in repository {repo_name}')
        resp = self.request(method=self.GET, url=self._get_repo_events_url(repo_name=repo_name))
        try:
            if (resp.status_code == 200):
                return resp.json()
        except Exception as ex:
            log.warning(f'Exception on parse events in repository {repo_name}: {ex}')
        log.warning(f'Empty response on events in repository {repo_name}')
        return []

    def get_repository(self, repo_name: str) -> Optional[Dict[(str, Any)]]:
        """Return the repository's metadata dict, or None on failure."""
        log.info(f'Get repository {repo_name} information')
        resp = self.request(method=self.GET, url=self._get_repo_url(repo_name=repo_name))
        try:
            if (resp.status_code == 200):
                return resp.json()
        except Exception as ex:
            log.warning(f'Exception on parse response on getting repository {repo_name}: {ex}')
        log.warning(f'Empty response on getting repository {repo_name}')
def init_myst_file(path, kernel, verbose=True):
    """Attach a Jupyter kernel and MyST format to a markdown file.

    :param path: path to an existing markdown file.
    :param kernel: name of an installed Jupyter kernel, or None to pick the
        only installed one (an error is printed if several are installed).
    :param verbose: print a confirmation after initialization.
    :raises ImportError: if jupytext is not installed.
    :raises FileNotFoundError: if `path` does not exist.
    :raises ValueError: if `kernel` is not an installed kernel.
    """
    try:
        from jupytext.cli import jupytext
    except ImportError:
        raise ImportError('In order to use myst markdown features, please install jupytext first.')
    if (not Path(path).exists()):
        raise FileNotFoundError(f'Markdown file not found: {path}')
    kernels = list(find_kernel_specs().keys())
    kernels_text = '\n'.join(kernels)
    if (kernel is None):
        if (len(kernels) > 1):
            # Ambiguous: refuse to guess among multiple installed kernels.
            _error(f'''There are multiple kernel options, so you must give one manually. with `--kernel`
Please specify one of the following kernels.
{kernels_text}''')
        else:
            kernel = kernels[0]
    if (kernel not in kernels):
        raise ValueError(f'''Did not find kernel: {kernel}
Please specify one of the installed kernels:
{kernels_text}''')
    # Delegate to the jupytext CLI to set the kernel and the myst format.
    args = (str(path), '-q', '--set-kernel', kernel, '--set-formats', 'myst')
    jupytext(args)
    if verbose:
        print(f'''Initialized file: {path}
With kernel: {kernel}''')
class LinkedFaceAreaCache(Cache):
    """Cache mapping a face to the total area of its island of linked faces.

    Faces are considered linked within the given angle threshold; computing
    the value for one face memoizes it for every face of the same island.
    """

    def __init__(self, angle=0.0, *args, **kwargs):
        super().__init__(angle, *args, **kwargs)
        self._angle = angle

    def _calc(self, face, *args, **kwargs):
        """Sum the island's face areas and memoize the result per face."""
        island = utils.get_linked_faces(face, self._angle)
        total_area = 0
        for linked in island:
            total_area += linked.calc_area()
        # Pre-populate the cache for every member of the island, so the
        # expensive island walk happens at most once per island.
        for linked in island:
            self._cache[(linked, *args)] = total_area
        return total_area
class OptionPlotoptionsTreegraphSonificationTracksMappingPitch(Options):
    """Pitch mapping options for treegraph sonification tracks.

    NOTE(review): each option appears as a getter/setter pair of same-named
    methods; in this form the later (setter) definition overrides the
    getter, which suggests ``@property`` / ``@<name>.setter`` decorators
    were stripped upstream -- confirm against the generator's output.
    """
    def mapFunction(self):
        """Mapping function applied to the source values."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        """Data point property the pitch maps to (default 'y')."""
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        """Highest note of the mapping range (default 'c6')."""
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        """Lowest note of the mapping range (default 'c2')."""
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        """Musical scale to constrain notes to."""
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        """Range the mapping is computed within (default 'yAxis')."""
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesOrganizationTooltip(Options):
    """Tooltip options for organization-chart series.

    NOTE(review): each option appears as a getter/setter pair of same-named
    methods; in this form the later (setter) definition overrides the
    getter, which suggests ``@property`` / ``@<name>.setter`` decorators
    were stripped upstream -- confirm against the generator's output.
    """
    def clusterFormat(self):
        """Format string for clustered-points tooltips."""
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    def dateTimeLabelFormats(self) -> 'OptionSeriesOrganizationTooltipDatetimelabelformats':
        """Nested date/time label format sub-options."""
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesOrganizationTooltipDatetimelabelformats)
    def distance(self):
        """Tooltip distance from the point in pixels (default 6)."""
        return self._config_get(6)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        """Whether the tooltip follows the mouse pointer (default True)."""
        return self._config_get(True)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        """Whether the tooltip follows touch-move gestures (default True)."""
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        """Format string appended after the tooltip body."""
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        """Overall tooltip format string."""
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        """Format string prepended before the tooltip body."""
        return self._config_get('<span style="font-size: 0.8em">{series.name}</span><br/>')
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nodeFormat(self):
        """Format string used for node tooltips."""
        return self._config_get('{point.name}<br>{point.title}<br>{point.description}')
    def nodeFormat(self, text: str):
        self._config(text, js_type=False)
    def nodeFormatter(self):
        """Callback overriding nodeFormat."""
        return self._config_get(None)
    def nodeFormatter(self, value: Any):
        self._config(value, js_type=False)
    def nullFormat(self):
        """Format string used for null points."""
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        """Callback overriding nullFormat."""
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        """Format string for link (point) tooltips."""
        return self._config_get('{point.fromNode.name} \u2192 {point.toNode.name}: <b>{point.weight}</b><br/>')
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        """Callback overriding pointFormat."""
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        """Number of decimals shown for values."""
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        """String prepended to each value."""
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        """String appended to each value."""
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        """Date format for the tooltip header on datetime axes."""
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
class TaskTimingInfoWidget(QtWidgets.QWidget):
def __init__(self, task=None, parent=None, **kwargs):
self._task = None
self.parent = parent
super(TaskTimingInfoWidget, self).__init__(parent=parent)
self.vertical_layout = None
self.title_label = None
self.form_layout = None
self.bid_label = None
self.bid_field = None
self.schedule_timing_label = None
self.schedule_timing_field = None
self.total_time_logs_label = None
self.total_time_logs_field = None
self.time_to_complete_label = None
self.time_to_complete_field = None
self.schedule_model_label = None
self.schedule_model_field = None
self._setup_ui()
self.task = task
def _setup_ui(self):
self.setStyleSheet('\n QLabel{\n background-color: #d9edf7;\n color: #3a87ad;\n padding-top: 2px;\n padding-bottom: 2px;\n }\n QLabel[labelField="true"] {\n font-weight: bold;\n }\n ')
self.vertical_layout = QtWidgets.QVBoxLayout(self)
self.title_label = QtWidgets.QLabel(self)
self.title_label.setText('Timing (0 TimeLogs)')
self.title_label.setAlignment(QtCore.Qt.AlignCenter)
self.title_label.setStyleSheet('\n font-weight: bold;\n background-color: white;\n color: #7c9fca;\n ')
self.vertical_layout.addWidget(self.title_label)
self.form_layout = QtWidgets.QFormLayout()
self.form_layout.setLabelAlignment(((QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing) | QtCore.Qt.AlignVCenter))
self.form_layout.setSpacing(2)
self.vertical_layout.addLayout(self.form_layout)
i = (- 1)
label_width = 120
field_width = 60
label_align = QtCore.Qt.AlignRight
i += 1
self.bid_label = QtWidgets.QLabel(self)
self.bid_label.setText('Bid')
self.bid_label.setProperty('labelField', True)
self.bid_label.setMinimumWidth(label_width)
self.bid_label.setAlignment(label_align)
self.bid_field = QtWidgets.QLabel(self)
self.bid_field.setMinimumWidth(field_width)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.LabelRole, self.bid_label)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.FieldRole, self.bid_field)
i += 1
self.schedule_timing_label = QtWidgets.QLabel(self)
self.schedule_timing_label.setText('Schedule Timing')
self.schedule_timing_label.setProperty('labelField', True)
self.schedule_timing_label.setMinimumWidth(label_width)
self.schedule_timing_label.setAlignment(label_align)
self.schedule_timing_field = QtWidgets.QLabel(self)
self.schedule_timing_field.setMinimumWidth(field_width)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.LabelRole, self.schedule_timing_label)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.FieldRole, self.schedule_timing_field)
i += 1
self.total_time_logs_label = QtWidgets.QLabel(self)
self.total_time_logs_label.setText('Total Time Logs')
self.total_time_logs_label.setProperty('labelField', True)
self.total_time_logs_label.setMinimumWidth(label_width)
self.total_time_logs_label.setAlignment(label_align)
self.total_time_logs_field = QtWidgets.QLabel(self)
self.total_time_logs_field.setMinimumWidth(field_width)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.LabelRole, self.total_time_logs_label)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.FieldRole, self.total_time_logs_field)
i += 1
self.time_to_complete_label = QtWidgets.QLabel(self)
self.time_to_complete_label.setText('Time To Complete')
self.time_to_complete_label.setProperty('labelField', True)
self.time_to_complete_label.setMinimumWidth(label_width)
self.time_to_complete_label.setAlignment(label_align)
self.time_to_complete_field = QtWidgets.QLabel(self)
self.time_to_complete_field.setMinimumWidth(field_width)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.LabelRole, self.time_to_complete_label)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.FieldRole, self.time_to_complete_field)
i += 1
self.schedule_model_label = QtWidgets.QLabel(self)
self.schedule_model_label.setText('Schedule Model')
self.schedule_model_label.setProperty('labelField', True)
self.schedule_model_label.setMinimumWidth(label_width)
self.schedule_model_label.setAlignment(label_align)
self.schedule_model_field = QtWidgets.QLabel(self)
self.schedule_model_field.setMinimumWidth(field_width)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.LabelRole, self.schedule_model_label)
self.form_layout.setWidget(i, QtWidgets.QFormLayout.FieldRole, self.schedule_model_field)
def task(self):
    """Return the Task instance currently shown by this widget (or None)."""
    current = self._task
    return current
def task(self, task):
    """Populate the timing fields of this widget from ``task``.

    If ``task`` is a stalker ``Task``, store it and render its bid,
    schedule timing, total logged time, remaining time and schedule
    model.  Any other value clears the stored task and shows a
    'No Task' placeholder in every field.
    """
    from stalker import Task
    if isinstance(task, Task):
        self._task = task
        self.bid_field.setText('%s %s' % (task.bid_timing, task.bid_unit))
        self.schedule_timing_field.setText('%s %s' % (task.schedule_timing, task.schedule_unit))
        self.total_time_logs_field.setText('%s %s' % task.least_meaningful_time_unit(task.total_logged_seconds))
        self.time_to_complete_field.setText('%s %s' % task.least_meaningful_time_unit(task.total_seconds - task.total_logged_seconds))
        self.schedule_model_field.setText(task.schedule_model)
    else:
        field_value = 'No Task'
        self.bid_field.setText(field_value)
        self.schedule_timing_field.setText(field_value)
        self.total_time_logs_field.setText(field_value)
        self.time_to_complete_field.setText(field_value)
        # Fix: previously the schedule model field was the only one not
        # reset here, leaving a stale value on screen after clearing.
        self.schedule_model_field.setText(field_value)
        self._task = None
class _FaunaJSONEncoder(JSONEncoder):
    """JSON encoder that lowers driver types to the FaunaDB wire format."""

    def default(self, obj):
        # Query expressions and FaunaTime know how to serialize themselves.
        if isinstance(obj, _Expr):
            return obj.to_fauna_json()
        elif isinstance(obj, datetime):
            return FaunaTime(obj).to_fauna_json()
        elif isinstance(obj, date):
            # Fix: dates and bytes previously collapsed onto the same
            # empty-string key, which is ambiguous on the wire; FaunaDB
            # uses the special '@date' / '@bytes' keys.
            return {'@date': obj.isoformat()}
        elif isinstance(obj, (bytes, bytearray)):
            return {'@bytes': urlsafe_b64encode(obj).decode('utf-8')}
        else:
            raise UnexpectedError('Unserializable object {} of type {}'.format(obj, type(obj)), None)
def render_target(jinja2_env, target_dir, project_name, target, target_cfg):
    """Render one Dockerfile per target environment into ``target_dir``.

    Logs an error and returns early when ``target_cfg`` cannot be turned
    into config items.
    """
    cfg_items = gen_target_cfg_items(target_cfg)
    if not cfg_items:
        logger.error('[%s] Invalid type for target config: %s', project_name, type(target_cfg))
        return
    for target_env, env_cfg in gen_target_env_cfg(cfg_items):
        out_path = os.path.join(target_dir, 'Dockerfile-' + target_env)
        logger.info('[%s] Rendering target env <%s-%s> to %s...', project_name, target, target_env, out_path)
        logger.debug('[%s] Rendering template with context %s', project_name, env_cfg)
        # The template to use is carried inside the env config itself.
        template = jinja2_env.get_template(env_cfg['_template'])
        with open(out_path, 'w') as out_file:
            out_file.write(template.render(**env_cfg))
class TestTupleLenFlip:
    """Verify that a wrong-length tuple value fails Spock instantiation."""

    def test_tuple_len_set(self, monkeypatch):
        with monkeypatch.context() as patched:
            with pytest.raises(_SpockInstantiationError):
                # Run the builder against an empty argv; generation must fail.
                patched.setattr(sys, 'argv', [''])
                builder = ConfigArgBuilder(TupleFailFlip, desc='Test Builder')
                builder.generate()
class DcNodeSerializer(s.InstanceSerializer):
    """Serializer for a DcNode: a node's resources attached to one datacenter.

    Adds dynamic min/max validators for cpu/ram/disk based on the node's
    physical limits and current usage, and (for the RESERVED strategy)
    checks requests against the node's non-reserved free capacity.
    """
    _model_ = DcNode
    _update_fields_ = ('strategy', 'cpu', 'ram', 'disk', 'priority')
    _default_fields_ = ('cpu', 'ram', 'disk')
    hostname = s.Field(source='node.hostname')
    strategy = s.IntegerChoiceField(choices=DcNode.STRATEGY, default=DcNode.SHARED)
    priority = s.IntegerField(min_value=0, max_value=9999, default=100)
    cpu = s.IntegerField()
    ram = s.IntegerField()
    disk = s.IntegerField()
    cpu_free = s.IntegerField(read_only=True)
    ram_free = s.IntegerField(read_only=True)
    disk_free = s.IntegerField(read_only=True)
    ram_kvm_overhead = s.IntegerField(read_only=True)

    def __init__(self, request, instance, *args, **kwargs):
        """Attach per-instance min/max validators (single-object mode only)."""
        super(DcNodeSerializer, self).__init__(request, instance, *args, **kwargs)
        if (not kwargs.get('many', False)):
            # Upper bound: the node's total physical resources.
            (cpu_n, ram_n, disk_n) = instance.node.resources
            self.fields['cpu'].validators.append(validators.MaxValueValidator(int(cpu_n)))
            self.fields['ram'].validators.append(validators.MaxValueValidator(int(ram_n)))
            self.fields['disk'].validators.append(validators.MaxValueValidator(int(disk_n)))
            if (request.method == 'PUT'):
                # Lower bound on update: what this DC already consumes.
                (cpu_min, ram_min, disk_min) = instance.node.get_used_resources(request.dc)
            else:
                # Lower bound otherwise: currently allocated (total - free).
                cpu_min = ((instance.cpu or 0) - instance.cpu_free)
                ram_min = ((instance.ram or 0) - instance.ram_free)
                disk_min = ((instance.disk or 0) - instance.disk_free)
            self.fields['cpu'].validators.append(validators.MinValueValidator(cpu_min))
            self.fields['ram'].validators.append(validators.MinValueValidator(ram_min))
            self.fields['disk'].validators.append(validators.MinValueValidator(disk_min))

    def validate(self, attrs):
        """For the RESERVED strategy, ensure requested resources fit into the
        node's non-reserved free capacity (excluding this DC)."""
        strategy = int(attrs.get('strategy', self.object.strategy))
        if (strategy == DcNode.RESERVED):
            cpu = int(attrs.get('cpu', self.object.cpu))
            ram = int(attrs.get('ram', self.object.ram))
            disk = int(attrs.get('disk', self.object.disk))
            (cpu_nf, ram_nf, disk_nf) = self.object.get_nonreserved_free_resources(exclude_this_dc=True)
            if (cpu > cpu_nf):
                self._errors['cpu'] = s.ErrorList([_('Not enough free CPUs on node.')])
            if (ram > ram_nf):
                self._errors['ram'] = s.ErrorList([_('Not enough free RAM on node.')])
            if (disk > disk_nf):
                self._errors['disk'] = s.ErrorList([_('Not enough free disk space on node.')])
        return attrs

    def detail_dict(self, **kwargs):
        """Extend the base detail dict with the current datacenter."""
        details = super(DcNodeSerializer, self).detail_dict()
        details['dc'] = self.request.dc
        return details
class TestOFPTableStatsRequest(unittest.TestCase):
    """Tests for the OpenFlow 1.0 OFPTableStatsRequest message."""

    class Datapath(object):
        # Minimal stand-in datapath exposing the protocol modules.
        ofproto = ofproto
        ofproto_parser = ofproto_v1_0_parser

    # Fixture for the flags field: raw wire bytes and the decoded value.
    flags = {'buf': b'\x00\x00', 'val': 0}
    c = OFPTableStatsRequest(Datapath, flags['val'])

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        # Constructor must set the stats type and echo the flags back.
        eq_(ofproto.OFPST_TABLE, self.c.type)
        eq_(self.flags['val'], self.c.flags)

    def test_parser(self):
        # Requests are never parsed from the wire; nothing to test.
        pass

    def test_serialize(self):
        # Serialization must produce a valid OpenFlow stats-request header.
        self.c.serialize()
        eq_(ofproto.OFP_VERSION, self.c.version)
        eq_(ofproto.OFPT_STATS_REQUEST, self.c.msg_type)
        eq_(0, self.c.xid)
        # Unpack header + stats body and verify every serialized field.
        fmt = (('!' + ofproto.OFP_HEADER_PACK_STR.replace('!', '')) + ofproto.OFP_STATS_MSG_PACK_STR.replace('!', ''))
        res = struct.unpack(fmt, six.binary_type(self.c.buf))
        eq_(ofproto.OFP_VERSION, res[0])
        eq_(ofproto.OFPT_STATS_REQUEST, res[1])
        eq_(len(self.c.buf), res[2])
        eq_(0, res[3])
        eq_(ofproto.OFPST_TABLE, res[4])
        eq_(self.flags['val'], res[5])
(scope='session')
def univariate_data() -> UnivariateData:
    """Session fixture: a univariate box-shaped dataset with precomputed
    smoothing-spline reference results.

    Contains data sites/values (x, y), a denser evaluation grid (xi) with
    expected spline values (yi), first/second derivatives (yi_d1, yi_d2),
    antiderivative (yi_ad1), the exact integral over the domain, and the
    smoothing parameter used to produce them.
    """
    x = np.linspace((- 5.0), 5.0, 25)
    y = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])
    xi = np.linspace((- 5.0), 5.0, 100)
    yi = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])
    yi_d1 = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 0., 0., 0., 0., 0.])
    yi_d2 = np.array([0.0, (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 0., 0., 0., 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 1., 1., 1., 0., 0., (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), (- 0.), 0., 0., 0., 0., (- 0.), (- 0.), (- 0.), 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0.0])
    yi_ad1 = np.array([0, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 2., 2., 2., 2., 2., 2., 2., 2., 2., 3., 3., 3., 3., 3., 3., 3., 3., 3., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5., 5.])
    integral = 5.
    smooth = 0.
    return UnivariateData(x, y, xi, yi, yi_d1, yi_d2, yi_ad1, integral, smooth)
.EventDecorator()
def restrict(fine_dual, coarse_dual):
    """Restrict a dual function (cofunction) from a fine mesh to a coarse one.

    Walks down the mesh hierarchy one refinement level at a time, applying
    the generated restriction kernel at each step, and accumulates the
    result into ``coarse_dual``, which is also returned.
    """
    check_arguments(coarse_dual, fine_dual, needs_dual=True)
    Vf = fine_dual.function_space()
    Vc = coarse_dual.function_space()
    if (len(Vc) > 1):
        # Mixed space: restrict component-wise via the transfer manager.
        if (len(Vc) != len(Vf)):
            raise ValueError('Mixed spaces have different lengths')
        for (in_, out) in zip(fine_dual.subfunctions, coarse_dual.subfunctions):
            manager = firedrake.dmhooks.get_transfer_manager(in_.function_space().dm)
            manager.restrict(in_, out)
        return coarse_dual
    if ((Vc.ufl_element().family() == 'Real') or (Vf.ufl_element().family() == 'Real')):
        # 'Real' spaces are mesh-independent: a plain vector copy suffices.
        assert (Vc.ufl_element().family() == 'Real')
        assert (Vf.ufl_element().family() == 'Real')
        with coarse_dual.dat.vec_wo as dest, fine_dual.dat.vec_ro as src:
            src.copy(dest)
        return coarse_dual
    (hierarchy, coarse_level) = utils.get_level(ufl_expr.extract_unique_domain(coarse_dual))
    (_, fine_level) = utils.get_level(ufl_expr.extract_unique_domain(fine_dual))
    refinements_per_level = hierarchy.refinements_per_level
    # Number of single-refinement restriction steps to perform.
    repeat = ((fine_level - coarse_level) * refinements_per_level)
    next_level = (fine_level * refinements_per_level)
    element = Vc.ufl_element()
    meshes = hierarchy._meshes
    for j in range(repeat):
        next_level -= 1
        if (j == (repeat - 1)):
            # Last step: accumulate directly into the (zeroed) target.
            coarse_dual.dat.zero()
            next = coarse_dual
        else:
            # Intermediate step: restrict into a temporary cofunction on
            # the next coarser mesh.
            Vc = firedrake.FunctionSpace(meshes[next_level], element)
            next = firedrake.Cofunction(Vc.dual())
        Vc = next.function_space()
        node_locations = utils.physical_node_locations(Vf)
        coarse_coords = Vc.mesh().coordinates
        fine_to_coarse = utils.fine_node_to_coarse_node_map(Vf, Vc)
        fine_to_coarse_coords = utils.fine_node_to_coarse_node_map(Vf, coarse_coords.function_space())
        # Ensure halo data is current before the parallel loop reads it.
        for d in [coarse_coords]:
            d.dat.global_to_local_begin(op2.READ)
            d.dat.global_to_local_end(op2.READ)
        kernel = kernels.restrict_kernel(Vf, Vc)
        op2.par_loop(kernel, fine_dual.node_set, next.dat(op2.INC, fine_to_coarse), fine_dual.dat(op2.READ), node_locations.dat(op2.READ), coarse_coords.dat(op2.READ, fine_to_coarse_coords))
        # The freshly restricted cofunction becomes the fine side of the
        # next iteration.
        fine_dual = next
        Vf = Vc
    return coarse_dual
def kernel_name(op):
    """Build the unique lookup key string for a CUTLASS GEMM operation."""
    from cutlass_lib import library

    tile = op.tile_description
    context = {
        'threadblock': tile.procedural_name(),
        'extended_name': op.extended_name(),
        'opcode_class_name': library.OpcodeClassNames[tile.math_instruction.opcode_class],
        'layout': op.layout_name(),
        'align_ab': op.A.alignment,
        'align_c': op.C.alignment,
    }
    prefix = ''
    if op.prefix != '':
        # Prefixed (e.g. SM90) kernels also carry schedule suffixes.
        prefix = '{}{}{}'.format(
            op.prefix,
            library.KernelScheduleSuffixes[op.kernel_schedule],
            library.EpilogueScheduleSuffixes[op.epilogue_schedule],
        )
    rendered = KERNEL_KEY_TEMPLATE.render(prefix=prefix, **context)
    return rendered.replace('\n', '')
def eq_anr(record, value):
    """Return True if any well-known name attribute of ``record`` starts
    with ``value`` (case-insensitively), False otherwise.

    Attribute values may be strings, lists, or dicts nested arbitrarily;
    any other leaf type raises UnexpectedFormatException.
    """
    def matches_any(check, obj):
        # Recursively apply ``check`` to every string leaf of ``obj``.
        if isinstance(obj, dict):
            return any(matches_any(check, sub) for sub in obj.values())
        elif isinstance(obj, list):
            return any(matches_any(check, item) for item in obj)
        elif isinstance(obj, str):
            return check(obj)
        raise UnexpectedFormatException(f'Unexpected value, expected: dict, list or str, obtained: {type(obj)}.')

    needle = value.lower()
    validate = (lambda text: text.lower().startswith(needle))
    keys = ['displayName', 'givenName', 'legacyExchangeDN', 'physicalDeliveryOfficeName', 'proxyAddresses', 'Name', 'sAMAccountName', 'sn']
    for key in keys:
        if (key in record) and matches_any(validate, record[key]):
            return True
    # Fix: previously fell off the end and returned None on a miss; make
    # the negative result an explicit False.
    return False
def test_generate_volume_config_with_test_dir():
    """The volume map must mount the test directory read-only in the container."""
    output_dir = '/tmp/random_dir_on_disk'
    test_dir = '/home/user/test_directory'
    volume_map = docker_volume.generate_volume_config(output_dir, test_dir)
    assert volume_map
    verify_required_paths(volume_map, output_dir)
    # The test directory gets its own entry, bound to the external test
    # dir inside the container, read-only.
    assert test_dir in volume_map
    entry = volume_map[test_dir]
    assert entry['bind'] == constants.NIGHTHAWK_EXTERNAL_TEST_DIR
    assert entry['mode'] == constants.MOUNT_READ_ONLY
def _transform_function(f: Callable) -> Tuple[Optional[List[ast.stmt]], str, str]:
    """Rewrite a Bean Machine model callable into its BMG AST form.

    Returns (transformed statements, function name, source text); the
    statements are None (with empty name/source) when ``f`` is not a
    plain function definition.
    """
    if f.__name__ == '<lambda>':
        # Lambdas take a separate transformation path.
        return _transform_lambda(f)
    source, original_ast = _get_lines_ast(f)
    assert len(original_ast.body) == 1
    node = original_ast.body[0]
    if not isinstance(node, ast.FunctionDef):
        return None, '', ''
    transformed: ast.Module = _bm_ast_to_bmg_ast(original_ast)
    assert len(transformed.body) >= 1
    funcdef = transformed.body[0]
    assert isinstance(funcdef, ast.FunctionDef)
    return transformed.body, funcdef.name, source
def to_decimal(value: Any) -> Fixed:
    """Convert ``value`` to a Fixed decimal, enforcing the 128-bit range.

    Raises OverflowError outside [-2**127, 2**127) and ValueError when the
    value has a fractional part.  NOTE(review): the error message mentions
    a 10-decimal-point limit, but the quantize check rejects any fraction
    at all — confirm intended behavior.
    """
    result: Fixed = Fixed(value)
    limit = 2 ** 127
    if not (-limit <= result < limit):
        raise OverflowError(f'{value} is outside allowable range for decimal')
    if result != result.quantize(Decimal('1.')):
        raise ValueError('Maximum of 10 decimal points allowed')
    return result
def run_bpftrace(prompt: str, verbose: bool=False) -> CommandResult:
    """Ask the LLM to turn ``prompt`` into a bpftrace invocation and run it.

    The model may answer with a function call (a bpftrace execution or a
    SaveFile request) or with plain text; either way the outcome is
    returned as a dict with command/stdout/stderr/returncode keys.
    """
    messages = [{'role': 'user', 'content': prompt}]
    response = openai.ChatCompletion.create(model='gpt-3.5-turbo', messages=messages, functions=functions, function_call='auto')
    response_message = response['choices'][0]['message']
    if verbose:
        print(response_message)
    if response_message.get('function_call'):
        # bpftrace needs root privileges, so every command runs under sudo.
        full_command = ['sudo']
        if (response_message['function_call']['name'] == 'bpftrace'):
            full_command.append(response_message['function_call']['name'])
            args = json.loads(response_message['function_call']['arguments'])
            command = construct_command(args)
            full_command.extend(command)
            # Default timeout of 5 minutes unless the model specified one.
            timeout = 300
            if args.get('timeout'):
                timeout = args['timeout']
            res = run_command_with_timeout(full_command, int(timeout))
            if (args.get('continue') and (res['stderr'] == '')):
                # Non-empty stderr signals the caller to keep the
                # conversation going for another round.
                res['stderr'] = 'The conversation shall not complete.'
            return res
        elif (response_message['function_call']['name'] == 'SaveFile'):
            # The model asked to persist generated content to disk.
            args = json.loads(response_message['function_call']['arguments'])
            filename = args['filename']
            print(('Save to file: ' + filename))
            print(args['content'])
            with open(filename, 'w') as file:
                file.write(args['content'])
            res = {'command': 'SaveFile', 'stdout': args['content'], 'stderr': '', 'returncode': 0}
            return res
    else:
        # Plain-text answer: wrap it in the same result shape.
        return {'command': 'response_message', 'stdout': response_message['content'], 'stderr': '', 'returncode': 0}
class OptionPlotoptionsPyramidTooltipDatetimelabelformats(Options):
    """Per-time-unit datetime label formats for pyramid-series tooltips.

    Each unit appears twice: a getter returning the Highcharts default
    format string and a same-named setter writing a new one.
    NOTE(review): the pairs look like @property/@x.setter pairs whose
    decorators were lost — as written the later def of each pair shadows
    the earlier; confirm against the original source.
    """
    def day(self):
        return self._config_get('%A, %e %b %Y')

    def day(self, text: str):
        self._config(text, js_type=False)

    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')

    def hour(self, text: str):
        self._config(text, js_type=False)

    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    def millisecond(self, text: str):
        self._config(text, js_type=False)

    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')

    def minute(self, text: str):
        self._config(text, js_type=False)

    def month(self):
        return self._config_get('%B %Y')

    def month(self, text: str):
        self._config(text, js_type=False)

    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')

    def second(self, text: str):
        self._config(text, js_type=False)

    def week(self):
        return self._config_get('Week from %A, %e %b %Y')

    def week(self, text: str):
        self._config(text, js_type=False)

    def year(self):
        return self._config_get('%Y')

    def year(self, text: str):
        self._config(text, js_type=False)
def get_target_line(col_names, job_targets):
    """Return one target value per column name, 'na' when no target matches.

    ``job_targets`` is a nested dict metric -> iotype -> valuetype -> value;
    column names like 'BW_Read' are translated token-by-token into
    (metric, iotype, valuetype) terms and matched against the flattened
    targets.
    """
    # Column-name token -> target vocabulary; unmapped tokens pass through.
    replacements = {
        'BW': ['throughput', 'MIN'],
        'IOPS': ['iops', 'MIN'],
        'Latency': ['latency'],
        'Read': ['read'],
        'Write': ['write'],
        'Trim': ['trim'],
        'Max': ['MAX'],
    }
    flattened = []
    if isinstance(job_targets, dict):
        for metric, per_iotype in job_targets.items():
            for iotype, per_valuetype in per_iotype.items():
                for valuetype, value in per_valuetype.items():
                    flattened.append([metric, iotype, valuetype, value])
    line = {name: 'na' for name in col_names}
    for name in col_names:
        tokens = []
        for token in name.split('_'):
            tokens.extend(replacements.get(token, [token]))
        for target in flattened:
            if all(item in target[0:3] for item in tokens):
                line[name] = target[-1]
    return list(line.values())
.django_db
def test_agency_endpoint(client, create_agency_data):
    """Agency detail returns aggregated amounts; unknown ids give empty results."""
    response = client.get('/api/v2/references/agency/1/')
    assert response.status_code == status.HTTP_200_OK
    results = response.data['results']
    assert results['outlay_amount'] == '2.00'
    assert results['obligated_amount'] == '2.00'
    assert results['budget_authority_amount'] == '2.00'
    assert results['congressional_justification_url'] == 'test.com/cj'
    assert results['current_total_budget_authority_amount'] == '2000.00'
    # An agency id with no associated data yields an empty results object.
    response = client.get('/api/v2/references/agency/4/')
    assert response.data == {'results': {}}
class Testglob(_TestGlob):
cases = [[('a',), [('a',)]], [('a', 'D'), [('a', 'D')]], [('aab',), [('aab',)]], [('zymurgy',), []], Options(absolute=True), [['*'], None], [[os.curdir, '*'], None], Options(absolute=False), [('a*',), [('a',), ('aab',), ('aaa',)]], [('*a',), [('a',), ('aaa',)]], [('.*',), [('.',), ('..',), ('.aa',), ('.bb',)], glob.SCANDOTDIR], [('.*',), [('.aa',), ('.bb',)]], [('?aa',), [('aaa',)]], [('aa?',), [('aaa',), ('aab',)]], [('aa[ab]',), [('aaa',), ('aab',)]], [('*q',), []], [('.',), [('.',)]], [('?',), [('a',)]], [('[.a]',), [('a',)]], [('*.',), []], [('{a*,a*}',), [('a',), ('aab',), ('aaa',)]], [('{a*,a*}',), [('a',), ('aab',), ('aaa',), ('a',), ('aab',), ('aaa',)], glob.Q], [('**', '*'), ([('aab',), ('aab', 'F'), ('a',), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('a', 'D'), ('aaa',), ('aaa', 'zzzF'), ('EF',), ('ZZZ',)] if (not can_symlink()) else [('aab',), ('aab', 'F'), ('a',), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('a', 'D'), ('aaa',), ('aaa', 'zzzF'), ('EF',), ('ZZZ',), ('sym1',), ('sym2',), ('sym3',)]), glob.L], [('**',), ([('',), ('aab',), ('aab', 'F'), ('a',), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('a', 'D'), ('aaa',), ('aaa', 'zzzF'), ('EF',), ('ZZZ',)] if (not can_symlink()) else [('',), ('aab',), ('aab', 'F'), ('a',), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('a', 'D'), ('aaa',), ('aaa', 'zzzF'), ('EF',), ('ZZZ',), ('sym1',), ('sym2',)]), glob.L], Options(default_negate='**'), [('a*', '**'), ([('EF',), ('ZZZ',), ('',)] if (not can_symlink()) else [('EF',), ('ZZZ',), ('',), ('sym1',), ('sym3',), ('sym2',), ('sym3', 'efg'), ('sym3', 'efg', 'ha'), ('sym3', 'EF')]), glob.N], Options(default_negate='sym3/EF'), [('**', 'EF'), ([] if (not can_symlink()) else []), (glob.N | glob.L)], [('**', 'EF'), ([] if (not can_symlink()) else []), glob.N], Options(default_negate='**'), [('a*', '**'), ([('EF',), 
('ZZZ',), ('',)] if (not can_symlink()) else [('EF',), ('ZZZ',), ('',), ('sym1',), ('sym2',)]), (glob.N | glob.L)], Options(cwd_temp=True, absolute=True), [('*',), ([('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F')] if (not can_symlink()) else [('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F'), ('sym1',), ('sym2',), ('sym3',)]), (glob.L | glob.X)], [('aab', '*'), [('aab', 'F')], (glob.L | glob.X)], Options(cwd_temp=False, absolute=False), [('**',), ([('a', 'bcd', 'EF'), ('a', 'bcd', 'efg', 'ha'), ('a', 'D'), ('aaa', 'zzzF'), ('aab', 'F'), ('EF',), ('ZZZ',)] if (not can_symlink()) else [('a', 'bcd', 'EF'), ('a', 'bcd', 'efg', 'ha'), ('a', 'D'), ('aaa', 'zzzF'), ('aab', 'F'), ('EF',), ('sym1',), ('sym2',), ('ZZZ',)]), (glob.L | glob.O)], [('a', 'bcd', 'E*'), ([('a', 'bcd', 'EF')] if util.is_case_sensitive() else [('a', 'bcd', 'EF'), ('a', 'bcd', 'efg')])], [('a', 'bcd', '*g'), [('a', 'bcd', 'efg')]], [('a', 'bcd', 'E*'), [('a', 'bcd', 'EF')], glob.C], [('a', 'bcd', 'E*'), [('a', 'bcd', 'EF'), ('a', 'bcd', 'efg')], glob.I], [('*', 'D'), [('a', 'D')]], [('*', '*a'), []], [('a', '*', '*', '*a'), [('a', 'bcd', 'efg', 'ha')]], [('?a?', '*F'), [('aaa', 'zzzF'), ('aab', 'F')]], Options(absolute=True, skip=(sys.platform != 'win32')), [('*:',), []], [('?:',), []], [('?\\\\c:\\\\',), [('\\\\?\\c:\\',)]], [('*\\\\*\\\\',), []], Options(absolute=False, skip=False), Options(skip=(not can_symlink())), [('sym*',), [('sym1',), ('sym2',), ('sym3',)]], [('sym1',), [('sym1',)]], [('sym2',), [('sym2',)]], [('sym3',), [('sym3',)]], [('sym3', '*'), [('sym3', 'EF'), ('sym3', 'efg')]], [('sym3', ''), [('sym3', '')]], [('*', '*F'), [('aaa', 'zzzF'), ('aab', 'F'), ('sym3', 'EF')]], Options(skip=False), [('*', ''), ([('aab', ''), ('aaa', ''), ('a', '')] 
if (not can_symlink()) else [('aab', ''), ('aaa', ''), ('a', ''), ('sym3', '')])], Options(skip=util.is_case_sensitive()), [('*\\',), ([('a',), ('aab',), ('aaa',), ('ZZZ',), ('EF',)] if (not can_symlink()) else [('a',), ('aab',), ('aaa',), ('ZZZ',), ('EF',), ('sym1',), ('sym2',), ('sym3',)])], [('*\\\\',), ([('aab', ''), ('aaa', ''), ('a', '')] if (not can_symlink()) else [('aab', ''), ('aaa', ''), ('a', ''), ('sym3', '')])], Options(skip=False), [('(a|aa*(a|b))',), [('aab',), ('aaa',), ('a',)]], [('[a]',), [('a',)]], [('[!b]',), [('a',)]], [('[^b]',), [('a',)]], [('([\\a]|\\aaa)',), [('a',), ('aaa',)]], Options(absolute=True), [('',), []], Options(absolute=False), [('Z*Z', ''), []], [('ZZZ', ''), []], [('aa*', ''), [('aaa', ''), ('aab', '')]], [('**',), ([('',), ('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F')] if (not can_symlink()) else [('',), ('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F'), ('sym1',), ('sym2',), ('sym3',), ('sym3', 'EF'), ('sym3', 'efg'), ('sym3', 'efg', 'ha')])], [('**', '**'), ([('',), ('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F')] if (not can_symlink()) else [('',), ('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F'), ('sym1',), ('sym2',), ('sym3',), ('sym3', 'EF'), ('sym3', 'efg'), ('sym3', 'efg', 'ha')])], [('.', '**'), ([('.', ''), ('.', 'EF'), ('.', 'ZZZ'), ('.', 'a'), ('.', 'a', 'D'), ('.', 'a', 'bcd'), ('.', 'a', 'bcd', 'EF'), ('.', 'a', 'bcd', 'efg'), ('.', 'a', 'bcd', 'efg', 'ha'), ('.', 'aaa'), ('.', 'aaa', 'zzzF'), ('.', 'aab'), ('.', 'aab', 'F')] if 
(not can_symlink()) else [('.', ''), ('.', 'EF'), ('.', 'ZZZ'), ('.', 'a'), ('.', 'a', 'D'), ('.', 'a', 'bcd'), ('.', 'a', 'bcd', 'EF'), ('.', 'a', 'bcd', 'efg'), ('.', 'a', 'bcd', 'efg', 'ha'), ('.', 'aaa'), ('.', 'aaa', 'zzzF'), ('.', 'aab'), ('.', 'aab', 'F'), ('.', 'sym1'), ('.', 'sym2'), ('.', 'sym3'), ('.', 'sym3', 'EF'), ('.', 'sym3', 'efg'), ('.', 'sym3', 'efg', 'ha')])], [('**', ''), ([('',), ('a', ''), ('a', 'bcd', ''), ('a', 'bcd', 'efg', ''), ('aaa', ''), ('aab', '')] if (not can_symlink()) else [('',), ('a', ''), ('a', 'bcd', ''), ('a', 'bcd', 'efg', ''), ('aaa', ''), ('aab', ''), ('sym3', ''), ('sym3', 'efg', '')])], [('a', '**'), [('a', ''), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha')]], [('a**',), [('a',), ('aaa',), ('aab',)]], [('**', 'EF'), ([('a', 'bcd', 'EF'), ('EF',)] if (not can_symlink()) else [('a', 'bcd', 'EF'), ('EF',), ('sym3', 'EF')])], [('**', '*F'), ([('a', 'bcd', 'EF'), ('aaa', 'zzzF'), ('aab', 'F'), ('EF',)] if (not can_symlink()) else [('a', 'bcd', 'EF'), ('aaa', 'zzzF'), ('aab', 'F'), ('EF',), ('sym3', 'EF')])], [('**', '*F', ''), []], [('**', 'bcd', '*'), [('a', 'bcd', 'EF'), ('a', 'bcd', 'efg')]], [('a', '**', 'bcd'), [('a', 'bcd')]], Options(cwd_temp=True, absolute=True), [('**',), ([('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F')] if (not can_symlink()) else [('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F'), ('sym1',), ('sym2',), ('sym3',), ('sym3', 'EF'), ('sym3', 'efg'), ('sym3', 'efg', 'ha')])], [('**', '*'), ([('EF',), ('ZZZ',), ('a',), ('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F')] if (not can_symlink()) else [('EF',), ('ZZZ',), ('a',), 
('a', 'D'), ('a', 'bcd'), ('a', 'bcd', 'EF'), ('a', 'bcd', 'efg'), ('a', 'bcd', 'efg', 'ha'), ('aaa',), ('aaa', 'zzzF'), ('aab',), ('aab', 'F'), ('sym1',), ('sym2',), ('sym3',), ('sym3', 'EF'), ('sym3', 'efg'), ('sym3', 'efg', 'ha')])], [(os.curdir, '**'), ([('.', ''), ('.', 'EF'), ('.', 'ZZZ'), ('.', 'a'), ('.', 'a', 'D'), ('.', 'a', 'bcd'), ('.', 'a', 'bcd', 'EF'), ('.', 'a', 'bcd', 'efg'), ('.', 'a', 'bcd', 'efg', 'ha'), ('.', 'aaa'), ('.', 'aaa', 'zzzF'), ('.', 'aab'), ('.', 'aab', 'F')] if (not can_symlink()) else [('.', ''), ('.', 'EF'), ('.', 'ZZZ'), ('.', 'a'), ('.', 'a', 'D'), ('.', 'a', 'bcd'), ('.', 'a', 'bcd', 'EF'), ('.', 'a', 'bcd', 'efg'), ('.', 'a', 'bcd', 'efg', 'ha'), ('.', 'aaa'), ('.', 'aaa', 'zzzF'), ('.', 'aab'), ('.', 'aab', 'F'), ('.', 'sym1'), ('.', 'sym2'), ('.', 'sym3'), ('.', 'sym3', 'EF'), ('.', 'sym3', 'efg'), ('.', 'sym3', 'efg', 'ha')])], [(os.curdir, '**', '*'), ([('.', 'EF'), ('.', 'ZZZ'), ('.', 'a'), ('.', 'a', 'D'), ('.', 'a', 'bcd'), ('.', 'a', 'bcd', 'EF'), ('.', 'a', 'bcd', 'efg'), ('.', 'a', 'bcd', 'efg', 'ha'), ('.', 'aaa'), ('.', 'aaa', 'zzzF'), ('.', 'aab'), ('.', 'aab', 'F')] if (not can_symlink()) else [('.', 'EF'), ('.', 'ZZZ'), ('.', 'a'), ('.', 'a', 'D'), ('.', 'a', 'bcd'), ('.', 'a', 'bcd', 'EF'), ('.', 'a', 'bcd', 'efg'), ('.', 'a', 'bcd', 'efg', 'ha'), ('.', 'aaa'), ('.', 'aaa', 'zzzF'), ('.', 'aab'), ('.', 'aab', 'F'), ('.', 'sym1'), ('.', 'sym2'), ('.', 'sym3'), ('.', 'sym3', 'EF'), ('.', 'sym3', 'efg'), ('.', 'sym3', 'efg', 'ha')])], [('**', ''), ([('a', ''), ('a', 'bcd', ''), ('a', 'bcd', 'efg', ''), ('aaa', ''), ('aab', '')] if (not can_symlink()) else [('a', ''), ('a', 'bcd', ''), ('a', 'bcd', 'efg', ''), ('aaa', ''), ('aab', ''), ('sym3', ''), ('sym3', 'efg', '')])], [(os.curdir, '**', ''), ([('.', ''), ('.', 'a', ''), ('.', 'a', 'bcd', ''), ('.', 'a', 'bcd', 'efg', ''), ('.', 'aaa', ''), ('.', 'aab', '')] if (not can_symlink()) else [('.', ''), ('.', 'a', ''), ('.', 'a', 'bcd', ''), ('.', 'a', 'bcd', 'efg', 
''), ('.', 'aaa', ''), ('.', 'aab', ''), ('.', 'sym3', ''), ('.', 'sym3', 'efg', '')])], [('**', 'zz*F'), [('aaa', 'zzzF')]], [('**zz*F',), []], [('**', 'EF'), ([('a', 'bcd', 'EF'), ('EF',)] if (not can_symlink()) else [('a', 'bcd', 'EF'), ('EF',), ('sym3', 'EF')])], Options(just_negative=True, default_negate='**'), [('a*', '**'), ([] if (not can_symlink()) else []), glob.N], Options(just_negative=False, cwd_temp=False, absolute=False), [[], [[]]]]
def setup_fs(cls):
    """Create the on-disk fixture tree used by the glob cases.

    NOTE(review): takes ``cls`` — looks like a @classmethod whose
    decorator was lost; confirm against the original test base class.
    """
    cls.mktemp('a', 'D')
    cls.mktemp('aab', 'F')
    cls.mktemp('.aa', 'G')
    cls.mktemp('.bb', 'H')
    cls.mktemp('aaa', 'zzzF')
    cls.mktemp('ZZZ')
    cls.mktemp('EF')
    cls.mktemp('a', 'bcd', 'EF')
    cls.mktemp('a', 'bcd', 'efg', 'ha')
    # Symlinks are created only when the platform supports them.
    cls.can_symlink = can_symlink()
    if cls.can_symlink:
        os.symlink(cls.norm('broken'), cls.norm('sym1'))  # dangling, absolute target
        os.symlink('broken', cls.norm('sym2'))  # dangling, relative target
        os.symlink(os.path.join('a', 'bcd'), cls.norm('sym3'))  # points into the tree
.parametrize('case', cases)
def test_glob_cases(self, case):
    """Evaluate one parametrized glob case from the ``cases`` table."""
    self.eval_glob_cases(case)
def test_negateall(self):
    """With NEGATEALL, the pattern '!**/' must match only non-directories."""
    for file in glob.glob('!**/', flags=((glob.N | glob.NEGATEALL) | glob.G), root_dir=self.tempdir):
        self.assert_equal(os.path.isdir(file), False)
def test_negateall_bytes(self):
    """Same as test_negateall, but with a bytes pattern and bytes root_dir."""
    for file in glob.glob(b'!**/', flags=((glob.N | glob.NEGATEALL) | glob.G), root_dir=os.fsencode(self.tempdir)):
        self.assert_equal(os.path.isdir(file), False)
def test_magic_non_magic(self):
    """A magic pattern followed by a non-matching literal still globs."""
    with change_cwd(self.tempdir):
        self.assert_equal(sorted(glob.glob(['**/aab', 'dummy'], flags=glob.G)), ['aab'])
def test_non_magic_magic(self):
    """A non-matching literal followed by a magic pattern still globs."""
    with change_cwd(self.tempdir):
        self.assert_equal(sorted(glob.glob(['dummy', '**/aab'], flags=glob.G)), ['aab'])
class OptionPlotoptionsAreaSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Note-duration mapping options for area-series sonification.

    Each option appears twice: a getter returning the default (None) and a
    same-named setter.  NOTE(review): the pairs look like @property/@x.setter
    pairs whose decorators were lost — confirm against the original source.
    """
    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def generate_engine_id_mac(pen: int, mac_address: str) -> bytes:
    """Build an RFC 3411 SNMP engine ID from a PEN and a MAC address.

    Layout: 4-byte big-endian enterprise number with the most significant
    bit set, a format octet of 3 (MAC address), then the six MAC octets.
    Both ':'- and '-'-separated MAC strings are accepted.
    """
    buffer = bytearray(pen.to_bytes(4, 'big'))
    # RFC 3411: set the MSB to mark the "new" (RFC 3411) engine ID format.
    # Fix: use |= instead of = so PENs >= 2**24 keep their top byte.
    buffer[0] |= 0x80
    separator = '-' if '-' in mac_address else ':'
    octets = [int(part, 16) for part in mac_address.split(separator)]
    buffer.append(3)  # format octet 3 = MAC address
    buffer.extend(octets)
    return bytes(buffer)
class OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Stereo-pan mapping options for packed-bubble sonification.

    Each option appears twice: a getter returning the default (None) and a
    same-named setter.  NOTE(review): the pairs look like @property/@x.setter
    pairs whose decorators were lost — confirm against the original source.
    """
    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def extractJinzeffectWordpressCom(item):
    """Parse a jinzeffect.wordpress.com feed item into a release message.

    Returns None for previews or non-chapter posts, a release message
    when a known series matches by tag or title, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Tag-based matching first.
    tagmap = [
        ('Eternal Reverence', 'Eternal Reverence', 'translated'),
        ("i'm an olympic superstar", "i'm an olympic superstar", 'translated'),
        ('tpe', 'The Paranoid Emperors Black Moonlight Shizun', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Generic 'translation' tag: fall back to title matching.
    if item['tags'] == ['translation']:
        lowered_title = item['title'].lower()
        titlemap = [
            ('TPE ', 'The Paranoid Emperors Black Moonlight Shizun', 'translated'),
            ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        for titlecomponent, name, tl_type in titlemap:
            if titlecomponent.lower() in lowered_title:
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def main():
    """Clone google/oss-fuzz into the given folder (if absent) and build the
    markdown-it-py fuzzers (if not already built)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('folder')
    args = parser.parse_args()
    folder = Path(args.folder)
    if not folder.exists():
        print(f'Cloning google/oss-fuzz into: {folder}')
        folder.mkdir(parents=True)
        # Fix: the clone command was missing the repository URL and passed
        # the literal string ' str(folder)' instead of the destination path.
        subprocess.check_call(['git', 'clone', '--single-branch',
                               'https://github.com/google/oss-fuzz', str(folder)])
    else:
        print(f'Using google/oss-fuzz in: {folder}')
    if not (folder / 'build').exists():
        print(f"Building fuzzers in: {folder / 'build'}")
        subprocess.check_call(['python', str(folder / 'infra' / 'helper.py'),
                               'build_fuzzers', 'markdown-it-py'])
    else:
        print(f"Using existing fuzzers in: {folder / 'build'}")
def test_init_check():
    """Drain any stale 'started' job from the rq registry before tests run.

    If a job is still registered as started: request a graceful stop via the
    job's meta, poll the registry for a bounded time, delete the job, then
    assert it neither completed nor remained running.
    Relies on module-level globals: q (rq queue), crack_q, logger.
    """
    cur_list = rq.registry.StartedJobRegistry(queue=q).get_job_ids()
    if (len(cur_list) > 0):
        try:
            job_id = cur_list[0]
            logger.info('Deleting job: {:s}'.format(job_id))
            job = q.fetch_job(job_id)
            # CrackQ workers watch this meta key; 'Stop' requests shutdown.
            job.meta['CrackQ State'] = 'Stop'
            job.save_meta()
            time.sleep(5)
            counter = 0
            # Poll up to ~5 times: counter advances by 2 against a limit of 9,
            # sleeping 5s per iteration (~25s worst case).
            while ((len(cur_list) > 0) and (counter < 9)):
                cur_list = rq.registry.StartedJobRegistry(queue=q).get_job_ids()
                time.sleep(5)
                counter += 2
            job.delete()
            # Grace period for the registry to settle before the final checks.
            time.sleep(21)
            comp_list = crack_q.check_complete(q)
            assert (job_id not in comp_list)
            assert (len(cur_list) < 1)
        except AttributeError as err:
            # Likely fetch_job returned None (job already gone), making the
            # .meta access raise -- treat as success if the registry drained.
            logger.error('Failed to delete job: {}'.format(err))
            assert (len(cur_list) < 1)
def main():
    """Read the bridges config and append a supervisord section per bridge."""
    log_level = os.getenv('LOGGING_LEVEL')
    logger.remove()
    logger.add(sys.stderr, level=log_level)
    logger.info('Read config file')

    arg_parser = argparse.ArgumentParser(description='Listener process')
    arg_parser.add_argument('--config', type=str, help='Config path', default='/etc/inferoxy/bridges.yaml')
    cli_args = arg_parser.parse_args()

    # Load the YAML config and validate it through the data-model class.
    with open(cli_args.config, 'r') as cfg_file:
        raw_config = yaml.full_load(cfg_file)
    config = dm.Config(**raw_config)

    # Append (not overwrite) the generated sections to the supervisord config.
    rendered = bridges_to_supervisord(config.bridges)
    with open(config.supervisord_config_path, 'a') as out_file:
        out_file.write(rendered)
class TestGetSystemFromFidesKey:
    """Tests for _get_system_from_fides_key lookup behavior."""

    def test_get_system_from_fides_key(self, db, system):
        """A known fides_key resolves to its system and echoes the key."""
        result = _get_system_from_fides_key(system.fides_key, db)
        assert result.system == system
        assert result.original_data == system.fides_key

    def test_get_system_from_fides_key_not_found(self, db):
        """An unknown fides_key yields no system but still echoes the key."""
        result = _get_system_from_fides_key('unknown_fides_key', db)
        assert result.system is None
        assert result.original_data == 'unknown_fides_key'
class Formatter():
    """Re-indenting XML formatter driven by an expat token stream."""

    # Encoding declared in the parsed document's XML declaration (set by
    # XmlDecl); used as a fallback when no output encoding is configured.
    encoding_internal = None

    def __init__(self, indent=DEFAULT_INDENT, preserve=None, compress=DEFAULT_COMPRESS, indent_char=DEFAULT_INDENT_CHAR, encoding_input=DEFAULT_ENCODING_INPUT, encoding_output=DEFAULT_ENCODING_OUTPUT, inline=DEFAULT_INLINE, correct=DEFAULT_CORRECT, noemptytag=DEFAULT_NOEMPTYTAG, emptyattr=DEFAULT_EMPTYATTR, indent_data=DEFAULT_INDENT_DATA, wraped=None):
        """Configure formatting options.

        :param indent: indent width (number of indent_char repetitions).
        :param preserve: element names whose whitespace is kept verbatim.
        :param compress: emit no indentation/newlines at all.
        :param wraped: element names that get a leading blank line.
        """
        self.compress = compress
        self.noemptytag = noemptytag
        self.emptyattr = emptyattr
        self.correct = correct
        self.encoding_input = self.enc_normalize(encoding_input)
        self.encoding_output = self.enc_normalize(encoding_output)
        self.indent = int(indent)
        self.indent_char = indent_char
        self.indent_data = indent_data
        self.inline = inline
        # BUG FIX: `preserve=[]` and `wraped=[...]` were mutable default
        # arguments shared across every instance; use None sentinels so each
        # instance gets its own fresh list. Same effective defaults as before.
        self.preserve = [] if preserve is None else preserve
        self.wraped = ['node', 'group', 'include'] if wraped is None else wraped
        self.attr_order = []
def encoding_effective(self, enc=None):
    """Resolve the output encoding: explicit output, then the document's own
    declaration, then the configured input encoding, finally UTF-8.

    NOTE(review): referenced without parentheses elsewhere, so this is
    presumably a @property in the original source (decorators not visible).
    """
    return (self.encoding_output
            or self.encoding_internal
            or self.encoding_input
            or 'UTF-8')
def enc_normalize(self, string):
    """Return the encoding name upper-cased, or None for non-string input."""
    return string.upper() if isinstance(string, str) else None
def enc_encode(self, strg):
    """Encode *strg* to bytes in the effective output encoding.

    NOTE(review): ``self.encoding_effective`` is used without a call, so it
    is presumably a @property in the original source -- confirm; otherwise a
    bound method would be passed to ``encode`` as the codec name.
    """
    if (sys.version_info > (3, 0)):
        return strg.encode(self.encoding_effective)
    # Python 2 path: treat the input as UTF-8 bytes, re-encode to the target.
    return strg.decode('utf-8').encode(self.encoding_effective)
def enc_output(self, path, strg):
    """Write the encoded bytes to *path*, or to stdout when path is None."""
    fh = sys.stdout
    if (strg is not None):
        if (path is not None):
            # NOTE(review): the file handle is never explicitly closed;
            # relies on garbage collection.
            open(path, 'w+b').write(strg)
        elif (sys.version_info > (3, 0)):
            # Binary payloads must bypass the text wrapper on Python 3.
            fh.buffer.write(strg)
        else:
            fh.write(strg)
def format_string(self, xmldoc=''):
    """Format an XML document given as a string; return encoded bytes."""
    token_list = Formatter.TokenList(self)
    token_list.parser.Parse(xmldoc)
    return self.enc_encode(str(token_list))
def format_file(self, file):
    """Format the XML document stored at *file*; return encoded bytes.

    Uses a context manager so the handle is closed even when parsing raises
    (the original leaked the handle on a parse error).
    """
    token_list = Formatter.TokenList(self)
    with open(file, 'rb') as fh:
        token_list.parser.ParseFile(fh)
    return self.enc_encode(str(token_list))
class TokenList():
    """Ordered token stream built from expat events, rendered by __str__.

    One Token subclass instance is appended per expat event; __str__ then
    runs three passes (configure / pre_operate / post_operate) before
    concatenating each token's string form.

    NOTE(review): Token members such as ``end``/``empty``/``not_empty`` are
    read without parentheses below, suggesting @property decorators were
    stripped from this view of the source -- confirm against the original.
    """

    # True while the parser is inside a CDATA section.
    cdata_section = False
    # Nesting level of the nearest mixed-content ancestor, or None.
    desc_mixed_level = None
    # Nesting level below which indentation is suspended, or None.
    indent_level = None
    formatter = None
    # Running element depth while expat feeds events.
    level_counter = 0
    # Nesting level of the nearest whitespace-preserved ancestor, or None.
    preserve_level = None

    def __init__(self, formatter):
        self._list = []
        self.formatter = formatter
        self.parser = xml.parsers.expat.ParserCreate(encoding=self.formatter.encoding_input)
        self.parser.ordered_attributes = True
        self.parser.specified_attributes = 1
        self.parser.buffer_text = True
        # Bind every expat *Handler to a factory that appends the Token
        # subclass of the same name (e.g. StartElementHandler -> StartElement).
        for pattern in ['XmlDecl%s', 'ElementDecl%s', 'AttlistDecl%s', 'EntityDecl%s', 'StartElement%s', 'EndElement%s', 'ProcessingInstruction%s', 'CharacterData%s', 'Comment%s', 'Default%s', 'StartDoctypeDecl%s', 'EndDoctypeDecl%s', 'StartCdataSection%s', 'EndCdataSection%s', 'NotationDecl%s']:
            setattr(self.parser, (pattern % 'Handler'), self.xml_handler((pattern % '')))

    def __iter__(self):
        return iter(self._list)

    def __len__(self):
        return len(self._list)

    def __getitem__(self, pos):
        if (0 <= pos < len(self._list)):
            return self._list[pos]
        else:
            raise IndexError

    def __setitem__(self, pos, value):
        if (0 <= pos < len(self._list)):
            self._list[pos] = value
        else:
            raise IndexError

    def __str__(self):
        """Run the three token passes, then concatenate the rendered tokens."""
        for step in ['configure', 'pre_operate', 'post_operate']:
            for tk in iter(self):
                getattr(tk, step)()
        result = ''
        prev_comment = False
        for tk in iter(self):
            tk_str = str(tk)
            # A wrapped element directly after a comment would produce a
            # double blank line; collapse the first occurrence.
            if prev_comment:
                if (tk.arg[0] in self.formatter.wraped):
                    tk_str = tk_str.replace('\n\n', '\n', 1)
            result += tk_str
            # Character data between comment and element keeps the flag alive.
            if (not isinstance(tk, Formatter.CharacterData)):
                prev_comment = isinstance(tk, Formatter.Comment)
        return result

    def append(self, tk):
        # Record the token's index so tokens can navigate relative to it.
        tk.pos = len(self._list)
        self._list.append(tk)

    def level_increment(self):
        self.level_counter += 1

    def level_decrement(self):
        self.level_counter -= 1

    def token_descendant_mixed(self, tk):
        """Return True when *tk* lies inside a mixed-content element."""
        if (tk.name == 'StartElement'):
            # Remember the outermost level where mixed content begins.
            if ((tk.content_model in [2, 3]) and (self.desc_mixed_level is None)):
                self.desc_mixed_level = tk.level
                return False
            return (self.desc_mixed_level is not None)
        elif (tk.name == 'EndElement'):
            # Closing the element that opened the mixed region resets it.
            # NOTE(review): identity compare (`is`) on ints -- works for small
            # CPython-cached ints but `==` was probably intended.
            if (tk.level is self.desc_mixed_level):
                self.desc_mixed_level = None
            elif (self.desc_mixed_level is not None):
                return True
            return False
        elif (self.desc_mixed_level is None):
            return False
        return (self.desc_mixed_level >= (tk.level - 1))

    def sequence(self, tk, scheme=None):
        """Tokens before *tk* (reversed) for end tags, after it otherwise."""
        if ((scheme == 'EndElement') or ((scheme is None) and tk.end)):
            return reversed(self._list[:tk.pos])
        return self._list[(tk.pos + 1):]

    def token_indent(self, tk):
        if self.formatter.inline:
            return self.token_indent_inline(tk)
        ' Indent outside of text of mixed content. '
        if (tk.name == 'StartElement'):
            # First mixed-content element still gets indented itself.
            if ((tk.content_model in [2, 3]) and (self.indent_level is None)):
                self.indent_level = tk.level
            elif (self.indent_level is not None):
                return False
            return True
        elif (tk.name == 'EndElement'):
            if (tk.level == self.indent_level):
                self.indent_level = None
            elif (self.indent_level is None):
                return True
            return False
        return (self.indent_level is None)

    def token_indent_inline(self, tk):
        """Inline mode: indent only below elements with pure element content."""
        for itk in iter(self.sequence(tk, 'EndElement')):
            if ((itk.level < tk.level) and (itk.name == 'StartElement')):
                if (itk.content_model == 1):
                    return True
                return False
            if ((itk.level == tk.level) and (tk.name == 'EndElement') and (itk.name == 'StartElement')):
                if (itk.content_model == 1):
                    return True
                return False
        return True

    def token_model(self, tk):
        """Return 0..3: +1 if *tk* has element children, +2 if it has text."""
        eflag = tflag = 0
        for itk in iter(self.sequence(tk)):
            # Stop once we leave tk's subtree.
            if (itk.level <= tk.level):
                break
            elif ((itk.level - 1) == tk.level):
                # Only direct children count.
                if itk.start:
                    eflag = 1
                elif itk.not_empty:
                    tflag = 2
        return (eflag + tflag)

    def token_preserve(self, tk):
        """Return 0 (not preserved), 1 (boundary tag), 2 (inside preserved)."""
        if (tk.name == 'StartElement'):
            if (self.preserve_level is not None):
                return 2
            if (tk.arg[0] in self.formatter.preserve):
                self.preserve_level = tk.level
                return 1
            return 0
        elif (tk.name == 'EndElement'):
            if ((tk.arg[0] in self.formatter.preserve) and (tk.level == self.preserve_level)):
                self.preserve_level = None
                return 1
            elif (self.preserve_level is None):
                return 0
            return 2
        return (self.preserve_level is not None)

    def whitespace_append_trailing(self, tk):
        # Text starting with whitespace: push a space before the previous tag.
        if (self.formatter.correct and tk.leading and tk.not_empty):
            self.whitespace_append(tk, 'EndElement', 'StartElement', True)

    def whitespace_append_leading(self, tk):
        # Text ending with whitespace: push a space after the following tag.
        if (self.formatter.correct and tk.trailing and tk.not_empty):
            self.whitespace_append(tk)

    def whitespace_append(self, tk, start='StartElement', stop='EndElement', direct=False):
        """Insert a single-space CharacterData next to the nearest text/tag."""
        for itk in self.sequence(tk, start):
            if (itk.empty or ((itk.name == stop) and (itk.descendant_mixed is False)) or ((itk.name == start) and (abs((tk - itk)) == 1))):
                break
            elif (itk.not_empty or ((itk.name == start) and itk.descendant_mixed)):
                self.insert_empty(itk, direct)
                break

    def whitespace_delete_leading(self, tk):
        """True when *tk*'s leading whitespace is redundant and can go."""
        if (self.formatter.correct and tk.leading and (not tk.preserve) and (not tk.cdata_section)):
            for itk in self.sequence(tk, 'EndElement'):
                if itk.trailing:
                    return True
                elif (itk.name in ['EndElement', 'CharacterData', 'EndCdataSection']):
                    return False
            return True
        return False

    def whitespace_delete_trailing(self, tk):
        """True when *tk*'s trailing whitespace is redundant and can go."""
        if (self.formatter.correct and tk.trailing and (not tk.preserve) and (not tk.cdata_section)):
            for itk in self.sequence(tk, 'StartElement'):
                if itk.end:
                    return True
                elif ((itk.name in ['StartElement', 'StartCdataSection']) or itk.not_empty):
                    return False
            return True
        return False

    def insert_empty(self, tk, before=True):
        """Insert a single-space CharacterData token adjacent to *tk*.

        NOTE(review): despite the name, ``before=True`` inserts at
        ``tk.pos + 1`` (i.e. after *tk*) -- confirm intent.
        """
        if (not (0 < tk.pos < (len(self) - 1))):
            return False
        ptk = self[(tk.pos - 1)]
        ntk = self.formatter.CharacterData(self, [' '])
        ntk.level = max(ptk.level, tk.level)
        ntk.descendant_mixed = tk.descendant_mixed
        # Preserved only if both neighbours are preserved (truthy product).
        ntk.preserve = (ptk.preserve * tk.preserve)
        ntk.cdata_section = (ptk.cdata_section or tk.cdata_section)
        if before:
            self._list.insert((tk.pos + 1), ntk)
        else:
            self._list.insert(tk.pos, ntk)
        # Re-number token positions from the insertion point onwards.
        for i in range((tk.pos - 1), len(self._list)):
            self._list[i].pos = i

    def xml_handler(self, key):
        """Return an expat handler that appends the Token subclass *key*."""
        return (lambda *arg: self.append(getattr(self.formatter, key)(self, arg)))
class Token(object):
    """Base class for one expat event; rendered via __unicode__.

    NOTE(review): ``end``, ``empty``, ``leading``, ``not_empty``,
    ``trailing``, ``start`` and ``correct`` are read as plain attributes
    elsewhere in this file, so they are presumably @property methods in the
    original source (decorators stripped from this view) -- confirm.
    """

    def __init__(self, tklist, arg):
        self.list = tklist
        # expat handler arguments, kept verbatim.
        self.arg = list(arg)
        self.cdata_section = False
        # 0..3 set by token_model: +1 element children, +2 text content.
        self.content_model = None
        self.delete_trailing = False
        self.delete_leading = False
        self.descendant_mixed = False
        self.formatter = tklist.formatter
        self.indent = False
        # Depth at creation time (StartElement increments after, EndElement
        # decrements before, so matching tags share a level).
        self.level = self.list.level_counter
        self.name = self.__class__.__name__
        self.preserve = False
        self.pos = None

    def __sub__(self, other):
        # Distance between two tokens in the stream.
        return (self.pos - other.pos)

    def __unicode__(self):
        return ''

    # Bridge __str__ to __unicode__ on both Python 2 and 3.
    if (sys.version_info > (3, 0)):
        __str__ = (lambda x: x.__unicode__())
    else:
        __str__ = (lambda x: unicode(x).encode('utf-8'))

    def end(self):
        return (self.name == 'EndElement')

    def empty(self):
        # Whitespace-only character data.
        return ((self.name == 'CharacterData') and re.match('^[\\t\\s\\n]*$', self.arg[0]))

    def leading(self):
        return ((self.name == 'CharacterData') and re.search('^[\\t\\s\\n]+', self.arg[0]))

    def not_empty(self):
        # Real text content (CDATA sections never count).
        return ((self.name == 'CharacterData') and (not self.cdata_section) and (not re.match('^[\\t\\s\\n]+$', self.arg[0])))

    def trailing(self):
        return ((self.name == 'CharacterData') and re.search('[\\t\\s\\n]+$', self.arg[0]))

    def start(self):
        return (self.name == 'StartElement')

    def correct(self):
        return self.formatter.correct

    def attribute(self, key, value):
        # Render one attribute; empty ones only when emptyattr is set.
        if ((key and value) or self.formatter.emptyattr):
            return (' %s="%s"' % (key, value))
        return ''

    def indent_insert(self):
        """Newline + indentation when this token should start a fresh line."""
        if (((self.level > 0) and (not (self.end and self.list[(self.pos - 1)].start))) or (self.end and (not self.list[(self.pos - 1)].start))):
            return self.indent_create(self.level)
        return ''

    def indent_create(self, times=1):
        # Empty string when compressing or indent width is zero.
        if ((not self.formatter.compress) and self.formatter.indent):
            return ('\n%s' % ((times * self.formatter.indent) * self.formatter.indent_char))
        return ''

    def identifier(self, systemid, publicid):
        """Render a PUBLIC/SYSTEM external-identifier clause."""
        if (publicid and systemid):
            return (' PUBLIC "%s" "%s"' % (publicid, systemid))
        elif publicid:
            return (' PUBLIC "%s"' % publicid)
        elif systemid:
            return (' SYSTEM "%s"' % systemid)
        return ''

    def configure(self):
        # First pass: derive context flags from position in the stream.
        self.descendant_mixed = self.list.token_descendant_mixed(self)
        self.preserve = self.list.token_preserve(self)
        self.cdata_section = self.list.cdata_section

    def pre_operate(self):
        pass

    def post_operate(self):
        pass
class AttlistDecl(Token):
    """Renders an <!ATTLIST ...> declaration.

    arg layout follows expat's AttlistDeclHandler:
    (elname, attname, type, default, required).
    """

    def __unicode__(self):
        str = self.indent_create()
        str += ('<!ATTLIST %s %s' % (self.arg[0], self.arg[1]))
        if (self.arg[2] is not None):
            str += (' %s' % self.arg[2])
        # Map (default, required) combinations to the DTD keywords.
        if (self.arg[4] and (not self.arg[3])):
            str += ' #REQUIRED'
        elif (self.arg[3] and self.arg[4]):
            str += ' #FIXED'
        elif ((not self.arg[4]) and (not self.arg[3])):
            str += ' #IMPLIED'
        if self.arg[3]:
            str += (' "%s"' % self.arg[3])
        str += '>'
        return str
class CharacterData(Token):
    """Text node: whitespace normalization, optional YAML re-indent, escaping."""

    def __unicode__(self):
        str = self.arg[0]
        if ((not self.preserve) and (not self.cdata_section)):
            if (self.empty and (not self.descendant_mixed)):
                # Pure inter-element whitespace is dropped entirely.
                str = ''
            else:
                if self.delete_leading:
                    str = re.sub('^\\s', '', str)
                if self.delete_trailing:
                    str = re.sub('\\s$', '', str)
                if self.correct:
                    try:
                        # Try to re-indent the text as YAML first;
                        # yamlformatter is a project-local module.
                        indent = ''
                        if self.formatter.indent_data:
                            indent = self.indent_create((self.level + 1)).replace('\n', '')
                        yamlftr = yamlformatter.YamlFormatter(indent)
                        str = yamlftr.format_string(str)
                    except Exception:
                        # Fallback: collapse all runs of whitespace to spaces.
                        str = re.sub('\\r\\n', '\n', str)
                        str = re.sub('\\r|\\t', ' ', str)
                        str = re.sub('\\s+', ' ', str)
                        if self.formatter.indent_data:
                            str = ('%s%s' % (self.indent_create((self.level + 1)), str))
        # Escape markup characters outside CDATA sections.
        if (not self.cdata_section):
            str = re.sub('&', '&amp;', str)
            str = re.sub('<', '&lt;', str)
        return str

    def pre_operate(self):
        # Second pass: move stray boundary whitespace next to the right tag.
        self.list.whitespace_append_trailing(self)
        self.list.whitespace_append_leading(self)

    def post_operate(self):
        # Third pass: decide which boundary whitespace is redundant.
        self.delete_leading = self.list.whitespace_delete_leading(self)
        self.delete_trailing = self.list.whitespace_delete_trailing(self)
class Comment(Token):
    """Renders an XML comment, collapsing leading newline runs to one."""

    def __unicode__(self):
        str = ''
        if ((self.preserve in [0, 1]) and self.indent):
            str += self.indent_insert()
        str += ('<!--%s-->' % re.sub('^[\\r\\n]+$', '\n', re.sub('^[\\r\\n]+', '\n', self.arg[0])))
        return str

    def configure(self):
        super(Formatter.Comment, self).configure()
        # Comments also participate in indentation decisions.
        self.indent = self.list.token_indent(self)
class Default(Token):
    # Catch-all for expat's DefaultHandler events; renders nothing
    # (inherits the empty __unicode__ from Token).
    pass
class EndCdataSection(Token):
    """Closes a CDATA section and clears the list-wide CDATA flag."""

    def __unicode__(self):
        return ']]>'

    def configure(self):
        self.list.cdata_section = False
class ElementDecl(Token):
    """Renders an <!ELEMENT ...> declaration from expat's content model."""

    def __unicode__(self):
        str = self.indent_create()
        str += ('<!ELEMENT %s%s>' % (self.arg[0], self.evaluate_model(self.arg[1])))
        return str

    def evaluate_model(self, model, modelStr='', concatStr=''):
        """Recursively render an expat model tuple (type, quant, name, children)."""
        childSeq = []
        mixed = (model[0] == xml.parsers.expat.model.XML_CTYPE_MIXED)
        hasChilds = (len(model[3]) or mixed)
        if (model[0] == xml.parsers.expat.model.XML_CTYPE_EMPTY):
            modelStr += ' EMPTY'
        elif (model[0] == xml.parsers.expat.model.XML_CTYPE_ANY):
            modelStr += ' ANY'
        elif (model[0] == xml.parsers.expat.model.XML_CTYPE_NAME):
            modelStr = ('%s' % model[2])
        elif (model[0] in (xml.parsers.expat.model.XML_CTYPE_CHOICE, xml.parsers.expat.model.XML_CTYPE_MIXED)):
            concatStr = '|'
        elif (model[0] == xml.parsers.expat.model.XML_CTYPE_SEQ):
            concatStr = ','
        if hasChilds:
            modelStr += ' ('
        if mixed:
            childSeq.append('#PCDATA')
        for child in model[3]:
            childSeq.append(self.evaluate_model(child))
        modelStr += concatStr.join(childSeq)
        if hasChilds:
            modelStr += ')'
        # Append the quantifier suffix (?, +, * or nothing).
        modelStr += {xml.parsers.expat.model.XML_CQUANT_NONE: '', xml.parsers.expat.model.XML_CQUANT_OPT: '?', xml.parsers.expat.model.XML_CQUANT_PLUS: '+', xml.parsers.expat.model.XML_CQUANT_REP: '*'}[model[1]]
        return modelStr
class EndDoctypeDecl(Token):
    """Closes a DOCTYPE, including the internal subset bracket when opened."""

    def __unicode__(self):
        str = ''
        # An internal subset was emitted if anything came between the
        # StartDoctypeDecl and this token.
        if (self.list[(self.pos - 1)].name != 'StartDoctypeDecl'):
            str += self.indent_create(0)
            str += ']'
        str += '>'
        str += self.indent_create(0)
        return str
class EndElement(Token):
    """Renders a closing tag, unless the element collapsed to <empty/>."""

    def __init__(self, list, arg):
        # Decrement before construction so the end tag shares its start
        # tag's level.
        list.level_decrement()
        super(Formatter.EndElement, self).__init__(list, arg)

    def __unicode__(self):
        str = ''
        # Skip the close tag when StartElement already emitted '/>'.
        if (self.formatter.noemptytag or (self.list[(self.pos - 1)].name != 'StartElement')):
            if ((not self.formatter.compress) or ((self.preserve in [0]) and self.indent)):
                str += self.indent_insert()
            str += ('</%s>' % self.arg[0])
        return str

    def configure(self):
        self.descendant_mixed = self.list.token_descendant_mixed(self)
        self.preserve = self.list.token_preserve(self)
        self.indent = self.list.token_indent(self)
class EntityDecl(Token):
    """Renders an <!ENTITY ...> declaration (internal or external)."""

    def __unicode__(self):
        str = self.indent_create()
        str += '<!ENTITY '
        # arg[1]: parameter-entity flag.
        if self.arg[1]:
            str += '% '
        str += ('%s ' % self.arg[0])
        if self.arg[2]:
            # Internal entity with a literal value.
            str += ('"%s"' % self.arg[2])
        else:
            # External entity: SYSTEM/PUBLIC identifier, optional NDATA.
            str += ('%s ' % self.identifier(self.arg[4], self.arg[5]))
            if self.arg[6]:
                str += ('NDATA %s' % self.arg[6])
        str += '>'
        return str
class NotationDecl(Token):
    """Renders a <!NOTATION ...> declaration."""

    def __unicode__(self):
        external_id = self.identifier(self.arg[2], self.arg[3])
        return self.indent_create() + ('<!NOTATION %s%s>' % (self.arg[0], external_id))
class ProcessingInstruction(Token):
    """Renders a <?target data?> processing instruction."""

    def __unicode__(self):
        str = ''
        if ((self.preserve in [0, 1]) and self.indent):
            str += self.indent_insert()
        str += ('<?%s %s?>' % (self.arg[0], self.arg[1]))
        return str

    def configure(self):
        super(Formatter.ProcessingInstruction, self).configure()
        # PIs also participate in indentation decisions.
        self.indent = self.list.token_indent(self)
class StartCdataSection(Token):
    """Opens a CDATA section and sets the list-wide CDATA flag."""

    def __unicode__(self):
        return '<![CDATA['

    def configure(self):
        self.list.cdata_section = True
class StartDoctypeDecl(Token):
    """Renders <!DOCTYPE name [identifier] and opens '[' for a subset.

    arg layout: (doctype name, systemid, publicid, has_internal_subset).
    """

    def __unicode__(self):
        str = ('<!DOCTYPE %s' % self.arg[0])
        if self.arg[1]:
            str += self.identifier(self.arg[1], self.arg[2])
        if self.arg[3]:
            str += ' ['
        return str
class StartElement(Token):
    """Renders an opening tag with attributes re-ordered per attr_order."""

    def __init__(self, list, arg):
        super(Formatter.StartElement, self).__init__(list, arg)
        # Children of this element are one level deeper.
        self.list.level_increment()

    def __unicode__(self):
        str = ''
        # Elements in the 'wraped' list get a leading blank line.
        if (self.arg[0] in self.formatter.wraped):
            str += '\n'
        if ((self.preserve in [0, 1]) and self.indent):
            str += self.indent_insert()
        str += ('<%s' % self.arg[0])
        args_attr = ''
        # One slot per name in formatter.attr_order, filled in that order.
        ordered = ['' for i in range(len(self.formatter.attr_order))]
        # expat (ordered_attributes=True) delivers a flat [name, value, ...] list.
        for i in range(0, len(self.arg[1]), 2):
            str_val = self.attribute(self.arg[1][i], self.arg[1][(i + 1)])
            if (self.arg[1][i] == 'args'):
                # The 'args' attribute is wrapped onto its own line, last.
                args_attr = ('%s %s' % (self.indent_insert(), str_val))
            else:
                try:
                    # Place known attributes into their configured slot;
                    # 'if' gets an extra alignment space.
                    idx = self.formatter.attr_order.index(self.arg[1][i])
                    del ordered[idx]
                    ordered.insert(idx, ('%s%s' % ((' ' if (self.arg[1][i] == 'if') else ''), str_val)))
                except Exception:
                    # Unknown attributes keep document order at the end.
                    ordered.append(str_val)
        for val in ordered:
            if val:
                str += val
        if args_attr:
            str += args_attr
        # Collapse <x></x> to <x/> unless noemptytag is set.
        if (self.list[(self.pos + 1)].end and (not self.formatter.noemptytag)):
            str += '/>'
        else:
            str += '>'
        return str

    def configure(self):
        self.content_model = self.list.token_model(self)
        self.descendant_mixed = self.list.token_descendant_mixed(self)
        self.preserve = self.list.token_preserve(self)
        self.indent = self.list.token_indent(self)
class XmlDecl(Token):
    """Renders the XML declaration and records the document's own encoding."""

    def __init__(self, list, arg):
        super(Formatter.XmlDecl, self).__init__(list, arg)
        if (len(self.arg) > 1):
            # Remember the declared encoding as a fallback output encoding.
            self.formatter.encoding_internal = self.arg[1]

    def __unicode__(self):
        # NOTE(review): encoding_effective is used without a call; presumably
        # a @property in the original source -- confirm.
        str = ('<?xml%s%s' % (self.attribute('version', self.arg[0]), self.attribute('encoding', self.formatter.encoding_effective)))
        # expat passes standalone as -1 (absent), 0 (no) or 1 (yes); only
        # values above -1 trigger the attribute (always rendered as "yes").
        if (self.arg[2] > (- 1)):
            str += self.attribute('standalone', 'yes')
        str += '?>\n'
        return str
def authenticate_user():
    """Authenticate the visitor via LoginSignup; halt the Streamlit run otherwise.

    Side effects: stores the authenticator, display name and username in
    ``st.session_state``; calls ``st.stop()`` when authentication is pending
    or failed, so code after this call only runs for authenticated users.
    """
    users = db.fetch_all_users()
    usernames = [user['key'] for user in users]
    names = [user['name'] for user in users]
    hashed_passwords = [user['password'] for user in users]
    # NOTE(review): 'abcdef' looks like a hard-coded cookie signing key --
    # consider moving it to configuration/secrets.
    authenticator = LoginSignup(names, usernames, hashed_passwords, 'query_aichat', 'abcdef', cookie_expiry_days=0)
    st.session_state['authenticator'] = authenticator
    (st.session_state['name'], authentication_status, st.session_state['username']) = authenticator.login('Login')
    # login() yields None while the form is pending and False on bad
    # credentials. Use identity checks for the None/False singletons
    # (the original used `== None` / `== False`).
    if authentication_status is None:
        st.stop()
    if authentication_status is False:
        st.warning('Username/password is incorrect')
        st.stop()
class ExchangeJsonTestcase(unittest.TestCase):
    """Sanity checks on the exchange configuration."""

    def test_all_zones_in_zones_json(self):
        """Every endpoint of every exchange key must be a configured zone
        (the 'US' aggregate is exempt)."""
        known_zones = ZONES_CONFIG.keys()
        for exchange_key in EXCHANGES_CONFIG:
            self.assertIn('->', exchange_key)
            for zone in exchange_key.split('->'):
                if zone == 'US':
                    continue
                self.assertIn(zone, known_zones)
def extractSakuratlWordpressCom(item):
    """Map a sakuratl.wordpress.com feed item to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message for known tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None

    # (tag to look for, canonical series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_staticfiles_304_with_etag_match(tmpdir, test_client_factory):
    """A matching ETag (exact or within a weak/multi-value header) yields 304."""
    file_path = os.path.join(tmpdir, 'example.txt')
    with open(file_path, 'w') as fh:
        fh.write('<file content>')

    client = test_client_factory(StaticFiles(directory=tmpdir))

    initial = client.get('/example.txt')
    assert initial.status_code == 200
    etag = initial.headers['etag']

    # Exact match.
    cached = client.get('/example.txt', headers={'if-none-match': etag})
    assert cached.status_code == 304
    assert cached.content == b''

    # Weak validator plus an unrelated ETag in the same header.
    cached = client.get('/example.txt', headers={'if-none-match': f'W/{etag}, "123"'})
    assert cached.status_code == 304
    assert cached.content == b''
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.