code stringlengths 281 23.7M |
|---|
(params=MINING_MAINNET_VMS[0:9])
def pre_london_miner(request, base_db, genesis_state):
    """Fixture: a mining chain built from a parametrized pre-London VM.

    The chain starts on the frontier VM and ends on the parametrized VM,
    so every configuration stops short of the London fork.
    """
    vm_class = request.param
    chain_class = _configure_mining_chain(
        'EndsBeforeLondon', MINING_MAINNET_VMS[0], vm_class)
    genesis_params = dict(difficulty=1, gas_limit=100000)
    return chain_class.from_genesis(base_db, genesis_params, genesis_state)
def test_describe_description(f):
    """describe_description: render an object's description block and a bare
    string description.

    NOTE(review): the expected strings are whitespace-exact (indent/wrap
    columns come from the width=/indent= arguments) -- do not reformat.
    """
    # Fixture object carrying a LONG-NAME with a multi-part description.
    ln = f.object('LONG-NAME', 'SHORT-LONG-NAME', 10, 0)
    case1 = '  Description\n  --\n  Quantity : diameter\n    and stuff\n  Source part number : 10\n\n'
    buf = StringIO()
    exclude = parseoptions('e')
    describe_description(buf, ln, width=31, indent='  ', exclude=exclude)
    assert (str(buf.getvalue()) == case1)
    # A plain string description wraps at width=21.
    case2 = '  Description : string\n    desc\n\n'
    buf = StringIO()
    describe_description(buf, 'string desc', width=21, indent='  ', exclude=exclude)
    assert (str(buf.getvalue()) == case2)
class UserFavouriteEventList(ResourceList):
    """Read-only JSON:API list endpoint for UserFavouriteEvent records."""
    def query(self, view_kwargs):
        """Base query; optionally filtered to one user's favourites.

        Raises a clean 404 (via safe_query_kwargs) for unknown user ids.
        """
        query_ = self.session.query(UserFavouriteEvent)
        if (view_kwargs.get('user_id') is not None):
            user = safe_query_kwargs(User, view_kwargs, 'user_id')
            query_ = query_.join(User).filter((User.id == user.id))
        elif has_access('is_admin'):
            # Admins get the unfiltered list.
            pass
        return query_
    methods = ['GET']
    schema = UserFavouriteEventSchema
    # flask-rest-jsonapi data layer; 'query' references the method above.
    data_layer = {'session': db.session, 'model': UserFavouriteEvent, 'methods': {'query': query}}
def _serialize(oxx, mod, n, value, mask, buf, offset):
    """Pack one OXM/NXM-style TLV (header, optional experimenter header,
    value, optional mask) into *buf* at *offset*.

    Header word layout: (field number << 9) | (has-mask bit << 8) | payload
    length in bytes.  Returns the total number of bytes written.
    """
    # _make_exp_hdr may rewrite the field number and produce an
    # experimenter header for vendor-extension fields.
    (n, exp_hdr) = _make_exp_hdr(oxx, mod, n)
    exp_hdr_len = len(exp_hdr)
    value_len = len(value)
    if mask:
        assert (value_len == len(mask))
        pack_str = ('!I%ds%ds%ds' % (exp_hdr_len, value_len, len(mask)))
        # has-mask bit set; payload length counts value twice (value + mask).
        msg_pack_into(pack_str, buf, offset, (((n << 9) | (1 << 8)) | (exp_hdr_len + (value_len * 2))), bytes(exp_hdr), value, mask)
    else:
        pack_str = ('!I%ds%ds' % (exp_hdr_len, value_len))
        msg_pack_into(pack_str, buf, offset, (((n << 9) | (0 << 8)) | (exp_hdr_len + value_len)), bytes(exp_hdr), value)
    return struct.calcsize(pack_str)
class RingControl(Module):
    """Wishbone-mapped LED-ring animation controller (Migen/LiteX module).

    Register map (word address bus.adr[0:3]):
      0 write: 24-bit colour for the lit LEDs
      1 read : packed version constant (MAJOR << 8) + MINOR

    NOTE(review): the two sync statements below each drive bus.ack with
    their own Else branch; with multiple synchronous drivers the later
    statement takes priority in Migen, which may mask the write-path ack --
    confirm this is intended.
    """
    def __init__(self, pad, mode, nleds, sys_clk_freq):
        self.bus = bus = wishbone.Interface(data_width=32)
        # Default colour 0x008000 (green channel).
        color = Signal(24, reset=32768)
        version = Constant(((MAJOR << 8) + MINOR))
        # Register 0 write: latch new colour and ack.
        self.sync += If((((bus.cyc & bus.stb) & bus.we) & (bus.adr[0:3] == 0)), color.eq(bus.dat_w), bus.ack.eq(1)).Else(bus.ack.eq(0))
        # Register 1 read: return the version constant and ack.
        self.sync += If((((bus.cyc & bus.stb) & (~ bus.we)) & (bus.adr[0:3] == 1)), bus.dat_r.eq(version), bus.ack.eq(1)).Else(bus.ack.eq(0))
        ring = RingSerialCtrl(nleds, sys_clk_freq)
        self.submodules += ring
        # ~50 ms tick used to advance the animation step.
        ring_timer = WaitTimer(int((0.05 * sys_clk_freq)))
        self.submodules += ring_timer
        # NOTE(review): 'mode == mode.DOUBLE' compares against an attribute
        # of the passed value -- presumably an enum member; confirm.
        if (mode == mode.DOUBLE):
            print('Led ring controller configured for dual led')
            # Bitmasks lighting two LEDs per step (pattern repeats).
            led_array = Array([2080, 1040, 520, 260, 130, 65, 2080, 1040, 520, 260, 130, 65])
        else:
            print('Led ring controller configured for single led')
            # One-hot masks walking a single LED around the ring.
            led_array = Array([2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1])
        index = Signal(12, reset=1)
        # Re-arm the timer continuously.
        self.comb += ring_timer.wait.eq((~ ring_timer.done))
        # Advance the step index each tick, wrapping after 11.
        self.sync += [If(ring_timer.done, index.eq((index + 1)), If((index == 11), index.eq(0)))]
        self.comb += ring.leds.eq(led_array[index])
        self.comb += [ring.colors.eq(color), pad.eq(ring.do)]
def extractThenoobtranslatorEpizyCom(item):
    """Parse release info for 'thenoobtranslator.epizy.com' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items with no chapter/volume information.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    for tag, series, tl_type in (('PRC', 'PRC', 'translated'),
                                 ('Loiterous', 'Loiterous', 'oel')):
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
_mode()
def generate_output_ex(model, tokenizer, params, device, context_len=2048, stream_interval=2):
    """Autoregressive generation with stop-string handling.

    Args:
        model: causal LM whose outputs expose .logits and .past_key_values.
        tokenizer: tokenizer providing __call__, decode, eos_token/eos_token_id.
        params: dict with 'prompt' and optional 'temperature',
            'max_new_tokens', 'stop' (str or list of str).
        device: torch device for input tensors.
        context_len: maximum model context length.
        stream_interval: unused; kept for interface compatibility.

    Returns:
        The decoded completion, truncated at the first stop string found.

    Raises:
        TypeError: if 'stop' is neither a string nor a list of strings.
    """
    prompt = params['prompt']
    temperature = float(params.get('temperature', 1.0))
    max_new_tokens = int(params.get('max_new_tokens', 2048))
    # Normalize the 'stop' parameter into a list of stop strings; the EOS
    # token itself is handled separately via eos_token_id.
    stop_parameter = params.get('stop', None)
    if stop_parameter == tokenizer.eos_token:
        stop_parameter = None
    stop_strings = []
    if isinstance(stop_parameter, str):
        stop_strings.append(stop_parameter)
    elif isinstance(stop_parameter, list):
        stop_strings = stop_parameter
    elif stop_parameter is not None:
        raise TypeError('Stop parameter must be string or list of strings.')
    input_ids = tokenizer(prompt).input_ids
    output_ids = []
    # Leave room in the context for the new tokens plus a small margin.
    max_src_len = (context_len - max_new_tokens) - 8
    input_ids = input_ids[-max_src_len:]
    stop_word = None
    # BUG FIX: 'output', 'past_key_values' and 'pos' were unbound when
    # max_new_tokens == 0 or when stop_strings was empty, raising
    # NameError after the loop.  Initialize them up front.
    output = ''
    past_key_values = None
    pos = -1
    for i in range(max_new_tokens):
        if i == 0:
            # First step: prime the KV cache with the full prompt.
            out = model(torch.as_tensor([input_ids], device=device), use_cache=True)
        else:
            # Later steps: feed only the newest token plus the cache.
            out = model(input_ids=torch.as_tensor([[token]], device=device),
                        use_cache=True, past_key_values=past_key_values)
        logits = out.logits
        past_key_values = out.past_key_values
        last_token_logits = logits[0][-1]
        if temperature < 0.0001:
            # Effectively greedy decoding.
            token = int(torch.argmax(last_token_logits))
        else:
            probs = torch.softmax(last_token_logits / temperature, dim=-1)
            token = int(torch.multinomial(probs, num_samples=1))
        output_ids.append(token)
        stopped = token == tokenizer.eos_token_id
        output = tokenizer.decode(output_ids, skip_special_tokens=True)
        for stop_str in stop_strings:
            pos = output.rfind(stop_str)
            if pos != -1:
                output = output[:pos]
                stopped = True
                stop_word = stop_str
                break
        if stopped:
            break
    del past_key_values
    # The output is already truncated at the stop string inside the loop;
    # the original's trailing 'output[:pos]' was a no-op at best and a
    # NameError when no stop strings were configured.
    return output
class HashPointPenTest(object):
    """Tests for HashPointPen: the hash string must encode advance width,
    outline points, components (recursively via the glyphset) and their
    transforms, so that visually different glyphs never collide."""
    def test_addComponent(self):
        # Component reference with a non-identity transform is recorded
        # inside [...] with the transform in (...).
        pen = HashPointPen(_TestGlyph().width, {'a': _TestGlyph()})
        pen.addComponent('a', (2, 0, 0, 3, (- 10), 5))
        assert (pen.hash == 'w500[l0+0l10+110o50+75o60+50c50+0|(+2+0+0+3-10+5)]')
    def test_NestedComponents(self):
        # _TestGlyph5 itself references a component -> nested [[...]].
        pen = HashPointPen(_TestGlyph().width, {'a': _TestGlyph5(), 'b': _TestGlyph()})
        pen.addComponent('a', (2, 0, 0, 3, (- 10), 5))
        assert (pen.hash == 'w500[[l0+0l10+110o50+75o60+50c50+0|(+1+0+0+1+0+0)](+2+0+0+3-10+5)]')
    def test_outlineAndComponent(self):
        # Outline data and a component may coexist in one hash.
        pen = HashPointPen(_TestGlyph().width, {'a': _TestGlyph()})
        glyph = _TestGlyph()
        glyph.drawPoints(pen)
        pen.addComponent('a', (2, 0, 0, 2, (- 10), 5))
        assert (pen.hash == 'w500l0+0l10+110o50+75o60+50c50+0|[l0+0l10+110o50+75o60+50c50+0|(+2+0+0+2-10+5)]')
    def test_addComponent_missing_raises(self):
        # Unknown component names propagate as KeyError with the base name.
        pen = HashPointPen(_TestGlyph().width, dict())
        with pytest.raises(KeyError) as excinfo:
            pen.addComponent('a', Identity)
        assert (excinfo.value.args[0] == 'a')
    def test_similarGlyphs(self):
        # Similar but not identical outlines must hash differently.
        pen = HashPointPen(_TestGlyph().width)
        glyph = _TestGlyph()
        glyph.drawPoints(pen)
        pen2 = HashPointPen(_TestGlyph2().width)
        glyph = _TestGlyph2()
        glyph.drawPoints(pen2)
        assert (pen.hash != pen2.hash)
    def test_similarGlyphs2(self):
        pen = HashPointPen(_TestGlyph().width)
        glyph = _TestGlyph()
        glyph.drawPoints(pen)
        pen2 = HashPointPen(_TestGlyph3().width)
        glyph = _TestGlyph3()
        glyph.drawPoints(pen2)
        assert (pen.hash != pen2.hash)
    def test_similarGlyphs3(self):
        pen = HashPointPen(_TestGlyph().width)
        glyph = _TestGlyph()
        glyph.drawPoints(pen)
        pen2 = HashPointPen(_TestGlyph4().width)
        glyph = _TestGlyph4()
        glyph.drawPoints(pen2)
        assert (pen.hash != pen2.hash)
    def test_glyphVsComposite(self):
        # A composite referencing a glyph must not collide with the glyph
        # drawn directly, even under the identity transform.
        pen = HashPointPen(_TestGlyph().width, {'a': _TestGlyph()})
        pen.addComponent('a', Identity)
        pen2 = HashPointPen(_TestGlyph().width)
        glyph = _TestGlyph()
        glyph.drawPoints(pen2)
        assert (pen.hash != pen2.hash)
def test_h5_1():
    """Known-answer test for the Bluetooth h5 function (spec sample data).

    NOTE(review): the 'Hash' literal below contains 55 hex digits -- an odd
    length -- so bytearray.fromhex raises ValueError before any assertion
    runs.  The expected digest appears truncated/garbled; recover the full
    vector (the first 16 bytes match SRESm + SRESs + ACO) from the spec.
    """
    R1 = bytearray.fromhex('d5cb8454d177733effffb2ec712baeab')
    R2 = bytearray.fromhex('a6e8e7cc25a75f6e216583f7ff3dc4cf')
    W = bytearray.fromhex('b089c4e39d7c192c3aba3c2109d24c0d')
    DAK = W
    (ComputedHash, ComputedSRESm, ComputedSRESs, ComputedACO) = h5(DAK, R1, R2)
    Hash = bytearray.fromhex('746af87e1eeb1137c683b97d9d421f911f3ddfb362958c458976d65')
    # SRESm/SRESs/ACO are the leading slices of the full hash:
    # bytes [0:4], [4:8] and [8:16] respectively.
    SRESm = bytearray.fromhex('746af87e')
    SRESs = bytearray.fromhex('1eeb1137')
    ACO = bytearray.fromhex('c683b97d9d421f91')
    emsg1 = 'test_h5_1: Hash {} != {}'.format(repr(Hash), repr(ComputedHash))
    assert (Hash == ComputedHash), emsg1
    emsg2 = 'test_h5_1: SRESm {} != {}'.format(repr(SRESm), repr(ComputedSRESm))
    assert (SRESm == ComputedSRESm), emsg2
    emsg3 = 'test_h5_1: SRESs {} != {}'.format(repr(SRESs), repr(ComputedSRESs))
    assert (SRESs == ComputedSRESs), emsg3
    emsg4 = 'test_h5_1: ACO {} != {}'.format(repr(ACO), repr(ComputedACO))
    assert (ACO == ComputedACO), emsg4
def coulomb3d_30(ax, da, A, bx, db, B, R):
    """Machine-generated Coulomb (nuclear attraction) integral block.

    ax/bx: Gaussian exponents; da/db: contraction coefficients; A/B: centre
    coordinates; R: nuclear position.  Returns a (10, 1) float array built
    from Boys-function values.  The x* locals are the code generator's
    common subexpressions -- do not hand-edit.

    NOTE(review): x25 and x35 multiply by the literal '0.', which zeroes
    every result row they scale; this looks like a garbled coefficient
    from the generator -- verify against the original generated source.
    """
    result = numpy.zeros((10, 1), dtype=float)
    # Gaussian product centre P = (ax*A + bx*B)/(ax+bx), relative to A and R.
    x0 = (ax + bx)
    x1 = (x0 ** (- 1.0))
    x2 = ((- x1) * ((ax * A[0]) + (bx * B[0])))
    x3 = (x2 + A[0])
    x4 = (x2 + R[0])
    x5 = ((- x1) * ((ax * A[1]) + (bx * B[1])))
    x6 = (x5 + R[1])
    x7 = ((- x1) * ((ax * A[2]) + (bx * B[2])))
    x8 = (x7 + R[2])
    # Boys-function argument: (ax+bx) * |P - R|^2.
    x9 = (x0 * (((x4 ** 2) + (x6 ** 2)) + (x8 ** 2)))
    x10 = boys(1, x9)
    x11 = boys(0, x9)
    x12 = (x1 * ((- x10) + x11))
    x13 = (x10 * x3)
    x14 = boys(2, x9)
    x15 = (x14 * x4)
    x16 = (x13 - x15)
    x17 = (2.0 * x4)
    x18 = (((- x10) * x4) + (x11 * x3))
    x19 = (2.0 * x3)
    x20 = (x1 * (x10 - x14))
    x21 = boys(3, x9)
    x22 = (2.0 * x1)
    # Contraction prefactor including the Gaussian overlap exponential.
    x23 = (((3. * da) * db) * numpy.exp(((((- ax) * bx) * x1) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x24 = (x1 * x23)
    x25 = (0. * x24)
    x26 = (x5 + A[1])
    x27 = (x10 * x26)
    x28 = (((- x10) * x6) + (x11 * x26))
    x29 = (x1 * (((x14 * x6) - x27) + x28))
    x30 = (- x28)
    x31 = (((- x14) * x6) + x27)
    x32 = (- x31)
    x33 = ((x14 * x26) - (x21 * x6))
    x34 = (- x33)
    x35 = (0. * x24)
    x36 = (x7 + A[2])
    x37 = (x10 * x36)
    x38 = (((- x10) * x8) + (x11 * x36))
    x39 = (x1 * (((x14 * x8) - x37) + x38))
    x40 = (- x38)
    x41 = (((- x14) * x8) + x37)
    x42 = (- x41)
    x43 = ((x14 * x36) - (x21 * x8))
    x44 = (- x43)
    x45 = (2.0 * x26)
    x46 = (2.0 * x6)
    x47 = ((x26 * x40) - (x42 * x6))
    x48 = ((x26 * x42) - (x44 * x6))
    x49 = (2.0 * x36)
    x50 = (2.0 * x8)
    x51 = ((x12 - (x40 * x49)) + (x42 * x50))
    x52 = ((x20 - (x42 * x49)) + (x44 * x50))
    result[(0, 0)] = numpy.sum(((- x25) * (((x22 * (((- x13) + x15) + x18)) + (x3 * ((x12 - (x16 * x17)) + (x18 * x19)))) - (x4 * (((x16 * x19) - (x17 * ((x14 * x3) - (x21 * x4)))) + x20)))))
    result[(1, 0)] = numpy.sum(((- x35) * (((x17 * ((x3 * x32) - (x34 * x4))) + x29) - ((2.0 * x3) * ((x3 * x30) - (x32 * x4))))))
    result[(2, 0)] = numpy.sum(((- x35) * (((x17 * ((x3 * x42) - (x4 * x44))) - ((2.0 * x3) * ((x3 * x40) - (x4 * x42)))) + x39)))
    result[(3, 0)] = numpy.sum(((- x35) * ((x3 * ((x12 - (x30 * x45)) + (x32 * x46))) - (x4 * ((x20 - (x32 * x45)) + (x34 * x46))))))
    result[(4, 0)] = numpy.sum(((x22 * x23) * ((x3 * x47) - (x4 * x48))))
    result[(5, 0)] = numpy.sum((x35 * (((- x3) * x51) + (x4 * x52))))
    result[(6, 0)] = numpy.sum(((- x25) * (((x26 * ((x12 + (x28 * x45)) - (x31 * x46))) + (2.0 * x29)) - (x6 * ((x20 + (x31 * x45)) - (x33 * x46))))))
    result[(7, 0)] = numpy.sum((x35 * ((((2.0 * x26) * x47) - x39) - (x46 * x48))))
    result[(8, 0)] = numpy.sum((x35 * (((- x26) * x51) + (x52 * x6))))
    result[(9, 0)] = numpy.sum(((- x25) * (((x36 * ((x12 + (x38 * x49)) - (x41 * x50))) + (2.0 * x39)) - (x8 * ((x20 + (x41 * x49)) - (x43 * x50))))))
    return result
class ListView(AbstractView):
    """Coverart-browser view that delegates display to Rhythmbox's own
    library page instead of a plugin window."""
    __gtype_name__ = 'ListView'
    name = 'listview'
    use_plugin_window = False
    def __init__(self):
        super(ListView, self).__init__()
        self.view = self
        # Lazy initialisation flag -- see initialise().
        self._has_initialised = False
        self.show_policy = ListShowingPolicy(self)
    def initialise(self, source):
        """One-time setup; subsequent calls are no-ops."""
        if self._has_initialised:
            return
        self._has_initialised = True
        self.view_name = 'list_view'
        super(ListView, self).initialise(source)
        self.shell = source.shell
    def switch_to_view(self, source, album):
        """Switch Rhythmbox to the library page (album is ignored here)."""
        self.initialise(source)
        # Defer the page selection to the GTK main loop.
        GLib.idle_add(self.shell.props.display_page_tree.select, self.shell.props.library_source)
    def get_selected_objects(self):
        # This view keeps no selection of its own.
        return []
class ErsiliaCloner(ErsiliaBase):
    """Obtain a copy of the ersilia package, preferring a local development
    checkout over a fresh GitHub clone when versions allow."""

    def __init__(self, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json)
        checker = Checker()
        checker._package_path()
        # Path of a local development checkout, or None if not available.
        self.development_path = checker.get_development_path()

    def clone(self, path, version):
        """Place the package repository under *path* and return its location.

        The development tree is copied when its version is unknown or equals
        *version*; otherwise the repository is cloned from GitHub.
        """
        repo_dir = os.path.join(path, self.cfg.HUB.PACKAGE)
        dev_path = self.development_path
        if dev_path is not None:
            dev_version = Versioner().ersilia_version_from_path(dev_path)
            if dev_version is None or dev_version == version:
                shutil.copytree(dev_path, repo_dir)
                return repo_dir
        downloader = GitHubDownloader(overwrite=True)
        downloader.clone(self.cfg.HUB.ORG, self.cfg.HUB.PACKAGE, repo_dir)
        return repo_dir
def _setup_logging(clsid):
    """Configure COM-server logging from registry values.

    Reads HKCR\\CLSID\\<clsid>\\Logging: the optional 'format' value sets
    the log record format, and 'levels' (REG_SZ or REG_MULTI_SZ of
    'logger=LEVEL' entries) sets per-logger levels.  Runs at most once
    per process; returns silently when keys/values are absent.
    """
    global _logging_configured
    if _logging_configured:
        return
    _logging_configured = True
    try:
        hkey = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, ('CLSID\\%s\\Logging' % clsid))
    except WindowsError:
        # No Logging key: leave logging unconfigured.
        return
    from comtypes.logutil import NTDebugHandler
    handler = NTDebugHandler()
    try:
        (val, typ) = winreg.QueryValueEx(hkey, 'format')
        formatter = logging.Formatter(val)
    except (WindowsError, ValueError):
        # BUG FIX: was a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit.  Missing or invalid format value
        # falls back to a sane default.
        formatter = logging.Formatter('(Thread %(thread)s):%(levelname)s:%(message)s')
    handler.setFormatter(formatter)
    logging.root.addHandler(handler)
    try:
        (values, typ) = winreg.QueryValueEx(hkey, 'levels')
    except WindowsError:
        # BUG FIX: was a bare 'except:'; only a missing registry value
        # should be treated as "no per-logger levels configured".
        return
    if (typ == winreg.REG_SZ):
        values = [values]
    elif (typ != winreg.REG_MULTI_SZ):
        # Unsupported value type: ignore.
        return
    for val in values:
        (name, level) = val.split('=')
        # Map the symbolic level name (e.g. 'DEBUG') to its numeric value.
        level = getattr(logging, level)
        logging.getLogger(name).setLevel(level)
class OptionPlotoptionsWaterfallAccessibility(Options):
    """Generated wrapper for waterfall-series accessibility options.

    NOTE(review): every option appears as a getter/setter pair sharing one
    name; without decorators (e.g. @property / @name.setter) the second
    definition shadows the first, leaving only the setter on the class.
    Decorators were presumably stripped from this dump -- confirm against
    the original generated source before relying on getter behaviour.
    """
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def descriptionFormat(self):
        return self._config_get(None)
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def exposeAsGroupOnly(self):
        return self._config_get(None)
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)
    def keyboardNavigation(self) -> 'OptionPlotoptionsWaterfallAccessibilityKeyboardnavigation':
        # Nested sub-options object.
        return self._config_sub_data('keyboardNavigation', OptionPlotoptionsWaterfallAccessibilityKeyboardnavigation)
    def point(self) -> 'OptionPlotoptionsWaterfallAccessibilityPoint':
        return self._config_sub_data('point', OptionPlotoptionsWaterfallAccessibilityPoint)
.xfail(reason='Test case should fail because of wrong input.')
def test_interaction_bigwig_fail():
    """chicExportData must fail for the unsupported 'bigwig' output type."""
    tmp = NamedTemporaryFile(suffix='.tar.gz', delete=False)
    tmp.close()
    cli = '-f {} -o {} -om {} -oft {}'.format(
        ROOT + 'chicViewpoint/two_matrices.hdf5', tmp.name, 'all', 'bigwig')
    chicExportData.main(cli.split())
def extractPeppermintiiTumblrCom(item):
    """Parse release info for 'peppermintii.tumblr.com' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Previews and untagged chapterless posts are not releases.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tag, series, tl_type in [
            ('18+OGPC', '18 Kin otome gemu de, teiso o mamorinukimasu!', 'translated')]:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
class HandlerBaseTest():
    """Shared test template for frontmatter handlers.

    Subclasses set self.handler and self.data (fixture filename, expected
    content and metadata) in setUp; the tests below then exercise load,
    dump, detect and split round-trips against that fixture.
    """
    def setUp(self):
        # Subclasses must override with a real handler and fixture data.
        self.handler = None
        self.data = {'filename': 'tests/yaml/hello-world.txt', 'content': '', 'metadata': {}}
    def read_from_tests(self):
        """Return the raw text of the fixture file."""
        with open(self.data['filename']) as f:
            return f.read()
    def test_external(self):
        """Full round-trip: load file, compare, dump, re-load, compare."""
        filename = self.data['filename']
        content = self.data['content']
        metadata = self.data['metadata']
        post = frontmatter.load(filename)
        self.assertEqual(post.content, content.strip())
        for (k, v) in metadata.items():
            self.assertEqual(post[k], v)
        # Serialize with the handler under test and re-parse.
        posttext = frontmatter.dumps(post, handler=self.handler)
        post_2 = frontmatter.loads(posttext)
        for k in post.metadata:
            self.assertEqual(post.metadata[k], post_2.metadata[k])
        self.assertEqual(post.content, post_2.content)
    def test_detect(self):
        """The handler must recognise its own front-matter format."""
        text = self.read_from_tests()
        self.assertTrue(self.handler.detect(text))
    def test_split_content(self):
        """split() must return the body unchanged."""
        text = self.read_from_tests()
        (fm, content) = self.handler.split(text)
        self.assertEqual(content, self.data['content'])
    def test_split_load(self):
        """Metadata parsed from the split front matter must match."""
        text = self.read_from_tests()
        (fm, content) = self.handler.split(text)
        fm_load = self.handler.load(fm)
        # Collect all mismatches before failing, for a readable report.
        any_fail = False
        failmsg = 'The following metadata did not match the test:'
        for k in self.data['metadata']:
            if (fm_load[k] == self.data['metadata'][k]):
                continue
            any_fail = True
            failmsg += '\n"{0}": {1},'.format(k, repr(fm_load[k]))
        if any_fail:
            self.fail(failmsg)
def extractGreenpandatrCom(item):
    """Parse release info for 'greenpandatr.com' feed items."""
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # No chapter/volume info, or an explicit preview -> not a release.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_tags = [('PRC', 'PRC', 'translated'),
                  ('Loiterous', 'Loiterous', 'oel')]
    for tag, series, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsSeriesSonificationTracksMappingVolume(Options):
    """Generated wrapper for sonification track volume mapping options.

    NOTE(review): getter/setter pairs share names; without decorators the
    later def shadows the earlier, so only setters survive on the class.
    Decorators were presumably stripped from this dump -- confirm against
    the original generated source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_cat_talents(cat_talents: list[dict[(str, Any)]], cat_talent_data: dict[(str, Any)]) -> dict[(Any, Any)]:
    """Build an index -> {'name', 'max'} mapping for a cat's talents.

    The display order of talent letters comes from find_order(); each
    letter's name and maximum level are read from *cat_talent_data*.

    NOTE(review): the 'F' branch reads key 'tFxtID_F', which looks like a
    typo for 'textID_F' -- but it may mirror a quirk in the game's data
    files, so it is left untouched; verify against the source data.
    """
    data: dict[(Any, Any)] = {}
    letter_order = find_order(cat_talents, cat_talent_data)
    for (i, letter) in enumerate(letter_order):
        cat_data = {}
        if (letter == 'F'):
            text_id_str = 'tFxtID_F'
        else:
            text_id_str = f'textID_{letter}'
        cat_data['name'] = cat_talent_data[text_id_str].strip('\n')
        cat_data['max'] = int(cat_talent_data[f'MAXLv_{letter}'])
        # A stored max of 0 means a single-level talent.
        if (cat_data['max'] == 0):
            cat_data['max'] = 1
        data[i] = cat_data
    return data
def _get_python_id(python, gitinfo=None):
    """Return a sha256 hex digest identifying a Python build.

    With *gitinfo*, hash the build-configuration files plus the git
    revision; otherwise hash the interpreter binary itself.
    """
    digest = hashlib.sha256()
    if not gitinfo:
        _hash_file(python, digest)
    else:
        _hash_file('pyconfig.h', digest, gitinfo)
        _hash_file(os.path.join('Misc', 'python-config.sh'), digest, gitinfo)
        _, revision = gitinfo
        digest.update(revision.encode('utf-8'))
    return digest.hexdigest()
class CropBoxAug(aug.Augmentation):
    """Augmentation cropping the image to a single (optionally rescaled) box."""

    def __init__(self, box_scale_factor: float = 1.0):
        super().__init__()
        # Factor by which the box is grown/shrunk about its centre.
        self.box_scale_factor = box_scale_factor

    def get_transform(self, image: np.ndarray, boxes: np.ndarray) -> Transform:
        # Exactly one box in xyxy format is expected.
        assert (boxes.shape[0] == 1)
        assert isinstance(image, np.ndarray)
        assert isinstance(boxes, np.ndarray)
        img_h, img_w = image.shape[:2]
        xywh = bu.get_bbox_xywh_from_xyxy(boxes[0])
        if self.box_scale_factor != 1.0:
            # Rescale about the centre, then clip back inside the image.
            xywh = bu.scale_bbox_center(xywh, self.box_scale_factor)
            xywh = bu.clip_box_xywh(xywh, [img_h, img_w])
        crop_params = xywh.int().tolist()
        return CropTransform(*crop_params, orig_w=img_w, orig_h=img_h)
.parametrize(('testcase', 'convrate'), [(('S', 1, (4, 6)), 1.9), (('S', 2, (3, 5)), 2.8), (('S', 3, (2, 4)), 3.8), (('S', 4, (2, 4)), 4.7)])
def test_scalar_convergence(extmesh, testcase, convrate):
    """Check L2 convergence of a Helmholtz-type problem on extruded meshes.

    Solves -div(grad u) + u = f with a manufactured cosine solution on a
    sequence of refined quadrilateral extruded meshes and asserts the
    observed convergence rate exceeds *convrate*.
    """
    (family, degree, (start, end)) = testcase
    l2err = np.zeros((end - start))
    for ii in [(i + start) for i in range(len(l2err))]:
        # 2^ii elements in each direction.
        mesh = extmesh((2 ** ii), (2 ** ii), (2 ** ii), quadrilateral=True)
        fspace = FunctionSpace(mesh, family, degree)
        u = TrialFunction(fspace)
        v = TestFunction(fspace)
        (x, y, z) = SpatialCoordinate(mesh)
        # Manufactured exact solution and matching right-hand side.
        uex = ((cos(((2 * np.pi) * x)) * cos(((2 * np.pi) * y))) * cos(((2 * np.pi) * z)))
        f = ((- div(grad(uex))) + uex)
        # Elevated quadrature degree avoids integration error dominating.
        a = ((inner(grad(u), grad(v)) + inner(u, v)) * dx(degree=(degree + 7)))
        L = (inner(f, v) * dx(degree=(degree + 7)))
        # Direct solve so only discretisation error remains.
        params = {'snes_type': 'ksponly', 'ksp_type': 'preonly', 'pc_type': 'lu'}
        sol = Function(fspace)
        solve((a == L), sol, solver_parameters=params)
        l2err[(ii - start)] = sqrt(assemble((((sol - uex) * (sol - uex)) * dx)))
    assert (np.array([np.log2((l2err[i] / l2err[(i + 1)])) for i in range((len(l2err) - 1))]) > convrate).all()
def render_repositories(*_args, **_kwargs):
    """Render the repositories overview page for the logged-in user."""
    owner = flask.g.user
    permissible = ComplexLogic.get_coprs_permissible_by_user(owner)
    permissible = sorted(permissible, key=lambda proj: proj.full_name)
    # Record that the user has reviewed their outdated chroots.
    OutdatedChrootsLogic.make_review(owner)
    db.session.commit()
    return flask.render_template(
        'repositories.html',
        tasks_info=ComplexLogic.get_queue_sizes_cached(),
        graph=BuildsLogic.get_small_graph_data('30min'),
        owner=owner,
        projects=permissible,
    )
.parametrize('ops', [NumpyOps(), NumpyOps(use_blis=True)])
def test_chain(ops):
    """chain() combinator: initialization across dim configurations and
    rejection of too-few layers."""
    data = numpy.asarray([[1, 2, 3, 4]], dtype='f')
    # Output dim of the last layer left for inference from data.
    model = chain(Linear(1), Dropout(), Linear(1))
    model.ops = ops
    model.initialize(data, data)
    (Y, backprop) = model(data, is_train=True)
    backprop(Y)
    # Fully specified last layer.
    model = chain(Linear(1), Dropout(), Linear(1, 1))
    model.initialize(data, data)
    # nO set explicitly, no output sample provided.
    model = chain(Linear(1), Dropout(), Linear(1))
    model.set_dim('nO', 1)
    model.initialize(data, None)
    # nI set explicitly, no input sample provided.
    model = chain(Linear(1, 1), Dropout(), Linear(1, 1))
    model.set_dim('nI', 1)
    model.initialize(None, data)
    # chain() requires at least two layers.
    with pytest.raises(TypeError):
        chain(Linear())
    with pytest.raises(TypeError):
        chain()
class NonlinearDAE(DAE_base):
    """Scalar nonlinear decay problem u' = -a * max(u, 0)**p.

    The exact solution is exp(-a t) for p == 1, and the power-law decay
    max(1 - (1 - p) a t, 0) ** (1/(1-p)) otherwise.
    """

    def __init__(self, a, p):
        self.a_ = a  # decay rate
        self.p_ = p  # nonlinearity exponent
        if (self.p_ == 1):
            self.func = (lambda t: exp(((- self.a_) * t)))
        else:
            # 1.0 / (1.0 - p) is already true division on floats; the
            # legacy py2-compat old_div() wrapper was redundant here.
            q = 1.0 / (1.0 - self.p_)
            self.func = (lambda t: (max((1.0 - (((1.0 - self.p_) * self.a_) * t)), 0.0) ** q))

    def uOfT(self, t):
        """Exact solution at time *t*."""
        return self.func(t)

    def fOfUT(self, u, t):
        """Right-hand side -a * max(u, 0)**p (clipped at zero)."""
        return ((- self.a_) * (max(u, 0.0) ** self.p_))
def main():
    """Push an EOS token transfer via eosapi and report the outcome.

    Relies on module-level 'account_name' and 'api' being configured.
    Each failure mode (network, node, transaction) is reported separately.
    """
    print('transfer EOS token from [consumer1111] to [consumer2222] by eosapi')
    # A single 'transfer' action on the eosio.token contract.
    trx = {'actions': [{'account': 'eosio.token', 'name': 'transfer', 'authorization': [{'actor': account_name, 'permission': 'active'}], 'data': {'from': account_name, 'to': 'consumer2222', 'quantity': '0.0001 EOS', 'memo': 'by eosapi'}}]}
    try:
        resp = api.push_transaction(trx)
        print('transaction ok: {0}'.format(resp))
    except RequestException as e:
        print('network error: {0}'.format(str(e)))
    except NodeException as e:
        print('eos node error, http status code {0}, response text: {1}'.format(e.resp.status_code, e.resp.text))
    except TransactionException as e:
        print('eos transaction error, http status code {0}, response text: {1}'.format(e.resp.status_code, e.resp.text))
('pyscf')
.parametrize('bonds, ref_cycle', [(None, 8), ([[1, 2, (- 1)], [2, 0, 1]], 8)])
def test_dimer_hcn(bonds, ref_cycle):
    """Dimer-method transition-state search on the Baker HCN test case.

    Parametrized over an explicit initial dimer orientation (N_raw) versus
    one derived from the given bonds; both must converge to the reference
    energy.
    """
    geom = geom_loader('lib:baker_ts/01_hcn.xyz')
    ref_energy = (- 92.24604)
    # Hand-picked initial dimer orientation vector (one 3-vector per atom).
    N_raw = ' 0.5858 0. 0.0543 -0.7697 -0. 0.061 0.2027 0. -0.1295'.split()
    if (bonds is not None):
        # When bonds are supplied, derive the orientation from them instead.
        N_raw = None
    calc = PySCF('321g', pal=2)
    dimer_kwargs = {'rotation_method': 'fourier', 'calculator': calc, 'N_raw': N_raw, 'bonds': bonds}
    dimer = Dimer(**dimer_kwargs)
    geom.set_calculator(dimer)
    opt_kwargs = {'precon': True, 'max_step_element': 0.25, 'max_cycles': 15}
    opt = PreconLBFGS(geom, **opt_kwargs)
    opt.run()
    assert opt.is_converged
    assert (geom.energy == pytest.approx(ref_energy, abs=1e-05))
class OptionSeriesBellcurveSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Generated wrapper for bell-curve sonification tremolo-depth mapping.

    NOTE(review): getter/setter pairs share names; without decorators the
    later def shadows the earlier, so only setters survive on the class.
    Decorators were presumably stripped from this dump -- confirm against
    the original generated source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_builder_pins_manifest_to_provided_ipfs_backend(backend, request):
    """Integration test: the builder pins a minified manifest to IPFS.

    Skipped unless pytest was invoked with --integration.  The expected
    hash is the IPFS CID of the minified (no-whitespace) JSON manifest.
    """
    if (not request.config.getoption('--integration')):
        pytest.skip('Not asked to run integration tests')
    minified_manifest_hash = 'QmVwwpt2BAkmWQt4eNnswhWd6bYgLbnUQDMHdVMHotwiqz'
    (manifest,) = b.build({}, b.package_name('package'), b.manifest_version('2'), b.version('1.0.0'), b.pin_to_ipfs(backend=backend))
    assert (manifest['Hash'] == minified_manifest_hash)
    # Fetch back from the backend and verify byte-exact minified JSON.
    pinned_manifest = backend.fetch_uri_contents(manifest['Hash'])
    assert (pinned_manifest == b'{"manifest_version":"2","package_name":"package","version":"1.0.0"}')
class Evaluator(object):
    """A shareable expression-evaluation environment.

    Wraps a dict used as the globals namespace for eval()/exec().
    Evaluators can be attach()ed so several instances share one
    environment (transitively), and detach()ed again to split off an
    independent copy.

    WARNING: eval/exec execute arbitrary code; never feed untrusted input.
    """

    def __init__(self, *larg, **karg):
        # Accepts any dict()-style initialisation.
        self._env = dict(*larg, **karg)
        self.attached = []  # evaluators currently sharing self._env

    def __call__(self, expr, locals=None):
        """Evaluate *expr*; *locals* is an optional local namespace.

        BUG FIX: the default used to be a shared mutable dict ({}), so
        names written by one call (e.g. via assignment expressions)
        leaked into every subsequent call.  A fresh dict is now created
        per call; passing an explicit mapping behaves as before.
        """
        if locals is None:
            locals = {}
        return eval(expr, self._env, locals)

    def declare(self, stmt, locals=None):
        """Execute *stmt* (e.g. an assignment or def) in the environment."""
        exec(stmt, self._env, locals)

    def attach(self, other):
        """Adopt *other*'s environment; our attachees follow transitively."""
        self._env = other._env
        other.attached.append(self)
        # Re-attach everything previously attached to us so the whole
        # group converges on the same dict.
        for e in list(self.attached):
            e.attach(self)

    def detach(self, other):
        """Stop sharing with *other*, keeping an independent copy."""
        self._env = self._env.copy()
        other.attached.remove(self)

    def update(self, other):
        """Merge *other*'s bindings into this environment."""
        self._env.update(other._env)

    def copy(self):
        """Return a new evaluator with a copy of the current bindings."""
        return self.__class__(self._env)

    def __contains__(self, name):
        return (name in self._env)

    def __getitem__(self, key):
        return self._env[key]

    def __setitem__(self, key, val):
        self._env[key] = val

    def __iter__(self):
        # Iterates (name, value) pairs, dict.items()-style.
        return iter(self._env.items())

    def __eq__(self, other):
        return (self._env == other._env)

    def __ne__(self, other):
        return (not self.__eq__(other))
def test_overfitting_IO():
    """Overfit the experimental arc predicter/labeler on toy data, then
    verify predictions survive both disk and bytes (de)serialization."""
    nlp = English.from_config()
    nlp.add_pipe('sentencizer')
    nlp.add_pipe('experimental_arc_predicter')
    nlp.add_pipe('experimental_arc_labeler')
    train_examples = []
    for t in TRAIN_DATA:
        train_examples.append(Example.from_dict(nlp.make_doc(t[0]), t[1]))
    optimizer = nlp.initialize(get_examples=(lambda : train_examples))
    # Train until the labeler loss is (near) zero on the toy corpus.
    for i in range(150):
        losses = {}
        nlp.update(train_examples, sgd=optimizer, losses=losses, annotates=['sentencizer'])
    assert (losses['experimental_arc_labeler'] < 1e-05)
    test_text = 'She likes green eggs'
    doc = nlp(test_text)
    # Expected dependency tree: likes(ROOT) <- She(nsubj), eggs(dobj) <- green(amod).
    assert (doc[0].head == doc[1])
    assert (doc[0].dep_ == 'nsubj')
    assert (doc[1].head == doc[1])
    assert (doc[1].dep_ == 'ROOT')
    assert (doc[2].head == doc[3])
    assert (doc[2].dep_ == 'amod')
    assert (doc[3].head == doc[1])
    assert (doc[3].dep_ == 'dobj')
    # Round-trip through to_disk/load_model_from_path.
    with util.make_tempdir() as tmp_dir:
        nlp.to_disk(tmp_dir)
        nlp2 = util.load_model_from_path(tmp_dir)
        doc2 = nlp2(test_text)
        assert (doc2[0].head == doc2[1])
        assert (doc2[0].dep_ == 'nsubj')
        assert (doc2[1].head == doc2[1])
        assert (doc2[1].dep_ == 'ROOT')
        assert (doc2[2].head == doc2[3])
        assert (doc2[2].dep_ == 'amod')
        assert (doc2[3].head == doc2[1])
        assert (doc2[3].dep_ == 'dobj')
    # Round-trip through to_bytes/from_bytes into a freshly built pipeline.
    nlp_bytes = nlp.to_bytes()
    nlp3 = English()
    nlp3.add_pipe('sentencizer')
    nlp3.add_pipe('experimental_arc_predicter')
    nlp3.add_pipe('experimental_arc_labeler')
    nlp3.from_bytes(nlp_bytes)
    doc3 = nlp3(test_text)
    assert (doc3[0].head == doc3[1])
    assert (doc3[0].dep_ == 'nsubj')
    assert (doc3[1].head == doc3[1])
    assert (doc3[1].dep_ == 'ROOT')
    assert (doc3[2].head == doc3[3])
    assert (doc3[2].dep_ == 'amod')
    assert (doc3[3].head == doc3[1])
    assert (doc3[3].dep_ == 'dobj')
def extractLofnsquickmtlWordpressCom(item):
    """Parse release info for 'lofnsquickmtl.wordpress.com' feed items."""
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        # Not a chapter release (or just a preview post).
        return None
    release_tags = (('PRC', 'PRC', 'translated'),
                    ('Loiterous', 'Loiterous', 'oel'))
    for tag, series, tl_type in release_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
def has_to_build(output_file: Path, input_file: Path):
    """Return True when *output_file* is missing or older than *input_file*."""
    out_path = PathSeq(output_file)
    in_path = PathSeq(input_file)
    if not out_path.exists():
        return True
    # Rebuild only when the input is newer than the existing output.
    return modification_date(out_path) < modification_date(in_path)
class RMTTestRecordTxt2(object):
    """Parser tests for TxtRecord.from_string: empty/garbage input, line
    length enforcement and comment placement, checked against the exact
    log output (with volatile fields masked by hide_volatile)."""
    def rmttest_pos_01(self):
        # Empty input parses to an empty, comment-free document.
        txt_doc = TxtRecord.from_string('', 'Nothing', TxtIOConfig())
        assert (0 == len(txt_doc))
        assert ('' == txt_doc.get_comment())
    def rmttest_neg_01(self):
        # Plain rubbish: no tag line -> unusable document plus error log.
        mstderr = StringIO()
        init_logger(mstderr)
        txt_doc = TxtRecord.from_string('rubbish', 'Rubbish', TxtIOConfig())
        assert (txt_doc.is_usable() is False)
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        result_expected = '===DATETIMESTAMP===;rmtoo;ERROR;TxtParser;split_entries;===LINENO===; 79:Rubbish:1:Expected tag line not found\n'
        assert (result_expected == lstderr)
    def rmttest_neg_02(self):
        # A lone colon is not a valid tag line either.
        mstderr = StringIO()
        init_logger(mstderr)
        txt_doc = TxtRecord.from_string(':', 'Rubbish', TxtIOConfig())
        assert (txt_doc.is_usable() is False)
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        result_expected = '===DATETIMESTAMP===;rmtoo;ERROR;TxtParser;split_entries;===LINENO===; 79:Rubbish:1:Expected tag line not found\n'
        assert (result_expected == lstderr)
    def rmttest_neg_03(self):
        # A value without a tag name before the colon is rejected too.
        mstderr = StringIO()
        init_logger(mstderr)
        txt_doc = TxtRecord.from_string(': something', 'Rubbish', TxtIOConfig())
        assert (txt_doc.is_usable() is False)
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        result_expected = '===DATETIMESTAMP===;rmtoo;ERROR;TxtParser;split_entries;===LINENO===; 79:Rubbish:1:Expected tag line not found\n'
        assert (result_expected == lstderr)
    def rmttest_neg_04(self):
        # max_input_line_length=7 rejects an 18-character line.
        mstderr = StringIO()
        init_logger(mstderr)
        cfg = Cfg.new_by_json_str('{"max_input_line_length": 7}')
        tioconfig = TxtIOConfig(cfg)
        txt_doc = TxtRecord.from_string('good: but too long', 'TooLong', tioconfig)
        assert (txt_doc.is_usable() is False)
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        result_expected = '===DATETIMESTAMP===;rmtoo;ERROR;TxtRecord;check_line_length;===LINENO===; 80:TooLong:1:line too long: is [18], max allowed [7]\n'
        assert (result_expected == lstderr)
    def rmttest_neg_05(self):
        # The reported line number must point at the long line (line 6).
        mstderr = StringIO()
        init_logger(mstderr)
        cfg = Cfg.new_by_json_str('{"max_input_line_length": 7}')
        tioconfig = TxtIOConfig(cfg)
        txt_doc = TxtRecord.from_string('# com\nok: yes\n no\n# cs\n# dds\ngood: but too long\n# dds\n\n', 'TooLong', tioconfig)
        assert (txt_doc.is_usable() is False)
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        result_expected = '===DATETIMESTAMP===;rmtoo;ERROR;TxtRecord;check_line_length;===LINENO===; 80:TooLong:6:line too long: is [18], max allowed [7]\n'
        assert (result_expected == lstderr)
    def rmttest_neg_06(self):
        # Multiple over-long lines: every offender is reported, plus the
        # comment-reordering compatibility info message.
        mstderr = StringIO()
        init_logger(mstderr)
        cfg = Cfg.new_by_json_str('{"max_input_line_length": 7}')
        tioconfig = TxtIOConfig(cfg)
        txt_doc = TxtRecord.from_string('#1 com\nok: yes\n no\n#4 cs\n#5 dds\ngood: but too long\n#7 dds\n#8 hi\nalso good: but too long\n#10 gsst\n dhd\n#12 dhdh\nd:\n#14\n', 'TooLong', tioconfig)
        assert (txt_doc.is_usable() is False)
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        result_expected = '===DATETIMESTAMP===;rmtoo;ERROR;TxtRecord;check_line_length;===LINENO===; 80:TooLong:6:line too long: is [18], max allowed [7]\n===DATETIMESTAMP===;rmtoo;ERROR;TxtRecord;check_line_length;===LINENO===; 80:TooLong:9:line too long: is [23], max allowed [7]\n===DATETIMESTAMP===;rmtoo;ERROR;TxtRecord;check_line_length;===LINENO===; 80:TooLong:10:line too long: is [8], max allowed [7]\n===DATETIMESTAMP===;rmtoo;ERROR;TxtRecord;check_line_length;===LINENO===; 80:TooLong:12:line too long: is [8], max allowed [7]\n===DATETIMESTAMP===;rmtoo;INFO;TxtParser;split_next_record;===LINENO===; 80:TooLong:11:Compatibility info: Comments will be reordered when they are re-written with rmtoo-tools. Please consult rmtoo-req-format(5) or rmtoo-topic-format(5)\n'
        assert (result_expected == lstderr)
    def rmttest_neg_07(self):
        # Comments inside entries are tolerated (document stays usable)
        # but each misplaced comment produces a warning line.
        mstderr = StringIO()
        init_logger(mstderr)
        tioconfig = TxtIOConfig()
        txt_doc = TxtRecord.from_string('#1 com\nt1: uuuu\n#3 Comment not allowed here.\n#4 Should emitt a warning\n vvvv\nt2: uuuu\n vvvv\n#8 Here a comment is also not allowed\n wwww\nt3: uuuu\n#11 Same as t1 but with additional\n#12 comment at the end of the requirement\n vvvv\n#14 End comment for t3\nt4: uuuu\n vvvv\n#17 Same as t2 but with additional\n#18 comment at the end of the requirement\n wwww\n#20 End comment for t4\n', 'CommentsEverywhere', tioconfig)
        assert (txt_doc.is_usable() is True)
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        # 'comment_line' is a module-level template taking the line number.
        result_expected = ((((comment_line % 5) + (comment_line % 9)) + (comment_line % 13)) + (comment_line % 19))
        assert (result_expected == lstderr)
    def rmttest_neg_08(self):
        # A record that is only a comment is usable and keeps the comment.
        mstderr = StringIO()
        init_logger(mstderr)
        tioconfig = TxtIOConfig()
        txt_doc = TxtRecord.from_string('#1 com', 'OnlyEntryComment', tioconfig)
        assert (txt_doc.is_usable() is True)
        assert (txt_doc.get_comment() == '1 com\n')
        lstderr = hide_volatile(mstderr.getvalue())
        tear_down_log_handler()
        assert ('' == lstderr)
class OptionPlotoptionsTimelineDatalabels(Options):
    """Data-label options for a Highcharts *timeline* series.

    Every option is exposed as a property with a matching setter.

    NOTE(review): in the original text each pair was two plain ``def``s with
    the same name, so the second definition silently shadowed the first and
    every getter (and its ``_config_get`` default) was unreachable dead code —
    almost certainly ``@property``/``@name.setter`` decorators stripped during
    extraction.  They are restored here; the sub-option accessors
    (``animation``, ``filter``, ``style``, ``textPath``) are read-only
    properties.  The defaults passed to ``_config_get`` are taken verbatim
    from the original — presumably mirroring the Highcharts documentation;
    confirm against the generator that produced this module.
    """

    @property
    def align(self):
        return self._config_get('center')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        return self._config_get(True)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def alternate(self):
        return self._config_get(True)

    @alternate.setter
    def alternate(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionPlotoptionsTimelineDatalabelsAnimation':
        return self._config_sub_data('animation', OptionPlotoptionsTimelineDatalabelsAnimation)

    @property
    def backgroundColor(self):
        return self._config_get('#ffffff')

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        return self._config_get('#999999')

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        return self._config_get(3)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(1)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get('#333333')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorColor(self):
        return self._config_get(None)

    @connectorColor.setter
    def connectorColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorWidth(self):
        return self._config_get(1)

    @connectorWidth.setter
    def connectorWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def crop(self):
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def distance(self):
        return self._config_get('undefined')

    @distance.setter
    def distance(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionPlotoptionsTimelineDatalabelsFilter':
        return self._config_sub_data('filter', OptionPlotoptionsTimelineDatalabelsFilter)

    @property
    def format(self):
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        return self._config_get(None)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self) -> 'OptionPlotoptionsTimelineDatalabelsStyle':
        return self._config_sub_data('style', OptionPlotoptionsTimelineDatalabelsStyle)

    @property
    def textPath(self) -> 'OptionPlotoptionsTimelineDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionPlotoptionsTimelineDatalabelsTextpath)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        return self._config_get('bottom')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def state_data(db):
    """Seed award/transaction/StateData rows for state-profile tests.

    Creates two assistance awards ('07'/'08') in state 'TS' (one transaction
    inside the latest window, one outside), stray transactions for a district
    ('TD') and a territory ('TT'), and census StateData rows for each FIPS
    code across fiscal years.

    NOTE(review): this function takes the ``db`` fixture and reads like a
    pytest fixture — a ``@pytest.fixture`` decorator appears to have been
    lost in extraction; confirm against the original test module.
    """
    # Two awards in the same state so totals aggregate across types.
    award_old = baker.make('search.AwardSearch', award_id=1, type='07')
    award_old2 = baker.make('search.AwardSearch', award_id=2, type='08')
    # Transaction inside the current reporting window ...
    baker.make('search.TransactionSearch', transaction_id=1, award=award_old, pop_state_code='TS', pop_country_code='USA', federal_action_obligation=100000, action_date=TODAY.strftime('%Y-%m-%d'), is_fpds=False)
    # ... and one dated outside the latest window, to exercise date filtering.
    baker.make('search.TransactionSearch', transaction_id=2, award=award_old2, pop_state_code='TS', pop_country_code='USA', federal_action_obligation=100000, action_date=OUTSIDE_OF_LATEST.strftime('%Y-%m-%d'), is_fpds=False)
    # Award-less transactions for the district and territory codes.
    baker.make('search.TransactionSearch', transaction_id=3, pop_state_code='TD', pop_country_code='USA', federal_action_obligation=1000, action_date=TODAY.strftime('%Y-%m-%d'), is_fpds=False)
    baker.make('search.TransactionSearch', transaction_id=4, pop_state_code='TT', pop_country_code='USA', federal_action_obligation=1000, action_date=TODAY.strftime('%Y-%m-%d'), is_fpds=False)
    # Census rows: 'TS' has two years (current year has no MHI on purpose).
    baker.make('recipient.StateData', id='01-{}'.format((CURRENT_FISCAL_YEAR - 2)), fips='01', name='Test State', code='TS', type='state', year=(CURRENT_FISCAL_YEAR - 2), population=50000, pop_source='Census 2010 Pop', median_household_income=50000, mhi_source='Census 2010 MHI')
    baker.make('recipient.StateData', id='01-{}'.format(CURRENT_FISCAL_YEAR), fips='01', name='Test State', code='TS', type='state', year=CURRENT_FISCAL_YEAR, population=100000, pop_source='Census 2010 Pop', median_household_income=None, mhi_source='Census 2010 MHI')
    baker.make('recipient.StateData', id='02-{}'.format((CURRENT_FISCAL_YEAR - 2)), fips='02', name='Test District', code='TD', type='district', year=(CURRENT_FISCAL_YEAR - 2), population=5000, pop_source='Census 2010 Pop', median_household_income=20000, mhi_source='Census 2010 MHI')
    baker.make('recipient.StateData', id='03-{}'.format((CURRENT_FISCAL_YEAR - 2)), fips='03', name='Test Territory', code='TT', type='territory', year=(CURRENT_FISCAL_YEAR - 2), population=5000, pop_source='Census 2010 Pop', median_household_income=10000, mhi_source='Census 2010 MHI')
class HModeling(Estimator, HasInputCol, HasOutputCols, DefaultParamsReadable, DefaultParamsWritable):
    """Spark ML Estimator that builds its underlying model from an
    expression string held in the ``model`` Param.

    ``_fit`` evaluates the expression to obtain an estimator and fits it on
    the incoming DataFrame.
    """

    # Expression string naming/constructing the estimator to fit.
    model = Param(Params._dummy(), 'model', 'model')

    # NOTE(review): in the original these defs were preceded by a bare
    # statement ``_only`` (a NameError as written).  The bodies read
    # ``self._input_kwargs``, which only exists when pyspark's
    # ``@keyword_only`` decorator wraps the call, so that decorator is
    # restored here — confirm ``keyword_only`` is imported at file top.
    @keyword_only
    def __init__(self, inputCol=None, model=None):
        super(HModeling, self).__init__()
        self._setDefault(model=None)
        kwargs = self._input_kwargs
        self._set(**kwargs)

    @keyword_only
    def setParams(self, inputCol=None, model=None):
        """Set params from keyword arguments (standard pyspark pattern)."""
        kwargs = self._input_kwargs
        return self._set(**kwargs)

    def setModel(self, model):
        """Set the model expression string; returns self for chaining."""
        return self._set(model=model)

    def getModel(self):
        """Return the model expression string."""
        return self.getOrDefault(self.model)

    def getParams(self):
        return self.getOrDefault(self.params)

    def transform(self, df):
        # Intentionally a no-op: fitting returns the evaluated model, which
        # callers use directly for transformation.
        pass

    def _fit(self, df):
        """Evaluate the ``model`` expression and fit it on *df*.

        Accepts a bare DataFrame or a tuple whose first element is one.

        SECURITY: ``eval`` executes arbitrary code from the ``model`` Param —
        only use with trusted configuration.
        """
        if isinstance(df, tuple):
            df = df[0]
        rfc = eval(self.getModel())
        model = rfc.fit(df)
        return model
class OptionSeriesBubbleSonificationTracksMappingPitch(Options):
    """Pitch-mapping options for bubble-series sonification tracks.

    NOTE(review): in the original text each option was a pair of plain
    ``def``s with the same name, so the setter shadowed the getter and the
    ``_config_get`` defaults were unreachable — almost certainly stripped
    ``@property``/``@name.setter`` decorators, restored here.  Defaults are
    reproduced verbatim from the original.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class RFM69(object):
    """Driver for the HopeRF RFM69(H)W transceiver over SPI + GPIO.

    Fixes relative to the original:
    - ``setHighPower(False)`` referenced an undefined bare ``powerLevel``
      (NameError) — ``shutdown()`` always hit it; now uses the instance's
      power level with a safe fallback.
    - Frequency synthesizer step was ``61.`` Hz; the RFM69 FSTEP is
      FXOSC / 2**19 = 32e6 / 524288 = 61.03515625 Hz, so 61 mis-tuned the
      carrier by roughly 0.06% (~0.5 MHz at 915 MHz).
    """

    def __init__(self, freqBand, nodeID, networkID, isRFM69HW=False, intPin=18, rstPin=22, spiBus=0, spiDevice=0):
        """Reset the radio, push the base register configuration, and attach
        the DIO0 interrupt handler.  Blocks until the radio answers on SPI.
        """
        self.freqBand = freqBand
        self.address = nodeID
        self.networkID = networkID
        self.isRFM69HW = isRFM69HW
        self.intPin = intPin
        self.rstPin = rstPin
        self.spiBus = spiBus
        self.spiDevice = spiDevice
        self.intLock = False          # crude guard against re-entrant IRQs
        self.mode = ''
        self.promiscuousMode = False
        self.DATASENT = False
        self.DATALEN = 0
        self.SENDERID = 0
        self.TARGETID = 0
        self.PAYLOADLEN = 0
        self.ACK_REQUESTED = 0
        self.ACK_RECEIVED = 0
        self.RSSI = 0
        self.DATA = []
        self.sendSleepTime = 0.05
        self.powerLevel = 31          # default max; see setPowerLevel()
        GPIO.setmode(GPIO.BOARD)
        GPIO.setup(self.intPin, GPIO.IN)
        GPIO.setup(self.rstPin, GPIO.OUT)
        # Carrier-frequency register values per supported band.
        frfMSB = {RF69_315MHZ: RF_FRFMSB_315, RF69_433MHZ: RF_FRFMSB_433, RF69_868MHZ: RF_FRFMSB_868, RF69_915MHZ: RF_FRFMSB_915}
        frfMID = {RF69_315MHZ: RF_FRFMID_315, RF69_433MHZ: RF_FRFMID_433, RF69_868MHZ: RF_FRFMID_868, RF69_915MHZ: RF_FRFMID_915}
        frfLSB = {RF69_315MHZ: RF_FRFLSB_315, RF69_433MHZ: RF_FRFLSB_433, RF69_868MHZ: RF_FRFLSB_868, RF69_915MHZ: RF_FRFLSB_915}
        # Register map: {order: [register, value]} — written in order below.
        self.CONFIG = {
            1: [REG_OPMODE, ((RF_OPMODE_SEQUENCER_ON | RF_OPMODE_LISTEN_OFF) | RF_OPMODE_STANDBY)],
            2: [REG_DATAMODUL, ((RF_DATAMODUL_DATAMODE_PACKET | RF_DATAMODUL_MODULATIONTYPE_FSK) | RF_DATAMODUL_MODULATIONSHAPING_00)],
            3: [REG_BITRATEMSB, RF_BITRATEMSB_4800],
            4: [REG_BITRATELSB, RF_BITRATELSB_4800],
            5: [REG_FDEVMSB, RF_FDEVMSB_5000],
            6: [REG_FDEVLSB, RF_FDEVLSB_5000],
            7: [REG_FRFMSB, frfMSB[freqBand]],
            8: [REG_FRFMID, frfMID[freqBand]],
            9: [REG_FRFLSB, frfLSB[freqBand]],
            25: [REG_RXBW, ((RF_RXBW_DCCFREQ_010 | RF_RXBW_MANT_16) | RF_RXBW_EXP_2)],
            37: [REG_DIOMAPPING1, RF_DIOMAPPING1_DIO0_01],
            41: [REG_RSSITHRESH, 220],
            46: [REG_SYNCCONFIG, (((RF_SYNC_ON | RF_SYNC_FIFOFILL_AUTO) | RF_SYNC_SIZE_2) | RF_SYNC_TOL_0)],
            47: [REG_SYNCVALUE1, 45],
            48: [REG_SYNCVALUE2, networkID],
            55: [REG_PACKETCONFIG1, ((((RF_PACKET1_FORMAT_VARIABLE | RF_PACKET1_DCFREE_OFF) | RF_PACKET1_CRC_ON) | RF_PACKET1_CRCAUTOCLEAR_ON) | RF_PACKET1_ADRSFILTERING_OFF)],
            56: [REG_PAYLOADLENGTH, 66],
            60: [REG_FIFOTHRESH, (RF_FIFOTHRESH_TXSTART_FIFONOTEMPTY | RF_FIFOTHRESH_VALUE)],
            61: [REG_PACKETCONFIG2, ((RF_PACKET2_RXRESTARTDELAY_2BITS | RF_PACKET2_AUTORXRESTART_ON) | RF_PACKET2_AES_OFF)],
            111: [REG_TESTDAGC, RF_DAGC_IMPROVED_LOWBETA0],
            0: [255, 0],
        }
        self.spi = spidev.SpiDev()
        self.spi.open(self.spiBus, self.spiDevice)
        self.spi.max_speed_hz = 4000000
        # Hardware reset pulse.
        GPIO.output(self.rstPin, GPIO.HIGH)
        time.sleep(0.1)
        GPIO.output(self.rstPin, GPIO.LOW)
        time.sleep(0.1)
        # SPI sanity check: write/read a scratch value until the radio echoes it.
        while (self.readReg(REG_SYNCVALUE1) != 170):
            self.writeReg(REG_SYNCVALUE1, 170)
        while (self.readReg(REG_SYNCVALUE1) != 85):
            self.writeReg(REG_SYNCVALUE1, 85)
        for value in self.CONFIG.values():
            self.writeReg(value[0], value[1])
        self.encrypt(0)
        self.setHighPower(self.isRFM69HW)
        while ((self.readReg(REG_IRQFLAGS1) & RF_IRQFLAGS1_MODEREADY) == 0):
            pass
        GPIO.remove_event_detect(self.intPin)
        GPIO.add_event_detect(self.intPin, GPIO.RISING, callback=self.interruptHandler)

    # Synthesizer resolution in Hz: FXOSC / 2**19 (was hard-coded as 61.).
    FSTEP = 61.03515625

    def setFrequency(self, freqHz):
        """Tune the carrier to *freqHz* (written as a 24-bit FRF value)."""
        freq = int(round((freqHz / self.FSTEP)))
        self.writeReg(REG_FRFMSB, (freq >> 16))
        self.writeReg(REG_FRFMID, (freq >> 8))
        self.writeReg(REG_FRFLSB, freq)

    def getFrequency(self):
        """Return the current carrier frequency in Hz."""
        freq = (((self.readReg(REG_FRFMSB) << 16) + (self.readReg(REG_FRFMID) << 8)) + self.readReg(REG_FRFLSB))
        return int(round((freq * self.FSTEP)))

    def setMode(self, newMode):
        """Switch operating mode (TX/RX/SYNTH/STANDBY/SLEEP); no-op for the
        current mode or unknown values.  Waits for mode-ready when leaving
        sleep."""
        if (newMode == self.mode):
            return
        if (newMode == RF69_MODE_TX):
            self.writeReg(REG_OPMODE, ((self.readReg(REG_OPMODE) & 227) | RF_OPMODE_TRANSMITTER))
            if self.isRFM69HW:
                self.setHighPowerRegs(True)
        elif (newMode == RF69_MODE_RX):
            self.writeReg(REG_OPMODE, ((self.readReg(REG_OPMODE) & 227) | RF_OPMODE_RECEIVER))
            if self.isRFM69HW:
                self.setHighPowerRegs(False)
        elif (newMode == RF69_MODE_SYNTH):
            self.writeReg(REG_OPMODE, ((self.readReg(REG_OPMODE) & 227) | RF_OPMODE_SYNTHESIZER))
        elif (newMode == RF69_MODE_STANDBY):
            self.writeReg(REG_OPMODE, ((self.readReg(REG_OPMODE) & 227) | RF_OPMODE_STANDBY))
        elif (newMode == RF69_MODE_SLEEP):
            self.writeReg(REG_OPMODE, ((self.readReg(REG_OPMODE) & 227) | RF_OPMODE_SLEEP))
        else:
            return
        # Waking from sleep: spin until the oscillator is ready.
        while ((self.mode == RF69_MODE_SLEEP) and ((self.readReg(REG_IRQFLAGS1) & RF_IRQFLAGS1_MODEREADY) == 0)):
            pass
        self.mode = newMode

    def sleep(self):
        """Put the radio into its lowest-power mode."""
        self.setMode(RF69_MODE_SLEEP)

    def setAddress(self, addr):
        """Set this node's address (used for hardware address filtering)."""
        self.address = addr
        self.writeReg(REG_NODEADRS, self.address)

    def setNetwork(self, networkID):
        """Set the network id (second sync byte)."""
        self.networkID = networkID
        self.writeReg(REG_SYNCVALUE2, networkID)

    def setPowerLevel(self, powerLevel):
        """Set TX output power, clamped to the 0-31 register range."""
        if (powerLevel > 31):
            powerLevel = 31
        self.powerLevel = powerLevel
        self.writeReg(REG_PALEVEL, ((self.readReg(REG_PALEVEL) & 224) | self.powerLevel))

    def canSend(self):
        """Return True when the channel is free to transmit (CSMA check)."""
        if (self.mode == RF69_MODE_STANDBY):
            self.receiveBegin()
            return True
        elif ((self.mode == RF69_MODE_RX) and (self.PAYLOADLEN == 0) and (self.readRSSI() < CSMA_LIMIT)):
            self.setMode(RF69_MODE_STANDBY)
            return True
        return False

    def send(self, toAddress, buff='', requestACK=False):
        """Transmit *buff* to *toAddress*, waiting up to the CSMA limit for a
        free channel."""
        self.writeReg(REG_PACKETCONFIG2, ((self.readReg(REG_PACKETCONFIG2) & 251) | RF_PACKET2_RXRESTART))
        now = time.time()
        while ((not self.canSend()) and ((time.time() - now) < RF69_CSMA_LIMIT_S)):
            self.receiveDone()
        self.sendFrame(toAddress, buff, requestACK, False)

    def sendWithRetry(self, toAddress, buff='', retries=3, retryWaitTime=10):
        """Send with ACK request; retry up to *retries* times, waiting
        *retryWaitTime* ms per attempt.  Returns True on ACK."""
        for i in range(0, retries):
            self.send(toAddress, buff, True)
            sentTime = time.time()
            while (((time.time() - sentTime) * 1000) < retryWaitTime):
                if self.ACKReceived(toAddress):
                    return True
                time.sleep(0.01)
        return False

    def ACKReceived(self, fromNodeID):
        """True when an ACK from *fromNodeID* (or broadcast) has arrived."""
        if self.receiveDone():
            return (((self.SENDERID == fromNodeID) or (fromNodeID == RF69_BROADCAST_ADDR)) and self.ACK_RECEIVED)
        return False

    def ACKRequested(self):
        """True when the last received packet asked for an ACK (not broadcast)."""
        return (self.ACK_REQUESTED and (self.TARGETID != RF69_BROADCAST_ADDR))

    def sendACK(self, toAddress=0, buff=''):
        """Acknowledge the last sender (or an explicit *toAddress*)."""
        toAddress = (toAddress if (toAddress > 0) else self.SENDERID)
        while (not self.canSend()):
            self.receiveDone()
        self.sendFrame(toAddress, buff, False, True)

    def sendFrame(self, toAddress, buff, requestACK, sendACK):
        """Low-level frame write: header + payload into the FIFO, then TX
        until PACKETSENT, finally back to RX."""
        self.setMode(RF69_MODE_STANDBY)
        while ((self.readReg(REG_IRQFLAGS1) & RF_IRQFLAGS1_MODEREADY) == 0):
            pass
        if (len(buff) > RF69_MAX_DATA_LEN):
            buff = buff[0:RF69_MAX_DATA_LEN]
        # Control byte: 0x80 = this frame IS an ACK, 0x40 = ACK requested.
        ack = 0
        if sendACK:
            ack = 128
        elif requestACK:
            ack = 64
        if isinstance(buff, str):
            self.spi.xfer2(([(REG_FIFO | 128), (len(buff) + 3), toAddress, self.address, ack] + [int(ord(i)) for i in list(buff)]))
        else:
            self.spi.xfer2(([(REG_FIFO | 128), (len(buff) + 3), toAddress, self.address, ack] + buff))
        self.DATASENT = False
        self.setMode(RF69_MODE_TX)
        while ((self.readReg(REG_IRQFLAGS2) & RF_IRQFLAGS2_PACKETSENT) == 0):
            pass
        self.setMode(RF69_MODE_RX)

    def interruptHandler(self, pin):
        """DIO0 rising-edge handler: drain a ready payload from the FIFO into
        DATA/SENDERID/RSSI, honouring address filtering."""
        self.intLock = True
        self.DATASENT = True
        if ((self.mode == RF69_MODE_RX) and (self.readReg(REG_IRQFLAGS2) & RF_IRQFLAGS2_PAYLOADREADY)):
            self.setMode(RF69_MODE_STANDBY)
            (self.PAYLOADLEN, self.TARGETID, self.SENDERID, CTLbyte) = self.spi.xfer2([(REG_FIFO & 127), 0, 0, 0, 0])[1:]
            if (self.PAYLOADLEN > 66):
                self.PAYLOADLEN = 66
            # Drop frames not addressed to us unless promiscuous.
            if (not (self.promiscuousMode or (self.TARGETID == self.address) or (self.TARGETID == RF69_BROADCAST_ADDR))):
                self.PAYLOADLEN = 0
                self.intLock = False
                return
            self.DATALEN = (self.PAYLOADLEN - 3)
            self.ACK_RECEIVED = (CTLbyte & 128)
            self.ACK_REQUESTED = (CTLbyte & 64)
            self.DATA = self.spi.xfer2(([(REG_FIFO & 127)] + [0 for i in range(0, self.DATALEN)]))[1:]
            self.RSSI = self.readRSSI()
        self.intLock = False

    def receiveBegin(self):
        """Reset reception state and put the radio into RX mode."""
        while self.intLock:
            time.sleep(0.1)
        self.DATALEN = 0
        self.SENDERID = 0
        self.TARGETID = 0
        self.PAYLOADLEN = 0
        self.ACK_REQUESTED = 0
        self.ACK_RECEIVED = 0
        self.RSSI = 0
        # Flush a stale payload left in the FIFO.
        if (self.readReg(REG_IRQFLAGS2) & RF_IRQFLAGS2_PAYLOADREADY):
            self.writeReg(REG_PACKETCONFIG2, ((self.readReg(REG_PACKETCONFIG2) & 251) | RF_PACKET2_RXRESTART))
        self.writeReg(REG_DIOMAPPING1, RF_DIOMAPPING1_DIO0_01)
        self.setMode(RF69_MODE_RX)

    def receiveDone(self):
        """True when a full payload is waiting; otherwise (re)arm reception."""
        if (((self.mode == RF69_MODE_RX) or (self.mode == RF69_MODE_STANDBY)) and (self.PAYLOADLEN > 0)):
            self.setMode(RF69_MODE_STANDBY)
            return True
        if (self.readReg(REG_IRQFLAGS1) & RF_IRQFLAGS1_TIMEOUT):
            self.writeReg(REG_PACKETCONFIG2, ((self.readReg(REG_PACKETCONFIG2) & 251) | RF_PACKET2_RXRESTART))
        elif (self.mode == RF69_MODE_RX):
            return False
        self.receiveBegin()
        return False

    def readRSSI(self, forceTrigger=False):
        """Return the received signal strength in dBm (register/-2)."""
        rssi = 0
        if forceTrigger:
            self.writeReg(REG_RSSICONFIG, RF_RSSI_START)
            while ((self.readReg(REG_RSSICONFIG) & RF_RSSI_DONE) == 0):
                pass
        rssi = (self.readReg(REG_RSSIVALUE) * (- 1))
        rssi = (rssi >> 1)
        return rssi

    def encrypt(self, key):
        """Enable AES with a 16-byte *key*, or disable it for any other value."""
        self.setMode(RF69_MODE_STANDBY)
        if ((key != 0) and (len(key) == 16)):
            self.spi.xfer(([(REG_AESKEY1 | 128)] + [int(ord(i)) for i in list(key)]))
            self.writeReg(REG_PACKETCONFIG2, ((self.readReg(REG_PACKETCONFIG2) & 254) | RF_PACKET2_AES_ON))
        else:
            self.writeReg(REG_PACKETCONFIG2, ((self.readReg(REG_PACKETCONFIG2) & 254) | RF_PACKET2_AES_OFF))

    def readReg(self, addr):
        """Read one radio register (MSB clear selects read)."""
        return self.spi.xfer([(addr & 127), 0])[1]

    def writeReg(self, addr, value):
        """Write one radio register (MSB set selects write)."""
        self.spi.xfer([(addr | 128), value])

    def promiscuous(self, onOff):
        """Toggle acceptance of packets addressed to other nodes."""
        self.promiscuousMode = onOff

    def setHighPower(self, onOff):
        """Configure the PA chain for the high-power RFM69HW (on) or the
        standard module (off)."""
        if onOff:
            self.writeReg(REG_OCP, RF_OCP_OFF)
            self.writeReg(REG_PALEVEL, (((self.readReg(REG_PALEVEL) & 31) | RF_PALEVEL_PA1_ON) | RF_PALEVEL_PA2_ON))
        else:
            self.writeReg(REG_OCP, RF_OCP_ON)
            # Fix: the original used a bare `powerLevel` name here (NameError,
            # always triggered by shutdown()); use the stored level instead.
            self.writeReg(REG_PALEVEL, (((RF_PALEVEL_PA0_ON | RF_PALEVEL_PA1_OFF) | RF_PALEVEL_PA2_OFF) | getattr(self, 'powerLevel', 31)))

    def setHighPowerRegs(self, onOff):
        """Set the TESTPA registers for +20 dBm bursts (TX only on HW parts)."""
        if onOff:
            self.writeReg(REG_TESTPA1, 93)
            self.writeReg(REG_TESTPA2, 124)
        else:
            self.writeReg(REG_TESTPA1, 85)
            self.writeReg(REG_TESTPA2, 112)

    def readAllRegs(self):
        """Return [[hex address, binary value], ...] for registers 1..79."""
        results = []
        for address in range(1, 80):
            results.append([str(hex(address)), str(bin(self.readReg(address)))])
        return results

    def readTemperature(self, calFactor):
        """Read the on-die temperature sensor, adjusted by the coarse
        coefficient plus the caller's *calFactor*."""
        self.setMode(RF69_MODE_STANDBY)
        self.writeReg(REG_TEMP1, RF_TEMP1_MEAS_START)
        while (self.readReg(REG_TEMP1) & RF_TEMP1_MEAS_RUNNING):
            pass
        return (((int((~ self.readReg(REG_TEMP2))) * (- 1)) + COURSE_TEMP_COEF) + calFactor)

    def rcCalibration(self):
        """Trigger RC oscillator calibration and wait for completion."""
        self.writeReg(REG_OSC1, RF_OSC1_RCCAL_START)
        while ((self.readReg(REG_OSC1) & RF_OSC1_RCCAL_DONE) == 0):
            pass

    def shutdown(self):
        """Drop to low power and release GPIO resources."""
        self.setHighPower(False)
        self.sleep()
        GPIO.cleanup()
class Align(GenericAlign):
    """Run a MUSCLE multiple-sequence alignment over the input FASTA file."""

    def __init__(self, input):
        super(Align, self).__init__(input)

    def run_alignment(self, clean=True):
        """Align ``self.input`` with MUSCLE into a temp file and load the
        result into ``self.alignment`` (annotating each record as DNA).

        When *clean* is true the temporary alignment file is removed via
        ``self._clean``.
        """
        (fd, aln) = tempfile.mkstemp(suffix='.muscle')
        os.close(fd)
        cmd = [get_user_path('binaries', 'muscle'), '-in', self.input, '-out', aln]
        proc = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
        # Fix: mode 'rU' was removed in Python 3.11, and the original never
        # closed the handle; a context manager handles both.
        with open(aln) as handle:
            self.alignment = AlignIO.read(handle, 'fasta')
        for seq in self.alignment:
            seq.annotations = {'molecule_type': 'DNA'}
        if clean:
            self._clean(aln)
class ReqOwner(ReqTagGeneric):
    """Handles the mandatory 'Owner' tag of a requirement record."""

    def __init__(self, config):
        ReqTagGeneric.__init__(self, config, 'Owner', {InputModuleTypes.ctstag, InputModuleTypes.reqtag, InputModuleTypes.testcase})

    def rewrite(self, rid, req):
        """Validate the Owner value against the configured stakeholders,
        consume the tag from *req* and return it as a (tag, value) pair.

        Raises RMTException(11) for an owner that is neither 'flonatel' nor a
        configured stakeholder.
        """
        self.check_mandatory_tag(rid, req, 10)
        tag = self.get_tag()
        owner = req[tag].get_content()
        stakeholders = self.get_config().get_value('requirements.stakeholders')
        if (owner != 'flonatel') and (owner not in stakeholders):
            raise RMTException(11, ("%s: invalid owner '%s'. Must be one of the stakeholder '%s'" % (rid, owner, stakeholders)))
        del req[tag]
        return (tag, owner)
def test_equivalent_sources_points_depth(points, coordinates_small, data_small):
    """Source placement for 'constant' and 'relative' depth types, plus the
    None result for an invalid depth_type."""
    easting, northing, upward = coordinates_small[:]
    # constant: every source sits on the flat surface z = -depth
    eqs = EquivalentSources(depth=1300.0, depth_type='constant')
    eqs.fit(coordinates_small, data_small)
    flat_surface = (-1300.0) * np.ones_like(easting)
    npt.assert_allclose(vdb.n_1d_arrays((easting, northing, flat_surface), n=3), eqs.points_)
    # relative: each source is shifted `depth` below its datum point
    eqs = EquivalentSources(depth=1300.0, depth_type='relative')
    eqs.fit(coordinates_small, data_small)
    npt.assert_allclose(vdb.n_1d_arrays((easting, northing, (upward - 1300.0)), n=3), eqs.points_)
    # an unrecognized depth_type makes _build_points return None
    eqs = EquivalentSources(depth=300, depth_type='constant')
    eqs.depth_type = 'blabla'
    built = eqs._build_points(vd.grid_coordinates(region=((-1), 1, (-1), 1), spacing=0.25, extra_coords=1))
    assert built is None
class TestRandomRecDataModule(testslide.TestCase):
    """Checks that RandomRecDataModule's batch stream is reproducible exactly
    when a manual seed is supplied."""

    def test_manual_seed_generator(self) -> None:
        # Two modules built with the same seed must emit identical batches.
        seeded_a = iter(RandomRecDataModule(manual_seed=353434, min_ids_per_features=2).init_loader)
        seeded_b = iter(RandomRecDataModule(manual_seed=353434, min_ids_per_features=2).init_loader)
        for _ in range(10):
            left, right = next(seeded_a), next(seeded_b)
            self.assertTrue(torch.equal(left.dense_features, right.dense_features))
            self.assertTrue(torch.equal(left.sparse_features.values(), right.sparse_features.values()))
            self.assertTrue(torch.equal(left.sparse_features.offsets(), right.sparse_features.offsets()))
            self.assertTrue(torch.equal(left.labels, right.labels))

    def test_no_manual_seed_generator(self) -> None:
        # Without a seed, values must diverge between two modules; offsets are
        # still expected to agree batch-for-batch.
        unseeded_a = iter(RandomRecDataModule(min_ids_per_features=2).init_loader)
        unseeded_b = iter(RandomRecDataModule(min_ids_per_features=2).init_loader)
        for _ in range(10):
            left, right = next(unseeded_a), next(unseeded_b)
            self.assertFalse(torch.equal(left.dense_features, right.dense_features))
            self.assertFalse(torch.equal(left.sparse_features.values(), right.sparse_features.values()))
            self.assertTrue(torch.equal(left.sparse_features.offsets(), right.sparse_features.offsets()))
# NOTE(review): in the original these two `_converter(...)` lines were bare
# call statements, which register nothing — almost certainly `@` decorator
# application stripped during extraction; restored here.  Confirm the real
# registrar name against the converter registry in this module.
@_converter(torch.ops.aten.split_with_sizes.default)
@_converter(torch.ops.aten.split.Tensor)
def aten_ops_split(target: Target, args: Tuple[(Argument, ...)], kwargs: Dict[(str, Argument)], name: str) -> ConverterOutput:
    """Convert aten split / split_with_sizes nodes to the AIT `split` op.

    args[0] is the input tensor, args[1] the split size (or per-chunk sizes),
    and args[2] (optional, default 0) the split dimension.
    """
    input_val = args[0]
    if (not isinstance(input_val, AITTensor)):
        raise ValueError(f'Non-tensor inputs for {name}: {input_val}')
    split_size_or_sections = args[1]
    dim = (args[2] if (len(args) > 2) else 0)
    if (not isinstance(dim, int)):
        raise ValueError(f'Unexpected value for dim in {name}: {dim}')
    return split()(input_val, split_size_or_sections, dim)
class Config():
    """Load/save application settings backed by an INI file."""

    # Template of built-in defaults.  Each instance copies it in __init__ so
    # that loading one config file never leaks values into other instances.
    default = {'savepath': 'download', 'runafterdownload': '', 'libraryautocheck': 'true', 'autocheck_interval': '24', 'autosave': '5', 'errorlog': 'false', 'lastcheckupdate': '0', 'selectall': 'true', 'mission_conflict_action': 'update'}

    def __init__(self, path):
        self.path = expanduser(path)
        # Fix: load() used to update the shared CLASS-level `default` dict in
        # place, so values read from one instance's file polluted every other
        # Config instance for the lifetime of the process.
        self.default = dict(Config.default)
        self.config = CaseSensitiveConfigParser(interpolation=None)
        self.load()

    def load(self):
        """Read self.path and merge its values over the defaults."""
        self.config.read(self.path, 'utf-8-sig')
        if ('DEFAULT' not in self.config):
            self.config['DEFAULT'] = {}
        # Migrate the legacy [ComicCrawler] section into DEFAULT.
        if ('ComicCrawler' in self.config):
            self.config['DEFAULT'].update(self.config['ComicCrawler'])
            del self.config['ComicCrawler']
        self.default.update(self.config['DEFAULT'])
        self.config['DEFAULT'].update(self.default)
        self.config['DEFAULT']['savepath'] = normpath(self.config['DEFAULT']['savepath'])

    def save(self):
        """Write the configuration to self.path, creating its directory."""
        if (not isdir(dirname(self.path))):
            makedirs(dirname(self.path))
        with open(self.path, 'w', encoding='utf-8') as f:
            self.config.write(f)
class LocalWorkerManager(WorkerManager):
    def __init__(self, register_func: RegisterFunc=None, deregister_func: DeregisterFunc=None, send_heartbeat_func: SendHeartbeatFunc=None, model_registry: ModelRegistry=None, host: str=None, port: int=None) -> None:
        """Create an in-process worker manager.

        register_func / deregister_func / send_heartbeat_func are optional
        async callbacks invoked with this manager's own WorkerRunData on
        start/stop/heartbeat.
        """
        # worker_key -> list of registered instances for that model+type.
        self.workers: Dict[(str, List[WorkerRunData])] = dict()
        # Pool used by run_blocking_func to off-load synchronous work.
        self.executor = ThreadPoolExecutor(max_workers=(os.cpu_count() * 5))
        self.register_func = register_func
        self.deregister_func = deregister_func
        self.send_heartbeat_func = send_heartbeat_func
        self.model_registry = model_registry
        self.host = host
        self.port = port
        self.start_listeners = []
        # Pseudo run-data describing the manager itself (no worker attached),
        # used for registration and heartbeats.
        self.run_data = WorkerRunData(host=self.host, port=self.port, worker_key=self._worker_key(WORKER_MANAGER_SERVICE_TYPE, WORKER_MANAGER_SERVICE_NAME), worker=None, worker_params=None, model_params=None, stop_event=asyncio.Event(), semaphore=None, command_args=None)
def _worker_key(self, worker_type: str, model_name: str) -> str:
return WorkerType.to_worker_key(model_name, worker_type)
async def run_blocking_func(self, func, *args):
if asyncio.iscoroutinefunction(func):
raise ValueError(f'The function {func} is not blocking function')
loop = asyncio.get_event_loop()
return (await loop.run_in_executor(self.executor, func, *args))
    async def start(self):
        """Start every registered worker, register with the controller, begin
        heartbeating, then fire the after-start listeners.

        Raises Exception when starting the workers reports failure.
        """
        if (len(self.workers) > 0):
            out = (await self._start_all_worker(apply_req=None))
            if (not out.success):
                raise Exception(out.message)
        if self.register_func:
            (await self.register_func(self.run_data))
        # Heartbeat runs as a fire-and-forget task every 20 seconds.
        if self.send_heartbeat_func:
            asyncio.create_task(_async_heartbeat_sender(self.run_data, 20, self.send_heartbeat_func))
        # Listeners may be sync or async callables.
        for listener in self.start_listeners:
            if asyncio.iscoroutinefunction(listener):
                (await listener(self))
            else:
                listener(self)
    async def stop(self, ignore_exception: bool=False):
        """Stop all workers and deregister this manager.

        With *ignore_exception* set, deregistration errors are logged and
        swallowed; otherwise a failed worker stop raises.
        """
        if (not self.run_data.stop_event.is_set()):
            logger.info('Stop all workers')
            # NOTE(review): this clear()s an event the guard just proved is
            # NOT set — a no-op.  It looks like stop_event.set() was intended
            # to signal shutdown; confirm against the stop_event consumers.
            self.run_data.stop_event.clear()
            stop_tasks = []
            stop_tasks.append(self._stop_all_worker(apply_req=None, ignore_exception=ignore_exception))
            if self.deregister_func:
                if ignore_exception:
                    # Wrap the deregister callback so its failure can't abort
                    # the best-effort shutdown path.
                    async def safe_deregister_func(run_data):
                        try:
                            (await self.deregister_func(run_data))
                        except Exception as e:
                            logger.warning(f'Stop worker, ignored exception from deregister_func: {e}')
                    stop_tasks.append(safe_deregister_func(self.run_data))
                else:
                    stop_tasks.append(self.deregister_func(self.run_data))
            results = (await asyncio.gather(*stop_tasks))
            # results[0] is the worker-stop outcome.
            if ((not results[0].success) and (not ignore_exception)):
                raise Exception(results[0].message)
def after_start(self, listener: Callable[(['WorkerManager'], None)]):
self.start_listeners.append(listener)
    def add_worker(self, worker: ModelWorker, worker_params: ModelWorkerParameters, command_args: List[str]=None) -> bool:
        """Register *worker* under its (type, model) key.

        Returns True when the instance was added, False when an instance for
        the same key already exists (the worker is still load_worker()ed
        either way).
        """
        if (not command_args):
            command_args = sys.argv[1:]
        worker.load_worker(**asdict(worker_params))
        # Fill in / normalize the worker type before building the key.
        if (not worker_params.worker_type):
            worker_params.worker_type = worker.worker_type()
        if isinstance(worker_params.worker_type, WorkerType):
            worker_params.worker_type = worker_params.worker_type.value
        worker_key = self._worker_key(worker_params.worker_type, worker_params.model_name)
        model_params = worker.parse_parameters(command_args=command_args)
        worker_run_data = WorkerRunData(host=self.host, port=self.port, worker_key=worker_key, worker=worker, worker_params=worker_params, model_params=model_params, stop_event=asyncio.Event(), semaphore=asyncio.Semaphore(worker_params.limit_model_concurrency), command_args=command_args)
        instances = self.workers.get(worker_key)
        if (not instances):
            # First instance for this key: create the list.
            instances = [worker_run_data]
            self.workers[worker_key] = instances
            logger.info(f'Init empty instances list for {worker_key}')
            return True
        else:
            # Only one instance per key is supported here.
            logger.warning(f'Instance {worker_key} exist')
            return False
def _remove_worker(self, worker_params: ModelWorkerParameters) -> None:
worker_key = self._worker_key(worker_params.worker_type, worker_params.model_name)
instances = self.workers.get(worker_key)
if instances:
del self.workers[worker_key]
    async def model_startup(self, startup_req: WorkerStartupRequest):
        """Build, register and start a worker from a startup request.

        On any failure the partially-registered worker is removed again and
        the error re-raised.
        """
        model_name = startup_req.model
        worker_type = startup_req.worker_type
        params = startup_req.params
        logger.debug(f'start model, model name {model_name}, worker type {worker_type}, params: {params}')
        worker_params: ModelWorkerParameters = ModelWorkerParameters.from_dict(params, ignore_extra_fields=True)
        if (not worker_params.model_name):
            worker_params.model_name = model_name
        assert (model_name == worker_params.model_name)
        worker = _build_worker(worker_params)
        command_args = _dict_to_command_args(params)
        # add_worker is synchronous (it loads the model) — run on the pool.
        success = (await self.run_blocking_func(self.add_worker, worker, worker_params, command_args))
        if (not success):
            msg = f'Add worker {model_name}{worker_type}, worker instances is exist'
            logger.warning(f'{msg}, worker_params: {worker_params}')
            self._remove_worker(worker_params)
            raise Exception(msg)
        # Validate the worker type only after registration so cleanup is uniform.
        supported_types = WorkerType.values()
        if (worker_type not in supported_types):
            self._remove_worker(worker_params)
            raise ValueError(f'Unsupported worker type: {worker_type}, now supported worker type: {supported_types}')
        start_apply_req = WorkerApplyRequest(model=model_name, apply_type=WorkerApplyType.START, worker_type=worker_type)
        out: WorkerApplyOutput = None
        try:
            out = (await self.worker_apply(start_apply_req))
        except Exception as e:
            # Roll back the registration before propagating.
            self._remove_worker(worker_params)
            raise e
        if (not out.success):
            self._remove_worker(worker_params)
            raise Exception(out.message)
async def model_shutdown(self, shutdown_req: WorkerStartupRequest):
logger.info(f'Begin shutdown model, shutdown_req: {shutdown_req}')
apply_req = WorkerApplyRequest(model=shutdown_req.model, apply_type=WorkerApplyType.STOP, worker_type=shutdown_req.worker_type)
out = (await self._stop_all_worker(apply_req))
if (not out.success):
raise Exception(out.message)
async def supported_models(self) -> List[WorkerSupportedModel]:
models = (await self.run_blocking_func(list_supported_models))
return [WorkerSupportedModel(host=self.host, port=self.port, models=models)]
async def get_model_instances(self, worker_type: str, model_name: str, healthy_only: bool=True) -> List[WorkerRunData]:
return self.sync_get_model_instances(worker_type, model_name, healthy_only)
async def get_all_model_instances(self, worker_type: str, healthy_only: bool=True) -> List[WorkerRunData]:
instances = list(itertools.chain(*self.workers.values()))
result = []
for instance in instances:
(name, wt) = WorkerType.parse_worker_key(instance.worker_key)
if ((wt != worker_type) or (healthy_only and instance.stopped)):
continue
result.append(instance)
return result
def sync_get_model_instances(self, worker_type: str, model_name: str, healthy_only: bool=True) -> List[WorkerRunData]:
worker_key = self._worker_key(worker_type, model_name)
return self.workers.get(worker_key, [])
def _simple_select(self, worker_type: str, model_name: str, worker_instances: List[WorkerRunData]) -> WorkerRunData:
if (not worker_instances):
raise Exception(f'Cound not found worker instances for model name {model_name} and worker type {worker_type}')
worker_run_data = random.choice(worker_instances)
return worker_run_data
async def select_one_instance(self, worker_type: str, model_name: str, healthy_only: bool=True) -> WorkerRunData:
worker_instances = (await self.get_model_instances(worker_type, model_name, healthy_only))
return self._simple_select(worker_type, model_name, worker_instances)
def sync_select_one_instance(self, worker_type: str, model_name: str, healthy_only: bool=True) -> WorkerRunData:
worker_instances = self.sync_get_model_instances(worker_type, model_name, healthy_only)
return self._simple_select(worker_type, model_name, worker_instances)
async def _get_model(self, params: Dict, worker_type: str='llm') -> WorkerRunData:
model = params.get('model')
if (not model):
raise Exception('Model name count not be empty')
return (await self.select_one_instance(worker_type, model, healthy_only=True))
def _sync_get_model(self, params: Dict, worker_type: str='llm') -> WorkerRunData:
model = params.get('model')
if (not model):
raise Exception('Model name count not be empty')
return self.sync_select_one_instance(worker_type, model, healthy_only=True)
async def generate_stream(self, params: Dict, async_wrapper=None, **kwargs) -> Iterator[ModelOutput]:
with root_tracer.start_span('WorkerManager.generate_stream', params.get('span_id')) as span:
params['span_id'] = span.span_id
try:
worker_run_data = (await self._get_model(params))
except Exception as e:
(yield ModelOutput(text=f'**LLMServer Generate Error, Please CheckErrorInfo.**: {e}', error_code=1))
return
async with worker_run_data.semaphore:
if worker_run_data.worker.support_async():
async for outout in worker_run_data.worker.async_generate_stream(params):
(yield outout)
else:
if (not async_wrapper):
from starlette.concurrency import iterate_in_threadpool
async_wrapper = iterate_in_threadpool
async for output in async_wrapper(worker_run_data.worker.generate_stream(params)):
(yield output)
    async def generate(self, params: Dict) -> ModelOutput:
        """Run one non-streaming generation for *params*.

        Worker-resolution failures are returned as an error ModelOutput
        rather than raised.
        """
        with root_tracer.start_span('WorkerManager.generate', params.get('span_id')) as span:
            params['span_id'] = span.span_id
            try:
                worker_run_data = (await self._get_model(params))
            except Exception as e:
                return ModelOutput(text=f'**LLMServer Generate Error, Please CheckErrorInfo.**: {e}', error_code=1)
            # Respect the worker's concurrency limit.
            async with worker_run_data.semaphore:
                if worker_run_data.worker.support_async():
                    return (await worker_run_data.worker.async_generate(params))
                else:
                    return (await self.run_blocking_func(worker_run_data.worker.generate, params))
async def embeddings(self, params: Dict) -> List[List[float]]:
    """Compute embeddings for ``params`` on one text2vec worker.

    Runs the worker's native async path when supported, otherwise
    executes the blocking call in the executor. Selection errors
    propagate to the caller; the original ``except Exception as e:
    raise e`` wrapper was a no-op and has been removed.
    """
    with root_tracer.start_span('WorkerManager.embeddings', params.get('span_id')) as span:
        params['span_id'] = span.span_id
        worker_run_data = await self._get_model(params, worker_type='text2vec')
        async with worker_run_data.semaphore:
            if worker_run_data.worker.support_async():
                return await worker_run_data.worker.async_embeddings(params)
            return await self.run_blocking_func(worker_run_data.worker.embeddings, params)
def sync_embeddings(self, params: Dict) -> List[List[float]]:
    """Blocking embeddings call; bypasses the semaphore and tracing."""
    run_data = self._sync_get_model(params, worker_type='text2vec')
    return run_data.worker.embeddings(params)
async def count_token(self, params: Dict) -> int:
    """Count the tokens of ``params['prompt']`` using a selected worker."""
    with root_tracer.start_span('WorkerManager.count_token', params.get('span_id')) as span:
        params['span_id'] = span.span_id
        try:
            run_data = await self._get_model(params)
        except Exception as e:
            raise e
        prompt = params.get('prompt')
        async with run_data.semaphore:
            worker = run_data.worker
            if worker.support_async():
                return await worker.async_count_token(prompt)
            return await self.run_blocking_func(worker.count_token, prompt)
async def get_model_metadata(self, params: Dict) -> ModelMetadata:
    """Fetch model metadata from one worker serving ``params['model']``."""
    with root_tracer.start_span('WorkerManager.get_model_metadata', params.get('span_id')) as span:
        params['span_id'] = span.span_id
        try:
            worker_run_data = (await self._get_model(params))
        except Exception as e:
            # Selection errors propagate unchanged to the caller.
            raise e
        async with worker_run_data.semaphore:
            if worker_run_data.worker.support_async():
                return (await worker_run_data.worker.async_get_model_metadata(params))
            else:
                return (await self.run_blocking_func(worker_run_data.worker.get_model_metadata, params))
async def worker_apply(self, apply_req: WorkerApplyRequest) -> WorkerApplyOutput:
    """Dispatch an apply request to the matching bulk worker operation.

    Raises:
        ValueError: for an unrecognised ``apply_type``.
    """
    handlers = {
        WorkerApplyType.START: self._start_all_worker,
        WorkerApplyType.STOP: self._stop_all_worker,
        WorkerApplyType.RESTART: self._restart_all_worker,
        WorkerApplyType.UPDATE_PARAMS: self._update_all_worker_params,
    }
    apply_func: Callable[([WorkerApplyRequest], Awaitable[str])] = handlers.get(apply_req.apply_type)
    if apply_func is None:
        raise ValueError(f'Unsupported apply type {apply_req.apply_type}')
    return await apply_func(apply_req)
async def parameter_descriptions(self, worker_type: str, model_name: str) -> List[ParameterDescription]:
    """Return the parameter descriptions exposed by one instance of a model.

    Raises:
        Exception: when no instance serves the given model/worker type.
    """
    worker_instances = await self.get_model_instances(worker_type, model_name)
    if not worker_instances:
        # Fixed garbled message ("Not worker instances" -> "No worker instances found").
        raise Exception(f'No worker instances found for model name {model_name} worker type {worker_type}')
    worker_run_data = worker_instances[0]
    return worker_run_data.worker.parameter_descriptions()
async def _apply_worker(self, apply_req: WorkerApplyRequest, apply_func: ApplyFunction) -> None:
    """Run ``apply_func`` concurrently over the targeted worker instances.

    With a request, only instances of that model/worker type are targeted
    (including unhealthy ones); without one, every registered worker is.
    """
    logger.info(f'Apply req: {apply_req}, apply_func: {apply_func}')
    if not apply_req:
        targets = list(itertools.chain(*self.workers.values()))
        logger.info(f'Apply to all workers')
    else:
        worker_type = apply_req.worker_type.value
        model_name = apply_req.model
        targets = await self.get_model_instances(worker_type, model_name, healthy_only=False)
        if not targets:
            raise Exception(f'No worker instance found for the model {model_name} worker type {worker_type}')
    return await asyncio.gather(*(apply_func(worker) for worker in targets))
async def _start_all_worker(self, apply_req: WorkerApplyRequest) -> WorkerApplyOutput:
    """Start every targeted worker and reduce the per-worker results.

    Per worker: load the model (blocking, run in the executor), clear its
    stop event, register it when configured, and spawn a heartbeat task.
    Failures are captured in the returned WorkerApplyOutput, not raised.
    """
    start_time = time.time()
    logger.info(f'Begin start all worker, apply_req: {apply_req}')
    async def _start_worker(worker_run_data: WorkerRunData):
        _start_time = time.time()
        info = worker_run_data._to_print_key()
        out = WorkerApplyOutput('')
        try:
            # Model loading is blocking; hand it to the executor.
            (await self.run_blocking_func(worker_run_data.worker.start, worker_run_data.model_params, worker_run_data.command_args))
            worker_run_data.stop_event.clear()
            if (worker_run_data.worker_params.register and self.register_func):
                (await self.register_func(worker_run_data))
            if (worker_run_data.worker_params.send_heartbeat and self.send_heartbeat_func):
                # Fire-and-forget heartbeat loop; terminated via stop_event.
                asyncio.create_task(_async_heartbeat_sender(worker_run_data, worker_run_data.worker_params.heartbeat_interval, self.send_heartbeat_func))
            out.message = f'{info} start successfully'
        except Exception as e:
            out.success = False
            out.message = f'{info} start failed, {str(e)}'
        finally:
            out.timecost = (time.time() - _start_time)
        return out
    outs = (await self._apply_worker(apply_req, _start_worker))
    out = WorkerApplyOutput.reduce(outs)
    out.timecost = (time.time() - start_time)
    return out
async def _stop_all_worker(self, apply_req: WorkerApplyRequest, ignore_exception: bool=False) -> WorkerApplyOutput:
    """Stop every targeted worker, optionally tolerating deregister errors.

    Args:
        apply_req: scoping request (falsy means all workers).
        ignore_exception: when True, failures raised by ``deregister_func``
            are logged and swallowed so a subsequent restart can proceed.
    """
    start_time = time.time()
    async def _stop_worker(worker_run_data: WorkerRunData):
        _start_time = time.time()
        info = worker_run_data._to_print_key()
        out = WorkerApplyOutput('')
        try:
            (await self.run_blocking_func(worker_run_data.worker.stop))
            # Signal the heartbeat loop to exit, then wait for it to finish.
            worker_run_data.stop_event.set()
            if worker_run_data._heartbeat_future:
                worker_run_data._heartbeat_future.result()
                worker_run_data._heartbeat_future = None
            if (worker_run_data.worker_params.register and self.register_func and self.deregister_func):
                _deregister_func = self.deregister_func
                if ignore_exception:
                    # Best-effort wrapper used during restarts.
                    async def safe_deregister_func(run_data):
                        try:
                            (await self.deregister_func(run_data))
                        except Exception as e:
                            logger.warning(f'Stop worker, ignored exception from deregister_func: {e}')
                    _deregister_func = safe_deregister_func
                (await _deregister_func(worker_run_data))
            self._remove_worker(worker_run_data.worker_params)
            out.message = f'{info} stop successfully'
        except Exception as e:
            out.success = False
            out.message = f'{info} stop failed, {str(e)}'
        finally:
            out.timecost = (time.time() - _start_time)
        return out
    outs = (await self._apply_worker(apply_req, _stop_worker))
    out = WorkerApplyOutput.reduce(outs)
    out.timecost = (time.time() - start_time)
    return out
async def _restart_all_worker(self, apply_req: WorkerApplyRequest) -> WorkerApplyOutput:
    """Stop all targeted workers (ignoring deregister errors), then start them.

    If the stop phase fails its output is returned without attempting a start.
    """
    stop_out = await self._stop_all_worker(apply_req, ignore_exception=True)
    if not stop_out.success:
        return stop_out
    return await self._start_all_worker(apply_req)
async def _update_all_worker_params(self, apply_req: WorkerApplyRequest) -> WorkerApplyOutput:
    """Apply new model params to targeted workers; restart them if changed.

    The ``update_params`` closure flips ``need_restart`` whenever any
    worker's params actually change, triggering a full stop/start cycle.
    """
    start_time = time.time()
    need_restart = False
    async def update_params(worker_run_data: WorkerRunData):
        nonlocal need_restart
        new_params = apply_req.params
        if (not new_params):
            return
        # update_from returns True when something actually changed.
        if worker_run_data.model_params.update_from(new_params):
            need_restart = True
    (await self._apply_worker(apply_req, update_params))
    message = f'Update worker params successfully'
    timecost = (time.time() - start_time)
    if need_restart:
        logger.info('Model params update successfully, begin restart worker')
        (await self._restart_all_worker(apply_req))
        timecost = (time.time() - start_time)
        message = f'Update worker params and restart successfully'
    return WorkerApplyOutput(message=message, timecost=timecost)
def aggregate_spans(spans):
    """Aggregate raw trace spans into a nested breakdown structure.

    Operation spans contribute their wall-clock duration (seconds) to a
    per-operation ``breakdown``; workflow/node/task reference spans are
    aggregated recursively via ``aggregate_reference_span`` and their
    breakdowns folded into this level's totals.

    Returns:
        dict with a ``breakdown`` mapping plus optional ``task_attempts``,
        ``nodes`` and ``workflows`` sub-dicts when present.
    """
    breakdown = {}
    tasks = {}
    nodes = {}
    workflows = {}
    for span in spans:
        id_type = span.WhichOneof('id')
        if id_type == 'operation_id':
            operation_id = span.operation_id
            # Convert protobuf Timestamp (seconds + nanos) to a datetime.
            # BUG FIX: the nanos divisor was a garbled ``.0`` (i.e. 0.0),
            # which raised ZeroDivisionError; nanoseconds / 1e9 is correct.
            start_time = datetime.fromtimestamp(span.start_time.seconds + (span.start_time.nanos / 1e9))
            end_time = datetime.fromtimestamp(span.end_time.seconds + (span.end_time.nanos / 1e9))
            total_time = (end_time - start_time).total_seconds()
            if operation_id in breakdown:
                breakdown[operation_id] += total_time
            else:
                breakdown[operation_id] = total_time
        else:
            (id, underlying_span) = aggregate_reference_span(span)
            if id_type == 'workflow_id':
                workflows[id] = underlying_span
            elif id_type == 'node_id':
                nodes[id] = underlying_span
            elif id_type == 'task_id':
                tasks[id] = underlying_span
            # Fold the child's per-operation totals into this level.
            for (operation_id, total_time) in underlying_span['breakdown'].items():
                if operation_id in breakdown:
                    breakdown[operation_id] += total_time
                else:
                    breakdown[operation_id] = total_time
    span = {'breakdown': breakdown}
    if len(tasks) > 0:
        span['task_attempts'] = tasks
    if len(nodes) > 0:
        span['nodes'] = nodes
    if len(workflows) > 0:
        span['workflows'] = workflows
    return span
class Controller(lg.Node):
    """LabGraph node: waits for a keys message, then publishes one
    DisplayMessage per second, cycling through the received keys.

    NOTE(review): the original contained bare ``(KEYS_TOPIC)`` and
    ``(DISPLAY_TOPIC)`` lines — stripped decorators. Restored as the
    standard LabGraph subscriber/publisher decorators; confirm against
    the original source.
    """
    KEYS_TOPIC = lg.Topic(KeysMessage)
    DISPLAY_TOPIC = lg.Topic(DisplayMessage)

    def setup(self) -> None:
        self._keys = None

    @lg.subscriber(KEYS_TOPIC)
    def set_keys(self, message: KeysMessage) -> None:
        self._keys = message.keys

    @lg.publisher(DISPLAY_TOPIC)
    async def control(self) -> lg.AsyncPublisher:
        # Wait until at least one keys message has arrived.
        while self._keys is None:
            await asyncio.sleep(0.1)
        for i in range(10):
            key = self._keys[i % len(self._keys)]
            yield self.DISPLAY_TOPIC, DisplayMessage(key)
            await asyncio.sleep(1.0)
        # Signal a clean shutdown of the graph after ten messages.
        raise lg.NormalTermination()
def test_deepcopy_attributes():
    """providers.deepcopy must clone attribute providers, not share them."""
    attr_list = providers.Factory(list)
    attr_dict = providers.Factory(dict)
    original = providers.Factory(Example)
    original.add_attributes(a1=attr_list, a2=attr_dict)

    clone = providers.deepcopy(original)
    clone_list = clone.attributes['a1']
    clone_dict = clone.attributes['a2']

    assert original.attributes != clone.attributes
    for source, copied in ((attr_list, clone_list), (attr_dict, clone_dict)):
        assert source.cls is copied.cls
        assert source is not copied
class LLMCacheClient(CacheClient[(LLMCacheKeyData, LLMCacheValueData)]):
    """Typed cache client for LLM requests, backed by a CacheManager."""

    def __init__(self, cache_manager: CacheManager) -> None:
        super().__init__()
        self._cache_manager: CacheManager = cache_manager

    async def get(self, key: LLMCacheKey, cache_config: Optional[CacheConfig]=None) -> Optional[LLMCacheValue]:
        """Look up a cached LLM response; ``None`` when absent."""
        return await self._cache_manager.get(key, LLMCacheValue, cache_config)

    async def set(self, key: LLMCacheKey, value: LLMCacheValue, cache_config: Optional[CacheConfig]=None) -> None:
        """Store ``value`` under ``key``."""
        return await self._cache_manager.set(key, value, cache_config)

    async def exists(self, key: LLMCacheKey, cache_config: Optional[CacheConfig]=None) -> bool:
        """True when ``key`` currently resolves to a cached value."""
        return (await self.get(key, cache_config)) is not None

    def new_key(self, **kwargs) -> LLMCacheKey:
        """Build a cache key wired to the manager's serializer."""
        return self._with_serializer(LLMCacheKey(**kwargs))

    def new_value(self, **kwargs) -> LLMCacheValue:
        """Build a cache value wired to the manager's serializer."""
        return self._with_serializer(LLMCacheValue(**kwargs))

    def _with_serializer(self, obj):
        # Keys and values both share the cache manager's serializer.
        obj.set_serializer(self._cache_manager.serializer)
        return obj
def perm(accessing_obj, accessed_obj, *args, **kwargs):
    """Evennia-style lock function: check a permission on the accessor.

    Usage in a lock string: ``perm(<permission>)``. Permissions found in
    ``_PERMISSION_HIERARCHY`` are hierarchical — holding that level or a
    higher one passes (plural aliases like "Builders" are accepted).
    Other permissions require exact membership. When an account puppets
    the object, account permissions take precedence; a quelled account
    is limited to the weaker of its account/object levels.

    ``kwargs['_greater_than']=True`` switches to a strict ``>`` check.
    Returns False on any malformed input.
    """
    try:
        permission = args[0].lower()
        perms_object = accessing_obj.permissions.all()
    except (AttributeError, IndexError):
        return False
    gtmode = kwargs.pop('_greater_than', False)
    is_quell = False
    # Resolve the account behind a puppeted in-game object, if any.
    account = (utils.inherits_from(accessing_obj, 'evennia.objects.objects.DefaultObject') and accessing_obj.account)
    perms_account = []
    if account:
        perms_account = account.permissions.all()
        is_quell = account.attributes.get('_quell')
    hpos_target = None
    if (permission in _PERMISSION_HIERARCHY):
        hpos_target = _PERMISSION_HIERARCHY.index(permission)
    elif (permission.endswith('s') and (permission[:(- 1)] in _PERMISSION_HIERARCHY)):
        # Accept plural aliases, e.g. "Builders" for "Builder".
        hpos_target = _PERMISSION_HIERARCHY.index(permission[:(- 1)])
    if (hpos_target is not None):
        # Hierarchical check: find the highest level the accessor holds.
        hpos_account = (- 1)
        hpos_object = (- 1)
        if account:
            perms_account_single = [(p[:(- 1)] if p.endswith('s') else p) for p in perms_account]
            hpos_account = [hpos for (hpos, hperm) in enumerate(_PERMISSION_HIERARCHY) if (hperm in perms_account_single)]
            hpos_account = (hpos_account[(- 1)] if hpos_account else (- 1))
        if ((not account) or is_quell):
            perms_object_single = [(p[:(- 1)] if p.endswith('s') else p) for p in perms_object]
            hpos_object = [hpos for (hpos, hperm) in enumerate(_PERMISSION_HIERARCHY) if (hperm in perms_object_single)]
            hpos_object = (hpos_object[(- 1)] if hpos_object else (- 1))
        if (account and is_quell):
            # Quelling: limited by the weaker of the two levels.
            if gtmode:
                return (hpos_target < min(hpos_account, hpos_object))
            else:
                return (hpos_target <= min(hpos_account, hpos_object))
        elif account:
            if gtmode:
                return (hpos_target < hpos_account)
            else:
                return (hpos_target <= hpos_account)
        elif gtmode:
            return (hpos_target < hpos_object)
        else:
            return (hpos_target <= hpos_object)
    elif account:
        # Non-hierarchical permission with an account attached.
        if (is_quell and (permission in perms_object)):
            return True
        elif (permission in perms_account):
            return True
        else:
            return (permission in perms_object)
    elif (permission in perms_object):
        return True
    return False
def bump(arguments: argparse.Namespace) -> int:
    """Bump the AEA and plugin versions; optionally refresh fingerprints.

    Returns:
        Exit code: 0 on success, otherwise the result of ``update_hashes``.
    """
    new_plugin_versions = parse_plugin_versions(arguments.plugin_new_version)
    logging.info(f'Parsed arguments: {arguments}')
    logging.info(f'Parsed plugin versions: {new_plugin_versions}')

    have_updated_specifier_set = False
    if arguments.new_version is None:
        logging.info('AEA package not processed - no version provided.')
    else:
        aea_version_bumper = make_aea_bumper(Version(arguments.new_version))
        aea_version_bumper.run()
        have_updated_specifier_set = aea_version_bumper.result
        logging.info('AEA package processed.')

    logging.info('Processing plugins:')
    have_updated_specifier_set |= process_plugins(new_plugin_versions)
    logging.info('OK')

    return_code = 0
    if arguments.no_fingerprints:
        logging.info('Not updating fingerprints, since --no-fingerprints was specified.')
    elif have_updated_specifier_set is False:
        logging.info('Not updating fingerprints, since no specifier set has been updated.')
    else:
        logging.info('Updating hashes and fingerprints.')
        return_code = update_hashes()
    return return_code
def dispatch_delete(cfg):
    """Delete the configured metrics entity ('race' or 'annotation').

    Raises:
        exceptions.SystemSetupError: for an unknown delete option.
    """
    what = cfg.opts('system', 'delete.config.option')
    handlers = {'race': metrics.delete_race, 'annotation': metrics.delete_annotation}
    handler = handlers.get(what)
    if handler is None:
        raise exceptions.SystemSetupError('Cannot delete unknown configuration option [%s]' % what)
    handler(cfg)
class Wrapper(Filter):
    """Mayavi filter that wraps another filter so it can be toggled.

    When ``enabled`` the wrapped filter is spliced into the pipeline;
    when disabled the wrapper passes its input straight through and the
    displayed name gains a " (disabled)" suffix.
    """
    # The wrapped filter; recorded so it is persisted with the scene.
    filter = Instance(PipelineBase, allow_none=False, record=True)
    # Label shown beside the enable checkbox in the UI.
    label_text = Str('Enable Filter')
    enabled = Bool(True, desc='if the filter is enabled or not')
    # Whether the enable checkbox appears in the default traits view.
    _show_enabled = Bool(False)

    def __set_pure_state__(self, state):
        # Restore the wrapped filter from persisted (pickled) state.
        children = [f for f in [self.filter] if (f is not None)]
        handle_children_state(children, [state.filter])
        self.filter = children[0]
        super(Wrapper, self).__set_pure_state__(state)

    def default_traits_view(self):
        """Build the traits UI, optionally including the enable toggle."""
        if self._show_enabled:
            view = View(Group(Group(Item(name='enabled', label=self.label_text)), Group(Item(name='filter', style='custom', enabled_when='enabled', resizable=True), show_labels=False)), resizable=True)
        else:
            view = View(Group(Item(name='filter', style='custom', enabled_when='enabled', resizable=True), show_labels=False), resizable=True)
        return view

    def setup_pipeline(self):
        if (self.filter is not None):
            self._setup_events(self.filter)

    def stop(self):
        super(Wrapper, self).stop()
        if (self.filter is not None):
            self.filter.stop()

    def update_pipeline(self):
        # Re-evaluate enablement against the (possibly new) inputs.
        self._enabled_changed(self.enabled)
        self.pipeline_changed = True

    def update_data(self):
        self.data_changed = True

    def _enabled_changed(self, value):
        """Wire the wrapped filter into or out of the pipeline."""
        if ((len(self.inputs) == 0) or (self.filter is None)):
            return
        my_input = self.inputs[0]
        filter = self.filter
        if (len(filter.name) == 0):
            name = filter.__class__.__name__
        else:
            name = filter.name
        if (value and (filter is not None)):
            filter.inputs = [my_input]
            if (not filter.running):
                filter.start()
            self._set_outputs(self.filter.outputs)
        else:
            # Pass our input straight through, bypassing the filter.
            self._set_outputs(my_input.outputs)
            name += ' (disabled)'
        self.name = name
        self.render()

    def _filter_changed(self, old, new):
        # Detach events from the old filter before wiring up the new one.
        if (old is not None):
            self._setup_events(old, remove=True)
            old.stop()
        if (self.scene is not None):
            new.scene = self.scene
        self._setup_events(new, remove=False)
        self._enabled_changed(self.enabled)

    def _scene_changed(self, old, new):
        if (self.filter is not None):
            self.filter.scene = new
        super(Wrapper, self)._scene_changed(old, new)

    def _filter_pipeline_changed(self):
        if self.enabled:
            self._set_outputs(self.filter.outputs)

    def _setup_events(self, obj, remove=False):
        # Forward the wrapped filter's change notifications to ourselves.
        obj.on_trait_change(self._filter_pipeline_changed, 'pipeline_changed', remove=remove)
        obj.on_trait_change(self.update_data, 'data_changed', remove=remove)

    def _visible_changed(self, value):
        self.filter.visible = value
        super(Wrapper, self)._visible_changed(value)
class TestConsentRequestReporting():
    """API tests for the consent-request preferences report endpoint.

    NOTE(review): the original contained a bare ``(scope='function')``
    line above ``url`` — a stripped decorator. Restored as
    ``@pytest.fixture(scope='function')``, the standard pattern for
    per-test URL fixtures in this suite; confirm against the original.
    """

    @pytest.fixture(scope='function')
    def url(self) -> str:
        return f'{V1_URL_PREFIX}{CONSENT_REQUEST_PREFERENCES}'

    def test_consent_request_report_wrong_scope(self, url, generate_auth_header, api_client):
        """A token without the read scope is rejected with 403."""
        auth_header = generate_auth_header(scopes=[])
        response = api_client.get(url, headers=auth_header)
        assert (response.status_code == 403)
        assert (response.json() == {'detail': 'Not Authorized for this action'})

    def test_consent_request_report(self, url, generate_auth_header, api_client, consent_records):
        """Report returns all email-identified records, newest first."""
        auth_header = generate_auth_header(scopes=[CONSENT_READ])
        response = api_client.get(url, headers=auth_header)
        assert (response.status_code == 200)
        data = response.json()
        assert (data['total'] == 2)
        consent_records.sort(key=(lambda consent: consent.updated_at), reverse=True)
        for idx in [0, 1]:
            item = data['items'][idx]
            consent_record = consent_records[idx]
            assert (item['data_use'] == consent_record.data_use)
            assert (item['has_gpc_flag'] == consent_record.has_gpc_flag)
            assert (item['opt_in'] == consent_record.opt_in)
            assert (item['identity']['email'] == consent_record.provided_identity.encrypted_value['value'])

    def test_consent_request_report_handles_anonymous_consent_requests(self, url, generate_auth_header, api_client, anonymous_consent_records):
        """Device-id-identified (anonymous) records are also reported."""
        auth_header = generate_auth_header(scopes=[CONSENT_READ])
        response = api_client.get(url, headers=auth_header)
        assert (response.status_code == 200)
        data = response.json()
        assert (data['total'] == 2)
        anonymous_consent_records.sort(key=(lambda consent: consent.updated_at), reverse=True)
        for idx in [0, 1]:
            item = data['items'][idx]
            consent_record = anonymous_consent_records[idx]
            assert (item['data_use'] == consent_record.data_use)
            assert (item['has_gpc_flag'] == consent_record.has_gpc_flag)
            assert (item['opt_in'] == consent_record.opt_in)
            assert (item['identity']['fides_user_device_id'] == consent_record.provided_identity.encrypted_value['value'])

    def test_all_consent_requests_handled(self, url, generate_auth_header, api_client, anonymous_consent_records, consent_records):
        """Both identified and anonymous records appear in one report."""
        auth_header = generate_auth_header(scopes=[CONSENT_READ])
        response = api_client.get(url, headers=auth_header)
        assert (response.status_code == 200)
        data = response.json()
        assert (data['total'] == 4)

    def test_consent_request_report_filters_data_use(self, url, generate_auth_header, api_client, consent_records):
        auth_header = generate_auth_header(scopes=[CONSENT_READ])
        response = api_client.get((url + '?data_use=email'), headers=auth_header)
        assert (response.status_code == 200)
        data = response.json()
        assert (data['total'] == 1)
        item = data['items'][0]
        assert (item['data_use'] == 'email')

    def test_consent_request_report_filters_identity(self, url, generate_auth_header, api_client, consent_records, provided_identity_value):
        auth_header = generate_auth_header(scopes=[CONSENT_READ])
        response = api_client.get((url + f'?identity={provided_identity_value}'), headers=auth_header)
        assert (response.status_code == 200)
        assert (response.json()['total'] == 2)
        # An unknown identity must match nothing.
        response = api_client.get((url + '?identity=not-an-identity'), headers=auth_header)
        assert (response.status_code == 200)
        assert (response.json()['total'] == 0)

    def test_consent_request_report_filters_opt_in(self, url, generate_auth_header, api_client, consent_records):
        auth_header = generate_auth_header(scopes=[CONSENT_READ])
        response = api_client.get((url + '?opt_in=true'), headers=auth_header)
        assert (response.status_code == 200)
        data = response.json()
        assert (data['total'] == 1)
        item = data['items'][0]
        assert (item['opt_in'] == True)

    def test_consent_request_report_filters_gpc_flag(self, url, generate_auth_header, api_client, consent_records):
        auth_header = generate_auth_header(scopes=[CONSENT_READ])
        response = api_client.get((url + '?has_gpc_flag=false'), headers=auth_header)
        assert (response.status_code == 200)
        data = response.json()
        assert (data['total'] == 2)
        for idx in [0, 1]:
            item = data['items'][idx]
            assert (item['has_gpc_flag'] == False)
class OptionSeriesVariablepieLabel(Options):
    """Highcharts ``series.variablepie.label`` option wrapper.

    NOTE(review): the original contained duplicate ``def`` pairs with
    stripped decorators, so each getter was silently shadowed by its
    setter. Restored as ``@property`` getters with matching setters —
    the standard pattern for these Options wrappers; confirm against
    the original source.
    """

    @property
    def boxesToAvoid(self):
        return self._config_get(None)

    @boxesToAvoid.setter
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)

    @property
    def connectorAllowed(self):
        return self._config_get(False)

    @connectorAllowed.setter
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def connectorNeighbourDistance(self):
        return self._config_get(24)

    @connectorNeighbourDistance.setter
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get('undefined')

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def maxFontSize(self):
        return self._config_get(None)

    @maxFontSize.setter
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def minFontSize(self):
        return self._config_get(None)

    @minFontSize.setter
    def minFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def onArea(self):
        return self._config_get(None)

    @onArea.setter
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def style(self) -> 'OptionSeriesVariablepieLabelStyle':
        """Sub-options object for label CSS styling (getter only)."""
        return self._config_sub_data('style', OptionSeriesVariablepieLabelStyle)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
class CustomDataTest(TestCase):
    """Unit tests for CustomData normalization and input validation."""

    def test_normalize(self):
        """normalize() flattens all fields, contents and custom properties."""
        content = Content(product_id='id0', quantity='quantity1', item_price=3.99)
        custom_properties = {'custom1': 'property1', 'custom2': 'property2'}
        delivery_category = DeliveryCategory.CURBSIDE
        expected = {'value': 0.5, 'currency': 'usd', 'content_name': 'content-content1', 'content_category': 'content-category2', 'content_ids': ['id1', 'id2'], 'content_type': 'content-type3', 'contents': [{'id': content.product_id, 'quantity': content.quantity, 'item_price': content.item_price}], 'order_id': 'order-id4', 'predicted_ltv': 5.99, 'num_items': 6, 'status': 'status7', 'search_string': 'search-string8', 'item_number': 'item-number9', 'delivery_category': delivery_category.value, 'custom1': 'property1', 'custom2': 'property2'}
        custom_data = CustomData(value=expected['value'], currency=expected['currency'], content_name=expected['content_name'], content_category=expected['content_category'], content_ids=expected['content_ids'], contents=[content], content_type=expected['content_type'], order_id=expected['order_id'], predicted_ltv=expected['predicted_ltv'], num_items=expected['num_items'], status=expected['status'], search_string=expected['search_string'], item_number=expected['item_number'], delivery_category=delivery_category, custom_properties=custom_properties)
        self.assertEqual(custom_data.normalize(), expected)

    def test_delivery_category_validate(self):
        """Non-enum delivery_category must raise TypeError."""
        delivery_category = 'undefined_delivery_category'
        with self.assertRaises(TypeError) as context:
            CustomData(value=123.12, delivery_category=delivery_category)
        expected_exception_message = ('delivery_category must be of type DeliveryCategory. Passed invalid category: ' + delivery_category)
        self.assertTrue((expected_exception_message in str(context.exception)))

    def test_validate_value(self):
        """Non-numeric value must raise TypeError."""
        bad_value = 'bad-value'
        with self.assertRaises(TypeError) as context:
            CustomData(value=bad_value)
        expected_exception_message = ('CustomData.value must be a float or int. TypeError on value: ' + bad_value)
        self.assertTrue((expected_exception_message in str(context.exception)))

    def test_normalize_int_and_float_values_work(self):
        self.assertEqual(CustomData(value=123).normalize(), {'value': 123})
        self.assertEqual(CustomData(value=123.45).normalize(), {'value': 123.45})

    def test_validate_predicted_ltv(self):
        """Non-numeric predicted_ltv must raise TypeError."""
        bad_predicted_ltv = 'bad-predicted_ltv'
        with self.assertRaises(TypeError) as context:
            CustomData(predicted_ltv=bad_predicted_ltv)
        expected_exception_message = ('CustomData.predicted_ltv must be a float or int. TypeError on predicted_ltv: ' + bad_predicted_ltv)
        self.assertTrue((expected_exception_message in str(context.exception)))

    def test_normalize_int_and_float_predicted_ltvs_work(self):
        self.assertEqual(CustomData(predicted_ltv=123).normalize(), {'predicted_ltv': 123})
        self.assertEqual(CustomData(predicted_ltv=123.45).normalize(), {'predicted_ltv': 123.45})

    def test_emptyobject_normalize(self):
        """A CustomData with no fields normalizes to an empty dict."""
        custom_data = CustomData()
        self.assertEqual(custom_data.normalize(), {})
def lazy_import():
    """Lazily import the WAF-exclusion models and expose them at module scope.

    Deferring these imports avoids circular-import problems at module load;
    each class is published into ``globals()`` under its own name.
    """
    from fastly.model.type_waf_exclusion import TypeWafExclusion
    from fastly.model.waf_exclusion_data import WafExclusionData
    from fastly.model.waf_exclusion_response_data_all_of import WafExclusionResponseDataAllOf
    from fastly.model.waf_exclusion_response_data_attributes import WafExclusionResponseDataAttributes
    from fastly.model.waf_exclusion_response_data_relationships import WafExclusionResponseDataRelationships
    for model_cls in (TypeWafExclusion, WafExclusionData, WafExclusionResponseDataAllOf, WafExclusionResponseDataAttributes, WafExclusionResponseDataRelationships):
        globals()[model_cls.__name__] = model_cls
class TwoWire():
    """Minimal I2C master over the Linux ``/dev/i2c-*`` character devices.

    Mimics the Arduino ``Wire`` API (beginTransmission / write / read /
    endTransmission). A transaction id (``iid``) gates bus access so only
    the caller that began the transaction may use it, with an optional
    FIFO queue of waiting object ids.
    """
    # ioctl request number to bind the fd to a slave address (I2C_SLAVE).
    I2C_SLAVE = 1795

    def __init__(self, i2c_bus_num=1, i2c_address=0):
        self.busy = False
        self.i2c_address = int(i2c_address)
        self.i2c_bus_num = (- 1)
        self.i2cr = None
        self.iid = (- 1)
        self.queue = []
        # Delay after each transaction, giving the slave time to settle.
        self.enddelay = 0.001
        if (self.i2c_address != 0):
            self.i2c_bus_num = i2c_bus_num
            self.connect()

    def connect(self):
        """(Re)open unbuffered read/write fds and bind the slave address."""
        time.sleep(0.1)
        try:
            self.i2cr = open(('/dev/i2c-' + str(self.i2c_bus_num)), 'rb', buffering=0)
            self.i2cw = open(('/dev/i2c-' + str(self.i2c_bus_num)), 'wb', buffering=0)
            fcntl.ioctl(self.i2cr, self.I2C_SLAVE, self.i2c_address)
            fcntl.ioctl(self.i2cw, self.I2C_SLAVE, self.i2c_address)
        except Exception as e:
            # Mark the bus unusable; a later call may reconnect.
            self.i2cr = None
            self.i2c_bus_num = (- 1)
            self.busy = False

    def setEndDelay(self, enddelay):
        self.enddelay = enddelay

    def beginTransmission(self, oid=0, queue_enabled=False):
        """Try to acquire the bus; return a transaction id, or 0 on failure.

        With ``queue_enabled`` callers that cannot acquire the bus are
        queued by ``oid`` and served FIFO on their later attempts.
        """
        if self.busy:
            if queue_enabled:
                if (oid not in self.queue):
                    self.queue.append(oid)
            return 0
        elif ((len(self.queue) > 0) and queue_enabled):
            if (self.queue[0] != oid):
                # Not this caller's turn yet; keep (or put) it in the queue.
                if (oid not in self.queue):
                    self.queue.append(oid)
                return 0
            else:
                del self.queue[0]
        if (self.i2cr is not None):
            self.busy = True
            # Transaction id: unix-seconds string concatenated with oid.
            self.iid = int((str(int(time.time())) + str(oid)))
            return self.iid
        else:
            self.connect()
            return 0

    def write(self, data, iid=0):
        """Write raw bytes; honoured only for the current transaction id."""
        if (self.busy and (self.iid == iid)):
            try:
                self.i2cw.write(data)
            except:
                self.connect()

    def read(self, size, iid=0):
        """Read ``size`` bytes; empty list when not the current transaction."""
        buf = []
        if (self.busy and (self.iid == iid)):
            try:
                buf = self.i2cr.read(size)
            except:
                self.connect()
        return buf

    def endTransmission(self, iid=0):
        """Release the bus (after the settle delay when it was busy)."""
        if (self.iid == iid):
            if self.busy:
                time.sleep(self.enddelay)
            self.busy = False
            self.iid = (- 1)

    def close(self):
        try:
            if (self.i2cr is not None):
                self.i2cr.close()
                self.i2cw.close()
        except:
            pass

    def __del__(self):
        self.close()

    def __exit__(self, t, value, traceback):
        self.close()
def main():
    """Sign a site-provided challenge PIN with a freshly generated RSA key.

    NOTE(review): relies on the legacy PyCrypto API (``RSAkey.sign`` and
    ``__getstate__``), which pycryptodome removed — confirm the intended
    crypto backend before modernizing.
    """
    rng = Random.new().read
    print('Enter challenge pin from site: ')
    pin = input()
    print('Signing "{}" with a new RSA key....'.format(pin))
    RSAkey = RSA.generate(1024, rng)
    # PyCrypto raw signature: returns a tuple whose first element is the
    # signature integer.
    signature = RSAkey.sign(int(pin), rng)
    key_params = RSAkey.__getstate__()
    print_twitter(signature[0])
    # NOTE(review): prints exponents 'e' and 'd' — 'd' is the PRIVATE
    # exponent; verify the site really expects "e:d" and not "e:n".
    print('\n\nPlease input your public key on the web form:')
    print(' "{}:{}"'.format(key_params['e'], key_params['d']))
    print('\n\n')
# NOTE(review): the original had a garbled, syntactically invalid line here:
# ``(boundscheck=False, wraparound=False, cdivision=True, nonecheck=False)``
# — a stripped compiler-directive decorator (presumably Cython directives).
# Restore the real decorator from the original build setup before compiling;
# as plain Python the function below is valid and correct.
def brightness(img: Auint8, stateimg: Auint8, factor: float, offset: int):
    """Apply ``out = clip(src * factor + offset, 0, 255)`` per RGB channel.

    Builds a 256-entry lookup table once, then maps every pixel of
    ``stateimg`` through it, writing the result into ``img`` in place.
    """
    height = img.shape[0]
    width = img.shape[1]
    lut: A1dC = np.empty(256, dtype=np.uint8)
    for k in range(256):
        op_result = (k * factor) + offset
        # Saturate to the uint8 range before the cast.
        if op_result > 255:
            op_result = 255
        elif op_result < 0:
            op_result = 0
        lut[k] = np.uint8(op_result)
    for i in range(height):
        for j in range(width):
            img[(i, j, 0)] = lut[stateimg[(i, j, 0)]]
            img[(i, j, 1)] = lut[stateimg[(i, j, 1)]]
            img[(i, j, 2)] = lut[stateimg[(i, j, 2)]]
class Builder():
    """Rebuild Python literal values from legacy ``compiler`` module AST nodes.

    Dispatches on the node class name to a ``build_<Name>`` method and
    raises UnknownType for anything unsupported.
    NOTE(review): targets the Python 2 ``compiler`` package AST
    (``getChildren`` API), not the modern ``ast`` module — confirm.
    """

    def build(self, o):
        """Dispatch ``o`` to its type-specific builder or raise UnknownType."""
        m = getattr(self, ('build_' + o.__class__.__name__), None)
        if (m is None):
            raise UnknownType(o.__class__.__name__)
        return m(o)

    def build_List(self, o):
        return list(map(self.build, o.getChildren()))

    def build_Const(self, o):
        return o.value

    def build_Dict(self, o):
        # Children alternate key, value, key, value, ...
        d = {}
        i = iter(map(self.build, o.getChildren()))
        for el in i:
            d[el] = next(i)
        return d

    def build_Tuple(self, o):
        return tuple(self.build_List(o))

    def build_Name(self, o):
        # Only the three literal names are recognised.
        if (o.name == 'None'):
            return None
        if (o.name == 'True'):
            return True
        if (o.name == 'False'):
            return False
        raise UnknownType('Undefined Name')

    def build_Add(self, o):
        # Only supports complex literals of the form ``real + imag*j``.
        (real, imag) = list(map(self.build_Const, o.getChildren()))
        try:
            real = float(real)
        except TypeError:
            raise UnknownType('Add')
        if ((not isinstance(imag, complex)) or (imag.real != 0.0)):
            raise UnknownType('Add')
        return (real + imag)

    def build_Getattr(self, o):
        parent = self.build(o.expr)
        return getattr(parent, o.attrname)

    def build_UnarySub(self, o):
        return (- self.build_Const(o.getChildren()[0]))

    def build_UnaryAdd(self, o):
        return self.build_Const(o.getChildren()[0])
class AMFFunction(models.Model):
    """Django model for an AMF (Access and Mobility Management Function).

    NOTE(review): field semantics presumably follow the 3GPP NRM naming —
    confirm against the spec; only the structure is evident here.
    """
    # Primary key. NOTE(review): editable=True on a defaulted UUID PK is
    # unusual — verify this is intentional.
    aMFIdentifier = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=True)
    # PLMN ids served by this AMF (many-to-many).
    pLMNIdList = models.ManyToManyField(PLMNIdList, related_name='pLMNIdListId_AMFFunction')
    sBIFQDN = models.TextField()
    sBIServiceList = models.TextField()
    weightFactor = models.IntegerField()
    # Supported S-NSSAIs (network slice identifiers).
    sNSSAIList = models.ManyToManyField(SNSSAIList, related_name='sNSSAIListId_AMFFunction')
class CommerceMerchantSettingsSetupStatus(AbstractObject):
    """Graph API object describing a commerce merchant's setup progress."""

    def __init__(self, api=None):
        super(CommerceMerchantSettingsSetupStatus, self).__init__()
        self._isCommerceMerchantSettingsSetupStatus = True
        self._api = api

    class Field(AbstractObject.Field):
        # String constants naming the API fields of this object.
        deals_setup = 'deals_setup'
        marketplace_approval_status = 'marketplace_approval_status'
        marketplace_approval_status_details = 'marketplace_approval_status_details'
        payment_setup = 'payment_setup'
        review_status = 'review_status'
        shop_setup = 'shop_setup'

    _field_types = {'deals_setup': 'string', 'marketplace_approval_status': 'string', 'marketplace_approval_status_details': 'Object', 'payment_setup': 'string', 'review_status': 'Object', 'shop_setup': 'string'}

    # BUG FIX: this method takes ``cls`` but had lost its @classmethod
    # decorator, so class-level calls would receive a spurious instance arg.
    @classmethod
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class TestAutoNeck(unittest.TestCase):
    """Shape smoke-tests for necks built via ``AutoNeck.from_config``."""

    @staticmethod
    def _build_and_run(config, batch_size=2):
        # Shared driver: build the neck and push one random batch through it.
        import torch
        from video_transformers import AutoNeck
        neck = AutoNeck.from_config(config)
        x = torch.randn(batch_size, config['num_timesteps'], config['num_features'])
        return neck, neck(x)

    def test_transformers_neck(self):
        config = {'name': 'TransformerNeck', 'num_features': 256, 'num_timesteps': 8, 'transformer_enc_num_heads': 4, 'transformer_enc_num_layers': 2, 'transformer_enc_act': 'gelu', 'dropout_p': 0.1, 'return_mean': True}
        neck, output = self._build_and_run(config)
        self.assertEqual(output.shape, (2, neck.num_features))

    def test_lstm_neck(self):
        config = {'name': 'LSTMNeck', 'num_features': 256, 'num_timesteps': 8, 'hidden_size': 128, 'num_layers': 2, 'return_last': True}
        _, output = self._build_and_run(config)
        self.assertEqual(output.shape, (2, config['hidden_size']))

    def test_gru_neck(self):
        config = {'name': 'GRUNeck', 'num_features': 256, 'num_timesteps': 8, 'hidden_size': 128, 'num_layers': 2, 'return_last': True}
        _, output = self._build_and_run(config)
        self.assertEqual(output.shape, (2, config['hidden_size']))
(os.environ, {'FIDES__CONSENT__TCF_ENABLED': 'false', 'FIDES__CONSENT__AC_ENABLED': 'true'}, clear=True)
.unit
def test_get_config_ac_mode_without_tc_mode() -> None:
    """Enabling AC mode while TCF mode is disabled must fail config validation."""
    with pytest.raises(ValidationError) as exc:
        get_config()
    assert exc.value.errors()[0]['msg'] == 'AC cannot be enabled unless TCF mode is also enabled.'
class FixedByteSizeDecoder(SingleDecoder):
    """Decoder for ABI values occupying a fixed number of bytes inside a
    fixed-size data slot (e.g. uintN, intN, bool, address).

    Subclasses configure:
      decoder_fn     -- callable turning the raw value bytes into a Python value
      value_bit_size -- bit width of the encoded value (must be a multiple of 8)
      data_byte_size -- total slot size in bytes (value plus padding)
      is_big_endian  -- True when the value is right-aligned (padding on the left)
    """
    decoder_fn = None
    value_bit_size = None
    data_byte_size = None
    is_big_endian = None

    def validate(self):
        """Check that all required class attributes are set and consistent."""
        super().validate()
        if self.value_bit_size is None:
            raise ValueError('`value_bit_size` may not be None')
        if self.data_byte_size is None:
            raise ValueError('`data_byte_size` may not be None')
        if self.decoder_fn is None:
            raise ValueError('`decoder_fn` may not be None')
        if self.is_big_endian is None:
            raise ValueError('`is_big_endian` may not be None')
        if self.value_bit_size % 8 != 0:
            # Bug fix: this message was a plain string, so `{self.value_bit_size}`
            # was emitted verbatim; it is now a real f-string.
            raise ValueError(
                f'Invalid value bit size: {self.value_bit_size}. Must be a multiple of 8'
            )
        if self.value_bit_size > self.data_byte_size * 8:
            raise ValueError('Value byte size exceeds data size')

    def read_data_from_stream(self, stream):
        """Read exactly one data slot from `stream`, raising when it runs short."""
        data = stream.read(self.data_byte_size)
        if len(data) != self.data_byte_size:
            raise InsufficientDataBytes(
                f'Tried to read {self.data_byte_size} bytes, only got {len(data)} bytes.'
            )
        return data

    def split_data_and_padding(self, raw_data):
        """Split a raw slot into (value bytes, padding bytes) based on endianness."""
        value_byte_size = self._get_value_byte_size()
        padding_size = self.data_byte_size - value_byte_size
        if self.is_big_endian:
            # Big-endian values are right-aligned: padding precedes the value.
            padding_bytes = raw_data[:padding_size]
            data = raw_data[padding_size:]
        else:
            # Little-endian values are left-aligned: padding follows the value.
            data = raw_data[:value_byte_size]
            padding_bytes = raw_data[value_byte_size:]
        return (data, padding_bytes)

    def validate_padding_bytes(self, value, padding_bytes):
        """Require all padding bytes to be zero."""
        value_byte_size = self._get_value_byte_size()
        padding_size = self.data_byte_size - value_byte_size
        if padding_bytes != (b'\x00' * padding_size):
            raise NonEmptyPaddingBytes(f'Padding bytes were not empty: {repr(padding_bytes)}')

    def _get_value_byte_size(self):
        # Width of the value portion in whole bytes.
        return self.value_bit_size // 8
class OptionPlotoptionsFunnelSonificationDefaultspeechoptionsMappingVolume(Options):
    """Generated Highcharts option wrapper: volume mapping for funnel-series
    sonification default speech options.

    NOTE(review): each name below appears twice (getter, then setter). In the
    generated source these pairs are presumably decorated with @property /
    @<name>.setter, which is not visible in this chunk — confirm upstream.
    """
    def mapFunction(self):
        # Getter: mapping function; None defers to the library default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter counterpart of mapFunction.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property this mapping is driven by.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter counterpart of mapTo.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped volume.
        return self._config_get(None)

    def max(self, num: float):
        # Setter counterpart of max.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped volume.
        return self._config_get(None)

    def min(self, num: float):
        # Setter counterpart of min.
        self._config(num, js_type=False)

    def within(self):
        # Getter: range ("series", "chart", ...) the mapping is computed within.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter counterpart of within.
        self._config(value, js_type=False)
_tlv_type(LLDP_TLV_MANAGEMENT_ADDRESS)
class ManagementAddress(LLDPBasicTLV):
    """LLDP Management Address TLV (IEEE 802.1AB, 8.5.9).

    Wire layout: addr_len(1) | addr_subtype(1) | addr(addr_len - 1) |
                 intf_subtype(1) | intf_num(4) | oid_len(1) | oid(oid_len).
    `addr_len` counts the subtype byte plus the address bytes, hence the
    repeated "- 1" / "+ 1" adjustments below.
    """
    _LEN_MIN = 9
    _LEN_MAX = 167
    _ADDR_PACK_STR = '!BB'  # address length, address subtype
    _ADDR_PACK_SIZE = struct.calcsize(_ADDR_PACK_STR)
    _ADDR_LEN_MIN = 1
    _ADDR_LEN_MAX = 31
    _INTF_PACK_STR = '!BIB'  # interface subtype, interface number, OID length
    _INTF_PACK_SIZE = struct.calcsize(_INTF_PACK_STR)
    _OID_LEN_MIN = 0
    _OID_LEN_MAX = 128

    def __init__(self, buf=None, *args, **kwargs):
        """Parse from `buf` when given, else build from keyword fields."""
        super(ManagementAddress, self).__init__(buf, *args, **kwargs)
        if buf:
            (self.addr_len, self.addr_subtype) = struct.unpack(
                self._ADDR_PACK_STR, self.tlv_info[:self._ADDR_PACK_SIZE])
            assert self._addr_len_valid()
            # addr_len includes the subtype byte, so the address itself is one shorter.
            offset = self._ADDR_PACK_SIZE + self.addr_len - 1
            self.addr = self.tlv_info[self._ADDR_PACK_SIZE:offset]
            (self.intf_subtype, self.intf_num, self.oid_len) = struct.unpack(
                self._INTF_PACK_STR,
                self.tlv_info[offset:offset + self._INTF_PACK_SIZE])
            assert self._oid_len_valid()
            offset = offset + self._INTF_PACK_SIZE
            self.oid = self.tlv_info[offset:]
        else:
            self.addr_subtype = kwargs['addr_subtype']
            self.addr = kwargs['addr']
            # +1 accounts for the addr_subtype byte included in addr_len.
            self.addr_len = len(self.addr) + 1
            assert self._addr_len_valid()
            self.intf_subtype = kwargs['intf_subtype']
            self.intf_num = kwargs['intf_num']
            self.oid = kwargs['oid']
            self.oid_len = len(self.oid)
            assert self._oid_len_valid()
            self.len = (self._ADDR_PACK_SIZE + self.addr_len - 1
                        + self._INTF_PACK_SIZE + self.oid_len)
            assert self._len_valid()
            self.typelen = (self.tlv_type << LLDP_TLV_TYPE_SHIFT) | self.len

    def serialize(self):
        """Pack the TLV header and all fields back into wire format."""
        tlv_info = struct.pack(self._ADDR_PACK_STR, self.addr_len, self.addr_subtype)
        tlv_info += self.addr
        tlv_info += struct.pack(self._INTF_PACK_STR,
                                self.intf_subtype, self.intf_num, self.oid_len)
        tlv_info += self.oid
        return struct.pack('!H', self.typelen) + tlv_info

    def _addr_len_valid(self):
        # Bug fix: this previously used `or`, which made the range check
        # always true; both bounds must hold, mirroring _oid_len_valid().
        return self._ADDR_LEN_MIN <= self.addr_len <= self._ADDR_LEN_MAX

    def _oid_len_valid(self):
        return self._OID_LEN_MIN <= self.oid_len <= self._OID_LEN_MAX
def test_get_experiment_ensemble(poly_example_tmp_dir, dark_storage_client):
    """A single experiment with two ensembles is exposed through the dark-storage API."""
    experiments = dark_storage_client.get('/experiments').json()
    assert len(experiments) == 1
    experiment = experiments[0]
    assert len(experiment['ensemble_ids']) == 2
    ensembles = dark_storage_client.get(f"/experiments/{experiment['id']}/ensembles").json()
    assert len(ensembles) == 2
    assert ensembles[0]['experiment_id'] == experiment['id']
    assert ensembles[0]['userdata']['name'] in ('alpha', 'beta')
.parametrize('graph_info', GRAPHS)
def test_scheduler_dependency_management(graph_info):
    """Groups handed out by the scheduler must never have unfinished dependencies."""
    scheduler = to_scheduler(graph_info['graph'])
    while scheduler:
        for ready_group in scheduler.iter_available_groups():
            assert not ready_group.dependencies
            scheduler.finish(ready_group, SUCCESS)
def f_inner_fft(vals, inv=False):
    """FFT (or inverse FFT when `inv`) of field elements over the curve-order field."""
    domain = [root.n for root in get_roots_of_unity(len(vals))]
    modulus = b.curve_order
    raw = [v.n for v in vals]
    if not inv:
        return [f_inner(c) for c in _fft(raw, modulus, domain)]
    # Inverse transform: run the FFT over the reversed domain and scale by 1/n.
    inv_len = f_inner(1) / len(vals)
    inverse_domain = [domain[0]] + domain[1:][::-1]
    return [f_inner(c) * inv_len for c in _fft(raw, modulus, inverse_domain)]
class TestNumberOfOutListValues(BaseDataQualityValueListMetricsTest):
    """Test that the count of column values outside the expected value list
    satisfies the configured threshold condition."""
    # Human-readable name and machine alias used for test registration/lookup.
    name: ClassVar = 'Number Out-of-List Values'
    alias: ClassVar = 'number_value_list'

    def calculate_value_for_test(self) -> Numeric:
        # Number of current-data values not present in the allowed list.
        return self.metric.get_result().current.number_not_in_list

    def get_description(self, value: Numeric) -> str:
        # Markdown description rendered in the report for this test result.
        return f'The number of values out of list in the column **{self.column_name}** is {value}. The test threshold is {self.get_condition()}.'
()
def company_languages_df():
    """Fixture: small company/language/commit-count report frame."""
    schema = OSCILanguagesReportSchema
    rows = [
        ('Google', 'python', 50),
        ('Google', 'go', 30),
        ('Microsoft', 'typescript', 40),
        ('Microsoft', 'powershell', 20),
    ]
    return pd.DataFrame(
        [{schema.company: company, schema.language: language, schema.commits: commits}
         for (company, language, commits) in rows]
    )
class Migration(migrations.Migration):
    """Alter MailLog.message to a nullable FK to frontend.EmailMessage without
    a database-level constraint (db_constraint=False keeps only the ORM join)."""
    dependencies = [('frontend', '0018_auto__1648')]
    operations = [migrations.AlterField(model_name='maillog', name='message', field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='frontend.EmailMessage'))]
def test_data_quality_test_most_common_value_share() -> None:
    """TestMostCommonValueShare: condition evaluation and missing-column handling."""
    current = pd.DataFrame({'feature1': [0, 1, 1, 5], 'target': [0, 0, 0, 1], 'prediction': [0, 0, 1, 1]})
    mapping = ColumnMapping(numerical_features=['feature1'])

    # Default condition with a reference frame passes.
    suite = TestSuite(tests=[TestMostCommonValueShare(column_name='feature1')])
    suite.run(current_data=current, reference_data=current, column_mapping=mapping)
    assert suite

    # A column absent from the data must fail the suite.
    suite = TestSuite(tests=[TestMostCommonValueShare(column_name='no_existing_feature', eq=0.5)])
    suite.run(current_data=current, reference_data=None, column_mapping=ColumnMapping())
    assert not suite

    # The mode's share is exactly 0.5, so `lt=0.5` fails ...
    suite = TestSuite(tests=[TestMostCommonValueShare(column_name='feature1', lt=0.5)])
    suite.run(current_data=current, reference_data=None, column_mapping=mapping)
    assert not suite

    # ... while `eq=0.5` passes, and the suite can be rendered and serialised.
    suite = TestSuite(tests=[TestMostCommonValueShare(column_name='feature1', eq=0.5)])
    suite.run(current_data=current, reference_data=None, column_mapping=mapping)
    assert suite
    assert suite.show()
    assert suite.json()
_deserializable
class DiscordBot(BaseBot):
    """Discord front-end for the bot: ingests data sources and answers queries."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def add_data(self, message):
        """Ingest the source named by the last whitespace-separated token of `message`."""
        data = message.rsplit(' ', 1)[-1]
        try:
            self.add(data)
        except Exception:
            logging.exception(f'Failed to add data {data}.')
            return 'Some error occurred while adding data.'
        return f'Added data from: {data}'

    def ask_bot(self, message):
        """Answer `message` via the underlying query engine, degrading gracefully on errors."""
        try:
            return self.query(message)
        except Exception:
            logging.exception(f'Failed to query {message}.')
            return 'An error occurred. Please try again!'

    def start(self):
        """Run the Discord client with the token from the environment."""
        client.run(os.environ['DISCORD_BOT_TOKEN'])
def new_dataset(data):
    """Create a test dataset from the uploaded file and record its id in `data`."""
    print('Creating new dataset')
    endpoint = url('items/datasets')
    payload = {'file': data['file_id'], 'name': 'test_dataset'}
    response = data['session'].post(endpoint, data=payload)
    assert_ret_code(response, 201)
    body = response.content.decode()
    print('New dataset:' + body)
    data['dataset_id'] = response.json()['id']
    return data
.django_db
def test_correct_response_single_defc(client, monkeypatch, helpers, elasticsearch_award_index, cfda_awards_and_transactions):
    """Count endpoint reports 3 awards for DEF code 'L'."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    response = helpers.post_for_count_endpoint(client, url, def_codes=['L'])
    assert response.status_code == status.HTTP_200_OK
    assert response.json()['count'] == 3
def run():
    """Generate segbits tags for configuration-site primitives from params.jl.

    Reads the fuzzer's design parameters and emits per-site tags/groups for
    ICAP, BSCAN, CAPTURE, STARTUP, FRAME_ECC, USR_ACCESS and DCIRESET sites.
    """
    segmk = Segmaker('design.bits')
    print('Loading tags')
    # Fix: close params.jl deterministically instead of leaking the handle.
    with open('params.jl', 'r') as f:
        design = json.load(f)
    for p in design:
        ps = p['params']
        site_type = p['site_type']
        # NOTE(review): the original compared with `site_type in '<name>'`
        # (a substring test, so e.g. 'CAP' would match 'ICAP'); exact equality
        # appears intended — confirm the site_type values in params.jl.
        if site_type == 'ICAP':
            param = verilog.unquote(ps['ICAP_WIDTH'])
            segmaker.add_site_group_zero(segmk, p['site'], 'ICAP_WIDTH_', ['X32', 'X8', 'X16'], 'X32', param)
        elif site_type == 'BSCAN':
            param = str(ps['JTAG_CHAIN'])
            segmaker.add_site_group_zero(segmk, p['site'], 'JTAG_CHAIN_', ['1', '2', '3', '4'], param, param)
        elif site_type == 'CAPTURE':
            param = verilog.unquote(ps['ONESHOT'])
            segmk.add_site_tag(p['site'], 'ONESHOT', param == 'TRUE')
        elif site_type == 'STARTUP':
            param = verilog.unquote(ps['PROG_USR'])
            segmk.add_site_tag(p['site'], 'PROG_USR', param == 'TRUE')
        elif site_type == 'FRAME_ECC':
            param = verilog.unquote(ps['FARSRC'])
            segmaker.add_site_group_zero(segmk, p['site'], 'FARSRC_', ['FAR', 'EFAR'], param, param)
        elif site_type in ['USR_ACCESS', 'DCIRESET']:
            # These sites only record whether the primitive is instantiated.
            feature = 'ENABLED'
            segmk.add_site_tag(p['site'], feature, bool(ps['ENABLED']))
    segmk.compile(bitfilter=bitfilter)
    segmk.write()
def test_index(client, app):
    """Index view renders the URL, name and owner details of every repo the GitHub client returns."""
    def make_repo(n):
        # NB: Mock(name=...) sets the mock's repr name, which is what the
        # rendered page ends up containing for `.name` attributes.
        return mock.Mock(
            html_url=f'repo{n}-url',
            name=f'repo{n}-name',
            owner=mock.Mock(
                login=f'owner{n}-login',
                html_url=f'owner{n}-url',
                avatar_url=f'owner{n}-avatar-url',
            ),
            get_commits=mock.Mock(return_value=[mock.Mock()]),
        )

    github_client_mock = mock.Mock(spec=Github)
    github_client_mock.search_repositories.return_value = [make_repo(1), make_repo(2)]
    with app.container.github_client.override(github_client_mock):
        response = client.get(url_for('example.index'))
    assert response.status_code == 200
    assert b'Results found: 2' in response.data
    for n in (1, 2):
        assert f'repo{n}-url'.encode() in response.data
        assert f'repo{n}-name'.encode() in response.data
        assert f'owner{n}-login'.encode() in response.data
        assert f'owner{n}-url'.encode() in response.data
        assert f'owner{n}-avatar-url'.encode() in response.data
class UpBlocks(fl.Chain):
    """Decoder ("up") half of an SD-style UNet: twelve chained stages of
    ResidualBlocks with periodic 2x Upsample steps and CLIP-L cross-attention.

    Channel widths step down 1280 -> 640 -> 320; each stage's in_channels is
    larger than the previous out_channels (e.g. 2560 = 1280 + 1280), so the
    exact stage order is load-bearing. NOTE(review): the extra input width is
    presumably skip-connection concatenation handled by the surrounding UNet —
    confirm against the enclosing implementation.
    """
    def __init__(self, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        # One fl.Chain per stage; Upsample appears after stages 3, 6 and 9.
        super().__init__(fl.Chain(ResidualBlock(in_channels=2560, out_channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=2560, out_channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=2560, out_channels=1280, device=device, dtype=dtype), fl.Upsample(channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=2560, out_channels=1280, device=device, dtype=dtype), CLIPLCrossAttention(channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=2560, out_channels=1280, device=device, dtype=dtype), CLIPLCrossAttention(channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=1920, out_channels=1280, device=device, dtype=dtype), CLIPLCrossAttention(channels=1280, device=device, dtype=dtype), fl.Upsample(channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=1920, out_channels=640, device=device, dtype=dtype), CLIPLCrossAttention(channels=640, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=1280, out_channels=640, device=device, dtype=dtype), CLIPLCrossAttention(channels=640, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=960, out_channels=640, device=device, dtype=dtype), CLIPLCrossAttention(channels=640, device=device, dtype=dtype), fl.Upsample(channels=640, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=960, out_channels=320, device=device, dtype=dtype), CLIPLCrossAttention(channels=320, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=640, out_channels=320, device=device, dtype=dtype), CLIPLCrossAttention(channels=320, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=640, out_channels=320, device=device, dtype=dtype), CLIPLCrossAttention(channels=320, device=device, dtype=dtype)))
class TestShutterReleaseLever(object):
    """Behaviour of the shutter release lever across exposure modes and light levels."""

    @staticmethod
    def _cock_and_release(ecs):
        # Cock the shutter, then attempt to trip it via the release lever.
        ecs.shutter.cock()
        ecs.shutter_release_lever.depress()

    def test_nothing_happens_when_there_is_no_exposure_control_system(self):
        lever = ShutterReleaseLever()
        assert lever.depress() is None

    def test_depress_lever_in_manual_mode_is_not_blocked(self):
        ecs = ExposureControlSystem()
        ecs.mode = 'Manual'
        self._cock_and_release(ecs)
        assert not ecs.shutter.cocked

    def test_depress_lever_in_AE_mode_is_blocked(self):
        ecs = ExposureControlSystem()
        assert ecs.mode == 'Shutter priority'
        assert ecs.meter() is None
        self._cock_and_release(ecs)
        assert ecs.shutter.cocked

    def test_depress_lever_in_bright_light(self):
        camera = Camera()
        ecs = camera.exposure_control_system
        assert ecs.meter() == 16
        assert ecs.mode == 'Shutter priority'
        self._cock_and_release(ecs)
        assert not ecs.shutter.cocked

    def test_depress_lever_in_too_bright_light(self):
        camera = Camera()
        camera.environment.scene_luminosity = 8096
        ecs = camera.exposure_control_system
        assert ecs.meter() == 'Over'
        assert ecs.mode == 'Shutter priority'
        self._cock_and_release(ecs)
        assert ecs.shutter.cocked

    def test_depress_lever_in_dim_light(self):
        camera = Camera()
        camera.environment.scene_luminosity = 256
        ecs = camera.exposure_control_system
        assert pytest.approx(ecs.meter()) == 4
        assert ecs.mode == 'Shutter priority'
        self._cock_and_release(ecs)
        assert not ecs.shutter.cocked

    def test_depress_lever_in_too_dim_light(self):
        camera = Camera()
        camera.environment.scene_luminosity = 32
        ecs = camera.exposure_control_system
        assert ecs.meter() == 'Under'
        assert ecs.mode == 'Shutter priority'
        self._cock_and_release(ecs)
        assert ecs.shutter.cocked
def test():
    """Monte-Carlo check of sample_attack across validator-set sizes: prints the
    attacker-share standard deviation and the top 0.1 percentile share."""
    params = ((32, 32, 1024), (32, 256, 1024), (32, 2048, 1024), (32, 16384, 1024), (32, 131072, 1024))
    for _min, _max, _sam in params:
        print(f'Testing: min={_min} max={_max} sample_size={_sam}')
        shares = [sample_attack(_min, _max, _sam, (1 / 3)) for _ in range(10000)]
        print('Attacker share standard deviation: {}'.format(standev(shares)))
        top_milli = sorted(shares)[-10]
        print(f'Attacker top 0.1 percentile share: {top_milli}')
class OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Generated Highcharts option wrapper: pitch mapping for xrange-series
    sonification default instrument options.

    NOTE(review): each name below appears twice (getter, then setter). In the
    generated source these pairs are presumably decorated with @property /
    @<name>.setter, which is not visible in this chunk — confirm upstream.
    """
    def mapFunction(self):
        # Getter: mapping function; None defers to the library default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter counterpart of mapFunction.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property driving the pitch; defaults to 'y'.
        return self._config_get('y')

    def mapTo(self, text: str):
        # Setter counterpart of mapTo.
        self._config(text, js_type=False)

    def max(self):
        # Getter: highest note; defaults to 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        # Setter counterpart of max.
        self._config(text, js_type=False)

    def min(self):
        # Getter: lowest note; defaults to 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        # Setter counterpart of min.
        self._config(text, js_type=False)

    def scale(self):
        # Getter: musical scale to constrain notes to; None = chromatic.
        return self._config_get(None)

    def scale(self, value: Any):
        # Setter counterpart of scale.
        self._config(value, js_type=False)

    def within(self):
        # Getter: range the mapping is computed within; defaults to 'yAxis'.
        return self._config_get('yAxis')

    def within(self, text: str):
        # Setter counterpart of within.
        self._config(text, js_type=False)
class RallyError(Exception):
    """Base error type carrying an optional chained `cause` exception."""

    def __init__(self, message, cause=None):
        super().__init__(message)
        self.message = message
        self.cause = cause

    def full_message(self):
        """Render this error and every nested cause, one per line, indented by depth."""
        parts = [str(self.message)]
        depth = 0
        node = self
        # Walk the cause chain; plain exceptions (no `cause`) terminate it.
        while getattr(node, 'cause', None):
            depth += 1
            node = node.cause
            text = node.message if hasattr(node, 'message') else str(node)
            parts.append('\n%s%s' % ('\t' * depth, text))
        return ''.join(parts)
class MockDelftModelWrapper():
    """Test double that makes a DELFT sequence model emit predetermined labels.

    Labels are registered per layout token, keyed by object identity (id()),
    and fall back to 'O' (outside) for unregistered tokens.
    """
    def __init__(self, model_wrapper: Model):
        self._model_wrapper = model_wrapper
        # id(LayoutToken) -> label; populated via the update_* helpers.
        self._label_by_layout_token: Dict[(LayoutTokenId, str)] = {}
        self._default_label = 'O'
        # Replace the lazily-created model implementation with a mock and
        # redirect label production to our canned lookup.
        model_wrapper._lazy_model_impl._value = MagicMock(name='model_impl')
        model_wrapper._iter_flat_label_model_data_lists_to = self._iter_flat_label_model_data_lists_to

    def update_label_by_layout_tokens(self, label_by_layout_token: Mapping[(LayoutTokenId, str)]):
        """Register expected labels keyed by layout-token identity."""
        self._label_by_layout_token.update(label_by_layout_token)

    def update_label_by_layout_block(self, layout_block: LayoutBlock, label: str):
        """Register the same label for every token in `layout_block`."""
        self.update_label_by_layout_tokens(get_label_by_layout_token_for_block(layout_block, label))

    def _iter_flat_label_model_data_lists_to(self, model_data_list_iterable: Iterable[Sequence[LayoutModelData]], item_factory: Callable[([str, LayoutModelData], T)]) -> Iterable[Union[(T, NewDocumentMarker)]]:
        """Yield item_factory(label, model_data) for each datum, inserting
        NEW_DOCUMENT_MARKER between consecutive documents."""
        for (index, model_data_list) in enumerate(model_data_list_iterable):
            if (index > 0):
                (yield NEW_DOCUMENT_MARKER)
            for model_data in model_data_list:
                if model_data.layout_token:
                    label = self._label_by_layout_token.get(id(model_data.layout_token), self._default_label)
                    LOGGER.debug('id(layout_token)=%r, label=%r', id(model_data.layout_token), label)
                else:
                    # Line-level data: use the label registered for the line's first token.
                    assert model_data.layout_line
                    first_layout_token = model_data.layout_line.tokens[0]
                    label = self._label_by_layout_token.get(id(first_layout_token), self._default_label)
                    LOGGER.debug('id(first_layout_token)=%r, label=%r', id(first_layout_token), label)
                (yield item_factory(label, model_data))
class ProgressButtonControl(PlaylistButtonControl):
    """Playlist button whose label is replaced by a seekable playback progress bar.

    Mouse events received by the button are re-targeted at the embedded
    progress bar (with coordinates translated), so clicking or dragging
    anywhere on the button seeks; middle clicks are rewritten to primary
    clicks before forwarding.
    """
    name = 'progress_button'
    title = _('Progress button')
    description = _('Playback progress and access to the current playlist')
    __gsignals__ = {}

    def __init__(self):
        PlaylistButtonControl.__init__(self)
        self.set_name('progressbutton')
        self.add_events(Gdk.EventMask.POINTER_MOTION_MASK)
        self.progressbar = SeekProgressBar(player.PLAYER)
        self.progressbar.set_size_request((- 1), 1)
        self.progressbar.formatter = ProgressButtonFormatter()
        # Suppress the bar's own text rendering; the button draws its label.
        self.progressbar.set_text = (lambda *a: None)
        # Swap the plain label widget for the progress bar.
        gtk_widget_replace(self.label, self.progressbar)
        self.label = self.progressbar
        if (player.PLAYER.current is not None):
            # Sync the bar with a track already playing at construction time.
            self.progressbar.on_playback_track_start('playback_track_start', player.PLAYER, player.PLAYER.current)
        self.tooltip = TrackToolTip(self, player.PLAYER)
        self.tooltip.set_auto_update(True)

    def destroy(self):
        # Tear down the tooltip before the base control.
        self.tooltip.destroy()
        PlaylistButtonControl.destroy(self)

    def do_button_press_event(self, event):
        # Primary clicks keep the normal playlist-button behaviour; middle
        # clicks are rewritten to primary and forwarded to the progress bar.
        if (event.button == Gdk.BUTTON_PRIMARY):
            PlaylistButtonControl.do_button_press_event(self, event)
        elif (event.button == Gdk.BUTTON_MIDDLE):
            event = event.copy()
            event.button = Gdk.BUTTON_PRIMARY
            (x, y) = self.translate_coordinates(self.progressbar, int(event.x), int(event.y))
            (event.x, event.y) = (float(x), float(y))
            self.progressbar.emit('button-press-event', event)

    def do_button_release_event(self, event):
        # Mirror of do_button_press_event for the release half of the click.
        if (event.button == Gdk.BUTTON_PRIMARY):
            PlaylistButtonControl.do_button_release_event(self, event)
        elif (event.button == Gdk.BUTTON_MIDDLE):
            event = event.copy()
            event.button = Gdk.BUTTON_PRIMARY
            (x, y) = self.translate_coordinates(self.progressbar, int(event.x), int(event.y))
            (event.x, event.y) = (float(x), float(y))
            self.progressbar.emit('button-release-event', event)

    def do_motion_notify_event(self, event):
        # Forward pointer motion so drag-to-seek works across the whole button.
        event = event.copy()
        (x, y) = self.translate_coordinates(self.progressbar, int(event.x), int(event.y))
        (event.x, event.y) = (float(x), float(y))
        self.progressbar.emit('motion-notify-event', event)

    def do_leave_notify_event(self, event):
        # Forward leave events so the bar can reset its hover state.
        event = event.copy()
        (x, y) = self.translate_coordinates(self.progressbar, int(event.x), int(event.y))
        (event.x, event.y) = (float(x), float(y))
        self.progressbar.emit('leave-notify-event', event)
class TestId(util.TestCase):
    """CSS id-selector tests, focused on escaped numeric ids (`#\\31` == id "1")."""
    # NOTE(review): the anchor's href value looks truncated in this source
    # ('href=" </p>'); verify MARKUP against the upstream test suite.
    MARKUP = '\n    <div>\n    <p>Some text <span id="1"> in a paragraph</span>.\n    <a id="2" href=" </p>\n    </div>\n    '

    def test_id(self):
        # '\\31' is the CSS escape for the character '1'.
        self.assert_selector(self.MARKUP, '#\\31', ['1'], flags=util.HTML)

    def test_tag_and_id(self):
        # Combined type + id selector.
        self.assert_selector(self.MARKUP, 'a#\\32', ['2'], flags=util.HTML)

    def test_malformed_id(self):
        # An id selector must not be immediately followed by a bare '.'.
        self.assert_raises('td#.some-class', SelectorSyntaxError)
class OptionSeriesStreamgraphSonificationTracksMappingTremolo(Options):
    """Generated Highcharts option wrapper: tremolo mapping for streamgraph
    sonification tracks. NOTE(review): these accessors are presumably
    @property-decorated in the generated source — the decorators are not
    visible in this chunk.
    """
    def depth(self) -> 'OptionSeriesStreamgraphSonificationTracksMappingTremoloDepth':
        # Sub-configuration controlling tremolo depth (volume variation amount).
        return self._config_sub_data('depth', OptionSeriesStreamgraphSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesStreamgraphSonificationTracksMappingTremoloSpeed':
        # Sub-configuration controlling tremolo speed (variation rate).
        return self._config_sub_data('speed', OptionSeriesStreamgraphSonificationTracksMappingTremoloSpeed)
_type(ofproto.OFPTFPT_MATCH)
_type(ofproto.OFPTFPT_WILDCARDS)
_type(ofproto.OFPTFPT_WRITE_SETFIELD)
_type(ofproto.OFPTFPT_WRITE_SETFIELD_MISS)
_type(ofproto.OFPTFPT_APPLY_SETFIELD)
_type(ofproto.OFPTFPT_APPLY_SETFIELD_MISS)
class OFPTableFeaturePropOxm(OFPTableFeatureProp):
    """OpenFlow table-feature property carrying a list of OXM ids
    (match / wildcard / set-field capability properties)."""
    def __init__(self, type_=None, length=None, oxm_ids=None):
        # Normalise a missing list to a fresh one (avoids a shared default).
        oxm_ids = (oxm_ids if oxm_ids else [])
        super(OFPTableFeaturePropOxm, self).__init__(type_, length)
        self.oxm_ids = oxm_ids

    # NOTE(review): presumably a @classmethod — the decorator is not visible
    # in this chunk; `cls` as the first parameter suggests it was stripped.
    def parser(cls, buf):
        """Parse the property payload into a list of OFPOxmId instances."""
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            (i, rest) = OFPOxmId.parse(rest)
            ids.append(i)
        return cls(oxm_ids=ids)

    def serialize_body(self):
        """Serialise all OXM ids back into the property body."""
        bin_ids = bytearray()
        for i in self.oxm_ids:
            bin_ids += i.serialize()
        return bin_ids
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class ConvBiasAddHardswishTestCase(unittest.TestCase):
    """Compare AIT's fused conv2d_bias_add_hardswish kernel against a PyTorch
    reference: Y = hardswish(conv2d(X, W, pad=1) + B + R)."""
    def _test_conv_bias_add_hardswish(self, batch=4, copy_op=False, test_name='conv2d_bias_add_hardswish', dtype='float16'):
        """Build and run the fused op; AIT tensors are NHWC while the PyTorch
        reference computes in NCHW, hence the permutes below."""
        target = detect_target()
        (CO, HH, WW, CI) = (256, 28, 28, 128)
        X = Tensor(shape=[IntImm(batch), HH, WW, CI], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[CO, 3, 3, CI], dtype=dtype, name='input_1', is_input=True)
        B = Tensor(shape=[CO], dtype=dtype, name='input_2', is_input=True)
        R = Tensor(shape=[IntImm(batch), HH, WW, CO], dtype=dtype, name='input_3', is_input=True)
        OP = ops.conv2d_bias_add_hardswish(stride=1, pad=1, dilate=1)
        if copy_op:
            # Exercise reconstructing the op from its own attribute dict.
            OP = ops.conv2d_bias_add_hardswish(**OP._get_op_attributes())
        Y = OP(X, W, B, R)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', test_name)
        # PyTorch reference in NCHW layout.
        X_pt = get_random_torch_tensor([batch, CI, HH, WW], dtype=dtype)
        W_pt = get_random_torch_tensor([CO, CI, 3, 3], dtype=dtype)
        B_pt = get_random_torch_tensor([1, CO, 1, 1], dtype=dtype)
        R_pt = get_random_torch_tensor([batch, CO, HH, WW], dtype=dtype)
        Y_pt = torch.nn.functional.conv2d(X_pt, W_pt, padding=1)
        Y_pt = ((Y_pt + B_pt) + R_pt)
        Y_pt = hard_swish(Y_pt)
        # Convert reference inputs to NHWC for the AIT module.
        x = X_pt.permute((0, 2, 3, 1)).contiguous()
        w = W_pt.permute((0, 2, 3, 1)).contiguous()
        r = R_pt.permute((0, 2, 3, 1)).contiguous()
        inputs = {'input_0': x, 'input_1': w, 'input_2': B_pt.squeeze(), 'input_3': r}
        y = torch.empty_like(Y_pt).permute((0, 2, 3, 1)).contiguous()
        module.run_with_tensors(inputs, [y])
        y_transpose = y.permute(0, 3, 1, 2)
        # Looser tolerance for fp32 matches upstream practice here.
        if (dtype == 'float32'):
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.05, rtol=0.01))
        else:
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.01, rtol=0.01))

    # NOTE(review): the line below looks like a stripped parameterization
    # decorator (dtype per CUDA capability) — confirm against upstream.
    (**filter_test_cases_by_params({TestEnv.CUDA_LESS_THAN_SM80: ['float16'], TestEnv.CUDA_SM80: ['float32']}))
    def test_conv_bias_add_hardswish(self, dtype):
        # Run both the plain op and its attribute-copied reconstruction.
        self._test_conv_bias_add_hardswish(test_name=f'conv2d_bias_add_hardswish_{dtype}', dtype=dtype)
        self._test_conv_bias_add_hardswish(copy_op=True, test_name=f'conv2d_bias_add_hardswish_{dtype}_copy_op', dtype=dtype)
def solve_model(demands, parent_width=100, cutStyle='exactCuts'):
    """Solve a one-dimensional cutting-stock problem as an integer program.

    :param demands: list of [quantity, width] pairs, one per order.
    :param parent_width: width of each stock roll.
    :param cutStyle: 'minWaste' allows over-production (>= demand); any other
        value forces production to equal demand exactly.
    :return: (solver status, number of rolls used, per-roll cut layout,
        per-roll unused widths, solver wall time).
    """
    num_orders = len(demands)
    solver = newSolver('Cutting Stock', True)
    # k = (lower, upper) bound on roll count; b[i] caps copies of order i per roll.
    (k, b) = bounds(demands, parent_width)
    # y[j] = 1 iff roll j is used; x[i][j] = pieces of order i cut from roll j.
    y = [solver.IntVar(0, 1, f'y_{i}') for i in range(k[1])]
    x = [[solver.IntVar(0, b[i], f'x_{i}_{j}') for j in range(k[1])] for i in range(num_orders)]
    unused_widths = [solver.NumVar(0, parent_width, f'w_{j}') for j in range(k[1])]
    nb = solver.IntVar(k[0], k[1], 'nb')
    for i in range(num_orders):
        # Demand satisfaction per order.
        if (cutStyle == 'minWaste'):
            solver.Add((sum((x[i][j] for j in range(k[1]))) >= demands[i][0]))
        else:
            solver.Add((sum((x[i][j] for j in range(k[1]))) == demands[i][0]))
    for j in range(k[1]):
        # Width cut from roll j cannot exceed its capacity (zero when unused),
        # and the leftover is tracked in unused_widths[j].
        solver.Add((sum(((demands[i][1] * x[i][j]) for i in range(num_orders))) <= (parent_width * y[j])))
        solver.Add((((parent_width * y[j]) - sum(((demands[i][1] * x[i][j]) for i in range(num_orders)))) == unused_widths[j]))
        if (j < (k[1] - 1)):
            # Symmetry breaking: later rolls never carry more pieces than earlier ones.
            solver.Add((sum((x[i][j] for i in range(num_orders))) >= sum((x[i][(j + 1)] for i in range(num_orders)))))
    solver.Add((nb == solver.Sum((y[j] for j in range(k[1])))))
    # Index-weighted cost biases the solution toward using the earliest rolls.
    Cost = solver.Sum((((j + 1) * y[j]) for j in range(k[1])))
    solver.Minimize(Cost)
    status = solver.Solve()
    numRollsUsed = SolVal(nb)
    return (status, numRollsUsed, rolls(numRollsUsed, SolVal(x), SolVal(unused_widths), demands), SolVal(unused_widths), solver.WallTime())
class RangeTextEditor(TextEditor):
    """TraitsUI text editor that clamps the entered value to a [low, high] range,
    colouring the field to signal out-of-range or unparseable input."""
    # Range bounds and an optional post-parse evaluation hook; each may be
    # static (from the factory) or synced from a named trait on the object.
    low = Any()
    high = Any()
    evaluate = Any()

    def init(self, parent):
        """Wire low/high/evaluate from the factory or synced traits, then
        clamp the initial value into range."""
        TextEditor.init(self, parent)
        factory = self.factory
        # Static bounds apply only when no trait name is configured; otherwise
        # sync_value keeps the bound updated from the object's trait.
        if (not factory.low_name):
            self.low = factory.low
        if (not factory.high_name):
            self.high = factory.high
        self.evaluate = factory.evaluate
        self.sync_value(factory.evaluate_name, 'evaluate', 'from')
        self.sync_value(factory.low_name, 'low', 'from')
        self.sync_value(factory.high_name, 'high', 'from')
        if ((self.low is not None) and (self.low > self.value)):
            self.value = self.low
        elif ((self.high is not None) and (self.high < self.value)):
            # NOTE(review): an over-range value falls back to `low` when low
            # is set (not to `high`) — mirrors update_object below, but worth
            # confirming this asymmetry is intentional.
            self.value = (self.low if (self.low is not None) else self.high)

    def update_object(self):
        """Parse the widget text, clamp to range, and colour the field:
        OKColor on success, ErrorColor on clamp or parse failure."""
        try:
            # SECURITY: eval() of user-entered widget text — acceptable only
            # because TraitsUI runs in-process for a trusted local user.
            value = eval(str(self.control.text()))
            if (self.evaluate is not None):
                value = self.evaluate(value)
            if ((self.low is not None) and (self.low > value)):
                value = self.low
                col = ErrorColor
            elif ((self.high is not None) and (self.high < value)):
                value = (self.low if (self.low is not None) else self.high)
                col = ErrorColor
            else:
                col = OKColor
            self.value = value
        except Exception:
            # Unparseable input: leave the value unchanged, flag the field.
            col = ErrorColor
        if (self.control is not None):
            pal = QtGui.QPalette(self.control.palette())
            pal.setColor(QtGui.QPalette.ColorRole.Base, col)
            self.control.setPalette(pal)
class SpinnakerApp():
    """Create a Spinnaker application and look up provider-matching accounts."""

    def __init__(self, provider, pipeline_config=None, app=None, email=None, project=None, repo=None):
        """Capture application metadata.

        :param provider: cloud provider name used to filter Spinnaker accounts.
        :param pipeline_config: rendered pipeline configuration dict.
        :param app: application name.
        :param email: owner email.
        :param project: project/group name.
        :param repo: source repository name.
        """
        self.log = logging.getLogger(__name__)
        self.appinfo = {'app': app, 'email': email, 'project': project, 'repo': repo, 'provider': provider}
        self.appname = app
        # Bug fix: get_accounts() reads self.provider, which was never set and
        # raised AttributeError; keep it alongside appinfo['provider'].
        self.provider = provider
        self.pipeline_config = pipeline_config

    def create(self):
        """Render the application template and submit it as a Spinnaker task."""
        self.appinfo['accounts'] = ['default']
        self.log.debug('Pipeline Config\n%s', pformat(self.pipeline_config))
        self.log.debug('App info:\n%s', pformat(self.appinfo))
        jsondata = self.render_application_template()
        wait_for_task(jsondata)
        self.log.info('Successfully created %s application', self.appname)
        return jsondata

    def render_application_template(self):
        """Render the app_data template with app info and instance links."""
        self.pipeline_config['instance_links'] = self.retrieve_instance_links()
        jsondata = get_template(template_file='infrastructure/app_data.json.j2', appinfo=self.appinfo, pipeline_config=self.pipeline_config)
        return jsondata

    def retrieve_instance_links(self):
        """Merge the default LINKS with pipeline-specific instance links."""
        instance_links = copy.copy(LINKS)
        self.log.debug('Default instance links: %s', instance_links)
        instance_links.update(self.pipeline_config['instance_links'])
        self.log.debug('Updated instance links: %s', instance_links)
        return instance_links

    def get_accounts(self):
        """Return all Spinnaker credentials whose type matches our provider.

        :raises ForemastError: when no account matches the provider.
        """
        uri = '/credentials'
        response = gate_request(uri=uri)
        assert response.ok, 'Failed to get accounts: {0}'.format(response.text)
        all_accounts = response.json()
        self.log.debug('Accounts in Spinnaker:\n%s', all_accounts)
        filtered_accounts = []
        for account in all_accounts:
            if (account['type'] == self.provider):
                filtered_accounts.append(account)
        if (not filtered_accounts):
            raise ForemastError('No Accounts matching {0}.'.format(self.provider))
        return filtered_accounts
class DetectFaceFromImageClient(ClientRequest):
    """HTTP client for the CompreFace face-detection endpoint (POST only)."""

    def __init__(self, api_key: str, domain: str, port: str):
        """Build the endpoint URL from domain, port and the detection API path."""
        super().__init__()
        self.client_url: str = DETECTION_API
        self.api_key: str = api_key
        self.url: str = domain + ':' + port + self.client_url

    def get(self):
        # Not supported by the detection endpoint.
        pass

    def post(self, image: str = '', options: ExpandedOptionsDict = {}):
        """POST request for detecting faces in an image.

        :param image: path to the image in the file system (bug fix: the
            default was `('' or bytes)`, which evaluates to the `bytes` type
            object rather than a usable value).
        :param options: dictionary with options for the server; each key is
            validated and appended to the query string. NOTE: the shared
            default dict is only read, never mutated, so it is safe here.
        :return: parsed JSON response from the server.
        """
        query_url: str = self.url + '?'
        for key in options.keys():
            check_fields_by_name(key, options[key])
            query_url += '&' + key + '=' + str(options[key])
        m = multipart_constructor(image)
        result = requests.post(query_url, data=m, headers={'Content-Type': m.content_type, 'x-api-key': self.api_key})
        return result.json()

    def put(self):
        # Not supported by the detection endpoint.
        pass

    def delete(self):
        # Not supported by the detection endpoint.
        pass
def get_endpoint_by_name(session, endpoint_name):
    """Fetch the Endpoint row named `endpoint_name`, creating it when absent.

    Timestamps on an existing row are converted to local time; the returned
    object is detached (expunged) from the session in both cases.
    """
    try:
        endpoint = session.query(Endpoint).filter(Endpoint.name == endpoint_name).one()
        endpoint.time_added = to_local_datetime(endpoint.time_added)
        endpoint.last_requested = to_local_datetime(endpoint.last_requested)
    except NoResultFound:
        endpoint = Endpoint(name=endpoint_name)
        session.add(endpoint)
        session.flush()
    session.expunge(endpoint)
    return endpoint
class HFModelStatsAdapter():
    """Adapter exposing HF-model usage statistics through the common stats interface."""

    @staticmethod
    def _stats():
        # All live counters come from the shared async-configuration stats object.
        return openai.AsyncConfiguration.get_stats()

    def consumed_tokens(self):
        return 0

    def num_queries(self):
        return 0

    def num_generate_calls(self):
        return self._stats().sum_batch_size

    def billable_tokens(self):
        return self._stats().tokens

    def reset_stats(self):
        self._stats().reset()

    def cost_estimate(self, model):
        return self._stats().cost_estimate(model)
class remove_test_case(unittest.TestCase):
    """_remove must accept a single key, multiple key args, or a list of keys."""

    def test_remove_with_single_key(self):
        mapping = {'a': 1, 'b': 2, 'c': 3}
        _remove(mapping, 'c')
        self.assertEqual(mapping, {'a': 1, 'b': 2})

    def test_remove_with_multiple_keys_as_args(self):
        mapping = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
        _remove(mapping, 'c', 'd', 'e')
        self.assertEqual(mapping, {'a': 1, 'b': 2})

    def test_remove_with_multiple_keys_as_list(self):
        mapping = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
        _remove(mapping, ['c', 'd', 'e'])
        self.assertEqual(mapping, {'a': 1, 'b': 2})
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.