code stringlengths 281 23.7M |
|---|
class CreatePolicy(SchemaBase):
    """Request schema for creating an access-control policy rule.

    NOTE(review): `_validator('method')` appeared as a bare call — the
    stripped `@` of a pydantic validator decorator — and is restored here.
    """

    # Subject identifier (uuid or role key — description was stripped; confirm).
    sub: str = Field(..., description='uuid / ')
    # API path the rule applies to.
    path: str = Field(..., description='api ')
    # HTTP method; defaults to GET.
    method: MethodType = Field(default=MethodType.GET, description='')

    @validator('method')
    def method_validator(cls, v):
        # Stored methods must be upper-case so they compare consistently
        # against incoming request methods.
        if not v.isupper():
            raise ValueError('method must be upper-case')
        return v
class DevicePanel(panel.Panel):
    """Panel for a removable media device: wraps a ReceptiveCollectionPanel
    showing the device's collection and re-emits its signals."""

    # Signals forwarded from the inner collection panel.
    __gsignals__ = {'append-items': (GObject.SignalFlags.RUN_LAST, None, (object, bool)), 'replace-items': (GObject.SignalFlags.RUN_LAST, None, (object,)), 'queue-items': (GObject.SignalFlags.RUN_LAST, None, (object,)), 'collection-tree-loaded': (GObject.SignalFlags.RUN_LAST, None, ())}
    ui_info = ('device.ui', 'DevicePanel')

    def __init__(self, parent, main, device, name):
        """Build the panel for *device*, wiring the inner collection panel's
        signals through to this panel's own signals."""
        label = device.get_name()
        panel.Panel.__init__(self, parent, name, label)
        self.device = device
        self.main = main
        self.notebook = self.builder.get_object('device_notebook')
        self.collectionpanel = ReceptiveCollectionPanel(parent, collection=device.collection, name=name, label=label)
        # Route track-adds through this panel so they transfer to the device.
        self.collectionpanel.add_tracks_func = self.add_tracks_func
        # Forward signals, dropping the sender object (e[0]).
        self.collectionpanel.connect('append-items', (lambda *e: self.emit('append-items', *e[1:])))
        self.collectionpanel.connect('replace-items', (lambda *e: self.emit('replace-items', *e[1:])))
        self.collectionpanel.connect('queue-items', (lambda *e: self.emit('queue-items', *e[1:])))
        self.collectionpanel.connect('collection-tree-loaded', (lambda *e: self.emit('collection-tree-loaded')))

    def add_tracks_func(self, tracks):
        """Queue *tracks* for transfer to the device with a progress monitor."""
        self.device.add_tracks(tracks)
        thread = DeviceTransferThread(self.device)
        # Reload the collection tree once the transfer completes.
        thread.connect('done', (lambda *e: self.load_tree()))
        self.main.controller.progress_manager.add_monitor(thread, (_('Transferring to %s...') % self.name), 'drive-harddisk')

    def get_panel(self):
        # Delegate to the wrapped collection panel.
        return self.collectionpanel.get_panel()

    def add_panel(self, child, name):
        """Append *child* as a new notebook page titled *name*."""
        label = Gtk.Label(label=name)
        self.notebook.append_page(child, label)

    def load_tree(self, *args):
        self.collectionpanel.load_tree(*args)
def test_traversal_overriding():
    """Overriding a DependenciesContainer exposes the overriding container
    and all of its children through traverse()."""
    inner_a = providers.Provider()
    inner_b = providers.Provider()
    overriding = providers.DependenciesContainer(provider1=inner_a, provider2=inner_b)

    container = providers.DependenciesContainer()
    container.override(overriding)

    traversed = list(container.traverse())

    assert len(traversed) == 5
    for expected in (inner_a, inner_b, overriding, container.provider1, container.provider2):
        assert expected in traversed
class Net2HTML(object):
    """Cross-links a SNAKES Petri net, its GraphViz rendering and the ABCD
    source: builds an HTML tree of buffers/actions plus an annotated SVG."""

    def __init__(self, net, gv, abcd):
        self.gv = gv
        self.abcd = abcd
        self.tree = Tree()
        # GraphViz node id -> set of ABCD span ids ('R'/'L' prefix marks
        # pre/post arcs of control-flow places).
        self.n2a = collections.defaultdict(set)
        # GraphViz node id -> tree entry id.
        self.n2t = {}
        snk = net.label('snakes')
        # Per-span counter used to mint unique tree entry ids.
        self.count = collections.defaultdict(int)
        for place in net.place():
            nid = gv.nodemap[place.name]
            if place.status in (snk.entry, snk.internal, snk.exit):
                # Control-flow place: link it to its surrounding transitions.
                for (char, trans) in ([('R', net.transition(t)) for t in place.pre]
                                      + [('L', net.transition(t)) for t in place.post]):
                    span = abcd[trans]
                    self.n2a[nid].add(char + span.id)
            else:
                # Data place: shown as a 'buffer' entry in the tree.
                self.addtree(0, 'buffer', place)
        for trans in net.transition():
            self.addtree(10, 'action', trans)

    def addtree(self, weight, kind, node):
        """Insert *node* into the tree at the path given by its instance
        labels, registering net <-> abcd <-> tree cross references."""
        nid = self.gv.nodemap[node.name]
        aid = self.abcd[node]
        tid = aid.copy(id=(('T%X' % self.count[aid.id]) + aid.id), tree=[], abcd=[aid.id], net=[nid])
        self.count[aid.id] += 1
        aid.tree.add(tid.id)
        aid.net.add(nid)
        self.n2a[nid].add(aid.id)
        self.n2t[nid] = tid.id
        pos = self.tree
        path = node.label('path')
        try:
            inst = node.label('instances')
        except Exception:
            # Fix: was a bare `except:` (also caught KeyboardInterrupt/SystemExit).
            # NOTE(review): with this fallback each entry is None, and the tuple
            # unpacking below would raise TypeError — confirm this path is ever taken.
            inst = [None] * len(path)
        for (name, (_, srow, scol, _, _)) in zip(path, inst):
            a = self.abcd[('I', srow, scol)]
            t = a.copy(id=(('T%X' % self.count[a.id]) + a.id), tree=[], abcd=[a.id], net=[])
            self.count[a.id] += 1
            a.tree.add(t.id)
            pos = pos[((20, srow, scol), 'instance', TreeInfo(t, name))]
        # Strip the instance-path prefix (path parts + separators) from the name.
        prefix = sum(len(p) for p in path) + len(path)
        (srow, scol, _, _) = node.label('srcloc')
        pos[((weight, srow, scol), kind, (node, node.name[prefix:]))] = tid

    def _tree(self, tree, indent=''):
        """Yield HTML lines for the nested tree, sorted by (weight, row, col)."""
        yield indent + '<ul>'
        for ((_, kind, data), child) in sorted(tree.items()):
            if kind == 'instance':
                yield indent + ('<li>%s%s</span>' % tuple(data))
                for item in self._tree(child, indent + ' '):
                    yield item
                yield indent + '</li>'
            else:
                (node, name) = data
                if kind == 'buffer':
                    content = ("<span class='kw'>buffer</span> <span class='name'>%s</span> = <span class='content'>%s</span>" % (name, node.tokens))
                    yield indent + ('<li>%s%s</span></li>' % (child, content))
                elif kind == 'action':
                    content = name
                    yield ((indent + "<li>%s%s</span><ul class='modes'>") + '</ul></li>') % (child, content)
                else:
                    raise ValueError('unexpected data %r' % kind)
        yield indent + '</ul>'

    def html(self):
        """Render the tree into the page template."""
        return template_tree % {'tree': '\n'.join(self._tree(self.tree))}

    def svg(self):
        """Render the net via GraphViz and decorate SVG nodes with data-abcd /
        data-tree attributes used for interactive highlighting."""
        with tempfile.NamedTemporaryFile(suffix='.svg') as tmp:
            self.gv.render(tmp.name)
            with codecs.open(tmp.name, 'r', 'utf-8') as infile:
                svg = infile.read()
        for (r, s) in _svgclean:
            svg = r.sub(s, svg)
        for (node, abcd) in self.n2a.items():
            abcd = ', '.join(('#' + t) for t in abcd)
            if node in self.n2t:
                svg = svg.replace((' id="%s" ' % node), (' id="%s" data-abcd="%s" data-tree="#%s" ' % (node, abcd, self.n2t[node])))
            else:
                svg = svg.replace((' id="%s" ' % node), (' id="%s" data-abcd="%s" ' % (node, abcd)))
        return u"<div class='petrinet'>%s</div>" % svg
def test_wifiicon_deprecated_font_colour(caplog):
    """`font_colour` still works but logs a deprecation warning and is
    mapped onto `foreground`."""
    widget = qtile_extras.widget.network.WiFiIcon(font_colour='ffffff')
    expected_record = (
        'libqtile',
        logging.WARNING,
        'The use of `font_colour` is deprecated. Please update your config to use `foreground` instead.',
    )
    assert caplog.record_tuples[0] == expected_record
    assert widget.foreground == 'ffffff'
class OptionSeriesBoxplotDragdropGuideboxDefault(Options):
    """Style options for the guide box shown while dragging boxplot points.

    NOTE(review): each getter/setter pair below was clearly authored as a
    @property / @<name>.setter pair (the second bare def would otherwise
    silently shadow the first); the stripped decorators are restored.
    """

    @property
    def className(self):
        # CSS class name of the guide box.
        return self._config_get('highcharts-drag-box-default')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        # Fill color of the guide box.
        return self._config_get('rgba(0, 0, 0, 0.1)')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        # Mouse cursor shown while dragging.
        return self._config_get('move')

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        # Border color of the guide box.
        return self._config_get('#888')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        # Border width in pixels.
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        # Stacking order of the guide box.
        return self._config_get(900)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsScatterSonificationTracksMappingTremoloDepth(Options):
    """Tremolo-depth mapping options for scatter series sonification tracks.

    NOTE(review): the getter/setter pairs were authored as
    @property / @<name>.setter (otherwise the second def shadows the first);
    the stripped decorators are restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_auth_token(fb_auth_token, fb_user_id):
    """Exchange Facebook credentials for a Tinder API token.

    Returns the token string on success; on any failure returns a dict
    with an 'error' key. Side effect: installs the token into the shared
    `headers` and `get_headers` dicts.
    """
    if 'error' in fb_auth_token:
        return {'error': 'could not retrieve fb_auth_token'}
    if 'error' in fb_user_id:
        return {'error': 'could not retrieve fb_user_id'}

    login_url = config.host + '/v2/auth/login/facebook'
    payload = json.dumps({'token': fb_auth_token, 'facebook_id': fb_user_id})
    response = requests.post(login_url, headers=headers, data=payload)
    try:
        tinder_auth_token = response.json()['data']['api_token']
        headers.update({'X-Auth-Token': tinder_auth_token})
        get_headers.update({'X-Auth-Token': tinder_auth_token})
        print('You have been successfully authorized!')
        return tinder_auth_token
    except Exception as exc:
        print(exc)
        return {'error': 'Something went wrong. Sorry, but we could not authorize you.'}
def _test_special_outputs(check_tensor, check_all, test_name, capfd: pytest.CaptureFixture[str]):
    """Compile a divide-by-zero graph and verify that inf/-inf/nan values
    are reported on stdout when output checking is enabled."""
    inp = Tensor(shape=[IntImm(1), IntImm(3)], dtype='float16', name='input0', is_input=True)
    divide = ops.elementwise(FuncEnum.DIV)
    out = divide(inp, 0.0)
    out._attrs['is_output'] = True
    out._attrs['name'] = 'output0'
    out._attrs['check_outputs'] = check_tensor

    debug_settings = AITDebugSettings(check_all_outputs=check_all)
    module = compile_model(out, detect_target(), './tmp', test_name, debug_settings=debug_settings)

    # 1/1, -2/0 and 0/0 produce inf, -inf and nan respectively.
    input_pt = torch.Tensor([[1.0, (- 2.0), 0.0]]).cuda().half()
    output_pt = torch.empty_like(input_pt)
    module.run_with_tensors([input_pt], [output_pt])

    captured_stdout, _ = capfd.readouterr()
    assert 'inf, -inf, nan' in captured_stdout
# NOTE(review): `_models('spacy.Davinci.v2')` appeared as a bare call — the
# stripped `@` of a registry decorator — and is restored below.
@_models('spacy.Davinci.v2')
def openai_davinci_v2(
    config: Dict[Any, Any] = SimpleFrozenDict(max_tokens=500, temperature=_DEFAULT_TEMPERATURE),
    name: Literal['davinci'] = 'davinci',
    strict: bool = OpenAI.DEFAULT_STRICT,
    max_tries: int = OpenAI.DEFAULT_MAX_TRIES,
    interval: float = OpenAI.DEFAULT_INTERVAL,
    max_request_time: float = OpenAI.DEFAULT_MAX_REQUEST_TIME,
    endpoint: Optional[str] = None,
) -> Callable[[Iterable[str]], Iterable[str]]:
    """Return an OpenAI client for the legacy `davinci` completion model,
    targeting the non-chat endpoint unless *endpoint* is given."""
    return OpenAI(name=name, endpoint=(endpoint or Endpoints.NON_CHAT.value), config=config, strict=strict, max_tries=max_tries, interval=interval, max_request_time=max_request_time)
def main():
    """Create an event source mapping trigger.

    Fix: the parser was built with `description=main.__doc__` while the
    function had no docstring, so the CLI help had no description; and the
    bare `assert` would be stripped under `python -O` — replaced with an
    explicit failure.
    """
    logging.basicConfig(format=LOGGING_FORMAT)
    parser = argparse.ArgumentParser(description=main.__doc__)
    add_debug(parser)
    add_app(parser)
    add_env(parser)
    add_region(parser)
    args = parser.parse_args()
    # Root logger for this package follows the requested debug level.
    logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
    if not create_event_source_mapping_trigger(**vars(args)):
        raise SystemExit('Failed to create event source mapping trigger')
class TestVariableDecode(unittest.TestCase):
    """Tests for variable_decode()'s list ('a-0') and dict ('a.b') key
    flattening rules, including malformed keys left untouched."""

    def _check(self, flat, nested):
        # Helper: decoding *flat* must yield exactly *nested*.
        self.assertEqual(nested, variable_decode(flat))

    def test_list_decode(self):
        self._check({'a-0': 'a', 'a-1': 'b', 'a-2': 'c'}, {'a': ['a', 'b', 'c']})

    def test_list_decode_non_int(self):
        # Keys whose index is not an integer are passed through verbatim.
        self._check({'a-0': 'a', 'a-a': 'b', 'a-2': 'c'}, {'a': ['a', 'c'], 'a-a': 'b'})

    def test_list_decode_double_dash(self):
        self._check({'a-0': 'a', 'a-1-2': 'b', 'a-3': 'c'}, {'a': ['a', 'c'], 'a-1-2': 'b'})

    def test_list_decode_non_int_nested(self):
        self._check({'a-0.name': 'a', 'a-a.name': 'b', 'a-2.name': 'c'}, {'a': [{'name': 'a'}, {'name': 'c'}], 'a-a': {'name': 'b'}})

    def test_dict_decode(self):
        self._check({'a.a': 'a', 'a.b': 'b', 'a.c': 'c'}, {'a': {'a': 'a', 'b': 'b', 'c': 'c'}})

    def test_list_dict(self):
        self._check({'a-0.name': 'a', 'a-1.name': 'b', 'a-2.name': 'c'}, {'a': [{'name': 'a'}, {'name': 'b'}, {'name': 'c'}]})

    def test_dict_list_dict(self):
        self._check({'a.b-0.name': 'a', 'a.b-1.name': 'b', 'a.b-2.name': 'c'}, {'a': {'b': [{'name': 'a'}, {'name': 'b'}, {'name': 'c'}]}})

    def test_list_dict_bad_key(self):
        self._check({'a-0.good': 'a', 'a.0.bad': 'b', 'a-1.good': 'c'}, {'a': [{'good': 'a'}, {'good': 'c'}, {'bad': 'b'}]})
# NOTE(review): the decorator names were stripped by extraction, leaving bare
# tuples (and the syntactically invalid `(version=(1, 1, 5))`). The argument
# lists unambiguously match click option declarations, so the stack is
# restored as @click.command() / @click.option(...). The decorator carrying
# version=(1, 1, 5) could not be recovered and is kept as this comment:
#   (version=(1, 1, 5))
@click.command()
@click.option('-k', '--size', help='Plot size', type=int, default=32, show_default=True)
@click.option('--override-k', help='Force size smaller than 32', default=False, show_default=True, is_flag=True)
@click.option('-n', '--num', help='Number of plots or challenges', type=int, default=1, show_default=True)
@click.option('-b', '--buffer', help='Megabytes for sort/plot buffer', type=int, default=3389, show_default=True)
@click.option('-r', '--num_threads', help='Number of threads to use', type=int, default=2, show_default=True)
@click.option('-u', '--buckets', help='Number of buckets', type=int, default=128, show_default=True)
@click.option('-a', '--alt_fingerprint', type=int, default=None, help='Enter the alternative fingerprint of the key you want to use')
@click.option('-c', '--pool_contract_address', type=str, default=None, help='Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None')
@click.option('-f', '--farmer_public_key', help='Hex farmer public key', type=str, default=None)
@click.option('-p', '--pool_public_key', help='Hex public key of pool', type=str, default=None)
@click.option('-t', '--tmp_dir', help='Temporary directory for plotting files', type=click.Path(), default=pathlib.Path('.'), show_default=True)
@click.option('-2', '--tmp2_dir', help='Second temporary directory for plotting files', type=click.Path(), default=None)
@click.option('-d', '--final_dir', help='Final directory for plots (relative or absolute)', type=click.Path(), default=pathlib.Path('.'), show_default=True)
@click.option('-i', '--plotid', help='PlotID in hex for reproducing plots (debugging only)', type=str, default=None)
@click.option('-m', '--memo', help='Memo in hex for reproducing plots (debugging only)', type=str, default=None)
@click.option('-e', '--nobitfield', help='Disable bitfield', default=False, is_flag=True)
@click.option('-x', '--exclude_final_dir', help='Skips adding [final dir] to harvester for farming', default=False, is_flag=True)
def _cli_1_1_5() -> None:
    """Stub command carrying the v1.1.5 plot-creation option set; the body is
    intentionally empty."""
    pass
class FingerJointBase(ABC):
    """Shared geometry helpers for finger-joint edges."""

    def calcFingers(self, length: float, bedBolts) -> tuple[int, float]:
        """Return (finger count, leftover length) for an edge of *length*.

        Uses the `space`, `finger`, `surroundingspaces` and `thickness`
        settings; *bedBolts*, when given, may reduce the finger count.
        """
        space, finger = self.settings.space, self.settings.finger
        usable = length - (self.settings.surroundingspaces - 1) * space
        fingers = int(usable // (space + finger))
        # A very short edge still gets one finger if it is thick enough.
        if fingers == 0 and length > finger + 1.0 * self.settings.thickness:
            fingers = 1
        if not finger:
            fingers = 0
        if bedBolts:
            fingers = bedBolts.numFingers(fingers)
        leftover = length - fingers * (space + finger) + space
        if fingers <= 0:
            fingers = 0
            leftover = length
        return fingers, leftover

    def fingerLength(self, angle: float) -> tuple[float, float]:
        """Return (finger length, space recess) for a joint at *angle* degrees."""
        if angle >= 90 or angle <= -90:
            # Right angle or sharper: full material thickness plus play.
            return self.settings.thickness + self.settings.extra_length, 0.0
        if angle < 0:
            # Shallow negative angle: project thickness along the joint.
            return (math.sin(math.radians(-angle)) * self.settings.thickness
                    + self.settings.extra_length), 0
        # Obtuse joint: finger length follows the bisector of the angle.
        half = 90 - (180 - angle) / 2.0
        fingerlength = self.settings.thickness * math.tan(math.radians(half))
        rest = 90 - 2 * half
        spacerecess = -math.sin(math.radians(rest)) * fingerlength
        return fingerlength + self.settings.extra_length, spacerecess
# NOTE(review): the bare expressions below are the residue of stripped
# decorators (apparently a click command/argument/option stack plus
# context/project wrappers); they are kept byte-identical because the exact
# decorator names cannot be recovered from this view.
()
('type_', metavar='TYPE', type=click.Choice(list(faucet_apis_registry.supported_ids)), required=True)
('url', metavar='URL', type=str, required=False, default=None)
_option()
('--sync', is_flag=True, help='For waiting till the faucet has released the funds.')
_context
_aea_project
def generate_wealth(click_context: click.Context, type_: str, url: str, password: Optional[str], sync: bool) -> None:
    """Ask a test-net faucet of kind *type_* to send funds, delegating to
    `_try_generate_wealth` with the CLI context object."""
    ctx = cast(Context, click_context.obj)
    _try_generate_wealth(ctx, type_, url, sync, password)
class OptionPlotoptionsStreamgraphTooltip(Options):
    """Tooltip options for streamgraph series.

    NOTE(review): every getter/setter pair below was authored as
    @property / @<name>.setter (the second bare def would otherwise silently
    shadow the first); the stripped decorators are restored.
    """

    @property
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsStreamgraphTooltipDatetimelabelformats':
        # Nested per-time-unit label format options.
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsStreamgraphTooltipDatetimelabelformats)

    @property
    def distance(self):
        return self._config_get(16)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        return self._config_get(None)

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        return self._config_get(None)

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
class OptionYaxisDatetimelabelformats(Options):
    """Per-time-unit datetime label format sub-options for the y axis.

    Each accessor returns (lazily creating) the nested config object for
    that time unit via `_config_sub_data`.
    """

    def day(self) -> 'OptionYaxisDatetimelabelformatsDay':
        return self._config_sub_data('day', OptionYaxisDatetimelabelformatsDay)

    def hour(self) -> 'OptionYaxisDatetimelabelformatsHour':
        return self._config_sub_data('hour', OptionYaxisDatetimelabelformatsHour)

    def millisecond(self) -> 'OptionYaxisDatetimelabelformatsMillisecond':
        return self._config_sub_data('millisecond', OptionYaxisDatetimelabelformatsMillisecond)

    def minute(self) -> 'OptionYaxisDatetimelabelformatsMinute':
        return self._config_sub_data('minute', OptionYaxisDatetimelabelformatsMinute)

    def month(self) -> 'OptionYaxisDatetimelabelformatsMonth':
        return self._config_sub_data('month', OptionYaxisDatetimelabelformatsMonth)

    def second(self) -> 'OptionYaxisDatetimelabelformatsSecond':
        return self._config_sub_data('second', OptionYaxisDatetimelabelformatsSecond)

    def week(self) -> 'OptionYaxisDatetimelabelformatsWeek':
        return self._config_sub_data('week', OptionYaxisDatetimelabelformatsWeek)

    def year(self) -> 'OptionYaxisDatetimelabelformatsYear':
        return self._config_sub_data('year', OptionYaxisDatetimelabelformatsYear)
class OptionSeriesSankeySonificationContexttracksMappingTremoloDepth(Options):
    """Tremolo-depth mapping options for sankey sonification context tracks.

    NOTE(review): the getter/setter pairs were authored as
    @property / @<name>.setter (otherwise the second def shadows the first);
    the stripped decorators are restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): `_s3` is the residue of a stripped decorator (presumably an
# S3-mocking decorator such as moto's); kept byte-identical.
_s3
def test_download_log_files_and_skip_existing_files():
    """`download` fetches missing objects into the log dir but must not
    overwrite files that already exist locally."""
    with tempfile.TemporaryDirectory() as dirpath:
        given_a_bucket('some-bucket')
        given_an_object('some-bucket', TEST_LOG_KEY, 'some-file-content')
        given_an_object('some-bucket', TEST_LOG_KEY_EXISTING, 'some-file-content')
        # Pre-existing local file whose content differs from the bucket's.
        given_a_file(dirpath, TEST_LOG_KEY_EXISTING, 'some-content-already-existing')
        runner = CliRunner()
        result = runner.invoke(cli.root_group, args=['download', '--log-dir', dirpath, '--bucket', 'some-bucket', '--region', 'some-region-1', '--account-id', '000', '--prefix', 'some-prefix/', '--from', '2017-01-01', '--to', '2017-01-01'])
        assert (result.exit_code == 0)
        # Missing file was downloaded.
        assert (file_content(dirpath, TEST_LOG_KEY) == 'some-file-content')
        # Existing file kept its original content (skipped, not overwritten).
        assert (file_content(dirpath, TEST_LOG_KEY_EXISTING) == 'some-content-already-existing')
class OptionTheme(Options):
    """Theme options (mode, palette, monochrome sub-options).

    NOTE(review): the mode/palette getter/setter pairs were authored as
    @property / @<name>.setter (otherwise the second def shadows the first);
    the stripped decorators are restored.
    """

    @property
    def mode(self):
        return self._config_get()

    @mode.setter
    def mode(self, value):
        self._config(value)

    @property
    def palette(self):
        return self._config_get()

    @palette.setter
    def palette(self, value):
        self._config(value)

    @property
    def monochrome(self) -> OptionMonochrome:
        # Nested monochrome theme sub-options.
        return self._config_sub_data('monochrome', OptionMonochrome)
def test_pwm_phase(pwm: PWMGenerator, la: LogicAnalyzer):
    """A 25% phase offset between SQ1 and SQ2 shows up as a quarter-period
    interval between their rising edges."""
    freq_hz = 10000.0
    duty = 0.5
    phase_offset = 0.25
    pwm.generate(['SQ1', 'SQ2'], freq_hz, duty, phase_offset)
    time.sleep(0.1)
    interval = la.measure_interval(['LA1', 'LA2'], ['rising', 'rising'])
    # A negative reading means we caught the edges in the other order;
    # wrap it by one period.
    if interval < 0:
        interval += (freq_hz ** -1) / MICROSECONDS
    expected = (freq_hz ** -1) * phase_offset
    assert interval * MICROSECONDS == pytest.approx(expected, rel=RELTOL)
# Fix: the original began with a bare `.parametrize(...)` — the stripped
# remainder of a pytest mark decorator, which is a syntax error — restored
# as @pytest.mark.parametrize.
@pytest.mark.parametrize('media_type', ['nope/json'])
def test_unknown_media_type(asgi, media_type):
    """An unrecognized Content-Type raises HTTPUnsupportedMediaType with the
    standard 415 title and a per-media-type description."""
    client = _create_client_invalid_media(asgi, errors.HTTPUnsupportedMediaType)
    headers = {'Content-Type': media_type}
    # The resource swallows the error, so the simulated request still succeeds.
    assert (client.simulate_post('/', body=b'something', headers=headers).status_code == 200)
    title_msg = '415 Unsupported Media Type'
    description_msg = '{} is an unsupported media type.'.format(media_type)
    assert (client.resource.captured_error.value.title == title_msg)
    assert (client.resource.captured_error.value.description == description_msg)
class Solution():
    """LeetCode 843 'Guess the Word': repeatedly guess the candidate with the
    fewest zero-match peers and prune inconsistent words."""

    def match(self, w1, w2):
        """Number of positions at which *w1* and *w2* agree."""
        return sum(a == b for a, b in zip(w1, w2))

    def findSecretWord(self, wordlist, master):
        """Guess until master reports 6 matching positions (words are 6 long).

        Each round picks the word sharing zero positions with the fewest
        other candidates, then keeps only words consistent with the report.
        """
        matches = 0
        while matches < 6:
            zero_match_counts = collections.Counter(
                a for a, b in itertools.permutations(wordlist, 2)
                if self.match(a, b) == 0
            )
            guess = min(wordlist, key=lambda w: zero_match_counts[w])
            matches = master.guess(guess)
            wordlist = [w for w in wordlist if self.match(w, guess) == matches]
class EnumTests(unittest.TestCase):
    """Parser tests for `enum` declarations (empty, params, trailing comma)."""

    def _assert_parses(self, src, expected):
        # Helper: parsing *src* must yield exactly *expected*.
        self.assertEqual(parser.parse(src), expected)

    def test_empty(self):
        self._assert_parses('enum foo {\n};\n', [['enum', 'foo', [], []]])

    def test_one(self):
        self._assert_parses('enum foo {\n BAR = 1\n};\n', [['enum', 'foo', [], [['BAR', [], 1]]]])

    def test_params(self):
        self._assert_parses('enum foo(wire_type=uint32, bitmask=False, complete=False) {\n BAR = 1\n};\n', [['enum', 'foo', [['wire_type', 'uint32'], ['bitmask', 'False'], ['complete', 'False']], [['BAR', [], 1]]]])

    def test_multiple(self):
        self._assert_parses('enum foo {\n OFP_A = 1,\n OFP_B = 2,\n OFP_C = 3\n};\n', [['enum', 'foo', [], [['OFP_A', [], 1], ['OFP_B', [], 2], ['OFP_C', [], 3]]]])

    def test_trailing_comma(self):
        # A trailing comma after the last member is accepted.
        self._assert_parses('enum foo {\n OFP_A = 1,\n OFP_B = 2,\n OFP_C = 3,\n};\n', [['enum', 'foo', [], [['OFP_A', [], 1], ['OFP_B', [], 2], ['OFP_C', [], 3]]]])
def _get_deck(new_user_params: ExecutionParameters, ignore_jupyter: bool = False) -> typing.Union[str, 'IPython.core.display.HTML']:
    """Render the deck HTML; wrap it in an IPython HTML object when running
    inside Jupyter, unless *ignore_jupyter* is set.

    Bug fix: previously a failed IPython import fell through to
    `return HTML(raw_html)` and raised NameError; the plain HTML string is
    now returned in that case.
    """
    deck_map = {deck.name: deck.html for deck in new_user_params.decks}
    raw_html = get_deck_template().render(metadata=deck_map)
    if not ignore_jupyter and ipython_check():
        try:
            from IPython.core.display import HTML
        except ImportError:
            pass  # IPython unavailable: fall back to the raw string below
        else:
            return HTML(raw_html)
    return raw_html
class AuthenticationLog(SimpleEntity):
    """Records a login/logout action of a User at a given UTC datetime.

    NOTE(review): the bare ('user') / ('action') / ('date') expressions in
    the original were stripped SQLAlchemy @validates decorators; restored.
    """

    __auto_name__ = True
    __tablename__ = 'AuthenticationLogs'
    __mapper_args__ = {'polymorphic_identity': 'AuthenticationLog'}
    log_id = Column('id', Integer, ForeignKey('SimpleEntities.id'), primary_key=True)
    user_id = Column('uid', Integer, ForeignKey('Users.id'), nullable=False)
    user = relationship('User', primaryjoin='AuthenticationLogs.c.uid==Users.c.id', uselist=False, back_populates='authentication_logs', doc='The :class:`.User` instance that this AuthenticationLog is created for')
    action = Column('action', Enum(LOGIN, LOGOUT, name='ActionNames'), nullable=False)
    date = Column(GenericDateTime, nullable=False)

    def __init__(self, user=None, date=None, action=LOGIN, **kwargs):
        super(AuthenticationLog, self).__init__(**kwargs)
        self.user = user
        self.date = date
        self.action = action

    def __lt__(self, other):
        """Order logs lexicographically by "date action user".

        Bug fix: the comparison operator was missing, so this returned an
        always-truthy 2-tuple instead of a boolean.
        """
        return ('%s %s %s' % (self.date, self.action, self.user.name)) < \
               ('%s %s %s' % (other.date, other.action, other.user.name))

    @validates('user')
    def __validate_user__(self, key, user):
        """Reject anything that is not a User instance."""
        if not isinstance(user, User):
            raise TypeError(('%s.user should be a User instance, not %s' % (self.__class__.__name__, user.__class__.__name__)))
        return user

    @validates('action')
    def __validate_action__(self, key, action):
        """Default a missing action to LOGIN and restrict to LOGIN/LOGOUT."""
        if action is None:
            import copy
            action = copy.copy(LOGIN)
        if action not in [LOGIN, LOGOUT]:
            raise ValueError(('%s.action should be one of "login" or "logout", not "%s"' % (self.__class__.__name__, action)))
        return action

    @validates('date')
    def __validate_date__(self, key, date):
        """Default a missing date to now (UTC) and require a datetime."""
        if date is None:
            date = datetime.datetime.now(pytz.utc)
        if not isinstance(date, datetime.datetime):
            raise TypeError(('%s.date should be a "datetime.datetime" instance, not %s' % (self.__class__.__name__, date.__class__.__name__)))
        return date
class Signal(BaseSignal[VT]):
    """Livecheck test signal: one side of a test sends a value keyed by the
    current test id; the executing side awaits it with a timeout."""

    async def send(self, value: VT=None, *, key: Any=None, force: bool=False) -> None:
        """Publish a SignalEvent on the app bus.

        Outside a running test this is a no-op unless *force* is set, in
        which case an explicit *key* is required.
        """
        current_test = current_test_stack.top
        if (current_test is None):
            if (not force):
                return
            assert key
        else:
            # Default the key to the currently executing test's id.
            key = (key if (key is not None) else current_test.id)
        (await self.case.app.bus.send(key=key, value=SignalEvent(signal_name=self.name, case_name=self.case.name, key=key, value=value)))

    async def wait(self, *, key: Any=None, timeout: Optional[Seconds]=None) -> VT:
        """Wait for this signal for the current test (or explicit *key*),
        recording wait/receive timings on the test runner.

        Raises RuntimeError when no test is executing.
        """
        runner = self.case.current_execution
        if (runner is None):
            raise RuntimeError('No test executing.')
        test = runner.test
        assert test
        k: Any = (test.id if (key is None) else key)
        timeout_s = want_seconds(timeout)
        (await runner.on_signal_wait(self, timeout=timeout_s))
        time_start = monotonic()
        event = (await self._wait_for_message_by_key(key=k, timeout=timeout_s))
        time_end = monotonic()
        (await runner.on_signal_received(self, time_start=time_start, time_end=time_end))
        self._verify_event(event, k, self.name, self.case.name)
        return cast(VT, maybe_model(event.value))

    def _verify_event(self, ev: SignalEvent, key: Any, name: str, case: str) -> None:
        # Sanity check: the received event must belong to this key/signal/case.
        assert (ev.key == key), f'{ev.key!r} == {key!r}'
        assert (ev.signal_name == name), f'{ev.signal_name!r} == {name!r}'
        assert (ev.case_name == case), f'{ev.case_name!r} == {case!r}'

    async def _wait_for_message_by_key(self, key: Any, *, timeout: Optional[float]=None, max_interval: float=2.0) -> SignalEvent:
        """Poll for a resolved value under *key*, sleeping at most
        *max_interval* seconds per round, until timeout or app shutdown.

        Raises LiveCheckTestTimeout on timeout and CancelledError when the
        app stops before a value arrives.
        """
        app = self.case.app
        time_start = monotonic()
        remaining = timeout
        # Fast path: the value may already be present.
        try:
            return self._get_current_value(key)
        except KeyError:
            pass
        while (not app.should_stop):
            if (remaining is not None):
                # NOTE(review): this subtracts the *total* elapsed time from
                # the already-decremented remaining on every iteration, so the
                # budget shrinks faster than wall time — confirm intent.
                remaining = (remaining - (monotonic() - time_start))
            try:
                if ((remaining is not None) and (remaining <= 0.0)):
                    # Out of time: one last check before timing out.
                    try:
                        return self._get_current_value(key)
                    except KeyError:
                        raise asyncio.TimeoutError() from None
                max_wait = None
                if (remaining is not None):
                    max_wait = min(remaining, max_interval)
                (await self._wait_for_resolved(timeout=max_wait))
            except asyncio.TimeoutError:
                msg = f'Timed out waiting for signal {self.name} ({timeout})'
                raise LiveCheckTestTimeout(msg) from None
            if app.should_stop:
                break
            try:
                val = self._get_current_value(key)
                return val
            except KeyError:
                pass
        raise asyncio.CancelledError()
class TabularEditor(Editor):
    """wx implementation of a virtual tabular editor driven by a
    TabularAdapter."""

    # Event fired to request an update; may carry True, a row index, or an item.
    update = Event()
    # Event fired to request a visual refresh.
    refresh = Event()
    # Currently selected item (single-select mode).
    selected = Any()
    # Currently selected items (multi-select mode).
    multi_selected = List()
    # Row index of the selection; -1 means no selection.
    selected_row = Int((- 1))
    multi_selected_rows = List(Int)
    # Most recently activated item and its row.
    activated = Any()
    activated_row = Int()
    # Mouse interaction events (TabularEditorEvent payloads).
    clicked = Instance('TabularEditorEvent')
    dclicked = Instance('TabularEditorEvent')
    right_clicked = Instance('TabularEditorEvent')
    right_dclicked = Instance('TabularEditorEvent')
    column_clicked = Instance('TabularEditorEvent')
    scrollable = True
    # Pending row to scroll to / edit after the next update.
    row = Any()
    edit = Bool(False)
    # Adapter mapping the object's value to rows/columns.
    adapter = Instance(TabularAdapter)
    # Image caches keyed by name.
    images = Dict()
    image_resources = Dict()
    image = Image
    # When set, the next update repaints all visible rows.
    _update_visible = Bool(False)
def init(self, parent):
    """Create the virtual wx list control, bind all wx events, and sync the
    factory-declared traits with this editor."""
    factory = self.factory
    self.adapter = factory.adapter
    # Virtual report-style list: row contents are supplied on demand.
    style = ((wx.LC_REPORT | wx.LC_VIRTUAL) | wx.BORDER_NONE)
    if factory.editable_labels:
        style |= wx.LC_EDIT_LABELS
    if factory.horizontal_lines:
        style |= wx.LC_HRULES
    if factory.vertical_lines:
        style |= wx.LC_VRULES
    if (not factory.multi_select):
        style |= wx.LC_SINGLE_SEL
    if (not factory.show_titles):
        style |= wx.LC_NO_HEADER
    self.control = control = wxListCtrl(parent, (- 1), style=style, can_edit=factory.editable, edit_labels=factory.editable_labels)
    # Back-reference used by the custom list control.
    control._editor = self
    id = control.GetId()
    # List-level events are bound on the parent, mouse events on the control.
    parent.Bind(wx.EVT_LIST_BEGIN_DRAG, self._begin_drag, id=id)
    parent.Bind(wx.EVT_LIST_BEGIN_LABEL_EDIT, self._begin_label_edit, id=id)
    parent.Bind(wx.EVT_LIST_END_LABEL_EDIT, self._end_label_edit, id=id)
    parent.Bind(wx.EVT_LIST_ITEM_SELECTED, self._item_selected, id=id)
    parent.Bind(wx.EVT_LIST_ITEM_DESELECTED, self._item_selected, id=id)
    parent.Bind(wx.EVT_LIST_KEY_DOWN, self._key_down, id=id)
    parent.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self._item_activated, id=id)
    parent.Bind(wx.EVT_LIST_COL_END_DRAG, self._size_modified, id=id)
    parent.Bind(wx.EVT_LIST_COL_RIGHT_CLICK, self._column_right_clicked, id=id)
    parent.Bind(wx.EVT_LIST_COL_CLICK, self._column_clicked, id=id)
    control.Bind(wx.EVT_LEFT_DOWN, self._left_down)
    control.Bind(wx.EVT_LEFT_DCLICK, self._left_dclick)
    control.Bind(wx.EVT_RIGHT_DOWN, self._right_down)
    control.Bind(wx.EVT_RIGHT_DCLICK, self._right_dclick)
    control.Bind(wx.EVT_MOTION, self._motion)
    control.Bind(wx.EVT_SIZE, self._size_modified)
    if (PythonDropTarget is not None):
        control.SetDropTarget(PythonDropTarget(self))
    # Wire the selection traits appropriate to the select mode.
    if factory.multi_select:
        self.sync_value(factory.selected, 'multi_selected', 'both', is_list=True)
        self.sync_value(factory.selected_row, 'multi_selected_rows', 'both', is_list=True)
    else:
        self.sync_value(factory.selected, 'selected', 'both')
        self.sync_value(factory.selected_row, 'selected_row', 'both')
    self.sync_value(factory.update, 'update', 'from', is_event=True)
    self.sync_value(factory.refresh, 'refresh', 'from', is_event=True)
    self.sync_value(factory.activated, 'activated', 'to')
    self.sync_value(factory.activated_row, 'activated_row', 'to')
    self.sync_value(factory.clicked, 'clicked', 'to')
    self.sync_value(factory.dclicked, 'dclicked', 'to')
    self.sync_value(factory.right_clicked, 'right_clicked', 'to')
    self.sync_value(factory.right_dclicked, 'right_dclicked', 'to')
    self.sync_value(factory.column_clicked, 'column_clicked', 'to')
    # Container-items notifications; best-effort for non-list values.
    try:
        self.context_object.on_trait_change(self.update_editor, (self.extended_name + '_items'), dispatch='ui')
    except:
        pass
    if factory.auto_update:
        self.context_object.on_trait_change(self.refresh_editor, (self.extended_name + '.-'), dispatch='ui')
    for image_resource in factory.images:
        self._add_image(image_resource)
    self.on_trait_change(self._refresh, 'adapter.+update', dispatch='ui')
    self.on_trait_change(self._rebuild_all, 'adapter.columns', dispatch='ui')
    self._rebuild()
    self.set_tooltip()
def dispose(self):
    """Unbind every handler bound in init() (mirroring it exactly), then let
    the base class dispose of the editor."""
    control = self.control
    parent = control.GetParent()
    id = control.GetId()
    parent.Bind(wx.EVT_LIST_BEGIN_DRAG, None, id=id)
    parent.Bind(wx.EVT_LIST_BEGIN_LABEL_EDIT, None, id=id)
    parent.Bind(wx.EVT_LIST_END_LABEL_EDIT, None, id=id)
    parent.Bind(wx.EVT_LIST_ITEM_SELECTED, None, id=id)
    parent.Bind(wx.EVT_LIST_ITEM_DESELECTED, None, id=id)
    parent.Bind(wx.EVT_LIST_KEY_DOWN, None, id=id)
    parent.Bind(wx.EVT_LIST_ITEM_ACTIVATED, None, id=id)
    parent.Bind(wx.EVT_LIST_COL_END_DRAG, None, id=id)
    parent.Bind(wx.EVT_LIST_COL_RIGHT_CLICK, None, id=id)
    parent.Bind(wx.EVT_LIST_COL_CLICK, None, id=id)
    control.Unbind(wx.EVT_LEFT_DOWN)
    control.Unbind(wx.EVT_LEFT_DCLICK)
    control.Unbind(wx.EVT_RIGHT_DOWN)
    control.Unbind(wx.EVT_RIGHT_DCLICK)
    control.Unbind(wx.EVT_MOTION)
    control.Unbind(wx.EVT_SIZE)
    self.context_object.on_trait_change(self.update_editor, (self.extended_name + '_items'), remove=True)
    if self.factory.auto_update:
        self.context_object.on_trait_change(self.refresh_editor, (self.extended_name + '.-'), remove=True)
    self.on_trait_change(self._refresh, 'adapter.+update', remove=True)
    self.on_trait_change(self._rebuild_all, 'adapter.columns', remove=True)
    super().dispose()
def _update_changed(self, event):
    """Dispatch an `update` event: True -> full update, int -> refresh that
    row, anything else -> refresh the row showing that item.

    Note: the `is True` test must come before the int check, since bool is
    a subclass of int.
    """
    if (event is True):
        self.update_editor()
    elif isinstance(event, int):
        self._refresh_row(event)
    else:
        self._refresh_editor(event)
def refresh_editor(self, item, name, old, new):
    """Trait-change handler (auto_update): refresh the row displaying *item*."""
    self._refresh_editor(item)
def _refresh_editor(self, item):
    """Redraw the row displaying *item*; fall back to a full update when the
    item cannot be located."""
    adapter = self.adapter
    obj, name = self.object, self.name
    for index in range(adapter.len(obj, name)):
        # Identity comparison on purpose: we want this exact object.
        if item is adapter.get_item(obj, name, index):
            self._refresh_row(index)
            return
    self.update_editor()
def _refresh_row(self, row):
    """Repaint a single row's bounding rectangle."""
    self.control.RefreshRect(self.control.GetItemRect(row, wx.LIST_RECT_BOUNDS))
def _update_editor(self, object, name, old_value, new_value):
    """Mark all visible rows as stale before the base-class update runs."""
    self._update_visible = True
    super()._update_editor(object, name, old_value, new_value)
def update_editor(self):
    """Re-sync the virtual wx list with the current trait value.

    Updates the item count, refreshes visible rows when flagged,
    restores any multi-selection, then scrolls to / selects the pending
    ``self.row`` (optionally starting an in-place label edit when
    ``self.edit`` was set).

    Fix: the original called ``control.ScrollHint.EnsureVisible`` —
    ``ScrollHint`` is a Qt (QAbstractItemView) concept; wx.ListCtrl
    exposes ``EnsureVisible`` directly, so the old calls raised
    AttributeError.
    """
    control = self.control
    n = self.adapter.len(self.object, self.name)
    top = control.GetTopItem()
    pn = control.GetCountPerPage()
    bottom = min((top + pn) - 1, n)
    control.SetItemCount(n)
    if self._update_visible:
        control.RefreshItems(0, n - 1)
        self._update_visible = False
    if len(self.multi_selected_rows) > 0:
        self._multi_selected_rows_changed(self.multi_selected_rows)
    if len(self.multi_selected) > 0:
        self._multi_selected_changed(self.multi_selected)
    edit, self.edit = self.edit, False
    row, self.row = self.row, None
    if row is not None:
        if row >= n:
            # Pending row fell off the end (e.g. after a delete):
            # clamp to the new last row, or give up when the list is empty.
            row -= 1
            if row < 0:
                row = None
    if row is None:
        # No pending row: just keep the previously visible region shown.
        visible = bottom
        if (visible >= 0) and (visible < control.GetItemCount()):
            control.EnsureVisible(visible)
        return
    if 0 <= (row - top) < pn:
        control.EnsureVisible(min((top + pn) - 2, control.GetItemCount() - 1))
    elif row < top:
        control.EnsureVisible(min((row + pn) - 1, control.GetItemCount() - 1))
    else:
        control.EnsureVisible(row)
    control.SetItemState(row, (wx.LIST_STATE_SELECTED | wx.LIST_STATE_FOCUSED), (wx.LIST_STATE_SELECTED | wx.LIST_STATE_FOCUSED))
    if edit:
        control.EditLabel(row)
def _selected_changed(self, selected):
    """Sync the wx selection when the `selected` trait changes externally.

    ``None`` clears the selection; otherwise the row holding *selected*
    is selected.  Guarded by ``_no_update`` so our own event handlers do
    not echo back.

    Fix: replaced the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) with ``except Exception``.
    """
    if not self._no_update:
        if selected is None:
            for row in self._get_selected():
                self.control.SetItemState(row, 0, wx.LIST_STATE_SELECTED)
        else:
            try:
                self.control.SetItemState(self.value.index(selected), wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
            except Exception:
                # Best effort: `selected` may no longer be in the list.
                pass
def _selected_row_changed(self, old, new):
    """Sync the wx selection when `selected_row` changes externally:
    a negative row clears the previous selection, otherwise the new
    row is selected.  Guarded against echoing our own updates."""
    if (not self._no_update):
        if (new < 0):
            if (old >= 0):
                self.control.SetItemState(old, 0, wx.LIST_STATE_SELECTED)
        else:
            self.control.SetItemState(new, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
def _multi_selected_changed(self, selected):
    """Sync the wx selection from the `multi_selected` item list by
    translating items to row indices.

    Fix: replaced the bare ``except:`` with ``except Exception`` so
    KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    if not self._no_update:
        values = self.value
        try:
            self._multi_selected_rows_changed([values.index(item) for item in selected])
        except Exception:
            # Best effort: an item may no longer be present in `value`.
            pass
def _multi_selected_items_changed(self, event):
    """Incremental `multi_selected` change: translate the added/removed
    items to row indices and forward as a row-level list event.

    Fix: replaced the bare ``except:`` with ``except Exception``.
    """
    values = self.value
    try:
        self._multi_selected_rows_items_changed(TraitListEvent(index=0, removed=[values.index(item) for item in event.removed], added=[values.index(item) for item in event.added]))
    except Exception:
        # Best effort: an item may no longer be present in `value`.
        pass
def _multi_selected_rows_changed(self, selected_rows):
    """Make the wx selection exactly `selected_rows`: select rows not
    yet selected, then deselect leftovers.  Guarded by `_no_update`."""
    if (not self._no_update):
        control = self.control
        selected = self._get_selected()
        for row in selected_rows:
            if (row in selected):
                selected.remove(row)
            else:
                control.SetItemState(row, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
        # Whatever remains was selected in the control but is no longer wanted.
        for row in selected:
            control.SetItemState(row, 0, wx.LIST_STATE_SELECTED)
def _multi_selected_rows_items_changed(self, event):
    """Apply an incremental row-selection change: deselect removed rows,
    select added rows."""
    control = self.control
    for row in event.removed:
        control.SetItemState(row, 0, wx.LIST_STATE_SELECTED)
    for row in event.added:
        control.SetItemState(row, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
def _refresh_changed(self):
    """`refresh` trait fired: rebuild the view."""
    self.update_editor()
def _left_down(self, event):
    """Left click: fire the `clicked` editor event."""
    self._mouse_click(event, 'clicked')
def _left_dclick(self, event):
    """Left double-click: fire the `dclicked` editor event."""
    self._mouse_click(event, 'dclicked')
def _right_down(self, event):
    """Right click: fire the `right_clicked` editor event."""
    self._mouse_click(event, 'right_clicked')
def _right_dclick(self, event):
    """Right double-click: fire the `right_dclicked` editor event."""
    self._mouse_click(event, 'right_dclicked')
def _begin_drag(self, event):
    """Start a drag of the selected rows.

    Collects the adapter's drag value for each selected row (aborting if
    any row is not draggable), hands them to PythonDropSource, and on a
    completed move deletes the source rows (highest first so earlier
    indices stay valid).
    """
    if (PythonDropSource is not None):
        adapter = self.adapter
        (object, name) = (self.object, self.name)
        selected = self._get_selected()
        drag_items = []
        for row in selected:
            drag = adapter.get_drag(object, name, row)
            if (drag is None):
                # Any non-draggable row vetoes the whole drag.
                return
            drag_items.append(drag)
        # Remember the dragged rows for index fix-ups during local drops.
        self._drag_rows = selected
        try:
            if (len(drag_items) == 1):
                drag_items = drag_items[0]
            ds = PythonDropSource(self.control, drag_items)
            if ((ds.result == wx.DragMove) and (self._drag_local or self.factory.drag_move)):
                rows = self._drag_rows
                rows.reverse()
                for row in rows:
                    adapter.delete(object, name, row)
        finally:
            self._drag_rows = None
            self._drag_local = False
def _begin_label_edit(self, event):
    """Veto in-place label editing for rows the adapter marks non-editable."""
    if (not self.adapter.get_can_edit(self.object, self.name, event.GetIndex())):
        event.Veto()
def _end_label_edit(self, event):
    """Commit an in-place edit to the adapter, then set `row` to the
    next item so the following update selects it."""
    self.adapter.set_text(self.object, self.name, event.GetIndex(), event.GetColumn(), event.GetText())
    self.row = (event.GetIndex() + 1)
def _item_selected(self, event):
    """wx selection changed: push the control's selection into the
    editor's selection traits, suppressing echo via `_no_update`."""
    self._no_update = True
    try:
        get_item = self.adapter.get_item
        (object, name) = (self.object, self.name)
        selected_rows = self._get_selected()
        if self.factory.multi_select:
            self.multi_selected_rows = selected_rows
            self.multi_selected = [get_item(object, name, row) for row in selected_rows]
        elif (len(selected_rows) == 0):
            self.selected_row = (- 1)
            self.selected = None
        else:
            self.selected_row = selected_rows[0]
            self.selected = get_item(object, name, selected_rows[0])
    finally:
        self._no_update = False
def _item_activated(self, event):
    """Row activated (double-click/Enter in wx terms): record the row
    and corresponding adapter item in the `activated_*` traits."""
    self.activated_row = event.GetIndex()
    self.activated = self.adapter.get_item(self.object, self.name, self.activated_row)
def _key_down(self, event):
    """Keyboard shortcuts for list operations; unhandled keys are skipped
    so wx processes them normally.

    NOTE(review): the bindings look unusual (PAGEDOWN appends,
    LEFT/RIGHT move items, RETURN and ESCAPE both edit) — confirm these
    are the intended shortcuts.
    """
    key = event.GetKeyCode()
    if (key == wx.WXK_PAGEDOWN):
        self._append_new()
    elif (key in (wx.WXK_BACK, wx.WXK_DELETE)):
        self._delete_current()
    elif (key == wx.WXK_INSERT):
        self._insert_current()
    elif (key == wx.WXK_LEFT):
        self._move_up_current()
    elif (key == wx.WXK_RIGHT):
        self._move_down_current()
    elif (key in (wx.WXK_RETURN, wx.WXK_ESCAPE)):
        self._edit_current()
    else:
        event.Skip()
def _column_right_clicked(self, event):
    """Right-click on a column header: drop that column's cached width
    so the next resize recomputes it, then re-run the size logic."""
    column = event.GetColumn()
    if ((self._cached_widths is not None) and (0 <= column < len(self._cached_widths))):
        self._cached_widths[column] = None
    self._size_modified(event)
def _column_clicked(self, event):
    """Column header clicked: fire a `column_clicked` editor event
    (row is reported as 0 — only the column matters here)."""
    editor_event = TabularEditorEvent(editor=self, row=0, column=event.GetColumn())
    setattr(self, 'column_clicked', editor_event)
    event.Skip()
def _size_modified(self, event):
    """Control resized: a single column simply fills the client width;
    multiple columns are re-laid-out later (deferred via do_later)."""
    control = self.control
    n = control.GetColumnCount()
    if (n == 1):
        (dx, dy) = control.GetClientSize()
        control.SetColumnWidth(0, (dx - 1))
    elif (n > 1):
        do_later(self._set_column_widths)
    event.Skip()
def _motion(self, event):
    """Mouse moved: update the per-cell tooltip from the adapter,
    only touching wx when the hovered cell or tooltip text changed.
    The Enable(False)/Enable(True) toggle forces wx to re-show the tip."""
    x = event.GetX()
    column = self._get_column(x)
    (row, flags) = self.control.HitTest(wx.Point(x, event.GetY()))
    if ((row != self._last_row) or (column != self._last_column)):
        (self._last_row, self._last_column) = (row, column)
        if ((row == (- 1)) or (column is None)):
            tooltip = ''
        else:
            tooltip = self.adapter.get_tooltip(self.object, self.name, row, column)
        if (tooltip != self._last_tooltip):
            self._last_tooltip = tooltip
            wx.ToolTip.Enable(False)
            wx.ToolTip.Enable(True)
            self.control.SetToolTip(wx.ToolTip(tooltip))
def wx_dropped_on(self, x, y, data, drag_result):
    """Drop handler: insert the dropped item(s) at the hit row.

    An empty list accepts drops anywhere (row forced to 0).  Lists are
    reversed so repeated inserts at the same row preserve order.

    NOTE(review): `_drag_indices` is never assigned anywhere in this
    class — the attribute used elsewhere is `_drag_rows`; confirm
    whether this is a typo.
    """
    (row, flags) = self.control.HitTest(wx.Point(x, y))
    if ((row == (- 1)) and ((flags & wx.LIST_HITTEST_NOWHERE) != 0) and (self.control.GetItemCount() == 0)):
        row = 0
    if (row != (- 1)):
        if (not isinstance(data, list)):
            self._wx_dropped_on(row, data)
        else:
            data.reverse()
            for item in data:
                self._wx_dropped_on(row, item)
        if (self._drag_indices is not None):
            self._drag_local = True
        return drag_result
    return wx.DragNone
def _wx_dropped_on(self, row, item):
    """Insert one dropped item at `row` (or after it, when the adapter
    says 'after'), then shift any pending drag-source row indices at or
    beyond the insertion point so a later move-delete removes the right
    rows."""
    adapter = self.adapter
    (object, name) = (self.object, self.name)
    destination = adapter.get_dropped(object, name, row, item)
    if (destination == 'after'):
        row += 1
    adapter.insert(object, name, row, item)
    rows = self._drag_rows
    if (rows is not None):
        # Walk backwards; rows are ascending, stop at the first one
        # before the insertion point.
        for i in range((len(rows) - 1), (- 1), (- 1)):
            if (rows[i] < row):
                break
            rows[i] += 1
def wx_drag_over(self, x, y, data, drag_result):
    """Drag-over feedback: a list is accepted only if every item is;
    otherwise ask the adapter whether this item can drop on the hit row."""
    if isinstance(data, list):
        rc = wx.DragNone
        for item in data:
            rc = self.wx_drag_over(x, y, item, drag_result)
            if (rc == wx.DragNone):
                break
        return rc
    (row, flags) = self.control.HitTest(wx.Point(x, y))
    # An empty list accepts a drag anywhere.
    if ((row == (- 1)) and ((flags & wx.LIST_HITTEST_NOWHERE) != 0) and (self.control.GetItemCount() == 0)):
        row = 0
    if ((row != (- 1)) and self.adapter.get_can_drop(self.object, self.name, row, data)):
        return drag_result
    return wx.DragNone
def restore_prefs(self, prefs):
    """Restore saved column widths from the user-preferences dict."""
    self._cached_widths = cws = prefs.get('cached_widths')
    if (cws is not None):
        set_column_width = self.control.SetColumnWidth
        for (i, width) in enumerate(cws):
            if (width is not None):
                set_column_width(i, width)
def save_prefs(self):
    """Return the user-preferences dict holding sanitized column widths,
    or None when no widths have been cached yet.

    Negative cached widths (internal "proportional" markers) are saved
    as None so they are recomputed on restore.
    """
    cached = self._cached_widths
    if cached is None:
        return None
    sanitized = []
    for cw in cached:
        keep = (cw is not None) and (cw >= 0)
        sanitized.append(cw if keep else None)
    return {'cached_widths': sanitized}
def _refresh(self):
    """Repaint every row currently in the list (no structural change)."""
    n = self.adapter.len(self.object, self.name)
    if (n > 0):
        self.control.RefreshItems(0, (n - 1))
def _rebuild(self):
    """Recreate the list control's columns from the adapter's label map
    (with per-column alignment) and re-apply the column widths.

    Fix: dropped the unused local ``get_width`` — widths are computed in
    ``_set_column_widths``, not here.
    """
    control = self.control
    control.ClearAll()
    adapter, object, name = self.adapter, self.object, self.name
    adapter.object, adapter.name = object, name
    get_alignment = adapter.get_alignment
    for i, label in enumerate(adapter.label_map):
        control.InsertColumn(i, label, alignment_map.get(get_alignment(object, name, i), wx.LIST_FORMAT_LEFT))
    self._set_column_widths()
def _rebuild_all(self):
    """Adapter columns changed: rebuild the columns, then the contents."""
    self._rebuild()
    self.update_editor()
def _set_column_widths(self):
    """Lay out column widths from the adapter.

    Adapter widths > 1.0 are treated as fixed pixel widths; widths in
    (0, 1.0] are proportional shares of the space left after fixed
    columns.  `_cached_widths` remembers the result per column: fixed
    widths are cached positive, proportional ones are cached as the
    *negative* of the pixel width so a user resize (which changes the
    control's current width away from -cached) invalidates the cache.
    """
    control = self.control
    if (control is None):
        return
    (object, name) = (self.object, self.name)
    (dx, dy) = control.GetClientSize()
    if is_mac:
        # Leave room for the vertical scrollbar on macOS.
        dx -= scrollbar_dx
    n = control.GetColumnCount()
    get_width = self.adapter.get_width
    pdx = 0        # total pixels taken by fixed-width columns
    wdx = 0.0      # total of the proportional weights
    widths = []
    cached = self._cached_widths
    current = [control.GetColumnWidth(i) for i in range(n)]
    if ((cached is None) or (len(cached) != n)):
        self._cached_widths = cached = ([None] * n)
    for i in range(n):
        cw = cached[i]
        # Recompute when uncached, or when the control still shows the
        # width we computed last time (i.e. the user has not resized it).
        if ((cw is None) or ((- cw) == current[i])):
            width = float(get_width(object, name, i))
            if (width <= 0.0):
                width = 0.1
            if (width <= 1.0):
                # Proportional column: accumulate its weight.
                wdx += width
                cached[i] = (- 1)
            else:
                # Fixed pixel width.
                width = int(width)
                pdx += width
                if (cw is None):
                    cached[i] = width
        else:
            # User-resized column: keep the current width as fixed.
            cached[i] = width = current[i]
            pdx += width
        widths.append(width)
    # Pixels available to distribute among proportional columns.
    adx = max(0, (dx - pdx))
    control.Freeze()
    for i in range(n):
        width = cached[i]
        if (width < 0):
            width = widths[i]
            if (width <= 1.0):
                # Allocate this column's share (minimum 30px) and shrink
                # the remaining pool/weight accordingly.
                widths[i] = w = max(30, int(round(((adx * width) / wdx))))
                wdx -= width
                width = w
                adx -= width
                cached[i] = (- w)
        control.SetColumnWidth(i, width)
    control.Thaw()
def _add_image(self, image_resource):
    """Add an ImageResource's bitmap to the control's image list
    (creating the list lazily from the first bitmap's size), register it
    in both lookup caches, and return its image-list index."""
    bitmap = image_resource.create_image().ConvertToBitmap()
    image_list = self._image_list
    if (image_list is None):
        self._image_list = image_list = wx.ImageList(bitmap.GetWidth(), bitmap.GetHeight())
        self.control.AssignImageList(image_list, wx.IMAGE_LIST_SMALL)
    self.image_resources[image_resource] = self.images[image_resource.name] = row = image_list.Add(bitmap)
    return row
def _get_image(self, image):
    """Resolve `image` (a name string or ImageResource) to an image-list
    index, adding unseen ImageResources on demand.

    NOTE(review): for a string, assigning ``self.image`` and reading it
    back appears to rely on a trait converting the name to an
    ImageResource — confirm against the editor's trait definitions.
    """
    if isinstance(image, str):
        self.image = image
        image = self.image
    if isinstance(image, ImageResource):
        result = self.image_resources.get(image)
        if (result is not None):
            return result
        return self._add_image(image)
    return self.images.get(image)
def _get_selected(self):
    """Return the list of currently selected row indices (ascending),
    or an empty list when the editor has no items."""
    selected = []
    item = (- 1)
    control = self.control
    if (len(self.value) == 0):
        return selected
    # GetNextItem(-1, ...) starts the scan; -1 marks the end.
    while True:
        item = control.GetNextItem(item, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
        if (item == (- 1)):
            break
        selected.append(item)
    return selected
def _append_new(self):
    """Append a default-valued item at the end and schedule it for
    selection + label edit (only when 'append' is an allowed operation)."""
    if ('append' in self.factory.operations):
        adapter = self.adapter
        self.row = self.control.GetItemCount()
        self.edit = True
        adapter.insert(self.object, self.name, self.row, adapter.get_default_value(self.object, self.name))
def _insert_current(self):
    """Insert a default-valued item before the (single) selected row and
    schedule it for selection + label edit (requires 'insert')."""
    if ('insert' in self.factory.operations):
        selected = self._get_selected()
        if (len(selected) == 1):
            adapter = self.adapter
            adapter.insert(self.object, self.name, selected[0], adapter.get_default_value(self.object, self.name))
            self.row = selected[0]
            self.edit = True
def _delete_current(self):
    """Delete all selected rows (requires 'delete').

    Rows are removed highest-first so earlier indices stay valid.  In
    single-select mode the selection moves to the row that took the
    deleted row's place (or the new last row); in multi-select mode the
    selection is simply cleared.
    """
    if ('delete' in self.factory.operations):
        selected = self._get_selected()
        if (len(selected) == 0):
            return
        delete = self.adapter.delete
        selected.reverse()
        for row in selected:
            delete(self.object, self.name, row)
        n = self.adapter.len(self.object, self.name)
        if (not self.factory.multi_select):
            self.selected_row = self.row = ((n - 1) if (row >= n) else row)
        else:
            self.multi_selected = []
            self.multi_selected_rows = []
def _move_up_current(self):
    """Swap the (single) selected row with the one above it and keep it
    selected (requires 'move')."""
    if ('move' in self.factory.operations):
        selected = self._get_selected()
        if (len(selected) == 1):
            row = selected[0]
            if (row > 0):
                adapter = self.adapter
                (object, name) = (self.object, self.name)
                item = adapter.get_item(object, name, row)
                adapter.delete(object, name, row)
                adapter.insert(object, name, (row - 1), item)
                self.row = (row - 1)
def _move_down_current(self):
    """Swap the (single) selected row with the one below it and keep it
    selected (requires 'move')."""
    if ('move' in self.factory.operations):
        selected = self._get_selected()
        if (len(selected) == 1):
            row = selected[0]
            if (row < (self.control.GetItemCount() - 1)):
                adapter = self.adapter
                (object, name) = (self.object, self.name)
                item = adapter.get_item(object, name, row)
                adapter.delete(object, name, row)
                adapter.insert(object, name, (row + 1), item)
                self.row = (row + 1)
def _edit_current(self):
    """Begin an in-place label edit of the (single) selected row
    (requires 'edit' and editable labels)."""
    if (('edit' in self.factory.operations) and self.factory.editable_labels):
        selected = self._get_selected()
        if (len(selected) == 1):
            self.control.EditLabel(selected[0])
def _get_column(self, x, translate=False):
    """Map an x pixel coordinate to a column index by walking the
    cumulative column widths; returns None when x is negative or past
    the last column.  With translate=True, return the adapter's mapped
    column instead of the raw index."""
    if (x >= 0):
        control = self.control
        for i in range(control.GetColumnCount()):
            x -= control.GetColumnWidth(i)
            if (x < 0):
                if translate:
                    return self.adapter.get_column(self.object, self.name, i)
                return i
    return None
def _mouse_click(self, event, trait):
    """Common mouse-click handler: clear the selection when clicking
    empty space, otherwise fire the editor event named by `trait` with
    the hit row/column.  Shift-click in multi-select defers a selection
    re-sync.  The event is always skipped so wx's default handling
    (selection, focus) still runs."""
    x = event.GetX()
    (row, flags) = self.control.HitTest(wx.Point(x, event.GetY()))
    if (row == wx.NOT_FOUND):
        if self.factory.multi_select:
            self.multi_selected = []
            self.multi_selected_rows = []
        else:
            self.selected = None
            self.selected_row = (- 1)
    else:
        if (self.factory.multi_select and event.ShiftDown()):
            do_later(self._item_selected, None)
        setattr(self, trait, TabularEditorEvent(editor=self, row=row, column=self._get_column(x, translate=True)))
    event.Skip(True)
def render_generate_module_repo_file(copr, name_release, module_nsv):
    """Render a module .repo/.cfg file for `copr` as a plain-text Flask
    response served as a download named <repo_name>.cfg."""
    module = ModulesLogic.get_by_nsv_str(copr, module_nsv).one()
    mock_chroot = coprs_logic.MockChrootsLogic.get_from_name(name_release, noarch=True).first()
    # Join with '' to guarantee a trailing slash so urljoin keeps the path.
    url = os.path.join(copr.main_dir.repo_url, '')
    repo_url = generate_repo_url(mock_chroot, copr.modules_url)
    # Module repos live at <repo_url>+<N:S:V>/latest/$basearch.
    baseurl = '{}+{}/latest/$basearch'.format(repo_url.rstrip('/'), module_nsv)
    pubkey_url = urljoin(url, 'pubkey.gpg')
    response = flask.make_response(flask.render_template('coprs/copr-modules.cfg', copr=copr, module=module, baseurl=baseurl, pubkey_url=pubkey_url))
    response.mimetype = 'text/plain'
    response.headers['Content-Disposition'] = 'filename={0}.cfg'.format(copr.repo_name)
    return response
def _new_game(n_players: int, small_blind: int=50, big_blind: int=100, initial_chips: int=10000) -> Tuple[(ShortDeckPokerState, Pot)]:
    """Create a fresh short-deck poker hand.

    Builds one shared pot, seats `n_players` players each starting with
    `initial_chips`, and returns the new game state together with the pot.
    The card lookup table is not loaded (load_card_lut=False).
    """
    shared_pot = Pot()
    seats = []
    for seat_i in range(n_players):
        seats.append(ShortDeckPokerPlayer(player_i=seat_i, pot=shared_pot, initial_chips=initial_chips))
    game_state = ShortDeckPokerState(players=seats, load_card_lut=False, small_blind=small_blind, big_blind=big_blind)
    return (game_state, shared_pot)
('/adconfig')
def handle_adconfig(self):
    """Serve the /adconfig page: render the standard header, forward the
    GET/POST parameters to lib.web_adconfig, render the footer, and
    return the accumulated page buffer.

    NOTE(review): the bare "('/adconfig')" line above this method looks
    like a stripped route decorator (e.g. "@WebServer.route('/adconfig')")
    — confirm against the original source.
    """
    global TXBuffer, navMenuIndex
    TXBuffer = ''
    navMenuIndex = 2
    # Redirect to setup while wifi setup is running, and to login when
    # the session is not authenticated.
    if rpieGlobals.wifiSetup:
        return self.redirect('/setup')
    if (not isLoggedIn(self.get, self.cookie)):
        return self.redirect('/login')
    sendHeadandTail('TmplStd', _HEAD)
    if (self.type == 'GET'):
        responsearr = self.get
    else:
        responsearr = self.post
    try:
        # Imported lazily so the page still loads if the module is absent.
        import lib.web_adconfig as ADConfig
        ADConfig.handle_adconfig(responsearr)
    except Exception as e:
        print('Adconfig error', e)
    sendHeadandTail('TmplStd', _TAIL)
    return TXBuffer
def fasta_iterator(input_file):
    """Yield ``Fasta(name, sequence)`` records parsed from *input_file*.

    The file is opened with the project's ``myopen`` helper.  Sequence
    lines are accumulated until the next ``>`` header (or end of file),
    at which point a record is emitted.  A trailing record is only
    emitted when its header name is non-empty (original behavior kept).

    Fix: the original initialized the accumulator as a ``list`` but
    reset it to a ``str`` after the first record, so ``+=`` silently
    switched from character-wise list extension to string concatenation
    (it produced the same joined text only by accident).  The
    accumulator is now consistently a list of lines.
    """
    with myopen(input_file) as handle:
        sequence = []   # sequence lines of the record being built
        name = ''
        begun = False   # True once the first '>' header has been seen
        for line in handle:
            line = line.strip()
            if line.startswith('>'):
                if begun:
                    yield Fasta(name, ''.join(sequence))
                name = line[1:]
                sequence = []
                begun = True
            else:
                # Lines before the first header are accumulated but
                # never emitted (original behavior kept).
                sequence.append(line)
        if name != '':
            yield Fasta(name, ''.join(sequence))
class OptionPlotoptionsScatter3dSonificationContexttracksMappingTime(Options):
    """Generated wrapper for the scatter3d sonification
    ``contextTracks.mapping.time`` option group.

    Each option is a read/write property delegating to the shared config
    store (``_config_get`` / ``_config``).

    Fix: the extracted source had lost the ``@property`` /
    ``@<name>.setter`` decorators, so every setter ``def`` shadowed its
    getter; the decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class LocalExecutedConditionalSection(ConditionalSection):
    """A conditional section evaluated eagerly during local execution:
    branch expressions are evaluated as branches are declared, and the
    first truthy branch (or the final else) is taken immediately."""

    def __init__(self, name: str):
        # The branch actually taken; None until some expression evaluates truthy.
        self._selected_case: Optional[Case] = None
        super().__init__(name=name)

    def start_branch(self, c: Case, last_case: bool=False) -> Case:
        """Register a branch and, if no branch has matched yet, evaluate
        its expression now; a truthy result (or the trailing else) marks
        this branch as taken in the execution state."""
        added_case = super().start_branch(c, last_case)
        ctx = FlyteContextManager.current_context()
        if (self._selected_case is None):
            # A None expression or the last case acts as the else-branch.
            if ((c.expr is None) or c.expr.eval() or last_case):
                ctx.execution_state.take_branch()
                self._selected_case = added_case
        return added_case

    def end_branch(self) -> Union[(Condition, Promise)]:
        """Close the current branch.  On the final branch, pop the
        context and return the selected branch's output promise (or
        raise its recorded error); otherwise return the condition so
        chaining can continue."""
        ctx = FlyteContextManager.current_context()
        ctx.execution_state.branch_complete()
        if (self._last_case and self._selected_case):
            FlyteContextManager.pop_context()
            if ((self._selected_case.output_promise is None) and (self._selected_case.err is None)):
                raise AssertionError('Bad conditional statements, did not resolve in a promise')
            elif (self._selected_case.output_promise is not None):
                return typing.cast(Promise, self._compute_outputs(self._selected_case.output_promise))
            raise ValueError(self._selected_case.err)
        return self._condition

    def _compute_outputs(self, selected_output_promise) -> Optional[Union[(Tuple[Promise], Promise, VoidPromise)]]:
        """Re-bind the selected branch's outputs to this section's output
        variable names; a section with no outputs yields a VoidPromise."""
        curr = self.compute_output_vars()
        if (curr is None):
            return VoidPromise(self.name)
        if (not isinstance(selected_output_promise, tuple)):
            selected_output_promise = (selected_output_promise,)
        promises = [Promise(var=x, val=v.val) for (x, v) in zip(curr, selected_output_promise)]
        return create_task_output(promises)
_default
class ImageAttachment(Attachment):
    """An image attachment with optional dimensions and preview images.

    NOTE(review): the ``attr.ib`` fields imply an ``@attr.s`` class
    decorator, and ``_from_graphql`` / ``_from_list`` read like
    ``@classmethod`` alternate constructors — those decorators appear
    stripped in this extract; confirm against the original source.
    """
    # File extension of the original upload, e.g. "png".
    original_extension = attr.ib(None, type=Optional[str])
    # Original pixel dimensions (None when unknown).
    width = attr.ib(None, converter=_util.int_or_none, type=Optional[int])
    height = attr.ib(None, converter=_util.int_or_none, type=Optional[int])
    is_animated = attr.ib(None, type=Optional[bool])
    # Set of preview Images at various sizes.
    previews = attr.ib(factory=set, type=Set[Image])

    def _from_graphql(cls, data):
        # Collect every preview variant; Nones are filtered out below.
        previews = {Image._from_uri_or_none(data.get('thumbnail')), Image._from_uri_or_none((data.get('preview') or data.get('preview_image'))), Image._from_uri_or_none(data.get('large_preview')), Image._from_uri_or_none(data.get('animated_image'))}
        return cls(original_extension=(data.get('original_extension') or (data['filename'].split('-')[0] if data.get('filename') else None)), width=data.get('original_dimensions', {}).get('width'), height=data.get('original_dimensions', {}).get('height'), is_animated=(data['__typename'] == 'MessageAnimatedImage'), previews={p for p in previews if p}, id=data.get('legacy_attachment_id'))

    def _from_list(cls, data):
        previews = {Image._from_uri_or_none(data['image']), Image._from_uri(data['image1']), Image._from_uri(data['image2'])}
        return cls(width=data['original_dimensions'].get('x'), height=data['original_dimensions'].get('y'), previews={p for p in previews if p}, id=data['legacy_attachment_id'])
def infer_conv_output_shape(input_shape: List[int], kernel_size: int, stride: int, padding: int) -> List[int]:
    """Compute per-dimension output lengths of a convolution.

    Applies ``(L - k + 2p) / s + 1`` to each entry of `input_shape` and
    raises ValueError as soon as any dimension does not divide evenly.
    """
    def _out_length(in_len: int) -> int:
        out_len = (in_len - kernel_size + 2 * padding) / stride + 1
        if not out_len.is_integer():
            raise ValueError(f'Stride {stride} is not compatible with input shape {input_shape}, kernel size {kernel_size} and padding {padding}!')
        return int(out_len)

    return [_out_length(in_len) for in_len in input_shape]
class OptionSeriesNetworkgraphSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Generated wrapper for the networkgraph sonification
    ``defaultInstrumentOptions.activeWhen`` option group.

    Each option is a read/write property delegating to the shared config
    store (``_config_get`` / ``_config``).

    Fix: the extracted source had lost the ``@property`` /
    ``@<name>.setter`` decorators, so every setter ``def`` shadowed its
    getter; the decorators are restored here.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class BMGNodesTest(unittest.TestCase):
    """Unit tests for BMG graph node types: value storage, `size`
    inference, and `support` rendering."""

    def test_RealNode(self) -> None:
        """A scalar constant has empty size and renders its value."""
        r42 = RealNode(42.0)
        self.assertEqual(r42.value, 42.0)
        self.assertEqual(size(r42), torch.Size([]))
        self.assertEqual(support(r42), 'tensor(42.)')

    def test_MultiplicationNode(self) -> None:
        """Multiplying scalar constants stays scalar; support shows the product."""
        r2 = RealNode(2.0)
        r3 = RealNode(3.0)
        rx = MultiplicationNode([r2, r3])
        self.assertEqual(size(rx), torch.Size([]))
        self.assertEqual(support(rx), 'tensor(6.)')

    def test_ConstantTensorNode_1d(self) -> None:
        """1-D constant tensors preserve elements and size."""
        v42 = torch.tensor([42, 43])
        t42 = ConstantTensorNode(v42)
        self.assertEqual(t42.value[0], v42[0])
        self.assertEqual(t42.value[1], v42[1])
        self.assertEqual(v42.size(), torch.Size([2]))
        self.assertEqual(size(t42), v42.size())
        self.assertEqual(support(t42), 'tensor([42, 43])')

    def test_ConstantTensorNode_2d(self) -> None:
        """2-D constant tensors preserve elements and size."""
        v42 = torch.tensor([[42, 43], [44, 45]])
        t42 = ConstantTensorNode(v42)
        self.assertEqual(t42.value[(0, 0)], v42[(0, 0)])
        self.assertEqual(t42.value[(1, 0)], v42[(1, 0)])
        self.assertEqual(v42.size(), torch.Size([2, 2]))
        self.assertEqual(size(t42), v42.size())
        # Compare stripped so leading/trailing newlines do not matter.
        expected = '\ntensor([[42, 43],\n        [44, 45]])'
        self.assertEqual(support(t42).strip(), expected.strip())

    def test_ConstantRealMatrixNode_2d(self) -> None:
        """ConstantRealMatrixNode behaves like ConstantTensorNode for 2-D values."""
        v42 = torch.tensor([[42, 43], [44, 45]])
        t42 = ConstantRealMatrixNode(v42)
        self.assertEqual(t42.value[(0, 0)], v42[(0, 0)])
        self.assertEqual(t42.value[(1, 0)], v42[(1, 0)])
        self.assertEqual(v42.size(), torch.Size([2, 2]))
        self.assertEqual(size(t42), v42.size())
        expected = '\ntensor([[42, 43],\n        [44, 45]])'
        self.assertEqual(support(t42).strip(), expected.strip())

    def test_MatrixMultiplicationNode(self) -> None:
        """Matrix multiply: inferred size and support match torch.mm."""
        v42 = torch.tensor([[42, 43], [44, 45]])
        mv = torch.mm(v42, v42)
        t42 = ConstantRealMatrixNode(v42)
        mt = MatrixMultiplicationNode(t42, t42)
        self.assertEqual(v42.size(), torch.Size([2, 2]))
        self.assertEqual(size(mt), mv.size())
        expected = '\ntensor([[3656, 3741],\n        [3828, 3917]])\n'
        self.assertEqual(support(mt).strip(), expected.strip())

    def test_inputs_and_outputs(self) -> None:
        """Output edge counts track how many input slots point at a node,
        and are updated when an input is replaced."""
        r1 = RealNode(1.0)
        self.assertEqual(len(r1.outputs.items), 0)
        # r1 feeds both mean and sigma, so its edge count to n is 2.
        n = NormalNode(r1, r1)
        self.assertEqual(r1.outputs.items[n], 2)
        r2 = RealNode(2.0)
        # Replacing one input slot drops r1's count to 1 and adds r2.
        n.inputs[0] = r2
        self.assertEqual(r1.outputs.items[n], 1)
        self.assertEqual(r2.outputs.items[n], 1)
class MagicIndex(object):
    """Find a "magic" index (``array[i] == i``) in a sorted array that
    may contain duplicate values, via a pruned binary search."""

    def find_magic_index(self, array):
        """Return an index i with array[i] == i, or -1 when none exists
        (also for None or empty input)."""
        if not array:
            return -1
        return self._find_magic_index(array, 0, len(array) - 1)

    def _find_magic_index(self, array, start, end):
        """Recursive search over array[start:end+1]."""
        out_of_range = end < start or start < 0 or end >= len(array)
        if out_of_range:
            return -1
        mid = (start + end) // 2
        if array[mid] == mid:
            return mid
        # With duplicates allowed, a magic index can hide on either side,
        # but the sorted property still lets us skip part of each half:
        # on the left, indices above array[mid] cannot be magic; on the
        # right, indices below array[mid] cannot be magic.
        left_hit = self._find_magic_index(array, start, end=min(mid - 1, array[mid]))
        if left_hit != -1:
            return left_hit
        return self._find_magic_index(array, start=max(mid + 1, array[mid]), end=end)
class LoggingGenericCommon(ModelNormal):
    """Generated OpenAPI model for common generic-logging settings
    (message type, timestamp format, compression codec).

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below look like stripped decorators (``@cached_property`` and
    ``@convert_js_args_to_python_args`` in the generator's output) —
    confirm against the original generated source.
    """
    # Enum constraints: map of (attribute,) -> {NAME: wire value}.
    allowed_values = {('message_type',): {'CLASSIC': 'classic', 'LOGGLY': 'loggly', 'LOGPLEX': 'logplex', 'BLANK': 'blank'}, ('compression_codec',): {'ZSTD': 'zstd', 'SNAPPY': 'snappy', 'GZIP': 'gzip'}}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared on the model."""
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Declared attribute name -> accepted types (as tuples)."""
        return {'message_type': (str,), 'timestamp_format': (str, none_type), 'compression_codec': (str,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    # Python attribute name -> JSON key on the wire.
    attribute_map = {'message_type': 'message_type', 'timestamp_format': 'timestamp_format', 'compression_codec': 'compression_codec'}
    read_only_vars = {'timestamp_format'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server data
        (read-only attributes ARE allowed here, unlike __init__)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys not in the schema when configured to.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes that bypass the model's managed data store.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects positional args and raises on
        attempts to set read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the read-only check runs AFTER setattr here;
            # upstream generators usually check first — confirm order.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptionPlotoptionsBarSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Generated wrapper for the bar sonification
    ``defaultSpeechOptions.mapping.playDelay`` option group.

    Each option is a read/write property delegating to the shared config
    store (``_config_get`` / ``_config``).

    Fix: the extracted source had lost the ``@property`` /
    ``@<name>.setter`` decorators, so every setter ``def`` shadowed its
    getter; the decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Generated wrapper for the errorbar sonification
    ``defaultInstrumentOptions.mapping.volume`` option group.

    Each option is a read/write property delegating to the shared config
    store (``_config_get`` / ``_config``).

    Fix: the extracted source had lost the ``@property`` /
    ``@<name>.setter`` decorators, so every setter ``def`` shadowed its
    getter; the decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class CollisionObj(object):
    """Swagger-generated model describing a vehicle collision event.

    Fixes to the extracted source:
    - restored the ``@property`` / ``@<name>.setter`` decorators (every
      setter ``def`` was shadowing its getter, and the stray
      ``_over.setter`` / ``_at.setter`` lines — remnants of
      ``@roll_over.setter`` / ``@updated_at.setter`` — raised NameError
      at class-creation time);
    - replaced ``six.iteritems`` with ``dict.items`` (works on both
      Python 2 and 3, drops the six dependency from this method).
    """
    # Attribute name -> declared swagger type.
    swagger_types = {'embedded': 'object', 'front': 'CollisionObjFront', 'id': 'str', 'lateral': 'CollisionObjFront', 'pedestrian': 'bool', 'rear': 'CollisionObjFront', 'roll_over': 'bool', 'updated_at': 'datetime'}
    # Attribute name -> JSON key on the wire.
    attribute_map = {'embedded': '_embedded', 'front': 'front', 'id': 'id', 'lateral': 'lateral', 'pedestrian': 'pedestrian', 'rear': 'rear', 'roll_over': 'rollOver', 'updated_at': 'updatedAt'}

    def __init__(self, embedded=None, front=None, id=None, lateral=None, pedestrian=None, rear=None, roll_over=None, updated_at=None):
        """All fields optional; only explicitly passed (non-None) values
        are assigned through their property setters."""
        self._embedded = None
        self._front = None
        self._id = None
        self._lateral = None
        self._pedestrian = None
        self._rear = None
        self._roll_over = None
        self._updated_at = None
        self.discriminator = None
        if (embedded is not None):
            self.embedded = embedded
        if (front is not None):
            self.front = front
        if (id is not None):
            self.id = id
        if (lateral is not None):
            self.lateral = lateral
        if (pedestrian is not None):
            self.pedestrian = pedestrian
        if (rear is not None):
            self.rear = rear
        if (roll_over is not None):
            self.roll_over = roll_over
        if (updated_at is not None):
            self.updated_at = updated_at

    @property
    def embedded(self):
        return self._embedded

    @embedded.setter
    def embedded(self, embedded):
        self._embedded = embedded

    @property
    def front(self):
        return self._front

    @front.setter
    def front(self, front):
        self._front = front

    @property
    def id(self):
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def lateral(self):
        return self._lateral

    @lateral.setter
    def lateral(self, lateral):
        self._lateral = lateral

    @property
    def pedestrian(self):
        return self._pedestrian

    @pedestrian.setter
    def pedestrian(self, pedestrian):
        self._pedestrian = pedestrian

    @property
    def rear(self):
        return self._rear

    @rear.setter
    def rear(self, rear):
        self._rear = rear

    @property
    def roll_over(self):
        return self._roll_over

    @roll_over.setter
    def roll_over(self, roll_over):
        self._roll_over = roll_over

    @property
    def updated_at(self):
        return self._updated_at

    @updated_at.setter
    def updated_at(self, updated_at):
        self._updated_at = updated_at

    def to_dict(self):
        """Return the model as a plain dict, recursing into nested
        models, lists, and dicts that expose ``to_dict``."""
        result = {}
        for (attr, _) in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(CollisionObj, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        # Equality is full attribute-dict equality between two CollisionObj.
        if (not isinstance(other, CollisionObj)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
class TestPutDefaultStorageConfigSecretsS3():
(scope='function')
def url(self, storage_config_default) -> str:
    """Fixture: PUT-secrets URL for the default S3 storage config.
    NOTE(review): the bare "(scope='function')" line above looks like a
    stripped "@pytest.fixture(scope='function')" decorator — confirm."""
    return (V1_URL_PREFIX + STORAGE_DEFAULT_SECRETS).format(storage_type=StorageType.s3.value)
(scope='function')
def payload(self):
    """Fixture: minimal AWS secrets payload (values blanked in extract).
    NOTE(review): bare "(scope='function')" above looks like a stripped
    "@pytest.fixture(scope='function')" decorator — confirm."""
    return {StorageSecrets.AWS_ACCESS_KEY_ID.value: '', StorageSecrets.AWS_SECRET_ACCESS_KEY.value: ''}
def test_put_default_config_secrets_unauthenticated(self, api_client: TestClient, payload, url):
    """A request without auth headers is rejected with 401."""
    response = api_client.put(url, headers={}, json=payload)
    assert (401 == response.status_code)
def test_put_default_config_secrets_wrong_scope(self, api_client: TestClient, payload, url, generate_auth_header):
    """A token with only the read scope gets 403 on the write endpoint."""
    auth_header = generate_auth_header([STORAGE_READ])
    response = api_client.put(url, headers=auth_header, json=payload)
    assert (403 == response.status_code)
def test_put_default_config_secret_invalid_config(self, api_client: TestClient, payload, generate_auth_header):
    """An unknown storage_type in the URL is rejected with 422."""
    auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
    url = (V1_URL_PREFIX + STORAGE_DEFAULT_SECRETS).format(storage_type='invalid_type')
    response = api_client.put(url, headers=auth_header, json=payload)
    assert (422 == response.status_code)
def test_update_default_with_invalid_secrets_key(self, api_client: TestClient, generate_auth_header, url):
    """A payload with an unknown key gets 400 plus per-field validation
    details (missing required keys and the extra key)."""
    auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
    response = api_client.put((url + '?verify=False'), headers=auth_header, json={'bad_key': '12345'})
    assert (response.status_code == 400)
    assert (response.json() == {'detail': ["field required ('aws_access_key_id',)", "field required ('aws_secret_access_key',)", "extra fields not permitted ('bad_key',)"]})
('fides.api.models.storage.StorageConfig.set_secrets')
def test_update_default_set_secrets_error(self, set_secrets_mock: Mock, api_client: TestClient, generate_auth_header, url, payload):
    """A ValueError raised by StorageConfig.set_secrets surfaces as 400.
    NOTE(review): the bare string line above looks like a stripped
    "@mock.patch(...)" decorator — confirm."""
    set_secrets_mock.side_effect = ValueError('This object must have a `type` to validate secrets.')
    auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
    response = api_client.put((url + '?verify=False'), headers=auth_header, json=payload)
    assert (response.status_code == 400)
def test_put_default_config_secrets_without_verifying(self, db: Session, api_client: TestClient, payload, url, generate_auth_header, storage_config_default):
auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
response = api_client.put((url + '?verify=False'), headers=auth_header, json=payload)
assert (200 == response.status_code)
db.refresh(storage_config_default)
assert (json.loads(response.text) == {'msg': 'Secrets updated for default config of storage type: s3.', 'test_status': None, 'failure_reason': None})
assert (storage_config_default.secrets[StorageSecrets.AWS_ACCESS_KEY_ID.value] == '')
assert (storage_config_default.secrets[StorageSecrets.AWS_SECRET_ACCESS_KEY.value] == '')
('fides.api.api.v1.endpoints.storage_endpoints.secrets_are_valid')
def test_put_default_config_secrets_and_verify(self, mock_valid: Mock, db: Session, api_client: TestClient, payload, url, generate_auth_header, storage_config_default):
mock_valid.return_value = True
auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
response = api_client.put(url, headers=auth_header, json=payload)
assert (200 == response.status_code)
db.refresh(storage_config_default)
assert (json.loads(response.text) == {'msg': 'Secrets updated for default config of storage type: s3.', 'test_status': 'succeeded', 'failure_reason': None})
assert (storage_config_default.secrets[StorageSecrets.AWS_ACCESS_KEY_ID.value] == '')
assert (storage_config_default.secrets[StorageSecrets.AWS_SECRET_ACCESS_KEY.value] == '')
mock_valid.reset_mock()
mock_valid.return_value = False
response = api_client.put(url, headers=auth_header, json=payload)
assert (json.loads(response.text) == {'msg': 'Secrets updated for default config of storage type: s3.', 'test_status': 'failed', 'failure_reason': None})
('fides.api.service.storage.storage_authenticator_service.get_s3_session')
def test_put_default_s3_config_secrets_and_verify(self, get_s3_session_mock: Mock, api_client: TestClient, payload, url, generate_auth_header):
auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
response = api_client.put(url, headers=auth_header, json=payload)
assert (200 == response.status_code)
get_s3_session_mock.assert_called_once_with(S3AuthMethod.SECRET_KEYS.value, {'aws_access_key_id': payload['aws_access_key_id'], 'aws_secret_access_key': payload['aws_secret_access_key']}) |
class MultiSelect(HasPrivateTraits):
    """Model with two cascading string-list selections.

    The first editor picks `selected` out of `choices`; the second picks
    `result` out of whatever is currently in `selected`.
    """
    # Full set of candidate strings offered by the first editor.
    choices = List(Str)
    # Subset chosen from `choices`; feeds the second editor's choices.
    selected = List(Str)
    # Final picks drawn from `selected`.
    result = List(Str)
    # Side-by-side editors; the second editor's choices track `selected`.
    traits_view = View(HGroup(Item('selected', show_label=False, editor=StringListEditor(choices='choices')), Item('result', show_label=False, editor=StringListEditor(choices='selected'))), width=0.2, height=0.25)
class ProviderInfo():
    """Identity and filtering settings for an ETW provider.

    Holds the provider name/GUID, trace level, the any/all keyword bitmasks
    (computed from keyword-name lists when lists are given), and optional
    ctypes ENABLE_TRACE_PARAMETERS in `params`.
    """
    def __init__(self, name, guid, level=et.TRACE_LEVEL_INFORMATION, any_keywords=None, all_keywords=None, params=None):
        self.name = name
        self.guid = guid
        self.level = level
        # Keyword args may arrive either as a list of keyword names (resolved
        # to a bitmask via get_keywords_bitmask) or as an already-built bitmask.
        if ((type(any_keywords) is list) or (any_keywords is None)):
            self.any_bitmask = get_keywords_bitmask(guid, any_keywords)
        else:
            self.any_bitmask = any_keywords
        if ((type(all_keywords) is list) or (all_keywords is None)):
            self.all_bitmask = get_keywords_bitmask(guid, all_keywords)
        else:
            self.all_bitmask = all_keywords
        self.params = params
    def __eq__(self, other):
        """Deep-compare two ProviderInfo objects, including ctypes `params`.

        `params` is popped out of both instance dicts (and restored at the
        end) so the plain attributes can be compared with a dict equality,
        while the ctypes structures are compared field by field —
        EVENT_FILTER_DESCRIPTOR pointer fields are followed one level deep.

        NOTE(review): when self.params is falsy but other.params is truthy,
        the params difference is never examined (the outer `if` has no else),
        so the objects can still compare equal — confirm this asymmetry is
        intended.
        """
        result = True
        self_dict = self.__dict__
        other_dict = other.__dict__
        # Temporarily remove params so the dict comparison below excludes it.
        self_params = self_dict.pop('params')
        other_params = other_dict.pop('params')
        if self_params:
            if other_params:
                for field in self_params.contents._fields_:
                    attr_name = field[0]
                    (a, b) = (getattr(self_params.contents, attr_name), getattr(other_params.contents, attr_name))
                    is_desc = isinstance(a, ct.POINTER(ep.EVENT_FILTER_DESCRIPTOR))
                    if (is_desc is True):
                        # Pointer field: compare the pointed-to descriptor's
                        # fields when the pointer is non-NULL.
                        if a:
                            for desc_field in a.contents._fields_:
                                desc_attr_name = desc_field[0]
                                (a_desc, b_desc) = (getattr(a.contents, desc_attr_name), getattr(b.contents, desc_attr_name))
                                if (a_desc != b_desc):
                                    result = False
                                    break
                    elif (a != b):
                        result = False
                        break
            else:
                # self has params, other does not -> unequal.
                result = False
        result = ((self_dict == other_dict) and result)
        # Restore the popped attribute on both objects.
        self_dict['params'] = self_params
        other_dict['params'] = other_params
        return result
def extractKagerouscansWordpressCom(item):
    """Build a release message for a kagerouscans.wordpress.com feed item.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    releases = (
        ('the film emperor asks for divorce every day', 'the film emperor asks for divorce every day', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, translation_type in releases:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
class OptionSeriesVariablepieSonificationContexttracksMappingTremoloDepth(Options):
    """Options container for the sonification `tremolo.depth` mapping.

    NOTE(review): the stripped source defined each name twice (getter and
    setter bodies, no decorators), so the later def silently shadowed the
    earlier one. The @property / @<name>.setter pairs below were restored to
    make the accessors work again — confirm against the original module.
    """

    @property
    def mapFunction(self):
        """Current `mapFunction` option value (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Current `mapTo` option value (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Current `max` option value (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Current `min` option value (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Current `within` option value (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): decorator reconstructed — the stripped source kept only
# `.parametrize('n_gpus', [0])`, which is a syntax error. Assumes pytest is
# imported at module scope.
@pytest.mark.parametrize('n_gpus', [0])
def test_python_backend(n_gpus: int):
    """Drive the FauxPilot setup wizard for the Python backend, then boot the
    stack with docker compose and check a single inference round-trip."""
    proc = run_common_setup_steps(n_gpus)
    choices = enter_input(proc, '.*Choose your backend.*Enter your choice[^:]+: ?', '2')
    assert ('[2] Python backend' in choices), 'Option 2 should be Python backend'
    choices = enter_input(proc, '.*Models available:.*Enter your choice[^:]+: ?', '1')
    assert ('[1] codegen-350M-mono' in choices), 'Option 1 should be codegen-350M-mono'
    enter_input(proc, '.*share (your )?huggingface cache[^:]+: ?', 'y')
    enter_input(proc, '.*cache directory[^:]+: ?', '')
    enter_input(proc, '.*use int8[^:]+: ?', 'n')
    enter_input(proc, '.*run FauxPilot\\? \\[y/n\\] ', 'n', timeout=120)
    # Preserve the generated .env so the compose run below sees the answers.
    shutil.copy(str(root.joinpath('.env')), str(curdir.joinpath('test.env')))
    compose_file = f"docker-compose-with{('' if (n_gpus > 0) else 'out')}-gpus.yaml"
    docker_proc = None
    try:
        docker_proc = pexpect.pty_spawn.spawn(f'docker compose -f {compose_file} up', encoding='utf-8', cwd=curdir, env=load_test_env())
        print('Waiting for API to be ready...')
        docker_proc.expect('.*Started GRPCInferenceService at 0.0.0.0:8001', timeout=120)
        print('API ready, sending request...')
        response = run_inference('def hello_world():\n', max_tokens=16, return_all=True)
        assert (response['choices'][0]['text'].rstrip() == ' print("Hello World")\n\nhello_world()\n\n#')
        assert (response['choices'][0]['finish_reason'] == 'length')
    finally:
        # Always tear the stack down, even when an assertion failed above.
        if ((docker_proc is not None) and docker_proc.isalive()):
            docker_proc.kill(signal.SIGINT)
        subprocess.run(['docker-compose', '-f', compose_file, 'down'], cwd=curdir, check=True, env=load_test_env())
# NOTE(review): the two bare expressions below look like decorators whose
# '@...' prefix was lost in extraction — presumably a route registration such
# as @app.route('/feed-filters/feedid/<int:feedid>') and an auth guard ending
# in "_required" (e.g. @login_required). Confirm against the original module.
('/feed-filters/feedid/<int:feedid>')
_required
def feedIdView(feedid):
    """Render the filter-state page for one RSS feed entry.

    Looks up the RssFeedEntry by id and derives the filter state from the
    entry's first non-empty URL; placeholder strings are shown when the
    entry or its URLs are missing.
    """
    feed = g.session.query(db.RssFeedEntry).filter((db.RssFeedEntry.id == feedid)).scalar()
    if feed:
        # Only URLs that are actually set; the first one drives the lookup.
        urls = [tmp.feed_url for tmp in feed.urls if tmp.feed_url]
        if urls:
            filter_state = content_views.get_filter_state_for_url(urls[0])
        else:
            filter_state = 'No URLs?'
    else:
        filter_state = 'Entry missing?'
    return render_template('rss-pages/feed_filter_item.html', feed=feed, feedid=feedid, filter_state=filter_state)
def processImportFile(tdenv, db, importPath, tableName):
    """Import a CSV file into `tableName` via INSERT OR REPLACE.

    Column headers drive the generated SQL:
      * a plain name binds the CSV value directly,
      * `name@Table.column` resolves the value through a sub-select on the
        foreign table (preceding `!`-prefixed headers become INNER JOINs),
      * a `unq:` prefix marks the column as part of a duplicate check,
      * a `!` prefix means the column only feeds a later join lookup.

    Bad rows (wrong column count, deprecated keys, SQL errors) are reported
    through `tdenv` and skipped; a DuplicateKeyError is raised for repeated
    `unq:` key combinations. Everything imported is committed at the end.
    """
    tdenv.DEBUG0("Processing import file '{}' for table '{}'", str(importPath), tableName)
    fkeySelectStr = '(SELECT {newValue} FROM {table} WHERE {stmt})'
    uniquePfx = 'unq:'
    uniqueLen = len(uniquePfx)
    ignorePfx = '!'
    with importPath.open('r', encoding='utf-8') as importFile:
        csvin = csv.reader(importFile, delimiter=',', quotechar="'", doublequote=True)
        columnDefs = next(csvin)
        columnCount = len(columnDefs)
        bindColumns = []
        bindValues = []
        joinHelper = []
        uniqueIndexes = []
        for (cIndex, cName) in enumerate(columnDefs):
            # BUGFIX: the separator had been stripped to '' (str.partition('')
            # raises ValueError on the first header); restored the '@' used by
            # `name@Table.column` foreign-key headers.
            (colName, _, srcKey) = cName.partition('@')
            if colName.startswith(uniquePfx):
                uniqueIndexes.append(cIndex)
                colName = colName[uniqueLen:]
            if (not srcKey):
                # Plain column: bind the CSV value positionally.
                bindColumns.append(colName)
                bindValues.append('?')
                continue
            (queryTab, _, queryCol) = srcKey.partition('.')
            if colName.startswith(ignorePfx):
                # '!' columns don't insert; they feed the next FK sub-select.
                assert srcKey
                colName = colName[len(ignorePfx):]
                joinHelper.append((colName, queryTab, queryCol))
                continue
            # Fold accumulated '!' columns into INNER JOIN clauses.
            joinTable = [queryTab]
            joinStmt = []
            for (nextCol, nextTab, nextJoin) in joinHelper:
                joinTable.append('INNER JOIN {} USING({})'.format(nextTab, nextJoin))
                joinStmt.append('{}.{} = ?'.format(nextTab, nextCol))
            joinHelper = []
            joinStmt.append('{}.{} = ?'.format(queryTab, colName))
            bindColumns.append(queryCol)
            bindValues.append(fkeySelectStr.format(newValue=srcKey, table=' '.join(joinTable), stmt=' AND '.join(joinStmt)))
        sql_stmt = '\n INSERT OR REPLACE INTO {table} ({columns}) VALUES({values})\n '.format(table=tableName, columns=','.join(bindColumns), values=','.join(bindValues))
        tdenv.DEBUG0('SQL-Statement: {}', sql_stmt)
        # Optional per-table hook, e.g. deprecationCheck<Table>, looked up by name.
        deprecationFn = getattr(sys.modules[__name__], ('deprecationCheck' + tableName), None)
        importCount = 0
        uniqueIndex = dict()
        for linein in csvin:
            if (not linein):
                continue
            lineNo = csvin.line_num
            if (len(linein) == columnCount):
                tdenv.DEBUG1(' Values: {}', ', '.join(linein))
                if deprecationFn:
                    try:
                        deprecationFn(importPath, lineNo, linein)
                    except (DeprecatedKeyError, DeletedKeyError) as e:
                        if (not tdenv.ignoreUnknown):
                            raise e
                        # Downgrade to a warning and skip the row.
                        e.category = 'WARNING'
                        tdenv.NOTE('{}', e)
                        continue
                if uniqueIndexes:
                    # Case-insensitive duplicate detection on the unq: columns.
                    keyValues = [str(linein[col]).upper() for col in uniqueIndexes]
                    key = ':!:'.join(keyValues)
                    prevLineNo = uniqueIndex.get(key, 0)
                    if prevLineNo:
                        key = '/'.join(keyValues)
                        raise DuplicateKeyError(importPath, lineNo, 'entry', key, prevLineNo)
                    uniqueIndex[key] = lineNo
                try:
                    db.execute(sql_stmt, linein)
                    importCount += 1
                except Exception as e:
                    # Report and continue: one bad row shouldn't abort the import.
                    tdenv.WARN('*** INTERNAL ERROR: {err}\nCSV File: {file}:{line}\nSQL Query: {query}\nParams: {params}\n'.format(err=str(e), file=str(importPath), line=lineNo, query=sql_stmt.strip(), params=linein))
                    pass
            else:
                tdenv.NOTE('Wrong number of columns ({}:{}): {}', importPath, lineNo, ', '.join(linein))
        db.commit()
        tdenv.DEBUG0('{count} {table}s imported', count=importCount, table=tableName)
class _PreFMBuildMessage(_BuildChrootMessage):
    """Pre-fedora-messaging style Copr build message with a JSON body schema.

    NOTE(review): the 'id' and '$schema' values in `body_schema` below are
    garbled in this copy (their URL strings appear to have been lost in
    extraction, leaving a syntactically broken literal) — restore them from
    the original module. The accessor methods were presumably @property in
    the original generated/handwritten code; the decorators are missing here.
    """
    body_schema = {'id': ' '$schema': ' 'description': 'Message sent by Copr build system', 'type': 'object', 'required': ['status', 'chroot', 'build', 'owner', 'copr', 'pkg', 'version', 'what', 'ip', 'who', 'user', 'pid'], 'properties': {'status': {'type': 'number', 'description': 'numerical representation of build status'}, 'chroot': {'type': 'string', 'description': "what chroot was this build run against, 'srpm-builds' for source builds"}, 'owner': {'type': 'string', 'description': 'owner (grup/user) of the project this build was done in'}, 'copr': {'type': 'string', 'description': 'name of the project the build was built in'}, 'build': {'type': 'number', 'description': 'build id'}, 'pkg': {'type': ['string', 'null'], 'description': 'Package name, null if unknown'}, 'version': {'type': ['string', 'null'], 'description': 'Package version, null if unknown'}, 'user': {'type': ['string', 'null'], 'description': 'Copr user who submitted the build, null if unknown'}, 'what': {'type': 'string', 'description': 'combination of all the fields'}, 'ip': {'type': ['string', 'null'], 'description': 'IP address (usually not public) of the builder'}, 'who': {'type': ['string'], 'description': 'what python module has sent this message'}, 'pid': {'type': 'number', 'description': 'process ID of the process on backend taking care of the task'}}}
    def build_id(self):
        # Numeric build id from the message body.
        return self.body['build']
    def project_name(self):
        return str(self.body['copr'])
    def project_owner(self):
        return str(self.body['owner'])
    def chroot(self):
        # 'srpm-builds' for source builds (see schema above).
        return self.body['chroot']
    def status(self):
        # Wrap the numeric status in the StatusEnum helper.
        return StatusEnum(self.body['status'])
    def package_name(self):
        # May be absent/None when the package is unknown.
        return self.body.get('pkg')
    def _evr(self):
        """Split the body's 'version' into an (epoch, version, release) triple.

        Returns (None, None, None) when no version is present; epoch is None
        when the string has no 'epoch:' prefix.
        """
        evr = self.body.get('version')
        if (not evr):
            return (None, None, None)
        e_v = evr.split(':', 1)
        epoch = None
        if (len(e_v) == 1):
            v_r = e_v[0]
        else:
            epoch = e_v[0]
            v_r = e_v[1]
        (version, release) = v_r.split('-', 1)
        return (epoch, version, release)
def create_content_addressed_github_uri(uri: URI) -> URI:
    """Resolve a Github API content URI to its content-addressed `git_url`.

    Raises CannotHandleURI when the URI is not a valid Github API url or
    does not point at a 'file' entry.
    """
    if not is_valid_api_github_uri(uri):
        raise CannotHandleURI(f"{uri} does not conform to Github's API 'url' scheme.")
    response = requests.get(uri)
    response.raise_for_status()
    payload = json.loads(response.content)
    if payload['type'] != 'file':
        raise CannotHandleURI(f"Expected url to point to a 'file' type, instead received {payload['type']}.")
    return payload['git_url']
class ToNumber(FunctionSignature):
    """EQL `number(source[, base])`: convert a string to an int or float.

    Returns None (hence `sometimes_null`) when the input is not a string or
    cannot be interpreted in the given base.
    """
    name = 'number'
    argument_types = [TypeHint.String, TypeHint.Numeric]
    minimum_args = 1
    return_value = TypeHint.Numeric
    sometimes_null = True

    # NOTE(review): @classmethod restored — `run` takes `cls` but the
    # decorator was lost in the stripped source.
    @classmethod
    def run(cls, source, base=None):
        if is_string(source):
            # One decimal point, base 10 (or unspecified) -> float.
            if ((len(source.split('.')) == 2) and (base in (None, 10))):
                return float(source)
            # '0x' prefix with base 16 (or unspecified) -> hex int.
            elif (source.startswith('0x') and (base in (None, 16))):
                return int(source[2:], 16)
            # Optional sign followed by digits -> int in the given base.
            elif source.lstrip('-+').isdigit():
                return int(source, (base or 10))
        # Falls through to None for anything unconvertible.
# NOTE(review): decorator reconstructed — the stripped source kept only
# `.parametrize(...)`, a syntax error. Assumes pytest is imported at module scope.
@pytest.mark.parametrize('call_args,call_kwargs', (((9, 7), {}), ((9,), {'b': 7}), (tuple(), {'a': 9, 'b': 7})))
def test_call_with_multiple_arguments(math_contract, call, call_args, call_kwargs):
    """`add(9, 7)` returns 16 whether arguments arrive positionally, mixed, or by keyword."""
    result = call(contract=math_contract, contract_function='add', func_args=call_args, func_kwargs=call_kwargs)
    assert (result == 16)
class IniHandler(BaseHandler):
    """Kaptan handler that reads and writes INI-format configuration."""

    def load(self, value):
        """Parse *value* (an INI document string) into a nested dict."""
        parser = KaptanIniParser()
        parser.read_file(StringIO(value))
        return parser.as_dict()

    def dump(self, data, file_=None):
        """Write *data* to the file at *file_*; string export is unsupported."""
        if file_ is None:
            raise NotImplementedError('Exporting .ini as string is not supported.')
        parser = KaptanIniParser()
        parser.from_dict(data)
        with open(file_, 'w') as fp:
            parser.write(fp)
def find_uv_mesh(objects, insideModifiers=True):
    """Return the first mesh carrying exactly the 'uv'/'model' shape-key pair.

    When ``insideModifiers`` is true, also accept the target of a
    SURFACE_DEFORM modifier that has exactly two shape keys (note: the
    modifier path checks only the key count, not the key names, matching the
    original behavior). Returns None when nothing qualifies.
    """
    for candidate in objects:
        if not (candidate and candidate.type == 'MESH'):
            continue
        keys = candidate.data.shape_keys
        if (keys and len(keys.key_blocks) == 2
                and 'uv' in keys.key_blocks and 'model' in keys.key_blocks):
            return candidate
        if not insideModifiers:
            continue
        for modifier in candidate.modifiers:
            if modifier.type != 'SURFACE_DEFORM' or not modifier.target:
                continue
            target_keys = modifier.target.data.shape_keys
            if target_keys and len(target_keys.key_blocks) == 2:
                return modifier.target
    return None
def test_skill2skill_message():
    """End-to-end check that one skill can route a message to another skill
    via `handler.context.send_to_skill` on a running agent."""
    with tempfile.TemporaryDirectory() as dir_name:
        with cd(dir_name):
            agent_name = 'MyAgent'
            private_key_path = os.path.join(CUR_PATH, 'data', DEFAULT_PRIVATE_KEY_FILE)
            builder = AEABuilder(registry_dir=Path(ROOT_DIR, 'packages'))
            builder.set_name(agent_name)
            builder.add_private_key(DEFAULT_LEDGER, private_key_path)
            builder.add_skill(Path(CUR_PATH, 'data', 'dummy_skill'))
            builder.add_connection(Path(ROOT_DIR, 'packages', 'fetchai', 'connections', 'stub'))
            agent = builder.build()
            # Address the envelope to the dummy skill from a fictional sender skill.
            msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'hello')
            msg.to = str(DUMMY_SKILL_PUBLIC_ID)
            msg.sender = 'some_author/some_skill:0.1.0'
            envelope = Envelope(to=msg.to, sender=msg.sender, message=msg)
            # Run the agent in a background thread and wait until it is live.
            with run_in_thread(agent.start, timeout=20, on_exit=agent.stop):
                wait_for_condition((lambda : agent.is_running), timeout=20)
                default_protocol_public_id = DefaultMessage.protocol_id
                handler = agent.resources.get_handler(default_protocol_public_id, DUMMY_SKILL_PUBLIC_ID)
                assert (handler is not None), 'Handler is not set.'
                # Send skill-to-skill and wait for the handler to record it.
                handler.context.send_to_skill(envelope)
                wait_for_condition((lambda : (len(handler.handled_messages) == 1)), timeout=5, error_msg='The message is not inside the handled_messages.')
class PrivateChat(Chat):
    """A one-to-one chat: the counterpart is exposed as `self.other`.

    When `other_is_self` is set (and the chat includes the user), `other`
    aliases the user's own member object; otherwise a member mirroring the
    chat's own name/alias/uid is created for the counterpart.
    """
    # The counterpart member of this private chat.
    other: ChatMember
    def __init__(self, *, channel: Optional[SlaveChannel]=None, middleware: Optional[Middleware]=None, module_name: str='', channel_emoji: str='', module_id: ModuleID=ModuleID(''), name: str='', alias: Optional[str]=None, id: ChatID=ChatID(''), uid: ChatID=ChatID(''), vendor_specific: Dict[(str, Any)]=None, description: str='', notification: ChatNotificationState=ChatNotificationState.ALL, with_self: bool=True, other_is_self: bool=False):
        super().__init__(channel=channel, middleware=middleware, module_name=module_name, channel_emoji=channel_emoji, module_id=module_id, name=name, alias=alias, id=id, uid=uid, vendor_specific=vendor_specific, description=description, notification=notification, with_self=with_self)
        if (other_is_self and with_self):
            # Chat with oneself: reuse the 'self' member as the counterpart.
            assert (self.self is not None)
            self.other = self.self
        else:
            # Counterpart member shares the chat's identity attributes.
            self.other = self.add_member(name=name, alias=alias, uid=uid, vendor_specific=vendor_specific, description=description)
        self.verify()
    def verify(self):
        """Extend base verification: every member must be a ChatMember."""
        super().verify()
        assert all((isinstance(member, ChatMember) for member in self.members)), f'Some members of this chat is not a valid one: {self.members!r}'
class OptionSeriesBoxplotSonificationContexttracksMapping(Options):
    """Per-parameter mapping sub-options for boxplot sonification context tracks.

    NOTE(review): the stripped source had lost the accessor decorators (the
    `text` getter/setter pair would otherwise shadow each other); @property /
    @text.setter were restored — confirm against the original module.
    """

    @property
    def frequency(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingFrequency':
        """Sub-options stored under the `frequency` key."""
        return self._config_sub_data('frequency', OptionSeriesBoxplotSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingGapbetweennotes':
        """Sub-options stored under the `gapBetweenNotes` key."""
        return self._config_sub_data('gapBetweenNotes', OptionSeriesBoxplotSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingHighpass':
        """Sub-options stored under the `highpass` key."""
        return self._config_sub_data('highpass', OptionSeriesBoxplotSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingLowpass':
        """Sub-options stored under the `lowpass` key."""
        return self._config_sub_data('lowpass', OptionSeriesBoxplotSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingNoteduration':
        """Sub-options stored under the `noteDuration` key."""
        return self._config_sub_data('noteDuration', OptionSeriesBoxplotSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingPan':
        """Sub-options stored under the `pan` key."""
        return self._config_sub_data('pan', OptionSeriesBoxplotSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingPitch':
        """Sub-options stored under the `pitch` key."""
        return self._config_sub_data('pitch', OptionSeriesBoxplotSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingPlaydelay':
        """Sub-options stored under the `playDelay` key."""
        return self._config_sub_data('playDelay', OptionSeriesBoxplotSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingRate':
        """Sub-options stored under the `rate` key."""
        return self._config_sub_data('rate', OptionSeriesBoxplotSonificationContexttracksMappingRate)

    @property
    def text(self):
        """Current `text` option value (None when unset)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingTime':
        """Sub-options stored under the `time` key."""
        return self._config_sub_data('time', OptionSeriesBoxplotSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingTremolo':
        """Sub-options stored under the `tremolo` key."""
        return self._config_sub_data('tremolo', OptionSeriesBoxplotSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingVolume':
        """Sub-options stored under the `volume` key."""
        return self._config_sub_data('volume', OptionSeriesBoxplotSonificationContexttracksMappingVolume)
class DrQTorso(hk.Module):
    """Convolutional torso network: four 3x3 conv layers (the first strided
    by 2) with orthogonal initialization, followed by a flatten.
    """
    def __init__(self, activation: Callable[([jnp.ndarray], jnp.ndarray)]=jax.nn.relu, data_format: str='NHWC', name: str='drq_torso'):
        super().__init__(name=name)
        # Orthogonal-init gain: sqrt(2) is the conventional gain for ReLU,
        # 1.0 for any other activation.
        gain = ((2 ** 0.5) if (activation is jax.nn.relu) else 1.0)
        def build_conv_layer(name: str, output_channels: int=32, kernel_shape: Sequence[int]=(3, 3), stride: int=1):
            # All conv layers share padding/init; only name and stride vary.
            return hk.Conv2D(output_channels=output_channels, kernel_shape=kernel_shape, stride=stride, padding='SAME', data_format=data_format, w_init=hk.initializers.Orthogonal(scale=gain), b_init=jnp.zeros, name=name)
        self._network = hk.Sequential([build_conv_layer('conv_0', stride=2), activation, build_conv_layer('conv_1', stride=1), activation, build_conv_layer('conv_2', stride=1), activation, build_conv_layer('conv_3', stride=1), activation, hk.Flatten()])
    def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray:
        """Apply the torso to float pixel inputs normalized to [0, 1]."""
        if (not jnp.issubdtype(inputs, jnp.floating)):
            raise ValueError('Expect inputs to be float pixel values normalized between 0 to 1.')
        # Center the pixel values around zero before the conv stack.
        preprocessed_inputs = (inputs - 0.5)
        torso_output = self._network(preprocessed_inputs)
        return torso_output
class CommandTestCase(ApiTestBase):
    """Tests for the `send_all_england_alerts` management command."""
    fixtures = (ApiTestBase.fixtures + ['functional-measures-dont-edit'])

    # NOTE(review): @classmethod restored — Django defines setUpTestData as a
    # classmethod (it takes `cls`); the decorator was lost in the stripped source.
    @classmethod
    def setUpTestData(cls):
        super(CommandTestCase, cls).setUpTestData()
        # The command requires a current 'dashboard_data' ImportLog entry.
        max_measure_date = MeasureGlobal.objects.order_by('-month')[0].month
        ImportLog.objects.create(current_at=max_measure_date, category='dashboard_data')

    def test_send_alerts(self):
        """Each bookmark is alerted exactly once; re-runs send nothing new."""
        factory = DataFactory()
        bookmark = factory.create_org_bookmark(None)
        call_command('send_all_england_alerts')
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].to, [bookmark.user.email])
        mail.outbox = []
        # A newly created bookmark triggers one more alert on the next run.
        bookmark2 = factory.create_org_bookmark(None)
        call_command('send_all_england_alerts')
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].to, [bookmark2.user.email])
        mail.outbox = []
        # With no new bookmarks, a further run sends nothing.
        call_command('send_all_england_alerts')
        self.assertEqual(len(mail.outbox), 0)
# NOTE(review): the bare string below looks like a stripped registration
# decorator — presumably @registry.reg('cuda.masked_select.func_decl') in the
# original AITemplate-style backend module. Confirm before relying on this copy.
('cuda.masked_select.func_decl')
def gen_function_decl(func_attrs) -> str:
    """Render the C++ declaration for the masked_select CUDA function.

    Broadcasting support is enabled when the input and mask shapes differ;
    dynamic dimensions shared by input and mask are declared as typed
    parameters.
    """
    backend_spec = CUDASpec()
    (x, mask) = func_attrs['inputs']
    input_type = cuda_common.dtype_to_cuda_type(x._attrs['dtype'])
    return FUNC_DECL_TEMPLATE.render(func_name=func_attrs['name'], input_type=input_type, index_type=backend_spec.index_type, need_broadcast=(x._attrs['shape'] != mask._attrs['shape']), dynamic_dims_decl=gen_dynamic_dim_str(index_type=backend_spec.index_type, dynamic_dims=get_dynamic_dims(x.shape(), mask.shape()), has_type=True))
def test_pycomponent_heritage():
    """Verify MRO plus isinstance/issubclass relations for PyComponent classes
    and their JS proxy counterparts."""
    klass = MyPComponent2
    # Name is mirrored on the JS side, but the reprs are distinguishable.
    assert klass.__name__ == klass.JS.__name__
    assert 'PyComponent' in repr(klass) and 'PyComponent' in repr(klass.JS)
    assert 'proxy' not in repr(klass) and 'proxy' in repr(klass.JS)
    assert 'JS' not in repr(klass) and 'for JS' in repr(klass.JS)
    # Python-side class hierarchy.
    py_mro = [MyPComponent2, MyPComponent1, PyComponent, LocalComponent, BaseAppComponent, Component, object]
    assert klass.mro() == py_mro
    for base in py_mro:
        assert issubclass(klass, base)
    for other in all_classes:
        if other not in py_mro:
            assert not issubclass(klass, other)
    instance = klass(flx_session=StubSession())
    for base in py_mro:
        assert isinstance(instance, base)
    for other in all_classes:
        if other not in py_mro:
            assert not isinstance(instance, other)
    # JS-side proxy hierarchy.
    js_mro = [MyPComponent2.JS, MyPComponent1.JS, PyComponent.JS, ProxyComponent, BaseAppComponent, Component, object]
    assert klass.JS.mro() == js_mro
    for base in js_mro:
        assert issubclass(klass.JS, base)
    for other in all_classes:
        if other not in js_mro:
            assert not issubclass(klass.JS, other)
def test_capture_four_rising_edges(la):
    """Capture timestamps in 'rising' vs 'four rising' mode and check that the
    per-event offset grows by three clock periods each event."""
    n_events = 100
    t1, t2 = la.capture(2, n_events, modes=['rising', 'four rising'])
    # Three periods, expressed in microseconds.
    expected_interval = (FREQUENCY ** -1) * MICROSECONDS * 3
    deltas = (t2 - t1) - (t2 - t1)[0]
    expected = np.arange(0, expected_interval * n_events, expected_interval)
    assert expected == pytest.approx(deltas, abs=TWO_CLOCK_CYCLES)
class PaginationCursorMeta(ModelNormal):
    """Generated OpenAPI model for pagination-cursor metadata
    (`next_cursor`, `limit`).

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like decorator remnants from a stripped source — presumably
    @cached_property and @convert_js_args_to_python_args in the original
    generated code. Confirm against the original module.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Extra (undeclared) properties may be any of these types.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> tuple of accepted types, used for validation.
        return {'next_cursor': (str,), 'limit': (int,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    attribute_map = {'next_cursor': 'next_cursor', 'limit': 'limit'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from deserialized API data; positional args are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configured to discard them.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct from keyword arguments only; positional args are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the attribute is assigned *before* this read-only
            # check, so the raise happens after the write — looks inverted
            # relative to typical generated code; confirm.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
from dataclasses import dataclass  # for the restored @dataclass decorator below


# NOTE(review): @dataclass restored — the class is constructed as
# IndexRange(base, lo, hi) throughout but the stripped source defined no
# __init__; confirm against the original module.
@dataclass
class IndexRange():
    """Inclusive interval `base + [lo, hi]` for loop-index bounds analysis.

    `base` is a symbolic LoopIR expression (None means 0); `lo`/`hi` are
    integer offsets relative to `base`, with None meaning unbounded on that
    side. Arithmetic operators combine ranges conservatively.
    """
    base: Optional[LoopIR.expr]
    lo: Optional[int]
    hi: Optional[int]

    def get_bounds(self):
        """Return (lo, hi) as strings, folding `base` in when present.

        Note the `+ 1` on the symbolic upper bound: it is rendered as an
        exclusive bound.
        """
        if (self.base is None):
            lo = ('-inf' if (self.lo is None) else str(self.lo))
            hi = ('inf' if (self.hi is None) else str(self.hi))
        else:
            lo = ('-inf' if (self.lo is None) else f'{self.base} + {self.lo}')
            hi = ('inf' if (self.hi is None) else f'{self.base} + {(self.hi + 1)}')
        return (lo, hi)

    def get_size(self):
        """Number of values in the inclusive range, or None when unbounded."""
        if ((self.lo is None) or (self.hi is None)):
            return None
        return ((self.hi - self.lo) + 1)

    def __str__(self):
        base = ('0' if (self.base is None) else str(self.base))
        lo = ('-inf' if (self.lo is None) else str(self.lo))
        hi = ('inf' if (self.hi is None) else str(self.hi))
        return f'({base}, {lo}, {hi})'

    def __add__(self, other: (int | IndexRange)) -> IndexRange:
        """Shift by an int, or add two ranges (bases and offsets combine)."""
        if isinstance(other, int):
            (new_lo, new_hi) = (None, None)
            if (self.lo is not None):
                new_lo = (self.lo + other)
            if (self.hi is not None):
                new_hi = (self.hi + other)
            return IndexRange(self.base, new_lo, new_hi)
        elif isinstance(other, IndexRange):
            if (self.base is None):
                new_base = other.base
            elif (other.base is None):
                new_base = self.base
            else:
                new_base = binop('+', self.base, other.base)
            # An unbounded side on either operand stays unbounded.
            (new_lo, new_hi) = (None, None)
            if ((self.lo is not None) and (other.lo is not None)):
                new_lo = (self.lo + other.lo)
            if ((self.hi is not None) and (other.hi is not None)):
                new_hi = (self.hi + other.hi)
            return IndexRange(new_base, new_lo, new_hi)
        else:
            raise ValueError(f'Invalid type for add: {type(other)}')

    def __radd__(self, c: int) -> IndexRange:
        assert isinstance(c, int)
        return self.__add__(c)

    def __neg__(self) -> IndexRange:
        """Negate the range; lo/hi swap roles and the base is negated symbolically."""
        (new_base, new_lo, new_hi) = (None, None, None)
        if (self.base is not None):
            new_base = LoopIR.USub(self.base, self.base.type, self.base.srcinfo)
        if (self.lo is not None):
            new_hi = (- self.lo)
        if (self.hi is not None):
            new_lo = (- self.hi)
        return IndexRange(new_base, new_lo, new_hi)

    def __sub__(self, other: (int | IndexRange)) -> IndexRange:
        assert isinstance(other, (int, IndexRange))
        return (self + (- other))

    def __rsub__(self, c: int) -> IndexRange:
        assert isinstance(c, int)
        return ((- self) + c)

    def __mul__(self, c: int) -> IndexRange:
        """Scale by a non-zero int constant; a negative scale flips lo/hi."""
        assert isinstance(c, int)
        if (c == 0):
            # NOTE(review): returns the plain int 0 (not an IndexRange) when
            # scaling by zero — callers appear to rely on this; confirm.
            return 0
        (new_base, new_lo, new_hi) = (None, None, None)
        if (self.base is not None):
            const = LoopIR.Const(c, T.index, self.base.srcinfo)
            new_base = binop('*', self.base, const)
        if (self.lo is not None):
            new_lo = (self.lo * c)
        if (self.hi is not None):
            new_hi = (self.hi * c)
        if (c > 0):
            return IndexRange(new_base, new_lo, new_hi)
        else:
            return IndexRange(new_base, new_hi, new_lo)

    def __rmul__(self, c: int) -> IndexRange:
        assert isinstance(c, int)
        return self.__mul__(c)

    def __floordiv__(self, c: int) -> IndexRange:
        """Divide by a positive int; negative divisors collapse to unbounded."""
        assert isinstance(c, int)
        if (c == 0):
            # BUGFIX: the stripped source *returned* a ValueError instance
            # instead of raising it.
            raise ValueError('Cannot divide by 0.')
        elif (c < 0):
            return IndexRange(None, None, None)
        (new_lo, new_hi) = (None, None)
        if (self.base is None):
            if (self.lo is not None):
                new_lo = (self.lo // c)
            if (self.hi is not None):
                new_hi = (self.hi // c)
            return IndexRange(None, new_lo, new_hi)
        else:
            # With a symbolic base we can only bound the offset spread.
            if ((self.lo is not None) and (self.hi is not None)):
                new_lo = 0
                new_hi = ((self.hi - self.lo) // c)
            return IndexRange(None, new_lo, new_hi)

    def __mod__(self, c: int) -> IndexRange:
        assert isinstance(c, int)
        if ((self.base is None) and (self.lo is not None) and (self.hi is not None)):
            # If the whole range lies inside one modulus window, mod is exact.
            if ((self.lo // c) == (self.hi // c)):
                return IndexRange(None, (self.lo % c), (self.hi % c))
        return IndexRange(None, 0, (c - 1))

    def __or__(self, other: IndexRange) -> IndexRange:
        """Union (convex hull) of two ranges with syntactically equal bases."""
        assert isinstance(other, IndexRange)
        compare_ir = LoopIR_Compare()
        if (((self.base is None) and (other.base is None)) or compare_ir.match_e(self.base, other.base)):
            if (self.lo is None):
                new_lo = other.lo
            elif (other.lo is None):
                new_lo = self.lo
            else:
                new_lo = min(self.lo, other.lo)
            if (self.hi is None):
                new_hi = other.hi
            elif (other.hi is None):
                new_hi = self.hi
            else:
                new_hi = max(self.hi, other.hi)
            return IndexRange(self.base, new_lo, new_hi)
        else:
            # Different bases: nothing can be said.
            return IndexRange(None, None, None)
def get_port_desc(dp, waiters):
    """Query the datapath's features reply and collect its port descriptions.

    Returns a dict keyed by the datapath id (as a string) mapping to a list
    of per-port attribute dicts.
    """
    request = dp.ofproto_parser.OFPFeaturesRequest(dp)
    replies = []
    ofctl_utils.send_stats_request(dp, request, waiters, replies, LOG)
    descs = []
    for reply in replies:
        for port in reply.ports.values():
            descs.append({'port_no': UTIL.ofp_port_to_user(port.port_no), 'hw_addr': port.hw_addr, 'name': port.name.decode('utf-8'), 'config': port.config, 'state': port.state, 'curr': port.curr, 'advertised': port.advertised, 'supported': port.supported, 'peer': port.peer, 'curr_speed': port.curr_speed, 'max_speed': port.max_speed})
    return {str(dp.id): descs}
def create_template_args(func_attrs: Dict[(str, Any)], indent: str=' ') -> Dict[(str, Any)]:
    """Compute Jinja template arguments for the ROCm repeat/expand kernel.

    Splits the output shape into head (leading broadcast/size-1 dims), mid,
    and tail (trailing kept dims) regions, derives read/write stride tables,
    vectorizes the tail by 4x or 2x when evenly divisible, and picks a simple
    launch geometry.

    Args:
        func_attrs: Op attributes; reads 'inputs', 'outputs', 'name' and
            'dim_types' (one ExpandDimensionType per output dimension).
        indent: Indentation string passed through to the template.

    Returns:
        Dict of template substitution values.
    """
    x = func_attrs['inputs'][0]
    y = func_attrs['outputs'][0]
    dst = y._attrs['name']
    src = x._attrs['name']
    func_name = func_attrs['name']
    custom_libs = Target.current().get_custom_libs(os.path.dirname(__file__), 'repeat.h')
    rocm_spec = ROCMSpec()
    dtype = rocm_spec.dtype_to_backend_dtype[x.dtype()]
    assert (dtype is not None), f'ROCM implementation does not support dtype {x.dtype()} (yet)'
    # Optional wider backend types used for 2x / 4x vectorized tail access
    # (None when no such type exists for this element size).
    dtype2 = rocm_spec.type_for_size.get((rocm_spec.sizeof_types[dtype] * 2), None)
    dtype4 = rocm_spec.type_for_size.get((rocm_spec.sizeof_types[dtype] * 4), None)
    xshape = x._attrs['shape']
    yshape = y._attrs['shape']
    dim_types: List[ExpandDimensionType] = func_attrs['dim_types']
    index_type = 'int64_t'
    # Dynamic shapes are not supported by this kernel.
    assert all(((dim.lower_bound() == dim.upper_bound()) for dim in xshape)), 'All input shapes need to be fixed'
    assert all(((dim.lower_bound() == dim.upper_bound()) for dim in yshape)), 'All output shapes need to be fixed'
    # Head region: leading output dims up to (but excluding) the first
    # KEEP_DIM whose extent is greater than 1.
    head_size_lower = 1
    head_size_upper = 1
    head_dim_count = 0
    for (dim_type, dim) in zip(func_attrs['dim_types'], yshape):
        if ((dim_type == ExpandDimensionType.KEEP_DIM) and (dim.lower_bound() != 1)):
            break
        head_size_lower *= dim.lower_bound()
        head_size_upper *= dim.upper_bound()
        head_dim_count += 1
    if (head_size_lower == head_size_upper):
        # Static head size: emit a 64-bit literal.
        head_size_symbolic = f'{head_size_upper}l'
    else:
        # NOTE(review): effectively dead given the fixed-shape asserts above;
        # would emit a runtime product over the head dim names.
        head_size_symbolic = '*'.join([((f'static_cast<{index_type}>(' + dim._attrs['name']) + ')') for dim in yshape[:head_dim_count]])
    # Tail region: trailing run of kept or size-1 dims, scanned right-to-left.
    tail_dim_count = 0
    tail_size = 1
    for (dim_type, dim) in reversed(list(zip(dim_types[head_dim_count:], yshape[head_dim_count:]))):
        if ((dim_type != ExpandDimensionType.KEEP_DIM) and (dim.lower_bound() != 1)):
            break
        tail_dim_count += 1
        tail_size *= dim.lower_bound()
    # Row-major strides; entry [0] is the total element count (numel).
    input_strides = list(reversed(list(accumulate(([1] + [d.lower_bound() for d in reversed(xshape)]), mul))))
    output_strides = list(reversed(list(accumulate(([1] + [d.lower_bound() for d in reversed(yshape[head_dim_count:])]), mul))))
    output_numel = output_strides[0]
    input_numel = input_strides[0]
    if (tail_size > 0):
        mid_size = (output_numel // tail_size)
    else:
        mid_size = 0
    mid_dim_count = ((len(yshape) - tail_dim_count) - head_dim_count)
    # How many output elements each input element expands into.
    if (input_numel > 0):
        mid_expansion_rate = ((mid_size * tail_size) // input_numel)
    else:
        mid_expansion_rate = 1
    # Head dims never advance the output pointer (stride 0); drop the numel
    # entry from both stride tables.
    output_strides = (([0] * head_dim_count) + output_strides[1:])
    input_strides = input_strides[1:]
    # Per-output-dim read strides: ADD_DIM reads with stride 0 (broadcast),
    # each KEEP_DIM consumes the next input stride.
    input_stride_pos = 0
    read_strides = ([0] * len(yshape))
    for i in range(len(yshape)):
        if (dim_types[i] == ExpandDimensionType.ADD_DIM):
            continue
        if (dim_types[i] == ExpandDimensionType.KEEP_DIM):
            read_strides[i] = input_strides[input_stride_pos]
            input_stride_pos += 1
    assert (input_stride_pos == len(xshape)), 'Incorrect number of keep and expand dims. Something went wrong.'
    output_rank = len(yshape)
    dim_types = ','.join([str(int(dt)) for dt in func_attrs['dim_types']])
    # Vectorize the tail 4x (or 2x) when divisible; strides shrink accordingly.
    if (((tail_size % 4) == 0) and (dtype4 is not None)):
        dtype = dtype4
        tail_size = (tail_size // 4)
        output_strides = [(s // 4) for s in output_strides]
        read_strides = [(s // 4) for s in read_strides]
    elif ((tail_size % 2) == 0):
        # NOTE(review): dtype2 is used without a None check here, unlike the
        # 4x branch above — confirm a 2x type always exists for supported dtypes.
        dtype = dtype2
        tail_size = (tail_size // 2)
        output_strides = [(s // 2) for s in output_strides]
        read_strides = [(s // 2) for s in read_strides]
    # Launch geometry: up to 64 threads along the tail, remaining budget of a
    # 1024-thread block along the mid dimension.
    grid_blocks_x = 1
    grid_threads_x = max(1, min(tail_size, 64))
    max_y_threads = (1024 // grid_threads_x)
    grid_threads_y = max(1, min(max_y_threads, mid_size))
    grid_blocks_y = _ceil((mid_size / grid_threads_y))
    if (dtype == 'bfloat16'):
        # NOTE(review): bfloat16 is reinterpreted as half (same storage size)
        # for the copy kernel — TODO confirm this is intentional.
        dtype = 'half'
    return {'func_name': func_name, 'dst': dst, 'src': src, 'output_strides': output_strides, 'read_strides': read_strides, 'tail_dim_count': tail_dim_count, 'tail_size': tail_size, 'head_dim_count': head_dim_count, 'head_size': head_size_symbolic, 'mid_dim_count': mid_dim_count, 'mid_size': mid_size, 'mid_expansion_rate': mid_expansion_rate, 'output_rank': output_rank, 'dim_types': dim_types, 'dtype': dtype, 'indent': indent, 'index_type': index_type, 'grid_blocks_y': grid_blocks_y, 'grid_blocks_x': grid_blocks_x, 'grid_threads_y': grid_threads_y, 'grid_threads_x': grid_threads_x, 'custom_libs': custom_libs}
class OptionPlotoptionsParetoMarkerStatesHover(Options):
    """Hover-state marker options for Pareto series (generated Highcharts wrapper).

    NOTE(review): each option below appears as a getter/setter pair sharing a
    name; upstream these carry @property / @<name>.setter decorators which
    appear to have been stripped — as written, the later def shadows the
    earlier one.  Confirm against the generator output before editing.
    """

    def animation(self) -> 'OptionPlotoptionsParetoMarkerStatesHoverAnimation':
        # Sub-configuration object controlling the hover animation.
        return self._config_sub_data('animation', OptionPlotoptionsParetoMarkerStatesHoverAnimation)

    def enabled(self):
        # Getter; hover state enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        # Setter.
        self._config(flag, js_type=False)

    def fillColor(self):
        # Getter; no explicit fill color by default.
        return self._config_get(None)

    def fillColor(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def lineColor(self):
        # Getter.
        return self._config_get(None)

    def lineColor(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def lineWidth(self):
        # Getter.
        return self._config_get(None)

    def lineWidth(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        # Getter; marker line width grows by 1 on hover by default.
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def radius(self):
        # Getter.
        return self._config_get(None)

    def radius(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def radiusPlus(self):
        # Getter; marker radius grows by 2 on hover by default.
        return self._config_get(2)

    def radiusPlus(self, num: float):
        # Setter.
        self._config(num, js_type=False)
class LondonTransactionBuilder(BerlinTransactionBuilder):
    """Transaction builder for the London fork, adding EIP-1559 dynamic-fee
    transaction constructors on top of the Berlin builder."""

    legacy_signed = LondonLegacyTransaction
    legacy_unsigned = LondonUnsignedLegacyTransaction
    typed_transaction = LondonTypedTransaction

    def new_unsigned_dynamic_fee_transaction(cls, chain_id: int, nonce: int, max_priority_fee_per_gas: int, max_fee_per_gas: int, gas: int, to: Address, value: int, data: bytes, access_list: Sequence[Tuple[(Address, Sequence[int])]]) -> LondonTypedTransaction:
        # The unsigned payload is returned bare (not wrapped in the typed envelope).
        return UnsignedDynamicFeeTransaction(chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas, to, value, data, access_list)

    def new_dynamic_fee_transaction(cls, chain_id: int, nonce: int, max_priority_fee_per_gas: int, max_fee_per_gas: int, gas: int, to: Address, value: int, data: bytes, access_list: Sequence[Tuple[(Address, Sequence[int])]], y_parity: int, r: int, s: int) -> LondonTypedTransaction:
        inner = DynamicFeeTransaction(chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas, to, value, data, access_list, y_parity, r, s)
        # Signed transactions are wrapped in the typed-transaction envelope.
        return LondonTypedTransaction(DYNAMIC_FEE_TRANSACTION_TYPE, inner)
class SelectDeviceForSinkPreference(widgets.ComboPreference, widgets.Conditional):
    """Combo box listing audio output devices; only active when the
    'auto' audiosink is selected."""

    default = 'auto'
    name = 'player/audiosink_device'
    condition_preference_name = 'player/audiosink'

    def __init__(self, preferences, widget):
        # Disabled until the device list has been populated.
        self.is_enabled = False
        widgets.ComboPreference.__init__(self, preferences, widget)
        widgets.Conditional.__init__(self)

    def on_check_condition(self):
        # Device selection only applies to the automatic sink.
        return self.get_condition_value() == 'auto'

    def on_condition_met(self):
        self.is_enabled = False
        model = self.widget.get_model()
        if model is None:
            return
        # Repopulate the combo with the currently available devices.
        model.clear()
        for device_name, device_id, _create_audiosink_cb in get_devices():
            model.append((device_id, device_name))
        self.is_enabled = True
        self._set_value()
        self.show_widget()
        self.set_widget_sensitive(True)

    def on_condition_failed(self):
        # A custom sink hides the combo entirely; other sinks just grey it out.
        if self.get_condition_value() == 'custom':
            self.hide_widget()
        else:
            self.show_widget()
            self.set_widget_sensitive(False)
        self.is_enabled = False
        model = self.widget.get_model()
        if model:
            model.clear()

    def done(self):
        return self.is_enabled

    def _get_value(self):
        # Report no device while the combo is inactive.
        return widgets.ComboPreference._get_value(self) if self.is_enabled else ''
class OptionPlotoptionsBoxplotSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
    """Mapping options for the gap between notes (boxplot sonification;
    generated Highcharts wrapper).

    NOTE(review): each option below is a getter/setter pair sharing a name;
    upstream these carry @property / @<name>.setter decorators which appear
    to have been stripped — as written, the setter def shadows the getter.
    """

    def mapFunction(self):
        # Getter (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter.
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter.
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
def _handle_data_and_model(new_config):
if ('data_config' in new_config):
new_config['data'] = new_config['data_config']
del new_config['data_config']
if ('model_config' in new_config):
new_config['model'] = new_config['model_config']
del new_config['model_config']
if ('local_batch_size' in new_config):
new_config['data'] = new_config.get('data', {})
new_config['data']['local_batch_size'] = new_config['local_batch_size']
del new_config['local_batch_size']
if ('use_resnet' in new_config):
new_config['model'] = new_config.get('model', {})
new_config['model']['use_resnet'] = new_config['use_resnet']
del new_config['use_resnet'] |
def get_dependencies(contract_to_constructor, contract, dependencies):
    """Append *contract* and its constructor dependencies to *dependencies*.

    Post-order DFS: constructor args that are strings containing
    ADDRESS_ARG_TAG reference other contracts and are recursed into first,
    so each dependency ends up in the list before its dependents.  Args
    already present in *dependencies* are skipped (avoids duplicates and
    cycles).  Mutates *dependencies* in place.
    """
    if contract in contract_to_constructor:
        for arg in contract_to_constructor[contract]:
            # isinstance (was `type(arg) == str`) also accepts str subclasses
            # and is the idiomatic type check.
            if isinstance(arg, str) and ADDRESS_ARG_TAG in arg and arg not in dependencies:
                get_dependencies(contract_to_constructor, arg, dependencies)
    dependencies.append(contract)
class OptionPlotoptionsSeriesSonificationContexttracksMappingRate(Options):
    """Rate mapping options for series sonification context tracks
    (generated Highcharts wrapper).

    NOTE(review): each option below is a getter/setter pair sharing a name;
    upstream these carry @property / @<name>.setter decorators which appear
    to have been stripped — as written, the setter def shadows the getter.
    """

    def mapFunction(self):
        # Getter (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter.
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter.
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
def test_prepare_transaction_replacement_without_nonce_sets_correct_nonce(w3):
    """A replacement transaction without an explicit nonce should inherit the
    current transaction's nonce (and its bumped gas price)."""
    replacement = prepare_replacement_transaction(w3, SIMPLE_CURRENT_TRANSACTION, {'value': 1})
    assert replacement == {'value': 1, 'nonce': 2, 'gasPrice': 12}
def validate_observations_has_result(doc):
    """Check that every Observation linked to a Sales Invoice has a result.

    Args:
        doc: Document with `ref_doctype` and `docname` attributes.

    Returns:
        True/False for a Sales Invoice reference; implicitly None for any
        other ref_doctype (preserved — callers may rely on the falsy None).
    """
    if doc.ref_doctype == 'Sales Invoice':
        observations = frappe.db.get_all(
            'Observation',
            {'sales_invoice': doc.docname, 'docstatus': ['!=', 2], 'has_component': False, 'status': ['!=', 'Cancelled']},
            pluck='name',
        )
        for obs in observations:
            if not frappe.get_doc('Observation', obs).has_result():
                # One missing result is enough to block submission; the
                # original kept loading the remaining Observations anyway.
                return False
        return True
def placeholder_validator(value, **valid_placeholders):
    """Validate that *value* only uses the given named placeholders and
    return it formatted with them.

    Raises:
        ValidationError: if an unknown placeholder appears, or formatting
        fails (e.g. positional `{}`/`{0}` fields or bad format specs that
        the regex-based scan cannot catch).
    """
    placeholders = set(RE_PLACEHOLDER.findall(value))
    if not placeholders.issubset(valid_placeholders.keys()):
        raise ValidationError(_('Invalid placeholders.'))
    try:
        return value.format(**valid_placeholders)
    except (KeyError, ValueError, TypeError, IndexError) as exc:
        # Chain the underlying formatting error so debugging retains the
        # real cause (previously it was silently discarded).
        raise ValidationError(_('Invalid placeholders.')) from exc
def extractFoolishtranslationssWordpressCom(item):
    """Release-title parser for foolishtranslationss.wordpress.com feed items.

    Returns a release message for recognized tags, None for previews or
    untitled chapters, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with neither a chapter nor a volume, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class UserFollowGroupSchema(JSONAPISchema):
    """JSON:API schema for a user's "follow group" resource, linking a user
    to a group they follow."""

    class Meta():
        # JSON:API resource type and self-link configuration.
        type_ = 'user-follow-group'
        self_view = 'v1.user_follow_group_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize
    # Server-assigned identifier and creation timestamp (read-only).
    id = fields.Str(dump_only=True)
    created_at = fields.DateTime(dump_only=True, timezone=True)
    # Relationship to the followed group.
    group = Relationship(attribute='group', self_view='v1.user_follow_group_group', self_view_kwargs={'id': '<id>'}, related_view='v1.group_detail', related_view_kwargs={'user_follow_group_id': '<id>'}, schema='GroupSchema', type_='group')
    # Relationship to the following user.
    user = Relationship(attribute='user', self_view='v1.user_follow_group_user', self_view_kwargs={'id': '<id>'}, related_view='v1.user_detail', related_view_kwargs={'user_follow_group_id': '<id>'}, schema='UserSchema', type_='user')
class bsn_flow_idle(bsn_header):
    """Big Switch Networks experimenter message reporting an idle flow.

    Loxigen-generated OpenFlow message class (wire format: standard OF
    header, experimenter id/subtype, then cookie/priority/table_id/match).
    Do not hand-tune the pack/unpack logic.

    NOTE(review): pack() joins str pieces (the pad bytes) with struct-packed
    data and returns via ''.join — this generated code targets Python 2; on
    Python 3 the str/bytes mix would fail.  Confirm the runtime.
    """
    version = 6  # OpenFlow protocol version in the header
    type = 4  # OFPT_EXPERIMENTER
    experimenter = 6035143  # 0x5c16c7 — the BSN experimenter ID
    subtype = 40  # bsn_flow_idle

    def __init__(self, xid=None, cookie=None, priority=None, table_id=None, match=None):
        """Initialize fields, defaulting omitted values to zero / empty match."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (priority != None):
            self.priority = priority
        else:
            self.priority = 0
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        return

    def pack(self):
        """Serialize the message to its wire representation."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!B', self.table_id))
        packed.append(('\x00' * 5))  # 5 pad bytes before the match
        packed.append(self.match.pack())
        # Backpatch the total message length into the header (slot 2).
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a bsn_flow_idle from *reader* and return the new object.

        NOTE(review): no `self` — generated code normally wraps this with
        @staticmethod; the decorator appears to have been stripped.
        """
        obj = bsn_flow_idle()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's length (4 bytes already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 40)
        obj.cookie = reader.read('!Q')[0]
        obj.priority = reader.read('!H')[0]
        obj.table_id = reader.read('!B')[0]
        reader.skip(5)  # pad
        obj.match = ofp.match.unpack(reader)
        return obj

    def __eq__(self, other):
        # Field-wise equality over xid, cookie, priority, table_id and match.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.cookie != other.cookie):
            return False
        if (self.priority != other.priority):
            return False
        if (self.table_id != other.table_id):
            return False
        if (self.match != other.match):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump of the message into pretty-printer *q*."""
        q.text('bsn_flow_idle {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
            q.breakable()
        q.text('}')
def ml_start_workflow(data):
    """Submit a canned bank-marketing ML workflow to the execute endpoint
    and assert the request succeeds (HTTP 200).

    The inline JSON defines the full pipeline: dataset selection ->
    column selection -> missing-value fill -> category encoding ->
    train/test split -> random-forest fit/run -> classification metrics,
    plus a batch of sample observations for scoring.
    """
    u = url('ml/workflow/execute')
    j = '{"wf_unique_id": "4", "wf_body": {"nodes": [ { "task_name": "SelectDataset", "task_id": "flowchartNode1", "position": "left: 600px; top: 114px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [], "parameters": [{"name": "name", "type": "str", "display_name": "Dataset Name", "lookup": [{"title": "Bank Campaign Dataset", "uuid": "ff6e6b81fedd705d.csv"}], "value": "test_data/bank.csv"}, {"name": "delimiter", "type": "str", "display_name": "Dataset Name", "value": ","}]}, { "task_name": "SelectColumns", "task_id": "flowchartNode2", "position": "left: 480px; top: 280px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [{"id": "TopCenter", "targets": [{"nodeId": "flowchartNode1", "id": "BottomCenter"}]}], "parameters": [{"name": "col_name", "type": "str", "display_name": "Column Name(s)", "value": "age,job,marital,education,default,balance,housing,loan,contact,day,month,duration,campaign,pdays,previous,poutcome,y"}]}, { "task_name":"FillMissingWithMean", "task_id": "flowchartNode3", "position": "left: 480px; top: 280px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [{"id": "TopCenter", "targets": [{"nodeId": "flowchartNode2", "id": "BottomCenter"}]}], "parameters": [{"name": "col_name", "type": "str", "display_name": "Column Name(s)", "value": "balance,age"}]}, { "task_name":"CategoryEncoding", "task_id": "flowchartNode4", "position": "left: 480px; top: 280px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [{"id": "TopCenter", "targets": [{"nodeId": "flowchartNode3", "id": "BottomCenter"}]}], "parameters": [{"name": "col_name", "type": "str", "display_name": "Column Name(s)", "value": "job,marital,education,default,housing,loan,contact,month,poutcome"}] }, { "task_name": "SplitData", "task_id": "flowchartNode5", "position": "left: 460px; top: 460px; color: rgb(0, 0, 0); background-color: rgb(255, 
255, 255); line-height: 35px;", "inputs": [{"id": "TopCenter", "targets": [{"nodeId": "flowchartNode4", "id": "BottomCenter"}]}], "parameters": [{"name": "perc", "type": "float", "display_name": "Partition Percentage", "value": 0.7}]},\n { "task_name": "RandomForestClassifier", "task_id": "flowchartNode6", "position": "left: 150px; top: 366px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [], "parameters": [ {"name": "n_estimators", "type": "integer", "display_name": "Number of Estimators", "value": 100}]}, { "task_name": "FitModel", "task_id": "flowchartNode7", "position": "left: 150px; top: 366px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [{"id": "TopLeft", "targets": [{"nodeId": "flowchartNode6", "id": "BottomCenter"}]}, {"id": "TopRight", "targets": [{"nodeId": "flowchartNode5", "id": "BottomLeft"}]}], "parameters": [ {"name": "label_column", "type": "string", "display_name": "Number of Estimators", "value": "y"}]}, { "task_name": "RunModel", "task_id": "flowchartNode8", "position": "left: 680px; top: 567px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [{"id": "TopLeft", "targets":\n [{"nodeId": "flowchartNode7", "id": "BottomCenter"}]}, {"id": "TopRight", "targets": [{"nodeId": "flowchartNode5", "id": "BottomRight"}]}], "parameters": [{"name": "label_column", "type": "string", "display_name": "Number of Estimators", "value": "y"}]}, { "task_name": "ClassificationMetrics", "task_id": "flowchartNode9", "position": "left: 680px; top: 567px; color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); line-height: 35px;", "inputs": [{"id": "TopCenter", "targets": [{"nodeId": "flowchartNode8", "id": "BottomCenter"}]}], "parameters": [{"name": "label_column", "type": "string", "display_name": "target column", "value": "y"},{"name": "predict_column", "type": "string", "display_name": "predicted column", "value": "predictions"}]}]}, 
"run_to_task_id": "", "execution_mode": "light", "storage": false, "deploy": "flowchartNode7", "node_name": "flowchartNode1", "output_port_id": "1", "col_1": "day", "num_bins": 10, "col_2": "previous", "col_names": "day,previous", "plot_type": "matrix", "model_name": "b03ffdaa422e450788cec3e80b308a3d", "observation": [ { "age": 36, "balance": 260, "campaign": 1, "contact": "cellular", "day": 7, "default": "no", "duration": 49, "education": "tertiary", "housing": "yes", "job": "management", "loan": "no", "marital": "single", "month": "may", "pdays": -1, "poutcome": "unknown", "previous": 0 }, { "age": 40, "balance": 11084, "campaign": 1, "contact": "unknown", "day": 11, "default": "no", "duration": 113, "education": "secondary", "housing": "no", "job": "blue-collar", "loan": "no", "marital": "married", "month": "jun", "pdays": -1, "poutcome": "unknown", "previous": 0 }, { "age": 52, "balance": 20, "campaign": 1, "contact": "telephone", "day": 28, "default": "no", "duration": 172, "education": "primary", "housing": "yes", "job": "unemployed", "loan": "no", "marital": "married", "month": "jan", "pdays": -1, "poutcome": "unknown", "previous": 0 }, { "age": 30, "balance": 102, "campaign": 7, "contact": "cellular", "day": 5, "default": "no", "duration": 470, "education": "tertiary", "housing": "yes", "job": "management", "loan": "no", "marital": "married", "month": "apr", "pdays": 426, "poutcome": "other", "previous": 3 }, { "age": 42, "balance": 292, "campaign": 3, "contact": "cellular", "day": 11, "default": "no", "duration": 222, "education": "secondary", "housing": "yes", "job": "services", "loan": "no", "marital": "married", "month": "may", "pdays": -1, "poutcome": "unknown", "previous": 0 }]}'
    r = data['session'].post(u, data=j)
    assert_ret_code(r, 200)
_os(*metadata.platforms)
def main():
    """Emulate suspicious PowerShell invocation patterns for detection testing."""
    common.log('PowerShell Suspicious Commands')
    script_path = Path('tmp.ps1').resolve()
    with open(script_path, 'w') as handle:
        handle.write('whoami.exe\nexit\n')
    # Three shapes a detection rule should flag: bypassed execution policy,
    # inline iex, and a long base64-encoded command.
    suspicious_commands = [
        ['powershell.exe', '-ExecutionPol', 'Bypass', script_path],
        ['powershell.exe', 'iex', 'Get-Process'],
        ['powershell.exe', '-ec', encode('Get-Process' + ' ' * 1000)],
    ]
    for command in suspicious_commands:
        common.execute(command)
    common.remove_file(script_path)
def adapt_int_width(n, width, signed=True):
    """Truncate integer *n* to the C integer type of the given bit *width*.

    Widths 1 and 2 (and any unlisted width) pass the value through
    unchanged.  NOTE(review): widths 4 and 8 both truncate to 8 bits —
    width-4 values are not masked to 4 bits — preserved as-is.
    """
    value = int(n)
    signed_types = {4: ctypes.c_int8, 8: ctypes.c_int8, 16: ctypes.c_int16,
                    32: ctypes.c_int32, 64: ctypes.c_int64}
    unsigned_types = {4: ctypes.c_uint8, 8: ctypes.c_uint8, 16: ctypes.c_uint16,
                      32: ctypes.c_uint32, 64: ctypes.c_uint64}
    ctype = (signed_types if signed else unsigned_types).get(width)
    if ctype is None:
        return value
    # ctypes performs the two's-complement truncation for us.
    return ctype(value).value
class TestACESccToACES2056_1(util.ColorAssertsPyTest):
    """Checks that converting ACEScc colors back to ACES2065-1 reproduces the
    expected values.

    NOTE(review): the bare `.parametrize(...)` line below is the residue of a
    stripped `@pytest.mark.parametrize` decorator.
    """

    # Pairs of (aces2065-1 input, acescc expected) fed to test_colors.
    COLORS = [(f'color(--aces2065-1 {(2 ** (- 24))} {(2 ** (- 24))} {(2 ** (- 24))})', 'color(--acescc -0. -0. -0.)'), ('color(--aces2065-1 0.18 0.18 0.18)', 'color(--acescc 0.4135884 0.4135884 0.4135884)'), ('color(--aces2065-1 65504 65504 65504)', 'color(--acescc 1.4679964 1.4679964 1.4679964)'), ('color(--aces2065-1 0.08731 0.07443 0.27274)', 'color(--acescc 0. 0.3139529 0.)'), ('color(--aces2065-1 0.15366 0.25692 0.09071)', 'color(--acescc 0. 0. 0.)'), ('color(--aces2065-1 0.21743 0.07070 0.05130)', 'color(--acescc 0. 0. 0.)'), ('color(--aces2065-1 0.58921 0.53944 0.09157)', 'color(--acescc 0. 0.5099772 0.3592168)'), ('color(--aces2065-1 0.30904 0.14818 0.27426)', 'color(--acescc 0. 0.382433 0.)'), ('color(--aces2065-1 0.14900 0.23377 0.35939)', 'color(--acescc 0. 0. 0.4702988)')]
    .parametrize('color1,color2', COLORS)
    def test_colors(self, color1, color2):
        """Converting color2 (acescc) to aces2065-1 should equal color1."""
        self.assertColorEqual(Color(color2).convert('aces2065-1'), Color(color1))
('Events > Events under an Event Sub-topic > List All Events under an Event Sub-topic')
def evnt_sub_topic_event_get_list(transaction):
    """Dredd hook: seed an event sub-topic plus one event for the
    "list events under sub-topic" endpoint.

    NOTE(review): the bare string above is the argument of a stripped
    @hooks.before(...) decorator.
    """
    with stash['app'].app_context():
        event_sub_topic = EventSubTopicFactory()
        db.session.add(event_sub_topic)
        # The event references the sub-topic created above (id assumed to be
        # 1 in a fresh test database) — TODO confirm.
        event = EventFactoryBasic(event_sub_topic_id=1)
        db.session.add(event)
        db.session.commit()
def docs_get_section(docstring, section, output='extract', end=None):
    """Extract or remove a named, underlined section from *docstring*.

    A section starts at a line containing *section* whose following line
    contains '--' (an underline).  It ends at the first blank line, at a
    line equal to ``' ' + end`` (if *end* is given), or — in 'remove' mode —
    at a line whose first non-space character is '%'.

    Args:
        docstring: Full docstring text.
        section: Section heading to look for (substring match).
        output: 'extract' returns only the section; 'remove' returns the
            docstring with the section removed.
        end: Optional marker terminating the section early.

    Returns:
        The extracted section, or the remaining docstring, joined by newlines.
    """
    outs = []
    in_section = False
    docstring_split = docstring.split('\n')
    for ind, line in enumerate(docstring_split):
        # Bug fix: guard the ind+1 lookahead — a heading on the very last
        # line previously raised IndexError.
        if (section in line and ind + 1 < len(docstring_split)
                and '--' in docstring_split[ind + 1]):
            in_section = True
        if end:
            if in_section and (' ' + end) == line:
                in_section = False
                outs.pop()
        elif in_section and line == '':
            in_section = False
        if output == 'extract' and in_section:
            outs.append(line)
        if output == 'remove' and not in_section:
            outs.append(line)
        # A '%'-prefixed line ends the section in 'remove' mode.  Bug fix:
        # `line.strip().startswith('%')` replaces `line.strip()[0] == '%'`,
        # which raised IndexError on an empty line when `end` was set (the
        # blank-line branch above is skipped in that case).
        if in_section and output == 'remove' and line.strip().startswith('%'):
            in_section = False
    return '\n'.join(outs)
def convert2dash(node):
    """Convert a dashed stroke into explicit geometry, one subpath per dash.

    Reads 'stroke-dasharray'/'stroke-dashoffset' from the node's style,
    walks every subpath splitting bezier segments at dash boundaries, and
    replaces the node's path with only the "on" dashes.  No-op when there
    is no usable dash array.
    """
    dashes = []
    offset = 0
    style = node.style
    if ('stroke-dasharray' in style):
        # Only comma-separated dash lists are handled.
        if (style['stroke-dasharray'].find(',') > 0):
            dashes = [float(dash) for dash in style['stroke-dasharray'].split(',')]
    if ('stroke-dashoffset' in style):
        offset = style['stroke-dashoffset']
    if (not dashes):
        return
    new = []
    for sub in node.path.to_superpath():
        # Consume the dash offset before the first segment; `idash` parity
        # tracks whether we are in an "on" dash (even) or a gap (odd).
        idash = 0
        dash = dashes[0]
        length = float(offset)
        while (dash < length):
            length = (length - dash)
            idash = ((idash + 1) % len(dashes))
            dash = dashes[idash]
        new.append([sub[0][:]])
        i = 1
        while (i < len(sub)):
            dash = (dash - length)
            length = bezier.cspseglength(new[(- 1)][(- 1)], sub[i])
            # Split the current bezier segment at every dash boundary that
            # falls inside it.
            while (dash < length):
                (new[(- 1)][(- 1)], nxt, sub[i]) = bezier.cspbezsplitatlength(new[(- 1)][(- 1)], sub[i], (dash / length))
                if (idash % 2):
                    # Leaving a gap: start a new subpath at the split point.
                    new.append([nxt[:]])
                else:
                    # Still inside a dash: extend the current subpath.
                    new[(- 1)].append(nxt[:])
                length = (length - dash)
                idash = ((idash + 1) % len(dashes))
                dash = dashes[idash]
            if (idash % 2):
                new.append([sub[i]])
            else:
                new[(- 1)].append(sub[i])
            i += 1
    # The dashes are now explicit geometry: drop the dash style and the
    # sodipodi shape type so the editor treats this as a plain path.
    style.pop('stroke-dasharray')
    node.pop('sodipodi:type')
    node.path = CubicSuperPath(new)
    node.style = style
class MyInstallData(install_data.install_data):
    """install_data variant that additionally ships the generated doc
    projects and the zipped TVTK class definitions."""

    def run(self):
        cmd = self.get_finalized_command('install_data')
        # Include documentation data files for every doc project.
        for project in list_doc_projects():
            cmd.data_files.extend(list_docs_data_files(project))
        # Build and include the zipped TVTK class definitions.
        build_tvtk_classes_zip()
        tvtk_dir = 'tvtk'
        cmd.data_files.append((tvtk_dir, [join(tvtk_dir, 'tvtk_classes.zip')]))
        install_data.install_data.run(self)
class ForumModerationURLPatternsFactory(URLPatternsFactory):
    """URL patterns for the forum moderation views (topic state changes and
    the moderation queue)."""

    app_namespace = 'forum_moderation'

    # Views are resolved through get_class so they remain overridable.
    topic_lock_view = get_class('forum_moderation.views', 'TopicLockView')
    topic_unlock_view = get_class('forum_moderation.views', 'TopicUnlockView')
    topic_delete_view = get_class('forum_moderation.views', 'TopicDeleteView')
    topic_move_view = get_class('forum_moderation.views', 'TopicMoveView')
    topic_update_to_normal_topic_view = get_class('forum_moderation.views', 'TopicUpdateToNormalTopicView')
    topic_update_to_sticky_topic_view = get_class('forum_moderation.views', 'TopicUpdateToStickyTopicView')
    topic_update_to_announce_view = get_class('forum_moderation.views', 'TopicUpdateToAnnounceView')
    moderation_queue_list_view = get_class('forum_moderation.views', 'ModerationQueueListView')
    moderation_queue_detail_view = get_class('forum_moderation.views', 'ModerationQueueDetailView')
    post_approve_view = get_class('forum_moderation.views', 'PostApproveView')
    post_disapprove_view = get_class('forum_moderation.views', 'PostDisapproveView')

    def get_urlpatterns(self):
        """Return the moderation URL patterns."""
        return [
            # Topic moderation actions.
            path('topic/<str:slug>-<int:pk>/lock/', self.topic_lock_view.as_view(), name='topic_lock'),
            path('topic/<str:slug>-<int:pk>/unlock/', self.topic_unlock_view.as_view(), name='topic_unlock'),
            path('topic/<str:slug>-<int:pk>/delete/', self.topic_delete_view.as_view(), name='topic_delete'),
            path('topic/<str:slug>-<int:pk>/move/', self.topic_move_view.as_view(), name='topic_move'),
            path('topic/<str:slug>-<int:pk>/change/topic/', self.topic_update_to_normal_topic_view.as_view(), name='topic_update_to_post'),
            path('topic/<str:slug>-<int:pk>/change/sticky/', self.topic_update_to_sticky_topic_view.as_view(), name='topic_update_to_sticky'),
            path('topic/<str:slug>-<int:pk>/change/announce/', self.topic_update_to_announce_view.as_view(), name='topic_update_to_announce'),
            # Moderation queue.
            path('queue/', self.moderation_queue_list_view.as_view(), name='queue'),
            path('queue/<int:pk>/', self.moderation_queue_detail_view.as_view(), name='queued_post'),
            path('queue/<int:pk>/approve/', self.post_approve_view.as_view(), name='approve_queued_post'),
            path('queue/<int:pk>/disapprove/', self.post_disapprove_view.as_view(), name='disapprove_queued_post'),
        ]
def __read_toc_entry(fd, toc_entry_num):
    """Read one CD-ROM TOC entry via the CDROMREADTOCENTRY ioctl.

    Args:
        fd: Open file descriptor on the CD-ROM block device.
        toc_entry_num: Track number to query.

    Returns:
        Tuple (track, is_data_track, minute, second, frame) in MSF addressing.

    Raises:
        OSError: if the kernel answers with an address format other than MSF.
    """
    CDROM_MSF = 2  # request the address as minute/second/frame
    FORMAT_cdrom_tocentry = 'BBBiB'
    cdrom_tocentry = struct.pack(FORMAT_cdrom_tocentry, toc_entry_num, 0, CDROM_MSF, 0, 0)
    CDROMREADTOCENTRY = 21254  # 0x5306, from <linux/cdrom.h>
    cdrom_tocentry = fcntl.ioctl(fd, CDROMREADTOCENTRY, cdrom_tocentry)
    (cdte_track, cdte_adr_ctrl, cdte_format, cdte_addr, _cdte_datamode) = struct.unpack(FORMAT_cdrom_tocentry, cdrom_tocentry)
    # Bug fix: value comparison, not `is not` — identity on ints only works
    # by accident of CPython's small-integer caching.
    if cdte_format != CDROM_MSF:
        raise OSError('Invalid syscall answer')
    # Control bits live in the high nibble of the adr/ctrl byte.
    cdte_ctrl = (cdte_adr_ctrl & 240) >> 4
    CDROM_DATA_TRACK = 4
    is_data_track = bool(cdte_ctrl & CDROM_DATA_TRACK)
    # cdte_addr is a C union; reinterpret the int as three MSF bytes plus padding.
    FORMAT_cdrom_addr = 'BBB' + ('x' * (struct.calcsize('i') - 3))
    (minute, second, frame) = struct.unpack(FORMAT_cdrom_addr, struct.pack('i', cdte_addr))
    return (cdte_track, is_data_track, minute, second, frame)
.parallel(nprocs=3)
def test_io_timestepping(element, tmpdir):
    """Save a time series of functions with CheckpointFile on one
    subcommunicator, then reload each timestep on a differently-sized
    subcommunicator and verify the values match.

    NOTE(review): the leading `.parallel(nprocs=3)` is the residue of a
    stripped pytest decorator; relies on module-level `mesh_name`,
    `func_name`, `_project` and `_get_expr`.
    """
    filename = os.path.join(str(tmpdir), 'test_io_timestepping_dump.h5')
    # All ranks must agree on the file path.
    filename = COMM_WORLD.bcast(filename, root=0)
    # Writer color: rank > size-1 is never true, so every rank writes — the
    # Split mirrors the reader phase below for symmetry.
    mycolor = (COMM_WORLD.rank > (COMM_WORLD.size - 1))
    comm = COMM_WORLD.Split(color=mycolor, key=COMM_WORLD.rank)
    # Mixed elements cannot be interpolated directly; project instead.
    method = ('project' if isinstance(element, finat.ufl.MixedElement) else 'interpolate')
    if (mycolor == 0):
        mesh = Mesh('./docs/notebooks/stokes-control.msh', name=mesh_name, comm=comm)
        V = FunctionSpace(mesh, element)
        f = Function(V, name=func_name)
        with CheckpointFile(filename, 'w', comm=comm) as afile:
            for i in range(5):
                _project(f, _get_expr(V, i), method)
                afile.save_function(f, idx=i)
    # Reader phase uses one rank fewer, exercising repartitioning on load.
    mycolor = (COMM_WORLD.rank > (COMM_WORLD.size - 2))
    comm = COMM_WORLD.Split(color=mycolor, key=COMM_WORLD.rank)
    if (mycolor == 0):
        with CheckpointFile(filename, 'r', comm=comm) as afile:
            mesh = afile.load_mesh(mesh_name)
            for i in range(5):
                f = afile.load_function(mesh, func_name, idx=i)
                V = f.function_space()
                g = Function(V)
                _project(g, _get_expr(V, i), method)
                # The L2 difference should vanish to solver tolerance.
                assert (assemble((inner((g - f), (g - f)) * dx)) < 1e-16)
class TestCacheSystemStatusesForConsentReporting():
def test_cache_initial_status_and_identities_for_consent_reporting(self, db, privacy_request_with_consent_policy, connection_config, privacy_preference_history, privacy_preference_history_fr_provide_service_frontend_only):
privacy_preference_history.privacy_request_id = privacy_request_with_consent_policy.id
privacy_preference_history.save(db)
privacy_preference_history_fr_provide_service_frontend_only.privacy_request_id = privacy_request_with_consent_policy.id
privacy_preference_history.save(db)
cache_initial_status_and_identities_for_consent_reporting(db, privacy_request_with_consent_policy, connection_config, relevant_preferences=[privacy_preference_history_fr_provide_service_frontend_only], relevant_user_identities={'email': 'customer-'})
db.refresh(privacy_preference_history)
db.refresh(privacy_preference_history_fr_provide_service_frontend_only)
assert (privacy_preference_history_fr_provide_service_frontend_only.affected_system_status == {connection_config.name: 'pending'})
assert (privacy_preference_history_fr_provide_service_frontend_only.secondary_user_ids == {'email': 'customer-'})
assert (privacy_preference_history.affected_system_status == {connection_config.name: 'skipped'})
assert (privacy_preference_history.secondary_user_ids is None)
def test_add_complete_system_status_for_consent_reporting(self, db, privacy_request_with_consent_policy, connection_config, privacy_preference_history, privacy_preference_history_fr_provide_service_frontend_only):
privacy_preference_history.privacy_request_id = privacy_request_with_consent_policy.id
privacy_preference_history.save(db)
privacy_preference_history_fr_provide_service_frontend_only.privacy_request_id = privacy_request_with_consent_policy.id
privacy_preference_history.save(db)
cache_initial_status_and_identities_for_consent_reporting(db, privacy_request_with_consent_policy, connection_config, relevant_preferences=[privacy_preference_history_fr_provide_service_frontend_only], relevant_user_identities={'email': 'customer-'})
add_complete_system_status_for_consent_reporting(db, privacy_request_with_consent_policy, connection_config)
db.refresh(privacy_preference_history)
db.refresh(privacy_preference_history_fr_provide_service_frontend_only)
assert (privacy_preference_history_fr_provide_service_frontend_only.affected_system_status == {connection_config.name: 'complete'})
assert (privacy_preference_history_fr_provide_service_frontend_only.secondary_user_ids == {'email': 'customer-'})
assert (privacy_preference_history.affected_system_status == {connection_config.name: 'skipped'})
assert (privacy_preference_history.secondary_user_ids is None)
def test_add_error_system_status_for_consent_reporting(self, db, privacy_request_with_consent_policy, connection_config, privacy_preference_history, privacy_preference_history_fr_provide_service_frontend_only):
    """Preferences cached as 'pending' for a system are flipped to 'error';
    preferences that were never relevant to the system stay 'skipped'."""
    privacy_preference_history.privacy_request_id = privacy_request_with_consent_policy.id
    privacy_preference_history.save(db)
    privacy_preference_history_fr_provide_service_frontend_only.privacy_request_id = privacy_request_with_consent_policy.id
    # BUG FIX: the original saved privacy_preference_history a second time here,
    # leaving the fr_provide_service_frontend_only record's new
    # privacy_request_id unpersisted.
    privacy_preference_history_fr_provide_service_frontend_only.save(db)
    # Mark the frontend-only preference as relevant/pending for this system.
    cache_initial_status_and_identities_for_consent_reporting(db, privacy_request_with_consent_policy, connection_config, relevant_preferences=[privacy_preference_history_fr_provide_service_frontend_only], relevant_user_identities={'email': 'customer-'})
    add_errored_system_status_for_consent_reporting(db, privacy_request_with_consent_policy, connection_config)
    db.refresh(privacy_preference_history)
    db.refresh(privacy_preference_history_fr_provide_service_frontend_only)
    # Relevant preference transitions pending -> error and keeps its identities.
    assert (privacy_preference_history_fr_provide_service_frontend_only.affected_system_status == {connection_config.name: 'error'})
    assert (privacy_preference_history_fr_provide_service_frontend_only.secondary_user_ids == {'email': 'customer-'})
    # Irrelevant preference remains skipped with no identities attached.
    assert (privacy_preference_history.affected_system_status == {connection_config.name: 'skipped'})
    assert (privacy_preference_history.secondary_user_ids is None)
def cache_action_required(cache_key: str, step: Optional[CurrentStep]=None, collection: Optional[CollectionAddress]=None, action_needed: Optional[List[ManualAction]]=None) -> None:
    """Store a serialized checkpoint-action payload in the cache under *cache_key*.

    When *step* is falsy no action is required and ``None`` is cached instead,
    so readers can distinguish "checked, nothing to do" from a cache miss.
    """
    payload = None
    if step:
        checkpoint = CheckpointActionRequired(step=step, collection=collection, action_needed=action_needed)
        payload = checkpoint.dict()
    redis_client: FidesopsRedis = get_cache()
    redis_client.set_encoded_object(cache_key, payload)
def extractHololonovelsCom(item):
    """Parse a hololonovels.com feed *item* into a release message.

    Returns None for previews / items with no chapter or volume number,
    a built release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lowered_title = item['title'].lower()
    if not (chp or vol) or 'preview' in lowered_title:
        return None
    # tag -> (series name, translation type)
    known_tags = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in known_tags.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): bare call — presumably a mangled `@six.add_metaclass(abc.ABCMeta)`
# (or `@_metaclass(...)`) decorator lost during extraction; confirm against upstream.
_metaclass(abc.ABCMeta)

class chunk(stringify.StringifyMixin):
    """Base class for a binary chunk framed by a (type, flags, length) header."""

    # Header layout: 1-byte type, 1-byte flags, 2-byte length, network byte order.
    _PACK_STR = '!BBH'
    # Minimum wire size of any chunk (the header alone).
    _MIN_LEN = struct.calcsize(_PACK_STR)

    # NOTE(review): takes `cls` but carries no @classmethod/@abstractmethod
    # decorator — likely stripped during extraction; subclasses appear expected
    # to override this to report their chunk-type code.
    def chunk_type(cls):
        pass

    def __init__(self, type_, length):
        self._type = type_
        self.length = length

    # NOTE(review): same as chunk_type — presumably an abstract classmethod that
    # parses a chunk instance out of the byte buffer `buf`.
    def parser(cls, buf):
        pass

    def __len__(self):
        # Length as recorded in the chunk header, not the bytes currently held.
        return self.length
def test_update_versions_is_working_properly_case_5(create_test_data, create_pymel, create_maya_env):
    """update_versions() is a no-op when every reference in the deep hierarchy
    already resolves to the latest published version.

    Builds a multi-level reference chain (version15 -> version11 x2 ->
    asset2_model_take1 x2 -> asset2_model_main), then asserts that
    update_versions() reports zero updated versions and that each reference
    at every depth still points at its latest published version.
    """
    data = create_test_data
    pm = create_pymel
    maya_env = create_maya_env
    # Publish the model versions that the take1 scenes will reference.
    data['asset2_model_main_v002'].is_published = True
    data['asset2_model_main_v003'].is_published = True
    # take1_v001 references main_v002.
    maya_env.open(data['asset2_model_take1_v001'])
    maya_env.reference(data['asset2_model_main_v002'])
    pm.saveFile()
    data['asset2_model_take1_v001'].is_published = True
    pm.newFile(force=True)
    # take1_v003 references main_v003.
    maya_env.open(data['asset2_model_take1_v003'])
    maya_env.reference(data['asset2_model_main_v003'])
    pm.saveFile()
    data['asset2_model_take1_v003'].is_published = True
    pm.newFile(force=True)
    # version11 references take1_v001 twice (duplicate references are allowed).
    maya_env.open(data['version11'])
    maya_env.reference(data['asset2_model_take1_v001'])
    maya_env.reference(data['asset2_model_take1_v001'])
    pm.saveFile()
    data['version11'].is_published = True
    pm.newFile(force=True)
    # version12 references take1_v003 twice.
    maya_env.open(data['version12'])
    maya_env.reference(data['asset2_model_take1_v003'])
    maya_env.reference(data['asset2_model_take1_v003'])
    pm.saveFile()
    data['version12'].is_published = True
    pm.newFile(force=True)
    # version15 (the scene under test) references version11 twice.
    maya_env.open(data['version15'])
    maya_env.reference(data['version11'])
    maya_env.reference(data['version11'])
    pm.saveFile()
    pm.newFile(force=True)
    # Sanity-check the input graph before exercising update_versions().
    visited_versions = []
    for v in data['version15'].walk_inputs():
        visited_versions.append(v)
    expected_visited_versions = [data['version15'], data['version11'], data['asset2_model_take1_v001'], data['asset2_model_main_v002']]
    assert (expected_visited_versions == visited_versions)
    # Everything already points at the latest published versions, so no updates.
    reference_resolution = maya_env.open(data['version15'])
    updated_versions = maya_env.update_versions(reference_resolution)
    assert (0 == len(updated_versions))
    # The current scene must be untouched.
    assert (data['version15'] == maya_env.get_current_version())
    # Walk the reference tree: two top-level refs, each with two children,
    # each child with one grandchild.
    refs = pm.listReferences()
    version12_ref1 = refs[0]
    version12_ref2 = refs[1]
    refs = pm.listReferences(version12_ref1)
    version6_ref1 = refs[0]
    version6_ref2 = refs[1]
    refs = pm.listReferences(version12_ref2)
    version6_ref3 = refs[0]
    version6_ref4 = refs[1]
    version3_ref1 = pm.listReferences(version6_ref1)[0]
    version3_ref2 = pm.listReferences(version6_ref2)[0]
    version3_ref3 = pm.listReferences(version6_ref3)[0]
    version3_ref4 = pm.listReferences(version6_ref4)[0]
    # Each reference at every level resolves to its latest published version.
    published_version = data['version12'].latest_published_version
    assert (published_version == maya_env.get_version_from_full_path(version12_ref1.path))
    assert (published_version == maya_env.get_version_from_full_path(version12_ref2.path))
    published_version = data['asset2_model_take1_v002'].latest_published_version
    assert (published_version == maya_env.get_version_from_full_path(version6_ref1.path))
    assert (published_version == maya_env.get_version_from_full_path(version6_ref2.path))
    assert (published_version == maya_env.get_version_from_full_path(version6_ref3.path))
    assert (published_version == maya_env.get_version_from_full_path(version6_ref4.path))
    published_version = data['asset2_model_main_v002'].latest_published_version
    assert (published_version == maya_env.get_version_from_full_path(version3_ref1.path))
    assert (published_version == maya_env.get_version_from_full_path(version3_ref2.path))
    assert (published_version == maya_env.get_version_from_full_path(version3_ref3.path))
    assert (published_version == maya_env.get_version_from_full_path(version3_ref4.path))
def main():
    """Extract the best config per symbol from an optimizer results directory.

    Takes the results directory as ``sys.argv[1]``; the passivbot mode and
    optimization algorithm are inferred from the directory name. For each
    per-symbol run (newest first), runs ``inspect_opt_results.py`` and moves
    the resulting best config into a timestamped ``configs/extracted/`` dir.
    """
    oj = os.path.join
    # Results directory, e.g. results_particle_swarm_optimization_neat_grid.
    # (The original assigned defaults here that were unconditionally
    # overwritten below; they have been removed as dead code.)
    d0 = sys.argv[1]
    # Infer the passivbot mode from the directory name.
    if 'clock' in d0:
        passivbot_mode = 'clock'
    elif 'neat' in d0:
        passivbot_mode = 'neat_grid'
    elif 'recursive' in d0:
        passivbot_mode = 'recursive_grid'
    elif 'static' in d0:
        passivbot_mode = 'static_grid'
    else:
        raise Exception('unknown passivbot_mode')
    # Infer the optimization algorithm from the directory name.
    if 'harmony_search' in d0:
        algorithm = 'harmony_search'
    elif 'particle_swarm_optimization' in d0:
        algorithm = 'particle_swarm_optimization'
    else:
        raise Exception('unknown algorithm')
    date_now = ts_to_date(time.time())[:19]
    dump_dir = make_get_filepath(f"configs/extracted/{algorithm}_{passivbot_mode}_{date_now.replace(':', '_')}/")
    symbols_done = set()
    # Iterate newest-first so only the most recent run per symbol is used.
    for d1 in sorted(os.listdir(d0))[::-1]:
        fp = oj(d0, d1, 'all_results.txt')
        # Run dirs are named <19-char timestamp>_<symbol>; slice off the prefix.
        symbol = d1[20:]
        if (not os.path.exists(fp)) or (symbol in symbols_done):
            print('skipping', fp)
            continue
        symbols_done.add(symbol)
        # -d makes inspect_opt_results.py dump the best config next to fp.
        subprocess.run(['python3', 'inspect_opt_results.py', fp, '-d'])
        shutil.move(oj(d0, d1, 'all_results_best_config.json'), oj(dump_dir, f'{symbol}.json'))
        try:
            shutil.copy(oj(d0, d1, 'table_best_config.txt'), oj(dump_dir, f'{symbol}_inspect_results.txt'))
        except Exception as e:
            # Best-effort: the inspect table is optional, so just report.
            print('error copying', e)
class Kor2EngDataModule(LightningDataModule):
    """LightningDataModule for the Korean-to-English translation pairs.

    Fetches the (src, tgt) sentence pairs in prepare_data() and serves them
    as tokenized CleanformerDatasets through the standard dataloader hooks.
    """
    name: str = 'kor2eng'

    def __init__(self, config: dict, tokenizer: Tokenizer):
        super().__init__()
        self.config = config
        self.tokenizer = tokenizer
        # Splits are populated lazily by prepare_data().
        self.kor2eng_train: Optional[List[Tuple[(str, str)]]] = None
        self.kor2eng_val: Optional[List[Tuple[(str, str)]]] = None
        self.kor2eng_test: Optional[List[Tuple[(str, str)]]] = None

    def prepare_data(self) -> None:
        """Download/load the train, val and test (src, tgt) pair lists."""
        splits = fetch_kor2eng()
        self.kor2eng_train, self.kor2eng_val, self.kor2eng_test = splits

    def build_dataset(self, src2tgt: List[Tuple[(str, str)]]) -> CleanformerDataset:
        """Tokenize the given (src, tgt) pairs into a CleanformerDataset."""
        srcs = [pair[0] for pair in src2tgt]
        tgts = [pair[1] for pair in src2tgt]
        max_length = self.config['max_length']
        inputs = TrainInputsBuilder(self.tokenizer, max_length)(srcs=srcs, tgts=tgts)
        labels = LabelsBuilder(self.tokenizer, max_length)(tgts=tgts)
        return CleanformerDataset(inputs, labels)

    def _make_loader(self, pairs: List[Tuple[(str, str)]], shuffle: bool) -> DataLoader:
        # Shared DataLoader construction for all three splits.
        return DataLoader(self.build_dataset(pairs), batch_size=self.config['batch_size'], shuffle=shuffle, num_workers=self.config['num_workers'])

    def train_dataloader(self) -> DataLoader:
        return self._make_loader(self.kor2eng_train, self.config['shuffle'])

    def val_dataloader(self) -> DataLoader:
        return self._make_loader(self.kor2eng_val, False)

    def test_dataloader(self) -> DataLoader:
        return self._make_loader(self.kor2eng_test, False)

    def predict_dataloader(self):
        # Prediction is not supported by this module.
        pass
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.