code stringlengths 281 23.7M |
|---|
class TaskWindowLaunchGroup(Group):
    """Menu group that offers one launch action per registered task factory."""

    id = 'TaskWindowLaunchGroup'
    items = List

    def _items_default(self):
        # Climb out of any nested groups until we reach the owning action
        # manager, then fetch the application through its controller chain.
        node = self
        while isinstance(node, Group):
            node = node.parent
        application = node.controller.task.window.application
        # One menu item per task factory known to the application.
        return [
            ActionItem(action=TaskWindowLaunchAction(task_id=factory.id))
            for factory in application.task_factories
        ]
def version_list_normalize(vlist):
    """Normalize a version-list string into a list of OFVersions objects.

    Accepts either a comma-separated or a whitespace-separated string of
    version names.  Exits the process with status 1 on an unknown version.

    :param vlist: raw version-list string
    :return: list of OFVersions, in lexical order of the input strings
    """
    # Membership test instead of find() > 0: find() returns 0 for a leading
    # comma, which the old check misread as "no commas present" and then
    # split on whitespace, leaving commas attached to the tokens.
    if ',' in vlist:
        # Strip surrounding whitespace so "1.0, 1.3" parses cleanly, and
        # drop empty tokens from stray commas.
        tokens = [tok.strip() for tok in vlist.split(',') if tok.strip()]
    else:
        tokens = vlist.split()
    # Preserve the original behaviour of sorting the raw version strings.
    tokens.sort()
    out_list = []
    for ver in tokens:
        try:
            out_list.append(OFVersions.from_string(ver))
        except KeyError:
            sys.stderr.write('Bad version input, %s' % str(ver))
            sys.exit(1)
    return out_list
def test_to_timedelta():
    """BuildTimedelta converts a seconds count and propagates None unchanged."""
    # A plain integer number of seconds becomes an equivalent timedelta.
    processor = BuildTimedelta({'units': 'seconds'})
    assert processor.process_arg(3600, None, {}) == timedelta(seconds=3600)

    # None input passes straight through.
    processor = BuildTimedelta({'units': 'seconds'})
    assert processor.process_arg(None, None, {}) == None
class meter_stats_reply(stats_reply):
    # OpenFlow multipart/stats reply carrying per-meter statistics.
    # Wire constants for this message type:
    version = 4  # OpenFlow wire version 4 (OF 1.3)
    type = 19  # message type: stats/multipart reply
    stats_type = 9  # stats body type: meter statistics

    def __init__(self, xid=None, flags=None, entries=None):
        # xid: transaction id (None until assigned); flags: reply flags
        # (e.g. OFPSF_REPLY_MORE, defaults to 0); entries: list of
        # meter_stats entries (defaults to an empty list).
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to the wire format.

        The 16-bit length field (element 2) is packed as 0 first, then
        back-patched once the full body size is known.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4 bytes of padding
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        # NOTE(review): takes the reader directly, with no self/cls —
        # presumably intended as a @staticmethod (as in generated loxi
        # bindings); confirm before calling it through an instance.
        obj = meter_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain all further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 9)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # skip the 4 padding bytes written by pack()
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.meter_stats.unpack)
        return obj

    def __eq__(self, other):
        # Structural equality over all wire-visible fields.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump through the pretty-printer ``q``."""
        q.text('meter_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
        q.breakable()
        q.text('}')
def test_vom_manual_points_outside_domain():
    """VertexOnlyMesh honours the missing_points_behaviour switch.

    One point lies outside the unit-square parent mesh: 'error' must raise,
    while 'warn' and None must both succeed.  (The original wrapped this in a
    pointless single-iteration ``for i in [0]`` loop and a needless nested
    function; flattened here with identical constructions and assertions.)
    """
    parent_mesh = UnitSquareMesh(100, 100, quadrilateral=True)
    # The last point (1.1, 1.0) is outside the unit square.
    points = [[0.1, 0.1], [0.2, 0.2], [1.1, 1.0]]
    with pytest.raises(VertexOnlyMeshMissingPointsError):
        VertexOnlyMesh(parent_mesh, points, missing_points_behaviour='error')
    vom = VertexOnlyMesh(parent_mesh, points, missing_points_behaviour='warn')
    assert vom
    vom = VertexOnlyMesh(parent_mesh, points, missing_points_behaviour=None)
    assert vom
# NOTE(review): the decorator had lost its "@pytest.mark" prefix (a bare
# ".parametrize(...)" line is a syntax error); restored here.
@pytest.mark.parametrize('distance_matrix, expected_permutation, expected_distance', [
    (distance_matrix1, optimal_permutation1, optimal_distance1),
    (distance_matrix2, optimal_permutation2, optimal_distance2),
    (distance_matrix3, optimal_permutation3, optimal_distance3),
])
def test_solution_is_optimal(distance_matrix, expected_permutation, expected_distance):
    """solve_tsp_dynamic_programming returns the known optimum for each fixture."""
    permutation, distance = solve_tsp_dynamic_programming(distance_matrix)
    assert permutation == expected_permutation
    assert distance == expected_distance
class PairwiseBilinear(nn.Module):
    """Bilinear scoring over all pairs of sequence positions.

    Given two ``(batch, seq, in_features)`` inputs, produces a
    ``(batch, seq, seq, out_features)`` score tensor.  When enabled, a
    constant-one column is appended to an input so the corresponding slice of
    the weight acts as a bias term.
    """

    def __init__(self, in_features: int, out_features: int, *, bias_u=True, bias_v=True):
        super(PairwiseBilinear, self).__init__()
        self.bias_u = bias_u
        self.bias_v = bias_v
        # Each enabled bias grows the matching input dimension by one.
        u_dim = in_features + (1 if bias_u else 0)
        v_dim = in_features + (1 if bias_v else 0)
        self.weight = nn.parameter.Parameter(torch.empty(out_features, u_dim, v_dim))
        self.reset_parameters()

    def reset_parameters(self):
        """Re-initialize the weight with Xavier-uniform values."""
        nn.init.xavier_uniform_(self.weight)

    def forward(self, u: torch.Tensor, v: torch.Tensor):
        assert u.shape == v.shape, 'Inputs to PairwiseBilinear must have the same shape'
        assert len(u.shape) == 3, 'Inputs to PairwiseBilinear must have a 3d shape'
        batch_size, seq_len, _ = u.shape
        bias_col = torch.ones((batch_size, seq_len, 1), dtype=u.dtype, device=u.device)
        if self.bias_u:
            u = torch.cat([u, bias_col], -1)
        if self.bias_v:
            v = torch.cat([v, bias_col], -1)
        # Contract u against the weight first, then score the result with v.
        partial = torch.einsum('blu,ouv->blov', u, self.weight)
        return torch.einsum('bmv,blov->bmlo', v, partial)
def lsf_reader(filename: str) -> str:
    """Translate a Lumerical ``.lsf`` script into Tidy3D Python source.

    Reads ``filename`` line by line, dispatching on the Lumerical ``add*``
    command at the start of each line and accumulating generated Python into
    ``tidy3d_file``, which is returned as a string.  (Return annotation
    corrected from ``None`` — the function does return the generated source.)
    """
    # Boilerplate imports for the generated script.
    tidy3d_file = 'import numpy as np\n'
    tidy3d_file += 'import matplotlib.pyplot as plt\n'
    tidy3d_file += 'import tidy3d as td\n'
    tidy3d_file += 'import tidy3d.web as web\n'
    tidy3d_file += '\n'
    # Name lists are accumulated as literal '[name, name, ...' strings; the
    # integer counters keep generated object names unique per category.
    structures = '['
    (rect, sph, cyl, polyslab) = (0, 0, 0, 0)
    (sources, modesrc, planewv, ptdipole, gauss, tfsf) = ('[', 0, 0, 0, 0, 0)
    monitors = '['
    (modemon, fluxmon, fieldmon, fieldTime, indexmon) = (0, 0, 0, 0, 0)
    gridspec_string = 'grid_spec=td.GridSpec.auto()'
    (override_structures, overstrct) = ('[', 0)
    with open(filename) as file_lsf:
        Lines = file_lsf.readlines()
    variable_dict = {}
    # First pass: normalize whitespace on every line.
    for (i, line) in enumerate(Lines):
        Lines[i] = _remove_spaces(line)
    # Second pass: dispatch on the command prefix of each line.
    for (i, line) in enumerate(Lines):
        line = Lines[i]
        if (line[0] == '#'):
            # Comments and blank lines are carried over verbatim.
            tidy3d_file += line
        elif (line[0] == '\n'):
            tidy3d_file += line
        elif _is_declaration(line):
            # Variable declarations are recorded and emitted as Python.
            _add_to_variable_dict(line, variable_dict)
            tidy3d_file += line.replace(';', ' ')
        elif (line[:8] == 'addrect;'):
            (addrect_string, rect_name) = _addrect(Lines, (i + 1), rect)
            tidy3d_file += addrect_string
            structures += (rect_name + ', ')
            rect += 1
        elif (line[:10] == 'addsphere;'):
            (addsphere_string, sphere_name) = _addsphere(Lines, (i + 1), sph)
            tidy3d_file += addsphere_string
            structures += (sphere_name + ', ')
            sph += 1
        elif (line[:10] == 'addcircle;'):
            (addcircle_string, cylinder_name) = _addcircle(Lines, (i + 1), cyl)
            tidy3d_file += addcircle_string
            structures += (cylinder_name + ', ')
            cyl += 1
        elif (line[:8] == 'addpoly;'):
            (addpoly_string, poly_name) = _addpoly(Lines, (i + 1), polyslab)
            tidy3d_file += addpoly_string
            structures += (poly_name + ', ')
            polyslab += 1
        elif (line[:10] == 'adddipole;'):
            (adddipole_string, dipole_name) = _adddipole(Lines, (i + 1), ptdipole)
            tidy3d_file += adddipole_string
            sources += (dipole_name + ', ')
            ptdipole += 1
        elif (line[:12] == 'addgaussian;'):
            (addgaussian_string, gaussian_name) = _addgaussian(Lines, (i + 1), gauss)
            tidy3d_file += addgaussian_string
            sources += (gaussian_name + ', ')
            gauss += 1
        elif (line[:9] == 'addplane;'):
            (addplane_string, plane_name) = _addplane(Lines, (i + 1), planewv)
            tidy3d_file += addplane_string
            sources += (plane_name + ', ')
            planewv += 1
        elif (line[:8] == 'addtfsf;'):
            (addtfsf_string, tfsf_name) = _addtfsf(Lines, (i + 1), tfsf)
            tidy3d_file += addtfsf_string
            sources += (tfsf_name + ', ')
            tfsf += 1
        elif (line[:8] == 'addmode;'):
            (addmode_string, mode_src_name) = _addmode(Lines, (i + 1), modesrc)
            tidy3d_file += addmode_string
            sources += (mode_src_name + ', ')
            modesrc += 1
        elif (line[:8] == 'addfdtd;'):
            # The FDTD region defines the simulation grid spec.
            (addfdtd_string, gridspec_string) = _addfdtd(Lines, (i + 1), variable_dict)
            tidy3d_file += addfdtd_string
        elif (line[:8] == 'addmesh;'):
            # Mesh refinement boxes become grid-spec override structures.
            (override_string, refine_box_name) = _addmesh(Lines, (i + 1), overstrct)
            override_structures += (refine_box_name + ', ')
            tidy3d_file += (refine_box_name + override_string)
            overstrct += 1
        elif ((line[:9] == 'addindex;') or (line[:18] == 'addeffectiveindex;') or (line[:12] == 'addemeindex;')):
            (addindex_string, index_name) = _addindex(Lines, (i + 1), indexmon)
            monitors += (index_name + ', ')
            tidy3d_file += (index_name + addindex_string)
            indexmon += 1
        elif (line[:9] == 'addpower;'):
            (addpower_string, power_name) = _addpower(Lines, (i + 1), fluxmon)
            monitors += (power_name + ', ')
            tidy3d_file += (power_name + addpower_string)
            fluxmon += 1
        elif ((line[:9] == 'addmovie;') or (line[:22] == 'addemfieldtimemonitor;') or (line[:8] == 'addtime;')):
            (addmovie_string, movie_name) = _addmovie(Lines, (i + 1), fieldTime)
            monitors += (movie_name + ', ')
            tidy3d_file += (movie_name + addmovie_string)
            fieldTime += 1
        elif ((line[:17] == 'addefieldmonitor;') or (line[:11] == 'addprofile;') or (line[:18] == 'addemfieldmonitor;') or (line[:14] == 'addemeprofile;')):
            (addefieldmonitor_string, monitor_name) = _addefieldmonitor(Lines, (i + 1), fieldmon)
            monitors += (monitor_name + ', ')
            tidy3d_file += (monitor_name + addefieldmonitor_string)
            fieldmon += 1
        elif (line[:17] == 'addmodeexpansion;'):
            (addmodeexpansion_string, modemonitor_name) = _addmodeexpansion(Lines, (i + 1), modemon)
            tidy3d_file += addmodeexpansion_string
            monitors += (modemonitor_name + ', ')
            modemon += 1
        elif (line[:5] == "set('"):
            # GUI-state commands with no Tidy3D equivalent are dropped.
            pass
        elif (line[:14] == 'switchtolayout'):
            pass
        elif (line[:9] == 'selectall'):
            pass
        elif (line[:6] == 'delete'):
            pass
        else:
            # Unknown commands are preserved as commented-out lines.
            tidy3d_file += (('# ' + line[:(- 1)]) + ' # NOTE: does not yet parse to Tidy3D\n')
    # Close each accumulated list literal (strip the trailing ', ').
    if (len(structures) > 1):
        structures = (structures[:(- 2)] + ']')
    else:
        structures = '[]'
    if (len(sources) > 1):
        sources = (sources[:(- 2)] + ']')
    else:
        sources = '[]'
    if (len(monitors) > 1):
        monitors = (monitors[:(- 2)] + ']')
    else:
        monitors = '[]'
    if (len(override_structures) > 1):
        override_structures = (override_structures[:(- 2)] + ']')
    else:
        override_structures = '[]'
    # Final simulation assembly appended to the generated script.
    tidy3d_file += (('\nsim = sim.copy(update=dict(structures=' + structures) + ',')
    tidy3d_file += ' # NOTE: Check order of structures for potential overlap issues\n'
    tidy3d_file += (('\tsources=' + sources) + ',\n')
    tidy3d_file += (('\tmonitors=' + monitors) + ',\n')
    tidy3d_file += (((('\tgrid_spec=' + gridspec_string[:(- 1)]) + ', override_structures=') + override_structures) + ')\n\t)\n')
    tidy3d_file += ')'
    return tidy3d_file
# NOTE(review): the two decorators had lost their "@pytest.mark" prefix and
# the 'url' value in the request dict was lost in transcription (the original
# text could not have parsed); both are restored/patched here.
@pytest.mark.long_test
@pytest.mark.download
def test_grib_index_eumetnet():
    """Load EUMETNET EFI GRIB fields via an indexed URL pattern and sanity-check them."""
    request = {
        'param': '2ti',
        'date': '',
        'step': ['0-24', '24-48', '48-72', '72-96', '96-120', '120-144', '144-168'],
        # TODO(review): restore the EUMETNET data server base URL here.
        'url': '',
        'month': '12',
        'year': '2017',
    }
    PATTERN = '{url}data/fcs/efi/EU_forecast_efi_params_{year}-{month}_0.grb'
    ds = load_source('indexed-urls', PATTERN, request)
    assert (len(ds) == 7), len(ds)
    # Spot-check the first four fields' reference values.
    check(ds, 0, (- 0.))
    check(ds, 1, (- 0.))
    check(ds, 2, 0.)
    check(ds, 3, 0.)
    xds = ds.to_xarray()
    print(xds)
class OptionSeriesBarSonificationContexttracksMappingTremoloDepth(Options):
    """Tremolo-depth mapping options.

    Each option is exposed as a property: the getter reads the stored
    configuration, the setter writes it.  Without the decorators the second
    ``def`` of each name silently shadowed the first, leaving every getter
    unreachable — presumably the decorators were stripped in transcription.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the value (None = unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the option maps to (None = unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (None = unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (None = unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the values are mapped within (None = unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Migration(migrations.Migration):
    """Create the core ``Todo`` model (auto id, description, done flag)."""

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Todo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=512)),
                ('done', models.BooleanField(default=False)),
            ],
        ),
    ]
class PrintingOutputWriter():
    """Output writer that prints interpreter-head state to stdout.

    With ``clear=True`` the terminal is reset (ANSI ``ESC c`` written to
    stderr) before each update of the primary head.
    """

    def __init__(self, clear=False):
        self.clear = clear
        # Printing is always enabled for this writer.
        self.print_output = True

    def add_decoder_state(*args, **kwargs):
        # Decoder state is ignored by this writer.
        pass

    async def add_interpreter_head_state(self, variable, head, prompt, where, trace, is_valid, is_final, mask, num_tokens, program_variables):
        # Only react to the primary head so output is not duplicated.
        if head == 0:
            if self.clear:
                sys.stderr.write('\x1bc')
                sys.stderr.flush()
            if self.print_output:
                print(f'''{prompt}
valid={is_valid}, final={is_final}''')

    def add_compiler_output(self, code):
        # Compiler output is ignored by this writer.
        pass
class MinorObjectClassFinancialSpendingViewSet(CachedDetailViewSet):
    """Obligated amounts per minor object class for one agency, fiscal year
    and major object class."""

    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/financial_spending/object_class.md'
    serializer_class = MinorObjectClassFinancialSpendingSerializer

    def get_queryset(self):
        params = self.request.query_params
        fiscal_year = params.get('fiscal_year', None)
        funding_agency_id = params.get('funding_agency_id', None)
        major_object_class_code = params.get('major_object_class_code', None)
        # All three query parameters are mandatory.
        if not (fiscal_year and funding_agency_id and major_object_class_code):
            raise InvalidParameterException('Missing one or more required query parameters: fiscal_year, funding_agency_id, major_object_class_code')

        # Resolve the agency; an unknown id yields an empty result set.
        agency = Agency.objects.filter(id=funding_agency_id).first()
        if agency is None:
            return FinancialAccountsByProgramActivityObjectClass.objects.none()
        toptier_agency = agency.toptier_agency

        # Latest submission for this agency within the requested fiscal year.
        submission = (
            SubmissionAttributes.objects.all()
            .filter(toptier_code=toptier_agency.toptier_code, reporting_fiscal_year=fiscal_year)
            .order_by('-reporting_fiscal_year', '-reporting_fiscal_quarter')
            .annotate(fiscal_year=F('reporting_fiscal_year'), fiscal_quarter=F('reporting_fiscal_quarter'))
            .first()
        )
        if submission is None:
            return FinancialAccountsByProgramActivityObjectClass.objects.none()
        active_fiscal_year = submission.reporting_fiscal_year
        active_fiscal_quarter = submission.fiscal_quarter

        queryset = FinancialAccountsByProgramActivityObjectClass.objects.filter(submission__is_final_balances_for_fy=True)
        queryset = queryset.filter(
            submission__reporting_fiscal_year=active_fiscal_year,
            submission__reporting_fiscal_quarter=active_fiscal_quarter,
            treasury_account__funding_toptier_agency=toptier_agency,
            object_class__major_object_class=major_object_class_code,
        )
        queryset = queryset.annotate(object_class_name=F('object_class__object_class_name'), object_class_code=F('object_class__object_class'))
        # Group by object class and total the obligations.
        return queryset.values('object_class_name', 'object_class_code').annotate(obligated_amount=Sum('obligations_incurred_by_program_object_class_cpe'))
class PythonParser(BaseParser):
    """Read a Python source file as a list of lines."""

    # @staticmethod added: the original def had no ``self``, so calling it on
    # an *instance* would have passed the instance as ``path``.  The decorator
    # keeps class-level calls working and makes instance-level calls safe too.
    @staticmethod
    def parse(path: Path) -> List[str]:
        """Return the file's lines, honouring its PEP 263 coding cookie.

        Falls back to a plain UTF-8 read when ``tokenize.open`` rejects the
        cookie (SyntaxError) or the declared codec fails (UnicodeError).
        """
        try:
            with tokenize.open(str(path)) as fd:
                return fd.readlines()
        except (SyntaxError, UnicodeError):
            with open(str(path), encoding='utf8') as fd:
                return fd.readlines()
class TestHighlightLineAnchorsPymdownsTable(util.MdCase):
    # Verify that pymdownx.highlight emits a per-line anchor with the custom
    # '__my_span' id prefix when line numbers are rendered in table style.
    extension = ['pymdownx.highlight', 'pymdownx.superfences']
    extension_configs = {'pymdownx.highlight': {'line_anchors': '__my_span', 'linenums_style': 'table'}}

    def test_linespans(self):
        # Expected output is the exact Pygments table markup, anchor included.
        self.check_markdown('\n    ```python linenums="2"\n    import test\n    ```\n    ', '\n    <div class="highlight"><table class="highlighttable"><tr><td class="linenos"><div class="linenodiv"><pre><span></span><span class="normal">2</span></pre></div></td><td class="code"><div><pre><span></span><code><a id="__my_span-0-2" name="__my_span-0-2"></a><span class="kn">import</span> <span class="nn">test</span>\n    </code></pre></div></td></tr></table></div>\n    ', True)
class FromReader(object):
    """Random-access reader over an in-memory copy of the 'from' file."""

    def __init__(self, ffrom):
        # Snapshot the entire source file so later reads and seeks are cheap.
        self._ffrom = BytesIO(file_read(ffrom))

    def read(self, size=(- 1)):
        """Read up to ``size`` bytes (all remaining bytes when negative)."""
        return self._ffrom.read(size)

    def seek(self, position, whence=os.SEEK_SET):
        """Reposition the internal buffer."""
        self._ffrom.seek(position, whence)

    def _write_zeros_to_from(self, blocks, from_dict, overwrite_size=4):
        """Zero ``overwrite_size`` bytes at every address a block covers.

        ``blocks`` yields (from_offset, _, values) tuples indexing into the
        address-sorted items of ``from_dict``.
        """
        addresses = sorted(from_dict.items())
        zeros = overwrite_size * b'\x00'
        for from_offset, _, values in blocks:
            for index in range(len(values)):
                address = addresses[from_offset + index][0]
                self._ffrom.seek(address)
                self._ffrom.write(zeros)
def recite(start_verse, end_verse):
    """Return the song lines for verses ``start_verse``..``end_verse`` (1-based).

    A single verse comes back as its list of lines; a range interleaves one
    blank line between consecutive verses.
    """
    verse_lines = [verse.strip().split('\n') for verse in verses(get_song())]
    if start_verse == end_verse:
        return verse_lines[start_verse - 1]
    lines = []
    for index in range(start_verse - 1, end_verse):
        lines.extend(verse_lines[index])
        lines.append('')
    lines.pop()  # drop the trailing blank separator
    return lines
# NOTE(review): the decorator had been reduced to a bare string expression,
# so the hook never registered; presumably it was a dredd before-hook —
# confirm the hook module alias used by this file.
@hooks.before('Users > Get User Details for an Email Notification > Get User Details for an Email Notification')
def user_email_notification(transaction):
    """Seed one email notification so the documented GET endpoint has data.

    :param transaction: dredd transaction object for the request under test
    """
    with stash['app'].app_context():
        email_notification = EmailNotificationFactory()
        db.session.add(email_notification)
        db.session.commit()
def extractTiredtranslationBlogspotCom(item):
    """Map a tiredtranslation.blogspot.com feed item to a release message.

    Returns None for previews or items with neither chapter nor volume,
    a release message for recognised tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def main():
    """Emit a Verilog design exercising BUFGCTRL clock routing.

    Prints a module header, one MMCM per site (whose outputs become cascade
    clock sources), a BUFGCTRL per site with randomly chosen input nets,
    filler LUT logic, optional BUFHCE loads, and ``endmodule``.
    """
    print('\nmodule top();\n (* KEEP, DONT_TOUCH *)\n LUT6 dummy();\n ')
    site_to_cmt = dict(read_site_to_cmt())
    luts = LutMaker()
    wires = StringIO()
    bufgs = StringIO()
    clock_sources = ClockSources()
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()

    def gen_sites(desired_site_type):
        # Yield (tile_name, site) pairs for every site of the requested type.
        for tile_name in sorted(grid.tiles()):
            loc = grid.loc_of_tilename(tile_name)
            gridinfo = grid.gridinfo_at_loc(loc)
            for (site, site_type) in gridinfo.sites.items():
                if (site_type == desired_site_type):
                    (yield (tile_name, site))

    # One MMCM per site; its 13 output clocks are registered as cascade sources.
    for (_, site) in gen_sites('MMCME2_ADV'):
        mmcm_clocks = ['mmcm_clock_{site}_{idx}'.format(site=site, idx=idx) for idx in range(13)]
        for clk in mmcm_clocks:
            clock_sources.add_clock_source(clk, site_to_cmt[site])
        print('\n wire {c0}, {c1}, {c2}, {c3}, {c4}, {c5};\n (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n MMCME2_ADV pll_{site} (\n .CLKOUT0({c0}),\n .CLKOUT0B({c1}),\n .CLKOUT1({c2}),\n .CLKOUT1B({c3}),\n .CLKOUT2({c4}),\n .CLKOUT2B({c5}),\n .CLKOUT3({c6}),\n .CLKOUT3B({c7}),\n .CLKOUT4({c8}),\n .CLKOUT5({c9}),\n .CLKOUT6({c10}),\n .CLKFBOUT({c11}),\n .CLKFBOUTB({c12})\n );\n '.format(site=site, c0=mmcm_clocks[0], c1=mmcm_clocks[1], c2=mmcm_clocks[2], c3=mmcm_clocks[3], c4=mmcm_clocks[4], c5=mmcm_clocks[5], c6=mmcm_clocks[6], c7=mmcm_clocks[7], c8=mmcm_clocks[8], c9=mmcm_clocks[9], c10=mmcm_clocks[10], c11=mmcm_clocks[11], c12=mmcm_clocks[12]))

    # Declare the wires and a BUFGCTRL instance for every BUFGCTRL site.
    for (_, site) in sorted(gen_sites('BUFGCTRL'), key=(lambda x: BUFGCTRL_XY_FUN(x[1]))):
        print('\n wire O_{site};\n wire S1_{site};\n wire S0_{site};\n wire IGNORE1_{site};\n wire IGNORE0_{site};\n wire I1_{site};\n wire I0_{site};\n wire CE1_{site};\n wire CE0_{site};\n '.format(site=site), file=wires)
        print('\n (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n BUFGCTRL bufg_{site} (\n .O(O_{site}),\n .S1(S1_{site}),\n .S0(S0_{site}),\n .IGNORE1(IGNORE1_{site}),\n .IGNORE0(IGNORE0_{site}),\n .I1(I1_{site}),\n .I0(I0_{site}),\n .CE1(CE1_{site}),\n .CE0(CE0_{site})\n );\n '.format(site=site), file=bufgs)

    # No-op string literal kept from the original; acts as an inline note.
    ' BUFG clock sources:\n\n 2 from interconnect\n Output of BUFG +/- 1\n Cascade in (e.g. PLL, MMCM)\n\n '
    CLOCK_CHOICES = ('LUT', 'BUFG_+1', 'BUFG_-1', 'CASCADE')

    def find_bufg_cmt(tile):
        # Walk up or down the grid from this tile until a CLK_HROW tile is
        # found, then map its first site to a clock-management tile name.
        if ('_BOT_' in tile):
            inc = 1
        else:
            inc = (- 1)
        loc = grid.loc_of_tilename(tile)
        offset = 1
        while True:
            gridinfo = grid.gridinfo_at_loc((loc.grid_x, (loc.grid_y + (offset * inc))))
            if gridinfo.tile_type.startswith('CLK_HROW_'):
                return site_to_cmt[list(gridinfo.sites.keys())[0]]
            offset += 1

    def get_clock_net(tile, site, source_type):
        # Pick the net that feeds a BUFGCTRL input for the chosen source kind.
        if (source_type == 'LUT'):
            return luts.get_next_output_net()
        elif (source_type == 'BUFG_+1'):
            # Neighbouring BUFG output, wrapping within the 16-site group.
            (x, y) = BUFGCTRL_XY_FUN(site)
            target_y = (y + 1)
            max_y = (((y // 16) + 1) * 16)
            if (target_y >= max_y):
                target_y -= 16
            return 'O_BUFGCTRL_X{x}Y{y}'.format(x=x, y=target_y)
        elif (source_type == 'BUFG_-1'):
            (x, y) = BUFGCTRL_XY_FUN(site)
            target_y = (y - 1)
            min_y = ((y // 16) * 16)
            if (target_y < min_y):
                target_y += 16
            return 'O_BUFGCTRL_X{x}Y{y}'.format(x=x, y=target_y)
        elif (source_type == 'CASCADE'):
            cmt = find_bufg_cmt(tile)
            return clock_sources.get_random_source(cmt)
        else:
            assert False, source_type

    # Randomly hook up each BUFGCTRL's clock inputs and control signals.
    for (tile, site) in sorted(gen_sites('BUFGCTRL'), key=(lambda x: BUFGCTRL_XY_FUN(x[1]))):
        if random.randint(0, 1):
            print('\n assign I0_{site} = {i0_net};'.format(site=site, i0_net=get_clock_net(tile, site, random.choice(CLOCK_CHOICES))), file=bufgs)
        if random.randint(0, 1):
            print('\n assign I1_{site} = {i1_net};'.format(site=site, i1_net=get_clock_net(tile, site, random.choice(CLOCK_CHOICES))), file=bufgs)
        print('\n assign S0_{site} = {s0_net};\n assign S1_{site} = {s1_net};\n assign IGNORE0_{site} = {ignore0_net};\n assign IGNORE1_{site} = {ignore1_net};\n assign CE0_{site} = {ce0_net};\n assign CE1_{site} = {ce1_net};\n '.format(site=site, s0_net=luts.get_next_output_net(), s1_net=luts.get_next_output_net(), ignore0_net=luts.get_next_output_net(), ignore1_net=luts.get_next_output_net(), ce0_net=luts.get_next_output_net(), ce1_net=luts.get_next_output_net()), file=bufgs)

    # Emit the LUT filler logic first, then the buffered wire/instance text.
    for l in luts.create_wires_and_luts():
        print(l)
    print(wires.getvalue())
    print(bufgs.getvalue())

    # Randomly attach a BUFHCE load to some BUFGCTRL outputs.
    itr = iter(gen_sites('BUFHCE'))
    for (tile, site) in sorted(gen_sites('BUFGCTRL'), key=(lambda x: BUFGCTRL_XY_FUN(x[1]))):
        if random.randint(0, 1):
            (_, bufhce_site) = next(itr)
            print('\n (* KEEP, DONT_TOUCH, LOC = "{bufhce_site}" *)\n BUFHCE bufhce_{bufhce_site} (\n .I(O_{site})\n );'.format(site=site, bufhce_site=bufhce_site))
    print('endmodule')
def swap_partitioned_table_with_partitions(apps, _):
    """Swap the rebuilt transaction_search tables into place, keeping old data.

    The two partitions are swapped before the parent table.
    """
    for table in ('transaction_search_fabs', 'transaction_search_fpds', 'transaction_search'):
        call_command('swap_in_new_table', f'--table={table}', '--keep-old-data')
def upgrade():
    # Alembic migration: create the custom-field tables and their indexes.
    # plus_custom_field_value_list: named allow-lists of permitted values.
    op.create_table('plus_custom_field_value_list', sa.Column('id', sa.String(length=255), nullable=False), sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('name', sa.String(), nullable=False), sa.Column('description', sa.String(), nullable=True), sa.Column('allowed_values', sa.ARRAY(sa.String()), nullable=True), sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_plus_custom_field_value_list_id'), 'plus_custom_field_value_list', ['id'], unique=False)
    # plus_custom_field_definition: typed field definitions per resource type,
    # optionally constrained by an allow-list (FK to the table above).
    op.create_table('plus_custom_field_definition', sa.Column('id', sa.String(length=255), nullable=False), sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('name', sa.String(), nullable=False), sa.Column('description', sa.String(), nullable=True), sa.Column('field_type', sa.Enum('string', 'string_list', name='allowedtypes'), nullable=False), sa.Column('allow_list_id', sa.String(), nullable=True), sa.Column('resource_type', sa.Enum('system', 'data_use', 'data_category', 'data_subject', name='resourcetypes'), nullable=False), sa.Column('field_definition', sa.String(), nullable=True), sa.Column('active', sa.BOOLEAN(), nullable=False), sa.ForeignKeyConstraint(['allow_list_id'], ['plus_custom_field_value_list.id']), sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_plus_custom_field_definition_field_definition'), 'plus_custom_field_definition', ['field_definition'], unique=False)
    op.create_index(op.f('ix_plus_custom_field_definition_id'), 'plus_custom_field_definition', ['id'], unique=False)
    op.create_index(op.f('ix_plus_custom_field_definition_name'), 'plus_custom_field_definition', ['name'], unique=False)
    # plus_custom_field: per-resource values (FK to the definition table).
    op.create_table('plus_custom_field', sa.Column('id', sa.String(length=255), nullable=False), sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('resource_type', sa.Enum('system', 'data_use', 'data_category', 'data_subject', name='resourcetypes'), nullable=False), sa.Column('resource_id', sa.String(), nullable=False), sa.Column('custom_field_definition_id', sa.String(), nullable=False), sa.Column('value', sa.ARRAY(sa.String()), nullable=True), sa.ForeignKeyConstraint(['custom_field_definition_id'], ['plus_custom_field_definition.id']), sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_plus_custom_field_id'), 'plus_custom_field', ['id'], unique=False)
    op.create_index(op.f('ix_plus_custom_field_resource_id'), 'plus_custom_field', ['resource_id'], unique=False)
class OptionSeriesBarSonificationTracksActivewhen(Options):
    """Active-when conditions for a sonification track.

    Each option is exposed as a property: the getter reads the stored
    configuration, the setter writes it.  Without the decorators the second
    ``def`` of each name silently shadowed the first, leaving every getter
    unreachable — presumably the decorators were stripped in transcription.
    """

    @property
    def crossingDown(self):
        """Threshold the property must cross downwards (None = unset)."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """Threshold the property must cross upwards (None = unset)."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """Upper bound for the property value (None = unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound for the property value (None = unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Name of the point property the condition applies to (None = unset)."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def search_hnsw_minhash_jaccard_topk(index_data, query_data, index_params, k):
    """Top-k Jaccard search: HNSW over MinHash sketches with exact re-scoring.

    index_data: (sets, keys, minhashes-by-num_perm, shared cache dict).
    query_data: (sets, keys, minhashes-by-num_perm).
    Returns (indexing-stats dict, per-query (key, results) list, per-query times).
    """
    (index_sets, index_keys, index_minhashes, index_cache) = index_data
    (query_sets, query_keys, query_minhashes) = query_data
    num_perm = index_params['num_perm']
    # The serialized parameter dict identifies a reusable index in the cache.
    cache_key = json.dumps(index_params)
    if (cache_key not in index_cache):
        (index_minhash_time, query_minhash_time) = lazy_create_minhashes_from_sets(index_minhashes, index_sets, query_minhashes, query_sets, num_perm)
        print('Building HNSW Index for MinHash.')
        start = time.perf_counter()
        kwargs = index_params.copy()
        kwargs.pop('num_perm')  # not an HNSW constructor argument
        index = HNSW(distance_func=compute_minhash_jaccard_distance, **kwargs)
        for i in tqdm.tqdm(range(len(index_keys)), desc='Indexing', unit=' minhash', total=len(index_keys)):
            index.insert(i, index_minhashes[num_perm][i])
        indexing_time = (time.perf_counter() - start)
        print('Indexing time: {:.3f}.'.format(indexing_time))
        index_cache[cache_key] = (index, {'index_minhash_time': index_minhash_time, 'query_minhash_time': query_minhash_time, 'indexing_time': indexing_time})
    (index, indexing) = index_cache[cache_key]
    print('Querying.')
    times = []
    results = []
    for (query_minhash, query_key, query_set) in tqdm.tqdm(zip(query_minhashes[num_perm], query_keys, query_sets), total=len(query_keys), desc='Querying', unit=' query'):
        start = time.perf_counter()
        result = index.query(query_minhash, k)
        # Re-rank HNSW candidates by exact Jaccard against the original sets.
        result = [[index_keys[i], compute_jaccard(query_set, index_sets[i])] for (i, _) in result]
        result.sort(key=(lambda x: x[1]), reverse=True)
        duration = (time.perf_counter() - start)
        times.append(duration)
        results.append((query_key, result))
    return (indexing, results, times)
class TestGetAccessManualWebhooks():
    """Endpoint tests for listing access manual webhooks."""

    # NOTE(review): the decorator had been reduced to a bare
    # "(scope='function')" line (a syntax error); restored as a pytest
    # fixture, which the tests below consume via their ``url`` argument.
    @pytest.fixture(scope='function')
    def url(self, integration_manual_webhook_config) -> str:
        return (V1_URL_PREFIX + ACCESS_MANUAL_WEBHOOKS)

    def test_get_manual_webhook_not_authenticated(self, api_client: TestClient, url):
        # No auth header -> 401.
        response = api_client.get(url, headers={})
        assert (401 == response.status_code)

    def test_get_manual_webhook_wrong_scopes(self, api_client: TestClient, url, generate_auth_header):
        # A token lacking the webhook-read scope -> 403.
        auth_header = generate_auth_header([STORAGE_READ])
        response = api_client.get(url, headers=auth_header)
        assert (403 == response.status_code)

    def test_disabled_webhooks(self, db, api_client, url, generate_auth_header, integration_manual_webhook_config, access_manual_webhook):
        # Webhooks on disabled connection configs are excluded from listing.
        integration_manual_webhook_config.disabled = True
        integration_manual_webhook_config.save(db)
        auth_header = generate_auth_header([WEBHOOK_READ])
        response = api_client.get(url, headers=auth_header)
        assert (200 == response.status_code)
        assert (len(response.json()) == 0)

    def test_get_manual_webhooks(self, api_client: TestClient, db, url, generate_auth_header, access_manual_webhook, integration_manual_webhook_config):
        # Happy path: one webhook with its fields and connection details.
        auth_header = generate_auth_header([WEBHOOK_READ])
        response = api_client.get(url, headers=auth_header)
        assert (200 == response.status_code)
        assert (len(response.json()) == 1)
        resp = response.json()[0]
        assert (resp['fields'] == [{'pii_field': 'email', 'dsr_package_label': 'email', 'data_categories': ['user.contact.email']}, {'pii_field': 'Last Name', 'dsr_package_label': 'last_name', 'data_categories': ['user.name']}])
        connection_config_details = resp['connection_config']
        assert (connection_config_details['key'] == integration_manual_webhook_config.key)
        assert (connection_config_details['connection_type'] == 'manual_webhook')
        assert (connection_config_details['access'] == 'read')
        assert (connection_config_details['created_at'] is not None)
        assert (connection_config_details['updated_at'] is not None)
        assert (connection_config_details['secrets'] is None)
def test_dpa_update_accumulators():
    """DPA accumulators grow correctly across two successive update() batches."""
    d = scared.DPADistinguisher()

    # First batch: 5 traces of 4 samples, 2 selection bits per trace.
    batch1_traces = np.array([[0, 1, 2, 3], [1, 2, 3, 4], [4, 5, 6, 7], [2, 3, 4, 5], [3, 4, 5, 6]], dtype='uint8')
    batch1_data = np.array([[1, 0], [0, 1], [1, 0], [1, 0], [0, 0]], dtype='uint8')
    d.update(batch1_traces, batch1_data)
    assert d.processed_ones.tolist() == [3, 1]
    assert d.accumulator_traces.tolist() == [10, 15, 20, 25]
    assert d.accumulator_ones.tolist() == [[6, 9, 12, 15], [1, 2, 3, 4]]
    assert d.processed_traces == 5

    # Second batch accumulates on top of the first.
    batch2_traces = np.array([[2, 3, 4, 5], [1, 2, 3, 4], [0, 1, 2, 3], [3, 4, 5, 6], [4, 5, 6, 7]], dtype='uint8')
    batch2_data = np.array([[1, 0], [0, 1], [0, 0], [0, 1], [1, 0]], dtype='uint8')
    d.update(batch2_traces, batch2_data)
    assert d.processed_ones.tolist() == [5, 3]
    assert d.accumulator_traces.tolist() == [20, 30, 40, 50]
    assert d.accumulator_ones.tolist() == [[12, 17, 22, 27], [5, 8, 11, 14]]
    assert d.processed_traces == 10
def _processDelayData(input_data, stats):
if (not isinstance(input_data, dict)):
return input_data
data = {}
for k in input_data:
d = input_data[k]
if (d is not None):
data[k] = copy.deepcopy(d)
if ('values' in d):
if ('summary' not in d):
data[k]['summary'] = _getStatistics(d['values'], stats)
if ('num_runs' not in d):
data[k]['num_runs'] = len(data[k]['values'])
return data |
class OptionPlotoptionsBulletSonificationTracksMapping(Options):
    """Sonification track-mapping options for bullet plots.

    Every accessor is exposed as a property.  Without the decorators the two
    ``text`` definitions silently shadowed each other (making the getter
    unreachable) — presumably the decorators were stripped in transcription.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsBulletSonificationTracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsBulletSonificationTracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsBulletSonificationTracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsBulletSonificationTracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsBulletSonificationTracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsBulletSonificationTracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsBulletSonificationTracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsBulletSonificationTracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsBulletSonificationTracksMappingRate)

    @property
    def text(self):
        """Text mapping for the track (None = unset)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsBulletSonificationTracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsBulletSonificationTracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsBulletSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsBulletSonificationTracksMappingVolume)
class TestChangeEmailView(object):
    """View tests for ChangeEmail against a mocked ChangeSetHandler.

    Relies on an active Flask app/request context (flash messages, url_for,
    current_user) supplied by the surrounding test fixtures.
    """
    def test_renders_get_okay(self, mocker):
        # GET must render without touching the changeset handler.
        form = self.produce_form()
        handler = mocker.Mock(spec=ChangeSetHandler)
        view = ChangeEmail(form=form, update_email_handler=handler)
        view.get()
    def test_update_user_email_successfully(self, user, mocker):
        # Valid input: changeset applied once, success flash, 302 back to view.
        form = self.produce_form(old_email=user.email, new_email='', confirm_new_email='')
        handler = mocker.Mock(spec=ChangeSetHandler)
        view = ChangeEmail(form=form, update_email_handler=handler)
        result = view.post()
        flashed = get_flashed_messages(with_categories=True)
        assert (flashed == [('success', 'Email address updated.')])
        handler.apply_changeset.assert_called_once_with(user, EmailUpdate(old_email=user.email, new_email=''))
        assert (result.status_code == 302)
        assert (result.headers['Location'] == url_for('user.change_email'))
    def test_update_user_email_fails_with_invalid_input(self, user, mocker):
        # Missing confirm_new_email: form validation fails, handler untouched.
        form = self.produce_form(old_email=user.email, new_email='')
        handler = mocker.Mock(spec=ChangeSetHandler)
        view = ChangeEmail(form=form, update_email_handler=handler)
        view.post()
        assert form.errors
        handler.apply_changeset.assert_not_called()
    def test_update_user_email_fails_with_stopvalidation(self, user, mocker):
        # A StopValidation from the handler is mapped onto form field errors.
        form = self.produce_form(old_email=user.email, new_email='', confirm_new_email='')
        handler = mocker.Mock(spec=ChangeSetHandler)
        handler.apply_changeset.side_effect = StopValidation([('new_email', 'bad email')])
        view = ChangeEmail(form=form, update_email_handler=handler)
        view.post()
        assert (form.errors == {'new_email': ['bad email']})
    def test_update_email_fails_with_persistence_error(self, user, mocker):
        # Persistence failure: danger flash plus redirect instead of a crash.
        form = self.produce_form(old_email=user.email, new_email='', confirm_new_email='')
        handler = mocker.Mock(spec=ChangeSetHandler)
        handler.apply_changeset.side_effect = PersistenceError('nope')
        view = ChangeEmail(form=form, update_email_handler=handler)
        result = view.post()
        flashed = get_flashed_messages(with_categories=True)
        assert (flashed == [('danger', 'Error while updating email')])
        assert (result.status_code == 302)
        assert (result.headers['Location'] == url_for('user.change_email'))
    def produce_form(self, **data):
        """Build a CSRF-disabled ChangeEmailForm bound to keyword data."""
        return ChangeEmailForm(formdata=MultiDict(data), user=current_user, meta={'csrf': False})
class BaseEngine(ConfigurableWalker, ParserConfig):
    """Base engine: owns a PreProcessor and the list of registered analytics."""
    def __init__(self, config=None):
        ConfigurableWalker.__init__(self, config)
        self.analytics = []  # analytics registered via add_analytic(s)
        self.preprocessor = PreProcessor()
        ParserConfig.__init__(self, preprocessor=self.preprocessor, schema=self._schema)
        # Inline `definitions` from config are parsed under this engine's
        # schema; note the definitions_files loop below runs after the
        # `with self.schema` block ends.
        with self.schema:
            definitions = self.get_config('definitions', [])
            if is_string(definitions):
                definitions = parse_definitions(definitions)
            self.preprocessor.add_definitions(definitions)
        for path in self.get_config('definitions_files', []):
            # ignore_missing_functions: definition files may reference
            # functions that are only resolvable later.
            with ignore_missing_functions, open(path, 'r') as f:
                definitions = parse_definitions(f.read())
            self.preprocessor.add_definitions(definitions)
    def add_analytic(self, analytic):
        """Expand one analytic through the preprocessor and register it."""
        analytic = self.preprocessor.expand(analytic)
        self.analytics.append(analytic)
    def add_analytics(self, analytics):
        """Register several analytics in order."""
        for analytic in analytics:
            self.add_analytic(analytic)
class AutoBackup(plugins.Plugin):
    __author__ = '+.github.com'
    __version__ = '1.0.0'
    __license__ = 'GPL3'
    __description__ = 'This plugin backups files when internet is available.'

    def __init__(self):
        self.ready = False
        # Number of failed backup attempts; capped by options['max_tries'].
        self.tries = 0
        # Timestamp file used to rate-limit backups to options['interval'] days.
        self.status = StatusFile('/root/.auto-backup')

    def on_loaded(self):
        """Enable the plugin only when every required option is configured."""
        for opt in ['files', 'interval', 'commands', 'max_tries']:
            if (opt not in self.options) or (self.options[opt] is None):
                logging.error(f'AUTO-BACKUP: Option {opt} is not set.')
                return
        self.ready = True
        logging.info('AUTO-BACKUP: Successfully loaded.')

    def on_internet_available(self, agent):
        """Run the configured backup commands, at most once per interval."""
        if not self.ready:
            return
        # Give up after max_tries consecutive failures (0/None disables cap).
        if self.options['max_tries'] and (self.tries >= self.options['max_tries']):
            return
        # Skip when the last successful backup is recent enough.
        if self.status.newer_then_days(self.options['interval']):
            return
        existing_files = [f for f in self.options['files'] if os.path.exists(f)]
        files_to_backup = ' '.join(existing_files)
        # Fetch the display outside the try so the except handler cannot hit
        # an unbound `display` if agent.view() ever failed.
        display = agent.view()
        try:
            logging.info('AUTO_BACKUP: Backing up ...')
            display.set('status', 'Backing up ...')
            display.update()
            for cmd in self.options['commands']:
                expanded = cmd.format(files=files_to_backup)
                logging.info(f'AUTO_BACKUP: Running {expanded}')
                # subprocess.DEVNULL instead of open('/dev/null', 'w'): the
                # original leaked one file handle per command run.
                process = subprocess.Popen(expanded, shell=True, stdin=None, stdout=subprocess.DEVNULL, stderr=None, executable='/bin/bash')
                process.wait()
                # != 0 also catches signal deaths (negative return codes),
                # which the previous `> 0` check silently ignored.
                if process.returncode != 0:
                    raise OSError(f'Command failed (rc: {process.returncode})')
            logging.info('AUTO_BACKUP: backup done')
            display.set('status', 'Backup done!')
            display.update()
            self.status.update()
        except OSError as os_e:
            self.tries += 1
            logging.info(f'AUTO_BACKUP: Error: {os_e}')
            display.set('status', 'Backup failed!')
            display.update()
class HCSR04():
    """HC-SR04 ultrasonic ranger driven through a PSLab-style device.

    The TRIG pulse is produced with the PWM generator; ECHO pulse widths are
    recorded with the logic analyzer and converted to a distance.
    """
    def __init__(self, device: SerialHandler, trig: str='SQ1', echo: str='LA1'):
        self._device = device
        self._la = LogicAnalyzer(self._device)
        self._pwm = PWMGenerator(self._device)
        self._trig = trig  # PWM output pin wired to the sensor's TRIG
        self._echo = echo  # logic-analyzer input wired to the sensor's ECHO
        self._measure_period = 0.06  # seconds between trigger pulses
        self._trigger_pulse_length = 1e-05  # 10 microsecond trigger pulse
    def estimate_distance(self, average: int=10, speed_of_sound: float=340) -> float:
        """Return distance in meters, averaged over `average` pings.

        Raises RuntimeError if ECHO is already high at start, TimeoutError if
        fewer than 2 * average edges were captured.
        """
        # Two edges (rise + fall) are expected per ping.
        self._la.capture(channels=self._echo, events=(2 * average), block=False)
        self._pwm.generate(channels=self._trig, frequency=(self._measure_period ** (- 1)), duty_cycles=(self._trigger_pulse_length / self._measure_period))
        # Wait one extra period so the last echo has time to complete.
        time.sleep((self._measure_period * (average + 1)))
        self._pwm.set_state(**{self._trig.lower(): 0})
        (t,) = self._la.fetch_data()
        self._sanity_check(len(t), (2 * average))
        # Echo high time = fall timestamp - rise timestamp (microseconds).
        high_times = (t[1::2] - t[::2])
        # distance = speed * time / 2 (round trip); 1e-06 converts us -> s.
        return (((speed_of_sound * high_times.mean()) / 2) * 1e-06)
    def _sanity_check(self, events: int, expected_events: int):
        # Guard against a stuck sensor or an incomplete capture.
        if self._la.get_initial_states()[self._echo]:
            raise RuntimeError('ECHO pin was HIGH when measurement started.')
        if (events < expected_events):
            raise TimeoutError
def require_exclusive_relationship(resource_list, data, optional=False):
    """Ensure `data` contains at most (and, unless optional, exactly) one
    relationship out of `resource_list`.

    Raises UnprocessableEntityError when more than one listed resource is
    present, or when none is present and `optional` is False. Returns None
    on success.
    """
    present_count = sum(1 for resource in resource_list if resource in data)
    if present_count > 1 or (present_count == 0 and not optional):
        # The pointer string had a pointless f-prefix in the original; a
        # plain literal is byte-identical at runtime.
        raise UnprocessableEntityError({'pointer': '/data/relationships'}, f'A valid relationship with either of resources is required: {resource_list}')
def test_open_yaml_as_json():
    """Feeding YAML content to the JSON loader must raise a decode error."""
    with tempfile.NamedTemporaryFile() as yaml_file:
        with open(yaml_file.name, 'w') as writer:
            writer.write(_TEST_YAML)
        with pytest.raises(json.decoder.JSONDecodeError) as excinfo:
            _ = file_ops.open_json(yaml_file.name)
        assert 'Expecting value' in str(excinfo.value)
def parse(packet):
    """Extract a sensor reading from a BLE advertisement packet.

    Returns parse_payload(...) when the packet carries Service Data for UUID
    bytes b'\\x18\\x1a' and the MAC embedded in the payload matches the
    sender's address; otherwise returns None.
    """
    peer = packet.retrieve('peer')
    rssi = packet.retrieve('rssi')
    svc_data = packet.retrieve('Service Data uuid')
    adv_payload = packet.retrieve('Adv Payload')
    # All four fields must be present and non-empty.
    if not (peer and rssi and svc_data and adv_payload):
        return None
    if svc_data[0].val != b'\x18\x1a':
        return None
    mac = peer[0].val
    payload = adv_payload[0].val
    # First six payload bytes encode the advertiser's MAC address.
    mac_in_payload = ':'.join(('%02x' % byte) for byte in payload[:6])
    if mac == mac_in_payload:
        return parse_payload(mac, rssi[0].val, payload)
def check_ratelimit(response: Response) -> bool:
    """Return True when the caller may proceed with the request.

    On HTTP 429 this sleeps until the epoch timestamp in the
    X-RateLimit-Reset header and returns False so the caller retries.
    A missing or malformed header also yields False without sleeping.
    """
    if response.status_code != 429:
        return True
    try:
        # Narrow try: only the header lookup/parse can legitimately fail.
        # ValueError was previously uncaught, so a malformed header crashed.
        reset_at = int(response.headers['X-RateLimit-Reset'])
    except (KeyError, ValueError):
        return False
    print(f'Ratelimit hit. Sleeping for {(reset_at - int(pytime.time()))} seconds.')
    sleep_until(reset_at)
    return False
class Migration(migrations.Migration):
    """Enforce NOT NULL + a unique index on
    rpt.award_search.generated_unique_award_id.

    state_operations keeps Django's model state (unique=True) in sync with
    the raw SQL applied to the table.
    """
    dependencies = [('search', '0025_award_keys_upper_indexes')]
    # Fix: the reverse SQL used `ALTER COLUMN ... SET NULL`, which is not
    # valid PostgreSQL -- dropping a NOT NULL constraint is `DROP NOT NULL`.
    operations = [migrations.RunSQL(sql='\n ALTER TABLE rpt.award_search ALTER COLUMN generated_unique_award_id SET NOT NULL;\n CREATE UNIQUE INDEX as_idx_generated_unique_award_id_uq ON rpt.award_search(generated_unique_award_id);\n ', reverse_sql='\n DROP INDEX IF EXISTS rpt.as_idx_generated_unique_award_id_uq;\n ALTER TABLE rpt.award_search ALTER COLUMN generated_unique_award_id DROP NOT NULL;\n ', state_operations=[migrations.AlterField(model_name='awardsearch', name='generated_unique_award_id', field=models.TextField(unique=True))])]
class AMIEventTest(unittest.TestCase):
    """Dict-like access behavior of ami.Event."""

    def test_dict(self):
        """An Event built from a mapping mirrors its keys and values."""
        source = {'a': 1, 'b': 2}
        event = ami.Event('TestEvent', dict(source))
        self.assertEqual(event['a'], source['a'])
        self.assertEqual(event['b'], source['b'])
        self.assertSetEqual(set(iter(source)), set(iter(event)))
        self.assertIn('a', event)
class ExceptHandler(excepthandler):
    """AST node for one `except` clause: exception type, bound name, body."""
    _fields = ('type', 'name', 'body')
    _attributes = ('lineno', 'col_offset')

    def __init__(self, type=None, name=None, body=(), lineno=0, col_offset=0, **ARGS):
        # `body` default changed from a shared mutable [] to an immutable ();
        # list(body) below still gives every instance its own fresh list, so
        # callers see identical behavior without the mutable-default hazard.
        excepthandler.__init__(self, **ARGS)
        self.type = type
        self.name = name
        self.body = list(body)
        self.lineno = int(lineno)
        self.col_offset = int(col_offset)
def get_executable_from_executable_config(executable_config: Config) -> Optional[str]:
    """Resolve an interpreter path from an executable config entry.

    Accepts a plain path string, a dict with a 'path' key, or a dict whose
    'conda' entry is an env name string or a dict with 'name'/'path'.
    Returns None when nothing matches or conda is not on PATH.
    """
    if isinstance(executable_config, str):
        return executable_config
    if not isinstance(executable_config, dict):
        return None
    if 'path' in executable_config:
        return executable_config['path']
    if ('conda' in executable_config) and (shutil.which('conda') is not None):
        conda_config = executable_config['conda']
        if isinstance(conda_config, str):
            # Bare string: treat it as a conda environment name.
            return get_executable_from_conda_envname(conda_config)
        if isinstance(conda_config, dict):
            if 'name' in conda_config:
                return get_executable_from_conda_envname(conda_config['name'])
            if 'path' in conda_config:
                return get_executable_from_conda_envpath(conda_config['path'])
    return None
class Solution():
def leastBricks(self, wall: List[List[int]]) -> int:
tr = {}
for row in wall:
s = 0
for e in row:
s += e
tr[s] = (tr.get(s, 0) + 1)
del tr[sum(wall[0])]
if (not tr):
return len(wall)
return (len(wall) - max(tr.values())) |
def test_get(server):
    """CLI GET against the test server prints status line, headers and body."""
    url = str(server.url)
    runner = CliRunner()
    # NOTE(review): `runner.invoke( [url])` is missing its first argument --
    # CliRunner.invoke expects the click command object followed by its args
    # (invoke(cli, [url])). The CLI entry point name appears to have been
    # lost from this line; restore it from VCS before relying on this test.
    result = runner.invoke( [url])
    assert (result.exit_code == 0)
    # The Date header varies per run, so it is stripped before comparison.
    assert (remove_date_header(splitlines(result.output)) == ['HTTP/1.1 200 OK', 'server: uvicorn', 'content-type: text/plain', 'Transfer-Encoding: chunked', '', 'Hello, world!'])
def test_insert_aliased_variable_dominator_prev_block():
    """InsertMissingDefinitions must materialize definitions/relations for
    every SSA version of the aliased variable 'x' across the 3-block CFG."""
    # Project fixture: instruction list, aliased variable versions, task CFG.
    (list_instructions, aliased_variables, task) = construct_graph_aliased(3)
    InsertMissingDefinitions().run(task)
    # Expected: each block's instruction list is the original sequence
    # interleaved with Assignment (new definition) and Relation (version
    # linkage) instructions at the points the pass must insert them.
    assert ([node.instructions for node in task.graph.nodes] == [((([list_instructions[0], Assignment(aliased_variables[0], Variable('x', Integer.int32_t(), 0, is_aliased=True))] + list_instructions[1:3]) + [Relation(aliased_variables[1], aliased_variables[0])]) + list_instructions[3:5]), (((((((list_instructions[5:8] + [Assignment(aliased_variables[2], aliased_variables[1])]) + list_instructions[8:10]) + [Relation(aliased_variables[3], aliased_variables[2])]) + list_instructions[10:13]) + [Assignment(aliased_variables[4], aliased_variables[3])]) + list_instructions[13:15]) + [Relation(aliased_variables[5], aliased_variables[4])]), ((list_instructions[15:24] + [Assignment(Variable('x', Integer.int32_t(), 9, is_aliased=True), Variable('x', Integer.int32_t(), 8, is_aliased=True))]) + list_instructions[24:26])])
.skipcomplex
# NOTE(review): `.skipcomplex` above looks like a stripped decorator
# (presumably `@pytest.mark.skipcomplex`); as written it is not valid syntax.
def test_firedrake_Adaptivity_netgen():
    """Adaptive eigenvalue refinement on an L-shaped Netgen mesh.

    Solves the Laplace eigenproblem with SLEPc (shift-invert Arnoldi), marks
    elements with a residual-based error estimator, refines, and checks the
    first eigenvalue converges to `exact` (= 3.**2 here).
    """
    try:
        from netgen.geom2d import SplineGeometry
        import netgen
    except ImportError:
        pytest.skip(reason='Netgen unavailable, skipping Netgen test.')
    try:
        from slepc4py import SLEPc
    except ImportError:
        pytest.skip(reason='SLEPc unavailable, skipping adaptive test refinement.')
    gc.collect()
    comm = COMM_WORLD
    def Solve(msh, labels):
        # Assemble A u = lam M u with homogeneous Dirichlet BCs on `labels`
        # and solve with shift-invert Arnoldi + MUMPS LU.
        V = FunctionSpace(msh, 'CG', 2)
        u = TrialFunction(V)
        v = TestFunction(V)
        a = (inner(grad(u), grad(v)) * dx)
        m = ((u * v) * dx)
        uh = Function(V)
        bc = DirichletBC(V, 0, labels)
        A = assemble(a, bcs=bc)
        M = assemble(m)
        (Asc, Msc) = (A.M.handle, M.M.handle)
        E = SLEPc.EPS().create()
        E.setType(SLEPc.EPS.Type.ARNOLDI)
        E.setProblemType(SLEPc.EPS.ProblemType.GHEP)
        E.setDimensions(1, SLEPc.DECIDE)
        E.setOperators(Asc, Msc)
        ST = E.getST()
        ST.setType(SLEPc.ST.Type.SINVERT)
        PC = ST.getKSP().getPC()
        PC.setType('lu')
        PC.setFactorSolverType('mumps')
        E.setST(ST)
        E.solve()
        (vr, vi) = Asc.getVecs()
        # Copy the first eigenvector straight into uh's dof vector.
        with uh.dat.vec_wo as vr:
            lam = E.getEigenpair(0, vr, vi)
        return (lam, uh, V)
    def Mark(msh, uh, lam):
        # Residual-based a-posteriori estimator; greedily marks elements
        # holding a `part` fraction of the total error indicator.
        W = FunctionSpace(msh, 'DG', 0)
        w = TestFunction(W)
        R_T = ((lam.real * uh) + div(grad(uh)))
        # NOTE(review): uses the outer `V` (set by the last Solve call), not
        # a parameter -- presumably intentional since V.mesh() is msh; verify.
        n = FacetNormal(V.mesh())
        h = CellDiameter(msh)
        R_dT = dot(grad(uh), n)
        eta = assemble((((((h ** 2) * (R_T ** 2)) * w) * dx) + ((((h('+') + h('-')) * ((R_dT('+') - R_dT('-')) ** 2)) * (w('+') + w('-'))) * dS)))
        frac = 0.95
        delfrac = 0.05
        part = 0.2
        mark = Function(W)
        with mark.dat.vec as markedVec:
            with eta.dat.vec as etaVec:
                sum_eta = etaVec.sum()
                if (sum_eta < tolerance):
                    # NOTE(review): this early path returns the PETSc Vec,
                    # while the normal path returns the Function `mark` --
                    # confirm refine_marked_elements accepts both.
                    return markedVec
                eta_max = etaVec.max()[1]
                (sct, etaVec0) = PETSc.Scatter.toZero(etaVec)
                markedVec0 = etaVec0.duplicate()
                sct(etaVec, etaVec0)
                # Marking decisions are made on rank 0 on the gathered vector,
                # then scattered back to all ranks below.
                if (etaVec.getComm().getRank() == 0):
                    eta = etaVec0.getArray()
                    marked = np.zeros(eta.size, dtype='bool')
                    sum_marked_eta = 0.0
                    while (sum_marked_eta < (part * sum_eta)):
                        new_marked = ((~ marked) & (eta > (frac * eta_max)))
                        sum_marked_eta += sum(eta[new_marked])
                        marked += new_marked
                        frac -= delfrac
                    markedVec0.getArray()[:] = (1.0 * marked[:])
                sct(markedVec0, markedVec, mode=PETSc.Scatter.Mode.REVERSE)
        return mark
    tolerance = 1e-16
    max_iterations = 15
    exact = (3. ** 2)
    geo = SplineGeometry()
    # L-shaped domain with curved outer boundary segments.
    pnts = [(0, 0), (1, 0), (1, 1), (0, 1), ((- 1), 1), ((- 1), 0), ((- 1), (- 1)), (0, (- 1))]
    (p1, p2, p3, p4, p5, p6, p7, p8) = [geo.AppendPoint(*pnt) for pnt in pnts]
    curves = [[['line', p1, p2], 'line'], [['spline3', p2, p3, p4], 'curve'], [['spline3', p4, p5, p6], 'curve'], [['spline3', p6, p7, p8], 'curve'], [['line', p8, p1], 'line']]
    [geo.Append(c, bc=bc) for (c, bc) in curves]
    if (comm.Get_rank() == 0):
        # Mesh generation on rank 0 only; labels are broadcast below.
        ngmsh = geo.GenerateMesh(maxh=0.2)
        labels = [(i + 1) for (i, name) in enumerate(ngmsh.GetRegionNames(codim=1)) if ((name == 'line') or (name == 'curve'))]
    else:
        ngmsh = netgen.libngpy._meshing.Mesh(2)
        labels = None
    labels = comm.bcast(labels, root=0)
    msh = Mesh(ngmsh)
    for i in range(max_iterations):
        # NOTE(review): `printf` is not a Python builtin -- presumably a
        # helper imported elsewhere in this file (or a typo for print).
        printf('level {}'.format(i))
        (lam, uh, V) = Solve(msh, labels)
        mark = Mark(msh, uh, lam)
        msh = msh.refine_marked_elements(mark)
    File('Sol.pvd').write(uh)
    assert (abs((lam - exact)) < 0.01)
def get_announcement():
    """Return today's announcement rendered into the HTML template.

    Fetches s3://$ANNOUNCEMENTS_BUCKET/YYYY-MM-DD.json and substitutes its
    'message' into announcements_template.html. Returns None when there is
    no announcement for today (or the S3 fetch fails for any reason).
    """
    # strftime already returns str; the original wrapped it in a redundant str().
    todays_date = datetime.today().strftime('%Y-%m-%d')
    file_name = (todays_date + '.json')
    s3 = boto3.client('s3')
    try:
        s3_file = s3.get_object(Bucket=os.environ['ANNOUNCEMENTS_BUCKET'], Key=file_name)
    except Exception:
        # Best-effort: a missing key (no announcement today) is the normal case.
        return None
    s3_file_content = s3_file['Body'].read().decode('utf-8')
    json_content = json.loads(s3_file_content)
    announcement_file_message = json_content['message']
    abs_dir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(abs_dir, 'announcements_template.html')) as fh:
        announcement_html = fh.read()
    return announcement_html.replace('{announcement_message}', announcement_file_message)
class LayoutNumberLeaves(TreeLayout):
    """Tree layout that annotates internal nodes with their leaf count."""
    def __init__(self, name='Number of leaves', pos='branch_right', collapsed_only=True, formatter='(%s)', color='black', min_fsize=4, max_fsize=15, ftype='sans-serif', padding_x=5, padding_y=0):
        super().__init__(name)
        self.pos = pos
        # Faces must be flagged as aligned when rendered in the aligned panel.
        self.aligned_faces = (self.pos == 'aligned')
        self.color = color
        self.formatter = formatter  # printf-style template for the count text
        self.ftype = ftype
        self.min_fsize = min_fsize
        self.max_fsize = max_fsize
        self.padding = Padding(padding_x, padding_y)
        self.active = False  # layout starts disabled
        self.collapsed_only = collapsed_only
    def set_node_style(self, node):
        """Attach a leaf-count TextFace to every internal (non-leaf) node."""
        if (not node.is_leaf):
            face = TextFace((self.formatter % len(node)), color=self.color, min_fsize=self.min_fsize, max_fsize=self.max_fsize, ftype=self.ftype, padding_x=self.padding.x, padding_y=self.padding.y)
            # Always shown when the node is rendered collapsed...
            node.add_face(face, position=self.pos, column=1, collapsed_only=True)
            # ...and optionally on expanded nodes as well.
            if (not self.collapsed_only):
                node.add_face(face, position=self.pos, column=0)
(image=cpu_image, secret=deepl_secret, shared_volumes={str(SHARED): volume}, cpu=1.1, memory=8000, timeout=(60 * 60))
# NOTE(review): the line above looks like the argument list of a stripped
# decorator (presumably a Modal `@stub.function(...)` given the .spawn/.map
# calls below); restore it from VCS.
def run(path_in: str, path_out: str, path_tmp: str, config: Config) -> Path:
    """Dub/annotate a video: extract audio, translate speech segments,
    match speakers to faces, annotate every frame and mux the result
    to `path_out`."""
    logger.info(f'Processing {path_in} to {path_out} in {path_tmp}')
    deepl_key = os.getenv('DEEPL_KEY', '')
    path_audio = os.path.join(path_tmp, 'audio.wav')
    path_video = os.path.join(path_tmp, 'video.mp4')
    (fps, length, (height, width)) = get_video_metadata(path_in)
    path_audio = extract_audio(path_in, path_audio)
    # Fan out the expensive extraction steps as parallel jobs (.spawn),
    # joining each (.get) only when its result is first needed.
    speakers = extract_speakers.spawn(path_audio)
    segments = extract_segments.spawn(path_audio)
    faces = extract_faces.spawn(path_in)
    segments = segments.get()
    t_segments = translate_segments(segments, target_lang=config.target_lang, auth_key=deepl_key)
    speakers = speakers.get()
    segment_to_speaker = match_speakers_to_phrases(t_segments, speakers)
    faces = faces.get()
    face_to_speaker = match_speakers_to_faces(faces, speakers, fps, length)
    aligned = assign_to_frames(segments=t_segments, faces=faces, segment_to_speaker=segment_to_speaker, face_to_speaker=face_to_speaker, fps=fps)
    # One aligned annotation entry is expected per video frame.
    assert (len(aligned) == length)
    frames = frame_iterator(path_in)
    items = zip(frames, aligned)
    processed = annotate_frame.map(items, kwargs={'config': config})
    # Annotation adds a border, which changes the output frame height.
    height = get_annotated_h(height, config.border_size)
    save_frames(processed, fps, path_video, height=height, width=width)
    combine_streams(path_video, path_audio, path_out)
    return Path(path_out)
def oscilloscope(device, channels, duration):
    """Return dummy oscilloscope data as (headers, [timestamp, ch1, ...]).

    `duration` is in seconds; the timestamp column is in microseconds with
    SAMPLES evenly spaced points. `device` is unused by this stub.
    """
    headers = ['Timestamp', 'CH1', 'CH2', 'CH3', 'MIC'][:(1 + channels)]
    timestamp = np.arange(0, (duration * 1000000.0), ((duration * 1000000.0) / SAMPLES))
    # One independent random trace per channel. The original used
    # [arr] * channels, which repeats the SAME array object so every
    # channel rendered identical data.
    data = [np.random.random_sample(SAMPLES) for _ in range(channels)]
    return (headers, ([timestamp] + data))
class TestDataWrapper(TestCase):
    """Behavioral tests for DataWrapper's mimetype storage."""

    def _assert_single_mimetype(self, wrapper, mimetype, payload):
        # Shared check: exactly one mimetype registered and payload round-trips.
        self.assertEqual(wrapper.mimetypes(), {mimetype})
        self.assertEqual(wrapper.get_mimedata(mimetype), payload)

    def test_instantiate(self):
        """A fresh wrapper advertises no mimetypes."""
        self.assertEqual(DataWrapper().mimetypes(), set())

    def test_mimedata_roundtrip(self):
        """Data set for a mimetype is returned unchanged."""
        wrapper = DataWrapper()
        wrapper.set_mimedata('text/plain', b'hello world')
        self._assert_single_mimetype(wrapper, 'text/plain', b'hello world')

    def test_mimedata_overwrite(self):
        """Setting the same mimetype twice keeps only the latest payload."""
        wrapper = DataWrapper()
        wrapper.set_mimedata('text/plain', b'hello world')
        wrapper.set_mimedata('text/plain', b'hello mars')
        self._assert_single_mimetype(wrapper, 'text/plain', b'hello mars')

    def test_set_format(self):
        """set_format stores UTF-8 encoded bytes under the format's mimetype."""
        wrapper = DataWrapper()
        wrapper.set_format(text_format(), 'hell wrld')
        self._assert_single_mimetype(wrapper, 'text/plain', 'hell wrld'.encode('utf-8'))
class Reshape(Expression):
    """AST node for a bare ':' (all elements / reshape) inside an indexing."""

    def __init__(self, t_colon):
        super().__init__()
        # Only a COLON token may back this node.
        assert isinstance(t_colon, MATLAB_Token) and t_colon.kind == 'COLON'
        self.t_colon = t_colon
        self.t_colon.set_ast(self)

    def loc(self):
        """Source location of the underlying colon token."""
        return self.t_colon.location

    def set_parent(self, n_parent):
        # A reshape colon may only appear directly under a (cell) reference.
        assert isinstance(n_parent, (Reference, Cell_Reference))
        super().set_parent(n_parent)

    def __str__(self):
        return ':'
class TestUpgradeConnectionLocally(BaseTestCase):
    """`aea upgrade` behavior for a locally-added connection package."""
    ITEM_TYPE = 'connection'
    ITEM_PUBLIC_ID = SOEF_PUBLIC_ID
    LOCAL: List[str] = ['--local']

    def setup(cls):
        # NOTE(review): takes `cls` -- presumably a stripped @classmethod
        # decorator; confirm against VCS.
        super(TestUpgradeConnectionLocally, cls).setup()
        result = cls.runner.invoke(cli, ['-v', 'DEBUG', 'add', '--local', cls.ITEM_TYPE, str(cls.ITEM_PUBLIC_ID)], standalone_mode=False)
        assert (result.exit_code == 0)

    def test_upgrade_to_same_version(self):
        """Upgrading to the already-installed version is rejected."""
        with pytest.raises(ClickException, match="The .* with id '.*' already has version .*. Nothing to upgrade."):
            self.runner.invoke(cli, ['upgrade', *self.LOCAL, self.ITEM_TYPE, str(self.ITEM_PUBLIC_ID)], standalone_mode=False, catch_exceptions=False)

    ('aea.cli.upgrade.ItemUpgrader.is_non_vendor', True)
    # NOTE(review): the bare tuple above looks like the argument list of a
    # stripped @mock.patch(...) decorator for the test below; confirm.
    def test_upgrade_non_vendor(self):
        """Non-vendor packages cannot be upgraded past their version."""
        with pytest.raises(ClickException, match="The .* with id '.*' already has version .*. Nothing to upgrade."):
            self.runner.invoke(cli, ['upgrade', *self.LOCAL, self.ITEM_TYPE, f'{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:100.0.0'], standalone_mode=False, catch_exceptions=False)

    def test_upgrade_to_latest_but_same_version(self):
        """`:latest` resolving to the installed version is a no-op error."""
        with pytest.raises(ClickException, match="The .* with id '.*' already has version .*. Nothing to upgrade."):
            self.runner.invoke(cli, ['upgrade', *self.LOCAL, self.ITEM_TYPE, f'{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:latest'], standalone_mode=False, catch_exceptions=False)

    def test_upgrade_to_non_registered(self):
        """Upgrading a package that was never added is rejected."""
        with pytest.raises(ClickException, match='.* with id .* is not registered. Please use the `add` command. Aborting...'):
            self.runner.invoke(cli, ['-v', 'DEBUG', 'upgrade', *self.LOCAL, self.ITEM_TYPE, 'nonexits/dummy:0.0.0'], standalone_mode=False, catch_exceptions=False)

    def test_upgrade_required_mock(self):
        """A newer required version triggers a successful upgrade run."""
        with patch('aea.cli.upgrade.ItemUpgrader.check_upgrade_is_required', return_value='100.0.0'):
            result = self.runner.invoke(cli, ['-v', 'DEBUG', 'upgrade', *self.LOCAL, self.ITEM_TYPE, f'{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:latest'], catch_exceptions=False)
            assert (result.exit_code == 0)

    def test_do_upgrade(self):
        """End-to-end upgrade succeeds once the registry config is bumped."""
        with self.with_config_update():
            result = self.runner.invoke(cli, ['upgrade', *self.LOCAL, self.ITEM_TYPE, f'{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:latest'], standalone_mode=False)
            assert (result.exit_code == 0)

    def test_package_can_not_be_found_in_registry(self):
        """Registry lookup failures surface as a fetch error."""
        with self.with_config_update():
            # Fix: the original passed `side_effects=` (a typo), which merely
            # set an attribute on the mock and never raised; `side_effect=`
            # makes the patched lookups actually fail as the test intends.
            with patch('aea.cli.registry.utils.get_package_meta', side_effect=Exception('expected!')), patch('aea.cli.registry.utils.find_item_locally', side_effect=Exception('expected!')), pytest.raises(ClickException, match='Package .* details can not be fetched from the registry!'):
                self.runner.invoke(cli, ['upgrade', *self.LOCAL, self.ITEM_TYPE, f'{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:latest'], standalone_mode=False, catch_exceptions=False)

    def test_package_can_not_upgraded_cause_required(self):
        """Upgrade is refused while another package still requires this one."""
        with self.with_config_update():
            with patch('aea.cli.upgrade.ItemRemoveHelper.check_remove', return_value=(set([PackageId('connection', PublicId('test', 'test', '0.0.1'))]), set(), dict())), pytest.raises(ClickException, match="Can not upgrade .* because it is required by '.*'"):
                self.runner.invoke(cli, ['upgrade', *self.LOCAL, self.ITEM_TYPE, f'{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:latest'], standalone_mode=False, catch_exceptions=False)

    def teardown(cls):
        # NOTE(review): takes `cls` -- presumably a stripped @classmethod
        # decorator, matching setup above.
        super(TestUpgradeConnectionLocally, cls).teardown()
        os.chdir(cls.cwd)
        try:
            shutil.rmtree(cls.t)
        except (OSError, IOError):
            pass
def setup_dropbox_loader(mocker):
    """Yield (DropboxLoader, mocked client) with a fake access token set.

    The Dropbox SDK constructor is patched so no network access happens;
    the token environment variable is removed again after the test.
    """
    dropbox_cls = mocker.patch('dropbox.Dropbox')
    fake_client = mocker.MagicMock()
    dropbox_cls.return_value = fake_client
    os.environ['DROPBOX_ACCESS_TOKEN'] = 'test_token'
    loader = DropboxLoader()
    yield (loader, fake_client)
    # pop(..., None) is a no-op when the variable is already gone.
    os.environ.pop('DROPBOX_ACCESS_TOKEN', None)
def test_composite_channel_arguments():
    """composite_channel must change the image, and `arguments` must matter."""
    channel_name = 'default_channels'
    with Image(filename='rose:') as img:
        base = img.signature
        with img.clone() as no_args:
            with Image(width=img.width, height=img.height, pseudo='gradient:') as mask:
                no_args.composite_channel(channel_name, mask, 'blend')
                left = no_args.signature
        # Blending without arguments altered the image.
        assert (base != left)
        with img.clone() as with_args:
            with Image(width=img.width, height=img.height, pseudo='gradient:') as mask:
                with_args.composite_channel(channel_name, mask, 'blend', arguments='7,7')
                right = with_args.signature
        # Blending with explicit arguments altered it too...
        assert (base != right)
        # ...and produced a different result than the argument-free call.
        assert (left != right)
def get_gpus(num_gpu=1, worker_index=(- 1)):
    """Return `num_gpu` free GPU indices discovered via nvidia-smi.

    Polls (with a growing sleep) up to MAX_RETRIES while GPUs are busy and
    raises when not enough become free. worker_index == -1 picks a random
    subset; otherwise a deterministic slice (wrapped modulo availability).
    Returns [] immediately when nvidia-smi is unavailable.
    """
    try:
        list_gpus = subprocess.check_output(['nvidia-smi', '--list-gpus']).decode()
        logging.debug('all GPUs:\n{0}'.format(list_gpus))
        gpus = [x for x in list_gpus.split('\n') if (len(x) > 0)]
    except Exception:
        # nvidia-smi missing/failing: report no GPUs rather than crash.
        gpus = []
        return gpus

    def parse_gpu(gpu_str):
        # "GPU <index>: <name> (UUID: <uuid>)" -> (uuid, index)
        cols = gpu_str.split(' ')
        return (cols[5].split(')')[0], cols[1].split(':')[0])
    gpu_list = [parse_gpu(gpu) for gpu in gpus]
    free_gpus = []
    retries = 0
    while ((len(free_gpus) < num_gpu) and (retries < MAX_RETRIES)):
        smi_output = subprocess.check_output(['nvidia-smi', '--format=csv,noheader,nounits', '--query-compute-apps=gpu_uuid']).decode()
        logging.debug('busy GPUs:\n{0}'.format(smi_output))
        busy_uuids = [x for x in smi_output.split('\n') if (len(x) > 0)]
        for (uuid, index) in gpu_list:
            if (uuid not in busy_uuids):
                free_gpus.append(index)
        if (len(free_gpus) < num_gpu):
            # logging.warn is a deprecated alias; warning is the supported name.
            logging.warning('Unable to find available GPUs: requested={0}, available={1}'.format(num_gpu, len(free_gpus)))
            retries += 1
            time.sleep((30 * retries))
            free_gpus = []
    logging.info('Available GPUs: {}'.format(free_gpus))
    if (len(free_gpus) < num_gpu):
        smi_output = subprocess.check_output(['nvidia-smi', '--format=csv', '--query-compute-apps=gpu_uuid,pid,process_name,used_gpu_memory']).decode()
        logging.info(': {0}'.format(smi_output))
        raise Exception('Unable to find {} free GPU(s)\n{}'.format(num_gpu, smi_output))
    num_available = len(free_gpus)
    if (worker_index == (- 1)):
        # No worker affinity: pick a random subset to spread the load.
        random.shuffle(free_gpus)
        proposed_gpus = free_gpus[:num_gpu]
    else:
        if ((worker_index + num_gpu) > num_available):
            worker_index = (worker_index % num_available)
        proposed_gpus = free_gpus[worker_index:(worker_index + num_gpu)]
    logging.info('Proposed GPUs: {}'.format(proposed_gpus))
    return proposed_gpus
('/config')
def handle_config(self):
global TXBuffer, navMenuIndex
TXBuffer = ''
navMenuIndex = 1
if rpieGlobals.wifiSetup:
return self.redirect('/setup')
if (not isLoggedIn(self.get, self.cookie)):
return self.redirect('/login')
if (self.type == 'GET'):
responsearr = self.get
else:
responsearr = self.post
netdev0 = arg('netdev0', responsearr)
netdev1 = arg('netdev1', responsearr)
nd0_dhcp = ''
nd1_dhcp = ''
nd0_ip = ''
nd0_gw = ''
nd0_mask = ''
nd0_dns = ''
nd1_ip = ''
nd1_gw = ''
nd1_mask = ''
nd1_dns = ''
netmanage = (arg('netman', responsearr) == 'on')
saved = arg('Submit', responsearr)
if saved:
Settings.Settings['Name'] = arg('name', responsearr).replace(' ', '')
Settings.Settings['Unit'] = arg('unit', responsearr)
tpw = arg('password', responsearr)
pwh = (arg('passwordhack', responsearr) == 'on')
try:
Settings.Settings['PasswordHack'] = pwh
except Exception as e:
print(e)
if ('**' not in tpw):
Settings.Settings['Password'] = tpw
Settings.savesettings()
if Settings.NetMan:
Settings.NetMan.APMode = int(arg('apmode', responsearr))
Settings.NetMan.APModeDev = int(arg('apmodedev', responsearr))
Settings.NetMan.APModeTime = int(arg('apmodetime', responsearr))
Settings.NetMan.APStopTime = int(arg('apstoptime', responsearr))
Settings.NetMan.WifiAPChannel = int(arg('wifiapchannel', responsearr))
tpw = arg('apkey', responsearr)
if ('**' not in tpw):
Settings.NetMan.WifiAPKey = tpw
Settings.NetMan.WifiSSID = arg('ssid', responsearr)
Settings.NetMan.WifiSSID2 = arg('ssid2', responsearr)
tpw = arg('key', responsearr)
if ('**' not in tpw):
Settings.NetMan.WifiKey = tpw
tpw = arg('key2', responsearr)
if ('**' not in tpw):
Settings.NetMan.WifiKey2 = tpw
Settings.NetMan.setAPconf()
else:
Settings.NetMan = Network.NetworkManager()
try:
netdev0 = int(netdev0)
except:
netdev0 = (- 1)
try:
netdev1 = int(netdev1)
except:
netdev1 = (- 1)
nd0_dhcp = (arg('nd0_dhcp', responsearr) == 'on')
nd1_dhcp = (arg('nd1_dhcp', responsearr) == 'on')
nd0_ip = arg('nd0_ip', responsearr)
nd0_gw = arg('nd0_gw', responsearr)
nd0_mask = arg('nd0_mask', responsearr)
nd0_dns = arg('nd0_dns', responsearr)
nd1_ip = arg('nd1_ip', responsearr)
nd1_gw = arg('nd1_gw', responsearr)
nd1_mask = arg('nd1_mask', responsearr)
nd1_dns = arg('nd1_dns', responsearr)
if (netdev0 != (- 1)):
Settings.NetMan.setdeviceorder(netdev0, netdev1)
Settings.NetworkDevices[netdev0].dhcp = nd0_dhcp
Settings.NetworkDevices[netdev0].ip = nd0_ip
Settings.NetworkDevices[netdev0].gw = nd0_gw
Settings.NetworkDevices[netdev0].mask = nd0_mask
Settings.NetworkDevices[netdev0].dns = nd0_dns
if (netdev1 != (- 1)):
Settings.NetworkDevices[netdev1].dhcp = nd1_dhcp
Settings.NetworkDevices[netdev1].ip = nd1_ip
Settings.NetworkDevices[netdev1].gw = nd1_gw
Settings.NetworkDevices[netdev1].mask = nd1_mask
Settings.NetworkDevices[netdev1].dns = nd1_dns
if netmanage:
Settings.NetMan.saveconfig()
else:
misc.addLog(rpieGlobals.LOG_LEVEL_INFO, 'Settings saved without OS network settings modifications as you wish!')
if (netmanage and (Settings.NetMan.WifiSSID != '') and (Settings.NetMan.WifiKey != '')):
Network.AP_stop(Settings.NetMan.WifiDevNum)
time.sleep(3)
Settings.savenetsettings()
else:
Settings.loadsettings()
sendHeadandTail('TmplStd', _HEAD)
TXBuffer += "<form name='frmselect' method='post'><table class='normal'>"
addFormHeader('Main Settings')
addFormTextBox('Unit Name', 'name', Settings.Settings['Name'], 25)
addFormNumericBox('Unit Number', 'unit', Settings.Settings['Unit'], 0, 256)
addFormPasswordBox('Admin Password', 'password', Settings.Settings['Password'], 25)
try:
ph = Settings.Settings['PasswordHack']
except:
ph = False
addFormCheckBox('Disable password for safe commands', 'passwordhack', ph)
addFormNote('NOT SAFE COMMANDS: Reboot,Reset,Halt,Update,Exit')
addFormSeparator(2)
try:
choice = Settings.NetMan.APMode
except:
choice = (- 1)
options = ['Never', 'At startup without condition', 'Primary dev disconnected', 'Secondary dev disconnected', 'First WiFi dev disconnected']
optionvalues = [(- 1), 100, 0, 1, 99]
addFormSelector('Start AP when', 'apmode', len(optionvalues), options, optionvalues, None, int(choice))
try:
choice = Settings.NetMan.APModeDev
except:
choice = 99
options = ['Primary network device', 'Secondary network device', 'First wireless network device']
optionvalues = [0, 1, 99]
addFormSelector('On this device', 'apmodedev', len(optionvalues), options, optionvalues, None, int(choice))
try:
dval = Settings.NetMan.APModeTime
except:
dval = 30
addFormNumericBox('After this time', 'apmodetime', dval, 5, 600)
addUnit('sec')
try:
dval = Settings.NetMan.WifiAPChannel
except:
dval = 1
addFormNumericBox('On this channel', 'wifiapchannel', dval, 1, 13)
try:
dval = Settings.NetMan.APStopTime
except:
dval = (- 1)
options = ['Never', '3', '5', '10', '15']
optionvalues = [(- 1), 180, 300, 600, 900]
addFormSelector('Stop AP after', 'apstoptime', len(optionvalues), options, optionvalues, None, int(dval))
addUnit('min')
addFormPasswordBox('WPA AP Mode Key', 'apkey', Settings.NetMan.WifiAPKey, 128)
addFormNote('Password has to be at least 8 character long!')
try:
if plugindeps.modulelist:
pass
except:
import plugindeps
try:
TXBuffer += '<TR><TD>HostAPD library status:<TD>'
modname = 'wifiap'
puse = plugindeps.ismoduleusable(modname)
addEnabled(puse)
if (puse == False):
usable = False
TXBuffer += (("<a href='plugins?installmodule=" + modname) + "'>")
TXBuffer += (modname + ' ')
if (puse == False):
TXBuffer += '</a> (Not installed, AP mode will not work!)'
else:
TXBuffer += 'module installed'
except Exception as e:
print(e)
TXBuffer += "<TR><TD style='width:150px;' align='left'><TD>"
addSubmitButton()
netmanager = OS.detectNM()
oslvl = misc.getsupportlevel(1)
if (oslvl in [1, 2, 3, 9, 10]):
addFormSeparator(2)
if (oslvl != 2):
if netmanager:
addFormNote("<font color=red><b><a href=' is currently not supported!</b></font>")
else:
addFormCheckBox('I have root rights and i really want to manage network settings below', 'netman', netmanage)
addFormNote('<font color=red><b>If this checkbox not enabled, OS config files will not be overwritten!</b></font>')
addFormSubHeader('Wifi Settings')
addFormTextBox('SSID', 'ssid', Settings.NetMan.WifiSSID, 32)
addFormPasswordBox('WPA Key', 'key', Settings.NetMan.WifiKey, 64)
addFormTextBox('Fallback SSID', 'ssid2', Settings.NetMan.WifiSSID2, 32)
addFormPasswordBox('Fallback WPA Key', 'key2', Settings.NetMan.WifiKey2, 64)
addFormSeparator(2)
addFormSubHeader('IP Settings')
TXBuffer += '<TR><TD>Primary network device:<TD>'
netdevs = Settings.NetMan.getdevicenames()
if (netdev0 == ''):
defaultdev = Settings.NetMan.getprimarydevice()
else:
defaultdev = int(netdev0)
if (len(netdevs) > 0):
addSelector_Head('netdev0', True)
for i in range(0, len(netdevs)):
addSelector_Item(netdevs[i], i, (int(i) == int(defaultdev)), False)
addSelector_Foot()
seld = defaultdev
if (defaultdev < 0):
seld = 0
if (nd0_dhcp == ''):
nd0_dhcp = Settings.NetworkDevices[seld].dhcp
if (nd0_dhcp != True):
nd0_dhcp = False
if (nd0_ip == ''):
nd0_ip = Settings.NetworkDevices[seld].ip
if (nd0_gw == ''):
nd0_gw = Settings.NetworkDevices[seld].gw
if (nd0_mask == ''):
nd0_mask = Settings.NetworkDevices[seld].mask
if (nd0_dns == ''):
nd0_dns = Settings.NetworkDevices[seld].dns
addEnabled(Settings.NetworkDevices[seld].isconnected())
addNetType(Settings.NetworkDevices[seld].iswireless())
addFormCheckBox('DHCP', 'nd0_dhcp', nd0_dhcp)
addFormTextBox('IP', 'nd0_ip', nd0_ip, 15)
addFormTextBox('GW', 'nd0_gw', nd0_gw, 15)
addFormTextBox('Mask', 'nd0_mask', nd0_mask, 15)
addFormTextBox('DNS', 'nd0_dns', nd0_dns, 128)
addFormNote('If DHCP enabled these fields will not be saved or used!')
else:
TXBuffer += 'No device'
if (len(netdevs) > 1):
TXBuffer += '<TR><TD>Secondary network device:<TD>'
if (netdev1 == ''):
defaultdev2 = Settings.NetMan.getsecondarydevice()
else:
defaultdev2 = int(netdev1)
seld2 = defaultdev2
if (defaultdev < 0):
seld2 = 0
if (seld2 == seld):
if (seld == 0):
seld2 = 1
else:
seld2 = 0
addSelector_Head('netdev1', True)
for i in range(0, len(netdevs)):
addSelector_Item(netdevs[i], i, (int(i) == int(seld2)), False)
addSelector_Foot()
if (nd1_dhcp == ''):
nd1_dhcp = Settings.NetworkDevices[seld2].dhcp
if (nd1_dhcp != True):
nd1_dhcp = False
if (nd1_ip == ''):
nd1_ip = Settings.NetworkDevices[seld2].ip
if (nd1_gw == ''):
nd1_gw = Settings.NetworkDevices[seld2].gw
if (nd1_mask == ''):
nd1_mask = Settings.NetworkDevices[seld2].mask
if (nd1_dns == ''):
nd1_dns = Settings.NetworkDevices[seld2].dns
addEnabled(Settings.NetworkDevices[seld2].isconnected())
addNetType(Settings.NetworkDevices[seld2].iswireless())
addFormCheckBox('DHCP', 'nd1_dhcp', nd1_dhcp)
addFormTextBox('IP', 'nd1_ip', nd1_ip, 15)
addFormTextBox('GW', 'nd1_gw', nd1_gw, 15)
addFormTextBox('Mask', 'nd1_mask', nd1_mask, 15)
addFormTextBox('DNS', 'nd1_dns', nd1_dns, 15)
addFormNote('If DHCP enabled these fields will not be saved or used!')
TXBuffer += "<TR><TD style='width:150px;' align='left'><TD>"
if (netmanager == False):
addSubmitButton()
TXBuffer += '</table></form>'
sendHeadandTail('TmplStd', _TAIL)
return TXBuffer |
class CummuNoiseEffTorch:
    """Tree-aggregated cumulative Gaussian noise generator (the "efficient"
    DP-FTRL tree-aggregation variant) implemented with torch.

    Each call advances one step and returns, for every registered shape, the
    running sum of tree-aggregation noise with standard deviation ``std``.

    NOTE(review): the bare ``_grad()`` lines in the original text are residue
    of stripped ``@torch.no_grad()`` decorators; they have been restored.
    """

    @torch.no_grad()
    def __init__(self, std, shapes, device, seed, test_mode=False):
        """
        Args:
            std: standard deviation of each generated noise tensor.
            shapes: list of tensor shapes to generate noise for.
            device: torch device for the generator and noise tensors.
            seed: RNG seed; a fresh random seed is drawn when None.
            test_mode: kept for API compatibility (unused in this class).
        """
        seed = seed if seed is not None else int.from_bytes(os.urandom(8), byteorder='big', signed=True)
        self.test_mode = test_mode
        self.std = std
        self.shapes = shapes
        self.device = device
        torch.cuda.manual_seed_all(seed)
        self.generator = torch.Generator(device=self.device)
        self.generator.manual_seed(seed)
        self.step = 0
        self.noise_sum = [torch.zeros(shape).to(self.device) for shape in shapes]
        # Stack of TreeNode(height, value); invariant: strictly increasing heights.
        self.stack = []

    @torch.no_grad()
    def get_noise(self):
        """Draw one fresh N(0, std^2) tensor per registered shape."""
        return [torch.normal(0, self.std, shape, generator=self.generator, device=self.device) for shape in self.shapes]

    @torch.no_grad()
    def push(self, elem):
        """Push a tree node and add its height-normalized value to the sums."""
        for i in range(len(self.shapes)):
            # 2 - 2^{-h} is the variance-normalization factor for a node at height h.
            self.noise_sum[i] += elem.value[i] / (2.0 - (1 / (2 ** elem.height)))
        self.stack.append(elem)

    @torch.no_grad()
    def pop(self):
        """Pop the top tree node and subtract its contribution from the sums."""
        elem = self.stack.pop()
        for i in range(len(self.shapes)):
            self.noise_sum[i] -= elem.value[i] / (2.0 - (1 / (2 ** elem.height)))

    @torch.no_grad()
    def __call__(self):
        """Advance one step, merging equal-height nodes, and return the sums."""
        self.step += 1
        self.push(TreeNode(0, self.get_noise()))
        while len(self.stack) >= 2 and self.stack[-1].height == self.stack[-2].height:
            left_value, right_value = self.stack[-2].value, self.stack[-1].value
            new_noise = self.get_noise()
            new_elem = TreeNode(self.stack[-1].height + 1,
                                [x + (y + z) / 2 for x, y, z in zip(new_noise, left_value, right_value)])
            self.pop()
            self.pop()
            self.push(new_elem)
        return self.noise_sum

    @torch.no_grad()
    def proceed_until(self, step_target):
        """Advance repeatedly until ``step_target`` and return the final sums.

        Raises:
            ValueError: if ``step_target`` has already been reached.
        """
        if self.step >= step_target:
            raise ValueError(f'Already reached {step_target}.')
        while self.step < step_target:
            noise_sum = self.__call__()
        return noise_sum
class TestPCF2LiftStageService(IsolatedAsyncioTestCase):
    """Unit tests for PCF2LiftStageService container orchestration.

    NOTE(review): the bare tuple ``(PCF2LiftStageService, 'get_game_args')``
    in the original text is residue of a stripped
    ``@patch.object(PCF2LiftStageService, 'get_game_args')`` decorator; it has
    been restored (its mock is consumed by
    ``test_convert_cmd_args_list_when_tls_enabled``).
    """

    def setUp(self) -> None:
        self.mock_mpc_svc = MagicMock(spec=MPCService)
        self.mock_mpc_svc.onedocker_svc = MagicMock()
        self.run_id = '681ba82c-16d9-11ed-861d-0242ac120002'
        onedocker_binary_config_map = defaultdict(
            lambda: OneDockerBinaryConfig(tmp_directory='/test_tmp_directory/', binary_version='latest', repository_path='test_path/'))
        self.stage_svc = PCF2LiftStageService(onedocker_binary_config_map, self.mock_mpc_svc)
        self.container_permission_id = 'test-container-permission'

    async def test_compute_metrics(self) -> None:
        """run_async starts the expected containers and records them on the instance."""
        containers = [ContainerInstance(instance_id='test_container_id', status=ContainerInstanceStatus.STARTED)]
        self.mock_mpc_svc.start_containers.return_value = containers
        private_computation_instance = self._create_pc_instance(pcs_features={PCSFeature.PCF_TLS})
        binary_name = 'private_lift/pcf2_lift'
        num_containers = private_computation_instance.infra_config.num_mpc_containers
        test_server_ips = [f'192.0.2.{i}' for i in range(num_containers)]
        self.mock_mpc_svc.convert_cmd_args_list.return_value = (binary_name, ['cmd_1', 'cmd_2'])
        await self.stage_svc.run_async(private_computation_instance, NullCertificateProvider(), NullCertificateProvider(), '', '', test_server_ips)
        self.mock_mpc_svc.start_containers.assert_called_once_with(cmd_args_list=['cmd_1', 'cmd_2'], onedocker_svc=self.mock_mpc_svc.onedocker_svc, binary_version='latest', binary_name=binary_name, timeout=None, env_vars=None, env_vars_list=[{'ONEDOCKER_REPOSITORY_PATH': 'test_path/'} for i in range(num_containers)], wait_for_containers_to_start_up=True, existing_containers=None, opa_workflow_path=TLS_OPA_WORKFLOW_PATH, permission=ContainerPermissionConfig(self.container_permission_id))
        self.assertEqual(containers, private_computation_instance.infra_config.instances[-1].containers)
        self.assertEqual('COMPUTE', private_computation_instance.infra_config.instances[-1].stage_name)

    @patch.object(PCF2LiftStageService, 'get_game_args')
    async def test_convert_cmd_args_list_when_tls_enabled(self, mock_get_game_args: MagicMock) -> None:
        """With TLS, command args are built against server hostnames, not IPs."""
        self.mock_mpc_svc.start_containers.return_value = [ContainerInstance(instance_id='test_container_id', status=ContainerInstanceStatus.STARTED)]
        private_computation_instance = self._create_pc_instance(pcs_features={PCSFeature.PCF_TLS})
        num_containers = private_computation_instance.infra_config.num_mpc_containers
        expected_game_args = mock_get_game_args.return_value = [f'game_args_{i}' for i in range(num_containers)]
        test_server_ips = [f'192.0.2.{i}' for i in range(num_containers)]
        test_server_hostnames = [f'node{i}.test.com' for i in range(num_containers)]
        self.mock_mpc_svc.convert_cmd_args_list.return_value = ('private_lift/pcf2_lift', ['cmd_1', 'cmd_2'])
        await self.stage_svc.run_async(private_computation_instance, self._get_mock_certificate_provider('test_server_cert'), self._get_mock_certificate_provider('test_ca_cert'), '/test/server_certificate_path', '/test/server_certificate_path', test_server_ips, test_server_hostnames)
        self.mock_mpc_svc.convert_cmd_args_list.assert_called_once_with(game_name=PrivateComputationServiceData.PCF2_LIFT_STAGE_DATA.game_name, game_args=expected_game_args, mpc_party=MPCParty.CLIENT, server_ips=test_server_hostnames)

    async def test_tls_env_vars(self) -> None:
        """Every container gets the full set of TLS-related environment variables."""
        self.mock_mpc_svc.start_containers.return_value = [ContainerInstance(instance_id='test_container_id', status=ContainerInstanceStatus.STARTED)]
        private_computation_instance = self._create_pc_instance({PCSFeature.PCF_TLS})
        num_containers = private_computation_instance.infra_config.num_mpc_containers
        test_server_ips = [f'192.0.2.{i}' for i in range(num_containers)]
        test_server_hostnames = [f'node{i}.test.com' for i in range(num_containers)]
        self.mock_mpc_svc.convert_cmd_args_list.return_value = ('private_lift/pcf2_lift', ['cmd_1', 'cmd_2'])
        expected_server_certificate = 'test_server_cert'
        expected_ca_certificate = 'test_ca_cert'
        expected_server_key_resource_id = 'test_key'
        expected_server_key_region = 'test_region'
        expected_server_key_install_path = 'test/path'
        expected_server_certificate_path = '/test/server_certificate_path'
        expected_ca_certificate_path = '/test/server_certificate_path'
        await self.stage_svc.run_async(private_computation_instance, self._get_mock_certificate_provider(expected_server_certificate), self._get_mock_certificate_provider(expected_ca_certificate), expected_server_certificate_path, expected_ca_certificate_path, test_server_ips, test_server_hostnames, StaticPrivateKeyReferenceProvider(expected_server_key_resource_id, expected_server_key_region, expected_server_key_install_path))
        self.mock_mpc_svc.start_containers.assert_called_once()
        call_kwargs = self.mock_mpc_svc.start_containers.call_args[1]
        call_env_args_list = call_kwargs['env_vars_list']
        self.assertTrue(call_env_args_list)
        for i, call_env_args in enumerate(call_env_args_list):
            self.assertTrue('ONEDOCKER_REPOSITORY_PATH' in call_env_args)
            self.assertEqual('test_path/', call_env_args['ONEDOCKER_REPOSITORY_PATH'])
            self.assertTrue(SERVER_CERTIFICATE_ENV_VAR in call_env_args)
            self.assertEqual(expected_server_certificate, call_env_args[SERVER_CERTIFICATE_ENV_VAR])
            self.assertTrue(CA_CERTIFICATE_ENV_VAR in call_env_args)
            self.assertEqual(expected_ca_certificate, call_env_args[CA_CERTIFICATE_ENV_VAR])
            self.assertTrue(SERVER_PRIVATE_KEY_REF_ENV_VAR in call_env_args)
            self.assertEqual(expected_server_key_resource_id, call_env_args[SERVER_PRIVATE_KEY_REF_ENV_VAR])
            self.assertTrue(SERVER_PRIVATE_KEY_PATH_ENV_VAR in call_env_args)
            self.assertEqual(expected_server_key_install_path, call_env_args[SERVER_PRIVATE_KEY_PATH_ENV_VAR])
            self.assertTrue(SERVER_PRIVATE_KEY_REGION_ENV_VAR in call_env_args)
            self.assertEqual(expected_server_key_region, call_env_args[SERVER_PRIVATE_KEY_REGION_ENV_VAR])
            self.assertTrue(SERVER_CERTIFICATE_PATH_ENV_VAR in call_env_args)
            self.assertEqual(expected_server_certificate_path, call_env_args[SERVER_CERTIFICATE_PATH_ENV_VAR])
            self.assertTrue(CA_CERTIFICATE_PATH_ENV_VAR in call_env_args)
            self.assertEqual(expected_ca_certificate_path, call_env_args[CA_CERTIFICATE_PATH_ENV_VAR])
            self.assertTrue(SERVER_IP_ADDRESS_ENV_VAR in call_env_args)
            self.assertEqual(test_server_ips[i], call_env_args[SERVER_IP_ADDRESS_ENV_VAR])
            self.assertTrue(SERVER_HOSTNAME_ENV_VAR in call_env_args)
            self.assertEqual(test_server_hostnames[i], call_env_args[SERVER_HOSTNAME_ENV_VAR])

    def test_get_game_args(self) -> None:
        """Game args are built per container with the shared common arguments."""
        private_computation_instance = self._create_pc_instance(pcs_features=set())
        run_name_base = (private_computation_instance.infra_config.instance_id + '_') + GameNames.PCF2_LIFT.value
        common_game_args = {'input_base_path': private_computation_instance.data_processing_output_path, 'output_base_path': private_computation_instance.pcf2_lift_stage_output_base_path, 'num_files': private_computation_instance.infra_config.num_files_per_mpc_container, 'concurrency': private_computation_instance.infra_config.mpc_compute_concurrency, 'num_conversions_per_user': private_computation_instance.product_config.common.padding_size, 'log_cost': True, 'run_id': self.run_id, 'use_tls': False, 'ca_cert_path': '', 'server_cert_path': '', 'private_key_path': '', 'log_cost_s3_bucket': private_computation_instance.infra_config.log_cost_bucket}
        test_game_args = [{**common_game_args, 'run_name': (f'{run_name_base}_0' if self.stage_svc._log_cost_to_s3 else ''), 'file_start_index': 0}, {**common_game_args, 'run_name': (f'{run_name_base}_1' if self.stage_svc._log_cost_to_s3 else ''), 'file_start_index': private_computation_instance.infra_config.num_files_per_mpc_container}]
        self.assertEqual(test_game_args, self.stage_svc.get_game_args(private_computation_instance, '', ''))

    def _create_pc_instance(self, pcs_features: Set[PCSFeature]) -> PrivateComputationInstance:
        """Build a PARTNER-side two-container instance fixture with the given features."""
        infra_config: InfraConfig = InfraConfig(
            instance_id='test_instance_123',
            role=PrivateComputationRole.PARTNER,
            status=PrivateComputationInstanceStatus.COMPUTATION_STARTED,
            # NOTE(review): the original line read "status_update_ts=," (value
            # lost in transit); 1600000000 is a placeholder epoch - confirm.
            status_update_ts=1600000000,
            instances=[],
            game_type=PrivateComputationGameType.LIFT,
            num_pid_containers=2,
            num_mpc_containers=2,
            num_files_per_mpc_container=NUM_NEW_SHARDS_PER_FILE,
            status_updates=[],
            run_id=self.run_id,
            log_cost_bucket='test_log_cost_bucket',
            pcs_features=pcs_features,
            container_permission_id=self.container_permission_id,
        )
        common: CommonProductConfig = CommonProductConfig(input_path='456', output_dir='789')
        product_config: ProductConfig = LiftConfig(common=common)
        return PrivateComputationInstance(infra_config=infra_config, product_config=product_config)

    def _get_mock_certificate_provider(self, certificate: str) -> MagicMock:
        """Certificate provider stub returning the given certificate string."""
        certificate_provider = MagicMock()
        certificate_provider.get_certificate.return_value = certificate
        return certificate_provider
class OptionSeriesXrangeOnpoint(Options):
    """Highcharts ``series.xrange.onPoint`` options wrapper.

    NOTE(review): the duplicate ``def id`` pair in the original text is a
    property getter/setter whose decorators were stripped; ``@property`` /
    ``@id.setter`` have been restored (``connectorOptions`` and ``position``
    follow the same getter pattern).
    """

    @property
    def connectorOptions(self) -> 'OptionSeriesXrangeOnpointConnectoroptions':
        """Options for the connector drawn between the series and the point."""
        return self._config_sub_data('connectorOptions', OptionSeriesXrangeOnpointConnectoroptions)

    @property
    def id(self):
        """The id of the point to be connected to."""
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def position(self) -> 'OptionSeriesXrangeOnpointPosition':
        """Position options of the connected point relative to the series."""
        return self._config_sub_data('position', OptionSeriesXrangeOnpointPosition)
class Image(_VirtModel, _JsonPickleModel, _OSType, _HVMType, _DcMixin, _UserTasksModel):
    """Server disk image backed by an imgadm manifest.

    Plain metadata lives in regular model fields; tags, manifests and the
    source VM/snapshot references live in the pickled JSON blob.

    NOTE(review): the duplicated ``def`` pairs and orphaned ``*.setter``
    residues in the original text show that the simple getters here were
    ``@property`` methods and that ``ostype_to_os``/``os_to_ostype`` were
    classmethods; the decorators have been restored accordingly.
    """

    # Base manifest merged into everything produced by build_manifest().
    # NOTE(review): the owner UUID below looks truncated by encoding damage -
    # confirm against the original source.
    _MANIFEST_TEMPLATE = frozendict({u'v': 2, u'owner': u'-0000-0000-0000-', u'state': u'active', u'disabled': False, u'public': False})
    # Internal ostype code -> imgadm manifest "os" value.
    OSTYPE2OS = frozendict({_OSType.LINUX: u'linux', _OSType.BSD: u'bsd', _OSType.WINDOWS: u'windows', _OSType.SUNOS: u'illumos', _OSType.SUNOS_ZONE: u'smartos', _OSType.LINUX_ZONE: u'linux'})
    # Built-in pseudo-images used for installing an OS from an ISO.
    CUSTOM = frozendict({_OSType.LINUX: FrozenAttrDict({'name': 'custom_linux', 'desc': 'Custom Linux', 'note': _('Install your favourite OS from ISO image.'), 'access': _VirtModel.PUBLIC, 'deploy': False, 'resize': False, 'ostype': _OSType.LINUX}), _OSType.BSD: FrozenAttrDict({'name': 'custom_bsd', 'desc': 'Custom BSD', 'note': _('Install your favourite OS from ISO image.'), 'access': _VirtModel.PUBLIC, 'deploy': False, 'resize': False, 'ostype': _OSType.BSD}), _OSType.SUNOS: FrozenAttrDict({'name': 'custom_sunos', 'desc': 'Custom SunOS', 'note': _('Install your favourite OS from ISO image.'), 'access': _VirtModel.PRIVATE, 'deploy': False, 'resize': False, 'ostype': _OSType.SUNOS}), _OSType.WINDOWS: FrozenAttrDict({'name': 'custom_windows', 'desc': 'Custom Windows', 'note': _('Install your favourite OS from ISO image.'), 'access': _VirtModel.PRIVATE, 'deploy': False, 'resize': False, 'ostype': _OSType.WINDOWS})})
    ACCESS = ((_VirtModel.PUBLIC, _('Public')), (_VirtModel.PRIVATE, _('Private')), (_VirtModel.DELETED, _('Deleted')))
    # Image status codes.
    OK = 1
    PENDING = 2
    STATUS = ((OK, _('ok')), (PENDING, _('pending')))
    # Per-node-storage status codes (kept in cache, not in the DB).
    READY = 1
    IMPORTING = 2
    DELETING = 3
    NS_STATUS = ((READY, _('ready')), (IMPORTING, _('importing')), (DELETING, _('deleting')))
    TAGS_KEY = u'erigones'
    DEFAULT_SIZE = 1024
    IMAGE_SIZE_TOTAL_KEY = 'image-size-total:%s'
    new = False
    _pk_key = 'image_uuid'
    _src_vm = None
    _src_snap = None

    uuid = models.CharField(_('UUID'), max_length=36, primary_key=True)
    version = models.CharField(_('Image version'), max_length=16)
    size = models.IntegerField(_('Image size (MB)'), help_text=_('Exact same value as in imgadm manifest attribute image_size.'))
    ostype = models.SmallIntegerField(_('Guest OS type'), choices=_OSType.OSTYPE)
    deploy = models.BooleanField(_('Deploy required?'), default=False)
    resize = models.BooleanField(_('Resizable?'), default=False)
    status = models.SmallIntegerField(_('Status'), choices=STATUS)

    class Meta:
        app_label = 'vms'
        verbose_name = _('Image')
        verbose_name_plural = _('Images')
        unique_together = (('alias', 'owner', 'version'),)

    def __init__(self, *args, **kwargs):
        """Generate a fresh UUID for new (not-yet-saved) images."""
        super(Image, self).__init__(*args, **kwargs)
        if not self.uuid:
            self.new = True
            self.uuid = str(uuid4())

    @classmethod
    def ostype_to_os(cls, ostype):
        """Translate an internal ostype code to the imgadm manifest os value."""
        return cls.OSTYPE2OS[ostype]

    @classmethod
    def os_to_ostype(cls, manifest):
        """Derive the internal ostype code from an imgadm manifest."""
        if manifest['type'] == 'zone-dataset':
            return Image.SUNOS_ZONE
        elif manifest['type'] == 'lx-dataset':
            return Image.LINUX_ZONE
        else:
            os_name = manifest['os'].lower()  # renamed local to avoid shadowing the os module
            if os_name == cls.OSTYPE2OS[_OSType.SUNOS_ZONE]:
                return _OSType.SUNOS
            os2ostype = {v: k for (k, v) in cls.OSTYPE2OS.items() if (k in cls.HVM_OSTYPES)}
            # NOTE(review): lookup uses the raw (non-lowercased) value, as in
            # the original - confirm whether os_name was intended here.
            return os2ostype.get(manifest['os'], Image.LINUX)

    @property
    def note(self):
        """Short user-facing note shown for disabled images."""
        if self.access == self.DISABLED:
            return _('Coming Soon')
        else:
            return ''

    @property
    def alias_version(self):
        """'alias (version)' display string."""
        return '%s (%s)' % (self.alias, self.version)

    @property
    def tags(self):
        """Image tags stored in the pickled JSON blob."""
        return self.json.get('tags', [])

    @tags.setter
    def tags(self, value):
        self.save_item('tags', value, save=False)

    @property
    def tag_list(self):
        """Comma-joined, sorted tag string (used by web_data_admin)."""
        if not self.tags:
            return ''
        return ','.join(map(unicode, sorted(self.tags)))

    @property
    def backup(self):
        """Backup metadata from the JSON blob."""
        return PickleDict(self.json.get('backup', {}))

    @backup.setter
    def backup(self, value):
        self.save_item('backup', value, save=False)

    @property
    def manifest(self):
        """User-edited manifest from the JSON blob."""
        return PickleDict(self.json.get('manifest', {}))

    @manifest.setter
    def manifest(self, value):
        self.save_item('manifest', value, save=False)

    @property
    def manifest_active(self):
        """Manifest currently active on the image server."""
        return PickleDict(self.json.get('manifest_active', {}))

    @manifest_active.setter
    def manifest_active(self, value):
        self.save_item('manifest_active', value, save=False)

    @property
    def requirements(self):
        """Requirements section of the active manifest."""
        return self.manifest_active.get('requirements', {})

    @property
    def min_platform(self):
        return self.requirements.get('min_platform', {}).get(settings.VMS_SDC_VERSION, None)

    @property
    def max_platform(self):
        return self.requirements.get('max_platform', {}).get(settings.VMS_SDC_VERSION, None)

    @property
    def homepage(self):
        return self.manifest_active.get('homepage', '')

    def build_manifest(self):
        """Compose the full imgadm manifest from the template, the stored
        manifest and the current model fields."""
        manifest = dict(self._MANIFEST_TEMPLATE)
        manifest.update(self.manifest)
        manifest.update({u'uuid': self.uuid, u'name': self.alias, u'version': self.version, u'state': u'active', u'disabled': (self.access in (self.DISABLED, self.INTERNAL)), u'public': (self.access == self.PUBLIC), u'os': self.ostype_to_os(self.ostype), u'description': self.desc, u'image_size': self.size})
        if u'tags' not in manifest:
            manifest[u'tags'] = {}
        manifest[u'tags'].update({self.TAGS_KEY: self.tags, 'resize': self.resize, 'deploy': self.deploy, 'internal': (self.access == self.INTERNAL)})
        if self.ostype in self.ZONE_OSTYPES:
            # Zone datasets must not carry HVM-only manifest attributes.
            for i in ('image_size', 'nic_driver', 'disk_driver', 'cpu_type'):
                try:
                    del manifest[i]
                except KeyError:
                    pass
            if self.ostype == self.SUNOS_ZONE:
                img_type = u'zone-dataset'
            elif self.ostype == self.LINUX_ZONE:
                img_type = u'lx-dataset'
            else:
                img_type = u'other'
        else:
            manifest[u'image_size'] = self.size
            manifest[u'cpu_type'] = manifest.get('cpu_type', u'qemu64')
            img_type = u'zvol'
        # A manually-set "other" type is preserved.
        if manifest.get('type', None) != 'other':
            manifest[u'type'] = img_type
        if u'published_at' not in manifest:
            manifest[u'published_at'] = timezone.now().isoformat()
        return manifest

    @property
    def default_apiview(self):
        """Basic representation used in API task views."""
        return {'status': self.status, 'status_display': self.get_status_display()}

    @property
    def web_data(self):
        """Dict used in server web templates."""
        return {'size': self.size}

    @property
    def web_data_admin(self):
        """Dict used in admin web templates."""
        return {'name': self.name, 'alias': self.alias, 'version': self.version, 'access': self.access, 'owner': self.owner.username, 'ostype': self.ostype, 'desc': self.desc, 'dc_bound': self.dc_bound_bool, 'resize': self.resize, 'deploy': self.deploy, 'tags': self.tag_list}

    def save_status(self, new_status=None, **kwargs):
        """Persist only the status field, optionally setting it first."""
        if new_status is not None:
            self.status = new_status
        return self.save(update_fields=('status',), **kwargs)

    def is_ok(self):
        return self.status == self.OK

    def is_used_by_vms(self, dc=None, zpool=None):
        """True if any VM (optionally restricted to dc/zpool) uses this image."""
        if dc:
            vms = dc.vm_set.all()
        else:
            vms = Vm.objects.filter(dc__in=self.dc.all())
        return any(self.uuid in vm.get_image_uuids(zpool=zpool) for vm in vms)

    def _get_ns_key(self, ns):
        # Cache key for the per-node-storage import/delete status.
        return 'img:%s:ns:%s' % (self.uuid, ns.id)

    def get_block_key(self, ns):
        """Fully-qualified cache key (prefix + version) used for task blocking."""
        return '%s:%s:%s' % (cache.key_prefix, cache.version, self._get_ns_key(ns))

    def set_ns_status(self, ns, status):
        # Keep the per-node-storage status for 24 hours.
        cache.set(self._get_ns_key(ns), status, 3600 * 24)

    def del_ns_status(self, ns):
        cache.delete(self._get_ns_key(ns))

    def get_ns_status(self, ns):
        """Per-node-storage status; defaults to READY when nothing is cached."""
        status = cache.get(self._get_ns_key(ns))
        if not status:
            return self.READY
        return status

    def get_ns_status_display(self, ns):
        return dict(self.NS_STATUS).get(self.get_ns_status(ns))

    @property
    def src_vm_uuid(self):
        """UUID of the VM this image was created from (if any)."""
        return self.json.get('vm_uuid', None)

    @src_vm_uuid.setter
    def src_vm_uuid(self, value):
        self.save_item('vm_uuid', value, save=False)

    @property
    def src_snap_id(self):
        """ID of the snapshot this image was created from (if any)."""
        return self.json.get('snap_id', None)

    @src_snap_id.setter
    def src_snap_id(self, value):
        self.save_item('snap_id', value, save=False)

    @property
    def src_vm(self):
        """Source VM object (cached); clears the stored uuid if the VM is gone."""
        if self._src_vm is None and self.src_vm_uuid:
            try:
                self._src_vm = Vm.objects.get(uuid=self.src_vm_uuid)
            except Vm.DoesNotExist:
                self.src_vm_uuid = None
                self.save(update_fields=('enc_json',))
        return self._src_vm

    @src_vm.setter
    def src_vm(self, vm):
        self.src_vm_uuid = vm.uuid
        self._src_vm = vm

    @property
    def src_snap(self):
        """Source snapshot object (cached); clears the stored id if it is gone."""
        if self._src_snap is None and self.src_snap_id:
            try:
                self._src_snap = Snapshot.objects.get(id=self.src_snap_id)
            except Snapshot.DoesNotExist:
                self.src_snap_id = None
                self.save(update_fields=('enc_json',))
        return self._src_snap

    @src_snap.setter
    def src_snap(self, snap):
        self.src_snap_id = snap.id
        self._src_snap = snap

    def tasks_add(self, task_id, apiview, msg='', **additional_apiview):
        """Register a pending task; image_snapshot views also carry the VM pk."""
        info = self._create_task_info(self.pk, apiview, msg, additional_apiview=additional_apiview)
        if apiview.get('view') == 'image_snapshot':
            vm = self.src_vm
            if vm:
                info[vm._pk_key] = vm.pk
        return self._add_task(self.owner_id, task_id, info)

    def tasks_del(self, task_id, **additional_apiview):
        """Remove a finished task, also from the source VM owner's task list."""
        apiview = super(Image, self).tasks_del(task_id, **additional_apiview)
        if apiview.get('view') == 'image_snapshot':
            vm = self.src_vm
            # NOTE(review): unlike tasks_add, vm is not None-checked here -
            # confirm that an image_snapshot task always has a source VM.
            if vm.owner_id != self.owner_id:
                self._pop_task(vm.owner_id, task_id)
        return apiview
class BicincittaStation(BikeShareStation, BicincittaMixin):
    """Single Bicincittà station parsed from the network endpoint."""

    # Numeric status codes returned by the endpoint.
    station_statuses = {0: 'online', 1: 'maintenance', 2: 'offline', 3: 'under construction', 4: 'planned'}

    def __init__(self, endpoint, uid, lat, lng, name, number, status, *_):
        super(BicincittaStation, self).__init__()
        self.endpoint = endpoint
        self.name = name
        self.latitude = BicincittaStation.parse_shitty_float(lat)
        self.longitude = BicincittaStation.parse_shitty_float(lng)
        self.extra = {'uid': uid, 'number': int(number), 'status': self.station_statuses[int(status)]}

    @staticmethod
    def parse_shitty_float(blergh):
        """Extract a float from a possibly noisy coordinate string.

        Tries to pull out a float with 5-15 decimal digits first and falls
        back to a plain float() conversion.

        NOTE(review): restored @staticmethod - the function has no self/cls
        and is called unbound as BicincittaStation.parse_shitty_float(...).
        """
        try:
            # re.search returns None on no match -> AttributeError on .group().
            return float(re.search('\\d+\\.\\d{5,15}', blergh).group())
        except (AttributeError, TypeError, ValueError):
            # Narrowed from a bare except: covers no-match and non-string input.
            return float(blergh)

    def update(self, scraper=None):
        """Refresh bike/slot counts, score and review count from the endpoint."""
        scraper = scraper or PyBikesScraper()
        data = self.get_station_status(self.extra['uid'], self.endpoint, scraper)
        # NOTE(review): the u'' separator below is almost certainly a
        # non-ASCII delimiter lost to encoding damage - str.split('') raises
        # ValueError, so the original character must be recovered. TODO.
        (_, reviews, score, _, status) = data['d'].split(u'')[:5]
        self.bikes = status.count('4')
        self.free = status.count('0')
        self.extra['score'] = float(score.replace(',', '.'))
        self.extra['reviews'] = int(reviews)
class TestFullTextModels:
    """Tests for the FullTextModels container dataclass."""

    def test_should_preload_models(self):
        """preload() on the container must fan out to every member model."""
        field_names = [field.name for field in dataclasses.fields(FullTextModels)]
        mocks = {name: MagicMock(name=name) for name in field_names}
        container = FullTextModels(**mocks)
        container.preload()
        for mock_model in mocks.values():
            mock_model.preload.assert_called()
class BertOutput(nn.Module):
    """BERT FFN output block: down-projection fused with the residual add,
    followed by LayerNorm."""

    def __init__(self, hidden_size, intermediate_size, layer_norm_eps, hidden_dropout_prob):
        super().__init__()
        # Only the dropout-free configuration is supported by this block.
        assert hidden_dropout_prob == 0.0
        # specialization='add' fuses the residual addition into the linear op.
        self.dense = nn.Linear(intermediate_size, hidden_size, specialization='add')
        self.dropout = nn.Dropout(hidden_dropout_prob)
        self.LayerNorm = nn.LayerNorm(hidden_size, eps=layer_norm_eps)

    def forward(self, hidden_states: Tensor, input_tensor: Tensor) -> Tensor:
        projected = self.dense(hidden_states, input_tensor)
        return self.LayerNorm(projected)
class BaseSolver:
    """Base class for experiment solvers: owns stateful components, metric
    logging, checkpointing and per-stage bookkeeping.

    NOTE(review): ``checkpoint_path``, ``history``, ``folder``, ``epoch``,
    ``formatter`` and ``current_stage`` are used without calls throughout this
    class (e.g. ``self.folder / 'checkpoint.th'``, ``len(self.history)``), so
    their stripped ``@property`` decorators have been restored.
    """

    def __init__(self) -> None:
        self.stateful = StateManager()
        self.xp = get_xp()
        # history is checkpointed and restored; cfg/sig are saved only.
        self.register_stateful('history')
        self.register_stateful('xp.cfg', 'xp.sig', write_only=True)
        self.logger = logger
        self.result_logger = ResultLogger(self.logger)
        self._current_stage: tp.Optional[str] = None
        self._current_formatter: tp.Optional[Formatter] = None
        self._start_epoch()

    def _start_epoch(self) -> None:
        """Reset the per-epoch metric accumulation."""
        self._pending_metrics: tp.Dict[str, tp.Any] = {}

    @property
    def checkpoint_path(self) -> Path:
        """Location of the checkpoint file inside the XP folder."""
        return self.folder / 'checkpoint.th'

    @property
    def history(self) -> tp.List[tp.Dict[str, tp.Any]]:
        """Per-epoch list of {stage_name: metrics} dicts."""
        return self.xp.link.history

    @property
    def folder(self) -> Path:
        """Experiment folder."""
        return self.xp.folder

    @property
    def epoch(self) -> int:
        """Current epoch, starting at 1 (completed epochs + 1)."""
        return len(self.history) + 1

    def init_tensorboard(self, **kwargs):
        """Enable tensorboard logging (proxied to the result logger)."""
        self.result_logger.init_tensorboard(**kwargs)

    def init_wandb(self, **kwargs):
        """Enable Weights & Biases logging (proxied to the result logger)."""
        self.result_logger.init_wandb(**kwargs)

    def _check_in_stage(self):
        if self._current_stage is None:
            raise RuntimeError('This function can only be called from inside a stage.')

    def log_progress(self, stage_name: str, iterable: tp.Iterable, total: tp.Optional[int] = None, updates: int = 5) -> LogProgressBar:
        """Wrap *iterable* with a progress bar logging under *stage_name*."""
        return self.result_logger.get_log_progress_bar(stage_name, iterable, total=total, updates=updates, step=self.epoch, step_name='epoch', formatter=self.formatter)

    def log_hyperparams(self, params: dict, metrics: tp.Optional[dict] = None):
        """Log hyper-parameters (and optional associated metrics)."""
        self.result_logger.log_hyperparams(params, metrics)

    def log_metrics(self, stage_name: str, metrics: dict, formatter: tp.Optional[Formatter] = None):
        """Record *metrics* for the current epoch; one call per stage per epoch.

        Raises:
            RuntimeError: if the stage already logged metrics this epoch.
        """
        if stage_name in self._pending_metrics:
            raise RuntimeError(f'Stage {stage_name} already exist for epoch {self.epoch}')
        self._pending_metrics[stage_name] = metrics
        if formatter is None:
            formatter = self.formatter
        self.result_logger.log_metrics(stage_name, metrics, step=self.epoch, step_name='epoch', formatter=formatter)

    def log_audio(self, stage_name: str, key: str, audio: tp.Any, sample_rate: int, **kwargs: tp.Any):
        self.result_logger.log_audio(stage_name, key, audio, sample_rate, self.epoch, **kwargs)

    def log_image(self, stage_name: str, key: str, image: tp.Any, **kwargs: tp.Any):
        self.result_logger.log_image(stage_name, key, image, self.epoch, **kwargs)

    def log_text(self, stage_name: str, key: str, text: str, **kwargs: tp.Any):
        self.result_logger.log_text(stage_name, key, text, self.epoch, **kwargs)

    def register_stateful(self, *args: str, write_only: bool = False):
        """Register dotted attribute paths with the state manager.

        write_only entries are written to checkpoints but never restored.
        """
        for name in args:
            owner = self
            *path, leaf = name.split('.')
            for part in path:
                owner = getattr(owner, part)
            state_source = AttributeWrapper(owner, leaf)
            self.stateful.register(name, state_source, write_only)

    def state_dict(self):
        return self.stateful.state_dict()

    def load_state_dict(self, state):
        self.stateful.load_state_dict(state)

    def commit(self, save_checkpoint: bool = True):
        """Finalize the epoch: push pending metrics to history and checkpoint."""
        self.history.append(self._pending_metrics)
        self._start_epoch()
        if is_rank_zero():
            self.xp.link.update_history(self.history)
            if save_checkpoint:
                state = self.state_dict()
                # Atomic save: write to a temp file, then rename.
                with write_and_rename(self.checkpoint_path) as f:
                    torch.save(state, f)
                self.logger.debug('Checkpoint saved to %s', self.checkpoint_path)

    def restore(self) -> bool:
        """Load the checkpoint if one exists; return True when restored."""
        if not self.checkpoint_path.exists():
            return False
        state = torch.load(self.checkpoint_path, 'cpu')
        self.load_state_dict(state)
        self.logger.debug('Checkpoint loaded from %s', self.checkpoint_path)
        return True

    def get_formatter(self, stage_name: str) -> Formatter:
        """Formatter used for *stage_name*; meant to be overridden."""
        return Formatter()

    @property
    def formatter(self) -> Formatter:
        """Formatter of the currently running stage (valid only inside one)."""
        self._check_in_stage()
        assert self._current_formatter is not None
        return self._current_formatter

    @property
    def current_stage(self) -> str:
        """Name of the currently running stage (valid only inside one)."""
        self._check_in_stage()
        assert self._current_stage is not None
        return self._current_stage

    def run_stage(self, stage_name, method, *args, **kwargs):
        """Run *method* as stage *stage_name*, timing it and logging metrics."""
        assert self._current_stage is None
        self._current_stage = stage_name
        self._current_formatter = self.get_formatter(stage_name)
        begin = time.time()
        try:
            metrics = method(*args, **kwargs)
            if metrics is None:
                metrics = {}
            metrics['duration'] = time.time() - begin
            self.log_metrics(stage_name, metrics)
        finally:
            self._current_stage = None
            self._current_formatter = None
        return metrics

    def run(self):
        raise NotImplementedError()
def _gen_fusible_view_ops_after_strided_op() -> List[Tuple[str, Callable[[Tensor], Tensor], str]]:
    """Build (name, view-op builder, dtype) cases for fusion tests after a
    strided op; float32 cases are added only on CUDA sm80+ targets."""

    def reshape_op(input_tensor: Tensor):
        # Collapse the last two dimensions into one.
        dims = input_tensor._attrs['shape']
        return ops.reshape()(input_tensor, [-1, dims[1].value() * dims[2].value()])

    def flatten_op(input_tensor: Tensor):
        return ops.flatten(start_dim=1, end_dim=-1)(input_tensor)

    cases = [
        ('reshape', reshape_op, 'float16'),
        ('flatten', flatten_op, 'float16'),
    ]
    target = detect_target()
    if target.name() == 'cuda' and int(target._arch) >= 80:
        cases.append(('reshape', reshape_op, 'float'))
    return cases
# NOTE(review): the bare "_defaults()" residue in the original text is a
# stripped @use_defaults() class decorator; it has been restored.
@use_defaults()
class TaxSchemaPublic(SoftDeletionSchema):
    """Public (non-admin) JSON:API schema for Tax resources."""

    class Meta:
        """JSON:API resource metadata."""
        type_ = 'tax'
        self_view = 'v1.tax_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    id = fields.Str(dump_only=True)
    name = fields.Str(allow_none=True, default='')
    # Tax rate as a percentage; must lie within [0, 100].
    rate = fields.Float(validate=(lambda n: (0 <= n <= 100)), allow_none=True, default=0)
    is_tax_included_in_price = fields.Boolean(default=False)
    event = Relationship(self_view='v1.tax_event', self_view_kwargs={'id': '<id>'}, related_view='v1.event_detail', related_view_kwargs={'tax_id': '<id>'}, schema='EventSchemaPublic', type_='event')
class JsMedia(JsHtml):
    """Builders for the JavaScript fragments that drive an HTML media (video)
    element: starting / stopping the camera stream, snapshots and recording.
    """

    def start(self):
        """Return JS that opens the user's camera and plays it on this element,
        falling back through the prefixed getUserMedia variants."""
        js_tmpl = ('\nvar mediaConfig = { video: true };\nif(navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {\n navigator.mediaDevices.getUserMedia(mediaConfig).then(function(stream) {\n%(varId)s.srcObject = stream; %(varId)s.play();});}\nelse if(navigator.getUserMedia) { \nnavigator.getUserMedia(mediaConfig, function(stream) {\n %(varId)s.src = stream; %(varId)s.play();}, errBack);\n} else if(navigator.webkitGetUserMedia) {\nnavigator.webkitGetUserMedia(mediaConfig, function(stream){\n %(varId)s.src = window.webkitURL.createObjectURL(stream); %(varId)s.play();}, errBack);\n} else if(navigator.mozGetUserMedia) {\nnavigator.mozGetUserMedia(mediaConfig, function(stream){\n %(varId)s.src = window.URL.createObjectURL(stream); %(varId)s.play();}, errBack);} ')
        return js_tmpl % {'varId': self.varId}

    def stop(self):
        """Return JS that stops every track of the element's current stream."""
        js_tmpl = ('var stream = %s.srcObject; var tracks = stream.getTracks();\nfor (var i = 0; i < tracks.length; i++) {var track = tracks[i]; track.stop()}\nvideo.srcObject = null;')
        return js_tmpl % self.varId

    def play(self):
        """Return JS that resumes playback of the element."""
        return '%s.play()' % self.varId

    def takepicture(self, width: int = 50, height: int = 50):
        """Return JS that grabs the current frame into a canvas snapshot.

        :param width: Snapshot width in pixels.
        :param height: Snapshot height in pixels.
        """
        js_tmpl = ('\nvar canvas = document.createElement("canvas"); canvas.width = %(width)s; canvas.height = %(height)s;\ncanvas.getContext(\'2d\').drawImage(%(varId)s, 0, 0, canvas.width, canvas.height);\nvar data = canvas.toDataURL(\'image/png\'); photo.setAttribute(\'src\', data)' % {'varId': self.varId, 'width': width, 'height': height})
        return js_tmpl

    def record(self, start: bool = True):
        """Return JS that starts recording the stream (or stops the recorder).

        :param start: When False, return the snippet that stops the recorder.
        """
        if not start:
            return ' window.recorder.stop() '
        js_tmpl = ('\nvar stream = %(varId)s.srcObject;\nwindow.recorder = new MediaRecorder(stream, {mimeType: \'video/webm\'});\n\nconst chunks = [];\nwindow.recorder.ondataavailable = e => chunks.push(e.data);\nwindow.recorder.onstop = e => {\n const blob = new Blob(chunks, { type: chunks[0].type });\n stream.getVideoTracks()[0].stop();\n\n filename="yourCustomFileName"\n if(window.navigator.msSaveOrOpenBlob) {window.navigator.msSaveBlob(blob, filename)}\n else{\n var elem = window.document.createElement(\'a\');\n elem.href = window.URL.createObjectURL(blob); elem.download = filename; \n document.body.appendChild(elem); elem.click(); document.body.removeChild(elem)}\n}; window.recorder.start()')
        return js_tmpl % {'varId': self.varId}
class DeclProjMixA(SimpleEntity, ProjectMixin):
    """Declarative entity combining ``SimpleEntity`` with ``ProjectMixin``.

    NOTE(review): presumably a SQLAlchemy declarative test fixture using
    joined-table inheritance keyed on ``SimpleEntities.id`` -- confirm against
    the surrounding test module.
    """
    __tablename__ = 'DeclProjMixAs'
    __mapper_args__ = {'polymorphic_identity': 'DeclProjMixA'}
    # Primary key shared with the parent SimpleEntities table.
    a_id = Column('id', Integer, ForeignKey('SimpleEntities.id'), primary_key=True)

    def __init__(self, **kwargs):
        super(DeclProjMixA, self).__init__(**kwargs)
        # The mixin is initialized explicitly with the same kwargs, in
        # addition to the base-class chain above.
        ProjectMixin.__init__(self, **kwargs)
def download_pdf(link, location, name):
    """Download the PDF at ``link`` and save it as ``name`` inside ``location``.

    Args:
        link: URL of the PDF to fetch.
        location: directory in which to store the file.
        name: file name to write within ``location``.

    Raises:
        HTTPError: when the server responds with an error status.
        socket.timeout: when the connection times out.
    """
    try:
        response = requests.get(link)
        # Surface HTTP error statuses: requests.get() does not raise on
        # 4xx/5xx on its own, so without this call the HTTPError handler
        # below was unreachable.
        # NOTE(review): assumes HTTPError here is requests.exceptions.HTTPError
        # (not urllib.error.HTTPError) -- confirm the module's import.
        response.raise_for_status()
        # The with-block closes the file; no explicit close() needed.
        with open(os.path.join(location, name), 'wb') as f:
            f.write(response.content)
    except HTTPError:
        print('>>> Error 404: cannot be downloaded!\n')
        raise
    except socket.timeout:
        print(' '.join(("can't download", link, 'due to connection timeout!')))
        raise
class layernorm_sigmoid_mul(Operator):
    """Fused operator replacing a layernorm -> sigmoid -> mul subgraph.

    Collapses the pattern ``mul(sigmoid(layer_norm(x)), x)`` (either operand
    order for mul) into a single op, rewiring the tensors of the three
    original operators onto this fused node.
    """

    def __init__(self, layer_norm: Operator, sigmoid: Operator, mul: Operator) -> None:
        super().__init__()
        self._attrs['op'] = 'layernorm_sigmoid_mul'
        self._attrs['has_profiler'] = False
        # Refuse to fuse unless the three ops actually form the expected pattern.
        assert layernorm_sigmoid_mul.is_valid(layer_norm, sigmoid, mul)
        self._update_inputs_outputs(layer_norm, sigmoid, mul)
        self._set_depth()

    # NOTE(review): invoked as an unbound function on the class (no `self`);
    # an @staticmethod decorator would make that explicit.
    def is_valid(layer_norm: Operator, sigmoid: Operator, mul: Operator) -> bool:
        """Return True iff sigmoid consumes layer_norm's output and mul
        multiplies sigmoid's output with layer_norm's input (either order)."""
        if (sigmoid._attrs['inputs'][0] != layer_norm._attrs['outputs'][0]):
            return False
        if (len(mul._attrs['inputs']) != 2):
            return False
        return (((mul._attrs['inputs'][0] == sigmoid._attrs['outputs'][0]) and (mul._attrs['inputs'][1] == layer_norm._attrs['inputs'][0])) or ((mul._attrs['inputs'][1] == sigmoid._attrs['outputs'][0]) and (mul._attrs['inputs'][0] == layer_norm._attrs['inputs'][0])))

    def _update_inputs_outputs(self, layer_norm, sigmoid, mul):
        """Adopt layer_norm's inputs and mul's outputs, then detach the
        intermediate tensors produced by the fused-away operators."""
        self._attrs['inputs'] = layer_norm._attrs['inputs']
        # Carry over layernorm's configuration onto the fused op.
        self._attrs['gamma_constant'] = layer_norm._attrs['gamma_constant']
        self._attrs['beta_constant'] = layer_norm._attrs['beta_constant']
        self._attrs['normalized_shape'] = layer_norm._attrs['normalized_shape']
        self._attrs['eps'] = layer_norm._attrs['eps']
        self._attrs['outputs'] = mul._attrs['outputs']
        self._attrs['output_accessors'] = [TensorAccessor(output_tensor) for output_tensor in self._attrs['outputs']]
        self._attrs['input_accessors'] = [TensorAccessor(self._attrs['inputs'][0])]
        # Re-point the input tensors' consumers from the old ops to this one.
        for input_tensor in self._attrs['inputs']:
            input_tensor._attrs['dst_ops'].discard(layer_norm)
            input_tensor._attrs['dst_ops'].discard(mul)
            input_tensor._attrs['dst_ops'].add(self)
        assert (len(self._attrs['outputs']) == 1)
        output_tensor = self._attrs['outputs'][0]
        output_tensor._attrs['src_ops'] = StableSet([self])
        # Output shape values mirror the (elementwise) input's shape values.
        x = self._attrs['inputs'][0]
        for (i, shape_var) in enumerate(output_tensor._attrs['shape']):
            shape_var._attrs['values'] = x._attrs['shape'][i]._attrs['values']
        # Disconnect the now-dead intermediate tensors from the graph.
        sigmoid._attrs['inputs'][0]._attrs['src_ops'] = StableSet()
        sigmoid._attrs['inputs'][0]._attrs['dst_ops'] = StableSet()
        sigmoid._attrs['outputs'][0]._attrs['src_ops'] = StableSet()
        sigmoid._attrs['outputs'][0]._attrs['dst_ops'] = StableSet()

    def __call__(self):
        """Return the fused op's single output tensor."""
        return self._attrs['outputs'][0]

    def _get_op_attributes(self):
        raise NotImplementedError('layernorm_sigmoid_mul get op attribute not implemented')

    def gen_function(self) -> str:
        """Generate backend code via the registered target-specific generator."""
        target = backend.target.Target.current()
        func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        return func(self._attrs)

    def _args_for_pseudo_code(self):
        return [f"normalized_shape={[s.symbolic_value() for s in self._attrs['normalized_shape']]}"]
def sendNotification(snmpEngine, authData, transportTarget, contextData, notifyType, *varBinds, **options):
    """Send an SNMP notification (TRAP or INFORM) and return an asyncio Future.

    The future resolves to ``(errorIndication, errorStatus, errorIndex,
    varBinds)``. Supported option: ``lookupMib`` (default True) -- resolve
    response var-bindings against the loaded MIBs. TRAPs are unconfirmed, so
    for ``notifyType == 'trap'`` the future is resolved immediately with an
    empty result.
    """
    def __cbFun(snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBinds, cbCtx):
        # Response callback: translate raw var-binds and complete the future.
        (lookupMib, future) = cbCtx
        if future.cancelled():
            return
        try:
            varBindsUnmade = VB_PROCESSOR.unmakeVarBinds(snmpEngine.cache, varBinds, lookupMib)
        except Exception as e:
            future.set_exception(e)
        else:
            future.set_result((errorIndication, errorStatus, errorIndex, varBindsUnmade))
    notifyName = LCD.configure(snmpEngine, authData, transportTarget, notifyType, contextData.contextName)
    future = asyncio.Future()
    ntforg.NotificationOriginator().sendVarBinds(snmpEngine, notifyName, contextData.contextEngineId, contextData.contextName, VB_PROCESSOR.makeVarBinds(snmpEngine.cache, varBinds), __cbFun, (options.get('lookupMib', True), future))
    if (notifyType == 'trap'):
        def __trapFun(future):
            if future.cancelled():
                return
            future.set_result((None, 0, 0, []))
        # No confirmation will ever arrive for a TRAP; resolve on next loop tick.
        loop = asyncio.get_event_loop()
        loop.call_soon(__trapFun, future)
    return future
def test_switch_branches():
    """switch_branches() must negate the condition and swap the two children."""
    interface = AbstractSyntaxInterface()
    true_branch = interface._add_code_node([Assignment(var('e'), const(9))])
    false_branch = interface._add_code_node([Assignment(var('d'), const(9))])
    symbol = LogicCondition.initialize_symbol('a', interface.factory.logic_context)
    cond_node = interface._add_condition_node_with(symbol, true_branch, false_branch)

    interface.switch_branches(cond_node)

    assert len(interface) == 5
    assert cond_node.condition == ~LogicCondition.initialize_symbol('a', interface.factory.logic_context)
    assert cond_node.true_branch_child == false_branch
    assert cond_node.false_branch_child == true_branch
# NOTE(review): the next line appears truncated by extraction -- it was most
# likely a decorator application such as `@_data_source('homography')`; as
# written it calls the registrar and discards the result. Confirm upstream.
_data_source('homography')
class HomographyDataSource(DataSource):
    """Data source exposing per-frame homographies loaded from disk."""

    def __init__(self, root: Path, image_hw: Tuple[(int, int)], subpath: str):
        super().__init__(root, image_hw)
        (self.homography, self.homography_size, self.homography_bounds) = self.load_homography((root / subpath))

    def __len__(self) -> int:
        return len(self.homography)

    def __getitem__(self, idx: int) -> Dict[(str, Tensor)]:
        """Return the homography for frame ``idx``."""
        return {'homography': self.homography[idx]}

    def get_keys(self) -> List[str]:
        return ['homography']

    def get_global_data(self) -> Dict[(str, Any)]:
        """Expose the full homography stack plus its size/bounds metadata."""
        return {'homography': self.homography, 'homography_size': self.homography_size, 'homography_bounds': self.homography_bounds}

    def load_homography(self, path: Path) -> Tensor:
        """Load homographies, their reference size, and bounds from ``path``.

        Reads ``homographies-first-frame.npy`` (the matrices), ``size.npy``
        (reference H, W) and the second line of ``homographies-first-frame.txt``
        (four bounds values after a one-token prefix).
        """
        homographies = np.load((path / 'homographies-first-frame.npy'))
        (H_homo, W_homo) = np.load((path / 'size.npy'))
        with open((path / 'homographies-first-frame.txt'), 'r') as f:
            _ = f.readline()
            bounds = f.readline()
        bounds = [float(v) for v in bounds.rstrip().split(' ')[1:]]
        assert (len(bounds) == 4), f'Failed to parse bounds (length is not 4 but {len(bounds)})'
        return (torch.from_numpy(homographies).float(), [H_homo, W_homo], bounds)
def get_RS_and_optimization_config(enable_filtering, enable_optimization):
    """Build the RANSAC scalar-part-inliers configuration plus an optimization
    config, returning (algorithm_name, time_alignment_config, hand_eye_config,
    optimization_config)."""
    time_alignment_config, hand_eye_config = get_RANSAC_scalar_part_inliers_config(enable_filtering)
    opt_config = OptimizationConfig()
    opt_config.enable_optimization = enable_optimization
    opt_config.optimization_only = False
    # Algorithm name reflects whether the optimization stage is enabled.
    suffix = '_opt' if enable_optimization else '_no_opt'
    algorithm_name = hand_eye_config.algorithm_name + suffix
    return (algorithm_name, time_alignment_config, hand_eye_config, opt_config)
def main(arg, av):
    """Plot SubGhz raw signal data from a command file and save it as a PNG.

    Args:
        arg: parsed argparse namespace (filename, seek, preamble, split_sig,
            length, numplots).
        av: remaining positional arguments; the first is used as the input
            file when ``--filename`` is absent.
    """
    if arg.filename:
        fname = arg.filename
    elif av:
        fname = av.pop(0)
    else:
        # Bug fix: previously fell through with `fname` unbound and crashed
        # with NameError further down.
        raise SystemExit('no input file given')
    raw_dat = load_cmd_data(fname)
    if arg.seek:
        raw_dat = raw_dat[:arg.seek]
    # Heuristic pause threshold: twice the standard deviation of the absolute
    # samples, unless an explicit preamble threshold was supplied.
    abs_samples = list(map(abs, raw_dat))
    max_pause = (int(statistics.stdev(abs_samples)) * 2)
    max_pause = (arg.preamble or max_pause)
    if verbose:
        print(f'max_pause {max_pause}')
    if (arg.preamble or arg.split_sig):
        dat_list = split_data_str(raw_dat, max_val=max_pause)
    else:
        dat_list = [raw_dat]
    if verbose:
        print(f'dat_list {len(dat_list)}')
    min_length = (arg.length or MIN_BIT_LEN)
    plot_list = []
    for x in dat_list:
        # Keep only signals that are long enough after scaling.
        y = convert_dat(x, divider=DATA_SCALE)
        if (len(y) >= min_length):
            plot_list.append(convert_dat(x, divider=10))
    if arg.numplots:
        plot_list = plot_list[:arg.numplots]
    if not plot_list:
        # Bug fix: max() below raised ValueError on an empty sequence.
        raise SystemExit('no signals longer than the minimum length to plot')
    list_lengths = [len(x) for x in plot_list]
    max_len = max(list_lengths)
    if verbose:
        # Bug fix: the f-string previously contained a baked-in literal
        # ('max_len 73,126') instead of the computed value.
        print(f'max_len {max_len:,}')
    plot_x = np.arange((max_len * DATA_SCALE), step=DATA_SCALE)
    plt.style.use('dark_background')
    p = plt.figure()
    ax = p.gca()
    ax.get_yaxis().set_visible(False)
    plt.xlabel('s')
    plt.title('SubGhz Raw Signal')
    # Shrink the figure when only a few traces are plotted.
    height = 6
    pn = len(plot_list)
    if (pn < 8):
        height = (2 + (pn * 0.5))
    plt.gcf().set_size_inches(6, height)
    # Stack each trace with a vertical offset so they do not overlap.
    for y_off, d in enumerate(plot_list):
        d_len = len(d)
        plot_y = (np.array(d) + (y_off * int((HIGH_PLOT_VAL * 1.3))))
        plt.plot(plot_x[:d_len], plot_y)
    outfile = os.path.basename(fname)
    outfile = (os.path.splitext(outfile)[0] + '.png')
    print(f'saving plot as {outfile}')
    plt.savefig(outfile, pad_inches=0.3)
    plt.show()
# NOTE(review): the next line appears truncated by extraction -- it is most
# likely the tail of a `@pytest.mark.parametrize(...)` decorator whose prefix
# was lost; as written it is not valid standalone Python. Restore upstream.
.parametrize('current_data, reference_data, metric, expected_result', ((pd.DataFrame({'col': []}), None, ColumnValueRangeMetric(column_name='col', left=0, right=10.3), ColumnValueRangeMetricResult(column_name='col', left=0, right=10.3, current=ValuesInRangeStat(number_in_range=0, number_not_in_range=0, share_in_range=0, share_not_in_range=0, number_of_values=0, distribution=Distribution(x=np.array([0.0, 1.0]), y=np.array([0]))), reference=None)), (pd.DataFrame({'col': [1, 2, np.NAN, 3, (- 3.2)]}), pd.DataFrame({'col': [(- 1.5), 2, np.NAN, 3, 20]}), ColumnValueRangeMetric(column_name='col'), ColumnValueRangeMetricResult(column_name='col', left=(- 1.5), right=20, current=ValuesInRangeStat(number_in_range=3, number_not_in_range=1, share_in_range=0.75, share_not_in_range=0.25, number_of_values=4, distribution=Distribution(x=np.array([(- 3.2), 0., 3., 6., 10., 13., 16., 20.0]), y=np.array([1, 3, 0, 0, 0, 0, 0]))), reference=ValuesInRangeStat(number_in_range=4, number_not_in_range=0, share_in_range=1, share_not_in_range=0, number_of_values=4, distribution=Distribution(x=np.array([(- 3.2), 0., 3., 6., 10., 13., 16., 20.0]), y=np.array([1, 2, 0, 0, 0, 0, 1])))))))
def test_data_quality_values_in_range_metric_success(current_data: pd.DataFrame, reference_data: pd.DataFrame, metric: ColumnValueRangeMetric, expected_result: ColumnValueRangeMetricResult) -> None:
    """Run a report for each parametrized case and compare the computed
    value-range metric result against the expected one."""
    report = Report(metrics=[metric])
    report.run(current_data=current_data, reference_data=reference_data, column_mapping=ColumnMapping())
    result = metric.get_result()
    smart_assert_equal(result, expected_result)
def test_query_more_multibatch_negative():
    """A multi-batch query_more whose second batch has no body should yield
    None as the first result element."""
    for response_name in ('login_response_200', 'query_more_multibatch_0_200',
                          'query_more_multibatch_1_no_body', 'api_version_response_200'):
        testutil.add_response(response_name)
    sf_client = testutil.get_client()
    result = sf_client.query_more('SELECT Id FROM Lead')
    assert result[0] is None
class YaraGenerator(object):
    """Builds a YARA rule from disassembled code chunks and raw data chunks.

    Code chunks are disassembled with capstone and rendered as hex signatures;
    immediate/displacement operand bytes are wildcarded ('?') depending on
    ``sig_mode`` ('loose' wildcards both, 'normal' wildcards displacements,
    anything else only wildcards jump/call targets).
    """

    def __init__(self, sig_mode, instruction_set, instruction_mode, rule_name='generated_rule', do_comment=True):
        self.instruction_set = instruction_set
        self.instruction_mode = instruction_mode
        self.do_comment_sig = do_comment
        self.sig_mode = sig_mode
        self.rule_name = rule_name
        self.yr_rule = YaraRule()
        self._signature = ''
        self._chunks = []

    def add_chunk(self, data, offset=0, is_data=False):
        """Queue a chunk of bytes; ``is_data`` chunks are embedded verbatim
        instead of being disassembled."""
        self._chunks.append(DataChunk(data, offset=offset, is_data=is_data))

    def _hex_opcode(self, opcode_list):
        """Render capstone's fixed-size opcode array as spaced upper-case hex,
        skipping the zero padding entries."""
        return ' '.join((format(x, '02x').upper() for x in opcode_list if (x != 0)))

    def _get_opcode_size(self, opcode_list):
        """Number of meaningful (non-zero) opcode bytes."""
        return sum(1 for bt in opcode_list if bt != 0)

    def _wilcard_bytes(self, data, offset, length):
        """Replace ``length`` hex nibbles starting at ``offset`` with '?'.

        (Name kept as-is -- note the 'wilcard' typo -- for compatibility.)
        """
        for i in range(offset, (offset + length)):
            data[i] = '?'
        return data

    def _process_instruction(self, ins):
        """Return (signature_hex, comment) for one disassembled instruction,
        wildcarding operand bytes according to ``sig_mode``."""
        ins_str = '{} {}'.format(ins.mnemonic, ins.op_str)
        opcode_hex_str = self._hex_opcode(ins.opcode)
        opcode_size = self._get_opcode_size(ins.opcode)
        operand_total_size = (len(ins.bytes) - opcode_size)
        ins_hex = binascii.hexlify(ins.bytes).upper()
        ins_hex = ins_hex.decode('ascii')
        log.debug('Hex:\t\t {}'.format(ins_hex))
        log.debug('Opc. size:\t {}'.format(opcode_size))
        log.debug('Opcode:\t\t {}, {}'.format(opcode_hex_str, str(ins.opcode)))
        log.debug('rex:\t {}'.format(hex(ins.rex)))
        log.debug('Ins:\t\t {}'.format(ins_str))
        # Comment line: "<address> <hex bytes, padded> <mnemonic operands>".
        ins_comment = '{}'.format(ins_hex)
        ins_comment = ins_comment.ljust(30)
        ins_comment += ins_str
        ins_comment = '{} {}'.format(hex(ins.address), ins_comment)
        # Offsets/sizes are in bytes; the signature is nibbles, hence "* 2".
        ins_hex_list = list(ins_hex)
        if self.should_wildcard_imm_operand(ins):
            ins_hex_list = self._wilcard_bytes(ins_hex_list, (ins.imm_offset * 2), (ins.imm_size * 2))
        if self.should_wildcard_disp_operand(ins):
            ins_hex_list = self._wilcard_bytes(ins_hex_list, (ins.disp_offset * 2), (ins.disp_size * 2))
        signature = ''.join(ins_hex_list)
        return (signature, ins_comment)

    def is_jmp_or_call(self, ins):
        """True when the instruction belongs to capstone's jump or call groups."""
        return any(ins.group_name(group) in ('jump', 'call') for group in ins.groups)

    def should_wildcard_disp_operand(self, ins):
        """Wildcard displacements in loose/normal mode, else only for jumps/calls."""
        if (self.sig_mode in ['loose', 'normal']):
            return True
        return self.is_jmp_or_call(ins)

    def should_wildcard_imm_operand(self, ins):
        """Wildcard immediates only in loose mode, else only for jumps/calls."""
        if (self.sig_mode in ['loose']):
            return True
        return self.is_jmp_or_call(ins)

    def format_hex(self, data):
        """Group a nibble string into space-separated byte pairs."""
        n = 2
        return ' '.join([data[i:(i + n)] for i in range(0, len(data), n)])

    def generate_rule(self):
        """Disassemble/encode all queued chunks and return the finished rule."""
        self.yr_rule.rule_name = self.rule_name
        self.yr_rule.metas['generated_by'] = '"mkYARA - By Jelle Vergeer"'
        self.yr_rule.metas['date'] = '"{}"'.format(datetime.now().strftime('%Y-%m-%d %H:%M'))
        self.yr_rule.metas['version'] = '"1.0"'
        md = Cs(self.instruction_set, self.instruction_mode)
        md.detail = True
        md.syntax = CS_OPT_SYNTAX_INTEL
        for chunk_nr, chunk in enumerate(self._chunks, start=1):
            chunk_id = '$chunk_{}'.format(chunk_nr)
            chunk_signature = ''
            chunk_comment = ''
            if (chunk.is_data is False):
                disasm = md.disasm(chunk.data, chunk.offset)
                for ins in disasm:
                    (rule_part, comment) = self._process_instruction(ins)
                    rule_part = self.format_hex(rule_part)
                    chunk_signature += (rule_part + '\n')
                    chunk_comment += (comment + '\n')
                self.yr_rule.add_string(chunk_id, chunk_signature, StringType.HEX)
                if self.do_comment_sig:
                    self.yr_rule.comments.append(chunk_comment)
            else:
                # Bug fix: `chunk.data.encode('hex')` is Python 2 only --
                # bytes have no .encode() and the 'hex' codec is gone in
                # Python 3. hexlify keeps the same lower-case output.
                rule_part = self.format_hex(binascii.hexlify(chunk.data).decode('ascii'))
                self.yr_rule.add_string(chunk_id, rule_part, StringType.HEX)
        self.yr_rule.condition = 'any of them'
        return self.yr_rule
class OptionSeriesGaugeSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Generated options wrapper for a gauge series' sonification tremolo
    mapping; each accessor lazily creates a typed sub-options object under
    the corresponding config key."""

    def depth(self) -> 'OptionSeriesGaugeSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        """Sub-options controlling the tremolo depth mapping."""
        return self._config_sub_data('depth', OptionSeriesGaugeSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesGaugeSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        """Sub-options controlling the tremolo speed mapping."""
        return self._config_sub_data('speed', OptionSeriesGaugeSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
def load_het_snps(vcf_fname, sample_id=None, normal_id=None, min_variant_depth=20, zygosity_freq=None, tumor_boost=False):
    """Load heterozygous variants from a VCF for allele-frequency analysis.

    Reads the VCF (skipping somatic records and low-depth variants),
    optionally infers zygosity from allele frequency, drops records that look
    somatic based on tumor/normal genotypes, and keeps only heterozygous
    variants. Returns the filtered variant array, or None when no VCF given.
    """
    if (vcf_fname is None):
        return None
    varr = tabio.read(vcf_fname, 'vcf', sample_id=sample_id, normal_id=normal_id, min_depth=min_variant_depth, skip_somatic=True)
    # If the normal sample carries no genotype calls at all, fall back to
    # inferring zygosity from allele frequency with a default threshold.
    if ((zygosity_freq is None) and ('n_zygosity' in varr) and (not varr['n_zygosity'].any())):
        logging.warning("VCF normal sample's genotypes are all 0/0 or missing; inferring genotypes from allele frequency instead")
        zygosity_freq = 0.25
    if (zygosity_freq is not None):
        varr = varr.zygosity_from_freq(zygosity_freq, (1 - zygosity_freq))
    if ('n_zygosity' in varr):
        # Present in tumor but absent in normal => likely somatic. Skip these
        # unless *all* records match (which would suggest missing normal calls).
        somatic_idx = ((varr['zygosity'] != 0.0) & (varr['n_zygosity'] == 0.0))
        if (somatic_idx.any() and (not somatic_idx.all())):
            logging.info((f'Skipping {somatic_idx.sum()} additional somatic records ' + 'based on T/N genotypes'))
            varr = varr[(~ somatic_idx)]
    orig_len = len(varr)
    varr = varr.heterozygous()
    logging.info('Kept %d heterozygous of %d VCF records', len(varr), orig_len)
    if tumor_boost:
        # Replace raw allele frequencies with TumorBoost-adjusted ones.
        varr['alt_freq'] = varr.tumor_boost()
    return varr
class LEAFDataLoader(IFLDataLoader):
    """Data loader for LEAF-style federated datasets: each dataset entry is
    one user's (inputs, labels); every user's examples are shuffled and
    served as batched feature/label generators."""

    SEED = 2137
    random.seed(SEED)

    def __init__(self, train_dataset: Dataset, eval_dataset: Dataset, test_dataset: Dataset, batch_size: int, drop_last: bool=False):
        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.test_dataset = test_dataset
        self.batch_size = batch_size
        self.drop_last = drop_last

    def fl_train_set(self, **kwargs) -> Iterable[Dict[str, Generator]]:
        """Per-user training batches; honors the configured drop_last."""
        yield from self._batchify(self.train_dataset, self.drop_last)

    def fl_eval_set(self, **kwargs) -> Iterable[Dict[str, Generator]]:
        """Per-user evaluation batches; never drops a partial batch."""
        yield from self._batchify(self.eval_dataset, drop_last=False)

    def fl_test_set(self, **kwargs) -> Iterable[Dict[str, Generator]]:
        """Per-user test batches; never drops a partial batch."""
        yield from self._batchify(self.test_dataset, drop_last=False)

    def _batchify(self, dataset: Dataset, drop_last=False) -> Generator[Dict[str, Generator], None, None]:
        for user_inputs, user_labels in dataset:
            # Shuffle inputs and labels together so pairs stay aligned.
            paired = list(zip(user_inputs, user_labels))
            random.shuffle(paired)
            shuffled_inputs, shuffled_labels = zip(*paired)
            yield {
                'features': batchify(shuffled_inputs, self.batch_size, drop_last),
                'labels': batchify(shuffled_labels, self.batch_size, drop_last),
            }
class LoggingMiddleware(MiddlewareMixin):
    """Django middleware that logs one structured line per request/response.

    NOTE(review): request state (``start``, ``log``) is stored on the
    middleware instance, which Django shares across requests -- under
    concurrent requests these fields can interleave. Confirm deployment is
    single-threaded per instance before relying on the timings.
    """
    server_logger = logging.getLogger('server')
    # Per-request state, populated in process_request (see NOTE above).
    start = None
    log = None

    def process_request(self, request):
        """Record the start time and the request metadata/body."""
        self.start = time.perf_counter()
        self.log = {'path': request.path, 'remote_addr': get_remote_addr(request), 'host': request.get_host(), 'method': request.method, 'timestamp': now().strftime('%m/%d/%y %H:%M:%S')}
        try:
            self.log['request'] = getattr(request, '_body', request.body).decode('ASCII')
        except UnicodeDecodeError:
            # Keep the raw bytes when the body is not ASCII-decodable.
            self.log['request'] = getattr(request, '_body', request.body)

    def process_response(self, request, response):
        """Log the response at a level derived from its status code."""
        status_code = response.status_code
        self.log['status_code'] = status_code
        self.log['response_ms'] = self.get_response_ms()
        self.log['traceback'] = None
        if response.headers:
            # Optional cache diagnostics propagated via response headers.
            if (('key' in response.headers) and (len(response.headers['key']) >= 2)):
                self.log['cache_key'] = response.headers['key'][1]
            if (('cache-trace' in response.headers) and (len(response.headers['cache-trace']) >= 2)):
                self.log['cache_trace'] = response.headers['cache-trace'][1]
        if (100 <= status_code < 400):
            self.log['status'] = 'INFO'
            self.server_logger.info(self.get_message_string(), extra=self.log)
        elif (400 <= status_code < 500):
            self.log['status'] = 'WARNING'
            try:
                self.log['error_msg'] = str(response.data)
            except AttributeError:
                # Non-DRF responses have no .data; fall back to the raw body.
                self.log['error_msg'] = response.getvalue().decode('ASCII')
            error_msg_str = (('[' + self.log['error_msg']) + ']')
            self.server_logger.warning('{} {}'.format(self.get_message_string(), error_msg_str), extra=self.log)
        else:
            # 5xx responses are logged from process_exception instead.
            pass
        return response

    def get_message_string(self):
        """Format the single-line log message from the collected fields."""
        return '[{timestamp}] [{status}] [{method}] [{path} : {status_code}] [{remote_addr}] [{host}] [{response_ms}]'.format(**self.log)

    def process_exception(self, request, exception):
        """Log unhandled exceptions as 500s with a traceback."""
        self.log['status_code'] = 500
        self.log['response_ms'] = self.get_response_ms()
        self.log['status'] = 'ERROR'
        self.log['timestamp'] = now().strftime('%d/%m/%y %H:%M:%S')
        self.log['traceback'] = traceback.format_exc()
        self.server_logger.error('%s', self.get_message_string(), extra=self.log)

    def get_response_ms(self):
        """Elapsed time since process_request, in whole milliseconds."""
        duration = (time.perf_counter() - self.start)
        return int((duration * 1000))
def _get_page(session, js_assets, css_assets, link, export):
    """Compose the HTML index page for a session from its JS/CSS assets.

    ``link`` 0/1 embeds asset source inline (and prepends a table of
    contents); other values reference the shared asset path. ``export``
    switches to relative asset paths and adds a loading-spinner tick after
    each script.
    """
    pre_path = ('flexx/assets' if export else '/flexx/assets')
    codes = []
    # CSS first, so styles are in place before scripts execute.
    for assets in [css_assets, js_assets]:
        for asset in assets:
            if (link in (0, 1)):
                html = asset.to_html('{}', link)
            elif asset.name.endswith(('-info.js', '-export.js')):
                # Session-specific assets are always embedded, never linked.
                html = asset.to_html('', 0)
            else:
                html = asset.to_html((pre_path + '/shared/{}'), link)
            codes.append(html)
            if (export and (assets is js_assets)):
                # Advance the exported page's spinner after each script loads.
                codes.append('<script>window.flexx.spin();</script>')
        codes.append('')
    codes.append(('<script>flexx.create_session("%s", "%s");</script>\n' % (session.app_name, session.id)))
    src = INDEX
    if (link in (0, 1)):
        asset_names = [a.name for a in (css_assets + js_assets)]
        toc = (('<!-- Contents:\n\n- ' + '\n- '.join(asset_names)) + '\n\n-->')
        codes.insert(0, toc)
        src = src.replace('ASSET-HOOK', '\n\n\n'.join(codes))
    else:
        src = src.replace('ASSET-HOOK', '\n'.join(codes))
    return src
def get_snowflake_schemas(engine: Engine) -> Dict[str, Dict[str, List[str]]]:
    """Return {schema: {table: [column, ...]}} for all included schemas.

    Uses Snowflake's SHOW commands; schemas are filtered through
    include_dataset_schema() before being introspected.
    """
    schema_names = [row[1] for row in engine.execute(text('SHOW SCHEMAS'))]
    metadata: Dict[str, Dict[str, List]] = {}
    for schema_name in schema_names:
        if not include_dataset_schema(schema=schema_name, database_type=engine.dialect.name):
            continue
        metadata[schema_name] = {}
        table_names = [row[1] for row in engine.execute(text(f'SHOW TABLES IN "{schema_name}"'))]
        for table_name in table_names:
            column_rows = engine.execute(text(f'SHOW COLUMNS IN "{schema_name}"."{table_name}"'))
            metadata[schema_name][table_name] = [row[2] for row in column_rows]
    return metadata
class FusedType():
    """Mixin for template-parameterized types that can be expanded into all
    concrete backend type combinations."""

    def is_fused_type(self):
        raise NotImplementedError

    def get_all_formatted_backend_types(self, type_formatter):
        """Format this type once per combination of template-parameter values."""
        template_params = self.get_template_parameters()
        value_sets = {param.__name__: param.values for param in template_params}
        param_names = tuple(value_sets)
        formatted = []
        # Cartesian product over every template parameter's candidate values.
        for combination in itertools.product(*value_sets.values()):
            bindings = dict(zip(param_names, combination))
            formatted.append(format_type_as_backend_type(self, type_formatter, **bindings))
        return formatted
def save_settings(path, settings):
    """Write the settings dict to ``path`` as commented YAML-ish text, one
    documented section per known key, followed by the CliMetLab version."""
    LOG.debug('Saving settings')
    banner = '-' * 76
    with open(path, 'w') as f:
        print('# This file is automatically generated', file=f)
        print(file=f)
        for key, value in sorted(settings.items()):
            help_entry = SETTINGS_AND_HELP.get(key)
            # Only keys with registered help get written out.
            if help_entry:
                print(file=f)
                print('#', banner, file=f)
                help_entry.save(key, value, f)
                print(file=f)
        print('#', banner, file=f)
        print('# Version of CliMetLab', file=f)
        print(file=f)
        yaml.dump({'version': VERSION}, f, default_flow_style=False)
        print(file=f)
def test_subset_COLRv1_downgrade_version(colrv1_path):
    """Subsetting to glyphs that only use COLRv0 features should downgrade
    the COLR table version to 0."""
    subset_path = (colrv1_path.parent / (colrv1_path.name + '.subset'))
    subset.main([str(colrv1_path), '--glyph-names', f'--output-file={subset_path}', '--unicodes=E004'])
    subset_font = TTFont(subset_path)
    # Only the E004 glyph and its color layers survive the subset.
    assert (set(subset_font.getGlyphOrder()) == {'.notdef', 'uniE004', 'glyph00016', 'glyph00017', 'glyph00018'})
    assert ('COLR' in subset_font)
    assert (subset_font['COLR'].version == 0)
class TestAPIBNFCodeViews(ApiTestBase):
    """API tests for the /bnf_code search endpoint: content negotiation,
    matching by name/code/prefix, exact matching, and exclusion of inactive
    chemicals/sections/presentations."""
    api_prefix = '/api/1.0'

    def assertNotJson(self, content):
        """Assert that ``content`` is NOT parseable as JSON."""
        try:
            json.loads(content)
            raise AssertionError(('Expected %s... to be non-JSON' % content[:10]))
        except ValueError:
            pass

    def assertJson(self, content):
        """Assert that ``content`` IS parseable as JSON."""
        try:
            json.loads(content)
        except ValueError:
            raise AssertionError(('Expected %s... to be JSON' % content[:10]))

    def test_header_and_query_string_json_negotiation(self):
        """Only the ?format= query parameter selects JSON; Accept headers
        are ignored either way."""
        url = ('%s/bnf_code?q=lor&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertJson(response.content)
        url = ('%s/bnf_code?q=lor&format=json' % self.api_prefix)
        response = self.client.get(url, {}, follow=True, HTTP_ACCEPT='text/html')
        self.assertJson(response.content)
        url = ('%s/bnf_code?q=lor' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertNotJson(response.content)
        url = ('%s/bnf_code?q=lor' % self.api_prefix)
        response = self.client.get(url, {}, follow=True, HTTP_ACCEPT='application/json')
        self.assertNotJson(response.content)

    def test_api_view_bnf_chemical(self):
        """Name substring and exact BNF-code queries return chemicals and
        products; code matching is case-insensitive."""
        url = ('%s/bnf_code?q=lor&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 5)
        self.assertEqual(content[0]['id'], '0202010D0')
        self.assertEqual(content[0]['name'], 'Chlorothiazide')
        self.assertEqual(content[0]['type'], 'chemical')
        self.assertEqual(content[3]['id'], '0202010D0AA')
        self.assertEqual(content[3]['name'], 'Chloroth')
        self.assertEqual(content[3]['type'], 'product')
        url = ('%s/bnf_code?q=0202010D0BD&exact=true&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['id'], '0202010D0BD')
        self.assertEqual(content[0]['name'], 'Chlotride')
        self.assertEqual(content[0]['is_generic'], False)
        # Same exact-code query with lower-case suffix must match too.
        url = ('%s/bnf_code?q=0202010D0bd&exact=true&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['id'], '0202010D0BD')
        self.assertEqual(content[0]['name'], 'Chlotride')
        self.assertEqual(content[0]['is_generic'], False)

    def test_inactive_chemical(self):
        """Inactive chemicals are excluded from results."""
        url = ('%s/bnf_code?q=0204ZZZZZ&exact=true&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 0)

    def test_api_view_bnf_section(self):
        """Section/paragraph/chapter lookups by name and by dotted or packed
        numeric code."""
        url = ('%s/bnf_code?q=diuretics&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 2)
        self.assertEqual(content[0]['id'], '2.2')
        self.assertEqual(content[0]['name'], 'Diuretics')
        self.assertEqual(content[0]['type'], 'BNF section')
        self.assertEqual(content[1]['id'], '2.2.1')
        self.assertEqual(content[1]['name'], 'Thiazides And Related Diuretics')
        self.assertEqual(content[1]['type'], 'BNF paragraph')
        url = ('%s/bnf_code?q=cardio&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['id'], '2')
        self.assertEqual(content[0]['name'], 'Cardiovascular System')
        self.assertEqual(content[0]['type'], 'BNF chapter')
        url = ('%s/bnf_code?q=2.2&exact=true&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['id'], '2.2')
        self.assertEqual(content[0]['name'], 'Diuretics')
        self.assertEqual(content[0]['type'], 'BNF section')
        # The packed form '0202' resolves to the same section as '2.2'.
        url = ('%s/bnf_code?q=0202&exact=true&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['id'], '2.2')
        self.assertEqual(content[0]['name'], 'Diuretics')
        self.assertEqual(content[0]['type'], 'BNF section')

    def test_inactive_section(self):
        """Inactive BNF sections are excluded from results."""
        url = ('%s/bnf_code?q=5.99&exact=true&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 0)

    def test_api_view_bnf_presentation(self):
        """Presentation lookups by name prefix and full 15-char code; an
        exact query on a partial code yields nothing."""
        url = ('%s/bnf_code?q=Bendroflume&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 3)
        self.assertEqual(content[0]['id'], '0202010B0')
        self.assertEqual(content[0]['name'], 'Bendroflumethiazide')
        url = ('%s/bnf_code?q=0202010F0AAAAAA&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['id'], '0202010F0AAAAAA')
        self.assertEqual(content[0]['name'], 'Chlortalidone_Tab 50mg')
        self.assertEqual(content[0]['type'], 'product format')
        url = ('%s/bnf_code?q=0202010F0AAA&exact=true&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 0)

    def test_inactive_presentation(self):
        """Non-current presentations are excluded from results."""
        url = ('%s/bnf_code?q=non-current+product&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 0)

    def test_api_view_bnf_presentation_replacements(self):
        """Replaced presentations collapse to a single current result."""
        url = ('%s/bnf_code?q=Labetalol+50&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content), 1)
def load(project_path: Union[(Path, str, None)]=None, name: Optional[str]=None, raise_if_loaded: bool=True) -> 'Project':
    """Load a Brownie project and return its Project object.

    When ``project_path`` is None, the project is searched for from the
    current directory; an explicit non-project path is also tried as a name
    inside the data folder's ``packages`` directory. ``name`` defaults to a
    sanitized, title-cased form of the project folder name.

    Raises:
        ProjectNotFound: no project could be located.
        BadProjectName: derived name does not start with a letter.
        ProjectAlreadyLoaded: same-named project already loaded and
            ``raise_if_loaded`` is True.
    """
    if (project_path is None):
        project_path = check_for_project('.')
        if ((project_path is not None) and (project_path != Path('.').absolute())):
            warnings.warn(f"Loaded project has a root folder of '{project_path}' which is different from the current working directory", BrownieEnvironmentWarning)
    else:
        project_path = Path(project_path)
        if (project_path.resolve() != check_for_project(project_path)):
            # Not a project folder itself; maybe it names an installed package.
            packages_path = _get_data_folder().joinpath('packages')
            if ((not project_path.is_absolute()) and packages_path.joinpath(project_path).exists()):
                project_path = packages_path.joinpath(project_path)
            else:
                project_path = None
    if (project_path is None):
        raise ProjectNotFound('Could not find Brownie project')
    project_path = Path(project_path).resolve()
    if (name is None):
        # Derive a valid identifier-ish name from the folder name.
        name = project_path.name
        if (not name.lower().endswith('project')):
            name += ' project'
        if (not name[0].isalpha()):
            raise BadProjectName('Project must start with an alphabetic character')
        name = ''.join((i for i in name.title() if i.isalnum()))
    for loaded_project in _loaded_projects:
        if (loaded_project._name == name):
            if raise_if_loaded:
                raise ProjectAlreadyLoaded('There is already a project loaded with this name')
            # Reuse the already-loaded instance instead of raising.
            return loaded_project
    _create_folders(project_path)
    _add_to_sys_path(project_path)
    return Project(name, project_path)
class OptionSeriesColumnpyramidSonificationContexttracksMappingTremoloSpeed(Options):
    """Generated options wrapper for the tremolo-speed mapping of a
    columnpyramid series' sonification context tracks.

    NOTE(review): each property is declared as a getter def immediately
    followed by a same-named setter def; in plain Python the second
    definition overwrites the first, so this pattern (repeated across the
    generated file) must rely on machinery in the ``Options`` base --
    confirm before refactoring.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_serialization_negative():
    """Encoding or decoding a StateUpdateMessage whose performative fails
    validation must raise ValueError."""
    currency_change = {'FET': 10}
    good_change = {'a_good': 1}
    msg = StateUpdateMessage(performative=StateUpdateMessage.Performative.APPLY, amount_by_currency_id=currency_change, quantities_by_good_id=good_change)
    # Force performative comparison to fail so the encoder rejects it.
    with patch.object(StateUpdateMessage.Performative, '__eq__', return_value=False):
        with pytest.raises(ValueError, match=f'Performative not valid: {msg.performative}'):
            msg.serializer.encode(msg)
    encoded_tx_bytes = msg.serializer.encode(msg)
    # Same failure injected on the decode path.
    with patch.object(StateUpdateMessage.Performative, '__eq__', return_value=False):
        with pytest.raises(ValueError, match=f'Performative not valid: {msg.performative}'):
            msg.serializer.decode(encoded_tx_bytes)
def validate_directory(path, checkmodes):
    """Return True if ``path`` is a real (non-symlink) directory.

    Args:
        path: directory path; must be a str, otherwise the process exits.
        checkmodes: when truthy, additionally require the directory's
            execution bit via check_dir_execution_bit().

    Returns:
        bool: False for non-directories and symlinks; otherwise True (or the
        result of the execution-bit check).
    """
    if not isinstance(path, str):
        # A non-string path is a programming error; abort loudly (behavior
        # kept from the original implementation).
        logging.debug('got path which is not a string. Exiting..')
        sys.exit('Function validate_directory got path which is not a string.')
    if not os.path.isdir(path):
        logging.debug('Returning false in validate_directory/os.path.isdir for directory: %s', path)
        return False
    if os.path.islink(path):
        # Symlinked directories are deliberately rejected.
        logging.debug('Returning false in validate_directory/os.path.islink for directory: %s', path)
        return False
    if checkmodes:
        return check_dir_execution_bit(path)
    return True
class _IndexedCustomCheckListEditor(BaseSourceWithLocation):
    """Interaction registration: for a CustomEditor located by Index, a
    MouseClick is routed to the checkbox child at the (converted) index
    inside the editor's panel."""
    source_class = CustomEditor
    locator_class = Index
    handlers = [(MouseClick, (lambda wrapper, _: _interaction_helpers.mouse_click_checkbox_child_in_panel(control=wrapper._target.source.control, index=convert_index(source=wrapper._target.source, index=wrapper._target.location.index), delay=wrapper.delay)))]
class OptionSeriesStreamgraphDatalabelsTextpath(Options):
    """Generated options wrapper for a streamgraph series' data-label text
    path.

    NOTE(review): same-named getter/setter def pairs -- in plain Python the
    second def overwrites the first; this generated pattern must rely on the
    ``Options`` base machinery. Confirm before refactoring.
    """
    def attributes(self):
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
# NOTE(review): the next line appears truncated by extraction -- it is most
# likely the argument list of a decorator (e.g. a remote-function decorator
# taking image/shared_volumes/timeout) whose first line was lost; as written
# it is not valid standalone Python. Restore upstream.
(image=cpu_image, shared_volumes={str(SHARED): volume}, timeout=(30 * 60))
def main(storage: str, language: str='', video: bytes=b'', suffix: str='.mp4') -> bytes:
    """Translate ``video`` into ``language`` and return the resulting bytes.

    Writes the input bytes into a temp directory under ``storage`` (the
    shared volume), invokes the remote ``run`` job on it, and reads back the
    translated MP4.
    """
    with tempfile.TemporaryDirectory(dir=storage) as tmp:
        dirpath = Path(tmp)
        lang = check_language(language)
        config = Config(target_lang=lang, speaker_markers=True)
        # Materialize the input so the worker can read it from shared storage.
        shared_input = (dirpath / 'input').with_suffix(suffix)
        shared_input.write_bytes(video)
        shared_output: Path = run.call(path_in=str(shared_input), path_out=str((dirpath / 'translated.mp4')), path_tmp=str(dirpath), config=config)
        return shared_output.read_bytes()
def multiplex_for_tensor_fields(func):
    """Decorator: apply *func* per scalar component of a tensor field.

    Scalar fields are passed through to *func* unchanged; tensor fields are
    reshaped to ``(components, grid.size)``, processed component-wise, and
    the results reassembled into a ``Field`` with the original tensor shape.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped function's name and docstring
    def inner(self, field):
        if field.is_scalar_field:
            return func(self, field)
        # Flatten every tensor component into one row over the grid points.
        components = field.reshape((-1, field.grid.size))
        results = [func(self, comp) for comp in components]
        # Restore the tensor axes; -1 absorbs whatever shape func returned.
        new_shape = np.concatenate((field.tensor_shape, [-1]))
        return Field(np.array(results).reshape(new_shape), results[0].grid)
    return inner
class ColumnApi():
    """Generates JavaScript snippets targeting an ag-Grid column API object.

    Every method returns a ``JsObjects`` fragment (a JS expression string);
    nothing is executed on the Python side.

    :param page: Page model owning the component.
    :param js_code: JavaScript variable name holding the grid's column API.
    """

    def __init__(self, page: primitives.PageModel, js_code: str):
        self.varId = js_code
        self.page = page

    def sizeColumnsToFit(self, width):
        """Resize all visible columns to fit the given width."""
        return JsObjects.JsVoid('%s.sizeColumnsToFit(%s)' % (self.varId, JsUtils.jsConvertData(width, None)))

    def getColumnGroup(self, name: str):
        """Return the column group with the given name."""
        return JsObjects.JsVoid('%s.getColumnGroup(%s)' % (self.varId, JsUtils.jsConvertData(name, None)))

    def getColumn(self, name: str):
        """Return the column with the given name."""
        return JsObjects.JsVoid('%s.getColumn(%s)' % (self.varId, JsUtils.jsConvertData(name, None)))

    def getColumnState(self):
        """Return the current column state (order, width, visibility, ...)."""
        return JsObjects.JsVoid('%s.getColumnState()' % self.varId)

    def setColumnState(self, column_state):
        """Apply a previously captured column state."""
        return JsObjects.JsVoid('%s.setColumnState(%s)' % (self.varId, JsUtils.jsConvertData(column_state, None)))

    def resetColumnState(self):
        """Reset columns to the state defined in the column definitions."""
        return JsObjects.JsVoid('%s.resetColumnState()' % self.varId)

    def isPinning(self):
        """Whether any column is currently pinned."""
        return JsObjects.JsVoid('%s.isPinning()' % self.varId)

    def isPinningLeft(self):
        """Whether any column is pinned to the left."""
        return JsObjects.JsVoid('%s.isPinningLeft()' % self.varId)

    def isPinningRight(self):
        """Whether any column is pinned to the right."""
        return JsObjects.JsVoid('%s.isPinningRight()' % self.varId)

    def setColumnVisible(self, col_name: str, visible: bool):
        """Show or hide a single column.

        Bug fix: argument separator was '.' instead of ',', which produced
        invalid JavaScript.
        """
        col_name = JsUtils.jsConvertData(col_name, None)
        visible = JsUtils.jsConvertData(visible, None)
        return JsObjects.JsVoid('%s.setColumnVisible(%s, %s)' % (self.varId, col_name, visible))

    def setColumnsVisible(self, col_names, visible):
        """Show or hide several columns at once (separator bug fixed)."""
        col_names = JsUtils.jsConvertData(col_names, None)
        visible = JsUtils.jsConvertData(visible, None)
        return JsObjects.JsVoid('%s.setColumnsVisible(%s, %s)' % (self.varId, col_names, visible))

    def setColumnPinned(self, col_name: str, pinned: bool):
        """Pin or unpin a single column (separator bug fixed)."""
        col_name = JsUtils.jsConvertData(col_name, None)
        pinned = JsUtils.jsConvertData(pinned, None)
        return JsObjects.JsVoid('%s.setColumnPinned(%s, %s)' % (self.varId, col_name, pinned))

    def setColumnsPinned(self, col_names, pinned):
        """Pin or unpin several columns at once (separator bug fixed)."""
        col_names = JsUtils.jsConvertData(col_names, None)
        pinned = JsUtils.jsConvertData(pinned, None)
        return JsObjects.JsVoid('%s.setColumnsPinned(%s, %s)' % (self.varId, col_names, pinned))

    def getColumnGroupState(self):
        """Return the expanded/collapsed state of the column groups."""
        return JsObjects.JsVoid('%s.getColumnGroupState()' % self.varId)

    def autoSizeColumn(self, col_name: str):
        """Auto-size a single column to fit its contents."""
        return JsObjects.JsVoid('%s.autoSizeColumn(%s)' % (self.varId, JsUtils.jsConvertData(col_name, None)))

    def autoSizeColumns(self, col_names: list):
        """Auto-size several columns to fit their contents."""
        return JsObjects.JsVoid('%s.autoSizeColumns(%s)' % (self.varId, JsUtils.jsConvertData(col_names, None)))

    def getDisplayNameForColumn(self, name: str):
        """Return the display name of the given column."""
        return JsObjects.JsVoid('%s.getDisplayNameForColumn(%s)' % (self.varId, JsUtils.jsConvertData(name, None)))

    def getAllColumns(self):
        """Return all columns known to the grid."""
        return JsObjects.JsVoid('%s.getAllColumns()' % self.varId)

    def getAllGridColumns(self):
        """Return all columns in their current grid order."""
        return JsObjects.JsVoid('%s.getAllGridColumns()' % self.varId)

    def getPrimaryColumns(self):
        """Return the primary (non-pivot) columns."""
        return JsObjects.JsVoid('%s.getPrimaryColumns()' % self.varId)

    def getSecondaryColumns(self):
        """Return the secondary (pivot-generated) columns."""
        return JsObjects.JsVoid('%s.getSecondaryColumns()' % self.varId)

    def getAllDisplayedVirtualColumns(self):
        """Return the columns currently rendered in the viewport."""
        return JsObjects.JsVoid('%s.getAllDisplayedVirtualColumns()' % self.varId)

    def moveColumn(self, col_name, to_index):
        """Move a single column to the given index."""
        col_name = JsUtils.jsConvertData(col_name, None)
        to_index = JsUtils.jsConvertData(to_index, None)
        return JsObjects.JsVoid('%s.moveColumn(%s, %s)' % (self.varId, col_name, to_index))

    def moveColumns(self, col_names, to_index):
        """Move several columns to the given index."""
        col_names = JsUtils.jsConvertData(col_names, None)
        to_index = JsUtils.jsConvertData(to_index, None)
        return JsObjects.JsVoid('%s.moveColumns(%s, %s)' % (self.varId, col_names, to_index))

    def setColumnAggFunc(self, column, agg_func):
        """Set the aggregation function of a column."""
        column = JsUtils.jsConvertData(column, None)
        agg_func = JsUtils.jsConvertData(agg_func, None)
        return JsObjects.JsVoid('%s.setColumnAggFunc(%s, %s)' % (self.varId, column, agg_func))

    def setColumnWidth(self, col_name, new_width, finished=True):
        """Set the width of a single column."""
        col_name = JsUtils.jsConvertData(col_name, None)
        new_width = JsUtils.jsConvertData(new_width, None)
        finished = JsUtils.jsConvertData(finished, None)
        return JsObjects.JsVoid('%s.setColumnWidth(%s, %s, %s)' % (self.varId, col_name, new_width, finished))

    def setColumnWidths(self, column_widths, finished=True):
        """Set the widths of several columns at once.

        Bug fix: the generated JS previously called ``setColumnWidth``
        (singular), which expects different arguments.
        """
        column_widths = JsUtils.jsConvertData(column_widths, None)
        finished = JsUtils.jsConvertData(finished, None)
        return JsObjects.JsVoid('%s.setColumnWidths(%s, %s)' % (self.varId, column_widths, finished))

    def custom(self, func_nam: str, *argv):
        """Call any column-API method by name with converted arguments."""
        js_args = []
        for arg in argv:
            js_args.append(str(JsUtils.jsConvertData(arg, None)))
        return JsObjects.JsObject.JsObject.get('%s.%s(%s)' % (self.varId, func_nam, ', '.join(js_args)))
def CreateManifest(manifest_path, classpath, main_class=None, manifest_entries=None):
    """Write a JAR ``MANIFEST.MF`` file.

    Args:
        manifest_path: Destination file path.
        classpath: Iterable of jar paths; only their basenames are emitted
            in the ``Class-Path`` attribute (surrounding quotes stripped).
        main_class: Optional ``Main-Class`` attribute value.
        manifest_entries: Optional iterable of ``(key, value)`` pairs added
            as extra attributes.
    """
    output = ['Manifest-Version: 1.0']
    if main_class:
        output.append('Main-Class: %s' % main_class)
    if manifest_entries:
        for (k, v) in manifest_entries:
            output.append('%s: %s' % (k, v))
    if classpath:
        sanitized_paths = []
        for path in classpath:
            sanitized_paths.append(os.path.basename(path.strip('"')))
        output.append('Class-Path: %s' % ' '.join(sanitized_paths))
    output.append('Created-By: ')
    # The JAR spec caps manifest lines at 72 bytes; continuation lines start
    # with a single space, hence subsequent_indent=' ' and width - 2.
    wrapper = textwrap.TextWrapper(break_long_words=True, drop_whitespace=False, subsequent_indent=' ', width=(_MAX_MANIFEST_LINE_LEN - 2))
    # BUG FIX: textwrap.wrap('') returns [], so the previous '' sentinel never
    # produced the trailing CRLF required for the last attribute to be parsed;
    # append it explicitly instead.
    output = '\r\n'.join(w for l in output for w in wrapper.wrap(l)) + '\r\n'
    with open(manifest_path, 'w') as f:
        f.write(output)
def test_tensorflow_task_with_custom_config(serialization_settings: SerializationSettings):
    """Verify TfJob replica configs serialize into the expected custom dict."""
    task_config = TfJob(chief=Chief(replicas=1, requests=Resources(cpu='1'), limits=Resources(cpu='2'), image='chief:latest'), worker=Worker(replicas=5, requests=Resources(cpu='2', mem='2Gi'), limits=Resources(cpu='4', mem='2Gi'), image='worker:latest', restart_policy=RestartPolicy.FAILURE), ps=PS(replicas=2, restart_policy=RestartPolicy.ALWAYS), evaluator=Evaluator(replicas=5, requests=Resources(cpu='2', mem='2Gi'), limits=Resources(cpu='4', mem='2Gi'), image='evaluator:latest', restart_policy=RestartPolicy.FAILURE))

    # NOTE(review): the "@task(" prefix of this decorator was lost in the
    # source (a bare parenthesised keyword list is a syntax error); restored
    # as the flytekit @task decorator -- confirm against the test module's
    # imports.
    @task(task_config=task_config, cache=True, requests=Resources(cpu='1'), cache_version='1')
    def my_tensorflow_task(x: int, y: str) -> int:
        return x

    assert (my_tensorflow_task(x=10, y='hello') == 10)
    assert (my_tensorflow_task.task_config is not None)
    assert (my_tensorflow_task.task_type == 'tensorflow')
    assert (my_tensorflow_task.resources.limits == Resources())
    assert (my_tensorflow_task.resources.requests == Resources(cpu='1'))
    expected_custom_dict = {'chiefReplicas': {'replicas': 1, 'image': 'chief:latest', 'resources': {'requests': [{'name': 'CPU', 'value': '1'}], 'limits': [{'name': 'CPU', 'value': '2'}]}}, 'workerReplicas': {'replicas': 5, 'image': 'worker:latest', 'resources': {'requests': [{'name': 'CPU', 'value': '2'}, {'name': 'MEMORY', 'value': '2Gi'}], 'limits': [{'name': 'CPU', 'value': '4'}, {'name': 'MEMORY', 'value': '2Gi'}]}, 'restartPolicy': 'RESTART_POLICY_ON_FAILURE'}, 'psReplicas': {'resources': {}, 'replicas': 2, 'restartPolicy': 'RESTART_POLICY_ALWAYS'}, 'evaluatorReplicas': {'replicas': 5, 'image': 'evaluator:latest', 'resources': {'requests': [{'name': 'CPU', 'value': '2'}, {'name': 'MEMORY', 'value': '2Gi'}], 'limits': [{'name': 'CPU', 'value': '4'}, {'name': 'MEMORY', 'value': '2Gi'}]}, 'restartPolicy': 'RESTART_POLICY_ON_FAILURE'}}
    assert (my_tensorflow_task.get_custom(serialization_settings) == expected_custom_dict)
class _InterEventIntervallLog():
def __init__(self):
self.clear()
def clear(self):
self.log_dict = {}
def add_event(self, event_tag, time):
try:
self.log_dict[event_tag].append(time)
except Exception:
self.log_dict[event_tag] = [time]
def _get_iei_intervalls(self, from_tag, to_tag):
rtn = []
try:
time_from = sorted(self.log_dict[from_tag])
time_to = sorted(self.log_dict[to_tag])
except Exception:
return rtn
for f in time_from:
for t in time_to:
if (t >= f):
rtn.append((t - f))
break
return rtn
def summary(self):
rtn = []
for (a, b) in combinations(self.log_dict.keys(), 2):
for reverse in [False, True]:
if reverse:
tmp = b
b = a
a = tmp
iei = self._get_iei_intervalls(a, b)
txt = '{0} --> {1}: n={2}'.format(a, b, len(iei))
if (len(iei) > 0):
txt += ', mean={0}, median={1}, std={2}'.format(misc.round(statistics.mean(iei), 2), misc.round(statistics.median(iei), 2), misc.round(statistics.std(iei), 2))
rtn.append(txt)
return rtn |
class RangeStringArgument(ArgumentDefinition):
    """Argument definition accepting a range string such as ``1,3-5,9,17``.

    Optionally enforces that every value in the range lies below a maximum.
    """

    NOT_A_VALID_RANGE_STRING = 'The input should be of the type: <b><pre>\n\t1,3-5,9,17\n</pre></b>i.e. integer values separated by commas, and dashes to represent ranges.'
    VALUE_NOT_IN_RANGE = 'A value must be in the range from 0 to %d.'

    def __init__(self, max_value: Optional[int]=None, **kwargs: bool) -> None:
        super().__init__(**kwargs)
        self.__max_value = max_value

    def validate(self, token: str) -> ValidationStatus:
        """Validate *token* as a range string, recording failures on the
        status object returned by the base-class validation."""
        status = super().validate(token)
        if not status:
            # Base validation already failed; nothing more to check.
            return status
        try:
            ActiveRange.validate_rangestring(token)
        except ValueError:
            status.setFailed()
            status.addToMessage(RangeStringArgument.NOT_A_VALID_RANGE_STRING)
        limit = self.__max_value
        if limit is not None:
            try:
                ActiveRange.validate_rangestring_vs_length(token, limit)
            except ValueError:
                status.setFailed()
                status.addToMessage(RangeStringArgument.VALUE_NOT_IN_RANGE % (limit - 1))
        status.setValue(token)
        return status
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.