code stringlengths 281 23.7M |
|---|
.parametrize('h5_fn', ['cytosin_orca_overlap_data.h5', 'cytosin_trip_orca_overlap_data.h5'])
def test_tden_self_overlap(h5_fn, this_dir):
    """Self-overlap of a normalized transition density must be the identity.

    For each geometry's MO/CI coefficients stored in the HDF5 fixture, the
    overlap of the transition density with itself is computed and compared
    against the identity matrix.
    """
    with h5py.File((this_dir / h5_fn), 'r') as handle:
        mo_coeffs = handle['mo_coeffs'][:]
        ci_coeffs = handle['ci_coeffs'][:]
    calc = OverlapCalculator()
    def tden_self_overlap(C, Xa):
        # Reconstruct the AO overlap matrix from the MO coefficients.
        S_AO = calc.get_sao_from_mo_coeffs(C)
        # Normalize the alpha CI coefficients; beta part is zero (restricted case).
        (Xa, _) = norm_ci_coeffs(Xa, np.zeros_like(Xa), restricted_norm=1.0)
        return tden_overlaps(C, Xa, C, Xa, S_AO)
    for (Ca, Xa) in zip(mo_coeffs, ci_coeffs):
        # Transpose: stored row layout vs. column-per-MO expected by the helper.
        ovlps = tden_self_overlap(Ca.T, Xa)
        I = np.eye(ovlps.shape[0])
        np.testing.assert_allclose(ovlps, I, atol=5e-05)
class FeatureType(Enum):
    """Closed set of feature kinds recognised by the pipeline.

    ``str()`` of a member renders as ``TYPE_<value>`` (e.g.
    ``str(FeatureType.TYPE_CAT) == 'TYPE_CATEGORICAL'``) -- note this uses the
    *value*, not the member name.
    """
    TYPE_CAT = 'CATEGORICAL'
    TYPE_BOOL = 'BOOL'
    TYPE_NUM = 'NUMERIC'
    TYPE_TEXT = 'TEXT'
    TYPE_UNSUPPORTED = 'UNSUPPORTED'
    TYPE_ALL_NAN = 'ALL_NAN'
    TYPE_UNKNOWN = 'UNKNOWN'
    TYPE_SKIPPED = 'SKIPPED'

    def __str__(self):
        # Prefix the enum's string value, mirroring the member-name convention.
        return f'TYPE_{self.value}'
def test_sign_message(accounts):
    """Sign a nested EIP-712 typed message and pin its message hash."""
    class TestSubType(EIP712Type):
        inner: 'uint256'
    class TestMessage(EIP712Message):
        _name_: 'string' = 'Brownie Tests'
        value: 'uint256'
        default_value: 'address' = '0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF'
        sub: TestSubType
    # priv_key is a module-level fixture/constant defined elsewhere in the file.
    local = accounts.add(priv_key)
    msg = TestMessage(value=1, sub=TestSubType(inner=2))
    signed = local.sign_message(msg)
    assert isinstance(signed, SignedMessage)
    # NOTE(review): this hex digest is 55 chars; a keccak-256 message hash
    # should be 64 hex chars -- the expected value looks truncated. Confirm
    # against the original test before relying on it.
    assert (signed.messageHash.hex() == '0x131c497d4ba2a00564dcf667c3bf3f85a410ef8cb50050b51959c26')
def find_approx(cmd_input: str, cmd_map: Optional[Iterable[str]]) -> Iterable[str]:
    """Suggest commands from *cmd_map* that resemble *cmd_input*.

    Prefix matches win outright; otherwise commands within a
    Damerau-Levenshtein distance of 2 are suggested, ordered by
    (distance, name).

    Args:
        cmd_input: the (possibly mistyped) command entered by the user.
        cmd_map: iterable of known command names, or None.

    Returns:
        A sorted list of suggestions (empty if cmd_map is None or nothing
        is close enough).
    """
    # Guard the declared Optional: previously a None cmd_map raised TypeError.
    if cmd_map is None:
        return []
    # Hoist the lowercasing out of the loop; it is loop-invariant.
    needle = str(cmd_input).lower()
    prefix_suggestions = set()
    levenshtein_suggestions = {}
    for another_command in cmd_map:
        if str(another_command).startswith(needle):
            prefix_suggestions.add(another_command)
        elif len(another_command) > 1:
            distance = jellyfish.damerau_levenshtein_distance(needle, another_command)
            if distance <= 2:
                levenshtein_suggestions[another_command] = distance
    if prefix_suggestions:
        return sorted(prefix_suggestions)
    # Sort by distance first, then alphabetically for a stable order.
    return [cmd for cmd, _ in sorted(levenshtein_suggestions.items(), key=lambda i: (i[1], i[0]))]
def _add_channels_to_command(command, channels):
if channels:
channels = channels.strip().split()
dashc = []
for channel in channels:
dashc.append('--channel')
dashc.append(channel)
return ((command[:2] + dashc) + command[2:])
else:
return command |
.parametrize('language', LANGUAGES)
def test_wordclock_language(manager_nospawn, minimal_conf_noscreen, language):
    """WordClock widget should accept every supported language and render a cache image."""
    with tempfile.TemporaryDirectory() as cache:
        config = minimal_conf_noscreen
        config.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([widget.WordClock(language=language, cache=cache)], 10))]
        manager_nospawn.start(config)
        # Query the running widget for the language it actually loaded.
        (_, response) = manager_nospawn.c.widget['wordclock'].eval('self.language')
        assert (response == language)
        # The widget writes its rendered face into the cache directory.
        assert os.path.isfile(os.path.join(cache, 'wordclock.png'))
class Particle():
    """A wandering particle rendered with raylib (``rl``); noise-steered via glm.

    Relies on module globals: ``BEES`` (render-style flag), ``CTM()``
    (presumably a current-time-in-ms helper -- confirm), ``rl`` (raylib
    bindings) and ``glm``.
    """
    def __init__(self, pos):
        self.pos = pos              # current position (glm.vec2-like)
        self.scl = (16 if BEES else 2)   # sprite size in pixels
        self.ang = math.radians(random.randint(0, 359))  # heading in radians
        self.spd = random.randint(4, 10)  # NOTE(review): assigned but unused in process()
        self.start = CTM()          # timestamp of last heading change
        self.clr = ([255, 200, 0, 155] if BEES else [*(random.randint(55, 255) for i in range(3)), 55])
        self.mem = (0, 0)           # last drawn top-left corner
        self.pre = glm.vec2(pos)    # previous position, for the trail line
    def process(self):
        """Advance one frame: re-steer periodically, move, wrap at edges, draw."""
        # Every ~50 time units pick a new heading from simplex noise at pos.
        if (CTM() > (self.start + 50)):
            self.ang = math.radians((glm.simplex(self.pos) * 360.0))
            self.start = CTM()
        # Unit direction obtained by rotating (1,1,1,1) around Z by self.ang.
        self.rot = glm.vec2((glm.vec4(1) * glm.rotate(glm.mat4(), self.ang, [0, 0, 1])))
        self.pre = (int(self.pos.x), int(self.pos.y))
        # Frame-rate-independent movement: 64 px/s along the heading.
        self.pos += ((self.rot * rl.GetFrameTime()) * 64)
        rl.DrawLine(int(self.pos.x), int(self.pos.y), *self.pre, self.clr)
        # Toroidal wrap-around at the screen borders.
        if (self.pos.x < 0):
            self.pos.x = (rl.GetScreenWidth() - 1)
        if (self.pos.y < 0):
            self.pos.y = (rl.GetScreenHeight() - 1)
        if (self.pos.x >= rl.GetScreenWidth()):
            self.pos.x = 0
        if (self.pos.y >= rl.GetScreenHeight()):
            self.pos.y = 0
        hlf = (self.scl // 2)
        # Centre the sprite on the current position.
        self.mem = ((int(self.pos.x) - hlf), (int(self.pos.y) - hlf))
        rl.DrawRectangle(*self.mem, (self.scl if (not BEES) else hlf), self.scl, self.clr)
        if BEES:
            # Second half of the "bee": a translucent black stripe.
            rl.DrawRectangle((self.mem[0] + hlf), self.mem[1], hlf, self.scl, [0, 0, 0, 55])
def test_self_reference_error() -> None:
    """A field referencing itself must fail traversal validation."""
    t = generate_graph_resources(3)
    # Point f1 of dr_1/ds_1 back at itself to create the illegal self-reference.
    field(t, 'dr_1', 'ds_1', 'f1').references.append((FieldAddress('dr_1', 'ds_1', 'f1'), None))
    field(t, 'dr_1', 'ds_1', 'f1').identity = 'email'
    with pytest.raises(ValidationError):
        generate_traversal({'email': 'a'}, *t)
class BaseTaskTestCase(BasePyTestCase):
    """Test base that patches out session teardown for the test's lifetime.

    ``transactional_session_maker._end_session`` is mocked in setup and
    restored in teardown so tasks under test do not close real sessions.
    """

    def setup_method(self, method):
        super().setup_method(method)
        patcher = mock.patch('bodhi.server.util.transactional_session_maker._end_session')
        self._tsm_patcher = patcher
        patcher.start()

    def teardown_method(self, method):
        # Undo the patch before the base class cleans up.
        self._tsm_patcher.stop()
        super().teardown_method(method)
class CfgFormatBase():
    """Base class for configuration-format handlers keyed by a URL scheme.

    Subclasses override :meth:`create_cfg_from_str` / :meth:`create_cfg_from_file`;
    :meth:`evaluate` repeatedly expands ``configuration.<name>`` entries and
    merges the results into the shared config.
    """
    # NOTE(review): ``__metaclass__`` is the Python-2 mechanism; under
    # Python 3 this attribute has no effect (the class is NOT abstract).
    __metaclass__ = ABCMeta
    def __init__(self, cfg, name):
        self.__cfg = cfg    # the shared configuration object being built
        self.__name = name  # URL scheme this handler is responsible for
    def create_cfg_from_str(self, url):
        # Subclass hook: parse an inline "<name>:..." URL. Default: no-op.
        return
    def create_cfg_from_file(self, url):
        # Subclass hook: parse a "file:..." URL. Default: no-op.
        return
    def __create_cfg_from_url(self, url):
        # Dispatch on the URL scheme; anything else is a programming error.
        if url.startswith((self.__name + ':')):
            return self.create_cfg_from_str(url)
        elif url.startswith('file:'):
            return self.create_cfg_from_file(url)
        assert False
    def __evaluate_once(self, config):
        # Merge all URLs of one 'configuration.<name>' list into a fresh dict.
        cfg = {}
        for cfg_idx in config:
            DictHelper.merge(cfg, self.__create_cfg_from_url(cfg_idx))
        return cfg
    def evaluate(self):
        """Expand configuration entries until none are left.

        Each pass consumes ['configuration'][name] and merges the expansion
        back into the config (which may introduce new entries); RMTException
        (raised when the key is gone) terminates the loop.
        """
        try:
            while True:
                config = self.__cfg.get_value(['configuration', self.__name])
                # Remove the entry before merging so the loop can terminate.
                del self.__cfg['configuration'][self.__name]
                DictHelper.merge(self.__cfg, self.__evaluate_once(config))
        except RMTException:
            pass
def extractMarubintranslationsWordpressCom(item):
    """Build a release message for a marubintranslations.wordpress.com item.

    Returns None for previews / items without chapter or volume info,
    a release message for recognised tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (tag to match, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def compute_fork_data_root(current_version: bytes, genesis_validators_root: bytes) -> bytes:
    """Return the hash tree root of the ForkData for the given fork version.

    Raises:
        ValueError: if *current_version* is not exactly 4 bytes long.
    """
    version_length = len(current_version)
    if version_length != 4:
        raise ValueError(f'Fork version should be in 4 bytes. Got {version_length}.')
    fork_data = ForkData(current_version=current_version, genesis_validators_root=genesis_validators_root)
    return fork_data.hash_tree_root
(base=RequestContextTask, name='export.sessions.pdf', bind=True)
def export_sessions_pdf_task(self, event_id):
    """Celery task: render all sessions of *event_id* into a PDF export.

    Returns a dict with 'download_url' on success or '__error'/'result'
    describing the failure (the task itself never raises).
    """
    sessions = db.session.query(Session).filter_by(event_id=event_id)
    try:
        sessions_pdf_url = create_save_pdf(render_template('pdf/sessions_pdf.html', sessions=sessions), UPLOAD_PATHS['exports-temp']['pdf'].format(event_id=event_id, identifier=''))
        result = {'download_url': sessions_pdf_url}
    except Exception as e:
        # Swallow the error deliberately: the caller inspects the result dict.
        result = {'__error': True, 'result': str(e)}
        logger.exception('Error in exporting sessions as PDF')
    return result
def gen_bridge(tmpdir, bridge, reset, hdl):
    """Generate a local-bus bridge module in *tmpdir* and return its path.

    *hdl* selects the output language ('vhdl' -> .vhd, anything else -> .v);
    *reset* is written into the global corsair register-reset config.
    """
    corsair.config.globcfg['register_reset'] = reset
    if hdl == 'vhdl':
        generator_cls, ext = corsair.generators.LbBridgeVhdl, 'vhd'
    else:
        generator_cls, ext = corsair.generators.LbBridgeVerilog, 'v'
    bridge_path = path_join(tmpdir, '%s2lb.%s' % (bridge, ext))
    generator_cls(path=bridge_path, bridge_type=bridge).generate()
    return bridge_path
class TextScreen(Visual):
    """A static screen composed of a heading line and a body text block.

    Every styling parameter falls back to module ``defaults`` and, where
    applicable, to the active experiment's settings.

    NOTE(review): the bare ``_font.setter`` / ``_size.setter`` / etc. lines
    below and the duplicated getter/setter method names strongly suggest that
    ``@property`` / ``@<name>.setter`` decorators were stripped from this
    file during extraction; as written those bare attribute references would
    raise NameError at class-creation time. Confirm against the original
    source before using this file.
    """
    def __init__(self, heading, text, position=None, heading_font=None, heading_size=None, heading_bold=None, heading_italic=None, heading_underline=None, heading_colour=None, text_font=None, text_size=None, text_bold=None, text_italic=None, text_underline=None, text_colour=None, text_justification=None, background_colour=None, size=None):
        if (position is None):
            position = defaults.textscreen_position
        Visual.__init__(self, position, log_comment='text_screen')
        self._heading = heading
        self._text = text
        # Heading font: explicit arg -> defaults -> experiment font -> 'FreeSans'.
        if (heading_font is None):
            heading_font = defaults.textscreen_heading_font
        if (heading_font is None):
            heading_font = _internals.active_exp.text_font
        if (heading_font is None):
            heading_font = 'FreeSans'
        self._heading_font = find_font(heading_font)
        # Verify the font file is actually loadable by pygame.
        try:
            with open(self._heading_font, 'rb') as f:
                pygame.font.Font(f, 10)
        except Exception:
            raise IOError("Font '{0}' not found!".format(heading_font))
        if (heading_size is None):
            heading_size = defaults.textscreen_heading_size
        if heading_size:
            self._heading_size = heading_size
        else:
            # No explicit heading size: 20% larger than the experiment text.
            self._heading_size = int((_internals.active_exp.text_size * 1.2))
        if (heading_bold is not None):
            self._heading_bold = heading_bold
        else:
            self._heading_bold = defaults.textscreen_heading_bold
        if (heading_italic is not None):
            self._heading_italic = heading_italic
        else:
            self._heading_italic = defaults.textscreen_heading_italic
        if (heading_underline is not None):
            self._heading_underline = heading_underline
        else:
            self._heading_underline = defaults.textscreen_heading_underline
        if (heading_colour is None):
            heading_colour = defaults.textscreen_heading_colour
        if (heading_colour is not None):
            self._heading_colour = heading_colour
        else:
            self._heading_colour = _internals.active_exp.foreground_colour
        if (text_font is None):
            text_font = defaults.textscreen_text_font
        if (text_font is not None):
            self._text_font = find_font(text_font)
        else:
            self._text_font = find_font(_internals.active_exp.text_font)
        try:
            with open(self._text_font, 'rb') as f:
                pygame.font.Font(f, 10)
        except Exception:
            raise IOError("Font '{0}' not found!".format(text_font))
        if (text_size is None):
            self._text_size = defaults.textscreen_text_size
        # NOTE(review): when text_size is None the branch above already set
        # self._text_size, and this next assignment only fires for a non-None
        # text_size -- the two ifs look like they were meant to be if/elif.
        if (text_size is not None):
            self._text_size = text_size
        else:
            self._text_size = _internals.active_exp.text_size
        if (text_bold is not None):
            self._text_bold = text_bold
        else:
            self._text_bold = defaults.textscreen_text_bold
        if (text_italic is not None):
            self._text_italic = text_italic
        else:
            self._text_italic = defaults.textscreen_text_italic
        if (text_underline is not None):
            self._text_underline = text_underline
        else:
            self._text_underline = defaults.textscreen_text_underline
        if (text_colour is None):
            text_colour = defaults.textscreen_text_colour
        if (text_colour is not None):
            self._text_colour = text_colour
        else:
            self._text_colour = _internals.active_exp.foreground_colour
        if (text_justification is not None):
            self._text_justification = text_justification
        else:
            self._text_justification = defaults.textscreen_text_justification
        if (size is not None):
            self._size = size
        else:
            size = defaults.textscreen_size
        # NOTE(review): if size was None but defaults.textscreen_size is not
        # None, self._size is never assigned on this path -- looks like a
        # missing ``self._size = size`` before this check; confirm upstream.
        if (size is None):
            try:
                # Default to 80% of the screen in each dimension.
                self._size = ((_internals.active_exp.screen.surface.get_size()[0] - (_internals.active_exp.screen.surface.get_size()[0] // 5)), (_internals.active_exp.screen.surface.get_size()[1] - (_internals.active_exp.screen.surface.get_size()[1] // 5)))
            except Exception:
                raise RuntimeError('Cannot get size of screen!')
        if (background_colour is not None):
            self._background_colour = background_colour
        else:
            self._background_colour = defaults.textscreen_background_colour
    # Shared message for all setters: attributes are frozen once rendered.
    _getter_exception_message = 'Cannot set {0} if surface exists!'
    def heading(self):
        return self._heading
    def heading(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('heading'))
        else:
            self._heading = value
    def text(self):
        return self._text
    def text(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text'))
        else:
            self._text = value
    def text_font(self):
        return self._text_font
    _font.setter
    def text_font(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text_font'))
        else:
            self._text_font = value
    def text_size(self):
        return self._text_size
    _size.setter
    def text_size(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text_size'))
        else:
            self._text_size = value
    def text_bold(self):
        return self._text_bold
    _bold.setter
    def text_bold(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text_bold'))
        else:
            self._text_bold = value
    def text_italic(self):
        return self._text_italic
    _italic.setter
    def text_italic(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text_italic'))
        else:
            self._text_italic = value
    def text_underline(self):
        return self._text_underline
    _underline.setter
    def text_underline(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text_underline'))
        else:
            self._text_underline = value
    def text_colour(self):
        return self._text_colour
    _colour.setter
    def text_colour(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text_colour'))
        else:
            self._text_colour = value
    def heading_font(self):
        return self._heading_font
    _font.setter
    def heading_font(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('heading_font'))
        else:
            self._heading_font = value
    def heading_size(self):
        return self._heading_size
    _size.setter
    def heading_size(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('heading_size'))
        else:
            self._heading_size = value
    def heading_bold(self):
        return self._heading_bold
    _bold.setter
    def heading_bold(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('heading_bold'))
        else:
            self._heading_bold = value
    def heading_italic(self):
        return self._heading_italic
    _italic.setter
    def heading_italic(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('heading_italic'))
        else:
            self._heading_italic = value
    def heading_underline(self):
        return self._heading_underline
    _underline.setter
    def heading_underline(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('heading_underline'))
        else:
            self._heading_underline = value
    def heading_colour(self):
        return self._heading_colour
    _colour.setter
    def heading_colour(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('heading_colour'))
        else:
            self._heading_colour = value
    def background_colour(self):
        return self._background_colour
    _colour.setter
    def background_colour(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('background_colour'))
        else:
            self._background_colour = value
    def size(self):
        return self._size
    def size(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('size'))
        else:
            self._size = value
    def text_justification(self):
        return self._text_justification
    _justification.setter
    def text_justification(self, value):
        if self.has_surface:
            raise AttributeError(TextScreen._getter_exception_message.format('text_justification'))
        else:
            self._text_justification = value
    def _create_surface(self):
        """Render heading and text box onto a single alpha surface."""
        surface = pygame.surface.Surface(self.size, pygame.SRCALPHA).convert_alpha()
        if (self.background_colour is not None):
            surface.fill(self.background_colour)
        header = TextLine(text=self.heading, text_size=self.heading_size, text_colour=self.heading_colour, background_colour=self.background_colour, text_font=self.heading_font, text_bold=self.heading_bold, text_italic=self.heading_italic, text_underline=self.heading_underline)
        # Temporary stimulus: undo the global id increment it caused.
        Stimulus._id_counter -= 1
        box = TextBox(text=self.text, text_font=self.text_font, text_size=self.text_size, text_bold=self.text_bold, text_italic=self.text_italic, text_underline=self.text_underline, text_colour=self.text_colour, background_colour=self.background_colour, size=(self.size[0], (self.size[1] - (self.size[1] // 5))), text_justification=self.text_justification)
        Stimulus._id_counter -= 1
        # Heading centred at the top; body centred in the lower 4/5.
        surface.blit(header._get_surface(), (((self.size[0] // 2) - (header.surface_size[0] // 2)), 0))
        surface.blit(box._get_surface(), (((self.size[0] // 2) - (box.size[0] // 2)), (self.size[1] // 5)))
        return surface
def _demo(exp=None):
    """Show a demo TextScreen; initialises a develop-mode experiment if needed."""
    if (exp is None):
        from .. import control
        control.set_develop_mode(True)
        control.defaults.event_logging = 0
        exp_ = control.initialize()
    textscreen = TextScreen('Hello World', 'Line one.\nLine two.\nLine three.')
    textscreen.present()
    if (exp is None):
        exp_.clock.wait(1000)
class TestRandomStringGenerator(BaseEvenniaTest):
    """The 2-bit generator must yield every string once, then exhaust."""

    def test_generate(self):
        # Four draws cover the whole '00'..'11' space (order is random).
        generated = sorted(SIMPLE_GENERATOR.get() for _ in range(4))
        self.assertEqual(generated, ['00', '01', '10', '11'])
        # A fifth draw has nothing left to produce.
        with self.assertRaises(random_string_generator.ExhaustedGenerator):
            SIMPLE_GENERATOR.get()
class ColourHandler(logging.Handler):
    """Logging handler that colourises dotted logger names and levels.

    Uses colorama (imported elsewhere as ``clr``) plus a module-level
    ``getColor(index)`` helper. Each dot-separated name segment is coloured
    deterministically by the order it was first seen at that depth.
    """

    def __init__(self, level=logging.DEBUG):
        logging.Handler.__init__(self, level)
        # Leading '\r' returns to line start so the entry overwrites any
        # in-progress prompt/progress output on the console.
        self.formatter = logging.Formatter(('\r%(name)s%(padding)s - %(style)s%(levelname)s - %(message)s' + clr.Style.RESET_ALL))
        clr.init()
        # depth index -> list of segments seen at that depth; a segment's
        # position in that list selects its colour.
        self.logPaths = {}

    def emit(self, record):
        """Colourise *record* and print it; console encoding errors are not fatal."""
        segments = record.name.split('.')
        tname = threading.current_thread().name
        segments.append(tname)
        # Fold a leading 'Main' into the following segment ('Main.child').
        if ((segments[0] == 'Main') and (len(segments) > 1)):
            segments.pop(0)
            segments[0] = ('Main.' + segments[0])
        nameList = []
        for (indice, pathSegment) in enumerate(segments):
            if indice not in self.logPaths:
                self.logPaths[indice] = [pathSegment]
            elif pathSegment not in self.logPaths[indice]:
                self.logPaths[indice].append(pathSegment)
            name = clr.Style.RESET_ALL
            name += getColor(self.logPaths[indice].index(pathSegment))
            name += pathSegment
            name += clr.Style.RESET_ALL
            nameList.append(name)
        record.name = '.'.join(nameList)
        # Style the level text by severity.
        if (record.levelname == 'DEBUG'):
            record.style = clr.Style.DIM
        elif (record.levelname == 'WARNING'):
            record.style = clr.Style.BRIGHT
        elif (record.levelname == 'ERROR'):
            record.style = (clr.Style.BRIGHT + clr.Fore.RED)
        elif (record.levelname == 'CRITICAL'):
            record.style = ((clr.Style.BRIGHT + clr.Back.BLUE) + clr.Fore.RED)
        else:
            record.style = clr.Style.NORMAL
        record.padding = ''
        try:
            print(self.format(record))
        except (UnicodeDecodeError, UnicodeEncodeError):
            # print() raises UnicodeEncodeError on consoles that cannot encode
            # the message; the original only caught UnicodeDecodeError, so such
            # records crashed the handler. (Also fixed 'pring' typo.)
            print('Failed to print log entry!')
.skipif((not has_hf_transformers), reason='requires huggingface transformers')
.parametrize('torch_device', TORCH_DEVICES)
.parametrize('with_torch_sdp', [False, True])
def test_encoder(torch_device, with_torch_sdp):
    """BERTEncoder output must match the reference HF implementation on every device."""
    assert_encoder_output_equals_hf(BERTEncoder, 'explosion-testing/bert-test', torch_device, with_torch_sdp=with_torch_sdp)
def filter_firewall_acl6_data(json):
    """Project the cleaned payload onto the supported firewall-ACL6 options.

    Keys absent from the payload or mapped to None are dropped.
    """
    option_list = ['comments', 'dstaddr', 'fragment', 'interface', 'name',
                   'policyid', 'service', 'srcaddr', 'status']
    json = remove_invalid_fields(json)
    return {attribute: json[attribute]
            for attribute in option_list
            if attribute in json and json[attribute] is not None}
('pyscf')
def test_modekill_pyscf(this_dir):
    """ModeKill must remove the imaginary mode of a distorted ethane geometry."""
    fn = (this_dir / 'ethane_shaked.xyz')
    geom = geom_loader(fn)
    calc = PySCF(basis='sto3g', xc='bp86', pal=2)
    geom.set_calculator(calc)
    # Before: the lowest Eckart-projected mode is imaginary (negative wavenumber).
    (w, v) = np.linalg.eigh(geom.eckart_projection(geom.mw_hessian))
    nus = eigval_to_wavenumber(w)
    assert (nus[0] == pytest.approx((- 266.2801), abs=0.05))
    modekill = ModeKill(geom, kill_inds=[0])
    modekill.run()
    assert modekill.converged
    # After: the lowest mode is real and positive.
    (w, v) = np.linalg.eigh(geom.eckart_projection(geom.mw_hessian))
    nus = eigval_to_wavenumber(w)
    assert (nus[0] == pytest.approx(324.4358, abs=0.05))
def main(args):
    """CLI entry point: load a Tidy3D simulation file, optionally preview it,
    submit it as a cloud job, and download the results.

    Args:
        args: argv-style list of command-line arguments (excluding argv[0]).
    """
    parser = argparse.ArgumentParser(description='Tidy3D')
    parser.add_argument('simulation', help='path to the .json or .yaml file containing the simulation')
    parser.add_argument('--out', '-o', default='simulation.hdf5', required=False, help='path to output the data')
    parser.add_argument('--inspect_sim', '-i', required=False, action='store_true', help='visualize simulation and prompt before submitting')
    # Fixed copy-pasted help text: this flag shows the estimated credit cost,
    # it does not visualize the simulation.
    parser.add_argument('--inspect_credits', '-c', required=False, action='store_true', help='display estimated credit cost and prompt before submitting')
    parser.add_argument('--task_name', '-t', default='my_task', required=False, help='set name for task')
    parser.add_argument('--folder_name', '-f', default='default', required=False, help='folder name for task')
    parser.add_argument('--test_only', action='store_true', required=False, help='load the simulation file and exit, for testing purposes only.')
    args = parser.parse_args(args)
    sim_file = args.simulation
    out_file = args.out
    inspect_sim = args.inspect_sim
    inspect_credits = args.inspect_credits
    task_name = args.task_name
    test_only = args.test_only
    folder_name = args.folder_name
    print('simulation file: ', sim_file)
    print('data output file: ', out_file)
    print('inspect simulation: ', inspect_sim)
    print('inspect credits: ', inspect_credits)
    print('task name: ', task_name)
    print('folder name: ', folder_name)
    # YAML and JSON inputs use different loaders.
    if (('.yaml' in sim_file) or ('.yml' in sim_file)):
        simulation = Simulation.from_yaml(sim_file)
    else:
        simulation = Simulation.from_file(sim_file)
    if inspect_sim:
        looks_good = input('Do you want to continue to submit? [y]/[n]')
        if (looks_good.lower() != 'y'):
            print(' - exiting')
            sys.exit()
    if test_only:
        # Parse/validate only; do not contact the server.
        return
    job = Job(simulation=simulation, task_name=task_name, folder_name=folder_name)
    if inspect_credits:
        info = job.get_info()
        print(f'''task "{task_name}" estimated to use
        {info.estFlexUnit:.2f} credits and
        {info.s3Storage:.2e} bytes of storage.''')
        looks_good = input('Do you want to continue to submit? [y]/[n]')
        if (looks_good.lower() != 'y'):
            print(' - exiting')
            sys.exit()
    job.start()
    job.monitor()
    job.download(path=out_file)
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class GroupGEMMRcrBiasTestCase(unittest.TestCase):
    ([param('group_gemm_rcr_bias_fp16', 'float16'), param('group_gemm_rcr_bias_fp32_sm80', 'float32'), param('group_gemm_rcr_bias_bf16', 'bfloat16')])
    def test_group_gemm_rcr_bias(self, test_name, dtype):
        """Grouped row-column-row GEMM with bias must match torch linear per group."""
        M = 256
        K1 = 128
        N1 = 60
        K2 = 192
        N2 = 64
        target = detect_target()
        # Two independent (X, W, B) groups with different inner dimensions.
        X1 = Tensor(shape=[M, K1], dtype=dtype, name='x1', is_input=True)
        X2 = Tensor(shape=[M, K2], dtype=dtype, name='x2', is_input=True)
        W1 = Tensor(shape=[N1, K1], dtype=dtype, name='w1', is_input=True)
        W2 = Tensor(shape=[N2, K2], dtype=dtype, name='w2', is_input=True)
        B1 = Tensor(shape=[N1], dtype=dtype, name='b1', is_input=True)
        B2 = Tensor(shape=[N2], dtype=dtype, name='b2', is_input=True)
        OP = ops.group_gemm_rcr_bias()
        (Y1, Y2) = OP(operand_groups=[[X1, W1, B1], [X2, W2, B2]])
        Y1._attrs['name'] = 'y1'
        Y1._attrs['is_output'] = True
        Y2._attrs['name'] = 'y2'
        Y2._attrs['is_output'] = True
        module = compile_model([Y1, Y2], target, './tmp', test_name)
        # Reference results via torch.nn.functional.linear (x @ w.T + b).
        X1_pt = get_random_torch_tensor(shape=(M, K1), dtype=dtype)
        X2_pt = get_random_torch_tensor(shape=(M, K2), dtype=dtype)
        W1_pt = get_random_torch_tensor(shape=(N1, K1), dtype=dtype)
        W2_pt = get_random_torch_tensor(shape=(N2, K2), dtype=dtype)
        B1_pt = get_random_torch_tensor(shape=(N1,), dtype=dtype)
        B2_pt = get_random_torch_tensor(shape=(N2,), dtype=dtype)
        Y1_pt = torch.nn.functional.linear(X1_pt, W1_pt, bias=B1_pt)
        Y2_pt = torch.nn.functional.linear(X2_pt, W2_pt, bias=B2_pt)
        inputs = {'x1': X1_pt, 'w1': W1_pt, 'b1': B1_pt, 'x2': X2_pt, 'w2': W2_pt, 'b2': B2_pt}
        y1 = torch.empty_like(Y1_pt)
        y2 = torch.empty_like(Y2_pt)
        module.run_with_tensors(inputs, {'y1': y1, 'y2': y2})
        # Loose tolerances: low-precision dtypes accumulate rounding error.
        torch.testing.assert_close(Y1_pt, y1, atol=0.1, rtol=0.1)
        torch.testing.assert_close(Y2_pt, y2, atol=0.1, rtol=0.1)
_heads([Factorial, DoubleFactorial])
def tex_Factorial(head, args, **kwargs):
    """Render Factorial / DoubleFactorial as LaTeX ('x !' or 'x !!').

    Non-atomic arguments (anything other than a symbol or a non-negative
    integer) are parenthesised to keep the postfix operator unambiguous.
    """
    assert (len(args) == 1)
    argstr = [arg.latex(**kwargs) for arg in args]
    ss = '!'
    if (head == DoubleFactorial):
        ss += '!'
    if (args[0].is_symbol() or (args[0].is_integer() and (args[0]._integer >= 0))):
        return ((argstr[0] + ' ') + ss)
    else:
        return ((('\\left(' + argstr[0]) + '\\right)') + ss)
class SalesReceiptTest(QuickbooksTestCase):
    """Integration tests for creating and voiding QuickBooks sales receipts."""

    def setUp(self):
        super(SalesReceiptTest, self).setUp()
        self.account_number = datetime.now().strftime('%d%H%M')
        self.name = 'Test Account {0}'.format(self.account_number)

    def create_sales_receipt(self, qty=1, unit_price=100.0):
        """Build a one-line sales receipt and save it via the QB client."""
        amount = qty * unit_price
        receipt = SalesReceipt()
        receipt.TotalAmt = amount
        receipt.CustomerRef = Customer.all(max_results=1, qb=self.qb_client)[0].to_ref()
        detail = SalesItemLineDetail()
        detail.ItemRef = Item.all(max_results=1, qb=self.qb_client)[0].to_ref()
        detail.Qty = qty
        detail.UnitPrice = unit_price
        detail.ServiceDate = datetime.now().strftime('%Y-%m-%d')
        line = SalesItemLine()
        line.SalesItemLineDetail = detail
        line.Amount = amount
        receipt.Line = [line]
        return receipt.save(qb=self.qb_client)

    def test_create(self):
        created = self.create_sales_receipt(qty=1, unit_price=100.0)
        fetched = SalesReceipt.get(created.Id, qb=self.qb_client)
        self.assertEqual(fetched.TotalAmt, 100.0)
        self.assertEqual(fetched.Line[0].Amount, 100.0)
        self.assertEqual(fetched.Line[0].SalesItemLineDetail['Qty'], 1)
        self.assertEqual(fetched.Line[0].SalesItemLineDetail['UnitPrice'], 100.0)

    def test_void(self):
        created = self.create_sales_receipt(qty=1, unit_price=100.0)
        fetched = SalesReceipt.get(created.Id, qb=self.qb_client)
        self.assertEqual(fetched.TotalAmt, 100.0)
        self.assertNotIn('Voided', fetched.PrivateNote)
        # Voiding zeroes the amount and annotates the private note.
        created.void(qb=self.qb_client)
        fetched = SalesReceipt.get(created.Id, qb=self.qb_client)
        self.assertEqual(fetched.TotalAmt, 0.0)
        self.assertIn('Voided', fetched.PrivateNote)
class FirewallService():
    """In-memory registry of firewall service definitions, keyed by name."""

    def __init__(self, fw):
        self._fw = fw
        self._services = {}

    def __repr__(self):
        return '%s(%r)' % (self.__class__, self._services)

    def cleanup(self):
        """Drop every registered service."""
        self._services.clear()

    def get_services(self):
        """Return all known service names in sorted order."""
        return sorted(self._services)

    def check_service(self, service):
        """Raise FirewallError(INVALID_SERVICE) if *service* is unknown."""
        if service not in self._services:
            raise FirewallError(errors.INVALID_SERVICE, service)

    def get_service(self, service):
        self.check_service(service)
        return self._services[service]

    def add_service(self, obj):
        # Services register under their own ``name`` attribute.
        self._services[obj.name] = obj

    def remove_service(self, service):
        self.check_service(service)
        del self._services[service]
def fixSmartQuotes(text):
    """Normalize escaped and Unicode "smart" quotes to plain ASCII quotes.

    Accepts a string or a (possibly nested) list of strings; lists are
    processed recursively and returned as lists.
    """
    if isinstance(text, list):
        return [fixSmartQuotes(tmp) for tmp in text]
    # Unescape backslash-escaped quotes first.
    text = text.replace("\\'", "'")
    text = text.replace('\\"', '"')
    # Map Unicode curly quotes to their ASCII equivalents.
    # NOTE: these literals had been mangled to empty strings by an encoding
    # round-trip; str.replace('', "'") would inject a quote between every
    # character. The intended U+2018/U+2019/U+201C/U+201D are restored here.
    text = text.replace('\u2018', "'")
    text = text.replace('\u2019', "'")
    text = text.replace('\u201c', '"')
    text = text.replace('\u201d', '"')
    return text
def extractListeningstoriesWordpressCom(item):
    """Build a release message for a listeningstories.wordpress.com item.

    Returns None for previews / items without chapter or volume info,
    a release message for recognised tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (tag to match, series name, translation type)
    tagmap = [
        ('Okoborehime to Entaku no Kishi', 'Okoborehime to Entaku no Kishi', 'translated'),
        ('okoborehime', 'Okoborehime to Entaku no Kishi', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ConcatenateTestCase(unittest.TestCase):
    """AIT concatenate-op tests comparing compiled output against torch.cat.

    Each helper compiles a small graph (unique .so per call via test_count)
    and checks exact equality with the PyTorch reference.
    """
    def __init__(self, *args, **kwargs):
        super(ConcatenateTestCase, self).__init__(*args, **kwargs)
        # Monotonic counter so every compiled module gets a distinct dll name.
        self.test_count = 0
    def _run_concatenate(self, *, concatenate_op, input_shapes, dim=None, input_type='float16', optimize_args=False):
        """Compile a static-shape concatenate and compare against torch.cat."""
        input_tensors_pt = [get_random_torch_tensor(shape, input_type) for (i, shape) in enumerate(input_shapes)]
        Y_pt = (torch.cat(input_tensors_pt) if (dim is None) else torch.cat(input_tensors_pt, dim))
        if optimize_args:
            target = detect_target(optimize_for_compilation_time=True)
        else:
            target = detect_target()
        inputs = [Tensor(shape=shape, dtype=input_type, name='input_{}'.format(i), is_input=True) for (i, shape) in enumerate(input_shapes)]
        Y = (concatenate_op(inputs) if (dim is None) else concatenate_op(inputs, dim))
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', 'concatenate', dll_name=dll_name)
        input_tensors_ait = {f'input_{idx}': input_tensors_pt[idx] for idx in range(len(inputs))}
        y = torch.empty_like(Y_pt)
        module.run_with_tensors(input_tensors_ait, [y])
        # Concatenate is pure data movement, so exact equality is expected.
        self.assertTrue(torch.equal(Y_pt, y))
        self.test_count += 1
    def _run_batch_concatenate(self, *, batch_sizes, concatenate_op, input_shapes, dim=0, input_type='float16', optimize_args=False):
        """Compile once with a dynamic batch dim, then verify for each batch size."""
        if optimize_args:
            target = detect_target(optimize_for_compilation_time=True)
        else:
            target = detect_target()
        BATCH_DIM_NAME = 'input_batch'
        batch_dim = shape_utils.gen_int_var_min_max(values=batch_sizes, name=BATCH_DIM_NAME)
        inputs = [Tensor(shape=[batch_dim, *shape], dtype=input_type, name='input_{}'.format(i), is_input=True) for (i, shape) in enumerate(input_shapes)]
        Y = (concatenate_op(inputs) if (dim is None) else concatenate_op(inputs, dim))
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        batch_tag = '_'.join([str(b) for b in batch_sizes])
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', f'concatenate_batched_{batch_tag}', dll_name=dll_name)
        for batch in batch_sizes:
            input_tensors_pt = [get_random_torch_tensor([batch, *shape], input_type) for (i, shape) in enumerate(input_shapes)]
            Y_pt = (torch.cat(input_tensors_pt) if (dim is None) else torch.cat(input_tensors_pt, dim))
            input_tensors_ait = {f'input_{idx}': input_tensors_pt[idx] for idx in range(len(inputs))}
            y = torch.empty_like(Y_pt)
            module.run_with_tensors(input_tensors_ait, [y])
            self.assertTrue(torch.equal(Y_pt, y))
        self.test_count += 1
    def _run_masked_concatenate(self, *, concatenate_op, input_shapes, input_masks, dim=None, input_type='float16', optimize_args=False):
        """Compile a concatenate where masked-out inputs are excluded, then
        verify only the kept (mask True) sections of the output."""
        input_tensors_pt = [get_random_torch_tensor(shape, input_type) for (i, shape) in enumerate(input_shapes)]
        y_pt = (torch.cat(input_tensors_pt) if (dim is None) else torch.cat(input_tensors_pt, dim))
        if optimize_args:
            target = detect_target(optimize_for_compilation_time=True)
        else:
            target = detect_target()
        inputs = [Tensor(shape=shape, dtype=input_type, name='input_{}'.format(i), is_input=True) for (i, shape) in enumerate(input_shapes)]
        Y = (concatenate_op(inputs) if (dim is None) else concatenate_op(inputs, dim))
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        # Rewrite the op's attrs so only mask-True inputs remain bound.
        inputs = [i for (mask, i) in zip(input_masks, inputs) if (mask is True)]
        input_accessors = [i for (mask, i) in zip(input_masks, concatenate_op._attrs['input_accessors']) if (mask is True)]
        concatenate_op._attrs['input_masks'] = input_masks
        concatenate_op._attrs['inputs'] = inputs
        concatenate_op._attrs['input_accessors'] = input_accessors
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', 'concatenate_masked', dll_name=dll_name)
        inputs = []
        for (i, x_tensor_pt) in enumerate(input_tensors_pt):
            if input_masks[i]:
                inputs.append(x_tensor_pt)
        y = torch.empty_like(y_pt)
        module.run_with_tensors(inputs, [y])
        # Split both outputs back into per-input sections and compare only
        # the sections whose inputs were actually provided.
        split_sections = [shape[dim] for shape in input_shapes]
        ys_pt = torch.split(y_pt, split_sections, dim=dim)
        ys = torch.split(y, split_sections, dim=dim)
        for (mask, pt, actual) in zip(input_masks, ys_pt, ys):
            if (mask is True):
                self.assertTrue(torch.equal(pt, actual))
        self.test_count += 1
    def test_batch_cat(self):
        self._run_batch_concatenate(batch_sizes=[1, 1], concatenate_op=ops.concatenate(), input_shapes=([1], [1]), dim=0)
        self._run_batch_concatenate(batch_sizes=[1, 1], concatenate_op=ops.concatenate(), input_shapes=([1], [1]), dim=0, optimize_args=True)
        self._run_batch_concatenate(batch_sizes=[1, 1], concatenate_op=ops.concatenate(), input_shapes=([1], [1]), dim=1)
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=0)
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=1)
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=2)
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=3)
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate(), input_shapes=([2, 1, 4], [2, 3, 4]), dim=2)
    def test_cat(self):
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1], [1]), dim=0)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1, 1], [1, 1]), dim=0)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1, 1], [1, 1]), dim=0, optimize_args=True)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1, 1], [1, 1]), dim=1)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 1], [2, 1]), dim=1)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=[[2, 3, 4]], dim=1)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=0)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=1)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=2)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [3, 3, 4], [4, 3, 4]), dim=0)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 4, 4], [2, 5, 4]), dim=1)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 6], [2, 3, 5], [2, 3, 4]), dim=2)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1024, 32, 32], [1024, 16, 32], [1024, 8, 32]), dim=1)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([12, 3, 4, 5], [3, 3, 4, 5], [7, 3, 4, 5]), dim=0)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4, 5], [2, 3, 4, 5], [2, 3, 4, 5]), dim=1)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 9, 5], [2, 3, 4, 5], [2, 3, 1, 5]), dim=2)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4, 5], [2, 3, 4, 3], [2, 3, 4, 5]), dim=3)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([12, 3, 4, 5], [3, 3, 4, 5], [7, 3, 4, 5]), dim=0)
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4, 5], [2, 3, 4, 5], [2, 3, 4, 5]), dim=1)
self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 9, 5], [2, 3, 4, 5], [2, 3, 1, 5]), dim=2)
self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4, 5], [2, 3, 4, 3], [2, 3, 4, 5]), dim=3)
self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4, 5], [2, 3, 4, 3], [2, 3, 4, 5]), dim=(- 1))
self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([3, 0], [3, 0]), dim=0)
self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([3, 0], [3, 1]), dim=1)
def test_masked_cat(self):
self._run_masked_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 2, 2], [2, 2, 2], [2, 2, 2]), input_masks=[True, True, False], dim=2, optimize_args=True)
self._run_masked_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2], [2]), input_masks=[True, False], dim=0)
self._run_masked_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3], [5, 3], [3, 3]), input_masks=[False, True, True], dim=0)
self._run_masked_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 11, 4], [2, 5, 4], [2, 2, 4]), input_masks=[True, False, True], dim=1)
self._run_masked_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1, 1, 1], [1, 1, 2], [1, 1, 4]), input_masks=[False, True, False], dim=2)
self._run_masked_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1, 1, 1], [1, 1, 2], [1, 1, 4]), input_masks=[False, True, False], dim=2, optimize_args=True)
    (('float16', 'float32', 'bfloat16'))
    # NOTE(review): the bare tuple above looks like a stripped parameterization
    # decorator (e.g. @parameterized.expand) feeding `dtype` -- confirm against VCS.
    def test_floats(self, dtype):
        """Run plain, masked and batched concatenation with each floating dtype.

        Non-CUDA backends only support float16 here, so other dtypes are skipped.
        """
        if ((detect_target().name() != 'cuda') and (dtype != 'float16')):
            self.skipTest(f'{detect_target().name()} backend is not supported for {dtype} input type')
        self._run_concatenate(concatenate_op=ops.concatenate(), input_shapes=([1, 3, 1], [2, 3, 1], [3, 3, 1]), input_type=dtype)
        self._run_masked_concatenate(concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 8], [2, 3, 16]), input_masks=[False, True, False], dim=2, input_type=dtype)
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate(), input_shapes=([2, 3, 4], [2, 3, 2]), dim=3, input_type=dtype)
def _test_concatenate_shape(self, in_shapes, out_shape, dim):
Xs = [Tensor(shape=in_shape, name=f'input_{idx}', is_input=True) for (idx, in_shape) in enumerate(in_shapes)]
Y = ops.concatenate()(Xs, dim)
y_shape = Y.shape()
self.assertEqual(len(y_shape), len(out_shape))
for (y, o) in zip(y_shape, out_shape):
self.assertEqual(y, o)
    def test_concatenate_shape_var(self):
        """Concatenating along a dynamic dim sums the IntVars' symbolic values."""
        var1 = IntVar(values=[1, 2], name='var1')
        var2 = IntVar(values=[3, 5], name='var2')
        var3 = IntVar(values=[7, 11], name='var3')
        sym1 = var1._attrs['symbolic_value']
        sym2 = var2._attrs['symbolic_value']
        sym3 = var3._attrs['symbolic_value']
        in_shapes = [[var, 2, 3] for var in [var1, var2, var3]]
        # Expected dim 0: value range [1+3+7, 2+5+11] with the symbolic sum of all vars.
        ovar1 = IntVar(values=[11, 18], symbolic_value=((sym1 + sym2) + sym3))
        self._test_concatenate_shape(in_shapes, [ovar1, 2, 3], 0)
        # Negative dim (-3 == 0 for rank-3 shapes) must resolve identically.
        self._test_concatenate_shape(in_shapes, [ovar1, 2, 3], (- 3))
    def test_concatenate_shape_mix(self):
        """Mixing dynamic (IntVar) and static (IntImm) dims folds the immediates
        into the symbolic sum of the concatenated dimension."""
        var1 = IntVar(values=[1, 2], name='var1')
        var2 = IntVar(values=[3, 5], name='var2')
        imm1 = IntImm(17)
        imm2 = IntImm(19)
        sym1 = var1._attrs['symbolic_value']
        sym2 = var2._attrs['symbolic_value']
        in_shapes = [[var1, 2, 3], [imm1, 2, 3], [imm2, 2, 3], [var2, 2, 3]]
        # Expected dim 0: [1+17+19+3, 2+17+19+5] with constants added onto the symbols.
        ovar1 = IntVar(values=[40, 43], symbolic_value=(((sym1 + sym2) + 17) + 19))
        self._test_concatenate_shape(in_shapes, [ovar1, 2, 3], 0)
    def test_concatenate_shape_compatible(self):
        """Inputs sharing the same symbolic batch dim keep it after concatenation."""
        var1 = IntVar(values=[1, 2])
        sym1 = var1._attrs['symbolic_value']
        in_shapes = [[var1, 2, 3], [var1, 2, 3]]
        self._test_concatenate_shape(in_shapes, [var1, 2, 6], (- 1))
        # A distinct IntVar object carrying an identical symbolic value must be
        # treated as compatible with the original var.
        dup_var1 = IntVar(values=[1, 2], symbolic_value=sym1)
        in_shapes = [[var1, 2, 3], [dup_var1, 2, 3]]
        self._test_concatenate_shape(in_shapes, [var1, 2, 6], (- 1))
def main():
    """Ansible module entry point for the FortiOS firewall_address6 resource.

    Builds the argument spec from the generated versioned schema, connects via
    the persistent httpapi socket, runs a schema-vs-device version check, then
    performs the requested present/absent operation and exits with the result.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # 'name' is the mkey (table primary key); it becomes a required sub-option below.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_address6': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy each schema-derived attribute into the module's nested options.
    for attribute_name in module_spec['options']:
        fields['firewall_address6']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_address6']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Explicitly disable logging when the option was not supplied.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_address6')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos, module.check_mode)
    else:
        # No persistent connection: the httpapi connection plugin is not in use.
        module.fail_json(**FAIL_SOCKET_MSG)
    # Version mismatch is a warning on success and is attached to failures.
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class flow_stats_reply(stats_reply):
    """OpenFlow (wire version 5) flow-stats multipart reply, LOXI-generated style.

    Wire layout: version/type header, 16-bit length (backpatched in pack()),
    xid, stats_type, flags, 4 pad bytes, then a list of flow_stats_entry.

    NOTE(review): pack() appends '\\x00' * 4 as str and joins with ''.join --
    the Python 2 bytes-as-str convention; confirm the target interpreter
    before reusing under Python 3.
    """
    version = 5
    type = 19
    stats_type = 1
    def __init__(self, xid=None, flags=None, entries=None):
        # Each field falls back to its "unset"/zero value when not supplied.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize the message to its wire form; length is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        # Backpatch the length placeholder written at index 2 above.
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    # NOTE(review): no `self` parameter -- in LOXI-generated code this is a
    # @staticmethod; the decorator appears to have been stripped. Confirm.
    def unpack(reader):
        """Deserialize a flow_stats_reply, asserting the fixed header fields."""
        obj = flow_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain all further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 1)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.flow_stats_entry.unpack)
        return obj
    # NOTE(review): __eq__ without __hash__ makes instances unhashable on
    # Python 3; harmless for message objects but worth confirming.
    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump of the message via the pretty-printer q."""
        q.text('flow_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
                q.breakable()
        q.text('}')
class NetworkManager():
    """Mirrors the host's network interfaces into Settings.NetworkDevices and
    tracks wifi client / access-point fallback state.

    The class-level *_file_name attributes are placeholders that OS-specific
    subclasses/configuration override; empty means "not managed here".
    """
    interfaces_file_name = ''
    resolvconf_file_name = ''
    dhcpcd_file_name = ''

    def __init__(self):
        self.wpaconfig = ''
        # Primary and fallback wifi client credentials.
        self.WifiSSID = ''
        self.WifiKey = ''
        self.WifiSSID2 = ''
        self.WifiKey2 = ''
        # Access-point fallback mode state; -1 means inactive/disabled.
        self.APMode = (- 1)
        self.APModeDev = 99
        self.APModeTime = 30
        self.APStopTime = (- 1)
        self.WifiAPKey = 'configrpi'
        self.WifiAPChannel = 1
        self.WifiDevWatch = (- 1)
        self.WifiDevNum = (- 1)
        self.dhcpcd_inuse = False

    def networkinit(self):
        """Refresh Settings.NetworkDevices from the live interface information."""
        ni = parseifconfig(getipinfos())
        realdevs = 0
        if ni:
            for info in ni:
                if (info['mac'] == ''):
                    # Skip pseudo interfaces that have no hardware address.
                    continue
                if (len(Settings.NetworkDevices) <= realdevs):
                    Settings.NetworkDevices.append(NetworkDevice())
                dev = Settings.NetworkDevices[realdevs]
                dev.ip = info['ip']
                dev.mask = info['mask']
                dev.devicename = info['name']
                dev.connected = (int(info['active']) != 0)
                dev.lastconnectiontest = time.time()
                dev.mac = info['mac']
                if dev.connected:
                    dev.connectiontype = (2 if info['wireless'] else 1)
                else:
                    dev.connectiontype = 0
                realdevs += 1
        for dev in Settings.NetworkDevices:
            dev.dns = dev.dns.strip()
            # Fill the gateway from the routing table when we know the IP but
            # no gateway was recorded yet.
            if ((dev.gw == '') and (dev.ip != '')):
                dev.gw = getgw(dev.devicename)

    def setAPconf(self, startup=False):
        """Configure access-point mode; the base implementation is a no-op."""
        return False

    def getdevicenames(self):
        """Return the device names of all known network devices."""
        return [dev.devicename for dev in Settings.NetworkDevices]

    def getfirstwirelessdev(self):
        """Return the name of the first wireless device (primary first), or False."""
        try:
            for devindex in (self.getprimarydevice(), self.getsecondarydevice()):
                # getsecondarydevice() returns -1 when absent; guard so that a
                # negative index does not silently pick the last list element.
                if (devindex >= 0) and Settings.NetworkDevices[devindex].iswireless():
                    return Settings.NetworkDevices[devindex].devicename
        except Exception:
            # Empty device list or incomplete device records.
            return False
        return False

    def getfirstwirelessdevnum(self):
        """Return the index of the first wireless device (primary first), or -1."""
        try:
            for devindex in (self.getprimarydevice(), self.getsecondarydevice()):
                if (devindex >= 0) and Settings.NetworkDevices[devindex].iswireless():
                    return devindex
        except Exception:
            return (- 1)
        return (- 1)

    def getprimarydevice(self):
        """Index of the device with netdevorder == 0 (primary); 0 if none marked."""
        for n, dev in enumerate(Settings.NetworkDevices):
            if (dev.netdevorder == 0):
                return n
        return 0

    def getsecondarydevice(self):
        """Index of the first device with netdevorder > 0; -1 if none."""
        for n, dev in enumerate(Settings.NetworkDevices):
            if (dev.netdevorder > 0):
                return n
        return (- 1)

    def setdeviceorder(self, primary, secondary):
        """Mark device indices as primary (0), secondary (1) or unordered (-1)."""
        for n in range(len(Settings.NetworkDevices)):
            if (n == primary):
                Settings.NetworkDevices[n].netdevorder = 0
            elif (n == secondary):
                Settings.NetworkDevices[n].netdevorder = 1
            else:
                Settings.NetworkDevices[n].netdevorder = (- 1)

    def saveconfig(self):
        """Persist network configuration; nothing to do in the base class."""
        pass
def get_busybox_components(file_object: 'FileObject') -> list[str]:
    """Extract the applet name table embedded in a BusyBox binary.

    The table starts at the first occurrence of the marker b'[\\x00[[\\x00'
    (the '[' and '[[' applets) and runs to the first double-NUL terminator
    after it; entries are NUL-separated ASCII names.
    """
    raw = Path(file_object.file_path).read_bytes()
    marker = raw.index(b'[\x00[[\x00')
    terminator = raw.index(b'\x00\x00', (marker + 5))
    name_table = raw[marker:(terminator + 2)]
    return [entry.decode('ascii') for entry in name_table.split(b'\x00') if entry]
def _lookup_deleted_award_keys(client: Elasticsearch, lookup_key: str, value_list: list, config: dict, index: Optional[str]=None, lookup_chunk_size: int=50000) -> list:
    """Return award keys of ES documents whose ``lookup_key`` matches any value in ``value_list``.

    Args:
        client: Elasticsearch client used for the term lookups.
        lookup_key: Document field to filter on; must be an allowed key field type.
        value_list: Values to match, queried in chunks of ``lookup_chunk_size``.
        config: Provides ``query_alias_prefix`` and ``max_query_size``.
        index: Index (or pattern) to search; defaults to ``{query_alias_prefix}-*``.
        lookup_chunk_size: Values per terms query; bounded by the ES terms-query
            limit (65,536) and the index's ``max_result_window``.

    Raises:
        RuntimeError: If the key field is unusable or the chunk size exceeds a limit.
    """
    if (index is None):
        index = f"{config['query_alias_prefix']}-*"
    if (not _is_allowed_key_field_type(client, lookup_key, index)):
        msg = f'Cannot perform lookups in index "{index}" with key field "{lookup_key}" because its type is not one of the allowed field types, or the field was not found in that index.'
        logger.error(format_log(msg=msg, action='Delete'))
        raise RuntimeError(msg)
    if (lookup_chunk_size > 65536):
        # Hard Elasticsearch limit on terms in a single terms filter query.
        msg = f'{lookup_chunk_size} is greater than 65,536, which is the max number of terms that can be added to an ES terms filter query'
        logger.error(format_log(msg=msg, action='Delete'))
        raise RuntimeError(msg)
    if (lookup_chunk_size > config['max_query_size']):
        # Message fixed: the original read "is greater {max}", missing "than".
        msg = f"{lookup_chunk_size} is greater than {config['max_query_size']}, which is the max number of query results returnable from this index. Use a smaller chunk or increase max_result_window for this index."
        logger.error(format_log(msg=msg, action='Delete'))
        raise RuntimeError(msg)
    award_key_list = []
    for chunk_of_values in chunks(value_list, lookup_chunk_size):
        q = Search(using=client, index=index).filter('terms', **{lookup_key: chunk_of_values})
        q.update_from_dict({'size': config['max_query_size']})
        response = q.execute()
        if (response['hits']['total']['value'] != 0):
            award_key_list += [x['_source'][ES_AWARDS_UNIQUE_KEY_FIELD] for x in response['hits']['hits']]
    return award_key_list
class ForwarderParameter(parser.NamedParameter, parser.ParameterWithSourceEquivalent):
    """Undocumented alias parameter that forwards argument handling to a "real"
    parameter operating on a sub bound-arguments object owned by ``parent``.

    Construction also hijacks the real parameter's ``redispatch_short_arg`` so
    short-option redispatch is routed back through this forwarder.
    """
    def __init__(self, real, parent, **kwargs):
        super(ForwarderParameter, self).__init__(aliases=real.aliases, argument_name=real.argument_name, undocumented=True, **kwargs)
        self.real = real
        self.parent = parent
        # Monkey-patch: keep the original bound method, then redirect the real
        # parameter's short-arg redispatch to ours.
        self.orig_redispatch = real.redispatch_short_arg
        real.redispatch_short_arg = self.redispatch_short_arg
    def get_fba(self, ba):
        # The forwarded bound-arguments: the sub object held by the parent's meta.
        return self.parent.get_meta(ba).get_sub()
    def read_argument(self, ba, i):
        # Delegate reading to the real parameter against the forwarded args.
        self.real.read_argument(self.get_fba(ba), i)
    def apply_generic_flags(self, ba):
        self.real.apply_generic_flags(self.get_fba(ba))
    def redispatch_short_arg(self, rest, ba, i):
        # NOTE(review): uses ba.real (not get_fba) -- presumably the outer
        # bound-arguments wrapper; confirm against the parser module.
        self.orig_redispatch(rest, ba.real, i)
def test_median_imputation_when_user_enters_single_variables(df_na):
    """MeanMedianImputer with the median strategy on one user-selected column.

    Checks the fitted state (median of 'Age' learned as 23.0) and that only the
    requested column is imputed in the transformed frame.
    """
    imputer = MeanMedianImputer(imputation_method='median', variables=['Age'])
    X_transformed = imputer.fit_transform(df_na)
    # Expected result: 'Age' NaNs replaced by the column median from the fixture.
    X_reference = df_na.copy()
    X_reference['Age'] = X_reference['Age'].fillna(23.0)
    assert (imputer.imputation_method == 'median')
    assert (imputer.variables == ['Age'])
    assert (imputer.n_features_in_ == 6)
    assert (imputer.imputer_dict_ == {'Age': 23.0})
    assert (X_transformed['Age'].isnull().sum() == 0)
    pd.testing.assert_frame_equal(X_transformed, X_reference)
class TlsSubscriptionResponseData(ModelNormal):
    """Generated OpenAPI model for a TLS subscription response payload.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines look
    like stripped decorators (`@cached_property` and
    `@convert_js_args_to_python_args` in this generator's usual output) --
    confirm against the generated original before relying on this class.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Declared property name -> accepted-types mapping."""
        lazy_import()
        return {'id': (str,), 'attributes': (TlsSubscriptionResponseAttributes,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    attribute_map = {'id': 'id', 'attributes': 'attributes'}
    read_only_vars = {'id'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Construct an instance from deserialized API data (read-only vars allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Drop unknown keys when configured to do so and no free-form
            # additional properties are allowed.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct an instance from user-supplied kwargs; read-only vars rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only come from API data, not user input.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def _access_list_rlp_to_rpc_structure(access_list: Sequence) -> Sequence:
    """Convert an rlp-structured access list to the JSON-RPC dict structure.

    Each (address, storage_keys) entry becomes
    ``{'address': ..., 'storageKeys': ...}``; the result is returned as a tuple.
    """
    if (not is_rlp_structured_access_list(access_list)):
        raise ValueError('provided object not formatted as rlp-structured access list')
    return tuple({'address': entry[0], 'storageKeys': entry[1]} for entry in access_list)
class SlaveChannel(Channel, ABC):
    """Abstract base class for slave-side channels.

    Subclasses declare the message types they support and may expose "extra"
    command methods (callables flagged with an ``extra_fn`` attribute); the
    chat/picture lookups raise NotImplementedError until overridden.
    """
    supported_message_types: Set[MsgType] = set()
    suggested_reactions: Optional[Sequence[ReactionName]] = None

    def get_extra_functions(self) -> Dict[ExtraCommandName, Callable]:
        """Map command names to every callable attribute flagged as an extra function."""
        members = ((attr_name, getattr(self, attr_name)) for attr_name in dir(self))
        return {
            ExtraCommandName(attr_name): member
            for attr_name, member in members
            if callable(member) and getattr(member, 'extra_fn', False)
        }

    def get_chat_picture(self, chat: 'Chat') -> BinaryIO:
        """Fetch a chat's avatar; not supported by the base class."""
        raise NotImplementedError()

    def get_chat_member_picture(self, chat_member: 'ChatMember') -> BinaryIO:
        """Fetch a chat member's avatar; not supported by the base class."""
        raise NotImplementedError()

    def get_chat(self, chat_uid: ChatID) -> 'Chat':
        """Look up a single chat by ID; not supported by the base class."""
        raise NotImplementedError()

    def get_chats(self) -> Collection['Chat']:
        """Enumerate all chats; not supported by the base class."""
        raise NotImplementedError()
def test_liveness_analysis_graph_dead_code(construct_graph_dead_code, variable_u, aliased_variables_x, aliased_variables_y, aliased_variables_z):
    """Liveness analysis on the dead-code CFG fixture: verify the exact live-in
    and live-out variable sets computed for every node."""
    (nodes, cfg) = construct_graph_dead_code
    liveness_analysis = LivenessAnalysis(cfg)
    # Live-in sets per node (0-6), built from the fixture's SSA variable versions.
    assert ((liveness_analysis.live_in_of(nodes[0]) == {aliased_variables_x[1], aliased_variables_y[1], aliased_variables_z[1]}) and (liveness_analysis.live_in_of(nodes[1]) == {aliased_variables_x[2], aliased_variables_y[1], aliased_variables_z[1]}) and (liveness_analysis.live_in_of(nodes[2]) == {variable_u[2], aliased_variables_x[2], aliased_variables_y[1], aliased_variables_z[1]}) and (liveness_analysis.live_in_of(nodes[3]) == {variable_u[3], aliased_variables_x[2], aliased_variables_z[1]}) and (liveness_analysis.live_in_of(nodes[4]) == {variable_u[3], aliased_variables_x[2], aliased_variables_z[1]}) and (liveness_analysis.live_in_of(nodes[5]) == {variable_u[3], variable_u[6], variable_u[7], aliased_variables_x[2]}) and (liveness_analysis.live_in_of(nodes[6]) == {variable_u[8], variable_u[9], variable_u[10], aliased_variables_x[2]}))
    # Live-out sets per node; the exit node (6) must have nothing live.
    assert ((liveness_analysis.live_out_of(nodes[0]) == {aliased_variables_x[2], aliased_variables_y[1], aliased_variables_z[1]}) and (liveness_analysis.live_out_of(nodes[1]) == {variable_u[1], aliased_variables_x[2], aliased_variables_y[1], aliased_variables_z[1]}) and (liveness_analysis.live_out_of(nodes[2]) == {variable_u[3], aliased_variables_x[2], aliased_variables_z[1]}) and (liveness_analysis.live_out_of(nodes[3]) == {variable_u[3], variable_u[4], aliased_variables_x[2]}) and (liveness_analysis.live_out_of(nodes[4]) == {variable_u[3], variable_u[5], aliased_variables_x[2], aliased_variables_z[1]}) and (liveness_analysis.live_out_of(nodes[5]) == {variable_u[3], variable_u[6], variable_u[7], aliased_variables_x[2]}) and (liveness_analysis.live_out_of(nodes[6]) == set()))
class FLTrainer(abc.ABC):
    """Abstract base class for federated-learning trainers.

    Owns the CUDA transfer manager, timeout simulator and communication
    channel, and provides the shared evaluation/testing/reporting machinery;
    subclasses implement `train` and `global_model`.

    NOTE(review): `_set_defaults_in_cfg` takes `cls` but has no @classmethod,
    and the bare `_grad()` line before `_calc_eval_metrics_on_clients` looks
    like a stripped @torch.no_grad() decorator -- confirm against the original.
    """
    logger: logging.Logger = Logger.get_logger(__name__)
    def __init__(self, *, model: IFLModel, cuda_enabled: bool=False, **kwargs):
        init_self_cfg(self, component_class=__class__, config_class=FLTrainerConfig, **kwargs)
        assert (self.cfg.eval_epoch_frequency <= self.cfg.epochs), 'We expect to do at least one eval. However, eval_epoch_frequency:'
        # Move the model (and later, batches) to the right device lazily.
        self._cuda_state_manager = CudaTransferMinimizer(cuda_enabled)
        self._cuda_state_manager.on_trainer_init(model)
        self.cuda_enabled = cuda_enabled
        self._timeout_simulator = instantiate(self.cfg.timeout_simulator)
        self.channel = instantiate(self.cfg.channel)
        self.data_provider = None
        self.num_total_users: int = (- 1)
        self.clients = {}
        self.eval_clients = {}
    def _set_defaults_in_cfg(cls, cfg):
        # Fill in component configs that were left unset (Hydra MISSING).
        if OmegaConf.is_missing(cfg.timeout_simulator, '_target_'):
            cfg.timeout_simulator = NeverTimeOutSimulatorConfig()
        if OmegaConf.is_missing(cfg.client, '_target_'):
            cfg.client = ClientConfig()
        if OmegaConf.is_missing(cfg.channel, '_target_'):
            cfg.channel = FLChannelConfig()
    def train(self, data_provider: IFLDataProvider, metrics_reporter: IFLMetricsReporter, num_total_users: int, distributed_world_size: int=1, rank: int=0) -> Tuple[(IFLModel, Any)]:
        """Run federated training; implemented by subclasses."""
        pass
    def test(self, data_provider: IFLDataProvider, metrics_reporter: IFLMetricsReporter) -> Any:
        """Evaluate the current global model on the provider's test users."""
        return self._test(timeline=Timeline(global_round=1), data_provider=data_provider, model=self.global_model(), metrics_reporter=metrics_reporter)
    def global_model(self) -> IFLModel:
        """Return the current global model; implemented by subclasses."""
        pass
    def _maybe_run_evaluation(self, timeline: Timeline, data_provider, metrics_reporter: IFLMetricsReporter, best_metric, best_model_state):
        """Run eval when due per eval_epoch_frequency; track the best model state."""
        if (not self.cfg.do_eval):
            return (best_metric, best_model_state)
        if (not timeline.tick(self.cfg.eval_epoch_frequency)):
            return (best_metric, best_model_state)
        personalized_metrics = {}
        if self.cfg.personalized:
            personalized_metrics = self._evaluate_personalized_eval_users(timeline=timeline, data_provider=data_provider, global_model=self.global_model(), metrics_reporter=metrics_reporter)
        (eval_metric, eval_metric_better_than_prev) = self._evaluate(timeline=timeline, data_provider=data_provider, global_model=self.global_model(), metrics_reporter=metrics_reporter)
        if (self.cfg.always_keep_trained_model or eval_metric_better_than_prev):
            best_metric = eval_metric
            # Deep-copy so later training rounds cannot mutate the saved state.
            model_state = self.global_model().fl_get_module().state_dict()
            best_model_state = deepcopy(model_state)
        sys.stdout.flush()
        return (best_metric, best_model_state)
    def _print_training_stats(self, timeline: Timeline) -> None:
        print(f'Train finished Global Round: {timeline.global_round_num()}')
    def _report_train_metrics(self, model: IFLModel, timeline: Timeline, metrics_reporter: Optional[IFLMetricsReporter]=None, extra_metrics: Optional[List[Metric]]=None) -> None:
        """Report training metrics when the reporting cadence is due."""
        if (self.cfg.report_train_metrics and (metrics_reporter is not None) and timeline.tick((1.0 / self.cfg.train_metrics_reported_per_epoch))):
            self._print_training_stats(timeline)
            metrics_reporter.report_metrics(model=model, reset=True, stage=TrainingStage.TRAINING, timeline=timeline, epoch=timeline.global_round_num(), print_to_channels=True, extra_metrics=extra_metrics)
    _grad()
    def _calc_eval_metrics_on_clients(self, model: IFLModel, clients_data: Iterable[IFLUserData], data_split: str, metrics_reporter: IFLMetricsReporter) -> None:
        """Accumulate per-batch eval metrics for the named data split of each client."""
        for client_data in clients_data:
            for batch in getattr(client_data, f'{data_split}_data')():
                batch_metrics = model.get_eval_metrics(batch)
                metrics_reporter.add_batch_metrics(batch_metrics)
    def _calc_post_epoch_communication_metrics(self, timeline: Timeline, metrics_reporter: Optional[IFLMetricsReporter]):
        """Report channel byte counters when enabled, then reset them."""
        if ((metrics_reporter is not None) and self.channel.cfg.report_communication_metrics and timeline.tick((1.0 / self.cfg.train_metrics_reported_per_epoch))):
            extra_metrics = [Metric(('Client to Server Bytes Sent' if (name == ChannelDirection.CLIENT_TO_SERVER) else 'Server to Client Bytes Sent'), tracker.mean()) for (name, tracker) in self.channel.stats_collector.get_channel_stats().items()]
            metrics_reporter.report_metrics(model=None, reset=False, stage=TrainingStage.TRAINING, timeline=timeline, epoch=timeline.global_round_num(), print_to_channels=True, extra_metrics=extra_metrics)
            self.channel.stats_collector.reset_channel_stats()
    def _evaluate_personalized_eval_users(self, timeline: Timeline, data_provider, global_model: IFLModel, metrics_reporter: IFLMetricsReporter) -> Any:
        """Fine-tune a copy of the global model per eval user and report metrics."""
        print(f'{timeline}: Evaluate global model w/ finetune on validation data of eval users')
        (personalized_metrics, _) = FineTuner.fine_tune_and_evaluate(data=self.data_provider.eval_users(), global_model=global_model, client_config=self.cfg.client, metrics_reporter=metrics_reporter, cuda_state_manager=self._cuda_state_manager, training_stage=TrainingStage.PERSONALIZED_EVAL, timeline=timeline, epochs=self.cfg.personalized_epochs)
        return personalized_metrics
    def _evaluate(self, timeline: Timeline, data_provider: IFLDataProvider, global_model: IFLModel, metrics_reporter: IFLMetricsReporter) -> Tuple[(Any, bool)]:
        """Evaluate the global model on all eval users; returns (metrics, is_best)."""
        print(f'{timeline}: Evaluates global model on all data of eval users')
        self._cuda_state_manager.before_train_or_eval(global_model)
        global_model.fl_get_module().eval()
        self._calc_eval_metrics_on_clients(model=global_model, clients_data=data_provider.eval_users(), data_split='eval', metrics_reporter=metrics_reporter)
        (metrics, found_best_model) = metrics_reporter.report_metrics(model=global_model, reset=True, stage=TrainingStage.EVAL, timeline=timeline, epoch=timeline.global_round_num(), print_to_channels=True)
        self._cuda_state_manager.after_train_or_eval(global_model)
        return (metrics, found_best_model)
    def _test(self, timeline: Timeline, data_provider: IFLDataProvider, model: IFLModel, metrics_reporter: IFLMetricsReporter) -> Any:
        """Evaluate the model on test users, optionally with personalization first."""
        personalized_metrics = {}
        if self.cfg.personalized:
            (personalized_metrics, _) = FineTuner.fine_tune_and_evaluate(data=self.data_provider.test_users(), global_model=model, client_config=self.cfg.client, metrics_reporter=metrics_reporter, cuda_state_manager=self._cuda_state_manager, training_stage=TrainingStage.PERSONALIZED_TEST, timeline=timeline, epochs=self.cfg.personalized_epochs)
        print(f'Running {timeline} for {TrainingStage.TEST.name.title()}')
        self._cuda_state_manager.before_train_or_eval(model)
        model.fl_get_module().eval()
        self._calc_eval_metrics_on_clients(model=model, clients_data=data_provider.test_users(), data_split='eval', metrics_reporter=metrics_reporter)
        (metrics, _) = metrics_reporter.report_metrics(model=model, reset=True, stage=TrainingStage.TEST, timeline=timeline, epoch=timeline.global_round_num(), print_to_channels=True)
        self._cuda_state_manager.after_train_or_eval(model)
        return metrics
class DateTimeField(Field):
    """Serializer field for datetime values.

    Parses strings (ISO 8601 or configured input formats) and datetime objects
    into timezone-enforced datetimes, and renders them back using the
    configured output format.
    """
    default_error_messages = {'invalid': _('Datetime has wrong format. Use one of these formats instead: {format}.'), 'date': _('Expected a datetime but got a date.'), 'make_aware': _('Invalid datetime for the timezone "{timezone}".'), 'overflow': _('Datetime value out of range.')}
    datetime_parser = datetime.datetime.strptime
    def __init__(self, format=empty, input_formats=None, default_timezone=None, **kwargs):
        # Only set attributes when explicitly provided so class-level /
        # settings-level defaults keep working via getattr/hasattr checks below.
        if (format is not empty):
            self.format = format
        if (input_formats is not None):
            self.input_formats = input_formats
        if (default_timezone is not None):
            self.timezone = default_timezone
        super().__init__(**kwargs)
    def enforce_timezone(self, value):
        """Coerce `value` to the field timezone (aware) or strip tzinfo (naive).

        When `self.default_timezone` is `None`, always return naive datetimes.
        When `self.default_timezone` is not `None`, always return aware datetimes.
        """
        field_timezone = (self.timezone if hasattr(self, 'timezone') else self.default_timezone())
        if (field_timezone is not None):
            if timezone.is_aware(value):
                try:
                    return value.astimezone(field_timezone)
                except OverflowError:
                    self.fail('overflow')
            try:
                dt = timezone.make_aware(value, field_timezone)
                # When the resulting datetime is invalid (e.g. DST gap), raise
                # a validation error rather than returning it.
                if (not valid_datetime(dt)):
                    self.fail('make_aware', timezone=field_timezone)
                return dt
            except Exception as e:
                # pytz raises its own InvalidTimeError for ambiguous/missing times.
                if (pytz and isinstance(e, pytz.exceptions.InvalidTimeError)):
                    self.fail('make_aware', timezone=field_timezone)
                raise e
        elif ((field_timezone is None) and timezone.is_aware(value)):
            return timezone.make_naive(value, datetime.timezone.utc)
        return value
    def default_timezone(self):
        return (timezone.get_current_timezone() if settings.USE_TZ else None)
    def to_internal_value(self, value):
        """Parse input into a datetime, trying each configured input format."""
        input_formats = getattr(self, 'input_formats', api_settings.DATETIME_INPUT_FORMATS)
        # A bare date is explicitly rejected (datetime is a date subclass).
        if (isinstance(value, datetime.date) and (not isinstance(value, datetime.datetime))):
            self.fail('date')
        if isinstance(value, datetime.datetime):
            return self.enforce_timezone(value)
        for input_format in input_formats:
            with contextlib.suppress(ValueError, TypeError):
                if (input_format.lower() == ISO_8601):
                    parsed = parse_datetime(value)
                    if (parsed is not None):
                        return self.enforce_timezone(parsed)
                parsed = self.datetime_parser(value, input_format)
                return self.enforce_timezone(parsed)
        humanized_format = humanize_datetime.datetime_formats(input_formats)
        self.fail('invalid', format=humanized_format)
    def to_representation(self, value):
        """Render a datetime using the output format; pass strings through as-is."""
        if (not value):
            return None
        output_format = getattr(self, 'format', api_settings.DATETIME_FORMAT)
        if ((output_format is None) or isinstance(value, str)):
            return value
        value = self.enforce_timezone(value)
        if (output_format.lower() == ISO_8601):
            value = value.isoformat()
            # Normalize the UTC offset suffix to the ISO 'Z' designator.
            if value.endswith('+00:00'):
                value = (value[:(- 6)] + 'Z')
            return value
        return value.strftime(output_format)
class MeanIoU():
    """Soft mean intersection-over-union between two (possibly batched) masks."""

    def __init__(self):
        # Small constant keeps the ratio finite when both masks are empty.
        self.epsilon = 1e-10

    def __call__(self, tensor1, tensor2):
        """Return the mean IoU, using the elementwise product as intersection.

        1-D inputs are reduced to a single scalar; higher-rank inputs are
        squeezed on axis 3 and reduced over axes 2 and 1 before averaging.
        """
        both_one_dim = (len(tensor1.shape) == 1) and (len(tensor2.shape) == 1)
        if both_one_dim:
            intersection = torch.sum(torch.squeeze(tensor1 * tensor2))
            combined = torch.sum(torch.squeeze(tensor1 + tensor2))
        else:
            intersection = torch.sum(torch.sum(torch.squeeze(tensor1 * tensor2, axis=3), axis=2), axis=1)
            combined = torch.sum(torch.sum(torch.squeeze(tensor1 + tensor2, axis=3), axis=2), axis=1)
        union = combined - intersection
        return torch.mean((intersection + self.epsilon) / (union + self.epsilon))
class PaymentTest(QuickbooksTestCase):
    """Integration test: create a check-paid RefundReceipt via the QuickBooks API."""

    def setUp(self):
        super(PaymentTest, self).setUp()
        # Time-derived suffix keeps repeated runs from colliding on names.
        self.account_number = datetime.now().strftime('%d%H%M')
        self.name = 'Test Account {0}'.format(self.account_number)

    def test_create(self):
        # Assemble a refund receipt paid by check.
        refund_receipt = RefundReceipt()
        refund_receipt.DocNumber = 'DocNum123'
        refund_receipt.TotalAmt = 100
        refund_receipt.Balance = 100
        refund_receipt.PrivateNote = 'Private Note'
        refund_receipt.PaymentType = 'Check'
        memo = CustomerMemo()
        memo.value = 'Customer Memo'
        refund_receipt.CustomerMemo = memo
        refund_receipt.CheckPayment = RefundReceiptCheckPayment()
        refund_receipt.CheckPayment.CheckNum = '1001'
        refund_receipt.CheckPayment.NameOnAcct = 'John Smith'
        refund_receipt.CheckPayment.AcctNum = ''
        refund_receipt.CheckPayment.BankName = 'Bank'
        # Attach one sales line referencing any existing item.
        item = Item.all(max_results=1, qb=self.qb_client)[0]
        line = DetailLine()
        line.DetailType = 'SalesItemLineDetail'
        line.Amount = 200
        line.SalesItemLineDetail = SalesItemLineDetail()
        line.SalesItemLineDetail.ItemRef = item.to_ref()
        refund_receipt.Line.append(line)
        # Deposit against the sandbox 'checking' account.
        account = Account.where("Name = 'checking'", max_results=1, qb=self.qb_client)[0]
        refund_receipt.DepositToAccountRef = account.to_ref()
        refund_receipt.save(qb=self.qb_client)
        # Round-trip: re-fetch and verify key fields survived the save.
        query_refund_receipt = RefundReceipt.get(refund_receipt.Id, qb=self.qb_client)
        self.assertEqual(query_refund_receipt.DocNumber, refund_receipt.DocNumber)
        self.assertEqual(query_refund_receipt.Line[0].Amount, 200)
        self.assertEqual(refund_receipt.DepositToAccountRef.value, account.Id)
class ParameterRangeOneOf(_common.FlyteIdlEntity):
    """Holds exactly one of the three hyperparameter-range kinds."""
    # NOTE(review): the parameterless accessors below read like @property
    # methods and from_flyte_idl uses `cls` like a @classmethod; decorators
    # appear to be missing from this listing — confirm against the original.

    def __init__(self, param: Union[(IntegerParameterRange, ContinuousParameterRange, CategoricalParameterRange)]):
        # Exactly one slot is populated, chosen by the concrete type of `param`.
        self._integer_parameter_range = (param if isinstance(param, IntegerParameterRange) else None)
        self._continuous_parameter_range = (param if isinstance(param, ContinuousParameterRange) else None)
        self._categorical_parameter_range = (param if isinstance(param, CategoricalParameterRange) else None)

    def integer_parameter_range(self) -> Optional[IntegerParameterRange]:
        if self._integer_parameter_range:
            return self._integer_parameter_range
        return None

    def continuous_parameter_range(self) -> Optional[ContinuousParameterRange]:
        if self._continuous_parameter_range:
            return self._continuous_parameter_range
        return None

    def categorical_parameter_range(self) -> Optional[CategoricalParameterRange]:
        if self._categorical_parameter_range:
            return self._categorical_parameter_range
        return None

    def to_flyte_idl(self) -> _idl_parameter_ranges.ParameterRangeOneOf:
        """Serialize to the IDL message; unpopulated kinds are sent as None."""
        return _idl_parameter_ranges.ParameterRangeOneOf(integer_parameter_range=(self.integer_parameter_range.to_flyte_idl() if self.integer_parameter_range else None), continuous_parameter_range=(self.continuous_parameter_range.to_flyte_idl() if self.continuous_parameter_range else None), categorical_parameter_range=(self.categorical_parameter_range.to_flyte_idl() if self.categorical_parameter_range else None))

    def from_flyte_idl(cls, pb_object: Union[(_idl_parameter_ranges.ParameterRangeOneOf, _idl_parameter_ranges.IntegerParameterRange, _idl_parameter_ranges.ContinuousParameterRange, _idl_parameter_ranges.CategoricalParameterRange)]):
        """Build from either the oneof wrapper or a bare range message."""
        param = None
        if isinstance(pb_object, _idl_parameter_ranges.ParameterRangeOneOf):
            # The wrapper populates at most one field; unpack whichever is set.
            if pb_object.HasField('continuous_parameter_range'):
                param = ContinuousParameterRange.from_flyte_idl(pb_object.continuous_parameter_range)
            elif pb_object.HasField('integer_parameter_range'):
                param = IntegerParameterRange.from_flyte_idl(pb_object.integer_parameter_range)
            elif pb_object.HasField('categorical_parameter_range'):
                param = CategoricalParameterRange.from_flyte_idl(pb_object.categorical_parameter_range)
        elif isinstance(pb_object, _idl_parameter_ranges.IntegerParameterRange):
            param = IntegerParameterRange.from_flyte_idl(pb_object)
        elif isinstance(pb_object, _idl_parameter_ranges.ContinuousParameterRange):
            param = ContinuousParameterRange.from_flyte_idl(pb_object)
        elif isinstance(pb_object, _idl_parameter_ranges.CategoricalParameterRange):
            param = CategoricalParameterRange.from_flyte_idl(pb_object)
        return cls(param=param)
class OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Generated Highcharts option wrapper for sonification playDelay mapping."""
    # NOTE(review): each name appears twice as a getter/setter pair; the
    # @property / @<name>.setter decorators appear stripped from this
    # listing — confirm against the original generated source.

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class Align():
    """Prepares and runs a lastz alignment between a target and a query file."""

    def __init__(self, target, query, coverage, identity, out=False, min_match=None):
        # Results go to a caller-supplied path, or to a fresh temporary .lastz file.
        if (not out):
            (fd, self.output) = tempfile.mkstemp(suffix='.lastz')
            os.close(fd)
        else:
            self.output = out
        # NOTE(review): if `identity` is falsy and `min_match` is None, neither
        # branch runs, self.cli is never set and run() raises AttributeError —
        # confirm callers always supply one of them.
        if (identity and (not min_match)):
            # Filter hits by --coverage/--identity thresholds.
            self.cli = '{5} {0}[multiple,nameparse=full] {1}[nameparse=full] --strand=both --seed=12of19 --transition --nogfextend --nochain --gap=400,30 --xdrop=910 --ydrop=8370 --hspthresh=3000 --gappedthresh=3000 --noentropy --coverage={2} --identity={3} --output={4} --format=general-:score,name1,strand1,zstart1,end1,length1,name2,strand2,zstart2,end2,length2,diff,cigar,identity,continuity'.format(target, query, coverage, identity, self.output, get_user_path('binaries', 'lastz'))
        elif min_match:
            # Filter hits by --matchcount instead of coverage.
            self.cli = '{5} {0}[multiple,nameparse=full] {1}[nameparse=full] --strand=both --seed=12of19 --transition --nogfextend --nochain --gap=400,30 --xdrop=910 --ydrop=8370 --hspthresh=3000 --gappedthresh=3000 --noentropy --matchcount={2} --identity={3} --output={4} --format=general-:score,name1,strand1,zstart1,end1,length1,name2,strand2,zstart2,end2,length2,diff,cigar,identity,continuity'.format(target, query, min_match, identity, self.output, get_user_path('binaries', 'lastz'))

    def run(self):
        """Execute the prepared lastz command; returns (stdout, stderr) bytes."""
        # NOTE(review): shell=True with interpolated paths — only safe for
        # trusted, shell-clean file names.
        (lastz_stdout, lastz_stderr) = subprocess.Popen(self.cli, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate(None)
        return (lastz_stdout, lastz_stderr)
def _find_repeats(permutations):
index = defaultdict(set)
repeats = set()
for perm in permutations:
for block in perm.blocks:
if (perm.genome_name in index[block.block_id]):
repeats.add(block.block_id)
else:
index[block.block_id].add(perm.genome_name)
return repeats |
class JsHtmlSlidingPanel(JsHtml.JsHtml):
    """Client-side (JavaScript) helpers for a sliding-panel component."""

    def close(self):
        """Collapse the panel when its icon currently shows the expanded state."""
        return JsFncs.JsFunctions([self.page.js.if_((self.component.icon.dom.content.toString().indexOf(self.component.options.icon_expanded.split(' ')[(- 1)]) >= 0), [self.page.js.getElementsByName(('panel_%s' % self.htmlCode)).first.toggle(), self.component.icon.dom.switchClass(self.component.options.icon_expanded, self.component.options.icon_closed)])])

    def open(self):
        """Expand the panel when its icon currently shows the closed state."""
        return JsFncs.JsFunctions([self.page.js.if_((self.component.icon.dom.content.toString().indexOf(self.component.options.icon_closed.split(' ')[(- 1)]) >= 0), [self.page.js.getElementsByName(('panel_%s' % self.htmlCode)).first.toggle(), self.component.icon.dom.switchClass(self.component.options.icon_closed, self.component.options.icon_expanded)])])

    def set_title(self, data, options: dict=None):
        """Rebuild the panel's title component with new content."""
        return self.component.title[1].build(data, options=options)

    def set_icon(self, data: str, css: dict=None, options: dict=None):
        """Rebuild the panel's icon, optionally overriding its CSS."""
        if (css is not None):
            return self.component.title[0].build(data, options={'css': css})
        return self.component.title[0].build(data, options=options)
class IssueInstanceTraceFrameAssoc(Base, PrepareMixin, RecordMixin):
    """Association table linking issue instances to their trace frames."""
    __tablename__ = 'issue_instance_trace_frame_assoc'
    __table_args__ = BASE_TABLE_ARGS
    # Composite primary key over both foreign ids.
    issue_instance_id = Column('issue_instance_id', BIGDBIDType, primary_key=True, nullable=False)
    trace_frame_id = Column('trace_frame_id', BIGDBIDType, primary_key=True, nullable=False, index=True)
    # Read-only convenience relationships to the two linked rows.
    issue_instance = relationship('IssueInstance', primaryjoin='IssueInstanceTraceFrameAssoc.issue_instance_id == foreign(IssueInstance.id)', uselist=False, viewonly=True)
    trace_frame = relationship('TraceFrame', primaryjoin='IssueInstanceTraceFrameAssoc.trace_frame_id == foreign(TraceFrame.id)', uselist=False, viewonly=True)

    # NOTE(review): uses `cls` like a @classmethod; the decorator is not
    # visible in this listing — confirm against the original source.
    def merge(cls, session, items):
        """Bulk-merge association rows keyed on the (instance, frame) id pair."""
        return cls._merge_assocs(session, items, cls.issue_instance_id, cls.trace_frame_id)
class PrivateCredentials():
    """Credentials for the Xero 'private application' OAuth1 flow.

    A private application signs its requests with an RSA key and reuses its
    consumer key as the resource-owner token.
    """

    def __init__(self, consumer_key, rsa_key, api_url=XERO_BASE_URL):
        self.base_url = api_url
        self.consumer_key = consumer_key
        self.rsa_key = rsa_key
        # For private applications the consumer key doubles as the OAuth token.
        self.oauth_token = consumer_key
        self.oauth = OAuth1(
            self.consumer_key,
            resource_owner_key=self.oauth_token,
            rsa_key=self.rsa_key,
            signature_method=SIGNATURE_RSA,
            signature_type=SIGNATURE_TYPE_AUTH_HEADER,
        )
# NOTE(review): these two bare route registrations read like stripped
# '@_ns.route' decorators for rss() below — confirm against the original.
_ns.route('/', defaults={'limit': 200})
_ns.route('/<int:limit>/')

def rss(limit=200):
    """Render the newest coprs (up to *limit*) as an RSS XML response."""
    coprs = CoprsLogic.get_multiple(include_unlisted_on_hp=False).order_by(models.Copr.id.desc()).limit(limit)
    answer = render_template('rss/rss.xml', coprs=coprs)
    return Response(answer, mimetype='text/xml')
def which(program):
    """Locate *program* like the shell `which`, returning its path or None.

    A program given with a directory component is only checked directly;
    otherwise each PATH entry is searched, with Windows additionally trying
    the .exe/.cmd/.bat extensions when the name has none.
    """
    def _runnable(candidate):
        # On Windows the executable bit is meaningless, so existence suffices.
        return os.path.isfile(candidate) and (WINDOWS or os.access(candidate, os.X_OK))

    directory, basename = os.path.split(program)
    if directory:
        if _runnable(program):
            return program
    else:
        for entry in os.environ['PATH'].split(os.pathsep):
            entry = entry.strip('"')
            candidate = os.path.join(entry, program)
            if _runnable(candidate):
                return candidate
            if WINDOWS and ('.' not in basename):
                for extension in ('.exe', '.cmd', '.bat'):
                    if _runnable(candidate + extension):
                        return candidate + extension
    return None
def create_animal(item: dict, breeding=False) -> Animal:
    """Build an Animal from a raw API *item* dict.

    Returns None when the template id is not registered in farming_table.
    With breeding=True the entry describes a breeding pair instead of a
    single owned asset.
    """
    animal_class = farming_table.get(item['template_id'])
    if animal_class is None:
        return None
    animal = animal_class()
    # Timestamps arrive as epoch seconds.
    animal.day_claims_at = [datetime.fromtimestamp(ts) for ts in item['day_claims_at']]
    animal.name = item['name']
    animal.template_id = item['template_id']
    animal.times_claimed = item.get('times_claimed')
    animal.last_claimed = datetime.fromtimestamp(item['last_claimed'])
    animal.next_availability = datetime.fromtimestamp(item['next_availability'])
    if breeding:
        # Breeding entries use fixed claim rules and reference both parents.
        animal.required_claims = 9
        animal.daily_claim_limit = 3
        animal.consumed_card = 318607
        animal.bearer_id = item['bearer_id']
        animal.partner_id = item['partner_id']
    else:
        animal.asset_id = item['asset_id']
    return animal
def test_wellzone_to_points():
    """Zonation points from a well: undef handling and zonelist filtering."""
    mywell = xtgeo.well_from_file(WFILE, zonelogname='Zone_model2', mdlogname='M_MDEPTH')
    # Column 6 of the points frame holds the zone value at each point.
    zpoints = mywell.get_zonation_points(use_undef=False)
    assert (zpoints.iat[(9, 6)] == 6)
    # Including undefined intervals shifts the zone value at the same cell.
    zpoints = mywell.get_zonation_points(use_undef=True)
    assert (zpoints.iat[(9, 6)] == 7)
    # This zonelist raises ValueError (presumably because it is
    # non-contiguous — confirm against xtgeo's validation rules).
    with pytest.raises(ValueError):
        zpoints = mywell.get_zonation_points(zonelist=[1, 3, 4, 5])
    # List and (start, stop) tuple forms of zonelist behave identically.
    zpoints = mywell.get_zonation_points(zonelist=[3, 4, 5])
    assert (zpoints.iat[(6, 6)] == 4)
    zpoints = mywell.get_zonation_points(zonelist=(3, 5))
    assert (zpoints.iat[(6, 6)] == 4)
class Download(Core):
    """Download helpers layered over the Core API client."""

    def get_download_url(self, file_id: str, file_name: str=None, expire_sec: int=14400, drive_id: str=None) -> GetDownloadUrlResponse:
        """Fetch a (time-limited) download URL for a single file."""
        body = GetDownloadUrlRequest(file_id=file_id, drive_id=drive_id, file_name=file_name, expire_sec=expire_sec)
        return self._core_get_download_url(body)

    def batch_download_url(self, file_id_list: List[str], expire_sec: int=14400, drive_id=None) -> List[BatchDownloadUrlResponse]:
        """Fetch download URLs for several files in one batch call."""
        body = BatchDownloadUrlRequest(drive_id=drive_id, file_id_list=file_id_list, expire_sec=expire_sec)
        result = self._core_batch_download_url(body)
        return list(result)

    def download_folder(self, folder_file_id: str, local_folder: str='.', drive_id: str=None, file_filter: Callable[([BaseFile], bool)]=(lambda x: False)) -> str:
        """Recursively download a folder; returns the local destination path.

        file_filter returning True for an entry skips it.
        """
        # Non-root folders get a subdirectory named after the remote folder.
        if (folder_file_id != 'root'):
            folder = self._core_get_file(GetFileRequest(file_id=folder_file_id, drive_id=drive_id))
            local_folder = os.path.join(local_folder, self._del_special_symbol(folder.name))
        return self.__download_folder(folder_file_id, local_folder, drive_id, file_filter=file_filter)

    def __download_folder(self, folder_file_id: str, local_folder: str='.', drive_id: str=None, file_filter: Callable[([BaseFile], bool)]=(lambda x: False)) -> str:
        # Walk the remote listing: recurse into folders, collect plain files,
        # then download the collected files in one pass.
        os.makedirs(local_folder, exist_ok=True)
        files = []
        for file in self._core_get_file_list(GetFileListRequest(parent_file_id=folder_file_id, drive_id=drive_id)):
            if file_filter(file):
                continue
            if (file.type == 'folder'):
                self.__download_folder(folder_file_id=file.file_id, local_folder=os.path.join(local_folder, self._del_special_symbol(file.name)))
                continue
            files.append(file)
        self.download_files(files, local_folder=local_folder)
        return os.path.abspath(local_folder)

    # NOTE(review): the three bodiless signatures below read like stripped
    # @overload stubs for download_file — confirm against the original source.
    def download_file(self, *, file_path: str, url: str) -> str:
    def download_file(self, *, file_id: str, local_folder: str='.') -> str:
    def download_file(self, *, file: BaseFile, local_folder: str='.') -> str:
    def download_file(self, *, file_path: str=None, url: str=None, local_folder: str='.', file_id: str=None, file: BaseFile=None, drive_id=None) -> str:
        """Download one file, addressed by id, BaseFile object, or path+url."""
        if file_id:
            file = self._core_get_file(GetFileRequest(file_id=file_id, drive_id=drive_id))
        if file:
            # Folders must go through download_folder instead.
            if (file.type == 'folder'):
                raise AligoException(':,')
            file_path = os.path.join(local_folder, file.name)
            url = (file.download_url or file.url)
        return self._core_download_file(file_path, url)
def test_data_integrity_test_number_of_columns() -> None:
    """TestNumberOfColumns: default passes, gte=10 fails, eq=3 passes and renders."""
    test_dataset = pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [0, 2, 2, 432], 'target': [0, 0, 0, 1]})
    # Default condition (no bounds) should pass on any non-empty frame.
    suite = TestSuite(tests=[TestNumberOfColumns()])
    suite.run(current_data=test_dataset, reference_data=None, column_mapping=ColumnMapping())
    assert suite
    # The frame has 3 columns, so gte=10 must fail.
    suite = TestSuite(tests=[TestNumberOfColumns(gte=10)])
    suite.run(current_data=test_dataset, reference_data=None, column_mapping=ColumnMapping())
    assert (not suite)
    # Exact match passes, and both render paths should produce output.
    suite = TestSuite(tests=[TestNumberOfColumns(eq=3)])
    suite.run(current_data=test_dataset, reference_data=None, column_mapping=ColumnMapping())
    assert suite
    assert suite.show()
    assert suite.json()
def checkReachability(tdb, cmdenv):
    """Pre-flight check that the origin can reach the destination.

    Only meaningful when exactly one origin and one destination system are
    given; raises CommandLineError when no route exists within the per-jump
    range, or when the allowed hops*jumps budget is below the route length.
    """
    # --direct skips routing entirely.
    if cmdenv.direct:
        return
    (srcSys, dstSys) = (cmdenv.origSystems, cmdenv.destSystems)
    if ((len(srcSys) == 1) and (len(dstSys) == 1)):
        (srcSys, dstSys) = (srcSys[0], dstSys[0])
        if (srcSys != dstSys):
            maxLyPer = cmdenv.maxLyPer
            # Only System-type avoidances constrain the router.
            avoiding = tuple((avoid for avoid in cmdenv.avoidPlaces if isinstance(avoid, System)))
            route = tdb.getRoute(srcSys, dstSys, maxLyPer, avoiding)
            if (not route):
                raise CommandLineError('No route between {} and {} with a {}ly/jump limit.'.format(srcSys.name(), dstSys.name(), maxLyPer))
            # Total jump budget is jumps-per-hop times hops; the route length
            # in jumps is one less than its system count.
            jumpLimit = (cmdenv.maxJumpsPer * cmdenv.hops)
            routeJumps = (len(route) - 1)
            if (jumpLimit < routeJumps):
                # Suggest the minimum setting changes that would make it fit.
                hopsRequired = math.ceil((routeJumps / cmdenv.maxJumpsPer))
                jumpsRequired = math.ceil((routeJumps / cmdenv.hops))
                raise CommandLineError('Shortest route between {src} and {dst} at {jumply} ly per jump requires at least {minjumps} jumps. Your current settings (--hops {hops} --jumps {jumps}) allows a maximum of {jumplimit}.\n\nYou may need --hops={althops} or --jumps={altjumps}.\n\nSee also:\n --towards (aka -T), --start-jumps (-s), --end-jumps (-e), --direct.\n'.format(src=srcSys.name(), dst=dstSys.name(), jumply=cmdenv.maxLyPer, minjumps=routeJumps, hops=cmdenv.hops, jumps=cmdenv.maxJumpsPer, jumplimit=jumpLimit, althops=hopsRequired, altjumps=jumpsRequired))
class ComputeRepositoryClient(_base_repository.BaseRepositoryClient):
    """Google Compute Engine API client exposing one lazily-built repository
    per resource type (instances, disks, firewalls, ...)."""
    # NOTE(review): the parameterless accessors below read like @property
    # methods; decorators appear to be missing from this listing — confirm
    # against the original source.

    def __init__(self, quota_max_calls=None, quota_period=100.0, use_rate_limiter=True, read_only=False, cache_discovery=False, cache=None):
        # With no quota ceiling there is nothing for a rate limiter to enforce.
        if (not quota_max_calls):
            use_rate_limiter = False
        # Per-resource repository caches; each is populated on first access.
        self._backend_services = None
        self._disks = None
        self._firewalls = None
        self._forwarding_rules = None
        self._global_operations = None
        self._images = None
        self._instance_group_managers = None
        self._instance_groups = None
        self._instance_templates = None
        self._instances = None
        self._networks = None
        self._projects = None
        self._region_instance_groups = None
        self._snapshots = None
        self._subnetworks = None
        super(ComputeRepositoryClient, self).__init__(API_NAME, versions=['beta', 'v1'], quota_max_calls=quota_max_calls, quota_period=quota_period, use_rate_limiter=use_rate_limiter, read_only=read_only, cache_discovery=cache_discovery, cache=cache)

    def backend_services(self):
        # Backend services need the beta API surface.
        if (not self._backend_services):
            self._backend_services = self._init_repository(_ComputeBackendServicesRepository, version='beta')
        return self._backend_services

    def disks(self):
        if (not self._disks):
            self._disks = self._init_repository(_ComputeDisksRepository)
        return self._disks

    def firewalls(self):
        if (not self._firewalls):
            self._firewalls = self._init_repository(_ComputeFirewallsRepository)
        return self._firewalls

    def forwarding_rules(self):
        if (not self._forwarding_rules):
            self._forwarding_rules = self._init_repository(_ComputeForwardingRulesRepository)
        return self._forwarding_rules

    def global_operations(self):
        if (not self._global_operations):
            self._global_operations = self._init_repository(_ComputeGlobalOperationsRepository)
        return self._global_operations

    def images(self):
        if (not self._images):
            self._images = self._init_repository(_ComputeImagesRepository)
        return self._images

    def instance_group_managers(self):
        if (not self._instance_group_managers):
            self._instance_group_managers = self._init_repository(_ComputeInstanceGroupManagersRepository)
        return self._instance_group_managers

    def instance_groups(self):
        if (not self._instance_groups):
            self._instance_groups = self._init_repository(_ComputeInstanceGroupsRepository)
        return self._instance_groups

    def instance_templates(self):
        if (not self._instance_templates):
            self._instance_templates = self._init_repository(_ComputeInstanceTemplatesRepository)
        return self._instance_templates

    def instances(self):
        if (not self._instances):
            self._instances = self._init_repository(_ComputeInstancesRepository)
        return self._instances

    def networks(self):
        if (not self._networks):
            self._networks = self._init_repository(_ComputeNetworksRepository)
        return self._networks

    def projects(self):
        if (not self._projects):
            self._projects = self._init_repository(_ComputeProjectsRepository)
        return self._projects

    def region_instance_groups(self):
        if (not self._region_instance_groups):
            self._region_instance_groups = self._init_repository(_ComputeRegionInstanceGroupsRepository)
        return self._region_instance_groups

    def snapshots(self):
        if (not self._snapshots):
            self._snapshots = self._init_repository(_ComputeSnapshotsRepository)
        return self._snapshots

    def subnetworks(self):
        if (not self._subnetworks):
            self._subnetworks = self._init_repository(_ComputeSubnetworksRepository)
        return self._subnetworks
def extractMahouShoujoIkuseiKeikakuFandomCom(item):
    """Map a raw feed *item* to a release message.

    Returns None for items without chapter/volume info or for previews,
    a release message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip untagged chapterless posts as well as preview announcements.
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_download_accounts_bad_filter_type_raises(client, download_test_data):
    """A string 'filters' value must yield a 422 with a missing-field message."""
    # Point download generation at the test database.
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    # 'filters' should be an object; a bare string is a type error.
    payload = {'account_level': 'federal_account', 'filters': '01', 'columns': []}
    resp = client.post('/api/v2/download/accounts/', content_type='application/json', data=json.dumps(payload))
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
    assert (resp.json()['detail'] == "Missing value: 'filters|fy' is a required field")
def cat_sina_news():
    """Concatenate every CSL corpus file into one tab-separated pretrain file.

    Walks ./pretrain_datasets/csl/, takes column 1 of each tab-separated
    input row and writes "<content>\t<dataset name>" rows to
    ./pretrain_datasets/output/<function name>.txt.
    """
    # The function's own name doubles as the dataset label and output stem.
    datasets_name = sys._getframe().f_code.co_name
    out_path = './pretrain_datasets/output/{}.txt'.format(datasets_name)
    base_dir = './pretrain_datasets/csl/'
    # Context managers close the output and every input file; the original
    # leaked all of these handles.
    with open(out_path, 'w') as out_handle:
        writer = csv.writer(out_handle, delimiter='\t')
        for (root, _dirs, files) in os.walk(base_dir):
            for file_name in files:
                file_path = os.path.join(root, file_name)
                with open(file_path) as in_handle:
                    for row in tqdm(csv.reader(in_handle, delimiter='\t')):
                        content = row[1]
                        writer.writerow([content, datasets_name])
def earth_actor(radius=0.5, opacity=1.0):
    """Build a tvtk Actor rendering a wireframe-style Earth sphere."""
    earth_source = tvtk.EarthSource(radius=radius, on_ratio=16, outline=0)
    earth_mapper = tvtk.PolyDataMapper()
    configure_input_data(earth_mapper, earth_source.output)
    surface_props = tvtk.Property(opacity=opacity)
    earth = tvtk.Actor(mapper=earth_mapper, property=surface_props)
    # Force the source to generate its geometry before the first render.
    earth_source.update()
    return earth
class GetFileListRequest(DatClass):
    """Request payload for listing the files under a folder in a drive."""
    # NOTE(review): field(...) usage suggests a dataclass-style decorator on
    # this class that is not visible in this listing — confirm upstream.
    parent_file_id: str = 'root'  # folder to list; 'root' is the drive root
    drive_id: str = None  # None lets the client fill in the default drive
    starred: bool = field(default=None, repr=False)
    all: bool = field(default=False, repr=False)
    category: BaseFileCategory = field(default=None, repr=False)
    fields: GetFileListFields = field(default='*', repr=False)
    # Server-side thumbnail / preview processing directives.
    image_thumbnail_process: str = field(default='image/resize,w_400/format,jpeg', repr=False)
    image_url_process: str = field(default='image/resize,w_1920/format,jpeg', repr=False)
    limit: int = field(default=200, repr=False)  # page size
    marker: str = field(default=None, repr=False)  # pagination cursor
    order_by: GetFileListOrderBy = field(default='updated_at', repr=False)
    order_direction: OrderDirection = field(default='DESC', repr=False)
    status: str = field(default=None, repr=False)
    type: BaseFileType = field(default=None, repr=False)  # e.g. file vs folder
    url_expire_sec: int = field(default=14400, repr=False)  # download URL TTL
    video_thumbnail_process: str = field(default='video/snapshot,t_0,f_jpg,ar_auto,w_800', repr=False)
class OptionSeriesOrganizationEvents(Options):
    """Generated Highcharts option wrapper for organization-series events."""
    # NOTE(review): each name appears twice as a getter/setter pair; the
    # @property / @<name>.setter decorators appear stripped from this
    # listing — confirm against the original generated source.

    def afterAnimate(self):
        return self._config_get(None)

    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)

    def checkboxClick(self):
        return self._config_get(None)

    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def hide(self):
        return self._config_get(None)

    def hide(self, value: Any):
        self._config(value, js_type=False)

    def legendItemClick(self):
        return self._config_get(None)

    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def show(self):
        return self._config_get(None)

    def show(self, value: Any):
        self._config(value, js_type=False)
def setUpModule():
    """Build the upload2web fixture tree with alternating 0777/0555 perms.

    Runs once before this module's tests; needs privileges sufficient for
    `chown www-data`.
    """
    subprocess.check_output('\nBASE_FOLDER="{config.base_folder}/test_file_upload2web/"\nrm -rf "$BASE_FOLDER"\n\nmkdir -p "$BASE_FOLDER/0777/0555/0777/0555"\nchown www-data: -R "$BASE_FOLDER/"\nchmod 0777 "$BASE_FOLDER/0777" \nchmod 0777 "$BASE_FOLDER/0777/0555/0777/"\nchmod 0555 "$BASE_FOLDER/0777/0555" \nchmod 0555 "$BASE_FOLDER/0777/0555/0777/0555"\n'.format(config=config), shell=True)
class AssemblyApi(ProviderInterface, AudioInterface):
    """AssemblyAI speech-to-text provider: async job launch and result polling."""
    provider_name = 'assembly'

    def __init__(self, api_keys: Dict={}) -> None:
        self.api_settings = load_provider(ProviderDataEnum.KEY, self.provider_name, api_keys=api_keys)
        self.api_key = self.api_settings['assembly_key']
        # NOTE(review): the base-URL literal appears truncated in this
        # listing — restore it from the original source.
        self.url = '
        self.url_upload_file = f'{self.url}/upload'
        self.url_transcription = f'{self.url}/transcript'

    def audio__speech_to_text_async__launch_job(self, file: str, language: str, speakers: int, profanity_filter: bool, vocabulary: Optional[List[str]], audio_attributes: tuple, model: str=None, file_url: str='', provider_params=dict()) -> AsyncLaunchJobResponseType:
        """Submit an audio file for transcription; returns the provider job id."""
        (export_format, channels, frame_rate) = audio_attributes
        # Regional codes (e.g. 'en-US') are collapsed to assembly's codes.
        if (language and ('-' in language)):
            language = language_matches[language]
        header = {'authorization': self.api_key}
        # Timestamp prefix keeps repeated uploads of the same file distinct.
        file_name = ((str(int(time())) + '_') + str(file.split('/')[(- 1)]))
        content_url = file_url
        if (not content_url):
            content_url = upload_file_to_s3(file, ((Path(file_name).stem + '.') + export_format))
        data = {'audio_url': f'{content_url}', 'language_code': language, 'speaker_labels': True, 'filter_profanity': profanity_filter}
        if vocabulary:
            data.update({'word_boost': vocabulary})
        # Without a language, let the provider auto-detect it.
        if (not language):
            del data['language_code']
            data.update({'language_detection': True})
        data.update(provider_params)
        # Retry loop: drop any request parameter the provider rejects as
        # unsupported for the chosen language, up to `trials` attempts.
        launch_transcription = False
        trials = 10
        while (not launch_transcription):
            trials -= 1
            response = requests.post(self.url_transcription, json=data, headers=header)
            if (response.status_code != 200):
                error = response.json().get('error')
                if ('not available in this language' in error):
                    parameter = error.split(':')[1].strip()
                    del data[parameter]
                else:
                    raise ProviderException(response.json().get('error'), code=response.status_code)
            else:
                launch_transcription = True
                transcribe_id = response.json()['id']
        return AsyncLaunchJobResponseType(provider_job_id=transcribe_id)

    def audio__speech_to_text_async__get_job_result(self, provider_job_id: str) -> AsyncBaseResponseType[SpeechToTextAsyncDataClass]:
        """Poll a transcription job, returning pending/completed typed results."""
        headers = {'authorization': self.api_key}
        response = requests.get(url=f'{self.url_transcription}/{provider_job_id}', headers=headers)
        if (response.status_code != 200):
            error_message = (response.json().get('error') or 'Error when transcribing audio file')
            # Unknown ids are reported as deprecated jobs, not plain errors.
            if ('transcript id not found' in error_message):
                raise AsyncJobException(reason=AsyncJobExceptionReason.DEPRECATED_JOB_ID, code=response.status_code)
            raise ProviderException(error_message, code=response.status_code)
        diarization_entries = []
        speakers = {}
        index_speaker = 0
        original_response = response.json()
        status = original_response['status']
        if (status == 'error'):
            raise ProviderException(original_response, code=response.status_code)
        if (status != 'completed'):
            return AsyncPendingResponseType[SpeechToTextAsyncDataClass](provider_job_id=provider_job_id)
        # Convert utterances into per-word diarization entries, numbering
        # speakers in order of first appearance (1-based).
        if (original_response.get('utterances') and (len(original_response['utterances']) > 0)):
            for line in original_response['utterances']:
                words = line.get('words', [])
                if (line['speaker'] not in speakers):
                    index_speaker += 1
                    speaker_tag = index_speaker
                    speakers[line['speaker']] = index_speaker
                elif (line['speaker'] in speakers):
                    speaker_tag = speakers[line['speaker']]
                for word in words:
                    # Provider times are in milliseconds; convert to seconds.
                    diarization_entries.append(SpeechDiarizationEntry(speaker=speaker_tag, segment=word['text'], start_time=str((word['start'] / 1000)), end_time=str((word['end'] / 1000)), confidence=word['confidence']))
        diarization = SpeechDiarization(total_speakers=len(speakers), entries=diarization_entries)
        if (len(speakers) == 0):
            diarization.error_message = 'Speaker diarization not available for the data specified'
        return AsyncResponseType[SpeechToTextAsyncDataClass](original_response=original_response, standardized_response=SpeechToTextAsyncDataClass(text=original_response['text'], diarization=diarization), provider_job_id=provider_job_id)
def get_initial_data(request):
    """Initial registration-form values, refined via GeoIP when available."""
    dc_settings = request.dc.settings
    initial = {'language': get_language(), 'country': dc_settings.PROFILE_COUNTRY_CODE_DEFAULT, 'phone': dc_settings.PROFILE_PHONE_PREFIX_DEFAULT, 'time_zone': dc_settings.PROFILE_TIME_ZONE_DEFAULT}
    try:
        # Each lookup falls back to the datacenter default when GeoIP is silent.
        country = get_geoip(request)['country_code']
        if (not country):
            country = dc_settings.PROFILE_COUNTRY_CODE_DEFAULT
        phone = get_phone_prefix(country)
        if (not phone):
            phone = dc_settings.PROFILE_PHONE_PREFIX_DEFAULT
        time_zone = get_time_zone(country)
        if (not time_zone):
            time_zone = dc_settings.PROFILE_TIME_ZONE_DEFAULT
    except Exception as ex:
        # GEO detection is best-effort; keep the defaults on any failure.
        logger.error('Registration GEO detection problem')
        logger.exception(ex)
    else:
        initial['phone'] = phone
        initial['country'] = country
        # NOTE(review): key mismatch — the defaults dict uses 'time_zone' but
        # the detected value is stored under 'timezone'; confirm which key the
        # consuming form actually reads.
        initial['timezone'] = time_zone
    return initial
class OptionSeriesBubbleSonificationTracks(Options):
    """Generated Highcharts option wrapper for bubble-series sonification tracks."""
    # NOTE(review): scalar options appear as getter/setter pairs whose
    # @property / @<name>.setter decorators seem stripped from this listing.

    def activeWhen(self) -> 'OptionSeriesBubbleSonificationTracksActivewhen':
        # Nested option group: created lazily on first access.
        return self._config_sub_data('activeWhen', OptionSeriesBubbleSonificationTracksActivewhen)

    def instrument(self):
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesBubbleSonificationTracksMapping':
        return self._config_sub_data('mapping', OptionSeriesBubbleSonificationTracksMapping)

    def midiName(self):
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesBubbleSonificationTracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesBubbleSonificationTracksPointgrouping)

    def roundToMusicalNotes(self):
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): this bare expression looks like a stripped
# '@provides(ISplashScreen)' decorator — confirm against the original source.
(ISplashScreen)
class SplashScreen(MSplashScreen, Window):
    """wx-based splash screen that can paint status text over its image."""
    # Splash image resource.
    image = Image(ImageResource('splash'))
    log_level = Int(DEBUG)
    show_log_messages = Bool(True)
    # Status text and its styling / placement on the splash image.
    text = Str()
    text_color = Any()
    text_font = Any()
    text_location = Tuple(5, 5)

    def _create_control(self, parent):
        """Create the wx splash window with no timeout, centred on screen."""
        image = self.image.create_image()
        splash_screen = wx.adv.SplashScreen(image.ConvertToBitmap(), (wx.adv.SPLASH_NO_TIMEOUT | wx.adv.SPLASH_CENTRE_ON_SCREEN), 0, parent, (- 1), style=(wx.SIMPLE_BORDER | wx.FRAME_NO_TASKBAR))
        # Default font: slightly larger, italic variant of the system font.
        self._wx_default_text_font = new_font_like(wx.NORMAL_FONT, point_size=(wx.NORMAL_FONT.GetPointSize() + 1), style=wx.ITALIC)
        splash_screen.Bind(wx.EVT_PAINT, self._on_paint)
        return splash_screen

    def _text_changed(self):
        """Trait change handler: repaint immediately when the text updates."""
        if (self.control is not None):
            self.control.Refresh(False)
            self.control.Update()
            # Give wx a chance to process the paint event right away.
            wx.GetApp().Yield(True)

    def _on_paint(self, event):
        """Paint handler: draw the status text over the splash image."""
        if (self.control is not None):
            window = self.control
            dc = wx.PaintDC(window)
            if (self.text_font is None):
                text_font = self._wx_default_text_font
            else:
                text_font = self.text_font
            dc.SetFont(text_font)
            if (self.text_color is None):
                text_color = 'black'
            else:
                text_color = self.text_color
            dc.SetTextForeground(text_color)
            (x, y) = self.text_location
            dc.DrawText(self.text, x, y)
        # Let wx continue with its default paint processing.
        event.Skip()
def render_body(body, option, global_coord=True):
    """Render one simulation body according to *option*.

    Draws a filled-face pass and/or a wireframe-edge pass, optionally
    transformed into the parent body node's frame and scaled by
    option.scale.  Lighting state is saved and restored around the call.
    """
    T = (body.parent_bodynode.T if body.parent_bodynode else constants.eye_T())
    # glPushAttrib expects an attribute *mask* (GL_LIGHTING_BIT), not the
    # GL_LIGHTING enable enum the original passed.
    glPushAttrib(GL_LIGHTING_BIT)
    if option.lighting:
        glEnable(GL_LIGHTING)
    else:
        glDisable(GL_LIGHTING)
    if option.render_face:
        s = option.scale
        glPushMatrix()
        if global_coord:
            gl_render.glTransform(T)
        glScalef(s, s, s)
        body.render_with_color(option.color_face)
        glPopMatrix()
    if option.render_edge:
        glLineWidth(option.line_width)
        s = option.scale
        glPushMatrix()
        if global_coord:
            gl_render.glTransform(T)
        glScalef(s, s, s)
        # Temporarily switch to wireframe rasterization for the edge pass.
        glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
        body.render_with_color(option.color_edge)
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        glPopMatrix()
    # glPopAttrib takes no arguments per the OpenGL specification.
    glPopAttrib()
class RMTTestReqName(object):
    """Tests for the ReqName requirement-tag rewriter."""

    def rmttest_positive_01(self):
        # A present 'Name' field is rewritten to its plain value.
        (config, req) = create_parameters()
        req['Name'] = 'This is something'
        rt = ReqName(config)
        (name, value) = rt.rewrite('Name-test', req)
        assert ('Name' == name)
        assert ('This is something' == value)

    def rmttest_negative_01(self):
        # A missing 'Name' must raise RMTException with error id 37.
        (config, req) = create_parameters()
        rt = ReqName(config)
        with pytest.raises(RMTException) as rmte:
            rt.rewrite('Name-test', req)
        assert (37 == rmte.id())
class OptionSeriesStreamgraphSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Generated Highcharts option wrapper for streamgraph sonification pan mapping."""
    # NOTE(review): each name appears twice as a getter/setter pair; the
    # @property / @<name>.setter decorators appear stripped from this
    # listing — confirm against the original generated source.

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class AssignmentHandler(Handler):
    """Lifts Binary Ninja MLIL assignment and store instructions into the
    decompiler's own IR (Assignment / Operation nodes).

    NOTE(review): relies on ``self.BYTE_SIZE`` and ``self._lifter`` being
    provided by the ``Handler`` base class (not visible here) — confirm.
    """

    def register(self):
        """Register this handler's lifting callbacks in the lifter's HANDLERS map."""
        self._lifter.HANDLERS.update({mediumlevelil.MediumLevelILSetVar: self.lift_assignment, mediumlevelil.MediumLevelILSetVarSsa: self.lift_assignment, mediumlevelil.MediumLevelILSetVarField: self.lift_set_field, mediumlevelil.MediumLevelILSetVarSsaField: self.lift_set_field, mediumlevelil.MediumLevelILSetVarSplit: self.lift_split_assignment, mediumlevelil.MediumLevelILSetVarSplitSsa: self.lift_split_assignment, mediumlevelil.MediumLevelILSetVarAliased: partial(self.lift_assignment, is_aliased=True), mediumlevelil.MediumLevelILSetVarAliasedField: partial(self.lift_set_field, is_aliased=True), mediumlevelil.MediumLevelILVarField: self.lift_get_field, mediumlevelil.MediumLevelILVarSsaField: self.lift_get_field, mediumlevelil.MediumLevelILVarAliasedField: partial(self.lift_get_field, is_aliased=True), mediumlevelil.MediumLevelILStore: self.lift_store, mediumlevelil.MediumLevelILStoreSsa: self.lift_store, mediumlevelil.MediumLevelILStoreStruct: self.lift_store_struct, mediumlevelil.MediumLevelILStoreStructSsa: self.lift_store_struct, mediumlevelil.MediumLevelILLowPart: self._lift_mask_high})

    def lift_assignment(self, assignment: mediumlevelil.MediumLevelILSetVar, is_aliased=False, **kwargs) -> Assignment:
        """Lift a plain `dest = src` by lifting both operands."""
        return Assignment(self._lifter.lift(assignment.dest, is_aliased=is_aliased, parent=assignment), self._lifter.lift(assignment.src, parent=assignment))

    def lift_set_field(self, assignment: mediumlevelil.MediumLevelILSetVarField, is_aliased=False, **kwargs) -> Assignment:
        """Lift a partial-variable write (`dest:offset = src`).

        Three cases:
        - dest has a named struct/class type: write through a MemberAccess;
        - offset 0 and mask omission is enabled: write through a contraction cast;
        - otherwise: keep dest whole and build the masked read-modify-write operand.
        """
        dest_type = self._lifter.lift(assignment.dest.type)
        if (isinstance(assignment.dest.type, binaryninja.NamedTypeReferenceType) and (isinstance(dest_type, Struct) or isinstance(dest_type, Class))):
            struct_variable = self._lifter.lift(assignment.dest, is_aliased=True, parent=assignment)
            destination = MemberAccess(offset=assignment.offset, member_name=struct_variable.type.get_member_name_by_offset(assignment.offset), operands=[struct_variable], writes_memory=assignment.dest.version)
            value = self._lifter.lift(assignment.src)
        elif ((assignment.offset == 0) and self._lifter.is_omitting_masks):
            destination = self._lift_contraction(assignment, is_aliased=is_aliased, parent=assignment)
            value = self._lifter.lift(assignment.src)
        else:
            destination = self._lifter.lift(assignment.dest, is_aliased=is_aliased, parent=assignment)
            value = self._lift_masked_operand(assignment)
        return Assignment(destination, value)

    def lift_get_field(self, instruction: mediumlevelil.MediumLevelILVarField, is_aliased=False, **kwargs) -> Operation:
        """Lift a partial-variable read (`src:offset`).

        Struct/class/union sources become a MemberAccess; scalar sources become
        a cast (with a right-shift when reading from a non-zero offset).
        """
        source = self._lifter.lift(instruction.src, is_aliased=is_aliased, parent=instruction)
        if (isinstance(source.type, Struct) or isinstance(source.type, Class) or isinstance(source.type, Union)):
            return self._get_field_as_member_access(instruction, source, **kwargs)
        cast_type = source.type.resize((instruction.size * self.BYTE_SIZE))
        if instruction.offset:
            # shift the wanted field down to bit 0 before casting
            return UnaryOperation(OperationType.cast, [BinaryOperation(OperationType.right_shift_us, [source, Constant(instruction.offset, Integer.int32_t())])], cast_type)
        return UnaryOperation(OperationType.cast, [source], vartype=cast_type, contraction=True)

    def _get_field_as_member_access(self, instruction: mediumlevelil.MediumLevelILVarField, source: Expression, **kwargs) -> MemberAccess:
        """Resolve a field read on a struct/class/union source to a MemberAccess.

        Falls back to a synthetic `field_<offset>` name when the member cannot
        be resolved (e.g. union read without a parent context).
        """
        if (isinstance(source.type, Struct) or isinstance(source.type, Class)):
            member_name = source.type.get_member_name_by_offset(instruction.offset)
        elif (parent := kwargs.get('parent', None)):
            # union: pick the member whose type matches the parent's dest type
            parent_type = self._lifter.lift(parent.dest.type)
            member_name = source.type.get_member_name_by_type(parent_type)
        else:
            logging.warning(f'Cannot get member name for instruction {instruction}')
            member_name = f'field_{hex(instruction.offset)}'
        return MemberAccess(offset=instruction.offset, member_name=member_name, operands=[source])

    def lift_store(self, assignment: mediumlevelil.MediumLevelILStoreSsa, **kwargs) -> Assignment:
        """Lift a memory store (`*dest = src`)."""
        return Assignment(self._lift_store_destination(assignment), self._lifter.lift(assignment.src))

    def _lift_store_destination(self, store_assignment: mediumlevelil.MediumLevelILStoreSsa) -> (UnaryOperation | GlobalVariable):
        """Lift a store target: globals are labeled with the memory version,
        everything else is wrapped in a dereference."""
        memory_version = store_assignment.dest_memory
        store_destination = self._lifter.lift(store_assignment.dest, parent=store_assignment)
        if isinstance(store_destination, GlobalVariable):
            store_destination.ssa_label = memory_version
            return store_destination
        return UnaryOperation(OperationType.dereference, [store_destination], vartype=store_destination.type, writes_memory=memory_version)

    def _lift_contraction(self, assignment: mediumlevelil.MediumLevelILSetVarField, is_aliased=False, **kwargs) -> UnaryOperation:
        """Build a contraction destination: the lifted dest cast down to the
        written size (used for offset-0 field writes when masks are omitted)."""
        destination_operand = self._lifter.lift(assignment.dest, is_aliased=is_aliased, parent=assignment)
        contraction_type = destination_operand.type.resize((assignment.size * self.BYTE_SIZE))
        return UnaryOperation(OperationType.cast, [destination_operand], vartype=contraction_type, contraction=True)

    def _lift_masked_operand(self, assignment: mediumlevelil.MediumLevelILSetVarField, is_aliased=False, **kwargs) -> BinaryOperation:
        """Build the read-modify-write value for a field store:
        (prev & <bits outside the field>) | (src << field offset)."""
        return BinaryOperation(OperationType.bitwise_or, [BinaryOperation(OperationType.bitwise_and, [self._lifter.lift(assignment.prev, parent=assignment, is_aliased=is_aliased), Constant(((self._get_all_ones_mask_for_type(assignment.dest.var.type.width) - self._get_all_ones_mask_for_type(assignment.size)) << (assignment.offset * self.BYTE_SIZE)))], vartype=self._lifter.lift(assignment.src.expr_type, parent=assignment)), BinaryOperation(OperationType.left_shift, [self._lifter.lift(assignment.src, parent=assignment), Constant((assignment.offset * self.BYTE_SIZE))], vartype=self._lifter.lift(assignment.src.expr_type, parent=assignment))], vartype=self._lifter.lift(assignment.expr_type, parent=assignment))

    def _lift_mask_high(self, instruction: mediumlevelil.MediumLevelILSetVarField, **kwargs) -> BinaryOperation:
        """Lift LOW_PART as `src & <all-ones of the low `size` bytes>`, typed
        to the truncated width."""
        return BinaryOperation(OperationType.bitwise_and, [(op := self._lifter.lift(instruction.src, parent=instruction)), Constant(self._get_all_ones_mask_for_type(instruction.size))], vartype=op.type.resize((instruction.size * self.BYTE_SIZE)))

    def _get_all_ones_mask_for_type(self, type_size: int, **kwargs) -> int:
        """Return an all-ones bit mask for a value of `type_size` bytes."""
        return int(((2 ** (type_size * self.BYTE_SIZE)) - 1))

    def lift_split_assignment(self, assignment: mediumlevelil.MediumLevelILSetVarSplit, **kwargs) -> Assignment:
        """Lift `high:low = src` into a RegisterPair assignment whose type is
        the combined width of both halves."""
        return Assignment(RegisterPair((high := self._lifter.lift(assignment.high, parent=assignment)), (low := self._lifter.lift(assignment.low, parent=assignment)), vartype=high.type.resize((high.type.size + low.type.size))), self._lifter.lift(assignment.src, parent=assignment))

    def lift_store_struct(self, instruction: mediumlevelil.MediumLevelILStoreStruct, **kwargs) -> Assignment:
        """Lift a struct-member store (`obj->field = src`) into a MemberAccess
        assignment carrying the destination memory version."""
        vartype = self._lifter.lift(instruction.dest.expr_type)
        struct_variable = self._lifter.lift(instruction.dest, is_aliased=True, parent=instruction)
        struct_member_access = MemberAccess(member_name=vartype.type.get_member_name_by_offset(instruction.offset), offset=instruction.offset, operands=[struct_variable], vartype=vartype, writes_memory=instruction.dest_memory)
        src = self._lifter.lift(instruction.src)
        return Assignment(struct_member_access, src)
def _init_users(db: DbSetup, db_list: list[str]):
    """Create the read-only / read-write / delete service users and grant each
    of them connect + usage rights on every database in *db_list*.

    Credentials are read from ``config.common.postgres`` (``<role>_user`` /
    ``<role>_pw`` attributes).
    """
    for role in ('ro', 'rw', 'del'):
        username = getattr(config.common.postgres, f'{role}_user')
        password = getattr(config.common.postgres, f'{role}_pw')
        db.create_user(username, password)
        for database in db_list:
            db.grant_connect(database, username)
            DbSetup(db_name=database).grant_usage(username)
def print_table(struct, iterable, is_default=(lambda obj, p: False)):
    """Print *iterable* as a fixed-width text table sized to the terminal.

    struct: sequence of 5-tuples (header, width, align, attr, formatter) where
        width is an int or a callable receiving the leftover width, align is a
        str.format alignment flag, attr is the key looked up on each row object
        (None means the object itself) and formatter is an optional
        value-to-text callable (None means identity).
    iterable: row objects, indexable by attr when attr is not None.
    is_default: predicate (obj, attr) -> bool; matching cells are left blank.
    """
    term_width = get_terminal_width()
    # width consumed by the fixed (integer-width) columns...
    occupied = sum(((w if isinstance(w, int) else 0) for (_, w, *_) in struct))
    # ...leaving this much for callable-width columns, minus column separators
    available = ((term_width - occupied) - (len(struct) - 1))
    (template, separator) = ('', '')
    widths = {}
    for (header, width, align, *_) in struct:
        w = max(0, (width if isinstance(width, int) else width(available)))
        widths[header] = w
        template = ' '.join([template, '{{: {}{}}}'.format(align, w)])
        separator = ' '.join([separator, ('-' * w)])
    # drop the leading space introduced by joining onto the empty string
    (template, separator) = (template[1:], separator[1:])
    table = template.format(*(t[0] for t in struct))
    table = '\n'.join([table, separator])
    for obj in iterable:
        values = []
        for (h, _, _, a, f) in struct:
            f = (f if (f is not None) else (lambda a: a))
            if is_default(obj, a):
                value = ''
            else:
                if (a is None):
                    value = obj
                else:
                    value = obj[a]
                value = f(value)
            if (value is None):
                value = ''
            # only the first line of a multi-line value is shown
            value = str(value).split('\n')[0]
            value = limit_str(str(value), widths[h])
            values.append(value)
        line = template.format(*values)
        table = '\n'.join([table, line])
    print(table)
def extractLuxiufer(item):
    """Parse a 'Luxiufer' feed item title into a release message.

    Returns None to skip the item (no chapter/volume info, or a preview),
    a release message for known tags, and False when the item is unhandled.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lacks_numbering = not (chp or vol)
    if lacks_numbering or 'preview' in item['title'].lower():
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def test_collect():
    """Predictions must be identical across fit/transform and the refit and
    return_preds flag combinations."""
    learner = Learner(OLS(), indexer=data.indexer, name='lr')
    learner.__no_output__ = False
    baseline = _run(learner, 'fit', X, y, return_preds=True)
    variants = [
        _run(learner, 'fit', X, y, refit=False, return_preds=True),
        _run(learner, 'transform', X, y, return_preds=True),
        _run(learner, 'transform', X, y),
    ]
    for preds in variants:
        np.testing.assert_array_equal(baseline, preds)
def _need_maintenance_ignore_tests(request):
    """Return False (no maintenance page) when a test run is detected and
    ignoring tests is enabled; otherwise return None (no opinion)."""
    if not settings.MAINTENANCE_MODE_IGNORE_TESTS:
        return
    argv = sys.argv
    # a test run is either the runtests script or `manage.py test`
    running_tests = (bool(argv) and 'runtests' in argv[0]) or (len(argv) > 1 and argv[1] == 'test')
    if running_tests:
        return False
class ConfigData():
    """Mutable holder for the integration's connection settings."""

    # class-level annotations only; instance values are assigned in __init__
    host: (str | None)
    username: (str | None)
    password: (str | None)
    entry: (ConfigEntry | None)

    def __init__(self):
        self.host = None
        self.username = None
        self.password = None
        self.entry = None

    def url(self):
        """Build the API endpoint URL from the configured host."""
        # NOTE(review): takes no arguments and only reads self.host — possibly
        # a stripped @property; confirm against callers.
        url = API_URL_TEMPLATE.format(self.host)
        return url

    def from_dict(data: dict[(str, Any)]=None) -> ConfigData:
        """Create a ConfigData from a config-entry data mapping (may be None)."""
        # NOTE(review): no self/cls parameter — looks like a stripped
        # @staticmethod; works only when called as ConfigData.from_dict(data).
        result = ConfigData()
        if (data is not None):
            result.host = data.get(CONF_HOST)
            result.username = data.get(CONF_USERNAME)
            result.password = data.get(CONF_PASSWORD)
        return result

    def to_dict(self):
        """Return the credentials as a dict keyed by the CONF_* constants."""
        obj = {CONF_HOST: self.host, CONF_USERNAME: self.username, CONF_PASSWORD: self.password}
        return obj

    def __repr__(self):
        to_string = f'{self.to_dict()}'
        return to_string
def test_nested_dataset_format():
    """Nested-YAML dataset fields must keep their array/object typing after
    conversion to a graph."""
    dataset = __to_dataset__(example_dataset_nested_yaml)
    graph = convert_dataset_to_graph(Dataset.parse_obj(dataset), 'ignore')

    def photo_field(name):
        # helper: look up a field under mongo_nested_test.photos
        return field([graph], 'mongo_nested_test', 'photos', name)

    comments = photo_field('comments')
    assert isinstance(comments, ObjectField)
    assert comments.is_array
    assert comments.data_type() == 'object'
    assert isinstance(comments.fields['text'], ScalarField)
    assert comments.fields['text'].data_type() == 'None'

    tags = photo_field('tags')
    assert isinstance(tags, ScalarField)
    assert tags.is_array

    id_field = photo_field('_id')
    assert isinstance(id_field, ScalarField)
    assert id_field.is_array is False

    thumbnail = photo_field('thumbnail')
    assert isinstance(thumbnail, ObjectField)
    assert thumbnail.is_array is False
    assert thumbnail.data_type() == 'object'
    assert thumbnail.fields['photo_id'].data_type() == 'integer'
    assert thumbnail.fields['name'].data_type() == 'string'
class ColorFormatter(logging.Formatter):
    """Log formatter that colorizes the level prefix of non-exception records
    (via click); exception records fall back to the standard formatter."""

    colors = {'error': dict(fg='red'), 'exception': dict(fg='red'), 'critical': dict(fg='red'), 'debug': dict(fg='blue'), 'info': dict(fg='green'), 'warning': dict(fg='yellow')}

    def format(self, record: logging.LogRecord) -> str:
        if record.exc_info:
            # tracebacks keep the standard formatting
            return logging.Formatter.format(self, record)
        level = record.levelname.lower()
        message = record.getMessage()
        style_kwargs = self.colors.get(level)
        if style_kwargs is None:
            return message
        prefix = click.style('{}: '.format(level), **style_kwargs)
        # prefix every line so multi-line messages stay readable
        return '\n'.join((prefix + line) for line in message.splitlines())
class TestWithUnittest(unittest.TestCase):
    """Smoke-test composing the packaged Hydra config module."""

    def test_generated_config(self) -> None:
        """Compose the app config with one override and compare the full tree."""
        with initialize_config_module(version_base=None, config_module='hydra_app.conf'):
            cfg = compose(config_name='config', overrides=['app.user=test_user'])
            assert (cfg == {'app': {'user': 'test_user', 'num1': 10, 'num2': 20}, 'db': {'host': 'localhost', 'port': 3306}})
class OptionSeriesFunnel3dDatasorting(Options):
    """Option accessors for funnel3d series dataSorting (per the class name).

    NOTE(review): each name is defined twice (getter then setter); the setter
    shadows the getter as written — likely stripped @property/@setter
    decorators; confirm against the generator that produced this file.
    """

    def enabled(self):
        # getter (no default).
        return self._config_get(None)

    def enabled(self, flag: bool):
        # setter.
        self._config(flag, js_type=False)

    def matchByName(self):
        # getter (no default).
        return self._config_get(None)

    def matchByName(self, flag: bool):
        # setter.
        self._config(flag, js_type=False)

    def sortKey(self):
        # getter; defaults to 'y'.
        return self._config_get('y')

    def sortKey(self, text: str):
        # setter.
        self._config(text, js_type=False)
def get_default_sphinx_config():
    """Return the baseline Sphinx configuration dictionary used for book builds."""
    return {
        'extensions': [
            'sphinx_togglebutton',
            'sphinx_copybutton',
            'myst_nb',
            'jupyter_book',
            'sphinx_thebe',
            'sphinx_comments',
            'sphinx_external_toc',
            'sphinx.ext.intersphinx',
            'sphinx_design',
            'sphinx_book_theme',
        ],
        'pygments_style': 'sphinx',
        'html_theme': 'sphinx_book_theme',
        'html_theme_options': {'search_bar_text': 'Search this book...'},
        'html_sourcelink_suffix': '',
        'numfig': True,
        'recursive_update': False,
        'suppress_warnings': ['myst.domains'],
    }
class OptionPlotoptionsPyramidSonificationContexttracksMappingLowpass(Options):
    """Option node for a sonification lowpass-filter mapping (per the class name).

    NOTE(review): accessors take no setter counterpart here; possibly stripped
    @property decorators — confirm against the generator.
    """

    def frequency(self) -> 'OptionPlotoptionsPyramidSonificationContexttracksMappingLowpassFrequency':
        """Return the nested `frequency` sub-options object (created on demand)."""
        return self._config_sub_data('frequency', OptionPlotoptionsPyramidSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsPyramidSonificationContexttracksMappingLowpassResonance':
        """Return the nested `resonance` sub-options object (created on demand)."""
        return self._config_sub_data('resonance', OptionPlotoptionsPyramidSonificationContexttracksMappingLowpassResonance)
# NOTE(review): bare tuple expression with no effect — almost certainly the
# argument list of a stripped decorator (matches unittest.skipIf(condition,
# reason) for the class below); confirm against upstream before restoring.
(((detect_target().name() == 'cuda') and (detect_target()._arch < '80')), 'On CUDA, only supported on > SM80 arch.')
class FuseConvCase(unittest.TestCase):
    """Regression tests: conv2d + elementwise ADD must NOT be fused into the
    conv epilogue when the added operand is not a 1-D bias — the ADD has to
    remain a standalone fused_elementwise op feeding the graph output."""

    def _build_conv2d(self, batch_dim, CO, HH, WW, CI, filter_HW, stride=1, transpose=False):
        """Build an NHWC float16 (transposed) conv2d and return its output tensor.

        Weights are laid out [CO, kH, kW, CI]; inputs are named 'input_0'
        (activations) and 'input_1' (weights).
        """
        X = Tensor(shape=[batch_dim, HH, WW, CI], dtype='float16', name='input_0', is_input=True)
        W = Tensor(shape=[CO, filter_HW, filter_HW, CI], dtype='float16', name='input_1', is_input=True)
        if transpose:
            conv2d = ops.transposed_conv2d(stride=stride, pad=0)(X, W)
        else:
            conv2d = ops.conv2d(stride=stride, pad=0)(X, W)
        return conv2d

    def _assert_add_not_fused(self, module):
        """Assert 'output_0' is produced by exactly one fused_elementwise op,
        i.e. the ADD was not folded into the conv."""
        check_tensor = None
        for tensor in module.debug_sorted_graph:
            if (tensor._attrs['name'] == 'output_0'):
                check_tensor = tensor
                break
        self.assertIsNotNone(check_tensor)
        self.assertEqual(len(check_tensor.src_ops()), 1)
        src_op = list(check_tensor.src_ops())[0]
        self.assertEqual(src_op._attrs['op'], 'fused_elementwise')

    def test_do_not_fuse_with_add_not_1d(self):
        """conv2d output + full-rank bias tensor must not be epilogue-fused."""
        B = [1]
        batch_dim = shape_utils.gen_int_var_min_max(B, name='batch_dim')
        (CO, HH, WW, CI) = (256, 28, 28, 128)
        filter_HW = 3
        # bias has the full conv-output shape (not 1-D), so fusion is illegal
        bias = Tensor(shape=[batch_dim, 26, 26, CO], dtype='float16', name='bias', is_input=True)
        conv2d = self._build_conv2d(batch_dim, CO, HH, WW, CI, filter_HW)
        output = ops.elementwise(FuncEnum.ADD)(bias, conv2d)
        output._attrs['is_output'] = True
        output._attrs['name'] = 'output_0'
        target = detect_target()
        module = compile_model(output, target, './tmp', 'test_do_not_fuse_with_add_not_1d')
        self._assert_add_not_fused(module)
        for b in B:
            X_pt = torch.randn(b, CI, HH, WW).cuda().half()
            W_pt = torch.randn(CO, CI, filter_HW, filter_HW).cuda().half()
            Y_pt = torch.nn.functional.conv2d(X_pt, W_pt)
            B_pt = torch.randn(Y_pt.size()).cuda().half()
            Y_pt = (Y_pt + B_pt)
            # AIT works in NHWC; permute the PyTorch NCHW tensors accordingly
            x = X_pt.permute((0, 2, 3, 1)).contiguous()
            w = W_pt.permute((0, 2, 3, 1)).contiguous()
            b_pt = B_pt.permute((0, 2, 3, 1)).contiguous()
            inputs = {'input_0': x, 'input_1': w, 'bias': b_pt}
            y = torch.empty([b, 26, 26, CO]).cuda().half()
            module.run_with_tensors(inputs, [y])
            y_transpose = y.permute(0, 3, 1, 2)
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.1, rtol=0.1))

    def test_do_not_fuse_transpose_with_add_not_1d(self):
        """transposed_conv2d output + full-rank bias tensor must not be fused."""
        B = [1]
        (CO, HH, WW, CI) = (256, 28, 28, 256)
        filter_HW = 2
        batch_dim = shape_utils.gen_int_var_min_max(B, name='batch_dim')
        bias = Tensor(shape=[batch_dim, 56, 56, CO], dtype='float16', name='bias', is_input=True)
        conv2d = self._build_conv2d(batch_dim, CO, HH, WW, CI, filter_HW, stride=2, transpose=True)
        output = ops.elementwise(FuncEnum.ADD)(bias, conv2d)
        output._attrs['is_output'] = True
        output._attrs['name'] = 'output_0'
        target = detect_target()
        # FIX: previously reused the other test's workdir name
        # ('test_do_not_fuse_with_add_not_1d'), which made the two tests
        # clobber each other's compilation artifacts.
        module = compile_model(output, target, './tmp', 'test_do_not_fuse_transpose_with_add_not_1d')
        self._assert_add_not_fused(module)
        for b in B:
            X_pt = torch.randn(b, CI, HH, WW).cuda().half()
            # FIX: W_pt was generated twice in a row (copy-paste duplicate);
            # a single draw is sufficient.
            W_pt = torch.randn(CO, CI, filter_HW, filter_HW).cuda().half()
            Y_pt = torch.nn.functional.conv_transpose2d(X_pt, W_pt, stride=2)
            B_pt = torch.randn(b, CO, 56, 56).cuda().half()
            Y_pt = (Y_pt + B_pt)
            x = X_pt.permute((0, 2, 3, 1)).contiguous()
            w = W_pt.permute((0, 2, 3, 1)).contiguous()
            b_pt = B_pt.permute((0, 2, 3, 1)).contiguous()
            inputs = {'input_0': x, 'input_1': w, 'bias': b_pt}
            y = torch.empty([b, 56, 56, CO]).cuda().half()
            module.run_with_tensors(inputs, [y])
            y_transpose = y.permute(0, 3, 1, 2)
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.1, rtol=0.1))
class RecoveryStats(TelemetryDevice):
    """Telemetry device that regularly samples shard recovery stats from each
    configured cluster using background sampler threads."""

    internal = False
    serverless_status = serverless.Status.Internal
    command = 'recovery-stats'
    human_name = 'Recovery Stats'
    help = 'Regularly samples shard recovery stats'

    def __init__(self, telemetry_params, clients, metrics_store):
        """
        :param telemetry_params: user options; honors
            'recovery-stats-sample-interval' (seconds, must be > 0, default 1)
            and 'recovery-stats-indices' (a string for the default cluster, or
            a dict keyed by cluster name).
        :param clients: clients keyed by cluster name.
        :param metrics_store: sink for the sampled metrics.
        :raises exceptions.SystemSetupError: on a non-positive sample interval
            or an indices key that names an unknown cluster.
        """
        super().__init__()
        self.telemetry_params = telemetry_params
        self.clients = clients
        self.sample_interval = telemetry_params.get('recovery-stats-sample-interval', 1)
        if (self.sample_interval <= 0):
            raise exceptions.SystemSetupError("The telemetry parameter 'recovery-stats-sample-interval' must be greater than zero but was {}.".format(self.sample_interval))
        self.specified_cluster_names = self.clients.keys()
        indices_per_cluster = self.telemetry_params.get('recovery-stats-indices', False)
        # a bare string applies to the default cluster only
        if isinstance(indices_per_cluster, str):
            self.indices_per_cluster = {opts.TargetHosts.DEFAULT: indices_per_cluster}
        else:
            self.indices_per_cluster = indices_per_cluster
        if self.indices_per_cluster:
            # when indices are given per cluster, sample only those clusters
            for cluster_name in self.indices_per_cluster.keys():
                if (cluster_name not in clients):
                    raise exceptions.SystemSetupError("The telemetry parameter 'recovery-stats-indices' must be a JSON Object with keys matching the cluster names [{}] specified in --target-hosts but it had [{}].".format(','.join(sorted(clients.keys())), cluster_name))
            self.specified_cluster_names = self.indices_per_cluster.keys()
        self.metrics_store = metrics_store
        self.samplers = []

    def on_benchmark_start(self):
        """Spawn one daemon sampler thread per selected cluster."""
        for cluster_name in self.specified_cluster_names:
            recorder = RecoveryStatsRecorder(cluster_name, self.clients[cluster_name], self.metrics_store, self.sample_interval, (self.indices_per_cluster[cluster_name] if self.indices_per_cluster else ''))
            sampler = SamplerThread(recorder)
            self.samplers.append(sampler)
            sampler.daemon = True
            sampler.start()

    def on_benchmark_stop(self):
        """Signal all sampler threads to finish."""
        if self.samplers:
            for sampler in self.samplers:
                sampler.finish()
def run_preanalysis(tree, taxonomy_db):
    """Prepare a phylogenetic tree: resolve polytomies, midpoint-root it,
    annotate taxonomy, and give internal nodes synthetic names.

    Returns the sorted set of annotation property names seen on the nodes
    (excluding the internal '_speciesFunction' helper property).
    """
    tree.resolve_polytomy(defaults={'dist': 0, 'support': None})
    # re-root at the midpoint outgroup before annotating
    root_mid = tree.get_midpoint_outgroup()
    tree.set_outgroup(root_mid)
    tree.dist = 0.01
    # species id is the leading dot-separated token of each node name
    tree.set_species_naming_function((lambda x: x.split('.')[0]))
    taxonomy_db.annotate_tree(tree, taxid_attr='species')
    annot_props = set()
    for (i, node) in enumerate(tree.traverse()):
        if (not node.is_leaf):
            # synthetic, depth-tagged name for every internal node
            node.name = ('%s-%d' % (make_name(i), get_depth(node)))
        annot_props |= set(node.props)
    return sorted((annot_props - {'_speciesFunction'}))
def lambda_handler(event, context):
    """Cognito create-auth-challenge trigger: issue (or re-use) a secret
    sign-in code, e-mail it on first challenge, and attach it to the response.

    Returns the mutated *event*, or a 502 payload when the e-mail fails.
    """
    response = event.get('response')
    print(response)
    request = event.get('request')
    session = request.get('session')
    user_email = request.get('userAttributes').get('email')
    if session:
        # a previous challenge exists in this session — re-use its code
        secret_login_code = session[0].get('challengeMetadata')
    else:
        # first challenge: mint a fresh code and e-mail it to the user
        secret_login_code = generate_login_code(event)
        try:
            email_content = assemble_email_contents(secret_login_code)
            send_notification_email(user_email, email_content)
        except Exception as e:
            print(f'Error: Failed to send sign-in code - {user_email}.')
            print(e)
            return {'statusCode': 502, 'headers': {'Access-Control-Allow-Methods': 'POST,OPTIONS', 'Access-Control-Allow-Origin': '*'}, 'body': '{"success" : false}'}
    # NOTE(review): publicChallengeParameters are sent to the client — exposing
    # the answer there looks like it defeats the challenge; confirm intent.
    response.update({'privateChallengeParameters': {'answer': secret_login_code}, 'challengeMetadata': secret_login_code, 'publicChallengeParameters': {'answer': secret_login_code}})
    print(event)
    return event
def _fix_overlappings(spans):
good_spans = set()
for span in spans:
should_add_span = False
for other_span in spans:
if (((span.start == other_span.start) and (span.end == other_span.end)) or (span.start >= other_span.end) or (span.end <= other_span.start)):
should_add_span = True
continue
if (((span.start > other_span.start) and (span.end <= other_span.end)) or ((span.start >= other_span.start) and (span.end < other_span.end))):
should_add_span = False
break
if should_add_span:
good_spans.add(span)
return sorted(good_spans, key=(lambda x: (x.start, (- len(x))))) |
def run_base_box_3d():
    """Integrate a linear function over an extruded unit cube, then over a
    rectangular sub-box selected via SubDomainData."""
    base = UnitSquareMesh(10, 10, quadrilateral=True)
    mesh = ExtrudedMesh(base, layers=8)
    f = Function(FunctionSpace(mesh, 'CG', 1))
    x, y, z = SpatialCoordinate(mesh)
    f.interpolate(x + (2 * y) + (4 * z))
    # exact integral of x + 2y + 4z over the unit cube: 0.5 + 1 + 2 = 3.5
    assert np.allclose(3.5, assemble(f * dx))
    coords = base.coordinates
    region = SubDomainData(And(And((0.2 <= real(coords[0])), (real(coords[0]) <= 0.5)), And((0.3 <= real(coords[1])), (real(coords[1]) <= 0.7))))
    subset = op2.Subset(mesh.cell_set, region.indices)
    assert np.allclose(0.402, assemble(f * dx(subdomain_data=subset)))
class SQLOutputParser(BaseOutputParser):
    """Output parser that extracts a strict-JSON payload from a model response."""

    def __init__(self, is_stream_out: bool=False, **kwargs):
        super().__init__(is_stream_out=is_stream_out, **kwargs)

    def parse_model_nostream_resp(self, response: ResponseTye, sep: str):
        """Parse a non-streaming model response and decode it as strict JSON.

        Raises json.JSONDecodeError when the cleaned text is not valid JSON.
        """
        model_out_text = super().parse_model_nostream_resp(response, sep)
        # strip prompt scaffolding/markers before decoding
        clean_str = super().parse_prompt_response(model_out_text)
        return json.loads(clean_str, strict=True)
# NOTE(review): the three bare lines below appear to be stripped decorators —
# likely @pytest.mark.usefixtures('set_site_config'), @settings(max_examples=10)
# (hypothesis) and @given(config_generators()).  As written, '.usefixtures(...)'
# is not even valid stand-alone syntax; restore against the original module.
.usefixtures('set_site_config')
(max_examples=10)
(config_generators())
def test_that_if_field_is_given_and_grid_is_missing_you_get_error(tmp_path_factory, config_generator):
    """Using FIELD without GRID must fail config validation with a clear error."""
    with config_generator(tmp_path_factory) as config_values:
        config_dict = config_values.to_config_dict('test.ert', os.getcwd())
        # drop GRID, and only proceed if the generated config actually has FIELDs
        del config_dict[ConfigKeys.GRID]
        assume((len(config_dict.get(ConfigKeys.FIELD, [])) > 0))
        with pytest.raises(ConfigValidationError, match='In order to use the FIELD keyword, a GRID must be supplied'):
            _ = ErtConfig.from_dict(config_dict)
class IngressStatusTestAcrossNamespaces(AmbassadorTest):
    """KAT test: an Ingress living in a different namespace ('alt-namespace')
    is still routed, and its status is synced from the Ambassador Service."""

    # load-balancer status pushed onto the Service; expected to show up on the Ingress
    status_update = {'loadBalancer': {'ingress': [{'ip': '168.168.168.168'}]}}

    def init(self):
        # backend service lives in the alternate namespace
        self.target = HTTP(namespace='alt-namespace')

    def manifests(self) -> str:
        # NOTE(review): the '{self.path.k8s}'-style placeholders are NOT
        # f-string interpolated here — presumably the KAT harness formats the
        # manifest text later; confirm before touching these braces.
        return ((namespace_manifest('alt-namespace') + '\n---\napiVersion: networking.k8s.io/v1\nkind: Ingress\nmetadata:\n annotations:\n kubernetes.io/ingress.class: ambassador\n getambassador.io/ambassador-id: {self.ambassador_id}\n name: {self.path.k8s}\n namespace: alt-namespace\nspec:\n rules:\n - paths:\n - backend:\n service:\n name: {self.target.path.k8s}\n port:\n number: 80\n path: /{self.name}/\n pathType: Prefix\n') + super().manifests())

    def queries(self):
        # 'True or ...' disables the Darwin guard, so the status push always runs
        if (True or (sys.platform != 'darwin')):
            text = json.dumps(self.status_update)
            # push the fake LB status onto the Service via the kubestatus helper
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.path.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # give the status a moment to propagate
            time.sleep(1)
        (yield Query(self.url((self.name + '/'))))
        # path normalization should collapse the '..' before route matching
        (yield Query(self.url(f'need-normalization/../{self.name}/')))

    def check(self):
        if (not parse_bool(os.environ.get('AMBASSADOR_PYTEST_INGRESS_TEST', 'false'))):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        # 'False and ...' keeps this xfail branch permanently disabled
        if (False and (sys.platform == 'darwin')):
            pytest.xfail('not supported on Darwin')
        for r in self.results:
            if r.backend:
                # both queries must land on the alt-namespace backend,
                # with the normalized original path preserved
                assert (r.backend.name == self.target.path.k8s), (r.backend.name, self.target.path.k8s)
                assert r.backend.request
                assert (r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/')
        # finally, the Ingress status must mirror what we pushed to the Service
        ingress_cmd = ['tools/bin/kubectl', 'get', '-o', 'json', 'ingress', self.path.k8s, '-n', 'alt-namespace']
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        (ingress_out, _) = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert (ingress_json['status'] == self.status_update), f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
def ident_test(comm, port):
    """Read and print the FPGA identifier string over the given comm backend.

    The identifier lives in identifier_mem as one character per 32-bit word,
    NUL-terminated, at most 256 characters.
    """
    bus = comms[comm](port=port, csr_csv='csr.csv')
    bus.open()
    ident = ''
    for offset in range(256):
        ch = chr(bus.read(bus.bases.identifier_mem + (4 * offset)) & 255)
        ident += ch
        if ch == '\x00':
            break
    print(ident)
    bus.close()
def lazy_import():
    """Import the related model classes on demand and publish them at module
    scope (avoids a circular import at module load time)."""
    from fastly.model.included_with_waf_firewall_version import IncludedWithWafFirewallVersion
    from fastly.model.waf_firewall_version_response_data import WafFirewallVersionResponseData
    module_scope = globals()
    module_scope['IncludedWithWafFirewallVersion'] = IncludedWithWafFirewallVersion
    module_scope['WafFirewallVersionResponseData'] = WafFirewallVersionResponseData
class StopJob(Job):
    """Job that stops a running container via `<container_runtime> stop <release>`."""

    _label = 'stop'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # e.g. ['docker', 'stop', '<release>'] depending on the configured runtime
        self._command = [self.options['container_runtime'], 'stop', self.release]
        # discard the stop command's stdout
        self._popen_kwargs['stdout'] = subprocess.DEVNULL

    def _pre_start_hook(self):
        """No preparation is needed before stopping a container."""
        pass
class TabsExtraListener(sublime_plugin.EventListener):
    """Sublime Text event listener that reroutes the built-in tab close
    commands to the tabs_extra_* variants and handles sort-on-load/save."""

    # set while a tabs-extra-initiated close is in flight, so the built-in
    # close_file command is recognized and tagged instead of remapped
    extra_command_call = False

    def on_window_command(self, window, command_name, args):
        """Intercept window commands; return a (command, args) tuple to remap
        a built-in close command to its tabs_extra equivalent, or None to let
        the command run unchanged."""
        extra_command_call = TabsExtraListener.extra_command_call
        cmd = None
        if (args is None):
            view = window.active_view()
            if (view is None):
                return cmd
            if (extra_command_call and (command_name == 'close_file')):
                # a tabs-extra close triggered this; tag the view and pass through
                view.settings().set('tabs_extra_closing', True)
                return cmd
            # default args: the active view's group/index
            (group, index) = window.get_view_index(view)
            args = {'group': group, 'index': index}
        if (command_name in ['close_by_index', 'close']):
            command_name = 'tabs_extra_close'
            args['close_type'] = 'single'
            cmd = (command_name, args)
        elif (command_name == 'close_all'):
            command_name = 'tabs_extra_close_all'
            args = {}
            cmd = (command_name, args)
        elif (command_name == 'close_others_by_index'):
            command_name = 'tabs_extra_close'
            args['close_type'] = 'other'
            cmd = (command_name, args)
        elif (command_name == 'close_to_right_by_index'):
            command_name = 'tabs_extra_close'
            args['close_type'] = 'right'
            cmd = (command_name, args)
        return cmd

    def on_load(self, view):
        """On file load: optionally sort tabs; defer via a settings flag if the
        view is not yet attached to a window group."""
        Focus.cancel()
        if sort_on_load_save():
            if (not self.on_sort(view)):
                view.settings().set('tabsextra_to_sort', True)

    def on_post_save(self, view):
        """On save: optionally sort tabs."""
        if sort_on_load_save():
            self.on_sort(view)

    def on_sort(self, view):
        """Run the configured sort command on the view's window.

        Returns True when a sort was actually issued (view attached and a sort
        module configured)."""
        sorted_views = False
        window = view.window()
        if (window and (window.get_view_index(view)[1] != (- 1))):
            cmd = sublime.load_settings(SETTINGS).get('sort_on_load_save_command', {})
            module = str(cmd.get('module', ''))
            reverse = bool(cmd.get('reverse', False))
            if (module != ''):
                window.run_command('tabs_extra_sort', {'sort_by': module, 'reverse': reverse})
                sorted_views = True
        return sorted_views

    def on_pre_close(self, view):
        """Before a close: remember the view's window/group placement so close
        handling can tell tabs-extra closes from plain ones."""
        Focus.cancel()
        view.settings().set('tabs_extra_is_closed', True)
        if (not view.settings().get('tabs_extra_closing', False)):
            # not initiated by tabs-extra: flag so on_window_command tags
            # the follow-up close_file, and stash the placement info
            TabsExtraListener.extra_command_call = True
            window = view.window()
            if (window is not None):
                view.settings().set('tabs_extra_view_info', view.window().get_view_index(view))
                view.settings().set('tabs_extra_window_info', view.window().id())
        else:
            TabsExtraListener.extra_command_call = False

    def on_close(self, view):
        """After a close: clear the in-flight flag when placement info was stashed."""
        view_info = view.settings().get('tabs_extra_view_info', None)
        window_info = view.settings().get('tabs_extra_window_info', None)
        if ((view_info is not None) and (window_info is not None)):
            TabsExtraListener.extra_command_call = False

    def on_activated(self, view):
        """On tab activation: timestamp the sheet, clear a stale 'moving'
        marker when the view changed window/group, and run a deferred sort."""
        if (not TabsExtraListener.extra_command_call):
            window = view.window()
            if (window is None):
                return
            s = window.active_sheet()
            timestamp_view(window, s)
        moving = view.settings().get('tabs_extra_moving', None)
        if (moving is not None):
            (win_id, group_id) = moving
            window = view.window()
            if (window is None):
                return
            active_group = window.get_view_index(view)[0]
            if ((window.id() != win_id) or (int(group_id) != int(active_group))):
                view.settings().erase('tabs_extra_moving')
        elif (sort_on_load_save() and view.settings().get('tabsextra_to_sort')):
            # deferred sort requested by on_load
            view.settings().erase('tabsextra_to_sort')
            self.on_sort(view)
class AssetMetadata(object):
    """Value object holding a Cloud Asset Inventory name/type pair."""

    def __init__(self, cai_name='', cai_type=''):
        self.cai_name = cai_name
        self.cai_type = cai_type

    def __eq__(self, other):
        # FIX: comparing against a non-AssetMetadata used to raise
        # AttributeError; return NotImplemented so Python falls back properly.
        if not isinstance(other, AssetMetadata):
            return NotImplemented
        return ((self.cai_name == other.cai_name) and (self.cai_type == other.cai_type))

    def __hash__(self):
        # FIX: defining __eq__ alone silently set __hash__ to None, making
        # instances unusable in sets/dict keys; hash consistently with __eq__.
        return hash((self.cai_name, self.cai_type))

    def __repr__(self):
        return 'cai_name: {}, cai_type: {}'.format(self.cai_name, self.cai_type)
class WorkflowClient():
    """Client facade for starting workflows and waiting on their results.

    NOTE(review): the bare class annotations plus the ``cls(...)``
    construction in new_client suggest this was a @dataclass and that
    new_client / start carried @classmethod decorators that were stripped;
    ``domain: domain`` also looks like a mangled annotation (likely ``str``).
    Confirm against the original source.
    """

    service: WorkflowService
    domain: domain
    options: WorkflowClientOptions

    def new_client(cls, host: str='localhost', port: int=7933, domain: str='', options: WorkflowClientOptions=None, timeout: int=DEFAULT_SOCKET_TIMEOUT_SECONDS) -> WorkflowClient:
        """Create a client connected to the workflow service at host:port."""
        service = WorkflowService.create(host, port, timeout=timeout)
        return cls(service=service, domain=domain, options=options)

    def start(cls, stub_fn: Callable, *args) -> WorkflowExecutionContext:
        """Start the workflow behind a bound stub method; return its execution context."""
        stub = stub_fn.__self__
        assert (stub._workflow_client is not None)
        assert (stub_fn._workflow_method is not None)
        return exec_workflow(stub._workflow_client, stub_fn._workflow_method, args, workflow_options=stub._workflow_options, stub_instance=stub)

    def new_workflow_stub(self, cls: Type, workflow_options: WorkflowOptions=None):
        """Build a WorkflowStub subclass of *cls* whose decorated methods are
        replaced by remote-invoking stub functions, and return an instance."""
        attrs = {}
        attrs['_workflow_client'] = self
        attrs['_workflow_options'] = workflow_options
        for (name, fn) in inspect.getmembers(cls, inspect.isfunction):
            # swap each decorated method for the matching remote stub
            if hasattr(fn, '_workflow_method'):
                attrs[name] = get_workflow_stub_fn(fn._workflow_method)
            elif hasattr(fn, '_signal_method'):
                attrs[name] = get_signal_stub_fn(fn._signal_method)
            elif hasattr(fn, '_query_method'):
                attrs[name] = get_query_stub_fn(fn._query_method)
        stub_cls = type(cls.__name__, (WorkflowStub,), attrs)
        return stub_cls()

    def new_workflow_stub_from_workflow_id(self, cls: Type, workflow_id: str):
        """Build a stub bound to an already-running workflow id (run id unknown)."""
        stub_instance = self.new_workflow_stub(cls)
        execution = WorkflowExecution(workflow_id=workflow_id, run_id=None)
        stub_instance._execution = execution
        return stub_instance

    def wait_for_close(self, context: WorkflowExecutionContext) -> object:
        """Block until the workflow in *context* closes and return its result."""
        return self.wait_for_close_with_workflow_id(workflow_id=context.workflow_execution.workflow_id, run_id=context.workflow_execution.run_id, workflow_type=context.workflow_type)

    def wait_for_close_with_workflow_id(self, workflow_id: str, run_id: str=None, workflow_type: str=None):
        """Poll for the workflow's close event and translate it.

        Returns the deserialized result on successful completion; raises an
        exception matching the close event type otherwise.

        NOTE(review): the loop has no sleep — presumably the history request
        long-polls server side; confirm this is not a busy wait.
        """
        while True:
            history_request = create_close_history_event_request(self, workflow_id, run_id)
            (history_response, err) = self.service.get_workflow_execution_history(history_request)
            if err:
                raise Exception(err)
            if (not history_response.history.events):
                # workflow not closed yet — poll again
                continue
            history_event = history_response.history.events[0]
            if (history_event.event_type == EventType.WorkflowExecutionCompleted):
                attributes = history_event.workflow_execution_completed_event_attributes
                return json.loads(attributes.result)
            elif (history_event.event_type == EventType.WorkflowExecutionFailed):
                attributes = history_event.workflow_execution_failed_event_attributes
                if (attributes.reason == 'WorkflowFailureException'):
                    # re-raise the original workflow exception as the cause
                    exception = deserialize_exception(attributes.details)
                    if isinstance(exception, ActivityFailureException):
                        exception.set_cause()
                    workflow_execution = WorkflowExecution(workflow_id=workflow_id, run_id=run_id)
                    raise WorkflowFailureException(workflow_type=workflow_type, execution=workflow_execution) from exception
                else:
                    details: Dict = json.loads(attributes.details)
                    detail_message = details.get('detailMessage', '')
                    raise WorkflowExecutionFailedException(attributes.reason, details=details, detail_message=detail_message)
            elif (history_event.event_type == EventType.WorkflowExecutionTimedOut):
                raise WorkflowExecutionTimedOutException()
            elif (history_event.event_type == EventType.WorkflowExecutionTerminated):
                attributes = history_event.workflow_execution_terminated_event_attributes
                raise WorkflowExecutionTerminatedException(reason=attributes.reason, details=attributes.details, identity=attributes.identity)
            elif (history_event.event_type == EventType.WorkflowExecutionCanceled):
                raise WorkflowExecutionCanceledException()
            else:
                raise Exception(('Unexpected history close event: ' + str(history_event)))

    def new_activity_completion_client(self):
        """Return a client for completing activities out-of-band."""
        return ActivityCompletionClient(self.service)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.