code stringlengths 281 23.7M |
|---|
def default(template='index'):
    """Render a static page, appending '.html' when missing; 404 for unknown pages."""
    if not template.endswith('.html'):
        template += '.html'
    try:
        return render_template(
            'static_pages/' + template,
            is_redirect=request.args.get('redirected'),
        )
    except TemplateNotFound:
        return render_template('static_pages/404.html'), 404
class OptionSeriesArcdiagramMarker(Options):
    """Marker options for arc-diagram series.

    NOTE(review): each getter/setter pair was listed as two plain methods with
    the same name, so the setter silently shadowed the getter.  The
    ``@property`` / ``@<name>.setter`` decorators restore the intended
    attribute-style accessors.
    """

    @property
    def fillColor(self):
        # No default fill colour.
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def fillOpacity(self):
        # Fully opaque by default.
        return self._config_get(1)

    @fillOpacity.setter
    def fillOpacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def states(self) -> 'OptionSeriesArcdiagramMarkerStates':
        # Sub-options object, created lazily by the Options machinery.
        return self._config_sub_data('states', OptionSeriesArcdiagramMarkerStates)

    @property
    def symbol(self):
        return self._config_get('circle')

    @symbol.setter
    def symbol(self, text: str):
        self._config(text, js_type=False)
def _only_reshape(array, source, target):
(source, target) = (source.split(), target.replace(' * ', '*').split())
input_shape = {key: array.shape[index] for (index, key) in enumerate(source)}
output_shape = []
for t in target:
product = 1
if (not (t == '1')):
t = t.split('*')
for t_ in t:
product *= input_shape[t_]
output_shape.append(product)
return array.reshape(output_shape) |
def add_bgp_error_metadata(code, sub_code, def_desc='unknown'):
    """Class decorator registering a BGPSException subclass under (code, sub_code).

    Raises ValueError when that (code, sub_code) pair is already registered.
    The CODE / SUB_CODE / DEF_DESC class attributes are set on the decorated
    class either way.
    """
    if _EXCEPTION_REGISTRY.get((code, sub_code)) is not None:
        raise ValueError(
            'BGPSException with code %d and sub-code %d already defined.' % (code, sub_code)
        )

    def decorator(subclass):
        # Only genuine BGPSException subclasses go into the registry.
        if issubclass(subclass, BGPSException):
            _EXCEPTION_REGISTRY[(code, sub_code)] = subclass
        subclass.CODE = code
        subclass.SUB_CODE = sub_code
        subclass.DEF_DESC = def_desc
        return subclass

    return decorator
def upgrade():
    """Alembic upgrade: create the event_model and notification_client tables."""
    op.create_table(
        'event_model',
        # SQLite has no auto-incrementing BigInteger, hence the variant.
        sa.Column('offset', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
        sa.Column('key', sa.String(length=1024), nullable=False),
        sa.Column('value', sa.Text(), nullable=True),
        sa.Column('context', sa.Text(), nullable=True),
        sa.Column('namespace', sa.String(length=1024), nullable=True),
        sa.Column('sender', sa.String(length=1024), nullable=True),
        sa.Column('create_time', sa.BigInteger(), nullable=False),
        sa.Column('uuid', sa.String(length=40), nullable=False),
        sa.PrimaryKeyConstraint('offset'),
        sa.UniqueConstraint('uuid'),
    )
    op.create_table(
        'notification_client',
        sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), autoincrement=True, nullable=False),
        sa.Column('namespace', sa.String(length=1024), nullable=True),
        sa.Column('sender', sa.String(length=1024), nullable=True),
        sa.Column('create_time', sa.BigInteger(), nullable=True),
        sa.Column('is_deleted', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
class StructureStructureInterface(AbstractBCPlacement):
    """Boundary-condition placement on the interface between two named structures.

    NOTE(review): the validator appeared as a bare ``('structures', always=True)``
    expression (a syntax error — keyword argument inside a tuple literal); it is
    restored as the ``@pd.validator`` decorator it was stripped from.
    """

    structures: Tuple[str, str] = pd.Field(title='Structures', description='Names of two structures.')

    @pd.validator('structures', always=True)
    def unique_names(cls, val):
        """Reject placements where both structure names are identical."""
        if val[0] == val[1]:
            raise SetupError("The same structure is provided twice in 'StructureStructureInterface'.")
        return val
# NOTE(review): the bare ``()`` above this fixture was the remnant of a stripped
# decorator; restored as a pytest fixture, which matches its Generator/yield shape.
@pytest.fixture()
def system_with_dataset_reference() -> Generator:
    """Yield a one-element list holding a System whose privacy declaration
    references the 'test_dataset' dataset."""
    yield [
        models.System(
            organization_fides_key=1,
            fides_key='test_system',
            system_type='test',
            privacy_declarations=[
                models.PrivacyDeclaration(
                    name='test_privacy_declaration',
                    data_categories=[],
                    data_use='test_data_use',
                    data_subjects=[],
                    dataset_references=['test_dataset'],
                )
            ],
        )
    ]
def get_nodes(request, sr=(), pr=(), order_by=('hostname',), annotate=None, extra=None, **kwargs):
    """Build an ordered Node queryset with optional related/annotate/extra clauses.

    Non-staff users are always restricted to their own datacenter.  Remaining
    keyword arguments become filter() conditions.
    """
    if not request.user.is_staff:
        kwargs['dc'] = request.dc
    qs = Node.objects.select_related(*sr) if sr else Node.objects
    if pr:
        qs = qs.prefetch_related(*pr)
    if annotate:
        qs = qs.annotate(**annotate)
    if extra:
        qs = qs.extra(**extra)
    if kwargs:
        qs = qs.filter(**kwargs)
    return qs.order_by(*order_by)
def test_nested_schema_array():
    """Arrays of schema references validate nested items and honour allow_null."""
    artist = typesystem.Schema(fields={'name': typesystem.String(max_length=100)})
    definitions = typesystem.Definitions()
    definitions['Artist'] = artist

    def album_schema(**array_kwargs):
        # Album schema whose 'artists' array references the Artist schema.
        return typesystem.Schema(fields={
            'title': typesystem.String(max_length=100),
            'release_year': typesystem.Integer(),
            'artists': typesystem.Array(
                items=typesystem.Reference(to='Artist', definitions=definitions),
                **array_kwargs,
            ),
        })

    def payload(artists):
        return {'title': 'Double Negative', 'release_year': '2018', 'artists': artists}

    album = album_schema()
    value = album.validate(payload([{'name': 'Low'}]))
    assert value == {'title': 'Double Negative', 'release_year': 2018, 'artists': [{'name': 'Low'}]}

    value, error = album.validate_or_error(payload(None))
    assert dict(error) == {'artists': 'May not be null.'}

    value, error = album.validate_or_error(payload('Low'))
    assert dict(error) == {'artists': 'Must be an array.'}

    # With allow_null=True the null artists value passes through unchanged.
    album = album_schema(allow_null=True)
    value = album.validate(payload(None))
    assert value == {'title': 'Double Negative', 'release_year': 2018, 'artists': None}
def test_digest_change(flyte_project):
    """Editing a non-ignored file must change the computed project digest."""
    ignore = IgnoreGroup(flyte_project, [GitIgnore, DockerIgnore, StandardIgnore])
    before = compute_digest(flyte_project, ignore.is_ignored)
    target = flyte_project / 'src' / 'workflows' / 'hello_world.py'
    # The file we modify must itself survive the ignore filters.
    assert not ignore.is_ignored(target)
    target.write_text("print('I do matter!')")
    after = compute_digest(flyte_project, ignore.is_ignored)
    assert before != after
class OptionPlotoptionsItemSonificationContexttracksMappingLowpass(Options):
    # Accessors for the sonification lowpass-filter mapping sub-options.
    # NOTE(review): similar generated Options classes expose these via
    # @property; here they are plain methods and must be called — confirm
    # against the generator.

    def frequency(self) -> 'OptionPlotoptionsItemSonificationContexttracksMappingLowpassFrequency':
        """Sub-options for the lowpass filter frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsItemSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsItemSonificationContexttracksMappingLowpassResonance':
        """Sub-options for the lowpass filter resonance mapping."""
        return self._config_sub_data('resonance', OptionPlotoptionsItemSonificationContexttracksMappingLowpassResonance)
def init_params(networks: TDMPCNetworks, spec: specs.EnvironmentSpec, key: jax.random.PRNGKeyArray):
    """Initialise all TD-MPC network parameters from a single PRNG key."""
    enc_key, q1_key, q2_key, dyn_key, rew_key, pi_key = jax.random.split(key, 6)
    # Batched zero observations/actions matching the environment spec.
    dummy_obs = utils.add_batch_dim(utils.zeros_like(spec.observations))
    dummy_act = utils.add_batch_dim(utils.zeros_like(spec.actions))
    encoder_params = networks.encoder_network.init(enc_key, dummy_obs)
    # Every downstream network consumes the encoder's latent embedding.
    embedding = networks.encoder_network.apply(encoder_params, dummy_obs)
    return TDMPCParams(
        encoder_params=encoder_params,
        critic_params=networks.critic_network.init(q1_key, embedding, dummy_act),
        twin_critic_params=networks.critic_network.init(q2_key, embedding, dummy_act),
        dynamics_params=networks.dynamics_network.init(dyn_key, embedding, dummy_act),
        reward_params=networks.reward_network.init(rew_key, embedding, dummy_act),
        policy_params=networks.policy_network.init(pi_key, embedding),
    )
def test_capture_serverless_elb(event_elb, context, elasticapm_client):
    """An ELB-triggered lambda invocation is captured as one transaction with one span."""
    os.environ['AWS_LAMBDA_FUNCTION_NAME'] = 'test_func'

    # NOTE(review): `_serverless` appeared as a bare expression (a no-op); it
    # must decorate the handler for the capture to happen.
    @_serverless
    def test_func(event, context):
        with capture_span('test_span'):
            time.sleep(0.01)
        return {'statusCode': 200, 'headers': {'foo': 'bar'}}

    test_func(event_elb, context)
    assert len(elasticapm_client.events[constants.TRANSACTION]) == 1
    transaction = elasticapm_client.events[constants.TRANSACTION][0]
    assert transaction['name'] == 'POST unknown route'
    assert transaction['result'] == 'HTTP 2xx'
    assert transaction['span_count']['started'] == 1
    assert transaction['context']['request']['method'] == 'POST'
    assert transaction['context']['request']['headers']
    assert transaction['context']['response']['status_code'] == 200
    assert transaction['context']['service']['origin']['name'] == 'lambda-279XGJDqGZ5rsrHC2Fjr'
def _cmp_by_origin(path1, path2):
    """Prefer the path with the better ORIGIN attribute (IGP > EGP > INCOMPLETE).

    Returns the preferred path, or None when the origins tie.
    """
    preference = {
        BGP_ATTR_ORIGIN_IGP: 3,
        BGP_ATTR_ORIGIN_EGP: 2,
        BGP_ATTR_ORIGIN_INCOMPLETE: 1,
    }

    def origin_pref(origin):
        # Unknown origin values are logged and treated as least preferred.
        try:
            return preference[origin.value]
        except KeyError:
            LOG.error('Invalid origin value encountered %s.', origin)
            return 0

    origin1 = path1.get_pattr(BGP_ATTR_TYPE_ORIGIN)
    origin2 = path2.get_pattr(BGP_ATTR_TYPE_ORIGIN)
    assert origin1 is not None and origin2 is not None
    if origin1.value == origin2.value:
        return None
    pref1 = origin_pref(origin1)
    pref2 = origin_pref(origin2)
    if pref1 == pref2:
        return None
    return path1 if pref1 > pref2 else path2
def _generate_c_usage_code(processed_args):
    """Generate C ``printf`` statements printing a usage/help message.

    Splits *processed_args* into positionals and options, emits a USAGE line,
    then an aligned two-column OPTIONS listing whose right column embeds
    printf format arguments showing each option's runtime default.  Returns
    the printf statements joined into one string of C source.
    """
    lines = []
    positionals = []
    opts = []
    # Partition the arguments: positionals go on the USAGE line, options get
    # their own two-column section.
    for arg in processed_args:
        if arg.is_positional():
            positionals.append(arg)
        else:
            opts.append(arg)
    lines.append('"\\n"')
    lines.append('"USAGE:\\n\\n"')
    line = 'program_name'
    if opts:
        line += ' [OPTIONS]'
    if positionals:
        positional_names = ' '.join([x.var_name for x in positionals])
        line += f' {positional_names}'
    lines.append((('"' + line) + '\\n"'))
    if opts:
        lines.append('"\\nOPTIONS:\\n\\n"')
        # Widest left column seen so far, used to align descriptions.
        longest_left_col = 0
        usage_lines = []
        for opt in opts:
            left_col = ('"' + opt.opt)
            if (opt.longopt is not None):
                left_col += (' ' + opt.longopt)
            arg = None
            right_col = ''
            # The multi-line f-strings below deliberately close the C string
            # literal and append printf arguments on their second line.
            if opt.is_flag():
                right_col = f'''{opt.desc} flag
"'''
            elif (type(opt.value) == list):
                # NOTE(review): list-valued options are described as "A string
                # value" with a choices hint — confirm this wording is intended.
                right_col = f'''A string value (default: %s)
", {opt.var_name} ? {opt.var_name} : "null"'''
                choices = '|'.join(opt.value)
                arg = f' [{choices}]'
            elif (ArgType.INT == opt.type):
                right_col = f'''An int value (default: %ld)
", {opt.var_name}'''
                arg = ' [int]'
            elif (ArgType.FLOAT == opt.type):
                right_col = f'''A float value (default: %.2f)
", {opt.var_name}'''
                arg = ' [float]'
            elif (ArgType.STRING == opt.type):
                right_col = f'''A string value (default: %s)
", {opt.var_name} ? {opt.var_name} : "null"'''
                arg = ' [string]'
            elif (ArgType.FILE == opt.type):
                right_col = f'''A filename (default: %s)
", {opt.var_name} ? {opt.var_name} : "null"'''
                arg = ' FILE'
            if (arg is not None):
                left_col += arg
            if (len(left_col) > longest_left_col):
                longest_left_col = len(left_col)
            usage_lines.append((left_col, right_col))
        # Pad every left column to the widest width plus a two-space gutter.
        for (leftcol, rightcol) in usage_lines:
            num_spaces = ((longest_left_col + 2) - len(leftcol))
            lines.append(((leftcol + (' ' * num_spaces)) + rightcol))
    lines.append('"\\n"')
    return '\n'.join([f' printf({line});' for line in lines])
class HeadlessBrowser():
    """Thin wrapper around headless Chrome for fetching pages and screenshots."""

    # Page-load timeout in seconds.
    WEBDRIVER_TIMEOUT = 12
    # Chrome flags tuned for sandbox-less, cache-less, automation-hidden use.
    WEBDRIVER_ARGUMENTS = ('--disable-dev-shm-usage', '--ignore-certificate-errors', '--headless', '--incognito', '--no-sandbox', '--disable-gpu', '--disable-extensions', '--disk-cache-size=0', '--aggressive-cache-discard', '--disable-notifications', '--disable-remote-fonts', '--disable-sync', '--window-size=1366,768', '--hide-scrollbars', '--disable-audio-output', '--dns-prefetch-disable', '--no-default-browser-check', '--disable-background-networking', '--enable-features=NetworkService,NetworkServiceInProcess', '--disable-background-timer-throttling', '--disable-backgrounding-occluded-windows', '--disable-breakpad', '--disable-client-side-phishing-detection', '--disable-component-extensions-with-background-pages', '--disable-default-apps', '--disable-features=TranslateUI', '--disable-hang-monitor', '--disable-ipc-flooding-protection', '--disable-prompt-on-repost', '--disable-renderer-backgrounding', '--force-color-profile=srgb', '--metrics-recording-only', '--no-first-run', '--password-store=basic', '--use-mock-keychain', '--disable-blink-features=AutomationControlled')

    def __init__(self, useragent=None):
        """Start Chrome with the flags above; *useragent* overrides the UA string."""
        chrome_options = webdriver.ChromeOptions()
        for opt in self.WEBDRIVER_ARGUMENTS:
            chrome_options.add_argument(opt)
        # Propagate any system proxy configuration to Chrome.
        proxies = urllib.request.getproxies()
        if proxies:
            proxy_string = ';'.join(['{}={}'.format(scheme, url) for (scheme, url) in proxies.items()])
            chrome_options.add_argument('--proxy-server={}'.format(proxy_string))
        # Hide the "controlled by automated software" switches/extension.
        chrome_options.add_experimental_option('excludeSwitches', ['enable-automation'])
        chrome_options.add_experimental_option('useAutomationExtension', False)
        self.driver = webdriver.Chrome(options=chrome_options)
        self.driver.set_page_load_timeout(self.WEBDRIVER_TIMEOUT)
        # Default UA is the browser's own with "Headless" stripped out.
        self.driver.execute_cdp_cmd('Network.setUserAgentOverride', {'userAgent': (useragent or self.driver.execute_script('return navigator.userAgent').replace('Headless', ''))})
        # Convenience aliases onto the underlying driver.
        self.get = self.driver.get
        self.screenshot = self.driver.get_screenshot_as_png

    def stop(self):
        """Shut down the browser and reap any leftover child processes."""
        try:
            self.driver.close()
            self.driver.quit()
        except Exception:
            pass
        try:
            # Reap zombie chromedriver/chrome children until none remain.
            pid = True
            while pid:
                (pid, status) = os.waitpid((- 1), os.WNOHANG)
        except ChildProcessError:
            pass

    def __del__(self):
        # Best-effort cleanup when the wrapper is garbage collected.
        self.stop()
class SettingsDialog(QDialog):
    """Modal dialog bundling all SIAC settings tabs."""

    def __init__(self, parent):
        # Fall back to the active Anki window when no parent was supplied.
        if not parent:
            parent = mw.app.activeWindow()
        QDialog.__init__(self, parent)
        self.parent = parent
        self.setWindowTitle('SIAC Settings')
        self.setup_ui()
        self.exec_()

    def setup_ui(self):
        """Build the tab widget, the five settings tabs and the button box."""
        self.vbox = QVBoxLayout()
        self.tabs = QTabWidget()
        self.tab_appearance = AppearanceSettingsTab()
        self.tab_shortcut = ShortcutSettingsTab()
        self.tab_general = GeneralSettingsTab()
        self.tab_interleaving = InterleavingSettingsTab()
        self.tab_markdown = MarkdownSettingsTab()
        for tab, label in (
            (self.tab_appearance, 'Appearance'),
            (self.tab_shortcut, 'Shortcuts'),
            (self.tab_general, 'General'),
            (self.tab_interleaving, 'Interleaving'),
            (self.tab_markdown, 'Markdown'),
        ):
            self.tabs.addTab(tab, label)
        self.buttonBox = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        self.buttonBox.accepted.connect(self.accept_clicked)
        self.buttonBox.rejected.connect(self.reject)
        self.vbox.addWidget(self.tabs)
        self.vbox.addWidget(self.buttonBox)
        self.setLayout(self.vbox)

    def accept_clicked(self):
        """Persist every tab's changes, show a summary tooltip, then close."""
        tooltip_changes = (self.tab_appearance.save_changes()
                           + self.tab_shortcut.save_changes()
                           + self.tab_general.save_changes()
                           + self.tab_interleaving.save_changes()
                           + self.tab_markdown.save_changes())
        if tooltip_changes:
            tooltip_text = ('<b>Settings changed</b><br>'
                            + tooltip_changes
                            + '<br><i>Please restart Anki to make sure all settings are applied!</i>')
        else:
            tooltip_text = '<b>No settings changed!</b>'
        tooltip(tooltip_text, parent=self.parent)
        self.accept()
def imdho_abs_cross_section(dEs_inc: np.ndarray, dE_exc: float, gamma: float, displs: np.ndarray, nus: np.ndarray, ithresh: float = 1e-06):
    """Absorption cross sections in the IMDHO model, normalised to unit maximum.

    Parameters
    ----------
    dEs_inc : incident energies at which the cross section is evaluated.
    dE_exc : excitation energy.
    gamma : homogeneous broadening.
    displs, nus : mode displacements and wavenumbers passed to the integrand.
    ithresh : integrand decay threshold fixing the upper integration limit.
    """
    integrand = get_crossec_integrand(dE_exc, gamma, displs, nus)
    # Integrate until the exponentially damped integrand drops below ithresh.
    tmax = -np.log(ithresh) / nu2angfreq_au(gamma)
    print(f'tmax={tmax:.4f} au for ={gamma:.2f} cm1')
    # Progress every ~10% of the grid; guard against grids with fewer than 10
    # points, where the original `size // 10` would be 0 and `i % tenth`
    # raised ZeroDivisionError.
    tenth = max(dEs_inc.size // 10, 1)
    cross_secs = np.zeros_like(dEs_inc)
    for i, dE_inc in enumerate(dEs_inc):
        y, _ = quad(integrand, 0, tmax, args=(dE_inc,), complex_func=True, limit=500)
        cross_secs[i] = y.real
        if i % tenth == 0:
            print(i, y.real)
    # Normalise to a maximum of one.
    return cross_secs / cross_secs.max()
class TestDiscountCodeValidation(TestCase):
    """Quantity, date and value validation rules of DiscountCodeSchemaTicket."""

    @staticmethod
    def _run(validator_name, data):
        """Invoke the named schema validator with an empty original_data payload."""
        schema = DiscountCodeSchemaTicket()
        getattr(DiscountCodeSchemaTicket, validator_name)(schema, data, {'data': {}})

    def test_quantity_pass(self):
        # min <= max <= tickets_number is acceptable.
        self._run('validate_quantity', {'min_quantity': 10, 'max_quantity': 20, 'tickets_number': 30})

    def test_date_pass(self):
        self._run('validate_date', {
            'valid_from': datetime(2099, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')),
            'valid_till': datetime(2099, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')),
        })

    def test_date_start_gt_end(self):
        # valid_from after valid_till is rejected.
        with self.assertRaises(UnprocessableEntityError):
            self._run('validate_date', {
                'valid_from': datetime(2099, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')),
                'valid_till': datetime(2099, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')),
            })

    def test_quantity_min_gt_max(self):
        with self.assertRaises(UnprocessableEntityError):
            self._run('validate_quantity', {'min_quantity': 20, 'max_quantity': 10, 'tickets_number': 30})

    def test_quantity_max_gt_tickets_number(self):
        with self.assertRaises(UnprocessableEntityError):
            self._run('validate_quantity', {'min_quantity': 10, 'max_quantity': 30, 'tickets_number': 20})

    def test_percent_value_lte_hundred(self):
        self._run('validate_value', {'type': 'percent', 'value': 90, 'tickets': []})

    def test_percent_value_gt_hundred(self):
        # Percentage discounts above 100% are rejected.
        with self.assertRaises(UnprocessableEntityError):
            self._run('validate_value', {'type': 'percent', 'value': 110, 'tickets': []})
def _export_interaction(contents: t.Any, extension: str) -> None:
    """Prompt for a filename, write *contents* to '<name>.<extension>', confirm via dialog."""
    path = interaction.get_save_filename_input('Filename to export as?', extension)
    if not path:
        interaction.show_message_box('Error', 'Did not get required path name for export.')
        return
    path = f'{path}.{extension}'
    with open(path, 'w+', encoding='utf-8') as file:
        file.write(contents)
    interaction.show_message_box('Success', f'Done, exported to {path}')
def test_skip_bad_plugin(caplog, plugin_creator: PluginCreator) -> None:
    """A plugin whose entry points fail to load is skipped, with each failure logged."""
    caplog.set_level(logging.WARNING, logger='submitit')
    # The fake plugin exposes a missing executor class, an environment that
    # raises in __init__, and an unsupported entry-point key.
    plugin_creator.add_plugin('submitit_bad', entry_points='[submitit]\nexecutor = submitit_bad:NonExisitingExecutor\njob_environment = submitit_bad:BadEnvironment\nunsupported_key = submitit_bad:SomethingElse\n', init='\nimport submitit\n\nclass BadEnvironment:\n    name = "bad"\n\n    def __init__(self):\n        raise Exception("this is a bad environment")\n')
    executors = plugins.get_executors().keys()
    # Only the built-in executors remain; the bad plugin never registers.
    assert ({'slurm', 'local', 'debug'} == set(executors))
    assert ('bad' not in executors)
    # One log record per failure mode, in registration order.
    expected = [(logging.ERROR, "'submitit_bad'.*no attribute 'NonExisitingExecutor'"), (logging.ERROR, "'submitit_bad'.*this is a bad environment"), (logging.WARNING, 'unsupported_key = submitit_bad:SomethingElse')]
    assert (len(caplog.records) == len(expected))
    for (record, ex_record) in zip(caplog.records, expected):
        assert (record.name == 'submitit')
        assert (record.levelno == ex_record[0])
        assert re.search(ex_record[1], record.getMessage())
def meatFunction(delay):
    """Feed the pet meat: costs 15 money and disables the feed button for *delay* seconds."""
    global HUNGER_UPDATE, meatApplied, money
    if money < 15:
        _thread.start_new_thread(updateLabel, (3, "You don't have enough money for meat!"))
        return
    # Comparison kept as `== True` to preserve the original truthiness check.
    if alive == True:
        btnFeed.config(state='disabled')
        money -= 15
        meatApplied = True
        _thread.start_new_thread(updateLabel, (3, (TAMA_NAME + ' is eating!')))
        time.sleep(delay)
        btnFeed.config(state='normal')
        meatApplied = False
def gsm8k_samples():
    """Return two worked GSM8K few-shot exemplars as a single prompt string."""
    return "\nQ: Every hour Joanne has to collect the coins out of the fountain inside the mall. During the first hour, she collected 15 coins. For the next two hours, she collected 35 coins from the fountain. In the fourth hour, she collected 50 coins from the fountain but she gave 15 of them to her coworker so she could buy a soda. How many coins did she have after the fourth hour?\nA: Let's think step by step.\n15 coins collected in hour one\n35 coins collected in hour two\n35 coins collected in hour three\n50 coins collected in hour four\nBefore giving her coworker some coins there were 15+35+35+50=<< 15+35+35+50 = 135 >>135 coins\nThe number of coins after given 15 to her coworker is 135-15=<< 135-15 = 120 >>120\nSo the answer is 120\n\nQ: Jerrys two daughters play softball on different teams. They each have 8 games this season. Each team practices 4 hours for every game they play. If each game lasts for 2 hours, how many hours will Jerry spend at the field watching his daughters play and practice altogether?\nA: Let's think step by step.\nJerry will spend 8 games x 2 hours per game = << 8*2 = 16 >>16 hours watching one daughter play her games.\nHe will spend 16 x 2 = << 16*2 = 32 >>32 hours watching both daughters play their games.\nHe will spend 8 games x 4 hours of practice = << 8*4 = 32 >>32 hours watching one daughter practice.\nHe will spend 32 x 2 = << 32*2 = 64 >>64 hours watching both daughters practice.\nHe will spend a total of 32 hours watching games + 64 hours watching practice = << 32+64 = 96 >>96 hours.\nSo the answer is 96\n"
def test_Vector():
    """Exercise Vector construction, equality, arithmetic, length and attribute locking."""
    v = Vector((100, 200))
    assert (repr(v) == 'Vector((100, 200))')
    # Equality is value-based and symmetric against tuples and lists.
    assert (v == Vector((100, 200)))
    assert (v == Vector([100, 200]))
    assert (v == (100, 200))
    assert ((100, 200) == v)
    assert (v == [100, 200])
    assert ([100, 200] == v)
    # Constructing from an existing Vector returns the same object.
    assert (v is Vector(v))
    # Elementwise arithmetic with scalars and other vectors.
    assert ((v + 10) == (110, 210))
    assert ((10 + v) == (110, 210))
    assert ((v + Vector((1, 2))) == (101, 202))
    assert ((v - Vector((1, 2))) == (99, 198))
    assert ((v * 2) == (200, 400))
    assert ((2 * v) == (200, 400))
    assert ((v * 0.5) == (50, 100))
    assert ((v / 2) == (50, 100))
    assert ((2 / v) == (0.02, 0.01))
    # Length / normalisation on a 3-4-5 triangle.
    v = Vector((3, 4))
    assert (abs(v) == 5)
    assert (v.length() == 5)
    assert (v.normalized() == Vector((0.6, 0.8)))
    assert (abs(Vector((1, 1, 1))) == math.sqrt(3))
    # Truthiness follows the vector's magnitude.
    assert bool(Vector((0, 0, 1)))
    assert (not bool(Vector((0, 0, 0))))
    v1 = Vector((2, 3))
    v2 = Vector((3, 4))
    assert (v1.dot(v2) == 18)
    # round() applies per component.
    v = Vector((2, 4))
    assert (round((v / 3)) == (1, 1))
    # Vectors reject new attributes (slotted / immutable design).
    with pytest.raises(AttributeError, match="'Vector' object has no attribute 'newAttr'"):
        v.newAttr = 12
class CreditChargeResponseTests(unittest.TestCase):
    """A freshly constructed CreditChargeResponse starts with empty string fields."""

    def test_init(self):
        response = CreditChargeResponse()
        for field in ('CCTransId', 'AuthCode', 'TxnAuthorizationTime', 'Status'):
            self.assertEqual(getattr(response, field), '')
class TestPropertyNotifications(unittest.TestCase):
    """Each property write appends its value to the subclass's output buffer."""

    def test_property_notifications(self):
        buffer = io.StringIO()
        obj = HasPropertySubclass(output_buffer=buffer)
        expected = ''
        for value in ('value_1', 'value_2'):
            obj.value = value
            expected += value
            self.assertEqual(buffer.getvalue(), expected)
class _ComplexNumPyEncoder(json.JSONEncoder):
def default(self, obj):
if np.iscomplexobj(obj):
obj = np.stack([np.asarray(obj).real, np.asarray(obj).imag])
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.bool_):
return bool(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return json.JSONEncoder.default(self, obj) |
class TiktokenTokenizer():
    """Minimal HuggingFace-tokenizer-like wrapper around a tiktoken encoding."""

    def __init__(self, name) -> None:
        self.enc = tiktoken.get_encoding(name)
        # Allow the literal end-of-text marker to appear inside raw text.
        self.encode = lambda s: self.enc.encode(s, allowed_special={'<|endoftext|>'})
        self.pad_token = self.enc.eot_token

    def __call__(self, text, max_length=None, padding=None, truncation=False, return_tensors=None):
        """Tokenize *text*, returning {'input_ids', 'attention_mask'} like HF tokenizers."""
        token_ids = self.encode(text)
        if truncation:
            token_ids = token_ids[:max_length]
        attention = [1] * len(token_ids)
        if padding == 'max_length':
            # Pad (zero-mask) up to max_length; a non-positive count is a no-op.
            pad_count = max_length - len(token_ids)
            attention += [0] * pad_count
            token_ids += [self.pad_token] * pad_count
        if return_tensors == 'pt':
            token_ids = torch.tensor(token_ids, dtype=torch.long)
            attention = torch.tensor(attention)
        return {'input_ids': token_ids, 'attention_mask': attention}
def main():
    """Migrate rules, function calls and outliers into a fresh database, timing each step."""
    import timeit
    create_new_db(NEW_DB_URL)
    old_connection = get_connection(OLD_DB_URL)
    start = timeit.default_timer()
    move_rules(old_connection)
    t1 = timeit.default_timer()
    print('Moving rules took %f seconds' % (t1 - start))
    move_function_calls(old_connection)
    t2 = timeit.default_timer()
    print('Moving functionCalls took %f seconds' % (t2 - t1))
    move_outliers(old_connection)
    t3 = timeit.default_timer()
    print('Moving outliers took %f seconds' % (t3 - t2))
    print('Total time was %f seconds' % (t3 - start))
class CLICommand(RegionalCommand):
    """Collect VPC CIDR blocks across regions and report overlapping pairs."""

    # NOTE(review): `regional_from_cli` takes `cls` and is used as an alternate
    # constructor, so it must be a classmethod — the decorator was missing.
    @classmethod
    def regional_from_cli(cls, parser, argv, cfg):
        """Parse CLI args (adding --exclude-block) and build the command instance."""
        parser.add_argument('--exclude-block', action='append', dest='exclude_blocks',
                            default=cfg('exclude_block', type=List(IPNet), default=[]),
                            help='exclude CIDR blocks from check')
        args = parser.parse_args(argv)
        return cls(**vars(args))

    def __init__(self, regions, exclude_blocks):
        super().__init__(regions)
        self.exclude_blocks = [ip_network(b) for b in exclude_blocks]
        # All collected (account, region, vpc, block) records.
        self.cidrs = []

    def regional_execute(self, session, acct, region):
        """Return (cidr_block, vpc_id) pairs for every VPC CIDR association in a region."""
        ec2 = session.resource('ec2', region_name=region)
        return [(c['CidrBlock'], vpc.id) for vpc in ec2.vpcs.all() for c in vpc.cidr_block_association_set]

    def regional_collect_results(self, acct, region, get_result):
        """Record all non-excluded CIDRs reported for one account/region."""
        for block, vpc_id in get_result():
            block = ip_network(block)
            # Skip blocks the operator explicitly excluded via --exclude-block.
            if any(block.overlaps(e) for e in self.exclude_blocks):
                continue
            cidr = _CIDR(acct, region, vpc_id, block)
            self.cidrs.append(cidr)
            print(f'Found CIDR {cidr}', flush=True)

    def post_hook(self):
        """After all regions: report every overlapping pair (pairwise O(n^2) scan)."""
        overlap = []
        for i, c1 in enumerate(self.cidrs):
            for c2 in self.cidrs[i + 1:]:
                if c1.overlaps(c2):
                    overlap.append(sorted((c1, c2)))
        for c1, c2 in sorted(overlap, key=lambda pair: pair[0]):
            print(f'OVERLAP! {c1} <<<>>> {c2}')
class LogTarget():
    """Abstract sink for log output; subclasses implement write/flush/close."""

    def __init__(self):
        # File descriptor (or file-like object), set by concrete subclasses.
        self.fd = None

    def write(self, data, level, logger, is_debug=0):
        """Write one log record; must be overridden."""
        raise NotImplementedError('LogTarget.write is an abstract method')

    def flush(self):
        """Flush buffered output; must be overridden."""
        raise NotImplementedError('LogTarget.flush is an abstract method')

    def close(self):
        """Release the underlying resource; must be overridden."""
        raise NotImplementedError('LogTarget.close is an abstract method')
class Migration(migrations.Migration):
    """Swap adjustments_to_unobligated_balance_cpe for the *_brought_forward_fyb
    column and relax *_brought_forward_cpe on gtassf133balances."""

    dependencies = [
        ('references', '0061_gtassf133balances_add_status_of_budgetary_resources_total_cpe_field'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='gtassf133balances',
            name='adjustments_to_unobligated_balance_cpe',
        ),
        migrations.AddField(
            model_name='gtassf133balances',
            name='adjustments_to_unobligated_balance_brought_forward_fyb',
            field=models.DecimalField(decimal_places=2, default=0.0, max_digits=23),
        ),
        migrations.AlterField(
            model_name='gtassf133balances',
            name='adjustments_to_unobligated_balance_brought_forward_cpe',
            field=models.DecimalField(decimal_places=2, max_digits=23),
        ),
    ]
def search_albums(artist):
    """Return {disc_id: {'album', 'ano', 'id_artista'}} for *artist* (case-insensitive name match).

    Returns an empty dict when the artist is unknown.
    """
    rows = list(select([artists.c.id]).where(artists.c.nome == artist.lower()).execute())
    if not rows:
        return {}
    artist_pk = rows[0][0]
    query = select([discs.c.id, discs.c.album, discs.c.ano, discs.c.artista_id]).where(
        discs.c.artista_id == artist_pk
    ).execute()
    return {
        disc_id: {'album': album, 'ano': ano, 'id_artista': artista_id}
        for disc_id, album, ano, artista_id in query
    }
# NOTE(review): the parametrize call appeared as a bare leading `.parametrize(...)`
# (a syntax error) — the stripped `@pytest.mark` decorator is restored.
@pytest.mark.parametrize(
    'numGlyphs, indices, extra_indices, expected_data',
    [(5, TSI0_INDICES, TSI0_EXTRA_INDICES, TSI0_DATA),
     (0, [], EMPTY_TSI0_EXTRA_INDICES, EMPTY_TSI0_DATA)],
    ids=['simple', 'empty'],
)
def test_compile(table, numGlyphs, indices, extra_indices, expected_data):
    """An unset table compiles to empty bytes; after set() it compiles to the expected data."""
    assert table.compile(ttFont=None) == b''
    table.set(indices, extra_indices)
    data = table.compile(ttFont=None)
    assert data == expected_data
# NOTE(review): the bare ('xtb') expression below is a no-op and looks like the
# remnant of a stripped decorator (likely a marker requiring the xtb
# calculator); confirm against the original test suite.
('xtb')
def test_opt_coord_type(this_dir):
    """A full run pipeline honours the per-stage coordinate types (preopt/tsopt/endopt)."""
    (preopt_ct, tsopt_ct, endopt_ct) = ('dlc', 'redund', 'tric')
    # Single-cycle stages keep the test fast; xtb supplies energies/gradients.
    run_dict = {'geom': {'type': 'cart', 'fn': str((this_dir / 'test_geoms.trj'))}, 'calc': {'type': 'xtb', 'quiet': True, 'pal': 2}, 'preopt': {'geom': {'type': preopt_ct, 'freeze_atoms': [0]}, 'max_cycles': 1}, 'interpol': {'type': 'redund', 'between': 1}, 'cos': {'type': 'neb'}, 'opt': {'type': 'sd', 'max_cycles': 1}, 'tsopt': {'geom': {'type': tsopt_ct, 'freeze_atoms': [0]}, 'max_cycles': 1}, 'irc': {'max_cycles': 1}, 'endopt': {'max_cycles': 1, 'geom': {'type': endopt_ct, 'freeze_atoms': [0]}}}
    results = run_from_dict(run_dict)
    # The TS geometry and all end geometries carry their stage's coord type.
    assert (results.ts_geom.coord_type == tsopt_ct)
    assert all([(geom.coord_type == endopt_ct) for geom in results.end_geoms])
def extractCrazysilvermoonWordpressCom(item):
    """Map a crazysilvermoon release post to a release message.

    Returns None for previews or posts without chapter/volume numbers, and
    False when no known series tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('Number One Zombie Wife', 'Number One Zombie Wife', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def copy_database(raw_conn, from_database, to_database):
    """Clone *from_database* into *to_database* on the same server.

    PostgreSQL clones via CREATE DATABASE ... WITH TEMPLATE; MySQL copies each
    table's structure (CREATE TABLE ... LIKE) and rows (INSERT ... SELECT).
    Any other dialect raises NotSupportedDatabase.

    NOTE(review): database/table names are interpolated directly into the SQL
    text — this assumes they come from trusted configuration, never user input.
    """
    logger.debug('copy_database(%r, %r)', from_database, to_database)
    # Open sessions would block templating/copying the source database.
    terminate_database_connections(raw_conn, from_database)
    if (raw_conn.engine.dialect.name == 'postgresql'):
        raw_conn.execute(('\n CREATE DATABASE "%s" WITH TEMPLATE "%s";\n ' % (to_database, from_database)))
    elif (raw_conn.engine.dialect.name == 'mysql'):
        create_database(raw_conn, to_database)
        for row in raw_conn.execute(('SHOW TABLES in %s;' % from_database)):
            raw_conn.execute(('\n CREATE TABLE %s.%s LIKE %s.%s\n ' % (to_database, row[0], from_database, row[0])))
            # Disabling keys during the bulk insert speeds up the copy.
            raw_conn.execute(('ALTER TABLE %s.%s DISABLE KEYS' % (to_database, row[0])))
            raw_conn.execute(('\n INSERT INTO %s.%s SELECT * FROM %s.%s\n ' % (to_database, row[0], from_database, row[0])))
            raw_conn.execute(('ALTER TABLE %s.%s ENABLE KEYS' % (to_database, row[0])))
    else:
        raise NotSupportedDatabase()
# NOTE(review): the three marks appeared as bare leading `.external` /
# `.skipif(...)` / `.parametrize(...)` fragments (syntax errors) — the stripped
# `@pytest.mark` decorators are restored.
@pytest.mark.external
@pytest.mark.skipif(has_openai_key is False, reason='OpenAI API key not available')
@pytest.mark.parametrize('cfg_string', ['zeroshot_cfg_string', 'fewshot_cfg_string', 'ext_template_cfg_string'])
def test_summarization_config(cfg_string, request):
    """Each summarization config loads into a single 'llm' pipe, both directly
    and when re-added to a blank pipeline."""
    cfg_string = request.getfixturevalue(cfg_string)
    orig_config = Config().from_str(cfg_string)
    nlp = spacy.util.load_model_from_config(orig_config, auto_fill=True)
    assert nlp.pipe_names == ['llm']
    # The same component config must also work via nlp.add_pipe on a blank model.
    component_cfg = dict(orig_config['components']['llm'])
    component_cfg.pop('factory')
    nlp2 = spacy.blank('en')
    nlp2.add_pipe('llm', config=component_cfg)
    assert nlp2.pipe_names == ['llm']
    pipe = nlp.get_pipe('llm')
    assert isinstance(pipe, LLMWrapper)
    assert isinstance(pipe.task, LLMTask)
class DeclStatMixA(SimpleEntity, StatusMixin):
    """Declarative test entity combining SimpleEntity with StatusMixin."""

    __tablename__ = 'DeclStatMixAs'
    __mapper_args__ = {'polymorphic_identity': 'DeclStatMixA'}
    # Joined-table inheritance: the primary key doubles as an FK to SimpleEntities.
    declStatMixAs_id = Column('id', Integer, ForeignKey('SimpleEntities.id'), primary_key=True)

    def __init__(self, **kwargs):
        super(DeclStatMixA, self).__init__(**kwargs)
        # StatusMixin is initialised explicitly rather than via the
        # cooperative super() chain.
        StatusMixin.__init__(self, **kwargs)
class TestABC(unittest.TestCase):
    """Behaviour of the AbstractFoo/AbstractBar ABCs and their registrations."""

    def test_basic_abc(self):
        # An abstract class cannot be instantiated directly.
        self.assertRaises(TypeError, AbstractFoo)
        concrete = ConcreteFoo()
        self.assertEqual(concrete.foo(), 'foo')
        self.assertEqual(concrete.bar, 'bar')
        self.assertEqual(concrete.x, 10)
        self.assertEqual(concrete.y, 20.0)
        self.assertTrue(isinstance(concrete, AbstractFoo))

    def test_registered(self):
        # FooLike is registered with the ABC (not inherited), yet isinstance holds.
        foolike = FooLike()
        self.assertTrue(isinstance(foolike, AbstractFoo))

    def test_post_hoc_mixing(self):
        # HasTraits and an ABC combine via the ABCMetaHasTraits metaclass.
        class TraitedBar(HasTraits, AbstractBar, metaclass=ABCMetaHasTraits):
            x = Int(10)

            def bar(self):
                return 'bar'
        traited = TraitedBar()
        self.assertTrue(isinstance(traited, AbstractBar))
        self.assertEqual(traited.x, 10)
def _get_region_data_sources(coordinates, points):
    """Return the bounding region (W, E, S, N) covering both data and sources."""
    data_w, data_e, data_s, data_n = get_region(coordinates)
    src_w, src_e, src_s, src_n = get_region(points)
    # Union of the two bounding boxes: min of the lower bounds, max of the upper.
    return (min(data_w, src_w), max(data_e, src_e), min(data_s, src_s), max(data_n, src_n))
def get_window_width():
    """Return the terminal width in columns on Unix-like systems, else None.

    Uses shutil.get_terminal_size, which is robust when stdout is not a tty
    (the previous `stty size` parse failed there and silently returned None).
    """
    import shutil  # local import keeps the file's top-level imports untouched
    try:
        if platform.system() in ('Linux', 'Darwin', 'Java'):
            return shutil.get_terminal_size().columns
        return None
    except Exception:
        # Any detection failure degrades to "width unknown".
        return None
def writehtmllist(space, namehead, hashtaghead):
    """Emit an indented HTML list entry linking *hashtaghead* with text *namehead*.

    NOTE(review): the branch tests the module-level ``initialspace``, not the
    ``space`` parameter — possibly intentional, confirm with the caller.
    """
    spaces = ' ' * space
    link = f'<a href="{hashtaghead}">{namehead}</a>'
    if initialspace >= 2:
        # Deep entries get wrapped in their own nested <ul>.
        return f'{spaces}<ul><li>{link}</li></ul>'
    return f'{spaces}<li>{link}</li>'
class OptionSeriesBubbleSonificationContexttracksMappingNoteduration(Options):
    """Note-duration mapping options for bubble-series sonification context tracks.

    NOTE(review): each getter/setter pair was two same-named plain methods, so
    the setter shadowed the getter; the ``@property`` / ``@<name>.setter``
    decorators restore the intended accessors.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the bare expression below looks like residue of a stripped
# decorator — presumably ``@provides(ITaskWindowBackend)``; confirm upstream.
(ITaskWindowBackend)
class MTaskWindowBackend(HasTraits):
    """Toolkit-independent base declaring the TaskWindow backend interface.

    Concrete toolkit backends override the ``NotImplementedError`` methods.
    """
    # The toolkit control, delegated to the owning window.
    control = DelegatesTo('window')
    # The TaskWindow this backend serves.
    window = Instance('pyface.tasks.task_window.TaskWindow')
    def create_contents(self, parent):
        """Create and return the window contents under *parent*."""
        raise NotImplementedError()
    def destroy(self):
        """Dispose toolkit resources; default is a no-op."""
        pass
    def hide_task(self, state):
        """Hide the task described by *state*."""
        raise NotImplementedError()
    def show_task(self, state):
        """Show the task described by *state*."""
        raise NotImplementedError()
    def get_layout(self):
        """Return the current window layout."""
        raise NotImplementedError()
    def set_layout(self, layout):
        """Apply *layout* to the window."""
        raise NotImplementedError()
class SenseloafApi(ProviderInterface, OcrInterface):
    """Senseloaf OCR provider (resume parsing)."""

    provider_name = 'senseloaf'

    def __init__(self, api_keys: Dict = None):
        """Load provider credentials and build the API client.

        Args:
            api_keys: optional credential overrides passed to the provider
                loader. Defaults to ``None`` (precisely ``Optional[Dict]``);
                a fresh empty dict is substituted per call — the original
                ``api_keys: Dict = {}`` was a shared mutable default that
                every instantiation would have reused (and could mutate).
        """
        super().__init__()
        api_keys = {} if api_keys is None else api_keys
        self.api_settings = load_provider(ProviderDataEnum.KEY, self.provider_name, api_keys=api_keys)
        self.client = Client(
            self.api_settings.get('api_key', None),
            self.api_settings.get('email', None),
            self.api_settings.get('password', None),
        )

    def ocr__resume_parser(self, file: str, file_url: str = '') -> ResponseType[ResumeParserDataClass]:
        """Parse a resume file (or URL) and return raw + standardized output."""
        original_response = self.client.parse_document(parse_type=Parser.RESUME, file=file, url=file_url)
        mapper = ResumeMapper(original_response)
        return ResponseType[ResumeParserDataClass](
            original_response=mapper.original_response(),
            standardized_response=mapper.standard_response(),
        )
class Migration(migrations.Migration):
    """Initial migration: creates the ``Livro`` table.

    Columns: auto ``id`` plus ``nome``/``categoria``/``autor`` char fields
    (max length 30 each).
    """
    initial = True
    dependencies = []
    operations = [migrations.CreateModel(name='Livro', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('nome', models.CharField(max_length=30)), ('categoria', models.CharField(max_length=30)), ('autor', models.CharField(max_length=30))])]
class OpenEventTestCase(unittest.TestCase):
    """Base test case that wraps every test in a rolled-back DB transaction.

    NOTE(review): ``setUpClass``/``tearDownClass`` take ``cls`` but carry no
    ``@classmethod`` decorator here — presumably stripped by extraction;
    confirm against the original file.
    """
    def setUpClass(cls) -> None:
        # Create the Flask app once for the whole class.
        cls.app = Setup.create_app()
    def tearDownClass(cls) -> None:
        Setup.drop_db()
    def setUp(self):
        # Bind a scoped session to an explicit connection + transaction so
        # everything a test writes can be rolled back in tearDown.
        with self.app.test_request_context():
            self._connection = db.engine.connect()
            self._transaction = self._connection.begin()
            options = dict(bind=self._connection, binds={})
            session = db.create_scoped_session(options=options)
            # Swap the global session in; restored in tearDown.
            self._old_session = db.session
            db.session = session
    def tearDown(self):
        # Restore the original session and discard the test's writes.
        with self.app.test_request_context():
            db.session.remove()
            db.session = self._old_session
            self._transaction.rollback()
            self._connection.close()
def extract_kinetic_features(motion, thresholds, up_vec):
    """Build a flat kinetic feature vector over all joints of *motion*.

    Per joint, three scalars are appended: average horizontal kinetic
    energy, average vertical kinetic energy, and average energy expenditure.
    Returns a plain list of length 3 * num_joints.
    """
    features = kinetic.KineticFeatures(motion, (1 / motion.fps), thresholds, up_vec)
    kinetic_feature_vector = []
    for i in range(motion.skel.num_joints()):
        # NOTE(review): the position stats are computed but never used below —
        # possibly kept for a side effect inside KineticFeatures; confirm
        # before removing.
        (positions_mean, positions_stddev) = features.local_position_stats(i)
        feature_vector = np.hstack([features.average_kinetic_energy_horizontal(i), features.average_kinetic_energy_vertical(i), features.average_energy_expenditure(i)])
        kinetic_feature_vector.extend(feature_vector)
    return kinetic_feature_vector
class OAuthCallbackHandler(BaseHTTPRequestHandler):
    """One-shot handler that captures the OAuth ``code``/``state`` params.

    The values are stashed on ``self.server`` so the code driving the
    temporary HTTP server can pick them up after the redirect lands.
    """

    def do_GET(self):
        parts = urlsplit(self.path)
        if parts.path != '/callback':
            # Anything but the redirect target is unsupported.
            self.send_response(HTTPStatus.NOT_IMPLEMENTED)
            body_text = b'<p>Something went wrong.</p>'
        else:
            query = parse_qs(parts.query)
            # Missing parameters are recorded as None rather than raising.
            self.server.callback_code = query.get('code', [None])[0]
            self.server.callback_state = query.get('state', [None])[0]
            self.send_response(HTTPStatus.OK)
            body_text = b'<p>You can close me now.</p>'
        self.end_headers()
        for chunk in (
            b'<html><head><title>EDAPI Frontier Login</title></head>',
            b'<body><h1>AUTHENTICATION</h1>',
            body_text,
            b'</body></html>',
        ):
            self.wfile.write(chunk)

    def log_message(self, format, *args):
        # Silence the default per-request stderr logging.
        pass
def example():
    """Build a Stack demo with two draggable 50x50 squares.

    The amber square is wrapped by a GestureDetector and moved via the
    outer container; the blue square's detector moves itself (only on
    vertical drags). Both are clamped to non-negative coordinates.
    """
    async def drag_wrapped(e: ft.DragUpdateEvent):
        # Move the amber container that wraps this detector.
        c.top = max(0, c.top + e.delta_y)
        c.left = max(0, c.left + e.delta_x)
        await c.update_async()

    async def drag_self(e: ft.DragUpdateEvent):
        # Move the detector control itself.
        e.control.top = max(0, e.control.top + e.delta_y)
        e.control.left = max(0, e.control.left + e.delta_x)
        await e.control.update_async()

    detector = ft.GestureDetector(mouse_cursor=ft.MouseCursor.MOVE, drag_interval=50, on_pan_update=drag_wrapped)
    c = ft.Container(detector, bgcolor=ft.colors.AMBER, width=50, height=50, left=0, top=0)
    self_mover = ft.GestureDetector(mouse_cursor=ft.MouseCursor.MOVE, drag_interval=10, on_vertical_drag_update=drag_self, left=100, top=100, content=ft.Container(bgcolor=ft.colors.BLUE, width=50, height=50))
    return ft.Stack([c, self_mover], width=1000, height=500)
class OptionSeriesLollipopDataAccessibility(Options):
    """Accessibility options for a single lollipop data point.

    NOTE(review): the duplicate method names suggest stripped ``@property`` /
    setter decorators; as written the setter definition shadows the getter.
    Confirm against the upstream generator before editing.
    """
    def description(self):
        # Getter: no default description.
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
# NOTE(review): the two lines below look like residue of stripped pytest
# marks — presumably ``@pytest.mark.xfail(...)`` and ``@pytest.mark.skipif(...)``;
# restore the decorators before running this module.
.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
.skipif((LOW_MEMORY > memory), reason='Travis has too less memory to run it.')
def test_hicPlotMatrix_perChr_pca1_bigwig():
    """Plot a per-chromosome Pearson matrix with a PCA bigwig track and
    compare the resulting PNG against the stored reference image."""
    outfile = NamedTemporaryFile(suffix='.png', prefix='hicexplorer_test', delete=False)
    args = '--matrix {0}/hicTransform/pearson_perChromosome.h5 --perChr --disable_tight_layout --outFileName {1} --bigwig {2}'.format(ROOT, outfile.name, (ROOT + 'hicPCA/pca1.bw')).split()
    compute(hicexplorer.hicPlotMatrix.main, args, 5)
    # compare_images returns None on success, an error message otherwise.
    res = compare_images(((ROOT + 'hicPlotMatrix') + '/small_matrix_50kb_pearson_pca1_plot.png'), outfile.name, tol=tolerance)
    assert (res is None), res
    if REMOVE_OUTPUT:
        os.remove(outfile.name)
class QLineEditDrop(QtWidgets.QLineEdit):
    """Line edit that accepts file drag-and-drop and shows the first path."""

    def __init__(self):
        super().__init__()
        self.setAcceptDrops(True)

    def dragEnterEvent(self, event):
        # Only accept drags that carry URLs (i.e. files).
        if not event.mimeData().hasUrls():
            event.ignore()
            return
        event.accept()

    def dropEvent(self, event):
        paths = [url.toLocalFile() for url in event.mimeData().urls()]
        # Only the first dropped file is displayed.
        self.setText(paths[0])
class TestNumberOfOutRangeValues(BaseDataQualityValueRangeMetricsTest):
    """Test over the count of column values outside the configured range."""

    name: ClassVar = 'Number of Out-of-Range Values '

    def calculate_value_for_test(self) -> Numeric:
        # The tested value is the out-of-range count on the current data.
        current = self.metric.get_result().current
        return current.number_not_in_range

    def get_description(self, value: Numeric) -> str:
        """Human-readable verdict text for the report."""
        return f'The number of values out of range in the column **{self.column_name}** is {value}. The test threshold is {self.get_condition()}.'
class GrowingNT():
    """Growing Newton Trajectory driver.

    Grows a string of images from ``geom`` along a search direction ``r``,
    recording passed stationary points (minima / transition states) along
    the way.

    NOTE(review): several getter/setter pairs below (``r``, ``P``,
    ``coords``, ...) and ``get_r`` (which lacks ``self``) appear with their
    ``@property`` / ``@<name>.setter`` / ``@staticmethod`` decorators
    stripped by extraction; as written the later duplicate definitions
    shadow the earlier ones. Confirm against the upstream source before
    modifying this class.
    """
    logger = logging.getLogger('cos')
    def __init__(self, geom, step_len=0.5, rms_thresh=0.0017, r=None, final_geom=None, between=None, bonds=None, r_update=True, r_update_thresh=1.0, stop_after_ts=False, require_imag_freq=0.0, hessian_at_ts=False, out_dir='.', dump=True):
        # Only Cartesian coordinates are supported.
        assert (geom.coord_type == 'cart')
        self.geom = geom
        self.step_len = step_len
        self.rms_thresh = rms_thresh
        self.final_geom = final_geom
        self.between = between
        self.bonds = bonds
        self.r_update = r_update
        self.r_update_thresh = r_update_thresh
        self.stop_after_ts = stop_after_ts
        self.require_imag_freq = require_imag_freq
        self.hessian_at_ts = hessian_at_ts
        self.out_dir = Path(out_dir)
        self.dump = dump
        if (not self.out_dir.exists()):
            os.mkdir(self.out_dir)
        self.coord_type = self.geom.coord_type
        if self.final_geom:
            self.converge_to_geom = self.final_geom
        # Initial (normalized) direction along which the trajectory grows.
        self.r = self.get_r(self.geom, self.final_geom, self.bonds, r)
        self.r_org = self.r.copy()
        if (final_geom and self.between):
            # Distribute `between` images evenly between start and end geometry.
            self.step_len = (np.linalg.norm((final_geom.coords - geom.coords)) / (self.between + 1))
        self._initialized = False
        self.images = [self.geom.copy()]
        self.all_energies = list()
        self.all_real_forces = list()
        # Stationary-point bookkeeping.
        self.sp_images = [self.geom.copy()]
        self.ts_images = list()
        self.min_images = list()
        self.ts_imag_freqs = list()
        self.initialize()
        if self.dump:
            self.trj_fn = self.get_path('newton_trajectory.trj')
    def get_path(self, fn):
        """Return *fn* inside the output directory."""
        return (self.out_dir / fn)
    # NOTE(review): no ``self`` — presumably a stripped @staticmethod.
    def get_r(geom, final_geom, bonds, r):
        """Determine and normalize the search direction from the given inputs."""
        if final_geom:
            r = (final_geom - geom)
        elif (bonds is not None):
            r = get_weighted_bond_mode(bonds, geom.coords3d)
        elif (r is not None):
            pass
        else:
            raise Exception("Please supply either 'r' or 'final_geom'!")
        r = (r / np.linalg.norm(r))
        return r
    def log(self, message):
        self.logger.debug(message)
    # NOTE(review): the pairs below look like stripped @property / setters.
    def r(self):
        return self._r
    def P(self):
        # Projector onto the subspace perpendicular to r.
        return self._P
    def r(self, r):
        self._r = r
        # Keep the perpendicular projector in sync with r.
        self._P = (np.eye(self.coords.size) - np.outer(self.r, self.r))
    def atoms(self):
        return self.geom.atoms
    def coords(self):
        return self.geom.coords
    def coords(self, coords):
        self.geom.coords = coords
    def cart_coords(self):
        return self.geom.cart_coords
    def grow_image(self):
        """Append a new frontier image one step further along the trajectory."""
        self.images[(- 1)] = self.geom.copy()
        if (self._initialized and self.final_geom and self.between):
            # Interpolate towards the final geometry.
            m = (self.between + 2)
            k = (len(self.images) - 1)
            lambda_ = ((m - k) / ((m + 1) - k))
            step = ((self.coords * (lambda_ - 1)) + ((1 - lambda_) * self.final_geom.coords))
        else:
            # Plain step of fixed length along r.
            step = (self.step_len * self.r)
        self.coords = (self.coords + step)
        real_forces = self.geom.forces
        energy = self.geom.energy
        self.all_energies.append(energy)
        self.all_real_forces.append(real_forces)
        self.images.append(self.geom)
    def initialize(self):
        """Record energy/forces of the first image and grow the second."""
        assert (not self._initialized), 'GrowingNT.initialize() can only be called once!'
        init_results = self.geom.get_energy_and_forces_at(self.images[0].coords)
        self.all_energies.append(init_results['energy'])
        self.all_real_forces.append(init_results['forces'])
        self.grow_image()
        self._initialized = True
    def calc_hessian_for(self, other_geom):
        """Cartesian Hessian evaluated at *other_geom*'s coordinates."""
        res = self.geom.get_energy_and_cart_hessian_at(other_geom.cart_coords)
        cart_hessian = res['hessian']
        return cart_hessian
    def energy(self):
        return self.geom.energy
    def forces(self):
        # Forces with the component along r projected out.
        forces = self.geom.forces
        perp_forces = self.P.dot(forces)
        return perp_forces
    def cart_forces(self):
        return self.geom.cart_forces
    def get_energy_at(self, coords):
        return self.geom.get_energy_at(coords)
    def get_energy_and_forces_at(self, coords):
        return self.geom.get_energy_and_forces_at(coords)
    def as_xyz(self):
        return self.geom.as_xyz()
    def clear_passed(self):
        # Reset stationary-point flags for the next growth cycle.
        self.passed_min = False
        self.passed_ts = False
    def reparametrize(self):
        """Grow the trajectory when the frontier image converged.

        Returns True when a new image was grown (rms of the perpendicular
        forces dropped below ``rms_thresh``), False otherwise.
        """
        real_forces = self.geom.forces
        energy = self.energy
        # Overwrite the frontier image's entries with the converged values.
        self.all_real_forces[(- 1)] = real_forces
        self.all_energies[(- 1)] = energy
        forces = self.forces
        can_grow = (rms(forces) <= self.rms_thresh)
        if can_grow:
            if self.dump:
                with open(self.trj_fn, 'w') as handle:
                    handle.write('\n'.join([geom.as_xyz() for geom in self.images]))
            '\n Check if we passed a stationary point (SP).\n ^ Energy\n |\n | -3 -1 -2\n | \\ / / \\\n | \\ / / \\\n | -2 -3 -1\n | Minimum TS\n '
            # An SP shows up as an energy extremum at the second-to-last image.
            ae = self.all_energies
            self.passed_min = ((len(ae) >= 3) and (ae[(- 3)] > ae[(- 2)] < ae[(- 1)]))
            self.passed_ts = ((len(ae) >= 3) and (ae[(- 3)] < ae[(- 2)] > ae[(- 1)]))
            passed_sp = (self.passed_min or self.passed_ts)
            if passed_sp:
                sp_image = self.images[(- 2)].copy()
                sp_kind = ('Minimum' if self.passed_min else 'TS')
                self.sp_images.append(sp_image)
                self.log(f'''Passed stationary point! It seems to be a {sp_kind}.
{sp_image.as_xyz()}''')
                if self.passed_ts:
                    self.ts_images.append(sp_image)
                    if self.hessian_at_ts:
                        sp_hessian = self.calc_hessian_for(sp_image)
                        (nus, *_) = sp_image.get_normal_modes(sp_hessian)
                        self.log(f'First 5 frequencies: {nus[:5]}')
                    if (self.require_imag_freq < 0.0):
                        # Reuse the Hessian if it was already computed above.
                        try:
                            sp_hessian
                        except NameError:
                            sp_hessian = self.calc_hessian_for(sp_image)
                        self.ts_imag_freqs.append(sp_image.get_imag_frequencies(sp_hessian))
                elif self.passed_min:
                    self.min_images.append(sp_image)
            # Possibly update the growth direction after passing a minimum.
            r_new = self.get_r(self.geom, self.final_geom, self.bonds, self.r)
            r_dot = r_new.dot(self.r)
            r_org_dot = r_new.dot(self.r_org)
            self.log(f"r.dot(r')={r_dot:.6f} r_org.dot(r')={r_org_dot:.6f}")
            if (self.r_update and (r_org_dot <= self.r_update_thresh) and self.passed_min):
                self.r = r_new
                self.log('Updated r')
            self.grow_image()
            assert (len(self.images) == len(self.all_energies) == len(self.all_real_forces))
        # NOTE(review): read by get_additional_print(); make sure it is set
        # before the first call (it is only assigned here).
        self.did_reparametrization = can_grow
        return can_grow
    def check_convergence(self, *args, **kwargs):
        """Converged once a TS was passed (and, optionally, has the required
        imaginary frequency) and ``stop_after_ts`` is set."""
        if (len(self.ts_images) == 0):
            return False
        converged = self.stop_after_ts
        if self.require_imag_freq:
            converged = (converged and (self.ts_imag_freqs[(- 1)][0] <= self.require_imag_freq))
        return converged
    def get_additional_print(self):
        """Return a status string after growth, or None; resets the flags."""
        if self.did_reparametrization:
            img_num = len(self.images)
            str_ = f'Grew Newton trajectory to {img_num} images.'
            if self.passed_min:
                str_ += f' Passed minimum geometry at image {(img_num - 1)}.'
            elif self.passed_ts:
                str_ += f' Passed transition state geometry at image {(img_num - 1)}.'
        else:
            str_ = None
        self.did_reparametrization = False
        self.clear_passed()
        return str_
# NOTE(review): the two lines below look like residue of stripped Flask route
# decorators — presumably ``@app.route('/version', ...)`` and
# ``@app.route('/allure-docker-service/version', ...)``; restore before use.
('/version', strict_slashes=False)
('/allure-docker-service/version', strict_slashes=False)
def version_endpoint():
    """Return the Allure version as JSON; 400 with the error message on failure."""
    try:
        version = get_file_as_string(ALLURE_VERSION).strip()
    except Exception as ex:
        body = {'meta_data': {'message': str(ex)}}
        resp = jsonify(body)
        resp.status_code = 400
    else:
        body = {'data': {'version': version}, 'meta_data': {'message': 'Version successfully obtained'}}
        resp = jsonify(body)
        resp.status_code = 200
    return resp
def test_correct_response(client, monkeypatch, elasticsearch_transaction_index, awards_and_transactions):
    """Spending-by-NAICS endpoint returns the expected aggregated results
    for the seeded award/transaction fixtures."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    resp = client.post('/api/v2/search/spending_by_category/naics', content_type='application/json', data=json.dumps({'filters': {'time_period': [{'start_date': '2018-10-01', 'end_date': '2020-09-30'}]}}))
    expected_response = {'category': 'naics', 'limit': 10, 'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False}, 'results': [{'amount': 500000.0, 'code': '222220', 'id': None, 'name': 'NAICS 2'}, {'amount': 50000.0, 'code': '111110', 'id': None, 'name': 'NAICS 1'}], 'messages': [get_time_period_message()]}
    assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response'
    assert (resp.json() == expected_response)
class FieldGrid(Tidy3dBaseModel):
    """Holds the component-wise (Yee-style) sample positions of a vector field:
    one :class:`Coords` set per field component."""
    x: Coords = pd.Field(..., title='X Positions', description='x,y,z coordinates of the locations of the x-component of a vector field.')
    y: Coords = pd.Field(..., title='Y Positions', description='x,y,z coordinates of the locations of the y-component of a vector field.')
    z: Coords = pd.Field(..., title='Z Positions', description='x,y,z coordinates of the locations of the z-component of a vector field.')
def create_system_image(image_file=None, upload_path=None, unique_identifier=None, ext='jpg'):
    """Fetch (or default) a system image, normalize it to JPEG and upload it.

    Args:
        image_file: URL of the source image; when falsy, the bundled
            ``static/default_system_image.png`` is used instead.
        upload_path: target upload path; defaults to the event-topic
            system-image path built from ``unique_identifier``.
        unique_identifier: id interpolated into the default upload path.
        ext: extension used for the uploaded file name.

    Returns:
        dict with the uploaded image URL under ``'system_image_url'``.

    Raises:
        OSError: if the downloaded/default file is not a valid image.
    """
    filename = f'{get_file_name()}.{ext}'
    if image_file:
        # NOTE(review): server-side fetch of a caller-supplied URL — potential
        # SSRF; confirm callers validate/whitelist the URL.
        with urllib.request.urlopen(image_file) as img_data:
            image_file = io.BytesIO(img_data.read())
    else:
        file_relative_path = 'static/default_system_image.png'
        image_file = ((current_app.config['BASE_DIR'] + '/') + file_relative_path)
    try:
        im = Image.open(image_file)
    except OSError as err:
        # Chain the original exception so the root cause stays visible.
        raise OSError('Corrupt/Invalid Image') from err
    # Non-JPEG input must be converted before it can be saved as JPEG below.
    img = im.convert('RGB') if (im.format != 'JPEG') else im
    temp_file_relative_path = ((('static/media/temp/' + generate_hash(str(image_file))) + get_file_name()) + '.jpg')
    temp_file_path = ((current_app.config['BASE_DIR'] + '/') + temp_file_relative_path)
    # exist_ok avoids the race between an isdir() check and makedirs().
    os.makedirs(temp_file_path.rsplit('/', 1)[0], exist_ok=True)
    img.save(temp_file_path)
    upfile = UploadedFile(file_path=temp_file_path, filename=filename)
    if not upload_path:
        upload_path = UPLOAD_PATHS['event_topic']['system_image'].format(event_topic_id=unique_identifier)
    uploaded_url = upload(upfile, upload_path)
    os.remove(temp_file_path)
    return {'system_image_url': uploaded_url}
class OptionPlotoptionsItemAccessibility(Options):
    """Accessibility options for the item series plot options.

    NOTE(review): the duplicate method names suggest stripped ``@property`` /
    setter decorators; as written the setter definition shadows the getter.
    Confirm against the upstream generator before editing.
    """
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def descriptionFormat(self):
        return self._config_get(None)
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def exposeAsGroupOnly(self):
        return self._config_get(None)
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)
    def keyboardNavigation(self) -> 'OptionPlotoptionsItemAccessibilityKeyboardnavigation':
        # Sub-option container (lazily created).
        return self._config_sub_data('keyboardNavigation', OptionPlotoptionsItemAccessibilityKeyboardnavigation)
    def point(self) -> 'OptionPlotoptionsItemAccessibilityPoint':
        return self._config_sub_data('point', OptionPlotoptionsItemAccessibilityPoint)
# NOTE(review): the line below looks like residue of a stripped FastAPI route
# decorator — presumably ``@router.patch(ACCESS_MANUAL_WEBHOOK, ...)``;
# restore it before use.
(ACCESS_MANUAL_WEBHOOK, status_code=HTTP_200_OK, dependencies=[Security(verify_oauth_client, scopes=[WEBHOOK_CREATE_OR_UPDATE])], response_model=AccessManualWebhookResponse)
def patch_access_manual_webhook(connection_config: ConnectionConfig=Depends(_get_connection_config), *, db: Session=Depends(deps.get_db), request_body: AccessManualWebhooks) -> AccessManualWebhook:
    """Replace the field definitions of the manual access webhook attached
    to this connection config.

    Raises 404 (via the helper) when no webhook exists, 400 on a DB
    integrity violation.
    """
    access_manual_webhook: AccessManualWebhook = get_access_manual_webhook_or_404(connection_config)
    access_manual_webhook.fields = jsonable_encoder(request_body.fields)
    try:
        access_manual_webhook.save(db=db)
    except IntegrityError as exc:
        # NOTE(review): consider ``raise ... from exc`` to keep the cause chained.
        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=Pii(str(exc)))
    logger.info("Updated access manual webhook for connection config '{}'", connection_config.key)
    return access_manual_webhook
class BaseLoadAnalyzer(ABC):
    """Base for periodic load analyzers.

    Subclasses declare ``checkers`` (the Checker classes to instantiate);
    ``analyzer_pipeline`` then periodically asks each checker for triggers,
    optimizes the combined pipeline and applies it.
    """

    # Checker classes to instantiate per analyzer instance; subclasses override.
    checkers: List[Type[Checker]] = []

    def __init__(self, cloud_client: BaseCloudClient, input_batch_queue: InputBatchQueue, output_batch_queue: OutputBatchQueue, model_instances_storage: ModelInstancesStorage, config: dm.Config):
        logger.info('Start load analyzer')
        self.sleep_time = config.load_analyzer.sleep_time
        self.cloud_client = cloud_client
        self.input_batch_queue = input_batch_queue
        self.output_batch_queue = output_batch_queue
        self.model_instances_storage = model_instances_storage
        self.config = config
        # BUG FIX: was a class-level list (``__checkers: List[Checker] = []``)
        # that every instance appended to, so checker instances leaked across
        # analyzers. Each instance now owns its own list.
        self.__checkers: List[Checker] = []
        for checker_class in self.checkers:
            self.__checkers.append(checker_class(self.model_instances_storage, self.input_batch_queue, self.output_batch_queue, self.config))

    async def analyzer_pipeline(self):
        """Run the endless analyze/trigger loop (one tick per ``sleep_time``)."""
        while True:
            await asyncio.sleep(self.sleep_time)
            logger.debug('Load analyzer tick')
            pipeline = TriggerPipeline(cloud_client=self.cloud_client, model_instances_storage=self.model_instances_storage, config=self.config)
            for checker in self.__checkers:
                triggers = checker.make_triggers()
                pipeline.extend(triggers)
            pipeline.optimize()
            if len(pipeline) != 0:
                logger.info(f'Pipeline that will be applied {pipeline.get_triggers()}')
                await pipeline.apply()
class BuilderTests(unittest.TestCase):
    """Unit tests for the Pypher query-builder fluent interface."""
    # --- Predefined statements / functions -------------------------------
    def test_can_create_pypher(self):
        p = Pypher()
        self.assertIsInstance(p, Pypher)
    def test_pypher_created_statements(self):
        p = Pypher()
        expected = []
        for s in _PREDEFINED_STATEMENTS:
            getattr(p, s[0])
            try:
                expected.append(s[1].upper())
            except:
                # NOTE(review): bare except — presumably guarding IndexError
                # when the statement tuple carries no alias; confirm upstream.
                expected.append(s[0].upper())
        self.assertEqual(str(p), ' '.join(expected))
    def test_pypher_can_create_dynamic_statment(self):
        p = Pypher()
        p.my_statement(1, 2, 3)
        exp = 'my_statement 1, 2, 3'
        q = str(p)
        self.assertEqual(exp, q)
        self.assertEqual(0, len(p.bound_params))
    def test_pypher_can_create_dynamic_statment_random(self):
        p = Pypher()
        stmt = 'my_statment_{}'.format(randint(1, 777))
        getattr(p, stmt)(1, 2, 3)
        exp = '{} 1, 2, 3'.format(stmt)
        q = str(p)
        self.assertEqual(exp, q)
        self.assertEqual(0, len(p.bound_params))
    def test_can_add_statement_with_link_method(self):
        p = Pypher()
        stmt = 'my_statment_{}'.format(randint(1, 777))
        p.link(stmt)
        q = str(p)
        self.assertEqual(stmt, q)
        self.assertEqual(0, len(p.bound_params))
    def test_pypher_created_functions(self):
        p = Pypher()
        expected = []
        for s in _PREDEFINED_FUNCTIONS:
            getattr(p, s[0])
            try:
                expected.append((s[1] + '()'))
            except:
                expected.append((s[0] + '()'))
        self.assertEqual(str(p), ' '.join(expected))
    def test_can_call_MAX_predefined_function(self):
        p = Pypher()
        p.MAX(9, 9)
        string = str(p)
        params = p.bound_params
        # Both 9s bind to the same parameter, hence len(params) == 1.
        expected = 'max(${}, ${})'.format(get_dict_key(params, 9), get_dict_key(params, 9))
        self.assertEqual(1, len(params))
        self.assertEqual(expected, string)
    def test_can_call_LABELS_predefined_function(self):
        p = Pypher()
        p.LABELS('test')
        string = str(p)
        params = p.bound_params
        expected = 'labels(${})'.format(get_dict_key(params, 'test'))
        self.assertEqual(1, len(params))
        self.assertEqual(expected, string)
    # --- Attribute statements and .property() access ---------------------
    def test_can_add_one_statement(self):
        p = Pypher()
        p.some_attribute
        expected = 'some_attribute'
        c = str(p)
        self.assertEqual(c, expected)
    def test_can_add_two_statements(self):
        p = Pypher()
        p.some_statement.some_other_statement
        expected = 'some_statement some_other_statement'
        c = str(p)
        self.assertEqual(c, expected)
    def test_can_add_random_statements(self):
        p = Pypher()
        exp = []
        for x in range(1, randrange(5, 22)):
            getattr(p, str(x))
            exp.append(str(x))
        expected = ' '.join(exp)
        self.assertEqual(str(p), expected)
    def test_can_add_one_property(self):
        p = Pypher()
        p.property('property')
        expected = '.`property`'
        c = str(p)
        self.assertEqual(c, expected)
    def test_can_add_one_property_with_custom_quote(self):
        import pypher
        # Temporarily swap the property quote character; restored below.
        pypher.builder.QUOTES['property'] = '"'
        p = Pypher()
        p.property('property')
        expected = '."property"'
        c = str(p)
        self.assertEqual(c, expected)
        pypher.builder.QUOTES['property'] = '`'
    def test_can_add_one_property_underscore(self):
        p = Pypher()
        p.__property__
        expected = '.`property`'
        c = str(p)
        self.assertEqual(c, expected)
    def test_can_add_two_properties(self):
        p = Pypher()
        p.property('prop1').property('prop2')
        expected = '.`prop1`.`prop2`'
        c = str(p)
        self.assertEqual(c, expected)
    def test_can_add_two_properties_underscore(self):
        p = Pypher()
        p.__prop1__.__prop2__
        expected = '.`prop1`.`prop2`'
        c = str(p)
        self.assertEqual(c, expected)
    def test_can_add_two_properties_mixed(self):
        p = Pypher()
        p.property('prop1').__prop2__
        expected = '.`prop1`.`prop2`'
        c = str(p)
        self.assertEqual(c, expected)
    def test_can_add_random_properties(self):
        p = Pypher()
        exp = []
        for x in range(1, randrange(5, 22)):
            p.property(str(x))
            exp.append('`{}`'.format(x))
        expected = ('.' + '.'.join(exp))
        self.assertEqual(str(p), expected)
    def test_can_add_statement_and_property(self):
        p = Pypher()
        p.RETURN.property('money')
        exp = 'RETURN.`money`'
        self.assertEqual(str(p), exp)
    def test_can_manually_add_link(self):
        p = Pypher()
        link = Statement(name='SOMESTATEMENT')
        p.add_link(link)
        exp = 'SOMESTATEMENT'
        self.assertEqual(str(p), exp)
    # --- Node patterns ---------------------------------------------------
    def test_can_add_empty_node(self):
        p = Pypher()
        p.node()
        exp = '()'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_node_with_one_label(self):
        p = Pypher()
        p.node(labels='Test')
        exp = '(:`Test`)'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_node_with_one_label_with_custom_quote(self):
        import pypher
        # Temporarily swap the label quote character; restored below.
        pypher.builder.QUOTES['label'] = '"'
        p = Pypher()
        p.node(labels='Test')
        exp = '(:"Test")'
        self.assertEqual(str(p), exp)
        pypher.builder.QUOTES['label'] = '`'
    def test_can_add_empty_node_with_multiple_labels(self):
        p = Pypher()
        p.node(labels=['Test', 'one', 'two'])
        exp = '(:`Test`:`one`:`two`)'
        self.assertEqual(str(p), exp)
    def test_can_add_named_node(self):
        p = Pypher()
        p.node('name')
        exp = '(name)'
        self.assertEqual(str(p), exp)
    def test_can_add_named_node_with_multiple_labels(self):
        p = Pypher()
        p.node('name', labels=['Test', 'one', 'two'])
        exp = '(name:`Test`:`one`:`two`)'
        self.assertEqual(str(p), exp)
    def test_can_add_unamed_node_with_properties(self):
        p = Pypher()
        name = 'somename'
        age = 99
        p.node(name=name, age=age)
        c = str(p)
        params = p.bound_params
        exp = '( {{`age`: ${a}, `name`: ${n}}})'.format(a=get_dict_key(params, age), n=get_dict_key(params, name))
        self.assertEqual(c, exp)
    def test_can_add_unamed_node_with_properties_and_labels(self):
        p = Pypher()
        name = 'somename'
        age = 99
        p.node(name=name, age=age, labels=['Test', 'one', 'two'])
        c = str(p)
        params = p.bound_params
        exp = '(:`Test`:`one`:`two` {{`age`: ${a}, `name`: ${n}}})'.format(n=get_dict_key(params, name), a=get_dict_key(params, age))
        self.assertEqual(c, exp)
    def test_can_add_named_node_with_properties_and_labels(self):
        p = Pypher()
        name = 'somename'
        age = 99
        p.node('name', name=name, age=age, labels=['Test', 'one', 'two'])
        c = str(p)
        params = p.bound_params
        exp = '(name:`Test`:`one`:`two` {{`age`: ${a}, `name`: ${n}}})'.format(n=get_dict_key(params, name), a=get_dict_key(params, age))
        self.assertEqual(c, exp)
    # --- Relationship patterns -------------------------------------------
    def test_can_add_empty_undirected_relationship(self):
        p = Pypher()
        p.relationship()
        exp = '--'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_out_relationship(self):
        p = Pypher()
        p.relationship(direction='out')
        exp = '-->'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_in_relationship(self):
        p = Pypher()
        p.relationship(direction='in')
        exp = '<--'
        self.assertEqual(str(p), exp)
    def test_can_add_named_relationship(self):
        p = Pypher()
        p.rel('name')
        exp = '-[name]-'
        self.assertEqual(str(p), exp)
    def test_can_add_named_out_relationship(self):
        p = Pypher()
        p.rel('name', direction='>')
        exp = '-[name]->'
        self.assertEqual(str(p), exp)
    def test_can_add_named_in_relationship(self):
        p = Pypher()
        p.rel('name', direction='<')
        exp = '<-[name]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_labels(self):
        p = Pypher()
        p.relationship(labels=['one', 'two', 'three'])
        exp = '-[:`one`|`two`|`three`]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_types(self):
        p = Pypher()
        p.relationship(types=['one', 'two', 'three'])
        exp = '-[:`one`|`two`|`three`]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_min_hop(self):
        p = Pypher()
        p.relationship(min_hops=1)
        exp = '-[*1..]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_max_hop(self):
        p = Pypher()
        p.relationship(max_hops=1)
        exp = '-[*..1]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_min_max_hop(self):
        p = Pypher()
        p.relationship(min_hops=1, max_hops=3)
        exp = '-[*1..3]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_fixed_length(self):
        # Equal min/max collapses to the fixed-length form.
        p = Pypher()
        p.relationship(min_hops=3, max_hops=3)
        exp = '-[*3]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_fixed_length_hops(self):
        p = Pypher()
        p.relationship(hops=3)
        exp = '-[*3]-'
        self.assertEqual(str(p), exp)
    def test_using_hops_and_min_hops_raises_error(self):
        # `hops` is mutually exclusive with min_hops/max_hops.
        p = Pypher()
        with self.assertRaises(ValueError):
            p.relationship(hops=2, min_hops=3)
    def test_using_hops_and_max_hops_raises_error(self):
        p = Pypher()
        with self.assertRaises(ValueError):
            p.relationship(hops=2, max_hops=3)
    def test_can_add_empty_undirected_relationship_with_labels_and_types_but_uses_types(self):
        # When both are given, `types` wins over `labels`.
        p = Pypher()
        p.relationship(labels=[1, 2, 3], types=['one', 'two', 'three'])
        exp = '-[:`one`|`two`|`three`]-'
        self.assertEqual(str(p), exp)
    def test_can_add_empty_undirected_relationship_with_labels_and_types_but_uses_labels(self):
        p = Pypher()
        p.relationship(labels=[1, 2, 3], types=None)
        exp = '-[:`1`|`2`|`3`]-'
        self.assertEqual(str(p), exp)
    def test_can_add_named_undirected_relationship_with_labels(self):
        p = Pypher()
        p.relationship(variable='test', labels=['one', 'two', 'three'])
        exp = '-[test:`one`|`two`|`three`]-'
        self.assertEqual(str(p), exp)
    def test_can_add_named_undirected_relationship_with_labels_and_properties(self):
        p = Pypher()
        name = 'somename'
        age = 99
        p.relationship(variable='test', labels=['one', 'two', 'three'], name=name, age=age)
        c = str(p)
        params = p.bound_params
        exp = '-[test:`one`|`two`|`three` {{`age`: ${a}, `name`: ${n}}}]-'.format(n=get_dict_key(params, name), a=get_dict_key(params, age))
        self.assertEqual(str(p), exp)
    def test_can_add_named_undirected_relationship_with_labels_and_properties_and_hops(self):
        p = Pypher()
        name = 'somename'
        age = 99
        p.relationship(variable='test', labels=['one', 'two', 'three'], name=name, age=age, min_hops=1, max_hops=3)
        c = str(p)
        params = p.bound_params
        exp = '-[test:`one`|`two`|`three`*1..3 {{`age`: ${a}, `name`: ${n}}}]-'.format(n=get_dict_key(params, name), a=get_dict_key(params, age))
        self.assertEqual(str(p), exp)
    # --- Combined patterns, raw fragments and ad-hoc functions -----------
    def test_can_add_empty_node_rel_node(self):
        p = Pypher()
        p.node().rel().node()
        exp = '()--()'
        self.assertEqual(str(p), exp)
    def test_can_add_named_node_labeled_out_relationship_node_with_properties(self):
        n = 'name'
        l = 'KNOWS'
        name = 'somename'
        age = 99
        p = Pypher()
        p.node(n).rel_out(labels=l).node(name=name, age=age)
        c = str(p)
        params = p.bound_params
        exp = '({n})-[:`{l}`]->( {{`age`: ${age}, `name`: ${name}}})'.format(n=n, l=l, name=get_dict_key(params, name), age=get_dict_key(params, age))
        self.assertEqual(c, exp)
        self.assertEqual(2, len(params))
    def test_can_add_raw(self):
        # raw() content is emitted verbatim, without parameter binding.
        p = Pypher()
        s = 'raw content {}'.format(random())
        p.this.will.be.raw(s)
        exp = 'this will be {}'.format(s)
        self.assertEqual(str(p), exp)
    def test_can_bind_params_and_clear_them_using_reset(self):
        p = Pypher()
        p.bind_param(1)
        p.bind_param(2)
        s = str(p)
        bp = p.bound_params
        p.reset()
        bp2 = p.bound_params
        self.assertEqual(2, len(bp))
        self.assertEqual(0, len(bp2))
    def test_can_add_raw_with_mixed_args(self):
        p = Pypher()
        a = Pypher()
        i = random()
        s = 'raw content {}'.format(random())
        p.this.will.be.raw(s, a.test.ID(i))
        c = str(p)
        params = p.bound_params
        exp = 'this will be {} test id({})'.format(s, i)
        self.assertEqual(c, exp)
        self.assertEqual(0, len(params))
    def test_can_add_random_function(self):
        p = Pypher()
        f = 'someFunction{}'.format(random())
        p.func(f)
        exp = '{}()'.format(f)
        self.assertEqual(str(p), exp)
    def test_can_add_random_function_with_args(self):
        # func() binds its arguments as parameters.
        p = Pypher()
        f = 'someFunction{}'.format(random())
        one = 'one'
        two = 2
        p.func(f, one, two)
        c = str(p)
        params = p.bound_params
        exp = '{}(${}, ${})'.format(f, get_dict_key(params, one), get_dict_key(params, two))
        self.assertEqual(c, exp)
        self.assertEqual(2, len(params))
    def test_can_add_random_raw_function_with_mixed_args(self):
        # func_raw() interpolates its arguments without binding.
        p = Pypher()
        a = Pypher()
        i = random()
        f = 'someFunction{}'.format(random())
        one = 'one'
        two = 2
        p.func_raw(f, one, two, a.id(i))
        c = str(p)
        params = p.bound_params
        exp = '{}({}, {}, id({}))'.format(f, one, two, i)
        self.assertEqual(c, exp)
        self.assertEqual(0, len(params))
def test_can_add_in_clause(self):
p = Pypher()
one = 1
two = 2
three = 3
p.n.property('name').IN(one, two, three)
c = str(p)
params = p.bound_params
exp = 'n.`name` IN [${}, ${}, ${}]'.format(get_dict_key(params, one), get_dict_key(params, two), get_dict_key(params, three))
self.assertEqual(c, exp)
self.assertEqual(3, len(params))
def test_can_add_list(self):
p = Pypher()
one = 1
two = 2
three = 3
p.List(one, two, three)
c = str(p)
params = p.bound_params
exp = '[${one}, ${two}, ${three}]'.format(one=get_dict_key(params, one), two=get_dict_key(params, two), three=get_dict_key(params, three))
self.assertEqual(exp, c)
self.assertEqual(3, len(params))
def test_can_add_list_comprehension_clause(self):
p = Pypher()
three = 3
p.n.property('name').comp((__.field | three))
c = str(p)
params = p.bound_params
exp = 'n.`name` [field | ${}]'.format(get_dict_key(params, three))
self.assertEqual(c, exp)
self.assertEqual(1, len(params))
def test_can_add_single_label(self):
p = Pypher()
p.n.label('one')
exp = 'n:`one`'
self.assertEqual(str(p), exp)
def test_can_add_multiple_labels(self):
p = Pypher()
p.n.label(['one', 'two', 'three', 'four'])
exp = 'n:`one`:`two`:`three`:`four`'
self.assertEqual(str(p), exp)
def test_can_assign_variable(self):
p = Pypher()
p.MATCH.p.assign(__.node('n').rel_out().node('m'))
exp = 'MATCH p = (n)-->(m)'
self.assertEqual(str(p), exp)
def test_can_reuse_params_object_across_pypher_isntances(self):
params = Params('xxx')
p = Pypher(params=params)
p2 = Pypher(params=params)
self.assertEqual(id(p.params), id(p2.params))
def test_can_return_regular_shallow_map(self):
p = Pypher()
one = 'one'
two = 'two'
three = 'three'
p.RETURN.map(one, two, three=three)
q = str(p)
params = p.bound_params
exp = 'RETURN {{one, two, `three`: ${three}}}'.format(three=get_dict_key(params, three))
self.assertEqual(exp, q)
self.assertEqual(1, len(params))
def test_can_nest_map(self):
p = Pypher()
p.collect(__.map('one', 'two', 'three'))
q = str(p)
exp = 'collect({one, two, three})'
self.assertEqual(exp, q)
def test_can_return_regular_shallow_map_with_list(self):
p = Pypher()
one = 'one'
two = 'two'
three = 'three'
four = 'four'
five = 'five'
p.RETURN.map(one, two, three=three, list=[four, five])
q = str(p)
params = p.bound_params
exp = 'RETURN {{one, two, `list`: [${four}, ${five}], `three`: ${three}}}'.format(three=get_dict_key(params, three), four=get_dict_key(params, four), five=get_dict_key(params, five))
self.assertEqual(exp, q)
self.assertEqual(3, len(params))
def test_can_return_map_projection(self):
    """mapprojection() prefixes the map with the projected entity name."""
    three, four, five = 'three', 'four', 'five'
    py = Pypher()
    py.RETURN.mapprojection('user', 'one', 'two', three=three, list=[four, five])
    rendered = str(py)
    bound = py.bound_params
    expected = 'RETURN user {{one, two, `list`: [${four}, ${five}], `three`: ${three}}}'.format(three=get_dict_key(bound, three), four=get_dict_key(bound, four), five=get_dict_key(bound, five))
    self.assertEqual(expected, rendered)
    self.assertEqual(3, len(bound))
def test_can_nest_map_projection(self):
    """Map projections nest inside other expressions without binding params."""
    py = Pypher()
    inner = __.mapprojection('movie', '.title', '.year')
    py.RETURN.mapprojection('actor', '.name', '.realName', movies=__.collect(inner))
    rendered = str(py)
    self.assertEqual('RETURN actor {.name, .realName, `movies`: collect(movie {.title, .year})}', rendered)
    self.assertEqual(0, len(py.bound_params))
def test_can_append_two_instances_first_is_empty(self):
    """Appending into an empty Pypher yields just the appended chain."""
    first = Pypher()
    second = Pypher()
    second.two
    first.append(second)
    self.assertEqual('two', str(first))
    self.assertEqual('two', str(second))
def test_can_append_two_instances(self):
    """append() concatenates chains and leaves the argument untouched."""
    first = Pypher()
    second = Pypher()
    first.one
    second.two
    first.append(second)
    self.assertEqual('one two', str(first))
    self.assertEqual('two', str(second))
def test_can_append_four_instances(self):
    """Chained appends accumulate through every instance in order."""
    first, second, third, fourth = Pypher(), Pypher(), Pypher(), Pypher()
    first.one
    second.two
    third.three.three
    fourth.four.four.four
    first.append(second.append(third))
    first.append(fourth)
    self.assertEqual('one two three three four four four', str(first))
    self.assertEqual('two three three four four four', str(second))
    self.assertEqual('three three four four four', str(third))
    self.assertEqual('four four four', str(fourth))
def test_can_use_base_conditional(self):
    """CONDITIONAL with no operator renders comma-separated params."""
    py = Pypher()
    py.CONDITIONAL(1, 2, 3)
    rendered = str(py)
    bound = py.bound_params
    expected = '(${one}, ${two}, ${three})'.format(one=get_dict_key(bound, 1), two=get_dict_key(bound, 2), three=get_dict_key(bound, 3))
    self.assertEqual(expected, rendered)
    self.assertEqual(3, len(bound))
def test_can_use_AND_conditional_alias(self):
    """COND_AND joins its operands with AND inside parentheses."""
    py = Pypher()
    py.COND_AND(1, 2, 3)
    rendered = str(py)
    bound = py.bound_params
    expected = '(${one} AND ${two} AND ${three})'.format(one=get_dict_key(bound, 1), two=get_dict_key(bound, 2), three=get_dict_key(bound, 3))
    self.assertEqual(expected, rendered)
    self.assertEqual(3, len(bound))
def test_can_use_OR_conditional_alias(self):
    """COR joins its operands with OR inside parentheses."""
    py = Pypher()
    py.COR(1, 2, 3)
    rendered = str(py)
    bound = py.bound_params
    expected = '(${one} OR ${two} OR ${three})'.format(one=get_dict_key(bound, 1), two=get_dict_key(bound, 2), three=get_dict_key(bound, 3))
    self.assertEqual(expected, rendered)
    self.assertEqual(3, len(bound))
def test_can_nest_AND_and_OR_conditionals(self):
    """Conditionals nest, producing nested parenthesised groups."""
    py = Pypher()
    py.COR(1, __.CAND(2, 3))
    rendered = str(py)
    bound = py.bound_params
    expected = '(${one} OR (${two} AND ${three}))'.format(one=get_dict_key(bound, 1), two=get_dict_key(bound, 2), three=get_dict_key(bound, 3))
    self.assertEqual(expected, rendered)
    self.assertEqual(3, len(bound))
def test_can_clone_shallow_pypher(self):
    """A clone renders identically and shares the bound-params object."""
    original = Pypher()
    original.a.b.c.d
    copy = original.clone()
    self.assertEqual(str(original), str(copy))
    self.assertEqual(len(original.bound_params), len(copy.bound_params))
    self.assertTrue(id(original.bound_params) == id(copy.bound_params))
def test_can_clone_nested_pypher(self):
    """Cloning preserves deeply nested sub-expressions and their params."""
    original = Pypher()
    inner = Pypher()
    deepest = Pypher()
    deepest.CAND(1, 2, 3, 4, 5, __.test.this.out.CONDITIONAL(9, 9, 8, __.node(6)))
    inner.id(123).raw(deepest)
    original.a.b.c.d.node(inner == 122)
    copy = original.clone()
    self.assertEqual(str(original), str(copy))
    self.assertEqual(len(original.bound_params), len(copy.bound_params))
    self.assertTrue(id(original.bound_params) == id(copy.bound_params))
def test_can_clone_pypher_and_follow_different_paths(self):
    """Diverging after clone() yields different renderings."""
    original = Pypher()
    original.one.two.three.four
    copy = original.clone()
    original.xx.yy.zz.node('zzz11122')
    copy.a.b.c.d
    left = str(original)
    right = str(copy)
    self.assertTrue(left != right)
def test_can_do_bitwise_and(self):
    """BAND renders an apoc.bitwise.op call with the '&' operator."""
    py = Pypher()
    py.BAND(12, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, "&", ${})'.format(get_dict_key(bound, 12), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
def test_can_do_one_nested_bitwise_and(self):
    """Three operands nest into two apoc.bitwise.op calls; equal values share one param."""
    py = Pypher()
    py.BAND(12, 4, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, "&", apoc.bitwise.op(${}, "&", ${}))'.format(get_dict_key(bound, 12), get_dict_key(bound, 4), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
    self.assertEqual(2, len(bound))
def test_can_do_two_nested_bitwise_and(self):
    """Four operands nest into three apoc.bitwise.op calls."""
    py = Pypher()
    py.BAND(12, 4, 4, 20)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, "&", apoc.bitwise.op(${}, "&", apoc.bitwise.op(${}, "&", ${})))'.format(get_dict_key(bound, 12), get_dict_key(bound, 4), get_dict_key(bound, 4), get_dict_key(bound, 20))
    self.assertEqual(rendered, expected)
    self.assertEqual(3, len(bound))
def test_can_do_bitwise_or(self):
    """BOR renders an apoc.bitwise.op call with the '|' operator."""
    py = Pypher()
    py.BOR(12, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, "|", ${})'.format(get_dict_key(bound, 12), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
def test_can_do_bitwise_xor(self):
    """BXOR renders an apoc.bitwise.op call with the '^' operator."""
    py = Pypher()
    py.BXOR(12, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, "^", ${})'.format(get_dict_key(bound, 12), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
def test_can_do_bitwise_not(self):
    """BNOT renders an apoc.bitwise.op call with the '~' operator."""
    py = Pypher()
    py.BNOT(12, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, "~", ${})'.format(get_dict_key(bound, 12), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
def test_can_do_bitwise_left_shift(self):
    """BLSHIFT renders an apoc.bitwise.op call.

    NOTE(review): the expected operator '>>' for a *left* shift looks
    swapped with BRSHIFT's '<<'; this mirrors current library output —
    verify against the pypher/apoc operator mapping.
    """
    py = Pypher()
    py.BLSHIFT(12, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, ">>", ${})'.format(get_dict_key(bound, 12), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
def test_can_do_bitwise_right_shift(self):
    """BRSHIFT renders an apoc.bitwise.op call.

    NOTE(review): '<<' for a *right* shift looks swapped with BLSHIFT's
    '>>'; mirrors current library output — verify upstream.
    """
    py = Pypher()
    py.BRSHIFT(12, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, "<<", ${})'.format(get_dict_key(bound, 12), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
def test_can_do_bitwise_unsigned_left_shift(self):
    """BULSHIFT renders an apoc.bitwise.op call with the '>>>' operator."""
    py = Pypher()
    py.BULSHIFT(12, 4)
    rendered = str(py)
    bound = py.bound_params
    expected = 'apoc.bitwise.op(${}, ">>>", ${})'.format(get_dict_key(bound, 12), get_dict_key(bound, 4))
    self.assertEqual(rendered, expected)
def test_can_use_subclassed_function_by_classname(self):
    """A Func subclass is reachable through its class name."""
    class SubClassedFunc(Func):
        name = 'sub_class_func'
    value = 12
    py = Pypher()
    py.SubClassedFunc(value)
    rendered = str(py)
    bound = py.bound_params
    expected = 'sub_class_func(${})'.format(get_dict_key(bound, value))
    self.assertEqual(rendered, expected)
    self.assertEqual(1, len(bound))
def test_can_use_subclassed_function_by_alias(self):
    """A Func subclass is reachable through its registered alias."""
    class SubClassedFunc2(Func):
        name = 'sub_class_func2'
        _ALIASES = ['scf']
    value = 12
    py = Pypher()
    py.scf(value)
    rendered = str(py)
    bound = py.bound_params
    expected = 'sub_class_func2(${})'.format(get_dict_key(bound, value))
    self.assertEqual(rendered, expected)
    self.assertEqual(1, len(bound))
def test_can_use_subclassed_function_by_alias_and_classname(self):
    """Alias and class name resolve to the same Func; equal args share a param."""
    class SubClassedFunc3(Func):
        name = 'sub_class_func3'
        _ALIASES = ['scf3']
    value = 12
    py = Pypher()
    py.scf3(value).SubClassedFunc3(value)
    rendered = str(py)
    bound = py.bound_params
    key = get_dict_key(bound, value)
    expected = 'sub_class_func3(${}) sub_class_func3(${})'.format(key, key)
    self.assertEqual(rendered, expected)
    self.assertEqual(1, len(bound))
def test_can_use_subclassed_statement_by_classname(self):
    """A Statement subclass renders its argument inline, without params."""
    class SubClassedStatement(Statement):
        name = 'sub_class_stmt'
    value = 12
    py = Pypher()
    py.SubClassedStatement(value)
    rendered = str(py)
    self.assertEqual(rendered, 'sub_class_stmt {}'.format(value))
    self.assertEqual(0, len(py.bound_params))
def test_can_use_subclassed_statement_by_alias(self):
    """A Statement subclass is reachable through its registered alias."""
    class SubClassedStmt2(Statement):
        name = 'sub_class_stmt2'
        _ALIASES = ['scs']
    value = 12
    py = Pypher()
    py.scs(value)
    rendered = str(py)
    self.assertEqual(rendered, 'sub_class_stmt2 {}'.format(value))
    self.assertEqual(0, len(py.bound_params))
def test_can_use_subclassed_statement_by_alias_and_classname(self):
    """Alias and class name both resolve to the same Statement."""
    class SubClassedStmt3(Statement):
        name = 'sub_class_stmt3'
        _ALIASES = ['scs3']
    value = 12
    py = Pypher()
    py.scs3(value).SubClassedStmt3(value)
    rendered = str(py)
    self.assertEqual(rendered, 'sub_class_stmt3 {} sub_class_stmt3 {}'.format(value, value))
    self.assertEqual(0, len(py.bound_params))
def test_can_call_bundled_custom_functions(self):
    """Bundled helper functions bind their arguments as params."""
    first, second = 11, 10
    py = Pypher()
    py.extract(first).size(second)
    rendered = str(py)
    bound = py.bound_params
    expected = 'extract(${}) size(${})'.format(get_dict_key(bound, first), get_dict_key(bound, second))
    self.assertEqual(rendered, expected)
    self.assertEqual(2, len(bound))
def test_can_call_bundled_custom_statements(self):
    """Bundled statements render their values inline, without params."""
    first, second = 11, 10
    py = Pypher()
    py.remove(first).drop(second)
    rendered = str(py)
    self.assertEqual(rendered, 'REMOVE {} DROP {}'.format(first, second))
    self.assertEqual(0, len(py.bound_params))
def map_func(context):
    """Read one length-prefixed payload from the Java side, echoing debug info.

    Reads a 4-byte little-endian length header, then that many payload
    bytes. Any failure is printed as a traceback rather than raised.
    """
    java_file = JavaFile(context.from_java(), context.to_java())
    try:
        header = java_file.read(4)
        (data_len,) = struct.unpack('<i', header)
        print('res', type(data_len), data_len)
        payload = java_file.read(data_len)
        print('data', str(payload))
    except Exception:
        print(traceback.format_exc())
class Formatter(object):
    """Accumulates (indent, text) pairs and renders them as wrapped lines."""
    delimiter = '\n'

    def __init__(self, max_width=None):
        # Fall back to the detected terminal width when none is given.
        if max_width is None:
            max_width = get_terminal_width()
        self.max_width = max_width
        self.wrapper = textwrap.TextWrapper()
        self.lines = []
        self._indent = 0

    def append(self, line, indent=0):
        """Add a line, word-wrapping it; empty input starts a new paragraph."""
        if not line:
            self.new_paragraph()
        elif line.startswith(' '):
            # Pre-indented text is taken verbatim, without re-wrapping.
            self.append_raw(line, indent)
        else:
            self.wrapper.width = self.get_width(indent)
            for piece in self.wrapper.wrap(line):
                self.append_raw(piece, indent=indent)

    def append_raw(self, line, indent=0):
        """Record a line verbatim at the current indent plus *indent*."""
        self.lines.append((self._indent + indent, line))

    def get_width(self, indent=0):
        """Width remaining for text after current and extra indentation."""
        return self.max_width - self._indent - indent

    def new_paragraph(self):
        """Insert a blank separator unless the last line is already blank."""
        if self.lines and self.lines[-1][1]:
            self.lines.append((0, ''))

    def extend(self, iterable):
        """Merge another Formatter verbatim, or append plain lines."""
        if isinstance(iterable, Formatter):
            for indent, line in iterable:
                self.append_raw(line, indent)
        else:
            for line in iterable:
                self.append(line)

    def indent(self, indent=2):
        return _FormatterIndent(self, indent)

    def columns(self, num=2, spacing=' ', align=None, wrap=None, min_widths=None, max_widths=None, indent=None):
        if indent is None:
            indent = self._indent
        return _FormatterColumns(self, num, spacing, align, wrap, min_widths, max_widths, indent)

    def __str__(self):
        lines = self.lines
        # Drop a single trailing blank line from the rendered output.
        if lines and not lines[-1][1]:
            lines = lines[:-1]
        rendered = (' ' * indent + text for indent, raw in lines for text in self.convert_line(raw))
        return self.delimiter.join(rendered)

    def convert_line(self, line):
        """Yield printable strings; objects may expand via formatter_lines()."""
        try:
            lines_getter = line.formatter_lines
        except AttributeError:
            yield str(line)
        else:
            for sub in lines_getter():
                yield str(sub)

    def __iter__(self):
        return iter(self.lines)
.django_db
# NOTE(review): the line above is a mangled decorator (likely
# "@pytest.mark.django_db"); restore it for the test to run against the DB.
def test_federal_account_award_empty(client, monkeypatch, helpers, generic_account_data):
    """DEFC 'A' award spending should return an empty result set."""
    # Freeze "now" so the endpoint's date-dependent logic is deterministic.
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = helpers.post_for_spending_endpoint(client, url, def_codes=['A'], spending_type='award')
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.json()['results']) == 0)
class JSONWriter(GenericWriter):
    """Writer that emits Avro records as JSON through an AvroJSONEncoder.

    Configures the encoder with the resolved schema on construction; records
    are optionally validated before being encoded.
    """

    def __init__(self, fo: AvroJSONEncoder, schema: Schema, codec: str = 'null', sync_interval: int = (1000 * SYNC_SIZE), metadata: Optional[Dict[str, str]] = None, validator: bool = False, sync_marker: bytes = b'', codec_compression_level: Optional[int] = None, options: Optional[Dict[str, bool]] = None):
        # BUG FIX: `options: Dict = {}` was a shared mutable default argument;
        # use None as the default and substitute a fresh dict per call.
        if options is None:
            options = {}
        super().__init__(schema, metadata, validator, options)
        self.encoder = fo
        self.encoder.configure(self.schema, self._named_schemas)

    def write(self, record):
        """Validate (when enabled) and encode a single record."""
        if self.validate_fn:
            self.validate_fn(record, self.schema, self._named_schemas, '', True, self.options)
        write_data(self.encoder, record, self.schema, self._named_schemas, '', self.options)

    def flush(self):
        """Flush the underlying encoder."""
        self.encoder.flush()
class dual_gemm_rcr_fast_gelu(gemm_rcr):
    """GEMM (row-major A, column-major B) variant with a second epilogue
    'LeftFastGeluAndMul' applied via an extra bias/b1 input."""

    def __init__(self):
        super().__init__()
        self._attrs['op'] = 'dual_gemm_rcr_fast_gelu'
        # Second epilogue fused into the kernel.
        self._attrs['epilogue2'] = 'LeftFastGeluAndMul'

    def _infer_shapes(self, a: Tensor, b: Tensor, bias: Tensor):
        # Output shape is determined by a and b only; bias does not affect it.
        return super()._infer_shapes(a, b)

    def __call__(self, a: Tensor, b: Tensor, bias: Tensor) -> Tensor:
        """Wire up inputs/accessors and return the op's output tensor."""
        (a, b) = self._align_ab(a, b)
        self._attrs['inputs'] = [a, b, bias]
        self._attrs['input_accessors'] = [TensorAccessor(tensor) for tensor in self._attrs['inputs']]
        self._set_depth()
        self._sanity_check(a, b)
        output_shape = self._infer_shapes(a, b, bias)
        self._extract_epilogue_alignment(output_shape)
        output = Tensor(output_shape, src_ops={self}, dtype=self._attrs['inputs'][0]._attrs['dtype'])
        self._attrs['outputs'] = [output]
        self._attrs['output_accessors'] = [TensorAccessor(output)]
        # NOTE(review): presumably flags broadcasting of the second operand
        # along dim -2 when bias is degenerate there — confirm semantics.
        if ((b._attrs['shape'][(- 2)] != 1) and (bias._attrs['shape'][(- 2)] == 1)):
            self._attrs['broadcast_b1'] = True
        return output
def test_deploy(BrownieTester, otherproject, accounts):
    """Deployments register only with their own project's container."""
    first = BrownieTester.deploy(True, {'from': accounts[0]})
    assert len(BrownieTester) == 1
    assert len(otherproject.BrownieTester) == 0
    assert first not in otherproject.BrownieTester
    second = otherproject.BrownieTester.deploy(True, {'from': accounts[0]})
    assert len(BrownieTester) == 1
    assert len(otherproject.BrownieTester) == 1
    assert second not in BrownieTester
_ns.route('/new/', methods=['GET', 'POST'])
_required
# NOTE(review): the two lines above look like mangled decorators (probably
# "@<something>_ns.route(...)" and "@login_required"); restore before use.
def api_new_token():
    """Generate and persist a fresh API login/token pair for the current user,
    then redirect to the API home page."""
    user = flask.g.user
    # len of base64('copr') + '##'; the generated login is shortened by this
    # amount — presumably a marker prefix is applied elsewhere; verify.
    copr64 = (base64.b64encode(b'copr') + b'##')
    api_login = helpers.generate_api_token((flask.current_app.config['API_TOKEN_LENGTH'] - len(copr64)))
    user.api_login = api_login
    user.api_token = helpers.generate_api_token(flask.current_app.config['API_TOKEN_LENGTH'])
    # Expiry window in days comes from app config.
    user.api_token_expiration = (datetime.date.today() + datetime.timedelta(days=flask.current_app.config['API_TOKEN_EXPIRATION']))
    db.session.add(user)
    db.session.commit()
    return flask.redirect(flask.url_for('api_ns.api_home'))
def picker_callback(picker):
    """Move the outline box around the red-glyph point that was picked."""
    if picker.actor not in red_glyphs.actor.actors:
        return
    # Each source point is drawn with glyph_points.shape[0] vertices.
    point_id = picker.point_id // glyph_points.shape[0]
    if point_id == -1:
        return
    x, y, z = x1[point_id], y1[point_id], z1[point_id]
    half = 0.1
    outline.bounds = (x - half, x + half, y - half, y + half, z - half, z + half)
def test_options_database_cleared():
    """Creating many LinearSolvers must not grow the PETSc options database."""
    opts = PETSc.Options()
    baseline = len(opts.getAll())
    mesh = UnitIntervalMesh(1)
    space = FunctionSpace(mesh, 'DG', 0)
    u = TrialFunction(space)
    v = TestFunction(space)
    mat = assemble(inner(u, v) * dx)
    rhs = assemble(conj(v) * dx)
    result = Function(space)
    # Keep every solver alive so options cannot be reclaimed by GC tricks.
    solvers = []
    for _ in range(100):
        solver = LinearSolver(mat, solver_parameters={'ksp_type': 'preonly', 'pc_type': 'lu'})
        solver.solve(result, rhs)
        solvers.append(solver)
    assert baseline == len(opts.getAll())
class PrivateComputationPIDOnlyTestStageFlow(PrivateComputationBaseStageFlow):
    """Stage flow running only the PID (identity-matching) stages, for tests.

    Member order in `_order_` is the execution order; do not reorder.
    """
    _order_ = 'CREATED PID_SHARD PID_PREPARE ID_MATCH ID_MATCH_POST_PROCESS'
    CREATED = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.CREATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.CREATION_STARTED, completed_status=PrivateComputationInstanceStatus.CREATED, failed_status=PrivateComputationInstanceStatus.CREATION_FAILED, is_joint_stage=False)
    PID_SHARD = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_SHARD_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_SHARD_STARTED, completed_status=PrivateComputationInstanceStatus.PID_SHARD_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_SHARD_FAILED, is_joint_stage=False)
    PID_PREPARE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_PREPARE_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_PREPARE_STARTED, completed_status=PrivateComputationInstanceStatus.PID_PREPARE_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_PREPARE_FAILED, is_joint_stage=False)
    # ID_MATCH is the only joint (two-party) stage in this flow and is retryable.
    ID_MATCH = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_MATCHING_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_MATCHING_STARTED, completed_status=PrivateComputationInstanceStatus.ID_MATCHING_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_MATCHING_FAILED, is_joint_stage=True, is_retryable=True, timeout=DEFAULT_RUN_PID_TIMEOUT_IN_SEC)
    ID_MATCH_POST_PROCESS = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_STARTED, completed_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_FAILED, is_joint_stage=False)

    def get_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        """Return the service that executes this stage (defaults only here)."""
        logging.info('Start PID Only Test stage flow')
        return self.get_default_stage_service(args)
class OptionSeriesTreemapSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Highcharts `activeWhen` options for the default sonification instrument.

    NOTE(review): each pair of identically-named methods looks like a
    @property getter plus its setter whose decorators were stripped
    (matching the pattern of sibling Options classes); restore
    @property / @<name>.setter before using this class directly.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class PrivateComputationMRStageFlow(PrivateComputationBaseStageFlow):
    """Stage flow using the MapReduce-based multikey PID identity match.

    Member order in `_order_` is the execution order; do not reorder.
    """
    _order_ = 'CREATED PC_PRE_VALIDATION UNION_PID_MR_MULTIKEY ID_SPINE_COMBINER RESHARD PCF2_ATTRIBUTION PCF2_AGGREGATION AGGREGATE POST_PROCESSING_HANDLERS'
    CREATED = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.CREATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.CREATION_STARTED, completed_status=PrivateComputationInstanceStatus.CREATED, failed_status=PrivateComputationInstanceStatus.CREATION_FAILED, is_joint_stage=False)
    PC_PRE_VALIDATION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_STARTED, completed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_FAILED, is_joint_stage=False)
    UNION_PID_MR_MULTIKEY = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_MR_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_MR_STARTED, completed_status=PrivateComputationInstanceStatus.PID_MR_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_MR_FAILED, is_joint_stage=False)
    ID_SPINE_COMBINER = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_STARTED, completed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_FAILED, is_joint_stage=False)
    RESHARD = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.RESHARD_INITIALIZED, started_status=PrivateComputationInstanceStatus.RESHARD_STARTED, completed_status=PrivateComputationInstanceStatus.RESHARD_COMPLETED, failed_status=PrivateComputationInstanceStatus.RESHARD_FAILED, is_joint_stage=False)
    # PCF2 attribution/aggregation and AGGREGATE are joint (two-party) stages.
    PCF2_ATTRIBUTION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_STARTED, completed_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_FAILED, is_joint_stage=True)
    PCF2_AGGREGATION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_STARTED, completed_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PCF2_AGGREGATION_FAILED, is_joint_stage=True)
    AGGREGATE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.AGGREGATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.AGGREGATION_STARTED, completed_status=PrivateComputationInstanceStatus.AGGREGATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.AGGREGATION_FAILED, is_joint_stage=True, timeout=DEFAULT_AGGREGATE_TIMEOUT_IN_SEC)
    POST_PROCESSING_HANDLERS = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_INITIALIZED, started_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_STARTED, completed_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_COMPLETED, failed_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_FAILED, is_joint_stage=False)

    def get_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        """Map MR-specific stages to their services; defer the rest to defaults."""
        logging.info('Start MR stage flow')
        if (self is self.UNION_PID_MR_MULTIKEY):
            # The MR PID stage requires a workflow service to drive the jobs.
            if (args.workflow_svc is None):
                raise NotImplementedError('workflow_svc is None')
            return PIDMRStageService(args.workflow_svc)
        elif (self is self.ID_SPINE_COMBINER):
            return IdSpineCombinerStageService(args.storage_svc, args.onedocker_svc, args.onedocker_binary_config_map, protocol_type=Protocol.MR_PID_PROTOCOL.value)
        elif (self is self.PCF2_ATTRIBUTION):
            return PCF2AttributionStageService(args.onedocker_binary_config_map, args.mpc_svc)
        elif (self is self.PCF2_AGGREGATION):
            return PCF2AggregationStageService(args.onedocker_binary_config_map, args.mpc_svc)
        else:
            return self.get_default_stage_service(args)
_cli.command('up', short_help='Upgrades the database to the selected migration.')
('--revision', '-r', default='head', help='The migration to upgrade to.')
('--dry-run', default=False, is_flag=True, help='Only print SQL instructions, without actually applying the migration.')
_script_info
# NOTE(review): the four lines above appear to be mangled CLI decorators
# (command registration, two option declarations, and a script-info pass
# decorator); restore them before running this command.
def migrations_up(info, revision, dry_run):
    """CLI command: upgrade the database schema to *revision*.

    With --dry-run, only the SQL that would be executed is printed.
    """
    from .orm.migrations.commands import up
    app = info.load_app()
    dbs = info.load_db()
    up(app, dbs, revision, dry_run)
.integration
.ledger
(scope='session')
_for_platform('Linux', skip=False)
def ganache(ganache_configuration, ganache_addr, ganache_port, timeout: float=2.0, max_attempts: int=10):
client = docker.from_env()
image = GanacheDockerImage(client, ' 8545, config=ganache_configuration)
(yield from _launch_image(image, timeout=timeout, max_attempts=max_attempts)) |
def test_add_details_to_slack_alert():
    """add_details_to_slack_alert builds Result/Configuration attachment
    sections only for the keyword arguments actually supplied."""
    block = SlackAlertMessageBuilder.create_divider_block()
    # No details supplied: attachments carry an empty block list.
    message_builder = SlackAlertMessageBuilder()
    message_builder.add_details_to_slack_alert()
    assert (json.dumps(message_builder.slack_message, sort_keys=True) == json.dumps({'blocks': [], 'attachments': [{'blocks': []}]}, sort_keys=True))
    # result only: ':mag: *Result*' header, a divider, then the given blocks.
    message_builder = SlackAlertMessageBuilder()
    message_builder.add_details_to_slack_alert(result=[block, block])
    assert (json.dumps(message_builder.slack_message, sort_keys=True) == json.dumps({'blocks': [], 'attachments': [{'blocks': [{'type': 'section', 'text': {'type': 'mrkdwn', 'text': ':mag: *Result*'}}, {'type': 'divider'}, block, block]}]}, sort_keys=True))
    # configuration only: ':hammer_and_wrench: *Configuration*' section.
    message_builder = SlackAlertMessageBuilder()
    message_builder.add_details_to_slack_alert(configuration=[block, block])
    assert (json.dumps(message_builder.slack_message, sort_keys=True) == json.dumps({'blocks': [], 'attachments': [{'blocks': [{'type': 'section', 'text': {'type': 'mrkdwn', 'text': ':hammer_and_wrench: *Configuration*'}}, {'type': 'divider'}, block, block]}]}, sort_keys=True))
    # Both: Result section is emitted before Configuration.
    message_builder = SlackAlertMessageBuilder()
    message_builder.add_details_to_slack_alert(configuration=[block, block], result=[block, block])
    assert (json.dumps(message_builder.slack_message, sort_keys=True) == json.dumps({'blocks': [], 'attachments': [{'blocks': [{'type': 'section', 'text': {'type': 'mrkdwn', 'text': ':mag: *Result*'}}, {'type': 'divider'}, block, block, {'type': 'section', 'text': {'type': 'mrkdwn', 'text': ':hammer_and_wrench: *Configuration*'}}, {'type': 'divider'}, block, block]}]}, sort_keys=True))
class LoggerAcquireProgress(apt.progress.text.AcquireProgress):
    """Text acquire-progress that sends its output to a logger, not a TTY."""

    def __init__(self, logger):
        class _LoggerStream():
            """Minimal file-like object forwarding non-blank writes to logger.debug."""

            def write(self, text):
                stripped = text.strip()
                if stripped:
                    logger.debug(stripped)

            def flush(self):
                pass

        super().__init__(_LoggerStream())

    def pulse(self, owner):
        # Bypass the text renderer's pulse; use the plain base-class behavior.
        return apt.progress.base.AcquireProgress.pulse(self, owner)
class open_raw_session_result():
    """Thrift-generated result struct for the `open_raw_session` RPC.

    Carries either the successful `Session` (field id 0, `success`) or a
    `SessionException` (field id 1, `se`).
    NOTE(review): this is thrift-compiler output — prefer regenerating from
    the IDL over editing by hand.
    """
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    def isUnion():
        return False

    def read(self, iprot):
        """Deserialize this struct from *iprot* (fast C path when available)."""
        # Fast path: C-accelerated decode for the binary protocol (protoid=0).
        if ((isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            self.checkRequired()
            return
        # Fast path: C-accelerated decode for the compact protocol (protoid=2).
        if ((isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            self.checkRequired()
            return
        # Slow path: generic field-by-field decode.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if (ftype == TType.STOP):
                break
            if (fid == 0):
                if (ftype == TType.STRUCT):
                    self.success = Session()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif (fid == 1):
                if (ftype == TType.STRUCT):
                    self.se = SessionException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field: skip to stay forward-compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.checkRequired()

    def checkRequired(self):
        # Result structs have no required fields.
        return

    def write(self, oprot):
        """Serialize this struct to *oprot* (fast C path when available)."""
        if ((isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if ((isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('open_raw_session_result')
        if (self.success != None):
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if (self.se != None):
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        L = []
        padding = (' ' * 4)
        if (self.success is not None):
            value = pprint.pformat(self.success, indent=0)
            value = padding.join(value.splitlines(True))
            L.append((' success=%s' % value))
        if (self.se is not None):
            value = pprint.pformat(self.se, indent=0)
            value = padding.join(value.splitlines(True))
            L.append((' se=%s' % value))
        return ('%s(%s)' % (self.__class__.__name__, (('\n' + ',\n'.join(L)) if L else '')))

    def __eq__(self, other):
        if (not isinstance(other, self.__class__)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
    # Python 3 disables inherited __hash__ when __eq__ is defined; restore it.
    if (not six.PY2):
        __hash__ = object.__hash__
class ProductFeed(AbstractCrudObject):
def __init__(self, fbid=None, parent_id=None, api=None):
    # Marker used by the SDK to identify ProductFeed instances.
    self._isProductFeed = True
    super(ProductFeed, self).__init__(fbid, parent_id, api)
# Graph API field names available on a ProductFeed node (generated).
class Field(AbstractObject.Field):
    country = 'country'
    created_time = 'created_time'
    default_currency = 'default_currency'
    deletion_enabled = 'deletion_enabled'
    delimiter = 'delimiter'
    encoding = 'encoding'
    file_name = 'file_name'
    id = 'id'
    ingestion_source_type = 'ingestion_source_type'
    item_sub_type = 'item_sub_type'
    latest_upload = 'latest_upload'
    migrated_from_feed_id = 'migrated_from_feed_id'
    name = 'name'
    override_type = 'override_type'
    primary_feeds = 'primary_feeds'
    product_count = 'product_count'
    quoted_fields_mode = 'quoted_fields_mode'
    schedule = 'schedule'
    supplementary_feeds = 'supplementary_feeds'
    update_schedule = 'update_schedule'
    feed_type = 'feed_type'
    override_value = 'override_value'
    primary_feed_ids = 'primary_feed_ids'
    rules = 'rules'
    selected_override_fields = 'selected_override_fields'
# Enum values accepted for the feed file's column delimiter.
class Delimiter():
    autodetect = 'AUTODETECT'
    bar = 'BAR'
    comma = 'COMMA'
    semicolon = 'SEMICOLON'
    tab = 'TAB'
    tilde = 'TILDE'
# Enum values for how the feed is ingested (primary vs supplementary).
class IngestionSourceType():
    primary_feed = 'primary_feed'
    supplementary_feed = 'supplementary_feed'
# Enum values for quoted-field handling in the feed file.
class QuotedFieldsMode():
    autodetect = 'AUTODETECT'
    off = 'OFF'
    on = 'ON'
# Enum values for the feed file's character encoding.
class Encoding():
    autodetect = 'AUTODETECT'
    latin1 = 'LATIN1'
    utf16be = 'UTF16BE'
    utf16le = 'UTF16LE'
    utf32be = 'UTF32BE'
    utf32le = 'UTF32LE'
    utf8 = 'UTF8'
# Enum values for the catalog vertical this feed describes.
class FeedType():
    automotive_model = 'AUTOMOTIVE_MODEL'
    destination = 'DESTINATION'
    flight = 'FLIGHT'
    home_listing = 'HOME_LISTING'
    hotel = 'HOTEL'
    hotel_room = 'HOTEL_ROOM'
    local_inventory = 'LOCAL_INVENTORY'
    media_title = 'MEDIA_TITLE'
    offer = 'OFFER'
    products = 'PRODUCTS'
    transactable_items = 'TRANSACTABLE_ITEMS'
    vehicles = 'VEHICLES'
    vehicle_offer = 'VEHICLE_OFFER'
# Enum values for the item sub-type within a product feed.
class ItemSubType():
    appliances = 'APPLIANCES'
    baby_feeding = 'BABY_FEEDING'
    baby_transport = 'BABY_TRANSPORT'
    beauty = 'BEAUTY'
    bedding = 'BEDDING'
    cameras = 'CAMERAS'
    cell_phones_and_smart_watches = 'CELL_PHONES_AND_SMART_WATCHES'
    cleaning_supplies = 'CLEANING_SUPPLIES'
    clothing = 'CLOTHING'
    clothing_accessories = 'CLOTHING_ACCESSORIES'
    computers_and_tablets = 'COMPUTERS_AND_TABLETS'
    diapering_and_potty_training = 'DIAPERING_AND_POTTY_TRAINING'
    electronics_accessories = 'ELECTRONICS_ACCESSORIES'
    furniture = 'FURNITURE'
    health = 'HEALTH'
    home_goods = 'HOME_GOODS'
    jewelry = 'JEWELRY'
    nursery = 'NURSERY'
    printers_and_scanners = 'PRINTERS_AND_SCANNERS'
    projectors = 'PROJECTORS'
    shoes_and_footwear = 'SHOES_AND_FOOTWEAR'
    software = 'SOFTWARE'
    toys = 'TOYS'
    tvs_and_monitors = 'TVS_AND_MONITORS'
    video_game_consoles_and_video_games = 'VIDEO_GAME_CONSOLES_AND_VIDEO_GAMES'
    watches = 'WATCHES'
# Enum values for feed override behavior.
class OverrideType():
    batch_api_language_or_country = 'BATCH_API_LANGUAGE_OR_COUNTRY'
    catalog_segment_customize_default = 'CATALOG_SEGMENT_CUSTOMIZE_DEFAULT'
    country = 'COUNTRY'
    language = 'LANGUAGE'
    language_and_country = 'LANGUAGE_AND_COUNTRY'
    local = 'LOCAL'
    smart_pixel_language_or_country = 'SMART_PIXEL_LANGUAGE_OR_COUNTRY'
def get_endpoint(cls):
    # Graph API edge name for this node type.
    # NOTE(review): takes `cls` but no @classmethod decorator is visible
    # here — likely stripped; confirm against the generated SDK source.
    return 'product_feeds'
def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create this feed under the given ProductCatalog (delegates to it)."""
    from facebook_business.adobjects.productcatalog import ProductCatalog
    return ProductCatalog(api=self._api, fbid=parent_id).create_product_feed(fields, params, batch, success, failure, pending)
def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a DELETE for this feed node.

    Returns the FacebookRequest when batched or pending; otherwise executes
    it immediately and returns the response.
    """
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Fetch this ProductFeed node (GET /); returns the request when batched/pending, else executes it."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeed, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Update this ProductFeed node (POST /) with the whitelisted params; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'default_currency': 'string', 'deletion_enabled': 'bool', 'delimiter': 'delimiter_enum', 'encoding': 'encoding_enum', 'migrated_from_feed_id': 'string', 'name': 'string', 'quoted_fields_mode': 'quoted_fields_mode_enum', 'schedule': 'string', 'update_schedule': 'string'}
    enums = {'delimiter_enum': ProductFeed.Delimiter.__dict__.values(), 'encoding_enum': ProductFeed.Encoding.__dict__.values(), 'quoted_fields_mode_enum': ProductFeed.QuotedFieldsMode.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeed, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_automotive_models(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /automotive_models edge as AutomotiveModel nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.automotivemodel import AutomotiveModel
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/automotive_models', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AutomotiveModel, api_type='EDGE', response_parser=ObjectParser(target_class=AutomotiveModel, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_destinations(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /destinations edge as Destination nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.destination import Destination
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/destinations', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Destination, api_type='EDGE', response_parser=ObjectParser(target_class=Destination, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_flights(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /flights edge as Flight nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.flight import Flight
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/flights', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Flight, api_type='EDGE', response_parser=ObjectParser(target_class=Flight, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_home_listings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /home_listings edge as HomeListing nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.homelisting import HomeListing
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/home_listings', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=HomeListing, api_type='EDGE', response_parser=ObjectParser(target_class=HomeListing, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_hotels(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /hotels edge as Hotel nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.hotel import Hotel
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/hotels', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Hotel, api_type='EDGE', response_parser=ObjectParser(target_class=Hotel, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_media_titles(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /media_titles edge as MediaTitle nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.mediatitle import MediaTitle
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/media_titles', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=MediaTitle, api_type='EDGE', response_parser=ObjectParser(target_class=MediaTitle, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_products(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /products edge as ProductItem nodes (supports error filtering); batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productitem import ProductItem
    param_types = {'bulk_pagination': 'bool', 'error_priority': 'error_priority_enum', 'error_type': 'error_type_enum', 'filter': 'Object'}
    enums = {'error_priority_enum': ProductItem.ErrorPriority.__dict__.values(), 'error_type_enum': ProductItem.ErrorType.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/products', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductItem, api_type='EDGE', response_parser=ObjectParser(target_class=ProductItem, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_rules(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /rules edge as ProductFeedRule nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productfeedrule import ProductFeedRule
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/rules', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeedRule, api_type='EDGE', response_parser=ObjectParser(target_class=ProductFeedRule, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_rule(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a ProductFeedRule on this feed (POST /rules); batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productfeedrule import ProductFeedRule
    param_types = {'attribute': 'string', 'params': 'map', 'rule_type': 'rule_type_enum'}
    enums = {'rule_type_enum': ProductFeedRule.RuleType.__dict__.values()}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/rules', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeedRule, api_type='EDGE', response_parser=ObjectParser(target_class=ProductFeedRule, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_supplementary_feed_assoc(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Associate supplementary feeds with this feed (POST /supplementary_feed_assocs); batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'assoc_data': 'list<map>'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/supplementary_feed_assocs', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_upload_schedules(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /upload_schedules edge as ProductFeedSchedule nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productfeedschedule import ProductFeedSchedule
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/upload_schedules', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeedSchedule, api_type='EDGE', response_parser=ObjectParser(target_class=ProductFeedSchedule, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_upload_schedule(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create an upload schedule on this feed (POST /upload_schedules); batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {'upload_schedule': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/upload_schedules', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeed, api_type='EDGE', response_parser=ObjectParser(target_class=ProductFeed, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_uploads(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /uploads edge as ProductFeedUpload nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productfeedupload import ProductFeedUpload
    param_types = {}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/uploads', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeedUpload, api_type='EDGE', response_parser=ObjectParser(target_class=ProductFeedUpload, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def create_upload(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Start a feed upload (POST /uploads) from a file or a fetchable URL; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productfeedupload import ProductFeedUpload
    param_types = {'fbe_external_business_id': 'string', 'file': 'file', 'password': 'string', 'update_only': 'bool', 'url': 'string', 'username': 'string'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='POST', endpoint='/uploads', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=ProductFeedUpload, api_type='EDGE', response_parser=ObjectParser(target_class=ProductFeedUpload, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_vehicle_offers(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /vehicle_offers edge as VehicleOffer nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.vehicleoffer import VehicleOffer
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/vehicle_offers', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=VehicleOffer, api_type='EDGE', response_parser=ObjectParser(target_class=VehicleOffer, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
def get_vehicles(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this feed's /vehicles edge as Vehicle nodes; batch-aware."""
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.vehicle import Vehicle
    param_types = {'bulk_pagination': 'bool', 'filter': 'Object'}
    enums = {}
    req = FacebookRequest(node_id=self['id'], method='GET', endpoint='/vehicles', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Vehicle, api_type='EDGE', response_parser=ObjectParser(target_class=Vehicle, api=self._api))
    req.add_params(params)
    req.add_fields(fields)
    if batch is not None:
        req.add_to_batch(batch, success=success, failure=failure)
        return req
    if pending:
        return req
    self.assure_call()
    return req.execute()
_field_types = {'country': 'string', 'created_time': 'datetime', 'default_currency': 'string', 'deletion_enabled': 'bool', 'delimiter': 'Delimiter', 'encoding': 'string', 'file_name': 'string', 'id': 'string', 'ingestion_source_type': 'IngestionSourceType', 'item_sub_type': 'string', 'latest_upload': 'ProductFeedUpload', 'migrated_from_feed_id': 'string', 'name': 'string', 'override_type': 'string', 'primary_feeds': 'list<string>', 'product_count': 'int', 'quoted_fields_mode': 'QuotedFieldsMode', 'schedule': 'ProductFeedSchedule', 'supplementary_feeds': 'list<string>', 'update_schedule': 'ProductFeedSchedule', 'feed_type': 'FeedType', 'override_value': 'string', 'primary_feed_ids': 'list<string>', 'rules': 'list<string>', 'selected_override_fields': 'list<string>'}
def _get_field_enum_info(cls):
    """Map each enum-typed field name to the allowed values of its enum class."""
    return {
        'Delimiter': ProductFeed.Delimiter.__dict__.values(),
        'IngestionSourceType': ProductFeed.IngestionSourceType.__dict__.values(),
        'QuotedFieldsMode': ProductFeed.QuotedFieldsMode.__dict__.values(),
        'Encoding': ProductFeed.Encoding.__dict__.values(),
        'FeedType': ProductFeed.FeedType.__dict__.values(),
        'ItemSubType': ProductFeed.ItemSubType.__dict__.values(),
        'OverrideType': ProductFeed.OverrideType.__dict__.values(),
    }
def using(path, rem_on_start=True, rem_on_end=False):
    """Generator-style context manager around a scratch *path*.

    Optionally clears *path* (file or directory) before yielding and
    optionally removes it again afterwards, logging (not raising) OSError
    during end-of-scope cleanup. Presumably wrapped by
    contextlib.contextmanager in the original source — confirm.
    """
    def _wipe(target):
        # Remove a directory tree or a single file if present.
        if os.path.isdir(target):
            shutil.rmtree(target)
        elif os.path.isfile(target):
            os.remove(target)

    if rem_on_start and os.path.exists(path):
        _wipe(path)
        assert not os.path.exists(path), 'failed to remove: {_coconut_format_0}'.format(_coconut_format_0=path)
    try:
        yield
    finally:
        if rem_on_end:
            try:
                _wipe(path)
            except OSError:
                # Best-effort cleanup: report but never mask the body's outcome.
                traceback.print_exc()
class OptionPlotoptionsHeatmapSonificationDefaultspeechoptionsPointgrouping(Options):
    """Sonification point-grouping options for heatmap series.

    NOTE(review): each name below is defined twice (getter then setter); in the
    original generated source these are presumably @property/@name.setter pairs
    whose decorators were stripped in this copy — confirm before restructuring.
    """
    def algorithm(self):
        # Default grouping algorithm is 'last'.
        return self._config_get('last')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Grouping is on by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Default grouping window is 15 (units per the Highcharts docs — verify).
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Point property used for grouping; defaults to 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsWordcloudSonificationDefaultspeechoptionsActivewhen(Options):
    """Sonification activation conditions for wordcloud speech tracks.

    All defaults are None (unset). NOTE(review): duplicate method names are
    presumably @property/@setter pairs with decorators stripped in this copy —
    confirm before restructuring.
    """
    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Point property the activation condition is evaluated against.
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
_os(*metadata.platforms)
def main():
    """Emulate WinRAR startup-folder persistence by masquerading cmd.exe
    as WinRAR.exe/Ace32Loader.exe, dropping a .bat into the Startup folder,
    then cleaning everything up."""
    common.log('WinRAR StartUp Folder Persistence')
    winrar = Path('WinRAR.exe').resolve()
    loader = Path('Ace32Loader.exe').resolve()
    bat_rel = '\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup\\mssconf.bat'
    bat_abs = os.environ['USERPROFILE'] + bat_rel
    # Stand in cmd.exe for both binaries so the command chain actually runs.
    common.copy_file('C:\\Windows\\System32\\cmd.exe', winrar)
    common.copy_file('C:\\Windows\\System32\\cmd.exe', loader)
    common.execute([winrar, '/c', loader, '/c', 'echo', 'test', '^>', bat_abs], kill=True)
    # Remove every artifact created above.
    common.remove_file(bat_abs)
    common.remove_file(loader)
    common.remove_file(winrar)
class TestUtils():
    """Unit tests for the attribute-name validation helper."""
    def test_is_valid_attr_name(self):
        """Valid Python identifiers pass; non-strings and malformed names fail."""
        accepted = ('_piZZa', 'nice_pizza_with_2_mushrooms', '_2_pizze', '_', '___')
        rejected = ('4', 4, None, '4pizze', 'pizza+')
        for candidate in accepted:
            assert is_valid_attr_name(candidate)
        for candidate in rejected:
            assert not is_valid_attr_name(candidate)
class HistoryView(View):
    """JSON API for creating/updating/deleting History records (superuser only).

    Response codes: 412 = permission/validation failure, 0 = success.
    NOTE(review): the bare `_super_method` expressions below look like stripped
    decorators (e.g. ``@_super_method``) — confirm against the original source.
    """
    _super_method
    def post(self, request, **kwargs):
        # Create a History row, or update the rows matching `nid` from the URL kwargs.
        res = {'msg': '!', 'code': 412, 'self': None}
        if (not request.user.is_superuser):
            res['msg'] = ''
            return JsonResponse(res)
        data = request.data
        form = HistoryForm(data)
        if (not form.is_valid()):
            # clean_form presumably yields (target_field, message) for the form errors — verify.
            (res['self'], res['msg']) = clean_form(form)
            return JsonResponse(res)
        res['code'] = 0
        nid = kwargs.get('nid')
        if nid:
            # Update path: bulk-update all rows sharing this nid.
            history_query = History.objects.filter(nid=nid)
            history_query.update(**form.cleaned_data)
            res['msg'] = ''
            return JsonResponse(res)
        # Create path: no nid in the URL.
        History.objects.create(**form.cleaned_data)
        return JsonResponse(res)
    _super_method
    def delete(self, request, nid):
        # Delete every History row with the given nid; superuser only.
        res = {'msg': '!', 'code': 412}
        if (not request.user.is_superuser):
            res['msg'] = ''
            return JsonResponse(res)
        history_query = History.objects.filter(nid=nid)
        history_query.delete()
        res['code'] = 0
        return JsonResponse(res)
def _get_normal_forms(form):
    """Return (term, expected_normal_form) pairs for normal-form conversion tests.

    `form` selects which expected side is paired with the terms: 'cnf' or 'dnf';
    any other value raises ValueError.
    NOTE(review): the 'dnf' result list has 5 entries while `terms` has 6, so
    zip() silently drops the last term in the DNF case — confirm this is intended.
    """
    terms = [(~ z3_x[1].copy()), (((~ z3_x[1].copy()) | z3_x[2].copy()) & (z3_x[3].copy() | (~ z3_x[1].copy()))), ((((~ z3_x[1].copy()) | z3_x[2].copy()) & (z3_x[3].copy() | (~ z3_x[1].copy()))) & (z3_x[4].copy() | (z3_x[2].copy() & z3_x[3].copy()))), ((z3_x[2].copy() & (~ z3_x[1].copy())) | (z3_x[3].copy() & (~ z3_x[1].copy()))), (((z3_x[1].copy() | (z3_x[2].copy() & (~ z3_x[1].copy()))) | (z3_x[3].copy() & (~ (z3_x[1].copy() | z3_x[2].copy())))) | ((z3_x[5].copy() & z3_x[4].copy()) & (~ z3_x[1].copy()))), (((z3_x[2].copy() | z3_x[4].copy()) & (~ z3_x[1].copy())) | ((z3_x[3].copy() | z3_x[4].copy()) & (z3_x[5].copy() | (~ z3_x[1].copy()))))]
    if (form == 'cnf'):
        result = [(~ z3_x[1].copy()), ((z3_x[2].copy() | (~ z3_x[1].copy())) & (z3_x[3].copy() | (~ z3_x[1].copy()))), ((((z3_x[2].copy() | (~ z3_x[1].copy())) & (z3_x[3].copy() | (~ z3_x[1].copy()))) & (z3_x[2].copy() | z3_x[4].copy())) & (z3_x[3].copy() | z3_x[4].copy())), ((z3_x[2].copy() | z3_x[3].copy()) & (~ z3_x[1].copy())), ((((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) | z3_x[5].copy()) & (((z3_x[1].copy() | z3_x[4].copy()) | z3_x[2].copy()) | z3_x[3].copy())), ((((z3_x[2].copy() | z3_x[3].copy()) | z3_x[4].copy()) & (((~ z3_x[1].copy()) | z3_x[3].copy()) | z3_x[4].copy())) & ((~ z3_x[1].copy()) | z3_x[5].copy()))]
    elif (form == 'dnf'):
        result = [(~ z3_x[1].copy()), ((~ z3_x[1].copy()) | (z3_x[3].copy() & z3_x[2].copy())), ((z3_x[3].copy() & z3_x[2].copy()) | (z3_x[4].copy() & (~ z3_x[1].copy()))), ((z3_x[2].copy() & (~ z3_x[1].copy())) | (z3_x[3].copy() & (~ z3_x[1].copy()))), (((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) | (z3_x[5].copy() & z3_x[4].copy()))]
    else:
        # f-string here has no placeholders; a plain string literal would suffice.
        raise ValueError(f'wrong input')
    return [(term, normal_form) for (term, normal_form) in zip(terms, result)]
def lazy_import():
    """Import the fastly model classes on demand and publish them into this
    module's globals (avoids circular imports at module load time)."""
    from fastly.model.aws_region import AwsRegion
    from fastly.model.logging_format_version_string import LoggingFormatVersionString
    from fastly.model.logging_kinesis_additional import LoggingKinesisAdditional
    from fastly.model.logging_placement import LoggingPlacement
    from fastly.model.service_id_and_version_string import ServiceIdAndVersionString
    from fastly.model.timestamps import Timestamps
    globals().update(
        AwsRegion=AwsRegion,
        LoggingFormatVersionString=LoggingFormatVersionString,
        LoggingKinesisAdditional=LoggingKinesisAdditional,
        LoggingPlacement=LoggingPlacement,
        ServiceIdAndVersionString=ServiceIdAndVersionString,
        Timestamps=Timestamps,
    )
def get_version():
    """Read __version__ out of video_transformers/__init__.py next to this file."""
    here = os.path.abspath(os.path.dirname(__file__))
    init_path = os.path.join(here, 'video_transformers', '__init__.py')
    with io.open(init_path, encoding='utf-8') as fh:
        match = re.search('^__version__ = [\\\'"]([^\\\'"]*)[\\\'"]', fh.read(), re.M)
    return match.group(1)
class ExerciseInfo():
    """Metadata describing one exercise directory on disk.

    NOTE(review): ``__init__ = _custom_dataclass_init`` plus bare annotated
    fields suggests a dataclass-style class, and the zero-argument methods
    below are used attribute-style (see load_config calling ``self.config_file``
    without parentheses), so they are presumably @property methods whose
    decorators were stripped in this copy — confirm against the original.
    """
    __init__ = _custom_dataclass_init
    path: Path
    slug: str
    name: str
    uuid: str
    prerequisites: List[str]
    type: str = 'practice'
    status: ExerciseStatus = ExerciseStatus.Active
    concepts: List[str] = None  # normalized to [] in __post_init__
    difficulty: int = 1
    topics: List[str] = None  # normalized to [] in __post_init__
    practices: List[str] = None  # normalized to [] in __post_init__
    def __post_init__(self):
        # Replace None list defaults with fresh lists and coerce status strings to the enum.
        if (self.concepts is None):
            self.concepts = []
        if (self.topics is None):
            self.topics = []
        if (self.practices is None):
            self.practices = []
        if isinstance(self.status, str):
            self.status = ExerciseStatus(self.status)
    def solution_stub(self):
        # First .py in the exercise dir that is neither a *_test.py nor example.py, or None.
        return next((p for p in self.path.glob('*.py') if ((not p.name.endswith('_test.py')) and (p.name != 'example.py'))), None)
    def helper_file(self):
        # Optional *_data.py helper module, or None.
        return next(self.path.glob('*_data.py'), None)
    def test_file(self):
        # The exercise's *_test.py, or None.
        return next(self.path.glob('*_test.py'), None)
    def meta_dir(self):
        return (self.path / '.meta')
    def exemplar_file(self):
        # Concept exercises use exemplar.py; practice exercises use example.py.
        if (self.type == 'concept'):
            return (self.meta_dir / 'exemplar.py')
        return (self.meta_dir / 'example.py')
    def template_path(self):
        return (self.meta_dir / 'template.j2')
    def config_file(self):
        return (self.meta_dir / 'config.json')
    def load_config(self) -> ExerciseConfig:
        """Load this exercise's .meta/config.json via ExerciseConfig.load."""
        return ExerciseConfig.load(self.config_file)
def extract_forkid(enr: ENRAPI) -> ForkID:
    """Extract the ForkID from an ENR's ``eth`` capability entry.

    Raises ENRMissingForkID when the record has no ``eth`` entry, and
    MalformedMessage when the entry cannot be RLP-decoded as ``[ForkID]``.
    """
    try:
        eth_cap = enr[b'eth']
    except KeyError:
        raise ENRMissingForkID()
    try:
        [forkid] = rlp.sedes.List([ForkID]).deserialize(eth_cap)
        return forkid
    except rlp.exceptions.ListDeserializationError:
        # Bug fix: the original string lacked the f-prefix, so the literal text
        # "Unable to extract ForkID from {eth_cap}" was raised without interpolation.
        raise MalformedMessage(f'Unable to extract ForkID from {eth_cap}')
class GroupsScannerTest(ForsetiTestCase):
    """Tests for groups_scanner: tree building, rule application, violations."""
    def setUp(self):
        pass
    def _render_ascii(self, starting_node, attr):
        """Render an anytree tree as ASCII art, one JSON-encoded row per value line."""
        rows = []
        for (pre, fill, node) in anytree.RenderTree(starting_node, style=anytree.AsciiStyle()):
            value = getattr(node, attr, '')
            if isinstance(value, (list, tuple)):
                lines = value
            else:
                lines = str(value).split('\n')
            # First line carries the tree prefix; continuation lines use the fill.
            rows.append((u'%s%s' % (pre, json.dumps(lines[0], sort_keys=True))))
            for line in lines[1:]:
                rows.append((u'%s%s' % (fill, json.dumps(line, sort_keys=True))))
        return '\n'.join(rows)
    def _create_mock_service_config(self):
        """Build a mock service config whose data access yields the fake group fixtures."""
        mock_data_access = mock.MagicMock()
        mock_data_access.iter_groups.return_value = fake_data.ALL_GROUPS
        mock_data_access.return_value = fake_data.ALL_GROUPS
        # One expand_members result per group, in iteration order.
        mock_data_access.expand_members.side_effect = [fake_data.AAAAA_GROUP_MEMBERS, fake_data.BBBBB_GROUP_MEMBERS, fake_data.CCCCC_GROUP_MEMBERS, fake_data.DDDDD_GROUP_MEMBERS]
        mock_service_config = mock.MagicMock()
        mock_service_config.model_manager.get.return_value = (mock.MagicMock(), mock_data_access)
        return mock_service_config
    def test_groups_scanner(self):
        """End-to-end: build the tree, apply rules, expect exactly three violations."""
        mock_service_config = self._create_mock_service_config()
        scanner = groups_scanner.GroupsScanner({}, {}, mock_service_config, '', '', '')
        root = scanner._build_group_tree()
        self.assertEqual(fake_data.EXPECTED_MEMBERS_IN_TREE, self._render_ascii(root, 'member_email'))
        with open('tests/scanner/test_data/fake_group_rules.yaml', 'r') as f:
            # NOTE(review): yaml.load without an explicit Loader is deprecated and
            # unsafe on untrusted input; yaml.safe_load would likely suffice here — confirm.
            rules = yaml.load(f)
        root_with_rules = scanner._apply_all_rules(root, rules)
        self.assertEqual(fake_data.EXPECTED_MEMBERS_IN_TREE, self._render_ascii(root_with_rules, 'member_email'))
        self.assertEqual(fake_data.EXPECTED_RULES_IN_TREE, self._render_ascii(root_with_rules, 'rules'))
        all_violations = scanner._find_violations(root_with_rules)
        self.assertEqual(3, len(all_violations))
        for violation in all_violations:
            self.assertEqual('', violation.member_email)
# NOTE(review): bare call below looks like a stripped decorator for the class,
# e.g. @requires_toolkit([ToolkitName.qt, ToolkitName.wx]) — confirm.
_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestInstanceEditor(BaseTestMixin, unittest.TestCase):
    """GUI tests for the TraitsUI InstanceEditor (simple and custom styles)."""
    def setUp(self):
        # Shared fixture setup/teardown lives in BaseTestMixin.
        BaseTestMixin.setUp(self)
    def tearDown(self):
        BaseTestMixin.tearDown(self)
def test_simple_editor(self):
    """Simple style: click to open the nested editor, type into 'value', check the model."""
    obj = ObjectWithInstance()
    tester = UITester()
    with tester.create_ui(obj, {'view': simple_view}) as ui:
        instance = tester.find_by_name(ui, 'inst')
        # The simple editor opens its panel on click.
        instance.perform(MouseClick())
        value_txt = instance.find_by_name('value')
        value_txt.perform(KeySequence('abc'))
        self.assertEqual(obj.inst.value, 'abc')
def test_custom_editor(self):
    """Custom style: the nested 'value' field is reachable without a click."""
    obj = ObjectWithInstance()
    tester = UITester()
    with tester.create_ui(obj, {'view': custom_view}) as ui:
        value_txt = tester.find_by_name(ui, 'inst').find_by_name('value')
        value_txt.perform(KeySequence('abc'))
        self.assertEqual(obj.inst.value, 'abc')
def test_custom_editor_with_selection(self):
    """Selecting a list item sets the trait and populates the nested editor."""
    obj = ObjectWithList()
    tester = UITester()
    with tester.create_ui(obj, {'view': selection_view}) as ui:
        self.assertIsNone(obj.inst)
        instance = tester.find_by_name(ui, 'inst')
        text = instance.inspect(SelectedText())
        self.assertEqual(text, '')
        instance.locate(Index(1)).perform(MouseClick())
        self.assertIs(obj.inst, obj.inst_list[1])
        text = instance.inspect(SelectedText())
        self.assertEqual(text, obj.inst_list[1].name)
        value_txt = instance.find_by_name('value')
        # Typing appends to the selected item's existing value ('two').
        value_txt.perform(KeySequence('abc'))
        self.assertEqual(obj.inst.value, 'twoabc')
def test_custom_editor_with_selection_initialized(self):
    """A pre-set selection is reflected in the selector text on open."""
    obj = ObjectWithList()
    obj.inst = obj.inst_list[1]
    tester = UITester()
    with tester.create_ui(obj, {'view': selection_view}) as ui:
        self.assertIs(obj.inst, obj.inst_list[1])
        instance = tester.find_by_name(ui, 'inst')
        text = instance.inspect(SelectedText())
        self.assertEqual(text, obj.inst.name)
def test_custom_editor_with_selection_change_option_name(self):
    """Renaming the selected object updates the selector's displayed text."""
    obj = ObjectWithList()
    tester = UITester()
    with tester.create_ui(obj, {'view': selection_view}) as ui:
        self.assertIsNone(obj.inst)
        instance = tester.find_by_name(ui, 'inst')
        instance.locate(Index(0)).perform(MouseClick())
        self.assertIs(obj.inst, obj.inst_list[0])
        name_txt = instance.find_by_name('name')
        # Clear the existing (3-character) name, then type the new one.
        for _ in range(3):
            name_txt.perform(KeyClick('Backspace'))
        name_txt.perform(KeySequence('Something New'))
        text = instance.inspect(SelectedText())
        self.assertEqual(text, 'Something New')
        self.assertEqual('Something New', obj.inst_list[0].name)
def test_custom_editor_resynch_editor(self):
    """Custom style: a model-side change is re-displayed by the nested editor."""
    edited_inst = EditedInstance(value='hello')
    obj = ObjectWithInstance(inst=edited_inst)
    tester = UITester()
    with tester.create_ui(obj, {'view': custom_view}) as ui:
        value_txt = tester.find_by_name(ui, 'inst').find_by_name('value')
        displayed = value_txt.inspect(DisplayedText())
        self.assertEqual(displayed, 'hello')
        # Mutate the model directly; the editor must resynchronize.
        edited_inst.value = 'bye'
        displayed = value_txt.inspect(DisplayedText())
        self.assertEqual(displayed, 'bye')
def test_simple_editor_resynch_editor(self):
    """Simple style: same resynchronization after opening the panel with a click."""
    edited_inst = EditedInstance(value='hello')
    obj = ObjectWithInstance(inst=edited_inst)
    tester = UITester()
    with tester.create_ui(obj, {'view': simple_view}) as ui:
        instance = tester.find_by_name(ui, 'inst')
        instance.perform(MouseClick())
        value_txt = instance.find_by_name('value')
        displayed = value_txt.inspect(DisplayedText())
        self.assertEqual(displayed, 'hello')
        edited_inst.value = 'bye'
        displayed = value_txt.inspect(DisplayedText())
        self.assertEqual(displayed, 'bye')
def test_simple_editor_parent_closed(self):
    """Open the simple editor's panel, then let the parent UI close — must not crash."""
    obj = ObjectWithInstance()
    tester = UITester()
    with tester.create_ui(obj, {'view': simple_view}) as ui:
        instance = tester.find_by_name(ui, 'inst')
        instance.perform(MouseClick())
# NOTE(review): the bare tuple below looks like a stripped skip decorator, e.g.
# @unittest.skipIf(no_modal_dialog_tester, 'ModalDialogTester unavailable') — confirm.
(no_modal_dialog_tester, 'ModalDialogTester unavailable')
def test_simple_editor_modal(self):
obj = ObjectWithInstance()
ui_tester = UITester()
with ui_tester.create_ui(obj, dict(view=modal_view)) as ui:
def click_button():
ui_tester.find_by_name(ui, 'inst').perform(MouseClick())
def when_opened(tester):
with tester.capture_error():
try:
dialog_ui = tester.get_dialog_widget()._ui
ui_tester = UITester(auto_process_events=False)
value = ui_tester.find_by_name(dialog_ui, 'value')
value.perform(KeySequence('Hello'))
self.assertEqual(obj.inst.value, '')
ok_button = ui_tester.find_by_id(dialog_ui, 'OK')
ok_button.perform(MouseClick())
finally:
if (tester.get_dialog_widget() is not None):
tester.close(accept=True)
mdtester = ModalDialogTester(click_button)
mdtester.open_and_run(when_opened=when_opened)
self.assertTrue(mdtester.dialog_was_opened)
self.assertEqual(obj.inst.value, 'Hello')
def test_propagate_errors(self):
    """Validation errors raised in the instance editor's sub-UI propagate
    to the parent UI and disable its OK button."""
    obj = ObjectWithValidatedInstance()
    tester = UITester()
    with tester.create_ui(obj) as ui:
        something_ui = tester.find_by_name(ui, 'something')
        field = something_ui.locate(TargetByName('some_string'))
        field.perform(KeySequence('abcd'))
        field.perform(KeyClick('Enter'))
        ok_button = tester.find_by_id(ui, 'OK')
        sub_ui = something_ui._target._ui
        # The instance editor owns a distinct UI parented to the main one.
        self.assertNotEqual(sub_ui, ui)
        self.assertEqual(sub_ui.parent, ui)
        self.assertEqual(sub_ui.errors, ui.errors)
        self.assertFalse(ok_button.inspect(IsEnabled()))
def test_propagate_errors_switch_selection(self):
    """Errors from the selected item's sub-UI propagate to the parent,
    and clear again when the selection changes to a valid item."""
    obj = ObjectWithValidatedList()
    tester = UITester()
    with tester.create_ui(obj, {'view': selection_view}) as ui:
        something_ui = tester.find_by_name(ui, 'inst')
        something_ui.locate(Index(0)).perform(MouseClick())
        field = something_ui.locate(TargetByName('some_string'))
        field.perform(KeySequence('bcde'))
        field.perform(KeyClick('Enter'))
        ok_button = tester.find_by_id(ui, 'OK')
        sub_ui = something_ui._target._ui
        self.assertNotEqual(sub_ui, ui)
        self.assertEqual(sub_ui.parent, ui)
        self.assertEqual(sub_ui.errors, ui.errors)
        self.assertFalse(ok_button.inspect(IsEnabled()))
        # Switching to another (valid) selection clears the error state.
        something_ui.locate(Index(1)).perform(MouseClick())
        self.assertTrue(ok_button.inspect(IsEnabled()))
def test_none_selected(self):
    """Resetting the trait to None — directly or by replacing the options
    list — empties the selection text."""
    obj = ObjectWithList()
    tester = UITester()
    with tester.create_ui(obj, {'view': none_view}) as ui:
        self.assertIsNone(obj.inst)
        editor = tester.find_by_name(ui, 'inst')
        self.assertEqual(editor.inspect(SelectedText()), '')

        # Select the second item, then reset the trait to None.
        editor.locate(Index(1)).perform(MouseClick())
        self.assertIs(obj.inst, obj.inst_list[1])
        self.assertEqual(editor.inspect(SelectedText()), obj.inst_list[1].name)
        tester.find_by_name(ui, 'reset_to_none').perform(MouseClick())
        self.assertIsNone(obj.inst)
        self.assertEqual(editor.inspect(SelectedText()), '')

        # Select again, then swap out the options entirely.
        editor.locate(Index(1)).perform(MouseClick())
        self.assertIs(obj.inst, obj.inst_list[1])
        self.assertEqual(editor.inspect(SelectedText()), obj.inst_list[1].name)
        tester.find_by_name(ui, 'change_options').perform(MouseClick())
        self.assertIsNone(obj.inst)
        self.assertEqual(editor.inspect(SelectedText()), '')
def test_droppable(self):
    """Non-editable droppable views can be created and disposed without
    error; no interaction is exercised."""
    obj = ObjectWithInstance()
    obj_with_list = ObjectWithList()
    tester = UITester()
    with tester.create_ui(obj, {'view': non_editable_droppable_view}):
        pass
    with tester.create_ui(obj_with_list, {'view': non_editable_droppable_selectable_view}):
        pass
class Solaar(IntervalModule):
    """Displays the battery status of a Logitech Unifying device as
    reported by the ``solaar`` command-line tool."""

    color = '#FFFFFF'
    error_color = '#FF0000'
    interval = 30
    settings = (
        ('nameOfDevice', "name of the logitech's unifying device"),
        ('color', 'standard color'),
        ('error_color', 'color to use when non zero exit code is returned'),
    )
    required = ('nameOfDevice',)

    def findDeviceNumber(self):
        """Return the device number for ``self.nameOfDevice``.

        Raises DeviceNotFound if no matching line appears in the output
        of ``solaar show``.
        """
        retvalue, out, stderr = run_through_shell('solaar show', enable_shell=True)
        for line in out.split('\n'):
            # Device lines look like "<number>: <name> ...".
            if line.count(self.nameOfDevice) > 0 and line.count(':') > 0:
                return line.split(':')[0]
        raise DeviceNotFound()

    def findBatteryStatus(self, numberOfDevice):
        """Return the battery-status string for the given device number.

        Raises NoBatteryStatus when the device is offline or the output
        cannot be parsed.
        """
        retvalue, out, stderr = run_through_shell(
            'solaar show %s' % numberOfDevice, enable_shell=True)
        for line in out.split('\n'):
            if line.count('Battery') > 0:
                if line.count(':') > 0:
                    # "Battery: <status>," -> "<status>"
                    return line.split(':')[1].strip().strip(',')
                elif line.count('offline'):
                    raise NoBatteryStatus('offline')
                else:
                    raise NoBatteryStatus('unknown')
        raise NoBatteryStatus('unknown/error')

    def run(self):
        self.output = {}
        try:
            status = self.findBatteryStatus(self.findDeviceNumber())
            self.output['color'] = self.color
        except DeviceNotFound:
            status = 'device absent'
            self.output['color'] = self.error_color
        except NoBatteryStatus as e:
            # NoBatteryStatus carries a `.message` attribute (project-defined).
            status = e.message
            self.output['color'] = self.error_color
        self.output['full_text'] = status
class Ml_modelSerializer(serializers.ModelSerializer):
    """Serializes an Ml_model, adding an ``access`` flag that tells
    whether the requesting user owns or has purchased the model."""

    access = serializers.SerializerMethodField('_get_access')

    def __init__(self, *args, **kwargs):
        # `user` is a required extra kwarg so `access` can be computed
        # per request; it must be popped before the base initializer
        # sees the kwargs.
        user = kwargs.pop('user')
        super().__init__(*args, **kwargs)
        self.user = user

    def _get_access(self, ml_model):
        """Return True when ``self.user`` owns or purchased ``ml_model``."""
        # .exists() avoids fetching full rows just to test membership.
        if Ml_model.objects.filter(pk=ml_model.id, user=self.user).exists():
            return True
        return Ml_model.objects.filter(
            pk=ml_model.id, purchased__id__exact=self.user.id
        ).exists()

    class Meta:
        model = Ml_model
        fields = [
            'id', 'name', 'model_name', 'username', 'description',
            'version', 'eval_metrics', 'columns', 'access',
        ]
class Ml_modelsList(APIView):
    """List the authenticated user's ML models, or register a new one."""

    permission_classes = [AUTH_METHOD]

    def get(self, request, format=None):
        """Return all models owned by the requesting user."""
        ml_models = Ml_model.objects.filter(user_id=request.user.id)
        # Fixes two defects: the serializer class is `Ml_modelSerializer`
        # (the lowercase name used previously is undefined here), and the
        # real user must be passed — the serializer's `access` field
        # dereferences `user.id`, so `user=None` would raise.
        serializer = Ml_modelSerializer(ml_models, user=request.user, many=True)
        return Response(serializer.data)

    def post(self, request, format=None):
        """Validate and save a new model; 201 on success, 400 otherwise."""
        serializer = Ml_modelSerializer(data=request.data, user=request.user)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ProjectTest(ForsetiTestCase):
    """Unit tests for the Project resource: getters, equality, and
    inequality with other resource types.

    The original source had six ``... =`` lines with no right-hand side
    (redacted numeric literals) — a syntax error.  Concrete integers are
    restored below; any value works as long as the equality tests use
    matching numbers and the inequality tests use differing ones.
    """

    def setUp(self):
        # NOTE(review): the org id was an empty string in the original —
        # presumably a redacted numeric id; kept as-is.
        self.org = Organization('', display_name='My org name')
        self.folder = Folder('55555', display_name='My folder', parent=self.org)
        self.project1 = Project('project-1', 11111, display_name='Project 1')
        self.project2 = Project('project-2', 22222, display_name='Project 2', parent=self.org)
        self.project3 = Project('project-3', 33333, display_name='Project 3', parent=self.folder)

    def test_create_project_getters_are_correct(self):
        """All constructor arguments are exposed through the getters."""
        my_project_id = 'my-projectid-1'
        my_project_number = 12345678
        my_project_name = 'My project name'
        project = Project(
            my_project_id,
            project_number=my_project_number,
            display_name=my_project_name,
            lifecycle_state=ProjectLifecycleState.ACTIVE,
        )
        self.assertEqual(my_project_id, project.id)
        self.assertEqual(my_project_number, project.project_number)
        self.assertEqual(Project.RESOURCE_NAME_FMT % my_project_id, project.name)
        self.assertEqual(my_project_name, project.display_name)
        self.assertEqual(ResourceType.PROJECT, project.type)
        self.assertEqual(None, project.parent)
        self.assertEqual(ProjectLifecycleState.ACTIVE, project.lifecycle_state)

    def test_project_equals_other_project_is_true(self):
        """Projects with identical id, number, and name compare equal."""
        id_1 = 'my-project-1'
        number_1 = 11111
        name_1 = 'My project 1'
        project1 = Project(id_1, number_1, display_name=name_1)
        id_2 = 'my-project-1'
        number_2 = 11111
        name_2 = 'My project 1'
        project2 = Project(id_2, number_2, display_name=name_2)
        self.assertTrue(project1 == project2)

    def test_project_notequals_other_project_is_true(self):
        """Projects with different ids, numbers, and names compare unequal."""
        id_1 = 'my-project-1'
        number_1 = 11111
        name_1 = 'My project 1'
        project1 = Project(id_1, number_1, display_name=name_1)
        id_2 = 'my-project-2'
        number_2 = 22222
        name_2 = 'My project 2'
        project2 = Project(id_2, number_2, display_name=name_2)
        self.assertTrue(project1 != project2)

    def test_project_notequals_org_is_true(self):
        """A project never compares equal to an organization."""
        id_1 = 'my-project-1'
        number_1 = 11111
        name_1 = 'My project 1'
        project = Project(id_1, number_1, display_name=name_1)
        # NOTE(review): blank org id kept from the original source.
        id_2 = ''
        name_2 = 'My org 1'
        org = Organization(id_2, display_name=name_2)
        self.assertTrue(project != org)
def _get_leaves(
    session: Session, issue_instance_id: DBID, kind: SharedTextKind
) -> Set[str]:
    """Return the contents of every shared text of ``kind`` that is
    associated with the given issue instance."""
    # Ids of the shared texts linked to this issue instance.
    associated_query = (
        session.query(SharedText.id)
        .distinct(SharedText.id)
        .join(
            IssueInstanceSharedTextAssoc,
            SharedText.id == IssueInstanceSharedTextAssoc.shared_text_id,
        )
        .filter(IssueInstanceSharedTextAssoc.issue_instance_id == issue_instance_id)
        .filter(SharedText.kind == kind)
    )
    associated_ids = [int(row_id) for (row_id,) in associated_query]
    # Map every shared-text id of this kind to its contents.
    contents_by_id = {
        int(text_id): contents
        for text_id, contents in session.query(
            SharedText.id, SharedText.contents
        ).filter(SharedText.kind == kind)
    }
    # Drop ids with no contents row rather than raising.
    return {contents_by_id[i] for i in associated_ids if i in contents_by_id}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.