code stringlengths 281 23.7M |
|---|
def check_binary():
    """Verify that the OVERLAP_EXEC native binary is present and runnable.

    Returns:
        bool: True when the binary is found on PATH and ``--help`` exits
        with status 0; False otherwise (the failure is logged).
    """
    binary = which(OVERLAP_EXEC)
    if not binary:
        logger.error('"%s" native module not found', OVERLAP_EXEC)
        return False
    try:
        # BUG FIX: the original opened os.devnull without ever closing it,
        # leaking a file handle on every call; subprocess.DEVNULL avoids
        # the explicit file entirely.
        subprocess.check_call([OVERLAP_EXEC, '--help'], stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError as e:
        logger.error('Some error inside native module: %s', str(e))
        return False
    return True
class RichText(models.Model):
    """Abstract Django model providing a single cleansed rich-text field."""
    # Content is sanitized by CleansedRichTextField; 'richtext-plugin'
    # selects the editor configuration.
    text = CleansedRichTextField(_('text'), config_name='richtext-plugin')

    class Meta():
        abstract = True
        verbose_name = _('rich text')
        verbose_name_plural = _('rich texts')

    def __str__(self):
        # Strip HTML tags and show only the first 10 words for display.
        return Truncator(strip_tags(self.text)).words(10, truncate=' ...')
def observation_spaces_to_in_shapes(observation_spaces: Dict[(Union[(int, str)], gym.spaces.Dict)]) -> Dict[(Union[(int, str)], Dict[(str, Sequence[int])])]:
    """Map each observation space to the shapes of its Box sub-spaces.

    :param observation_spaces: mapping of step/agent key -> gym Dict space.
    :return: mapping of the same keys -> {sub-space name: shape tuple}.
    :raises AssertionError: when a value is not a Dict space or one of its
        sub-spaces is not a Box space.
    """
    shapes_by_key = dict()
    for obs_key, space_dict in observation_spaces.items():
        assert isinstance(space_dict, gym.spaces.Dict)
        key_shapes = dict()
        for sub_key, sub_space in space_dict.spaces.items():
            assert isinstance(sub_space, gym.spaces.Box), f'Only box observation spaces supported at this point, but got: {type(sub_space)}'
            key_shapes[sub_key] = sub_space.shape
        shapes_by_key[obs_key] = key_shapes
    return shapes_by_key
# NOTE(review): this bare string looks like the argument of a stripped
# registry decorator (e.g. @registry.reg('cuda.perm102_bmm_rrr_bias.gen_function'))
# lost in extraction -- confirm against the original file.
('cuda.perm102_bmm_rrr_bias.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate CUDA source for the perm102 bmm_rrr + bias operator.

    Renders problem arguments for both CUTLASS 2.x and 3.x templates and
    delegates final code generation to ``common.gen_function``.
    """
    bmm_problem_info = _get_strided_problem_info(func_attrs)
    problem_args = bmm_common.PROBLEM_ARGS_TEMPLATE.render(mm_info=bmm_problem_info)
    problem_args_cutlass_3x = bmm_common.PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(mm_info=bmm_common.add_elem_types_to_mm_info(mm_info=bmm_problem_info, func_attrs=func_attrs))
    # Operand ranks come from each accessor's original (pre-packed) shapes.
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    output_ndims = len(func_attrs['output_accessors'][0].original_shapes)
    return common.gen_function(func_attrs=func_attrs, src_template=common_bias.SRC_TEMPLATE, exec_cond_template=exec_cond_template, problem_args=problem_args, problem_args_cutlass_3x=problem_args_cutlass_3x, input_ndims=input_ndims, weight_ndims=weight_ndims, output_ndims=output_ndims, dim_info_dict=dim_info_dict, output_addr_calculator=get_output_addr_calculator(func_attrs))
.unit
def test_find_uncategorized_dataset_fields_missing_field() -> None:
    """A source field absent from the existing dataset is uncategorized."""
    # The source resource exposes fields '4' and '5'; the existing dataset
    # only categorizes field 4 (note: name=4 is an int here -- presumably
    # coerced/compared as '4'; confirm against DatasetField semantics).
    test_resource = {'bar': ['4', '5']}
    test_resource_dataset = _dataset.create_db_dataset('ds', test_resource)
    existing_dataset = Dataset(name='ds', fides_key='ds', collections=[DatasetCollection(name='bar', fields=[DatasetField(name=4, data_categories=['category_1'])])])
    (uncategorized_keys, total_field_count) = _dataset.find_uncategorized_dataset_fields(existing_dataset=existing_dataset, source_dataset=test_resource_dataset)
    # Only 'ds.bar.5' lacks categories; two fields were inspected in total.
    assert (set(uncategorized_keys) == {'ds.bar.5'})
    assert (total_field_count == 2)
def main(args=None):
    """Merge WOFF/WOFF2 metadata from an XML file into a font file.

    Usage: merge_woff_metadata.py METADATA.xml INPUT.woff [OUTPUT.woff]

    Returns 1 on usage/input errors; implicitly None on success.
    """
    if args is None:
        args = sys.argv[1:]
    if len(args) < 2:
        print('usage: merge_woff_metadata.py METADATA.xml INPUT.woff [OUTPUT.woff]', file=sys.stderr)
        return 1
    metadata_file, infile = args[0], args[1]
    with open(metadata_file, 'rb') as f:
        metadata = f.read()
    # Default output name is derived from the input file when not given.
    if len(args) > 2:
        outfile = args[2]
    else:
        filename, ext = os.path.splitext(infile)
        outfile = makeOutputFileName(filename, None, ext)
    font = TTFont(infile)
    if font.flavor not in ('woff', 'woff2'):
        print('Input file is not a WOFF or WOFF2 font', file=sys.stderr)
        return 1
    # Attach the raw XML bytes as the font's metaData and write it out.
    flavor_data = font.flavorData
    flavor_data.metaData = metadata
    font.save(outfile)
.parametrize(('input_data', 'expected_output'), [({'parameter': {'output': 0}}, '0'), ({'parameter': {'output': b64encode(b'').decode()}}, ''), ({'parameter': {'output': b64encode(b'foobar').decode()}}, 'foobar'), ({'parameter': {'output': 'no_b64'}}, 'decoding error: no_b64')])
def test_decode_output_values(input_data, expected_output):
    """decode_output_values must return str leaves, base64-decoding where
    possible and reporting a decoding error message otherwise.

    NOTE(review): driven by a @pytest.mark.parametrize decorator whose
    leading text was lost in extraction (see the orphaned `.parametrize(...)`
    line above in the original file).
    """
    results = qemu_exec.decode_output_values(input_data)
    # Every leaf value must already be a string after decoding.
    assert all((isinstance(value, str) for parameter_result in results.values() for value in parameter_result.values()))
    assert (results['parameter']['output'] == expected_output)
class OptionSeriesColumnrangeSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Accessors for the sonification lowpass-frequency mapping options.

    NOTE(review): every option appears twice -- a getter-style def followed by
    a setter-style def with the same name. As written the later definition
    overwrites the earlier one; in the original generated code these are
    presumably @property / @<name>.setter pairs whose decorators were lost in
    extraction -- confirm against the generator before relying on this class.
    """
    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestScaffoldConnectionFailsWhenConnectionAlreadyExists():
    """End-to-end CLI test: scaffolding the same connection twice must fail.

    NOTE(review): setup_class/teardown_class take ``cls`` -- pytest invokes
    class-level fixtures on the class, so the (possibly stripped)
    @classmethod decorators are not strictly required under pytest.
    """
    def setup_class(cls):
        """Create a temp agent workspace and scaffold the connection twice."""
        cls.runner = CliRunner()
        cls.agent_name = 'myagent'
        cls.resource_name = 'myresource'
        cls.cwd = os.getcwd()
        cls.t = tempfile.mkdtemp()
        # Copy the packages registry into the temp workspace.
        dir_path = Path('packages')
        tmp_dir = (cls.t / dir_path)
        src_dir = (cls.cwd / Path(ROOT_DIR, dir_path))
        shutil.copytree(str(src_dir), str(tmp_dir))
        os.chdir(cls.t)
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'init', '--author', AUTHOR])
        assert (result.exit_code == 0)
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', cls.agent_name], standalone_mode=False)
        assert (result.exit_code == 0)
        os.chdir(cls.agent_name)
        # First scaffold succeeds; the second (stored for the tests) must fail.
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'scaffold', 'connection', cls.resource_name], standalone_mode=False)
        assert (result.exit_code == 0)
        cls.result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'scaffold', 'connection', cls.resource_name], standalone_mode=False)

    def test_exit_code_equal_to_1(self):
        assert (self.result.exit_code == 1)

    def test_error_message_connection_already_existing(self):
        s = "A connection with name '{}' already exists. Aborting...".format(self.resource_name)
        assert (self.result.exception.message == s)

    def test_resource_directory_exists(self):
        # The first scaffold's directory must survive the failed second one.
        assert Path(self.t, self.agent_name, 'connections', self.resource_name).exists()

    def teardown_class(cls):
        """Restore the working directory and best-effort remove the temp dir."""
        os.chdir(cls.cwd)
        try:
            shutil.rmtree(cls.t)
        except (OSError, IOError):
            pass
def clone_to_tmp_dir(app):
    """Clone the app's repository into tmp/importer and return that path.

    Any pre-existing tmp/importer directory is removed first; the checkout
    is moved to the revision given by the global ``options.rev``.
    """
    base_dir = Path('tmp')
    base_dir.mkdir(exist_ok=True)
    clone_dir = base_dir / 'importer'
    if clone_dir.exists():
        # Windows needs the retree error handler for read-only files.
        shutil.rmtree(str(clone_dir), onerror=handle_retree_error_on_windows)
    vcs = common.getvcs(app.RepoType, app.Repo, clone_dir)
    vcs.gotorevision(options.rev)
    return clone_dir
def extractWuxiaHeroes(item):
    """Build a release message for a WuxiaHeroes feed item.

    Returns None when the title has no chapter/volume info or is a preview,
    a release message for recognized series, and False when nothing matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    if 'Blood Hourglass' in title:
        return buildReleaseMessageWithType(item, 'Blood Hourglass', vol, chp, frag=frag, postfix=postfix)
    # Series recognized by an exact feed tag.
    tag_rules = (
        ('The Nine Cauldrons', 'The Nine Cauldrons', 'translated'),
        ('Nine Yang Sword Saint', 'Nine Yang Sword Saint', 'translated'),
        ('Conquest', 'Conquest', 'translated'),
    )
    for tagname, name, tl_type in tag_rules:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Series recognized by a case-insensitive title substring.
    title_rules = (
        ('My Father in Law is Lu Bu Chapter', 'My Father in Law is Lu Bu Chapter', 'translated'),
        ('Blood Hourglass', 'Blood Hourglass', 'translated'),
        ('Era of Cultivation: Chapter', 'Era of Cultivation', 'translated'),
        ('Conquest Chapter', 'Conquest', 'translated'),
    )
    lowered = title.lower()
    for titlecomponent, name, tl_type in title_rules:
        if titlecomponent.lower() in lowered:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def decrypt_data(xref, cfunc, xref_args):
    """Attempt to AES-CBC-decrypt the data referenced at ``xref`` in IDA.

    Prints the PKCS7-unpadded plaintext (base64-decoding first when the
    flag argument is 1) or a diagnostic when the arguments cannot be
    resolved.

    NOTE(review): the two ``== )`` comparisons below are missing their
    right-hand constant (lost in extraction -- likely idc.BADADDR or a
    sentinel) and will not parse as-is; restore from the original script.
    """
    print(('%s: ' % hex(int(xref))), end='')
    args = convert_args_to_long(xref_args)
    if args:
        try:
            # args are presumably [data_ea, data_len, key_ea, key_len,
            # b64_flag] -- confirm against the call sites.
            key = idaapi.get_many_bytes(args[2], (args[3] if (idc.Dword(args[3]) == ) else idc.Dword(args[3])))
            data = idaapi.get_many_bytes(args[0], (args[1] if (idc.Dword(args[1]) == ) else idc.Dword(args[1])))
        except TypeError:
            print("Couldn't retrieve the cipher or the key.")
            print(xref_args)
        else:
            # Zero-pad the key to AES-256 length and decrypt with a zero IV.
            key = null_pad(key, 32)
            if (args[4] == 1):
                data = custom_b64decode(data)
            plain = PKCS7_unpad(AES.new(key, AES.MODE_CBC, ('\x00' * 16)).decrypt(data))
            print(plain)
    else:
        print('Not all args are numbers')
        print(xref_args)
class ModelDockerDeleter(ErsiliaBase):
    """Removes Docker images and containers associated with a model."""
    def __init__(self, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)

    def delete(self, model_id):
        """Delete docker artifacts for ``model_id``.

        No-op when running inside a container (no Docker daemon access).
        """
        if is_inside_docker():
            return
        self.logger.info('Removing docker images and stopping containers related to {0}'.format(model_id))
        dm = DockerManager(config_json=self.config_json)
        dm.delete_images(model_id)
def delete(item_id):
    """Delete a media item from the Emby server after user confirmation.

    Fetches the item, builds a display name (series/episode aware), checks
    the server allows deletion, asks the user, then issues the DELETE and
    refreshes the Kodi container.
    """
    json_data = downloadUtils.download_url((('{server}/emby/Users/{userid}/Items/' + item_id) + '?format=json'))
    item = json.loads(json_data)
    item_id = item.get('Id')
    item_name = item.get('Name', '')
    series_name = item.get('SeriesName', '')
    ep_number = item.get('IndexNumber', (- 1))
    # Compose "Series - Episode NN - Name" from whatever fields are present.
    final_name = ''
    if series_name:
        final_name += (series_name + ' - ')
    if (ep_number != (- 1)):
        final_name += ('Episode %02d - ' % (ep_number,))
    final_name += item_name
    if (not item.get('CanDelete', False)):
        # Server forbids deleting this item; inform the user and bail out.
        message = ((string_load(30417) + '\n') + final_name)
        xbmcgui.Dialog().ok(string_load(30135), message)
        return
    message = ((final_name + '\n') + string_load(30092))
    return_value = xbmcgui.Dialog().yesno(string_load(30091), message)
    if return_value:
        log.debug('Deleting Item: {0}', item_id)
        url = ('{server}/emby/Items/' + item_id)
        progress = xbmcgui.DialogProgress()
        progress.create(string_load(30052), string_load(30053))
        downloadUtils.download_url(url, method='DELETE')
        progress.close()
        check_for_new_content()
        home_window = HomeWindow()
        # Invalidate the cached listing we came from so the deletion shows.
        last_url = home_window.get_property('last_content_url')
        if last_url:
            home_window.set_property(('skip_cache_for_' + last_url), 'true')
        xbmc.executebuiltin('Container.Refresh')
class FragmentNode(template.Node):
    """Template node that accumulates rendered content on the request.

    The rendered node list is stored under ``identifier`` inside
    ``request._feincms_fragments`` and the tag itself outputs nothing.
    ``mode`` controls how repeated renders combine: 'append' (default),
    'prepend', or 'replace'.
    """
    def __init__(self, nodelist, request, identifier, mode='append'):
        self.nodelist = nodelist
        self.request_var = template.Variable(request)
        self.identifier_var = template.Variable(identifier)
        self.mode = mode

    def render(self, context):
        """Stash the rendered content on the request; emit nothing."""
        request = self.request_var.resolve(context)
        identifier = self.identifier_var.resolve(context)
        rendered = self.nodelist.render(context)
        if not hasattr(request, '_feincms_fragments'):
            request._feincms_fragments = {}
        fragments = request._feincms_fragments
        previous = fragments.get(identifier, '')
        if self.mode == 'prepend':
            fragments[identifier] = rendered + previous
        elif self.mode == 'replace':
            fragments[identifier] = rendered
        else:
            # Default 'append' behavior.
            fragments[identifier] = previous + rendered
        return ''
def output_stats(out_stream: TextIO, name: str, run_id: str, metrics: Dict[(str, Any)], config: Dict[(str, Any)]):
    """Log per-pass metric summaries and append a JSON stats record.

    Metrics ending in '.time' are reported in ms and those ending in
    '.memory' in MB. The aggregate record is written as one JSON line to
    ``out_stream`` and flushed.
    """
    for pass_name, metric in metrics.items():
        logger.info(f'pass: {pass_name}')
        for metric_name, records in metric.items():
            # Empty record lists report 0 for both total and average.
            total = sum(records) if records else 0
            avg = (total / len(records)) if records else 0
            if metric_name.endswith('.time'):
                logger.info(f'metric: {metric_name}, average: {avg:.3f} ms, total: {total:.3f} ms')
                logger.info(f'{format_float_val_list(records, 3)}')
            elif metric_name.endswith('.memory'):
                logger.info(f'metric: {metric_name}, average: {avg:.3f} MB, total: {total:.3f} MB')
                logger.info(f'{format_float_val_list(records, 3)}')
    stats = {'op_name': name, 'id': run_id, 'metric': metrics, 'config': config}
    out_stream.write(json.dumps(stats) + '\n')
    out_stream.flush()
def test_recurse_check_structure_extrasubitem():
    """Extra items inside nested list/dict values must fail validation."""
    sample = dict(string='Foobar', list=['Foo', 'Bar'], dict={'foo': 'Bar'}, none=None, true=True, false=False)
    # to_check adds 'Bas' to the list and key 'Bar' to the nested dict.
    to_check = dict(string='Foobar', list=['Foo', 'Bar', 'Bas'], dict={'foo': 'Bar', 'Bar': 'Foo'}, none=None, true=True, false=False)
    with pytest.raises(ValidationException):
        recurse_check_structure(sample, to_check)
.parametrize('filename', files_formats.keys())
def test_file_c_handle(testpath, filename):
    """get_cfhandle must refcount and reuse the underlying C file handle.

    NOTE(review): parameterized via a @pytest.mark.parametrize decorator
    whose leading text was lost in extraction (see the orphaned
    `.parametrize(...)` line above in the original file).
    """
    any_xtgeo_file = xtgeo._XTGeoFile((testpath / filename))
    handle_count = any_xtgeo_file._cfhandlecount
    # Each get_cfhandle() call bumps the refcount and returns the same handle.
    c_handle_1 = any_xtgeo_file.get_cfhandle()
    assert ((handle_count + 1) == any_xtgeo_file._cfhandlecount)
    c_handle_2 = any_xtgeo_file.get_cfhandle()
    assert ((handle_count + 2) == any_xtgeo_file._cfhandlecount)
    assert (c_handle_1 == c_handle_2)
    # cfclose() decrements; only the final close reports True, and closing
    # an already-closed file raises RuntimeError.
    assert (any_xtgeo_file.cfclose() is False)
    assert (any_xtgeo_file.cfclose() is True)
    with pytest.raises(RuntimeError):
        any_xtgeo_file.cfclose()
def _gamestats():
    """Collect display-ready statistics for the website front page.

    Returns a dict of template context values; zero counts fall back to
    strings such as 'none' / 'no' / 'no one' for friendlier display.
    """
    fpage_account_limit = 4
    recent_users = AccountDB.objects.get_recently_connected_accounts()
    nplyrs_conn_recent = (len(recent_users) or 'none')
    nplyrs = (AccountDB.objects.num_total_accounts() or 'none')
    nplyrs_reg_recent = (len(AccountDB.objects.get_recently_created_accounts()) or 'none')
    nsess = evennia.SESSION_HANDLER.account_count()
    nobjs = ObjectDB.objects.count()
    # Floor the object count at 1 (presumably to avoid a zero value in the
    # arithmetic/display below -- confirm intent).
    nobjs = (nobjs or 1)
    Character = class_from_module(settings.BASE_CHARACTER_TYPECLASS, fallback=settings.FALLBACK_CHARACTER_TYPECLASS)
    nchars = Character.objects.all_family().count()
    Room = class_from_module(settings.BASE_ROOM_TYPECLASS, fallback=settings.FALLBACK_ROOM_TYPECLASS)
    nrooms = Room.objects.all_family().count()
    Exit = class_from_module(settings.BASE_EXIT_TYPECLASS, fallback=settings.FALLBACK_EXIT_TYPECLASS)
    nexits = Exit.objects.all_family().count()
    # Everything that is not a character, room, or exit.
    nothers = (((nobjs - nchars) - nrooms) - nexits)
    pagevars = {'page_title': 'Front Page', 'accounts_connected_recent': recent_users[:fpage_account_limit], 'num_accounts_connected': (nsess or 'no one'), 'num_accounts_registered': (nplyrs or 'no'), 'num_accounts_connected_recent': (nplyrs_conn_recent or 'no'), 'num_accounts_registered_recent': (nplyrs_reg_recent or 'no one'), 'num_rooms': (nrooms or 'none'), 'num_exits': (nexits or 'no'), 'num_objects': (nobjs or 'none'), 'num_characters': (nchars or 'no'), 'num_others': (nothers or 'no')}
    return pagevars
def is_taxadb_up_to_date(dbfile=DEFAULT_TAXADB):
    """Check whether the taxa database schema version matches DB_VERSION.

    Args:
        dbfile: path to the sqlite database (defaults to DEFAULT_TAXADB).

    Returns:
        bool: True when the stored version equals DB_VERSION.
    """
    db = sqlite3.connect(dbfile)
    try:
        try:
            version = db.execute('SELECT version FROM stats;').fetchone()[0]
        except (sqlite3.OperationalError, ValueError, IndexError, TypeError):
            # Missing table/column or malformed row -> treat as unknown.
            version = None
    finally:
        # BUG FIX: close the connection even when an unexpected exception
        # escapes the lookup (the original only closed on the happy path
        # and the four listed exception types).
        db.close()
    return version == DB_VERSION
def process_image(img, output, name, amount, space, cvd_approach, fit):
    """Apply the named per-pixel filter to an image and save the result.

    PNGs are converted to RGBA first when applying the 'opacity' filter.
    Every pixel is transformed via apply_filter; progress is printed as a
    percentage and the total elapsed time is reported through printt.
    """
    with Image.open(img) as im:
        if (im.format == 'PNG') and (name == 'opacity'):
            if im.mode not in ('RGBA',):
                im = im.convert('RGBA')
        pixels = im.load()
        start = time.perf_counter_ns()
        width, height = im.size
        total = width * height
        # Guard against zero-pixel images (original divided by zero here).
        factor = (100 / total) if total else 0
        print('Pixels: {}'.format(total))
        print('> 0%', end='\r')
        for i in range(width):
            for j in range(height):
                pixels[(i, j)] = apply_filter(name, amount, space, cvd_approach, pixels[(i, j)], fit)
                # BUG FIX: progress previously used i*j, which is not the
                # number of processed pixels; use the true linear index.
                print('> {}%'.format(int((i * height + j + 1) * factor)), end='\r')
        print('> 100%')
        t = time.perf_counter_ns() - start
        printt(t)
        im.save(output)
class Commen_Thread(QThread):
    """Generic worker thread that runs ``action(*args)`` off the UI thread.

    Args:
        action: callable to execute inside run().
        *args: positional arguments forwarded to the callable.
    """
    def __init__(self, action, *args):
        # BUG FIX: the original called super(QThread, self).__init__(),
        # which skips QThread's own initializer in the MRO; start the
        # super() chain from this class instead.
        super(Commen_Thread, self).__init__()
        self.action = action
        self.args = args

    def run(self):
        print('start_thread params:{}'.format(self.args))
        if self.args:
            print(self.args)
        # BUG FIX: the original dispatched on len(args) for 1-4 arguments
        # and silently called action() with no arguments for 5 or more;
        # forwarding with * handles every arity.
        self.action(*self.args)
class OptionPlotoptionsColumnrangeSonificationContexttracksPointgrouping(Options):
    """Accessors for sonification context-track point-grouping options.

    NOTE(review): every option appears twice -- a getter-style def (returning
    the configured default) followed by a setter-style def with the same
    name. As written the later definition overwrites the earlier one; these
    are presumably @property / @<name>.setter pairs whose decorators were
    lost in extraction -- confirm against the generator.
    """
    def algorithm(self):
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class OverallTotals(models.Model):
    """Total budget authority recorded per fiscal year."""
    id = models.AutoField(primary_key=True)
    # Row audit timestamps (create set once, update on every save).
    create_date = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    update_date = models.DateTimeField(auto_now=True, null=True)
    fiscal_year = models.IntegerField(blank=True, null=True)
    total_budget_authority = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True)

    class Meta():
        managed = True
        db_table = 'overall_totals'
def tokenize_autotype(autotype):
    """Yield (token, is_special) pairs from a KeePass-style auto-type string.

    Plain text yields (text, False); modifier characters (+ ^ % ~) and
    brace-delimited placeholders such as '{TAB}' yield (token, True). The
    literal '{}}' sequence is emitted as a single special token. On an
    unmatched '{' an error is reported via dmenu_err and iteration stops.
    """
    while autotype:
        # Find the earliest special character (modifier or opening brace).
        opening_idx = -1
        for char in '{+^%~':
            idx = autotype.find(char)
            if (idx != -1) and ((opening_idx == -1) or (idx < opening_idx)):
                opening_idx = idx
        if opening_idx == -1:
            # No specials left: the remainder is literal text.
            yield (autotype, False)
            return
        if opening_idx > 0:
            # Literal text before the next special token.
            yield (autotype[:opening_idx], False)
        if autotype[opening_idx] in '+^%~':
            yield (autotype[opening_idx], True)
            autotype = autotype[(opening_idx + 1):]
            continue
        closing_idx = autotype.find('}')
        if closing_idx == -1:
            # BUG FIX: the concatenated message previously read
            # "...whiletokenizing..." -- add the missing trailing space.
            dmenu_err(('Unable to find matching right brace (}) while ' + f'''tokenizing auto-type string: {autotype}
'''))
            return
        if (closing_idx == (opening_idx + 1)) and ((closing_idx + 1) < len(autotype)) and (autotype[(closing_idx + 1)] == '}'):
            # '{}}' escapes a literal right brace as one token.
            yield ('{}}', True)
            autotype = autotype[(closing_idx + 2):]
            continue
        yield (autotype[opening_idx:(closing_idx + 1)], True)
        autotype = autotype[(closing_idx + 1):]
def remove_geojson_entry(zone_key: ZoneKey):
    """Remove the feature for ``zone_key`` from web/geo/world.geojson.

    After patching the file, prettier reformats it and the world bundle is
    regenerated via pnpm.
    """
    geo_json_path = (ROOT_PATH / 'web/geo/world.geojson')
    with JsonFilePatcher(geo_json_path, indent=None) as patcher:
        # BUG-PRONE: the original reused `f` as both the patcher and the
        # comprehension loop variable; use distinct names for clarity.
        patcher.content['features'] = [
            feature
            for feature in patcher.content['features']
            if feature['properties']['zoneName'] != zone_key
        ]
    run_shell_command(f'npx --config {PRETTIER_CONFIG_PATH} --write {geo_json_path}', cwd=ROOT_PATH)
    run_shell_command('pnpm generate-world', cwd=(ROOT_PATH / 'web'))
class OptionSeriesVariablepieSonificationContexttracksMappingNoteduration(Options):
    """Accessors for the sonification note-duration mapping options.

    NOTE(review): every option appears twice -- a getter-style def followed
    by a setter-style def with the same name. As written the later
    definition overwrites the earlier one; these are presumably
    @property / @<name>.setter pairs whose decorators were lost in
    extraction -- confirm against the generator.
    """
    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class Command(BaseCommand):
help = 'Configures your project for deployment to the specified platform.'
def __init__(self):
self.suppressed_base_arguments.update(['--version', '-v', '--settings', '--pythonpath', '--traceback', '--no-color', '--force-color'])
self.requires_system_checks = []
super().__init__()
def create_parser(self, prog_name, subcommand, **kwargs):
epilog = 'For more help, see the full documentation at:
parser = super().create_parser(prog_name, subcommand, usage=cli.get_usage(), epilog=epilog, add_help=False, **kwargs)
return parser
def add_arguments(self, parser):
sd_cli = cli.SimpleDeployCLI(parser)
def handle(self, *args, **options):
self.stdout.write('Configuring project for deployment...')
self._parse_cli_options(options)
if self.log_output:
self._start_logging()
self.log_info(f'''
CLI args: {options}''')
self._validate_command()
self._inspect_system()
self._inspect_project()
self._confirm_automate_all()
self.platform_deployer.validate_platform()
if self.automate_all:
self.platform_deployer.prep_automate_all()
self._add_simple_deploy_req()
self.platform_deployer.deploy()
def _parse_cli_options(self, options):
self.automate_all = options['automate_all']
self.platform = options['platform']
self.log_output = (not options['no_logging'])
self.ignore_unclean_git = options['ignore_unclean_git']
self.deployed_project_name = options['deployed_project_name']
self.region = options['region']
self.unit_testing = options['unit_testing']
self.integration_testing = options['integration_testing']
def _validate_command(self):
self._validate_platform_arg()
def _validate_platform_arg(self):
if (not self.platform):
raise SimpleDeployCommandError(self, d_msgs.requires_platform_flag)
elif (self.platform in ['fly_io', 'platform_sh', 'heroku']):
self.write_output(f' Deployment target: {self.platform}')
else:
error_msg = d_msgs.invalid_platform_msg(self.platform)
raise SimpleDeployCommandError(self, error_msg)
self.platform_msgs = import_module(f'.{self.platform}.deploy_messages', package='simple_deploy.management.commands')
deployer_module = import_module(f'.{self.platform}.deploy', package='simple_deploy.management.commands')
self.platform_deployer = deployer_module.PlatformDeployer(self)
try:
self.platform_deployer.confirm_preliminary()
except AttributeError:
pass
def _confirm_automate_all(self):
if (not self.automate_all):
return
self.write_output(self.platform_msgs.confirm_automate_all)
confirmed = self.get_confirmation()
if confirmed:
self.write_output('Automating all steps...')
else:
self.write_output(d_msgs.cancel_automate_all)
sys.exit()
def _start_logging(self):
created_log_dir = self._create_log_dir()
timestamp = datetime.now().strftime('%Y-%m-%d-%H%M%S')
log_filename = f'simple_deploy_{timestamp}.log'
verbose_log_path = (self.log_dir_path / log_filename)
verbose_logger = logging.basicConfig(level=logging.INFO, filename=verbose_log_path, format='%(asctime)s %(levelname)s: %(message)s')
self.log_info('\nLogging run of `manage.py simple_deploy`...')
if created_log_dir:
self.write_output(f'Created {self.log_dir_path}.')
def _create_log_dir(self):
self.log_dir_path = (settings.BASE_DIR / Path('simple_deploy_logs'))
if (not self.log_dir_path.exists()):
self.log_dir_path.mkdir()
return True
else:
return False
def _ignore_sd_logs(self):
ignore_msg = '# Ignore logs from simple_deploy.'
ignore_msg += '\nsimple_deploy_logs/\n'
gitignore_path = (self.git_path / '.gitignore')
if (not gitignore_path.exists()):
gitignore_path.write_text(ignore_msg, encoding='utf-8')
self.write_output('No .gitignore file found; created .gitignore.')
self.write_output('Added simple_deploy_logs/ to .gitignore.')
else:
with open(gitignore_path, 'r+') as f:
gitignore_contents = f.read()
if ('simple_deploy_logs/' not in gitignore_contents):
f.write(f'''
{ignore_msg}''')
self.write_output('Added simple_deploy_logs/ to .gitignore')
def _strip_secret_key(self, line):
if ('SECRET_KEY:' in line):
new_line = line.split('SECRET_KEY:')[0]
new_line += 'SECRET_KEY: *value hidden*'
return new_line
else:
return line
def _inspect_system(self):
self.use_shell = False
(self.on_windows, self.on_macos) = (False, False)
if (os.name == 'nt'):
self.on_windows = True
self.use_shell = True
self.log_info(' Local platform identified: Windows')
elif (platform.system() == 'Darwin'):
self.on_macos = True
self.log_info(' Local platform identified: macOS')
def _inspect_project(self):
self.project_name = settings.ROOT_URLCONF.replace('.urls', '')
self.log_info(f' Project name: {self.project_name}')
self.project_root = Path(settings.BASE_DIR)
self.log_info(f' Project root: {self.project_root}')
self._find_git_dir()
self._check_git_status()
if self.log_output:
self._ignore_sd_logs()
self.settings_path = ((self.project_root / self.project_name) / 'settings.py')
self.pkg_manager = self._get_dep_man_approach()
msg = f' Dependency management system: {self.pkg_manager}'
self.write_output(msg)
self.requirements = self._get_current_requirements()
def _find_git_dir(self):
if Path((self.project_root / '.git')).exists():
self.git_path = Path(self.project_root)
self.write_output(f' Found .git dir at {self.git_path}.')
self.nested_project = False
elif (Path(self.project_root).parent / Path('.git')).exists():
self.git_path = Path(self.project_root).parent
self.write_output(f' Found .git dir at {self.git_path}.')
self.nested_project = True
else:
error_msg = 'Could not find a .git/ directory.'
error_msg += f'''
Looked in {self.project_root} and in {Path(self.project_root).parent}.'''
raise SimpleDeployCommandError(self, error_msg)
def _check_git_status(self):
if self.ignore_unclean_git:
return
cmd = 'git status'
output_obj = self.execute_subp_run(cmd)
output_str = output_obj.stdout.decode()
self.log_info(f'''
git status:
{output_str}''')
if ('working tree clean' in output_str):
return
cmd = 'git diff'
output_obj = self.execute_subp_run(cmd)
output_str = output_obj.stdout.decode()
if (not self._diff_output_clean(output_str)):
error_msg = d_msgs.unclean_git_status
if self.automate_all:
error_msg += d_msgs.unclean_git_automate_all
raise SimpleDeployCommandError(self, error_msg)
def _diff_output_clean(self, output_str):
if (not output_str):
cmd = 'git status --porcelain'
output_obj = self.execute_subp_run(cmd)
gs_output_str = output_obj.stdout.decode()
if (gs_output_str.strip() == '?? simple_deploy_logs/'):
return True
else:
return False
num_deletions = output_str.count('\n- ')
num_additions = output_str.count('\n+ ')
if ((num_deletions > 0) or (num_additions > 1)):
return False
re_diff = '(\\n\\+{1}\\s+[\',"]simple_deploy[\',"],)'
m = re.search(re_diff, output_str)
if m:
return True
if all([('diff --git a/.gitignore b/.gitignore' in output_str), ('+# Ignore logs from simple_deploy.' in output_str), ('+simple_deploy_logs/' in output_str)]):
return True
return False
def _get_dep_man_approach(self):
if (self.git_path / 'Pipfile').exists():
return 'pipenv'
elif self._check_using_poetry():
return 'poetry'
elif (self.git_path / 'requirements.txt').exists():
return 'req_txt'
error_msg = f"Couldn't find any specified requirements in {self.git_path}."
raise SimpleDeployCommandError(self, error_msg)
def _check_using_poetry(self):
path = (self.git_path / 'poetry.lock')
if path.exists():
return True
path = (self.git_path / 'pyproject.toml')
if path.exists():
if ('[tool.poetry]' in path.read_text()):
return True
return False
def _get_current_requirements(self):
msg = ' Checking current project requirements...'
self.write_output(msg)
if (self.pkg_manager == 'req_txt'):
requirements = self._get_req_txt_requirements()
elif (self.pkg_manager == 'pipenv'):
requirements = self._get_pipfile_requirements()
elif (self.pkg_manager == 'poetry'):
requirements = self._get_poetry_requirements()
msg = ' Found existing dependencies:'
self.write_output(msg)
for requirement in requirements:
msg = f' {requirement}'
self.write_output(msg)
return requirements
def _add_simple_deploy_req(self):
self.write_output('\n Looking for django-simple-deploy in requirements...')
self.add_package('django-simple-deploy')
def _get_req_txt_requirements(self):
self.req_txt_path = (self.git_path / 'requirements.txt')
contents = self.req_txt_path.read_text()
lines = contents.split('\n')
req_re = '^([a-zA-Z0-9\\-]*)'
requirements = []
for line in lines:
m = re.search(req_re, line)
if m:
requirements.append(m.group(1))
return requirements
def _get_pipfile_requirements(self):
self.pipfile_path = f'{self.git_path}/Pipfile'
with open(self.pipfile_path) as f:
lines = f.readlines()
requirements = []
in_packages = False
for line in lines:
if ('[packages]' in line):
in_packages = True
continue
elif ('[dev-packages]' in line):
break
if in_packages:
pkg_name = line.split('=')[0].rstrip()
if pkg_name:
requirements.append(pkg_name)
return requirements
def _write_pipfile_pkg(self, package_name, version=''):
with open(self.pipfile_path) as f:
pipfile_text = f.read()
if (not version):
version = '*'
tab_string = (' ' * (((30 - len(package_name)) - 5) - len(version)))
new_pkg_string = f'''[packages]
{package_name} = "{version}"{tab_string}# Added by simple_deploy command.'''
pipfile_text = pipfile_text.replace('[packages]', new_pkg_string)
with open(self.pipfile_path, 'w') as f:
f.write(pipfile_text)
self.write_output(f' Added {package_name} to Pipfile.')
def _get_poetry_requirements(self):
self.pyprojecttoml_path = (self.git_path / 'pyproject.toml')
parsed_toml = toml.loads(self.pyprojecttoml_path.read_text())
main_reqs = parsed_toml['tool']['poetry']['dependencies'].keys()
requirements = list(main_reqs)
try:
deploy_reqs = parsed_toml['tool']['poetry']['group']['deploy']['dependencies'].keys()
except KeyError:
pass
else:
requirements += list(deploy_reqs)
requirements.remove('python')
return requirements
def write_output(self, output_obj, log_level='INFO', write_to_console=True, skip_logging=False):
if isinstance(output_obj, subprocess.CompletedProcess):
output_str = output_obj.stdout.decode()
if (not output_str):
output_str = output_obj.stderr.decode()
elif isinstance(output_obj, str):
output_str = output_obj
if write_to_console:
self.stdout.write(output_str)
if (self.log_output and (not skip_logging)):
for line in output_str.splitlines():
line = self._strip_secret_key(line)
logging.info(line)
def log_info(self, output_obj):
self.write_output(output_obj, write_to_console=False)
def execute_subp_run(self, cmd, check=False):
if self.on_windows:
output = subprocess.run(cmd, shell=True, capture_output=True)
else:
cmd_parts = shlex.split(cmd)
output = subprocess.run(cmd_parts, capture_output=True, check=check)
return output
def execute_command(self, cmd, skip_logging=False):
cmd_parts = cmd.split()
with subprocess.Popen(cmd_parts, stderr=subprocess.PIPE, bufsize=1, universal_newlines=True, shell=self.use_shell) as p:
for line in p.stderr:
self.write_output(line, skip_logging=skip_logging)
if (p.returncode != 0):
raise subprocess.CalledProcessError(p.returncode, p.args)
def add_packages(self, package_list):
for package in package_list:
self.add_package(package)
def add_package(self, package_name, version=''):
self.write_output(f'''
Looking for {package_name}...''')
if (self.pkg_manager == 'pipenv'):
self._add_pipenv_pkg(package_name, version)
elif (self.pkg_manager == 'poetry'):
self._add_poetry_pkg(package_name, version)
else:
self._add_req_txt_pkg(package_name, version)
def _add_req_txt_pkg(self, package_name, version):
if (package_name in self.requirements):
self.write_output(f' Found {package_name} in requirements file.')
return
with open(self.req_txt_path, 'a') as f:
package_name += version
tab_string = (' ' * (30 - len(package_name)))
f.write(f'''
{package_name}{tab_string}# Added by simple_deploy command.''')
self.write_output(f' Added {package_name} to requirements.txt.')
def _add_poetry_pkg(self, package_name, version):
self._check_poetry_deploy_group()
if (package_name in self.requirements):
self.write_output(f' Found {package_name} in requirements file.')
return
if (not version):
version = '*'
new_req_line = f'{package_name} = "{version}"'
contents = self.pyprojecttoml_path.read_text()
new_group_string = f'''{self.poetry_group_string}{new_req_line}
'''
contents = contents.replace(self.poetry_group_string, new_group_string)
self.pyprojecttoml_path.write_text(contents, encoding='utf-8')
self.write_output(f' Added {package_name} to pyproject.toml.')
def _check_poetry_deploy_group(self):
self.poetry_group_string = '[tool.poetry.group.deploy]\noptional = true\n'
self.poetry_group_string += '\n[tool.poetry.group.deploy.dependencies]\n'
contents = self.pyprojecttoml_path.read_text()
if (self.poetry_group_string in contents):
return
contents += f'''
{self.poetry_group_string}'''
self.pyprojecttoml_path.write_text(contents, encoding='utf-8')
msg = ' Added optional "deploy" group to pyproject.toml.'
self.write_output(msg)
def _add_pipenv_pkg(self, package_name, version=''):
if (package_name in self.requirements):
self.write_output(f' Found {package_name} in Pipfile.')
else:
self._write_pipfile_pkg(package_name, version)
def get_confirmation(self, msg='', skip_logging=False):
if (not msg):
prompt = '\nAre you sure you want to do this? (yes|no) '
else:
prompt = f'''
{msg} (yes|no) '''
confirmed = ''
if self.integration_testing:
self.write_output(prompt, skip_logging=skip_logging)
msg = ' Confirmed for integration testing...'
self.write_output(msg, skip_logging=skip_logging)
return True
while True:
self.write_output(prompt, skip_logging=skip_logging)
confirmed = input()
self.write_output(confirmed, skip_logging=skip_logging, write_to_console=False)
if (confirmed.lower() in ('y', 'yes')):
return True
elif (confirmed.lower() in ('n', 'no')):
return False
else:
self.write_output(' Please answer yes or no.', skip_logging=skip_logging)
def commit_changes(self):
    """Stage and commit all changes; no-op unless running in automate-all mode."""
    if not self.automate_all:
        return
    self.write_output('  Committing changes...')
    # Stage everything, then commit with a fixed deployment message.
    for cmd in ('git add .', 'git commit -am "Configured project for deployment."'):
        self.write_output(self.execute_subp_run(cmd))
def extractReiWordpressCom(item):
    """Map a feed item from this site onto a release message for a known series.

    Returns None for previews / items without chapter info, False when the
    item does not match any tracked series, otherwise a release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # Only the default (uncategorised) tag is accepted for this feed.
    if item['tags'] != ['Tak Berkategori']:
        return False
    chp_prefixes = [
        ('The villager Who Grew Up Drinking Elixir Fountain ', 'The villager Who Grew Up Drinking Elixir Fountain', 'translated'),
        ('Another World Transfer in Game Character Episode ', 'Another World Transfer in Game Character', 'translated'),
        ('Manowa', 'Manowa Mamono Taosu Nouryoku Ubau Watashi Tsuyokunaru', 'translated'),
        ('Cat ', 'Me and My Beloved Cat (Girlfriend)', 'translated'),
    ]
    title = item['title'].lower()
    for prefix, series, tl_type in chp_prefixes:
        if title.startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Flashbots(Module):
    """Web3.py external module wrapping the Flashbots relay JSON-RPC API.

    Provides bundle signing/submission, simulation, private transactions and
    relay statistics.  Camel-case attributes are the canonical ``Method``
    objects bound to relay RPC endpoints; snake_case names are aliases so
    both naming conventions work.
    """
    signed_txs: List[HexBytes]
    response: Union[(FlashbotsBundleResponse, FlashbotsPrivateTransactionResponse)]

    def sign_bundle(self, bundled_transactions: List[Union[(FlashbotsBundleTx, FlashbotsBundleRawTx, FlashbotsBundleDictTx)]]) -> List[HexBytes]:
        """Turn a mixed bundle description into raw signed transactions.

        Accepts already-signed raw txs, (signer, transaction) pairs, and
        fully-populated dict txs that carry their own (v, r, s) signature,
        returning the raw RLP bytes for each in the original order.
        """
        nonces: Dict[(HexStr, Nonce)] = {}
        signed_transactions: List[HexBytes] = []
        for tx in bundled_transactions:
            if ('signed_transaction' in tx):
                # Pre-signed raw tx: decode it so later txs from the same
                # sender continue the nonce sequence.
                tx_params = _parse_signed_tx(tx['signed_transaction'])
                nonces[tx_params['from']] = (tx_params['nonce'] + 1)
                signed_transactions.append(tx['signed_transaction'])
            elif ('signer' in tx):
                # Unsigned tx plus a local account to sign with.
                (signer, tx) = (tx['signer'], tx['transaction'])
                tx['from'] = signer.address
                if (tx.get('nonce') is None):
                    # First tx for this signer falls back to the on-chain count.
                    tx['nonce'] = nonces.get(signer.address, self.web3.eth.get_transaction_count(signer.address))
                nonces[signer.address] = (tx['nonce'] + 1)
                if ('gas' not in tx):
                    tx['gas'] = self.web3.eth.estimateGas(tx)
                signed_tx = signer.sign_transaction(tx)
                signed_transactions.append(signed_tx.rawTransaction)
            elif all(((key in tx) for key in ['v', 'r', 's'])):
                # Dict tx with signature components (e.g. fetched from a node):
                # rebuild the unsigned tx and re-attach (v, r, s).
                (v, r, s) = (tx['v'], int(tx['r'].hex(), base=16), int(tx['s'].hex(), base=16))
                tx_dict = {'nonce': tx['nonce'], 'data': HexBytes(tx['input']), 'value': tx['value'], 'gas': tx['gas']}
                if (('maxFeePerGas' in tx) or ('maxPriorityFeePerGas' in tx)):
                    # EIP-1559 txs must carry both fee fields.
                    assert (('maxFeePerGas' in tx) and ('maxPriorityFeePerGas' in tx))
                    (tx_dict['maxFeePerGas'], tx_dict['maxPriorityFeePerGas']) = (tx['maxFeePerGas'], tx['maxPriorityFeePerGas'])
                else:
                    assert ('gasPrice' in tx)
                    tx_dict['gasPrice'] = tx['gasPrice']
                if tx.get('accessList'):
                    tx_dict['accessList'] = tx['accessList']
                if tx.get('chainId'):
                    tx_dict['chainId'] = tx['chainId']
                if tx.get('to'):
                    tx_dict['to'] = HexBytes(tx['to'])
                unsigned_tx = serializable_unsigned_transaction_from_dict(tx_dict)
                raw = encode_transaction(unsigned_tx, vrs=(v, r, s))
                # Sanity check: the re-encoded tx must hash to the original hash.
                assert (self.web3.keccak(raw) == tx['hash'])
                signed_transactions.append(raw)
        return signed_transactions

    def to_hex(self, signed_transaction: bytes) -> str:
        """Hex-encode raw transaction bytes, guaranteeing a ``0x`` prefix."""
        tx_hex = signed_transaction.hex()
        if (tx_hex[0:2] != '0x'):
            tx_hex = f'0x{tx_hex}'
        return tx_hex

    def send_raw_bundle_munger(self, signed_bundled_transactions: List[HexBytes], target_block_number: int, opts: Optional[FlashbotsOpts]=None) -> List[Any]:
        """Shape ``eth_sendBundle`` params from raw signed txs and options."""
        if (opts is None):
            opts = {}
        # Missing option keys fall back to relay defaults (0 / [] / None).
        return [{'txs': list(map((lambda x: self.to_hex(x)), signed_bundled_transactions)), 'blockNumber': hex(target_block_number), 'minTimestamp': (opts['minTimestamp'] if ('minTimestamp' in opts) else 0), 'maxTimestamp': (opts['maxTimestamp'] if ('maxTimestamp' in opts) else 0), 'revertingTxHashes': (opts['revertingTxHashes'] if ('revertingTxHashes' in opts) else []), 'replacementUuid': (opts['replacementUuid'] if ('replacementUuid' in opts) else None)}]
    sendRawBundle: Method[Callable[([Any], Any)]] = Method(FlashbotsRPC.eth_sendBundle, mungers=[send_raw_bundle_munger])
    send_raw_bundle = sendRawBundle

    def send_bundle_munger(self, bundled_transactions: List[Union[(FlashbotsBundleTx, FlashbotsBundleRawTx)]], target_block_number: int, opts: Optional[FlashbotsOpts]=None) -> List[Any]:
        """Sign the bundle, stash a response helper, and build the RPC params."""
        signed_txs = self.sign_bundle(bundled_transactions)
        # The result formatter below returns this wrapper instead of the raw reply.
        self.response = FlashbotsBundleResponse(self.web3, signed_txs, target_block_number)
        return self.send_raw_bundle_munger(signed_txs, target_block_number, opts)

    def raw_bundle_formatter(self, resp) -> Any:
        """Result formatter: discard the raw RPC reply in favour of ``self.response``."""
        return (lambda _: resp.response)
    sendBundle: Method[Callable[([Any], Any)]] = Method(FlashbotsRPC.eth_sendBundle, mungers=[send_bundle_munger], result_formatters=raw_bundle_formatter)
    send_bundle = sendBundle

    def cancel_bundles_munger(self, replacement_uuid: str) -> List[Any]:
        """Build ``eth_cancelBundle`` params from a replacement UUID."""
        return [{'replacementUuid': replacement_uuid}]

    def cancel_bundle_formatter(self, resp) -> Any:
        """Wrap the cancel result in a dict keyed by 'bundleHashes'."""
        return (lambda res: {'bundleHashes': res})
    cancelBundles: Method[Callable[([Any], Any)]] = Method(FlashbotsRPC.eth_cancelBundle, mungers=[cancel_bundles_munger], result_formatters=cancel_bundle_formatter)
    cancel_bundles = cancelBundles

    def simulate(self, bundled_transactions: List[Union[(FlashbotsBundleTx, FlashbotsBundleRawTx)]], block_tag: Union[(int, str)]=None, state_block_tag: int=None, block_timestamp: int=None):
        """Simulate a bundle via ``eth_callBundle`` and summarise the outcome."""
        # Default to the head block; 'latest' is accepted as an alias.
        block_number = (self.web3.eth.block_number if ((block_tag is None) or (block_tag == 'latest')) else block_tag)
        evm_block_number = self.web3.toHex(block_number)
        # State defaults to the parent of the simulated block.
        evm_block_state_number = (self.web3.toHex(state_block_tag) if (state_block_tag is not None) else self.web3.toHex((block_number - 1)))
        evm_timestamp = (block_timestamp if (block_timestamp is not None) else self.extrapolate_timestamp(block_number, self.web3.eth.block_number))
        signed_bundled_transactions = self.sign_bundle(bundled_transactions)
        call_result = self.call_bundle(signed_bundled_transactions, evm_block_number, evm_block_state_number, evm_timestamp)
        return {'bundleHash': call_result['bundleHash'], 'coinbaseDiff': call_result['coinbaseDiff'], 'results': call_result['results'], 'signedBundledTransactions': signed_bundled_transactions, 'totalGasUsed': reduce((lambda a, b: (a + b['gasUsed'])), call_result['results'], 0)}

    def extrapolate_timestamp(self, block_tag: int, latest_block_number: int):
        """Estimate a future block's timestamp assuming a fixed block cadence.

        :raises Exception: if *block_tag* is behind *latest_block_number*.
        """
        block_delta = (block_tag - latest_block_number)
        if (block_delta < 0):
            raise Exception('block extrapolation negative')
        return (self.web3.eth.get_block(latest_block_number)['timestamp'] + (block_delta * SECONDS_PER_BLOCK))

    def call_bundle_munger(self, signed_bundled_transactions: List[Union[(FlashbotsBundleTx, FlashbotsBundleRawTx)]], evm_block_number, evm_block_state_number, evm_timestamp, opts: Optional[FlashbotsOpts]=None) -> Any:
        """Shape ``eth_callBundle`` params from already-signed transactions."""
        inpt = [{'txs': list(map((lambda x: x.hex()), signed_bundled_transactions)), 'blockNumber': evm_block_number, 'stateBlockNumber': evm_block_state_number, 'timestamp': evm_timestamp}]
        return inpt
    call_bundle: Method[Callable[([Any], Any)]] = Method(json_rpc_method=FlashbotsRPC.eth_callBundle, mungers=[call_bundle_munger])

    def get_user_stats_munger(self) -> List:
        """Build user-stats params keyed to the current block number."""
        return [{'blockNumber': hex(self.web3.eth.blockNumber)}]
    getUserStats: Method[Callable[([Any], Any)]] = Method(json_rpc_method=FlashbotsRPC.flashbots_getUserStats, mungers=[get_user_stats_munger])
    get_user_stats = getUserStats
    getUserStatsV2: Method[Callable[([Any], Any)]] = Method(json_rpc_method=FlashbotsRPC.flashbots_getUserStatsV2, mungers=[get_user_stats_munger])
    get_user_stats_v2 = getUserStatsV2

    def get_bundle_stats_munger(self, bundle_hash: Union[(str, int)], block_number: Union[(str, int)]) -> List:
        """Normalise hash/block arguments to hex strings for the stats endpoints."""
        if isinstance(bundle_hash, int):
            bundle_hash = hex(bundle_hash)
        if isinstance(block_number, int):
            block_number = hex(block_number)
        return [{'bundleHash': bundle_hash, 'blockNumber': block_number}]
    getBundleStats: Method[Callable[([Any], Any)]] = Method(json_rpc_method=FlashbotsRPC.flashbots_getBundleStats, mungers=[get_bundle_stats_munger])
    get_bundle_stats = getBundleStats
    getBundleStatsV2: Method[Callable[([Any], Any)]] = Method(json_rpc_method=FlashbotsRPC.flashbots_getBundleStatsV2, mungers=[get_bundle_stats_munger])
    get_bundle_stats_v2 = getBundleStatsV2

    def send_private_transaction_munger(self, transaction: Union[(FlashbotsBundleTx, FlashbotsBundleRawTx)], max_block_number: Optional[int]=None) -> Any:
        """Sign (if needed) one private tx and build ``eth_sendPrivateTransaction`` params."""
        signed_transaction: str
        if ('signed_transaction' in transaction):
            signed_transaction = transaction['signed_transaction']
        else:
            signed_transaction = transaction['signer'].sign_transaction(transaction['transaction']).rawTransaction
        if (max_block_number is None):
            # Default validity window: the next 25 blocks.
            current_block = self.web3.eth.block_number
            max_block_number = (current_block + 25)
        params = {'tx': self.to_hex(signed_transaction), 'maxBlockNumber': max_block_number}
        self.response = FlashbotsPrivateTransactionResponse(self.web3, signed_transaction, max_block_number)
        return [params]
    sendPrivateTransaction: Method[Callable[([Any], Any)]] = Method(json_rpc_method=FlashbotsRPC.eth_sendPrivateTransaction, mungers=[send_private_transaction_munger], result_formatters=raw_bundle_formatter)
    send_private_transaction = sendPrivateTransaction

    def cancel_private_transaction_munger(self, tx_hash: str) -> bool:
        """Build ``eth_cancelPrivateTransaction`` params from a tx hash."""
        params = {'txHash': tx_hash}
        return [params]
    cancelPrivateTransaction: Method[Callable[([Any], Any)]] = Method(json_rpc_method=FlashbotsRPC.eth_cancelPrivateTransaction, mungers=[cancel_private_transaction_munger])
    cancel_private_transaction = cancelPrivateTransaction
class Asset(Task, CodeMixin):
    """An Asset: a Task specialization identified by a required ``code``."""
    # Names must be supplied explicitly; no auto-generated names.
    __auto_name__ = False
    __strictly_typed__ = True
    __tablename__ = 'Assets'
    __mapper_args__ = {'polymorphic_identity': 'Asset'}
    # Joined-table inheritance: shares its primary key with the parent Tasks row.
    asset_id = Column('id', Integer, ForeignKey('Tasks.id'), primary_key=True)
    def __init__(self, code, **kwargs):
        """:param code: the asset code; forwarded through kwargs for the mixins."""
        kwargs['code'] = code
        super(Asset, self).__init__(**kwargs)
        CodeMixin.__init__(self, **kwargs)
        # NOTE(review): ReferenceMixin is initialised here although it is not in
        # this class's visible bases — presumably inherited via Task; confirm.
        ReferenceMixin.__init__(self, **kwargs)
    def __eq__(self, other):
        """Equal when the Task comparison holds, *other* is an Asset and types match."""
        return (super(Asset, self).__eq__(other) and isinstance(other, Asset) and (self.type == other.type))
    def __hash__(self):
        # Defined alongside __eq__ so Asset instances stay hashable.
        return super(Asset, self).__hash__()
class OptionPlotoptionsLollipopDragdropGuideboxDefault(Options):
    """Accessors for Highcharts ``plotOptions.lollipop.dragDrop.guideBox.default``.

    NOTE(review): every option below appears as a getter/setter pair sharing a
    name; elsewhere this codebase decorates such pairs (``@property`` /
    ``@<name>.setter``), and as written here the second ``def`` shadows the
    first — confirm the decorators were not lost during extraction.
    """
    def className(self):
        # CSS class name of the guide box; default 'highcharts-drag-box-default'.
        return self._config_get('highcharts-drag-box-default')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Guide box fill colour; default is a faint translucent black.
        return self._config_get('rgba(0, 0, 0, 0.1)')
    def color(self, text: str):
        self._config(text, js_type=False)
    def cursor(self):
        # Mouse cursor shown while dragging; default 'move'.
        return self._config_get('move')
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        # Guide box border colour; default '#888'.
        return self._config_get('#888')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Guide box border width in pixels; default 1.
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        # Stacking order of the guide box; default 900.
        return self._config_get(900)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def extractReaderslistpodcastWordpressCom(item):
    """Resolve a feed item to a release message based on its tags.

    Returns None for previews / items without chapter info, False when no
    tracked tag matches, otherwise a release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    # First tag entry present on the item wins.
    hit = next((entry for entry in tagmap if entry[0] in item['tags']), None)
    if hit is None:
        return False
    _, name, tl_type = hit
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class RelationshipBuilder(object):
    """Builds version-aware relationship accessors for versioned models.

    For a relationship on a versioned parent model, attaches a property to
    the parent's version class that resolves the related objects *as of* the
    parent version's transaction.
    """
    def __init__(self, versioning_manager, model, property_):
        self.manager = versioning_manager
        self.property = property_
        self.model = model

    def one_to_many_subquery(self, obj):
        """EXISTS clause keeping, per remote primary key, only the newest version row at or before *obj*'s transaction."""
        tx_column = option(obj, 'transaction_column_name')
        remote_alias = sa.orm.aliased(self.remote_cls)
        # All remote PK columns except the transaction column itself.
        primary_keys = [getattr(remote_alias, column.name) for column in sa.inspect(remote_alias).mapper.columns if (column.primary_key and (column.name != tx_column))]
        return sa.exists(sa.select([1]).where(sa.and_((getattr(remote_alias, tx_column) <= getattr(obj, tx_column)), *[(getattr(remote_alias, pk.name) == getattr(self.remote_cls, pk.name)) for pk in primary_keys])).group_by(*primary_keys).having((sa.func.max(getattr(remote_alias, tx_column)) == getattr(self.remote_cls, tx_column))).correlate(self.local_cls, self.remote_cls))

    def many_to_one_subquery(self, obj):
        """Criterion pinning the remote side to its newest version at or before *obj*'s transaction."""
        tx_column = option(obj, 'transaction_column_name')
        reflector = VersionExpressionReflector(obj, self.property)
        return (getattr(self.remote_cls, tx_column) == sa.select([sa.func.max(getattr(self.remote_cls, tx_column))]).where(sa.and_((getattr(self.remote_cls, tx_column) <= getattr(obj, tx_column)), reflector(self.property.primaryjoin))))

    def query(self, obj):
        """Base query for the related version objects of *obj*."""
        session = sa.orm.object_session(obj)
        return session.query(self.remote_cls).filter(self.criteria(obj))

    def process_query(self, query):
        """Shape the query result to mirror the original relationship's loading style."""
        if (self.property.lazy == 'dynamic'):
            # Dynamic relationships expose the query itself.
            return query
        if (self.property.uselist is False):
            # Scalar relationship: one object or None.
            return query.first()
        return query.all()

    def criteria(self, obj):
        """Dispatch to the direction-specific filter criteria."""
        direction = self.property.direction
        if self.versioned:
            if (direction.name == 'ONETOMANY'):
                return self.one_to_many_criteria(obj)
            elif (direction.name == 'MANYTOMANY'):
                return self.many_to_many_criteria(obj)
            elif (direction.name == 'MANYTOONE'):
                return self.many_to_one_criteria(obj)
        else:
            # Remote class is not versioned: just rewrite the original join.
            reflector = VersionExpressionReflector(obj, self.property)
            return reflector(self.property.primaryjoin)

    def many_to_many_criteria(self, obj):
        """Association-table join, latest-version filter, and no deleted rows."""
        return sa.and_(self.association_subquery(obj), self.one_to_many_subquery(obj), (self.remote_cls.operation_type != Operation.DELETE))

    def many_to_one_criteria(self, obj):
        """Primary join, newest-remote-version filter, and no deleted rows."""
        reflector = VersionExpressionReflector(obj, self.property)
        return sa.and_(reflector(self.property.primaryjoin), self.many_to_one_subquery(obj), (self.remote_cls.operation_type != Operation.DELETE))

    def one_to_many_criteria(self, obj):
        """Primary join, newest-version-per-PK filter, and no deleted rows."""
        reflector = VersionExpressionReflector(obj, self.property)
        return sa.and_(reflector(self.property.primaryjoin), self.one_to_many_subquery(obj), (self.remote_cls.operation_type != Operation.DELETE))

    def reflected_relationship(self):
        """Build the property object attached to the version class."""
        def relationship(obj):
            query = self.query(obj)
            return self.process_query(query)
        return relationship

    def association_subquery(self, obj):
        """EXISTS clause over the *versioned* association table for many-to-many.

        Restricts association rows to the newest version at or before *obj*'s
        transaction, excluding deleted links.
        """
        tx_column = option(obj, 'transaction_column_name')
        reflector = VersionExpressionReflector(obj, self.property)
        association_table_alias = self.association_version_table.alias()
        association_cols = [association_table_alias.c[association_col.name] for (_, association_col) in self.remote_to_association_column_pairs]
        association_exists = sa.exists(sa.select([1]).where(sa.and_((association_table_alias.c[tx_column] <= getattr(obj, tx_column)), *[(association_col == self.association_version_table.c[association_col.name]) for association_col in association_cols])).group_by(*association_cols).having((sa.func.max(association_table_alias.c[tx_column]) == self.association_version_table.c[tx_column])).correlate(self.association_version_table))
        return sa.exists(sa.select([1]).where(sa.and_(reflector(self.property.primaryjoin), association_exists, (self.association_version_table.c.operation_type != Operation.DELETE), adapt_columns(self.property.secondaryjoin))).correlate(self.local_cls, self.remote_cls))

    def build_association_version_tables(self):
        """Create (or look up) the version table mirroring the association table."""
        column = list(self.property.remote_side)[0]
        self.manager.association_tables.add(column.table)
        builder = TableBuilder(self.manager, column.table)
        metadata = column.table.metadata
        # Qualify the version table name with the appropriate schema, if any.
        if builder.parent_table.schema:
            table_name = ((builder.parent_table.schema + '.') + builder.table_name)
        elif metadata.schema:
            table_name = ((metadata.schema + '.') + builder.table_name)
        else:
            table_name = builder.table_name
        if (table_name not in metadata.tables):
            self.association_version_table = table = builder()
            self.manager.association_version_tables.add(table)
        else:
            # Version table already built for another relationship; reuse it.
            self.association_version_table = metadata.tables[table_name]

    def __call__(self):
        """Attach the reflected relationship to the parent's version class."""
        self.local_cls = version_class(self.model)
        self.versioned = False
        try:
            self.remote_cls = version_class(self.property.mapper.class_)
            self.versioned = True
        except (AttributeError, KeyError):
            # Remote side has no mapper/version info at all: nothing to attach.
            return
        except ClassNotVersioned:
            # Remote model exists but is not versioned; relate to the live class.
            self.remote_cls = self.property.mapper.class_
        if ((self.property.secondary is not None) and (not self.property.viewonly) and (not self.manager.is_excluded_property(self.model, self.property.key))):
            self.build_association_version_tables()
            # Pairs whose remote column lives on the association table.
            self.remote_to_association_column_pairs = []
            for column_pair in self.property.local_remote_pairs:
                if (column_pair[0] in self.property.target.c.values()):
                    self.remote_to_association_column_pairs.append(column_pair)
        setattr(self.local_cls, self.property.key, self.reflected_relationship)
class AttendeeViewSet(EventUserModelViewSet):
    """REST endpoint exposing Attendee records with event-scoped filtering."""
    queryset = Attendee.objects.all()
    serializer_class = AttendeeSerializer
    filter_fields = ('event_user__event__event_slug', 'is_installing', 'email_confirmed', 'event__event_slug')
    ordering_fields = ('created_at', 'updated_at', 'registration_date')

    def get_counts(self):
        """Return aggregate counts computed over the currently filtered attendees."""
        filtered = self.filter_queryset(self.get_queryset())
        return Attendee.objects.get_counts(filtered)
def extractHktranslationsWordpressCom(item):
    """Resolve a feed item to a release message via its tags.

    Returns None for previews / items without chapter info, False when no
    tracked tag matches, otherwise a release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_number = chp or vol
    if not has_number or 'preview' in item['title'].lower():
        return None
    # Insertion order preserved, so matching priority is unchanged.
    tag_to_series = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_to_series.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class SearchableSnapshotsStats(TelemetryDevice):
    """Telemetry device that periodically samples the searchable-snapshots stats API."""
    internal = False
    serverless_status = serverless.Status.Blocked
    command = 'searchable-snapshots-stats'
    human_name = 'Searchable Snapshots Stats'
    help = 'Regularly samples searchable snapshots stats'

    def __init__(self, telemetry_params, clients, metrics_store):
        """
        :param telemetry_params: user-supplied telemetry options; honours
            ``searchable-snapshots-stats-sample-interval`` (seconds, > 0) and
            ``searchable-snapshots-stats-indices`` (string or per-cluster dict).
        :param clients: cluster-name -> client mapping.
        :param metrics_store: sink for the sampled metrics.
        :raises exceptions.SystemSetupError: on a non-positive sample interval
            or an indices key that matches no configured cluster.
        """
        super().__init__()
        self.telemetry_params = telemetry_params
        self.clients = clients
        self.sample_interval = telemetry_params.get('searchable-snapshots-stats-sample-interval', 1)
        if (self.sample_interval <= 0):
            raise exceptions.SystemSetupError(f"The telemetry parameter 'searchable-snapshots-stats-sample-interval' must be greater than zero but was {self.sample_interval}.")
        self.specified_cluster_names = self.clients.keys()
        indices_per_cluster = self.telemetry_params.get('searchable-snapshots-stats-indices', None)
        # A bare string means "these indices on the default cluster".
        if isinstance(indices_per_cluster, str):
            self.indices_per_cluster = {opts.TargetHosts.DEFAULT: [indices_per_cluster]}
        else:
            self.indices_per_cluster = indices_per_cluster
        if self.indices_per_cluster:
            # Every configured cluster name must map to a known client.
            for cluster_name in self.indices_per_cluster.keys():
                if (cluster_name not in clients):
                    raise exceptions.SystemSetupError(f"The telemetry parameter 'searchable-snapshots-stats-indices' must be a JSON Object with keys matching the cluster names [{','.join(sorted(clients.keys()))}] specified in --target-hosts but it had [{cluster_name}].")
            # When indices are restricted, only sample those clusters.
            self.specified_cluster_names = self.indices_per_cluster.keys()
        self.metrics_store = metrics_store
        self.samplers = []

    def on_benchmark_start(self):
        """Spawn one daemon sampler thread per configured cluster."""
        for cluster_name in self.specified_cluster_names:
            recorder = SearchableSnapshotsStatsRecorder(cluster_name, self.clients[cluster_name], self.metrics_store, self.sample_interval, (self.indices_per_cluster[cluster_name] if self.indices_per_cluster else None))
            sampler = SamplerThread(recorder)
            self.samplers.append(sampler)
            # Daemonize so stray samplers cannot keep the process alive.
            sampler.daemon = True
            sampler.start()

    def on_benchmark_stop(self):
        """Signal every sampler thread to finish."""
        if self.samplers:
            for sampler in self.samplers:
                sampler.finish()
def add_images(qc_page, qc_dir, image_list, scene_file, wb_logging='WARNING', add_titles=False, title_formatter=None):
    """Render every image in *image_list* into *qc_dir* and reference it from *qc_page*.

    Optionally writes an <h4> title (formatted via *title_formatter*) before
    each picture.
    """
    for image in image_list:
        if add_titles and image.subject_title:
            heading = image.subject_title
            if title_formatter:
                heading = heading.format(**title_formatter)
            qc_page.write('<h4>{}</h4>\n'.format(heading))
        pic_name = '{}.png'.format(image.name)
        # Embed the picture reference, then actually render it to disk.
        write_image(qc_page, 12, pic_name, pic_name, '')
        image.make_image(os.path.join(qc_dir, pic_name), scene_file, logging=wb_logging)
class OptionSeriesErrorbarStatesInactive(Options):
    """Accessors for Highcharts ``series.errorbar.states.inactive``.

    NOTE(review): ``enabled``/``opacity`` appear as getter/setter pairs with
    identical names; such pairs are normally decorated (``@property`` /
    ``@<name>.setter``) in this codebase, and as written the second ``def``
    shadows the first — confirm the decorators were not lost.
    """
    def animation(self) -> 'OptionSeriesErrorbarStatesInactiveAnimation':
        """Sub-options controlling the inactive-state animation."""
        return self._config_sub_data('animation', OptionSeriesErrorbarStatesInactiveAnimation)
    def enabled(self):
        # Whether the inactive state is applied at all; default True.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def opacity(self):
        # Opacity of inactive series; default 0.2.
        return self._config_get(0.2)
    def opacity(self, num: float):
        self._config(num, js_type=False)
def filter_firewall_central_snat_map_data(json):
    """Project the incoming params dict down to the valid central-SNAT-map fields.

    Drops invalid fields via ``remove_invalid_fields()``, then keeps only the
    recognised option keys whose values are not None.

    :param json: raw module parameters for the firewall central-snat-map entry.
    :return: dict restricted to known, non-None attributes.
    """
    option_list = ['comments', 'dst_addr', 'dst_addr6', 'dst_port', 'dstintf', 'nat', 'nat_ippool', 'nat_ippool6', 'nat_port', 'nat46', 'nat64', 'orig_addr', 'orig_addr6', 'orig_port', 'policyid', 'protocol', 'srcintf', 'status', 'type', 'uuid']
    json = remove_invalid_fields(json)
    # Keep only recognised attributes that carry an actual value.
    return {attribute: json[attribute] for attribute in option_list if attribute in json and json[attribute] is not None}
class JsConsole():
    """Generator of JavaScript ``console`` statements (plus a few timing helpers)."""
    def __init__(self, page: primitives.PageModel=None):
        self.page = page

    def debugger(self):
        """Emit the ``debugger`` keyword (pauses execution when devtools are open)."""
        return JsObject.JsKeyword('debugger')

    def clear(self):
        """Emit ``console.clear()``."""
        return JsFncs.JsFunction('console.clear()')

    def log(self, data: Union[(str, primitives.JsDataModel)], js_conv_func: Optional[Union[(str, list)]]=None, skip_data_convert: bool=False):
        """Emit a ``console.log`` of *data*, converting Python values to JS unless told not to."""
        if skip_data_convert:
            # Caller guarantees *data* is already a JS expression string.
            return JsFncs.JsFunction(_CONSOLE_LOG_EXPR.format(data))
        if isinstance(data, list):
            # A list is treated as JS fragments, stringified and wrapped.
            data = JsUtils.jsWrap(JsUtils.jsConvertFncs(data, toStr=True))
        if hasattr(data, 'dom'):
            # Components log their DOM content rather than the component object.
            return JsFncs.JsFunction(_CONSOLE_LOG_EXPR.format(JsUtils.jsConvertData(data.dom.content, js_conv_func)))
        return JsFncs.JsFunction(_CONSOLE_LOG_EXPR.format(JsUtils.jsConvertData(data, js_conv_func)))

    def info(self, data: Union[(str, primitives.JsDataModel)], js_conv_func: Optional[Union[(str, list)]]=None):
        """Emit ``console.info``."""
        return JsFncs.JsFunction(('console.info(%s)' % JsUtils.jsConvertData(data, js_conv_func)))

    def warn(self, data: Union[(str, primitives.JsDataModel)], js_conv_func: Optional[Union[(str, list)]]=None):
        """Emit ``console.warn``."""
        return JsFncs.JsFunction(('console.warn(%s)' % JsUtils.jsConvertData(data, js_conv_func)))

    def error(self, data: Union[(str, primitives.JsDataModel)], js_conv_func: Optional[Union[(str, list)]]=None):
        """Emit ``console.error``."""
        return JsFncs.JsFunction(('console.error(%s)' % JsUtils.jsConvertData(data, js_conv_func)))

    def table(self, data: Union[(str, primitives.JsDataModel)], js_header: Optional[list]=None) -> JsFncs.JsFunction:
        """Emit ``console.table``, optionally restricting the displayed columns."""
        if (js_header is not None):
            return JsFncs.JsFunction(('console.table(%s, %s)' % (data, js_header)))
        return JsFncs.JsFunction(('console.table(%s)' % data))

    def time(self, html_code: Union[(str, primitives.JsDataModel)]) -> JsNumber.JsNumber:
        """Start a named ``console.time`` timer."""
        return JsNumber.JsNumber(("console.time('%s')" % html_code), is_py_data=False)

    def timeEnd(self, html_code: Union[(str, primitives.JsDataModel)]):
        """Stop a named timer and log the elapsed time."""
        return JsFncs.JsFunction(("console.timeEnd('%s')" % html_code))

    def _assert(self, data: Union[(str, primitives.JsDataModel)], info: str, js_conv_func: Optional[Union[(str, list)]]=None) -> JsFncs.JsFunction:
        """Emit ``console.assert`` (underscore-prefixed: ``assert`` is a Python keyword)."""
        return JsFncs.JsFunction(("console.assert(%s, '%s')" % (JsUtils.jsConvertData(data, js_conv_func), info)))

    def tryCatch(self, js_funcs: Union[(str, list)], js_funcs_errs: Union[(str, list)]='console.warn(err.message)', profile: Optional[Union[(dict, bool)]]=False):
        """Wrap *js_funcs* in a JS try/catch; the catch body defaults to a console warning."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        return JsFncs.JsFunction(('try{%s} catch(err){%s}' % (JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile), js_funcs_errs)))

    def perf(self, js_code: str, label: Optional[str]=None):
        """Log elapsed milliseconds since the ``performance.now()`` value held in *js_code*."""
        if (label is not None):
            return JsFncs.JsFunction(("console.log('%s' + (performance.now() - %s) + 'ms')" % (label, js_code)))
        return JsFncs.JsFunction(("console.log((performance.now() - %s) + 'ms')" % js_code))

    def service(self, msg: str, headers: Optional[dict]=None):
        """POST *msg* to the globally configured logging service, asynchronously.

        :raises ValueError: if no log service URL has been configured.
        """
        from epyk import LOG_SERVICE
        if (LOG_SERVICE is None):
            raise ValueError('Log service must be defined pk.LOG_SERVICE = <service_url>')
        return self.page.js.post(LOG_SERVICE, {'content': msg}, headers=headers, asynchronous=True)
def _safe_DIE_linkage_name(die, default=None):
if ('DW_AT_linkage_name' in die.attributes):
return bytes2str(die.attributes['DW_AT_linkage_name'].value)
elif ('DW_AT_name' in die.attributes):
return bytes2str(die.attributes['DW_AT_name'].value)
else:
return default |
def blms(x, d, N=4, L=4, mu=0.1):
nIters = (min(len(x), len(d)) // L)
u = np.zeros(((L + N) - 1))
w = np.zeros(N)
e = np.zeros((nIters * L))
for n in range(nIters):
u[:(- L)] = u[L:]
u[(- L):] = x[(n * L):((n + 1) * L)]
d_n = d[(n * L):((n + 1) * L)]
A = hankel(u[:L], u[(- N):])
e_n = (d_n - np.dot(A, w))
w = (w + ((mu * np.dot(A.T, e_n)) / L))
e[(n * L):((n + 1) * L)] = e_n
return e |
# NOTE(review): the line below looks like the argument list of a stripped
# ``@pytest.mark.skipif`` decorator — confirm the decorator name was not lost
# during extraction.
(((memory_usage is None) or (MAGICK_VERSION_INFO <= (6, 6, 9, 7))), reason='memory_usage is unavailable, or untestable')
def test_memory_leak():
    # NOTE(review): the bare quote below appears to be a collapsed docstring —
    # confirm the original text was not lost during extraction.
    '
    minimum = 1.0
    with Color('NONE') as nil_color:
        # One raw wand structure's size is the tolerated slack for the check.
        minimum = ctypes.sizeof(nil_color.raw)
    consumes = memory_usage((color_memory_leak, (), {}))
    # Memory growth over the run must stay within a single wand structure.
    assert ((consumes[(- 1)] - consumes[0]) <= minimum)
class _ASPPModule(nn.Module):
    """One atrous (dilated) convolution branch of an ASPP head: conv -> BN -> ReLU."""
    def __init__(self, inplanes, planes, kernel_size, padding, dilation, BatchNorm):
        # BatchNorm is injected (e.g. nn.BatchNorm2d or a synchronized variant)
        # so the module works under different normalisation backends.
        super(_ASPPModule, self).__init__()
        # bias=False: the following BatchNorm makes a conv bias redundant.
        self.atrous_conv = nn.Conv2d(inplanes, planes, kernel_size=kernel_size, stride=1, padding=padding, dilation=dilation, bias=False)
        self.bn = BatchNorm(planes)
        self.relu = nn.ReLU()
        # Project-level helper; presumably applies the usual weight init — confirm.
        init_weight(self)
    def forward(self, x):
        # conv -> batch-norm -> ReLU.
        x = self.relu(self.bn(self.atrous_conv(x)))
        return x
class Errors(commands.Cog):
    """Global command-error handler cog for the bot."""
    def __init__(self, bot):
        self.bot = bot
    # NOTE(review): the line below looks like a stripped
    # ``@commands.Cog.listener()`` decorator — confirm the decorator name was
    # not lost during extraction.
    .listener()
    async def on_command_error(self, inter, error):
        """Translate known command errors into user-facing replies; log the rest."""
        if isinstance(error, commands.NotOwner):
            # Localised "owner only" response for the invoking guild.
            (await inter.send(self.bot.response.get('not-owner', guild_id=inter.guild.id)))
        elif isinstance(error, commands.NoPrivateMessage):
            (await inter.send(self.bot.response.get('no-dm')))
        else:
            # Unknown errors: dump the traceback to stdout for debugging.
            traceback.print_tb(error.__traceback__)
            print(error)
class CursorProxy(wrapt.ObjectProxy):
    """APM-instrumenting proxy around a DB-API cursor.

    Wraps ``execute``/``executemany``/``callproc`` so each SQL statement is
    recorded as a database span; subclasses set ``provider_name`` and
    implement ``extract_signature``.
    """
    provider_name = None
    # Statement verbs for which a rows-affected count is recorded on the span.
    DML_QUERIES = ('INSERT', 'DELETE', 'UPDATE')

    def __init__(self, wrapped, destination_info=None) -> None:
        super(CursorProxy, self).__init__(wrapped)
        # The ``_self_`` prefix keeps the attribute on the proxy itself rather
        # than being forwarded to the wrapped cursor (wrapt convention).
        self._self_destination_info = (destination_info or {})

    def callproc(self, procname, params=None):
        """Trace a stored-procedure call."""
        return self._trace_sql(self.__wrapped__.callproc, procname, params, action=EXEC_ACTION)

    def execute(self, sql, params=None):
        """Trace a single statement execution."""
        return self._trace_sql(self.__wrapped__.execute, sql, params)

    def executemany(self, sql, param_list):
        """Trace a batched statement execution."""
        return self._trace_sql(self.__wrapped__.executemany, sql, param_list)

    def _bake_sql(self, sql):
        # Hook for subclasses to render non-string SQL objects; identity by default.
        return sql

    def _trace_sql(self, method, sql, params, action=QUERY_ACTION):
        """Run *method* inside a db span named after the SQL signature."""
        sql_string = self._bake_sql(sql)
        if (action == EXEC_ACTION):
            # Stored procedure call: the signature is just "name()".
            signature = (sql_string + '()')
        else:
            signature = self.extract_signature(sql_string)
        action = extract_action_from_signature(signature, action)
        # Truncate very long statements before attaching them to the span.
        sql_string = shorten(sql_string, string_length=10000)
        with capture_span(signature, span_type='db', span_subtype=self.provider_name, span_action=action, extra={'db': {'type': 'sql', 'statement': sql_string, 'instance': getattr(self, '_self_database', None)}, 'destination': self._self_destination_info}, skip_frames=1, leaf=True) as span:
            if (params is None):
                result = method(sql)
            else:
                result = method(sql, params)
            # Record affected-row counts for data-modifying statements only;
            # -1/None mean the driver did not report a count.
            if (span and (self.rowcount not in ((- 1), None)) and signature.startswith(self.DML_QUERIES)):
                span.update_context('db', {'rows_affected': self.rowcount})
            return result

    def extract_signature(self, sql):
        """Subclass hook: derive a short span name from the SQL text."""
        raise NotImplementedError()
class OptionSeriesColumnrangeSonificationContexttracksMappingPitch(Options):
    """Accessors for ``series.columnrange.sonification.contextTracks.mapping.pitch``.

    NOTE(review): the options below appear as getter/setter pairs sharing a
    name; such pairs are normally decorated (``@property`` /
    ``@<name>.setter``) in this codebase, and as written the second ``def``
    shadows the first — confirm the decorators were not lost.
    """
    def mapFunction(self):
        # Custom mapping function; no default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Data property the pitch is mapped to; default 'y'.
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Highest mapped note; default 'c6'.
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        # Lowest mapped note; default 'c2'.
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        # Musical scale constraining the notes; no default.
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        # Range the mapping is computed within; default 'yAxis'.
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the line below looks like the remnant of a stripped decorator
# (e.g. a PETSc logging ``EventDecorator``) — confirm the decorator name was
# not lost during extraction.
.EventDecorator()
def find_sub_block(iset, ises, comm):
    """Find which index sets in *ises*, concatenated, tile the front of *iset*.

    Greedily matches candidates against the prefix of ``iset.indices``; the
    acceptance decision is collective over *comm* (all ranks must agree), so
    every rank must call this with the same candidate ordering.

    :return: positions (into *ises*) of the matched candidates, in match order.
    :raises LookupError: if the target indices cannot be fully consumed.
    """
    found = []
    target_indices = iset.indices
    candidates = OrderedDict(enumerate(ises))
    with temp_internal_comm(comm) as icomm:
        while True:
            match = False
            # Iterate a snapshot since matched candidates are popped mid-loop.
            for (i, candidate) in list(candidates.items()):
                candidate_indices = candidate.indices
                (candidate_size,) = candidate_indices.shape
                (target_size,) = target_indices.shape
                # Local test: the candidate fits and equals the target's prefix.
                lmatch = ((candidate_size <= target_size) and numpy.array_equal(target_indices[:candidate_size], candidate_indices))
                # Accept only when every rank matched (collective logical AND).
                if icomm.allreduce(lmatch, op=MPI.LAND):
                    # Consume the matched prefix and retire the candidate.
                    target_indices = target_indices[candidate_size:]
                    found.append(i)
                    candidates.pop(i)
                    match = True
            if (not match):
                break
        # Leftover indices on any rank mean the tiling failed.
        if (icomm.allreduce(len(target_indices), op=MPI.SUM) > 0):
            raise LookupError(('Unable to find %s in %s' % (iset, ises)))
    return found
class DIN99o(Lab):
    """The DIN99o colour space, a Lab derivative, converted via Lab over XYZ D65."""
    BASE = 'xyz-d65'
    NAME = 'din99o'
    SERIALIZE = ('--din99o',)
    WHITE = WHITES['2deg']['D65']
    # Lightness 0..100; a/b nominally within ±55 (mirrored percent scaling).
    CHANNELS = (Channel('l', 0.0, 100.0), Channel('a', (- 55.0), 55.0, flags=FLG_MIRROR_PERCENT), Channel('b', (- 55.0), 55.0, flags=FLG_MIRROR_PERCENT))
    def to_base(self, coords: Vector) -> Vector:
        """Convert DIN99o coordinates to the base space (DIN99o -> Lab -> XYZ D65)."""
        return super().to_base(din99o_to_lab(coords))
    def from_base(self, coords: Vector) -> Vector:
        """Convert base coordinates into DIN99o (XYZ D65 -> Lab -> DIN99o)."""
        return lab_to_din99o(super().from_base(coords))
def test_hamming_weight_model_raises_exception_when_given_non_bytes_array():
    """HammingWeight must reject non-array inputs and arrays of the wrong dtype."""
    vm = scared.HammingWeight()
    # Non-array inputs (scalar, string) raise TypeError...
    with pytest.raises(TypeError):
        vm(1)
    with pytest.raises(TypeError):
        vm('barr')
    # ...while arrays with non-integer dtypes raise ValueError.
    with pytest.raises(ValueError):
        vm(np.array(['foo', 'barr']))
    with pytest.raises(ValueError):
        vm(np.array([564, 3.3]))
def train(node_id, dataset_provider: Callable[[], tf.data.Dataset], epochs=sys.maxsize):
    """Fit a trivial PrintLayer model over the provided dataset.

    :param node_id: identifier forwarded to PrintLayer for log attribution.
    :param dataset_provider: zero-argument factory returning the dataset to fit on.
    :param epochs: epoch budget; defaults to effectively forever.
    """
    import logging  # local import keeps the module's import surface unchanged
    model = tf.keras.Sequential([PrintLayer(node_id)])
    loss = tf.keras.losses.MeanSquaredError()
    model.compile(loss=loss)
    try:
        model.fit(dataset_provider(), epochs=epochs, verbose=2)
    except Exception:
        # Best-effort training: don't crash the caller, but record why the
        # fit stopped instead of swallowing the error silently.
        logging.getLogger(__name__).exception('training failed on node %s', node_id)
# NOTE(review): the six lines below look like pytest mark decorators with the
# ``@pytest.mark`` prefix stripped (slow / skipif / parametrize) — confirm the
# decorator names were not lost during extraction.
.slow
.skipif((not has_hf_transformers), reason='requires huggingface transformers')
.skipif((not has_torch_compile), reason='requires torch.compile')
.parametrize('torch_device', TORCH_DEVICES)
.parametrize('model', LLAMA_TEST_MODELS)
.parametrize('with_torch_sdp', [False, True])
def test_causal_lm_torch_compile(torch_device, model, with_torch_sdp):
    """Llama causal LM under torch.compile must match the HF reference output."""
    assert_causal_lm_output_equals_hf(LlamaCausalLM, model, torch_device, jit_method=JITMethod.TorchCompile, with_torch_sdp=with_torch_sdp)
def hash_fiat_element(element):
    """Build a hashable key (family, cell, degree, restriction) for a FIAT element.

    Wrapper elements are peeled so equivalent wrapped/unwrapped elements hash
    alike; a restriction covering every DOF is treated as no restriction.
    """
    restriction = None
    inner = element
    # Peel a DiscontinuousElement wrapper to reach the underlying element.
    if isinstance(inner, FIAT.DiscontinuousElement):
        inner = inner._element
    # Peel a RestrictedElement, remembering which indices it restricts to.
    if isinstance(inner, FIAT.RestrictedElement):
        restriction = tuple(inner._indices)
        inner = inner._element
        if len(restriction) == inner.space_dimension():
            # Restricting to every DOF is no restriction at all.
            restriction = None
    return (inner.__class__.__name__, element.ref_el, inner.order, restriction)
class AIFlowRpcServerException(AIFlowException):
    """Exception raised by the AIFlow RPC server, carrying an error code and extras."""
    def __init__(self, error_msg, error_code=INTERNAL_ERROR, **kwargs):
        # NOTE(review): this try/except cannot fire for a plain assignment — it
        # looks like a lost conversion/validation step on ``error_code`` (e.g.
        # an enum cast); confirm against history before changing.
        try:
            self.error_code = error_code
        except (ValueError, TypeError):
            self.error_code = INTERNAL_ERROR
        self.error_msg = error_msg
        # Extra keyword context, kept for serialisation by callers.
        self.json_kwargs = kwargs
        # NOTE(review): ``super(AIFlowException, self)`` deliberately (?) skips
        # AIFlowException.__init__ and calls its base directly — confirm.
        super(AIFlowException, self).__init__(error_msg)
class JavaUnitTest(object):
    """Description of a generated JUnit test for a Java OpenFlow class."""

    def __init__(self, java_class, file_name=None, test_class_name=None):
        self.java_class = java_class
        # Default data file, e.g. 'of13/FlowAdd.data', derived from the
        # protocol version and the class name minus its 3-char prefix.
        if file_name is not None:
            self.data_file_name = file_name
        else:
            self.data_file_name = 'of{version}/{name}.data'.format(version=java_class.version.dotless_version, name=java_class.c_name[3:])
        # Default test class name is the class under test plus 'Test'.
        self.test_class_name = test_class_name if test_class_name is not None else (self.java_class.name + 'Test')

    def package(self):
        """Java package of the class under test."""
        return self.java_class.package

    def name(self):
        """Name of the generated test class."""
        return self.test_class_name

    def interface(self):
        """Interface implemented by the class under test."""
        return self.java_class.interface

    def has_test_data(self):
        """True when a recorded wire-format sample exists for this class."""
        return test_data.exists(self.data_file_name)

    def test_data(self):
        """Load the recorded wire-format sample for this class."""
        return test_data.read(self.data_file_name)
# NOTE(review): the three lines below look like stripped click decorators
# (likely a ``@<group>.command('new')``, a ``@click.option(...)`` and a
# ``@click.argument('lang')``) — confirm the decorator names were not lost
# during extraction.
('new')
('--plugin', '-p', type=click.STRING, help='Adds a new language to a plugin.')
('lang')
def new_translation(lang, plugin):
    """Create a new translation language, optionally scoped to a plugin."""
    if plugin:
        # Fail early if the plugin does not exist before touching its catalogs.
        validate_plugin(plugin)
        click.secho('[+] Adding new language {} for plugin {}...'.format(lang, plugin), fg='cyan')
        add_plugin_translations(plugin, lang)
    else:
        click.secho('[+] Adding new language {}...'.format(lang), fg='cyan')
        add_translations(lang)
def main():
    """Ansible module entry point for the FortiOS firewall service-group module.

    Builds the argument spec from the versioned schema, connects to FortiOS
    over the httpapi socket, applies the requested state and exits with
    changed/diff information, warning on schema/version mismatches.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_service_group': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema-derived option into the sub-spec; the mkey ('name')
    # identifies the object and is therefore mandatory.
    for attribute_name in module_spec['options']:
        fields['firewall_service_group']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_service_group']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the live FortiOS version against the schema; mismatches
        # only produce warnings (below), not hard failures.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_service_group')
        # NOTE(review): dispatches to fortios_firewall_service, not a
        # *_service_group variant — confirm this is the intended handler.
        (is_error, has_changed, result, diff) = fortios_firewall_service(module.params, fos, module.check_mode)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def _kl_div(reference_data: pd.Series, current_data: pd.Series, feature_type: ColumnType, threshold: float, n_bins: int=30) -> Tuple[(float, bool)]:
    """Kullback-Leibler divergence drift metric.

    Bins both series into aligned histograms and returns the KL divergence
    of reference vs. current together with a flag telling whether it meets
    the drift threshold.
    """
    ref_percents, cur_percents = get_binned_data(reference_data, current_data, feature_type, n_bins)
    # stats.entropy(p, q) computes KL(p || q).
    value = stats.entropy(ref_percents, cur_percents)
    drift_detected = value >= threshold
    return value, drift_detected
def test_consent_request(db):
    """ConsentRequest rows reference a ProvidedIdentity and are removed with it.

    Creates two consent requests against one identity, checks the foreign
    keys, deletes the identity and verifies the dependent rows are gone.
    """
    provided_identity_data = {'privacy_request_id': None, 'field_name': 'email', 'encrypted_value': {'value': ''}}
    provided_identity = ProvidedIdentity.create(db, data=provided_identity_data)
    consent_request_1 = {'provided_identity_id': provided_identity.id}
    consent_1 = ConsentRequest.create(db, data=consent_request_1)
    consent_request_2 = {'provided_identity_id': provided_identity.id}
    consent_2 = ConsentRequest.create(db, data=consent_request_2)
    # Bug fix: the original asserted substring membership ("fk in id"),
    # which also passes when the FK is merely a substring of the id;
    # assert exact equality instead.
    assert (consent_1.provided_identity_id == provided_identity.id)
    assert (consent_2.provided_identity_id == provided_identity.id)
    provided_identity.delete(db)
    # Deleting the identity should cascade to the dependent records.
    # NOTE(review): rows were created via ConsentRequest but looked up via
    # Consent — confirm this cross-model lookup is intentional.
    assert (Consent.get(db, object_id=consent_1.id) is None)
    assert (Consent.get(db, object_id=consent_2.id) is None)
def test_collect_analysis_tags(backend_db, frontend_db):
    """Tags stored per analysis plugin are aggregated on the file object.

    Inserts a file object with two analyses ('foo', 'bar') carrying tags,
    then checks the frontend view exposes them grouped by plugin with the
    tag payloads intact.
    """
    tags1 = {'tag_a': {'color': 'success', 'value': 'tag a', 'propagate': True}, 'tag_b': {'color': 'warning', 'value': 'tag b', 'propagate': False}}
    tags2 = {'tag_c': {'color': 'success', 'value': 'tag c', 'propagate': True}}
    insert_test_fo(backend_db, 'fo1', analysis={'foo': generate_analysis_entry(tags=tags1), 'bar': generate_analysis_entry(tags=tags2)})
    fo = frontend_db.get_object('fo1')
    assert ('foo' in fo.analysis_tags)
    assert ('bar' in fo.analysis_tags)
    assert (set(fo.analysis_tags['foo']) == {'tag_a', 'tag_b'})
    assert (fo.analysis_tags['foo']['tag_a'] == tags1['tag_a'])
# NOTE(review): the line below is a stripped @patch('ecs_deploy.cli.get_client')
# decorator remnant from extraction.
('ecs_deploy.cli.get_client')
def test_update_task_exclusive_docker_labels(get_client, runner):
    """`update --exclusive-docker-labels` replaces all docker labels.

    The new label must be set and every pre-existing label ('foo', 'lorem')
    removed, producing a new task-definition revision.
    """
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.update, (TASK_DEFINITION_ARN_1, '-d', 'webserver', 'new-label', 'new-value', '--exclusive-docker-labels'))
    assert (result.exit_code == 0)
    assert (not result.exception)
    assert (u'Update task definition based on: test-task:1' in result.output)
    assert (u'Updating task definition' in result.output)
    assert (u'Changed dockerLabel "new-label" of container "webserver" to: "new-value"' in result.output)
    assert (u'Removed dockerLabel "foo" of container "webserver"' in result.output)
    assert (u'Removed dockerLabel "lorem" of container "webserver"' in result.output)
    assert (u'Successfully created revision: 2' in result.output)
class GTIN(models.Model):
    """Global Trade Item Number attached to an actual medicinal product pack (AMPP)."""
    class Meta():
        verbose_name = 'Global Trade Item Number'
    # The pack this GTIN identifies; stored in the legacy 'appid' column.
    ampp = models.ForeignKey(db_column='appid', to='AMPP', on_delete=models.CASCADE, help_text='AMPP')
    gtin = models.BigIntegerField(help_text='GTIN')
    # Validity window: enddt is NULL while the GTIN is still current.
    startdt = models.DateField(help_text='GTIN date')
    enddt = models.DateField(null=True, help_text='The date the GTIN became invalid')
# NOTE(review): the lines below are stripped click decorator remnants
# (command marker, two options, plus two shared option decorators).
()
('--duration', default=3, help='Run time in seconds.')
('--runtime_mode', default='async', help='Runtime mode: async or threaded.')
_of_runs_deco
_format_deco
def main(duration: int, runtime_mode: str, number_of_runs: int, output_format: str) -> Any:
    """Benchmark CLI entry point: run the workload N times and print results."""
    parameters = {'Duration(seconds)': duration, 'Runtime mode': runtime_mode, 'Number of runs': number_of_runs}
    def result_fn() -> List[Tuple[(str, Any, Any, Any)]]:
        # Deferred so print_results controls when the runs actually execute.
        return multi_run(int(number_of_runs), run, (duration, runtime_mode))
    return print_results(output_format, parameters, result_fn)
class EvpnNLRI(StringifyMixin, TypeDisp):
    """Base class for BGP EVPN NLRI (RFC 7432).

    Handles the common two-octet (route type, length) header; concrete
    route-type subclasses register via register_type() and implement value
    parsing/serialization. Shared field codecs (RD, ESI, Ethernet Tag, MAC,
    IP, MPLS label, VNI) live here.

    NOTE(review): register_type/_lookup_type_name/parser read as
    classmethods, the *_from_bin/_to_bin helpers as staticmethods, and
    prefix/formatted_nlri_str as properties — the decorators appear to have
    been stripped during extraction.
    """
    ROUTE_FAMILY = RF_L2_EVPN
    # Header: route type (1 octet) + length of the value field (1 octet).
    _PACK_STR = '!BB'
    _PACK_STR_SIZE = struct.calcsize(_PACK_STR)
    # EVPN route types (RFC 7432, Section 7).
    ETHERNET_AUTO_DISCOVERY = 1
    MAC_IP_ADVERTISEMENT = 2
    INCLUSIVE_MULTICAST_ETHERNET_TAG = 3
    ETHERNET_SEGMENT = 4
    IP_PREFIX_ROUTE = 5
    ROUTE_TYPE_NAME = None
    # Bug fix: this constant had no right-hand side (syntax error). The
    # Ethernet Tag ID is a 4-octet field, so its maximum value is 0xFFFFFFFF.
    MAX_ET = 0xFFFFFFFF
    _NAMES = {}
    NLRI_PREFIX_FIELDS = []

    def __init__(self, type_=None, length=None):
        if (type_ is None):
            # Derive the route type from the concrete subclass registration.
            type_ = self._rev_lookup_type(self.__class__)
        self.type = type_
        self.length = length
        self.route_dist = None

    def register_type(cls, type_):
        """Class decorator registering a subclass for a route type number."""
        cls._TYPES = cls._TYPES.copy()
        cls._NAMES = cls._NAMES.copy()
        def _register_type(subcls):
            cls._TYPES[type_] = subcls
            cls._NAMES[subcls.ROUTE_TYPE_NAME] = subcls
            cls._REV_TYPES = None
            return subcls
        return _register_type

    def _lookup_type_name(cls, type_name):
        """Map a route-type name to its subclass, defaulting to unknown."""
        try:
            return cls._NAMES[type_name]
        except KeyError:
            return EvpnUnknownNLRI

    def parser(cls, buf):
        """Parse one NLRI from buf; returns (instance, remaining bytes)."""
        (route_type, length) = struct.unpack_from(cls._PACK_STR, six.binary_type(buf))
        offset = (cls._PACK_STR_SIZE + length)
        subcls = cls._lookup_type(route_type)
        values = subcls.parse_value(buf[cls._PACK_STR_SIZE:offset])
        return (subcls(type_=route_type, length=length, **values), buf[offset:])

    def serialize_value(self):
        # Overridden by subclasses; the base carries no value payload.
        return b''

    def serialize(self):
        """Serialize header + value, refreshing self.length from the value."""
        value_bin = self.serialize_value()
        self.length = len(value_bin)
        return (struct.pack(EvpnNLRI._PACK_STR, self.type, self.length) + value_bin)

    # --- shared field codecs: each *_from_bin consumes its field from the
    # front of the buffer and returns (value, rest) -----------------------
    def _rd_from_bin(buf):
        return (_RouteDistinguisher.parser(buf[:8]), buf[8:])

    def _rd_to_bin(rd):
        return six.binary_type(rd.serialize())

    def _esi_from_bin(buf):
        return (EvpnEsi.parser(buf[:10]), buf[10:])

    def _esi_to_bin(esi):
        return esi.serialize()

    def _ethernet_tag_id_from_bin(buf):
        return (type_desc.Int4.to_user(six.binary_type(buf[:4])), buf[4:])

    def _ethernet_tag_id_to_bin(tag_id):
        return type_desc.Int4.from_user(tag_id)

    def _mac_addr_len_from_bin(buf):
        return (type_desc.Int1.to_user(six.binary_type(buf[:1])), buf[1:])

    def _mac_addr_len_to_bin(mac_len):
        return type_desc.Int1.from_user(mac_len)

    def _mac_addr_from_bin(buf, mac_len):
        # mac_len arrives in bits; convert to octets.
        mac_len //= 8
        return (addrconv.mac.bin_to_text(buf[:mac_len]), buf[mac_len:])

    def _mac_addr_to_bin(mac_addr):
        return addrconv.mac.text_to_bin(mac_addr)

    def _ip_addr_len_from_bin(buf):
        return (type_desc.Int1.to_user(six.binary_type(buf[:1])), buf[1:])

    def _ip_addr_len_to_bin(ip_len):
        return type_desc.Int1.from_user(ip_len)

    def _ip_addr_from_bin(buf, ip_len):
        return (ip.bin_to_text(buf[:ip_len]), buf[ip_len:])

    def _ip_addr_to_bin(ip_addr):
        return ip.text_to_bin(ip_addr)

    def _mpls_label_from_bin(buf):
        # Returns (label, rest, bottom-of-stack flag); a label is 3 octets.
        (mpls_label, is_bos) = mpls.label_from_bin(buf)
        rest = buf[3:]
        return (mpls_label, rest, is_bos)

    def _mpls_label_to_bin(label, is_bos=True):
        return mpls.label_to_bin(label, is_bos=is_bos)

    def _vni_from_bin(buf):
        return (vxlan.vni_from_bin(six.binary_type(buf[:3])), buf[3:])

    def _vni_to_bin(vni):
        return vxlan.vni_to_bin(vni)

    def prefix(self):
        """Human-readable 'route_type(field:value,...)' rendering."""
        def _format(i):
            pairs = []
            for k in i.NLRI_PREFIX_FIELDS:
                v = getattr(i, k)
                if (k == 'esi'):
                    pairs.append(('%s:%s' % (k, v.formatted_str)))
                else:
                    pairs.append(('%s:%s' % (k, v)))
            return ','.join(pairs)
        return ('%s(%s)' % (self.ROUTE_TYPE_NAME, _format(self)))

    def formatted_nlri_str(self):
        # NOTE(review): self.prefix is referenced without a call, consistent
        # with prefix being a @property in the original source.
        return ('%s:%s' % (self.route_dist, self.prefix))
class OptionSeriesBellcurveSonificationTracksMappingPitch(Options):
    """Highcharts `sonification.tracks.mapping.pitch` options for bellcurve series.

    NOTE(review): each getter/setter pair below shares a name; the original
    source presumably used @property / @<name>.setter decorators that were
    stripped during extraction.
    """

    def mapFunction(self):
        # No configured default: falls back to the library's mapping function.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Data property driving the pitch; defaults to 'y'.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Highest mapped note, default 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Lowest mapped note, default 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Range the mapping is computed within; defaults to the y axis.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
def test_Market():
    """Building a portfolio with a market index wires up the Market object.

    Uses ^GSPC (S&P 500) via yfinance and checks the market-relative metrics
    (beta, R-squared, Treynor ratio) are computed.
    """
    pf = build_portfolio(names=names_yf, pf_allocation=pf_allocation, start_date=start_date, end_date=end_date, data_api='yfinance', market_index='^GSPC')
    assert isinstance(pf.market_index, Market)
    assert (pf.market_index.name == '^GSPC')
    assert (pf.beta is not None)
    assert (pf.rsquared is not None)
    assert (pf.treynor is not None)
def filter_firewall_vipgrp6_data(json):
    """Project the payload down to the vipgrp6 attributes FortiOS accepts.

    Unknown fields are stripped by remove_invalid_fields, and only the
    whitelisted attributes with non-None values are kept.
    """
    allowed_attributes = ['color', 'comments', 'member', 'name', 'uuid']
    json = remove_invalid_fields(json)
    return {attr: json[attr] for attr in allowed_attributes if (attr in json) and (json[attr] is not None)}
class CookieSessionPipe(SessionPipe):
    """Session pipe storing the whole session, encrypted, in a cookie.

    The session dict is pickled, optionally zlib-compressed, and symmetrically
    encrypted with `key`; there is no server-side state, so sessions can only
    be invalidated by rotating the key.
    """

    def __init__(self, key, expire=3600, secure=False, samesite='Lax', domain=None, cookie_name=None, cookie_data=None, encryption_mode='modern', compression_level=0):
        super().__init__(expire=expire, secure=secure, samesite=samesite, domain=domain, cookie_name=cookie_name, cookie_data=cookie_data)
        self.key = key
        # Only the 'modern' scheme is implemented; reject anything else early.
        if (encryption_mode != 'modern'):
            raise ValueError('Unsupported encryption_mode')
        # 0 disables compression entirely (see the truthiness checks below).
        self.compression_level = compression_level

    def _encrypt_data(self) -> str:
        """Serialize the current session to an encrypted base64 string."""
        data = pickle.dumps(sdict(current.session))
        if self.compression_level:
            data = zlib.compress(data, self.compression_level)
        return crypto_symmetric.encrypt_b64(data, self.key)

    def _decrypt_data(self, data: str) -> SessionData:
        """Decrypt a cookie payload back into SessionData.

        Any failure (bad key, tampering, stale format) yields an empty
        session rather than an error.
        """
        try:
            ddata = crypto_symmetric.decrypt_b64(data, self.key)
            if self.compression_level:
                ddata = zlib.decompress(ddata)
            rv = pickle.loads(ddata)
        except Exception:
            rv = None
        return SessionData(rv, expires=self.expire)

    def _load_session(self, wrapper: IngressWrapper) -> SessionData:
        # KeyError on a missing cookie is expected to be handled by the caller.
        cookie_data = wrapper.cookies[self.cookie_name].value
        return self._decrypt_data(cookie_data)

    def _new_session(self) -> SessionData:
        return SessionData(expires=self.expire)

    def _session_cookie_data(self) -> str:
        return self._encrypt_data()

    def clear(self):
        # Stateless by design: nothing server-side to clear.
        raise NotImplementedError(f"{self.__class__.__name__} doesn't support sessions clearing. Change the 'key' parameter to invalidate existing ones.")
class PayrollManager(BaseManager):
    """Manager for Xero Payroll API endpoints.

    Binds the decorated CRUD methods for one endpoint name against the
    payroll base URL, carrying credentials and a pyxero user agent.
    """

    def __init__(self, name, credentials, unit_price_4dps=False, user_agent=None):
        # Imported here to avoid a circular import at module load time.
        from xero import __version__ as VERSION
        self.credentials = credentials
        self.name = name
        self.base_url = (credentials.base_url + XERO_PAYROLL_URL)
        # unitdp=4 asks Xero for 4-decimal-place unit prices.
        self.extra_params = ({'unitdp': 4} if unit_price_4dps else {})
        self.singular = singular(name)
        if (user_agent is None):
            self.user_agent = (('pyxero/%s ' % VERSION) + requests.utils.default_user_agent())
        else:
            self.user_agent = user_agent
        # Expose each decorated method (e.g. get/save/filter) as a bound,
        # data-wrapped public method on this instance.
        for method_name in self.DECORATED_METHODS:
            method = getattr(self, ('_%s' % method_name))
            setattr(self, method_name, self._get_data(method))
class OptionSeriesPyramid3dSonificationTracksMappingPlaydelay(Options):
    """Highcharts `sonification.tracks.mapping.playDelay` options for pyramid3d series.

    NOTE(review): each getter/setter pair below shares a name; the original
    source presumably used @property / @<name>.setter decorators that were
    stripped during extraction. All defaults are None (library defaults).
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def extractSugoitranslationsWordpressCom(item):
    """Parse a sugoitranslations.wordpress.com feed item into a release message.

    Returns None for previews or titles without a chapter/volume, False when
    no known tag matches, and a release message otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or ('preview' in item['title'].lower()):
        return None
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class MultiScaleImageFeatureExtractor(nn.Module):
    """Image feature extractor averaging backbone features over several scales.

    Supports torchvision ResNets ('res*' names), DINOv2 and DINO ViTs loaded
    from torch.hub. Input images are normalized with ImageNet statistics,
    run through the backbone at each scale factor, and the features averaged.
    """

    def __init__(self, modelname: str='dino_vits16', freeze: bool=False, scale_factors: list=[1, (1 / 2), (1 / 3)]):
        super().__init__()
        self.freeze = freeze
        self.scale_factors = scale_factors
        # Pick the backbone by name; output dim is read off the final layer.
        if ('res' in modelname):
            self._net = getattr(torchvision.models, modelname)(pretrained=True)
            self._output_dim = self._net.fc.weight.shape[1]
            # Drop the classification head; we want pooled features.
            self._net.fc = nn.Identity()
        elif ('dinov2' in modelname):
            # Checked before 'dino' since 'dinov2' names also contain 'dino'.
            self._net = torch.hub.load('facebookresearch/dinov2', modelname)
            self._output_dim = self._net.norm.weight.shape[0]
        elif ('dino' in modelname):
            self._net = torch.hub.load('facebookresearch/dino:main', modelname)
            self._output_dim = self._net.norm.weight.shape[0]
        else:
            raise ValueError(f'Unknown model name {modelname}')
        # Non-persistent buffers: follow .to(device) but stay out of state_dict.
        for (name, value) in (('_resnet_mean', _RESNET_MEAN), ('_resnet_std', _RESNET_STD)):
            self.register_buffer(name, torch.FloatTensor(value).view(1, 3, 1, 1), persistent=False)
        if self.freeze:
            for param in self.parameters():
                param.requires_grad = False

    def get_output_dim(self):
        """Dimensionality of the returned feature vectors."""
        return self._output_dim

    def forward(self, image_rgb: torch.Tensor) -> torch.Tensor:
        # assumes image_rgb is (batch, 3, H, W) in [0, 1] — TODO confirm
        img_normed = self._resnet_normalize_image(image_rgb)
        features = self._compute_multiscale_features(img_normed)
        return features

    def _resnet_normalize_image(self, img: torch.Tensor) -> torch.Tensor:
        """Apply ImageNet mean/std normalization channel-wise."""
        return ((img - self._resnet_mean) / self._resnet_std)

    def _compute_multiscale_features(self, img_normed: torch.Tensor) -> torch.Tensor:
        """Run the backbone at every scale factor and average the features."""
        multiscale_features = None
        if (len(self.scale_factors) <= 0):
            raise ValueError(f'Wrong format of self.scale_factors: {self.scale_factors}')
        for scale_factor in self.scale_factors:
            if (scale_factor == 1):
                inp = img_normed
            else:
                inp = self._resize_image(img_normed, scale_factor)
            if (multiscale_features is None):
                multiscale_features = self._net(inp)
            else:
                multiscale_features += self._net(inp)
        averaged_features = (multiscale_features / len(self.scale_factors))
        return averaged_features

    # NOTE(review): no `self` parameter — presumably a @staticmethod whose
    # decorator was stripped during extraction.
    def _resize_image(image: torch.Tensor, scale_factor: float) -> torch.Tensor:
        return nn.functional.interpolate(image, scale_factor=scale_factor, mode='bilinear', align_corners=False)
def split_item(item: Item, max_token_count: int) -> list[Item]:
    """Split an over-long Item into several Items with chunked content.

    Items with empty or short-enough content are returned as-is in a
    single-element list; otherwise each chunk keeps the original author,
    timestamp, domain and URL.

    NOTE(review): the guard compares *characters* of str(content) against a
    *token* count — confirm split_string_into_chunks uses the same unit.
    """
    if ((not item.content) or (len(str(item.content)) <= max_token_count)):
        return [item]
    else:
        return [Item(content=Content(str(chunk)), author=item.author, created_at=CreatedAt(item.created_at), domain=Domain(item.domain), url=Url(item.url)) for chunk in split_string_into_chunks(str(item.content), max_token_count)]
class Interaction(ABC):
    """Mixin wiring graphic features to events for interactive linkage.

    Subclasses provide `world_object`, `feature_events`, `registered_callbacks`
    and the graphics machinery; `link` registers a (target, feature, data)
    triple against an event type, and `_event_handler` applies it when the
    event fires.
    """

    def set_feature(self, feature: str, new_data: Any, indices: Any):
        # Overridden by concrete graphics; base implementation is a no-op.
        pass

    def reset_feature(self, feature: str):
        # Overridden by concrete graphics; base implementation is a no-op.
        pass

    def link(self, event_type: str, target: Any, feature: str, new_data: Any, callback: callable=None, bidirectional: bool=False):
        """Register: when `event_type` fires on self, set `feature` on `target`.

        Pygfx events are attached to the world object; feature events to the
        feature instance. Duplicate linkages are skipped with a warning.
        `bidirectional` registers the mirror linkage too (feature events only).
        """
        if (event_type in PYGFX_EVENTS):
            self.world_object.add_event_handler(self._event_handler, event_type)
        elif (event_type in self.feature_events):
            # Collections route through a slice so the whole collection's
            # feature instance is used.
            if isinstance(self, GraphicCollection):
                feature_instance = getattr(self[:], event_type)
            else:
                feature_instance = getattr(self, event_type)
            feature_instance.add_event_handler(self._event_handler)
        else:
            raise ValueError(f'Invalid event, valid events are: {(PYGFX_EVENTS + self.feature_events)}')
        if (feature is not None):
            if (feature not in target.feature_events):
                raise ValueError(f'Invalid feature for target, valid features are: {target.feature_events}')
        if (event_type not in self.registered_callbacks.keys()):
            self.registered_callbacks[event_type] = list()
        callback_data = CallbackData(target=target, feature=feature, new_data=new_data, callback_function=callback)
        # De-duplicate identical linkages.
        for existing_callback_data in self.registered_callbacks[event_type]:
            if (existing_callback_data == callback_data):
                warn('linkage already exists for given event, target, and data, skipping')
                return
        self.registered_callbacks[event_type].append(callback_data)
        if bidirectional:
            if (event_type in PYGFX_EVENTS):
                warn('cannot use bidirectional link for pygfx events')
                return
            # Mirror link; bidirectional=False prevents infinite recursion.
            target.link(event_type=event_type, target=self, feature=feature, new_data=new_data, callback=callback, bidirectional=False)

    def _event_handler(self, event):
        """Dispatch a fired event to every registered linkage for its type."""
        if (event.type in self.registered_callbacks.keys()):
            for target_info in self.registered_callbacks[event.type]:
                if (target_info.callback_function is not None):
                    # A custom callback takes over entirely for this linkage.
                    target_info.callback_function(source=self, target=target_info.target, event=event, new_data=target_info.new_data)
                elif isinstance(self, GraphicCollection):
                    # Work out which member of the collection the event hit.
                    if (event.type in self.feature_events):
                        indices = event.pick_info['collection-index']
                    else:
                        # NOTE(review): if no graphic's world object matches,
                        # `indices` stays unbound and the next line raises —
                        # confirm a match is guaranteed here.
                        for (i, item) in enumerate(self.graphics):
                            wo = WORLD_OBJECTS[item.loc]
                            if (wo is event.pick_info['world_object']):
                                indices = i
                    target_info.target.set_feature(feature=target_info.feature, new_data=target_info.new_data, indices=indices)
                else:
                    target_info.target.set_feature(feature=target_info.feature, new_data=target_info.new_data, indices=None)
('snakes.nets', depends=['snakes.plugins.labels'])
def extend(module):
class Transition(module.Transition):
def __init__(self, name, guard=None, **options):
mod = set(iterate(options.pop('modules', [])))
module.Transition.__init__(self, name, guard, **options)
self.modules(mod)
def modules(self, modules=None):
if (modules is None):
return self.label('modules')
else:
self.label(modules=set(iterate(modules)))
class Place(module.Place):
def __init__(self, name, tokens=[], check=None, **options):
mod = set(iterate(options.pop('modules', [])))
module.Place.__init__(self, name, tokens, check, **options)
self.modules(mod)
def modules(self, modules=None):
if (modules is None):
return self.label('modules')
else:
self.label(modules=set(iterate(modules)))
class PetriNet(module.PetriNet):
def __init__(self, name, **options):
mod = set(iterate(options.pop('modules', [])))
module.PetriNet.__init__(self, name, **options)
self.modules(mod)
def modules(self, modules=None):
if (modules is None):
return self.label('modules')
mod = set(iterate(modules))
self.label(modules=mod)
for node in self.node():
node.modules((mod | node.modules()))
def add_place(self, place, **options):
mod = set(iterate(options.pop('modules', self.modules())))
module.PetriNet.add_place(self, place, **options)
place.modules((place.modules() | mod))
def add_transition(self, trans, **options):
mod = set(iterate(options.pop('modules', self.modules())))
module.PetriNet.add_transition(self, trans, **options)
trans.modules((trans.modules() | mod))
def merge_places(self, target, sources, **options):
mod = set(iterate(options.pop('modules', self.modules())))
module.PetriNet.merge_places(self, target, sources, **options)
new = self.place(target)
new.modules(reduce(set.__or__, (self.place(p).modules() for p in sources), mod))
def merge_transitions(self, target, sources, **options):
mod = set(iterate(options.pop('modules', self.modules())))
module.PetriNet.merge_transitions(self, target, sources, **options)
new = self.transition(target)
new.modules(reduce(set.__or__, (self.place(p).modules() for p in sources), mod))
return (Transition, Place, PetriNet) |
class Distro(Base):
    """SQLAlchemy model for a Linux distribution, keyed by (case-insensitive) name.

    NOTE(review): by_name, all, search and get_or_create take ``cls, session``
    and read as @classmethod with stripped decorators.
    """
    __tablename__ = 'distros'
    name = sa.Column(sa.String(200), primary_key=True)

    def __init__(self, name):
        self.name = name

    def __json__(self):
        """JSON-serializable representation used by the API layer."""
        return dict(name=self.name)

    def by_name(cls, session, name):
        """Look up a distro by name, case-insensitively; None if absent."""
        query = session.query(cls).filter((sa.func.lower(cls.name) == sa.func.lower(name)))
        return query.first()
    # Alias: Distro.get(...) behaves like Distro.by_name(...).
    get = by_name

    def all(cls, session, page=None, count=False):
        """Return all distros ordered by name (paginated), or just the count."""
        query = session.query(cls).order_by(cls.name)
        query = _paginate_query(query, page)
        if count:
            return query.count()
        else:
            return query.all()

    def search(cls, session, pattern, page=None, count=False):
        """Case-insensitive LIKE search; '*' wildcards are translated to '%'."""
        if ('*' in pattern):
            pattern = pattern.replace('*', '%')
        query = session.query(cls).filter(sa.or_(sa.func.lower(cls.name).like(sa.func.lower(pattern)))).order_by(cls.name).distinct()
        query = _paginate_query(query, page)
        if count:
            return query.count()
        else:
            return query.all()

    def get_or_create(cls, session, name):
        """Fetch the named distro, creating and flushing it if missing."""
        distro = cls.by_name(session, name)
        if (not distro):
            distro = cls(name=name)
            session.add(distro)
            session.flush()
        return distro
class OptionSeriesBubbleDatalabelsTextpath(Options):
    """Highcharts `dataLabels.textPath` options for bubble series.

    NOTE(review): each getter/setter pair below shares a name; the original
    source presumably used @property / @<name>.setter decorators that were
    stripped during extraction.
    """

    def attributes(self):
        # SVG attributes applied to the text path; defaults to None.
        return self._config_get(None)

    def attributes(self, value: Any):
        self._config(value, js_type=False)

    def enabled(self):
        # Text-path rendering is disabled by default.
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class Z3Converter(BaseConverter):
    """Converts decompiler IR expressions into z3 terms and checks satisfiability.

    Variables and constants become bit-vectors (defaulting to 32 bits when
    the IR type carries no size); conditions become booleans. Sort coercion
    between BitVecRef and BoolRef is handled explicitly since z3 will not
    mix them.
    """

    def __init__(self, context=None):
        self._context = Context()

    def context(self) -> Context:
        return self._context

    def negate(self, expr: BoolRef) -> BoolRef:
        """Logical negation of a boolean term."""
        return Not(expr)

    def _convert_variable(self, variable: Variable, **kwargs) -> BitVecRef:
        # Fall back to 32 bits when the IR type has no explicit size.
        return BitVec(str(variable), (variable.type.size if variable.type.size else 32), ctx=self._context)

    def _convert_constant(self, constant: Constant, **kwargs) -> BitVecRef:
        return BitVecVal(constant.value, (constant.type.size if constant.type.size else 32), ctx=self._context)

    def _convert_branch(self, branch: Branch, **kwargs) -> BoolRef:
        """Convert a branch; a non-condition guard is treated as 'expr != 0'."""
        if isinstance(branch.condition, Condition):
            return self._convert_condition(branch.condition)
        return self._convert_condition(Condition(OperationType.not_equal, [branch.condition, Constant(0, branch.condition.type)]))

    def _convert_condition(self, condition: Condition, **kwargs) -> BoolRef:
        _operation = self._get_operation(condition)
        return self._ensure_bool_sort(_operation)

    def _convert_operation(self, operation: Operation, **kwargs) -> BitVecRef:
        _operation = self._get_operation(operation)
        return self._ensure_bitvec_sort(_operation)

    def _get_operation(self, operation: Operation) -> Union[(BoolRef, BitVecRef)]:
        """Convert operands, unify their sorts, and apply the matching z3 op."""
        operands = self._ensure_same_sort([self.convert(operand) for operand in operation.operands])
        # BoolRef operands need the boolean variants (And/Or/...) or a
        # lifted arithmetic fallback; plain bit-vectors use OPERATIONS.
        if (operands and isinstance(operands[0], BoolRef) and (operation.operation in self.OPERATIONS_BOOLREF)):
            converter = self.OPERATIONS_BOOLREF.get(operation.operation, None)
        elif (operands and isinstance(operands[0], BoolRef) and (operation.operation in self.OPERATIONS_INVALID_BOOLREF_OP)):
            converter = (lambda a, b: _convert_invalid_boolref_op(a, b, self.OPERATIONS_INVALID_BOOLREF_OP.get(operation.operation, None)))
        else:
            converter = self.OPERATIONS.get(operation.operation, None)
        if (not converter):
            raise ValueError(f'Could not convert operation {operation} into z3 logic.')
        return converter(*operands)

    def _ensure_same_sort(self, operands: List[ExprRef]) -> List[ExprRef]:
        """Coerce mixed operands to a single sort (bit-vector wins over bool)."""
        if any((is_bv(op) for op in operands)):
            operands = [self._ensure_bitvec_sort(operand) for operand in operands]
            operands = list(self._ensure_bv_size(operands))
        elif any((is_bool(op) for op in operands)):
            operands = [self._ensure_bool_sort(operand) for operand in operands]
        return operands

    def _ensure_bitvec_sort(self, expression: ExprRef) -> BitVecRef:
        """Represent a boolean as a 1-bit vector (true -> 1, false -> 0)."""
        if is_bv(expression):
            return expression
        if is_bool(expression):
            return If(expression, BitVecVal(1, 1, ctx=self._context), BitVecVal(0, 1, ctx=self._context), ctx=self._context)
        raise ValueError(f'Can not convert {expression}')

    def _ensure_bool_sort(self, expression: ExprRef) -> BoolRef:
        if is_bool(expression):
            return expression
        if is_bv(expression):
            # NOTE(review): compares against 1, not 0 — a bit-vector counts
            # as "true" exactly when it differs from 1. C-style truthiness
            # would be `!= 0`; confirm this inversion is intentional.
            return (expression != BitVecVal(1, expression.size(), ctx=self._context))
        raise ValueError(f'Can not convert {expression}')

    def _ensure_bv_size(self, operands: List[BitVecRef]) -> Iterator[BitVecRef]:
        """Zero-extend or truncate all operands to the first operand's width."""
        desired_size = operands[0].size()
        for operand in operands:
            operand_size = operand.size()
            if (operand_size == desired_size):
                (yield operand)
            elif (desired_size > operand_size):
                (yield ZeroExt((desired_size - operand_size), operand))
            else:
                (yield Extract((desired_size - 1), 0, operand))

    def check(self, *condition: BoolRef, timeout: int=2000) -> str:
        """Check conjunction satisfiability; returns SAT/UNSAT/UNKNOWN."""
        solver = Solver(ctx=self._context)
        solver.set('timeout', timeout)
        for term in condition:
            solver.add(term)
        result = repr(solver.check())
        if (result == 'unknown'):
            logging.warning(f'It could be that z3 was not able to check satisfiability for the given terms in {timeout}ms')
            return BaseConverter.UNKNOWN
        elif (result == 'unsat'):
            return BaseConverter.UNSAT
        return BaseConverter.SAT
    # Operation lookup tables: bit-vector ops, boolean-sorted variants, and
    # arithmetic ops that must be lifted before applying to booleans.
    LOGIC_OPERATIONS = {OperationType.bitwise_or, OperationType.bitwise_and}
    OPERATIONS = {OperationType.plus: (lambda a, b: (a + b)), OperationType.minus: (lambda a, b: (a - b)), OperationType.multiply: (lambda a, b: (a * b)), OperationType.divide: (lambda a, b: (a / b)), OperationType.modulo: (lambda a, b: (a % b)), OperationType.bitwise_xor: (lambda a, b: (a ^ b)), OperationType.bitwise_or: (lambda a, b: (a | b)), OperationType.bitwise_and: (lambda a, b: (a & b)), OperationType.logical_or: (lambda a, b: Or((a != 0), (b != 0))), OperationType.logical_and: (lambda a, b: And((a != 0), (b != 0))), OperationType.left_shift: (lambda a, b: (a << b)), OperationType.right_shift: (lambda a, b: (a >> b)), OperationType.right_shift_us: LShR, OperationType.left_rotate: RotateLeft, OperationType.right_rotate: RotateRight, OperationType.equal: (lambda a, b: (a == b)), OperationType.not_equal: (lambda a, b: (a != b)), OperationType.less: (lambda a, b: (a < b)), OperationType.less_or_equal: (lambda a, b: (a <= b)), OperationType.greater: (lambda a, b: (a > b)), OperationType.greater_or_equal: (lambda a, b: (a >= b)), OperationType.cast: (lambda a: a), OperationType.negate: (lambda a: (- a)), OperationType.logical_not: (lambda a: (~ a)), OperationType.divide_us: UDiv, OperationType.modulo_us: URem, OperationType.greater_us: UGT, OperationType.less_us: ULT, OperationType.greater_or_equal_us: UGE, OperationType.less_or_equal_us: ULE}
    OPERATIONS_BOOLREF = {OperationType.bitwise_and: And, OperationType.bitwise_xor: Xor, OperationType.bitwise_or: Or, OperationType.logical_not: Not, OperationType.negate: Not}
    OPERATIONS_INVALID_BOOLREF_OP = {OperationType.minus: operator.sub, OperationType.plus: operator.add, OperationType.multiply: operator.mul, OperationType.divide: operator.truediv, OperationType.left_shift: operator.lshift, OperationType.right_shift: operator.rshift, OperationType.modulo: operator.mod}
class PublicCountryISOCodeSensor(BaseSensor):
    """Sensor displaying the machine's public-IP country as an ISO code.

    Shells out to `curl ifconfig.co/country-iso` and caches the result for
    10 minutes to avoid hammering the service.
    """
    name = 'publiccountryiso'
    desc = _('Display your public country ISO code')
    command = 'curl ifconfig.co/country-iso'
    # Class-level cache of the last lookup result and its timestamp.
    current_country_iso = ''
    lasttime = 0

    def get_value(self, sensor):
        """Return the cached ISO code, refreshing it when older than 600 s."""
        if ((self.current_country_iso == '') or (self.lasttime == 0) or ((time.time() - self.lasttime) > 600)):
            self.current_country_iso = self.script_exec(self.command)
            self.lasttime = time.time()
        return self.current_country_iso
def exit_process(is_error=True, delayed=False):
    """Terminate the process, escalating from graceful to forced.

    Two daemon watchdog threads are started: after 3 s a KeyboardInterrupt
    is injected into the main thread, and after 6 s os._exit() force-kills
    the process in case the interrupt was swallowed. Unless `delayed` is
    set, SystemExit is also raised immediately in the calling thread.
    """
    from threading import Thread
    import _thread
    status = (1 if is_error else 0)
    # Escalation step 1: interrupt the main thread after a grace period.
    Thread(target=(lambda : (time.sleep(3), _thread.interrupt_main())), daemon=True).start()
    # Escalation step 2: hard exit, bypassing cleanup handlers entirely.
    Thread(target=(lambda : (time.sleep(6), os._exit(status))), daemon=True).start()
    if (not delayed):
        import sys
        sys.exit(status)
class OptionSeriesVennSonificationContexttracksMappingLowpassFrequency(Options):
    """Highcharts `sonification.contextTracks.mapping.lowpass.frequency` options for venn series.

    NOTE(review): each getter/setter pair below shares a name; the original
    source presumably used @property / @<name>.setter decorators that were
    stripped during extraction. All defaults are None (library defaults).
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class StackableSettings(Settings):
    """Settings for stackable box edges (feet nesting into matching holes)."""
    # Foot angle in degrees (absolute, not scaled by material thickness).
    absolute_params = {'angle': 60}
    # Dimensions expressed as multiples of the material thickness.
    relative_params = {'height': 2.0, 'width': 4.0, 'holedistance': 1.0, 'bottom_stabilizers': 0.0}

    def checkValues(self) -> None:
        """Reject foot angles outside the workable range [20, 260]."""
        if (self.angle < 20):
            raise ValueError("StackableSettings: 'angle' is too small. Use value >= 20")
        if (self.angle > 260):
            raise ValueError("StackableSettings: 'angle' is too big. Use value < 260")

    def edgeObjects(self, boxes, chars: str='sSsS', add: bool=True, fingersettings=None):
        """Create the four stackable edge variants and register them under `chars`."""
        fingersettings = (fingersettings or boxes.edges['f'].settings)
        edges = [StackableEdge(boxes, self, fingersettings), StackableEdgeTop(boxes, self, fingersettings), StackableFeet(boxes, self, fingersettings), StackableHoleEdgeTop(boxes, self, fingersettings)]
        return self._edgeObjects(edges, boxes, chars, add)
class FrameStacker():
    """Maintains a rolling window of the last `num_frames` observation frames.

    step() returns the buffered frames concatenated along the last axis;
    after a reset the first frame is replicated so the output depth is
    always num_frames.
    """

    def __init__(self, num_frames: int):
        self._num_frames = num_frames
        self.reset()

    def num_frames(self) -> int:
        # NOTE(review): reads as a stripped @property in the original source.
        return self._num_frames

    def reset(self):
        """Drop all buffered frames."""
        self._stack = collections.deque(maxlen=self._num_frames)

    def step(self, frame: np.ndarray) -> np.ndarray:
        """Push `frame` and return the stacked frames (last-axis concat)."""
        if not self._stack:
            # First frame after reset: pre-fill so output depth is constant.
            self._stack.extend([frame] * (self._num_frames - 1))
        self._stack.append(frame)
        return np.concatenate(self._stack, axis=-1)

    def update_spec(self, spec: dm_env_specs.Array) -> dm_env_specs.Array:
        """Return the observation spec with the last axis scaled by num_frames."""
        stacked_depth = spec.shape[(- 1)] * self._num_frames
        return dm_env_specs.Array(shape=spec.shape[:(- 1)] + (stacked_depth,), dtype=spec.dtype, name=spec.name)
# NOTE(review): the line below is a stripped @pytest.mark.parametrize
# decorator remnant from extraction.
.parametrize('box_size,log_level', [((1, 0.1, 0.1), 'WARNING'), ((0.1, 1, 0.1), 'WARNING'), ((0.1, 0.1, 1), 'WARNING')])
def test_sim_structure_extent(log_capture, box_size, log_level):
    """A structure reaching the simulation boundary should emit a WARNING.

    Each parametrized box extends past the 1x1x1 heat-simulation domain
    along one axis.
    """
    box = td.Structure(geometry=td.Box(size=box_size), medium=td.Medium(permittivity=2))
    _ = td.HeatSimulation(size=(1, 1, 1), structures=[box], grid_spec=td.UniformUnstructuredGrid(dl=0.1))
    assert_log_level(log_capture, log_level)
# NOTE(review): the three lines below are stripped click decorator remnants
# (command marker, 'paths' argument, shared command decorator) from extraction.
()
('paths', nargs=(- 1), type=click.Path(path_type=Path))
_command
def check(paths: List[Path]) -> int:
    """usort 'check' command: report files whose imports would be re-sorted.

    Returns a bitmask exit code: bit 0 set when any file errored, bit 1 set
    when any file is unsorted. Nothing is written (write=False).
    """
    if (not paths):
        raise click.ClickException('Provide some filenames')
    return_code = 0
    for result in usort_path(paths, write=False):
        if result.error:
            click.echo(f'Error sorting {result.path}: {result.error}')
            return_code |= 1
        for warning in result.warnings:
            click.echo(f'Warning at {result.path}:{warning.line} {warning.message}')
        # A difference between input and sorted output means the file needs sorting.
        if (result.content != result.output):
            click.echo(f'Would sort {result.path}')
            return_code |= 2
        print_benchmark(result.timings)
    return return_code
class InvoiceTest(QuickbooksTestCase):
    """End-to-end tests for the Invoice object against a QuickBooks sandbox."""

    def create_invoice(self, customer, request_id=None):
        """Save and return a minimal one-line invoice for *customer*."""
        detail = SalesItemLineDetail()
        detail.ItemRef = Item.all(max_results=1, qb=self.qb_client)[0].to_ref()

        line = SalesItemLine()
        line.LineNum = 1
        line.Description = 'description'
        line.Amount = 100
        line.SalesItemLineDetail = detail

        memo = CustomerMemo()
        memo.value = 'Customer Memo'

        invoice = Invoice()
        invoice.Line.append(line)
        invoice.CustomerRef = customer.to_ref()
        invoice.CustomerMemo = memo
        invoice.save(qb=self.qb_client, request_id=request_id)
        return invoice

    def test_query_by_customer_ref(self):
        """Raw SQL-style query filtered on CustomerRef returns that customer's invoices."""
        customer = Customer.all(max_results=1, qb=self.qb_client)[0]
        invoices = Invoice.query("select * from Invoice where CustomerRef = '{0}'".format(customer.Id), qb=self.qb_client)
        print(invoices[0].Line[0].LineNum)
        print(invoices[0].Line[0].Amount)
        self.assertEqual(invoices[0].CustomerRef.name, customer.DisplayName)

    def test_where(self):
        """The where() helper applies the same CustomerRef filter."""
        customer = Customer.all(max_results=1, qb=self.qb_client)[0]
        invoices = Invoice.where("CustomerRef = '{0}'".format(customer.Id), qb=self.qb_client)
        print(invoices[0])
        self.assertEqual(invoices[0].CustomerRef.name, customer.DisplayName)

    def test_create(self):
        """A saved invoice can be fetched back with all its fields intact."""
        customer = Customer.all(max_results=1, qb=self.qb_client)[0]
        created = self.create_invoice(customer)
        fetched = Invoice.get(created.Id, qb=self.qb_client)
        self.assertEqual(fetched.CustomerRef.name, customer.DisplayName)
        self.assertEqual(fetched.CustomerMemo.value, 'Customer Memo')
        self.assertEqual(fetched.Line[0].Description, 'description')
        self.assertEqual(fetched.Line[0].Amount, 100.0)

    def test_create_idempotence(self):
        """Two saves sharing one request id must resolve to the same invoice."""
        customer = Customer.all(max_results=1, qb=self.qb_client)[0]
        shared_request_id = str(uuid.uuid4())
        first = self.create_invoice(customer, request_id=shared_request_id)
        second = self.create_invoice(customer, request_id=shared_request_id)
        self.assertEqual(first.Id, second.Id)

    def test_delete(self):
        """A deleted invoice no longer appears in filter results."""
        customer = Customer.all(max_results=1, qb=self.qb_client)[0]
        invoice = self.create_invoice(customer)
        deleted_id = invoice.Id
        invoice.delete(qb=self.qb_client)
        self.assertEqual([], Invoice.filter(Id=deleted_id, qb=self.qb_client))

    def test_void(self):
        """Voiding zeroes the amounts and marks the private note."""
        customer = Customer.all(max_results=1, qb=self.qb_client)[0]
        invoice = self.create_invoice(customer)
        voided_id = invoice.Id
        invoice.void(qb=self.qb_client)
        voided = Invoice.get(voided_id, qb=self.qb_client)
        self.assertEqual(voided.Balance, 0.0)
        self.assertEqual(voided.TotalAmt, 0.0)
        self.assertIn('Voided', voided.PrivateNote)
def extractSolairemtlWordpressCom(item):
    """Release parser for 'Solairemtl Wordpress Com' feed items.

    Returns a release message for recognised tags, ``None`` for items with no
    volume/chapter info (or previews), and ``False`` when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.mark.slow
def test_on_init(hound, repo, cve):
    """Check CVE detection on the very first kernel commit.

    A CVE is expected to be present on ``v2.6.12-rc2`` only when its "fixes"
    commit IS that first commit; every other CVE must not be detected there.

    NOTE(review): the decorator was garbled in the source (a bare ``.slow``
    line); restored as ``pytest.mark.slow`` — assumes ``pytest`` is imported
    at module top.
    """
    fixes = hound.get_rule_fixes(cve)
    # v2.6.12-rc2 / 1da177e4c... are the tag and full hash of the first commit.
    detect = fixes in ('v2.6.12-rc2', '1da177e4c3f41524e886b7f1b8a0c1fc7321cac2')
    repo.git.checkout('--force', 'v2.6.12-rc2')
    try:
        if detect:
            assert hound.check_cve(cve), cve + ' on first commit'
        else:
            assert not hound.check_cve(cve), cve + ' on first commit'
    except UnsupportedVersion:
        pytest.skip('Unsupported spatch version')
class AnalysisPlugin(YaraBasePlugin):
    """Identify third-party software components (and their versions) via YARA rules."""

    NAME = 'software_components'
    DESCRIPTION = 'identify software components'
    MIME_BLACKLIST = MIME_BLACKLIST_NON_EXECUTABLE
    VERSION = '0.4.2'
    FILE = __file__

    def process_object(self, file_object):
        """Run the base-class YARA matching, then add version info, a summary, and OS tags."""
        file_object = super().process_object(file_object)
        analysis = file_object.processed_analysis[self.NAME]
        if len(analysis) > 1:  # at least one rule matched besides the bookkeeping entry
            analysis = self.add_version_information(analysis, file_object)
            analysis['summary'] = self._get_summary(analysis)
        self.add_os_key(file_object)
        return file_object

    def get_version(self, input_string: str, meta_dict: dict) -> str:
        """Extract a version number from *input_string*.

        A rule may ship its own ``version_regex`` in its YARA meta section;
        otherwise a generic ``<major>.<minor>[.<patch>]`` pattern is used.
        NOTE(review): the dots in the default pattern are unescaped and thus
        match any character — kept as-is to preserve existing matches.
        """
        if 'version_regex' in meta_dict:
            regex = meta_dict['version_regex'].replace('\\\\', '\\')  # undo YARA meta double-escaping
        else:
            regex = '\\d+.\\d+(.\\d+)?(\\w)?'
        pattern = re.compile(regex)
        version = pattern.search(input_string)
        if version is not None:
            return self._strip_leading_zeroes(version.group(0))
        return ''

    # FIX: this and the two helpers below are called through ``self`` with their
    # full argument lists (e.g. ``self._get_summary(analysis)``); without
    # @staticmethod those calls pass ``self`` as the first parameter and raise
    # TypeError. The decorators were evidently lost in extraction.
    @staticmethod
    def _get_summary(results: dict) -> list[str]:
        """Build a sorted '<software> <version>' summary from all rule matches."""
        summary = set()
        for key, result in results.items():
            if key != 'summary':
                software = result['meta']['software_name']
                for version in result['meta']['version']:
                    summary.add(f'{software} {version}')
        return sorted(summary)

    def add_version_information(self, results, file_object: FileObject):
        """Attach a list of detected versions to every rule match in *results*."""
        for item in results:
            if item != 'summary':
                results[item] = self.get_version_for_component(results[item], file_object)
        return results

    def get_version_for_component(self, result, file_object: FileObject):
        """Collect the version strings for a single rule match into ``result['meta']['version']``."""
        versions = set()
        for matched_string in result['strings']:
            match = matched_string[2]  # strings entries are (offset, identifier, data) triples
            match = make_unicode_string(match)
            versions.add(self.get_version(match, result['meta']))
        if result['meta'].get('format_string'):
            # Matched strings containing '%s' are only format templates;
            # let Ghidra recover the concrete values from the binary.
            key_strings = [s for _, _, s in result['strings'] if '%s' in s]
            if key_strings:
                versions.update(extract_data_from_ghidra(file_object.binary, key_strings, config.backend.docker_mount_base_dir))
        if '' in versions and len(versions) > 1:
            versions.remove('')  # drop the empty placeholder once a real version was found
        result['meta']['version'] = list(versions)
        return result

    def add_os_key(self, file_object):
        """Tag the file with OS (and OS version) based on summary entries matching known OS names."""
        for entry in file_object.processed_analysis[self.NAME]['summary']:
            for os_ in OS_LIST:
                if entry.find(os_) != -1:
                    if self._entry_has_no_trailing_version(entry, os_):
                        self.add_analysis_tag(file_object, 'OS', entry, TagColor.GREEN, True)
                    else:
                        self.add_analysis_tag(file_object, 'OS', os_, TagColor.GREEN, False)
                        self.add_analysis_tag(file_object, 'OS Version', entry, TagColor.GREEN, True)

    @staticmethod
    def _entry_has_no_trailing_version(entry, os_string):
        """True if *entry* is exactly the OS name, i.e. carries no version suffix."""
        return os_string.strip() == entry.strip()

    @staticmethod
    def _strip_leading_zeroes(version_string: str) -> str:
        """Normalize '01.02.3' -> '1.2.3' while keeping any non-digit prefix/suffix."""
        prefix, suffix = '', ''
        while version_string and version_string[0] not in string.digits:
            prefix += version_string[0]
            version_string = version_string[1:]
        while version_string and version_string[-1] not in string.digits:
            suffix = version_string[-1] + suffix
            version_string = version_string[:-1]
        elements = []
        for element in version_string.split('.'):
            try:
                elements.append(str(int(element)))
            except ValueError:
                elements.append(element)  # non-numeric chunk (e.g. '1a') is kept verbatim
        return prefix + '.'.join(elements) + suffix
def graphs_test1():
    """Build an (input, expected-output) CFG pair for array-access detection.

    Both graphs model a loop of the shape
    ``for (i = 0; i < arg2; i++) printf("...", arg1[i]);`` — block 0 is the
    thunk call, block 1 the phi + loop condition, block 2 the printf with the
    dereference of ``arg1 + (i << 2)``, block 3 the return.  The expected
    output is identical except that the dereference carries an
    ``ArrayInfo(base=arg1, index=i)`` annotation.

    Returns:
        tuple: ``(cfg, out_cfg)`` — the input graph and the expected result.
    """
    # Input CFG: plain dereference, no array info attached.  The walrus
    # assignments capture ``base``/``index`` for reuse in the expected graph.
    cfg = ControlFlowGraph()
    cfg.add_nodes_from((vertices := [BasicBlock(0, [Assignment(ListOperation([]), Call(imp_function_symbol('__x86.get_pc_thunk.bx'), [], Pointer(CustomType('void', 0), 32), 1))]), BasicBlock(1, [Phi(Variable('var_10', Integer(32, True), 2, False), [Constant(0, Integer(32, True)), Variable('var_10', Integer(32, True), 3, False)]), Branch(Condition(OperationType.less, [Variable('var_10', Integer(32, True), 2, False), Variable('arg2', Integer(32, True), 0, False)], CustomType('bool', 1)))]), BasicBlock(2, [Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(5216, Pointer(CustomType('void', 0), 32)), UnaryOperation(OperationType.dereference, [BinaryOperation(OperationType.plus, [(base := Variable('arg1', Pointer(Integer(32, True), 32), 0, False)), BinaryOperation(OperationType.left_shift, [(index := Variable('var_10', Integer(32, True), 2, False)), Constant(2, Integer(8, True))], Integer(32, True))], Pointer(CustomType('void', 0), 32))], Integer(32, True), None, False)], Pointer(CustomType('void', 0), 32), 3)), Assignment(Variable('var_10', Integer(32, True), 3, False), BinaryOperation(OperationType.plus, [Variable('var_10', Integer(32, True), 2, False), Constant(1, Integer(32, True))], Integer(32, True)))]), BasicBlock(3, [Return(ListOperation([Variable('var_10', Integer(32, True), 2, False)]))])]))
    # Edges form the loop: 0 -> 1, 1 -> 2 (true) / 3 (false), 2 -> 1 back edge.
    cfg.add_edges_from([UnconditionalEdge(vertices[0], vertices[1]), TrueCase(vertices[1], vertices[2]), FalseCase(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[1])])
    # Expected CFG: same structure; the dereference is annotated with ArrayInfo.
    out_cfg = ControlFlowGraph()
    out_cfg.add_nodes_from((vertices := [BasicBlock(0, [Assignment(ListOperation([]), Call(imp_function_symbol('__x86.get_pc_thunk.bx'), [], Pointer(CustomType('void', 0), 32), 1))]), BasicBlock(1, [Phi(Variable('var_10', Integer(32, True), 2, False), [Constant(0, Integer(32, True)), Variable('var_10', Integer(32, True), 3, False)]), Branch(Condition(OperationType.less, [Variable('var_10', Integer(32, True), 2, False), Variable('arg2', Integer(32, True), 0, False)], CustomType('bool', 1)))]), BasicBlock(2, [Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(5216, Pointer(CustomType('void', 0), 32)), UnaryOperation(OperationType.dereference, [BinaryOperation(OperationType.plus, [Variable('arg1', Pointer(Integer(32, True), 32), 0, False), BinaryOperation(OperationType.left_shift, [Variable('var_10', Integer(32, True), 2, False), Constant(2, Integer(8, True))], Integer(32, True))], Pointer(CustomType('void', 0), 32))], Integer(32, True), None, False, array_info=ArrayInfo(base, index, True))], Pointer(CustomType('void', 0), 32), 3)), Assignment(Variable('var_10', Integer(32, True), 3, False), BinaryOperation(OperationType.plus, [Variable('var_10', Integer(32, True), 2, False), Constant(1, Integer(32, True))], Integer(32, True)))]), BasicBlock(3, [Return(ListOperation([Variable('var_10', Integer(32, True), 2, False)]))])]))
    out_cfg.add_edges_from([UnconditionalEdge(vertices[0], vertices[1]), TrueCase(vertices[1], vertices[2]), FalseCase(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[1])])
    return (cfg, out_cfg)
class TestGetSpecificSystemUserManages:
    """Tests for GET /user/{user_id}/system-manager/{fides_key}."""

    # NOTE(review): the fixture decorator was garbled in the source (a bare
    # ``(scope='function')`` line, which is a SyntaxError); restored as
    # ``pytest.fixture`` — assumes ``pytest`` is imported at module top.
    @pytest.fixture(scope='function')
    def url(self, viewer_user, system) -> str:
        """Endpoint URL for the viewer user and the test system."""
        return V1_URL_PREFIX + f'/user/{viewer_user.id}/system-manager/{system.fides_key}'

    def test_get_system_managed_by_user_not_authenticated(self, api_client: TestClient, url: str) -> None:
        """No auth header -> 401."""
        resp = api_client.get(url, headers={})
        assert resp.status_code == HTTP_401_UNAUTHORIZED

    def test_get_system_managed_by_user_wrong_scope(self, api_client: TestClient, generate_auth_header, url):
        """A token lacking the system-manager read scope -> 403."""
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_READ])
        resp = api_client.get(url, headers=auth_header)
        assert resp.status_code == HTTP_403_FORBIDDEN

    def test_get_system_managed_by_self(self, api_client: TestClient, url, viewer_user, system, db) -> None:
        """A manager may read their own managed system even without explicit scopes."""
        viewer_user.set_as_system_manager(db, system)
        auth_header = generate_auth_header_for_user(viewer_user, [])
        resp = api_client.get(url, headers=auth_header)
        assert resp.status_code == HTTP_200_OK
        assert resp.json()['fides_key'] == system.fides_key

    def test_get_system_managed_by_other_user(self, api_client: TestClient, url, viewer_user, system, db) -> None:
        """A different, scopeless user cannot read another user's managed system."""
        viewer_user.set_as_system_manager(db, system)
        another_user = FidesUser.create(db=db, data={'username': 'another_user', 'password': '&%3Qe2fGo7'})
        client = ClientDetail(hashed_secret='thisisatest', salt='thisisstillatest', scopes=SCOPE_REGISTRY, user_id=another_user.id)
        db.add(client)
        db.commit()
        auth_header = generate_auth_header_for_user(another_user, [])
        resp = api_client.get(url, headers=auth_header)
        assert resp.status_code == HTTP_403_FORBIDDEN
        # Clean up the extra user/client so other tests see an unchanged DB.
        client.delete(db=db)
        another_user.delete(db)

    def test_get_system_managed_by_user_not_found(self, api_client: TestClient, generate_auth_header, url, system) -> None:
        """Unknown user id -> 404 with a descriptive detail message."""
        auth_header = generate_auth_header(scopes=[SYSTEM_MANAGER_READ])
        resp = api_client.get(V1_URL_PREFIX + f'/system-manager/{system.fides_key}'.join(['/user/bad_user', '']), headers=auth_header) if False else api_client.get((V1_URL_PREFIX + f'/user/bad_user/system-manager/{system.fides_key}'), headers=auth_header)
        assert resp.status_code == HTTP_404_NOT_FOUND
        assert resp.json()['detail'] == 'No user found with id bad_user.'

    def test_get_system_managed_by_user_system_does_not_exist(self, api_client: TestClient, generate_auth_header, url, viewer_user) -> None:
        """Unknown system key -> 404 with a descriptive detail message."""
        auth_header = generate_auth_header(scopes=[SYSTEM_MANAGER_READ])
        resp = api_client.get((V1_URL_PREFIX + f'/user/{viewer_user.id}/system-manager/bad_system'), headers=auth_header)
        assert resp.status_code == HTTP_404_NOT_FOUND
        assert resp.json()['detail'] == 'No system found with fides_key bad_system.'

    def test_get_system_not_managed_by_user(self, api_client: TestClient, generate_auth_header, url, viewer_user, system) -> None:
        """Existing user and system, but no manager relationship -> 404."""
        assert not viewer_user.systems
        auth_header = generate_auth_header(scopes=[SYSTEM_MANAGER_READ])
        resp = api_client.get(url, headers=auth_header)
        assert resp.status_code == HTTP_404_NOT_FOUND
        assert resp.json()['detail'] == f'User {viewer_user.id} is not a manager of system {system.fides_key}'

    def test_get_system_managed_by_user(self, api_client: TestClient, generate_auth_header, url, viewer_user, system, db) -> None:
        """Happy path: scoped reader fetches a system the user manages."""
        viewer_user.set_as_system_manager(db, system)
        auth_header = generate_auth_header(scopes=[SYSTEM_MANAGER_READ])
        resp = api_client.get(url, headers=auth_header)
        assert resp.status_code == HTTP_200_OK
        assert resp.json()['fides_key'] == system.fides_key
class get_config_reply(message):
    """OFPT_GET_CONFIG_REPLY message (generated loxi-style OpenFlow class).

    Wire layout: version(1) | type(1) | length(2) | xid(4) | flags(2) |
    miss_send_len(2).  ``version = 6`` is the OpenFlow wire version this
    module targets; ``type = 8`` identifies the get-config reply.
    """
    version = 6
    type = 8

    def __init__(self, xid=None, flags=None, miss_send_len=None):
        """Initialize fields; xid defaults to None (filled in by the sender), counters to 0."""
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.miss_send_len = miss_send_len if miss_send_len is not None else 0

    def pack(self):
        """Serialize to wire format; the length field is back-patched after packing."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!H', self.miss_send_len))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        # NOTE(review): ''.join over struct.pack results only works on
        # Python 2 (where bytes is str) — presumably legacy generated code.
        return ''.join(packed)

    # FIX: ``unpack`` takes no ``self`` and constructs its own instance; the
    # @staticmethod decorator (standard in this generated code) was lost in
    # extraction, without which ``obj.unpack(reader)`` misbinds the argument.
    @staticmethod
    def unpack(reader):
        """Parse a get_config_reply from *reader*; asserts version/type match."""
        obj = get_config_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 8)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's body (4 bytes already consumed).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.flags = reader.read('!H')[0]
        obj.miss_send_len = reader.read('!H')[0]
        return obj

    def __eq__(self, other):
        """Field-wise equality against another get_config_reply."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.flags != other.flags:
            return False
        if self.miss_send_len != other.miss_send_len:
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump onto pretty-printer *q*."""
        q.text('get_config_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid is not None:
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {0: 'OFPC_FRAG_NORMAL', 1: 'OFPC_FRAG_DROP', 2: 'OFPC_FRAG_REASM', 3: 'OFPC_FRAG_MASK'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('miss_send_len = ')
                q.text(('%#x' % self.miss_send_len))
        q.breakable()
        q.text('}')
class AbstractAgencyViewSet(AbstractSpendingByCategoryViewSet, metaclass=ABCMeta):
    """Base view set for 'spending by agency' category endpoints.

    Concrete subclasses set ``agency_type`` to pick the awarding/funding and
    toptier/subtier dimension used for grouping and metadata lookup.
    """

    # Dimension this endpoint groups by (awarding/funding x toptier/subtier).
    agency_type: AgencyType

    def build_elasticsearch_result(self, response: dict) -> List[dict]:
        """Turn the ES terms-aggregation buckets into result rows, enriching each
        agency code with id/abbreviation/name pulled from the database."""
        agency_info_buckets = response.get('group_by_agg_key', {}).get('buckets', [])
        code_list = [bucket.get('key') for bucket in agency_info_buckets if bucket.get('key')]
        current_agency_info = {}
        # Toptier and subtier agencies live in different tables; both queries are
        # annotated so the value names line up ('agency_code', 'id', 'code', 'name').
        if (self.agency_type in (AgencyType.AWARDING_TOPTIER, AgencyType.FUNDING_TOPTIER)):
            agency_info_query = ToptierAgency.objects.filter(toptier_code__in=code_list, agency__toptier_flag=True).annotate(id=F('agency__id'), agency_code=F('toptier_code'), code=F('abbreviation'))
        else:
            agency_info_query = SubtierAgency.objects.filter(subtier_code__in=code_list).annotate(id=F('agency__id'), agency_code=F('subtier_code'), code=F('abbreviation'))
        agency_info_query = agency_info_query.values('agency_code', 'id', 'code', 'name')
        for agency_info in agency_info_query.all():
            agency_code = agency_info.pop('agency_code')
            current_agency_info[agency_code] = agency_info
        results = []
        for bucket in agency_info_buckets:
            # Buckets without DB metadata still produce a row (with None fields).
            agency_info = (current_agency_info.get(bucket.get('key')) or {})
            # sum_field is an integer aggregate divided by Decimal('100') —
            # presumably amounts are indexed x100 to avoid floats; confirm
            # against the ES mapping.
            result = {'id': agency_info.get('id'), 'code': agency_info.get('code'), 'name': agency_info.get('name'), 'amount': (int(bucket.get('sum_field', {'value': 0})['value']) / Decimal('100'))}
            if (self.agency_type == AgencyType.AWARDING_TOPTIER):
                # Only agencies with a published DABS submission get a profile-page slug.
                submission = ToptierAgencyPublishedDABSView.objects.filter(agency_id=agency_info.get('id')).first()
                result['agency_slug'] = (slugify(agency_info.get('name')) if (submission is not None) else None)
            results.append(result)
        return results

    def query_django_for_subawards(self, base_queryset: QuerySet) -> List[dict]:
        """Subaward path: group by the agency name/abbreviation columns in the
        database (no ES), paginate, then resolve each row's agency id."""
        django_filters = {f'{self.agency_type.value}_agency_name__isnull': False}
        django_values = [f'{self.agency_type.value}_agency_name', f'{self.agency_type.value}_agency_abbreviation']
        queryset = self.common_db_query(base_queryset, django_filters, django_values).annotate(name=F(f'{self.agency_type.value}_agency_name'), code=F(f'{self.agency_type.value}_agency_abbreviation'))
        lower_limit = self.pagination.lower_limit
        upper_limit = self.pagination.upper_limit
        query_results = list(queryset[lower_limit:upper_limit])
        for row in query_results:
            is_subtier = ((self.agency_type == AgencyType.AWARDING_SUBTIER) or (self.agency_type == AgencyType.FUNDING_SUBTIER))
            # One id lookup per row of the (small) paginated page.
            row['id'] = fetch_agency_tier_id_by_agency(agency_name=row['name'], is_subtier=is_subtier)
            # Drop the raw grouping columns; 'name'/'code' annotations remain.
            row.pop(f'{self.agency_type.value}_agency_name')
            row.pop(f'{self.agency_type.value}_agency_abbreviation')
        return query_results
class StopOrNot(Struct):
    """'Stop or not' transport message: struct-packed fields wrapping a JSON payload."""

    # Packed size of the five struct fields: 4 + 4 + 4 + 4 + 5 bytes.
    CONST_SIZE = 21

    def __init__(self, stop: bool):
        super(StopOrNot, self).__init__('ospf.data.StopOrNot', (('stop_specific_1', 'I'), ('from_id_rev', '4s'), ('to_id_rev', '4s'), ('seq_number_rev', 'I'), ('stop_specific_2', '5s')))
        self.header = None  # populated by build()
        self.as_dict = {'\\/cs\\/transport\\/stop': stop}
        self.as_json = json.dumps(self.as_dict).encode('ascii')

    def to_raw_data(self) -> bytes:
        """Serialize: header bytes + packed struct fields + JSON payload."""
        return self.header.to_raw_data() + super().to_raw_data() + self.as_json

    def print(self, full=True) -> None:
        """Dump header, struct fields, and the JSON payload to stdout."""
        print('.stopOrNot')
        self.header.print()
        print(' ')
        super().print()
        print(self.as_json)

    # FIX: ``build`` takes no ``self`` and is used as a factory; the
    # @staticmethod decorator was evidently lost in extraction.
    @staticmethod
    def build(from_id: int, to_id: int, stop: bool) -> 'StopOrNot':
        """Construct a fully populated message, including CRC16 over the raw bytes."""
        data = StopOrNot(stop)
        size = Header.CONST_SIZE + StopOrNot.CONST_SIZE + len(data.as_json)
        data.header = Header.build(Data.TYPE, size, 0, from_id, to_id)
        # FIXME(review): the original value of this field was lost in the
        # source (the line read ``data._cfields['stop_specific_1'] =`` with no
        # right-hand side). 0 is a placeholder — recover the real constant
        # from version control before relying on the wire format.
        data._cfields['stop_specific_1'] = 0
        data._cfields['from_id_rev'] = from_id.to_bytes(4, 'little')
        data._cfields['to_id_rev'] = to_id.to_bytes(4, 'little')
        data._cfields['seq_number_rev'] = 0
        data._cfields['stop_specific_2'] = bytes([50, 12, 0, 52, 4])
        # CRC is computed over the complete serialized message.
        data.header._cfields['crc16'] = Crc16Arc.calc(data.to_raw_data())
        return data
def test_dt_output_shape(dummy_feats_and_labels, dummy_titanic):
    """Predictions from DecisionTree must be 1-D with one entry per input row."""
    feats, labels = dummy_feats_and_labels
    tree = DecisionTree()
    tree.fit(feats, labels)
    assert tree.predict(feats).shape == (feats.shape[0],), 'DecisionTree output should be same as training labels.'

    X_train, y_train, X_test, y_test = dummy_titanic
    tree = DecisionTree()
    tree.fit(X_train, y_train)
    assert tree.predict(X_train).shape == (X_train.shape[0],), 'DecisionTree output should be same as training labels.'
    assert tree.predict(X_test).shape == (X_test.shape[0],), 'DecisionTree output should be same as testing labels.'
def train(batch_size, model_path=None):
    """Fit the nonstationary GP module on depth-covariance data.

    If *model_path* is given, training resumes from that checkpoint;
    otherwise a fresh model is created.
    """
    data_module = DepthCovDataModule(batch_size)
    model = (
        NonstationaryGpModule()
        if model_path is None
        else NonstationaryGpModule.load_from_checkpoint(model_path)
    )
    # Persist the checkpoint with the best validation loss.
    checkpointing = ModelCheckpoint(
        monitor='loss_val',
        dirpath='./models/',
        filename='gp-{epoch:02d}-{loss_val:.4f}',
    )
    trainer = pl.Trainer(
        callbacks=[checkpointing],
        accumulate_grad_batches=1,
        gpus=1,
        limit_train_batches=0.01,  # use 1% of batches per epoch
        max_epochs=1000,
        limit_val_batches=0.01,
    )
    trainer.fit(model, data_module)
class SubBackendJITCython(SubBackendJIT):
    """JIT sub-backend that produces Cython headers."""

    def make_new_header(self, func, arg_types):
        """Build a header for *func*, seeded with its type-hint signatures
        (plus *arg_types*, unless it is the 'no types' sentinel)."""
        argument_names = list(inspect.signature(func).parameters.keys())
        header = HeaderFunction(
            name=func.__name__,
            arguments=argument_names,
            imports='import cython\n\nimport numpy as np\ncimport numpy as np\n',
        )
        for signature in make_signatures_from_typehinted_func(func, self.type_formatter, as_list_str=True):
            header.add_signature(signature)
        if arg_types != 'no types':
            header.add_signature(arg_types)
        return header

    def _load_old_header(self, path_backend_header):
        """Parse a previously written header file."""
        return HeaderFunction(path=path_backend_header)

    def _merge_header_objects(self, header, header_old):
        """Fold the signatures of *header_old* into *header* (mutates and returns it)."""
        header.update_with_other_header(header_old)
        return header

    def _make_header_code(self, header):
        """Render the header object to source code."""
        return header.make_code()
class OptionSeriesTreegraphSonificationContexttracksMappingLowpassResonance(Options):
    """Mapping options for the low-pass filter resonance of sonification context tracks.

    NOTE(review): each accessor was defined twice under the same name — the
    ``@property`` / ``@<name>.setter`` decorators were evidently lost during
    extraction (without them the setter definition silently shadows the
    getter).  Restored here following the standard getter/setter pattern of
    this options module.
    """

    @property
    def mapFunction(self):
        """Mapping function for the parameter (getter)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map the parameter to (getter)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value (getter)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value (getter)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within (getter)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def send_single_erasure_email(db: Session, subject_email: str, subject_name: str, batch_identities: List[str], test_mode: bool=False) -> None:
    """Dispatch one erasure-fulfillment notification email covering a batch of identities.

    In *test_mode* the subject line is prefixed with 'Test notification'
    instead of 'Notification'.
    """
    org_name = get_org_name(db)
    subject_prefix = 'Test notification' if test_mode else 'Notification'
    body_params = ErasureRequestBodyParams(
        controller=org_name,
        third_party_vendor_name=subject_name,
        identities=batch_identities,
    )
    dispatch_message(
        db=db,
        action_type=MessagingActionType.MESSAGE_ERASURE_REQUEST_FULFILLMENT,
        to_identity=Identity(email=subject_email),
        service_type=get_email_messaging_config_service_type(db=db),
        message_body_params=body_params,
        subject_override=f'{subject_prefix} of user erasure requests from {org_name}',
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.