code stringlengths 281 23.7M |
|---|
class Command(BaseCommand):
    """Django management command that creates an empty Delta Lake table.

    The table's database, name and CREATE TABLE DDL come from the TABLE_SPEC
    registry entry selected by --destination-table; --alt-db / --alt-name can
    override the spec's database and table name.
    """

    help = '\n This command creates an empty Delta Table based on the provided --destination-table argument.\n '

    def add_arguments(self, parser):
        # --destination-table is restricted to the keys registered in TABLE_SPEC.
        parser.add_argument('--destination-table', type=str, required=True, help='The destination Delta Table to write the data', choices=list(TABLE_SPEC))
        parser.add_argument('--spark-s3-bucket', type=str, required=False, default=CONFIG.SPARK_S3_BUCKET, help='The destination bucket in S3 to write the data')
        parser.add_argument('--alt-db', type=str, required=False, help='An alternate database (aka schema) in which to create this table, overriding the TABLE_SPEC db')
        parser.add_argument('--alt-name', type=str, required=False, help='An alternate delta table name for the created table, overriding the TABLE_SPEC destination_table name')

    def handle(self, *args, **options):
        # Session options enabling Delta Lake plus legacy datetime/int96
        # rebase behavior when writing parquet.
        extra_conf = {'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension', 'spark.sql.catalog.spark_catalog': 'org.apache.spark.sql.delta.catalog.DeltaCatalog', 'spark.sql.legacy.parquet.datetimeRebaseModeInWrite': 'LEGACY', 'spark.sql.legacy.parquet.int96RebaseModeInWrite': 'LEGACY', 'spark.sql.jsonGenerator.ignoreNullFields': 'false'}
        # Reuse an active Spark session if one exists; otherwise create one
        # and remember to stop it at the end (we own its lifecycle then).
        spark = get_active_spark_session()
        spark_created_by_command = False
        if (not spark):
            spark_created_by_command = True
            spark = configure_spark_session(**extra_conf, spark_context=spark)
        logger = get_jvm_logger(spark)
        destination_table = options['destination_table']
        spark_s3_bucket = options['spark_s3_bucket']
        table_spec = TABLE_SPEC[destination_table]
        # CLI overrides take precedence over the TABLE_SPEC defaults.
        destination_database = (options['alt_db'] or table_spec['destination_database'])
        destination_table_name = (options['alt_name'] or destination_table)
        logger.info(f'Using Spark Database: {destination_database}')
        spark.sql(f'create database if not exists {destination_database};')
        spark.sql(f'use {destination_database};')
        # Render the spec's CREATE TABLE DDL with the resolved names/paths.
        spark.sql(TABLE_SPEC[destination_table]['delta_table_create_sql'].format(DESTINATION_TABLE=destination_table_name, DESTINATION_DATABASE=destination_database, SPARK_S3_BUCKET=spark_s3_bucket, DELTA_LAKE_S3_PATH=CONFIG.DELTA_LAKE_S3_PATH))
        if spark_created_by_command:
            spark.stop()
def youtube_channel_details(key, channel_ids):
    """Fetch channel metadata from the YouTube Data API v3.

    Parameters
    ----------
    key : str
        YouTube Data API key.
    channel_ids : str
        Comma-separated channel ids; batched via _split_by_comma into
        chunks of 50 (the API's per-request maximum).

    Returns
    -------
    pandas.DataFrame
        One row per channel, with the snippet/statistics/contentDetails
        sub-objects flattened alongside the raw item columns.

    Raises
    ------
    Exception
        When the API responds with a status code >= 400 (body included).
    """
    # NOTE(review): the URL literal was truncated in the source
    # ("base_url = '"); restored to the documented channels endpoint.
    base_url = 'https://www.googleapis.com/youtube/v3/channels'
    details = ['snippet', 'statistics', 'contentDetails']
    channel_ids = _split_by_comma(channel_ids, length=50)
    final_df = pd.DataFrame()
    for channel_id in channel_ids:
        # 'part' is required for the nested detail sections consumed below
        # to be present in the response.
        params = {'id': channel_id, 'key': key, 'part': ','.join(details)}
        logging.info(msg=('Requesting: ' + 'channel details'))
        channel_resp = requests.get(base_url, params=params)
        if (channel_resp.status_code >= 400):
            raise Exception(channel_resp.json())
        # Parse the body once instead of re-parsing per detail section.
        payload = channel_resp.json()
        items = payload['items']
        items_df = pd.DataFrame(items)
        detail_df = pd.DataFrame()
        for detail in details:
            try:
                detail_df = pd.concat([detail_df, pd.DataFrame([x[detail] for x in items])], axis=1)
            except KeyError:
                # A channel may omit a detail section entirely; skip it.
                continue
        temp_df = pd.concat([items_df, detail_df], axis=1)
        final_df = pd.concat([final_df, temp_df], sort=False, ignore_index=True)
    return final_df
class DatatableColumn():
def __init__(self, title: str, slug: str, renderer_macro: str, *, ordering: (str | None)=None, renderer_macro_kwargs: (dict[(str, Any)] | None)=None) -> None:
self.title = title
self.slug = slug
self.ordering = ordering
self.renderer_macro = renderer_macro
self.renderer_macro_kwargs = (renderer_macro_kwargs if (renderer_macro_kwargs is not None) else {}) |
class ProductItemCommerceInsights(AbstractObject):
    """Graph API node holding commerce-insight metrics for a product item.

    SDK-generated style: field names live on the nested Field class and
    their wire types on _field_types.
    """

    def __init__(self, api=None):
        super(ProductItemCommerceInsights, self).__init__()
        self._isProductItemCommerceInsights = True
        self._api = api

    class Field(AbstractObject.Field):
        message_sends = 'message_sends'
        organic_impressions = 'organic_impressions'
        paid_impressions = 'paid_impressions'

    # Wire-format type of each declared field.
    _field_types = {'message_sends': 'unsigned int', 'organic_impressions': 'unsigned int', 'paid_impressions': 'unsigned int'}

    # NOTE(review): restored the @classmethod decorator lost in extraction
    # (first parameter is 'cls' and no instance state is used).
    @classmethod
    def _get_field_enum_info(cls):
        # No enum-typed fields on this node.
        field_enum_info = {}
        return field_enum_info
# NOTE(review): the decorator prefixes were truncated in the source to bare
# '.skipif' / '.parametrize' fragments (a syntax error); restored the
# standard '@pytest.mark.' prefix — confirm against the original file.
@pytest.mark.skipif((MID_MEMORY > memory), reason='Travis has too less memory to run it.')
@pytest.mark.parametrize('matrix', [matrix])
@pytest.mark.parametrize('outFileName', [outfile_aggregate_plots])
@pytest.mark.parametrize('BED', [BED])
@pytest.mark.parametrize('mode', ['intra-chr'])
@pytest.mark.parametrize('ran', ['50000:900000'])
@pytest.mark.parametrize('BED2', [BED2])
@pytest.mark.parametrize('numberOfBins', [30])
@pytest.mark.parametrize('transform', sorted(['none']))
@pytest.mark.parametrize('operationType', sorted(['sum', 'mean', 'median']))
@pytest.mark.parametrize('outFilePrefixMatrix', ['outFilePrefix'])
@pytest.mark.parametrize('outFileContactPairs', ['outFileContactPairs'])
@pytest.mark.parametrize('diagnosticHeatmapFile', [diagnosticHeatmapFile])
@pytest.mark.parametrize('kmeans', [4])
@pytest.mark.parametrize('hclust', [4])
@pytest.mark.parametrize('howToCluster', sorted(['full', 'center', 'diagonal']))
@pytest.mark.parametrize('chromosomes', ['X'])
@pytest.mark.parametrize('colorMap', ['RdYlBu_r'])
@pytest.mark.parametrize('plotType', sorted(['2d', '3d']))
@pytest.mark.parametrize('vMin', [0.01])
@pytest.mark.parametrize('vMax', [1.0])
def test_aggregate_contacts_two(capsys, matrix, outFileName, BED, mode, ran, BED2, numberOfBins, transform, operationType, outFilePrefixMatrix, outFileContactPairs, diagnosticHeatmapFile, kmeans, hclust, howToCluster, chromosomes, colorMap, plotType, vMin, vMax):
    """Run hicAggregateContacts end-to-end over the parametrized argument
    matrix and compare outputs via compute() with a tolerance of 5."""
    args = '--matrix {} --outFileName {} --BED {} --mode {} --range {} --BED2 {} --numberOfBins {} --transform {} --operationType {} --outFileContactPairs {} --kmeans {} --hclust {} --howToCluster {} --chromosomes {} --colorMap {} --plotType {} --vMin {} --vMax {} --disable_bbox_tight'.format(matrix, outFileName.name, BED, mode, ran, BED2, numberOfBins, transform, operationType, outFileContactPairs, kmeans, hclust, howToCluster, chromosomes, colorMap, plotType, vMin, vMax).split()
    compute(hicexplorer.hicAggregateContacts.main, args, 5)
    # Clean up the generated plot file.
    os.remove(outFileName.name)
class backtracking(dummy):
    """Backtracking line-search rule for choosing a solver step size.

    Repeatedly shrinks the candidate step by the factor ``eta`` until a
    sufficient-decrease condition holds, trial-running the solver and
    restoring its state after every trial.
    """

    def __init__(self, eta=0.5, **kwargs):
        # eta: multiplicative shrink factor applied on each failed trial;
        # must satisfy 0 < eta <= 1 per the check below.
        if ((eta > 1) or (eta <= 0)):
            raise ValueError('eta must be between 0 and 1.')
        self.eta = eta
        super(backtracking, self).__init__(**kwargs)

    def _update_step(self, solver, objective, niter):
        """Return a step size accepted by the backtracking condition.

        Snapshots all solver attributes, runs ``solver._algo()`` with
        progressively smaller steps, and rolls the solver back after each
        trial so the caller observes an unmodified solver state.
        """
        # Deep-copied snapshot: reference point for the decrease test and
        # the state restored after every trial run.
        properties = copy.deepcopy(vars(solver))
        logging.debug('(Begin) solver properties: {}'.format(properties))
        # Smooth objective value (fn) and gradient at the current solution,
        # accumulated over all smooth functions.
        fn = 0
        grad = np.zeros_like(properties['sol'])
        for f in solver.smooth_funs:
            fn += f.eval(properties['sol'])
            grad += f.grad(properties['sol'])
        step = properties['step']
        logging.debug('fn = {}'.format(fn))
        while True:
            # Trial: run one solver update with the candidate step size.
            solver.step = step
            logging.debug('Current step: {}'.format(step))
            solver._algo()
            logging.debug('(During) solver properties: {}'.format(vars(solver)))
            # Objective value at the trial point.
            fp = np.sum([f.eval(solver.sol) for f in solver.smooth_funs])
            logging.debug('fp = {}'.format(fp))
            dot_prod = np.dot((solver.sol - properties['sol']), grad)
            logging.debug('dot_prod = {}'.format(dot_prod))
            # Squared Euclidean distance between trial and reference points.
            norm_diff = np.sum(((solver.sol - properties['sol']) ** 2))
            logging.debug('norm_diff = {}'.format(norm_diff))
            # Roll the solver back before deciding; only the returned step
            # carries information out of this method.
            for (key, val) in properties.items():
                setattr(solver, key, copy.copy(val))
            logging.debug('(Reset) solver properties: {}'.format(vars(solver)))
            # Accept when 2*step*(fp - fn - <grad, dx>) <= ||dx||^2.
            if (((2.0 * step) * ((fp - fn) - dot_prod)) <= norm_diff):
                logging.debug('Break condition reached')
                break
            else:
                logging.debug('Decreasing step')
                step *= self.eta
        return step
def go():
    """Entry point: load settings, build the shared lock set, and dispatch
    to single- or multi-threaded operation based on the 'threads' setting."""
    print('AutoTreiver Launching!')
    settings = loadSettings()
    thread_count = 1
    if ('threads' in settings) and settings['threads']:
        thread_count = settings['threads']
        print('Have multithreading configuration directive!', thread_count)
    else:
        print('Running in single thread mode.')
    # Queue for auxiliary messages shared by all worker threads.
    settings['aux_message_queue'] = queue.Queue()
    # One lock per shared resource, handed to every worker.
    lock_dict = {
        'manager_lock': threading.Lock(),
        'seen_lock': threading.Lock(),
        'serialize_lock': threading.Lock(),
    }
    if thread_count == 1:
        launchThread(settings, lock_dict)
    else:
        multithread(thread_count, settings, lock_dict)
class PretrainedConfig():
    """Base class for model configuration objects.

    Handles JSON (de)serialization and loading a config from a local
    directory, a config-file path, or a remote model identifier.

    NOTE(review): the '@property' / '@name_or_path.setter' / '@classmethod'
    decorators were truncated in the source (the '_or_path.setter' fragment);
    restored below.
    """

    # Model-family identifier; subclasses override this.
    model_type: str = ''

    def __init__(self, **kwargs):
        self._name_or_path = str(kwargs.pop('name_or_path', ''))
        self.scale = None

    @property
    def name_or_path(self) -> str:
        """Name or path this config was loaded from (always a str)."""
        return self._name_or_path

    @name_or_path.setter
    def name_or_path(self, value):
        # Coerce to str so downstream path handling is uniform.
        self._name_or_path = str(value)

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path: Union[(str, os.PathLike)], **kwargs) -> 'PretrainedConfig':
        """Build a config from a model id, directory, or config-file path."""
        (config_dict, kwargs) = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
        # Warn when instantiating through a class whose model_type differs
        # from the one recorded in the loaded config.
        if (('model_type' in config_dict) and hasattr(cls, 'model_type') and (config_dict['model_type'] != cls.model_type)):
            logger.warning(f"You are using a model of type {config_dict['model_type']} to instantiate a model of type {cls.model_type}. This is not supported for all configurations of models and can yield errors.")
        return cls.from_dict(config_dict, **kwargs)

    def save_pretrained(self, save_directory: Union[(str, os.PathLike)]):
        """Write this config as CONFIG_NAME inside *save_directory*."""
        if os.path.isfile(save_directory):
            raise AssertionError(f'Provided path ({save_directory}) should be a directory, not a file')
        os.makedirs(save_directory, exist_ok=True)
        output_config_file = os.path.join(save_directory, CONFIG_NAME)
        self.to_json_file(output_config_file)
        logger.info(f'Configuration saved in {output_config_file}')

    @classmethod
    def get_config_dict(cls, pretrained_model_name_or_path: Union[(str, os.PathLike)], **kwargs) -> Tuple[(Dict[(str, Any)], Dict[(str, Any)])]:
        """Resolve and load the raw config dict; returns (dict, leftover kwargs)."""
        scale = kwargs.pop('scale', None)
        cache_dir = kwargs.pop('cache_dir', None)
        revision = kwargs.pop('revision', None)
        pretrained_model_name_or_path = str(pretrained_model_name_or_path)
        # Resolve: directory -> its CONFIG_NAME, file/URL -> as-is,
        # otherwise treat as a remote model identifier.
        if os.path.isdir(pretrained_model_name_or_path):
            config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME)
        elif (os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path)):
            config_file = pretrained_model_name_or_path
        else:
            config_file = get_model_url(pretrained_model_name_or_path, filename=CONFIG_NAME, revision=revision)
        try:
            resolved_config_file = get_model_path(config_file, cache_dir=cache_dir)
            config_dict = cls._dict_from_json_file(resolved_config_file)
            # An explicit scale kwarg overrides whatever the file contains.
            if (scale is not None):
                config_dict['scale'] = scale
        except EnvironmentError as err:
            logger.error(err)
            msg = f'''Can't load config for '{pretrained_model_name_or_path}'. Make sure that:
 - '{pretrained_model_name_or_path}' is a correct model identifier
 - or '{pretrained_model_name_or_path}' is the correct path to a directory containing a {CONFIG_NAME} file
'''
            raise EnvironmentError(msg)
        except json.JSONDecodeError:
            msg = f"Couldn't reach server at '{config_file}' to download configuration file or configuration file is not a valid JSON file. Please check network or file content here: {resolved_config_file}."
            raise EnvironmentError(msg)
        if (resolved_config_file == config_file):
            logger.info(f'loading configuration file {config_file}')
        else:
            logger.info(f'loading configuration file {config_file} from cache at {resolved_config_file}')
        return (config_dict, kwargs)

    @classmethod
    def from_dict(cls, config_dict: Dict[(str, Any)], **kwargs) -> Tuple[('PretrainedConfig', Dict[(str, Any)])]:
        """Instantiate from a dict; kwargs matching config attributes are
        applied as overrides and removed from the returned leftover kwargs."""
        config = cls(**config_dict)
        to_remove = []
        for (key, value) in kwargs.items():
            if hasattr(config, key):
                setattr(config, key, value)
                to_remove.append(key)
        for key in to_remove:
            kwargs.pop(key, None)
        logger.info(f'Model config {config}')
        return (config, kwargs)

    @classmethod
    def from_json_file(cls, json_file: Union[(str, os.PathLike)]) -> 'PretrainedConfig':
        """Instantiate directly from a JSON config file."""
        config_dict = cls._dict_from_json_file(json_file)
        return cls(**config_dict)

    @classmethod
    def _dict_from_json_file(cls, json_file: Union[(str, os.PathLike)]):
        with open(json_file, 'r', encoding='utf-8') as reader:
            text = reader.read()
        return json.loads(text)

    def to_dict(self) -> Dict[(str, Any)]:
        """Return a deep copy of the instance state plus the class model_type."""
        output = copy.deepcopy(self.__dict__)
        if hasattr(self.__class__, 'model_type'):
            output['model_type'] = self.__class__.model_type
        return output

    def to_json_string(self) -> str:
        """Serialize to a pretty-printed, key-sorted JSON string."""
        config_dict = self.to_dict()
        return (json.dumps(config_dict, indent=2, sort_keys=True) + '\n')

    def to_json_file(self, json_file_path: Union[(str, os.PathLike)]):
        """Write the JSON serialization to *json_file_path*."""
        with open(json_file_path, 'w', encoding='utf-8') as writer:
            # BUG FIX: the original wrote the bound method object
            # (self.to_json_string) instead of calling it.
            writer.write(self.to_json_string())
class NodeLookup(object):
    """SQLite-backed lookup of routing wires by tile and node.

    build_database ingests tile and node JSON dumps into tile/node/wire
    tables; the query helpers yield wire names local to a tile with the
    '<tile>/' prefix stripped.
    """

    def __init__(self, database):
        # database: path to the sqlite3 file (created if it does not exist).
        self.conn = sqlite3.connect(database)

    def build_database(self, nodes, tiles):
        """Populate the tile/node/wire tables.

        nodes: iterable of JSON5 file paths, one per routing node.
        tiles: mapping of tile_type -> iterable of tile file paths.
        """
        create_tables(self.conn)
        c = self.conn.cursor()
        # Flatten the per-type tile file lists into one list.
        tile_names = []
        for tile_type in tiles:
            for tile in tiles[tile_type]:
                tile_names.append(tile)
        # Map tile name -> its primary key, filled as tiles are inserted.
        tile_pkeys = {}
        for tile_file in progressbar.progressbar(tile_names):
            (root, _) = os.path.splitext(os.path.basename(tile_file))
            # Tile name is the basename with a fixed 5-char prefix removed
            # — presumably a 'tile_' filename prefix; TODO confirm.
            tile = root[5:]
            c.execute('INSERT INTO tile(name) VALUES (?);', (tile,))
            tile_pkeys[tile] = c.lastrowid
        nodes_processed = set()
        for node in progressbar.progressbar(nodes):
            with OpenSafeFile(node) as f:
                node_wires = json5.load(f)
                # Each node file must describe a node not seen before.
                assert (node_wires['node'] not in nodes_processed)
                nodes_processed.add(node_wires['node'])
                c.execute('INSERT INTO node(name) VALUES (?);', (node_wires['node'],))
                node_pkey = c.lastrowid
                for wire in node_wires['wires']:
                    # Wire names look like '<tile>/<wire>'; link each wire
                    # to its containing tile and owning node.
                    tile = wire['wire'].split('/')[0]
                    tile_pkey = tile_pkeys[tile]
                    c.execute('\nINSERT INTO wire(name, tile_pkey, node_pkey) VALUES (?, ?, ?);', (wire['wire'], tile_pkey, node_pkey))
        self.conn.commit()
        # Indexes are created after the bulk insert, which is faster than
        # maintaining them during insertion.
        c = self.conn.cursor()
        c.execute('CREATE INDEX tile_names ON tile(name);')
        c.execute('CREATE INDEX node_names ON node(name);')
        c.execute('CREATE INDEX wire_node_tile ON wire(node_pkey, tile_pkey);')
        c.execute('CREATE INDEX wire_tile ON wire(tile_pkey);')
        self.conn.commit()

    def site_pin_node_to_wires(self, tile, node):
        """Yield names of wires in *tile* belonging to *node*, with the
        '<tile>/' prefix stripped. Yields nothing when node is None."""
        if (node is None):
            return
        c = self.conn.cursor()
        c.execute('\nWITH\n  the_tile(tile_pkey) AS (SELECT pkey AS tile_pkey FROM tile WHERE name = ?),\n  the_node(node_pkey) AS (SELECT pkey AS node_pkey FROM node WHERE name = ?)\nSELECT wire.name FROM wire\n  INNER JOIN the_tile ON the_tile.tile_pkey = wire.tile_pkey\n  INNER JOIN the_node ON the_node.node_pkey = wire.node_pkey;\n', (tile, node))
        for row in c:
            # Strip '<tile>/' (tile name plus the slash) from the wire name.
            (yield row[0][(len(tile) + 1):])

    def wires_for_tile(self, tile):
        """Yield all wire names in *tile*, with the '<tile>/' prefix stripped."""
        c = self.conn.cursor()
        c.execute('\nWITH\n  the_tile(tile_pkey) AS (SELECT pkey AS tile_pkey FROM tile WHERE name = ?)\nSELECT wire.name FROM wire\n  INNER JOIN the_tile ON the_tile.tile_pkey = wire.tile_pkey;\n', (tile,))
        for row in c:
            (yield row[0][(len(tile) + 1):])
def exposed_update_from_dead_netlocs():
    """Read bad_urls.json and comment out netlocs whose probe result marks
    them dead, printing each line that was changed.

    Dead means HTTP 410 (gone), 404 (not found), or the scraper's -1
    sentinel (no response at all).
    """
    with open('bad_urls.json', 'r') as fp:
        items = json.load(fp)
    # Consolidated the three identical per-code branches of the original
    # into a single membership test.
    dead_codes = {410, 404, -1}
    moved = []
    for (key, value) in items.items():
        if value.get('code') in dead_codes:
            moved.append(comment_netloc(key))
    print('New dead: ')
    for line in moved:
        # comment_netloc may return a falsy value when nothing was changed.
        if line:
            print(line)
def test_run_bench(tmp_path, monkeypatch):
    """End-to-end check of the 'run-bench' CLI command.

    Running it should enqueue exactly one compile-bench job for the 'mac'
    worker and materialize a request directory containing the expected
    generated files.
    """
    def dummy(*args, **kwargs):
        # Stub out the 'attach' command so the test does not block on it.
        return
    monkeypatch.setitem(__main__.COMMANDS, 'attach', dummy)
    __main__._parse_and_main([*helpers.setup_temp_env(tmp_path), 'run-bench', '--worker', 'mac', '--benchmarks', 'deepcopy', 'caf63ec5'], __file__)
    # Exactly one job must have been appended to the mac worker's queue.
    queue = json.loads(((((tmp_path / 'BENCH') / 'QUEUES') / 'mac') / 'queue.json').read_text())
    (reqid,) = queue['jobs']
    assert reqid.endswith('-mac')
    assert reqid.startswith('req-compile-bench')
    # A single request directory named after the request id must exist...
    (reqdir,) = list(((tmp_path / 'BENCH') / 'REQUESTS').iterdir())
    assert (reqdir.name == reqid)
    # ...containing the full set of generated artifacts.
    files = sorted((x.name for x in reqdir.iterdir()))
    assert (['benchmarks.manifest', 'pyperformance.ini', 'request.json', 'results.json', 'run.sh', 'send.sh'] == files)
# NOTE(review): the decorators were truncated in the source to bare
# '.feature(...)' / '.story(...)' fragments (a syntax error); restored
# assuming the pytest-allure adaptor — confirm against the original file.
@pytest.allure.feature('unit')
@pytest.allure.story('services', 'core', 'scheduler')
class TestSchedulerEntities():
    """Unit tests for the scheduler entity classes: default attribute
    values, toDict serialization, and per-subclass schedule types."""

    def test_scheduled_process(self):
        scheduled_process = ScheduledProcess()
        assert (scheduled_process.name is None)
        assert (scheduled_process.script is None)

    def test_schedule(self):
        assert isinstance(Schedule.Type(2), IntEnum)
        schedule = Schedule(Schedule.Type.STARTUP)
        assert (schedule.schedule_id is None)
        assert (schedule.name is None)
        assert (schedule.exclusive is True)
        assert (schedule.enabled is False)
        assert (schedule.repeat is None)
        assert (schedule.process_name is None)
        assert (schedule.schedule_type == 1)

    def test_schedule_todict(self):
        schedule = Schedule(Schedule.Type.STARTUP)
        schedule.name = 'test'
        schedule.process_name = 'test'
        schedule.repeat = datetime.timedelta(seconds=30)
        schedule.enabled = True
        schedule.exclusive = False
        # repeat serializes to total seconds.
        schedule_json = {'name': 'test', 'type': 1, 'processName': 'test', 'repeat': 30, 'enabled': True, 'exclusive': False}
        assert (schedule_json == schedule.toDict())

    def test_startup_schedule(self):
        startup_schedule = StartUpSchedule()
        assert (startup_schedule.schedule_id is None)
        assert (startup_schedule.name is None)
        assert (startup_schedule.exclusive is True)
        assert (startup_schedule.enabled is False)
        assert (startup_schedule.repeat is None)
        assert (startup_schedule.process_name is None)
        assert (startup_schedule.schedule_type == 1)
        # Startup schedules must not expose day/time attributes.
        with pytest.raises(AttributeError):
            assert (startup_schedule.day is None)
            assert (startup_schedule.time is None)

    def test_timed_schedule(self):
        timed_schedule = TimedSchedule()
        assert (timed_schedule.schedule_id is None)
        assert (timed_schedule.name is None)
        assert (timed_schedule.exclusive is True)
        assert (timed_schedule.enabled is False)
        assert (timed_schedule.repeat is None)
        assert (timed_schedule.process_name is None)
        assert (timed_schedule.schedule_type == 2)
        # Timed schedules do expose day/time (default None).
        assert (timed_schedule.day is None)
        assert (timed_schedule.time is None)

    def test_timed_schedule_todict(self):
        schedule = TimedSchedule()
        schedule.name = 'test'
        schedule.process_name = 'test'
        schedule.repeat = datetime.timedelta(seconds=30)
        schedule.enabled = True
        schedule.exclusive = False
        schedule.day = 3
        schedule.time = datetime.time(hour=5, minute=22, second=25)
        schedule_json = {'name': 'test', 'type': 2, 'processName': 'test', 'repeat': 30, 'day': 3, 'time': '5:22:25', 'enabled': True, 'exclusive': False}
        assert (schedule_json == schedule.toDict())

    def test_interval_schedule(self):
        interval_schedule = IntervalSchedule()
        assert (interval_schedule.schedule_id is None)
        assert (interval_schedule.name is None)
        assert (interval_schedule.exclusive is True)
        assert (interval_schedule.enabled is False)
        assert (interval_schedule.repeat is None)
        assert (interval_schedule.process_name is None)
        assert (interval_schedule.schedule_type == 3)
        with pytest.raises(AttributeError):
            assert (interval_schedule.day is None)
            assert (interval_schedule.time is None)

    def test_manual_schedule(self):
        manual_schedule = ManualSchedule()
        assert (manual_schedule.schedule_id is None)
        assert (manual_schedule.name is None)
        assert (manual_schedule.exclusive is True)
        assert (manual_schedule.enabled is False)
        assert (manual_schedule.repeat is None)
        assert (manual_schedule.process_name is None)
        assert (manual_schedule.schedule_type == 4)
        with pytest.raises(AttributeError):
            assert (manual_schedule.day is None)
            assert (manual_schedule.time is None)

    def test_task(self):
        assert isinstance(Task.State(2), IntEnum)
        task = Task()
        assert (task.task_id is None)
        assert (task.process_name is None)
        assert (task.reason is None)
        assert (task.state is None)
        assert (task.cancel_requested is None)
        assert (task.start_time is None)
        assert (task.end_time is None)
        assert (task.exit_code is None)
def make_blocks(num_records=2000, codec='null'):
    """Serialize *num_records* generated records to an in-memory Avro file
    and read them back as raw blocks.

    Returns a (blocks, records) tuple: the list of Avro blocks and the
    original record dicts they encode.
    """
    records = make_records(num_records)
    buffer = BytesIO()
    # Write with the requested codec, then rewind for reading.
    fastavro.writer(buffer, schema, records, codec=codec)
    buffer.seek(0)
    blocks = list(fastavro.block_reader(buffer, schema))
    buffer.close()
    return (blocks, records)
class ModifyL4Src(base_tests.SimpleDataPlane):
    """Verify the switch can rewrite the TCP source port (OFPAT_SET_TP_SRC).

    Inserts a flow with a set-tp-src action and checks that a matching
    packet is received with the source port rewritten; skips when the
    switch does not advertise the action.
    """

    def runTest(self):
        logging.info('Running Modify_L4_Src test')
        # BUG FIX: dict.keys() returns a view in Python 3 with no .sort();
        # the original 'of_ports.sort()' would raise AttributeError.
        of_ports = sorted(config['port_map'].keys())
        self.assertTrue((len(of_ports) > 1), 'Not enough ports for test')
        delete_all_flows(self.controller)
        logging.info('Verify if switch supports the action -- modify_l4_src, if not skip the test')
        logging.info('Insert a flow with action -- set src tcp port')
        logging.info('Send packet matching the flow, verify recieved packet src tcp port is rewritten ')
        # use_cache only needs to be truthy; the string 'true' satisfies that.
        sup_acts = sw_supported_actions(self, use_cache='true')
        if (not (sup_acts & (1 << ofp.OFPAT_SET_TP_SRC))):
            skip_message_emit(self, 'modify_l4_src test skipped')
            return
        (pkt, exp_pkt, acts) = pkt_action_setup(self, mod_fields=['tcp_sport'], check_test_params=True)
        flow_match_test(self, config['port_map'], pkt=pkt, exp_pkt=exp_pkt, action_list=acts, max_test=2)
# NOTE(review): the decorator was truncated to "(scope='function')" in the
# source; restored as @pytest.fixture — confirm against the original file.
@pytest.fixture(scope='function')
def privacy_preference_history_for_tcf_feature(db, provided_identity_and_consent_request, privacy_experience_france_overlay, fides_user_provided_identity, served_notice_history_for_tcf_feature):
    """Function-scoped fixture: creates a PrivacyPreferenceHistory record for
    TCF feature 2 (opt_in via the French TCF overlay) and deletes it on
    teardown."""
    preference_history_record = PrivacyPreferenceHistory.create(db=db, data={'anonymized_ip_address': '92.158.1.0', 'email': '', 'fides_user_device': '051b219f-20e4-45df-82f7-5eb68a00889f', 'method': 'button', 'feature': 2, 'privacy_experience_config_history_id': None, 'privacy_experience_id': privacy_experience_france_overlay.id, 'preference': 'opt_in', 'fides_user_device_provided_identity_id': fides_user_provided_identity.id, 'request_origin': 'tcf_overlay', 'user_agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/324.42 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/425.24', 'user_geography': 'fr_idg', 'url_recorded': 'example.com/', 'served_notice_history_id': served_notice_history_for_tcf_feature.id}, check_name=False)
    (yield preference_history_record)
    preference_history_record.delete(db)
class OriginInspectorDimensions(ModelNormal):
    """Generated OpenAPI model: dimension attributes (region, datacenter,
    host) for Origin Inspector data.

    NOTE(review): the bare '_property' / '_js_args_to_python_args' lines in
    the source are truncated decorators; restored as @cached_property /
    @convert_js_args_to_python_args (plus @classmethod on the alternate
    constructor) per the generator's usual output — confirm against the
    original file.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        # Any of these types is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        return {'region': (str,), 'datacenter': (str,), 'host': (str,)}

    @cached_property
    def discriminator():
        return None

    # Python attribute name -> JSON key name.
    attribute_map = {'region': 'region', 'datacenter': 'datacenter', 'host': 'host'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server data; unlike
        __init__ it is allowed to set read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys the schema does not declare.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects positional args and read-only
        attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def forward(model: Model[(InT, OutT)], Xr: InT, is_train: bool) -> Tuple[(OutT, Callable)]:
    """Forward pass of an attention layer over a ragged batch.

    Computes attention weights from the learned parameter ``Q`` and the
    key-transform sublayer, applies them to ``Xr.dataXd``, and returns the
    attended output plus a backprop callback.
    """
    Q = model.get_param('Q')
    key_transform = model.get_ref(KEY_TRANSFORM_REF)
    (attention, bp_attention) = _get_attention(model.ops, Q, key_transform, Xr.dataXd, Xr.lengths, is_train)
    (output, bp_output) = _apply_attention(model.ops, attention, Xr.dataXd, Xr.lengths)

    def backprop(dYr: OutT) -> InT:
        # Reverse of the forward pass: first through the attention
        # application, then through the attention computation.
        (dX, d_attention) = bp_output(dYr.dataXd)
        (dQ, dX2) = bp_attention(d_attention)
        model.inc_grad('Q', dQ.ravel())
        # Xr.dataXd fed both sub-computations, so its gradient is the sum
        # of both contributions.
        dX += dX2
        return Ragged(dX, dYr.lengths)
    return (Ragged(output, Xr.lengths), backprop)
def display_tag_info(editor, stamp, tag, index):
    """Render a hover info box for *tag* into the editor webview.

    Builds an HTML panel with retention stats, recent notes (on wide
    windows), and related tags, then injects it via editor.web.eval.
    Returns the note ids found for the tag.
    """
    synonyms = index.synonyms
    # A space-separated tag argument is treated as multiple tags.
    if (' ' in tag):
        tagsContained = tag.split(' ')
    else:
        tagsContained = [tag]
    # Expand the tag with its synonym set, if any.
    # NOTE(review): the inner loop variable 'tag' shadows the parameter and
    # leaks its (possibly extended) value to the rest of the function —
    # looks intentional for single-tag input, but confirm for multi-tag.
    for synset in synonyms:
        synsetNorm = [s.lower() for s in synset]
        for tag in tagsContained:
            if (tag.lower() in synsetNorm):
                tag += (' ' + ' '.join(synset))
                break
    searchRes = findBySameTag(tag, 30000, [], [])
    tagsfound = _extract_tags(searchRes['result'], tag)
    # Too few co-occurring tags on a hierarchical tag: retry with each
    # '::' component until something is found.
    if ((len(tagsfound) <= 2) and ('::' in tag)):
        for s in tag.split('::'):
            res = findBySameTag(s, 30000, [], [])
            tagsfound = _extract_tags(res['result'], tag)
            if (len(tagsfound) > 0):
                break
    sortedCounts = sorted(tagsfound.items(), key=(lambda kv: kv[1]), reverse=True)
    time_stamp_for_graph = utility.text.get_stamp()
    window = mw.app.activeWindow()
    # Narrow windows get a compact layout without the recent-notes column.
    should_hide_left_side = ((window is not None) and (window.width() < 1000))
    html = ''
    if should_hide_left_side:
        html = '\n <span id=\'siac-tag-graph-lbl-%s\'>Retention for this Topic / Reviews</span>\n <div id="siac-tag-graph-%s" style=\'width: 230px; height: 130px; margin-right: auto; margin-left: auto; margin-bottom: 15px;\'></div>\n <table class=\'w-100\'>\n <tr><td style=\'text-align: left;\'>Retention</td><td style=\'text-align: right;\'><b>%s</b></td></tr>\n <tr><td style=\'text-align: left;\'>Notes</td><td style=\'text-align: right;\'><b>%s</b></td></tr>\n <tr><td style=\'text-align: left\'>Related</td><td>%s</td></tr></table>\n '
    else:
        html = '\n <div class=\'w-100 flex-row\'>\n <div style=\'flex: 1 1; flex-basis: 50%%; padding: 5px; max-width: 50%%;\'>\n <span>Newest for <b>%s</b></span>\n <div class=\'siac-tag-info-box-left\' style=\'%s\' id=\'siac-tag-info-box-left-%s\'>\n %s\n </div>\n </div>\n <div style=\'flex: 1 1; flex-basis: 50%%;\'>\n <span id=\'siac-tag-graph-lbl-%s\'>Retention for this Topic / Reviews</span>\n <div id="siac-tag-graph-%s" style=\'width: 230px; height: 130px; margin-right: auto; margin-left: auto; margin-bottom: 15px;\'></div>\n <table class=\'w-100\'>\n <tr><td style=\'text-align: left;\'>Retention</td><td style=\'text-align: right;\'><b>%s</b></td></tr>\n <tr><td style=\'text-align: left;\'>Notes</td><td style=\'text-align: right;\'><b>%s</b></td></tr>\n <tr><td style=\'text-align: left\'>Related</td><td>%s</td></tr></table>\n </div>\n </div>\n '
    tags = ''
    # Still too few related tags: widen by counting all tags co-occurring
    # on notes of the tags found so far.
    if (len(sortedCounts) < 3):
        starter = set([t[0] for t in sortedCounts])
        for t in sortedCounts:
            res = findBySameTag(t[0], 30000, [], [])
            for r in res['result']:
                spl = r.tags.split()
                for s in spl:
                    if ((s == tag) or (s in tag.split())):
                        continue
                    if (s in tagsfound):
                        tagsfound[s] += 1
                    else:
                        tagsfound[s] = 1
        sortedCounts = sorted(tagsfound.items(), key=(lambda kv: kv[1]), reverse=True)
    # Render up to 10 related-tag chips, capped at ~120 label characters.
    total_length = 0
    for k in sortedCounts[:10]:
        if should_hide_left_side:
            tags += ("<div data-stamp='siac-tg-%s' class='tagLbl' style='margin-bottom: 4px;' data-name='%s' onclick='tagClick(this); event.stopPropagation();'>%s</div>" % (utility.text.get_stamp(), k[0], utility.text.trim_if_longer_than(k[0], 40)))
        else:
            tags += ("<div data-stamp='siac-tg-%s' class='tagLbl' style='margin-bottom: 4px;' data-name='%s' onmouseenter='tagMouseEnter(this)' onclick='tagClick(this); event.stopPropagation();' onmouseleave='tagMouseLeave(this)'>%s</div>" % (utility.text.get_stamp(), k[0], utility.text.trim_if_longer_than(k[0], 40)))
        total_length += len(utility.text.trim_if_longer_than(k[0], 40))
        if (total_length > 120):
            break
    if (len(tags) == 0):
        tags = 'Could not find any related tags. Related tags are determined by looking for tags that appear on the same notes as the given tag.'
    nids = [r.id for r in searchRes['result']]
    # Average true retention, color-coded when available.
    tret = getAvgTrueRetention(nids)
    if (tret is not None):
        color = utility.misc._retToColor(tret)
        tret = ("<span style='background: %s; color: black;'> %s </span>" % (color, tret))
    if (not should_hide_left_side):
        # Wide layout: fill in the recent-notes column (newest 100 notes).
        sorted_db_list = sorted(searchRes['result'], key=(lambda x: x.id), reverse=True)
        note_html = UI.get_result_html_simple(sorted_db_list[:100])
        enlarge_note_area_height = ('max-height: 320px' if ((total_length > 120) and (tret is not None)) else '')
        # Shorten the displayed tag name; multi-tag inputs show '+N'.
        tag_name = tag
        if (' ' in tag_name):
            base = tag_name.split()[0]
            tag_name = (utility.text.trim_if_longer_than(base, 25) + (' (+%s)' % len(tag_name.split()[1:])))
        else:
            tag_name = utility.text.trim_if_longer_than(tag_name, 28)
        html = (html % (tag_name, enlarge_note_area_height, stamp, note_html, time_stamp_for_graph, time_stamp_for_graph, (tret if (tret is not None) else 'Not enough Reviews'), len(searchRes['result']), tags))
    else:
        html = (html % (time_stamp_for_graph, time_stamp_for_graph, (tret if (tret is not None) else 'Not enough Reviews'), len(searchRes['result']), tags))
    # Retention-over-time graph JS for the embedded canvas.
    ret_data = getTrueRetentionOverTime(nids)
    graph_js = retention_stats_for_tag(ret_data, ('siac-tag-graph-' + time_stamp_for_graph), ('siac-tag-graph-lbl-' + time_stamp_for_graph))
    id_for_box = ('siac-tag-info-box-' + stamp)
    params = {'stamp': stamp, 'id': id_for_box, 'html': html, 'graph_js': graph_js, 'should_be_small': ('siac-tag-info-box-small' if should_hide_left_side else '')}
    # Inject the box into the page and position it next to the tag label.
    editor.web.eval(' $(`<div class=\'siac-tag-info-box {should_be_small}\' id=\'{id}\' data-stamp=\'{stamp}\' onclick=\'tagInfoBoxClicked(this)\' onmouseleave=\'tagMouseLeave(this)\'></div>`).insertAfter(\'#outerWr\');\n $(\'#{id}\').html(`{html}`);\n {graph_js} \n showTagInfo(document.querySelectorAll(".tagLbl[data-stamp=\'{stamp}\']")[0]);\n\n '.format(**params))
    return nids
class HamiltonJacobiDiffusionReaction_ASGS(SGE_base):
def __init__(self, coefficients, nd, stabFlag='1', lag=False):
SGE_base.__init__(self, coefficients, nd, lag)
self.stabilizationFlag = stabFlag
def initializeElementQuadrature(self, mesh, t, cq):
import copy
self.mesh = mesh
self.tau = []
self.tau_last = []
self.cq = cq
for ci in range(self.nc):
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u', ci)].shape, 'd'))
self.tau.append(numpy.zeros(cq[('u', ci)].shape, 'd'))
if (('dH', ci, ci) in cq):
cq[('dH_sge', ci, ci)] = copy.deepcopy(cq[('dH', ci, ci)])
if (('dm', ci, ci) in cq):
cq[('dm_sge', ci, ci)] = copy.deepcopy(cq[('dm', ci, ci)])
if (('dmt', ci, ci) in cq):
cq[('dmt_sge', ci, ci)] = copy.deepcopy(cq[('dmt', ci, ci)])
else:
if (('dH', ci, ci) in cq):
cq[('dH_sge', ci, ci)] = cq[('dH', ci, ci)]
if (('dm', ci, ci) in cq):
cq[('dm_sge', ci, ci)] = cq[('dm', ci, ci)]
if (('dmt', ci, ci) in cq):
cq[('dmt_sge', ci, ci)] = cq[('dmt', ci, ci)]
self.tau.append(numpy.zeros(cq[('u', ci)].shape, 'd'))
for (ci, ckDict) in self.coefficients.diffusion.items():
if self.lag:
for (ck, cjDict) in ckDict.items():
cq[('grad(phi)_sge', ck)] = copy.deepcopy(cq[('grad(phi)', ck)])
for cj in list(cjDict.keys()):
cq[('dphi_sge', ck, cj)] = copy.deepcopy(cq[('dphi', ck, cj)])
cq[('da_sge', ci, ck, cj)] = copy.deepcopy(cq[('da', ci, ck, cj)])
else:
for (ck, cjDict) in ckDict.items():
cq[('grad(phi)_sge', ck)] = cq[('grad(phi)', ck)]
for cj in list(cjDict.keys()):
cq[('dphi_sge', ck, cj)] = cq[('dphi', ck, cj)]
cq[('da_sge', ci, ck, cj)] = cq[('da', ci, ck, cj)]
def updateSubgridErrorHistory(self, initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
self.cq[('dH_sge', ci, ci)][:] = self.cq[('dH', ci, ci)]
self.cq[('dm_sge', ci, ci)][:] = self.cq[('dm', ci, ci)]
for (ci, ckDict) in self.coefficients.diffusion.items():
for (ck, cjDict) in ckDict.items():
self.cq[('grad(phi)_sge', ck)][:] = self.cq[('grad(phi)', ck)]
for cj in list(cjDict.keys()):
self.cq[('dphi_sge', ck, cj)][:] = 0.0
self.cq[('da_sge', ci, ck, cj)][:] = self.cq[('da', ci, ck, cj)]
def calculateSubgridError(self, q):
    """Evaluate tau for each component, then the subgrid error (and its
    derivatives) from the strong PDE residual.

    Writes into the per-component tau arrays and into q[('subgridError', ci)],
    q[('dsubgridError', ci, cj)]; the C routines also fill q[('pe', ci)] and
    q[('cfl', ci)].
    """
    # Legacy ADR tau formula; hard-wired off, kept for reference.
    oldTau = False
    for ci in range(self.nc):
        if oldTau:
            if self.coefficients.sd:
                # Sparse-diffusion storage variant of the legacy tau.
                csubgridError.calculateSubgridError_ADR_tau_sd(self.stabilizationFlag, self.coefficients.sdInfo[(ci, ci)][0], self.coefficients.sdInfo[(ci, ci)][1], self.mesh.elementDiametersArray, q[('dmt', ci, ci)], q[('dH', ci, ci)], q[('a', ci, ci)], q[('da', ci, ci, ci)], q[('grad(phi)', ci)], q[('dphi', ci, ci)], q[('dr', ci, ci)], q[('pe', ci)], q[('cfl', ci)], self.tau[ci])
            else:
                csubgridError.calculateSubgridError_ADR_tau(self.stabilizationFlag, self.mesh.elementDiametersArray, q[('dmt', ci, ci)], q[('dH', ci, ci)], q[('a', ci, ci)], q[('da', ci, ci, ci)], q[('grad(phi)', ci)], q[('dphi', ci, ci)], q[('dr', ci, ci)], q[('pe', ci)], q[('cfl', ci)], self.tau[ci])
        elif self.coefficients.sd:
            # Generic tau built from the element Jacobian inverse (sparse diffusion).
            csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci, ci)][0], self.coefficients.sdInfo[(ci, ci)][1], q['inverse(J)'], q[('dmt', ci, ci)], q[('dH', ci, ci)], q[('a', ci, ci)], q[('da', ci, ci, ci)], q[('grad(phi)', ci)], q[('dphi', ci, ci)], q[('dr', ci, ci)], q[('pe', ci)], q[('cfl', ci)], self.tau[ci])
        else:
            csubgridError.calculateSubgridError_ADR_generic_tau(q['inverse(J)'], q[('dmt', ci, ci)], q[('dH', ci, ci)], q[('a', ci, ci)], q[('da', ci, ci, ci)], q[('grad(phi)', ci)], q[('dphi', ci, ci)], q[('dr', ci, ci)], q[('pe', ci)], q[('cfl', ci)], self.tau[ci])
        # Use the lagged tau (frozen by updateSubgridErrorHistory) when enabled.
        if self.lag:
            tau = self.tau_last[ci]
        else:
            tau = self.tau[ci]
        for cj in range(self.nc):
            if (('dpdeResidual', ci, cj) in q):
                csubgridError.calculateSubgridError_tauRes(tau, q[('pdeResidual', ci)], q[('dpdeResidual', ci, cj)], q[('subgridError', ci)], q[('dsubgridError', ci, cj)])
class JsDomEffects():
    """Attach CSS keyframe animations (registered via ``Effects``) to a
    component by setting its DOM ``animation`` shorthand property.

    Bug fixed: ``blink`` and ``fade_out`` accepted a ``fill_mode`` argument
    but omitted it from the generated shorthand, while ``glow`` and
    ``fade_in`` included it; all four now emit the full 7-part shorthand.
    """

    def __init__(self, page: primitives.PageModel, component: primitives.HtmlModel):
        self._effects = Effects.Effects(page)
        self.component = component

    def _animate(self, name, duration, timing_fnc, delay, iteration_count, direction, fill_mode):
        """Set the CSS shorthand: name duration timing-function delay iteration-count direction fill-mode."""
        shorthand = '%s %ss %s %ss %s %s %s' % (name, duration, timing_fnc, delay, iteration_count, direction, fill_mode)
        return self.component.dom.css('animation', shorthand)

    def glow(self, color, radius=50, duration=1, timing_fnc='ease-in-out', delay=0, iteration_count='infinite', direction='alternate', fill_mode='forwards'):
        """Glow animation with the given color and radius."""
        name = self._effects.glow(color, radius, duration, timing_fnc, delay, iteration_count, direction, fill_mode)
        return self._animate(name, duration, timing_fnc, delay, iteration_count, direction, fill_mode)

    def blink(self, duration=1, timing_fnc='ease-in-out', delay=0, iteration_count='infinite', direction='alternate', fill_mode='forwards'):
        """Blink animation (fill_mode now applied — previously dropped)."""
        name = self._effects.blink(duration, timing_fnc, delay, iteration_count, direction, fill_mode)
        return self._animate(name, duration, timing_fnc, delay, iteration_count, direction, fill_mode)

    def fade_out(self, duration=5, timing_fnc='ease-in-out', delay=0, iteration_count=1, direction='normal', fill_mode='forwards'):
        """Fade-out animation (fill_mode now applied — previously dropped,
        so the component snapped back to full opacity when the animation ended)."""
        name = self._effects.fade_out(duration, timing_fnc, delay, iteration_count, direction, fill_mode)
        return self._animate(name, duration, timing_fnc, delay, iteration_count, direction, fill_mode)

    def fade_in(self, duration=5, timing_fnc='ease-in-out', delay=0, iteration_count=1, direction='normal', fill_mode='forwards'):
        """Fade-in animation."""
        name = self._effects.fade_in(duration, timing_fnc, delay, iteration_count, direction, fill_mode)
        return self._animate(name, duration, timing_fnc, delay, iteration_count, direction, fill_mode)
# NOTE(review): the bare tuple below is almost certainly the argument list of
# a stripped `@mark.parametrize(...)` decorator (an AST round-trip dropped the
# `@...` prefix). As written it is a no-op expression statement and the test
# runs unparametrized — restore the decorator against the original source.
(('config_name', 'overrides', 'expected'), [param('defaults_with_override_only', [], DefaultsTreeNode(node=VirtualRoot(), children=[DefaultsTreeNode(node=ConfigDefault(path='hydra/config'), children=[GroupDefault(group='help', value='custom1'), GroupDefault(group='output', value='default'), ConfigDefault(path='_self_')]), ConfigDefault(path='defaults_with_override_only')]), id='defaults_with_override_only'), param('defaults_with_override_only', ['+group1=file1'], DefaultsTreeNode(node=VirtualRoot(), children=[DefaultsTreeNode(node=ConfigDefault(path='hydra/config'), children=[GroupDefault(group='help', value='custom1'), GroupDefault(group='output', value='default'), ConfigDefault(path='_self_')]), DefaultsTreeNode(node=ConfigDefault(path='defaults_with_override_only'), children=[GroupDefault(group='group1', value='file1')])]), id='defaults_with_override_only')])
def test_defaults_with_overrides_only(config_name: Optional[str], overrides: List[str], expected: DefaultsTreeNode) -> None:
    """Check the defaults tree built for a config whose defaults are override-only."""
    _test_defaults_tree_impl(config_name=config_name, input_overrides=overrides, expected=expected, prepend_hydra=True)
# NOTE(review): the leading `.skipif(...)` line is residue of a stripped
# `@pytest.mark.skipif(...)` decorator — as written it is a syntax error;
# restore the `@pytest.mark` prefix from the original source.
.skipif((sys.version_info < (3, 5)), reason='requires python3.5 or higher due to incompatible pickle file in tests.')
def test_read_results_dicts():
    """Simulation results load as plain dicts when xarray/status output is disabled."""
    (inputs, results) = ds.get_sim_results(path=SIM_DIR, return_xarray=False, return_status=False)
    assert isinstance(inputs, dict)
    assert isinstance(results, dict)
class shutdown_args():
    """Thrift argument struct for the `shutdown` RPC (it has no fields).

    Generated-style code: (de)serialization first tries the C-accelerated
    `fastproto` codec for the binary (protoid=0) and compact (protoid=2)
    protocols, then falls back to the generic field-by-field loop.
    """
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None

    def isUnion():
        # NOTE(review): declared without `self` in the generated source —
        # effectively a static predicate; confirm call sites before changing.
        return False

    def read(self, iprot):
        """Deserialize this (empty) struct from the given input protocol."""
        # Fast path: accelerated binary protocol.
        if ((isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            self.checkRequired()
            return
        # Fast path: accelerated compact protocol.
        if ((isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            self.checkRequired()
            return
        # Generic fallback: skip every field until STOP (this struct has none).
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if (ftype == TType.STOP):
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.checkRequired()

    def checkRequired(self):
        # No required fields to validate.
        return

    def write(self, oprot):
        """Serialize this (empty) struct to the given output protocol."""
        # Fast paths mirror read(): binary then compact via fastproto.
        if ((isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if ((isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('shutdown_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # Generated template: no fields, so L stays empty.
        L = []
        padding = (' ' * 4)
        return ('%s(\n%s)' % (self.__class__.__name__, ',\n'.join(L)))

    def __eq__(self, other):
        if (not isinstance(other, self.__class__)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
    # Defining __eq__ disables inherited hashing on py3; restore identity hash.
    if (not six.PY2):
        __hash__ = object.__hash__
class OnlyFieldsSerializerMixinTests(SerializerMixinTestCase):
    """Behavior of the `only` serializer context option: whitelisting root,
    nested and double-nested fields, rejection of unknown or conflicting
    names, and stability of the output field ordering."""

    def serialize(self, **context):
        """Serialize the fixture car model with the given serializer context."""
        serializer = CarModelTestSerializer(self.carmodel_model_s, context=context)
        return serializer.data

    def test_all_fields_implicit(self):
        # No `only` given -> full output.
        self.assertDictEqual(self.serialize(), self.expected_complete_data)

    def test_all_fields_explicit(self):
        # An empty `only` set behaves like "no restriction".
        self.assertDictEqual(self.serialize(only=set()), self.expected_complete_data)

    def test_only_single_root_field(self):
        expected = {'id': self.carmodel_model_s.pk}
        self.assertDictEqual(self.serialize(only={'id'}), expected)

    def test_only_multiple_root_fields(self):
        expected = {'id': self.carmodel_model_s.pk, 'name': self.carmodel_model_s.name}
        self.assertDictEqual(self.serialize(only={'id', 'name'}), expected)

    def test_only_serializer_foreign_key_field(self):
        expected = {'manufacturer': {'id': self.manufacturer_tesla.pk, 'name': self.manufacturer_tesla.name}}
        self.assertDictEqual(self.serialize(only={'manufacturer'}), expected)

    def test_only_serializer_many_field(self):
        owner = {
            'id': self.owner_tyrell.pk,
            'name': self.owner_tyrell.name,
            'email': self.owner_tyrell.email,
            'organization': {'id': self.organization_ecorp.pk, 'name': self.organization_ecorp.name},
        }
        expected = {'skus': [
            {'id': self.sku_p100d.pk, 'variant': self.sku_p100d.variant, 'owners': [owner]},
            {'id': self.sku_70.pk, 'variant': self.sku_70.variant, 'owners': []},
        ]}
        self.assertDictEqual(self.serialize(only={'skus'}), expected)

    def test_only_single_child_field_foreign_key(self):
        expected = {'manufacturer': {'id': self.manufacturer_tesla.pk}}
        self.assertDictEqual(self.serialize(only={'manufacturer__id'}), expected)

    def test_only_multiple_child_fields_foreign_key(self):
        expected = {'manufacturer': {'id': self.manufacturer_tesla.pk, 'name': self.manufacturer_tesla.name}}
        self.assertDictEqual(self.serialize(only={'manufacturer__id', 'manufacturer__name'}), expected)

    def test_only_single_child_field_many(self):
        expected = {'skus': [{'id': self.sku_p100d.pk}, {'id': self.sku_70.pk}]}
        self.assertDictEqual(self.serialize(only={'skus__id'}), expected)

    def test_only_multiple_child_fields_many(self):
        expected = {'skus': [
            {'id': self.sku_p100d.pk, 'variant': self.sku_p100d.variant},
            {'id': self.sku_70.pk, 'variant': self.sku_70.variant},
        ]}
        self.assertDictEqual(self.serialize(only={'skus__id', 'skus__variant'}), expected)

    def test_double_nested_only(self):
        expected = {'skus': [{'owners': [{'name': self.owner_tyrell.name}]}, {'owners': []}]}
        self.assertDictEqual(self.serialize(only={'skus__owners__name'}), expected)

    def test_complex(self):
        expected = {
            'name': self.carmodel_model_s.name,
            'manufacturer': {'name': self.manufacturer_tesla.name},
            'skus': [{'variant': self.sku_p100d.variant}, {'variant': self.sku_70.variant}],
        }
        self.assertDictEqual(self.serialize(only={'name', 'manufacturer__name', 'skus__variant'}), expected)

    def test_missing_root_field(self):
        # Unknown root field names are rejected.
        with self.assertRaises(ValueError):
            self.serialize(only={'not_found'})

    def test_missing_child_field_foreign_key(self):
        with self.assertRaises(ValueError):
            self.serialize(only={'manufacturer__not_found'})

    def test_missing_child_key_many(self):
        with self.assertRaises(ValueError):
            self.serialize(only={'skus__not_found'})

    def test_error_serialize_all_and_specific(self):
        # Requesting a parent together with one of its children is ambiguous.
        with self.assertRaises(ValueError):
            self.serialize(only={'manufacturer', 'manufacturer__name'})

    def test_field_ordering_unchanged_root(self):
        root_1 = self.serialize(only=('name', 'id', 'manufacturer'))
        root_2 = self.serialize(only=('manufacturer', 'id', 'name'))
        # Serializer declaration order wins, not the `only` order.
        self.assertEqual(root_1.keys(), root_2.keys())
        self.assertEqual(list(root_1.keys()), ['id', 'name', 'manufacturer'])

    def test_field_ordering_unchanged_nested(self):
        child_1 = self.serialize(only=('skus__variant', 'skus__owners'))
        child_2 = self.serialize(only=('skus__owners', 'skus__variant'))
        keys_1 = child_1['skus'][0].keys()
        keys_2 = child_2['skus'][0].keys()
        self.assertEqual(keys_1, keys_2)
        self.assertEqual(list(keys_1), ['variant', 'owners'])
def update_ear_flag(vertices: List[Vertex], i: int) -> None:
    """Recompute the ear flag of vertex `i` of a polygon (ear-clipping test).

    Vertex `i` is an ear iff it is convex and no reflex (non-convex) vertex
    other than `i` and its two neighbours lies inside the triangle formed by
    `i` and those neighbours.
    """
    prev_idx = (i - 1) % len(vertices)
    next_idx = (i + 1) % len(vertices)
    if not vertices[i].is_convex:
        # A reflex vertex can never be an ear.
        vertices[i].is_ear = False
        return
    triangle = (vertices[prev_idx].coordinate, vertices[i].coordinate, vertices[next_idx].coordinate)
    for k, vertex in enumerate(vertices):
        # Only reflex vertices outside the triangle's own corners can block it.
        if vertex.is_convex or k in (prev_idx, i, next_idx):
            continue
        if is_inside(vertex.coordinate, triangle):
            vertices[i].is_ear = False
            return
    vertices[i].is_ear = True
class RemoteEvaluator():
    """Drives a simultaneous-translation agent served over HTTP and feeds its
    predictions back into a SentenceLevelEvaluator.

    Bug fixed: the `base_url` assignment was a truncated f-string literal
    (`f'`), a syntax error; it is reconstructed as `http://<address>:<port>`
    from the address/port attributes this class already uses.
    """

    def __init__(self, evaluator: SentenceLevelEvaluator) -> None:
        self.evaluator = evaluator
        self.address = evaluator.args.remote_address
        self.port = evaluator.args.remote_port
        self.source_segment_size = evaluator.args.source_segment_size
        # Endpoint root of the remote agent (reconstructed, see class docstring).
        self.base_url = f'http://{self.address}:{self.port}'

    def send_source(self, segment: Segment):
        """PUT one source segment to the remote agent."""
        requests.put(f'{self.base_url}/input', data=segment.json())

    def receive_prediction(self) -> Segment:
        """GET the agent's next output segment."""
        response = requests.get(f'{self.base_url}/output')
        return segment_from_json_string(response.text)

    def system_reset(self):
        """Reset the remote agent's state between instances."""
        requests.post(f'{self.base_url}/reset')

    def results(self):
        return self.evaluator.results()

    def remote_eval(self):
        """Run every evaluation instance through the remote agent, log each,
        then dump the aggregate results."""
        for instance in self.evaluator.instance_iterator:
            self.system_reset()
            while not instance.finish_prediction:
                self.send_source(instance.send_source(self.source_segment_size))
                instance.receive_prediction(self.receive_prediction())
            self.evaluator.write_log(instance)
        self.evaluator.dump_results()
class _Background():
    """Paints a solid rectangle behind a QGraphicsItem."""

    def __init__(self, color=None):
        # Any QColor-compatible value; None disables the background.
        self.color = color

    def apply(self, item):
        """Create the background rect behind `item`; return it, or None when
        no color is configured."""
        if not self.color:
            return None
        fill = QColor(self.color)
        rect = QGraphicsRectItem(item.boundingRect())
        rect.setParentItem(item)
        rect.setPen(QPen(fill))
        rect.setBrush(QBrush(fill))
        # Keep the rectangle under its parent item in the z-order.
        rect.setFlag(QGraphicsItem.GraphicsItemFlag.ItemStacksBehindParent)
        return rect
class OptionPlotoptionsXrangeSonificationContexttracksMappingPitch(Options):
    """Pitch-mapping options for a sonification track.

    Bug fixed: in the round-tripped source each option appeared as two plain
    `def`s with the same name, so the setter silently shadowed the getter.
    These were clearly `@property`/`@<name>.setter` pairs whose decorators
    were stripped; the decorators are restored here.
    """

    @property
    def mapFunction(self):
        # Mapping function; unset by default.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data point property to map to; defaults to 'y'.
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Upper pitch bound; defaults to note 'c6'.
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        # Lower pitch bound; defaults to note 'c2'.
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        # Musical scale definition; unset by default.
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        # Range the mapping is computed within; defaults to 'yAxis'.
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class WafExclusionResponseData(ModelComposed):
    """Composed OpenAPI model for the data payload of a WAF-exclusion response.

    Generated-client style: runtime type information is exposed through lazy
    class-level accessors so mutually-referencing models import on demand.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines are
    residue of stripped decorators (presumably `@cached_property` and
    `@convert_js_args_to_python_args`, plus `@classmethod` on
    `_from_openapi_data`) — restore them against the original generated source.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Any JSON-compatible type is accepted for undeclared properties.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> accepted type tuple.
        lazy_import()
        return {'type': (TypeWafExclusion,), 'attributes': (WafExclusionResponseDataAttributes,), 'relationships': (WafExclusionResponseDataRelationships,), 'id': (str,)}
    _property
    def discriminator():
        return None
    attribute_map = {'type': 'type', 'attributes': 'attributes', 'relationships': 'relationships', 'id': 'id'}
    read_only_vars = {'id'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server-provided data (read-only attributes allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Distribute kwargs across the composed (allOf) schemas.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # allOf composition of the raw data model and the response-specific extras.
        lazy_import()
        return {'anyOf': [], 'allOf': [WafExclusionData, WafExclusionResponseDataAllOf], 'oneOf': []}
class ModelDataGenerator(ABC):
    """Turns layout documents into training/tagging data lines for a model."""

    def iter_data_lines_for_xml_root(self, root: etree.ElementBase) -> Iterable[str]:
        """Parse an ALTO XML root and yield its data lines."""
        return self.iter_data_lines_for_layout_document(parse_alto_root(root))

    def iter_model_data_for_layout_document(self, layout_document: LayoutDocument) -> Iterable[LayoutModelData]:
        """Hook for subclasses: produce the model-data records for one document."""
        pass

    def iter_data_lines_for_layout_document(self, layout_document: LayoutDocument) -> Iterable[str]:
        """One data line per model-data record of the document."""
        return (record.data_line for record in self.iter_model_data_for_layout_document(layout_document))

    def iter_data_lines_for_layout_documents(self, layout_documents: Iterable[LayoutDocument]) -> Iterable[str]:
        """Data lines for several documents, blank-line separated."""
        for document_index, layout_document in enumerate(layout_documents):
            LOGGER.debug('generating data lines for document: index=%d', document_index)
            if document_index > 0:
                LOGGER.debug('adding document separator')
                yield '\n'
            yield from (record.data_line for record in self.iter_model_data_for_layout_document(layout_document))
class FetchedModelsManager(ErsiliaBase):
    """Maintains the on-disk registry (a two-column CSV) of fetched models."""

    def __init__(self, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)
        # Registry file: one "model_id,value" row per fetched model.
        self.file_name = os.path.abspath(os.path.join(EOS, FETCHED_MODELS_FILENAME))

    def add(self, model_id):
        # Intentional no-op in this implementation.
        pass

    def delete(self, model_id):
        """Remove `model_id`'s row from the registry, if present.

        Fixes: redundant `infile.close()` inside the `with` block removed;
        a missing entry is now logged (like a missing file) instead of
        raising an unhandled KeyError.
        """
        if not os.path.exists(self.file_name):
            self.logger.debug('Model entry {0} was not available in the fetched models registry'.format(model_id))
            return
        with open(self.file_name) as infile:
            models = dict(csv.reader(infile))
        if model_id not in models:
            self.logger.debug('Model entry {0} was not available in the fetched models registry'.format(model_id))
            return
        del models[model_id]
        with open(self.file_name, 'w') as outfile:
            for key, value in models.items():
                outfile.write(f'{key},{value}\n')
        self.logger.debug('Fetched model entry {0} deleted'.format(model_id))
class RiversideHandler(THBEventHandler):
    """Implements the Riverside skill: marked targets are treated as being
    at effectively zero distance, and marks are cleared at end of turn."""
    interested = ['calcdistance', 'action_after']

    def handle(self, evt_type, arg):
        if evt_type == 'calcdistance':
            src, _card, dist = arg
            if not src.has_skill(Riverside):
                return arg
            # Marked targets become reachable regardless of real distance.
            for player in dist:
                if player.tags['riverside_target']:
                    dist[player] -= 10000
        elif evt_type == 'action_after' and isinstance(arg, FinalizeStage):
            # Turn is finalizing: clear the mark on everyone.
            for character in self.game.players:
                character.tags.pop('riverside_target', 0)
        return arg
# COM member names (after DISPID normalization) that map onto Python dunders.
_SPECIAL_METHOD_NAMES = {
    'count': '__len__',
    'item': '__getitem__',
    'value': '__call__',
    '_newenum': '__iter__',
}


def _com_special_key(entry, name):
    """Normalized lookup key for a COM member: DISPID specials win over the name."""
    if entry.desc.memid == pythoncom.DISPID_VALUE:
        return 'value'
    if entry.desc.memid == pythoncom.DISPID_NEWENUM:
        return '_newenum'
    return name.lower()


def _skip_func_entry(entry):
    """True when a mapFuncs entry must not be emitted (restricted/non-dispatch/hidden)."""
    assert (entry.desc.desckind == pythoncom.DESCKIND_FUNCDESC)
    if (entry.desc.wFuncFlags & pythoncom.FUNCFLAG_FRESTRICTED) and (entry.desc.memid != pythoncom.DISPID_NEWENUM):
        return True
    if entry.desc.funckind != pythoncom.FUNC_DISPATCH:
        return True
    return bool(entry.hidden)


def _make_id_assigns(ole_item):
    """CLSID (and PROGID, when resolvable) assignment statements for a class body."""
    clsid = ole_item.clsid
    assigns = [ast.Assign(
        [ast.Name('CLSID', ast.Store())],
        ast.Call(ast.Name('IID', ast.Load()), [ast.Constant(str(clsid))], []))]
    try:
        progid = ProgIDFromCLSID(clsid)
    except PythonComError:
        progid = None
    if progid:
        assigns.append(ast.Assign([ast.Name('PROGID', ast.Store())], ast.Constant(progid)))
    return assigns


def make_class_defs(ole_item):
    """Build the ast.ClassDef node(s) for one OLE type-library item.

    Returns a list holding the main class definition and, for event sinks,
    an additional '<Name>Handler' class exposing only the plain event methods.

    Refactor: the DISPID->special-name mapping and the name->dunder dispatch,
    previously duplicated four times, now live in `_com_special_key` /
    `_SPECIAL_METHOD_NAMES`; the function-entry skip test lives in
    `_skip_func_entry`. Behavior is unchanged.
    """
    class_name = ole_item.python_name
    is_sink = ole_item.bIsSink
    class_body_assigns = _make_id_assigns(ole_item)
    class_body = list(class_body_assigns)
    if is_sink:
        # Sink interfaces are published without their leading underscore(s).
        class_name = class_name.lstrip('_')
    if hasattr(ole_item, 'mapFuncs'):
        for (name, entry) in ole_item.mapFuncs.items():
            if _skip_func_entry(entry):
                continue
            special = _SPECIAL_METHOD_NAMES.get(_com_special_key(entry, name))
            if special is not None:
                class_body.append(make_method(entry, special))
            else:
                class_body.append(make_method(entry, is_event=is_sink))
    if hasattr(ole_item, 'propMap'):
        for (name, entry) in ole_item.propMap.items():
            special = _SPECIAL_METHOD_NAMES.get(_com_special_key(entry, name))
            if special is not None:
                class_body.append(make_method(entry, special))
            else:
                # Plain property: emit a getter/setter pair.
                class_body.append(make_method(entry, is_getter=True))
                class_body.append(make_method(entry, is_setter=True))
    if hasattr(ole_item, 'propMapGet'):
        for (name, entry) in ole_item.propMapGet.items():
            special = _SPECIAL_METHOD_NAMES.get(_com_special_key(entry, name))
            if special is not None:
                class_body.append(make_method(entry, special))
            else:
                class_body.append(make_method(entry, is_getter=True))
    if hasattr(ole_item, 'propMapPut'):
        for (name, entry) in ole_item.propMapPut.items():
            # Only put-only properties; combined ones were handled above.
            if (name not in ole_item.propMap) and (name not in ole_item.propMapGet):
                class_body.append(make_method(entry, is_getter=True, is_setter=True))
                class_body.append(make_method(entry, is_setter=True))
    if not class_body:
        # Keep the class syntactically valid when it ends up empty.
        class_body = [ast.Expr(ast.Constant(Ellipsis))]
    class_bases = []
    if hasattr(ole_item, 'interfaces'):
        class_bases.extend(ast.Name(interface.python_name, ast.Load())
                           for (interface, flag) in ole_item.interfaces)
    if hasattr(ole_item, 'sources'):
        class_bases.extend(ast.Name(source.python_name.lstrip('_'), ast.Load())
                           for (source, flag) in ole_item.sources)
    class_defs = [ast.ClassDef(class_name, class_bases, [], class_body, [])]
    if is_sink:
        # Companion handler class: same CLSID/PROGID assigns, plain (non-special)
        # event methods only, emitted as handlers.
        handler_body = list(class_body_assigns)
        if hasattr(ole_item, 'mapFuncs'):
            for (name, entry) in ole_item.mapFuncs.items():
                if _skip_func_entry(entry):
                    continue
                if _com_special_key(entry, name) in _SPECIAL_METHOD_NAMES:
                    continue
                handler_body.append(make_method(entry, is_handler=is_sink))
        class_defs.append(ast.ClassDef(class_name + 'Handler', [], [], handler_body, []))
    return class_defs
class Arcfour():
    """RC4 (Arcfour) stream cipher.

    The key-scheduling algorithm runs once in the constructor; `process`
    advances the keystream, so successive calls continue the same stream.
    Encryption and decryption are the same XOR operation.
    """

    def __init__(self, key):
        # Key-scheduling algorithm (KSA).
        state = list(range(256))
        key_len = len(key)
        j = 0
        for i in range(256):
            j = (j + state[i] + key[i % key_len]) % 256
            state[i], state[j] = state[j], state[i]
        self.s = state
        self.i = 0
        self.j = 0

    def process(self, data):
        """XOR `data` (bytes-like of ints) with the keystream; returns bytes."""
        s = self.s
        i, j = self.i, self.j
        out = bytearray()
        for byte in data:
            # Pseudo-random generation algorithm (PRGA) step.
            i = (i + 1) % 256
            j = (j + s[i]) % 256
            s[i], s[j] = s[j], s[i]
            out.append(byte ^ s[(s[i] + s[j]) % 256])
        self.i, self.j = i, j
        return bytes(out)

    # Same XOR both ways.
    encrypt = decrypt = process
def extractNyankonyantranslationBlogspotCom(item):
    """Parse a release feed item into a release message.

    Returns None for non-chapter or preview posts, a release message for
    recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Tag -> (series name, translation type); insertion order preserved.
    tag_to_release = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_to_release.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the bare tuple below is the argument list of a stripped
# `@mark.parametrize('input_list, reference, expected_idx', ...)` decorator
# (an AST round-trip dropped the `@...` prefix); as written it is a no-op
# expression statement. Restore the decorator against the original source.
('input_list, reference, expected_idx', [([], ('', ''), (- 1)), ([('a', '10')], ('a', None), 0), ([('a', '10'), ('b', '20'), ('a', '30')], ('a', None), 2), ([('a', '10'), ('b', '20'), ('a', '30')], ('b', None), 1), ([('a', '10'), ('b', '20'), ('a', '30')], ('a', '10'), 0)])
def test_find_last_match(input_list: List[Tuple[(str, str)]], reference: str, expected_idx: int) -> None:
    """find_last_match returns the index of the last entry matching the query (-1 if none)."""
    csp = create_search_path(input_list)
    assert (csp.find_last_match(SearchPathQuery(reference[0], reference[1])) == expected_idx)
# NOTE(review): the leading `.parametrize(...)` line is residue of a stripped
# `@pytest.mark.parametrize(...)` decorator — as written it is a syntax error;
# restore the `@pytest.mark` prefix from the original source.
.parametrize('idx, i, slice_at', [(0, 0, 0), (10, 3, 2), (33, 7, 4), (4, 1, 0), (13, 3, 2), pytest.param((- 1), 0, 0, marks=pytest.mark.xfail), pytest.param(40, 0, 0, marks=pytest.mark.xfail)])
def test_idx2i(nlp, idx, i, slice_at):
    """idx2i maps a character offset to a token index consistently for Doc,
    Span and token-slice inputs (offsets rebased for the sliced views)."""
    doc = nlp(_TEXT_SAMPLE)
    doc_idx = idx2i(doc, idx)
    assert (doc_idx == i)
    # Rebase the character offset to the start of the slice.
    fix_idx = (idx - sum((len(t) for t in doc[:slice_at])))
    span = Span(doc, slice_at, len(doc))
    span_idx = idx2i(span, fix_idx)
    tokens = doc[slice_at:]
    tokens_idx = idx2i(tokens, fix_idx)
    # Token index rebased the same way.
    fix_i = (i - slice_at)
    assert (span_idx == fix_i)
    assert (tokens_idx == fix_i)
class HistoricalDomainsMetaFilters(ModelNormal):
    """OpenAPI model for the filters echoed back in historical-domains metadata
    (region / datacenter / domain, all optional strings).

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines are
    residue of stripped decorators (presumably `@cached_property` and
    `@convert_js_args_to_python_args`, plus `@classmethod` on
    `_from_openapi_data`) — restore them against the original generated source.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Any JSON-compatible type is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> accepted type tuple.
        return {'region': (str,), 'datacenter': (str,), 'domain': (str,)}
    _property
    def discriminator():
        return None
    attribute_map = {'region': 'region', 'datacenter': 'datacenter', 'domain': 'domain'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server-provided data."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configured to do so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptimizerScheduler(abc.ABC):
    """Base class for learning-rate schedulers wrapping an Optimizer.

    NOTE(review): `_set_defaults_in_cfg` takes `cls` and `step` is a no-op —
    in the original these were presumably decorated (`@classmethod` /
    `@abstractmethod`) and the decorators were lost in a round-trip; confirm
    against the original source.
    """

    def __init__(self, *, optimizer: Optimizer, **kwargs):
        # Merge component config defaults, then keep a handle to the optimizer.
        init_self_cfg(self, component_class=__class__, config_class=OptimizerSchedulerConfig, **kwargs)
        self.optimizer = optimizer

    def _set_defaults_in_cfg(cls, cfg):
        # Hook for subclasses to inject config defaults.
        pass

    def step(self, batch_metric: Optional[IFLBatchMetrics]=None, model: Optional[IFLModel]=None, data: Optional[Any]=None, epoch: Optional[int]=None, global_round_num: Optional[int]=0):
        # Hook: advance the schedule; subclasses implement the actual policy.
        pass

    def get_lr(self):
        """Current learning rate of every optimizer param group, in order."""
        lrs = [param_group['lr'] for param_group in self.optimizer.param_groups]
        return lrs
class Port(Base):
    """ORM model for an observed network port.

    A (protocol, port_number) pair is unique; ports are linked to targets
    both through `target_id` and through the `port_association_table`
    many-to-many relationship.

    NOTE(review): the direct `target_id` FK alongside the M2M relationship
    looks redundant — confirm which link is canonical before removing either.
    """
    __tablename__ = 'port'
    __table_args__ = (UniqueConstraint('protocol', 'port_number'),)
    id = Column(Integer, primary_key=True)
    protocol = Column('protocol', String)  # e.g. 'tcp' / 'udp'
    port_number = Column('port_number', Integer)
    target_id = Column(Integer, ForeignKey('target.id'))
    targets = relationship('Target', secondary=port_association_table, back_populates='open_ports')
class Ohno2013(CCT):
    """Correlated color temperature via an Ohno (2013)-style planckian-table search.

    ``to_cct`` iteratively brackets the blackbody temperature whose (u, v)
    chromaticity is closest to the color, then refines with a triangular
    solution and, for larger offsets, a parabolic fit.  ``from_cct`` maps a
    (CCT, Duv) pair back to a color.
    """
    NAME = 'ohno-2013'
    # Chromaticity diagram used for all distance computations.
    CHROMATICITY = 'uv-1960'
    def __init__(self, cmfs: dict[(int, tuple[(float, float, float)])]=cmfs.CIE_1931_2DEG, white: VectorLike=cat.WHITES['2deg']['D65'], planck_step: int=5):
        self.white = white
        # Precomputed blackbody locus mapping temperature -> (u, v).
        self.blackbody = BlackBodyCurve(cmfs, white, planck_step, self.CHROMATICITY)
    def to_cct(self, color: Color, start: float=1000, end: float=100000, samples: int=10, iterations: int=6, exact: bool=False, **kwargs: Any) -> Vector:
        """Return ``[CCT, Duv]`` for *color* by narrowing a temperature bracket."""
        # Target chromaticity; the trailing (luminance-like) component is dropped.
        (u, v) = color.split_chromaticity(self.CHROMATICITY)[:(- 1)]
        last = (samples - 1)
        index = 0
        table = []
        # Repeatedly sample the current [start, end] bracket and zoom in on the
        # sample nearest the target.
        for _ in range(iterations):
            table.clear()
            lowest = math.inf
            index = 0
            for j in range(samples):
                # Evenly spaced temperatures across the bracket.
                k = alg.lerp(start, end, (j / last))
                (u2, v2) = self.blackbody(k, exact=exact)
                di = math.sqrt((((u2 - u) ** 2) + ((v2 - v) ** 2)))
                if (di < lowest):
                    lowest = di
                    index = j
                table.append((k, u2, v2, di))
            # New bracket: the neighbors of the closest sample (clamped at ends).
            start = (table[(index - 1)][0] if (index > 0) else table[index][0])
            end = (table[(index + 1)][0] if (index < last) else table[index][0])
        ti = table[index][0]
        # Recompute distances with exact locus points when the table used the
        # approximate curve.
        if (not exact):
            (ui, vi) = self.blackbody(ti, exact=True)
            di = math.sqrt((((ui - u) ** 2) + ((vi - v) ** 2)))
        else:
            di = table[index][(- 1)]
        # Previous neighbor (or a tiny step below when at the bracket's start).
        if ((index == 0) or (not exact)):
            tp = ((ti - 0.0001) if (index == 0) else table[(index - 1)][0])
            (up, vp) = self.blackbody(tp, exact=True)
            dp = math.sqrt((((up - u) ** 2) + ((vp - v) ** 2)))
        else:
            (tp, up, vp, dp) = table[(index - 1)]
        # Next neighbor (or a tiny step above when at the bracket's end).
        if ((index == last) or (not exact)):
            tn = ((ti + 0.0001) if (index == last) else table[(index + 1)][0])
            (un, vn) = self.blackbody(tn, exact=True)
            dn = math.sqrt((((un - u) ** 2) + ((vn - v) ** 2)))
        else:
            (tn, un, vn, dn) = table[(index + 1)]
        # Triangular solution: project the target onto the chord between the
        # neighboring locus points.
        l = math.sqrt((((un - up) ** 2) + ((vn - vp) ** 2)))
        x = ((((dp ** 2) - (dn ** 2)) + (l ** 2)) / (2 * l))
        t = (tp + ((tn - tp) * (x / l)))
        vtx = (vp + ((vn - vp) * (x / l)))
        # Sign of Duv: above or below the locus in v.
        sign = math.copysign(1, (v - vtx))
        duv = ((((dp ** 2) - (x ** 2)) ** (1 / 2)) * sign)
        # For larger offsets, refine with a parabolic fit through the three
        # (temperature, distance) samples.
        if (abs(duv) >= 0.002):
            x = (((tn - ti) * (tp - tn)) * (ti - tp))
            a = ((((tp * (dn - di)) + (ti * (dp - dn))) + (tn * (di - dp))) * (x ** (- 1)))
            b = ((- ((((tp ** 2) * (dn - di)) + ((ti ** 2) * (dp - dn))) + ((tn ** 2) * (di - dp)))) * (x ** (- 1)))
            c = ((- (((((dp * ti) * tn) * (tn - ti)) + (((di * tp) * tn) * (tp - tn))) + (((dn * tp) * ti) * (ti - tp)))) * (x ** (- 1)))
            # Vertex of the parabola gives the refined temperature.
            t = ((- b) / (2 * a))
            duv = ((((a * (t ** 2)) + (b * t)) + c) * sign)
        return [t, duv]
    def from_cct(self, color: type[Color], space: str, kelvin: float, duv: float, scale: bool, scale_space: (str | None), **kwargs: Any) -> Color:
        """Construct a color from a CCT and Duv in the working chromaticity space."""
        (u0, v0) = self.blackbody(kelvin, exact=True)
        if duv:
            # Estimate the local tangent of the locus with a nearby point, then
            # step perpendicular to it by duv.
            (u1, v1) = self.blackbody((kelvin + 0.01), exact=True)
            du = (u0 - u1)
            dv = (v0 - v1)
            di = math.sqrt(((du ** 2) + (dv ** 2)))
            if di:
                du /= di
                dv /= di
                u0 = (u0 - (duv * dv))
                v0 = (v0 + (duv * du))
        return color.chromaticity(space, [u0, v0, 1], self.CHROMATICITY, scale=scale, scale_space=scale_space)
class GenrePopupController(OptionsController):
    """Options controller for a genre popup: derives the option list from the
    Rhythmbox library and resolves each genre to a sprite/button image."""
    # Bound to the plugin's "new genre icon" GSetting (see _connect_properties).
    new_genre_icon = GObject.property(type=bool, default=False)
    def __init__(self, plugin, album_model):
        super(GenrePopupController, self).__init__()
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
        self._album_model = album_model
        shell = plugin.shell
        self.plugin = plugin
        # Property model enumerating all genres in the library's base query.
        genres_model = RB.RhythmDBPropertyModel.new(shell.props.db, RB.RhythmDBPropType.GENRE)
        query = shell.props.library_source.props.base_query_model
        genres_model.props.query_model = query
        self._initial_genre = _('All Genres')
        self._spritesheet = None
        self._default_image = None
        self._unrecognised_image = None
        self._connect_properties()
        self._connect_signals(query, genres_model)
        self._update_options(genres_model)
    def update_images(self, *args):
        """(Re)load the genre spritesheet and the default/unrecognised images.

        The last positional argument, when truthy, flags the popup image for refresh.
        """
        if self._spritesheet:
            del self._spritesheet
        self._spritesheet = GenreConfiguredSpriteSheet(self.plugin, 'genre', get_stock_size())
        self._default_image = self.create_button_image(self.plugin, self._default_image, 'default_genre.png')
        self._unrecognised_image = self.create_button_image(self.plugin, self._unrecognised_image, 'unrecognised_genre.png')
        if args[(- 1)]:
            self.update_image = True
    def _connect_signals(self, query, genres_model):
        # Rebuild the option list whenever the icon setting or the library
        # query's rows change.
        self.connect('notify::new-genre-icon', self._update_options, genres_model)
        query.connect('row-inserted', self._update_options, genres_model)
        query.connect('row-deleted', self._update_options, genres_model)
        query.connect('row-changed', self._update_options, genres_model)
    def _connect_properties(self):
        # One-way bind the plugin GSetting onto the new_genre_icon property.
        gs = GSetting()
        setting = gs.get_setting(gs.Path.PLUGIN)
        setting.bind(gs.PluginKey.NEW_GENRE_ICON, self, 'new_genre_icon', Gio.SettingsBindFlags.GET)
    def _update_options(self, *args):
        """Rebuild self.options from the genres model (the last positional arg)."""
        genres_model = args[(- 1)]
        self.update_images(False)
        still_exists = False
        options = []
        row_num = 0
        for row in genres_model:
            if (row_num == 0):
                # The first model row stands for "all genres"; use the
                # localized label instead of the row's value.
                cl = CoverLocale()
                cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
                genre = _('All Genres')
                row_num = (row_num + 1)
            else:
                genre = row[0]
            options.append(genre)
            still_exists = (still_exists or (genre == self.current_key))
        self.options = options
        # Fall back to "All Genres" if the previous selection disappeared.
        self.current_key = (self.current_key if still_exists else self._initial_genre)
    def do_action(self):
        """Apply (or clear) the album model's genre filter for the current selection."""
        if (self.current_key == self._initial_genre):
            self._album_model.remove_filter('genre')
        else:
            self._album_model.replace_filter('genre', self.current_key)
    def get_current_image(self):
        """Return the image for the current genre, trying alternates then sprites."""
        test_genre = self.current_key.lower()
        if (test_genre == self._initial_genre.lower()):
            image = self._default_image
        else:
            image = self._find_alternates(test_genre)
            # Direct sprite lookup as a last resort before "unrecognised".
            if ((image == self._unrecognised_image) and (test_genre in self._spritesheet)):
                image = self._spritesheet[test_genre]
        return image
    def _find_alternates(self, test_genre):
        """Resolve *test_genre* through user, locale, then system alternates/names.

        Name matches are tried longest-first so more specific genres win.
        """
        test_genre = RB.search_fold(test_genre)
        (ret, sprite) = self._match_genres(test_genre, self._spritesheet.GENRE_USER)
        if ret:
            return sprite
        for genre in sorted(self._spritesheet.locale_names, key=(lambda b: ((- len(b)), b))):
            if (RB.search_fold(genre) in test_genre):
                return self._spritesheet[self._spritesheet.locale_names[genre]]
        (ret, sprite) = self._match_genres(test_genre, self._spritesheet.GENRE_LOCALE)
        if ret:
            return sprite
        (ret, sprite) = self._match_genres(test_genre, self._spritesheet.GENRE_SYSTEM)
        if ret:
            return sprite
        for genre in sorted(self._spritesheet.names, key=(lambda b: ((- len(b)), b))):
            if (RB.search_fold(genre) in test_genre):
                return self._spritesheet[genre]
        return self._unrecognised_image
    def _match_genres(self, test_genre, genre_type):
        """Case-insensitive match of *test_genre* against alternates of *genre_type*.

        Returns (True, sprite) on a hit, else (False, None).
        """
        case_search = CaseInsensitiveDict(dict(((k.name, v) for (k, v) in self._spritesheet.genre_alternate.items() if (k.genre_type == genre_type))))
        if (test_genre in case_search):
            return (True, self._spritesheet[case_search[test_genre]])
        else:
            return (False, None)
    def get_current_description(self):
        """Localized label for the current selection."""
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
        if (self.current_key == self._initial_genre):
            return _('All Genres')
        else:
            return self.current_key
def expand_derivatives_form(form, fc_params):
    """Expand derivatives in *form*, preprocessing UFL Forms with form-compiler defaults."""
    if not isinstance(form, ufl.form.Form):
        # Anything that isn't a Form only needs plain AD expansion.
        return ufl.algorithms.ad.expand_derivatives(form)
    from firedrake.parameters import parameters as default_parameters
    from tsfc.parameters import is_complex
    # Start from the default form-compiler parameters and overlay any
    # caller-supplied overrides.
    overrides = fc_params
    fc_params = default_parameters['form_compiler'].copy()
    if overrides is not None:
        fc_params.update(overrides)
    complex_mode = (fc_params and is_complex(fc_params.get('scalar_type')))
    return ufl.algorithms.preprocess_form(form, complex_mode)
class Panda(CommandBase):
    """CLI command group exposing panda-flashing helpers."""
    def __init__(self):
        super().__init__()
        self.name = 'panda'
        self.description = ' panda interfacing tools'
        self.commands = {'flash': Command(description=' flashes panda with make recover (usually works with the C2)'), 'flash2': Command(description=' flashes panda using Panda module (usually works with the EON)')}
    # NOTE(review): takes no self — presumably a @staticmethod whose decorator
    # was stripped; confirm against the original source.
    def _flash():
        # Flash via the board's `make recover` target.
        r = run('make -C {}/panda/board recover'.format(OPENPILOT_PATH))
        if (not r):
            error('Error running make command!')
    # NOTE(review): same missing-self pattern as _flash above.
    def _flash2():
        # boardd holds the panda; killing it is best-effort before flashing.
        if (not run('pkill -f boardd')):
            error('Error killing boardd! Is it running? (continuing...)')
        importlib.import_module('panda', 'Panda').Panda().flash()
class TestAVSUCDReader(DataReaderTestBase):
    """Scene tests for reading AVS UCD unstructured-grid example data."""
    def setup_reader(self):
        """Load cellsnd.ascii.inp into the engine and record the expected bounds."""
        r = UnstructuredGridReader()
        r.initialize(get_example_data('cellsnd.ascii.inp'))
        self.e.add_source(r)
        self.bounds = ((- 2.0), 2.0, (- 2.0), 2.0, 0.0, 0.0)
    def test_avsucd_data_reader(self):
        # Basic read: scene bounds must match the example data.
        self.check(self.scene, self.bounds)
    def test_save_and_restore(self):
        # Bounds must survive a save/restore round trip.
        self.check_saving(self.e, self.scene, self.bounds)
    def test_deepcopied(self):
        # Bounds must survive deep-copying the scene.
        self.check_deepcopying(self.scene, self.bounds)
def test_mask_sha256():
    """SHA-256 hash masking produces the known digest for a salted input value."""
    masker = HashMaskingStrategy(HashMaskingConfiguration(algorithm='SHA-256'))
    # Seed the secret cache with the salt the strategy will look up.
    salt_secret = MaskingSecretCache[str](secret='adobo', masking_strategy=HashMaskingStrategy.name, secret_type=SecretType.salt)
    cache_secret(salt_secret, request_id)
    expected = '1c015e801323afa54bde5e4d510809e6b5f14ad9b9961c48cbd7143106b6e596'
    assert (masker.mask(['monkey'], request_id)[0] == expected)
    clear_cache_secrets(request_id)
def fix_content(content, tmpdir):
    """Render an activation-script template by substituting virtualenv placeholders.

    *tmpdir* is a py.path-like directory (supports .join/.basename/str()).
    Returns the content with every __PLACEHOLDER__ replaced.
    """
    if nodeenv.is_WIN:
        bin_name = 'Scripts'
        node_name = 'node.exe'
    else:
        bin_name = 'bin'
        node_name = 'node'
    # Removed a leftover no-op statement from the original:
    # `tmpdir.join('Scripts').join('node.exe')` built a path and discarded it.
    # Placeholder -> concrete value (insertion order preserved, as before).
    replacements = {
        '__NODE_VIRTUAL_PROMPT__': '({})'.format(tmpdir.basename),
        '__NODE_VIRTUAL_ENV__': str(tmpdir),
        '__SHIM_NODE__': str(tmpdir.join(bin_name).join(node_name)),
        '__BIN_NAME__': bin_name,
        '__MOD_NAME__': os.path.join('lib', 'node_modules'),
        '__NPM_CONFIG_PREFIX__': '$NODE_VIRTUAL_ENV',
    }
    for placeholder, value in replacements.items():
        content = content.replace(placeholder, value)
    return content
def get_all_package_ids() -> Set[PackageId]:
    """Collect the PackageId of every non-test package in the current release."""
    result: Set[PackageId] = set()
    now = get_hashes_from_current_release()
    now_by_type = split_hashes_by_type(now)
    for (type_, name_to_hashes) in now_by_type.items():
        # Only the package names are needed here; hash values are ignored
        # (iterating keys directly instead of .items() with an unused value).
        for name in name_to_hashes:
            if (name in TEST_PROTOCOLS):
                continue
            configuration_file_path = get_configuration_file_path(type_, name)
            public_id = get_public_id_from_yaml(configuration_file_path)
            # Drop the trailing character of type_ (presumably a plural 's')
            # to form the PackageType value.
            package_id = PackageId(PackageType(type_[:(- 1)]), public_id)
            result.add(package_id)
    return result
class FSWriter(BaseWriter):
    """Writer that persists records as newline-delimited files on the local filesystem."""
    def __init__(self, params):
        super().__init__(params)
        self.type = 'filesystem'
        # Output directory, resolved relative to the current working directory.
        self.out_dir = params.get('directory')
    def write_data(self, data, file_name=None):
        """Write each record of *data* on its own line of ``<file_name>.json``.

        A random UUID file name is used when none is supplied.  The output
        directory is created on demand.
        """
        if (not file_name):
            file_name = str(uuid.uuid4())
        dir_path = os.path.join(os.path.abspath(os.getcwd()), self.out_dir)
        os.makedirs(dir_path, exist_ok=True)
        # os.path.join instead of manual '/' concatenation for portability.
        out_path = os.path.join(dir_path, '{}.json'.format(file_name))
        with open(out_path, 'w') as f:
            for d in data:
                f.write('{}\n'.format(d))
def test_multiple_tasks(test_client_factory: Callable[(..., TestClient)]):
    """Background tasks attached to a Response should all run after it is sent."""
    TASK_COUNTER = 0
    def increment(amount):
        # Accumulate into the enclosing counter so each task run is observable.
        nonlocal TASK_COUNTER
        TASK_COUNTER += amount
    async def app(scope, receive, send):
        # Queue three tasks on the response; they execute after the body is sent.
        tasks = BackgroundTasks()
        tasks.add_task(increment, amount=1)
        tasks.add_task(increment, amount=2)
        tasks.add_task(increment, amount=3)
        response = Response('tasks initiated', media_type='text/plain', background=tasks)
        (await response(scope, receive, send))
    client = test_client_factory(app)
    response = client.get('/')
    assert (response.text == 'tasks initiated')
    # 1 + 2 + 3: every queued task ran exactly once.
    assert (TASK_COUNTER == ((1 + 2) + 3))
class FidesModel(BaseModel):
    """Base pydantic model for Fides resources identified by a fides_key."""
    fides_key: FidesKey = Field(description='A unique key used to identify this resource.')
    organization_fides_key: FidesKey = Field(default='default_organization', description='Defines the Organization that this resource belongs to.')
    # Optional free-form labels attached to the resource.
    tags: Optional[List[str]] = None
    name: Optional[str] = name_field
    description: Optional[str] = description_field
    class Config():
        # Ignore unknown fields; allow construction from ORM objects.
        extra = 'ignore'
        orm_mode = True
def streaming_post_to_es(client: Elasticsearch, chunk: list, index_name: str, job_name: str=None, delete_before_index: bool=True, delete_key: str='_id') -> Tuple[(int, int)]:
    """Bulk-index *chunk* into *index_name*, optionally deleting matching docs first.

    Returns a (success_count, failure_count) tuple.  On any exception the error
    is logged (message truncated to its first and last 2000 characters) and
    re-raised as a RuntimeError chained to the original exception.
    """
    (success, failed) = (0, 0)
    try:
        if delete_before_index:
            # Remove existing copies of these docs so the bulk call is a clean insert.
            value_list = [doc[delete_key] for doc in chunk]
            delete_docs_by_unique_key(client, delete_key, value_list, job_name, index_name, refresh_after=False)
        for (ok, item) in helpers.streaming_bulk(client, actions=chunk, chunk_size=ES_BATCH_ENTRIES, max_chunk_bytes=ES_MAX_BATCH_BYTES, max_retries=10, index=index_name):
            if ok:
                success += 1
            else:
                failed += 1
    except Exception as e:
        logger.error(f'''Error on partition {job_name}:
{str(e)[:2000]}
...
{str(e)[(- 2000):]}
''')
        # Chain the original exception so the root cause is preserved
        # (the original raised a bare RuntimeError and lost it).
        raise RuntimeError(f'{job_name}') from e
    logger.info(format_log(f'Success: {success:,} | Fail: {failed:,}', name=job_name, action='Index'))
    return (success, failed)
class TimeStamped(object):
    """Swagger-generated model carrying a single ``created_at`` timestamp.

    The original contained decorator residue: the getter lacked ``@property``
    and the setter's decorator had been mangled into the bare expression
    ``_at.setter`` (a NameError at class-creation time).  The
    ``@property`` / ``@created_at.setter`` pair is restored here.
    """
    # Maps attribute name -> swagger type.
    swagger_types = {'created_at': 'datetime'}
    # Maps attribute name -> JSON key.
    attribute_map = {'created_at': 'createdAt'}
    def __init__(self, created_at=None):
        self._created_at = None
        self.discriminator = None
        if (created_at is not None):
            self.created_at = created_at
    @property
    def created_at(self):
        """Creation timestamp of this resource."""
        return self._created_at
    @created_at.setter
    def created_at(self, created_at):
        self._created_at = created_at
    def to_dict(self):
        """Return a dict of the model's swagger attributes, recursing into nested models."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        # Generated escape hatch for dict subclasses; dead for this class.
        if issubclass(TimeStamped, dict):
            for (key, value) in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        return self.to_str()
    def __eq__(self, other):
        # Equal iff same type and same attribute dict.
        if (not isinstance(other, TimeStamped)):
            return False
        return (self.__dict__ == other.__dict__)
    def __ne__(self, other):
        return (not (self == other))
# NOTE(review): the parenthesized tuples below appear to be the argument lists
# of stripped click decorators (@click.command/@click.option) for a plotting
# CLI; `(version=(1, 1, 6))` is not valid standalone Python — confirm against
# the original source before executing this module.
(version=(1, 1, 6))
()
('-k', '--size', help='Plot size', type=int, default=32, show_default=True)
('--override-k', help='Force size smaller than 32', default=False, show_default=True, is_flag=True)
('-n', '--num', help='Number of plots or challenges', type=int, default=1, show_default=True)
('-b', '--buffer', help='Megabytes for sort/plot buffer', type=int, default=3389, show_default=True)
('-r', '--num_threads', help='Number of threads to use', type=int, default=2, show_default=True)
('-u', '--buckets', help='Number of buckets', type=int, default=128, show_default=True)
('-a', '--alt_fingerprint', type=int, default=None, help='Enter the alternative fingerprint of the key you want to use')
('-c', '--pool_contract_address', type=str, default=None, help='Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None')
('-f', '--farmer_public_key', help='Hex farmer public key', type=str, default=None)
('-p', '--pool_public_key', help='Hex public key of pool', type=str, default=None)
('-t', '--tmp_dir', help='Temporary directory for plotting files', type=click.Path(), default=pathlib.Path('.'), show_default=True)
('-2', '--tmp2_dir', help='Second temporary directory for plotting files', type=click.Path(), default=None)
('-d', '--final_dir', help='Final directory for plots (relative or absolute)', type=click.Path(), default=pathlib.Path('.'), show_default=True)
('-i', '--plotid', help='PlotID in hex for reproducing plots (debugging only)', type=str, default=None)
('-m', '--memo', help='Memo in hex for reproducing plots (debugging only)', type=str, default=None)
('-e', '--nobitfield', help='Disable bitfield', default=False, is_flag=True)
('-x', '--exclude_final_dir', help='Skips adding [final dir] to harvester for farming', default=False, is_flag=True)
def _cli_1_1_6() -> None:
    # Placeholder body; the (stripped) decorators above define the CLI surface.
    pass
def test_select_range_mult(monkeypatch: MonkeyPatch):
    """Multiple dash-ranges typed at the prompt expand to the union of cat ids."""
    # Feed the prompt exactly one scripted answer.
    answers = iter(['1-5 7-10'])
    monkeypatch.setattr('builtins.input', (lambda : next(answers)))
    save_stats = {'cats': [1, 1, 1, 0, 1]}
    selected = cat_id_selector.select_cats_range(save_stats)
    assert (selected == [1, 2, 3, 4, 5, 7, 8, 9, 10])
def test_success_message_succeeds():
    """success_message echoes the payload, injects status=0, and honors return_code."""
    body, status_code = success_message({'my': 'response'}, '/any/url')
    assert body['my'] == 'response'
    assert body['status'] == 0
    assert status_code == 200
    # An explicit return_code overrides the default 200.
    _, accepted_code = success_message({'my': 'response'}, '/any/url', return_code=202)
    assert accepted_code == 202
def test_paginate_query(sqlalchemy_storage):
    """Page 2 at size 3 over 10 stored items yields correct items and metadata."""
    # Seed ten items so multiple pages exist.
    for idx in range(10):
        sqlalchemy_storage.save(MockStorageItem(MockResourceIdentifier(str(idx)), f'test_data_{idx}'))
    query_spec = QuerySpec(conditions={})
    page_result = sqlalchemy_storage.paginate_query(2, 3, MockStorageItem, query_spec)
    assert len(page_result.items) == 3
    assert page_result.page == 2
    assert page_result.total_pages == 4
    assert page_result.total_count == 10
class TestTachoMotorSpeedDValue(ptc.ParameterizedTestCase):
    """Exercise the motor's speed_d (speed-PID derivative constant) attribute."""
    def test_speed_d_negative(self):
        # Negative values are rejected with IOError.
        with self.assertRaises(IOError):
            self._param['motor'].speed_d = (- 1)
    def test_speed_d_zero(self):
        self._param['motor'].speed_d = 0
        self.assertEqual(self._param['motor'].speed_d, 0)
    def test_speed_d_positive(self):
        self._param['motor'].speed_d = 1
        self.assertEqual(self._param['motor'].speed_d, 1)
    def test_speed_d_after_reset(self):
        """reset restores speed_d to the per-test override or the motor_info default."""
        self._param['motor'].speed_d = 1
        self._param['motor'].command = 'reset'
        if ('speed_pid' in self._param):
            expected = self._param['speed_pid']['kD']
        else:
            expected = motor_info[self._param['motor'].driver_name]['speed_d']
        self.assertEqual(self._param['motor'].speed_d, expected)
def write_mcj_data(outFile):
    """Dump the module-level ``blk_data`` blocks to ``<outFile>_mct.json``.

    Output follows the MifareClassicTool JSON layout: each block's hex byte
    strings are joined, upper-cased, and keyed by the block's decimal index.
    """
    b_data = {}
    dat = {'Created': 'MifareClassicTool', 'FileType': 'mfcard', 'blocks': b_data}
    # enumerate() replaces the original's manually incremented counter.
    for (i, b) in enumerate(blk_data):
        b_data[str(i)] = ''.join(b).upper()
    with open((outFile + '_mct.json'), 'w', encoding='utf-8') as fd:
        json.dump(dat, fd, indent=2)
# NOTE(review): the bare call and tuples below look like stripped click
# decorator argument lists (@_group.command / @click.option) — confirm
# against the original source.
_group.command('kibana-diff')
('--rule-id', '-r', multiple=True, help='Optionally specify rule ID')
('--repo', default='elastic/kibana', help='Repository where branch is located')
('--branch', '-b', default='main', help='Specify the kibana branch to diff against')
('--threads', '-t', type=click.IntRange(1), default=50, help='Number of threads to use to download rules')
def kibana_diff(rule_id, repo, branch, threads):
    """Diff local repo rules against the rules present in a Kibana branch.

    Prints (and returns) a dict listing rules missing on either side, version
    mismatches, and summary stats.
    """
    from .misc import get_kibana_rules
    rules = RuleCollection.default()
    # Restrict to the requested rule IDs, or to production rules by default.
    if rule_id:
        rules = rules.filter((lambda r: (r.id in rule_id))).id_map
    else:
        rules = rules.filter(production_filter).id_map
    # Hash both sides' rule contents so equality checks are cheap.
    repo_hashes = {r.id: r.contents.sha256(include_version=True) for r in rules.values()}
    kibana_rules = {r['rule_id']: r for r in get_kibana_rules(repo=repo, branch=branch, threads=threads).values()}
    kibana_hashes = {r['rule_id']: dict_hash(r) for r in kibana_rules.values()}
    missing_from_repo = list(set(kibana_hashes).difference(set(repo_hashes)))
    missing_from_kibana = list(set(repo_hashes).difference(set(kibana_hashes)))
    rule_diff = []
    # NOTE: this loop rebinds the `rule_id` parameter, which has already been
    # consumed above.
    for (rule_id, rule_hash) in repo_hashes.items():
        if (rule_id in missing_from_kibana):
            continue
        if (rule_hash != kibana_hashes[rule_id]):
            rule_diff.append(f"versions - repo: {rules[rule_id].contents.autobumped_version}, kibana: {kibana_rules[rule_id]['version']} -> {rule_id} - {rules[rule_id].contents.name}")
    diff = {'missing_from_kibana': [f'{r} - {rules[r].name}' for r in missing_from_kibana], 'diff': rule_diff, 'missing_from_repo': [f"{r} - {kibana_rules[r]['name']}" for r in missing_from_repo]}
    diff['stats'] = {k: len(v) for (k, v) in diff.items()}
    diff['stats'].update(total_repo_prod_rules=len(rules), total_gh_prod_rules=len(kibana_rules))
    click.echo(json.dumps(diff, indent=2, sort_keys=True))
    return diff
def extractWinkcherryfoxBlogspotCom(item):
    """Build a release message for this feed item; None for non-chapters, False when untagged."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Previews and titles carrying neither a chapter nor a volume are skipped.
    if not (chp or vol) or ('preview' in item['title'].lower()):
        return None
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesTreemapStatesHover(Options):
    """Hover-state options for treemap series.

    The original contained duplicate plain ``def``s for each option name, so
    the later definition silently overrode the earlier one — the signature of
    stripped ``@property`` / ``@<name>.setter`` decorators, which are
    restored here.  Getters read the configured value (with its default);
    setters store the value.
    """

    @property
    def animation(self) -> 'OptionSeriesTreemapStatesHoverAnimation':
        """Animation sub-options for the hover state."""
        return self._config_sub_data('animation', OptionSeriesTreemapStatesHoverAnimation)

    @property
    def brightness(self):
        """Brightness adjustment applied on hover (default 'undefined')."""
        return self._config_get('undefined')

    @brightness.setter
    def brightness(self, num: float):
        self._config(num, js_type=False)

    @property
    def enabled(self):
        """Whether the hover state is enabled (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesTreemapStatesHoverHalo':
        """Halo sub-options for the hover state."""
        return self._config_sub_data('halo', OptionSeriesTreemapStatesHoverHalo)

    @property
    def lineWidth(self):
        """Line width on hover (no default)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Increment added to the line width on hover (default 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesTreemapStatesHoverMarker':
        """Marker sub-options for the hover state."""
        return self._config_sub_data('marker', OptionSeriesTreemapStatesHoverMarker)

    @property
    def opacity(self):
        """Point opacity on hover (default 0.75)."""
        return self._config_get(0.75)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        """Whether a shadow is applied on hover (default False)."""
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
class PythonConnectionManager(metaclass=abc.ABCMeta):
    """Manage per-thread adapter connections: creation, naming, retry, cleanup.

    Connections are tracked in ``thread_connections`` keyed by
    ``(pid, thread id)`` and guarded by a multiprocessing-context RLock.
    """
    # Adapter type name; concrete subclasses must provide a real value.
    TYPE: str = NotImplemented
    def __init__(self, profile: AdapterRequiredConfig):
        self.profile = profile
        # Prefer credentials matching this adapter's TYPE; otherwise fall back
        # to the profile's python adapter credentials.
        if (profile.credentials.type == self.TYPE):
            self.credentials = profile.credentials
        else:
            self.credentials = profile.python_adapter_credentials
        self.thread_connections: Dict[(Hashable, Connection)] = {}
        self.lock: RLock = flags.MP_CONTEXT.RLock()
    # NOTE(review): takes no self — presumably a @staticmethod whose decorator
    # was stripped (callers below invoke it via self); confirm against source.
    def get_thread_identifier() -> Hashable:
        # Key is (process id, thread id) so each thread owns one connection.
        return (os.getpid(), get_ident())
    def get_thread_connection(self) -> Connection:
        """Return the current thread's connection; raise if none is registered."""
        key = self.get_thread_identifier()
        with self.lock:
            if (key not in self.thread_connections):
                raise InvalidConnectionError(key, list(self.thread_connections))
            return self.thread_connections[key]
    def set_thread_connection(self, conn: Connection) -> None:
        """Register *conn* for the current thread; refuse to overwrite an entry."""
        key = self.get_thread_identifier()
        if (key in self.thread_connections):
            raise DbtInternalError('In set_thread_connection, existing connection exists for {}')
        self.thread_connections[key] = conn
    def get_if_exists(self) -> Optional[Connection]:
        """Return the current thread's connection, or None."""
        key = self.get_thread_identifier()
        with self.lock:
            return self.thread_connections.get(key)
    def clear_thread_connection(self) -> None:
        """Drop the current thread's connection entry, if present."""
        key = self.get_thread_identifier()
        with self.lock:
            if (key in self.thread_connections):
                del self.thread_connections[key]
    def set_connection_name(self, name: Optional[str]=None) -> Connection:
        """Get or create this thread's connection and assign it *name*.

        Reuses a connection already open under the same name; otherwise
        attaches a lazy handle so the connection opens on first use.
        """
        conn_name: str
        if (name is None):
            conn_name = 'master'
        else:
            if (not isinstance(name, str)):
                raise CompilationError(f'For connection name, got {name} - not a string!')
            assert isinstance(name, str)
            conn_name = name
        conn = self.get_if_exists()
        if (conn is None):
            conn = Connection(type=Identifier(self.TYPE), name=None, state=ConnectionState.INIT, transaction_open=False, handle=None, credentials=self.credentials)
            self.set_thread_connection(conn)
        if ((conn.name == conn_name) and (conn.state == 'open')):
            # Already open under the requested name: nothing to do.
            return conn
        fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
        if (conn.state == 'open'):
            fire_event(ConnectionReused(conn_name=conn_name))
        else:
            # Defer the actual open until the handle is first touched.
            conn.handle = LazyHandle(self.open)
        conn.name = conn_name
        return conn
    # NOTE(review): first parameter is cls — presumably a @classmethod whose
    # decorator was stripped; confirm against the original source.
    def retry_connection(cls, connection: Connection, connect: Callable[([], AdapterHandle)], logger: AdapterLogger, retryable_exceptions: Iterable[Type[Exception]], retry_limit: int=1, retry_timeout: Union[(Callable[([int], SleepTime)], SleepTime)]=1, _attempts: int=0) -> Connection:
        """Open *connection* via *connect*, retrying listed retryable exceptions.

        Recurses up to ``retry_limit`` times, sleeping ``retry_timeout`` (a
        value or a function of the attempt count) between attempts.  Any other
        exception, or exhaustion of retries, raises FailedToConnectError.
        """
        timeout = (retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout)
        if (timeout < 0):
            raise FailedToConnectError('retry_timeout cannot be negative or return a negative time.')
        if ((retry_limit < 0) or (retry_limit > sys.getrecursionlimit())):
            # Guard against runaway recursion from a bad retry_limit.
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise FailedToConnectError('retry_limit cannot be negative')
        try:
            connection.handle = connect()
            connection.state = ConnectionState.OPEN
            return connection
        except tuple(retryable_exceptions) as e:
            if (retry_limit <= 0):
                connection.handle = None
                connection.state = ConnectionState.FAIL
                raise FailedToConnectError(str(e))
            logger.debug(f'''Got a retryable error when attempting to open a {cls.TYPE} connection.
{retry_limit} attempts remaining. Retrying in {timeout} seconds.
Error:
{e}''')
            sleep(timeout)
            return cls.retry_connection(connection=connection, connect=connect, logger=logger, retry_limit=(retry_limit - 1), retry_timeout=retry_timeout, retryable_exceptions=retryable_exceptions, _attempts=(_attempts + 1))
        except Exception as e:
            # Non-retryable failure: mark the connection failed and surface it.
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise FailedToConnectError(str(e))
    def cancel(self, connection: Connection):
        raise NotImplementedError('`cancel` is not implemented for this adapter!')
    def cancel_open(self) -> List[str]:
        """Cancel every open connection except the current thread's; return their names."""
        names = []
        this_connection = self.get_if_exists()
        with self.lock:
            for connection in self.thread_connections.values():
                if (connection is this_connection):
                    continue
                if ((connection.handle is not None) and (connection.state == ConnectionState.OPEN)):
                    self.cancel(connection)
                if (connection.name is not None):
                    names.append(connection.name)
        return names
    # NOTE(review): takes cls — presumably a @classmethod with its decorator
    # stripped; subclasses are expected to override.
    def open(cls, connection: Connection) -> Connection:
        raise NotImplementedError('`open` is not implemented for this adapter!')
    def release(self) -> None:
        """Close the current thread's connection; drop the entry only if closing fails."""
        with self.lock:
            conn = self.get_if_exists()
            if (conn is None):
                return
        try:
            self.close(conn)
        except Exception:
            # A failed close leaves the entry unusable; remove it and re-raise.
            self.clear_thread_connection()
            raise
    def cleanup_all(self) -> None:
        """Close every tracked connection (logging any left open) and forget them."""
        with self.lock:
            for connection in self.thread_connections.values():
                if (connection.state not in {'closed', 'init'}):
                    fire_event(ConnectionLeftOpen(conn_name=connection.name))
                else:
                    fire_event(ConnectionClosed(conn_name=connection.name))
                self.close(connection)
            self.thread_connections.clear()
    # NOTE(review): takes cls — presumably a @classmethod with its decorator stripped.
    def _close_handle(cls, connection: Connection) -> None:
        # Not every handle implements close (e.g. lazy handles never opened).
        if hasattr(connection.handle, 'close'):
            connection.handle.close()
    def close(cls, connection: Connection) -> Connection:
        """Close *connection*'s handle; a no-op for CLOSED/INIT connections."""
        if (connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}):
            return connection
        cls._close_handle(connection)
        connection.state = ConnectionState.CLOSED
        return connection
    def execute(self, compiled_code: str) -> Tuple[(AdapterResponse, Any)]:
        raise NotImplementedError('`execute` is not implemented for this adapter!')
class OptionPlotoptionsBulletSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for bullet-series sonification.

    The original's duplicate plain ``def``s per name (later definition
    silently overriding the earlier) indicate stripped ``@property`` /
    ``@<name>.setter`` decorators; they are restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function for the play delay (no default)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the play delay maps to (no default)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (no default)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (no default)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping applies within (no default)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class IntricateApp():
    """ASGI app whose __call__ is produced by wrapping a coroutine-returning method."""
    def unorthodox_call(self, scope, receive, send):
        # Returns the coroutine instead of awaiting it; the wrapper handles it.
        return self._call_factory(scope, receive, send)
    async def _call_factory(self, scope, receive, send):
        (await send('Hello!'))
        (await send('Bye.'))
    # The wrapped method becomes the actual ASGI entry point.
    __call__ = _asgi_helpers._wrap_asgi_coroutine_func(unorthodox_call)
class QuadratureRule(object):
    """A quadrature rule: weighted points on a reference cell of a given degree."""

    def __init__(self, cell, degree, points, weights):
        """Store the rule and validate point dimensionality and point/weight counts."""
        self.cell = cell
        self.points = np.array(points, dtype=np.double)
        self.weights = np.array(weights, dtype=np.double)
        self.degree = degree
        # Every quadrature point must live in the reference cell's dimension.
        if (self.points.shape[1] != self.cell.dim):
            raise ValueError('Dimension mismatch between reference cell and quadrature points')
        # Exactly one weight per quadrature point.
        if (len(self.weights) != self.points.shape[0]):
            raise ValueError('Number of quadrature points and quadrature weights must match')

    def integrate(self, function):
        """Integrate *function* over the cell; concrete rules must implement this."""
        raise NotImplementedError
def get_jobdetails(job_details):
    """Parse a job-details repr string into a dict of known fields.

    Extracts ``key=value`` pairs from the last parenthesized group of the
    string, maps rule/mask/wordlist file values back to their CRACK_CONF
    config keys, and short-circuits for benchmark jobs.
    """
    deets_dict = {}
    # Benchmark jobs carry only a small subset of fields.
    if ('Benchmark' in job_details):
        deet_match_list = ['name', 'benchmark', 'benchmark_all']
    else:
        deet_match_list = ['hash_mode', 'attack_mode', 'mask', 'wordlist', 'wordlist2', 'rules', 'name', 'username', 'increment', 'increment_min', 'increment_max', 'disable_brain', 'brain_check', 'restore']
    logger.debug('Parsing job details:\n{}'.format(job_details))
    # A rules list, if present, is rendered inside the last [...] group.
    if ('[' in job_details):
        rules_split = job_details[(job_details.rfind('[') + 1):job_details.rfind(']')].strip()
        rules_list = [rule.strip().rstrip("'").lstrip("'") for rule in rules_split.split(',')]
    else:
        rules_list = None
    # key=value pairs live inside the last (...) group, comma-separated.
    deets_split = job_details[(job_details.rfind('(') + 1):job_details.rfind(')')].split(',')
    for deets in deets_split:
        deet = deets.split('=')[0].strip(' ')
        if (deet in deet_match_list):
            # Strip surrounding whitespace and quote characters from the value.
            deets_dict[deet] = deets.strip().split('=')[1].strip().rstrip("'").lstrip("'")
    if ('Benchmark' in job_details):
        return deets_dict
    # Map raw rule file values back to their configured rule names.
    if rules_list:
        rule_names = []
        for (key, rule) in dict(CRACK_CONF['rules']).items():
            if (rule in rules_list):
                rule_names.append(key)
        deets_dict['rules'] = rule_names
    else:
        deets_dict['rules'] = None
    # Reverse-map the mask value to its configured mask key, when present.
    if ('mask' in deets_dict):
        if deets_dict['mask']:
            mask = deets_dict['mask']
            for (key, mask_file) in dict(CRACK_CONF['masks']).items():
                if (mask in mask_file):
                    deets_dict['mask'] = key
    # Reverse-map the primary wordlist to its configured key.
    if ('wordlist' in deets_dict):
        if deets_dict['wordlist']:
            wordlist = deets_dict['wordlist']
            for (key, word) in dict(CRACK_CONF['wordlists']).items():
                if (wordlist in word):
                    deets_dict['wordlist'] = key
                    break
        else:
            deets_dict['wordlist'] = None
    # Same reverse-mapping for the secondary wordlist.
    if ('wordlist2' in deets_dict):
        wordlist = deets_dict['wordlist2']
        for (key, word) in dict(CRACK_CONF['wordlists']).items():
            if (wordlist in word):
                deets_dict['wordlist2'] = key
                break
        else:
            deets_dict['wordlist2'] = None
    return deets_dict
class icmp(packet_base.PacketBase):
    """ICMPv4 header (type, code, checksum) with an optional typed payload."""
    # type (1 byte), code (1 byte), checksum (2 bytes), network byte order.
    _PACK_STR = '!BBH'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    # Registry mapping an ICMP type value -> payload parser class.
    _ICMP_TYPES = {}
    # NOTE(review): takes no self/cls — presumably a @staticmethod whose
    # decorator was stripped; confirm against the original source.
    def register_icmp_type(*args):
        def _register_icmp_type(cls):
            # Register *cls* as the payload parser for each given type value.
            for type_ in args:
                icmp._ICMP_TYPES[type_] = cls
            return cls
        return _register_icmp_type
    def __init__(self, type_=ICMP_ECHO_REQUEST, code=0, csum=0, data=b''):
        super(icmp, self).__init__()
        self.type = type_
        self.code = code
        self.csum = csum
        self.data = data
    # NOTE(review): takes cls — presumably a @classmethod whose decorator was stripped.
    def parser(cls, buf):
        """Parse *buf* into an icmp instance; returns (msg, None, None)."""
        (type_, code, csum) = struct.unpack_from(cls._PACK_STR, buf)
        msg = cls(type_, code, csum)
        offset = cls._MIN_LEN
        if (len(buf) > offset):
            # Delegate the payload to a registered per-type parser when available,
            # otherwise keep the raw trailing bytes.
            cls_ = cls._ICMP_TYPES.get(type_, None)
            if cls_:
                msg.data = cls_.parser(buf, offset)
            else:
                msg.data = buf[offset:]
        return (msg, None, None)
    def serialize(self, payload, prev):
        """Serialize the header plus payload; fills in the checksum when csum is 0."""
        hdr = bytearray(struct.pack(icmp._PACK_STR, self.type, self.code, self.csum))
        if self.data:
            if (self.type in icmp._ICMP_TYPES):
                assert isinstance(self.data, _ICMPv4Payload)
                hdr += self.data.serialize()
            else:
                hdr += self.data
        else:
            # Default to an empty echo payload when none was provided.
            self.data = echo()
            hdr += self.data.serialize()
        if (self.csum == 0):
            # Compute and write the checksum back into the header bytes.
            self.csum = packet_utils.checksum(hdr)
            struct.pack_into('!H', hdr, 2, self.csum)
        return hdr
    def __len__(self):
        return (self._MIN_LEN + len(self.data))
class QueryStub(object):
    """gRPC client stub for the cosmos.evidence.v1beta1.Query service."""
    def __init__(self, channel):
        """Bind the unary-unary Evidence and AllEvidence RPCs to *channel*."""
        self.Evidence = channel.unary_unary('/cosmos.evidence.v1beta1.Query/Evidence', request_serializer=cosmos_dot_evidence_dot_v1beta1_dot_query__pb2.QueryEvidenceRequest.SerializeToString, response_deserializer=cosmos_dot_evidence_dot_v1beta1_dot_query__pb2.QueryEvidenceResponse.FromString)
        self.AllEvidence = channel.unary_unary('/cosmos.evidence.v1beta1.Query/AllEvidence', request_serializer=cosmos_dot_evidence_dot_v1beta1_dot_query__pb2.QueryAllEvidenceRequest.SerializeToString, response_deserializer=cosmos_dot_evidence_dot_v1beta1_dot_query__pb2.QueryAllEvidenceResponse.FromString)
def create_dyn_dep_munger(buildopts, install_dirs, strip: bool=False) -> Optional[DepBase]:
    """Return the platform-appropriate dynamic-dependency munger, or None.

    Platforms are probed in a fixed order; Linux and FreeBSD both use the
    ELF handler.
    """
    platform_handlers = (
        (buildopts.is_linux, ElfDeps),
        (buildopts.is_darwin, MachDeps),
        (buildopts.is_windows, WinDeps),
        (buildopts.is_freebsd, ElfDeps),
    )
    for probe, handler in platform_handlers:
        if probe():
            return handler(buildopts, install_dirs, strip)
    return None
# NOTE(review): the parenthesized argument lists below look like the remains
# of stripped click decorators (@click.group(...) / @click.option(...)); the
# first line is not valid standalone Python — confirm against the original
# source before relying on this module.
(cls=PluggableGroup, entry_point_group='jb.cmdline', use_internal={'build', 'clean', 'config', 'create', 'myst'}, context_settings={'help_option_names': ['-h', '--help']})
('--version', is_flag=True, expose_value=False, is_eager=True, help='Show the version and exit.', callback=version_callback)
def main():
    # Group entry point; subcommands come from the entry-point group above.
    pass
class TraditionalPokerScoreDetectorTests(unittest.TestCase):
    """Hand-recognition tests for TraditionalPokerScoreDetector (lowest rank 7)."""

    def _test_detect(self, cards, expected_category, expected_cards):
        # Shared driver: detect the score for *cards* and check both the
        # category and the canonical card ordering reported by the detector.
        detected = TraditionalPokerScoreDetector(lowest_rank=7).get_score(cards)
        self.assertIsInstance(detected, TraditionalPokerScore)
        self.assertEqual(detected.category, expected_category, 'Wrong category detected')
        self.assertEqual(detected.cards, expected_cards, 'Incorrect cards order')

    def test_detect_highest_card(self):
        self._test_detect(
            cards=[Card(9, 0), Card(10, 1), Card(7, 2), Card(14, 0), Card(11, 1)],
            expected_category=TraditionalPokerScore.NO_PAIR,
            expected_cards=[Card(14, 0), Card(11, 1), Card(10, 1), Card(9, 0), Card(7, 2)],
        )

    def test_detect_pair(self):
        self._test_detect(
            cards=[Card(9, 0), Card(10, 1), Card(10, 2), Card(14, 0), Card(11, 1)],
            expected_category=TraditionalPokerScore.PAIR,
            expected_cards=[Card(10, 2), Card(10, 1), Card(14, 0), Card(11, 1), Card(9, 0)],
        )

    def test_detect_double_pair(self):
        self._test_detect(
            cards=[Card(9, 0), Card(10, 1), Card(10, 2), Card(14, 0), Card(9, 1)],
            expected_category=TraditionalPokerScore.TWO_PAIR,
            expected_cards=[Card(10, 2), Card(10, 1), Card(9, 1), Card(9, 0), Card(14, 0)],
        )

    def test_detect_three_of_a_kind(self):
        self._test_detect(
            cards=[Card(10, 0), Card(10, 1), Card(14, 0), Card(10, 2), Card(9, 1)],
            expected_category=TraditionalPokerScore.TRIPS,
            expected_cards=[Card(10, 2), Card(10, 1), Card(10, 0), Card(14, 0), Card(9, 1)],
        )

    def test_detect_straight(self):
        self._test_detect(
            cards=[Card(10, 0), Card(13, 1), Card(11, 2), Card(14, 0), Card(12, 1)],
            expected_category=TraditionalPokerScore.STRAIGHT,
            expected_cards=[Card(14, 0), Card(13, 1), Card(12, 1), Card(11, 2), Card(10, 0)],
        )

    def test_detect_straight_minimum(self):
        # With lowest_rank=7 the ace acts as the low card of a 7-high straight.
        self._test_detect(
            cards=[Card(7, 0), Card(10, 1), Card(8, 2), Card(14, 0), Card(9, 1)],
            expected_category=TraditionalPokerScore.STRAIGHT,
            expected_cards=[Card(10, 1), Card(9, 1), Card(8, 2), Card(7, 0), Card(14, 0)],
        )

    def test_detect_full_house(self):
        self._test_detect(
            cards=[Card(7, 0), Card(11, 1), Card(7, 2), Card(11, 0), Card(11, 2)],
            expected_category=TraditionalPokerScore.FULL_HOUSE,
            expected_cards=[Card(11, 2), Card(11, 1), Card(11, 0), Card(7, 2), Card(7, 0)],
        )

    def test_detect_flush(self):
        self._test_detect(
            cards=[Card(9, 3), Card(10, 3), Card(7, 3), Card(14, 3), Card(11, 3)],
            expected_category=TraditionalPokerScore.FLUSH,
            expected_cards=[Card(14, 3), Card(11, 3), Card(10, 3), Card(9, 3), Card(7, 3)],
        )

    def test_detect_four_of_a_kind(self):
        self._test_detect(
            cards=[Card(10, 0), Card(10, 1), Card(10, 2), Card(14, 0), Card(10, 3)],
            expected_category=TraditionalPokerScore.QUADS,
            expected_cards=[Card(10, 3), Card(10, 2), Card(10, 1), Card(10, 0), Card(14, 0)],
        )

    def test_detect_straight_flush(self):
        self._test_detect(
            cards=[Card(10, 2), Card(13, 2), Card(11, 2), Card(14, 2), Card(12, 2)],
            expected_category=TraditionalPokerScore.STRAIGHT_FLUSH,
            expected_cards=[Card(14, 2), Card(13, 2), Card(12, 2), Card(11, 2), Card(10, 2)],
        )

    def test_detect_longer_sequence(self):
        # Seven cards in: only the best five should be reported.
        self._test_detect(
            cards=[Card(7, 0), Card(7, 2), Card(8, 0), Card(7, 3), Card(8, 1), Card(8, 2), Card(14, 0)],
            expected_category=TraditionalPokerScore.TRIPS,
            expected_cards=[Card(8, 2), Card(8, 1), Card(8, 0), Card(14, 0), Card(7, 3)],
        )

    def test_detect_shorter_sequence(self):
        # Fewer than five cards still yields a score over what is available.
        self._test_detect(
            cards=[Card(8, 0), Card(7, 3), Card(8, 1), Card(8, 2)],
            expected_category=TraditionalPokerScore.TRIPS,
            expected_cards=[Card(8, 2), Card(8, 1), Card(8, 0), Card(7, 3)],
        )
def test_mars_api_key():
    # Simulates the interactive MARS API credential prompt by patching the
    # Text.ask used by climetlab's prompt machinery and capturing stdout.
    # NOTE(review): the `answers` list and the expected prompt dict below are
    # garbled — a string literal (apparently the MARS API URL) is truncated
    # (`' 'b295...`), which is not valid Python; restore these literals from
    # the original source before relying on this test.
    answers = [' 'b295aad8af30332fad2fa8c963ab7900', 'joe.']
    with patch('climetlab.sources.prompt.Text.ask', side_effect=answers):
        with patch('sys.stdout', new_callable=StringIO) as stdout:
            prompt = MARSAPIKeyPrompt().ask_user()
            assert (prompt == {'url': ' 'key': 'b295aad8af30332fad2fa8c963ab7900', 'email': 'joe.'})
            printed = stdout.getvalue().strip()
            assert printed.startswith('An API key is needed to access this dataset.')
def run():
    """Send a single SNMPv1 notification ('trap') and report any error."""
    dispatcher = SnmpDispatcher()
    # Notification payload: base trap OID plus two additional var-binds.
    notification = NotificationType(
        ObjectIdentity('1.3.6.1.6.3.1.1.5.2')
    ).loadMibs('SNMPv2-MIB').addVarBinds(
        ('1.3.6.1.6.3.1.1.4.3.0', '1.3.6.1.4.1.20408.4.1.1.2'),
        ('1.3.6.1.2.1.1.1.0', OctetString('my system')),
    )
    iterator = sendNotification(
        dispatcher,
        CommunityData('public', mpModel=0),
        UdpTransportTarget(('demo.snmplabs.com', 162)),
        'trap',
        notification,
    )
    errorIndication, errorStatus, errorIndex, varBinds = (yield from iterator)
    if errorIndication:
        print(errorIndication)
    # Traps are unconfirmed, so we can tear the dispatcher down right away.
    dispatcher.transportDispatcher.closeDispatcher()
class TestOrderedDictionary(unittest.TestCase):
    """Behavioral tests for the OrderedDictionary container."""

    def test_get_and_set(self):
        od = OrderedDictionary()
        self.assertEqual(len(od), 0)
        od['hello'] = 'goodbye'
        self.assertEqual(od['hello'], 'goodbye')

    def test_insert(self):
        od = OrderedDictionary()
        od['hello'] = 'goodbye'
        self.assertEqual(od.keys(), ['hello'])
        self.assertEqual(len(od), 1)
        # Insert at the front, then in the middle, verifying order each time.
        od.insert(0, 'stanley', 'fetcher')
        self.assertEqual(od.keys(), ['stanley', 'hello'])
        self.assertEqual(len(od), 2)
        self.assertEqual(od['stanley'], 'fetcher')
        self.assertEqual(od['hello'], 'goodbye')
        od.insert(1, 'monty', 'python')
        self.assertEqual(od.keys(), ['stanley', 'monty', 'hello'])
        self.assertEqual(len(od), 3)
        self.assertEqual(od['stanley'], 'fetcher')
        self.assertEqual(od['monty'], 'python')
        self.assertEqual(od['hello'], 'goodbye')

    def test_keeps_things_in_order(self):
        od = OrderedDictionary()
        od['hello'] = 'goodbye'
        od['stanley'] = 'fletcher'
        od['monty'] = 'python'
        self.assertEqual(od.keys(), ['hello', 'stanley', 'monty'])

    def test_iteration_over_keys(self):
        od = OrderedDictionary()
        od['hello'] = 'goodbye'
        od['stanley'] = 'fletcher'
        od['monty'] = 'python'
        try:
            for _key in od:
                pass
        except KeyError:
            self.fail('Iteration over OrderedDictionary failed')
class OptionChartOptions3dFrameLeft(Options):
    """Option accessors for chart.options3d.frame.left settings.

    NOTE(review): each option is defined twice with the same name (getter then
    setter); the ``@property`` / ``@<name>.setter`` decorators appear to have
    been stripped, so as written each later definition shadows the earlier one.
    """
    def color(self):
        # Default panel color is 'transparent'.
        return self._config_get('transparent')
    def color(self, text: str):
        self._config(text, js_type=False)
    def size(self):
        # Default thickness is 1.
        return self._config_get(1)
    def size(self, num: float):
        self._config(num, js_type=False)
    def visible(self):
        # Default visibility mode is the string 'Default'.
        return self._config_get('Default')
    def visible(self, flag: str):
        self._config(flag, js_type=False)
# NOTE(review): the four bare expressions below look like stripped click
# decorators (``@_cli.command(...)``, two ``@click.option(...)`` calls and
# ``@pass_script_info``); as written the option tuples are not valid statements.
_cli.command('set', short_help='Overrides database revision with selected migration.')
('--revision', '-r', default='head', help='The migration to set.')
('--auto-confirm', default=False, is_flag=True, help='Skip asking confirmation.')
_script_info
def migrations_set(info, revision, auto_confirm):
    """CLI handler: stamp the application's databases at *revision*."""
    from .orm.migrations.commands import set_revision
    app = info.load_app()
    dbs = info.load_db()
    set_revision(app, dbs, revision, auto_confirm)
class GraphLegend(sweetviz.graph.Graph):
    """Renders the report legend (source/compare color swatches plus target
    markers) into a base64-encoded image stored on ``self.graph_base64``."""
    def __init__(self, dataframe_report):
        # NOTE(review): does not call super().__init__() — presumably the Graph
        # base class requires no initialization; confirm.
        styles = ['graph_base.mplstyle']
        self.set_style(styles)
        fig = plt.figure(figsize=(config['Graphs'].getfloat('legend_width'), config['Graphs'].getfloat('legend_height')))
        axs = fig.add_axes([0, 0, 1, 1])
        axs.axis('off')
        # Per-axis factors converting pixel coordinates to axes fractions.
        scale = axs.transAxes.transform((1, 1))
        scale = [(1.0 / x) for x in scale]
        def to_fractions(pos_in_pix):
            # (x, y) pixel pair -> axes-fraction pair.
            return ((pos_in_pix[0] * scale[0]), (pos_in_pix[1] * scale[1]))
        def to_fractionsxy(x, y):
            # Separate x/y pixel values -> axes-fraction pair.
            return ((x * scale[0]), (y * scale[1]))
        def to_fraction_seq(seq, scalar):
            # Scale a sequence of pixel values by a single factor.
            return [(x * scalar) for x in seq]
        cycle_colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
        label_color = plt.rcParams['axes.labelcolor']
        # Fixed pixel layout of the legend artwork.
        gfx_x_source = 225
        gfx_x_compare = 350
        gfx_bar_y = 14
        gfx_line_y = 5
        bar_size = np.array([25, 6])
        bar_text_offset = np.array([6, 0])
        line_text_offset = np.array([0, (- 3)])
        # Source swatch (first cycle color) with its right-aligned label.
        axs.add_patch(patches.Rectangle(to_fractionsxy(gfx_x_source, gfx_bar_y), (bar_size[0] * scale[0]), (bar_size[1] * scale[1]), facecolor=cycle_colors[0]))
        text1 = (np.array([gfx_x_source, gfx_bar_y]) - bar_text_offset)
        text1[1] += 1
        text1_elem = plt.text((text1[0] * scale[0]), (text1[1] * scale[1]), ('' + dataframe_report.source_name), fontsize=8, color=cycle_colors[0], ha='right')
        if dataframe_report.compare_name:
            # Compare swatch (second cycle color) with its left-aligned label.
            axs.add_patch(patches.Rectangle(to_fractionsxy(gfx_x_compare, gfx_bar_y), (bar_size[0] * scale[0]), (bar_size[1] * scale[1]), facecolor=cycle_colors[1]))
            text2 = (np.array([gfx_x_compare, gfx_bar_y]) + bar_text_offset)
            text2[0] += bar_size[0]
            text2_elem = plt.text((text2[0] * scale[0]), (text2[1] * scale[1]), ('' + dataframe_report.compare_name), fontsize=8, color=cycle_colors[1])
        if (dataframe_report.get_target_type() is not None):
            # Target marker line for the source data.
            axs.add_line(matplotlib.lines.Line2D(to_fraction_seq([gfx_x_source, (gfx_x_source + bar_size[0])], scale[0]), to_fraction_seq([gfx_line_y, gfx_line_y], scale[1]), lw=1, color=sweetviz.graph.COLOR_TARGET_SOURCE, marker='o'))
            text1[1] = gfx_line_y
            text1 += line_text_offset
            # Numerical targets show an average; categorical ones a percentage.
            if (dataframe_report.get_target_type() == FeatureType.TYPE_NUM):
                text_content = ('Avg. ' + dataframe_report._target['name'])
            else:
                text_content = ('% ' + dataframe_report._target['name'])
            text1_elem = plt.text((text1[0] * scale[0]), (text1[1] * scale[1]), text_content, fontsize=8, color=sweetviz.graph.COLOR_TARGET_SOURCE, ha='right')
            if (dataframe_report.compare_name and dataframe_report._target.get('compare')):
                # Target marker line for the compared data.
                axs.add_line(matplotlib.lines.Line2D(to_fraction_seq([gfx_x_compare, (gfx_x_compare + bar_size[0])], scale[0]), to_fraction_seq([gfx_line_y, gfx_line_y], scale[1]), lw=1, color=sweetviz.graph.COLOR_TARGET_COMPARE, marker='o'))
                text2[1] = gfx_line_y
                text2 += line_text_offset
                if (dataframe_report.get_target_type() == FeatureType.TYPE_NUM):
                    text_content = ('Avg. ' + dataframe_report._target['name'])
                else:
                    text_content = ('% ' + dataframe_report._target['name'])
                text2_elem = plt.text((text2[0] * scale[0]), (text2[1] * scale[1]), text_content, fontsize=8, color=sweetviz.graph.COLOR_TARGET_COMPARE)
        # Snapshot the figure and release all matplotlib resources.
        self.graph_base64 = self.get_encoded_base64(fig)
        plt.close('all')
        return
def clean_tag(in_txt):
    """Normalize a single tag: strip surrounding whitespace, lowercase,
    and replace internal spaces with hyphens.

    Raises AssertionError when given a non-string or a comma-separated list.
    """
    assert isinstance(in_txt, str), ("Passed item is not a string! Type: '%s' -> '%s'" % (type(in_txt), in_txt))
    assert ',' not in in_txt, ("It looks like a tag list got submitted as a tag! String: '%s'" % (in_txt,))
    normalized = in_txt.strip().lower()
    return normalized.replace(' ', '-')
class TestTask(BasePyTestCase):
    # NOTE(review): the bare string expressions below look like stripped
    # ``@mock.patch(...)`` decorators; patch decorators apply bottom-up, which
    # matches the test method's argument order (main_function first).
    ('bodhi.server.tasks.bugs')
    ('bodhi.server.tasks.buildsys')
    ('bodhi.server.tasks.initialize_db')
    ('bodhi.server.tasks.config')
    ('bodhi.server.tasks.check_policies.main')
    def test_task(self, main_function, config_mock, init_db_mock, buildsys, bugs):
        """check_policies_task loads config, initializes the DB, buildsystem
        and bugtracker, then delegates to the task's main()."""
        check_policies_task()
        config_mock.load_config.assert_called_with()
        init_db_mock.assert_called_with(config_mock)
        buildsys.setup_buildsystem.assert_called_with(config_mock)
        bugs.set_bugtracker.assert_called_with()
        main_function.assert_called_with()
class OptionSeriesAreasplinerangeSonificationTracksMappingPan(Options):
    """Accessors for the areasplinerange sonification tracks ``pan`` mapping.

    NOTE(review): every option appears twice (getter then setter); the
    ``@property`` / ``.setter`` decorators appear to have been stripped, so as
    written each later definition shadows the earlier one.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the leading ``.timeout(8)`` looks like a stripped
# ``@pytest.mark.timeout(8)`` decorator; as written it is not a valid statement.
.timeout(8)
def test_account_transactions(db_context: DatabaseContext) -> None:
    """End-to-end check that transaction reads can be filtered by account.

    Builds the full chain masterkeys -> accounts -> keyinstances ->
    transactions -> transaction deltas, then verifies both read_metadata()
    and read() honor the account_id filter.
    """
    ACCOUNT_ID_1 = 10
    ACCOUNT_ID_2 = 11
    MASTERKEY_ID_1 = 20
    MASTERKEY_ID_2 = 21
    # Two master keys, one per account.
    masterkey1 = MasterKeyRow(MASTERKEY_ID_1, None, DerivationType.BIP32, b'111')
    masterkey2 = MasterKeyRow(MASTERKEY_ID_2, None, DerivationType.BIP32, b'222')
    with MasterKeyTable(db_context) as mktable:
        with SynchronousWriter() as writer:
            mktable.create([masterkey1, masterkey2], completion_callback=writer.get_callback())
            assert writer.succeeded()
    account1 = AccountRow(ACCOUNT_ID_1, MASTERKEY_ID_1, ScriptType.P2PKH, 'name1')
    account2 = AccountRow(ACCOUNT_ID_2, MASTERKEY_ID_2, ScriptType.P2PK, 'name2')
    with AccountTable(db_context) as table:
        with SynchronousWriter() as writer:
            table.create([account1, account2], completion_callback=writer.get_callback())
            assert writer.succeeded()
    KEYINSTANCE_ID_1 = 100
    KEYINSTANCE_ID_2 = 101
    # One key instance per account, linking accounts to transactions.
    key1 = KeyInstanceRow(KEYINSTANCE_ID_1, ACCOUNT_ID_1, MASTERKEY_ID_1, DerivationType.BIP32, b'333', ScriptType.P2PKH, KeyInstanceFlag.NONE, None)
    key2 = KeyInstanceRow(KEYINSTANCE_ID_2, ACCOUNT_ID_2, MASTERKEY_ID_2, DerivationType.BIP32, b'444', ScriptType.P2PKH, KeyInstanceFlag.NONE, None)
    with KeyInstanceTable(db_context) as keyinstance_table:
        with SynchronousWriter() as writer:
            keyinstance_table.create([key1, key2], completion_callback=writer.get_callback())
            assert writer.succeeded()
    # Two settled transactions with random payloads.
    TX_BYTES_1 = os.urandom(10)
    TX_HASH_1 = bitcoinx.double_sha256(TX_BYTES_1)
    tx1 = TransactionRow(tx_hash=TX_HASH_1, tx_data=TxData(height=1, position=1, fee=250, date_added=1, date_updated=2), tx_bytes=TX_BYTES_1, flags=TxFlags(((TxFlags.StateSettled | TxFlags.HasByteData) | TxFlags.HasHeight)), description=None)
    TX_BYTES_2 = os.urandom(10)
    TX_HASH_2 = bitcoinx.double_sha256(TX_BYTES_2)
    tx2 = TransactionRow(tx_hash=TX_HASH_2, tx_data=TxData(height=1, position=1, fee=250, date_added=1, date_updated=2), tx_bytes=TX_BYTES_2, flags=TxFlags(((TxFlags.StateSettled | TxFlags.HasByteData) | TxFlags.HasHeight)), description=None)
    with TransactionTable(db_context) as transaction_table:
        with SynchronousWriter() as writer:
            transaction_table.create([tx1, tx2], completion_callback=writer.get_callback())
            assert writer.succeeded()
    # Deltas associate each transaction with exactly one key instance/account.
    txd1 = TransactionDeltaRow(TX_HASH_1, KEYINSTANCE_ID_1, 100)
    txd2 = TransactionDeltaRow(TX_HASH_2, KEYINSTANCE_ID_2, 200)
    with TransactionDeltaTable(db_context) as table:
        with SynchronousWriter() as writer:
            table.create([txd1, txd2], completion_callback=writer.get_callback())
            assert writer.succeeded()
    with TransactionTable(db_context) as table:
        # Unfiltered metadata read returns both transactions.
        metadatas = table.read_metadata()
        print(metadatas)
        assert (2 == len(metadatas))
        assert ({TX_HASH_1, TX_HASH_2} == {t[0] for t in metadatas})
        # Per-account metadata reads return only the matching transaction.
        metadatas_1 = table.read_metadata(account_id=ACCOUNT_ID_1)
        assert (1 == len(metadatas_1))
        assert (TX_HASH_1 == metadatas_1[0][0])
        metadatas_2 = table.read_metadata(account_id=ACCOUNT_ID_2)
        assert (1 == len(metadatas_2))
        assert (TX_HASH_2 == metadatas_2[0][0])
        # A non-existent account matches nothing.
        metadatas_3 = table.read_metadata(account_id=(- 1))
        assert (0 == len(metadatas_3))
        # Same expectations for the full-row read() variant.
        matches = table.read()
        assert (2 == len(matches))
        assert ({TX_HASH_1, TX_HASH_2} == {t[0] for t in matches})
        matches_1 = table.read(account_id=ACCOUNT_ID_1)
        assert (1 == len(matches_1))
        assert (TX_HASH_1 == matches_1[0][0])
        matches_2 = table.read(account_id=ACCOUNT_ID_2)
        assert (1 == len(matches_2))
        assert (TX_HASH_2 == matches_2[0][0])
        matches_3 = table.read(account_id=(- 1))
        assert (0 == len(matches_3))
class WallPinRow(_WallMountedBox):
    """Wall-mounted row of angled pins (pegs) for hanging items."""
    def __init__(self) -> None:
        super().__init__()
        self.argparser.add_argument('--pins', action='store', type=int, default=8, help='number of pins')
        self.argparser.add_argument('--pinlength', action='store', type=float, default=35, help='length of pins (in mm)')
        self.argparser.add_argument('--pinwidth', action='store', type=float, default=10, help='width of pins (in mm)')
        self.argparser.add_argument('--pinspacing', action='store', type=float, default=35, help='space from middle to middle of pins (in mm)')
        self.argparser.add_argument('--pinspacing_increment', action='store', type=float, default=0.0, help='increase spacing from left to right (in mm)')
        self.argparser.add_argument('--angle', action='store', type=float, default=20.0, help='angle of the pins pointing up (in degrees)')
        self.argparser.add_argument('--hooks', action='store', type=int, default=3, help='number of hooks into the wall')
        self.argparser.add_argument('--h', action='store', type=float, default=50.0, help='height of the front plate (in mm) - needs to be at least 7 time the thickness')
    def frontCB(self):
        """Callback for the front plate: pin slots plus inner support holes."""
        s = self.pinspacing
        inc = self.pinspacing_increment
        t = self.thickness
        # First pin sits half a spacing in; spacing grows by `inc` per pin.
        pos = (s / 2)
        s += (0.5 * inc)
        for i in range(self.pins):
            self.rectangularHole(pos, (2 * t), self.pinwidth, t)
            pos += s
            s += inc
        # Finger holes for the interior support walls.
        for i in range(1, (self.hooks - 1)):
            self.fingerHolesAt(((i * self.x) / (self.hooks - 1)), (self.h / 2), (self.h / 2))
    def backCB(self):
        """Callback for the back plate: shelf holes plus support-wall holes."""
        t = self.thickness
        self.fingerHolesAt(0, (2 * t), self.x, 0)
        # Straight (unangled) pins need no extra support geometry.
        if (self.angle < 0.001):
            return
        for i in range(1, (self.hooks - 1)):
            self.fingerHolesAt(((i * self.x) / (self.hooks - 1)), (3 * t), ((self.h / 2) - (3 * t)))
    def sideWall(self, move=None):
        """Draw one outer side wall, slanted by the pin angle."""
        a = self.angle
        ar = math.radians(a)
        h = self.h
        t = self.thickness
        # Slanted height and total bounding width/height of the part.
        sh = (((math.sin(ar) * 6) * t) + (math.cos(ar) * h))
        tw = ((self.edges['a'].margin() + (math.sin(ar) * h)) + ((math.cos(ar) * 6) * t))
        th = (sh + 6)
        if self.move(tw, th, move, True):
            return
        self.moveTo(self.edges['a'].margin())
        self.polyline((math.sin(ar) * h), a, (4 * t))
        self.fingerHolesAt(((- 3.5) * t), 0, (h / 2), 90)
        self.edgeCorner('e', 'h')
        self.edges['h'](h)
        self.polyline(0, (90 - a), ((math.cos(ar) * 6) * t), 90)
        self.edges['a'](sh)
        self.corner(90)
        self.move(tw, th, move)
    def supportWall(self, move=None):
        """Draw one interior support wall (between the hooks)."""
        a = self.angle
        ar = math.radians(a)
        h = self.h
        t = self.thickness
        sh = (((math.sin(ar) * 6) * t) + (math.cos(ar) * h))
        tw = (self.edges['a'].margin() + max((((math.sin(ar) * h) / 2) + ((math.cos(ar) * 5) * t)), (math.sin(ar) * h)))
        th = (sh + 6)
        if self.move(tw, th, move, True):
            return
        self.moveTo(self.edges['a'].margin())
        # Extra slanted segment only needed when the pins are angled.
        if (a > 0.001):
            self.polyline((math.sin(ar) * h), (a + 90), (3 * t))
            self.edges['f'](((h / 2) - (3 * t)))
            self.polyline(0, (- 90))
        self.polyline((4 * t), 90)
        self.edges['f']((h / 2))
        self.polyline(((math.sin(ar) * 2) * t), (90 - a), (((math.cos(ar) * 4) * t) - (((math.sin(ar) ** 2) * 2) * t)), 90)
        if (a > 0.001):
            self.edges['a'](sh)
        else:
            self.edges['a']((h / 2))
        self.corner(90)
        self.move(tw, th, move)
    def render(self):
        """Emit all parts: pin plate, front, back, side and support walls."""
        self.generateWallEdges()
        p = PinEdge(self, self)
        n = self.pins
        t = self.thickness
        # Enforce the documented minimum front-plate height (7 * thickness).
        if (self.h < (7 * t)):
            self.h = (7 * t)
        # Total width accounting for the per-pin spacing increment.
        self.x = x = ((n * self.pinspacing) + (((n * (n - 1)) / 2) * self.pinspacing_increment))
        self.rectangularWall(x, (3 * t), [p, 'e', 'f', 'e'], move='up')
        self.rectangularWall(x, self.h, 'efef', callback=[self.frontCB], move='up')
        self.rectangularWall(x, (self.h / 2), 'efef', callback=[self.backCB], move='up')
        self.sideWall(move='right')
        for i in range((self.hooks - 2)):
            self.supportWall(move='right')
        self.sideWall(move='right')
class OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Accessors for the variwide sonification lowpass ``resonance`` mapping.

    NOTE(review): every option appears twice (getter then setter); the
    ``@property`` / ``.setter`` decorators appear to have been stripped, so as
    written each later definition shadows the earlier one.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesPictorialSonificationContexttracksMappingTremoloDepth(Options):
    """Accessors for the pictorial sonification ``tremoloDepth`` mapping.

    NOTE(review): every option appears twice (getter then setter); the
    ``@property`` / ``.setter`` decorators appear to have been stripped, so as
    written each later definition shadows the earlier one.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class NoiseGenerator(lg.Node):
    """LabGraph node that publishes random-noise messages at a fixed rate."""
    OUTPUT = lg.Topic(RandomMessage)
    config: NoiseGeneratorConfig
    # NOTE(review): the bare ``(OUTPUT)`` expression below looks like a
    # stripped ``@lg.publisher(OUTPUT)`` decorator for the coroutine.
    (OUTPUT)
    async def generate_noise(self) -> lg.AsyncPublisher:
        # Emit one random feature vector per tick, paced by sample_rate (Hz).
        while True:
            (yield (self.OUTPUT, RandomMessage(timestamp=time.time(), data=np.random.rand(self.config.num_features))))
            (await asyncio.sleep((1 / self.config.sample_rate)))
# NOTE(review): the leading ``.parametrize(...)`` looks like a stripped
# ``@pytest.mark.parametrize`` decorator; as written it is not a valid statement.
.parametrize('uri_template', ['/{field}{field}', '/{field}...{field}', '/{field}/{another}/{field}', '/{field}/something/something/{field}/something'])
def test_duplicate_field_names(uri_template):
    """A URI template repeating a field name must be rejected with ValueError."""
    router = DefaultRouter()
    with pytest.raises(ValueError):
        router.add_route(uri_template, ResourceWithId(1))
class PowerFunctionExample(HasStrictTraits):
    """Chaco demo plotting y = x ** n with an interactive exponent slider.

    NOTE(review): the bare ``_trait_change('...')`` expressions below look like
    stripped trait-notification decorators (e.g. ``@observe('power')``); as
    written they are plain calls and would not register the handlers.
    """
    plot = Instance(Plot)
    # Exponent n, user-adjustable in [0, 5].
    power = Range(0, 5, value=2)
    x = Array(shape=(None,), dtype='float')
    def _plot_default(self):
        """Build the initial line plot of x ** power."""
        y = (self.x ** self.power)
        plot_data = ArrayPlotData(x=self.x, y=y)
        plot = Plot(plot_data)
        plot.plot(('x', 'y'), 'line', name='power function', color='auto')
        plot.padding_top = 25
        plot.border_visible = False
        plot.index_grid.visible = False
        plot.value_grid.visible = False
        plot.title = 'Power Function n={}'.format(self.power)
        plot.title_position = 'right'
        plot.title_angle = (- 90)
        plot.legend_alignment = 'ul'
        plot.legend.border_visible = False
        plot.legend.bgcolor = (0.9, 0.9, 0.9, 0.5)
        plot.legend.visible = True
        # NOTE(review): the index (x) axis is titled 'y' and the value (y)
        # axis 'x' — these look swapped; confirm against the original example.
        plot.index_axis.title = 'y'
        plot.value_axis.title = 'x'
        return plot
    def _x_default(self):
        # 101 evenly spaced samples across [-2, 2].
        return np.linspace((- 2.0), 2.0, 101)
    _trait_change('power')
    def _update_y(self):
        """Recompute y when the exponent changes."""
        y = (self.x ** self.power)
        self.plot.data.set_data('y', y)
    _trait_change('x')
    def _update_data(self):
        """Recompute both columns when the sample grid changes."""
        y = (self.x ** self.power)
        self.plot.data.update_data(x=self.x, y=y)
    _trait_change('power')
    def _update_title(self):
        """Keep the plot title in sync with the exponent."""
        self.plot.title = 'Power Function n={}'.format(self.power)
    view = View(VGroup(Item('plot', editor=ComponentEditor()), VGroup(Item('power')), show_labels=False), resizable=True, title='Power Function Example')
def delays_to_cells(ws, row, delays, cells):
    """Write one row of delay corner values into worksheet *ws*.

    Columns E-H hold FAST max/min and SLOW max/min respectively; the cell
    coordinates are also recorded in *cells* under their corner keys.
    A missing *delays* value zero-fills all four cells.
    """
    # Map each corner key to its spreadsheet column for this row.
    for column, key in zip('EFGH', ('FAST_MAX', 'FAST_MIN', 'SLOW_MAX', 'SLOW_MIN')):
        cells[key] = '{}{}'.format(column, row)
    if delays is None:
        # No timing data available: zero-fill.
        for key in ('FAST_MAX', 'FAST_MIN', 'SLOW_MAX', 'SLOW_MIN'):
            ws[cells[key]] = 0
    else:
        ws[cells['FAST_MAX']] = delays[FAST].max
        ws[cells['FAST_MIN']] = delays[FAST].min
        ws[cells['SLOW_MAX']] = delays[SLOW].max
        ws[cells['SLOW_MIN']] = delays[SLOW].min
# NOTE(review): the bare ``()`` below looks like a stripped ``@nox.session(...)``
# decorator; as written it is just an empty tuple expression.
()
def docs_serve(session: nox.Session) -> None:
    """Build the docs image and serve mkdocs on port 8000 via docker compose."""
    generate_docs(session)
    # Ensure the teardown session runs after this one completes.
    session.notify('teardown')
    session.run('docker', 'compose', 'build', 'docs', external=True)
    run_shell = ('docker', 'compose', 'run', '--rm', '--service-ports', CI_ARGS, 'docs', '/bin/bash', '-c', 'mkdocs serve --dev-addr=0.0.0.0:8000')
    session.run(*run_shell, external=True)
# NOTE(review): ``.xfail`` looks like a stripped ``@pytest.mark.xfail`` marker.
.xfail
def test_when_exception_can_not_be_deserialized(isolated_client):
    """An exception type defined inside the remote function cannot be sent
    back to the client, so a FalServerlessError is expected instead."""
    # NOTE(review): the bare ``_client(...)`` call below looks like a stripped
    # decorator for the nested function (e.g. ``@isolated_client(...)``).
    _client('virtualenv', requirements=[''])
    def unpicklable_input_function_client():
        class T(Exception):
            # Holding a frame object makes the exception unpicklable.
            frame = __import__('sys')._getframe(0)
        raise T()
    with pytest.raises(FalServerlessError, match='...'):
        unpicklable_input_function_client()
class OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Accessors for the arearange sonification ``tremoloDepth`` mapping.

    NOTE(review): every option appears twice (getter then setter); the
    ``@property`` / ``.setter`` decorators appear to have been stripped, so as
    written each later definition shadows the earlier one.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def fetch_production(zone_key: str, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> dict:
    """Fetch and format live electricity production data for *zone_key*.

    Historical queries are unsupported and raise NotImplementedError.
    Returns the validated production dict (source: sldcguj.com).
    """
    session = session or Session()
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    value_map = fetch_data(zone_key, session, logger=logger)
    # Modes the source reports directly default to 0 when absent; modes the
    # source never reports stay None.
    production = {
        'biomass': None,
        'coal': value_map.get('coal', 0),
        'gas': value_map.get('gas', 0),
        'hydro': value_map.get('hydro', 0),
        'nuclear': value_map.get('nuclear', 0),
        'oil': None,
        'solar': value_map.get('solar', 0),
        'wind': value_map.get('wind', 0),
        'geothermal': None,
        'unknown': value_map.get('unknown', 0),
    }
    data = {
        'zoneKey': zone_key,
        'datetime': value_map['date'].datetime,
        'production': production,
        'storage': {'hydro': None},
        'source': 'sldcguj.com',
    }
    # Sanity-check the datapoint: drop negatives, enforce a plausible floor.
    return validate(data, logger, remove_negative=True, floor=7000)
def test_search_query_cache(frontend_db, frontend_editing_db):
    """The cache starts empty; added queries come back with rule text and terms."""
    assert frontend_db.search_query_cache(offset=0, limit=10) == []
    first_id = frontend_editing_db.add_to_search_query_cache('foo', 'rule bar{}')
    second_id = frontend_editing_db.add_to_search_query_cache('bar', 'rule foo{}')
    expected = [(first_id, 'rule bar{}', ['bar']), (second_id, 'rule foo{}', ['foo'])]
    assert sorted(frontend_db.search_query_cache(offset=0, limit=10)) == expected
def test_behaviour_parse_module_missing_class():
    """A behaviour config naming an unknown class is skipped with a warning,
    while the known behaviour is still parsed."""
    context = SkillContext(skill=MagicMock(skill_id=PublicId.from_str('author/name:0.1.0')))
    module_path = Path(ROOT_DIR, 'tests', 'data', 'dummy_skill', 'behaviours.py')
    configurations = {
        'dummy_behaviour': SkillComponentConfiguration('DummyBehaviour'),
        'unknown_behaviour': SkillComponentConfiguration('UnknownBehaviour'),
    }
    with unittest.mock.patch.object(aea.skills.base._default_logger, 'warning') as warning_mock:
        behaviours_by_id = Behaviour.parse_module(module_path, configurations, context)
    warning_mock.assert_called_with("Behaviour 'UnknownBehaviour' cannot be found.")
    assert 'dummy_behaviour' in behaviours_by_id
class InstallationAdmin(ExportMixin, EventoLAdmin):
    """Django admin for Installation records with event/installer columns."""
    resource_class = InstallationResource
    list_display = ('hardware', 'software', 'get_event', 'get_installer', 'attendee')
    list_filter = (EventFromInstallerFilter, HardwareFilter, SoftwareFilter, InstallerFilter)
    search_fields = ('notes',)
    def get_event(self, obj):
        # The event is reached through the related installer record.
        return obj.installer.event
    get_event.short_description = _('Event')
    get_event.admin_order_field = 'installer__event__name'
    def get_installer(self, obj):
        # The installer's user account, shown in the change list.
        return obj.installer.user
    get_installer.short_description = _('Installer')
    get_installer.admin_order_field = 'installer__user__username'
    def filter_event(self, events, queryset):
        # Hook used by the EventoLAdmin base to scope rows to given events.
        return queryset.filter(installer__event__in=events)
class TlsPrivateKeyResponseAttributesAllOf(ModelNormal):
    """OpenAPI-generated model for the TLS private key response attributes.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    expressions below look like stripped decorators from the generator
    (typically ``@cached_property`` and ``@convert_js_args_to_python_args``),
    and ``_from_openapi_data`` normally also carries ``@classmethod``.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Any type is accepted for properties not listed in attribute_map.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> accepted type tuple.
        return {'name': (str,), 'key_length': (int,), 'key_type': (str,), 'replace': (bool,), 'public_key_sha1': (str,)}
    _property
    def discriminator():
        return None
    attribute_map = {'name': 'name', 'key_length': 'key_length', 'key_type': 'key_type', 'replace': 'replace', 'public_key_sha1': 'public_key_sha1'}
    read_only_vars = {'key_length', 'key_type', 'replace', 'public_key_sha1'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server data; read-only attributes are permitted."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration asks for it
            # and no additional-properties type is declared.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only arrive via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class TraditionalPokerGameEventDispatcher(GameEventDispatcher):
    """Event dispatcher with traditional-poker-specific game events."""

    def new_game_event(self, game_id: str, players: List[Player], dealer_id: str, blind_bets):
        """Announce a new traditional game with its players, dealer and blinds."""
        payload = {
            'game_id': game_id,
            'game_type': 'traditional',
            'players': [player.dto() for player in players],
            'dealer_id': dealer_id,
            'blind_bets': blind_bets,
        }
        self.raise_event('new-game', payload)

    def game_over_event(self):
        """Signal that the game has ended."""
        self.raise_event('game-over', {})

    def change_cards_action_event(self, player, timeout, timeout_epoch):
        """Prompt *player* to change cards, including the action deadline."""
        deadline = time.strftime('%Y-%m-%d %H:%M:%S+0000', time.gmtime(timeout_epoch))
        payload = {
            'action': 'cards-change',
            'player': player.dto(),
            'timeout': timeout,
            'timeout_date': deadline,
        }
        self.raise_event('player-action', payload)

    def change_cards_event(self, player, num_cards):
        """Broadcast that *player* exchanged *num_cards* cards."""
        self.raise_event('cards-change', {'player': player.dto(), 'num_cards': num_cards})
('ui', 'trackproperties_dialog_cover_row.ui')
class TagImageField(Gtk.Box):
__gtype_name__ = 'TagImageField'
(button, image, type_model, description_entry, type_selection, info_label) = GtkTemplate.Child.widgets(6)
def __init__(self, all_button=True):
    """Build the cover-image field from its Gtk template.

    Args:
        all_button: when True, append an AllButton used for applying the
            value across all edited tracks.
    """
    Gtk.Box.__init__(self)
    self.init_template()
    self.parent_row = None      # editor row owning this field
    self.all_func = None        # callback applying the value to all tracks
    self.update_func = None     # callback notified when the value changes
    self.batch_update = False   # suppresses update callbacks during bulk set
    self.pixbuf = None          # currently loaded image, if any
    self.info = CoverImage(None, None, None, None)
    # NOTE(review): presumably the default cover "type" id — confirm meaning.
    self.default_type = 3
    # Per-MIME display titles and GdkPixbuf save parameters; 'image/' is the
    # generic fallback, '-->' marks a linked (external) image.
    self.mime_info = {'image/jpeg': {'title': _('JPEG image'), 'type': 'jpeg', 'options': {'quality': '90'}}, 'image/png': {'title': _('PNG image'), 'type': 'png', 'options': {}}, 'image/': {'title': _('Image'), 'type': 'jpeg', 'options': {'quality': '90'}}, '-->': {'title': _('Linked image')}}
    # Accept dropped URIs onto the button.
    self.button.drag_dest_set(Gtk.DestDefaults.ALL, [], Gdk.DragAction.COPY)
    self.button.drag_dest_add_uri_targets()
    self.type_selection.connect('scroll-event', dummy_scroll_handler)
    self.all_button = None
    if all_button:
        self.all_button = AllButton(self)
        self.pack_start(self.all_button, False, False, 0)
def grab_focus(self):
    """Forward keyboard focus to the image widget."""
    self.image.grab_focus()
def register_parent_row(self, parent_row):
    """Remember the editor row that owns this field."""
    self.parent_row = parent_row
def register_update_func(self, func):
    """Register the callback invoked when the field's value changes."""
    self.update_func = func
def register_all_func(self, function):
    """Register the callback used to apply the value to all tracks."""
    self.all_func = function
def set_value(self, val, all_vals=None, doupdate=True):
    """Display cover image *val* (or clear the field when falsy).

    Args:
        val: a cover-image value with .data/.mime/.type/.desc, or None.
        all_vals: values across all edited tracks; when given (and an
            AllButton exists) its toggle reflects whether they all match.
        doupdate: when False, skip refreshing the widgets entirely.
    """
    if doupdate:
        if val:
            # Decode the raw image bytes; on failure leave widgets untouched.
            loader = GdkPixbuf.PixbufLoader()
            try:
                loader.write(val.data)
                loader.close()
            except GLib.GError:
                pass
            else:
                # batch_update suppresses per-widget change callbacks.
                self.batch_update = True
                self.set_pixbuf(loader.get_pixbuf(), val.mime)
                if (val.type is not None):
                    self.type_selection.set_active(val.type)
                    self.type_selection.set_sensitive(True)
                else:
                    self.type_selection.set_active((- 1))
                    self.type_selection.set_sensitive(False)
                if (val.desc is not None):
                    self.description_entry.set_text(val.desc)
                    self.description_entry.set_sensitive(True)
                else:
                    self.description_entry.set_text('')
                    self.description_entry.set_sensitive(False)
                self.batch_update = False
        else:
            # No value: reset to the empty/placeholder state.
            self.batch_update = True
            self.set_pixbuf(None)
            self.type_selection.set_active((- 1))
            self.type_selection.set_sensitive(False)
            self.description_entry.set_text('')
            self.description_entry.set_sensitive(False)
            self.batch_update = False
        self.call_update_func()
    if (None not in (all_vals, self.all_button)):
        # Reflect whether every edited track shares this same value.
        self.all_button.set_active(all(((val == v) for v in all_vals)))
def get_value(self):
if (not self.pixbuf):
return None
mime = self.mime_info[self.info.mime]
writer = io.BytesIO()
def gdk_pixbuf_save_func(buf, count, user_data):
if (writer.write(buf) == count):
return True
return False
try:
save_to_callback_function = self.pixbuf.save_to_callbackv
except AttributeError:
save_to_callback_function = self.pixbuf.save_to_callback
save_to_callback_function(gdk_pixbuf_save_func, None, mime['type'], list(mime['options'].keys()), list(mime['options'].values()))
writer.seek(0)
return self.info._replace(data=writer.read())
def call_update_func(self):
if ((not self.update_func) or self.batch_update):
return
self.update_func(self, self.parent_row.tag, self.parent_row.multi_id, self.get_value)
def set_pixbuf(self, pixbuf, mime=None):
self.pixbuf = pixbuf
if (pixbuf is None):
self.image.set_from_icon_name('list-add', Gtk.IconSize.DIALOG)
self.info_label.set_markup('')
else:
self.image.set_from_pixbuf(pixbuf.scale_simple(100, 100, GdkPixbuf.InterpType.BILINEAR))
(width, height) = (pixbuf.get_width(), pixbuf.get_height())
if (mime is None):
markup = _('{width}x{height} pixels').format(width=width, height=height)
else:
markup = _('{format} ({width}x{height} pixels)').format(format=self.mime_info.get(mime, self.mime_info['image/'])['title'], width=width, height=height)
self.info_label.set_markup(markup)
self.info = self.info._replace(mime=mime)
    def _on_button_clicked(self, button):
        """Open a file chooser and load the selected image as the cover."""
        dialog = dialogs.FileOperationDialog(title=_('Select image to set as cover'), parent=self.get_toplevel(), buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK))
        dialog.set_select_multiple(False)
        filefilter = Gtk.FileFilter()
        filefilter.set_name(_('Supported image formats'))
        # Case-insensitive glob patterns for jpg/jpeg/png.
        filefilter.add_pattern('*.[jJ][pP][gG]')
        filefilter.add_pattern('*.[jJ][pP][eE][gG]')
        filefilter.add_pattern('*.[pP][nN][gG]')
        dialog.add_filter(filefilter)
        if (dialog.run() == Gtk.ResponseType.OK):
            filename = dialog.get_filename()
            try:
                pixbuf = GdkPixbuf.Pixbuf.new_from_file(filename)
                # get_file_info returns None for unrecognized files, so
                # the [0] subscript raises TypeError — caught below.
                info = GdkPixbuf.Pixbuf.get_file_info(filename)[0]
            except TypeError:
                # Not a loadable image: ignore the selection.
                pass
            else:
                # Suppress update callbacks while widgets are populated.
                self.batch_update = True
                self.set_pixbuf(pixbuf, info.get_mime_types()[0])
                self.type_selection.set_active(self.default_type)
                self.type_selection.set_sensitive(True)
                # Default description: file name without its extension.
                self.description_entry.set_text(os.path.basename(filename).rsplit('.', 1)[0])
                self.description_entry.set_sensitive(True)
                self.batch_update = False
                self.call_update_func()
        dialog.destroy()
def _on_button_drag_data_received(self, widget, context, x, y, selection, info, time):
if (selection.target.name() == 'text/uri-list'):
filename = Gio.File.new_for_uri(selection.get_uris()[0]).get_path()
try:
pixbuf = GdkPixbuf.Pixbuf.new_from_file(filename)
info = GdkPixbuf.Pixbuf.get_file_info(filename)[0]
except TypeError:
pass
else:
self.batch_update = True
self.set_pixbuf(pixbuf, info['mime_types'][0])
self.type_selection.set_active(self.default_type)
self.description_entry.set_sensitive(True)
self.description_entry.set_text(os.path.basename(filename).rsplit('.', 1)[0])
self.description_entry.set_sensitive(True)
self.batch_update = False
self.call_update_func()
def _on_type_selection_changed(self, combobox):
self.info = self.info._replace(type=self.type_model[combobox.get_active()][0])
self.call_update_func()
def _on_description_entry_changed(self, entry):
self.info = self.info._replace(desc=entry.get_text())
self.call_update_func() |
def add_file_to_realization_runpaths(runpath_file):
    """Write each realization's number into TARGET_FILE in its runpath.

    Every line of *runpath_file* is expected to hold the runpath as its
    second whitespace-separated field; the realization number is
    extracted from that path with the module-level REGEX.
    """
    entries = Path(runpath_file).read_text(encoding='utf-8').splitlines()
    for entry in entries:
        runpath = entry.split()[1]
        numbers = re.findall(REGEX, runpath)
        target = Path(runpath) / TARGET_FILE
        target.write_text(str(numbers[0]), encoding='utf-8')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.