code stringlengths 281 23.7M |
|---|
def test_df(categorical: bool = False, datetime: bool = False) -> Tuple[pd.DataFrame, pd.Series]:
    """Build a small synthetic classification dataset as (features, target).

    Optionally appends two constant categorical columns and two datetime
    columns so callers can exercise mixed-dtype handling.
    """
    features, target = make_classification(
        n_samples=1000,
        n_features=12,
        n_redundant=4,
        n_clusters_per_class=1,
        weights=[0.5],
        class_sep=2,
        random_state=1,
    )
    frame = pd.DataFrame(features, columns=[f'var_{i}' for i in range(12)])
    series = pd.Series(target)
    if categorical is True:
        frame['cat_var1'] = ['A'] * 1000
        frame['cat_var2'] = ['B'] * 1000
    if datetime is True:
        frame['date1'] = pd.date_range('2020-02-24', periods=1000, freq='T')
        frame['date2'] = pd.date_range('2021-09-29', periods=1000, freq='H')
    return frame, series
def test_flatten_does_not_persist_0_checkpoints(journal_db, memory_db):
    """Flattening with no checkpoints must not write through to the backing
    store; only an explicit persist() should."""
    key, value = b'before-record', b'test-a'
    journal_db.set(key, value)
    journal_db.flatten()
    # The write is visible through the journal but not yet in the backing db.
    assert key not in memory_db
    assert key in journal_db
    journal_db.persist()
    assert key in memory_db
class TestDataFrameHist(TestData):
    """Compare eland's histogram implementation against numpy.histogram."""

    def _assert_hist_matches_numpy(self, pd_flights, ed_flights, num_bins=10):
        """Reference bins/weights come from numpy.histogram on the pandas data."""
        distance = np.histogram(pd_flights['DistanceKilometers'], num_bins)
        delay = np.histogram(pd_flights['FlightDelayMin'], num_bins)
        expected_bins = pd.DataFrame(
            {'DistanceKilometers': distance[1], 'FlightDelayMin': delay[1]})
        expected_weights = pd.DataFrame(
            {'DistanceKilometers': distance[0], 'FlightDelayMin': delay[0]})
        _ = ed_flights[['DistanceKilometers', 'FlightDelayMin']]
        ed_bins, ed_weights = ed_flights[
            ['DistanceKilometers', 'FlightDelayMin']]._hist(num_bins=num_bins)
        assert_frame_equal(expected_bins, ed_bins, check_exact=False)
        assert_frame_equal(expected_weights, ed_weights, check_exact=False)

    def test_flights_hist(self):
        self._assert_hist_matches_numpy(self.pd_flights(), self.ed_flights())

    def test_flights_filtered_hist(self):
        pd_flights = self.pd_flights()
        ed_flights = self.ed_flights()
        self._assert_hist_matches_numpy(
            pd_flights[pd_flights.FlightDelayMin > 0],
            ed_flights[ed_flights.FlightDelayMin > 0])
class MinhaLista(MutableSequence):
    """A minimal mutable sequence backed by a Python list.

    Fixes over the original:
    - ``vals`` is now stored as a list; it used to be the ``*vals`` tuple,
      so ``__setitem__``/``__delitem__`` raised TypeError;
    - ``insert`` is implemented; it is abstract on MutableSequence, so the
      class could not even be instantiated without it.
    """

    def __init__(self, *vals):
        self.vals = list(vals)

    def __getitem__(self, pos):
        return self.vals[pos]

    def __len__(self):
        return len(self.vals)

    def __delitem__(self, pos):
        del self.vals[pos]

    def __setitem__(self, pos, valor):
        self.vals[pos] = valor

    def insert(self, pos, valor):
        """Insert *valor* before index *pos* (required by MutableSequence)."""
        self.vals.insert(pos, valor)
class Demo(lg.Graph):
    """Graph wiring an averaged-noise source into a bar plot."""
    AVERAGED_NOISE: AveragedNoise
    PLOT: Plot

    def setup(self) -> None:
        # Configure both modules from the module-level constants.
        noise_cfg = AveragedNoiseConfig(
            sample_rate=SAMPLE_RATE, num_features=NUM_FEATURES, window=WINDOW)
        plot_cfg = PlotConfig(refresh_rate=REFRESH_RATE, num_bars=NUM_FEATURES)
        self.AVERAGED_NOISE.configure(noise_cfg)
        self.PLOT.configure(plot_cfg)

    def connections(self) -> lg.Connections:
        # Noise output feeds the plot input.
        return ((self.AVERAGED_NOISE.OUTPUT, self.PLOT.INPUT),)

    def process_modules(self) -> Tuple[lg.Module, ...]:
        return (self.AVERAGED_NOISE, self.PLOT)

    def logging(self) -> Dict[str, lg.Topic]:
        return {'AVERAGED_NOISE.GENERATOR.OUTPUT': self.AVERAGED_NOISE.GENERATOR.OUTPUT}
def annotate_project(project):
    """Decorator for ``project`` that records a (Supermesh)ProjectBlock on the
    working tape so the projection can be differentiated through.

    NOTE(review): the original body contained a bare ``(project)`` expression,
    which looks like a stripped ``@wraps(project)`` decorator; restored here
    so the wrapper keeps the wrapped function's metadata.
    """
    import functools

    @functools.wraps(project)
    def wrapper(*args, **kwargs):
        ad_block_tag = kwargs.pop('ad_block_tag', None)
        annotate = annotate_tape(kwargs)
        if annotate:
            bcs = kwargs.get('bcs', [])
            sb_kwargs = ProjectBlock.pop_kwargs(kwargs)
            if isinstance(args[1], function.Function):
                # Projecting into an existing Function: the block can be built
                # up front from its function space.
                output = args[1]
                V = output.function_space()
                if isinstance(args[0], function.Function) and (extract_unique_domain(args[0]) != V.mesh()):
                    # Source and target live on different meshes.
                    block = SupermeshProjectBlock(args[0], V, output, bcs, ad_block_tag=ad_block_tag, **sb_kwargs)
                else:
                    block = ProjectBlock(args[0], V, output, bcs, ad_block_tag=ad_block_tag, **sb_kwargs)
        # Run the actual projection without recording its internals.
        with stop_annotating():
            output = project(*args, **kwargs)
        if annotate:
            tape = get_working_tape()
            if not isinstance(args[1], function.Function):
                # Target was a function space: the block needs the real output.
                if isinstance(args[0], function.Function) and (extract_unique_domain(args[0]) != args[1].mesh()):
                    block = SupermeshProjectBlock(args[0], args[1], output, bcs, ad_block_tag=ad_block_tag, **sb_kwargs)
                else:
                    block = ProjectBlock(args[0], args[1], output, bcs, ad_block_tag=ad_block_tag, **sb_kwargs)
            tape.add_block(block)
            block.add_output(output.create_block_variable())
        return output
    return wrapper
def test_create_from_cookiecutter(temp_with_override: Path, cli):
    """`create --cookiecutter` should scaffold the template book layout."""
    book = temp_with_override / 'new_book'
    result = cli.invoke(commands.create, [book.as_posix(), '--cookiecutter'])
    assert result.exit_code == 0
    inner = book.joinpath('my_book')
    # Template layout: config file present plus expected entry counts.
    assert inner.joinpath('my_book', '_config.yml').exists()
    assert len(list(inner.iterdir())) == 7
    assert len(list(inner.joinpath('.github', 'workflows').iterdir())) == 1
    assert len(list(inner.joinpath('my_book').iterdir())) == 9
class TestScriptedFields(TestData):
    """Behaviour of FieldMappings.add_scripted_field for new and duplicate names."""

    def test_add_new_scripted_field(self):
        mappings = FieldMappings(client=ES_TEST_CLIENT, index_pattern=FLIGHTS_INDEX_NAME)
        mappings.add_scripted_field('scripted_field_None', None, np.dtype('int64'))
        # A scripted field with display name None is appended to the columns.
        expected = self.pd_flights().columns.to_list() + [None]
        assert expected == mappings.display_names

    def test_add_duplicate_scripted_field(self):
        mappings = FieldMappings(client=ES_TEST_CLIENT, index_pattern=FLIGHTS_INDEX_NAME)
        mappings.add_scripted_field('scripted_field_Carrier', 'Carrier', np.dtype('int64'))
        buf = StringIO()
        mappings.es_info(buf)
        print(buf.getvalue())
        # Shadowing 'Carrier' moves its display name to the end of the list.
        expected = self.pd_flights().columns.to_list()
        expected.remove('Carrier')
        expected.append('Carrier')
        assert expected == mappings.display_names
def is_valid_directory(path_to_directory: str, must_be_writable: bool = False) -> OperationOutcome:
    """Validate that *path_to_directory* names a readable (and, optionally,
    writable) directory, reporting the first failed check."""
    if not path_to_directory:
        return OperationOutcome(False, 'Variable `path_to_directory` must be a non-empty string')
    if not os.path.exists(path_to_directory):
        return OperationOutcome(False, f'Path {path_to_directory} does not exist.')
    if not os.path.isdir(path_to_directory):
        return OperationOutcome(False, f'Path {path_to_directory} is not a directory.')
    if not os.access(path_to_directory, os.R_OK):
        # Unreadable directories fall through to the generic message,
        # matching the original branch ordering.
        return OperationOutcome(False, f'`{path_to_directory}` is not a valid path.')
    if must_be_writable and not os.access(path_to_directory, os.W_OK):
        return OperationOutcome(False, f'Path {path_to_directory} is not writable.')
    return OperationOutcome(True, '')
def test_single_window_when_using_freq(df_time):
    """WindowFeatures with a single window and ``freq='45min'`` should shift
    the window-2 rolling sums forward by that frequency, for all three call
    patterns: default variables, ``drop_original=True``, and explicit
    variable subsets (``functions`` given as str or as list).
    """
    # Expected head(9): first four rows are NaN because of the 45min shift
    # plus the window-2 warm-up.
    expected_results = {'ambient_temp': [31.31, 31.51, 32.15, 32.39, 32.62, 32.5, 32.52, 32.68, 33.76], 'module_temp': [49.18, 49.84, 52.35, 50.63, 49.61, 47.01, 46.67, 47.52, 49.8], 'irradiation': [0.51, 0.79, 0.65, 0.76, 0.42, 0.49, 0.57, 0.56, 0.74], 'color': ['blue', 'blue', 'blue', 'blue', 'blue', 'blue', 'blue', 'blue', 'blue'], 'ambient_temp_window_2_sum': [np.nan, np.nan, np.nan, np.nan, 62.82, 63.66, 64.54, 65.01, 65.12], 'module_temp_window_2_sum': [np.nan, np.nan, np.nan, np.nan, 99.02, 102.19, 102.98, 100.24, 96.62], 'irradiation_window_2_sum': [np.nan, np.nan, np.nan, np.nan, 1.3, 1.44, 1.41, 1.18, 0.91]}
    expected_results_df = pd.DataFrame(data=expected_results, index=_date_time)
    # Case 1: all numeric variables, original columns kept.
    transformer = WindowFeatures(window=2, functions=['sum'], freq='45min')
    df_tr = transformer.fit_transform(df_time)
    assert df_tr.head(9).round(3).equals(expected_results_df)
    # Case 2: original columns dropped from the output.
    transformer = WindowFeatures(window=2, functions=['sum'], freq='45min', drop_original=True)
    df_tr = transformer.fit_transform(df_time)
    assert df_tr.head(9).round(3).equals(expected_results_df.drop(['ambient_temp', 'module_temp', 'irradiation'], axis=1))
    # Case 3: explicit variable subsets; `functions` accepted as a bare str.
    transformer = WindowFeatures(variables=['ambient_temp', 'irradiation'], window=2, functions='sum', freq='45min')
    df_tr = transformer.fit_transform(df_time)
    assert df_tr.head(9).round(3).equals(expected_results_df.drop(['module_temp_window_2_sum'], axis=1))
    transformer = WindowFeatures(variables=['irradiation'], window=2, functions=['sum'], freq='45min')
    df_tr = transformer.fit_transform(df_time)
    assert df_tr.head(9).round(3).equals(expected_results_df.drop(['ambient_temp_window_2_sum', 'module_temp_window_2_sum'], axis=1))
def filter_extender_lte_carrier_list_data(json):
    """Project the (validated) payload down to the supported fields.

    Only keys from the option list that are present and non-None survive.
    """
    option_list = ['sn']
    cleaned = remove_invalid_fields(json)
    return {
        field: cleaned[field]
        for field in option_list
        if field in cleaned and cleaned[field] is not None
    }
def test_standard_calls_passthrough(accounts, tester):
    """Inside a multicall block, plain .call() returns real values, not proxies."""
    addr = accounts[1]
    expected = ['blahblah', addr, ['yesyesyes', '0x1234']]
    tester.setTuple(expected)
    with brownie.multicall:
        result = tester.getTuple.call(addr)
        assert result == expected
        assert not isinstance(tester.getTuple.call(addr), Proxy)
_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestExample(BaseTestMixin, unittest.TestCase):
    """Run every discovered example file; fail on exceptions, skip the
    files the searcher rejected (with the rejection reason)."""

    def setUp(self):
        BaseTestMixin.setUp(self)

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    def test_run(self):
        (accepted_files, skipped_files) = SEARCHER.get_python_files()
        for file_path in accepted_files:
            with self.subTest(file_path=file_path):
                try:
                    run_file(file_path)
                except Exception as exc:
                    # Include the full traceback in the failure message.
                    message = ''.join(traceback.format_exception(*sys.exc_info()))
                    self.fail('Executing {} failed with exception {}\n {}'.format(file_path, exc, message))
                finally:
                    # Always drain pending GUI events before the next file.
                    process_cascade_events()
        for (file_path, reason) in skipped_files:
            with self.subTest(file_path=file_path):
                raise unittest.SkipTest('{reason} (File: {file_path})'.format(reason=reason, file_path=file_path))
def update_heartbeat(submission_id: int, processor_id: str) -> int:
    """Bump the heartbeat of a claimed, in-progress DABS loader queue row.

    Returns the number of rows updated; 0 means this processor no longer
    owns the row (or it is no longer in the expected state).
    """
    sql = f'''
        update {DABSLoaderQueue._meta.db_table}
        set heartbeat = %s::timestamptz
        where submission_id = %s and processor_id = %s and state = %s
    '''
    params = [now(), submission_id, processor_id, DABSLoaderQueue.IN_PROGRESS]
    with psycopg2.connect(dsn=get_database_dsn_string()) as connection:
        with connection.cursor() as cursor:
            cursor.execute(sql, params)
            return cursor.rowcount
def recurse_check_structure(sample: Any, to_check: Any) -> None:
    """Recursively validate that *to_check* has the same structure as *sample*.

    - Types must match at every level, except a ``None`` sample acts as a
      wildcard.
    - Lists/tuples: every element of *to_check* is validated against the
      first element of *sample*.
    - Dicts: key sets must match exactly, then values are validated per key.

    Raises:
        ValidationException: on any type, key, or structure mismatch.
    """
    sample_type = type(sample)
    to_check_type = type(to_check)
    if (sample is not None) and (sample_type != to_check_type):
        raise ValidationException(f'{sample} [{sample_type}] is not the same type as {to_check} [{to_check_type}].')
    if sample_type in (list, tuple):
        # Fix: an empty sample sequence used to raise IndexError on
        # sample[0]; with no template element there is nothing to validate.
        if sample:
            for element in to_check:
                recurse_check_structure(sample[0], element)
        return
    if sample_type == dict:
        for key in sample:
            if key not in to_check:
                raise ValidationException(f"{to_check} doesn't contain the key {key}.")
        for key in to_check:
            if key not in sample:
                raise ValidationException(f'{to_check} contains an unknown key {key}.')
        for key in sample:
            recurse_check_structure(sample[key], to_check[key])
        return
(reuse_venv=True)
def format(session):
    """Nox session: auto-format the code base (license headers, f-strings,
    black, isort), then re-run the linter to verify the result.

    NOTE(review): shadows the ``format`` builtin — conventional for nox
    session names, so kept as-is.
    """
    session.install('black', 'isort', 'flynt')
    session.run('python', 'utils/license-headers.py', 'fix', *SOURCE_FILES)
    session.run('flynt', *SOURCE_FILES)
    session.run('black', '--target-version=py38', *SOURCE_FILES)
    session.run('isort', '--profile=black', *SOURCE_FILES)
    # Fail the session if formatting left lint issues behind.
    lint(session)
def logger_to_ofp(port_stats):
    """Translate an OpenFlow port-stats object into a flat counter dict."""
    field_map = {
        'packets_out': 'tx_packets',
        'packets_in': 'rx_packets',
        'bytes_out': 'tx_bytes',
        'bytes_in': 'rx_bytes',
        'dropped_out': 'tx_dropped',
        'dropped_in': 'rx_dropped',
        'errors_out': 'tx_errors',
        'errors_in': 'rx_errors',
    }
    return {name: getattr(port_stats, attr) for name, attr in field_map.items()}
def adjusted_path(tools_to_activate, system=False, user=False):
    """Compute the PATH needed for the given tools.

    Returns a ``(joined_path, new_emsdk_tools)`` tuple where ``joined_path``
    is the whole PATH string (emsdk entries first) and ``new_emsdk_tools``
    lists only the entries that were not already present.
    """
    path_add = get_required_path(tools_to_activate)
    # Read the current PATH from the registry on native Windows, else from env.
    if (WINDOWS and (not MSYS)):
        existing_path = win_get_environment_variable('PATH', system=system, user=user, fallback=True).split(ENVPATH_SEPARATOR)
    else:
        existing_path = os.environ['PATH'].split(ENVPATH_SEPARATOR)
    # Split the existing PATH into emsdk-owned entries and foreign entries.
    existing_emsdk_tools = []
    existing_nonemsdk_path = []
    for entry in existing_path:
        if to_unix_path(entry).startswith(EMSDK_PATH):
            existing_emsdk_tools.append(entry)
        else:
            existing_nonemsdk_path.append(entry)
    # Separate required entries into genuinely new ones and already-present ones.
    new_emsdk_tools = []
    kept_emsdk_tools = []
    for entry in path_add:
        if (not normalized_contains(existing_emsdk_tools, entry)):
            new_emsdk_tools.append(entry)
        else:
            kept_emsdk_tools.append(entry)
    # emsdk entries take precedence over the rest of the PATH.
    whole_path = unique_items(((new_emsdk_tools + kept_emsdk_tools) + existing_nonemsdk_path))
    if MSYS:
        # MSYS wants unix-style paths and ':' as the separator.
        whole_path = [to_msys_path(p) for p in whole_path]
        new_emsdk_tools = [to_msys_path(p) for p in new_emsdk_tools]
    separator = (':' if MSYS else ENVPATH_SEPARATOR)
    return (separator.join(whole_path), new_emsdk_tools)
class VerificationMethod():
    """Interface for request-verification schemes.

    Every hook below is abstract in spirit: concrete subclasses must
    override all four methods.
    """

    def get_html(self):
        """Abstract hook; implement in subclasses."""
        raise NotImplementedError()

    def get_javascript(self):
        """Abstract hook; implement in subclasses."""
        raise NotImplementedError()

    def verification_in_request(self, request):
        """Abstract hook; implement in subclasses."""
        raise NotImplementedError()

    def verify_request(self, request):
        """Abstract hook; implement in subclasses."""
        raise NotImplementedError()
class WindowsAppLink(AbstractObject):
    """Generated-SDK style API object for a Windows app link; field names
    mirror the wire schema."""

    def __init__(self, api=None):
        super(WindowsAppLink, self).__init__()
        self._isWindowsAppLink = True
        self._api = api

    class Field(AbstractObject.Field):
        # Wire field names (string constants).
        app_id = 'app_id'
        app_name = 'app_name'
        package_family_name = 'package_family_name'
        url = 'url'

    # Maps each field to its wire type for (de)serialization.
    _field_types = {'app_id': 'string', 'app_name': 'string', 'package_family_name': 'string', 'url': 'string'}

    # NOTE(review): takes ``cls`` but carries no @classmethod decorator —
    # looks like the decorator was stripped; verify against the generated SDK.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def test_supports_foundry_schema(spark_session, provide_config):
    """Round-trip a DataFrame through the disk-backed cache when a Foundry
    schema (CSV text-parser metadata) is attached to the item."""
    pc = DiskPersistenceBackedSparkCache(**provide_config)
    df = spark_session.createDataFrame(data=[[1, 2]], schema='a: int, b: int')
    # Minimal Foundry schema blob: two int columns read via the CSV parser.
    foundry_schema = {'fieldSchemaList': [{'type': 'INTEGER', 'name': 'a', 'nullable': None, 'userDefinedTypeClass': None, 'customMetadata': {}, 'arraySubtype': None, 'precision': None, 'scale': None, 'mapKeyType': None, 'mapValueType': None, 'subSchemas': None}, {'type': 'INTEGER', 'name': 'b', 'nullable': None, 'userDefinedTypeClass': None, 'customMetadata': {}, 'arraySubtype': None, 'precision': None, 'scale': None, 'mapKeyType': None, 'mapValueType': None, 'subSchemas': None}], 'primaryKey': None, 'dataFrameReaderClass': 'com.palantir.foundry.spark.input.TextDataFrameReader', 'customMetadata': {'textParserParams': {'parser': 'CSV_PARSER', 'nullValues': None, 'nullValuesPerColumn': None, 'charsetName': 'UTF-8', 'fieldDelimiter': ',', 'recordDelimiter': '\n', 'quoteCharacter': '"', 'dateFormat': {}, 'skipLines': 1, 'jaggedRowBehavior': 'THROW_EXCEPTION', 'parseErrorBehavior': 'THROW_EXCEPTION', 'addFilePath': False, 'addFilePathInsteadOfUri': False, 'addByteOffset': False, 'addImportedAt': False}}}
    dataset_directory = os.sep.join([provide_config['cache_dir'], 'd1', 't1.csv'])
    df.write.format('csv').option('header', 'true').save(os.sep.join([dataset_directory]))
    pc.set_item_metadata(dataset_directory, to_dict('d1', 't1', '/d1'), foundry_schema)
    # Reading back through the cache must reproduce the original frame.
    assert_frame_equal(df.toPandas(), pc[to_dict('d1', 't1', '/d1')].toPandas())
    # Deleting the item empties the cache.
    del pc[to_dict('d1', 't1', '/d1')]
    assert (len(pc) == 0)
class AppHandler(FlexxHandler):
    """Tornado handler that routes app-name URLs: serves the app index, a
    specific app's page, or redirects to the canonical URL for the request."""

    def get(self, full_path):
        logger.debug(('Incoming request at %r' % full_path))
        ok_app_names = ('__main__', '__default__', '__index__')
        parts = [p for p in full_path.split('/') if p]
        app_name = None
        path = '/'.join(parts)
        # Determine the app name from the first path component, if any.
        if parts:
            if (path.lower() == 'flexx'):
                return self.redirect('/flexx/')
            if ((parts[0] in ok_app_names) or manager.has_app_name(parts[0])):
                app_name = parts[0]
                path = '/'.join(parts[1:])
        if (app_name is None):
            # A bare filename (contains a dot) is assumed to be a data asset.
            if ((len(parts) == 1) and ('.' in full_path)):
                return self.redirect(('/flexx/data/' + full_path))
            app_name = '__main__'
        if (app_name == '__main__'):
            # Resolve '__main__' to the registered main app (may be falsy).
            app_name = manager.has_app_name('__main__')
        elif ('/' not in full_path):
            # Canonicalize '/appname' to '/appname/'.
            return self.redirect(('/%s/' % app_name))
        if (not app_name):
            if (not parts):
                app_name = '__index__'
            else:
                name = (parts[0] if parts else '__main__')
                return self.write(('No app "%s" is currently hosted.' % name))
        if (app_name == '__index__'):
            self._get_index(app_name, path)
        else:
            self._get_app(app_name, path)

    def _get_index(self, app_name, path):
        """Render a simple HTML list of all hosted app names."""
        if path:
            return self.redirect('/flexx/__index__')
        all_apps = [('<li><a href="%s/">%s</a></li>' % (name, name)) for name in manager.get_app_names()]
        the_list = (('<ul>%s</ul>' % ''.join(all_apps)) if all_apps else 'no apps')
        self.write(('Index of available apps: ' + the_list))

    def _get_app(self, app_name, path):
        """Serve (or redirect to) the page for one named app, reusing a
        pending session when a valid session_id is supplied."""
        if path.startswith(('flexx/data/', 'flexx/assets/')):
            return self.redirect(('/' + path))
        correct_app_name = manager.has_app_name(app_name)
        if (not correct_app_name):
            return self.write(('No app "%s" is currently hosted.' % app_name))
        if (correct_app_name != app_name):
            # Redirect to the canonical spelling of the app name.
            return self.redirect(('/%s/%s' % (correct_app_name, path)))
        session_id = self.get_argument('session_id', '')
        if session_id:
            session = manager.get_session_by_id(session_id)
            if (session and (session.status == session.STATUS.PENDING)):
                self.write(get_page(session).encode())
            else:
                # Unknown or already-connected session: start over.
                self.redirect(('/%s/' % app_name))
        else:
            session = manager.create_session(app_name, request=self.request)
            self.write(get_page(session).encode())
class TestLChProperties(util.ColorAsserts, unittest.TestCase):
    """Channel accessors of the `--lch` color space read and write in place."""

    BASE = 'color(--lch 90% 50 120 / 1)'

    def _check_channel(self, channel, initial, updated):
        # Reading yields the parsed value; assignment replaces it in place.
        c = Color(self.BASE)
        self.assertEqual(c[channel], initial)
        c[channel] = updated
        self.assertEqual(c[channel], updated)

    def test_lightness(self):
        self._check_channel('lightness', 90, 80)

    def test_chroma(self):
        self._check_channel('chroma', 50, 40)

    def test_hue(self):
        self._check_channel('hue', 120, 110)

    def test_alpha(self):
        self._check_channel('alpha', 1, 0.5)
def task_setup(function: typing.Optional[typing.Callable] = None, *, integration_requests: typing.Optional[List] = None) -> typing.Callable:
    """Decorator adding pre/post-processing prints around a task function.

    Usable both bare (``@task_setup``) and with arguments
    (``@task_setup(integration_requests=[...])``); in the latter case
    *function* is None on the first call and a partial is returned.

    NOTE(review): the original had a bare ``(function)`` expression where a
    ``@functools.wraps(function)`` decorator presumably stood, and no default
    for *function*, which made the ``function is None`` branch unreachable via
    keyword-only usage; both restored/fixed here, backward-compatibly.
    """
    integration_requests = integration_requests or []
    if function is None:
        # Called with configuration only: defer until the function arrives.
        return functools.partial(task_setup, integration_requests=integration_requests)

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        print('preprocessing')
        output = function(*args, **kwargs)
        print('postprocessing')
        return output

    return wrapper
def delete_old_files(directory: str, age_limit_seconds: int):
    """Walk *directory* and delete files older than the age limit, sparing
    any directory name or file stem on the do-not-delete list.

    Fix: the directory name was extracted via ``dirpath.split('/')``, which
    fails to match on Windows path separators; ``os.path.basename`` is used
    instead.
    """
    protected_names = _get_files_to_not_delete()
    cutoff = time.time() - age_limit_seconds
    for dirpath, _dirnames, filenames in os.walk(directory):
        if os.path.basename(dirpath) in protected_names:
            continue
        for filename in filenames:
            # Compare only the first dot-delimited stem, as before.
            if filename.split('.')[0] in protected_names:
                continue
            _remove_file_older_than(os.path.join(dirpath, filename), cutoff)
class TopicNotLocked(Requirement):
    """Requirement fulfilled only when neither the topic nor its forum is locked.

    The topic can be supplied directly, via a post, via a topic id (DB
    lookup), or inferred from the current request.
    """

    def __init__(self, topic=None, topic_id=None, post_id=None, post=None):
        self._topic = topic
        self._topic_id = topic_id
        self._post = post
        self._post_id = post_id

    def fulfill(self, user):
        # Fulfilled only when every lock flag is falsy.
        return not any(self._determine_locked())

    def _determine_locked(self):
        """Return a (topic_locked, forum_locked) pair from whichever source
        this requirement was constructed with."""
        if self._topic is not None:
            return (self._topic.locked, self._topic.forum.locked)
        if self._post is not None:
            topic = self._post.topic
            return (topic.locked, topic.forum.locked)
        if self._topic_id is not None:
            return (
                Topic.query.join(Forum, (Forum.id == Topic.forum_id))
                .filter((Topic.id == self._topic_id))
                .with_entities(Topic.locked, Forum.locked)
                .first()
            )
        return self._get_topic_from_request()

    def _get_topic_from_request(self):
        if not current_topic:
            raise FlaskBBError('How did you get this to happen?')
        return (current_topic.locked, current_forum.locked)
class MystWarningsDirective(SphinxDirective):
    """Directive that renders the MystWarnings enum members with their
    attribute docstrings as a markdown bullet list."""
    has_content = False
    required_arguments = 0
    optional_arguments = 0
    final_argument_whitespace = False

    def run(self):
        from sphinx.pycode import ModuleAnalyzer
        # Pull the attribute docstrings straight from the enum's source module.
        analyzer = ModuleAnalyzer.for_module(MystWarnings.__module__)
        qname = MystWarnings.__qualname__
        analyzer.analyze()
        warning_names = [(e.value, analyzer.attr_docs[(qname, e.name)]) for e in MystWarnings]
        text = [f"- `myst.{name}`: {' '.join(doc)}" for (name, doc) in warning_names]
        node = nodes.Element()
        # NOTE(review): nested_parse is handed a plain list of strings; Sphinx
        # conventionally expects a StringList here — confirm this is intended.
        self.state.nested_parse(text, 0, node)
        return node.children
def _smsapi_response_adapter(response):
    """Normalize SMSAPI's "200 OK with an ERROR body" responses to HTTP 406."""
    is_masked_error = (
        response.status_code == requests.codes.ok
        and response.text.startswith('ERROR')
    )
    if is_masked_error:
        new_status_code = 406
        logger.warning('SMSAPI response got status code %s and contains "%s". Updating SMSAPI response status code to %s!', response.status_code, response.text, new_status_code)
        response.status_code = new_status_code
    return response
(stability='beta')
class StorageInterface(Generic[T, TDataRepresentation], ABC):
    """Base class for typed storage backends: single/bulk save, load, delete,
    query, count, and offset pagination.

    NOTE(review): several methods below had no bodies in the original (their
    decorators/ellipses were apparently stripped, leaving invalid syntax);
    they are restored as hooks raising NotImplementedError. Likewise
    ``serializer``/``adapter`` look like stripped ``@property`` accessors —
    kept as plain methods so the call-site contract is unchanged.
    """

    def __init__(self, serializer: Optional[Serializer] = None, adapter: Optional[StorageItemAdapter[T, TDataRepresentation]] = None):
        # Fall back to JSON serialization and the default item adapter.
        self._serializer = serializer or JsonSerializer()
        self._storage_item_adapter = adapter or DefaultStorageItemAdapter()

    def serializer(self) -> Serializer:
        return self._serializer

    def adapter(self) -> StorageItemAdapter[T, TDataRepresentation]:
        return self._storage_item_adapter

    def save(self, data: T) -> None:
        """Persist a new item (backend hook)."""
        raise NotImplementedError

    def update(self, data: T) -> None:
        """Update an existing item (backend hook)."""
        raise NotImplementedError

    def save_or_update(self, data: T) -> None:
        """Insert-or-update a single item (backend hook)."""
        raise NotImplementedError

    def save_list(self, data: List[T]) -> None:
        for d in data:
            self.save(d)

    def save_or_update_list(self, data: List[T]) -> None:
        for d in data:
            self.save_or_update(d)

    def load(self, resource_id: ResourceIdentifier, cls: Type[T]) -> Optional[T]:
        """Load one item by id, or None (backend hook)."""
        raise NotImplementedError

    def load_list(self, resource_id: List[ResourceIdentifier], cls: Type[T]) -> List[T]:
        # Loads each id individually, silently skipping missing items.
        result = []
        for r in resource_id:
            item = self.load(r, cls)
            if item is not None:
                result.append(item)
        return result

    def delete(self, resource_id: ResourceIdentifier) -> None:
        """Delete one item by id (backend hook)."""
        raise NotImplementedError

    def query(self, spec: QuerySpec, cls: Type[T]) -> List[T]:
        """Return all items matching *spec* (backend hook)."""
        raise NotImplementedError

    def count(self, spec: QuerySpec, cls: Type[T]) -> int:
        """Count the items matching *spec* (backend hook)."""
        raise NotImplementedError

    def paginate_query(self, page: int, page_size: int, cls: Type[T], spec: Optional[QuerySpec] = None) -> PaginationResult[T]:
        """Run *spec* with limit/offset derived from the 1-based *page*."""
        if spec is None:
            spec = QuerySpec(conditions={})
        spec.limit = page_size
        spec.offset = (page - 1) * page_size
        items = self.query(spec, cls)
        total = self.count(spec, cls)
        # (total + page_size - 1) // page_size is ceiling division.
        return PaginationResult(items=items, total_count=total, total_pages=((total + page_size - 1) // page_size), page=page, page_size=page_size)
class Factory(object):
    """Factory for input-method handlers, keyed by method name."""

    known_input_types = {'git': Git, 'filesystem': FileSystem}

    @staticmethod
    def create(input_method, input_config):
        """Instantiate the handler registered under *input_method*.

        Returns None for names prefixed with 'ignore:'.

        NOTE(review): the original signature had no ``self`` — a stripped
        ``@staticmethod`` decorator, restored here. The unknown-name case
        used ``assert False`` (a no-op under ``python -O``, then a KeyError);
        it now raises a clear ValueError.
        """
        tracer.info('Called: name [%s]', input_method)
        if input_method.startswith('ignore:'):
            tracer.info('Ignoring factory entry.')
            return None
        if input_method not in Factory.known_input_types:
            raise ValueError(f'Unknown input method: {input_method!r}')
        return Factory.known_input_types[input_method](input_config)
class TestOFPActionSetTpSrc(unittest.TestCase):
    """Tests for OFPActionSetTpSrc parsing and serialization.

    Wire format: 2-byte type, 2-byte length, 2-byte TP port, 2 bytes padding.
    """
    type_ = {'buf': b'\x00\t', 'val': ofproto.OFPAT_SET_TP_SRC}
    len_ = {'buf': b'\x00\x08', 'val': ofproto.OFP_ACTION_TP_PORT_SIZE}
    tp = {'buf': b'\x07\xf1', 'val': 2033}
    zfill = (b'\x00' * 2)
    buf = (((type_['buf'] + len_['buf']) + tp['buf']) + zfill)
    c = OFPActionSetTpSrc(tp['val'])

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        eq_(self.tp['val'], self.c.tp)

    def test_parser_src(self):
        res = self.c.parser(self.buf, 0)
        eq_(self.tp['val'], res.tp)

    def test_parser_dst(self):
        # Build a buffer carrying the SET_TP_DST action type.
        type_ = {'buf': b'\x00\n', 'val': ofproto.OFPAT_SET_TP_DST}
        buf = (((type_['buf'] + self.len_['buf']) + self.tp['buf']) + self.zfill)
        # Fix: the original parsed self.buf (the SRC buffer), so the DST
        # buffer built above was never exercised.
        res = self.c.parser(buf, 0)
        eq_(self.tp['val'], res.tp)

    # NOTE(review): looks like a stripped @raises(AssertionError) decorator.
    (AssertionError)
    def test_parser_check_type(self):
        # Wrong action type must make the parser assert.
        type_ = {'buf': b'\x00\x07', 'val': 7}
        buf = (((type_['buf'] + self.len_['buf']) + self.tp['buf']) + self.zfill)
        self.c.parser(buf, 0)

    # NOTE(review): looks like a stripped @raises(AssertionError) decorator.
    (AssertionError)
    def test_parser_check_len(self):
        # Wrong length field must make the parser assert.
        len_ = {'buf': b'\x00\x07', 'val': 7}
        buf = (((self.type_['buf'] + len_['buf']) + self.tp['buf']) + self.zfill)
        self.c.parser(buf, 0)

    def test_serialize(self):
        buf = bytearray()
        self.c.serialize(buf, 0)
        fmt = ofproto.OFP_ACTION_TP_PORT_PACK_STR
        res = struct.unpack(fmt, six.binary_type(buf))
        eq_(self.type_['val'], res[0])
        eq_(self.len_['val'], res[1])
        eq_(self.tp['val'], res[2])
def extractXianXiaWorld(item):
    """Map a xianxiaworld.net release item onto a named series message.

    Returns None for previews or unparseable titles, a release message for
    known series URLs, and False for unknown URLs.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol or frag)) or ('preview' in item['title'].lower()):
        return None
    series_by_path = (
        ('www.xianxiaworld.net/A-Thought-Through-Eternity/', 'A Thought Through Eternity'),
        ('www.xianxiaworld.net/Beast-Piercing-The-Heavens/', 'Beast Piercing The Heavens'),
        ('www.xianxiaworld.net/Dominating-Sword-Immortal/', 'Dominating Sword Immortal'),
        ('www.xianxiaworld.net/Dragon-Marked-War-God/', 'Dragon-Marked War God'),
        ('www.xianxiaworld.net/Emperor-of-The-Cosmos/', 'Emperor of The Cosmos'),
        ('www.xianxiaworld.net/God-of-Slaughter/', 'God of Slaughter'),
        ('www.xianxiaworld.net/God-level-Bodyguard-in-The-City/', 'God-level Bodyguard in The City'),
        ('www.xianxiaworld.net/Realms-In-The-Firmament/', 'Realms In The Firmament'),
        ('www.xianxiaworld.net/The-King-Of-Myriad-Domains/', 'The King Of Myriad Domains'),
        ('www.xianxiaworld.net/The-Magus-Era/', 'The Magus Era'),
        ('www.xianxiaworld.net/The-Portal-of-Wonderland/', 'The Portal of Wonderland'),
        ('www.xianxiaworld.net/World-Defying-Dan-God/', 'World Defying Dan God'),
    )
    link = item['linkUrl']
    for path, series_name in series_by_path:
        if path in link:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix)
    return False
def write_version_to_file(filename: str, version: str) -> None:
    """Rewrite every ``version=X.Y.Z`` / ``release=X.Y.Z`` occurrence in
    *filename* with the given version string, in place."""
    with open(filename, 'r+') as handle:
        content = handle.read()
        for prefix in ('version', 'release'):
            content = re.sub(prefix + '=\\d+\\.\\d+\\.\\d+', f'{prefix}={version}', content)
        # Overwrite from the start and drop any leftover tail.
        handle.seek(0)
        handle.write(content)
        handle.truncate()
class SelfAttentionConvBlock(PerceptionBlock):
    """Self-attention over the spatial positions of a conv feature map
    (1x1 query/key/value convolutions, optional additive attention mask,
    learnable residual gate ``gamma``)."""

    def __init__(self, in_keys: Union[(str, List[str])], out_keys: Union[(str, List[str])], in_shapes: Union[(Sequence[int], List[Sequence[int]])], embed_dim: int, dropout: Optional[float], add_input_to_output: bool, bias: bool):
        """Validate shapes and build the 1x1 conv projections.

        The first in_key is the feature map (C, W, H without batch); the
        optional second in_key is a square additive attention mask over the
        W*H positions.
        """
        super().__init__(in_keys=in_keys, out_keys=out_keys, in_shapes=in_shapes)
        assert (len(self.in_keys) == len(self.in_shapes))
        assert (len(self.in_keys) in (1, 2))
        assert (len(self.out_keys) in (1, 2))
        assert (len(self.in_shapes[0]) == 3), 'In dimensionality should be 3 without batch'
        if (len(self.in_keys) > 1):
            # The mask must be square and match the flattened spatial size.
            assert (len(self.in_shapes[1]) == 2)
            assert (np.prod(self.in_shapes[0][(- 2):]) == self.in_shapes[1][(- 1)] == self.in_shapes[1][(- 2)])
        in_dim = self.in_shapes[0][0]
        assert (in_dim > embed_dim)
        assert ((in_dim // embed_dim) == (in_dim / float(embed_dim))), 'in_dim should be evenly dividable by embed_dim'
        self.in_dim = in_dim
        self.embedding_dim = embed_dim
        self.add_input_to_output = add_input_to_output
        # Query/key project into embed_dim; value keeps full channel depth.
        self.query_conv = nn.Conv2d(in_channels=self.in_dim, out_channels=self.embedding_dim, kernel_size=1, bias=bias)
        self.key_conv = nn.Conv2d(in_channels=self.in_dim, out_channels=self.embedding_dim, kernel_size=1, bias=bias)
        self.value_conv = nn.Conv2d(in_channels=self.in_dim, out_channels=self.in_dim, kernel_size=1, bias=bias)
        # gamma starts at 0, so the block initially passes its input through
        # (when add_input_to_output is set).
        self.gamma = nn.Parameter(torch.zeros(1, dtype=torch.float32))
        self.softmax = nn.Softmax(dim=(- 1))
        self.dropout = nn.Dropout(p=(dropout if (dropout is not None) else 0.0))

    # NOTE(review): bare expression below looks like a stripped
    # @override(PerceptionBlock)-style decorator; left as found.
    (PerceptionBlock)
    def forward(self, block_input: Dict[(str, torch.Tensor)]) -> Dict[(str, torch.Tensor)]:
        """Apply spatial self-attention; optionally also emit the attention map."""
        input_tensor = block_input[self.in_keys[0]]
        attn_mask = (block_input[self.in_keys[1]] if (len(self.in_keys) > 1) else None)
        assert (len(input_tensor.shape) == 4)
        (batch_size, num_channels, width, height) = input_tensor.size()
        # Flatten spatial dims: query is (B, W*H, E), key is (B, E, W*H).
        proj_query = self.query_conv(input_tensor).view(batch_size, (- 1), (width * height)).permute(0, 2, 1)
        proj_key = self.key_conv(input_tensor).view(batch_size, (- 1), (width * height))
        energy = torch.bmm(proj_query, proj_key)
        if (attn_mask is not None):
            # Additive mask applied before the softmax.
            energy += attn_mask
        attention = self.softmax(energy)
        attention = self.dropout(attention)
        proj_value = self.value_conv(input_tensor).view(batch_size, (- 1), (width * height))
        out = torch.bmm(proj_value, attention.permute(0, 2, 1))
        out = out.view(batch_size, num_channels, width, height)
        # Learnable gate on the attention output; optional residual input.
        out = (self.gamma * out)
        if self.add_input_to_output:
            out = (out + input_tensor)
        out_dict = dict({self.out_keys[0]: out})
        if (len(self.out_keys) == 2):
            out_dict[self.out_keys[1]] = attention
        return out_dict

    def __repr__(self):
        txt = f'{self.__class__.__name__}'
        txt += f'''
embed_dim: {self.embedding_dim}'''
        txt += f'''
dropout: {self.dropout}'''
        txt += f'''
bias: {(self.query_conv.bias is not None)}'''
        txt += f'''
add_input_to_output: {self.add_input_to_output}'''
        txt += f'''
use_attn_mask: {(len(self.in_keys) > 1)}'''
        txt += f'''
Out Shapes: {self.out_shapes()}'''
        return txt
(scope='function')
def rollbar_connection_config(db: session, rollbar_config, rollbar_secrets) -> Generator:
    """Fixture-style generator yielding a Rollbar SaaS ConnectionConfig and
    deleting it again on teardown."""
    fides_key = rollbar_config['fides_key']
    config_data = {
        'key': fides_key,
        'name': fides_key,
        'connection_type': ConnectionType.saas,
        'access': AccessLevel.write,
        'secrets': rollbar_secrets,
        'saas_config': rollbar_config,
    }
    connection_config = ConnectionConfig.create(db=db, data=config_data)
    yield connection_config
    connection_config.delete(db)
def main():
    """Ansible module entry point for firewall_internet_service_custom.

    Builds the argument spec from the versioned schema, talks to FortiOS over
    the httpapi connection, and exits with changed/diff/version-check info.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_internet_service_custom': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Mirror the schema options into the nested option spec; the mkey
    # attribute ('name') becomes required.
    for attribute_name in module_spec['options']:
        fields['firewall_internet_service_custom']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_internet_service_custom']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default to no logging when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_internet_service_custom')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos, module.check_mode)
    else:
        # No persistent connection available: cannot talk to the device.
        module.fail_json(**FAIL_SOCKET_MSG)
    # Surface any schema/version mismatch as a warning before exiting.
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
.django_db
def test_federal_accounts_endpoint_correct_data(client, fixture_data):
    """The /api/v2/federal_accounts/ endpoint returns FY2017 results sorted
    by managing agency ascending, with budgetary_resources populated only
    for accounts that have submissions."""
    resp = client.post('/api/v2/federal_accounts/', content_type='application/json', data=json.dumps({'sort': {'field': 'managing_agency', 'direction': 'asc'}, 'filters': {'fy': '2017'}}))
    response_data = resp.json()
    assert (response_data['fy'] == '2017')
    # Accounts without FY2017 data report budgetary_resources as None.
    assert (response_data['results'][0]['managing_agency_acronym'] == 'DOD')
    assert (response_data['results'][0]['budgetary_resources'] is None)
    assert (response_data['results'][1]['managing_agency_acronym'] == 'DOL')
    assert (response_data['results'][1]['budgetary_resources'] is None)
    assert (response_data['results'][2]['managing_agency_acronym'] == 'DOD')
    assert (response_data['results'][2]['budgetary_resources'] is None)
    assert (response_data['results'][3]['managing_agency_acronym'] == 'ABCD')
    assert (response_data['results'][3]['budgetary_resources'] == 3000)
    assert (response_data['results'][4]['managing_agency_acronym'] is None)
    assert (response_data['results'][4]['budgetary_resources'] is None)
    assert (response_data['results'][5]['managing_agency_acronym'] == 'EFGH')
    assert (response_data['results'][5]['budgetary_resources'] == 9000)
class MsgServicer(object):
    """gRPC servicer stub: every RPC reports UNIMPLEMENTED to the client
    and raises NotImplementedError locally."""

    def _unimplemented(self, context):
        # Mark the call as UNIMPLEMENTED on the wire before raising locally.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CreateClient(self, request, context):
        self._unimplemented(context)

    def UpdateClient(self, request, context):
        self._unimplemented(context)

    def UpgradeClient(self, request, context):
        self._unimplemented(context)

    def SubmitMisbehaviour(self, request, context):
        self._unimplemented(context)
def _calculate_correlations(df: pd.DataFrame, num_for_corr, cat_for_corr, kind):
if (kind == 'pearson'):
return df[num_for_corr].corr('pearson')
elif (kind == 'spearman'):
return df[num_for_corr].corr('spearman')
elif (kind == 'kendall'):
return df[num_for_corr].corr('kendall')
elif (kind == 'cramer_v'):
return get_pairwise_correlation(df[cat_for_corr], _cramer_v) |
def init(backend='ipy', width=None, height=None, local=True):
    """Initialize mayavi notebook display with the given *backend*.

    *width*/*height*/*local* are forwarded to the backend constructor.
    Raises AssertionError for an unknown backend name.
    """
    from mayavi.core.base import Base
    from tvtk.pyface.tvtk_scene import TVTKScene
    global _backend, _registry
    backends = _registry.keys()
    error_msg = ('Backend must be one of %r, got %s' % (backends, backend))
    assert (backend in backends), error_msg
    from mayavi import mlab
    # Rendering happens offscreen; figures are embedded into the notebook.
    mlab.options.offscreen = True
    _backend = _registry[backend](width, height, local)
    # Hook IPython's rich-display protocol onto mayavi/tvtk objects.
    Base._ipython_display_ = _ipython_display_
    TVTKScene._ipython_display_ = _ipython_display_
    print(('Notebook initialized with %s backend.' % backend))
def should_clear_cache(force=False):
    """Decide whether (and how) the packrat cache should be cleared.

    Returns True/False for a plain clear decision, or 'smart' /
    ``repeatedly_clear_incremental_cache`` in incremental mode.
    """
    if force:
        return True
    if not ParserElement._packratEnabled:
        # Nothing cached without packrat parsing.
        return False
    if not (SUPPORTS_INCREMENTAL and ParserElement._incrementalEnabled):
        # Plain packrat mode: always safe to clear.
        return True
    if not in_incremental_mode():
        return repeatedly_clear_incremental_cache
    cache_over_limit = (
        incremental_cache_limit is not None
        and len(ParserElement.packrat_cache) > incremental_cache_limit
    )
    return 'smart' if cache_over_limit else False
class ClkReg2():
    """Model of a 16-bit clock configuration register word."""

    # (attribute, bit offset, bit width), least-significant field first.
    _LAYOUT = (
        ('delay_time', 0, 6),
        ('no_count', 6, 1),
        ('edge', 7, 1),
        ('mx', 8, 2),
        ('frac_wf_r', 10, 1),
        ('frac_en', 11, 1),
        ('frac', 12, 3),
        ('reserved', 15, 1),
    )

    def __init__(self, value=0):
        self.unpack(value)

    def unpack(self, value):
        """Split *value* into the individual bit-field attributes."""
        for name, offset, width in self._LAYOUT:
            setattr(self, name, (value >> offset) & ((1 << width) - 1))

    def pack(self):
        """Reassemble the register word from the current field values."""
        word = 0
        for name, offset, _width in self._LAYOUT:
            word |= getattr(self, name) << offset
        return word

    def __repr__(self):
        lines = ['ClkReg2:']
        for name, _offset, _width in self._LAYOUT:
            lines.append('  {}: {:d}'.format(name, getattr(self, name)))
        return '\n'.join(lines)
class TestRedirectStream():
    """Mixin with shared tests for redirect_stdout/redirect_stderr.

    Subclasses set *redirect_stream* to the context manager under test and
    *orig_stream* to the sys attribute name it redirects.
    """
    redirect_stream = None
    orig_stream = None

    def test_no_redirect_in_init(self):
        """Constructing the CM alone must not touch the target stream."""
        orig_stdout = getattr(sys, self.orig_stream)
        self.redirect_stream(None)
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)

    def test_redirect_to_string_io(self):
        """Output inside the CM goes to the StringIO; stream is restored."""
        f = StringIO()
        msg = 'Consider an API like help(), which prints directly to stdout'
        orig_stdout = getattr(sys, self.orig_stream)
        with self.redirect_stream(f):
            print(msg, file=getattr(sys, self.orig_stream))
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)
        s = f.getvalue().strip()
        self.assertEqual(s, msg)

    def test_enter_result_is_target(self):
        """__enter__ returns the redirect target itself."""
        f = StringIO()
        with self.redirect_stream(f) as enter_result:
            self.assertIs(enter_result, f)

    def test_cm_is_reusable(self):
        """The same CM instance can be used in two sequential with-blocks."""
        f = StringIO()
        write_to_f = self.redirect_stream(f)
        orig_stdout = getattr(sys, self.orig_stream)
        with write_to_f:
            print('Hello', end=' ', file=getattr(sys, self.orig_stream))
        with write_to_f:
            print('World!', file=getattr(sys, self.orig_stream))
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)
        s = f.getvalue()
        self.assertEqual(s, 'Hello World!\n')

    def test_cm_is_reentrant(self):
        """The same CM instance can be entered while already active.

        NOTE(review): indentation reconstructed — per the test name (and the
        matching CPython test) the second with-block is nested inside the
        first; confirm against the original file.
        """
        f = StringIO()
        write_to_f = self.redirect_stream(f)
        orig_stdout = getattr(sys, self.orig_stream)
        with write_to_f:
            print('Hello', end=' ', file=getattr(sys, self.orig_stream))
            with write_to_f:
                print('World!', file=getattr(sys, self.orig_stream))
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)
        s = f.getvalue()
        self.assertEqual(s, 'Hello World!\n')
class LogiHead():
    """Typed view of the LOGIHEAD keyword: boolean simulator feature flags
    read from a reservoir restart/init file; None means "not present".

    NOTE(review): from_file_values builds instances via ``cls(*flags)`` and
    takes ``cls`` as first parameter, suggesting this class carries a
    @dataclass decorator and the method a @classmethod decorator outside
    this view — confirm.
    """
    dissolved_gas: (bool | None) = None
    vaporized_oil: (bool | None) = None
    directional: (bool | None) = None
    radial: (bool | None) = None
    reversible: (bool | None) = None
    hysterisis: (bool | None) = None
    dual_porosity: (bool | None) = None
    end_point_scaling: (bool | None) = None
    directional_end_point_scaling: (bool | None) = None
    reversible_end_point_scaling: (bool | None) = None
    alternate_end_point_scaling: (bool | None) = None
    miscible_displacement: (bool | None) = None
    scale_water_pressure1: (bool | None) = None
    scale_water_pressure2: (bool | None) = None
    coal_bed_methane: (bool | None) = None

    def from_file_values(cls, values: list[bool], simulator: Simulator) -> LogiHead:
        """Construct a LogiHead from the raw LOGIHEAD value array.

        Eclipse 100 stores the radial/reversible pair swapped relative to
        other simulators, hence the transposed indices 3 and 4.
        """
        if (simulator == Simulator.ECLIPSE_100):
            indices = [0, 1, 2, 4, 3, 6, 14, 16, 17, 18, 19, 35, 55, 56, 127]
        else:
            indices = [0, 1, 2, 3, 4, 6, 14, 16, 17, 18, 19, 35, 55, 56, 127]
        return cls(*[lookup_optional_code(values, i) for i in indices])
('/api/user/edit', methods=['POST'])
_secure
def user_edit():
    """Update a user's admin flag and, optionally, their password.

    Expects form fields: user_id, is_admin ('true'/'false') and, for a
    password change, old_password/new_password/new_password2.  Returns
    'OK' on success or a JSON error with BAD_REQUEST_STATUS.
    """
    user_id = int(request.form['user_id'])
    is_admin = (request.form['is_admin'] == 'true')
    # An admin cannot strip their own admin rights (would lock them out).
    if ((flask.session.get((config.link + '_user_id')) == user_id) and (not is_admin)):
        return (jsonify({'message': 'Cannot remove the admin permissions from itself.'}), BAD_REQUEST_STATUS)
    with session_scope() as session:
        try:
            user = session.query(User).filter((User.id == user_id)).one()
            user.is_admin = is_admin
            old_password = request.form.get('old_password')
            # A password change is requested by supplying old_password.
            if (old_password is not None):
                if user.check_password(old_password):
                    new_password = request.form['new_password']
                    new_password2 = request.form['new_password2']
                    if (new_password != new_password2):
                        return (jsonify({'message': "Passwords don't match."}), BAD_REQUEST_STATUS)
                    user.set_password(new_password)
                else:
                    return (jsonify({'message': "Old password doesn't match."}), BAD_REQUEST_STATUS)
        except NoResultFound:
            return (jsonify({'message': "User ID doesn't exist."}), BAD_REQUEST_STATUS)
        except Exception as e:
            return (jsonify({'message': str(e)}), BAD_REQUEST_STATUS)
    return 'OK'
def get_pips(tile, pips):
    """Build a mapping of tile-relative pip names to their properties.

    Each pip name and wire name has the ``<tile>/`` prefix stripped; wires
    may be None (e.g. for pseudo pips) and are passed through unchanged.
    """
    prefix = tile + '/'

    def strip_wire(wire):
        # Wires can legitimately be absent.
        return check_and_strip_prefix(wire, prefix) if wire is not None else None

    result = {}
    for pip in pips:
        key = check_and_strip_prefix(pip['pip'], prefix)
        result[key] = {
            'src_wire': strip_wire(pip['src_wire']),
            'dst_wire': strip_wire(pip['dst_wire']),
            'is_pseudo': pip['is_pseudo'],
            'is_directional': pip['is_directional'],
            'can_invert': pip['can_invert'],
            'speed_model_index': pip['speed_model_index'],
        }
    return result
def prepareOperatingSystem(config, userPath):
    """Install the configured 32/64-bit hooks into the Windows registry."""
    hook_targets = (
        ('hook32', 'placing hook for 32bit processes',
         'SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\Windows'),
        ('hook64', 'placing hook for 64bit processes',
         'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Windows'),
    )
    for config_key, log_message, registry_path in hook_targets:
        if config[config_key]:
            logging.info(log_message)
            place_hook_in_registry(userPath + config[config_key], registry_path)
def strip_non_ecs_options(subset: Dict[str, Any]) -> None:
    """Recursively drop option keys not present in ``ecs_options``, in place.

    *subset* maps field names to option dicts; nested field definitions
    under an options dict's 'fields' key are stripped the same way.
    """
    for field_name in subset:
        options = subset[field_name]
        subset[field_name] = {
            opt: value for opt, value in options.items() if opt in ecs_options
        }
        nested = subset[field_name].get('fields')
        if isinstance(nested, dict):
            strip_non_ecs_options(nested)
def execute(wf, mode='light', storage={'s3': {'config': {'s3_bucket': 'dagster-test'}}}):
    """Build a Dagster pipeline from workflow JSON *wf* and execute it.

    Returns the pipeline result, or {'error': ...} on failure.
    NOTE(review): *storage* is a mutable default argument shared across
    calls — confirm it is never mutated by callees.
    """
    (task, dep, env) = parse_json(wf)
    try:
        pipeline_def = PipelineDefinition(name='basic', solid_defs=task, dependencies=dep, mode_defs=[ModeDefinition('light', system_storage_defs=s3_plus_default_storage_defs, resource_defs={'pyspark': nothing, 's3': s3_resource})])
        # NOTE(review): the endpoint_url literal begins with "' + conf." —
        # this looks like a mangled string concatenation; verify the
        # intended expression against the original source.
        cfg = {'config': {'endpoint_url': (' + conf.OBJECT_STORAGE_HANDLER['connection_string']), 'region_name': conf.OBJECT_STORAGE_HANDLER['region_name'], 'use_unsigned_session': conf.OBJECT_STORAGE_HANDLER['use_unsigned_session']}}
    except Exception as e:
        # NOTE(review): when this branch runs, `cfg` is left undefined and
        # the execute_pipeline call below raises NameError; the `result`
        # set here is also overwritten.  An early return may have been
        # intended — confirm.
        result = {'error': ('configuration ' + str(e))}
    start = time.perf_counter()
    try:
        result = execute_pipeline(pipeline_def, run_config=RunConfig(mode=mode, run_id=wf['wf_unique_id']), instance=DagsterInstance.get(), environment_dict={'storage': storage, 'solids': env, 'resources': {'s3': cfg}})
    except Exception as e:
        result = {'error': str(e)}
    end = time.perf_counter()
    print(('execution time :' + str((end - start))))
    return result
def extractPoachedeggsnovelsCom(item):
    """Map a poachedeggsnovels.com feed item to a release message.

    Returns None for previews or unnumbered posts, False when the item's
    tags match no known series.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    has_numbering = bool(chp or vol)
    if not has_numbering or 'preview' in title.lower():
        return None
    known_series = (
        ('zhanxian', 'Zhanxian', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, release_type in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=release_type)
    return False
def test_smt256_empty_hashes():
    """An empty depth-256 sparse Merkle tree exposes the canonical empty
    root hash, and the branch for an untouched key equals the precomputed
    chain of empty node hashes."""
    DEPTH = 256
    EMPTY_LEAF_NODE_HASH = BLANK_HASH
    # Build the empty-subtree hashes bottom-up: each level is the hash of
    # two copies of the level below.
    EMPTY_NODE_HASHES = collections.deque([EMPTY_LEAF_NODE_HASH])
    for _ in range((DEPTH - 1)):
        EMPTY_NODE_HASHES.appendleft(keccak((EMPTY_NODE_HASHES[0] + EMPTY_NODE_HASHES[0])))
    EMPTY_ROOT_HASH = keccak((EMPTY_NODE_HASHES[0] + EMPTY_NODE_HASHES[0]))
    smt = SMT()
    assert (smt.root_hash == EMPTY_ROOT_HASH)
    key = (b'\x00' * 32)
    # _get returns (value, branch); the branch of an untouched key is the
    # full chain of empty node hashes.
    assert (smt._get(key)[1] == tuple(EMPTY_NODE_HASHES))
class MoveJoystick(MoveTank):
    """Drive a MoveTank from joystick (x, y) coordinates."""

    def on(self, x, y, radius=100.0):
        """Convert a joystick position into left/right motor speeds.

        *radius* is the joystick's maximum displacement; the stick vector
        is clipped to it and scales the direction-derived speeds.  A stick
        at (0, 0) stops both motors.
        """
        if ((not x) and (not y)):
            self.off()
            return
        vector_length = math.sqrt(((x * x) + (y * y)))
        angle = math.degrees(math.atan2(y, x))
        # Normalize atan2's (-180, 180] result into [0, 360).
        if (angle < 0):
            angle += 360
        if (vector_length > radius):
            vector_length = radius
        (init_left_speed_percentage, init_right_speed_percentage) = MoveJoystick.angle_to_speed_percentage(angle)
        # Scale direction speeds by how far the stick is pushed.
        left_speed_percentage = ((init_left_speed_percentage * vector_length) / radius)
        right_speed_percentage = ((init_right_speed_percentage * vector_length) / radius)
        MoveTank.on(self, SpeedPercent(left_speed_percentage), SpeedPercent(right_speed_percentage))

    def angle_to_speed_percentage(angle):
        """Map a direction angle in degrees ([0, 360)) to a
        (left_percent, right_percent) pair in [-100, 100].

        The mapping is piecewise linear over 45-degree sectors (with two
        22.5-degree sub-sectors around straight-reverse at 202.5 and 337.5
        degrees).  NOTE(review): defined without *self* and invoked unbound
        as MoveJoystick.angle_to_speed_percentage(angle) — effectively a
        @staticmethod; confirm the decorator was not lost.
        """
        if (0 <= angle <= 45):
            left_speed_percentage = 1
            # right speed ramps from -1 (due east) up to 0 at 45 degrees
            right_speed_percentage = ((- 1) + (angle / 45.0))
        elif (45 < angle <= 90):
            left_speed_percentage = 1
            percentage_from_45_to_90 = ((angle - 45) / 45.0)
            right_speed_percentage = percentage_from_45_to_90
        elif (90 < angle <= 135):
            percentage_from_90_to_135 = ((angle - 90) / 45.0)
            left_speed_percentage = (1 - percentage_from_90_to_135)
            right_speed_percentage = 1
        elif (135 < angle <= 180):
            percentage_from_135_to_180 = ((angle - 135) / 45.0)
            left_speed_percentage = ((- 1) * percentage_from_135_to_180)
            right_speed_percentage = 1
        elif (180 < angle <= 225):
            percentage_from_180_to_225 = ((angle - 180) / 45.0)
            left_speed_percentage = ((- 1) + percentage_from_180_to_225)
            # Right speed crosses zero at 202.5 degrees (straight reverse).
            if (angle < 202.5):
                percentage_from_180_to_202 = ((angle - 180) / 22.5)
                right_speed_percentage = (1 - percentage_from_180_to_202)
            elif (angle == 202.5):
                right_speed_percentage = 0
            else:
                percentage_from_202_to_225 = ((angle - 202.5) / 22.5)
                right_speed_percentage = ((- 1) * percentage_from_202_to_225)
        elif (225 < angle <= 270):
            percentage_from_225_to_270 = ((angle - 225) / 45.0)
            left_speed_percentage = ((- 1) * percentage_from_225_to_270)
            right_speed_percentage = (- 1)
        elif (270 < angle <= 315):
            left_speed_percentage = (- 1)
            percentage_from_270_to_315 = ((angle - 270) / 45.0)
            right_speed_percentage = ((- 1) + percentage_from_270_to_315)
        elif (315 < angle <= 360):
            # Left speed crosses zero at 337.5 degrees (straight reverse).
            if (angle < 337.5):
                percentage_from_315_to_337 = ((angle - 315) / 22.5)
                left_speed_percentage = ((1 - percentage_from_315_to_337) * (- 1))
            elif (angle == 337.5):
                left_speed_percentage = 0
            elif (angle > 337.5):
                percentage_from_337_to_360 = ((angle - 337.5) / 22.5)
                left_speed_percentage = percentage_from_337_to_360
            percentage_from_315_to_360 = ((angle - 315) / 45.0)
            right_speed_percentage = ((- 1) * percentage_from_315_to_360)
        else:
            raise Exception('You created a circle with more than 360 degrees ({})...that is quite the trick'.format(angle))
        return ((left_speed_percentage * 100), (right_speed_percentage * 100))
def message_scalar_test(out, version, cls):
    """Emit a C unit test for *cls* at *version* onto *out*.

    The generated test news up the object, checks header invariants
    (version, length, parent, object id), optionally verifies the wire
    object id and wire length, round-trips incrementing scalar member
    values, then deletes the object.
    """
    (members, member_types) = scalar_member_types_get(cls, version)
    length = of_g.base_length[(cls, version)]
    v_name = loxi_utils.version_to_name(version)
    out.write(('\nstatic int\ntest_%(cls)s_%(v_name)s_scalar(void)\n{\n    %(cls)s_t *obj;\n\n    obj = %(cls)s_new(%(v_name)s);\n    TEST_ASSERT(obj != NULL);\n    TEST_ASSERT(obj->version == %(v_name)s);\n    TEST_ASSERT(obj->length == %(length)d);\n    TEST_ASSERT(obj->parent == NULL);\n    TEST_ASSERT(obj->object_id == %(u_cls)s);\n' % dict(cls=cls, u_cls=cls.upper(), v_name=v_name, length=length, version=version)))
    ofclass = loxi_globals.unified.class_by_name(cls)
    # For a concrete class whose inheritance root is virtual, also verify
    # the wire object-id lookup against the root's accessor.
    if (ofclass and (not ofclass.virtual)):
        root = ofclass
        while root.superclass:
            root = root.superclass
        if root.virtual:
            out.write(('\n    {\n        of_object_id_t object_id;\n        %(root_cls)s_wire_object_id_get(obj, &object_id);\n        TEST_ASSERT(object_id == %(u_cls)s);\n    }\n' % dict(root_cls=root.name, u_cls=cls.upper())))
    # Scalar population/round-trip only makes sense for concrete classes.
    if (not type_maps.class_is_virtual(cls)):
        out.write(('\n    if (loci_class_metadata[obj->object_id].wire_length_get != NULL) {\n        int length;\n\n        loci_class_metadata[obj->object_id].wire_length_get((of_object_t *)obj, &length);\n        TEST_ASSERT(length == %(length)d);\n    }\n\n    /* Set up incrementing values for scalar members */\n    %(cls)s_%(v_name)s_populate_scalars(obj, 1);\n\n    /* Check values just set */\n    TEST_ASSERT(%(cls)s_%(v_name)s_check_scalars(obj, 1) != 0);\n' % dict(cls=cls, u_cls=cls.upper(), v_name=v_name, length=length, version=version)))
    out.write(('\n    %(cls)s_delete(obj);\n\n    /* To do: Check memory */\n    return TEST_PASS;\n}\n' % dict(cls=cls)))
class Plugin(LedgerPlugin, QtPluginBase):
    """Qt GUI glue for the Ledger hardware-wallet plugin."""
    # Status-bar icons for the paired/unpaired device states.
    icon_paired = 'icons8-usb-connected-80.png'
    icon_unpaired = 'icons8-usb-disconnected-80.png'

    def create_handler(self, window: HandlerWindow) -> QtHandlerBase:
        """Return the Qt handler used for device interaction dialogs."""
        return Ledger_Handler(window)

    def show_settings_dialog(self, window: ElectrumWindow, keystore: Hardware_KeyStore) -> None:
        """Open the device settings dialog for *keystore*."""
        assert (keystore.handler is not None)
        keystore.handler.setup_dialog()
@kopf.on.update(GROUP, VERSION, PLURAL)
def updated(spec, status, logger, patch, **kwargs):
    """Handle updates to the custom resource.

    (Re)generates the expected Kubernetes manifests via OpenAI — asking the
    model to repair them when the previous reconcile recorded an error —
    then creates/applies each object through the dynamic client.

    Raises kopf.TemporaryError (retry after 60s) on YAML parse or apply
    failures, recording the error in the resource status.
    """
    description = spec.get('description')
    openai.api_key = os.environ['OPENAI_API_KEY']
    # A non-empty status.error means the last attempt failed: ask the model
    # to repair the manifests; otherwise regenerate from the description.
    if (('error' in status) and ('expectedObjects' in spec) and (status['error'] != '')):
        expected_objects_yaml = ask_for_help(openai, spec['expectedObjects'], description, status['error'])
    else:
        expected_objects_yaml = update_spec(openai, spec['expectedObjects'], description)
    logger.debug(expected_objects_yaml)
    try:
        # Materialize immediately: safe_load_all returns a lazy generator,
        # so (a) parse errors would otherwise escape this try block, and
        # (b) the previous code exhausted the generator while building the
        # patch below and then iterated an empty list, never creating any
        # objects.
        expected_objects = list(yaml.safe_load_all(expected_objects_yaml))
    except Exception as e:
        logger.debug(e)
        patch.status['error'] = f'Error parsing yaml: {e}'
        raise kopf.TemporaryError('Error parsing yaml, asking for help.', delay=60)
    patch.spec['expectedObjects'] = '\n---\n'.join([yaml.dump(object) for object in expected_objects if ('comments' not in object)])
    k8s_client = kubernetes.client.ApiClient()
    dynamic_client = kubernetes.dynamic.DynamicClient(k8s_client)
    try:
        for object in expected_objects:
            if ('comments' in object):
                # Surface the model's commentary in status instead of
                # applying it as a manifest.
                patch.status['comments'] = object['comments']
                continue
            if spec['dryRun']:
                continue
            namespace = object.get('metadata', {}).get('namespace', None)
            api = dynamic_client.resources.get(api_version=object['apiVersion'], kind=object['kind'])
            try:
                resp = api.create(namespace=namespace, body=object)
                logger.debug(f'Create Response: response: {resp}')
            except kubernetes.client.exceptions.ApiException as e:
                # Creation failed (e.g. the object already exists): fall
                # back to a server-side apply so updates converge.
                logger.debug(e)
                resp = api.server_side_apply(namespace=namespace, body=object, field_manager='magic')
                logger.debug(f'Patch Response: response: {resp}')
            logger.debug(resp)
    except Exception as e:
        logger.debug(e)
        patch.status['error'] = str(e)
        raise kopf.TemporaryError('Error creating objects, asking for help.', delay=60)
    # Clear any previous error once the reconcile succeeds.
    patch.status['error'] = ''
class OozieActionSchema(OozieNamedObjectSchema):
    """Marshmallow schema for a single Oozie workflow action.

    Besides the mandatory ok/error transitions, exactly one action-builder
    key (from the plugins registered in the context) must be present.
    """
    ok = ma.fields.Nested(OozieFlowControlSchema, required=True)
    error = ma.fields.Nested(OozieFlowControlSchema, required=True)

    def _get_action_builder(self, data):
        """Return the unique action builder whose key appears in *data*.

        Raises ma.ValidationError if none or more than one key is present.
        """
        keyed_action_builders = {builder.key: builder for builder in self.context['oozie_plugin'].action_builders()}
        keys_present = [key for key in keyed_action_builders if data.get(key)]
        if (not keys_present):
            raise ma.ValidationError('No known action builders present (expected one of {{{}}}, found keys: {}'.format(', '.join(keyed_action_builders.keys()), ', '.join(data.keys())))
        if (len(keys_present) > 1):
            raise ma.ValidationError('Only one action type may be specified (found {})'.format(keys_present))
        return keyed_action_builders[keys_present[0]]

    # NOTE(review): the bare "_schema(...)" / "_load(...)" lines below look
    # like truncated marshmallow decorators (@ma.validates_schema /
    # @ma.post_load with pass_original=True) — confirm against the original.
    _schema(pass_original=True)
    def one_action_type(self, _, original):
        """Validation hook: require exactly one action type in the payload."""
        self._get_action_builder(original)

    _load(pass_original=True)
    def fetch_base_action(self, data, original):
        """Post-load hook: instantiate the matched action builder."""
        builder_cls = self._get_action_builder(original)
        return builder_cls(self.context, data, original[builder_cls.key])
def stop_process(pid, timeout_sec: int=60):
    """Stop process *pid*: SIGTERM first, escalating to SIGKILL.

    Polls once per second until the process disappears.  Raises
    RuntimeError only if the SIGKILL itself cannot be delivered.
    """
    try:
        os.kill(pid, signal.SIGTERM)
        start_time = time.monotonic()
        while check_pid_exist(pid):
            if ((time.monotonic() - start_time) > timeout_sec):
                # NOTE(review): this RuntimeError is swallowed by the broad
                # `except Exception` below, turning a SIGTERM timeout into
                # the SIGKILL escalation path — presumably intentional,
                # but confirm.
                raise RuntimeError('pid: {} does not exit after {} seconds.'.format(pid, timeout_sec))
            time.sleep(1)
    except ProcessLookupError:
        # Process already gone — nothing to do.
        pass
    except Exception:
        logger.warning('Failed to stop pid: {} with SIGTERM. Try to send SIGKILL'.format(pid))
        try:
            os.kill(pid, signal.SIGKILL)
        except Exception as e:
            raise RuntimeError('Failed to kill pid: {} with SIGKILL.'.format(pid)) from e
    logger.info('pid: {} stopped'.format(pid))
def add_default_axes(plot, orientation='normal', vtitle='', htitle=''):
    """Attach left/bottom PlotAxis underlays to *plot* and return them.

    For 'normal'/'h' orientation the value mapper drives the left axis and
    the index mapper the bottom axis; otherwise the mappers are swapped.
    """
    if orientation in ('normal', 'h'):
        v_mapper, h_mapper = plot.value_mapper, plot.index_mapper
    else:
        v_mapper, h_mapper = plot.index_mapper, plot.value_mapper
    left_axis = PlotAxis(orientation='left', title=vtitle, mapper=v_mapper, component=plot)
    bottom_axis = PlotAxis(orientation='bottom', title=htitle, mapper=h_mapper, component=plot)
    plot.underlays.extend((left_axis, bottom_axis))
    return (left_axis, bottom_axis)
.parametrize('text,exclusive_classes,response,expected', [('Golden path for exclusive', True, 'Recipe', ['Recipe']), ('Golden path for non-exclusive', False, 'Recipe,Feedback', ['Recipe', 'Feedback']), ('Non-exclusive but responded with a single label', False, 'Recipe', ['Recipe']), ('Exclusive but responded with multilabel', True, 'Recipe,Comment', []), ('Weird outputs for exclusive', True, 'reCiPe', ['Recipe']), ('Weird outputs for non-exclusive', False, 'reciPE,CoMMeNt,FeedBack', ['Recipe', 'Comment', 'Feedback']), ('Extra spaces for exclusive', True, 'Recipe ', ['Recipe']), ('Extra spaces for non-exclusive', False, 'Recipe, Comment, Feedback', ['Recipe', 'Comment', 'Feedback']), ('One weird value', False, 'Recipe,Comment,SomeOtherUnnecessaryLabel', ['Recipe', 'Comment'])])
def test_textcat_multilabel_labels_are_correct(text, exclusive_classes, response, expected):
    """Parsing LLM responses must normalize label casing/whitespace, drop
    unknown labels, and reject multi-label answers when classes are
    exclusive (parameterized cases above)."""
    labels = 'Recipe,Comment,Feedback'
    llm_textcat = make_textcat_task_v3(labels=labels, exclusive_classes=exclusive_classes, normalizer=lowercase_normalizer())
    nlp = spacy.blank('en')
    doc = nlp.make_doc(text)
    pred = list(llm_textcat.parse_responses([doc], [response]))[0]
    # Only categories scored exactly 1.0 count as predicted.
    pred_cats = [cat for (cat, score) in pred.cats.items() if (score == 1.0)]
    assert (set(pred_cats) == set(expected))
.parametrize(['region_indices', 'items', 'expected_num_shorts', 'expected_regions', 'expected_items'], [([0, 1, 2], [[0, 1, 2], [3, 4, 5], [6, 7, 8]], 0, [0, 1, 2], [[0, 1, 2], [3, 4, 5], [6, 7, 8]]), ([0, 1, 2], [[0, 128, 2], [3, 4, 5], [6, 7, 8]], 1, [1, 0, 2], [[128, 0, 2], [4, 3, 5], [7, 6, 8]]), ([0, 1, 2], [[0, 1, 128], [3, 4, 5], [6, (- 129), 8]], 2, [1, 2, 0], [[1, 128, 0], [4, 5, 3], [(- 129), 8, 6]]), ([0, 1, 2], [[128, 1, (- 129)], [3, 4, 5], [6, 7, 8]], 2, [0, 2, 1], [[128, (- 129), 1], [3, 5, 4], [6, 8, 7]]), ([0, 1, 2], [[0, 1, 128], [3, (- 129), 5], [256, 7, 8]], 3, [0, 1, 2], [[0, 1, 128], [3, (- 129), 5], [256, 7, 8]]), ([0, 1, 2], [[0, 128, 2], [0, 4, 5], [0, 7, 8]], 1, [1, 2], [[128, 2], [4, 5], [7, 8]]), ([0, 1, 2], [[0, 32768, 2], [3, 4, 5], [6, 7, 8]], 32769, [1, 0, 2], [[32768, 0, 2], [4, 3, 5], [7, 6, 8]]), ([0, 1, 2], [[0, 1, 32768], [3, 4, 5], [6, (- 32769), 8]], 32770, [1, 2, 0], [[1, 32768, 0], [4, 5, 3], [(- 32769), 8, 6]]), ([0, 1, 2], [[32768, 1, (- 32769)], [3, 4, 5], [6, 7, 8]], 32770, [0, 2, 1], [[32768, (- 32769), 1], [3, 5, 4], [6, 8, 7]]), ([0, 1, 2], [[0, 1, 32768], [3, (- 32769), 5], [65536, 7, 8]], 32771, [0, 1, 2], [[0, 1, 32768], [3, (- 32769), 5], [65536, 7, 8]]), ([0, 1, 2], [[0, 32768, 2], [0, 4, 5], [0, 7, 8]], 32769, [1, 2], [[32768, 2], [4, 5], [7, 8]])], ids=['0/3_shorts_no_reorder', '1/3_shorts_reorder', '2/3_shorts_reorder', '2/3_shorts_same_row_reorder', '3/3_shorts_no_reorder', '1/3_shorts_1/3_zeroes', '1/3_longs_reorder', '2/3_longs_reorder', '2/3_longs_same_row_reorder', '3/3_longs_no_reorder', '1/3_longs_1/3_zeroes'])
def test_buildVarData_optimize(region_indices, items, expected_num_shorts, expected_regions, expected_items):
    """buildVarData(optimize=True) must choose the narrowest column widths,
    reorder regions so the wide columns come first, and drop all-zero
    columns (per the parameterized cases above)."""
    data = buildVarData(region_indices, items, optimize=True)
    assert (data.ItemCount == len(items))
    assert (data.NumShorts == expected_num_shorts)
    assert (data.VarRegionCount == len(expected_regions))
    assert (data.VarRegionIndex == expected_regions)
    assert (data.Item == expected_items)
def gen_function(func_attrs, exec_cond_template, dim_info_dict, layout, unary_op1, binary_op1, binary_op2, unary_op2):
    """Generate CUDA source for a GEMM-with-bias-broadcast op.

    The epilogue applies unary_op1/binary_op1/binary_op2/unary_op2 around
    the bias/residual inputs; actual code assembly is delegated to
    common.gen_function with both CUTLASS 2.x and 3.x problem arguments.
    """
    backend_spec = CUDASpec()
    # Map framework dtypes to CUTLASS element types.
    elem_input_type = backend_spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_output_type = backend_spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    input_addr_calculator = gemm_rcr.get_input_addr_calculator(func_attrs)
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    output_ndims = len(func_attrs['output_accessors'][0].original_shapes)
    support_split_k = _support_split_k(func_attrs)
    # Whether a second residual input (d1) participates in the epilogue.
    has_d1 = common.has_d1(func_attrs)
    problem_args = PROBLEM_ARGS_TEMPLATE.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type, layout=layout, support_split_k=support_split_k, has_d1=has_d1)
    problem_args_cutlass_3x = PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type, layout=layout, has_d1=has_d1)
    return common.gen_function(func_attrs=func_attrs, src_template=SRC_TEMPLATE, exec_cond_template=exec_cond_template, problem_args=problem_args, problem_args_cutlass_3x=problem_args_cutlass_3x, input_ndims=input_ndims, weight_ndims=weight_ndims, output_ndims=output_ndims, dim_info_dict=dim_info_dict, f_instance_convertor=partial(gemm_bias_broadcast_instance, layout=layout, unary_op1=unary_op1, binary_op1=binary_op1, binary_op2=binary_op2, unary_op2=unary_op2, elem_type=elem_input_type), support_split_k=support_split_k, input_addr_calculator=input_addr_calculator, output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(stride_dim='N', output_accessor=func_attrs['output_accessors'][0]))
def get_permutation_map(V, W):
    """Compute the dof permutation mapping the mixed space W into V.

    A par_loop copies each cell's W-dofs (first subspace, then second —
    this helper assumes W has exactly two subspaces) into the matching
    V-dofs, after which local indices are converted to global ones.
    Returns only the owned entries of the permutation.
    """
    perm = numpy.empty((V.dof_count,), dtype=PETSc.IntType)
    perm.fill((- 1))  # -1 marks entries not yet written
    vdat = V.make_dat(val=perm)
    # Give each W subspace a dat of consecutive global positions, offset by
    # the sizes of the preceding subspaces.
    offset = 0
    wdats = []
    for Wsub in W:
        val = numpy.arange(offset, (offset + Wsub.dof_count), dtype=PETSc.IntType)
        wdats.append(Wsub.make_dat(val=val))
        offset += Wsub.dof_dset.layout_vec.sizes[0]
    sizes = [(Wsub.finat_element.space_dimension() * Wsub.value_size) for Wsub in W]
    # Element-level permutation of W's restricted dofs inside V's element.
    eperm = numpy.concatenate([restricted_dofs(Wsub.finat_element, V.finat_element) for Wsub in W])
    pmap = PermutedMap(V.cell_node_map(), eperm)
    kernel_code = f'''
    void permutation(PetscInt *restrict x,
    const PetscInt *restrict xi,
    const PetscInt *restrict xf){{
    for(PetscInt i=0; i<{sizes[0]}; i++) x[i] = xi[i];
    for(PetscInt i=0; i<{sizes[1]}; i++) x[i+{sizes[0]}] = xf[i];
    return;
    }}
    '''
    kernel = op2.Kernel(kernel_code, 'permutation', requires_zeroed_output_arguments=False)
    op2.par_loop(kernel, V.mesh().cell_set, vdat(op2.WRITE, pmap), wdats[0](op2.READ, W[0].cell_node_map()), wdats[1](op2.READ, W[1].cell_node_map()))
    own = V.dof_dset.layout_vec.sizes[0]
    perm = perm.reshape(((- 1),))
    # Convert local numbering to global via the local-to-global map.
    perm = V.dof_dset.lgmap.apply(perm, result=perm)
    return perm[:own]
def extractAzuureskyBlogspotCom(item):
    """Map an azuuresky.blogspot.com feed item to a release message.

    Returns None for previews or unnumbered posts, False when the title
    matches no known series.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    has_numbering = bool(chp or vol)
    if not has_numbering or 'preview' in title.lower():
        return None
    known_series = (
        ('World defying dan god chapter ', 'World defying dan god', 'translated'),
        ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
        ('Master of Dungeon', 'Master of Dungeon', 'oel'),
    )
    lowered_title = title.lower()
    for title_fragment, series_name, release_type in known_series:
        if title_fragment.lower() in lowered_title:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=release_type)
    return False
class RequestContextManager():
    """Context manager around a contextvars-based request-context holder,
    exposing the request start/end values recorded in the context and
    propagating them outward on exit when nested."""

    def __init__(self, request_context_holder):
        self.ctx_holder = request_context_holder
        self.ctx = None    # dict-like context created on __enter__
        self.token = None  # contextvars token for restoring on __exit__

    def __enter__(self):
        (self.ctx, self.token) = self.ctx_holder.init_request_context()
        return self

    def request_start(self):
        return self.ctx.get('request_start')

    def request_end(self):
        return self.ctx.get('request_end')

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.ctx_holder.restore_context(self.token)
        # A non-MISSING old_value means we were nested inside an enclosing
        # request context: propagate our timings into the restored context.
        if (self.token.old_value != contextvars.Token.MISSING):
            # NOTE(review): the bound methods themselves (not their return
            # values) are passed here — confirm the holder expects callables
            # rather than timestamps.
            self.ctx_holder.update_request_start(self.request_start)
            self.ctx_holder.update_request_end(self.request_end)
        self.token = None
        return False  # never suppress exceptions
class TableArith(TableOperation):
    """Set-style operation over table expressions, combined with the SQL
    operator *op* (e.g. UNION/INTERSECT/EXCEPT)."""
    op: str           # SQL set-operator keyword
    exprs: List[Sql]  # operand table expressions

    def _compile(self, qb):
        """Compile each operand to a SELECT and join them with *op*."""
        tables = [t.compile_wrap(qb) for t in self.exprs]
        selects = [([f'SELECT * FROM '] + t.code) for t in tables]
        code = join_sep(selects, f' {self.op} ')
        # NOTE(review): LIMIT -1 appears to be a no-op appended to satisfy
        # SQLite's compound-select grammar — confirm.
        if (qb.target == sqlite):
            code += [' LIMIT -1']
        return code

    # The operation's type follows its first operand.
    type = property(X.exprs[0].type)
_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestSimpleEnumEditor(BaseTestMixin, unittest.TestCase):
    """UI tests for the 'simple' style EnumEditor (Qt and wx toolkits)."""

    def setUp(self):
        BaseTestMixin.setUp(self)

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    def check_enum_text_update(self, view):
        """Selecting a combobox item updates the displayed text."""
        enum_edit = EnumModel()
        tester = UITester()
        with tester.create_ui(enum_edit, dict(view=view)) as ui:
            combobox = tester.find_by_name(ui, 'value')
            displayed = combobox.inspect(DisplayedText())
            self.assertEqual(displayed, 'one')
            combobox.locate(Index(1)).perform(MouseClick())
            displayed = combobox.inspect(DisplayedText())
            self.assertEqual(displayed, 'two')

    def check_enum_object_update(self, view):
        """Typing a valid value and pressing Enter updates the model."""
        enum_edit = EnumModel()
        tester = UITester()
        with tester.create_ui(enum_edit, dict(view=view)) as ui:
            self.assertEqual(enum_edit.value, 'one')
            combobox = tester.find_by_name(ui, 'value')
            # Clear the current 3-character value before typing the new one.
            for _ in range(3):
                combobox.perform(KeyClick('Backspace'))
            combobox.perform(KeySequence('two'))
            combobox.perform(KeyClick('Enter'))
            self.assertEqual(enum_edit.value, 'two')

    def check_enum_index_update(self, view):
        """Clicking a combobox item by index updates the model."""
        enum_edit = EnumModel()
        tester = UITester()
        with tester.create_ui(enum_edit, dict(view=view)) as ui:
            self.assertEqual(enum_edit.value, 'one')
            combobox = tester.find_by_name(ui, 'value')
            combobox.locate(Index(1)).perform(MouseClick())
            self.assertEqual(enum_edit.value, 'two')

    def check_enum_text_bad_update(self, view):
        """Typing an invalid value leaves the model unchanged."""
        enum_edit = EnumModel()
        tester = UITester()
        with tester.create_ui(enum_edit, dict(view=view)) as ui:
            self.assertEqual(enum_edit.value, 'one')
            combobox = tester.find_by_name(ui, 'value')
            for _ in range(3):
                combobox.perform(KeyClick('Backspace'))
            combobox.perform(KeyClick('H'))
            combobox.perform(KeyClick('Enter'))
            self.assertEqual(enum_edit.value, 'one')

    def test_simple_enum_editor_text(self):
        self.check_enum_text_update(get_view('simple'))

    def test_simple_enum_editor_index(self):
        self.check_enum_index_update(get_view('simple'))

    def test_simple_evaluate_editor_text(self):
        self.check_enum_text_update(get_evaluate_view('simple'))

    def test_simple_evaluate_editor_index(self):
        self.check_enum_index_update(get_evaluate_view('simple'))

    def test_simple_evaluate_editor_bad_text(self):
        self.check_enum_text_bad_update(get_evaluate_view('simple'))

    def test_simple_evaluate_editor_object(self):
        self.check_enum_object_update(get_evaluate_view('simple'))

    def test_simple_evaluate_editor_object_no_auto_set(self):
        """With auto_set=False the model only updates on Enter, not on
        each keystroke."""
        view = get_evaluate_view('simple', auto_set=False)
        enum_edit = EnumModel()
        tester = UITester()
        with tester.create_ui(enum_edit, dict(view=view)) as ui:
            self.assertEqual(enum_edit.value, 'one')
            combobox = tester.find_by_name(ui, 'value')
            for _ in range(3):
                combobox.perform(KeyClick('Backspace'))
            combobox.perform(KeySequence('two'))
            # Typing alone must not change the model yet.
            self.assertEqual(enum_edit.value, 'one')
            combobox.perform(KeyClick('Enter'))
            self.assertEqual(enum_edit.value, 'two')

    def test_simple_editor_resizable(self):
        """Smoke test: a resizable simple editor can be created (enthought/traitsui#1221)."""
        enum_edit = EnumModel()
        resizable_view = View(UItem('value', style='simple', resizable=True))
        tester = UITester()
        with tester.create_ui(enum_edit, dict(view=resizable_view)):
            pass

    def test_simple_editor_rebuild_editor_evaluate(self):
        """Smoke test: changing the factory's values rebuilds the editor."""
        enum_editor_factory = EnumEditor(evaluate=True, values=['one', 'two', 'three', 'four'])
        view = View(UItem('value', editor=enum_editor_factory, style='simple'))
        tester = UITester()
        with tester.create_ui(EnumModel(), dict(view=view)):
            enum_editor_factory.values = ['one', 'two', 'three']

    def test_simple_editor_disabled(self):
        """A disabled editor rejects interaction and reports not enabled."""
        enum_edit = EnumModel(value='two')
        view = View(UItem('value', style='simple', enabled_when="value == 'one'", editor=EnumEditor(evaluate=True, values=['one', 'two'])))
        tester = UITester()
        with tester.create_ui(enum_edit, dict(view=view)) as ui:
            combobox = tester.find_by_name(ui, 'value')
            with self.assertRaises(Disabled):
                combobox.perform(KeyClick('Enter'))
            with self.assertRaises(Disabled):
                combobox.perform(KeySequence('two'))
            self.assertFalse(combobox.inspect(IsEnabled()))
class Heightmap():
    """Midpoint-displacement heightmap on a size x size grid.

    *params* must provide: 'size', 'sea_percent', 'roughness' and
    'height_range' (a (low, high) pair used to clamp generated heights).
    After construction, highest/lowest/average heights and a sea level
    derived from sea_percent are available as attributes.
    """

    def __init__(self, params, debug=False):
        self.params = params
        self.size = params.get('size')
        self.grid = np.zeros((self.size, self.size))
        # Seed the four corners, then recursively displace midpoints.
        self.grid[0][0] = random.randint(0, 255)
        self.grid[(self.size - 1)][0] = random.randint(0, 255)
        self.grid[0][(self.size - 1)] = random.randint(0, 255)
        self.grid[(self.size - 1)][(self.size - 1)] = random.randint(0, 255)
        self._subdivide(0, 0, (self.size - 1), (self.size - 1))
        # Gather per-row statistics in one pass.
        avg = []
        row_maxima = []
        row_minima = []
        for g in self.grid:
            row_maxima.append(max(g))
            row_minima.append(min(g))
            avg.append((sum(g) / float(len(g))))
        self.highest_height = max(row_maxima)
        # BUG FIX: previously this was min() over the per-row *maxima*
        # (the smallest row maximum), not the lowest point of the map.
        self.lowest_height = min(row_minima)
        self.average_height = (sum(avg) / float(len(avg)))
        sea_percent = params.get('sea_percent')
        self.sealevel = round((self.average_height * ((sea_percent * 2) / 100)))
        if (sea_percent == 100):
            self.sealevel = 255
        if debug:
            print('Sea level at {} or {}%'.format(self.sealevel, sea_percent))

    def height_at(self, x, y):
        """Return the height stored at grid coordinate (x, y)."""
        return self.grid[x][y]

    def _adjust(self, xa, ya, x, y, xb, yb):
        """Set midpoint (x, y) of edge (xa, ya)-(xb, yb) if still unset."""
        if (self.grid[x][y] == 0):
            d = (math.fabs((xa - xb)) + math.fabs((ya - yb)))
            ROUGHNESS = self.params.get('roughness')
            # Endpoint average plus noise proportional to edge length.
            v = (((self.grid[xa][ya] + self.grid[xb][yb]) / 2.0) + (((random.random() - 0.5) * d) * ROUGHNESS))
            c = int((math.fabs(v) % 257))
            # Mirror border values to the opposite edge.
            # NOTE(review): these mirrored writes use the *unclamped* value;
            # confirm whether they should also respect height_range.
            if (y == 0):
                self.grid[x][(self.size - 1)] = c
            if ((x == 0) or (x == (self.size - 1))):
                if (y < (self.size - 1)):
                    self.grid[x][((self.size - 1) - y)] = c
            (range_low, range_high) = self.params.get('height_range')
            if (c < range_low):
                c = range_low
            elif (c > range_high):
                c = range_high
            self.grid[x][y] = c

    def _subdivide(self, x1, y1, x2, y2):
        """Recursively fill square (x1, y1)-(x2, y2): centre = clamped
        corner average, displace the four edge midpoints, recurse into
        the four quadrants.  Stops when the square is under 2x2."""
        if (not (((x2 - x1) < 2.0) and ((y2 - y1) < 2.0))):
            x = int(((x1 + x2) / 2))
            y = int(((y1 + y2) / 2))
            v = int(((((self.grid[x1][y1] + self.grid[x2][y1]) + self.grid[x2][y2]) + self.grid[x1][y2]) / 4))
            (range_low, range_high) = self.params.get('height_range')
            if (v < range_low):
                v = range_low
            elif (v > range_high):
                v = range_high
            self.grid[x][y] = v
            self._adjust(x1, y1, x, y1, x2, y1)
            self._adjust(x2, y1, x2, y, x2, y2)
            self._adjust(x1, y2, x, y2, x2, y2)
            self._adjust(x1, y1, x1, y, x1, y2)
            self._subdivide(x1, y1, x, y)
            self._subdivide(x, y1, x2, y)
            self._subdivide(x, y, x2, y2)
            self._subdivide(x1, y, x, y2)
class ManagedPartnerBusiness(AbstractObject):
    """Graph API "ManagedPartnerBusiness" node (generated-SDK style data holder).

    Field names and types mirror the API schema; no request logic lives here.
    """

    def __init__(self, api=None):
        super(ManagedPartnerBusiness, self).__init__()
        self._isManagedPartnerBusiness = True
        self._api = api

    class Field(AbstractObject.Field):
        # String constants naming the node's API fields.
        ad_account = 'ad_account'
        catalog_segment = 'catalog_segment'
        extended_credit = 'extended_credit'
        page = 'page'
        seller_business_info = 'seller_business_info'
        seller_business_status = 'seller_business_status'
        template = 'template'

    # Maps each field to its SDK type name for deserialization.
    _field_types = {'ad_account': 'AdAccount', 'catalog_segment': 'ProductCatalog', 'extended_credit': 'ManagedPartnerExtendedCredit', 'page': 'Page', 'seller_business_info': 'Object', 'seller_business_status': 'string', 'template': 'list<Object>'}

    # Fix: the method takes `cls` but lacked @classmethod, so calling it on the
    # class raised a missing-argument error; instance calls bound the instance
    # as `cls`. The decorator matches the SDK-wide convention and the return
    # value (an empty mapping) is unchanged either way.
    @classmethod
    def _get_field_enum_info(cls):
        """Return enum metadata for this node's fields (none defined)."""
        field_enum_info = {}
        return field_enum_info
class AlignmentDataset(Dataset):
    """Dataset over alignment pairs that supports multi-worker iteration.

    ``__iter__`` shards the index range across DataLoader workers so each
    worker yields a disjoint, contiguous slice of ``self.pairs``.
    Assumes ``__getitem__`` is provided elsewhere (subclass or mixin) —
    TODO confirm; it is not defined in this class.
    """

    def __init__(self, pairs, tokenizer):
        self.tokenizer = tokenizer
        self.pairs = pairs

    def __iter__(self):
        worker_info = torch.utils.data.get_worker_info()
        start = 0
        end = len(self.pairs)
        if worker_info is None:
            # Single-process data loading: yield every example.
            for i in range(end):
                yield self.__getitem__(i)
        else:
            # Split [start, end) into equal contiguous chunks, one per worker.
            # (Fix: the original assigned worker_id and num_workers twice;
            # the duplicates were dead code and have been removed.)
            num_workers = float(worker_info.num_workers)
            per_worker = int(math.ceil((end - start) / num_workers))
            iter_start = start + (worker_info.id * per_worker)
            iter_end = min(iter_start + per_worker, end)
            for i in range(iter_start, iter_end):
                yield self.__getitem__(i)
class OptionPlotoptionsTreegraphSonification(Options):
    """Generated mapping for Highcharts `plotOptions.treegraph.sonification`.

    NOTE(review): the getter/setter pairs below share one name with no
    @property/@<name>.setter decorators visible, so each second `def`
    overrides the first at class-creation time — this looks like decorators
    stripped by tooling; confirm against the generating source.
    """

    def contextTracks(self) -> 'OptionPlotoptionsTreegraphSonificationContexttracks':
        """Sub-options: context tracks played during sonification."""
        return self._config_sub_data('contextTracks', OptionPlotoptionsTreegraphSonificationContexttracks)

    def defaultInstrumentOptions(self) -> 'OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptions':
        """Sub-options: defaults applied to instrument tracks."""
        return self._config_sub_data('defaultInstrumentOptions', OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptions)

    def defaultSpeechOptions(self) -> 'OptionPlotoptionsTreegraphSonificationDefaultspeechoptions':
        """Sub-options: defaults applied to speech tracks."""
        return self._config_sub_data('defaultSpeechOptions', OptionPlotoptionsTreegraphSonificationDefaultspeechoptions)

    def enabled(self):
        """Getter: whether sonification is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter: enable or disable sonification."""
        self._config(flag, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsTreegraphSonificationPointgrouping':
        """Sub-options: how points are grouped for sonification."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsTreegraphSonificationPointgrouping)

    def tracks(self) -> 'OptionPlotoptionsTreegraphSonificationTracks':
        """Sub-options: the sonification tracks themselves."""
        return self._config_sub_data('tracks', OptionPlotoptionsTreegraphSonificationTracks)
def upgrade():
    """Alembic migration: add a NOT NULL `notice_key` column to privacy notice tables.

    Existing rows get a key derived from the notice name (trimmed, lowercased,
    spaces replaced with underscores); names are first validated as suitable
    for conversion into fides keys.
    """
    bind = op.get_bind()
    # Validate existing names before deriving keys from them.
    existing_history_names: ResultProxy = bind.execute(text('select name from privacynoticehistory;'))
    validate_fides_key_suitability(existing_history_names, 'privacynoticehistory')
    existing_notice_names: ResultProxy = bind.execute(text('select name from privacynotice;'))
    validate_fides_key_suitability(existing_notice_names, 'privacynotice')
    # Add the column as nullable first so existing rows can be backfilled.
    op.add_column('privacynotice', sa.Column('notice_key', sa.String(), nullable=True))
    op.add_column('privacynoticehistory', sa.Column('notice_key', sa.String(), nullable=True))
    # Backfill: snake_case the trimmed name.
    op.execute("update privacynoticehistory set notice_key = LOWER(REPLACE(TRIM(name), ' ', '_'));")
    op.execute("update privacynotice set notice_key = LOWER(REPLACE(TRIM(name), ' ', '_'));")
    # Only now enforce NOT NULL.
    op.alter_column('privacynotice', 'notice_key', nullable=False)
    op.alter_column('privacynoticehistory', 'notice_key', nullable=False)
def to_wei(number: Union[(int, float, str, decimal.Decimal)], unit: str) -> int:
    """Convert *number* denominated in *unit* (e.g. 'ether', 'gwei') to integer wei.

    Raises ValueError for an unknown unit or an out-of-range result, and
    TypeError for an unsupported number type.
    """
    if (unit.lower() not in units):
        raise ValueError(f"Unknown unit. Must be one of {'/'.join(units.keys())}")
    # Normalize to Decimal. Floats are stringified first so the Decimal keeps
    # the literal the caller wrote rather than the binary approximation.
    if (is_integer(number) or is_string(number)):
        d_number = decimal.Decimal(value=number)
    elif isinstance(number, float):
        d_number = decimal.Decimal(value=str(number))
    elif isinstance(number, decimal.Decimal):
        d_number = number
    else:
        raise TypeError('Unsupported type. Must be one of integer, float, or string')
    s_number = str(number)
    unit_value = units[unit.lower()]
    if (d_number == decimal.Decimal(0)):
        return 0
    if ((d_number < 1) and ('.' in s_number)):
        # Sub-unity value: scale it up by its number of fractional digits (and
        # scale the unit down) so the limited-precision context captures every
        # digit exactly.
        with localcontext() as ctx:
            multiplier = ((len(s_number) - s_number.index('.')) - 1)
            ctx.prec = multiplier
            d_number = (decimal.Decimal(value=number, context=ctx) * (10 ** multiplier))
        unit_value /= (10 ** multiplier)
    with localcontext() as ctx:
        # Generous precision so large unit conversions never round.
        ctx.prec = 999
        result_value = (decimal.Decimal(value=d_number, context=ctx) * unit_value)
    if ((result_value < MIN_WEI) or (result_value > MAX_WEI)):
        raise ValueError('Resulting wei value must be between 1 and 2**256 - 1')
    return int(result_value)
# NOTE(review): the original line began with a bare `.parametrize(` — the
# `@pytest.mark` prefix had been stripped, leaving a module-level syntax
# error. Restored here; the parameter data itself is unchanged (reformatted
# only).
@pytest.mark.parametrize(
    ('ignore_params', 'expected_result', 'flag_name', 'issue_code'),
    [
        ({'ignore_obsolete': (), 'ignore_unused': (), 'ignore_missing': ('hello', 'goodbye'), 'ignore_transitive': (), 'ignore_misplaced_dev': ()},
         {'DEP001': ('goodbye', 'hello', 'package')}, 'ignore-missing', 'DEP001'),
        ({'ignore_obsolete': ('hello', 'goodbye'), 'ignore_unused': (), 'ignore_missing': (), 'ignore_transitive': (), 'ignore_misplaced_dev': ()},
         {'DEP002': ('goodbye', 'hello', 'package')}, 'ignore-obsolete', 'DEP002'),
        ({'ignore_obsolete': (), 'ignore_unused': ('hello', 'goodbye'), 'ignore_missing': (), 'ignore_transitive': (), 'ignore_misplaced_dev': ()},
         {'DEP002': ('goodbye', 'hello', 'package')}, 'ignore-unused', 'DEP002'),
        ({'ignore_obsolete': (), 'ignore_unused': (), 'ignore_missing': (), 'ignore_transitive': ('hello', 'goodbye'), 'ignore_misplaced_dev': ()},
         {'DEP003': ('goodbye', 'hello', 'package')}, 'ignore-transitive', 'DEP003'),
        ({'ignore_obsolete': (), 'ignore_unused': (), 'ignore_missing': (), 'ignore_transitive': (), 'ignore_misplaced_dev': ('hello', 'goodbye')},
         {'DEP004': ('goodbye', 'hello', 'package')}, 'ignore-misplaced-dev', 'DEP004'),
    ],
)
def test_ignore_param_append(caplog: pytest.LogCaptureFixture, ignore_params: MutableMapping[str, tuple[str, ...]], expected_result: MutableMapping[str, tuple[str, ...]], flag_name: str, issue_code: str) -> None:
    """Legacy per-flag ignore lists merge into per-rule ignores and emit a deprecation warning."""
    result = get_value_for_per_rule_ignores_argument(per_rule_ignores={issue_code: ('package',)}, **ignore_params)
    # Compare order-insensitively: merged sequences carry no ordering contract.
    assert {k: sorted(v) for k, v in result.items()} == {k: sorted(v) for k, v in expected_result.items()}
    assert generate_deprecation_warning(flag_name=flag_name, issue_code=issue_code, sequence=('hello', 'goodbye')) in caplog.text
def _cmd_genemetrics(args):
    """CLI entry point: compute gene-level gain/loss metrics and write them out."""
    cnarr = read_cna(args.filename)
    if args.segment:
        segarr = read_cna(args.segment)
    else:
        segarr = None
    is_sample_female = verify_sample_sex(
        cnarr, args.sample_sex, args.male_reference, args.diploid_parx_genome
    )
    table = do_genemetrics(
        cnarr,
        segarr,
        args.threshold,
        args.min_probes,
        args.drop_low_coverage,
        args.male_reference,
        is_sample_female,
        args.diploid_parx_genome,
    )
    logging.info('Found %d gene-level gains and losses', len(table))
    write_dataframe(args.output, table)
class Config():
    """Snapshot of parsed CLI arguments, with a default-interface fallback."""

    # Attribute names copied verbatim from the parsed-args namespace.
    _COPIED_ATTRS = (
        'verbose', 'collect_hosts', 'filename', 'verbose_extra',
        'show_missed', 'raw', 'show_port', 'flt', 'both',
        'incoming', 'no_analysis',
    )

    def __init__(self, args):
        # Fall back to scapy's default interface when none was supplied.
        self.iface = args.iface or conf.iface
        for attr in self._COPIED_ATTRS:
            setattr(self, attr, getattr(args, attr))
def populate_filesystem(fst_bytes):
    """Parse a GameCube FST blob into a tree of GCFSTEntry nodes.

    Each FST entry is 12 bytes: byte 0 is the directory flag, the first u32
    also carries a 24-bit name offset in its low bytes, and the next two u32s
    are the entry's offset/length fields. The string table follows the last
    entry (root.length entries in total).
    """
    # Fix: the mask literal after `&` was missing in the original (a syntax
    # error). The low 24 bits of the first u32 are the name offset in the GC
    # FST format, so the standard 0x00FFFFFF mask is restored — confirm
    # against the project's FST reader if other masks are used elsewhere.
    NAME_OFFSET_MASK = 0x00FFFFFF
    ENTRY_SIZE = 12

    def _entry_at(offset):
        # Decode one 12-byte FST entry starting at `offset`.
        return GCFSTEntry(
            bool(fst_bytes[offset]),
            struct.unpack_from('>I', fst_bytes, offset)[0] & NAME_OFFSET_MASK,
            struct.unpack_from('>I', fst_bytes, offset + 4)[0],
            struct.unpack_from('>I', fst_bytes, offset + 8)[0],
        )

    root_dir = _entry_at(0)
    string_table_bytes = fst_bytes[(root_dir.length * ENTRY_SIZE):len(fst_bytes)]
    nodes_read = 1
    while nodes_read < root_dir.length:
        current_offset = nodes_read * ENTRY_SIZE
        new_entry = _entry_at(current_offset)
        root_dir.children.append(new_entry)
        # Each child reports how many entries it consumed (itself + descendants).
        nodes_read += new_entry.populate_children_recursive(root_dir, current_offset, fst_bytes, string_table_bytes)
    return root_dir
class OptionSeriesHeatmapSonificationContexttracksMappingVolume(Options):
    """Generated mapping for `series.heatmap.sonification.contextTracks.mapping.volume`.

    NOTE(review): getter/setter pairs share a name with no @property/@setter
    decorators visible, so each second `def` overrides the first — likely
    stripped decorators; confirm against the generating source.
    """

    def mapFunction(self):
        """Getter: mapping function name (default None)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Setter: mapping function."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Getter: data property the volume maps to (default None)."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Setter: data property to map to."""
        self._config(text, js_type=False)

    def max(self):
        """Getter: maximum mapped volume (default None)."""
        return self._config_get(None)

    def max(self, num: float):
        """Setter: maximum mapped volume."""
        self._config(num, js_type=False)

    def min(self):
        """Getter: minimum mapped volume (default None)."""
        return self._config_get(None)

    def min(self, num: float):
        """Setter: minimum mapped volume."""
        self._config(num, js_type=False)

    def within(self):
        """Getter: range the mapping is computed within (default None)."""
        return self._config_get(None)

    def within(self, value: Any):
        """Setter: range the mapping is computed within."""
        self._config(value, js_type=False)
def _handle_rgbgradient_dict(colors):
duration = _default_duration
gradient = []
if ('duration' in colors):
duration = colors['duration']
if ('colors' in colors):
for stop in colors['colors']:
color = stop['color']
if isinstance(color, str):
color = parse_color_string(color)
if ((not isinstance(color, (tuple, list))) or (len(color) != 3)):
raise ValueError(('Not a valid color %s' % str(color)))
for channel in color:
if ((not isinstance(channel, int)) or (channel < 0) or (channel > 255)):
raise ValueError(('Not a valid color %s' % str(color)))
gradient.append({'pos': (stop['pos'] if ('pos' in stop) else 0), 'color': color})
if ((len(gradient) < 14) and (gradient[(- 1)]['pos'] != 100)):
gradient.append({'pos': 100, 'color': gradient[0]['color']})
return (duration, gradient) |
def extractYaminatranslationsBlogspotCom(item):
    """Parse a Yaminatranslations feed item into a release message.

    Returns None for previews / items with no chapter info, a release message
    when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # Known tag -> (series name, translation type), checked in order.
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def parentheses_placeholder(text: str):
    """Replace each top-level parenthesized group in *text* with a numbered
    ``{n}`` placeholder and collect the groups' contents.

    Returns a parentheses_placeholder_data whose ``.text`` holds the input
    with placeholders substituted and whose ``.list`` holds each group's
    inner text (index 0 is an unused padding entry). Nested parenthesis
    characters themselves are not reproduced in the captured content.
    """
    output = parentheses_placeholder_data()
    output.list.append('')
    placeholder_index = 0
    depth = 0
    for char in text:
        if char == '(':
            depth += 1
            if depth == 1:
                # Entering a new top-level group: emit its placeholder and
                # start a fresh capture slot.
                output.text += '{%d}' % placeholder_index
                output.list.append('')
                placeholder_index += 1
        elif char == ')':
            depth -= 1
        elif depth == 0:
            output.text += char
        else:
            output.list[-1] += char
    return output
# NOTE(review): the original first line was a bare `.EventDecorator('MakeKronCode')`
# (a syntax error); the conventional `@PETSc.Log` prefix used throughout this
# codebase is restored here — confirm against the module's imports.
@PETSc.Log.EventDecorator('MakeKronCode')
def make_kron_code(Vc, Vf, t_in, t_out, mat_name, scratch):
    """Generate C snippets applying tensor-product (Kronecker) interpolation
    from coarse space Vc into fine space Vf.

    Returns (operator_decl, prolong_code, restrict_code, shapes): C array
    declarations for the 1D interpolators, the prolongation and restriction
    code bodies, and the work-array shapes. Only the decorator line was
    repaired; the function body is unchanged from the original.
    """
    operator_decl = []
    prolong_code = []
    restrict_code = []
    (_, celems, cshifts) = get_permutation_to_line_elements(Vc)
    (_, felems, fshifts) = get_permutation_to_line_elements(Vf)
    shifts = fshifts
    in_place = False
    if (len(felems) == len(celems)):
        # In-place kernels are possible when each fine group matches the
        # corresponding coarse group's total size.
        in_place = all((((len(fs) * Vf.value_size) == (len(cs) * Vc.value_size)) for (fs, cs) in zip(fshifts, cshifts)))
        psize = Vf.value_size
    if (not in_place):
        # One side must collapse to a single tensor-product element.
        if (len(celems) == 1):
            psize = Vc.value_size
            pelem = celems[0]
            perm_name = ('perm_%s' % t_in)
            celems = (celems * len(felems))
        elif (len(felems) == 1):
            shifts = cshifts
            psize = Vf.value_size
            pelem = felems[0]
            perm_name = ('perm_%s' % t_out)
            felems = (felems * len(celems))
        else:
            raise ValueError('Cannot assign fine to coarse DOFs')
        if (set(cshifts) == set(fshifts)):
            # Same axis permutations on both sides: replicate/accumulate input.
            csize = (Vc.value_size * Vc.finat_element.space_dimension())
            prolong_code.append(f'''
            for({IntType_c} j=1; j<{len(fshifts)}; j++)
                for({IntType_c} i=0; i<{csize}; i++)
                    {t_in}[j*{csize} + i] = {t_in}[i];
''')
            restrict_code.append(f'''
            for({IntType_c} j=1; j<{len(fshifts)}; j++)
                for({IntType_c} i=0; i<{csize}; i++)
                    {t_in}[i] += {t_in}[j*{csize} + i];
''')
        elif (pelem == celems[0]):
            # Axis permutations differ: emit explicit permute/ipermute calls.
            for k in range(len(shifts)):
                if ((Vc.value_size * len(shifts[k])) < Vf.value_size):
                    shifts[k] = (shifts[k] * (Vf.value_size // Vc.value_size))
            pshape = [e.space_dimension() for e in pelem]
            pargs = ', '.join(map(str, (pshape + ([1] * (3 - len(pshape))))))
            pstride = (psize * numpy.prod(pshape))
            perm = sum(shifts, tuple())
            perm_data = ', '.join(map(str, perm))
            operator_decl.append(f'''
            PetscBLASInt {perm_name}[{len(perm)}] = {{ {perm_data} }};
''')
            prolong_code.append(f'''
            for({IntType_c} j=1; j<{len(perm)}; j++)
                permute_axis({perm_name}[j], {pargs}, {psize}, {t_in}, {t_in}+j*{pstride});
''')
            restrict_code.append(f'''
            for({IntType_c} j=1; j<{len(perm)}; j++)
                ipermute_axis({perm_name}[j], {pargs}, {psize}, {t_in}, {t_in}+j*{pstride});
''')
    fskip = 0
    cskip = 0
    Jlen = 0
    Jmats = []
    fshapes = []
    cshapes = []
    has_code = False
    # Drop identity interpolators so the kernel can skip those axes.
    identity_filter = (lambda A: (numpy.array([]) if ((A.shape[0] == A.shape[1]) and numpy.allclose(A, numpy.eye(A.shape[0]))) else A))
    for (celem, felem, shift) in zip(celems, felems, shifts):
        if (len(felem) != len(celem)):
            raise ValueError('Fine and coarse elements do not have the same number of factors')
        if (len(felem) > 3):
            raise ValueError('More than three factors are not supported')
        nscal = (psize * len(shift))
        fshape = [e.space_dimension() for e in felem]
        cshape = [e.space_dimension() for e in celem]
        fshapes.append(((nscal,) + tuple(fshape)))
        cshapes.append(((nscal,) + tuple(cshape)))
        J = [identity_filter(evaluate_dual(ce, fe)).T for (ce, fe) in zip(celem, felem)]
        if any(((Jk.size and numpy.isclose(Jk, 0.0).all()) for Jk in J)):
            # A zero interpolator annihilates this group: just zero-fill.
            prolong_code.append(f'''
            for({IntType_c} i=0; i<{(nscal * numpy.prod(fshape))}; i++) {t_out}[i+{fskip}] = 0.0E0;
''')
            restrict_code.append(f'''
            for({IntType_c} i=0; i<{(nscal * numpy.prod(cshape))}; i++) {t_in}[i+{cskip}] = 0.0E0;
''')
        else:
            Jsize = numpy.cumsum(([Jlen] + [Jk.size for Jk in J]))
            Jptrs = [(('%s+%d' % (mat_name, Jsize[k])) if J[k].size else 'NULL') for k in range(len(J))]
            Jmats.extend(J)
            Jlen = Jsize[(- 1)]
            Jargs = ', '.join((Jptrs + (['NULL'] * (3 - len(Jptrs)))))
            fargs = ', '.join(map(str, (fshape + ([1] * (3 - len(fshape))))))
            cargs = ', '.join(map(str, (cshape + ([1] * (3 - len(cshape))))))
            if in_place:
                prolong_code.append(f'''
            kronmxv_inplace(0, {fargs}, {cargs}, {nscal}, {Jargs}, &{t_in}, &{t_out});
''')
                restrict_code.append(f'''
            kronmxv_inplace(1, {cargs}, {fargs}, {nscal}, {Jargs}, &{t_out}, &{t_in});
''')
            elif (shifts == fshifts):
                if (has_code and (psize > 1)):
                    raise ValueError('Single tensor product to many tensor products not implemented for vectors')
                prolong_code.append(f'''
            kronmxv(0, {fargs}, {cargs}, {nscal}, {Jargs}, {t_in}+{cskip}, {t_out}+{fskip}, {scratch}, {t_out}+{fskip});
''')
                restrict_code.append(f'''
            kronmxv(1, {cargs}, {fargs}, {nscal}, {Jargs}, {t_out}+{fskip}, {t_in}+{cskip}, {t_out}+{fskip}, {scratch});
''')
            else:
                if has_code:
                    raise ValueError('Many tensor products to single tensor product not implemented')
                fskip = 0
                prolong_code.append(f'''
            kronmxv(0, {fargs}, {cargs}, {nscal}, {Jargs}, {t_in}+{cskip}, {t_out}+{fskip}, {t_in}+{cskip}, {t_out}+{fskip});
''')
                restrict_code.append(f'''
            kronmxv(1, {cargs}, {fargs}, {nscal}, {Jargs}, {t_out}+{fskip}, {t_in}+{cskip}, {t_out}+{fskip}, {t_in}+{cskip});
''')
            has_code = True
        fskip += (nscal * numpy.prod(fshape))
        cskip += (nscal * numpy.prod(cshape))
    # Emit the packed 1D interpolators as exact hexadecimal float literals.
    Jdata = ', '.join(map(float.hex, chain.from_iterable((Jk.flat for Jk in Jmats))))
    operator_decl.append(f'''
            PetscScalar {mat_name}[{Jlen}] = {{ {Jdata} }};
''')
    operator_decl = ''.join(operator_decl)
    prolong_code = ''.join(prolong_code)
    # Restriction composes the transposed operations in reverse order.
    restrict_code = ''.join(reversed(restrict_code))
    shapes = [tuple(map(max, zip(*fshapes))), tuple(map(max, zip(*cshapes)))]
    if (fskip > numpy.prod(shapes[0])):
        shapes[0] = (fskip, 1, 1, 1)
    if (cskip > numpy.prod(shapes[1])):
        shapes[1] = (cskip, 1, 1, 1)
    return (operator_decl, prolong_code, restrict_code, shapes)
def conv2d(x, input_filters, output_filters, kernel_size, strides, relu=True, mode='REFLECT'):
    """Same-size 2-D convolution with instance normalization and optional ReLU.

    Padding is applied explicitly (reflect by default) so the convolution
    itself can use VALID padding yet preserve spatial size at stride 1.
    """
    kernel_shape = [kernel_size, kernel_size, input_filters, output_filters]
    weight = tf.Variable(
        tf.truncated_normal(kernel_shape, stddev=WEIGHT_INIT_STDDEV), name='weight')
    pad = kernel_size // 2
    padded = tf.pad(x, [[0, 0], [pad, pad], [pad, pad], [0, 0]], mode=mode)
    result = tf.nn.conv2d(padded, weight, strides=[1, strides, strides, 1], padding='VALID')
    result = instance_norm(result, output_filters)
    return tf.nn.relu(result) if relu else result
class TestAdvancedSearch(TestCase):
    """End-to-end test of dm+d advanced search against a fake matrixstore."""
    fixtures = ['dmd-objs']

    def test_advanced_search(self):
        # Seed prescribing data for three BNF codes covered by the fixtures.
        bnf_codes = ['0204000C0AAAAAA', '0204000C0BBAAAA', '0204000D0AAAAAA']
        factory = DataFactory()
        factory.create_prescribing_for_bnf_codes(bnf_codes)
        # Search AMPs whose name contains 'acebutolol', including unavailable.
        search = ['nm', 'contains', 'acebutolol']
        with patched_global_matrixstore_from_data_factory(factory):
            results = advanced_search(AMP, search, ['unavailable'])
        self.assertFalse(results['too_many_results'])
        # NOTE(review): the pk list below reads `[, ]` — the expected primary
        # keys were lost (this line is a syntax error as written) and must be
        # restored from the original test before this can run.
        self.assertCountEqual(results['objs'], AMP.objects.filter(pk__in=[, ]))
        # The analyse URL fragment should encode the matching BNF prefix.
        querystring = results['analyse_url'].split('#')[1]
        params = parse_qs(querystring)
        self.assertEqual(params, {'numIds': ['0204000C0AA'], 'denom': ['total_list_size']})
def select_cats_range(save_stats: dict[str, Any]) -> list[int]:
    """Prompt for cat ids (all / range / space-separated) and return them as a list."""
    prompt = (
        'Enter cat ids (Look up cro battle cats to find ids)'
        '(You can enter &all& to get all, a range e.g &1&-&50&, '
        'or ids separate by spaces e.g &5 4 7&):'
    )
    raw_input_value = user_input_handler.colored_input(prompt)
    return user_input_handler.get_range(raw_input_value, length=len(save_stats['cats']))
class OptionPlotoptionsFunnel3dOnpoint(Options):
    """Generated mapping for Highcharts `plotOptions.funnel3d.onPoint`.

    NOTE(review): the `id` getter/setter pair shares one name with no
    @property/@setter decorators visible — likely stripped decorators;
    confirm against the generating source.
    """

    def connectorOptions(self) -> 'OptionPlotoptionsFunnel3dOnpointConnectoroptions':
        """Sub-options: the connector between series and point."""
        return self._config_sub_data('connectorOptions', OptionPlotoptionsFunnel3dOnpointConnectoroptions)

    def id(self):
        """Getter: id of the point to connect to (default None)."""
        return self._config_get(None)

    def id(self, text: str):
        """Setter: id of the point to connect to."""
        self._config(text, js_type=False)

    def position(self) -> 'OptionPlotoptionsFunnel3dOnpointPosition':
        """Sub-options: positioning relative to the point."""
        return self._config_sub_data('position', OptionPlotoptionsFunnel3dOnpointPosition)
class OptionPlotoptionsColumnpyramidLabel(Options):
    """Generated mapping for Highcharts `plotOptions.columnpyramid.label`.

    NOTE(review): each getter/setter pair shares one name with no
    @property/@setter decorators visible, so the setter overrides the getter
    at class-creation time — likely stripped decorators; confirm against the
    generating source.
    """

    def boxesToAvoid(self):
        """Getter: boxes the label must avoid (default None)."""
        return self._config_get(None)

    def boxesToAvoid(self, value: Any):
        """Setter: boxes the label must avoid."""
        self._config(value, js_type=False)

    def connectorAllowed(self):
        """Getter: whether a connector line may be drawn (default False)."""
        return self._config_get(False)

    def connectorAllowed(self, flag: bool):
        """Setter: allow/disallow the connector line."""
        self._config(flag, js_type=False)

    def connectorNeighbourDistance(self):
        """Getter: distance below which a connector is used (default 24)."""
        return self._config_get(24)

    def connectorNeighbourDistance(self, num: float):
        """Setter: connector neighbour distance."""
        self._config(num, js_type=False)

    def enabled(self):
        """Getter: whether the series label is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter: enable/disable the series label."""
        self._config(flag, js_type=False)

    def format(self):
        """Getter: label format string (default 'undefined')."""
        return self._config_get('undefined')

    def format(self, text: str):
        """Setter: label format string."""
        self._config(text, js_type=False)

    def formatter(self):
        """Getter: label formatter callback (default 'undefined')."""
        return self._config_get('undefined')

    def formatter(self, value: Any):
        """Setter: label formatter callback."""
        self._config(value, js_type=False)

    def maxFontSize(self):
        """Getter: maximum label font size (default None)."""
        return self._config_get(None)

    def maxFontSize(self, num: float):
        """Setter: maximum label font size."""
        self._config(num, js_type=False)

    def minFontSize(self):
        """Getter: minimum label font size (default None)."""
        return self._config_get(None)

    def minFontSize(self, num: float):
        """Setter: minimum label font size."""
        self._config(num, js_type=False)

    def onArea(self):
        """Getter: whether the label sits inside the area (default None)."""
        return self._config_get(None)

    def onArea(self, flag: bool):
        """Setter: place the label inside/outside the area."""
        self._config(flag, js_type=False)

    def style(self) -> 'OptionPlotoptionsColumnpyramidLabelStyle':
        """Sub-options: CSS styles for the label."""
        return self._config_sub_data('style', OptionPlotoptionsColumnpyramidLabelStyle)

    def useHTML(self):
        """Getter: whether to render the label as HTML (default False)."""
        return self._config_get(False)

    def useHTML(self, flag: bool):
        """Setter: render the label as HTML or SVG."""
        self._config(flag, js_type=False)
class RecognitionService(Service):
    """Facade over face-recognition endpoints: recognize, face collection, subjects."""

    # Fix: both `options` parameters used a mutable `{}` default shared across
    # calls/instances; None is now the sentinel (annotation kept for callers —
    # semantically Optional[AllOptionsDict]).
    def __init__(self, api_key: str, domain: str, port: str, options: AllOptionsDict = None):
        if options is None:
            options = {}
        super().__init__(api_key, options)
        self.available_services = []
        self.recognize_face_from_images: RecognizeFaceFromImage = RecognizeFaceFromImage(domain=domain, port=port, api_key=api_key)
        self.face_collection: FaceCollection = FaceCollection(domain=domain, port=port, api_key=api_key, options=options)
        self.subjects: Subjects = Subjects(domain=domain, port=port, api_key=api_key, options=options)

    def get_available_functions(self) -> List[str]:
        """Return the names of services available on the server."""
        return self.available_services

    def recognize(self, image_path: str, options: AllOptionsDict = None) -> dict:
        """Recognize faces in the image at *image_path*.

        Per-call *options* override the instance options; an omitted or empty
        dict falls back to the instance-level options (original behavior).
        """
        if options is None:
            options = {}
        request = RecognizeFaceFromImage.Request(api_key=self.api_key, image_path=image_path)
        return self.recognize_face_from_images.execute(request, self.options if options == {} else options)

    def get_face_collection(self) -> FaceCollection:
        """Return the face-collection sub-service."""
        return self.face_collection

    def get_subjects(self) -> Subjects:
        """Return the subjects sub-service."""
        return self.subjects
def validate(config: Dict[str, Any], instance_id: str, logger: logging.Logger, expected_result_path: str, aggregated_result_path: Optional[str] = None) -> None:
    """Build a private-computation service from *config* and validate the
    instance's aggregated metrics against the expected results file."""
    pc_service = build_private_computation_service(
        config['private_computation'],
        config['mpc'],
        config['pid'],
        config.get('post_processing_handlers', {}),
        config.get('pid_post_processing_handlers', {}),
    )
    pc_service.validate_metrics(
        instance_id=instance_id,
        aggregated_result_path=aggregated_result_path,
        expected_result_path=expected_result_path,
    )
def check_file(filename: str) -> List[LintMessage]:
    """Format *filename* with usort+black (via ufmt) and report a patch if it changes.

    Returns an empty list when already formatted, one UFMT warning carrying
    the replacement text otherwise, or an error message on failure.
    """
    with open(filename, 'rb') as fh:
        original = fh.read().decode('utf-8')
    try:
        source_path = Path(filename)
        replacement = ufmt_string(
            path=source_path,
            content=original,
            usort_config=UsortConfig.find(source_path),
            black_config=make_black_config(source_path),
        )
        if original == replacement:
            # Already formatted — nothing to report.
            return []
        return [LintMessage(path=filename, line=None, char=None, code='UFMT', severity=LintSeverity.WARNING, name='format', original=original, replacement=replacement, description='Run `lintrunner -a` to apply this patch.')]
    except Exception as err:
        return [format_error_message(filename, err)]
def test_get_contract_factory_raises_insufficient_assets_error(w3):
    """A manifest lacking contract assets cannot produce a contract factory."""
    manifest = get_ethpm_spec_manifest('owned', 'v3.json')
    package = w3.pm.get_package_from_manifest(manifest)
    with pytest.raises(InsufficientAssetsError):
        package.get_contract_factory('Owned')
# NOTE(review): the original line began with a bare `.parametrize(` — the
# `@pytest.mark` prefix had been stripped, leaving a module-level syntax
# error. Restored; parameters unchanged.
@pytest.mark.parametrize('typ', [str, int, float])
def test_aliases_grib_paramid_mutiple_false(typ):
    """With multiple=False, aliases normalize scalars and 1-element sequences
    to a single value, and reject multi-element or empty sequences.

    (The function name keeps the original's "mutiple" typo so test selection
    and history stay stable.)
    """
    _131 = typ(131)
    aliases_grib_paramid = normalize('x', type=typ, aliases={'u': typ(131), 'v': typ(132)}, multiple=False)(func_x)
    # Alias names, raw values, strings, and 1-element sequences all normalize.
    assert aliases_grib_paramid('u') == _131
    assert aliases_grib_paramid(131) == _131
    assert aliases_grib_paramid('131') == _131
    assert aliases_grib_paramid(('131',)) == _131
    assert aliases_grib_paramid(['131']) == _131
    # Multi-element and empty sequences are rejected when multiple=False.
    with pytest.raises(TypeError):
        aliases_grib_paramid(['131', 'v'])
    with pytest.raises(TypeError):
        aliases_grib_paramid([])
    with pytest.raises(TypeError):
        aliases_grib_paramid(tuple([]))
def test_default():
    """MaskPercent score: image property and pixel value agree with the expected ratio."""
    score = scores.MaskPercent(band)
    newimg = score.compute(image, geometry=pol, scale=30)
    maskpercent_prop = newimg.get(score.name).getInfo()
    maskpercent_pix = tools.image.getValue(newimg, p, side='client')[score.name]
    # Fix: the original lines read `assert (x == 0), 5625` — i.e. the float
    # literal 0.5625 had been split so that `5625` became the assert *message*
    # and the comparison was against 0. Restored the intended value; confirm
    # 0.5625 against the fixture imagery.
    assert maskpercent_prop == 0.5625
    assert maskpercent_pix == 0.5625
def g2p(norm_text):
    """Convert normalized Japanese text into model inputs.

    Returns (phones, tones, word2ph): the phoneme sequence padded with '_'
    sentinels, per-phoneme tone ids, and the number of phones attributed to
    each input token (with 1 for each sentinel).
    """
    (sep_text, sep_kata, acc) = text2sep_kata(norm_text)
    # Tokenize each non-punctuation segment; punctuation passes through as-is.
    sep_tokenized = []
    for i in sep_text:
        if (i not in punctuation):
            sep_tokenized.append(tokenizer.tokenize(i))
        else:
            sep_tokenized.append([i])
    # Katakana -> phonemes, with long-vowel ('ー') handling.
    sep_phonemes = handle_long([kata2phoneme(i) for i in sep_kata])
    # Every phoneme must exist in the model's symbol inventory.
    for i in sep_phonemes:
        for j in i:
            assert (j in symbols), (sep_text, sep_kata, sep_phonemes)
    tones = align_tones(sep_phonemes, acc)
    # word2ph[k]: how many phones the k-th token's characters account for.
    word2ph = []
    for (token, phoneme) in zip(sep_tokenized, sep_phonemes):
        phone_len = len(phoneme)
        word_len = len(token)
        aaa = distribute_phone(phone_len, word_len)
        word2ph += aaa
    # Pad with the BOS/EOS placeholder '_' (tone 0, one phone each side).
    phones = ((['_'] + [j for i in sep_phonemes for j in i]) + ['_'])
    tones = (([0] + tones) + [0])
    word2ph = (([1] + word2ph) + [1])
    assert (len(phones) == len(tones))
    return (phones, tones, word2ph)
def test_dev_dependency_getter(tmp_path: Path) -> None:
    """PDM dev-dependency groups (tool.pdm.dev-dependencies) are extracted with
    their conditional/optional flags set correctly."""
    # NOTE(review): the original fixture string contained `# See = [` — the
    # PEP 621 URL and the `dependencies` key had been mangled together, making
    # the TOML unparseable. A valid [project] table is reconstructed below;
    # the assertions only exercise the dev groups, which are unchanged.
    fake_pyproject_toml = (
        '[project]\n'
        '# PEP 621 project metadata\n'
        '# See https://peps.python.org/pep-0621/\n'
        'dependencies = [\n'
        '    "qux",\n'
        '    "bar>=20.9",\n'
        '    "optional-foo[option]>=0.12.11",\n'
        '    "conditional-bar>=1.1.0; python_version < 3.11",\n'
        ']\n'
        '[tool.pdm.dev-dependencies]\n'
        'test = [\n'
        '    "qux",\n'
        '    "bar; python_version < 3.11"\n'
        ']\n'
        'tox = [\n'
        '    "foo-bar",\n'
        ']\n'
    )
    with run_within_dir(tmp_path):
        with Path('pyproject.toml').open('w') as f:
            f.write(fake_pyproject_toml)
        dev_dependencies = PDMDependencyGetter(Path('pyproject.toml')).get().dev_dependencies
    # Both dev groups are flattened: test (qux, bar) + tox (foo-bar).
    assert len(dev_dependencies) == 3
    assert dev_dependencies[0].name == 'qux'
    assert not dev_dependencies[0].is_conditional
    assert not dev_dependencies[0].is_optional
    assert 'qux' in dev_dependencies[0].top_levels
    assert dev_dependencies[1].name == 'bar'
    assert dev_dependencies[1].is_conditional
    assert not dev_dependencies[1].is_optional
    assert 'bar' in dev_dependencies[1].top_levels
    assert dev_dependencies[2].name == 'foo-bar'
    assert not dev_dependencies[2].is_conditional
    assert not dev_dependencies[2].is_optional
    assert 'foo_bar' in dev_dependencies[2].top_levels
def extractHainemakorutranslationsBlogspotCom(item):
    """Parse a Hainemakoru translations feed item into a release message.

    Returns None for previews, chapterless items, or news posts; a release
    message when a known tag matches; and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if 'news' in item['tags']:
        return None
    # (tag to match, series name, translation type) — checked in order.
    tagmap = [
        ('10000 STEPS', 'Level Up Just by Walking. In 10 Thousand Steps It Will Be Level 10000!', 'translated'),
        ('Level Up Just by Walking. In 10 Thousand Steps It Will Be Level 10000!', 'Level Up Just by Walking. In 10 Thousand Steps It Will Be Level 10000!', 'translated'),
        ('Ecstas Online', 'Ecstas Online', 'translated'),
        ('Is He A Hero? Yes', 'Is He A Hero? Yes', 'translated'),
        ('Obtaining the Strongest Cheat', 'Obtaining the Strongest Cheat', 'translated'),
        ('Humans are the Strongest Race', 'Humans are the Strongest Race', 'translated'),
        ('Humans are the Strongest Race [LN]', 'Humans are the Strongest Race [LN]', 'translated'),
    ]
    matched = next(
        ((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']),
        None,
    )
    if matched is None:
        return False
    name, tl_type = matched
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
def test_custom_help(tmpdir: Path) -> None:
    """The custom_help example app must print its templated --help output."""
    (result, _err) = run_python_script(['examples/configure_hydra/custom_help/my_app.py', ('hydra.run.dir=' + str(tmpdir)), 'hydra.job.chdir=True', '--help'])
    # NOTE(review): the expected text below appears to have had URLs stripped
    # during extraction (e.g. "Powered by Hydra (" with no URL/closing paren,
    # and a truncated "by appending" sentence) — restore from the original
    # test before trusting a failure of this comparison.
    expected = dedent(' == AwesomeApp ==\n\n This is AwesomeApp!\n You can choose a db driver by appending\n == Configuration groups ==\n Compose your configuration from those groups (db=mysql)\n\n db: mysql, postgresql\n\n\n == Config ==\n This is the config generated for this run.\n You can override everything, for example:\n python my_app.py db.user=foo db.pass=bar\n -------\n db:\n driver: mysql\n user: omry\n pass: secret\n\n -------\n\n Powered by Hydra ( Use --hydra-help to view Hydra specific help\n')
    assert_text_same(from_line=expected, to_line=result)
def reshape(array, operation):
    """Apply a shape-DSL operation (einops-like "a b -> b a") to a numpy array.

    Supports squeezing literal '1' axes and axis transposition via einsum;
    unflattening ('*' on the input side) is unsupported by design because
    concrete dimension sizes are unavailable here.
    """
    operation = _normalize(operation)
    if ('*' in operation.split('->')[0]):
        raise NotImplementedError('Unflatten operation not supported by design. Actual values for dimensions are not available to this function.')
    # Squeeze axes written as literal '1', right-to-left so earlier axis
    # indices remain valid as axes are removed.
    squeeze_operation = operation.split('->')[0].split()
    for (axis, op) in reversed(list(enumerate(squeeze_operation))):
        if (op == '1'):
            array = np.squeeze(array, axis=axis)
    # NOTE(review): replace('1', ' ') rewrites every '1' character — it would
    # also mangle axis names containing '1' (e.g. 'x1'); confirm the DSL
    # forbids such names.
    transposition_operation = operation.replace('1', ' ').replace('*', ' ')
    try:
        array = np.einsum(transposition_operation, array)
    except ValueError as e:
        # Attach the failing operation and input shape to the einsum error.
        msg = 'op: {}, shape: {}'.format(transposition_operation, np.shape(array))
        if (len(e.args) == 1):
            e.args = (((e.args[0] + '\n\n') + msg),)
        else:
            print(msg)
        raise
    # Final reshape to the target spec (may flatten via '*' on the output side).
    source = transposition_operation.split('->')[(- 1)]
    target = operation.split('->')[(- 1)]
    return _only_reshape(array, source, target)
class Choices(metaclass=ChoicesMeta):
    """Declarative choices container: public class attributes become (value, label) pairs."""

    def choices(cls):
        # Yield (value, label) for every public, non-callable attribute.
        # Tuple attributes are taken as explicit (value, label) pairs; other
        # values are paired with their attribute name as the label.
        for attr_name in dir(cls):
            if attr_name.startswith('_'):
                continue
            if (attr_name in ['keys', 'choices']):
                continue
            # NOTE(review): dir() also lists inherited attributes, which are
            # absent from cls.__dict__ and would raise KeyError here — confirm
            # whether subclass inheritance of choice values is meant to work.
            value = cls.__dict__[attr_name]
            if (not callable(value)):
                if isinstance(value, tuple):
                    (yield value)
                else:
                    (yield (value, attr_name))

    def keys(cls):
        """Return only the choice values (first element of each pair)."""
        return [choice[0] for choice in cls.choices()]
class InputCheckbox(Html.Html):
    """HTML ``<input type="checkbox">`` component.

    The initial checked state comes from *flag*, optionally overridden by a
    previously recorded page input value stored under the same html_code.
    """
    name = 'Checkbox'

    def __init__(self, page: primitives.PageModel, flag, label, group_name, width, height, html_code, options, attrs, profile):
        # A stored page input ('true'/'false' string) overrides the given flag.
        if (html_code in page.inputs):
            page.inputs[html_code] = (True if (page.inputs[html_code] == 'true') else False)
            if page.inputs[html_code]:
                flag = True
        if flag:
            attrs['checked'] = flag
        super(InputCheckbox, self).__init__(page, {'value': flag}, html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height})
        self.set_attrs(attrs={'type': 'checkbox'})
        # group_name lets several checkboxes share one form field name.
        if (group_name is not None):
            self.attr['name'] = group_name
        self.set_attrs(attrs=attrs)
        self.css({'cursor': 'pointer', 'display': 'inline-block', 'vertical-align': 'middle', 'margin-left': '2px'})
        self.style.css.height = Defaults.LINE_HEIGHT
        self._label = (label or '')
        self.style.add_classes.div.no_focus_outline()

    def dom(self) -> JsHtmlField.Check:
        """Return (lazily creating) the DOM helper for this checkbox.

        NOTE(review): accessors with this lazy-cache shape are usually
        @property in this component family, but no decorator is visible here —
        confirm whether callers use ``self.dom`` or ``self.dom()``.
        """
        if (self._dom is None):
            self._dom = JsHtmlField.Check(self, page=self.page)
        return self._dom

    def js(self) -> JsComponents.Radio:
        """Return (lazily creating) the JS helper (Radio component API).

        NOTE(review): same possible stripped-@property situation as ``dom``.
        """
        if (self._js is None):
            self._js = JsComponents.Radio(self, page=self.page)
        return self._js

    def __str__(self):
        """Render the component's HTML string."""
        return ('<input %(strAttr)s>%(label)s' % {'strAttr': self.get_attrs(css_class_names=self.style.get_classes()), 'label': self._label})
class ExaSQLImportThread(ExaSQLThread):
    """Builds and executes an Exasol ``IMPORT ... FROM CSV`` statement for an
    HTTP-transport import, applying the optional CSV parameters."""

    def __init__(self, connection, compression, table, import_params):
        super().__init__(connection, compression)
        self.table = table
        # CSV import parameters: comment, encoding, null, skip, trim,
        # row_separator, column_separator, column_delimiter.
        self.params = import_params

    def run_sql(self):
        """Assemble the IMPORT statement from ``self.params`` and execute it."""
        table_ident = self.connection.format.default_format_ident(self.table)
        parts = []
        comment = self.params.get('comment')
        if comment:
            # A '*/' inside the comment would break out of the SQL block comment.
            if '*/' in comment:
                raise ValueError('Invalid comment, cannot contain */')
            parts.append(f'/*{comment}*/')
        parts.append(f'IMPORT INTO {table_ident}{self.build_columns_list()} FROM CSV')
        parts.extend(self.build_file_list())
        if self.params.get('encoding'):
            parts.append(f"ENCODING = {self.connection.format.quote(self.params['encoding'])}")
        if self.params.get('null'):
            parts.append(f"NULL = {self.connection.format.quote(self.params['null'])}")
        if self.params.get('skip'):
            parts.append(f"SKIP = {self.connection.format.safe_decimal(self.params['skip'])}")
        if self.params.get('trim'):
            trim = str(self.params['trim']).upper()
            # Idiom fix: membership test replaces the chained != comparisons.
            if trim not in ('TRIM', 'LTRIM', 'RTRIM'):
                raise ValueError('Invalid value for import parameter TRIM: ' + trim)
            parts.append(trim)
        if self.params.get('row_separator'):
            parts.append(f"ROW SEPARATOR = {self.connection.format.quote(self.params['row_separator'])}")
        if self.params.get('column_separator'):
            parts.append(f"COLUMN SEPARATOR = {self.connection.format.quote(self.params['column_separator'])}")
        if self.params.get('column_delimiter'):
            parts.append(f"COLUMN DELIMITER = {self.connection.format.quote(self.params['column_delimiter'])}")
        self.connection.execute('\n'.join(parts))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.