code stringlengths 281 23.7M |
|---|
@pytest.mark.django_db
def test_is_registered_any_way_with_is_attendee_true_should_return_true(mocker, user1, event1):
    """is_registered_any_way must short-circuit: once is_attendee reports True,
    is_registered is never consulted.

    BUG FIX: the decorator line was garbled to ``.django_db`` (a SyntaxError);
    restored to ``@pytest.mark.django_db`` as the test touches the ORM fixtures.
    """
    mock_is_attendee = mocker.patch('manager.templatetags.filters.is_attendee')
    mock_is_registered = mocker.patch('manager.templatetags.filters.is_registered')
    mock_is_attendee.return_value = True
    mock_is_registered.return_value = False
    assert filters.is_registered_any_way(user1, event1.event_slug)
    assert mock_is_attendee.called
    assert not mock_is_registered.called
    mock_is_attendee.assert_called_once_with(user1, event1.event_slug)
def make_the_menu():
    """Assemble the menu hierarchy and return the root menu object."""
    # Attach sub-groups to their parent groups first.
    machine_learning.groups.extend([top, classic])
    incremental_learning.groups.extend([classification, clustering, regression])
    # Then hang every top-level group off the root, in display order.
    the_menu.groups.extend([
        data_sources,
        data_manipulation,
        data_transformation,
        incremental_learning,
        machine_learning,
        model_evaluation,
    ])
    return the_menu
def test_count_values_in_summary(backend_db, stats_update_db):
    """count_values_in_summary tallies plugin summary entries, split by
    firmware vs. included objects, and honors the optional q_filter."""
    # One firmware with a parent/child file-object chain; each carries a 'foo'
    # analysis with distinct summary values ('s4' appears twice among the FOs).
    (fw, parent_fo, child_fo) = create_fw_with_parent_and_child()
    fw.processed_analysis = {'foo': generate_analysis_entry(summary=['s1', 's2'])}
    parent_fo.processed_analysis = {'foo': generate_analysis_entry(summary=['s3', 's4'])}
    child_fo.processed_analysis = {'foo': generate_analysis_entry(summary=['s4'])}
    backend_db.add_object(fw)
    backend_db.add_object(parent_fo)
    backend_db.add_object(child_fo)
    # Unknown plugin -> empty result.
    assert (stats_update_db.count_values_in_summary('plugin that did not run', firmware=True) == [])
    # firmware=True counts only the firmware's own summary; q_filter matching
    # the firmware's vendor must not change that.
    assert (stats_update_db.count_values_in_summary('foo', firmware=True) == [('s1', 1), ('s2', 1)])
    assert (stats_update_db.count_values_in_summary('foo', firmware=True, q_filter={'vendor': fw.vendor}) == [('s1', 1), ('s2', 1)])
    # firmware=False aggregates over the included file objects ('s4' counted twice).
    assert (stats_update_db.count_values_in_summary('foo', firmware=False) == [('s3', 1), ('s4', 2)])
    assert (stats_update_db.count_values_in_summary('foo', firmware=False, q_filter={'vendor': fw.vendor}) == [('s3', 1), ('s4', 2)])
    # A q_filter that matches nothing yields no counts.
    assert (stats_update_db.count_values_in_summary('foo', firmware=False, q_filter={'vendor': 'different'}) == [])
class PhoneNumber(str):
    """``str`` subclass validating E.164 phone numbers (pydantic custom type).

    BUG FIX: both methods were missing ``@classmethod``. pydantic invokes
    ``__get_validators__`` and the yielded validators on the class, so without
    the decorator ``cls`` would have been bound to the first real argument.
    """

    @classmethod
    def __get_validators__(cls) -> Generator:
        # pydantic hook: yields the validator callables for this type.
        yield cls.validate

    @classmethod
    def validate(cls, value: str) -> str:
        """Return *value* unchanged if empty or a valid E.164 number; raise ValueError otherwise."""
        if value == '':
            # Empty string is explicitly allowed (optional field semantics).
            return ''
        max_length = 16  # '+' plus at most 15 digits
        min_length = 9
        pattern = regex('^\\+[1-9]\\d{1,14}$')  # E.164: leading '+', no leading 0
        if (len(value) > max_length) or (len(value) < min_length) or (not pattern.search(value)):
            raise ValueError("Phone number must be formatted in E.164 format, i.e. '+'.")
        return value
def atomic_write(path, mode='wt', permissions=None, file_factory=None, **kwargs):
    """Generator (context-manager style) that writes *path* atomically.

    Writes go to a NamedTemporaryFile in the target directory; only after the
    caller's block completes without raising is the temp file renamed onto
    *path* and given *permissions*.  ``path == '-'`` yields ``sys.stdout``
    instead.  NOTE(review): presumably wrapped with
    ``@contextlib.contextmanager`` at the definition site — confirm.

    BUG FIX: the bare ``except:`` is now the explicit ``except BaseException:``
    (identical semantics, including KeyboardInterrupt/SystemExit, but no longer
    hides the intent from linters and readers).
    """
    if permissions is None:
        permissions = apply_umask()
    if path == '-':
        # Convention: '-' means stdout; nothing to make atomic there.
        yield sys.stdout
    else:
        # Temp file must live in the same directory so os.rename() is atomic
        # (same filesystem).
        base_dir = os.path.dirname(path)
        kwargs['suffix'] = os.path.basename(path)
        tf = tempfile.NamedTemporaryFile(dir=base_dir, mode=mode, delete=False, **kwargs)
        if file_factory is not None:
            # Caller wants a custom file object over the same temp path.
            tf.close()
            tf = file_factory(tf.name)
        try:
            with tf:
                yield tf
            # Reached only if the caller's block did not raise.
            os.rename(tf.name, path)
            os.chmod(path, permissions)
        except BaseException:
            # Remove the orphaned temp file on any failure, then re-raise.
            os.remove(tf.name)
            raise
def generateCLM1(iterationsMap, iteration, t):
    """Build an EiffelConfidenceLevelModifiedEvent (CLM1) for *iteration*,
    linking the artifact as SUBJECT and the four TCF events as CAUSEs; the
    confidence value is derived from the TCF verdicts."""
    msg = generateGenericMessage('EiffelConfidenceLevelModifiedEvent', t, '1.0.0', 'CLM1', iteration)
    events = iterationsMap[iteration]
    link(msg, events['ArtC1'], 'SUBJECT')
    tcf_events = [events[key] for key in ('TCF1', 'TCF2', 'TCF3', 'TCF4')]
    for tcf in tcf_events:
        link(msg, tcf, 'CAUSE')
    msg['data']['name'] = 'readyForRelease'
    msg['data']['value'] = getOutcomeValuesFromVerdicts(tcf_events, 'SUCCESS', 'FAILURE')
    return msg
def test_delayed(tmp_path: Path) -> None:
    """DelayedSubmission: not done until result() runs, and the done flag
    survives a dump/load round trip."""
    delayed = utils.DelayedSubmission(_three_time, 4)
    assert (not delayed.done())
    # Calling result() executes the wrapped callable (3 * 4).
    assert (delayed.result() == 12)
    assert delayed.done()
    # Round-trip through pickle on disk.
    delayed_pkl = (tmp_path / 'test_delayed.pkl')
    delayed.dump(delayed_pkl)
    delayed2 = utils.DelayedSubmission.load(delayed_pkl)
    assert delayed2.done()
class OptionSeriesScatter3dSonificationPointgrouping(Options):
    """Accessors for the scatter3d ``sonification.pointGrouping`` options.

    BUG FIX: each option appeared twice as plain methods with the same name,
    so the "setter" silently shadowed the "getter".  Restored as
    ``@property`` / ``@<name>.setter`` pairs (getter returns the configured
    value falling back to the documented default; setter records the value).
    """

    @property
    def algorithm(self):
        # Grouping algorithm; default 'minmax'.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Point grouping is enabled by default.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Timespan (ms) to group points within; default 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Point property used for grouping; default 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def replace_file(path: PathIn, src: PathIn, *, autodelete: bool=False) -> None:
    """Replace the file at *path* with a copy of *src*.

    The copy is staged under a unique temporary name in the destination
    directory, then swapped in via renames so the destination never holds a
    partially-written file.  With ``autodelete=True`` the source file is
    removed afterwards.
    """
    path = _get_path(path)
    src = _get_path(src)
    assert_not_dir(path)
    assert_file(src)
    if (path == src):
        # Replacing a file with itself is a no-op.
        return
    make_dirs_for_file(path)
    (dirpath, filename) = split_filepath(path)
    (_, extension) = split_filename(filename)
    # Stage the copy next to the destination under a unique temp name.
    temp_filename = get_unique_name(dirpath, extension=extension)
    temp_dest = join_path(dirpath, temp_filename)
    copy_file(path=src, dest=temp_dest, overwrite=False)
    if exists(path):
        # Swap dance: move the old file aside, move the staged copy into
        # place, then delete the old file.  NOTE: temp_filename is re-generated
        # here, so temp_path names the parked OLD file, not the staged copy.
        temp_filename = get_unique_name(dirpath, extension=extension)
        temp_path = join_path(dirpath, temp_filename)
        rename_file(path=path, name=temp_filename)
        rename_file(path=temp_dest, name=filename)
        remove_file(path=temp_path)
    else:
        rename_file(path=temp_dest, name=filename)
    if autodelete:
        remove_file(path=src)
class ListUserSchema(PaginatedSchema, SearchableSchema):
    """Querystring schema for listing users: optional name filter plus
    comma-splittable multi-value filters (groups/updates/packages)."""
    name = colander.SchemaNode(colander.String(), location='querystring', missing=None)
    # Sequence nodes accept a scalar too; util.splitter turns 'a,b' into ['a', 'b'].
    groups = Groups(colander.Sequence(accept_scalar=True), location='querystring', missing=None, preparer=[util.splitter])
    updates = Updates(colander.Sequence(accept_scalar=True), location='querystring', missing=None, preparer=[util.splitter])
    packages = Packages(colander.Sequence(accept_scalar=True), location='querystring', missing=None, preparer=[util.splitter])
class TestYaffsUnpacker(TestUnpackerBase):
    """The YAFFS unpacker is selected for filesystem/yaffs and unpacks both
    endiannesses of the standard test image set."""
    def test_unpacker_selection_generic(self):
        self.check_unpacker_selection('filesystem/yaffs', 'YAFFS')
    def test_extraction_big_endian(self):
        self.check_unpacking_of_standard_unpack_set(os.path.join(TEST_DATA_DIR, 'yaffs2_be.img'), additional_prefix_folder='')
    def test_extraction_little_endian(self):
        self.check_unpacking_of_standard_unpack_set(os.path.join(TEST_DATA_DIR, 'yaffs2_le.img'), additional_prefix_folder='')
def extractCalicoxTabby(item):
    """Parse a feed item from the Calicox Tabby site.

    Returns a release message for the 'Meow Meow Meow' series, None for
    previews/unparseable titles, and False for anything unrecognized.
    """
    chp, vol, frag = extractChapterVolFragment(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    if 'Meow Meow Meow' in item['tags']:
        return buildReleaseMessageWithType(item, 'Meow Meow Meow', vol, chp, frag=frag)
    return False
class OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Accessors for the waterfall highpass-filter mapping sub-options."""

    def frequency(self) -> 'OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Return the nested frequency option container."""
        sub_options = self._config_sub_data('frequency', OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
        return sub_options

    def resonance(self) -> 'OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Return the nested resonance option container."""
        sub_options = self._config_sub_data('resonance', OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingHighpassResonance)
        return sub_options
class OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Accessors for the lollipop highpass-filter mapping sub-options."""

    def frequency(self) -> 'OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Return the nested frequency option container."""
        sub_options = self._config_sub_data('frequency', OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
        return sub_options

    def resonance(self) -> 'OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Return the nested resonance option container."""
        sub_options = self._config_sub_data('resonance', OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMappingHighpassResonance)
        return sub_options
class Migration(migrations.Migration):
    """Add FIPS-code and county-name text columns to subaward_search."""
    dependencies = [('search', '0035_add_county_fips_code_to_awardsearch_transactionsearch')]
    # All new columns are nullable TextFields, so no data backfill is required.
    operations = [migrations.AddField(model_name='subawardsearch', name='legal_entity_county_fips', field=models.TextField(blank=True, null=True)), migrations.AddField(model_name='subawardsearch', name='legal_entity_state_fips', field=models.TextField(blank=True, null=True)), migrations.AddField(model_name='subawardsearch', name='place_of_perform_county_fips', field=models.TextField(blank=True, null=True)), migrations.AddField(model_name='subawardsearch', name='place_of_perform_state_fips', field=models.TextField(blank=True, null=True)), migrations.AddField(model_name='subawardsearch', name='pop_county_name', field=models.TextField(blank=True, null=True))]
class PerfettoConfig():
    """Assembles a Perfetto trace-config text from requested trace *types*
    ('memory', 'battery', 'gpu', 'cpu') and an *options* dict of overrides.

    BUG FIX: ``MAX_FILE_SIZE_BYTES_DEFAULT`` had no right-hand side (a
    SyntaxError).  A 100 MB placeholder is assigned below.
    """

    ADAPTIVE_SAMPLING_SHMEM_THRESHOLD_DEFAULT = 32746
    BUFFER_SIZE_KB_DEFAULT = (256 * 1024)
    BUFFER_SIZE2_KB_DEFAULT = (2 * 1024)
    SHMEM_SIZE_BYTES_DEFAULT = (16384 * 4096)
    SAMPLING_INTERVAL_BYTES_DEFAULT = 4096
    DUMP_INTERVAL_MS_DEFAULT = 1000
    BATTERY_POLL_MS_DEFAULT = 1000
    CPU_POLL_MS_DEFAULT = 1000
    # TODO(review): original default lost in transit; 100 MB is a placeholder.
    MAX_FILE_SIZE_BYTES_DEFAULT = 100000000

    def __init__(self, types: List[str], options: Dict[(str, Any)], *, app_name: Optional[str]='program'):
        self.types = types
        self.options = options
        self.app_name = app_name

    def GeneratePerfettoConfig(self, *, advanced_support: bool=False) -> str:
        """Return the textproto config, enabling sections per self.types."""
        # Every optional section defaults to empty and is filled in only when
        # the corresponding trace type / option is requested.
        android_log_config = ''
        cpu_scheduling_details_ftrace_config = ''
        cpu_ftrace_config = ''
        cpu_sys_stats_config = ''
        cpu_syscalls_ftrace_config = ''
        gpu_ftrace_config = ''
        gpu_mem_total_frace_config = ''
        gpu_memory_config = ''
        heapprofd_config = ''
        linux_ftrace_config = ''
        linux_process_stats_config = ''
        power_config = ''
        power_ftrace_config = ''
        power_suspend_resume_config = ''
        track_event_config = ''
        app_name = self.options.get('app_name', self.app_name)
        buffer_size_kb = self.options.get('buffer_size_kb', self.BUFFER_SIZE_KB_DEFAULT)
        buffer_size2_kb = self.options.get('buffer_size2_kb', self.BUFFER_SIZE2_KB_DEFAULT)
        max_file_size_bytes = self.options.get('max_file_size_bytes', self.MAX_FILE_SIZE_BYTES_DEFAULT)
        if self.options.get('include_android_log', False):
            android_log_config = ANDROID_LOG_CONFIG
        if ('memory' in self.types):
            shmem_size_bytes = self.options.get('shmem_size_bytes', self.SHMEM_SIZE_BYTES_DEFAULT)
            adaptive_sampling_shmem_threshold = self.options.get('adaptive_sampling_shmem_threshold', self.ADAPTIVE_SAMPLING_SHMEM_THRESHOLD_DEFAULT)
            # Adaptive sampling is only emitted when advanced support is on.
            adaptive_sampling_shmem_threshold_config = (f'''            adaptive_sampling_shmem_threshold: {adaptive_sampling_shmem_threshold}
''' if advanced_support else '')
            all_heaps_config = ('            all_heaps: true\n' if self.options.get('all_heaps', False) else '')
            sampling_interval_bytes = self.options.get('sampling_interval_bytes', self.SAMPLING_INTERVAL_BYTES_DEFAULT)
            dump_interval_ms = self.options.get('dump_interval_ms', self.DUMP_INTERVAL_MS_DEFAULT)
            # Phase defaults to the dump interval when not given explicitly.
            dump_phase_ms = self.options.get('dump_phase_ms', dump_interval_ms)
            heapprofd_config = HEAPPROFD_CONFIG.format(all_heaps_config=all_heaps_config, shmem_size_bytes=shmem_size_bytes, adaptive_sampling_shmem_threshold_config=adaptive_sampling_shmem_threshold_config, sampling_interval_bytes=sampling_interval_bytes, dump_interval_ms=dump_interval_ms, dump_phase_ms=dump_phase_ms, app_name=app_name)
        if ('battery' in self.types):
            battery_poll_ms = self.options.get('battery_poll_ms', self.BATTERY_POLL_MS_DEFAULT)
            power_config = POWER_CONFIG.format(battery_poll_ms=battery_poll_ms)
            power_ftrace_config = POWER_FTRACE_CONFIG
            power_suspend_resume_config = POWER_SUSPEND_RESUME_CONFIG
        if ('gpu' in self.types):
            gpu_mem_total_frace_config = GPU_MEM_TOTAL_FTRACE_CONFIG
            gpu_memory_config = GPU_MEMORY_CONFIG
            gpu_ftrace_config = GPU_FTRACE_CONFIG.format(gpu_mem_total_frace_config=gpu_mem_total_frace_config)
        if ('cpu' in self.types):
            # Clamp the poll interval to Perfetto's 100 ms minimum.
            cpu_poll_ms = max(self.options.get('cpu_poll_ms', self.CPU_POLL_MS_DEFAULT), 100)
            log_cpu_scheduling_details = self.options.get('log_cpu_scheduling_details', True)
            if self.options.get('log_coarse_cpu_usage', False):
                cpu_sys_stats_config = CPU_SYS_STATS_CONFIG.format(cpu_poll_ms=cpu_poll_ms)
            if self.options.get('log_cpu_sys_calls', False):
                cpu_syscalls_ftrace_config = CPU_SYSCALLS_FTRACE_CONFIG
            if log_cpu_scheduling_details:
                cpu_scheduling_details_ftrace_config = CPU_SCHEDULING_DETAILS_FTRACE_CONFIG
                linux_process_stats_config = LINUX_PROCESS_STATS_CONFIG.format(cpu_poll_ms=cpu_poll_ms)
            cpu_ftrace_config = CPU_FTRACE_CONFIG
            power_suspend_resume_config = POWER_SUSPEND_RESUME_CONFIG
        # The ftrace data source is shared by the battery/gpu/cpu sections.
        if {'battery', 'gpu', 'cpu'}.intersection(self.types):
            linux_ftrace_config = LINUX_FTRACE_CONFIG.format(app_name=app_name, cpu_ftrace_config=cpu_ftrace_config, cpu_scheduling_details_ftrace_config=cpu_scheduling_details_ftrace_config, cpu_syscalls_ftrace_config=cpu_syscalls_ftrace_config, gpu_ftrace_config=gpu_ftrace_config, power_ftrace_config=power_ftrace_config, power_suspend_resume_config=power_suspend_resume_config)
        return PERFETTO_CONFIG_TEMPLATE.format(max_file_size_bytes=max_file_size_bytes, buffer_size_kb=buffer_size_kb, buffer_size2_kb=buffer_size2_kb, android_log_config=android_log_config, cpu_sys_stats_config=cpu_sys_stats_config, gpu_memory_config=gpu_memory_config, heapprofd_config=heapprofd_config, linux_ftrace_config=linux_ftrace_config, linux_process_stats_config=linux_process_stats_config, power_config=power_config, track_event_config=track_event_config)
class NodeClient():
    """Client side of the ACN node IPC protocol.

    Sends AEA envelopes over an ``IPCChannel`` and waits for per-send status
    confirmations from the node.

    BUG FIX: ``make_acn_envelope_message`` took ``envelope`` as its only
    parameter but is invoked as ``self.make_acn_envelope_message(envelope)``,
    which would have bound the instance to ``envelope`` and raised a
    TypeError.  It is now a ``@staticmethod``.
    """

    ACN_ACK_TIMEOUT = 5  # seconds to wait for the node's status ack

    def __init__(self, pipe: IPCChannel, agent_record: AgentRecord) -> None:
        self.pipe = pipe
        self.agent_record = agent_record
        # Future resolved by read_envelope() when a 'status' performative arrives.
        self._wait_status: Optional[asyncio.Future] = None

    async def connect(self) -> bool:
        """Open the underlying pipe; True on success."""
        return await self.pipe.connect()

    async def send_envelope(self, envelope: Envelope) -> None:
        """Send *envelope* and wait for a SUCCESS status ack; raise otherwise."""
        self._wait_status = asyncio.Future()
        buf = self.make_acn_envelope_message(envelope)
        await self._write(buf)
        status = await self.wait_for_status()
        if status.code != int(AcnMessage.StatusBody.StatusCode.SUCCESS):
            raise ValueError(f'failed to send envelope. got error confirmation: {status.code}')

    async def wait_for_status(self) -> Any:
        """Await the pending status; ValueError if none pending or on timeout."""
        if self._wait_status is None:
            raise ValueError('value failed!')
        try:
            status = await asyncio.wait_for(self._wait_status, timeout=self.ACN_ACK_TIMEOUT)
            return status
        except asyncio.TimeoutError:
            if not self._wait_status.done():
                # Poison the future so a late result cannot sneak in.
                self._wait_status.set_exception(Exception('Timeout'))
            await asyncio.sleep(0)  # yield so the exception is consumed
            raise ValueError('acn status await timeout!')
        finally:
            self._wait_status = None

    @staticmethod
    def make_acn_envelope_message(envelope: Envelope) -> bytes:
        """Serialize *envelope* into an AcnMessage aea_envelope payload."""
        acn_msg = acn_pb2.AcnMessage()
        performative = acn_pb2.AcnMessage.Aea_Envelope_Performative()
        performative.envelope = envelope.encode()
        acn_msg.aea_envelope.CopyFrom(performative)
        buf = acn_msg.SerializeToString()
        return buf

    async def read_envelope(self) -> Optional[Envelope]:
        """Read messages until an envelope arrives; None when the pipe closes.

        Status performatives received meanwhile resolve the pending
        send-status future instead of being returned.
        """
        while True:
            buf = await self._read()
            if not buf:
                # Pipe closed by peer.
                return None
            try:
                acn_msg = acn_pb2.AcnMessage()
                acn_msg.ParseFromString(buf)
            except Exception as e:
                await self.write_acn_status_error(f'Failed to parse acn message {e}', status_code=AcnMessage.StatusBody.StatusCode.ERROR_DECODE)
                raise ValueError(f'Error parsing acn message: {e}') from e
            performative = acn_msg.WhichOneof('performative')
            if performative == 'aea_envelope':
                aea_envelope = acn_msg.aea_envelope
                try:
                    envelope = Envelope.decode(aea_envelope.envelope)
                    await self.write_acn_status_ok()
                    return envelope
                except Exception as e:
                    await self.write_acn_status_error(f'Failed to decode envelope: {e}', status_code=AcnMessage.StatusBody.StatusCode.ERROR_DECODE)
                    raise
            elif performative == 'status':
                if self._wait_status is not None:
                    self._wait_status.set_result(acn_msg.status.body)
            else:
                await self.write_acn_status_error(f'Bad acn message {performative}', status_code=AcnMessage.StatusBody.StatusCode.ERROR_UNEXPECTED_PAYLOAD)

    async def write_acn_status_ok(self) -> None:
        """Send a SUCCESS status performative to the node."""
        acn_msg = acn_pb2.AcnMessage()
        performative = acn_pb2.AcnMessage.Status_Performative()
        status = AcnMessage.StatusBody(status_code=AcnMessage.StatusBody.StatusCode.SUCCESS, msgs=[])
        AcnMessage.StatusBody.encode(performative.body, status)
        acn_msg.status.CopyFrom(performative)
        buf = acn_msg.SerializeToString()
        await self._write(buf)

    async def write_acn_status_error(self, msg: str, status_code: AcnMessage.StatusBody.StatusCode=AcnMessage.StatusBody.StatusCode.ERROR_GENERIC) -> None:
        """Send an error status performative carrying *msg* to the node."""
        acn_msg = acn_pb2.AcnMessage()
        performative = acn_pb2.AcnMessage.Status_Performative()
        status = AcnMessage.StatusBody(status_code=status_code, msgs=[msg])
        AcnMessage.StatusBody.encode(performative.body, status)
        acn_msg.status.CopyFrom(performative)
        buf = acn_msg.SerializeToString()
        await self._write(buf)

    async def _write(self, data: bytes) -> None:
        await self.pipe.write(data)

    async def _read(self) -> Optional[bytes]:
        return await self.pipe.read()
def correlation(trace, pattern):
    """Sliding normalized cross-correlation of *pattern* against *trace*.

    Uses the standard decomposition: (sum(xy) - n*Ex*Ey) divided by the
    product of the windowed standard-deviation terms.
    """
    trace, pattern = _check_and_cast_args(trace, pattern)
    n = len(pattern)
    # Windowed mean of the trace and scalar mean of the pattern.
    mean_x = moving_mean(trace, n)
    mean_y = _np.mean(pattern)
    # Windowed/total sums of squares for the variance terms.
    sum_x2 = moving_sum(trace ** 2, n)
    sum_y2 = _np.sum(pattern ** 2)
    cross = _signal.correlate(trace, pattern, 'valid')
    numerator = cross - n * mean_x * mean_y
    denominator = _np.sqrt(sum_x2 - n * mean_x ** 2) * _np.sqrt(sum_y2 - n * mean_y ** 2)
    return numerator / denominator
class OptionSonificationGlobaltracksMappingRate(Options):
    """Accessors for the global-tracks ``mapping.rate`` sonification options.

    BUG FIX: each option appeared twice as plain methods with the same name,
    so the "setter" silently shadowed the "getter".  Restored as
    ``@property`` / ``@<name>.setter`` pairs.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def value(self):
        return self._config_get(None)

    @value.setter
    def value(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Lock(Semaphore):
    """A Semaphore restricted to mutual-exclusion semantics: releasing an
    unheld lock is an error instead of incrementing the counter."""
    def release(self, blocking=True):
        # counter > 0 means the lock is not currently held.
        if (self.counter > 0):
            raise RuntimeError('release unlocked lock')
        self.counter += 1
        if self._waiters:
            # Hand the lock to a waiter on the next hub iteration.
            hubs.get_hub().schedule_call_global(0, self._do_acquire)
        return True
    def _at_fork_reinit(self):
        # After fork: child starts with the lock unlocked and no waiters
        # (waiter greenlets belong to the parent process).
        self.counter = 1
        self._waiters.clear()
def test():
    """Locate the bundled sample WAV, decode it, print a preview and return
    the (index, data) pair."""
    primary = os.path.join('examples', 'data', 'sample.wav')
    fallback = os.path.join('..', 'data', 'sample.wav')
    wav_path = find_resource('Chaco', primary, alt_path=fallback, return_path=True)
    index, data = wav_to_numeric(wav_path)
    # Quick visual sanity check of the first samples.
    print(data[:100])
    return (index, data)
def extractNovelSanctuary(item):
    """Parse a Novel Sanctuary feed item.

    Returns a release message for the 'WATTT' series, None for
    previews/unparseable titles, and False for anything unrecognized.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def update_binwise_positions(cnarr, segments=None, variants=None):
    """Rewrite genomic coordinates as bin indices, per chromosome.

    Bins in *cnarr* become consecutive integer intervals [i, i+1); segment and
    variant coordinates are mapped onto the same bin-index axis so all three
    can be plotted on one x scale.  Inputs are copied, not mutated.
    Returns the (cnarr, segments, variants) copies.
    """
    cnarr = cnarr.copy()
    if segments:
        segments = segments.copy()
        seg_chroms = set(segments.chromosome.unique())
    if variants:
        variants = variants.copy()
        var_chroms = set(variants.chromosome.unique())
    for chrom in cnarr.chromosome.unique():
        c_idx = (cnarr.chromosome == chrom)
        c_bins = cnarr[c_idx]
        if (segments and (chrom in seg_chroms)):
            # Segment start -> index of the first bin at/after it; each
            # segment ends where the next begins (last one at the bin count).
            c_seg_idx = (segments.chromosome == chrom).values
            seg_starts = np.searchsorted(c_bins.start.values, segments.start.values[c_seg_idx])
            seg_ends = np.r_[(seg_starts[1:], len(c_bins))]
            segments.data.loc[(c_seg_idx, 'start')] = seg_starts
            segments.data.loc[(c_seg_idx, 'end')] = seg_ends
        if (variants and (chrom in var_chroms)):
            c_varr_idx = (variants.chromosome == chrom).values
            c_varr_df = variants.data[c_varr_idx]
            v_starts = np.searchsorted(c_bins.start.values, c_varr_df.start.values)
            # Spread variants that fall into the same bin evenly across it so
            # they do not overlap when plotted.
            for (idx, size) in list(get_repeat_slices(v_starts)):
                v_starts[idx] += (np.arange(size) / size)
            variant_sizes = (c_varr_df.end - c_varr_df.start)
            variants.data.loc[(c_varr_idx, 'start')] = v_starts
            variants.data.loc[(c_varr_idx, 'end')] = (v_starts + variant_sizes)
        # Bins themselves become unit-width intervals [i, i+1).
        c_starts = np.arange(len(c_bins))
        c_ends = np.arange(1, (len(c_bins) + 1))
        cnarr.data.loc[(c_idx, 'start')] = c_starts
        cnarr.data.loc[(c_idx, 'end')] = c_ends
    return (cnarr, segments, variants)
class TestFilmAdvanceMechanismShutterInterlock(object):
    """Film advance and shutter interlock: advancing cocks the shutter, a
    second advance without tripping is rejected, and tripping re-enables it."""
    def test_film_advance_cocks_shutter(self):
        c = Camera()
        c.film_advance_mechanism.advance()
        assert (c.exposure_control_system.shutter.cocked == True)
        # Advancing again before tripping the shutter must be refused.
        with pytest.raises(FilmAdvanceMechanism.AlreadyAdvanced):
            c.film_advance_mechanism.advance()
        # Tripping the shutter unlocks the mechanism for the next advance.
        assert (c.exposure_control_system.shutter.trip() == 'Tripped')
        c.film_advance_mechanism.advance()
        assert (c.film_advance_mechanism.advanced == True)
class TestKQLtoDSL(unittest.TestCase):
    """KQL -> Elasticsearch DSL conversion: every query compiles to a single
    top-level ``bool`` whose body must match the expected clause dict."""
    def validate(self, kql_source, dsl, **kwargs):
        """Convert *kql_source* and compare the bool body against *dsl*."""
        actual_dsl = kql.to_dsl(kql_source, **kwargs)
        self.assertListEqual(list(actual_dsl), ['bool'])
        self.assertDictEqual(actual_dsl['bool'], dsl)
    def test_field_match(self):
        def match(**kv):
            # Shorthand for a single filter/match clause.
            return {'filter': [{'match': kv}]}
        self.validate('user:bob', match(user='bob'))
        self.validate('number:-1', match(number=(- 1)))
        self.validate('number:1.1', match(number=1.1))
        self.validate('boolean:true', match(boolean=True))
        self.validate('boolean:false', match(boolean=False))
    def test_field_exists(self):
        self.validate('user:*', {'filter': [{'exists': {'field': 'user'}}]})
    def test_field_inequality(self):
        def rng(op, val):
            # Shorthand for a single filter/range clause.
            return {'filter': [{'range': {'field': {op: val}}}]}
        self.validate('field < value', rng('lt', 'value'))
        self.validate('field > -1', rng('gt', (- 1)))
        self.validate('field <= 1.1', rng('lte', 1.1))
        self.validate('field >= 0', rng('gte', 0))
        self.validate('field >= abc', rng('gte', 'abc'))
    def test_or_query(self):
        self.validate('field:value or field2:value2', {'should': [{'match': {'field': 'value'}}, {'match': {'field2': 'value2'}}], 'minimum_should_match': 1})
    def test_and_query(self):
        self.validate('field:value and field2:value2', {'filter': [{'match': {'field': 'value'}}, {'match': {'field2': 'value2'}}]})
    def test_not_query(self):
        self.validate('not field:value', {'must_not': [{'match': {'field': 'value'}}]})
        self.validate('field:(not value)', {'must_not': [{'match': {'field': 'value'}}]})
        self.validate('field:(a and not b)', {'filter': [{'match': {'field': 'a'}}], 'must_not': [{'match': {'field': 'b'}}]})
        self.validate('not field:value and not field2:value2', {'must_not': [{'match': {'field': 'value'}}, {'match': {'field2': 'value2'}}]})
        # optimize=False keeps the nested bool instead of flattening it.
        self.validate('not (field:value or field2:value2)', {'must_not': [{'bool': {'minimum_should_match': 1, 'should': [{'match': {'field': 'value'}}, {'match': {'field2': 'value2'}}]}}]}, optimize=False)
        self.validate('not (field:value and field2:value2)', {'must_not': [{'match': {'field': 'value'}}, {'match': {'field2': 'value2'}}]})
    def test_optimizations(self):
        self.validate('(field:value or field2:value2) and field3:value3', {'should': [{'match': {'field': 'value'}}, {'match': {'field2': 'value2'}}], 'filter': [{'match': {'field3': 'value3'}}], 'minimum_should_match': 1})
        self.validate('(field:value and field2:value2) or field3:value3', {'should': [{'bool': {'filter': [{'match': {'field': 'value'}}, {'match': {'field2': 'value2'}}]}}, {'match': {'field3': 'value3'}}], 'minimum_should_match': 1})
        self.validate('a:(v1 or v2 or v3) or b:(v4 or v5)', {'should': [{'match': {'a': 'v1'}}, {'match': {'a': 'v2'}}, {'match': {'a': 'v3'}}, {'match': {'b': 'v4'}}, {'match': {'b': 'v5'}}], 'minimum_should_match': 1})
        self.validate('a:(v1 or v2 or v3) and b:(v4 or v5)', {'should': [{'match': {'a': 'v1'}}, {'match': {'a': 'v2'}}, {'match': {'a': 'v3'}}], 'filter': [{'bool': {'should': [{'match': {'b': 'v4'}}, {'match': {'b': 'v5'}}], 'minimum_should_match': 1}}], 'minimum_should_match': 1})
        self.validate('(field:value and not field2:value2) or field3:value3', {'should': [{'bool': {'filter': [{'match': {'field': 'value'}}], 'must_not': [{'match': {'field2': 'value2'}}]}}, {'match': {'field3': 'value3'}}], 'minimum_should_match': 1})
def test_host_with_auth_and_port_in_url():
    # NOTE(review): this block is garbled — the URL literal on the next line
    # and the client construction are truncated (unterminated string /
    # missing right-hand side), and the f-string in the final assert is
    # broken.  Presumably the URL carried basic-auth credentials and a port
    # (matching the Authorization header asserted below) — restore from the
    # upstream test suite before use.
    url = '
    client =
    response = client.get(url)
    assert (response.status_code == 200)
    assert (response.json() == {'headers': {'accept': '*/*', 'accept-encoding': 'gzip, deflate, br', 'connection': 'keep-alive', 'host': 'example.org', 'user-agent': f'python- 'authorization': 'Basic dXNlcm5hbWU6cGFzc3dvcmQ='}})
class WeeklyLogFile(logfile.DailyLogFile):
    """A Twisted DailyLogFile variant that rotates every *day_rotation* days
    or when the file exceeds *max_size* bytes, whichever comes first."""
    def __init__(self, name, directory, defaultMode=None, day_rotation=7, max_size=1000000):
        # day_rotation: rotate after this many days; max_size: rotate once the
        # file grows past this many bytes.
        self.day_rotation = day_rotation
        self.max_size = max_size
        self.size = 0
        logfile.DailyLogFile.__init__(self, name, directory, defaultMode=defaultMode)
    def _openFile(self):
        logfile.DailyLogFile._openFile(self)
        # Resume size tracking from the existing file's length.
        self.size = self._file.tell()
    def shouldRotate(self):
        # Rotate on a later year/month, after day_rotation days, or on size.
        now = self.toDate()
        then = self.lastDate
        return ((now[0] > then[0]) or (now[1] > then[1]) or (now[2] > (then[2] + self.day_rotation)) or (self.size >= self.max_size))
    def suffix(self, tupledate):
        """Build a rotation suffix from *tupledate*, appending __N until the
        resulting path does not already exist."""
        suffix = ''
        copy_suffix = 0
        while True:
            try:
                suffix = '_'.join(['{:02d}'.format(part) for part in tupledate])
            except Exception:
                # tupledate was not iterable date parts; treat it as a
                # timestamp and convert.
                suffix = '_'.join(['{:02d}'.format(part) for part in self.toDate(tupledate)])
            suffix += (f'__{copy_suffix}' if copy_suffix else '')
            if os.path.exists(f'{self.path}.{suffix}'):
                copy_suffix += 1
            else:
                break
        return suffix
    def rotate(self):
        # Best-effort rotation: log and carry on if the rename fails.
        try:
            super().rotate()
        except Exception:
            log_trace(f'Could not rotate the log file {self.name}.')
    def write(self, data):
        # Bypass DailyLogFile.write (which rotates per write) and track the
        # date/size ourselves.
        logfile.BaseLogFile.write(self, data)
        self.lastDate = max(self.lastDate, self.toDate())
        self.size += len(data)
@_stats_reply_type(ofproto.OFPST_DESC, body_single_struct=True)
class OFPDescStats(ofproto_parser.namedtuple('OFPDescStats', ('mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc'))):
    """OpenFlow description-stats reply body (fixed-width ASCII fields).

    BUG FIX: the registration line above lacked its ``@`` (it was a bare call
    that never decorated the class), and ``parser`` lacked ``@classmethod``
    even though the framework invokes it on the class.
    """
    _TYPE = {'ascii': ['mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc']}

    @classmethod
    def parser(cls, buf, offset):
        """Unpack one OFP_DESC_STATS body from *buf* at *offset*."""
        desc = struct.unpack_from(ofproto.OFP_DESC_STATS_PACK_STR, buf, offset)
        desc = list(desc)
        # Strip the NUL padding from each fixed-width field.
        desc = [x.rstrip(b'\x00') for x in desc]
        stats = cls(*desc)
        stats.length = ofproto.OFP_DESC_STATS_SIZE
        return stats
@_checkpoint(dump_params=True, include=['adpixels_ids'], component=LOG_COMPONENT)
def _verify_adspixels_if_exist(adspixels_ids: List[str], client: BoltGraphAPIClient[BoltPLGraphAPICreateInstanceArgs]) -> None:
    """Verify the access token can read each ad pixel id; no-op if the list is empty.

    BUG FIX: the checkpoint line above lacked its ``@`` and so never decorated
    this function.  NOTE(review): ``include=['adpixels_ids']`` does not match
    the parameter name ``adspixels_ids`` — looks like a typo; confirm against
    the checkpoint decorator's key handling before changing it.
    """
    if adspixels_ids:
        try:
            # A fields=['id'] read is the cheapest permission probe.
            for pixel_id in adspixels_ids:
                client.get_adspixels(adspixels_id=pixel_id, fields=['id'])
        except GraphAPIGenericException:
            raise PCStudyValidationException(cause=f'Read adspixel {adspixels_ids} failed.', remediation='Check access token has permission to read adspixel', exit_code=OneCommandRunnerExitCode.ERROR_READ_ADSPIXELS)
def fail_processing(submission_id: int, processor_id: str, exception: BaseException) -> int:
    """Mark an in-progress DABS load as FAILED, recording the traceback.

    Only rows still owned by *processor_id* and IN_PROGRESS are touched, so a
    load that was already reclaimed is left alone.  Returns the number of rows
    updated (0 or 1).
    """
    exception_message = ''.join(format_exception(type(exception), exception, exception.__traceback__))
    return DABSLoaderQueue.objects.filter(submission_id=submission_id, processor_id=processor_id, state=DABSLoaderQueue.IN_PROGRESS).update(state=DABSLoaderQueue.FAILED, processor_id=None, processing_failed=now(), exception=exception_message)
class Text_Visitor(AST_Visitor):
    """AST visitor that pretty-prints the tree as indented text, one node per
    line, writing to *fd* (or stdout when fd is falsy)."""
    def __init__(self, fd):
        super().__init__()
        self.indent = 0
        self.fd = fd
    def write(self, string):
        """Emit one line at the current indentation level."""
        assert isinstance(string, str)
        txt = ((' ' * (self.indent * 2)) + string)
        if self.fd:
            self.fd.write((txt + '\n'))
        else:
            print(txt)
    def write_head(self, string, relation):
        """Emit a node header ('relation: text' when a relation is given) and
        indent for the node's children; visit_end undoes the indent."""
        if relation:
            self.write(((relation + ': ') + string))
        else:
            self.write(string)
        self.indent += 1
    def visit(self, node, n_parent, relation):
        """Dispatch on the node type and print a type-specific header, plus
        extra detail lines for constraints and logical operations."""
        if isinstance(node, Special_Block):
            self.write_head(((node.t_kw.value.capitalize() + ' ') + node.__class__.__name__), relation)
        elif isinstance(node, Entity_Constraints):
            self.write_head(node.__class__.__name__, relation)
            # One extra line per dimension constraint (kind for ':', else value).
            for (dim, t_cons) in enumerate(node.l_dim_constraint, 1):
                if (t_cons.kind == 'COLON'):
                    self.write(('Dimension %u constraint: %s' % (dim, t_cons.kind)))
                else:
                    self.write(('Dimension %u constraint: %s' % (dim, t_cons.value)))
        elif isinstance(node, Function_Call):
            self.write_head(((node.variant.capitalize() + ' form ') + node.__class__.__name__), relation)
        elif isinstance(node, Action):
            self.write_head(((node.kind().capitalize() + ' ') + node.__class__.__name__), relation)
        elif isinstance(node, Identifier):
            self.write_head((((node.__class__.__name__ + ' <') + node.t_ident.value) + '>'), relation)
        elif isinstance(node, Number_Literal):
            self.write_head((((node.__class__.__name__ + ' <') + node.t_value.value) + '>'), relation)
        elif isinstance(node, Char_Array_Literal):
            self.write_head((((node.__class__.__name__ + " '") + node.t_string.value) + "'"), relation)
        elif isinstance(node, String_Literal):
            self.write_head((((node.__class__.__name__ + ' "') + node.t_string.value) + '"'), relation)
        elif isinstance(node, Unary_Operation):
            self.write_head(((node.__class__.__name__ + ' ') + node.t_op.value), relation)
        elif isinstance(node, Binary_Operation):
            self.write_head(((node.__class__.__name__ + ' ') + node.t_op.value), relation)
            # Logical binary ops additionally report short-circuit behavior.
            if isinstance(node, Binary_Logical_Operation):
                self.write(('Short-Circuit: %s' % node.short_circuit))
        elif isinstance(node, Import_Statement):
            self.write_head(((node.__class__.__name__ + ' for ') + '.'.join(node.get_chain_strings())), relation)
        elif isinstance(node, Metric_Justification_Pragma):
            self.write_head((node.__class__.__name__ + (' for %s' % node.t_metric.value)), relation)
        else:
            # Fallback: just the class name.
            self.write_head(node.__class__.__name__, relation)
    def visit_end(self, node, n_parent, relation):
        # Undo the indent added by write_head when leaving the node.
        self.indent -= 1
def extractIsekaintrWordpressCom(item):
    """Parse a feed item from isekaintr.wordpress.com.

    Returns a release message for known tagged series, None for
    previews/unparseable titles, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    # (feed tag, canonical series name, translation type)
    tagmap = [
        ('Saint Doll', 'Seikishin -Saint Doll-', 'translated'),
        ('When I was summoned to different world with RPG style, [Loves NTR] skill appeared', 'When I was summoned to different world with RPG style, [Likes NTR] skill appeared', 'translated'),
        ('When I was summoned to different world with RPG style, [Likes NTR] skill appeared', 'When I was summoned to different world with RPG style, [Likes NTR] skill appeared', 'translated'),
        ('When I was summoned to another world with RPG style, [Likes NTR] skill appeared', 'When I was summoned to different world with RPG style, [Likes NTR] skill appeared', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractLotustranslationsWordpressCom(item):
    """Parse a feed item from lotustranslations.wordpress.com.

    Returns a release message for known tagged series, None for
    previews/announcements/unparseable titles, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    if item['tags'] == ['Announcements']:
        # Pure announcement posts carry no release.
        return None
    # (feed tag, canonical series name, translation type)
    tagmap = [
        ('Xianggong, Please Divorce Me!', 'Xianggong, Please Divorce Me!', 'translated'),
        ('100% Sweet Love', '100% sweet love: The delinquent XXX wife is a bit sweet', 'translated'),
        ('Black Bellied President Dotes on Wife', 'Black Bellied President Dotes on Wife', 'translated'),
        ('icsaytd', 'I Can Still Accompany You Till Dawn', 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@_deserializable
class CacheConfig(BaseConfig):
    """Cache configuration bundling a similarity-evaluation config and an
    init config.

    BUG FIXES: the decorator line above lacked its ``@`` (the class was never
    registered); the mutable default arguments (shared config instances
    constructed once at definition time) are replaced with the None-sentinel
    idiom; ``from_config`` is now a ``@staticmethod`` so it also works when
    called on an instance.
    """

    def __init__(self, similarity_eval_config: Optional[CacheSimilarityEvalConfig]=None, init_config: Optional[CacheInitConfig]=None):
        # Build fresh defaults per instance instead of sharing one object.
        self.similarity_eval_config = similarity_eval_config if similarity_eval_config is not None else CacheSimilarityEvalConfig()
        self.init_config = init_config if init_config is not None else CacheInitConfig()

    @staticmethod
    def from_config(config: Optional[Dict[(str, Any)]]):
        """Build a CacheConfig from a raw dict (or defaults when None)."""
        if config is None:
            return CacheConfig()
        return CacheConfig(similarity_eval_config=CacheSimilarityEvalConfig.from_config(config.get('similarity_evaluation', {})), init_config=CacheInitConfig.from_config(config.get('init_config', {})))
def create_ssl_context(options: argparse.Namespace, bool=False) -> ssl.SSLContext:
    # NOTE(review): this block is garbled — the second parameter lost its name
    # (it is literally called ``bool``) and the ``if`` below lost its
    # condition (SyntaxError as-is).  Presumably the flag gated HTTP/2, since
    # the guarded line advertises the 'h2' ALPN protocol — restore from the
    # upstream source before use.
    ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    ctx.load_cert_chain(options.certfile, keyfile=options.keyfile)
    if
        ctx.set_alpn_protocols(['h2'])
    # Disable TLS < 1.2 and TLS compression.
    ctx.options |= ((ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1) | ssl.OP_NO_COMPRESSION)
    ctx.set_ciphers(constants.DOH_CIPHERS)
    return ctx
def read_asn1_key(binary: bytes, offset: int):
    """Try to decode a DER-encoded private key found at ``offset``.

    Returns the key's textual dump on success, or None when the bytes at
    ``offset`` do not start a known TLV field or OpenSSL cannot decode them
    (a false-positive signature match).
    """
    if binary[offset] not in TLV_KNOWN_STARTS:
        return None
    start, size = _get_start_and_size_of_der_field(binary=binary, offset=offset)
    try:
        private_key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_ASN1, binary[offset:start + size])
        return make_unicode_string(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_TEXT, private_key))
    except OpenSSL.crypto.Error:
        logging.debug('Found PKCS#8 key signature, but looks false positive')
        return None
    except TypeError:
        logging.warning('Found PKCS#8 key signature but openssl binding could not decode it.')
        return None
class DNSLookupTool(LocalComponent, metaclass=ABCMeta):
    """Abstract DNS lookup helper backed by ``dig`` on the device."""

    DEFAULT_HOSTNAME = 'google.com'
    DEFAULT_TIMEOUT = 5

    def __init__(self, device, config):
        super().__init__(device, config)
        self._dig = Dig(self._device)

    def _check_current_dns_server_is_known(self, servers):
        # Resolve once and verify the answering server is in the known set.
        current = ipaddress.ip_address(self.lookup()[0])
        if current not in servers:
            raise XVEx('Current DNS server is {} but the system only knows about {}'.format(current, servers))

    def known_servers(self):
        # Subclasses provide the set of expected DNS servers.
        pass

    def lookup(self, hostname=None, timeout=None, server=None):
        """Resolve ``hostname`` via dig; returns (answering_server, ips)."""
        hostname = DNSLookupTool.DEFAULT_HOSTNAME if hostname is None else hostname
        timeout = DNSLookupTool.DEFAULT_TIMEOUT if timeout is None else timeout
        L.debug("Doing DNS lookup for '{}'".format(hostname))
        answering_server, ips = self._dig.lookup(hostname, timeout, server)
        L.debug('DNS lookup returned ips: {} using server {}'.format(ips, answering_server))
        return (answering_server, ips)
class SparkWasserstein(SparkStatTestImpl):
    """Approximate Wasserstein drift test for Spark data.

    Compares histograms of the current and reference columns; the distance
    is normalised by the reference standard deviation.
    """

    base_stat_test = wasserstein_stat_test

    def __call__(self, data: SpartStatTestData, feature_type: ColumnType, threshold: float) -> StatTestFuncReturns:
        from scipy.stats import wasserstein_distance

        column = data.column_name
        current = data.current_data
        reference = data.reference_data
        # Floor the normaliser so a constant reference column cannot divide by 0.
        norm = max(calculate_stats(reference, column, sf.stddev_pop), 0.001)
        cur_weights, cur_edges = get_histogram(current, column, APPROX_WASSERSTEIN_BINS, False)
        ref_weights, ref_edges = get_histogram(reference, column, APPROX_WASSERSTEIN_BINS, False)
        # Histogram bin centers = midpoints between consecutive edges.
        cur_centers = np.diff(cur_edges) / 2 + cur_edges[:-1]
        ref_centers = np.diff(ref_edges) / 2 + ref_edges[:-1]
        distance = wasserstein_distance(cur_centers, ref_centers, cur_weights, ref_weights) / norm
        return (distance, distance >= threshold)
def create(reg_code: RegCodeModel, password: str) -> RegCodeModel:
    """Persist a new registration code with a hashed password.

    Raises ArgumentError when a code with the same password hash already
    exists. Returns the stored model (with DB-generated fields populated).
    """
    reg_code.password = _hash(password)
    with session() as s:
        duplicate = s.query(RegCodeOrmModel).filter_by(password=reg_code.password).one_or_none()
        if duplicate:
            raise ArgumentError(MESSAGE_DUPLICATE_REG_CODE)
        orm = reg_code.to_orm_model()
        s.add(orm)
        # Flush so DB-generated fields exist before converting back.
        s.flush()
        created = RegCodeModel.from_orm_model(orm)
        s.commit()
        return created
def admit_patient(inpatient_record, service_unit, check_in, expected_discharge=None):
    """Admit a patient: update the inpatient record, move them into the
    given service unit, and mark the Patient document as admitted."""
    validate_nursing_tasks(inpatient_record)
    inpatient_record.status = 'Admitted'
    inpatient_record.admitted_datetime = check_in
    inpatient_record.expected_discharge = expected_discharge
    # Start with a clean occupancy history; transfer_patient adds the first entry.
    inpatient_record.set('inpatient_occupancies', [])
    transfer_patient(inpatient_record, service_unit, check_in)
    frappe.db.set_value('Patient', inpatient_record.patient, {'inpatient_status': 'Admitted', 'inpatient_record': inpatient_record.name})
class TaskHandler(object):
    """Schedules delayed tasks on the Twisted reactor and optionally
    persists them (via ServerConfig) so they can be restored after a
    restart.

    Each entry of ``self.tasks`` maps a task id to
    ``(date, callback, args, kwargs, persistent, deferred)``.
    """

    def __init__(self):
        # task_id -> (date, callback, args, kwargs, persistent, deferred)
        self.tasks = {}
        # task_id -> dbserialize()d payload staged for persistence
        self.to_save = {}
        self.clock = reactor
        # Seconds past the due date after which an inactive task is purged.
        self.stale_timeout = 60
        # Test hook: when truthy, used in place of datetime.now() in
        # clean_stale_tasks.
        self._now = False

    def load(self):
        """Load persisted tasks from ServerConfig into memory.

        Callbacks stored as ``(obj, method_name)`` pairs are re-bound;
        pairs whose object no longer exists are dropped, and the storage
        is re-saved without them.
        """
        to_save = False
        value = ServerConfig.objects.conf('delayed_tasks', default={})
        if isinstance(value, str):
            tasks = dbunserialize(value)
        else:
            tasks = value
        for (task_id, value) in tasks.items():
            (date, callback, args, kwargs) = dbunserialize(value)
            if isinstance(callback, tuple):
                (obj, method) = callback
                if (obj is None):
                    # The owning object has vanished; discard this task.
                    to_save = True
                    continue
                callback = getattr(obj, method)
            # Restored tasks are persistent by definition; no deferred yet.
            self.tasks[task_id] = (date, callback, args, kwargs, True, None)
        if (self.stale_timeout > 0):
            self.clean_stale_tasks()
        if to_save:
            self.save()

    def clean_stale_tasks(self):
        """Remove tasks with no active deferred that are more than
        ``stale_timeout`` seconds past their scheduled date."""
        clean_ids = []
        for (task_id, (date, callback, args, kwargs, persistent, _)) in self.tasks.items():
            if (not self.active(task_id)):
                stale_date = (date + timedelta(seconds=self.stale_timeout))
                now = (self._now if self._now else datetime.now())
                if (now > stale_date):
                    clean_ids.append(task_id)
        for task_id in clean_ids:
            self.remove(task_id)
        return True

    def save(self):
        """Serialize all persistent tasks into ``self.to_save`` and write
        them to ServerConfig.

        Bound-method callbacks are stored as ``(instance, method_name)``
        pairs so they can be pickled; unpicklable callbacks raise
        ValueError.
        """
        for (task_id, (date, callback, args, kwargs, persistent, _)) in self.tasks.items():
            if (task_id in self.to_save):
                # Already serialized on a previous save; skip.
                continue
            if (not persistent):
                continue
            safe_callback = callback
            if getattr(callback, '__self__', None):
                # Bound method: store as (instance, name) for pickling.
                obj = callback.__self__
                name = callback.__name__
                safe_callback = (obj, name)
            try:
                dbserialize(safe_callback)
            except (TypeError, AttributeError, PickleError) as err:
                raise ValueError('the specified callback {callback} cannot be pickled. It must be a top-level function in a module or an instance method ({err}).'.format(callback=callback, err=err))
            self.to_save[task_id] = dbserialize((date, safe_callback, args, kwargs))
        ServerConfig.objects.conf('delayed_tasks', self.to_save)

    def add(self, timedelay, callback, *args, **kwargs):
        """Schedule ``callback(*args, **kwargs)`` to run after
        ``timedelay`` seconds.

        Pass ``persistent=True`` in kwargs to survive restarts;
        unpicklable arguments are then dropped with a logged error.
        Returns a TaskHandlerTask wrapper, or False if the task vanished
        before its deferred could be attached.
        """
        now = datetime.now()
        delta = timedelta(seconds=timedelay)
        comp_time = (now + delta)
        # Pick the smallest unused positive integer as the task id.
        used_ids = list(self.tasks.keys())
        task_id = 1
        while (task_id in used_ids):
            task_id += 1
        # 'persistent' is consumed here and never forwarded to the callback.
        persistent = kwargs.get('persistent', False)
        if ('persistent' in kwargs):
            del kwargs['persistent']
        if persistent:
            safe_args = []
            safe_kwargs = {}
            # Keep only arguments that can actually be pickled for storage.
            for arg in args:
                try:
                    dbserialize(arg)
                except (TypeError, AttributeError, PickleError):
                    log_err('The positional argument {} cannot be pickled and will not be present in the arguments fed to the callback {}'.format(arg, callback))
                else:
                    safe_args.append(arg)
            for (key, value) in kwargs.items():
                try:
                    dbserialize(value)
                except (TypeError, AttributeError, PickleError):
                    log_err('The {} keyword argument {} cannot be pickled and will not be present in the arguments fed to the callback {}'.format(key, value, callback))
                else:
                    safe_kwargs[key] = value
            self.tasks[task_id] = (comp_time, callback, safe_args, safe_kwargs, persistent, None)
            self.save()
        else:
            self.tasks[task_id] = (comp_time, callback, args, kwargs, persistent, None)
        # The reactor fires do_task, which looks the real callback up by id.
        callback = self.do_task
        args = [task_id]
        kwargs = {}
        d = deferLater(self.clock, timedelay, callback, *args, **kwargs)
        d.addErrback(handle_error)
        if (task_id in self.tasks):
            # Attach the deferred to the stored entry.
            # NOTE(review): the entry becomes a *list* here while entries
            # elsewhere are tuples — all consumers unpack positionally, so
            # both shapes work, but confirm this inconsistency is intended.
            task = self.tasks.get(task_id)
            task = list(task)
            task[4] = persistent
            task[5] = d
            self.tasks[task_id] = task
        else:
            return False
        if (self.stale_timeout > 0):
            self.clean_stale_tasks()
        return TaskHandlerTask(task_id)

    def exists(self, task_id):
        """Return True if a task with this id is currently tracked."""
        if (task_id in self.tasks):
            return True
        else:
            return False

    def active(self, task_id):
        """Return True if the task exists and its deferred has not fired
        yet (a task with no deferred attached counts as active)."""
        if (task_id in self.tasks):
            deferred = self.get_deferred(task_id)
            return (not (deferred and deferred.called))
        else:
            return False

    def cancel(self, task_id):
        """Cancel the task's pending deferred.

        Returns False when the task is unknown, has no deferred, or the
        deferred has already fired.
        """
        if (task_id in self.tasks):
            d = self.get_deferred(task_id)
            if d:
                if d.called:
                    return False
                else:
                    d.cancel()
                    return True
            else:
                return False
        else:
            return False

    def remove(self, task_id):
        """Cancel and forget the task, dropping any staged persistence
        payload and re-saving the remaining tasks."""
        # NOTE(review): ``d`` is never reassigned after this, so the
        # ``if d: del d`` cleanup below is dead code — confirm whether the
        # deferred was meant to be fetched before deletion.
        d = None
        if (task_id in self.tasks):
            self.cancel(task_id)
            del self.tasks[task_id]
        if (task_id in self.to_save):
            del self.to_save[task_id]
        self.save()
        if d:
            del d
        return True

    def clear(self, save=True, cancel=True):
        """Drop all tasks (optionally cancelling their deferreds first)
        and optionally persist the now-empty state."""
        if self.tasks:
            for task_id in self.tasks.keys():
                if cancel:
                    self.cancel(task_id)
            self.tasks = {}
        if self.to_save:
            self.to_save = {}
        if save:
            self.save()
        return True

    def call_task(self, task_id):
        """Invoke the task's callback immediately WITHOUT cancelling or
        removing it (the scheduled run will still happen later)."""
        if (task_id in self.tasks):
            (date, callback, args, kwargs, persistent, d) = self.tasks.get(task_id)
        else:
            return False
        return callback(*args, **kwargs)

    def do_task(self, task_id):
        """Run the task now: cancel its pending deferred (if any), invoke
        the callback, then remove the task.

        Returns the callback's return value, or False if the task is
        unknown or its deferred already fired.
        """
        callback_return = False
        if (task_id in self.tasks):
            (date, callback, args, kwargs, persistent, d) = self.tasks.get(task_id)
        else:
            return False
        if d:
            if (not d.called):
                d.cancel()
            else:
                return False
        callback_return = callback(*args, **kwargs)
        self.remove(task_id)
        return callback_return

    def get_deferred(self, task_id):
        """Return the deferred attached to the task, or None."""
        if (task_id in self.tasks):
            return self.tasks[task_id][5]
        else:
            return None

    def create_delays(self):
        """(Re)arm reactor delays for every loaded task, e.g. after a
        restart; overdue tasks fire immediately (delay clamped to 0)."""
        now = datetime.now()
        for (task_id, (date, callback, args, kwargs, _, _)) in self.tasks.items():
            self.tasks[task_id] = (date, callback, args, kwargs, True, None)
            seconds = max(0, (date - now).total_seconds())
            d = deferLater(self.clock, seconds, self.do_task, task_id)
            d.addErrback(handle_error)
            if self.tasks.get(task_id, False):
                self.tasks[task_id] = (date, callback, args, kwargs, True, d)
def read_simulation_from_hdf5(file_name: str) -> bytes:
    """Reassemble the serialized simulation JSON stored in an HDF5 file.

    The JSON payload is stored split across numbered JSON_TAG datasets;
    this concatenates the parts in order. Returns the raw ``bytes`` (the
    previous ``-> str`` annotation was wrong: parts are read and joined as
    bytes and never decoded).
    """
    with h5py.File(file_name, 'r') as f_handle:
        # Count how many JSON_TAG-keyed parts the file contains.
        num_string_parts = len([key for key in f_handle.keys() if (JSON_TAG in key)])
        json_string = b''
        for ind in range(num_string_parts):
            json_string += f_handle[_json_string_key(ind)][()]
        return json_string
def hash_domain(domain_data: Dict[str, Any]) -> bytes:
    """Hash an EIP-712 domain separator struct.

    Validates that ``domain_data`` only uses the standard EIP-712 domain
    fields, builds the type definition from the fields actually present
    (in canonical order), and delegates to ``hash_struct``.
    """
    eip712_domain_map = {
        'name': {'name': 'name', 'type': 'string'},
        'version': {'name': 'version', 'type': 'string'},
        'chainId': {'name': 'chainId', 'type': 'uint256'},
        'verifyingContract': {'name': 'verifyingContract', 'type': 'address'},
        'salt': {'name': 'salt', 'type': 'bytes32'},
    }
    unknown = [k for k in domain_data if k not in eip712_domain_map]
    if unknown:
        raise ValueError(f'Invalid domain key: `{unknown[0]}`')
    domain_types = {'EIP712Domain': [field for key, field in eip712_domain_map.items() if key in domain_data]}
    return hash_struct('EIP712Domain', domain_types, domain_data)
def test_strhex2int():
    """strhex2int parses '0x…' strings as signed (default) or unsigned."""
    signed_cases = [
        ('0x00', 0),
        ('0x0A', 10),
        ('0x7F', 127),
        ('0xFF', -1),
        ('0x0F', 15),
        ('0x80', -128),
    ]
    for text, expected in signed_cases:
        assert strhex2int(text) == expected
    # Unsigned interpretation of values with the high bit set.
    assert strhex2int('0x80', signed=False) == 128
    assert strhex2int('0xFF', signed=False) == 255
    # Multi-byte input.
    assert strhex2int('0x100') == 256
class OptionSeriesArearangeSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for arearange sonification (generated
    Highcharts wrapper).

    BUG fix: each getter was immediately shadowed by a same-named setter
    ``def``, so the getters were unreachable dead code. The pattern matches
    the standard ``@property`` / ``@<name>.setter`` convention used by these
    wrappers; the decorators are restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class BaseInstanceTest(object):
    """Shared checks run against every pybikes system instance."""

    def test_tag_unique(self, instance, i_data, cls, mod):
        """Every instance tag must be globally unique."""
        tag = instance.tag
        err = ("tag '%s' is not unique (%s.%s)" % (tag, mod, cls))
        assert (tags.count(tag) == 1), err

    def test_fields(self, instance, i_data, cls, mod):
        """Validate required metadata fields and their types/ranges."""
        assert ('tag' in i_data)
        assert ('meta' in i_data)
        assert re.match('^[a-z0-9\\-]+$', i_data['tag'])
        for field in ['latitude', 'longitude']:
            assert (field in instance.meta)
            assert isinstance(instance.meta[field], float)
        for field in ['city', 'country', 'name']:
            assert (field in instance.meta)
        assert ((- 90) <= instance.meta['latitude'] <= 90)
        assert ((- 180) <= instance.meta['longitude'] <= 180)
        assert ('company' in instance.meta)
        company = instance.meta['company']
        err = ("[company] expected list, found '%s' for %s" % (type(company).__name__, company))
        assert isinstance(instance.meta['company'], list), err

    def test_uses_scraper(self, instance, i_data, cls, mod):
        """The instance must go through the provided scraper for network
        access."""
        scraper = pybikes.PyBikesScraper()
        # BUG fix: the original assigned the Mock *class* (``request = Mock``);
        # ``Mock.called`` on the class is a truthy property object, so the
        # final assertion could never fail. An instance is required to
        # actually record calls.
        request = Mock()
        scraper.request = request
        try:
            instance.update(scraper)
        except Exception:
            pass
        assert request.called

    # NOTE(review): the original decorator was the bare line ``.update``
    # (a syntax error); ``@pytest.mark.update`` is the reconstruction that
    # matches the stripped-decorator corruption pattern — confirm upstream.
    @pytest.mark.update
    def test_update(self, instance, i_data, cls, mod, record_property):
        """Full network update: stations appear and expose sane fields."""
        scraper = pybikes.PyBikesScraper(cachedict=(cache if instance.unifeed else None), headers=headers.setdefault(mod, {}))
        scraper.requests_timeout = 11
        instance.update(scraper)
        assert (len(instance.stations) > 0)
        if instance.sync:
            check_for = len(instance.stations)
        else:
            # Async feeds: spot-check at most 5 stations to limit requests.
            check_for = min(len(instance.stations), 5)
        for i in range(0, check_for):
            station = instance.stations[i]
            station.update(scraper)
            assert isinstance(station.bikes, int)
            assert isinstance(station.latitude, float)
            assert isinstance(station.longitude, float)
            if (station.free is not None):
                assert isinstance(station.free, int)
        # Round-trip through JSON to ensure the geojson is serializable.
        record_property('geojson', json.loads(json.dumps(instance.to_geojson())))
class OptionPlotoptionsVariablepieSonificationContexttracksMappingTime(Options):
    """Time mapping options for variable-pie sonification context tracks
    (generated Highcharts wrapper).

    BUG fix: each getter was shadowed by a same-named setter ``def``,
    leaving the getters unreachable; the ``@property`` / ``@<name>.setter``
    decorators used by these wrappers are restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@_cli_group.command()
@click.option('--trace_id', required=False, type=str, default=None, show_default=True, help='Specify the trace ID to list')
@click.option('--span_id', required=False, type=str, default=None, show_default=True, help='Specify the Span ID to list.')
@click.option('--span_type', required=False, type=str, default=None, show_default=True, help='Specify the Span Type to list.')
@click.option('--parent_span_id', required=False, type=str, default=None, show_default=True, help='Specify the Parent Span ID to list.')
@click.option('--search', required=False, type=str, default=None, show_default=True, help='Search trace_id, span_id, parent_span_id, operation_name or content in metadata.')
@click.option('-l', '--limit', type=int, default=20, help='Limit the number of recent span displayed.')
@click.option('--start_time', type=str, help='Filter by start time. Format: "YYYY-MM-DD HH:MM:SS.mmm"')
@click.option('--end_time', type=str, help='Filter by end time. Format: "YYYY-MM-DD HH:MM:SS.mmm"')
@click.option('--desc', required=False, type=bool, default=False, is_flag=True, help='Whether to use reverse sorting. By default, sorting is based on start time.')
@click.option('--output', required=False, type=click.Choice(['text', 'html', 'csv', 'latex', 'json']), default='text', help='The output format')
@click.argument('files', nargs=-1, type=click.Path(exists=True, readable=True))
def list(trace_id: str, span_id: str, span_type: str, parent_span_id: str, search: str, limit: int, start_time: str, end_time: str, desc: bool, output: str, files=None):
    """List spans read from FILES as a table, with optional filtering.

    BUG fix: the decorators above were bare tuples in the original (their
    ``@click.option`` / ``@click.argument`` / leading ``@`` had been lost),
    so the command and its parameters never registered; they are restored.
    The function name intentionally shadows the ``list`` builtin — it is
    the CLI command name.
    """
    from prettytable import PrettyTable

    spans = read_spans_from_files(files)
    if trace_id:
        spans = filter(lambda s: s['trace_id'] == trace_id, spans)
    if span_id:
        spans = filter(lambda s: s['span_id'] == span_id, spans)
    if span_type:
        spans = filter(lambda s: s['span_type'] == span_type, spans)
    if parent_span_id:
        spans = filter(lambda s: s['parent_span_id'] == parent_span_id, spans)
    if start_time:
        start_dt = _parse_datetime(start_time)
        spans = filter(lambda span: _parse_datetime(span['start_time']) >= start_dt, spans)
    if end_time:
        end_dt = _parse_datetime(end_time)
        spans = filter(lambda span: _parse_datetime(span['start_time']) <= end_dt, spans)
    if search:
        spans = filter(_new_search_span_func(search), spans)
    # Sort by start time (reversed with --desc) and keep at most `limit` spans.
    spans = sorted(spans, key=lambda span: _parse_datetime(span['start_time']), reverse=desc)[:limit]
    table = PrettyTable(['Trace ID', 'Span ID', 'Operation Name', 'Conversation UID'])
    for sp in spans:
        conv_uid = None
        # Simplified from `('metadata' in sp) and sp`: the second operand was
        # redundant (membership already implies a non-empty dict).
        if 'metadata' in sp:
            metadata = sp['metadata']
            if isinstance(metadata, dict):
                conv_uid = metadata.get('conv_uid')
        table.add_row([sp.get('trace_id'), sp.get('span_id'), sp.get('operation_name'), conv_uid])
    out_kwargs = {'ensure_ascii': False} if output == 'json' else {}
    print(table.get_formatted_string(out_format=output, **out_kwargs))
def _responderChain(startResponder):
    """Yield the object description of each responder in the chain,
    starting at ``startResponder`` and following ``nextResponder`` until a
    nil (0x0) address is reached."""
    address = fb.evaluateExpression(startResponder)
    while int(address, 16):
        yield fb.evaluateExpressionValue(address).GetObjectDescription()
        address = fb.evaluateExpression('(id)[(id)' + address + ' nextResponder]')
class TestScriptGeneratorCli():
    """Tests for the script-generator CLI command."""

    # BUG fix: the original decorator line was the bare expression
    # ``.parametrize(...)`` — a syntax error. The ``@pytest.mark`` prefix is
    # restored per the standard pytest parametrization pattern.
    @pytest.mark.parametrize('args', [['my-script.sh'], ['--use-nix', 'my-script.sh']])
    def test_should_generate_script_using_given_arguments(self, cli_runner, args):
        """The command exits cleanly and writes an executable script file."""
        with cli_runner.isolated_filesystem() as tmp:
            result = cli_runner.invoke(ScriptGenerator, args)
            assert result.exit_code == 0
            assert_paths([f'{tmp}/my-script.sh'], condition=(lambda x: os.path.isfile(x)))
            assert_paths([f'{tmp}/my-script.sh'], condition=(lambda x: os.access(x, os.X_OK)))
class Migration(migrations.Migration):
    # Creates the CollectionInvitation model: an invitation from an existing
    # collection member ("fromMember") to a user, carrying the encryption key
    # signed for the invitee and an access level (admin / read-write / read-only).
    # A user can have at most one pending invitation per inviting member.

    dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('django_etebase', '0002_userinfo')]
    operations = [migrations.CreateModel(name='CollectionInvitation', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('uid', models.CharField(db_index=True, max_length=44, validators=[django.core.validators.RegexValidator(message='Expected a 256bit base64url.', regex='^[a-zA-Z0-9\\-_]{43}$')])), ('signedEncryptionKey', models.BinaryField()), ('accessLevel', models.CharField(choices=[('adm', 'Admin'), ('rw', 'Read Write'), ('ro', 'Read Only')], default='ro', max_length=3)), ('fromMember', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django_etebase.CollectionMember')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='incoming_invitations', to=settings.AUTH_USER_MODEL))], options={'unique_together': {('user', 'fromMember')}})]
class MigrationMongoInterface(MongoInterface):
    """Read interface to the legacy MongoDB store used during migration.

    Converts raw firmware / file-object documents into ``Firmware`` and
    ``FileObject`` instances and restores "sanitized" analysis results
    (large entries that were swapped out into GridFS).
    """

    def _setup_database_mapping(self):
        """Bind collection handles for the main and sanitize databases."""
        main_database = self.config['data-storage']['main-database']
        self.main = self.client[main_database]
        self.firmwares = self.main.firmwares
        self.file_objects = self.main.file_objects
        self.compare_results = self.main.compare_results
        sanitize_db = self.config['data-storage'].get('sanitize-database', 'faf_sanitize')
        self.sanitize_storage = self.client[sanitize_db]
        # GridFS bucket holding the swapped-out ("sanitized") analysis blobs.
        self.sanitize_fs = gridfs.GridFS(self.sanitize_storage)

    def get_object(self, uid, analysis_filter=None):
        """Fetch a FileObject by UID, falling back to a Firmware lookup."""
        fo = self.get_file_object(uid, analysis_filter=analysis_filter)
        if fo is None:
            fo = self.get_firmware(uid, analysis_filter=analysis_filter)
        return fo

    def get_firmware(self, uid: str, analysis_filter: (list[str] | None) = None) -> (Firmware | None):
        """Fetch and convert a firmware document, or None if unknown."""
        firmware_entry = self.firmwares.find_one(uid)
        if firmware_entry:
            return self._convert_to_firmware(firmware_entry, analysis_filter=analysis_filter)
        logging.debug(f'No firmware with UID {uid} found.')
        return None

    def _convert_to_firmware(self, entry: dict, analysis_filter: Optional[list[str]] = None) -> Firmware:
        """Map a raw firmware document onto a Firmware instance."""
        firmware = Firmware()
        firmware.uid = entry['_id']
        firmware.size = entry['size']
        firmware.sha256 = entry['sha256']
        firmware.file_name = entry['file_name']
        firmware.device_name = entry['device_name']
        firmware.device_class = entry['device_class']
        firmware.release_date = convert_time_to_str(entry['release_date'])
        firmware.vendor = entry['vendor']
        firmware.version = entry['version']
        firmware.processed_analysis = self.retrieve_analysis(entry['processed_analysis'], analysis_filter=analysis_filter)
        firmware.files_included = set(entry['files_included'])
        firmware.virtual_file_path = entry['virtual_file_path']
        firmware.tags = entry.get('tags', {})
        firmware.set_part_name(entry.get('device_part', 'complete'))
        firmware.comments = entry.get('comments', [])
        return firmware

    def get_file_object(self, uid: str, analysis_filter: (list[str] | None) = None) -> (FileObject | None):
        """Fetch and convert a file-object document, or None if unknown."""
        file_entry = self.file_objects.find_one(uid)
        if file_entry:
            return self._convert_to_file_object(file_entry, analysis_filter=analysis_filter)
        logging.debug(f'No FileObject with UID {uid} found.')
        return None

    def _convert_to_file_object(self, entry: dict, analysis_filter: (list[str] | None) = None) -> FileObject:
        """Map a raw file-object document onto a FileObject instance."""
        file_object = FileObject()
        file_object.uid = entry['_id']
        file_object.size = entry['size']
        file_object.sha256 = entry['sha256']
        file_object.file_name = entry['file_name']
        file_object.virtual_file_path = entry['virtual_file_path']
        file_object.parents = entry['parents']
        file_object.processed_analysis = self.retrieve_analysis(entry['processed_analysis'], analysis_filter=analysis_filter)
        file_object.files_included = set(entry['files_included'])
        file_object.parent_firmware_uids = set(entry['parent_firmware_uids'])
        file_object.comments = entry.get('comments', [])
        return file_object

    def retrieve_analysis(self, sanitized_dict: dict, analysis_filter: (list[str] | None) = None) -> dict:
        """Restore sanitized analysis entries (stored in GridFS) in place.

        Only plugins listed in ``analysis_filter`` are processed when a
        filter is given; retrieval errors are logged and leave the entry
        untouched.
        """
        if analysis_filter is None:
            plugins = sanitized_dict.keys()
        else:
            plugins = set(sanitized_dict.keys()).intersection(analysis_filter)
        for key in plugins:
            try:
                if sanitized_dict[key]['file_system_flag']:
                    logging.debug(f'Retrieving stored file {key}')
                    sanitized_dict[key].pop('file_system_flag')
                    sanitized_dict[key] = self._retrieve_binaries(sanitized_dict, key)
                else:
                    sanitized_dict[key].pop('file_system_flag')
            except (KeyError, IndexError, AttributeError, TypeError, pickle.PickleError, gridfs.errors.NoFile):
                logging.exception(f'''Could not retrieve sanitized analysis:
{sanitized_dict.get(key, {})}''')
        return sanitized_dict

    def _retrieve_binaries(self, sanitized_dict, key):
        """Load all GridFS-stored fields of one plugin's analysis result."""
        tmp_dict = {}
        for analysis_key in sanitized_dict[key]:
            if self.is_not_sanitized(analysis_key, sanitized_dict[key]):
                tmp_dict[analysis_key] = sanitized_dict[key][analysis_key]
            else:
                logging.debug(f'Retrieving {analysis_key}')
                tmp = self.sanitize_fs.get_last_version(sanitized_dict[key][analysis_key])
                if tmp is not None:
                    try:
                        report = pickle.loads(tmp.read())
                    except ModuleNotFoundError:
                        logging.error(f'Could not load sanitized dict: {sanitized_dict[key][analysis_key]}')
                        report = {}
                else:
                    logging.error(f'Sanitized file not found: {sanitized_dict[key][analysis_key]}')
                    report = {}
                tmp_dict[analysis_key] = report
        return tmp_dict

    @staticmethod
    def is_not_sanitized(field, analysis_result):
        """True when the entry is stored inline rather than in GridFS.

        BUG fix: this is invoked as ``self.is_not_sanitized(key, result)``
        (two arguments) but had neither a ``self`` parameter nor a
        decorator, so every call raised TypeError; the missing
        ``@staticmethod`` is restored.
        """
        return (field in ['summary', 'tags']) and (not isinstance(analysis_result[field], str))
def run_baker_ts_opts(geoms, meta, coord_type='cart', thresh='baker', runid=0):
    """Run TS optimizations over a set of Baker test geometries and compare
    the final energies against their references.

    geoms: mapping name -> geometry; meta: name -> (charge, mult, ref_energy).
    NOTE(review): ``coord_type`` is accepted but never used in this body —
    confirm whether it should reach the geometry/optimizer setup.
    Returns (results, duration, cycles) where ``results`` maps each name to
    (cycles_needed, converged_flag).
    """
    start = time.time()
    converged = 0
    failed = 0
    cycles = 0
    opt_kwargs = {'thresh': thresh, 'max_cycles': 100, 'dump': True, 'trust_radius': 0.3, 'trust_max': 0.3}
    results = dict()
    for (i, (name, geom)) in enumerate(geoms.items()):
        print(f' {name}')
        (charge, mult, ref_energy) = meta[name]
        calc_kwargs = {'charge': charge, 'mult': mult, 'pal': 4}
        geom.set_calculator(Gaussian16(route='HF/3-21G', **calc_kwargs))
        geom = augment_bonds(geom)
        opt = RSIRFOptimizer(geom, **opt_kwargs)
        opt.run()
        if opt.is_converged:
            converged += 1
        else:
            failed += 1
        cycles += (opt.cur_cycle + 1)
        energies_match = np.allclose(geom.energy, ref_energy)
        try:
            # Same comparison as above, re-done so a mismatch prints the
            # red diagnostic instead of aborting the whole run.
            assert np.allclose(geom.energy, ref_energy)
            print(green(f' MATCH for {name}! ({geom.energy:.6f}, {ref_energy:.6f})'))
        except AssertionError as err:
            print(red(f" energy {geom.energy:.6f} and reference energy {ref_energy:.6f} DON'T MATCH'."))
        print()
        # NOTE(review): ``converged & energies_match`` bitwise-ANDs the
        # running int with the *current* geometry's bool only — confirm this
        # per-iteration summary is intended (the final summary below uses
        # plain ``converged``).
        print_summary((converged & energies_match), failed, cycles, i, runid)
        print()
        results[name] = ((opt.cur_cycle + 1), opt.is_converged)
        pprint(results)
        print()
    end = time.time()
    duration = (end - start)
    print(f' runtime: {duration:.1f} s')
    # ``i`` here is the index from the last loop iteration.
    print_summary(converged, failed, cycles, i, runid)
    return (results, duration, cycles)
class OptionSeriesParetoMarkerStatesHover(Options):
    """Hover-state marker options for pareto series (generated Highcharts
    wrapper).

    BUG fix: each getter was immediately shadowed by a same-named setter
    ``def``, leaving the getters unreachable; the ``@property`` /
    ``@<name>.setter`` decorators used by these wrappers are restored.
    """

    @property
    def animation(self) -> 'OptionSeriesParetoMarkerStatesHoverAnimation':
        # Sub-options container; read-only accessor.
        return self._config_sub_data('animation', OptionSeriesParetoMarkerStatesHoverAnimation)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get(None)

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def radiusPlus(self):
        return self._config_get(2)

    @radiusPlus.setter
    def radiusPlus(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsAreasplinerangeLowmarkerStatesSelect(Options):
    """Select-state low-marker options for areasplinerange plots (generated
    Highcharts wrapper).

    BUG fix: each getter was shadowed by a same-named setter ``def``,
    leaving the getters unreachable; the ``@property`` / ``@<name>.setter``
    decorators used by these wrappers are restored.
    """

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        return self._config_get('#cccccc')

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#000000')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(2)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesBubbleSonificationTracksMappingPlaydelay(Options):
    """Play-delay mapping options for bubble-series sonification tracks
    (generated Highcharts wrapper).

    BUG fix: each getter was shadowed by a same-named setter ``def``,
    leaving the getters unreachable; the ``@property`` / ``@<name>.setter``
    decorators used by these wrappers are restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class KobatoChanDaiSukiPageProcessor(BaseFontRemapProcessor):
    """Font-remap page processor for the KobatoChanDaiSuki site."""

    wanted_mimetypes = ['text/html']
    want_priority = 80
    loggerPath = 'Main.Text.KobatoChanDaiSuki'

    @staticmethod
    def wantsUrl(url):
        """Return True for URLs this processor should handle.

        BUG fix: the original ``re.search('^ url)`` was a corrupted,
        unparsable literal, and the method lacked both ``self`` and a
        decorator. NOTE(review): the URL pattern is reconstructed from the
        site name only — confirm the exact regex against upstream.
        """
        if re.search(r'kobatochan\.com', url):
            print("KobatoChanDaiSukiProcessor Wants url: '%s'" % url)
            return True
        return False
class CurrentPrivacyPreference(LastSavedMixin, Base):
    """Latest saved privacy preference per identity and per notice / TCF
    attribute; the unique constraints enforce one row per combination."""

    preference = Column(EnumColumn(UserConsentPreference), nullable=False, index=True)
    privacy_preference_history_id = Column(String, ForeignKey(PrivacyPreferenceHistory.id), nullable=False, index=True)
    # One row per (identity, attribute) pair, for both verified provided
    # identities and fides user device identities.
    __table_args__ = (UniqueConstraint('provided_identity_id', 'privacy_notice_id', name='identity_privacy_notice'), UniqueConstraint('fides_user_device_provided_identity_id', 'privacy_notice_id', name='fides_user_device_identity_privacy_notice'), UniqueConstraint('provided_identity_id', 'purpose_consent', name='identity_purpose_consent'), UniqueConstraint('provided_identity_id', 'purpose_legitimate_interests', name='identity_purpose_leg_interests'), UniqueConstraint('fides_user_device_provided_identity_id', 'purpose_consent', name='fides_user_device_identity_purpose_consent'), UniqueConstraint('fides_user_device_provided_identity_id', 'purpose_legitimate_interests', name='fides_user_device_identity_purpose_leg_interests'), UniqueConstraint('provided_identity_id', 'special_purpose', name='identity_special_purpose'), UniqueConstraint('fides_user_device_provided_identity_id', 'special_purpose', name='fides_user_device_identity_special_purpose'), UniqueConstraint('provided_identity_id', 'vendor_consent', name='identity_vendor_consent'), UniqueConstraint('provided_identity_id', 'vendor_legitimate_interests', name='identity_vendor_leg_interests'), UniqueConstraint('fides_user_device_provided_identity_id', 'vendor_consent', name='fides_user_device_identity_vendor_consent'), UniqueConstraint('fides_user_device_provided_identity_id', 'vendor_legitimate_interests', name='fides_user_device_identity_vendor_leg_interests'), UniqueConstraint('provided_identity_id', 'system_consent', name='identity_system_consent'), UniqueConstraint('provided_identity_id', 'system_legitimate_interests', name='identity_system_leg_interests'), UniqueConstraint('fides_user_device_provided_identity_id', 'system_consent', name='fides_user_device_identity_system_consent'), UniqueConstraint('fides_user_device_provided_identity_id', 'system_legitimate_interests', name='fides_user_device_identity_system_leg_interests'), UniqueConstraint('provided_identity_id', 'feature', name='identity_feature'),
                      UniqueConstraint('fides_user_device_provided_identity_id', 'feature', name='fides_user_device_identity_feature'), UniqueConstraint('provided_identity_id', 'special_feature', name='identity_special_feature'), UniqueConstraint('fides_user_device_provided_identity_id', 'special_feature', name='fides_user_device_identity_special_feature'))
    privacy_preference_history = relationship(PrivacyPreferenceHistory, cascade='delete, delete-orphan', single_parent=True)

    @classmethod
    def get_preference_by_type_and_fides_user_device(cls, db: Session, fides_user_provided_identity: ProvidedIdentity, preference_type: ConsentRecordType, preference_value: Union[int, str]) -> Optional['CurrentPrivacyPreference']:
        """Look up the saved preference for a fides user device identity and
        a given preference attribute/value.

        BUG fix: the ``cls`` first parameter shows this was a classmethod
        whose ``@classmethod`` decorator had been lost; it is restored.
        """
        return db.query(CurrentPrivacyPreference).filter((CurrentPrivacyPreference.fides_user_device_provided_identity_id == fides_user_provided_identity.id), (CurrentPrivacyPreference.__table__.c[preference_type.value] == preference_value)).first()
def getTemporaryPath(cmdenv):
    """Return the path of the temporary prices file, removing any stale
    leftover copy first.

    Raises TemporaryFileExistsError when a previous temp file exists and
    ``cmdenv.force`` is not set, so an edit in progress elsewhere is not
    clobbered.
    """
    tmpPath = pathlib.Path('prices.tmp')
    if tmpPath.exists():
        if not cmdenv.force:
            # Message fix: the parenthetical previously lacked its closing ')'.
            raise TemporaryFileExistsError("Temporary file already exists: {}\n(Check you aren't already editing in another window)".format(tmpPath))
        tmpPath.unlink()
    return tmpPath
class SwapQuotes(bh_plugin.BracketPluginCommand):
    """Bracket-highlighter command that swaps a quoted string's delimiters
    between single and double quotes, re-escaping the body as needed."""

    def escaped(self, idx):
        """Return whether the character at ``idx + 1`` is backslash-escaped.

        Walks left over consecutive backslashes; an odd count means escaped.
        NOTE(review): the toggle uses bitwise ``~`` on a bool/int
        (False -> -1 truthy, -1 -> 0 falsy), which does alternate
        truthiness — confirm this was intended rather than ``not escaped``.
        """
        view = self.view
        escaped = False
        while ((idx >= 0) and (view.substr(idx) == '\\')):
            escaped = (~ escaped)
            idx -= 1
        return escaped

    def run(self, edit, name):
        """Swap the surrounding quote characters, then walk the string body
        fixing escapes, tracking how each edit shifts later offsets."""
        view = self.view
        quote = view.substr(self.left.begin)
        # Only act on plain single/double-quoted strings.
        if ((quote != "'") and (quote != '"')):
            return
        new = ("'" if (quote == '"') else '"')
        old = quote
        begin = self.left.end
        end = self.right.begin
        content_end = self.right.begin
        # Replace the opening and closing delimiters themselves.
        view.replace(edit, self.left.toregion(), view.substr(self.left.toregion()).replace(old, new))
        view.replace(edit, self.right.toregion(), view.substr(self.right.toregion()).replace(old, new))
        offset = 0
        while (begin < (end + offset)):
            char = view.substr(begin)
            if ((char == old) and self.escaped((begin - 1))):
                # Old quote no longer needs escaping: drop its backslash.
                view.replace(edit, sublime.Region((begin - 1), begin), '')
                offset -= 1
                content_end -= 1
            elif ((char == new) and (not self.escaped((begin - 1)))):
                # A bare new-quote char inside the body must now be escaped.
                view.insert(edit, begin, '\\')
                offset += 1
                content_end += 1
            begin += 1
        # Re-anchor the right delimiter and leave the caret at the string end.
        self.right = self.right.move(content_end, (end + offset))
        self.selection = [sublime.Region(content_end)]
class OptionSeriesOrganizationSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Time mapping options for organization-series sonification (generated
    Highcharts wrapper).

    BUG fix: each getter was shadowed by a same-named setter ``def``,
    leaving the getters unreachable; the ``@property`` / ``@<name>.setter``
    decorators used by these wrappers are restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Solution():
    def wordsAbbreviation(self, dict: List[str]) -> List[str]:
        """Return the shortest unique abbreviation for each word.

        An abbreviation keeps a prefix, the count of elided characters,
        and the last character; when several words would share an
        abbreviation, their prefixes grow together until all differ.
        Falls back to the full word when abbreviating saves nothing.
        (The parameter name ``dict`` shadows the builtin but is kept for
        interface compatibility.)
        """

        def abbrev(word, prefix_len):
            # Not worth abbreviating: the result would be no shorter.
            if prefix_len > len(word) - 3:
                return word
            return word[:prefix_len] + str(len(word) - 1 - prefix_len) + word[-1]

        result = [abbrev(word, 1) for word in dict]
        prefix_len = {word: 1 for word in dict}
        total = len(dict)
        for i in range(total):
            # Grow prefixes until word i's abbreviation clashes with no later word.
            while True:
                clashes = [j for j in range(i, total) if result[i] == result[j]]
                if len(clashes) == 1:
                    break
                for j in clashes:
                    prefix_len[dict[j]] += 1
                    result[j] = abbrev(dict[j], prefix_len[dict[j]])
        return result
def get_bridge_by_datapath_id(manager, system_id, datapath_id, fn=None):
    """Find the OVSDB Bridge row whose datapath id matches.

    When ``fn`` is given, it is applied to the matched bridge and its
    result returned; otherwise the bridge row itself is returned.
    """
    def _match_fn(row):
        # Rows store the dpid as a string; normalise before comparing.
        return dpidlib.str_to_dpid(str(row.datapath_id[0])) == datapath_id

    bridge = match_row(manager, system_id, 'Bridge', _match_fn)
    return fn(bridge) if fn is not None else bridge
class RaisingContainer(collections.abc.Sequence):
    """A fixed-size sequence of fifteen 1729s whose membership test
    refuses to answer for any other value (raises TypeError)."""

    _SIZE = 15
    _VALUE = 1729

    def __len__(self):
        return self._SIZE

    def __getitem__(self, index):
        if 0 <= index < self._SIZE:
            return self._VALUE
        raise IndexError('Index out of range')

    def __contains__(self, value):
        if value == self._VALUE:
            return True
        raise TypeError('My contents are my own private business!')
def add_filter(fledge_url, filter_plugin, filter_name, filter_config, plugin_to_filter):
    """Create a filter via the Fledge REST API and append it to a service's pipeline.

    Args:
        fledge_url: host[:port] of the Fledge API endpoint.
        filter_plugin: name of the filter plugin to instantiate.
        filter_name: name to give the new filter instance.
        filter_config: plugin configuration dict for the filter.
        plugin_to_filter: name of the service whose pipeline gets the filter.

    Returns:
        Parsed JSON response of the pipeline update call.
    """
    # NOTE(review): the HTTPConnection construction was missing in this block
    # (a bare `conn =` line, a syntax error); restored following the usual
    # Fledge system-test helper pattern — confirm against sibling helpers.
    import http.client

    conn = http.client.HTTPConnection(fledge_url)
    data = {'name': '{}'.format(filter_name), 'plugin': '{}'.format(filter_plugin), 'filter_config': filter_config}
    conn.request('POST', '/fledge/filter', json.dumps(data))
    r = conn.getresponse()
    assert 200 == r.status
    jdoc = json.loads(r.read().decode())
    assert filter_name == jdoc['filter']
    # allow_duplicates/append_filter: append to any pipeline entries already present.
    uri = '{}/pipeline?allow_duplicates=true&append_filter=true'.format(quote(plugin_to_filter))
    filters_in_pipeline = [filter_name]
    conn.request('PUT', ('/fledge/filter/' + uri), json.dumps({'pipeline': filters_in_pipeline}))
    r = conn.getresponse()
    assert 200 == r.status
    jdoc = json.loads(r.read().decode())
    assert filter_name in jdoc['result']
    return jdoc
# NOTE(review): the `@pytest.mark` decorator prefix and the literal dynamic-fee
# and chain-id values in this parametrization were lost in extraction (bare
# `'maxFeePerGas': ,` entries). The constants below follow web3.py's test
# defaults — confirm against the upstream test suite.
_MAX_FEE = 2750000000
_MAX_PRIORITY_FEE = 10 ** 9
_CHAIN_ID = 131277322940537


@pytest.mark.parametrize(
    'transaction_args,method_args,method_kwargs,expected,skip_testrpc',
    (
        ({}, (5,), {}, {'data': '0x6abbb3b', 'value': 0, 'maxFeePerGas': _MAX_FEE, 'maxPriorityFeePerGas': _MAX_PRIORITY_FEE, 'chainId': _CHAIN_ID}, False),
        ({'gas': 800000}, (5,), {}, {'data': '0x6abbb3b', 'value': 0, 'maxFeePerGas': _MAX_FEE, 'maxPriorityFeePerGas': _MAX_PRIORITY_FEE, 'chainId': _CHAIN_ID}, False),
        ({'gasPrice': (22 ** 8)}, (5,), {}, {'data': '0x6abbb3b', 'value': 0, 'gasPrice': (22 ** 8), 'chainId': _CHAIN_ID}, False),
        ({'maxFeePerGas': (22 ** 8), 'maxPriorityFeePerGas': (22 ** 8)}, (5,), {}, {'data': '0x6abbb3b', 'value': 0, 'maxFeePerGas': (22 ** 8), 'maxPriorityFeePerGas': (22 ** 8), 'chainId': _CHAIN_ID}, False),
        ({'nonce': 7}, (5,), {}, {'data': '0x6abbb3b', 'value': 0, 'maxFeePerGas': _MAX_FEE, 'maxPriorityFeePerGas': _MAX_PRIORITY_FEE, 'nonce': 7, 'chainId': _CHAIN_ID}, True),
        ({'value': 20000}, (5,), {}, {'data': '0x6abbb3b', 'value': 20000, 'maxFeePerGas': _MAX_FEE, 'maxPriorityFeePerGas': _MAX_PRIORITY_FEE, 'chainId': _CHAIN_ID}, False),
    ),
    ids=['Standard', 'Explicit Gas', 'Explicit Gas Price', 'Explicit Dynamic Fees', 'Explicit Nonce', 'With Value'],
)
def test_build_transaction_with_contract_arguments(w3, skip_if_testrpc, math_contract, transaction_args, method_args, method_kwargs, expected, skip_testrpc, build_transaction):
    """A built contract call transaction should match the expected tx dict
    (modulo the estimated `gas` field, which is only checked for presence
    unless explicitly supplied)."""
    if skip_testrpc:
        skip_if_testrpc(w3)
    txn = build_transaction(contract=math_contract, contract_function='incrementCounter', func_args=method_args, func_kwargs=method_kwargs, tx_params=transaction_args)
    expected['to'] = math_contract.address
    assert (txn is not None)
    if ('gas' in transaction_args):
        assert (txn['gas'] == transaction_args['gas'])
    else:
        assert ('gas' in txn)
    # Compare everything except the estimated gas value.
    assert (dissoc(txn, 'gas') == expected)
def test_full_initialized_data_dir_with_custom_nodekey():
    """A data dir with every expected path created and an explicit nodekey
    should be reported as initialized, with no nodekey file on disk."""
    trinity_config = TrinityConfig(network_id=1, nodekey=NODEKEY)
    required_paths = (
        trinity_config.data_dir,
        trinity_config.logfile_path,
        trinity_config.ipc_dir,
        trinity_config.pid_dir,
        trinity_config.enr_db_dir,
    )
    for required_path in required_paths:
        os.makedirs(required_path, exist_ok=True)
    trinity_config.logfile_path.touch()
    # The key was supplied directly, so there is no nodekey path on disk.
    assert trinity_config.nodekey_path is None
    assert trinity_config.nodekey is not None
    assert is_data_dir_initialized(trinity_config)
def test_create_categorical_plot():
    """Categorical plots accept int and (value, count) tuple inputs; raw
    float arrays must raise NotImplementedError."""
    int_values = [int(i) for i in np.random.randint(low=0, high=10, size=100)]
    plt.close(create_categorical_plot(int_values))
    tuple_values = [(v, 10) for v in np.random.randint(low=0, high=10, size=100)]
    plt.close(create_categorical_plot(tuple_values))
    with raises(NotImplementedError):
        create_categorical_plot(np.random.random(100))
def test_predict_num_roles():
    """Train then predict on two fixture CSVs and verify the number of
    predictions returned for each."""
    # Expected prediction counts per fixture file.
    expected_counts = {
        'combined_three_roles.csv': 6,
        'combined_two_roles.csv': 4,
    }
    with tempfile.TemporaryDirectory() as tmpdir:
        testdata = os.path.join(tmpdir, 'test_data')
        shutil.copytree('./tests/test_data', testdata)
        for file, expected in expected_counts.items():
            input_file = os.path.join(testdata, file)
            for operation in ('train', 'predict'):
                sys.argv = hf_args(tmpdir, operation, input_file)
                instance = HostFootprint()
                instance.main()
            predictions = json.loads(instance.predict())
            assert isinstance(predictions, dict)
            assert len(predictions) == expected
class DahoasRMStaticDataset(Dataset):
    """Dahoas/rm-static preference pairs tokenized for reward-model training.

    Each item is a (pair, mask) tuple of (2, 1, block_size) tensors holding
    the stacked (chosen, rejected) token ids and their attention masks.
    """

    def __init__(self, block_size, split='train', max_examples=None, tokenizer_name='tiktoken/gpt2') -> None:
        """Load and tokenize the given split.

        Args:
            block_size: max token length per example (pad/truncate target).
            split: dataset split name.
            max_examples: optional cap on the number of pairs loaded.
            tokenizer_name: one of 'huggingface/gpt2', 'huggingface/gpt2fast',
                'tiktoken/gpt2'.

        Raises:
            ValueError: if tokenizer_name is not recognized.
        """
        super().__init__()
        dataset = load_dataset('Dahoas/rm-static', split=split)
        self.pairs = []
        self.masks = []
        if tokenizer_name == 'huggingface/gpt2':
            tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
            tokenizer.pad_token = tokenizer.eos_token
        elif tokenizer_name == 'huggingface/gpt2fast':
            tokenizer = GPT2TokenizerFast.from_pretrained('gpt2')
        elif tokenizer_name == 'tiktoken/gpt2':
            tokenizer = TiktokenTokenizer('gpt2')
        else:
            # Previously fell through and raised an obscure NameError on the
            # first tokenizer use; fail fast with a clear message instead.
            raise ValueError(f'unknown tokenizer_name: {tokenizer_name!r}')
        cnt = 0
        print(f'Loading DahoasRMStaticDataset {split} split')
        for data in dataset:
            cnt += 1
            prompt = data['prompt']
            positive_text = prompt + data['chosen'] + '<|endoftext|>'
            positive = tokenizer(positive_text, max_length=block_size, padding='max_length', truncation=True, return_tensors='pt')
            negative_text = prompt + data['rejected'] + '<|endoftext|>'
            negative = tokenizer(negative_text, max_length=block_size, padding='max_length', truncation=True, return_tensors='pt')
            # Stack chosen/rejected along a new leading axis so each item
            # carries both completions of the comparison.
            self.pairs.append(torch.stack((positive['input_ids'], negative['input_ids']), dim=0))
            self.masks.append(torch.stack((positive['attention_mask'], negative['attention_mask']), dim=0))
            if max_examples and cnt >= max_examples:
                break

    @classmethod
    def save(cls, split, fp):
        """Dump the split's prompt+chosen strings to `fp` as a JSON list.

        NOTE(review): the @classmethod decorator was missing in the extracted
        block although the first parameter is `cls`; restored.
        """
        dataset = load_dataset('Dahoas/rm-static', split=split)
        examples = []
        for data in tqdm(dataset):
            examples.append(data['prompt'] + data['chosen'])
        import json
        json.dump(examples, fp)

    def __len__(self):
        return len(self.pairs)

    def __getitem__(self, idx):
        return (self.pairs[idx], self.masks[idx])
def load_result_format(lab_test, template, prescription, invoice):
    """Populate a Lab Test document's result child tables from its template.

    Dispatches on ``template.lab_test_template_type`` ('Single', 'Compound',
    'Descriptive', 'Imaging', 'Grouped'). For 'Grouped', each group entry is
    either a nested template (expanded into a heading row plus its own rows)
    or an ad-hoc normal-test row built from the group fields. The document is
    only saved (and prescription/invoice linkage applied) when the type is
    not 'No Result'.

    Returns the (possibly saved) lab_test document.
    """
    if (template.lab_test_template_type == 'Single'):
        create_normals(template, lab_test)
    elif (template.lab_test_template_type == 'Compound'):
        create_compounds(template, lab_test, False)
    elif (template.lab_test_template_type == 'Descriptive'):
        create_descriptives(template, lab_test)
    elif (template.lab_test_template_type == 'Imaging'):
        create_imaging(template, lab_test)
    elif (template.lab_test_template_type == 'Grouped'):
        # Expand each group entry: nested templates first, then ad-hoc rows.
        for lab_test_group in template.lab_test_groups:
            if lab_test_group.lab_test_template:
                template_in_group = frappe.get_doc('Lab Test Template', lab_test_group.lab_test_template)
                if template_in_group:
                    if (template_in_group.lab_test_template_type == 'Single'):
                        create_normals(template_in_group, lab_test)
                    elif (template_in_group.lab_test_template_type == 'Compound'):
                        # Heading row (no result value) followed by the
                        # compound's own rows.
                        normal_heading = lab_test.append('normal_test_items')
                        normal_heading.lab_test_name = template_in_group.lab_test_name
                        normal_heading.require_result_value = 0
                        normal_heading.allow_blank = 1
                        normal_heading.template = template_in_group.name
                        create_compounds(template_in_group, lab_test, True)
                    elif (template_in_group.lab_test_template_type == 'Descriptive'):
                        descriptive_heading = lab_test.append('descriptive_test_items')
                        descriptive_heading.lab_test_name = template_in_group.lab_test_name
                        descriptive_heading.require_result_value = 0
                        descriptive_heading.allow_blank = 1
                        descriptive_heading.template = template_in_group.name
                        create_descriptives(template_in_group, lab_test)
            else:
                # Group entry without a nested template: build a plain
                # normal-test row from the group's own fields.
                normal = lab_test.append('normal_test_items')
                normal.lab_test_name = lab_test_group.group_event
                normal.lab_test_uom = lab_test_group.group_test_uom
                normal.secondary_uom = lab_test_group.secondary_uom
                normal.conversion_factor = lab_test_group.conversion_factor
                normal.normal_range = lab_test_group.group_test_normal_range
                normal.allow_blank = lab_test_group.allow_blank
                normal.require_result_value = 1
                normal.template = template.name
    if (template.lab_test_template_type != 'No Result'):
        if prescription:
            lab_test.prescription = prescription
            if invoice:
                # NOTE(review): marks the linked Service Request completed when
                # an invoice exists — confirm this status transition is intended
                # here rather than at invoice submission.
                frappe.db.set_value('Service Request', lab_test.service_request, 'status', 'Completed')
        lab_test.save(ignore_permissions=True)
    return lab_test
def fortios_gtp(data, fos, check_mode):
    """Run the gtp tunnel-limit task and normalize the module result tuple.

    Returns (failed, changed, response, diff) for Ansible, or just the raw
    response in check mode.
    """
    fos.do_member_operation('gtp', 'tunnel-limit')
    if not data['gtp_tunnel_limit']:
        # No task body supplied; fail_json aborts module execution.
        fos._module.fail_json(msg=('missing task body: %s' % 'gtp_tunnel_limit'))
    else:
        resp = gtp_tunnel_limit(data, fos, check_mode)
    if check_mode:
        return resp
    revision_changed = resp['revision_changed'] if 'revision_changed' in resp else True
    return (
        (not is_successful_status(resp)),
        (is_successful_status(resp) and revision_changed),
        resp,
        {},
    )
def parseruleline(linestr, rulenum=(- 1)):
    """Substitute placeholders in one rule-script line and classify the line.

    Replaces [task#value], %eventvalueN% and %sysvar% placeholders using the
    module globals (EventValues, SysVars, GlobalVars), then classifies the
    line, returning a (cline, state) tuple where state is one of:
      'CMD'   - plain command line (default)
      'IFST'  - an `if` line; cline holds the eval()'d boolean condition
      'IFEN'  - endif marker
      'IFEL'  - else marker
      'BREAK' - a `breakon` line
      'INV'   - invalid placeholder syntax

    NOTE(review): `if` conditions are evaluated with eval() — rule text is
    treated as trusted input.
    """
    global GlobalRules, EventValues, SysVars
    cline = linestr.strip()
    state = 'CMD'
    if ('[' in linestr):
        # [name] / [task#property] placeholders.
        m = re.findall('\\[([A-Za-z0-9_#\\-]+)\\]', linestr)
        if (len(m) > 0):
            for r in range(len(m)):
                tval = str(gettaskvaluefromname(m[r]))
                if (tval == 'None'):
                    # Not a task value; try the [int#N]/[var#N] global-variable forms.
                    try:
                        taskprop = m[r].split('#')
                        taskprop[0] = taskprop[0].strip().lower()
                        taskprop[1] = taskprop[1].strip().lower()
                    except:
                        state = 'INV'
                        tval = 'None'
                        taskprop = []
                    if (len(taskprop) > 0):
                        if (taskprop[0] in ['int', 'var']):
                            try:
                                # Placeholder indices are 1-based; storage is 0-based.
                                sid = (int(taskprop[1]) - 1)
                            except Exception as e:
                                sid = (- 1)
                            if ((sid >= 0) and (sid < 16)):
                                if (taskprop[0] == 'int'):
                                    tval = int(float(GlobalVars[sid]))
                                else:
                                    tval = GlobalVars[sid]
                            else:
                                state = 'INV'
                if (tval != 'None'):
                    cline = cline.replace((('[' + m[r]) + ']'), str(tval))
        else:
            print('Please avoid special characters in names! ', linestr)
    if ('%eventvalue' in linestr):
        # %eventvalue% is an alias for the first event value; %eventvalueN% is 1-based.
        try:
            cline = cline.replace('%eventvalue%', str(EventValues[0]))
        except:
            cline = cline.replace('%eventvalue%', '-1')
        for i in range(len(EventValues)):
            try:
                cline = cline.replace((('%eventvalue' + str((i + 1))) + '%'), str(EventValues[i]))
            except:
                pass
    if ('%' in cline):
        # %sysvar% and %name+N%/%name-N% style global variable substitutions.
        m = re.findall('\\%([A-Za-z0-9_#\\+\\-]+)\\%', cline)
        if (len(m) > 0):
            for r in range(len(m)):
                if (m[r].lower() in SysVars):
                    cline = cline.replace((('%' + m[r]) + '%'), str(getglobalvar(m[r])))
                elif (('-' in m[r]) or ('+' in m[r])):
                    val = str(getglobalvar(m[r]))
                    if (val != ''):
                        cline = cline.replace((('%' + m[r]) + '%'), val)
    cline = parseconversions(cline)
    equ = getfirstequpos(cline)
    if (equ != (- 1)):
        if (cline[:3].lower() == 'if '):
            # Normalize single '=' to '==' for eval, then undo the damage done
            # to '!=', '>=' and '<=' by that blanket replace.
            if ('=' in getequchars(cline, True)):
                cline = cline.replace('=', '==')
                cline = cline.replace('!==', '!=')
                cline = cline.replace('>==', '>=')
                cline = cline.replace('<==', '<=')
            tline = cline
            if ((SysVars[0] in linestr) or (SysVars[1] in linestr)):
                # Convert HH:MM:SS then HH:MM time literals to seconds so the
                # condition compares plain numbers.
                m = re.findall('(?:[01]\\d|2[0-3]):(?:[0-5]\\d):(?:[0-5]\\d)', cline)
                for tm in m:
                    st = timeStringToSeconds(tm)
                    if (st != None):
                        st = str(st)
                        cline = cline.replace(tm, st)
                m = re.findall('(?:[01]\\d|2[0-3]):(?:[0-5]\\d)', cline)
                for tm in m:
                    st = timeStringToSeconds(tm)
                    if (st != None):
                        st = str(st)
                        cline = cline.replace(tm, st)
            state = 'IFST'
            try:
                # Evaluate the substituted condition; any error counts as False.
                cline = eval(cline[3:], globals())
            except:
                cline = False
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, ((('Parsed condition: ' + str(tline)) + ' ') + str(cline)))
    elif ('endif' in cline):
        cline = True
        state = 'IFEN'
    elif ('else' in cline):
        cline = False
        state = 'IFEL'
    elif cline.startswith('breakon'):
        cline = False
        state = 'BREAK'
    return (cline, state)
class HNSW(MutableMapping):
    """Hierarchical Navigable Small World index for approximate
    nearest-neighbor search, exposed as a MutableMapping from keys to points.

    Deletions are "soft" by default (the node is flagged ``is_deleted`` but
    kept in the graphs so searches can route through it) and can be "hard"
    (links repaired and the node removed), which requires the reversed-edges
    layer class.
    """

    def __init__(self, distance_func: Callable[[np.ndarray, np.ndarray], float], m: int = 16, ef_construction: int = 200, m0: Optional[int] = None, seed: Optional[int] = None, reversed_edges: bool = False) -> None:
        """Create an empty index.

        Args:
            distance_func: distance between two points; lower is closer.
            m: max neighbors per node on the upper layers.
            ef_construction: size of the dynamic candidate list at build time.
            m0: max neighbors on layer 0 (defaults to 2 * m).
            seed: seed for the level-assignment RNG.
            reversed_edges: maintain reverse edges (needed for hard removal).
        """
        self._nodes: OrderedDict[Hashable, _Node] = OrderedDict()
        self._distance_func = distance_func
        self._m = m
        self._ef_construction = ef_construction
        self._m0 = 2 * m if m0 is None else m0
        self._level_mult = 1 / np.log(m)
        self._graphs: List[_Layer] = []
        self._entry_point = None
        self._random = np.random.RandomState(seed)
        self._layer_class = _LayerWithReversedEdges if reversed_edges else _Layer

    def __len__(self) -> int:
        # Soft-deleted nodes are excluded from the logical size.
        return sum(not node.is_deleted for node in self._nodes.values())

    def __contains__(self, key: Hashable) -> bool:
        return key in self._nodes and not self._nodes[key].is_deleted

    def __getitem__(self, key: Hashable) -> np.ndarray:
        if key not in self:
            raise KeyError(key)
        return self._nodes[key].point

    def __setitem__(self, key: Hashable, value: np.ndarray) -> None:
        self.insert(key, value)

    def __delitem__(self, key: Hashable) -> None:
        self.remove(key)

    def __iter__(self) -> Iterator[Hashable]:
        return (key for key in self._nodes if not self._nodes[key].is_deleted)

    def reversed(self) -> Iterator[Hashable]:
        """Iterate live keys in reverse insertion order."""
        return (key for key in reversed(self._nodes) if not self._nodes[key].is_deleted)

    def __eq__(self, __value: object) -> bool:
        """Deep equality: parameters, RNG state, nodes and graphs must match."""
        if not isinstance(__value, HNSW):
            return False
        if ((self._distance_func != __value._distance_func) or (self._m != __value._m) or (self._ef_construction != __value._ef_construction) or (self._m0 != __value._m0) or (self._level_mult != __value._level_mult) or (self._entry_point != __value._entry_point)):
            return False
        # RandomState.get_state() mixes arrays and scalars; compare per item.
        rand_state_1 = self._random.get_state()
        rand_state_2 = __value._random.get_state()
        for i in range(len(rand_state_1)):
            if isinstance(rand_state_1[i], np.ndarray):
                if not np.array_equal(rand_state_1[i], rand_state_2[i]):
                    return False
            elif rand_state_1[i] != rand_state_2[i]:
                return False
        return (all(key in self._nodes for key in __value._nodes)
                and all(key in __value._nodes for key in self._nodes)
                and all(self._nodes[key] == __value._nodes[key] for key in self._nodes)
                and self._graphs == __value._graphs)

    def get(self, key: Hashable, default: Optional[np.ndarray] = None) -> Union[np.ndarray, None]:
        """Return the point stored at key, or default when absent/deleted."""
        if key not in self:
            return default
        return self._nodes[key].point

    def items(self) -> Iterator[Tuple[Hashable, np.ndarray]]:
        return ((key, node.point) for (key, node) in self._nodes.items() if not node.is_deleted)

    def keys(self) -> Iterator[Hashable]:
        return (key for key in self._nodes if not self._nodes[key].is_deleted)

    def values(self) -> Iterator[np.ndarray]:
        return (node.point for node in self._nodes.values() if not node.is_deleted)

    def pop(self, key: Hashable, default: Optional[np.ndarray] = None, hard: bool = False) -> np.ndarray:
        """Remove key and return its point; return default (if given) when absent."""
        if key not in self:
            if default is None:
                raise KeyError(key)
            return default
        point = self._nodes[key].point
        self.remove(key, hard=hard)
        return point

    def popitem(self, last: bool = True, hard: bool = False) -> Tuple[Hashable, np.ndarray]:
        """Remove and return a (key, point) pair in (reverse) insertion order."""
        if not self._nodes:
            raise KeyError('popitem(): index is empty')
        if last:
            key = next((key for key in reversed(self._nodes) if not self._nodes[key].is_deleted), None)
        else:
            key = next((key for key in self._nodes if not self._nodes[key].is_deleted), None)
        if key is None:
            raise KeyError('popitem(): index is empty')
        point = self._nodes[key].point
        self.remove(key, hard=hard)
        return (key, point)

    def clear(self) -> None:
        """Drop all nodes and layers."""
        # Use an OrderedDict to stay consistent with the declared node-store
        # type (previously reset to a plain dict).
        self._nodes = OrderedDict()
        self._graphs = []
        self._entry_point = None

    def copy(self) -> HNSW:
        """Return a deep-ish copy sharing no mutable graph/node state."""
        new_index = HNSW(self._distance_func, m=self._m, ef_construction=self._ef_construction, m0=self._m0)
        # Preserve the layer class; the constructor call above cannot know
        # whether this index was built with reversed edges.
        new_index._layer_class = self._layer_class
        new_index._nodes = OrderedDict((key, node.copy()) for (key, node) in self._nodes.items())
        new_index._graphs = [layer.copy() for layer in self._graphs]
        new_index._entry_point = self._entry_point
        new_index._random.set_state(self._random.get_state())
        return new_index

    def update(self, other: Union[Mapping, HNSW]) -> None:
        for (key, point) in other.items():
            self.insert(key, point)

    def setdefault(self, key: Hashable, default: np.ndarray) -> np.ndarray:
        """Insert default at key if absent/deleted; return the stored point."""
        if default is None:
            raise ValueError('Default value cannot be None.')
        if (key not in self._nodes) or self._nodes[key].is_deleted:
            self.insert(key, default)
        # BUGFIX: previously returned the internal _Node object instead of the
        # point, contradicting the annotation and get()'s behavior.
        return self._nodes[key].point

    def insert(self, key: Hashable, new_point: np.ndarray, ef: Optional[int] = None, level: Optional[int] = None) -> None:
        """Insert (or update) key with new_point.

        Args:
            ef: candidate-list size; defaults to ef_construction.
            level: force the node's top layer (normally drawn randomly).
        """
        if ef is None:
            ef = self._ef_construction
        if key in self._nodes:
            if self._nodes[key].is_deleted:
                # Resurrect a soft-deleted node, then treat as an update.
                self._nodes[key].is_deleted = False
            self._update(key, new_point, ef)
            return
        if level is None:
            # Exponentially-distributed level assignment.
            level = int(-np.log(self._random.random_sample()) * self._level_mult)
        self._nodes[key] = _Node(key, new_point)
        if self._entry_point is not None:
            dist = self._distance_func(new_point, self._nodes[self._entry_point].point)
            point = self._entry_point
            # Greedy descent through layers above the node's level.
            for layer in reversed(self._graphs[(level + 1):]):
                (point, dist) = self._search_ef1(new_point, point, dist, layer, allow_soft_deleted=True)
            entry_points = [(-dist, point)]
            for layer in reversed(self._graphs[:(level + 1)]):
                level_m = self._m if layer is not self._graphs[0] else self._m0
                entry_points = self._search_base_layer(new_point, entry_points, layer, ef, allow_soft_deleted=True)
                # Connect the new node, then re-prune each neighbor's edges.
                layer[key] = {p: d for (d, p) in self._heuristic_prune([(-mdist, p) for (mdist, p) in entry_points], level_m)}
                for (neighbor_key, dist) in layer[key].items():
                    layer[neighbor_key] = {p: d for (d, p) in self._heuristic_prune(([(d, p) for (p, d) in layer[neighbor_key].items()] + [(dist, key)]), level_m)}
        for _ in range(len(self._graphs), (level + 1)):
            # The node's level exceeds the graph height: it becomes the new
            # entry point of each newly created layer.
            self._graphs.append(self._layer_class(key))
            self._entry_point = key

    def _update(self, key: Hashable, new_point: np.ndarray, ef: int) -> None:
        """Replace key's point and rewire its neighborhoods on every layer."""
        if key not in self._nodes:
            raise KeyError(key)
        self._nodes[key].point = new_point
        if (self._entry_point == key) and (len(self._nodes) == 1):
            return
        for layer in self._graphs:
            if key not in layer:
                break
            layer_m = self._m if layer is not self._graphs[0] else self._m0
            # Second-order neighborhood of the updated node.
            neighborhood_keys = set([key])
            for p in layer[key].keys():
                neighborhood_keys.add(p)
                for p2 in layer[p].keys():
                    neighborhood_keys.add(p2)
            for p in layer[key].keys():
                # Rebuild each direct neighbor's candidate set from the
                # neighborhood and re-prune its edges.
                cands = []
                elem_to_keep = min(ef, len(neighborhood_keys) - 1)
                for candidate_key in neighborhood_keys:
                    if candidate_key == p:
                        continue
                    dist = self._distance_func(self._nodes[candidate_key].point, self._nodes[p].point)
                    if len(cands) < elem_to_keep:
                        heapq.heappush(cands, (-dist, candidate_key))
                    elif dist < -cands[0][0]:
                        heapq.heappushpop(cands, (-dist, candidate_key))
                layer[p] = {p2: d2 for (d2, p2) in self._heuristic_prune([(-md, p) for (md, p) in cands], layer_m)}
        self._repair_connections(key, new_point, ef)

    def _repair_connections(self, key: Hashable, new_point: np.ndarray, ef: int, key_to_delete: Optional[Hashable] = None) -> None:
        """Re-derive key's outgoing edges on every layer that contains it."""
        entry_point = self._entry_point
        entry_point_dist = self._distance_func(new_point, self._nodes[entry_point].point)
        entry_points = [(-entry_point_dist, entry_point)]
        for layer in reversed(self._graphs):
            if key not in layer:
                # Above the node's top layer: greedy descent only.
                (entry_point, entry_point_dist) = self._search_ef1(new_point, entry_point, entry_point_dist, layer, allow_soft_deleted=True, key_to_hard_delete=key_to_delete)
                entry_points = [(-entry_point_dist, entry_point)]
            else:
                level_m = self._m if layer is not self._graphs[0] else self._m0
                # ef+1 because the search may return key itself, filtered below.
                entry_points = self._search_base_layer(new_point, entry_points, layer, (ef + 1), allow_soft_deleted=True, key_to_hard_delete=key_to_delete)
                filtered_candidates = [(-md, p) for (md, p) in entry_points if p != key]
                layer[key] = {p: d for (d, p) in self._heuristic_prune(filtered_candidates, level_m)}

    def query(self, query_point: np.ndarray, k: Optional[int] = None, ef: Optional[int] = None) -> List[Tuple[Hashable, float]]:
        """Return up to k (key, distance) pairs nearest to query_point.

        Raises ValueError when the index is empty (no entry point).
        """
        if ef is None:
            ef = self._ef_construction
        if self._entry_point is None:
            raise ValueError('Entry point not found.')
        entry_point_dist = self._distance_func(query_point, self._nodes[self._entry_point].point)
        entry_point = self._entry_point
        for layer in reversed(self._graphs[1:]):
            (entry_point, entry_point_dist) = self._search_ef1(query_point, entry_point, entry_point_dist, layer)
        candidates = self._search_base_layer(query_point, [(-entry_point_dist, entry_point)], self._graphs[0], ef)
        if k is not None:
            # Max-heap on negated distance: nlargest == k closest.
            candidates = heapq.nlargest(k, candidates)
        else:
            candidates.sort(reverse=True)
        return [(key, -mdist) for (mdist, key) in candidates]

    def _search_ef1(self, query_point: np.ndarray, entry_point: Hashable, entry_point_dist: float, layer: _Layer, allow_soft_deleted: bool = False, key_to_hard_delete: Optional[Hashable] = None) -> Tuple[Hashable, float]:
        """Greedy search (ef=1) for the single closest point on one layer."""
        candidates = [(entry_point_dist, entry_point)]
        visited = set([entry_point])
        best = entry_point
        best_dist = entry_point_dist
        while candidates:
            (dist, curr) = heapq.heappop(candidates)
            if dist > best_dist:
                break
            neighbors = [p for p in layer[curr] if p not in visited]
            visited.update(neighbors)
            dists = [self._distance_func(query_point, self._nodes[p].point) for p in neighbors]
            for (p, d) in zip(neighbors, dists):
                if d < best_dist:
                    if ((not allow_soft_deleted) and self._nodes[p].is_deleted) or (p == key_to_hard_delete):
                        # Excluded nodes may be traversed but never returned.
                        pass
                    else:
                        (best, best_dist) = (p, d)
                    heapq.heappush(candidates, (d, p))
        return (best, best_dist)

    def _search_base_layer(self, query_point: np.ndarray, entry_points: List[Tuple[float, Hashable]], layer: _Layer, ef: int, allow_soft_deleted: bool = False, key_to_hard_delete: Optional[Hashable] = None) -> List[Tuple[float, Hashable]]:
        """Beam search on one layer keeping the ef best (-distance, key) pairs.

        entry_points is a min-heap of (-distance, key) and is returned updated.
        """
        candidates = [(-mdist, p) for (mdist, p) in entry_points]
        heapq.heapify(candidates)
        visited = set(p for (_, p) in entry_points)
        while candidates:
            (dist, curr_key) = heapq.heappop(candidates)
            closest_dist = -entry_points[0][0]
            if dist > closest_dist:
                break
            neighbors = [p for p in layer[curr_key] if p not in visited]
            visited.update(neighbors)
            dists = [self._distance_func(query_point, self._nodes[p].point) for p in neighbors]
            for (p, dist) in zip(neighbors, dists):
                if ((not allow_soft_deleted) and self._nodes[p].is_deleted) or (p == key_to_hard_delete):
                    # Excluded nodes can still route the search but are never
                    # added to the result set.
                    if dist <= closest_dist:
                        heapq.heappush(candidates, (dist, p))
                elif len(entry_points) < ef:
                    heapq.heappush(candidates, (dist, p))
                    heapq.heappush(entry_points, (-dist, p))
                    closest_dist = -entry_points[0][0]
                elif dist <= closest_dist:
                    heapq.heappush(candidates, (dist, p))
                    heapq.heapreplace(entry_points, (-dist, p))
                    closest_dist = -entry_points[0][0]
        return entry_points

    def _heuristic_prune(self, candidates: List[Tuple[float, Hashable]], max_size: int) -> List[Tuple[float, Hashable]]:
        """Prune (distance, key) candidates to max_size, preferring diversity.

        A candidate is kept only if it is closer to the query than to every
        already-kept neighbor (the standard HNSW select-neighbors heuristic).
        """
        if len(candidates) < max_size:
            return candidates
        heapq.heapify(candidates)
        pruned = []
        while candidates:
            if len(pruned) >= max_size:
                break
            (candidate_dist, candidate_key) = heapq.heappop(candidates)
            good = True
            for (_, selected_key) in pruned:
                dist_to_selected_neighbor = self._distance_func(self._nodes[selected_key].point, self._nodes[candidate_key].point)
                if dist_to_selected_neighbor < candidate_dist:
                    good = False
                    break
            if good:
                pruned.append((candidate_dist, candidate_key))
        return pruned

    def remove(self, key: Hashable, hard: bool = False, ef: Optional[int] = None) -> None:
        """Soft-delete key (default) or hard-remove it, repairing neighbors.

        Raises KeyError if the key is absent.
        """
        if (not self._nodes) or (key not in self._nodes):
            raise KeyError(key)
        if self._entry_point == key:
            # Pick a replacement entry point from the highest possible layer,
            # dropping layers that contain nothing but this key.
            new_entry_point = None
            for layer in reversed(list(self._graphs)):
                new_entry_point = next((p for p in layer if (p != key) and (not self._nodes[p].is_deleted)), None)
                if new_entry_point is not None:
                    break
                else:
                    self._graphs.pop()
            if new_entry_point is None:
                self.clear()
                return
            self._entry_point = new_entry_point
        if ef is None:
            ef = self._ef_construction
        self._nodes[key].is_deleted = True
        if not hard:
            return
        # Hard removal: every node that links to key gets its connections
        # rebuilt with key excluded, then key is dropped from all layers.
        keys_to_update = set()
        for layer in self._graphs:
            if key not in layer:
                break
            keys_to_update.update(layer.get_reverse_edges(key))
        for key_to_update in keys_to_update:
            self._repair_connections(key_to_update, self._nodes[key_to_update].point, ef, key_to_delete=key)
        for layer in self._graphs:
            if key not in layer:
                break
            del layer[key]
        del self._nodes[key]

    def clean(self, ef: Optional[int] = None) -> None:
        """Hard-remove every soft-deleted node."""
        keys_to_remove = list(key for key in self._nodes if self._nodes[key].is_deleted)
        for key in keys_to_remove:
            self.remove(key, ef=ef, hard=True)

    def merge(self, other: HNSW) -> HNSW:
        """Return a new index containing this index's points updated by other's."""
        new_index = self.copy()
        new_index.update(other)
        return new_index
class OptionSeriesWaterfallSonificationTracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for waterfall sonification tracks.

    NOTE(review): the @property/@<name>.setter decorators were missing from
    this extracted block, so every setter shadowed its getter; restored to the
    standard Options accessor pattern used by these generated classes.
    """

    @property
    def mapFunction(self):
        """The `mapFunction` option; defaults to None."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The `mapTo` option; defaults to None."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """The `max` bound of the mapping; defaults to None."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """The `min` bound of the mapping; defaults to None."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """The `within` option; defaults to None."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesBulletDataTargetoptions(Options):
    """Per-point target options for bullet series.

    NOTE(review): the @property/@<name>.setter decorators were missing from
    this extracted block, so every setter shadowed its getter; restored to the
    standard Options accessor pattern used by these generated classes.
    """

    @property
    def borderColor(self):
        """The target border color; defaults to None."""
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        """The target border radius; defaults to 0."""
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        """The target border width; defaults to 0."""
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def color(self):
        """The target color; defaults to None."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        """The target height; defaults to 3."""
        return self._config_get(3)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        """The target width; defaults to '140%'."""
        return self._config_get('140%')

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
class OptionsSparkLineDiscrete(OptionsSpark):
    """Options for discrete-style sparklines.

    NOTE(review): the @property/@<name>.setter decorators were missing from
    this extracted block, so every setter shadowed its getter; restored to the
    standard accessor pattern used by these Options classes.
    """

    @property
    def lineHeight(self):
        """The `lineHeight` option; defaults to None."""
        return self._config_get(None)

    @lineHeight.setter
    def lineHeight(self, value):
        self._config(value)

    @property
    def thresholdValue(self):
        """The `thresholdValue` option; defaults to None."""
        return self._config_get(None)

    @thresholdValue.setter
    def thresholdValue(self, value):
        self._config(value)

    @property
    def thresholdColor(self):
        """The `thresholdColor` option; defaults to None."""
        return self._config_get(None)

    @thresholdColor.setter
    def thresholdColor(self, value):
        self._config(value)
class OptionPlotoptionsPyramid3dSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for pyramid3d sonification.

    NOTE(review): the @property/@<name>.setter decorators were missing from
    this extracted block, so every setter shadowed its getter; restored to the
    standard Options accessor pattern used by these generated classes.
    """

    @property
    def mapFunction(self):
        """The `mapFunction` option; defaults to None."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The `mapTo` option; defaults to None."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """The `max` bound of the mapping; defaults to None."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """The `min` bound of the mapping; defaults to None."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """The `within` option; defaults to None."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_attrdict_middleware_is_recursive(w3):
    """AttributeDict conversion must apply at every nesting level of a result,
    reachable identically by attribute and by key."""
    w3.middleware_onion.inject(
        construct_result_generator_middleware(
            {RPCEndpoint('fake_endpoint'): (lambda *_: GENERATED_NESTED_DICT_RESULT)}),
        'result_gen', layer=0)
    response = w3.manager.request_blocking('fake_endpoint', [])
    result = response['result']
    assert isinstance(result, AttributeDict)
    assert response.result == result
    # Walk one level deeper at each step.
    levels = (
        (result.b, result['b']),
        (result.b.b2, result.b['b2']),
        (result.b.b2.b2b, result.b.b2['b2b']),
        (result.b.b2.b2b.b2b2, result.b.b2.b2b['b2b2']),
    )
    for by_attr, by_key in levels:
        assert isinstance(by_key, AttributeDict)
        assert by_attr == by_key
    w3.middleware_onion.remove('result_gen')
def get_EC_and_optimization_config(enable_optimization):
    """Build the exhaustive-search hand-eye configs plus an optimization config.

    Args:
        enable_optimization: whether the optimization step is enabled.

    Returns:
        Tuple (algorithm_name, time_alignment_config, hand_eye_config,
        optimization_config); the algorithm name is suffixed with '_opt' or
        '_no_opt' to reflect the flag.
    """
    (time_alignment_config, hand_eye_config) = get_exhaustive_search_pose_inliers_config()
    # Fixed local-name typo ("optimiztion_config").
    optimization_config = OptimizationConfig()
    optimization_config.enable_optimization = enable_optimization
    optimization_config.optimization_only = False
    suffix = '_opt' if enable_optimization else '_no_opt'
    algorithm_name = hand_eye_config.algorithm_name + suffix
    return (algorithm_name, time_alignment_config, hand_eye_config, optimization_config)
def test_data_integrity_test_constant_columns() -> None:
    """TestNumberOfConstantColumns passes or fails depending on its condition."""
    dataset = pd.DataFrame({
        'category_feature': [None, 'd', 'p', 'n'],
        'numerical_feature': [0, 0, 0, 0],
        'target': [0, 0, 0, 1],
    })
    # Default condition, dataset used as its own reference.
    suite = TestSuite(tests=[TestNumberOfConstantColumns()])
    suite.run(current_data=dataset, reference_data=dataset)
    assert suite
    # Exactly one constant column (numerical_feature): gte=5 fails, eq=1 passes.
    suite = TestSuite(tests=[TestNumberOfConstantColumns(gte=5)])
    suite.run(current_data=dataset, reference_data=None, column_mapping=ColumnMapping())
    assert not suite
    suite = TestSuite(tests=[TestNumberOfConstantColumns(eq=1)])
    suite.run(current_data=dataset, reference_data=None, column_mapping=ColumnMapping())
    assert suite
    assert suite.show()
    assert suite.json()
# NOTE(review): the decorator line was truncated to `_in_both(MyObject)` in
# extraction; restored as flexx's @run_in_both — confirm against upstream.
@run_in_both(MyObject)
def test_property_dict_mutate():
    """Exercise dict-property mutation: insert, replace, remove, and an
    out-of-place insert (prints 'fail IndexError')."""
    m = MyObject()
    print(m.dictprop)
    loop._processing_action = True
    m._mutate_dictprop(dict(foo=3), 'insert')
    m._mutate_dictprop(dict(bar=4), 'replace')
    print((('{' + ', '.join([('%s: %i' % (key, val)) for (key, val) in sorted(m.dictprop.items())])) + '}'))
    m._mutate_dictprop(dict(foo=5), 'replace')
    print((('{' + ', '.join([('%s: %i' % (key, val)) for (key, val) in sorted(m.dictprop.items())])) + '}'))
    m._mutate_dictprop(['foo'], 'remove')
    print((('{' + ', '.join([('%s: %i' % (key, val)) for (key, val) in sorted(m.dictprop.items())])) + '}'))
    try:
        m._mutate_dictprop(dict(foo=3), 'insert', 0)
    except IndexError:
        print('fail IndexError')
class FakeTableDataCreater(object):
    """Builds fake BigQuery table resources for tests."""

    def __init__(self, id, dataset):
        self._id = id
        self._parent = dataset
        self._create_time = DEFAULT_TABLE_CREATE_TIME
        self._expiration_time = None

    def SetExpirationTime(self, et):
        """Set the optional expirationTime emitted in the resource dict."""
        self._expiration_time = et

    def get_resource(self):
        """Return a table.Table wrapping a bigquery#table resource dict."""
        full_id = (((self._parent.parent.id + ':') + self._parent.id) + '.') + self._id
        data_dict = {
            'id': full_id,
            'kind': 'bigquery#table',
            'tableReference': {
                'projectId': self._parent.parent.id,
                'datasetId': self._parent.id,
                'tableId': self._id,
            },
            'type': 'TABLE',
            'creationTime': self._create_time,
        }
        if self._expiration_time is not None:
            data_dict['expirationTime'] = self._expiration_time
        return table.Table(
            table_id=data_dict['id'],
            parent=self._parent,
            data=json.dumps(data_dict),
            full_name='{}bigquery_table/{}/'.format(self._parent.full_name, data_dict['id']))
class JSONDiffTestCase(CfgDiffTestCase):
    """Exercise cfgdiff.JSONDiff against matching and differing JSON fixtures."""

    def test_json_same(self):
        self._test_same(cfgdiff.JSONDiff,
                        './tests/test_same_1-a.json',
                        './tests/test_same_1-b.json')

    def test_json_different(self):
        self._test_different(cfgdiff.JSONDiff,
                             './tests/test_different_1-a.json',
                             './tests/test_different_1-b.json')
class WalletStorage():
_store: AbstractStore
_is_closed: bool = False
_backup_filepaths: Optional[Tuple[(str, str)]] = None
def __init__(self, path: str, manual_upgrades: bool=False, storage_kind: StorageKind=StorageKind.UNKNOWN) -> None:
logger.debug("wallet path '%s'", path)
dirname = os.path.dirname(path)
if (not os.path.exists(dirname)):
raise IOError(f'directory {dirname} does not exist')
storage_info = categorise_file(path)
if (storage_kind == StorageKind.UNKNOWN):
storage_kind = storage_info.kind
if (storage_kind == StorageKind.HYBRID):
raise IncompatibleWalletError('Migration of development wallet format unsupported')
if (storage_info.kind in (StorageKind.DATABASE, StorageKind.UNKNOWN)):
path = storage_info.wallet_filepath
self._path = path
store: Optional[AbstractStore] = None
if (storage_kind == StorageKind.UNKNOWN):
self._set_store(DatabaseStore(path))
else:
if (storage_kind == StorageKind.FILE):
store = TextStore(path)
if os.path.exists(path):
store.attempt_load_data()
else:
store = DatabaseStore(path)
self._set_store(store)
def create(klass, wallet_path: str, password: str) -> 'WalletStorage':
storage = klass(wallet_path)
storage.put('password-token', pw_encode(os.urandom(32).hex(), password))
return storage
def from_file_data(cls, path: str, data: Dict[(str, Any)]) -> 'WalletStorage':
storage = WalletStorage(path=path, storage_kind=StorageKind.FILE)
text_store = storage.get_text_store()
text_store._set_data(data)
return storage
def move_to(self, new_path: str) -> None:
db_store = cast(DatabaseStore, self._store)
db_store.close_database()
if new_path.lower().endswith(DATABASE_EXT):
new_path = new_path[:(- len(DATABASE_EXT))]
shutil.copyfile(self.get_path(), (new_path + DATABASE_EXT))
self._path = new_path
db_store.set_path(new_path)
db_store.open_database()
def is_closed(self) -> bool:
return self._is_closed
def close(self) -> None:
if self._is_closed:
return
self._store.close()
del self.check_password
del self.get
del self.put
del self.write
del self.requires_split
del self.split_accounts
del self.requires_upgrade
self._is_closed = True
def get_path(self) -> str:
return self._store.get_path()
def get_eckey_from_password(password: str) -> PrivateKey:
secret = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), b'', iterations=1024)
return PrivateKey.from_arbitrary_bytes(secret)
def is_password_valid(self, password: str) -> bool:
try:
self.check_password(password)
except InvalidPassword:
pass
else:
return True
return False
def _set_store(self, store: StoreType) -> None:
    """Adopt *store* as the backing store and proxy its key callables.

    The store's methods are bound directly onto this instance so callers can
    treat the storage object as if it were the store itself; `close()`
    removes these same attributes again.
    """
    self._store = store
    for proxied in ('check_password', 'get', 'put', 'write',
                    'requires_split', 'split_accounts', 'requires_upgrade'):
        setattr(self, proxied, getattr(store, proxied))
def get_text_store(self) -> TextStore:
    """Return the backing store, asserting it is the legacy text store."""
    assert isinstance(self._store, TextStore)
    return self._store
def get_database_store(self) -> DatabaseStore:
    """Return the backing store, asserting it is the database store."""
    assert isinstance(self._store, DatabaseStore)
    return self._store
def is_legacy_format(self) -> bool:
    """Return True when the wallet is not yet in the database format."""
    return (not isinstance(self._store, DatabaseStore))
def get_storage_path(self) -> str:
    """Return the storage path as given/normalised at construction time.

    Unlike get_path(), this does not ask the store and so may lack the
    database file extension.
    """
    return self._path
def get_backup_filepaths(self) -> Optional[Tuple[(str, str)]]:
    """Return the backup file paths created by upgrade(), if any."""
    return self._backup_filepaths
def upgrade(self, has_password: bool, new_password: str) -> None:
    """Upgrade the wallet to the latest storage format, step by step.

    A backup of the current wallet file is taken first.  Each store may hand
    back a newer store implementation; the loop repeats until the freshly
    adopted store no longer reports that it requires an upgrade.
    """
    logger.debug('upgrading wallet format')
    self._backup_filepaths = backup_wallet_file(self._path)
    while True:
        upgraded = self._store.upgrade(has_password, new_password)
        if upgraded is None:
            break
        self._set_store(upgraded)
        if not upgraded.requires_upgrade():
            break
def get_db_context(self) -> Optional[DatabaseContext]:
    """Return the underlying database context, or None for legacy stores."""
    if isinstance(self._store, DatabaseStore):
        return self._store._db_context
    return None
@classmethod
def files_are_matched_by_path(klass, path: Optional[str]) -> bool:
    """Return True if *path* refers to a recognised wallet file layout."""
    # NOTE(review): restored `@classmethod`, matching the `klass` parameter.
    if (path is None):
        return False
    return (categorise_file(path).kind != StorageKind.UNKNOWN)
@classmethod
def canonical_path(klass, database_filepath: str) -> str:
    """Return *database_filepath* with the database extension guaranteed.

    The extension check is case-insensitive, but an added extension uses the
    canonical casing of DATABASE_EXT.
    """
    # NOTE(review): restored `@classmethod`, matching the `klass` parameter.
    if (not database_filepath.lower().endswith(DATABASE_EXT)):
        database_filepath += DATABASE_EXT
    return database_filepath
@pytest.mark.parametrize('malformed_type_str', malformed_type_strs)
def test_predicates_have_expected_behavior_for_malformed_types(malformed_type_str):
    """Every grammar predicate must return a falsy result for malformed type strings."""
    # NOTE(review): the line above was the bare residue `(malformed_type_strs)`
    # (a no-op expression); reconstructed as the pytest parametrize decorator
    # the parameter name strongly implies -- confirm against upstream.
    is_int = BaseEquals('int')
    is_int_with_sub = BaseEquals('int', with_sub=True)
    is_int_with_no_sub = BaseEquals('int', with_sub=False)
    assert (not is_int(malformed_type_str))
    assert (not is_int_with_sub(malformed_type_str))
    assert (not is_int_with_no_sub(malformed_type_str))
    assert (not has_arrlist(malformed_type_str))
    assert (not is_base_tuple(malformed_type_str))
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """Convert an egg-info PKG-INFO file into a Metadata 2.1 message.

    Requirements from ``requires.txt`` (when present) are folded in as extra
    headers, and a legacy ``Description`` header is moved into the message
    payload.
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.1')
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        # Sort sections so extras are emitted deterministically; the unnamed
        # section (extra=None) sorts first as ''.
        sections = pkg_resources.split_sections(requires)
        for extra, reqs in sorted(sections, key=lambda section: section[0] or ''):
            for header_name, header_value in generate_requirements({extra: reqs}):
                pkg_info[header_name] = header_value
    description = pkg_info['Description']
    if description:
        # Metadata 2.1 carries the long description in the body, not a header.
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']
    return pkg_info
class IntervalSeries(MetricResult):
    """A series of values over contiguous right-closed intervals.

    `bins` holds the interval edges (one more than len(values)); `values`
    holds one number per interval.  `_data` lazily caches the materialised
    pandas Series with an Interval index.
    """

    class Config():
        underscore_attrs_are_private = True

    # Interval edges: bins[i], bins[i+1] bound the i-th interval.
    bins: List[float]
    # One value per interval.
    values: List[float]
    _data: pd.Series

    @property
    def data(self):
        """The pandas Series with an Interval index, built on first access."""
        # NOTE(review): restored `@property` -- the hasattr-based lazy cache
        # only makes sense for attribute-style access.
        if (not hasattr(self, '_data')):
            self._data = pd.Series(self.values, index=[Interval(a, b, closed='right') for (a, b) in zip(self.bins, self.bins[1:])])
        return self._data

    @classmethod
    def from_data(cls, data: pd.Series):
        """Build an IntervalSeries from a Series indexed by pd.Interval objects."""
        # NOTE(review): restored `@classmethod`, matching the `cls` parameter.
        index = list(data.index)
        interval_series = cls(values=list(data), bins=([i.left for i in index] + [index[(- 1)].right]))
        interval_series._data = data
        return interval_series

    def __mul__(self, other: float):
        """Scale every interval value by *other*, keeping the same bins."""
        series = IntervalSeries(bins=self.bins, values=[(v * other) for v in self.values])
        if hasattr(self, '_data'):
            series._data = (self._data * other)
        return series
class ClusterClient(NamespacedClient):
    """Async Elasticsearch cluster-namespace API client.

    Each coroutine wraps one REST endpoint: keyword arguments are translated
    into query-string parameters and, where the API accepts one, a JSON
    request body.

    NOTE(review): every ``_rewrite_parameters(...)`` line had lost its ``@``
    and was executing as a discarded bare call at class-creation time, so the
    methods never got their parameter rewriting; the ``@`` decorators are
    restored below.
    """

    @_rewrite_parameters(body_fields=('current_node', 'index', 'primary', 'shard'))
    async def allocation_explain(self, *, current_node: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, include_disk_info: t.Optional[bool]=None, include_yes_decisions: t.Optional[bool]=None, index: t.Optional[str]=None, pretty: t.Optional[bool]=None, primary: t.Optional[bool]=None, shard: t.Optional[int]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """Explain why a shard is or is not currently allocated to a node."""
        __path = '/_cluster/allocation/explain'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (include_disk_info is not None):
            __query['include_disk_info'] = include_disk_info
        if (include_yes_decisions is not None):
            __query['include_yes_decisions'] = include_yes_decisions
        if (pretty is not None):
            __query['pretty'] = pretty
        if (not __body):
            if (current_node is not None):
                __body['current_node'] = current_node
            if (index is not None):
                __body['index'] = index
            if (primary is not None):
                __body['primary'] = primary
            if (shard is not None):
                __body['shard'] = shard
        if (not __body):
            __body = None
        __headers = {'accept': 'application/json'}
        if (__body is not None):
            __headers['content-type'] = 'application/json'
        return (await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body))

    @_rewrite_parameters()
    async def delete_component_template(self, *, name: t.Union[(str, t.Sequence[str])], error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """Delete one or more component templates by name."""
        if (name in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'name'")
        __path = f'/_component_template/{_quote(name)}'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('DELETE', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def delete_voting_config_exclusions(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None, wait_for_removal: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Clear the cluster's voting configuration exclusions list."""
        __path = '/_cluster/voting_config_exclusions'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        if (wait_for_removal is not None):
            __query['wait_for_removal'] = wait_for_removal
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('DELETE', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def exists_component_template(self, *, name: t.Union[(str, t.Sequence[str])], error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None) -> HeadApiResponse:
        """Check (via HEAD) whether a component template exists."""
        if (name in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'name'")
        __path = f'/_component_template/{_quote(name)}'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (local is not None):
            __query['local'] = local
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('HEAD', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def get_component_template(self, *, name: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, flat_settings: t.Optional[bool]=None, human: t.Optional[bool]=None, include_defaults: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Retrieve one component template, or all of them when no name is given."""
        if (name not in SKIP_IN_PATH):
            __path = f'/_component_template/{_quote(name)}'
        else:
            __path = '/_component_template'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (flat_settings is not None):
            __query['flat_settings'] = flat_settings
        if (human is not None):
            __query['human'] = human
        if (include_defaults is not None):
            __query['include_defaults'] = include_defaults
        if (local is not None):
            __query['local'] = local
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def get_settings(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, flat_settings: t.Optional[bool]=None, human: t.Optional[bool]=None, include_defaults: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """Fetch the cluster-wide settings."""
        __path = '/_cluster/settings'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (flat_settings is not None):
            __query['flat_settings'] = flat_settings
        if (human is not None):
            __query['human'] = human
        if (include_defaults is not None):
            __query['include_defaults'] = include_defaults
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def health(self, *, index: t.Optional[t.Union[(str, t.Sequence[str])]]=None, error_trace: t.Optional[bool]=None, expand_wildcards: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['all', 'closed', 'hidden', 'none', 'open']", str)]], t.Union[("t.Literal['all', 'closed', 'hidden', 'none', 'open']", str)])]]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, level: t.Optional[t.Union[("t.Literal['cluster', 'indices', 'shards']", str)]]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, wait_for_active_shards: t.Optional[t.Union[(int, t.Union[("t.Literal['all', 'index-setting']", str)])]]=None, wait_for_events: t.Optional[t.Union[("t.Literal['high', 'immediate', 'languid', 'low', 'normal', 'urgent']", str)]]=None, wait_for_no_initializing_shards: t.Optional[bool]=None, wait_for_no_relocating_shards: t.Optional[bool]=None, wait_for_nodes: t.Optional[t.Union[(int, str)]]=None, wait_for_status: t.Optional[t.Union[("t.Literal['green', 'red', 'yellow']", str)]]=None) -> ObjectApiResponse[t.Any]:
        """Report cluster health, optionally scoped to specific indices."""
        if (index not in SKIP_IN_PATH):
            __path = f'/_cluster/health/{_quote(index)}'
        else:
            __path = '/_cluster/health'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (expand_wildcards is not None):
            __query['expand_wildcards'] = expand_wildcards
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (level is not None):
            __query['level'] = level
        if (local is not None):
            __query['local'] = local
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        if (wait_for_active_shards is not None):
            __query['wait_for_active_shards'] = wait_for_active_shards
        if (wait_for_events is not None):
            __query['wait_for_events'] = wait_for_events
        if (wait_for_no_initializing_shards is not None):
            __query['wait_for_no_initializing_shards'] = wait_for_no_initializing_shards
        if (wait_for_no_relocating_shards is not None):
            __query['wait_for_no_relocating_shards'] = wait_for_no_relocating_shards
        if (wait_for_nodes is not None):
            __query['wait_for_nodes'] = wait_for_nodes
        if (wait_for_status is not None):
            __query['wait_for_status'] = wait_for_status
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def info(self, *, target: t.Union[(t.Sequence[t.Union[("t.Literal['_all', ' 'ingest', 'script', 'thread_pool']", str)]], t.Union[("t.Literal['_all', ' 'ingest', 'script', 'thread_pool']", str)])], error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Return cluster info for the given targets.

        NOTE(review): the Literal strings in this signature look garbled
        (`'_all', ' 'ingest'`) -- compare against upstream elasticsearch-py.
        """
        if (target in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'target'")
        __path = f'/_info/{_quote(target)}'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def pending_tasks(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """List cluster-level changes that have not yet been executed."""
        __path = '/_cluster/pending_tasks'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (local is not None):
            __query['local'] = local
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def post_voting_config_exclusions(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, node_ids: t.Optional[t.Union[(str, t.Sequence[str])]]=None, node_names: t.Optional[t.Union[(str, t.Sequence[str])]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """Add nodes (by id or name) to the voting configuration exclusions."""
        __path = '/_cluster/voting_config_exclusions'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (node_ids is not None):
            __query['node_ids'] = node_ids
        if (node_names is not None):
            __query['node_names'] = node_names
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('POST', __path, params=__query, headers=__headers))

    @_rewrite_parameters(body_fields=('template', 'allow_auto_create', 'meta', 'version'), parameter_aliases={'_meta': 'meta'})
    async def put_component_template(self, *, name: str, template: t.Optional[t.Mapping[(str, t.Any)]]=None, allow_auto_create: t.Optional[bool]=None, create: t.Optional[bool]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, meta: t.Optional[t.Mapping[(str, t.Any)]]=None, pretty: t.Optional[bool]=None, version: t.Optional[int]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """Create or update a component template."""
        if (name in SKIP_IN_PATH):
            raise ValueError("Empty value passed for parameter 'name'")
        if ((template is None) and (body is None)):
            raise ValueError("Empty value passed for parameter 'template'")
        __path = f'/_component_template/{_quote(name)}'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if (create is not None):
            __query['create'] = create
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (not __body):
            if (template is not None):
                __body['template'] = template
            if (allow_auto_create is not None):
                __body['allow_auto_create'] = allow_auto_create
            if (meta is not None):
                __body['_meta'] = meta
            if (version is not None):
                __body['version'] = version
        __headers = {'accept': 'application/json', 'content-type': 'application/json'}
        return (await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body))

    @_rewrite_parameters(body_fields=('persistent', 'transient'))
    async def put_settings(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, flat_settings: t.Optional[bool]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, persistent: t.Optional[t.Mapping[(str, t.Any)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, transient: t.Optional[t.Mapping[(str, t.Any)]]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """Update persistent and/or transient cluster-wide settings."""
        __path = '/_cluster/settings'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (flat_settings is not None):
            __query['flat_settings'] = flat_settings
        if (human is not None):
            __query['human'] = human
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        if (not __body):
            if (persistent is not None):
                __body['persistent'] = persistent
            if (transient is not None):
                __body['transient'] = transient
        __headers = {'accept': 'application/json', 'content-type': 'application/json'}
        return (await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body))

    @_rewrite_parameters()
    async def remote_info(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """Return information about configured remote clusters."""
        __path = '/_remote/info'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    @_rewrite_parameters(body_fields=('commands',))
    async def reroute(self, *, commands: t.Optional[t.Sequence[t.Mapping[(str, t.Any)]]]=None, dry_run: t.Optional[bool]=None, error_trace: t.Optional[bool]=None, explain: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, metric: t.Optional[t.Union[(str, t.Sequence[str])]]=None, pretty: t.Optional[bool]=None, retry_failed: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """Manually change the allocation of individual shards."""
        __path = '/_cluster/reroute'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if (dry_run is not None):
            __query['dry_run'] = dry_run
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (explain is not None):
            __query['explain'] = explain
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (metric is not None):
            __query['metric'] = metric
        if (pretty is not None):
            __query['pretty'] = pretty
        if (retry_failed is not None):
            __query['retry_failed'] = retry_failed
        if (timeout is not None):
            __query['timeout'] = timeout
        if (not __body):
            if (commands is not None):
                __body['commands'] = commands
        if (not __body):
            __body = None
        __headers = {'accept': 'application/json'}
        if (__body is not None):
            __headers['content-type'] = 'application/json'
        return (await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body))

    @_rewrite_parameters()
    async def state(self, *, metric: t.Optional[t.Union[(str, t.Sequence[str])]]=None, index: t.Optional[t.Union[(str, t.Sequence[str])]]=None, allow_no_indices: t.Optional[bool]=None, error_trace: t.Optional[bool]=None, expand_wildcards: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['all', 'closed', 'hidden', 'none', 'open']", str)]], t.Union[("t.Literal['all', 'closed', 'hidden', 'none', 'open']", str)])]]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, flat_settings: t.Optional[bool]=None, human: t.Optional[bool]=None, ignore_unavailable: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, wait_for_metadata_version: t.Optional[int]=None, wait_for_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """Return a comprehensive snapshot of cluster state, optionally filtered."""
        if ((metric not in SKIP_IN_PATH) and (index not in SKIP_IN_PATH)):
            __path = f'/_cluster/state/{_quote(metric)}/{_quote(index)}'
        elif (metric not in SKIP_IN_PATH):
            __path = f'/_cluster/state/{_quote(metric)}'
        elif (index not in SKIP_IN_PATH):
            __path = f'/_cluster/state/_all/{_quote(index)}'
        else:
            __path = '/_cluster/state'
        __query: t.Dict[(str, t.Any)] = {}
        if (allow_no_indices is not None):
            __query['allow_no_indices'] = allow_no_indices
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (expand_wildcards is not None):
            __query['expand_wildcards'] = expand_wildcards
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (flat_settings is not None):
            __query['flat_settings'] = flat_settings
        if (human is not None):
            __query['human'] = human
        if (ignore_unavailable is not None):
            __query['ignore_unavailable'] = ignore_unavailable
        if (local is not None):
            __query['local'] = local
        if (master_timeout is not None):
            __query['master_timeout'] = master_timeout
        if (pretty is not None):
            __query['pretty'] = pretty
        if (wait_for_metadata_version is not None):
            __query['wait_for_metadata_version'] = wait_for_metadata_version
        if (wait_for_timeout is not None):
            __query['wait_for_timeout'] = wait_for_timeout
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))

    @_rewrite_parameters()
    async def stats(self, *, node_id: t.Optional[t.Union[(str, t.Sequence[str])]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, flat_settings: t.Optional[bool]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """Return cluster statistics, optionally scoped to specific nodes."""
        if (node_id not in SKIP_IN_PATH):
            __path = f'/_cluster/stats/nodes/{_quote(node_id)}'
        else:
            __path = '/_cluster/stats'
        __query: t.Dict[(str, t.Any)] = {}
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (flat_settings is not None):
            __query['flat_settings'] = flat_settings
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        if (timeout is not None):
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))
class TestSpotifyShow():
    """Tests for Spotify show endpoints, with and without a market parameter."""

    # NOTE(review): restored the `@pytest.mark.` prefix -- the source had the
    # bare residue `.xfail(reason=...)`, which is not valid Python.
    @pytest.mark.xfail(reason='API inconsistencies')
    def test_show_not_found_without_market(self, app_client):
        """App token without a market should 404 for this show."""
        with pytest.raises(HTTPError):
            app_client.show(show_id)

    def test_show_found_with_market(self, app_client):
        """With an explicit market the show resolves."""
        show = app_client.show(show_id, market='FI')
        assert show.id == show_id

    def test_shows(self, app_client):
        """Batch lookup preserves the requested order of ids."""
        shows = app_client.shows(show_ids, market='FI')
        assert show_ids == [s.id for s in shows]

    def test_show_episodes(self, app_client):
        """Episode paging returns at least one episode."""
        episodes = app_client.show_episodes(show_id, market='FI', limit=1)
        assert episodes.items[0] is not None

    def test_show_found_without_market(self, user_client):
        """A user token implies a market, so no market argument is needed."""
        show = user_client.show(show_id)
        assert show_id == show.id
def test_marked_log_matches() -> None:
    """Each marked-log line must equal the raw log line it annotates.

    The marked file prefixes every line with ``<marker>,``; strip everything
    up to the first comma and compare byte-for-byte with the raw plot log.
    """
    # importlib.resources.read_binary() was deprecated in Python 3.11 and
    # removed in 3.13; use the files() traversable API instead.
    resources = importlib.resources.files(plotman._tests.resources)
    marked_bytes = resources.joinpath('chianetwork.marked').read_bytes()
    log_bytes = resources.joinpath('chianetwork.plot.log').read_bytes()
    for (marked_line, log_line) in zip(marked_bytes.splitlines(keepends=True), log_bytes.splitlines(keepends=True)):
        (_, _, marked_just_line) = marked_line.partition(b',')
        assert (marked_just_line == log_line)
class Status(Html.Html):
    """Centered text badge whose background colour tracks a status value."""
    name = 'status'
    tag = 'div'
    _option_cls = OptText.OptionsStatus

    def __init__(self, page: primitives.PageModel, status, width, height, html_code, profile, options):
        """Create the badge showing *status* with the given CSS sizing."""
        super(Status, self).__init__(page, status, html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height})
        self.style.css.text_align = 'center'
        self.style.css.line_height = 30
        self.style.css.margin = 2
        self.style.css.padding = '10px auto'

    @property
    def options(self) -> OptText.OptionsStatus:
        """Component options (states, colours, context menu flag)."""
        # NOTE(review): restored `@property` -- `__str__` below reads
        # `self.options.states` without calling `options()`.
        return super().options

    # Client-side builder: renders data (optionally via showdown markdown)
    # into the element and applies any per-option CSS overrides.
    _js__builder__ = "if(options.showdown){\n var converter = new showdown.Converter(options.showdown); var content = converter.makeHtml(data)} \nelse {var content = data}\nhtmlObj.innerHTML = content;\nif(typeof options.css !== 'undefined'){for(var k in options.css){htmlObj.style[k] = options.css[k]}}"

    def __str__(self):
        color_map = self.page.js.data.datamap().attrs(self.options.states)
        if self.options.change_menu:
            # Build a context menu entry per state; clicking one rebuilds the
            # component with that state and recolours the background.
            for (k, v) in self.options.states.items():
                item = self.context.add(k)
                item.click([self.context.source.build(item.dom.content), self.context.source.dom.css({'background': color_map.get(item.dom.content)}), self.context.dom.hide()])
        self.style.css.background = self.options.states.get(self.val.upper(), self.options.background)
        self.style.css.color = self.options.color
        return ('<%s %s>%s</%s>' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.val, self.tag))
def arc(pRA, pDecl, sRA, sDecl, mcRA, lat):
    """Return the arc of direction between promissor (p) and significator (s).

    Works in right ascension with semi-arcs: each body's distance from the
    relevant meridian is scaled by half its diurnal or nocturnal arc,
    chosen by whether the significator stands above the horizon.
    """
    prom_diurnal, prom_nocturnal = utils.dnarcs(pDecl, lat)
    sig_diurnal, sig_nocturnal = utils.dnarcs(sDecl, lat)
    if utils.isAboveHorizon(sRA, sDecl, mcRA, lat):
        meridian = mcRA
        sig_arc, prom_arc = sig_diurnal, prom_diurnal
    else:
        # Below the horizon: measure from the anti-meridian (IC) and use the
        # nocturnal semi-arcs instead.
        meridian = angle.norm(mcRA + 180)
        sig_arc, prom_arc = sig_nocturnal, prom_nocturnal
    prom_dist = angle.closestdistance(meridian, pRA)
    sig_dist = angle.closestdistance(meridian, sRA)
    if prom_dist < sig_dist:
        # Keep the promissor "ahead" of the significator in the direction.
        prom_dist += 360
    sig_prop = sig_dist / (sig_arc / 2.0)
    prom_prop = prom_dist / (prom_arc / 2.0)
    return (prom_prop - sig_prop) * (prom_arc / 2.0)
class TaskContext(ABC, Generic[T]):
    """Execution context carried by a single task within a workflow run.

    NOTE(review): the source had been stripped of decorators and method
    bodies -- the bodiless defs below were syntactically invalid.  They are
    documented here as the abstract interface they evidently are (originals
    were presumably `@abstractmethod`s; confirm against upstream).  Only
    `metadata` is restored as a property, which `update_metadata` and
    `call_data` demonstrably require.
    """

    def task_id(self) -> str:
        """Return the unique id of this task (abstract)."""

    def task_input(self) -> 'InputContext':
        """Return the input context feeding this task (abstract)."""

    def set_task_input(self, input_ctx: 'InputContext') -> None:
        """Replace the task's input context (abstract)."""

    def task_output(self) -> TaskOutput[T]:
        """Return the task's output holder (abstract)."""

    def set_task_output(self, task_output: TaskOutput[T]) -> None:
        """Replace the task's output holder (abstract)."""

    def current_state(self) -> TaskState:
        """Return the task's current lifecycle state (abstract)."""

    def set_current_state(self, task_state: TaskState) -> None:
        """Transition the task to *task_state* (abstract)."""

    def new_ctx(self) -> 'TaskContext':
        """Return a fresh context for re-executing this task (abstract)."""

    @property
    def metadata(self) -> Dict[(str, Any)]:
        """Mutable metadata mapping attached to the task (abstract).

        Restored as a property: `update_metadata` and `call_data` below
        subscript `self.metadata` directly rather than calling it.
        """

    def update_metadata(self, key: str, value: Any) -> None:
        """Set ``metadata[key] = value``."""
        self.metadata[key] = value

    def call_data(self) -> Optional[Dict]:
        """Return the task's call data stored under metadata['call_data'], if any."""
        return self.metadata.get('call_data')

    async def _call_data_to_output(self) -> Optional[TaskOutput[T]]:
        """Convert stored call data into a TaskOutput (abstract)."""

    def set_call_data(self, call_data: Dict) -> None:
        """Store *call_data* under metadata['call_data']."""
        self.update_metadata('call_data', call_data)
def uuid(ctokens=_UNPACKED_CTOKENS):
    """Return a random UUID4 string hardened with extra entropy sources.

    XORs Python's PRNG output with OS randomness (when available) and the
    process ctokens, so the result is at least as unpredictable as the
    strongest contributing source.
    """
    prng_halves = (random.getrandbits(64), random.getrandbits(64))
    if _HAVE_URANDOM:
        urandom_halves = struct.unpack('=QQ', fast_urandom16())
        raw = struct.pack(
            '=QQ',
            prng_halves[0] ^ urandom_halves[0] ^ ctokens[0],
            prng_halves[1] ^ urandom_halves[1] ^ ctokens[1],
        )
    else:
        raw = struct.pack('=QQ', prng_halves[0] ^ ctokens[0], prng_halves[1] ^ ctokens[1])
    return str(uuidm.UUID(bytes=raw, version=4))
class TestUtils(unittest.TestCase):
    """Unit tests for the logging-service download Utils helper."""

    def setUp(self) -> None:
        # Fresh Utils instance for every test.
        self.utils = Utils()

    def test_create_file(self) -> None:
        """create_file writes each content line and wraps open() failures."""
        fake_file_path = 'fake/file/path'
        content_list = ['This is test string']
        # Patch `open` as resolved inside the utils module so no real file is
        # touched; `mocked_file()` is the file handle mock.
        with patch('fbpcs.infra.logging_service.download_logs.utils.utils.open', mock_open()) as mocked_file:
            with self.subTest('basic'):
                self.utils.create_file(file_location=fake_file_path, content=content_list)
                # Opened for writing exactly once, one line written with '\n'.
                mocked_file.assert_called_once_with(fake_file_path, 'w')
                mocked_file().write.assert_called_once_with((content_list[0] + '\n'))
            with self.subTest('ExceptionOpen'):
                # Any IOError from open() must surface as "Failed to create file...".
                mocked_file.side_effect = IOError()
                with self.assertRaisesRegex(Exception, 'Failed to create file*'):
                    self.utils.create_file(file_location=fake_file_path, content=content_list)

    def test_write_to_file(self) -> None:
        # TODO: not yet implemented.
        pass

    def test_create_folder(self) -> None:
        # TODO: not yet implemented.
        pass

    def test_compress_downloaded_logs(self) -> None:
        # TODO: not yet implemented.
        pass

    def test_copy_file(self) -> None:
        # TODO: not yet implemented.
        pass
class TestSetEventFactory(unittest.TestCase):
    """Checks set_event_factory builds events compatible with TraitSet notifications."""

    def test_trait_set_notification_compat(self):
        captured = []

        def record_event(*args, **kwargs):
            # Adapt the low-level notifier call signature into an event object.
            captured.append(set_event_factory(*args, **kwargs))

        trait_set = TraitSet([1, 2, 3], notifiers=[record_event])

        # Adding an element reports it in `added` and nothing in `removed`.
        trait_set.add(4)
        (add_event,) = captured
        self.assertIs(add_event.object, trait_set)
        self.assertEqual(add_event.added, {4})
        self.assertEqual(add_event.removed, set())

        # Removing the element reports the mirror image.
        captured.clear()
        trait_set.remove(4)
        (remove_event,) = captured
        self.assertEqual(remove_event.added, set())
        self.assertEqual(remove_event.removed, {4})
def expand_faces(faces: Set[bmesh.types.BMFace], dist: int) -> Set[bmesh.types.BMFace]:
    """Return *faces* grown outward by up to *dist* edge-adjacency steps.

    Breadth-first traversal over the loop/radial links; with ``dist <= 0``
    the result is simply a copy of the input set.
    """
    if dist <= 0:
        return set(faces)
    region = set()
    pending = collections.deque((face, 0) for face in faces)
    while pending:
        face, depth = pending.pop()
        region.add(face)
        if depth < dist:
            for loop in face.loops:
                neighbour = loop.link_loop_radial_next.face
                if neighbour not in region:
                    pending.appendleft((neighbour, depth + 1))
    return region
@pytest.mark.integration
class TestHealthchecks():
    """Endpoint-level health check tests (requires a running test server).

    NOTE(review): restored the `@pytest.mark.` prefixes -- the source had the
    bare residues `.integration` and `.parametrize(...)`, which are not valid
    Python.
    """

    @pytest.mark.parametrize('database_health, expected_status_code', [('healthy', 200), ('unhealthy', 503), ('needs migration', 503)])
    def test_database_healthcheck(self, test_config: FidesConfig, database_health: str, expected_status_code: int, monkeypatch: MonkeyPatch, test_client: TestClient) -> None:
        """/health/database mirrors whatever get_db_health reports."""
        def mock_get_db_health(url: str, db) -> str:
            return database_health

        monkeypatch.setattr(health, 'get_db_health', mock_get_db_health)
        response = test_client.get(test_config.cli.server_url + '/health/database')
        assert response.status_code == expected_status_code, f'Request failed: {response.text}'

    def test_server_healthcheck(self, test_config: FidesConfig, test_client: TestClient) -> None:
        """/health returns 200 when the server is up."""
        response = test_client.get(test_config.cli.server_url + '/health')
        assert response.status_code == 200

    def test_worker_healthcheck(self, test_config: FidesConfig, test_client: TestClient) -> None:
        """/health/workers returns 200 when workers are reachable."""
        response = test_client.get(test_config.cli.server_url + '/health/workers')
        assert response.status_code == 200
def get_workout_types(df_summary, run_status, ride_status, all_status):
    """Return the workout-type labels selected by the three status toggles.

    Rows with a missing `type` are dropped, then labels are bucketed by a
    case-insensitive substring match ('run' / 'ride' / everything else) and
    concatenated in the order: other (all_status), rides, runs.
    """
    # Drop rows whose type is missing (NaN -> sentinel -> filter).
    df_summary['type'] = df_summary['type'].fillna('REMOVE')
    df_summary = df_summary[df_summary['type'] != 'REMOVE']
    unique_types = df_summary['type'].unique()
    runs = [label for label in unique_types if 'run' in label.lower()]
    rides = [label for label in unique_types if 'ride' in label.lower()]
    others = [label for label in unique_types
              if 'ride' not in label.lower() and 'run' not in label.lower()]
    selected = []
    if all_status:
        selected += others
    if ride_status:
        selected += rides
    if run_status:
        selected += runs
    return selected
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.