code stringlengths 281 23.7M |
|---|
def extractAmatertranslationsBlogspotCom(item):
    """Map a feed item from this site to a release message.

    Returns None for previews or items without volume/chapter info,
    False when no known series tag matches, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('CEO above me below', 'CEO above me below', 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesVariwideStatesSelectMarker(Options):
    """Marker options for the selected state of variwide series points.

    Fix: each option is a getter/setter pair with the same name; without the
    ``@property``/``@<name>.setter`` decorators the setter definitions silently
    shadowed the getters, making every option unreadable. The decorators are
    restored here.
    """

    @property
    def enabled(self):
        # None means "inherit the chart default".
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        # Threshold below which markers are hidden; documented default 2.
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        # Marker outline color; documented default '#ffffff'.
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
class FactBackend(FactBase):
    """FACT backend entry point: wires up and manages the lifecycle of the
    unpacking, analysis, comparison and intercom services."""

    PROGRAM_NAME = 'FACT Backend'
    PROGRAM_DESCRIPTION = 'Firmware Analysis and Compare Tool (FACT) Backend'
    COMPONENT = 'backend'

    def __init__(self):
        super().__init__()
        self.unpacking_lock_manager = UnpackingLockManager()
        self._create_docker_base_dir()
        try:
            self.analysis_service = AnalysisScheduler(unpacking_locks=self.unpacking_lock_manager)
        except PluginInitException as error:
            # A broken plugin makes the backend unusable — abort startup.
            logging.critical(f'Error during initialization of plugin {error.plugin.NAME}: {error}.')
            complete_shutdown()
        self.unpacking_service = UnpackingScheduler(
            post_unpack=self.analysis_service.start_analysis_of_object,
            analysis_workload=self.analysis_service.get_combined_analysis_workload,
            unpacking_locks=self.unpacking_lock_manager,
        )
        self.compare_service = ComparisonScheduler()
        self.intercom = InterComBackEndBinding(
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service,
            unpacking_locks=self.unpacking_lock_manager,
        )

    def start(self):
        """Start all backend services."""
        self.analysis_service.start()
        self.unpacking_service.start()
        self.compare_service.start()
        self.intercom.start()

    def shutdown(self):
        """Shut all services down (reverse order of start)."""
        super().shutdown()
        self.intercom.shutdown()
        self.compare_service.shutdown()
        self.unpacking_service.shutdown()
        self.analysis_service.shutdown()
        self.unpacking_lock_manager.shutdown()
        if not self.args.testing:
            complete_shutdown()

    def _update_component_workload(self):
        # Publish current scheduler workloads to the workload statistic.
        self.work_load_stat.update(
            unpacking_workload=self.unpacking_service.get_scheduled_workload(),
            analysis_workload=self.analysis_service.get_scheduled_workload(),
        )

    @staticmethod
    def _create_docker_base_dir():
        """Create the docker mount base dir and chown it to the docker group.

        Fix: the method takes no ``self``, so calling it as
        ``self._create_docker_base_dir()`` raised TypeError — it must be a
        ``@staticmethod``. Also spell the mode in octal (504 == 0o770).
        """
        docker_mount_base_dir = Path(config.backend.docker_mount_base_dir)
        docker_mount_base_dir.mkdir(0o770, exist_ok=True)
        docker_gid = grp.getgrnam('docker').gr_gid
        try:
            # gid change only; -1 leaves the owning uid untouched.
            os.chown(docker_mount_base_dir, -1, docker_gid)
        except PermissionError:
            # Best effort: unprivileged runs keep working with a warning.
            logging.warning('Could not change permissions of docker-mount-base-dir. Ignoring.')

    def _exception_occurred(self):
        # True if any service recorded an exception in its worker processes.
        return any((
            self.unpacking_service.check_exceptions(),
            self.compare_service.check_exceptions(),
            self.analysis_service.check_exceptions(),
        ))
def load_processed_data(data_save_dir, classes_save_path, test_ratio=0.2, items_limit_per_label=None):
    """Load preprocessed arrays and label names, split into train/test.

    Expects ``X_limit_<limit>.npy`` / ``Y_limit_<limit>.npy`` in
    *data_save_dir* and one label name per line in *classes_save_path*.
    The first ``test_ratio`` fraction of rows becomes the test split.

    Returns (X_train, Y_train, X_test, Y_test, label_names).
    """
    x_path = os.path.join(data_save_dir, 'X_limit_%s.npy' % items_limit_per_label)
    y_path = os.path.join(data_save_dir, 'Y_limit_%s.npy' % items_limit_per_label)
    X = np.load(x_path)
    Y = np.load(y_path)
    with open(classes_save_path, 'r') as handle:
        label_names = [line.replace('\n', '') for line in handle.readlines()]
    n_test = int(Y.shape[0] * test_ratio)
    X_test, Y_test = X[:n_test], Y[:n_test]
    X_train, Y_train = X[n_test:], Y[n_test:]
    return (X_train, Y_train, X_test, Y_test, label_names)
class TestEos():
    """Load YAML-defined contract tests and run them against an EOS node.

    Each YAML document describes an environment URL, a default authorization
    and a list of tests; each test pushes contract actions via ``Cleos`` and
    evaluates follow-up queries, collecting per-action results.
    """

    def __init__(self, yaml_location):
        # yaml_location is either a single YAML file or a directory of
        # .yml/.yaml files; all documents are loaded and schema-validated.
        self._documents = []
        self._results = []
        if os.path.isdir(yaml_location):
            for file in os.listdir(yaml_location):
                if (file.endswith('.yml') or file.endswith('.yaml')):
                    full_path = os.path.join(yaml_location, file)
                    with open(full_path) as yaml_file:
                        self._documents += yaml.load_all(yaml_file)
        else:
            with open(yaml_location) as yaml_file:
                self._documents += yaml.load_all(yaml_file)
        # Fail fast on malformed documents before any test runs.
        validator = TestDocSchema()
        for doc in self._documents:
            validator.deserialize(doc)

    def _get_rslt(self, rslt, message, exception):
        # Uniform per-query result record.
        return {'result': rslt, 'message': message, 'exception': exception}

    def run_query(self, ce, query):
        """Run one chain query and evaluate its expected-result expressions.

        NOTE(review): this uses eval() on strings taken from the YAML test
        documents — only run trusted test files.
        """
        ret_rslt = []
        eval_str = 'ce.{}(**query["parameters"])'.format(query['query'])
        try:
            query_rslt = eval(eval_str)
            print(query_rslt)
            for rslt in query['results']:
                try:
                    # Each expected result is an expression suffix applied to
                    # the query result, e.g. '["rows"][0]["balance"]'.
                    if (not eval('query_rslt{}'.format(rslt))):
                        ret_rslt.append(self._get_rslt(False, 'result "{}" failed'.format(rslt), ''))
                    else:
                        ret_rslt.append(self._get_rslt(True, 'result "{}" successful'.format(rslt), ''))
                except Exception as ex:
                    ret_rslt.append(self._get_rslt(False, 'failed', str(ex)))
        except Exception as ex:
            ret_rslt.append(self._get_rslt(False, 'failed', str(ex)))
        return ret_rslt

    def run_test(self, url, test):
        """Execute one test: push each action's transaction, then its queries."""
        print('Running: {}'.format(test['name']))
        ce = Cleos(url)
        for action in test['actions']:
            rslts = {'name': test['name'], 'action': action['action'], 'contract': action['contract'], 'results': True, 'message': 'successful', 'comment': ''}
            if ('comment' in action):
                rslts['comment'] = action['comment']
            # Per-action authorization overrides the test-level default.
            if ('authorization' not in action):
                authorization = test['authorization']
            else:
                authorization = action['authorization']
            payload = {'account': action['contract'], 'name': action['action'], 'authorization': [{'actor': authorization['actor'], 'permission': authorization['permission']}]}
            data = ce.abi_json_to_bin(payload['account'], payload['name'], action['parameters'])
            payload['data'] = data['binargs']
            trx = {'actions': [payload]}
            try:
                ce.push_transaction(trx, EOSKey(authorization['key']))
            except Exception as ex:
                # A failed push is only an error if the action did not declare
                # that it expects an exception.
                if (not action['exception']):
                    rslts['results'] = False
                    rslts['message'] = str(ex)
            query_rslts = []
            if ('queries' in action):
                for query in action['queries']:
                    query_rslts += self.run_query(ce, query)
            failed_queries = list(filter((lambda x: (not x['result'])), query_rslts))
            if (len(failed_queries) > 0):
                rslts['results'] = False
                rslts['message'] = 'queries failed'
            rslts['queries'] = query_rslts
            self._results.append(rslts)

    def run_test_one(self, name):
        """Run only the test whose name equals *name*."""
        for doc in self._documents:
            url = doc['environment']['url']
            for test in doc['tests']:
                if (test['name'] == name):
                    self.run_test(url, test)

    def run_test_all(self):
        """Run every test in every loaded document."""
        for doc in self._documents:
            url = doc['environment']['url']
            for test in doc['tests']:
                self.run_test(url, test)

    def _get_results(self, successful=True, failed=True):
        # Filter collected results by outcome flags (both True = everything).
        return_rslts = []
        for rslt in self._results:
            if ((not rslt['results']) and failed):
                return_rslts.append(rslt)
            if (rslt['results'] and successful):
                return_rslts.append(rslt)
        return return_rslts

    def get_all_results(self):
        return self._get_results(True, True)

    def get_failed_results(self):
        # successful=False, failed=True -> only failed results.
        return self._get_results(False, True)

    def get_successful_results(self):
        return self._get_results(True, False)
class TestLegacyWrappingScheduler():
    """Checks that a legacy (params-only) scheduler class can be registered
    and resolved through the scheduler registry."""

    class SimpleLegacyScheduler():
        """Minimal legacy scheduler that echoes back the current value."""

        def __init__(self, params):
            pass

        def next(self, current):
            return current

    def setup_method(self, method):
        # Make the legacy scheduler resolvable under the name "simple".
        scheduler.register_scheduler('simple', self.SimpleLegacyScheduler)

    def teardown_method(self, method):
        scheduler.remove_scheduler('simple')

    def test_legacy_scheduler(self):
        operation = track.Operation(
            name='raw',
            operation_type=track.OperationType.RawRequest.to_hyphenated_string(),
        )
        task = track.Task(name='raw-request', operation=operation, clients=1, schedule='simple')
        wrapped = scheduler.scheduler_for(task)
        assert wrapped.next(0) == 0
        assert wrapped.next(0) == 0
def sort_objects(self):
    """Pair selected low-poly mesh objects with matching high-poly ones.

    Candidate pairs are scored by ``get_score`` normalized by the average
    bounding-box side length; matched high-poly objects are renamed after the
    low-poly object's set name and both are selected.
    """
    objects = []
    bounds = {}
    # Collect selected mesh objects and cache their bounding boxes.
    for obj in bpy.context.selected_objects:
        if (obj.type == 'MESH'):
            objects.append(obj)
            bounds[obj] = get_bbox(obj)
    print('Objects {}x'.format(len(objects)))
    # NOTE(review): objects[0] raises IndexError if no mesh is selected;
    # min_side is also never used after this loop — confirm intent.
    min_side = min(bounds[objects[0]]['size'].x, bounds[objects[0]]['size'].y, bounds[objects[0]]['size'].z)
    avg_side = 0
    for obj in bounds:
        min_side = min(min_side, bounds[obj]['size'].x, bounds[obj]['size'].y, bounds[obj]['size'].z)
        avg_side += bounds[obj]['size'].x
        avg_side += bounds[obj]['size'].y
        avg_side += bounds[obj]['size'].z
    # Average side length over all boxes (3 axes per object).
    avg_side /= (len(bounds) * 3)
    # Split selection into low/high poly groups by bake naming convention.
    objects_low = [obj for obj in objects if (utilities_bake.get_object_type(obj) == 'low')]
    objects_high = [obj for obj in objects if (utilities_bake.get_object_type(obj) == 'high')]
    if (len(objects_low) == 0):
        self.report({'ERROR_INVALID_INPUT'}, 'There are no low poly objects selected')
        return
    elif (len(objects_high) == 0):
        self.report({'ERROR_INVALID_INPUT'}, 'There are no high poly objects selected')
        return
    print('Low {}x, High {}x'.format(len(objects_low), len(objects_high)))
    pairs_low_high = {}
    objects_left_high = objects_high.copy()
    for obj_A in objects_low:
        matches = {}
        for obj_B in objects_left_high:
            # Normalized score; only accept reasonably close candidates.
            score = get_score(obj_A, obj_B)
            p = (score / avg_side)
            if (0 < p <= 0.65):
                matches[obj_B] = p
            else:
                print('Not matched: {} '.format(p))
        if (len(matches) > 0):
            # Lowest score wins; each high-poly object is claimed at most once.
            sorted_matches = sorted(matches.items(), key=operator.itemgetter(1))
            for i in range(0, len(sorted_matches)):
                A = obj_A
                B = sorted_matches[i][0]
                p = sorted_matches[i][1]
                print("Check: {}%\t'{}' = '{}' ".format(int((p * 100.0)), A.name, B.name))
            objects_left_high.remove(sorted_matches[0][0])
            pairs_low_high[obj_A] = sorted_matches[0][0]
    print('')
    bpy.ops.object.select_all(action='DESELECT')
    for obj_A in pairs_low_high:
        obj_B = pairs_low_high[obj_A]
        try:
            # Rename the matched high-poly object after the low-poly set name
            # and select the pair; failures (e.g. removed objects) are ignored.
            obj_B.name = (utilities_bake.get_set_name(obj_A) + ' high')
            obj_A.select_set(state=True, view_layer=None)
            obj_B.select_set(state=True, view_layer=None)
        except:
            print('Fail')
    print('Matched {}x'.format(len(pairs_low_high)))
def _execute_exploit_query(exploit_id, details):
    """Query exploit data by EDB id; return JSON, or a (json, 404) tuple.

    With *details* truthy the exploit info itself is returned, otherwise the
    products affected by the exploit.
    """
    driver = InternalServer.get_mongodb_driver()
    if details:
        result = driver.get_exploit_info_by_id(exploit_id)
    else:
        result = driver.get_products_by_exploit_db_id(exploit_id)
    if (len(result) == 0):
        return (json.dumps({'err': 404, 'msg': 'Exploit Id not found'}, sort_keys=True), 404)
    return json.dumps(result, sort_keys=True)
class Boolean(Field):
    """Boolean field that optionally coerces common string/int spellings."""

    errors = {'type': 'Must be a boolean.', 'null': 'May not be null.'}
    coerce_values = {'true': True, 'false': False, 'on': True, 'off': False, '1': True, '0': False, '': False, 1: True, 0: False}
    coerce_null_values = {'', 'null', 'none'}

    def __init__(self, *, coerce_types: bool=True, **kwargs: typing.Any) -> None:
        super().__init__(**kwargs)
        self.coerce_types = coerce_types

    def validate(self, value: typing.Any) -> typing.Any:
        """Return the validated boolean (or None), or raise a validation error."""
        if value is None:
            if self.allow_null:
                return None
            raise self.validation_error('null')
        if isinstance(value, bool):
            return value
        # Non-bool input: only acceptable when coercion is enabled.
        if not self.coerce_types:
            raise self.validation_error('type')
        if isinstance(value, str):
            value = value.lower()
        if self.allow_null and value in self.coerce_null_values:
            return None
        try:
            return self.coerce_values[value]
        except (KeyError, TypeError):
            # Unknown spelling or unhashable value.
            raise self.validation_error('type')
def extractStrawberryJamNet(item):
    """Map a feed item from strawberryjam.net to a release message.

    Returns None for previews or items without volume/chapter info,
    False when no known series tag matches, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('Fox Demon Cultivation Manual', 'Fox Demon Cultivation Manual', 'translated'),
        ('His Royal Highness, Wants A Divorce', 'His Royal Highness, Wants A Divorce', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsHistogramSonificationDefaultspeechoptionsMappingPitch(Options):
    """Pitch mapping options for histogram sonification speech tracks.

    Fix: the getter/setter pairs were missing their ``@property``/
    ``@<name>.setter`` decorators, so the setters shadowed the getters.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class OptionsEditor(Options):
    """Configuration options for the editor component.

    Fix: getter/setter pairs were missing their ``@property``/
    ``@<name>.setter`` decorators, so the setters shadowed the getters.
    """

    component_properties = ('initialEditType',)

    @property
    def height(self):
        # Editor height; documented default '500px'.
        return self._config_get('500px')

    @height.setter
    def height(self, val):
        # Allow bare ints as pixel values.
        if isinstance(val, int):
            val = ('%spx' % val)
        self._config(val)

    @property
    def initialValue(self):
        return self._config_get('')

    @initialValue.setter
    def initialValue(self, val):
        self._config(val)

    @property
    def language(self):
        return self._config_get('en-US')

    @language.setter
    def language(self, val: str):
        self._config(val)

    @property
    def placeholder(self):
        return self._config_get()

    @placeholder.setter
    def placeholder(self, val):
        self._config(val)

    @property
    def theme(self):
        return self._config_get()

    @theme.setter
    def theme(self, val):
        self._config(val)

    @property
    def viewer(self):
        return self._config_get(None)

    @viewer.setter
    def viewer(self, flag: bool):
        self._config(flag)

    @property
    def initialEditType(self):
        return self._config_get('markdown')

    @initialEditType.setter
    def initialEditType(self, val: str):
        self._config(val)

    @property
    def previewStyle(self):
        return self._config_get('vertical')

    @previewStyle.setter
    def previewStyle(self, val: str):
        self._config(val)

    @property
    def previewStyles(self) -> EnumStyleOptions:
        # Enum-style helper bound to the previewStyle option.
        return EnumStyleOptions(self, 'previewStyle')
class ForwardingRuleRulesEngine(bre.BaseRulesEngine):
    """Rules engine flagging forwarding rules not matched by any defined rule."""

    RuleViolation = namedtuple('RuleViolation', ['violation_type', 'target', 'rule_index', 'load_balancing_scheme', 'port_range', 'resource_type', 'port', 'ip_protocol', 'ip_address', 'resource_id', 'full_name', 'resource_data', 'resource_name'])

    def __init__(self, rules_file_path, snapshot_timestamp=None):
        super(ForwardingRuleRulesEngine, self).__init__(rules_file_path=rules_file_path, snapshot_timestamp=snapshot_timestamp)
        self.rule_book = None

    def build_rule_book(self, global_configs=None):
        """(Re)build the rule book from the rule definition file."""
        self.rule_book = ForwardingRuleRulesBook(self._load_rule_definitions())

    def find_violations(self, forwarding_rule, force_rebuild=False):
        """Return a RuleViolation for *forwarding_rule*, or None if allowed."""
        if force_rebuild or self.rule_book is None:
            self.build_rule_book()
        resource_rules = self.rule_book.get_resource_rules()
        if not resource_rules:
            return None
        # Any matching rule whitelists the forwarding rule.
        if any(rule.find_match(forwarding_rule) for rule in resource_rules):
            return None
        return self.RuleViolation(
            violation_type='FORWARDING_RULE_VIOLATION',
            load_balancing_scheme=forwarding_rule.load_balancing_scheme,
            target=forwarding_rule.target,
            port_range=forwarding_rule.port_range,
            port=forwarding_rule.ports,
            ip_protocol=forwarding_rule.ip_protocol,
            ip_address=forwarding_rule.ip_address,
            resource_id=forwarding_rule.resource_id,
            full_name=forwarding_rule.full_name,
            rule_index=len(resource_rules),
            resource_name=forwarding_rule.name,
            resource_type=ResourceType.FORWARDING_RULE,
            resource_data=str(forwarding_rule),
        )

    def add_rules(self, rules):
        """Forward additional rules to the rule book, if one exists."""
        if self.rule_book is not None:
            self.rule_book.add_rules(rules)
class OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Tremolo mapping options (depth/speed sub-configurations).

    Fix: the sub-config accessors were missing their ``@property`` decorators.
    """

    @property
    def depth(self) -> 'OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        return self._config_sub_data('depth', OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    @property
    def speed(self) -> 'OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        return self._config_sub_data('speed', OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
@pytest.mark.skipif(not can_import('magic'), reason='Libmagic is not installed')
def test_flyte_file_type_annotated_hashmethod(local_dummy_file):
    """A jpeg-typed FlyteFile with a custom hash must fail type validation
    when fed a text file.

    Fix: the decorators were stripped — ``@pytest.mark.skipif`` on the test
    and the flytekit ``@task``/``@workflow`` decorators on the inner
    functions (without them no type checking runs and no TypeError is
    raised). NOTE(review): decorator names restored from flytekit test
    conventions — confirm against the original file.
    """
    def calc_hash(ff: FlyteFile) -> str:
        return str(ff.path)

    HashedFlyteFile = Annotated[FlyteFile['jpeg'], HashMethod(calc_hash)]

    @task
    def t1(path: str) -> HashedFlyteFile:
        return HashedFlyteFile(path)

    @task
    def t2(ff: HashedFlyteFile) -> None:
        print(ff.path)

    @workflow
    def wf(path: str) -> None:
        ff = t1(path=path)
        t2(ff=ff)

    with pytest.raises(TypeError) as excinfo:
        wf(path=local_dummy_file)
    assert ('Incorrect file type, expected image/jpeg, got text/plain' in str(excinfo.value))
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item: Item, call: CallInfo):
    """Pytest hook wrapper: dump window coordinates when a test's call phase fails.

    Fix: the residue line ``(hookwrapper=True)`` was a stripped
    ``@pytest.hookimpl(hookwrapper=True)`` decorator — without it the yield-
    based hook wrapper is invalid; the redundant trailing ``pass`` is removed.
    """
    outcome = yield
    result = outcome.get_result()
    if result.when == 'call' and result.failed:
        try:
            print_windows_coordinates()
        except Exception as e:
            # Diagnostics only — never let this mask the real test failure.
            print('Unable to print windows coordinates', e)
def as_listener(func=None, signal_name=None):
    """Tag a function as a signal listener.

    Works both as a bare decorator (``@as_listener`` — the signal name
    defaults to the function name) and as a decorator factory
    (``@as_listener(signal_name='...')``).
    """
    if not func and signal_name:
        # Factory form: return a decorator binding the explicit signal name.
        def wrapper(inner):
            inner._listener_signal_name = signal_name
            return inner
        return wrapper
    # Bare form: the function's own name is the signal name.
    func._listener_signal_name = func.__name__
    return func
class RadioButtonField(ToggleField):
    """Toggle field backed by a wx.RadioButton control."""

    def _create_control(self, parent):
        return wx.RadioButton(parent)

    def _set_control_value(self, value):
        super()._set_control_value(value)
        # wx does not fire an event for programmatic changes, so post a
        # matching EVT_RADIOBUTTON event manually to keep listeners in sync.
        event = wx.CommandEvent(wx.EVT_RADIOBUTTON.typeId, self.control.GetId())
        event.SetInt(value)
        wx.PostEvent(self.control.GetEventHandler(), event)

    def _observe_control_value(self, remove=False):
        if remove:
            self.control.Unbind(wx.EVT_RADIOBUTTON, handler=self._update_value)
            return
        self.control.Bind(wx.EVT_RADIOBUTTON, self._update_value)
def test_publication_date_sort(client, publish_dates_data):
    """Sorting by publication_date requires a fiscal period suffix; with one,
    results come back ordered by that period's publication date."""
    # Missing ",<fiscal_period>" suffix -> 422 with an explanatory message.
    resp = client.get((url + '?fiscal_year=2019&sort=publication_date'))
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
    response = resp.json()
    assert (response == {'detail': "publication_date sort param must be in the format 'publication_date,<fiscal_period>' where <fiscal_period> is in the range 2-12"})
    # Extra fixtures: two submission windows (quarterly and monthly) plus
    # matching submissions and agency overviews for period 3 of FY2019.
    dabs5 = baker.make('submissions.DABSSubmissionWindowSchedule', pk=5, submission_reveal_date='2020-01-05 00:00:00.000000+00', submission_fiscal_year=2020, submission_fiscal_quarter=1, submission_fiscal_month=3, is_quarter=True)
    dabs6 = baker.make('submissions.DABSSubmissionWindowSchedule', pk=6, submission_reveal_date='2020-01-06 00:00:00.000000+00', submission_fiscal_year=2020, submission_fiscal_quarter=1, submission_fiscal_month=3, is_quarter=False)
    baker.make('submissions.SubmissionAttributes', submission_id=5, toptier_code='001', reporting_fiscal_year=2019, reporting_fiscal_period=3, reporting_fiscal_quarter=1, quarter_format_flag=True, published_date='2020-01-28 07:46:21.419796+00', certified_date='2020-01-02 07:46:21.419796+00', submission_window=dabs5)
    baker.make('submissions.SubmissionAttributes', submission_id=6, toptier_code='002', reporting_fiscal_year=2019, reporting_fiscal_period=3, reporting_fiscal_quarter=1, quarter_format_flag=False, published_date='2020-01-01 07:46:21.419796+00', certified_date='2020-01-28 07:46:21.419796+00', submission_window=dabs6)
    baker.make('reporting.ReportingAgencyOverview', toptier_code='001', fiscal_year=2019, fiscal_period=3, total_budgetary_resources=10.0)
    baker.make('reporting.ReportingAgencyOverview', toptier_code='002', fiscal_year=2019, fiscal_period=3, total_budgetary_resources=10.0)
    # Valid sort: descending by period-3 publication date -> agency 001 first.
    resp = client.get((url + '?fiscal_year=2019&sort=publication_date,3&order=desc'))
    assert (resp.status_code == status.HTTP_200_OK)
    response = resp.json()
    assert (len(response['results']) == 2)
    expected_results = [{'agency_name': 'Test Agency', 'abbreviation': 'TA', 'toptier_code': '001', 'current_total_budget_authority_amount': 200.0, 'periods': [{'period': 3, 'quarter': 1, 'submission_dates': {'publication_date': '2020-01-28 07:46:21.419796+00', 'certification_date': '2020-01-02 07:46:21.419796+00'}, 'quarterly': True}, {'period': 6, 'quarter': 2, 'submission_dates': {'publication_date': '', 'certification_date': ''}, 'quarterly': False}, {'period': 9, 'quarter': 3, 'submission_dates': {'publication_date': '', 'certification_date': ''}, 'quarterly': False}, {'period': 12, 'quarter': 4, 'submission_dates': {'publication_date': '2020-10-02 07:46:21.419796+00', 'certification_date': '2020-10-02 07:46:21.419796+00'}, 'quarterly': True}]}, {'agency_name': 'Test Agency 2', 'abbreviation': 'TA2', 'toptier_code': '002', 'current_total_budget_authority_amount': 300.0, 'periods': [{'period': 3, 'quarter': 1, 'submission_dates': {'publication_date': '2020-01-01 07:46:21.419796+00', 'certification_date': '2020-01-28 07:46:21.419796+00'}, 'quarterly': False}, {'period': 6, 'quarter': 2, 'submission_dates': {'publication_date': '', 'certification_date': ''}, 'quarterly': False}, {'period': 9, 'quarter': 3, 'submission_dates': {'publication_date': '', 'certification_date': ''}, 'quarterly': False}, {'period': 12, 'quarter': 4, 'submission_dates': {'certification_date': '2020-10-02 07:46:21.419796+00', 'publication_date': '2020-10-02 07:46:21.419796+00'}, 'quarterly': True}]}]
    assert (response['results'] == expected_results)
@pytest.mark.skipif(backend_default == 'numba', reason='Not supported by Numba')
def test_jit_dict():
    """JIT-compiled function taking a dict: call before, during and after
    the extension compiles.

    Fix: the residue ``.skipif(...)`` line was a stripped
    ``@pytest.mark.skipif`` decorator.
    """
    from _transonic_testing.for_test_justintime import func_dict

    # First call triggers JIT compilation in the background.
    d = dict(a=1, b=2)
    func_dict(d)
    if not can_import_accelerator():
        return
    mod = modules[module_name]
    cjit = mod.jit_functions['func_dict']
    d = dict(a=1, b=2)
    func_dict(d)
    wait_for_all_extensions()
    # Poll until the compiled extension is ready, then call it once more.
    for _ in range(100):
        d = dict(a=1, b=2)
        func_dict(d)
        sleep(0.1)
        if not cjit.compiling:
            sleep(0.1)
            func_dict(d)
            break
def round_to_n(x, n):
    """Round *x* to *n* significant digits and return it as a string.

    Strings, non-finite numbers and falsy values (0, 0.0) pass through
    unchanged. ``n == 0`` rounds to the nearest integer.
    """
    if isinstance(x, str) or not math.isfinite(x) or not x:
        return x
    if n == 0:
        return str(round(x, None))
    # Digits after the decimal point needed for n significant figures.
    significant = (n - 1) - int(math.floor(math.log10(abs(x))))
    try:
        return str(round(x, significant or None))
    except ValueError as e:
        print('error', x, n, e)
        raise
def _load_lexers(module_name):
    """Import a pygments lexer module and register its exported lexers."""
    if module_name.startswith('pygments'):
        # Redirect to the copy vendored inside mdpopups.
        module_name = module_name.replace('pygments', 'mdpopups.pygments', 1)
    module = __import__(module_name, None, None, ['__all__'])
    for export_name in module.__all__:
        lexer_cls = getattr(module, export_name)
        _lexer_cache[lexer_cls.name] = lexer_cls
class ToDotConverter():
    """Render a DiGraph as a graphviz dot document.

    Fix: ``write`` takes ``cls`` and instantiates it, but was missing its
    ``@classmethod`` decorator — calling ``ToDotConverter.write(graph)``
    would have bound the graph to ``cls``.
    """

    # Only these node/edge attributes are emitted into the dot output.
    ATTRIBUTES = {'color', 'fillcolor', 'label', 'shape', 'style'}

    def __init__(self, graph: DiGraph):
        self._graph = graph

    @classmethod
    def write(cls, graph: DiGraph) -> str:
        """Entry point: convert *graph* to a dot string."""
        converter = cls(graph)
        return converter._create_dot()

    def _create_dot(self) -> str:
        # HEADER / FOOTER wrap the node and edge statements.
        content = HEADER + '\n'
        for node, data in self._graph.nodes(data=True):
            content += f'{node} [{self._get_attributes(data)}];\n'
        for source, sink, data in self._graph.edges(data=True):
            content += f'{source} -> {sink} [{self._get_attributes(data)}];\n'
        content += FOOTER
        return content

    def _get_attributes(self, data):
        # key="value" pairs for the whitelisted attributes only.
        return ', '.join(
            key + '=' + self._process(value)
            for key, value in data.items()
            if key in self.ATTRIBUTES
        )

    def _process(self, value: str):
        """Escape quotes/newlines and wrap the value in double quotes."""
        value = value.replace('"', '\\"')
        value = value.replace('\n', '\\n')
        return f'"{value}"'
class OptionSeriesCylinderSonificationContexttracksMappingLowpass(Options):
    """Lowpass filter mapping options (frequency/resonance sub-configurations).

    Fix: the sub-config accessors were missing their ``@property`` decorators.
    """

    @property
    def frequency(self) -> 'OptionSeriesCylinderSonificationContexttracksMappingLowpassFrequency':
        return self._config_sub_data('frequency', OptionSeriesCylinderSonificationContexttracksMappingLowpassFrequency)

    @property
    def resonance(self) -> 'OptionSeriesCylinderSonificationContexttracksMappingLowpassResonance':
        return self._config_sub_data('resonance', OptionSeriesCylinderSonificationContexttracksMappingLowpassResonance)
class OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Lowpass resonance mapping options for lollipop sonification.

    Fix: the getter/setter pairs were missing their ``@property``/
    ``@<name>.setter`` decorators, so the setters shadowed the getters.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@_after_configure.connect
def setup_scheduled_task(sender, **kwargs):
    """Register all periodic celery tasks once the app is configured.

    Fix: the bare ``_after_configure.connect`` residue line was a stripped
    ``@_after_configure.connect`` decorator — without it the function is
    never wired to the signal.
    """
    from celery.schedules import crontab
    # Daily 05:30 jobs.
    sender.add_periodic_task(crontab(hour=5, minute=30), send_after_event_mail)
    sender.add_periodic_task(crontab(hour=5, minute=30), ticket_sales_end_mail)
    # Monthly invoicing and fee-notification follow-ups.
    sender.add_periodic_task(crontab(minute=0, hour=0, day_of_month=1), send_monthly_event_invoice)
    sender.add_periodic_task(crontab(minute=0, hour=0, day_of_month=14), send_event_fee_notification_followup.s(follow_up=True))
    sender.add_periodic_task(crontab(minute=0, hour=0, day_of_month=27), send_event_fee_notification_followup.s(follow_up='pre_due'))
    sender.add_periodic_task(crontab(minute=0, hour=0, day_of_month=3), send_event_fee_notification_followup.s(follow_up='post_due'))
    sender.add_periodic_task(crontab(hour=5, minute=30), change_session_state_on_event_completion)
    # High-frequency cleanup jobs.
    sender.add_periodic_task(crontab(minute='*/25'), expire_pending_tickets)
    sender.add_periodic_task(crontab(minute='*/10'), expire_initializing_tickets)
    sender.add_periodic_task(crontab(minute='*/5'), delete_ticket_holders_no_order_id)
def log_response(self, endpoint_name, params, resp):
    """Log a fetch response: info plus pagination count on 200, error otherwise."""
    if resp.status_code != 200:
        logging.error('{} error fetching {}'.format(resp.status_code, endpoint_name))
        return
    logging.info('fetch_{} response: {}'.format(endpoint_name, resp))
    logging.info('params: %s', params)
    payload = json.loads(resp.text)
    # Pagination count may legitimately be absent -> logged as None.
    count = payload.get('pagination', {}).get('count', None)
    logging.info('Response count: %s', count)
class CustomAnalysis():
    """Analysis object wrapping a custom-named filter/analyzer definition.

    NOTE(review): this class calls ``super().__init__(**kwargs)`` and
    ``super().to_dict()``, so it is presumably used as a mixin next to a base
    class providing those — confirm the intended MRO; with only ``object`` as
    base, ``get_definition`` would fail.
    """
    name = 'custom'

    def __init__(self, filter_name, builtin_type='custom', **kwargs):
        self._builtin_type = builtin_type
        self._name = filter_name
        super().__init__(**kwargs)

    def to_dict(self):
        # Serializes to just the custom name (referenced by name elsewhere).
        return self._name

    def get_definition(self):
        """Return the full definition dict with the builtin type filled in."""
        d = super().to_dict()
        # Unwrap the payload stored under this object's section name.
        d = d.pop(self.name)
        d['type'] = self._builtin_type
        return d
class OptionPlotoptionsTreemapDatalabelsTextpath(Options):
    """Text path options for treemap data labels.

    Fix: the getter/setter pairs were missing their ``@property``/
    ``@<name>.setter`` decorators, so the setters shadowed the getters.
    """

    @property
    def attributes(self):
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        # Documented default: disabled.
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def get_art(item, server):
    """Build the Kodi artwork dict for an Emby/Jellyfin *item*.

    Keys are Kodi art slots (thumb, poster, fanart, tvshow.*, season.*, ...);
    which slots are filled and whether parent artwork is used depends on the
    item's Type. Unfilled slots stay empty strings.
    """
    art = {'thumb': '', 'fanart': '', 'poster': '', 'banner': '', 'clearlogo': '', 'clearart': '', 'discart': '', 'landscape': '', 'tvshow.fanart': '', 'tvshow.poster': '', 'tvshow.clearart': '', 'tvshow.clearlogo': '', 'tvshow.banner': '', 'tvshow.landscape': ''}
    image_tags = item['ImageTags']
    if ((image_tags is not None) and (image_tags['Primary'] is not None)):
        art['thumb'] = downloadUtils.get_artwork(item, 'Primary', server=server)
    item_type = item['Type']
    if (item_type == 'Genre'):
        art['poster'] = downloadUtils.get_artwork(item, 'Primary', server=server)
    elif (item_type == 'Episode'):
        # Episodes take all show-level art from the parent series.
        art['tvshow.poster'] = downloadUtils.get_artwork(item, 'Primary', parent=True, server=server)
        art['tvshow.clearart'] = downloadUtils.get_artwork(item, 'Art', parent=True, server=server)
        art['clearart'] = downloadUtils.get_artwork(item, 'Art', parent=True, server=server)
        art['tvshow.clearlogo'] = downloadUtils.get_artwork(item, 'Logo', parent=True, server=server)
        art['clearlogo'] = downloadUtils.get_artwork(item, 'Logo', parent=True, server=server)
        art['tvshow.banner'] = downloadUtils.get_artwork(item, 'Banner', parent=True, server=server)
        art['banner'] = downloadUtils.get_artwork(item, 'Banner', parent=True, server=server)
        art['tvshow.landscape'] = downloadUtils.get_artwork(item, 'Thumb', parent=True, server=server)
        art['landscape'] = downloadUtils.get_artwork(item, 'Thumb', parent=True, server=server)
        art['tvshow.fanart'] = downloadUtils.get_artwork(item, 'Backdrop', parent=True, server=server)
        art['fanart'] = downloadUtils.get_artwork(item, 'Backdrop', parent=True, server=server)
    elif (item_type == 'Season'):
        # Seasons mix parent (show) art with their own season art.
        art['tvshow.poster'] = downloadUtils.get_artwork(item, 'Primary', parent=True, server=server)
        art['season.poster'] = downloadUtils.get_artwork(item, 'Primary', parent=False, server=server)
        art['poster'] = downloadUtils.get_artwork(item, 'Primary', parent=False, server=server)
        art['tvshow.clearart'] = downloadUtils.get_artwork(item, 'Art', parent=True, server=server)
        art['clearart'] = downloadUtils.get_artwork(item, 'Art', parent=True, server=server)
        art['tvshow.clearlogo'] = downloadUtils.get_artwork(item, 'Logo', parent=True, server=server)
        art['clearlogo'] = downloadUtils.get_artwork(item, 'Logo', parent=True, server=server)
        art['tvshow.banner'] = downloadUtils.get_artwork(item, 'Banner', parent=True, server=server)
        art['season.banner'] = downloadUtils.get_artwork(item, 'Banner', parent=False, server=server)
        art['banner'] = downloadUtils.get_artwork(item, 'Banner', parent=False, server=server)
        art['tvshow.landscape'] = downloadUtils.get_artwork(item, 'Thumb', parent=True, server=server)
        art['season.landscape'] = downloadUtils.get_artwork(item, 'Thumb', parent=False, server=server)
        art['landscape'] = downloadUtils.get_artwork(item, 'Thumb', parent=False, server=server)
        art['tvshow.fanart'] = downloadUtils.get_artwork(item, 'Backdrop', parent=True, server=server)
        art['fanart'] = downloadUtils.get_artwork(item, 'Backdrop', parent=True, server=server)
    elif (item_type == 'Series'):
        # Series use their own artwork throughout.
        art['tvshow.poster'] = downloadUtils.get_artwork(item, 'Primary', parent=False, server=server)
        art['poster'] = downloadUtils.get_artwork(item, 'Primary', parent=False, server=server)
        art['tvshow.clearart'] = downloadUtils.get_artwork(item, 'Art', parent=False, server=server)
        art['clearart'] = downloadUtils.get_artwork(item, 'Art', parent=False, server=server)
        art['tvshow.clearlogo'] = downloadUtils.get_artwork(item, 'Logo', parent=False, server=server)
        art['clearlogo'] = downloadUtils.get_artwork(item, 'Logo', parent=False, server=server)
        art['tvshow.banner'] = downloadUtils.get_artwork(item, 'Banner', parent=False, server=server)
        art['banner'] = downloadUtils.get_artwork(item, 'Banner', parent=False, server=server)
        art['tvshow.landscape'] = downloadUtils.get_artwork(item, 'Thumb', parent=False, server=server)
        art['landscape'] = downloadUtils.get_artwork(item, 'Thumb', parent=False, server=server)
        art['tvshow.fanart'] = downloadUtils.get_artwork(item, 'Backdrop', parent=False, server=server)
        art['fanart'] = downloadUtils.get_artwork(item, 'Backdrop', parent=False, server=server)
    elif ((item_type == 'Movie') or (item_type == 'BoxSet')):
        art['poster'] = downloadUtils.get_artwork(item, 'Primary', server=server)
        art['landscape'] = downloadUtils.get_artwork(item, 'Thumb', server=server)
        art['banner'] = downloadUtils.get_artwork(item, 'Banner', server=server)
        art['clearlogo'] = downloadUtils.get_artwork(item, 'Logo', server=server)
        art['clearart'] = downloadUtils.get_artwork(item, 'Art', server=server)
        art['discart'] = downloadUtils.get_artwork(item, 'Disc', server=server)
        art['fanart'] = downloadUtils.get_artwork(item, 'Backdrop', server=server)
        # Fall back to the parent's backdrop when the item has none.
        if (not art['fanart']):
            art['fanart'] = downloadUtils.get_artwork(item, 'Backdrop', parent=True, server=server)
    return art
class BackgroundRemovalDataClass(BaseModel):
    """Background-removal result: the image as base64 plus its hosted URL."""
    image_b64: str = Field(..., description='The image in base64 format.')
    image_resource_url: str = Field(..., description='The image url.')
def generate_resource_url(img_b64: str, fmt: str='png') -> str:
    """Decode a base64 image and upload it to S3 under a random filename.

    Returns the uploaded resource URL produced by the upload helper.
    """
    raw = BytesIO(base64.b64decode(img_b64.encode()))
    filename = f'{uuid.uuid4()}.{fmt}'
    # Imported lazily by module path — presumably to avoid a circular import
    # at load time (TODO confirm).
    s3_module = importlib.import_module('edenai_apis.utils.upload_s3')
    return s3_module.upload_file_bytes_to_s3(file=raw, file_name=filename, process_type=s3_module.USER_PROCESS)
def register(registry):
    """Register click and text-display interaction handlers for both editors."""
    def _click(wrapper, _):
        return _interaction_helpers.mouse_click_qwidget(wrapper._target.control, wrapper.delay)

    def _displayed_text(wrapper, _):
        return wrapper._target.control.text()

    handler_map = [(MouseClick, _click), (DisplayedText, _displayed_text)]
    for editor_class in (SimpleEditor, CustomEditor):
        for interaction_class, handler in handler_map:
            registry.register_interaction(
                target_class=editor_class,
                interaction_class=interaction_class,
                handler=handler,
            )
def get_instances(schema=None):
    """Yield (cls, instance) pairs for one schema, or for all known schemas."""
    if schema:
        schemas = [schema]
    else:
        schemas = get_schemas()
    for current in schemas:
        data = get_data(current)
        yield from _datafile_traversor(data['class'], data.get('instances'))
def test_guess_of_dataclass():
    """Round-trip a dataclass through TypeEngine literals and resolve it via LiteralsResolver."""
    class Foo(DataClassJsonMixin):
        x: int
        y: str
        z: typing.Dict[(str, int)]

        def hello(self):
            ...

    literal_type = TypeEngine.to_literal_type(Foo)
    original = Foo(1, 'hello', {'world': 3})
    literal = TypeEngine.to_literal(FlyteContext.current_context(), original, Foo, literal_type)
    resolver = LiteralsResolver({'a': literal})
    # The resolved value must equal the original and keep its methods.
    assert resolver.get('a', Foo) == original
    assert hasattr(resolver.get('a', Foo), 'hello') is True
class EntityData:
    """Mutable record describing one integration entity (state, attributes, HA metadata)."""

    id: str
    unique_id: str
    name: str
    state: bool
    attributes: dict
    icon: str
    device_name: str
    status: str
    topic: str
    event: str
    binary_sensor_device_class: Optional[BinarySensorDeviceClass]
    type: str
    details: dict
    disabled: bool

    def __init__(self):
        # New entities start empty, enabled, and in the CREATED status.
        self.id = ''
        self.unique_id = ''
        self.name = ''
        self.state = False
        self.attributes = {}
        self.icon = ''
        self.device_name = ''
        self.status = ENTITY_STATUS_CREATED
        self.topic = ''
        self.event = ''
        self.binary_sensor_device_class = None
        self.type = ''
        self.details = {}
        self.disabled = False

    def __repr__(self):
        # Render the full field mapping keyed by the shared ENTITY_* constants.
        snapshot = {
            ENTITY_ID: self.id,
            ENTITY_UNIQUE_ID: self.unique_id,
            ENTITY_NAME: self.name,
            ENTITY_STATE: self.state,
            ENTITY_ATTRIBUTES: self.attributes,
            ENTITY_ICON: self.icon,
            ENTITY_DEVICE_NAME: self.device_name,
            ENTITY_STATUS: self.status,
            ENTITY_TOPIC: self.topic,
            ENTITY_EVENT: self.event,
            ENTITY_BINARY_SENSOR_DEVICE_CLASS: self.binary_sensor_device_class,
            ENTITY_BINARY_SENSOR_TYPE: self.type,
            ENTITY_CAMERA_DETAILS: self.details,
            ENTITY_DISABLED: self.disabled,
        }
        return f'{snapshot}'
class MyPlot(HasTraits):
    """Demo plot showing Chaco status overlays (error / warning) toggled by buttons."""

    plot = Instance(Plot)
    status_overlay = Instance(StatusLayer)
    error_button = Button('Error')
    warn_button = Button('Warning')
    no_problem_button = Button('No problem')

    def _plot_default(self):
        # Simple quadratic series used as demo data.
        index = numpy.array([1, 2, 3, 4, 5])
        data_series = (index ** 2)
        plot_data = ArrayPlotData(index=index)
        plot_data.set_data('data_series', data_series)
        plot = Plot(plot_data)
        plot.plot(('index', 'data_series'))
        return plot

    def _error_button_fired(self, event):
        # Replace any current badge with an error badge at the upper-left, quarter scale.
        self.clear_status()
        self.status_overlay = ErrorLayer(component=self.plot, align='ul', scale_factor=0.25)
        self.plot.overlays.append(self.status_overlay)
        self.plot.request_redraw()

    def _warn_button_fired(self, event):
        # Replace any current badge with a warning badge at the upper-right, quarter scale.
        self.clear_status()
        self.status_overlay = WarningLayer(component=self.plot, align='ur', scale_factor=0.25)
        self.plot.overlays.append(self.status_overlay)
        self.plot.request_redraw()

    def _no_problem_button_fired(self, event):
        self.clear_status()
        self.plot.request_redraw()

    def clear_status(self):
        # NOTE(review): only fade_out is called; presumably StatusLayer removes
        # itself from `overlays` when the fade finishes -- confirm in Chaco's
        # StatusLayer implementation.
        if (self.status_overlay in self.plot.overlays):
            self.status_overlay.fade_out()

    traits_view = View(HGroup(UItem('error_button'), UItem('warn_button'), UItem('no_problem_button')), UItem('plot', editor=ComponentEditor()), width=700, height=600, resizable=True)
# NOTE(review): the line above this fixture was a bare keyword-argument tuple
# `(name='arrays', params=[...])`, which is not valid Python on its own; it is
# evidently the argument list of a stripped `@pytest.fixture` decorator, restored
# here. Confirm `pytest` is imported at the top of this test module.
@pytest.fixture(name='arrays', params=['single-element', 'multi-element'])
def angles_vectors_as_arrays(request):
    """Return matching angle and vector triples as numpy arrays.

    'single-element' yields 1-element arrays built from the first ANGLES/VECTORS
    entry; 'multi-element' yields full columns stacked from every entry.
    """
    if (request.param == 'single-element'):
        (intensity, inclination, declination) = tuple((np.atleast_1d(i) for i in ANGLES[0]))
        (magnetic_e, magnetic_n, magnetic_u) = tuple((np.atleast_1d(i) for i in VECTORS[0]))
    else:
        (intensity, inclination, declination) = np.vstack(ANGLES).T
        (magnetic_e, magnetic_n, magnetic_u) = np.vstack(VECTORS).T
    return ((intensity, inclination, declination), (magnetic_e, magnetic_n, magnetic_u))
class Parameters(torch.nn.Module, metaclass=LazyMeta):
    """Base module producing (optional) parameter tensors for an input/context pair.

    Subclasses implement `_forward`; `forward` is a thin public delegate.
    Shapes are recorded but no tensors are allocated here.
    """

    def __init__(self, param_shapes: Sequence[torch.Size], input_shape: torch.Size, context_shape: Optional[torch.Size]) -> None:
        super().__init__()
        self.input_shape = input_shape
        self.param_shapes = param_shapes
        self.context_shape = context_shape

    def forward(self, x: Optional[torch.Tensor]=None, context: Optional[torch.Tensor]=None) -> Optional[Sequence[torch.Tensor]]:
        """Delegate to the subclass hook `_forward`."""
        return self._forward(x, context)

    def _forward(self, x: Optional[torch.Tensor]=None, context: Optional[torch.Tensor]=None) -> Optional[Sequence[torch.Tensor]]:
        # Must be overridden by subclasses.
        raise NotImplementedError()
class OptionSeriesWindbarbDragdrop(Options):
    """Highcharts `series.windbarb.dragDrop` options: point drag-and-drop behaviour.

    NOTE(review): every name below is defined twice (getter then setter), so the
    second def shadows the first as written. In the upstream generated source
    these pairs presumably carry @property / @<name>.setter decorators that were
    stripped during extraction -- confirm against the original module.
    """

    def draggableX(self):
        return self._config_get(None)

    def draggableX(self, flag: bool):
        self._config(flag, js_type=False)

    def draggableY(self):
        return self._config_get(None)

    def draggableY(self, flag: bool):
        self._config(flag, js_type=False)

    def dragHandle(self) -> 'OptionSeriesWindbarbDragdropDraghandle':
        # Nested sub-options object for the drag handle.
        return self._config_sub_data('dragHandle', OptionSeriesWindbarbDragdropDraghandle)

    def dragMaxX(self):
        return self._config_get(None)

    def dragMaxX(self, num: float):
        self._config(num, js_type=False)

    def dragMaxY(self):
        return self._config_get(None)

    def dragMaxY(self, num: float):
        self._config(num, js_type=False)

    def dragMinX(self):
        return self._config_get(None)

    def dragMinX(self, num: float):
        self._config(num, js_type=False)

    def dragMinY(self):
        return self._config_get(None)

    def dragMinY(self, num: float):
        self._config(num, js_type=False)

    def dragPrecisionX(self):
        return self._config_get(0)

    def dragPrecisionX(self, num: float):
        self._config(num, js_type=False)

    def dragPrecisionY(self):
        return self._config_get(0)

    def dragPrecisionY(self, num: float):
        self._config(num, js_type=False)

    def dragSensitivity(self):
        return self._config_get(2)

    def dragSensitivity(self, num: float):
        self._config(num, js_type=False)

    def groupBy(self):
        return self._config_get(None)

    def groupBy(self, text: str):
        self._config(text, js_type=False)

    def guideBox(self) -> 'OptionSeriesWindbarbDragdropGuidebox':
        # Nested sub-options object for the guide box shown while dragging.
        return self._config_sub_data('guideBox', OptionSeriesWindbarbDragdropGuidebox)

    def liveRedraw(self):
        return self._config_get(True)

    def liveRedraw(self, flag: bool):
        self._config(flag, js_type=False)
def test_dolt_table_to_python_value(mocker):
    """to_literal on a DoltTable should leave branch_conf unset in the literal's config."""
    mocker.patch('dolt_integrations.core.save', return_value=True)
    dolt_table = DoltTable(data=pandas.DataFrame(), config=DoltConfig(db_path='p'))
    literal = DoltTableNameTransformer.to_literal(
        self=None,
        ctx=None,
        python_val=dolt_table,
        python_type=DoltTable,
        expected=None,
    )
    assert literal.scalar.generic['config']['branch_conf'] is None
class OptionSeriesPyramidSonificationTracksMappingPitch(Options):
    """Highcharts `series.pyramid.sonification.tracks.mapping.pitch` options.

    NOTE(review): each name below is defined twice (getter then setter), so the
    second def shadows the first as written; the upstream generated source
    presumably used @property / @<name>.setter decorators that were stripped
    during extraction -- confirm against the original module.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
def _estr(e, prec=0, tab=''):
    """Pretty-print expression `e` as a string.

    `prec` is the precedence of the enclosing context: a sub-expression whose
    operator binds less tightly than `prec` is parenthesized. `tab` is the
    current indentation prefix used by the multi-line let/tuple forms.
    """
    if isinstance(e, A.Var):
        return str(e.name)
    elif isinstance(e, A.Unk):
        # Unknown expression prints as nothing.
        return ''
    elif isinstance(e, A.Not):
        # NOTE(review): no negation symbol is emitted, so Not prints identically
        # to its argument; an original prefix glyph (likely a unicode logical
        # not) appears to have been lost in extraction -- confirm upstream.
        return f"{_estr(e.arg, op_prec['unary'], tab=tab)}"
    elif isinstance(e, A.USub):
        return f"-{_estr(e.arg, op_prec['unary'], tab=tab)}"
    elif isinstance(e, A.Const):
        return str(e.val)
    elif isinstance(e, A.ConstSym):
        return f'CONST({e.name})'
    elif isinstance(e, A.BinOp):
        local_prec = op_prec[e.op]
        # Right operand gets prec+1 so equal-precedence chains associate left.
        lhs = _estr(e.lhs, prec=local_prec, tab=tab)
        rhs = _estr(e.rhs, prec=(local_prec + 1), tab=tab)
        if (local_prec < prec):
            return f'({lhs} {binop_print[e.op]} {rhs})'
        else:
            return f'{lhs} {binop_print[e.op]} {rhs}'
    elif isinstance(e, A.Stride):
        return f'stride({e.name},{e.dim})'
    elif isinstance(e, A.LetStrides):
        strides = ','.join([_estr(s, tab=(tab + '  ')) for s in e.strides])
        bind = f'{e.name} = ({strides})'
        body = _estr(e.body, tab=(tab + '  '))
        s = f'''letStride {bind}
{tab}in {body}'''
        return (f'''({s}
{tab})''' if (prec > 0) else s)
    elif isinstance(e, A.Select):
        local_prec = op_prec['ternary']
        cond = _estr(e.cond, tab=tab)
        tcase = _estr(e.tcase, prec=(local_prec + 1), tab=tab)
        fcase = _estr(e.fcase, prec=(local_prec + 1), tab=tab)
        if (local_prec < prec):
            return f'(({cond})? {tcase} : {fcase})'
        else:
            return f'({cond})? {tcase} : {fcase}'
    elif isinstance(e, (A.ForAll, A.Exists)):
        # NOTE(review): both quantifier prefixes are empty strings here -- the
        # original presumably printed forall/exists glyphs that were lost in
        # extraction; confirm upstream.
        op = ('' if isinstance(e, A.ForAll) else '')
        local_prec = op_prec[('forall' if isinstance(e, A.ForAll) else 'exists')]
        s = f"{op}{e.name},{_estr(e.arg, op_prec['forall'], tab=tab)}"
        if (local_prec < prec):
            s = f'({s})'
        return s
    elif isinstance(e, (A.Definitely, A.Maybe)):
        # Modal markers: D = definitely, M = maybe.
        op = ('D' if isinstance(e, A.Definitely) else 'M')
        return f"{op}{_estr(e.arg, op_prec['unary'], tab=tab)}"
    elif isinstance(e, A.Let):
        # Flatten directly-nested lets into a single multi-binding let.
        if isinstance(e.body, A.Let):
            return _estr(A.Let((e.names + e.body.names), (e.rhs + e.body.rhs), e.body.body, e.type, e.srcinfo), prec=prec, tab=tab)
        binds = '\n'.join([f"{tab}{x} = {_estr(rhs, tab=(tab + '  '))}" for (x, rhs) in zip(e.names, e.rhs)])
        body = _estr(e.body, tab=(tab + '  '))
        s = f'''let
{binds}
{tab}in {body}'''
        return (f'''({s}
{tab})''' if (prec > 0) else s)
    elif isinstance(e, A.Tuple):
        args = ', '.join([_estr(a, tab=tab) for a in e.args])
        return f'({args})'
    elif isinstance(e, A.LetTuple):
        names = ','.join([str(n) for n in e.names])
        bind = f"{names} = {_estr(e.rhs, tab=(tab + '  '))}"
        body = _estr(e.body, tab=(tab + '  '))
        s = f'''let_tuple {bind}
{tab}in {body}'''
        return (f'''({s}
{tab})''' if (prec > 0) else s)
    else:
        # All expression variants must be handled above.
        assert False, 'bad case'
def main():
    """Build the gateware, or (depending on argv) load a bitstream / run a simulation."""
    build_dir = 'gateware'
    platform = Platform()
    if ('load' in sys.argv[1:]):
        # Flash the previously built bitstream instead of rebuilding.
        prog = platform.create_programmer()
        prog.load_bitstream(os.path.join(build_dir, 'impl', 'pnr', 'project.fs'))
        exit()
    if ('sim' in sys.argv[1:]):
        # NOTE(review): the numeric literals here look mangled -- `.0` arguments
        # and `(.0 / .0)` (a guaranteed ZeroDivisionError at runtime) were
        # presumably real constants (e.g. a period/frequency) in the original
        # source; confirm before relying on the sim path.
        ring = RingSerialCtrl(12, .0)
        run_simulation(ring, test(ring), clocks={'sys': (.0 / .0)}, vcd_name='sim.vcd')
        exit()
    design = Tuto(platform)
    platform.build(design, build_dir=build_dir)
class Gc(TelemetryDevice):
    """Telemetry device that enables JVM GC logging for the benchmarked candidate."""

    internal = False
    command = 'gc'
    human_name = 'GC log'
    help = 'Enables GC logs.'

    def __init__(self, telemetry_params, log_root, java_major_version):
        super().__init__()
        self.telemetry_params = telemetry_params
        self.log_root = log_root
        self.java_major_version = java_major_version

    def instrument_java_opts(self):
        """Ensure the log directory exists and return the JVM flags enabling GC logging."""
        io.ensure_dir(self.log_root)
        log_file = os.path.join(self.log_root, 'gc.log')
        console.info(('%s: Writing GC log to [%s]' % (self.human_name, log_file)), logger=self.logger)
        return self.java_opts(log_file)

    def java_opts(self, log_file):
        """Return GC-logging JVM flags appropriate for the candidate's JDK version."""
        # JDK 9+ uses unified logging (-Xlog) with a configurable selector;
        # earlier JDKs only understand the legacy -Xloggc / PrintGC* flags.
        if self.java_major_version >= 9:
            log_config = self.telemetry_params.get('gc-log-config', 'gc*=info,safepoint=info,age*=trace')
            return [f'-Xlog:{log_config}:file={log_file}:utctime,uptimemillis,level,tags:filecount=0']
        return [
            f'-Xloggc:{log_file}',
            '-XX:+PrintGCDetails',
            '-XX:+PrintGCDateStamps',
            '-XX:+PrintGCTimeStamps',
            '-XX:+PrintGCApplicationStoppedTime',
            '-XX:+PrintGCApplicationConcurrentTime',
            '-XX:+PrintTenuringDistribution',
        ]
class DBConnectionPool(DBTester):
    """Exercises a database connection pool: checkout/checkin, wrapper semantics,
    sizing policies, and waiter wakeup.

    Not collected directly (`__test__ = False`); DB-specific subclasses supply
    `create_pool` and the actual database under test.
    """

    __test__ = False

    def setUp(self):
        super().setUp()
        # Default pool with its single connection checked out for the test.
        self.pool = self.create_pool()
        self.connection = self.pool.get()

    def tearDown(self):
        if self.connection:
            self.pool.put(self.connection)
        self.pool.clear()
        super().tearDown()

    def assert_cursor_works(self, cursor):
        # Minimal sanity query: must return at least one row.
        cursor.execute('select 1')
        rows = cursor.fetchall()
        assert rows

    def test_connecting(self):
        assert (self.connection is not None)

    def test_create_cursor(self):
        cursor = self.connection.cursor()
        cursor.close()

    def test_run_query(self):
        cursor = self.connection.cursor()
        self.assert_cursor_works(cursor)
        cursor.close()

    def test_run_bad_query(self):
        # Invalid SQL must raise some DB error; re-raise only our own assert.
        cursor = self.connection.cursor()
        try:
            cursor.execute('garbage blah blah')
            assert False
        except AssertionError:
            raise
        except Exception:
            pass
        cursor.close()

    def test_put_none(self):
        # Putting None back frees a slot, and the next get() yields a usable connection.
        assert (self.pool.free() == 0)
        self.pool.put(None)
        assert (self.pool.free() == 1)
        conn2 = self.pool.get()
        assert (conn2 is not None)
        assert conn2.cursor
        self.pool.put(conn2)

    def test_close_does_a_put(self):
        # Closing a pooled connection returns it to the pool and invalidates the wrapper.
        assert (self.pool.free() == 0)
        self.connection.close()
        assert (self.pool.free() == 1)
        self.assertRaises(AttributeError, self.connection.cursor)

    def test_put_doesnt_double_wrap(self):
        self.pool.put(self.connection)
        conn = self.pool.get()
        assert (not isinstance(conn._base, db_pool.PooledConnectionWrapper))
        self.pool.put(conn)

    def test_bool(self):
        # A wrapper is truthy while open, falsy once closed.
        assert self.connection
        self.connection.close()
        assert (not self.connection)

    def fill_up_table(self, conn):
        curs = conn.cursor()
        for i in range(1000):
            curs.execute(('insert into test_table (value_int) values (%s)' % i))
        conn.commit()

    def test_returns_immediately(self):
        # A query issued from a green thread must not block the spawning thread:
        # '1' is appended before the spawned query finishes.
        self.pool = self.create_pool()
        conn = self.pool.get()
        self.set_up_dummy_table(conn)
        self.fill_up_table(conn)
        curs = conn.cursor()
        results = []
        SHORT_QUERY = 'select * from test_table'
        evt = eventlet.Event()

        def a_query():
            self.assert_cursor_works(curs)
            curs.execute(SHORT_QUERY)
            results.append(2)
            evt.send()
        eventlet.spawn(a_query)
        results.append(1)
        self.assertEqual([1], results)
        evt.wait()
        self.assertEqual([1, 2], results)
        self.pool.put(conn)

    def test_connection_is_clean_after_put(self):
        # Uncommitted work must be rolled back when a connection is returned.
        self.pool = self.create_pool()
        conn = self.pool.get()
        self.set_up_dummy_table(conn)
        curs = conn.cursor()
        for i in range(10):
            curs.execute(('insert into test_table (value_int) values (%s)' % i))
        self.pool.put(conn)
        del conn
        conn2 = self.pool.get()
        curs2 = conn2.cursor()
        for i in range(10):
            curs2.execute(('insert into test_table (value_int) values (%s)' % i))
        conn2.commit()
        curs2.execute('select * from test_table')
        # Only the committed second batch should be visible.
        self.assertEqual(10, curs2.rowcount)
        self.pool.put(conn2)

    def test_visibility_from_other_connections(self):
        # Committed rows must be visible from other pooled connections.
        self.pool = self.create_pool(max_size=3)
        conn = self.pool.get()
        conn2 = self.pool.get()
        curs = conn.cursor()
        try:
            curs2 = conn2.cursor()
            curs2.execute(('insert into gargleblatz (a) values (%s)' % 314159))
            self.assertEqual(curs2.rowcount, 1)
            conn2.commit()
            selection_query = 'select * from gargleblatz'
            curs2.execute(selection_query)
            self.assertEqual(curs2.rowcount, 1)
            del curs2
            self.pool.put(conn2)
            # A freshly checked-out connection sees the committed row too.
            conn3 = self.pool.get()
            curs3 = conn3.cursor()
            curs3.execute(selection_query)
            self.assertEqual(curs3.rowcount, 1)
            curs.execute(selection_query)
            self.assertEqual(curs.rowcount, 1)
            self.pool.put(conn3)
        finally:
            # Clean up the shared table regardless of assertion outcome.
            curs.execute('delete from gargleblatz where a=314159')
            conn.commit()
            self.pool.put(conn)

    def test_clear(self):
        self.pool = self.create_pool()
        self.pool.put(self.connection)
        self.pool.clear()
        self.assertEqual(len(self.pool.free_items), 0)

    def test_clear_warmup(self):
        # Clear must also discard connections pre-created by min_size warmup.
        self.pool = self.create_pool(min_size=1)
        self.pool.clear()
        self.assertEqual(len(self.pool.free_items), 0)

    def test_unwrap_connection(self):
        self.assertTrue(isinstance(self.connection, db_pool.GenericConnectionWrapper))
        conn = self.pool._unwrap_connection(self.connection)
        assert (not isinstance(conn, db_pool.GenericConnectionWrapper))
        # Non-wrapper inputs unwrap to None; objects with _base unwrap to it.
        self.assertEqual(None, self.pool._unwrap_connection(None))
        self.assertEqual(None, self.pool._unwrap_connection(1))
        x = Mock()
        x._base = 'hi'
        self.assertEqual('hi', self.pool._unwrap_connection(x))
        conn.close()

    def test_safe_close(self):
        # _safe_close returns the connection to the pool and swallows DB errors,
        # but must not swallow KeyboardInterrupt.
        self.pool._safe_close(self.connection, quiet=True)
        self.assertEqual(len(self.pool.free_items), 1)
        self.pool._safe_close(None)
        self.pool._safe_close(1)
        x = Mock()

        def fail():
            raise KeyboardInterrupt()
        x.close = fail
        self.assertRaises(KeyboardInterrupt, self.pool._safe_close, x)
        x = Mock()

        def fail2():
            raise RuntimeError('if this line has been printed, the test succeeded')
        x.close = fail2
        self.pool._safe_close(x, quiet=False)

    def test_zero_max_idle(self):
        # With max_idle=0, returned connections are discarded, not retained.
        self.pool.put(self.connection)
        self.pool.clear()
        self.pool = self.create_pool(max_size=2, max_idle=0)
        self.connection = self.pool.get()
        self.connection.close()
        self.assertEqual(len(self.pool.free_items), 0)

    def test_zero_max_age(self):
        # With max_age=0, connections expire immediately on return.
        self.pool.put(self.connection)
        self.pool.clear()
        self.pool = self.create_pool(max_size=2, max_age=0)
        self.connection = self.pool.get()
        self.connection.close()
        self.assertEqual(len(self.pool.free_items), 0)

    def test_waiters_get_woken(self):
        # A green thread blocked in get() must be woken when a connection is put back.
        self.pool.put(self.connection)
        self.pool.clear()
        self.pool = self.create_pool(max_size=1, max_age=0)
        self.connection = self.pool.get()
        self.assertEqual(self.pool.free(), 0)
        self.assertEqual(self.pool.waiting(), 0)
        e = eventlet.Event()

        def retrieve(pool, ev):
            c = pool.get()
            ev.send(c)
        eventlet.spawn(retrieve, self.pool, e)
        # Yield twice so the spawned getter runs and blocks.
        eventlet.sleep(0)
        eventlet.sleep(0)
        self.assertEqual(self.pool.free(), 0)
        self.assertEqual(self.pool.waiting(), 1)
        self.pool.put(self.connection)
        # The waiter must receive the connection within a second.
        timer = eventlet.Timeout(1)
        conn = e.wait()
        timer.cancel()
        self.assertEqual(self.pool.free(), 0)
        self.assertEqual(self.pool.waiting(), 0)
        self.pool.put(conn)

    def test_raising_create(self):
        # If connect() raises, get() propagates and the slot is not leaked.
        self.pool = self.create_pool(max_size=1, module=RaisingDBModule())
        self.assertRaises(RuntimeError, self.pool.get)
        self.assertEqual(self.pool.free(), 1)
class Migration(migrations.Migration):
    """Rebuild faba_subid_awardkey_sums_idx as a partial index restricted to
    DEF codes L-P (the condition on disaster_emergency_fund below)."""

    dependencies = [('awards', '0076_auto__0312')]
    # Drop-and-recreate under the same name to change the index definition.
    operations = [migrations.RemoveIndex(model_name='financialaccountsbyawards', name='faba_subid_awardkey_sums_idx'), migrations.AddIndex(model_name='financialaccountsbyawards', index=models.Index(condition=models.Q(disaster_emergency_fund__in=['L', 'M', 'N', 'O', 'P']), fields=['submission', 'distinct_award_key', 'piid', 'transaction_obligated_amount', 'gross_outlay_amount_by_award_cpe'], name='faba_subid_awardkey_sums_idx'))]
class Main(base.Module):
    """Wi-Fi scanner module backed by `nmcli dev wifi`."""

    parameters = {'iface': 'wlan0'}
    completions = list(parameters.keys())

    def do_execute(self, line):
        """Print a table of nearby networks; report an error if scanning fails."""
        try:
            self.cp.green(f"{'SSID':<30} {'BSSID':^18} {'CHANNEL':^9} {'SIGNAL':^9} {'BARS':^8} {'SECURITY':^18}")
            networks = self.scan()
            if networks:
                for net in networks:
                    self.cp.yellow(f"{net['SSID']:<30} {net['BSSID']:^18} {net['CHANNEL']:^9} {net['SIGNAL']:^9} {net['BARS']:^8} {net['SECURITY']:^18}")
        except Exception:
            self.cp.error(text=f"Error: Wireless interface {self.parameters['iface']} is busy or monitor mode enabled!")

    def scan(self):
        """Run nmcli in terse mode and parse its output into a list of dicts (None if no output)."""
        output = subprocess.run(
            ['nmcli', '-t', '-e', 'yes', '-f', 'ssid,bssid,chan,signal,bars,security', 'dev', 'wifi'],
            stdout=subprocess.PIPE,
        ).stdout.decode('utf-8')
        if not output:
            return None
        networks = []
        for raw_line in output.split('\n'):
            # nmcli escapes colons inside fields as '\:'; neutralize them before splitting.
            fields = raw_line.replace('\\:', '-').split(':')
            try:
                networks.append({'SSID': fields[0], 'BSSID': fields[1], 'CHANNEL': fields[2], 'SIGNAL': fields[3], 'BARS': fields[4], 'SECURITY': fields[5]})
            except IndexError:
                # Skip blank/short lines.
                pass
        return networks

    def complete_set(self, text, line, begidx, endidx):
        """Tab-completion: offer parameter names matching what follows the command word."""
        mline = line.partition(' ')[2]
        offs = len(mline) - len(text)
        return [option[offs:] for option in self.completions if option.startswith(mline)]
def run_dev_streamlit_io():
    """Launch the Streamlit dev server for app.py, reporting failure and Ctrl-C shutdown."""
    cmd = ['streamlit', 'run', 'app.py']
    try:
        console.print(f" [bold cyan]Running Streamlit app with command: {' '.join(cmd)}[/bold cyan]")
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as err:
        console.print(f' [bold red]An error occurred: {err}[/bold red]')
    except KeyboardInterrupt:
        console.print('\n [bold yellow]Streamlit server stopped[/bold yellow]')
class TwoPhase_PCDInv_shell(InvOperatorShell):
    """PETSc shell operator applying the inverse of a two-phase PCD
    (pressure-convection-diffusion) preconditioner.

    NOTE(review): operator roles are inferred from names (Qp_* pressure mass
    matrices weighted by viscosity/density, Ap_rho pressure Laplacian, Np_rho
    convection operator) -- confirm against the surrounding solver module.
    """

    def __init__(self, Qp_visc, Qp_dens, Ap_rho, Np_rho, alpha=False, delta_t=0, num_chebyshev_its=0, strong_dirichlet_DOF=[], laplace_null_space=False, par_info=None):
        # NOTE(review): mutable default `strong_dirichlet_DOF=[]` would be shared
        # across instances if ever mutated; left untouched to keep the signature.
        from . import LinearSolvers as LS
        self.Qp_visc = Qp_visc
        self.Qp_dens = Qp_dens
        self.Ap_rho = Ap_rho
        self.Np_rho = Np_rho
        self.alpha = alpha
        self.delta_t = delta_t
        self.num_chebyshev_its = num_chebyshev_its
        self.strong_dirichlet_DOF = strong_dirichlet_DOF
        self.laplace_null_space = laplace_null_space
        self.par_info = par_info
        self.options = p4pyPETSc.Options()
        self._create_constant_nullspace()
        self._set_dirichlet_idx_set()
        # KSP for the density-weighted pressure Laplacian; strong Dirichlet rows
        # are zeroed so those DOFs stay fixed during the inner solve.
        self.kspAp_rho = self.create_petsc_ksp_obj('innerTPPCDsolver_Ap_rho_', self.Ap_rho, self.laplace_null_space)
        self.kspAp_rho.getOperators()[0].zeroRows(self.known_dof_is)
        if self.num_chebyshev_its:
            # Replace the mass matrices by Chebyshev semi-iteration solvers.
            self.Qp_visc = LS.ChebyshevSemiIteration(self.Qp_visc, 0.5, 2.0)
            self.Qp_dens = LS.ChebyshevSemiIteration(self.Qp_dens, 0.5, 2.0)
        else:
            pass

    def getSize(self):
        # Local row count of the pressure Laplacian.
        return self.Ap_rho.getSizes()[0][0]

    def apply(self, A, x, y):
        """Apply the preconditioner:
        y = Qp_visc^{-1} x + Ap_rho^{-1} (Np_rho Qp_dens^{-1} x [+ x/delta_t]).
        """
        comm = Comm.get()
        x_tmp = self._create_copy_vec(x)
        tmp1 = self._create_copy_vec(x_tmp)
        tmp2 = self._create_copy_vec(x_tmp)
        if self.num_chebyshev_its:
            self.Qp_visc.apply(x_tmp, y, self.num_chebyshev_its)
            self.Qp_dens.apply(x_tmp, tmp1, self.num_chebyshev_its)
        else:
            # Diagonal (lumped) approximation of the mass-matrix inverses.
            y.pointwiseDivide(x_tmp, self.Qp_visc.getDiagonal())
            tmp1.pointwiseDivide(x_tmp, self.Qp_dens.getDiagonal())
        self.Np_rho.mult(tmp1, tmp2)
        if (self.alpha is True):
            # Transient term: add x / delta_t before the Laplacian solve.
            tmp2.axpy(old_div(1.0, self.delta_t), x_tmp)
        if self.options.hasName('innerTPPCDsolver_Ap_rho_ksp_constant_null_space'):
            self.const_null_space.remove(tmp2)
        # Zero the strong Dirichlet rows both before and after the inner solve.
        zero_array = numpy.zeros(len(self.known_dof_is.getIndices()))
        tmp2.setValues(self.known_dof_is.getIndices(), zero_array)
        tmp2.assemblyBegin()
        tmp2.assemblyEnd()
        self.kspAp_rho.solve(tmp2, tmp1)
        y.axpy(1.0, tmp1)
        y.setValues(self.known_dof_is.getIndices(), zero_array)
        y.assemblyBegin()
        y.assemblyEnd()
        assert (numpy.isnan(y.norm()) == False), 'Applying the schur complement resulted in not-a-number.'
class HomeAssistantManager():
    """Base manager wiring a custom integration into Home Assistant: config-entry
    lifecycle, periodic entity/data-provider updates, heartbeat, and a registry of
    per-entity action callbacks.

    NOTE(review): several defs below look like they lost decorators during
    extraction -- the accessor methods (entity_manager, device_manager, ...) are
    used as attributes elsewhere in the class (e.g. `self.entity_manager.update()`),
    so they were presumably @property; `is_domain_supported` is called as
    `self.is_domain_supported(domain)` without a self parameter, so presumably
    @staticmethod. Confirm against the original module. The hook methods that had
    empty (syntactically invalid) bodies have been given docstring bodies only.
    """

    def __init__(self, hass: HomeAssistant, scan_interval: datetime.timedelta, heartbeat_interval: (datetime.timedelta | None)=None):
        self._hass = hass
        self._is_initialized = False
        self._update_entities_interval = scan_interval
        self._update_data_providers_interval = scan_interval
        self._heartbeat_interval = heartbeat_interval
        self._entity_registry = None
        self._entry: (ConfigEntry | None) = None
        self._storage_manager = StorageManager(self._hass)
        self._entity_manager = EntityManager(self._hass, self)
        self._device_manager = DeviceManager(self._hass, self)
        self._entity_registry = async_get(self._hass)
        self._async_track_time_handlers = []
        self._last_heartbeat = None
        # Simple flag (not a real lock) used to skip overlapping entity updates.
        self._update_lock = False
        self._actions: dict = {}

        # Heartbeat callback bound to the instance; scheduled in async_update_entry.
        def _send_heartbeat(internal_now):
            self._last_heartbeat = internal_now
            self._hass.async_create_task(self.async_send_heartbeat())
        self._send_heartbeat = _send_heartbeat
        self._domains = {domain: self.is_domain_supported(domain) for domain in SUPPORTED_PLATFORMS}

    def entity_manager(self) -> EntityManager:
        # Lazily re-creates the manager if missing.
        if (self._entity_manager is None):
            self._entity_manager = EntityManager(self._hass, self)
        return self._entity_manager

    def device_manager(self) -> DeviceManager:
        return self._device_manager

    def entity_registry(self) -> EntityRegistry:
        return self._entity_registry

    def storage_manager(self) -> StorageManager:
        return self._storage_manager

    def entry_id(self) -> str:
        return self._entry.entry_id

    def entry_title(self) -> str:
        return self._entry.title

    def update_intervals(self, entities_interval: datetime.timedelta, data_interval: datetime.timedelta):
        """Override the two polling intervals (takes effect when tracking is (re)registered)."""
        self._update_entities_interval = entities_interval
        self._update_data_providers_interval = data_interval

    async def async_component_initialize(self, entry: ConfigEntry):
        """Integration-specific initialization hook (no base implementation in this chunk)."""

    async def async_send_heartbeat(self):
        """Heartbeat hook, invoked on the heartbeat interval (no base implementation in this chunk)."""

    def register_services(self, entry: (ConfigEntry | None)=None):
        """Service-registration hook (no base implementation in this chunk)."""

    async def async_initialize_data_providers(self):
        """Start data providers hook (no base implementation in this chunk)."""

    async def async_stop_data_providers(self):
        """Stop data providers hook (no base implementation in this chunk)."""

    async def async_update_data_providers(self):
        """Refresh data providers hook (no base implementation in this chunk)."""

    def load_entities(self):
        """Load entities hook (no base implementation in this chunk)."""

    def load_devices(self):
        """Load devices hook (no base implementation in this chunk)."""

    async def async_init(self, entry: ConfigEntry):
        """Initialize the integration for a config entry; on a corrupted encryption
        key, reset the stored key so re-adding the integration can succeed."""
        try:
            self._entry = entry
            (await self.async_component_initialize(entry))
            self._hass.loop.create_task(self._async_load_platforms())
        except InvalidToken:
            error_message = 'Encryption key got corrupted, please remove the integration and re-add it'
            _LOGGER.error(error_message)
            data = (await self._storage_manager.async_load_from_store())
            data.key = None
            (await self._storage_manager.async_save_to_store(data))
        except Exception as ex:
            (exc_type, exc_obj, tb) = sys.exc_info()
            line_number = tb.tb_lineno
            _LOGGER.error(f'Failed to async_init, error: {ex}, line: {line_number}')

    async def _async_load_platforms(self):
        """Forward entry setup to each supported platform, then finish initialization."""
        load = self._hass.config_entries.async_forward_entry_setup
        for domain in self._domains:
            if self._domains.get(domain, False):
                (await load(self._entry, domain))
            else:
                _LOGGER.debug(f'Skip loading {domain}')
        self.register_services()
        self._is_initialized = True
        (await self.async_update_entry())

    def _update_data_providers(self, now):
        # Time-interval callback; schedules the async refresh on the event loop.
        self._hass.async_create_task(self.async_update_data_providers())

    async def async_update_entry(self, entry: (ConfigEntry | None)=None):
        """Handle (re)load of the config entry and register the periodic callbacks."""
        entry_changed = (entry is not None)
        if entry_changed:
            self._entry = entry
            _LOGGER.info(f'Handling ConfigEntry load: {entry.as_dict()}')
        else:
            entry = self._entry
        track_time_update_data_providers = async_track_time_interval(self._hass, self._update_data_providers, self._update_data_providers_interval)
        self._async_track_time_handlers.append(track_time_update_data_providers)
        track_time_update_entities = async_track_time_interval(self._hass, self._update_entities, self._update_entities_interval)
        self._async_track_time_handlers.append(track_time_update_entities)
        if (self._heartbeat_interval is not None):
            track_time_send_heartbeat = async_track_time_interval(self._hass, self._send_heartbeat, self._heartbeat_interval)
            self._async_track_time_handlers.append(track_time_send_heartbeat)
        _LOGGER.info(f'Handling ConfigEntry change: {entry.as_dict()}')
        (await self.async_initialize_data_providers())

    async def async_unload(self):
        """Cancel all periodic callbacks and stop the data providers."""
        _LOGGER.info('HA was stopped')
        for handler in self._async_track_time_handlers:
            if (handler is not None):
                handler()
        self._async_track_time_handlers.clear()
        (await self.async_stop_data_providers())

    async def async_remove(self, entry: ConfigEntry):
        """Tear down the integration: unload platforms, remove devices, drop entities."""
        _LOGGER.info(f'Removing current integration - {entry.title}')
        (await self.async_unload())
        unload = self._hass.config_entries.async_forward_entry_unload
        for domain in PLATFORMS:
            if self._domains.get(domain, False):
                (await unload(self._entry, domain))
            else:
                _LOGGER.debug(f'Skip unloading {domain}')
        (await self._device_manager.async_remove())
        self._entry = None
        self.entity_manager.entities.clear()
        _LOGGER.info(f'Current integration ({entry.title}) removed')

    def _update_entities(self, now):
        """Time-interval callback: reload devices/entities and dispatch updates.

        Guarded by `_update_lock` so a slow update is skipped, not stacked.
        """
        if self._update_lock:
            _LOGGER.warning('Update in progress, will skip the request')
            return
        self._update_lock = True
        try:
            self.load_devices()
            self.load_entities()
        except Exception as ex:
            (exc_type, exc_obj, tb) = sys.exc_info()
            line_number = tb.tb_lineno
            _LOGGER.error(f'Failed to update devices and entities, Error: {ex}, Line: {line_number}')
        self.entity_manager.update()
        self._hass.async_create_task(self.dispatch_all())
        self._update_lock = False

    async def dispatch_all(self):
        """Fire each supported platform's dispatcher signal so entities refresh."""
        if (not self._is_initialized):
            _LOGGER.info('NOT INITIALIZED - Failed discovering components')
            return
        for domain in PLATFORMS:
            if self._domains.get(domain, False):
                signal = PLATFORMS.get(domain)
                async_dispatcher_send(self._hass, signal)

    def set_action(self, entity_id: str, action_name: str, action):
        """Register a callback for an (entity, action) pair."""
        key = f'{entity_id}:{action_name}'
        self._actions[key] = action

    def get_action(self, entity_id: str, action_name: str):
        """Return the callback registered for an (entity, action) pair, or None."""
        key = f'{entity_id}:{action_name}'
        action = self._actions.get(key)
        return action

    def get_core_entity_fan_speed(self, entity: EntityData) -> (str | None):
        # Hook for subclasses; no fan-speed support in the base class.
        pass

    # The async_core_entity_* methods below all follow the same pattern:
    # look up the registered action for the entity and await it if present.
    async def async_core_entity_return_to_base(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_RETURN_TO_BASE)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_set_fan_speed(self, entity: EntityData, fan_speed: str) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_SET_FAN_SPEED)
        if (action is not None):
            (await action(entity, fan_speed))

    async def async_core_entity_start(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_START)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_stop(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_STOP)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_pause(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_PAUSE)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_turn_on(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_TURN_ON)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_turn_off(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_TURN_OFF)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_send_command(self, entity: EntityData, command: str, params: ((dict[(str, Any)] | list[Any]) | None)=None) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_SEND_COMMAND)
        if (action is not None):
            (await action(entity, command, params))

    async def async_core_entity_locate(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_LOCATE)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_select_option(self, entity: EntityData, option: str) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_SELECT_OPTION)
        if (action is not None):
            (await action(entity, option))

    async def async_core_entity_toggle(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_TOGGLE)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_enable_motion_detection(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_ENABLE_MOTION_DETECTION)
        if (action is not None):
            (await action(entity))

    async def async_core_entity_disable_motion_detection(self, entity: EntityData) -> None:
        action = self.get_action(entity.id, ACTION_CORE_ENTITY_DISABLE_MOTION_DETECTION)
        if (action is not None):
            (await action(entity))

    def log_exception(ex, message):
        # NOTE(review): no self parameter -- presumably @staticmethod upstream.
        (exc_type, exc_obj, tb) = sys.exc_info()
        line_number = tb.tb_lineno
        _LOGGER.error(f'{message}, Error: {str(ex)}, Line: {line_number}')

    def is_domain_supported(domain) -> bool:
        # NOTE(review): no self parameter but called as self.is_domain_supported(...)
        # in __init__ -- presumably @staticmethod upstream; confirm.
        is_supported = True
        try:
            # A platform is supported iff its module exists in this integration.
            __import__(f'custom_components.{DOMAIN}.{domain}')
        except ModuleNotFoundError:
            is_supported = False
        return is_supported
def extractCheeseburgerbrownCom(item):
    """Map a release-feed item to a release message.

    Returns None for non-chapter posts (no vol/chp, or a preview), a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tag_table = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in tag_table:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsOrganizationStatesHover(Options):
    """Highcharts `plotOptions.organization.states.hover` options.

    NOTE(review): each name below is defined twice (getter then setter), so the
    second def shadows the first as written; the upstream generated source
    presumably used @property / @<name>.setter decorators that were stripped
    during extraction -- confirm against the original module.
    """

    def animation(self) -> 'OptionPlotoptionsOrganizationStatesHoverAnimation':
        # Nested sub-options object for the hover animation.
        return self._config_sub_data('animation', OptionPlotoptionsOrganizationStatesHoverAnimation)

    def borderColor(self):
        return self._config_get(None)

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def brightness(self):
        return self._config_get(0.1)

    def brightness(self, num: float):
        self._config(num, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def linkOpacity(self):
        return self._config_get(1)

    def linkOpacity(self, num: float):
        self._config(num, js_type=False)

    def opacity(self):
        return self._config_get(1)

    def opacity(self, num: float):
        self._config(num, js_type=False)
def _flatten_params_rec(obj: Any, paths: List[str]) -> List[Tuple[(List[str], str)]]:
    """Recursively flatten a pydantic model's non-default fields into (path, value-string) pairs.

    A ColumnName that round-trips through `from_any` is treated as a leaf.
    The `type` discriminator of EvidentlyBaseModel subclasses is skipped.
    """
    if isinstance(obj, ColumnName) and obj == ColumnName.from_any(obj.name):
        return [(paths, obj.name)]
    flattened: List[Tuple[List[str], str]] = []
    if isinstance(obj, BaseModel):
        for field_name, field in obj.__fields__.items():
            if isinstance(obj, EvidentlyBaseModel) and field_name == 'type':
                continue
            value = getattr(obj, field_name)
            if value == field.default:
                # Only record values that differ from the declared default.
                continue
            child_path = paths + [field_name]
            if isinstance(field.type_, type) and issubclass(field.type_, BaseModel):
                flattened.extend(_flatten_params_rec(value, child_path))
            else:
                flattened.append((child_path, str(value)))
    return flattened
def _print_cursor_block(cur: Block, target: Cursor, env: PrintEnv, indent: str) -> list[str]:
    """Render the statements of block `cur` around cursor `target`, marking the
    cursor's position and eliding distant statements as `"..."` lines.
    """
    def if_cursor(c, move, k):
        # Apply `move` to cursor `c` and render with `k`; an invalid move
        # (e.g. stepping past the block edge) contributes nothing.
        try:
            return k(move(c))
        except InvalidCursorError:
            return []

    def more_stmts(_):
        # Placeholder for statements elided from the printout.
        return [f'{indent}"..."']

    def local_stmt(c):
        return _print_cursor_stmt(c, target, env, indent)

    if (isinstance(target, Gap) and (target in cur)):
        # Show the gap marker between its neighboring statements, with one
        # elision line on each far side when those neighbors exist.
        if (target._type == GapType.Before):
            return [*if_cursor(target, (lambda g: g.anchor().prev(2)), more_stmts), *if_cursor(target, (lambda g: g.anchor().prev()), local_stmt), f'{indent}[GAP - Before]', *if_cursor(target, (lambda g: g.anchor()), local_stmt), *if_cursor(target, (lambda g: g.anchor().next()), more_stmts)]
        else:
            assert (target._type == GapType.After)
            return [*if_cursor(target, (lambda g: g.anchor().prev()), more_stmts), *if_cursor(target, (lambda g: g.anchor()), local_stmt), f'{indent}[GAP - After]', *if_cursor(target, (lambda g: g.anchor().next()), local_stmt), *if_cursor(target, (lambda g: g.anchor().next(2)), more_stmts)]
    elif (isinstance(target, Block) and (target in cur)):
        # Print the whole selected sub-block between START/END markers.
        block = [f'{indent}# BLOCK START']
        for stmt in target:
            block.extend(local_stmt(stmt))
        block.append(f'{indent}# BLOCK END')
        return [*if_cursor(target, (lambda g: g[0].prev()), more_stmts), *block, *if_cursor(target, (lambda g: g[(- 1)].next()), more_stmts)]
    else:
        # Otherwise find the statement in this block containing the target.
        stmt = next(filter((lambda s: s.is_ancestor_of(target)), cur), None)
        if (stmt is None):
            return [f'{indent}"..."']
        return [*if_cursor(stmt, (lambda g: g.prev().before()), more_stmts), *local_stmt(stmt), *if_cursor(stmt, (lambda g: g.next().after()), more_stmts)]
def test_text_prefix_suffix(channel, bot_admin):
    """Messages sent and edited with prefix/suffix must wrap the body text."""
    sent = channel.bot_manager.send_message(bot_admin, 'Message', prefix='Prefix', suffix='Suffix')
    assert sent.text == 'Prefix\nMessage\nSuffix'
    updated = channel.bot_manager.edit_message_text(
        text='Edited text',
        prefix='Edited prefix',
        suffix='Edited suffix',
        chat_id=sent.chat_id,
        message_id=sent.message_id,
    )
    assert updated.chat_id == sent.chat_id
    assert updated.message_id == sent.message_id
    assert updated.text == 'Edited prefix\nEdited text\nEdited suffix'
def check_test_loss(loader, model):
    """Average the loss of `model` over all batches in `loader`.

    Runs under `torch.no_grad()`; relies on the module-level `device` and
    `loss_fn`. Returns the summed batch losses divided by the batch count.
    """
    total = 0
    with torch.no_grad():
        for inputs, targets in loader:
            inputs = inputs.to(device, dtype=torch.float32)
            targets = targets.to(device, dtype=torch.float32)
            predictions = model(inputs)
            total += loss_fn(predictions, targets)
    return total / len(loader)
def _deploy_contract(w3, contract_factory):
    """Deploy `contract_factory` from the coinbase account and return the bound contract."""
    txn_hash = contract_factory.constructor().transact({'from': w3.eth.coinbase})
    receipt = w3.eth.wait_for_transaction_receipt(txn_hash)
    assert is_dict(receipt)
    address = receipt['contractAddress']
    assert is_checksum_address(address)
    return contract_factory(address)
def test_AHHY_all_static_residues():
    """Parse the AHHY example PDB into a LinkedRDKitChorizo and verify residues.

    Checks residue count, HIS->HID/HIE tautomer suggestions, the exact per-residue
    PDB text and prev/next linkage, and the sizes of the rigid/movable PDBQT parts.
    """
    # NOTE(review): file handle is never closed; also `rdkit_mol != None` below
    # would be `is not None` idiomatically -- left untouched here.
    f = open(ahhy_example, 'r')
    pdb_string = f.read()
    chorizo = LinkedRDKitChorizo(pdb_string)
    assert (len(chorizo.residues) == 4)
    assert (len(chorizo.getIgnoredResidues()) == 0)
    # Both HIS residues should be re-typed to explicit tautomers.
    expected_suggested_mutations = {'A:HIS:2': 'A:HID:2', 'A:HIS:3': 'A:HIE:3'}
    assert (chorizo.suggested_mutations == expected_suggested_mutations)
    # Golden data: residue id -> ChorizoResidue(id, pdb_text, prev_id, next_id).
    expected_residue_data = {'A:ALA:1': ChorizoResidue('A:ALA:1', 'ATOM      1  N   ALA A   1       6.061   2.529  -3.691  1.00  0.00           N  \nATOM      2  CA  ALA A   1       5.518   2.870  -2.403  1.00  0.00           C  \nATOM      3  C   ALA A   1       4.995   1.645  -1.690  1.00  0.00           C  \nATOM      4  O   ALA A   1       5.294   0.515  -2.156  1.00  0.00           O  \nATOM      5  CB  ALA A   1       4.421   3.891  -2.559  1.00  0.00           C  \n', None, 'A:HIS:2'), 'A:HIS:2': ChorizoResidue('A:HIS:2', 'ATOM      6  N   HIS A   2       4.201   1.774  -0.543  1.00  0.00           N  \nATOM      7  CA  HIS A   2       3.690   0.569   0.155  1.00  0.00           C  \nATOM      8  C   HIS A   2       2.368   0.239  -0.349  1.00  0.00           C  \nATOM      9  O   HIS A   2       1.827   0.959  -1.278  1.00  0.00           O  \nATOM     10  CB  HIS A   2       3.958   0.658   1.602  1.00  0.00           C  \nATOM     11  CG  HIS A   2       3.518  -0.435   2.481  1.00  0.00           C  \nATOM     12  ND1 HIS A   2       4.232  -1.588   2.706  1.00  0.00           N  \nATOM     13  CD2 HIS A   2       2.407  -0.562   3.290  1.00  0.00           C  \nATOM     14  CE1 HIS A   2       3.592  -2.347   3.583  1.00  0.00           C  \nATOM     15  NE2 HIS A   2       2.438  -1.715   3.961  1.00  0.00           N  \nATOM     16  H13 HIS A   2       5.120  -1.737   2.040  1.00  0.00           H  \n', 'A:ALA:1', 'A:HIS:3'), 'A:HIS:3': ChorizoResidue('A:HIS:3', 'ATOM     17  N   HIS A   3       1.527  -0.823   0.040  1.00  0.00           N  \nATOM     18  CA  HIS A   3       0.243  -1.075  -0.553  1.00  0.00           C  \nATOM     19  C   HIS A   3      -0.832  -0.126  -0.071  1.00  0.00           C  \nATOM     20  O   HIS A   3      -0.560   1.096   0.147  1.00  0.00           O  \nATOM     21  CB  HIS A   3      -0.214  -2.454  -0.694  1.00  0.00           C  \nATOM     22  CG  HIS A   3       0.654  -3.363  -1.491  1.00  0.00           C  \nATOM     23  ND1 HIS A   3       1.819  -2.985  -2.046  1.00  0.00           N  \nATOM     24  CD2 HIS A   3       0.457  -4.684  -1.802  1.00  0.00           C  \nATOM     25  CE1 HIS A   3       2.360  -4.051  -2.700  1.00  0.00           C  \nATOM     26  NE2 HIS A   3       1.515  -5.068  -2.538  1.00  0.00           N  \nATOM     27  H20 HIS A   3       1.651  -6.016  -2.918  1.00  0.00           H  \n', 'A:HIS:2', 'A:TYR:4'), 'A:TYR:4': ChorizoResidue('A:TYR:4', 'ATOM     28  N   TYR A   4      -2.156  -0.543   0.154  1.00  0.00           N  \nATOM     29  CA  TYR A   4      -3.237   0.354   0.596  1.00  0.00           C  \nATOM     30  C   TYR A   4      -3.373   0.217   2.071  1.00  0.00           C  \nATOM     31  O   TYR A   4      -2.656  -0.595   2.677  1.00  0.00           O  \nATOM     32  CB  TYR A   4      -4.460  -0.123  -0.108  1.00  0.00           C  \nATOM     33  CG  TYR A   4      -5.699   0.602   0.156  1.00  0.00           C  \nATOM     34  CD1 TYR A   4      -6.089   1.698  -0.613  1.00  0.00           C  \nATOM     35  CD2 TYR A   4      -6.492   0.168   1.200  1.00  0.00           C  \nATOM     36  CE1 TYR A   4      -7.276   2.306  -0.282  1.00  0.00           C  \nATOM     37  CE2 TYR A   4      -7.679   0.783   1.528  1.00  0.00           C  \nATOM     38  CZ  TYR A   4      -8.060   1.866   0.764  1.00  0.00           C  \nATOM     39  OH  TYR A   4      -9.262   2.477   1.103  1.00  0.00           O  \nATOM     40  OXT TYR A   4      -4.293   0.998   2.728  1.00  0.00           O  \nATOM     41  H29 TYR A   4      -9.644   3.279   0.612  1.00  0.00           H  \n', 'A:HIS:3', None)}
    for residue_id in chorizo.residues:
        residue_object = chorizo.residues[residue_id]
        expected_object = expected_residue_data[residue_id]
        assert (residue_object.residue_id == expected_object.residue_id)
        assert (residue_object.pdb_text == expected_object.pdb_text)
        assert (residue_object.previous_id == expected_object.previous_id)
        assert (residue_object.next_id == expected_object.next_id)
        assert (residue_object.rdkit_mol != None)
    # Writer output: rigid part has a fixed character count, nothing is movable.
    pdbqt_strings = PDBQTWriterLegacy.write_string_from_linked_rdkit_chorizo(chorizo)
    (rigid_part, movable_part) = pdbqt_strings
    rigid_part = ''.join(rigid_part.splitlines())
    assert (len(rigid_part) == 3476)
    assert (len(movable_part) == 0)
class MockCloudClient(BaseCloudClient):
    """In-memory cloud-client stand-in for tests.

    Tracks every instance it starts in `model_instances` so that
    `is_instance_running` can distinguish stopped instances from instances
    that were never created through this client.
    """

    def __init__(self, maximum_running_instances=2, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.maximum_running_instances = maximum_running_instances
        self.model_instances: List[dm.ModelInstance] = []

    def can_create_instance(self, model: dm.ModelObject) -> bool:
        # The mock never refuses instance creation.
        return True

    async def start_instance(self, model: dm.ModelObject) -> dm.ModelInstance:
        """Create, record, and return a fake running instance for `model`."""
        instance = dm.ModelInstance(
            model=model,
            name=(model.name + '_model_instance'),
            sender=BaseSender(),
            receiver=BaseReceiver(),
            source_id=None,
            lock=False,
            running=True,
            hostname='Test',
        )
        self.model_instances.append(instance)
        return instance

    def stop_instance(self, model_instance: dm.ModelInstance):
        # Mark stopped; the instance stays in `model_instances`.
        model_instance.running = False

    def get_maximum_running_instances(self):
        return self.maximum_running_instances

    def is_instance_running(self, model_instance: dm.ModelInstance) -> dm.ReasoningOutput[(bool, HealthCheckError)]:
        """Report whether the instance runs, with a reason when it does not."""
        is_running = model_instance.running
        if not is_running:
            return dm.ReasoningOutput(is_running, reason=ContainerExited('ModelInstance.running == False'))
        if model_instance not in self.model_instances:
            return dm.ReasoningOutput(False, reason=ContainerDoesNotExists('ModelInstance created without MockCloudClient'))
        return dm.ReasoningOutput(is_running)
def luv_to_hsluv(luv: Vector) -> Vector:
    """Convert CIE Luv coordinates to HSLuv [hue, saturation, lightness]."""
    lightness = luv[0]
    (chroma, hue) = alg.rect_to_polar(luv[1], luv[2])
    saturation = 0.0
    # Clamp near-white and near-black; saturation is undefined there.
    if lightness > (100 - 1e-07):
        lightness = 100.0
    elif lightness < 1e-08:
        lightness = 0.0
    else:
        # Saturation is chroma relative to the maximum chroma at this L/h.
        saturation = (chroma / max_chroma_for_lh(lightness, hue)) * 100.0
    return [util.constrain_hue(hue), saturation, lightness]
def get_optimized_pow_patches(_fork_name: str) -> Dict[(str, Any)]:
    """Build patches that swap in an ethash-accelerated PoW validator for a fork.

    Imports ``ethereum.<fork>.fork``, refuses non-PoW forks (no
    ``validate_proof_of_work`` attribute), and returns the populated patch dict.
    """
    patches: Dict[(str, Any)] = {}
    mod = cast(Any, import_module((('ethereum.' + _fork_name) + '.fork')))
    if (not hasattr(mod, 'validate_proof_of_work')):
        raise Exception('Attempted to get optimized pow patches for non-pow fork')
    generate_header_hash_for_pow = mod.generate_header_hash_for_pow
    # NOTE(review): bare call below reads like a stripped decorator
    # (`@_item(patches)`) that registers the following function into
    # `patches` -- confirm against the original module.
    _item(patches)
    def validate_proof_of_work(header: Header_) -> None:
        # Verify the header's ethash solution against its difficulty target.
        epoch_number = epoch(header.number)
        header_hash = generate_header_hash_for_pow(header)
        result = ethash.verify(int(epoch_number), header_hash, header.mix_digest, int.from_bytes(header.nonce, 'big'), (U256_CEIL_VALUE // header.difficulty).to_be_bytes32())
        ensure(result, InvalidBlock)
    return patches
class bsn_virtual_port_create_reply(bsn_header):
    """BSN (Big Switch Networks) experimenter message: virtual-port-create reply.

    Generated-style OpenFlow message class; the class attributes identify the
    message on the wire.

    NOTE(review): `pack` joins struct-packed chunks with `''.join(...)` and
    `unpack` has no visible @staticmethod decorator -- this reads as Python
    2-era generated (loxigen-style) code; confirm before running on Python 3,
    where struct.pack returns bytes.
    """
    version = 6            # OpenFlow protocol version
    type = 4               # OFPT_EXPERIMENTER
    experimenter = 6035143 # BSN experimenter id
    subtype = 16           # virtual_port_create_reply subtype
    def __init__(self, xid=None, status=None, vport_no=None):
        # xid stays None when unset; numeric fields default to 0.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (status != None):
            self.status = status
        else:
            self.status = 0
        if (vport_no != None):
            self.vport_no = vport_no
        else:
            self.vport_no = 0
        return
    def pack(self):
        """Serialize to wire format; length field is back-patched after packing."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!L', self.status))
        packed.append(struct.pack('!L', self.vport_no))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a message from `reader`; asserts the fixed header fields.

        NOTE(review): takes `reader` with no self/cls -- presumably a stripped
        @staticmethod.
        """
        obj = bsn_virtual_port_create_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 16)
        obj.status = reader.read('!L')[0]
        obj.vport_no = reader.read('!L')[0]
        return obj
    def __eq__(self, other):
        # Equality over the mutable fields only; class constants are implied by type.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.status != other.status):
            return False
        if (self.vport_no != other.vport_no):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering of the message to pretty-printer `q`."""
        q.text('bsn_virtual_port_create_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('status = ')
                q.text(('%#x' % self.status))
                q.text(',')
                q.breakable()
                q.text('vport_no = ')
                q.text(('%#x' % self.vport_no))
            q.breakable()
        q.text('}')
class UnitValidator(object):
    """Validate a string of the form <number><unit> and convert it to a number.

    `regex` must capture two groups (numeric part, unit part); `verbose_pattern`
    is the human-readable form used in error messages; `unit_multipliers` maps
    each accepted unit to its multiplier.
    """
    def __init__(self, regex, verbose_pattern, unit_multipliers) -> None:
        self.regex = regex
        self.verbose_pattern = verbose_pattern
        self.unit_multipliers = unit_multipliers

    def __call__(self, value, field_name):
        """Return `int(number) * multiplier` for `value`.

        Raises ConfigurationError when `value` does not match the pattern or
        uses an unsupported unit.
        """
        value = str(value)
        # `re` caches compiled patterns, so matching by source string is cheap.
        match = re.match(self.regex, value, re.IGNORECASE)
        if (not match):
            raise ConfigurationError('{} does not match pattern {}'.format(value, self.verbose_pattern), field_name)
        (val, unit) = match.groups()
        try:
            val = (int(val) * self.unit_multipliers[unit])
        except KeyError as exc:
            # Chain the original KeyError so tracebacks show the failed lookup.
            raise ConfigurationError('{} is not a supported unit'.format(unit), field_name) from exc
        return val
def test_complex_types(tmpdir):
    """Nested list/dict config values must be overridable from the CLI."""
    _main.__module__ = __name__
    main = get_main(tmpdir)
    run = call(main, [])
    print(run.cfg.complex)
    assert run.cfg.complex.a == [1, 2, 3]
    # Whole-list override.
    run = call(main, ['complex.a=[0]'])
    assert run.cfg.complex.a == [0]
    # Dotted-key override merges into the existing dict.
    run = call(main, ['complex.b.a=50'])
    assert run.cfg.complex.b == {'a': 50, 'b': 2}
    # Inline-dict override also merges.
    run = call(main, ['complex.b={a:21}'])
    assert run.cfg.complex.b == {'a': 21, 'b': 2}
    # Round-trip through value_to_argv.
    argv = main.value_to_argv({'complex.b': {'a': 21, 'b': 52}})
    run = call(main, argv)
    assert run.cfg.complex.b == {'a': 21, 'b': 52}
def test_build_mistyped_rule():
    """A non-Rule entry must break serialization; a non-str value must break Rule()."""
    rules = [
        Rule('Path', 'contains', '1337', True),
        Rule('pid', 'is_not', '1338', True),
        Rule('Event_class', 'is', 'Profiling', False),
        u'SomeString',
    ]
    with pytest.raises(AttributeError):
        _ = dumps_configuration({u'FilterRules': rules})
    with pytest.raises(TypeError):
        _ = Rule('Path', 'ends_with', 12345, False)
class Setup():
    """Interactive pygame GUI for configuring a CamEyeTracker.

    Walks the user through three stages (threshold, pupil selection/rect,
    confirmation) and returns the configured tracker. When the module-level
    DEBUG flag is set, every rendered frame is appended to a save file and
    dumped to PNGs at the end.
    """
    def __init__(self, device=None, camres=(640, 480), disptype='window', dispres=(1024, 768), display=None):
        """Create the display (or adopt `display`), open the camera, load fonts.

        device   -- camera device; first available one when None
        camres   -- camera capture resolution
        disptype -- 'window' or 'fullscreen' (ignored when `display` is given)
        dispres  -- display resolution for a newly created surface
        display  -- existing pygame surface to draw on instead of creating one
        """
        if DEBUG:
            # Raw frame dump target (frames separated by BUFFSEP).
            self.savefile = open('data/savefile.txt', 'w')
        if (display == None):
            if (disptype == 'window'):
                self.disp = pygame.display.set_mode(dispres, pygame.RESIZABLE)
            elif (disptype == 'fullscreen'):
                self.disp = pygame.display.set_mode(dispres, ((pygame.FULLSCREEN | pygame.HWSURFACE) | pygame.DOUBLEBUF))
            else:
                # NOTE(review): message contains a '%s' placeholder but no
                # '% disptype' is applied -- the literal '%s' is shown; confirm.
                raise Exception("Error in camtracker.Setup.__init__: disptype '%s' was not recognized; please use 'window', 'fullscreen'")
        else:
            self.disp = display
            dispres = self.disp.get_size()
        if (device == None):
            available = available_devices()
            if (available == []):
                raise Exception('Error in camtracker.Setup.__init__: no available camera devices found (did you forget to plug it in?)')
            else:
                device = available[0]
        self.tracker = CamEyeTracker(device=device, camres=camres)
        # Prefer the bundled font; fall back to pygame's default.
        try:
            fontname = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'resources', 'roboto_regular-webfont.ttf')
        except:
            fontname = pygame.font.get_default_font()
            print("WARNING: camtracker.Setup.__init__: could not find 'roboto_regular-webfont.ttf' in the resources directory!")
        self.font = pygame.font.Font(fontname, 24)
        self.sfont = pygame.font.Font(fontname, 12)
        self.disptype = disptype
        self.dispsize = dispres
        self.fgc = (255, 255, 255)  # foreground (text) color
        self.bgc = (0, 0, 0)        # background color
        self.disp.fill(self.bgc)
        self.img = pygame.surface.Surface(self.tracker.get_size())
        # Initial tracker settings; pupil rect starts centered on the camera image.
        self.settings = {'pupilcol': (0, 0, 0), 'threshold': 100, 'nonthresholdcol': (100, 100, 255, 255), 'pupilpos': ((camres[0] / 2), (camres[1] / 2)), 'pupilrect': pygame.Rect(((camres[0] / 2) - 50), ((camres[1] / 2) - 25), 100, 50), 'pupilbounds': [0, 0, 0, 0], '': None}
    def start(self):
        """Show the welcome screen, wait for a key, run the GUI, return the tracker."""
        self.show_welcome(loading=True)
        if DEBUG:
            self.savefile.write((pygame.image.tostring(self.disp, 'RGB') + BUFFSEP))
        self.setup_GUI()
        self.show_welcome(loading=False)
        if DEBUG:
            self.savefile.write((pygame.image.tostring(self.disp, 'RGB') + BUFFSEP))
        # Block until any key is pressed.
        noinput = True
        while noinput:
            for event in pygame.event.get():
                if (event.type == pygame.KEYDOWN):
                    noinput = False
        self.show_welcome(loading=True)
        if DEBUG:
            self.savefile.write((pygame.image.tostring(self.disp, 'RGB') + BUFFSEP))
        self.draw_stage(stagenr=None)
        if DEBUG:
            self.savefile.write((pygame.image.tostring(self.disp, 'RGB') + BUFFSEP))
        pygame.mouse.set_visible(True)
        tracker = self.run_GUI()
        return tracker
    def show_welcome(self, loading=False):
        """Render the centered welcome text; `loading` switches the footer line."""
        welcometext = 'Welcome to the Webcam EyeTracker calibration interface!\n\t\t\n\t\tauthor: Edwin Dalmaijer\n\t\tversion: 0.1 (12-10-2013)\n\t\t\n\t\t\n\t\t\n\t\t'
        self.disp.fill(self.bgc)
        if loading:
            welcometext += 'Loading, please wait...'
        else:
            welcometext += 'Press any key to start!'
        welcometext = welcometext.replace('\t', '')
        x = (self.dispsize[0] / 2)
        # NOTE(review): y uses dispsize[0] (width), not dispsize[1] -- looks
        # like a vertical-centering bug, but behavior is kept as-is; confirm.
        y = (self.dispsize[0] / 2)
        lines = welcometext.split('\n')
        nlines = len(lines)
        for lnr in range(nlines):
            linesize = self.font.size(lines[lnr])
            rendered = self.font.render(lines[lnr], True, self.fgc)
            pos = ((x - (linesize[0] / 2)), (y + ((lnr - (nlines / 2)) * linesize[1])))
            self.disp.blit(rendered, pos)
        pygame.display.flip()
    def setup_GUI(self):
        """Load button images from the resources directory and compute their layout."""
        resdir = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'resources')
        if (not os.path.exists(resdir)):
            raise Exception("Error in camtracker.Setup.setup_GUI: could not find 'resources' directory to access button images; was it relocated or renamed, or is the installation of camtracker incorrect?")
        # Each button has an active and an inactive image; missing files fall
        # back to the blank placeholder of the same state.
        imgpaths = {}
        buttnames = ['1', '2', '3', 'up', 'down', 't', 'space', 'r', 'escape']
        buttstates = ['active', 'inactive']
        for bn in buttnames:
            imgpaths[bn] = {}
            for bs in buttstates:
                filename = ('%s_%s.png' % (bn, bs))
                imgpaths[bn][bs] = os.path.join(resdir, filename)
                if (not os.path.isfile(imgpaths[bn][bs])):
                    print(("WARNING: image file '%s' was not found in resources!" % filename))
                    imgpaths[bn][bs] = os.path.join(resdir, ('blank_%s.png' % bs))
        buttsize = (50, 50)
        camres = self.tracker.get_size()
        # Stage buttons along the bottom, up/down on the right, t/r on the left.
        buttpos = {}
        y = ((self.dispsize[1] / 2) + int((camres[1] * 0.6)))
        buttpos['1'] = (int(((self.dispsize[0] * (2 / 6.0)) - (buttsize[0] / 2))), y)
        buttpos['2'] = (int(((self.dispsize[0] * (3 / 6.0)) - (buttsize[0] / 2))), y)
        buttpos['3'] = (int(((self.dispsize[0] * (4 / 6.0)) - (buttsize[0] / 2))), y)
        buttpos['space'] = (int(((self.dispsize[0] * (5 / 6.0)) - (buttsize[0] / 2))), y)
        leftx = ((self.dispsize[0] / 2) - ((camres[0] / 2) + buttsize[0]))
        rightx = (((self.dispsize[0] / 2) + (camres[0] / 2)) + buttsize[0])
        buttpos['up'] = (rightx, ((self.dispsize[1] / 2) - buttsize[1]))
        buttpos['down'] = (rightx, ((self.dispsize[1] / 2) + buttsize[1]))
        buttpos['t'] = (leftx, (((self.dispsize[1] / 2) + (camres[1] / 2)) - (buttsize[1] / 2)))
        buttpos['r'] = (leftx, (self.dispsize[1] / 2))
        buttpos['escape'] = (buttsize[0], buttsize[1])
        # Build self.buttons[name][state] = {img, pos, rect} (positions centered).
        self.buttons = {}
        for bn in imgpaths.keys():
            self.buttons[bn] = {}
            buttpos[bn] = ((buttpos[bn][0] - (buttsize[0] / 2)), (buttpos[bn][1] - (buttsize[1] / 2)))
            for bs in imgpaths[bn].keys():
                self.buttons[bn][bs] = {}
                self.buttons[bn][bs]['img'] = pygame.image.load(imgpaths[bn][bs])
                self.buttons[bn][bs]['pos'] = buttpos[bn]
                self.buttons[bn][bs]['rect'] = (buttpos[bn][0], buttpos[bn][1], buttsize[0], buttsize[1])
        self.buttsize = buttsize
    def draw_button(self, image, pos):
        """Blit a single button image at `pos`."""
        self.disp.blit(image, pos)
    def draw_stage(self, stagenr=None):
        """Clear the screen and draw the title and buttons for stage `stagenr`."""
        self.disp.fill(self.bgc)
        buttonstodraw = ['1', '2', '3', 'space', 'escape', 't', 'r']
        activetodraw = []
        if (stagenr == 1):
            title = 'set pupil detection threshold'
            buttonstodraw.extend(['up', 'down'])
            activetodraw.extend(['1'])
        elif (stagenr == 2):
            title = 'select pupil and set pupil detection bounds'
            buttonstodraw.extend(['up', 'down'])
            activetodraw.extend(['2'])
        elif (stagenr == 3):
            title = 'confirmation'
            buttonstodraw.extend(['up', 'down'])
            activetodraw.extend(['3'])
        else:
            title = 'loading, please wait...'
        for buttname in buttonstodraw:
            self.draw_button(self.buttons[buttname]['inactive']['img'], self.buttons[buttname]['inactive']['pos'])
        # The current stage's button is drawn on top in its active state.
        for buttname in activetodraw:
            self.draw_button(self.buttons[buttname]['active']['img'], self.buttons[buttname]['active']['pos'])
        titsize = self.font.size(title)
        titpos = (((self.dispsize[0] / 2) - (titsize[0] / 2)), ((self.dispsize[1] / 2) - ((self.tracker.get_size()[1] / 2) + titsize[1])))
        titsurf = self.font.render(title, True, self.fgc)
        self.disp.blit(titsurf, titpos)
    def run_GUI(self):
        """Main event loop over the three setup stages; returns the tracker.

        stagevars[0] holds cross-stage toggles (threshold view, pupil rect),
        stagevars[1] the threshold adjustment, stagevars[2] the pupil
        position/rect state, stagevars[3] the confirmation flag.
        """
        stage = 1
        stagevars = {}
        stagevars[0] = {}
        stagevars[0]['show_threshimg'] = False
        stagevars[0]['use_prect'] = True
        stagevars[1] = {}
        stagevars[1]['thresholdchange'] = None
        stagevars[2] = {}
        stagevars[2]['clickpos'] = (0, 0)
        stagevars[2]['prectsize'] = (100, 50)
        stagevars[2]['prect'] = pygame.Rect(stagevars[2]['clickpos'][0], stagevars[2]['clickpos'][1], stagevars[2]['prectsize'][0], stagevars[2]['prectsize'][1])
        stagevars[2]['vprectchange'] = None
        stagevars[2]['hprectchange'] = None
        stagevars[3] = {}
        stagevars[3]['confirmed'] = False
        running = True
        imgsize = self.img.get_size()
        # Camera image is blitted centered on the display.
        blitpos = (((self.dispsize[0] / 2) - (imgsize[0] / 2)), ((self.dispsize[1] / 2) - (imgsize[1] / 2)))
        while running:
            self.draw_stage(stagenr=stage)
            # Pupil rect only constrains detection after stage 1.
            useprect = (stagevars[0]['use_prect'] and (stage > 1))
            (self.img, self.thresholded, pupilpos, pupilsize, pupilbounds) = self.tracker.give_me_all(pupilrect=useprect)
            self.settings = self.tracker.settings
            if stagevars[0]['show_threshimg']:
                self.draw_button(self.buttons['t']['active']['img'], self.buttons['t']['active']['pos'])
            else:
                self.draw_button(self.buttons['t']['inactive']['img'], self.buttons['t']['inactive']['pos'])
            if stagevars[0]['use_prect']:
                self.draw_button(self.buttons['r']['active']['img'], self.buttons['r']['active']['pos'])
            else:
                self.draw_button(self.buttons['r']['inactive']['img'], self.buttons['r']['inactive']['pos'])
            (inp, inptype) = self.check_input()
            (stage, stagevars) = self.handle_input(inptype, inp, stage, stagevars)
            if (stage == 1):
                # Stage 1: adjust the detection threshold (0..255).
                if (stagevars[1]['thresholdchange'] != None):
                    if ((stagevars[1]['thresholdchange'] == 'up') and (self.settings['threshold'] < 255)):
                        self.settings['threshold'] += 1
                    elif ((stagevars[1]['thresholdchange'] == 'down') and (self.settings['threshold'] > 0)):
                        self.settings['threshold'] -= 1
                    stagevars[1]['thresholdchange'] = None
            if (stage == 2):
                # Stage 2: click in the camera image to place the pupil rect,
                # arrow keys to resize it.
                if (type(inp) in [tuple, list]):
                    mpos = pygame.mouse.get_pos()
                    hposok = ((mpos[0] > blitpos[0]) and (mpos[0] < (blitpos[0] + imgsize[0])))
                    vposok = ((mpos[1] > blitpos[1]) and (mpos[1] < (blitpos[1] + imgsize[1])))
                    if (hposok and vposok):
                        stagevars[2]['clickpos'] = ((inp[0] - blitpos[0]), (inp[1] - blitpos[1]))
                        self.settings['pupilpos'] = stagevars[2]['clickpos'][:]
                        x = (stagevars[2]['clickpos'][0] - (stagevars[2]['prectsize'][0] / 2))
                        y = (stagevars[2]['clickpos'][1] - (stagevars[2]['prectsize'][1] / 2))
                        stagevars[2]['prect'] = pygame.Rect(x, y, stagevars[2]['prectsize'][0], stagevars[2]['prectsize'][1])
                        self.settings['pupilrect'] = stagevars[2]['prect']
                elif (stagevars[2]['vprectchange'] or stagevars[2]['hprectchange']):
                    if (stagevars[2]['vprectchange'] != None):
                        if (stagevars[2]['vprectchange'] == 'up'):
                            stagevars[2]['prectsize'] = (stagevars[2]['prectsize'][0], (stagevars[2]['prectsize'][1] + 1))
                        elif (stagevars[2]['vprectchange'] == 'down'):
                            stagevars[2]['prectsize'] = (stagevars[2]['prectsize'][0], (stagevars[2]['prectsize'][1] - 1))
                        stagevars[2]['vprectchange'] = None
                    if (stagevars[2]['hprectchange'] != None):
                        if (stagevars[2]['hprectchange'] == 'right'):
                            stagevars[2]['prectsize'] = ((stagevars[2]['prectsize'][0] + 1), stagevars[2]['prectsize'][1])
                        elif (stagevars[2]['hprectchange'] == 'left'):
                            stagevars[2]['prectsize'] = ((stagevars[2]['prectsize'][0] - 1), stagevars[2]['prectsize'][1])
                        stagevars[2]['hprectchange'] = None
                    x = self.settings['pupilrect'][0]
                    y = self.settings['pupilrect'][1]
                    stagevars[2]['prect'] = pygame.Rect(x, y, stagevars[2]['prectsize'][0], stagevars[2]['prectsize'][1])
                    self.settings['pupilrect'] = stagevars[2]['prect']
                pygame.draw.rect(self.img, (0, 0, 255), self.settings['pupilrect'], 2)
                pygame.draw.rect(self.thresholded, (0, 0, 255), self.settings['pupilrect'], 2)
            if (stage == 3):
                # Stage 3: final threshold tweaks plus detection overlays.
                if (stagevars[1]['thresholdchange'] != None):
                    if ((stagevars[1]['thresholdchange'] == 'up') and (self.settings['threshold'] < 255)):
                        self.settings['threshold'] += 1
                    elif ((stagevars[1]['thresholdchange'] == 'down') and (self.settings['threshold'] > 0)):
                        self.settings['threshold'] -= 1
                    stagevars[1]['thresholdchange'] = None
                # Drawing may fail on out-of-range detection values; log and continue.
                try:
                    pygame.draw.rect(self.img, (0, 255, 0), pupilbounds, 1)
                    pygame.draw.rect(self.thresholded, (0, 255, 0), pupilbounds, 1)
                except:
                    print(('pupilbounds=%s' % pupilbounds))
                try:
                    pygame.draw.circle(self.img, (255, 0, 0), pupilpos, 3, 0)
                    pygame.draw.circle(self.thresholded, (255, 0, 0), pupilpos, 3, 0)
                except:
                    print(('pupilpos=%s' % pupilpos))
                if stagevars[3]['confirmed']:
                    running = False
            # Current settings readout left of the camera image.
            starty = ((self.dispsize[1] / 2) - (imgsize[1] / 2))
            vtx = (((self.dispsize[0] / 2) - (imgsize[0] / 2)) - 10)
            vals = ['pupil colour', str(self.settings['pupilcol']), 'threshold', str(self.settings['threshold']), 'pupil position', str(self.settings['pupilpos']), 'pupil rect', str(self.settings['pupilrect'])]
            for i in range(len(vals)):
                tsize = self.sfont.size(vals[i])
                tpos = ((vtx - tsize[0]), (starty + (i * 20)))
                tsurf = self.sfont.render(vals[i], True, self.fgc)
                self.disp.blit(tsurf, tpos)
            if stagevars[0]['show_threshimg']:
                self.disp.blit(self.thresholded, blitpos)
            else:
                self.disp.blit(self.img, blitpos)
            pygame.display.flip()
            self.tracker.settings = self.settings
            if DEBUG:
                self.savefile.write((pygame.image.tostring(self.disp, 'RGB') + BUFFSEP))
        if DEBUG:
            # Convert the raw frame dump to numbered PNGs.
            self.savefile.close()
            print('processing images...')
            savefile = open('data/savefile.txt', 'r')
            raw = savefile.read()
            savefile.close()
            raw = raw.split(BUFFSEP)
            for framenr in range((len(raw) - 1)):
                img = pygame.image.fromstring(raw[framenr], self.dispsize, 'RGB')
                pygame.image.save(img, ('data/frame%d.png' % framenr))
        return self.tracker
    def check_input(self):
        """Poll pygame events; return (mouse position, 'mouseclick') or (key name, 'keypress')."""
        inp = None
        inptype = None
        for event in pygame.event.get():
            if (event.type == pygame.MOUSEBUTTONDOWN):
                inp = pygame.mouse.get_pos()
                inptype = 'mouseclick'
            elif (event.type == pygame.KEYDOWN):
                inp = pygame.key.name(event.key)
                inptype = 'keypress'
        return (inp, inptype)
    def handle_input(self, inptype, inp, stage, stagevars):
        """Translate raw input into stage transitions and stagevars updates.

        A mouse click on a button is mapped to that button's name first, so the
        rest of the method treats clicks and key presses uniformly.
        """
        if (inptype == 'mouseclick'):
            pos = inp[:]
            for bn in self.buttons.keys():
                r = self.buttons[bn]['inactive']['rect']
                if ((pos[0] > r[0]) and (pos[0] < (r[0] + r[2])) and (pos[1] > r[1]) and (pos[1] < (r[1] + r[3]))):
                    inp = bn
                    break
        if (stage == 1):
            if (inp in ['up', 'down']):
                stagevars[1]['thresholdchange'] = inp
        elif (stage == 2):
            if (inp in ['up', 'down']):
                stagevars[2]['vprectchange'] = inp
            elif (inp in ['left', 'right']):
                stagevars[2]['hprectchange'] = inp
        elif (stage == 3):
            if (inp in ['up', 'down']):
                stagevars[1]['thresholdchange'] = inp
            if (inp == 'space'):
                stagevars[3]['confirmed'] = True
        # Space advances a stage; 1/2/3 jump directly; t/r toggle views.
        if ((inp == 'space') and (stage < 3)):
            stage += 1
        if (inp in ['1', '2', '3']):
            stage = int(inp)
        if (inp == 't'):
            if stagevars[0]['show_threshimg']:
                stagevars[0]['show_threshimg'] = False
            else:
                stagevars[0]['show_threshimg'] = True
        if (inp == 'r'):
            if stagevars[0]['use_prect']:
                stagevars[0]['use_prect'] = False
            else:
                stagevars[0]['use_prect'] = True
        if (inp == 'escape'):
            pygame.display.quit()
            raise Exception('camtracker.Setup: Escape was pressed')
        return (stage, stagevars)
class OptionSeriesTilemapSonificationTracksPointgrouping(Options):
    """Point-grouping options for tilemap sonification tracks.

    NOTE(review): options are getter/setter pairs sharing one name; at plain
    Python runtime the second ``def`` shadows the first -- presumably a
    decorator layer (stripped here) is applied by ``Options``; confirm.
    """
    def algorithm(self):
        # Default: 'minmax' grouping algorithm.
        return self._config_get('minmax')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Default: True (grouping active).
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Default: 15 (timespan per group).
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Default: group by the 'y' property.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
def test_outputs(capfd):
    """Exercise _test_outputs across tensor/all flag combinations and dtypes."""
    cases = [
        (True, False, 'test_outputs_tensor', 'float16'),
        (False, True, 'test_outputs_all', 'float16'),
        (True, True, 'test_outputs_both_float16', 'float16'),
        (True, True, 'test_outputs_both_float32', 'float32'),
    ]
    for tensor_flag, all_flag, case_name, dtype in cases:
        _test_outputs(tensor_flag, all_flag, case_name, dtype, capfd)
class BotCommand(JsonSerializable, JsonDeserializable, Dictionaryable):
    """A bot command: the command string plus its human-readable description.

    NOTE(review): `de_json` takes `cls` but carries no visible @classmethod
    decorator -- presumably stripped during extraction; confirm upstream.
    """
    def de_json(cls, json_string):
        # Build an instance from a JSON string/dict; None passes through.
        if (json_string is None):
            return None
        obj = cls.check_json(json_string, dict_copy=False)
        return cls(**obj)
    def __init__(self, command, description):
        # command: the command text (e.g. 'help'); description: shown to users.
        self.command: str = command
        self.description: str = description
    def to_json(self):
        """Serialize to a JSON string."""
        return json.dumps(self.to_dict())
    def to_dict(self):
        """Serialize to a plain dict with 'command' and 'description' keys."""
        return {'command': self.command, 'description': self.description}
def new_access_list_transaction(vm: VM, from_: Address, to: Address, private_key: PrivateKey, amount: int=0, gas_price: int=(10 ** 10), gas: int=100000, data: bytes=b'', nonce: int=None, chain_id: int=1, access_list: Sequence[Tuple[(Address, Sequence[int])]]=None) -> AccessListTransaction:
    """Build and sign an EIP-2930 access-list transaction.

    The nonce defaults to the sender's current account nonce and the access
    list to empty when not supplied.
    """
    nonce = vm.state.get_nonce(from_) if nonce is None else nonce
    access_list = [] if access_list is None else access_list
    unsigned = vm.get_transaction_builder().new_unsigned_access_list_transaction(
        chain_id=chain_id,
        nonce=nonce,
        gas_price=gas_price,
        gas=gas,
        to=to,
        value=amount,
        data=data,
        access_list=access_list,
    )
    return unsigned.as_signed_transaction(private_key)
def test_linear_stretch_user_error():
    """Non-numeric stretch points must raise TypeError."""
    bad_kwargs = [
        {'white_point': 'NaN', 'black_point': 0.5},
        {'white_point': 0.5, 'black_point': 'NaN'},
    ]
    with Image(width=100, height=100, pseudo='gradient:') as img:
        for kwargs in bad_kwargs:
            with raises(TypeError):
                img.linear_stretch(**kwargs)
def lazy_import():
    """Import the WAF model classes on demand and publish them at module level."""
    from fastly.model.type_waf_rule_revision import TypeWafRuleRevision
    from fastly.model.waf_rule_revision import WafRuleRevision
    from fastly.model.waf_rule_revision_attributes import WafRuleRevisionAttributes
    from fastly.model.waf_tag import WafTag
    globals().update(
        TypeWafRuleRevision=TypeWafRuleRevision,
        WafRuleRevision=WafRuleRevision,
        WafRuleRevisionAttributes=WafRuleRevisionAttributes,
        WafTag=WafTag,
    )
# NOTE(review): bare call below reads like a stripped decorator
# (`@_instruction_type([ofproto.OFPIT_METER])`) registering the class -- confirm.
_instruction_type([ofproto.OFPIT_METER])
class OFPInstructionMeter(OFPInstruction):
    """OpenFlow METER instruction: apply meter `meter_id` to the packet."""
    def __init__(self, meter_id=1, type_=None, len_=None):
        # type_/len_ are accepted for parser symmetry but the wire constants
        # are always used.
        super(OFPInstructionMeter, self).__init__()
        self.type = ofproto.OFPIT_METER
        self.len = ofproto.OFP_INSTRUCTION_METER_SIZE
        self.meter_id = meter_id
    def parser(cls, buf, offset):
        # NOTE(review): takes `cls` -- presumably a stripped @classmethod.
        (type_, len_, meter_id) = struct.unpack_from(ofproto.OFP_INSTRUCTION_METER_PACK_STR, buf, offset)
        return cls(meter_id)
    def serialize(self, buf, offset):
        # Pack type/len/meter_id into `buf` at `offset`.
        msg_pack_into(ofproto.OFP_INSTRUCTION_METER_PACK_STR, buf, offset, self.type, self.len, self.meter_id)
def replace_memory_keywords(config, keyword, value):
    """Replace every occurrence of `keyword` in config['memories'] values with `value`.

    Mutates `config` in place and logs each substitution.
    """
    for mem_name, mem_config in list(config['memories'].items()):
        for key in mem_config:
            if mem_config[key] != keyword:
                continue
            print(('Replacing {%s: %s} with {%s:%s}' % (key, mem_config[key], key, value)))
            mem_config[key] = value
class Analogous(Harmony):
    """Analogous harmony: the input color plus hues 30 degrees either side."""

    def harmonize(self, color: 'Color', space: Optional[str]) -> List['Color']:
        """Return [color, +30deg, -30deg], converted back to the input's space."""
        target_space = color.HARMONY if space is None else space
        original_space = color.space()
        base = color.convert(target_space)
        # Hue rotation only makes sense in a cylindrical space.
        if not isinstance(base._space, Cylindrical):
            raise ValueError('Color space must be cylindrical')
        hue_channel = base._space.hue_name()
        plus30 = base.clone()
        minus30 = base.clone()
        plus30.set(hue_channel, (lambda h: cast(float, (h + 30))))
        minus30.set(hue_channel, (lambda h: cast(float, (h - 30))))
        return [color, plus30.convert(original_space, in_place=True), minus30.convert(original_space, in_place=True)]
class FlowRoot(FlowNode):
    """Entry node of a flow: carries the flow's name/description and which
    commands auto-trigger it."""

    def __init__(self, name: str, description: str):
        super().__init__()
        self.name = name
        self.description = description
        self.auto_triggers = set()
        self.room_flow = False

    def connect(self, node_or_command: Union[('FlowNode', str)], predicate: Predicate=(lambda _: False), auto_trigger: bool=False, room_flow: bool=False) -> 'FlowNode':
        """Connect a node/command; optionally register it as an auto-trigger.

        Note: `room_flow` is stored on the root on every call, so the last
        connection's value wins.
        """
        connected = super().connect(node_or_command, predicate)
        if auto_trigger:
            self.auto_triggers.add(node_or_command)
        self.room_flow = room_flow
        return connected

    def __str__(self):
        return self.name
def extractYehetstradamusWordpressCom(item):
    """Map a feed item from yehetstradamus.wordpress.com to a release message.

    Returns None for previews/non-chapter posts, a release message when a known
    tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    series_by_tag = {'BILIP': ('Brother-in-Law, Im Pregnant!', 'translated')}
    for tagname, (name, tl_type) in series_by_tag.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestContour(unittest.TestCase):
    """Tests for the Cntr contour tracer."""
    def test_contour_trace_levels_no_mask(self):
        """Trace several levels on a fully-unmasked 4x4 grid and compare paths."""
        xs = np.array([0, 1, 2, 3])
        ys = np.array([10, 20, 30, 40])
        (xg, yg) = np.meshgrid(xs, ys)
        data = np.array([[0, 0, 1, 2], [0, 1, 2, 3], [1, 2, 0, 3], [2, 3, 3, 3]])
        # All-True mask inverted: nothing is masked out.
        mask = np.ones(data.shape, dtype=bool)
        c = Cntr(xg, yg, data, (~ mask))
        levels = c.trace(0.0)
        self.assertEqual(len(levels), 2)
        self._check_level(levels[0], [1.0, 1.0, 0.0, 0.0], [10.0, 10.0, 20.0, 20.0])
        self._check_level(levels[1], [2.0, 2.0, 2.0, 2.0, 2.0], [30.0, 30.0, 30.0, 30.0, 30.0])
        levels = c.trace(1.0)
        self.assertEqual(len(levels), 2)
        self._check_level(levels[0], [2.0, 2.0, 1.0, 1.0, 0.0, 0.0], [10.0, 10.0, 20.0, 20.0, 30.0, 30.0])
        self._check_level(levels[1], [2.0, 1.5, 2.0, 2., 2.0], [25.0, 30.0, 33., 30.0, 25.0])
        levels = c.trace(2.0)
        self.assertEqual(len(levels), 1)
        self._check_level(levels[0], [3.0, 2.0, 2., 2.0, 1.0, 0.0], [10.0, 20.0, 30.0, 36., 30.0, 40.0])
        levels = c.trace(2.5)
        self.assertEqual(len(levels), 1)
        self._check_level(levels[0], [3.0, 2.5, 2., 2.0, 1.0, 0.5], [15.0, 20.0, 30.0, 38., 35.0, 40.0])
        # Level at the data maximum yields no contour.
        levels = c.trace(3.0)
        self.assertEqual(len(levels), 0)
    def _check_level(self, level, expected_x, expected_y):
        """Assert a traced (x, y) path matches the expected coordinates."""
        (level_x, level_y) = level
        nptest.assert_allclose(level_x, expected_x)
        nptest.assert_allclose(level_y, expected_y)
    def test_contour_init_bad_datatype(self):
        """A non-bool mask dtype must be rejected with TypeError."""
        xs = np.array([0])
        ys = np.array([1])
        (xg, yg) = np.meshgrid(xs, ys)
        data = np.array([[3]])
        mask_bad_datatype = np.ones(data.shape, dtype=np.uint8)
        with self.assertRaises(TypeError):
            Cntr(xg, yg, data, mask_bad_datatype)
def proto_process_releases(sess, feed_releases):
    """Run each feed release through the RSS processor and bucket outcomes.

    Returns a dict of (result, release-dict) pairs keyed by outcome:
    'successful', 'missed', 'ignored' (non-tumblr items only) and
    'ignored-w-tumblr-idiots' (every ignored item, tumblr included).
    Raises RuntimeError on an unexpected processor return value.
    """
    ret_dict = {
        'successful': [],
        'missed': [],
        'ignored': [],
        'ignored-w-tumblr-idiots': [],
    }
    releases = sorted(feed_releases, key=lambda x: x.published, reverse=True)
    print('Found %s feed releases' % len(releases))
    processor = RssProcessor(db_sess=sess, loggerPath='Main.WebProto',
                             pageUrl=None, pgContent=None, type=None)
    pending = []
    for item in tqdm.tqdm(releases):
        release = {
            'feedtype': item.type,
            'title': item.title,
            'guid': item.contentid,
            'linkUrl': item.contenturl,
            'updated': item.updated,
            'published': item.published,
            'contents': item.contents,
            'tags': item.tags,
            'authors': item.author,
            'srcname': item.feed_entry.feed_name,
            'feed_id': item.feed_entry.id,
        }
        result = process_release(processor, release, str(item.title))
        pending.append((result, release))
    for ret, release in pending:
        if ret:
            ret_dict['successful'].append((ret, release))
        elif ret is False:
            ret_dict['missed'].append((ret, release))
        elif ret is None:
            # Tumblr items only land in the catch-all bucket; everything
            # else also goes into the plain 'ignored' bucket.
            if 'tumblr.com/' not in release['linkUrl']:
                ret_dict['ignored'].append((ret, release))
            ret_dict['ignored-w-tumblr-idiots'].append((ret, release))
        else:
            raise RuntimeError('Wat? Unknown ret ({}) for release: {}'.format(ret, release))
    return ret_dict
# NOTE(review): the decorator line below is truncated -- it begins with
# '.skipif', most likely '@pytest.mark.skipif' in the original source;
# confirm upstream before relying on it.
.skipif((sys.version_info < (2, 7)), reason='These tests should all run because argument evaluates to False')
class TestMockingAndStubbingFixtures(PluginTestingOrderOfOperationsTestCase):
    """Order-of-operations tests served by the mocking/stubbing fixture server."""

    server_class = MockingAndStubbingServer
    # No extra server configuration is needed for these fixtures.
    server_settings = {}
    # Fixture files live alongside this test module.
    fixture_path = (os.path.dirname(__file__) + '/mocking_and_stubbing')
def setCmd(snmpDispatcher, authData, transportTarget, *varBinds, **options):
    """Issue an SNMP SET command over the low-level (v1arch) dispatcher.

    Builds a SetRequestPDU from *varBinds* and submits it through
    ``snmpDispatcher.sendPdu``.  Recognized *options*:

    * ``lookupMib`` -- if true, resolve var-bindings through VB_PROCESSOR
    * ``cbFun``     -- user callback receiving the response (or error)
    * ``cbCtx``     -- opaque context object handed back to ``cbFun``

    Returns whatever ``snmpDispatcher.sendPdu`` returns (a request state
    handle).
    """
    def _cbFun(snmpDispatcher, stateHandle, errorIndication, rspPdu, _cbCtx):
        # No user callback registered -- nothing to deliver.
        if (not cbFun):
            return
        if errorIndication:
            # Transport/engine-level failure: report with zeroed status/index
            # and no var-bindings.
            cbFun(errorIndication, pMod.Integer(0), pMod.Integer(0), None, cbCtx=cbCtx, snmpDispatcher=snmpDispatcher, stateHandle=stateHandle)
            return
        errorStatus = pMod.apiPDU.getErrorStatus(rspPdu)
        errorIndex = pMod.apiPDU.getErrorIndex(rspPdu)
        varBinds = pMod.apiPDU.getVarBinds(rspPdu)
        if lookupMib:
            varBinds = VB_PROCESSOR.unmakeVarBinds(snmpDispatcher.cache, varBinds)
        nextStateHandle = pMod.getNextRequestID()
        # The user callback may return follow-up var-bindings, in which case
        # another SET request is issued on the same PDU (continuation).
        nextVarBinds = cbFun(errorIndication, errorStatus, errorIndex, varBinds, cbCtx=cbCtx, snmpDispatcher=snmpDispatcher, stateHandle=stateHandle, nextStateHandle=nextStateHandle)
        if (not nextVarBinds):
            return
        pMod.apiPDU.setRequestID(reqPdu, nextStateHandle)
        pMod.apiPDU.setVarBinds(reqPdu, nextVarBinds)
        return snmpDispatcher.sendPdu(authData, transportTarget, reqPdu, cbFun=_cbFun)
    (lookupMib, cbFun, cbCtx) = [options.get(x) for x in ('lookupMib', 'cbFun', 'cbCtx')]
    if lookupMib:
        varBinds = VB_PROCESSOR.makeVarBinds(snmpDispatcher.cache, varBinds)
    # Pick the protocol module matching the message processing model
    # (SNMPv1/v2c) requested by the credentials object.
    pMod = api.PROTOCOL_MODULES[authData.mpModel]
    reqPdu = pMod.SetRequestPDU()
    pMod.apiPDU.setDefaults(reqPdu)
    pMod.apiPDU.setVarBinds(reqPdu, varBinds)
    return snmpDispatcher.sendPdu(authData, transportTarget, reqPdu, cbFun=_cbFun)
def extractPraisethemetalbatWordpressCom(item):
    """Parse a release from a praisethemetalbat.wordpress.com feed item.

    Returns a release message for known tagged series, None for preview
    posts or items without chapter/volume info, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip teaser posts and items that carry no chapter/volume number.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (canonical series name, translation type)
    known_series = {
        'Running Away From the Hero': ('Running Away From the Hero', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series, tl_type) in known_series.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the decorator line below is truncated -- it begins with
# '.AnalysisPluginTestConfig', presumably a '@pytest.mark...' or fixture
# configuration decorator in the original source; confirm upstream.
.AnalysisPluginTestConfig(plugin_class=AnalysisPlugin)
def test_detect_type_of_file(analysis_plugin):
    """The file-type plugin should identify a zip container by mime and full type."""
    result = analysis_plugin.analyze(io.FileIO(f'{get_test_data_dir()}/container/test.zip'), {}, {})
    summary = analysis_plugin.summarize(result)
    assert (result.mime == 'application/zip'), 'mime-type not detected correctly'
    assert result.full.startswith('Zip archive data, at least'), 'full type not correct'
    # The summary is just the detected mime type.
    assert (summary == ['application/zip'])
class TooManyDigitsPattern(AbstractAstPattern):
    """Flags numeric literals whose digit count makes them hard to review."""

    name = 'Too Many Digit Literals'
    # Fixed: the previous description ('Usage of assembly in Solidity code is
    # discouraged.') was copy-pasted from an unrelated pattern and did not
    # describe this check.
    description = 'Usage of numeric literals with too many digits is discouraged.'
    severity = Severity.INFO
    tags = {}

    def find_matches(self) -> List[PatternMatch]:
        """Return one violation match per long numeric literal in the AST."""
        ast = self.get_ast_module()
        ast_root = self.get_ast_root()
        matches = []
        for literal in ast_root.find_descendants_of_type(ast.Literal):
            # Only numeric (integer / fixed-point) literals are of interest.
            if (not any((literal.type_string.startswith('int'), literal.type_string.startswith('uint'), literal.type_string.startswith('fixed'), literal.type_string.startswith('ufixed')))):
                continue
            # Hex constants (addresses, selectors, bitmasks) are exempt.
            if literal.value.startswith('0x'):
                continue
            contract = literal.find_ancestor_of_type(ast.ContractDefinition)
            # Five or more consecutive zeros is the "too many digits" signal.
            if ('00000' in literal.value):
                match = self.match_violation().with_info(MatchComment(f"Contract '{contract.name}' contains a numeric literal with too many digits."), *self.ast_node_info(literal))
                matches.append(match)
        return matches
class OptionSeriesParetoStatesSelectHalo(Options):
    """Highcharts ``series.pareto.states.select.halo`` options.

    NOTE(review): each option below appears as two same-named methods
    (getter first, then setter).  As plain ``def``s the second shadows the
    first; in the upstream generated code these are presumably
    ``@property`` / ``@x.setter`` pairs whose decorators were stripped
    during extraction -- confirm against the original source.
    """

    def attributes(self):
        # Getter: extra halo attributes (no documented default).
        return self._config_get(None)
    def attributes(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
    def opacity(self):
        # Getter: halo opacity (default 0.25).
        return self._config_get(0.25)
    def opacity(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def size(self):
        # Getter: halo size in pixels (default 10).
        return self._config_get(10)
    def size(self, num: float):
        # Setter.
        self._config(num, js_type=False)
class OptionPlotoptionsWordcloudSonificationTracksMappingLowpassFrequency(Options):
    """Highcharts ``plotOptions.wordcloud.sonification.tracks.mapping.lowpass.frequency`` options.

    NOTE(review): each option below appears as two same-named methods
    (getter first, then setter).  As plain ``def``s the second shadows the
    first; upstream these are presumably ``@property`` / ``@x.setter``
    pairs whose decorators were stripped -- confirm against the original.
    """

    def mapFunction(self):
        # Getter: mapping function (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property to map to (no default).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)
    def max(self):
        # Getter: mapping maximum (no default).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter: mapping minimum (no default).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def within(self):
        # Getter: what data set to map within (no default).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
class File_Based_Message_Handler(Message_Handler):
    """Message handler bound to a named file.

    Abstract: concrete subclasses must provide fork() and setup_fd().
    """

    def __init__(self, tool_id, filename):
        super().__init__(tool_id)
        # The file descriptor is opened later, by setup_fd().
        self.fd = None
        self.filename = filename

    def fork(self):
        # Subclass responsibility.
        raise ICE('unimplemented abstract class')

    def setup_fd(self):
        # Subclass responsibility.
        raise ICE('unimplemented abstract class')
class Text3D(Module):
    """Mayavi module that renders a text string as 3D geometry in the scene.

    The text either keeps a fixed orientation (tvtk.Actor) or always faces
    the camera (tvtk.Follower), selected by ``orient_to_camera``.
    """

    __version__ = 0
    # Wrapper managing the concrete tvtk actor/follower.
    actor = Instance(Actor, allow_none=False, record=True)
    # Source turning the text string into renderable polydata.
    vector_text = Instance(tvtk.VectorText, allow_none=False, record=True)
    text = Str('Text', desc='the text to be displayed', enter_set=True, auto_set=False)
    position = CArray(value=(0.0, 0.0, 0.0), cols=3, desc='the world coordinates of the text', enter_set=True, auto_set=False)
    scale = CArray(value=(1.0, 1.0, 1.0), cols=3, desc='the scale of the text', enter_set=True, auto_set=False)
    orientation = CArray(value=(0.0, 0.0, 0.0), cols=3, desc='the orientation angles of the text', enter_set=True, auto_set=False)
    orient_to_camera = Bool(True, desc='if the text is kept facing the camera')
    input_info = PipelineInfo(datasets=['any'], attribute_types=['any'], attributes=['any'])
    view = View(Group(Item(name='text'), Group(Item(name='position'), show_labels=False, show_border=True, label='Position'), Group(Item(name='scale'), show_labels=False, show_border=True, label='Scale'), Group(Item(name='orient_to_camera'), Item(name='orientation', label='Angles'), show_border=True, label='Orientation'), label='Text'), Group(Item(name='actor', style='custom', show_label=False), label='Actor'))

    def setup_pipeline(self):
        """Create the vector-text source and actor, and push the initial text."""
        self.vector_text = tvtk.VectorText(text=self.text)
        self.outputs = [self.vector_text]
        self.actor = Actor()
        self._text_changed(self.text)

    def update_pipeline(self):
        """Nothing to rebuild; just flag downstream that the pipeline changed."""
        self.pipeline_changed = True

    def has_output_port(self):
        """This module exposes a VTK output port."""
        return True

    def get_output_object(self):
        """Return the vector-text source's output port."""
        return self.vector_text.output_port

    def _text_changed(self, value):
        # Trait handler: propagate the new string into the tvtk source.
        vector_text = self.vector_text
        if (vector_text is None):
            return
        vector_text.text = str(value)
        self.render()

    def _actor_changed(self, old, new):
        # Trait handler: rewire scene/inputs and carry the concrete tvtk
        # actor over (or create one) when the Actor wrapper is replaced.
        new.scene = self.scene
        new.inputs = [self]
        self._change_components(old, new)
        old_actor = None
        if (old is not None):
            old_actor = old.actor
        new.actor = self._get_actor_or_follower(old=old_actor)
        self.actors = new.actors
        self.render()

    def _orient_to_camera_changed(self):
        # Switching follower/fixed mode requires rebuilding the tvtk actor.
        self.actor.actor = self._get_actor_or_follower(old=self.actor.actor)

    def _get_actor_or_follower(self, old=None):
        """Return a tvtk.Follower (camera-facing) or tvtk.Actor, moving the
        position/scale/orientation trait syncing from `old` to the new one."""
        if self.orient_to_camera:
            new = tvtk.Follower()
            if (self.scene is not None):
                new.camera = self.scene.camera
        else:
            new = tvtk.Actor()
        if (old is not None):
            # Detach trait syncing from the actor being replaced.
            self.sync_trait('position', old, 'position', remove=True)
            self.sync_trait('scale', old, 'scale', remove=True)
            self.sync_trait('orientation', old, 'orientation', remove=True)
        self.sync_trait('position', new, 'position')
        self.sync_trait('scale', new, 'scale')
        self.sync_trait('orientation', new, 'orientation')
        return new

    def _scene_changed(self, old, new):
        # Keep the follower's camera in sync when the scene changes.
        super(Text3D, self)._scene_changed(old, new)
        if ((new is not None) and self.orient_to_camera):
            self.actor.actor.camera = new.camera
def identity(family, degree):
    """Project x[0] through a mass-matrix solve and return the error norm.

    Interpolates f = x[0] into the given function space on a unit cube,
    solves (u, v) = (f, v) for `out`, and returns ||f - out||.
    """
    mesh = UnitCubeMesh(3, 3, 3)
    space = FunctionSpace(mesh, family, degree)
    x = SpatialCoordinate(mesh)

    f = Function(space)
    f.interpolate(x[0])

    trial = TrialFunction(space)
    test = TestFunction(space)
    lhs = inner(trial, test) * dx
    rhs = inner(f, test) * dx

    out = Function(space)
    solve(lhs == rhs, out)
    return norm(assemble(f - out))
class SchemaspaceList(SchemaspaceBase):
    """Handles the 'list' subcommand for a schemaspace: prints instances as
    JSON or as an aligned three-column table."""

    json_flag = Flag('--json', name='json', description='List complete instances as JSON', default_value=False)
    include_invalid_flag = Flag('--include-invalid', name='include-invalid', description='Include invalid instances (default displays only valid instances)', default_value=False)
    options = [json_flag, include_invalid_flag]

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.metadata_manager = MetadataManager(schemaspace=self.schemaspace)

    def start(self):
        """Fetch instances and print them as JSON or a formatted table."""
        super().start()
        try:
            metadata_instances = self.metadata_manager.get_all(include_invalid=self.include_invalid_flag.value)
        except MetadataNotFoundError:
            metadata_instances = None
        if self.json_flag.value:
            if (metadata_instances is None):
                metadata_instances = []
            print(metadata_instances)
        else:
            if (not metadata_instances):
                print(f'No metadata instances found for {self.schemaspace}')
                return
            validity_clause = (' (includes invalid)' if self.include_invalid_flag.value else '')
            print(f'Available metadata instances for {self.schemaspace}{validity_clause}:')
            sorted_instances = sorted(metadata_instances, key=(lambda inst: (inst.schema_name, inst.name)))
            # Column widths: widest value in each column, or the header width.
            max_schema_name_len = len('Schema')
            max_name_len = len('Instance')
            max_resource_len = len('Resource')
            for instance in sorted_instances:
                max_schema_name_len = max(len(instance.schema_name), max_schema_name_len)
                max_name_len = max(len(instance.name), max_name_len)
                max_resource_len = max(len(instance.resource), max_resource_len)
            print()
            print(f"{'Schema'.ljust(max_schema_name_len)} {'Instance'.ljust(max_name_len)} {'Resource'.ljust(max_resource_len)} ")
            # Fixed: the underline row previously printed dashes only under
            # 'Schema' (the other two columns used ''.ljust(...), yielding
            # blank underlines).
            print(f"{'------'.ljust(max_schema_name_len)} {'--------'.ljust(max_name_len)} {'--------'.ljust(max_resource_len)} ")
            for instance in sorted_instances:
                invalid = ''
                if (instance.reason and (len(instance.reason) > 0)):
                    invalid = f'**INVALID** ({instance.reason})'
                print(f'{instance.schema_name.ljust(max_schema_name_len)} {instance.name.ljust(max_name_len)} {instance.resource.ljust(max_resource_len)} {invalid}')
class OptionSeriesSplineSonificationContexttracksMappingHighpassFrequency(Options):
    """Highcharts ``series.spline.sonification.contextTracks.mapping.highpass.frequency`` options.

    NOTE(review): each option below appears as two same-named methods
    (getter first, then setter).  As plain ``def``s the second shadows the
    first; upstream these are presumably ``@property`` / ``@x.setter``
    pairs whose decorators were stripped -- confirm against the original.
    """

    def mapFunction(self):
        # Getter: mapping function (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property to map to (no default).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)
    def max(self):
        # Getter: mapping maximum (no default).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter: mapping minimum (no default).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def within(self):
        # Getter: what data set to map within (no default).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
def extractSeonbiNovels(item):
    """Parse a release from a Seonbi Novels feed item.

    Returns a release message for 'WATTT'-tagged posts, None for previews
    or items without chapter/volume info, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    # Skip teaser posts and items that carry no chapter/volume number.
    if not (chp or vol) or 'preview' in title_lower:
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp,
                                       frag=frag, postfix=postfix)
class DocxLoader(BaseLoader):
    """Load a .docx file into a single Document of concatenated paragraph text."""

    def __init__(self, file_path: str, encoding: Optional[str]=None):
        """Store the file path (and an encoding, currently unused by load())."""
        self.file_path = file_path
        self.encoding = encoding

    def load(self) -> List[Document]:
        """Read all paragraphs and return one Document with the joined text.

        Note: paragraphs are joined with no separator (matching the original
        behavior), so callers should not expect newlines between paragraphs.
        """
        doc = docx.Document(self.file_path)
        # Idiomatic iteration over paragraphs instead of range(len(...)) indexing.
        content = [para.text for para in doc.paragraphs]
        return [Document(page_content=''.join(content), metadata={'source': self.file_path})]
class Duplicate(Core):
    """Duplicate-file helpers layered on the core drive API."""

    def duplicate_list(self, drive_id: str=None) -> List[DuplicateItem]:
        """Return every duplicate item reported for *drive_id* (None = default)."""
        return list(self._core_duplicate_list(drive_id))

    def list_to_clean(self, album_drive_id: str, size: int=200, drive_id: str=None) -> List[BaseFile]:
        """Return up to *size* cleanable files from *album_drive_id*.

        Falls back to the default drive when *drive_id* is None.
        """
        if (drive_id is None):
            drive_id = self.default_drive_id
        request = ListToCleanRequest(drive_id=drive_id, album_drive_id=album_drive_id)
        # list(islice(...)) replaces the manual append loop -- same result,
        # still stops after `size` items.
        return list(itertools.islice(self._core_list_to_clean(request), size))
# NOTE(review): the two decorator lines below are truncated -- they begin
# with '.integration_external' / '.integration_bigquery', most likely
# '@pytest.mark.integration_external' etc. upstream; confirm.
.integration_external
.integration_bigquery
def test_create_and_process_access_request_bigquery(bigquery_resources, db, cache, policy, run_privacy_request_task):
    """End-to-end access request against the BigQuery example dataset.

    Submits a privacy request for the seeded identity and verifies that the
    customer and address rows come back with the expected field values.
    """
    customer_email = bigquery_resources['email']
    customer_name = bigquery_resources['name']
    data = {'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': customer_email}}
    pr = get_privacy_request_results(db, policy, run_privacy_request_task, data, task_timeout=PRIVACY_REQUEST_TASK_TIMEOUT_EXTERNAL)
    results = pr.get_results()
    # Exactly one customer row, matching the seeded identity.
    customer_table_key = f'EN_{pr.id}__access_request__bigquery_example_test_dataset:customer'
    assert (len(results[customer_table_key]) == 1)
    assert (results[customer_table_key][0]['email'] == customer_email)
    assert (results[customer_table_key][0]['name'] == customer_name)
    # Exactly one address row, matching the seeded city/state.
    address_table_key = f'EN_{pr.id}__access_request__bigquery_example_test_dataset:address'
    city = bigquery_resources['city']
    state = bigquery_resources['state']
    assert (len(results[address_table_key]) == 1)
    assert (results[address_table_key][0]['city'] == city)
    assert (results[address_table_key][0]['state'] == state)
    # Clean up the privacy request record.
    pr.delete(db=db)
def verify_ref_count(trie):
    """Check the trie's tracked ref_count against a freshly enumerated one.

    Every enumerated key must already be tracked; tracked keys that are no
    longer enumerated must have a zero count; counts on shared keys must
    match exactly.  Raises AssertionError on any mismatch.
    """
    expected_counts = trie.regenerate_ref_count()
    tracked = set(trie.ref_count.keys())
    enumerated = set(expected_counts.keys())
    # No key may be reachable in the trie without being tracked.
    assert not (enumerated - tracked)
    # Tracked-but-unreachable keys must have decayed to zero.
    for stale_key in tracked - enumerated:
        assert trie.ref_count[stale_key] == 0
    # Keys present in both views must agree on the exact count.
    for shared_key in tracked & enumerated:
        assert trie.ref_count[shared_key] == expected_counts[shared_key]
class ModelBundleDeleter(ErsiliaBase):
    """Deletes a model's bundle folder (removing its BentoML folder first)."""

    def __init__(self, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json)

    def _model_path(self, model_id):
        # Bundles live under <bundles_dir>/<model_id>.
        return os.path.join(self._bundles_dir, model_id)

    def delete(self, model_id):
        """Remove the bundle for *model_id*; no-op when it does not exist."""
        bundle_folder = self._model_path(model_id)
        if not os.path.exists(bundle_folder):
            return
        bento_folder = self._get_bentoml_location(model_id)
        if bento_folder is not None:
            self.logger.info('Removing bento folder first {0}'.format(bento_folder))
            rmtree(bento_folder)
            # Recreate an empty placeholder so the BentoML layout stays valid.
            os.makedirs(bento_folder, exist_ok=True)
        self.logger.info('Removing bundle folder {0}'.format(bundle_folder))
        rmtree(bundle_folder)
        self.logger.debug('Folder removed')
# NOTE(review): the decorator line below is truncated -- it begins with
# '.parametrize', most likely '@pytest.mark.parametrize' upstream; confirm.
.parametrize(('input_data', 'verbose', 'expected'), [(1000, False, '1000.00 Byte'), (1024, False, '1.00 KiB'), ((1024 * 1024), False, '1.00 MiB'), (1234.1234, False, '1.21 KiB'), (1000, True, '1000.00 Byte (1,000 bytes)'), (b'abc', False, 'not available'), (None, False, 'not available')])
def test_byte_number_filter(input_data, verbose, expected):
    """byte_number_filter renders byte counts human-readably ('not available' for non-numbers)."""
    assert (flt.byte_number_filter(input_data, verbose) == expected)
class MultiThreadTrajectoryBatcher():
    """Coordinates several trajectory-acquisition workers, each driving
    ``self.n_envs`` environments, and gathers completed buffer slots.

    NOTE(review): relies on attributes (``workers``, ``n_envs``,
    ``buffer``) created by an ``__init__`` that is not visible in this
    chunk -- confirm against the full class definition.
    """

    def reset(self, agent_info=DictTensor({}), env_info=DictTensor({})):
        """Reset every worker with its slice of the agent/env info.

        Both DictTensors must be empty or contain exactly
        ``n_envs * n_workers`` elements; each worker gets a contiguous
        ``n_envs``-sized slice.
        """
        n_workers = len(self.workers)
        assert (isinstance(agent_info, DictTensor) and (agent_info.empty() or (agent_info.n_elems() == (self.n_envs * n_workers))))
        assert (isinstance(env_info, DictTensor) and (env_info.empty() or (env_info.n_elems() == (self.n_envs * n_workers))))
        pos = 0
        for k in range(n_workers):
            n = self.n_envs
            wi = (None if (agent_info is None) else agent_info.slice(pos, (pos + n)))
            ei = (None if (env_info is None) else env_info.slice(pos, (pos + n)))
            self.workers[k].reset(agent_info=wi, env_info=ei)
            pos += n

    def execute(self):
        """Ask every worker to acquire its next trajectory slot."""
        n_workers = len(self.workers)
        for k in range(n_workers):
            self.workers[k].acquire_slot()

    def get(self, blocking=True):
        """Collect finished slots from all workers.

        Returns None when non-blocking and any worker is still running, or
        when no slot ids were produced; otherwise returns the slots (which
        are erased from the buffer on retrieval).
        """
        if (not blocking):
            for w in range(len(self.workers)):
                if (not self.workers[w].finished()):
                    return None
        buffer_slot_ids = []
        for w in range(len(self.workers)):
            buffer_slot_ids += self.workers[w].get()
        if (len(buffer_slot_ids) == 0):
            return None
        slots = self.buffer.get_single_slots(buffer_slot_ids, erase=True)
        # A zero-length trajectory would indicate a worker bug.
        assert (not slots.lengths.eq(0).any())
        return slots

    def update(self, info):
        """Broadcast an info update (e.g. new policy parameters) to all workers."""
        for w in self.workers:
            w.update_worker(info)

    def close(self):
        """Shut down and release every worker."""
        for w in self.workers:
            w.close()
        for w in self.workers:
            del w
def freeze(args=0):
    """Stop-the-timer handler for the speedcube timer GUI.

    Appends the finished solve time to times.txt, refreshes the scramble,
    and recomputes the 'last 12 solves', average-of-5, average-of-12 and
    total-mean labels.  Operates almost entirely on module-level globals
    (tkinter widgets and running statistics).

    NOTE(review): assumes `timed` holds the just-finished solve time as a
    string, set elsewhere by the timer loop -- confirm upstream.
    """
    global b, timed, count, times_list, p, avg5, avg5_flo, times_list_5_flo, times_list_5, min_5, max_5, min_c, max_c, avg12, avg12_flo, times_list_12_flo, times_list_12, min_12, max_12, min_c12, max_c12, k, mean
    b = (b + 1)
    # Easter egg: congratulate the user for beating the developer's PB.
    if (float(timed) < 14.96):
        messagebox.showinfo('Congratulations!', "You just beat the Developer's Best Time of 14.96 seconds!")
    start.config(state=NORMAL)
    stop.config(state=DISABLED)
    scramble.config(text=scrambler333.get_WCA_scramble())
    # Persist the new time, then re-read the whole solve history.
    times_file = open('times.txt', 'a+')
    times_file.write((timed + '\n'))
    times_file.close()
    times_file = open('times.txt', 'r+')
    times_list = times_file.readlines()
    count = len(times_list)
    times_file.close()
    # Build a comma-separated string of the last (up to) 12 solves,
    # newest first.
    i = 1
    saved_times_str = ''
    if (count < 12):
        while (i <= count):
            if (i == count):
                saved_times_str = (saved_times_str + times_list[(count - i)].strip())
                break
            saved_times_str = ((saved_times_str + times_list[(count - i)].strip()) + ', ')
            i = (i + 1)
    else:
        while (i < 13):
            if (i == 12):
                saved_times_str = (saved_times_str + times_list[(count - i)].strip())
                break
            saved_times_str = ((saved_times_str + times_list[(count - i)].strip()) + ', ')
            i = (i + 1)
    times_count.config(text=('No. of Solves: ' + str(count)))
    saved_times.config(text=('Last 12 Solves: ' + saved_times_str))
    # Average of 5: drop one best and one worst time, mean of the rest
    # (division by 3 accumulates the mean of the 3 remaining times).
    avg5 = ''
    avg5_flo = 0
    min_c = 0
    max_c = 0
    if (count >= 5):
        times_list_5 = times_list[(count - 5):count]
        times_list_5_flo = [0, 1, 2, 3, 4]
        p = 0
        while (p < 5):
            times_list_5_flo[p] = float(times_list_5[p].strip())
            p = (p + 1)
        min_5 = min(times_list_5_flo)
        max_5 = max(times_list_5_flo)
        p = 0
        while (p < 5):
            # Skip exactly one occurrence each of the max and the min.
            if ((times_list_5_flo[p] == max_5) and (max_c == 0)):
                p = (p + 1)
                max_c = 1
                continue
            if ((times_list_5_flo[p] == min_5) and (min_c == 0)):
                p = (p + 1)
                min_c = 1
                continue
            avg5_flo = (avg5_flo + (times_list_5_flo[p] / 3))
            p = (p + 1)
        avg5 = ('%.2f' % avg5_flo)
        ao5.config(text=('Average of 5: ' + avg5))
    # Average of 12: same trimming scheme, mean of the 10 remaining times.
    avg12 = ''
    avg12_flo = 0
    max_c12 = 0
    min_c12 = 0
    if (count >= 12):
        times_list_12 = times_list[(count - 12):count]
        times_list_12_flo = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
        p = 0
        while (p < 12):
            times_list_12_flo[p] = float(times_list_12[p].strip())
            p = (p + 1)
        min_12 = min(times_list_12_flo)
        max_12 = max(times_list_12_flo)
        p = 0
        while (p < 12):
            # Skip exactly one occurrence each of the max and the min.
            if ((times_list_12_flo[p] == max_12) and (max_c12 == 0)):
                p = (p + 1)
                max_c12 = 1
                continue
            if ((times_list_12_flo[p] == min_12) and (min_c12 == 0)):
                p = (p + 1)
                min_c12 = 1
                continue
            avg12_flo = (avg12_flo + (times_list_12_flo[p] / 10))
            p = (p + 1)
        avg12 = ('%.2f' % avg12_flo)
        ao12.config(text=('Average of 12: ' + avg12))
    # Overall mean across every recorded solve.
    k = 0
    mean = 0
    while (k < count):
        mean = (mean + (float(times_list[k].strip()) / count))
        k = (k + 1)
    mean = ('%.2f' % mean)
    meantotal.config(text=('Total Mean: ' + mean))
    root.update()
    start.focus_set()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.