code stringlengths 281 23.7M |
|---|
# NOTE(review): the decorator head was garbled in the source (only the argument
# tuple survived); restored as pytest.mark.parametrize — confirm against history.
@pytest.mark.parametrize('config_type', ['strict'])
def test_option_missing_envs_strict_mode(config, json_config_file_3):
    """Strict mode must raise when a config value references an undefined env var."""
    # Write a config whose value interpolates an env var that is not set.
    with open(json_config_file_3, 'w') as file:
        file.write(json.dumps({'section': {'undefined': '${UNDEFINED}'}}))
    with raises(ValueError, match='Missing required environment variable "UNDEFINED"'):
        config.option.from_json(json_config_file_3)
def extractPerpetuallyperennialCarBlog(item):
    """Map a feed *item* to a release message via its tags.

    Returns None for previews or items without a volume/chapter number,
    False when no known tag matches, otherwise the built release message.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    # BUG FIX: the first tag was written as 'please don't eat me' — an unescaped
    # apostrophe inside a single-quoted string, i.e. a syntax error. Double
    # quotes preserve the intended literal.
    tagmap = [("please don't eat me", 'Please Dont Eat Me', 'translated'), ('the villainess needs a tyrant', 'The Villainess Need A Tyrant', 'translated'), ('a villain is a good match for a tyrant', 'A Villain Is A Good Match For A Tyrant', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def merge_file_level(config: MdParserConfig, topmatter: Dict[(str, Any)], warning: Callable[([MystWarnings, str], None)]) -> MdParserConfig:
    """Merge per-file top-matter overrides into a copy of *config*.

    Unknown fields and values that fail validation are reported via
    *warning* (with MystWarnings.MD_TOPMATTER) and skipped. The input
    *config* is never mutated; a patched copy is returned.
    """
    updates: Dict[(str, Any)] = {}
    myst = topmatter.get('myst', {})
    if (not isinstance(myst, dict)):
        warning(MystWarnings.MD_TOPMATTER, f"'myst' key not a dict: {type(myst)}")
    else:
        updates = myst
    # Legacy top-level keys are still honoured but flagged as deprecated.
    if ('html_meta' in topmatter):
        warning(MystWarnings.MD_TOPMATTER, "top-level 'html_meta' key is deprecated, place under 'myst' key instead")
        updates['html_meta'] = topmatter['html_meta']
    if ('substitutions' in topmatter):
        warning(MystWarnings.MD_TOPMATTER, "top-level 'substitutions' key is deprecated, place under 'myst' key instead")
        updates['substitutions'] = topmatter['substitutions']
    new = config.copy()
    # Map field name -> (current value, field descriptor) for lookup below.
    fields = {name: (value, field) for (name, value, field) in config.as_triple()}
    for (name, value) in updates.items():
        if (name not in fields):
            warning(MystWarnings.MD_TOPMATTER, f'Unknown field: {name}')
            continue
        (old_value, field) = fields[name]
        try:
            validate_field(new, field, value)
        except Exception as exc:
            # Invalid value: warn and keep the existing setting.
            warning(MystWarnings.MD_TOPMATTER, str(exc))
            continue
        # Fields tagged 'merge_topmatter' are dict-merged (new keys win)
        # instead of wholesale replaced.
        if field.metadata.get('merge_topmatter'):
            value = {**old_value, **value}
        setattr(new, name, value)
    return new
def extractLazycatnovelsWordpressCom(item):
    """Build a release message for a lazycatnovels feed *item* based on its tags.

    Returns None for previews or unnumbered posts, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [('Mighty Female Official', 'Mighty Female Official', 'translated'), ("Xiaobei's Life as a Proud and Respected Woman", "Xiaobei's Life as a Proud and Respected Woman", 'translated'), ("the world's number one den of iniquity", "The World's No. 1 Den of Iniquity", 'translated'), ("the world's number one brothel", "The World's No. 1 Den of Iniquity", 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series, kind in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
class WorkflowExecutionStatus(betterproto.Enum):
    """Protobuf-backed enum of workflow execution states (generated-style code).

    Values mirror the wire-format integers; do not renumber.
    """
    WORKFLOW_EXECUTION_STATUS_UNSPECIFIED = 0
    WORKFLOW_EXECUTION_STATUS_RUNNING = 1
    WORKFLOW_EXECUTION_STATUS_COMPLETED = 2
    WORKFLOW_EXECUTION_STATUS_FAILED = 3
    WORKFLOW_EXECUTION_STATUS_CANCELED = 4
    WORKFLOW_EXECUTION_STATUS_TERMINATED = 5
    WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW = 6
    WORKFLOW_EXECUTION_STATUS_TIMED_OUT = 7
class ScrubberEditor(BasicEditorFactory):
    """Editor factory for a 'scrubber' control (drag to change a value).

    Trait attributes configure the widget's range, step and colours; the
    concrete editor class is resolved lazily from the active toolkit.
    """
    # Resolved on demand via _get_klass (Property trait).
    klass = Property()
    low = Float()
    high = Float()
    increment = Float()
    alignment = Alignment('center')
    color = Color(None)
    hover_color = Color(None)
    active_color = Color(None)
    border_color = Color(None)
    text_color = Color('black')
    def _get_klass(self):
        # Look up the toolkit-specific _ScrubberEditor implementation.
        return toolkit_object('scrubber_editor:_ScrubberEditor')
def test_transaction_request_response_data(django_elasticapm_client, client):
    """A traced Django request must record request/response context on the transaction."""
    client.cookies = SimpleCookie({'foo': 'bar'})
    with override_settings(**middleware_setting(django.VERSION, ['elasticapm.contrib.django.middleware.TracingMiddleware'])):
        client.get(reverse('elasticapm-no-error'))
    transactions = django_elasticapm_client.events[TRANSACTION]
    assert (len(transactions) == 1)
    transaction = transactions[0]
    assert (transaction['result'] == 'HTTP 2xx')
    assert ('request' in transaction['context'])
    request = transaction['context']['request']
    assert (request['method'] == 'GET')
    assert ('headers' in request)
    headers = request['headers']
    # Leading-space variant accommodates differing cookie serialisation
    # across Django versions.
    assert (headers['cookie'] in (' foo=bar', 'foo=bar'))
    env = request['env']
    assert ('SERVER_NAME' in env), env.keys()
    assert (env['SERVER_NAME'] == 'testserver')
    assert ('SERVER_PORT' in env), env.keys()
    assert (env['SERVER_PORT'] == '80')
    assert ('response' in transaction['context'])
    response = transaction['context']['response']
    assert (response['status_code'] == 200)
    # Header-name casing differs between Django versions; accept either.
    if ('my-header' in response['headers']):
        assert (response['headers']['my-header'] == 'foo')
    else:
        assert (response['headers']['My-Header'] == 'foo')
def extractKscansWordpressCom(item):
    """Translate a Kscans feed *item* into a release message via its tags.

    Returns None for previews or unnumbered posts, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    hit = next(((series, kind) for tag, series, kind in tagmap if tag in item['tags']), None)
    if hit is None:
        return False
    series, kind = hit
    return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
def test_insert_aliased_variable_more_dominators_same_block():
    """InsertMissingDefinitions must interleave aliased definitions/relations
    at the expected positions within both basic blocks of the fixture graph."""
    (list_instructions, aliased_variables, task) = construct_graph_aliased(2)
    InsertMissingDefinitions().run(task)
    # Expected layout: original instructions with Assignment/Relation nodes
    # spliced in wherever an aliased variable version becomes live.
    assert ([node.instructions for node in task.graph.nodes] == [((([list_instructions[0], Assignment(aliased_variables[0], Variable('x', Integer.int32_t(), 0, is_aliased=True))] + list_instructions[1:3]) + [Relation(aliased_variables[1], aliased_variables[0])]) + list_instructions[3:5]), (((((((list_instructions[5:8] + [Assignment(aliased_variables[2], aliased_variables[1])]) + list_instructions[8:10]) + [Relation(aliased_variables[3], aliased_variables[2])]) + list_instructions[10:13]) + [Assignment(aliased_variables[4], aliased_variables[3])]) + list_instructions[13:15]) + [Relation(aliased_variables[5], aliased_variables[4])])])
def test_duplicated_param_names():
    """Route construction must reject paths that reuse a path-parameter name."""
    with pytest.raises(ValueError, match='Duplicated param name id at path /{id}/{id}'):
        Route('/{id}/{id}', user)
    # Multiple duplicated names are all reported, comma-separated.
    with pytest.raises(ValueError, match='Duplicated param names id, name at path /{id}/{name}/{id}/{name}'):
        Route('/{id}/{name}/{id}/{name}', user)
# NOTE(review): the decorator head was garbled in the source (only the target
# string survived); restored as mock.patch — confirm against history.
@mock.patch('fides.api.service.storage.storage_uploader_service.upload_to_s3')
def test_uploader_s3_success_automatic_auth(mock_upload_to_s3: Mock, db: Session, privacy_request: PrivacyRequest) -> None:
    """upload() with an 'automatic' auth S3 config must delegate to upload_to_s3
    with the configured bucket and a request-id based file name."""
    request_id = privacy_request.id
    mock_config = {'name': 'test dest', 'key': 'test_dest_key', 'type': StorageType.s3.value, 'details': {'auth_method': S3AuthMethod.AUTOMATIC.value, 'bucket': 'some-bucket', 'naming': FileNaming.request_id.value, 'max_retries': 10}}
    storage_config = StorageConfig.create(db, data=mock_config)
    # NOTE(review): this line was truncated in the source (unterminated f-string);
    # the value is never asserted on, so any presigned-URL-like string works.
    mock_upload_to_s3.return_value = f'https://some-bucket.s3.amazonaws.com/{request_id}.json'
    upload_data = {'phone': ''}
    upload(db=db, privacy_request=privacy_request, data=upload_data, storage_key=mock_config['key'])
    mock_upload_to_s3.assert_called_with(None, upload_data, mock_config['details'][StorageDetails.BUCKET.value], f'{request_id}.json', 'json', privacy_request, S3AuthMethod.AUTOMATIC.value, None, None)
    # Clean up the row created for this test.
    storage_config.delete(db)
def test2():
    """Custom atom typing: H_MERGE hydrogens get merged, HD/HC survive as typed atoms."""
    # Minimal SMARTS-based typer: plain hydrogens are tagged H_MERGE (to be
    # merged away), while HC/HD cover specific hydrogen environments.
    typer = {'reduced set': [{'smarts': '[#1]', 'atype': 'H_MERGE'}, {'smarts': '[#1][#6]([#7,#8])[#7,#8]', 'atype': 'HC'}, {'smarts': '[#1][#8]', 'atype': 'HD'}, {'smarts': '[C]', 'atype': 'C'}, {'smarts': '[c]', 'atype': 'A'}, {'smarts': '[#7]', 'atype': 'NA'}, {'smarts': '[#8]', 'atype': 'OA'}]}
    preparator = MoleculePreparation(input_atom_params=typer, load_atom_params=None, merge_these_atom_types=('H', 'H_MERGE'))
    mol = Chem.MolFromSmiles('c1nc(CO)co1')
    mol = Chem.AddHs(mol)
    rdDistGeom.EmbedMolecule(mol)
    setups = preparator.prepare(mol)
    (pdbqt_string, is_ok, error_msg) = PDBQTWriterLegacy.write_string(setups[0])
    count_atoms = 0
    count_HD = 0
    count_HC = 0
    for line in pdbqt_string.split('\n'):
        if (line.startswith('ATOM') or line.startswith('HETATM')):
            count_atoms += 1
            # Columns 77-79 of a PDBQT ATOM record carry the AutoDock atom type.
            count_HD += int((line[77:79] == 'HD'))
            count_HC += int((line[77:79] == 'HC'))
    # One hydroxyl HD, one HC, and 9 atoms total after merging plain hydrogens.
    assert (count_HD == 1)
    assert (count_HC == 1)
    assert (count_atoms == 9)
class OptionPlotoptionsBarTooltipDatetimelabelformats(Options):
    """Tooltip date-time label formats per time unit (Highcharts bar plot options).

    BUG FIX: every getter/setter pair here shared a name with no decorators, so
    each second ``def`` silently overwrote the first and the getters were lost.
    Restored the evident ``@property`` / ``@<name>.setter`` pattern.
    """

    @property
    def day(self):
        """Format for day resolution; defaults to '%A, %e %b %Y'."""
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        """Format for hour resolution; defaults to '%A, %e %b, %H:%M'."""
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        """Format for millisecond resolution; defaults to '%A, %e %b, %H:%M:%S.%L'."""
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        """Format for minute resolution; defaults to '%A, %e %b, %H:%M'."""
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        """Format for month resolution; defaults to '%B %Y'."""
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        """Format for second resolution; defaults to '%A, %e %b, %H:%M:%S'."""
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        """Format for week resolution; defaults to 'Week from %A, %e %b %Y'."""
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        """Format for year resolution; defaults to '%Y'."""
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)
('INT')
class IntOp(Node):
    # NOTE(review): the line above appears to be the argument of a garbled
    # decorator registering this node for the 'INT' type — confirm upstream.
    def forward(self, x, final=None, **kwargs):
        """Validate a (partial) decoded value *x* as an integer.

        Returns None when undecidable, True/False once decidable.
        NOTE(review): the empty character-class literals ``(c in '')`` below
        look truncated — presumably ``'0123456789'`` — TODO confirm; as
        written, any non-empty x fails the membership test.
        """
        if (x is None):
            return None
        if (x == ''):
            return None
        if ((final is not None) and all(((f == 'fin') for f in final))):
            return True
        # A single leading space is tolerated (tokenisers often emit ' 123').
        if x.startswith(' '):
            x = x[1:]
        if (not all([(c in '') for c in x])):
            return False
        else:
            return True
    def follow(self, v, **kwargs):
        """Produce the follow-map of admissible next tokens for partial value *v*."""
        if (v is None):
            return None
        has_next_token = (v != strip_next_token(v))
        v = strip_next_token(v)
        context = kwargs.get('context', None)
        # Compact masks enumerate common numeric tokens explicitly; otherwise a
        # regex-based token set is used.
        if context.runtime.prefers_compact_mask:
            number_tokens = tset('1', '2', '3', '4', '5', '6', '7', '8', '9', 'G2', 'G3', 'G4', 'G5', 'G0', 'G6', 'G7', 'G8', 'G9', '10', '12', '50', '19', '11', '20', '30', '15', '14', '16', '13', '25', '18', '17', '24', '80', '40', '22', '60', '23', '29', '27', '26', '28', '99', '33', '70', '45', '35', '64', '75', '21', '38', '44', '36', '32', '39', '34', '37', '48', '66', '55', '47', '49', '65', '68', '31', '67', '59', '77', '58', '69', '88', '46', '57', '43', '42', '78', '79', '90', '95', '41', '56', '54', '98', '76', '52', '53', '51', '86', '74', '89', '72', '73', '96', '71', '63', '62', '85', '61', '97', '84', '87', '94', '92', '83', '93', '91', '82', '81', exact=True, name='number_tokens')
        else:
            number_tokens = tset('[ 1-9][0-9]*$', regex=True, name='full_number_tokens')
        number_cont_tokens = tset('[1-9][0-9]*$', regex=True, name='number_continuation_tokens')
        if (not has_next_token):
            return fmap(('eos', (len(v.strip()) != 0)), ('*', self.forward(v)))
        if ((not all([(c.strip() in ',') for c in v])) and (len(v.strip()) > 0)):
            return fmap(('*', False))
        if model_info(context.runtime.model_identifier).is_chat_model:
            # NOTE(review): same truncated-looking empty character class here.
            if (not all([(c in '') for c in v])):
                return fmap(('*', False))
            else:
                return fmap(('*', True))
        if (len(v) == 0):
            return fmap((number_tokens, True), ('*', False))
        else:
            if (len(v.strip()) == 0):
                return fmap((number_cont_tokens, True), ('eos', False), ('*', False))
            return fmap(('*', True))
    def postprocess_var(self, var_name):
        # Only postprocess the variable this op directly feeds.
        return (var_name == self.predecessors[0].name)
    def postprocess(self, operands, raw):
        """Convert the raw decoded text into an int rewrite/value pair."""
        # NOTE(review): filtering on membership in '' drops every character as
        # written; presumably meant to keep digits only — TODO confirm.
        raw = ''.join([c for c in raw if (c in '')])
        value = int(raw)
        return (postprocessed_rewrite(str(value)), postprocessed_value(value))
    def postprocess_order(self, other, **kwargs):
        # Run after StopAtOp; no ordering preference otherwise.
        if isinstance(other, StopAtOp):
            return 'after'
        else:
            return 0
    def final(self, x, operands=None, result=None, **kwargs):
        # A definitive False with an 'inc'(omplete) head is still final.
        if ((result == False) and (x[0] == 'inc')):
            return 'fin'
        return super().final(x, operands=operands, result=result, **kwargs)
class Solution():
def longestPalindrome(self, s: str) -> str:
def helper(s, l, r):
while ((l >= 0) and (r < len(s)) and (s[l] == s[r])):
l -= 1
r += 1
return s[(l + 1):r]
res = ''
for i in range(len(s)):
tmp = helper(s, i, i)
if (len(tmp) > len(res)):
res = tmp
tmp = helper(s, i, (i + 1))
if (len(tmp) > len(res)):
res = tmp
return res |
def fortios_log_syslogd3(data, fos):
    """Dispatch the log.syslogd3 filter configuration and report status.

    Returns (changed_failed, changed, response, diff) in the shape the
    FortiOS module runner expects.
    """
    fos.do_member_operation('log.syslogd3', 'filter')
    if data['log_syslogd3_filter']:
        resp = log_syslogd3_filter(data, fos)
    else:
        # NOTE(review): fail_json normally exits the module; if it ever
        # returns, 'resp' below would be unbound — confirm intended.
        fos._module.fail_json(msg=('missing task body: %s' % 'log_syslogd3_filter'))
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
class OptionSeriesVariwideSonificationContexttracksMappingNoteduration(Options):
    """Note-duration mapping options for variwide sonification context tracks.

    BUG FIX: getter/setter pairs shared a name with no decorators, so each
    second ``def`` overwrote the first and the getters were lost. Restored the
    evident ``@property`` / ``@<name>.setter`` pattern.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the value; defaults to None."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the duration maps to; defaults to None."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range; defaults to None."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range; defaults to None."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the mapping is computed within; defaults to None."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def mock_core_dumps_pass(self, cmd):
    """Fake command runner returning 'core dumps disabled' results.

    Recognises limits.conf and sysctl queries; anything else is an error.
    Returns a SimpleNamespace with returncode/stderr/stdout like a completed
    process.
    """
    if ('limits.conf' in cmd):
        stdout = ['* hard core 0']
        stderr = ['']
        returncode = 0
    elif ('sysctl' in cmd):
        stdout = ['fs.suid_dumpable = 0']
        stderr = ['']
        returncode = 0
    else:
        # BUG FIX: previously an unrecognised command fell through with the
        # locals unbound and raised UnboundLocalError; fail with a clear
        # message instead.
        raise ValueError('mock_core_dumps_pass: unexpected command %r' % (cmd,))
    return SimpleNamespace(returncode=returncode, stderr=stderr, stdout=stdout)
def extractThenewdevilBlogspotCom(item):
    """Build a release message for a thenewdevil feed *item* from its tags.

    Returns None for previews or unnumbered posts, False when no tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (vol or chp):
        return None
    for tag, series, kind in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
def filter_firewall_vip_data(json):
    """Project *json* onto the known firewall.vip option names.

    Drops invalid fields first, then keeps only recognised, non-None keys.
    NOTE(review): the option list was garbled in the source (runs of broken
    string literals around 'h3_support'..'id' and 'ssl_hsts_include_subdomains'
    ..'ssl_max_version'); the missing names were reconstructed from the
    standard FortiOS firewall_vip parameter set — confirm against the module
    documentation.
    """
    option_list = ['add_nat46_route', 'arp_reply', 'color', 'comment', 'dns_mapping_ttl', 'extaddr', 'extintf', 'extip', 'extport', 'gratuitous_arp_interval', 'h2_support', 'h3_support', 'http_cookie_age', 'http_cookie_domain', 'http_cookie_domain_from_host', 'http_cookie_generation', 'http_cookie_path', 'http_cookie_share', 'http_ip_header', 'http_ip_header_name', 'http_multiplex', 'http_redirect', 'https_cookie_secure', 'id', 'ipv6_mappedip', 'ipv6_mappedport', 'ldb_method', 'mapped_addr', 'mappedip', 'mappedport', 'max_embryonic_connections', 'monitor', 'name', 'nat_source_vip', 'nat44', 'nat46', 'outlook_web_access', 'persistence', 'portforward', 'portmapping_type', 'protocol', 'quic', 'realservers', 'server_type', 'service', 'src_filter', 'srcintf_filter', 'ssl_accept_ffdhe_groups', 'ssl_algorithm', 'ssl_certificate', 'ssl_cipher_suites', 'ssl_client_fallback', 'ssl_client_rekey_count', 'ssl_client_renegotiation', 'ssl_client_session_state_max', 'ssl_client_session_state_timeout', 'ssl_client_session_state_type', 'ssl_dh_bits', 'ssl_hpkp', 'ssl_hpkp_age', 'ssl_hpkp_backup', 'ssl_hpkp_include_subdomains', 'ssl_hpkp_primary', 'ssl_hpkp_report_uri', 'ssl_hsts', 'ssl_hsts_age', 'ssl_hsts_include_subdomains', 'ssl_http_location_conversion', 'ssl_http_match_host', 'ssl_max_version', 'ssl_min_version', 'ssl_mode', 'ssl_pfs', 'ssl_send_empty_frags', 'ssl_server_algorithm', 'ssl_server_cipher_suites', 'ssl_server_max_version', 'ssl_server_min_version', 'ssl_server_renegotiation', 'ssl_server_session_state_max', 'ssl_server_session_state_timeout', 'ssl_server_session_state_type', 'status', 'type', 'uuid', 'weblogic_server', 'websphere_server']
    json = remove_invalid_fields(json)
    dictionary = {}
    for attribute in option_list:
        if ((attribute in json) and (json[attribute] is not None)):
            dictionary[attribute] = json[attribute]
    return dictionary
class ClassificationDummyMetric(ThresholdClassificationMetric[ClassificationDummyMetricResults]):
    """Baseline ('dummy') classification quality: metrics of a stratified
    random predictor, optionally corrected for a probability threshold or
    top-k selection, for comparison against the real model's quality."""
    _quality_metric: ClassificationQualityMetric
    def __init__(self, probas_threshold: Optional[float]=None, k: Optional[int]=None, options: AnyOptions=None):
        self.probas_threshold = probas_threshold
        self.k = k
        super().__init__(probas_threshold, k, options)
        self._quality_metric = ClassificationQualityMetric()
    def calculate(self, data: InputData) -> ClassificationDummyMetricResults:
        """Compute dummy-classifier metrics on current (and optionally reference) data."""
        quality_metric: Optional[ClassificationQualityMetric]
        dataset_columns = process_columns(data.current_data, data.column_mapping)
        target_name = dataset_columns.utility_columns.target
        prediction_name = dataset_columns.utility_columns.prediction
        if (target_name is None):
            raise ValueError("The column 'target' should present")
        if (prediction_name is None):
            # Without model predictions only the dummy baseline is computed.
            quality_metric = None
        else:
            quality_metric = self._quality_metric
        # Stratified random predictions drawn from the target label ratios
        # (fixed seed for determinism).
        labels_ratio = data.current_data[target_name].value_counts(normalize=True)
        np.random.seed(0)
        dummy_preds = np.random.choice(labels_ratio.index, data.current_data.shape[0], p=labels_ratio)
        dummy_preds = pd.Series(dummy_preds)
        prediction: Optional[PredictionData] = None
        if (prediction_name is not None):
            (target, prediction) = self.get_target_prediction_data(data.current_data, data.column_mapping)
            labels = prediction.labels
        else:
            target = data.current_data[target_name]
            labels = list(target.unique())
        current_matrix = calculate_matrix(target, dummy_preds, labels)
        current_dummy = calculate_metrics(data.column_mapping, current_matrix, target, PredictionData(predictions=dummy_preds, prediction_probas=None, labels=labels))
        metrics_matrix = ClassificationReport.create(target, dummy_preds).classes
        threshold = 0.5
        # Binary + probabilistic case: correct the dummy metrics for the
        # effective decision threshold (explicit or derived from top-k).
        if ((prediction is not None) and (prediction.prediction_probas is not None) and (len(labels) == 2)):
            if ((self.probas_threshold is not None) or (self.k is not None)):
                if (self.probas_threshold is not None):
                    threshold = self.probas_threshold
                if (self.k is not None):
                    threshold = k_probability_threshold(prediction.prediction_probas, self.k)
            current_dummy = self.correction_for_threshold(current_dummy, threshold, target, labels, prediction.prediction_probas.shape)
            if (threshold == 1.0):
                coeff_recall = 1.0
            else:
                coeff_recall = min(1.0, (0.5 / (1 - threshold)))
            coeff_precision = min(1.0, ((1 - threshold) / 0.5))
            # Per-class metrics for the negative label, scaled by the same
            # threshold-correction coefficients.
            neg_label_precision = (precision_score(target, dummy_preds, pos_label=labels[1]) * coeff_precision)
            neg_label_recall = (recall_score(target, dummy_preds, pos_label=labels[1]) * coeff_recall)
            f1_label2_value = (((2 * neg_label_precision) * neg_label_recall) / (neg_label_precision + neg_label_recall))
            metrics_matrix = {str(labels[0]): ClassMetric(precision=current_dummy.precision, recall=current_dummy.recall, **{'f1': current_dummy.f1}), str(labels[1]): ClassMetric(precision=neg_label_precision, recall=neg_label_recall, **{'f1': f1_label2_value})}
        if ((prediction is not None) and (prediction.prediction_probas is not None)):
            # Uniform-probability dummy gives the baseline log-loss; ROC AUC of
            # a random predictor is 0.5 by definition.
            binaraized_target = (target.astype(str).values.reshape((- 1), 1) == list(labels)).astype(int)
            dummy_prediction = np.full(prediction.prediction_probas.shape, (1 / prediction.prediction_probas.shape[1]))
            current_dummy.log_loss = log_loss(binaraized_target, dummy_prediction)
            current_dummy.roc_auc = 0.5
        by_reference_dummy: Optional[DatasetClassificationQuality] = None
        if (data.reference_data is not None):
            # Repeat the dummy computation using reference-data label ratios
            # (different seed; evaluation still on current data).
            labels_ratio = data.reference_data[target_name].value_counts(normalize=True)
            np.random.seed(1)
            dummy_preds = np.random.choice(labels_ratio.index, data.current_data.shape[0], p=labels_ratio)
            dummy_preds = pd.Series(dummy_preds)
            if (prediction_name is not None):
                (target, prediction) = self.get_target_prediction_data(data.current_data, data.column_mapping)
                labels = prediction.labels
            else:
                target = data.current_data[target_name]
                labels = list(target.unique())
            current_matrix = calculate_matrix(target, dummy_preds, labels)
            by_reference_dummy = calculate_metrics(data.column_mapping, current_matrix, target, PredictionData(predictions=dummy_preds, prediction_probas=None, labels=labels))
            if ((prediction is not None) and (prediction.prediction_probas is not None) and (len(labels) == 2)):
                by_reference_dummy = self.correction_for_threshold(by_reference_dummy, threshold, target, labels, prediction.prediction_probas.shape)
            if ((prediction is not None) and (prediction.prediction_probas is not None)):
                binaraized_target = (target.astype(str).values.reshape((- 1), 1) == list(labels)).astype(int)
                dummy_prediction = np.full(prediction.prediction_probas.shape, (1 / prediction.prediction_probas.shape[1]))
                if (by_reference_dummy is not None):
                    by_reference_dummy.log_loss = log_loss(binaraized_target, dummy_prediction)
                    by_reference_dummy.roc_auc = 0.5
        model_quality: Optional[DatasetClassificationQuality] = None
        if (quality_metric is not None):
            model_quality = quality_metric.get_result().current
        return ClassificationDummyMetricResults(dummy=current_dummy, by_reference_dummy=by_reference_dummy, model_quality=model_quality, metrics_matrix=metrics_matrix)
    def correction_for_threshold(self, dummy_results: DatasetClassificationQuality, threshold: float, target: pd.Series, labels: list, probas_shape: tuple):
        """Scale dummy precision/recall (and rates) for a non-0.5 decision threshold."""
        if (threshold == 1.0):
            coeff_precision = 1.0
        else:
            coeff_precision = min(1.0, (0.5 / (1 - threshold)))
        coeff_recall = min(1.0, ((1 - threshold) / 0.5))
        tpr: Optional[float] = None
        tnr: Optional[float] = None
        fpr: Optional[float] = None
        fnr: Optional[float] = None
        # Rates are corrected only when all four are available.
        if ((dummy_results.tpr is not None) and (dummy_results.tnr is not None) and (dummy_results.fpr is not None) and (dummy_results.fnr is not None)):
            tpr = (dummy_results.tpr * coeff_recall)
            tnr = (dummy_results.tnr * coeff_precision)
            fpr = (dummy_results.fpr * coeff_recall)
            fnr = (dummy_results.fnr * coeff_precision)
        return DatasetClassificationQuality(accuracy=dummy_results.accuracy, precision=(dummy_results.precision * coeff_precision), recall=(dummy_results.recall * coeff_recall), f1=(((((2 * dummy_results.precision) * coeff_precision) * dummy_results.recall) * coeff_recall) / ((dummy_results.precision * coeff_precision) + (dummy_results.recall * coeff_recall))), roc_auc=0.5, log_loss=None, tpr=tpr, tnr=tnr, fpr=fpr, fnr=fnr)
# NOTE(review): the decorator head was garbled in the source (only the argument
# survived); restored as pytest.fixture — confirm against history.
@pytest.fixture(scope='function')
def fresh_log_file():
    """Yield the rally log path with a clean file, restoring prior contents after.

    Any pre-existing log is moved aside before the test and appended back
    afterwards, so the test observes only its own log output.
    """
    cfg = ConfigFile(config_name=None)
    log_file = os.path.join(cfg.rally_home, 'logs', 'rally.log')
    if os.path.exists(log_file):
        bak = os.path.join(tempfile.mkdtemp(), 'rally.log')
        shutil.move(log_file, bak)
        (yield log_file)
        # Append whatever the test logged onto the preserved original,
        # then move the combined file back into place.
        with open(log_file) as src:
            with open(bak, 'a') as dst:
                dst.write(src.read())
        shutil.move(bak, log_file)
    else:
        (yield log_file)
class Buffer(object):
    """Collect stringified items and periodically flush them, space-joined,
    into an in-memory StringIO.

    NOTE(review): flush() writes ' '.join(buffer) with no trailing separator,
    so items that land on opposite sides of a flush boundary are concatenated
    without a space — this matches the original join behaviour; confirm it is
    intended.
    """
    def __init__(self, str_buffer_size=1000):
        self.i = 0
        self.str_buffer = []
        self.file_str = StringIO()
        # Bound methods cached as micro-optimisation for hot append paths.
        self.file_str_write = self.file_str.write
        self.str_buffer_append = self.str_buffer.append
        self.str_buffer_size = str_buffer_size
    def flush(self):
        """Write buffered items to the underlying StringIO and reset the buffer."""
        self.file_str_write(' '.join(self.str_buffer))
        # BUG FIX: rebinding self.str_buffer to a fresh list left the cached
        # bound method self.str_buffer_append pointing at the old list, so
        # everything appended after the first flush was silently dropped.
        # Clearing in place keeps the cached bound method valid.
        del self.str_buffer[:]
        self.i = 0
    def append(self, data):
        """Buffer str(data); auto-flush once the buffer reaches its size limit."""
        self.str_buffer_append(str(data))
        # BUG FIX: the counter was incremented on a local copy (self_i) and
        # never written back, so the size-triggered flush never fired.
        self.i += 1
        if (self.i >= self.str_buffer_size):
            self.flush()
    def getvalue(self):
        """Flush any remaining items and return the accumulated string."""
        self.flush()
        return self.file_str.getvalue()
def extractNanodesuBibliaKoshodouNoJikenTechou(item):
    """Build a release message for a WATTT-tagged feed *item*.

    Returns None for previews or unnumbered posts, False when untagged.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
class VideoMedia(DatClass):
    """Declarative record of a video asset's metadata (DatClass-backed).

    All fields default to None; DatClass presumably populates them from the
    source payload — confirm against the DatClass base implementation.
    """
    time: str = None
    city: str = None
    country: str = None
    address_line: str = None
    district: str = None
    duration: str = None
    height: int = None
    location: str = None
    province: str = None
    township: str = None
    # Per-stream details for audio and video tracks.
    video_media_audio_stream: List[VideoMediaAudioStream] = None
    video_media_video_stream: List[VideoMediaVideoStream] = None
    width: int = None
    image_tags: List[ImageTag] = None
class FormParser(BaseParser):
    """Parser for HTML form payloads (application/x-www-form-urlencoded)."""
    media_type = 'application/x-www-form-urlencoded'
    def parse(self, stream, media_type=None, parser_context=None):
        """Read the url-encoded body from *stream* and return it as a QueryDict.

        The encoding comes from the parser context, falling back to the
        project-wide DEFAULT_CHARSET setting.
        """
        parser_context = (parser_context or {})
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        return QueryDict(stream.read(), encoding=encoding)
def test_no_cse_for_listoperation():
    """Common-subexpression elimination must not touch Phi (list) operations,
    even when identical subexpressions exceed the threshold."""
    cfg = ControlFlowGraph()
    # Diamond CFG: branch at node 0, join at node 3 holding two identical-arg
    # Phi functions plus a Return that uses both.
    cfg.add_nodes_from((nodes := [BasicBlock(0, [Branch(Condition(OperationType.equal, [expr1, Constant(1)]))]), BasicBlock(1, []), BasicBlock(2, []), BasicBlock(3, instructions=[Phi(Variable('a', ssa_label=1), [Variable('x', ssa_label=0), Variable('y', ssa_label=0)]), Phi(Variable('b', ssa_label=1), [Variable('x', ssa_label=0), Variable('y', ssa_label=0)]), Return([BinaryOperation(OperationType.plus, [Variable('a', ssa_label=1), Variable('b', ssa_label=1)])])])]))
    cfg.add_edges_from([TrueCase(nodes[0], nodes[1]), FalseCase(nodes[0], nodes[2]), UnconditionalEdge(nodes[1], nodes[3]), UnconditionalEdge(nodes[2], nodes[3])])
    old_inst = [i.copy() for i in cfg.instructions]
    _run_cse(cfg, _generate_options(threshold=2))
    # CSE must leave every instruction untouched.
    assert (old_inst == list(cfg.instructions))
def add_binary_feature(feature, n1, n2):
    """Attach left/right relational features to both operand nodes.

    Register-like nodes (GivReg/Reg/Flag) are described by their coarse form,
    others by their fine form; IndirectOffset nodes additionally receive
    coarse+fine feature variants.
    """
    if (type(n1) in (GivReg, Reg, Flag)):
        n1_info = coarse(n1)
    else:
        n1_info = fine(n1)
    if (type(n2) in (GivReg, Reg, Flag)):
        n2_info = coarse(n2)
    else:
        n2_info = fine(n2)
    # 'L' features live on n1 and describe n2 relative to it.
    if isinstance(n1, Reg):
        n1.features.add(('L' + feature.format(coarse(n1), n2_info)))
        if isinstance(n2, IndirectOffset):
            # NOTE(review): coarse(n2_info) re-applies coarse to an already
            # computed description (elsewhere coarse(n2) is used) — confirm.
            n1.features.add(('L' + feature.format(coarse(n1), coarse(n2_info))))
    elif isinstance(n1, IndirectOffset):
        n1.features.add(('L' + feature.format(coarse(n1), n2_info)))
        n1.features.add(('L' + feature.format(fine(n1), n2_info)))
        if isinstance(n2, IndirectOffset):
            n1.features.add(('L' + feature.format(coarse(n1), coarse(n2_info))))
            n1.features.add(('L' + feature.format(fine(n1), coarse(n2_info))))
    # 'R' features live on n2 and describe n1 relative to it.
    if isinstance(n2, Reg):
        n2.features.add(('R' + feature.format(n1_info, coarse(n2))))
        if isinstance(n1, IndirectOffset):
            n2.features.add(('R' + feature.format(coarse(n1), coarse(n2))))
    elif isinstance(n2, IndirectOffset):
        n2.features.add(('R' + feature.format(n1_info, coarse(n2))))
        n2.features.add(('R' + feature.format(n1_info, fine(n2))))
        if isinstance(n1, IndirectOffset):
            # NOTE(review): the two adds below are byte-identical duplicates
            # (harmless on a set); possibly one was meant to use fine(n2).
            n2.features.add(('R' + feature.format(coarse(n1), coarse(n2))))
            n2.features.add(('R' + feature.format(coarse(n1), coarse(n2))))
class CFG_Context():
    """Wiring context used while building a control-flow graph: the entry
    vertex plus lists of dangling exits, loop breaks and loop continues."""

    def __init__(self, v_entry=None):
        # The entry vertex, when given, must be a Vertex_Root.
        assert v_entry is None or isinstance(v_entry, Vertex_Root)
        self.v_entry = v_entry
        self.l_exits = []
        self.l_loop_breaks = []
        self.l_loop_continues = []

    def merge_loops(self, other):
        """Absorb *other*'s pending loop breaks and continues into this context."""
        assert isinstance(other, CFG_Context)
        self.l_loop_breaks.extend(other.l_loop_breaks)
        self.l_loop_continues.extend(other.l_loop_continues)

    def merge_exits(self, other):
        """Absorb *other*'s dangling exits into this context."""
        assert isinstance(other, CFG_Context)
        self.l_exits.extend(other.l_exits)
class hash_algorithm(bsn_tlv):
    """Generated OpenFlow BSN TLV carrying a hash-algorithm selector.

    Wire format: !H type (145), !H total length, !H value.
    NOTE(review): pack() joins struct outputs with ''.join — this is
    Python-2-era generated code where struct.pack returns str.
    """
    type = 145
    def __init__(self, value=None):
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return
    def pack(self):
        """Serialise the TLV; the length field is patched in after packing."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!H', self.value))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        # Static-style factory (no self) per the generated-loxi convention:
        # read type/length, then parse the value from the length-bounded slice.
        obj = hash_algorithm()
        _type = reader.read('!H')[0]
        assert (_type == 145)
        _length = reader.read('!H')[0]
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.value = reader.read('!H')[0]
        return obj
    def __eq__(self, other):
        # Equal iff same concrete type and same value.
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True
    def pretty_print(self, q):
        """Render the TLV via pretty-printer *q*, naming known values."""
        q.text('hash_algorithm {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                value_name_map = {0: 'OFP_BSN_HASH_ALGORITHM_CRC16XOR8', 1: 'OFP_BSN_HASH_ALGORITHM_CRC16XOR4', 2: 'OFP_BSN_HASH_ALGORITHM_CRC16XOR2', 3: 'OFP_BSN_HASH_ALGORITHM_CRC16XOR1', 4: 'OFP_BSN_HASH_ALGORITHM_CRC16', 5: 'OFP_BSN_HASH_ALGORITHM_XOR16', 6: 'OFP_BSN_HASH_ALGORITHM_CRC16CCITT', 7: 'OFP_BSN_HASH_ALGORITHM_CRC32LO', 8: 'OFP_BSN_HASH_ALGORITHM_CRC32HI'}
                if (self.value in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.value], self.value)))
                else:
                    q.text(('%#x' % self.value))
            q.breakable()
        q.text('}')
class TestExtractReadsForLane(unittest.TestCase):
    """extract_reads_for_lane must yield only reads from the requested lane,
    for both plain and gzipped FASTQ input."""
    def setUp(self):
        self.wd = tempfile.mkdtemp()
        # Fixture FASTQ data for lanes 2 (3 reads) and 8 (2 reads).
        # NOTE(review): the read-header prefixes look truncated in the source
        # (lines start with ':43:' / '\:43:' instead of a full '@MACHINE:RUN:'
        # header) — confirm against the original fixture.
        self.fastq_data_l2 = u':43:HL3LWBBXX:2:1101:21440:1121 1:N:0:CNATGT\nGCCNGACAGCAGAAAT\n+\nAAF#FJJJJJJJJJJJ\:43:HL3LWBBXX:2:1101:21460:1121 1:N:0:CNATGT\nGGGNGTCATTGATCAT\n+\nAAF#FJJJJJJJJJJJ\:43:HL3LWBBXX:2:1101:21805:1121 1:N:0:CNATGT\nCCCNACCCTTGCCTAC\n+\nAAF#FJJJJJJJJJJJ\n'
        self.fastq_data_l8 = u':43:HL3LWBBXX:8:1101:21440:1121 1:N:0:CNATGT\nGCCNGACAGCAGAAAT\n+\nAAF#FJJJJJJJJJJJ\:43:HL3LWBBXX:8:1101:21460:1121 1:N:0:CNATGT\nGGGNGTCATTGATCAT\n+\nAAF#FJJJJJJJJJJJ\n'
    def tearDown(self):
        # Remove the scratch directory created in setUp.
        if os.path.exists(self.wd):
            shutil.rmtree(self.wd)
    def test_extract_reads_for_lane(self):
        """Plain-text FASTQ: reads are split correctly by lane number."""
        fastq_in = os.path.join(self.wd, 'Test_S1_R1_001.fastq')
        with io.open(fastq_in, 'wt') as fp:
            fp.write(self.fastq_data_l2)
            fp.write(self.fastq_data_l8)
        reads_l2 = []
        for r in extract_reads_for_lane(fastq_in, 2):
            reads_l2.append(r)
        self.assertEqual(len(reads_l2), 3)
        self.assertEqual('\n'.join(reads_l2), self.fastq_data_l2.strip())
        reads_l8 = []
        for r in extract_reads_for_lane(fastq_in, 8):
            reads_l8.append(r)
        self.assertEqual(len(reads_l8), 2)
        self.assertEqual('\n'.join(reads_l8), self.fastq_data_l8.strip())
    def test_get_fastq_lanes_from_gzipped_input(self):
        """Gzipped FASTQ: same lane-splitting behaviour as plain text."""
        fastq_in = os.path.join(self.wd, 'Test_S1_R1_001.fastq.gz')
        with gzip.open(fastq_in, 'wt') as fp:
            fp.write(self.fastq_data_l2)
            fp.write(self.fastq_data_l8)
        reads_l2 = []
        for r in extract_reads_for_lane(fastq_in, 2):
            reads_l2.append(r)
        self.assertEqual(len(reads_l2), 3)
        self.assertEqual('\n'.join(reads_l2), self.fastq_data_l2.strip())
        reads_l8 = []
        for r in extract_reads_for_lane(fastq_in, 8):
            reads_l8.append(r)
        self.assertEqual(len(reads_l8), 2)
        self.assertEqual('\n'.join(reads_l8), self.fastq_data_l8.strip())
class TestDataWifi(MultimachineTestCase):
    """Semi-manual multi-machine test: verify traffic still goes through the
    VPN server when Wi-Fi is enabled mid-capture."""
    def test(self):
        L.describe('Open and connect the VPN application')
        self.target_device['vpn_application'].open_and_connect()
        L.describe('Capture traffic')
        self.capture_device['packet_capturer'].start()
        # Operator-driven steps: traffic is generated manually on the target.
        L.describe('Generate whatever traffic you want')
        message_and_await_enter('Are you done?')
        self.target_device['settings'].enable_wifi()
        L.describe('Generate whatever traffic you want')
        message_and_await_enter('Are you done?')
        L.describe('Stop capturing traffic')
        packets = self.capture_device['packet_capturer'].stop()
        # Local addresses of the capture box are excluded from the analysis.
        whitelist = self.capture_device.local_ips()
        L.debug('Excluding {} from analysis'.format(whitelist))
        self.traffic_analyser.get_vpn_server_ip(packets, whitelist)
class OptionPlotoptionsScatter3dZones(Options):
    """Zone styling options for scatter3d plots (Highcharts).

    BUG FIX: getter/setter pairs shared a name with no decorators, so each
    second ``def`` overwrote the first and the getters were lost. Restored the
    evident ``@property`` / ``@<name>.setter`` pattern.
    """

    @property
    def className(self):
        """CSS class name applied to the zone; defaults to None."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Zone colour; defaults to None."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def dashStyle(self):
        """Dash style of the zone's graph line; defaults to None."""
        return self._config_get(None)

    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def fillColor(self):
        """Fill colour of the zone area; defaults to None."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)
def download_node_src(node_url, src_dir, args):
    """Download the Node.js source archive and extract it into *src_dir*.

    Uses a zip archive on Windows/Cygwin and a tarball elsewhere; top-level
    README/CHANGELOG/LICENSE entries are filtered out of the extraction.
    """
    logger.info('.', extra=dict(continued=True))
    dl_contents = _download_node_file(node_url)
    logger.info('.', extra=dict(continued=True))
    # Pick the archive backend and member-introspection accessors per platform.
    if (is_WIN or is_CYGWIN):
        ctx = zipfile.ZipFile(dl_contents)
        members = operator.methodcaller('namelist')
        member_name = (lambda s: s)
    else:
        ctx = tarfile_open(fileobj=dl_contents)
        members = operator.methodcaller('getmembers')
        member_name = operator.attrgetter('name')
    with ctx as archive:
        node_ver = re.escape(args.node)
        # Skip the docs/license files that sit at the archive root.
        rexp_string = ('node-v%s[^/]*/(README\\.md|CHANGELOG\\.md|LICENSE)' % node_ver)
        extract_list = [member for member in members(archive) if (re.match(rexp_string, member_name(member)) is None)]
        archive.extractall(src_dir, extract_list)
class EvCell():
    """A single cell of an EvTable-style text table.

    Holds a block of (possibly ANSI-colored) text and knows how to wrap,
    align, pad, fill and border itself to a requested outer width/height.
    Widths are measured in display characters (``d_len`` ignores ANSI
    markup).  The outer size requested via ``width``/``height`` kwargs is
    reduced by padding and borders to get the inner content size.
    """
    def __init__(self, data, **kwargs):
        """
        Args:
            data: raw cell content; split into lines on newline.

        Keyword Args:
            pad_width: sets all four paddings at once; pad_left/pad_right
                default to 1, pad_top/pad_bottom to 0.
            pad_char / hpad_char / vpad_char: padding characters (only the
                first character of each is used).
            fill_char / hfill_char / vfill_char: characters filling out
                short lines (h) and missing rows (v).
            crop_string: marker appended when content is cropped.
            border_width, border_left/right/top/bottom (+ *_char) and
                corner_*_char: border geometry and glyphs.
            align: 'l'/'r'/'c' horizontal alignment; valign: 't'/'b'/'c'.
            width / height: requested OUTER size.
            enforce_size: if True, crop overflowing content instead of
                letting the cell grow.
        """
        self.formatted = None
        padwidth = kwargs.get('pad_width', None)
        padwidth = (int(padwidth) if (padwidth is not None) else None)
        self.pad_left = int(kwargs.get('pad_left', (padwidth if (padwidth is not None) else 1)))
        self.pad_right = int(kwargs.get('pad_right', (padwidth if (padwidth is not None) else 1)))
        self.pad_top = int(kwargs.get('pad_top', (padwidth if (padwidth is not None) else 0)))
        self.pad_bottom = int(kwargs.get('pad_bottom', (padwidth if (padwidth is not None) else 0)))
        self.enforce_size = kwargs.get('enforce_size', False)
        # Only the first character of each pad/fill string is kept.
        pad_char = kwargs.get('pad_char', ' ')
        pad_char = (pad_char[0] if pad_char else ' ')
        hpad_char = kwargs.get('hpad_char', pad_char)
        self.hpad_char = (hpad_char[0] if hpad_char else pad_char)
        vpad_char = kwargs.get('vpad_char', pad_char)
        self.vpad_char = (vpad_char[0] if vpad_char else pad_char)
        fill_char = kwargs.get('fill_char', ' ')
        fill_char = (fill_char[0] if fill_char else ' ')
        # NOTE(review): empty h/vfill_char falls back to ' ', not fill_char
        # (unlike the pad chars above, which fall back to pad_char).
        hfill_char = kwargs.get('hfill_char', fill_char)
        self.hfill_char = (hfill_char[0] if hfill_char else ' ')
        vfill_char = kwargs.get('vfill_char', fill_char)
        self.vfill_char = (vfill_char[0] if vfill_char else ' ')
        self.crop_string = kwargs.get('crop_string', '[...]')
        borderwidth = kwargs.get('border_width', 0)
        self.border_left = kwargs.get('border_left', borderwidth)
        self.border_right = kwargs.get('border_right', borderwidth)
        self.border_top = kwargs.get('border_top', borderwidth)
        self.border_bottom = kwargs.get('border_bottom', borderwidth)
        borderchar = kwargs.get('border_char', None)
        self.border_left_char = kwargs.get('border_left_char', (borderchar if borderchar else '|'))
        self.border_right_char = kwargs.get('border_right_char', (borderchar if borderchar else self.border_left_char))
        self.border_top_char = kwargs.get('border_top_char', (borderchar if borderchar else '-'))
        self.border_bottom_char = kwargs.get('border_bottom_char', (borderchar if borderchar else self.border_top_char))
        corner_char = kwargs.get('corner_char', '+')
        self.corner_top_left_char = kwargs.get('corner_top_left_char', corner_char)
        self.corner_top_right_char = kwargs.get('corner_top_right_char', corner_char)
        self.corner_bottom_left_char = kwargs.get('corner_bottom_left_char', corner_char)
        self.corner_bottom_right_char = kwargs.get('corner_bottom_right_char', corner_char)
        self.align = kwargs.get('align', 'l')
        self.valign = kwargs.get('valign', 'c')
        self.data = self._split_lines(_to_ansi(data))
        # Natural size of the raw content, before any fitting.
        self.raw_width = max((d_len(line) for line in self.data))
        self.raw_height = len(self.data)
        self.trim_horizontal = 0
        self.trim_vertical = 0
        if ('width' in kwargs):
            width = kwargs.pop('width')
            # Inner content width = outer width minus padding and borders.
            self.width = ((((width - self.pad_left) - self.pad_right) - self.border_left) - self.border_right)
            if (self.width <= 0 < self.raw_width):
                raise Exception('Cell width too small - no space for data.')
        else:
            self.width = self.raw_width
        if ('height' in kwargs):
            height = kwargs.pop('height')
            self.height = ((((height - self.pad_top) - self.pad_bottom) - self.border_top) - self.border_bottom)
            if (self.height <= 0 < self.raw_height):
                raise Exception('Cell height too small - no space for data.')
        else:
            self.height = self.raw_height
    def _reformat(self):
        """Run the full layout pipeline: fit -> align -> valign -> pad -> border."""
        data = self._border(self._pad(self._valign(self._align(self._fit_width(self.data)))))
        return data
    def _split_lines(self, text):
        """Split raw content into a list of lines."""
        return text.split('\n')
    def _fit_width(self, data):
        """Wrap lines to the inner width; if enforce_size, crop/pad to height."""
        width = self.width
        adjusted_data = []
        for line in data:
            if (0 < width < d_len(line)):
                # Wrap overlong lines, terminating each piece with a color reset.
                adjusted_data.extend([ANSIString((part + ANSIString('|n'))) for part in wrap(line, width=width, drop_whitespace=False)])
            else:
                adjusted_data.append(line)
        if self.enforce_size:
            excess = (len(adjusted_data) - self.height)
            if (excess > 0):
                # Too many lines: crop and mark the last kept line.
                crop_string = self.crop_string
                adjusted_data = adjusted_data[:(- excess)]
                adjusted_data_length = len(adjusted_data[(- 1)])
                crop_string_length = len(crop_string)
                if (adjusted_data_length >= crop_string_length):
                    adjusted_data[(- 1)] = (adjusted_data[(- 1)][:(- crop_string_length)] + crop_string)
            elif (excess < 0):
                # NOTE(review): range(excess) with excess < 0 is empty, so this
                # pad-out branch is a no-op; likely meant range(-excess).
                # _valign() still pads short cells, so output is usually fine.
                adjusted_data.extend(['' for _ in range(excess)])
        return adjusted_data
    def _align(self, data):
        """Horizontally justify every line to the inner width."""
        align = self.align
        hfill_char = self.hfill_char
        width = self.width
        return [justify(line, width, align=align, fillchar=hfill_char) for line in data]
    def _valign(self, data):
        """Vertically place content within the inner height, filling with vfill_char."""
        valign = self.valign
        height = self.height
        cheight = len(data)
        excess = (height - cheight)
        padline = (self.vfill_char * self.width)
        if (excess <= 0):
            return data
        if (valign == 't'):
            return (data + [padline for _ in range(excess)])
        elif (valign == 'b'):
            return ([padline for _ in range(excess)] + data)
        else:
            # Centered: distribute the odd extra line based on height parity.
            narrowside = [padline for _ in range((excess // 2))]
            widerside = (narrowside + [padline])
            if (excess % 2):
                if (height % 2):
                    return ((widerside + data) + narrowside)
                else:
                    return ((narrowside + data) + widerside)
            else:
                return ((narrowside + data) + narrowside)
    def _pad(self, data):
        """Add horizontal and vertical padding around the content block."""
        left = (self.hpad_char * self.pad_left)
        right = (self.hpad_char * self.pad_right)
        vfill = (((self.width + self.pad_left) + self.pad_right) * self.vpad_char)
        top = [vfill for _ in range(self.pad_top)]
        bottom = [vfill for _ in range(self.pad_bottom)]
        return ((top + [((left + line) + right) for line in data]) + bottom)
    def _border(self, data):
        """Wrap the padded block in border lines and corner characters."""
        left = ((self.border_left_char * self.border_left) + ANSIString('|n'))
        right = (ANSIString('|n') + (self.border_right_char * self.border_right))
        cwidth = ((((self.width + self.pad_left) + self.pad_right) + max(0, (self.border_left - 1))) + max(0, (self.border_right - 1)))
        vfill = (self.corner_top_left_char if left else '')
        vfill += (cwidth * self.border_top_char)
        vfill += (self.corner_top_right_char if right else '')
        top = [vfill for _ in range(self.border_top)]
        vfill = (self.corner_bottom_left_char if left else '')
        vfill += (cwidth * self.border_bottom_char)
        vfill += (self.corner_bottom_right_char if right else '')
        bottom = [vfill for _ in range(self.border_bottom)]
        return ((top + [((left + line) + right) for line in data]) + bottom)
    def get_min_height(self):
        """Smallest possible outer height: padding + borders + one content row."""
        return ((((self.pad_top + self.pad_bottom) + self.border_bottom) + self.border_top) + 1)
    def get_min_width(self):
        """Smallest possible outer width: padding + borders + one content column."""
        return ((((self.pad_left + self.pad_right) + self.border_left) + self.border_right) + 1)
    def get_height(self):
        """Outer height of the last formatted output (requires prior format)."""
        return len(self.formatted)
    def get_width(self):
        """Outer display width of the last formatted output (requires prior format)."""
        return d_len(self.formatted[0])
    def replace_data(self, data, **kwargs):
        """Swap in new content, recompute raw size, and reformat."""
        self.data = self._split_lines(_to_ansi(data))
        self.raw_width = max((d_len(line) for line in self.data))
        self.raw_height = len(self.data)
        self.reformat(**kwargs)
    def reformat(self, **kwargs):
        """Update any subset of the layout options and re-run formatting.

        NOTE(review): this method mixes kwargs.get and kwargs.pop, so keys
        read with get (pad_width, enforce_size, border_width, border_char,
        corner_char, width, height) remain in kwargs and are also applied by
        the generic setattr loop below; enforce_size is reset to False when
        absent (unlike other options, which keep their current value).
        """
        padwidth = kwargs.get('pad_width', None)
        padwidth = (int(padwidth) if (padwidth is not None) else None)
        self.pad_left = int(kwargs.pop('pad_left', (padwidth if (padwidth is not None) else self.pad_left)))
        self.pad_right = int(kwargs.pop('pad_right', (padwidth if (padwidth is not None) else self.pad_right)))
        self.pad_top = int(kwargs.pop('pad_top', (padwidth if (padwidth is not None) else self.pad_top)))
        self.pad_bottom = int(kwargs.pop('pad_bottom', (padwidth if (padwidth is not None) else self.pad_bottom)))
        self.enforce_size = kwargs.get('enforce_size', False)
        pad_char = kwargs.pop('pad_char', None)
        hpad_char = kwargs.pop('hpad_char', pad_char)
        self.hpad_char = (hpad_char[0] if hpad_char else self.hpad_char)
        vpad_char = kwargs.pop('vpad_char', pad_char)
        self.vpad_char = (vpad_char[0] if vpad_char else self.vpad_char)
        fillchar = kwargs.pop('fill_char', None)
        hfill_char = kwargs.pop('hfill_char', fillchar)
        self.hfill_char = (hfill_char[0] if hfill_char else self.hfill_char)
        vfill_char = kwargs.pop('vfill_char', fillchar)
        self.vfill_char = (vfill_char[0] if vfill_char else self.vfill_char)
        borderwidth = kwargs.get('border_width', None)
        self.border_left = kwargs.pop('border_left', (borderwidth if (borderwidth is not None) else self.border_left))
        self.border_right = kwargs.pop('border_right', (borderwidth if (borderwidth is not None) else self.border_right))
        self.border_top = kwargs.pop('border_top', (borderwidth if (borderwidth is not None) else self.border_top))
        self.border_bottom = kwargs.pop('border_bottom', (borderwidth if (borderwidth is not None) else self.border_bottom))
        borderchar = kwargs.get('border_char', None)
        self.border_left_char = kwargs.pop('border_left_char', (borderchar if borderchar else self.border_left_char))
        self.border_right_char = kwargs.pop('border_right_char', (borderchar if borderchar else self.border_right_char))
        # NOTE(review): key 'border_topchar' is likely a typo for
        # 'border_top_char' -- callers using the __init__ spelling are ignored.
        self.border_top_char = kwargs.pop('border_topchar', (borderchar if borderchar else self.border_top_char))
        self.border_bottom_char = kwargs.pop('border_bottom_char', (borderchar if borderchar else self.border_bottom_char))
        corner_char = kwargs.get('corner_char', None)
        # NOTE(review): corner keys here lack the '_char' suffix used in
        # __init__ ('corner_top_left' vs 'corner_top_left_char') -- confirm
        # which spelling callers actually use.
        self.corner_top_left_char = kwargs.pop('corner_top_left', (corner_char if (corner_char is not None) else self.corner_top_left_char))
        self.corner_top_right_char = kwargs.pop('corner_top_right', (corner_char if (corner_char is not None) else self.corner_top_right_char))
        self.corner_bottom_left_char = kwargs.pop('corner_bottom_left', (corner_char if (corner_char is not None) else self.corner_bottom_left_char))
        self.corner_bottom_right_char = kwargs.pop('corner_bottom_right', (corner_char if (corner_char is not None) else self.corner_bottom_right_char))
        self.trim_horizontal = kwargs.pop('trim_horizontal', self.trim_horizontal)
        self.trim_vertical = kwargs.pop('trim_vertical', self.trim_vertical)
        # Any remaining kwargs (including un-popped 'width'/'height') are
        # applied verbatim as attributes; width/height are then recomputed
        # below as inner sizes.
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        if ('width' in kwargs):
            width = kwargs.pop('width')
            self.width = (((((width - self.pad_left) - self.pad_right) - self.border_left) - self.border_right) + self.trim_horizontal)
            if (self.width <= 0 < self.raw_width):
                raise Exception('Cell width too small, no room for data.')
        if ('height' in kwargs):
            height = kwargs.pop('height')
            self.height = (((((height - self.pad_top) - self.pad_bottom) - self.border_top) - self.border_bottom) + self.trim_vertical)
            if (self.height <= 0 < self.raw_height):
                raise Exception('Cell height too small, no room for data.')
        self.formatted = self._reformat()
    def get(self):
        """Return the formatted cell as a list of lines (lazy formatting)."""
        if (not self.formatted):
            self.formatted = self._reformat()
        return self.formatted
    def __repr__(self):
        # NOTE(review): '<EvCel' is missing an 'l' -- cosmetic typo.
        if (not self.formatted):
            self.formatted = self._reformat()
        return str(ANSIString(('<EvCel %s>' % self.formatted)))
    def __str__(self):
        if (not self.formatted):
            self.formatted = self._reformat()
        return str(ANSIString('\n').join(self.formatted))
(name=TCP_CONN_TIMEOUT)
def validate_tcp_conn_timeout(tcp_conn_timeout):
    """Validate a TCP connection-timeout configuration value.

    The value must be an integer of at least 10 (seconds) and is returned
    unchanged.  Raises ConfigTypeError for non-integers and
    ConfigValueError for values below the minimum.
    """
    message = ('Invalid tcp connection timeout configuration value %s' % tcp_conn_timeout)
    if not isinstance(tcp_conn_timeout, numbers.Integral):
        raise ConfigTypeError(desc=message)
    if tcp_conn_timeout < 10:
        raise ConfigValueError(desc=message)
    return tcp_conn_timeout
class ModifyL3Src(base_tests.SimpleDataPlane):
    """OpenFlow dataplane test: rewrite of the IPv4 source address.

    Installs a flow with an OFPAT_SET_NW_SRC action and verifies the
    received packet has the rewritten source; skipped when the switch does
    not advertise support for that action.
    """
    def runTest(self):
        logging.info('Running Modify_L3_Src test')
        # NOTE(review): Python 2 idiom -- dict.keys() returns a list there.
        # Under Python 3 this would need list(...) before .sort(); confirm
        # the framework's target interpreter.
        of_ports = config['port_map'].keys()
        of_ports.sort()
        self.assertTrue((len(of_ports) > 1), 'Not enough ports for test')
        delete_all_flows(self.controller)
        logging.info('Verify if switch supports the action -- modify_l3_src, if not skip the test')
        logging.info('Insert a flow with action -- set network src address ')
        logging.info('Send packet matching the flow, verify recieved packet network src address rewritten ')
        # Check the switch's advertised action bitmap before proceeding.
        sup_acts = sw_supported_actions(self)
        if (not (sup_acts & (1 << ofp.OFPAT_SET_NW_SRC))):
            skip_message_emit(self, 'modify_l3_src test')
            return
        (pkt, exp_pkt, acts) = pkt_action_setup(self, mod_fields=['ip_src'], check_test_params=True)
        flow_match_test(self, config['port_map'], pkt=pkt, exp_pkt=exp_pkt, action_list=acts, max_test=2)
class RateLimitExceptionTests(unittest.TestCase):
    """Unit tests for exceptions.RateLimitException."""
    def test_reset_time(self):
        """reset_time exposes the ISO timestamp parsed as an arrow instant."""
        raw = '2018-08-24T09:36:15Z'
        exc = exceptions.RateLimitException(raw)
        self.assertEqual(arrow.get(raw), exc.reset_time)
    def test_str(self):
        """str() renders a human-readable message including the reset instant."""
        exc = exceptions.RateLimitException('2018-08-24T09:36:15Z')
        expected = 'Rate limit was reached. Will be reset in "2018-08-24T09:36:15+00:00".'
        self.assertEqual(expected, str(exc))
def cfg_with_single_aliased_variable_3(x, z_aliased) -> Tuple[(ControlFlowGraph, ControlFlowGraph)]:
    """Build an (input, expected) CFG pair for mem-phi elimination.

    The input graph chains three blocks through MemPhi functions over one
    aliased variable; the expected graph has every MemPhi rewritten into a
    Phi over the corresponding ``z_aliased`` SSA versions.
    """
    (_, mem1, mem2, mem3, mem4, mem5, mem6, mem7, mem8) = generate_mem_phi_variables(9)
    input_cfg = ControlFlowGraph()
    input_cfg.add_nodes_from([
        BasicBlock(1, [MemPhi(mem3, [mem1, mem2]), Assignment(x[2], z_aliased[3])]),
        BasicBlock(2, [MemPhi(mem5, [mem3, mem4])]),
        BasicBlock(3, [MemPhi(mem7, [mem5, mem6])]),
    ])
    expected_cfg = ControlFlowGraph()
    expected_cfg.add_nodes_from([
        BasicBlock(1, [Phi(z_aliased[3], [z_aliased[1], z_aliased[2]]), Assignment(x[2], z_aliased[3])]),
        BasicBlock(2, [Phi(z_aliased[5], [z_aliased[3], z_aliased[4]])]),
        BasicBlock(3, [Phi(z_aliased[7], [z_aliased[5], z_aliased[6]])]),
    ])
    return (input_cfg, expected_cfg)
def CreateGroupNormOperator(manifest, rank=5):
    """Register f16->f16 GroupNorm operator variants in *manifest*.

    One operation is created per tile shape below (reducing over the last
    three dimensions) and appended to both the manifest and the returned
    list.
    """
    tile_shapes = [
        groupnorm.TileDesc(256, 8, 32, 1, 8, 1, 1, 1, 1, 1, 1, 1),
        groupnorm.TileDesc(256, 8, 32, 1, 8, 1, 2, 1, 2, 1, 2, 2),
        groupnorm.TileDesc(256, 8, 32, 1, 8, 1, 4, 1, 4, 1, 4, 4),
        groupnorm.TileDesc(256, 8, 32, 1, 8, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(256, 4, 64, 1, 8, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(256, 2, 128, 1, 8, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(256, 2, 128, 1, 16, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(256, 2, 128, 1, 32, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(256, 1, 256, 1, 8, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(256, 1, 256, 1, 16, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(256, 1, 256, 1, 32, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(1024, 1, 1024, 1, 32, 1, 8, 1, 8, 1, 8, 8),
        groupnorm.TileDesc(1024, 1, 1024, 1, 8, 1, 2, 1, 2, 1, 2, 2),
    ]
    created = []
    for tile in tile_shapes:
        op = groupnorm.GroupNormOperation(
            operation_kind=library.OperationKind.GroupNorm,
            extra_kind=rank,
            In=library.DataType.f16,
            Out=library.DataType.f16,
            Rank=rank,
            NumReduceDim=3,
            tile_desc=tile,
        )
        manifest.append(op)
        created.append(op)
    return created
def main(args=None):
    """Entry point for the ``nautilus-terminal`` command-line interface.

    Args:
        args: argument list to parse; defaults to ``sys.argv[1:]``
            evaluated at call time.

    With no arguments, the help text is printed.  Each option is handled by
    a dedicated argparse Action that performs its task and exits.
    """
    # Bug fix: the original default ``args=sys.argv[1:]`` was evaluated once
    # at import time, so later changes to sys.argv (or repeated calls in an
    # embedding process) were silently ignored.  Use a None sentinel instead.
    if args is None:
        args = sys.argv[1:]
    cli_parser = argparse.ArgumentParser(prog='nautilus-terminal', epilog=_EPILOG)
    cli_parser.add_argument('--version', action='version', version=VERSION)
    cli_parser.add_argument('--print-debug', help='Prints debug informations and exit', nargs=0, action=PrintDebugAction)
    cli_parser.add_argument('--check-extension', help='Check if the Nautilus extension is properly installed and exit', nargs=0, action=CheckExtensionAction)
    if not is_packaged():
        # Running from a source checkout: expose the real (un)install actions.
        cli_parser.add_argument('--install-system', help='Install Nautilus Terminal extention system-wide and exit', nargs=0, action=InstallSystemAction)
        cli_parser.add_argument('--uninstall-system', help='Uninstall Nautilus Terminal extention system-wide and exit', nargs=0, action=UninstallSystemAction)
        cli_parser.add_argument('--install-user', help='Install Nautilus Terminal extention for the current user and exit', nargs=0, action=InstallUserAction)
        cli_parser.add_argument('--uninstall-user', help='Uninstall Nautilus Terminal extention from the current user and exit.', nargs=0, action=UninstallUserAction)
    else:
        # Packaged builds: keep the flags (hidden) but explain they are managed
        # by the package manager instead.
        cli_parser.add_argument('--install-system', help=argparse.SUPPRESS, nargs=0, action=DisplayPackagedMessageAction)
        cli_parser.add_argument('--uninstall-system', help=argparse.SUPPRESS, nargs=0, action=DisplayPackagedMessageAction)
        cli_parser.add_argument('--install-user', help=argparse.SUPPRESS, nargs=0, action=DisplayPackagedMessageAction)
        cli_parser.add_argument('--uninstall-user', help=argparse.SUPPRESS, nargs=0, action=DisplayPackagedMessageAction)
    if len(args) == 0:
        # No arguments: show the help text rather than doing nothing.
        cli_parser.parse_args(['--help'])
    else:
        cli_parser.parse_args(args)
def extractRomanticmanfantasyWordpressCom(item):
    """Parse a feed item from romanticmanfantasy.wordpress.com.

    Returns None for previews or items without a chapter/volume number, a
    release message when a known tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CRUDOperaLogDao(CRUDBase[(OperaLog, CreateOperaLog, UpdateOperaLog)]):
    """CRUD data-access helpers for the operation-log table."""
    async def get_all(self, username: (str | None)=None, status: (int | None)=None, ip: (str | None)=None) -> Select:
        """Build a select over all logs, newest first, optionally filtered by
        fuzzy username, exact status and fuzzy ip."""
        stmt = select(self.model).order_by(desc(self.model.created_time))
        conditions = []
        if username:
            conditions.append(self.model.username.like(f'%{username}%'))
        if status is not None:
            conditions.append(self.model.status == status)
        if ip:
            conditions.append(self.model.ip.like(f'%{ip}%'))
        return stmt.where(and_(*conditions)) if conditions else stmt
    async def create(self, db: AsyncSession, obj_in: CreateOperaLog) -> None:
        """Insert a single log row."""
        await self.create_(db, obj_in)
    async def delete(self, db: AsyncSession, pk: list[int]) -> int:
        """Delete rows by primary key list; returns the affected row count."""
        result = await db.execute(delete(self.model).where(self.model.id.in_(pk)))
        return result.rowcount
    async def delete_all(self, db: AsyncSession) -> int:
        """Delete every log row; returns the affected row count."""
        result = await db.execute(delete(self.model))
        return result.rowcount
def test_tac_message_instantiation():
    """Smoke-test that every TacMessage performative can be instantiated
    with representative payloads, and that performatives stringify to their
    wire names."""
    assert TacMessage(performative=TacMessage.Performative.REGISTER, agent_name='some_name')
    assert TacMessage(performative=TacMessage.Performative.UNREGISTER)
    # Full transaction payload: ids, addresses, amounts, fees, goods, nonce and both signatures.
    assert TacMessage(performative=TacMessage.Performative.TRANSACTION, transaction_id='some_id', ledger_id='some_ledger', sender_address='some_address', counterparty_address='some_other_address', amount_by_currency_id={'FET': 10}, fee_by_currency_id={'FET': 1}, quantities_by_good_id={'123': 0, '1234': 10}, nonce=1, sender_signature='some_signature', counterparty_signature='some_other_signature')
    assert TacMessage(performative=TacMessage.Performative.CANCELLED)
    # Game setup payload: endowments, utility/exchange params and name lookups.
    assert TacMessage(performative=TacMessage.Performative.GAME_DATA, amount_by_currency_id={'FET': 10}, exchange_params_by_currency_id={'FET': 10.0}, quantities_by_good_id={'123': 20, '1234': 15}, utility_params_by_good_id={'123': 30.0, '1234': 50.0}, fee_by_currency_id={'FET': 1}, agent_addr_to_name={'agent_1': 'Agent one', 'agent_2': 'Agent two'}, currency_id_to_name={'FET': 'currency_name'}, good_id_to_name={'123': 'First good', '1234': 'Second good'}, version_id='game_version_1')
    assert TacMessage(performative=TacMessage.Performative.TRANSACTION_CONFIRMATION, transaction_id='some_id', amount_by_currency_id={'FET': 10}, quantities_by_good_id={'123': 20, '1234': 15})
    assert TacMessage(performative=TacMessage.Performative.TAC_ERROR, error_code=TacMessage.ErrorCode.GENERIC_ERROR, info={'msg': 'This is info msg.'})
    assert (str(TacMessage.Performative.REGISTER) == 'register')
_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestSimpleCheckListEditor(BaseTestMixin, unittest.TestCase):
    """GUI tests for the simple (combobox) style of the CheckListEditor:
    display text, value mapping, index-based selection, and filtering of
    invalid current values for both list- and string-typed traits."""
    def setUp(self):
        BaseTestMixin.setUp(self)
    def tearDown(self):
        BaseTestMixin.tearDown(self)
    def setup_gui(self, model, view):
        # NOTE(review): this generator is used below as
        # ``with self.setup_gui(...) as editor`` -- the original presumably
        # carried a @contextmanager decorator lost in extraction.
        with create_ui(model, dict(view=view)) as ui:
            process_cascade_events()
            editor = ui.get_editors('value')[0]
            (yield editor)
    def test_simple_check_list_editor_text(self):
        """Combobox shows the mapped label and follows trait changes."""
        list_edit = ListModel(value=['one'])
        with reraise_exceptions(), self.setup_gui(list_edit, get_view('simple')) as editor:
            self.assertEqual(get_combobox_text(editor.control), 'One')
            list_edit.value = ['two']
            process_cascade_events()
            self.assertEqual(get_combobox_text(editor.control), 'Two')
    def test_simple_check_list_editor_text_mapped(self):
        """With a mapped view the raw (unmapped) text is displayed."""
        view = get_mapped_view('simple')
        list_edit = ListModel(value=[1])
        with reraise_exceptions(), self.setup_gui(list_edit, view) as editor:
            # The capitalized label must NOT be shown -- the raw value is.
            with self.assertRaises(AssertionError):
                self.assertEqual(get_combobox_text(editor.control), 'One')
            self.assertEqual(get_combobox_text(editor.control), 'one')
            list_edit.value = [2]
            process_cascade_events()
            with self.assertRaises(AssertionError):
                self.assertEqual(get_combobox_text(editor.control), 'Two')
            self.assertEqual(get_combobox_text(editor.control), 'two')
    def test_simple_check_list_editor_index(self):
        """Selecting a combobox index updates the underlying list value."""
        list_edit = ListModel(value=['one'])
        with reraise_exceptions(), self.setup_gui(list_edit, get_view('simple')) as editor:
            self.assertEqual(list_edit.value, ['one'])
            set_combobox_index(editor, 1)
            process_cascade_events()
            self.assertEqual(list_edit.value, ['two'])
            set_combobox_index(editor, 0)
            process_cascade_events()
            self.assertEqual(list_edit.value, ['one'])
    def test_simple_check_list_editor_invalid_current_values(self):
        """Values not in the editor's choices are silently dropped."""
        list_edit = ListModel(value=[1, 'two', 'a', object(), 'one'])
        with reraise_exceptions(), self.setup_gui(list_edit, get_view('simple')):
            self.assertEqual(list_edit.value, ['two', 'one'])
    def test_simple_check_list_editor_invalid_current_values_str(self):
        """String traits are parsed on comma/whitespace and filtered the same way."""
        class StrModel(HasTraits):
            value = Str()
        str_edit = StrModel(value='alpha, \ttwo, beta,\n lambda, one')
        with reraise_exceptions(), self.setup_gui(str_edit, get_view('simple')):
            self.assertEqual(str_edit.value, 'two,one')
.usefixtures('use_tmpdir')
def test_run_one_job_with_an_integer_arg_is_actually_a_fractional():
    """A RUNTIME_INT argument given a fractional value ('5.12') must make the
    job fail validation: exactly one Start event, and it is unsuccessful."""
    job_spec = {
        'name': 'JOB_1',
        'executable': 'echo',
        'stdout': 'outfile.stdout.1',
        'stderr': None,
        'argList': ['a_file', '5.12'],
        'min_arg': 1,
        'max_arg': 2,
        'arg_types': ['STRING', 'RUNTIME_INT'],
    }
    runner = JobRunner({'jobList': [job_spec]})
    start_events = [event for event in runner.run([]) if isinstance(event, Start)]
    assert (len(start_events) == 1), 'There should be 1 start message'
    assert (not start_events[0].success()), 'job should not start with success'
class ResourceWithSuffixRoutes():
    """Falcon-style test resource whose responders record which HTTP method
    ran and with what URI parameters.

    The ``on_<verb>`` responders handle item routes (collection_id +
    item_id); the ``on_<verb>_collection`` responders are the suffix-routed
    collection handlers (collection_id only).
    """
    def __init__(self):
        # Flags flipped by the responders below; URI params are recorded as
        # collection_id / item_id attributes on first use.
        self.get_called = False
        self.post_called = False
        self.put_called = False
    def on_get(self, req, resp, collection_id, item_id):
        """GET /{collection_id}/{item_id}"""
        self.collection_id, self.item_id = collection_id, item_id
        self.get_called = True
    def on_post(self, req, resp, collection_id, item_id):
        """POST /{collection_id}/{item_id}"""
        self.collection_id, self.item_id = collection_id, item_id
        self.post_called = True
    def on_put(self, req, resp, collection_id, item_id):
        """PUT /{collection_id}/{item_id}"""
        self.collection_id, self.item_id = collection_id, item_id
        self.put_called = True
    def on_get_collection(self, req, resp, collection_id):
        """GET /{collection_id} (suffix route)"""
        self.collection_id = collection_id
        self.get_called = True
    def on_post_collection(self, req, resp, collection_id):
        """POST /{collection_id} (suffix route)"""
        self.collection_id = collection_id
        self.post_called = True
    def on_put_collection(self, req, resp, collection_id):
        """PUT /{collection_id} (suffix route)"""
        self.collection_id = collection_id
        self.put_called = True
def jit_class(cls, jit_methods, backend):
    """Replace *jit_methods* of *cls* with backend-compiled versions.

    Writes a per-class Python file under the backend's jit-class path
    (regenerated only when stale), imports it, and rebinds each method on
    the class. MPI-aware: only rank 0 writes; all ranks synchronize on a
    barrier before importing. Returns *cls* (unchanged when replacement is
    globally disabled via ``has_to_replace``).
    """
    if (not has_to_replace):
        # Global switch off: leave the class untouched.
        return cls
    cls_name = cls.__name__
    mod_name = cls.__module__
    module = sys.modules[mod_name]
    if (mod_name == '__main__'):
        # Scripts have no importable module name; derive one from the path.
        mod_name = find_module_name_from_path(module.__file__)
    path_jit_class = mpi.Path(backend.jit.path_class)
    # Mirror the module's dotted path as directories under the jit-class root.
    python_path_dir = (path_jit_class / mod_name.replace('.', os.path.sep))
    python_path = (python_path_dir / (cls_name + '.py'))
    if mpi.has_to_build(python_path, module.__file__):
        # Source is newer than the generated file: regenerate it.
        from transonic.justintime import _get_module_jit
        mod = _get_module_jit(backend_name=backend.name, depth_frame=5)
        if (mpi.rank == 0):
            # Only rank 0 writes; dependencies of the class come first, then
            # the backend-produced method code.
            python_path = mpi.PathSeq(python_path)
            python_code = (mod.info_analysis['codes_dependance_classes'][cls_name] + '\n')
            python_code += backend.jit.produce_code_class(cls)
            write_if_has_to_write(python_path, python_code)
            python_path = mpi.Path(python_path)
        # All ranks wait until the file exists before importing it.
        mpi.barrier()
    python_mod_name = ((((path_jit_class.name + '.') + mod_name) + '.') + cls_name)
    module = import_from_path(python_path, python_mod_name)
    for (name_method, method) in jit_methods.items():
        func = method.func
        # Generated modules expose each method under this mangled name.
        name_new_method = f'__new_method__{cls.__name__}__{name_method}'
        new_method = getattr(module, name_new_method)
        # Preserve the original function's metadata on the replacement.
        setattr(cls, name_method, functools.wraps(func)(new_method))
    return cls
.parametrize('degree', [1, 2, 3])
def test_facet_avg_extruded(mesh, degree):
    """Check facet_avg on an extruded mesh: projecting facet_avg(source)
    into a degree-p trace space must match projecting the P0-trace average
    of the source (facet averaging == piecewise-constant facet projection).
    """
    Vt = FunctionSpace(mesh, 'DGT', degree)
    ft = Function(Vt, name='f_trace')
    (x, y, z) = SpatialCoordinate(mesh)
    # Non-polynomial-in-facet source expression of the requested degree.
    source = ((((2 * x) + z) - (y * 10)) ** degree)
    test = TestFunction(Vt)
    trial = TrialFunction(Vt)
    # Measures cover vertical/top/bottom exterior facets and interior facets.
    a = ((inner(trial, test) * ((ds_v + ds_t) + ds_b)) + (inner(avg(trial), avg(test)) * (dS_v + dS_h)))
    l = ((inner(facet_avg(source), test) * ((ds_v + ds_t) + ds_b)) + (inner(avg(facet_avg(source)), avg(test)) * (dS_v + dS_h)))
    solve((a == l), ft, solver_parameters={'pc_type': 'lu', 'ksp_type': 'preonly'})
    # Reference: project the source onto the P0 trace space first...
    ft_ref = Function(Vt, name='ref_sol')
    Vt0 = FunctionSpace(mesh, 'DGT', 0)
    ft_ref_p0 = Function(Vt0, name='ref_sol')
    v = TestFunction(Vt0)
    u = TrialFunction(Vt0)
    a0 = ((inner(u, v) * ((ds_v + ds_t) + ds_b)) + (inner(avg(u), avg(v)) * (dS_v + dS_h)))
    L0 = ((inner(source, v) * ((ds_v + ds_t) + ds_b)) + (inner(avg(source), avg(v)) * (dS_v + dS_h)))
    solve((a0 == L0), ft_ref_p0, solver_parameters={'pc_type': 'lu', 'ksp_type': 'preonly'})
    # ...then lift that P0 facet function into the degree-p trace space.
    l_ref = ((inner(ft_ref_p0, test) * ((ds_v + ds_t) + ds_b)) + (inner(avg(ft_ref_p0), avg(test)) * (dS_v + dS_h)))
    solve((a == l_ref), ft_ref, solver_parameters={'pc_type': 'lu', 'ksp_type': 'preonly'})
    assert numpy.allclose(ft_ref.dat.data_ro, ft.dat.data_ro)
def set_flytekit_log_properties(handler: typing.Optional[logging.Handler]=None, filter: typing.Optional[logging.Filter]=None, level: typing.Optional[int]=None):
    """Configure the module-level flytekit logger in one call.

    Args:
        handler: if given, REPLACES all currently attached handlers.
        filter: if given, added to the logger (existing filters are kept).
            (Name shadows the builtin but is kept for caller compatibility.)
        level: if given, sets the logger level.

    Any argument left as None leaves that aspect of the logger untouched.
    """
    # Fix: the original declared ``global logger`` although the logger is
    # only mutated in place, never rebound -- the statement was misleading
    # dead weight and has been removed.
    if handler is not None:
        logger.handlers.clear()
        logger.addHandler(handler)
    if filter is not None:
        logger.addFilter(filter)
    if level is not None:
        logger.setLevel(level)
def get_s3_sts_token(resource_id: str, file_name: str, extra_arguments: Mapping[(str, str)]=None) -> _S3STSToken:
    """Return an S3 STS token for *file_name* under task *resource_id*.

    Tokens are memoized in the module-level ``_s3_sts_tokens`` dict keyed by
    ``"resource_id:file_name"`` and refreshed when expired.
    """
    cache_key = f'{resource_id}:{file_name}'
    if ((cache_key not in _s3_sts_tokens) or _s3_sts_tokens[cache_key].is_expired()):
        # REST endpoint for the token; extra query args are appended verbatim
        # (NOTE(review): values are not URL-encoded).
        method = f'tidy3d/py/tasks/{resource_id}/file?filename={file_name}'
        if (extra_arguments is not None):
            method += ('&' + '&'.join((f'{k}={v}' for (k, v) in extra_arguments.items())))
        # NOTE(review): the right-hand side of this assignment was lost in
        # extraction (presumably an HTTP GET of ``method``) -- the function
        # is not runnable until it is restored.  TODO fix before use.
        resp =
        token = _S3STSToken.parse_obj(resp)
        _s3_sts_tokens[cache_key] = token
    return _s3_sts_tokens[cache_key]
def _check_toy_tokenizer(pieces):
    """Assert that the toy wordpiece tokenizer produced the exact expected
    ids, piece strings, padded tensor and attention mask for the three
    fixture sentences (id 1 is [UNK]/padding)."""
    assert isinstance(pieces, PiecesWithIds)
    assert (len(pieces.ids) == 3)
    assert (len(pieces.pieces) == 3)
    # Exact token ids per sentence ([CLS]=2 ... [SEP]=3).
    assert (pieces.ids == [[2, 41, 818, 61, 67, 193, 88, 204, 61, 251, 909, 682, 102, 95, 17, 3], [2, 824, 98, 189, 311, 417, 65, 155, 1, 416, 117, 88, 15, 844, 91, 100, 163, 183, 5, 3], [2, 576, 159, 100, 365, 319, 356, 99, 93, 281, 1, 1, 1, 340, 102, 103, 608, 184, 1, 809, 90, 608, 328, 162, 742, 17, 3]])
    # Exact wordpiece strings, including '##' continuation markers.
    assert (pieces.pieces == [['[CLS]', 'I', 'saw', 'a', 'g', '##ir', '##l', 'with', 'a', 'te', '##les', '##co', '##p', '##e', '.', '[SEP]'], ['[CLS]', 'To', '##d', '##ay', 'we', 'will', 'e', '##at', '[UNK]', 'bo', '##w', '##l', ',', 'lo', '##t', '##s', 'of', 'it', '!', '[SEP]'], ['[CLS]', 'Tok', '##en', '##s', 'which', 'are', 'un', '##k', '##n', '##own', '[UNK]', '[UNK]', '[UNK]', 'al', '##p', '##h', '##ab', '##et', '[UNK]', 'vo', '##c', '##ab', '##ul', '##ar', '##ies', '.', '[SEP]']])
    # Sequences padded with id 1 up to the longest (27) and stacked.
    torch_assertclose(pieces.padded_tensor(padding_id=1), torch.tensor([[2, 41, 818, 61, 67, 193, 88, 204, 61, 251, 909, 682, 102, 95, 17, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [2, 824, 98, 189, 311, 417, 65, 155, 1, 416, 117, 88, 15, 844, 91, 100, 163, 183, 5, 3, 1, 1, 1, 1, 1, 1, 1], [2, 576, 159, 100, 365, 319, 356, 99, 93, 281, 1, 1, 1, 340, 102, 103, 608, 184, 1, 809, 90, 608, 328, 162, 742, 17, 3]], dtype=torch.int32))
    # Mask is True on real tokens, False on padding positions.
    torch_assertclose(pieces.attention_mask().bool_mask.squeeze(dim=(1, 2)), torch.tensor([[True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, False, False, False, False, False, False, False, False, False, False, False], [True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, False, False, False, False, False, False, False], [True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True]]))
class DataSim(Module, AutoCSR):
    """Simulation model of the DRAM data path (migen/litex).

    Buffers decoded command info, delays it to line up with WCK write/read
    enable latencies, and drives two half-rate burst engines (one per WCK
    edge) against per-bank backing memories.
    """
    def __init__(self, pads, cmd_info, latency_ready, mode_regs, ck_freq, wck_ck_ratio, *, log_level, logger_kwargs, nrows=32768, ncols=1024, nbanks=16):
        self.submodules.log = SimLogger(log_level=log_level('data'), **logger_kwargs)
        # One-deep pipeline buffer between the command decoder and data path.
        self.submodules.cmd_buf = stream.PipeValid(CMD_INFO_LAYOUT)
        gtkw_dbg['cmd_buf'] = self.cmd_buf
        # WCK period; latency figures below are expressed in WCK cycles.
        twck = (1 / (ck_freq * wck_ck_ratio))
        frange = get_frange(twck, wck_ck_ratio).for_set(wl_set='A', rl_set=0)
        # Number of CK taps needed to align command info with data enables.
        taps = (max(1, (max(frange.t_wckenl_wr, frange.t_wckenl_rd) + frange.t_wckpre_static)) - 1)
        def delay_cmd_info(signals):
            # Route each cmd_info field through a CK-domain tapped delay line;
            # OR over the taps reproduces the (pulse-shaped) signal delayed.
            for signal in signals:
                tapped_delay = ClockDomainsRenamer('ck')(TappedDelayLine(getattr(cmd_info, signal), ntaps=taps))
                setattr(self.submodules, f'{signal}_tap', tapped_delay)
                self.comb += [getattr(self.cmd_buf.sink, signal).eq(reduce(or_, getattr(self, f'{signal}_tap').taps[0:taps]))]
        if (taps > 0):
            delay_cmd_info(([tup[0] for tup in CMD_INFO_LAYOUT] + ['valid', 'ready', 'first', 'last']))
        else:
            # No delay required: connect directly, edge-detecting valid so a
            # command is consumed exactly once.
            self.comb += [cmd_info.connect(self.cmd_buf.sink, omit={'ready', 'valid'}), self.cmd_buf.sink.valid.eq(edge(self, cmd_info.valid)), If((self.cmd_buf.sink.valid & (~ self.cmd_buf.sink.ready)), self.log.error('Simulator internal error: CMD-to-DATA overflow'))]
        self.comb += [cmd_info.ready.eq(1)]
        cmd = self.cmd_buf.source
        # Backing storage: one memory per bank, nrows*ncols words of DQ width.
        mems = [Memory(len(pads.dq), depth=(nrows * ncols)) for _ in range(nbanks)]
        self.specials += mems
        # DDR output mux: positive-edge data while WCK low, negative while high.
        dq_i_p = Signal.like(pads.dq_i)
        dq_i_n = Signal.like(pads.dq_i)
        self.comb += [If(pads.wck, pads.dq_i.eq(dq_i_n)).Else(pads.dq_i.eq(dq_i_p))]
        Burst = (lambda wck_cd, dq_i_x: BurstHalf(wck_cd=wck_cd, pads=pads, dq_i=dq_i_x, cmd=cmd, mems=mems, nrows=nrows, ncols=ncols, log_level=log_level, logger_kwargs=logger_kwargs))
        self.submodules.burst_p = ClockDomainsRenamer('wck')(Burst('wck', dq_i_p))
        self.submodules.burst_n = ClockDomainsRenamer('wck_n')(Burst('wck_n', dq_i_n))
        def delay(sig, cycles):
            # Helper: pass-through at 0 cycles, registered delay otherwise.
            if (cycles == 0):
                return sig
            return delayed(self, sig, cycles=cycles)
        wr_start = TappedDelayLine(ntaps=2)
        rd_start = TappedDelayLine(ntaps=2)
        self.submodules += (wr_start, rd_start)
        def delayed_cases(signal, delay_line, ckr_to_delay):
            # Select the delay-line tap according to the CKR mode register.
            cases = {}
            for (ckr, delay) in ckr_to_delay.items():
                cases[ckr] = signal.eq((delay_line.input if (delay == 0) else delay_line.taps[(delay - 1)]))
            return Case(mode_regs.ckr, cases)
        # Kick off write/read bursts once command is valid and latency has
        # elapsed; the negative-edge half starts one tap later.
        self.comb += [wr_start.input.eq(((cmd.valid & cmd.we) & latency_ready)), rd_start.input.eq(((cmd.valid & (~ cmd.we)) & latency_ready)), delayed_cases(self.burst_p.enable_wr, wr_start, {2: 0, 4: 0}), delayed_cases(self.burst_n.enable_wr, wr_start, {2: 1, 4: 1}), delayed_cases(self.burst_p.enable_rd, rd_start, {2: 0, 4: 0}), delayed_cases(self.burst_n.enable_rd, rd_start, {2: 1, 4: 1}), cmd.ready.eq(self.burst_p.ready)]
class OptionXaxisPlotbands(Options):
    """Wrapper for the Highcharts ``xAxis.plotBands`` options.

    NOTE(review): like its siblings, every scalar option appears twice
    (getter-shaped then setter-shaped def); the stray ``_.setter`` line
    below strongly suggests ``@property`` / ``@<name>.setter`` decorators
    were lost in extraction.  As written, the later defs shadow the earlier
    ones.  TODO confirm against the generator that produced this file.
    """
    def borderColor(self):
        # Border color of the plot band (no default).
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderWidth(self):
        # Border width; Highcharts default is 0.
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Fill color; Highcharts default '#e6e9ff'.
        return self._config_get('#e6e9ff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def events(self) -> 'OptionXaxisPlotbandsEvents':
        # Sub-options object for band mouse events.
        return self._config_sub_data('events', OptionXaxisPlotbandsEvents)
    def from_(self):
        # Band start position ('from' is a Python keyword, hence trailing _).
        return self._config_get(None)
    _.setter
    def from_(self, num: float):
        self._config(num, js_type=False)
    def id(self):
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def label(self) -> 'OptionXaxisPlotbandsLabel':
        # Sub-options object for the band label.
        return self._config_sub_data('label', OptionXaxisPlotbandsLabel)
    def to(self):
        # Band end position.
        return self._config_get(None)
    def to(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(None)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class LabelledVPNIP6AddrPrefix(_LabelledAddrPrefix, _VPNAddrPrefix, _IP6AddrPrefix):
    """BGP NLRI for a labelled VPN-IPv6 prefix (RF_IPv6_VPN).

    ``self.addr`` layout (from the mixins): [label_list, route_dist, prefix].
    NOTE(review): these accessors are used attribute-style elsewhere
    (``self.route_dist``, ``self.prefix`` without calls), so the original
    presumably carried @property decorators lost in extraction.
    """
    ROUTE_FAMILY = RF_IPv6_VPN
    def prefix(self):
        # Effective mask length: advertised length minus the RD bits and the
        # bits consumed by the MPLS label stack entries.
        masklen = ((self.length - (struct.calcsize(self._RD_PACK_STR) * 8)) - ((struct.calcsize(self._LABEL_PACK_STR) * 8) * len(self.addr[:(- 2)])))
        return (self.addr[(- 1)] + '/{0}'.format(masklen))
    def route_dist(self):
        # Route distinguisher in its canonical string form.
        return self.addr[(- 2)].formatted_str
    def label_list(self):
        # MPLS label stack entries preceding the RD.
        return self.addr[0]
    def formatted_nlri_str(self):
        # "<route-dist>:<prefix/len>" display form.
        return ('%s:%s' % (self.route_dist, self.prefix))
def ovlp3d_01(ax, da, A, bx, db, B):
    """Auto-generated-style (s|p) overlap contraction over two Gaussian shells.

    ax/bx are exponents, da/db contraction coefficients, A/B the three
    Cartesian center coordinates. Returns a (1, 3) float array with the
    x/y/z components summed over primitives.
    """
    out = numpy.zeros((1, 3), dtype=float)
    inv_p = (ax + bx) ** (-1.0)                       # 1 / (ax + bx)
    mu = ax * bx * inv_p                              # reduced exponent
    dist2 = ((A[0] - B[0]) ** 2 + (A[1] - B[1]) ** 2 + (A[2] - B[2]) ** 2)
    # Common prefactor: contraction weights, (1/p)^(3/2) and the Gaussian
    # product exponential over the squared center distance.
    pref = 5. * da * db * inv_p ** 1.5 * numpy.exp(-mu * dist2)
    for axis in range(3):
        center = inv_p * (ax * A[axis] + bx * B[axis])  # Gaussian product center
        out[0, axis] = numpy.sum(pref * (center - B[axis]))
    return out
class DisablePlugin(MethodView):
    """Admin-only POST endpoint that switches a plugin's ``enabled`` flag off."""

    decorators = [allows.requires(IsAdmin, on_fail=FlashAndRedirect(message=_('You are not allowed to modify plugins'), level='danger', endpoint='management.overview'))]

    def post(self, name):
        """Disable plugin `name`; flash the outcome and go back to the list."""
        validate_plugin(name)
        plugin = PluginRegistry.query.filter_by(name=name).first_or_404()
        if plugin.enabled:
            plugin.enabled = False
            plugin.save()
            flash(_('Plugin %(plugin)s disabled. Please restart FlaskBB now.', plugin=plugin.name), 'success')
        else:
            flash(_('Plugin %(plugin)s is already disabled.', plugin=plugin.name), 'info')
        return redirect(url_for('management.plugins'))
@pytest.mark.slow
@pytest.mark.parametrize('block_file_name', ['block_1.json', 'block_1234567.json', 'block_.json'])
def test_pow_validation_block_headers(block_file_name: str) -> None:
    """Validate proof-of-work for each bundled Homestead block fixture.

    NOTE(review): the pytest marks were restored from stripped-decorator
    remnants ('.slow', '.parametrize'). The fixture name 'block_.json'
    looks like it lost its block number in the same mangling — confirm
    against the assets/blocks directory.
    """
    block_str_data = cast(bytes, pkgutil.get_data('ethereum', f'assets/blocks/{block_file_name}')).decode()
    block_json_data = json.loads(block_str_data)
    load = Load('Homestead', 'homestead')
    header: Header = cast(Header, load.json_to_header(block_json_data))
    validate_proof_of_work(header)
def _pack_string(x, write):
    """Serialize `x` as a msgpack string (fixstr / str8 / str16 / str32).

    The UTF-8 payload length selects the header form; strings longer than
    2**32 - 1 bytes cannot be represented and raise UnsupportedTypeException.
    """
    payload = x.encode('utf-8')
    length = len(payload)
    if length <= 31:
        # fixstr: length lives in the low 5 bits of the type byte (0xa0).
        write(_struct_pack('B', 160 | length))
    elif length <= 0xFF:
        write(b'\xd9')
        write(_struct_pack('B', length))
    elif length <= 0xFFFF:
        write(b'\xda')
        write(_struct_pack('>H', length))
    elif length <= 0xFFFFFFFF:
        write(b'\xdb')
        write(_struct_pack('>I', length))
    else:
        raise UnsupportedTypeException('huge string')
    write(payload)
def add_to_excluded_apps_txt(app, bench_path='.'):
    """Add `app` to the bench's excluded_apps.txt (apps skipped on update).

    Raises ValueError if the app is 'frappe' (never excludable) or is not
    installed under <bench_path>/apps. Returns the result of
    write_excluded_apps_txt (rewritten even if `app` was already listed).
    """
    if (app == 'frappe'):
        raise ValueError('Frappe app cannot be excluded from update')
    # Fix: check the bench's own apps directory; previously this listed
    # './apps' relative to the current working directory, silently ignoring
    # bench_path.
    if (app not in os.listdir(os.path.join(bench_path, 'apps'))):
        raise ValueError(f'The app {app} does not exist')
    apps = get_excluded_apps(bench_path=bench_path)
    if (app not in apps):
        apps.append(app)
    return write_excluded_apps_txt(apps, bench_path=bench_path)
class HJKRingDetection():
    """Exhaustive ring perception on a molecular graph (Hanser/Jauch/Kollhoff
    style path-graph collapse).

    Vertices are removed one by one; paths meeting at the removed vertex are
    concatenated, and any path that closes on itself is collected as a ring.
    Rings are stored with the first vertex repeated at the end (hence the
    ``[:-1]`` when emitting results).
    """

    def __init__(self, mgraph, max_iterations=8000000):
        # Copy the adjacency lists so pruning does not mutate the caller's graph.
        self.mgraph = {key: [x for x in values] for (key, values) in mgraph.items()}
        self.rings = []
        self._iterations = 0
        # Safety valve against combinatorial blow-up on pathological graphs.
        self._max_iterations = max_iterations
        self._is_failed = False

    def scan(self, keep_chorded_rings=False, keep_equivalent_rings=False):
        """Run the full perception; return rings as tuples of vertices.

        keep_chorded_rings: skip the chordless-ring filtering pass.
        keep_equivalent_rings: keep symmetry-equivalent rings of equal size.
        """
        self.prune()
        self.build_pgraph()
        self.vertices = self._get_sorted_vertices()
        while self.vertices:
            # Always collapse the current lowest-degree vertex first.
            self._remove_vertex(self.vertices[0])
        if (not keep_chorded_rings):
            self.find_chordless_rings(keep_equivalent_rings)
        output_rings = []
        for ring in self.rings:
            # Drop the duplicated closing vertex.
            output_rings.append(tuple(ring[:(- 1)]))
        return output_rings

    def _get_sorted_vertices(self):
        """Vertices sorted by degree, lowest first."""
        vertices = ((k, len(v)) for (k, v) in self.mgraph.items())
        return [x[0] for x in sorted(vertices, key=itemgetter(1))]

    def prune(self):
        """Iteratively strip degree-1 vertices — they can never lie on a ring."""
        while True:
            prune = []
            for (node, neighbors) in self.mgraph.items():
                if (len(neighbors) == 1):
                    prune.append((node, neighbors))
            if (len(prune) == 0):
                break
            for (node, neighbors) in prune:
                self.mgraph.pop(node)
                for n in neighbors:
                    self.mgraph[n].remove(node)

    def build_pgraph(self, prune=True):
        """Build the initial path graph: one 2-vertex path per unique edge."""
        self.pgraph = []
        for (node, neigh) in self.mgraph.items():
            for n in neigh:
                # Deduplicate undirected edges via set comparison.
                edge = set((node, n))
                if (not (edge in self.pgraph)):
                    self.pgraph.append(edge)
        self.pgraph = [list(x) for x in self.pgraph]

    def _remove_vertex(self, vertex):
        """Collapse `vertex`: join every pair of paths touching it, keeping
        closed results as rings and open ones as longer paths."""
        visited = {}
        # NOTE(review): `remove` is assigned but never used — looks dead.
        remove = []
        pool = []
        for path in self.pgraph:
            if self._has_vertex(vertex, path):
                pool.append(path)
        for (i, path1) in enumerate(pool):
            for (j, path2) in enumerate(pool):
                if (i == j):
                    continue
                self._iterations += 1
                if (self._iterations > self._max_iterations):
                    # Budget exhausted: flag failure and stop pairing for path1.
                    self._is_failed = True
                    break
                # Unordered pair id so (i, j) and (j, i) are processed once.
                pair_id = tuple(set((i, j)))
                if (pair_id in visited):
                    continue
                visited[pair_id] = None
                common = list((set(path1) & set(path2)))
                common_count = len(common)
                # Valid joins share either just `vertex` (1) or both ends (2).
                if (not (1 <= common_count <= 2)):
                    continue
                joint_path = self._concatenate_path(path1, path2, vertex)
                is_ring = (joint_path[0] == joint_path[(- 1)])
                # Two shared vertices without closure means an invalid overlap.
                if ((common_count == 2) and (not is_ring)):
                    continue
                if is_ring:
                    self._add_ring(joint_path)
                elif (not (joint_path in self.pgraph)):
                    self.pgraph.append(joint_path)
        for p in pool:
            self.pgraph.remove(p)
        self.vertices.remove(vertex)

    def _add_ring(self, ring):
        """Store `ring` unless a ring with the same vertex set already exists."""
        r = set(ring)
        for candidate in self.rings:
            if (r == set(candidate)):
                return
        self.rings.append(ring)

    def _has_vertex(self, vertex, edge):
        """Return the path oriented to start at `vertex`, or None if absent
        from either end. (Callers currently use this only as a truth test.)"""
        if (edge[0] == vertex):
            return edge
        if (edge[(- 1)] == vertex):
            return edge[::(- 1)]
        return None

    def _concatenate_path(self, path1, path2, v):
        """Join the two paths at their shared vertex `v` (in place reversal:
        path1 is made to end at v, path2 to start at v)."""
        if (not (path1[(- 1)] == v)):
            path1.reverse()
        if (not (path2[0] == v)):
            path2.reverse()
        return (path1 + path2[1:])

    def _edge_in_pgraph(self, edge):
        """True if an identical unordered, same-length path is in pgraph."""
        e = set(edge)
        for p in self.pgraph:
            if ((e == set(p)) and (len(p) == len(edge))):
                return True
        return False

    def find_chordless_rings(self, keep_equivalent_rings):
        """Keep only chordless rings: a ring with a chord decomposes into a
        smaller stored ring plus the chord, and is rejected."""
        self.rings.sort(key=len, reverse=False)
        chordless_rings = []
        ring_edges = []
        rings_set = [set(x) for x in self.rings]
        # Precompute each ring's edge list as unordered vertex pairs.
        for r in self.rings:
            edges = []
            for i in range((len(r) - 1)):
                edges.append(tuple(set((r[i], r[((i + 1) % len(r))]))))
            edges = sorted(edges, key=itemgetter(0))
            ring_edges.append(edges)
        ring_contacts = {}
        for (i, r1) in enumerate(self.rings):
            chordless = True
            r1_edges = ring_edges[i]
            ring_contacts[i] = []
            for (j, r2) in enumerate(self.rings):
                if (i == j):
                    continue
                # Only strictly smaller rings can evidence a chord in r1.
                if (len(r2) >= len(r1)):
                    continue
                r2_edges = ring_edges[j]
                shared = (set(r1_edges) & set(r2_edges))
                if (len(shared) < 1):
                    continue
                ring_contacts[i].append(j)
                # Edges of r2 not on r1 (the potential chord path) and vice versa.
                core_edges = [x for x in r2_edges if (not (x in r1_edges))]
                chord = [x for x in r1_edges if (not (x in r2_edges))]
                ring_new = []
                for edge in (chord + core_edges):
                    ring_new.append(edge[0])
                    ring_new.append(edge[1])
                ring_new = set(ring_new)
                # If the recombination is itself a known, smaller ring, r1 has a chord.
                if ((ring_new in rings_set) and (len(ring_new) < (len(r1) - 1))):
                    chordless = False
                    break
            if chordless:
                chordless_rings.append(i)
            ring_contacts[i] = set(ring_contacts[i])
        if (not keep_equivalent_rings):
            chordless_rings = self._remove_equivalent_rings(chordless_rings, ring_contacts)
        self.rings = [self.rings[x] for x in chordless_rings]
        return

    def _remove_equivalent_rings(self, chordless_rings, ring_contacts):
        """Among equal-size rings touching a common smaller ring, drop those
        that differ from a kept ring only outside that shared ring."""
        size_clusters = {}
        for ring_id in chordless_rings:
            # Isolated rings (no contacts) have no equivalents to consider.
            if (len(ring_contacts[ring_id]) == 0):
                continue
            size = (len(self.rings[ring_id]) - 1)
            if (not (size in size_clusters)):
                size_clusters[size] = []
            size_clusters[size].append(ring_id)
        remove = []
        for (size, ring_pool) in size_clusters.items():
            for ri in ring_pool:
                if (ri in remove):
                    continue
                for rj in ring_pool:
                    if (ri == rj):
                        continue
                    common_neigh = (ring_contacts[ri] & ring_contacts[rj])
                    for c in common_neigh:
                        # Equivalent if both rings add the same vertices beyond
                        # the shared contact ring c.
                        d1 = (set(self.rings[ri]) - set(self.rings[c]))
                        d2 = (set(self.rings[rj]) - set(self.rings[c]))
                        if (d1 == d2):
                            remove.append(rj)
        chordless_rings = [i for i in chordless_rings if (not (i in set(remove)))]
        return chordless_rings
# NOTE(review): the bare '_options' / '_context' lines and the two bare
# option tuples below look like remnants of stripped click decorators
# (e.g. '@click.option(...)' stacks plus a pass-context decorator applied
# to `cli`). They are no-ops / errors as written — restore before use.
_options
('--database-engine', '--database', type=click.Choice([DBType.SQLITE, DBType.MEMORY]), default=DBType.SQLITE, help='database engine to use')
('--tool', type=click.Choice(list(PARSERS.keys())), default='pysa', help='tool the data is coming from')
_context
def cli(ctx: click.Context, repository: str, database_name: str, database_engine: str, tool: str) -> None:
    """CLI entry point: build the shared Context (repository, DB handle and
    the tool's parser class) and stash it on ctx.obj for subcommands."""
    ctx.obj = Context(repository=repository, database=DB(database_engine, os.path.expanduser(database_name), assertions=True), tool=tool, parser_class=PARSERS[tool])
    LOG.debug(f'Context: {ctx.obj}')
def get_unique_slug(model_instance, slugable_field_name, slug_field_name):
    """Build a slug from `slugable_field_name` that is unique across the
    instance's model, appending '-1', '-2', ... while collisions exist."""
    base_slug = slugify(getattr(model_instance, slugable_field_name))
    manager = model_instance.__class__._default_manager
    candidate = base_slug
    suffix = 1
    while manager.filter(**{slug_field_name: candidate}).exists():
        candidate = f'{base_slug}-{suffix}'
        suffix += 1
    return candidate
def _record_buffer(records, buffer_size=DEFAULT_BUFFER_SIZE):
    """Generator-as-context: spool pickled `records` into a temp file, then
    yield a zero-argument callable that replays them as an iterator.

    The callable can be invoked multiple times (each call seeks back to the
    start). SpooledTemporaryFile keeps everything in memory until
    `buffer_size` bytes, then spills to disk.
    """
    with tempfile.SpooledTemporaryFile(buffer_size, mode='wb+') as tf:
        pickler = pickle.Pickler(tf)
        for record in records:
            pickler.dump(record)

        def record_iter():
            # Rewind and stream records back until the pickle stream ends.
            tf.seek(0)
            # NOTE(review): reads via the private ._file handle — presumably
            # to work around SpooledTemporaryFile's incomplete file API on
            # older Pythons; confirm before changing.
            unpickler = pickle.Unpickler(tf._file)
            while True:
                try:
                    (yield unpickler.load())
                except EOFError:
                    break
        # Single yield keeps the temp file open for the consumer's lifetime.
        (yield record_iter)
class MagicProxy():
    """Placeholder that materializes a real child mock for a magic-method
    name the first time it is called or read as an attribute."""

    def __init__(self, name, parent):
        self.name = name
        self.parent = parent

    def create_mock(self):
        """Build the child mock, attach it to the parent under our name, and
        wire up its default return value."""
        attr_name = self.name
        owner = self.parent
        child = owner._get_child_mock(name=attr_name, _new_name=attr_name, _new_parent=owner)
        setattr(owner, attr_name, child)
        _set_return_value(owner, child, attr_name)
        return child

    def __call__(self, *args, **kwargs):
        # Calling the proxy creates the real mock and forwards the call.
        return self.create_mock()(*args, **kwargs)

    def __get__(self, obj, _type=None):
        # Attribute access replaces the proxy with the real mock.
        return self.create_mock()
# NOTE(review): the statement below looks like the remnant of a stripped
# Dash decorator (normally '@<app>.callback(...)' directly above the
# function) — as a bare expression it does not register the callback.
_app.callback(Output(component_id='galaxy-tab-ui', component_property='style'), [Input('tabs-container', 'value')])
def show_hide_galaxy_tab_ui(tab_value):
    """Return the CSS style dict for the galaxy-map tab container: hidden by
    default, 'display: block' only when the galaxy tab is active."""
    style_dict = {'display': 'none', 'width': f'{config.CONFIG.plot_width}px', 'margin': 'auto', 'text-align': 'center', 'padding-left': '1%', 'padding-right': '1%', 'background-color': BACKGROUND_DARK}
    if (tab_value == config.GALAXY_MAP_TAB):
        style_dict['display'] = 'block'
    return style_dict
class Test_Datapath(unittest.TestCase):
    """Tests for ryu.controller.controller.Datapath: port-dict deprecation
    warnings per OF version, and message reassembly in _recv_loop."""

    def _test_ports_accessibility(self, ofproto_parser, msgs_len):
        """Mutate dp.ports and assert exactly `msgs_len` UserWarnings fire."""
        # Patch set_state so constructing Datapath does no real work.
        with mock.patch('ryu.controller.controller.Datapath.set_state'):
            with warnings.catch_warnings(record=True) as msgs:
                warnings.simplefilter('always')
                sock_mock = mock.Mock()
                addr_mock = mock.Mock()
                dp = controller.Datapath(sock_mock, addr_mock)
                dp.ofproto_parser = ofproto_parser
                # Exercise assignment, item-set and item-delete on ports.
                dp.ports = {}
                port_mock = mock.Mock()
                dp.ports[0] = port_mock
                del dp.ports[0]
                self.assertEqual(len(msgs), msgs_len)
                for msg in msgs:
                    self.assertTrue(issubclass(msg.category, UserWarning))

    def test_ports_accessibility_v13(self):
        self._test_ports_accessibility(ofproto_v1_3_parser, 0)

    def test_ports_accessibility_v12(self):
        self._test_ports_accessibility(ofproto_v1_2_parser, 0)

    def test_ports_accessibility_v10(self):
        self._test_ports_accessibility(ofproto_v1_0_parser, 0)

    # NOTE(review): the tuple below looks like the remnant of a stripped
    # '@mock.patch("ryu.base.app_manager", spec=app_manager)' decorator for
    # test_recv_loop (which expects the injected `app_manager_mock`).
    ('ryu.base.app_manager', spec=app_manager)
    def test_recv_loop(self, app_manager_mock):
        """Feed _recv_loop a byte stream split at random boundaries and check
        every OF1.3 message is parsed and dispatched intact."""
        test_messages = ['4-6-ofp_features_reply.packet', '4-14-ofp_echo_reply.packet', '4-14-ofp_echo_reply.packet', '4-4-ofp_packet_in.packet', '4-14-ofp_echo_reply.packet', '4-14-ofp_echo_reply.packet']
        this_dir = os.path.dirname(sys.modules[__name__].__file__)
        packet_data_dir = os.path.join(this_dir, '../../packet_data/of13')
        json_dir = os.path.join(this_dir, '../ofproto/json/of13')
        # Concatenate raw packets and load the expected JSON per message.
        packet_buf = bytearray()
        expected_json = list()
        for msg in test_messages:
            packet_data_file = os.path.join(packet_data_dir, msg)
            packet_buf += open(packet_data_file, 'rb').read()
            json_data_file = os.path.join(json_dir, (msg + '.json'))
            expected_json.append(json.load(open(json_data_file)))

        class SocketMock(mock.MagicMock):
            # Socket stub returning random-sized chunks to exercise framing.
            buf = bytearray()
            random = None

            def recv(self, bufsize):
                size = self.random.randint(1, bufsize)
                out = self.buf[:size]
                self.buf = self.buf[size:]
                return out
        ofp_brick_mock = mock.MagicMock(spec=app_manager.RyuApp)
        app_manager_mock.lookup_service_brick.return_value = ofp_brick_mock
        sock_mock = SocketMock()
        sock_mock.buf = packet_buf
        # Seeded RNG keeps the chunking deterministic across runs.
        sock_mock.random = random.Random('Ryu SDN Framework')
        addr_mock = mock.MagicMock()
        dp = controller.Datapath(sock_mock, addr_mock)
        dp.set_state(handler.MAIN_DISPATCHER)
        ofp_brick_mock.reset_mock()
        dp._recv_loop()
        # Collect every dispatched message event and compare to expectations.
        output_json = list()
        for call in ofp_brick_mock.send_event_to_observers.call_args_list:
            (args, kwargs) = call
            (ev, state) = args
            if (not hasattr(ev, 'msg')):
                continue
            output_json.append(ev.msg.to_jsondict())
            self.assertEqual(state, handler.MAIN_DISPATCHER)
            self.assertEqual(kwargs, {})
        self.assertEqual(expected_json, output_json)
class ConvNet(nn.Module):
    """Minimal conv -> ReLU -> flatten -> linear network (10 outputs)."""

    def __init__(self, height: int, width: int, channels: int, use_combined_linear=False):
        # NOTE(review): `channels` and `use_combined_linear` are accepted but
        # never used — the conv layer hard-codes 3 input channels. Confirm
        # whether callers rely on that before changing the signature's use.
        super(ConvNet, self).__init__()
        self.conv = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.activation = nn.ReLU()
        # Linear input size: spatial output of the conv (same-padded 3x3,
        # stride 1) times its 64 output channels.
        self.linear = nn.Linear((math.prod(infer_conv_output_shape([height, width], kernel_size=3, stride=1, padding=1)) * 64), 10)

    def forward(self, x):
        # (N, 3, H, W) -> conv/ReLU -> flatten per sample -> (N, 10) logits.
        return self.linear(torch.flatten(self.activation(self.conv(x)), 1))
@pytest.mark.integrationtest
@pytest.mark.skipif((not has_postgres_configured), reason='PostgresSQL not configured')
def test_psycopg_select_LIKE(instrument, postgres_connection, elasticapm_client):
    """Span capture for a SELECT ... LIKE query — the literal '%' must not
    break signature extraction.

    NOTE(review): the pytest marks were restored from stripped-decorator
    remnants ('.integrationtest', '.skipif').
    """
    cursor = postgres_connection.cursor()
    query = "SELECT * FROM test WHERE name LIKE 't%'"
    try:
        elasticapm_client.begin_transaction('web.django')
        cursor.execute(query)
        cursor.fetchall()
        elasticapm_client.end_transaction(None, 'test-transaction')
    finally:
        # Assertions live in `finally` so captured spans are always checked
        # (and drained) even if the query itself raised.
        transactions = elasticapm_client.events[TRANSACTION]
        spans = elasticapm_client.spans_for_transaction(transactions[0])
        span = spans[0]
        assert (span['name'] == 'SELECT FROM test')
        assert (span['type'] == 'db')
        assert (span['subtype'] == 'postgresql')
        assert (span['action'] == 'query')
        assert ('db' in span['context'])
        assert (span['context']['db']['instance'] == 'elasticapm_test')
        assert (span['context']['db']['type'] == 'sql')
        assert (span['context']['db']['statement'] == query)
        assert (span['context']['service']['target']['type'] == 'postgresql')
        assert (span['context']['service']['target']['name'] == 'elasticapm_test')
# NOTE(review): decorator reconstructed from the stripped remnant
# '_os(*metadata.platforms)' — confirm against sibling RTA scripts.
@common.requires_os(*metadata.platforms)
def main():
    """RTA: stage an oversized DLL copy plus WER binaries in %TEMP%, pad the
    DLL to 90MB, sideload it via rundll32/wer.exe, then clean up."""
    import os
    from os import path
    import win32file
    if Path(DLL).is_file():
        tempc = path.expandvars('%localappdata%\\Temp\\oversized.dll')
        rta_dll = path.expandvars('%localappdata%\\Temp\\faultrep.dll')
        rta_pe = path.expandvars('%localappdata%\\Temp\\wer.exe')
        win32file.CopyFile(DLL, tempc, 0)
        win32file.CopyFile(WER, rta_pe, 0)
        if Path(tempc).is_file():
            print(f'[+] - {DLL} copied to {tempc}')
            print(f'[+] - File {tempc} will be appended with null bytes to reach 90MB in size.')
            with open(tempc, 'rb+') as binfile:
                # Fix: seek() had lost its offset argument. Seek to one byte
                # short of 90MB and write a single null byte so the file is
                # padded to exactly 90MB, matching the message printed above.
                binfile.seek((90 * 1024 * 1024) - 1)
                binfile.write(b'\x00')
            common.execute(['cmd.exe', '/c', 'copy', tempc, rta_dll])
            if (Path(rta_dll).is_file() and Path(rta_pe).is_file()):
                common.execute(['rundll32.exe', rta_dll, 'DllMain'])
                common.execute(rta_pe)
                common.execute(['taskkill', '/f', '/im', 'notepad.exe'])
                print(f'[+] - Cleanup.')
                win32file.DeleteFile(tempc)
                win32file.DeleteFile(rta_dll)
                win32file.DeleteFile(rta_pe)
                print(f'[+] - RTA Done!')
class ReadFromFolder(object):
    """Sequential image reader over a folder of .jpg/.png/.jpeg/.bmp files,
    yielding them in sorted filename order via read_image()."""

    def __init__(self, folder_path):
        matches = []
        for ext in ['.jpg', '.png', '.jpeg', '.bmp']:
            matches.extend(glob.glob(folder_path + '/*' + ext))
        self.fnames = sorted(matches)
        if not self.fnames:
            raise IOError('The folder has no images: ' + folder_path)
        self.cnt_imgs = 0
        self.cur_filename = ''

    def read_image(self):
        """Return the next image (decoded unchanged), or None when exhausted."""
        if self.cnt_imgs >= len(self.fnames):
            return None
        self.cur_filename = self.fnames[self.cnt_imgs]
        image = cv2.imread(self.cur_filename, cv2.IMREAD_UNCHANGED)
        self.cnt_imgs += 1
        return image

    def __len__(self):
        return len(self.fnames)

    def get_cur_filename(self):
        """Filename of the most recently read image ('' before first read)."""
        return self.cur_filename

    def stop(self):
        # Nothing to release for a folder source; kept for interface parity.
        pass
class SKLearnTransformer(ModelTransformer):
    """Adapts a scikit-learn decision tree into TreeNode records."""

    def __init__(self, model: Any, feature_names: Sequence[str], classification_labels: Optional[Sequence[str]]=None, classification_weights: Optional[Sequence[float]]=None):
        super().__init__(model, feature_names, classification_labels, classification_weights)
        # scikit-learn split semantics: go left when feature <= threshold.
        self._node_decision_type = 'lte'

    def build_tree_node(self, node_index: int, node_data: Tuple[(Union[(int, float)], ...)], value: np.ndarray) -> TreeNode:
        """Build one TreeNode from sklearn tree arrays.

        node_data holds (left_child, right_child, split_feature, threshold,
        _, n_samples); a left_child of -1 marks a leaf. `value` must contain
        exactly one row (single output).
        """
        if (value.shape[0] != 1):
            raise ValueError(f"unexpected multiple values returned from leaf node '{node_index}'")
        if (node_data[0] == (- 1)):  # leaf node
            if (value.shape[1] == 1):
                leaf_value = [float(value[0][0])]
            else:
                # Log-transform the per-class values; non-positive entries map
                # to -inf. Fix: the literal after the unary '-' had been lost
                # in this copy, leaving a syntax error.
                leaf_value = [((- np.inf) if (n <= 0) else math.log(float(n))) for n in value[0]]
            return TreeNode(node_index, decision_type=self._node_decision_type, leaf_value=leaf_value, number_samples=int(node_data[5]))
        else:
            return TreeNode(node_index, decision_type=self._node_decision_type, left_child=int(node_data[0]), right_child=int(node_data[1]), split_feature=int(node_data[2]), threshold=float(node_data[3]), number_samples=int(node_data[5]))
def stability_digest_payload_from_ce_payload(payload: dict):
    """Convert a raw CloudEvent payload dict into a StabilityDigestPayload."""
    from firebase_functions.alerts.crashlytics_fn import StabilityDigestPayload
    digest_date = _util.timestamp_conversion(payload['digestDate'])
    issues = [trending_issue_details_from_ce_payload(raw_issue) for raw_issue in payload['trendingIssues']]
    return StabilityDigestPayload(digest_date=digest_date, trending_issues=issues)
def timed_wait(cv, fn, timeout=(- 1)):
    """Poll fn() until it returns a non-None value or `timeout` expires.

    cv: condition variable (caller must hold its lock) used to sleep between
        polls; woken early by notifications.
    fn: zero-argument callable; its first non-None result is returned.
    timeout: seconds to wait; -1 means use the module-level default_timeout.
    Returns fn()'s value, or None on timeout.
    """
    if (timeout == (- 1)):
        timeout = default_timeout
    end_time = (time.time() + timeout)
    while True:
        val = fn()
        if (val is not None):  # fix: identity comparison, was `val != None`
            return val
        remaining_time = (end_time - time.time())
        # Fix: bail out before waiting instead of handing cv.wait() a
        # negative timeout after the deadline has already passed.
        if (remaining_time <= 0):
            return None
        cv.wait(remaining_time)
def test_passing_enum_to_choice():
    """Enum-typed fields should parse from CLI choices, both as a single
    value default and as a list."""
    class Something(TestSetup):
        favorite_color: Color = field(default=Color.green)
        # default_factory must be a zero-arg callable; the bound `.copy` of a
        # literal list returns a fresh [Color.green] on each instantiation.
        colors: List[Color] = field(default_factory=[Color.green].copy)
    s = Something.setup('')
    assert (s.favorite_color == Color.green)
    assert (s.colors == [Color.green])
    s = Something.setup('--colors [blue,red]')
    assert (s.colors == [Color.blue, Color.red])
class WeakCache(WeakKeyDictionary):
    """Cache keyed by weakly-referenced objects.

    Scalar keys live in a flat WeakKeyDictionary; tuple/list keys are stored
    as a trie of nested WeakKeyDictionary levels, with the value held under
    the module-level ``_WC_Leaf`` sentinel at the final level. Entries vanish
    automatically when any key component is garbage collected.
    """

    def __init__(self):
        # Fix: initialize the WeakKeyDictionary base (sets up its internal
        # .data mapping etc.) so inherited methods are usable; this call was
        # previously missing.
        super().__init__()
        self._tuple_dict = WeakKeyDictionary()
        self._dict = WeakKeyDictionary()

    def __contains__(self, key):
        if isinstance(key, (tuple, list)):
            # Walk the trie one component at a time.
            lookup = self._tuple_dict
            for k in key:
                if (k not in lookup):
                    return False
                else:
                    lookup = lookup[k]
            return (_WC_Leaf in lookup)
        else:
            return (key in self._dict)

    def __getitem__(self, key):
        if isinstance(key, (tuple, list)):
            lookup = self._tuple_dict
            for k in key:
                lookup = lookup[k]  # KeyError propagates on a miss
            return lookup[_WC_Leaf]
        else:
            return self._dict[key]

    def __setitem__(self, key, value):
        if isinstance(key, (tuple, list)):
            # Create intermediate trie levels on demand.
            lookup = self._tuple_dict
            for k in key:
                if (k not in lookup):
                    lookup[k] = WeakKeyDictionary()
                lookup = lookup[k]
            lookup[_WC_Leaf] = value
        else:
            self._dict[key] = value
def read_file(fd):
    """Parse a Flipper SubGhz key file from `fd`.

    Validates the Filetype header and the Security+ 2.0 protocol line, then
    collects the 'Key' and 'Secplus_packet_1' hex words. Returns the two
    joined hex strings, or (None, None) if either is missing. Exits the
    process on an invalid file.
    """
    key_dat = None
    pkt_dat = None
    header = fd.readline().strip()
    parts = header.split(':', 1)
    if not (parts[0].startswith('Filetype') and parts[1].strip() == SUBGHZ_KEY_FILE_TYPE):
        print('invalid filetype')
        sys.exit(0)
    for line in fd:
        parts = line.split(':', 1)
        field_name = parts[0]
        # The three field prefixes are mutually exclusive, so elif is safe.
        if field_name.startswith('Protocol'):
            if parts[1].strip() != 'Security+ 2.0':
                print('invalid Protocol')
                sys.exit(0)
        elif field_name.startswith('Key'):
            key_dat = parts[1].strip().split()
        elif field_name.startswith('Secplus_packet_1'):
            pkt_dat = parts[1].strip().split()
    if _debug:
        print('read_file', key_dat, pkt_dat)
    if key_dat and pkt_dat:
        return (''.join(key_dat), ''.join(pkt_dat))
    return (None, None)
def reg_or_not():
    """Register Google-Calendar-backed alarm functions if the backing service
    address is configured; otherwise warn and register nothing."""
    google_calendar_service_address = os.getenv('DEMO_GOOGLE_CALENDAR_SERVICE_ADDRESS')
    if ((google_calendar_service_address is None) or (google_calendar_service_address.strip() == '')):
        # NOTE(review): logger.warn is the deprecated alias of .warning in the
        # stdlib logging module — confirm which logger implementation this is.
        logger.warn("'DEMO_GOOGLE_CALENDAR_SERVICE_ADDRESS' is not provided, google calendar function will not available")
        return
    # NOTE(review): the bare '_module(...)' expressions below look like
    # stripped decorator remnants (originally something like
    # '@<registry>.register_module(...)' applied to the coroutine that
    # follows each of them) — restore before use.
    _module(name='add_alarm', description='Create an alarm', signature={'date': {'description': "The alarm date, 'YYYY-mm-dd HH:MM:SS format'", 'type': 'string', 'required': True}, 'desc': {'description': 'The event description', 'type': 'string', 'required': True}})
    async def add_alarm(context: CallerContext, date, desc):
        """Create a point-in-time calendar event from a local date string."""
        now = datetime.datetime.strptime(date, '%Y-%m-%d %H:%M:%S').timestamp()
        # Shift the user's local time to service time using the caller's
        # timezone offset (hours -> seconds).
        now -= (context.get_tz_offset() * 3600)
        result = (await do_post((google_calendar_service_address + '/task/add'), {'start_time': now, 'end_time': now, 'summary': desc, 'description': desc}))
        if (not isinstance(result, dict)):
            (await context.reply_text('Sorry, failed to access calendar'))
            return 'Failed to access calendar'
        if (result['code'] == 200):
            (await context.reply_text(f'Alarm have been added: {desc} as {date}'))
            return 'Success'
        (await context.reply_text(f"Sorry, failed to access calendar: {result['message']}"))
        return result['message']
    _module(name='delete_alarm', description='delete all alarms whose ID is in the list', signature={'IDs': {'type': 'array', 'items': {'type': 'string'}, 'description': 'A list of alarm IDs to delete', 'required': True}})
    async def delete_alarm(context: CallerContext, IDs: List[str]):
        """Delete each alarm in IDs that actually exists; report the outcome."""
        result = (await do_get((google_calendar_service_address + '/tasks')))
        if (not isinstance(result, dict)):
            (await context.reply_text('Sorry, failed to access google calendar'))
            return 'Failed to query google calendar'
        if (result['code'] != 200):
            (await context.reply_text(f"Sorry, failed to access google calendar: {result['message']}"))
            return result['message']
        items = result['data']
        if (len(items) == 0):
            (await context.reply_text("Sorry, you don't have any alarm now."))
            return 'Canceled'
        deleted = []
        all_event_ids = [item['id'] for item in items]
        for alarm_id in IDs:
            # Skip ids the service does not know about.
            if (alarm_id not in all_event_ids):
                continue
            result = (await do_post((google_calendar_service_address + f'/task/delete/{alarm_id}'), ''))
            if (not isinstance(result, dict)):
                # NOTE(review): logging.Logger.log takes a level as its first
                # argument — this call (and the one below) looks wrong unless
                # `logger` is a custom object; confirm its API.
                logger.log(f'Failed to delete alarm: {alarm_id}')
                continue
            if (result['code'] != 200):
                logger.log(f"Failed to delete alarm: {alarm_id}: {result['message']}")
                continue
            deleted.append(alarm_id)
        if (len(deleted) == 0):
            (await context.reply_text('Sorry, failed to delete calendar event'))
            return 'Failed'
        msg = 'These alarms are deleted:'
        for alarm_id in deleted:
            for item in items:
                if (item['id'] == alarm_id):
                    msg += f'''
{item['summary']}'''
                    break
        (await context.reply_text(msg))
        return 'Success'
    _module(name='query_alarm', description='query all existing alarm', signature={})
    async def query_alarm(context: CallerContext):
        """List all alarms: human-readable text to the user, a markdown table
        back to the model."""
        result = (await do_get((google_calendar_service_address + '/tasks')))
        if (not isinstance(result, dict)):
            (await context.reply_text('Sorry, failed to access google calendar'))
            return 'Failed to query google calendar'
        if (result['code'] == 200):
            if (len(result['data']) > 0):
                markdown = 'Here is your calendar:\n'
                markdown_to_gpt = '| ID | Date | Event |\n|----|----|----|'
                for item in result['data']:
                    # Render service timestamps in the caller's local time.
                    timestamp = (item['start_time'] + (context.get_tz_offset() * 3600))
                    time_str = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
                    markdown += f'''
{time_str} UTC{context.get_tz_offset_str()}, {item['summary']}'''
                    markdown_to_gpt += f'''
| {item['id']} | {time_str} | {item['summary']} |'''
                (await context.reply_text(markdown))
                return markdown_to_gpt
            else:
                (await context.reply_text("You don't have any calendar event."))
                return 'Success'
        (await context.reply_text(f"Sorry, failed access google calendar: {result['message']}"))
        return result['message']
def upload_task_input_files(task):
    """Upload each of `task`'s input files to the GCS bucket under an
    'hscic/...' name derived from its path, skipping blobs that already
    exist."""
    bucket = StorageClient().get_bucket()
    for local_path in task.input_paths():
        assert local_path[0] == '/'
        assert settings.PIPELINE_DATA_BASEDIR[(- 1)] == '/'
        # Map the absolute local path into the bucket namespace.
        remote_name = 'hscic' + local_path.replace(settings.PIPELINE_DATA_BASEDIR, '/')
        blob = bucket.blob(remote_name)
        if blob.exists():
            print('Skipping %s, already uploaded' % remote_name)
            continue
        print('Uploading %s to %s' % (local_path, remote_name))
        with open(local_path, 'rb') as f:
            blob.upload_from_file(f)
class ShopKeeper(TalkativeNPC):
    """NPC shopkeeper: immune to intimidation-by-violence."""
    # Multiplier applied when selling to the player.
    upsell_factor = AttributeProperty(1.0, autocreate=False)
    # Multiplier applied when buying from the player.
    miser_factor = AttributeProperty(0.5, autocreate=False)
    # Prototype keys for the wares this shopkeeper stocks.
    common_ware_prototypes = AttributeProperty([], autocreate=False)

    def at_damage(self, damage, attacker=None):
        """Shopkeepers shrug off damage entirely; taunt the attacker if known.

        Fix: `attacker` defaults to None but was dereferenced
        unconditionally; environmental damage with no attacker no longer
        crashes.
        """
        if attacker is not None:
            attacker.msg(f'{self.key} brushes off the hit and shouts "|wHey! This is not the way to get a discount!|n"')
class Choropleth(GraphPlotly.Chart):
    """Plotly choropleth map component.

    NOTE(review): `chart`, `layout` and `data` follow the lazy-accessor
    pattern that is normally decorated with @property in this codebase; the
    decorators appear stripped in this copy — confirm against siblings.
    """
    requirements = ('plotly.js',)
    __reqJs = ['plotly.js']

    def chart(self) -> JsPlotly.Pie:
        """Lazily create and cache the JS-side chart object."""
        if (self._chart is None):
            self._chart = JsPlotly.Pie(component=self, page=self.page, js_code=self.js_code)
        return self._chart

    def layout(self) -> LayoutGeoMap:
        """Lazily create and cache the geo-map layout."""
        if (self._layout is None):
            self._layout = LayoutGeoMap(page=self.page, component=self)
        return self._layout

    def data(self):
        # Most recently added trace.
        return self._traces[(- 1)]

    def add_trace(self, data, type: str='choropleth', mode: str=None):
        """Append a DataChoropleth trace built from `data`; returns self.

        NOTE(review): when `type` is given, the value written comes from
        self.options.type rather than the argument — verify this is the
        intended precedence before relying on the parameter.
        """
        c_data = dict(data)
        if (type is not None):
            c_data['type'] = self.options.type
        if (mode is not None):
            c_data['mode'] = (self.options.mode or mode)
        self._traces.append(DataChoropleth(page=self.page, attrs=c_data, component=self))
        return self

    def _js__convertor__(self):
        # JS snippet merging the shared options into each series dataset.
        return '\nvar result = []; data.forEach(function(series, i){var dataset = Object.assign(series, options); result.push(dataset)})'
def fetch_production(zone_key: str, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list:
    """Fetch the live production breakdown for `zone_key`.

    Only real-time data is supported; requesting a past datetime raises
    NotImplementedError. Returns a single-element list with the filled
    production template.
    """
    if target_datetime is not None:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    http = session or requests.session()
    data, timestamp = fetch_data(http)
    result = template_response(zone_key, timestamp, DATA_SOURCE)
    production = result['production']
    # Each production type aggregates one or more 'd_<id>' cells.
    for production_type, cell_ids in CELL_MAPPING.items():
        for cell_id in cell_ids:
            production[production_type] += data['d_' + cell_id]
    return [result]
class Mxp():
    """Telnet MXP (MUD eXtension Protocol) option negotiation handler."""

    def __init__(self, protocol):
        self.protocol = protocol
        self.protocol.protocol_flags['MXP'] = False
        if settings.MXP_ENABLED:
            # Offer MXP; the peer's answer routes to do_mxp / no_mxp.
            self.protocol.will(MXP).addCallbacks(self.do_mxp, self.no_mxp)

    def no_mxp(self, option):
        """Peer refused MXP: leave the flag off and finish the handshake."""
        self.protocol.protocol_flags['MXP'] = False
        self.protocol.handshake_done()

    def do_mxp(self, option):
        """Peer accepted MXP: enable it (or retract if disabled server-side)."""
        if settings.MXP_ENABLED:
            self.protocol.protocol_flags['MXP'] = True
            self.protocol.requestNegotiation(MXP, b'')
        else:
            self.protocol.wont(MXP)
        # NOTE(review): indentation was lost in this copy; handshake_done()
        # is placed after the if/else (run in both branches, mirroring
        # no_mxp) — confirm against upstream before relying on it.
        self.protocol.handshake_done()
def resize_all_icons(repodirs):
    """Resize every PNG icon found in each repo dir, once per screen density."""
    for repo_dir in repodirs:
        for density in screen_densities:
            pattern = os.path.join(get_icon_dir(repo_dir, density), '*.png')
            for icon_path in glob.glob(pattern):
                resize_icon(icon_path, density)
# NOTE(review): decorator reconstructed from the stripped remnant '_event'.
@attrs_event
class PeopleAdded(ThreadEvent):
    """Event: people were added to a group thread."""
    # Group the people were added to.
    thread = attr.ib(type='_threads.Group')
    # The users that were added.
    added = attr.ib(type=Sequence['_threads.User'])
    # When the addition happened.
    at = attr.ib(type=datetime.datetime)

    # Restored @classmethod: the method takes `cls` and builds an instance.
    @classmethod
    def _parse(cls, session, data):
        """Build the event from a raw delta payload."""
        (author, thread, at) = cls._parse_metadata(session, data)
        added = [_threads.User(session=session, id=x['userFbId']) for x in data['addedParticipants']]
        return cls(author=author, thread=thread, added=added, at=at)
def annotate(obj, owner, **kwargs):
    """Attach provenance metadata to `obj` in place and return it.

    Dispatches on duck-typing: objects exposing `_metadata` are treated as
    pandas-like, objects exposing a `climetlab` accessor as xarray-like.
    Anything else raises NotImplementedError.
    """
    if hasattr(obj, '_metadata'):
        _annotate_pandas(obj, owner, **kwargs)
    elif hasattr(obj, 'climetlab'):
        _annotate_xarray(obj, owner, **kwargs)
    else:
        raise NotImplementedError('Cannot annotate object of type', type(obj))
    return obj
class ParameterDescription(object):
def __init__(self, name, msg_type, value=None, widget=None):
self._name = str(name)
self._type = msg_type
self._value = None
self._value_org = None
self.read_only = False
self.path_type = ''
self.hint = ''
self._min = None
self._max = None
self._tags = {}
self._read_value(value)
self._widget = widget
try:
(self._base_type, self._is_array_type, self._array_length) = roslib.msgs.parse_type(self._type)
except Exception:
pass
if (msg_type == 'binary'):
self._base_type = msg_type
def _read_value(self, value):
if isinstance(value, dict):
for (key, val) in value.items():
if key.startswith(':'):
if (key == ':value'):
self._value = val
self._value_org = val
elif (key == ':ro'):
self.read_only = val
elif (key == ':hint'):
self.hint = val
elif (key == ':path'):
self.path_type = val
elif (key == ':min'):
self._min = val
elif (key == ':max'):
self._max = val
self._tags[key] = val
else:
self._value = value
self._value_org = value
def __repr__(self):
return ('%s [%s]: %s' % (self._name, self._type, utf8(self._value)))
def is_primitive_type(cls, value_type):
result = (value_type in roslib.msgs.PRIMITIVE_TYPES)
result = (result or (value_type in ['string', 'int', 'float', 'time', 'duration', 'binary', 'unicode']))
return result
def add_tag(self, key, value):
self._tags[key] = value
def origin_value(self):
return self._value_org
def clear_origin_value(self):
self._value_org = None
def changed(self):
return (utf8(self.origin_value()) != utf8(self._value))
def name(self):
return self._name
def setWidget(self, widget):
self._widget = widget
if (widget is not None):
self.addCachedValuesToWidget()
def widget(self):
return self._widget
def fullName(self):
result = self.name()
widget = self._widget
while (widget is not None):
if isinstance(widget, (MainBox, GroupBox, ArrayBox)):
result = roslib.names.ns_join(widget.name, result)
widget = widget.parent()
return result
def isArrayType(self):
return (self._is_array_type or (self._type in ['[]']))
def arrayLength(self):
return self._array_length
def isPrimitiveType(self):
result = self.is_primitive_type(self._base_type)
result = (result or (self._type in ['[]']))
return result
def isTimeType(self):
return (self._base_type in ['time', 'duration'])
def isBinaryType(self):
return (self._base_type in ['binary'])
def baseType(self):
return self._base_type
def msgType(self):
return self._type
def updateValueFromField(self):
if self.read_only:
return
result = self.widget().current_text()
self._value = self.updateValue(result, raise_on_min_max=False)
if self.changed():
nm.history().addParamCache(self.fullName(), self._value)
def updateValue(self, value, raise_on_min_max=True):
rvalue = value
try:
if isinstance(value, (dict, list)):
rvalue = value
elif value:
if self.isArrayType():
if (('int' in self.baseType()) or ('byte' in self.baseType())):
rvalue = list(map(int, value.lstrip('[').rstrip(']').split(',')))
elif ('float' in self.baseType()):
rvalue = list(map(float, value.lstrip('[').rstrip(']').split(',')))
elif ('bool' in self.baseType()):
rvalue = list(map(str2bool, value.lstrip('[').rstrip(']').split(',')))
elif self.isBinaryType():
rvalue = value
else:
try:
rvalue = value.lstrip('[').rstrip(']')
rvalue = ruamel.yaml.load(('[%s]' % rvalue), Loader=ruamel.yaml.Loader)
if (rvalue is None):
rvalue = []
except ruamel.yaml.MarkedYAMLError as e:
raise Exception(('Field [%s] yaml error: %s' % (self.fullName(), utf8(e))))
if ((self.arrayLength() is not None) and (self.arrayLength() != len(rvalue))):
raise Exception(''.join(['Field [', self.fullName(), '] has incorrect number of elements: ', utf8(len(rvalue)), ' != ', str(self.arrayLength())]))
elif (('int' in self.baseType()) or ('byte' in self.baseType())):
rvalue = int(value)
elif ('float' in self.baseType()):
rvalue = float(value)
elif ('bool' in self.baseType()):
if isinstance(value, bool):
rvalue = value
else:
rvalue = str2bool(value)
elif self.isBinaryType():
rvalue = utf8(value)
elif self.isTimeType():
if (value == 'now'):
rvalue = 'now'
else:
try:
val = eval(value)
if isinstance(val, dict):
rvalue = val
else:
secs = int(val)
nsecs = int(((val - secs) * ))
rvalue = {'secs': secs, 'nsecs': nsecs}
except Exception:
rvalue = {'secs': 0, 'nsecs': 0}
elif (sys.version_info[0] <= 2):
rvalue = value.encode(sys.getfilesystemencoding())
else:
rvalue = value
elif self.isArrayType():
arr = []
rvalue = arr
elif (('int' in self.baseType()) or ('byte' in self.baseType())):
rvalue = 0
elif ('float' in self.baseType()):
rvalue = 0.0
elif ('bool' in self.baseType()):
rvalue = False
elif self.isBinaryType():
rvalue = utf8(value)
elif self.isTimeType():
rvalue = {'secs': 0, 'nsecs': 0}
else:
rvalue = ''
except Exception as e:
raise Exception(("Error while set value '%s', for '%s': %s" % (utf8(value), self.fullName(), utf8(e))))
if (self._min is not None):
if (rvalue < self._min):
if raise_on_min_max:
raise Exception(('%s is smaller than minimum: %s' % (utf8(rvalue), utf8(self._min))))
rvalue = self._min
if (self._max is not None):
if (rvalue > self._max):
if raise_on_min_max:
raise Exception(('%s is greater than maximum: %s' % (utf8(rvalue), utf8(self._max))))
rvalue = self._max
return rvalue
def value(self, with_tags=False):
if ((not self.isPrimitiveType()) and (not (self.widget() is None))):
return self.widget().value(with_tags)
elif self.isPrimitiveType():
self.updateValueFromField()
if with_tags:
result = {}
result.update(self._tags)
result[':value'] = self._value
return result
return self._value
def removeCachedValue(self, value):
nm.history().removeParamCache(self.fullName(), value)
def createTypedWidget(self, parent):
result = None
if self.isPrimitiveType():
result = ValueWidget(self, parent)
elif self.isArrayType():
result = ArrayBox(self.name(), self._type, dynamic=(self.arrayLength() is None), parent=parent)
else:
result = GroupBox(self.name(), self._type, parent=parent)
return result
def addCachedValuesToWidget(self):
    """Populate the editor widget with previously cached values, if any."""
    widget = self.widget()
    if isinstance(widget, ValueWidget):
        widget.add_cached_values()
class TabSetHtmlTransform(SphinxPostTransform):
    """Rewrite ``tab-set`` component nodes into radio-input/label/content
    triples for the HTML builder, assigning document-unique ids."""

    default_priority = 200
    formats = ('html',)

    def run(self) -> None:
        """Transform every ``tab-set`` node in the document."""
        tab_set_id_base = 'sd-tab-set-'
        tab_set_id_num = 0
        tab_item_id_base = 'sd-tab-item-'
        tab_item_id_num = 0
        for tab_set in findall(self.document)((lambda node: is_component(node, 'tab-set'))):
            tab_set_identity = (tab_set_id_base + str(tab_set_id_num))
            tab_set_id_num += 1
            children = []
            # First pass: find which tab is selected; warn on duplicates and
            # keep only the first one.
            selected_idx = None
            for (idx, tab_item) in enumerate(tab_set.children):
                if tab_item.get('selected', False):
                    if (selected_idx is None):
                        selected_idx = idx
                    else:
                        LOGGER.warning(f"Multiple selected 'tab-item' directives [{WARNING_TYPE}.tab]", location=tab_item, type=WARNING_TYPE, subtype='tab')
            # Default to the first tab when none is explicitly selected.
            selected_idx = (0 if (selected_idx is None) else selected_idx)
            # Second pass: replace each tab-item (label + content pair) with
            # an <input>, a <label> and the content node.
            for (idx, tab_item) in enumerate(tab_set.children):
                try:
                    # each tab-item is expected to hold exactly (label, content)
                    (tab_label, tab_content) = tab_item.children
                except ValueError:
                    print(tab_item)
                    raise
                tab_item_identity = (tab_item_id_base + str(tab_item_id_num))
                tab_item_id_num += 1
                # hidden radio input; 'checked' marks the initially open tab
                input_node = sd_tab_input('', id=tab_item_identity, set_id=tab_set_identity, type='radio', checked=(idx == selected_idx))
                (input_node.source, input_node.line) = (tab_item.source, tab_item.line)
                children.append(input_node)
                # clickable label tied to the input via input_id
                label_node = sd_tab_label('', *tab_label.children, input_id=tab_item_identity, classes=tab_label['classes'])
                if tab_label.get('ids'):
                    label_node['ids'] += tab_label['ids']
                if ('sync_id' in tab_label):
                    # propagate cross-page tab synchronisation key
                    label_node['sync_id'] = tab_label['sync_id']
                (label_node.source, label_node.line) = (tab_item.source, tab_item.line)
                children.append(label_node)
                children.append(tab_content)
            # Replace the tab-set's children with the flattened triples.
            tab_set.children = children
class HasAdLabels(object):
    """Mixin adding ad-label management to API objects.

    NOTE(review): calling either method with the default ``labels=None``
    will raise a TypeError when the list comprehension iterates it —
    confirm callers always pass a list.
    """

    def add_labels(self, labels=None):
        """Attach the given label ids to this object via a POST call."""
        payload = {'adlabels': [{'id': label_id} for label_id in labels]}
        return self.get_api_assured().call('POST', (self.get_id_assured(), 'adlabels'), params=payload)

    def remove_labels(self, labels=None):
        """Detach the given label ids from this object via a DELETE call."""
        payload = {'adlabels': [{'id': label_id} for label_id in labels]}
        return self.get_api_assured().call('DELETE', (self.get_id_assured(), 'adlabels'), params=payload)
class OptionSeriesPyramid3dSonificationContexttracksMappingRate(Options):
    """Generated accessors for the sonification rate mapping options.

    NOTE(review): every name below is defined twice — a getter-style def
    followed by a setter-style def. As written, the second definition
    shadows the first, so only the setters survive. This looks like
    ``@property`` / ``@<name>.setter`` decorators were lost in extraction
    or generation — verify against the code generator before relying on
    the getter forms.
    """

    def mapFunction(self):
        # getter form — shadowed by the setter def below
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # setter form — this definition wins
        self._config(value, js_type=False)

    def mapTo(self):
        # getter form — shadowed by the setter def below
        return self._config_get(None)

    def mapTo(self, text: str):
        # setter form — this definition wins
        self._config(text, js_type=False)

    def max(self):
        # getter form — shadowed by the setter def below
        return self._config_get(None)

    def max(self, num: float):
        # setter form — this definition wins
        self._config(num, js_type=False)

    def min(self):
        # getter form — shadowed by the setter def below
        return self._config_get(None)

    def min(self, num: float):
        # setter form — this definition wins
        self._config(num, js_type=False)

    def within(self):
        # getter form — shadowed by the setter def below
        return self._config_get(None)

    def within(self, value: Any):
        # setter form — this definition wins
        self._config(value, js_type=False)
def _seconds_to_hours_minutes(total_seconds):
    """Split a duration in seconds into (whole hours, remaining minutes)."""
    return total_seconds // 3600, (total_seconds % 3600) // 60


def generate_sleep_modal_summary(days=7):
    """Build the sleep-modal dash layout: last-N-days score, total-sleep and
    wake-up-time charts plus the correlation table and group-by controls.

    :param days: lookback window for the summary query (default 7).
        NOTE(review): the headline texts hard-code "7 days/nights" even when
        *days* differs — confirm whether they should interpolate ``days``.
    :returns: list of dash ``html.Div`` rows.

    FIX: minutes were previously computed as ``(x // 3600) % 60`` (hours mod
    60) instead of ``(x % 3600) // 60`` in the "Xh Ym" texts.
    """
    date = datetime.now().date() - timedelta(days=days)
    df = pd.read_sql(
        sql=app.session.query(ouraSleepSummary.report_date, ouraSleepSummary.score,
                              ouraSleepSummary.total, ouraSleepSummary.bedtime_end_local)
            .filter(ouraSleepSummary.report_date > date).statement,
        con=engine, index_col='report_date')
    app.session.remove()
    # Project wake-up timestamps onto a single reference day (1970-01-01) so
    # they can share a time-of-day y axis.
    df['wakeup'] = df['bedtime_end_local'].apply(
        lambda x: datetime.strptime('1970-01-01', '%Y-%m-%d')
        + timedelta(hours=x.hour) + timedelta(minutes=x.minute))
    sleep_last_7_graph = dcc.Graph(config={'displayModeBar': False}, figure={
        'data': [go.Bar(name='Sleep', x=df.index, y=df['score'], yaxis='y', text=df['score'],
                        hoverinfo='text',
                        hovertext=['Sleep: <b>{:.0f}'.format(x) for x in df['score']],
                        textposition='auto', marker={'color': light_blue})],
        'layout': go.Layout(height=300, font=dict(size=10, color=white),
                            xaxis=dict(showline=True, color=white, showticklabels=True,
                                       showgrid=False, tickvals=df.index, tickformat='%a'),
                            yaxis=dict(showticklabels=True, showgrid=True,
                                       gridcolor='rgb(66,66,66)', color=white, tickformat=',d'),
                            showlegend=False, margin={'l': 40, 'b': 20, 't': 0, 'r': 0})})
    total_sleep_last_7_graph = dcc.Graph(config={'displayModeBar': False}, figure={
        'data': [go.Bar(name='Total Sleep Time', x=df.index, y=(df['total'] / 3600), yaxis='y',
                        hoverinfo='text',
                        text=['Total Sleep: <b>{:.0f}h {:.0f}m'.format(*_seconds_to_hours_minutes(x))
                              for x in df['total']],
                        marker={'color': light_blue})],
        'layout': go.Layout(height=300, font=dict(size=10, color=white),
                            xaxis=dict(showline=True, color=white, showticklabels=True,
                                       showgrid=False, tickvals=df.index, tickformat='%a'),
                            yaxis=dict(showticklabels=True, showgrid=True,
                                       gridcolor='rgb(66,66,66)', color=white, tickformat=',d'),
                            showlegend=False, margin={'l': 40, 'b': 20, 't': 0, 'r': 0})})
    wake_up_last_7_graph = dcc.Graph(config={'displayModeBar': False}, figure={
        'data': [go.Scatter(name='Wake Up Time', x=df.index, y=df['wakeup'], yaxis='y',
                            text=df['wakeup'],
                            hovertext=['Wake Up: <b>{}'.format(datetime.strftime(x, '%I:%M%p'))
                                       for x in df['wakeup']],
                            hoverinfo='text', mode='lines+markers',
                            line={'dash': 'dot', 'color': white, 'width': 2},
                            showlegend=False, marker={'size': 5})],
        'layout': go.Layout(height=300, font=dict(size=10, color=white),
                            xaxis=dict(showline=True, color=white, showticklabels=True,
                                       showgrid=False, tickvals=df.index, tickformat='%a'),
                            yaxis=dict(showticklabels=True, showgrid=True,
                                       gridcolor='rgb(66,66,66)', color=white, tickformat='%I:%M%p'),
                            showlegend=False, margin={'l': 50, 'b': 20, 't': 0, 'r': 0})})
    return [
        html.Div(id='sleep-modal-last-7-container', className='row align-items-center text-center mb-2',
                 style={'whiteSpace': 'normal'}, children=[
            html.Div(id='sleep-score-last-7', className='col-lg-4', children=[
                html.Div(id='sleep-score-last-7-title', className='col-lg-12', children=[
                    html.P('Your average sleep score for the last 7 days is {:.0f}'.format(df['score'].mean()))]),
                html.Div(id='sleep-score-last-7-chart', className='col-lg-12', children=[sleep_last_7_graph])]),
            html.Div(id='total-sleep-last-7', className='col-lg-4', children=[
                html.Div(id='total-sleep-last-7-title', className='col-lg-12', children=[
                    html.P('Over the last 7 nights you slept on average {:.0f}h {:.0f}m per night'.format(
                        *_seconds_to_hours_minutes(df['total'].mean())))]),
                html.Div(id='total-sleep-last-7-chart', className='col-lg-12', children=[total_sleep_last_7_graph])]),
            html.Div(id='wake-up-last-7', className='col-lg-4', children=[
                html.Div(id='wake-up-last-7-title', className='col-lg-12', children=[
                    html.P("Here's a summary of your wake-up times over the last 7 days")]),
                html.Div(id='wake-up-last-7-chart', className='col-lg-12', children=[wake_up_last_7_graph])])]),
        html.Div(className='row', children=[
            html.Div(id='sleep-score-correlations', className='col-lg-6', children=[
                html.Div(id='sleep-score-correlation-title', className='col-lg-12 text-center', children=[
                    html.P('Sleep Score Correlations (L6M)')]),
                html.Div(id='sleep-score-correlation-chart', className='col-lg-12', children=[
                    generate_correlation_table(10, 'Sleep score', 180)])]),
            html.Div(className='col-lg-6', children=[
                html.Div(className='row align-items-center text-center', children=[
                    html.Div(id='sleep-groupby-controls', className='col-lg-12 mb-2 mt-2', children=[
                        dbc.Button('Year', id='sleep-year-button', n_clicks=0, size='sm', className='mr-3'),
                        dbc.Button('Month', id='sleep-month-button', n_clicks=0, size='sm', className='mr-3'),
                        dbc.Button('Week', id='sleep-week-button', n_clicks=0, size='sm', className='mr-3'),
                        dbc.Button('Day', id='sleep-day-button', size='sm')])]),
                html.Div(className='row', children=[
                    html.Div(className='col-lg-12', children=[
                        dbc.Spinner(color='info', children=[
                            dcc.Graph(id='sleep-modal-full-chart', config={'displayModeBar': False})])])])])])]
def test_set_pod_security_context():
    """Defaults render fsGroup/runAsUser 1000; overrides pass through."""
    rendered = helm_template('')
    security_context = rendered['statefulset'][name]['spec']['template']['spec']['securityContext']
    assert security_context['fsGroup'] == 1000
    assert security_context['runAsUser'] == 1000

    config = '\n podSecurityContext:\n fsGroup: 1001\n other: test\n '
    rendered = helm_template(config)
    security_context = rendered['statefulset'][name]['spec']['template']['spec']['securityContext']
    assert security_context['fsGroup'] == 1001
    assert security_context['other'] == 'test'
def main(me, args):
    """Parse options, open the Mach-O/fat binary and dispatch to the handler.

    :param me: program name (unused here; kept for the caller's convention).
    :param args: argument list (excluding the program name).
    :returns: 0 on success, 1 on usage error.
    :raises MachOError: if the file is neither a Mach-O nor a fat binary.

    Fixes: the file is now closed even when a handler raises (``with``),
    and the error message reports the path instead of the file object's repr.
    """
    parser = optparse.OptionParser('%prog [options] <executable_path>')
    parser.add_option('--executable-heap', action='store_false',
                      dest='no_heap_execution', default=True,
                      help='Clear the MH_NO_HEAP_EXECUTION bit')
    parser.add_option('--no-pie', action='store_false', dest='pie', default=True,
                      help='Clear the MH_PIE bit')
    (options, loose_args) = parser.parse_args(args)
    if (len(loose_args) != 1):
        parser.print_usage()
        return 1
    executable_path = loose_args[0]
    # 'rb+' because the handlers rewrite header flags in place.
    with open(executable_path, 'rb+') as executable_file:
        magic = ReadUInt32(executable_file, '<')
        if (magic == FAT_CIGAM):
            # fat (universal) binary: iterate over the embedded architectures
            HandleFatFile(executable_file, options)
        elif magic in (MH_MAGIC, MH_CIGAM, MH_MAGIC_64, MH_CIGAM_64):
            HandleMachOFile(executable_file, options)
        else:
            raise MachOError(('%s is not a Mach-O or fat file' % executable_path))
    return 0
class CmdInventory(EvAdventureCommand):
    """Show the caller's equipped loadout, backpack contents and slot usage."""

    key = 'inventory'
    aliases = ('i', 'inv')

    def func(self):
        """Assemble the three inventory sections and send them to the caller."""
        equipment = self.caller.equipment
        sections = (
            equipment.display_loadout(),
            equipment.display_backpack(),
            f'You use {equipment.display_slot_usage()} equipment slots.',
        )
        self.caller.msg('\n'.join(sections))
def print_output(p):
    """Stream a running subprocess's stdout/stderr through ``log_line``.

    Polls ``p`` until it exits, using select() on both pipes. stdout is read
    one character at a time and flushed to ``log_line`` on '\\r' or '\\n';
    stderr is forwarded line by line.

    NOTE(review): the '\\r'/'\\n' comparisons assume the pipes are text-mode
    (str reads); with binary pipes ``buf`` would be bytes and never match —
    confirm how ``p`` is created.

    :param p: a subprocess.Popen-like object with stdout/stderr pipes.
    :returns: the process's exit code (``p.poll()``).
    """
    from select import select
    while (p.poll() is None):
        # wait until at least one pipe has data
        readx = select([p.stdout.fileno(), p.stderr.fileno()], [], [])[0]
        send_buffer = []
        for fd in readx:
            if (fd == p.stdout.fileno()):
                # drain stdout byte-by-byte, emitting a log line per newline
                while 1:
                    buf = p.stdout.read(1)
                    if (not len(buf)):
                        # EOF on stdout
                        break
                    if ((buf == '\r') or (buf == '\n')):
                        send_buffer.append(buf)
                        log_line(''.join(send_buffer), 'stdout')
                        send_buffer = []
                    else:
                        send_buffer.append(buf)
            if (fd == p.stderr.fileno()):
                log_line(p.stderr.readline(), 'stderr')
    return p.poll()
class MergeFind():
    """Union-find (disjoint-set) over arbitrary hashable keys.

    ``db`` maps each non-root element to its parent; elements absent from
    ``db`` are their own representatives. ``find`` performs full path
    compression.
    """

    def __init__(self):
        self.db = dict()

    def merge(self, a, b):
        """Union the sets containing *a* and *b* (a's root points to b's)."""
        root_a = self.find(a)
        root_b = self.find(b)
        if root_a != root_b:
            self.db[root_a] = root_b

    def find(self, a):
        """Return the representative of *a*, compressing the visited path."""
        # walk up to the root
        root = a
        while root in self.db:
            root = self.db[root]
        # second pass: point every node on the path directly at the root
        while a in self.db:
            parent = self.db[a]
            self.db[a] = root
            a = parent
        return root
# FIX: the decorator line was truncated to ".parametrize(...)" — restore the
# "@pytest.mark" prefix so the parametrization is applied.
@pytest.mark.parametrize('key,num_tokens,etype', [('', 1, ValueError), ('', 0, ValueError), ('x', 0, ValueError), ('x', (- 1), ValueError), ('x', (- 2), ValueError), ((- 1), None, (ValueError, TypeError)), (None, (- 1), (ValueError, TypeError)), (None, 1, TypeError), (1, None, TypeError)])
def test_input_validation_on_consume(key, num_tokens, etype):
    """Limiter.consume must reject empty keys, non-positive token counts and
    wrongly-typed arguments with ValueError/TypeError."""
    limiter = token_bucket.Limiter(1, 1, token_bucket.MemoryStorage())
    with pytest.raises(etype):
        limiter.consume(key, num_tokens)
def reference_launch_plan(project: str, domain: str, name: str, version: str) -> Callable[([Callable[(..., Any)]], ReferenceLaunchPlan)]:
    """Decorator factory producing a ReferenceLaunchPlan from a function.

    The decorated function supplies only the interface (inputs/outputs); the
    launch plan itself is resolved remotely by project/domain/name/version.
    """
    def wrapper(fn) -> ReferenceLaunchPlan:
        fn_interface = transform_function_to_interface(fn)
        return ReferenceLaunchPlan(
            project, domain, name, version,
            fn_interface.inputs, fn_interface.outputs,
        )
    return wrapper
def test_hydrate_registration_parameters__task_already_set():
    """A task spec with an id already set must keep it, ignoring the
    project/domain/version passed to hydrate_registration_parameters."""
    preset_id = _identifier_pb2.Identifier(
        resource_type=_identifier_pb2.TASK, project='project2',
        domain='domain2', name='name', version='abc')
    task = _task_pb2.TaskSpec(
        template=_core_task_pb2.TaskTemplate(id=preset_id))
    (identifier, entity) = hydrate_registration_parameters(
        _identifier_pb2.TASK, 'project', 'domain', '12345', task)
    assert (identifier == preset_id == entity.template.id)
_visible
def detect_joomla(source_file, regexp):
    """Detect the installed Joomla version from a version source file.

    :param source_file: path to the file holding the version constants.
    :param regexp: two-element sequence of patterns — ``regexp[0]`` matches
        the release version, ``regexp[1]`` the development-level version.
    :returns: "<release>.<dev_level>" string, or None when the file is
        missing, no patterns were given, or either version was not found.

    Fix: corrected the 'Dectecting' typo in the debug message.
    """
    # bail out when the file does not exist or no patterns were supplied
    if (not (os.path.isfile(source_file) and regexp)):
        return
    logging.debug('Detecting Joomla from: %s', source_file)
    release_version = grep_from_file(source_file, regexp[0])
    if (not release_version):
        logging.debug('Could not find release version from: %s', source_file)
        return
    logging.debug('Release version: %s', release_version)
    dev_level_version = grep_from_file(source_file, regexp[1])
    if (not dev_level_version):
        logging.debug('Could not find development version from: %s', source_file)
        return
    logging.debug('Development level version: %s', dev_level_version)
    return ((release_version + '.') + dev_level_version)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.