code stringlengths 281 23.7M |
|---|
def test_deepcopy_with_sys_streams():
    """deepcopy() must clone the provider while keeping the std stream objects."""
    original = providers.List()
    original.add_args(sys.stdin, sys.stdout, sys.stderr)
    clone = providers.deepcopy(original)
    assert original is not clone
    assert isinstance(clone, providers.List)
    # The std streams must not be copied — identity, not equality.
    for arg, stream in zip(original.args, (sys.stdin, sys.stdout, sys.stderr)):
        assert arg is stream
# NOTE(review): this tuple looks like a pytest parametrization whose
# "@pytest.mark." prefix was lost in extraction — confirm against the
# original test module before running.
('kwargs', itertools.product([('xradius', 10), ('yradius', 10)], [('xradius', 20)], [('yradius', 20)], [('radius', 10)]))
def test_draw_rectangle_with_radius(kwargs):
    """Drawing a rounded rectangle must change the image signature."""
    with Image(width=50, height=50, background='white') as img:
        was = img.signature
        with Drawing() as ctx:
            ctx.stroke_width = 2
            ctx.fill_color = 'black'
            ctx.stroke_color = '#ccc'
            # kwargs is a tuple of (name, value) pairs; dict() turns it into keywords.
            ctx.rectangle(left=10, top=10, width=30, height=30, **dict(kwargs))
            ctx.draw(img)
        assert (was != img.signature)
class EditUserInfoView(View):
    """View that updates the logged-in user's email after e-mail verification.

    NOTE(review): the user-facing '!' / ',!' messages appear to have lost
    their original (probably non-ASCII) text during extraction — confirm
    against the upstream source before relying on them.
    """

    def put(self, request):
        """Handle PUT: check the pending verification session, validate, save."""
        res = {'code': 332, 'msg': '!', 'self': None}
        # Payload stored in the session when the confirmation e-mail was sent.
        valid_email_obj = request.session.get('valid_email_obj')
        if (not valid_email_obj):
            res['msg'] = '!'
            return JsonResponse(res)
        time_stamp = valid_email_obj['time_stamp']
        now = time.time()
        # The verification window is 300 seconds.
        if ((now - time_stamp) > 300):
            res['msg'] = ',!'
            return JsonResponse(res)
        form = EditUserInfoForm(request.data, request=request)
        if (not form.is_valid()):
            # clean_form() extracts (field, message) for the first form error.
            (res['self'], res['msg']) = clean_form(form)
            return JsonResponse(res)
        user = request.user
        user.email = form.cleaned_data['email']
        user.save()
        res['code'] = 0  # 0 signals success to the client
        return JsonResponse(res)
class GymHandler(Handler):
    """Skill handler that routes gym-protocol messages to the running GymTask."""

    SUPPORTED_PROTOCOL = GymMessage.protocol_id

    def __init__(self, **kwargs: Any):
        """Pop `nb_steps` (default DEFAULT_NB_STEPS) and create the gym task."""
        nb_steps = kwargs.pop('nb_steps', DEFAULT_NB_STEPS)
        super().__init__(**kwargs)
        self.task = GymTask(self.context, nb_steps)
        # Set by setup(); None means the task was never enqueued.
        self._task_id: Optional[int] = None

    def setup(self) -> None:
        """Enqueue the gym task on the agent's task manager."""
        self.context.logger.info('Gym handler: setup method called.')
        self._task_id = self.context.task_manager.enqueue_task(self.task)

    def handle(self, message: Message) -> None:
        """Dispatch an incoming gym message by performative."""
        gym_msg = cast(GymMessage, message)
        gym_dialogues = cast(GymDialogues, self.context.gym_dialogues)
        gym_dialogue = cast(GymDialogue, gym_dialogues.update(gym_msg))
        if (gym_dialogue is None):
            # Message does not belong to any known dialogue.
            self._handle_unidentified_dialogue(gym_msg)
            return
        if (gym_msg.performative == GymMessage.Performative.PERCEPT):
            self._handle_percept(gym_msg, gym_dialogue)
        elif (gym_msg.performative == GymMessage.Performative.STATUS):
            self._handle_status(gym_msg, gym_dialogue)
        else:
            self._handle_invalid(gym_msg, gym_dialogue)

    def _handle_unidentified_dialogue(self, gym_msg: GymMessage) -> None:
        """Reply with a default-protocol INVALID_DIALOGUE error."""
        self.context.logger.info('received invalid gym message={}, unidentified dialogue.'.format(gym_msg))
        default_dialogues = cast(DefaultDialogues, self.context.default_dialogues)
        (default_msg, _) = default_dialogues.create(counterparty=gym_msg.sender, performative=DefaultMessage.Performative.ERROR, error_code=DefaultMessage.ErrorCode.INVALID_DIALOGUE, error_msg='Invalid dialogue.', error_data={'gym_message': gym_msg.encode()})
        self.context.outbox.put_message(message=default_msg)

    def _handle_percept(self, gym_msg: GymMessage, gym_dialogue: GymDialogue) -> None:
        """Forward a PERCEPT to the proxy-env queue if its dialogue is active."""
        if (self.task.proxy_env.active_gym_dialogue == gym_dialogue):
            self.task.proxy_env_queue.put(gym_msg)
        else:
            self.context.logger.warning('gym dialogue not active dialogue.')

    def _handle_status(self, gym_msg: GymMessage, gym_dialogue: GymDialogue) -> None:
        """Forward a STATUS to the proxy-env queue only on a successful reset."""
        if ((self.task.proxy_env.active_gym_dialogue == gym_dialogue) and (gym_msg.content.get('reset', 'failure') == 'success')):
            self.task.proxy_env_queue.put(gym_msg)
        else:
            self.context.logger.warning('gym dialogue not active dialogue.')

    def _handle_invalid(self, gym_msg: GymMessage, gym_dialogue: GymDialogue) -> None:
        """Log gym messages whose performative cannot be handled."""
        self.context.logger.warning('cannot handle gym message of performative={} in dialogue={}.'.format(gym_msg.performative, gym_dialogue))

    def teardown(self) -> None:
        """Tear the task down and check its result, if it was ever enqueued."""
        self.context.logger.info('Gym handler: teardown method called.')
        if (self._task_id is None):
            return
        self.task.teardown()
        result = self.context.task_manager.get_task_result(self._task_id)
        if (not result.successful()):
            self.context.logger.warning('Task not successful!')
class OptionSeriesErrorbarSonificationTracksMappingPan(Options):
    """Highcharts `series.errorbar.sonification.tracks.mapping.pan` options.

    FIX: every getter/setter pair was defined as two plain methods with the
    same name, so the second `def` silently shadowed the first and the getters
    were unreachable. The @property / @<name>.setter decorators (evidently
    lost in extraction) are restored so both accessors work.
    """

    @property
    def mapFunction(self):
        """Mapping function for the pan value; no library default."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the pan is mapped to; no library default."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range; no library default."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range; no library default."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range context the mapping operates within; no library default."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesParetoLabelStyle(Options):
    """Highcharts `series.pareto.label.style` options.

    FIX: each getter/setter pair shared one method name, so the setter
    shadowed the getter. Restored the @property / @<name>.setter decorators
    that were evidently lost in extraction.
    """

    @property
    def fontSize(self):
        """Label font size; library default '0.8em'."""
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def fontWeight(self):
        """Label font weight; library default 'bold'."""
        return self._config_get('bold')

    @fontWeight.setter
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsParetoSonificationTracksMappingTremolo(Options):
    """Highcharts `plotOptions.pareto.sonification.tracks.mapping.tremolo` options.

    FIX: restored the @property decorators on the sub-option accessors —
    in this options API they are attribute-style accessors, and the
    decorators were evidently lost in extraction.
    """

    @property
    def depth(self) -> 'OptionPlotoptionsParetoSonificationTracksMappingTremoloDepth':
        """Sub-options controlling tremolo depth."""
        return self._config_sub_data('depth', OptionPlotoptionsParetoSonificationTracksMappingTremoloDepth)

    @property
    def speed(self) -> 'OptionPlotoptionsParetoSonificationTracksMappingTremoloSpeed':
        """Sub-options controlling tremolo speed."""
        return self._config_sub_data('speed', OptionPlotoptionsParetoSonificationTracksMappingTremoloSpeed)
def heckbert_interval(data_low, data_high, numticks=8, nicefunc=_nice, enclose=False):
    """Return nice axis bounds and tick spacing for [data_low, data_high].

    Implements Heckbert's "nice numbers" axis-labeling algorithm.

    Args:
        data_low, data_high: the data range to cover.
        numticks: desired number of ticks (0 is treated as 1).
        nicefunc: rounding helper that snaps a value to a "nice" number.
        enclose: if True the returned bounds lie *inside* the data range
            (ceil the low bound, floor the high one); otherwise they
            enclose it.

    Returns:
        (graphmin, graphmax, d) — axis bounds and tick interval; the
        degenerate case data_low == data_high yields (data_high, data_low, 0).
    """
    if data_high == data_low:
        # Degenerate range: no tick spacing can be computed.
        return (data_high, data_low, 0)
    if numticks == 0:
        numticks = 1
    # Renamed from `range` to avoid shadowing the builtin.
    data_range = nicefunc(data_high - data_low)
    if numticks > 1:
        numticks -= 1
    d = nicefunc(data_range / numticks, round=True)
    if enclose:
        graphmin = ceil(data_low / d) * d
        graphmax = floor(data_high / d) * d
    else:
        graphmin = floor(data_low / d) * d
        graphmax = ceil(data_high / d) * d
    return (graphmin, graphmax, d)
class AWin():
    """A windowed view onto a named buffer: per-dimension coordinates plus strides.

    NOTE(review): the class carries bare field annotations but no visible
    __init__ — presumably a @dataclass decorator was lost in extraction;
    confirm against the original source.
    """
    name: Sym
    coords: list[AWinCoord]
    strides: list[A.expr]

    def __str__(win):
        """Render as `(name,+off,=pt,...)`: '=' marks fixed points, '+' offsets."""
        def coordstr(c):
            op = ('=' if c.is_pt else '+')
            return f'{op}{c.val}'
        coords = ''
        if (len(win.coords) > 0):
            coords = (',' + ','.join([coordstr(c) for c in win.coords]))
        return f'({win.name}{coords})'

    def nslots(self):
        """Number of interval (non-point) coordinates, i.e. open dimensions."""
        return sum([(not c.is_pt) for c in self.coords])

    def __add__(lhs, rhs):
        """Compose two windows: rhs's coords fill lhs's open (interval) slots."""
        assert isinstance(rhs, AWin)
        assert (lhs.nslots() == len(rhs.coords))
        ri = 0
        coords = []
        for lc in lhs.coords:
            if lc.is_pt:
                # Already-fixed point coordinates pass through unchanged.
                coords.append(lc)
            else:
                rc = rhs.coords[ri]
                ri += 1
                # Offsets accumulate; rc decides whether the slot stays open.
                coords.append(AWinCoord(rc.is_pt, (rc.val + lc.val)))
        return AWin(lhs.name, coords, lhs.strides)

    def __call__(self, pt):
        """Apply the window to a point, producing an absolute APoint."""
        assert isinstance(pt, APoint)
        assert (self.nslots() == len(pt.coords))
        pi = 0
        coords = []
        for wc in self.coords:
            if wc.is_pt:
                coords.append(wc.val)
            else:
                # Open slots consume the point's coordinates in order.
                coords.append((wc.val + pt.coords[pi]))
                pi += 1
        return APoint(self.name, coords, pt.typ)

    def get_stride(self, ndim):
        """Stride of the ndim-th *open* (interval) dimension."""
        interval_strides = [s for (c, s) in zip(self.coords, self.strides) if (not c.is_pt)]
        return interval_strides[ndim]
class SlateKernel(TSFCKernel):
    """Cached compilation of a Slate expression into a loopy kernel."""

    def _cache_key(cls, expr, compiler_parameters):
        # NOTE(review): takes `cls` — presumably decorated @classmethod in the
        # original source; the decorator seems lost in extraction. Confirm.
        # Key: md5 of expression hash + sorted parameters, scoped per-communicator.
        return (md5((expr.expression_hash + str(sorted(compiler_parameters.items()))).encode()).hexdigest(), expr.ufl_domains()[0].comm)

    def __init__(self, expr, compiler_parameters):
        # Cache hits re-run __init__ on the same object; bail out after the
        # first real initialization.
        if self._initialized:
            return
        self.split_kernel = generate_loopy_kernel(expr, compiler_parameters)
        self._initialized = True
class FederalAccount(models.Model):
    """Treasury federal account, keyed by agency identifier + main account code."""

    agency_identifier = models.TextField(db_index=True)
    main_account_code = models.TextField(db_index=True)
    account_title = models.TextField()
    federal_account_code = models.TextField(null=True)
    parent_toptier_agency = models.ForeignKey('references.ToptierAgency', models.DO_NOTHING, null=True, help_text='The toptier agency under which this federal account should appear in lists and dropdowns. Not as simple as just mapping the AID to an agency, although AID does factor into the decision.')

    class Meta():
        managed = True
        db_table = 'federal_account'
        unique_together = ('agency_identifier', 'main_account_code')

    @staticmethod
    def fa_rendering_label_to_component_dictionary(fa_rendering_label) -> dict:
        """Split an 'AID-MAIN' rendering label into its two components.

        FIX: decorated as @staticmethod — the function takes no `self`, so it
        was previously broken when called on an instance. Also splits the
        label once instead of twice.
        """
        parts = fa_rendering_label.split('-')
        return {'faaid': parts[0], 'famain': parts[1]}
def test_h4_1():
    """Known-answer test for h4(): hash and derived authentication key (DAK)."""
    T = bytearray.fromhex('c234c1198f3b520186ab92a2f874934e')
    A1 = bytearray.fromhex('bfce')
    A2 = bytearray.fromhex('a713702dcfc1')
    (ComputedHash, ComputedDAK) = h4(T, A1, A2, KeyID['btdk'])
    expected_hash = bytearray.fromhex('b089c4e39d7c192c3aba3c2109d24c0dc039e700adf3a263008e65a8b00fb1fa')
    expected_dak = bytearray.fromhex('b089c4e39d7c192c3aba3c2109d24c0d')
    emsg1 = 'test_h4_1: Hash {} != {}'.format(repr(expected_hash), repr(ComputedHash))
    emsg2 = 'test_h4_1: DAK {} != {}'.format(repr(expected_dak), repr(ComputedDAK))
    assert expected_hash == ComputedHash, emsg1
    assert expected_dak == ComputedDAK, emsg2
def test_wf1_with_subwf():
    """Workflow composition test: a workflow calling tasks and a sub-workflow.

    NOTE(review): these inner functions look like flytekit tasks/workflows
    whose @task / @workflow decorators were lost in extraction — as plain
    functions, `.with_overrides()` on a tuple would fail. Confirm upstream.
    """
    def t1(a: int) -> NamedTuple('OutputsBC', t1_int_output=int, c=str):
        a = (a + 2)
        return (a, ('world-' + str(a)))

    def t2(a: str, b: str) -> str:
        return (b + a)

    def my_subwf(a: int) -> (str, str):
        (x, y) = t1(a=a)
        (u, v) = t1(a=x)
        return (y, v)

    def my_wf(a: int, b: str) -> (int, str, str):
        (x, y) = t1(a=a).with_overrides()
        (u, v) = my_subwf(a=x)
        return (x, u, v)

    res = my_wf(a=5, b='hello ')
    # 5 -> t1 -> 7; sub-workflow chains t1 twice: 7 -> 9 -> 11.
    assert (res == (7, 'world-9', 'world-11'))
# NOTE(review): appears to be a pytest parametrization whose "@pytest.mark."
# prefix was lost in extraction — restore before running. Two cases: empty
# frames, and small frames with duplicated/constant columns.
.parametrize('current_data, reference_data, column_mapping, metric, expected_json', ((pd.DataFrame(), None, ColumnMapping(), DatasetSummaryMetric(almost_duplicated_threshold=0.9), {'almost_duplicated_threshold': 0.9, 'current': {'date_column': None, 'id_column': None, 'nans_by_columns': {}, 'number_of_almost_constant_columns': 0, 'number_of_almost_duplicated_columns': 0, 'number_of_categorical_columns': 0, 'number_of_columns': 0, 'number_of_constant_columns': 0, 'number_of_datetime_columns': 0, 'number_of_duplicated_columns': 0, 'number_of_duplicated_rows': 0, 'number_of_empty_columns': 0, 'number_of_empty_rows': 0, 'number_of_missing_values': 0.0, 'number_of_numeric_columns': 0, 'number_of_text_columns': 0, 'number_of_rows': 0, 'number_uniques_by_columns': {}, 'prediction': None, 'target': None}, 'reference': None}), (pd.DataFrame({'test1': [1, 2, 3], 'test2': [1, 2, 3], 'test3': [1, 1, 1]}), pd.DataFrame({'test4': [1, 2, 3], 'test2': ['a', 'a', 'a'], 'test3': [1, 1, 1]}), ColumnMapping(), DatasetSummaryMetric(almost_duplicated_threshold=0.9), {'almost_duplicated_threshold': 0.9, 'current': {'date_column': None, 'id_column': None, 'nans_by_columns': {'test1': 0, 'test2': 0, 'test3': 0}, 'number_of_almost_constant_columns': 1, 'number_of_almost_duplicated_columns': 1, 'number_of_categorical_columns': 0, 'number_of_columns': 3, 'number_of_constant_columns': 1, 'number_of_datetime_columns': 0, 'number_of_duplicated_columns': 1, 'number_of_duplicated_rows': 0, 'number_of_empty_columns': 0, 'number_of_empty_rows': 0, 'number_of_missing_values': 0, 'number_of_numeric_columns': 3, 'number_of_text_columns': 0, 'number_of_rows': 3, 'number_uniques_by_columns': {'test1': 3, 'test2': 3, 'test3': 1}, 'prediction': None, 'target': None}, 'reference': {'date_column': None, 'id_column': None, 'nans_by_columns': {'test2': 0, 'test3': 0, 'test4': 0}, 'number_of_almost_constant_columns': 2, 'number_of_almost_duplicated_columns': 0, 'number_of_categorical_columns': 1, 'number_of_columns': 3, 'number_of_constant_columns': 2, 'number_of_datetime_columns': 0, 'number_of_duplicated_columns': 0, 'number_of_duplicated_rows': 0, 'number_of_empty_columns': 0, 'number_of_empty_rows': 0, 'number_of_missing_values': 0, 'number_of_numeric_columns': 2, 'number_of_text_columns': 0, 'number_of_rows': 3, 'number_uniques_by_columns': {'test2': 1, 'test3': 1, 'test4': 3}, 'prediction': None, 'target': None}})))
def test_dataset_summary_metric_with_report(current_data: pd.DataFrame, reference_data: pd.DataFrame, column_mapping: ColumnMapping, metric: DatasetSummaryMetric, expected_json: dict) -> None:
    """The metric's JSON output for each dataset pair must match expected_json."""
    report = Report(metrics=[metric])
    report.run(current_data=current_data, reference_data=reference_data, column_mapping=column_mapping)
    # show() returns a renderable object; truthiness confirms rendering works.
    assert report.show()
    json_result = report.json()
    assert (len(json_result) > 0)
    result = json.loads(json_result)
    assert (result['metrics'][0]['metric'] == 'DatasetSummaryMetric')
    assert (result['metrics'][0]['result'] == expected_json)
# NOTE(review): appears to be a pytest parametrization whose "@pytest.mark."
# prefix was lost in extraction — restore before running.
.parametrize('block_hash,is_valid', ((1, False), (True, False), ((b'\x00' * 32), True), ((b'\xff' * 32), True), (('\x00' * 32), False), (encode_hex((b'\x00' * 32)), False)))
def test_block_hash_output_validation(validator, block_hash, is_valid):
    """Only 32-byte `bytes` values are accepted as outbound block hashes."""
    if is_valid:
        validator.validate_outbound_block_hash(block_hash)
    else:
        with pytest.raises(ValidationError):
            validator.validate_outbound_block_hash(block_hash)
def test_raises_only_field_errors_unexpected(unknown_event_id_field_error, invalid_organization_id_field_error):
    """raises_only_field_errors must itself fail when extra field errors are present."""
    errors = [unknown_event_id_field_error, invalid_organization_id_field_error]
    # pytest.raises.Exception is pytest's own failure exception (the alias
    # pytest attaches to `raises`): the inner context manager is expected to
    # *fail* because an unexpected error accompanies the expected one.
    with pytest.raises(pytest.raises.Exception):
        with raises_only_field_errors({'organization_id': 'INVALID'}):
            raise Client.CallActionError(actions=[ActionResponse(action='', errors=errors)])
def failing_call_with_output(cmd, expected, env=None):
    """Run *cmd*, require a non-zero exit code, and require *expected* in its output."""
    argv = cmd.split(' ')
    proc = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, env=env)
    (stdout, stderr) = proc.communicate()
    captured = str([stdout, '\n\n', stderr])
    if WINDOWS:
        # Exit codes are not propagated reliably on Windows (see #592).
        print('warning: skipping part of failing_call_with_output() due to error codes not being propagated (see #592)')
    else:
        assert proc.returncode, ('call must have failed: ' + captured)
    assert ((expected in stdout) or (expected in stderr)), ('call did not have the expected output: %s: %s' % (expected, captured))
def listNextPasesHtml(passTable, howmany):
    """Render the next *howmany* satellite passes as an HTML table string."""
    upcoming = passTable[0:howmany]
    # One color per distinct satellite among the upcoming passes.
    colorDict = assignColorsToEvent(list(set([row[0] for row in upcoming])))
    chunks = ["<table class='table small'>\n"]
    chunks.append('<tr><th>#</th><th>satellite</th><th>start</th><th>duration</th><th>peak</th><th>azimuth</th><th>freq</th><th>process with</th><tr>\n')
    for rowNo, satelitePass in enumerate(upcoming, start=1):
        (satellite, start, duration, peak, azimuth) = satelitePass
        cfg = satellitesData[satellite]
        chunks.append("<tr><td style='background-color: %s'>%i</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s (%s)</td><td>%sHz</td><td>%s</td><tr>\n" % (colorDict[satellite], rowNo, satellite, t2human(start), t2humanMS(duration), peak, azimuth, azimuth2dir(azimuth), cfg['freq'], cfg['processWith']))
    chunks.append('</table>\n')
    return ''.join(chunks)
def forward(model: Model[(InT, OutT)], Xr: InT, is_train: bool) -> Tuple[(OutT, Callable)]:
    """Reduce a ragged batch by per-sequence mean; return (Y, backprop)."""
    Y = model.ops.reduce_mean(cast(Floats2d, Xr.data), Xr.lengths)
    # Capture lengths now: backprop must rebuild a Ragged with the same layout.
    lengths = Xr.lengths
    array_info = ArrayInfo.from_array(Y)

    def backprop(dY: OutT) -> InT:
        array_info.check_consistency(dY)
        # Spread each sequence's mean-gradient back over its rows.
        return Ragged(model.ops.backprop_reduce_mean(dY, lengths), lengths)

    return (Y, backprop)
class Decoder():
    def __init__(self, wrapper: Wrapper, serverVersion: int):
        """Decode incoming IB API field lists and forward them to *wrapper*.

        `handlers` maps wire message ids to bound decoder methods; simple
        messages are handled by generic converters built with `wrap(...)`
        (whose `skip` argument counts leading protocol fields to drop).
        """
        self.wrapper = wrapper
        self.serverVersion = serverVersion
        self.logger = logging.getLogger('ib_insync.Decoder')
        self.handlers = {1: self.priceSizeTick, 2: self.wrap('tickSize', [int, int, float]), 3: self.wrap('orderStatus', [int, str, float, float, float, int, int, float, int, str, float], skip=1), 4: self.errorMsg, 5: self.openOrder, 6: self.wrap('updateAccountValue', [str, str, str, str]), 7: self.updatePortfolio, 8: self.wrap('updateAccountTime', [str]), 9: self.wrap('nextValidId', [int]), 10: self.contractDetails, 11: self.execDetails, 12: self.wrap('updateMktDepth', [int, int, int, int, float, float]), 13: self.wrap('updateMktDepthL2', [int, int, str, int, int, float, float, bool]), 14: self.wrap('updateNewsBulletin', [int, int, str, str]), 15: self.wrap('managedAccounts', [str]), 16: self.wrap('receiveFA', [int, str]), 17: self.historicalData, 18: self.bondContractDetails, 19: self.wrap('scannerParameters', [str]), 20: self.scannerData, 21: self.tickOptionComputation, 45: self.wrap('tickGeneric', [int, int, float]), 46: self.wrap('tickString', [int, int, str]), 47: self.wrap('tickEFP', [int, int, float, str, float, int, str, float, float]), 49: self.wrap('currentTime', [int]), 50: self.wrap('realtimeBar', [int, int, float, float, float, float, float, float, int]), 51: self.wrap('fundamentalData', [int, str]), 52: self.wrap('contractDetailsEnd', [int]), 53: self.wrap('openOrderEnd', []), 54: self.wrap('accountDownloadEnd', [str]), 55: self.wrap('execDetailsEnd', [int]), 56: self.deltaNeutralValidation, 57: self.wrap('tickSnapshotEnd', [int]), 58: self.wrap('marketDataType', [int, int]), 59: self.commissionReport, 61: self.position, 62: self.wrap('positionEnd', []), 63: self.wrap('accountSummary', [int, str, str, str, str]), 64: self.wrap('accountSummaryEnd', [int]), 65: self.wrap('verifyMessageAPI', [str]), 66: self.wrap('verifyCompleted', [bool, str]), 67: self.wrap('displayGroupList', [int, str]), 68: self.wrap('displayGroupUpdated', [int, str]), 69: self.wrap('verifyAndAuthMessageAPI', [str, str]), 70: self.wrap('verifyAndAuthCompleted', [bool, str]), 71:
        self.positionMulti, 72: self.wrap('positionMultiEnd', [int]), 73: self.wrap('accountUpdateMulti', [int, str, str, str, str, str]), 74: self.wrap('accountUpdateMultiEnd', [int]), 75: self.securityDefinitionOptionParameter, 76: self.wrap('securityDefinitionOptionParameterEnd', [int], skip=1), 77: self.softDollarTiers, 78: self.familyCodes, 79: self.symbolSamples, 80: self.mktDepthExchanges, 81: self.wrap('tickReqParams', [int, float, str, int], skip=1), 82: self.smartComponents, 83: self.wrap('newsArticle', [int, int, str], skip=1), 84: self.wrap('tickNews', [int, int, str, str, str, str], skip=1), 85: self.newsProviders, 86: self.wrap('historicalNews', [int, str, str, str, str], skip=1), 87: self.wrap('historicalNewsEnd', [int, bool], skip=1), 88: self.wrap('headTimestamp', [int, str], skip=1), 89: self.histogramData, 90: self.historicalDataUpdate, 91: self.wrap('rerouteMktDataReq', [int, int, str], skip=1), 92: self.wrap('rerouteMktDepthReq', [int, int, str], skip=1), 93: self.marketRule, 94: self.wrap('pnl', [int, float, float, float], skip=1), 95: self.wrap('pnlSingle', [int, float, float, float, float, float], skip=1), 96: self.historicalTicks, 97: self.historicalTicksBidAsk, 98: self.historicalTicksLast, 99: self.tickByTick, 100: self.wrap('orderBound', [int, int, int], skip=1), 101: self.completedOrder, 102: self.wrap('completedOrdersEnd', [], skip=1), 103: self.wrap('replaceFAEnd', [int, str], skip=1), 104: self.wrap('wshMetaData', [int, str], skip=1), 105: self.wrap('wshEventData', [int, str], skip=1), 106: self.historicalSchedule, 107: self.wrap('userInfo', [int, str], skip=1)}
def wrap(self, methodName, types, skip=2):
def handler(fields):
method = getattr(self.wrapper, methodName, None)
if method:
try:
args = [(field if (typ is str) else (int((field or 0)) if (typ is int) else (float((field or 0)) if (typ is float) else bool(int((field or 0)))))) for (typ, field) in zip(types, fields[skip:])]
method(*args)
except Exception:
self.logger.exception(f'Error for {methodName}:')
return handler
def interpret(self, fields):
try:
msgId = int(fields[0])
handler = self.handlers[msgId]
handler(fields)
except Exception:
self.logger.exception(f'Error handling fields: {fields}')
def parse(self, obj):
for field in dataclasses.fields(obj):
typ = type(field.default)
if (typ is str):
continue
v = getattr(obj, field.name)
if (typ is int):
setattr(obj, field.name, (int(v) if v else field.default))
elif (typ is float):
setattr(obj, field.name, (float(v) if v else field.default))
elif (typ is bool):
setattr(obj, field.name, (bool(int(v)) if v else field.default))
def priceSizeTick(self, fields):
(_, _, reqId, tickType, price, size, _) = fields
if price:
self.wrapper.priceSizeTick(int(reqId), int(tickType), float(price), float((size or 0)))
def errorMsg(self, fields):
(_, _, reqId, errorCode, errorString, *fields) = fields
advancedOrderRejectJson = ''
if (self.serverVersion >= 166):
(advancedOrderRejectJson, *fields) = fields
self.wrapper.error(int(reqId), int(errorCode), errorString, advancedOrderRejectJson)
    def updatePortfolio(self, fields):
        """Decode a portfolio update: contract description plus position metrics."""
        c = Contract()
        (_, _, c.conId, c.symbol, c.secType, c.lastTradeDateOrContractMonth, c.strike, c.right, c.multiplier, c.primaryExchange, c.currency, c.localSymbol, c.tradingClass, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName) = fields
        # parse() coerces the contract's string fields to their real types.
        self.parse(c)
        self.wrapper.updatePortfolio(c, float(position), float(marketPrice), float(marketValue), float(averageCost), float(unrealizedPNL), float(realizedPNL), accountName)
    def contractDetails(self, fields):
        """Decode a contractDetails message into ContractDetails + Contract."""
        cd = ContractDetails()
        cd.contract = c = Contract()
        if (self.serverVersion < 164):
            # Older servers send an extra leading field; drop it.
            fields.pop(0)
        (_, reqId, c.symbol, c.secType, lastTimes, c.strike, c.right, c.exchange, c.currency, c.localSymbol, cd.marketName, c.tradingClass, c.conId, cd.minTick, *fields) = fields
        if (self.serverVersion < 164):
            # Second extra field on older servers, at this position.
            fields.pop(0)
        (c.multiplier, cd.orderTypes, cd.validExchanges, cd.priceMagnifier, cd.underConId, cd.longName, c.primaryExchange, cd.contractMonth, cd.industry, cd.category, cd.subcategory, cd.timeZoneId, cd.tradingHours, cd.liquidHours, cd.evRule, cd.evMultiplier, numSecIds, *fields) = fields
        numSecIds = int(numSecIds)
        if (numSecIds > 0):
            # Security ids arrive as (tag, value) pairs.
            cd.secIdList = []
            for _ in range(numSecIds):
                (tag, value, *fields) = fields
                cd.secIdList += [TagValue(tag, value)]
        (cd.aggGroup, cd.underSymbol, cd.underSecType, cd.marketRuleIds, cd.realExpirationDate, cd.stockType, *fields) = fields
        if (self.serverVersion == 163):
            (cd.suggestedSizeIncrement, *fields) = fields
        if (self.serverVersion >= 164):
            (cd.minSize, cd.sizeIncrement, cd.suggestedSizeIncrement, *fields) = fields
        # lastTimes packs up to three values (date, time, time zone), joined
        # by '-' or whitespace depending on server formatting.
        times = lastTimes.split(('-' if ('-' in lastTimes) else None))
        if (len(times) > 0):
            c.lastTradeDateOrContractMonth = times[0]
        if (len(times) > 1):
            cd.lastTradeTime = times[1]
        if (len(times) > 2):
            cd.timeZoneId = times[2]
        # longName may contain unicode escape sequences from the wire.
        cd.longName = cd.longName.encode().decode('unicode-escape')
        self.parse(cd)
        self.parse(c)
        self.wrapper.contractDetails(int(reqId), cd)
    def bondContractDetails(self, fields):
        """Decode bond contract details (cusip, coupon, option features, ...)."""
        cd = ContractDetails()
        cd.contract = c = Contract()
        if (self.serverVersion < 164):
            # Older servers send an extra leading field; drop it.
            fields.pop(0)
        (_, reqId, c.symbol, c.secType, cd.cusip, cd.coupon, lastTimes, cd.issueDate, cd.ratings, cd.bondType, cd.couponType, cd.convertible, cd.callable, cd.putable, cd.descAppend, c.exchange, c.currency, cd.marketName, c.tradingClass, c.conId, cd.minTick, *fields) = fields
        if (self.serverVersion < 164):
            fields.pop(0)
        (cd.orderTypes, cd.validExchanges, cd.nextOptionDate, cd.nextOptionType, cd.nextOptionPartial, cd.notes, cd.longName, cd.evRule, cd.evMultiplier, numSecIds, *fields) = fields
        numSecIds = int(numSecIds)
        if (numSecIds > 0):
            # Security ids arrive as (tag, value) pairs.
            cd.secIdList = []
            for _ in range(numSecIds):
                (tag, value, *fields) = fields
                cd.secIdList += [TagValue(tag, value)]
        (cd.aggGroup, cd.marketRuleIds, *fields) = fields
        if (self.serverVersion >= 164):
            (cd.minSize, cd.sizeIncrement, cd.suggestedSizeIncrement, *fields) = fields
        # lastTimes packs up to three values (maturity, time, time zone).
        times = lastTimes.split(('-' if ('-' in lastTimes) else None))
        if (len(times) > 0):
            cd.maturity = times[0]
        if (len(times) > 1):
            cd.lastTradeTime = times[1]
        if (len(times) > 2):
            cd.timeZoneId = times[2]
        self.parse(cd)
        self.parse(c)
        self.wrapper.bondContractDetails(int(reqId), cd)
    def execDetails(self, fields):
        """Decode an execution report; normalizes the fill time to UTC."""
        c = Contract()
        ex = Execution()
        (_, reqId, ex.orderId, c.conId, c.symbol, c.secType, c.lastTradeDateOrContractMonth, c.strike, c.right, c.multiplier, c.exchange, c.currency, c.localSymbol, c.tradingClass, ex.execId, timeStr, ex.acctNumber, ex.exchange, ex.side, ex.shares, ex.price, ex.permId, ex.clientId, ex.liquidation, ex.cumQty, ex.avgPrice, ex.orderRef, ex.evRule, ex.evMultiplier, ex.modelCode, ex.lastLiquidity, *fields) = fields
        if (self.serverVersion >= 178):
            (ex.pendingPriceRevision, *fields) = fields
        self.parse(c)
        self.parse(ex)
        time = cast(datetime, parseIBDatetime(timeStr))
        if (not time.tzinfo):
            # Naive timestamps are interpreted in the TWS time zone, if known.
            tz = self.wrapper.ib.TimezoneTWS
            if tz:
                time = time.replace(tzinfo=ZoneInfo(str(tz)))
        ex.time = time.astimezone(timezone.utc)
        self.wrapper.execDetails(int(reqId), c, ex)
    def historicalData(self, fields):
        """Decode a block of historical bars, then signal completion."""
        (_, reqId, startDateStr, endDateStr, numBars, *fields) = fields
        get = iter(fields).__next__
        for _ in range(int(numBars)):
            # Wire order: date, open, high, low, close, volume, average, barCount.
            bar = BarData(date=get(), open=float(get()), high=float(get()), low=float(get()), close=float(get()), volume=float(get()), average=float(get()), barCount=int(get()))
            self.wrapper.historicalData(int(reqId), bar)
        self.wrapper.historicalDataEnd(int(reqId), startDateStr, endDateStr)
    def historicalDataUpdate(self, fields):
        """Decode a single streaming bar update.

        Note: the wire order here differs from historicalData —
        barCount, date, open, close, high, low, average, volume.
        """
        (_, reqId, *fields) = fields
        get = iter(fields).__next__
        bar = BarData(barCount=int((get() or 0)), date=get(), open=float((get() or 0)), close=float((get() or 0)), high=float((get() or 0)), low=float((get() or 0)), average=float((get() or 0)), volume=float((get() or 0)))
        self.wrapper.historicalDataUpdate(int(reqId), bar)
    def scannerData(self, fields):
        """Decode scanner results: n ranked contract rows, then an end signal."""
        (_, _, reqId, n, *fields) = fields
        for _ in range(int(n)):
            cd = ContractDetails()
            cd.contract = c = Contract()
            (rank, c.conId, c.symbol, c.secType, c.lastTradeDateOrContractMonth, c.strike, c.right, c.exchange, c.currency, c.localSymbol, cd.marketName, c.tradingClass, distance, benchmark, projection, legsStr, *fields) = fields
            self.parse(cd)
            self.parse(c)
            self.wrapper.scannerData(int(reqId), int(rank), cd, distance, benchmark, projection, legsStr)
        self.wrapper.scannerDataEnd(int(reqId))
def tickOptionComputation(self, fields):
(_, reqId, tickTypeInt, tickAttrib, *fields) = fields
(impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice) = fields
self.wrapper.tickOptionComputation(int(reqId), int(tickTypeInt), int(tickAttrib), float(impliedVol), float(delta), float(optPrice), float(pvDividend), float(gamma), float(vega), float(theta), float(undPrice))
    def deltaNeutralValidation(self, fields):
        """Decode a delta-neutral contract validation reply."""
        (_, _, reqId, conId, delta, price) = fields
        # Empty numeric fields default to 0.
        self.wrapper.deltaNeutralValidation(int(reqId), DeltaNeutralContract(int(conId), float((delta or 0)), float((price or 0))))
    def commissionReport(self, fields):
        """Decode a commission report for an execution."""
        (_, _, execId, commission, currency, realizedPNL, yield_, yieldRedemptionDate) = fields
        # Empty numeric fields default to 0.
        self.wrapper.commissionReport(CommissionReport(execId, float((commission or 0)), currency, float((realizedPNL or 0)), float((yield_ or 0)), int((yieldRedemptionDate or 0))))
    def position(self, fields):
        """Decode a position row: account, contract, size and average cost."""
        c = Contract()
        (_, _, account, c.conId, c.symbol, c.secType, c.lastTradeDateOrContractMonth, c.strike, c.right, c.multiplier, c.exchange, c.currency, c.localSymbol, c.tradingClass, position, avgCost) = fields
        self.parse(c)
        self.wrapper.position(account, c, float((position or 0)), float((avgCost or 0)))
    def positionMulti(self, fields):
        """Decode a multi-account/model position row."""
        c = Contract()
        (_, _, reqId, account, c.conId, c.symbol, c.secType, c.lastTradeDateOrContractMonth, c.strike, c.right, c.multiplier, c.exchange, c.currency, c.localSymbol, c.tradingClass, position, avgCost, modelCode) = fields
        self.parse(c)
        self.wrapper.positionMulti(int(reqId), account, modelCode, c, float((position or 0)), float((avgCost or 0)))
def securityDefinitionOptionParameter(self, fields):
(_, reqId, exchange, underlyingConId, tradingClass, multiplier, n, *fields) = fields
n = int(n)
expirations = fields[:n]
strikes = [float(field) for field in fields[(n + 1):]]
self.wrapper.securityDefinitionOptionParameter(int(reqId), exchange, underlyingConId, tradingClass, multiplier, expirations, strikes)
def softDollarTiers(self, fields):
(_, reqId, n, *fields) = fields
get = iter(fields).__next__
tiers = [SoftDollarTier(name=get(), val=get(), displayName=get()) for _ in range(int(n))]
self.wrapper.softDollarTiers(int(reqId), tiers)
def familyCodes(self, fields):
(_, n, *fields) = fields
get = iter(fields).__next__
familyCodes = [FamilyCode(accountID=get(), familyCodeStr=get()) for _ in range(int(n))]
self.wrapper.familyCodes(familyCodes)
    def symbolSamples(self, fields):
        """Decode matching-symbol samples into a list of ContractDescription."""
        (_, reqId, n, *fields) = fields
        cds = []
        for _ in range(int(n)):
            cd = ContractDescription()
            cd.contract = c = Contract()
            (c.conId, c.symbol, c.secType, c.primaryExchange, c.currency, m, *fields) = fields
            c.conId = int(c.conId)
            m = int(m)
            # The next m fields list the derivative security types.
            cd.derivativeSecTypes = fields[:m]
            fields = fields[m:]
            if (self.serverVersion >= 176):
                # Newer servers append description and issuer id per sample.
                (cd.contract.description, cd.contract.issuerId, *fields) = fields
            cds.append(cd)
        self.wrapper.symbolSamples(int(reqId), cds)
def smartComponents(self, fields):
(_, reqId, n, *fields) = fields
get = iter(fields).__next__
components = [SmartComponent(bitNumber=int(get()), exchange=get(), exchangeLetter=get()) for _ in range(int(n))]
self.wrapper.smartComponents(int(reqId), components)
def mktDepthExchanges(self, fields):
(_, n, *fields) = fields
get = iter(fields).__next__
descriptions = [DepthMktDataDescription(exchange=get(), secType=get(), listingExch=get(), serviceDataType=get(), aggGroup=int(get())) for _ in range(int(n))]
self.wrapper.mktDepthExchanges(descriptions)
def newsProviders(self, fields):
(_, n, *fields) = fields
get = iter(fields).__next__
providers = [NewsProvider(code=get(), name=get()) for _ in range(int(n))]
self.wrapper.newsProviders(providers)
def histogramData(self, fields):
(_, reqId, n, *fields) = fields
get = iter(fields).__next__
histogram = [HistogramData(price=float(get()), count=int(get())) for _ in range(int(n))]
self.wrapper.histogramData(int(reqId), histogram)
def marketRule(self, fields):
(_, marketRuleId, n, *fields) = fields
get = iter(fields).__next__
increments = [PriceIncrement(lowEdge=float(get()), increment=float(get())) for _ in range(int(n))]
self.wrapper.marketRule(int(marketRuleId), increments)
    def historicalTicks(self, fields):
        """Decode plain historical ticks: (time, price, size) per tick."""
        (_, reqId, n, *fields) = fields
        get = iter(fields).__next__
        ticks = []
        for _ in range(int(n)):
            time = int(get())
            get()  # unused field on the wire
            price = float(get())
            size = float(get())
            dt = datetime.fromtimestamp(time, timezone.utc)
            ticks.append(HistoricalTick(dt, price, size))
        # Trailing flag: whether this is the last batch for the request.
        done = bool(int(get()))
        self.wrapper.historicalTicks(int(reqId), ticks, done)
    def historicalTicksBidAsk(self, fields):
        """Decode historical bid/ask ticks with their attribute mask."""
        (_, reqId, n, *fields) = fields
        get = iter(fields).__next__
        ticks = []
        for _ in range(int(n)):
            time = int(get())
            mask = int(get())
            # bit 0 -> askPastHigh, bit 1 -> bidPastLow
            attrib = TickAttribBidAsk(askPastHigh=bool((mask & 1)), bidPastLow=bool((mask & 2)))
            priceBid = float(get())
            priceAsk = float(get())
            sizeBid = float(get())
            sizeAsk = float(get())
            dt = datetime.fromtimestamp(time, timezone.utc)
            ticks.append(HistoricalTickBidAsk(dt, attrib, priceBid, priceAsk, sizeBid, sizeAsk))
        # Trailing flag: whether this is the last batch for the request.
        done = bool(int(get()))
        self.wrapper.historicalTicksBidAsk(int(reqId), ticks, done)
    def historicalTicksLast(self, fields):
        """Decode historical last-trade ticks with their attribute mask."""
        (_, reqId, n, *fields) = fields
        get = iter(fields).__next__
        ticks = []
        for _ in range(int(n)):
            time = int(get())
            mask = int(get())
            # bit 0 -> pastLimit, bit 1 -> unreported
            attrib = TickAttribLast(pastLimit=bool((mask & 1)), unreported=bool((mask & 2)))
            price = float(get())
            size = float(get())
            exchange = get()
            specialConditions = get()
            dt = datetime.fromtimestamp(time, timezone.utc)
            ticks.append(HistoricalTickLast(dt, attrib, price, size, exchange, specialConditions))
        # Trailing flag: whether this is the last batch for the request.
        done = bool(int(get()))
        self.wrapper.historicalTicksLast(int(reqId), ticks, done)
    def tickByTick(self, fields):
        """Decode tick-by-tick data; tickType selects Last/AllLast (1/2),
        BidAsk (3) or MidPoint (4) handling."""
        (_, reqId, tickType, time, *fields) = fields
        reqId = int(reqId)
        tickType = int(tickType)
        time = int(time)
        if (tickType in (1, 2)):
            (price, size, mask, exchange, specialConditions) = fields
            mask = int(mask)
            # bit 0 -> pastLimit, bit 1 -> unreported
            attrib: Any = TickAttribLast(pastLimit=bool((mask & 1)), unreported=bool((mask & 2)))
            self.wrapper.tickByTickAllLast(reqId, tickType, time, float(price), float(size), attrib, exchange, specialConditions)
        elif (tickType == 3):
            (bidPrice, askPrice, bidSize, askSize, mask) = fields
            mask = int(mask)
            # bit 0 -> bidPastLow, bit 1 -> askPastHigh
            attrib = TickAttribBidAsk(bidPastLow=bool((mask & 1)), askPastHigh=bool((mask & 2)))
            self.wrapper.tickByTickBidAsk(reqId, time, float(bidPrice), float(askPrice), float(bidSize), float(askSize), attrib)
        elif (tickType == 4):
            (midPoint,) = fields
            self.wrapper.tickByTickMidPoint(reqId, time, float(midPoint))
def openOrder(self, fields):
    """Decode an ``openOrder`` wire message.

    The message is a flat sequence of string fields; decoding proceeds by
    repeatedly tuple-unpacking a fixed-size prefix off ``fields``, so the
    statement order below must exactly mirror the server's field order.
    Builds a Contract, Order and OrderState, coerces their string fields
    via ``self.parse``, and forwards them to the wrapper.
    """
    o = Order()
    c = Contract()
    st = OrderState()
    # Fixed leading section: order id, contract description, core order fields.
    (_, o.orderId, c.conId, c.symbol, c.secType, c.lastTradeDateOrContractMonth, c.strike, c.right, c.multiplier, c.exchange, c.currency, c.localSymbol, c.tradingClass, o.action, o.totalQuantity, o.orderType, o.lmtPrice, o.auxPrice, o.tif, o.ocaGroup, o.account, o.openClose, o.origin, o.orderRef, o.clientId, o.permId, o.outsideRth, o.hidden, o.discretionaryAmt, o.goodAfterTime, _, o.faGroup, o.faMethod, o.faPercentage, *fields) = fields
    # faProfile was dropped from the protocol at server version 177.
    if (self.serverVersion < 177):
        (o.faProfile, *fields) = fields
    (o.modelCode, o.goodTillDate, o.rule80A, o.percentOffset, o.settlingFirm, o.shortSaleSlot, o.designatedLocation, o.exemptCode, o.auctionStrategy, o.startingPrice, o.stockRefPrice, o.delta, o.stockRangeLower, o.stockRangeUpper, o.displaySize, o.blockOrder, o.sweepToFill, o.allOrNone, o.minQty, o.ocaType, o.eTradeOnly, o.firmQuoteOnly, o.nbboPriceCap, o.parentId, o.triggerMethod, o.volatility, o.volatilityType, o.deltaNeutralOrderType, o.deltaNeutralAuxPrice, *fields) = fields
    # Delta-neutral block is only present when a delta-neutral order type is set.
    if o.deltaNeutralOrderType:
        (o.deltaNeutralConId, o.deltaNeutralSettlingFirm, o.deltaNeutralClearingAccount, o.deltaNeutralClearingIntent, o.deltaNeutralOpenClose, o.deltaNeutralShortSale, o.deltaNeutralShortSaleSlot, o.deltaNeutralDesignatedLocation, *fields) = fields
    (o.continuousUpdate, o.referencePriceType, o.trailStopPrice, o.trailingPercent, o.basisPoints, o.basisPointsType, c.comboLegsDescrip, *fields) = fields
    # Variable-length combo legs: a count followed by 8 fields per leg.
    numLegs = int(fields.pop(0))
    c.comboLegs = []
    for _ in range(numLegs):
        leg: Any = ComboLeg()
        (leg.conId, leg.ratio, leg.action, leg.exchange, leg.openClose, leg.shortSaleSlot, leg.designatedLocation, leg.exemptCode, *fields) = fields
        self.parse(leg)
        c.comboLegs.append(leg)
    # Per-leg order prices.
    numOrderLegs = int(fields.pop(0))
    o.orderComboLegs = []
    for _ in range(numOrderLegs):
        leg = OrderComboLeg()
        leg.price = fields.pop(0)
        self.parse(leg)
        o.orderComboLegs.append(leg)
    # Smart combo routing parameters as tag/value pairs.
    numParams = int(fields.pop(0))
    if (numParams > 0):
        o.smartComboRoutingParams = []
        for _ in range(numParams):
            (tag, value, *fields) = fields
            o.smartComboRoutingParams.append(TagValue(tag, value))
    (o.scaleInitLevelSize, o.scaleSubsLevelSize, increment, *fields) = fields
    # An empty increment means "unset"; extra scale fields only follow a real one.
    o.scalePriceIncrement = float((increment or UNSET_DOUBLE))
    if (0 < o.scalePriceIncrement < UNSET_DOUBLE):
        (o.scalePriceAdjustValue, o.scalePriceAdjustInterval, o.scaleProfitOffset, o.scaleAutoReset, o.scaleInitPosition, o.scaleInitFillQty, o.scaleRandomPercent, *fields) = fields
    o.hedgeType = fields.pop(0)
    # Hedge parameter follows only when a hedge type is set.
    if o.hedgeType:
        o.hedgeParam = fields.pop(0)
    (o.optOutSmartRouting, o.clearingAccount, o.clearingIntent, o.notHeld, dncPresent, *fields) = fields
    # Optional delta-neutral contract, guarded by a presence flag.
    if int(dncPresent):
        (conId, delta, price, *fields) = fields
        c.deltaNeutralContract = DeltaNeutralContract(int((conId or 0)), float((delta or 0)), float((price or 0)))
    o.algoStrategy = fields.pop(0)
    # Algo parameters only follow when a strategy is set.
    if o.algoStrategy:
        numParams = int(fields.pop(0))
        if (numParams > 0):
            o.algoParams = []
            for _ in range(numParams):
                (tag, value, *fields) = fields
                o.algoParams.append(TagValue(tag, value))
    (o.solicited, o.whatIf, st.status, st.initMarginBefore, st.maintMarginBefore, st.equityWithLoanBefore, st.initMarginChange, st.maintMarginChange, st.equityWithLoanChange, st.initMarginAfter, st.maintMarginAfter, st.equityWithLoanAfter, st.commission, st.minCommission, st.maxCommission, st.commissionCurrency, st.warningText, o.randomizeSize, o.randomizePrice, *fields) = fields
    # Pegged-to-benchmark orders carry extra reference fields.
    if (o.orderType in ('PEG BENCH', 'PEGBENCH')):
        (o.referenceContractId, o.isPeggedChangeAmountDecrease, o.peggedChangeAmount, o.referenceChangeAmount, o.referenceExchangeId, *fields) = fields
    # Conditions: each consumes one type field plus as many fields as the
    # condition dataclass has (minus the already-read condType).
    numConditions = int(fields.pop(0))
    if (numConditions > 0):
        for _ in range(numConditions):
            condType = int(fields.pop(0))
            condCls = OrderCondition.createClass(condType)
            n = (len(dataclasses.fields(condCls)) - 1)
            cond = condCls(condType, *fields[:n])
            self.parse(cond)
            o.conditions.append(cond)
            fields = fields[n:]
        (o.conditionsIgnoreRth, o.conditionsCancelOrder, *fields) = fields
    (o.adjustedOrderType, o.triggerPrice, o.trailStopPrice, o.lmtPriceOffset, o.adjustedStopPrice, o.adjustedStopLimitPrice, o.adjustedTrailingAmount, o.adjustableTrailingUnit, o.softDollarTier.name, o.softDollarTier.val, o.softDollarTier.displayName, o.cashQty, o.dontUseAutoPriceForHedge, o.isOmsContainer, o.discretionaryUpToLimitPrice, o.usePriceMgmtAlgo, *fields) = fields
    # Trailing fields gated by server protocol version.
    if (self.serverVersion >= 159):
        o.duration = fields.pop(0)
    if (self.serverVersion >= 160):
        o.postToAts = fields.pop(0)
    if (self.serverVersion >= 162):
        o.autoCancelParent = fields.pop(0)
    if (self.serverVersion >= 170):
        (o.minTradeQty, o.minCompeteSize, o.competeAgainstBestOffset, o.midOffsetAtWhole, o.midOffsetAtHalf, *fields) = fields
    # Convert the string fields of each object to their declared types.
    self.parse(c)
    self.parse(o)
    self.parse(st)
    self.wrapper.openOrder(o.orderId, c, o, st)
def completedOrder(self, fields):
    """Decode a ``completedOrder`` wire message.

    Same decoding technique as ``openOrder`` — a flat field sequence is
    consumed by positional tuple-unpacking, so statement order mirrors the
    wire order exactly — but with a slightly different field set (no
    orderId/clientId, no what-if margins, and completed-order trailing
    fields).  Results are handed to ``wrapper.completedOrder``.
    """
    o = Order()
    c = Contract()
    st = OrderState()
    # Fixed leading section: contract description and core order fields.
    (_, c.conId, c.symbol, c.secType, c.lastTradeDateOrContractMonth, c.strike, c.right, c.multiplier, c.exchange, c.currency, c.localSymbol, c.tradingClass, o.action, o.totalQuantity, o.orderType, o.lmtPrice, o.auxPrice, o.tif, o.ocaGroup, o.account, o.openClose, o.origin, o.orderRef, o.permId, o.outsideRth, o.hidden, o.discretionaryAmt, o.goodAfterTime, o.faGroup, o.faMethod, o.faPercentage, *fields) = fields
    # faProfile was dropped from the protocol at server version 177.
    if (self.serverVersion < 177):
        (o.faProfile, *fields) = fields
    (o.modelCode, o.goodTillDate, o.rule80A, o.percentOffset, o.settlingFirm, o.shortSaleSlot, o.designatedLocation, o.exemptCode, o.startingPrice, o.stockRefPrice, o.delta, o.stockRangeLower, o.stockRangeUpper, o.displaySize, o.sweepToFill, o.allOrNone, o.minQty, o.ocaType, o.triggerMethod, o.volatility, o.volatilityType, o.deltaNeutralOrderType, o.deltaNeutralAuxPrice, *fields) = fields
    # Delta-neutral block only present when a delta-neutral order type is set.
    if o.deltaNeutralOrderType:
        (o.deltaNeutralConId, o.deltaNeutralShortSale, o.deltaNeutralShortSaleSlot, o.deltaNeutralDesignatedLocation, *fields) = fields
    (o.continuousUpdate, o.referencePriceType, o.trailStopPrice, o.trailingPercent, c.comboLegsDescrip, *fields) = fields
    # Variable-length combo legs: a count followed by 8 fields per leg.
    numLegs = int(fields.pop(0))
    c.comboLegs = []
    for _ in range(numLegs):
        leg: Any = ComboLeg()
        (leg.conId, leg.ratio, leg.action, leg.exchange, leg.openClose, leg.shortSaleSlot, leg.designatedLocation, leg.exemptCode, *fields) = fields
        self.parse(leg)
        c.comboLegs.append(leg)
    # Per-leg order prices.
    numOrderLegs = int(fields.pop(0))
    o.orderComboLegs = []
    for _ in range(numOrderLegs):
        leg = OrderComboLeg()
        leg.price = fields.pop(0)
        self.parse(leg)
        o.orderComboLegs.append(leg)
    # Smart combo routing parameters as tag/value pairs.
    numParams = int(fields.pop(0))
    if (numParams > 0):
        o.smartComboRoutingParams = []
        for _ in range(numParams):
            (tag, value, *fields) = fields
            o.smartComboRoutingParams.append(TagValue(tag, value))
    (o.scaleInitLevelSize, o.scaleSubsLevelSize, increment, *fields) = fields
    # An empty increment means "unset"; extra scale fields only follow a real one.
    o.scalePriceIncrement = float((increment or UNSET_DOUBLE))
    if (0 < o.scalePriceIncrement < UNSET_DOUBLE):
        (o.scalePriceAdjustValue, o.scalePriceAdjustInterval, o.scaleProfitOffset, o.scaleAutoReset, o.scaleInitPosition, o.scaleInitFillQty, o.scaleRandomPercent, *fields) = fields
    o.hedgeType = fields.pop(0)
    # Hedge parameter follows only when a hedge type is set.
    if o.hedgeType:
        o.hedgeParam = fields.pop(0)
    (o.clearingAccount, o.clearingIntent, o.notHeld, dncPresent, *fields) = fields
    # Optional delta-neutral contract, guarded by a presence flag.
    if int(dncPresent):
        (conId, delta, price, *fields) = fields
        c.deltaNeutralContract = DeltaNeutralContract(int((conId or 0)), float((delta or 0)), float((price or 0)))
    o.algoStrategy = fields.pop(0)
    # Algo parameters only follow when a strategy is set.
    if o.algoStrategy:
        numParams = int(fields.pop(0))
        if (numParams > 0):
            o.algoParams = []
            for _ in range(numParams):
                (tag, value, *fields) = fields
                o.algoParams.append(TagValue(tag, value))
    (o.solicited, st.status, o.randomizeSize, o.randomizePrice, *fields) = fields
    # Pegged-to-benchmark orders carry extra reference fields.
    if (o.orderType in ('PEG BENCH', 'PEGBENCH')):
        (o.referenceContractId, o.isPeggedChangeAmountDecrease, o.peggedChangeAmount, o.referenceChangeAmount, o.referenceExchangeId, *fields) = fields
    # Conditions: each consumes one type field plus as many fields as the
    # condition dataclass has (minus the already-read condType).
    numConditions = int(fields.pop(0))
    if (numConditions > 0):
        for _ in range(numConditions):
            condType = int(fields.pop(0))
            condCls = OrderCondition.createClass(condType)
            n = (len(dataclasses.fields(condCls)) - 1)
            cond = condCls(condType, *fields[:n])
            self.parse(cond)
            o.conditions.append(cond)
            fields = fields[n:]
        (o.conditionsIgnoreRth, o.conditionsCancelOrder, *fields) = fields
    (o.trailStopPrice, o.lmtPriceOffset, o.cashQty, o.dontUseAutoPriceForHedge, o.isOmsContainer, o.autoCancelDate, o.filledQuantity, o.refFuturesConId, o.autoCancelParent, o.shareholder, o.imbalanceOnly, o.routeMarketableToBbo, o.parentPermId, st.completedTime, st.completedStatus, *fields) = fields
    # Trailing fields gated by server protocol version.
    if (self.serverVersion >= 170):
        (o.minTradeQty, o.minCompeteSize, o.competeAgainstBestOffset, o.midOffsetAtWhole, o.midOffsetAtHalf, *fields) = fields
    # Convert the string fields of each object to their declared types.
    self.parse(c)
    self.parse(o)
    self.parse(st)
    self.wrapper.completedOrder(c, o, st)
def historicalSchedule(self, fields):
    """Decode a ``historicalSchedule`` message into HistoricalSession
    entries (three fields per session) and forward them to the wrapper."""
    _, reqId, startDateTime, endDateTime, timeZone, count, *fields = fields
    it = iter(fields)
    sessions = []
    for _ in range(int(count)):
        sessions.append(HistoricalSession(
            startDateTime=next(it),
            endDateTime=next(it),
            refDate=next(it)))
    self.wrapper.historicalSchedule(
        int(reqId), startDateTime, endDateTime, timeZone, sessions)
class init_cond(object):
    """Initial condition: circular interface of radius 0.15 at (0.5, 0.75).

    ``uOfXT`` returns either the plain signed distance to the circle or a
    smoothed (regularised) level-set value, depending on module flags.
    """

    def __init__(self, L, scaling=0.25):
        # ``L`` is accepted for interface compatibility but not stored.
        self.radius = 0.15
        self.xc = 0.5
        self.yc = 0.75
        self.scaling = scaling

    def uOfXT(self, x, t):
        import numpy as np
        # beta: smoothing width from module-level epsCoupez/he (defined elsewhere).
        beta = epsCoupez * he
        dx = x[0] - self.xc
        dy = x[1] - self.yc
        r = math.sqrt(dx ** 2 + dy ** 2)
        if ct.STABILIZATION_TYPE_ncls == 0 or ct.level_set_function == 0:
            # Plain signed distance (positive inside the circle).
            return self.radius - r
        # Regularised level set via a smoothed Heaviside profile.
        return beta * (2 * smoothedHeaviside(beta, self.radius - r) - 1)
def prepare_data_for_plots(current_data: pd.Series, reference_data: Optional[pd.Series], column_type: ColumnType, max_categories: Optional[int]=MAX_CATEGORIES) -> Tuple[(pd.Series, Optional[pd.Series])]:
    """Return plot-ready copies of the current/reference series.

    Categorical columns are relabelled (capping distinct labels at
    ``max_categories``); other column types are returned as plain copies.
    """
    if column_type == ColumnType.Categorical:
        return relabel_data(current_data, reference_data, max_categories)
    copied_reference = None if reference_data is None else reference_data.copy()
    return (current_data.copy(), copied_reference)
class OptionPlotoptionsStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Option wrapper for ``plotOptions.streamgraph.sonification.
    defaultInstrumentOptions.mapping.lowpass.resonance``.

    Each option is a read/write property: reading returns the configured
    value (or ``None``), writing stores it in the option tree via
    ``_config``.  The paired same-name getter/setter defs require
    ``@property``/``.setter`` decorators — without them each setter def
    silently replaces its getter, making the value unreadable.
    """

    @property
    def mapFunction(self):
        """Mapping function for the resonance value."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the resonance is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped resonance range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped resonance range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope within which min/max are computed."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the two lines below look like decorator remnants with their
# leading '@'-prefix stripped during extraction (presumably
# '@pytest.mark.parametrize(...)' plus a test marker) -- confirm against the
# original file.
.parametrize('exception_phase', (ProcessPhase.STARTING, ProcessPhase.READY, ProcessPhase.RUNNING, ProcessPhase.STOPPING))
_test
def test_exception(exception_phase: ProcessPhase) -> None:
    """A child raising in any phase must surface as an EXCEPTION failure.

    proc1 is configured to raise at ``exception_phase``; proc2 shuts down
    normally.  The manager must raise ProcessManagerException reporting
    proc1 as EXCEPTION and proc2 as no-failure (None).
    """
    manager = ProcessManager(processes=(ProcessInfo(module=__name__, name='proc1', args=('--manager-name', 'test_manager', '--shutdown', 'EXCEPTION', '--last-phase', exception_phase.name)), ProcessInfo(module=__name__, name='proc2', args=('--manager-name', 'test_manager', '--shutdown', 'NORMAL'))), name='test_manager', startup_period=TEST_STARTUP_PERIOD, shutdown_period=TEST_SHUTDOWN_PERIOD)
    with pytest.raises(ProcessManagerException) as ex:
        manager.run()
    # Only the raising process is reported as a failure.
    assert (ex.value.failures == {'proc1': ProcessFailureType.EXCEPTION, 'proc2': None})
class InfoOthers(View):
    """Public profile page for another user (identified by ``article_id``)."""

    def get(self, request, article_id):
        categories = Category_Article.objects.all()
        # Users the target follows / users following the target.
        followees = User.objects.filter(follow__fan__id=article_id)
        followers = User.objects.filter(fan__follow_id=article_id)
        target_user = User.objects.get(id=article_id)
        # Whether the requesting user already follows the target.
        is_following = Follows.objects.filter(follow=article_id, fan=request.user.id).exists()
        context = {
            'category': categories,
            'count': followees,
            'floow': followers,
            'user': target_user,
            'is_active': is_following,
        }
        return render(request, 'pc/person/infoOthers.html', context)
class TestAmazon(unittest.TestCase):
    """Tests for the Amazon SP-API integration."""

    def setUp(self):
        # Run as Administrator and ensure the custom fields exist.
        frappe.set_user('Administrator')
        setup_custom_fields()

    def test_get_orders(self):
        """Fetching orders since the given date yields two sales orders."""
        repository = TestAmazonRepository()
        orders = repository.get_orders('2000-07-23')
        self.assertEqual(len(orders), 2)

    def test_validate_credentials(self):
        """Empty credentials must be rejected with a ValidationError."""
        credentials = {
            'iam_arn': '',
            'client_id': '',
            'client_secret': '',
            'refresh_token': '',
            'aws_access_key': '',
            'aws_secret_key': '',
            'country': 'US',
        }
        self.assertRaises(ValidationError, validate_amazon_sp_api_credentials, **credentials)
class PatternSequenceProcessor(InlineProcessor):
    """Inline processor that matches a sequence of patterns and builds
    nested ElementTree elements, recursively re-applying the patterns to
    matched sub-text.

    ``PATTERNS`` is expected to hold items with ``pattern`` (compiled
    regex), ``builder`` ('single' | 'double' | 'double2'), ``tags`` and
    ``full_recursion`` attributes; subclasses populate it.
    """
    PATTERNS = []

    def build_single(self, m, tag, full_recursion, idx):
        """Build one element ``tag`` whose content is match group 2,
        recursively parsed for nested patterns."""
        el1 = etree.Element(tag)
        text = m.group(2)
        self.parse_sub_patterns(text, el1, None, full_recursion, idx)
        return el1

    def build_double(self, m, tags, full_recursion, idx):
        """Build ``tag1`` containing ``tag2``; group 2 goes inside tag2,
        an optional group 3 becomes trailing content of tag1."""
        (tag1, tag2) = tags.split(',')
        el1 = etree.Element(tag1)
        el2 = etree.Element(tag2)
        text = m.group(2)
        self.parse_sub_patterns(text, el2, None, full_recursion, idx)
        el1.append(el2)
        if (len(m.groups()) == 3):
            text = m.group(3)
            # Trailing text is parsed into tag1, after tag2.
            self.parse_sub_patterns(text, el1, el2, full_recursion, idx)
        return el1

    def build_double2(self, m, tags, full_recursion, idx):
        """Build ``tag1`` containing ``tag2``; group 2 fills tag1's leading
        content and group 3 goes inside tag2."""
        (tag1, tag2) = tags.split(',')
        el1 = etree.Element(tag1)
        el2 = etree.Element(tag2)
        text = m.group(2)
        self.parse_sub_patterns(text, el1, None, full_recursion, idx)
        text = m.group(3)
        el1.append(el2)
        self.parse_sub_patterns(text, el2, None, full_recursion, idx)
        return el1

    def parse_sub_patterns(self, data, parent, last, full_recursion, idx):
        """Scan ``data`` for nested pattern matches, appending built
        elements to ``parent``.

        Plain text between matches becomes either ``parent.text`` (before
        the first child) or the previous child's ``tail``.  Unless
        ``full_recursion`` is set, only patterns *after* index ``idx`` are
        considered, preventing a pattern from re-matching itself.
        """
        offset = 0   # start of the yet-unconsumed plain text
        pos = 0      # current scan position
        length = len(data)
        while (pos < length):
            # Cheap pre-filter: only try individual patterns where the
            # combined regex can match.
            if self.compiled_re.match(data, pos):
                matched = False
                for (index, item) in enumerate(self.PATTERNS):
                    if ((not full_recursion) and (index <= idx)):
                        continue
                    m = item.pattern.match(data, pos)
                    if m:
                        # Flush the plain text preceding this match.
                        text = data[offset:m.start(0)]
                        if text:
                            if (last is not None):
                                last.tail = text
                            else:
                                parent.text = text
                        el = self.build_element(m, item.builder, item.tags, item.full_recursion, index)
                        parent.append(el)
                        last = el
                        offset = pos = m.end(0)
                        matched = True
                if (not matched):
                    pos += 1
            else:
                pos += 1
        # Flush any trailing plain text.
        text = data[offset:]
        if text:
            if (last is not None):
                last.tail = text
            else:
                parent.text = text

    def build_element(self, m, builder, tags, full_recursion, index):
        """Dispatch to the builder named by ``builder``."""
        if (builder == 'double2'):
            return self.build_double2(m, tags, full_recursion, index)
        elif (builder == 'double'):
            return self.build_double(m, tags, full_recursion, index)
        else:
            return self.build_single(m, tags, full_recursion, index)

    def handleMatch(self, m, data):
        """InlineProcessor entry point: re-try each pattern at the match
        start and build an element from the first that matches."""
        el = None
        start = None
        end = None
        for (index, item) in enumerate(self.PATTERNS):
            m1 = item.pattern.match(data, m.start(0))
            if m1:
                start = m1.start(0)
                end = m1.end(0)
                el = self.build_element(m1, item.builder, item.tags, item.full_recursion, index)
                break
        return (el, start, end)
# NOTE(review): the two lines below look like pytest decorators whose leading
# '@pytest.mark' prefix was stripped during extraction -- confirm against the
# original file.
.integrationtest
.skipif((not has_postgres_configured), reason='PostgresSQL not configured')
def test_psycopg_composable_query_works(instrument, postgres_connection, elasticapm_client):
    """A psycopg ``sql.SQL`` composable query must be executable under
    instrumentation and traced with the rendered SQL statement."""
    from psycopg import sql
    cursor = postgres_connection.cursor()
    query = sql.SQL("SELECT * FROM {table} WHERE {row} LIKE 't%' ORDER BY {row} DESC").format(table=sql.Identifier('test'), row=sql.Identifier('name'))
    # Render the composable against the *unwrapped* cursor so the expected
    # statement matches what the instrumentation records.
    baked_query = query.as_string(cursor.__wrapped__)
    result = None
    try:
        elasticapm_client.begin_transaction('web.django')
        cursor.execute(query)
        result = cursor.fetchall()
        elasticapm_client.end_transaction(None, 'test-transaction')
    finally:
        # Assertions run regardless of transaction outcome.
        assert ([(2, 'two'), (3, 'three')] == result)
        transactions = elasticapm_client.events[TRANSACTION]
        spans = elasticapm_client.spans_for_transaction(transactions[0])
        span = spans[0]
        assert (span['name'] == 'SELECT FROM test')
        assert ('db' in span['context'])
        assert (span['context']['db']['instance'] == 'elasticapm_test')
        assert (span['context']['db']['type'] == 'sql')
        assert (span['context']['db']['statement'] == baked_query)
# NOTE(review): the line below looks like a stripped Flask route decorator,
# presumably '@<blueprint>.route('/settings', methods=['GET'])' -- confirm.
('/settings', methods=['GET'])
def settings():
    """Render the settings page: the live config (pretty-printed) and the
    raw contents of the config file it was loaded from, both
    syntax-highlighted as Python."""
    config = current_app.config.get('MASTER_CONFIG')
    config_contents = pformat(config)
    file_contents = ''
    file_path = ''
    file_name = config.get('CONFIG_FILE')
    if file_name:
        # The config file lives under the config object's data path.
        file_path = os.path.join(config.data_path, file_name)
        with open(file_path, mode='r') as f:
            file_contents += f.read()
    curr_settings = highlight(config_contents, PythonLexer(), HtmlFormatter())
    file_settings = highlight(file_contents, PythonLexer(), HtmlFormatter())
    return render_template('settings.html', active_nav_item='settings', config_file=file_path, curr_settings=curr_settings, file_settings=file_settings)
class GenericTableModel(QStandardItemModel):
    """Windowed table model backed by a ``QSqlQuery``.

    Only the rows currently visible in the viewport are materialised into
    ``self.items``; ``totalRowCount`` reflects the full query result so the
    view's scrollbar range stays correct.

    Fixes vs. the previous version: ``nextRecord``/``prevRecord``
    referenced an undefined name ``q`` (NameError), and the mutable
    ``items``/``lastItems`` lists were class attributes shared by every
    instance — all state is now initialised per instance in ``__init__``.
    """

    rowCountChanged = pyqtSignal()
    beginViewPortRefresh = pyqtSignal()
    endViewPortRefresh = pyqtSignal()

    def __init__(self, tableName, headerLabels):
        self.tableName = tableName
        self.headerLabels = headerLabels
        self.lastColumnCount = len(self.headerLabels)
        QStandardItemModel.__init__(self, 0, self.lastColumnCount)
        self.setHorizontalHeaderLabels(self.headerLabels)
        # Per-instance state (previously shared class attributes).
        self.db = None
        self.totalRowCount = 0
        self.origQueryStr = QSqlQuery()
        self.prevQueryStr = ''
        self.realQuery = QSqlQuery()
        self.items = []
        self.lastItems = []

    def query(self):
        return self

    def lastQuery(self):
        return self.origQueryStr

    def lastError(self):
        return self.realQuery.lastError()

    def clear(self):
        # Intentionally a no-op: the base-class clear would drop headers.
        pass

    def rowCount(self, index=None):
        # Report only the materialised (visible) rows.
        return len(self.items)

    def data(self, index, role=Qt.DisplayRole):
        """Serve display/edit data from the in-memory window; delegate
        everything else to the base class."""
        if ((role == Qt.DisplayRole) or (role == Qt.EditRole)):
            items_count = len(self.items)
            if (index.isValid() and (items_count > 0) and (index.row() < items_count)):
                return self.items[index.row()][index.column()]
        return QStandardItemModel.data(self, index, role)

    def setModelColumns(self, newColumns):
        """Rebuild the header labels from the live query's record, resizing
        the model to ``newColumns`` columns (signals suppressed)."""
        self.blockSignals(True)
        self.headerLabels = []
        self.removeColumns(0, self.lastColumnCount)
        self.setHorizontalHeaderLabels(self.headerLabels)
        for col in range(0, newColumns):
            self.headerLabels.append(self.realQuery.record().fieldName(col))
        self.lastColumnCount = newColumns
        self.setHorizontalHeaderLabels(self.headerLabels)
        self.setColumnCount(len(self.headerLabels))
        self.blockSignals(False)

    def setQuery(self, q, db):
        """Run query string ``q`` against ``db`` (only when it changed),
        updating row/column counts and emitting rowCountChanged."""
        self.origQueryStr = q
        self.db = db
        if (self.prevQueryStr != self.origQueryStr):
            self.realQuery = QSqlQuery(q, db)
            self.realQuery.exec_()
            # Seek to the end to learn the result size.
            self.realQuery.last()
            queryRows = max(0, (self.realQuery.at() + 1))
            self.totalRowCount = queryRows
            self.setRowCount(self.totalRowCount)
            queryColumns = self.realQuery.record().count()
            if (queryColumns != self.lastColumnCount):
                self.setModelColumns(queryColumns)
            self.prevQueryStr = self.origQueryStr
        self.rowCountChanged.emit()

    def nextRecord(self, offset):
        """Advance the live query cursor by ``offset`` records.

        Bug fix: previously referenced an undefined name ``q`` instead of
        ``self.realQuery``.
        """
        cur_pos = self.realQuery.at()
        self.realQuery.seek(max(cur_pos, (cur_pos + offset)))

    def prevRecord(self, offset):
        """Move the live query cursor back by ``offset`` records.

        Bug fix: previously referenced an undefined name ``q`` instead of
        ``self.realQuery``.
        """
        cur_pos = self.realQuery.at()
        self.realQuery.seek(min(cur_pos, (cur_pos - offset)))

    def refreshViewport(self, scrollValue, maxRowsInViewport, force=False):
        """Re-materialise the rows visible at ``scrollValue`` (only when
        ``force`` is set), bracketed by the viewport-refresh signals."""
        if (not force):
            return
        self.beginViewPortRefresh.emit()
        self.realQuery.last()
        rowsFound = max(0, (self.realQuery.at() + 1))
        # Position the cursor just before the first visible row.
        if ((scrollValue == 0) or (self.realQuery.at() == QSql.BeforeFirstRow)):
            self.realQuery.seek(QSql.BeforeFirstRow)
        elif (self.realQuery.at() == QSql.AfterLastRow):
            self.realQuery.seek((rowsFound - maxRowsInViewport))
        else:
            self.realQuery.seek(min((scrollValue - 1), self.realQuery.at()))
        upperBound = min(maxRowsInViewport, rowsFound)
        self.setRowCount(self.totalRowCount)
        if (force and ((upperBound > 0) or (self.realQuery.at() < 0))):
            self.fillVisibleRows(self.realQuery, upperBound, force)
        self.endViewPortRefresh.emit()

    def fillVisibleRows(self, q, upperBound, force=False):
        """Read up to ``upperBound`` rows from cursor ``q`` into
        ``self.items`` and emit dataChanged when the window changed."""
        rowsLabels = []
        self.setVerticalHeaderLabels(rowsLabels)
        self.items = []
        cols = []
        for x in range(0, upperBound):
            q.next()
            if (q.at() < 0):
                q.seek(upperBound)
                break
            # Row labels show the 1-based absolute position in the result.
            rowsLabels.append(str((q.at() + 1)))
            cols = []
            for col in range(0, len(self.headerLabels)):
                cols.append(str(q.value(col)))
            self.items.append(cols)
        self.setVerticalHeaderLabels(rowsLabels)
        if ((self.lastItems != self.items) or (force == True)):
            self.dataChanged.emit(self.createIndex(0, 0), self.createIndex(upperBound, len(self.headerLabels)))
        self.lastItems = self.items
        del cols

    def dumpRows(self):
        """Re-run the original query on a fresh cursor and return every row
        as a list of lists (raw values, not stringified)."""
        rows = []
        q = QSqlQuery(self.db)
        q.exec(self.origQueryStr)
        q.seek(QSql.BeforeFirstRow)
        while True:
            q.next()
            if (q.at() == QSql.AfterLastRow):
                break
            row = []
            for col in range(0, len(self.headerLabels)):
                row.append(q.value(col))
            rows.append(row)
        return rows

    def copySelectedRows(self, start=QSql.BeforeFirstRow, end=QSql.AfterLastRow):
        """Collect rows from the live cursor between ``start`` and at most
        ``end`` rows, restoring the cursor position afterwards."""
        rows = []
        lastAt = self.realQuery.at()
        self.realQuery.seek(start)
        while True:
            self.realQuery.next()
            if ((self.realQuery.at() == QSql.AfterLastRow) or (len(rows) >= end)):
                break
            row = []
            for col in range(0, len(self.headerLabels)):
                row.append(self.realQuery.value(col))
            rows.append(row)
        self.realQuery.seek(lastAt)
        return rows
def test_reload_dir_subdirectories_are_removed(reload_directory_structure: Path) -> None:
    """Reload dirs nested inside other reload dirs collapse to the parents."""
    root = reload_directory_structure
    app_dir = root / 'app'
    ext_dir = root / 'ext'
    # Request parents and their subdirectories, in mixed order.
    requested = [
        str(app_dir),
        str(app_dir / 'sub'),
        str(ext_dir / 'sub'),
        str(ext_dir),
    ]
    with as_cwd(root):
        config = Config(app='tests.test_config:asgi_app', reload=True, reload_dirs=requested)
        # Only the two top-level directories survive deduplication.
        assert frozenset(config.reload_dirs) == frozenset([app_dir, ext_dir])
def convert_to_bgra_if_required(color_format: ImageFormat, color_image):
    """Convert a raw color capture to BGRA based on its source format.

    MJPG frames are decoded; NV12/YUY2 frames are colorspace-converted.
    Any other format is returned unchanged.
    """
    if color_format == ImageFormat.COLOR_MJPG:
        return cv2.imdecode(color_image, cv2.IMREAD_COLOR)
    if color_format == ImageFormat.COLOR_NV12:
        return cv2.cvtColor(color_image, cv2.COLOR_YUV2BGRA_NV12)
    if color_format == ImageFormat.COLOR_YUY2:
        return cv2.cvtColor(color_image, cv2.COLOR_YUV2BGRA_YUY2)
    return color_image
class RememberThreads(object):
    """Thread factory that records every thread it creates.

    Use as a context manager: on exit, every remembered thread is joined
    with a safety timeout, and a RuntimeError is raised if any thread is
    still alive afterwards.
    """

    def __init__(self):
        self._threads = []

    def __call__(self, *args, **kwargs):
        # Same signature as threading.Thread(...); remember the new thread.
        created = threading.Thread(*args, **kwargs)
        self._threads.append(created)
        return created

    def __enter__(self):
        return self

    def __exit__(self, *exc_args):
        pending = self._threads
        while pending:
            worker = pending.pop()
            worker.join(timeout=SAFETY_TIMEOUT)
            if worker.is_alive():
                raise RuntimeError('Failed to join thread')
class OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Option wrapper for ``plotOptions.wordcloud.sonification.
    defaultInstrumentOptions.mapping.tremolo``.

    The accessors are restored as read-only properties (the ``@property``
    decorators were missing, which would have required calling them as
    methods, breaking the option-tree access convention used elsewhere).
    """

    @property
    def depth(self) -> 'OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        """Sub-options controlling tremolo depth."""
        return self._config_sub_data('depth', OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    @property
    def speed(self) -> 'OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        """Sub-options controlling tremolo speed."""
        return self._config_sub_data('speed', OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
class _UserIdentityType():
def new(cls, event):
user_type = event['CloudTrailEvent']['userIdentity'].get('type', 'AWSService')
klass = globals().get(f'_{user_type}Type', cls)
return klass(event)
def __init__(self, event):
self.event = event
self.ct_event = event['CloudTrailEvent']
self.user_identity = self.ct_event['userIdentity']
def type(self):
return self.user_identity.get('type', 'NO_TYPE')
def username(self):
if self.event['EventName'].startswith('AssumeRole'):
user = self._parse_username_from_request_params()
if user:
return user
return self._parse_username()
def contains(self, s):
s = s.lower()
try:
next(_deep_finder(self.event, (lambda n: (isinstance(n, str) and (s in n.lower())))))
return True
except StopIteration:
return False
def has_error(self):
return ('errorCode' in self.ct_event)
def _parse_username(self):
return self.event.get('Username', self.user_identity.get('userName', 'NO_USERNAME'))
def _find_resource(self, type_, default=None):
for resource in self.event['Resources']:
if (resource['ResourceType'] == type_):
return resource['ResourceName']
return default
def _parse_username_from_request_params(self):
params = self.ct_event.get('requestParameters', {})
arn = params.get('roleArn')
session_name = params.get('roleSessionName')
if (arn and session_name):
return ((_strip_to('/', arn, greedy=True) + '/') + session_name)
return None
def event_id(self):
return self.event['EventId']
def to_json(self):
return json.dumps(self.event, default=str, indent=4)
def to_row(self):
return (self.event.get('EventTime', ''), self.event.get('EventSource', ''), self.event.get('EventName', ''), self.ct_event.get('errorCode', ''))
def __str__(self):
src = self.event.get('EventSource', '')
time = self.event.get('EventTime', '')
name = self.event.get('EventName', '')
error = self.ct_event.get('errorCode', '')
if error:
error = f'ERROR: {error}'
return f'{time} {src:25.25} {name:30.30} {self.username()} {error}' |
# NOTE(review): '_tuple' below looks like a stripped decorator (presumably
# '@to_tuple', which collects the generator into a tuple) -- confirm against
# the original file.
_tuple
def _extract_vm_config(vm_config: Dict[(str, str)]) -> Iterable[VMFork]:
    """Yield (fork block number, VM class) pairs from a genesis/chain
    config dict, in protocol order.

    Each recognised '<fork>ForkBlock' key contributes one pair; Homestead
    additionally switches to the DAO-validating VM when 'DAOForkBlock' is
    present.
    """
    if ('frontierForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['frontierForkBlock']), FrontierVM))
    if ('homesteadForkBlock' in vm_config.keys()):
        homestead_fork_block = hex_to_block_number(vm_config['homesteadForkBlock'])
        if ('DAOForkBlock' in vm_config):
            # DAO fork support: configure the validator VM with the DAO block.
            dao_fork_block_number = hex_to_block_number(vm_config['DAOForkBlock'])
            HomesteadVM = MainnetDAOValidatorVM.configure(_dao_fork_block_number=dao_fork_block_number)
            (yield (homestead_fork_block, HomesteadVM))
        else:
            (yield (homestead_fork_block, BaseHomesteadVM))
    if ('EIP150ForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['EIP150ForkBlock']), TangerineWhistleVM))
    if ('EIP158ForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['EIP158ForkBlock']), SpuriousDragonVM))
    if ('byzantiumForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['byzantiumForkBlock']), ByzantiumVM))
    if ('constantinopleForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['constantinopleForkBlock']), ConstantinopleVM))
    if ('petersburgForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['petersburgForkBlock']), PetersburgVM))
    if ('istanbulForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['istanbulForkBlock']), IstanbulVM))
    if ('muirglacierForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['muirglacierForkBlock']), MuirGlacierVM))
    if ('berlinForkBlock' in vm_config.keys()):
        (yield (hex_to_block_number(vm_config['berlinForkBlock']), BerlinVM))
def find_project_config_file(project_directory: 'Path | None'=None, use_git: bool=False) -> 'Path | None':
    """Locate the project-level config file via the git toplevel directory.

    Prefers '.foundry_dev_tools'; falls back to the deprecated
    '.foundry_local_config' (with a DeprecationWarning).  Returns None when
    the project is not git-managed or no config file exists.
    """
    toplevel = git_toplevel_dir(project_directory, use_git=use_git)
    if not toplevel:
        LOGGER.debug('Project-based config file could not be loaded, is project not managed with git?')
        return None
    current_config = toplevel / '.foundry_dev_tools'
    if current_config.is_file():
        return current_config
    legacy_config = toplevel / '.foundry_local_config'
    if not legacy_config.is_file():
        return None
    # Old file name still supported, but deprecated.
    warnings.warn(f'''Foundrylocal has been renamed to Foundry DevTools.
Move the old config file {legacy_config} to {current_config}
The fallback to the old config file will be removed in the future!''', category=DeprecationWarning)
    return legacy_config
class LifespanHandler():
    """Records whether the ASGI lifespan startup/shutdown events fired."""

    def __init__(self):
        # Flipped to True once the corresponding event is processed.
        self.startup_succeeded = False
        self.shutdown_succeeded = False

    async def process_startup(self, scope, event):
        """Handle the lifespan.startup event."""
        self._verify(scope, event, 'lifespan.startup')
        self.startup_succeeded = True

    async def process_shutdown(self, scope, event):
        """Handle the lifespan.shutdown event."""
        self._verify(scope, event, 'lifespan.shutdown')
        self.shutdown_succeeded = True

    @staticmethod
    def _verify(scope, event, expected_type):
        # Shared sanity checks on the ASGI lifespan messages.
        assert scope['type'] == 'lifespan'
        assert event['type'] == expected_type
class BatchCache():
    """Disk-backed cache of processed ``Doc`` batches.

    Docs are queued and persisted in batches of ``batch_size``; an index
    file maps doc ids to batch files.  At most ``max_batches_in_mem``
    batches are kept loaded (FIFO eviction).  When ``path`` is None the
    cache is disabled (all operations become no-ops / misses).

    Restored decorators that were missing: ``prompt_template`` is a
    property with a setter (the stray ``_template.setter`` line was its
    remnant), ``_index_path`` is a property (it is read without a call),
    and ``_doc_id``/``_batch_id`` are staticmethods (they take no ``self``
    yet are invoked via ``self.``).
    """
    # File name of the doc-id -> batch-id index inside the cache directory.
    _INDEX_NAME: str = 'index.jsonl'

    def __init__(self, path: Optional[Union[(str, Path)]], batch_size: int, max_batches_in_mem: int):
        self._path = (Path(path) if path else None)
        self._batch_size = batch_size
        self.max_batches_in_mem = max_batches_in_mem
        self._vocab: Optional[Vocab] = None
        self._prompt_template: Optional[str] = None
        self._prompt_template_checked: bool = False
        # doc id -> batch id, batch ids in insertion order, loaded batches.
        self._doc2batch: Dict[(int, int)] = {}
        self._batch_hashes: List[int] = []
        self._loaded_docs: Dict[(int, Dict[(int, Doc)])] = {}
        self._cache_queue: List[Doc] = []
        self._stats: Dict[(str, int)] = {'hit': 0, 'hit_contains': 0, 'missed': 0, 'missed_contains': 0, 'added': 0, 'persisted': 0}
        self._init_cache_dir()

    def initialize(self, vocab: Vocab, task: LLMTask) -> None:
        """Bind the vocab and adopt the task's prompt template (warning
        when the task cannot provide one and persistence is enabled)."""
        self._vocab = vocab
        if isinstance(task, PromptTemplateProvider):
            self.prompt_template = task.prompt_template
        else:
            self.prompt_template = ''
            if self._path:
                warnings.warn('The specified task does not provide its prompt template via `prompt_template()`. This means that the cache cannot verify whether all cached documents were generated using the same prompt template.')

    @property
    def prompt_template(self) -> Optional[str]:
        """The prompt template the cached docs were generated with."""
        return self._prompt_template

    @prompt_template.setter
    def prompt_template(self, prompt_template: str) -> None:
        """Set the template; persist it or verify it matches the persisted one."""
        self._prompt_template = prompt_template
        if (not self._path):
            return
        prompt_template_path = (self._path / 'prompt_template.txt')
        if (not prompt_template_path.exists()):
            with open(prompt_template_path, 'w') as file:
                file.write(self._prompt_template)
        else:
            with open(prompt_template_path, 'r') as file:
                existing_prompt_template = ''.join(file.readlines())
            if (hash(existing_prompt_template) != hash(self._prompt_template)):
                raise ValueError(f'Prompt template in cache directory ({prompt_template_path}) is not equal with current prompt template. Reset your cache if you are using a new prompt template.')

    def _init_cache_dir(self) -> None:
        """Create the cache directory (if configured) and load the index."""
        if (self._path is None):
            return
        if (self._path.exists() and (not self._path.is_dir())):
            raise ValueError('Cache directory exists and is not a directory.')
        self._path.mkdir(parents=True, exist_ok=True)
        index_path = self._index_path
        if index_path.exists():
            for rec in srsly.read_jsonl(index_path):
                self._doc2batch = {**self._doc2batch, **{int(k): int(v) for (k, v) in rec.items()}}

    @property
    def _index_path(self) -> Path:
        """Path of the index file inside the cache directory."""
        assert (self._path is not None)
        return (self._path / BatchCache._INDEX_NAME)

    def _batch_path(self, batch_id: int) -> Path:
        """Path of the serialized batch file for ``batch_id``."""
        assert (self._path is not None)
        return (self._path / f'{batch_id}.spacy')

    @staticmethod
    def _doc_id(doc: Doc) -> int:
        """Content hash of a doc: sum of its token ORTH ids."""
        return numpy.sum(doc.to_array(['ORTH']), dtype=numpy.uint64).item()

    @staticmethod
    def _batch_id(doc_ids: Iterable[int]) -> int:
        """Batch id: sum of the contained doc ids."""
        return numpy.sum(numpy.asarray(doc_ids, dtype=numpy.uint64), dtype=numpy.uint64).item()

    def add(self, doc: Doc) -> None:
        """Queue a processed doc; flush to disk when the batch is full."""
        if (self._path is None):
            return
        if ((not self._prompt_template_checked) and (self._prompt_template is None)):
            warnings.warn('No prompt template set for Cache object, entailing that consistency of prompt template used to generate docs cannot be checked. Be mindful to reset your cache whenever you change your prompt template.')
            self._prompt_template_checked = True
        self._cache_queue.append(doc)
        self._stats['added'] += 1
        if (len(self._cache_queue) == self._batch_size):
            self._persist()

    def _persist(self) -> None:
        """Write the queued docs as one batch file and append to the index."""
        assert self._path
        doc_ids = [self._doc_id(doc) for doc in self._cache_queue]
        batch_id = self._batch_id(doc_ids)
        for doc_id in doc_ids:
            self._doc2batch[doc_id] = batch_id
        batch_path = self._batch_path(batch_id)
        DocBin(docs=self._cache_queue, store_user_data=True).to_disk(batch_path)
        srsly.write_jsonl(self._index_path, lines=[{str(doc_id): str(batch_id)} for doc_id in doc_ids], append=True, append_new_line=False)
        self._stats['persisted'] += len(self._cache_queue)
        self._cache_queue = []

    def __contains__(self, doc: Doc) -> bool:
        """Whether a processed version of ``doc`` is in the cache index."""
        if (self._doc_id(doc) not in self._doc2batch):
            self._stats['missed_contains'] += 1
            return False
        self._stats['hit_contains'] += 1
        return True

    def __getitem__(self, doc: Doc) -> Optional[Doc]:
        """Return the cached processed doc, loading (and possibly evicting)
        batch files as needed; None on a cache miss."""
        doc_id = self._doc_id(doc)
        batch_id = self._doc2batch.get(doc_id, None)
        if (not batch_id):
            self._stats['missed'] += 1
            return None
        self._stats['hit'] += 1
        if (batch_id not in self._loaded_docs):
            if (self._path is None):
                raise ValueError("Cache directory path was not configured. Documents can't be read from cache.")
            if (self._vocab is None):
                raise ValueError('Vocab must be set in order to Cache.__get_item__() to work.')
            # Evict the oldest loaded batch when at capacity (FIFO).
            if (len(self._loaded_docs) == self.max_batches_in_mem):
                self._loaded_docs.pop(self._batch_hashes[0])
                self._batch_hashes = self._batch_hashes[1:]
            self._batch_hashes.append(batch_id)
            self._loaded_docs[batch_id] = {self._doc_id(proc_doc): proc_doc for proc_doc in DocBin().from_disk(self._batch_path(batch_id)).get_docs(self._vocab)}
        return self._loaded_docs[batch_id][doc_id]
class sFlowV5RawPacketHeader(object):
    """sFlow v5 "raw packet header" flow record.

    Holds the fixed fields (header protocol, original frame length, bytes
    stripped, sampled header size) plus the sampled header bytes.
    """

    # header_protocol (int32) followed by frame_length, stripped,
    # header_size (uint32 each), network byte order.
    _PACK_STR = '!iIII'

    def __init__(self, header_protocol, frame_length, stripped, header_size, header):
        super(sFlowV5RawPacketHeader, self).__init__()
        self.header_protocol = header_protocol
        self.frame_length = frame_length
        self.stripped = stripped
        self.header_size = header_size
        self.header = header

    @classmethod
    def parser(cls, buf, offset):
        """Parse one raw packet header record from *buf* at *offset*.

        Fix: the method takes ``cls`` but was not declared a classmethod,
        so ``sFlowV5RawPacketHeader.parser(buf, offset)`` mis-bound its
        arguments; ``@classmethod`` restores the intended alternate
        constructor.

        Returns:
            A populated sFlowV5RawPacketHeader instance.
        """
        (header_protocol, frame_length, stripped, header_size) = struct.unpack_from(cls._PACK_STR, buf, offset)
        offset += struct.calcsize(cls._PACK_STR)
        # The sampled header is header_size raw bytes, unpacked as chars.
        header_pack_str = ('!%sc' % header_size)
        header = struct.unpack_from(header_pack_str, buf, offset)
        msg = cls(header_protocol, frame_length, stripped, header_size, header)
        return msg
.gui()
def test_delete_without_selection_does_nothing(monkeypatch, tmp_path, qtbot: QtBot):
Path((tmp_path / 'tessdata')).mkdir()
Path(((tmp_path / 'tessdata') / 'deu.traineddata')).touch()
Path(((tmp_path / 'tessdata') / 'eng.traineddata')).touch()
window = language_manager.LanguageManager(tessdata_path=(tmp_path / 'tessdata'))
qtbot.add_widget(window)
assert (len(window.installed_layout.model.languages) == 2)
assert (window.installed_layout.model.languages[0][0] == 'deu')
window.installed_layout.view.clearSelection()
with qtbot.wait_signal(window.com.on_languages_changed, timeout=100, raising=False) as result:
window.installed_layout.button.click()
assert (not result.signal_triggered)
assert (len(window.installed_layout.model.languages) == 2)
assert (window.installed_layout.model.languages[0][0] == 'deu') |
class OptionPlotoptionsBellcurveSonificationContexttracksMappingTime(Options):
    """Accessors for the Highcharts bellcurve sonification context-track
    ``mapping.time`` option group (mapFunction, mapTo, max, min, within).

    NOTE(review): every option appears twice — a zero-arg reader calling
    ``self._config_get(default)`` and a one-arg writer calling
    ``self._config`` — matching @property/@setter pairs whose decorators
    appear to have been stripped during extraction. As plain defs the
    writer shadows the reader; confirm against the original generator
    output before relying on read access.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsStreamgraphSonificationTracksMappingNoteduration(Options):
    """Accessors for the Highcharts streamgraph sonification track
    ``mapping.noteDuration`` option group (mapFunction, mapTo, max, min,
    within).

    NOTE(review): each option is a reader/writer pair with the same name;
    upstream these are @property/@setter pairs with the decorators
    stripped by extraction, so as written the writer shadows the reader.
    Confirm against the generator's original output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesVariwideTooltip(Options):
    """Accessors for the Highcharts variwide series ``tooltip`` options.

    Readers return the documented Highcharts default via
    ``self._config_get(default)``; writers store the value via
    ``self._config``.

    NOTE(review): each option is a reader/writer pair with the same name;
    upstream these are @property/@setter pairs with the decorators
    stripped by extraction, so as written the writer shadows the reader.
    Confirm against the generator's original output.
    """

    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    def dateTimeLabelFormats(self) -> 'OptionSeriesVariwideTooltipDatetimelabelformats':
        # Nested option group, lazily materialized.
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesVariwideTooltipDatetimelabelformats)

    def distance(self):
        return self._config_get(6)

    def distance(self, num: float):
        self._config(num, js_type=False)

    def followPointer(self):
        return self._config_get(False)

    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    def followTouchMove(self):
        return self._config_get(True)

    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    def footerFormat(self):
        return self._config_get('')

    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    def format(self):
        return self._config_get('undefined')

    def format(self, text: str):
        self._config(text, js_type=False)

    def headerFormat(self):
        return self._config_get(None)

    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    def nullFormat(self):
        return self._config_get(None)

    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    def nullFormatter(self):
        return self._config_get(None)

    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    def pointFormat(self):
        return self._config_get(None)

    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    def pointFormatter(self):
        return self._config_get(None)

    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    def valueDecimals(self):
        return self._config_get(None)

    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    def valuePrefix(self):
        return self._config_get(None)

    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    def valueSuffix(self):
        return self._config_get(None)

    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    def xDateFormat(self):
        return self._config_get(None)

    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsCylinderSonificationContexttracks(Options):
    """Accessors for the Highcharts cylinder series sonification
    ``contextTracks`` option group.

    NOTE(review): each scalar option is a reader/writer pair with the same
    name; upstream these are @property/@setter pairs with the decorators
    stripped by extraction, so as written the writer shadows the reader.
    Confirm against the generator's original output.
    """

    def activeWhen(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksActivewhen':
        # Nested option group, lazily materialized.
        return self._config_sub_data('activeWhen', OptionPlotoptionsCylinderSonificationContexttracksActivewhen)

    def instrument(self):
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsCylinderSonificationContexttracksMapping)

    def midiName(self):
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsCylinderSonificationContexttracksPointgrouping)

    def roundToMusicalNotes(self):
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def timeInterval(self):
        return self._config_get(None)

    def timeInterval(self, num: float):
        self._config(num, js_type=False)

    def type(self):
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)

    def valueInterval(self):
        return self._config_get(None)

    def valueInterval(self, num: float):
        self._config(num, js_type=False)

    def valueMapFunction(self):
        return self._config_get('linear')

    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)

    def valueProp(self):
        return self._config_get('"x"')

    def valueProp(self, text: str):
        self._config(text, js_type=False)
def main(wf):
    """Alfred workflow entry point for Homebrew cask management.

    Dispatches on the query prefix (install / search / home / uninstall /
    list / outdated / config), emits Alfred feedback items, and finally
    kicks off a background refresh of the cask index.

    Args:
        wf: the Alfred Workflow object supplying args, caching and feedback.
    """
    if wf.update_available:
        wf.add_item('An update is available!', autocomplete='workflow:update', valid=False, icon=helpers.get_icon(wf, 'cloud-download'))
    # Homebrew can live under either the Intel or the ARM prefix.
    find_brew = helpers.brew_installed()
    if (not (find_brew['INTEL'] or find_brew['ARM'])):
        helpers.brew_installation_instructions(wf)
    else:
        query = (wf.args[0] if len(wf.args) else None)
        # With no query, surface outdated casks (result cached for an hour).
        if ((not query) and (len(wf.cached_data('cask_outdated_casks', get_outdated_casks, max_age=3600)) > 0)):
            wf.add_item('Some of your casks are outdated!', autocomplete='outdated ', valid=False, icon=helpers.get_icon(wf, 'cloud-download'))
        if (query and query.startswith('install')):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Install cask', arg=('brew install --cask %s' % formula), valid=True, icon=helpers.get_icon(wf, 'package'))
        elif (query and any((query.startswith(x) for x in ['search', 'home']))):
            for formula in filter_all_casks(wf, query):
                wf.add_item(formula, 'Open homepage', arg=('brew home %s' % formula), valid=True, icon=helpers.get_icon(wf, 'package'))
        elif (query and query.startswith('uninstall')):
            for formula in filter_installed_casks(wf, query):
                # Installed-cask entries carry extra text; the name is the first token.
                name = formula.split(' ')[0]
                item = wf.add_item(formula, 'Uninstall cask', arg=('brew uninstall --cask %s' % name), valid=True, icon=helpers.get_icon(wf, 'package'))
                # Alt-modifier additionally zaps the cask's leftover files.
                item.add_modifier('alt', 'Uninstall and zap cask', arg=('brew uninstall --cask --zap %s' % name), valid=True, icon=helpers.get_icon(wf, 'package'))
        elif (query and query.startswith('list')):
            for formula in filter_installed_casks(wf, query):
                wf.add_item(formula, 'Open homepage', arg=('brew home %s' % formula), valid=True, icon=helpers.get_icon(wf, 'package'))
        elif (query and query.startswith('outdated')):
            for formula in filter_outdated_casks(wf, query):
                name = formula.split(' ')[0]
                wf.add_item(formula, 'Upgrade cask', arg=('brew upgrade --cask %s' % name), valid=True, icon=helpers.get_icon(wf, 'package'))
        elif (query and query.startswith('config')):
            helpers.edit_settings(wf)
            wf.add_item('`settings.json` has been opened.', autocomplete='', icon=helpers.get_icon(wf, 'info'))
        else:
            # No recognized prefix: offer the list of available actions,
            # fuzzy-filtered by the query when present.
            actions = cask_actions.ACTIONS
            if query:
                actions = wf.filter(query, actions, key=helpers.search_key_for_action, match_on=MATCH_SUBSTRING)
            if (len(actions) > 0):
                for action in actions:
                    wf.add_item(action['name'], action['description'], uid=action['name'], autocomplete=action['autocomplete'], arg=action['arg'], valid=action['valid'], icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item(('No action found for "%s"' % query), autocomplete='', icon=helpers.get_icon(wf, 'info'))
        # Fallback item when no branch produced feedback: echo the search
        # term and keep the command prefix in the autocomplete.
        if (len(wf._items) == 0):
            query_name = query[(query.find(' ') + 1):]
            wf.add_item(('No formula found for "%s"' % query_name), autocomplete=('%s ' % query[:query.find(' ')]), icon=helpers.get_icon(wf, 'info'))
    wf.send_feedback()
    # Refresh the cask index in the background for the next invocation.
    cmd = ['/usr/bin/env', 'python3', wf.workflowfile('cask_refresh.py')]
    run_in_background('cask_refresh', cmd)
def print_human_readable_format(sar_data):
    """Render subject-access-request data as human-readable text via click.

    For each user, prints the account header, email and groups, then the
    numbered comments and updates sections with their fields sorted by key.
    """
    header_start, header_stop = '>', '<'
    chapter_start, chapter_stop = '---->', '<----'

    def _dump_entries(heading, singular, entries):
        # One chapter banner, then each entry numbered from 1 with its
        # key/value pairs in sorted-key order.
        click.echo('\n{} {}: {}'.format(chapter_start, heading, chapter_stop))
        for idx, entry in enumerate(entries, 1):
            click.echo('\n{} no {}:'.format(singular, idx))
            for key in sorted(entry):
                click.echo('{}: {}'.format(key, entry[key]))

    for user in sar_data:
        record = sar_data[user]
        click.echo('{} User account data for: {} {}\n'.format(header_start, record['name'], header_stop))
        click.echo('email: {}'.format(record['email']))
        click.echo('groups: {}'.format(record['groups']))
        _dump_entries('Comments', 'Comment', record['comments'])
        _dump_entries('Updates', 'Update', record['updates'])
class OptionPlotoptionsLineSonificationPointgrouping(Options):
    """Accessors for the Highcharts line series sonification
    ``pointGrouping`` option group (algorithm, enabled, groupTimespan,
    prop).

    NOTE(review): each option is a reader/writer pair with the same name;
    upstream these are @property/@setter pairs with the decorators
    stripped by extraction, so as written the writer shadows the reader.
    Confirm against the generator's original output.
    """

    def algorithm(self):
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def choose_alternative_id(alt_ids):
    """Select the alternative id matching the running Python version.

    For the known id set {'001', '002'}: '001' targets Python <= 3.7,
    '002' targets newer 3.x interpreters. Any other id set, or a
    non-Python-3 interpreter, aborts via assert.
    """
    if set(alt_ids) != {'001', '002'}:
        print('Not implemented alternative id set')
        assert False
    if sys.version_info.major != 3:
        print('Invalid alternative set')
        assert False
    return '001' if sys.version_info.minor <= 7 else '002'
.requires_roxar
def test_rox_getset_cube(roxar_project):
cube = xtgeo.cube_from_roxar(roxar_project, CUBENAME1)
assert (cube.values.mean() == pytest.approx(0.000718, abs=0.001))
cube.values += 100
assert (cube.values.mean() == pytest.approx(100.000718, abs=0.001))
cube.to_roxar(roxar_project, (CUBENAME1 + '_copy1'))
cube.to_roxar(roxar_project, (CUBENAME1 + '_copy2'), folder='somefolder') |
def has_extension(name):
    """Return True if the VS Code extension *name* is installed.

    Queries ``code --list-extensions`` (via the shell on Windows, argv list
    elsewhere) and checks for *name* in its output. Any failure to run the
    command is treated as "not installed".
    """
    command = 'code --list-extensions'
    try:
        if platform.system() == 'Windows':
            output = subprocess.check_output(command, shell=True)
        else:
            output = subprocess.check_output(command.split())
        return name in output.decode('utf-8')
    except (OSError, subprocess.CalledProcessError):
        # 'code' missing from PATH, or the CLI exited non-zero.
        return False
def exposed_nu_retrigger_series_pages():
    """Re-queue matching web_pages rows in id-windowed batches.

    Walks the web_pages id range in *step*-sized windows, issuing an
    UPDATE per window and committing every time more than 100 rows have
    changed, to keep transactions small.

    NOTE(review): the SQL literal below is visibly corrupted — the
    ``LIKE '`` pattern is unterminated and the id bounds are spliced in
    with %-formatting (injection-prone and quote-breaking). The original
    LIKE pattern appears lost in extraction; reconstruct it from upstream
    history before running this.
    """
    step = 500000
    with db.session_context() as sess:
        # Determine the id range to walk.
        end = sess.execute('SELECT MAX(id) FROM web_pages;')
        end = list(end)[0][0]
        start = sess.execute('SELECT MIN(id) FROM web_pages;')
        start = list(start)[0][0]
        changed = 0
        if (not start):
            print('No null rows to fix!')
            return
        # Align the starting point to a step boundary.
        start = (start - (start % step))
        for x in range(start, end, step):
            have = sess.execute(("UPDATE web_pages SET state='new', priority=50000 WHERE url LIKE ' AND id < %s AND id >= %s AND state != 'new';" % (x, (x - step))))
            # Progress line: current id, percent done, rows in window, pending.
            print(('%10i, %7.4f, %6i, %6i' % (x, ((x / end) * 100), have.rowcount, changed)))
            changed += have.rowcount
            # Commit in chunks so a single transaction never grows too large.
            if (changed > 100):
                print(('Committing (%s changed rows)....' % changed), end=' ')
                sess.commit()
                print('done')
                changed = 0
        sess.commit()
def parse(repo, tag):
    """Generate the gemoji database module and its test fixtures.

    Reads the checked-out gemoji repo's ``db/emoji.json``, builds the
    shortname -> metadata database plus an alias map, writes two test
    fixture files, and regenerates ``pymdownx/gemoji_db.py`` with the
    upstream LICENSE embedded.

    Args:
        repo: repository directory name under ``tags/``.
        tag: release tag recorded as the generated module's version.
    """
    with codecs.open(os.path.join(current_dir, 'tags', repo, repo, 'db', 'emoji.json'), 'r', encoding='utf-8') as f:
        emojis = json.loads(f.read())
    emoji_db = {}
    shortnames = set()
    aliases = {}
    for v in emojis:
        # The first alias is the canonical shortname; the rest become aliases.
        short = v['aliases'][0]
        shortnames.add((':%s:' % short))
        if ('emoji' in v):
            # Unicode emoji: record codepoints (and the alternate form, if any).
            (uc, uc_alt) = get_unicode(v)
            emoji_db[(':%s:' % short)] = {'name': v.get('description', short), 'unicode': uc, 'category': v['category']}
            if uc_alt:
                emoji_db[(':%s:' % short)]['unicode_alt'] = uc_alt
        else:
            # Custom GitHub image (e.g. :octocat:) with no Unicode mapping.
            emoji_db[(':%s:' % short)] = {'name': v.get('description', short)}
        for alias in v['aliases'][1:]:
            aliases[(':%s:' % alias)] = (':%s:' % short)
    # Fixture files: 'png' lists every shortname, 'entities' only the first 10.
    for test in ('png', 'entities'):
        with open(('../tests/extensions/emoji/gemoji (%s).txt' % test), 'w') as f:
            f.write('# Emojis\n')
            count = 0
            for emoji in sorted(shortnames):
                # emoji[1:-1] strips the surrounding colons for the label.
                f.write(''.join(('%s %s<br>\n' % (emoji[1:(- 1)], emoji))))
                count += 1
                if ((test != 'png') and (count == 10)):
                    break
    with open(os.path.join(current_dir, 'tags', repo, repo, 'LICENSE'), 'r') as f:
        license_content = f.read()
    # Emit the generated module with license, version, db and alias map.
    with open('../pymdownx/gemoji_db.py', 'w') as f:
        f.write(('"""Gemoji autogen.\n\nGenerated from gemoji source. Do not edit by hand.\n\n%s"""\n' % license_content))
        f.write(('version = "%s"\n' % tag))
        f.write('name = "gemoji"\n')
        f.write(('emoji = %s\n' % json.dumps(emoji_db, sort_keys=True, indent=4, separators=(',', ': '))))
        f.write(('aliases = %s\n' % json.dumps(aliases, sort_keys=True, indent=4, separators=(',', ': '))))
class Opacity(Filter):
    """Filter that scales a color's alpha channel by *amount*."""

    NAME = 'opacity'
    ALLOWED_SPACES = ('srgb-linear', 'srgb')

    def filter(self, color: 'Color', amount: Optional[float], **kwargs: Any) -> None:
        """Multiply the alpha channel by *amount*, clamped to [0, 1].

        A missing amount defaults to 1 (no change). Modifies *color*
        in place; the alpha channel is the last component.
        """
        factor = 1 if amount is None else amount
        factor = alg.clamp(factor, 0, 1)
        # lerp(0, factor, alpha) == factor * alpha.
        color[-1] = alg.lerp(0, factor, color[-1])
def test_model_set_reference():
    """Refs can be set and retrieved on a model, and are dropped with the node."""
    parent = create_model('parent')
    child = create_model('child')
    grandchild = create_model('child')
    parent.layers.append(child)
    assert parent.ref_names == tuple()
    parent.set_ref('kid', child)
    assert parent.ref_names == ('kid',)
    assert parent.get_ref('kid') is child
    child.layers.append(grandchild)
    # The ref is not set yet, so lookup must fail.
    with pytest.raises(KeyError):
        parent.get_ref('grandkid')
    parent.set_ref('grandkid', grandchild)
    assert parent.get_ref('grandkid') is grandchild
    parent.remove_node(grandchild)
    assert grandchild not in child.layers
    # Fix: the original asserted has_ref('grandkind') — a typo that was
    # never set, so it passed vacuously. Removing the node must also drop
    # the 'grandkid' reference that pointed at it.
    assert not parent.has_ref('grandkid')
class TaxRate(QuickbooksTransactionEntity, QuickbooksBaseObject, ReadMixin, ListMixin):
    """QuickBooks Online TaxRate entity (read/list only)."""

    # Reference sub-objects deserialized from the QBO payload.
    class_dict = {'AgencyRef': Ref, 'TaxReturnLineRef': Ref}
    qbo_object_name = 'TaxRate'

    def __init__(self):
        super(TaxRate, self).__init__()
        # Empty-string defaults for the scalar text fields of the schema.
        for text_field in ('Name', 'Description', 'SpecialTaxType', 'DisplayType', 'EffectiveTaxRate'):
            setattr(self, text_field, '')
        self.RateValue = 0
        self.Active = True
        # References are filled in from class_dict when loaded from QBO.
        self.AgencyRef = None
        self.TaxReturnLineRef = None

    def __str__(self):
        return self.Name
class DataclassMutabilityMixin(DataclassHookMixin):
    """Dataclass mixin enforcing per-field frozenness after construction.

    During __init__ (before ``initialized`` flips to True) every attribute
    may be set freely; afterwards, fields marked frozen via IS_FROZEN_FIELD
    metadata reject reassignment of an already-set value.
    """

    # Flipped to True at the end of dataclass __init__ (via __post_init__);
    # gates the frozen-field enforcement below.
    initialized: bool = field(default=False, init=False)

    def __post_init__(self) -> None:
        # From here on, frozen fields can no longer be overwritten.
        self.initialized = True

    def __setattr__(self, name: str, value: Any) -> None:
        """Set *name*, raising InstanceFrozenFieldError for frozen fields
        that already hold a value on an initialized instance."""
        if self.initialized:
            try:
                # Probe whether the attribute already exists on the instance.
                self.__getattribute__(name)
            except AttributeError:
                # First assignment of a not-yet-set attribute is always allowed.
                DataclassHookMixin.__setattr__(self, name, value)
            else:
                # Re-assignment: reject if the field is marked frozen.
                if self.__dataclass_fields__[name].metadata.get(IS_FROZEN_FIELD, False):
                    raise InstanceFrozenFieldError(name)
                else:
                    DataclassHookMixin.__setattr__(self, name, value)
        else:
            # Still inside __init__: unrestricted assignment.
            DataclassHookMixin.__setattr__(self, name, value)
class bsn_table_checksum_stats_reply(bsn_stats_reply):
    """Generated OpenFlow message: BSN table-checksum stats reply
    (experimenter stats, OF 1.4 wire version 5).

    NOTE(review): this is Python-2-era generated loxi code — ``pack`` mixes
    str padding with struct-packed data and ``unpack`` takes ``reader``
    without a ``@staticmethod`` decorator (apparently stripped by
    extraction). Confirm against the loxigen output before modifying.
    """

    # Wire constants identifying this message type.
    version = 5
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 11

    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize the message; the length field (index 2) is patched in
        after the total size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of header padding.
        packed.append(('\x00' * 4))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        # Back-patch the length placeholder written above.
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize a message from *reader*, asserting every fixed
        header constant matches this class."""
        obj = bsn_table_checksum_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 11)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_table_checksum_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        # Equality over the mutable payload fields only; wire constants
        # are fixed per class.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump through pretty-printer *q*."""
        q.text('bsn_table_checksum_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class FoldersTab(QWidget):
    """Queue-picker tab showing most-used folders and their unused PDFs.

    Left pane: a tree of folders; right pane: PDFs in the selected folder
    that are not yet referenced by any note. Double-clicking a PDF opens
    the note editor prefilled with that file.
    """

    def __init__(self, parent):
        # parent is the enclosing picker dialog; used for refresh callbacks.
        self.parent = parent
        QWidget.__init__(self)
        self.setup_ui()

    def setup_ui(self):
        """Build the two-pane layout: folder tree (left), PDF list (right)."""
        self.vbox_left = QVBoxLayout()
        r_lbl = QLabel('Most Used Folders:')
        r_lbl.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.vbox_left.addWidget(r_lbl)
        self.vbox_left.setAlignment(r_lbl, Qt.AlignmentFlag.AlignTop)
        self.folders_tree = QTreeWidget()
        self.folders_tree.setColumnCount(1)
        self.folders_tree.setHeaderHidden(True)
        self.folders_tree.setRootIsDecorated(False)
        self.folders_tree.setMaximumWidth(370)
        # Swap open/closed folder icons as branches expand/collapse.
        self.folders_tree.itemExpanded.connect(self.tree_exp)
        self.folders_tree.itemCollapsed.connect(self.tree_coll)
        # Custom branch indicators rendered from the picker's icon set.
        self.folders_tree.setStyleSheet(f'''
            QTreeWidget::branch:has-siblings:!adjoins-item {{
                border-image: url({QueuePicker.vline_icn}.png) 0;
            }}
            QTreeWidget::branch:has-siblings:adjoins-item {{
                border-image: url({QueuePicker.branch_more_icn}.png) 0;
            }}
            QTreeWidget::branch:!has-children:!has-siblings:adjoins-item {{
                border-image: url({QueuePicker.branch_end_icn}.png) 0;
            }}
            QTreeWidget::branch:has-children:!has-siblings:closed,
            QTreeWidget::branch:closed:has-children:has-siblings {{
                border-image: none;
                image: url({QueuePicker.branch_closed_icn}.png);
            }}
            QTreeWidget::branch:open:has-children:!has-siblings,
            QTreeWidget::branch:open:has-children:has-siblings {{
                border-image: none;
                image: url({QueuePicker.branch_open_icn}.png);
            }}''')
        style = QApplication.style()
        self.dir_open = style.standardIcon(QStyle.StandardPixmap.SP_DirOpenIcon)
        self.dir_closed = style.standardIcon(QStyle.StandardPixmap.SP_DirClosedIcon)
        self.pdf_icon = QIcon((utility.misc.get_web_folder_path() + 'icons/pdf-icon.png'))
        self.vbox_left.addWidget(self.folders_tree)
        # Path whose unused PDFs are currently listed (None until a click).
        self.path_displayed = None
        self.vbox_right = QVBoxLayout()
        lbl = QLabel('PDFs, unused. (Double Click to Add)')
        self.list = QListWidget()
        self.vbox_right.addWidget(lbl)
        self.vbox_right.addWidget(self.list)
        self.list.itemDoubleClicked.connect(self.add_pdf_note)
        self.folders_tree.itemClicked.connect(self.tree_item_clicked)
        hbox = QHBoxLayout()
        hbox.addLayout(self.vbox_left)
        hbox.addLayout(self.vbox_right)
        self.setLayout(hbox)
        self.setStyleSheet('\n QTreeWidget::item {\n padding: 0;\n margin: 0;\n }\n ')

    def tree_item_clicked(self, item):
        """Show the unused PDFs for the folder stored in the item's data."""
        path = item.data(1, 1)
        self.load_folders_unused_pdfs(path)

    def tree_exp(self, item):
        item.setIcon(0, self.dir_open)

    def tree_coll(self, item):
        item.setIcon(0, self.dir_closed)

    def refresh(self):
        """Reload the PDF list for the currently displayed folder, if any."""
        if (self.path_displayed is None):
            return
        self.load_folders_unused_pdfs(self.path_displayed)

    def load_folders_unused_pdfs(self, path):
        """Fill the right-hand list with PDFs in *path* not yet used by notes."""
        # Normalize to forward slashes with a trailing separator.
        path = path.replace('\\', '/')
        if (not path.endswith('/')):
            path += '/'
        files = utility.misc.find_pdf_files_in_dir(path)
        files_full = [os.path.join(path, f).replace('\\', '/') for f in files]
        # Exclude PDFs already referenced as a note source.
        existing = get_pdfs_by_sources(files_full)
        res = (set(files_full) - set(existing))
        # Display file names only, not full paths.
        res_f = [(r[(r.rindex('/') + 1):] if ('/' in r) else r) for r in res]
        self.fill_list(path, res_f)
        self.path_displayed = path

    def fill_list(self, path, names):
        """Populate the list widget; each item carries its full path as data."""
        self.list.clear()
        for (ix, n) in enumerate(names):
            title_i = QListWidgetItem(self.pdf_icon, n)
            title_i.setData(Qt.ItemDataRole.UserRole, QVariant(os.path.join(path, n)))
            self.list.insertItem(ix, title_i)

    def fill_tree(self, folders):
        """Rebuild the folder tree from a flat list of '/'-separated paths."""
        self.folders_tree.clear()
        fmap = utility.tags.to_tag_hierarchy(folders, sep='/')
        for (t, children) in fmap.items():
            ti = QTreeWidgetItem([t])
            ti.setTextAlignment(0, Qt.AlignmentFlag.AlignLeft)
            # Data slot (1, 1) holds the full folder path for click handling.
            ti.setData(1, 1, QVariant(t))
            ti.setIcon(0, self.dir_open)
            ti.addChildren(self._add_to_tree(children, (t + '/')))
            self.folders_tree.addTopLevelItem(ti)
        self.folders_tree.setExpandsOnDoubleClick(True)
        self.folders_tree.expandAll()

    def _add_to_tree(self, map, prefix):
        """Recursively convert a nested folder map into tree items,
        accumulating the full path in *prefix*."""
        res = []
        for (t, children) in map.items():
            ti = QTreeWidgetItem([t])
            ti.setTextAlignment(0, Qt.AlignmentFlag.AlignLeft)
            ti.setData(1, 1, QVariant((prefix + t)))
            ti.setIcon(0, self.dir_open)
            prefix_c = ((prefix + t) + '/')
            for (c, m) in children.items():
                ti.addChildren(self._add_to_tree({c: m}, prefix_c))
            res.append(ti)
        return res

    def add_pdf_note(self, item_clicked):
        """Open the note editor prefilled with the double-clicked PDF.

        Registers a one-shot hook so the folder/PDF views refresh after
        the note is created. Refuses if a note editor is already open.
        """
        full_path = item_clicked.data(Qt.ItemDataRole.UserRole)
        if (not state.note_editor_shown):
            if (self.path_displayed is not None):
                tab = self

                def after():
                    # Refresh this tab and its siblings once the note exists.
                    tab.load_folders_unused_pdfs(tab.path_displayed)
                    tab.parent.refresh_queue_list()
                    tab.parent.pdfs_tab.refresh()
                add_tmp_hook('user-note-created', after)
            e = NoteEditor(self.parent, add_only=True, source_prefill=full_path)
        else:
            tooltip('Close the opened Note dialog first!')
class OptionSeriesSolidgaugeSonificationContexttracksMappingFrequency(Options):
    """Accessors for the Highcharts solidgauge sonification context-track
    ``mapping.frequency`` option group (mapFunction, mapTo, max, min,
    within).

    NOTE(review): each option is a reader/writer pair with the same name;
    upstream these are @property/@setter pairs with the decorators
    stripped by extraction, so as written the writer shadows the reader.
    Confirm against the generator's original output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def test_slice_will_un_slice_the_same_camera(prepare_scene, create_pymel):
    """Re-slicing a previously sliced camera restores the base resolution first."""
    camera = prepare_scene
    pm = create_pymel
    dres = pm.PyNode('defaultResolution')
    dres.width.set(960)
    dres.height.set(540)

    # First slicer: a 10x10 grid shrinks each tile to 1/10 per axis.
    first = RenderSlicer(camera=camera)
    first.slice(10, 10)
    assert (dres.width.get(), dres.height.get()) == (96, 54)

    # Second slicer on the same camera must un-slice back to 960x540
    # before applying the 5x5 grid.
    second = RenderSlicer(camera=camera)
    second.slice(5, 5)
    assert (dres.width.get(), dres.height.get()) == (192, 108)
class TraitsUIScrolledPanel(wx.lib.scrolledpanel.ScrolledPanel):
    """ScrolledPanel variant that scrolls the focused child into view,
    including children nested inside sub-windows."""

    def __init__(self, parent, id=(- 1), pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.TAB_TRAVERSAL, name='scrolledpanel'):
        # Bypasses ScrolledPanel.__init__ deliberately and initializes the
        # base ScrolledWindow directly, then installs the focus-event
        # override handler.
        wx.ScrolledWindow.__init__(self, parent, id, pos=pos, size=size, style=style, name=name)
        self.SetSize(size)
        self.SetBackgroundColour(parent.GetBackgroundColour())
        ChildFocusOverride(self)

    def Destroy(self):
        # Remove our pushed event handler before the window is destroyed.
        from traitsui.wx.toolkit import _popEventHandlers
        _popEventHandlers(self, ChildFocusOverride)
        super().Destroy()

    def OnChildFocus(self, event):
        """Scroll the newly focused child into view; returning True marks
        the event as handled."""
        self.ScrollChildIntoView(self.FindFocus())
        return True

    def ScrollChildIntoView(self, child):
        """Adjust the scroll position so *child* is fully visible.

        Walks up the parent chain to translate the child's rectangle into
        this panel's coordinate space, then computes new scroll units for
        each axis only where the child sticks out.
        """
        if (child is None):
            return
        (sppux, sppuy) = self.GetScrollPixelsPerUnit()
        (vsx, vsy) = self.GetViewStart()
        (crx, cry, crdx, crdy) = child.GetRect()
        # Accumulate offsets of intermediate sub-windows between the child
        # and this panel so crx/cry become panel-relative.
        subwindow = child.GetParent()
        while (subwindow not in [self, None]):
            (pwx, pwy) = subwindow.GetRect()[:2]
            (crx, cry) = ((crx + pwx), (cry + pwy))
            subwindow = subwindow.GetParent()
        cr = wx.Rect(crx, cry, crdx, crdy)
        client_size = self.GetClientSize()
        # -1 means "leave this axis unchanged" for wx Scroll().
        (new_vsx, new_vsy) = ((- 1), (- 1))
        # Child extends past the left/top edge: scroll back.
        if ((cr.x < 0) and (sppux > 0)):
            new_vsx = (vsx + (cr.x // sppux))
        if ((cr.y < 0) and (sppuy > 0)):
            new_vsy = (vsy + (cr.y // sppuy))
        # Child extends past the right edge: scroll forward just enough,
        # unless that would push its left edge off-screen.
        if ((cr.right > client_size.width) and (sppux > 0)):
            diff = ((cr.right - client_size.width) // sppux)
            if ((cr.x - (diff * sppux)) > 0):
                new_vsx = ((vsx + diff) + 1)
            else:
                new_vsx = (vsx + (cr.x // sppux))
        # Same logic for the bottom edge.
        if ((cr.bottom > client_size.height) and (sppuy > 0)):
            diff = ((cr.bottom - client_size.height) // sppuy)
            if ((cr.y - (diff * sppuy)) > 0):
                new_vsy = ((vsy + diff) + 1)
            else:
                new_vsy = (vsy + (cr.y // sppuy))
        if ((new_vsx != (- 1)) or (new_vsy != (- 1))):
            self.Scroll(new_vsx, new_vsy)
class KubernetesClient(object):
    """Thin wrapper over the Kubernetes API for pod inspection, node
    tainting, labeling, and launching sysdig capture jobs."""

    def __init__(self):
        # Inside a cluster use the service-account config; outside (when
        # KUBERNETES_LOAD_KUBE_CONFIG is set) fall back to kubeconfig.
        if ('KUBERNETES_LOAD_KUBE_CONFIG' in os.environ):
            config.load_kube_config(persist_config=False)
        else:
            config.load_incluster_config()
        self._v1 = client.CoreV1Api()
        self._batch_v1 = client.BatchV1Api()

    def delete_pod(self, name):
        """Delete the pod *name*, resolving its namespace first."""
        namespace = self._find_pod_namespace(name)
        body = client.V1DeleteOptions()
        self._v1.delete_namespaced_pod(name=name, namespace=namespace, body=body)

    def exists_pod(self, name):
        """Return True if a pod called *name* exists and is not terminating."""
        response = self._v1.list_pod_for_all_namespaces(watch=False)
        for item in response.items:
            if (item.metadata.name == name):
                # A set deletion_timestamp means the pod is being torn down.
                if (item.metadata.deletion_timestamp is None):
                    return True
        return False

    def _find_pod_namespace(self, name):
        """Return the namespace of pod *name*, or None if not found."""
        response = self._v1.list_pod_for_all_namespaces(watch=False)
        for item in response.items:
            if (item.metadata.name == name):
                return item.metadata.namespace

    def find_node_running_pod(self, name):
        """Return the node name hosting pod *name*, or None if not found."""
        response = self._v1.list_pod_for_all_namespaces(watch=False)
        for item in response.items:
            if (item.metadata.name == name):
                return item.spec.node_name

    def taint_node(self, name, key, value, effect):
        """Patch node *name* with a single taint (key/value/effect)."""
        body = client.V1Node(spec=client.V1NodeSpec(taints=[client.V1Taint(key=key, value=value, effect=effect)]))
        return self._v1.patch_node(name, body)

    def add_label_to_pod(self, name, label, value):
        """Merge-patch a single label onto pod *name*."""
        namespace = self._find_pod_namespace(name)
        body = client.V1Pod(metadata=client.V1ObjectMeta(labels={label: value}))
        return self._v1.patch_namespaced_pod(name, namespace, body)

    def start_sysdig_capture_for(self, cloud_provider, pod_name, event_time, duration_in_seconds, bucket, access=None, secret_key=None):
        """Launch a privileged sysdig capture Job on the node running *pod_name*.

        Args:
            cloud_provider: 's3' or 'gcloud' — selects the upload target.
            access / secret_key: AWS credentials, used only for 's3'.

        Raises:
            ValueError: for an unsupported *cloud_provider* (previously this
                fell through to an UnboundLocalError on ``body``).
        """
        job_name = 'sysdig-{}-{}'.format(pod_name, event_time)
        node_name = self.find_node_running_pod(pod_name)
        namespace = self._find_pod_namespace(pod_name)
        if (cloud_provider == 's3'):
            body = self._build_sysdig_capture_job_body_s3(job_name, node_name, duration_in_seconds, bucket, access, secret_key)
        elif (cloud_provider == 'gcloud'):
            body = self._build_sysdig_capture_job_body_gcloud(job_name, node_name, duration_in_seconds, bucket)
        else:
            raise ValueError('Unsupported cloud provider: {!r} (expected "s3" or "gcloud")'.format(cloud_provider))
        return self._batch_v1.create_namespaced_job(namespace, body)

    def _build_sysdig_capture_job_body_gcloud(self, job_name, node_name, duration_in_seconds, gcloud_bucket):
        """Job spec for a privileged capturer pod uploading to a GCS bucket."""
        return client.V1Job(metadata=client.V1ObjectMeta(name=job_name), spec=client.V1JobSpec(template=client.V1PodTemplateSpec(metadata=client.V1ObjectMeta(name=job_name), spec=client.V1PodSpec(containers=[client.V1Container(name='capturer', image='sysdiglabs/capturer', image_pull_policy='Always', security_context=client.V1SecurityContext(privileged=True), env=[client.V1EnvVar(name='GCLOUD_BUCKET', value=gcloud_bucket), client.V1EnvVar(name='CAPTURE_DURATION', value=str(duration_in_seconds)), client.V1EnvVar(name='CAPTURE_FILE_NAME', value=job_name)], volume_mounts=[client.V1VolumeMount(mount_path='/host/var/run/docker.sock', name='docker-socket'), client.V1VolumeMount(mount_path='/host/dev', name='dev-fs'), client.V1VolumeMount(mount_path='/host/proc', name='proc-fs', read_only=True), client.V1VolumeMount(mount_path='/host/boot', name='boot-fs', read_only=True), client.V1VolumeMount(mount_path='/host/lib/modules', name='lib-modules', read_only=True), client.V1VolumeMount(mount_path='/host/usr', name='usr-fs', read_only=True), client.V1VolumeMount(mount_path='/dev/shm', name='dshm')])], volumes=[client.V1Volume(name='dshm', empty_dir=client.V1EmptyDirVolumeSource(medium='Memory')), client.V1Volume(name='docker-socket', host_path=client.V1HostPathVolumeSource(path='/var/run/docker.sock')), client.V1Volume(name='dev-fs', host_path=client.V1HostPathVolumeSource(path='/dev')), client.V1Volume(name='proc-fs', host_path=client.V1HostPathVolumeSource(path='/proc')), client.V1Volume(name='boot-fs', host_path=client.V1HostPathVolumeSource(path='/boot')), client.V1Volume(name='lib-modules', host_path=client.V1HostPathVolumeSource(path='/lib/modules')), client.V1Volume(name='usr-fs', host_path=client.V1HostPathVolumeSource(path='/usr'))], node_name=node_name, restart_policy='Never'))))

    def _build_sysdig_capture_job_body_s3(self, job_name, node_name, duration_in_seconds, s3_bucket, aws_access_key_id, aws_secret_access_key):
        """Job spec for a privileged capturer pod uploading to an S3 bucket."""
        return client.V1Job(metadata=client.V1ObjectMeta(name=job_name), spec=client.V1JobSpec(template=client.V1PodTemplateSpec(metadata=client.V1ObjectMeta(name=job_name), spec=client.V1PodSpec(containers=[client.V1Container(name='capturer', image='sysdiglabs/capturer', image_pull_policy='Always', security_context=client.V1SecurityContext(privileged=True), env=[client.V1EnvVar(name='AWS_S3_BUCKET', value=s3_bucket), client.V1EnvVar(name='CAPTURE_DURATION', value=str(duration_in_seconds)), client.V1EnvVar(name='CAPTURE_FILE_NAME', value=job_name), client.V1EnvVar(name='AWS_ACCESS_KEY_ID', value=aws_access_key_id), client.V1EnvVar(name='AWS_SECRET_ACCESS_KEY', value=aws_secret_access_key)], volume_mounts=[client.V1VolumeMount(mount_path='/host/var/run/docker.sock', name='docker-socket'), client.V1VolumeMount(mount_path='/host/dev', name='dev-fs'), client.V1VolumeMount(mount_path='/host/proc', name='proc-fs', read_only=True), client.V1VolumeMount(mount_path='/host/boot', name='boot-fs', read_only=True), client.V1VolumeMount(mount_path='/host/lib/modules', name='lib-modules', read_only=True), client.V1VolumeMount(mount_path='/host/usr', name='usr-fs', read_only=True), client.V1VolumeMount(mount_path='/dev/shm', name='dshm')])], volumes=[client.V1Volume(name='dshm', empty_dir=client.V1EmptyDirVolumeSource(medium='Memory')), client.V1Volume(name='docker-socket', host_path=client.V1HostPathVolumeSource(path='/var/run/docker.sock')), client.V1Volume(name='dev-fs', host_path=client.V1HostPathVolumeSource(path='/dev')), client.V1Volume(name='proc-fs', host_path=client.V1HostPathVolumeSource(path='/proc')), client.V1Volume(name='boot-fs', host_path=client.V1HostPathVolumeSource(path='/boot')), client.V1Volume(name='lib-modules', host_path=client.V1HostPathVolumeSource(path='/lib/modules')), client.V1Volume(name='usr-fs', host_path=client.V1HostPathVolumeSource(path='/usr'))], node_name=node_name, restart_policy='Never'))))
('flytekit.core.utils.load_proto_from_file')
('flytekit.core.data_persistence.FileAccessProvider.get_data')
('flytekit.core.data_persistence.FileAccessProvider.put_data')
('flytekit.core.utils.write_proto_to_file')
# NOTE(review): the four bare string expressions above look like stripped
# @mock.patch(...) decorators supplying the mock_* parameters below —
# confirm against the original test module.
def test_dispatch_execute_ignore(mock_write_to_file, mock_upload_dir, mock_get_data, mock_load_proto):
    """A task raising IgnoreOutputs must suppress all output writing."""
    mock_get_data.return_value = True
    mock_upload_dir.return_value = True
    ctx = context_manager.FlyteContext.current_context()
    with context_manager.FlyteContextManager.with_context(ctx.with_execution_state(ctx.execution_state.with_params(mode=context_manager.ExecutionState.Mode.TASK_EXECUTION))) as ctx:
        python_task = mock.MagicMock()
        # Simulate a task that deliberately aborts output handling.
        python_task.dispatch_execute.side_effect = IgnoreOutputs()
        empty_literal_map = _literal_models.LiteralMap({}).to_flyte_idl()
        mock_load_proto.return_value = empty_literal_map
        system_entry_point(_dispatch_execute)(ctx, python_task, 'inputs path', 'outputs prefix')
        # Nothing may have been written when IgnoreOutputs was raised.
        assert (mock_write_to_file.call_count == 0)
class TelnetOOB():
    """Out-of-band (OOB) telnet negotiation for the MSDP and GMCP protocols.

    Negotiates both protocols on connect and, for whichever the client
    accepts, encodes outgoing server data and decodes incoming client data.
    MSDP_*/GMCP byte constants and the msdp_regex_* patterns are defined at
    module level (outside this view).
    """

    def __init__(self, protocol):
        """Attach to *protocol* and start WILL MSDP / WILL GMCP negotiation."""
        self.protocol = protocol
        # OOB is off until one of the handshakes succeeds.
        self.protocol.protocol_flags['OOB'] = False
        self.MSDP = False
        self.GMCP = False
        self.protocol.negotiationMap[MSDP] = self.decode_msdp
        self.protocol.negotiationMap[GMCP] = self.decode_gmcp
        self.protocol.will(MSDP).addCallbacks(self.do_msdp, self.no_msdp)
        self.protocol.will(GMCP).addCallbacks(self.do_gmcp, self.no_gmcp)
        self.oob_reported = {}

    def no_msdp(self, option):
        """Client refused MSDP; count the handshake as done anyway."""
        self.protocol.handshake_done()

    def do_msdp(self, option):
        """Client accepted MSDP; enable OOB."""
        self.MSDP = True
        self.protocol.protocol_flags['OOB'] = True
        self.protocol.handshake_done()

    def no_gmcp(self, option):
        """Client refused GMCP; count the handshake as done anyway."""
        self.protocol.handshake_done()

    def do_gmcp(self, option):
        """Client accepted GMCP; enable OOB."""
        self.GMCP = True
        self.protocol.protocol_flags['OOB'] = True
        self.protocol.handshake_done()

    def encode_msdp(self, cmdname, *args, **kwargs):
        """Encode a command as MSDP bytes.

        Positional args become an MSDP array, keyword args an MSDP table;
        with no payload only the VAR/VAL command name is sent.
        """
        msdp_cmdname = '{msdp_var}{msdp_cmdname}{msdp_val}'.format(msdp_var=MSDP_VAR.decode(), msdp_cmdname=cmdname, msdp_val=MSDP_VAL.decode())
        if (not (args or kwargs)):
            return msdp_cmdname.encode()
        msdp_args = ''
        if args:
            msdp_args = msdp_cmdname
            if (len(args) == 1):
                # Single value: sent inline, no array wrapper.
                msdp_args += args[0]
            else:
                msdp_args += '{msdp_array_open}{msdp_args}{msdp_array_close}'.format(msdp_array_open=MSDP_ARRAY_OPEN.decode(), msdp_array_close=MSDP_ARRAY_CLOSE.decode(), msdp_args=''.join((('%s%s' % (MSDP_VAL.decode(), val)) for val in args)))
        msdp_kwargs = ''
        if kwargs:
            msdp_kwargs = msdp_cmdname
            msdp_kwargs += '{msdp_table_open}{msdp_kwargs}{msdp_table_close}'.format(msdp_table_open=MSDP_TABLE_OPEN.decode(), msdp_table_close=MSDP_TABLE_CLOSE.decode(), msdp_kwargs=''.join((('%s%s%s%s' % (MSDP_VAR.decode(), key, MSDP_VAL.decode(), val)) for (key, val) in kwargs.items())))
        msdp_string = (msdp_args + msdp_kwargs)
        return msdp_string.encode()

    def encode_gmcp(self, cmdname, *args, **kwargs):
        """Encode a command as GMCP bytes: ``Package.Name <json payload>``.

        The command name is mapped via EVENNIA_TO_GMCP when known, otherwise
        derived from the underscore-separated name (defaulting to the
        ``Core.`` package).
        """
        if (cmdname in EVENNIA_TO_GMCP):
            gmcp_cmdname = EVENNIA_TO_GMCP[cmdname]
        elif ('_' in cmdname):
            if cmdname.istitle():
                gmcp_cmdname = '.'.join((word for word in cmdname.split('_')))
            else:
                gmcp_cmdname = '.'.join((word.capitalize() for word in cmdname.split('_')))
        else:
            gmcp_cmdname = ('Core.%s' % (cmdname if cmdname.istitle() else cmdname.capitalize()))
        if (not (args or kwargs)):
            gmcp_string = gmcp_cmdname
        elif args:
            if (len(args) == 1):
                args = args[0]
            if kwargs:
                gmcp_string = ('%s %s' % (gmcp_cmdname, json.dumps([args, kwargs])))
            else:
                gmcp_string = ('%s %s' % (gmcp_cmdname, json.dumps(args)))
        else:
            gmcp_string = ('%s %s' % (gmcp_cmdname, json.dumps(kwargs)))
        return gmcp_string.encode()

    def decode_msdp(self, data):
        """Decode incoming MSDP bytes into ``{cmdname: [args, kwargs]}`` calls.

        Tables become kwargs, arrays and plain variables become args; a few
        reserved MSDP command names are remapped to ``msdp_*`` before being
        handed to ``protocol.data_in``.
        """
        if isinstance(data, list):
            data = b''.join(data)
        tables = {}
        arrays = {}
        variables = {}
        # Pass 1: pull out TABLE structures.
        for (key, table) in msdp_regex_table.findall(data):
            key = key.decode()
            tables[key] = ({} if (key not in tables) else tables[key])
            for varval in msdp_regex_var.split(table)[1:]:
                (var, val) = msdp_regex_val.split(varval, 1)
                (var, val) = (var.decode(), val.decode())
                if var:
                    tables[key][var] = val
        data_no_tables = msdp_regex_table.sub(b'', data)
        # Pass 2: pull out ARRAY structures from what's left.
        for (key, array) in msdp_regex_array.findall(data_no_tables):
            key = key.decode()
            arrays[key] = ([] if (key not in arrays) else arrays[key])
            parts = msdp_regex_val.split(array)
            parts = [part.decode() for part in parts]
            if (len(parts) == 2):
                arrays[key].append(parts[1])
            elif (len(parts) > 1):
                arrays[key].extend(parts[1:])
        data_no_tables_or_arrays = msdp_regex_array.sub(b'', data_no_tables)
        # Pass 3: remaining plain VAR/VAL pairs.
        for varval in msdp_regex_var.split(data_no_tables_or_arrays):
            parts = msdp_regex_val.split(varval)
            parts = [part.decode() for part in parts]
            if (len(parts) == 2):
                variables[parts[0]] = parts[1]
            elif (len(parts) > 1):
                variables[parts[0]] = parts[1:]
        cmds = {}
        # Merge: a key seen as table takes kwargs; arrays/variables with the
        # same key fold into its args.
        for (key, table) in tables.items():
            (args, kwargs) = ([], table)
            if (key in arrays):
                args.extend(arrays.pop(key))
            if (key in variables):
                args.append(variables.pop(key))
            cmds[key] = [args, kwargs]
        for (key, arr) in arrays.items():
            (args, kwargs) = (arr, {})
            if (key in variables):
                args.append(variables.pop(key))
            cmds[key] = [args, kwargs]
        for (key, var) in variables.items():
            cmds[key] = [[var], {}]
        # Remap reserved MSDP command names (case-insensitively) to msdp_*.
        lower_case = {key.lower(): key for key in cmds}
        for remap in ('list', 'report', 'reset', 'send', 'unreport'):
            if (remap in lower_case):
                cmds['msdp_{}'.format(remap)] = cmds.pop(lower_case[remap])
        self.protocol.data_in(**cmds)

    def decode_gmcp(self, data):
        """Decode incoming GMCP bytes (``Package.Name <json>``) into a data_in call."""
        if isinstance(data, list):
            data = b''.join(data)
        if data:
            try:
                (cmdname, structure) = data.split(None, 1)
            except ValueError:
                # No payload after the command name.
                (cmdname, structure) = (data, b'')
            cmdname = cmdname.replace(b'.', b'_')
            try:
                structure = json.loads(structure)
            except ValueError:
                # Non-JSON payload is passed through as-is.
                pass
            (args, kwargs) = ([], {})
            if is_iter(structure):
                if isinstance(structure, dict):
                    kwargs = {key: value for (key, value) in structure.items() if key}
                else:
                    args = list(structure)
            else:
                args = (structure,)
            # Strip the implicit Core. package prefix.
            if cmdname.lower().startswith(b'core_'):
                cmdname = cmdname[5:]
            self.protocol.data_in(**{cmdname.lower().decode(): [args, kwargs]})

    def data_out(self, cmdname, *args, **kwargs):
        """Send *cmdname* out-of-band over every negotiated protocol."""
        kwargs.pop('options', None)
        if self.MSDP:
            encoded_oob = self.encode_msdp(cmdname, *args, **kwargs)
            self.protocol._write((((((IAC + SB) + MSDP) + encoded_oob) + IAC) + SE))
        if self.GMCP:
            encoded_oob = self.encode_gmcp(cmdname, *args, **kwargs)
            self.protocol._write((((((IAC + SB) + GMCP) + encoded_oob) + IAC) + SE))
class AptUtils():
    """Static helpers wrapping apt/aptitude shell commands (update, search, install).

    All methods are static; they shell out via CmdTask and report through
    PrintUtils. (Previously the methods were declared without ``self`` or
    ``@staticmethod``, which broke any call made on an instance.)
    """

    @staticmethod
    def checkapt():
        """Run ``sudo apt update``; apply known workarounds on failure and retry.

        Returns:
            bool: True when the index update eventually succeeds, False otherwise.
        """
        result = CmdTask('sudo apt update', 100).run()
        if (result[0] != 0):
            # Certificate failures: disable peer verification via apt config.
            if FileUtils.check_result(result, ['certificate', '']):
                PrintUtils.print_warn('{}, /etc/apt/apt.conf.d/99verify-peer.conf'.format(result[2]))
                CmdTask('touch /etc/apt/apt.conf.d/99verify-peer.conf').run()
                CmdTask('echo "Acquire { false }" > /etc/apt/apt.conf.d/99verify-peer.conf').run()
            # Import the signing key that most commonly causes NO_PUBKEY errors.
            CmdTask('sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys F42ED6FBAB17C654', 10).run()
            result = CmdTask('sudo apt update', 100).run()
            if (result[0] != 0):
                PrintUtils.print_warn('apt,...,{}'.format(result[2]))
                return False
        return True

    @staticmethod
    def getArch():
        """Return the dpkg architecture string, or None when it cannot be determined."""
        result = CmdTask('dpkg --print-architecture', 2).run()
        # Bug fix: check the exit code (and that stdout is non-empty) BEFORE
        # indexing result[1], so a failed command no longer raises IndexError.
        if ((result[0] == 0) and result[1]):
            arc = result[1][0].strip('\n')
            # NOTE(review): armhf (32-bit) is remapped to arm64 here; this was
            # the original behavior — confirm it is intentional for this project.
            if (arc == 'armhf'):
                arc = 'arm64'
            return arc
        PrintUtils.print_error(':...')
        return None

    @staticmethod
    def search_package(name, pattern, replace1='', replace2=''):
        """Search the apt cache for *name*.

        Returns:
            dict | None: ``{display_name: package_name}`` built by matching
            *pattern* against each result line (with *replace1*/*replace2*
            stripped from the display name), or None when nothing matched
            or the search command failed.
        """
        result = CmdTask('sudo apt-cache search {} '.format(name), 20).run()
        if (result[0] != 0):
            PrintUtils.print_error('{}'.format(name))
            return None
        dic = {}
        for line in result[1]:
            temp = re.findall(pattern, line)
            if (len(temp) > 0):
                dic[temp[0].replace(replace1, '').replace(replace2, '')] = temp[0]
        if (len(dic) == 0):
            return None
        return dic

    @staticmethod
    def install_pkg(name, apt_tool='apt', auto_yes=True, os_command=False):
        """Install every package whose name matches *name*.

        Returns the result tuple of the last install command, or None when
        no matching package was found.
        """
        # Bug fix: call search_package on the class, not on a throwaway
        # instance — the old ``AptUtils().search_package(name, name)`` shifted
        # the arguments so the instance object itself became the search term.
        dic = AptUtils.search_package(name, name)
        if (not dic):
            # Bug fix: guard against a failed/empty search instead of crashing
            # on ``dic.keys()`` with dic=None.
            PrintUtils.print_warn(':{}'.format(name))
            return None
        yes = ''
        if auto_yes:
            yes = '-y'
        result = None
        for key in dic.keys():
            result = CmdTask('sudo {} install {} {}'.format(apt_tool, dic[key], yes), 0, os_command=os_command).run()
        if (not result):
            PrintUtils.print_warn(':{}'.format(name))
        return result

    @staticmethod
    def install_pkg_check_dep(name):
        """Install *name*, falling back to aptitude when apt reports unmet dependencies.

        Keeps retrying interactively (prompting the user between attempts)
        while the dependency problem persists.
        """
        result = AptUtils.install_pkg(name)
        if result:
            AptUtils.install_pkg('aptitude')
            if FileUtils.check_result(result, ['', 'unmet dependencies']):
                result = AptUtils.install_pkg(name, apt_tool='aptitude', os_command=False, auto_yes=True)
            while FileUtils.check_result(result, ['', 'unmet dependencies']):
                PrintUtils.print_warn('')
                PrintUtils.print_delay(',,(,),')
                input(',')
                result = AptUtils.install_pkg(name, apt_tool='aptitude', os_command=True, auto_yes=False)
                result = AptUtils.install_pkg(name, apt_tool='aptitude', os_command=False, auto_yes=True)
def replace_widget_ids(widget: BaseWidgetInfo, generator: WidgetIdGenerator):
    """Recursively assign fresh ids to *widget*, its additional graphs and sub-widgets.

    Also rewrites any ``parts`` entries in ``widget.params`` whose ``id``
    referenced an additional graph, so they point at the graph's new id.
    """
    widget.id = generator.get_id()
    # Map OLD graph id -> graph object. Ordering matters: the mapping is keyed
    # by the id as it was before renaming, while the stored object's .id is
    # subsequently replaced (directly for graphs, via recursion for widgets),
    # so lookups below resolve old ids to the final new ids.
    add_graph_id_mapping: Dict[str, Union[BaseWidgetInfo, AdditionalGraphInfo, PlotlyGraphInfo]] = {}
    for add_graph in widget.additionalGraphs:
        if isinstance(add_graph, BaseWidgetInfo):
            add_graph_id_mapping[add_graph.id] = add_graph
            replace_widget_ids(add_graph, generator)
        elif isinstance(add_graph, (AdditionalGraphInfo, PlotlyGraphInfo)):
            add_graph_id_mapping[add_graph.id] = add_graph
            add_graph.id = generator.get_id(add_graph.id.replace(' ', '-'))
        else:
            raise ValueError(f'Unknown add graph type {add_graph.__class__.__name__}')
    # Collect every "parts" list that may reference additional graphs by id,
    # both under params['data'][*]['details'] and under params['details'].
    parts = []
    if isinstance(widget.params, dict):
        if ('data' in widget.params):
            data = widget.params['data']
            for item in data:
                if (('details' in item) and ('parts' in item['details'])):
                    parts.extend(item['details']['parts'])
        if ('details' in widget.params):
            details = widget.params['details']
            if ('parts' in details):
                parts.extend(details['parts'])
    # Rewrite old graph ids to the new ids assigned above.
    for part in parts:
        if ('id' in part):
            widget_id = part['id']
            if (widget_id in add_graph_id_mapping):
                part['id'] = add_graph_id_mapping[widget_id].id
    # Recurse into nested widgets.
    for w in widget.widgets:
        replace_widget_ids(w, generator)
class OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMapping(Options):
    """Generated accessor class: mapping options for the windbarb sonification
    default instrument. Each accessor lazily materializes (and returns) the
    corresponding typed sub-configuration section via ``_config_sub_data``.
    """

    def frequency(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingFrequency':
        """Sub-configuration for the ``frequency`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingFrequency
        return self._config_sub_data('frequency', section)

    def gapBetweenNotes(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        """Sub-configuration for the ``gapBetweenNotes`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingGapbetweennotes
        return self._config_sub_data('gapBetweenNotes', section)

    def highpass(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingHighpass':
        """Sub-configuration for the ``highpass`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingHighpass
        return self._config_sub_data('highpass', section)

    def lowpass(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingLowpass':
        """Sub-configuration for the ``lowpass`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingLowpass
        return self._config_sub_data('lowpass', section)

    def noteDuration(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingNoteduration':
        """Sub-configuration for the ``noteDuration`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingNoteduration
        return self._config_sub_data('noteDuration', section)

    def pan(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingPan':
        """Sub-configuration for the ``pan`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingPan
        return self._config_sub_data('pan', section)

    def pitch(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingPitch':
        """Sub-configuration for the ``pitch`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingPitch
        return self._config_sub_data('pitch', section)

    def playDelay(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingPlaydelay':
        """Sub-configuration for the ``playDelay`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingPlaydelay
        return self._config_sub_data('playDelay', section)

    def time(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingTime':
        """Sub-configuration for the ``time`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingTime
        return self._config_sub_data('time', section)

    def tremolo(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingTremolo':
        """Sub-configuration for the ``tremolo`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingTremolo
        return self._config_sub_data('tremolo', section)

    def volume(self) -> 'OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingVolume':
        """Sub-configuration for the ``volume`` mapping."""
        section = OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingVolume
        return self._config_sub_data('volume', section)
class EcsTestClient(object):
    """In-memory stand-in for the boto3 ECS client used in tests.

    Returns canned module-level fixtures (``RESPONSE_*``/``PAYLOAD_*``) and
    raises the same exception types as the real client. Behavior toggles:
    ``deployment_errors`` and ``client_errors`` make the relevant calls fail,
    and ``wait`` delays when ``list_tasks`` starts reporting tasks.
    """

    def __init__(self, access_key_id=None, secret_access_key=None, region=None, profile=None, deployment_errors=False, client_errors=False, wait=0):
        super(EcsTestClient, self).__init__()
        self.access_key_id = access_key_id
        self.secret_access_key = secret_access_key
        self.region = region
        self.profile = profile
        self.deployment_errors = deployment_errors
        self.client_errors = client_errors
        # list_tasks switches to the "tasks running" fixture once this passes.
        self.wait_until = datetime.now() + timedelta(seconds=wait)

    def _has_credentials(self):
        """Return True when both the key id and the secret are set."""
        return bool(self.access_key_id and self.secret_access_key)

    def _require_credentials(self):
        """Raise EcsConnectionError unless credentials are configured."""
        if not self._has_credentials():
            raise EcsConnectionError(u'Unable to locate credentials. Configure credentials by running "aws configure".')

    def describe_services(self, cluster_name, service_name):
        """Mimic DescribeServices, including the cluster-not-found error path."""
        if not self._has_credentials():
            raise NoCredentialsError()
        if cluster_name != u'test-cluster':
            error_response = {u'Error': {u'Code': u'ClusterNotFoundException', u'Message': u'Cluster not found.'}}
            raise ClientError(error_response, u'DescribeServices')
        if service_name != u'test-service':
            return {u'services': []}
        if self.deployment_errors:
            return {u'services': [PAYLOAD_SERVICE_WITH_ERRORS], u'failures': []}
        return {u'services': [PAYLOAD_SERVICE], u'failures': []}

    def describe_task_definition(self, task_definition_arn):
        """Return a copy of the canned task definition, or raise for unknown arns."""
        self._require_credentials()
        try:
            return deepcopy(RESPONSE_TASK_DEFINITIONS[task_definition_arn])
        except KeyError:
            raise UnknownTaskDefinitionError('Unknown task definition arn: %s' % task_definition_arn)

    def list_tasks(self, cluster_name, service_name):
        """Report running tasks only once the configured wait has elapsed."""
        if datetime.now() >= self.wait_until:
            return deepcopy(RESPONSE_LIST_TASKS_2)
        return deepcopy(RESPONSE_LIST_TASKS_0)

    def describe_tasks(self, cluster_name, task_arns):
        """Return a copy of the canned DescribeTasks payload."""
        return deepcopy(RESPONSE_DESCRIBE_TASKS)

    def register_task_definition(self, family, containers, volumes, role_arn, execution_role_arn, runtime_platform, tags, cpu, memory, additional_properties):
        """Return the canned "new revision" task definition payload."""
        self._require_credentials()
        return deepcopy(RESPONSE_TASK_DEFINITION_2)

    def deregister_task_definition(self, task_definition_arn):
        """Return the canned deregistered task definition payload."""
        return deepcopy(RESPONSE_TASK_DEFINITION)

    def update_service(self, cluster, service, desired_count, task_definition):
        """Mimic UpdateService, honoring the error toggles."""
        if self.client_errors:
            raise ClientError({'Error': {'Code': 123, 'Message': 'Something went wrong'}}, 'fake_error')
        if self.deployment_errors:
            return deepcopy(RESPONSE_SERVICE_WITH_ERRORS)
        return deepcopy(RESPONSE_SERVICE)

    def run_task(self, cluster, task_definition, count, started_by, overrides, launchtype='EC2', subnets=(), security_groups=(), public_ip=False, platform_version=None):
        """Mimic RunTask: credential, unknown-cluster and deployment error paths."""
        self._require_credentials()
        if cluster == 'unknown-cluster':
            raise EcsConnectionError(u'An error occurred (ClusterNotFoundException) when calling the RunTask operation: Cluster not found.')
        if self.deployment_errors:
            raise ClientError({'Error': {'Code': 123, 'Message': 'Something went wrong'}}, 'fake_error')
        return dict(tasks=[dict(taskArn='arn:foo:bar'), dict(taskArn='arn:lorem:ipsum')])

    def update_rule(self, cluster, rule, task_definition):
        """Mimic updating a scheduled-task rule; validates credentials and cluster."""
        self._require_credentials()
        if cluster == 'unknown-cluster':
            raise EcsConnectionError(u'An error occurred (ClusterNotFoundException) when calling the RunTask operation: Cluster not found.')
class RelationshipCustomerCustomer(ModelNormal):
    """Generated OpenAPI model for a customer relationship payload.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    expressions below look like stripped decorators (``@cached_property`` /
    ``@convert_js_args_to_python_args`` in openapi-generator output) — confirm
    against the generated original; as written they are no-op name lookups.
    """
    # No enum-restricted or validated attributes on this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared in the schema."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Declared attribute name -> (type,) mapping from the OpenAPI spec."""
        lazy_import()
        return {'data': ([RelationshipMemberCustomer],)}
    _property
    def discriminator():
        # No polymorphic discriminator on this schema.
        return None
    # Python attribute name -> JSON key name.
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate the model from raw server data (deserialization path)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys unknown to the schema when configured to.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate the model from user-supplied keyword arguments."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): this read-only check runs AFTER setattr and inside
            # the loop; upstream generated code usually rejects read-only vars
            # before assignment — confirm against the generator template.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
# NOTE(review): the bare string below looks like a stripped registration
# decorator (e.g. ``@registry.reg('cuda.gemm_rcr_fast_gelu.gen_function')``)
# — confirm against the original source; as written it is a no-op expression.
('cuda.gemm_rcr_fast_gelu.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Render the CUDA source for a gemm_rcr_fast_gelu kernel.

    Derives input/weight/output ranks and element types from *func_attrs*,
    renders the problem-argument templates (both classic and CUTLASS 3.x
    variants), and delegates the full source generation to ``common.gen_function``.
    """
    # Ranks come from the accessors' original (pre-flattening) shapes.
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    output_ndims = len(func_attrs['output_accessors'][0].original_shapes)
    backend_spec = CUDASpec()
    # Map framework dtypes to CUDA library type names for template rendering.
    elem_input_type = backend_spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_output_type = backend_spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    problem_args = PROBLEM_ARGS_TEMPLATE.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type)
    problem_args_cutlass_3x = PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type)
    return common.gen_function(func_attrs=func_attrs, src_template=common_no_bias.SRC_TEMPLATE, exec_cond_template=exec_cond_template, problem_args=problem_args, problem_args_cutlass_3x=problem_args_cutlass_3x, input_ndims=input_ndims, weight_ndims=weight_ndims, output_ndims=output_ndims, dim_info_dict=dim_info_dict, support_split_k=True, output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(stride_dim='N', output_accessor=func_attrs['output_accessors'][0]), extra_code=EXTRA_CODE.render())
def validate_display_name(display_name, required=False):
    """Validate a user display name.

    Args:
        display_name: The value to validate. May be None when not required.
        required: When True, None is rejected like any other invalid value.

    Returns:
        The validated display name, or None when omitted and not required.

    Raises:
        ValueError: If the value is not a non-empty string (or is a required
            value that was omitted).
    """
    if not required and display_name is None:
        return None
    if isinstance(display_name, str) and display_name:
        return display_name
    raise ValueError('Invalid display name: "{0}". Display name must be a non-empty string.'.format(display_name))
def decompress_bytes(data, offset, dictionary, pos=0):
    """Decompress one length-prefixed sub-block of *data* starting at *pos*.

    Layout at *pos*: a 2-byte little-endian payload length, a 1-byte bitfield
    length, the bitfield itself, then the payload. Each bitfield bit (LSB
    first) selects how the next item is decoded:

    * bit set — a compressed entry: a byte < 192 is an index into
      *dictionary*; 192..223 encodes a 32-byte value left-padded with
      ``(byte - 192)`` zero bytes; >= 224 encodes a 32-byte value
      right-padded with ``(byte - 224)`` zero bytes.
    * bit clear — a raw copy of ``offset`` bytes (``offset`` resets to 32
      after the first raw copy).

    Args:
        data: Source buffer (bytes-like, indexable).
        offset: Raw-copy width for the first uncompressed chunk.
        dictionary: Maps small byte values (< 192) to replacement byte strings.
        pos: Start of the sub-block inside *data*.

    Returns:
        tuple: ``(decompressed_bytes, end_position)`` where ``end_position``
        is the index just past this sub-block.
    """
    length = int.from_bytes(data[pos:(pos + 2)], 'little')
    pos += 2
    bitfield_length = data[pos]
    bitfield = int.from_bytes(data[(pos + 1):((pos + 1) + bitfield_length)], 'little')
    o = []
    # The length counts everything after the 2-byte length prefix.
    subdata_end = (pos + length)
    pos += (1 + bitfield_length)
    while (pos < subdata_end):
        if (bitfield % 2):
            if (data[pos] < 192):
                # Dictionary reference.
                o.append(dictionary[data[pos]])
                pos += 1
            elif (data[pos] < 224):
                # 32-byte value, left-padded with zeros.
                zeros = (data[pos] - 192)
                o.append(((b'\x00' * zeros) + data[(pos + 1):((pos + 33) - zeros)]))
                pos += (33 - zeros)
            else:
                # 32-byte value, right-padded with zeros.
                zeros = (data[pos] - 224)
                o.append((data[(pos + 1):((pos + 33) - zeros)] + (b'\x00' * zeros)))
                pos += (33 - zeros)
        else:
            # Raw copy; only the first chunk uses the caller-supplied offset.
            o.append(data[pos:min((pos + offset), subdata_end)])
            pos += offset
            offset = 32
        bitfield //= 2
    # (Removed dead local ``front_crop = False`` — it was never read.)
    return (b''.join(o), subdata_end)
_tag('django_social_share/templatetags/send_email.html', takes_context=True)
def send_email(context, subject, text, obj_or_url=None, link_text='', link_class=''):
    """Build the template context for the "share via email" inclusion tag.

    Delegates URL construction to ``send_email_url`` and adds the link's CSS
    class and label (defaulting to "Share via email").

    NOTE(review): the bare ``_tag(...)`` call above looks like a stripped
    ``@register.inclusion_tag(...)`` decorator — confirm against upstream.
    """
    context = send_email_url(context, subject, text, obj_or_url)
    context['link_class'] = link_class
    context['link_text'] = link_text if link_text else 'Share via email'
    return context
class SignedHandler(object):
    """Message-bus consumer that marks koji builds as signed in the database.

    Reacts to koji tag messages: when a build lands in the release's expected
    pending-testing (or side-tag testing) tag, the build is flagged signed and
    the owning update may be advanced to testing.
    """

    def __init__(self):
        # Factory producing transactional DB sessions, one per message.
        self.db_factory = transactional_session_maker()

    def __call__(self, message: fedora_messaging.api.Message):
        """Process a single koji tag message."""
        message = message.body
        build_nvr = ('%(name)s-%(version)s-%(release)s' % message)
        tag = message['tag']
        log.info(('%s tagged into %s' % (build_nvr, tag)))
        with self.db_factory() as dbsession:
            build = Build.get(build_nvr)
            # Guard clauses: only handle builds Bodhi knows about and that are
            # tagged into the tag we actually expect for signing.
            if (not build):
                log.info('Build was not submitted, skipping')
                return
            if (not build.release):
                log.info('Build is not assigned to release, skipping')
                return
            if (build.update and build.update.from_tag and (not build.update.release.composed_by_bodhi)):
                # Side-tag updates in releases not composed by Bodhi use the
                # side-tag's pending-testing tag instead of the release's.
                koji_testing_tag = build.release.get_pending_testing_side_tag(build.update.from_tag)
                if (tag != koji_testing_tag):
                    log.info('Tag is not testing side tag, skipping')
                    return
            elif (build.release.pending_testing_tag != tag):
                log.info('Tag is not pending_testing tag, skipping')
                return
            if build.signed:
                log.info('Build was already marked as signed (maybe a duplicate message)')
                return
            log.info('Build has been signed, marking')
            build.signed = True
            # Flush so update.signed (computed over all builds) sees this build.
            dbsession.flush()
            log.info(('Build %s has been marked as signed' % build_nvr))
            if (build.update and build.update.release.composed_by_bodhi and build.update.from_tag and build.update.signed):
                # Side-tag update in a Bodhi-composed release: request testing.
                log.info(f'Setting request for new side-tag update {build.update.alias}.')
                req = UpdateRequest.testing
                build.update.set_request(dbsession, req, 'bodhi')
                return
            if (build.update and (build.update.status != UpdateStatus.obsolete) and (not build.update.release.composed_by_bodhi) and build.update.signed):
                # Release not composed by Bodhi: push straight to testing once
                # every build in the update is signed.
                log.info('Every build in update is signed, set status to testing')
                build.update.status = UpdateStatus.testing
                build.update.date_testing = func.current_timestamp()
                build.update.request = None
                build.update.pushed = True
                if config.get('test_gating.required'):
                    log.debug('Test gating is required, marking the update as waiting on test gating and updating it from Greenwave to get the real status.')
                    build.update.test_gating_status = TestGatingStatus.waiting
                    build.update.update_test_gating_status()
                log.info(f'Update {build.update.alias} status has been set to testing')
_settings(DEBUG=True)
class Test(TestCase):
def create_tree(self):
tree = type('Namespace', (), {})()
tree.root = Model.objects.create(name='root')
tree.child1 = Model.objects.create(parent=tree.root, order=0, name='1')
tree.child2 = Model.objects.create(parent=tree.root, order=1, name='2')
tree.child1_1 = Model.objects.create(parent=tree.child1, order=0, name='1-1')
tree.child2_1 = Model.objects.create(parent=tree.child2, order=0, name='2-1')
tree.child2_2 = Model.objects.create(parent=tree.child2, order=1, name='2-2')
return tree
def test_stuff(self):
Model.objects.create()
self.assertEqual(len(Model.objects.with_tree_fields()), 1)
instance = Model.objects.with_tree_fields().get()
self.assertEqual(instance.tree_depth, 0)
self.assertEqual(instance.tree_path, [instance.pk])
def test_no_attributes(self):
tree = self.create_tree()
root = Model.objects.get(pk=tree.root.pk)
self.assertFalse(hasattr(root, 'tree_depth'))
self.assertFalse(hasattr(root, 'tree_ordering'))
self.assertFalse(hasattr(root, 'tree_path'))
def test_attributes(self):
tree = self.create_tree()
child2_2 = Model.objects.with_tree_fields().get(pk=tree.child2_2.pk)
self.assertEqual(child2_2.tree_depth, 2)
self.assertEqual(child2_2.tree_ordering, [0, 1, 1])
self.assertEqual(child2_2.tree_path, [tree.root.pk, tree.child2.pk, tree.child2_2.pk])
def test_ancestors(self):
tree = self.create_tree()
with self.assertNumQueries(2):
self.assertEqual(list(tree.child2_2.ancestors()), [tree.root, tree.child2])
self.assertEqual(list(tree.child2_2.ancestors(include_self=True)), [tree.root, tree.child2, tree.child2_2])
self.assertEqual(list(tree.child2_2.ancestors().reverse()), [tree.child2, tree.root])
self.assertEqual(list(tree.root.ancestors()), [])
self.assertEqual(list(tree.root.ancestors(include_self=True)), [tree.root])
child2_2 = Model.objects.with_tree_fields().get(pk=tree.child2_2.pk)
with self.assertNumQueries(1):
self.assertEqual(list(child2_2.ancestors()), [tree.root, tree.child2])
def test_descendants(self):
tree = self.create_tree()
self.assertEqual(list(tree.child2.descendants()), [tree.child2_1, tree.child2_2])
self.assertEqual(list(tree.child2.descendants(include_self=True)), [tree.child2, tree.child2_1, tree.child2_2])
def test_queryset_or(self):
tree = self.create_tree()
qs = Model.objects.with_tree_fields()
self.assertEqual(list((qs.filter(pk=tree.child1.pk) | qs.filter(pk=tree.child2.pk))), [tree.child1, tree.child2])
def test_twice(self):
self.assertEqual(list(Model.objects.with_tree_fields().with_tree_fields()), [])
def test_boring_coverage(self):
with self.assertRaises(ValueError):
TreeQuery(Model).get_compiler()
def test_count(self):
tree = self.create_tree()
self.assertEqual(Model.objects.count(), 6)
self.assertEqual(Model.objects.with_tree_fields().count(), 6)
self.assertEqual(Model.objects.with_tree_fields().distinct().count(), 6)
self.assertEqual(list(Model.objects.descendants(tree.child1)), [tree.child1_1])
self.assertEqual(Model.objects.descendants(tree.child1).count(), 1)
self.assertEqual(Model.objects.descendants(tree.child1).distinct().count(), 1)
qs = list(Model.objects.with_tree_fields().distinct())
self.assertEqual(qs[0].tree_depth, 0)
self.assertEqual(qs[5].tree_depth, 2)
def test_annotate(self):
tree = self.create_tree()
self.assertEqual([(node, node.children__count, node.tree_depth) for node in Model.objects.with_tree_fields().annotate(Count('children'))], [(tree.root, 2, 0), (tree.child1, 1, 1), (tree.child1_1, 0, 2), (tree.child2, 2, 1), (tree.child2_1, 0, 2), (tree.child2_2, 0, 2)])
def test_update_aggregate(self):
self.create_tree()
Model.objects.with_tree_fields().update(order=3)
self.assertEqual(Model.objects.with_tree_fields().aggregate(Sum('order')), {'order__sum': 18})
def test_values(self):
tree = self.create_tree()
self.assertEqual(list(Model.objects.with_tree_fields().values('name')), [{'name': 'root'}, {'name': '1'}, {'name': '1-1'}, {'name': '2'}, {'name': '2-1'}, {'name': '2-2'}])
def test_values_ancestors(self):
tree = self.create_tree()
self.assertEqual(list(Model.objects.ancestors(tree.child2_1).values()), [{'custom_id': tree.root.pk, 'name': 'root', 'order': 0, 'parent_id': None}, {'custom_id': tree.child2.pk, 'name': '2', 'order': 1, 'parent_id': tree.root.pk}])
def test_values_list(self):
tree = self.create_tree()
self.assertEqual(list(Model.objects.with_tree_fields().values_list('name', flat=True)), ['root', '1', '1-1', '2', '2-1', '2-2'])
def test_values_list_ancestors(self):
tree = self.create_tree()
self.assertEqual(list(Model.objects.ancestors(tree.child2_1).values_list('parent', flat=True)), [tree.root.parent_id, tree.child2.parent_id])
def test_loops(self):
tree = self.create_tree()
tree.root.parent_id = tree.child1.pk
with self.assertRaises(ValidationError) as cm:
tree.root.full_clean()
self.assertEqual(cm.exception.messages, ['A node cannot be made a descendant of itself.'])
tree.child1.full_clean()
def test_unordered(self):
self.assertEqual(list(UnorderedModel.objects.all()), [])
def test_revert(self):
tree = self.create_tree()
obj = Model.objects.with_tree_fields().without_tree_fields().get(pk=tree.root.pk)
self.assertFalse(hasattr(obj, 'tree_depth'))
def test_form_field(self):
tree = self.create_tree()
class Form(forms.ModelForm):
class Meta():
model = Model
fields = ['parent']
html = f'{Form().as_table()}'
self.assertIn(f'<option value="{tree.child2_1.pk}">--- --- 2-1</option>', html)
self.assertIn('root', html)
class OtherForm(forms.Form):
node = Model._meta.get_field('parent').formfield(label_from_instance=(lambda obj: '{}{}'.format(''.join(([('*** ' if (obj == tree.child2_1) else '--- ')] * obj.tree_depth)), obj)), queryset=tree.child2.descendants())
html = f'{OtherForm().as_table()}'
self.assertIn(f'<option value="{tree.child2_1.pk}">*** *** 2-1</option>', html)
self.assertNotIn('root', html)
def test_string_ordering(self):
tree = type('Namespace', (), {})()
tree.americas = StringOrderedModel.objects.create(name='Americas')
tree.europe = StringOrderedModel.objects.create(name='Europe')
tree.france = StringOrderedModel.objects.create(name='France', parent=tree.europe)
tree.south_america = StringOrderedModel.objects.create(name='South America', parent=tree.americas)
tree.ecuador = StringOrderedModel.objects.create(name='Ecuador', parent=tree.south_america)
tree.colombia = StringOrderedModel.objects.create(name='Colombia', parent=tree.south_america)
tree.peru = StringOrderedModel.objects.create(name='Peru', parent=tree.south_america)
tree.north_america = StringOrderedModel.objects.create(name='North America', parent=tree.americas)
self.assertEqual(list(StringOrderedModel.objects.with_tree_fields()), [tree.americas, tree.north_america, tree.south_america, tree.colombia, tree.ecuador, tree.peru, tree.europe, tree.france])
self.assertEqual(list(tree.peru.ancestors(include_self=True)), [tree.americas, tree.south_america, tree.peru])
self.assertEqual(list(StringOrderedModel.objects.descendants(tree.americas, include_self=True)), [tree.americas, tree.north_america, tree.south_america, tree.colombia, tree.ecuador, tree.peru])
def test_many_ordering(self):
root = Model.objects.create(order=1, name='root')
for i in range(20, 0, (- 1)):
Model.objects.create(parent=root, name=f'Node {i}', order=(i * 10))
positions = [m.order for m in Model.objects.with_tree_fields()]
self.assertEqual(positions, sorted(positions))
def test_bfs_ordering(self):
tree = self.create_tree()
nodes = Model.objects.with_tree_fields().extra(order_by=['__tree.tree_depth', '__tree.tree_ordering'])
self.assertEqual(list(nodes), [tree.root, tree.child1, tree.child2, tree.child1_1, tree.child2_1, tree.child2_2])
def test_always_tree_query(self):
AlwaysTreeQueryModel.objects.create(name='Nothing')
obj = AlwaysTreeQueryModel.objects.get()
self.assertTrue(hasattr(obj, 'tree_depth'))
self.assertTrue(hasattr(obj, 'tree_ordering'))
self.assertTrue(hasattr(obj, 'tree_path'))
self.assertEqual(obj.tree_depth, 0)
AlwaysTreeQueryModel.objects.update(name='Something')
obj.refresh_from_db()
self.assertEqual(obj.name, 'Something')
AlwaysTreeQueryModel.objects.all().delete()
def test_always_tree_query_relations(self):
c = AlwaysTreeQueryModelCategory.objects.create()
m1 = AlwaysTreeQueryModel.objects.create(name='Nothing', category=c)
m2 = AlwaysTreeQueryModel.objects.create(name='Something')
m1.related.add(m2)
m3 = m2.related.get()
self.assertEqual(m1, m3)
self.assertEqual(m3.tree_depth, 0)
m4 = c.instances.get()
self.assertEqual(m1, m4)
self.assertEqual(m4.tree_depth, 0)
def test_reference(self):
    """``tree_field__in`` filters on a related model accept ancestor and
    descendant querysets, alone, OR-combined, negated and nested in ``Q``."""
    tree = self.create_tree()
    references = type('Namespace', (), {})()
    # One reference per tree node (position fixes default ordering), plus
    # one reference without any tree_field at all.
    references.none = ReferenceModel.objects.create(position=0)
    references.root = ReferenceModel.objects.create(position=1, tree_field=tree.root)
    references.child1 = ReferenceModel.objects.create(position=2, tree_field=tree.child1)
    references.child2 = ReferenceModel.objects.create(position=3, tree_field=tree.child2)
    references.child1_1 = ReferenceModel.objects.create(position=4, tree_field=tree.child1_1)
    references.child2_1 = ReferenceModel.objects.create(position=5, tree_field=tree.child2_1)
    references.child2_2 = ReferenceModel.objects.create(position=6, tree_field=tree.child2_2)
    # Plain descendants filter.
    self.assertEqual(list(ReferenceModel.objects.filter(tree_field__in=tree.child2.descendants(include_self=True))), [references.child2, references.child2_1, references.child2_2])
    # OR of ancestors and descendants.
    self.assertEqual(list(ReferenceModel.objects.filter((Q(tree_field__in=tree.child2.ancestors(include_self=True)) | Q(tree_field__in=tree.child2.descendants(include_self=True))))), [references.root, references.child2, references.child2_1, references.child2_2])
    self.assertEqual(list(ReferenceModel.objects.filter(((Q(tree_field__in=tree.child2_2.descendants(include_self=True)) | Q(tree_field__in=tree.child1.descendants())) | Q(tree_field__in=tree.child1.ancestors())))), [references.root, references.child1_1, references.child2_2])
    # Exclusions (negated combined Q objects).
    self.assertEqual(list(ReferenceModel.objects.exclude(((Q(tree_field__in=tree.child2.ancestors(include_self=True)) | Q(tree_field__in=tree.child2.descendants(include_self=True))) | Q(tree_field__isnull=True)))), [references.child1, references.child1_1])
    self.assertEqual(list(ReferenceModel.objects.exclude((((Q(tree_field__in=tree.child2.descendants()) | Q(tree_field__in=tree.child2.ancestors())) | Q(tree_field__in=tree.child1.descendants(include_self=True))) | Q(tree_field__in=tree.child1.ancestors())))), [references.none, references.child2])
    # Nested Q with inner negation.
    self.assertEqual(list(ReferenceModel.objects.filter(((Q((Q(tree_field__in=tree.child2.descendants()) & (~ Q(id=references.child2_2.id)))) | Q(tree_field__isnull=True)) | Q(tree_field__in=tree.child1.ancestors())))), [references.none, references.root, references.child2_1])
    # Descendant queryset further filtered by another tree queryset.
    self.assertEqual(list(ReferenceModel.objects.filter(tree_field__in=tree.child2.descendants(include_self=True).filter(parent__in=tree.child2.descendants(include_self=True)))), [references.child2_1, references.child2_2])
def test_annotate_tree(self):
    """Rows can be annotated with raw SQL referencing ``__tree.tree_path``;
    here: whether ``child2_1`` appears in each row's path."""
    tree = self.create_tree()
    qs = Model.objects.with_tree_fields().filter((Q(pk__in=tree.child2.ancestors(include_self=True)) | Q(pk__in=tree.child2.descendants(include_self=True))))
    if (connections[Model.objects.db].vendor == 'postgresql'):
        # PostgreSQL stores the tree path as an array; use = ANY(...).
        qs = qs.annotate(is_my_field=RawSQL('%s = ANY(__tree.tree_path)', [pk(tree.child2_1)], output_field=models.BooleanField()))
    else:
        # Other backends store the path as a SEPARATOR-delimited string;
        # fall back to a substring test.
        qs = qs.annotate(is_my_field=RawSQL('instr(__tree.tree_path, "{sep}{pk}{sep}") <> 0'.format(pk=pk(tree.child2_1), sep=SEPARATOR), [], output_field=models.BooleanField()))
    self.assertEqual([(node, node.is_my_field) for node in qs], [(tree.root, False), (tree.child2, False), (tree.child2_1, True), (tree.child2_2, False)])
def test_uuid_queries(self):
    """Tree queries also work for models with UUID primary keys."""
    root = UUIDModel.objects.create(name='root')
    first_child = UUIDModel.objects.create(parent=root, name='child1')
    second_child = UUIDModel.objects.create(parent=root, name='child2')
    self.assertCountEqual(root.descendants(), {first_child, second_child})
    self.assertEqual(list(first_child.ancestors(include_self=True)), [root, first_child])
def test_sibling_ordering(self):
    """``order_siblings_by()`` overrides the default sibling ordering."""
    tree = type('Namespace', (), {})()
    tree.root = MultiOrderedModel.objects.create(name='root')
    tree.child1 = MultiOrderedModel.objects.create(parent=tree.root, first_position=0, second_position=1, name='1')
    tree.child2 = MultiOrderedModel.objects.create(parent=tree.root, first_position=1, second_position=0, name='2')
    tree.child1_1 = MultiOrderedModel.objects.create(parent=tree.child1, first_position=0, second_position=1, name='1-1')
    tree.child2_1 = MultiOrderedModel.objects.create(parent=tree.child2, first_position=0, second_position=1, name='2-1')
    tree.child2_2 = MultiOrderedModel.objects.create(parent=tree.child2, first_position=1, second_position=0, name='2-2')
    by_first = [tree.root, tree.child1, tree.child1_1, tree.child2, tree.child2_1, tree.child2_2]
    by_second = [tree.root, tree.child2, tree.child2_2, tree.child2_1, tree.child1, tree.child1_1]
    # Explicit override takes effect...
    self.assertEqual(list(MultiOrderedModel.objects.order_siblings_by('second_position')), by_second)
    # ...the default ordering stays untouched...
    self.assertEqual(list(MultiOrderedModel.objects.with_tree_fields()), by_first)
    # ...and the override survives further queryset chaining.
    self.assertEqual(list(MultiOrderedModel.objects.order_siblings_by('second_position').all()), by_second)
def test_depth_filter(self):
    """Raw ``where`` clauses may reference the synthesized tree depth."""
    tree = self.create_tree()
    shallow = Model.objects.with_tree_fields().extra(where=['__tree.tree_depth between %s and %s'], params=[0, 1])
    self.assertEqual(list(shallow), [tree.root, tree.child1, tree.child2])
def test_explain(self):
    """The query plan mentions the CTE (checked on PostgreSQL only)."""
    if (connections[Model.objects.db].vendor != 'postgresql'):
        return
    self.assertIn('CTE', Model.objects.with_tree_fields().explain())
def test_tree_queries_without_tree_node(self):
    """``with_tree_fields()`` works on models that skip the TreeNode mixin."""
    parent = TreeNodeIsOptional.objects.create()
    TreeNodeIsOptional.objects.create(parent=parent)
    first, second = list(TreeNodeIsOptional.objects.with_tree_fields())
    self.assertEqual(first.tree_depth, 0)
    self.assertEqual(second.tree_depth, 1)
def test_polymorphic_queries(self):
    """Tree fields work across a multi-table-inheritance hierarchy: each
    concrete class sees only its own rows, but paths use the shared PKs."""
    root = InheritChildModel.objects.create(name='root')
    child1 = InheritGrandChildModel.objects.create(parent=root, name='child1')
    child2 = InheritParentModel.objects.create(parent=root, name='child2')
    InheritParentModel.objects.create(parent=child1, name='child1_1')
    InheritChildModel.objects.create(parent=child2, name='child2_1')
    InheritConcreteGrandChildModel.objects.create(parent=child2, name='child2_2')
    # The base class sees the whole tree.
    objs = InheritParentModel.objects.with_tree_fields()
    self.assertCountEqual([(p.name, p.tree_path) for p in objs], [('root', [1]), ('child1', [1, 2]), ('child1_1', [1, 2, 4]), ('child2', [1, 3]), ('child2_1', [1, 3, 5]), ('child2_2', [1, 3, 6])])
    # Subclasses see only instances of their own type (paths preserved).
    objs = InheritChildModel.objects.with_tree_fields()
    self.assertCountEqual([(p.name, p.tree_path) for p in objs], [('root', [1]), ('child1', [1, 2]), ('child2_1', [1, 3, 5])])
    objs = InheritGrandChildModel.objects.with_tree_fields()
    self.assertCountEqual([(p.name, p.tree_path) for p in objs], [('child1', [1, 2])])
    objs = InheritConcreteGrandChildModel.objects.with_tree_fields()
    self.assertCountEqual([(p.name, p.tree_path) for p in objs], [('child2_2', [1, 3, 6])])
class OptionPlotoptionsHeatmapSonificationTracksMappingVolume(Options):
    """Option mapping object for heatmap sonification track volume.

    The previous version declared each getter/setter pair as two plain
    methods with the same name; the second ``def`` silently replaced the
    first, leaving the getters unreachable.  They are restored here as
    properties, the standard accessor pattern for ``Options`` subclasses:
    reading the attribute returns the configured value (``None`` default),
    assigning stores it via ``_config``.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def api_request(url, method, headers, body=None):
    """Send an HTTP request with (best-effort) refreshed auth headers.

    :param url: target URL.
    :param method: HTTP verb; GET, POST, PUT and OPTIONS are supported.
    :param headers: header dict; auth data is injected when available.
    :param body: optional payload, used for POST and PUT only.
    :return: the ``requests`` response object, or ``None`` when the verb is
        unsupported or the request raised an exception.
    """
    try:
        # Best effort: keep the caller's headers if the auth refresh fails.
        headers = update_header_w_auth(headers)
    except Exception:
        pass
    verb = method.upper()
    try:
        # NOTE(review): verify=False deliberately disables TLS verification
        # (scanner-style traffic); confirm this is intended before reuse.
        if verb == 'GET':
            response = requests.get(url, headers=headers, allow_redirects=False, verify=False, timeout=10)
        elif verb == 'POST':
            response = requests.post(url, headers=headers, data=body, allow_redirects=False, verify=False, timeout=10)
        elif verb == 'PUT':
            response = requests.put(url, headers=headers, data=body, allow_redirects=False, verify=False, timeout=10)
        elif verb == 'OPTIONS':
            response = requests.options(url, headers=headers, verify=False, timeout=10)
        else:
            # Previously any other verb crashed with UnboundLocalError on
            # the return statement; fail explicitly instead.
            logs.logging.error('Unsupported HTTP method: %s', method)
            return None
        return response
    except Exception as e:
        logs.logging.error('Exception from sendrequest %s', e)
        return None
def assign_scaffold_names(scaffolds, perm_container, ref_genome):
    """Name scaffolds after the reference chromosomes their blocks map to.

    Each scaffold is named ``chr_<name>[_<name>...]`` from every reference
    chromosome contributing more than MIN_RATE of its matched blocks.  When
    several scaffolds end up with the same name, only the one with the most
    contigs keeps it; the others get an ``_unlocalized`` suffix (numbered if
    there is more than one).  A scaffold whose block signs mostly disagree
    with the reference is reverse-complemented in place.
    """
    MIN_RATE = 0.1
    PREFIX = 'chr'
    # Map block id -> (chromosome name, sign) from the reference genome.
    chr_index = {}
    for perm in perm_container.ref_perms:
        if (perm.genome_name == ref_genome):
            for block in perm.blocks:
                chr_index[block.block_id] = (perm.chr_name, block.sign)
    assigned_names = {}
    need_rev_compl = {}
    for scf in scaffolds:
        # Per-chromosome vote counts for this scaffold's blocks.
        scf_index = defaultdict(int)
        sign_agreement = 0
        total = 0
        for contig in scf.contigs:
            for block in contig.perm.blocks:
                if (block.block_id in chr_index):
                    (chrom, sign) = chr_index[block.block_id]
                    scf_index[chrom] += 1
                    total += 1
                    # Count blocks whose effective orientation matches the
                    # reference (contig sign flips the block sign).
                    sign_agreement += int((sign == (block.sign * contig.sign)))
        # Append chromosomes in decreasing vote order until one falls below
        # the MIN_RATE threshold (sorted, so the rest are below it too).
        name_str = PREFIX
        for chrom in sorted(scf_index, key=scf_index.get, reverse=True):
            if (scf_index[chrom] > (MIN_RATE * total)):
                name_str += ('_' + chrom)
            else:
                break
        assigned_names[scf] = name_str
        # Reverse-complement when fewer than half of the blocks agree.
        need_rev_compl[scf] = (sign_agreement < (total // 2))
    # Resolve name collisions: biggest scaffold keeps the plain name.
    same_names = defaultdict(list)
    for (scf, name) in assigned_names.items():
        same_names[name].append(scf)
    for (name, scf_list) in same_names.items():
        scf_list.sort(key=(lambda s: len(s.contigs)), reverse=True)
        unlocalized = scf_list[1:]
        for scf in unlocalized:
            assigned_names[scf] += '_unlocalized'
        if (len(unlocalized) > 1):
            for (num, scf) in enumerate(unlocalized):
                assigned_names[scf] += ('.' + str((num + 1)))
    for scf in scaffolds:
        scf.name = assigned_names[scf]
        if need_rev_compl[scf]:
            # Reverse the contig order and each contig itself, then shift
            # the inter-contig links one position (last link becomes empty).
            new_contigs = [c.reverse_copy() for c in scf.contigs][::(- 1)]
            for i in range((len(new_contigs) - 1)):
                new_contigs[i].link = new_contigs[(i + 1)].link
            new_contigs[(- 1)].link = Link(0, [])
            scf.contigs = new_contigs
class TestSuperFencesBad(util.MdCase):
    """Invalid fence options must make the fence render as plain text."""

    extension = ['pymdownx.superfences']
    extension_configs = {}

    def test_bad_options(self):
        """An unknown option disables fence handling."""
        source = '\n    ```python option="bad"\n    import test\n    ```\n    '
        expected = '\n    <p><code>python option="bad"\n    import test</code></p>\n    '
        self.check_markdown(source, expected, True)

    def test_bad_option_value(self):
        """A malformed value for a known option disables fence handling."""
        source = '\n    ```python hl_lines="unexpected 3" linenums="1"\n    """Some file."""\n    import foo.bar\n    import boo.baz\n    import foo.bar.baz\n    ```\n    '
        expected = '\n    <p><code>python hl_lines="unexpected 3" linenums="1"\n    """Some file."""\n    import foo.bar\n    import boo.baz\n    import foo.bar.baz</code></p>\n    '
        self.check_markdown(source, expected, True)
class MonitorWrapper(object):
    """Presents raw monitor rows of a copr as per-package build results.

    ``monitor_data`` is an iterable of row mappings sorted by
    ``package_id``, one row per (package, chroot) build.
    """

    def __init__(self, copr, monitor_data):
        self.copr = copr
        self.monitor_data = monitor_data

    def render_packages(self):
        """Group rows by package, keeping results for active chroots only.

        Fixes two defects of the previous implementation: when flushing a
        finished package it used the *next* row's ``package_name`` to label
        the *previous* package's results, and an empty ``monitor_data``
        raised ``NameError`` on the trailing append.
        """
        packages = []
        results = {}
        current_package_id = None
        current_package_name = None
        # Hoisted out of the loop; set membership tests are O(1).
        active_chroot_names = {chroot.name for chroot in self.copr.active_chroots}
        for row in self.monitor_data:
            if (row['package_id'] != current_package_id):
                if (current_package_id is not None):
                    packages.append({'pkg_name': current_package_name, 'pkg_version': None, 'results': results})
                current_package_id = row['package_id']
                current_package_name = row['package_name']
                results = {}
            build_chroot_name = '{}-{}-{}'.format(row['mock_chroot_os_release'], row['mock_chroot_os_version'], row['mock_chroot_arch'])
            if (build_chroot_name in active_chroot_names):
                results[build_chroot_name] = {'build_id': row['build_id'], 'status': StatusEnum(row['build_chroot_status']), 'pkg_version': row['build_pkg_version']}
        if (current_package_id is not None):
            # Flush the last package; skipped entirely for empty input.
            packages.append({'pkg_name': current_package_name, 'pkg_version': None, 'results': results})
        return packages

    def to_dict(self):
        """Serialize chroots, builds and rendered packages for templates."""
        return {'chroots': [chroot.name for chroot in self.copr.active_chroots_sorted], 'builds': [BuildWrapper(build).to_dict() for build in self.copr.builds], 'packages': self.render_packages()}
def quadrupole3d_22(ax, da, A, bx, db, B, R):
    """Machine-generated Cartesian quadrupole integral kernel (d-d pair).

    Presumably computes the (6, 6, 6) block of second-moment (quadrupole)
    integrals between two d-type Gaussian shells: ``ax``/``bx`` look like
    orbital exponents, ``da``/``db`` contraction coefficients, ``A``/``B``
    the shell centers and ``R`` the multipole origin -- TODO confirm
    against the code generator.

    NOTE(review): several literal coefficients read ``0.``, ``1.`` and
    ``5.`` (e.g. ``x40 = 0. * x39``), which zeroes entire result entries;
    they look like truncated generated constants and this function should
    be re-generated rather than patched by hand.
    """
    result = numpy.zeros((6, 6, 6), dtype=float)
    # Machine-generated common subexpressions; kept verbatim.
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (3.0 * x0)
    x2 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x3 = (- x2)
    x4 = (x3 + A[0])
    x5 = (x3 + B[0])
    x6 = (x4 * x5)
    x7 = (2.0 * x6)
    x8 = (x3 + R[0])
    x9 = (x4 * x8)
    x10 = (2.0 * x9)
    x11 = (x5 * x8)
    x12 = (2.0 * x11)
    x13 = (x0 * (((x1 + x10) + x12) + x7))
    x14 = ((- 2.0) * x2)
    x15 = (x14 + R[0])
    x16 = (x15 + B[0])
    x17 = (x0 * x16)
    x18 = (x0 + x12)
    x19 = (x18 * x4)
    x20 = (x17 + x19)
    x21 = (4.0 * x20)
    x22 = (x8 ** 2)
    x23 = (x1 + (2.0 * x22))
    x24 = (x18 * x8)
    x25 = (x17 + x24)
    x26 = (2.0 * x5)
    x27 = ((x0 * ((4.0 * x11) + x23)) + (x25 * x26))
    x28 = (x0 * (x15 + A[0]))
    x29 = (x0 + x10)
    x30 = (x28 + (x29 * x8))
    x31 = ((x1 * x16) + (2.0 * x19))
    x32 = (x0 * ((x24 + x30) + x31))
    x33 = (2.0 * x20)
    x34 = (x13 + (x33 * x8))
    x35 = (x32 + (x34 * x5))
    x36 = (2.0 * x4)
    x37 = ((ax * bx) * x0)
    x38 = (((5. * da) * db) * numpy.exp(((- x37) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x39 = ((x0 ** 1.5) * x38)
    x40 = (0. * x39)
    x41 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x42 = (- x41)
    x43 = (x42 + B[1])
    x44 = 1.
    x45 = (0. * x39)
    x46 = (x44 * x45)
    x47 = (x46 * (x32 + (x34 * x4)))
    x48 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x49 = (- x48)
    x50 = (x49 + B[2])
    x51 = (((0. * x0) * (x23 + (4.0 * x9))) + ((0. * x30) * x36))
    x52 = (x43 ** 2)
    x53 = (0.5 * x0)
    x54 = ((x0 ** 1.5) * x38)
    x55 = (x54 * (x52 + x53))
    x56 = (x44 * x50)
    x57 = (x39 * x56)
    x58 = (x50 ** 2)
    x59 = (x54 * (x53 + x58))
    x60 = (x42 + A[1])
    x61 = (x35 * x46)
    x62 = (x43 * x60)
    x63 = (x54 * (x53 + x62))
    x64 = (0.25 * x34)
    x65 = (x39 * x64)
    x66 = ((- 2.0) * x41)
    x67 = (x66 + B[1])
    x68 = (x0 * (x67 + A[1]))
    x69 = (2.0 * x62)
    x70 = (x0 + x69)
    x71 = ((x43 * x70) + x68)
    x72 = (0. * x54)
    x73 = (x44 * x72)
    x74 = (x30 * x73)
    x75 = (0.5 * x30)
    x76 = (0. * x59)
    x77 = (x30 * x44)
    x78 = (x49 + A[2])
    x79 = (x50 * x78)
    x80 = (x53 + x79)
    x81 = (x54 * x80)
    x82 = (0. * x55)
    x83 = ((- 2.0) * x48)
    x84 = (x83 + B[2])
    x85 = (x0 * (x84 + A[2]))
    x86 = (2.0 * x79)
    x87 = (x0 + x86)
    x88 = ((x50 * x87) + x85)
    x89 = (x53 + (x60 ** 2))
    x90 = (x27 * x72)
    x91 = ((x60 * x70) + x68)
    x92 = (x25 * x73)
    x93 = (x50 * x54)
    x94 = (0. * x44)
    x95 = (x93 * x94)
    x96 = (2.0 * x60)
    x97 = ((x0 * ((x1 + (2.0 * x52)) + (4.0 * x62))) + (x71 * x96))
    x98 = (x22 + x53)
    x99 = (x72 * x98)
    x100 = (0. * x98)
    x101 = (x46 * x78)
    x102 = (0.5 * x25)
    x103 = (x54 * x78)
    x104 = (x103 * x94)
    x105 = (0. * x54)
    x106 = (x105 * x44)
    x107 = (x106 * x98)
    x108 = (x53 + (x78 ** 2))
    x109 = (x105 * x108)
    x110 = (x109 * x44)
    x111 = ((x78 * x87) + x85)
    x112 = (2.0 * x78)
    x113 = ((x0 * ((x1 + (2.0 * x58)) + (4.0 * x79))) + (x112 * x88))
    x114 = (x42 + R[1])
    x115 = (x18 * x5)
    x116 = (x0 * ((x14 + A[0]) + B[0]))
    x117 = (x0 + x7)
    x118 = (x116 + (x117 * x5))
    x119 = (x13 + (x20 * x26))
    x120 = (x45 * ((x0 * ((x115 + x118) + x31)) + (x119 * x4)))
    x121 = (x114 * x43)
    x122 = (x121 + x53)
    x123 = (x13 + (x33 * x4))
    x124 = (x123 * x73)
    x125 = (x114 * x45)
    x126 = (x67 + R[1])
    x127 = (x0 * x126)
    x128 = (2.0 * x121)
    x129 = (x0 + x128)
    x130 = (x129 * x43)
    x131 = (x127 + x130)
    x132 = (x28 + (x29 * x4))
    x133 = (x132 * x72)
    x134 = (x114 * x60)
    x135 = (x134 + x53)
    x136 = (x119 * x73)
    x137 = (x129 * x60)
    x138 = (x127 + x137)
    x139 = (0.25 * x54)
    x140 = (x139 * x20)
    x141 = (0.5 * x20)
    x142 = (2.0 * x134)
    x143 = (x0 * (((x1 + x128) + x142) + x69))
    x144 = (2.0 * x43)
    x145 = ((x138 * x144) + x143)
    x146 = (x53 + x9)
    x147 = (x146 * x73)
    x148 = (0.5 * x138)
    x149 = (0. * x44)
    x150 = (x146 * x149)
    x151 = (x44 * x78)
    x152 = (x106 * x88)
    x153 = (x0 * ((x66 + A[1]) + R[1]))
    x154 = (x0 + x142)
    x155 = (x153 + (x154 * x60))
    x156 = (x115 + x17)
    x157 = (x156 * x72)
    x158 = ((x138 * x96) + x143)
    x159 = (x11 + x53)
    x160 = (x159 * x73)
    x161 = ((x1 * x126) + (2.0 * x137))
    x162 = ((x0 * ((x130 + x161) + x71)) + (x145 * x60))
    x163 = (x45 * x8)
    x164 = ((x149 * x159) * x54)
    x165 = (x106 * x111)
    x166 = (x49 + R[2])
    x167 = (x166 * x45)
    x168 = (x43 * x44)
    x169 = (x166 * x50)
    x170 = (x169 + x53)
    x171 = (x106 * x170)
    x172 = (x84 + R[2])
    x173 = (x0 * x172)
    x174 = (2.0 * x169)
    x175 = (x0 + x174)
    x176 = (x175 * x50)
    x177 = (x173 + x176)
    x178 = (x44 * x60)
    x179 = (x141 * x54)
    x180 = (x106 * x146)
    x181 = (x166 * x78)
    x182 = (x181 + x53)
    x183 = (x175 * x78)
    x184 = (x173 + x183)
    x185 = (0.5 * x184)
    x186 = (x185 * x54)
    x187 = (2.0 * x181)
    x188 = (x0 * (((x1 + x174) + x187) + x86))
    x189 = (2.0 * x50)
    x190 = ((x184 * x189) + x188)
    x191 = (x105 * x89)
    x192 = (x106 * x159)
    x193 = (x106 * x182)
    x194 = (x0 * ((x83 + A[2]) + R[2]))
    x195 = (x0 + x187)
    x196 = (x194 + (x195 * x78))
    x197 = ((x112 * x184) + x188)
    x198 = ((x1 * x172) + (2.0 * x183))
    x199 = ((x0 * ((x176 + x198) + x88)) + (x190 * x78))
    x200 = (x5 ** 2)
    x201 = ((x0 * ((x1 + (2.0 * x200)) + (4.0 * x6))) + (x118 * x36))
    x202 = (x114 ** 2)
    x203 = (x202 + x53)
    x204 = (x203 * x72)
    x205 = (x116 + (x117 * x4))
    x206 = (x114 * x129)
    x207 = (x127 + x206)
    x208 = (x207 * x73)
    x209 = ((x4 ** 2) + x53)
    x210 = (x1 + (2.0 * x202))
    x211 = ((x0 * ((4.0 * x121) + x210)) + (x144 * x207))
    x212 = (x211 * x72)
    x213 = (0. * x203)
    x214 = ((x114 * x154) + x153)
    x215 = (x214 * x73)
    x216 = (x114 * x138)
    x217 = (x143 + (2.0 * x216))
    x218 = (x53 + x6)
    x219 = (x139 * x218)
    x220 = (x218 * x54)
    x221 = (0.5 * x220)
    x222 = (x0 * ((x161 + x206) + x214))
    x223 = ((x217 * x43) + x222)
    x224 = (x223 * x46)
    x225 = (0.25 * x217)
    x226 = (x225 * x39)
    x227 = (x4 * x44)
    x228 = (0.5 * x81)
    x229 = ((x0 * ((4.0 * x134) + x210)) + (x214 * x96))
    x230 = (x200 + x53)
    x231 = (x230 * x72)
    x232 = (x46 * ((x217 * x60) + x222))
    x233 = (0. * x229)
    x234 = (x230 * x54)
    x235 = (x106 * x166)
    x236 = (x105 * x209)
    x237 = (x148 * x54)
    x238 = (x106 * x4)
    x239 = (x105 * x230)
    x240 = (x44 * x5)
    x241 = (x106 * x5)
    x242 = (x166 ** 2)
    x243 = (x242 + x53)
    x244 = (x243 * x72)
    x245 = (x106 * x243)
    x246 = (x166 * x175)
    x247 = (x173 + x246)
    x248 = (x247 * x73)
    x249 = (0. * x243)
    x250 = (x247 * x44)
    x251 = (x1 + (2.0 * x242))
    x252 = ((x0 * ((4.0 * x169) + x251)) + (x189 * x247))
    x253 = (x252 * x72)
    x254 = (0.5 * x63)
    x255 = ((x166 * x195) + x194)
    x256 = (x255 * x73)
    x257 = (x166 * x184)
    x258 = (x188 + (2.0 * x257))
    x259 = (0.25 * x258)
    x260 = (x259 * x39)
    x261 = (x0 * ((x198 + x246) + x255))
    x262 = ((x258 * x50) + x261)
    x263 = (x262 * x46)
    x264 = ((x0 * ((4.0 * x181) + x251)) + (x112 * x255))
    x265 = (0. * x264)
    x266 = (x46 * ((x258 * x78) + x261))
    # Assemble the 6x6x6 result tensor (generated; do not edit by hand).
    result[(0, 0, 0)] = numpy.sum((x40 * ((x0 * ((((4.0 * x13) + (x21 * x5)) + (x21 * x8)) + x27)) + (x35 * x36))))
    result[(0, 0, 1)] = numpy.sum((x43 * x47))
    result[(0, 0, 2)] = numpy.sum((x47 * x50))
    result[(0, 0, 3)] = numpy.sum((x51 * x55))
    result[(0, 0, 4)] = numpy.sum(((x43 * x51) * x57))
    result[(0, 0, 5)] = numpy.sum((x51 * x59))
    result[(0, 1, 0)] = numpy.sum((x60 * x61))
    result[(0, 1, 1)] = numpy.sum((x63 * x64))
    result[(0, 1, 2)] = numpy.sum(((x50 * x60) * x65))
    result[(0, 1, 3)] = numpy.sum((x71 * x74))
    result[(0, 1, 4)] = numpy.sum(((x50 * x63) * x75))
    result[(0, 1, 5)] = numpy.sum(((x60 * x76) * x77))
    result[(0, 2, 0)] = numpy.sum((x61 * x78))
    result[(0, 2, 1)] = numpy.sum(((x43 * x65) * x78))
    result[(0, 2, 2)] = numpy.sum((x64 * x81))
    result[(0, 2, 3)] = numpy.sum(((x77 * x78) * x82))
    result[(0, 2, 4)] = numpy.sum(((x43 * x75) * x81))
    result[(0, 2, 5)] = numpy.sum((x74 * x88))
    result[(0, 3, 0)] = numpy.sum((x89 * x90))
    result[(0, 3, 1)] = numpy.sum((x91 * x92))
    result[(0, 3, 2)] = numpy.sum(((x25 * x89) * x95))
    result[(0, 3, 3)] = numpy.sum((x97 * x99))
    result[(0, 3, 4)] = numpy.sum(((x91 * x95) * x98))
    result[(0, 3, 5)] = numpy.sum(((x100 * x59) * x89))
    result[(0, 4, 0)] = numpy.sum(((x101 * x27) * x60))
    result[(0, 4, 1)] = numpy.sum(((x102 * x63) * x78))
    result[(0, 4, 2)] = numpy.sum(((x102 * x60) * x81))
    result[(0, 4, 3)] = numpy.sum(((x104 * x71) * x98))
    result[(0, 4, 4)] = numpy.sum(((x63 * x80) * x98))
    result[(0, 4, 5)] = numpy.sum(((x107 * x60) * x88))
    result[(0, 5, 0)] = numpy.sum((x108 * x90))
    result[(0, 5, 1)] = numpy.sum(((x110 * x25) * x43))
    result[(0, 5, 2)] = numpy.sum((x111 * x92))
    result[(0, 5, 3)] = numpy.sum(((x100 * x108) * x55))
    result[(0, 5, 4)] = numpy.sum(((x107 * x111) * x43))
    result[(0, 5, 5)] = numpy.sum((x113 * x99))
    result[(1, 0, 0)] = numpy.sum((x114 * x120))
    result[(1, 0, 1)] = numpy.sum((x122 * x124))
    result[(1, 0, 2)] = numpy.sum(((x123 * x125) * x56))
    result[(1, 0, 3)] = numpy.sum((x131 * x133))
    result[(1, 0, 4)] = numpy.sum(((x122 * x132) * x95))
    result[(1, 0, 5)] = numpy.sum(((x114 * x132) * x76))
    result[(1, 1, 0)] = numpy.sum((x135 * x136))
    result[(1, 1, 1)] = numpy.sum((x138 * x140))
    result[(1, 1, 2)] = numpy.sum(((x135 * x141) * x93))
    result[(1, 1, 3)] = numpy.sum((x145 * x147))
    result[(1, 1, 4)] = numpy.sum(((x146 * x148) * x93))
    result[(1, 1, 5)] = numpy.sum(((x135 * x150) * x59))
    result[(1, 2, 0)] = numpy.sum(((x119 * x125) * x151))
    result[(1, 2, 1)] = numpy.sum(((x103 * x122) * x141))
    result[(1, 2, 2)] = numpy.sum(((x114 * x141) * x81))
    result[(1, 2, 3)] = numpy.sum(((x104 * x131) * x146))
    result[(1, 2, 4)] = numpy.sum(((x122 * x146) * x81))
    result[(1, 2, 5)] = numpy.sum(((x114 * x146) * x152))
    result[(1, 3, 0)] = numpy.sum((x155 * x157))
    result[(1, 3, 1)] = numpy.sum((x158 * x160))
    result[(1, 3, 2)] = numpy.sum(((x155 * x159) * x95))
    result[(1, 3, 3)] = numpy.sum((x162 * x163))
    result[(1, 3, 4)] = numpy.sum(((x158 * x163) * x56))
    result[(1, 3, 5)] = numpy.sum(((x155 * x76) * x8))
    result[(1, 4, 0)] = numpy.sum(((x104 * x135) * x156))
    result[(1, 4, 1)] = numpy.sum(((x103 * x148) * x159))
    result[(1, 4, 2)] = numpy.sum(((x135 * x159) * x81))
    result[(1, 4, 3)] = numpy.sum(((x145 * x151) * x163))
    result[(1, 4, 4)] = numpy.sum(((x148 * x8) * x81))
    result[(1, 4, 5)] = numpy.sum(((x135 * x152) * x8))
    result[(1, 5, 0)] = numpy.sum(((x109 * x114) * x156))
    result[(1, 5, 1)] = numpy.sum(((x108 * x122) * x164))
    result[(1, 5, 2)] = numpy.sum(((x114 * x159) * x165))
    result[(1, 5, 3)] = numpy.sum(((x109 * x131) * x8))
    result[(1, 5, 4)] = numpy.sum(((x122 * x165) * x8))
    result[(1, 5, 5)] = numpy.sum(((x113 * x114) * x163))
    result[(2, 0, 0)] = numpy.sum((x120 * x166))
    result[(2, 0, 1)] = numpy.sum(((x123 * x167) * x168))
    result[(2, 0, 2)] = numpy.sum((x124 * x170))
    result[(2, 0, 3)] = numpy.sum(((x132 * x166) * x82))
    result[(2, 0, 4)] = numpy.sum(((x132 * x171) * x43))
    result[(2, 0, 5)] = numpy.sum((x133 * x177))
    result[(2, 1, 0)] = numpy.sum(((x119 * x167) * x178))
    result[(2, 1, 1)] = numpy.sum(((x141 * x166) * x63))
    result[(2, 1, 2)] = numpy.sum(((x170 * x179) * x60))
    result[(2, 1, 3)] = numpy.sum(((x166 * x180) * x71))
    result[(2, 1, 4)] = numpy.sum(((x146 * x170) * x63))
    result[(2, 1, 5)] = numpy.sum(((x177 * x180) * x60))
    result[(2, 2, 0)] = numpy.sum((x136 * x182))
    result[(2, 2, 1)] = numpy.sum(((x179 * x182) * x43))
    result[(2, 2, 2)] = numpy.sum((x140 * x184))
    result[(2, 2, 3)] = numpy.sum(((x150 * x182) * x55))
    result[(2, 2, 4)] = numpy.sum(((x146 * x186) * x43))
    result[(2, 2, 5)] = numpy.sum((x147 * x190))
    result[(2, 3, 0)] = numpy.sum(((x156 * x166) * x191))
    result[(2, 3, 1)] = numpy.sum(((x166 * x192) * x91))
    result[(2, 3, 2)] = numpy.sum(((x164 * x170) * x89))
    result[(2, 3, 3)] = numpy.sum(((x163 * x166) * x97))
    result[(2, 3, 4)] = numpy.sum(((x171 * x8) * x91))
    result[(2, 3, 5)] = numpy.sum(((x177 * x191) * x8))
    result[(2, 4, 0)] = numpy.sum(((x156 * x193) * x60))
    result[(2, 4, 1)] = numpy.sum(((x159 * x182) * x63))
    result[(2, 4, 2)] = numpy.sum(((x159 * x186) * x60))
    result[(2, 4, 3)] = numpy.sum(((x193 * x71) * x8))
    result[(2, 4, 4)] = numpy.sum(((x185 * x63) * x8))
    result[(2, 4, 5)] = numpy.sum(((x163 * x178) * x190))
    result[(2, 5, 0)] = numpy.sum((x157 * x196))
    result[(2, 5, 1)] = numpy.sum(((x192 * x196) * x43))
    result[(2, 5, 2)] = numpy.sum((x160 * x197))
    result[(2, 5, 3)] = numpy.sum(((x196 * x8) * x82))
    result[(2, 5, 4)] = numpy.sum(((x163 * x168) * x197))
    result[(2, 5, 5)] = numpy.sum((x163 * x199))
    result[(3, 0, 0)] = numpy.sum((x201 * x204))
    result[(3, 0, 1)] = numpy.sum((x205 * x208))
    result[(3, 0, 2)] = numpy.sum(((x203 * x205) * x95))
    result[(3, 0, 3)] = numpy.sum((x209 * x212))
    result[(3, 0, 4)] = numpy.sum(((x207 * x209) * x95))
    result[(3, 0, 5)] = numpy.sum(((x209 * x213) * x59))
    result[(3, 1, 0)] = numpy.sum((x118 * x215))
    result[(3, 1, 1)] = numpy.sum((x217 * x219))
    result[(3, 1, 2)] = numpy.sum(((x214 * x221) * x50))
    result[(3, 1, 3)] = numpy.sum((x224 * x4))
    result[(3, 1, 4)] = numpy.sum(((x226 * x4) * x50))
    result[(3, 1, 5)] = numpy.sum(((x214 * x227) * x76))
    result[(3, 2, 0)] = numpy.sum(((x104 * x118) * x203))
    result[(3, 2, 1)] = numpy.sum(((x207 * x221) * x78))
    result[(3, 2, 2)] = numpy.sum(((x203 * x218) * x81))
    result[(3, 2, 3)] = numpy.sum(((x101 * x211) * x4))
    result[(3, 2, 4)] = numpy.sum(((x207 * x228) * x4))
    result[(3, 2, 5)] = numpy.sum(((x152 * x203) * x4))
    result[(3, 3, 0)] = numpy.sum((x229 * x231))
    result[(3, 3, 1)] = numpy.sum((x232 * x5))
    result[(3, 3, 2)] = numpy.sum(((x233 * x5) * x57))
    result[(3, 3, 3)] = numpy.sum((x40 * ((x0 * (((((4.0 * x138) * x43) + (4.0 * x143)) + x211) + (4.0 * x216))) + (x223 * x96))))
    result[(3, 3, 4)] = numpy.sum((x232 * x50))
    result[(3, 3, 5)] = numpy.sum((x233 * x59))
    result[(3, 4, 0)] = numpy.sum(((x104 * x214) * x230))
    result[(3, 4, 1)] = numpy.sum(((x226 * x5) * x78))
    result[(3, 4, 2)] = numpy.sum(((x214 * x228) * x5))
    result[(3, 4, 3)] = numpy.sum((x224 * x78))
    result[(3, 4, 4)] = numpy.sum((x225 * x81))
    result[(3, 4, 5)] = numpy.sum((x215 * x88))
    result[(3, 5, 0)] = numpy.sum(((x108 * x213) * x234))
    result[(3, 5, 1)] = numpy.sum(((x110 * x207) * x5))
    result[(3, 5, 2)] = numpy.sum(((x165 * x203) * x5))
    result[(3, 5, 3)] = numpy.sum((x108 * x212))
    result[(3, 5, 4)] = numpy.sum((x111 * x208))
    result[(3, 5, 5)] = numpy.sum((x113 * x204))
    result[(4, 0, 0)] = numpy.sum(((x114 * x167) * x201))
    result[(4, 0, 1)] = numpy.sum(((x122 * x205) * x235))
    result[(4, 0, 2)] = numpy.sum(((x114 * x171) * x205))
    result[(4, 0, 3)] = numpy.sum(((x131 * x166) * x236))
    result[(4, 0, 4)] = numpy.sum(((((x122 * x149) * x170) * x209) * x54))
    result[(4, 0, 5)] = numpy.sum(((x114 * x177) * x236))
    result[(4, 1, 0)] = numpy.sum(((x118 * x135) * x235))
    result[(4, 1, 1)] = numpy.sum(((x148 * x166) * x220))
    result[(4, 1, 2)] = numpy.sum(((x135 * x170) * x220))
    result[(4, 1, 3)] = numpy.sum(((x145 * x167) * x227))
    result[(4, 1, 4)] = numpy.sum(((x170 * x237) * x4))
    result[(4, 1, 5)] = numpy.sum(((x135 * x177) * x238))
    result[(4, 2, 0)] = numpy.sum(((x114 * x118) * x193))
    result[(4, 2, 1)] = numpy.sum(((x122 * x182) * x220))
    result[(4, 2, 2)] = numpy.sum(((x114 * x185) * x220))
    result[(4, 2, 3)] = numpy.sum(((x131 * x193) * x4))
    result[(4, 2, 4)] = numpy.sum(((x122 * x186) * x4))
    result[(4, 2, 5)] = numpy.sum(((x125 * x190) * x227))
    result[(4, 3, 0)] = numpy.sum(((x155 * x166) * x239))
    result[(4, 3, 1)] = numpy.sum(((x158 * x167) * x240))
    result[(4, 3, 2)] = numpy.sum(((x155 * x171) * x5))
    result[(4, 3, 3)] = numpy.sum((x162 * x167))
    result[(4, 3, 4)] = numpy.sum(((x158 * x170) * x73))
    result[(4, 3, 5)] = numpy.sum(((x155 * x177) * x72))
    result[(4, 4, 0)] = numpy.sum((((x135 * x149) * x182) * x234))
    result[(4, 4, 1)] = numpy.sum(((x182 * x237) * x5))
    result[(4, 4, 2)] = numpy.sum(((x135 * x186) * x5))
    result[(4, 4, 3)] = numpy.sum(((x145 * x182) * x73))
    result[(4, 4, 4)] = numpy.sum(((x138 * x139) * x184))
    result[(4, 4, 5)] = numpy.sum(((x135 * x190) * x73))
    result[(4, 5, 0)] = numpy.sum(((x114 * x196) * x239))
    result[(4, 5, 1)] = numpy.sum(((x122 * x196) * x241))
    result[(4, 5, 2)] = numpy.sum(((x125 * x197) * x240))
    result[(4, 5, 3)] = numpy.sum(((x131 * x196) * x72))
    result[(4, 5, 4)] = numpy.sum(((x122 * x197) * x73))
    result[(4, 5, 5)] = numpy.sum((x125 * x199))
    result[(5, 0, 0)] = numpy.sum((x201 * x244))
    result[(5, 0, 1)] = numpy.sum(((x205 * x245) * x43))
    result[(5, 0, 2)] = numpy.sum((x205 * x248))
    result[(5, 0, 3)] = numpy.sum(((x209 * x249) * x55))
    result[(5, 0, 4)] = numpy.sum(((x236 * x250) * x43))
    result[(5, 0, 5)] = numpy.sum((x209 * x253))
    result[(5, 1, 0)] = numpy.sum(((x118 * x245) * x60))
    result[(5, 1, 1)] = numpy.sum(((x218 * x243) * x63))
    result[(5, 1, 2)] = numpy.sum(((x221 * x247) * x60))
    result[(5, 1, 3)] = numpy.sum(((x238 * x243) * x71))
    result[(5, 1, 4)] = numpy.sum(((x247 * x254) * x4))
    result[(5, 1, 5)] = numpy.sum((((x252 * x4) * x46) * x60))
    result[(5, 2, 0)] = numpy.sum((x118 * x256))
    result[(5, 2, 1)] = numpy.sum(((x221 * x255) * x43))
    result[(5, 2, 2)] = numpy.sum((x219 * x258))
    result[(5, 2, 3)] = numpy.sum(((x227 * x255) * x82))
    result[(5, 2, 4)] = numpy.sum(((x260 * x4) * x43))
    result[(5, 2, 5)] = numpy.sum((x263 * x4))
    result[(5, 3, 0)] = numpy.sum(((x234 * x249) * x89))
    result[(5, 3, 1)] = numpy.sum(((x241 * x243) * x91))
    result[(5, 3, 2)] = numpy.sum(((x191 * x250) * x5))
    result[(5, 3, 3)] = numpy.sum((x244 * x97))
    result[(5, 3, 4)] = numpy.sum((x248 * x91))
    result[(5, 3, 5)] = numpy.sum((x253 * x89))
    result[(5, 4, 0)] = numpy.sum(((x178 * x239) * x255))
    result[(5, 4, 1)] = numpy.sum(((x254 * x255) * x5))
    result[(5, 4, 2)] = numpy.sum(((x260 * x5) * x60))
    result[(5, 4, 3)] = numpy.sum((x256 * x71))
    result[(5, 4, 4)] = numpy.sum((x259 * x63))
    result[(5, 4, 5)] = numpy.sum((x263 * x60))
    result[(5, 5, 0)] = numpy.sum((x231 * x264))
    result[(5, 5, 1)] = numpy.sum((((x168 * x265) * x39) * x5))
    result[(5, 5, 2)] = numpy.sum((x266 * x5))
    result[(5, 5, 3)] = numpy.sum((x265 * x55))
    result[(5, 5, 4)] = numpy.sum((x266 * x43))
    result[(5, 5, 5)] = numpy.sum((x40 * ((x0 * (((((4.0 * x184) * x50) + (4.0 * x188)) + x252) + (4.0 * x257))) + (x112 * x262))))
    return result
class JsNvd3(JsPackage):
    """Base JavaScript wrapper for NVD3 charts.

    Builds the ``nv.models.<chart>()`` fragment for the concrete chart class
    (which must define ``chartFnc``) and exposes chainable configuration
    helpers mirroring the NVD3 API via ``self.fnc`` (from ``JsPackage``).
    """

    # Module aliases used to register the required JS / CSS imports.
    lib_alias = {'js': 'nvd3', 'css': 'nvd3'}
    lib_selector = 'd3.select("body")'

    def __init__(self, component: primitives.HtmlModel=None, page: primitives.PageModel=None, js_code: str=None, selector: str=None, data: Any=None, set_var: bool=None):
        """Attach the wrapper to a component and register NVD3 assets.

        ``selector`` and ``data`` are accepted for signature compatibility
        but unused here -- presumably consumed by sibling wrappers; confirm
        before removing.
        """
        (self.component, self.page) = (component, page)
        if ((page is None) and (component is not None)):
            # Fall back to the component's page when no page is given.
            self.page = component.page
        self._selector = ('nv.models.%s()' % self.chartFnc)
        (self.varName, self.setVar) = (js_code, set_var)
        # Register the NVD3 assets on the owning component.
        self.component.jsImports.add(self.lib_alias['js'])
        self.component.cssImport.add(self.lib_alias['css'])
        (self._js, self._xaxis, self._yaxis, self._u) = ([[]], None, None, {})
        (self._js_enums, self._container) = ({}, ('%s.parentNode' % self.element))

    def set_var(self, flag: bool):
        """Toggle whether the chart is assigned to a JS variable; chainable."""
        self.setVar = flag
        return self

    def varId(self) -> str:
        """Return the JS identifier of the chart (falls back to selector)."""
        return (self._selector if (self.varName is None) else self.varName)

    def options(self, opts):
        raise NotImplementedError()

    def width(self, value: float):
        """Set the chart width (pixels)."""
        return self.fnc(('width(%s)' % value))

    def height(self, value: float):
        """Set the chart height (pixels)."""
        return self.fnc(('height(%s)' % value))

    def margin(self, options: dict):
        """Set the chart margins from a dict of top/right/bottom/left."""
        options = JsUtils.jsConvertData(options, None)
        self._js.append(('margin(%s)' % options))
        return self

    def useInteractiveGuideline(self, flag: bool):
        """Enable/disable the interactive guideline tooltip."""
        flag = JsUtils.jsConvertData(flag, None)
        return self.fnc(('useInteractiveGuideline(%s)' % flag))

    def transitionDuration(self, time):
        """Set the transition duration (milliseconds)."""
        time = JsUtils.jsConvertData(time, None)
        return self.fnc(('transitionDuration(%s)' % time))

    def showLegend(self, flag: bool):
        """Show or hide the legend."""
        flag = JsUtils.jsConvertData(flag, None)
        return self.fnc(('showLegend(%s)' % flag))

    def xAxis(self) -> JsNvd3Axis:
        """Return (lazily creating) the x axis wrapper."""
        if (self._xaxis is None):
            self._xaxis = JsNvd3Axis(('%s.xAxis' % self.varName), page=self.page, component=self.component)
        return self._xaxis

    def yAxis(self) -> JsNvd3Axis:
        """Return (lazily creating) the y axis wrapper."""
        if (self._yaxis is None):
            self._yaxis = JsNvd3Axis(('%s.yAxis' % self.varName), page=self.page, component=self.component)
        return self._yaxis

    def showYAxis(self, flag: bool):
        """Show or hide the y axis."""
        flag = JsUtils.jsConvertData(flag, None)
        return self.fnc(('showYAxis(%s)' % flag))

    def showXAxis(self, flag: bool):
        """Show or hide the x axis."""
        flag = JsUtils.jsConvertData(flag, None)
        return self.fnc(('showXAxis(%s)' % flag))

    def update(self):
        # Intentionally a no-op in the base class.
        pass

    def showControls(self, flag: bool):
        """Show or hide the chart control buttons."""
        flag = JsUtils.jsConvertData(flag, None)
        return self.fnc(('showControls(%s)' % flag))

    def noData(self):
        raise NotImplementedError()

    def color(self, colors):
        # Intentionally a no-op in the base class.
        pass

    def createWidget(self, html_code: str, container: str=None, options: etypes.JS_DATA_TYPES=None):
        """Return a JS expression creating a new DOM node for this chart.

        The node is appended to ``container`` (or ``document.body`` when no
        container id is given) and the component builder is run against it.
        """
        self.component.options.managed = False
        self.component.js_code = html_code
        return JsUtils.jsWrap(('(function(containerId, tag, htmlCode, jsCode, attrs){\nconst newDiv = document.createElement(tag); \nObject.keys(attrs).forEach(function(key) {newDiv.setAttribute(key, attrs[key]);}); newDiv.id = htmlCode;\nif(!containerId){document.body.appendChild(newDiv)} else {document.getElementById(containerId).appendChild(newDiv)};\n%(builder)s;return newDiv})(%(container)s, "%(tag)s", %(html_code)s, %(js_code)s, %(attrs)s)' % {'js_code': JsUtils.jsConvertData(self.component.js_code, None), 'html_code': JsUtils.jsConvertData((html_code or self.component.html_code), None), 'tag': self.component.tag, 'ctx': self.component.options.config_js(options).toStr(), 'container': JsUtils.jsConvertData(container, None), 'attrs': self.component.get_attrs(css_class_names=self.component.style.get_classes(), to_str=False), 'builder': self.component.build(options=options)}))
class TransitionIterator:
    """Iterate over a TransitionBatch in fixed-size mini-batches.

    Each epoch yields ``ceil(num_stored / batch_size)`` batches; the final
    batch may be smaller. With ``shuffle_each_epoch`` set, a fresh random
    permutation of the indices is drawn every time iteration restarts.
    """

    def __init__(self, transitions: "TransitionBatch", batch_size: int,
                 shuffle_each_epoch: bool = False,
                 rng: Optional[np.random.Generator] = None):
        self.transitions = transitions
        self.num_stored = len(transitions)
        # Identity ordering by default; replaced per-epoch when shuffling.
        self._order: np.ndarray = np.arange(self.num_stored)
        self.batch_size = batch_size
        self._current_batch = 0
        self._shuffle_each_epoch = shuffle_each_epoch
        self._rng = np.random.default_rng() if rng is None else rng

    def _get_indices_next_batch(self) -> Sized:
        """Advance the cursor and return the storage indices of the next batch."""
        lo = self._current_batch * self.batch_size
        if lo >= self.num_stored:
            raise StopIteration
        hi = min(lo + self.batch_size, self.num_stored)
        self._current_batch += 1
        return self._order[range(lo, hi)]

    def __iter__(self):
        # Restart the epoch; optionally reshuffle the visit order.
        self._current_batch = 0
        if self._shuffle_each_epoch:
            self._order = self._rng.permutation(self.num_stored)
        return self

    def __next__(self):
        return self.transitions[self._get_indices_next_batch()]

    def ensemble_size(self):
        # Plain (non-bootstrap) iterators have no ensemble dimension.
        return 0

    def __len__(self):
        # Ceiling division: number of batches per epoch.
        return -(-self.num_stored // self.batch_size)
# Fixed: the parametrize line was missing its ``@pytest.mark`` prefix,
# which made it a syntax error.
@pytest.mark.parametrize('media_type', ['application/json', 'application/msgpack'])
def test_empty_body(asgi, media_type):
    """An empty POST body with a declared media type must surface
    MediaNotFoundError (captured by the resource, so the request succeeds)."""
    client = _create_client_invalid_media(
        asgi, errors.HTTPBadRequest,
        {'application/msgpack': media.MessagePackHandler(),
         'application/json': media.JSONHandler()})
    headers = {'Content-Type': media_type}
    # The resource catches the media error itself; the response is still 200.
    assert client.simulate_post('/', headers=headers).status_code == 200
    assert 'Could not parse an empty' in client.resource.captured_error.value.description
    assert isinstance(client.resource.captured_error.value, errors.MediaNotFoundError)
def extractAnhobbiesWordpressCom(item):
    """Map a feed item from anhobbies.wordpress.com to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def request_response(func: typing.Callable[([Request], typing.Union[(typing.Awaitable[Response], Response)])]) -> ASGIApp:
    """Wrap a request handler (sync or async) into an ASGI application.

    The handler receives a Request and returns a Response; sync handlers are
    executed in a threadpool so they do not block the event loop.
    """
    async def app(scope: Scope, receive: Receive, send: Send) -> None:
        request = Request(scope, receive, send)
        # The inner coroutine deliberately shadows the outer ``app`` name: it
        # is the per-request ASGI callable handed to the exception wrapper.
        async def app(scope: Scope, receive: Receive, send: Send) -> None:
            if is_async_callable(func):
                response = (await func(request))
            else:
                # Sync handler: offload to a threadpool.
                response = (await run_in_threadpool(func, request))
            (await response(scope, receive, send))
        # ``app`` here resolves to the inner coroutine defined just above.
        (await wrap_app_handling_exceptions(app, request)(scope, receive, send))
    return app
class TestSetCustomUserClaims:
    """Tests for auth.set_custom_user_claims().

    Fixed in review: the three parametrize lines were missing their
    ``@pytest.mark`` prefix (syntax errors), and the error test read
    ``excinfo.value.`` with no attribute (syntax error) — restored to
    ``http_response``.
    """

    @pytest.mark.parametrize('arg', INVALID_STRINGS + [('a' * 129)])
    def test_invalid_uid(self, user_mgt_app, arg):
        # UIDs must be non-empty strings no longer than 128 characters.
        with pytest.raises(ValueError):
            auth.set_custom_user_claims(arg, {'foo': 'bar'}, app=user_mgt_app)

    @pytest.mark.parametrize('arg', INVALID_DICTS[1:] + ['"json"'])
    def test_invalid_custom_claims(self, user_mgt_app, arg):
        # Claims must be a dict or a JSON object string; other values fail.
        with pytest.raises(ValueError):
            auth.set_custom_user_claims('user', arg, app=user_mgt_app)

    @pytest.mark.parametrize('key', _auth_utils.RESERVED_CLAIMS)
    def test_single_reserved_claim(self, user_mgt_app, key):
        claims = {key: 'value'}
        with pytest.raises(ValueError) as excinfo:
            auth.set_custom_user_claims('user', claims, app=user_mgt_app)
        assert (str(excinfo.value) == 'Claim "{0}" is reserved, and must not be set.'.format(key))

    def test_multiple_reserved_claims(self, user_mgt_app):
        claims = {key: 'value' for key in _auth_utils.RESERVED_CLAIMS}
        with pytest.raises(ValueError) as excinfo:
            auth.set_custom_user_claims('user', claims, app=user_mgt_app)
        joined = ', '.join(sorted(claims.keys()))
        assert (str(excinfo.value) == 'Claims "{0}" are reserved, and must not be set.'.format(joined))

    def test_large_claims_payload(self, user_mgt_app):
        # Serialized claims are limited to 1000 characters.
        claims = {'key': ('A' * 1000)}
        with pytest.raises(ValueError) as excinfo:
            auth.set_custom_user_claims('user', claims, app=user_mgt_app)
        assert (str(excinfo.value) == 'Custom claims payload must not exceed 1000 characters.')

    def test_set_custom_user_claims(self, user_mgt_app):
        (_, recorder) = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}')
        claims = {'admin': True, 'package': 'gold'}
        auth.set_custom_user_claims('testuser', claims, app=user_mgt_app)
        request = json.loads(recorder[0].body.decode())
        # Dict claims are JSON-serialized into the customAttributes field.
        assert (request == {'localId': 'testuser', 'customAttributes': json.dumps(claims)})

    def test_set_custom_user_claims_str(self, user_mgt_app):
        (_, recorder) = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}')
        claims = json.dumps({'admin': True, 'package': 'gold'})
        auth.set_custom_user_claims('testuser', claims, app=user_mgt_app)
        request = json.loads(recorder[0].body.decode())
        # A pre-serialized JSON string is passed through unchanged.
        assert (request == {'localId': 'testuser', 'customAttributes': claims})

    @pytest.mark.parametrize('claims', [None, auth.DELETE_ATTRIBUTE])
    def test_set_custom_user_claims_remove(self, user_mgt_app, claims):
        (_, recorder) = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}')
        auth.set_custom_user_claims('testuser', claims, app=user_mgt_app)
        request = json.loads(recorder[0].body.decode())
        # Removal is expressed on the wire as an empty claims object.
        assert (request == {'localId': 'testuser', 'customAttributes': json.dumps({})})

    def test_set_custom_user_claims_error(self, user_mgt_app):
        _instrument_user_manager(user_mgt_app, 500, '{"error": {"message": "UNEXPECTED_CODE"}}')
        with pytest.raises(exceptions.InternalError) as excinfo:
            auth.set_custom_user_claims('user', {}, app=user_mgt_app)
        assert (str(excinfo.value) == 'Error while calling Auth service (UNEXPECTED_CODE).')
        assert (excinfo.value.http_response is not None)
        assert (excinfo.value.cause is not None)
class UserInfoAdmin(admin.ModelAdmin):
    """Admin for UserInfo rows with avatar/username rendering that respects
    the row's sign status."""

    # list_display column: Django calls this plain function with the row's
    # model instance, so ``self`` here is the UserInfo object, not the admin.
    def get_avatar(self: UserInfo):
        # Signed users (status 1 or 2) render their external avatar URL.
        if (self.sign_status in [1, 2]):
            return mark_safe(f'<img src="{self.avatar_url}" style="width:30px;height:30px;border-radius:50%;">')
        if self.avatar:
            # NOTE(review): ``self.avatar.url.url`` looks like a typo for
            # ``self.avatar.url`` — confirm against the UserInfo model.
            return mark_safe(f'<img src="{self.avatar.url.url}" style="width:30px;height:30px;border-radius:50%;">')
        # Implicitly returns None (blank cell) when no avatar is available.
    # NOTE(review): empty label — possibly stripped non-ASCII text; restore.
    get_avatar.short_description = ''

    # list_display column: masks the username unless sign_status is falsy.
    def get_user_name(self):
        if (not self.sign_status):
            return self.username
        return '****'
    # NOTE(review): empty label — possibly stripped non-ASCII text; restore.
    get_user_name.short_description = ''

    list_display = [get_user_name, 'nick_name', get_avatar, 'sign_status', 'ip', 'addr', 'is_superuser', 'date_joined', 'last_login']

    # Admin action: iterates selected rows, skipping unsigned users.
    # NOTE(review): the loop body only skips — the action appears unfinished.
    def get_avatar_action(self, request, queryset):
        for obj in queryset:
            if (not obj.sign_status):
                continue
    # NOTE(review): empty label — possibly stripped non-ASCII text; restore.
    get_avatar_action.short_description = ''

    actions = [get_avatar_action]
def test_origin(f):
    """Check every attribute of the two ORIGIN objects in the test file."""
    def_origin = f.object('ORIGIN', 'DEFINING_ORIGIN', 10, 0)
    # (attribute, expected value), checked in the original assertion order.
    expected_defining = [
        ('type', 'ORIGIN'),
        ('origin', 10),
        ('file_id', 'some logical file'),
        ('file_set_name', 'SET-NAME'),
        ('file_set_nr', 1042),
        ('file_nr', 7),
        ('file_type', 'CRUCIAL'),
        ('product', 'fantasy'),
        ('version', '-1.0'),
        ('programs', ['PROG1', 'PROG2']),
        ('creation_time', datetime(2019, 5, 2, 13, 51)),
        ('order_nr', 'SR-BB'),
        ('descent_nr', ['DESNUM']),
        ('run_nr', [17]),
        ('well_id', 'CODED-WELL'),
        ('well_name', 'SECRET-WELL'),
        ('field_name', 'WILDCAT'),
        ('producer_code', 307),
        ('producer_name', 'Test Production'),
        ('company', 'The Company'),
        ('namespace_name', 'DIC1'),
        ('namespace_version', 6),
    ]
    for attr, want in expected_defining:
        assert getattr(def_origin, attr) == want

    random_origin = f.object('ORIGIN', 'RANDOM', 127, 0)
    expected_random = [
        ('origin', 127),
        ('file_id', 'some other logical file'),
        ('file_set_nr', 1042),
        ('file_nr', 6),
    ]
    for attr, want in expected_random:
        assert getattr(random_origin, attr) == want
def execute_lexer_test(name):
    """Run the MATLAB lexer over every *.m file in the current directory,
    writing each file's lexer stdout next to it as ``<file>.out``.

    :param name: test name, echoed in the returned status string.
    """
    for src in os.listdir('.'):
        if not src.endswith('.m'):
            continue
        result = run_module('miss_hit_core.m_lexer', [src])
        with open(src + '.out', 'w') as out_fd:
            out_fd.write(result.stdout)
    return 'Ran lexer test %s' % name
def main():
    """Run PAML site-model comparisons on the PRM1 protamine tree and
    display the M2 histogram faces."""
    tree = EvolTree(WRKDIR + 'tree.nw')
    tree.workdir = 'data/protamine/PRM1/paml/'
    random_swap(tree)
    tree.link_to_evol_model(WRKDIR + 'paml/fb/fb.out', 'fb')
    check_annotation(tree)
    # Attach the four site models in the same order as before.
    for model in ('M1', 'M2', 'M7', 'M8'):
        tree.link_to_evol_model(WRKDIR + 'paml/%s/%s.out' % (model, model), model)
    tree.link_to_alignment(WRKDIR + 'alignments.fasta_ali')
    print('pv of LRT M2 vs M1:\n', tree.get_most_likely('M2', 'M1'),
          '\n', 'pv of LRT M8 vs M7:\n', tree.get_most_likely('M8', 'M7'))
    tree.show(histfaces=['M2'])
    print('The End.')
class OptionSeriesDependencywheelDataAccessibility(Options):
    """Accessibility options for a dependency-wheel data point.

    Fixed in review: each option was defined twice (getter then setter with
    the same name), so the second definition silently shadowed the first.
    Restored as property getter/setter pairs, the usual Options pattern.
    """

    @property
    def description(self):
        """Point description exposed to screen readers (None when unset)."""
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether accessibility is enabled for this point (None when unset)."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class CheckPayment(QuickbooksBaseObject):
    """QBO CheckPayment sub-object: a print status plus a bank-account ref."""

    class_dict = {'BankAccountRef': Ref}
    qbo_object_name = 'CheckPayment'

    def __init__(self):
        super().__init__()
        # QBO defaults a check's print status to "NotSet".
        self.PrintStatus = 'NotSet'
        self.BankAccountRef = None

    def __str__(self):
        return self.PrintStatus
def _dp_add_ports(dp, dp_conf, dp_id, vlans):
    """Parse the 'interfaces' and 'interface_ranges' sections of a datapath
    config and add the resulting ports to ``dp``.

    :param dp: datapath object that receives the parsed ports.
    :param dp_conf: raw datapath configuration dict.
    :param dp_id: datapath ID, forwarded to per-port parsing.
    :param vlans: known VLANs, forwarded to per-port parsing.
    """
    ports_conf = dp_conf.get('interfaces', {})
    port_ranges_conf = dp_conf.get('interface_ranges', {})
    test_config_condition((not isinstance(ports_conf, dict)), 'Invalid syntax in interface config')
    test_config_condition((not isinstance(port_ranges_conf, dict)), 'Invalid syntax in interface ranges config')
    def _map_port_num_to_port(ports_conf):
        # Index each explicitly configured port by its number; an explicit
        # 'number' key in the port config overrides the mapping key.
        port_num_to_port_conf = {}
        for (port_key, port_conf) in ports_conf.items():
            test_config_condition((not isinstance(port_conf, dict)), 'Invalid syntax in port config')
            port_num = port_conf.get('number', port_key)
            try:
                port_num_to_port_conf[port_num] = (port_key, port_conf)
            except TypeError as type_error:
                # Unhashable port number (e.g. a list) cannot be a dict key.
                raise InvalidConfigError('Invalid syntax in port config') from type_error
        return port_num_to_port_conf
    def _parse_port_ranges(port_ranges_conf, port_num_to_port_conf):
        # Expand range keys like '1-4,7' and merge their config into the
        # per-port table; explicit per-port config wins via setdefault().
        all_port_nums = set()
        for (port_range, port_conf) in port_ranges_conf.items():
            test_config_condition((not isinstance(port_conf, dict)), 'Invalid syntax in port config')
            port_nums = set()
            if ('number' in port_conf):
                # A literal port number makes no sense on a range entry.
                del port_conf['number']
            for range_ in re.findall('(\\d+-\\d+)', str(port_range)):
                (start_num, end_num) = [int(num) for num in range_.split('-')]
                test_config_condition((start_num >= end_num), ('Incorrect port range (%d - %d)' % (start_num, end_num)))
                port_nums.update(range(start_num, (end_num + 1)))
                # Strip the consumed range so only bare numbers remain below.
                port_range = re.sub(range_, '', port_range)
            other_nums = [int(p) for p in re.findall('\\d+', str(port_range))]
            port_nums.update(other_nums)
            test_config_condition((not port_nums), 'interface-ranges contain invalid config')
            test_config_condition(port_nums.intersection(all_port_nums), 'interfaces-ranges cannot overlap')
            all_port_nums.update(port_nums)
            for port_num in port_nums:
                if (port_num in port_num_to_port_conf):
                    # Port already configured explicitly: only fill in
                    # attributes it does not set itself.
                    for (attr, value) in port_conf.items():
                        port_num_to_port_conf[port_num][1].setdefault(attr, value)
                else:
                    port_num_to_port_conf[port_num] = (port_num, port_conf)
    port_num_to_port_conf = _map_port_num_to_port(ports_conf)
    _parse_port_ranges(port_ranges_conf, port_num_to_port_conf)
    # Each value is a (port_key, port_conf) pair; the first element is the
    # original config key (which may differ from the resolved number).
    for (port_num, port_conf) in port_num_to_port_conf.values():
        port = _dp_parse_port(dp_id, port_num, port_conf, vlans)
        dp.add_port(port)
def test_sub_wf_single_named_tuple():
    """A single-field NamedTuple result flows through a sub-workflow intact."""
    SingleOut = typing.NamedTuple('SingleNamedOutput', [('named1', int)])

    def t1(a: int) -> SingleOut:
        # Adds 2 and wraps the result in the named tuple.
        return SingleOut(a + 2)

    def subwf(a: int) -> SingleOut:
        return t1(a=a)

    def wf(b: int) -> SingleOut:
        intermediate = subwf(a=b)
        return t1(a=intermediate.named1)

    # 3 -> 5 (t1 in subwf) -> 7 (outer t1); NamedTuple compares as a tuple.
    assert wf(b=3) == (7,)
class OptionSeriesVectorSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Lowpass-resonance mapping options for sonification default instruments.

    Fixed in review: each option was defined twice (getter then setter with
    the same name), so the second definition silently shadowed the first.
    Restored as property getter/setter pairs, the usual Options pattern.
    """

    @property
    def mapFunction(self):
        """Mapping function for the resonance value (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the resonance is mapped to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Group the mapping applies within (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_colors_whole_table_supports_ansi_false(data, header, footer, fg_colors, bg_colors):
    """With ANSI disabled via the environment, the rendered table must match
    the plain (uncolored) output exactly.

    Fixed in review: the env var is now restored in a ``finally`` block so a
    failing assertion no longer leaks ANSI_COLORS_DISABLED into later tests,
    and any pre-existing value is preserved rather than unconditionally
    deleted.
    """
    previous = os.environ.get('ANSI_COLORS_DISABLED')
    os.environ['ANSI_COLORS_DISABLED'] = 'True'
    try:
        result = table(data, header=header, footer=footer, divider=True,
                       fg_colors=fg_colors, bg_colors=bg_colors)
        assert (result == '\nCOL A COL B COL 3 \n ----- \nHello World \nThis is a test World 1234 \n ----- \n 2030203.00\n')
    finally:
        # Restore the environment even if the assertion fails.
        if previous is None:
            os.environ.pop('ANSI_COLORS_DISABLED', None)
        else:
            os.environ['ANSI_COLORS_DISABLED'] = previous
def generic_upload_dataset_if_not_exists(client: 'foundry_dev_tools.foundry_api_client.FoundryRestClient | MockFoundryRestClient', name='iris_new', upload_folder: 'Path | None'=None, foundry_schema=None) -> 'tuple[str, str, str, str, bool]':
    """Ensure an integration-test dataset exists on 'master', creating it on
    first use and optionally uploading a folder of files plus a schema.

    :param client: Foundry REST client (real or mock).
    :param name: dataset name under the integration-test Compass root.
    :param upload_folder: local folder whose files are uploaded recursively.
    :param foundry_schema: optional schema to attach after the upload.
    :return: (dataset_rid, dataset_path, transaction_rid, branch,
        newly_created); transaction_rid is None when nothing was uploaded.
    """
    ds_path = f'{INTEGRATION_TEST_COMPASS_ROOT_PATH}/{name}'
    ds_branch = 'master'
    newly_created = False
    try:
        # Reuse the dataset if it already exists on the branch.
        identity = client.get_dataset_identity(dataset_path_or_rid=ds_path, branch=ds_branch)
        rid = identity['dataset_rid']
        transaction_rid = identity['last_transaction_rid']
    except DatasetNotFoundError:
        # First run: create the dataset and its branch.
        rid = client.create_dataset(ds_path)['rid']
        _ = client.create_branch(rid, ds_branch)
        newly_created = True
        if upload_folder:
            recursive_listing = glob.glob(os.fspath((upload_folder / '**')), recursive=True)
            # Map branch-relative posix paths to absolute local file paths.
            path_file_dict = {}
            for file in recursive_listing:
                pfile = Path(file)
                if (not pfile.is_dir()):
                    path_file_dict[pfile.relative_to(upload_folder).as_posix().lstrip('/')] = file
            # Upload everything in a single SNAPSHOT transaction.
            transaction_rid = client.open_transaction(dataset_rid=rid, mode='SNAPSHOT')
            client.upload_dataset_files(dataset_rid=rid, transaction_rid=transaction_rid, path_file_dict=path_file_dict)
            client.commit_transaction(dataset_rid=rid, transaction_id=transaction_rid)
            if foundry_schema:
                client.upload_dataset_schema(rid, transaction_rid, foundry_schema, ds_branch)
        else:
            # Nothing uploaded: no transaction to report.
            transaction_rid = None
    return (rid, ds_path, transaction_rid, ds_branch, newly_created)
('rig')
def check_root_node_name(progress_controller=None):
    """Publish check: validate the scene's root node name against the asset.

    Raises PublishError when the root node name ends with a digit, contains
    a namespace, or does not start with the parent asset's name.
    NOTE(review): the bare ``('rig')`` line above this function looks like a
    decorator whose ``@``-prefix was stripped — confirm against the original
    publisher registration.

    :param progress_controller: optional progress reporter; a default
        ProgressControllerBase is created when omitted.
    """
    if (progress_controller is None):
        progress_controller = ProgressControllerBase()
    progress_controller.maximum = 2
    root_nodes = auxiliary.get_root_nodes()
    from anima.dcc import mayaEnv
    m_env = mayaEnv.Maya()
    v = m_env.get_current_version()
    t = v.task
    from stalker import Asset
    # The asset name is only known when the task's parent is an Asset.
    asset_name = None
    if isinstance(t.parent, Asset):
        asset_name = t.parent.name
    progress_controller.increment()
    # Referenced nodes carry a namespace; strip it to get the bare name.
    if (not root_nodes[0].isReferenced()):
        root_node_name = root_nodes[0].name()
    else:
        root_node_name = str(root_nodes[0].stripNamespace())
    progress_controller.increment()
    if root_node_name[(- 1)].isdigit():
        progress_controller.complete()
        raise PublishError('The name of the root node should not end with a number')
    # The remaining checks only apply when an owning asset was found.
    if (asset_name is not None):
        asset_name = asset_name.replace(' ', '_')
        # Node names cannot start with a digit; prefix with an underscore.
        if asset_name[0].isdigit():
            asset_name = ('_%s' % asset_name)
        if (':' in root_node_name):
            raise PublishError('Imported namespaces are not allowed in non-referenced root node names')
        if (not root_node_name.lower().startswith(asset_name.lower())):
            progress_controller.complete()
            raise PublishError("The name of the root node should start with asset's name")
    progress_controller.complete()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.