code stringlengths 281 23.7M |
|---|
class OptionPlotoptionsArearangeSonificationContexttracksMappingPan(Options):
    """Pan mapping options for arearange sonification context tracks.

    The source had each accessor defined twice with the same name, so the
    setter shadowed the getter. The getter/setter pairs strongly imply the
    ``@property`` / ``@<name>.setter`` decorators were stripped; restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function for the property (get)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property to map to (get)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value (get)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value (get)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range to map within (get)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_strict_allow_default_bug():
    """strict_allow_default must still reject unknown fields in the record.

    The original docstring was truncated to a lone quote (a syntax error);
    replaced with a proper one. The record's only key ('eggs') is not in the
    schema, so writing with strict_allow_default enabled must raise.
    """
    schema = {'namespace': 'namespace', 'name': 'name', 'type': 'record', 'fields': [{'name': 'some_field', 'type': 'string', 'default': 'test'}]}
    test_record = {'eggs': 'eggs'}
    with pytest.raises(ValueError, match='record contains more fields .*? eggs'):
        roundtrip(schema, [test_record], writer_kwargs={'strict_allow_default': True})
class SVSession(RPCSession):
    """A client session to a single ElectrumX-style server.

    Class-level state is shared by all sessions so that concurrent sessions
    cooperate on header download and script-hash subscription bookkeeping.
    """

    # CA bundle path used for TLS connections.
    ca_path = certifi.where()
    # Tips currently being connected, keyed by raw header bytes; other
    # sessions wait on the event rather than duplicating the catch-up work.
    _connecting_tips: Dict[bytes, asyncio.Event] = {}
    # True until the headers below the checkpoint have been fetched.
    _need_checkpoint_headers = True
    # Script hashes subscribed to, per account.
    _subs_by_account: Dict['AbstractAccount', List[str]] = {}
    # Maps a script hash to its (keyinstance_id, script_type).
    _keyinstance_map: Dict[str, Tuple[int, ScriptType]] = {}

    def __init__(self, network, server, logger, *args, **kwargs):
        """Bind this session to its owning network, server record and logger."""
        super().__init__(*args, **kwargs)
        self._handlers = {}
        self._network = network
        self._closed_event = app_state.async_.event()
        self.chain = None  # chain the server is following, once known
        self.logger = logger
        self.server = server
        self.tip = None  # server's deserialized tip header, once connected
        self.ptuple = (0,)  # negotiated protocol version tuple
def set_throttled(self, flag: bool) -> None:
if flag:
RPCSession.recalibrate_count = 30
else:
RPCSession.recalibrate_count =
    def get_current_outgoing_concurrency_target(self) -> int:
        """Current cap on concurrent outgoing requests (from aiorpcX)."""
        return self._outgoing_concurrency.max_concurrent
def default_framer(self) -> NewlineFramer:
max_size = ((app_state.electrumx_message_size_limit() * 1024) * 1024)
return NewlineFramer(max_size=max_size)
def _required_checkpoint_headers(cls):
if cls._need_checkpoint_headers:
headers_obj = app_state.headers
chain = headers_obj.longest_chain()
cp_height = headers_obj.checkpoint.height
if (cp_height == 0):
cls._need_checkpoint_headers = False
else:
try:
for height in range((cp_height - 146), cp_height):
headers_obj.header_at_height(chain, height)
cls._need_checkpoint_headers = False
except MissingHeader:
return (height, (cp_height - height))
return (0, 0)
def _connect_header(cls, height, raw_header):
headers_obj = app_state.headers
checkpoint = headers_obj.checkpoint
if (height <= checkpoint.height):
headers_obj.set_one(height, raw_header)
headers_obj.flush()
header = Net.COIN.deserialized_header(raw_header, height)
return (header, headers_obj.longest_chain())
else:
return app_state.headers.connect(raw_header)
def _connect_chunk(cls, start_height, raw_chunk):
headers_obj = app_state.headers
checkpoint = headers_obj.checkpoint
coin = headers_obj.coin
end_height = (start_height + (len(raw_chunk) // HEADER_SIZE))
def extract_header(height):
start = ((height - start_height) * HEADER_SIZE)
return raw_chunk[start:(start + HEADER_SIZE)]
def verify_chunk_contiguous_and_set(next_raw_header, to_height):
for height in reversed(range(start_height, to_height)):
raw_header = extract_header(height)
if (coin.header_prev_hash(next_raw_header) != coin.header_hash(raw_header)):
raise MissingHeader('prev_hash does not connect')
headers_obj.set_one(height, raw_header)
next_raw_header = raw_header
try:
if (end_height < checkpoint.height):
last_header = extract_header((end_height - 1))
headers_obj.set_one((end_height - 1), last_header)
verify_chunk_contiguous_and_set(last_header, (end_height - 1))
return headers_obj.longest_chain()
verify_chunk_contiguous_and_set(checkpoint.raw_header, checkpoint.height)
chain = None
for height in range(max((checkpoint.height + 1), start_height), end_height):
(_header, chain) = headers_obj.connect(extract_header(height))
return (chain or headers_obj.longest_chain())
finally:
headers_obj.flush()
    async def _negotiate_protocol(self):
        """Negotiate the protocol version via server.version.

        Any malformed or out-of-range reply disconnects and blacklists the
        server. On success ``self.ptuple`` holds the negotiated version.
        """
        method = 'server.version'
        args = (PACKAGE_VERSION, [version_string(PROTOCOL_MIN), version_string(PROTOCOL_MAX)])
        try:
            (server_string, protocol_string) = (await self.send_request(method, args))
            assert isinstance(server_string, str)
            assert isinstance(protocol_string, str)
            self.logger.debug(f'server string: {server_string}')
            self.logger.debug(f'negotiated protocol: {protocol_string}')
            self.ptuple = protocol_tuple(protocol_string)
            # Protocol versions are 2- or 3-part tuples within our range.
            assert (len(self.ptuple) in (2, 3))
            assert (PROTOCOL_MIN <= self.ptuple <= PROTOCOL_MAX)
        except (AssertionError, TypeError, ValueError) as e:
            raise DisconnectSessionError(f'{method} failed: {e}', blacklist=True)
async def _get_checkpoint_headers(self):
while True:
(start_height, count) = self._required_checkpoint_headers()
if (not count):
break
logger.info(f'{count:,d} checkpoint headers needed')
(await self._request_chunk(start_height, count))
    async def _request_chunk(self, height, count):
        """Request ``count`` headers from ``height`` and connect them.

        Requests a checkpoint merkle proof when the chunk lies entirely
        below the checkpoint. Returns the height of the last header
        received; disconnects (blacklisting) on any malformed response.
        """
        self.logger.info(f'requesting {count:,d} headers from height {height:,d}')
        method = 'blockchain.block.headers'
        cp_height = app_state.headers.checkpoint.height
        if ((height + count) >= cp_height):
            # Chunk crosses the checkpoint; no proof is applicable.
            cp_height = 0
        try:
            result = (await self.send_request(method, (height, count, cp_height)))
            rec_count = result['count']
            last_height = ((height + rec_count) - 1)
            if (count != rec_count):
                self.logger.info(f'received just {rec_count:,d} headers')
            raw_chunk = bytes.fromhex(result['hex'])
            assert (len(raw_chunk) == (HEADER_SIZE * rec_count))
            if cp_height:
                # Verify the chunk's last header against the checkpoint root.
                hex_root = result['root']
                branch = [hex_str_to_hash(item) for item in result['branch']]
                self._check_header_proof(hex_root, branch, raw_chunk[(- HEADER_SIZE):], last_height)
            self.chain = self._connect_chunk(height, raw_chunk)
        except (AssertionError, KeyError, TypeError, ValueError, IncorrectBits, InsufficientPoW, MissingHeader) as e:
            raise DisconnectSessionError(f'{method} failed: {e}', blacklist=True)
        self.logger.info(f'connected {rec_count:,d} headers up to height {last_height:,d}')
        return last_height
async def _subscribe_headers(self):
self._handlers[HEADERS_SUBSCRIBE] = self._on_new_tip
tip = (await self.send_request(HEADERS_SUBSCRIBE))
(await self._on_new_tip(tip))
def _secs_to_next_ping(self):
return ((self.last_send + 300) - time.time())
    async def _ping_loop(self):
        """Keep the connection alive with periodic server.ping requests."""
        method = 'server.ping'
        while True:
            (await sleep(self._secs_to_next_ping()))
            # Another request may have reset the timer while we slept;
            # only ping if the deadline is (nearly) due.
            if (self._secs_to_next_ping() < 1):
                self.logger.debug(f'sending {method}')
                (await self.send_request(method))
    def _check_header_proof(self, hex_root, branch, raw_header, height):
        """Verify a header's merkle proof against the trusted checkpoint root.

        Raises DisconnectSessionError (blacklisting the server) on mismatch.
        """
        expected_root = Net.VERIFICATION_BLOCK_MERKLE_ROOT
        if (hex_root != expected_root):
            raise DisconnectSessionError(f'bad header merkle root {hex_root} expected {expected_root}', blacklist=True)
        header = Net.COIN.deserialized_header(raw_header, height)
        proven_root = hash_to_hex_str(_root_from_proof(header.hash, branch, height))
        if (proven_root != expected_root):
            raise DisconnectSessionError(f'invalid header proof {proven_root} expected {expected_root}', blacklist=True)
        self.logger.debug(f'good header proof for height {height}')
    async def _on_new_tip(self, json_tip):
        """Handle a headers-subscription notification of the server's tip.

        Repeatedly tries to connect the tip, catching up on missing headers
        between attempts; disconnects on invalid input or bad PoW.
        """
        try:
            raw_header = bytes.fromhex(json_tip['hex'])
            height = json_tip['height']
            assert isinstance(height, int), 'height must be an integer'
        except Exception as e:
            raise DisconnectSessionError(f'error connecting tip: {e} {json_tip}')
        if (height < Net.CHECKPOINT.height):
            raise DisconnectSessionError(f'server tip height {height:,d} below checkpoint')
        # Invalidate cached chain state until the new tip connects.
        self.chain = None
        self.tip = None
        tip = Net.COIN.deserialized_header(raw_header, height)
        while True:
            try:
                (self.tip, self.chain) = self._connect_header(tip.height, tip.raw)
                self.logger.debug(f'connected tip at height {height:,d}')
                self._network.check_main_chain_event.set()
                return
            except (IncorrectBits, InsufficientPoW) as e:
                raise DisconnectSessionError(f'bad header provided: {e}', blacklist=True)
            except MissingHeader:
                # Gap between stored headers and the tip; fetch and retry.
                pass
            (await self._catch_up_to_tip_throttled(tip))
    async def _catch_up_to_tip_throttled(self, tip):
        """Catch up to ``tip``, ensuring only one session does the work.

        Sessions connecting the same tip wait on a shared per-tip event
        instead of duplicating the download.
        """
        done_event = SVSession._connecting_tips.get(tip.raw)
        if done_event:
            self.logger.debug(f'another session is connecting my tip {tip.hex_str()}')
            (await done_event.wait())
        else:
            self.logger.debug(f'connecting my own tip {tip.hex_str()}')
            SVSession._connecting_tips[tip.raw] = app_state.async_.event()
            try:
                (await self._catch_up_to_tip(tip))
            finally:
                # Wake all waiters even if the catch-up failed.
                SVSession._connecting_tips.pop(tip.raw).set()
    async def _catch_up_to_tip(self, tip):
        """Fetch headers between the checkpoint and the server's tip.

        Probes heights spaced at doubling intervals below the tip to find
        where our headers diverge cheaply, then fills forward in
        2016-header chunks.
        """
        headers_obj = app_state.headers
        cp_height = headers_obj.checkpoint.height
        max_height = max((chain.height for chain in headers_obj.chains()))
        heights = [(cp_height + 1)]
        step = 1
        height = min(tip.height, max_height)
        while (height > cp_height):
            heights.append(height)
            height -= step
            step += step  # double the gap each iteration
        height = (await self._request_headers_at_heights(heights))
        while (height < tip.height):
            height = (await self._request_chunk((height + 1), 2016))
async def _subscribe_to_script_hash(self, script_hash: str) -> None:
status = (await self.send_request(SCRIPTHASH_SUBSCRIBE, [script_hash]))
(await self._on_queue_status_changed(script_hash, status))
async def _unsubscribe_from_script_hash(self, script_hash: str) -> bool:
return (await self.send_request(SCRIPTHASH_UNSUBSCRIBE, [script_hash]))
    async def _on_status_changed(self, script_hash: str, status: str) -> None:
        """Process a script-hash status change: fetch and apply new history.

        Only accounts whose cached history status differs from the server's
        are updated. A malformed history requeues the notification and
        disconnects.
        """
        keydata = self._keyinstance_map.get(script_hash)
        if (keydata is None):
            self.logger.error(f'received status notification for unsubscribed {script_hash}')
            return
        (keyinstance_id, script_type) = keydata
        # Accounts subscribed to this script hash whose local status is stale.
        accounts = [account for (account, subs) in self._subs_by_account.items() if ((script_hash in subs) and (_history_status(account.get_key_history(keyinstance_id, script_type)) != status))]
        if (not accounts):
            return
        result = (await self.request_history(script_hash))
        self.logger.debug(f'received history of {keyinstance_id} length {len(result)}')
        try:
            history = [(item['tx_hash'], item['height']) for item in result]
            # Fees are only present for unconfirmed transactions.
            tx_fees = {item['tx_hash']: item['fee'] for item in result if ('fee' in item)}
            assert (len(set((tx_hash for (tx_hash, tx_height) in history))) == len(history)), f'server history for {keyinstance_id} has duplicate transactions'
        except (AssertionError, KeyError) as e:
            # Requeue so another session can retry, then drop this one.
            self._network._on_status_queue.put_nowait((script_hash, status))
            raise DisconnectSessionError(f'bad history returned: {e}')
        hstatus = _history_status(history)
        if (hstatus != status):
            self.logger.warning(f'history status mismatch {hstatus} vs {status} for {keyinstance_id}')
        for account in accounts:
            if (history != account.get_key_history(keyinstance_id, script_type)):
                self.logger.debug('_on_status_changed new=%s old=%s', history, account.get_key_history(keyinstance_id, script_type))
            (await account.set_key_history(keyinstance_id, script_type, history, tx_fees))
    async def _main_server_batch(self):
        """Fetch banner, donation address and peer list from the main server.

        All three requests are batched; a malformed response disconnects.
        """
        async with timeout_after(10):
            async with self.send_batch(raise_errors=True) as batch:
                batch.add_request('server.banner')
                batch.add_request('server.donation_address')
                batch.add_request('server.peers.subscribe')
        server = self.server
        try:
            server.state.banner = _require_string(batch.results[0])
            server.state.donation_address = _require_string(batch.results[1])
            server.state.peers = self._parse_peers_subscribe(batch.results[2])
            self._network.trigger_callback('banner')
        except AssertionError as e:
            raise DisconnectSessionError(f'main server requests bad batch response: {e}')
def _parse_peers_subscribe(self, result):
peers = []
for host_details in _require_list(result):
host_details = _require_list(host_details)
host = _require_string(host_details[1])
for v in host_details[2]:
if re.match('[st]\\d*', _require_string(v)):
(protocol, port) = (v[0], v[1:])
try:
peers.append(SVServer.unique(host, port, protocol))
except ValueError:
pass
self.logger.info(f'{len(peers)} servers returned from server.peers.subscribe')
return peers
    async def _request_headers_at_heights(self, heights):
        """Request and connect individual headers at the given heights.

        Heights at or below the checkpoint are verified via merkle proof.
        Connection stops silently at the first header that does not connect
        (MissingHeader); returns the highest successfully connected height,
        or -1. Failing to connect any checkpoint-covered height disconnects
        and blacklists the server.
        """
        async def _request_header_batch(batch_heights):
            nonlocal good_height
            self.logger.debug(f'requesting {len(batch_heights):,d} headers at heights {batch_heights}')
            async with timeout_after(10):
                async with self.send_batch(raise_errors=True) as batch:
                    for height in batch_heights:
                        # Ask for a proof only below the checkpoint.
                        batch.add_request(method, (height, (cp_height if (height <= cp_height) else 0)))
            try:
                for (result, height) in zip(batch.results, batch_heights):
                    if (height <= cp_height):
                        hex_root = result['root']
                        branch = [hex_str_to_hash(item) for item in result['branch']]
                        raw_header = bytes.fromhex(result['header'])
                        self._check_header_proof(hex_root, branch, raw_header, height)
                    else:
                        raw_header = bytes.fromhex(result)
                        (_header, self.chain) = self._connect_header(height, raw_header)
                    good_height = height
            except MissingHeader:
                # Expected while probing for the divergence point.
                hex_str = hash_to_hex_str(Net.COIN.header_hash(raw_header))
                self.logger.info(f'failed to connect at height {height:,d}, hash {hex_str} last good {good_height:,d}')
            except (AssertionError, KeyError, TypeError, ValueError) as e:
                raise DisconnectSessionError(f'bad {method} response: {e}')
        heights = sorted(set(heights))
        cp_height = Net.CHECKPOINT.height
        method = 'blockchain.block.header'
        good_height = (- 1)
        # Every height covered by the checkpoint proof must connect.
        min_good_height = max((height for height in heights if (height <= cp_height)), default=(- 1))
        for chunk in chunks(heights, 100):
            (await _request_header_batch(chunk))
        if (good_height < min_good_height):
            raise DisconnectSessionError(f'cannot connect to checkpoint', blacklist=True)
        return good_height
async def handle_request(self, request):
if isinstance(request, Notification):
handler = self._handlers.get(request.method)
else:
handler = None
coro = handler_invocation(handler, request)()
return (await coro)
async def connection_lost(self):
(await super().connection_lost())
self._closed_event.set()
async def disconnect(self, reason, *, blacklist=False):
if blacklist:
self.server.state.last_blacklisted = time.time()
self.logger.error(f'disconnecting and blacklisting: {reason}')
else:
self.logger.error(f'disconnecting: {reason}')
(await self.close())
    async def run(self):
        """Session main loop: handshake, sync headers, serve until closed."""
        (await self._negotiate_protocol())
        (await self._get_checkpoint_headers())
        (await self._subscribe_headers())
        is_main_server = (await self._network.session_established(self))
        try:
            # Startup succeeded; reset the reconnect backoff.
            self.server.state.retry_delay = 0
            async with TaskGroup() as group:
                if is_main_server:
                    self.logger.info('using as main server')
                    (await group.spawn(self._main_server_batch))
                (await group.spawn(self._ping_loop))
                (await self._closed_event.wait())
                (await group.cancel_remaining())
        finally:
            (await self._network.session_closed(self))
    async def headers_at_heights(self, heights):
        """Return {height: header} on this session's chain, fetching any missing."""
        result = {}
        missing = []
        header_at_height = app_state.headers.header_at_height
        for height in set(heights):
            try:
                result[height] = header_at_height(self.chain, height)
            except MissingHeader:
                missing.append(height)
        if missing:
            (await self._request_headers_at_heights(missing))
            # All requested heights are now expected to be present.
            for height in missing:
                result[height] = header_at_height(self.chain, height)
            self._network.trigger_callback('updated')
        return result
async def request_tx(self, tx_id: str):
return (await self.send_request('blockchain.transaction.get', [tx_id]))
async def request_proof(self, *args):
return (await self.send_request(REQUEST_MERKLE_PROOF, args))
async def request_history(self, script_hash):
return (await self.send_request(SCRIPTHASH_HISTORY, [script_hash]))
async def _on_queue_status_changed(self, script_hash: str, status: str) -> None:
item = (script_hash, status)
self._network._on_status_queue.put_nowait(item)
    async def subscribe_to_triples(self, account: 'AbstractAccount', triples) -> None:
        """Subscribe an account to (keyinstance_id, script_type, script_hash) triples.

        Progress counters on the account are bumped as each subscription
        completes so the wallet can report sync progress.
        """
        self._handlers[SCRIPTHASH_SUBSCRIBE] = self._on_queue_status_changed
        if (account not in self._subs_by_account):
            self._subs_by_account[account] = []
        subs = self._subs_by_account[account]
        async with TaskGroup() as group:
            for (keyinstance_id, script_type, script_hash) in triples:
                subs.append(script_hash)
                self._keyinstance_map[script_hash] = (keyinstance_id, script_type)
                (await group.spawn(self._subscribe_to_script_hash(script_hash)))
            # NOTE(review): subs was already extended above, so this delta is
            # computed after the appends — confirm the intended count.
            account.request_count += len((set(triples) - set(subs)))
            account._wallet.progress_event.set()
            while (await group.next_done()):
                account.response_count += 1
                account._wallet.progress_event.set()
        assert (len(set(subs)) == len(subs)), 'account subscribed to the same keys twice'
    async def unsubscribe_from_pairs(self, account: 'AbstractAccount', pairs) -> None:
        """Unsubscribe an account's script hashes not shared by other accounts."""
        subs = self._subs_by_account[account]
        # Only hashes no other account is also subscribed to may be dropped.
        exclusive_subs = self._get_exclusive_set(account, subs)
        async with TaskGroup() as group:
            for (keyinstance_id, script_type, script_hash) in pairs:
                if (script_hash not in exclusive_subs):
                    continue
                if (script_hash not in subs):
                    continue
                subs.remove(script_hash)
                del self._keyinstance_map[script_hash]
                (await group.spawn(self._unsubscribe_from_script_hash(script_hash)))
def _get_exclusive_set(cls, account: 'AbstractAccount', subs: List[str]) -> set:
subs_set = set(subs)
for (other_account, other_subs) in cls._subs_by_account.items():
if (other_account == account):
continue
subs_set -= set(other_subs)
return subs_set
async def unsubscribe_account(cls, account: 'AbstractAccount', session):
subs = cls._subs_by_account.pop(account, None)
if (subs is None):
return
if (not session):
return
exclusive_subs = cls._get_exclusive_set(account, subs)
if (not exclusive_subs):
return
if (session.ptuple < (1, 4, 2)):
logger.debug('negotiated protocol does not support unsubscribing')
return
logger.debug(f'unsubscribing {len(exclusive_subs)} subscriptions for {account}')
async with TaskGroup() as group:
for script_hash in exclusive_subs:
(await group.spawn(session._unsubscribe_from_script_hash(script_hash)))
logger.debug(f'unsubscribed {len(exclusive_subs)} subscriptions for {account}') |
# The decorator name was stripped in the source, leaving only its argument
# list; a version-gated skipif is the conventional guard for this check.
# TODO(review): confirm the project imports pytest's mark this way.
@pytest.mark.skipif(MAGICK_VERSION_NUMBER < 1801, reason='Trim by percent-background requires ImagesMagick-7.0.9')
def test_trim_percent_background():
    """trim() with percent_background should shrink the built-in image."""
    with Image(filename='wizard:') as img:
        was = img.size
        img.trim(fuzz=0.0, percent_background=0.5, background_color='white')
        assert img.size != was
def span_position_distance_breakends(candidate1, candidate2):
    """Positional distance between two breakend candidates.

    Each candidate is (haplotype, pos1, dir1, pos2, dir2). A small
    normalized distance is returned only when the candidates come from
    different haplotypes AND share both breakend directions; otherwise a
    large sentinel (99999) keeps them apart.
    """
    hap_a, pos_a1, dir_a1, pos_a2, dir_a2 = candidate1
    hap_b, pos_b1, dir_b1, pos_b2, dir_b2 = candidate2
    if hap_a == hap_b or dir_a1 != dir_b1 or dir_a2 != dir_b2:
        return 99999
    return (abs(pos_a1 - pos_b1) + abs(pos_a2 - pos_b2)) / 3000
def extractTrialntriedWordpressCom(item):
    """Map a 'Trialntried' feed item to a release message.

    Returns None for previews or items with no volume/chapter, the built
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_numbering = chp or vol
    if not has_numbering or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag_name, series_name, tl_type in tagmap:
        if tag_name not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsColumnpyramidSonificationContexttracksMappingNoteduration(Options):
    """Note-duration mapping options for columnpyramid sonification tracks.

    The source had each accessor defined twice with the same name, so the
    setter shadowed the getter. The getter/setter pairs strongly imply the
    ``@property`` / ``@<name>.setter`` decorators were stripped; restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function for the property (get)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property to map to (get)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value (get)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value (get)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range to map within (get)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# The decorator names were stripped in the source, leaving only argument
# lists; the argument pattern (dotted target string, optional MagicMock
# replacement, mock parameters injected bottom-up) matches unittest.mock's
# ``patch``. Restored accordingly — the bottom-most decorator supplies the
# first test argument.
@patch((API + '.get_dataset_identity'), MagicMock(return_value={'dataset_rid': DATASET_RID, 'dataset_path': DATASET_PATH, 'last_transaction_rid': TRANSACTION_RID, 'last_transaction': {'rid': TRANSACTION_RID, 'transaction': {'record': {'view': True}, 'metadata': {'fileCount': 1, 'hiddenFileCount': 0, 'totalFileSize': 0, 'totalHiddenFileSize': 0}}}}))
@patch(API + '.get_dataset_schema')
@patch(API + '.query_foundry_sql')
@patch(API + '.is_dataset_in_trash')
@patch(API + '.get_dataset_rid')
def test_load_dataset_is_view(get_dataset_rid, is_dataset_in_trash, query_foundry_sql, get_dataset_schema, mocker):
    """A view dataset is fetched via SQL once, then served from the cache."""
    from_cache = mocker.spy(CachedFoundryClient, '_return_local_path_of_cached_dataset')
    fdt = CachedFoundryClient()
    get_dataset_rid.return_value = DATASET_RID
    is_dataset_in_trash.return_value = False
    df = get_spark_session().createDataFrame([[1]], 'col1:int')
    query_foundry_sql.return_value = df
    get_dataset_schema.return_value = {'fieldSchemaList': [{'type': 'INTEGER', 'name': 'col1', 'nullable': True, 'customMetadata': {}}], 'dataFrameReaderClass': 'com.palantir.foundry.spark.input.ParquetDataFrameReader', 'customMetadata': {'format': 'parquet', 'options': {}}}
    spark_df = fdt.load_dataset(DATASET_PATH, 'master')
    from_cache.assert_called()
    assert query_foundry_sql.call_count == 1
    from_cache.reset_mock()
    assert_frame_equal(spark_df.toPandas(), df.toPandas())
    # Second load must hit the cache: no extra SQL query.
    fdt.load_dataset(DATASET_PATH, 'master')
    assert query_foundry_sql.call_count == 1
    from_cache.assert_called()
class TraceIDMiddleware(BaseHTTPMiddleware):
    """Starlette middleware that opens a root tracer span for API requests.

    Requests outside ``include_prefix`` or listed in ``exclude_paths`` pass
    through untraced.
    """

    def __init__(self, app: ASGIApp, trace_context_var: ContextVar[TracerContext], tracer: Tracer, root_operation_name: str='DB-GPT-Web-Entry', include_prefix: str='/api', exclude_paths=_DEFAULT_EXCLUDE_PATHS):
        super().__init__(app)
        self.trace_context_var = trace_context_var
        self.tracer = tracer
        self.root_operation_name = root_operation_name
        self.include_prefix = include_prefix
        self.exclude_paths = exclude_paths

    async def dispatch(self, request: Request, call_next):
        """Wrap the downstream call in a span unless the path is excluded."""
        path = request.url.path
        if path in self.exclude_paths or not path.startswith(self.include_prefix):
            return await call_next(request)
        # Honour an inbound span id so distributed traces stay connected.
        span_id = request.headers.get('DBGPT_TRACER_SPAN_ID')
        with self.tracer.start_span(self.root_operation_name, span_id, metadata={'path': path}):
            return await call_next(request)
def test_set_check_modes(tfm):
    """set_check_modes toggles freqs/data validation on a SpectralModel.

    A stray dataset-separator token at the end of the original block was
    removed. NOTE(review): the ``tfm`` fixture argument is immediately
    replaced — presumably to get a quiet, fresh model; confirm whether the
    fixture is still needed in the signature.
    """
    tfm = SpectralModel(verbose=False)
    tfm.set_check_modes(False, False)
    assert tfm._check_freqs is False
    assert tfm._check_data is False
    # With freq checks off, irregularly spaced freqs are accepted.
    freqs = np.array([1, 2, 4])
    powers = np.array([1, 2, 3])
    tfm.add_data(freqs, powers)
    assert tfm.has_data
    # With data checks off, NaN power data is accepted, but the fit fails.
    freqs = gen_freqs([3, 30], 1)
    powers = np.ones_like(freqs) * np.nan
    tfm.add_data(freqs, powers)
    assert tfm.has_data
    tfm.fit()
    assert not tfm.has_model
    # Re-enable both checks.
    tfm.set_check_modes(True, True)
    assert tfm._check_freqs is True
    assert tfm._check_data is True
class BucketWorker(Thread):
    """Worker thread that pops candidate S3 bucket names off a queue and
    checks whether they exist — via boto when AWS credentials are
    configured, otherwise via anonymous HTTP probes.

    Several lines were corrupted in the source (truncated mount prefix,
    dispatch call, method name and group-URI literal); reconstructed below.
    """

    def __init__(self, q, *args, **kwargs):
        self.q = q
        # Use the AWS SDK only when both credentials were configured.
        self.use_aws = CONFIG['aws_access_key'] and CONFIG['aws_secret']
        if self.use_aws:
            self.session = Session(aws_access_key_id=CONFIG['aws_access_key'], aws_secret_access_key=CONFIG['aws_secret']).resource('s3')
        else:
            self.session = requests.Session()
            # Reconstructed: bucket probes go over plain HTTP, so the pool
            # settings are mounted on the http:// adapter.
            self.session.mount('http://', HTTPAdapter(pool_connections=ARGS.threads, pool_maxsize=QUEUE_SIZE, max_retries=0))
        super().__init__(*args, **kwargs)

    def run(self):
        """Consume bucket URLs until the global stop event is set."""
        global THREAD_EVENT
        while not THREAD_EVENT.is_set():
            try:
                bucket_url = self.q.get()
                # Reconstructed dispatch: authenticated vs anonymous check.
                (self.__check_boto(bucket_url) if self.use_aws else self.__check_http(bucket_url))
            except Exception as e:
                print(e)
                pass
            finally:
                self.q.task_done()

    def __check_http(self, bucket_url):
        """Probe a bucket anonymously via a HEAD request against S3."""
        check_response = self.session.head(S3_URL, timeout=3, headers={'Host': bucket_url})
        if (not ARGS.ignore_rate_limiting) and (check_response.status_code == 503 and check_response.reason == 'Slow Down'):
            # Rate limited: flag the queue and requeue for a later retry.
            self.q.rate_limited = True
            self.q.put(bucket_url)
        elif check_response.status_code == 307:
            # 307 means the bucket exists; follow to its regional endpoint.
            new_bucket_url = check_response.headers['Location']
            bucket_response = requests.request(('GET' if ARGS.only_interesting else 'HEAD'), new_bucket_url, timeout=3)
            if (bucket_response.status_code == 200) and ((not ARGS.only_interesting) or (ARGS.only_interesting and any(((keyword in bucket_response.text) for keyword in KEYWORDS)))):
                self.__output("Found bucket '{}'".format(new_bucket_url), 'green')
                self.__log(new_bucket_url)

    def __check_boto(self, bucket_url):
        """Probe a bucket with credentials, reporting owner and ACLs."""
        bucket_name = bucket_url.replace('.s3.amazonaws.com', '')
        try:
            self.session.meta.client.head_bucket(Bucket=bucket_name)
            if (not ARGS.only_interesting) or (ARGS.only_interesting and self.__bucket_contains_any_keywords(bucket_name)):
                owner = None
                acls = None
                try:
                    acl = self.session.meta.client.get_bucket_acl(Bucket=bucket_name)
                    owner = acl['Owner']['DisplayName']
                    acls = '. ACLs = {} | {}'.format(self.__get_group_acls(acl, 'AllUsers'), self.__get_group_acls(acl, 'AuthenticatedUsers'))
                except Exception:
                    # Best-effort: the bucket exists but its ACL is unreadable.
                    acls = '. ACLS = (could not read)'
                color = 'green' if not owner else 'magenta'
                self.__output("Found bucket '{}'. Owned by '{}'{}".format(bucket_url, (owner if owner else '(unknown)'), acls), color)
                self.__log(bucket_url)
        except Exception as e:
            # Non-existent / inaccessible bucket; deliberately ignored.
            pass

    def __get_group_acls(self, acl, group):
        """Summarize the permissions granted to a global grantee group."""
        # Reconstructed literal: S3 global groups live under this namespace.
        group_uri = ('http://acs.amazonaws.com/groups/global/%s' % group)
        perms = [g['Permission'] for g in acl['Grants'] if ((g['Grantee']['Type'] == 'Group') and (g['Grantee']['URI'] == group_uri))]
        return '{}: {}'.format(group, (', '.join(perms) if perms else '(none)'))

    def __bucket_contains_any_keywords(self, bucket_name):
        """True when any object key in the bucket contains a watched keyword."""
        try:
            objects = [o.key for o in self.session.Bucket(bucket_name).objects.all()]
            return any(((keyword in ','.join(objects)) for keyword in KEYWORDS))
        except Exception:
            # Listing denied or failed: treat as not interesting.
            return False

    def __log(self, new_bucket_url):
        """Record a found bucket, optionally appending to the log file."""
        global FOUND_COUNT
        FOUND_COUNT += 1
        if ARGS.log_to_file:
            with open('buckets.log', 'a+') as log:
                log.write('%s%s' % (new_bucket_url, os.linesep))

    def __output(self, line, color=None):
        """Print a result and forward it to the Slack webhook if configured."""
        cprint(line, color, attrs=['bold'])
        if CONFIG['slack_webhook']:
            resp = requests.post(CONFIG['slack_webhook'], data=json.dumps({'text': line}), headers={'Content-Type': 'application/json'})
            if resp.status_code != 200:
                cprint('Could not send to your Slack Webhook. Server returned: %s' % resp.status_code, 'red')
def processJsStacktrace(stack, allowInternal=False):
    """Parse a JS stack trace into (error_line, message_line, lines).

    ``stack`` may be a list of lines or a newline-joined string. With
    ``allowInternal`` every line is kept and the error line falls back to
    '^'. Otherwise the first non-internal frame is located, its source file
    is opened and the offending source line is extracted. Returns None when
    no error line could be determined.
    """
    collected = []
    message_line = ''
    error_line = ''
    found_main_line = False
    raw_lines = stack if type(stack) is list else stack.split('\n')
    for raw in raw_lines:
        if not message_line:
            # The first line of the trace is the error message.
            message_line = raw
        if allowInternal:
            collected.append(raw.strip())
        elif (not isInternal(raw)) and (not found_main_line):
            # Try the three frame formats: "(path:line:col)",
            # "file://path:line:col" and "at path:line:col".
            abs_path = re.search('\\((.*):(\\d+):(\\d+)\\)', raw)
            file_path = re.search('(file:\\/\\/.*):(\\d+):(\\d+)', raw)
            base_path = re.search('at (.*):(\\d+):(\\d+)$', raw)
            match = abs_path or file_path or base_path
            if match:
                fpath, errorline, char = match.groups()
                if fpath.startswith('node:'):
                    # Skip Node.js builtin frames.
                    continue
                with open(fpath, 'r') as f:
                    flines = f.readlines()
                error_line = flines[int(errorline) - 1].strip()
                collected.append(raw.strip())
                found_main_line = True
        elif found_main_line:
            collected.append(raw.strip())
    if allowInternal and not error_line:
        error_line = '^'
    return (error_line, message_line, collected) if error_line else None
def test_new_awards_only_transaction_search_time_period():
    """NewAwardsOnlyTimePeriod decorator: dates, defaults and date types.

    A stray dataset-separator token at the end of the original block was
    removed. Note '9999-99-99' is an intentional out-of-range sentinel for
    the open-ended default end date.
    """
    default_start_date = '1111-11-11'
    default_end_date = '9999-99-99'
    transaction_search = TransactionSearchTimePeriod(default_start_date=default_start_date, default_end_date=default_end_date)
    transaction_search_decorator = NewAwardsOnlyTimePeriod(time_period_obj=transaction_search, query_type=_QueryType.TRANSACTIONS)
    # Fully specified filter: dates pass through, new-awards-only is off.
    time_period_filter = {'start_date': '2020-10-01', 'end_date': '2021-09-30'}
    transaction_search_decorator.filter_value = time_period_filter
    expected_start_date = '2020-10-01'
    expected_end_date = '2021-09-30'
    expected_new_awards_only = False
    assert transaction_search_decorator.start_date() == expected_start_date
    assert transaction_search_decorator.end_date() == expected_end_date
    assert transaction_search_decorator._new_awards_only() == expected_new_awards_only
    # Missing start date falls back to the default.
    time_period_filter = {'end_date': '2021-09-30'}
    transaction_search_decorator.filter_value = time_period_filter
    assert transaction_search_decorator.start_date() == default_start_date
    # Missing end date falls back to the default.
    time_period_filter = {'start_date': '2020-10-01'}
    transaction_search_decorator.filter_value = time_period_filter
    assert transaction_search_decorator.end_date() == default_end_date
    # Empty filter falls back to both defaults.
    time_period_filter = {}
    transaction_search_decorator.filter_value = time_period_filter
    assert transaction_search_decorator.end_date() == default_end_date
    assert transaction_search_decorator.start_date() == default_start_date
    # date_signed is mapped onto the award-level date type.
    time_period_filter = {'date_type': 'date_signed', 'start_date': '2020-10-01', 'end_date': '2021-09-30'}
    transaction_search_decorator.filter_value = time_period_filter
    expected_gte_date_type = 'award_date_signed'
    expected_lte_date_type = 'award_date_signed'
    assert transaction_search_decorator.gte_date_type() == expected_gte_date_type
    assert transaction_search_decorator.lte_date_type() == expected_lte_date_type
    # Without a date_type, the action date is used.
    time_period_filter = {'start_date': '2020-10-01', 'end_date': '2021-09-30'}
    transaction_search_decorator.filter_value = time_period_filter
    expected_gte_date_type = 'action_date'
    expected_lte_date_type = 'action_date'
    assert transaction_search_decorator.gte_date_type() == expected_gte_date_type
    assert transaction_search_decorator.lte_date_type() == expected_lte_date_type
    # new_awards_only enables the flag while keeping the date range.
    time_period_filter = {'date_type': 'new_awards_only', 'start_date': '2020-10-01', 'end_date': '2021-09-30'}
    transaction_search_decorator.filter_value = time_period_filter
    expected_start_date = '2020-10-01'
    expected_end_date = '2021-09-30'
    expected_new_awards_only = True
    assert transaction_search_decorator.start_date() == expected_start_date
    assert transaction_search_decorator.end_date() == expected_end_date
    assert transaction_search_decorator._new_awards_only() == expected_new_awards_only
def get_and_save_remote_with_click_context(ctx: click.Context, project: str, domain: str, save: bool=True, data_upload_location: Optional[str]=None) -> FlyteRemote:
    """Return the FlyteRemote cached on the click context, creating one on demand.

    A freshly created remote is stored back on the context when ``save`` is
    True so subsequent calls reuse it.
    """
    cached = ctx.obj.get(FLYTE_REMOTE_INSTANCE_KEY)
    if cached is not None:
        return cached
    cfg_file_location = ctx.obj.get(CTX_CONFIG_FILE)
    remote = get_plugin().get_remote(cfg_file_location, project, domain, data_upload_location)
    if save:
        ctx.obj[FLYTE_REMOTE_INSTANCE_KEY] = remote
    return remote
class SummaryExtractor(Extractor):
    """Extract a summary for a list of document chunks via map-reduce LLM calls."""

    def __init__(self, model_name: str=None, llm_metadata: LLMMetadata=None):
        # Bug fix: the source had trailing commas, making both attributes
        # 1-tuples, which broke every downstream attribute access/use.
        self.model_name = model_name
        self.llm_metadata = llm_metadata or LLMMetadata

    async def extract(self, chunks: List[Document]) -> str:
        """Summarize the chunks' text, repacked to fit the prompt window."""
        texts = [doc.page_content for doc in chunks]
        from dbgpt.util.prompt_util import PromptHelper
        prompt_helper = PromptHelper()
        from dbgpt.app.scene.chat_knowledge.summary.prompt import prompt
        texts = prompt_helper.repack(prompt_template=prompt.template, text_chunks=texts)
        return await self._mapreduce_extract_summary(docs=texts, model_name=self.model_name, llm_metadata=self.llm_metadata)

    async def _mapreduce_extract_summary(self, docs, model_name, llm_metadata: LLMMetadata):
        """Recursively map-reduce the docs down to a single summary.

        Summarizes up to ``max_chat_iteration`` docs per round, repacks the
        partial summaries, and recurses until one doc remains.
        """
        from dbgpt.app.scene import ChatScene
        from dbgpt._private.chat_util import llm_chat_response_nostream
        import uuid
        tasks = []
        if len(docs) == 1:
            return docs[0]
        # Cap the number of docs summarized in this round.
        max_iteration = min(len(docs), llm_metadata.max_chat_iteration)
        for doc in docs[0:max_iteration]:
            chat_param = {'chat_session_id': uuid.uuid1(), 'current_user_input': '', 'select_param': doc, 'model_name': model_name, 'model_cache_enable': True}
            tasks.append(llm_chat_response_nostream(ChatScene.ExtractSummary.value(), **{'chat_param': chat_param}))
        from dbgpt._private.chat_util import run_async_tasks
        summary_iters = await run_async_tasks(tasks=tasks, concurrency_limit=llm_metadata.concurrency_limit)
        # Drop failed generations before the next reduce round.
        summary_iters = list(filter((lambda content: ('LLMServer Generate Error' not in content)), summary_iters))
        from dbgpt.util.prompt_util import PromptHelper
        from dbgpt.app.scene.chat_knowledge.summary.prompt import prompt
        prompt_helper = PromptHelper()
        summary_iters = prompt_helper.repack(prompt_template=prompt.template, text_chunks=summary_iters)
        # Bug fix: the source passed 4 positional args (including
        # max_iteration) into this 3-parameter method — a TypeError.
        return await self._mapreduce_extract_summary(summary_iters, model_name, llm_metadata)
# NOTE(review): the decorators below were reconstructed from mangled fragments
# (".asyncio", ".authenticated_admin", ".htmx", ".parametrize",
# ".workspace_host") — confirm the marker names against the original suite.
@pytest.mark.asyncio
@pytest.mark.workspace_host
class TestUserPermissions:
    """Dashboard routes for listing and granting a user's direct permissions."""

    async def test_unauthorized(self, unauthorized_dashboard_assertions: HTTPXResponseAssertion, test_client_dashboard, test_data: TestData):
        # Without an authenticated session, the dashboard must reject the call.
        response = await test_client_dashboard.get(f"/users/{test_data['users']['regular'].id}/permissions")
        unauthorized_dashboard_assertions(response)

    @pytest.mark.authenticated_admin(mode='session')
    async def test_not_existing(self, test_client_dashboard, not_existing_uuid: uuid.UUID):
        response = await test_client_dashboard.get(f'/users/{not_existing_uuid}/permissions')
        assert response.status_code == status.HTTP_404_NOT_FOUND

    @pytest.mark.authenticated_admin(mode='session')
    @pytest.mark.htmx(target='aside')
    async def test_valid(self, test_client_dashboard, test_data: TestData):
        user = test_data['users']['regular']
        response = await test_client_dashboard.get(f'/users/{user.id}/permissions')
        assert response.status_code == status.HTTP_200_OK
        html = BeautifulSoup(response.text, features='html.parser')
        rows = html.find('table', id='user-permissions-table').find('tbody').find_all('tr')
        # One table row per permission assigned to this user.
        assert len(rows) == len([user_role for user_role in test_data['user_permissions'].values() if (user_role.user_id == user.id)])

    @pytest.mark.authenticated_admin(mode='session')
    @pytest.mark.htmx(target='aside')
    async def test_create_permission_unknown(self, test_client_dashboard, test_data: TestData, not_existing_uuid: uuid.UUID, csrf_token: str):
        user = test_data['users']['regular']
        response = await test_client_dashboard.post(f'/users/{user.id}/permissions', data={'permission': str(not_existing_uuid), 'csrf_token': csrf_token})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert response.headers['X-Fief-Error'] == 'unknown_permission'

    @pytest.mark.authenticated_admin(mode='session')
    @pytest.mark.htmx(target='aside')
    async def test_create_permission_already_added(self, test_client_dashboard, test_data: TestData, csrf_token: str):
        permission = test_data['permissions']['castles:delete']
        user = test_data['users']['regular']
        response = await test_client_dashboard.post(f'/users/{user.id}/permissions', data={'permission': str(permission.id), 'csrf_token': csrf_token})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert response.headers['X-Fief-Error'] == 'already_added_permission'

    @pytest.mark.parametrize('permission_alias', ['castles:create', 'castles:read'])
    @pytest.mark.authenticated_admin(mode='session')
    @pytest.mark.htmx(target='aside')
    async def test_create_permission_valid(self, permission_alias: str, test_client_dashboard, test_data: TestData, csrf_token: str, workspace_session: AsyncSession):
        permission = test_data['permissions'][permission_alias]
        user = test_data['users']['regular']
        response = await test_client_dashboard.post(f'/users/{user.id}/permissions', data={'permission': str(permission.id), 'csrf_token': csrf_token})
        assert response.status_code == status.HTTP_201_CREATED
        # Force a reload so the repository sees the row committed by the app.
        workspace_session.expire_all()
        user_permission_repository = UserPermissionRepository(workspace_session)
        user_permissions = await user_permission_repository.list(user_permission_repository.get_by_user_statement(user.id, direct_only=True))
        # One pre-existing direct permission (castles:delete) plus the new one.
        assert len(user_permissions) == 2
        assert permission.id in [user_permission.permission_id for user_permission in user_permissions]
def generate_tiles(options: argparse.Namespace) -> None:
    """Generate one or more slippy-map tiles.

    Exactly one input selector is expected on ``options``: an OSM input file,
    a coordinate pair, an explicit ``zoom/x/y`` tile, or a boundary box.
    Exits the process with status 1 on unusable input.
    """
    directory: Path = workspace.get_tile_path()
    zoom_levels: list[int] = parse_zoom_level(options.zoom)
    min_zoom_level: int = min(zoom_levels)
    scheme: Scheme = Scheme.from_file(workspace.find_scheme_path(options.scheme))

    if options.input_file_name:
        # Draw tiles covering the view box declared in a local OSM file.
        osm_data: OSMData = OSMData()
        osm_data.parse_osm_file(Path(options.input_file_name))
        if osm_data.view_box is None:
            logging.fatal(f'Failed to parse boundary box input file {options.input_file_name}.')
            sys.exit(1)
        boundary_box: BoundaryBox = osm_data.view_box
        for zoom_level in zoom_levels:
            configuration: MapConfiguration = MapConfiguration.from_options(scheme, options, zoom_level)
            tiles: Tiles = Tiles.from_boundary_box(boundary_box, zoom_level)
            tiles.draw(directory, Path(options.cache), configuration, osm_data)
    elif options.coordinates:
        # Draw the tile containing a single point at every requested zoom.
        coordinates: list[float] = list(map(float, options.coordinates.strip().split(',')))
        min_tile: Tile = Tile.from_coordinates(np.array(coordinates), min_zoom_level)
        try:
            # Data is fetched once for the lowest zoom level and reused.
            osm_data: OSMData = min_tile.load_osm_data(Path(options.cache))
        except NetworkError as error:
            # Chain the cause so the original download failure stays visible.
            raise NetworkError(f'Map is not loaded. {error.message}') from error
        for zoom_level in zoom_levels:
            tile: Tile = Tile.from_coordinates(np.array(coordinates), zoom_level)
            try:
                configuration: MapConfiguration = MapConfiguration.from_options(scheme, options, zoom_level)
                tile.draw_with_osm_data(osm_data, directory, configuration)
            except NetworkError as error:
                logging.fatal(error.message)
    elif options.tile:
        # Draw one explicitly addressed tile: "<zoom>/<x>/<y>".
        zoom_level, x, y = map(int, options.tile.split('/'))
        tile: Tile = Tile(x, y, zoom_level)
        configuration: MapConfiguration = MapConfiguration.from_options(scheme, options, zoom_level)
        tile.draw(directory, Path(options.cache), configuration)
    elif options.boundary_box:
        boundary_box: Optional[BoundaryBox] = BoundaryBox.from_text(options.boundary_box)
        if boundary_box is None:
            logging.fatal('Failed to parse boundary box.')
            sys.exit(1)
        min_tiles: Tiles = Tiles.from_boundary_box(boundary_box, min_zoom_level)
        try:
            osm_data: OSMData = min_tiles.load_osm_data(Path(options.cache))
        except NetworkError as error:
            raise NetworkError(f'Map is not loaded. {error.message}') from error
        for zoom_level in zoom_levels:
            if EXTEND_TO_BIGGER_TILE:
                # Reuse the low-zoom tile grid subdivided to this zoom level.
                tiles: Tiles = min_tiles.subdivide(zoom_level)
            else:
                tiles: Tiles = Tiles.from_boundary_box(boundary_box, zoom_level)
            configuration: MapConfiguration = MapConfiguration.from_options(scheme, options, zoom_level)
            tiles.draw(directory, Path(options.cache), configuration, osm_data)
    else:
        logging.fatal('Specify either --coordinates, --boundary-box, --tile, or --input.')
        sys.exit(1)
def ert_config_values(draw, use_eclbase=booleans):
    """Produce a consistent ``ErtConfigValues`` draw for property-based tests.

    NOTE(review): presumably decorated with ``@st.composite`` upstream — the
    ``draw`` parameter follows that protocol; confirm in the original file.
    """
    queue_system = draw(queue_systems)
    install_jobs = draw(small_list(random_forward_model_names(words, file_names)))
    # FORWARD_MODEL / SIMULATION_JOB entries may only reference installed jobs.
    forward_model = (draw(small_list(job(install_jobs))) if install_jobs else [])
    simulation_job = (draw(small_list(sim_job(install_jobs))) if install_jobs else [])
    # GEN_DATA entries: unique names with result file, format and report steps.
    gen_data = draw(small_list(st.tuples(st.builds((lambda x: f'GEN_DATA-{x}'), words), st.builds((lambda x: f'RESULT_FILE:{x}'), format_result_file_name), st.just('INPUT_FORMAT:ASCII'), st.builds((lambda x: f'REPORT_STEPS:{x}'), report_steps())), unique_by=(lambda tup: tup[0])))
    sum_keys = draw(small_list(summary_variables(), min_size=1))
    # Fixed anchor date so observation times can be expressed as offsets.
    first_date = datetime.datetime.strptime('1999-1-1', '%Y-%m-%d')
    smspec = draw(smspecs(sum_keys=st.just(sum_keys), start_date=st.just(Date(year=first_date.year, month=first_date.month, day=first_date.day, hour=first_date.hour, minutes=first_date.minute, micro_seconds=((first_date.second * (10 ** 6)) + first_date.microsecond)))))
    std_cutoff = draw(small_floats)
    obs = draw(observations((st.sampled_from([g[0] for g in gen_data]) if gen_data else None), (composite_keys(smspec) if (len(smspec.keywords) > 1) else None), std_cutoff=std_cutoff))
    # History/summary observations require an ECLBASE to be set.
    need_eclbase = any((isinstance(val, (HistoryObservation, SummaryObservation)) for val in obs))
    use_eclbase = (draw(use_eclbase) if (not need_eclbase) else st.just(True))
    # Observation dates converted to fractional days since the anchor date.
    dates = _observation_dates(obs, first_date)
    time_diffs = [(d - first_date) for d in dates]
    time_diff_floats = [(diff.total_seconds() / (3600 * 24)) for diff in time_diffs]
    unsmry = draw(unsmrys(len(sum_keys), report_steps=st.just(list(range(1, (len(dates) + 1)))), mini_steps=st.just(list(range((len(dates) + 1)))), days=st.just(time_diff_floats)))
    # Assemble every keyword of the generated ERT configuration in one build.
    return draw(st.builds(ErtConfigValues, forward_model=st.just(forward_model), simulation_job=st.just(simulation_job), num_realizations=positives, eclbase=(st.just((draw(words) + '%d')) if use_eclbase else st.just(None)), runpath_file=st.just((draw(file_names) + 'runpath')), run_template=small_list(st.builds((lambda fil: [(fil + '.templ'), fil]), file_names)), std_cutoff=st.just(std_cutoff), enkf_alpha=small_floats, iter_case=words, iter_count=positives, iter_retry_count=positives, update_log_path=directory_names(), max_runtime=positives, min_realizations=st.builds((lambda a, b: (str(a) if b else (str(a) + '%'))), st.integers(), booleans), define=small_list(st.tuples(st.builds((lambda x: f'<key-{x}>'), words), words)), stop_long_running=booleans, data_kw_key=small_list(st.tuples(st.builds((lambda x: f'<{x}>'), words), words)), data_file=st.just((draw(file_names) + '.DATA')), grid_file=st.just((draw(words) + '.EGRID')), job_script=st.just((draw(file_names) + 'job_script')), jobname=(st.just(('JOBNAME-' + draw(words))) if (not use_eclbase) else st.just(None)), runpath=st.just(('runpath-' + draw(format_runpath_file_name))), enspath=st.just((draw(words) + '.enspath')), time_map=st.builds((lambda fn: (fn + '.timemap')), file_names), obs_config=st.just(('obs-config-' + draw(file_names))), history_source=st.just(HistorySource.REFCASE_SIMULATED), refcase=st.just(('refcase/' + draw(file_names))), gen_kw_export_name=st.just(('gen-kw-export-name-' + draw(file_names))), field=small_list(st.tuples(st.builds((lambda w: ('FIELD-' + w)), words), st.just('PARAMETER'), field_output_names(), st.builds((lambda x: f'FORWARD_INIT:{x}'), booleans), st.builds((lambda x: f'INIT_TRANSFORM:{x}'), transforms), st.builds((lambda x: f'OUTPUT_TRANSFORM:{x}'), transforms), st.builds((lambda x: f'MIN:{x}'), small_floats), st.builds((lambda x: f'MAX:{x}'), small_floats), st.builds((lambda x: f'INIT_FILES:{x}'), file_names)), unique_by=(lambda element: element[0])), gen_data=st.just(gen_data), max_submit=positives, num_cpu=positives, queue_system=st.just(queue_system), queue_option=small_list(queue_options(st.just(queue_system))), analysis_set_var=small_list(st.tuples(st.just('STD_ENKF'), st.just('ENKF_TRUNCATION'), st.floats(min_value=0.0, max_value=1.0, exclude_min=True))), install_job=st.just(install_jobs), install_job_directory=small_list(directory_names()), license_path=directory_names(), random_seed=st.integers(), setenv=small_list(st.tuples(words, words)), observations=st.just(obs), refcase_smspec=st.just(smspec), refcase_unsmry=st.just(unsmry), egrid=egrids, datetimes=st.just(dates)))
class UnitSquareRotation(TransportCoefficients.TC_base):
    """Transport coefficients for linear advection of a single scalar under a
    rotational velocity field on the unit square; evaluation is delegated to
    the compiled ``unitSquareRotationEvaluate`` routine."""

    from proteus.ctransportCoefficients import unitSquareRotationEvaluate

    def __init__(self):
        # One linear component (index 0) for both mass and advection terms;
        # no diffusion, potential, reaction or Hamiltonian contributions.
        TransportCoefficients.TC_base.__init__(
            self,
            1,
            {0: {0: 'linear'}},  # mass
            {0: {0: 'linear'}},  # advection
            {},                  # diffusion
            {},                  # potential
            {},                  # reaction
            {},                  # hamiltonian
        )

    def evaluate(self, t, c):
        # Fill m, dm, f, df in place from the coordinates and solution values.
        self.unitSquareRotationEvaluate(c['x'], c[('u', 0)], c[('m', 0)], c[('dm', 0, 0)], c[('f', 0)], c[('df', 0, 0)])
class TestNotificationService:
    """End-to-end checks: start the notification service and list the default
    delivery/rule plugins."""

    def test_service(self, reset_and_start_fledge, service_branch, fledge_url, wait_time, retries, remove_directories):
        """Start the notification service and verify it registers and runs."""
        _configure_and_start_service(service_branch, fledge_url, remove_directories)
        retry_count = 0
        default_registry_count = 2
        service_registry = []
        # Poll the registry until the notification service shows up (third
        # registered service) or retries are exhausted.
        # Fixed: the loop previously compared the registry *list* against the
        # integer 3, which is always unequal, so it burned every retry even
        # after the service had started.
        while retry_count < retries:
            svc = _get_result(fledge_url, '/fledge/service')
            service_registry = svc['services']
            if len(service_registry) >= 3:
                break
            retry_count += 1
            pause_for_x_seconds(x=(wait_time * 2))
        if len(service_registry) == default_registry_count:
            assert False, 'Failed to start the {} service'.format(SERVICE)
        _verify_service(fledge_url, status='running')
        _verify_audit_log_entry(fledge_url, '/fledge/audit?source=NTFST', name=SERVICE_NAME)

    def test_get_default_notification_plugins(self, fledge_url, remove_directories):
        """With plugin directories removed, only the built-in rules remain."""
        remove_directories(os.environ['FLEDGE_ROOT'] + '/plugins/notificationDelivery')
        remove_directories(os.environ['FLEDGE_ROOT'] + '/plugins/notificationRule')
        # Fixed: missing path separator — previously concatenated to
        # '<root>cmake_build/...', which never matched the build directory.
        remove_directories(os.environ['FLEDGE_ROOT'] + '/cmake_build/C/plugins/notificationDelivery')
        remove_directories(os.environ['FLEDGE_ROOT'] + '/cmake_build/C/plugins/notificationRule')
        jdoc = _get_result(fledge_url, '/fledge/notification/plugin')
        assert [] == jdoc['delivery']
        assert 2 == len(jdoc['rules'])
        assert NOTIFY_INBUILT_RULES[0] == jdoc['rules'][1]['name']
        assert NOTIFY_INBUILT_RULES[1] == jdoc['rules'][0]['name']
# NOTE(review): reconstructed from the mangled "_metaclass(abc.ABCMeta)" line —
# presumably six's py2/py3-compatible metaclass decorator; confirm upstream.
@six.add_metaclass(abc.ABCMeta)
class BaseRedisClient(object):
    """Base client sharding queue operations across a ring of Redis
    connections; subclasses supply the actual connection objects."""

    DEFAULT_RECEIVE_TIMEOUT = 5
    # Queue keys containing this marker are response queues and must be pinned
    # to a consistent connection so replies land where the requester listens.
    RESPONSE_QUEUE_SPECIFIER = '!'

    def __init__(self, ring_size):
        self.metrics_counter_getter = None
        self._ring_size = ring_size
        # Random but fixed rotation order over all ring slots, cycled forever.
        self._connection_index_generator = itertools.cycle(random.sample(range(self._ring_size), k=self._ring_size))
        self.send_message_to_queue = SendMessageToQueueCommand(self._get_connection(0))

    def get_connection(self, queue_key):
        if self.RESPONSE_QUEUE_SPECIFIER in queue_key:
            # Response queues always hash to the same connection.
            return self._get_connection(self._get_consistent_hash_index(queue_key))
        else:
            # Ordinary queues round-robin over the shuffled ring.
            return self._get_connection(next(self._connection_index_generator))

    def _get_connection(self, index):
        # Abstract hook: subclasses must return the connection for ring slot
        # ``index``. Fixed: the original body was missing (lost in extraction);
        # an explicit NotImplementedError restores the abstract contract.
        raise NotImplementedError

    def _get_consistent_hash_index(self, value):
        # CRC32 of the key masked to 12 bits, then scaled down onto the ring.
        big_value = binascii.crc32(value.encode('utf8') if isinstance(value, six.text_type) else value) & 4095
        ring_divisor = 4096.0 / self._ring_size
        return int(big_value / ring_divisor)

    def _get_counter(self, name):
        # Metrics are optional; fall back to a no-op counter when unset.
        return self.metrics_counter_getter(name) if self.metrics_counter_getter else _no_op_counter
class TestMarkdownChained(util.PluginTestCase):
    """Spell-check a text -> markdown -> html filter pipeline and verify only
    true misspellings (not code spans) are reported."""

    def setup_fs(self):
        # Pipeline config: plain text, then markdown with fenced code blocks,
        # then HTML with code/pre elements ignored.
        config = self.dedent("\n matrix:\n - name: markdown\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.text:\n - pyspelling.filters.markdown:\n markdown_extensions:\n - markdown.extensions.fenced_code:\n - pyspelling.filters.html:\n ignores:\n - code\n - pre\n ").format(self.tempdir)
        self.mktemp('.markdown.yml', config, 'utf-8')

    def test_markdown_after_text(self):
        # Misspellings in prose must be flagged; words inside inline code and
        # fenced blocks must be ignored by the html filter.
        bad_words = ['helo', 'begn']
        good_words = ['yes', 'word']
        template = self.dedent('\n ## Title\n\n {}\n\n Line `slajdl alsjs`\n\n ```\n skjadf alsdkjls\n ```\n ').format('\n'.join((bad_words + good_words)))
        self.mktemp('test.txt', template, 'utf-8')
        self.assert_spellcheck('.markdown.yml', bad_words)
def extractVasaandypresWordpressCom(item):
    """Parser for 'vasaandypres.wordpress.com' feed items.

    Returns a release message for recognized tags, ``None`` for previews or
    items without volume/chapter info, and ``False`` when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info as well as preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, release name, translation type) — first match wins.
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class DenseBlock(ShapeNormalizationBlock):
    """Fully-connected block: a stack of Linear layers, each followed by the
    configured non-linearity, operating on 2-D (batch, features) tensors."""

    def __init__(self, in_keys: Union[(str, List[str])], out_keys: Union[(str, List[str])], in_shapes: Union[(Sequence[int], List[Sequence[int]])], hidden_units: List[int], non_lin: Union[(str, type(nn.Module))]):
        """
        :param in_keys: key(s) of the input tensor(s) in the block input dict.
        :param out_keys: key(s) under which the output is stored.
        :param in_shapes: shape(s) of the input tensor(s).
        :param hidden_units: output width of each hidden Linear layer.
        :param non_lin: non-linearity module (type or name resolvable by Factory).
        """
        super().__init__(in_keys=in_keys, out_keys=out_keys, in_shapes=in_shapes, in_num_dims=2, out_num_dims=2)
        # Feature dimension is the last axis of the (single) input shape.
        self.input_units = self.in_shapes[0][(- 1)]
        self.hidden_units = hidden_units
        # Resolve a string like "relu" to the corresponding nn.Module type.
        self.non_lin = Factory(base_type=nn.Module).type_from_name(non_lin)
        self.output_units = self.hidden_units[(- 1)]
        layer_dict = self.build_layer_dict()
        self.net = nn.Sequential(layer_dict)

    # NOTE(review): bare expression below is presumably a mangled
    # @override(ShapeNormalizationBlock) decorator for the method that
    # follows — confirm against the original file.
    (ShapeNormalizationBlock)
    def normalized_forward(self, block_input: Dict[(str, torch.Tensor)]) -> Dict[(str, torch.Tensor)]:
        """Run the MLP on the (already shape-normalized) input tensor."""
        input_tensor = block_input[self.in_keys[0]]
        # Sanity-check dimensionality and feature width before the forward.
        assert (input_tensor.ndim == self.in_num_dims[0])
        assert (input_tensor.shape[(- 1)] == self.input_units), f'failed for obs {self.in_keys[0]} because {input_tensor.shape[(- 1)]} != {self.input_units}'
        output_tensor = self.net(input_tensor)
        assert (output_tensor.ndim == self.out_num_dims[0])
        assert (output_tensor.shape[(- 1)] == self.output_units)
        return {self.out_keys[0]: output_tensor}

    def build_layer_dict(self) -> OrderedDict:
        """Build the ordered Linear/non-linearity layer stack for nn.Sequential."""
        layer_dict = OrderedDict()
        layer_dict['linear_0'] = nn.Linear(self.input_units, self.hidden_units[0])
        layer_dict[f'{self.non_lin.__name__}_0'] = self.non_lin()
        # Chain remaining hidden layers, each consuming the previous width.
        for (i, h) in enumerate(self.hidden_units[1:], start=1):
            layer_dict[f'linear_{i}'] = nn.Linear(self.hidden_units[(i - 1)], self.hidden_units[i])
            layer_dict[f'{self.non_lin.__name__}_{i}'] = self.non_lin()
        return layer_dict

    def __repr__(self):
        txt = f'{DenseBlock.__name__}({self.non_lin.__name__})'
        txt += ((('\n\t' + f'({self.input_units}->') + '->'.join([f'{h}' for h in self.hidden_units])) + ')')
        txt += f'''
Out Shapes: {self.out_shapes()}'''
        return txt
class OptionPlotoptionsArearangeOnpoint(Options):
    """Generated Highcharts option wrapper for
    ``plotOptions.arearange.onPoint``.

    NOTE(review): ``id`` appears twice (getter- and setter-shaped). The
    ``@property`` / ``@id.setter`` decorators were presumably lost in
    extraction — as written the second ``def id`` shadows the first; confirm
    against the generator's usual property pattern (see sibling classes).
    """

    def connectorOptions(self) -> 'OptionPlotoptionsArearangeOnpointConnectoroptions':
        # Sub-configuration object for the connector line options.
        return self._config_sub_data('connectorOptions', OptionPlotoptionsArearangeOnpointConnectoroptions)

    def id(self):
        # Getter: configured id, defaulting to None.
        return self._config_get(None)

    def id(self, text: str):
        # Setter: store the id as a plain (non-JS) value.
        self._config(text, js_type=False)

    def position(self) -> 'OptionPlotoptionsArearangeOnpointPosition':
        # Sub-configuration object for the on-point position options.
        return self._config_sub_data('position', OptionPlotoptionsArearangeOnpointPosition)
class mlfbf(primal_dual):
    """Monotone + Lipschitz Forward-Backward-Forward (MLFBF) primal-dual solver.

    Expects exactly three convex functions: two non-smooth (prox-capable)
    terms and one smooth (gradient-capable) term, composed with the linear
    operators ``L``/``Lt`` provided by the ``primal_dual`` base class.
    """

    def _pre(self, functions, x0):
        super(mlfbf, self)._pre(functions, x0)
        if (len(functions) != 3):
            raise ValueError('MLFBF requires 3 convex functions.')
        # Convention: functions[0] -> prox term on the primal variable,
        # functions[1] -> prox term on the dual (via conjugate prox),
        # functions[2] -> smooth term (gradient used below).
        self.non_smooth_funs.append(functions[0])
        self.non_smooth_funs.append(functions[1])
        self.smooth_funs.append(functions[2])

    def _algo(self):
        # Forward step on both primal and dual variables.
        y1 = (self.sol - (self.step * (self.smooth_funs[0].grad(self.sol) + self.Lt(self.dual_sol))))
        y2 = (self.dual_sol + (self.step * self.L(self.sol)))
        # Backward (proximal) step; dual side uses the conjugate prox.
        p1 = self.non_smooth_funs[0].prox(y1, self.step)
        p2 = _prox_star(self.non_smooth_funs[1], y2, self.step)
        # Second forward step evaluated at the proximal points.
        q1 = (p1 - (self.step * (self.smooth_funs[0].grad(p1) + self.Lt(p2))))
        q2 = (p2 + (self.step * self.L(p1)))
        # In-place correction updates of the primal and dual iterates.
        self.sol[:] = ((self.sol - y1) + q1)
        self.dual_sol[:] = ((self.dual_sol - y2) + q2)
class TestGlobEscapes(unittest.TestCase):
    """Tests for ``glob.escape`` / ``glob.raw_escape`` covering default,
    forced-Windows and forced-Unix behavior."""

    def check_escape(self, arg, expected, raw=False, unix=None, raw_chars=True):
        """Assert escaping of ``arg`` (str and bytes forms) yields ``expected``
        and that the escaped pattern matches the original path.

        :param raw: exercise ``raw_escape`` instead of ``escape``.
        :param unix: None = platform default, False = FORCEWIN, True = FORCEUNIX.
        :param raw_chars: whether raw character escapes (e.g. ``\\x3f``) are
            translated by ``raw_escape``.
        """
        flags = 0
        if (unix is False):
            flags = glob.FORCEWIN
        elif (unix is True):
            flags = glob.FORCEUNIX
        if raw:
            self.assertEqual(glob.raw_escape(arg, unix=unix, raw_chars=raw_chars), expected)
            self.assertEqual(glob.raw_escape(os.fsencode(arg), unix=unix, raw_chars=raw_chars), os.fsencode(expected))
            # Reconstruct the literal file name the raw pattern refers to, then
            # confirm the escaped pattern matches it.
            file = (util.norm_pattern(arg, False, True) if raw_chars else arg).replace('\\\\', '\\')
            self.assertTrue(glob.globmatch(file, glob.raw_escape(arg, unix=unix, raw_chars=raw_chars), flags=flags))
        else:
            self.assertEqual(glob.escape(arg, unix=unix), expected)
            self.assertEqual(glob.escape(os.fsencode(arg), unix=unix), os.fsencode(expected))
            self.assertTrue(glob.globmatch(arg, glob.escape(arg, unix=unix), flags=flags))

    def test_raw_escape_deprecation(self):
        """``raw_escape`` should emit exactly one DeprecationWarning."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            glob.raw_escape('test\\\\test')
            self.assertTrue((len(w) == 1))
            self.assertTrue(issubclass(w[(- 1)].category, DeprecationWarning))

    def test_escape(self):
        """Default escaping of glob metacharacters."""
        check = self.check_escape
        check('abc', 'abc')
        check('[', '\\[')
        check('?', '\\?')
        check('*', '\\*')
        check('[[_/*?*/_]]', '\\[\\[_/\\*\\?\\*/_\\]\\]')
        check('/[[_/*?*/_]]/', '/\\[\\[_/\\*\\?\\*/_\\]\\]/')

    def test_raw_escape(self):
        """Raw escaping, including translation of raw char escapes."""
        check = self.check_escape
        check('abc', 'abc', raw=True)
        check('[', '\\[', raw=True)
        check('?', '\\?', raw=True)
        check('*', '\\*', raw=True)
        check('[[_/*?*/_]]', '\\[\\[_/\\*\\?\\*/_\\]\\]', raw=True)
        check('/[[_/*?*/_]]/', '/\\[\\[_/\\*\\?\\*/_\\]\\]/', raw=True)
        # '\x3f' is '?' — translated, then escaped.
        check('\\x3f', '\\?', raw=True)
        check('[^what]\\\\name\\\\temp', '[^what]\\\\name\\\\temp', raw=True, unix=False)
        check('//[^what]/name/temp', '//[^what]/name/temp', raw=True, unix=False)

    def test_raw_escape_no_raw_chars(self):
        """Raw escaping with raw character translation disabled."""
        check = self.check_escape
        check('abc', 'abc', raw=True, raw_chars=False)
        check('[', '\\[', raw=True, raw_chars=False)
        check('?', '\\?', raw=True, raw_chars=False)
        check('*', '\\*', raw=True, raw_chars=False)
        check('[[_/*?*/_]]', '\\[\\[_/\\*\\?\\*/_\\]\\]', raw=True, raw_chars=False)
        check('/[[_/*?*/_]]/', '/\\[\\[_/\\*\\?\\*/_\\]\\]/', raw=True, raw_chars=False)
        # With raw_chars=False, '\x3f' stays literal (backslash escaped).
        check('\\x3f', '\\\\x3f', raw=True, raw_chars=False)
        check('[^what]\\\\name\\\\temp', '[^what]\\\\name\\\\temp', raw=True, raw_chars=False, unix=False)
        check('//[^what]/name/temp', '//[^what]/name/temp', raw=True, raw_chars=False, unix=False)

    # NOTE(review): bare tuple below is presumably a mangled
    # @unittest.skipUnless(sys.platform.startswith('win'), ...) decorator for
    # the method that follows — confirm against the original file.
    (sys.platform.startswith('win'), 'Windows specific test')
    def test_escape_windows(self):
        """Windows escaping: drive letters and UNC prefixes stay intact."""
        check = self.check_escape
        check('a:\\?', 'a:\\\\\\?')
        check('b:\\*', 'b:\\\\\\*')
        check('\\\\?\\c:\\?', '?\\\\c:\\\\\\?')
        check('\\\\*\\*\\*', '*\\\\*\\\\\\*')
        check('//?/c:/?', '//?/c:/\\?')
        check('//*/*/*', '//*/*/\\*')
        check('//[^what]/name/temp', '//[^what]/name/temp')

    def test_escape_forced_windows(self):
        """Same expectations as Windows, but forced via FORCEWIN."""
        check = self.check_escape
        check('a:\\?', 'a:\\\\\\?', unix=False)
        check('b:\\*', 'b:\\\\\\*', unix=False)
        check('\\\\?\\c:\\?', '?\\\\c:\\\\\\?', unix=False)
        check('\\\\*\\*\\*', '*\\\\*\\\\\\*', unix=False)
        check('//?/c:/?', '//?/c:/\\?', unix=False)
        check('//*/*/*', '//*/*/\\*', unix=False)
        check('//./Volume{b75e2c83-0000-0000-0000-602f}/temp', '//./Volume\\{b75e2c83-0000-0000-0000-602f\\}/temp', unix=False)
        check('//[^what]/name/temp', '//[^what]/name/temp', unix=False)

    def test_escape_forced_unix(self):
        """Forced Unix escaping: no drive/UNC special-casing, so all
        metacharacters are escaped."""
        check = self.check_escape
        check('a:\\?', 'a:\\\\\\?', unix=True)
        check('b:\\*', 'b:\\\\\\*', unix=True)
        check('\\\\?\\c:\\?', '?\\\\c:\\\\\\?', unix=True)
        check('\\\\*\\*\\*', '*\\\\\\*\\\\\\*', unix=True)
        check('//?/c:/?', '//\\?/c:/\\?', unix=True)
        check('//*/*/*', '//\\*/\\*/\\*', unix=True)
        check('//[^what]/name/temp', '//\\[^what\\]/name/temp', unix=True)
(scope='function')
def friendbuy_nextgen_secrets(saas_config):
    """Build the Friendbuy Next-Gen connector secrets, preferring values from
    the saas config and falling back to the shared ``secrets`` test data."""
    return {
        field: pydash.get(saas_config, f'friendbuy_nextgen.{field}') or secrets[field]
        for field in ('domain', 'key', 'secret')
    }
def c_generate_convenience_functions(module_name, name, feature_type='int16_t', clasification_return_type='int16_t'):
    """Emit C convenience wrappers for a generated model.

    Returns a list with a single C function definition, ``<name>_predict``,
    that forwards to ``<module_name>_predict`` on the model struct ``name``.
    """
    return [f'''
{clasification_return_type}
{name}_predict(const {feature_type} *features, int32_t n_features)
{{
    return {module_name}_predict(&{name}, features, n_features);
}}
    ''']
class Dolly(HuggingFace):
    """Wrapper around Databricks' Dolly v2 instruction-tuned models served via
    the Hugging Face ``transformers`` pipeline API."""

    # Model names accepted for this backend.
    MODEL_NAMES = Literal[('dolly-v2-3b', 'dolly-v2-7b', 'dolly-v2-12b')]

    def init_model(self) -> Any:
        # return_full_text=False so only newly generated text (not the prompt)
        # is returned by the pipeline.
        return transformers.pipeline(model=self._name, return_full_text=False, **self._config_init)

    def __call__(self, prompts: Iterable[str]) -> Iterable[str]:
        # One generation per prompt; [0]['generated_text'] unwraps the
        # pipeline's single-candidate output.
        return [self._model(pr, **self._config_run)[0]['generated_text'] for pr in prompts]

    def hf_account(self) -> str:
        # NOTE(review): likely a mangled @property in the original — confirm.
        return 'databricks'

    def compile_default_configs() -> Tuple[(Dict[(str, Any)], Dict[(str, Any)])]:
        # NOTE(review): no ``self``/``cls`` parameter — presumably decorated
        # @staticmethod in the original; the decorator was lost in extraction.
        (default_cfg_init, default_cfg_run) = HuggingFace.compile_default_configs()
        # Dolly repos ship custom pipeline code, so trust_remote_code is needed.
        return ({**default_cfg_init, 'trust_remote_code': True}, default_cfg_run)
class DBID:
    """Handle for a database primary key that may not be assigned yet.

    Wraps either a concrete integer id, ``None`` (not yet assigned), or
    another ``DBID`` (resolved later through that handle). Each instance also
    receives a unique, monotonically increasing ``local_id``.
    """

    __slots__ = ['_id', 'is_new', 'local_id']

    # Class-wide counter used to hand out unique local ids.
    next_id: int = 0

    def __init__(self, id: Union[int, None, DBID] = None) -> None:
        self.resolve(id)
        self.local_id: int = DBID.next_id
        DBID.next_id += 1

    def resolve(self, id: Union[int, None, DBID], is_new: bool = True) -> DBID:
        """Point this handle at ``id`` and return self for chaining."""
        self._check_type(id)
        self._id = id
        self.is_new = is_new
        return self

    def resolved(self) -> Optional[int]:
        """Follow any chain of DBID indirections down to an int (or None)."""
        current = self._id
        while isinstance(current, DBID):
            current = current._id
        return current

    def _check_type(self, id: Union[int, None, DBID]) -> None:
        if not isinstance(id, (int, type(None), DBID)):
            raise TypeError("id expected to be type '{}' but was type '{}'".format(int, type(id)))

    def __int__(self) -> int:
        value = self.resolved()
        if value is None:
            # An unresolved handle has no integer value.
            raise TypeError(f'cannot convert unset {repr(self)} to int')
        return value

    def __str__(self) -> str:
        return str(self.resolved())

    def __add__(self, other: Union[DBID, int]) -> int:
        return int(self) + int(other)

    def __lt__(self, other: Union[DBID, int]) -> bool:
        return int(self) < int(other)

    def __gt__(self, other: Union[DBID, int]) -> bool:
        return int(self) > int(other)

    def __ge__(self, other: Union[DBID, int]) -> bool:
        return int(self) >= int(other)

    def __le__(self, other: Union[DBID, int]) -> bool:
        return int(self) <= int(other)

    def __repr__(self) -> str:
        return f'<{self.__class__.__name__}(id={self._id}) object at 0x{id(self):x}>'
class TestOFPInstructionGotoTable(unittest.TestCase):
    """Init/parse/serialize round-trip tests for OFPInstructionGotoTable."""

    # Expected wire constants for the Goto-Table instruction.
    type_ = ofproto.OFPIT_GOTO_TABLE
    len_ = ofproto.OFP_INSTRUCTION_GOTO_TABLE_SIZE
    fmt = ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR

    def test_init(self):
        table_id = 3
        c = OFPInstructionGotoTable(table_id)
        eq_(self.type_, c.type)
        eq_(self.len_, c.len)
        eq_(table_id, c.table_id)

    def _test_parser(self, table_id):
        # Pack a wire-format instruction and check the parsed fields.
        buf = pack(self.fmt, self.type_, self.len_, table_id)
        res = OFPInstructionGotoTable.parser(buf, 0)
        eq_(res.len, self.len_)
        eq_(res.type, self.type_)
        eq_(res.table_id, table_id)

    def test_parser_mid(self):
        self._test_parser(3)

    def test_parser_max(self):
        # table_id is an 8-bit field; 255 is its maximum.
        self._test_parser(255)

    def test_parser_min(self):
        self._test_parser(0)

    def _test_serialize(self, table_id):
        # Serialize into a buffer and unpack it back to verify the layout.
        c = OFPInstructionGotoTable(table_id)
        buf = bytearray()
        c.serialize(buf, 0)
        res = struct.unpack(self.fmt, six.binary_type(buf))
        eq_(res[0], self.type_)
        eq_(res[1], self.len_)
        eq_(res[2], table_id)

    def test_serialize_mid(self):
        self._test_serialize(3)

    def test_serialize_max(self):
        self._test_serialize(255)

    def test_serialize_min(self):
        self._test_serialize(0)
class speed_checker(object):
    """Rolling transfer-rate estimator.

    Feed byte counts through ``check``; roughly once per second a bytes/sec
    sample is appended to a bounded buffer, and ``calc`` averages the samples.
    """

    def __init__(self, cnt=5):
        # Maximum number of speed samples kept in the rolling buffer.
        self.cnt = cnt
        self.speed_buffer = []
        self.reset()

    def check(self, l):
        """Record ``l`` more bytes; sample the speed if >1s has elapsed."""
        self.current_bytes += l
        self.current_tm = time.time()
        elapsed = self.current_tm - self.last_tm
        if elapsed > 1:
            self.speed_buffer.append((self.current_bytes - self.last_bytes) / elapsed)
            # Trim oldest samples beyond the configured window.
            while len(self.speed_buffer) > self.cnt:
                self.speed_buffer.pop(0)
            self.last_tm = self.current_tm
            self.last_bytes = self.current_bytes
        return

    def calc(self, full=False):
        """Average speed over the buffer; 0 when empty (or, with ``full``,
        when the buffer is not yet fully populated)."""
        samples = self.speed_buffer
        if not samples:
            return 0
        if full and len(samples) < self.cnt:
            return 0
        return sum(samples) / len(samples)

    def reset(self):
        """Restart counters; drop any accumulated speed samples."""
        self.last_tm = time.time()
        self.last_bytes = 0
        self.current_bytes = 0
        self.current_tm = 0
        if self.speed_buffer:
            self.speed_buffer = []
class OptionSeriesColumnpyramidSonificationTracksMappingLowpass(Options):
    """Generated Highcharts option wrapper for
    ``series.columnpyramid.sonification.tracks.mapping.lowpass``."""

    def frequency(self) -> 'OptionSeriesColumnpyramidSonificationTracksMappingLowpassFrequency':
        # Sub-option object mapping data to the lowpass filter frequency.
        return self._config_sub_data('frequency', OptionSeriesColumnpyramidSonificationTracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesColumnpyramidSonificationTracksMappingLowpassResonance':
        # Sub-option object mapping data to the lowpass filter resonance.
        return self._config_sub_data('resonance', OptionSeriesColumnpyramidSonificationTracksMappingLowpassResonance)
class OptionSeriesDependencywheelSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
    """Generated Highcharts option wrapper for
    ``series.dependencywheel.sonification.defaultInstrumentOptions.mapping.gapBetweenNotes``.

    NOTE(review): each name below appears twice (getter- and setter-shaped);
    the ``@property`` / ``@<name>.setter`` decorators were presumably lost in
    extraction — as written each second def shadows the first. Confirm against
    the generator's usual property pattern.
    """

    def mapFunction(self):
        # Getter: mapping function, defaulting to None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the mapping function as a plain (non-JS) value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the gap is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit the mapping range is computed within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def _solve_bezier(target: float, a: float, b: float, c: float) -> float:
    """Find ``t`` such that the 1-D cubic Bezier component equals ``target``.

    Tries Newton-Raphson first; if it fails to converge within ``MAX_ITER``
    iterations (or hits a zero derivative), falls back to bisection on [0, 1].
    """
    x = 0.0
    t = 0.5
    # NaN sentinel: the first ``t == last`` comparison can never match.
    last = alg.nan
    # --- Newton-Raphson iteration ---
    for _ in range(MAX_ITER):
        x = (_bezier(t, a, b, c) - target)
        dx = _bezier_derivative(t, a, b, c)
        if (dx == 0):
            # Newton step undefined; fall through to the bisection path.
            break
        t -= (x / dx)
        if (t == last):
            # Fixed point reached: converged.
            return t
        last = t
    if (abs((_bezier(t, a, b, c) - target)) < EPSILON):
        return t
    # --- Bisection fallback over [0, 1] ---
    (low, high) = (0.0, 1.0)
    # Initial guess: assumes target roughly tracks t — TODO confirm caller
    # always passes a target in [0, 1].
    t = target
    while (abs((high - low)) > EPSILON):
        x = _bezier(t, a, b, c)
        if (abs((x - target)) < EPSILON):
            return t
        if (x > target):
            high = t
        else:
            low = t
        t = ((high + low) * 0.5)
    return t
class AnalysisPlugin(AnalysisBasePlugin):
    """Compare a file's TLSH against all known hashes and record every file
    whose TLSH distance is small enough to count as similar."""

    NAME = 'tlsh'
    DESCRIPTION = 'find files with similar tlsh and calculate similarity value'
    DEPENDENCIES = ['file_hashes']
    VERSION = '0.2'
    FILE = __file__

    def __init__(self, *args, **kwargs):
        # DB interface must exist before the base class starts processing.
        self.db = TLSHInterface()
        super().__init__(*args, **kwargs)

    def process_object(self, file_object):
        """Attach a {uid: distance} dict of similar files to the analysis."""
        matches = {}
        hashes = file_object.processed_analysis['file_hashes']['result']
        if 'tlsh' in hashes:
            for uid, candidate_hash in self.db.get_all_tlsh_hashes():
                distance = get_tlsh_comparison(hashes['tlsh'], candidate_hash)
                # Distance <= 150 counts as similar; skip the file itself.
                if distance <= 150 and uid != file_object.uid:
                    matches[uid] = distance
        file_object.processed_analysis[self.NAME] = matches
        return file_object
# NOTE(review): the three lines below are presumably mangled parametrization
# decorators (a Parameters helper plus two named parameter sets) — confirm
# against the original test file.
_parameters()
(name='daily_buckets', time_bucket={'period': 'day', 'count': 1}, dates_step=timedelta(days=1))
(name='six_hour_buckets', time_bucket={'period': 'hour', 'count': 6}, dates_step=timedelta(hours=6))
def test_exclude_specific_dates(test_id: str, dbt_project: DbtProject, time_bucket: dict, dates_step: timedelta):
    """Anomaly test: outlier training buckets make the test fail only after
    those dates are excluded via ``anomaly_exclude_metrics``."""
    utc_now = datetime.utcnow()
    (test_bucket, *training_buckets) = generate_dates(base_date=(utc_now - timedelta(1)), step=dates_step)
    # Two training days carry outlier values; excluding them should flip the
    # test result from pass to fail.
    exclude_dates = [(utc_now - timedelta(5)).date(), (utc_now - timedelta(3)).date()]
    data: List[Dict[(str, Any)]] = [{TIMESTAMP_COLUMN: test_bucket.strftime(DATE_FORMAT), 'metric': 10}]
    data += [{TIMESTAMP_COLUMN: cur_bucket.strftime(DATE_FORMAT), 'metric': (1 if (cur_bucket.date() not in exclude_dates) else 10)} for cur_bucket in training_buckets]
    test_args = {**DBT_TEST_ARGS, 'time_bucket': time_bucket}
    # With the outliers included in training, the anomalous value blends in.
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, test_args, data=data, test_column='metric')
    assert (test_result['status'] == 'pass')
    excluded_dates_str = ', '.join([f"cast('{cur_date}' as date)" for cur_date in exclude_dates])
    test_args = {**DBT_TEST_ARGS, 'anomaly_exclude_metrics': f'metric_date in ({excluded_dates_str})', 'time_bucket': time_bucket}
    # With the outliers excluded, the test bucket is detected as anomalous.
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, test_args, test_column='metric')
    assert (test_result['status'] == 'fail')
def get_note_delete_confirm_modal_html(nid: int) -> Optional[HTML]:
    """Render the delete-confirmation modal for note ``nid``.

    Returns ``None`` when the note does not exist.
    """
    note = get_note(nid)
    if not note:
        return None
    title = utility.text.trim_if_longer_than(note.get_title(), 100)
    prio = note.priority
    if prio is None or prio == 0:
        # No priority set: neutral badge with a dash.
        label, fg, bg = '-', 'black', 'transparent'
    else:
        prio = int(prio)
        label, fg, bg = prio, 'white', utility.misc.prio_color(prio)
    priority = f"<span style='padding: 0 3px 0 3px; color: {fg} ;background: {bg}'>{label}</span>"
    return filled_template('note_delete', dict(title=title, creation_date=note.created, priority=priority, nid=nid))
# NOTE(review): bare string below is presumably a mangled route decorator
# (e.g. a POST mapping for '/knowledge/space/add') — confirm upstream.
('/knowledge/space/add')
def space_add(request: KnowledgeSpaceRequest):
    """HTTP handler: create a new knowledge space; failures are wrapped in a
    failed ``Result`` instead of propagating."""
    print(f'/space/add params: {request}')
    try:
        knowledge_space_service.create_knowledge_space(request)
        return Result.succ([])
    except Exception as e:
        return Result.failed(code='E000X', msg=f'space add error {e}')
# NOTE(review): the line below is presumably a mangled
# @pytest.mark.skipif(...) decorator — confirm against the original file.
.skipif((django.VERSION < (1, 10)), reason='MIDDLEWARE new in Django 1.10')
def test_user_info_without_auth_middleware_django_2(django_elasticapm_client, client):
    """Without Django's auth middleware, captured errors must carry an empty
    user context rather than failing or guessing a user."""
    # Drop only the auth middleware from the MIDDLEWARE list.
    with override_settings(MIDDLEWARE_CLASSES=None, MIDDLEWARE=[m for m in settings.MIDDLEWARE if (m != 'django.contrib.auth.middleware.AuthenticationMiddleware')]):
        with pytest.raises(Exception):
            client.get(reverse('elasticapm-raise-exc'))
    assert (len(django_elasticapm_client.events[ERROR]) == 1)
    event = django_elasticapm_client.events[ERROR][0]
    assert (event['context']['user'] == {})
class TestCUSUM(unittest.TestCase):
    """Run the CUSUM change-detection algorithm against fixture test cases and
    compare both the raised alerts and the last-alert wording."""

    def test_various_data(self):
        with open((settings.APPS_ROOT + '/frontend/tests/fixtures/alert_test_cases.txt')) as expected:
            test_cases = expected.readlines()
        for test in each_cusum_test(test_cases):
            cusum = bookmark_utils.CUSUM(test['data'], window_size=3, sensitivity=5)
            cusum.work()
            new_result_formatted = extract_percentiles_for_alerts(cusum.as_dict())
            # Build a descriptive failure message from the fixture metadata.
            error_message = ("In test '%s':\n" % test['name'])
            error_message += ('  Input values: %s\n' % test['data'])
            error_message += ('Expected alerts: %s\n' % test['expected'])
            self.assertEqual(new_result_formatted, test['expected'], (error_message + ('  Got: %s' % new_result_formatted)))
            # Also verify the human-readable description of the last alert.
            info = cusum.get_last_alert_info()
            if info:
                change = deltawords((info['to'] * 100.0), (info['from'] * 100.0))
                self.assertEqual(test['deltawords'], change)
            else:
                self.assertEqual(test['deltawords'], 'not at all')
def upgrade():
    """Alembic upgrade step: create the ``submissions`` table, linking each
    submission to a form and storing the submitted payload as JSON."""
    op.create_table(
        'submissions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('submitted_at', sa.DateTime(), nullable=True),
        sa.Column('form_id', sa.Integer(), nullable=True),
        sa.Column('data', postgresql.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['form_id'], ['forms.id']),
        sa.PrimaryKeyConstraint('id'),
    )
class EnsureRenderedStringPattern(PropertyPreprocessor):
    """Verify that a templated argument, once rendered, matches a required regex.

    The regex comes from ``self.properties['pattern']`` and is anchored at the
    end before matching (see ``pattern`` below).
    """
    type = 'ensure_rendered_string_pattern'
    properties_schema_cls = EnsureRenderedStringPatternSchema

    def process_arg(self, arg, node, raw_args):
        """Return *arg* unchanged if its rendered form matches the pattern.

        Raises if the configured regex does not compile, or if the rendered
        argument definitively fails to match.  When rendering is impossible
        (undefined template variables) or the rendered value still contains
        verbatim ``<<...>>`` markers, the check is skipped with a debug log.
        """
        regex = None
        try:
            regex = re.compile(self.pattern)
        except Exception:
            raise Exception('Error compiling regex for `{}`: `{}` is an invalid pattern'.format(self.type, self.properties['pattern']))
        rendered_arg = None
        try:
            rendered_arg = self.render_template(arg, raw_args)
        except jinja2.exceptions.UndefinedError:
            # Cannot render -> cannot verify; pass the argument through.
            logger.debug('Could not render template `%s`; cannot verify that the argument matches the required pattern `%s`!', arg, regex.pattern)
            return arg
        if regex.match(rendered_arg):
            return arg
        VERBATIM_REGEX = '<<.+>>'
        if re.compile(VERBATIM_REGEX).search(rendered_arg):
            # Verbatim placeholders may be expanded later; warn but do not fail.
            logger.debug('Argument generated from `%s` may not match the required pattern `%s` and fail.', rendered_arg, regex.pattern)
            return arg
        raise Exception('Invalid argument `{}`: does not match expected pattern `{}`'.format(rendered_arg, regex.pattern))

    def pattern(self):
        # NOTE(review): used as `self.pattern` (no call) in process_arg, so this
        # was presumably decorated with @property in the original source -- confirm.
        # Re-anchors the configured pattern at end-of-string.
        pattern = self.properties['pattern'].rstrip('$')
        return (pattern + '$')

    def render_template(self, arg, raw_args):
        """Render *arg* as a Jinja2 template; undefined variables raise."""
        env = jinja2.Environment(undefined=jinja2.StrictUndefined)
        return env.from_string(arg).render(**self._template_context(raw_args))

    def _template_context(raw_args):
        # NOTE(review): missing `self` -- presumably a @staticmethod whose
        # decorator was stripped; confirm against the original file.
        now = datetime.datetime.utcnow()
        context = {'ts': now.isoformat(), 'ts_nodash': now.isoformat().replace('-', '')}
        TaskTuple = namedtuple('TaskTuple', ['task_id'])
        task_id = raw_args.get('task_id')
        if task_id:
            context['task'] = TaskTuple(task_id=task_id)
        # Date-style fields mirror Airflow-like template context variables,
        # all pinned to "now" for verification purposes.
        for field_name in ['ds', 'yesterday_ds', 'tomorrow_ds', 'next_ds', 'prev_ds']:
            context[field_name] = now.strftime('%Y-%m-%d')
            context[(field_name + '_nodash')] = now.strftime('%Y%m%d')
        for field_name in ['execution_date', 'prev_execution_date', 'next_execution_date']:
            context[field_name] = now
        return context
class AccountingMethodIterator():
    """Iterate acquired lots either oldest-to-newest or newest-to-oldest.

    Walks ``acquired_lot_list`` from index 0 up to ``up_to_index`` (inclusive)
    for OLDER_TO_NEWER, or from ``up_to_index`` down to 0 for NEWER_TO_OLDER.
    """

    def __init__(self, acquired_lot_list: List[InTransaction], up_to_index: int, order_type: AcquiredLotCandidatesOrder) -> None:
        self.__acquired_lot_list = acquired_lot_list
        older_first = order_type == AcquiredLotCandidatesOrder.OLDER_TO_NEWER
        self.__start_index = 0 if older_first else up_to_index
        self.__end_index = up_to_index if older_first else 0
        self.__step = 1 if older_first else -1
        self.__index = self.__start_index
        self.__order_type = order_type

    def _check_index(self) -> bool:
        """Return True while the cursor is still within the iteration bounds."""
        if self.__order_type == AcquiredLotCandidatesOrder.OLDER_TO_NEWER:
            return self.__index <= self.__end_index
        return self.__index >= self.__end_index

    def __next__(self) -> InTransaction:
        if self._check_index():
            result: Optional[InTransaction] = self.__acquired_lot_list[self.__index]
            self.__index += self.__step
            return result
        raise StopIteration(self)
class TextOverviewPreset(MetricPreset):
    """Metric preset for descriptive analysis of a single text column.

    Generates a column summary plus text-descriptor distribution/correlation
    metrics, and adds drift metrics when reference data is available.
    """
    column_name: str

    def __init__(self, column_name: str, descriptors: Optional[Dict[str, FeatureDescriptor]] = None):
        super().__init__()
        self.column_name = column_name
        self.descriptors = descriptors

    def generate_metrics(self, data_definition: DataDefinition, additional_data: Optional[Dict[str, Any]]):
        """Return the list of metrics for the configured text column."""
        result = [
            ColumnSummaryMetric(column_name=self.column_name),
            TextDescriptorsDistribution(column_name=self.column_name, descriptors=self.descriptors),
            TextDescriptorsCorrelationMetric(column_name=self.column_name, descriptors=self.descriptors),
        ]
        # Bug fix: reference_present() yields a boolean, so comparing it with
        # `is not None` was always true and the drift metrics were emitted
        # even when no reference data was provided.  Use the value directly.
        if data_definition.reference_present():
            result.extend([
                ColumnDriftMetric(column_name=self.column_name),
                TextDescriptorsDriftMetric(column_name=self.column_name, descriptors=self.descriptors),
            ])
        return result
def rebuild_index():
    """Rebuild the RediSearch full-text index for every Wiki Space.

    Drops and recreates one index per space, then indexes all Wiki Pages whose
    route belongs to that space.  A progress flag is kept in the cache so
    other code can tell an indexing run is underway.
    """
    from redis.commands.search.field import TextField
    from redis.commands.search.indexDefinition import IndexDefinition
    from redis.exceptions import ResponseError
    r = frappe.cache()
    # Signal that indexing is in progress (cleared again at the end).
    r.set_value('wiki_page_index_in_progress', True)
    # Title matches are weighted higher than body matches.
    schema = (TextField('title', weight=3.0), TextField('content'))
    spaces = frappe.db.get_all('Wiki Space', pluck='route')
    wiki_pages = frappe.db.get_all('Wiki Page', fields=['name', 'title', 'content', 'route'])
    for space in spaces:
        try:
            drop_index(space)
            index_def = IndexDefinition(prefix=[f"{r.make_key(f'{PREFIX}{space}').decode()}:"], score=0.5, score_field='doc_score')
            r.ft(space).create_index(schema, definition=index_def)
            # Only pages whose route mentions this space get indexed here.
            records_to_index = [d for d in wiki_pages if (space in d.get('route'))]
            create_index_for_records(records_to_index, space)
        except ResponseError as e:
            # Index creation/drop problems are logged and the next space continues.
            print(e)
    # NOTE(review): if an exception other than ResponseError escapes the loop,
    # this flag is never reset -- confirm whether that is acceptable.
    r.set_value('wiki_page_index_in_progress', False)
# NOTE(review): `_type(OSPF_MSG_HELLO)` reads like a stripped
# `@_type(OSPF_MSG_HELLO)` class decorator registering this message type -- confirm.
_type(OSPF_MSG_HELLO)
class OSPFHello(OSPFMessage):
    """OSPFv2 Hello message: advertises timers, router roles and known neighbors."""
    # Fixed part: mask, hello_interval, options, priority, dead_interval, DR, BDR.
    _PACK_STR = '!4sHBBI4s4s'
    _PACK_LEN = struct.calcsize(_PACK_STR)
    _MIN_LEN = (OSPFMessage._HDR_LEN + _PACK_LEN)

    def __init__(self, length=None, router_id='0.0.0.0', area_id='0.0.0.0', au_type=1, authentication=0, checksum=None, version=_VERSION, mask='0.0.0.0', hello_interval=10, options=0, priority=1, dead_interval=40, designated_router='0.0.0.0', backup_router='0.0.0.0', neighbors=None):
        # Avoid a shared mutable default for the neighbor list.
        neighbors = (neighbors if neighbors else [])
        super(OSPFHello, self).__init__(OSPF_MSG_HELLO, length, router_id, area_id, au_type, authentication, checksum, version)
        self.mask = mask
        self.hello_interval = hello_interval
        self.options = options
        self.priority = priority
        self.dead_interval = dead_interval
        self.designated_router = designated_router
        self.backup_router = backup_router
        self.neighbors = neighbors

    def parser(cls, buf):
        # NOTE(review): takes `cls` -- presumably decorated with @classmethod
        # in the original source.
        # Unpack the fixed portion, then decode the IPv4-formatted fields.
        (mask, hello_interval, options, priority, dead_interval, designated_router, backup_router) = struct.unpack_from(cls._PACK_STR, six.binary_type(buf))
        mask = addrconv.ipv4.bin_to_text(mask)
        designated_router = addrconv.ipv4.bin_to_text(designated_router)
        backup_router = addrconv.ipv4.bin_to_text(backup_router)
        neighbors = []
        # Everything after the fixed part is a flat list of 4-byte neighbor IDs.
        binneighbors = buf[cls._PACK_LEN:len(buf)]
        while binneighbors:
            n = binneighbors[:4]
            n = addrconv.ipv4.bin_to_text(six.binary_type(n))
            binneighbors = binneighbors[4:]
            neighbors.append(n)
        return {'mask': mask, 'hello_interval': hello_interval, 'options': options, 'priority': priority, 'dead_interval': dead_interval, 'designated_router': designated_router, 'backup_router': backup_router, 'neighbors': neighbors}

    def serialize_tail(self):
        """Pack the hello-specific fields followed by all neighbor router IDs."""
        head = bytearray(struct.pack(self._PACK_STR, addrconv.ipv4.text_to_bin(self.mask), self.hello_interval, self.options, self.priority, self.dead_interval, addrconv.ipv4.text_to_bin(self.designated_router), addrconv.ipv4.text_to_bin(self.backup_router)))
        try:
            return (head + reduce((lambda a, b: (a + b)), (addrconv.ipv4.text_to_bin(n) for n in self.neighbors)))
        except TypeError:
            # reduce() raises TypeError on an empty neighbor list; the fixed
            # part alone is then the full tail.
            return head
class GlobalConstantNewton(proteus.NonlinearSolvers.NonlinearSolver):
    """Newton-style solver wrapper that delegates to ``F.globalConstantSolve``.

    Mirrors the standard Newton solver's constructor signature, but solve()
    simply forwards the tolerance and iteration limits to the model's global
    constant solve routine.
    """

    def __init__(self, linearSolver, F, J=None, du=None, par_du=None, rtol_r=0.0001, atol_r=1e-16, rtol_du=0.0001, atol_du=1e-16, maxIts=100, norm=l2Norm, convergenceTest='r', computeRates=True, printInfo=True, fullNewton=True, directSolver=False, EWtol=True, maxLSits=100):
        import copy
        self.par_du = par_du
        if (par_du is not None):
            # Let the model know the per-processor dimension for parallel runs.
            F.dim_proc = par_du.dim_proc
        NonlinearSolver.__init__(self, F, J, du, rtol_r, atol_r, rtol_du, atol_du, maxIts, norm, convergenceTest, computeRates, printInfo)
        self.updateJacobian = True
        self.fullNewton = fullNewton
        self.linearSolver = linearSolver
        self.directSolver = directSolver
        self.lineSearch = True
        self.EWtol = EWtol
        self.maxLSits = maxLSits
        if self.linearSolver.computeEigenvalues:
            # Keep deep copies of the Jacobian for eigenvalue diagnostics.
            self.JLast = copy.deepcopy(self.J)
            self.J_t_J = copy.deepcopy(self.J)
            self.dJ_t_dJ = copy.deepcopy(self.J)
            self.JLsolver = LU(self.J_t_J, computeEigenvalues=True)
            self.dJLsolver = LU(self.dJ_t_dJ, computeEigenvalues=True)
            self.u0 = numpy.zeros(self.F.dim, 'd')

    def info(self):
        """No detailed solver info is provided for this wrapper."""
        return 'Not Implemented'

    def solve(self, u, r=None, b=None, par_u=None, par_r=None):
        """Delegate the solve to the model's globalConstantSolve with our settings."""
        self.F.maxIts = self.maxIts
        self.F.maxLSits = self.maxLSits
        self.F.atol = self.atol_r
        self.F.globalConstantSolve(u, r)
        # globalConstantSolve exposes no failure condition to report here.
        self.failedFlag = False
        return self.failedFlag
def get_level_from_xp(shrine_xp: int, is_jp: bool) -> Optional[dict[str, Any]]:
    """Translate accumulated shrine XP into a level summary dict.

    Returns None when the XP boundary table is unavailable; otherwise a dict
    with the current level (capped at the table size), the maximum level,
    and the XP figure taken from the second-to-last boundary.
    """
    xp_requirements = get_boundaries(is_jp)
    if xp_requirements is None:
        return None
    # Each satisfied boundary bumps the level by one, starting from 1.
    level = 1 + sum(1 for requirement in xp_requirements if shrine_xp >= requirement)
    level = min(level, len(xp_requirements))
    return {'level': level, 'max_level': len(xp_requirements), 'max_xp': xp_requirements[-2]}
# NOTE(review): the bare `_server.*` expressions below look like stripped
# decorators (@_server.peripheral_model / @_server.tx_msg / @_server.reg_rx_handler).
_server.peripheral_model
class IEEE802_15_4(object):
    """Emulated IEEE 802.15.4 radio: queues received frames and raises an IRQ.

    State is kept at class level (one shared radio); methods taking ``cls``
    as the first argument were presumably @classmethods in the original.
    """
    IRQ_NAME = '802_15_4_RX_Frame'
    frame_queue = deque()  # received frames, oldest first
    calc_crc = True
    rx_frame_isr = None  # ISR handle registered by the firmware
    rx_isr_enabled = False
    frame_time = deque()  # arrival timestamps parallel to frame_queue
    def enable_rx_isr(cls, interface_id):
        """Enable the RX interrupt; fire immediately if frames are pending."""
        cls.rx_isr_enabled = True
        if (cls.frame_queue and (cls.rx_frame_isr is not None)):
            Interrupts.trigger_interrupt(cls.rx_frame_isr, cls.IRQ_NAME)
    def disable_rx_isr(self, interface_id):
        """Disable the RX interrupt."""
        IEEE802_15_4.rx_isr_enabled = False
    _server.tx_msg
    def tx_frame(cls, interface_id, frame):
        """Send *frame* out of the emulated radio; return the message dict."""
        print(('Sending Frame (%i): ' % len(frame)), binascii.hexlify(frame))
        msg = {'frame': frame}
        return msg
    _server.reg_rx_handler
    def rx_frame(cls, msg):
        """Queue a received frame and trigger the RX interrupt when enabled."""
        frame = msg['frame']
        log.info(('Received Frame: %s' % binascii.hexlify(frame)))
        cls.frame_queue.append(frame)
        cls.frame_time.append(time.time())
        if ((cls.rx_frame_isr is not None) and cls.rx_isr_enabled):
            Interrupts.trigger_interrupt(cls.rx_frame_isr, cls.IRQ_NAME)
    def get_first_frame(cls, get_time=False):
        """Pop and return the oldest frame (plus its timestamp when requested).

        Returns None (or (None, None)) when no frame is queued.
        """
        frame = None
        rx_time = None
        log.info('Checking for frame')
        # Bug fix: the original compared the deque itself against an int
        # (`cls.frame_queue > 0`), which raises TypeError on Python 3;
        # test the queue length instead.
        if (len(cls.frame_queue) > 0):
            log.info('Returning frame')
            frame = cls.frame_queue.popleft()
            rx_time = cls.frame_time.popleft()
        if get_time:
            return (frame, rx_time)
        else:
            return frame
    def has_frame(cls):
        """Return True when at least one frame is queued."""
        return (len(cls.frame_queue) > 0)
    def get_frame_info(cls):
        """Return (queue length, length of the oldest frame), or (0, 0)."""
        queue = cls.frame_queue
        if queue:
            return (len(queue), len(queue[0]))
        return (0, 0)
def extractAlmightyEditorBplacedNet(item):
    """Map a feed item from almightyeditor.bplaced.net to a release message.

    Returns None for previews or items with no volume/chapter information,
    a release message for known tags, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag in feed, display name, translation type)
    tagmap = [
        ('Isekai Tensei Harem', 'Isekai Tensei Harem', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_kronos_device_creation(client: TestClient):
    """Posting an already-registered device returns its identifiers unchanged."""
    from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_DEVICE
    response = client.post('/api/v1/kronos/devices', json=SAMPLE_DEVICE)
    assert response.status_code == 200
    expected = {
        'hid': SAMPLE_DEVICE_HID,
        'links': {},
        'message': 'device is already registered',
        'pri': f'arw:krn:dev:{SAMPLE_DEVICE_HID}',
    }
    assert response.json() == expected
class MT48LC16M16(SDRModule):
    """Micron MT48LC16M16 SDR SDRAM: geometry and timing parameters."""
    # geometry
    nbanks = 4
    nrows = 8192
    ncols = 512
    # timings
    # Bug fix: tREFI was `.0 / 8192` (i.e. 0.0), which would mean a zero
    # refresh interval; the conventional value is a 64 ms refresh window
    # spread over 8192 rows (64e6 ns / 8192), as other SDR modules use.
    technology_timings = _TechnologyTimings(tREFI=(64e6 / 8192), tWTR=(2, None), tCCD=(1, None), tRRD=(None, 15))
    speedgrade_timings = {'default': _SpeedgradeTimings(tRP=20, tRCD=20, tWR=15, tRFC=(None, 66), tFAW=None, tRAS=44)}
class Vulnerability():
    """A single vulnerability finding: a matching rule plus reporting metadata."""

    def __init__(self, rule, description, reliability, score, link, short_name):
        try:
            self.reliability = str(int(reliability))
            self.score = score
            self.description = description
            self.rule = rule
            self.link = link
            self.short_name = short_name
            self._make_type_assertions(link, rule)
        except (ValueError, TypeError) as exception:
            # Normalize any bad construction data into a single rule error.
            raise BadRuleError(str(exception))

    def _make_type_assertions(self, link, rule):
        """Validate every field; raise ValueError on the first violation."""
        checks = [
            (int(self.reliability) in range(0, 101), 'reliability must be between 0 and 100'),
            (self.score in ['low', 'medium', 'high'], 'score has to be one of low, medium or high'),
            (isinstance(self.description, str), 'description must be a string'),
            (isinstance(self.rule, (SingleRule, MetaRule, SubPathRule)), f'rule must be of type in [SingleRule, MetaRule, SubPathRule]. Has type {type(rule)}'),
            (isinstance(self.link, str) or not link, 'if link is set it has to be a string'),
            (isinstance(self.short_name, str), 'short_name has to be a string'),
        ]
        for passed, error_message in checks:
            if not passed:
                raise ValueError(error_message)

    def get_dict(self):
        """Serialize the reporting metadata (the rule object is excluded)."""
        return {
            'description': self.description,
            'score': self.score,
            'reliability': self.reliability,
            'link': self.link,
            'short_name': self.short_name,
        }
class OptionSeriesArcdiagramStatesSelectMarker(Options):
    """Highcharts ``series.arcdiagram.states.select.marker`` options wrapper.

    NOTE(review): each option appears as a getter/setter pair with the same
    name; the getters were presumably decorated with @property and the
    setters with @<name>.setter in the generated original -- confirm.
    Getter arguments to ``_config_get`` are the option defaults.
    """
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
def DoExpandDim(alloc_cursor, alloc_dim, indexing):
    """Prepend a new dimension of extent ``alloc_dim`` to an allocation.

    Rewrites the allocation's type to ``[alloc_dim] + old shape`` and rewrites
    every read/write/window of the buffer in the rest of the block to index
    the new leading dimension with ``indexing``.  Returns ``(ir, fwd)``: the
    new IR plus a cursor-forwarding function.
    """
    alloc_s = alloc_cursor._node
    assert isinstance(alloc_s, LoopIR.Alloc)
    assert isinstance(alloc_dim, LoopIR.expr)
    assert isinstance(indexing, LoopIR.expr)
    # The new leading extent must be provably positive.
    Check_IsPositiveExpr(alloc_cursor.get_root(), [alloc_s], alloc_dim)
    old_typ = alloc_s.type
    new_rngs = [alloc_dim]
    if isinstance(old_typ, T.Tensor):
        new_rngs += old_typ.shape()
    basetyp = old_typ.basetype()
    new_typ = T.Tensor(new_rngs, False, basetyp)
    new_alloc = alloc_s.update(type=new_typ)
    (ir, fwd) = alloc_cursor._child_node('type')._replace(new_typ)
    def mk_read(c):
        # Rewrite reads and window expressions to index the new leading dim.
        rd = c._node
        if (isinstance(c.parent()._node, LoopIR.Call) and (not rd.idx)):
            raise SchedulingError('TODO: Please Contact the developers to fix (i.e. add) support for passing windows to scalar arguments')
        if isinstance(rd, LoopIR.Read):
            return {'idx': ([indexing] + rd.idx)}
        elif isinstance(rd, LoopIR.WindowExpr):
            return {'idx': ([LoopIR.Point(indexing, rd.srcinfo)] + rd.idx)}
        else:
            raise NotImplementedError(f'Did not implement {type(rd)}. This may be a bug.')
    def mk_write(c):
        # Writes simply gain the new leading index.
        s = c._node
        return {'idx': ([indexing] + s.idx)}
    for c in get_rest_of_block(alloc_cursor):
        (ir, fwd) = _replace_reads(ir, fwd, c, alloc_s.name, mk_read)
        (ir, fwd) = _replace_writes(ir, fwd, c, alloc_s.name, mk_write)
    # Re-check bounds of all statements that follow the (rewritten) alloc.
    after_alloc = [c._node for c in get_rest_of_block(fwd(alloc_cursor))]
    Check_Bounds(ir, new_alloc, after_alloc)
    return (ir, fwd)
class KeystoreCrypto(BytesDataclass):
    """Crypto section of a keystore: kdf, checksum and cipher module configs."""
    kdf: KeystoreModule = KeystoreModule()
    checksum: KeystoreModule = KeystoreModule()
    cipher: KeystoreModule = KeystoreModule()
    def from_json(cls, json_dict: Dict[(Any, Any)]) -> 'KeystoreCrypto':
        # NOTE(review): takes `cls` -- presumably decorated with @classmethod
        # in the original source; confirm.
        # Build each sub-module from its corresponding JSON sub-dict.
        kdf = KeystoreModule(**json_dict['kdf'])
        checksum = KeystoreModule(**json_dict['checksum'])
        cipher = KeystoreModule(**json_dict['cipher'])
        return cls(kdf=kdf, checksum=checksum, cipher=cipher)
class SuperCollider(Instrument):
    """Instrument adapter for the SuperCollider audio synthesis backend."""
    NAME = 'supercollider'

    def __init__(self, args):
        Instrument.__init__(self, SuperCollider.NAME)
        self.initialized = False

    def enable(self):
        """Mark the instrument as initialized."""
        self.initialized = True

    def enabled(self):
        """Return whether enable() has been called.

        Bug fix: the original returned ``self.enabled`` -- the bound method
        itself, which is always truthy -- instead of the ``initialized`` flag.
        """
        return self.initialized

    def supported(self):
        # Supported only when the module-level ERROR flag is clear.
        return (not ERROR)

    def support(self):
        """Return the list of backend names this instrument supports."""
        return ['supercollider']

    def stop(self):
        """Nothing to tear down."""
        pass
class LCInfo():
    """Locale- and currency-aware formatting for ledger entries.

    Supports the 'en_US' and 'nl_NL' locales with the 'USD'/'EUR' currencies:
    date format, thousands/decimal separators, negative-amount markers and
    column headers all depend on the locale.
    """

    def __init__(self, locale, currency, columns):
        self.columns = columns
        if (locale == 'en_US'):
            headers = ['Date', 'Description', 'Change']
            self.datefmt = '{0.month:02}/{0.day:02}/{0.year:04}'
            # Symbol directly before the amount; negatives wrapped in parens.
            self.cur_fmt = '{}{}{}{}'
            self.lead_neg = '('
            self.trail_neg = ')'
            self.thousands = ','
            self.decimal = '.'
        elif (locale == 'nl_NL'):
            headers = ['Datum', 'Omschrijving', 'Verandering']
            self.datefmt = '{0.day:02}-{0.month:02}-{0.year:04}'
            # Symbol, space, then the (possibly minus-signed) amount.
            self.cur_fmt = '{1} {0}{2}{3}'
            self.lead_neg = '-'
            self.trail_neg = ' '
            self.thousands = '.'
            self.decimal = ','
        fmt = ROW_FMT.format('<', *columns)
        self.headers = fmt.format(*headers)
        # Bug fix: the euro sign had been lost (empty string), leaving EUR
        # amounts without any currency symbol.
        self.cur_symbol = {'USD': '$', 'EUR': '\u20ac'}.get(currency)

    def number(self, n):
        """Format the integer cent amount *n* (sign ignored) with locale separators."""
        (n_int, n_float) = divmod(abs(n), 100)
        n_int_parts = []
        while (n_int > 0):
            (n_int, idx) = divmod(n_int, 1000)
            n_int_parts.insert(0, str(idx))
        return '{}{}{:02}'.format((self.thousands.join(n_int_parts) or '0'), self.decimal, n_float)

    def currency(self, change):
        """Format a signed cent amount with currency symbol and negative markers."""
        return self.cur_fmt.format((self.lead_neg if (change < 0) else ''), self.cur_symbol, self.number(change), (self.trail_neg if (change < 0) else ' '))

    def entry(self, entry):
        """Format one (date, change, description) ledger entry as a table row."""
        (date, change, desc) = entry
        fmt = ROW_FMT.format('>', *self.columns)
        return fmt.format(self.datefmt.format(date), truncate(desc), self.currency(change))

    def table(self, entries):
        """Render the header plus all entries, sorted, one row per line."""
        lines = [self.headers]
        lines.extend(map(self.entry, sorted(entries)))
        return '\n'.join(lines)
class AdCreativeCollectionThumbnailInfo(AbstractObject):
    """Auto-generated-SDK-style API object for collection thumbnail info."""
    def __init__(self, api=None):
        super(AdCreativeCollectionThumbnailInfo, self).__init__()
        self._isAdCreativeCollectionThumbnailInfo = True
        self._api = api
    class Field(AbstractObject.Field):
        # API field names exposed by this object.
        element_child_index = 'element_child_index'
        element_crops = 'element_crops'
        element_id = 'element_id'
    # Field name -> API type mapping used for (de)serialization.
    _field_types = {'element_child_index': 'int', 'element_crops': 'AdsImageCrops', 'element_id': 'string'}
    def _get_field_enum_info(cls):
        # NOTE(review): takes `cls` -- presumably a @classmethod whose
        # decorator was stripped.  This object defines no field enums.
        field_enum_info = {}
        return field_enum_info
class EventQuery(EqlNode):
    """AST node for ``<event_type> where <query>``."""
    __slots__ = ('event_type', 'query')
    template = Template('$event_type where $query')

    def __init__(self, event_type, query):
        self.event_type = event_type
        self.query = query

    def _render(self):
        rendered = self.query.render()
        if '\n' not in rendered:
            # Single-line queries render via the class template.
            return super(EventQuery, self)._render()
        # Multi-line queries get the condition indented on its own lines.
        return '{} where\n{}'.format(self.event_type, self.indent(rendered))
def transform_df_to_time_count_view(period_data: pd.Series, datetime_column_name: str, datetime_data: pd.Series, data_column_name: str, column_data: pd.Series):
    """Aggregate values per (period, value) pair into a long-format count table.

    Returns a DataFrame with columns ``['period', data_column_name,
    'num', datetime_column_name]`` content-wise, where ``num`` is the
    occurrence count and the datetime column holds each period's start
    timestamp.  Rows with a zero count are dropped.
    """
    frame = pd.DataFrame({
        'period': period_data,
        datetime_column_name: datetime_data,
        data_column_name: column_data,
    })
    counts = frame.groupby(['period', data_column_name]).size()
    counts.name = 'num'
    counts = counts.reset_index()
    # Replace the raw datetimes with each period's start timestamp.
    counts[datetime_column_name] = counts['period'].dt.to_timestamp()
    return counts[counts['num'] > 0]
class StatusBaseInformationError(ErsiliaError):
    """Raised when a model's status field holds a value outside the allowed set."""

    def __init__(self):
        self.message = 'Wrong Ersilia status'
        allowed = ', '.join(_read_default_fields('Status'))
        self.hints = 'Only one of the following status is allowed: {}'.format(allowed)
        ErsiliaError.__init__(self, self.message, self.hints)
class FancyValidator(Validator):
    """Base class for validators with separate conversion and validation hooks.

    ``to_python``/``from_python`` orchestrate the overridable hooks
    ``_convert_to_python``/``_convert_from_python`` (conversion) and
    ``_validate_python``/``_validate_other`` (validation), with support for
    empty-value handling, stripping, and ``if_invalid`` fallback values.
    """
    # Fallback results returned instead of raising Invalid (NoDefault = re-raise).
    if_invalid = NoDefault
    if_invalid_python = NoDefault
    if_empty = NoDefault
    not_empty = False
    accept_python = True
    strip = False
    messages = dict(empty=_('Please enter a value'), badType=_('The input must be a string (not a %(type)s: %(value)r)'), noneType=_('The input must be a string (not None)'))
    _inheritance_level = 0
    # (old name, new name) pairs: old-style overrides keep working, and the
    # old names stay callable (with a deprecation shim) when only new ones exist.
    _deprecated_methods = (('_to_python', '_convert_to_python'), ('_from_python', '_convert_from_python'), ('validate_python', '_validate_python'), ('validate_other', '_validate_other'))

    def __classinit__(cls, new_attrs):
        """Wire deprecated method names to their renamed counterparts."""
        Validator.__classinit__(cls, new_attrs)
        cls._inheritance_level += 1
        if ('_deprecated_methods' in new_attrs):
            cls._deprecated_methods = (cls._deprecated_methods + new_attrs['_deprecated_methods'])
        for (old, new) in cls._deprecated_methods:
            if (old in new_attrs):
                if (new not in new_attrs):
                    # Subclass still defines the old name only: warn and alias.
                    deprecation_warning(old, new, stacklevel=(cls._inheritance_level + 2))
                    setattr(cls, new, new_attrs[old])
            elif (new in new_attrs):
                setattr(cls, old, deprecated(old=old, new=new)(new_attrs[new]))

    def to_python(self, value, state=None):
        """Convert external input to a Python value, validating along the way."""
        try:
            if (self.strip and isinstance(value, str)):
                value = value.strip()
            elif hasattr(value, 'mixed'):
                # e.g. multi-dict style inputs collapse to a plain value.
                value = value.mixed()
            if self.is_empty(value):
                if self.not_empty:
                    raise Invalid(self.message('empty', state), value, state)
                if (self.if_empty is not NoDefault):
                    return self.if_empty
                return self.empty_value(value)
            # Hook order: validate_other -> convert_to_python -> validate_python.
            vo = self._validate_other
            if (vo and (vo is not self._validate_noop)):
                vo(value, state)
            tp = self._convert_to_python
            if tp:
                value = tp(value, state)
            vp = self._validate_python
            if (vp and (vp is not self._validate_noop)):
                vp(value, state)
        except Invalid:
            value = self.if_invalid
            if (value is NoDefault):
                raise
        return value

    def from_python(self, value, state=None):
        """Convert a Python value back to its external representation."""
        try:
            if (self.strip and isinstance(value, str)):
                value = value.strip()
            if (not self.accept_python):
                # Fully validate before and after conversion.
                if self.is_empty(value):
                    if self.not_empty:
                        raise Invalid(self.message('empty', state), value, state)
                    return self.empty_value(value)
                vp = self._validate_python
                if (vp and (vp is not self._validate_noop)):
                    vp(value, state)
                fp = self._convert_from_python
                if fp:
                    value = fp(value, state)
                vo = self._validate_other
                if (vo and (vo is not self._validate_noop)):
                    vo(value, state)
            else:
                # Trust Python-side values: only convert the non-empty ones.
                if self.is_empty(value):
                    return self.empty_value(value)
                fp = self._convert_from_python
                if fp:
                    value = fp(value, state)
        except Invalid:
            value = self.if_invalid_python
            if (value is NoDefault):
                raise
        return value

    def is_empty(self, value):
        """Return True when *value* counts as empty input."""
        return is_empty(value)

    def empty_value(self, value):
        """Value substituted for empty input."""
        return None

    def assert_string(self, value, state):
        """Raise Invalid unless *value* is a string."""
        if (not isinstance(value, str)):
            raise Invalid(self.message('badType', state, type=type(value), value=value), value, state)

    def base64encode(self, value):
        """Return the base64 encoding of *value* with no embedded newlines.

        Bug fix: the original used ``value.encode('base64')`` -- a Python 2
        codec that does not exist on Python 3 (which this module otherwise
        targets, judging by its ``isinstance(value, str)`` checks).
        """
        import base64
        return base64.b64encode(value.encode('utf-8')).decode('ascii')

    def _validate_noop(self, value, state=None):
        """Sentinel no-op used to detect unimplemented hooks."""
        pass

    _validate_python = _validate_other = _validate_noop
    _convert_to_python = _convert_from_python = None
def test_double_slash_fails(casper, concise_casper, funded_account, validation_key, deposit_amount, induct_validator, mk_slash_votes, assert_tx_failed):
    """Slashing the same vote pair twice must fail the second time."""
    validator_index = induct_validator(funded_account, validation_key, deposit_amount)
    vote_1, vote_2 = mk_slash_votes(validator_index, validation_key)
    # First slash succeeds while the pair is still slashable.
    assert concise_casper.slashable(vote_1, vote_2)
    casper.functions.slash(vote_1, vote_2).transact()
    # The pair is consumed: a second slash attempt must revert.
    assert not concise_casper.slashable(vote_1, vote_2)

    def slash_again():
        return casper.functions.slash(vote_1, vote_2).transact()

    assert_tx_failed(slash_again)
def get_fastas_cmd(o_dir, e_dir, o_file, request, incomplete=False):
    """Build the phyluce get-fastas-from-match-counts command line.

    Parameters:
        o_dir: output/log directory.
        e_dir: expected-data directory holding probe-match and spades results.
        o_file: path for the output fasta.
        request: pytest request fixture (supplies the repository root).
        incomplete: when True, also request an incomplete-matrix file.

    Bug fix: the original repeated the ``--locus-db <path>`` pair twice;
    the duplicate is dropped here.
    """
    program = 'bin/assembly/phyluce_assembly_get_fastas_from_match_counts'
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--locus-db', os.path.join(e_dir, 'probe-match', 'probe.matches.sqlite'),
        '--contigs', os.path.join(e_dir, 'spades', 'contigs'),
        '--match-count-output', os.path.join(e_dir, 'taxon-set.complete.conf'),
    ]
    cmd.extend(['--output', o_file, '--log-path', o_dir])
    if incomplete:
        cmd.extend(['--incomplete-matrix', os.path.join(o_dir, 'taxon-set.incomplete')])
    return cmd
def test_phi_function_in_head3():
    """Liveness analysis on a single self-looping block whose head holds phi functions."""
    u1 = Variable('u', Integer.int32_t(), 1)
    u2 = Variable('u', Integer.int32_t(), 2)
    v0 = Variable('v', Integer.int32_t(), 0)
    v1 = Variable('v', Integer.int32_t(), 1)
    # Block: u1 = phi(v0, u2); v1 = phi(v0, u1); u2 = v0 + v1
    node = BasicBlock(0, [Phi(u1, [v0, u2]), Phi(v1, [v0, u1]), Assignment(u2, BinaryOperation(OperationType.plus, [v0, v1]))])
    # Map each phi operand to its originating block (None = entry edge).
    node.instructions[0]._origin_block = {None: v0, node: u2}
    node.instructions[1]._origin_block = {None: v0, node: u1}
    cfg = ControlFlowGraph()
    cfg.add_node(node)
    # Self loop: the block is its own predecessor for the phi back-edges.
    cfg.add_edges_from([UnconditionalEdge(node, node)])
    liveness_analysis = LivenessAnalysis(cfg)
    print(liveness_analysis.live_in_of(node), liveness_analysis.live_out_of(node))
    # Only v0 flows in over the entry edge; loop-carried values stay live.
    assert (liveness_analysis._uses_phi_block[None] == {v0})
    assert ((liveness_analysis.live_in_of(node) == {v0, v1, u1}) and (liveness_analysis.live_out_of(node) == {u2, u1, v0}))
class TokenBase(object):
    """A parsed token: prefix and intermediate bytes plus the final content."""

    def __init__(self, prefix, intermediate, content):
        self.prefix = prefix
        self.intermediate = intermediate
        self.content = content

    def string(self):
        """Return the token's content portion."""
        return self.content

    def __repr__(self):
        return "<{:14} - contents: '{}' '{}' '{}'>".format(
            self.__class__.__name__, self.prefix, self.intermediate, self.content)
class OptionSeriesXrangeAccessibilityPoint(Options):
    """Highcharts ``series.xrange.accessibility.point`` options wrapper.

    NOTE(review): each option appears as a getter/setter pair with the same
    name; the getters were presumably decorated with @property and the
    setters with @<name>.setter in the generated original -- confirm.
    Getter arguments to ``_config_get`` are the option defaults.
    """
    def dateFormat(self):
        return self._config_get(None)
    def dateFormat(self, text: str):
        self._config(text, js_type=False)
    def dateFormatter(self):
        return self._config_get(None)
    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)
    def describeNull(self):
        return self._config_get(True)
    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)
    def descriptionFormat(self):
        return self._config_get(None)
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def descriptionFormatter(self):
        return self._config_get(None)
    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valueDescriptionFormat(self):
        return self._config_get('{xDescription}{separator}{value}.')
    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
def main(sleep_timeout: int) -> None:
    """Continuously replay production CSV rows to the monitoring service.

    Loads every dataset's production.csv from ./datasets, then loops over row
    indices, sending one row per dataset each iteration and sleeping
    ``sleep_timeout`` seconds between iterations (wrapping around shorter
    datasets).
    """
    datasets_path = os.path.abspath('datasets')
    if not os.path.exists(datasets_path):
        exit('Cannot find datasets, try to run run_example.py script for initial setup')
    # Bug fix: the message referenced the module-level `args.timeout`; use the
    # function's own `sleep_timeout` parameter so the function is self-contained.
    print(f'Get production data from {datasets_path} and send the data to monitoring service each {sleep_timeout} seconds')
    datasets = {}
    max_index = 0
    for dataset_name in os.listdir(datasets_path):
        production_data_path = os.path.join(datasets_path, dataset_name, 'production.csv')
        new_data = pd.read_csv(production_data_path)
        datasets[dataset_name] = new_data
        max_index = max(max_index, new_data.shape[0])
    for idx in range(0, max_index):
        for dataset_name, dataset in datasets.items():
            dataset_size = dataset.shape[0]
            # Wrap around shorter datasets so every dataset sends a row each tick.
            data = dataset.iloc[idx % dataset_size].to_dict()
            send_data_row(dataset_name, data)
        print(f'Wait {sleep_timeout} seconds till the next try.')
        time.sleep(sleep_timeout)
class TFLogger(SummaryWriter, Logger):
    """Logger that mirrors every metric to TensorBoard and an in-memory Logger.

    Hyperparameters are persisted to params.json / params.pickle in
    ``log_dir``; scalar history is periodically flushed to db.csv, and a
    ``done`` marker file is written on close().
    """
    def __init__(self, log_dir=None, hps={}, save_every=1):
        # NOTE(review): mutable default `hps={}` is shared across calls; it is
        # only read here, but confirm no caller mutates it.
        SummaryWriter.__init__(self, log_dir=log_dir)
        Logger.__init__(self, **hps)
        # name -> iteration at which the last throttled sample was recorded.
        self.for_save_every = {}
        self.save_every = save_every
        f = open((log_dir + '/params.json'), 'wt')
        f.write((str(hps) + '\n'))
        f.close()
        self.log_dir = log_dir
        outfile = open((log_dir + '/params.pickle'), 'wb')
        pickle.dump(hps, outfile)
        outfile.close()
        self.last_csv_update_iteration = 0
        self.csvname = (log_dir + '/db.csv')
        self.add_text('Hyperparameters', str(hps))
    def add_images(self, name, value, iteration):
        """Record images in both backends."""
        Logger.add_images(self, name, value, iteration)
        SummaryWriter.add_images(self, name, value, iteration)
    def add_scalar(self, name, value, iteration):
        """Record a numeric scalar, throttled to one sample per `save_every` iterations."""
        if (self.save_every > 1):
            if (not (name in self.for_save_every)):
                # First sample for this metric: always record it.
                if (isinstance(value, int) or isinstance(value, float)):
                    SummaryWriter.add_scalar(self, name, value, iteration)
                    Logger.add_scalar(self, name, value, iteration)
                    self.for_save_every[name] = iteration
            elif ((iteration - self.for_save_every[name]) > self.save_every):
                if (isinstance(value, int) or isinstance(value, float)):
                    SummaryWriter.add_scalar(self, name, value, iteration)
                    Logger.add_scalar(self, name, value, iteration)
                    self.for_save_every[name] = iteration
        else:
            if (isinstance(value, int) or isinstance(value, float)):
                SummaryWriter.add_scalar(self, name, value, iteration)
                Logger.add_scalar(self, name, value, iteration)
    def update_csv(self):
        """Append scalar values recorded since the last flush to db.csv."""
        length = len(self.all_values)
        values_to_save = self.all_values[self.last_csv_update_iteration:length]
        values = []
        for i in range(len(values_to_save)):
            l = values_to_save[i]
            if (len(l) > 0):
                vv = {**l, 'training_iteration': (i + self.last_csv_update_iteration)}
                # Prefix hyperparameters so they are distinguishable in the CSV.
                vv = {**{('_hp_' + k): self.config[k] for k in self.config}, **vv}
                values.append(vv)
        with open(self.csvname, 'a+') as output_file:
            dict_writer = csv.DictWriter(output_file, fieldnames=list(self.keys.keys()))
            if (self.last_csv_update_iteration == 0):
                # First flush of this run: emit the CSV header.
                dict_writer.writeheader()
            dict_writer.writerows(values)
        self.last_csv_update_iteration = length
    def close(self):
        """Flush the CSV, close both backends and drop a `done` marker file."""
        SummaryWriter.close(self)
        Logger.close(self)
        self.update_csv()
        f = open((self.log_dir + '/done'), 'wt')
        f.write('Done\n')
        f.close()
def _back_sub_matrix(a: Matrix, b: Matrix, s: Shape) -> Matrix:
(size1, size2) = s
for i in range((size1 - 1), (- 1), (- 1)):
v = b[i]
for j in range((i + 1), size1):
for k in range(size2):
v[k] -= (a[i][j] * b[j][k])
for j in range(size2):
b[i][j] /= a[i][i]
return b |
class AppComponentMeta(ComponentMeta):
    """Metaclass for app components living on both the Python and JS side.

    For every concrete class it synthesizes a matching JS-side class
    (``cls.JS``): LocalComponent subclasses get a JS proxy, ProxyComponent
    subclasses get a JS local implementation.  All created classes are
    collected in ``CLASSES``.
    """
    CLASSES = []
    __repr__ = meta_repr
    def _init_hook1(cls, cls_name, bases, dct):
        # First hook: build the JS counterpart class and record module/CSS info.
        CSS = dct.get('CSS', '')
        if issubclass(cls, LocalComponent):
            cls._make_js_proxy_class(cls_name, bases, dct)
        elif issubclass(cls, ProxyComponent):
            cls._make_js_local_class(cls_name, bases, dct)
        else:
            raise TypeError('Expected class to inherit from LocalComponent or ProxyComponent.')
        cls.__jsmodule__ = get_mod_name(sys.modules[cls.__module__])
        cls.JS.__jsmodule__ = cls.__jsmodule__
        cls.JS.__module__ = cls.__module__
        cls.CSS = CSS
        try:
            # CSS lives on the Python class only.
            delattr(cls.JS, 'CSS')
        except AttributeError:
            pass
    def _init_hook2(cls, cls_name, bases, dct):
        # Second hook: mirror property/emitter metadata across the Py/JS pair
        # and generate the JS code.
        if issubclass(cls, LocalComponent):
            cls.__proxy_properties__ = cls.JS.__properties__
            cls.JS.__emitters__ = cls.__emitters__
        else:
            cls.JS.__proxy_properties__ = cls.__properties__
            cls.__emitters__ = cls.JS.__emitters__
        cls.JS.CODE = cls._get_js()
        AppComponentMeta.CLASSES.append(cls)
    def _make_js_proxy_class(cls, cls_name, bases, dct):
        """Create cls.JS as a ProxyComponent exposing this local class's API."""
        for c in bases:
            assert (not issubclass(cls, ProxyComponent))
        jsbases = [b.JS for b in cls.__bases__ if hasattr(b, 'JS')]
        if (not jsbases):
            jsbases.append(ProxyComponent)
        jsdict = {}
        for (name, val) in dct.items():
            if (name.startswith('__') and name.endswith('__')):
                continue
            elif isinstance(val, LocalProperty):
                # Local-only properties are not exposed on the proxy.
                pass
            elif isinstance(val, Property):
                jsdict[name] = val
            elif isinstance(val, EmitterDescriptor):
                jsdict[name] = make_proxy_emitter(val)
            elif isinstance(val, ActionDescriptor):
                jsdict[name] = make_proxy_action(val)
            else:
                # Plain attributes/methods stay Python-only.
                pass
        cls.JS = ComponentMetaJS(cls_name, tuple(jsbases), jsdict)
    def _make_js_local_class(cls, cls_name, bases, dct):
        """Create cls.JS as the real implementation; keep proxies on the Py side."""
        for c in bases:
            assert (not issubclass(cls, LocalComponent))
        jsbases = [b.JS for b in cls.__bases__ if hasattr(b, 'JS')]
        if (not jsbases):
            jsbases.append(LocalComponent)
        jsdict = {}
        py_only = ['_repr_html_']
        for (name, val) in list(dct.items()):
            if isinstance(val, classmethod):
                continue
            elif ((name in py_only) or (name.startswith('__') and name.endswith('__'))):
                # Only __init__/__linenr__ of the dunder names go to JS.
                if (name not in ('__init__', '__linenr__')):
                    continue
            if (isinstance(val, Property) or (callable(val) and name.endswith('_validate'))):
                jsdict[name] = val
                if isinstance(val, LocalProperty):
                    delattr(cls, name)
                    dct.pop(name, None)
            elif isinstance(val, EmitterDescriptor):
                jsdict[name] = val
                setattr(cls, name, make_proxy_emitter(val))
            elif isinstance(val, ActionDescriptor):
                jsdict[name] = val
                setattr(cls, name, make_proxy_action(val))
            else:
                # Plain attributes/methods move wholly to the JS side.
                jsdict[name] = val
                delattr(cls, name)
                dct.pop(name, None)
        cls.JS = ComponentMetaJS(cls_name, tuple(jsbases), jsdict)
    def _get_js(cls):
        """Compile cls.JS to its JavaScript source (a JSString with .meta)."""
        cls_name = cls.__name__
        base_class = cls.JS.mro()[1]
        base_class_name = ('%s.prototype' % base_class.__name__)
        code = []
        c = create_js_component_class(cls.JS, cls_name, base_class_name)
        meta = c.meta
        code.append(c)
        if (cls.__name__ == 'JsComponent'):
            # The root JS component also carries the shared base-class code.
            c = cls._get_js_of_base_classes()
            for k in ['vars_unknown', 'vars_global', 'std_functions', 'std_methods']:
                meta[k].update(c.meta[k])
            code.insert(0, c)
        js = JSString('\n'.join(code))
        js.meta = meta
        return js
    def _get_js_of_base_classes(cls):
        """Generate JS for the four shared base component classes, merging meta."""
        c1 = create_js_component_class(BaseAppComponent, 'BaseAppComponent', 'Component.prototype')
        c2 = create_js_component_class(LocalComponent, 'LocalComponent', 'BaseAppComponent.prototype')
        c3 = create_js_component_class(ProxyComponent, 'ProxyComponent', 'BaseAppComponent.prototype')
        c4 = create_js_component_class(StubComponent, 'StubComponent', 'BaseAppComponent.prototype')
        meta = c1.meta
        for k in ['vars_unknown', 'vars_global', 'std_functions', 'std_methods']:
            for c in (c2, c3, c4):
                meta[k].update(c.meta[k])
        js = JSString('\n'.join([c1, c2, c3, c4]))
        js.meta = meta
        return js
class OptionSeriesPieDatalabelsTextpath(Options):
    """Highcharts ``series.pie.dataLabels.textPath`` options wrapper.

    NOTE(review): getter/setter pairs share one name; presumably @property /
    @<name>.setter decorators were stripped from the generated original.
    Getter arguments to ``_config_get`` are the option defaults.
    """
    def attributes(self):
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class ScreenChannel(ChannelInterface):
    """gRPC channel wrapper for the screen (terminal session) service of a
    node-manager daemon.

    Synchronous helpers return results directly; the ``*_threaded`` variants
    run in a background thread and report through the Qt signals below.
    """

    # Emitted by _multiple_screens() with (grpc_url, result).
    multiple_screens = Signal(str, dict)
    # Emitted by _log_dir_size() with (grpc_url, size).
    log_dir_size_signal = Signal(str, float)

    def clear_cache(self, grpc_url=''):
        """No cache is kept for screens; present for interface symmetry."""
        pass

    def get_screen_manager(self, uri='localhost:12321'):
        """Return a (stub, channel) pair for the screen service at *uri*.

        The caller is responsible for closing the returned channel.
        """
        channel = self.get_insecure_channel(uri)
        return (sstub.ScreenStub(channel), channel)

    def get_all_screens(self, grpc_url='grpc://localhost:12321'):
        """Return all screens reported by the daemon at *grpc_url*."""
        rospy.logdebug('get all screens from %s' % grpc_url)
        (uri, _) = nmdurl.split(grpc_url)
        (sm, channel) = self.get_screen_manager(uri)
        try:
            return sm.all_screens()
        finally:
            # Always release the channel, even if the RPC fails.
            self.close_channel(channel, uri)

    def get_screens(self, grpc_url='grpc://localhost:12321', node=''):
        """Return the screens for *node* from the daemon at *grpc_url*."""
        rospy.logdebug('get screen from %s for %s' % (grpc_url, node))
        (uri, _) = nmdurl.split(grpc_url)
        (sm, channel) = self.get_screen_manager(uri)
        try:
            return sm.screens(node)
        finally:
            self.close_channel(channel, uri)

    def multiple_screens_threaded(self, grpc_url='grpc://localhost:12321'):
        """Fetch all screens in a background thread; emits multiple_screens."""
        self._threads.start_thread('mst_%s' % grpc_url, target=self._multiple_screens, args=(grpc_url,))

    def _multiple_screens(self, grpc_url='grpc://localhost:12321'):
        """Worker for multiple_screens_threaded(); reports errors via self.error."""
        rospy.logdebug('get multiple screens from %s' % grpc_url)
        # Pre-initialize so the except/finally blocks never reference
        # unbound names when nmdurl.split() or channel creation fails.
        uri = grpc_url
        channel = None
        try:
            (uri, _) = nmdurl.split(grpc_url)
            (sm, channel) = self.get_screen_manager(uri)
            result = sm.multiple_screens()
            self.multiple_screens.emit(grpc_url, result)
        except Exception as e:
            self.error.emit('get_multiple_screens', 'grpc://%s' % uri, '', e)
        finally:
            if channel is not None:
                self.close_channel(channel, uri)
            if hasattr(self, '_threads'):
                self._threads.finished('mst_%s' % grpc_url)

    def rosclean(self, grpc_url='grpc://localhost:12321'):
        """Ask the daemon at *grpc_url* to clean its log directory."""
        rospy.logdebug('clear log directory on %s' % grpc_url)
        (uri, _) = nmdurl.split(grpc_url)
        (sm, channel) = self.get_screen_manager(uri)
        try:
            return sm.rosclean()
        finally:
            self.close_channel(channel, uri)

    def wipe_screens(self, grpc_url='grpc://localhost:12321'):
        """Ask the daemon at *grpc_url* to wipe its screen sessions."""
        rospy.logdebug('wipe screens on %s' % grpc_url)
        (uri, _) = nmdurl.split(grpc_url)
        (sm, channel) = self.get_screen_manager(uri)
        try:
            sm.wipe_screens()
        finally:
            self.close_channel(channel, uri)

    def log_dir_size_threaded(self, grpc_url='grpc://localhost:12321'):
        """Query the log dir size in a background thread; emits log_dir_size_signal."""
        self._threads.start_thread('lds_%s' % grpc_url, target=self._log_dir_size, args=(grpc_url,))

    def _log_dir_size(self, grpc_url='grpc://localhost:12321'):
        """Worker for log_dir_size_threaded(); reports errors via self.error."""
        rospy.logdebug('get log_dir size on %s' % grpc_url)
        uri = grpc_url
        channel = None
        try:
            (uri, _) = nmdurl.split(grpc_url)
            (sm, channel) = self.get_screen_manager(uri)
            log_dir_size = sm.log_dir_size()
            rospy.logdebug('log_dir size on %s: %s' % (grpc_url, sizeof_fmt(log_dir_size)))
            self.log_dir_size_signal.emit(grpc_url, log_dir_size)
        except Exception as e:
            self.error.emit('log_dir_size', 'grpc://%s' % uri, '', e)
        finally:
            if channel is not None:
                self.close_channel(channel, uri)
            if hasattr(self, '_threads'):
                self._threads.finished('lds_%s' % grpc_url)

    def delete_log(self, grpc_url='grpc://localhost:12321', nodes=None):
        """Delete the log files of *nodes* on the daemon at *grpc_url*.

        ``nodes`` defaults to an empty list; the None-default avoids the
        shared-mutable-default-argument pitfall of the previous ``nodes=[]``.
        """
        if nodes is None:
            nodes = []
        rospy.logdebug('delete logs on %s for %s' % (grpc_url, nodes))
        (uri, _) = nmdurl.split(grpc_url)
        (sm, channel) = self.get_screen_manager(uri)
        try:
            return sm.delete_log(nodes)
        finally:
            self.close_channel(channel, uri)
class InferenceTests(unittest.TestCase):
    """End-to-end smoke test: NUTS inference over GP kernel hyper-priors."""

    def test_simple_regression(self):
        """Fit a periodic-kernel GP to sin(2*pi*x) via NUTS and check the
        posterior-predictive mean stays within 1.0 of the ground truth."""
        torch.manual_seed(1)  # deterministic run
        n_samples = 100
        x_train = torch.linspace(0, 1, 10)
        y_train = torch.sin((x_train * (2 * math.pi)))
        # Uniform hyper-priors over period length, lengthscale and output scale.
        kernel = ScaleKernel(base_kernel=PeriodicKernel(period_length_prior=UniformPrior(0.5, 1.5), lengthscale_prior=UniformPrior(0.01, 1.5)), outputscale_prior=UniformPrior(0.01, 2.0))
        likelihood = likelihoods.GaussianLikelihood()
        likelihood.noise = 0.0001  # near-noiseless observations
        gp = Regression(x_train, y_train, kernel, likelihood)
        name_to_rv = make_prior_random_variables(gp)
        # NOTE(review): the bare '_variable' below looks like a mangled
        # decorator (presumably @bm.random_variable) — confirm upstream.
        _variable
        def y():
            sampled_model = bm_sample_from_prior(gp.to_pyro_random_module(), name_to_rv)
            return sampled_model.likelihood(sampled_model(x_train))
        queries = list(name_to_rv.values())
        obs = {y(): y_train}
        samples = bm.GlobalNoUTurnSampler(nnc_compile=False).infer(queries, obs, n_samples, num_chains=1)
        x_test = torch.linspace(0, 1, 21).unsqueeze((- 1))
        y_test = torch.sin((x_test * (2 * math.pi))).squeeze(0)
        gp.eval()
        s = samples.get_chain(0)
        lengthscale_samples = s[name_to_rv['kernel.base_kernel.lengthscale_prior']]
        outputscale_samples = s[name_to_rv['kernel.outputscale_prior']]
        period_length_samples = s[name_to_rv['kernel.base_kernel.period_length_prior']]
        # Load the posterior hyper-parameter samples back into the GP so the
        # predictive distribution is a mixture over the sampled chains.
        gp.pyro_load_from_samples({'kernel.outputscale_prior': outputscale_samples, 'kernel.base_kernel.lengthscale_prior': lengthscale_samples, 'kernel.base_kernel.period_length_prior': period_length_samples})
        expanded_x_test = x_test.unsqueeze(0).repeat(n_samples, 1, 1)
        output = gp.likelihood(gp(expanded_x_test.detach()))
        # Mean absolute error of the sample-averaged predictive mean < 1.0.
        assert ((y_test.squeeze() - output.mean.squeeze().mean(0)).abs().mean() < 1.0).item()
class TestTurnBattleRangeCmd(BaseEvenniaCommandTest):
    """Smoke-test the turn-battle range commands outside of combat."""

    def test_turnbattlerangecmd(self):
        """Each command answers with its expected message when not fighting."""
        not_in_combat = 'You can only do that in combat. (see: help fight)'
        # Combat-only commands refuse to run outside a fight.
        for command in (tb_range.CmdShoot, tb_range.CmdApproach, tb_range.CmdWithdraw):
            self.call(command(), '', not_in_combat)
        self.call(tb_range.CmdStatus(), '', 'HP Remaining: 100 / 100')
        self.call(tb_range.CmdFight(), '', "There's nobody here to fight!")
        for command in (tb_range.CmdAttack, tb_range.CmdPass, tb_range.CmdDisengage):
            self.call(command(), '', not_in_combat)
        self.call(tb_range.CmdRest(), '', 'Char rests to recover HP.')
def test_configure_project_should_configure_a_project_persistently(create_test_db, create_pymel, create_maya_env, create_test_data, temp_project):
    """configure_project() must persist the chosen view transform in the
    project's COLOR_MANAGEMENT_CONFIG file, keyed by the Maya version."""
    pm = create_pymel
    project = temp_project
    repo = project.repository
    assert isinstance(repo, Repository)
    config_file_path = os.path.join(repo.path, project.code, 'References', 'COLOR_MANAGEMENT_CONFIG')
    # The config file must not exist before configuration.
    assert not os.path.exists(config_file_path)
    from anima.dcc.mayaEnv.render import MayaColorManagementConfigurator
    view_transform = 'scene-linear Rec.709-sRGB'
    MayaColorManagementConfigurator.configure_project(project, view_transform)
    with open(config_file_path) as config_file:
        stored = json.load(config_file)
    # The entry is keyed by the Maya version string (pm.about(v=1)).
    assert stored[pm.about(v=1)].strip() == view_transform
def draw_combined(ast_root, cfg_root, reduced=True, only_blocks=False, format='png', highlight=None):
    """Render the AST and the CFG side by side in one 'Combined' graph.

    Both visualisers are run in 'combined' mode so their output can be
    embedded as subgraphs of a single Digraph, which is then rendered
    (and opened) with the dot engine.
    """
    ast_subgraph = ASTVisualiser(ast_root).start(combined=True, reduced=reduced, highlight=highlight)
    cfg_subgraph = CFGVisualiser(cfg_root).start(only_blocks=only_blocks, combined=True)
    combined = Digraph(strict=True)
    # Shared styling for the merged graph.
    combined.attr('graph', fontname='helvetica')
    combined.attr('graph', splines='polyline')
    combined.attr('graph', compound='true')
    combined.attr('node', fontname='helvetica')
    combined.attr('node', style='filled', fillcolor='white')
    combined.attr('edge', fontname='helvetica')
    combined.subgraph(ast_subgraph)
    combined.subgraph(cfg_subgraph)
    combined.format = format
    combined.engine = 'dot'
    combined.render(view=True, cleanup=True, filename='Combined')
class CollectMissingAccount(BaseRequestResponseEvent[MissingAccountResult]):
    """Request event asking for the trie node that was found missing while
    reading an account; answered with a MissingAccountResult."""
    missing_node_hash: Hash32   # hash of the absent trie node
    address_hash: Hash32        # hashed account address (per field name — semantics defined by the event consumer)
    state_root_hash: Hash32     # state root the lookup ran against
    urgent: bool                # request priority flag
    block_number: BlockNumber

    @staticmethod
    def expected_response_type() -> Type[MissingAccountResult]:
        """Return the response event type paired with this request.

        Declared @staticmethod: the method takes neither ``self`` nor
        ``cls``, so without the decorator an instance call would pass the
        instance into a zero-argument function and raise TypeError.
        """
        return MissingAccountResult
class TestDIN99oSerialize(util.ColorAssertsPyTest):
    """Test serialization of DIN99o colors (to_string round trips)."""

    # (input color, to_string options, expected serialization)
    COLORS = [('color(--din99o 75 10 -10 / 0.5)', {}, 'color(--din99o 75 10 -10 / 0.5)'), ('color(--din99o 75 10 -10)', {'alpha': True}, 'color(--din99o 75 10 -10 / 1)'), ('color(--din99o 75 10 -10 / 0.5)', {'alpha': False}, 'color(--din99o 75 10 -10)'), ('color(--din99o none 10 -10)', {}, 'color(--din99o 0 10 -10)'), ('color(--din99o none 10 -10)', {'none': True}, 'color(--din99o none 10 -10)'), ('color(--din99o 120 10 -10)', {}, 'color(--din99o 120 10 -10)'), ('color(--din99o 120 10 -10)', {'fit': False}, 'color(--din99o 120 10 -10)')]

    # Restored decorator: the bare '.parametrize(...)' line was a syntax
    # error left over from a stripped '@pytest.mark.parametrize'.
    @pytest.mark.parametrize('color1,options,color2', COLORS)
    def test_colors(self, color1, options, color2):
        """Serialize *color1* with *options* and compare against *color2*."""
        self.assertEqual(Color(color1).to_string(**options), color2)
class AlertEndPosition(object):
    """Swagger-generated model with no declared attributes.

    ``swagger_types`` maps attribute name -> type string and
    ``attribute_map`` maps attribute name -> JSON key; both are empty here,
    so ``to_dict()`` returns an empty dict.
    """

    swagger_types = {}
    attribute_map = {}

    def __init__(self):
        # Discriminator field used by polymorphic swagger models; unused here.
        self.discriminator = None

    def to_dict(self):
        """Return the model's declared properties as a dict, recursing into
        nested models, lists and dicts."""
        result = {}
        # Python 3: dict.items() replaces the former six.iteritems(),
        # dropping an unnecessary py2-compat dependency.
        for (attr, _) in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        # Generated-code escape hatch for dict-based models; a no-op here.
        if issubclass(AlertEndPosition, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is an AlertEndPosition with equal state."""
        if (not isinstance(other, AlertEndPosition)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return (not (self == other))
def register_config_iterator(name: str, iterator_class: Type[ConfigIterator]):
    """Register *iterator_class* in the global iterator map under *name*.

    Raises ValueError if *name* was already registered.
    """
    global config_iterator_map
    logger.debug(f'register iterator: {name}')
    # Guard clause: refuse duplicate registrations up front.
    if name in config_iterator_map:
        raise ValueError(f'Duplicate iterator registration name: {name}')
    config_iterator_map[name] = iterator_class
class OptionSeriesXrangeSonificationContexttracksPointgrouping(Options):
    """Options for grouping points when sonifying an x-range series track.

    Each option is exposed as a property pair: the getter reads the stored
    configuration (with its default), the setter writes it.  Without the
    ``@property``/``@x.setter`` decorators the second ``def`` of each pair
    silently shadowed the first, making the getters unreachable.
    """

    @property
    def algorithm(self):
        """Grouping algorithm. Defaults to 'minmax'."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled. Defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of each group (presumably milliseconds — confirm
        against the Highcharts sonification docs). Defaults to 15."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property the grouping operates on. Defaults to 'y'."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def lookup_file_in_cache(repo_id: str, revision: str, filename: str) -> Optional[str]:
    """Return the local cache path of *filename* in *repo_id* at *revision*.

    Returns None when the file is not cached, or when the cache records
    that the file does not exist upstream (_CACHED_NO_EXIST sentinel).
    """
    cached = huggingface_hub.try_to_load_from_cache(repo_id=repo_id, filename=filename, revision=revision)
    if cached is None or cached is huggingface_hub._CACHED_NO_EXIST:
        return None
    return cached
class AsyncENS(BaseENS):
    """Asynchronous interface to the Ethereum Name Service.

    Mirrors the synchronous ENS class but awaits every chain interaction
    through an internal AsyncWeb3 instance.
    """

    # The AsyncWeb3 instance used for all registry/resolver calls.
    w3: 'AsyncWeb3'

    def __init__(self, provider: 'AsyncBaseProvider'=cast('AsyncBaseProvider', default), addr: ChecksumAddress=None, middlewares: Optional[Sequence[Tuple[('AsyncMiddleware', str)]]]=None) -> None:
        """Create handles for the ENS registry and the resolver contracts.

        :param provider: async provider to read the blockchain through
        :param addr: ENS registry address; defaults to the mainnet registry
        :param middlewares: middleware stack for the internal AsyncWeb3
        """
        self.w3 = init_async_web3(provider, middlewares)
        ens_addr = (addr if addr else ENS_MAINNET_ADDR)
        self.ens = self.w3.eth.contract(abi=abis.ENS, address=ens_addr)
        self._resolver_contract = self.w3.eth.contract(abi=abis.PUBLIC_RESOLVER_2_EXTENDED)
        self._reverse_resolver_contract = self.w3.eth.contract(abi=abis.REVERSE_RESOLVER)

    # Restored decorator: the method builds a new instance via ``cls`` and
    # takes no ``self``, so it must be a classmethod to be callable as
    # ``AsyncENS.from_web3(w3)``.
    @classmethod
    def from_web3(cls, w3: 'AsyncWeb3', addr: ChecksumAddress=None) -> 'AsyncENS':
        """Alternate constructor reusing an existing AsyncWeb3's provider,
        middlewares and strict-bytes setting."""
        provider = w3.manager.provider
        middlewares = w3.middleware_onion.middlewares
        ns = cls(cast('AsyncBaseProvider', provider), addr=addr, middlewares=middlewares)
        ns.strict_bytes_type_checking = w3.strict_bytes_type_checking
        return ns

    async def address(self, name: str, coin_type: Optional[int]=None) -> Optional[ChecksumAddress]:
        """Look up the address that *name* points to.

        With ``coin_type`` set, uses the ENSIP-9 multichain
        ``addr(bytes32,uint256)`` resolver interface.
        """
        r = (await self.resolver(name))
        if (coin_type is None):
            return cast(ChecksumAddress, (await self._resolve(name, 'addr')))
        else:
            (await _async_validate_resolver_and_interface_id(name, r, ENS_MULTICHAIN_ADDRESS_INTERFACE_ID, 'addr(bytes32,uint256)'))
            node = raw_name_to_hash(name)
            address_as_bytes = (await r.caller.addr(node, coin_type))
            if is_none_or_zero_address(address_as_bytes):
                return None
            return to_checksum_address(address_as_bytes)

    async def setup_address(self, name: str, address: Union[(Address, ChecksumAddress, HexAddress)]=cast(ChecksumAddress, default), coin_type: Optional[int]=None, transact: Optional['TxParams']=None) -> Optional[HexBytes]:
        """Point *name* at *address*, claiming ownership first if needed.

        Returns the transaction hash, or None if the record already matched.
        """
        if (not transact):
            transact = {}
        transact = deepcopy(transact)
        owner = (await self.setup_owner(name, transact=transact))
        (await self._assert_control(owner, name))
        if is_none_or_zero_address(address):
            address = None
        elif (address is default):
            # Sentinel default: point the name at its owner.
            address = owner
        elif is_binary_address(address):
            address = to_checksum_address(cast(str, address))
        elif (not is_checksum_address(address)):
            raise ValueError('You must supply the address in checksum format')
        if ((await self.address(name)) == address):
            return None
        if (address is None):
            address = EMPTY_ADDR_HEX
        transact['from'] = owner
        resolver: 'AsyncContract' = (await self._set_resolver(name, transact=transact))
        node = raw_name_to_hash(name)
        if (coin_type is None):
            return (await resolver.functions.setAddr(node, address).transact(transact))
        else:
            return (await resolver.functions.setAddr(node, coin_type, address).transact(transact))

    async def name(self, address: ChecksumAddress) -> Optional[str]:
        """Reverse-resolve *address*; only returns a name whose forward
        resolution matches the address (round-trip check)."""
        reversed_domain = address_to_reverse_domain(address)
        name = (await self._resolve(reversed_domain, fn_name='name'))
        return (name if (to_checksum_address(address) == (await self.address(name))) else None)

    async def setup_name(self, name: str, address: Optional[ChecksumAddress]=None, transact: Optional['TxParams']=None) -> HexBytes:
        """Set up the reverse record so *address* reverse-resolves to *name*.

        An empty *name* clears the reverse record for *address*.
        """
        if (not transact):
            transact = {}
        transact = deepcopy(transact)
        if (not name):
            (await self._assert_control(address, 'the reverse record'))
            return (await self._setup_reverse(None, address, transact=transact))
        else:
            resolved = (await self.address(name))
            if is_none_or_zero_address(address):
                address = resolved
            elif (resolved and (address != resolved) and (resolved != EMPTY_ADDR_HEX)):
                raise AddressMismatch(f'Could not set address {address!r} to point to name, because the name resolves to {resolved!r}. To change the name for an existing address, call setup_address() first.')
            if is_none_or_zero_address(address):
                address = (await self.owner(name))
            if is_none_or_zero_address(address):
                raise UnownedName('claim subdomain using setup_address() first')
            if is_binary_address(address):
                address = to_checksum_address(address)
            if (not is_checksum_address(address)):
                raise ValueError('You must supply the address in checksum format')
            (await self._assert_control(address, name))
            if (not resolved):
                # Ensure the forward record exists before the reverse record.
                (await self.setup_address(name, address, transact=transact))
            return (await self._setup_reverse(name, address, transact=transact))

    async def owner(self, name: str) -> ChecksumAddress:
        """Return the owner of *name* as recorded in the ENS registry."""
        node = raw_name_to_hash(name)
        return (await self.ens.caller.owner(node))

    async def setup_owner(self, name: str, new_owner: ChecksumAddress=cast(ChecksumAddress, default), transact: Optional['TxParams']=None) -> Optional[ChecksumAddress]:
        """Make *new_owner* the owner of *name*, claiming intermediate
        subdomains from the closest existing ancestor as necessary.

        Returns the effective owner, or None if there was nothing to do.
        """
        if (not transact):
            transact = {}
        transact = deepcopy(transact)
        (super_owner, unowned, owned) = (await self._first_owner(name))
        if (new_owner is default):
            # Sentinel default: keep ownership with the ancestor's owner.
            new_owner = super_owner
        elif (not new_owner):
            new_owner = ChecksumAddress(EMPTY_ADDR_HEX)
        else:
            new_owner = to_checksum_address(new_owner)
        current_owner = (await self.owner(name))
        if ((new_owner == EMPTY_ADDR_HEX) and (not current_owner)):
            return None
        elif (current_owner == new_owner):
            return current_owner
        else:
            (await self._assert_control(super_owner, name, owned))
            (await self._claim_ownership(new_owner, unowned, owned, super_owner, transact=transact))
            return new_owner

    async def resolver(self, name: str) -> Optional['AsyncContract']:
        """Return the resolver contract responsible for *name*, if any."""
        normal_name = normalize_name(name)
        resolver = (await self._get_resolver(normal_name))
        return resolver[0]

    async def reverser(self, target_address: ChecksumAddress) -> Optional['AsyncContract']:
        """Return the resolver for *target_address*'s reverse record."""
        reversed_domain = address_to_reverse_domain(target_address)
        return (await self.resolver(reversed_domain))

    async def get_text(self, name: str, key: str) -> str:
        """Read the ENS text record *key* for *name*."""
        node = raw_name_to_hash(name)
        r = (await self.resolver(name))
        (await _async_validate_resolver_and_interface_id(name, r, ENS_TEXT_INTERFACE_ID, 'text'))
        return (await r.caller.text(node, key))

    async def set_text(self, name: str, key: str, value: str, transact: Optional['TxParams']=None) -> HexBytes:
        """Write the ENS text record *key* = *value* for *name*."""
        r = (await self.resolver(name))
        (await _async_validate_resolver_and_interface_id(name, r, ENS_TEXT_INTERFACE_ID, 'setText'))
        node = raw_name_to_hash(name)
        return (await self._set_property(name, r.functions.setText, (node, key, value), transact))

    async def _get_resolver(self, normal_name: str, fn_name: str='addr') -> Tuple[(Optional['AsyncContract'], str)]:
        """Walk up the name hierarchy until a resolver is found.

        Returns (resolver, name-it-was-found-at), or (None, '') when no
        ancestor has one (ENSIP-10 wildcard-resolution lookup).
        """
        current_name = normal_name
        while True:
            if is_empty_name(current_name):
                return (None, current_name)
            resolver_addr = (await self.ens.caller.resolver(normal_name_to_hash(current_name)))
            if (not is_none_or_zero_address(resolver_addr)):
                resolver = cast('AsyncContract', self._type_aware_resolver(resolver_addr, fn_name))
                return (resolver, current_name)
            current_name = self.parent(current_name)

    async def _set_resolver(self, name: str, resolver_addr: Optional[ChecksumAddress]=None, transact: Optional['TxParams']=None) -> 'AsyncContract':
        """Point *name* at *resolver_addr* (default: the public resolver)."""
        if (not transact):
            transact = {}
        transact = deepcopy(transact)
        if is_none_or_zero_address(resolver_addr):
            resolver_addr = (await self.address('resolver.eth'))
        namehash = raw_name_to_hash(name)
        # Only send a transaction when the registry entry actually differs.
        if ((await self.ens.caller.resolver(namehash)) != resolver_addr):
            (await self.ens.functions.setResolver(namehash, resolver_addr).transact(transact))
        return cast('AsyncContract', self._resolver_contract(address=resolver_addr))

    async def _resolve(self, name: str, fn_name: str='addr') -> Optional[Union[(ChecksumAddress, str)]]:
        """Resolve *name* via *fn_name*, preferring the ENSIP-10 extended
        resolver interface when the resolver supports it."""
        normal_name = normalize_name(name)
        (resolver, current_name) = (await self._get_resolver(normal_name, fn_name))
        if (not resolver):
            return None
        node = self.namehash(normal_name)
        if (await _async_resolver_supports_interface(resolver, ENS_EXTENDED_RESOLVER_INTERFACE_ID)):
            # ENSIP-10: wrap the call in resolve(dns_encoded_name, calldata).
            contract_func_with_args = (fn_name, [node])
            calldata = resolver.encodeABI(*contract_func_with_args)
            contract_call_result = (await resolver.caller.resolve(ens_encode_name(normal_name), calldata))
            result = self._decode_ensip10_resolve_data(contract_call_result, resolver, fn_name)
            return (to_checksum_address(result) if is_address(result) else result)
        elif (normal_name == current_name):
            # Legacy resolver: only valid for the exact name it was set on.
            lookup_function = getattr(resolver.functions, fn_name)
            result = (await lookup_function(node).call())
            if is_none_or_zero_address(result):
                return None
            return (to_checksum_address(result) if is_address(result) else result)
        return None

    async def _assert_control(self, account: ChecksumAddress, name: str, parent_owned: Optional[str]=None) -> None:
        """Raise UnauthorizedError unless *account* is an unlocked account
        of the connected node."""
        if (not address_in(account, (await self.w3.eth.accounts))):
            raise UnauthorizedError(f'in order to modify {name!r}, you must control account {account!r}, which owns {(parent_owned or name)!r}')

    async def _first_owner(self, name: str) -> Tuple[(Optional[ChecksumAddress], Sequence[str], str)]:
        """Find the closest ancestor of *name* that has an owner.

        Returns (owner, labels-still-unowned, owned-ancestor-name).
        """
        owner = None
        unowned = []
        pieces = normalize_name(name).split('.')
        while (pieces and is_none_or_zero_address(owner)):
            name = '.'.join(pieces)
            owner = (await self.owner(name))
            if is_none_or_zero_address(owner):
                unowned.append(pieces.pop(0))
        return (owner, unowned, name)

    async def _claim_ownership(self, owner: ChecksumAddress, unowned: Sequence[str], owned: str, old_owner: Optional[ChecksumAddress]=None, transact: Optional['TxParams']=None) -> None:
        """Create each unowned subdomain, transferring it to *owner*."""
        if (not transact):
            transact = {}
        transact = deepcopy(transact)
        transact['from'] = (old_owner or owner)
        # Claim from the owned ancestor downwards, one label at a time.
        for label in reversed(unowned):
            (await self.ens.functions.setSubnodeOwner(raw_name_to_hash(owned), label_to_hash(label), owner).transact(transact))
            owned = f'{label}.{owned}'

    async def _setup_reverse(self, name: Optional[str], address: ChecksumAddress, transact: Optional['TxParams']=None) -> HexBytes:
        """Register *name* as the reverse record for *address* (empty name
        clears the record)."""
        name = (normalize_name(name) if name else '')
        if (not transact):
            transact = {}
        transact = deepcopy(transact)
        transact['from'] = address
        reverse_registrar = (await self._reverse_registrar())
        return (await reverse_registrar.functions.setName(name).transact(transact))

    async def _reverse_registrar(self) -> 'AsyncContract':
        """Return the reverse registrar contract (owner of addr.reverse)."""
        addr = (await self.ens.caller.owner(normal_name_to_hash(REVERSE_REGISTRAR_DOMAIN)))
        return self.w3.eth.contract(address=addr, abi=abis.REVERSE_REGISTRAR)

    async def _set_property(self, name: str, func: 'AsyncContractFunction', args: Sequence[Any], transact: Optional['TxParams']=None) -> HexBytes:
        """Invoke *func* with *args*, transacting from *name*'s owner."""
        if (not transact):
            transact = {}
        owner = (await self.owner(name))
        transact_from_owner = merge({'from': owner}, transact)
        return (await func(*args).transact(transact_from_owner))
class ShadowIGMediaCollaborators(AbstractCrudObject):
    """Graph API node for Instagram media collaborators (generated-code style)."""

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isShadowIGMediaCollaborators = True
        super(ShadowIGMediaCollaborators, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Field name constants for API requests/responses.
        id = 'id'
        invite_status = 'invite_status'
        username = 'username'

    _field_types = {'id': 'string', 'invite_status': 'string', 'username': 'string'}

    @classmethod
    def _get_field_enum_info(cls):
        """Return the mapping of field name -> enum info (none for this node).

        Declared @classmethod: the method takes ``cls`` but had no decorator,
        so an instance call would have bound the instance to ``cls``.
        """
        field_enum_info = {}
        return field_enum_info
class Settings(commands.Cog):
    """Bot settings cog: presence loop plus /settings slash-command group
    (system channel, notifications, language, colour, activities).

    NOTE(review): several bare lines in this class — '(seconds=30)',
    '_command(...)', '_group.sub_command(...)', '_only()', '_owner()' —
    look like mangled decorators (e.g. @tasks.loop(seconds=30),
    @commands.slash_command, @<group>.sub_command, @commands.guild_only(),
    @commands.is_owner()); '(seconds=30)' is not even valid syntax as-is.
    Confirm against upstream before relying on this class.
    """

    def __init__(self, bot):
        self.bot = bot
        # Start the periodic presence-refresh task immediately.
        self.maintain_presence.start()

    # NOTE(review): mangled decorator — presumably @tasks.loop(seconds=30).
    (seconds=30)
    async def maintain_presence(self):
        """Periodically refresh the bot's presence from the activity list."""
        (await self.bot.wait_until_ready())
        current_activity = self.bot.activities.get()
        (await self.bot.change_presence(activity=disnake.Game(name=current_activity)))

    # NOTE(review): mangled decorator — presumably a slash-command group.
    _command(name='settings')
    async def settings_group(self, inter):
        # Group container only; subcommands below implement behaviour.
        pass

    # NOTE(review): mangled decorators (sub_command + guild-only check).
    _group.sub_command(name='systemchannel', description=static_response.get('brief-settings-systemchannel'))
    _only()
    async def set_systemchannel(self, inter, channel_type: str=commands.Param(description=static_response.get('settings-systemchannel-option-type'), choices={'main', 'server', 'explanation'}), channel: disnake.TextChannel=commands.Param(description=static_response.get('settings-systemchannel-option-channel'), default=None)):
        """Show or set the main/server system channel (admin only)."""
        if (not self.bot.isadmin(inter.author, inter.guild.id)):
            (await inter.send(content=self.bot.response.get('not-admin', guild_id=inter.guild.id)))
            return
        (await inter.response.defer())
        # Without a channel (or with an unknown type) just report the
        # currently configured channels.
        if ((not channel) or (channel_type not in ('main', 'server'))):
            try:
                server_channel = self.bot.db.fetch_systemchannel(inter.guild.id)
            except DatabaseError as error:
                (await self.bot.report(self.bot.response.get('db-error-fetching-systemchannels').format(exception=error)))
                return
            if server_channel:
                server_channel = server_channel[0][0]
            main_text = ((await self.bot.getchannel(self.bot.config.system_channel)).mention if self.bot.config.system_channel else 'none')
            server_text = ((await self.bot.getchannel(server_channel)).mention if server_channel else 'none')
            (await inter.edit_original_message(content=self.bot.response.get('systemchannels-info', guild_id=inter.guild.id).format(main_channel=main_text, server_channel=server_text)))
            return
        # Verify the bot can actually use the requested channel.
        bot_user = inter.guild.get_member(self.bot.user.id)
        bot_permissions = channel.permissions_for(bot_user)
        writable = bot_permissions.read_messages
        readable = bot_permissions.view_channel
        if ((not writable) or (not readable)):
            (await inter.edit_original_message(content=self.bot.response.get('permission-error-channel', guild_id=inter.guild.id)))
            return
        if (channel_type == 'main'):
            # Main channel lives in the bot config file.
            self.bot.config.update('server', 'system_channel', str(channel.id))
        elif (channel_type == 'server'):
            # Per-guild channel lives in the database.
            try:
                self.bot.db.add_systemchannel(inter.guild.id, channel.id)
            except DatabaseError as error:
                (await self.bot.report(self.bot.response.get('db-error-adding-systemchannels', guild_id=inter.guild.id).format(exception=error), inter.guild.id))
                return
        (await inter.edit_original_message(content=self.bot.response.get('systemchannels-success', guild_id=inter.guild.id)))

    # NOTE(review): mangled decorators (sub_command + guild-only check).
    _group.sub_command(name='notify', description=static_response.get('brief-settings-notify'))
    _only()
    async def toggle_notify(self, inter):
        """Toggle notification messages for this guild (admin only)."""
        if (not self.bot.isadmin(inter.author, inter.guild.id)):
            return
        (await inter.response.defer())
        try:
            notify = self.bot.db.toggle_notify(inter.guild.id)
        except DatabaseError as error:
            (await self.bot.report(self.bot.response.get('db-error-toggle-notify', guild_id=inter.guild.id).format(exception=error), inter.guild.id))
            return
        if notify:
            (await inter.edit_original_message(content=self.bot.response.get('notifications-on', guild_id=inter.guild.id)))
        else:
            (await inter.edit_original_message(content=self.bot.response.get('notifications-off', guild_id=inter.guild.id)))

    # NOTE(review): mangled decorator (sub_command).
    _group.sub_command(name='language', description=static_response.get('brief-settings-language'))
    async def set_language(self, inter, _range: str=commands.Param(name='range', description=static_response.get('settings-language-option-range'), choices={'global', 'server'}), language: str=commands.Param(description=static_response.get('settings-language-option-language'), choices=static_response.languages())):
        """Set the response language per-guild (admins) or globally (owner)."""
        (await inter.response.defer())
        if (_range == 'server'):
            if (not inter.guild):
                (await inter.send(content=self.bot.response.get('no-dm-parameters').format(parameters='server')))
                return
            if (not self.bot.isadmin(inter.author, inter.guild.id)):
                (await inter.send(content=self.bot.response.get('not-admin', guild_id=inter.guild.id)))
                return
            self.bot.db.set_language(inter.guild.id, language)
        else:
            # Global language change is restricted to the bot owner.
            if (not (await self.bot.is_owner(inter.author))):
                (await inter.send(content=self.bot.response.get('not-owner', guild_id=(inter.guild.id if inter.guild else None))))
                return
            self.bot.config.update('server', 'language', language)
            self.bot.response.global_language = language
        (await inter.edit_original_message(content=self.bot.response.get('language-success', guild_id=(inter.guild.id if inter.guild else None))))

    # NOTE(review): mangled decorators (owner check + sub_command).
    _owner()
    _group.sub_command(name='colour', description=static_response.get('brief-settings-colour'))
    async def set_colour(self, inter, colour: str=commands.Param(description=static_response.get('settings-colour-option-colour'))):
        """Set the bot's embed colour from a hex string (owner only)."""
        (await inter.response.defer())
        guild_id_or_none = (inter.guild.id if inter.guild else None)
        try:
            self.bot.config.botcolour = disnake.Colour(int(colour, 16))
            self.bot.config.update('server', 'colour', colour)
            example = disnake.Embed(title=self.bot.response.get('example-embed', guild_id=guild_id_or_none), description=self.bot.response.get('example-embed-new-colour', guild_id=guild_id_or_none), colour=self.bot.config.botcolour)
            (await inter.edit_original_message(content=self.bot.response.get('colour-changed', guild_id=guild_id_or_none), embed=example))
        except ValueError:
            # int(colour, 16) failed: not a valid hex colour string.
            (await inter.edit_original_message(content=self.bot.response.get('colour-hex-error', guild_id=guild_id_or_none)))

    # NOTE(review): mangled decorators (owner check + sub_command).
    _owner()
    _group.sub_command(name='activity', description=static_response.get('brief-settings-activity'))
    async def change_activity(self, inter, action: str=commands.Param(description=static_response.get('settings-activity-option-action'), choices={'add', 'remove', 'list'}), activity: str=commands.Param(description=static_response.get('settings-activity-option-activity'), default=None)):
        """Add, remove or list the presence activities (owner only)."""
        (await inter.response.defer())
        guild_id_or_none = (inter.guild.id if inter.guild else None)
        if ((action == 'add') and activity):
            # Commas are rejected — presumably the storage format is
            # comma-separated (confirm against the activities store).
            if (',' in activity):
                (await inter.send(self.bot.response.get('activity-no-commas', guild_id=guild_id_or_none)))
            else:
                self.bot.activities.add(activity)
                (await inter.send(self.bot.response.get('activity-success', guild_id=guild_id_or_none).format(new_activity=activity)))
        elif (action == 'list'):
            if self.bot.activities.activity_list:
                formatted_list = []
                for item in self.bot.activities.activity_list:
                    formatted_list.append(f'`{item}`')
                (await inter.send(self.bot.response.get('current-activities', guild_id=guild_id_or_none).format(activity_list='\n- '.join(formatted_list))))
            else:
                (await inter.send(self.bot.response.get('no-current-activities', guild_id=guild_id_or_none)))
        elif ((action == 'remove') and activity):
            removed = self.bot.activities.remove(activity)
            if removed:
                (await inter.send(self.bot.response.get('rm-activity-success', guild_id=guild_id_or_none).format(activity_to_delete=activity)))
            else:
                (await inter.send(self.bot.response.get('rm-activity-not-exists', guild_id=guild_id_or_none)))
        else:
            # Missing/invalid combination of action and activity.
            (await inter.send(self.bot.response.get('activity-add-list-remove', guild_id=guild_id_or_none)))
def _duckargs_env_int(name, default):
    """Read environment variable *name* as an int (falling back to *default*).

    Raises RuntimeError when the value cannot be parsed as an integer.
    """
    value = os.environ.get(name, default)
    try:
        return int(value)
    except ValueError:
        raise RuntimeError(f'{name} must be an integer')

def generate_python_code(argv=sys.argv):
    """Generate a Python argparse program for the duckargs-style *argv*.

    Honours two env toggles: DUCKARGS_PRINT (emit print() lines for each
    parsed arg) and DUCKARGS_COMMENT (emit a leading "generated by" comment),
    both enabled by default. The duplicated env parsing of the previous
    version is factored into _duckargs_env_int().
    """
    processed_args = process_args(_is_python_reserved_str, argv)
    optlines = (' ' + '\n '.join([_generate_python_code_line(o) for o in processed_args]))
    printlines = ''
    if (_duckargs_env_int('DUCKARGS_PRINT', 1) > 0):
        printlines += ('\n\n ' + '\n '.join([f'print(args.{o.var_name})' for o in processed_args]))
    comment = ''
    if (_duckargs_env_int('DUCKARGS_COMMENT', 1) > 0):
        comment = ((f'''# Generated by duckargs, invoked with the following arguments:
# ''' + ' '.join(argv[1:])) + '\n\n')
    # Reset the positional counter so repeated calls number afresh.
    CmdlineOpt.positional_count = 0
    return PYTHON_TEMPLATE.format(comment, optlines, printlines)
def test_request_exception_signal():
    """got_request_exception must fire with the unhandled view exception.

    The bare "('/')" line was a stripped @app.route('/') decorator; without
    it the view is never registered and the request would 404 instead of
    raising, so the decorator is restored.
    """
    app = flask.Flask(__name__)
    recorded = []

    @app.route('/')
    def index():
        # Deliberately raise ZeroDivisionError inside the view.
        (1 // 0)

    def record(sender, exception):
        recorded.append(exception)

    flask.got_request_exception.connect(record, app)
    try:
        # The unhandled error yields a 500 and the signal captures it.
        assert (app.test_client().get('/').status_code == 500)
        assert (len(recorded) == 1)
        assert isinstance(recorded[0], ZeroDivisionError)
    finally:
        # Always disconnect so the signal doesn't leak into other tests.
        flask.got_request_exception.disconnect(record, app)
class TestCollectionInterface(unittest.TestCase):
    """Exercise the COM collection interface of Scripting.Dictionary:
    index access, the named Item property, and key iteration."""

    def setUp(self):
        # dynamic=False: use the early-bound (generated) COM interface.
        self.d = CreateObject('Scripting.Dictionary', dynamic=False)

    def tearDown(self):
        del self.d

    def assertAccessInterface(self, d):
        """Shared read-side assertions, however the entries were written."""
        self.assertEqual(d.CompareMode, 42)
        self.assertEqual(d['foo'], 1)
        self.assertEqual(d.Item['foo'], d['foo'])
        self.assertEqual(d.Item('foo'), d['foo'])
        self.assertEqual(d['bar'], 'spam foo')
        self.assertEqual(d.Item('bar'), 'spam foo')
        self.assertEqual(d['baz'], 3.14)
        self.assertEqual(d.Item('baz'), d['baz'])
        # Reading a missing key yields None (and adds the key, per
        # Scripting.Dictionary semantics — hence 4 keys when iterating).
        self.assertIsNone(d['asdlfkj'])
        self.assertIsNone(d.Item['asdlfkj'])
        self.assertIsNone(d.Item('asdlfkj'))
        # Fix: iter() returns a non-subscriptable iterator, so indexing it
        # raised TypeError; materialize the keys into a list first.
        items = list(d)
        self.assertEqual(items[0], 'foo')
        self.assertEqual(items[1], 'bar')
        self.assertEqual(items[2], 'baz')
        self.assertEqual(items[3], 'asdlfkj')

    def test_index_setter(self):
        """Write entries through d[key] = value."""
        d = self.d
        d.CompareMode = 42
        d['foo'] = 1
        d['bar'] = 'spam foo'
        d['baz'] = 3.14
        self.assertAccessInterface(d)

    def test_named_property_setter(self):
        """Write entries through the named Item property."""
        d = self.d
        d.CompareMode = 42
        d.Item['foo'] = 1
        d.Item['bar'] = 'spam foo'
        d.Item['baz'] = 3.14
        self.assertAccessInterface(d)

    def test_named_property_no_length(self):
        # The Item property itself exposes no length.
        self.assertRaises(TypeError, len, self.d.Item)

    def test_named_property_not_iterable(self):
        # Nor is it iterable; iteration goes through the dictionary itself.
        self.assertRaises(TypeError, list, self.d.Item)
def _mypy_cmd(strict: bool, python_version: Optional[str]='3.7') -> List[str]:
    """Build the mypy command line for this repo's config.

    :param strict: append --strict when True
    :param python_version: target version for --python-version, or None to omit
    :return: argv list ready for subprocess execution
    """
    cmd = ['mypy', '--install-types', '--non-interactive', '--config-file', f'{BASE}/.mypy.ini']
    if strict:
        cmd += ['--strict']
    if python_version is not None:
        cmd += [f'--python-version={python_version}']
    return cmd
def read_use_stmt(line: str) -> (tuple[(Literal['use'], Use)] | None):
    """Parse a Fortran USE statement.

    Returns ('use', Use(module, only_set, rename_map)) or None when *line*
    is not a USE statement. ONLY-list entries of the form `local => remote`
    are recorded in the rename map under the local name.
    """
    match = FRegex.USE.match(line)
    if match is None:
        return None
    remainder = line[match.end(0):].lower()
    module_name = match.group(2)
    only_names: set[str] = set()
    renames: dict[(str, str)] = {}
    if match.group(3):
        # ONLY clause present: collect names and any `=>` renames.
        for clause in remainder.split(','):
            parts = clause.split('=>')
            local_name = parts[0].strip()
            only_names.add(local_name)
            if len(parts) == 2:
                renames[local_name] = parts[1].strip()
    return ('use', Use(module_name, only_names, renames))
class FactDb(FactBase):
    """Entry point of the FACT database service; verifies the PostgreSQL
    connection at startup and aborts the process if it is unavailable."""

    PROGRAM_NAME = 'FACT DB-Service'
    PROGRAM_DESCRIPTION = 'Firmware Analysis and Compare Tool (FACT) DB-Service'
    COMPONENT = 'database'

    def __init__(self):
        super().__init__()
        self._check_postgres_connection()

    @staticmethod
    def _check_postgres_connection():
        """Exit with status 1 when PostgreSQL cannot be reached.

        Declared @staticmethod: the method takes no ``self`` yet is invoked
        as ``self._check_postgres_connection()``; without the decorator that
        call would raise TypeError (the instance gets passed to a
        zero-argument function).
        """
        try:
            ReadOnlyDbInterface().connection.engine.connect()
        except (SQLAlchemyError, ModuleNotFoundError):
            logging.exception('Could not connect to PostgreSQL. Is the service running?')
            sys.exit(1)
# NOTE(review): this bare string looks like a mangled decorator — presumably
# @patch('ecs_deploy.cli.get_client') (unittest.mock), since the test below
# receives an injected `get_client` argument. Confirm against upstream.
('ecs_deploy.cli.get_client')
def test_update_task_without_changing_secrets_value(get_client, runner):
    """update CLI: re-setting a secret without changing its value reports
    the base revision and a new one, but no update/changed-secrets diff."""
    # `get_client` is the patched factory; hand back a canned ECS client.
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.update, (TASK_DEFINITION_ARN_1, '-s', 'webserver', 'baz', 'qux'))
    assert (result.exit_code == 0)
    assert (not result.exception)
    assert (u'Update task definition based on: test-task:1' in result.output)
    # Nothing actually changed, so no diff messages may appear.
    assert (u'Updating task definition' not in result.output)
    assert (u'Changed secrets' not in result.output)
    assert (u'Successfully created revision: 2' in result.output)
# Coconut-compiled output for: `match def mean([] + xs) = sum(xs) / len(xs)`.
# The original line here was the bare expression `_coconut_mark_as_match`
# (a stripped decorator, i.e. a no-op statement); the '@' is restored so the
# Coconut runtime actually tags the function for pattern-match dispatch.
@_coconut_mark_as_match
def mean(_coconut_match_first_arg=_coconut_sentinel, *_coconut_match_args, **_coconut_match_kwargs):
    """Return the arithmetic mean of a sequence (Coconut match function)."""
    # Machine-generated match machinery below -- logic intentionally untouched.
    _coconut_match_check_0 = False
    _coconut_match_set_name_xs = _coconut_sentinel
    _coconut_FunctionMatchError = _coconut_get_function_match_error()
    if _coconut_match_first_arg is not _coconut_sentinel:
        _coconut_match_args = (_coconut_match_first_arg,) + _coconut_match_args
    if _coconut.len(_coconut_match_args) == 1:
        if _coconut.isinstance(_coconut_match_args[0], _coconut.abc.Sequence):
            _coconut_match_temp_4 = _coconut.list(_coconut_match_args[0])
            _coconut_match_set_name_xs = _coconut_match_temp_4
            if not _coconut_match_kwargs:
                _coconut_match_check_0 = True
    if _coconut_match_check_0:
        if _coconut_match_set_name_xs is not _coconut_sentinel:
            xs = _coconut_match_set_name_xs
    if not _coconut_match_check_0:
        raise _coconut_FunctionMatchError('match def mean([] + xs) =', _coconut_match_args)
    return sum(xs) / len(xs)
class ProjectFlag(Base):
    """A user-submitted flag (report) against a project.

    The query helpers ``all``/``search``/``get`` take ``cls`` as their first
    parameter but were missing ``@classmethod``, so calling them on the class
    (e.g. ``ProjectFlag.get(session, 1)``) bound the session to ``cls`` and
    broke; the decorators are restored here.
    """

    __tablename__ = 'projects_flags'

    id = sa.Column(sa.Integer, primary_key=True)
    # Cascade keeps flags from outliving their project row.
    project_id = sa.Column(sa.Integer, sa.ForeignKey('projects.id', ondelete='cascade', onupdate='cascade'))
    reason = sa.Column(sa.Text, nullable=False)
    user = sa.Column(sa.String(200), index=True, nullable=False)
    state = sa.Column(sa.String(50), default='open', nullable=False)
    created_on = sa.Column(sa.DateTime, default=datetime.datetime.utcnow)
    updated_on = sa.Column(sa.DateTime, server_default=sa.func.now(), onupdate=sa.func.current_timestamp())
    project = sa.orm.relationship('Project', backref=sa.orm.backref('flags', cascade='all, delete-orphan'))

    def __repr__(self):
        return f'<ProjectFlag({self.project.name}, {self.user}, {self.state})>'

    def __json__(self, detailed=False):
        """Return a JSON-serializable dict; ``reason`` only when *detailed*."""
        output = dict(id=self.id, project=self.project.name, user=self.user, state=self.state, created_on=time.mktime(self.created_on.timetuple()), updated_on=time.mktime(self.updated_on.timetuple()))
        if detailed:
            output['reason'] = self.reason
        return output

    @classmethod
    def all(cls, session, page=None, count=False):
        """Return all flags ordered by creation time.

        ``page`` and ``count`` are accepted but currently ignored -- kept for
        signature compatibility with callers.
        """
        query = session.query(ProjectFlag).order_by(ProjectFlag.created_on)
        return query.all()

    @classmethod
    def search(cls, session, project_name=None, from_date=None, user=None, state=None, limit=None, offset=None, count=False):
        """Search flags by optional filters.

        Returns the matching rows (newest first), or just their number when
        ``count`` is True.
        """
        query = session.query(cls)
        if project_name:
            query = query.filter(cls.project_id == Project.id).filter(Project.name == project_name)
        if from_date:
            query = query.filter(cls.created_on >= from_date)
        if user:
            query = query.filter(cls.user == user)
        if state:
            query = query.filter(cls.state == state)
        query = query.order_by(cls.created_on.desc())
        if count:
            return query.count()
        if offset:
            query = query.offset(offset)
        if limit:
            query = query.limit(limit)
        return query.all()

    @classmethod
    def get(cls, session, flag_id):
        """Return the flag with the given id, or ``None``."""
        query = session.query(cls).filter(cls.id == flag_id)
        return query.first()
class Function():
    """A template variable followed by a '|'-separated pipeline of filters.

    ``value`` has the form ``name|f1|f2|...``: the leading segment is looked
    up as a ``Variable`` and each following segment names an entry in the
    ``FUNCTIONS`` registry applied in order.
    """

    def __init__(self, value):
        name, *pipeline = value.split('|')
        self.name = name
        self.variable = Variable(name)
        self.functions = pipeline

    def substitute(self, params):
        """Resolve the variable from *params*, then run it through the pipeline."""
        result = self.variable.substitute(params)
        for func_name in self.functions:
            result = FUNCTIONS[func_name](result)
        return result
def extractSuibiansubsTumblrCom(item):
    """Parser for feed items from 'suibiansubs.tumblr.com'.

    Returns ``None`` for items to be skipped (blocked tags, previews, or no
    volume/chapter info), a release message for recognized series tags, and
    ``False`` when nothing matched.
    """
    blocked_tags = ['audio drama', 'Manhua', 'MKV', 'badword']
    if any(bad in item['tags'] for bad in blocked_tags):
        return None

    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None

    # (tag to look for, series name to report, translation type)
    series_map = [
        ('copper coins', 'copper coins', 'translated'),
        ("Han Shan's Sword Unsheathed", "Han Shan's Sword Unsheathed", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in series_map:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.