code stringlengths 281 23.7M |
|---|
def django_main(server_getter):
    """Configure Django for a PySOA server and run it.

    ``server_getter`` is a zero-argument callable returning the server; it is
    invoked only after Django has been fully set up.
    """
    import django

    parser = _get_arg_parser()
    parser.add_argument(
        '-s',
        '--settings',
        help='The settings module to use (must be importable)',
        required=('DJANGO_SETTINGS_MODULE' not in os.environ),
    )
    args = _get_args(parser)
    if args.settings:
        # The CLI flag wins over any pre-existing environment value.
        os.environ['DJANGO_SETTINGS_MODULE'] = args.settings

    logging_conflict = False
    try:
        django_settings = importlib.import_module(os.environ['DJANGO_SETTINGS_MODULE'])
        soa_settings = django_settings.SOA_SERVER_SETTINGS
        if 'logging' in soa_settings:
            existing_logging = getattr(django_settings, 'LOGGING', None)
            if existing_logging and existing_logging != soa_settings['logging']:
                logging_conflict = True
            # The SOA logging config always takes precedence over LOGGING.
            django_settings.LOGGING = soa_settings['logging']
        elif not getattr(django_settings, 'LOGGING', None):
            from pysoa.server.settings import ServerSettings
            django_settings.LOGGING = ServerSettings.defaults['logging']
    except ImportError:
        raise ValueError('Cannot import Django settings module `{}`.'.format(os.environ['DJANGO_SETTINGS_MODULE']))
    except AttributeError:
        raise ValueError('Cannot find `SOA_SERVER_SETTINGS` in the Django settings module.')

    django.setup()
    if logging_conflict:
        # Emitted only after django.setup() so logging is configured.
        logging.warning(
            "Django setting `LOGGING` differs from `SOA_SERVER_SETTINGS['logging']` and has been "
            "overwritten with the value of `SOA_SERVER_SETTINGS['logging']`."
        )
    _run_server_reloader_wrapper(args, server_getter())
class Account():
    """Symbolic model of a contract account: address, balance, storage and
    per-mapping sum variables used during symbolic execution."""

    # Class-level tally of accounts created per contract; used to give each
    # account a unique address-argument tag.
    CONTRACT_TO_ACCOUNT_COUNT = defaultdict(int)

    def __init__(self, address, contract, typ=AccountType.DEFAULT, balance=None, storage=None, account_id=None, mapping_id_to_sum=None):
        global account_counter
        if account_id is None:
            # Fresh account: draw the next global id.
            account_counter += 1
        self.id = account_id if account_id is not None else account_counter
        self.address = address
        self.contract = contract
        self.typ = typ
        # Unknown balances become fresh 256-bit symbolic values.
        self.balance = z3.BitVec(f'{address}_balance', 256) if balance is None else balance
        self.CONTRACT_TO_ACCOUNT_COUNT[contract] += 1
        self.contract_tag = contract.name + utils.ADDRESS_ARG_TAG + str(self.CONTRACT_TO_ACCOUNT_COUNT[contract])
        self.storage = storage if storage is not None else EmptyStorage()
        self.mapping_id_to_sum = mapping_id_to_sum if mapping_id_to_sum is not None else {}

    def abstract(self, label_suffix):
        """Replace storage, mapping sums and balance with fresh abstract values."""
        old_storage = self.storage
        self.storage = AbstractStorage(f'abstract_storage{label_suffix}')
        for map_id in self.mapping_id_to_sum:
            if svm_utils.is_bv_concrete(map_id):
                map_id_string = svm_utils.get_concrete_int(map_id)
            else:
                # BUG FIX: a leftover debug statement (`raise Exception('pdb')`)
                # aborted abstraction whenever a map id was symbolic; the
                # simplified symbolic expression is a valid label component.
                map_id_string = str(z3.simplify(map_id))
            label = f'abstract_sum_{map_id_string}{label_suffix}'
            self.mapping_id_to_sum[map_id] = z3.BitVec(label, 256)
        self.balance = z3.BitVec(f'gstate_balance{label_suffix}', 256)

    def __deepcopy__(self, memo):
        # Deliberately shallow-copies storage and the sums dict; id is reused.
        return Account(self.address, self.contract, self.typ, self.balance, copy(self.storage), self.id, copy(self.mapping_id_to_sum))

    def __str__(self):
        return str({'Name': self.contract.name, 'id': self.id})

    def __repr__(self):
        return f'Account_{self.contract.name}_obj_id_{id(self)}_acc_id_{self.id}'
# NOTE(review): the marker below had been stripped to a bare `.django_db`
# line; restored the conventional pytest marker.
@pytest.mark.django_db
def test_is_registered_any_way_with_is_attendee_false_and_is_registered_true_should_return_true(mocker, user1, event1):
    """is_registered_any_way is truthy when is_attendee is False but
    is_registered is True, and both helpers are consulted exactly once."""
    mock_is_attendee = mocker.patch('manager.templatetags.filters.is_attendee')
    mock_is_registered = mocker.patch('manager.templatetags.filters.is_registered')
    mock_is_attendee.return_value = False
    mock_is_registered.return_value = True
    assert filters.is_registered_any_way(user1, event1.event_slug)
    assert mock_is_attendee.called
    assert mock_is_registered.called
    mock_is_attendee.assert_called_once_with(user1, event1.event_slug)
    mock_is_registered.assert_called_once_with(user1, event1.event_slug)
def DoJoinLoops(loop1_c, loop2_c):
    """Fuse two directly adjacent loops into one.

    Requires loop2 to immediately follow loop1, loop1's upper bound to equal
    loop2's lower bound, and the two bodies to be structurally identical.
    Returns the rewritten IR and the composed forwarding function.
    Raises SchedulingError when any precondition fails.
    """
    if loop1_c.next() != loop2_c:
        raise SchedulingError('expected the second loop to be directly after the first')
    loop1 = loop1_c._node
    loop2 = loop2_c._node
    try:
        Check_ExprEqvInContext(loop1_c.get_root(), loop1.hi, [loop1], loop2.lo, [loop2])
    except Exception as e:
        # Chain the underlying equivalence-check failure (`e` was previously
        # caught and discarded) so the root cause stays visible.
        raise SchedulingError(f'expected the first loop upper bound {loop1.hi} to be the same as the second loop lower bound {loop2.lo}') from e
    compare_ir = LoopIR_Compare()
    if not compare_ir.match_stmts(loop1.body, loop2.body):
        raise SchedulingError('expected the two loops to have identical bodies')
    # Widen the first loop to cover both ranges, then delete the second.
    (ir, fwd) = loop1_c._child_node('hi')._replace(loop2.hi)
    (ir, fwd_del) = fwd(loop2_c)._delete()
    return (ir, _compose(fwd_del, fwd))
class ExtendedQueue(Queue):
    """A ``queue.Queue`` with element removal, snapshotting, and optional
    insertion at the head of the queue."""

    def remove(self, element):
        """Delete *element* from the queue (ValueError if absent) and wake
        one producer waiting on a full queue."""
        with self.not_empty:
            self.queue.remove(element)
            self.not_full.notify()

    def as_list(self):
        """Return a consistent snapshot of the queued items as a list."""
        with self.mutex:
            return list(self.queue)

    def put(self, item, put_front=False, **kwargs):
        """Enqueue *item*; with ``put_front=True`` it jumps to the head."""
        # Wrap the payload with its placement flag; _put unwraps it.
        super().put([put_front, item], **kwargs)

    def _put(self, item):
        front, payload = item
        insert = self.queue.appendleft if front else self.queue.append
        insert(payload)
def exposed_filter_links(path):
    """Print every URL from the file at *path* that is not already a
    configured start-URL in any WebMirror ruleset."""
    if not os.path.exists(path):
        raise IOError("File at path '%s' doesn't exist!" % path)
    with open(path, 'r') as fp:
        lines = fp.readlines()
    urls = [line.strip() for line in lines if line.strip()]

    # Gather every known start URL across all rulesets.
    known_starts = []
    for ruleset in WebMirror.rules.load_rules():
        starts = ruleset['starturls']
        if starts:
            known_starts += starts

    for url in urls:
        if url not in known_starts:
            print(url)
def example():
    """Build a three-pane row whose left pane can be resized by dragging a
    vertical divider."""

    async def on_divider_drag(e: ft.DragUpdateEvent):
        # Keep the resizable pane's width clamped between 100 and 300 px.
        growing = e.delta_x > 0 and c.width < 300
        shrinking = e.delta_x < 0 and c.width > 100
        if growing or shrinking:
            c.width += e.delta_x
            await c.update_async()

    async def on_divider_hover(e: ft.HoverEvent):
        # Show a horizontal-resize cursor while hovering the divider.
        e.control.mouse_cursor = ft.MouseCursor.RESIZE_LEFT_RIGHT
        await e.control.update_async()

    c = ft.Container(
        bgcolor=ft.colors.ORANGE_300,
        alignment=ft.alignment.center,
        width=100,
    )
    return ft.Row(
        controls=[
            c,
            ft.GestureDetector(
                content=ft.VerticalDivider(),
                drag_interval=10,
                on_pan_update=on_divider_drag,
                on_hover=on_divider_hover,
            ),
            ft.Container(
                bgcolor=ft.colors.BROWN_400,
                alignment=ft.alignment.center,
                expand=1,
            ),
        ],
        spacing=0,
        width=400,
        height=400,
    )
class DummyModel(BaseModel):
    """A stub model that ignores its inputs — useful for tests and dry runs."""

    def chat(self, messages, max_tokens=None, temperature=0.8):
        """Return a fixed canned reply regardless of the conversation."""
        return 'I am a dummy model.'

    def count_tokens(self, messages):
        """Pretend every message costs zero tokens."""
        return 0

    def get_token_limit(self):
        """Report a fixed context window of 4000 tokens."""
        return 4000

    def config(self):
        """Serialize this model to a plain config dict."""
        return {'class': self.__class__.__name__, 'type': 'model'}

    # BUG FIX: the method takes `cls` but had no decorator, so calling
    # DummyModel.from_config(cfg) would bind the config dict as `cls`.
    @classmethod
    def from_config(cls, config):
        """Reconstruct a model from a config dict (no state to restore)."""
        return cls()
class LinearEigenproblem():
    """Generalised linear eigenproblem A u = lambda M u.

    When no mass form M is supplied, the L2 inner product is used.
    Raises ImportError when SLEPc is unavailable.
    """

    def __init__(self, A, M=None, bcs=None, bc_shift=0.0):
        if not SLEPc:
            raise ImportError('Unable to import SLEPc, eigenvalue computation not possible (try firedrake-update --slepc)')
        self.A = A
        args = A.arguments()
        (v, u) = args
        if M:
            self.M = M
        else:
            # Default mass matrix: the L2 inner product of trial and test.
            from ufl import inner, dx
            self.M = inner(u, v) * dx
        self.output_space = u.function_space()
        self.bcs = bcs
        self.bc_shift = bc_shift

    def dirichlet_bcs(self):
        """Yield the concrete Dirichlet BCs of every supplied boundary condition."""
        for bc in self.bcs:
            yield from bc.dirichlet_bcs()

    # BUG FIX: this was a bare `_property` line (stripped decorator);
    # restored the @property so `self.dm` is attribute-style access.
    @property
    def dm(self):
        return self.output_space.dm
class Migration(migrations.Migration):
    """Add/alter columns on the `awardsearch` model so it carries the full set
    of award attributes (officer amounts/names, option values, dates,
    transaction links) and gains indexes on commonly-filtered fields."""

    dependencies = [('search', '0014_additional_transaction_search_cols')]

    # Generated by Django's makemigrations; one AddField/AlterField per column.
    operations = [
        migrations.AddField(model_name='awardsearch', name='base_and_all_options_value', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='base_exercised_options_val', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='certified_date', field=models.DateField(null=True)),
        migrations.AddField(model_name='awardsearch', name='create_date', field=models.DateTimeField(null=True)),
        migrations.AddField(model_name='awardsearch', name='earliest_transaction_id', field=models.IntegerField(db_index=True, null=True)),
        migrations.AddField(model_name='awardsearch', name='fpds_agency_id', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='fpds_parent_agency_id', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='is_fpds', field=models.BooleanField(default=False)),
        migrations.AddField(model_name='awardsearch', name='latest_transaction_id', field=models.IntegerField(db_index=True, null=True)),
        migrations.AddField(model_name='awardsearch', name='non_federal_funding_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_1_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_1_name', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_2_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_2_name', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_3_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_3_name', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_4_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_4_name', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_5_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='officer_5_name', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='parent_award_piid', field=models.TextField(db_index=True, null=True)),
        migrations.AddField(model_name='awardsearch', name='raw_recipient_name', field=models.TextField(null=True)),
        migrations.AddField(model_name='awardsearch', name='subaward_count', field=models.IntegerField(null=True)),
        migrations.AddField(model_name='awardsearch', name='total_funding_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='total_indirect_federal_sharing', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='total_subaward_amount', field=models.DecimalField(blank=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AddField(model_name='awardsearch', name='transaction_unique_id', field=models.TextField(null=True)),
        migrations.AlterField(model_name='awardsearch', name='awarding_agency_id', field=models.IntegerField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='category', field=models.TextField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='fain', field=models.TextField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='funding_agency_id', field=models.IntegerField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='period_of_performance_current_end_date', field=models.DateField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='period_of_performance_start_date', field=models.DateField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='piid', field=models.TextField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='total_obligation', field=models.DecimalField(blank=True, db_index=True, decimal_places=2, max_digits=23, null=True)),
        migrations.AlterField(model_name='awardsearch', name='type', field=models.TextField(db_index=True, null=True)),
        migrations.AlterField(model_name='awardsearch', name='uri', field=models.TextField(db_index=True, null=True)),
    ]
def run_core() -> None:
    """Bootstrap the embedded client Core: set up module mocks, logging and
    gevent patches, then run the core until it halts."""
    import sys
    import pathlib

    # Make the bundled module mocks importable before anything else loads.
    mocks_dir = (pathlib.Path(__file__) / '..').resolve() / 'modmocks'
    sys.path.insert(0, str(mocks_dir.resolve()))

    import logging
    import utils.log
    import settings
    utils.log.init_embedded(logging.DEBUG, settings.SENTRY_DSN, settings.VERSION)

    # gevent monkey-patching is applied before the core modules are imported.
    from gevent import monkey
    monkey.patch_socket()
    monkey.patch_time()
    monkey.patch_select()

    from client.core import Core
    from core import CoreRunner

    core = Core()
    CoreRunner(core).run()
    logging.info('Core halted because %s', core.result.get())
# NOTE(review): this block is damaged in the source it was extracted from:
#  - the two marker lines below lost their decorator prefixes (presumably
#    '@pytest.mark.xfail' and '@pytest.mark.parametrize' — confirm upstream);
#  - several numeric literals in the parametrize data are missing (the empty
#    'value'/'gasPrice' entries and the r/s values before 37 and 38).
# It does not parse as-is and must be restored from the original test suite;
# the code is reproduced unchanged apart from these comments.
.xfail
.parametrize('expected_txn, raw_tx, expected_tx_hash, r, s, v', (({'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55', 'value': , 'gas': 2000000, 'gasPrice': , 'nonce': 0, 'chainId': 1}, HexBytes('0xf86a8086de848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833abd5341a6f9fa91216a6f3ee2c051fea6a0428'), HexBytes('0xd8f64a42b57be0d565f385378db2f6bf324ce14a594afc05de90436e9ce01f60'), , , 37), ({'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55', 'value': 0, 'gas': 31853, 'gasPrice': 0, 'nonce': 0, 'chainId': 1}, HexBytes('0xf85d8080827c6d94f0109fc8df283027b6285cc889f5aa624eac1ff22f17b38af35286ffbb0c6376c86ec91c20ecbad93f84913a0cc15e7580cd99f83d6e12e82e3544cb4439964d5087da78f74cefeec9a450b16ae179fd8fe20'), HexBytes('0xb0c5e2c6b29eeb0b9c1d63eaa8b0f93c02ead18ae01cb7fc795b0612d3e9d55a'), , , 38)), ids=['web3js_example', '31byte_r_and_s'])
def test_eth_account_sign_and_send_EIP155_transaction_to_eth_tester(w3, expected_txn, raw_tx, expected_tx_hash, r, s, v):
    # Broadcast the pre-signed raw transaction and confirm the node reports
    # the same hash, field values, and signature components (r, s, v).
    actual_tx_hash = w3.eth.send_raw_transaction(raw_tx)
    assert (actual_tx_hash == expected_tx_hash)
    actual_txn = w3.eth.get_transaction(actual_tx_hash)
    for key in ('to', 'nonce', 'gas', 'gasPrice', 'value'):
        assert (actual_txn[key] == expected_txn[key])
    assert (actual_txn.r == r)
    assert (actual_txn.s == s)
    assert (actual_txn.v == v)
class OptionSeriesPyramid3dSonificationTracksMappingFrequency(Options):
    """Accessors for the pyramid3d sonification frequency-mapping options.

    BUG FIX: each getter/setter pair below consisted of two plain methods with
    the same name (the decorators had been stripped), so the setter silently
    shadowed its getter. Restored the @property / @<name>.setter pattern.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class UnsignedCharType(Type):
    """Descriptor for the one-byte `unsigned char` base type."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.name = UNSIGNED_CHAR
        self.byte_size = 1

    def debug_info(self):
        """Serialize this base type as a DWARF debug-info record:
        abbrev code, byte size, DW_ATE encoding, then the NUL-terminated name."""
        record = bytearray()
        record.append(ENUM_ABBREV_CODE['BASE_TYPE_WITH_ENCODING'])
        record.append(self.byte_size)
        record.append(ENUM_DW_ATE['DW_ATE_unsigned'])
        record.extend(ord(ch) for ch in self.name)
        record.append(0)  # NUL terminator for the name string
        return record
def train_gpu_with_autocast(model, device, optimizer, data_type, input_size, output_size, batch_size, args):
    """Benchmark a training (or forward-only) loop under torch autocast.

    Runs ``args.steps + args.warmups`` iterations on random data, timing only
    the post-warmup steps with CUDA events.  Returns ``(total_time, loss)``
    where total_time is in seconds and loss is the last computed loss (None
    when ``args.fw_only`` is set).
    """
    print('Running with 32-bit weights using autocast to ', data_type, ' data type')
    # Map the CLI dtype name onto a torch dtype; 'tf32' still computes in
    # float32 tensors (TF32 is enabled elsewhere via backend flags —
    # NOTE(review): not visible in this block, confirm in caller).
    dtype_map = {'float16': torch.float16, 'float': torch.float32, 'tf32': torch.float32, 'bfloat16': torch.bfloat16}
    dt = dtype_map[data_type]
    loss_f = nn.CrossEntropyLoss().to(device)
    from torch.cuda.amp import autocast
    torch.cuda.synchronize()
    start_event = torch.cuda.Event(enable_timing=True)
    end_event = torch.cuda.Event(enable_timing=True)
    total_time = 0.0
    for i in range((args.steps + args.warmups)):
        data = torch.randn(batch_size, input_size, device=device)
        target = torch.randint(output_size, [batch_size], device=device, dtype=torch.long)
        if (i >= args.warmups):
            # Start timing only after the warmup iterations.
            start_event.record()
        loss = None
        if (not args.fw_only):
            optimizer.zero_grad(set_to_none=args.set_to_none)
        with autocast(dtype=dt):
            # Forward pass (and loss) run under mixed precision.
            output = model(data)
            if (not args.fw_only):
                loss = loss_f(output, target)
        if (not args.fw_only):
            loss.backward()
            if args.optimizer:
                optimizer.step()
        if (i >= args.warmups):
            end_event.record()
            # Synchronize so elapsed_time reflects completed GPU work.
            torch.cuda.synchronize()
            total_time += (start_event.elapsed_time(end_event) * 0.001)  # ms -> s
    return (total_time, loss)
def get_agent_class_from_string(class_name: str) -> "GenericAgent":
    """Resolve a dotted path like ``pkg.module.ClassName`` to the class object.

    Logs and re-raises any failure (bad module path, missing attribute) so the
    caller sees the original exception with its traceback intact.
    """
    # rpartition splits on the last dot; with no dot the module part is ''
    # and import_module('') raises, matching the previous behavior.
    module_path, _, attr_name = class_name.rpartition('.')
    try:
        agent_module = importlib.import_module(module_path)
        agent_class = getattr(agent_module, attr_name)
    except Exception:
        logger.error(f'Not able to load {class_name}. Try setting --user-dir?')
        # Bare raise preserves the original exception and traceback.
        raise
    return agent_class
class OptionPlotoptionsColumnZones(Options):
    """Accessors for column-series zone options (className, colors, dash style).

    BUG FIX: each getter/setter pair below consisted of two plain methods with
    the same name (the decorators had been stripped), so the setter silently
    shadowed its getter. Restored the @property / @<name>.setter pattern.
    """

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def dashStyle(self):
        return self._config_get(None)

    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)
class Benchmark(lg.Node):
    """Collects capture timing points for a fixed run time and forwards them
    to a worker process for recording, then publishes an exit signal."""

    INPUT = lg.Topic(CaptureResult)
    OUTPUT_EXIT = lg.Topic(ExitSignal)
    state: BenchmarkState
    config: BenchmarkConfig

    def setup(self) -> None:
        logger.info(f' benchmarking for {self.config.run_time} seconds')
        self.state.points = Queue()
        self.state.tasks = multiprocessing.JoinableQueue()
        self.state.worker = BenchmarkWorker(self.state.tasks, 0, self.config.output_path, self.config.output_name)
        self.state.worker.start()

    # BUG FIX: the decorator had been stripped to a bare `(OUTPUT_EXIT)`
    # expression; restored the labgraph publisher binding.
    @lg.publisher(OUTPUT_EXIT)
    async def on_done(self) -> lg.AsyncPublisher:
        """Drain queued capture points into the worker queue until the run is
        marked done, then publish the exit signal."""
        while True:
            point: CapturePoint = None
            try:
                point = self.state.points.get_nowait()
            except Exception:
                # Queue is empty — keep polling.  (Was a bare `except:`,
                # which would also swallow KeyboardInterrupt/SystemExit.)
                pass
            if point is not None:
                cap0 = point.captures[0]
                self.state.tasks.put((point.rec_time, (cap0.frame_index, cap0.proc_runtime, cap0.proc_target_fps, cap0.system_timestamp), [point.captures[i].system_timestamp for i in range(1, len(point.captures))]))
            elif self.state.done:
                break
            await asyncio.sleep(0.005)
        yield (self.OUTPUT_EXIT, ExitSignal())

    # BUG FIX: the decorator had been stripped to a bare `(INPUT)` expression;
    # restored the labgraph subscriber binding.
    @lg.subscriber(INPUT)
    async def on_msg(self, message: CaptureResult) -> None:
        """Record one capture point per message after a 5-second warm-up."""
        rec_time = time.perf_counter()
        if self.state.start_time == 0:
            self.state.start_time = rec_time
        if (not self.state.done) and (rec_time - self.state.start_time) >= self.config.run_time:
            self.state.done = True
        elif (rec_time - self.state.start_time) > 5:
            # Points recorded only after the first 5 seconds.
            captures = message.captures[:]
            self.state.points.put(CapturePoint(rec_time, captures))

    def cleanup(self) -> None:
        logger.info(' closing worker...')
        # A None sentinel tells the worker to stop; join waits for it to finish.
        self.state.tasks.put(None)
        self.state.tasks.join()
def test_message_arity_1() -> None:
    """A one-argument function maps to matching argument/return messages."""

    def f(a: int) -> str:
        pass

    argument_message, return_message, _ = function_to_node(f)
    # The argument message mirrors the function's signature exactly.
    assert argument_message.__annotations__ == {'a': int}
    argument_message(a=0)
    # The return value is exposed under the conventional 'sample' field.
    assert return_message.__annotations__ == {'sample': str}
    return_message(sample='hello')
class CupertinoSlider(ConstrainedControl):
    """A macOS-style slider control.

    BUG FIXES:
      * Every getter/setter pair below had lost its decorators (duplicate
        plain defs, the last silently winning) and the setter lines were
        garbled (`_color.setter` etc.); restored the @property /
        @<name>.setter pattern.
      * ``self.round = round`` previously assigned the *builtin* ``round``
        function because no ``round`` parameter existed; added a trailing
        keyword parameter with a ``None`` default (backward compatible).
    """

    def __init__(
        self,
        ref: Optional[Ref] = None,
        key: Optional[str] = None,
        width: OptionalNumber = None,
        height: OptionalNumber = None,
        left: OptionalNumber = None,
        top: OptionalNumber = None,
        right: OptionalNumber = None,
        bottom: OptionalNumber = None,
        expand: Union[(None, bool, int)] = None,
        col: Optional[ResponsiveNumber] = None,
        opacity: OptionalNumber = None,
        rotate: RotateValue = None,
        scale: ScaleValue = None,
        offset: OffsetValue = None,
        aspect_ratio: OptionalNumber = None,
        animate_opacity: AnimationValue = None,
        animate_size: AnimationValue = None,
        animate_position: AnimationValue = None,
        animate_rotation: AnimationValue = None,
        animate_scale: AnimationValue = None,
        animate_offset: AnimationValue = None,
        on_animation_end=None,
        tooltip: Optional[str] = None,
        visible: Optional[bool] = None,
        disabled: Optional[bool] = None,
        data: Any = None,
        value: OptionalNumber = None,
        min: OptionalNumber = None,
        max: OptionalNumber = None,
        divisions: Optional[int] = None,
        active_color: Optional[str] = None,
        thumb_color: Optional[str] = None,
        on_change=None,
        on_change_start=None,
        on_change_end=None,
        on_focus=None,
        on_blur=None,
        round: Optional[int] = None,
    ):
        ConstrainedControl.__init__(self, ref=ref, key=key, width=width, height=height, left=left, top=top, right=right, bottom=bottom, expand=expand, col=col, opacity=opacity, rotate=rotate, scale=scale, offset=offset, aspect_ratio=aspect_ratio, animate_opacity=animate_opacity, animate_size=animate_size, animate_position=animate_position, animate_rotation=animate_rotation, animate_scale=animate_scale, animate_offset=animate_offset, on_animation_end=on_animation_end, tooltip=tooltip, visible=visible, disabled=disabled, data=data)
        self.value = value
        self.min = min
        self.max = max
        self.divisions = divisions
        self.round = round
        self.active_color = active_color
        self.thumb_color = thumb_color
        self.on_change = on_change
        self.on_change_start = on_change_start
        self.on_change_end = on_change_end
        self.on_focus = on_focus
        self.on_blur = on_blur

    def _get_control_name(self):
        return 'cupertinoslider'

    # value
    @property
    def value(self) -> OptionalNumber:
        # Clamp the reported value into [min, max] when bounds are set.
        v = self._get_attr('value', data_type='float')
        if self.min and (v < self.min):
            v = self.min
        elif self.max and (v > self.max):
            v = self.max
        return v

    @value.setter
    def value(self, value: OptionalNumber):
        self._set_attr('value', value)

    # min
    @property
    def min(self) -> OptionalNumber:
        return self._get_attr('min')

    @min.setter
    def min(self, value: OptionalNumber):
        self._set_attr('min', value)

    # max
    @property
    def max(self) -> OptionalNumber:
        return self._get_attr('max')

    @max.setter
    def max(self, value: OptionalNumber):
        self._set_attr('max', value)

    # divisions
    @property
    def divisions(self) -> Optional[int]:
        return self._get_attr('divisions')

    @divisions.setter
    def divisions(self, value: Optional[int]):
        self._set_attr('divisions', value)

    # round
    @property
    def round(self) -> Optional[int]:
        return self._get_attr('round')

    @round.setter
    def round(self, value: Optional[int]):
        self._set_attr('round', value)

    # active_color
    @property
    def active_color(self):
        return self._get_attr('activeColor')

    @active_color.setter
    def active_color(self, value):
        self._set_attr('activeColor', value)

    # thumb_color
    @property
    def thumb_color(self):
        return self._get_attr('thumbColor')

    @thumb_color.setter
    def thumb_color(self, value):
        self._set_attr('thumbColor', value)

    # on_change
    @property
    def on_change(self):
        return self._get_event_handler('change')

    @on_change.setter
    def on_change(self, handler):
        self._add_event_handler('change', handler)

    # on_change_start
    @property
    def on_change_start(self):
        return self._get_event_handler('change_start')

    @on_change_start.setter
    def on_change_start(self, handler):
        self._add_event_handler('change_start', handler)

    # on_change_end
    @property
    def on_change_end(self):
        return self._get_event_handler('change_end')

    @on_change_end.setter
    def on_change_end(self, handler):
        self._add_event_handler('change_end', handler)

    # on_focus
    @property
    def on_focus(self):
        return self._get_event_handler('focus')

    @on_focus.setter
    def on_focus(self, handler):
        self._add_event_handler('focus', handler)

    # on_blur
    @property
    def on_blur(self):
        return self._get_event_handler('blur')

    @on_blur.setter
    def on_blur(self, handler):
        self._add_event_handler('blur', handler)
class AmazonVideoApi(VideoInterface):
    """Amazon Rekognition implementation of the asynchronous video interface.

    Each feature has a ``launch_job`` method that starts a Rekognition job and
    a ``get_job_result`` method that pages through the results once finished.
    The previously 5x-duplicated failure-check/pagination logic now lives in
    ``_raise_if_failed`` / ``_collect_job_pages``.
    """

    def video__label_detection_async__launch_job(self, file: str, file_url: str = '') -> AsyncLaunchJobResponseType:
        return AsyncLaunchJobResponseType(provider_job_id=amazon_launch_video_job(file, 'LABEL'))

    def video__text_detection_async__launch_job(self, file: str, file_url: str = '') -> AsyncLaunchJobResponseType:
        return AsyncLaunchJobResponseType(provider_job_id=amazon_launch_video_job(file, 'TEXT'))

    def video__face_detection_async__launch_job(self, file: str, file_url: str = '') -> AsyncLaunchJobResponseType:
        return AsyncLaunchJobResponseType(provider_job_id=amazon_launch_video_job(file, 'FACE'))

    def video__person_tracking_async__launch_job(self, file: str, file_url: str = '') -> AsyncLaunchJobResponseType:
        return AsyncLaunchJobResponseType(provider_job_id=amazon_launch_video_job(file, 'PERSON'))

    def video__explicit_content_detection_async__launch_job(self, file: str, file_url: str = '') -> AsyncLaunchJobResponseType:
        return AsyncLaunchJobResponseType(provider_job_id=amazon_launch_video_job(file, 'EXPLICIT'))

    @staticmethod
    def _raise_if_failed(response) -> None:
        """Raise ProviderException when a Rekognition job reports FAILED."""
        if response['JobStatus'] == 'FAILED':
            error: str = response.get('StatusMessage', 'Amazon returned a job status: FAILED')
            raise ProviderException(error)

    def _collect_job_pages(self, getter, provider_job_id: str):
        """Return all response pages of a SUCCEEDED job, or None while pending.

        Follows Rekognition's NextToken pagination; raises on FAILED status.
        """
        response = handle_amazon_call(getter, JobId=provider_job_id)
        self._raise_if_failed(response)
        if response['JobStatus'] != 'SUCCEEDED':
            return None
        responses = [response]
        pagination_token = response.get('NextToken')
        while pagination_token:
            response = handle_amazon_call(getter, JobId=provider_job_id, NextToken=pagination_token)
            self._raise_if_failed(response)
            responses.append(response)
            pagination_token = response.get('NextToken')
        return responses

    def video__label_detection_async__get_job_result(self, provider_job_id: str) -> AsyncBaseResponseType[LabelDetectionAsyncDataClass]:
        responses = self._collect_job_pages(self.clients['video'].get_label_detection, provider_job_id)
        if responses is None:
            # BUG FIX: the original passed the JobStatus string as the job id.
            return AsyncPendingResponseType(provider_job_id=provider_job_id)
        labels = []
        for page in responses:
            labels.extend(amazon_video_labels_parser(page))
        return AsyncResponseType(original_response=responses, standardized_response=LabelDetectionAsyncDataClass(labels=labels), provider_job_id=provider_job_id)

    def video__text_detection_async__get_job_result(self, provider_job_id: str) -> TextDetectionAsyncDataClass:
        responses = self._collect_job_pages(self.clients['video'].get_text_detection, provider_job_id)
        if responses is None:
            # BUG FIX: the original passed the JobStatus string as the job id.
            return AsyncPendingResponseType(provider_job_id=provider_job_id)
        texts = []
        for page in responses:
            texts.extend(amazon_video_text_parser(page))
        return AsyncResponseType(original_response=responses, standardized_response=TextDetectionAsyncDataClass(texts=texts), provider_job_id=provider_job_id)

    def video__face_detection_async__get_job_result(self, provider_job_id: str) -> FaceDetectionAsyncDataClass:
        responses = self._collect_job_pages(self.clients['video'].get_face_detection, provider_job_id)
        if responses is None:
            # BUG FIX: the original passed the JobStatus string as the job id.
            return AsyncPendingResponseType(provider_job_id=provider_job_id)
        faces = []
        for page in responses:
            faces.extend(amazon_video_face_parser(page))
        return AsyncResponseType(original_response=responses, standardized_response=FaceDetectionAsyncDataClass(faces=faces), provider_job_id=provider_job_id)

    def video__person_tracking_async__get_job_result(self, provider_job_id: str) -> PersonTrackingAsyncDataClass:
        responses = self._collect_job_pages(self.clients['video'].get_person_tracking, provider_job_id)
        if responses is None:
            # BUG FIX: the original passed the JobStatus string as the job id.
            return AsyncPendingResponseType(provider_job_id=provider_job_id)
        persons = []
        for page in responses:
            persons.extend(amazon_video_person_tracking_parser(page))
        return AsyncResponseType(original_response=responses, standardized_response=PersonTrackingAsyncDataClass(persons=persons), provider_job_id=provider_job_id)

    def video__explicit_content_detection_async__get_job_result(self, provider_job_id: str) -> ExplicitContentDetectionAsyncDataClass:
        responses = self._collect_job_pages(self.clients['video'].get_content_moderation, provider_job_id)
        if responses is None:
            # BUG FIX: the original passed the JobStatus string as the job id.
            return AsyncPendingResponseType(provider_job_id=provider_job_id)
        moderated_content = []
        for page in responses:
            moderated_content.extend(amazon_video_explicit_parser(page))
        return AsyncResponseType(original_response=responses, standardized_response=ExplicitContentDetectionAsyncDataClass(moderation=moderated_content), provider_job_id=provider_job_id)
# NOTE(review): the two decorator lines below had lost their prefixes
# (a bare `.requires_window_manager` marker and a bare string call);
# restored the conventional pytest marker and unittest.mock patch.
@pytest.mark.requires_window_manager
@patch('ert.gui.tools.run_analysis.run_analysis_tool.QMessageBox')
def test_failure(mock_msgbox, mock_tool, qtbot, monkeypatch):
    """A failing smoother update surfaces a warning dialog, not a critical one."""
    monkeypatch.setattr(run_analysis_tool, 'smoother_update', Mock(side_effect=ErtAnalysisError('some error')))
    mock_tool.run()
    qtbot.waitUntil((lambda: (len(mock_msgbox.warning.mock_calls) > 0)), timeout=2000)
    mock_msgbox.critical.assert_not_called()
    mock_msgbox.warning.assert_called_once_with(None, 'Failed', "Unable to run analysis for case 'source'.\nThe following error occurred: some error")
    mock_tool._dialog.accept.assert_not_called()
def extractOldmosstreeWordpressCom(item):
    """Build a release message for posts from oldmosstree.wordpress.com.

    Returns None for previews or untitled chapters, a release message when a
    known series tag matches, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None

    # (tag to look for, canonical series name, translation type)
    tag_rules = [
        ("Love Rival's Brother", "Love Rival's Brother", 'translated'),
        ('Thinks I Like Him', 'Everyone Thinks that I Like Him', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class _ContainerProjectsZonesRepository(_base_repository.GCPRepository):
    """Repository wrapper for the GKE ``projects.zones`` API component."""

    def __init__(self, **kwargs):
        super(_ContainerProjectsZonesRepository, self).__init__(component='projects.zones', **kwargs)

    def get_serverconfig(self, project_id, zone, fields=None, **kwargs):
        """Fetch the container server config for *zone* in *project_id*.

        Extra keyword arguments are forwarded verbatim to the API call.
        """
        arguments = {'projectId': project_id, 'zone': zone, 'fields': fields}
        arguments.update(kwargs)
        return self.execute_query(verb='getServerconfig', verb_arguments=arguments)
class OptionSeriesScatterDatalabels(Options):
    """Generated Highcharts ``series.scatter.dataLabels`` option accessors.

    Each scalar option appears as a paired reader (no argument, returns the
    Highcharts default via ``_config_get``) and writer (stores the value via
    ``_config``); ``animation``/``filter``/``textPath`` expose nested option
    sub-objects instead.

    NOTE(review): in plain Python the second ``def`` of each pair shadows the
    first, so this layout only behaves as getter/setter if ``@property`` /
    ``@<name>.setter`` decorators were stripped from this source — confirm
    against the original generator before editing.
    """
    def align(self):
        return self._config_get('center')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesScatterDatalabelsAnimation':
        return self._config_sub_data('animation', OptionSeriesScatterDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(True)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesScatterDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesScatterDatalabelsFilter)
    def format(self):
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesScatterDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesScatterDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('bottom')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(0)
    def y(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def cbFun(snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBinds, cbCtx):
    """Callback for asynchronous SNMP notification delivery.

    Prints the delivery outcome for *sendRequestHandle*: a transport-level
    failure, a receiver-reported error, or success with the variable
    bindings that were delivered.
    """
    if errorIndication:
        # Engine/transport level failure — the notification never went out.
        print('Notification %s not sent: %s' % (sendRequestHandle, errorIndication))
        return
    if errorStatus:
        # The receiver responded with a PDU-level error status.
        print('Notification Receiver returned error for %s: %s %s' % (sendRequestHandle, errorStatus, errorIndex))
        return
    print('Notification %s delivered:' % sendRequestHandle)
    for name, val in varBinds:
        print('%s = %s' % (name.prettyPrint(), val.prettyPrint()))
class Datalabels(Options):
    """Generated ChartJS datalabels plugin option accessors.

    Scalar options appear as paired reader/writer ``def``s; ``displays`` /
    ``formatters`` return enum helpers and ``fonts`` / ``paddings`` return
    nested option sub-objects.

    NOTE(review): in plain Python the writer ``def`` of each pair shadows
    the reader, so this layout presumes ``@property``/setter decorators
    were stripped from this source — confirm before refactoring.

    NOTE(review): the later options (``formatter``, ``rotation``,
    ``textAlign``, ``textStroke*``, ``textShadow*``) call ``self.get`` /
    ``self.set`` while the earlier ones call ``self._config_get`` /
    ``self._config`` — cannot tell from here whether these are aliases on
    ``Options``; verify before unifying.
    """
    # Properties whose values are resolved against the page theme.
    component_properties = ('color',)
    def align(self):
        return self._config_get('center')
    def align(self, value: str):
        self._config(value)
    def anchor(self):
        return self._config_get('center')
    def anchor(self, value: str):
        self._config(value)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, color: Optional[str]):
        self._config(color)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, color: Optional[str]):
        self._config(color)
    def borderRadius(self):
        return self._config_get(None)
    def borderRadius(self, num: float):
        self._config(num)
    def borderWidth(self):
        return self._config_get(None)
    def borderWidth(self, num: float):
        self._config(num)
    def clamp(self):
        return self._config_get(False)
    def clamp(self, flag: bool):
        self._config(flag)
    def clip(self):
        return self._config_get(False)
    def clip(self, flag: bool):
        self._config(flag)
    def color(self):
        return self._config_get('#F66')
    def color(self, c: str):
        self._config(c)
    def display(self):
        return self._config_get(True)
    def display(self, value: Union[(str, bool)]):
        self._config(value)
    def displays(self) -> EnumDisplays:
        return EnumDisplays(self, 'display')
    def font(self):
        return self._config_get(None)
    def font(self, values: dict):
        self._config(values)
    def fonts(self) -> Font:
        return self._config_sub_data('font', Font)
    def formatters(self) -> EnumFormatters:
        return EnumFormatters(self, 'formatter')
    def formatter(self):
        return self.get(None)
    def formatter(self, value):
        self.set(value)
    def padding(self):
        return self._config_get(None)
    def padding(self, values: dict):
        self._config(values)
    def paddings(self) -> Padding:
        return self._config_sub_data('padding', Padding)
    def rotation(self):
        return self.get(0)
    def rotation(self, num: float):
        self.set(num)
    def textAlign(self):
        return self.get('start')
    def textAlign(self, value: str):
        self.set(value)
    def textStrokeColor(self):
        return self.get(None)
    def textStrokeColor(self, value: str):
        self.set(value)
    def textStrokeWidth(self):
        return self.get(0)
    def textStrokeWidth(self, num: float):
        self.set(num)
    def textShadowBlur(self):
        return self.get(0)
    def textShadowBlur(self, num: float):
        self.set(num)
    def textShadowColor(self):
        return self.get(None)
    def textShadowColor(self, color: str):
        self.set(color)
class BitgetBot(Passivbot):
    """Bitget USDT-margined perpetual ("umcbl" product type) Passivbot backend.

    Wraps two ccxt clients: ``self.ccp`` (ccxt.pro, websocket streams) and
    ``self.cca`` (ccxt async REST), both locked to the 'swap' market type.
    """
    def __init__(self, config: dict):
        """Create the websocket and REST clients from the user's API credentials."""
        super().__init__(config)
        self.ccp = getattr(ccxt_pro, self.exchange)({'apiKey': self.user_info['key'], 'secret': self.user_info['secret'], 'password': self.user_info['passphrase']})
        self.ccp.options['defaultType'] = 'swap'
        self.cca = getattr(ccxt_async, self.exchange)({'apiKey': self.user_info['key'], 'secret': self.user_info['secret'], 'password': self.user_info['passphrase']})
        self.cca.options['defaultType'] = 'swap'
        # Batch-size caps for order management requests.
        self.max_n_cancellations_per_batch = 10
        self.max_n_creations_per_batch = 5
        # ccxt side + position side -> Bitget hedge-mode order side string.
        self.order_side_map = {'buy': {'long': 'open_long', 'short': 'close_short'}, 'sell': {'long': 'close_long', 'short': 'open_short'}}
    async def init_bot(self):
        """Populate per-symbol market metadata caches, then finish base-class init."""
        (await self.init_symbols())
        for symbol in self.symbols:
            elm = self.markets_dict[symbol]
            self.symbol_ids[symbol] = elm['id']
            # Floor slightly above Bitget's minimum order cost; fall back to
            # 0.1 when the market reports no minimum.
            self.min_costs[symbol] = max(5.1, (0.1 if (elm['limits']['cost']['min'] is None) else elm['limits']['cost']['min']))
            self.min_qtys[symbol] = elm['limits']['amount']['min']
            self.qty_steps[symbol] = elm['precision']['amount']
            self.price_steps[symbol] = elm['precision']['price']
            self.c_mults[symbol] = elm['contractSize']
            self.coins[symbol] = symbol.replace('/USDT:USDT', '')
            self.tickers[symbol] = {'bid': 0.0, 'ask': 0.0, 'last': 0.0}
            self.open_orders[symbol] = []
            self.positions[symbol] = {'long': {'size': 0.0, 'price': 0.0}, 'short': {'size': 0.0, 'price': 0.0}}
            self.upd_timestamps['open_orders'][symbol] = 0.0
            self.upd_timestamps['tickers'][symbol] = 0.0
            self.upd_timestamps['positions'][symbol] = 0.0
        (await super().init_bot())
    async def start_websockets(self):
        """Run the balance, order and ticker websocket loops concurrently."""
        (await asyncio.gather(self.watch_balance(), self.watch_orders(), self.watch_tickers()))
    async def watch_balance(self):
        """Stream balance updates until ``self.stop_websocket`` is set."""
        while True:
            try:
                if self.stop_websocket:
                    break
                res = (await self.ccp.watch_balance())
                # NOTE(review): 'total' is overwritten with 'free' here —
                # presumably to exclude margin held by positions; confirm.
                res['USDT']['total'] = res['USDT']['free']
                self.handle_balance_update(res)
            except Exception as e:
                print(f'exception watch_balance', e)
                traceback.print_exc()
    async def watch_orders(self):
        """Stream order updates, normalizing position side and quantity keys."""
        while True:
            try:
                if self.stop_websocket:
                    break
                res = (await self.ccp.watch_orders())
                for i in range(len(res)):
                    res[i]['position_side'] = res[i]['info']['posSide']
                    res[i]['qty'] = res[i]['amount']
                self.handle_order_update(res)
            except Exception as e:
                print(f'exception watch_orders', e)
                traceback.print_exc()
    async def watch_tickers(self, symbols=None):
        """Stream ticker updates for *symbols* (defaults to all bot symbols)."""
        symbols = list((self.symbols if (symbols is None) else symbols))
        while True:
            try:
                if self.stop_websocket:
                    break
                res = (await self.ccp.watch_tickers(symbols))
                if (res['last'] is None):
                    # No last price in the update: substitute bid or ask at random.
                    res['last'] = np.random.choice([res['bid'], res['ask']])
                self.handle_ticker_update(res)
            except Exception as e:
                print(f'exception watch_tickers {symbols}', e)
                traceback.print_exc()
    async def fetch_open_orders(self, symbol: str=None):
        """Fetch all open orders, normalized to Passivbot's schema and sorted by
        timestamp. Returns False on failure."""
        fetched = None
        open_orders = []
        try:
            fetched = (await self.cca.private_mix_get_mix_v1_order_margincoincurrent(params={'productType': 'umcbl'}))
            for elm in fetched['data']:
                # Map Bitget's hedge-mode sides back to plain buy/sell.
                elm['side'] = ('buy' if (elm['side'] in ['close_short', 'open_long']) else 'sell')
                elm['position_side'] = elm['posSide']
                elm['price'] = float(elm['price'])
                elm['qty'] = elm['amount'] = float(elm['size'])
                elm['timestamp'] = float(elm['cTime'])
                elm['id'] = elm['orderId']
                elm['custom_id'] = elm['clientOid']
                elm['symbol'] = self.symbol_ids_inv[elm['symbol']]
                open_orders.append(elm)
            return sorted(open_orders, key=(lambda x: x['timestamp']))
        except Exception as e:
            logging.error(f'error fetching open orders {e}')
            print_async_exception(fetched)
            traceback.print_exc()
            return False
    async def fetch_positions(self) -> ([dict], float):
        """Fetch open positions and available USDT balance.

        Returns (positions, balance) or False on failure. Short sizes are
        negated.
        """
        (fetched_positions, fetched_balance) = (None, None)
        try:
            (fetched_positions, fetched_balance) = (await asyncio.gather(self.cca.private_mix_get_mix_v1_position_allposition_v2({'marginCoin': 'USDT', 'productType': 'umcbl'}), self.cca.private_mix_get_mix_v1_account_accounts({'productType': 'umcbl'})))
            balance = float([x for x in fetched_balance['data'] if (x['marginCoin'] == self.quote)][0]['available'])
            positions = []
            for elm in floatify(fetched_positions['data']):
                if (elm['total'] == 0.0):
                    continue
                positions.append({'symbol': self.symbol_ids_inv[elm['symbol']], 'position_side': elm['holdSide'], 'size': (abs(elm['total']) * (1.0 if (elm['holdSide'] == 'long') else (- 1.0))), 'price': elm['averageOpenPrice']})
            return (positions, balance)
        except Exception as e:
            logging.error(f'error fetching positions and balance {e}')
            print_async_exception(fetched_positions)
            print_async_exception(fetched_balance)
            traceback.print_exc()
            return False
    async def fetch_tickers(self):
        """Fetch and parse all umcbl tickers; False on failure."""
        fetched = None
        try:
            fetched = (await self.cca.public_mix_get_mix_v1_market_tickers(params={'productType': 'UMCBL'}))
            tickers = self.cca.parse_tickers(fetched['data'])
            return tickers
        except Exception as e:
            logging.error(f'error fetching tickers {e}')
            print_async_exception(fetched)
            traceback.print_exc()
            # NOTE(review): error text mentions bybit — looks copied from the
            # Bybit backend; kept as-is since it gates the restart path.
            if ('bybit does not have market symbol' in str(e)):
                raise Exception('ccxt gives bad symbol error... attempting bot restart')
            return False
    async def fetch_ohlcv(self, symbol: str, timeframe='1m'):
        """Fetch up to 1000 OHLCV candles for *symbol*; False on failure."""
        fetched = None
        try:
            fetched = (await self.cca.fetch_ohlcv(symbol, timeframe=timeframe, limit=1000))
            return fetched
        except Exception as e:
            logging.error(f'error fetching ohlcv for {symbol} {e}')
            print_async_exception(fetched)
            traceback.print_exc()
            return False
    async def fetch_pnls(self, start_time: int=None, end_time: int=None):
        """Fetch realized PnL fills in [start_time, end_time], paging backwards
        from the newest page until a short page is returned."""
        limit = 100
        if ((start_time is None) and (end_time is None)):
            return (await self.fetch_pnl())
        all_fetched = {}
        while True:
            fetched = (await self.fetch_pnl(start_time=start_time, end_time=end_time))
            if (fetched == []):
                break
            for elm in fetched:
                all_fetched[elm['id']] = elm
            if (len(fetched) < limit):
                break
            logging.info(f"debug fetching income {ts_to_date_utc(fetched[(- 1)]['timestamp'])}")
            # Page backwards: next request ends at the oldest fill seen.
            end_time = fetched[0]['timestamp']
        return sorted([x for x in all_fetched.values() if (x['pnl'] != 0.0)], key=(lambda x: x['timestamp']))
    async def fetch_pnl(self, start_time: int=None, end_time: int=None):
        """Fetch one page of fills with pnl/timestamp/id normalized; False on failure."""
        fetched = None
        try:
            if (end_time is None):
                # Default window end: one day into the future.
                end_time = (utc_ms() + (((1000 * 60) * 60) * 24))
            if (start_time is None):
                start_time = 0
            params = {'productType': 'umcbl', 'startTime': int(start_time), 'endTime': int(end_time)}
            fetched = (await self.cca.private_mix_get_mix_v1_order_allfills(params=params))
            pnls = []
            for elm in fetched['data']:
                pnls.append(elm)
                pnls[(- 1)]['pnl'] = float(pnls[(- 1)]['profit'])
                pnls[(- 1)]['timestamp'] = float(pnls[(- 1)]['cTime'])
                pnls[(- 1)]['id'] = pnls[(- 1)]['tradeId']
            return sorted(pnls, key=(lambda x: x['timestamp']))
        except Exception as e:
            logging.error(f'error fetching income {e}')
            print_async_exception(fetched)
            traceback.print_exc()
            return False
    async def execute_multiple(self, orders: [dict], type_: str, max_n_executions: int):
        """Run method *type_* concurrently on up to *max_n_executions* orders
        and gather the results, logging per-order failures."""
        if (not orders):
            return []
        executions = []
        for order in orders[:max_n_executions]:
            execution = None
            try:
                execution = asyncio.create_task(getattr(self, type_)(order))
                executions.append((order, execution))
            except Exception as e:
                logging.error(f'error executing {type_} {order} {e}')
                print_async_exception(execution)
                traceback.print_exc()
        results = []
        for execution in executions:
            result = None
            try:
                result = (await execution[1])
                results.append(result)
            except Exception as e:
                logging.error(f'error executing {type_} {execution} {e}')
                print_async_exception(result)
                traceback.print_exc()
        return results
    async def execute_cancellation(self, order: dict) -> dict:
        """Cancel one order; returns a normalized dict, or {} on failure."""
        executed = None
        try:
            executed = (await self.cca.cancel_order(order['id'], symbol=order['symbol']))
            return {'symbol': executed['symbol'], 'side': order['side'], 'id': executed['id'], 'position_side': order['position_side'], 'qty': order['qty'], 'price': order['price']}
        except Exception as e:
            logging.error(f'error cancelling order {order} {e}')
            print_async_exception(executed)
            traceback.print_exc()
            return {}
    async def execute_cancellations(self, orders: [dict]) -> [dict]:
        """Cancel a batch of orders, preferring reduce-only orders when the
        batch must be truncated to the exchange cap."""
        if (len(orders) > self.max_n_cancellations_per_batch):
            try:
                reduce_only_orders = [x for x in orders if x['reduce_only']]
                rest = [x for x in orders if (not x['reduce_only'])]
                orders = (reduce_only_orders + rest)[:self.max_n_cancellations_per_batch]
            except Exception as e:
                logging.error(f'debug filter cancellations {e}')
        return (await self.execute_multiple(orders, 'execute_cancellation', self.max_n_cancellations_per_batch))
    async def execute_order(self, order: dict) -> dict:
        """Place one post-only limit order; returns the (back-filled) ccxt
        response, or {} on failure."""
        executed = None
        try:
            executed = (await self.cca.create_limit_order(symbol=order['symbol'], side=order['side'], amount=abs(order['qty']), price=order['price'], params={'reduceOnly': order['reduce_only'], 'timeInForceValue': 'post_only', 'side': self.order_side_map[order['side']][order['position_side']], 'clientOid': f"{self.broker_code}#{order['custom_id']}_{str(uuid4())}"[:64]}))
            # Back-fill fields the exchange response may omit.
            if (('symbol' not in executed) or (executed['symbol'] is None)):
                executed['symbol'] = order['symbol']
            for key in ['side', 'position_side', 'qty', 'price']:
                if ((key not in executed) or (executed[key] is None)):
                    executed[key] = order[key]
            return executed
        except Exception as e:
            logging.error(f'error executing order {order} {e}')
            print_async_exception(executed)
            traceback.print_exc()
            return {}
    async def execute_orders(self, orders: [dict]) -> [dict]:
        """Place a batch of orders, capped at max_n_creations_per_batch."""
        return (await self.execute_multiple(orders, 'execute_order', self.max_n_creations_per_batch))
    async def update_exchange_config(self):
        """No exchange-side configuration needed for Bitget."""
        pass
class Solution():
    """LeetCode 1123: lowest common ancestor of a tree's deepest leaves."""

    def lcaDeepestLeaves(self, root: TreeNode) -> TreeNode:
        """Return the LCA of the deepest leaves of *root* (None for an empty tree)."""
        def deepest(node):
            # Returns (ancestor of the subtree's deepest leaves, subtree height).
            if node is None:
                return None, 0
            left_lca, left_h = deepest(node.left)
            right_lca, right_h = deepest(node.right)
            if left_h > right_h:
                return left_lca, left_h + 1
            if right_h > left_h:
                return right_lca, right_h + 1
            # Deepest leaves on both sides: this node is their common ancestor.
            return node, left_h + 1
        return deepest(root)[0]
def join_returns(cfg, arg_names, function_ast=None):
    """Merge every Return node in *cfg* into a single join block.

    A fresh 'MERGE RETURNS' block is created with one argument per name in
    *arg_names*; each Return is replaced by a Goto carrying its return
    values into that block. Returns the updated cfg and the join block's
    argument list.
    """
    merge_args = [ir.Argument(function_ast, info=n, name=n) for n in arg_names]
    merge_block = ir.Block(function_ast, merge_args, info='MERGE RETURNS')
    return_nodes = list(of_type[ir.Return](cfg.graph.nodes))
    if return_nodes:
        cfg += CfgSimple.statement(merge_block)
        for return_node in return_nodes:
            # Every return site must supply exactly one value per join argument.
            assert len(return_node.returns) == len(arg_names), (return_node.returns, arg_names)
            jump = ir.Goto(return_node.ast_node, merge_block, return_node.returns)
            cfg = cfg.replace(return_node, jump)
            cfg = cfg + (jump, merge_block)
    return (cfg, merge_args)
class InfoCharacteristic(Characteristic):
    """Read-only BLE characteristic that serves this node's info packet."""

    def __init__(self, addressfunc=None, modefunc=None):
        # Descriptor 0x2901 (user description) value is "Info packet" in ASCII bytes.
        Characteristic.__init__(self, {'uuid': BLE_INFO_CHAR, 'properties': ['read'], 'descriptors': [Descriptor({'uuid': '2901', 'value': array.array('B', [73, 110, 102, 111, 32, 112, 97, 99, 107, 101, 116])})], 'value': None})
        self._value = []
        self.addressfunc = addressfunc  # callable returning the node's MAC address
        self.modefunc = modefunc        # callable returning the capability value

    def onReadRequest(self, offset, callback):
        """Serve a BLE read: build the info packet, encode it, and return the
        base64 payload starting at *offset*."""
        packet = p2pbuffer.data_packet()
        try:
            packet.infopacket['mac'] = self.addressfunc()
        except Exception:
            # Best effort: fall back to a null MAC when the callback fails.
            packet.infopacket['mac'] = '00:00:00:00:00:00'
        packet.infopacket['unitno'] = int(Settings.Settings['Unit'])
        packet.infopacket['build'] = int(rpieGlobals.BUILD)
        packet.infopacket['name'] = Settings.Settings['Name']
        packet.infopacket['type'] = int(rpieGlobals.NODE_TYPE_ID_RPI_EASY_STD)
        packet.infopacket['cap'] = self.modefunc()
        packet.encode(1)
        payload = list(base64.b64encode(packet.buffer[offset:]))
        callback(Characteristic.RESULT_SUCCESS, payload)
class Calendar():
    """Factory for calendar-related UI components (day grids, timers, month
    overviews, legends, event links and date pills)."""

    def __init__(self, ui):
        self.page = ui.page

    def days(self, month: int=None, content=None, year: int=None, width: types.SIZE_TYPE=(None, '%'), height: types.SIZE_TYPE=(None, 'px'), align: str=None, options: dict=None, html_code: str=None, profile: types.PROFILE_TYPE=None):
        """Build a one-month calendar showing per-day task loads.

        :param month: 1-12; defaults to the current month.
        :param content: maps ISO date strings to {task: load} dicts.
        :param year: defaults to the current year.
        Loads are scaled by ``options['unit']`` (default 100); task colors
        come from ``options['colors']`` or the theme's chart palette.
        """
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        today = datetime.date.today()
        month = (month or today.month)
        content = (content or {})
        dfl_options = {'overload': {'font-size': self.page.body.style.globals.font.normal(5), 'text-align': 'center', 'color': self.page.theme.danger.base, 'font-weight': 'bold', 'cursor': 'pointer'}, 'number': {'font-size': self.page.body.style.globals.font.normal(5), 'text-align': 'center'}, 'today': {'padding': '0 0 5px 0', 'border-bottom': '1px solid grey'}, 'header': {'font-size': self.page.body.style.globals.font.normal(3), 'background': self.page.theme.colors[(- 1)], 'color': self.page.theme.colors[0], 'padding': '5px 2px', 'text-align': 'center'}}
        factor = ((100 / options.get('unit', 100)) if (options is not None) else 1)
        if (options is not None):
            dfl_options.update(options)
        year = (year or today.year)
        start = datetime.date(year, month, 1)
        (days_data, tasks) = ([], {})
        # Collect the set of task names across all days, then assign colors.
        for values in content.values():
            for t in values.keys():
                tasks[t] = None
        sorted_tasks = sorted(list(tasks))
        for (i, t) in enumerate(sorted_tasks):
            tasks[t] = dfl_options.get('colors', {}).get(t, self.page.theme.charts[i])
        # Pad leading cells so day 1 lands in its weekday column.
        for _ in range((start.weekday() + 1)):
            days_data.append({})
        while (start.month == month):
            day_tasks = content.get(start.isoformat(), {})
            tasks_view = []
            for (i, t) in enumerate(sorted_tasks):
                tasks_view.append({'name': t, 'capacity': (factor * day_tasks.get(t, 0)), 'color': tasks[t]})
            days_data.append({'today': (today == start), 'number': start.day, 'tasks': tasks_view, 'date': start.isoformat(), 'weekend': (start.weekday() >= 5)})
            start += datetime.timedelta(days=1)
        component = html.HtmlDates.Calendar(self.page, days_data, width, height, align, dfl_options, html_code, profile)
        component.tasks = tasks
        component.caption = ('%s %s' % (datetime.date(year, month, 1).strftime('%B'), year))
        html.Html.set_component_skin(component)
        return component

    def timer(self, minutes: int, text: str='', width: types.SIZE_TYPE=(None, '%'), height: types.SIZE_TYPE=(None, 'px'), align: str=None, options: dict=None, html_code: str=None, profile: types.PROFILE_TYPE=None):
        """Build a countdown timer component for *minutes* minutes."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        component = html.HtmlDates.Timer(self.page, minutes, text, width, height, align, options, html_code, profile)
        html.Html.set_component_skin(component)
        return component

    def months(self, content: dict=None, width: types.SIZE_TYPE=(None, '%'), height: types.SIZE_TYPE=(None, 'px'), align: str=None, options: dict=None, html_code: str=None, profile: types.PROFILE_TYPE=None):
        """Build a row of twelve pie charts, one per month, from *content*
        mapping month number (1-12) to {task: capacity} dicts. The current
        month's chart gets a highlight border."""
        today = datetime.date.today()
        content = (content or {})
        options = (options or {})
        labels = options.get('months', ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'])
        (rows, tasks) = ([], {})
        for (i, l) in enumerate(labels):
            record = []
            for (j, k) in enumerate(sorted(content.get((i + 1), {}))):
                # Fix: index content by the month (i + 1), not the task
                # position (j + 1), which raised KeyError / read wrong months.
                record.append({'name': k, 'capacity': content[(i + 1)][k]})
                tasks[k] = self.page.theme.charts[j]
            html_code_chart = (('%s_%s' % (html_code, i)) if (html_code is not None) else html_code)
            pie = self.page.ui.charts.chartJs.pie(record, y_columns=['capacity'], x_axis='name', html_code=html_code_chart, height=(150, 'px'), options=options, profile=profile)
            pie.options.legend.display = False
            pie.options.title.text = labels[i]
            pie.options.title.display = True
            pie.options.title.fontSize = self.page.body.style.globals.font.normal(5)
            pie.options.title.fontColor = self.page.theme.colors[(- 1)]
            rows.append(pie)
        component = self.page.ui.row(rows, width=width, height=height, align=align, options=options, profile=profile)
        component.tasks = tasks
        component.pies = rows
        component[(today.month - 1)].style.css.border = ('1px solid %s' % self.page.theme.success.light)
        html.Html.set_component_skin(component)
        return component

    def legend(self, record: list, width: types.SIZE_TYPE=(None, '%'), height: types.SIZE_TYPE=(None, 'px'), align: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Build a color legend from a {name: color} dict or a list of names /
        {'name': ..., 'color': ...} dicts (missing colors come from the theme)."""
        data = []
        if isinstance(record, dict):
            for (k, v) in record.items():
                data.append({'name': k, 'color': v})
        else:
            for (i, rec) in enumerate(record):
                if isinstance(rec, dict):
                    row = dict(rec)
                    row['color'] = (self.page.theme.charts[i] if ('color' not in rec) else rec['color'])
                else:
                    row = {'name': rec, 'color': self.page.theme.charts[i]}
                data.append(row)
        dfl_options = {'style': {'vertical-align': 'middle', 'border-radius': '5px', 'width': '10px', 'height': '10px', 'display': 'inline-block', 'margin-right': '2px'}}
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        if (options is not None):
            dfl_options.update(options)
        component = html.HtmlOthers.Legend(self.page, data, width, height, dfl_options, profile)
        html.Html.set_component_skin(component)
        return component

    def forecast(self, month_period: int, content: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), position: str='top', options: dict=None, profile: types.PROFILE_TYPE=None):
        """Build a grid of day calendars for the next *month_period* months
        (optionally with a legend per calendar via options['legend'])."""
        # Fix: guard against options=None (options.get below crashed).
        options = (options or {})
        today = datetime.date.today()
        row = []
        for i in range(month_period):
            # Fix: months are 1-12; the previous arithmetic wrapped on 11,
            # mapping December to January of the following year.
            month_index = ((today.month - 1) + i)
            next_year = (today.year + (month_index // 12))
            next_month = ((month_index % 12) + 1)
            calendar = self.page.ui.calendars.days(next_month, content, next_year, width, height, None, options, profile=profile)
            if options.get('legend', True):
                row.append([calendar, self.page.ui.calendars.legend(calendar.tasks)])
            else:
                row.append(calendar)
        component = self.page.ui.grid([row], position=position, profile=profile)
        html.Html.set_component_skin(component)
        return component

    def google(self, task, start: str, end: str, details=None, location=None, icon: str='google_plus', text: str='Add to Google Calendar', options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=None):
        """Build a styled link that opens a pre-filled Google Calendar event."""
        icon = self.page.ui.icons.awesome(icon, options=options, profile=profile)
        icon.icon.style.css.font_factor(5)
        icon.icon.style.css.color = self.page.theme.greys[(- 1)]
        icon.options.managed = False
        # NOTE(review): this literal was truncated in the source; restored to
        # the standard Google Calendar event-template endpoint that the
        # query parameters appended below expect — confirm against upstream.
        google_url = 'https://calendar.google.com/calendar/render?action=TEMPLATE'
        component = self.page.ui.link(('%s %s' % (icon.html(), text)), self.page.js.objects.get(('%(url)s&text=%(task)s&dates=%(start)s/%(end)s&details=%(details)s&location=%(location)s' % {'url': google_url, 'task': task, 'start': start, 'end': end, 'details': (details or task), 'location': (location or '')})))
        component.style.css.background = self.page.theme.greys[0]
        component.style.css.color = self.page.theme.greys[(- 1)]
        component.style.css.padding = '2px 5px'
        component.style.css.margin = 2
        component.style.css.display = 'inline-block'
        component.style.css.border = ('1px solid %s' % self.page.theme.greys[3])
        component.style.css.border_radius = 20
        html.Html.set_component_skin(component)
        return component

    def agenda(self, task, start, end, details=None, location=None, icon: str='calendar', text: str='Add to Calendar', options: dict=None, profile: types.PROFILE_TYPE=None):
        """Build a download link producing an iCalendar (.ics) event file."""
        calendar_options = {'CALSCALE': 'GREGORIAN', 'VERSION': '2.0'}
        events_options = {'DTSTART;VALUE=DATE': start, 'DTEND;VALUE=DATE': end, 'SUMMARY': (task or ''), 'LOCATION': (location or ''), 'DESCRIPTION': (details or ''), 'STATUS': 'CONFIRMED', 'SEQUENCE': 3}
        str_calendar = ('BEGIN:VCALENDAR\n%s\n%%s\nEND:VCALENDAR' % '\n'.join([('%s:%s' % (k, v)) for (k, v) in calendar_options.items()]))
        str_event = ('BEGIN:VEVENT\n%s\nEND:VEVENT' % '\n'.join([('%s:%s' % (k, v)) for (k, v) in events_options.items()]))
        component = self.page.ui.links.data(("<i style='font-size:%s;color:%s' class='%s'></i> %s" % (self.page.body.style.globals.font.normal(5), self.page.theme.greys[(- 1)], icon, text)), (str_calendar % str_event), options=options, profile=profile)
        component.attr['download'] = 'event.ics'
        component.style.css.background = self.page.theme.greys[0]
        component.style.css.color = self.page.theme.greys[(- 1)]
        component.style.css.padding = '2px 5px'
        component.style.css.margin = 2
        component.style.css.display = 'inline-block'
        component.style.css.border = ('1px solid %s' % self.page.theme.greys[3])
        component.style.css.border_radius = 20
        html.Html.set_component_skin(component)
        return component

    def pill(self, text: str, value=None, group=None, width: types.SIZE_TYPE=('auto', ''), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, tooltip: str=None, profile: types.PROFILE_TYPE=None, options: dict=None):
        """Build a selectable date pill. Suffixes D/W/M/Y on *text* (e.g. '7D')
        derive the pill's data-value as that offset back from today."""
        component = self.page.ui.text(text, width=width, height=height, html_code=html_code, tooltip=tooltip, profile=profile, options=options)
        component.style.css.background = self.page.theme.greys[3]
        component.options.style_select = 'pill_selected'
        component.style.css.border_radius = 20
        component.style.css.padding = '0 5px'
        component.style.css.margin_right = 5
        date = datetime.date.today()
        if ((value is None) and text.endswith('D')):
            date = (date - datetime.timedelta(days=int(text[:(- 1)])))
            value = date.isoformat()
        elif ((value is None) and text.endswith('W')):
            date = (date - datetime.timedelta(days=(7 * int(text[:(- 1)]))))
            value = date.isoformat()
        elif ((value is None) and text.endswith('M')):
            # Step back one month at a time by subtracting the day-of-month.
            for _ in range(int(text[:(- 1)])):
                date = (date - datetime.timedelta(days=date.day))
            value = date.isoformat()
        elif ((value is None) and text.endswith('Y')):
            date = datetime.date((date.year - int(text[:(- 1)])), date.month, date.day)
            value = date.isoformat()
        component.attr['data-value'] = (value or text)
        component.style.add_classes.div.color_background_hover()
        if (group is not None):
            self.page.body.style.custom_class({'background': ('%s !IMPORTANT' % self.page.theme.colors[6]), 'color': ('%s !IMPORTANT' % self.page.theme.greys[0])}, classname='pill_selected')
            component.attr['data-group'] = group
        html.Html.set_component_skin(component)
        return component
def _find_revert_offset(pc_list: List, source_map: deque, source_node: NodeBase, fn_node: NodeBase, fn_name: Optional[str]) -> None:
    """Annotate the trailing revert entry of *pc_list* with a best-guess source location.

    Tries three strategies in order: a compiler-inserted nonpayable check,
    the next unconsumed source-map offset inside the current function, and
    a trailing ``revert()``/``require()`` expression statement. Mutates
    ``pc_list[-1]`` in place; returns None.
    """
    if source_map:
        # Solidity's nonpayable guard emits CALLVALUE followed (8 ops later)
        # by the revert; reuse the CALLVALUE entry's location info.
        if ((len(pc_list) >= 8) and (pc_list[(- 8)]['op'] == 'CALLVALUE')):
            pc_list[(- 1)].update(dev='Cannot send ether to nonpayable function', fn=pc_list[(- 8)].get('fn', '<unknown>'), offset=pc_list[(- 8)].get('offset'), path=pc_list[(- 8)].get('path'))
            return
    if (not fn_node):
        return
    next_offset = None
    # Peek at the next source-map entry; a third field of -1 marks "no source".
    if (source_map and (source_map[0][2] != (- 1))):
        next_offset = (source_map[0][0], (source_map[0][0] + source_map[0][1]))
    # Use the upcoming offset when it lies strictly inside the function body.
    if (next_offset and (next_offset != fn_node.offset) and is_inside_offset(next_offset, fn_node.offset)):
        pc_list[(- 1)].update(path=str(source_node.contract_id), fn=fn_name, offset=next_offset)
        return
    # Fall back to an explicit revert()/require() as the function's last statement.
    if (fn_node[(- 1)].nodeType == 'ExpressionStatement'):
        expr = fn_node[(- 1)].expression
        if ((expr.nodeType == 'FunctionCall') and (expr.get('expression.name') in ('revert', 'require'))):
            pc_list[(- 1)].update(path=str(source_node.contract_id), fn=fn_name, offset=expr.expression.offset)
class BaseTestIssueCertificates(AEATestCaseEmpty):
    """Base test case for certificate issuance: an empty agent augmented
    with a copy of the dummy connection."""

    # Fix: both methods take `cls` and mutate class-level state but were
    # plain functions — the @classmethod decorators (evidently stripped)
    # are restored so `cls` is bound correctly however they are invoked.
    @classmethod
    def setup_class(cls):
        """Copy the dummy connection into the agent and register it in the config."""
        super().setup_class()
        shutil.copytree(os.path.join(CUR_PATH, 'data', 'dummy_connection'), os.path.join(cls.current_agent_context, 'connections', 'dummy'))
        agent_config = cls.load_agent_config(cls.agent_name)
        agent_config.author = FetchAICrypto.identifier
        agent_config.connections.add(DummyConnection.connection_id)
        dump_item_config(agent_config, Path(cls.current_agent_context))

    @classmethod
    def add_cert_requests(cls, cert_requests: List[CertRequest], connection_name: str):
        """Store *cert_requests* under the named connection's configuration."""
        cls.nested_set_config(f'connections.{connection_name}.cert_requests', cert_requests)
class IPythonEditor(ILayoutWidget):
    """Interface for a widget that edits Python source code.

    NOTE(review): the method ``def``s below had no bodies in the source —
    presumably docstring bodies were stripped; docstrings are restored so
    the interface stubs are syntactically valid.
    """
    # True when the editor contents differ from the file on disk.
    dirty = Bool(False)
    # Path of the file being edited.
    path = Str()
    # Whether to show line numbers in the gutter.
    show_line_numbers = Bool(True)
    # Fired when the editor content changes.
    changed = Event()
    # Fired on key presses, carrying a KeyPressedEvent.
    key_pressed = Event(KeyPressedEvent)
    def load(self, path=None):
        """Load the contents of the editor's file (or *path* if given)."""
    def save(self, path=None):
        """Save the editor contents (to *path* if given)."""
    def set_style(self, n, fore, back):
        """Set foreground/background colors for style number *n* — TODO confirm semantics."""
    def select_line(self, lineno):
        """Select the line at index *lineno*."""
# Fix: the `@mock.patch` / `@mock.patch.object` prefixes were stripped from
# these six decorators, leaving bare `.object(...)` / `('...')` lines that are
# syntax errors. Restored bottom-up to match the test's parameter order.
# NOTE(review): assumes `mock` (unittest.mock or the mock package) is imported
# at the top of this module — confirm against the original file.
@mock.patch.object(SpinnakerELB, 'configure_attributes')
@mock.patch.object(SpinnakerELB, 'add_backend_policy')
@mock.patch.object(SpinnakerELB, 'add_listener_policy')
@mock.patch('foremast.elb.create_elb.wait_for_task')
@mock.patch.object(SpinnakerELB, 'make_elb_json', return_value={})
@mock.patch('foremast.elb.create_elb.get_properties')
def test_elb_create_elb(mock_get_properties, mock_elb_json, mock_wait_for_task, mock_listener_policy, mock_backend_policy, mock_load_balancer_attributes):
    """create_elb() should pass the generated ELB JSON to the listener-policy step."""
    elb = SpinnakerELB(app='myapp', env='dev', region='us-east-1')
    elb.create_elb()
    mock_listener_policy.assert_called_with(mock_elb_json())
def extractWriterupdatesCom(item):
    """Parse a release post from writerupdates.com.

    Returns a release message for a recognised tag, None when the title
    has no chapter/volume info (or is a preview), False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # (tag, series name, translation type)
    releases = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, series, tl_type in releases:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_requirements_txt(tmp_path: Path) -> None:
    """A bare requirements.txt should be detected as the REQUIREMENTS_TXT format."""
    with run_within_dir(tmp_path):
        Path('requirements.txt').write_text('foo >= "1.0"')
        detected = DependencySpecificationDetector(Path('pyproject.toml')).detect()
        assert detected == DependencyManagementFormat.REQUIREMENTS_TXT
class Special():
    """Bundles code-generation metadata for a special-cased type."""

    def __init__(self, constructor, post_deserialize, import_str):
        self.constructor = constructor            # expression used to rebuild the object
        self.post_deserialize = post_deserialize  # optional '%s' template applied after deserialization
        self.import_str = import_str              # import statement the generated code needs

    def get_post_deserialize(self, varname):
        """Render the post-deserialize template for *varname*; None when no
        (or an empty) template is configured."""
        if not self.post_deserialize:
            return None
        return self.post_deserialize % varname
class OptionSeriesAreaSonificationDefaultspeechoptionsMappingVolume(Options):
    """Generated Highcharts ``series.area.sonification.defaultSpeechOptions.mapping.volume`` options.

    NOTE(review): each option has paired reader/writer ``def``s of the same
    name; in plain Python the writer shadows the reader, so this layout
    presumes ``@property``/setter decorators were stripped from the source —
    confirm before refactoring.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_gauge(elasticapm_client, prometheus):
    """Bare and labelled prometheus gauges are collected as separate samples,
    with the latest value winning per label set."""
    metricset = PrometheusMetrics(MetricsRegistry(elasticapm_client))
    bare = prometheus_client.Gauge('a_bare_gauge', 'Bare gauge')
    labelled = prometheus_client.Gauge('gauge_with_labels', 'Gauge with labels', ['alabel', 'anotherlabel'])
    bare.set(5)
    labelled.labels(alabel='foo', anotherlabel='baz').set(7)
    labelled.labels(alabel='bar', anotherlabel='bazzinga').set(11)
    # Re-setting the same label combination overwrites the earlier value.
    labelled.labels(alabel='foo', anotherlabel='baz').set(2)
    collected = list(metricset.collect())
    assert len(collected) == 3
    assert collected[0]['samples']['prometheus.metrics.a_bare_gauge']['value'] == 5.0
    assert collected[1]['samples']['prometheus.metrics.gauge_with_labels']['value'] == 2.0
    assert collected[1]['tags'] == {'alabel': 'foo', 'anotherlabel': 'baz'}
    assert collected[2]['samples']['prometheus.metrics.gauge_with_labels']['value'] == 11.0
    assert collected[2]['tags'] == {'alabel': 'bar', 'anotherlabel': 'bazzinga'}
def extractMoonlightnovelsCom(item):
    """Map a moonlightnovels.com feed item to a release message.

    Returns None for previews or items with neither chapter nor volume,
    False when no known tag matches, otherwise the built release message.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Punpunbikeshare(BikeShareSystem):
    """Punpun 'Smart Bike' share system backed by a JSON station feed."""

    sync = True

    meta = {'system': 'Smart Bike', 'company': ['BTS Group Holdings']}

    def __init__(self, tag, feed_url, meta):
        super(Punpunbikeshare, self).__init__(tag, meta)
        self.feed_url = feed_url

    def update(self, scraper=None):
        """Refresh `self.stations` from the JSON feed."""
        if scraper is None:
            scraper = utils.PyBikesScraper()
        payload = json.loads(scraper.request(self.feed_url))
        self.stations = [self._parse_station(entry) for entry in payload['stations']]

    @staticmethod
    def _parse_station(entry):
        # Build a BikeShareStation from one feed entry; bikes are counted
        # from the docks that actually report a bike id.
        dock_count = int(entry['bikeDockCount'])
        uids = [dock['bikeId'] for dock in entry['bikeDocks'] if dock['bikeId']]
        extra = {
            'slots': dock_count,
            'address': entry['location'],
            'uid': entry['stationId'],
            'bike_uids': uids,
        }
        return BikeShareStation(
            entry['stationName'],
            float(entry['lat']),
            float(entry['lng']),
            len(uids),
            dock_count - len(uids),
            extra,
        )
def test_sandbox_priority():
    """Background tasks in the same sandbox are de-prioritized incrementally,
    while a different sandbox starts back at priority 1."""
    prio = _PriorityCounter()

    def priority_of(**fields):
        return prio.get_priority(BuildQueueTask(fields))

    background = {'chroot': 'fedora-rawhide-x86_64', 'project_owner': 'cecil', 'background': True}
    assert priority_of(build_id='9', task_id='9', project_owner='cecil', background=False, sandbox='cecil/foo--submitter') == 1
    assert priority_of(build_id='9', task_id='9-fedora-rawhide-x86_64', sandbox='cecil/foo--submitter', **background) == 1
    assert priority_of(build_id='10', task_id='10-fedora-rawhide-x86_64', sandbox='cecil/foo--submitter', **background) == 2
    assert priority_of(build_id='11', task_id='11-fedora-rawhide-x86_64', sandbox='cecil/baz--submitter', **background) == 1
class OptionSeriesBulletOnpointConnectoroptions(Options):
    """Option wrapper for the bullet series on-point connector options.

    NOTE(review): each accessor is defined twice (reader then writer);
    without @property / @<name>.setter decorators the writer shadows the
    reader — presumably stripped decorators; confirm upstream.
    """
    def dashstyle(self):
        return self._config_get(None)
    def dashstyle(self, text: str):
        self._config(text, js_type=False)
    def stroke(self):
        return self._config_get(None)
    def stroke(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        # Reader default is 1 (unlike the None defaults above).
        return self._config_get(1)
    def width(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the bare expressions here (`_projects_ns.route`, `_to_parameters`,
# `.doc`, `.marshal_with`, `.response`) look like flask-restx decorators whose
# leading '@' was stripped; as written they execute once for side effects only —
# confirm against the original module.
_projects_ns.route('/')
class Project(Resource):
    _to_parameters
    _projects_ns.doc(params=fullname_params)
    _projects_ns.marshal_with(project_model)
    _projects_ns.response(HTTPStatus.OK.value, 'OK, Project data follows...')
    _projects_ns.response(HTTPStatus.NOT_FOUND.value, 'No such Copr project found in database')
    def get(self, ownername, projectname):
        """Look up the Copr project for (ownername, projectname) and return it as a dict."""
        copr = get_copr(ownername, projectname)
        return to_dict(copr)
class OptionPlotoptionsSankeySonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Accessors for the sankey sonification default-instrument lowpass mapping sub-options."""
    def frequency(self) -> 'OptionPlotoptionsSankeySonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Return the `frequency` sub-option object."""
        return self._config_sub_data('frequency', OptionPlotoptionsSankeySonificationDefaultinstrumentoptionsMappingLowpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsSankeySonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Return the `resonance` sub-option object."""
        return self._config_sub_data('resonance', OptionPlotoptionsSankeySonificationDefaultinstrumentoptionsMappingLowpassResonance)
class SeparatedCoords(Coords):
    """Coordinates stored as per-axis 1-D arrays (a separated grid).

    NOTE(review): `from_dict` takes `cls` and `size`/`dims`/`shape` are
    accessed elsewhere in this class as attributes (`self.shape`,
    `self.dims`), so @classmethod / @property decorators were presumably
    stripped; confirm against the original source.
    """
    def __init__(self, separated_coords):
        # Deep-copy so in-place ops (__iadd__/__imul__/reverse) do not
        # alias the caller's arrays.
        self.separated_coords = [copy.deepcopy(s) for s in separated_coords]
    def from_dict(cls, tree):
        """Build from a serialized tree; `tree['type']` must be 'separated'."""
        if (tree['type'] != 'separated'):
            raise ValueError('The type of coordinates should be "separated".')
        return cls(tree['separated_coords'])
    def to_dict(self):
        """Serialize to a plain dict tree (inverse of `from_dict`)."""
        tree = {'type': 'separated', 'separated_coords': self.separated_coords}
        return tree
    def __getitem__(self, i):
        """Return axis `i` broadcast over the full grid and flattened."""
        s0 = ((1,) * len(self))
        # Axes are stored first-to-last but the grid shape is reversed,
        # so axis i maps to dimension len(self)-i-1 of `self.shape`.
        j = ((len(self) - i) - 1)
        output = self.separated_coords[i].reshape(((s0[:j] + ((- 1),)) + s0[(j + 1):]))
        return np.broadcast_to(output, self.shape).ravel()
    def size(self):
        # Total number of grid points.
        return np.prod(self.shape)
    def __len__(self):
        # Number of axes.
        return len(self.separated_coords)
    def dims(self):
        # Points per axis, in storage order.
        return np.array([len(c) for c in self.separated_coords])
    def shape(self):
        # numpy-style shape: dims reversed (last axis varies fastest).
        return self.dims[::(- 1)]
    def __iadd__(self, b):
        """In-place per-axis addition with a sequence of offsets."""
        for i in range(len(self)):
            self.separated_coords[i] += b[i]
        return self
    def __imul__(self, f):
        """In-place scaling by a scalar or a per-axis sequence of factors."""
        if np.isscalar(f):
            for i in range(len(self)):
                self.separated_coords[i] *= f
        else:
            for i in range(len(self)):
                self.separated_coords[i] *= f[i]
        return self
    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        return np.array_equal(self.separated_coords, other.separated_coords)
    def reverse(self):
        """Reverse each axis in place; returns self for chaining."""
        for i in range(len(self)):
            self.separated_coords[i] = self.separated_coords[i][::(- 1)]
        return self
class TestHNSWJaccard(TestHNSW):
    """HNSW test variant operating on integer sets with Jaccard distance."""

    def _create_random_points(self, high=50, n=100, dim=10):
        """Random integer matrix of shape (n, dim) with values in [0, high)."""
        return np.random.randint(0, high, size=(n, dim))

    def _create_index(self, sets, keys=None):
        """Build and populate an HNSW index over `sets` using Jaccard distance."""
        index = HNSW(distance_func=jaccard_distance, m=16, ef_construction=100)
        self._insert_points(index, sets, keys)
        return index

    def _search_index(self, index, queries, k=10):
        """Delegate to the base-class distance search with Jaccard distance."""
        return super()._search_index_dist(index, queries, jaccard_distance, k)
def merge_ssz_branches(*branches):
    """Merge several SSZ Merkle-proof branches into one minimal branch.

    Nodes are keyed by generalized index. Whenever both children of a
    missing parent are known, the parent hash is computed and queued for
    further merging; finally any node whose value is recomputable from
    its two children is pruned from the result.
    """
    nodes = {}
    for branch in branches:
        nodes.update(branch)
    # Worklist seeded with all indices, deepest (largest) first; parents
    # computed along the way are appended and processed in turn.
    worklist = sorted(nodes, reverse=True)
    for index in worklist:
        sibling = index ^ 1
        parent = index // 2
        if index in nodes and sibling in nodes and parent not in nodes:
            left, right = nodes[index & ~1], nodes[index | 1]
            nodes[parent] = hash(left + right)
            worklist.append(parent)
    return {
        index: value
        for index, value in nodes.items()
        if not (2 * index in nodes and 2 * index + 1 in nodes)
    }
class TestInlineHilitePlainText(util.MdCase):
    """InlineHilite with `style_plain_text` disabled: inline code stays bare."""

    extension = ['pymdownx.highlight', 'pymdownx.inlinehilite']

    extension_configs = {'pymdownx.inlinehilite': {'style_plain_text': False}}

    def test_unstyled_plaintext(self):
        """No language guessing and no styling wrapper on plain inline code."""
        source = 'Lets test inline highlight no guessing and no text styling `import module`.'
        expected = '<p>Lets test inline highlight no guessing and no text styling <code>import module</code>.</p>'
        self.check_markdown(source, expected)
# NOTE(review): the line below looks like a stripped pytest decorator —
# presumably `@pytest.mark.parametrize(...)` originally; as written the
# leading `.parametrize` is not valid at module scope. Confirm upstream.
.parametrize(('exclude', 'expected'), [(('.*file1',), [Path('.cache/file2.py'), Path('dir/subdir/file2.py'), Path('dir/subdir/file3.py'), Path('other_dir/subdir/file2.py')]), (('.cache|other.*subdir',), [Path('dir/subdir/file1.py'), Path('dir/subdir/file2.py'), Path('dir/subdir/file3.py')]), (('.*/subdir/',), [Path('.cache/file1.py'), Path('.cache/file2.py')])])
def test_regex_argument(exclude: tuple[str], expected: list[Path], tmp_path: Path) -> None:
    """Each regex in `exclude` prunes matching paths from the discovered Python files."""
    with run_within_dir(tmp_path):
        create_files([Path('.cache/file1.py'), Path('.cache/file2.py'), Path('dir/subdir/file1.py'), Path('dir/subdir/file2.py'), Path('dir/subdir/file3.py'), Path('other_dir/subdir/file1.py'), Path('other_dir/subdir/file2.py')])
        files = PythonFileFinder(exclude=exclude, extend_exclude=(), using_default_exclude=False).get_all_python_files_in((Path(),))
        assert (sorted(files) == expected)
class ExpansionTile(ConstrainedControl):
    """Flet wrapper for Flutter's ExpansionTile control.

    NOTE(review): the @property / @<name>.setter decorators appear to have
    been stripped throughout this class: each accessor is defined twice and
    bare expressions such as `_padding.setter` reference names that do not
    exist at class scope, so as written each second `def` simply shadows
    the first. Confirm against the original flet source before relying on
    attribute access here.
    """
    def __init__(self, controls: Optional[List[Control]]=None, ref: Optional[Ref]=None, key: Optional[str]=None, width: OptionalNumber=None, height: OptionalNumber=None, left: OptionalNumber=None, top: OptionalNumber=None, right: OptionalNumber=None, bottom: OptionalNumber=None, expand: Union[(None, bool, int)]=None, col: Optional[ResponsiveNumber]=None, opacity: OptionalNumber=None, rotate: RotateValue=None, scale: ScaleValue=None, offset: OffsetValue=None, aspect_ratio: OptionalNumber=None, animate_opacity: AnimationValue=None, animate_size: AnimationValue=None, animate_position: AnimationValue=None, animate_rotation: AnimationValue=None, animate_scale: AnimationValue=None, animate_offset: AnimationValue=None, on_animation_end=None, tooltip: Optional[str]=None, visible: Optional[bool]=None, disabled: Optional[bool]=None, data: Any=None, title: Optional[Control]=None, subtitle: Optional[Control]=None, leading: Optional[Control]=None, trailing: Optional[Control]=None, controls_padding: PaddingValue=None, tile_padding: PaddingValue=None, affinity: Optional[TileAffinity]=None, expanded_alignment: Optional[Alignment]=None, expanded_cross_axis_alignment: CrossAxisAlignment=CrossAxisAlignment.CENTER, clip_behavior: Optional[ClipBehavior]=None, initially_expanded: Optional[bool]=None, maintain_state: Optional[bool]=None, text_color: Optional[str]=None, icon_color: Optional[str]=None, shape: Optional[OutlinedBorder]=None, bgcolor: Optional[str]=None, collapsed_bgcolor: Optional[str]=None, collapsed_icon_color: Optional[str]=None, collapsed_text_color: Optional[str]=None, collapsed_shape: Optional[OutlinedBorder]=None, on_change=None):
        ConstrainedControl.__init__(self, ref=ref, key=key, width=width, height=height, left=left, top=top, right=right, bottom=bottom, expand=expand, col=col, opacity=opacity, rotate=rotate, scale=scale, offset=offset, aspect_ratio=aspect_ratio, animate_opacity=animate_opacity, animate_size=animate_size, animate_position=animate_position, animate_rotation=animate_rotation, animate_scale=animate_scale, animate_offset=animate_offset, on_animation_end=on_animation_end, tooltip=tooltip, visible=visible, disabled=disabled, data=data)
        self.controls = controls
        self.controls_padding = controls_padding
        self.expanded_alignment = expanded_alignment
        self.expanded_cross_axis_alignment = expanded_cross_axis_alignment
        self.tile_padding = tile_padding
        self.leading = leading
        self.title = title
        self.subtitle = subtitle
        self.trailing = trailing
        self.affinity = affinity
        self.clip_behavior = clip_behavior
        self.maintain_state = maintain_state
        self.initially_expanded = initially_expanded
        self.shape = shape
        self.text_color = text_color
        self.icon_color = icon_color
        self.bgcolor = bgcolor
        self.collapsed_bgcolor = collapsed_bgcolor
        self.collapsed_icon_color = collapsed_icon_color
        self.collapsed_text_color = collapsed_text_color
        self.collapsed_shape = collapsed_shape
        self.on_change = on_change
    def _get_control_name(self):
        # Control type name used on the Flutter side.
        return 'expansiontile'
    def _before_build_command(self):
        super()._before_build_command()
        # Serialize complex option values into JSON attributes.
        self._set_attr_json('expandedAlignment', self.__expanded_alignment)
        self._set_attr_json('controlsPadding', self.__controls_padding)
        self._set_attr_json('tilePadding', self.__tile_padding)
        self._set_attr_json('shape', self.__shape)
        self._set_attr_json('collapsedShape', self.__collapsed_shape)
    def _get_children(self):
        """Collect child controls, tagging each with its slot name ('n' attr)."""
        children = []
        if self.__controls:
            for c in self.__controls:
                c._set_attr_internal('n', 'controls')
                children.append(c)
        if self.__leading:
            self.__leading._set_attr_internal('n', 'leading')
            children.append(self.__leading)
        if self.__title:
            self.__title._set_attr_internal('n', 'title')
            children.append(self.__title)
        if self.__subtitle:
            self.__subtitle._set_attr_internal('n', 'subtitle')
            children.append(self.__subtitle)
        if self.__trailing:
            self.__trailing._set_attr_internal('n', 'trailing')
            children.append(self.__trailing)
        return children
    # Accessors below follow the (reader, writer) pattern; see class NOTE
    # about the presumably stripped property decorators.
    def controls(self):
        return self.__controls
    def controls(self, value: Optional[List[Control]]):
        self.__controls = (value if (value is not None) else [])
    def controls_padding(self) -> PaddingValue:
        return self.__controls_padding
    _padding.setter
    def controls_padding(self, value: PaddingValue):
        self.__controls_padding = value
    def tile_padding(self) -> PaddingValue:
        return self.__tile_padding
    _padding.setter
    def tile_padding(self, value: PaddingValue):
        self.__tile_padding = value
    def expanded_alignment(self) -> Optional[Alignment]:
        return self.__expanded_alignment
    _alignment.setter
    def expanded_alignment(self, value: Optional[Alignment]):
        self.__expanded_alignment = value
    def expanded_cross_axis_alignment(self) -> CrossAxisAlignment:
        return self.__expanded_cross_axis_alignment
    _cross_axis_alignment.setter
    def expanded_cross_axis_alignment(self, value: CrossAxisAlignment):
        self.__expanded_cross_axis_alignment = value
        self._set_attr('crossAxisAlignment', (value.value if isinstance(value, CrossAxisAlignment) else value))
    def affinity(self) -> TileAffinity:
        return self.__affinity
    def affinity(self, value: TileAffinity):
        self.__affinity = value
        self._set_attr('affinity', (value.value if isinstance(value, TileAffinity) else value))
    def leading(self) -> Optional[Control]:
        return self.__leading
    def leading(self, value: Optional[Control]):
        self.__leading = value
    def title(self) -> Optional[Control]:
        return self.__title
    def title(self, value: Optional[Control]):
        self.__title = value
    def subtitle(self) -> Optional[Control]:
        return self.__subtitle
    def subtitle(self, value: Optional[Control]):
        self.__subtitle = value
    def trailing(self) -> Optional[Control]:
        return self.__trailing
    def trailing(self, value: Optional[Control]):
        self.__trailing = value
    def clip_behavior(self) -> Optional[ClipBehavior]:
        return self.__clip_behavior
    _behavior.setter
    def clip_behavior(self, value: Optional[ClipBehavior]):
        self.__clip_behavior = value
        self._set_attr('clipBehavior', (value.value if isinstance(value, ClipBehavior) else value))
    def maintain_state(self) -> Optional[bool]:
        return self._get_attr('maintainState', data_type='bool', def_value=False)
    _state.setter
    def maintain_state(self, value: Optional[bool]):
        self._set_attr('maintainState', value)
    def initially_expanded(self) -> Optional[bool]:
        return self._get_attr('initiallyExpanded', data_type='bool', def_value=False)
    _expanded.setter
    def initially_expanded(self, value: Optional[bool]):
        self._set_attr('initiallyExpanded', value)
    def shape(self) -> Optional[OutlinedBorder]:
        return self.__shape
    def shape(self, value: Optional[OutlinedBorder]):
        self.__shape = value
    def text_color(self):
        return self._get_attr('textColor')
    _color.setter
    def text_color(self, value):
        self._set_attr('textColor', value)
    def icon_color(self):
        return self._get_attr('iconColor')
    _color.setter
    def icon_color(self, value):
        self._set_attr('iconColor', value)
    def bgcolor(self):
        return self._get_attr('bgColor')
    def bgcolor(self, value):
        self._set_attr('bgColor', value)
    def collapsed_bgcolor(self):
        return self._get_attr('collapsedBgColor')
    _bgcolor.setter
    def collapsed_bgcolor(self, value):
        self._set_attr('collapsedBgColor', value)
    def collapsed_icon_color(self):
        return self._get_attr('collapsedIconColor')
    _icon_color.setter
    def collapsed_icon_color(self, value):
        self._set_attr('collapsedIconColor', value)
    def collapsed_text_color(self):
        return self._get_attr('collapsedTextColor')
    _text_color.setter
    def collapsed_text_color(self, value):
        self._set_attr('collapsedTextColor', value)
    def collapsed_shape(self) -> Optional[OutlinedBorder]:
        return self.__collapsed_shape
    _shape.setter
    def collapsed_shape(self, value: Optional[OutlinedBorder]):
        self.__collapsed_shape = value
    def on_change(self):
        return self._get_event_handler('change')
    _change.setter
    def on_change(self, handler):
        self._add_event_handler('change', handler)
        self._set_attr('onChange', (True if (handler is not None) else None))
def test_move_block(proc_bar, golden):
    """Golden test for cursor-driven block moves.

    NOTE(review): each `_move` presumably rewrites the shared `proc_bar`
    tree (TODO confirm), so the order of the calls below may be
    significant. The resulting patterns are joined and compared against
    the golden output.
    """
    c = _find_cursors(proc_bar, 'x = 1.0 ; x = 2.0')[0]
    # Moves relative to the block itself and its siblings.
    (p0, _) = c._move(c[0].prev().before())
    (p1, _) = c._move(c.before())
    (p2, _) = c._move(c.after())
    (p3, _) = c._move(c[(- 1)].next().after())
    (p4, _) = c._move(c[(- 1)].next(2).after())
    (p5, _) = c._move(c[(- 1)].next(3).after())
    assert (str(p1) == str(p2)), 'Both before and after should keep block in place.'
    # Moves up and out of enclosing scopes.
    (pu0, _) = c._move(c.parent().before())
    (pu1, _) = c._move(c.parent().after())
    (pu2, _) = c._move(c.parent().parent().before())
    (pu3, _) = c._move(c.parent().parent().prev().before())
    (pu4, _) = c._move(c.parent().parent().after())
    # Moves of a different cursor relative to the first one.
    c2 = _find_cursors(proc_bar, 'x: _')[0]
    (pd0, _) = c2._move(c[0].prev().before())
    (pd1, _) = c2._move(c.before())
    (pd2, _) = c2._move(c2[(- 1)].next().after())
    # Loop moved after its own parent.
    c3 = _find_cursors(proc_bar, 'for j in _: _')[0]
    (pl0, _) = c3._move(c3.parent().after())
    all_tests = [p0, p1, p2, p3, p4, p5, pu0, pu1, pu2, pu3, pu4, pd0, pd1, pd2, pl0]
    actual = '\n'.join((str(p) for p in all_tests))
    assert (actual == golden)
def test_builder_manages_duplicate_compilers(owned_package):
    """Two contract types produced by the same compiler should be merged
    into a single `compilers` entry listing both contract type names."""
    _, _, compiler_output = owned_package
    manifest = build(
        BASE_MANIFEST,
        contract_type('Owned', compiler_output, abi=True, compiler=True, source_id=True),
        contract_type('Owned', compiler_output, alias='OwnedAlias', abi=True, compiler=True, source_id=True),
        validate(),
    )
    # Build the expected contract-type payload from the raw compiler output.
    type_data = normalize_contract_type(compiler_output['Owned.sol']['Owned'], 'Owned.sol')
    compiler_info = type_data.pop('compiler')
    for dropped in ('deploymentBytecode', 'devdoc', 'userdoc'):
        type_data.pop(dropped)
    compiler_entry = assoc(compiler_info, 'contractTypes', ['Owned', 'OwnedAlias'])
    expected = assoc(BASE_MANIFEST, 'contractTypes', {
        'Owned': assoc(type_data, 'contractType', 'Owned'),
        'OwnedAlias': assoc(type_data, 'contractType', 'Owned'),
    })
    # The non-aliased entry must not carry an explicit contractType field.
    expected['contractTypes']['Owned'].pop('contractType')
    expected = assoc(expected, 'compilers', [compiler_entry])
    assert manifest == expected
class ObjectType(MibNode):
    """MIB OBJECT-TYPE node: a SNMP syntax object plus its UNITS,
    MAX-ACCESS, STATUS, DESCRIPTION and REFERENCE metadata.

    Comparisons delegate to the wrapped `syntax` value, and every setter
    returns `self` so calls can be chained fluently.
    """

    units = ''
    maxAccess = 'not-accessible'
    status = 'current'
    description = ''
    reference = ''

    def __init__(self, name, syntax=None):
        MibNode.__init__(self, name)
        self.syntax = syntax

    # -- comparisons proxy straight through to the wrapped syntax --

    def __eq__(self, other):
        return self.syntax == other

    def __ne__(self, other):
        return self.syntax != other

    def __lt__(self, other):
        return self.syntax < other

    def __le__(self, other):
        return self.syntax <= other

    def __gt__(self, other):
        return self.syntax > other

    def __ge__(self, other):
        return self.syntax >= other

    def __repr__(self):
        # Include the syntax only when one is attached.
        suffix = ')' if self.syntax is None else ((', %r' % self.syntax) + ')')
        return ('%s(%s' % (self.__class__.__name__, self.name)) + suffix

    # -- fluent accessors --

    def getSyntax(self):
        return self.syntax

    def setSyntax(self, v):
        self.syntax = v
        return self

    def getUnits(self):
        return self.units

    def setUnits(self, v):
        self.units = v
        return self

    def getMaxAccess(self):
        return self.maxAccess

    def setMaxAccess(self, v):
        self.maxAccess = v
        return self

    def getStatus(self):
        return self.status

    def setStatus(self, v):
        self.status = v
        return self

    def getDescription(self):
        return self.description

    def setDescription(self, v):
        self.description = v
        return self

    def getReference(self):
        return self.reference

    def setReference(self, v):
        self.reference = v
        return self

    def asn1Print(self):
        """Render the node in ASN.1 OBJECT-TYPE notation."""
        fields = (self.getSyntax().__class__.__name__, self.getUnits(), self.getMaxAccess(), self.getStatus(), self.getDescription(), self.getReference())
        return ('\nOBJECT-TYPE\n SYNTAX %s\n UNITS "%s"\n MAX-ACCESS %s\n STATUS %s\n DESCRIPTION "%s"\n REFERENCE "%s" ' % fields)
class GenericDirectorySource(IndexedSource):
    """Source reading fields from a directory that was previously indexed
    into a database file (see the `climetlab index_directory` command
    referenced in the error message below)."""
    # Concrete subclasses supply the index implementation.
    INDEX_CLASS = None
    DEFAULT_JSON_FILE = 'climetlab.index'
    DEFAULT_DB_FILE = 'climetlab-2.db'
    def __init__(self, path, db_path=None, _index=None, **kwargs):
        # Fast path: an already-built index was handed to us directly.
        if (_index is not None):
            super().__init__(_index, **kwargs)
            return
        path = os.path.expanduser(path)
        self.path = path
        self.abspath = os.path.abspath(path)
        # NOTE(review): make_absolute is only consulted when db_path is None
        # (presumably resolving the default DB file relative to abspath); an
        # explicitly passed relative db_path is used verbatim — confirm that
        # this asymmetry is intended.
        if (db_path is None):
            db_path = make_absolute(db_path, self.abspath, default=self.DEFAULT_DB_FILE)
        if (not os.path.exists(db_path)):
            raise NotIndexedDirectoryError(f"This directory has not been indexed. Try running 'climetlab index_directory {self.path}'.")
        LOG.info(f'Using index file {db_path}')
        index = self.INDEX_CLASS.from_existing_db(db_path=db_path)
        super().__init__(index, **kwargs)
def kube_server_version(version_json=None):
    """Return the Kubernetes server version as '<major>.<minor>', or None.

    When `version_json` is not supplied (or is empty/falsy) the version
    blob is fetched via kube_version_json(). Returns None when the blob
    contains no 'serverVersion' section.
    """
    blob = version_json or kube_version_json()
    server = blob.get('serverVersion', {})
    if not server:
        return None
    major = strip_version(server.get('major', None))
    minor = strip_version(server.get('minor', None))
    return f'{major}.{minor}'
def check_errors(flat_error_nodes, ignored_wires):
    """Validate grouped error nodes, tolerating splits only on ignored wires.

    `flat_error_nodes` is an iterable of `(node, raw_node, generated_nodes)`
    triples; entries for the same `node` are accumulated. For each node the
    union of its generated groupings must equal its raw wire set. The
    largest grouping is considered canonical; every other grouping must be
    a single wire, and the function returns False as soon as one of those
    stray wires is not in `ignored_wires`.

    Returns True when every stray wire is ignorable (or there are none);
    raises AssertionError on malformed input (invariant violations).
    """
    error_nodes = {}
    for (node, raw_node, generated_nodes) in flat_error_nodes:
        if (node not in error_nodes):
            error_nodes[node] = {'raw_node': set(raw_node), 'generated_nodes': set()}
        # Every record for a node must agree on its raw wire set.
        assert (error_nodes[node]['raw_node'] == set(raw_node))
        error_nodes[node]['generated_nodes'].add(tuple(sorted(generated_nodes)))
    for (node, error) in error_nodes.items():
        # The union of all groupings must reproduce the raw wire set.
        combined_generated_nodes = set()
        for generated_node in error['generated_nodes']:
            combined_generated_nodes |= set(generated_node)
        assert (error['raw_node'] == combined_generated_nodes), (node, error)
        good_node = max(error['generated_nodes'], key=len)
        bad_nodes = (error['generated_nodes'] - {good_node})
        if not bad_nodes:
            # Only one grouping for this node: nothing stray to check.
            # (Previously max() was called on this empty set and raised
            # ValueError for single-grouping nodes.)
            continue
        # All non-canonical groupings must be single wires.
        assert (max(len(generated_node) for generated_node in bad_nodes) == 1)
        for generate_node in bad_nodes:
            for wire in generate_node:
                if (wire not in ignored_wires):
                    return False
    return True
def test_cookies_jar():
    """Cookies set by one response can be replayed as the next request's jar."""

    class PermissionResource():
        def on_get(self, req, resp):
            resp.set_cookie('has_permission', 'true')

        def on_post(self, req, resp):
            granted = req.cookies['has_permission'] == 'true'
            resp.status = falcon.HTTP_200 if granted else falcon.HTTP_403
    app = App()
    app.add_route('/jars', PermissionResource())
    client = testing.TestClient(app)
    first = client.simulate_get('/jars')
    # Feed the cookies from the GET back into the POST.
    second = client.simulate_post('/jars', cookies=first.cookies)
    assert second.status == falcon.HTTP_200
class LogManager():
    """Thin wrapper delegating log management to a dbt log manager while
    keeping this project's LOGGER level in sync.

    NOTE(review): `applicationbound` is a generator written to be used as
    a context manager; a stripped `@contextmanager` decorator seems
    likely — as written, calling it bare only returns a generator.
    Confirm upstream.
    """
    def __init__(self, dbt_log_manager):
        # Underlying dbt log manager being delegated to.
        self._dbt_log_manager = dbt_log_manager
    def applicationbound(self):
        with self._dbt_log_manager.applicationbound():
            (yield)
    def set_debug(self):
        self._dbt_log_manager.set_debug()
        LOGGER.set_level(logging.DEBUG)
    def set_trace(self):
        # NOTE(review): delegates to the dbt manager's set_debug() even for
        # trace — presumably dbt exposes no finer level; confirm intended.
        self._dbt_log_manager.set_debug()
        LOGGER.set_level(TRACE)
class CommitteeSearch(BaseModel):
    """Read-only model over the `ofec_committee_fulltext_mv` materialized
    view used for committee full-text search."""
    __tablename__ = 'ofec_committee_fulltext_mv'
    id = db.Column(db.String)
    name = db.Column(db.String, doc=docs.COMMITTEE_NAME)
    # tsvector column queried by full-text search.
    fulltxt = db.Column(TSVECTOR)
    # Financial aggregate columns (numeric 30,2).
    receipts = db.Column(db.Numeric(30, 2))
    disbursements = db.Column(db.Numeric(30, 2))
    independent_expenditures = db.Column(db.Numeric(30, 2))
    total_activity = db.Column(db.Numeric(30, 2))
    is_active = db.Column(db.Boolean, doc=docs.IS_COMMITTEE_ACTIVE)
def test_config_cycle(testbot):
    """Full plugin-config round trip: show defaults, apply a config, then
    confirm the stored configuration is reported back."""
    testbot.push_message('!plugin config Webserver')
    reply = testbot.pop_message()
    assert 'Default configuration for this plugin (you can copy and paste this directly as a command)' in reply
    assert 'Current configuration' not in reply
    testbot.assertInCommand("!plugin config Webserver {'HOST': 'localhost', 'PORT': 3141, 'SSL': None}", 'Plugin configuration done.')
    # After configuring, the command reports the current configuration.
    assert 'Current configuration' in testbot.exec_command('!plugin config Webserver')
    assert 'localhost' in testbot.exec_command('!plugin config Webserver')
class ModifyStateAdd(base_tests.SimpleProtocol):
    """Insert a flow entry and verify table stats report one active flow."""

    def runTest(self):
        logging.info('Running Modify_State_Add test')
        # `dict.keys()` returns a view with no `.sort()` under Python 3;
        # build a sorted list instead (behavior-equivalent to the old
        # keys() + sort() under Python 2).
        of_ports = sorted(config['port_map'].keys())
        delete_all_flows(self.controller)
        logging.info('Inserting a flow entry')
        logging.info('Expecting active_count=1 in table_stats_reply')
        (pkt, match) = wildcard_all_except_ingress(self, of_ports)
        verify_tablestats(self, expect_active=1)
def _warn_staging_overrides(ctx: click.core.Context, param: typing.Union[(click.core.Option, click.core.Parameter)], value: str) -> str:
    """Click callback that warns when --staging makes url/id_provider moot.

    Handles both parameter orders (staging seen first or last), writes the
    warning to stderr, and always returns `value` unchanged.
    """
    if ctx.params.get('staging', False):
        # staging already parsed: warn if this parameter overrides a default.
        overrides_url = (param.name == 'url') and (value != constants.BASE_URL)
        overrides_idp = (param.name == 'id_provider') and (value != constants.IDP)
        if overrides_url or overrides_idp:
            click.echo(f'''
Warning: {param.name} and staging flags are both set. {param.name} will be ignored.
''', err=True)
    if (param.name == 'staging') and value:
        # staging parsed last: check the previously seen parameters.
        if ctx.params.get('url', constants.BASE_URL) != constants.BASE_URL:
            click.echo('\nWarning: url and staging flags are both set. url will be ignored.\n', err=True)
        if ctx.params.get('id_provider', constants.IDP) != constants.IDP:
            click.echo('\nWarning: id_provider and staging flags are both set. id_provider will be ignored.\n', err=True)
    return value
class EditForum(MethodView):
    """Admin view for editing an existing forum (GET shows the form,
    POST saves it, re-rendering on validation failure)."""

    decorators = [allows.requires(IsAdmin, on_fail=FlashAndRedirect(message=_('You are not allowed to modify forums.'), level='danger', endpoint='management.overview'))]

    form = EditForumForm

    def _fill_moderators(self, form, forum):
        # Pre-populate the moderators field from the forum's current list.
        if forum.moderators:
            form.moderators.data = ','.join([user.username for user in forum.moderators])
        else:
            form.moderators.data = None

    def _render(self, form):
        # Shared render call for GET and failed POST.
        return render_template('management/forum_form.html', form=form, title=_('Edit Forum'))

    def get(self, forum_id):
        forum = Forum.query.filter_by(id=forum_id).first_or_404()
        form = self.form(forum)
        self._fill_moderators(form, forum)
        return self._render(form)

    def post(self, forum_id):
        forum = Forum.query.filter_by(id=forum_id).first_or_404()
        form = self.form(forum)
        if form.validate_on_submit():
            form.save()
            flash(_('Forum updated.'), 'success')
            return redirect(url_for('management.edit_forum', forum_id=forum.id))
        # Validation failed: re-populate moderators and show the form again.
        self._fill_moderators(form, forum)
        return self._render(form)
class ExecuteWrapInstanceCommand(sublime_plugin.TextCommand):
    """Apply the pending WrapInstance operation to every current selection."""

    def run(self, edit):
        wrapper = WrapInstance.obj
        choice = WrapInstance.value
        style = wrapper._style[choice]
        wrapper.insert_regions = []
        for region in wrapper.view.sel():
            if style == 'indent_block':
                wrapper.block(edit, region, True)
            elif style == 'block':
                wrapper.block(edit, region)
            else:
                wrapper.inline(edit, region)
        wrapper.select(edit)
class HTTPMissingParam(HTTPBadRequest):
    """400 Bad Request raised when a required parameter is absent."""
    # NOTE(review): the bare `_args(...)` expression below looks like a
    # stripped decorator (referencing a name not defined here); confirm
    # against the original source.
    _args(allowed_positional=1)
    def __init__(self, param_name, headers=None, **kwargs):
        """Build the response with a description naming `param_name`."""
        description = 'The "{0}" parameter is required.'
        description = description.format(param_name)
        super().__init__(title='Missing parameter', description=description, headers=headers, **kwargs)
def test_named_type_cannot_be_redefined():
    """Redefining a named type (record/enum/fixed) within one schema must
    raise SchemaParseException naming the offending type."""
    cases = [
        # Fully-qualified record name reused inside a union.
        ({'type': 'record', 'namespace': 'test.avro.training', 'name': 'SomeMessage', 'fields': [{'name': 'is_error', 'type': 'boolean', 'default': False}, {'name': 'outcome', 'type': [{'type': 'record', 'name': 'SomeMessage', 'fields': []}, {'type': 'record', 'name': 'ErrorRecord', 'fields': [{'name': 'errors', 'type': {'type': 'map', 'values': 'string'}, 'doc': 'doc'}]}]}]},
         'redefined named type: test.avro.training.SomeMessage'),
        # Record name reused by an enum.
        ({'type': 'record', 'name': 'SomeMessage', 'fields': [{'name': 'field1', 'type': {'type': 'record', 'name': 'ThisName', 'fields': []}}, {'name': 'field2', 'type': {'type': 'enum', 'name': 'ThisName', 'symbols': ['FOO', 'BAR']}}]},
         'redefined named type: ThisName'),
        # Record name reused by a fixed.
        ({'type': 'record', 'name': 'SomeMessage', 'fields': [{'name': 'field1', 'type': {'type': 'record', 'name': 'ThatName', 'fields': []}}, {'name': 'field2', 'type': {'type': 'fixed', 'name': 'ThatName', 'size': 8}}]},
         'redefined named type: ThatName'),
    ]
    for schema, message in cases:
        with pytest.raises(SchemaParseException, match=message):
            parse_schema(schema)
class _Chain(References):
    """Concatenation of two non-empty reference sequences with equal ndims.

    NOTE(review): `children` and `edges` access `.children`/`.edges` on the
    sub-sequences as attributes while being plain methods here, so
    @property decorators were presumably stripped; confirm upstream.
    """
    def __init__(self, sequence1: References, sequence2: References) -> None:
        assert (sequence1.ndims == sequence2.ndims), 'cannot chain sequences with different ndims'
        assert (sequence1 and sequence2), 'inefficient; at least one of the sequences is empty'
        assert (not _merge_chain(sequence1, sequence2)), 'inefficient; this should have been `_Uniform` or `_Repeat`'
        self.sequence1 = sequence1
        self.sequence2 = sequence2
        super().__init__(sequence1.ndims)
    def __len__(self) -> int:
        return (len(self.sequence1) + len(self.sequence2))
    def __iter__(self) -> Iterator[Reference]:
        return itertools.chain(self.sequence1, self.sequence2)
    def get(self, index: int) -> Reference:
        """Return the reference at `index`, routed to the owning sub-sequence."""
        index = numeric.normdim(len(self), index)
        n = len(self.sequence1)
        if (index < n):
            return self.sequence1.get(index)
        else:
            return self.sequence2.get((index - n))
    def take(self, indices: numpy.ndarray) -> References:
        """Select `indices` from the chain.

        Note: indices are partitioned by owning sub-sequence (all
        sequence1 picks first, then sequence2 picks), not kept in the
        given interleaved order.
        """
        _check_take(len(self), indices)
        n = len(self.sequence1)
        mask = numpy.less(indices, n)
        return self.sequence1.take(numpy.compress(mask, indices)).chain(self.sequence2.take((numpy.compress((~ mask), indices) - n)))
    def compress(self, mask: numpy.ndarray) -> References:
        """Apply a boolean mask, split at the sequence boundary."""
        _check_compress(len(self), mask)
        n = len(self.sequence1)
        return self.sequence1.compress(mask[:n]).chain(self.sequence2.compress(mask[n:]))
    def children(self) -> References:
        return self.sequence1.children.chain(self.sequence2.children)
    def edges(self) -> References:
        return self.sequence1.edges.chain(self.sequence2.edges)
    def getpoints(self, ischeme: str, degree: int) -> PointsSequence:
        return self.sequence1.getpoints(ischeme, degree).chain(self.sequence2.getpoints(ischeme, degree))
class LatticeTyper(TyperBase[bt.BMGLatticeType]):
_dispatch: Dict[(type, Callable)]
def __init__(self) -> None:
TyperBase.__init__(self)
self._dispatch = {bn.Observation: self._type_observation, bn.Query: self._type_query, bn.DirichletNode: self._type_dirichlet, bn.AdditionNode: self._type_addition, bn.BroadcastNode: self._type_broadcast, bn.ChoiceNode: self._type_choice, bn.CholeskyNode: self._type_cholesky, bn.ColumnIndexNode: self._type_column_index, bn.ComplementNode: self._type_complement, bn.ElementwiseMultiplyNode: self._type_binary_elementwise_op, bn.ExpM1Node: self._type_expm1, bn.ExpNode: self._type_exp, bn.FillMatrixNode: self._type_broadcast, bn.IfThenElseNode: self._type_if, bn.LKJCholeskyNode: self._type_lkj_cholesky, bn.LogNode: self._type_log, bn.MatrixAddNode: self._type_binary_elementwise_op, bn.MatrixMultiplicationNode: self._type_matrix_multiplication, bn.MatrixScaleNode: self._type_matrix_scale, bn.MatrixExpNode: self._type_matrix_exp, bn.MatrixLogNode: self._type_matrix_log, bn.MatrixLog1mexpNode: self._type_matrix_log1mexp, bn.MatrixComplementNode: self._type_matrix_complement, bn.MatrixPhiNode: self._type_matrix_phi, bn.MatrixSumNode: self._type_matrix_sum, bn.MultiplicationNode: self._type_multiplication, bn.MatrixNegateNode: self._type_matrix_negate, bn.NegateNode: self._type_negate, bn.PowerNode: self._type_power, bn.SampleNode: self._type_sample, bn.ToMatrixNode: self._type_to_matrix, bn.ToNegativeRealMatrixNode: self._type_to_neg_real_matrix, bn.ToPositiveRealMatrixNode: self._type_to_pos_real_matrix, bn.ToRealMatrixNode: self._type_to_real_matrix, bn.VectorIndexNode: self._type_index, bn.TensorNode: self._type_tensor_node, bn.TransposeNode: self._type_transpose}
def _lattice_type_for_element_type(self, element_type: bt.BMGElementType) -> bt.BMGLatticeType:
if (element_type == bt.positive_real_element):
return bt.PositiveReal
if (element_type == bt.negative_real_element):
return bt.NegativeReal
if (element_type == bt.real_element):
return bt.Real
if (element_type == bt.probability_element):
return bt.Probability
if (element_type == bt.bool_element):
return bt.Boolean
if (element_type == bt.natural_element):
return bt.Natural
else:
raise ValueError('unrecognized element type')
def _type_binary_elementwise_op(self, node: bn.BinaryOperatorNode) -> bt.BMGLatticeType:
left_type = self[node.left]
right_type = self[node.right]
assert isinstance(left_type, bt.BMGMatrixType)
assert isinstance(right_type, bt.BMGMatrixType)
bsize = _broadcast_size(left_type, right_type)
if (bsize is None):
return bt.Untypable
(rows, cols) = bsize
op_type = bt.supremum(self._lattice_type_for_element_type(left_type.element_type), self._lattice_type_for_element_type(right_type.element_type))
if (bt.supremum(op_type, bt.NegativeReal) == bt.NegativeReal):
return bt.NegativeRealMatrix(rows, cols)
if (bt.supremum(op_type, bt.PositiveReal) == bt.PositiveReal):
return bt.PositiveRealMatrix(rows, cols)
return bt.RealMatrix(rows, cols)
_matrix_tpe_constructors = {bt.Real: (lambda r, c: bt.RealMatrix(r, c)), bt.PositiveReal: (lambda r, c: bt.PositiveRealMatrix(r, c)), bt.NegativeReal: (lambda r, c: bt.NegativeRealMatrix(r, c)), bt.Probability: (lambda r, c: bt.ProbabilityMatrix(r, c)), bt.Boolean: (lambda r, c: bt.BooleanMatrix(r, c)), bt.NaturalMatrix: (lambda r, c: bt.NaturalMatrix(r, c))}
def _type_tensor_node(self, node: bn.TensorNode) -> bt.BMGLatticeType:
size = node._size
element_type = bt.supremum(*[self[i] for i in node.inputs])
if (len(size) == 0):
return element_type
if (len(size) == 1):
rows = 1
columns = size[0]
elif (len(size) == 2):
rows = size[0]
columns = size[1]
else:
return bt.Untypable
return self._matrix_tpe_constructors[element_type](rows, columns)
def _type_matrix_exp(self, node: bn.MatrixExpNode) -> bt.BMGLatticeType:
    """Elementwise exp: a negative-real operand yields probabilities,
    anything else yields positive reals."""
    assert len(node.inputs) == 1
    operand_type = self[node.operand]
    assert operand_type is not bt.Untypable
    assert isinstance(operand_type, bt.BMGMatrixType)
    # exp(x) with x <= 0 lies in (0, 1].
    ctor = (
        bt.ProbabilityMatrix
        if isinstance(operand_type, bt.NegativeRealMatrix)
        else bt.PositiveRealMatrix
    )
    return ctor(operand_type.rows, operand_type.columns)
def _type_matrix_phi(self, node: bn.MatrixPhiNode) -> bt.BMGLatticeType:
    """Elementwise normal CDF; each result is a probability."""
    assert len(node.inputs) == 1
    operand_type = self[node.operand]
    assert operand_type is not bt.Untypable
    assert isinstance(operand_type, bt.BMGMatrixType)
    return bt.ProbabilityMatrix(operand_type.rows, operand_type.columns)
def _type_matrix_log(self, node: bn.MatrixLogNode) -> bt.BMGLatticeType:
    """Elementwise log: log of a probability is a negative real,
    otherwise the result is only known to be real."""
    assert len(node.inputs) == 1
    operand_type = self[node.operand]
    assert operand_type is not bt.Untypable
    assert isinstance(operand_type, bt.BMGMatrixType)
    ctor = (
        bt.NegativeRealMatrix
        if isinstance(operand_type, bt.ProbabilityMatrix)
        else bt.RealMatrix
    )
    return ctor(operand_type.rows, operand_type.columns)
def _type_matrix_log1mexp(self, node: bn.MatrixLog1mexpNode) -> bt.BMGLatticeType:
    """Elementwise log(1 - exp(x)); operand elements must be non-positive."""
    assert len(node.inputs) == 1
    operand_type = self[node.operand]
    assert operand_type is not bt.Untypable
    assert isinstance(operand_type, bt.BMGMatrixType)
    element = self._lattice_type_for_element_type(operand_type.element_type)
    # Only negative-real (or narrower) elements are legal inputs here.
    assert bt.supremum(bt.NegativeReal, element) == bt.NegativeReal
    return bt.RealMatrix(operand_type.rows, operand_type.columns)
def _type_matrix_complement(self, node: bn.MatrixComplementNode) -> bt.BMGLatticeType:
    """Elementwise 1-x for boolean, probability, or simplex matrices."""
    assert len(node.inputs) == 1
    operand_type = self[node.operand]
    assert operand_type is not bt.Untypable
    assert isinstance(operand_type, (bt.BroadcastMatrixType, bt.SimplexMatrix))
    if isinstance(operand_type, bt.SimplexMatrix):
        # The complement of a simplex is typed as a simplex of the same shape.
        return bt.SimplexMatrix(operand_type.rows, operand_type.columns)
    element = self._lattice_type_for_element_type(operand_type.element_type)
    if bt.supremum(bt.Boolean, element) == bt.Boolean:
        return bt.BooleanMatrix(operand_type.rows, operand_type.columns)
    if bt.supremum(bt.Probability, element) == bt.Probability:
        return bt.ProbabilityMatrix(operand_type.rows, operand_type.columns)
    # Complement is only defined on values in [0, 1].
    return bt.Untypable
def _type_matrix_sum(self, node: bn.MatrixSumNode) -> bt.BMGLatticeType:
    """Summing a matrix yields a scalar of the matrix's element type."""
    operand_type = self[node.operand]
    assert isinstance(operand_type, bt.BMGMatrixType)
    return self._lattice_type_for_element_type(operand_type.element_type)
def _type_matrix_negate(self, node: bn.MatrixNegateNode) -> bt.BMGLatticeType:
    """Elementwise negation flips the sign constraint of the element type."""
    assert len(node.inputs) == 1
    operand_type = self[node.operand]
    assert operand_type is not bt.Untypable
    assert isinstance(operand_type, bt.BMGMatrixType)
    element = self._lattice_type_for_element_type(operand_type.element_type)
    rows, columns = operand_type.rows, operand_type.columns
    is_non_negative = (
        bt.supremum(bt.PositiveReal, element) == bt.PositiveReal
        or bt.supremum(bt.Probability, element) == bt.Probability
    )
    if is_non_negative:
        # Negating non-negative values yields non-positive values.
        return bt.NegativeRealMatrix(rows, columns)
    if bt.supremum(bt.NegativeReal, element) == bt.NegativeReal:
        return bt.PositiveRealMatrix(rows, columns)
    return bt.RealMatrix(rows, columns)
def _type_observation(self, node: bn.Observation) -> bt.BMGLatticeType:
    """An observation has the type of the node it observes."""
    observed = node.observed
    return self[observed]
def _type_query(self, node: bn.Query) -> bt.BMGLatticeType:
    """A query has the type of the operator it queries."""
    queried = node.operator
    return self[queried]
def _type_dirichlet(self, node: bn.DirichletNode) -> bt.BMGLatticeType:
    """A Dirichlet sample is a one-column simplex sized by its concentration."""
    concentration_type = self[node.concentration]
    # A non-matrix concentration degenerates to a 1x1 simplex.
    if isinstance(concentration_type, bt.BMGMatrixType):
        rows = concentration_type.rows
    else:
        rows = 1
    return bt.SimplexMatrix(rows, 1)
def _type_addition(self, node: bn.BMGNode) -> bt.BMGLatticeType:
    """Sum type: all-nonpositive stays negative real, all-nonnegative stays
    positive real, anything mixed widens to real."""
    joined = bt.supremum(*[self[i] for i in node.inputs])
    for candidate in (bt.NegativeReal, bt.PositiveReal):
        if bt.supremum(joined, candidate) == candidate:
            return candidate
    return bt.Real
def _type_column_index(self, node: bn.ColumnIndexNode) -> bt.BMGLatticeType:
    """Selecting one column keeps the row count; width becomes 1."""
    matrix_type = self[node.left]
    assert isinstance(matrix_type, bt.BMGMatrixType)
    # Zero and one-hot matrices only guarantee boolean elements.
    if isinstance(matrix_type, (bt.ZeroMatrix, bt.OneHotMatrix)):
        return bt.Boolean.with_dimensions(matrix_type.rows, 1)
    return matrix_type.with_dimensions(matrix_type.rows, 1)
def _type_complement(self, node: bn.ComplementNode) -> bt.BMGLatticeType:
    """Complement of a boolean is boolean; anything else is a probability."""
    operand_type = self[node.operand]
    is_boolish = bt.supremum(operand_type, bt.Boolean) == bt.Boolean
    return bt.Boolean if is_boolish else bt.Probability
def _type_exp(self, node: bn.ExpNode) -> bt.BMGLatticeType:
    """exp of a non-positive operand is a probability, else a positive real."""
    operand_type = self[node.operand]
    is_non_positive = bt.supremum(operand_type, bt.NegativeReal) == bt.NegativeReal
    return bt.Probability if is_non_positive else bt.PositiveReal
def _type_expm1(self, node: bn.ExpM1Node) -> bt.BMGLatticeType:
    """expm1 preserves sign: non-negative input -> positive real,
    non-positive input -> negative real, otherwise real."""
    operand_type = self[node.operand]
    for candidate in (bt.PositiveReal, bt.NegativeReal):
        if bt.supremum(operand_type, candidate) == candidate:
            return candidate
    return bt.Real
def _type_if(self, node: bn.IfThenElseNode) -> bt.BMGLatticeType:
    """Join of both branch types; Zero/One pseudo-types widen to Boolean."""
    joined = bt.supremum(self[node.consequence], self[node.alternative])
    if joined in (bt.Zero, bt.One):
        joined = bt.Boolean
    return joined
def _type_choice(self, node: bn.ChoiceNode) -> bt.BMGLatticeType:
    """Join of all value operands; input 0 is the selector and is skipped."""
    value_types = [self[node.inputs[i]] for i in range(1, len(node.inputs))]
    joined = bt.supremum(*value_types)
    if joined in (bt.Zero, bt.One):
        joined = bt.Boolean
    return joined
def _type_cholesky(self, node: bn.CholeskyNode) -> bt.BMGLatticeType:
    """A Cholesky factorization is typed the same as its operand."""
    operand_type = self[node.operand]
    return operand_type
def _type_index(self, node: bn.VectorIndexNode) -> bt.BMGLatticeType:
    """Type of a single indexed element of a vector."""
    vector_type = self[node.left]
    # Zero and one-hot vectors only guarantee boolean elements.
    if isinstance(vector_type, (bt.OneHotMatrix, bt.ZeroMatrix)):
        return bt.Boolean
    if isinstance(vector_type, bt.SimplexMatrix):
        return bt.Probability
    if isinstance(vector_type, bt.BMGMatrixType):
        # Collapse the matrix type to its 1x1 (scalar) form.
        return vector_type.with_dimensions(1, 1)
    # Fallback for non-matrix operands.
    return bt.Real
def _type_log(self, node: bn.LogNode) -> bt.BMGLatticeType:
    """log of a probability is a negative real; anything wider gives a real."""
    widened = bt.supremum(self[node.operand], bt.Probability)
    return bt.NegativeReal if widened == bt.Probability else bt.Real
def _type_multiplication(self, node: bn.MultiplicationNode) -> bt.BMGLatticeType:
    """Product type: at least a probability, capped at real."""
    joined = bt.supremum(*[self[i] for i in node.inputs])
    widened = bt.supremum(joined, bt.Probability)
    # Anything that widened past real collapses to real.
    return widened if bt.supremum(widened, bt.Real) == bt.Real else bt.Real
def _type_matrix_multiplication(self, node: bn.MatrixMultiplicationNode) -> bt.BMGLatticeType:
    """Matrix product: (left rows) x (right columns), real-valued elements."""
    assert len(node.inputs) == 2
    left_type = self[node.left]
    assert left_type is not bt.Untypable
    assert isinstance(left_type, bt.BMGMatrixType)
    right_type = self[node.right]
    assert right_type is not bt.Untypable
    assert isinstance(right_type, bt.BMGMatrixType)
    return bt.RealMatrix(left_type.rows, right_type.columns)
def _type_matrix_scale(self, node: bn.MatrixScaleNode) -> bt.BMGLatticeType:
    """Scalar * matrix: broadcast the scalar's type to the matrix's shape
    and join it with the matrix's type."""
    assert len(node.inputs) == 2
    scalar_type = self[node.left]
    assert scalar_type is not bt.Untypable
    # The left operand must be at most real and is a (1x1) matrix type.
    assert bt.supremum(scalar_type, bt.Real) == bt.Real
    assert isinstance(scalar_type, bt.BMGMatrixType)
    scalar_type = typing.cast(bt.BroadcastMatrixType, scalar_type)
    matrix_type = self[node.right]
    assert matrix_type is not bt.Untypable
    assert isinstance(matrix_type, bt.BMGMatrixType)
    broadcast = scalar_type.with_dimensions(matrix_type.rows, matrix_type.columns)
    return bt.supremum(broadcast, matrix_type)
def _type_negate(self, node: bn.NegateNode) -> bt.BMGLatticeType:
    """Negation flips the sign constraint of the operand's type."""
    operand_type = self[node.operand]
    # Check the wider (positive) bound first: non-negative -> non-positive.
    if bt.supremum(operand_type, bt.PositiveReal) == bt.PositiveReal:
        return bt.NegativeReal
    if bt.supremum(operand_type, bt.NegativeReal) == bt.NegativeReal:
        return bt.PositiveReal
    return bt.Real
def _type_power(self, node: bn.PowerNode) -> bt.BMGLatticeType:
    """Power: probability ** real is a positive real; otherwise the widened
    base type when it is at most real, else real."""
    base_type = bt.supremum(self[node.left], bt.Probability)
    exponent_type = bt.supremum(self[node.right], bt.PositiveReal)
    # A probability raised to a possibly-negative real power can exceed 1.
    if base_type == bt.Probability and exponent_type == bt.Real:
        return bt.PositiveReal
    if bt.supremum(base_type, bt.Real) == bt.Real:
        return base_type
    return bt.Real
def _type_sample(self, node: bn.SampleNode) -> bt.BMGLatticeType:
    """A sample has whatever type the typer assigned to its distribution."""
    distribution = node.operand
    return self[distribution]
def _type_to_matrix(self, node: bn.ToMatrixNode) -> bt.BMGLatticeType:
    """Type a ToMatrix node: inputs are rows, columns, then the elements."""
    assert len(node.inputs) >= 3
    rows = node.inputs[0]
    columns = node.inputs[1]
    assert isinstance(rows, bn.NaturalNode)
    assert isinstance(columns, bn.NaturalNode)
    # Join the element types (note: slice the underlying input list).
    element_sup = bt.supremum(*(self[item] for item in node.inputs.inputs[2:]))
    if bt.supremum(element_sup, bt.Real) != bt.Real:
        # Elements wider than real collapse to a real matrix.
        element_sup = bt.Real
    elif element_sup in (bt.One, bt.Zero):
        element_sup = bt.Boolean
    assert isinstance(element_sup, bt.BMGMatrixType)
    return element_sup.with_dimensions(rows.value, columns.value)
def _type_broadcast(self, node: bn.BMGNode) -> bt.BMGLatticeType:
    """Broadcast/fill: the value's matrix type at the requested dimensions."""
    assert isinstance(node, (bn.BroadcastNode, bn.FillMatrixNode))
    assert len(node.inputs) == 3
    value = node.inputs[0]
    rows = node.inputs[1]
    columns = node.inputs[2]
    assert isinstance(rows, bn.NaturalNode)
    assert isinstance(columns, bn.NaturalNode)
    value_type = self[value]
    assert isinstance(value_type, bt.BMGMatrixType)
    return value_type.with_dimensions(rows.value, columns.value)
def _type_to_real_matrix(self, node: bn.ToRealMatrixNode) -> bt.BMGLatticeType:
    """Conversion to a real matrix keeps the operand's dimensions."""
    operand = node.operand
    operand_type = self[operand]
    assert isinstance(operand_type, bt.BMGMatrixType)
    assert self.is_matrix(operand)
    return bt.RealMatrix(operand_type.rows, operand_type.columns)
def _type_to_pos_real_matrix(self, node: bn.ToPositiveRealMatrixNode) -> bt.BMGLatticeType:
    """Conversion to a positive-real matrix keeps the operand's dimensions."""
    operand = node.operand
    operand_type = self[operand]
    assert isinstance(operand_type, bt.BMGMatrixType)
    assert self.is_matrix(operand)
    return bt.PositiveRealMatrix(operand_type.rows, operand_type.columns)
def _type_to_neg_real_matrix(self, node: bn.ToNegativeRealMatrixNode) -> bt.BMGLatticeType:
    """Conversion to a negative-real matrix keeps the operand's dimensions."""
    operand = node.operand
    operand_type = self[operand]
    assert isinstance(operand_type, bt.BMGMatrixType)
    assert self.is_matrix(operand)
    return bt.NegativeRealMatrix(operand_type.rows, operand_type.columns)
def _type_transpose(self, node: bn.TransposeNode) -> bt.BMGLatticeType:
    """Transpose swaps the dimensions; the result is typed as a real matrix."""
    operand = node.operand
    operand_type = self[operand]
    assert operand_type is not bt.Untypable
    assert isinstance(operand_type, bt.BMGMatrixType)
    assert self.is_matrix(operand)
    return bt.RealMatrix(operand_type.columns, operand_type.rows)
def _type_lkj_cholesky(self, node: bn.LKJCholeskyNode) -> bt.BMGLatticeType:
    """An LKJ Cholesky sample is a square real matrix of the given dimension."""
    dim_node = node.dim
    assert isinstance(dim_node, bn.ConstantNode)
    n = dim_node.value
    assert isinstance(n, int)
    return bt.RealMatrix(n, n)
def _compute_type_inputs_known(self, node: bn.BMGNode) -> bt.BMGLatticeType:
    """Compute the lattice type of a node whose inputs have all been typed.

    Returns bt.Untypable when any input is untypable or when the node kind
    is not recognized by any dispatch table.
    """
    # An untypable input poisons the node's type.
    for i in node.inputs:
        if (self[i] == bt.Untypable):
            return bt.Untypable
    if isinstance(node, bn.UntypedConstantNode):
        # Constants are typed directly from their Python value.
        return bt.type_of_value(node.value)
    t = type(node)
    if (t in _requires_nothing):
        # Node kinds whose type is fixed regardless of inputs.
        result = _requires_nothing[t]
    elif (t in _constant_matrix_graph_types):
        # Constant tensors: fixed element type, dimensions from the value.
        assert isinstance(node, bn.ConstantTensorNode)
        r = _constant_matrix_graph_types[t]
        result = r.with_size(node.value.size())
    elif (t in self._dispatch):
        # Everything else goes through the per-node-kind handler table.
        result = self._dispatch[t](node)
    else:
        result = bt.Untypable
    # Handlers must never produce the Zero/One pseudo-types.
    assert ((result != bt.Zero) and (result != bt.One))
    return result
def is_bool(self, node: bn.BMGNode) -> bool:
    """True if the node is typable and its type is at most Boolean."""
    node_type = self[node]
    if node_type == bt.Untypable:
        return False
    return bt.supremum(node_type, bt.Boolean) == bt.Boolean
def is_natural(self, node: bn.BMGNode) -> bool:
    """True if the node is typable and its type is at most Natural."""
    node_type = self[node]
    if node_type == bt.Untypable:
        return False
    return bt.supremum(node_type, bt.Natural) == bt.Natural
def is_prob_or_bool(self, node: bn.BMGNode) -> bool:
    """True if the node is typable and at most a Probability (Booleans included)."""
    node_type = self[node]
    if node_type == bt.Untypable:
        return False
    return bt.supremum(node_type, bt.Probability) == bt.Probability
def is_neg_real(self, node: bn.BMGNode) -> bool:
    """True if the node is typable and its type is at most NegativeReal."""
    node_type = self[node]
    if node_type == bt.Untypable:
        return False
    return bt.supremum(node_type, bt.NegativeReal) == bt.NegativeReal
def is_pos_real(self, node: bn.BMGNode) -> bool:
    """True if the node is typable and its type is at most PositiveReal."""
    node_type = self[node]
    if node_type == bt.Untypable:
        return False
    return bt.supremum(node_type, bt.PositiveReal) == bt.PositiveReal
def is_real(self, node: bn.BMGNode) -> bool:
    """True if the node is typable and its type is at most Real."""
    node_type = self[node]
    if node_type == bt.Untypable:
        return False
    return bt.supremum(node_type, bt.Real) == bt.Real
def is_matrix(self, node: bn.BMGNode) -> bool:
    """True if the node's value is a matrix rather than a scalar."""
    # Some node kinds always produce matrices regardless of their type.
    if type(node) in _always_matrix_types:
        return True
    lattice_type = self[node]
    if isinstance(lattice_type, bt.SimplexMatrix):
        return True
    # A 1x1 matrix type is treated as a scalar.
    return (
        isinstance(lattice_type, bt.BMGMatrixType)
        and (lattice_type.rows != 1 or lattice_type.columns != 1)
    )
class OptionPlotoptionsColumnpyramidTooltipDatetimelabelformats(Options):
    """Datetime label formats for columnpyramid tooltips, one per time unit.

    BUGFIX: every getter/setter pair here was declared as two plain methods
    with the same name, so the later setter definition silently replaced the
    getter and the option could no longer be read. Restored the
    @property / @<name>.setter pairing.
    """

    @property
    def day(self):
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)
class DerivablePaths():
    """Tracks the next unallocated derivation index for each derivation subpath."""

    def to_derivation_data(self) -> Dict[(str, Any)]:
        """Serialize the per-subpath watermarks for persistence."""
        return {'subpaths': list(self._sequence_watermarks.items())}

    def set_row(self, row: Optional[MasterKeyRow]=None) -> None:
        """Reset the watermarks, loading them from `row` when one is given."""
        self._sequence_watermarks: Dict[(Sequence[int], int)] = {}
        if row is None:
            return
        loaded: Dict[(Sequence[int], int)] = defaultdict(int)
        # Stored keys are JSON lists; convert to hashable tuples.
        for derivation_path, next_index in json.loads(row.derivation_data)['subpaths']:
            loaded[tuple(derivation_path)] = next_index
        self._sequence_watermarks.update(loaded)

    def allocate_indexes(self, derivation_path: Sequence[int], count: int) -> int:
        """Reserve `count` indexes on `derivation_path`; return the first one."""
        first_free = self._sequence_watermarks.get(derivation_path, 0)
        self._sequence_watermarks[derivation_path] = first_free + count
        return first_free

    def get_next_index(self, derivation_path: Sequence[int]) -> int:
        """Peek at the next unallocated index without reserving it."""
        return self._sequence_watermarks.get(derivation_path, 0)
class ColorOptions(Option):
    """Color palette configuration used when rendering visualizations."""

    # Primary/secondary brand colors; current/reference data colors fall
    # back to these when not explicitly set (see the getters below).
    primary_color: str = RED
    secondary_color: str = GREY
    current_data_color: Optional[str] = None
    reference_data_color: Optional[str] = None
    additional_data_color: str = '#0a5f38'
    # Default discrete color cycle for multi-series plots.
    color_sequence: Sequence[str] = COLOR_DISCRETE_SEQUENCE
    fill_color: str = 'LightGreen'
    zero_line_color: str = 'green'
    non_visible_color: str = 'white'
    underestimation_color: str = '#6574f7'
    overestimation_color: str = '#ee5540'
    majority_color: str = '#1acc98'
    vertical_lines: str = 'green'
    # Colormap name used for heatmaps.
    heatmap: str = 'RdBu_r'

    def get_current_data_color(self):
        # Explicit override wins; otherwise use the primary color.
        return (self.current_data_color or self.primary_color)

    def get_reference_data_color(self):
        # Explicit override wins; otherwise use the secondary color.
        return (self.reference_data_color or self.secondary_color)
class Command(BaseCommand):
    """Management command that switches maintenance mode on or off."""

    args = '<on|off>'
    help = f'run python manage.py maintenance_mode {args} to change maintenance-mode state'

    def add_arguments(self, parser):
        """Register the positional state argument and the --interactive flag."""
        parser.add_argument('state')
        parser.add_argument('--interactive', dest='interactive', action='store_true')

    def get_maintenance_mode(self):
        """Read the current state, converting I/O failures into CommandError."""
        try:
            return core.get_maintenance_mode()
        except OSError as error:
            raise CommandError(f'Unable to read state file at: {settings.MAINTENANCE_MODE_STATE_FILE_NAME}') from error

    def set_maintenance_mode(self, value):
        """Persist the new state, converting I/O failures into CommandError."""
        try:
            core.set_maintenance_mode(value)
        except OSError as error:
            raise CommandError(f'Unable to write state file at: {settings.MAINTENANCE_MODE_STATE_FILE_NAME}') from error

    def set_maintenance_mode_with_confirm(self, value, confirm_message, interactive):
        """Set the state, first prompting for confirmation when interactive."""
        if interactive and not self.confirm(confirm_message):
            return
        self.set_maintenance_mode(value)

    def confirm(self, message):
        """Prompt the user; any answer starting with 'y' counts as yes."""
        return input(message).lower().startswith('y')

    def handle(self, *args, **options):
        """Dispatch on the requested state and, when verbose, report the result."""
        verbose = int(options['verbosity']) == 3
        interactive = options.get('interactive', False)
        state = options['state'].lower()
        value = self.get_maintenance_mode()
        if state in ('on', 'yes', 'true', '1'):
            if value:
                if verbose:
                    self.stdout.write('maintenance mode is already on')
                return
            self.set_maintenance_mode_with_confirm(True, 'maintenance mode on? (y/N) ', interactive)
        elif state in ('off', 'no', 'false', '0'):
            if not value:
                if verbose:
                    self.stdout.write('maintenance mode is already off')
                return
            self.set_maintenance_mode_with_confirm(False, 'maintenance mode off? (y/N) ', interactive)
        else:
            raise CommandError(f'Invalid argument: {state!r} expected {self.args}')
        if verbose:
            state_str = 'on' if self.get_maintenance_mode() else 'off'
            self.stdout.write(f'maintenance mode: {state_str}')
        return
class tag_filter(object):
    """Filters tags/properties out of (or retains them in) a block of text.

    Only the mode/state bookkeeping is implemented; the parsing and
    filtering pipeline methods are placeholders.
    """

    MODE_RETAIN = 1
    MODE_DROP = 2

    # Class-level defaults; instances override these in __init__.
    __mode = MODE_DROP
    __text = None
    filter_tag_list = None
    filter_property_list = None

    def __init__(self, text, filter_mode=MODE_DROP):
        self.__mode = filter_mode
        self.__text = text
        self.filter_property_list = []
        self.filter_tag_list = []

    def change_mode(self, mode):
        """Switch mode and discard any accumulated tag/property filters."""
        self.__mode = mode
        self.filter_property_list = []
        self.filter_tag_list = []

    def __drop_tag(self):
        pass  # not implemented

    def __drop_property(self):
        pass  # not implemented

    def filter(self):
        pass  # not implemented

    def get_result(self):
        pass  # not implemented

    def __get_syntax_tree(self, sts):
        pass  # not implemented
def verify(color: 'Color', tolerance: float) -> bool:
    """Check that every bounded, non-angle channel of `color` lies within its
    declared range, expanded on both sides by `tolerance`.

    The alpha channel (last component) is excluded and NaN components are
    normalized away before checking.
    """
    channels = alg.no_nans(color[:-1])
    for index, value in enumerate(channels):
        channel = color._space.CHANNELS[index]
        # Hue-like channels wrap around, so they are never out of range.
        if channel.flags & FLG_ANGLE:
            continue
        if channel.bound:
            low, high = channel.low, channel.high
        else:
            # Unbounded channels accept any value.
            low = high = None
        if low is not None and value < (low - tolerance):
            return False
        if high is not None and value > (high + tolerance):
            return False
    return True
# BUGFIX: the decorator name had been stripped, leaving a bare
# `(name=..., ...)` tuple — a syntax error. Restored @RegisterWithArgChecks,
# the registration decorator used by the BGP speaker's prefix.* API handlers;
# confirm the name against this module's other handlers/imports.
@RegisterWithArgChecks(name='prefix.delete_local', req_args=[ROUTE_DISTINGUISHER, PREFIX], opt_args=[VRF_RF])
def delete_local(route_dist, prefix, route_family=VRF_RF_IPV4):
    """Withdraw a locally-advertised prefix from the given VRF table.

    Returns a single-entry list describing the withdrawn prefix; BGP core
    failures are translated into PrefixError.
    """
    try:
        tm = CORE_MANAGER.get_core_service().table_manager
        # is_withdraw=True turns this update into a route withdrawal.
        tm.update_vrf_table(route_dist, prefix, route_family=route_family, is_withdraw=True)
        return [{ROUTE_DISTINGUISHER: route_dist, PREFIX: prefix, VRF_RF: route_family}]
    except BgpCoreError as e:
        raise PrefixError(desc=e)
class OptionSeriesWaterfallSonificationTracksMappingPitch(Options):
    """Pitch mapping options for waterfall sonification tracks.

    BUGFIX: each getter/setter pair was declared as two plain methods with
    the same name, so the setter shadowed the getter and the option could no
    longer be read. Restored the @property / @<name>.setter pairing.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class ObtainJSONWebTokenMixin(JSONWebTokenMixin):
    """Mixin requiring JSON-web-token mutation subclasses to define resolve()."""

    # BUGFIX: this hook takes the class itself (`cls`) and is invoked on the
    # class during subclass initialization, so it must be a classmethod;
    # without the decorator the first positional argument is misbound.
    @classmethod
    def __init_subclass_with_meta__(cls, name=None, **options):
        assert getattr(cls, 'resolve', None), f'{(name or cls.__name__)}.resolve method is required in a JSONWebTokenMutation.'
        super().__init_subclass_with_meta__(name=name, **options)
def get_grouped_critpath_components(collection='master', component_type='rpm', components=None):
    """Return critical-path components for `collection`, grouped by group name.

    Only supported when critpath.type is 'json'. When `components` is given,
    every group is filtered down to that subset and empty groups are dropped.
    Missing or malformed JSON files are logged and yield an empty result.
    """
    critpath_type = config.get('critpath.type')
    if critpath_type != 'json':
        # Give the default (unset) backend a readable name in the message.
        if not critpath_type:
            critpath_type = '(default)'
        raise ValueError(f'critpath.type {critpath_type} does not support groups')
    grouped = {}
    try:
        grouped = read_critpath_json(collection).get(component_type, {})
    except FileNotFoundError:
        log.warning(f'No JSON file found for collection {collection}')
    except json.JSONDecodeError:
        log.warning(f'JSON file for collection {collection} is invalid')
    if components and grouped:
        filtered = {}
        for group, members in grouped.items():
            kept = [member for member in members if member in components]
            if kept:
                filtered[group] = kept
        grouped = filtered
    return grouped
class OptionPlotoptionsSunburstLevelsColorvariation(Options):
    """Color-variation options for sunburst levels.

    BUGFIX: each getter/setter pair was declared as two plain methods with
    the same name, so the setter shadowed the getter and the option could no
    longer be read. Restored the @property / @<name>.setter pairing.
    """

    @property
    def key(self):
        return self._config_get(None)

    @key.setter
    def key(self, text: str):
        self._config(text, js_type=False)

    @property
    def to(self):
        return self._config_get(None)

    @to.setter
    def to(self, num: float):
        self._config(num, js_type=False)
def _colorIsCGColorRef(color):
    """Evaluate, in the debugged process, whether `color` is a CGColorRef
    (i.e. whether its CFTypeID matches CGColor's)."""
    cast_expr = ('(CGColorRef)(' + color) + ')'
    check = fb.evaluateExpressionValue(
        '(unsigned long)CFGetTypeID({color}) == (unsigned long)CGColorGetTypeID()'.format(color=cast_expr)
    )
    error = check.GetError()
    if error is not None and str(error) != 'success':
        print('got error: {}'.format(check))
        return False
    # A nonzero result means the CFTypeIDs matched.
    return check.GetValueAsUnsigned() != 0
def warn_stacklevel() -> int:
    """Compute the `stacklevel` that attributes a warning to the first frame
    outside this package; returns 0 when it cannot be determined."""
    try:
        package = __name__.partition('.')[0]
        package_path = Path(sys.modules[package].__file__)
        is_package_dir = package_path.name == '__init__.py'
        if is_package_dir:
            # Compare against the package directory, not its __init__.py.
            package_path = package_path.parent
        for level, frame in enumerate(inspect.stack()):
            frame_path = Path(frame.filename)
            # Drop the frame reference promptly to break reference cycles.
            del frame
            if is_package_dir:
                outside = package_path not in frame_path.parents
            else:
                outside = package_path != frame_path
            if outside:
                return level
    except KeyError:
        # Module not in sys.modules (or missing __file__ key path).
        pass
    return 0
def skip_input_signal_add_output_signal(single_output, out_flex_key, in_flex_key, st_flex_key):
    """Decorator factory: strips the input/state signal arguments before
    calling the wrapped callback, restores the cached triggered-inputs
    context when present, and appends a UTC timestamp output signal to the
    callback's result.
    """
    import functools

    def wrapper(f):
        # BUGFIX: the decorator line had been corrupted to the bare expression
        # `(f)` (a no-op). Restored functools.wraps so the wrapped callback
        # keeps f's name/docstring, which matters for registration/debugging.
        @functools.wraps(f)
        def decorated_function(*args, **kwargs):
            (args, kwargs, fltr) = _skip_inputs(args, kwargs, [in_flex_key, st_flex_key])
            cached_ctx = fltr[1]
            if (cached_ctx is not None) and ('triggered' in cached_ctx):
                # Re-expose the originally triggered inputs to the callback.
                ctx = context_value.get()
                ctx['triggered_inputs'] = cached_ctx['triggered']
                context_value.set(ctx)
            outputs = f(*args, **kwargs)
            # Tag the outputs with the current UTC timestamp signal.
            return _append_output(outputs, datetime.utcnow().timestamp(), single_output, out_flex_key)
        return decorated_function
    return wrapper
class OptionPlotoptionsBulletAccessibility(Options):
    """Accessibility options for bullet series.

    BUGFIX: each getter/setter pair was declared as two plain methods with
    the same name, so the setter shadowed the getter and the option could no
    longer be read. Restored the @property / @<name>.setter pairing; the
    sub-option accessors are read-only properties.
    """

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormat(self):
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def exposeAsGroupOnly(self):
        return self._config_get(None)

    @exposeAsGroupOnly.setter
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keyboardNavigation(self) -> 'OptionPlotoptionsBulletAccessibilityKeyboardnavigation':
        return self._config_sub_data('keyboardNavigation', OptionPlotoptionsBulletAccessibilityKeyboardnavigation)

    @property
    def point(self) -> 'OptionPlotoptionsBulletAccessibilityPoint':
        return self._config_sub_data('point', OptionPlotoptionsBulletAccessibilityPoint)
class BinaryOp(Node):
    """AST node for a binary operation: `left op right`."""

    __slots__ = ('op', 'left', 'right', 'coord', '__weakref__')

    def __init__(self, op, left, right, coord=None):
        self.op = op
        self.left = left
        self.right = right
        self.coord = coord

    def children(self):
        """Return (name, node) pairs for the non-None child nodes."""
        pairs = (('left', self.left), ('right', self.right))
        return tuple((name, child) for name, child in pairs if child is not None)

    # Names of non-child attributes shown when dumping the node.
    attr_names = ('op',)
class RoleUser(ModelSimple):
    """Enumerated user-role value: 'user' | 'billing' | 'engineer' | 'superuser'.

    NOTE(review): several decorators in this class had been corrupted into
    bare expression statements (`_property`, `_js_args_to_python_args`),
    which would raise NameError when the class body executes. They are
    restored below as @cached_property / @convert_js_args_to_python_args
    (plus @classmethod on the alternate constructor), matching the
    generated-API-client conventions this class follows — confirm the names
    against this module's imports.
    """

    # Closed set of legal values for this model.
    allowed_values = {('value',): {'USER': 'user', 'BILLING': 'billing', 'ENGINEER': 'engineer', 'SUPERUSER': 'superuser'}}
    validations = {}
    additional_properties_type = None
    _nullable = False

    @cached_property
    def openapi_types():
        # The single 'value' attribute is a string.
        return {'value': (str,)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Accept the enum value positionally or as `value=`; reject extras.

        Raises ApiTypeError when the value is missing or when unexpected
        positional/keyword arguments remain after the known ones are popped.
        """
        _path_to_item = kwargs.pop('_path_to_item', ())
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        # Assigning .value triggers the model's validation machinery.
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server responses.

        Bypasses __init__ via __new__ and populates the instance directly.
        """
        _path_to_item = kwargs.pop('_path_to_item', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        return self
def exposed_test_local_rpc_fetch():
    """Exercise the remote fetch RPC interface by dispatching two fetch jobs.

    NOTE(review): the two `args=[...]` URL string literals were corrupted in
    this file (their contents were lost, leaving unterminated strings and a
    syntax error). A placeholder URL is used below — restore the original
    target URLs from version control.
    """
    print('Chromium Test')
    rpc_interface = common.get_rpyc.RemoteFetchInterface()
    rpc_interface.check_ok()
    print('RPC:', rpc_interface)
    print('Dispatching job engine')
    rpc_interface.check_ok()
    target_url = 'http://www.example.com'  # TODO: restore the original URL
    raw_job3 = WebMirror.JobUtils.buildjob(module='SmartWebRequest', call='smartGetItem', dispatchKey='fetcher', jobid=-1, args=[target_url], kwargs={}, additionalData={'mode': 'fetch'}, postDelay=0)
    ret3 = rpc_interface.dispatch_request(raw_job3)
    print('Return 3: ')
    pprint.pprint(ret3)
    rpc_interface.check_ok()
    raw_job4 = WebMirror.JobUtils.buildjob(module='SmartWebRequest', call='smartGetItem', dispatchKey='fetcher', jobid=-1, args=[target_url], kwargs={}, additionalData={'mode': 'fetch'}, postDelay=0)
    ret4 = rpc_interface.dispatch_request(raw_job4)
    print('Return 4: ')
    pprint.pprint(ret4)
    rpc_interface.close()
class DisrupterCable(Disrupter):
    """Disrupter that asks the tester to physically pull or replace the
    ethernet cable to simulate a network disruption."""

    PULL_MESSAGE = "Pull the ethernet cable.\nIf you're using a VM then you can simulate this by disconnecting the network adapter from the VM settings.\nNOTE: You should press enter BEFORE pulling the cable to maximize the chance of detecting a leak.\n"
    PLUG_MESSAGE = 'Replace the ethernet cable. You should press enter BEFORE replacing the cable to maximize the chance of detecting a leak.'

    def __init__(self, device, parameters):
        super().__init__(device, parameters)
        self._restrict_parameters(must_disrupt=True)
        # 'pull' (the default) means the disruption is unplugging the cable;
        # False means the disruption is plugging it back in.
        self._pull = self._parameters.get('pull', True)

    def setup(self):
        """Tell the tester what initial cabling state the test expects."""
        msg = 'Ensure you have an Ethernet (wired) connection and at least one other network service, e.g. Wi-Fi\n'
        if not self._pull:
            msg += 'Ensure that the cable is UNPLUGGED.'
        message_and_await_enter(msg)

    def disrupt(self):
        """Prompt for the cable action that triggers the disruption."""
        if self._pull:
            msg = DisrupterCable.PULL_MESSAGE
        else:
            msg = DisrupterCable.PLUG_MESSAGE
        L.describe(msg)
        message_and_await_enter(msg)

    def restore(self):
        """Prompt to return the cable to its connected state."""
        msg = 'Ensure the ethernet cable is plugged back in'
        L.describe(msg)
        message_and_await_enter(msg)

    def teardown(self):
        self.restore()
        super().teardown()
class BaseIntegrityOneColumnTest(Test, ABC):
    """Base class for data-integrity tests that operate on a single column."""

    group: ClassVar = DATA_INTEGRITY_GROUP.id
    # Metric computed over the tested column; built in __init__.
    _metric: ColumnSummaryMetric
    column_name: ColumnName

    def __init__(self, column_name: Union[(str, ColumnName)], is_critical: bool=True):
        # Accept either a plain string or a ColumnName instance.
        self.column_name = ColumnName.from_any(column_name)
        super().__init__(is_critical=is_critical)
        self._metric = ColumnSummaryMetric(self.column_name)

    def metric(self):
        """Return the column-summary metric this test is computed from."""
        return self._metric

    def groups(self) -> Dict[(str, str)]:
        """Group results by the display name of the tested column."""
        return {GroupingTypes.ByFeature.id: self.column_name.display_name}
class MagiclinkInternalRefsPattern(_MagiclinkReferencePattern):
    """Inline pattern that resolves repo-internal shorthand references
    (issues, commits, and commit-range diffs) against the default user/repo."""

    ANCESTOR_EXCLUDES = ('a',)

    def handleMatch(self, m, data):
        """Convert a shorthand reference into an <a> element, or pass it through."""
        # Without a configured default user and repo nothing can be resolved.
        if not self.user or not self.repo:
            return (None, None, None)
        commit_hash = m.group('commit')
        diff_hash = m.group('diff')
        value = m.group('commit') if commit_hash else m.group('issue')
        value2 = m.group('diff') if diff_hash else None
        repo = self.repo
        user = self.user
        provider = self.provider
        # Internal references always target our own user and repository.
        self.my_repo = True
        self.my_user = True
        link = etree.Element('a')
        if diff_hash:
            self.process_compare(link, provider, user, repo, value, value2)
        elif commit_hash:
            self.process_commit(link, provider, user, repo, value)
        elif not self.process_issues(link, provider, user, repo, value):
            # Unresolvable issue reference: leave the original text untouched.
            return (m.group(0), m.start(0), m.end(0))
        return (link, m.start(0), m.end(0))
def remove_tones(sentence):
    """Split a sentence into words, separating trailing tone digits.

    Returns (underscored, plain): `plain` has tone digits stripped and '.'
    mapped to 'pau'; `underscored` suffixes every word with '_<tone>'
    (tone 0 for words without a trailing digit). Both strings start with
    a leading space per word, matching the original formatting.
    """
    plain = ''
    underscored = ''
    for word in sentence.split():
        if word == 'pau' if False else word == '.':
            word = 'pau'
        tone = word[-1]
        if tone.isdigit():
            base = word[:-1]
            marked = base + '_' + tone
        else:
            base = word
            marked = base + '_0'
        # NOTE(review): bare lookup on the module-global dictionary — raises
        # KeyError when `base` is missing (or inserts a default entry if it
        # is a defaultdict). Preserved exactly as in the original.
        words_dict[base]
        plain += ' ' + base
        underscored += ' ' + marked
    return (underscored, plain)
class DecisionStateMachineBase(DecisionStateMachine):
id: DecisionId = None
state: DecisionState = DecisionState.CREATED
state_history: List[str] = field(default_factory=list)
def __post_init__(self):
self.state_history.append(str(self))
def get_state(self) -> DecisionState:
return self.state
def get_id(self) -> DecisionId:
return self.id
def is_done(self) -> bool:
return (self.state in (DecisionState.COMPLETED, DecisionState.COMPLETED_AFTER_CANCELLATION_DECISION_SENT))
def handle_decision_task_started_event(self):
if (self.state == DecisionState.CREATED):
self.state_history.append('handle_decision_task_started_event')
self.state = DecisionState.DECISION_SENT
self.state_history.append(str(self.state))
else:
pass
def cancel(self, immediate_cancellation_callback: Optional[Callable]) -> bool:
self.state_history.append('cancel')
result = False
if (self.state == DecisionState.CREATED):
self.state = DecisionState.COMPLETED
if immediate_cancellation_callback:
immediate_cancellation_callback()
elif (self.state == DecisionState.DECISION_SENT):
self.state = DecisionState.CANCELED_BEFORE_INITIATED
result = True
elif (self.state == DecisionState.INITIATED):
self.state = DecisionState.CANCELED_AFTER_INITIATED
result = True
else:
self.fail_state_transition()
self.state_history.append(str(self.state))
return result
def handle_initiated_event(self, event: HistoryEvent):
self.state_history.append('handle_initiated_event')
if (self.state == DecisionState.DECISION_SENT):
self.state = DecisionState.INITIATED
elif (self.state == DecisionState.CANCELED_BEFORE_INITIATED):
self.state = DecisionState.CANCELED_AFTER_INITIATED
else:
self.fail_state_transition()
self.state_history.append(str(self.state))
def handle_initiation_failed_event(self, event: HistoryEvent):
self.state_history.append('handle_initiation_failed_event')
if (self.state in (DecisionState.INITIATED, DecisionState.DECISION_SENT, DecisionState.CANCELED_BEFORE_INITIATED)):
self.state = DecisionState.COMPLETED
else:
self.fail_state_transition()
self.state_history.append(str(self.state))
def handle_started_event(self, event: HistoryEvent):
self.state_history.append('handle_started_event')
def handle_completion_event(self):
self.state_history.append('handle_completion_event')
if (self.state in (DecisionState.CANCELED_AFTER_INITIATED, DecisionState.INITIATED)):
self.state = DecisionState.COMPLETED
elif (self.state == DecisionState.CANCELLATION_DECISION_SENT):
self.state = DecisionState.COMPLETED_AFTER_CANCELLATION_DECISION_SENT
else:
self.fail_state_transition()
self.state_history.append(str(self.state))
def handle_cancellation_initiated_event(self):
self.state_history.append('handle_cancellation_initiated_event')
if (self.state == DecisionState.CANCELLATION_DECISION_SENT):
pass
else:
self.fail_state_transition()
self.state_history.append(str(self.state))
def handle_cancellation_failure_event(self, event: HistoryEvent):
    """Apply a 'cancellation failed' event: the completed result stands."""
    self.state_history.append('handle_cancellation_failure_event')
    if self.state != DecisionState.COMPLETED_AFTER_CANCELLATION_DECISION_SENT:
        # raises IllegalStateException, so the trailing append is skipped
        self.fail_state_transition()
    else:
        self.state = DecisionState.COMPLETED
    self.state_history.append(str(self.state))
def handle_cancellation_event(self):
    """Apply a 'canceled' event: a sent cancellation decision completes."""
    self.state_history.append('handle_cancellation_event')
    if self.state != DecisionState.CANCELLATION_DECISION_SENT:
        # raises IllegalStateException, so the trailing append is skipped
        self.fail_state_transition()
    else:
        self.state = DecisionState.COMPLETED
    self.state_history.append(str(self.state))
def fail_state_transition(self):
    """Raise IllegalStateException carrying the id and full transition trace."""
    raise IllegalStateException(f'id={self.id}, transitions={self.state_history}')
class Websocket(ASGIIngressMixin, _Websocket):
    """ASGI websocket wrapper that performs the accept handshake lazily.

    Until ``accept()`` runs, ``receive``/``send`` point at the implicit
    accept-then-forward variants supplied by the base class; afterwards
    they are rebound to the plain flow-wrapped implementations.
    """
    __slots__ = ['_scope', '_receive', '_send', '_accepted']

    def __init__(self, scope: Scope, receive: Receive, send: Send):
        super().__init__(scope, receive, send)
        self._accepted = False
        self._flow_receive = None
        self._flow_send = None
        # First use of receive/send triggers the accept handshake implicitly.
        self.receive = self._accept_and_receive
        self.send = self._accept_and_send

    def _asgi_spec_version(self) -> int:
        """Return the ASGI spec version as an int, e.g. '2.1' -> 21 (default '2.0' -> 20)."""
        return int(''.join(self._scope.get('asgi', {}).get('spec_version', '2.0').split('.')))

    def _encode_headers(self, headers: Dict[(str, str)]) -> List[Tuple[(bytes, bytes)]]:
        """Encode a header mapping into the ASGI list-of-byte-pairs form."""
        return [(key.encode('utf-8'), val.encode('utf-8')) for (key, val) in headers.items()]

    async def accept(self, headers: Optional[Dict[(str, str)]]=None, subprotocol: Optional[str]=None):
        """Send the websocket.accept message once; subsequent calls are no-ops."""
        if self._accepted:
            return
        message: Dict[(str, Any)] = {'type': 'websocket.accept', 'subprotocol': subprotocol}
        # Accept-time response headers require ASGI spec >= 2.1.
        # BUG FIX: _asgi_spec_version is a method and must be *called*;
        # the original compared the bound method object to 20, which
        # raised TypeError whenever headers were supplied.
        if (headers and (self._asgi_spec_version() > 20)):
            message['headers'] = self._encode_headers(headers)
        (await self._send(message))
        self._accepted = True
        self.receive = self._wrapped_receive
        self.send = self._wrapped_send

    async def _wrapped_receive(self) -> Any:
        """Receive a message and pass it through each registered receive hook."""
        data = (await self._receive())
        for method in self._flow_receive:
            data = method(data)
        return data

    async def _wrapped_send(self, data: Any):
        """Pass data through each send hook, then emit as text or bytes."""
        for method in self._flow_send:
            data = method(data)
        if isinstance(data, str):
            (await self._send({'type': 'websocket.send', 'text': data}))
        else:
            (await self._send({'type': 'websocket.send', 'bytes': data}))
def load_model(config, model, optimizer=None):
    """Restore training state from a checkpoint or pretrained weights.

    Checks `Global.checkpoints` first (params + optional optimizer/state
    files), then `Global.pretrained_model`, otherwise trains from scratch.
    Returns the best-model metrics dict ('start_epoch' included on resume).
    """
    logger = get_logger()
    global_config = config['Global']
    ckpt = global_config.get('checkpoints')
    pretrained_model = global_config.get('pretrained_model')
    best_model_dict = {}
    if ckpt:
        # normalize to the path prefix (without the .pdparams suffix)
        if ckpt.endswith('.pdparams'):
            ckpt = ckpt.replace('.pdparams', '')
        assert os.path.exists(ckpt + '.pdparams'), 'The {}.pdparams does not exists!'.format(ckpt)
        params = paddle.load(ckpt + '.pdparams')
        # only adopt checkpoint tensors whose name and shape match the model
        loadable = {}
        for key, value in model.state_dict().items():
            if key not in params:
                logger.warning('{} not in loaded params {} !'.format(key, params.keys()))
                continue
            pre_value = params[key]
            if list(value.shape) == list(pre_value.shape):
                loadable[key] = pre_value
            else:
                logger.warning('The shape of model params {} {} not matched with loaded params shape {} !'.format(key, value.shape, pre_value.shape))
        model.set_state_dict(loadable)
        if optimizer is not None:
            if os.path.exists(ckpt + '.pdopt'):
                optimizer.set_state_dict(paddle.load(ckpt + '.pdopt'))
            else:
                logger.warning('{}.pdopt is not exists, params of optimizer is not loaded'.format(ckpt))
        if os.path.exists(ckpt + '.states'):
            with open(ckpt + '.states', 'rb') as f:
                # latin1 keeps py2-pickled numpy blobs readable on py3
                if six.PY2:
                    states_dict = pickle.load(f)
                else:
                    states_dict = pickle.load(f, encoding='latin1')
            best_model_dict = states_dict.get('best_model_dict', {})
            if 'epoch' in states_dict:
                best_model_dict['start_epoch'] = states_dict['epoch'] + 1
        logger.info('resume from {}'.format(ckpt))
    elif pretrained_model:
        load_pretrained_params(model, pretrained_model)
    else:
        logger.info('train from scratch')
    return best_model_dict
def flatten_multilists_attributes(data):
    """Flatten each known multi-list attribute of *data* in place; return *data*."""
    attr_names = (
        'drop_infected', 'store_infected',
        'drop_machine_learning', 'store_machine_learning',
        'drop_blocked', 'store_blocked',
        'drop_heuristic', 'store_heuristic',
        'drop_intercepted', 'store_intercepted',
    )
    for name in attr_names:
        # each attribute is a single-element path rooted at the top level
        flatten_single_path(data, [name], 0)
    return data
class OptionPlotoptionsPolygonSonificationDefaultspeechoptionsMappingTime(Options):
    """Highcharts `mapping.time` speech options for polygon sonification.

    BUG FIX: every accessor name here was defined twice as plain methods
    (getter then setter), so the second `def` silently replaced the first
    and the getters were unreachable. Restored the `@property` /
    `@<name>.setter` pairing used by this wrapper pattern.
    """

    @property
    def mapFunction(self):
        # mapping function for the time value; None when unset
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # data property the time is mapped to; None when unset
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # upper bound of the mapped range; None when unset
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        # lower bound of the mapped range; None when unset
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        # scope the min/max applies within; None when unset
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_when_missing_values_is_ignore(df_vartypes):
    """NaNs propagate through the derived columns when missing_values='ignore'."""
    df_na = df_vartypes.copy()
    df_na.loc[(1, 'Age')] = np.nan
    transformer = RelativeFeatures(
        variables=['Age', 'Marks'],
        reference=['Age', 'Marks'],
        func=['sub'],
        missing_values='ignore',
    )
    X = transformer.fit_transform(df_na)
    expected = pd.DataFrame.from_dict({
        'Name': ['tom', 'nick', 'krish', 'jack'],
        'City': ['London', 'Manchester', 'Liverpool', 'Bristol'],
        'Age': [20, np.nan, 19, 18],
        'Marks': [0.9, 0.8, 0.7, 0.6],
        'dob': pd.date_range('2020-02-24', periods=4, freq='T'),
        'Age_sub_Age': [0, np.nan, 0, 0],
        'Marks_sub_Age': [(- 19.1), np.nan, (- 18.3), (- 17.4)],
        'Age_sub_Marks': [19.1, np.nan, 18.3, 17.4],
        'Marks_sub_Marks': [0.0, 0.0, 0.0, 0.0],
    })
    pd.testing.assert_frame_equal(X, expected)
def extractNovelAffairs(item):
    """Build a 'Novel Affairs' release message from a feed item.

    Returns None when the title has no vol/chapter info or is a preview,
    a release message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.