code stringlengths 281 23.7M |
|---|
class OptionPlotoptionsTreegraphDatalabelsTextpath(Options):
    """Config mapping for treegraph data-label textPath options.

    NOTE(review): each option appears twice -- a getter returning
    ``self._config_get(...)`` and a setter calling ``self._config(...)``.
    The ``@property`` / ``@<name>.setter`` decorators look stripped by
    extraction; as written the second def silently shadows the first.
    Confirm against the original source.
    """

    def attributes(self):
        """Return the configured textPath ``attributes`` (default None)."""
        return self._config_get(None)

    def attributes(self, value: Any):
        """Set the textPath ``attributes`` option."""
        self._config(value, js_type=False)

    def enabled(self):
        """Return whether textPath rendering is enabled (default False)."""
        return self._config_get(False)

    def enabled(self, flag: bool):
        """Enable or disable textPath rendering."""
        self._config(flag, js_type=False)
def main(self, context):
    """Align the UV islands of the current edit-mesh selection.

    Collects fully UV-selected faces, groups them into islands (or treats
    each face as its own island when ``self.bool_face`` is set), derives an
    average normal per island and aligns each island in UV space based on
    the dominant normal axis (or the user-forced ``self.axis``).
    """
    # Remember the UV select mode so it can be restored at the end.
    selection_mode = bpy.context.scene.tool_settings.uv_select_mode
    me = bpy.context.active_object.data
    bm = bmesh.from_edit_mesh(me)
    uv_layers = bm.loops.layers.uv.verify()
    # Faces that are selected and whose every UV loop is selected.
    selected_faces = {f for f in bm.faces if (all([loop[uv_layers].select for loop in f.loops]) and f.select)}
    if (not selected_faces):
        return
    if self.bool_face:
        # Per-face mode: every qualifying face becomes a one-face island.
        islands = [[f] for f in bm.faces if (all([loop[uv_layers].select for loop in f.loops]) and f.select)]
    else:
        islands = utilities_uv.getSelectionIslands(bm, uv_layers, extend_selection_to_islands=True)
    for faces in islands:
        if self.bool_face:
            calc_loops = faces[0].loops
            avg_normal = faces[0].normal
        else:
            # Prefer the explicitly selected faces of the island; fall back
            # to the whole island when none of its faces are selected.
            selected_faces_in_island = faces.intersection(selected_faces)
            if selected_faces_in_island:
                pre_calc_faces = selected_faces_in_island
            else:
                pre_calc_faces = faces
            if (len(pre_calc_faces) == 1):
                selected_face = next(iter(pre_calc_faces))
                calc_loops = selected_face.loops
                avg_normal = selected_face.normal
            else:
                calc_loops = []
                calc_edges = set()
                island_edges = {edge for face in pre_calc_faces for edge in face.edges}
                island_loops = {loop for face in pre_calc_faces for loop in face.loops}
                for edge in island_edges:
                    # An edge is "non-split" when its vertices map to exactly
                    # two distinct UV coordinates within this island.
                    # NOTE(review): ``precision`` is not defined in this
                    # extract; presumably a module-level constant -- confirm.
                    if (len({loop[uv_layers].uv.to_tuple(precision) for vert in edge.verts for loop in vert.link_loops if (loop in island_loops)}) == 2):
                        calc_edges.add(edge)
                        for loop in edge.link_loops:
                            if (loop in island_loops):
                                calc_loops.append(loop)
                                break
                if (not calc_loops):
                    self.report({'ERROR_INVALID_INPUT'}, 'Invalid selection in an island: zero non-splitted edges.')
                    continue
                avg_normal = Vector((0, 0, 0))
                # Only faces consisting entirely of non-split edges contribute
                # to the averaged normal.
                calc_faces = [face for face in pre_calc_faces if {edge for edge in face.edges}.issubset(calc_edges)]
                if (not calc_faces):
                    self.report({'ERROR_INVALID_INPUT'}, 'Invalid selection in an island: no faces formed by unique edges.')
                    continue
                for face in calc_faces:
                    avg_normal += face.normal
                avg_normal /= len(calc_faces)
        # Vector component indices used to select the projection plane.
        x = 0
        y = 1
        z = 2
        max_size = max(map(abs, avg_normal))
        # '-1' means auto-pick the dominant axis; '1'/'2' force an axis.
        if (((self.axis == '-1') and (abs(avg_normal.z) == max_size)) or (self.axis == '2')):
            align_island(self, me, bm, uv_layers, faces, calc_loops, x, y, False, (avg_normal.z < 0))
        elif (((self.axis == '-1') and (abs(avg_normal.y) == max_size)) or (self.axis == '1')):
            align_island(self, me, bm, uv_layers, faces, calc_loops, x, z, (avg_normal.y > 0), False)
        else:
            align_island(self, me, bm, uv_layers, faces, calc_loops, y, z, (avg_normal.x < 0), False)
    bpy.ops.uv.select_mode(type='VERTEX')
    bpy.context.scene.tool_settings.uv_select_mode = selection_mode
class port_status(message):
    """OpenFlow OFPT_PORT_STATUS message (wire version 5, i.e. OF 1.4).

    NOTE(review): this looks like loxigen-generated, Python-2-era code
    (str used as the byte type, ``''.join`` of packed chunks). ``unpack``
    takes only ``reader`` and is presumably a ``@staticmethod`` whose
    decorator was stripped in extraction -- confirm against the original.
    ``__eq__`` is defined without ``__hash__``, which makes instances
    unhashable in Python 3 -- presumably intentional for a mutable message.
    """
    version = 5   # OpenFlow wire protocol version
    type = 12     # OFPT_PORT_STATUS

    def __init__(self, xid=None, reason=None, desc=None):
        """Initialize, defaulting reason to 0 and desc to an empty port_desc."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (reason != None):
            self.reason = reason
        else:
            self.reason = 0
        if (desc != None):
            self.desc = desc
        else:
            self.desc = ofp.port_desc()
        return

    def pack(self):
        """Serialize to the wire format; length is backfilled afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the 16-bit total length, patched below.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!B', self.reason))
        # 7 pad bytes per the OF spec.
        packed.append(('\x00' * 7))
        packed.append(self.desc.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize a port_status from *reader* (likely a @staticmethod)."""
        obj = port_status()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 12)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.reason = reader.read('!B')[0]
        reader.skip(7)
        obj.desc = ofp.port_desc.unpack(reader)
        return obj

    def __eq__(self, other):
        """Field-by-field equality against another port_status."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.reason != other.reason):
            return False
        if (self.desc != other.desc):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*."""
        q.text('port_status {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('reason = ')
                value_name_map = {0: 'OFPPR_ADD', 1: 'OFPPR_DELETE', 2: 'OFPPR_MODIFY'}
                if (self.reason in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.reason], self.reason)))
                else:
                    q.text(('%#x' % self.reason))
                q.text(',')
                q.breakable()
                q.text('desc = ')
                q.pp(self.desc)
            q.breakable()
        q.text('}')
.sandbox_test
def test_run_remote_merge_sort():
    """Run the remote merge_sort workflow end to end and check the output."""
    wf = rr.fetch_workflow(name='core.control_flow.run_merge_sort.merge_sort', version=FETCH_VERSION)
    unsorted = [42, 41, 89, 21, 76, 94, 90, 6, 71, 9]
    # Renamed from 'exec', which shadowed the exec() builtin.
    execution = rr.execute(wf, inputs={'numbers': unsorted, 'numbers_count': len(unsorted), 'run_local_at_count': 3}, wait=True)
    assert (execution.outputs['o0'] == [6, 9, 21, 41, 42, 71, 76, 89, 90, 94])
class MethodCaller():
    """Callable proxy that forwards COM ``Invoke`` calls for one dispatch id.

    Calling the instance invokes the method; indexing reads a COM property
    (DISPATCH_PROPERTYGET); index assignment writes one, using PUTREF when
    the assigned value is itself a COM object.
    """

    def __init__(self, _id, _obj):
        self._id = _id
        self._obj = _obj

    def __call__(self, *args):
        # Plain method invocation on the wrapped COM object.
        return self._obj._comobj.Invoke(self._id, *args)

    def __getitem__(self, *args):
        # Item access maps to a COM property get.
        return self._obj._comobj.Invoke(
            self._id, *args, _invkind=comtypes.automation.DISPATCH_PROPERTYGET)

    def __setitem__(self, *args):
        # Object values need PUTREF; plain values use PUT.
        if _is_object(args[-1]):
            invkind = comtypes.automation.DISPATCH_PROPERTYPUTREF
        else:
            invkind = comtypes.automation.DISPATCH_PROPERTYPUT
        self._obj._comobj.Invoke(self._id, *args, _invkind=invkind)
def make_flag(source, names, default=False, type=None, help='', takes_argument=0):
    """Create a parameter emulating clize 2.x ``flag`` semantics (deprecated).

    source: a callable invoked when the flag is passed, or the name of the
        keyword argument the flag sets.
    names: alias names, converted to CLI option form.
    default / takes_argument / type: legacy option behaviour knobs.
    help: unused in this code path -- NOTE(review): presumably consumed by
        the parameter classes elsewhere; confirm.
    """
    # Fix: the deprecation message literal was unterminated in the original
    # line ("...deprecated. See DeprecationWarning, stacklevel=2)"), which
    # broke the syntax; message reconstructed minimally.
    warnings.warn('clize.legacy.make_flag is deprecated.',
                  DeprecationWarning, stacklevel=2)
    kwargs = {}
    kwargs['aliases'] = [util.name_py2cli(alias, kw=True) for alias in names]
    if callable(source):
        # Callable flags run the callback rather than setting an argument.
        return MakeflagFuncParameter(source, takes_argument=takes_argument, **kwargs)
    cls = MakeflagOptionParameter
    kwargs['argument_name'] = source
    kwargs['conv'] = (type or parser.is_true)
    if (not takes_argument):
        # A bare flag just stores True.
        return parser.FlagParameter(value=True, **kwargs)
    kwargs['default'] = default
    kwargs['takes_argument'] = takes_argument
    if ((takes_argument == 1) and (type is int)):
        cls = MakeflagIntOptionParameter
    return cls(**kwargs)
def set_variable_data(save_data: list[int], data: tuple[(dict[(int, int)], dict[(int, int)])]) -> list[int]:
    """Append the two variable tables in *data* to *save_data*.

    Each table is serialized as its entry count followed by key/value pairs.
    The first table's values are variable-length ints; the second table's
    values are written as single bytes. Returns the updated byte list.
    """
    varint_table = data[0]
    byte_table = data[1]
    save_data = write_variable_length_int(save_data, len(varint_table))
    for var_id, var_value in varint_table.items():
        save_data = write_variable_length_int(save_data, var_id)
        save_data = write_variable_length_int(save_data, var_value)
    save_data = write_variable_length_int(save_data, len(byte_table))
    for var_id, var_value in byte_table.items():
        save_data = write_variable_length_int(save_data, var_id)
        save_data = write(save_data, var_value, 1)
    return save_data
class OptionSeriesScatter3dSonificationContexttracksMappingFrequency(Options):
    """Config mapping for scatter3d sonification frequency-mapping options.

    NOTE(review): each option appears twice -- a getter returning
    ``self._config_get(...)`` and a setter calling ``self._config(...)``.
    The ``@property`` / ``@<name>.setter`` decorators look stripped by
    extraction; as written the second def silently shadows the first.
    Confirm against the original source.
    """

    def mapFunction(self):
        """Return the configured mapFunction (default None)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Set the mapFunction option."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Return the data property mapped to frequency (default None)."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Set the data property mapped to frequency."""
        self._config(text, js_type=False)

    def max(self):
        """Return the mapping maximum (default None)."""
        return self._config_get(None)

    def max(self, num: float):
        """Set the mapping maximum."""
        self._config(num, js_type=False)

    def min(self):
        """Return the mapping minimum (default None)."""
        return self._config_get(None)

    def min(self, num: float):
        """Set the mapping minimum."""
        self._config(num, js_type=False)

    def within(self):
        """Return the range the mapping is computed within (default None)."""
        return self._config_get(None)

    def within(self, value: Any):
        """Set the range the mapping is computed within."""
        self._config(value, js_type=False)
class OptionPlotoptionsStreamgraphSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Config mapping for streamgraph sonification pitch-mapping options.

    NOTE(review): each option appears twice -- a getter returning
    ``self._config_get(...)`` and a setter calling ``self._config(...)``.
    The ``@property`` / ``@<name>.setter`` decorators look stripped by
    extraction; as written the second def silently shadows the first.
    Confirm against the original source.
    """

    def mapFunction(self):
        """Return the configured mapFunction (default None)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Set the mapFunction option."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Return the data property mapped to pitch (default 'y')."""
        return self._config_get('y')

    def mapTo(self, text: str):
        """Set the data property mapped to pitch."""
        self._config(text, js_type=False)

    def max(self):
        """Return the highest note of the mapping (default 'c6')."""
        return self._config_get('c6')

    def max(self, text: str):
        """Set the highest note of the mapping."""
        self._config(text, js_type=False)

    def min(self):
        """Return the lowest note of the mapping (default 'c2')."""
        return self._config_get('c2')

    def min(self, text: str):
        """Set the lowest note of the mapping."""
        self._config(text, js_type=False)

    def scale(self):
        """Return the musical scale constraint (default None)."""
        return self._config_get(None)

    def scale(self, value: Any):
        """Set the musical scale constraint."""
        self._config(value, js_type=False)

    def within(self):
        """Return the range the mapping is computed within (default 'yAxis')."""
        return self._config_get('yAxis')

    def within(self, text: str):
        """Set the range the mapping is computed within."""
        self._config(text, js_type=False)
class Events():
    """In-memory index over a list of events, filterable and grouped by user."""

    def __init__(self, events):
        self.unfiltered_events = events
        self.filter_expr = ''
        self._load_events(events)

    def _load_events(self, events):
        # Rebuild the id -> event and username -> events indexes.
        self.events = events
        self.events_by_key = {event.event_id(): event for event in self.events}
        self.events_by_user = defaultdict(list)
        for event in self.events:
            self.events_by_user[event.username()].append(event)

    def all(self):
        """Return the current (possibly filtered) event list."""
        return self.events

    def users(self):
        """Return the usernames that have at least one event."""
        return self.events_by_user.keys()

    def by_user(self, user):
        """Return the events for *user*, or an empty list when unknown."""
        return self.events_by_user.get(user, [])

    def by_id(self, event_id) -> Optional[_UserIdentityType]:
        """Look up a single event by its id; None when absent."""
        return self.events_by_key.get(event_id)

    def filter(self, expr):
        """Re-filter the unfiltered events by *expr*; return the new count.

        An empty/falsy expression restores the full event list.
        """
        if expr:
            events = _filter_events(self.unfiltered_events, expr)
        else:
            events = self.unfiltered_events
        self.filter_expr = expr
        self._load_events(events)
        return len(self.events)
_decorator(removed, 'post')
class TreasuryAppropriationAccountAutocomplete(FilterQuerysetMixin, AutocompleteView):
    """Autocomplete endpoint over treasury appropriation accounts (TAS)."""

    serializer_class = TasSerializer

    def get_queryset(self):
        """Return the eager-loaded, filtered, ordered TAS queryset."""
        base_queryset = TreasuryAppropriationAccount.objects.all()
        base_queryset = self.serializer_class.setup_eager_loading(base_queryset)
        filtered = self.filter_records(self.request, queryset=base_queryset)
        return self.order_records(self.request, queryset=filtered)
class Promise(object):
    """A workflow value that is either resolved (holds a literal) or deferred
    (holds a reference to another node's output).

    NOTE(review): ``is_ready``, ``val``, ``ref``, ``var`` and ``attr_path``
    are used without parentheses throughout (e.g. ``self.is_ready``,
    ``self.val.scalar``, ``self.ref.node``), so they are presumably
    ``@property`` methods whose decorators were stripped in extraction --
    confirm against the original. ``__eq__``/``__ne__`` build comparison
    expressions instead of returning bools, which is why ``__hash__`` is
    explicitly identity-based.
    """

    def __init__(self, var: str, val: Union[(NodeOutput, _literals_models.Literal)]):
        self._var = var
        self._promise_ready = True
        self._val = val
        self._ref = None
        # Attribute/index path accumulated via __getattr__/__getitem__.
        self._attr_path: List[Union[(str, int)]] = []
        if (val and isinstance(val, NodeOutput)):
            # A NodeOutput means the value is not yet available: switch to
            # deferred mode and keep the upstream reference instead.
            self._ref = val
            self._promise_ready = False
            self._val = None

    def __hash__(self):
        # Identity hash: __eq__ does not return a bool, so value-based
        # hashing is not meaningful.
        return hash(id(self))

    def __rshift__(self, other: Union[(Promise, VoidPromise)]):
        """``a >> b``: make the upstream node of *b* run after that of *a*."""
        if ((not self.is_ready) and other.ref):
            self.ref.node.runs_before(other.ref.node)
        return other

    def with_var(self, new_var: str) -> Promise:
        """Return a copy of this promise bound to a different variable name."""
        if self.is_ready:
            return Promise(var=new_var, val=self.val)
        return Promise(var=new_var, val=self.ref)

    def is_ready(self) -> bool:
        """True when the value is resolved (presumably a stripped @property)."""
        return self._promise_ready

    def val(self) -> _literals_models.Literal:
        """The resolved literal, or None when deferred."""
        return self._val

    def ref(self) -> NodeOutput:
        """The upstream node-output reference, or None when resolved."""
        return self._ref

    def var(self) -> str:
        """The output variable name this promise stands for."""
        return self._var

    def attr_path(self) -> List[Union[(str, int)]]:
        """The accumulated attribute/index access path."""
        return self._attr_path

    def eval(self) -> Any:
        """Return the primitive Python value of a resolved scalar promise.

        Raises ValueError for deferred promises or non-primitive literals.
        """
        if ((not self._promise_ready) or (self._val is None)):
            raise ValueError('Cannot Eval with incomplete promises')
        if ((self.val.scalar is None) or (self.val.scalar.primitive is None)):
            raise ValueError('Eval can be invoked for primitive types only')
        return get_primitive_val(self.val.scalar.primitive)

    def is_(self, v: bool) -> ComparisonExpression:
        """Build an equality comparison against boolean *v*."""
        return ComparisonExpression(self, ComparisonOps.EQ, v)

    def is_false(self) -> ComparisonExpression:
        return self.is_(False)

    def is_true(self) -> ComparisonExpression:
        return self.is_(True)

    def is_none(self) -> ComparisonExpression:
        return ComparisonExpression(self, ComparisonOps.EQ, None)

    def __eq__(self, other) -> ComparisonExpression:
        # Comparisons build deferred expressions rather than evaluating.
        return ComparisonExpression(self, ComparisonOps.EQ, other)

    def __ne__(self, other) -> ComparisonExpression:
        return ComparisonExpression(self, ComparisonOps.NE, other)

    def __gt__(self, other) -> ComparisonExpression:
        return ComparisonExpression(self, ComparisonOps.GT, other)

    def __ge__(self, other) -> ComparisonExpression:
        return ComparisonExpression(self, ComparisonOps.GE, other)

    def __lt__(self, other) -> ComparisonExpression:
        return ComparisonExpression(self, ComparisonOps.LT, other)

    def __le__(self, other) -> ComparisonExpression:
        return ComparisonExpression(self, ComparisonOps.LE, other)

    def __bool__(self):
        # Truth-testing a promise is ambiguous at workflow-compile time.
        raise ValueError('Flytekit does not support Unary expressions or performing truth value testing, This is a limitation in python. For Logical `and\\or` use `&\\|` (bitwise) instead')

    def __and__(self, other):
        raise ValueError('Cannot perform Logical AND of Promise with other')

    def __or__(self, other):
        raise ValueError('Cannot perform Logical OR of Promise with other')

    def with_overrides(self, *args, **kwargs):
        """Forward node-level overrides to the upstream node (deferred only)."""
        if (not self.is_ready):
            self.ref.node.with_overrides(*args, **kwargs)
        return self

    def __repr__(self):
        if self._promise_ready:
            return f'Resolved({self._var}={self._val})'
        return f'Promise(node:{self.ref.node_id}.{self._var}.{self.attr_path})'

    def __str__(self):
        return str(self.__repr__())

    def deepcopy(self) -> Promise:
        """Return a copy sharing the ref/val but with an independent attr path."""
        new_promise = Promise(var=self.var, val=self.val)
        new_promise._promise_ready = self._promise_ready
        new_promise._ref = self._ref
        new_promise._attr_path = deepcopy(self._attr_path)
        return new_promise

    def __getitem__(self, key) -> Promise:
        """Index access produces a new promise with *key* appended to the path."""
        return self._append_attr(key)

    def __getattr__(self, key) -> Promise:
        """Attribute access produces a new promise with *key* appended."""
        return self._append_attr(key)

    def _append_attr(self, key) -> Promise:
        new_promise = self.deepcopy()
        new_promise._attr_path.append(key)
        if (new_promise.ref is not None):
            # Keep the upstream reference's attribute path in sync.
            new_promise._ref = new_promise.ref.with_attr(key)
        return new_promise
class NoopDiscoveryService(Service):
    """Discovery stand-in that answers every request with an empty result.

    Useful when peer discovery is disabled: requests on the event bus are
    still serviced so callers do not block, but no candidates are returned.
    """

    def __init__(self, event_bus: EndpointAPI) -> None:
        self.logger = get_logger('p2p.discovery.NoopDiscoveryService')
        self._event_bus = event_bus

    async def handle_get_peer_candidates_requests(self) -> None:
        """Reply to every PeerCandidatesRequest with an empty tuple."""
        async for event in self._event_bus.stream(PeerCandidatesRequest):
            self.logger.debug('Servicing request for more peer candidates')
            (await self._event_bus.broadcast(event.expected_response_type()(tuple()), event.broadcast_config()))

    async def handle_get_random_bootnode_requests(self) -> None:
        """Reply to every RandomBootnodeRequest with an empty tuple."""
        async for event in self._event_bus.stream(RandomBootnodeRequest):
            self.logger.debug('Servicing request for boot nodes')
            (await self._event_bus.broadcast(event.expected_response_type()(tuple()), event.broadcast_config()))

    async def run(self) -> None:
        """Run both request handlers as daemon tasks until shutdown."""
        self.manager.run_daemon_task(self.handle_get_peer_candidates_requests)
        self.manager.run_daemon_task(self.handle_get_random_bootnode_requests)
        (await self.manager.wait_finished())
def resolve_venv_file(rootdir, kind, name, *, checkexists=False, applysuffix=True):
    """Resolve a file or directory inside a virtual-environment layout.

    Args:
        rootdir: the venv root directory.
        kind: subdirectory category ('bin', 'lib', ...). A falsy kind means
            *name* itself is the subdirectory (or the root when name is
            falsy too).
        name: basename to resolve inside the kind directory; may be falsy.
        checkexists: when truthy, verify the resolved path via
            ``_fs.check_file`` and return None when the check fails.
        applysuffix: append the platform executable suffix for kind='bin'.

    Returns:
        The resolved path, *rootdir* itself, or None when checkexists fails.
    """
    if (not kind):
        if (not name):
            return rootdir
        # Treat the bare name as the directory to resolve.
        dirname = name
        name = None
        suffix = ''
    # Fix: these branches were a flat `if/else` after the `not kind` block,
    # so a falsy kind fell into `dirname = kind`, clobbering the name-derived
    # dirname with an empty value. `elif` restores the intended exclusivity.
    elif (kind == 'bin'):
        # Executables live in Scripts\ with .exe on Windows, bin/ elsewhere.
        if (os.name == 'nt'):
            dirname = 'Scripts'
            suffix = '.exe'
        else:
            dirname = 'bin'
            suffix = ''
    else:
        dirname = kind
        suffix = ''
    if name:
        basename = ((name + suffix) if (suffix and applysuffix) else name)
        resolved = os.path.join(rootdir, dirname, basename)
    else:
        resolved = os.path.join(rootdir, dirname)
    if (checkexists and (not _fs.check_file(resolved, expected=checkexists))):
        return None
    return resolved
def update_verkle_tree_nocommitmentupdate(root_node, key, value):
    """Insert *value* at *key* into the verkle trie without updating commitments.

    Walks inner nodes by successive stem bytes; on a missing slot a new
    suffix tree is created in place. When a different stem already occupies
    the slot, a chain of inner nodes is grown until the two stems diverge.
    """
    current_node = root_node
    stem = get_stem(key)
    suffix = get_suffix(key)
    index = None
    depth = 0
    while (current_node['node_type'] == VERKLE_TRIE_NODE_TYPE_INNER):
        previous_node = current_node
        index = stem[depth]
        depth += 1
        if (index in current_node):
            current_node = current_node[index]
        else:
            # Empty slot: drop a fresh suffix tree here and finish.
            current_node[index] = {'node_type': VERKLE_TRIE_NODE_TYPE_SUFFIX_TREE, 'stem': stem, suffix: value}
            return
    if (current_node['stem'] == stem):
        # Same stem: just set the value at this suffix.
        current_node[suffix] = value
    else:
        # Collision with a different stem: push the existing suffix tree down
        # through new inner nodes until the stems' bytes diverge.
        old_suffix_tree = current_node
        old_stem = old_suffix_tree['stem']
        new_inner_node = {'node_type': VERKLE_TRIE_NODE_TYPE_INNER}
        previous_node[index] = new_inner_node
        previous_node = new_inner_node
        while (old_stem[depth] == stem[depth]):
            index = stem[depth]
            new_inner_node = {'node_type': VERKLE_TRIE_NODE_TYPE_INNER}
            previous_node[index] = new_inner_node
            previous_node = new_inner_node
            depth += 1
        # First diverging byte: attach both suffix trees as siblings.
        new_inner_node[stem[depth]] = {'node_type': VERKLE_TRIE_NODE_TYPE_SUFFIX_TREE, 'stem': stem, suffix: value}
        new_inner_node[old_stem[depth]] = old_suffix_tree
class UserFollowGroupListPost(ResourceList):
    """POST endpoint for the current user to follow a group.

    NOTE(review): ``before_post`` takes ``cls`` with no visible
    ``@classmethod`` decorator; it is referenced by name in ``data_layer``
    below, which suggests the framework calls it as a plain hook -- confirm
    against the original.
    """

    def before_post(cls, args, kwargs, data):
        """Reject the request when the user already follows the group."""
        require_relationship(['group'], data)
        data['user'] = current_user.id
        # NOTE(review): the filter mixes group_id=data['group'] with
        # user=current_user (object rather than id) -- presumably matches the
        # model's relationship columns; verify against UserFollowGroup.
        user_follow_group = UserFollowGroup.query.filter_by(group_id=data['group'], user=current_user).first()
        if user_follow_group:
            raise ConflictError({'pointer': '/data/relationships/group'}, 'Group already followed')

    view_kwargs = True
    decorators = (jwt_required,)
    schema = UserFollowGroupSchema
    methods = ['POST']
    data_layer = {'session': db.session, 'model': UserFollowGroup, 'methods': {'before_post': before_post}}
def denumpy(obj, fallback=None):
    """Convert a NumPy scalar to the equivalent builtin Python value.

    Args:
        obj: a value whose dtype family is inspected via ``np.issubdtype``
            (NumPy scalars, or anything with a ``dtype``).
        fallback: optional zero-argument callable invoked (for its side
            effect only) when *obj* matches no known family; *obj* is then
            returned unchanged.
    """
    if np.issubdtype(obj, np.complexfloating):
        return complex(obj)
    if np.issubdtype(obj, np.floating):
        return float(obj)
    if (np.issubdtype(obj, np.signedinteger) or np.issubdtype(obj, np.unsignedinteger)):
        return int(obj)
    if np.issubdtype(obj, np.bool_):
        return bool(obj)
    # Fix: np.unicode_ was removed in NumPy 2.0; on Python 3 it was only an
    # alias of np.str_, so the extra check was redundant and now raises
    # AttributeError whenever this line was reached under NumPy >= 2.
    if np.issubdtype(obj, np.str_):
        return str(obj)
    if (fallback is not None):
        fallback()
    return obj
def test_callback_message_with_re_botcmd_and_alt_prefixes():
    """Regex bot commands must trigger with '!', alternate prefixes, and bare text."""
    dummy_backend = DummyBackend({'BOT_ALT_PREFIXES': ('Err',), 'BOT_ALT_PREFIX_SEPARATORS': (',', ';')})
    # Plain prefixed, alt-prefixed (with and without separator), and bare forms.
    dummy_backend.callback_message(makemessage(dummy_backend, '!regex command with prefix'))
    assert ('Regex command' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, 'Err regex command with prefix'))
    assert ('Regex command' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, 'Err, regex command with prefix'))
    assert ('Regex command' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, 'regex command without prefix'))
    assert ('Regex command' == dummy_backend.pop_message().body)
    # Capture-group commands echo the captured text back.
    dummy_backend.callback_message(makemessage(dummy_backend, '!regex command with capture group: Captured text'))
    assert ('Captured text' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, 'regex command with capture group: Captured text'))
    assert ('Captured text' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, 'This command also allows extra text in front - regex command with capture group: Captured text'))
    assert ('Captured text' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, 'Err, regex command with capture group: Captured text'))
    assert ('Captured text' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, 'Err This command also allows extra text in front - regex command with capture group: Captured text'))
    assert ('Captured text' == dummy_backend.pop_message().body)
    # matchall-style command reports how many times the pattern matched.
    dummy_backend.callback_message(makemessage(dummy_backend, '!match_here'))
    assert ('1' == dummy_backend.pop_message().body)
    dummy_backend.callback_message(makemessage(dummy_backend, '!match_here match_here match_here'))
    assert ('3' == dummy_backend.pop_message().body)
def test_duration_primitive():
    """A duration Primitive exposes only .duration and round-trips via flyte IDL."""
    duration = timedelta(seconds=1)
    obj = literals.Primitive(duration=duration)
    # Every other union member is unset; .value resolves to the duration.
    assert (obj.integer is None)
    assert (obj.boolean is None)
    assert (obj.datetime is None)
    assert (obj.duration == duration)
    assert (obj.float_value is None)
    assert (obj.string_value is None)
    assert (obj.value == duration)
    # Inequality against every other primitive kind and a different duration.
    assert (obj != literals.Primitive(integer=0))
    assert (obj != literals.Primitive(boolean=False))
    assert (obj != literals.Primitive(datetime=datetime.now()))
    assert (obj != literals.Primitive(duration=timedelta(minutes=1)))
    assert (obj != literals.Primitive(float_value=1.0))
    assert (obj != literals.Primitive(string_value='abc'))
    # Serialize to protobuf and back; the result must compare equal and
    # preserve the same field population.
    obj2 = literals.Primitive.from_flyte_idl(obj.to_flyte_idl())
    assert (obj == obj2)
    assert (obj2.integer is None)
    assert (obj2.boolean is None)
    assert (obj2.datetime is None)
    assert (obj2.duration == duration)
    assert (obj2.float_value is None)
    assert (obj2.string_value is None)
    assert (obj2.value == duration)
    assert (obj2 != literals.Primitive(integer=0))
    assert (obj2 != literals.Primitive(boolean=False))
    assert (obj2 != literals.Primitive(datetime=datetime.now()))
    assert (obj2 != literals.Primitive(duration=timedelta(minutes=1)))
    assert (obj2 != literals.Primitive(float_value=1.0))
    assert (obj2 != literals.Primitive(string_value='abc'))
    # A non-timedelta duration must fail IDL serialization.
    with pytest.raises(Exception):
        literals.Primitive(duration=1.0).to_flyte_idl()
def _remove_circular(obj: _typing.Any, refs: (_typing.Set[_typing.Any] | None)=None):
if (refs is None):
refs = set()
if (id(obj) in refs):
return '[CIRCULAR]'
if (not isinstance(obj, (str, int, float, bool, type(None)))):
refs.add(id(obj))
if isinstance(obj, dict):
return {key: _remove_circular(value, refs) for (key, value) in obj.items()}
elif isinstance(obj, list):
return [_remove_circular(value, refs) for (_, value) in enumerate(obj)]
elif isinstance(obj, tuple):
return tuple((_remove_circular(value, refs) for (_, value) in enumerate(obj)))
else:
return obj |
class StrEnumType(DynamicType[T_S]):
    """Codec for string-valued enum instances.

    Serializes as: pickled enum class (length-prefixed) followed by the
    encoded member value (length-prefixed). NOTE(review): ``pickle.loads``
    on the type segment executes arbitrary code if the bytes are untrusted --
    only use with trusted payloads.
    """

    def __init__(self, enum_type: Type[T_S], encoding: str='utf-8') -> None:
        self.enum_type = enum_type
        self.encoding = encoding

    def isinstance(self, obj: Any) -> bool:
        """True when *obj* is an instance of the configured enum type."""
        return isinstance(obj, self.enum_type)

    def description(self) -> str:
        """Human-readable name of the enum type."""
        return self.enum_type.__name__

    def preprocess(self, obj: T_S) -> bytes:
        """Encode an enum member as length-prefixed (type, value) segments."""
        type_ = pickle.dumps(type(obj))
        type_len = get_len_bytes(type_)
        str_ = bytes(obj.value, encoding=self.encoding)
        str_len = get_len_bytes(str_)
        return (((type_len + type_) + str_len) + str_)

    def postprocess(self, obj_bytes: bytes) -> T_S:
        """Decode bytes produced by preprocess back into an enum member."""
        (raw, curr) = get_next_bytes(obj_bytes, 0)
        type_ = pickle.loads(raw)
        (raw, curr) = get_next_bytes(obj_bytes, curr)
        str_ = raw.decode(self.encoding)
        # Reconstruct the member by value lookup on the unpickled class.
        return type_(str_)

    def python_type(self) -> type:
        """The concrete enum class handled by this codec."""
        return self.enum_type
(GatherWebTargets)
class TKOSubsScan(luigi.Task):
    """Luigi task that runs tko-subs over gathered hostnames and records
    subdomain-takeover-vulnerable targets in the database.
    """
    requirements = ['go', 'tko-subs', 'masscan']  # external tools required
    exception = True  # raise (not just warn) when requirements are missing

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.db_mgr = pipeline.models.db_manager.DBManager(db_location=self.db_location)
        self.results_subfolder = (Path(self.results_dir) / 'tkosubs-results').expanduser().resolve()
        self.output_file = (self.results_subfolder / 'tkosubs.csv')

    def requires(self):
        """Depend on web-target gathering after verifying tool availability."""
        meets_requirements(self.requirements, self.exception)
        args = {'results_dir': self.results_dir, 'rate': self.rate, 'target_file': self.target_file, 'top_ports': self.top_ports, 'interface': self.interface, 'ports': self.ports, 'exempt_list': self.exempt_list, 'db_location': self.db_location}
        return GatherWebTargets(**args)

    def output(self):
        """Task completion is tracked via a SQLAlchemy marker, not a file."""
        return SQLAlchemyTarget(connection_string=self.db_mgr.connection_string, target_table='target', update_id=self.task_id)

    def parse_results(self):
        """Parse the tko-subs CSV and flag vulnerable targets in the DB."""
        with open(self.output_file, newline='') as f:
            reader = csv.reader(f)
            next(reader, None)  # skip the CSV header row
            for row in reader:
                domain = row[0]
                is_vulnerable = row[3]
                if ('true' in is_vulnerable.lower()):
                    tgt = self.db_mgr.get_or_create_target_by_ip_or_hostname(domain)
                    tgt.vuln_to_sub_takeover = True
                    self.db_mgr.add(tgt)
            # NOTE(review): output().touch() is called here and again after
            # closing -- the second call below looks redundant; confirm.
            self.output().touch()
            self.db_mgr.close()
        self.output().touch()

    def run(self):
        """Run tko-subs against all known hostnames, then parse its output."""
        self.results_subfolder.mkdir(parents=True, exist_ok=True)
        domains = self.db_mgr.get_all_hostnames()
        if (not domains):
            return
        command = [tools.get('tko-subs').get('path'), f"-domain={','.join(domains)}", f"-data={tools.get('tko-subs').get('providers')}", f'-output={self.output_file}']
        subprocess.run(command)
        self.parse_results()
class TestSenderWithTimeoutMaxSizeNonCircular(unittest.TestCase):
    """Async fluent sender with a bounded, blocking (non-circular) queue:
    emits beyond the max size must block rather than drop, so all five
    records arrive in order.
    """
    Q_SIZE = 3  # queue_maxsize under test

    def setUp(self):
        super(TestSenderWithTimeoutMaxSizeNonCircular, self).setUp()
        self._server = mockserver.MockRecvServer('localhost')
        self._sender = fluent.asyncsender.FluentSender(tag='test', port=self._server.port, queue_maxsize=self.Q_SIZE)

    def tearDown(self):
        # Close the sender first so it flushes before the server goes away.
        try:
            self._sender.close()
        finally:
            self._server.close()

    def get_data(self):
        """Return the records the mock server received."""
        return self._server.get_received()

    def test_simple(self):
        with self._sender as sender:
            # Blocking, non-circular queue of size Q_SIZE.
            self.assertEqual(self._sender.queue_maxsize, self.Q_SIZE)
            self.assertEqual(self._sender.queue_blocking, True)
            self.assertEqual(self._sender.queue_circular, False)
            # Emit more records than the queue holds; all must be accepted.
            ok = sender.emit('foo1', {'bar': 'baz1'})
            self.assertTrue(ok)
            ok = sender.emit('foo2', {'bar': 'baz2'})
            self.assertTrue(ok)
            ok = sender.emit('foo3', {'bar': 'baz3'})
            self.assertTrue(ok)
            ok = sender.emit('foo4', {'bar': 'baz4'})
            self.assertTrue(ok)
            ok = sender.emit('foo5', {'bar': 'baz5'})
            self.assertTrue(ok)
        data = self.get_data()
        eq = self.assertEqual
        print(data)
        # All five records delivered as (tag, timestamp, payload) triples.
        eq(5, len(data))
        eq(3, len(data[0]))
        eq('test.foo1', data[0][0])
        eq({'bar': 'baz1'}, data[0][2])
        self.assertTrue(data[0][1])
        self.assertTrue(isinstance(data[0][1], int))
        eq(3, len(data[2]))
        eq('test.foo3', data[2][0])
        eq({'bar': 'baz3'}, data[2][2])
.no_patch_conf()
def test_sso_config(mocker, tmpdir):
    """With an SSO (authorization_code) config, _headers() must carry the
    mocked OAuth access token and the foundry-dev-tools User-Agent.
    """
    # Fix: the original config literal was garbled in extraction
    # ("'foundry_url': ' 'grant_type': ..."), which was a syntax error.
    # Reconstructed with a placeholder URL -- NOTE(review): confirm the real
    # value against upstream.
    with PatchConfig(config_overwrite={'client_id': '1234', 'foundry_url': 'https://foundry.example.com', 'grant_type': 'authorization_code'}):
        # Drop any cached static token so the OAuth path is exercised.
        if ('jwt' in foundry_dev_tools.config.Configuration):
            del foundry_dev_tools.config.Configuration['jwt']
        client = FoundryRestClient()
        mock_get_user_credentials = mocker.patch('palantir_oauth_client.get_user_credentials')
        mock_get_user_credentials.return_value.token = 'access-token'
        assert (client._headers()['Authorization'] == 'Bearer access-token')
        assert client._headers()['User-Agent'].startswith('foundry-dev-tools')
class OptionSeriesParetoSonificationContexttracksMappingPan(Options):
    """Config mapping for pareto sonification pan-mapping options.

    NOTE(review): each option appears twice -- a getter returning
    ``self._config_get(...)`` and a setter calling ``self._config(...)``.
    The ``@property`` / ``@<name>.setter`` decorators look stripped by
    extraction; as written the second def silently shadows the first.
    Confirm against the original source.
    """

    def mapFunction(self):
        """Return the configured mapFunction (default None)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Set the mapFunction option."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Return the data property mapped to pan (default None)."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Set the data property mapped to pan."""
        self._config(text, js_type=False)

    def max(self):
        """Return the mapping maximum (default None)."""
        return self._config_get(None)

    def max(self, num: float):
        """Set the mapping maximum."""
        self._config(num, js_type=False)

    def min(self):
        """Return the mapping minimum (default None)."""
        return self._config_get(None)

    def min(self, num: float):
        """Set the mapping minimum."""
        self._config(num, js_type=False)

    def within(self):
        """Return the range the mapping is computed within (default None)."""
        return self._config_get(None)

    def within(self, value: Any):
        """Set the range the mapping is computed within."""
        self._config(value, js_type=False)
class Components():
    """Factory for Bootstrap list/select components bound to a report page."""

    def __init__(self, ui):
        self.page = ui.page

    def select(self, records=None, html_code=None, selected=None, width=(100, '%'), height=(None, '%'), profile=None, multiple=False, options=None):
        """Build a Bootstrap <select>, marking *selected* and honouring *multiple*."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        html_but = HtmlBsForms.BsSelect(self.page, [], html_code, (options or {}), profile, {'width': width, 'height': height})
        html_but.init_selected = selected
        if (records is not None):
            for rec in records:
                if (selected is not None):
                    if (rec['value'] == selected):
                        rec['selected'] = True
                html_but.add_option(rec['value'], rec.get('name', rec['value']), selected=rec.get('selected', False))
        if multiple:
            html_but.attr['multiple'] = True
        return html_but

    def dropdown(self, records=None, text='', width=('auto', ''), height=(None, 'px'), html_code=None, options=None, profile=None):
        """Bootstrap dropdown component.

        NOTE(review): the implementation body is missing in this extract
        (the def had no statements); this docstring only keeps the
        definition syntactically valid -- restore the body from upstream.
        """

    def list(self, data=None, width=('auto', ''), height=(None, 'px'), html_code=None, options=None, profile=None):
        """Plain Bootstrap list-group (note: shadows builtin list by design)."""
        options = (options or {})
        options['li_class'] = ['list-group-item']
        component = self.page.web.std.lists.list(data, None, width, height, html_code, None, options, profile)
        component.attr['class'].initialise(['list-group'])
        return component

    def numbers(self, data=None, width=('auto', ''), height=(None, 'px'), html_code=None, options=None, profile=None):
        """Numbered Bootstrap list-group."""
        options = (options or {})
        options['li_class'] = ['list-group-item']
        component = self.page.web.std.lists.list(data, None, width, height, html_code, None, options, profile)
        component.attr['class'].initialise(['list-group', 'list-group-numbered'])
        return component

    def buttons(self, data=None, width=('auto', ''), height=(None, 'px'), html_code=None, options=None, profile=None):
        """List-group of actionable button items.

        NOTE(review): also applies 'list-group-numbered' like numbers() --
        possibly copy-pasted; confirm intended classes.
        """
        options = (options or {})
        options['li_class'] = ['list-group-item', 'list-group-item-action']
        options['item_type'] = 'button'
        component = self.page.web.std.lists.list(data, None, width, height, html_code, None, options, profile)
        component.attr['class'].initialise(['list-group', 'list-group-numbered'])
        return component

    def flush(self, data=None, width=('auto', ''), height=(None, 'px'), html_code=None, options=None, profile=None):
        """Edge-to-edge ('flush') Bootstrap list-group."""
        options = (options or {})
        options['li_class'] = ['list-group-item']
        component = self.page.web.std.lists.list(data, None, width, height, html_code, None, options, profile)
        component.attr['class'].initialise(['list-group', 'list-group-flush'])
        return component

    def badges(self, values, width=('auto', ''), height=(None, 'px'), html_code=None, options=None, profile=None):
        """List-group built from a {label: badge_value} dict, one pill per item."""
        options = (options or {})
        options['li_class'] = ['list-group-item', 'd-flex', 'justify-content-between', 'align-items-center']
        component = self.page.web.std.lists.list([], None, width, height, html_code, None, options, profile)
        for (k, v) in values.items():
            b = self.page.web.bs.images.pill(v)
            li = self.page.web.std.lists.item()
            li.innerPyHTML = [k]
            li.set_html_content(b)
            li.add_style(options['li_class'], clear_first=True)
            component.add(li)
        component.attr['class'].initialise(['list-group'])
        return component

    def checks(self, data=None, width=('auto', ''), height=(None, 'px'), html_code=None, options=None, profile=None):
        """List-group of checkbox items, one per entry in *data*.

        NOTE(review): iterating *data* with the default None raises TypeError;
        presumably callers always pass a list -- confirm.
        """
        options = (options or {})
        options['li_class'] = ['list-group-item']
        options['item_type'] = 'button'
        component = self.page.web.std.lists.list([], None, width, height, html_code, None, options, profile)
        for value in data:
            check = self.page.web.bs.check(label=value)
            check.add_style(['form-check-input', 'me-1'], clear_first=True)
            li = self.page.web.std.lists.item('', tag='label')
            li.set_html_content(check)
            component.add(li)
        component.attr['class'].initialise(['list-group'])
        return component

    def datalist(self, records=None, html_code=None, selected=None, width=(100, '%'), height=(None, '%'), profile=None, options=None):
        """Build a <datalist>-backed input with the given option records."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsForms.BsDataList(self.page, [], html_code, (options or {}), profile, {'width': width, 'height': height})
        component.init_selected = selected
        if (records is not None):
            for rec in records:
                if (selected is not None):
                    if (rec['value'] == selected):
                        rec['selected'] = True
                component.add_option(rec['value'], rec.get('name', rec['value']), selected=rec.get('selected', False))
        # Link the input to its generated <datalist> element by id.
        component.attr['list'] = ('%sOptions' % component.htmlCode)
        return component
def normalize_name_ensip15(name: str) -> ENSNormalizedName:
    """Normalize an ENS name per ENSIP-15 and return the tokenized labels.

    Splits on '.', then tokenizes each label into emoji and text tokens:
    the longest emoji sequence is matched greedily at each position; other
    codepoints are ignored, mapped, or validated against the allowed set.
    Raises InvalidName for empty names/labels or disallowed characters.
    """
    if (not name):
        raise InvalidName('Name cannot be empty')
    elif isinstance(name, (bytes, bytearray)):
        name = name.decode('utf-8')
    raw_labels = name.split('.')
    if any(((len(label) == 0) for label in raw_labels)):
        raise InvalidName('Labels cannot be empty')
    normalized_labels = []
    for label_str in raw_labels:
        _input = [ord(c) for c in label_str]
        buffer: List[int] = []
        tokens: List[Token] = []
        while (len(_input) > 0):
            emoji_codepoint = None
            end_index = 1
            # Greedily grow a candidate emoji sequence from the front of the
            # remaining input, remembering the longest known match.
            while (end_index <= len(_input)):
                current_emoji_sequence = _input[:end_index]
                if (len(current_emoji_sequence) > MAX_LEN_EMOJI_PATTERN):
                    break
                elif (65039 in current_emoji_sequence):
                    # 0xFE0F (variation selector-16) is stripped from both
                    # the candidate and the input before matching.
                    current_emoji_sequence.remove(65039)
                    _input.remove(65039)
                    end_index -= 1
                if (current_emoji_sequence in NORMALIZATION_SPEC['emoji']):
                    emoji_codepoint = current_emoji_sequence
                end_index += 1
            if emoji_codepoint:
                # Flush any pending text before emitting the emoji token.
                if (len(buffer) > 0):
                    tokens.append(TextToken(buffer))
                    buffer = []
                tokens.append(EmojiToken(emoji_codepoint))
                _input = _input[len(emoji_codepoint):]
            else:
                # Not an emoji: classify the leading codepoint.
                leading_codepoint = _input.pop(0)
                if (leading_codepoint in NORMALIZATION_SPEC['ignored']):
                    pass
                elif (leading_codepoint in NORMALIZATION_SPEC['mapped']):
                    mapped = NORMALIZATION_SPEC['mapped'][leading_codepoint]
                    for cp in mapped:
                        buffer.append(cp)
                elif (leading_codepoint in VALID_CODEPOINTS):
                    buffer.append(leading_codepoint)
                else:
                    raise InvalidName(f"Invalid character: '{chr(leading_codepoint)}' | codepoint {leading_codepoint} ({hex(leading_codepoint)})")
            # Flush trailing text once the input is exhausted.
            if ((len(buffer) > 0) and (len(_input) == 0)):
                tokens.append(TextToken(buffer))
        normalized_label = _build_and_validate_label_from_tokens(tokens)
        normalized_labels.append(normalized_label)
    return ENSNormalizedName(normalized_labels)
class OptionPlotoptionsScatterSonificationContexttracksActivewhen(Options):
    """Config mapping for scatter sonification activeWhen options.

    NOTE(review): each option appears twice -- a getter returning
    ``self._config_get(...)`` and a setter calling ``self._config(...)``.
    The ``@property`` / ``@<name>.setter`` decorators look stripped by
    extraction; as written the second def silently shadows the first.
    Confirm against the original source.
    """

    def crossingDown(self):
        """Return the crossing-down activation threshold (default None)."""
        return self._config_get(None)

    def crossingDown(self, num: float):
        """Set the crossing-down activation threshold."""
        self._config(num, js_type=False)

    def crossingUp(self):
        """Return the crossing-up activation threshold (default None)."""
        return self._config_get(None)

    def crossingUp(self, num: float):
        """Set the crossing-up activation threshold."""
        self._config(num, js_type=False)

    def max(self):
        """Return the maximum of the active range (default None)."""
        return self._config_get(None)

    def max(self, num: float):
        """Set the maximum of the active range."""
        self._config(num, js_type=False)

    def min(self):
        """Return the minimum of the active range (default None)."""
        return self._config_get(None)

    def min(self, num: float):
        """Set the minimum of the active range."""
        self._config(num, js_type=False)

    def prop(self):
        """Return the point property the condition applies to (default None)."""
        return self._config_get(None)

    def prop(self, text: str):
        """Set the point property the condition applies to."""
        self._config(text, js_type=False)
def extractRookietranslationWordpressCom(item):
    """Map a rookietranslation.wordpress.com feed item to a release message.

    Returns None for previews / untagged chapter numbers, False when no
    known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items carrying neither a chapter nor a volume number.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    release_map = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tag, (series, kind) in release_map.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
def main():
    """CLI entry point for the Kr00k (CVE-2019-15126) test tool.

    Parses arguments, starts the optional Wireshark sink, then either
    sniffs live on an interface or replays a capture file, forwarding
    every frame to handle_pkt.
    """
    parser = argparse.ArgumentParser(description='Kr00k (CVE-2019-15126) testing script by ESET Research')
    source = parser.add_mutually_exclusive_group(required=True)
    source.add_argument('-i', '--interface', help='Wireless interface to use for live attack')
    source.add_argument('-f', '--file', help='PCAP file to use for offline analysis')
    parser.add_argument('-v', '--victim', help='MAC address of client (victim) to disassociate', type=mac_addr)
    parser.add_argument('-b', '--bssid', help='BSSID of AP victim is connected to', type=mac_addr)
    parser.add_argument('-e', '--essid', help='ESSID of AP victim is connected to (for auto-detection of BSSID)')
    args = parser.parse_args()
    # Mutable counters shared with handle_pkt via the args namespace.
    args.disassoc_next = datetime.fromtimestamp(0)
    args.disassoc_attempts = 0
    args.disassoc_confirm = 0
    args.disassoc_miss = 0
    args.dec_pkts = 0
    # Best-effort Wireshark pipe; run without it when unavailable.
    try:
        args.wireshark = WiresharkSink()
        args.wireshark.start()
        time.sleep(1)
    except OSError:
        args.wireshark = None
    if args.interface is not None:
        if args.victim is None:
            write('WARNING: No victim MAC address (-v) specified, using passive monitoring mode')
        elif args.bssid is None and args.essid is None:
            write('WARNING: No BSSID (-b) or ESSID specified (-e), using passive monitoring mode')
        elif args.bssid is None:
            write('Waiting for beacon from network %s to detect BSSID' % args.essid)
    filter = '\n type data subtype data or\n type data subtype qos-data or\n type mgt subtype assoc-req or\n type mgt subtype beacon\n '
    callback = partial(handle_pkt, args)
    if args.interface:
        sniff(iface=args.interface, store=0, filter=filter, prn=callback)
    else:
        sniff(offline=args.file, store=0, prn=callback)
def extractWysteriatsWordpressCom(item):
    """Map a wysteriats.wordpress.com feed item to a release message.

    Returns None for previews / untagged chapter numbers, False when the
    single known tag is absent, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Single-series site: only one tag is recognized.
    if 'Aogan no Madoushi' in item['tags']:
        return buildReleaseMessageWithType(item, 'Aogan no Madoushi', vol, chp, frag=frag, postfix=postfix, tl_type='translated')
    return False
(frozen=True)
# NOTE(review): the bare "(frozen=True)" above looks like a stripped
# @dataclass(frozen=True) decorator - verify against the original file.
class Credentials(Entry):
    """Password-manager entry whose secrets are fetched via the `rbw` CLI.

    NOTE(review): `username`, `name`, and `folder` are referenced below but
    not declared here - presumably inherited from Entry; confirm.
    """
    password: Optional[str] = ''  # '' when no password is stored
    has_totp: bool = False  # True when the entry carries a TOTP secret
    notes: Optional[str] = ''
    uris: List[str] = field(default_factory=list)
    further: Dict[(str, str)] = field(default_factory=dict)  # extra custom fields by name
    def __getitem__(self, target: Target) -> Optional[Union[(str, List[str])]]:
        """Return the value backing *target*: username, password, totp, notes,
        a URI by index, or a custom field looked up by raw name."""
        if (target == Targets.USERNAME):
            return self.username
        elif (target == Targets.PASSWORD):
            return self.password
        elif (target == Targets.TOTP):
            # NOTE(review): returns self.totp without calling it; totp below
            # is a method - presumably a stripped @property. Confirm.
            return self.totp
        elif (target == Targets.NOTES):
            return self.notes
        elif target.is_uri():
            return self.uris[target.uri_index()]
        else:
            # Custom fields are keyed without the " (field)" suffix.
            return self.further.get(target.raw.removesuffix(' (field)'), None)
    def totp(self):
        """Return the current TOTP code via `rbw code`, or '' when absent."""
        if (not self.has_totp):
            return ''
        command = ['rbw', 'code', self.name]
        if self.username:
            command.extend([self.username])
        if self.folder:
            command.extend(['--folder', self.folder])
        return run(command, capture_output=True, encoding='utf-8').stdout.strip()
.usefixtures('use_tmpdir')
# NOTE(review): ".usefixtures(...)" above looks like a stripped
# @pytest.mark.usefixtures decorator - confirm against the original file.
def test_loading_of_eclipse_configurations(monkeypatch):
    """Exercise Ecl100Config loading end to end: missing site config,
    invalid yaml, env-var interpolation in the mpi env, and executable
    validation across several configured versions."""
    source_file = inspect.getsourcefile(ecl_config.Ecl100Config)
    assert (source_file is not None)
    ecl_config_path = os.path.dirname(source_file)
    # A nonexistent site config must raise OSError.
    monkeypatch.setenv('ECL100_SITE_CONFIG', 'file/does/not/exist')
    with pytest.raises(OSError):
        conf = ecl_config.Ecl100Config()
    # The config bundled next to the module must load cleanly.
    monkeypatch.setenv('ECL100_SITE_CONFIG', os.path.join(ecl_config_path, 'ecl100_config.yml'))
    conf = ecl_config.Ecl100Config()
    # Broken yaml must raise a ValueError naming the file.
    with open('file.yml', 'w', encoding='utf-8') as f:
        f.write('this:\n -should\n-be\ninvalid:yaml?')
    monkeypatch.setenv('ECL100_SITE_CONFIG', 'file.yml')
    with pytest.raises(ValueError, match='Failed parse: file.yml as yaml'):
        conf = ecl_config.Ecl100Config()
    # Create dummy executable files referenced by the mocked config below.
    scalar_exe = 'bin/scalar_exe'
    mpi_exe = 'bin/mpi_exe'
    mpi_run = 'bin/mpi_run'
    os.mkdir('bin')
    for f in ['scalar_exe', 'mpi_exe', 'mpi_run']:
        fname = os.path.join('bin', f)
        with open(fname, 'w', encoding='utf-8') as fh:
            fh.write('This is an exectable ...')
        os.chmod(fname, stat.S_IEXEC)
    intel_path = 'intel'
    # ENV1/ENV2 feed the $ENV1/$ENV2 interpolation checked further down.
    monkeypatch.setenv('ENV1', 'A')
    monkeypatch.setenv('ENV2', 'C')
    mocked_simulator_config = {ecl_config.Keys.env: {'LICENSE_SERVER': ''}, ecl_config.Keys.versions: {'2015': {ecl_config.Keys.scalar: {ecl_config.Keys.executable: scalar_exe}, ecl_config.Keys.mpi: {ecl_config.Keys.executable: mpi_exe, ecl_config.Keys.mpirun: mpi_run, ecl_config.Keys.env: {'I_MPI_ROOT': '$ENV1:B:$ENV2', 'TEST_VAR': '$ENV1.B.$ENV2 $UNKNOWN_VAR', 'P4_RSHCOMMAND': '', 'LD_LIBRARY_PATH': f'{intel_path}:$LD_LIBRARY_PATH', 'PATH': f'{intel_path}/bin64:$PATH'}}}, '2016': {ecl_config.Keys.scalar: {ecl_config.Keys.executable: '/does/not/exist'}, ecl_config.Keys.mpi: {ecl_config.Keys.executable: '/does/not/exist', ecl_config.Keys.mpirun: mpi_run}}, '2017': {ecl_config.Keys.mpi: {ecl_config.Keys.executable: mpi_exe, ecl_config.Keys.mpirun: '/does/not/exist'}}}}
    with open('file.yml', 'w', encoding='utf-8') as filehandle:
        filehandle.write(yaml.dump(mocked_simulator_config))
    conf = ecl_config.Ecl100Config()
    # Unknown version -> KeyError; missing/unexecutable binaries -> OSError.
    with pytest.raises(KeyError):
        sim = conf.sim('2020')
    with pytest.raises(OSError):
        sim = conf.sim('2016')
    with pytest.raises(OSError):
        sim = conf.mpi_sim('2016')
    with pytest.raises(OSError):
        sim = conf.mpi_sim('2017')
    with pytest.raises(KeyError):
        sim = conf.sim('2017')
    sim = conf.sim('2015')
    mpi_sim = conf.mpi_sim('2015')
    # Env interpolation: $ENV1/$ENV2 substituted, unknown vars left as-is.
    assert ('LICENSE_SERVER' in mpi_sim.env)
    assert (mpi_sim.env['I_MPI_ROOT'] == 'A:B:C')
    assert (mpi_sim.env['TEST_VAR'] == 'A.B.C $UNKNOWN_VAR')
    assert (len(mpi_sim.env) == (1 + 5))
    sim = conf.sim('2015')
    assert (sim.executable == scalar_exe)
    assert (sim.mpirun is None)
    # Strict collection fails on the broken 2016/2017 entries; non-strict
    # drops them and returns only the usable simulators.
    with pytest.raises(OSError, match="The executable: '/does/not/exist' can not be executed by user"):
        simulators = conf.simulators()
    simulators = conf.simulators(strict=False)
    assert (len(simulators) == 2)
def open_file(file: PathNameTypes, mode: str='r', buffering: int=(- 1), encoding: Optional[str]=None, errors: Optional[str]=None) -> TextIO:
    """Open *file* for text access, dispatching on whether it is a
    pathlib.Path or a plain path name.

    Raises ValueError when a binary mode is requested.
    """
    if 'b' in mode:
        raise ValueError('This function can only work in text mode.')
    opener = _open_file_pathlib if isinstance(file, Path) else _open_file_builtin
    return opener(file, mode=mode, buffering=buffering, encoding=encoding, errors=errors)
class OptionSeriesLineSonificationContexttracksMappingTremoloDepth(Options):
    """Generated option wrapper for
    series.line.sonification.contextTracks.mapping.tremolo.depth.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; presumably @property/@<name>.setter decorators were stripped
    during extraction - confirm against the code generator output.
    """
    def mapFunction(self):
        return self._config_get(None)  # default: None (unset)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)  # default: None (unset)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)  # default: None (unset)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)  # default: None (unset)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)  # default: None (unset)
    def within(self, value: Any):
        self._config(value, js_type=False)
def flash(psl: pslab.ScienceLab, hexfile: str):
    """Flash *hexfile* onto a PSLab board through its mcbootflash bootloader.

    Prints progress to stdout; returns early (with a message) when the
    device is not in bootloader mode.
    """
    # A 1 Mbaud link gets a longer timeout before entering the bootloader.
    if psl.interface.baudrate == 1000000:
        psl.interface.timeout = 5
    psl.enter_bootloader()
    try:
        bootattrs = mcbootflash.get_boot_attrs(psl)
    except struct.error:
        print('Flashing failed: PSLab is not in bootloader mode.')
        return
    mcbootflash.erase_flash(psl, bootattrs.memory_range, bootattrs.erase_size)
    total_bytes, chunks = mcbootflash.chunked(hexfile, bootattrs)
    written = 0
    for segment in chunks:
        mcbootflash.write_flash(psl, segment)
        mcbootflash.checksum(psl, segment)
        written += len(segment.data)
        # Carriage return keeps the progress line in place.
        print(f'{written}/{total_bytes} bytes flashed.', end='\r')
    print('', end='\n')
    mcbootflash.self_verify(psl)
    mcbootflash.reset(psl)
class OptionSeriesWordcloudSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Generated option wrapper for
    series.wordcloud.sonification.defaultInstrumentOptions.activeWhen.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; presumably @property/@<name>.setter decorators were stripped
    during extraction - confirm against the code generator output.
    """
    def crossingDown(self):
        return self._config_get(None)  # default: None (unset)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)  # default: None (unset)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)  # default: None (unset)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)  # default: None (unset)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)  # default: None (unset)
    def prop(self, text: str):
        self._config(text, js_type=False)
def extractHaitangcnWordpressCom(item):
    """Map a haitangcn.wordpress.com feed item to a release message.

    Returns None for previews / untagged chapter numbers, False when no
    known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tag, series, kind in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
class MapTestCase(unittest.TestCase):
    """End-to-end tests for svgis.map: CSS style injection and viewBox
    geometry against a checked-in SVG fixture."""
    # Lambert conformal conic ('+proj=lcc') parameters used as the test CRS.
    projection = '+proj=lcc +lat_1=20 +lat_2=60 +lat_0=40 +lon_0=-96 +x_0=0 +y_0=0 +datum=NAD83 +units=m +no_defs'
    bounds = ((- 124.0), 20.5, (- 64.0), 49.0)
    fixture = 'tests/fixtures/test.svg'
    shp = 'tests/fixtures/cb_2014_us_nation_20m.json'
    css = 'polygon{fill:green}'
    def testMapWithStyle(self):
        """CSS must be embedded whether given inline or as a file path."""
        result = svgis.map(self.shp, style=self.css, scale=1000, crs=self.projection, bounds=self.bounds, clip=None)
        self.assertIn(self.css, result)
        style = 'tmp.css'
        with open(style, 'w') as w:
            w.write(self.css)
        try:
            result = svgis.map(self.shp, style=[style], scale=1000, crs=self.projection, bounds=self.bounds, clip=None)
            self.assertIn(self.css, result)
        finally:
            os.remove('tmp.css')
    def testMap(self):
        """Generated viewBox must match the fixture to 5 decimal places."""
        a = svgis.map(self.shp, scale=1000, crs=self.projection, bounds=self.bounds, clip=False)
        with open('a.svg', 'w') as A:
            A.write(a)
        try:
            result = minidom.parseString(a).getElementsByTagName('svg').item(0)
            fixture = minidom.parse(self.fixture).getElementsByTagName('svg').item(0)
            result_vb = [float(x) for x in result.attributes.get('viewBox').value.split(',')]
            fixture_vb = [float(x) for x in fixture.attributes.get('viewBox').value.split(',')]
            for (r, f) in zip(result_vb, fixture_vb):
                self.assertAlmostEqual(r, f, 5, 'viewbox doesnt match fixture')
        finally:
            os.remove('a.svg')
    def testMapProjFile(self):
        """Same viewBox check, but with the CRS read from a .proj4 file."""
        a = svgis.map(self.shp, scale=1000, crs='tests/fixtures/test.proj4', bounds=self.bounds, clip=False)
        result = minidom.parseString(a).getElementsByTagName('svg').item(0)
        fixture = minidom.parse(self.fixture).getElementsByTagName('svg').item(0)
        result_vb = [float(x) for x in result.attributes.get('viewBox').value.split(',')]
        fixture_vb = [float(x) for x in fixture.attributes.get('viewBox').value.split(',')]
        for (r, f) in zip(result_vb, fixture_vb):
            self.assertAlmostEqual(r, f, 5)
def extractTrackestBlog(item):
    """Map a trackest blog feed item to a release message.

    Returns None for previews / untagged chapter numbers, False when no
    known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    match = next(((series, kind) for tag, series, kind in known if tag in item['tags']), None)
    if match is not None:
        series, kind = match
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
_index_loaded
# NOTE(review): the bare "_index_loaded" above looks like a stripped
# decorator (e.g. a guard requiring the index) - confirm against the original.
def search_by_tags(query: str):
    """Run a tag-equality search for *query* and render results in the UI."""
    index = get_index()
    # The stamp travels with the results so the UI can discard stale ones.
    stamp = utility.misc.get_milisec_stamp()
    UI.latest = stamp
    index.lastSearch = (query, ['-1'], 'tags')
    res = findBySameTag(query, index.limit, [], index.pinned)
    UI.print_search_results(['Anki', 'Tag', query], res['result'], stamp, UI._editor)
_meter_band_type(ofproto.OFPMBT_DROP, ofproto.OFP_METER_BAND_DROP_SIZE)
# NOTE(review): the call above looks like a stripped class decorator that
# registers this band type/length - confirm against the original file.
class OFPMeterBandDrop(OFPMeterBandHeader):
    """OpenFlow meter band of type DROP, carrying rate and burst_size."""
    def __init__(self, rate=0, burst_size=0, type_=None, len_=None):
        # type_/len_ accepted for parser symmetry; fixed by the class itself.
        super(OFPMeterBandDrop, self).__init__()
        self.rate = rate
        self.burst_size = burst_size
    def serialize(self, buf, offset):
        """Pack this band into *buf* at *offset* using the DROP wire format."""
        msg_pack_into(ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset, self.type, self.len, self.rate, self.burst_size)
    def parser(cls, buf, offset):
        # NOTE(review): takes `cls` - presumably a stripped @classmethod.
        (type_, len_, rate, burst_size) = struct.unpack_from(ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset)
        assert (cls.cls_meter_band_type == type_)
        assert (cls.cls_meter_band_len == len_)
        return cls(rate, burst_size)
def extractThevermilliontranslationsWordpressCom(item):
    """Map a thevermilliontranslations.wordpress.com feed item to a release
    message.

    Returns None for previews / untagged chapter numbers, False when no
    known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    series_by_tag = {
        'i will only fuck you': ('I only want to fuck you', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series, kind) in series_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
    return False
def configure_postgres_connection(key: str, host: str, port: int, dbname: str, username: str, password: str):
    """Set the connection secrets on an existing fidesops postgres connection.

    key -- connection key of the already-created connection config;
    the remaining parameters are the postgres DSN pieces, forwarded verbatim.

    Returns the parsed JSON response on success.
    Raises RuntimeError when the HTTP call fails or the secrets test fails.
    """
    connection_secrets_data = {
        'host': host,
        'port': port,
        'dbname': dbname,
        'username': username,
        'password': password,
    }
    connection_secrets_path = ops_urls.CONNECTION_SECRETS.format(connection_key=key)
    url = f'{FIDESOPS_V1_API_URL}{connection_secrets_path}'
    response = requests.put(url, headers=oauth_header, json=connection_secrets_data)
    # fidesops test-runs the secrets; 'failed' means they did not work.
    if response.ok and response.json()['test_status'] != 'failed':
        # Fixed log message: previously read "... secrets for via {url}".
        logger.info(f'Configured fidesops postgres connection secrets via {url}')
        return response.json()
    raise RuntimeError(f'fidesops connection configuration failed! response.status_code={response.status_code}, response.json()={response.json()}')
class OptionPlotoptionsHeatmapSonificationContexttracksMappingLowpassResonance(Options):
    """Generated option wrapper for
    plotOptions.heatmap.sonification.contextTracks.mapping.lowpass.resonance.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; presumably @property/@<name>.setter decorators were stripped
    during extraction - confirm against the code generator output.
    """
    def mapFunction(self):
        return self._config_get(None)  # default: None (unset)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)  # default: None (unset)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)  # default: None (unset)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)  # default: None (unset)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)  # default: None (unset)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TxFilter(object):
    """Predicate object selecting transactions by chain position, txid and
    source/destination addresses.

    Keyword parameters (all optional):
        min_epoch/max_epoch, min_slot/max_slot,
        min_absolute_slot/max_absolute_slot, min_height/max_height
            -- inclusive bounds on the block position of in-ledger txns
        unconfirmed (default False) -- accept mempool (not-in-ledger) txns
        confirmed (default True) -- accept in-ledger txns
        txid -- a single txid or an iterable of txids to accept
        src_addr / dest_addr -- address or iterable of addresses; at least
            one must appear among the tx inputs / outputs respectively

    Raises ValueError for unrecognized keyword arguments.
    """
    def __init__(self, **filterparams):
        self.min_epoch = filterparams.pop('min_epoch', None)
        self.max_epoch = filterparams.pop('max_epoch', None)
        self.min_slot = filterparams.pop('min_slot', None)
        self.max_slot = filterparams.pop('max_slot', None)
        self.min_absolute_slot = filterparams.pop('min_absolute_slot', None)
        self.max_absolute_slot = filterparams.pop('max_absolute_slot', None)
        self.min_height = filterparams.pop('min_height', None)
        self.max_height = filterparams.pop('max_height', None)
        self.unconfirmed = filterparams.pop('unconfirmed', False)
        self.confirmed = filterparams.pop('confirmed', True)
        _txid = filterparams.pop('txid', None)
        _src_addr = filterparams.pop('src_addr', None)
        _dest_addr = filterparams.pop('dest_addr', None)
        if (len(filterparams) > 0):
            raise ValueError('Excessive arguments for payment query: {}'.format(filterparams))
        # True when any chain-position bound was requested; mempool txns can
        # never satisfy such a bound since they have no block position.
        self._asks_chain_position = any(map((lambda x: (x is not None)), (self.min_epoch, self.max_epoch, self.min_slot, self.max_slot, self.min_absolute_slot, self.max_absolute_slot, self.min_height, self.max_height)))
        if (self.unconfirmed and self._asks_chain_position):
            warnings.warn('Blockchain position filtering ({max,min}_{epoch,slot,block}) has been requested while also asking for transactions not in ledger. These are mutually exclusive. As mempool transactions have no height at all, they will be excluded from the result.', RuntimeWarning)
        self.src_addrs = self._get_addrset(_src_addr)
        self.dest_addrs = self._get_addrset(_dest_addr)
        if (_txid is None):
            self.txids = []
        else:
            if isinstance(_txid, (bytes, str)):
                txids = [_txid]
            else:
                iter(_txid)  # raise TypeError early for non-iterables
                txids = _txid
            self.txids = list(map(validate_txid, txids))
    def _get_addrset(self, addr):
        """Normalize a single address or iterable of addresses into a set."""
        if (addr is None):
            return set()
        else:
            if isinstance(addr, (str, bytes)):
                addrs = [addr]
            else:
                try:
                    iter(addr)
                    addrs = addr
                except TypeError:
                    addrs = [addr]
            return set(map(address, addrs))
    def check(self, tx):
        """Return True when *tx* passes every configured criterion."""
        # In-ledger txns carry a position; mempool txns carry None.
        assert (((tx.status == 'in_ledger') and (tx.inserted_at is not None)) or ((tx.status != 'in_ledger') and (tx.inserted_at is None)))
        ht = tx.inserted_at
        if (ht is None):
            # Mempool transaction.
            if (not self.unconfirmed):
                return False
            if self._asks_chain_position:
                return False
        else:
            if (not self.confirmed):
                return False
        # When ht is None we returned above unless no bound is set, so the
        # bound checks below never dereference a None position.
        if ((self.min_epoch is not None) and (ht.epoch < self.min_epoch)):
            return False
        if ((self.max_epoch is not None) and (ht.epoch > self.max_epoch)):
            return False
        if ((self.min_slot is not None) and (ht.slot < self.min_slot)):
            return False
        if ((self.max_slot is not None) and (ht.slot > self.max_slot)):
            return False
        if ((self.min_absolute_slot is not None) and (ht.absolute_slot < self.min_absolute_slot)):
            return False
        if ((self.max_absolute_slot is not None) and (ht.absolute_slot > self.max_absolute_slot)):
            return False
        if ((self.min_height is not None) and (ht.height < self.min_height)):
            return False
        if ((self.max_height is not None) and (ht.height > self.max_height)):
            return False
        if (self.txids and (tx.txid not in self.txids)):
            return False
        srcs = set(filter(None, map(operator.attrgetter('address'), tx.inputs)))
        # BUGFIX: destinations were previously collected from tx.inputs,
        # making any dest_addr filter match against the *sources* instead.
        dests = set(filter(None, map(operator.attrgetter('address'), tx.outputs)))
        if (self.src_addrs and (not self.src_addrs.intersection(srcs))):
            return False
        if (self.dest_addrs and (not self.dest_addrs.intersection(dests))):
            return False
        return True
    def filter(self, txns):
        """Return the matching transactions sorted by chain height."""
        return sorted(filter(self.check, txns), key=_ByHeight)
def exposed_retransmit_nu_releases(all_releases=False):
    """Re-transmit NU header releases.

    By default only releases from the last 24 hours are re-sent; pass a
    truthy all_releases to re-send everything.
    """
    header = Misc.NuForwarder.NuHeader.NuHeader()
    print(header)
    if all_releases is False:
        cutoff = datetime.datetime.now() - datetime.timedelta(days=1)
        header.transmit_since(earliest=cutoff)
    else:
        header.transmit_since()
class TestEllipticRedistancing(object):
    """Regression tests for the ELLIPTIC_REDISTANCING option (values 0-3)
    of the redistancing (rdls) model on the 2D vortex problem.

    Each test runs the same ncls+rdls solution with a different value of
    vortex2D.ct.ELLIPTIC_REDISTANCING and checks that max(u) at t1 is
    numerically zero. The four cases previously duplicated ~30 lines each;
    the shared driver now lives in _run_case.
    """
    def setup_class(cls):
        pass
    def teardown_class(cls):
        pass
    def setup_method(self, method):
        # Kept for parity with sibling suites; records this file's directory.
        self._scriptdir = os.path.dirname(__file__)
    def teardown_method(self, method):
        pass
    def _run_case(self, elliptic_option, suffix, reload_defaults=False, log_max=False):
        """Run one redistancing case and assert max(u_t1) ~ 0.

        elliptic_option -- value assigned to vortex2D.ct.ELLIPTIC_REDISTANCING
        suffix          -- appended to the .so name; also names the .h5 output
        reload_defaults -- also reload default_p/default_so (case 2 did this)
        log_max         -- log max(u_t1) via Profiling before asserting
        """
        vortex2D.ct.ELLIPTIC_REDISTANCING = elliptic_option
        if reload_defaults:
            reload(default_p)
            reload(default_so)
        # The problem modules read ct.ELLIPTIC_REDISTANCING at import time,
        # hence the reload after changing the flag.
        reload(rdls_p)
        reload(vortex2D_so)
        pnList = [(ncls_p, ncls_n), (rdls_p, rdls_n)]
        self.so = vortex2D_so
        pList = []
        nList = []
        for (pModule, nModule) in pnList:
            pList.append(pModule)
            if pList[-1].name is None:
                pList[-1].name = pModule.__name__
            nList.append(nModule)
        sList = [default_s for _ in pnList]
        self.so.name += suffix
        # Direct LU (superlu) solves for both models.
        petsc_options = PETSc.Options()
        for model in ('ncls', 'rdls'):
            petsc_options.setValue(model + '_pc_type', 'lu')
            petsc_options.setValue(model + '_ksp_type', 'preonly')
            petsc_options.setValue(model + '_pc_factor_mat_solver_package', 'superlu')
        ns = proteus.NumericalSolution.NS_base(self.so, pList, nList, sList, opts)
        ns.calculateSolution('rdls')
        actual = tables.open_file('vortex_c0p1_level_1' + suffix + '.h5', 'r')
        try:
            # try/finally ensures the h5 file is closed even when the
            # assertion fails (the old code leaked it on failure).
            if log_max:
                Profiling.logEvent(('max u' + repr(np.amax(actual.root.u_t1))))
            assert np.isclose(np.amax(actual.root.u_t1), 0., atol=1e-10)
        finally:
            actual.close()
    def test_ELLIPTIC_REDISTANCING_0(self):
        self._run_case(0, '_ELLIPTIC_REDIST_0')
    def test_ELLIPTIC_REDISTANCING_1(self):
        self._run_case(1, '_ELLIPTIC_REDIST_1')
    def test_ELLIPTIC_REDISTANCING_2(self):
        self._run_case(2, '_ELLIPTIC_REDIST_2', reload_defaults=True, log_max=True)
    def test_ELLIPTIC_REDISTANCING_3(self):
        self._run_case(3, '_ELLIPTIC_REDIST_3')
def test_address_assignments_not_propagated():
    """Expression propagation must not fold an address-of assignment past a
    call that may write through the pointer: only the direct copy x1 = x0 is
    substituted into the scanf call; the &z0 assignment and the aliased z
    copies stay untouched.

    (Removed an unused `c = const(10)` local left over from an earlier
    version of this test.)
    """
    x = vars('x', 6)
    z = vars('z', 6, aliased=True)
    instructions = [
        _assign(x[0], _addr(z[0])),
        _assign(x[1], x[0]),
        _call('scanf', [], [x[1]]),
        _assign(z[1], z[0]),
        _ret(z[1]),
    ]
    cfg = ControlFlowGraph()
    cfg.add_node(BasicBlock(0, instructions))
    _run_expression_propagation(cfg)
    assert list(cfg.instructions) == [
        _assign(x[0], _addr(z[0])),
        _assign(x[1], x[0]),
        _call('scanf', [], [x[0]]),
        _assign(z[1], z[0]),
        _ret(z[1]),
    ]
(name='init_flow_config')
# NOTE(review): "(name='init_flow_config')" above looks like a stripped
# @pytest.fixture(name=...) decorator - confirm against the original file.
def fixture_init_flow_config(monkeypatch, tmpdir):
    """Write a minimal flow_config.yml into a temporary cwd and point
    FLOW_SITE_CONFIG at it for the duration of the test."""
    conf = {'default_version': 'default', 'versions': {'default': {'scalar': {'executable': locate_flow_binary()}}}}
    with tmpdir.as_cwd():
        Path('flow_config.yml').write_text(yaml.dump(conf), encoding='utf-8')
        monkeypatch.setenv('FLOW_SITE_CONFIG', 'flow_config.yml')
        (yield)
class Base(TreeNodeObject):
    """Common base for pipeline objects displayed in the engine tree.

    Provides persistence via the state pickler (pickle/deepcopy/saved
    state deferred until the object is running), tree-node UI integration
    (the tno_* methods), visibility toggling with a " [Hidden]" name
    suffix, and lazy loading of the traits View from a sibling ui module.
    """
    __version__ = 0
    # The scene this object renders into (runtime-only, not recorded).
    scene = Instance(TVTKScene, record=False)
    # Property proxying the private _is_running flag (see _get/_set_running).
    running = Property(Bool, record=False)
    name = Str('')
    icon = 'module.ico'
    type = Str('', record=False)
    visible = Bool(True, desc='if the object is visible')
    # Children as shown in the tree UI; may contain a helper "adder" node.
    children_ui_list = Property(depends_on=['children'], record=False)
    parent = WeakRef(record=False)
    menu_helper = Instance(HasTraits, record=False)
    recorder = Instance(Recorder, record=False)
    _is_running = Bool(False)
    # Pickled state held until the object starts (see _load_saved_state).
    _saved_state = Any('')
    _HideShowAction = Instance(Action, kw={'name': 'Hide/Show', 'action': 'object._hideshow'})
    _menu = Instance(Menu, transient=True)
    _icon_path = Str()
    # Subclasses may set this to offer an "add child" helper node.
    _adder_node_class = None
    _view_filename = Str(transient=True)
    _module_view = Instance(View, transient=True)
    __ = Python
    def __get_pure_state__(self):
        """Return the instance dict minus runtime-only attributes."""
        d = self.__dict__.copy()
        for attr in ('scene', '_is_running', '__sync_trait__', '__traits_listener__', '_icon_path', '_menu', '_HideShowAction', 'menu_helper', 'parent', 'parent_', '_module_view', '_listener_cache', '_view_filename', 'mlab_source'):
            d.pop(attr, None)
        return d
    def __getstate__(self):
        return state_pickler.dumps(self)
    def __setstate__(self, str_state):
        """Restore from pickled state, deferring application until running."""
        self.__init__()
        state = state_pickler.loads_state(str_state)
        state_pickler.update_state(state)
        self._saved_state = pickle.dumps(state)
        if self.running:
            self._load_saved_state()
    def __deepcopy__(self, memo):
        """Deep-copy via the state pickler rather than copying traits."""
        new = self.__class__()
        saved_state = self._saved_state
        if (len(saved_state) == 0):
            state = state_pickler.get_state(self)
            try:
                # Convert numpy scalars in a seed-widget position to plain
                # Python floats so the state pickles cleanly.
                # NOTE(review): the hard-coded children[0].children[4] path
                # presumably targets a specific pipeline layout - confirm.
                st = state.children[0].children[4]
                l_pos = st.seed.widget.position
                st.seed.widget.position = [pos.item() for pos in l_pos]
            except (IndexError, AttributeError):
                pass
            saved_state = pickle.dumps(state)
        new._saved_state = saved_state
        if new.running:
            new._load_saved_state()
        return new
    def start(self):
        """Mark the object running and apply any deferred saved state."""
        self.running = True
        self._load_saved_state()
    def stop(self):
        self.running = False
    def add_child(self, child):
        # Subclasses that can hold children must override.
        raise NotImplementedError
    def remove_child(self, child):
        raise NotImplementedError()
    def remove(self):
        """Detach from the parent, fixing up the engine's current object."""
        if (self.parent is not None):
            e = get_engine(self)
            self.parent.remove_child(self)
            if (e.current_object is self):
                e.current_object = self.parent
    def render(self):
        """Re-render the owning scene, if any."""
        s = self.scene
        if (s is not None):
            s.render()
    def dialog_view(self):
        """Return the trait view configured as a standalone edit dialog."""
        view = self.trait_view()
        icon = ((((self._icon_path + os.sep) + 'images') + os.sep) + self.icon)
        view.icon = ImageResource(icon)
        view.title = ('Edit%s: %s' % (self.type, self.name))
        view.buttons = ['OK', 'Cancel']
        return view
    def trait_view(self, name=None, view_element=None):
        """Return the default view, loaded lazily/cached for the default case."""
        if name:
            return super(Base, self).trait_view(name, view_element)
        view = self._load_view_cached(name, view_element)
        return view
    def tno_get_label(self, node):
        """Tree-node label; defaults the name to the class name when empty."""
        if (self.name == ''):
            self.name = self.__class__.__name__
        return self.name
    def tno_get_view(self, node):
        """Tree-node view, shown embedded as a subpanel."""
        view = self.trait_view()
        view.kind = 'subpanel'
        return view
    def tno_confirm_delete(self, node):
        # None -> ask the user (default confirmation), True -> delete silently.
        if preference_manager.root.confirm_delete:
            return None
        else:
            return True
    def tno_get_menu(self, node):
        if (self._menu is None):
            return super(Base, self).tno_get_menu(node)
        return self._menu
    def tno_get_icon(self, node, is_expanded):
        return self.icon
    def tno_get_icon_path(self, node):
        return self._icon_path
    def tno_delete_child(self, node, index):
        # A longer ui list means a helper node occupies slot 0; shift by one.
        if (len(self.children_ui_list) > len(self.children)):
            del self.children[(index - 1)]
        else:
            del self.children[index]
    def tno_append_child(self, node, child):
        self.children.append(child)
    def tno_insert_child(self, node, index, child):
        # Same helper-node offset logic as tno_delete_child.
        if (len(self.children_ui_list) > len(self.children)):
            idx = (index - 1)
        else:
            idx = index
        self.children[idx:idx] = [child]
    def _get_running(self):
        return self._is_running
    def _set_running(self, new):
        """Setter for the `running` property; fires a trait change on edges."""
        if (self._is_running == new):
            return
        else:
            old = self._is_running
            self._is_running = new
            self.trait_property_changed('running', old, new)
    def _get_children_ui_list(self):
        """Children for the UI; substitutes an adder helper node when empty."""
        if (((not preference_manager.root.show_helper_nodes) or (len(self.children) > 0)) or (self._adder_node_class is None) or ((not (self.type == ' scene')) and ('none' in self.output_info.datasets))):
            return self.children
        else:
            return [self._adder_node_class(object=self)]
    _trait_change('children[]')
    # NOTE(review): "_trait_change(...)" above looks like a stripped
    # @on_trait_change('children[]') decorator - confirm against the original.
    def _trigger_children_ui_list(self, old, new):
        """Propagate children changes to the children_ui_list property."""
        self.trait_property_changed('children_ui_list', old, new)
    def _visible_changed(self, value):
        """Reflect visibility in the display name via a ' [Hidden]' suffix."""
        if (len(self.name) == 0):
            self.tno_get_label(None)
        if value:
            self.name = self.name.replace(' [Hidden]', '')
        else:
            n = self.name
            if (' [Hidden]' not in n):
                self.name = ('%s [Hidden]' % n)
    def _load_view_cached(self, name, view_element):
        """Return the cached ui-module view, or the base-class default."""
        if (self._module_view is not None):
            view = self._module_view
        else:
            logger.debug('No view found for [%s] in [%s]. Using the base class trait_view instead.', self, self._view_filename)
            view = super(Base, self).trait_view(name, view_element)
        return view
    def _load_view_non_cached(self, name, view_element):
        """Exec the view file each call instead of using the cached module."""
        result = {}
        view_filename = self._view_filename
        try:
            exec(compile(open(view_filename).read(), view_filename, 'exec'), {}, result)
            view = result['view']
        except IOError:
            logger.debug('No view found for [%s] in [%s]. Using the base class trait_view instead.', self, view_filename)
            view = super(Base, self).trait_view(name, view_element)
        return view
    def _hideshow(self):
        """Toggle visibility (bound to the Hide/Show menu action)."""
        if self.visible:
            self.visible = False
        else:
            self.visible = True
    def _load_saved_state(self):
        """Apply and clear any state deferred by __setstate__/__deepcopy__."""
        saved_state = self._saved_state
        if (len(saved_state) > 0):
            state = pickle.loads(saved_state)
            if hasattr(self, '__set_pure_state__'):
                self.__set_pure_state__(state)
            else:
                state_pickler.set_state(self, state)
            self._saved_state = ''
    def __view_filename_default(self):
        """Derive the ui view file path from this class's module path."""
        module = self.__module__.split('.')
        class_filename = (module[(- 1)] + '.py')
        module_dir_name = module[1:(- 1)]
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        view_filename = os.path.join(*((([base_dir] + module_dir_name) + UI_DIR_NAME) + [class_filename]))
        return view_filename
    def __module_view_default(self):
        """Import the ui view module (if present) and return its `view`."""
        view_filename = self._view_filename
        if os.path.exists(view_filename):
            spec = importlib.util.spec_from_file_location('view', view_filename)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            view = module.view
        else:
            view = None
        return view
    def __menu_default(self):
        """Assemble the context menu: helper actions + Hide/Show + standard."""
        extras = []
        if (self.menu_helper is not None):
            extras = (self.menu_helper.actions + self._extra_menu_items())
        menu_actions = ((([Separator()] + extras) + [Separator(), self._HideShowAction, Separator()]) + deepcopy(standard_menu_actions))
        return Menu(*menu_actions)
    def __icon_path_default(self):
        return resource_path()
    def _extra_menu_items(self):
        # Hook for subclasses to contribute additional menu actions.
        return []
.django_db
# NOTE(review): ".django_db" above looks like a stripped
# @pytest.mark.django_db decorator - confirm against the original file.
def test_code_not_found(client, monkeypatch):
    """Requesting totals for an unknown CFDA code ('0.1') yields 204 No
    Content even though the mocked upstream API call itself succeeds."""
    mock_api_response(monkeypatch=monkeypatch, status=status.HTTP_200_OK, json_data={'cfdas': {'00.000': {'cfda': '00.000', 'posted': 1, 'closed': 3, 'archived': 962, 'forecasted': 0}}, 'errorMsgs': []})
    response = client.get('/api/v2/references/cfda/totals/0.1/')
    assert (response.status_code == status.HTTP_204_NO_CONTENT)
class Shell():
    """Interactive shell over the SQLite document database.

    Opens (optionally initializes) the database, then embeds either
    IPython or plain python3 with a set of convenience objects in scope.
    """
    # Known interpreter choices (see run()).
    shells = ['ipython3', 'python3']
    def __init__(self, options):
        self.options = options
        self.conn = None
        self.database = None
        # Store uuid.UUID values as their hex string in SQLite.
        def adapt_uuid(u):
            return u.hex
        sqlite3.register_adapter(uuid.UUID, adapt_uuid)
        # autocommit maps to isolation_level=None; otherwise DEFERRED txns.
        self.conn = sqlite3.connect(self.options.db_name, detect_types=(sqlite3.PARSE_COLNAMES | sqlite3.PARSE_DECLTYPES), isolation_level=(None if self.options.autocommit else 'DEFERRED'))
        if options.create_db:
            self.conn.executescript(SCHEMA)
        self.conn.row_factory = sqlite3.Row
        self.database = Database(self.conn, self.options.db_name, verbose=options.verbose)
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Ensure the connection is closed when used as a context manager.
        if (self.conn is not None):
            self.conn.close()
    def run(self):
        """Dispatch to the interpreter selected in options."""
        if (self.options.interpreter == 'python3'):
            self.python()
        elif (self.options.interpreter == 'ipython3'):
            self.ipython()
    def imported_objects(self) -> Dict[(str, Any)]:
        """Build the namespace pre-populated in the embedded shell."""
        conn = self.conn
        db = self.database
        imported_objects = {'conn': conn, 'db': db, 'SHELL_BANNER': SHELL_BANNER, 'LONG_SHELL_BANNER': LONG_SHELL_BANNER, 'SCHEMA': SCHEMA, 'UUID': uuid.UUID, 'Database': Database, 'DbObject': DbObject, 'Document': Document, 'TextMetadata': TextMetadata, 'BinaryMetadata': BinaryMetadata, 'DocumentHasTextMetadata': DocumentHasTextMetadata, 'DocumentHasBinaryMetadata': DocumentHasBinaryMetadata, 'sqlite3': sqlite3, 'subprocess': subprocess, 'uuid': uuid, 'datetime': datetime, 'itertools': itertools, 'os': os}
        return imported_objects
    def ipython(self):
        """Embed an IPython shell with the convenience namespace."""
        import IPython
        from IPython.terminal.embed import InteractiveShellEmbed
        imported_objects = self.imported_objects()
        db = self.database
        dbpath = pathlib.Path(db.name)
        mtime = datetime.datetime.fromtimestamp(dbpath.stat().st_mtime)
        banner1 = ((f'''IPython {IPython.__version__}
''' + SHELL_BANNER) + f'''
Connected to {db.name}, last modified {mtime.isoformat(sep=' ', timespec='minutes')}''')
        ipshell = InteractiveShellEmbed(user_ns=imported_objects, argv=[], banner1=banner1)
        ipshell()
    def python(self):
        """Embed a plain python3 REPL with tab completion and PYTHONSTARTUP."""
        import code
        imported_objects = self.imported_objects()
        db = self.database
        # Best-effort readline/tab-completion setup (absent on some builds).
        try:
            import readline
        except ImportError:
            pass
        else:
            import rlcompleter
            readline.set_completer(rlcompleter.Completer(imported_objects).complete)
            readline_doc = getattr(readline, '__doc__', '')
            if ((readline_doc is not None) and ('libedit' in readline_doc)):
                # macOS libedit uses a different binding syntax.
                readline.parse_and_bind('bind ^I rl_complete')
            else:
                readline.parse_and_bind('tab:complete')
        if (not self.options.no_startup):
            # Run $PYTHONSTARTUP and ~/.pythonrc.py (deduplicated, in order).
            for pythonrc in OrderedDict({x: x for x in [os.environ.get('PYTHONSTARTUP'), os.path.expanduser('~/.pythonrc.py')]}):
                if (not pythonrc):
                    continue
                if (not os.path.isfile(pythonrc)):
                    continue
                with open(pythonrc) as handle:
                    pythonrc_code = handle.read()
                try:
                    exec(compile(pythonrc_code, pythonrc, 'exec'), imported_objects)
                except Exception:
                    traceback.print_exc()
        dbpath = pathlib.Path(db.name)
        mtime = datetime.datetime.fromtimestamp(dbpath.stat().st_mtime)
        python_version = sys.version.replace('\n', '')
        code.interact(banner=((f'''python3 {python_version}
''' + SHELL_BANNER) + f'''
Connected to {db.name}, last modified {mtime.isoformat(sep=' ', timespec='minutes')}'''), local=imported_objects)
def import_with_bcrypt():
    """Bulk-import one user whose password was hashed with bcrypt.

    Builds a single ImportUserRecord, selects the bcrypt hash algorithm,
    and reports both per-record import failures and wholesale API errors.
    """
    hash_alg = auth.UserImportHash.bcrypt()
    record = auth.ImportUserRecord(uid='some-uid', email='', password_hash=b'password_hash', password_salt=b'salt')
    users = [record]
    try:
        result = auth.import_users(users, hash_alg=hash_alg)
        # result.errors lists the records that were rejected.
        for err in result.errors:
            print('Failed to import user:', err.reason)
    except exceptions.FirebaseError as error:
        print('Error importing users:', error)
class TestItemExporter(TestCase):
    """Exercise ItemExporter.get_data/add_data against fully mocked models."""

    def setUp(self):
        # A value type whose editor value (1) differs from both its display
        # text ('text') and the raw model value (0.0).
        value_type = Mock()
        value_type.has_text = Mock(return_value=True)
        value_type.get_text = Mock(return_value='text')
        value_type.has_editor_value = Mock(return_value=True)
        value_type.get_editor_value = Mock(return_value=1)
        self.value_type = value_type

        model = Mock()
        model.get_value = Mock(return_value=0.0)
        model.get_value_type = Mock(return_value=value_type)
        self.model = model

    def test_get_data_(self):
        """get_data returns the editor value for a single (row, column) index."""
        exporter = ItemExporter(format=trivial_format)
        self.assertEqual(exporter.get_data(self.model, [((0,), (0,))]), 1)

    def test_add_data_(self):
        """add_data stores the serialized editor value under the trivial format."""
        wrapper = DataWrapper()
        ItemExporter(format=trivial_format).add_data(wrapper, self.model, [((0,), (0,))])
        self.assertTrue(wrapper.has_format(trivial_format))
        self.assertEqual(wrapper.get_mimedata('null/null'), b'1')

    def test_add_data_length_0(self):
        """add_data is a no-op when the index list is empty."""
        wrapper = DataWrapper()
        ItemExporter(format=trivial_format).add_data(wrapper, self.model, [])
        self.assertFalse(wrapper.has_format(trivial_format))

    def test_add_data_length_2(self):
        """add_data is a no-op when more than one index is supplied."""
        wrapper = DataWrapper()
        ItemExporter(format=trivial_format).add_data(wrapper, self.model, [((), ()), ((0,), (0,))])
        self.assertFalse(wrapper.has_format(trivial_format))
class TestCreateUserPermissions():
    """API tests for creating user permissions via POST /user/{id}/permission."""

    # NOTE(review): the bare "(scope='function')" below reads like a decompiled
    # "@pytest.fixture(scope='function')" decorator for url() — confirm against
    # the original source.
    (scope='function')
    def url(self) -> str:
        """Fixture: the user-permissions endpoint under test."""
        return (V1_URL_PREFIX + USER_PERMISSIONS)

    def test_create_user_permissions_not_authenticated(self, url, api_client):
        """No auth header at all -> 401."""
        response = api_client.post(url, headers={}, json={})
        assert (HTTP_401_UNAUTHORIZED == response.status_code)

    def test_create_user_permissions_wrong_scope(self, url, api_client, generate_auth_header):
        """Authenticated but missing USER_PERMISSION_CREATE scope -> 403."""
        auth_header = generate_auth_header([SAAS_CONFIG_READ])
        response = api_client.post(url, headers=auth_header, json={})
        assert (HTTP_403_FORBIDDEN == response.status_code)

    def test_create_user_permissions_invalid_user_id(self, db, api_client, generate_auth_header) -> None:
        """Unknown user id -> 404 and no permissions row is created."""
        auth_header = generate_auth_header([USER_PERMISSION_CREATE])
        user_id = 'bogus_user_id'
        body = {'user_id': user_id, 'roles': [VIEWER]}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{user_id}/permission', headers=auth_header, json=body)
        permissions = FidesUserPermissions.get_by(db, field='user_id', value=user_id)
        assert (HTTP_404_NOT_FOUND == response.status_code)
        assert (permissions is None)

    def test_create_user_permissions_no_client_to_update(self, db, api_client, generate_auth_header) -> None:
        """Permissions can be created even when the user has no OAuth client."""
        auth_header = generate_auth_header([USER_PERMISSION_CREATE])
        user = FidesUser.create(db=db, data={'username': 'user_1', 'password': 'test_password'})
        body = {'user_id': user.id, 'roles': [APPROVER]}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{user.id}/permission', headers=auth_header, json=body)
        permissions = FidesUserPermissions.get_by(db, field='user_id', value=user.id)
        response_body = response.json()
        assert (HTTP_201_CREATED == response.status_code)
        assert (response_body['id'] == permissions.id)
        assert (permissions.roles == [APPROVER])
        assert (not user.client)
        user.delete(db)

    def test_create_user_permissions_add_scopes_are_ignored(self, db, api_client, generate_auth_header) -> None:
        """A 'scopes' key in the request body is silently ignored; only roles apply."""
        auth_header = generate_auth_header([USER_PERMISSION_CREATE])
        user = FidesUser.create(db=db, data={'username': 'user_1', 'password': 'test_password'})
        client = ClientDetail(hashed_secret='thisisatest', salt='thisisstillatest', scopes=[], roles=[VIEWER], user_id=user.id)
        db.add(client)
        db.commit()
        body = {'user_id': user.id, 'roles': [APPROVER], 'scopes': [PRIVACY_REQUEST_READ]}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{user.id}/permission', headers=auth_header, json=body)
        permissions = FidesUserPermissions.get_by(db, field='user_id', value=user.id)
        response_body = response.json()
        assert (HTTP_201_CREATED == response.status_code)
        assert (response_body['id'] == permissions.id)
        assert (permissions.roles == [APPROVER])
        user.delete(db)

    def test_create_user_permissions_add_bad_role(self, db, api_client, generate_auth_header) -> None:
        """A role outside the enum -> 422 validation error."""
        auth_header = generate_auth_header([USER_PERMISSION_CREATE])
        user = FidesUser.create(db=db, data={'username': 'user_1', 'password': 'test_password'})
        body = {'user_id': user.id, 'roles': ['nonexistent role']}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{user.id}/permission', headers=auth_header, json=body)
        response_body = response.json()
        assert (HTTP_422_UNPROCESSABLE_ENTITY == response.status_code)
        assert ('value is not a valid enumeration member' in response_body['detail'][0]['msg'])

    def test_create_user_permissions_roles_are_an_empty_list(self, db, api_client, generate_auth_header) -> None:
        """An explicit empty roles list is accepted and stored as-is."""
        auth_header = generate_auth_header([USER_PERMISSION_CREATE])
        user = FidesUser.create(db=db, data={'username': 'user_1', 'password': 'test_password'})
        body = {'user_id': user.id, 'roles': []}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{user.id}/permission', headers=auth_header, json=body)
        permissions = FidesUserPermissions.get_by(db, field='user_id', value=user.id)
        response_body = response.json()
        assert (HTTP_201_CREATED == response.status_code)
        assert (response_body['roles'] == [])
        assert (permissions.roles == [])
        user.delete(db)

    def test_create_user_permissions_no_role_key(self, db, api_client, generate_auth_header) -> None:
        """Omitting the 'roles' key entirely -> 422 validation error."""
        auth_header = generate_auth_header([USER_PERMISSION_CREATE])
        user = FidesUser.create(db=db, data={'username': 'user_1', 'password': 'test_password'})
        body = {'user_id': user.id}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{user.id}/permission', headers=auth_header, json=body)
        assert (HTTP_422_UNPROCESSABLE_ENTITY == response.status_code)

    def test_create_user_permissions_add_roles(self, db, api_client, generate_auth_header) -> None:
        """Created roles are copied onto the user's OAuth client; scopes are untouched."""
        auth_header = generate_auth_header([USER_PERMISSION_CREATE])
        user = FidesUser.create(db=db, data={'username': 'user_1', 'password': 'test_password'})
        client = ClientDetail(hashed_secret='thisisatest', salt='thisisstillatest', scopes=[], roles=[], user_id=user.id)
        db.add(client)
        db.commit()
        body = {'user_id': user.id, 'roles': [VIEWER]}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{user.id}/permission', headers=auth_header, json=body)
        permissions = FidesUserPermissions.get_by(db, field='user_id', value=user.id)
        response_body = response.json()
        assert (HTTP_201_CREATED == response.status_code)
        assert (response_body['id'] == permissions.id)
        assert (permissions.roles == [VIEWER])
        assert (client.roles == [VIEWER])
        assert (client.scopes == []), "User create flow doesn't override client scopes"
        user.delete(db)

    # NOTE(review): the bare ".parametrize(...)" below reads like a decompiled
    # "@pytest.mark.parametrize(...)" decorator for the matrix test — confirm
    # against the original source.
    .parametrize('acting_user,added_role,expected_response', [('owner_user', APPROVER, HTTP_201_CREATED), ('owner_user', VIEWER_AND_APPROVER, HTTP_201_CREATED), ('owner_user', VIEWER, HTTP_201_CREATED), ('owner_user', CONTRIBUTOR, HTTP_201_CREATED), ('owner_user', OWNER, HTTP_201_CREATED), ('contributor_user', APPROVER, HTTP_201_CREATED), ('contributor_user', VIEWER_AND_APPROVER, HTTP_201_CREATED), ('contributor_user', VIEWER, HTTP_201_CREATED), ('contributor_user', CONTRIBUTOR, HTTP_201_CREATED), ('contributor_user', OWNER, HTTP_403_FORBIDDEN), ('viewer_user', APPROVER, HTTP_403_FORBIDDEN), ('viewer_user', VIEWER_AND_APPROVER, HTTP_403_FORBIDDEN), ('viewer_user', VIEWER, HTTP_403_FORBIDDEN), ('viewer_user', CONTRIBUTOR, HTTP_403_FORBIDDEN), ('viewer_user', OWNER, HTTP_403_FORBIDDEN), ('viewer_and_approver_user', APPROVER, HTTP_403_FORBIDDEN), ('viewer_and_approver_user', VIEWER_AND_APPROVER, HTTP_403_FORBIDDEN), ('viewer_and_approver_user', VIEWER, HTTP_403_FORBIDDEN), ('viewer_and_approver_user', CONTRIBUTOR, HTTP_403_FORBIDDEN), ('viewer_and_approver_user', OWNER, HTTP_403_FORBIDDEN), ('approver_user', APPROVER, HTTP_403_FORBIDDEN), ('approver_user', VIEWER_AND_APPROVER, HTTP_403_FORBIDDEN), ('approver_user', VIEWER, HTTP_403_FORBIDDEN), ('approver_user', CONTRIBUTOR, HTTP_403_FORBIDDEN), ('approver_user', OWNER, HTTP_403_FORBIDDEN)])
    def test_create_user_roles_permission_matrix(self, db, acting_user, added_role, expected_response, request, api_client):
        """Only owners may grant any role; contributors may grant all but OWNER."""
        acting_user = request.getfixturevalue(acting_user)
        updated_user = FidesUser.create(db=db, data={'username': 'new_user', 'password': 'test_password'})
        auth_header = generate_role_header_for_user(acting_user, roles=acting_user.permissions.roles)
        body = {'user_id': updated_user.id, 'roles': [added_role]}
        response = api_client.post(f'{V1_URL_PREFIX}/user/{updated_user.id}/permission', headers=auth_header, json=body)
        assert (response.status_code == expected_response)
        updated_user.delete(db)
class Crawler(object):
    """Top-level scraper supervisor.

    Spawns the job-fetcher process(es), the worker pool(s), and the URL
    aggregator, then sits in a monitoring loop until runStatus signals
    shutdown, finally draining/flushing the shared queues.
    """

    def __init__(self, main_thread_count, raw_thread_count, lowrate):
        self.lowrate = lowrate
        self.process_lookup = {}
        self.log = logging.getLogger('Main.Text.Manager')
        WebMirror.rules.load_rules()
        self.log.info('Scraper executing with %s main processes, %s raw scraper threads.', main_thread_count, raw_thread_count)
        self.main_thread_count = main_thread_count
        self.raw_thread_count = raw_thread_count

    def start_aggregator(self):
        """Launch the URL-upserter aggregator process; return its input queue."""
        agg_queue = multiprocessing.Queue(maxsize=URL_UPSERTER_QUEUE_SIZE)
        # Serialize process creation against logging setup (stdout_lock).
        with logSetup.stdout_lock:
            self.main_job_agg = multiprocessing.Process(target=WebMirror.UrlUpserter.UpdateAggregator.launch_agg, args=(agg_queue,))
            self.main_job_agg.start()
        return agg_queue

    def join_aggregator(self):
        """Signal the aggregator to stop and join it (retrying on timeout)."""
        self.log.info('Asking Aggregator process to stop.')
        runStatus.agg_run_state.value = 0
        if hasattr(self, 'main_job_agg'):
            while 1:
                try:
                    self.main_job_agg.join(timeout=1)
                    break
                except multiprocessing.TimeoutError:
                    print('Failed to join main_job_agg')
        self.log.info('Aggregator joined.')

    def start_main_job_fetcher(self):
        """Start the RPC job dispatcher; return its job queue(s)."""
        self.main_job_fetcher = WebMirror.JobDispatcher.RpcJobManagerWrapper(lowrate=self.lowrate)
        return self.main_job_fetcher.get_queues()

    def start_raw_job_fetcher(self):
        """Start the raw-archiver job dispatcher; return its job queue."""
        self.raw_job_fetcher = RawArchiver.RawJobDispatcher.RawJobFetcher()
        return self.raw_job_fetcher.get_queue()

    def join_job_fetcher(self):
        """Ask whichever job fetchers were started to halt and join them."""
        if hasattr(self, 'main_job_fetcher'):
            self.log.info('Asking main job source task to halt.')
            self.main_job_fetcher.join_proc()
        if hasattr(self, 'raw_job_fetcher'):
            self.log.info('Asking raw job source task to halt.')
            self.raw_job_fetcher.join_proc()
        self.log.info('Job source halted.')

    def launchProcessesFromQueue(self, processes, job_in_queue):
        # Intentionally a no-op in this implementation.
        pass

    def run_raw(self):
        """Run the raw-archiver pipeline until shutdown, then tear down."""
        assert (self.main_thread_count >= 1)
        assert (self.raw_thread_count >= 1)
        new_url_aggreator_queue = multiprocessing.Queue(maxsize=(MAX_IN_FLIGHT_JOBS * 2))
        raw_new_job_queue = self.start_raw_job_fetcher()
        raw_kwargs = {'response_queue': new_url_aggreator_queue, 'new_job_queue': raw_new_job_queue, 'cookie_lock': runStatus.cookie_lock}
        rawManager = MultiJobManager(max_tasks=self.raw_thread_count, target=RawArchiver.RawRunner.RawRunInstance.run, target_kwargs=raw_kwargs)
        managers = [rawManager]
        drain_queues = [raw_new_job_queue]
        flush_queues = [new_url_aggreator_queue, raw_new_job_queue]
        # Status/liveness callables used by _runloop's periodic report.
        self.status_call = self.raw_job_fetcher.get_status
        self.running_call = self.raw_job_fetcher.is_running
        self._runloop(managers, drain_queues)
        self._teardown(managers, drain_queues, flush_queues)

    def run(self):
        """Run the main web-mirror pipeline until shutdown, then tear down."""
        assert (self.main_thread_count >= 1)
        assert (self.raw_thread_count >= 1)
        new_url_aggreator_queue = self.start_aggregator()
        main_new_job_queue = self.start_main_job_fetcher()
        main_kwargs = {'response_queue': new_url_aggreator_queue, 'new_job_queue': main_new_job_queue, 'cookie_lock': runStatus.cookie_lock}
        mainManager = MultiJobManager(max_tasks=self.main_thread_count, target=WebMirror.Runner.RunInstance.run, target_kwargs=main_kwargs)
        managers = [mainManager]
        drain_queues = [main_new_job_queue]
        flush_queues = [new_url_aggreator_queue, main_new_job_queue]
        self.status_call = self.main_job_fetcher.get_status
        self.running_call = self.main_job_fetcher.is_running
        self._runloop(managers, drain_queues)
        self._teardown(managers, drain_queues, flush_queues)

    def _runloop(self, managers, drain_queues):
        """Monitor workers once per second; log a status line every 10 ticks."""
        # Starting at 10 makes the first iteration emit a status report immediately.
        cnt = 10
        while runStatus.run_state.value:
            try:
                time.sleep(1)
                cnt += 1
                if (cnt >= 10):
                    cnt = 0
                    living = sum([manager.check_run_jobs() for manager in managers])
                    # Non-blocking probe of the cookie lock purely for reporting.
                    clok_locked = runStatus.cookie_lock.acquire(block=False)
                    if clok_locked:
                        runStatus.cookie_lock.release()
                    self.log.info('Living processes: %s (Cookie lock acquired: %s, queue sizes: %s, exiting: %s)', living, (not clok_locked), [q.qsize() for q in drain_queues], (runStatus.run_state.value == 0))
                    self.log.info('Job Queue Fillers: %s ', self.status_call())
                    if (not self.running_call()):
                        # Without a job source the workers would starve; abort.
                        self.log.error('Job fetcher is dead. Aborting!')
                        runStatus.run_state.value = 0
                    if is_pypy:
                        # PyPy needs explicit collection nudges under long-running loops.
                        collected = gc.collect()
                        self.log.info('Collected %s object with garbage collector', collected)
            except KeyboardInterrupt:
                self.log.info('Control C caught. Stopping scraper.')
                runStatus.run_state.value = 0
                break
            except Exception:
                # Persist the traceback to a timestamped file for post-mortem.
                print('Wat?')
                traceback.print_exc()
                with open(('error %s.txt' % time.time()), 'w') as fp:
                    fp.write('Manager crashed?\n')
                    fp.write(traceback.format_exc())
                break

    def _teardown(self, managers, drain_queues, flush_queues):
        """Stop fetchers, join worker managers, and drain all shared queues."""
        self.join_job_fetcher()
        runStatus.run_state.value = 0
        self.log.info('Crawler allowing ctrl+c to propagate.')
        time.sleep(1)
        runStatus.run_state.value = 0
        time.sleep(1)
        for manager in managers:
            manager.join_jobs(drain_queues)
        self.log.info('All processes halted.')
        self.log.info('Flusing queues')
        # Empty every queue so no feeder thread blocks process exit.
        for job_queue in flush_queues:
            try:
                while 1:
                    job_queue.get_nowait()
            except queue.Empty:
                pass
        self.join_aggregator()
# NOTE(review): the bare call below reads like a decompiled
# '@_push_mul.register(Mul)' decorator for the function that follows —
# confirm against the original source.
_push_mul.register(Mul)
def _push_mul_mul(expr, self, state):
    """Push a coefficient through a Mul node toward its prioritised child.

    expr: the Mul expression; self: the visiting callable (re-applied to
    children); state: ActionBag carrying the coefficient and operand pick.
    """
    if (expr.rank == 1):
        # Rank-1 result: exactly one child must itself be rank 1 — pick it
        # (the tuple-unpack enforces uniqueness).
        (prio_child,) = tuple(filter((lambda child: (child.rank == 1)), expr.children))
        pick_op = expr.children.index(prio_child)
    else:
        prio_child = expr.children[state.pick_op]
        pick_op = state.pick_op
    other_child = expr.children[flip(pick_op)]
    if (state.coeff and (expr.rank == 1)):
        # NOTE(review): this assert tests a non-empty string literal, so it is
        # always True and can never fire. It was presumably meant as
        # 'assert <cond>, "..."' or an unconditional raise — confirm intent.
        assert 'So far Slate cannot express linear algebra as in case 1b. If that changes, remove the assertion.'
        # NOTE(review): 'coeff' is computed but unused on this path.
        coeff = self(prio_child, ActionBag(None, pick_op))
        pushed_other_child = self(other_child, ActionBag(state.coeff, pick_op))
        # Preserve operand order based on which side was picked.
        return (Mul(pushed_other_child, prio_child) if pick_op else Mul(prio_child, pushed_other_child))
    else:
        coeff = self(prio_child, state)
        return self(other_child, ActionBag(coeff, pick_op))
def enter_input(proc: pexpect.spawn, expect: str, input_s: str, timeout: int=5) -> str:
    """Wait for *expect* on *proc*, send *input_s*, and return the matched output.

    Raises AssertionError (chained from the pexpect TIMEOUT) when the prompt
    does not appear within *timeout* seconds.
    """
    try:
        proc.expect(expect, timeout=timeout)
    except pexpect.exceptions.TIMEOUT as exc:
        message = f'''Timeout waiting for prompt: `{expect}`.
Output-before: `{proc.before}`
Output-after: `{proc.after}`'''
        raise AssertionError(message) from exc
    matched = str(proc.after)
    print(matched)
    proc.sendline(input_s)
    return matched
def guess_harmony(wordmap):
    """Guess Finnish vowel harmony ('front' or 'back') for *wordmap*.

    Returns the wordmap with wordmap['harmony'] filled in. Verbs (kotus
    paradigms 52-78, or POS 'VERB') are classified from the infinitive
    ending; other words fall back to scanning the pronunciation for the
    rightmost harmony vowel after the last morpheme boundary. Calls
    fail_guess_because() and exits when no rule applies.

    BUG FIX: the verb branch previously tested lemma.endswith('a') twice
    (a lost umlaut), which made the back-harmony branch unreachable; the
    first test is now endswith('ä'). The front-vowel scan list has its
    umlauts restored for the same reason.
    """
    if wordmap['harmony']:
        # Harmony already annotated; nothing to guess.
        return wordmap
    tn = int(wordmap['kotus_tn']) if wordmap['kotus_tn'] else 0
    if (tn in range(52, 79)) or (wordmap['pos'] == 'VERB'):
        # Finnish infinitives end in -ä (front harmony) or -a (back harmony).
        if wordmap['lemma'].endswith('ä'):
            wordmap['harmony'] = 'front'
        elif wordmap['lemma'].endswith('a'):
            wordmap['harmony'] = 'back'
        elif wordmap['lemma'].endswith('ei') and (tn == 1099):
            wordmap['harmony'] = 'front'
        else:
            fail_guess_because(wordmap, ['VERB'], ['ä', 'a', 'ei'])
            exit(1)
    elif wordmap['pronunciation']:
        # Locate the last morpheme boundary so only the final part decides harmony.
        lastbound = -1
        for bound in ['{WB}', '{XB}', '_', '#', ' ', '-']:
            b = wordmap['pronunciation'].rfind(bound)
            if b > lastbound:
                lastbound = b
        # Rightmost back vowel after the boundary.
        # NOTE(review): the duplicated plain vowels at the end of this list look
        # like accented back vowels that lost their diacritics — confirm source.
        lastback = lastbound
        for back in ['a', 'o', 'u', 'A', 'O', 'U', 'a', 'a', 'a', 'o', 'u']:
            b = wordmap['pronunciation'].rfind(back)
            if b > lastback:
                lastback = b
        # Rightmost front vowel after the boundary (umlauts restored).
        lastfront = lastbound
        for front in ['ä', 'ö', 'y', 'Ä', 'Ö', 'Y', 'ü']:
            f = wordmap['pronunciation'].rfind(front)
            if f > lastfront:
                lastfront = f
        if (lastfront == lastbound) and (lastback == lastbound):
            # No harmony vowel at all (only neutral vowels): default to front.
            wordmap['harmony'] = 'front'
        elif lastfront > lastback:
            wordmap['harmony'] = 'front'
        elif lastback > lastfront:
            wordmap['harmony'] = 'back'
    else:
        fail_guess_because(wordmap, [''], ['pronunciation'])
        exit(1)
    return wordmap
class BaseDialog(BasePanel):
    """Base class for traitsui Qt dialog windows (non-modal, modal, or popup)."""

    # Window style constants used by create_dialog()/display_ui().
    (NONMODAL, MODAL, POPUP) = list(range(3))

    def init(self, ui, parent, style):
        """Create and lay out the dialog; must be provided by subclasses."""
        raise NotImplementedError

    def create_dialog(self, parent, style):
        """Build the underlying _StickyDialog and wire its finished signal."""
        self.control = control = _StickyDialog(self.ui, parent)
        view = self.ui.view
        control.setModal((style == BaseDialog.MODAL))
        control.setWindowTitle((view.title or DefaultTitle))
        control.finished.connect(self._on_finished)

    def add_contents(self, panel, buttons):
        """Install the content panel and button row, then the bars."""
        # A bare layout must be wrapped in a QWidget before it can be central.
        if isinstance(panel, QtGui.QLayout):
            w = QtGui.QWidget()
            panel.setContentsMargins(0, 0, 0, 0)
            w.setLayout(panel)
            panel = w
        if (panel is not None):
            self.control._mw.setCentralWidget(panel)
        if (buttons is not None):
            self.control.layout().addWidget(buttons)
        self._add_menubar()
        self._add_toolbar()
        self._add_statusbar()

    def close(self, rc=True):
        """Dispose the UI with result *rc* and drop widget references."""
        self.ui.dispose(rc)
        self.ui = self.control = None

    # NOTE(review): no 'self' parameter — this reads like a module-level
    # helper or a stripped @staticmethod; confirm against the original source.
    def display_ui(ui, parent, style):
        """Initialize, prepare, position and show a UI (modal or not)."""
        ui.owner.init(ui, parent, style)
        ui.control = ui.owner.control
        ui.control._parent = parent
        try:
            ui.prepare_ui()
        except BaseException:
            # Preparation failed: detach and clear everything before re-raising.
            ui.control.setParent(None)
            ui.control.ui = None
            ui.control = None
            ui.owner = None
            ui.result = False
            raise
        ui.handler.position(ui.info)
        restore_window(ui)
        # Re-apply any remembered focus widget.
        if (ui._focus_control is not None):
            ui._focus_control.setFocus()
            ui._focus_control = None
        if (style == BaseDialog.NONMODAL):
            ui.control.show()
        else:
            ui.control.setWindowModality(QtCore.Qt.WindowModality.ApplicationModal)
            ui.control.exec_()

    def set_icon(self, icon=None):
        """Set the window icon, falling back to the default when unset/invalid."""
        from pyface.image_resource import ImageResource
        if (not isinstance(icon, ImageResource)):
            icon = self.default_icon()
        self.control.setWindowIcon(icon.create_icon())

    def _on_error(self, event):
        """Enable the OK button only while there are no validation errors."""
        errors = event.new
        self.ok.setEnabled((errors == 0))

    def _add_menubar(self):
        """Create the menu bar from the view's menubar (schema or manager)."""
        menubar = self.ui.view.menubar
        if isinstance(menubar, MenuBarSchema):
            builder = self.ui.view.action_manager_builder
            menubar = builder.create_action_manager(menubar)
        if (menubar is not None):
            # Reset action-manager bookkeeping around menu construction.
            self._last_group = self._last_parent = None
            self.control.layout().setMenuBar(menubar.create_menu_bar(self.control, self))
            self._last_group = self._last_parent = None

    def _add_toolbar(self):
        """Create a fixed (non-movable) tool bar from the view's toolbar."""
        toolbar = self.ui.view.toolbar
        if isinstance(toolbar, ToolBarSchema):
            builder = self.ui.view.action_manager_builder
            toolbar = builder.create_action_manager(toolbar)
        if (toolbar is not None):
            self._last_group = self._last_parent = None
            qt_toolbar = toolbar.create_tool_bar(self.control, self)
            qt_toolbar.setMovable(False)
            self.control._mw.addToolBar(qt_toolbar)
            self._last_group = self._last_parent = None

    def _add_statusbar(self):
        """Build a QStatusBar with one label per statusbar item, bound to traits."""
        if (self.ui.view.statusbar is not None):
            control = QtGui.QStatusBar()
            control.setSizeGripEnabled(self.ui.view.resizable)
            listeners = []
            for item in self.ui.view.statusbar:
                name = item.name
                item_control = QtGui.QLabel()
                item_control.setText(self.ui.get_extended_value(name))
                width = abs(item.width)
                stretch = 0
                # width <= 1.0 is a proportional stretch factor; larger values
                # are a minimum pixel width.
                if (width <= 1.0):
                    stretch = int((100 * width))
                else:
                    item_control.setMinimumWidth(int(width))
                control.addWidget(item_control, stretch)
                # 'obj.attr' style names select a context object; default 'object'.
                col = name.find('.')
                obj = 'object'
                if (col >= 0):
                    obj = name[:col]
                    name = name[(col + 1):]
                obj = self.ui.context[obj]
                # Keep the label updated when the observed trait changes.
                set_text = self._set_status_text(item_control)
                obj.observe(set_text, name, dispatch='ui')
                listeners.append((obj, set_text, name))
            self.control._mw.setStatusBar(control)
            # Retain listeners so they can be unhooked on dispose.
            self.ui._statusbar = listeners

    def _set_status_text(self, control):
        """Return a trait-change handler that writes the new value into *control*."""
        def set_status_text(event):
            text = event.new
            control.setText(text)
        return set_status_text
class RunInstance(object):
    """One fetch/processing worker, intended to run inside a child process."""

    def __init__(self, num, response_queue, new_job_queue, cookie_lock, nosig=True):
        # Ignore SIGINT in workers so Ctrl+C is handled by the parent process.
        if nosig:
            signal.signal(signal.SIGINT, signal.SIG_IGN)
        self.num = num
        self.log = logging.getLogger('Main.Text.Web')
        self.resp_queue = response_queue
        self.cookie_lock = cookie_lock
        self.new_job_queue = new_job_queue
        self.archiver = None

    def __del__(self):
        # Release the thread-local DB session when the worker is collected.
        db.delete_db_session()

    def do_task(self):
        """Process up to 500 archiver tasks within one DB session.

        Returns whether the last attempted task actually had a job.
        """
        hadjob = False
        with common.database.session_context() as db_handle:
            self.archiver = WebMirror.Engine.SiteArchiver(self.cookie_lock, new_job_queue=self.new_job_queue, response_queue=self.resp_queue, db_interface=db_handle)
            for _ in range(500):
                hadjob = self.archiver.taskProcess()
                # Stop early as soon as the queue runs dry.
                if (not hadjob):
                    return hadjob
        return hadjob

    def go(self):
        """Main worker loop: up to 50 task batches while the run flag is set."""
        self.log.info('RunInstance starting!')
        hadjob = False
        loop = 0
        for dummy_x in range(50):
            if (runStatus.run_state.value == 1):
                hadjob = self.do_task()
            else:
                self.log.info('Thread %s exiting.', self.num)
                break
            loop += 1
            # Idle: back off for a second, but still honour the exit flag.
            if (not hadjob):
                time.sleep(1)
                if (runStatus.run_state.value != 1):
                    self.log.info('Thread %s saw exit flag while waiting for jobs. Runstate: %s', self.num, runStatus.run_state.value)
                    break
        if (runStatus.run_state.value == 1):
            self.log.info('Thread %s Exited with a non-die runstate!. Runstate: %s', self.num, runStatus.run_state.value)
        else:
            self.log.info('Thread %s halting. Runstate: %s', self.num, runStatus.run_state.value)

    # NOTE(review): run_prof and run take 'cls' as first parameter — these
    # read like stripped @classmethod decorators; confirm against source.
    def run_prof(cls, num, total_worker_count, worker_num, response_queue, new_job_queue, cookie_lock, nosig=True):
        """Profiling entry point: run() under cProfile, dumping prof<pid>.prof."""
        logSetup.resetLoggingLocks()
        common.process.name_process('proc fetcher processing worker w-profiling')
        pid = os.getpid()
        try:
            cProfile.runctx('cls.run(num, response_queue, new_job_queue, cookie_lock, nosig)', globals(), locals(), ('prof%d.prof' % pid))
        except Exception as e:
            # Noisy repetition — presumably to make crashes stand out in
            # interleaved multi-process logs; confirm whether intentional.
            print('Wat?')
            print('Wat?')
            print('Wat?')
            print('Wat?')
            print('Wat?')
            print('Wat?')
            print('Wat?')
            traceback.print_exc()
            raise e

    def run(cls, num, total_worker_count, worker_num, response_queue, new_job_queue, cookie_lock, nosig=True):
        """Process entry point: construct a RunInstance and run it to completion."""
        logSetup.resetLoggingLocks()
        common.process.name_process('proc fetcher processing worker')
        common.stuck.install_pystuck()
        try:
            run = cls(num, response_queue, new_job_queue, cookie_lock, nosig)
            run.go()
        except Exception:
            print()
            print('Exception in sub-process!')
            traceback.print_exc()
def use_direct_and_oddr(p, luts, connects):
    """Randomly configure and emit the O (and, on IO sites, T) DDR registers.

    Mutates the per-site parameter dict *p*, draws fresh driver nets from
    *luts*, and writes the generated Verilog to the *connects* stream.
    Each mux config is one of 'direct' (the ODDR drives the pad wire),
    'lut' (a LUT output drives it), or 'none' (left undriven).

    FIX: removed a dead assignment that pinned TDDR_CLK_EDGE to
    'OPPOSITE_EDGE' and was immediately overwritten, plus two stray 'pass'
    statements; random-call order is otherwise unchanged.
    """
    p['oddr_mux_config'] = random.choice(('direct', 'lut', 'none'))
    # The T (tristate) path only exists on real IO sites, and the O and T
    # paths may not both take the LUT route at once.
    if p['io']:
        if (p['oddr_mux_config'] != 'lut'):
            p['tddr_mux_config'] = random.choice(('direct', 'lut', 'none'))
        else:
            p['tddr_mux_config'] = random.choice(('lut', 'none'))
    else:
        p['tddr_mux_config'] = 'none'
    # Clock from a LUT net (never inverted) or the global buffer (maybe inverted).
    if random.randint(0, 1):
        clknet = luts.get_next_output_net()
        p['IS_CLK_INVERTED'] = 0
    else:
        clknet = 'bufg_o'
        p['IS_CLK_INVERTED'] = random.randint(0, 1)
    if (p['tddr_mux_config'] == 'direct'):
        p['TINIT'] = random.randint(0, 1)
        p['TSRTYPE'] = verilog.quote(random.choice(('SYNC', 'ASYNC')))
        p['TDDR_CLK_EDGE'] = verilog.quote(random.choice(('OPPOSITE_EDGE', 'SAME_EDGE')))
        # Optionally drive set (S) or reset (R) from a LUT net.
        p['t_sr_used'] = random.choice(('None', 'S', 'R'))
        if (p['t_sr_used'] == 'None'):
            p['t_srnet'] = ''
        elif (p['t_sr_used'] == 'S'):
            p['srnet'] = luts.get_next_output_net()
            p['t_srnet'] = '.S({}),\n'.format(p['srnet'])
        elif (p['t_sr_used'] == 'R'):
            p['srnet'] = luts.get_next_output_net()
            p['t_srnet'] = '.R({}),\n'.format(p['srnet'])
        print('\n (* KEEP, DONT_TOUCH, LOC = "{ologic_loc}" *)\n ODDR #(\n .INIT({TINIT}),\n .SRTYPE({TSRTYPE}),\n .DDR_CLK_EDGE({TDDR_CLK_EDGE}),\n .IS_C_INVERTED({IS_CLK_INVERTED})\n ) toddr_{site} (\n .C({cnet}),\n .D1({d1net}),\n .D2({d2net}),\n .CE({cenet}),\n {t_srnet}\n .Q(tddr_d_{site})\n );\n '.format(cnet=clknet, d1net=luts.get_next_output_net(), d2net=luts.get_next_output_net(), cenet=luts.get_next_output_net(), **p), file=connects)
    # Wire the T pad net according to the chosen mux config.
    if (p['tddr_mux_config'] == 'direct'):
        print('\n assign {twire} = tddr_d_{site};'.format(**p), file=connects)
    elif (p['tddr_mux_config'] == 'lut'):
        print('\n assign {twire} = {lut};'.format(lut=luts.get_next_output_net(), **p), file=connects)
    elif (p['tddr_mux_config'] == 'none'):
        pass
    else:
        assert False, p['tddr_mux_config']
    if (p['oddr_mux_config'] == 'direct'):
        p['QINIT'] = random.randint(0, 1)
        p['SRTYPE'] = verilog.quote(random.choice(('SYNC', 'ASYNC')))
        p['ODDR_CLK_EDGE'] = verilog.quote(random.choice(('OPPOSITE_EDGE', 'SAME_EDGE')))
        p['o_sr_used'] = random.choice(('None', 'S', 'R'))
        if (p['o_sr_used'] == 'None'):
            p['o_srnet'] = ''
        elif (p['o_sr_used'] == 'S'):
            # Reuse the T-side S/R net when one was already allocated.
            if ('srnet' not in p):
                p['srnet'] = luts.get_next_output_net()
            p['o_srnet'] = '.S({}),\n'.format(p['srnet'])
        elif (p['o_sr_used'] == 'R'):
            if ('srnet' not in p):
                p['srnet'] = luts.get_next_output_net()
            p['o_srnet'] = '.R({}),\n'.format(p['srnet'])
        print('\n (* KEEP, DONT_TOUCH, LOC = "{ologic_loc}" *)\n ODDR #(\n .INIT({QINIT}),\n .SRTYPE({SRTYPE}),\n .DDR_CLK_EDGE({ODDR_CLK_EDGE}),\n .IS_C_INVERTED({IS_CLK_INVERTED})\n ) oddr_{site} (\n .C({cnet}),\n .D1({d1net}),\n .D2({d2net}),\n .CE({cenet}),\n {o_srnet}\n .Q(oddr_d_{site})\n );\n '.format(cnet=clknet, d1net=luts.get_next_output_net(), d2net=luts.get_next_output_net(), cenet=luts.get_next_output_net(), **p), file=connects)
    # Wire the O pad net according to the chosen mux config.
    if (p['oddr_mux_config'] == 'direct'):
        print('\n assign {owire} = oddr_d_{site};'.format(**p), file=connects)
    elif (p['oddr_mux_config'] == 'lut'):
        print('\n assign {owire} = {lut};'.format(lut=luts.get_next_output_net(), **p), file=connects)
    elif (p['oddr_mux_config'] == 'none'):
        pass
    else:
        assert False, p['oddr_mux_config']
def _requirement_without_reinitialization(ast: AbstractSyntaxTree, node: AbstractSyntaxTreeNode, variable: Variable) -> bool:
    """Check whether *variable* is used before being redefined from *node* onward.

    Walks the reachable nodes in pre-order; the first assignment that touches
    the variable decides: a pure redefinition yields False, any use (with or
    without a simultaneous definition) yields True. Falls through (None,
    i.e. falsy) when the variable is never touched.
    """
    for current in ast.get_reachable_nodes_pre_order(node):
        visitor = AssignmentVisitor()
        visitor.visit(current)
        for assignment in visitor.assignments:
            defines = variable in assignment.definitions
            uses = variable in assignment.requirements
            if defines and not uses:
                return False
            if uses:
                return True
class OptionSeriesScatterStatesSelectMarker(Options):
    """Highcharts scatter-series 'select' state marker options.

    NOTE(review): each option appears as two same-named methods (getter then
    setter) — this reads like decompiled @property/@<name>.setter pairs; as
    plain defs the later definition shadows the earlier. Confirm the
    decorators against the original source.
    """

    def enabled(self):
        # Default: None (inherit the chart-level setting).
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Default threshold: 2.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker outline color: white.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # Default marker radius in pixels: 4.
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesPolygonOnpointPosition(Options):
    """Highcharts polygon-series on-point position options (offsets and x/y).

    NOTE(review): same-named getter/setter method pairs — decompiled
    @property/@<name>.setter pairs; confirm decorators against source.
    """

    def offsetX(self):
        return self._config_get(None)

    def offsetX(self, num: float):
        self._config(num, js_type=False)

    def offsetY(self):
        return self._config_get(None)

    def offsetY(self, num: float):
        self._config(num, js_type=False)

    def x(self):
        return self._config_get(None)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get(None)

    def y(self, num: float):
        self._config(num, js_type=False)
def test_wait_posted():
    """wait() returns once every requested key has been posted by the poster."""
    cap = Capture()
    pool = DAGPool({'a': 1, 'b': 2, 'c': 3})
    # Background greenthread posts the d..g values while we wait.
    eventlet.spawn(post_each, pool, cap)
    results = pool.wait('bcdefg')
    cap.add('got all')
    expected = {'b': 2, 'c': 3, 'd': 'dval', 'e': 'eval', 'f': 'fval', 'g': 'gval'}
    assert_equal(results, expected)
    cap.validate([[], [], ['got all']])
class OptionPlotoptionsFunnel3dStatesInactive(Options):
    """Highcharts funnel3d 'inactive' state options.

    NOTE(review): same-named getter/setter method pairs — decompiled
    @property/@<name>.setter pairs; confirm decorators against source.
    """

    def animation(self) -> 'OptionPlotoptionsFunnel3dStatesInactiveAnimation':
        # Nested animation sub-options object.
        return self._config_sub_data('animation', OptionPlotoptionsFunnel3dStatesInactiveAnimation)

    def enabled(self):
        # Default: inactive-state dimming is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def opacity(self):
        # Default opacity applied to inactive series: 0.2.
        return self._config_get(0.2)

    def opacity(self, num: float):
        self._config(num, js_type=False)
def robot_cube_publisher(trans, rot):
    """Publish a blue cube Marker at the given pose on the 'robot_marker' topic.

    trans: translation, only x/y (indices 0 and 1) are used;
    rot: (x, y, z, w) quaternion.
    """
    publisher = rospy.Publisher('robot_marker', Marker, queue_size=1)
    marker = Marker()
    marker.type = marker.CUBE
    marker.header.frame_id = '/odom'
    position = marker.pose.position
    position.x = trans[0]
    position.y = trans[1]
    orientation = marker.pose.orientation
    orientation.x = rot[0]
    orientation.y = rot[1]
    orientation.z = rot[2]
    orientation.w = rot[3]
    # Fixed cube footprint approximating the robot body.
    marker.scale.x = 0.35
    marker.scale.y = 0.35
    marker.scale.z = 0.25
    # Opaque blue.
    marker.color.a = 1
    marker.color.r = 0
    marker.color.g = 0
    marker.color.b = 255
    publisher.publish(marker)
()
('algorithm', type=click.Choice(sorted(hashlib.algorithms_available)))
('digest')
def crack_hash(digest, algorithm, *, limit=10000):
    """Brute-force a numeric password whose hash equals *digest*.

    Tries the decimal strings 0 .. limit-1 (default 10000, matching the
    original hard-coded bound) under the named hashlib *algorithm*.

    Returns True (and prints the password) on success, False otherwise.
    Raises ValueError if *algorithm* is not a hashlib algorithm name.
    """
    for number in range(limit):
        h = hashlib.new(algorithm, str(number).encode()).hexdigest()
        if (h == digest):
            print('Cracked! Password:', number)
            return True
    print('Unable to crack', digest)
    return False
def test_add_get_delete(spark_session, provide_config):
    """Round-trip a DataFrame through the cache: add, read back, query, delete."""
    cache = DiskPersistenceBackedSparkCache(**provide_config)
    key = to_dict('d1', 't11', '/d1')
    assert len(cache) == 0
    # An unknown path must raise before anything has been cached.
    with pytest.raises(KeyError):
        cache.get_dataset_identity_not_branch_aware('/d1')
    frame = spark_session.createDataFrame(data=[[1, 2], [3, 4]], schema='a: int, b: int')
    cache[key] = frame
    fetched = cache[key]
    assert_frame_equal(frame.toPandas(), fetched.toPandas())
    assert key in cache
    assert cache.get_dataset_identity_not_branch_aware('/d1') == key
    assert cache.dataset_has_schema(key) is True
    assert len(cache) == 1
    del cache[key]
    assert len(cache) == 0
def upgrade():
    """Alembic migration: rebuild 'boardmoderator' with a 'roles' array column.

    Drops the old table (and its per-column indexes) and recreates it with a
    composite (board_id, moderator_id) primary key plus an indexed
    postgresql ARRAY 'roles' column.  Existing rows are NOT migrated.
    """
    # Remove the old indexes first, then the table itself.
    op.drop_index(op.f('ix_boardmoderator_board_id'), table_name='boardmoderator')
    op.drop_index(op.f('ix_boardmoderator_moderator_id'), table_name='boardmoderator')
    op.drop_table('boardmoderator')
    op.create_table('boardmoderator', sa.Column('board_id', sa.Integer(), nullable=False), sa.Column('moderator_id', sa.Integer(), nullable=False), sa.Column('roles', postgresql.ARRAY(sa.String()), nullable=False), sa.ForeignKeyConstraint(['board_id'], ['board.id']), sa.ForeignKeyConstraint(['moderator_id'], ['moderator.id']), sa.PrimaryKeyConstraint('board_id', 'moderator_id'))
    op.create_index(op.f('ix_boardmoderator_roles'), 'boardmoderator', ['roles'], unique=False)
def test_init_signed_transaction():
    """A SignedTransaction exposes its ledger id, body, str() form and equality."""
    expected_ledger = 'some_ledger'
    expected_body = {'key': 'value'}
    transaction = SignedTransaction(expected_ledger, expected_body)
    assert transaction.ledger_id == expected_ledger
    assert transaction.body == expected_body
    expected_repr = "SignedTransaction: ledger_id=some_ledger, body={'key': 'value'}"
    assert str(transaction) == expected_repr
    # Reflexive equality.
    assert transaction == transaction
class ImplicitPlane(Component):
    """Mayavi component pairing an interactive tvtk ImplicitPlaneWidget with a
    tvtk Plane kept in sync with it, so downstream filters can use the plane
    as an implicit function."""

    # Version number of this class, used for persistence.
    __version__ = 0

    # The interactive plane widget displayed in the scene.
    widget = Instance(tvtk.ImplicitPlaneWidget, args=(), kw={'key_press_activation': False, 'place_factor': 1.2, 'draw_plane': False, 'outline_translation': False}, record=True)
    # The implicit plane synced from the widget via update_plane().
    plane = Instance(tvtk.Plane, args=(), kw={'origin': (0.0, 0.0, 0.0), 'normal': (0, 0, 1)}, record=True)
    # Convenience properties delegating to the widget (see _get_/_set_ below).
    normal = Property
    origin = Property
    # True until the widget has been placed over the first input's bounds.
    _first = Bool(True)
    # Re-entrancy guard while handling widget interaction events.
    _busy = Bool(False)
    # Cached bounds of the input dataset, used to (re)place the widget.
    _bounds = Any
    # Traits UI layout for the widget's editable traits.
    _widget_group = Group(Item(name='enabled'), Item(name='normal_to_x_axis'), Item(name='normal_to_y_axis'), Item(name='normal_to_z_axis'), Item(name='outline_translation'), Item(name='scale_enabled'), Item(name='tubing'), Item(name='draw_plane'), Item(name='normal'), Item(name='origin'))
    view = View(Group(Item(name='widget', style='custom', editor=InstanceEditor(view=View(_widget_group))), show_labels=False))

    def setup_pipeline(self):
        """Register the widget and wire up observers/trait handlers."""
        self.widgets = [self.widget]
        self._connect()

    def update_pipeline(self):
        """Feed the first input into the widget and refresh the outputs."""
        if ((len(self.inputs) == 0) or (len(self.inputs[0].outputs) == 0)):
            return
        inp = self.inputs[0].outputs[0]
        w = self.widget
        self.configure_input(w, inp)
        if self._first:
            # First input seen: place the widget over the data bounds and
            # center it on the dataset.
            dsh = DataSetHelper(self.inputs[0].outputs[0])
            self._bounds = dsh.get_bounds()
            w.place_widget(*self._bounds)
            self.origin = dsh.get_center()
            self._first = False
        else:
            # Perturb the normal slightly and restore it -- presumably to
            # force the widget/plane to refresh for the new input; confirm.
            n = self.normal
            self.normal = (n[0], n[1], (n[2] + 0.001))
            self.normal = n
        if (self.outputs != [inp]):
            self.outputs = [inp]
        else:
            # Output object unchanged: just notify downstream of new data.
            self.data_changed = True

    def update_data(self):
        """Propagate a data-changed notification downstream."""
        self.data_changed = True

    def update_plane(self):
        """Copy the widget's current state into self.plane and notify."""
        self.widget.get_plane(self.plane)
        self.update_data()

    def _get_normal(self):
        # 'normal' property: read straight from the widget.
        return self.widget.normal

    def _set_normal(self, value):
        w = self.widget
        old = w.normal
        w.normal = value
        self.trait_property_changed('normal', old, value)
        self.update_plane()

    def _get_origin(self):
        # 'origin' property: read straight from the widget.
        return self.widget.origin

    def _set_origin(self, value):
        # Write via the raw VTK object -- presumably to bypass tvtk trait
        # handling on origin; TODO confirm why to_vtk is needed here.
        w = tvtk.to_vtk(self.widget)
        old = w.GetOrigin()
        w.SetOrigin(list(value))
        self.trait_property_changed('origin', old, value)
        self.update_plane()

    def _on_interaction_event(self, obj, event):
        # _busy guards against re-entrant updates while the user drags.
        if (not self._busy):
            self._busy = True
            self.update_plane()
            self._busy = False

    def _on_normal_set(self):
        # A normal_to_*_axis flag changed: re-place the widget over the
        # cached bounds and push the new traits to the scene.
        w = self.widget
        w.place_widget(*self._bounds)
        w.update_traits()

    def _connect(self):
        """Hook up VTK observers and trait-change handlers for the widget."""
        w = self.widget
        w.add_observer('InteractionEvent', self._on_interaction_event)
        w.on_trait_change(self._on_normal_set, 'normal_to_x_axis')
        w.on_trait_change(self._on_normal_set, 'normal_to_y_axis')
        w.on_trait_change(self._on_normal_set, 'normal_to_z_axis')
        w.on_trait_change(self._on_interaction_event)
        # Re-render whenever the plane or the widget changes.
        for obj in (self.plane, w):
            obj.on_trait_change(self.render)
def get_last_closed_submission_date(is_quarter: Optional[bool]=None) -> Optional[dict]:
    """Return the most recent submission window whose reveal date has passed.

    When ``is_quarter`` is given, restrict to quarterly (True) or monthly
    (False) windows. Returns the window row as a dict of field values, or
    None when no window matches.
    """
    filters = {'submission_reveal_date__lte': now()}
    if is_quarter is not None:
        filters['is_quarter'] = is_quarter
    ordering = ('-submission_fiscal_year', '-submission_fiscal_quarter', '-submission_fiscal_month')
    queryset = DABSSubmissionWindowSchedule.objects.filter(**filters).order_by(*ordering)
    return queryset.values().first()
class M2MTest(TestModelMixin, TestBase):
    """Checks that many-to-many relations are captured in revision versions."""

    def testM2MSave(self):
        related_a = TestModelRelated.objects.create(name='v1')
        related_b = TestModelRelated.objects.create(name='v2')
        with reversion.create_revision():
            obj = TestModel.objects.create()
            obj.related.add(related_a)
            obj.related.add(related_b)
        version = Version.objects.get_for_object(obj).first()
        # The serialized version must record both related primary keys.
        self.assertEqual(set(version.field_dict['related']), {related_a.pk, related_b.pk})
def TorchScriptWrapper_v1(torchscript_model: Optional['torch.jit.ScriptModule']=None, convert_inputs: Optional[Callable]=None, convert_outputs: Optional[Callable]=None, mixed_precision: bool=False, grad_scaler: Optional[PyTorchGradScaler]=None, device: Optional['torch.device']=None) -> Model[(Any, Any)]:
    """Wrap a TorchScript module as a Thinc ``Model``.

    Missing input/output converters fall back to the PyTorch defaults; the
    module itself is held by a TorchScriptShim.
    """
    converters = {
        'convert_inputs': convert_pytorch_default_inputs if convert_inputs is None else convert_inputs,
        'convert_outputs': convert_pytorch_default_outputs if convert_outputs is None else convert_outputs,
    }
    shim = TorchScriptShim(model=torchscript_model, mixed_precision=mixed_precision, grad_scaler=grad_scaler, device=device)
    return Model('pytorch_script', forward, attrs=converters, shims=[shim], dims={'nI': None, 'nO': None})
def publish_source_tarball(apkfilename, unsigned_dir, output_dir):
    """Move an APK's source tarball from unsigned_dir to output_dir, if any.

    The tarball is expected to be named ``<stem>_src.tar.gz`` where the stem
    is the APK filename minus its 4-character '.apk' suffix. Logs (debug)
    whether a tarball was published or none was found.

    :param apkfilename: name of the APK file (assumed to end in '.apk')
    :param unsigned_dir: directory the tarball currently resides in
    :param output_dir: directory to move the tarball into
    """
    tarball_name = apkfilename[:(- 4)] + '_src.tar.gz'
    # Renamed from 'tarfile': the old local shadowed the stdlib module name.
    tarball_path = os.path.join(unsigned_dir, tarball_name)
    if os.path.exists(tarball_path):
        shutil.move(tarball_path, os.path.join(output_dir, tarball_name))
        logging.debug('...published %s', tarball_name)
    else:
        logging.debug('...no source tarball for %s', apkfilename)
.parametrize('field,expected', [('no_a', False), ('no_b', False), ('no_c', False), ('no_d', False), ('no_e', False), ('no_f', False), ('yes_a', True), ('yes_b', True), ('yes_c', True), ('yes_d', True)])
def test_is_many(field: str, expected: bool) -> None:
    """_is_many must be True exactly for hints that contain a List type."""
    class Annotated():
        no_a: Optional[int]
        no_b: Union[(None, str)]
        no_c: Union[(None, str, int)]
        no_d: str
        no_e: float
        no_f: bool
        yes_a: Optional[List[str]]
        yes_b: List[Optional[str]]
        yes_c: List[str]
        yes_d: Union[(None, str, List[int])]
    hint = get_type_hints(Annotated)[field]
    assert _is_many(hint) == expected
def panel(ui, parent):
    """Build the wx panel (or notebook) for the groups of a traits UI.

    Returns a single TraitsUIPanel when the UI has zero or one content
    group, otherwise a notebook with one page per group.
    """
    # Bind the UI's context before building any widgets.
    ui.info.bind_context()
    content = ui._groups
    if (len(content) <= 1):
        panel = TraitsUIPanel(parent, (- 1))
        if (len(content) == 1):
            # Fill the panel from the single group; fill_panel_for_group may
            # install its own sizer on the panel or return a separate one.
            (sg_sizer, resizable, contents) = fill_panel_for_group(panel, content[0], ui)
            sizer = panel.GetSizer()
            if (sizer is not sg_sizer):
                # The group's sizer was not installed on the panel: nest it.
                sizer.Add(sg_sizer, 1, wx.EXPAND)
            sizer.Fit(panel)
        return panel
    # Multiple groups: show them as notebook pages.
    nb = create_notebook_for_items(content, ui, parent, None)
    nb.ui = ui
    return nb
class TestProtocolDocs():
    """Verify that the Python code blocks embedded in docs/protocol.md still
    execute and stay in sync with the real protocol definitions (default,
    oef_search and fipa protocols)."""

    def setup_class(cls):
        """Parse protocol.md once and collect its fenced code blocks in order."""
        markdown_parser = mistune.create_markdown(renderer=mistune.AstRenderer())
        skill_doc_file = Path(ROOT_DIR, 'docs', 'protocol.md')
        doc = markdown_parser(skill_doc_file.read_text())
        # The tests below index into this list with hard-coded offsets, so
        # the document's code-block order is part of the contract.
        cls.code_blocks = list(filter((lambda x: (x['type'] == 'block_code')), doc))

    def test_custom_protocol(self):
        """Exec the custom-protocol snippets; compare enums with DefaultMessage."""
        # Index of this section's first code block in the document.
        offset = 0
        locals_dict = {}
        compile_and_exec(self.code_blocks[offset]['text'], locals_dict=locals_dict)
        ActualPerformative = locals_dict['Performative']
        compare_enum_classes(ActualPerformative, DefaultMessage.Performative)
        compile_and_exec(self.code_blocks[(offset + 1)]['text'], locals_dict=locals_dict)
        compile_and_exec(self.code_blocks[(offset + 2)]['text'], locals_dict=locals_dict)
        ExpectedErrorCode = locals_dict['ErrorCode']
        compare_enum_classes(ExpectedErrorCode, DefaultMessage.ErrorCode)
        _ = compile_and_exec(self.code_blocks[(offset + 3)]['text'], locals_dict=locals_dict)

    def test_oef_search_protocol(self):
        """Exec the oef_search snippets; compare enums with OefSearchMessage."""
        # Index of this section's first code block in the document.
        offset = 5
        locals_dict = {'Enum': Enum}
        compile_and_exec(self.code_blocks[offset]['text'], locals_dict=locals_dict)
        ActualPerformative = locals_dict['Performative']
        compare_enum_classes(OefSearchMessage.Performative, ActualPerformative)
        compile_and_exec(self.code_blocks[(offset + 1)]['text'], locals_dict=locals_dict)
        # NOTE(review): block (offset + 3) is executed before (offset + 2) --
        # presumably a dependency between the snippets; confirm intentional.
        compile_and_exec(self.code_blocks[(offset + 3)]['text'], locals_dict=locals_dict)
        compile_and_exec(self.code_blocks[(offset + 2)]['text'], locals_dict=locals_dict)
        locals_dict['OefSearchMessage'] = OefSearchMessage
        compile_and_exec(self.code_blocks[(offset + 4)]['text'], locals_dict=locals_dict)
        compile_and_exec(self.code_blocks[(offset + 5)]['text'], locals_dict=locals_dict)
        compile_and_exec(self.code_blocks[(offset + 6)]['text'], locals_dict=locals_dict)
        compile_and_exec(self.code_blocks[(offset + 7)]['text'], locals_dict=locals_dict)
        compile_and_exec(self.code_blocks[(offset + 8)]['text'], locals_dict=locals_dict)
        compile_and_exec(self.code_blocks[(offset + 9)]['text'], locals_dict=locals_dict)
        assert (locals_dict['query_data'] == {'search_term': 'country', 'search_value': 'UK', 'constraint_type': '=='})
        compile_and_exec(self.code_blocks[(offset + 10)]['text'], locals_dict=locals_dict)
        ActualOefErrorOperation = locals_dict['OefErrorOperation']
        ExpectedOefErrorOperation = OefErrorOperation
        compare_enum_classes(ExpectedOefErrorOperation, ActualOefErrorOperation)

    def test_fipa_protocol(self):
        """Exec the fipa snippet; compare its Performative with FipaMessage's."""
        # Index of this section's first code block in the document.
        offset = 15
        locals_dict = {'Enum': Enum}
        compile_and_exec(self.code_blocks[offset]['text'], locals_dict=locals_dict)
        ActualFipaPerformative = locals_dict['Performative']
        ExpectedFipaPerformative = FipaMessage.Performative
        compare_enum_classes(ExpectedFipaPerformative, ActualFipaPerformative)
.django_db
def test_for_bogus_agencies(client, agency_data):
    """An autocomplete search matching no agency returns an empty result set
    while the matview still holds only the expected rows."""
    payload = {'search_text': 'Results'}
    resp = client.post('/api/v2/autocomplete/awarding_agency/', content_type='application/json', data=payload)
    assert resp.status_code == status.HTTP_200_OK
    assert len(resp.data['results']) == 0
    matviews = AgencyAutocompleteMatview.objects
    assert matviews.filter(pk=1).count() == 0
    assert matviews.filter(pk=2).count() == 0
    assert matviews.count() == 4
def groupnorm_gen_func_call(func_attrs: Dict[(str, Any)], indent='  ') -> str:
    """Render the function-call source for a groupnorm op from its attributes."""
    outputs = func_attrs['outputs']
    inputs = func_attrs['inputs']
    assert len(outputs) == 1
    assert len(inputs) >= 1, 'expected at least 1 inputs but got {}'.format(len(func_attrs['inputs']))
    (input_name, gamma_name, beta_name) = get_input_names(func_attrs)
    in_shape = inputs[0]._attrs['shape']
    out_shape = outputs[0]._attrs['shape']
    # H/W (and their output counterparts) are passed by address in the template.
    return FUNC_CALL_TEMPLATE.render(
        func_name=func_attrs['name'],
        output=outputs[0]._attrs['name'],
        input=input_name,
        gamma=gamma_name,
        beta=beta_name,
        N=in_shape[0]._attrs['name'],
        H='&' + in_shape[1]._attrs['name'],
        W='&' + in_shape[2]._attrs['name'],
        HO='&' + out_shape[1]._attrs['name'],
        WO='&' + out_shape[2]._attrs['name'],
        eps=func_attrs['eps'],
        indent=indent,
    )
def log(msgdict):
    """Persist a task log message and mirror a trimmed copy into the per-DC
    log caches (staff cache always; owner cache unless owner is super admin)."""
    entry = msgdict.copy()
    TaskLogEntry.add(**msgdict)
    dc_id = entry.pop('dc_id')
    owner_id = entry.pop('owner_id')
    # Strip fields that are not cached.
    for field in ('user_id', 'object_pk', 'content_type'):
        del entry[field]
    entry['time'] = entry['time'].isoformat()
    _cache_log(_cache_log_key(settings.TASK_LOG_STAFF_ID, dc_id), entry)
    if owner_id not in User.get_super_admin_ids():
        _cache_log(_cache_log_key(owner_id, dc_id), entry)
(st.permutations(range(10)))
def test_out_of_order_line(insertion_order):
    """Roots and depths stay consistent for a linear chain no matter the
    insertion order, and pruning the root invalidates lookups for it."""
    tracker = RootTracker()
    for node_id in insertion_order:
        tracker.add(node_id, node_id - 1)
    for expected_root in range(9):
        for node_id in range(expected_root, 10):
            (found_root, found_depth) = tracker.get_root(node_id)
            assert found_depth == node_id - expected_root
            assert found_root == expected_root
    (root_of_nine, _) = tracker.get_root(9)
    tracker.prune(root_of_nine)
    with pytest.raises(ValidationError):
        tracker.get_root(root_of_nine)
def run_compile(proc_list, h_file_name: str):
    """Compile a list of procedures into C source and header strings.

    Returns a ``(source, header)`` pair: the source includes the named
    header and the compiled bodies; the header wraps the forward
    declarations in a pragma/ifndef guard and an extern "C" block.
    """
    # Library name is derived from the header file's stem.
    file_stem = str(Path(h_file_name).stem)
    lib_name = sanitize_str(file_stem)
    (fwd_decls, body) = compile_to_strings(lib_name, proc_list)
    source = f'''#include "{h_file_name}"
{body}'''
    # Include-guard macro, e.g. MYLIB_H.
    header_guard = f'{lib_name}_H'.upper()
    header = f'''
#pragma once
#ifndef {header_guard}
#define {header_guard}
#ifdef __cplusplus
extern "C" {{
#endif
{fwd_decls}
#ifdef __cplusplus
}}
#endif
#endif // {header_guard}
'''
    return (source, header)
def execute_psql_command(psql_command: str, host, port=5432, user=os.getenv('PGUSER', default='postgres')) -> bytes:
    """Run a single psql command against a PostgreSQL server and return its
    stdout.

    On localhost the command is run as the given OS user via sudo/runuser;
    otherwise a network connection is made with host/port/user flags.

    :param psql_command: SQL/meta command passed to ``psql -c``
    :param host: database host name or address
    :param port: database port (ignored for localhost)
    :param user: database / OS user (defaults to $PGUSER or 'postgres')
    :raises CalledProcessError: re-raised after logging when psql fails
    """
    # Build the argument vector directly instead of formatting a shell
    # string and shlex-splitting it: the old approach broke (and was
    # quoting-fragile) when psql_command contained quotes or metacharacters.
    if (host in ['localhost', '127.0.0.1']):
        cmd = ['sudo', 'runuser', '-u', user, '--', 'psql', '-c', psql_command]
    else:
        cmd = ['psql', f'--host={host}', f'--port={port}', f'--username={user}', '-c', psql_command]
    try:
        return check_output(cmd)
    except CalledProcessError as error:
        # NOTE(review): error.stderr is only populated when stderr is
        # captured (e.g. stderr=PIPE) -- it may be None here; confirm.
        logging.error(f'''Error during PostgreSQL installation:
{error.stderr}''')
        raise
def send_ticket_email(ticket_data, ticket_svg):
    """Email a PDF ticket (rendered from the given SVG) to the ticket holder."""
    event_name = ticket_data['event'].name
    (body_txt, body_html) = get_ticket_body(ticket_data['first_name'], ticket_data['last_name'], event_name)
    email = EmailMultiAlternatives()
    email.subject = get_ticket_subject(event_name)
    email.body = body_txt
    email.attach_alternative(body_html, 'text/html')
    email.to = [ticket_data['email']]
    # Render the SVG ticket to PDF and attach it.
    pdf_bytes = cairosvg.svg2pdf(bytestring=ticket_svg)
    attachment_name = 'Ticket-{}.pdf'.format(ticket_data['ticket'].code)
    email.attach(attachment_name, pdf_bytes, 'application/pdf')
    email.send(fail_silently=False)
class TestFlyDiveCommand(BaseEvenniaCommandTest):
    """Tests the 'fly' and 'dive' commands for moving between stacked XYZ
    maps (z levels -2..1 built from MAP13a-d)."""

    def setUp(self):
        """Build a four-level test grid and spawn its rooms."""
        super().setUp()
        (self.grid, err) = xyzgrid.XYZGrid.create('testgrid')
        self.map_data13a = {'map': MAP13a, 'zcoord': (- 2)}
        self.map_data13b = {'map': MAP13b, 'zcoord': (- 1)}
        self.map_data13c = {'map': MAP13c, 'zcoord': 0}
        self.map_data13d = {'map': MAP13d, 'zcoord': 1}
        self.grid.add_maps(self.map_data13a, self.map_data13b, self.map_data13c, self.map_data13d)
        self.grid.spawn()

    def tearDown(self):
        # Remove the grid (and its spawned rooms) after each test.
        self.grid.delete()

    # NOTE(review): this bare list of (startcoord, cmdstring, success,
    # endcoord) tuples looks like the argument of a stripped parametrize/
    # expand decorator for test_fly_and_dive below -- confirm against the
    # original source.
    ([((0, 0, (- 2)), 'fly', False, (0, 0, (- 2))), ((1, 1, (- 2)), 'fly', True, (1, 1, (- 1))), ((1, 1, (- 1)), 'fly', True, (1, 1, 0)), ((1, 1, 0), 'fly', True, (1, 1, 1)), ((1, 1, 1), 'fly', False, (1, 1, 1)), ((0, 0, 1), 'fly', False, (0, 0, 1)), ((0, 0, 1), 'dive', False, (0, 0, 1)), ((1, 1, 1), 'dive', True, (1, 1, 0)), ((1, 1, 0), 'dive', True, (1, 1, (- 1))), ((1, 1, (- 1)), 'dive', True, (1, 1, (- 2))), ((1, 1, (- 2)), 'dive', False, (1, 1, (- 2)))])

    def test_fly_and_dive(self, startcoord, cmdstring, success, endcoord):
        """Move char1 to startcoord, run cmdstring, and verify the end room."""
        start_room = xyzgrid.XYZRoom.objects.get_xyz(xyz=startcoord)
        self.char1.move_to(start_room)
        # A successful move starts with 'You', a blocked one with "Can't".
        self.call(commands.CmdFlyAndDive(), '', ('You' if success else "Can't"), cmdstring=cmdstring)
        self.assertEqual(self.char1.location.xyz, endcoord)
def test_modes():
    """ModeSpec accepts valid constructions and rejects bad track_freq values."""
    _ = td.ModeSpec(num_modes=2)
    _ = td.ModeSpec(num_modes=1, target_neff=1.0)
    # Every documented track_freq option must validate.
    for tracking in (None, 'lowest', 'highest', 'central'):
        _ = td.ModeSpec(num_modes=3, track_freq=tracking)
    # Unknown strings and non-string values must be rejected.
    for bad_tracking in ('middle', 4):
        with pytest.raises(pydantic.ValidationError):
            _ = td.ModeSpec(num_modes=3, track_freq=bad_tracking)
def get_widths2(seq):
    """Parse a CIDFont W2-style array into {cid: (w, (vx, vy))} metrics.

    The sequence alternates between two encodings:
      c [w1 vx1 vy1 w2 vx2 vy2 ...]   per-character triples starting at c
      c_first c_last w vx vy          one triple applied to a cid range
    """
    widths = {}
    pending = []
    for item in seq:
        if isinstance(item, list):
            if pending:
                start_cid = pending[(- 1)]
                for (offset, (w, vx, vy)) in enumerate(choplist(3, item)):
                    widths[start_cid + offset] = (w, (vx, vy))
            pending = []
        elif isnumber(item):
            pending.append(item)
            if len(pending) == 5:
                (first_cid, last_cid, w, vx, vy) = pending
                for cid in range(first_cid, last_cid + 1):
                    widths[cid] = (w, (vx, vy))
                pending = []
    return widths
def addition_fixer(bmg: BMGraphBuilder, typer: LatticeTyper) -> NodeFixer:
    """Return a fixer rewriting additions of the form ``1 + (-p)`` into
    ``complement(p)`` when p is a probability or boolean."""

    def fixer(node: bn.BMGNode) -> NodeFixerResult:
        if not isinstance(node, bn.AdditionNode) or len(node.inputs) != 2:
            return Inapplicable
        left = node.inputs[0]
        right = node.inputs[1]
        # Check both operand orders: (1, -p) and (-p, 1).
        for (one_side, negated) in ((left, right), (right, left)):
            if bn.is_one(one_side) and isinstance(negated, bn.NegateNode) and typer.is_prob_or_bool(negated.operand):
                return bmg.add_complement(negated.operand)
        return Inapplicable

    return fixer
class OptionPlotoptionsDependencywheelSonificationContexttracksMappingLowpass(Options):
    """Option wrapper for the Highcharts path
    plotOptions.dependencywheel.sonification.contextTracks.mapping.lowpass.

    NOTE(review): the accessors below look like @property getters whose
    decorators were stripped (see sibling Options classes) -- confirm
    against the original source.
    """

    def frequency(self) -> 'OptionPlotoptionsDependencywheelSonificationContexttracksMappingLowpassFrequency':
        # Sub-options controlling the lowpass filter frequency mapping.
        return self._config_sub_data('frequency', OptionPlotoptionsDependencywheelSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsDependencywheelSonificationContexttracksMappingLowpassResonance':
        # Sub-options controlling the lowpass filter resonance mapping.
        return self._config_sub_data('resonance', OptionPlotoptionsDependencywheelSonificationContexttracksMappingLowpassResonance)
class TestInfraConfigFreeFunctions(unittest.TestCase):
    """Tests for the free functions operating on InfraConfig (status updates)."""

    # NOTE(review): the bare ('time.time', return_value=...) lines below look
    # like stripped @patch('time.time', return_value=...) decorators (the
    # mock_time parameter supports this) -- confirm against the original
    # source; as written they are not valid syntax.
    ('time.time', return_value=444)
    def test_post_update_status_stage_flow_incomplete(self, mock_time: MagicMock) -> None:
        """A non-terminal status appends a StatusUpdate but leaves end_ts alone."""
        config = InfraConfig(instance_id='test_instance_123', role=PrivateComputationRole.PARTNER, status=PrivateComputationInstanceStatus.ID_MATCHING_STARTED, status_update_ts=123, instances=[], game_type=PrivateComputationGameType.ATTRIBUTION, num_pid_containers=10, num_mpc_containers=20, num_files_per_mpc_container=100, status_updates=[])
        original_end_ts = config.end_ts
        expected_status_updates = [StatusUpdate(status=config.status, status_update_ts=444, status_update_ts_delta=0)]
        post_update_status(config)
        self.assertEqual(expected_status_updates, config.status_updates)
        self.assertEqual(original_end_ts, config.end_ts)

    ('time.time', return_value=555)
    def test_post_update_status_stage_flow_complete(self, mock_time: MagicMock) -> None:
        """A terminal status appends a StatusUpdate and stamps end_ts."""
        config = InfraConfig(instance_id='test_instance_123', role=PrivateComputationRole.PARTNER, status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_COMPLETED, status_update_ts=123, instances=[], game_type=PrivateComputationGameType.ATTRIBUTION, num_pid_containers=10, num_mpc_containers=20, num_files_per_mpc_container=100, status_updates=[])
        expected_status_updates = [StatusUpdate(status=config.status, status_update_ts=555, status_update_ts_delta=0)]
        post_update_status(config)
        self.assertEqual(expected_status_updates, config.status_updates)
        self.assertEqual(555, config.end_ts)

    # The following cases are placeholders not yet implemented.
    def test_append_status_updates(self) -> None:
        pass

    def test_raise_containers_error(self) -> None:
        pass

    def test_not_valid_containers(self) -> None:
        pass
class OptionPlotoptionsFunnel3dSonificationPointgrouping(Options):
    """Option wrapper for the Highcharts path
    plotOptions.funnel3d.sonification.pointGrouping.

    NOTE(review): each pair of same-named methods below looks like a
    @property getter plus setter whose decorators were stripped; as written
    the second definition overwrites the first at class-creation time --
    confirm against the original source.
    """

    def algorithm(self):
        # Grouping algorithm; defaults to 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Whether point grouping is enabled; defaults to True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Timespan (ms) of each group; defaults to 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property the grouping operates on; defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def dedupe_ofmsgs(input_ofmsgs, random_order, flowkey):
    """Drop duplicate OpenFlow messages, keeping the last message per flowkey.

    Returns the survivors shuffled when random_order is truthy, otherwise
    sorted via sort_flows().
    """
    unique_by_key = {flowkey(ofmsg): ofmsg for ofmsg in input_ofmsgs}
    if not random_order:
        return sort_flows(unique_by_key.values())
    shuffled = list(unique_by_key.values())
    random.shuffle(shuffled)
    return shuffled
def enforce_single_project(enforcer, project_id, policy_filename):
    """Run the firewall enforcer on one project using a parsed policy file.

    Raises InvalidParsedPolicyFileError when the file does not parse to a
    list; annotates each result with the policy path and run context.
    """
    policy = file_loader.read_and_parse_file(policy_filename)
    if not isinstance(policy, list):
        message = 'Invalid parsed policy file: found %s expected list' % type(policy)
        raise InvalidParsedPolicyFileError(message)
    enforcer_results = enforcer.run([(project_id, policy)])
    for result in enforcer_results.results:
        result.gce_firewall_enforcement.policy_path = policy_filename
        result.run_context = enforcer_log_pb2.ENFORCER_ONE_PROJECT
    return enforcer_results
def test_deepcopy_overridden():
    """Deep-copying a Resource provider also deep-copies its override."""
    provider = providers.Resource(init_fn)
    override = providers.Object(object())
    provider.override(override)
    copied = providers.deepcopy(provider)
    copied_override = copied.overridden[0]
    # The copy is a distinct but equivalent Resource provider.
    assert copied is not provider
    assert provider.args == copied.args
    assert isinstance(provider, providers.Resource)
    # The override was copied, not shared.
    assert copied_override is not override
    assert isinstance(copied_override, providers.Object)
class OrderStatisticsEventSchema(Schema):
    """API schema exposing per-event order statistics: ticket quantities,
    order counts and sales amounts, each broken down by order status."""

    class Meta():
        type_ = 'order-statistics-event'
        self_view = 'v1.order_statistics_event_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    id = fields.Str()
    identifier = fields.Str()
    tickets = fields.Method('tickets_count')
    orders = fields.Method('orders_count')
    sales = fields.Method('sales_count')

    def _breakdown(self, count_for_status):
        """Build a {'total': n, '<status>': n, ...} dict from a callable.

        ``count_for_status(status)`` returns the metric for one order status,
        or the overall total when called with None; None results become 0.
        """
        statuses = ('draft', 'cancelled', 'pending', 'expired', 'placed', 'completed')
        result = {'total': (count_for_status(None) or 0)}
        for status in statuses:
            result[status] = (count_for_status(status) or 0)
        return result

    def tickets_count(self, obj):
        """Sum of ordered ticket quantities per status, plus the sellable max."""
        obj_id = obj.id

        def quantity_sum(status):
            # Sum OrderTicket.quantity over the event's orders, optionally
            # restricted to one order status.
            query = db.session.query(func.sum(OrderTicket.quantity.label('sum'))).join(Order.order_tickets).filter((Order.event_id == obj_id))
            if status is not None:
                query = query.filter((Order.status == status))
            return query.scalar()

        result = self._breakdown(quantity_sum)
        # Maximum sellable tickets: total quantity of all non-deleted tickets.
        max_quantity = db.session.query(func.sum(Ticket.quantity.label('sum'))).filter((Ticket.event_id == obj_id), (Ticket.deleted_at == None)).scalar()
        result['max'] = (max_quantity or 0)
        return result

    def orders_count(self, obj):
        """Number of orders per status for the event."""
        obj_id = obj.id

        def order_count(status):
            query = db.session.query(Order).filter((Order.event_id == obj_id))
            if status is not None:
                query = query.filter((Order.status == status))
            return get_count(query)

        return self._breakdown(order_count)

    def sales_count(self, obj):
        """Total order amounts per status for the event."""
        obj_id = obj.id

        def amount_sum(status):
            query = db.session.query(func.sum(Order.amount.label('sum'))).filter((Order.event_id == obj_id))
            if status is not None:
                query = query.filter((Order.status == status))
            return query.scalar()

        return self._breakdown(amount_sum)
.skipif((os.environ.get('TEST_SCOPE') == 'CICD'), reason="Don't run this on CI/CD")
.parametrize(('provider', 'feature', 'subfeature'), global_features(only_async)['ungrouped_providers'])
class TestAsyncSubFeatures(CommonAsyncTests):
    """Runs the shared async provider tests against every ungrouped
    (provider, feature, subfeature) combination.

    NOTE(review): the leading-dot '.skipif(...)' lines below look like
    stripped '@pytest.mark.skipif(...)' decorators (matching the ones above
    the class) -- confirm against the original source; as written they are
    not valid syntax.
    """

    .skipif((os.environ.get('TEST_SCOPE') == 'CICD-OPENSOURCE'), reason='Skip in opensource package cicd workflow')
    def test_async_job(self, provider, feature, subfeature):
        # Delegates to the shared async-job scenario.
        self._test_async_job(provider, feature, subfeature)

    def test_api_get_job_result_saved_output(self, provider, feature, subfeature):
        # Delegates to the saved-output retrieval scenario.
        self._test_api_get_job_result_saved_output(provider, feature, subfeature)

    .skipif((os.environ.get('TEST_SCOPE') == 'CICD-OPENSOURCE'), reason='Skip in opensource package cicd workflow')
    def test_launch_job_invalid_parameters(self, provider, feature, subfeature):
        # Delegates to the invalid-parameters launch scenario.
        self._test_launch_job_invalid_parameters(provider, feature, subfeature)

    .skipif((os.environ.get('TEST_SCOPE') == 'CICD-OPENSOURCE'), reason='Skip in opensource package cicd workflow')
    def test_get_job_result_does_not_exist(self, provider, feature, subfeature):
        # Delegates to the missing-job-result scenario.
        self._test_get_job_result_does_not_exist(provider, feature, subfeature)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.