code stringlengths 281 23.7M |
|---|
def ip_from_request(request, exclude=None) -> str:
    """Determine the originating client IP for *request*.

    Supports both Twisted-style requests (``getClientIP``/``getHeader``) and
    Django-style requests (``META``).  Walks the X-Forwarded-For chain from
    the nearest hop backwards and returns the first address that does not
    match any pattern in *exclude* (defaults to ``settings.UPSTREAM_IPS``).
    Falls back to the raw remote address if every candidate is excluded.
    """
    if exclude is None:
        exclude = settings.UPSTREAM_IPS
    # Twisted requests expose getClientIP(); Django requests use META.
    if hasattr(request, 'getClientIP'):
        remote_addr = request.getClientIP()
        forwarded = request.getHeader('x-forwarded-for')
    else:
        remote_addr = request.META.get('REMOTE_ADDR')
        forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
    candidates = [remote_addr]
    if forwarded:
        candidates += [part.strip() for part in forwarded.split(',')]
    # Nearest hop first: reversed() puts the last forwarded entry up front.
    for candidate in reversed(candidates):
        excluded = False
        for pattern in exclude:
            if match_ip(candidate, pattern):
                excluded = True
                break
        if not excluded:
            return candidate
    logger.log_warn('ip_from_request: No valid IP address found in request. Using remote_addr.')
    return remote_addr
class RawValueOp(Node):
    """Leaf node that carries a fixed raw value.

    ``args`` is a ``(value, final)`` pair: ``value`` is produced by
    :meth:`forward` and :meth:`follow`, ``final`` by :meth:`final`.
    """

    def __init__(self, args):
        super().__init__([])
        value, final = args
        # Value emitted during the forward pass.
        self.value = value
        # Value emitted when the node is finalized.
        self.final_value = final

    def forward(self, **kwargs):
        """Return the stored raw value unchanged."""
        return self.value

    def follow(self, **kwargs):
        """Return a wildcard mapping onto the stored value."""
        return fmap(('*', self.value))

    def final(self, args, operands=None, result=None, **kwargs):
        """Return the configured final value, ignoring operands/result."""
        return self.final_value
# NOTE(review): the `@pytest.mark` prefixes were stripped from the two
# decorators in this copy (only `.slow` / `.skipif(...)` remained); restored.
@pytest.mark.slow
@pytest.mark.skipif(not GPU_TESTS_ENABLED, reason='requires GPU')
def test_generate_deterministic(dolly_generator):
    """Greedy generation must be deterministic and independent of prompt order."""
    prompts = ['What is the Rust programming language?', 'What is spaCy?']
    answers = ['Rust is a multi-paradigm, high-level, general-purpose programming language. Rust is designed to have a small, consistent, and predictable language surface. Rust is also designed to be efficient, and to have a small memory footprint. Rust is designed to be safe, and to have a well-defined memory model. Rust is also designed to be concurrent, and to have a good support for concurrent programming. Rust is designed to be fast, and to have a good support for performance-critical code. Rust is also designed to be modular, and to have a good support for modular programming. Rust is designed to have a good support for internationalization and localization.\n\n', 'SpaCy is a natural language processing (NLP) library for Python that provides tokenization, part-of-speech (POS) tagging, named entity recognition (NER), and dependency parsing.\n\n']
    assert dolly_generator(prompts, config=GreedyGeneratorConfig()) == answers
    # Reversing the prompt order must simply reverse the answers.
    assert dolly_generator(prompts[::-1], config=GreedyGeneratorConfig()) == answers[::-1]
def build_lm_graph(ngram_counts, vocab):
    """Build a GTN graph (WFSA) from n-gram language-model counts.

    ngram_counts: sequence indexed by (order - 1); each entry maps an n-gram
        tuple of token ids to a ``(prob, backoff)`` pair.
    vocab: mapping from symbols (including BOS/EOS) to integer ids.
    Returns the constructed ``gtn.Graph``.
    """
    graph = gtn.Graph(False)
    lm_order = len(ngram_counts)
    assert (lm_order > 1), "build_lm_graph doesn't work for unigram LMs"
    # Maps an n-gram context (tuple of ids) to its graph node.
    state_to_node = {}
    def get_node(state):
        # Return the node for `state`, creating it on first use.
        node = state_to_node.get(state, None)
        if (node is not None):
            return node
        # Start node is the pure-BOS context; any context containing EOS is final.
        is_start = (state == tuple([vocab[BOS]]))
        is_end = (vocab[EOS] in state)
        node = graph.add_node(is_start, is_end)
        state_to_node[state] = node
        return node
    for counts in ngram_counts:
        for ngram in counts.keys():
            # Input state: all but the last token; output state: the
            # highest-order suffix the model can condition on next.
            (istate, ostate) = (ngram[0:(- 1)], ngram[(1 - lm_order):])
            inode = get_node(istate)
            onode = get_node(ostate)
            (prob, bckoff) = counts[ngram]
            # EOS transitions emit epsilon instead of the EOS label.
            lbl = (ngram[(- 1)] if (ngram[(- 1)] != vocab[EOS]) else gtn.epsilon)
            graph.add_arc(inode, onode, lbl, lbl, prob)
            # Backoff arc to the shortened context (not for EOS-containing n-grams).
            if ((bckoff is not None) and (vocab[EOS] not in ngram)):
                bnode = get_node(ngram[1:])
                graph.add_arc(onode, bnode, gtn.epsilon, gtn.epsilon, bckoff)
    return graph |
def test_adding_a_extra_volume_with_volume_mount_as_yaml():
    """extraVolumes/extraVolumeMounts given as YAML must land in the pod spec."""
    config = '\nextraVolumes:\n - name: extras\n emptyDir: {}\nextraVolumeMounts:\n - name: extras\n mountPath: /usr/share/extras\n readOnly: true\n'
    rendered = helm_template(config)
    pod_spec = rendered['statefulset'][uname]['spec']['template']['spec']
    # The extra volume must appear verbatim in the pod's volume list.
    assert {'name': 'extras', 'emptyDir': {}} in pod_spec['volumes']
    mounts = pod_spec['containers'][0]['volumeMounts']
    # ...and its mount in the first container's volumeMounts.
    assert {'name': 'extras', 'mountPath': '/usr/share/extras', 'readOnly': True} in mounts
# NOTE(review): the decorator was garbled to `_os(*metadata.platforms)` in this
# copy; restored to the standard RTA form — confirm against upstream.
@common.requires_os(*metadata.platforms)
def main():
    """Emulate dropping and executing an .exe via a renamed mshta binary."""
    server, ip, port = common.serve_web()
    # NOTE(review): the URL literal was truncated in this copy; reconstructed
    # from the Invoke-WebRequest usage below — confirm the exact path.
    url = f'http://{ip}:{port}/bin/posh.exe'
    mshta = 'C:\\Users\\Public\\mshta.exe'
    dropped = 'C:\\Users\\Public\\posh.exe'
    common.copy_file(EXE_FILE, mshta)
    cmd = f'Invoke-WebRequest -Uri {url} -OutFile {dropped}'
    common.log('Using a fake mshta to drop and execute an .exe')
    # First invocation downloads the payload; second executes it.
    common.execute([mshta, '/c', cmd], timeout=10)
    common.execute([mshta, '/c', dropped], timeout=10, kill=True)
    common.remove_file(mshta)
    common.remove_file(dropped)
class RegReplacePanelSaveCommand(sublime_plugin.TextCommand):
    """Validate and save the rule being edited in the RegReplace edit panel."""

    def is_existing_name(self, name):
        """Return True when saving must abort because *name* would overwrite
        a different existing rule and the user declined the confirmation.
        """
        original_name = self.view.settings().get('regreplace.name', None)
        rules = sublime.load_settings('reg_replace_rules.sublime-settings').get('replacements', {})
        # Fixed typo in dialog text: 'replacment' -> 'replacement'.
        msg = "The name '%s' already exists in the replacement list. Do you want to replace existing rule?" % name
        return not ((name == original_name) or (name not in rules) or sublime.ok_cancel_dialog(msg))

    def run(self, edit):
        """Parse the panel contents into a rule object and persist it."""
        obj = ConvertPythonSrc2Obj().convert(self.view.substr(sublime.Region(0, self.view.size())))[0]
        if obj is None:
            return
        if not obj.get('name'):
            error('A valid name must be provided!')
        elif (obj.get('scope') is None) and (obj.get('find') is None):
            error('A valid find pattern or scope must be provided!')
        elif not self.is_existing_name(obj['name']):
            try:
                if obj.get('find') is not None:
                    # Validate the pattern before saving so broken regexes
                    # never reach the settings file.
                    if obj.get('selection_inputs', False):
                        pass  # Selection inputs need no regex validation.
                    elif obj.get('literal', False):
                        flags = 0
                        pattern = re.escape(obj['find'])
                        if obj.get('literal_ignorecase', False):
                            flags = re.I
                        re.compile(pattern, flags)
                    else:
                        extend = sublime.load_settings('reg_replace.sublime-settings').get('extended_back_references', False)
                        if extend:
                            bre.compile_search(obj['find'])
                        else:
                            re.compile(obj['find'])
                settings = sublime.load_settings('reg_replace_rules.sublime-settings')
                rules = settings.get('replacements', {})
                rules[obj['name']] = obj
                settings.set('replacements', rules)
                sublime.save_settings('reg_replace_rules.sublime-settings')
                # Remember the saved rule name for subsequent edits.
                self.view.settings().set('regreplace.name', obj['name'])
            except Exception as e:
                error('Regex compile failed!\n\n%s' % str(e))
class OptionSeriesBellcurveSonificationTracksMappingGapbetweennotes(Options):
    """Highcharts option wrapper exposing each option as a read/write property.

    NOTE(review): this copy defined every getter/setter pair as two plain
    methods with the same name, so each getter was silently shadowed by its
    setter; restored the ``@property`` / ``@<name>.setter`` decorators.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): decorator prefixes were stripped in this copy (residue
# `.usefixtures(...)` / `.dict(...)`); restored `@pytest.mark` / `@mock.patch`.
@pytest.mark.usefixtures('use_tmpdir', 'init_eclrun_config')
@mock.patch.dict(os.environ, {'LSB_JOBID': 'some-id'})
def test_env(eclrun_conf):
    """The configured eclrun_env must be propagated into eclrun's environment."""
    # Fake `eclrun` dumps its environment to env.json for inspection.
    with open('eclrun', 'w', encoding='utf-8') as f, open('DUMMY.DATA', 'w', encoding='utf-8'):
        f.write('#!/usr/bin/env python\nimport os\nimport json\nwith open("env.json", "w") as f:\n json.dump(dict(os.environ), f)\n')
    os.chmod('eclrun', (os.stat('eclrun').st_mode | stat.S_IEXEC))
    econfig = ecl_config.Ecl100Config()
    eclrun_config = ecl_config.EclrunConfig(econfig, '2019.3')
    erun = ecl_run.EclRun('DUMMY', None, check_status=False)
    with mock.patch.object(erun, '_get_run_command', mock.MagicMock(return_value='./eclrun')):
        erun.runEclipse(eclrun_config=eclrun_config)
    with open('env.json', encoding='utf-8') as f:
        run_env = json.load(f)
    eclrun_env = eclrun_conf['eclrun_env']
    for k, v in eclrun_env.items():
        if v is None:
            # A None value means the variable must have been removed.
            assert k not in run_env
            continue
        if k == 'PATH':
            # PATH entries are prepended, not replaced.
            assert run_env[k].startswith(v)
        else:
            assert v == run_env[k]
class OptionSeriesArcdiagramOnpoint(Options):
    """Highcharts option wrapper for the arcdiagram ``onPoint`` options.

    NOTE(review): this copy had the ``@property`` decorators stripped, so the
    duplicate ``id`` definitions shadowed each other; restored the
    ``@property`` / ``@id.setter`` decorators.
    """

    @property
    def connectorOptions(self) -> 'OptionSeriesArcdiagramOnpointConnectoroptions':
        return self._config_sub_data('connectorOptions', OptionSeriesArcdiagramOnpointConnectoroptions)

    @property
    def id(self):
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def position(self) -> 'OptionSeriesArcdiagramOnpointPosition':
        return self._config_sub_data('position', OptionSeriesArcdiagramOnpointPosition)
class Router():
    """ASGI router.

    Dispatches incoming 'http' and 'websocket' scopes to the first matching
    route, implements the 'lifespan' protocol, and optionally redirects
    between trailing-slash variants of a path.

    NOTE(review): several string literals in this copy were garbled (the
    ``'http'`` literals and the URLs inside the deprecation messages were
    stripped); restored from upstream Starlette.
    """

    def __init__(self, routes: typing.Optional[typing.Sequence[BaseRoute]]=None, redirect_slashes: bool=True, default: typing.Optional[ASGIApp]=None, on_startup: typing.Optional[typing.Sequence[typing.Callable[([], typing.Any)]]]=None, on_shutdown: typing.Optional[typing.Sequence[typing.Callable[([], typing.Any)]]]=None, lifespan: typing.Optional[Lifespan[typing.Any]]=None, *, middleware: typing.Optional[typing.Sequence[Middleware]]=None) -> None:
        self.routes = [] if routes is None else list(routes)
        self.redirect_slashes = redirect_slashes
        self.default = self.not_found if default is None else default
        self.on_startup = [] if on_startup is None else list(on_startup)
        self.on_shutdown = [] if on_shutdown is None else list(on_shutdown)
        if on_startup or on_shutdown:
            warnings.warn('The on_startup and on_shutdown parameters are deprecated, and they will be removed on version 1.0. Use the lifespan parameter instead. See more about it on https://www.starlette.io/lifespan/.', DeprecationWarning)
            if lifespan:
                warnings.warn('The `lifespan` parameter cannot be used with `on_startup` or `on_shutdown`. Both `on_startup` and `on_shutdown` will be ignored.')
        if lifespan is None:
            self.lifespan_context: Lifespan[typing.Any] = _DefaultLifespan(self)
        elif inspect.isasyncgenfunction(lifespan):
            warnings.warn('async generator function lifespans are deprecated, use an @contextlib.asynccontextmanager function instead', DeprecationWarning)
            self.lifespan_context = asynccontextmanager(lifespan)
        elif inspect.isgeneratorfunction(lifespan):
            warnings.warn('generator function lifespans are deprecated, use an @contextlib.asynccontextmanager function instead', DeprecationWarning)
            self.lifespan_context = _wrap_gen_lifespan_context(lifespan)
        else:
            self.lifespan_context = lifespan
        # Wrap self.app in the user-supplied middleware, outermost first.
        self.middleware_stack = self.app
        if middleware:
            for cls, args, kwargs in reversed(middleware):
                self.middleware_stack = cls(self.middleware_stack, *args, **kwargs)

    async def not_found(self, scope: Scope, receive: Receive, send: Send) -> None:
        """Default handler when no route matches."""
        if scope['type'] == 'websocket':
            websocket_close = WebSocketClose()
            await websocket_close(scope, receive, send)
            return
        # If we're running inside a Starlette application, raise so the
        # configured exception handlers can render the 404.
        if 'app' in scope:
            raise HTTPException(status_code=404)
        else:
            response = PlainTextResponse('Not Found', status_code=404)
            await response(scope, receive, send)

    def url_path_for(self, name: str, /, **path_params: typing.Any) -> URLPath:
        """Reverse-lookup a URL path by route name; raises NoMatchFound."""
        for route in self.routes:
            try:
                return route.url_path_for(name, **path_params)
            except NoMatchFound:
                pass
        raise NoMatchFound(name, path_params)

    async def startup(self) -> None:
        """Run any .on_startup event handlers."""
        for handler in self.on_startup:
            if is_async_callable(handler):
                await handler()
            else:
                handler()

    async def shutdown(self) -> None:
        """Run any .on_shutdown event handlers."""
        for handler in self.on_shutdown:
            if is_async_callable(handler):
                await handler()
            else:
                handler()

    async def lifespan(self, scope: Scope, receive: Receive, send: Send) -> None:
        """Handle ASGI lifespan messages, managing application startup/shutdown."""
        started = False
        app: typing.Any = scope.get('app')
        await receive()
        try:
            async with self.lifespan_context(app) as maybe_state:
                if maybe_state is not None:
                    if 'state' not in scope:
                        raise RuntimeError('The server does not support "state" in the lifespan scope.')
                    scope['state'].update(maybe_state)
                await send({'type': 'lifespan.startup.complete'})
                started = True
                await receive()
        except BaseException:
            exc_text = traceback.format_exc()
            if started:
                await send({'type': 'lifespan.shutdown.failed', 'message': exc_text})
            else:
                await send({'type': 'lifespan.startup.failed', 'message': exc_text})
            raise
        else:
            await send({'type': 'lifespan.shutdown.complete'})

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        await self.middleware_stack(scope, receive, send)

    async def app(self, scope: Scope, receive: Receive, send: Send) -> None:
        """Core dispatch: route matching, partial matches, slash redirects."""
        assert scope['type'] in ('http', 'websocket', 'lifespan')
        if 'router' not in scope:
            scope['router'] = self
        if scope['type'] == 'lifespan':
            await self.lifespan(scope, receive, send)
            return
        partial = None
        for route in self.routes:
            match, child_scope = route.matches(scope)
            if match == Match.FULL:
                scope.update(child_scope)
                await route.handle(scope, receive, send)
                return
            elif (match == Match.PARTIAL) and (partial is None):
                # Remember the first partial match as a fallback.
                partial = route
                partial_scope = child_scope
        if partial is not None:
            scope.update(partial_scope)
            await partial.handle(scope, receive, send)
            return
        root_path = scope.get('route_root_path', scope.get('root_path', ''))
        path = scope.get('route_path', re.sub('^' + root_path, '', scope['path']))
        if scope['type'] == 'http' and self.redirect_slashes and path != '/':
            # Try the opposite trailing-slash variant and redirect on match.
            redirect_scope = dict(scope)
            if path.endswith('/'):
                redirect_scope['route_path'] = path.rstrip('/')
                redirect_scope['path'] = redirect_scope['path'].rstrip('/')
            else:
                redirect_scope['route_path'] = path + '/'
                redirect_scope['path'] = redirect_scope['path'] + '/'
            for route in self.routes:
                match, child_scope = route.matches(redirect_scope)
                if match != Match.NONE:
                    redirect_url = URL(scope=redirect_scope)
                    response = RedirectResponse(url=str(redirect_url))
                    await response(scope, receive, send)
                    return
        await self.default(scope, receive, send)

    def __eq__(self, other: typing.Any) -> bool:
        return isinstance(other, Router) and (self.routes == other.routes)

    def mount(self, path: str, app: ASGIApp, name: typing.Optional[str]=None) -> None:
        route = Mount(path, app=app, name=name)
        self.routes.append(route)

    def host(self, host: str, app: ASGIApp, name: typing.Optional[str]=None) -> None:
        route = Host(host, app=app, name=name)
        self.routes.append(route)

    def add_route(self, path: str, endpoint: typing.Callable[([Request], typing.Union[(typing.Awaitable[Response], Response)])], methods: typing.Optional[typing.List[str]]=None, name: typing.Optional[str]=None, include_in_schema: bool=True) -> None:
        route = Route(path, endpoint=endpoint, methods=methods, name=name, include_in_schema=include_in_schema)
        self.routes.append(route)

    def add_websocket_route(self, path: str, endpoint: typing.Callable[([WebSocket], typing.Awaitable[None])], name: typing.Optional[str]=None) -> None:
        route = WebSocketRoute(path, endpoint=endpoint, name=name)
        self.routes.append(route)

    def route(self, path: str, methods: typing.Optional[typing.List[str]]=None, name: typing.Optional[str]=None, include_in_schema: bool=True) -> typing.Callable:
        warnings.warn('The `route` decorator is deprecated, and will be removed in version 1.0.0. Refer to https://www.starlette.io/routing/ for the recommended approach.', DeprecationWarning)

        def decorator(func: typing.Callable) -> typing.Callable:
            self.add_route(path, func, methods=methods, name=name, include_in_schema=include_in_schema)
            return func
        return decorator

    def websocket_route(self, path: str, name: typing.Optional[str]=None) -> typing.Callable:
        warnings.warn('The `websocket_route` decorator is deprecated, and will be removed in version 1.0.0. Refer to https://www.starlette.io/routing/ for the recommended approach.', DeprecationWarning)

        def decorator(func: typing.Callable) -> typing.Callable:
            self.add_websocket_route(path, func, name=name)
            return func
        return decorator

    def add_event_handler(self, event_type: str, func: typing.Callable[([], typing.Any)]) -> None:
        assert event_type in ('startup', 'shutdown')
        if event_type == 'startup':
            self.on_startup.append(func)
        else:
            self.on_shutdown.append(func)

    def on_event(self, event_type: str) -> typing.Callable:
        warnings.warn('The `on_event` decorator is deprecated, and will be removed in version 1.0.0. Refer to https://www.starlette.io/lifespan/ for recommended approach.', DeprecationWarning)

        def decorator(func: typing.Callable) -> typing.Callable:
            self.add_event_handler(event_type, func)
            return func
        return decorator
def verify_privacy_notice_and_historical_records(db: Session, notice_history_list: List[SafeStr]) -> None:
    """Validate that every supplied privacy-notice-history id corresponds to a
    distinct PrivacyNotice; raise HTTP 400 otherwise.
    """
    distinct_notices: int = (
        db.query(PrivacyNotice)
        .join(PrivacyNoticeHistory, PrivacyNoticeHistory.privacy_notice_id == PrivacyNotice.id)
        .filter(PrivacyNoticeHistory.id.in_(notice_history_list))
        .distinct()
        .count()
    )
    # Fewer distinct notices than requested ids means at least one id is
    # invalid or duplicated against the same notice.
    if distinct_notices < len(notice_history_list):
        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail='Invalid privacy notice histories in request')
class Model(abc.ABC):
    """Abstract base for models mapping a numerical ndarray to an ndarray
    while preserving every dimension except the computation axis.

    NOTE(review): `_compute` and `max_data_value` had empty bodies on an
    ``abc.ABC`` subclass — restored the ``@abc.abstractmethod`` decorators.
    """

    def __call__(self, data, axis=(- 1)):
        """Validate *data*, run ``_compute`` along *axis* and check the result.

        Raises TypeError for non-ndarray input, ValueError for non-numerical
        dtypes or if ``_compute`` altered any dimension other than *axis*.
        Only ``axis == -1`` is normalized to the last axis.
        """
        if not isinstance(data, _np.ndarray):
            raise TypeError(f'Model should take ndarray as input data, not {type(data)}.')
        if data.dtype.kind not in ('b', 'i', 'u', 'f', 'c'):
            # Fixed stray ')' at the end of this message.
            raise ValueError(f'Model should take numerical ndarray as input data, not {data.dtype}.')
        if axis == (- 1):
            axis = len(data.shape) - 1
        results = self._compute(data, axis=axis)
        # Every dimension except *axis* must be preserved by _compute.
        check_shape = [d for (i, d) in enumerate(results.shape) if i != axis]
        origin_shape = [d for (i, d) in enumerate(data.shape) if i != axis]
        if check_shape != origin_shape:
            raise ValueError(f'Model instance {self.__class__} does not preserve dimensions of data properly on call.')
        return results

    @abc.abstractmethod
    def _compute(self, data, axis):
        """Compute the model output for *data* along *axis*."""

    @abc.abstractmethod
    def max_data_value(self):
        """Return the maximum data value the model supports."""
class StyleDefinition(object):
    """Describes how one bracket style is rendered (color, icons, regions)."""

    def __init__(self, name, style, default_highlight, icon_path, minimap):
        self.name = name
        # Fall back to the default highlight for any unspecified option.
        self.color = style.get('color', default_highlight['color'])
        self.style = select_bracket_style(style.get('style', default_highlight['style']), minimap)
        self.underline = self.style & sublime.DRAW_EMPTY_AS_OVERWRITE
        self.endpoints = style.get('endpoints', False)
        icons = select_bracket_icons(style.get('icon', default_highlight['icon']), icon_path)
        (self.icon, self.small_icon, self.open_icon,
         self.small_open_icon, self.close_icon, self.small_close_icon) = icons
        self.no_icon = ''
        self.clear()

    def clear(self):
        """Reset every selection-region list to empty."""
        for attr in ('selections', 'open_selections', 'close_selections',
                     'center_selections', 'content_selections'):
            setattr(self, attr, [])
def _listToString(l):
    """Serialize list *l* to source-style text, e.g. ``[1, "a", [2]]``.

    Nested dicts/lists/tuples are serialized recursively; numbers via repr;
    strings are double-quoted.  An empty list yields the empty string.
    """
    parts = []
    for item in l:
        if isinstance(item, dict):
            item = _dictToString(item)
        elif isinstance(item, list):
            item = _listToString(item)
        elif isinstance(item, tuple):
            item = _tupleToString(item)
        elif isinstance(item, numberTypes):
            item = repr(item)
        elif isinstance(item, str):
            item = '"%s"' % item
        parts.append(item)
    if not parts:
        return ''
    return '[%s]' % ', '.join(parts)
# NOTE(review): the `@pytest.mark` prefix was stripped in this copy
# (residue `.django_db`); restored.
@pytest.mark.django_db
def test_double_eclipsing_filters(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """An exclude filter must not eclipse a require filter on the same TAS."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas_subaward(client, {'require': [_agency_path(BASIC_TAS), _tas_path(BASIC_TAS)], 'exclude': [_fa_path(BASIC_TAS)]})
    assert resp.json()['results'] == [_subaward1()]
def browse_sortkey_reverse(prop):
if prop.startswith('-'):
prop = prop[1:]
reverse = True
else:
reverse = False
if (prop == 'text'):
return ((lambda x: ((x.is_directory == reverse), (x.link.text.lower() if (x.link and x.link.text) else x.name))), reverse)
if (prop == 'size'):
return ((lambda x: ((x.is_directory == reverse), x.stats.st_size)), reverse)
return ((lambda x: ((x.is_directory == reverse), getattr(x, prop, None))), reverse) |
def add_data_dictionary_to_zip(working_dir, zip_file_path):
    """Download the data-dictionary workbook from S3 into *working_dir* and
    append it to the zip archive at *zip_file_path*.
    """
    write_to_log(message='Adding data dictionary to zip file')
    data_dictionary_file_name = 'Data_Dictionary_Crosswalk.xlsx'
    data_dictionary_file_path = os.path.join(working_dir, data_dictionary_file_name)
    logger.info(f'Retrieving the data dictionary from S3. Bucket: {settings.DATA_DICTIONARY_S3_BUCKET_NAME} Key: {settings.DATA_DICTIONARY_S3_KEY}')
    logger.info(f'Saving the data dictionary to: {data_dictionary_file_path}')
    # Retry the S3 download per the configured retry count.
    download_s3_object(
        bucket_name=settings.DATA_DICTIONARY_S3_BUCKET_NAME,
        key=settings.DATA_DICTIONARY_S3_KEY,
        file_path=data_dictionary_file_path,
        retry_count=settings.DATA_DICTIONARY_DOWNLOAD_RETRY_COUNT,
    )
    append_files_to_zip_file([data_dictionary_file_path], zip_file_path)
# NOTE(review): the `@pytest.fixture` token was stripped in this copy
# (residue `(scope='function')`); restored.
@pytest.fixture(scope='function')
def system_with_no_uses(db: Session) -> System:
    """Create and return a bare System with no declared data uses."""
    system = System.create(db=db, data={'fides_key': f'system_fides_key', 'name': f'system-{uuid4()}', 'description': 'tcf_relevant_system', 'organization_fides_key': 'default_organization', 'system_type': 'Service'})
    return system
def extractHeadpatsnovelWordpressCom(item):
    """Map a feed item from headpatsnovel.wordpress.com to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (feed tag, release name, translation type)
    tagmap = [
        ('vorpal bunny and fortress uncle', 'Vorpal Bunny and Fortress Uncle', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestListItemObserverEqualHash(unittest.TestCase):
    """Equality, hashing, slots, and repr round-trip of ListItemObserver."""

    def test_not_equal_notify(self):
        a = ListItemObserver(notify=False, optional=False)
        b = ListItemObserver(notify=True, optional=False)
        self.assertNotEqual(a, b)

    def test_not_equal_optional(self):
        a = ListItemObserver(notify=True, optional=True)
        b = ListItemObserver(notify=True, optional=False)
        self.assertNotEqual(a, b)

    def test_not_equal_different_type(self):
        observer = ListItemObserver(notify=False, optional=False)
        # A mock with matching attributes must still not compare equal.
        lookalike = mock.Mock()
        lookalike.notify = False
        lookalike.optional = False
        self.assertNotEqual(observer, lookalike)

    def test_equal_observers(self):
        a = ListItemObserver(notify=False, optional=False)
        b = ListItemObserver(notify=False, optional=False)
        self.assertEqual(a, b)
        self.assertEqual(hash(a), hash(b))

    def test_slots(self):
        # __slots__ means neither __dict__ nor __weakref__ should exist.
        observer = ListItemObserver(notify=True, optional=False)
        with self.assertRaises(AttributeError):
            observer.__dict__
        with self.assertRaises(AttributeError):
            observer.__weakref__

    def test_eval_repr_roundtrip(self):
        observer = ListItemObserver(notify=True, optional=False)
        self.assertEqual(eval(repr(observer)), observer)
class EmmetConvertDataUrl(sublime_plugin.TextCommand):
    """Convert a data: URL under the caret, dispatching on the syntax there."""

    def run(self, edit):
        pos = get_caret(self.view)
        scope_name = syntax.from_pos(self.view, pos)
        if syntax.is_html(scope_name):
            convert.convert_html(self.view, edit, pos)
        elif syntax.is_css(scope_name):
            convert.convert_css(self.view, edit, pos)
def upgrade():
    """Migrate nu_release_item.reviewed from BOOLEAN to the nu_item_enum type
    (false -> 'unverified', true -> 'valid')."""
    # create_type=False + explicit create(checkfirst=True): only create the
    # enum type if it does not already exist.
    enum = ENUM('unverified', 'valid', 'rejected', name='nu_item_enum', create_type=False)
    enum.create(op.get_bind(), checkfirst=True)
    # The boolean default must be dropped before the column type can change.
    op.execute('ALTER TABLE nu_release_item ALTER COLUMN reviewed DROP DEFAULT')
    op.alter_column('nu_release_item', 'reviewed', existing_type=sa.BOOLEAN(), type_=enum, existing_nullable=False, nullable=False, server_default=sa.text("'unverified'"), existing_server_default=sa.text('false'), postgresql_using="CASE WHEN reviewed = false THEN 'unverified'::nu_item_enum ELSE 'valid'::nu_item_enum END")
    # Restore a default appropriate for the new enum type.
    op.execute("ALTER TABLE nu_release_item ALTER COLUMN reviewed SET DEFAULT 'unverified'") |
def nd_grid(*vecs):
    """Build dense coordinate grids from 1-D vectors, like
    ``numpy.meshgrid(*vecs, indexing='ij')`` stacked into one array.

    Returns an ndarray of shape ``(len(vecs), n0, n1, ...)`` where grid i
    varies along axis i and is constant along every other axis.

    Fixes: ``range(...)[1:]`` is a range object in Python 3 and has no
    ``insert``; the repeat count ``sz / n`` was a float under true division.
    """
    vecs = [np.asarray(v).ravel() for v in vecs]
    shape = tuple(v.size for v in vecs)
    total = int(np.prod(shape))
    arrs = []
    for i, (v, n) in enumerate(zip(vecs, shape)):
        # Rotate axis i to the front so repeated values line up with it.
        newshape = list(shape)
        newshape.insert(0, newshape.pop(i))
        arr = np.repeat(v, total // n).reshape(newshape)
        # Undo the rotation: move axis 0 back to position i.
        order = list(range(arr.ndim))[1:]
        order.insert(i, 0)
        arrs.append(arr.transpose(order))
    return np.array(arrs)
class ModelRegistry():
    """Registry of ORM models sharing one database and one SQLAlchemy MetaData.

    NOTE(review): ``metadata`` was a plain method in this copy, yet
    ``create_all``/``drop_all`` access ``self.metadata.create_all`` as an
    attribute — restored the stripped ``@property`` decorator.
    """

    def __init__(self, database: databases.Database) -> None:
        self.database = database
        self.models = {}
        self._metadata = sqlalchemy.MetaData()

    @property
    def metadata(self):
        """Return the MetaData after ensuring every model's table is built."""
        for model_cls in self.models.values():
            model_cls.build_table()
        return self._metadata

    async def create_all(self):
        """Create all registered tables using an async engine."""
        url = self._get_database_url()
        engine = create_async_engine(url)
        async with self.database:
            async with engine.begin() as conn:
                await conn.run_sync(self.metadata.create_all)
        await engine.dispose()

    async def drop_all(self):
        """Drop all registered tables using an async engine."""
        url = self._get_database_url()
        engine = create_async_engine(url)
        async with self.database:
            async with engine.begin() as conn:
                await conn.run_sync(self.metadata.drop_all)
        await engine.dispose()

    def _get_database_url(self) -> str:
        """Return the database URL with an async driver filled in if missing."""
        url = self.database.url
        if not url.driver:
            if url.dialect == 'postgresql':
                url = url.replace(driver='asyncpg')
            elif url.dialect == 'mysql':
                url = url.replace(driver='aiomysql')
            elif url.dialect == 'sqlite':
                url = url.replace(driver='aiosqlite')
        return str(url)
def get_wishbone_user_port_ios(_id, aw, dw):
    """Return the IO constraint entry for Wishbone user port *_id* with
    address width *aw* and data width *dw*."""
    port_name = 'user_port_{}'.format(_id)
    signals = [
        Subsignal('adr', Pins(aw)),
        Subsignal('dat_w', Pins(dw)),
        Subsignal('dat_r', Pins(dw)),
        # One byte-select line per 8 data bits.
        Subsignal('sel', Pins(dw // 8)),
        Subsignal('cyc', Pins(1)),
        Subsignal('stb', Pins(1)),
        Subsignal('ack', Pins(1)),
        Subsignal('we', Pins(1)),
        Subsignal('err', Pins(1)),
    ]
    return [(port_name, 0, *signals)]
class TestHashingDataWrapper():
    """HashingDataWrapper must be transparent: wrapped loads match direct loads.

    NOTE(review): the `@pytest.mark` prefixes on the two parametrize
    decorators were stripped in this copy; restored.
    """
    # Only include loaders whose backing store is actually available.
    data_loaders = []
    if os.path.exists(config['sf1_data_path']):
        data_loaders.append(SF1QuarterlyData())
    if secrets['mongodb_adminusername'] is not None:
        data_loaders.append(SF1QuarterlyDataMongo())

    @pytest.mark.parametrize('data_loader', data_loaders)
    @pytest.mark.parametrize('tickers', [['AAPL', 'ZRAN', 'TSLA', 'WORK'], ['INTC', 'ZRAN', 'XRDC', 'XOM', 'PNK'], ['INTC', 'ZRAN', 'XRDC', 'XOM'], ['NVDA'], ['ZRAN']])
    def test_load(self, tickers, data_loader):
        """Loading through the hashing wrapper must be shape/type identical."""
        hashing_data_loader = HashingDataWrapper(data_loader)
        df = data_loader.load(tickers)
        df1 = hashing_data_loader.load(tickers)
        # Second load exercises the cached path.
        df2 = hashing_data_loader.load(tickers)
        assert type(df) == type(df1)
        assert type(df) == type(df2)
        assert df.shape == df1.shape
        assert df.shape == df2.shape
def test_scalar_area(f):
    """Integrating the constant 1 over each surface measure must give the
    known facet areas of the mesh."""
    f.assign(1)
    tolerance = 1e-07
    cases = [
        (f * ds_t, 1.0),
        (f * ds_b, 1.0),
        (f * ds_tb, 2.0),
        (f * ds_v, 4.0),
        (f('+') * dS_h, 3.0),
        (f('-') * dS_h, 3.0),
        (f('+') * dS_v, 6.0 + (4 * sqrt(2))),
        (f('-') * dS_v, 6.0 + (4 * sqrt(2))),
    ]
    for form, expected_area in cases:
        assert abs(assemble(form) - expected_area) < tolerance
class EthstatsService(Service):
    """Trinity service reporting node status and stats to an ethstats server
    over a websocket, reconnecting on disconnect."""
    logger = logging.getLogger('trinity.components.ethstats.Service')
    def __init__(self, boot_info: BootInfo, event_bus: EndpointAPI, server_url: str, server_secret: str, node_id: str, node_contact: str, stats_interval: int) -> None:
        self.boot_info = boot_info
        self.event_bus = event_bus
        self.server_url = server_url
        self.server_secret = server_secret
        self.node_id = node_id
        self.node_contact = node_contact
        self.stats_interval = stats_interval
    async def run(self) -> None:
        """Connect to the ethstats server, run handler tasks, reconnect on close."""
        with self.get_chain() as chain:
            self.chain = chain
            while self.manager.is_running:
                self.logger.info('Connecting to %s...', self.server_url)
                async with websockets.connect(self.server_url) as websocket:
                    client: EthstatsClient = EthstatsClient(websocket, self.node_id)
                    client_manager = self.manager.run_child_service(client)
                    # One task answers server messages, one periodically pushes stats.
                    self.manager.run_task(self.server_handler, client, client_manager)
                    self.manager.run_task(self.statistics_handler, client, client_manager)
                    (await client_manager.wait_finished())
                self.logger.info('Connection to %s closed', self.server_url)
                self.logger.info('Reconnecting in 5s...')
                (await asyncio.sleep(5))
    async def server_handler(self, client: EthstatsClient, manager: ManagerAPI) -> None:
        """Respond to incoming server messages (currently only node-pong)."""
        while manager.is_running:
            message: EthstatsMessage = (await client.recv())
            if (message.command == 'node-pong'):
                # Latency is half the round-trip time in milliseconds.
                (await client.send_latency(((timestamp_ms() - message.data['clientTime']) // 2)))
            else:
                self.logger.debug('Unhandled message received: %s: %r', message.command, message)
    async def statistics_handler(self, client: EthstatsClient, manager: ManagerAPI) -> None:
        """Send the hello handshake then push ping/stats/block every interval."""
        (await client.send_hello(self.server_secret, self.get_node_info()))
        while manager.is_running:
            (await client.send_node_ping())
            (await client.send_stats((await self.get_node_stats())))
            (await client.send_block(self.get_node_block()))
            (await asyncio.sleep(self.stats_interval))
    def get_node_info(self) -> EthstatsData:
        """Static node information sent during the hello handshake."""
        return {'name': self.node_id, 'contact': self.node_contact, 'node': construct_trinity_client_identifier(), 'net': self.boot_info.trinity_config.network_id, 'port': self.boot_info.trinity_config.port, 'os': platform.system(), 'os_v': platform.release(), 'client': '0.1.1', 'canUpdateHistory': False}
    def get_node_block(self) -> EthstatsData:
        """Data describing the current canonical head block."""
        head = self.chain.get_canonical_head()
        return {'number': head.block_number, 'hash': head.hex_hash, 'difficulty': head.difficulty, 'totalDifficulty': self.chain.get_score(head.hash), 'transactions': [], 'uncles': []}
    async def get_node_stats(self) -> EthstatsData:
        """Current node stats; peer count falls back to 0 on request timeout."""
        try:
            peer_count = (await asyncio.wait_for(self.event_bus.request(PeerCountRequest(), TO_NETWORKING_BROADCAST_CONFIG), timeout=1)).peer_count
        except asyncio.TimeoutError:
            self.logger.warning('Timeout: PeerPool did not answer PeerCountRequest')
            peer_count = 0
        return {'active': True, 'uptime': 100, 'peers': peer_count}
    def get_chain(self) -> ContextManager[ChainAPI]:
        """Context manager yielding the eth1 chain backed by the remote DB."""
        return get_eth1_chain_with_remote_db(self.boot_info, self.event_bus) |
class CoverSearchPane(Gtk.Box):
    """Sidebar pane that renders cover-art web searches in an embedded
    WebKit view; the rendered page reports the chosen cover via its title.

    NOTE(review): the ampersand escaping below was garbled into no-op
    ``replace('&', '&')`` calls in this copy; restored the HTML entity.
    """

    def __init__(self, plugin, selection_color):
        super(CoverSearchPane, self).__init__()
        self.set_orientation(Gtk.Orientation.VERTICAL)
        self.selection_color = selection_color
        self.file = ''
        self.basepath = 'file://' + plugin.plugin_info.get_data_dir()
        self.load_templates(plugin)
        if webkit_support():
            self.init_gui()
            self.clear()

    def load_templates(self, plugin):
        """Load the Mako templates and stylesheet used for rendering."""
        path = rb.find_plugin_file(plugin, 'tmpl/albumartsearch-tmpl.html')
        self.template = Template(filename=path, default_filters=['decode.utf8'], module_directory='/tmp/', encoding_errors='replace')
        path = rb.find_plugin_file(plugin, 'tmpl/albumartsearchempty-tmpl.html')
        self.empty_template = Template(filename=path, default_filters=['decode.utf8'], module_directory='/tmp/', encoding_errors='replace')
        path = rb.find_plugin_file(plugin, 'tmpl/artistartsearch-tmpl.html')
        self.artist_template = Template(filename=path, default_filters=['decode.utf8'], module_directory='/tmp/', encoding_errors='replace')
        self.styles = rb.find_plugin_file(plugin, 'tmpl/main.css')

    def init_gui(self):
        """Embed a WebKit view inside a scrolled window."""
        from gi.repository import WebKit
        self.webview = WebKit.WebView()
        settings = self.webview.get_settings()
        settings.set_property('enable-default-context-menu', False)
        self.webview.set_settings(settings)
        scroll = Gtk.ScrolledWindow()
        scroll.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
        scroll.add(self.webview)
        self.pack_start(scroll, expand=True, fill=True, padding=0)
        self.show_all()
        # The page signals the selected cover by changing the document title.
        self.webview.connect('notify::title', self.set_cover)

    def do_search(self, coverobject, callback):
        """Start a cover search for an Album (or artist) object; the result
        is delivered through *callback*."""
        print('coverart-search do_search')
        if coverobject is self.current_searchobject:
            return
        self.current_searchobject = coverobject
        self.callback = callback
        if isinstance(coverobject, Album):
            artist = coverobject.artist
            album_name = coverobject.name
            if album_name.upper() == 'UNKNOWN':
                album_name = ''
            if artist.upper() == 'UNKNOWN':
                artist = ''
            if not ((album_name == '') and (artist == '')):
                # Escape ampersands for the HTML template.
                artist = rb3compat.unicodestr(artist.replace('&', '&amp;'), 'utf-8')
                album_name = rb3compat.unicodestr(album_name.replace('&', '&amp;'), 'utf-8')
                self.render_album_art_search(artist, album_name)
        else:
            artist_name = coverobject.name
            if artist_name.upper() == 'UNKNOWN':
                artist_name = ''
            if not (artist_name == ''):
                artist = rb3compat.unicodestr(artist_name.replace('&', '&amp;'), 'utf-8')
                self.render_artist_art_search(artist)

    def render_album_art_search(self, artist, album_name):
        """Render the album-art search template into the web view."""
        temp_file = self.template.render(artist=artist, album=album_name, stylesheet=self.styles, selection_color=self.selection_color)
        print('here')
        self.webview.load_string(temp_file, 'text/html', 'utf-8', self.basepath)

    def render_artist_art_search(self, artist):
        """Render the artist-art search template into the web view."""
        temp_file = self.artist_template.render(artist=artist, stylesheet=self.styles, selection_color=self.selection_color)
        print('here')
        self.webview.load_string(temp_file, 'text/html', 'utf-8', self.basepath)

    def clear(self):
        """Reset the pane to the empty search page."""
        self.current_searchobject = None
        temp_file = self.empty_template.render(stylesheet=self.styles)
        self.webview.load_string(temp_file, 'text/html', 'utf-8', self.basepath)

    def set_cover(self, webview, arg):
        """Title-change handler: a non-empty title is the chosen cover URI."""
        title = webview.get_title()
        print(title)
        if title:
            self.callback(self.current_searchobject, uri=title)
# NOTE(review): the `@mock.patch` tokens were stripped in this copy, leaving
# bare argument tuples; restored.  Decorators apply bottom-up, matching the
# parameter order (mock_get_properties, mock_get_details, mock_boto3).
@mock.patch('foremast.datapipeline.datapipeline.boto3.Session.client')
@mock.patch('foremast.datapipeline.datapipeline.get_details')
@mock.patch('foremast.datapipeline.datapipeline.get_properties')
def test_create_datapipeline(mock_get_properties, mock_get_details, mock_boto3):
    """create_datapipeline must store the pipeline id returned by boto3."""
    generated = {'project': 'test'}
    properties = copy.deepcopy(TEST_PROPERTIES)
    mock_get_details.return_value.data = generated
    mock_get_properties.return_value = properties
    mock_boto3.return_value.create_pipeline.return_value = {'pipelineId': '1234'}
    dp = AWSDataPipeline(app='test_app', env='test_env', region='us-east-1', prop_path='other')
    dp.create_datapipeline()
    assert dp.pipeline_id == '1234'
def test_message_reply(session):
    """A deltaMessageReply payload should parse into a MessageReplyEvent with both messages."""
    # NOTE(review): the `'timestamp': ,` entries below are garbled — the numeric
    # values were lost in extraction. The expected datetimes in the assertion
    # imply timestamps 1500000000000 (2017-07-14) and 1600000000000 (2020-09-13);
    # restore before running.
    message = {'messageMetadata': {'threadKey': {'otherUserFbId': 1234}, 'messageId': 'mid.$XYZ', 'offlineThreadingId': '', 'actorFbId': 1234, 'timestamp': , 'tags': ['source:messenger:web', 'cg-enabled', 'sent', 'inbox'], 'threadReadStateEffect': 3, 'skipBumpThread': False, 'skipSnippetUpdate': False, 'unsendType': 'can_unsend', 'folderId': {'systemFolderId': 0}}, 'body': 'xyz', 'attachments': [], 'irisSeqId': 1111111, 'messageReply': {'replyToMessageId': {'id': 'mid.$ABC'}, 'status': 0}, 'requestContext': {'apiArgs': '...'}, 'irisTags': ['DeltaNewMessage']}
    reply = {'messageMetadata': {'threadKey': {'otherUserFbId': 1234}, 'messageId': 'mid.$ABC', 'offlineThreadingId': '', 'actorFbId': 4321, 'timestamp': , 'tags': ['inbox', 'sent', 'source:messenger:web']}, 'body': 'abc', 'attachments': [], 'requestContext': {'apiArgs': '...'}, 'irisTags': []}
    data = {'message': message, 'repliedToMessage': reply, 'status': 0}
    thread = User(session=session, id='1234')
    assert (MessageReplyEvent(author=User(session=session, id='1234'), thread=thread, message=MessageData(thread=thread, id='mid.$XYZ', author='1234', created_at=datetime.datetime(2017, 7, 14, 2, 40, tzinfo=datetime.timezone.utc), text='xyz', reply_to_id='mid.$ABC'), replied_to=MessageData(thread=thread, id='mid.$ABC', author='4321', created_at=datetime.datetime(2020, 9, 13, 12, 26, 40, tzinfo=datetime.timezone.utc), text='abc')) == parse_client_delta(session, {'deltaMessageReply': data}))
class ColoredFormatter(logging.Formatter):
    """Logging formatter that wraps WARNING/ERROR/CRITICAL records in ANSI color codes.

    Records at other levels are formatted with the plain ``FORMAT`` string.
    """

    YELLOW = '\x1b[33;20m'
    RED = '\x1b[31;20m'
    BOLD_RED = '\x1b[31;1m'
    RESET = '\x1b[0m'
    FORMAT = '%(asctime)s %(levelname)s %(message)s'
    DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
    # Level -> colorized format string; levels not listed fall back to FORMAT.
    FORMATS = {logging.WARNING: ((YELLOW + FORMAT) + RESET), logging.ERROR: ((RED + FORMAT) + RESET), logging.CRITICAL: ((BOLD_RED + FORMAT) + RESET)}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Fix: the original built a fresh logging.Formatter on every format()
        # call; cache one per format string instead.
        self._formatters = {}

    def format(self, record):
        """Format *record*, colorized when its level has an entry in FORMATS."""
        log_fmt = self.FORMATS.get(record.levelno, self.FORMAT)
        formatter = self._formatters.get(log_fmt)
        if formatter is None:
            formatter = logging.Formatter(log_fmt, self.DATE_FORMAT)
            self._formatters[log_fmt] = formatter
        return formatter.format(record)
class OptionPlotoptionsColumnSonificationTracksPointgrouping(Options):
    """Highcharts `plotOptions.column.sonification.tracks.pointGrouping` option wrapper.

    NOTE(review): each option appears twice (getter returning a default via
    `_config_get`, setter writing via `_config`); the `@property` /
    `@<name>.setter` decorators appear to have been stripped during extraction.
    As written, each second `def` simply shadows the first — restore the
    decorators before use.
    """

    def algorithm(self):
        # Default grouping algorithm: 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Grouping is enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Default span of each group in milliseconds: 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Default point property used for grouping: 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the two lines below look like stripped decorator/registration
# calls (e.g. `@register(factory)` / `register_all(__all__)`) — confirm
# against the upstream geebap/geetools source.
(factory)
_all(__all__)
class Satellite(Score):
    """Score that down-weights images from lower-priority satellites in a given year."""

    def __init__(self, ratio=0.05, name='score-sat', **kwargs):
        # ratio: penalty per step down the satellite priority list.
        super(Satellite, self).__init__(**kwargs)
        self.name = name
        self.ratio = ratio

    # NOTE(review): no `self` parameter — presumably a stripped @staticmethod.
    def compute(image, **kwargs):
        """Build a constant score image from the collection's priority index for `year`."""
        colid = kwargs.get('collection_id')
        year = kwargs.get('year')
        rate = kwargs.get('ratio', 0.05)
        name = kwargs.get('name', 'sat-score')
        year_str = ee.Number(year).format()
        # Priority list of collection ids for that year (server-side).
        prior_list = ee.List(priority.SeasonPriority.ee_relation.get(year_str))
        index = prior_list.indexOf(colid)
        exists = ee.Number(index).neq((- 1))
        factor = ee.Number(rate).multiply(index)
        # Collections missing from the list get the maximum penalty (factor 1).
        factor = ee.Number(ee.Algorithms.If(exists, factor, 1))
        sat_score = ee.Number(1).subtract(factor)
        score_img = ee.Image.constant(sat_score).rename(name).toFloat()
        score_img = score_img.set(name.upper(), sat_score)
        return score_img

    def map(self, collection, **kwargs):
        """Return `collection` with the satellite score band added to every image."""
        col = kwargs.get('col')
        year = kwargs.get('year')
        def wrap(img):
            # Fall back to the image's own year when none was supplied.
            y = (ee.Number(year) if year else img.date().get('year'))
            score = self.compute(img, collection_id=col.id, year=y, rate=self.ratio, name=self.name)
            return img.addBands(score).set(self.name, score.get(self.name))
        return collection.map(wrap)
class TestCreateMessage(BaseEvenniaTest):
    """Tests for evennia.utils.create.create_message."""

    msgtext = '\n    Qui laborum voluptas quis commodi ipsum quo temporibus eum. Facilis\n    assumenda facilis architecto in corrupti. Est placeat eum amet qui beatae\n    reiciendis. Accusamus vel aspernatur ab ex. Quam expedita sed expedita\n    consequuntur est dolorum non exercitationem.\n    '

    def test_create_msg__simple(self):
        """Senders/receivers assignment appends rather than replaces."""
        msg = create.create_message(self.char1, self.msgtext, header='TestHeader')
        # NOTE: each assignment below *adds* to the existing senders/receivers
        # (Msg property setters append) — hence the list assertions at the end.
        msg.senders = 'ExternalSender'
        msg.receivers = self.char2
        msg.receivers = 'ExternalReceiver'
        self.assertEqual(msg.message, self.msgtext)
        self.assertEqual(msg.header, 'TestHeader')
        self.assertEqual(msg.senders, [self.char1, 'ExternalSender'])
        self.assertEqual(msg.receivers, [self.char2, 'ExternalReceiver'])

    def test_create_msg__custom(self):
        """Locks, tags and mixed receiver types supplied at creation time are stored."""
        locks = 'foo:false();bar:true()'
        tags = ['tag1', 'tag2', 'tag3']
        msg = create.create_message(self.char1, self.msgtext, header='TestHeader', receivers=[self.char1, self.char2, 'ExternalReceiver'], locks=locks, tags=tags)
        self.assertEqual(set(msg.receivers), set([self.char1, self.char2, 'ExternalReceiver']))
        self.assertTrue(all(((lock in msg.locks.all()) for lock in locks.split(';'))))
        self.assertEqual(msg.tags.all(), tags)
()
('-k', 'key', default=None, help='Key')
('-d', 'decrypt', is_flag=True, default=False, help='Decrypt instead of encrypt')
('--ttl', 'ttl', default=0, type=click.INT, help='Time To Live for timestamp verification')
('-i', type=click.File('rb'), default='-', help='Input file (default: stdin)')
('-o', type=click.File('wb'), default='-', help='Output file (default: stdout)')
def cmd_crypto_fernet(key, decrypt, ttl, i, o):
habucfg = loadcfg()
if (not key):
if ('FERNET_KEY' in habucfg):
key = habucfg['FERNET_KEY']
else:
print(ERROR_NOKEY, file=sys.stderr)
sys.exit(1)
if (not ttl):
ttl = None
cipher = Fernet(key)
data = i.read()
if decrypt:
try:
token = cipher.decrypt(data, ttl)
except Exception as e:
print('Error decrypting', file=sys.stderr)
sys.exit(1)
else:
token = cipher.encrypt(data)
print(token.decode(), end='') |
def extractCornerofbooksandstuffCom(item):
    """Parse a 'Corner of books and stuff' feed item into a release message.

    Returns None for non-chapter/preview items, a release message when a known
    tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_upstream():
    """FlyteWorkflow.promote_from_model should reconstruct upstream-node links.

    NOTE(review): the nested functions below look like they lost their
    @task / @workflow decorators during extraction — as plain functions the
    serialization calls would not work; confirm against the flytekit tests.
    """
    def t1(a: int) -> typing.Dict[(str, str)]:
        return {'a': str(a)}
    def t2(a: typing.Dict[(str, str)]) -> str:
        return ' '.join([v for (k, v) in a.items()])
    def t3() -> str:
        return 'hello'
    def my_wf(a: int) -> str:
        return t2(a=t1(a=a))
    serialized = OrderedDict()
    wf_spec = get_serializable(serialized, serialization_settings, my_wf)
    (task_templates, wf_specs, lp_specs) = gather_dependent_entities(serialized)
    fwf = FlyteWorkflow.promote_from_model(wf_spec.template, sub_workflows={}, node_launch_plans={}, tasks={k: FlyteTask.promote_from_model(t) for (k, t) in task_templates.items()})
    # t2 consumes t1's output, so node 1 has exactly one upstream node.
    assert (len(fwf.flyte_nodes[0].upstream_nodes) == 0)
    assert (len(fwf.flyte_nodes[1].upstream_nodes) == 1)
    def parent(a: int) -> (str, str):
        first = my_wf(a=a)
        second = t3()
        return (first, second)
    serialized = OrderedDict()
    wf_spec = get_serializable(serialized, serialization_settings, parent)
    (task_templates, wf_specs, lp_specs) = gather_dependent_entities(serialized)
    sub_wf_dict = {s.id: s for s in wf_spec.sub_workflows}
    fwf = FlyteWorkflow.promote_from_model(wf_spec.template, sub_workflows=sub_wf_dict, node_launch_plans={}, tasks={k: FlyteTask.promote_from_model(v) for (k, v) in task_templates.items()})
    # The sub-workflow and t3 are independent, so neither node has upstreams.
    assert (len(fwf.flyte_nodes[0].upstream_nodes) == 0)
    assert (len(fwf.flyte_nodes[1].upstream_nodes) == 0)
class OFPFlowStatsRequestBase(OFPMultipartRequest):
    """Base class for OpenFlow flow-stats multipart requests.

    Holds the flow-match criteria and serializes the fixed request header
    followed by the match structure.
    """

    def __init__(self, datapath, flags, table_id, out_port, out_group, cookie, cookie_mask, match):
        super(OFPFlowStatsRequestBase, self).__init__(datapath, flags)
        # Store the flow selection criteria verbatim.
        for attr, value in (('table_id', table_id),
                            ('out_port', out_port),
                            ('out_group', out_group),
                            ('cookie', cookie),
                            ('cookie_mask', cookie_mask),
                            ('match', match)):
            setattr(self, attr, value)

    def _serialize_stats_body(self):
        """Pack the fixed request fields into self.buf, then append the match."""
        body_offset = ofproto.OFP_MULTIPART_REQUEST_SIZE
        msg_pack_into(ofproto.OFP_FLOW_STATS_REQUEST_0_PACK_STR, self.buf, body_offset,
                      self.table_id, self.out_port, self.out_group,
                      self.cookie, self.cookie_mask)
        body_offset += ofproto.OFP_FLOW_STATS_REQUEST_0_SIZE
        self.match.serialize(self.buf, body_offset)
class DefStmt(Stmt):
    """Assignment statement node: ``lhs = rhs``.

    Dict-shaped operands are deserialized via build_var / build_exp; anything
    else is taken as an already-built node.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        lhs = kwargs['lhs']
        rhs = kwargs['rhs']
        if isinstance(lhs, dict):
            lhs = build_var(**lhs)
        if isinstance(rhs, dict):
            rhs = build_exp(**rhs)
        self.lhs = lhs
        self.rhs = rhs

    def __repr__(self):
        return f'{self.lhs!r} = {self.rhs!r}'

    def __str__(self):
        return f'{self.lhs} = {self.rhs}'
class MVARTest(unittest.TestCase):
    """Round-trip tests for the OpenType MVAR table (binary <-> XML)."""

    # NOTE(review): missing @classmethod — as written this would be called
    # with an instance; presumably the decorator was stripped during extraction.
    def setUpClass(cls):
        cls.maxDiff = None

    def test_decompile_toXML(self):
        """Decompiling the binary fixture must produce the expected XML."""
        mvar = newTable('MVAR')
        font = TTFont()
        mvar.decompile(MVAR_DATA, font)
        self.assertEqual(getXML(mvar.toXML), MVAR_XML)

    def test_decompile_toXML_lazy(self):
        """Same round trip with a lazily-loaded font."""
        mvar = newTable('MVAR')
        font = TTFont(lazy=True)
        mvar.decompile(MVAR_DATA, font)
        self.assertEqual(getXML(mvar.toXML), MVAR_XML)

    def test_compile_fromXML(self):
        """Parsing the XML fixture and compiling must reproduce the binary fixture."""
        mvar = newTable('MVAR')
        font = TTFont()
        for (name, attrs, content) in parseXML(MVAR_XML):
            mvar.fromXML(name, attrs, content, font=font)
        data = MVAR_DATA
        self.assertEqual(hexStr(mvar.compile(font)), hexStr(data))
class ComplexSurfaceApodizer(OpticalElement):
    """Optical element combining an amplitude mask with a refractive surface.

    The surface imposes an optical path difference of
    ``(n - 1) * surface``; propagation multiplies the field by the amplitude
    and the corresponding phase factor.
    """

    def __init__(self, amplitude, surface, refractive_index):
        self.amplitude = amplitude
        self.surface = surface
        # NOTE: forward/backward call this as a function of wavelength, while
        # phase_for uses it as a plain number — presumably both forms are
        # accepted by callers; confirm upstream.
        self.refractive_index = refractive_index

    def phase_for(self, wavelength):
        """Return the phase delay of the surface at the given wavelength."""
        wavenumber = 2 * np.pi / wavelength
        path_difference = (self.refractive_index - 1) * self.surface
        return path_difference * wavenumber

    def _apply(self, wavefront, sign):
        # Shared implementation: multiply by amplitude and exp(sign * k * opd).
        path_difference = (self.refractive_index(wavefront.wavelength) - 1) * self.surface
        wf = wavefront.copy()
        wf.electric_field *= self.amplitude * np.exp(sign * path_difference * wf.wavenumber)
        return wf

    def forward(self, wavefront):
        """Propagate the wavefront through the element."""
        return self._apply(wavefront, 1j)

    def backward(self, wavefront):
        """Propagate the wavefront backwards through the element."""
        return self._apply(wavefront, -1j)
class PerimeterPenTest(unittest.TestCase):
    """Every fixture contour (PS and TT, both windings, line first/last) has perimeter 1589."""

    EXPECTED = 1589

    def _check(self, draw_func):
        # Draw the fixture into a fresh pen and compare the rounded perimeter.
        pen = PerimeterPen(None)
        draw_func(pen)
        self.assertEqual(self.EXPECTED, round(pen.value))

    def test_PScontour_clockwise_line_first(self):
        self._check(draw1_)

    def test_PScontour_counterclockwise_line_last(self):
        self._check(draw2_)

    def test_PScontour_clockwise_line_last(self):
        self._check(draw3_)

    def test_PScontour_counterclockwise_line_first(self):
        self._check(draw4_)

    def test_TTcontour_clockwise_line_first(self):
        self._check(draw5_)

    def test_TTcontour_counterclockwise_line_last(self):
        self._check(draw6_)

    def test_TTcontour_clockwise_line_last(self):
        self._check(draw7_)

    def test_TTcontour_counterclockwise_line_first(self):
        self._check(draw8_)
# NOTE(review): the two lines below look like stripped decorators
# (`@flicket_bp.route(...)` and `@login_required`) — restore before use.
_bp.route((app.config['FLICKET'] + 'delete_ticket/<ticket_id>/'), methods=['GET', 'POST'])
_required
def delete_ticket(ticket_id):
    """Admin-only view that deletes a ticket, its posts, history, and uploads.

    Shows a password-confirmation form first; on confirmation, removes upload
    files from disk, deletes dependent rows, adjusts post counters, and
    finally deletes the ticket itself.
    """
    if (not g.user.is_admin):
        flash(gettext('You are not authorised to delete tickets.'), category='warning')
        return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
    form = ConfirmPassword()
    ticket = FlicketTicket.query.filter_by(id=ticket_id).first()
    if form.validate_on_submit():
        # Remove uploaded files from disk before deleting their DB rows.
        images = FlicketUploads.query.filter_by(topic_id=ticket_id)
        for i in images:
            os.remove(os.path.join(os.getcwd(), ((app.config['ticket_upload_folder'] + '/') + i.file_name)))
            db.session.delete(i)
        # Delete post history first, then the posts, decrementing each
        # author's post counter.
        for post in ticket.posts:
            history = FlicketHistory.query.filter_by(post=post).all()
            for h in history:
                db.session.delete(h)
            post.user.total_posts -= 1
            db.session.delete(post)
        user = ticket.user
        user.total_posts -= 1
        db.session.delete(ticket)
        db.session.commit()
        flash(gettext('Ticket deleted.'), category='success')
        return redirect(url_for('flicket_bp.tickets'))
    return render_template('flicket_deletetopic.html', form=form, ticket=ticket, title='Delete Ticket')
class ClassTimePicker(GrpCls.ClassHtml):
    """CSS class group for the toast time-picker component."""

    def __init__(self, component: primitives.HtmlModel=None, page: primitives.PageModel=None):
        super(ClassTimePicker, self).__init__(component=component, page=page)
        # Attach the time-picker styles to the component's main class list.
        main_classes = self.classList['main']
        main_classes.add(CssToastTimePicker(self.page))
        main_classes.add(CssToastTimePickerSelect(self.page))
def get_output_options(view: sublime.View, inline=False):
    """Build Emmet output options for the given view.

    HTML-specific options (quotes, self-closing style, compact booleans) are
    added only when the view's syntax is HTML. `inline` disables multi-line
    output formatting.
    """
    # NOTE(review): `field` is not defined in this function — presumably a
    # module-level tab-stop callable; confirm at module scope.
    opt = {'output.field': field, 'output.format': (not inline)}
    if syntax.doc_syntax(view) == 'html':
        # Hoisted: both self-closing style and compact booleans derive from
        # the same 'markup_style' setting (was fetched twice).
        markup_style = get_settings('markup_style')
        opt['output.attributeQuotes'] = get_settings('attribute_quotes')
        opt['output.selfClosingStyle'] = markup_style
        opt['output.compactBoolean'] = (markup_style == 'html')
    if get_settings('comment'):
        opt['comment.enabled'] = True
        template = get_settings('comment_template')
        if template:
            opt['comment.after'] = template
    opt['bem.enabled'] = get_settings('bem')
    opt['stylesheet.shortHex'] = get_settings('short_hex')
    return opt
class ContractEvent(BaseContractEvent):
    """Synchronous contract-event API: fetch logs, create filters, build filters."""

    w3: 'Web3'

    def get_logs(self, argument_filters: Optional[Dict[(str, Any)]]=None, fromBlock: Optional[BlockIdentifier]=None, toBlock: Optional[BlockIdentifier]=None, block_hash: Optional[HexBytes]=None) -> Iterable[EventData]:
        """Fetch and decode logs for this event, optionally filtered by argument values.

        Raises Web3ValidationError when a filter key is not an argument of the
        event's ABI. Non-indexed argument filters are applied client-side
        after decoding.
        """
        event_abi = self._get_event_abi()
        if (argument_filters is not None):
            # Validate filter keys up front so typos fail loudly.
            event_arg_names = get_abi_input_names(event_abi)
            if (not all(((arg in event_arg_names) for arg in argument_filters.keys()))):
                raise Web3ValidationError("When filtering by argument names, all argument names must be present in the contract's event ABI.")
        _filter_params = self._get_event_filter_params(event_abi, argument_filters, fromBlock, toBlock, block_hash)
        logs = self.w3.eth.get_logs(_filter_params)
        # Decode every raw log entry, then filter on decoded argument values.
        all_event_logs = tuple((get_event_data(self.w3.codec, event_abi, entry) for entry in logs))
        filtered_logs = self._process_get_logs_argument_filters(event_abi, all_event_logs, argument_filters)
        return filtered_logs

    def create_filter(self, *, argument_filters: Optional[Dict[(str, Any)]]=None, fromBlock: Optional[BlockIdentifier]=None, toBlock: BlockIdentifier='latest', address: Optional[ChecksumAddress]=None, topics: Optional[Sequence[Any]]=None) -> LogFilter:
        """Install a node-side log filter for this event and return its handle."""
        filter_builder = EventFilterBuilder(self._get_event_abi(), self.w3.codec)
        self._set_up_filter_builder(argument_filters, fromBlock, toBlock, address, topics, filter_builder)
        log_filter = filter_builder.deploy(self.w3)
        # get_event_data is curried: partially applying codec + ABI yields the
        # per-entry decoder.
        log_filter.log_entry_formatter = get_event_data(self.w3.codec, self._get_event_abi())
        log_filter.builder = filter_builder
        return log_filter

    def build_filter(self) -> EventFilterBuilder:
        """Return an unreified filter builder preconfigured for this event."""
        builder = EventFilterBuilder(self._get_event_abi(), self.w3.codec, formatter=get_event_data(self.w3.codec, self._get_event_abi()))
        builder.address = self.address
        return builder
class AuthTestCase(TestCase):
    """Tests for the django_mqtt auth endpoint: valid credentials pass, everything else 403s."""

    def setUp(self):
        self.username = 'user'
        self.password = 'password'
        User.objects.create_user(self.username, password=self.password)
        self.url_testing = reverse('django_mqtt:mqtt_auth')
        self.client = Client()

    # NOTE(review): looks like a stripped `@override_settings(MQTT_ACL_ALLOW=True)`
    # decorator for the test below — restore before use.
    _settings(MQTT_ACL_ALLOW=True)
    def test_login_acl_allow_true(self):
        response = self.client.post(self.url_testing, {'username': self.username, 'password': self.password})
        self.assertEqual(response.status_code, 200)

    # NOTE(review): likewise a stripped `@override_settings(MQTT_ACL_ALLOW=False)`.
    _settings(MQTT_ACL_ALLOW=False)
    def test_login_acl_allow_false(self):
        response = self.client.post(self.url_testing, {'username': self.username, 'password': self.password})
        self.assertEqual(response.status_code, 403)

    def test_wrong_login(self):
        response = self.client.post(self.url_testing, {'username': self.username, 'password': 'wrong'})
        self.assertEqual(response.status_code, 403)

    def test_wrong_user(self):
        response = self.client.post(self.url_testing, {'username': 'wrong', 'password': 'wrong'})
        self.assertEqual(response.status_code, 403)

    def test_wrong_no_password(self):
        response = self.client.post(self.url_testing, {'username': self.username})
        self.assertEqual(response.status_code, 403)

    def test_wrong_no_username(self):
        response = self.client.post(self.url_testing, {'password': self.password})
        self.assertEqual(response.status_code, 403)

    def test_wrong_no_data(self):
        response = self.client.post(self.url_testing, {})
        self.assertEqual(response.status_code, 403)
def extractHanuchuWordpressCom(item):
    """Parse a 'Hanuchu' Wordpress feed item into a release message.

    Returns None for preview/non-chapter items, a release message for a
    recognized tag, otherwise False.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    for tag, series_name, tl_type in (('PRC', 'PRC', 'translated'),
                                      ('Loiterous', 'Loiterous', 'oel')):
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestOrganizationFieldsCreateUpdateDelete(SingleCreateApiTestCase, SingleDeleteApiTestCase, SingleUpdateApiTestCase):
    """CRUD API tests for OrganizationField objects."""

    __test__ = True
    ZenpyType = OrganizationField
    # Payload used to create the test object.
    object_kwargs = {
        'description': 'test',
        'title': 'i am test',
        'key': 'somethingsomethingsomething',
    }
    # `key` cannot be changed after creation, so skip it for update tests.
    ignore_update_kwargs = ['key']
    api_name = 'organization_fields'
def _assemble_broker_submission_records() -> list:
base_record = {'created_at': None, 'updated_at': None, 'submission_id': None, 'user_id': None, 'cgac_code': None, 'reporting_start_date': None, 'reporting_end_date': None, 'is_quarter_format': False, 'number_of_errors': 0, 'number_of_warnings': 0, 'publish_status_id': 2, 'publishable': False, 'reporting_fiscal_period': 0, 'reporting_fiscal_year': 0, 'is_fabs': False, 'publishing_user_id': None, 'frec_code': None}
default_submission_record = copy.copy(base_record)
default_submission_record['submission_id'] = (- 9999)
return [default_submission_record] |
def has_traitsui():
    """Return True when a TraitsUI installation usable with the active toolkit exists.

    Qt6 requires TraitsUI >= 7.4; any TraitsUI works for other toolkits.
    Returns False when TraitsUI is not importable at all.
    """
    try:
        import traitsui
    except ImportError:
        return False
    from pyface.toolkit import toolkit
    # Non-Qt toolkits have no version constraint.
    if not toolkit.toolkit.startswith('qt'):
        return True
    from pyface.qt import is_qt6
    if not is_qt6:
        return True
    return Version(traitsui.__version__) >= Version('7.4')
def test_request_generator_content_picklable():
    """A request with generator content should survive pickling once the body is read.

    NOTE(review): several string literals and `pytest.raises(...)` arguments
    below are garbled (the request constructor call and the expected exception
    types were lost in extraction) — restore from the httpx test suite before
    running.
    """
    def content() -> typing.Iterator[bytes]:
        (yield b'test 123')
    request = ' content=content())
    pickle_request = pickle.loads(pickle.dumps(request))
    # Unread generator content is not transferable: accessing it must raise.
    with pytest.raises(
        pickle_request.content
    with pytest.raises(
        pickle_request.read()
    # After read(), the body is buffered and pickles cleanly.
    request = ' content=content())
    request.read()
    pickle_request = pickle.loads(pickle.dumps(request))
    assert (pickle_request.content == b'test 123')
class ForwardingRuleScannerTest(ForsetiTestCase):
    """Forwarding-rule scanner tests: matching rules yield no violations, deviations yield one each.

    NOTE(review): the resource fixture dicts below are garbled — the 'region'
    and 'target' URL string values were lost in extraction, leaving unbalanced
    quotes. Restore the URLs from the Forseti test data before running.
    """

    def test_forwarding_rules_scanner_all_match(self):
        """All four fixture rules match the whitelist rules -> zero violations."""
        rules_local_path = get_datafile_path(__file__, 'forward_rule_test_1.yaml')
        scanner = forwarding_rule_scanner.ForwardingRuleScanner({}, {}, mock.MagicMock(), '', '', rules_local_path)
        project_id = 'abc-123'
        gcp_forwarding_rules_resource_data = [{'id': '46', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.100.99', 'IPProtocol': 'UDP', 'portRange': '4500-4500', 'ports': [], 'target': ' 'loadBalancingScheme': 'EXTERNAL'}, {'id': '23', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.100.23', 'IPProtocol': 'TCP', 'ports': [8080], 'target': ' 'loadBalancingScheme': 'INTERNAL'}, {'id': '46', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.100.46', 'IPProtocol': 'ESP', 'ports': [], 'target': ' 'loadBalancingScheme': 'EXTERNAL'}, {'id': '46', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.100.35', 'IPProtocol': 'TCP', 'portRange': '4500-4500', 'target': ' 'loadBalancingScheme': 'EXTERNAL'}]
        gcp_forwarding_rules_resource_objs = []
        for gcp_forwarding_rule_resource_data in gcp_forwarding_rules_resource_data:
            gcp_forwarding_rules_resource_objs.append(fr.ForwardingRule.from_dict(project_id, '', gcp_forwarding_rule_resource_data))
        violations = scanner._find_violations(gcp_forwarding_rules_resource_objs)
        self.assertEqual(0, len(violations))

    def test_forwarding_rules_scanner_no_match(self):
        """Each fixture rule deviates from the whitelist in one field -> four violations."""
        rules_local_path = get_datafile_path(__file__, 'forward_rule_test_1.yaml')
        scanner = forwarding_rule_scanner.ForwardingRuleScanner({}, {}, mock.MagicMock(), '', '', rules_local_path)
        project_id = 'abc-123'
        gcp_forwarding_rules_resource_data = [{'id': '46', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.100.99', 'IPProtocol': 'TCP', 'portRange': '4500-4500', 'ports': [], 'target': ' 'loadBalancingScheme': 'EXTERNAL'}, {'id': '23', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.100.23', 'IPProtocol': 'TCP', 'ports': [8081], 'target': ' 'loadBalancingScheme': 'INTERNAL'}, {'id': '46', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.101.46', 'IPProtocol': 'ESP', 'ports': [], 'target': ' 'loadBalancingScheme': 'EXTERNAL'}, {'id': '46', 'creationTimestamp': '2017-06-01 04:19:37', 'name': 'abc-123', 'description': '', 'region': ' 'IPAddress': '198.51.100.35', 'IPProtocol': 'TCP', 'portRange': '4400-4500', 'target': ' 'loadBalancingScheme': 'EXTERNAL'}]
        gcp_forwarding_rules_resource_objs = []
        for gcp_forwarding_rule_resource_data in gcp_forwarding_rules_resource_data:
            gcp_forwarding_rules_resource_objs.append(fr.ForwardingRule.from_dict(project_id, '', gcp_forwarding_rule_resource_data))
        violations = scanner._find_violations(gcp_forwarding_rules_resource_objs)
        self.assertEqual(4, len(violations))
def api_login_required(f):
    """Decorator for API views: pass through when a user is logged in, otherwise
    delegate to the shared login-required handler (which may return an error
    response or None to proceed)."""
    # NOTE(review): the bare `(f)` below looks like a stripped
    # `@functools.wraps(f)` decorator line — restore before use.
    (f)
    def decorated_function(*args, **kwargs):
        if (flask.g.user is not None):
            # Authenticated: call the wrapped view directly.
            return f(*args, **kwargs)
        retval = _shared_api_login_required_wrapper()
        if (retval is not None):
            # The shared wrapper produced a response (e.g. 401 redirect).
            return retval
        return f(*args, **kwargs)
    return decorated_function
def test_get_relevant_subnets():
    """sb.get_relevant_subnets must resolve sb_code params (list or string form)
    to the expected HV subnet(s) and downstream LV subnet list for every
    voltage-level combination."""
    input_path = sb.complete_data_path(0)

    def subnets_stay_equal(sb_code_info, hv_subnet, lv_subnets):
        # Re-resolve with an alternative code representation and expect identity.
        (new_hv_subnet, new_lv_subnets) = sb.get_relevant_subnets(sb_code_info, input_path=input_path)
        assert (hv_subnet == new_hv_subnet)
        assert (lv_subnets == new_lv_subnets)

    # --- Expected subnet name pools ---
    hv_subnet_list = ['HV1', 'HV2']
    mv_subnet_list = [('MV%i.%i' % (i, j)) for j in [101, 201] for i in range(1, 5)]
    lv_subnet_list = [('LV%i.%i' % (i, j)) for i in range(1, 7) for j in [101, 201, 301, 401]]
    # These combinations do not exist in the dataset.
    unexpected_lv_subnet_list = ['LV5.101', 'LV6.101', 'LV1.301', 'LV2.301', 'LV1.401']
    for u in unexpected_lv_subnet_list:
        lv_subnet_list.remove(u)

    # --- Complete grid (EHV-HVMVLV) ---
    sb_code_params = [1, 'EHV', 'HVMVLV', 'mixed', 'all', '1', True]
    (hv_subnets, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (sorted(hv_subnets) == sorted(((['EHV1'] + hv_subnet_list) + mv_subnet_list[:4])))
    assert pd.Series(((hv_subnet_list + mv_subnet_list) + lv_subnet_list)).isin(lv_subnets).all()
    subnets_stay_equal(sb.complete_grid_sb_code(1), hv_subnets, lv_subnets)
    # String-form code for the complete dataset.
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets('1-complete_data-mixed-all-1-sw', input_path=input_path)
    assert (hv_subnet == 'complete_data')
    assert (lv_subnets == '')

    # --- EHV with/without downstream HV nets ---
    sb_code_params = [1, 'EHV', 'HV', 'mixed', 'all', '0', False]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'EHV1')
    assert (lv_subnets == ['HV1', 'HV2'])
    sb_code_params = [1, 'EHV', 'HV', 'mixed', 2, '0', False]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'EHV1')
    assert (lv_subnets == ['HV2'])
    sb_code_params = [1, 'EHV', '', 'mixed', '', '0', True]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'EHV1')
    assert (lv_subnets == [])

    # --- HV with downstream MV nets ---
    sb_code_params = [1, 'HV', 'MV', 'urban', 'all', '0', False]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'HV2')
    assert (30 > len(lv_subnets) > 8)
    assert pd.Series(['MV1.201', 'MV1.205', 'MV2.203', 'MV3.202', 'MV4.203']).isin(lv_subnets).all()
    sb_code_params = [1, 'HV', '', 'mixed', '', '1', True]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'HV1')
    assert (lv_subnets == [])

    # --- MV with/without downstream LV nets ---
    sb_code_params = [1, 'MV', 'LV', 'semiurb', '3.209', '0', False]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'MV2.101')
    assert (lv_subnets == ['LV3.209'])
    sb_code_params = [1, 'MV', 'LV', 'semiurb', '2.211', '1', True]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'MV2.101')
    assert (lv_subnets == ['LV2.211'])
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets('1-MV-rural--0-no_sw', input_path=input_path)
    assert (hv_subnet == 'MV1.101')
    assert (lv_subnets == [])

    # --- Pure LV nets ---
    sb_code_params = [1, 'LV', '', 'urban6', '', '0', False]
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets(sb_code_params, input_path=input_path)
    assert (hv_subnet == 'LV6.201')
    assert (lv_subnets == [])
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets('1-LV-semiurb5--0-sw', input_path=input_path)
    assert (hv_subnet == 'LV5.201')
    assert (lv_subnets == [])
    (hv_subnet, lv_subnets) = sb.get_relevant_subnets('1-LV-semiurb4--0-no_sw', input_path=input_path)
    assert (hv_subnet == 'LV4.101')
    assert (lv_subnets == [])
class OptionSeriesOrganizationMarkerStatesHover(Options):
    """Highcharts `series.organization.marker.states.hover` option wrapper.

    NOTE(review): paired getter/setter defs below appear to have lost their
    `@property` / `@<name>.setter` decorators during extraction; as written
    each setter def shadows its getter. Restore the decorators before use.
    """

    def animation(self) -> 'OptionSeriesOrganizationMarkerStatesHoverAnimation':
        # Nested sub-options object for the hover animation.
        return self._config_sub_data('animation', OptionSeriesOrganizationMarkerStatesHoverAnimation)

    def enabled(self):
        # Hover state is enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        return self._config_get(None)

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(None)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        # Default: widen the marker outline by 1px on hover.
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(None)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def radiusPlus(self):
        # Default: grow the marker radius by 2px on hover.
        return self._config_get(2)

    def radiusPlus(self, num: float):
        self._config(num, js_type=False)
def _protobuf_python_implementation() -> Generator:
PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION = 'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'
saved_mods = {}
for (mod_name, mod) in list(sys.modules.items()):
if mod_name.startswith('google.protobuf'):
saved_mods[mod_name] = mod
del sys.modules[mod_name]
prev_os_env = os.environ.get(PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION)
os.environ[PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION] = 'python'
(yield)
if (prev_os_env is None):
del os.environ[PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION]
else:
os.environ[PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION] = prev_os_env
for mod_name in list(sys.modules.keys()):
if mod_name.startswith('google.protobuf'):
del sys.modules[mod_name]
for (mod_name, mod) in saved_mods.items():
sys.modules[mod_name] = mod |
def test_call_with_init_keyword_args(singleton_cls):
    """Keyword init args reach the instance, and the provider returns a singleton."""
    provider = singleton_cls(Example, init_arg1='i1', init_arg2='i2')
    first = provider()
    second = provider()
    for instance in (first, second):
        assert instance.init_arg1 == 'i1'
        assert instance.init_arg2 == 'i2'
        assert isinstance(instance, Example)
    assert first is second
class Test_gre(unittest.TestCase):
    """Tests for the GRE/NVGRE packet classes: key/vsid/flow_id coupling and pcap round trips.

    NOTE: the tests share a single class-level `gre` packet, so setter tests
    mutate shared state; `eq_`/`ok_` are nose-style asserts. The class-body
    assignment `gre = gre.gre(...)` reads the *module* `gre` on the right-hand
    side and then shadows it as a class attribute.
    """

    version = 0
    gre_proto = ETH_TYPE_IP
    nvgre_proto = ETH_TYPE_TEB
    checksum = 17421
    seq_number = 10
    # key is the 32-bit field; vsid/flow_id are its 24-bit/8-bit NVGRE sub-fields
    # (256100 == 1000 << 8 | 100).
    key = 256100
    vsid = 1000
    flow_id = 100
    gre = gre.gre(version=version, protocol=gre_proto, checksum=checksum, key=key, seq_number=seq_number)

    def test_key_setter(self):
        """Setting key updates the derived vsid and flow_id."""
        self.gre.key = self.key
        eq_(self.gre._key, self.key)
        eq_(self.gre._vsid, self.vsid)
        eq_(self.gre._flow_id, self.flow_id)

    def test_key_setter_none(self):
        """Clearing key clears vsid/flow_id too; restore afterwards for other tests."""
        self.gre.key = None
        eq_(self.gre._key, None)
        eq_(self.gre._vsid, None)
        eq_(self.gre._flow_id, None)
        self.gre.key = self.key

    def test_vsid_setter(self):
        """Setting vsid leaves key/flow_id consistent."""
        self.gre.vsid = self.vsid
        eq_(self.gre._key, self.key)
        eq_(self.gre._vsid, self.vsid)
        eq_(self.gre._flow_id, self.flow_id)

    def test_flowid_setter(self):
        """Setting flow_id leaves key/vsid consistent."""
        self.gre.flow_id = self.flow_id
        eq_(self.gre._key, self.key)
        eq_(self.gre._vsid, self.vsid)
        eq_(self.gre._flow_id, self.flow_id)

    def test_nvgre_init(self):
        """nvgre composes vsid+flow_id into key and forces the TEB protocol."""
        nvgre = gre.nvgre(version=self.version, vsid=self.vsid, flow_id=self.flow_id)
        eq_(nvgre.version, self.version)
        eq_(nvgre.protocol, self.nvgre_proto)
        eq_(nvgre.checksum, None)
        eq_(nvgre.seq_number, None)
        eq_(nvgre._key, self.key)
        eq_(nvgre._vsid, self.vsid)
        eq_(nvgre._flow_id, self.flow_id)

    def test_parser(self):
        """Each pcap fixture must parse to a gre packet and re-serialize byte-identically."""
        files = ['gre_full_options', 'gre_no_option', 'gre_nvgre_option']
        for f in files:
            for (_, buf) in pcaplib.Reader(open(((GENEVE_DATA_DIR + f) + '.pcap'), 'rb')):
                pkt = packet.Packet(buf)
                gre_pkt = pkt.get_protocol(gre.gre)
                ok_(isinstance(gre_pkt, gre.gre), ('Failed to parse Gre message: %s' % pkt))
                pkt.serialize()
                eq_(buf, pkt.data, ("b'%s' != b'%s'" % (binary_str(buf), binary_str(pkt.data))))
def test_transaction_name_is_route(app, elasticapm_client):
    """The APM transaction is named after the route template, not the raw path."""
    response = TestClient(app).get('/hi/shay')
    assert response.status_code == 200
    transactions = elasticapm_client.events[constants.TRANSACTION]
    assert len(transactions) == 1
    txn = transactions[0]
    assert txn['name'] == 'GET /hi/{name}'
    assert txn['context']['request']['url']['pathname'] == '/hi/shay'
def _replace_submodules(gm: GraphModule, replacement: torch.nn.Module) -> None:
gm.delete_all_unused_submodules()
if isinstance(replacement, GraphModule):
replacement.graph.lint()
def try_get_submodule(mod: torch.nn.Module, target: str) -> Optional[torch.nn.Module]:
try:
mod_match = mod.get_submodule(target)
return mod_match
except AttributeError:
return None
for node in gm.graph.nodes:
if ((node.op == 'call_module') or (node.op == 'get_attr')):
gm_submod = try_get_submodule(gm, node.target)
replacement_submod = try_get_submodule(replacement, node.target)
if (gm_submod is not None):
continue
elif (replacement_submod is not None):
new_submod = copy.deepcopy(getattr(replacement, node.target))
gm.add_submodule(node.target, new_submod)
else:
raise RuntimeError('Attempted to create a "', node.op, f'" node during subgraph rewriting with target {node.target}, but the referenced submodule does not exist in either the original GraphModule `gm` or the replacement GraphModule `replacement`')
gm.graph.lint() |
def str2int(number):
    """Convert an int-like value or English number words to an int.

    Accepts ints, numeric strings, ordinal suffixes ("21st", "5th") and
    hyphen/space/comma-separated number words ("twenty-one", "one hundred and
    five"). Raises ValueError when the input cannot be interpreted.
    """
    number = str(number)
    original_input = number
    # Fast path: plain integer, or integer with a two-char ordinal suffix ("3rd").
    try:
        return int(number)
    except ValueError:
        # Fix: was a bare `except:`; only a failed int() conversion is expected here.
        try:
            return int(number[:(- 2)])
        except ValueError:
            pass
    # Normalize ordinal word forms back to cardinals:
    # "fifth" -> "five", "twentieth" -> "twenty", "ninth" -> "nine".
    if (number[(- 2):] == 'th'):
        number = number[:(- 2)]
        if (number[(- 1)] == 'f'):
            number = (number[:(- 1)] + 've')
        elif (number[(- 2):] == 'ie'):
            number = (number[:(- 2)] + 'y')
        elif (number[(- 3):] == 'nin'):
            number += 'e'
    # Fix: compare against None so words mapping to 0 (e.g. "zero") are accepted;
    # the truthiness test silently rejected them.
    if (i := _STR2INT_MAP.get(number)) is not None:
        return i
    number = number.replace(' and ', ' ')
    words = [word.lower() for word in re.split('[-\\s\\,]', number) if word]
    sums = []
    for word in words:
        if (i := _STR2INT_MAP.get(word)) is not None:
            if not sums:
                sums = [i]
            elif sums[(- 1)] < i:
                # A larger word ("hundred", "thousand") multiplies the running term.
                sums[(- 1)] = (sums[(- 1)] * i)
            else:
                sums.append(i)
        elif (i := _STR2INT_ADJS.get(word)) is not None:
            sums.append(i)
        else:
            raise ValueError(f'String {original_input} cannot be converted to int.')
    return sum(sums)
class OptionPlotoptionsNetworkgraphStates(Options):
    """Highcharts `plotOptions.networkgraph.states` option wrapper.

    NOTE(review): these accessors look like stripped `@property` getters —
    each returns a lazily-created nested sub-options object.
    """

    def hover(self) -> 'OptionPlotoptionsNetworkgraphStatesHover':
        # Options applied when a point is hovered.
        return self._config_sub_data('hover', OptionPlotoptionsNetworkgraphStatesHover)

    def inactive(self) -> 'OptionPlotoptionsNetworkgraphStatesInactive':
        # Options applied to points not related to the hovered point.
        return self._config_sub_data('inactive', OptionPlotoptionsNetworkgraphStatesInactive)

    def normal(self) -> 'OptionPlotoptionsNetworkgraphStatesNormal':
        # Options for the default (normal) state.
        return self._config_sub_data('normal', OptionPlotoptionsNetworkgraphStatesNormal)

    def select(self) -> 'OptionPlotoptionsNetworkgraphStatesSelect':
        # Options applied when a point is selected.
        return self._config_sub_data('select', OptionPlotoptionsNetworkgraphStatesSelect)
class OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Builder for the Highcharts variwide sonification tremolo mapping subtree.

    NOTE(review): likely @property accessors with decorators stripped --
    confirm against upstream.
    """
    def depth(self) -> 'OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        # Tremolo depth (intensity) mapping sub-options.
        return self._config_sub_data('depth', OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingTremoloDepth)
    def speed(self) -> 'OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        # Tremolo speed mapping sub-options.
        return self._config_sub_data('speed', OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
class Pianobar(IntervalModule):
    """Status-bar module showing the song currently playing in pianobar.

    Reads the title/artist from ``songfile`` (written by pianobar's eventcmd
    hook, one per line) and controls playback by writing single control
    characters to pianobar's control fifo (``ctlfile``).
    """
    settings = ('format', ('songfile', 'File generated by pianobar eventcmd'), ('ctlfile', 'Pianobar fifo file'), ('color', 'The color of the text'))
    format = '{songtitle} -- {songartist}'
    required = ('format', 'songfile', 'ctlfile')
    color = '#FFFFFF'
    on_leftclick = 'playpause'
    on_rightclick = 'next_song'
    on_upscroll = 'increase_volume'
    on_downscroll = 'decrease_volume'

    def run(self):
        """Refresh the output from the first two lines of the song file."""
        with open(self.songfile, 'r') as f:
            contents = f.readlines()
        songtitle = contents[0].strip()
        songartist = contents[1].strip()
        self.output = {'full_text': self.format.format(songtitle=songtitle, songartist=songartist), 'color': self.color}

    def _send_ctl(self, cmd):
        # Write one pianobar control character to the fifo.
        # Fix: the original used bare open().write() in every handler, leaking
        # the file handle; a context manager closes (and flushes) it reliably.
        with open(self.ctlfile, 'w') as f:
            f.write(cmd)

    def playpause(self):
        self._send_ctl('p')

    def next_song(self):
        self._send_ctl('n')

    def increase_volume(self):
        self._send_ctl(')')

    def decrease_volume(self):
        self._send_ctl('(')
# NOTE(review): the leading ".django_db" line is almost certainly a stripped
# "@pytest.mark.django_db" decorator -- confirm against the original source.
.django_db
def test_fabs_quarterly(client, monkeypatch, fabs_award_with_quarterly_submission, helpers, elasticsearch_award_index):
    """A FABS award with a quarterly submission is counted once for period M/07."""
    # Route award data through the test Elasticsearch index.
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # Freeze "now" so fiscal period/quarter math is deterministic.
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = helpers.post_for_count_endpoint(client, url, ['M'], ['07'])
    assert (resp.data['count'] == 1)
class List(Value):
    """Interpreter runtime list value.

    Wraps a Python list in ``self.elements`` and implements the language's
    list operators: '+' append/concat (added_to), '-' remove-by-index
    (subbed_by), '*' repetition (multed_by), and indexing (take_item).
    Operator methods return ``(result, error)`` pairs.
    """
    def __init__(self, elements):
        super().__init__()
        # Backing Python list of wrapped runtime values.
        self.elements = elements
    def added_to(self, other):
        """'+' operator: extend with another List, else append *other*."""
        # NOTE(review): copy() below shares the underlying elements list, so
        # this extend/append also mutates self.elements. That may be the
        # language's intended aliasing semantics -- confirm before changing.
        new_list = self.copy()
        if isinstance(other, List):
            new_list.elements.extend(other.elements)
        else:
            new_list.elements.append(other)
        return (new_list, None)
    def subbed_by(self, other):
        """'-' operator: remove the element at index *other* (a Number)."""
        if isinstance(other, Number):
            new_list = self.copy()
            # NOTE(review): bare except also hides TypeError from a non-int
            # index; IndexError is the expected failure here.
            try:
                new_list.elements.pop(other.value)
                return (new_list, None)
            except:
                return (None, RTError(other.pos_start, other.pos_end, detailsMessages['outOfIndex'], self.context))
        else:
            return (None, Value.illegal_operation(self, other))
    def multed_by(self, other):
        """'*' operator: repeat the list *other.value* times (via extend)."""
        # NOTE(review): because copy() aliases the elements list, the extend
        # operates on the same list it repeats -- verify the intended result
        # for e.g. [1, 2] * 2 against the language spec.
        new_list = self.copy()
        new_list.elements.extend((new_list.elements * other.value))
        return (new_list, None)
    def take_item(self, other):
        """Indexing: return the element at Number index *other*."""
        res = RTResult()
        if isinstance(other, Number):
            # NOTE(review): bare except -- IndexError is the expected failure.
            try:
                return res.success(self.elements[other.value])
            except:
                return res.failure(RTError(other.pos_start, other.pos_end, detailsMessages['outOfIndex'], self.context))
        else:
            return res.failure(Value.illegal_operation(self, other))
    def copy(self):
        """Return a List sharing this list's elements, position and context."""
        copy = List(self.elements)
        copy.set_pos(self.pos_start, self.pos_end)
        copy.set_context(self.context)
        return copy
    def __str__(self):
        return f"[{', '.join([str(x) for x in self.elements])}]"
    def __repr__(self):
        return f"[{', '.join([str(x) for x in self.elements])}]"
class EventListener():
    """Subscribes to the Unifi Protect websocket and queues finished events.

    Incoming messages are filtered by event type (motion / ring / smart
    detections), by a camera ignore-list, and by completeness (only events
    whose 'end' timestamp just arrived) before being queued for download.
    """
    def __init__(self, event_queue: asyncio.Queue, protect: ProtectApiClient, detection_types: List[str], ignore_cameras: List[str]):
        self._event_queue: asyncio.Queue = event_queue
        self._protect: ProtectApiClient = protect
        # Unsubscribe callback returned by subscribe_websocket(); set in start().
        self._unsub = None
        self.detection_types: List[str] = detection_types
        self.ignore_cameras: List[str] = ignore_cameras
    async def start(self):
        """Subscribe to the websocket and monitor its health forever."""
        logger.debug('Subscribed to websocket')
        self._unsub = self._protect.subscribe_websocket(self._websocket_callback)
        # Poll once a minute and reconnect if the websocket dropped.
        while True:
            (await asyncio.sleep(60))
            (await self._check_websocket_and_reconnect())
    def _websocket_callback(self, msg: WSSubscriptionMessage) -> None:
        """Filter a websocket message; queue it if it is a wanted, finished event."""
        logger.websocket_data(msg)
        assert isinstance(msg.new_obj, Event)
        # Only UPDATE actions for cameras that are not ignored.
        if (msg.action != WSAction.UPDATE):
            return
        if (msg.new_obj.camera_id in self.ignore_cameras):
            return
        # The 'end' key appearing in changed_data marks the event as finished.
        if ('end' not in msg.changed_data):
            return
        if (msg.new_obj.type not in [EventType.MOTION, EventType.SMART_DETECT, EventType.RING]):
            return
        if ((msg.new_obj.type is EventType.MOTION) and ('motion' not in self.detection_types)):
            logger.extra_debug(f'Skipping unwanted motion detection event: {msg.new_obj.id}')
            return
        if ((msg.new_obj.type is EventType.RING) and ('ring' not in self.detection_types)):
            logger.extra_debug(f'Skipping unwanted ring event: {msg.new_obj.id}')
            return
        elif (msg.new_obj.type is EventType.SMART_DETECT):
            # Drop the whole event if ANY of its smart types is unwanted.
            for event_smart_detection_type in msg.new_obj.smart_detect_types:
                if (event_smart_detection_type not in self.detection_types):
                    logger.extra_debug(f'Skipping unwanted {event_smart_detection_type} detection event: {msg.new_obj.id}')
                    return
        # NOTE(review): this is a blocking sleep inside a synchronous callback;
        # if the callback runs on the event-loop thread it stalls the loop while
        # the queue is full -- confirm the threading model of subscribe_websocket.
        while self._event_queue.full():
            logger.extra_debug('Event queue full, waiting 1s...')
            sleep(1)
        self._event_queue.put_nowait(msg.new_obj)
        # Normalise composite ids ("<id>-<suffix>") to the bare event id.
        if ('-' in msg.new_obj.id):
            msg.new_obj.id = msg.new_obj.id.split('-')[0]
        logger.debug(f'Adding event {msg.new_obj.id} to queue (Current download queue={self._event_queue.qsize()})')
    async def _check_websocket_and_reconnect(self):
        """Verify the websocket is alive; rebuild session and subscription if not."""
        logger.extra_debug('Checking the status of the websocket...')
        if self._protect.check_ws():
            logger.extra_debug('Websocket is connected.')
        else:
            self._protect.connect_event.clear()
            logger.warning('Lost connection to Unifi Protect.')
            self._unsub()
            (await self._protect.close_session())
            # Retry forever, 10s apart, until the websocket is re-established.
            while True:
                logger.warning('Attempting reconnect...')
                try:
                    (await self._protect.close_session())
                    self._protect._bootstrap = None
                    (await self._protect.update(force=True))
                    if self._protect.check_ws():
                        self._unsub = self._protect.subscribe_websocket(self._websocket_callback)
                        break
                    else:
                        logger.error('Unable to establish connection to Unifi Protect')
                except Exception as e:
                    logger.error('Unexpected exception occurred while trying to reconnect:', exc_info=e)
                (await asyncio.sleep(10))
            self._protect.connect_event.set()
            logger.info('Re-established connection to Unifi Protect and to the websocket.')
def _scan_flow_scalar(stream: StreamBuffer, style: Literal[("'", '"')], is_key: bool=False) -> (KeyToken | ValueToken):
    """Scan a quoted (flow) scalar from *stream* and return it as a token.

    The opening quote is consumed first; non-space and space runs are then
    collected alternately until the matching closing quote is reached.
    Returns a KeyToken when *is_key* is true, otherwise a ValueToken.
    """
    is_double = (style == '"')
    start = stream.get_position()
    opening_quote = stream.peek()
    stream.forward()
    pieces = list(_scan_flow_scalar_non_spaces(stream, is_double, start))
    # Alternate space / non-space chunks until the closing quote shows up.
    while (stream.peek() != opening_quote):
        pieces += _scan_flow_scalar_spaces(stream, start)
        pieces += _scan_flow_scalar_non_spaces(stream, is_double, start)
    stream.forward()
    end = stream.get_position()
    text = ''.join(pieces)
    token_cls = KeyToken if is_key else ValueToken
    return token_cls(start, end, text, style)
class TestEvAdventureCombatBaseHandler(_CombatTestBase):
    """Unit tests for the base combat handler's messaging and summary output."""
    def setUp(self):
        super().setUp()
        # Single shared handler script stored on the room (location).
        self.combathandler = combat_base.EvAdventureCombatBaseHandler.get_or_create_combathandler(self.location, key='combathandler')
    def test_combathandler_msg(self):
        """msg() broadcasts through the location with a combatant name mapping."""
        self.location.msg_contents = Mock()
        self.combathandler.msg('test_message')
        self.location.msg_contents.assert_called_with('test_message', exclude=[], from_obj=None, mapping={'testchar': self.combatant, 'testmonster': self.target})
    def test_get_combat_summary(self):
        """Summary renders sides from the perspective of the given combatant."""
        self.combathandler.get_sides = Mock(return_value=([self.combatant], [self.target]))
        result = str(self.combathandler.get_combat_summary(self.combatant))
        self.assertEqual(strip_ansi(result), ' testchar (Perfect) vs testmonster (Perfect) ')
        # Swap the sides and check the mirrored rendering.
        self.combathandler.get_sides = Mock(return_value=([self.target], [self.combatant]))
        result = str(self.combathandler.get_combat_summary(self.target))
        self.assertEqual(strip_ansi(result), ' testmonster (Perfect) vs testchar (Perfect) ')
# NOTE(review): "_action_type(...)" reads like a stripped registration
# decorator (e.g. @OFPAction.register_action_type) -- confirm against
# the upstream Ryu source.
_action_type(ofproto.OFPAT_SET_FIELD, ofproto.OFP_ACTION_SET_FIELD_SIZE)
class OFPActionSetField(OFPAction):
    """OpenFlow OFPAT_SET_FIELD action: set a single OXM field to a value.

    Constructed with exactly one keyword argument, e.g.
    ``OFPActionSetField(eth_dst='aa:bb:cc:dd:ee:ff')``.
    """
    def __init__(self, field=None, **kwargs):
        super(OFPActionSetField, self).__init__()
        # Exactly one field=value pair; masked values (tuples) are not
        # allowed in set-field actions.
        assert (len(kwargs) == 1)
        key = list(kwargs.keys())[0]
        value = kwargs[key]
        assert isinstance(key, (str, six.text_type))
        assert (not isinstance(value, tuple))
        self.key = key
        self.value = value
    # NOTE(review): parser/from_jsondict take ``cls`` -- they look like
    # @classmethods whose decorators were stripped; confirm upstream.
    def parser(cls, buf, offset):
        """Parse a set-field action from *buf* at *offset*."""
        (type_, len_) = struct.unpack_from(ofproto.OFP_ACTION_SET_FIELD_PACK_STR, buf, offset)
        # The OXM TLV starts right after the 4-byte action header.
        (n, value, mask, _len) = ofproto.oxm_parse(buf, (offset + 4))
        (k, uv) = ofproto.oxm_to_user(n, value, mask)
        action = cls(**{k: uv})
        action.len = len_
        return action
    def serialize(self, buf, offset):
        """Serialize header + OXM TLV into *buf*, padded to 8-byte alignment."""
        (n, value, mask) = ofproto.oxm_from_user(self.key, self.value)
        len_ = ofproto.oxm_serialize(n, value, mask, buf, (offset + 4))
        self.len = utils.round_up((4 + len_), 8)
        msg_pack_into('!HH', buf, offset, self.type, self.len)
        # Zero-pad to the rounded-up action length.
        pad_len = (self.len - (4 + len_))
        msg_pack_into(('%dx' % pad_len), buf, ((offset + 4) + len_))
    def to_jsondict(self):
        """Return a JSON-friendly dict representation of this action."""
        return {self.__class__.__name__: {'field': ofproto.oxm_to_jsondict(self.key, self.value), 'len': self.len, 'type': self.type}}
    def from_jsondict(cls, dict_):
        """Rebuild the action from its JSON dict form."""
        (k, v) = ofproto.oxm_from_jsondict(dict_['field'])
        return OFPActionSetField(**{k: v})
    def stringify_attrs(self):
        # Expose the single key/value pair when stringifying.
        (yield (self.key, self.value))
# NOTE(review): the two leading lines look like stripped decorators
# ("@ui_blueprint.route(...)" and "@login_required") -- confirm upstream.
_blueprint.route('/project/<project_id>/delete/versions', methods=['GET', 'POST'])
_required
def delete_project_versions(project_id):
    """Delete every stored version of a project (admin only, with confirmation).

    GET renders the confirmation page; a confirmed POST deletes all version
    rows, clears latest_version, publishes a version.remove.v2 message and
    redirects back to the project page.
    """
    project = models.Project.get(Session, project_id)
    if (not project):
        flask.abort(404)
    if (not is_admin()):
        flask.abort(401)
    form = anitya.forms.ConfirmationForm()
    confirm = flask.request.form.get('confirm', False)
    if form.validate_on_submit():
        if confirm:
            versions = []
            for version in project.versions_obj:
                Session.delete(version)
                versions.append(str(version))
            # No versions remain, so the cached latest_version is stale.
            project.latest_version = None
            utilities.publish_message(project=project.__json__(), topic='project.version.remove.v2', message=dict(agent=flask.g.user.username, project=project.name, versions=versions))
            Session.add(project)
            Session.commit()
            flask.flash('All versions were removed')
        # Unconfirmed submissions fall through to the redirect unchanged.
        return flask.redirect(flask.url_for('anitya_ui.project', project_id=project.id))
    return flask.render_template('project_versions_delete.html', current='projects', project=project, form=form)
class TiltElement(SurfaceApodizer):
    """Optical tilt element: a surface whose sag grows linearly across the grid.

    NOTE(review): ``angle`` and ``orientation`` are each defined twice below
    (getter- and setter-shaped); as written the later definition shadows the
    earlier one, so the original source almost certainly used @property /
    @angle.setter / @orientation.setter decorators that were stripped --
    confirm before relying on attribute access syntax.
    """
    def __init__(self, angle, orientation=0, refractive_index=2.0):
        # Tilt angle (presumably radians -- confirm) and the in-plane
        # orientation of the tilt axis.
        self._angle = angle
        self._orientation = orientation
        super().__init__(self.tilt_sag, refractive_index)
    def angle(self):
        # Getter for the tilt angle.
        return self._angle
    def angle(self, new_angle):
        # Setter: update the angle and re-point the sag function.
        self._angle = new_angle
        self.surface_sag = self.tilt_sag
    def orientation(self):
        # Getter for the tilt orientation.
        return self._orientation
    def orientation(self, new_orientation):
        # Setter: update the orientation and re-point the sag function.
        self._orientation = new_orientation
        self.surface_sag = self.tilt_sag
    def tilt_sag(self, grid):
        # Sag = y' * tan(angle), where y' is the grid rotated by orientation.
        return Field((grid.rotated(self._orientation).y * np.tan(self._angle)), grid)
def _render_example_section(heading, section_examples, enclosure):
    """Render one <h2> heading plus its evaluated examples; '' if no examples.

    Each example is a 1-tuple ``(code,)`` or a 2-tuple ``(code, explanation)``.
    """
    if not section_examples:
        return ''
    out = heading
    for example in section_examples:
        if (len(example) == 1):
            (code,) = example
            explanation = ''
        else:
            (code, explanation) = example
        if enclosure:
            out += code_example_html2(code, explanation, evaluate=True, enclosure=True)
        else:
            out += code_example_html2(code, explanation, evaluate=True)
    return out

def documentify(symbol, short_description=None, long_description=None, text=None, examples=(), evaluation_examples=(), numerical_examples=()):
    """Render and write the HTML documentation page for *symbol*.

    Side effects: adds *symbol* to the global ``documentified`` set, appends
    the page title to the global ``indexed`` file, and writes the page to
    ``build/grimdoc/<path>.html``.

    Fixes vs. the previous version: the example-list defaults are now
    immutable tuples (they are only iterated, so this is backward
    compatible) and the output file handle is closed deterministically.
    ``short_description``/``long_description`` are accepted for interface
    compatibility but currently unused.
    """
    documentified.add(symbol)
    title = str(symbol)
    # The boolean symbols are named True_/False_ internally; show plain names.
    if (title == 'True_'):
        title = 'True'
    if (title == 'False_'):
        title = 'False'
    # The "Home" page becomes the site index.
    if (title == 'Home'):
        path = 'index'
    else:
        path = escape_title(title)
    print('DOCUMENTIFY', path, title)
    indexed.write((title + '\n'))
    outtext = html_start
    outtext += html_sidebar(title)
    if (title == 'Home'):
        outtext += '<h1>Welcome to the Grim documentation</h1>'
    else:
        outtext += ('<h1>%s</h1>' % title)
    if text:
        outtext += markup_text(text)
    for (code, explanation) in examples:
        outtext += code_example_html2(code, explanation)
    outtext += _render_example_section('<h2>Symbolic evaluation examples</h2>', evaluation_examples, enclosure=False)
    outtext += _render_example_section('<h2>Numerical evaluation examples</h2>', numerical_examples, enclosure=True)
    outtext += ('<div style="margin-top: 2em; font-style: italic">Last updated: %s</div>' % timestamp)
    outtext += html_end
    # Fix: was open(...).write(...), which leaked the file handle.
    with open(('build/grimdoc/%s.html' % path), 'w') as fh:
        fh.write(outtext)
# NOTE(review): "_type(...)" reads like a stripped registration decorator
# (e.g. @OFPTableModProp.register_type) -- confirm against upstream Ryu.
_type(ofproto.OFPTMPT_EVICTION)
class OFPTableModPropEviction(OFPTableModProp):
    """Table-mod eviction property (OFPTMPT_EVICTION): carries eviction flags."""
    def __init__(self, type_=None, length=None, flags=None):
        self.type = type_
        self.length = length
        self.flags = flags
    # NOTE(review): takes ``cls`` -- likely a @classmethod with its
    # decorator stripped; confirm upstream.
    def parser(cls, buf):
        """Unpack type/length/flags from the start of *buf*."""
        eviction = cls()
        (eviction.type, eviction.length, eviction.flags) = struct.unpack_from(ofproto.OFP_TABLE_MOD_PROP_EVICTION_PACK_STR, buf, 0)
        return eviction
    def serialize(self):
        """Pack the property into a fresh bytearray, fixing self.length."""
        self.length = ofproto.OFP_TABLE_MOD_PROP_EVICTION_SIZE
        buf = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_MOD_PROP_EVICTION_PACK_STR, buf, 0, self.type, self.length, self.flags)
        return buf
# NOTE(review): the two leading tuples (and the bare string before
# test_add_bugs_bodhi_not_configured below) read like stripped @mock.patch
# decorators -- confirm against the upstream bodhi test suite.
('bodhi.server.models.work_on_bugs_task', mock.Mock())
('bodhi.server.models.fetch_test_cases_task', mock.Mock())
class TestUpdateEdit(BasePyTestCase):
    """Tests for ``model.Update.edit``: locking, bug handling, display-name
    normalisation and test-gating status transitions."""
    def test_add_build_to_locked_update(self):
        """Adding builds to a locked update raises LockedUpdateException."""
        data = {'edited': model.Update.query.first().alias, 'builds': ["can't", 'do', 'this']}
        request = mock.MagicMock()
        request.db = self.db
        update = model.Update.query.first()
        update.locked = True
        self.db.flush()
        with pytest.raises(model.LockedUpdateException):
            model.Update.edit(request, data)
    def test_remove_builds_from_locked_update(self):
        """Removing builds from a locked update raises LockedUpdateException."""
        data = {'edited': model.Update.query.first().alias, 'builds': []}
        request = mock.MagicMock()
        request.db = self.db
        update = model.Update.query.first()
        update.locked = True
        self.db.flush()
        with pytest.raises(model.LockedUpdateException):
            model.Update.edit(request, data)
    ('bodhi.server.models.log.warning')
    def test_add_bugs_bodhi_not_configured(self, warning):
        """Without a bodhi_email configured, bug handling is skipped with a warning."""
        config['bodhi_email'] = None
        update = model.Update.query.first()
        data = {'edited': update.alias, 'builds': [update.builds[0].nvr], 'bugs': [12345]}
        request = mock.MagicMock()
        request.db = self.db
        request.identity.name = 'tester'
        with mock_sends(Message):
            model.Update.edit(request, data)
        warning.assert_called_with('Not configured to handle bugs')
    def test_empty_display_name(self):
        """A whitespace-only display_name is normalised to the empty string."""
        update = model.Update.query.first()
        data = {'edited': update.alias, 'builds': [update.builds[0].nvr], 'bugs': [], 'display_name': ' '}
        request = mock.MagicMock()
        request.db = self.db
        request.identity.name = 'tester'
        with mock_sends(Message):
            model.Update.edit(request, data)
        update = model.Update.query.first()
        assert (update.display_name == '')
    def test_gating_required_false(self):
        """With gating disabled, editing leaves test_gating_status as None."""
        config['test_gating.required'] = False
        update = model.Update.query.first()
        update.test_gating_status = None
        data = {'edited': update.alias, 'builds': [update.builds[0].nvr], 'bugs': [], 'display_name': ' '}
        request = mock.MagicMock()
        request.db = self.db
        request.identity.name = 'tester'
        with mock_sends(update_schemas.UpdateEditV2):
            with mock.patch('bodhi.server.models.util.greenwave_api_post') as mock_greenwave:
                greenwave_response = {'policies_satisfied': False, 'summary': 'what have you done', 'applicable_policies': ['bodhiupdate_bodhipush_openqa_workstation'], 'unsatisfied_requirements': [{'testcase': 'dist.rpmdeplint', 'item': {'item': 'bodhi-2.0-1.fc17', 'type': 'koji_build'}, 'type': 'test-result-failed', 'scenario': None}, {'testcase': 'dist.rpmdeplint', 'item': {'item': update.alias, 'type': 'bodhi_update'}, 'type': 'test-result-failed', 'scenario': None}]}
                mock_greenwave.return_value = greenwave_response
                model.Update.edit(request, data)
        update = model.Update.query.first()
        assert (update.test_gating_status is None)
    def test_gating_required_true(self):
        """With gating enabled, a failing greenwave response marks the update failed."""
        config['test_gating.required'] = True
        update = model.Update.query.first()
        update.test_gating_status = None
        data = {'edited': update.alias, 'builds': [update.builds[0].nvr], 'bugs': [], 'display_name': ' '}
        request = mock.MagicMock()
        request.db = self.db
        request.identity.name = 'tester'
        with mock_sends(update_schemas.UpdateEditV2):
            with mock.patch('bodhi.server.models.util.greenwave_api_post') as mock_greenwave:
                greenwave_response = {'policies_satisfied': False, 'summary': 'what have you done', 'applicable_policies': ['bodhiupdate_bodhipush_openqa_workstation'], 'unsatisfied_requirements': [{'testcase': 'dist.rpmdeplint', 'item': {'item': 'bodhi-2.0-1.fc17', 'type': 'koji_build'}, 'type': 'test-result-failed', 'scenario': None}, {'testcase': 'dist.rpmdeplint', 'item': {'item': update.alias, 'type': 'bodhi_update'}, 'type': 'test-result-failed', 'scenario': None}]}
                mock_greenwave.return_value = greenwave_response
                model.Update.edit(request, data)
        update = model.Update.query.first()
        assert (update.test_gating_status == model.TestGatingStatus.failed)
    def test_rawhide_update_edit_move_to_testing(self):
        """Editing a signed rawhide (not composed-by-bodhi) update moves it to testing."""
        config['test_gating.required'] = True
        update = model.Build.query.filter_by(nvr='bodhi-2.0-1.fc17').one().update
        update.status = model.UpdateStatus.pending
        update.release.composed_by_bodhi = False
        update.builds[0].signed = True
        data = {'edited': update.alias, 'builds': [update.builds[0].nvr], 'bugs': [], 'display_name': ' '}
        request = mock.MagicMock()
        request.db = self.db
        request.identity.name = 'tester'
        with mock_sends(update_schemas.UpdateEditV2):
            with mock.patch('bodhi.server.models.util.greenwave_api_post') as mock_greenwave:
                greenwave_response = {'policies_satisfied': False, 'summary': 'what have you done', 'applicable_policies': ['bodhiupdate_bodhipush_openqa_workstation'], 'unsatisfied_requirements': [{'testcase': 'dist.rpmdeplint', 'item': {'item': 'bodhi-2.0-1.fc17', 'type': 'koji_build'}, 'type': 'test-result-failed', 'scenario': None}, {'testcase': 'dist.rpmdeplint', 'item': {'item': update.alias, 'type': 'bodhi_update'}, 'type': 'test-result-failed', 'scenario': None}]}
                mock_greenwave.return_value = greenwave_response
                model.Update.edit(request, data)
        assert (update.status == model.UpdateStatus.testing)
        assert (update.test_gating_status == model.TestGatingStatus.failed)
    def test_rawhide_update_edit_stays_pending(self):
        """An unsigned rawhide update stays pending after edit."""
        config['test_gating.required'] = True
        update = model.Build.query.filter_by(nvr='bodhi-2.0-1.fc17').one().update
        update.status = model.UpdateStatus.pending
        update.release.composed_by_bodhi = False
        update.builds[0].signed = False
        data = {'edited': update.alias, 'builds': [update.builds[0].nvr], 'bugs': [], 'display_name': ' '}
        request = mock.MagicMock()
        request.db = self.db
        request.identity.name = 'tester'
        with mock_sends(update_schemas.UpdateEditV2):
            with mock.patch('bodhi.server.models.util.greenwave_api_post') as mock_greenwave:
                greenwave_response = {'policies_satisfied': False, 'summary': 'what have you done', 'applicable_policies': ['bodhiupdate_bodhipush_openqa_workstation'], 'unsatisfied_requirements': [{'testcase': 'dist.rpmdeplint', 'item': {'item': 'bodhi-2.0-1.fc17', 'type': 'koji_build'}, 'type': 'test-result-failed', 'scenario': None}, {'testcase': 'dist.rpmdeplint', 'item': {'item': update.alias, 'type': 'bodhi_update'}, 'type': 'test-result-failed', 'scenario': None}]}
                mock_greenwave.return_value = greenwave_response
                model.Update.edit(request, data)
        assert (update.status == model.UpdateStatus.pending)
        assert (update.test_gating_status == model.TestGatingStatus.failed)
    def test_not_rawhide_update_signed_stays_pending(self):
        """A signed update in a composed-by-bodhi release stays pending."""
        config['test_gating.required'] = True
        update = model.Build.query.filter_by(nvr='bodhi-2.0-1.fc17').one().update
        update.status = model.UpdateStatus.pending
        update.release.composed_by_bodhi = True
        update.builds[0].signed = True
        data = {'edited': update.alias, 'builds': [update.builds[0].nvr], 'bugs': [], 'display_name': ' '}
        request = mock.MagicMock()
        request.db = self.db
        request.identity.name = 'tester'
        with mock_sends(update_schemas.UpdateEditV2):
            with mock.patch('bodhi.server.models.util.greenwave_api_post') as mock_greenwave:
                greenwave_response = {'policies_satisfied': False, 'summary': 'what have you done', 'applicable_policies': ['bodhiupdate_bodhipush_openqa_workstation'], 'unsatisfied_requirements': [{'testcase': 'dist.rpmdeplint', 'item': {'item': 'bodhi-2.0-1.fc17', 'type': 'koji_build'}, 'type': 'test-result-failed', 'scenario': None}, {'testcase': 'dist.rpmdeplint', 'item': {'item': update.alias, 'type': 'bodhi_update'}, 'type': 'test-result-failed', 'scenario': None}]}
                mock_greenwave.return_value = greenwave_response
                model.Update.edit(request, data)
        assert (update.status == model.UpdateStatus.pending)
        assert (update.test_gating_status == model.TestGatingStatus.failed)
class KeyCode():
    """Registry of JavaScript keyboard-event rules for a component (or page).

    Rules are stored per source event as ``{rule_expression: {'content':
    [js...], 'profile': ...}}`` and later compiled by :meth:`get_event` into
    ``if(rule){...}`` JavaScript fragments.
    """
    def __init__(self, component: Optional[primitives.HtmlModel]=None, source_event: Optional[str]=None, page=None):
        # Default source event is the owning component's DOM reference.
        (self.__events_per_source, self._component, self.source_event) = ({}, component, (source_event or component.dom.varId))
        self._page = (page or self._component.page)
    def custom(self, rule: str, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, source_event: Optional[str]=None):
        """Attach *js_funcs* to an arbitrary JavaScript *rule* expression.

        Note: replaces any content previously stored for the same rule.
        """
        # Auto-name the profile entry when page-level profiling is on.
        if ((not profile) and self._page.profile):
            if (self._component is not None):
                profile = {'name': ('%s[key=%s]' % (self._component.htmlCode, rule))}
            else:
                profile = {'name': ('Page[key=%s]' % rule)}
        source_event = (source_event or self.source_event)
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        if (source_event not in self.__events_per_source):
            self.__events_per_source[source_event] = {}
        self.__events_per_source[source_event].setdefault(rule, {})['content'] = js_funcs
        self.__events_per_source[source_event][rule]['profile'] = profile
    def key(self, key_code: int, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        """Attach *js_funcs* to a specific ``event.which`` key code.

        Unlike :meth:`custom`, repeated calls APPEND to the rule's content;
        *reset* clears all rules previously stored for the source event.
        """
        if ((not profile) and self._page.profile):
            if (self._component is not None):
                profile = {'name': ('%s[key=%s]' % (self._component.htmlCode, key_code))}
            else:
                profile = {'name': ('Page[key=%s]' % key_code)}
        source_event = (source_event or self.source_event)
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        tag = ('event.which == %s' % key_code)
        if (reset or (source_event not in self.__events_per_source)):
            self.__events_per_source[source_event] = {}
        self.__events_per_source[source_event].setdefault(tag, {}).setdefault('content', []).extend(js_funcs)
        self.__events_per_source[source_event][tag]['profile'] = profile
    def any(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, source_event: Optional[str]=None):
        """Run *js_funcs* on every key event (rule 'true')."""
        self.custom('true', js_funcs, profile, source_event)
    # The helpers below map named keys to their event.which key codes.
    def enter(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(13, js_funcs, profile, reset, source_event)
    def tab(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(9, js_funcs, profile, reset, source_event)
    def backspace(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(8, js_funcs, profile, reset, source_event)
    def shift_with(self, key: str, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, source_event: Optional[str]=None):
        """Shift + *key* combination.

        NOTE(review): uses ord(key); event.which reports the UPPERCASE letter
        code, so callers presumably pass an uppercase character -- confirm.
        """
        self.custom(('(event.shiftKey) && (event.which == %s)' % ord(key)), js_funcs, profile, source_event)
    def save(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, source_event: Optional[str]=None):
        """Ctrl+S handler; suppresses the browser's save dialog."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        self.custom('(event.ctrlKey) && (event.which == 83)', ((['event.preventDefault()'] + js_funcs) + ['return false']), profile, source_event)
    def shift(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(16, js_funcs, profile, reset, source_event)
    def control(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(17, js_funcs, profile, reset, source_event)
    def alt(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(18, js_funcs, profile, reset, source_event)
    def space(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(32, js_funcs, profile, reset, source_event)
    def right(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(39, js_funcs, profile, reset, source_event)
    def left(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(37, js_funcs, profile, reset, source_event)
    def up(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(38, js_funcs, profile, reset, source_event)
    def down(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(40, js_funcs, profile, reset, source_event)
    def delete(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(46, js_funcs, profile, reset, source_event)
    def escape(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, reset: bool=False, source_event: Optional[str]=None):
        self.key(27, js_funcs, profile, reset, source_event)
    def get_event(self):
        """Compile the stored rules into per-source JS ``if(rule){...}`` strings."""
        event = {}
        for (source, event_funcs) in self.__events_per_source.items():
            event[source] = {'content': [], 'profile': self._page.profile}
            for (rule, js_funcs) in event_funcs.items():
                event[source]['content'].append(('if(%s){%s}' % (rule, JsUtils.jsConvertFncs(js_funcs['content'], toStr=True, profile=js_funcs['profile']))))
        return event
def try_get_constant_num(arg: Any) -> (bool, Any):
    """Try to resolve *arg* to a constant Python number.

    Handles plain ints/floats directly, unwraps IntImm values, and follows
    IntVarTensor through its underlying int_var. Returns ``(True, number)``
    on success and ``(False, None)`` when no constant can be extracted.
    """
    if isinstance(arg, (int, float)):
        return (True, arg)
    if isinstance(arg, IntImm):
        return (True, arg.value())
    if isinstance(arg, IntVarTensor):
        # Recurse on the tensor's underlying integer variable.
        return try_get_constant_num(arg._attrs['int_var'])
    return (False, None)
class DcNodeView(APIView):
    """API view for attaching/detaching compute nodes to a datacenter and
    managing the per-datacenter node resource limits (DcNode)."""
    serializer = DcNodeSerializer
    order_by_default = ('node__hostname',)
    order_by_field_map = {'hostname': 'node__hostname'}

    def __init__(self, request, hostname, data):
        super(DcNodeView, self).__init__(request)
        self.data = data
        self.hostname = hostname
        if hostname:
            # Single-node view: resolve the Node and its DcNode binding.
            self.node = get_object(request, Node, {'hostname': hostname}, exists_ok=True, noexists_fail=True)
            self.dcnode = get_object(request, DcNode, {'dc': request.dc, 'node': self.node}, sr=('dc', 'node'))
        else:
            # List view: self.node is the model class, self.dcnode a queryset.
            self.node = Node
            self.dcnode = get_dc_nodes(request, prefetch_vms_count=self.extended, order_by=self.order_by)

    def get(self, many=False):
        """Return one DcNode or the list of DcNodes in the request DC."""
        if self.extended:
            self.serializer = ExtendedDcNodeSerializer
        if (many or (not self.hostname)):
            if (self.full or self.extended):
                if self.dcnode:
                    res = self.serializer(self.request, self.dcnode, many=True).data
                else:
                    res = []
            else:
                # Compact list: hostnames only.
                res = list(self.dcnode.values_list('node__hostname', flat=True))
        else:
            if self.extended:
                # Annotate VM counts for this DC (real VMs exclude slaves).
                self.dcnode.vms = self.node.vm_set.filter(dc=self.request.dc).count()
                self.dcnode.real_vms = self.node.vm_set.filter(dc=self.request.dc, slavevm__isnull=True).count()
            res = self.serializer(self.request, self.dcnode).data
        return SuccessTaskResponse(self.request, res)

    def post(self):
        """Attach the node to the DC, optionally attaching its storages too."""
        (node, dcnode) = (self.node, self.dcnode)
        (request, data) = (self.request, self.data)
        try:
            strategy = int(data.get('strategy', DcNode.SHARED))
        except ValueError:
            strategy = DcNode.SHARED
        if (strategy == DcNode.SHARED):
            # Shared strategy ignores explicit resource limits.
            dcnode.cpu = dcnode.ram = dcnode.disk = 0
            data.pop('cpu', None)
            data.pop('ram', None)
            data.pop('disk', None)
        try:
            add_storage = int(data.pop('add_storage', DcNode.NS_ATTACH_NONE))
        except ValueError:
            add_storage = DcNode.NS_ATTACH_NONE
        ser = DcNodeSerializer(request, dcnode, data=data)
        if (not ser.is_valid()):
            return FailureTaskResponse(request, ser.errors, obj=node)
        ser.object.save(update_resources=False)
        DcNode.update_all(node=node)
        ser.reload()
        if add_storage:
            # Local imports avoid a circular dependency at module load time.
            from api.utils.views import call_api_view
            from api.dc.storage.views import dc_storage
            ns = NodeStorage.objects.filter(node=node)
            if (add_storage != DcNode.NS_ATTACH_ALL):
                ns = ns.filter(storage__access=add_storage)
            for zpool in ns.values_list('zpool', flat=True):
                try:
                    # BUG FIX: was "('%%s' % (zpool, node.hostname))", which
                    # raises TypeError ('%%s' contains no conversion target
                    # for the two arguments). The dc_storage API addresses a
                    # node storage as "zpool@hostname".
                    zpool_node = ('%s@%s' % (zpool, node.hostname))
                    res = call_api_view(request, 'POST', dc_storage, zpool_node, data={}, log_response=True)
                    if (res.status_code == 201):
                        logger.info('POST dc_storage(%s) was successful: %s', zpool_node, res.data)
                    else:
                        logger.error('POST dc_storage(%s) failed: %s: %s', zpool_node, res.status_code, res.data)
                except Exception as ex:
                    # Storage attach is best-effort; node attach already done.
                    logger.exception(ex)
        return SuccessTaskResponse(request, ser.data, status=status.HTTP_201_CREATED, obj=node, detail_dict=ser.detail_dict(), msg=LOG_NODE_ATTACH)

    def put(self):
        """Update the DcNode resource limits."""
        (node, dcnode) = (self.node, self.dcnode)
        ser = DcNodeSerializer(self.request, dcnode, data=self.data, partial=True)
        if (not ser.is_valid()):
            return FailureTaskResponse(self.request, ser.errors, obj=node)
        ser.object.save(update_resources=False)
        DcNode.update_all(node=node)
        ser.reload()
        return SuccessTaskResponse(self.request, ser.data, obj=node, detail_dict=ser.detail_dict(), msg=LOG_NODE_UPDATE)

    def delete(self):
        """Detach the node from the DC (refused while it hosts VMs/backups)."""
        (node, dcnode) = (self.node, self.dcnode)
        if dcnode.dc.vm_set.filter(node=node).exists():
            raise PreconditionRequired(_('Node has VMs in datacenter'))
        if dcnode.dc.backup_set.filter(node=node).exists():
            raise PreconditionRequired(_('Node has VM backups in datacenter'))
        ser = DcNodeSerializer(self.request, dcnode)
        ser.object.delete()
        DcNode.update_all(node=node)
        # Force serialization BEFORE the object is gone so detail_dict()
        # below still has data to report.
        ser.data
        return SuccessTaskResponse(self.request, None, obj=node, detail_dict=ser.detail_dict(), msg=LOG_NODE_DETACH)
def set_my_description(token, description=None, language_code=None):
    """Call the Telegram Bot API ``setMyDescription`` method.

    Only the parameters that are not None are included in the request
    payload; the raw API response is returned.
    """
    payload = {
        name: value
        for (name, value) in (('description', description), ('language_code', language_code))
        if value is not None
    }
    return _make_request(token, 'setMyDescription', params=payload, method='post')
class LegacyTransactionFactory(factory.Factory):
    """factory_boy factory producing signed legacy (Frontier-style) transactions."""
    class Meta():
        model = _FakeTransaction
    # Sensible defaults for a minimal value-transfer transaction.
    nonce = factory.Sequence((lambda n: n))
    gas_price = 1
    gas = 21000
    to = ZERO_ADDRESS
    value = 0
    data = b''
    # NOTE(review): takes ``cls`` -- factory_boy's _create hook is a
    # classmethod; the decorator appears stripped. Confirm upstream.
    def _create(cls, model_class: Type[BaseTransactionFields], *args: Any, chain_id: int=None, **kwargs: Any) -> BaseTransactionFields:
        """Build the model, signing with a (possibly generated) private key
        unless an explicit ``vrs`` triple is supplied.

        NOTE(review): ``chain_id`` is accepted but never used below (legacy
        pre-EIP-155 signing does not include it) -- confirm intentional.
        """
        if ('vrs' in kwargs):
            # Caller supplied the signature components directly.
            (v, r, s) = kwargs.pop('vrs')
        else:
            if ('private_key' in kwargs):
                private_key = kwargs.pop('private_key')
            else:
                private_key = PrivateKeyFactory()
            # Sign the remaining fields to derive v/r/s.
            tx_for_signing = FrontierUnsignedTransaction(**kwargs)
            signed_tx = tx_for_signing.as_signed_transaction(private_key)
            v = signed_tx.v
            r = signed_tx.r
            s = signed_tx.s
        return model_class(**kwargs, v=v, r=r, s=s)
class FaucetStackDHCPSingleVLANTest(FaucetTopoTestBase):
    """Two stacked DPs, one VLAN: hosts acquire DHCP leases from a dnsmasq
    server attached (via a VLAN interface) to host 4, then ping each other."""
    NUM_DPS = 2
    NUM_HOSTS = 5
    NUM_VLANS = 1
    N_TAGGED = 1
    N_UNTAGGED = 4
    SOFTWARE_ONLY = True
    # NOTE(review): no "self" parameter -- likely a @staticmethod whose
    # decorator was stripped; confirm upstream.
    def host_ip_address(_host_index, _vlan_index):
        # Hosts start without an address; they obtain one via DHCP.
        return '0.0.0.0'
    # NOTE(review): set_up is nested INSIDE setUp here; upstream faucet tests
    # define setUp as a no-op and set_up as a sibling method (the test below
    # calls self.set_up()). This looks like an indentation/transcription
    # artifact -- confirm against the original before relying on it.
    def setUp(self):
        def set_up(self):
            super().setUp()
            # Linear (path) stack topology across the DPs.
            network_graph = networkx.path_graph(self.NUM_DPS)
            dp_options = {}
            for dp_i in network_graph.nodes():
                dp_options.setdefault(dp_i, {'group_table': self.GROUP_TABLE, 'ofchannel_log': ((self.debug_log_path + str(dp_i)) if self.debug_log_path else None), 'hardware': (self.hardware if ((dp_i == 0) and self.hw_dpid) else 'Open vSwitch')})
                if (dp_i == 0):
                    # DP 0 is the stack root.
                    dp_options[dp_i]['stack'] = {'priority': 1}
            switch_links = list(network_graph.edges())
            link_vlans = {edge: None for edge in switch_links}
            # Host 4 (on DP 0) carries the dnsmasq server on a tagged VLAN.
            host_links = {0: [0], 1: [0], 2: [1], 3: [1], 4: [0]}
            host_vlans = {0: 0, 1: 0, 2: 0, 3: 0, 4: [0]}
            mininet_host_options = {h_i: {'ip': '0.0.0.0'} for h_i in range((self.NUM_HOSTS - 1))}
            mininet_host_options[4] = {'vlan_intfs': {0: '10.1.0.20/24'}, 'ip': '0.0.0.0'}
            vlan_options = {v_i: {'faucet_vips': [self.faucet_vip(v_i)], 'faucet_mac': self.faucet_mac(v_i)} for v_i in range(self.NUM_VLANS)}
            self.build_net(host_links=host_links, host_vlans=host_vlans, switch_links=switch_links, link_vlans=link_vlans, n_vlans=self.NUM_VLANS, dp_options=dp_options, vlan_options=vlan_options, mininet_host_options=mininet_host_options)
            self.start_net()
    def test_dhcp_ip_allocation(self):
        """Hosts 0-3 should receive sequential leases and reach each other."""
        self.set_up()
        iprange = '10.1.0.10,10.1.0.20'
        router = '10.1.0.254'
        vlan = 100
        host = self.net.get(self.topo.hosts_by_id[4])
        host.create_dnsmasq(self.tmpdir, iprange, router, vlan, host.vlan_intfs[0])
        for host_n in range((self.NUM_HOSTS - 1)):
            host = self.net.get(self.topo.hosts_by_id[host_n])
            host.run_dhclient(self.tmpdir)
        # Leases are handed out sequentially from the start of the range.
        self.assertEqual(self.net.get(self.topo.hosts_by_id[0]).return_ip()[:(- 3)], '10.1.0.10')
        self.assertEqual(self.net.get(self.topo.hosts_by_id[1]).return_ip()[:(- 3)], '10.1.0.11')
        self.assertEqual(self.net.get(self.topo.hosts_by_id[2]).return_ip()[:(- 3)], '10.1.0.12')
        self.assertEqual(self.net.get(self.topo.hosts_by_id[3]).return_ip()[:(- 3)], '10.1.0.13')
        self.check_host_connectivity_by_id(0, 1)
        self.check_host_connectivity_by_id(1, 2)
        self.check_host_connectivity_by_id(2, 3)
def remap_cb_input(mode=0):
    """Return a callback that reshapes callback input for display.

    mode 0: replace each message with ``filter_info_for_printing(msg.info)``.
    mode 1 (or any other value except 2): replace each message with ``msg.msgs``.
    mode 2: pass the value through untouched.
    The keys ``node_tick`` and ``t_n`` are always left as-is.
    """
    SKIP_KEYS = ('node_tick', 't_n')

    def _convert_in_place(mapping):
        # Rewrite message entries of a (copied) dict according to `mode`.
        for key, msg in mapping.items():
            if key in SKIP_KEYS:
                continue
            mapping[key] = filter_info_for_printing(msg.info) if mode == 0 else msg.msgs

    def _remap_cb_input(value):
        if mode == 2:
            return value
        if isinstance(value, tuple):
            remapped = (value[0].copy(), value[1].copy())
            for part in remapped:
                _convert_in_place(part)
        else:
            remapped = value.copy()
            _convert_in_place(remapped)
        return remapped

    return _remap_cb_input
def get_assign_name(target: Any) -> Any:
    """Resolve the (possibly dotted) name of an assignment target node.

    Returns the plain name for Name/AssignName nodes, a dotted path for
    attribute nodes, and None for anything else.
    """
    if isinstance(target, (astroid.nodes.AssignName, astroid.nodes.Name)):
        return target.name
    if isinstance(target, (astroid.nodes.AssignAttr, astroid.nodes.Attribute)):
        # Recurse into the expression; '+' (not f-string) is kept deliberately
        # so an unresolvable inner expression still raises rather than
        # silently producing "None.attr".
        return get_assign_name(target.expr) + '.' + target.attrname
    return None
class SKLearnForestRegressorTransformer(SKLearnForestTransformer):
    """Transformer for scikit-learn RandomForestRegressor models."""

    def __init__(self, model: RandomForestRegressor, feature_names: Sequence[str]):
        super().__init__(model, feature_names)

    def build_aggregator_output(self) -> Dict[str, Any]:
        """Aggregate tree outputs as an equally-weighted sum over estimators."""
        n_estimators = len(self._model.estimators_)
        return {'weighted_sum': {'weights': [1.0 / n_estimators] * n_estimators}}

    def determine_target_type(self) -> str:
        return 'regression'

    def model_type(self) -> str:
        return TYPE_REGRESSION
def test_agent_fingerprint_different_fingerprints():
    """_compare_fingerprints raises ValueError when the computed fingerprint
    differs from the one declared in the package configuration."""
    config = Mock()
    config.fingerprint = {}
    package_dir = Path('path', 'to', 'dir')
    error_regex = (f'''Fingerprints for package {re.escape(str(package_dir))} do not match:
Expected: {dict()}
Actual: {dict(foo='bar')}
''' + "Please fingerprint the package before continuing: 'aea fingerprint")
    # Force the computed fingerprint to disagree with the (empty) declared one.
    patched_compute = mock.patch('aea.configurations.base._compute_fingerprint', return_value={'foo': 'bar'})
    with pytest.raises(ValueError, match=error_regex), patched_compute:
        _compare_fingerprints(config, package_dir, False, PackageType.AGENT)
class OptionSeriesLollipopSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Mapping options for the sonification `time` property.

    NOTE(review): each accessor was defined twice with the same name (a getter
    shape and a setter shape), so the setter silently shadowed the getter.
    Restored the `@property` / `.setter` decorators this pattern requires.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Mapping options for the sonification `tremolo speed` property.

    NOTE(review): each accessor was defined twice with the same name (a getter
    shape and a setter shape), so the setter silently shadowed the getter.
    Restored the `@property` / `.setter` decorators this pattern requires.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def compare_codes(input_code, standard_codes, bitwise_comparison=True):
    """Return True if ``input_code`` matches ``standard_codes``.

    ``standard_codes`` may be a single code or a list/tuple of codes (matched
    with any-of semantics). A match is either equality or, when
    ``bitwise_comparison`` is true, a non-zero bitwise AND.

    Fix: the bitwise branch used to return the raw AND result (an int); it is
    normalized to bool so the function always returns a boolean.
    """
    if isinstance(standard_codes, (list, tuple)):
        return any(compare_codes(input_code, code, bitwise_comparison) for code in standard_codes)
    if input_code == standard_codes:
        return True
    if bitwise_comparison:
        return bool(input_code & standard_codes)
    return False
class OptionPlotoptionsStreamgraphSonificationContexttracksMappingHighpassFrequency(Options):
    """Mapping options for the sonification `highpass frequency` property.

    NOTE(review): each accessor was defined twice with the same name (a getter
    shape and a setter shape), so the setter silently shadowed the getter.
    Restored the `@property` / `.setter` decorators this pattern requires.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@torch.no_grad()
def get_mel_from_audio(audio: torch.Tensor, sample_rate=44100, n_fft=2048, win_length=2048, hop_length=512, f_min=40, f_max=16000, n_mels=128, center=True, power=1.0, pad_mode='reflect', norm='slaney', mel_scale='slaney') -> torch.Tensor:
    """Compute a dynamic-range-compressed mel spectrogram from mono audio.

    ``audio`` must be shaped (1, n_samples); the returned tensor drops the
    leading channel dimension.

    NOTE(review): the original began with a bare garbled ``_grad()`` statement;
    reconstructed as the ``@torch.no_grad()`` decorator (inference only).
    """
    assert audio.ndim == 2, 'Audio tensor must be 2D (1, n_samples)'
    assert audio.shape[0] == 1, 'Audio tensor must be mono'
    # Build the transform on the same device as the input audio.
    transform = get_mel_transform(sample_rate=sample_rate, n_fft=n_fft, win_length=win_length, hop_length=hop_length, f_min=f_min, f_max=f_max, n_mels=n_mels, center=center, power=power, pad_mode=pad_mode, norm=norm, mel_scale=mel_scale).to(audio.device)
    mel = transform(audio)
    mel = dynamic_range_compression(mel)
    return mel[0]
class TestTabSlugsCombineHeader(util.MdCase):
    """Test tab slug generation with ``combine_header_slug`` enabled: tab ids
    should be prefixed with the slug of the nearest preceding header."""

    # Extensions and config under test; slugs are lower-cased.
    extension = ['pymdownx.tabbed', 'toc', 'pymdownx.details']
    extension_configs = {'pymdownx.tabbed': {'slugify': slugify(case='lower'), 'combine_header_slug': True}}

    def test_combine_header_slug(self):
        """Tab ids combine the preceding header slug with the tab title,
        including tabs nested inside a details block."""
        md = '\n    ### Here is some text\n\n    === "First Tab"\n        content\n\n    ### Another header\n\n    ??? "title"\n        === "Second Tab"\n            content\n    '
        self.check_markdown(md, '\n    <h3 id="here-is-some-text">Here is some text</h3>\n    <div class="tabbed-set" data-tabs="1:1"><input checked="checked" id="here-is-some-text-first-tab" name="__tabbed_1" type="radio" /><label for="here-is-some-text-first-tab">First Tab</label><div class="tabbed-content">\n    <p>content</p>\n    </div>\n    </div>\n    <h3 id="another-header">Another header</h3>\n    <details>\n    <summary>title</summary>\n    <div class="tabbed-set" data-tabs="2:1"><input checked="checked" id="another-header-second-tab" name="__tabbed_2" type="radio" /><label for="another-header-second-tab">Second Tab</label><div class="tabbed-content">\n    <p>content</p>\n    </div>\n    </div>\n    </details>\n    ', True)

    def test_no_header(self):
        """Without a preceding header, the tab id is just the tab slug."""
        md = '\n    === "A Tab"\n        content\n    '
        self.check_markdown(md, '\n    <div class="tabbed-set" data-tabs="1:1"><input checked="checked" id="a-tab" name="__tabbed_1" type="radio" /><label for="a-tab">A Tab</label><div class="tabbed-content">\n    <p>content</p>\n    </div>\n    </div>\n    ', True)

    def test_header_after(self):
        """A header *after* the tab must not contribute to the tab id."""
        md = '\n    === "A Tab"\n        content\n\n    # Header\n    '
        self.check_markdown(md, '\n    <div class="tabbed-set" data-tabs="1:1"><input checked="checked" id="a-tab" name="__tabbed_1" type="radio" /><label for="a-tab">A Tab</label><div class="tabbed-content">\n    <p>content</p>\n    </div>\n    </div>\n    <h1 id="header">Header</h1>\n    ', True)
def language_detection_arguments(provider_name: str):
    """Return sample request arguments for the language-detection feature.

    The payload is an Italian excerpt (right-to-education article of the
    Universal Declaration of Human Rights).

    NOTE(review): ``provider_name`` is accepted but unused here — presumably
    kept for signature parity with the other ``*_arguments`` helpers; confirm
    against callers.
    """
    return {'text': "Ogni individuo ha diritto all'istruzione. L'istruzione deve essere gratuita almeno per quanto riguarda le classi elementari e fondamentali. L'istruzione elementare deve essere obbligatoria. L'istruzione tecnica e professionale deve essere messa alla portata di tutti e l'istruzione superiore deve essere egualmente accessibile a tutti sulla base del merito.\nL'istruzione deve essere indirizzata al pieno sviluppo della personalita umana ed al rafforzamento del rispetto dei diritti umani e delle liberta fondamentali. Essa deve promuovere la comprensione, la tolleranza, l'amicizia fra tutte le Nazioni, i gruppi razziali e religiosi, e deve favorire l'opera delle Nazioni Unite per il mantenimento della pace.\nI genitori hanno diritto di priorita nella scelta del genere di istruzione da impartire ai loro figli."}
@libp2p_log_on_failure_all
class TestP2PLibp2PReceiveEnvelope(BaseTestP2PLibp2p):
    """Test that an envelope is routed between two libp2p nodes even when the
    first read attempt fails and the connection has to reconnect.

    NOTE(review): the original started with a bare garbled ``_log_on_failure_all``
    expression (a NameError at import); reconstructed as the
    ``@libp2p_log_on_failure_all`` class decorator — confirm the exact name.
    """

    def test_envelope_routed(self):
        """Envelope sent from node 1 arrives at node 2 after a forced read error."""
        addr_1 = self.connection1.node.address
        addr_2 = self.connection2.node.address
        msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'hello')
        envelope = Envelope(to=addr_2, sender=addr_1, message=msg)
        # Make the first pipe read fail so the reconnect/retry path is exercised.
        with mock.patch.object(self.connection2.logger, 'exception') as _mock_logger, mock.patch.object(self.connection2.node.pipe, 'read', side_effect=Exception('some error')):
            self.multiplexer1.put(envelope)
            delivered_envelope = self.multiplexer2.get(block=True, timeout=20)
            _mock_logger.assert_has_calls([call('Failed to read. Exception: some error. Try reconnect to node and read again.')])
        assert delivered_envelope is not None
        assert delivered_envelope.to == envelope.to
        assert delivered_envelope.sender == envelope.sender
        assert delivered_envelope.protocol_specification_id == envelope.protocol_specification_id
        # Raw bytes differ until decoded and re-addressed.
        assert delivered_envelope.message != envelope.message
        msg = DefaultMessage.serializer.decode(delivered_envelope.message)
        msg.to = delivered_envelope.to
        msg.sender = delivered_envelope.sender
        assert envelope.message == msg
@pytest.mark.django_db
def test_missing_render_inline_actions_from_readonly_fields(rf, admin_user, admin_site, article):
    """changeform_view must not crash when ``inline_actions`` is None and
    ``render_inline_actions`` is missing from the readonly fields.

    NOTE(review): the original started with a garbled bare ``.django_db`` line;
    reconstructed as the ``@pytest.mark.django_db`` marker.
    """
    from test_proj.blog import admin

    class ArticleAdmin(admin.InlineActionsModelAdminMixin, admin.admin.ModelAdmin):
        list_display = ('name',)
        inline_actions = None

        def get_readonly_fields(self, *args, **kwargs):
            # Deliberately omit 'render_inline_actions'.
            return []

    request = rf.get(f'/admin/blog/articles/{article.id}/')
    request.user = admin_user
    # Distinct name so the imported `admin` module is not shadowed.
    model_admin = ArticleAdmin(Article, admin_site)
    model_admin.changeform_view(request)
class TestLoadOutput():
    """Validate that each provider/feature/subfeature phase ships a usable
    output.json with the two required top-level keys."""

    # NOTE(review): the original began this method with a garbled bare
    # `.parametrize(...)` expression; reconstructed as the pytest marker.
    @pytest.mark.parametrize(('provider', 'feature', 'subfeature', 'phase'), global_features(return_phase=True)['ungrouped_providers'])
    def test_load_output_valid_paramters(self, provider, feature, subfeature, phase):
        """output.json must load as a dict containing original_response and
        standardized_response (create/delete/upload phases have none)."""
        if ('create' in phase) or ('delete' in phase) or ('upload' in phase):
            pytest.skip("create, delete and upload phase don't have a output.json")
        output = load_output(provider, feature, subfeature, phase)
        assert isinstance(output, dict), 'output should be a dict'
        try:
            output['original_response']
            output['standardized_response']
        except KeyError:
            # Fixed typo in the failure message ("standradized").
            pytest.fail('original_response and standardized_response not found')
class IteratedEnsembleSmoother(BaseRunModel):
    """Run model for the iterative ensemble smoother (SIES) algorithm."""

    _simulation_arguments: SIESRunArguments

    def __init__(self, simulation_arguments: SIESRunArguments, config: ErtConfig, storage: StorageAccessor, queue_config: QueueConfig, experiment_id: UUID, analysis_config: IESSettings, update_settings: UpdateSettings):
        super().__init__(simulation_arguments, config, storage, queue_config, experiment_id, phase_count=2)
        self.support_restart = False
        self.analysis_config = analysis_config
        self.update_settings = update_settings
        # Step length decays exponentially with the iteration number.
        self.sies_step_length = functools.partial(steplength_exponential, min_steplength=analysis_config.ies_min_steplength, max_steplength=analysis_config.ies_max_steplength, halflife=analysis_config.ies_dec_steplength)
        # Created lazily by the first iterative_smoother_update call.
        self.sies_smoother = None

    @property
    def iteration(self) -> int:
        """Current SIES iteration (1 before the first update has run).

        NOTE(review): restored the ``@property`` decorator — ``run_experiment``
        compares ``current_iter < self.iteration``, which needs an int, not a
        bound method.
        """
        if self.sies_smoother is None:
            return 1
        return self.sies_smoother.iteration

    def analyzeStep(self, prior_storage: EnsembleAccessor, posterior_storage: EnsembleAccessor, ensemble_id: str, iteration: int, initial_mask: npt.NDArray[np.bool_]) -> SmootherSnapshot:
        """Run one SIES update from prior to posterior, wrapped in the
        PRE_UPDATE/POST_UPDATE workflow hooks."""
        self.setPhaseName('Analyzing...', indeterminate=True)
        self.setPhaseName('Pre processing update...', indeterminate=True)
        self.ert.runWorkflows(HookRuntime.PRE_UPDATE, self._storage, prior_storage)
        try:
            (smoother_snapshot, self.sies_smoother) = iterative_smoother_update(prior_storage, posterior_storage, self.sies_smoother, ensemble_id, self.ert.update_configuration, update_settings=self.update_settings, analysis_config=self.analysis_config, sies_step_length=self.sies_step_length, initial_mask=initial_mask, rng=self.rng, progress_callback=functools.partial(self.smoother_event_callback, iteration), log_path=self.ert_config.analysis_config.log_path)
        except ErtAnalysisError as e:
            raise ErtRunError(f'Update algorithm failed with the following error: {e}') from e
        self.setPhaseName('Post processing update...', indeterminate=True)
        self.ert.runWorkflows(HookRuntime.POST_UPDATE, self._storage, posterior_storage)
        return smoother_snapshot

    def run_experiment(self, evaluator_server_config: EvaluatorServerConfig) -> RunContext:
        """Sample a prior, then iterate update/evaluate cycles until the
        configured number of iterations completes (with bounded retries)."""
        self.checkHaveSufficientRealizations(self._simulation_arguments.active_realizations.count(True), self._simulation_arguments.minimum_required_realizations)
        # Consistency: use the private attribute like the rest of this class.
        iteration_count = self._simulation_arguments.num_iterations
        phase_count = (iteration_count + 1)
        self.setPhaseCount(phase_count)
        log_msg = f"Running SIES for {iteration_count} iteration{('s' if (iteration_count != 1) else '')}."
        logger.info(log_msg)
        self.setPhaseName(log_msg, indeterminate=True)
        target_case_format = self._simulation_arguments.target_case
        prior = self._storage.create_ensemble(self._experiment_id, ensemble_size=self._simulation_arguments.ensemble_size, name=(target_case_format % 0))
        self.set_env_key('_ERT_ENSEMBLE_ID', str(prior.id))
        initial_mask = np.array(self._simulation_arguments.active_realizations, dtype=bool)
        prior_context = RunContext(sim_fs=prior, runpaths=self.run_paths, initial_mask=initial_mask, iteration=0)
        sample_prior(prior_context.sim_fs, prior_context.active_realizations, random_seed=self._simulation_arguments.random_seed)
        self._evaluate_and_postprocess(prior_context, evaluator_server_config)
        self.ert.runWorkflows(HookRuntime.PRE_FIRST_UPDATE, self._storage, prior_context.sim_fs)
        for current_iter in range(1, (iteration_count + 1)):
            states = [RealizationStorageState.HAS_DATA, RealizationStorageState.INITIALIZED]
            self.send_event(RunModelUpdateBeginEvent(iteration=(current_iter - 1)))
            self.send_event(RunModelStatusEvent(iteration=(current_iter - 1), msg='Creating posterior ensemble..'))
            posterior = self._storage.create_ensemble(self._experiment_id, name=(target_case_format % current_iter), ensemble_size=prior_context.sim_fs.ensemble_size, iteration=current_iter, prior_ensemble=prior_context.sim_fs)
            posterior_context = RunContext(sim_fs=posterior, runpaths=self.run_paths, initial_mask=prior_context.sim_fs.get_realization_mask_from_state(states), iteration=current_iter)
            update_success = False
            for _iteration in range(self._simulation_arguments.num_retries_per_iter):
                smoother_snapshot = self.analyzeStep(prior_storage=prior_context.sim_fs, posterior_storage=posterior_context.sim_fs, ensemble_id=str(prior_context.sim_fs.id), iteration=(current_iter - 1), initial_mask=initial_mask)
                # The smoother only advances its iteration counter when the
                # update succeeded.
                analysis_success = (current_iter < self.iteration)
                if analysis_success:
                    update_success = True
                    break
                # Update failed: re-evaluate the prior and retry.
                self._evaluate_and_postprocess(prior_context, evaluator_server_config)
            if update_success:
                self.send_event(RunModelUpdateEndEvent(iteration=(current_iter - 1), smoother_snapshot=smoother_snapshot))
                self._evaluate_and_postprocess(posterior_context, evaluator_server_config)
            else:
                raise ErtRunError(f'Iterated ensemble smoother stopped: maximum number of iteration retries ({self._simulation_arguments.num_retries_per_iter} retries) reached for iteration {current_iter}')
            prior_context = posterior_context
        self.setPhase(phase_count, 'Experiment completed.')
        return posterior_context

    @classmethod
    def name(cls) -> str:
        """Human-readable name of this run model.

        NOTE(review): restored the ``@classmethod`` decorator implied by the
        ``cls`` parameter.
        """
        return 'Iterated ensemble smoother'
class TestBedForMacs2Function(unittest.TestCase):
    """Tests for bed_for_macs2() across several MACS xls output formats."""

    def test_bed_for_macs2_with_2010_(self):
        """MACS 2.0.10 narrow-peak output yields +/-100bp windows around
        abs_summit.

        NOTE(review): the original defined this method twice; the first copy
        had its summit coordinates garbled to bare ``-100``/``+100`` and was
        shadowed by this one anyway, so it has been removed.
        """
        macsxls = MacsXLS(fp=io.StringIO(MACS2010__data))
        bed = bed_for_macs2(macsxls)
        self.assertEqual(bed.header(), ['chr', 'abs_summit-100', 'abs_summit+100'])
        self.assertEqual(bed[0]['chr'], 'chr1')
        self.assertEqual(bed[0]['abs_summit-100'], (6214792 - 100))
        self.assertEqual(bed[0]['abs_summit+100'], (6214792 + 100))
        self.assertEqual(bed[4]['chr'], 'chr1')
        self.assertEqual(bed[4]['abs_summit-100'], (4858423 - 100))
        self.assertEqual(bed[4]['abs_summit+100'], (4858423 + 100))

    def test_bed_for_macs2_with_2010__broad(self):
        """Broad-peak output has no abs_summit column, so conversion raises."""
        macsxls = MacsXLS(fp=io.StringIO(MACS2010__broad_data))
        self.assertRaises(Exception, bed_for_macs2, macsxls)

    def test_bed_for_macs2_with_140beta(self):
        """MACS 1.4.0beta output is unsupported and must raise."""
        macsxls = MacsXLS(fp=io.StringIO(MACS140beta_data))
        self.assertRaises(Exception, bed_for_macs2, macsxls)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.