code stringlengths 281 23.7M |
|---|
# NOTE(review): the next two lines look like stripped decorators
# (presumably @_register_parser / @_set_msg_type(...)) -- confirm upstream.
_register_parser
_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REPLY)
class OFPQueueGetConfigReply(MsgBase):
    """Queue configuration reply message.

    Holds the queried port number and the list of packet queues parsed
    from the reply body.
    """

    def __init__(self, datapath, queues=None, port=None):
        super(OFPQueueGetConfigReply, self).__init__(datapath)
        self.queues = queues  # list of OFPPacketQueue, filled by parser()
        self.port = port      # port the queue configuration applies to

    # NOTE(review): takes `cls` but has no @classmethod decorator --
    # presumably stripped; confirm upstream.
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Build an OFPQueueGetConfigReply from a raw message buffer."""
        msg = super(OFPQueueGetConfigReply, cls).parser(datapath, version, msg_type, msg_len, xid, buf)
        # The port number sits immediately after the common OpenFlow header.
        (msg.port,) = struct.unpack_from(ofproto.OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR, msg.buf, ofproto.OFP_HEADER_SIZE)
        msg.queues = []
        # Walk the variable-length queue entries until the end of the message;
        # each entry reports its own length in queue.len.
        offset = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_SIZE
        while (offset < msg_len):
            queue = OFPPacketQueue.parser(msg.buf, offset)
            msg.queues.append(queue)
            offset += queue.len
        return msg
def distance(point_p, point_q, coordinate_system='cartesian', ellipsoid=None):
    """Distance between two points in the requested coordinate system.

    coordinate_system is one of 'cartesian', 'spherical' or 'geodetic';
    ellipsoid is only used for geodetic distances.
    """
    check_coordinate_system(coordinate_system)
    if coordinate_system == 'cartesian':
        dist = distance_cartesian(point_p, point_q)
    elif coordinate_system == 'spherical':
        dist = distance_spherical(point_p, point_q)
    elif coordinate_system == 'geodetic':
        dist = distance_geodetic(point_p, point_q, ellipsoid)
    return dist
def test_mariadb_connector_build_uri(connection_config_mariadb, db: Session):
    """Check MariaDBConnector.build_uri against secrets with/without optional keys."""
    connector = MariaDBConnector(configuration=connection_config_mariadb)
    s = connection_config_mariadb.secrets
    port = ((s['port'] and f":{s['port']}") or '')
    # NOTE(review): no '@' between password and host here, and the expected
    # URIs below look truncated ('mariadb_.internal') -- the literals appear
    # corrupted in this copy; confirm against the original test.
    uri = f"mariadb+pymysql://{s['username']}:{s['password']}{s['host']}{port}/{s['dbname']}"
    assert (connector.build_uri() == uri)
    connection_config_mariadb.secrets = {'username': 'mariadb_user', 'password': 'mariadb_pw', 'host': 'host.docker.internal', 'dbname': 'mariadb_example', 'port': '3307'}
    connection_config_mariadb.save(db)
    assert (connector.build_uri() == 'mariadb+pymysql://mariadb_user:mariadb_.internal:3307/mariadb_example')
    # No port in the secrets: the default 3306 should be used.
    connection_config_mariadb.secrets = {'username': 'mariadb_user', 'password': 'mariadb_pw', 'host': 'host.docker.internal', 'dbname': 'mariadb_example'}
    connection_config_mariadb.save(db)
    assert (connector.build_uri() == 'mariadb+pymysql://mariadb_user:mariadb_.internal:3306/mariadb_example')
    # No password.
    connection_config_mariadb.secrets = {'username': 'mariadb_user', 'host': 'host.docker.internal', 'dbname': 'mariadb_example'}
    connection_config_mariadb.save(db)
    assert (connector.build_uri() == 'mariadb+pymysql://mariadb_.internal:3306/mariadb_example')
    # No username either.
    # NOTE(review): unlike the cases above, secrets are not saved to the db
    # before this final assertion -- presumably an oversight; confirm.
    connection_config_mariadb.secrets = {'host': 'host.docker.internal', 'dbname': 'mariadb_example'}
    assert (connector.build_uri() == 'mariadb+pymysql://host.docker.internal:3306/mariadb_example')
# NOTE(review): the next line appears to be a stripped decorator
# (presumably @mark.skipif(...)) -- as written it is not valid Python.
((MAGICK_VERSION_NUMBER < 1800), reason='Complex requires ImageMagick-7.0.8.')
def test_complex():
    """Image.complex('add') should produce an image different from the source."""
    with Image(width=1, height=1, pseudo='xc:gray25') as a:
        with Image(width=1, height=1, pseudo='xc:gray50') as b:
            a.image_add(b)
        a.iterator_reset()
        with a.complex('add') as img:
            assert (a.signature != img.signature)
class TestTachoMotorMaxSpeedValue(ptc.ParameterizedTestCase):
    """Check the max_speed attribute of the parameterized tacho motor."""

    def test_max_speed_value(self):
        motor = self._param['motor']
        expected = motor_info[motor.driver_name]['max_speed']
        self.assertEqual(motor.max_speed, expected)

    def test_max_speed_value_is_read_only(self):
        # Assigning to max_speed must fail: it is a read-only attribute.
        motor = self._param['motor']
        with self.assertRaises(AttributeError):
            motor.max_speed = 'ThisShouldNotWork'
_api_tests
def test_get_favorites():
    """get_favorites returns a capped DataFrame with expanded tweet fields."""
    favorites = get_favorites(screen_name='twitter', count=5, tweet_mode='extended')
    assert (type(favorites) == pd.core.frame.DataFrame)
    assert (len(favorites) <= 5)
    assert ('tweet_full_text' in favorites)
    # Embedded user ids must agree with the user_id column row by row.
    embedded_ids = [user['id'] for user in favorites['tweet_user']]
    assert all(embedded == uid for (embedded, uid) in zip(embedded_ids, favorites['user_id']))
class SpiderHandler(BaseHandler):
    """Tornado handler that validates and launches a configured spider run.

    NOTE(review): the bare `_separate(APP_DEBUG)`, `.authenticated` and
    `_on_executor` lines below look like stripped decorators (probably
    @tornado.web.authenticated / @run_on_executor plus a debug separator);
    `get` uses `yield`, so a @gen.coroutine decorator was likely stripped
    too -- confirm against upstream.
    """

    _separate(APP_DEBUG)
    .authenticated
    def get(self, slug=None):
        # Parse the crawl-related query arguments for the requested spider.
        print()
        app_log.info('SpiderHandler.get... ')
        app_log.info('SpiderHandler.get / slug : %s', slug)
        slug_ = self.request.arguments
        app_log.info('SpiderHandler.get / slug_ : \n %s', pformat(slug_))
        query_contrib = self.filter_slug(slug_, slug_class='crawl')
        app_log.info('SpiderHandler.get / query_contrib : \n %s ', pformat(query_contrib))
        app_log.info('SpiderHandler.get / next : ')
        next_url = query_contrib['next']
        app_log.info('next_url : %s', next_url)
        spider_id = query_contrib['spider_id']
        spider_oid = ObjectId(spider_id)
        test_limit = query_contrib.get('test_limit', None)
        app_log.info('SpiderHandler.get / test_limit : %s ', test_limit)
        app_log.info('SpiderHandler.get / spider_id : %s', spider_id)
        print(spider_oid, type(spider_oid))
        # Look up the spider configuration; any lookup failure counts as "not found".
        try:
            spider_config = self.application.coll_spiders.find_one({'_id': spider_oid})
        except:
            spider_config = None
        # Check whether another reactive spider is currently running.
        try:
            spider_reactive_running = self.application.coll_spiders.find_one({'$and': [{'scraper_config.parse_reactive': True}, {'scraper_log.is_running': True}, {'$not': {'_id': spider_oid}}]})
            app_log.info('SpiderHandler.get --- another spider_reactive_running : \n%s', spider_reactive_running)
        except:
            spider_reactive_running = None
        if (spider_config == None):
            # Unknown spider id: redirect back to /contributors with an error slug.
            app_log.warning('SpiderHandler.get --- !!! spider_id -%s- not found : test spider with test_config', spider_id)
            self.error_msg = self.add_error_message_to_slug(error_string=('ERROR !!! there is no spider configuration with -%s- spider_id in the DB' % str(spider_id)), args_to_delete=QUERY_CRAWL_BY_DEFAULT.keys())
            self.redirect(('/contributors' + self.error_msg))
        elif (spider_reactive_running != None):
            # Only one reactive spider may run at a time.
            app_log.warning('SpiderHandler.get --- !!! another reactive spider is already running ...')
            self.error_msg = self.add_error_message_to_slug(error_string='ERROR !!! another reactive spider is already running, please retry later', args_to_delete=QUERY_CRAWL_BY_DEFAULT.keys())
            self.redirect(('/contributors' + self.error_msg))
        else:
            is_running = spider_config['scraper_log']['is_running']
            is_reactive = spider_config['scraper_config']['parse_reactive']
            spider_name = spider_config['infos']['name']
            if (is_running == True):
                # The spider itself is already running: refuse to start it twice.
                app_log.warning('SpiderHandler.get --- spider %s with id : %s- is already running ', spider_name, spider_id)
                self.error_msg = self.add_error_message_to_slug(error_string=('the contributor - %s - is already running' % spider_name), args_to_delete=QUERY_CRAWL_BY_DEFAULT.keys())
                self.redirect(('/contributors' + self.error_msg))
            else:
                # Redirect immediately, then start the (long) crawl in the background.
                if (next_url != '1'):
                    self.redirect(('/contributors?page_n=' + str(next_url)))
                else:
                    self.redirect('/contributors')
                app_log.info('SpiderHandler.get --- spider_id : %s ', spider_id)
                app_log.info('SpiderHandler.get --- spider_config : %s ', pformat(spider_config['infos']))
                # Mark the spider as running before kicking off the executor task.
                self.update_spider_log(spider_id=spider_id, spider_oid=spider_oid, log_to_update='is_running', value=True)
                self.update_spider_log(spider_id=spider_id, spider_oid=spider_oid, log_to_update='is_data_available', value=False)
                if (test_limit != None):
                    self.update_spider_log(spider_id=spider_id, spider_oid=spider_oid, log_to_update='is_tested', value=False)
                app_log.info('SpiderHandler.get --- starting spider runner --- ')
                app_log.info('SpiderHandler.get --- creating data model list from fields in db ')
                data_model = list(self.application.coll_model.find({}))
                app_log.info('SpiderHandler.get --- data_model[:3] from db : \n %s \n...', pformat(data_model[:3]))
                (yield self.run_spider(datamodel=data_model, spider_id=spider_id, spider_oid=spider_oid, spider_config=spider_config, current_user_id=self.get_current_user_id(), test_limit=test_limit, countdown=DEFAULT_COUNTDOWN))

    _separate(APP_DEBUG)
    _on_executor
    def run_spider(self, datamodel, spider_id, spider_oid, spider_config, current_user_id, test_limit=None, countdown=None):
        # Runs on an executor thread (decorator residue above), so the
        # blocking time.sleep calls do not stall the IOLoop.
        print()
        app_log.info('SpiderHandler.run_spider --- ')
        app_log.info('SpiderHandler.run_spider / testing the non-blocking decorator with a time.sleep... ')
        time.sleep(1)
        # Visible countdown before the crawl actually starts.
        for i in range(countdown):
            time.sleep(1)
            app_log.info(('SpiderHandler.run_spider ---\n--- start spider %s in %s' % (str(spider_id), (countdown - i))))
        time.sleep(1)
        app_log.info('SpiderHandler.run_spider / now let it run... ')
        result = run_generic_spider(user_id=current_user_id, spider_id=str(spider_id), datamodel=datamodel, run_spider_config=spider_config, test_limit=test_limit)
        # Crawl finished: flip the status flags and expose the data.
        self.update_spider_log(spider_id=spider_id, spider_oid=spider_oid, log_to_update='is_working', value=True)
        self.update_spider_log(spider_id=spider_id, spider_oid=spider_oid, log_to_update='is_tested', value=True)
        self.update_spider_log(spider_id=spider_id, spider_oid=spider_oid, log_to_update='is_running', value=False)
        self.update_spider_log(spider_id=spider_id, spider_oid=spider_oid, log_to_update='is_data_available', value=True)
        raise gen.Return(result)
def get_current_alertmanager_version():
    """Return the installed alertmanager version string.

    Returns UNKNOWN_VERSION when the version cannot be parsed from the
    binary's output, and False when the binary is not installed at all.
    """
    try:
        completed = subprocess.run(
            [ALERTMANAGER_INSTALLED_PATH + 'alertmanager', '--version'],
            capture_output=True,
            text=True,
        )
        # Some builds print the version to stdout, others to stderr.
        combined = completed.stdout + '\n' + completed.stderr
        match = re.search('version (?P<version>[^ ]+)', combined)
        version = match.group('version') if match else UNKNOWN_VERSION
    except FileNotFoundError:
        return False
    return version
_dependency(plt, 'matplotlib')
def plot_scatter_1(data, label=None, title=None, x_val=0, ax=None):
    """Scatter a 1d data series at a fixed x position with slight jitter."""
    ax = check_ax(ax)
    # Horizontal jitter so overlapping points remain visible.
    jitter = np.random.normal(0, 0.025, data.shape)
    xs = (np.ones_like(data) * x_val) + jitter
    ax.scatter(xs, data, s=36, alpha=set_alpha(len(data)))
    if label:
        ax.set_ylabel(label, fontsize=LABEL_SIZE)
        ax.set(xticks=[x_val], xticklabels=[label])
    if title:
        ax.set_title(title, fontsize=TITLE_FONTSIZE)
    for axis_name in ('x', 'y'):
        ax.tick_params(axis=axis_name, labelsize=TICK_LABELSIZE)
    ax.set_xlim([-0.5, 0.5])
# NOTE(review): `.skip` looks like a stripped decorator (presumably
# @pytest.mark.skip) -- as written it is not valid Python.
.skip
def test_deployments_changed_source(tp_path, deployments, mainnet_uri):
    """A build whose bytecode changed must be excluded from the manifest deployments."""
    # NOTE(review): this address is shorter than 40 hex chars -- the literal
    # appears truncated in this copy; confirm against the original test.
    address = '0xdAC17F958D2ee523aC13D831ec7'
    path = tp_path.joinpath(f'build/deployments/mainnet/{address}.json')
    # Mutate the stored bytecode so it no longer matches the deployment.
    with path.open() as fp:
        build_json = json.load(fp)
    build_json['bytecode'] += 'ff'
    with path.open('w') as fp:
        json.dump(build_json, fp)
    package_config = ETHPM_CONFIG.copy()
    package_config['settings']['deployment_networks'] = ['mainnet']
    (manifest, _) = ethpm.create_manifest(tp_path, package_config)
    assert manifest['deployments'][mainnet_uri]
    # The modified contract must have been dropped from the deployments.
    assert (address not in [i['address'] for i in manifest['deployments'][mainnet_uri].values()])
def flag_project(session, project, reason, user_email, user_id):
    """Record a flag on *project*, publish a notification, and commit.

    Raises AnityaException when the flag cannot be persisted.
    """
    new_flag = models.ProjectFlag(user=user_email, project=project, reason=reason)
    session.add(new_flag)
    try:
        session.flush()
    except exc.SQLAlchemyError as err:
        _log.exception(err)
        session.rollback()
        raise exceptions.AnityaException('Could not flag this project.')
    notification = dict(
        agent=user_id,
        project=project.name,
        reason=reason,
        packages=[pkg.__json__() for pkg in project.packages],
    )
    publish_message(project=project.__json__(), topic='project.flag', message=notification)
    session.commit()
    return new_flag
_os(*metadata.platforms)
def main():
    """Mount the ISO via the PowerShell helper and run the embedded process."""
    # Both the ISO and the helper script must exist before doing anything.
    if not (Path(ISO).is_file() and Path(PS_SCRIPT).is_file()):
        return
    print(f'[+] - ISO File {ISO} will be mounted and executed via powershell')
    command = f'powershell.exe -ExecutionPol Bypass -c import-module {PS_SCRIPT}; ExecFromISO -ISOFile {ISO} -procname {PROC};'
    common.execute(command)
    # Clean up the spawned process afterwards.
    common.execute(['taskkill', '/f', '/im', 'notepad.exe'])
    print(f'[+] - RTA Done!')
def verticalMetricsKeptInSync(varfont):
    """Generator fixup: keep hhea vertical metrics in sync with OS/2.

    Yields whether OS/2 typo metrics matched hhea before the caller's edit;
    on resume, copies any changed OS/2 typo metrics back into hhea (but only
    when the two tables were in sync to begin with).
    """
    os2_attrs = ('sTypoAscender', 'sTypoDescender', 'sTypoLineGap')
    hhea_attrs = ('ascender', 'descender', 'lineGap')
    before = [getattr(varfont['OS/2'], attr) for attr in os2_attrs]
    hhea_values = [getattr(varfont['hhea'], attr) for attr in hhea_attrs]
    in_sync = before == hhea_values
    yield in_sync
    if in_sync:
        # Re-read after the caller's modifications and propagate changes.
        after = [getattr(varfont['OS/2'], attr) for attr in os2_attrs]
        if before != after:
            for attr, value in zip(hhea_attrs, after):
                setattr(varfont['hhea'], attr, value)
def test_span_finder_component():
    """The span finder pipe should populate span candidates on processed docs."""
    nlp = Language()
    examples = [nlp('This is an example.'), nlp('This is the second example.')]
    # Seed gold spans under the configured training key.
    examples[0].spans[TRAINING_KEY] = [examples[0][3:4]]
    examples[1].spans[TRAINING_KEY] = [examples[1][3:5]]
    finder = nlp.add_pipe('experimental_span_finder', config={'training_key': TRAINING_KEY})
    nlp.initialize()
    processed = list(finder.pipe(examples))
    assert ('span_candidates' in processed[0].spans)
class dnsHost(Module):
    """Module that queries a host-search web service for the target domain."""

    config = Config({Option('DOMAIN', 'Provide your target Domain', True): str('laet4x.com')})

    def run(self):
        """Fetch and print the host-search results for the configured domain."""
        domain = self.config.option('DOMAIN').value
        print(("\n Analyzing '%s'..." % domain))
        # NOTE(review): the URL string literal below is unterminated -- the
        # service URL appears to have been stripped from this copy (likely
        # something like 'https://.../hostsearch/?q=' + domain); restore it
        # from upstream before use.
        request = requests.get((' + domain))
        res = request.text
        print('\n', res)
class LoginView(View):
    """Handle login POST requests, returning a JSON status payload."""

    def post(self, request):
        payload = {'code': 425, 'msg': '!', 'self': None}
        form = LoginForm(request.data, request=request)
        if form.is_valid():
            # Credentials validated by the form; establish the session.
            auth.login(request, form.cleaned_data.get('user'))
            payload['code'] = 0
        else:
            payload['self'], payload['msg'] = clean_form(form)
        return JsonResponse(payload)
class ShellCommand():
    """Run a command via subprocess.run, capturing output by default.

    status()/check() report success; stdout()/stderr() return the decoded
    captured output; run()/run_with_retry() are one-shot class-level helpers.
    """

    def __init__(self, *args, **kwargs) -> None:
        self.verbose = kwargs.pop('verbose', False)
        # Capture output by default so stdout()/stderr() have data to return;
        # honor explicit caller-supplied streams.
        for arg in ('stdout', 'stderr'):
            if (arg not in kwargs):
                kwargs[arg] = subprocess.PIPE
        self.cmdline = ' '.join([quote(x) for x in args])
        if self.verbose:
            print(f'---- running: {self.cmdline}')
        self.proc = subprocess.run(args, **kwargs)

    def status(self) -> bool:
        """True when the command exited with status 0."""
        try:
            self.proc.check_returncode()
            return True
        # BUG FIX: catch the specific error check_returncode raises instead
        # of a blanket Exception.
        except subprocess.CalledProcessError:
            return False

    def check(self, what: str) -> bool:
        """status() plus a diagnostic dump of the command output on failure."""
        if self.status():
            return True
        print(f'==== COMMAND FAILED: {what}')
        print(f'---- command line: {self.cmdline}')
        # BUG FIX: stdout/stderr are methods; the original tested and printed
        # the bound-method objects (always truthy) instead of calling them.
        if self.stdout():
            print('---- stdout ----')
            print(self.stdout())
            print('---- end stdout ----')
        if self.stderr():
            print('---- stderr ----')
            print(self.stderr())
            print('---- end stderr ----')
        return False

    def stdout(self) -> str:
        """Captured standard output, decoded as UTF-8."""
        return self.proc.stdout.decode('utf-8')

    def stderr(self) -> str:
        """Captured standard error, decoded as UTF-8."""
        return self.proc.stderr.decode('utf-8')

    # BUG FIX: both helpers take `cls` but were missing @classmethod, so
    # calling them on the class misbound the first argument.
    @classmethod
    def run_with_retry(cls, what: str, *args, **kwargs) -> bool:
        """Run the command up to `retries` times, sleeping between attempts."""
        try_count = 0
        retries = kwargs.pop('retries', 3)
        sleep_seconds = kwargs.pop('sleep_seconds', 5)
        while (try_count < retries):
            if (try_count > 0):
                print(f'Sleeping for {sleep_seconds} before retrying command')
                time.sleep(sleep_seconds)
            if cls.run(what, *args, **kwargs):
                return True
            try_count += 1
        return False

    @classmethod
    def run(cls, what: str, *args, **kwargs) -> bool:
        """Run the command once and report success via check()."""
        return ShellCommand(*args, **kwargs).check(what)
class TransformerCausalLM(Generic[ConfigT], CausalLMModule[(ConfigT, KeyValueCache)]):
    """Causal language model: a transformer decoder plus an output projection."""

    decoder: TransformerDecoder
    output_embeddings: Module

    def forward(self, piece_ids: Tensor, attention_mask: AttentionMask, cache: Optional[List[KeyValueCache]]=None, positions: Optional[Tensor]=None, store_cache: bool=False) -> CausalLMOutputWithCache[KeyValueCache]:
        """Run the decoder and project its final hidden state to logits."""
        hidden = self.decoder(piece_ids, attention_mask, cache=cache, store_cache=store_cache, positions=positions)
        lm_logits = self.output_embeddings(hidden.last_hidden_layer_state)
        return CausalLMOutputWithCache(all_outputs=hidden.all_outputs, cache=hidden.cache, logits=lm_logits)
class OptionSeriesTreemapStatesSelectMarker(Options):
    """Marker options for the select state of treemap series.

    BUG FIX: each getter was immediately shadowed by a setter ``def`` of the
    same name (the later definition overwrote the earlier one), leaving every
    getter unreachable. Restored the @property/@setter pairs the get/set
    pattern implies.
    """

    @property
    def enabled(self):
        """Whether the point marker is enabled."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        """Point-density threshold below which markers are drawn."""
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        """Fill color of the marker."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        """Explicit marker height in pixels."""
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        """Color of the marker border."""
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Width of the marker border in pixels."""
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Marker radius in pixels."""
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        """Explicit marker width in pixels."""
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsBulletSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for bullet series sonification speech.

    BUG FIX: each getter was immediately shadowed by a setter ``def`` of the
    same name, leaving every getter unreachable. Restored the
    @property/@setter pairs the get/set pattern implies.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the value."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the play delay is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data set the mapping range is computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def retrieve_airflow_info(dag_id: Optional[str], host: str, username: str, password: str) -> Generator:
    """Yield the DAG collection first, then a (dag_id, tasks) pair per DAG."""
    api_config = Configuration(host=host, username=username, password=password)
    with ApiClient(configuration=api_config) as client:
        api_tree = AirflowApiTree(client)
        dags = api_tree.get_dags(dag_id)
        yield dags
        for dag in dags:
            yield (dag.dag_id, dag.get_tasks())
class OptionSeriesSolidgaugeEvents(Options):
    """Event handler options for solid gauge series.

    BUG FIX: each getter was immediately shadowed by a setter ``def`` of the
    same name, leaving every getter unreachable. Restored the
    @property/@setter pairs the get/set pattern implies.
    """

    @property
    def afterAnimate(self):
        """Handler fired after series animation completes."""
        return self._config_get(None)

    @afterAnimate.setter
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)

    @property
    def checkboxClick(self):
        """Handler fired when the series checkbox is clicked."""
        return self._config_get(None)

    @checkboxClick.setter
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def click(self):
        """Handler fired when the series is clicked."""
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def hide(self):
        """Handler fired when the series is hidden."""
        return self._config_get(None)

    @hide.setter
    def hide(self, value: Any):
        self._config(value, js_type=False)

    @property
    def legendItemClick(self):
        """Handler fired when the legend item is clicked."""
        return self._config_get(None)

    @legendItemClick.setter
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        """Handler fired when the mouse leaves the series."""
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        """Handler fired when the mouse enters the series."""
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def show(self):
        """Handler fired when the series is shown."""
        return self._config_get(None)

    @show.setter
    def show(self, value: Any):
        self._config(value, js_type=False)
class NodesListWidget(QTreeWidget):
    """Tree widget listing the connected nodes, grouped by blockchain fork."""

    def __init__(self, parent):
        QTreeWidget.__init__(self)
        self.parent = parent
        self.setHeaderLabels([_('Connected node'), _('Height')])
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.create_menu)

    def create_menu(self, position):
        # Context menu on a server row: offer switching to that server.
        item = self.currentItem()
        if (not item):
            return
        server = item.data(0, Qt.UserRole)
        if (not server):
            return

        def use_as_server():
            self.parent.follow_server(server)
        menu = QMenu()
        menu.addAction(_('Use as server'), use_as_server)
        menu.exec_(self.viewport().mapToGlobal(position))

    def keyPressEvent(self, event):
        # F2 / Return opens the context menu for the current row.
        if (event.key() in [Qt.Key_F2, Qt.Key_Return]):
            self.on_activated(self.currentItem(), self.currentColumn())
        else:
            QTreeWidget.keyPressEvent(self, event)

    def on_activated(self, item, column):
        # Pop the context menu just under the activated item.
        pt = self.visualItemRect(item).bottomLeft()
        pt.setX(50)
        self.customContextMenuRequested.emit(pt)

    def chain_name(self, chain, our_chain):
        """Short label for a fork: hash prefix of the first post-fork header."""
        if (chain is our_chain):
            return f'our_chain'
        (_chain, common_height) = our_chain.common_chain_and_height(chain)
        fork_height = (common_height + 1)
        headers_obj = app_state.headers
        header = headers_obj.header_at_height(chain, fork_height)
        # NOTE(review): lstrip('00') strips ALL leading '0' characters
        # (str.lstrip takes a character set) -- presumably intended here,
        # but confirm.
        prefix = hash_to_hex_str(header.hash).lstrip('00')[0:10]
        return f'{prefix}{fork_height}'

    def update(self, network):
        # Rebuild the tree from the network sessions; when more than one
        # chain is followed, each chain gets its own top-level branch.
        # NOTE(review): this overrides QWidget.update with a different
        # signature -- presumably deliberate in this codebase; confirm.
        self.clear()
        self.addChild = self.addTopLevelItem
        chains = network.sessions_by_chain()
        our_chain = network.chain()
        for (chain, sessions) in chains.items():
            if (len(chains) > 1):
                name = self.chain_name(chain, our_chain)
                x = QTreeWidgetItem([name, ('%d' % chain.height)])
                x.setData(0, Qt.UserRole, None)
            else:
                x = self
            # Count sessions per host so duplicated hosts can show their port.
            host_counts = {}
            for session in sessions:
                host_counts[session.server.host] = (host_counts.get(session.server.host, 0) + 1)
            for session in sessions:
                extra_name = ''
                if (host_counts[session.server.host] > 1):
                    extra_name = f' (port: {session.server.port})'
                extra_name += (' (main server)' if (session.server is network.main_server) else '')
                item = QTreeWidgetItem([(session.server.host + extra_name), str(session.tip.height)])
                item.setData(0, Qt.UserRole, session.server)
                x.addChild(item)
            if (len(chains) > 1):
                self.addTopLevelItem(x)
                x.setExpanded(True)
        h = self.header()
        h.setStretchLastSection(False)
        h.setSectionResizeMode(0, QHeaderView.Stretch)
        h.setSectionResizeMode(1, QHeaderView.ResizeToContents)
class normalize(Decorator):
    """Decorator that records argument-normalization rules for a parameter.

    `values` may be a list of allowed values, or a string naming a
    normalizer type, optionally with arguments: e.g. "date(%Y%m%d)".
    """

    def __init__(self, name, values=None, **kwargs):
        assert ((name is None) or isinstance(name, str))
        self.name = name
        if isinstance(values, str):
            # String form selects a normalizer type; it cannot be combined
            # with an explicit type= keyword.
            assert (kwargs.get('type') is None), f"Cannot mix values={values} and type={kwargs.get('type')}"
            if ('(' in values):
                # "type(arg1,arg2)" -> type name plus positional arguments.
                m = re.match('(.+)\\((.+)\\)', values)
                type = m.group(1)
                args = m.group(2).split(',')
            else:
                type = values
                args = []
            if args:
                # Map positional args onto the option names declared for
                # this type.
                # NOTE(review): the loop variable reuses (shadows) the
                # `name` parameter -- harmless since self.name was already
                # set, but confirm this is intentional.
                for (name, value) in zip(OPTIONS[type], args):
                    kwargs[name] = value
            kwargs['type'] = type
        else:
            kwargs['values'] = values
        # An aliases= string ending in .json/.yaml/.yml is treated as a file
        # path (relative paths resolve against the caller's directory).
        if (('aliases' in kwargs) and isinstance(kwargs['aliases'], str)):
            (_, ext) = os.path.splitext(kwargs['aliases'])
            if (ext in ('.json', '.yaml', '.yml')):
                path = kwargs['aliases']
                if (not os.path.isabs(path)):
                    caller = os.path.dirname(inspect.stack()[1].filename)
                    path = os.path.join(caller, path)
                kwargs['aliases'] = load_json_or_yaml(path)
        self.kwargs = kwargs

    def register(self, manager):
        """Register this normalization rule with the argument manager."""
        manager.register_normalize(self)
class OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Lowpass filter mapping options for bullet series sonification."""

    # NOTE(review): sibling option classes in this codebase expose accessors
    # as @property; the decorators here may have been stripped -- confirm
    # against the upstream/generated source.
    def frequency(self) -> 'OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        # Nested option object describing the filter frequency mapping.
        return self._config_sub_data('frequency', OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        # Nested option object describing the filter resonance mapping.
        return self._config_sub_data('resonance', OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingLowpassResonance)
class GreetingWorkflow():
    """Workflow interface stub; every method raises NotImplementedError.

    NOTE(review): the bare `_method` lines below look like stripped
    decorators (e.g. query/signal/run method markers from a workflow
    framework) -- confirm against upstream.
    """

    _method
    async def get_status(self, arg1: QueryArgType1, arg2: QueryArgType2) -> QueryRetType:
        raise NotImplementedError

    _method
    async def push_status(self, arg1: SignalArgType1, arg2: SignalArgType2):
        raise NotImplementedError

    _method(task_queue=TASK_QUEUE)
    async def get_greeting(self) -> None:
        raise NotImplementedError
def test_interference_graph_of_group_first_graph_a():
    """The {x_1, x_3} subgraph keeps only x_1 and carries no edges."""
    graph = construct_graph(1)
    subgraph = graph.get_subgraph_of(InsertionOrderedSet([x_1, x_3]))
    assert (InsertionOrderedSet(subgraph.nodes) == InsertionOrderedSet([x_1]))
    assert (set(subgraph.edges) == set())
def construct_field(name_or_field, **params):
    """Normalize *name_or_field* (dict, Field instance, or type name) into a Field.

    Raises ValueError when extra parameters accompany a dict/Field argument,
    or when a dict carries neither a "type" nor a "properties" key.
    """
    if isinstance(name_or_field, collections.abc.Mapping):
        # Dict form carries its own parameters; extra kwargs are ambiguous.
        if params:
            raise ValueError('construct_field() cannot accept parameters when passing in a dict.')
        params = name_or_field.copy()
        if 'type' in params:
            name = params.pop('type')
        elif 'properties' in params:
            # A mapping with properties but no explicit type is an object field.
            name = 'object'
        else:
            raise ValueError('construct_field() needs to have a "type" key.')
        return Field.get_dsl_class(name)(**params)
    if isinstance(name_or_field, Field):
        # Already a Field: pass it through unchanged.
        if params:
            raise ValueError('construct_field() cannot accept parameters when passing in a construct_field object.')
        return name_or_field
    return Field.get_dsl_class(name_or_field)(**params)
def test_setFromModule():
    """Context.setFromModule should pick up values from a module's globals."""
    import os
    from proteus import Context
    # Write a throwaway module, import it, then remove the source file.
    module_file = 'context_module.py'
    with open(module_file, 'w') as f:
        f.write('nnx=11; T=10.0; g=9.8\n')
    sys.path.append(os.getcwd())
    import context_module
    os.remove(module_file)
    Context.setFromModule(context_module)
    check_eq(Context.context)
def _preprocess(csource):
    """Pre-process a C source string for the declaration parser.

    Strips comments, collects and removes simple #define macros, and
    rewrites '...' placeholders into unique __dotdotdot__ identifiers so
    the parser can treat them as ordinary tokens. Returns (csource, macros).
    """
    # Replace comments with a space so token boundaries are preserved.
    csource = _r_comment.sub(' ', csource)
    # Collect simple "#define NAME VALUE" macros, then strip them out.
    macros = {}
    for match in _r_define.finditer(csource):
        (macroname, macrovalue) = match.groups()
        macros[macroname] = macrovalue
    csource = _r_define.sub('', csource)
    # "[...]" partial arrays become a recognizable placeholder identifier.
    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
    # Rewrite partial enums right-to-left so earlier match offsets stay valid
    # while the string is being edited.
    matches = list(_r_partial_enum.finditer(csource))
    for (number, match) in enumerate(reversed(matches)):
        p = match.start()
        if (csource[p] == '='):
            # "= ..." form: drop the '=' and replace the dots.
            p2 = csource.find('...', p, match.end())
            assert (p2 > p)
            csource = ('%s,__dotdotdot%d__ %s' % (csource[:p], number, csource[(p2 + 3):]))
        else:
            # Bare "..." form.
            assert (csource[p:(p + 3)] == '...')
            csource = ('%s __dotdotdot%d__ %s' % (csource[:p], number, csource[(p + 3):]))
    # Any remaining '...' becomes the generic placeholder token.
    return (csource.replace('...', ' __dotdotdot__ '), macros)
def test_revert_previous_kick(paragon_chain):
    """A pending kick vote is discarded when the signer re-authorizes the target."""
    chain_head = paragon_chain.get_canonical_head()
    clique = get_clique(paragon_chain)
    snapshot = validate_seal_and_get_snapshot(clique, chain_head)
    assert (len(snapshot.tallies) == 0)
    # Alice authorizes Bob as a signer.
    auth_header = make_next_header(paragon_chain, chain_head, ALICE_PK, coinbase=BOB, nonce=NONCE_AUTH)
    snapshot = validate_seal_and_get_snapshot(clique, auth_header)
    assert (snapshot.get_sorted_signers() == [ALICE, BOB])
    # Alice starts a kick vote against Bob.
    kick_header = make_next_header(paragon_chain, auth_header, ALICE_PK, coinbase=BOB, nonce=NONCE_DROP)
    snapshot = validate_seal_and_get_snapshot(clique, kick_header)
    assert (snapshot.get_sorted_signers() == [ALICE, BOB])
    assert (snapshot.tallies[BOB].action == VoteAction.KICK)
    assert (snapshot.tallies[BOB].votes == 1)
    # Re-authorizing Bob must clear the pending kick tally.
    revert_header = make_next_header(paragon_chain, kick_header, ALICE_PK, coinbase=BOB, nonce=NONCE_AUTH, difficulty=1)
    snapshot = validate_seal_and_get_snapshot(clique, revert_header)
    assert (snapshot.get_sorted_signers() == [ALICE, BOB])
    assert (BOB not in snapshot.tallies)
def extractAthanasiafrostWordpressCom(item):
    """Release parser for athanasiafrost.wordpress.com feed items.

    Returns a release message for recognized tags, None for previews or
    items with no chapter/volume, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    for tagname, release_name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, release_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class u5Ex(object):
    """Analytical test solution u(x) = x0^2 + x1^2 + x2^2 with gradient 2*x."""

    def __init__(self):
        pass

    def uOfX(self, x):
        """Value of u at spatial point x (first three components used)."""
        return x[0] ** 2 + x[1] ** 2 + x[2] ** 2

    def uOfXT(self, X, T):
        """Time-independent solution: delegate to uOfX."""
        return self.uOfX(X)

    def duOfX(self, X):
        """Gradient du/dx = 2*x as a flat length-3 array."""
        return 2.0 * numpy.reshape(X[0:3], (3,))

    def duOfXT(self, X, T):
        """Time-independent gradient: delegate to duOfX."""
        return self.duOfX(X)
def enable_disk_checkpointing(dirname=None, comm=COMM_WORLD, cleanup=True):
    """Switch the working tape over to disk checkpointing."""
    tape = get_working_tape()
    package_data = tape._package_data
    # Install the checkpointer once per tape.
    if 'firedrake' not in package_data:
        package_data['firedrake'] = DiskCheckpointer(dirname, comm, cleanup)
    # Make sure checkpointing is actually active.
    if not disk_checkpointing():
        continue_disk_checkpointing()
class TestRegistryPackage(unittest.TestCase):
    """Validate the registry_data section of the package configuration."""

    # BUG FIX: setUpClass takes `cls` but was missing @classmethod, so
    # unittest could not invoke it correctly.
    @classmethod
    def setUpClass(cls) -> None:
        assert ('registry_data' in package_configs), f'Missing registry_data in {PACKAGE_FILE}'
        cls.registry_config = package_configs['registry_data']
        # Pick the manifest schema matching the targeted stack version.
        stack_version = Version.parse(cls.registry_config['conditions']['kibana.version'].strip('^'), optional_minor_and_patch=True)
        if (stack_version >= Version.parse('8.12.0')):
            RegistryPackageManifestV3.from_dict(cls.registry_config)
        else:
            RegistryPackageManifestV1.from_dict(cls.registry_config)

    def test_registry_package_config(self):
        """An invalid version string must fail manifest validation."""
        registry_config = self.registry_config.copy()
        registry_config['version'] += '7.1.1.'
        with self.assertRaises(ValidationError):
            RegistryPackageManifestV1.from_dict(registry_config)
# NOTE(review): the next three lines appear to be stripped service-route
# decorators (Cornice-style @service.get(...) calls) -- as written they are
# not valid Python; confirm against upstream.
(accept=('application/json', 'text/json'), renderer='json', error_handler=bodhi.server.services.errors.json_handler)
(accept='application/javascript', renderer='jsonp', error_handler=bodhi.server.services.errors.jsonp_handler)
(accept='text/html', renderer='override.html', error_handler=bodhi.server.services.errors.html_handler)
def get_override(request):
    """Return the buildroot override for the NVR in the URL, or a 404 error."""
    nvr = request.matchdict.get('nvr')
    build = Build.get(nvr)
    if (not build):
        # Unknown build: record a 404-style error on the request.
        request.errors.add('url', 'nvr', 'No such build')
        request.errors.status = HTTPNotFound.code
        return
    if (not build.override):
        # Build exists but has no override attached.
        request.errors.add('url', 'nvr', 'No buildroot override for this build')
        request.errors.status = HTTPNotFound.code
        return
    return dict(override=build.override)
def test_align_convert_align_mafft_fasta_to_nexus(o_dir, e_dir, request):
    """Convert mafft FASTA alignments to nexus and compare against expected output."""
    program = 'bin/align/phyluce_align_convert_one_align_to_another'
    output = os.path.join(o_dir, 'mafft-fasta-to-nexus')
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--alignments', os.path.join(e_dir, 'mafft'),
        '--output', output,
        '--input-format', 'fasta',
        '--output-format', 'nexus',
        '--cores', '1',
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    # BUG FIX: use the captured stderr as the assertion message instead of
    # the original `assert cond, print(...)` hack (whose message was None).
    assert (proc.returncode == 0), stderr.decode('utf-8')
    output_files = glob.glob(os.path.join(output, '*'))
    assert output_files, 'There are no output files'
    for output_file in output_files:
        name = os.path.basename(output_file)
        expected_file = os.path.join(e_dir, 'mafft-fasta-to-nexus', name)
        # BUG FIX: close file handles deterministically (the original
        # `open(...).read()` leaked them).
        with open(output_file) as observed_handle:
            observed = observed_handle.read()
        with open(expected_file) as expected_handle:
            expected = expected_handle.read()
        assert (observed == expected)
def evaluate(node, g, loop=False):
    """Execute one AST statement against globals *g*, yielding expression values.

    Statements are compiled and eval'd/exec'd one at a time so the value of
    each bare expression can be yielded to the caller (REPL-style). *loop*
    is True while inside a loop body so break/continue can propagate through
    the recursion via the Break/Continue sentinel exceptions.
    """
    if (loop and isinstance(node, ast.Break)):
        raise Break
    if (loop and isinstance(node, ast.Continue)):
        raise Continue
    if isinstance(node, ast.Expr):
        # Bare expression: evaluate it and yield its value.
        _eval = ast.Expression(node.value)
        (yield eval(compile(_eval, '<string>', 'eval'), g))
    elif isinstance(node, ast.If):
        if eval(compile(ast.Expression(node.test), '<string>', 'eval'), g):
            for n in node.body:
                (yield from evaluate(n, g, loop))
        elif node.orelse:
            for n in node.orelse:
                (yield from evaluate(n, g, loop))
    elif isinstance(node, ast.While):
        while eval(compile(ast.Expression(node.test), '<string>', 'eval'), g):
            try:
                for n in node.body:
                    (yield from evaluate(n, g, True))
            except Break:
                break
            except Continue:
                continue
        else:
            # while-else: runs only when the loop exits without break.
            for n in node.orelse:
                (yield from evaluate(n, g, loop))
    elif isinstance(node, ast.For):
        for x in eval(compile(ast.Expression(node.iter), '<string>', 'eval'), g):
            # Bind the loop target(s) directly into the globals dict.
            if isinstance(node.target, ast.Tuple):
                for (e, t) in enumerate(node.target.dims):
                    g[t.id] = x[e]
            else:
                g[node.target.id] = x
            try:
                for n in node.body:
                    (yield from evaluate(n, g, True))
            except Break:
                break
            except Continue:
                continue
        else:
            # for-else: runs only when the loop exits without break.
            for n in node.orelse:
                (yield from evaluate(n, g, loop))
    elif isinstance(node, ast.Try):
        try:
            for n in node.body:
                (yield from evaluate(n, g, loop))
        except Exception as e:
            (yield from evaluate_except(node, e, g, loop))
        else:
            for n in node.orelse:
                (yield from evaluate(n, g, loop))
        finally:
            for n in node.finalbody:
                (yield from evaluate(n, g, loop))
    elif (PY311 and isinstance(node, ast.TryStar)):
        # try/except* (3.11+): split the ExceptionGroup across the handlers.
        try:
            for n in node.body:
                (yield from evaluate(n, g, loop))
        except ExceptionGroup as e:
            for n in node.handlers:
                if n.name:
                    g[n.name] = e
                # Split off the part matching this handler's type.
                (m, e) = e.split(eval(compile(ast.Expression(n.type), '<string>', 'eval'), g))
                if (m is not None):
                    for ne in n.body:
                        (yield from evaluate(ne, g, loop))
                if (e is None):
                    break
            if (e is not None):
                # Some exceptions matched no handler: re-raise the remainder.
                raise e
        except Exception as e:
            (yield from evaluate_except(node, e, g, loop))
        else:
            for n in node.orelse:
                (yield from evaluate(n, g, loop))
        finally:
            for n in node.finalbody:
                (yield from evaluate(n, g, loop))
    elif (PY310 and isinstance(node, ast.Match)):
        s = eval(compile(ast.Expression(node.subject), '<string>', 'eval'), g)
        for c in node.cases:
            if compare_match(s, g, c.pattern):
                # A failing guard falls through to the next case, matching
                # Python's match semantics.
                if ((not c.guard) or eval(compile(ast.Expression(c.guard), '<string>', 'eval'), g)):
                    for n in c.body:
                        (yield from evaluate(n, g, loop))
                    break
    elif isinstance(node, ast.With):
        (yield from evaluate_with(node, g, loop))
    else:
        # Any other statement: exec it as a single-statement module.
        _exec = ast.Module([node], [])
        exec(compile(_exec, '<string>', 'exec'), g)
        (yield None)
def combine_clusters(signature_clusters, options):
    """Combine SV signature clusters of all types into final SV candidates.

    Parameters
    ----------
    signature_clusters : 6-tuple of lists
        (deletion, insertion, inversion, tandem-duplication, insertion-from,
        translocation) signature clusters, in that order.
    options : settings object forwarded to the merging/clustering helpers.

    Returns
    -------
    tuple
        (deletion, inversion, interspersed-duplication, tandem-duplication,
        novel-insertion, breakend) candidate lists.
    """
    (deletion_signature_clusters, insertion_signature_clusters, inversion_signature_clusters, tandem_duplication_signature_clusters, insertion_from_signature_clusters, translocation_signature_clusters) = signature_clusters
    # Inversion clusters translate 1:1 into candidates.
    inversion_candidates = []
    for inv_cluster in inversion_signature_clusters:
        inversion_candidates.append(CandidateInversion(inv_cluster.contig, inv_cluster.start, inv_cluster.end, inv_cluster.members, inv_cluster.score, inv_cluster.std_span, inv_cluster.std_pos))
    # Tandem duplications: copy number is the destination/source span ratio.
    tan_dup_candidates = []
    for tan_dup_cluster in tandem_duplication_signature_clusters:
        (source_contig, source_start, source_end) = tan_dup_cluster.get_source()
        (dest_contig, dest_start, dest_end) = tan_dup_cluster.get_destination()
        num_copies = int(round(((dest_end - dest_start) / (source_end - source_start))))
        # Truthy sum: flagged as fully covered if ANY member signature is.
        fully_covered = (True if sum([sig.fully_covered for sig in tan_dup_cluster.members]) else False)
        # NOTE(review): reads tan_dup_cluster.source_* attributes instead of the
        # locals unpacked from get_source() above -- presumably identical; confirm.
        tan_dup_candidates.append(CandidateDuplicationTandem(tan_dup_cluster.source_contig, tan_dup_cluster.source_start, tan_dup_cluster.source_end, num_copies, fully_covered, tan_dup_cluster.members, tan_dup_cluster.score, tan_dup_cluster.std_span, tan_dup_cluster.std_pos))
    # Translocations become breakend candidates (two-sided, with directions).
    breakend_candidates = []
    for tra_cluster in translocation_signature_clusters:
        breakend_candidates.append(CandidateBreakend(tra_cluster.source_contig, tra_cluster.source_start, tra_cluster.direction1, tra_cluster.dest_contig, tra_cluster.dest_start, tra_cluster.direction2, tra_cluster.members, tra_cluster.score, tra_cluster.std_span, tra_cluster.std_pos))
    logging.info('Combine inserted regions with translocation breakpoints..')
    (new_insertion_from_clusters, inserted_regions_to_remove_1) = merge_translocations_at_insertions(translocation_signature_clusters, insertion_signature_clusters, options)
    insertion_from_signature_clusters.extend(new_insertion_from_clusters)
    logging.info('Create interspersed duplication candidates and flag cut&paste insertions..')
    int_duplication_candidates = flag_cutpaste_candidates(insertion_from_signature_clusters, deletion_signature_clusters, options)
    # Two-pointer sweep: walk insertions against duplication candidates (both
    # ordered by destination) and drop insertions already explained by a
    # duplication landing at (roughly) the same place with a similar size.
    int_duplication_iterator = iter(sorted(int_duplication_candidates, key=(lambda cand: cand.get_destination())))
    tan_duplication_iterator = iter(sorted(tan_dup_candidates, key=(lambda cand: cand.get_destination())))
    int_duplications_end = False
    tan_duplications_end = False
    inserted_regions_to_remove_2 = []
    try:
        current_int_duplication = next(int_duplication_iterator)
    except StopIteration:
        int_duplications_end = True
    try:
        current_tan_duplication = next(tan_duplication_iterator)
    except StopIteration:
        tan_duplications_end = True
    for (inserted_region_index, inserted_region) in enumerate(insertion_signature_clusters):
        (contig1, start1, end1) = inserted_region.get_source()
        length1 = (end1 - start1)
        if (not int_duplications_end):
            (contig2, start2, end2) = current_int_duplication.get_destination()
            # Advance the duplication pointer until it reaches / passes this insertion.
            while ((contig2 < contig1) or ((contig2 == contig1) and (end2 < start1))):
                try:
                    current_int_duplication = next(int_duplication_iterator)
                    (contig2, start2, end2) = current_int_duplication.get_destination()
                except StopIteration:
                    int_duplications_end = True
                    break
            if (not int_duplications_end):
                length2 = (end2 - start2)
                # Overlapping destination and <20% relative size difference.
                # NOTE(review): the difference is signed, so a duplication at
                # least as long as the insertion always passes -- confirm intended.
                if ((contig2 == contig1) and (start2 < end1) and (((length1 - length2) / max(length1, length2)) < 0.2)):
                    inserted_regions_to_remove_2.append(inserted_region_index)
        else:
            # Interspersed duplications exhausted: compare against tandem ones.
            if (not tan_duplications_end):
                (contig2, start2, end2) = current_tan_duplication.get_destination()
                while ((contig2 < contig1) or ((contig2 == contig1) and (end2 < start1))):
                    try:
                        current_tan_duplication = next(tan_duplication_iterator)
                        (contig2, start2, end2) = current_tan_duplication.get_destination()
                    except StopIteration:
                        tan_duplications_end = True
                        break
                if (not tan_duplications_end):
                    length2 = (end2 - start2)
                    if ((contig2 == contig1) and (start2 < end1) and (((length1 - length2) / max(length1, length2)) < 0.2)):
                        inserted_regions_to_remove_2.append(inserted_region_index)
    # Delete in descending index order so earlier indices stay valid.
    all_inserted_regions_to_remove = sorted(list(set((inserted_regions_to_remove_1 + inserted_regions_to_remove_2))), reverse=True)
    for ins_index in all_inserted_regions_to_remove:
        del insertion_signature_clusters[ins_index]
    # Deletions: keep only positively scored clusters.
    deletion_candidates = []
    for del_cluster in deletion_signature_clusters:
        if (del_cluster.score > 0):
            deletion_candidates.append(CandidateDeletion(del_cluster.contig, del_cluster.start, del_cluster.end, del_cluster.members, del_cluster.score, del_cluster.std_span, del_cluster.std_pos))
    novel_insertion_candidates = prepare_insertion_candidates(insertion_signature_clusters, options)
    logging.info('Cluster interspersed duplication candidates one more time..')
    final_int_duplication_candidates = partition_and_cluster_candidates(int_duplication_candidates, options, 'interspersed duplication candidates')
    return (deletion_candidates, inversion_candidates, final_int_duplication_candidates, tan_dup_candidates, novel_insertion_candidates, breakend_candidates)
class FrameContinue(FrameWithArgs, CallFlags):
    """Continuation frame carrying further fragments of a call's args.

    Payload layout: 1-byte flags, 1-byte checksum type, then a run of
    length-prefixed args until ``size`` bytes have been consumed.
    """

    PREFIX = ''

    def __init__(self):
        FrameWithArgs.__init__(self)
        CallFlags.__init__(self)
        # Only checksum type 0 (no checksum) is supported here.
        self.csumtype = 0

    def read_payload(self, fp: IOWrapper, size: int):
        """Parse flags and csumtype bytes, then read args until `size` bytes."""
        offset = 0
        self.flags = fp.read_byte(self.PREFIX + '.flags')
        offset += 1
        self.csumtype = fp.read_byte(self.PREFIX + '.csumtype')
        offset += 1
        if self.csumtype != 0:
            raise NotImplementedError('Checksum type not supported')
        self.args = []
        arg_count = 1
        while offset < size:
            arg = Arg.read_arg(fp, offset, size, self.is_more_fragments_follow(), self.PREFIX + ('.args[%d]' % arg_count))
            self.args.append(arg)
            offset += arg.size()
            arg_count += 1

    def get_payload_size(self):
        """Return payload size in bytes: flags (1) + csumtype (1) + all args.

        Bug fix: the original referenced an undefined name ``_``
        (``_.flag(1) + _.csumtype(1)``), raising NameError when called.
        """
        return 2 + sum(arg.size() for arg in self.args)

    def write_payload(self, fp: IOWrapper):
        """Serialize the payload; asserts the written size matches the declared one."""
        offset = 0
        fp.write_byte(self.flags)
        offset += 1
        fp.write_byte(self.csumtype)
        offset += 1
        for arg in self.args:
            arg.write_arg(fp)
            offset += arg.size()
        assert offset == self.get_payload_size()
class FilterClass:
    """Registers all custom Jinja2 template filters on the Flask app.

    Bug fix: ``_virtual_path_element_to_span``, ``_render_firmware_detail_tabular_field``,
    ``_render_general_information_table`` and ``_split_user_and_password_type_entry``
    were defined without ``self`` yet accessed through ``self.…`` (directly or as
    registered filters), so the instance was bound into their first parameter and
    every call raised TypeError. They are now declared as ``@staticmethod``.
    """

    def __init__(self, app, program_version, db: FrontendDatabase, **_):
        self._program_version = program_version
        self._app = app
        self.db = db
        self._setup_filters()

    def _filter_print_program_version(self, *_):
        """Return the program version string (ignores any filter arguments)."""
        return f'{self._program_version}'

    def _filter_replace_uid_with_file_name(self, input_data):
        """Replace every ``>uid<`` occurrence in the input with ``>file name<``."""
        tmp = input_data.__str__()
        uid_list = flt.get_all_uids_in_string(tmp)
        with get_shared_session(self.db.frontend) as frontend_db:
            for item in uid_list:
                file_name = frontend_db.get_file_name(item)
                tmp = tmp.replace(f'>{item}<', f'>{file_name}<')
        return tmp

    def _filter_replace_uid_with_hid(self, input_data, root_uid=None):
        """Replace every UID in the input with its human-readable ID (HID)."""
        tmp = str(input_data)
        if (tmp == 'None'):
            return ' '
        uid_list = flt.get_all_uids_in_string(tmp)
        for item in uid_list:
            tmp = tmp.replace(item, self.db.frontend.get_hid(item, root_uid=root_uid))
        return tmp

    def _filter_replace_comparison_uid_with_hid(self, input_data, root_uid=None):
        """Replace UIDs with HIDs and join the ';'-separated parts with ' || '."""
        tmp = self._filter_replace_uid_with_hid(input_data, root_uid)
        res = tmp.split(';')
        return ' || '.join(res)

    def _filter_replace_uid_with_hid_link(self, input_data, root_uid=None):
        """Replace every UID in the input with an HTML link labeled by its HID."""
        content = str(input_data)
        if (content == 'None'):
            return ' '
        uid_list = flt.get_all_uids_in_string(content)
        for uid in uid_list:
            hid = self.db.frontend.get_hid(uid, root_uid=root_uid)
            content = content.replace(uid, f'<a style="text-reset" href="/analysis/{uid}/ro/{root_uid}">{hid}</a>')
        return content

    def _filter_nice_uid_list(self, uids, root_uid=None, selected_analysis=None, filename_only=False):
        """Render a list of UIDs as the nice_fo_list template (or pass through non-lists)."""
        root_uid = none_to_none(root_uid)
        if (not is_list_of_uids(uids)):
            return uids
        analyzed_uids = self.db.frontend.get_data_for_nice_list(uids, root_uid)
        number_of_unanalyzed_files = (len(uids) - len(analyzed_uids))
        # NOTE(review): pop(0) raises IndexError if no UID was analyzed -- confirm
        # get_data_for_nice_list always returns at least one entry here.
        first_item = analyzed_uids.pop(0)
        return render_template('generic_view/nice_fo_list.html', fo_list=analyzed_uids, u_show_id=random_collapse_id(), number_of_unanalyzed_files=number_of_unanalyzed_files, root_uid=root_uid, selected_analysis=selected_analysis, first_item=first_item, filename_only=filename_only)

    def _nice_virtual_path_list(self, virtual_path_list: list[list[str]], root_uid: (str | None)=None) -> list[str]:
        """Render each virtual path (list of UIDs) as a chain of HID badge spans."""
        root_uid = none_to_none(root_uid)
        path_list = []
        all_uids = {uid for uid_list in virtual_path_list for uid in uid_list}
        hid_dict = self.db.frontend.get_hid_dict(all_uids, root_uid=root_uid)
        for uid_list in virtual_path_list:
            components = [self._virtual_path_element_to_span(hid_dict.get(uid, uid), uid, root_uid, uid_list[(- 1)]) for uid in uid_list]
            path_list.append(' '.join(components))
        return path_list

    @staticmethod
    def _virtual_path_element_to_span(hid_element: str, uid: str, root_uid: str, current_file_uid: str) -> str:
        """Render one path element: plain badge for the current file, link badge otherwise."""
        hid = cap_length_of_element(hid_element)
        if (uid == current_file_uid):
            return f'<span class="badge badge-secondary">{hid}</span>'
        return f'<span class="badge badge-primary"> <a style="color: #fff" href="/analysis/{uid}/ro/{root_uid}"> {hid} </a></span>'

    @staticmethod
    def _render_firmware_detail_tabular_field(firmware_meta_data):
        """Render the firmware detail table for the given metadata."""
        return render_template('generic_view/firmware_detail_tabular_field.html', firmware=firmware_meta_data)

    @staticmethod
    def _render_general_information_table(firmware: MetaEntry, root_uid: str, other_versions, selected_analysis, file_tree_paths):
        """Render the general information table of the analysis page."""
        return render_template('generic_view/general_information.html', firmware=firmware, root_uid=root_uid, other_versions=other_versions, selected_analysis=selected_analysis, file_tree_paths=file_tree_paths)

    @staticmethod
    def _split_user_and_password_type_entry(result: dict):
        """Split ``user:password_type`` keys into ``{user: {password_type: value}}``.

        Keys without a ':' default to the 'unix' password type.
        """
        new_result = {}
        for (key, value) in result.items():
            if (':' in key):
                (*user_elements, password_type) = key.split(':')
                # The user name itself may contain ':'; only the last part is the type.
                user = ':'.join(user_elements)
            else:
                user = key
                password_type = 'unix'
            new_result.setdefault(user, {})[password_type] = value
        return new_result

    def check_auth(self, _):
        """Return whether frontend authentication is enabled (filter argument ignored)."""
        return config.frontend.authentication.enabled

    def data_to_chart_limited(self, data, limit: (int | None)=None, color_list=None):
        """Convert (label, value) pairs into a Chart.js dataset dict, capped at `limit` entries."""
        limit = (self._get_chart_element_count() if (limit is None) else limit)
        try:
            (label_list, value_list) = (list(d) for d in zip(*data))
        except ValueError:
            # data was empty -> zip(*data) unpacks to nothing
            return None
        (label_list, value_list) = flt.set_limit_for_data_to_chart(label_list, limit, value_list)
        color_list = (get_color_list(len(value_list), limit=limit) if (color_list is None) else color_list)
        return {'labels': label_list, 'datasets': [{'data': value_list, 'backgroundColor': color_list, 'borderColor': '#fff', 'borderWidth': 2}]}

    def _get_chart_element_count(self):
        """Return the configured chart element limit, capped at 100."""
        limit = config.frontend.max_elements_per_chart
        if (limit > 100):
            logging.warning('Value of "max_elements_per_chart" in configuration is too large.')
            return 100
        return limit

    def data_to_chart(self, data):
        """Convert (label, value) pairs into an uncapped single-color chart dict."""
        color_list = (get_color_list(1) * len(data))
        return self.data_to_chart_limited(data, limit=0, color_list=color_list)

    def _setup_filters(self):
        """Register all filters on the app's Jinja environment."""
        self._app.jinja_env.add_extension('jinja2.ext.do')
        self._app.jinja_env.filters['all_items_equal'] = (lambda data: (len({str(value) for value in data.values()}) == 1))
        self._app.jinja_env.filters['as_ascii_table'] = flt.as_ascii_table
        self._app.jinja_env.filters['auth_enabled'] = self.check_auth
        self._app.jinja_env.filters['base64_encode'] = flt.encode_base64_filter
        self._app.jinja_env.filters['bytes_to_str'] = flt.bytes_to_str_filter
        self._app.jinja_env.filters['data_to_chart'] = self.data_to_chart
        self._app.jinja_env.filters['data_to_chart_limited'] = self.data_to_chart_limited
        self._app.jinja_env.filters['data_to_chart_with_value_percentage_pairs'] = flt.data_to_chart_with_value_percentage_pairs
        self._app.jinja_env.filters['decompress'] = flt.decompress
        self._app.jinja_env.filters['dict_to_json'] = json.dumps
        self._app.jinja_env.filters['firmware_detail_tabular_field'] = self._render_firmware_detail_tabular_field
        self._app.jinja_env.filters['fix_cwe'] = flt.fix_cwe
        self._app.jinja_env.filters['format_duration'] = flt.format_duration
        self._app.jinja_env.filters['format_string_list_with_offset'] = flt.filter_format_string_list_with_offset
        self._app.jinja_env.filters['get_canvas_height'] = flt.get_canvas_height
        self._app.jinja_env.filters['get_searchable_crypto_block'] = flt.get_searchable_crypto_block
        self._app.jinja_env.filters['get_unique_keys_from_list_of_dicts'] = flt.get_unique_keys_from_list_of_dicts
        self._app.jinja_env.filters['hex'] = hex
        self._app.jinja_env.filters['hide_dts_binary_data'] = flt.hide_dts_binary_data
        self._app.jinja_env.filters['infection_color'] = flt.infection_color
        self._app.jinja_env.filters['is_list'] = (lambda item: isinstance(item, list))
        self._app.jinja_env.filters['json_dumps'] = json.dumps
        self._app.jinja_env.filters['link_cve'] = flt.replace_cve_with_link
        self._app.jinja_env.filters['link_cwe'] = flt.replace_cwe_with_link
        self._app.jinja_env.filters['list_group'] = flt.list_group
        self._app.jinja_env.filters['list_group_collapse'] = flt.list_group_collapse
        self._app.jinja_env.filters['list_to_line_break_string'] = flt.list_to_line_break_string
        self._app.jinja_env.filters['list_to_line_break_string_no_sort'] = flt.list_to_line_break_string_no_sort
        self._app.jinja_env.filters['md5_hash'] = get_md5
        self._app.jinja_env.filters['min'] = min
        self._app.jinja_env.filters['nice_generic'] = flt.generic_nice_representation
        self._app.jinja_env.filters['nice_number'] = flt.nice_number_filter
        self._app.jinja_env.filters['nice_time'] = time_format
        self._app.jinja_env.filters['nice_uid_list'] = self._filter_nice_uid_list
        self._app.jinja_env.filters['nice_unix_time'] = flt.nice_unix_time
        self._app.jinja_env.filters['nice_virtual_path_list'] = self._nice_virtual_path_list
        self._app.jinja_env.filters['number_format'] = flt.byte_number_filter
        self._app.jinja_env.filters['octal_to_readable'] = flt.octal_to_readable
        self._app.jinja_env.filters['print_program_version'] = self._filter_print_program_version
        self._app.jinja_env.filters['regex_meta'] = flt.comment_out_regex_meta_chars
        self._app.jinja_env.filters['remaining_time'] = elapsed_time
        self._app.jinja_env.filters['render_analysis_tags'] = flt.render_analysis_tags
        self._app.jinja_env.filters['render_general_information'] = self._render_general_information_table
        self._app.jinja_env.filters['render_query_title'] = flt.render_query_title
        self._app.jinja_env.filters['render_fw_tags'] = flt.render_fw_tags
        self._app.jinja_env.filters['replace_comparison_uid_with_hid'] = self._filter_replace_comparison_uid_with_hid
        self._app.jinja_env.filters['replace_uid_with_file_name'] = self._filter_replace_uid_with_file_name
        self._app.jinja_env.filters['replace_uid_with_hid_link'] = self._filter_replace_uid_with_hid_link
        self._app.jinja_env.filters['replace_uid_with_hid'] = self._filter_replace_uid_with_hid
        self._app.jinja_env.filters['replace_underscore'] = flt.replace_underscore_filter
        self._app.jinja_env.filters['version_is_compatible'] = flt.version_is_compatible
        self._app.jinja_env.filters['sort_chart_list_by_name'] = flt.sort_chart_list_by_name
        self._app.jinja_env.filters['sort_chart_list_by_value'] = flt.sort_chart_list_by_value
        self._app.jinja_env.filters['sort_comments'] = flt.sort_comments
        self._app.jinja_env.filters['sort_cve'] = flt.sort_cve_results
        self._app.jinja_env.filters['sort_privileges'] = (lambda privileges: sorted(privileges, key=(lambda role: len(privileges[role])), reverse=True))
        self._app.jinja_env.filters['sort_roles'] = flt.sort_roles_by_number_of_privileges
        self._app.jinja_env.filters['sort_users'] = flt.sort_users_by_name
        self._app.jinja_env.filters['split_user_and_password_type'] = self._split_user_and_password_type_entry
        self._app.jinja_env.filters['text_highlighter'] = flt.text_highlighter
        self._app.jinja_env.filters['uids_to_link'] = flt.uids_to_link
        self._app.jinja_env.filters['user_has_role'] = flt.user_has_role
        self._app.jinja_env.filters['version_links'] = flt.create_firmware_version_links
        self._app.jinja_env.filters['vulnerability_class'] = flt.vulnerability_class
        self._app.jinja_env.filters['_linter_reformat_issues'] = flt.linter_reformat_issues
class hashie(plugins.Plugin):
    __author__ = 'junohea.'
    __version__ = '1.0.1'
    __license__ = 'GPL3'
    __description__ = '\n    Attempt to automatically convert pcaps to a crackable format.\n    If successful, the files containing the hashes will be saved \n    in the same folder as the handshakes. \n    The files are saved in their respective Hashcat format:\n        - EAPOL hashes are saved as *.2500\n        - PMKID hashes are saved as *.16800\n    All PCAP files without enough information to create a hash are\n    stored in a file that can be read by the webgpsmap plugin.\n    \n    Why use it?:\n        - Automatically convert handshakes to crackable formats! \n            We dont all upload our hashes online ;)\n        - Repair PMKID handshakes that hcxpcaptool misses\n        - If running at time of handshake capture, on_handshake can\n            be used to improve the chance of the repair succeeding\n        - Be a completionist! Not enough packets captured to crack a network?\n            This generates an output file for the webgpsmap plugin, use the\n            location data to revisit networks you need more packets for!\n    \n    Additional information:\n        - Currently requires hcxpcaptool compiled and installed\n        - Attempts to repair PMKID hashes when hcxpcaptool cant find the SSID\n        - hcxpcaptool sometimes has trouble extracting the SSID, so we \n            use the raw 16800 output and attempt to retrieve the SSID via tcpdump\n        - When access_point data is available (on_handshake), we leverage \n            the reported AP name and MAC to complete the hash\n        - The repair is very basic and could certainly be improved!\n    Todo:\n        Make it so users dont need hcxpcaptool (unless it gets added to the base image)\n            Phase 1: Extract/construct 2500/16800 hashes through tcpdump commands\n            Phase 2: Extract/construct 2500/16800 hashes entirely in python\n        Improve the code, a lot\n    '

    def __init__(self):
        logging.info('[hashie] plugin loaded')
        # Serializes conversions so on_handshake and the batch job never run
        # hcxpcaptool over the same files concurrently.
        self.lock = Lock()

    def on_config_changed(self, config):
        """Kick off a batch conversion of stale pcaps when the interval allows it."""
        handshake_dir = config['bettercap']['handshakes']
        # NOTE(review): self.options / self.status come from the plugin base
        # class; 'interval' (hours) throttles how often the batch job runs.
        if ('interval' not in self.options) or (not self.status.newer_then_hours(self.options['interval'])):
            logging.info('[hashie] Starting batch conversion of pcap files')
            with self.lock:
                self._process_stale_pcaps(handshake_dir)

    def on_handshake(self, agent, filename, access_point, client_station):
        """Convert a freshly captured pcap into .2500 (EAPOL) and .16800 (PMKID) files."""
        with self.lock:
            handshake_status = []
            fullpathNoExt = filename.split('.')[0]
            name = filename.split('/')[(- 1):][0].split('.')[0]
            if os.path.isfile((fullpathNoExt + '.2500')):
                handshake_status.append('Already have {}.2500 (EAPOL)'.format(name))
            elif self._writeEAPOL(filename):
                handshake_status.append('Created {}.2500 (EAPOL) from pcap'.format(name))
            if os.path.isfile((fullpathNoExt + '.16800')):
                handshake_status.append('Already have {}.16800 (PMKID)'.format(name))
            elif self._writePMKID(filename, access_point):
                handshake_status.append('Created {}.16800 (PMKID) from pcap'.format(name))
            if handshake_status:
                logging.info(('[hashie] Good news:\n\t' + '\n\t'.join(handshake_status)))

    def _writeEAPOL(self, fullpath):
        """Extract an EAPOL (hashcat mode 2500) hash; return True on success."""
        fullpathNoExt = fullpath.split('.')[0]
        filename = fullpath.split('/')[(- 1):][0].split('.')[0]
        subprocess.getoutput('hcxpcaptool -o {}.2500 {} >/dev/null 2>&1'.format(fullpathNoExt, fullpath))
        if os.path.isfile((fullpathNoExt + '.2500')):
            logging.debug('[hashie] [+] EAPOL Success: {}.2500 created'.format(filename))
            return True
        return False

    def _writePMKID(self, fullpath, apJSON):
        """Extract a PMKID (hashcat mode 16800) hash, falling back to a repair
        of the raw (-K) output when hcxpcaptool cannot resolve the SSID."""
        fullpathNoExt = fullpath.split('.')[0]
        filename = fullpath.split('/')[(- 1):][0].split('.')[0]
        subprocess.getoutput('hcxpcaptool -k {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt, fullpath))
        if os.path.isfile((fullpathNoExt + '.16800')):
            logging.debug('[hashie] [+] PMKID Success: {}.16800 created'.format(filename))
            return True
        # -k failed; try the raw output (-K) and attempt to complete the hash.
        subprocess.getoutput('hcxpcaptool -K {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt, fullpath))
        if not os.path.isfile((fullpathNoExt + '.16800')):
            logging.debug('[hashie] [-] Could not attempt repair of {} as no raw PMKID file was created'.format(filename))
            return False
        if self._repairPMKID(fullpath, apJSON) == False:
            logging.debug('[hashie] [-] PMKID Fail: {}.16800 could not be repaired'.format(filename))
            return False
        logging.debug('[hashie] [+] PMKID Success: {}.16800 repaired'.format(filename))
        return True

    def _repairPMKID(self, fullpath, apJSON):
        """Complete a raw 16800 hash that is missing its SSID field.

        Candidate 'apmac:ssid-hex' pairs are gathered from the reported
        access-point data, hcxpcaptool's -X output, and a tcpdump scan of
        management frames; the first pair whose MAC matches the hash's AP MAC
        field is appended. Returns True when the hash could be completed.
        """
        hashString = ''
        clientString = []
        fullpathNoExt = fullpath.split('.')[0]
        filename = fullpath.split('/')[(- 1):][0].split('.')[0]
        logging.debug('[hashie] Repairing {}'.format(filename))
        with open((fullpathNoExt + '.16800'), 'r') as tempFileA:
            hashString = tempFileA.read()
        if (apJSON != ''):
            # Bug fix: str.encode('hex') is Python 2 only; use bytes.hex()
            # (same encoding as the other candidate sources below).
            clientString.append('{}:{}'.format(apJSON['mac'].replace(':', ''), apJSON['hostname'].encode().hex()))
        else:
            subprocess.getoutput('hcxpcaptool -X /tmp/{} {} >/dev/null 2>&1'.format(filename, fullpath))
            if os.path.isfile(('/tmp/' + filename)):
                with open(('/tmp/' + filename), 'r') as tempFileB:
                    temp = tempFileB.read().splitlines()
                    for line in temp:
                        clientString.append(((line.split(':')[0] + ':') + line.split(':')[1].strip('\n').encode().hex()))
                os.remove('/tmp/{}'.format(filename))
            # Scan management frames for BSSID/SSID pairs as a last resort.
            tcpCatOut = subprocess.check_output((('tcpdump -ennr ' + fullpath) + ' "(type mgt subtype beacon) || (type mgt subtype probe-resp) || (type mgt subtype reassoc-resp) || (type mgt subtype assoc-req)" 2>/dev/null | sed -E \'s/.*BSSID:([0-9a-fA-F:]{17}).*\\((.*)\\).*/\\1\t\\2/g\''), shell=True).decode('utf-8')
            if (':' in tcpCatOut):
                for i in tcpCatOut.split('\n'):
                    if (':' in i):
                        clientString.append(((i.split('\t')[0].replace(':', '') + ':') + i.split('\t')[1].strip('\n').encode().hex()))
        if clientString:
            for line in clientString:
                # Field 1 of the raw 16800 hash is the AP MAC.
                if (line.split(':')[0] == hashString.split(':')[1]):
                    hashString = ((hashString.strip('\n') + ':') + line.split(':')[1])
                    if ((len(hashString.split(':')) == 4) and (not hashString.endswith(':'))):
                        with open((fullpath.split('.')[0] + '.16800'), 'w') as tempFileC:
                            logging.debug('[hashie] Repaired: {} ({})'.format(filename, hashString))
                            tempFileC.write((hashString + '\n'))
                        return True
                    else:
                        logging.debug('[hashie] Discarded: {} {}'.format(line, hashString))
        else:
            # No candidates at all: the raw hash file is unusable, remove it.
            os.remove((fullpath.split('.')[0] + '.16800'))
        return False

    def _process_stale_pcaps(self, handshake_dir):
        """Batch-convert every pcap in the handshake directory that lacks hash files."""
        handshakes_list = [os.path.join(handshake_dir, filename) for filename in os.listdir(handshake_dir) if filename.endswith('.pcap')]
        failed_jobs = []
        successful_jobs = []
        lonely_pcaps = []
        for (num, handshake) in enumerate(handshakes_list):
            fullpathNoExt = handshake.split('.')[0]
            pcapFileName = handshake.split('/')[(- 1):][0]
            if (not os.path.isfile((fullpathNoExt + '.2500'))):
                if self._writeEAPOL(handshake):
                    successful_jobs.append(('2500: ' + pcapFileName))
                else:
                    failed_jobs.append(('2500: ' + pcapFileName))
            if (not os.path.isfile((fullpathNoExt + '.16800'))):
                if self._writePMKID(handshake, ''):
                    successful_jobs.append(('16800: ' + pcapFileName))
                else:
                    failed_jobs.append(('16800: ' + pcapFileName))
                    # Still no EAPOL hash either -> not enough packets; track
                    # it for the webgpsmap output.
                    if (not os.path.isfile((fullpathNoExt + '.2500'))):
                        lonely_pcaps.append(handshake)
                        logging.debug('[hashie] Batch job: added {} to lonely list'.format(pcapFileName))
            if ((((num + 1) % 50) == 0) or ((num + 1) == len(handshakes_list))):
                logging.info('[hashie] Batch job: {}/{} done ({} fails)'.format((num + 1), len(handshakes_list), len(lonely_pcaps)))
        if successful_jobs:
            logging.info('[hashie] Batch job: {} new handshake files created'.format(len(successful_jobs)))
        if lonely_pcaps:
            logging.info('[hashie] Batch job: {} networks without enough packets to create a hash'.format(len(lonely_pcaps)))
            self._getLocations(lonely_pcaps)

    def _getLocations(self, lonely_pcaps):
        """Record incomplete pcaps for webgpsmap and count those with location data."""
        with open('/root/.incompletePcaps', 'w') as isIncomplete:
            count = 0
            for pcapFile in lonely_pcaps:
                filename = pcapFile.split('/')[(- 1):][0]
                fullpathNoExt = pcapFile.split('.')[0]
                isIncomplete.write((filename + '\n'))
                if (os.path.isfile((fullpathNoExt + '.gps.json')) or os.path.isfile((fullpathNoExt + '.geo.json')) or os.path.isfile((fullpathNoExt + '.paw-gps.json'))):
                    count += 1
            if (count != 0):
                logging.info('[hashie] Used {} GPS/GEO/PAW-GPS files to find lonely networks, go check webgpsmap! ;)'.format(str(count)))
            else:
                # Bug fix: the original called .format(str(count)) on a message
                # with no placeholder -- a dead no-op argument, removed.
                logging.info('[hashie] Could not find any GPS/GEO/PAW-GPS files for the lonely networks')

    def _getLocationsCSV(self, lonely_pcaps):
        """Write a 'name,lat,lng,accuracy' CSV of lonely networks with location data."""
        locations = []
        for pcapFile in lonely_pcaps:
            filename = pcapFile.split('/')[(- 1):][0].split('.')[0]
            fullpathNoExt = pcapFile.split('.')[0]
            if os.path.isfile((fullpathNoExt + '.gps.json')):
                with open((fullpathNoExt + '.gps.json'), 'r') as tempFileA:
                    data = json.load(tempFileA)
                    locations.append((((((filename + ',') + str(data['Latitude'])) + ',') + str(data['Longitude'])) + ',50'))
            elif os.path.isfile((fullpathNoExt + '.geo.json')):
                with open((fullpathNoExt + '.geo.json'), 'r') as tempFileB:
                    data = json.load(tempFileB)
                    locations.append(((((((filename + ',') + str(data['location']['lat'])) + ',') + str(data['location']['lng'])) + ',') + str(data['accuracy'])))
            elif os.path.isfile((fullpathNoExt + '.paw-gps.json')):
                with open((fullpathNoExt + '.paw-gps.json'), 'r') as tempFileC:
                    data = json.load(tempFileC)
                    locations.append((((((filename + ',') + str(data['lat'])) + ',') + str(data['long'])) + ',50'))
        if locations:
            with open('/root/locations.csv', 'w') as tempFileD:
                for loc in locations:
                    tempFileD.write((loc + '\n'))
            logging.info('[hashie] Used {} GPS/GEO files to find lonely networks, load /root/locations.csv into a mapping app and go say hi!'.format(len(locations)))
class OptionSeriesWindbarbSonificationTracksMappingHighpassFrequency(Options):
    """Highpass-filter frequency mapping options for windbarb sonification tracks.

    Bug fix: each getter was immediately shadowed by a same-named setter
    ``def``, leaving every getter unreachable dead code -- the ``@property`` /
    ``@<name>.setter`` decorators used throughout this options wrapper had
    evidently been stripped. They are restored here.
    """

    @property
    def mapFunction(self):
        """The mapping function used for this track."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The data property this parameter is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """The maximum value of the mapped parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """The minimum value of the mapped parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """The data range the mapping is computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_fmpq():
    """Exercise flint.fmpq (arbitrary-precision rationals): construction and
    parsing, arithmetic, comparisons, printing, rounding/truncation, hashing,
    sequence enumeration (.next) and special-value class methods.

    NOTE(review): relies on helpers defined elsewhere in this module:
    ``raises``, ``ctx`` and ``long`` (presumably a py2/3 compat alias for
    ``int``) -- confirm they exist at module level.
    """
    Q = flint.fmpq
    # --- construction and equality with ints / other fmpqs ---
    assert (Q() == Q(0))
    assert (Q(0) != Q(1))
    assert (Q(0) == 0)
    assert (0 == Q(0))
    assert (Q(2) != 1)
    assert (1 != Q(2))
    assert (Q(1) != ())
    assert (Q(1, 2) != 1)
    assert (Q(2, 3) == Q(flint.fmpz(2), long(3)))
    assert (Q((- 2), (- 4)) == Q(1, 2))
    # --- string parsing: integers and p/q forms only ---
    assert (Q('1') == Q(1))
    assert (Q('1/2') == Q(1, 2))
    assert raises((lambda : Q('1.0')), ValueError)
    assert raises((lambda : Q('1.5')), ValueError)
    assert raises((lambda : Q('1/2/3')), ValueError)
    assert raises((lambda : Q([])), TypeError)
    assert raises((lambda : Q(1, [])), TypeError)
    assert raises((lambda : Q([], 1)), TypeError)
    # --- truthiness ---
    assert (bool(Q(0)) == False)
    assert (bool(Q(1)) == True)
    # --- arithmetic with fmpq, int and fmpz operands (both orders) ---
    assert ((Q(1, 3) + Q(2, 3)) == 1)
    assert ((Q(1, 3) - Q(2, 3)) == Q((- 1), 3))
    assert ((Q(1, 3) * Q(2, 3)) == Q(2, 9))
    assert ((Q(1, 3) + 2) == Q(7, 3))
    assert ((2 + Q(1, 3)) == Q(7, 3))
    assert ((Q(1, 3) - 2) == Q((- 5), 3))
    assert ((2 - Q(1, 3)) == Q(5, 3))
    assert ((Q(1, 3) * 2) == Q(2, 3))
    assert ((2 * Q(1, 3)) == Q(2, 3))
    assert ((Q(2, 3) / Q(4, 5)) == Q(5, 6))
    assert ((Q(2, 3) / 5) == Q(2, 15))
    assert ((Q(2, 3) / flint.fmpz(5)) == Q(2, 15))
    assert ((5 / Q(2, 3)) == Q(15, 2))
    assert ((flint.fmpz(5) / Q(2, 3)) == Q(15, 2))
    assert ((Q(2, 3) / 5) == Q(2, 15))
    # --- powers: integer exponents only; modular pow unsupported ---
    assert ((Q(1, 2) ** 2) == Q(1, 4))
    assert ((Q(1, 2) ** (- 2)) == Q(4))
    assert raises((lambda : (Q(0) ** (- 1))), ZeroDivisionError)
    assert raises((lambda : (Q(1, 2) ** Q(1, 2))), TypeError)
    assert raises((lambda : (Q(1, 2) ** [])), TypeError)
    assert raises((lambda : ([] ** Q(1, 2))), TypeError)
    assert raises((lambda : pow(Q(1, 2), 2, 3)), AssertionError)
    # Exponent too large to represent -> OverflowError.
    megaz = (flint.fmpz(2) ** 8000000)
    megaq = Q(megaz)
    assert raises((lambda : (megaq ** megaz)), OverflowError)
    # --- unsupported operand types ---
    assert raises((lambda : (Q(1, 2) + [])), TypeError)
    assert raises((lambda : (Q(1, 2) - [])), TypeError)
    assert raises((lambda : (Q(1, 2) * [])), TypeError)
    assert raises((lambda : (Q(1, 2) / [])), TypeError)
    assert raises((lambda : ([] + Q(1, 2))), TypeError)
    assert raises((lambda : ([] - Q(1, 2))), TypeError)
    assert raises((lambda : ([] * Q(1, 2))), TypeError)
    assert raises((lambda : ([] / Q(1, 2))), TypeError)
    # --- rich comparisons (against int and fmpq); `is` checks the bool identity ---
    assert ((Q(1, 2) == 1) is False)
    assert ((Q(1, 2) != 1) is True)
    assert ((Q(1, 2) < 1) is True)
    assert ((Q(1, 2) <= 1) is True)
    assert ((Q(1, 2) > 1) is False)
    assert ((Q(1, 2) >= 1) is False)
    assert ((Q(1, 2) == Q(3, 4)) is False)
    assert ((Q(1, 2) != Q(3, 4)) is True)
    assert ((Q(1, 2) < Q(3, 4)) is True)
    assert ((Q(1, 2) <= Q(3, 4)) is True)
    assert ((Q(1, 2) > Q(3, 4)) is False)
    assert ((Q(1, 2) >= Q(3, 4)) is False)
    assert ((Q(1, 2) == Q(1, 2)) is True)
    assert ((Q(1, 2) != Q(1, 2)) is False)
    assert ((Q(1, 2) < Q(1, 2)) is False)
    assert ((Q(1, 2) <= Q(1, 2)) is True)
    assert ((Q(1, 2) > Q(1, 2)) is False)
    assert ((Q(1, 2) >= Q(1, 2)) is True)
    assert raises((lambda : (Q(1, 2) > [])), TypeError)
    assert raises((lambda : ([] < Q(1, 2))), TypeError)
    # --- printing: repr depends on the ctx.pretty flag ---
    ctx.pretty = False
    assert (repr(Q((- 2), 3)) == 'fmpq(-2,3)')
    assert (repr(Q(3)) == 'fmpq(3)')
    ctx.pretty = True
    assert (str(Q((- 2), 3)) == '-2/3')
    assert (str(Q(3)) == '3')
    # --- numerator/denominator accessors are aliases ---
    assert (Q(2, 3).p == Q(2, 3).numer() == Q(2, 3).numerator == 2)
    assert (Q(2, 3).q == Q(2, 3).denom() == Q(2, 3).denominator == 3)
    # --- unary ops and division by zero ---
    assert ((+ Q(5, 7)) == Q(5, 7))
    assert ((- Q(5, 7)) == Q((- 5), 7))
    assert ((- Q((- 5), 7)) == Q(5, 7))
    assert (abs(Q(5, 7)) == Q(5, 7))
    assert (abs((- Q(5, 7))) == Q(5, 7))
    assert raises((lambda : Q(1, 0)), ZeroDivisionError)
    assert raises((lambda : (Q(1, 2) / Q(0))), ZeroDivisionError)
    assert raises((lambda : (Q(1, 2) / 0)), ZeroDivisionError)
    # --- floor/ceil/trunc/round: results are fmpz (fmpq with ndigits) ---
    assert (Q(5, 3).floor() == flint.fmpz(1))
    assert (Q((- 5), 3).floor() == flint.fmpz((- 2)))
    assert (Q(5, 3).ceil() == flint.fmpz(2))
    assert (Q((- 5), 3).ceil() == flint.fmpz((- 1)))
    assert (type(int(Q(5, 3))) is int)
    assert (type(math.floor(Q(5, 3))) is flint.fmpz)
    assert (type(math.ceil(Q(5, 3))) is flint.fmpz)
    assert (type(math.trunc(Q(5, 3))) is flint.fmpz)
    assert (type(round(Q(5, 3))) is flint.fmpz)
    assert (type(round(Q(5, 3))) is flint.fmpz)
    assert (type(round(Q(5, 3), 0)) is flint.fmpq)
    assert (type(round(Q(5, 3), 1)) is flint.fmpq)
    assert (int(Q(5, 3)) == 1)
    assert (math.floor(Q(5, 3)) == flint.fmpz(1))
    assert (math.ceil(Q(5, 3)) == flint.fmpz(2))
    assert (math.trunc(Q(5, 3)) == flint.fmpz(1))
    assert (round(Q(5, 3)) == flint.fmpz(2))
    assert (int(Q((- 5), 3)) == flint.fmpz((- 1)))
    assert (math.floor(Q((- 5), 3)) == flint.fmpz((- 2)))
    assert (math.ceil(Q((- 5), 3)) == flint.fmpz((- 1)))
    assert (math.trunc(Q((- 5), 3)) == flint.fmpz((- 1)))
    assert (round(Q((- 5), 3)) == (- 2))
    # round with ndigits (including negative) stays rational.
    assert (round(Q(100, 3), 2) == Q(3333, 100))
    assert (round(Q(100, 3), 0) == Q(33, 1))
    assert (round(Q(100, 3), (- 1)) == Q(30, 1))
    assert (round(Q(100, 3), (- 2)) == Q(0))
    # --- hashing: equal values collapse to one dict key ---
    d = {}
    d[Q(1, 2)] = 3
    d[Q(1, 2)] = 4
    assert (d == {Q(1, 2): 4})
    assert (Q((- 5), 3).height_bits() == 3)
    assert (Q((- 5), 3).height_bits(signed=True) == (- 3))
    # --- .next(): enumeration of the rationals in canonical orders ---
    cases = [((lambda q: q.next()), [Q(0), Q(1), Q((- 1)), Q(1, 2), Q((- 1), 2), Q(2), Q((- 2)), Q(1, 3), Q((- 1), 3), Q(3)]), ((lambda q: q.next(signed=False)), [Q(0), Q(1), Q(1, 2), Q(2), Q(1, 3), Q(3), Q(2, 3), Q(3, 2), Q(1, 4), Q(4)]), ((lambda q: q.next(minimal=False)), [Q(0), Q(1), Q((- 1)), Q(1, 2), Q((- 1), 2), Q(2), Q((- 2)), Q(1, 3), Q((- 1), 3), Q(3, 2)]), ((lambda q: q.next(signed=False, minimal=False)), [Q(0), Q(1), Q(1, 2), Q(2), Q(1, 3), Q(3, 2), Q(2, 3), Q(3), Q(1, 4), Q(4, 3)])]
    for (func, values) in cases:
        for (val1, val2) in zip(values[:(- 1)], values[1:]):
            assert (func(val1) == val2)
    raises((lambda : Q((- 1)).next(signed=False)), ValueError)
    # --- special values: bernoulli, harmonic, dedekind_sum ---
    OE = OverflowError
    cases = [(flint.fmpq.bernoulli, [OE, Q(1), Q((- 1), 2), Q(1, 6), Q(0), Q((- 1), 30)]), ((lambda n: flint.fmpq.bernoulli(n, cache=True)), [OE, Q(1), Q((- 1), 2), Q(1, 6), Q(0), Q((- 1), 30)]), (flint.fmpq.harmonic, [OE, Q(0), Q(1), Q(3, 2), Q(11, 6), Q(25, 12)]), ((lambda n: flint.fmpq.dedekind_sum(n, 3)), [(- Q(1, 18)), 0, Q(1, 18), (- Q(1, 18)), 0, Q(1, 18), (- Q(1, 18))])]
    is_exception = (lambda v: (isinstance(v, type) and issubclass(v, Exception)))
    # Values are enumerated starting at n = -1 (where some functions overflow).
    for (func, values) in cases:
        for (n, val) in enumerate(values, (- 1)):
            if is_exception(val):
                assert raises((lambda : func(n)), val)
            else:
                assert (func(n) == val)
class CrossAttention(fl.Attention):
    """Attention whose keys/values come from a separate context sequence.

    Behaves like ``fl.Attention`` except that input arg 0 (queries) and input
    arg 1 (context) are routed so that queries attend over the context.
    """

    def __init__(
        self,
        embedding_dim: int,
        cross_embedding_dim: (int | None) = None,
        num_heads: int = 1,
        inner_dim: (int | None) = None,
        device: ((Device | str) | None) = None,
        dtype: (DType | None) = None,
    ) -> None:
        super().__init__(
            embedding_dim=embedding_dim,
            key_embedding_dim=cross_embedding_dim,
            num_heads=num_heads,
            inner_dim=inner_dim,
            is_optimized=False,
            device=device,
            dtype=dtype,
        )
        if cross_embedding_dim:
            self.cross_embedding_dim = cross_embedding_dim
        else:
            self.cross_embedding_dim = embedding_dim
        # Route arg 0 to Q and arg 1 to both K and V.
        splitter = fl.Parallel(fl.GetArg(index=0), fl.GetArg(index=1), fl.GetArg(index=1))
        self.insert(index=0, module=splitter)
class ArtistManager(GObject.Object):
    """Coordinates the artist model, cover loading and sorting for the plugin."""

    instance = None
    progress = GObject.property(type=float, default=0)
    __gsignals__ = {'sort': (GObject.SIGNAL_RUN_LAST, None, (object,))}

    def __init__(self, plugin, album_manager, shell):
        super(ArtistManager, self).__init__()
        # Keep references to the host objects.
        self.plugin = plugin
        self.shell = shell
        self.db = plugin.shell.props.db
        # Cover manager shares the album manager with the model/loader.
        self.cover_man = ArtistCoverManager(plugin, self)
        self.cover_man.album_manager = album_manager
        self.model = ArtistsModel(album_manager)
        self.loader = ArtistLoader(self, album_manager)
        self._connect_signals()

    def _connect_signals(self):
        # Load covers once the model finishes loading; re-sort on demand.
        self.loader.connect('model-load-finished', self._load_finished_callback)
        self.connect('sort', self._sort_artist)

    def _sort_artist(self, widget, param):
        toolbar_type = param
        # Only react to the artist toolbar (or an unspecified one).
        if toolbar_type and (toolbar_type != 'artist'):
            return
        self.model.sort()

    def _load_finished_callback(self, *args):
        self.cover_man.load_covers()
class SimpleApplication(HasStrictTraits):
    """Minimal application shell that drives a toolkit GUI event loop."""

    # Toolkit GUI used to run the event loop.
    gui = Instance(IGUI)

    # Fired from within the event loop once the application is running.
    application_running = Event()

    def __init__(self):
        super().__init__()
        self.gui = GUI()

    def start(self):
        """Arrange for `application_running` to fire once the loop starts, then block."""
        self.gui.set_trait_later(self, 'application_running', True)
        self.gui.start_event_loop()

    def stop(self):
        """Stop the running event loop (unblocks `start`)."""
        self.gui.stop_event_loop()
def extractTyralionsBlogspotCom(item):
    """Build a release message for tyralions.blogspot.com feed items.

    Returns None for previews/untagged chapters, a release message for known
    tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _loop_once():
    """Synthesize one block of audio samples and write it to the output stream.

    For each sample index t in [offset, offset+blocksize) the patch controls
    (VCO mix, LFO, ADSR, VCA) are read under `lock`, one 8-bit sample is
    appended to BUFFER, and the block is written to `stream` at the end.

    NOTE(review): BUFFER is a str built with chr() — Python-2-style byte
    handling; on Python 3 the stream would receive text, not bytes. Confirm
    the intended interpreter.
    """
    global patch, name, path, monitor
    global p, device, rate, blocksize, nchans, format, info, stream, lock, control, trigger, devinfo, block, offset, autoscale
    global BUFFER, t, last, vco_pitch, vco_sin, vco_tri, vco_saw, vco_sqr, lfo_depth, lfo_frequency, adsr_attack, adsr_decay, adsr_sustain, adsr_release, vca_envelope, frequency, period, wave_sin, wave_tri, wave_saw, wave_sqr, waveform, lfo_envelope, adsr_envelope
    BUFFER = ''
    for t in range(offset, (offset + blocksize)):
        # Snapshot all control values atomically for this sample.
        with lock:
            trigger.time = t
            last = trigger.last
            vco_pitch = control.vco_pitch
            vco_sin = control.vco_sin
            vco_tri = control.vco_tri
            vco_saw = control.vco_saw
            vco_sqr = control.vco_sqr
            lfo_depth = control.lfo_depth
            lfo_frequency = control.lfo_frequency
            adsr_attack = control.adsr_attack
            adsr_decay = control.adsr_decay
            adsr_sustain = control.adsr_sustain
            adsr_release = control.adsr_release
            vca_envelope = control.vca_envelope
        # Oscillator section: mix sine/triangle/saw/square at the VCO pitch.
        if (vco_pitch > 0):
            # Pitch in semitones relative to middle C (261.63 Hz).
            frequency = (math.pow(2, ((vco_pitch / 12) - 4)) * 261.63)
            period = (rate / frequency)
            wave_sin = ((vco_sin * (math.sin((((math.pi * frequency) * t) / rate)) + 1)) / 2)
            wave_tri = (((vco_tri * float(abs(((t % period) - (period / 2))))) / period) * 2)
            wave_saw = ((vco_saw * float((t % period))) / period)
            wave_sqr = (vco_sqr * float(((t % period) > (period / 2))))
            # Scale the mixed waveform to the 8-bit range.
            waveform = ((((wave_sin + wave_tri) + wave_saw) + wave_sqr) * 127)
        else:
            waveform = 0
        # LFO: amplitude modulation between lfo_depth and 1.
        lfo_envelope = ((math.sin((((math.pi * lfo_frequency) * t) / rate)) + 1) / 2)
        lfo_envelope = (lfo_depth + ((1 - lfo_depth) * lfo_envelope))
        waveform = (lfo_envelope * waveform)
        # ADSR: piecewise-linear envelope relative to the last trigger time.
        if ((adsr_attack > 0) and ((t - last) < adsr_attack)):
            adsr_envelope = ((t - last) / adsr_attack)
        elif ((adsr_decay > 0) and (((t - last) - adsr_attack) < adsr_decay)):
            adsr_envelope = (1.0 - ((0.5 * ((t - last) - adsr_attack)) / adsr_decay))
        elif ((adsr_sustain > 0) and ((((t - last) - adsr_attack) - adsr_decay) < adsr_sustain)):
            adsr_envelope = 0.5
        elif ((adsr_release > 0) and (((((t - last) - adsr_attack) - adsr_decay) - adsr_sustain) < adsr_release)):
            adsr_envelope = (0.5 - ((0.5 * ((((t - last) - adsr_attack) - adsr_decay) - adsr_sustain)) / adsr_release))
        else:
            adsr_envelope = 0
        waveform = (adsr_envelope * waveform)
        # VCA: final output gain.
        waveform = (vca_envelope * waveform)
        BUFFER = (BUFFER + chr(int(waveform)))
    stream.write(BUFFER)
    offset = (offset + blocksize)
    # Sanity check: everything here should be global; locals indicate a bug.
    if len(locals()):
        print(('LOCALS: ' + ', '.join(locals().keys())))
# NOTE(review): the two bare strings below look like '@mock.patch(...)'
# decorators whose '@mock.patch(' prefix was stripped during extraction —
# the mock arguments in the signature match them in reverse (stacking) order.
('foremast.elb.create_elb.boto3.session.Session')
('foremast.elb.create_elb.get_properties')
def test_elb_add_listener_policy(mock_get_properties, mock_boto3_session):
    """add_listener_policy must apply the listener policies via the boto3 ELB client."""
    test_app = 'myapp'
    test_port = 80
    test_policy_list = ['policy_name']
    json_data = {'job': [{'listeners': [{'externalPort': test_port, 'listenerPolicies': test_policy_list}]}]}
    # The boto3 client the ELB code will obtain from the mocked session.
    client = mock_boto3_session.return_value.client.return_value
    elb = SpinnakerELB(app='myapp', env='dev', region='us-east-1')
    elb.add_listener_policy(json.dumps(json_data))
    client.set_load_balancer_policies_of_listener.assert_called_with(LoadBalancerName=test_app, LoadBalancerPort=test_port, PolicyNames=test_policy_list)
class FilesLoader(jinja2.BaseLoader):
    """Jinja2 loader that resolves template names against an explicit file list."""

    def __init__(self, files):
        self.files = files

    def get_source(self, environment, template):
        """Return (source, filename, uptodate) for the first file whose basename matches."""
        for candidate in self.files:
            if os.path.basename(candidate) != template:
                continue
            with open(candidate) as handle:
                source = handle.read()
                stamp = os.path.getmtime(candidate)
                # Template is up to date as long as the mtime is unchanged.
                uptodate = (lambda: (stamp == os.path.getmtime(candidate)))
                return (source, candidate, uptodate)
        raise jinja2.TemplateNotFound(template)

    def list_templates(self):
        """Sorted basenames of every configured file."""
        return sorted(os.path.basename(f) for f in self.files)
class NXFlowStatsRequest(NXStatsRequest):
    """Nicira extension flow-stats request (NXST_FLOW) message."""

    def __init__(self, datapath, flags, out_port, table_id, rule=None):
        super(NXFlowStatsRequest, self).__init__(datapath, flags, ofproto.NXST_FLOW)
        self.out_port = out_port
        self.table_id = table_id
        # Optional nx_match rule restricting which flows are reported.
        self.rule = rule
        # Length of the serialized match; filled in during serialization.
        self.match_len = 0

    def _serialize_vendor_stats_body(self):
        """Serialize the optional match rule, then the fixed request header.

        The match body is written first (after the fixed-size request struct)
        so that its length is known when the header is packed.
        """
        if (self.rule is not None):
            offset = (ofproto.NX_STATS_MSG_SIZE + ofproto.NX_FLOW_STATS_REQUEST_SIZE)
            self.match_len = nx_match.serialize_nxm_match(self.rule, self.buf, offset)
        msg_pack_into(ofproto.NX_FLOW_STATS_REQUEST_PACK_STR, self.buf, ofproto.NX_STATS_MSG_SIZE, self.out_port, self.match_len, self.table_id)
def extractJjkjuuutranslationsWordpressCom(item):
    """Build a release message for jjkjuuutranslations.wordpress.com items.

    Returns None for previews/untagged items, a release message on a tag
    match, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def main():
    """CLI entry point: build the GitLab group/project tree, then print or sync it."""
    # With no CLI arguments at all, show the help text instead of failing.
    args = parse_args(argv=(None if sys.argv[1:] else ['--help']))
    if args.version:
        print(VERSION)
        sys.exit(0)
    # Required connection settings: token and base URL.
    if args.token is None:
        print("Please specify a valid token with the -t flag or the 'GITLAB_TOKEN' environment variable")
        sys.exit(1)
    if args.url is None:
        print("Please specify a valid gitlab base url with the -u flag or the 'GITLAB_URL' environment variable")
        sys.exit(1)
    # A destination is only optional when we merely print the tree.
    if (args.dest is None) and (args.print is False):
        print('Please specify a destination for the gitlab tree')
        sys.exit(1)
    config_logging(args)
    includes = split(args.include)
    excludes = split(args.exclude)
    tree = GitlabTree(args.url, args.token, args.method, args.naming, args.archived.api_value,
                      includes, excludes, args.file, args.concurrency, args.recursive,
                      args.verbose, args.root_group, args.dont_checkout, args.dont_store_token)
    log.debug('Reading projects tree from gitlab at [%s]', args.url)
    tree.load_tree()
    if tree.is_empty():
        log.fatal('The tree is empty, check your include/exclude patterns and/or a root_group value or run with more verbosity for debugging')
        sys.exit(1)
    if args.print:
        tree.print_tree(args.print_format)
    else:
        tree.sync_tree(args.dest)
def test_substitute_loop_node_with_parent():
    """Replacing a nested endless loop must rewire edges to the new loop node."""
    asgraph = AbstractSyntaxInterface()
    code_node = asgraph._add_code_node([Assignment(var('a'), const(2))])
    # Build: outer_loop -> inner_loop -> code_node.
    inner_loop = asgraph.add_endless_loop_with_body(code_node)
    outer_loop = asgraph.add_endless_loop_with_body(inner_loop)
    replacement_loop = asgraph.factory.create_while_loop_node(condition=LogicCondition.initialize_symbol('a', asgraph.factory.logic_context))
    asgraph.substitute_loop_node(inner_loop, replacement_loop)
    # NOTE(review): `asgraph.get_roots` is compared without being called —
    # presumably a property; confirm it is not an uncalled method.
    assert ((set(asgraph.edges) == {(replacement_loop, code_node), (outer_loop, replacement_loop)}) and (len(asgraph.nodes) == 3) and (asgraph.get_roots == (outer_loop,)))
def _split_addr(addr):
    """Split 'host:port' (or '[ipv6]:port') into (host, int(port)).

    The port is parsed with base 0, so decimal, hex ('0x50') and octal forms
    are accepted.

    Raises:
        ValueError: if the pair is malformed or the address is not a valid
            IPv4 (unbracketed) or IPv6 (bracketed) address.
    """
    e = ValueError(('Invalid IP address and port pair: "%s"' % addr))
    pair = addr.rsplit(':', 1)
    if (len(pair) != 2):
        raise e
    (addr, port) = pair
    if (addr.startswith('[') and addr.endswith(']')):
        # Bug fix: strip exactly one pair of brackets. The previous
        # lstrip('[')/rstrip(']') removed *runs* of brackets, letting
        # malformed input such as '[[::1]]:80' pass validation.
        addr = addr[1:-1]
        if (not ip.valid_ipv6(addr)):
            raise e
    elif (not ip.valid_ipv4(addr)):
        raise e
    return (addr, int(port, 0))
def main(POST_ID=None) -> None:
    """End-to-end pipeline: fetch a Reddit thread, narrate it, and build the video.

    Args:
        POST_ID: optional explicit Reddit post id; otherwise one is chosen
            by get_subreddit_threads.
    """
    global redditid, reddit_object
    reddit_object = get_subreddit_threads(POST_ID)
    redditid = id(reddit_object)
    # Narrate the post; the audio length drives how much background we need.
    (length, number_of_comments) = save_text_to_mp3(reddit_object)
    length = math.ceil(length)
    get_screenshots_of_reddit_posts(reddit_object, number_of_comments)
    bg_config = {'video': get_background_config('video'), 'audio': get_background_config('audio')}
    download_background_video(bg_config['video'])
    download_background_audio(bg_config['audio'])
    # Cut the background to the narration length, then assemble everything.
    chop_background(bg_config, length, reddit_object)
    make_final_video(number_of_comments, length, reddit_object, bg_config)
def wrap_data_response(data):
    """Wrap a protobuf message in a Response.

    Returns a SUCCESS response carrying the JSON-serialized message, or a
    RESOURCE_DOES_NOT_EXIST response when `data` is None.
    """
    if data is None:
        return Response(return_code=str(RESOURCE_DOES_NOT_EXIST), error_msg=ReturnCode.Name(RESOURCE_DOES_NOT_EXIST).lower(), data=None)
    payload = MessageToJson(data, preserving_proto_field_name=True)
    return Response(return_code=str(SUCCESS), error_msg=None, data=payload)
def unpackb(packed, **kwargs):
    """Unpack msgpack data, chaining every registered extension decoder.

    An explicit 'object_pairs_hook' takes precedence and disables the chain.
    """
    if 'object_pairs_hook' in kwargs:
        return _unpackb(packed, **kwargs)
    # Wrap the caller's hook (if any) with each registered decoder.
    hook = kwargs.get('object_hook')
    for decoder in msgpack_decoders.get_all().values():
        hook = functools.partial(decoder, chain=hook)
    kwargs['object_hook'] = hook
    return _unpackb(packed, **kwargs)
def extractWasabilinusWordpressCom(item):
    """Build a release message for wasabilinus.wordpress.com feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    tagmap = [
        ("The Forerunner's Odyssey", "The Forerunner's Odyssey", 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the two lines below look like '@'-stripped decorators
# (route registration + auth guard) — restore the '@' prefixes if so.
_routes.route('/<int:group_id>/contact-organizer', methods=['POST'])
_required
def contact_group_organizer(group_id):
    """Forward an attendee's question to every accepted organizer of a group.

    Sends the question to the group owner (BCC'ing all accepted organizers)
    and a confirmation copy to the attendee. Returns a JSON success payload.
    """
    group = Group.query.get_or_404(group_id)
    organizer_role = Role.query.filter_by(name='organizer').first()
    group_roles = UsersGroupsRoles.query.filter_by(group_id=group_id, role_id=organizer_role.id, accepted=True).all()
    # De-duplicate organizer addresses.
    organizers_emails = list(set(map((lambda x: x.email), group_roles)))
    email = strip_tags(request.json.get('email'))
    context = {'attendee_name': current_user.fullname, 'attendee_email': current_user.email, 'group_name': group.name, 'email': email}
    organizer_mail = "{attendee_name} ({attendee_email}) has a question for you about your group {group_name}: <br/><br/><div style='white-space: pre-line;'>{email}</div>"
    action = MailType.CONTACT_GROUP_ORGANIZERS
    mail = MAILS[action]
    send_email(to=group.user.email, action=action, subject=((group.name + ': Question from ') + current_user.fullname), html=organizer_mail.format(**context), bcc=organizers_emails, reply_to=current_user.email)
    # Bug fix: corrected 'succesfully' -> 'successfully' in the subject line.
    send_email(to=current_user.email, action=MailType.CONTACT_GROUP_ORGANIZERS, subject=(group.name + ': Organizers are successfully contacted'), html=render_template(mail['template'], group_name=group.name, email_copy=email))
    return jsonify(success=True)
class base():
    """Static lookup tables of traditional-astrology correspondences
    (elements, temperaments, qualities, moon phases and seasons)."""

    # Canonical orderings of the four elements and related quartets.
    elements = [const.FIRE, const.EARTH, const.AIR, const.WATER]
    temperaments = [const.CHOLERIC, const.MELANCHOLIC, const.SANGUINE, const.PHLEGMATIC]
    genders = [const.MASCULINE, const.FEMININE]
    factions = [const.DIURNAL, const.NOCTURNAL]
    sunseasons = [const.SPRING, const.SUMMER, const.AUTUMN, const.WINTER]
    # Element <-> temperament are a bijection; both directions kept for lookup.
    elementTemperament = {const.FIRE: const.CHOLERIC, const.EARTH: const.MELANCHOLIC, const.AIR: const.SANGUINE, const.WATER: const.PHLEGMATIC}
    temperamentElement = {const.CHOLERIC: const.FIRE, const.MELANCHOLIC: const.EARTH, const.SANGUINE: const.AIR, const.PHLEGMATIC: const.WATER}
    # Qualities (hot/cold, dry/humid) per element and per temperament.
    elementQuality = {const.FIRE: [const.HOT, const.DRY], const.EARTH: [const.COLD, const.DRY], const.AIR: [const.HOT, const.HUMID], const.WATER: [const.COLD, const.HUMID]}
    temperamentQuality = {const.CHOLERIC: [const.HOT, const.DRY], const.MELANCHOLIC: [const.COLD, const.DRY], const.SANGUINE: [const.HOT, const.HUMID], const.PHLEGMATIC: [const.COLD, const.HUMID]}
    # Element associated with each moon quarter and each solar season.
    moonphaseElement = {const.MOON_FIRST_QUARTER: const.AIR, const.MOON_SECOND_QUARTER: const.FIRE, const.MOON_THIRD_QUARTER: const.EARTH, const.MOON_LAST_QUARTER: const.WATER}
    sunseasonElement = {const.SPRING: const.AIR, const.SUMMER: const.FIRE, const.AUTUMN: const.EARTH, const.WINTER: const.WATER}
class OptionPlotoptionsCylinderSonificationTracksMappingNoteduration(Options):
    """Generated Highcharts option wrapper: sonification track note-duration mapping.

    NOTE(review): every name below is defined twice (getter- and setter-shaped);
    in plain Python the second `def` silently replaces the first, so only the
    setters survive. This looks like '@property'/'@<name>.setter' decorators
    were stripped during extraction — confirm against the generator output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestLinearDisplayP3(util.ColorAssertsPyTest):
    """Conversion checks for the linear Display P3 color space."""

    # (input, expected) pairs covering named colors, percentages and 'none'.
    COLORS = [('red', 'color(--display-p3-linear 0.82246 0.03319 0.01708)'), ('orange', 'color(--display-p3-linear 0.88926 0.39697 0.04432)'), ('yellow', 'color(--display-p3-linear 1 1 0.08948)'), ('green', 'color(--display-p3-linear 0.03832 0.2087 0.01563)'), ('blue', 'color(--display-p3-linear 0 0 0.91052)'), ('indigo', 'color(--display-p3-linear 0.05787 0.00234 0.20446)'), ('violet', 'color(--display-p3-linear 0.74283 0.2442 0.80925)'), ('white', 'color(--display-p3-linear 1 1 1)'), ('gray', 'color(--display-p3-linear 0.21586 0.21586 0.21586)'), ('black', 'color(--display-p3-linear 0 0 0)'), ('color(--display-p3-linear 0 0.50196 0)', 'color(--display-p3-linear 0 0.50196 0)'), ('color(--display-p3-linear 0 0.50196 0 / 0.5)', 'color(--display-p3-linear 0 0.50196 0 / 0.5)'), ('color(--display-p3-linear 50% 50% 50% / 50%)', 'color(--display-p3-linear 0.5 0.5 0.5 / 0.5)'), ('color(--display-p3-linear none none none / none)', 'color(--display-p3-linear none none none / none)'), ('color(--display-p3-linear 0% 0% 0%)', 'color(--display-p3-linear 0 0 0)'), ('color(--display-p3-linear 100% 100% 100%)', 'color(--display-p3-linear 1 1 1)'), ('color(--display-p3-linear -100% -100% -100%)', 'color(--display-p3-linear -1 -1 -1)')]
    # NOTE(review): the line below looks like a stripped
    # '@pytest.mark.parametrize' decorator (leading '@pytest.mark' lost) —
    # as written it is not valid Python; restore the decorator prefix.
    .parametrize('color1,color2', COLORS)
    def test_colors(self, color1, color2):
        """Each input must convert to the expected linear Display P3 value."""
        self.assertColorEqual(Color(color1).convert('display-p3-linear'), Color(color2))
def to_datetime_list(datetimes):
    """Normalize a datetime spec to a list of datetimes.

    Accepts a single datetime/np.datetime64, a list/tuple (optionally in MARS
    'start to end [by step]' form), or any wrapper object convertible via
    get_wrapper.
    """
    if isinstance(datetimes, (datetime.datetime, np.datetime64)):
        # A scalar becomes a one-element list.
        return to_datetime_list([datetimes])
    if isinstance(datetimes, (list, tuple)):
        n = len(datetimes)
        # MARS-style range: [start, 'to', end]
        if (n == 3) and isinstance(datetimes[1], str) and (datetimes[1].lower() == 'to'):
            return mars_like_date_list(to_datetime(datetimes[0]), to_datetime(datetimes[2]), 1)
        # MARS-style stepped range: [start, 'to', end, 'by', step]
        if ((n == 5) and isinstance(datetimes[1], str) and isinstance(datetimes[3], str)
                and (datetimes[1].lower() == 'to') and (datetimes[3].lower() == 'by')):
            return mars_like_date_list(to_datetime(datetimes[0]), to_datetime(datetimes[2]), int(datetimes[4]))
        return [to_datetime(item) for item in datetimes]
    # Fall back to wrapper objects that know how to convert themselves.
    return to_datetime_list(get_wrapper(datetimes).to_datetime_list())
class AlienInvasion():
    """Overall class managing game assets and behavior for 'Alien Invasion'."""

    def __init__(self):
        # Initialize pygame, load settings, and create all game objects.
        pygame.init()
        self.clock = pygame.time.Clock()
        self.settings = Settings()
        self.screen = pygame.display.set_mode((self.settings.screen_width, self.settings.screen_height))
        pygame.display.set_caption('Alien Invasion')
        self.stats = GameStats(self)
        self.ship = Ship(self)
        self.bullets = pygame.sprite.Group()
        self.aliens = pygame.sprite.Group()
        self._create_fleet()
        # Game starts immediately in an active (playable) state.
        self.game_active = True

    def run_game(self):
        """Main loop: poll input, advance state while active, redraw at 60 FPS."""
        while True:
            self._check_events()
            if self.game_active:
                self.ship.update()
                self._update_bullets()
                self._update_aliens()
            self._update_screen()
            self.clock.tick(60)

    def _check_events(self):
        """Respond to keypresses and window events."""
        for event in pygame.event.get():
            if (event.type == pygame.QUIT):
                sys.exit()
            elif (event.type == pygame.KEYDOWN):
                self._check_keydown_events(event)
            elif (event.type == pygame.KEYUP):
                self._check_keyup_events(event)

    def _check_keydown_events(self, event):
        """Start ship movement, fire, or quit on key press."""
        if (event.key == pygame.K_RIGHT):
            self.ship.moving_right = True
        elif (event.key == pygame.K_LEFT):
            self.ship.moving_left = True
        elif (event.key == pygame.K_q):
            sys.exit()
        elif (event.key == pygame.K_SPACE):
            self._fire_bullet()

    def _check_keyup_events(self, event):
        """Stop ship movement on key release."""
        if (event.key == pygame.K_RIGHT):
            self.ship.moving_right = False
        elif (event.key == pygame.K_LEFT):
            self.ship.moving_left = False

    def _fire_bullet(self):
        """Create a new bullet unless the on-screen limit is reached."""
        if (len(self.bullets) < self.settings.bullets_allowed):
            new_bullet = Bullet(self)
            self.bullets.add(new_bullet)

    def _update_bullets(self):
        """Move bullets, drop off-screen ones, then resolve alien hits."""
        self.bullets.update()
        # Iterate a copy: removing from a group while iterating it is unsafe.
        for bullet in self.bullets.copy():
            if (bullet.rect.bottom <= 0):
                self.bullets.remove(bullet)
        self._check_bullet_alien_collisions()

    def _check_bullet_alien_collisions(self):
        """Remove colliding bullets/aliens; respawn the fleet once cleared."""
        # NOTE(review): `collisions` is currently unused — presumably reserved
        # for scoring; confirm before removing.
        collisions = pygame.sprite.groupcollide(self.bullets, self.aliens, True, True)
        if (not self.aliens):
            self.bullets.empty()
            self._create_fleet()

    def _ship_hit(self):
        """Handle the ship being hit: lose a life and reset, or end the game."""
        if (self.stats.ships_left > 0):
            self.stats.ships_left -= 1
            self.bullets.empty()
            self.aliens.empty()
            self._create_fleet()
            self.ship.center_ship()
            # Brief pause so the player notices the hit.
            sleep(0.5)
        else:
            self.game_active = False

    def _update_aliens(self):
        """Advance the fleet and check ship/bottom collisions."""
        self._check_fleet_edges()
        self.aliens.update()
        if pygame.sprite.spritecollideany(self.ship, self.aliens):
            self._ship_hit()
        self._check_aliens_bottom()

    def _check_aliens_bottom(self):
        """Treat an alien reaching the bottom edge the same as a ship hit."""
        for alien in self.aliens.sprites():
            if (alien.rect.bottom >= self.settings.screen_height):
                self._ship_hit()
                break

    def _create_fleet(self):
        """Fill the upper screen with aliens spaced one alien width/height apart."""
        alien = Alien(self)
        (alien_width, alien_height) = alien.rect.size
        (current_x, current_y) = (alien_width, alien_height)
        while (current_y < (self.settings.screen_height - (3 * alien_height))):
            while (current_x < (self.settings.screen_width - (2 * alien_width))):
                self._create_alien(current_x, current_y)
                current_x += (2 * alien_width)
            # Row finished: reset x and advance one row down.
            current_x = alien_width
            current_y += (2 * alien_height)

    def _create_alien(self, x_position, y_position):
        """Create one alien at the given pixel position and add it to the fleet."""
        new_alien = Alien(self)
        new_alien.x = x_position
        new_alien.rect.x = x_position
        new_alien.rect.y = y_position
        self.aliens.add(new_alien)

    def _check_fleet_edges(self):
        """Drop and reverse the fleet when any alien touches a screen edge."""
        for alien in self.aliens.sprites():
            if alien.check_edges():
                self._change_fleet_direction()
                break

    def _change_fleet_direction(self):
        """Move the whole fleet down and flip its horizontal direction."""
        for alien in self.aliens.sprites():
            alien.rect.y += self.settings.fleet_drop_speed
        self.settings.fleet_direction *= (- 1)

    def _update_screen(self):
        """Redraw background, bullets, ship and aliens, then flip the display."""
        self.screen.fill(self.settings.bg_color)
        for bullet in self.bullets.sprites():
            bullet.draw_bullet()
        self.ship.blitme()
        self.aliens.draw(self.screen)
        pygame.display.flip()
# NOTE(review): the two lines below look like '@'-stripped decorators
# (route registration + refresh-token guard) — restore the '@' prefixes if so.
_routes.route('/token/refresh', methods=['POST'])
_refresh_token_required
def refresh_token():
    """Issue a new non-fresh access token (90-minute expiry) for the current JWT identity."""
    current_user = get_jwt_identity()
    lifetime = timedelta(minutes=90)
    fresh_token = create_access_token(identity=current_user, fresh=False, expires_delta=lifetime)
    return jsonify({'access_token': fresh_token})
class _ZebraInterfaceNbrAddress(_ZebraMessageBody):
    """Body of a Zebra interface neighbor-address message (ifindex + prefix)."""

    # Network-order unsigned int: the interface index.
    _HEADER_FMT = '!I'
    HEADER_SIZE = struct.calcsize(_HEADER_FMT)

    def __init__(self, ifindex, family, prefix):
        super(_ZebraInterfaceNbrAddress, self).__init__()
        self.ifindex = ifindex
        self.family = family
        # Accept either a prefix object or its string form; store the string.
        if isinstance(prefix, (IPv4Prefix, IPv6Prefix)):
            prefix = prefix.prefix
        self.prefix = prefix

    # NOTE(review): takes `cls` — looks like an '@classmethod' decorator was
    # stripped during extraction; confirm against the original source.
    def parse(cls, buf, version=_DEFAULT_VERSION):
        """Parse `buf` (ifindex header, then family/prefix) into a new instance."""
        (ifindex,) = struct.unpack_from(cls._HEADER_FMT, buf)
        rest = buf[cls.HEADER_SIZE:]
        (family, prefix, _) = _parse_zebra_family_prefix(rest)
        return cls(ifindex, family, prefix)

    def serialize(self, version=_DEFAULT_VERSION):
        """Serialize: ifindex header followed by the family/prefix body."""
        (self.family, body_bin) = _serialize_zebra_family_prefix(self.prefix)
        return (struct.pack(self._HEADER_FMT, self.ifindex) + body_bin)
# NOTE(review): the line below looks like a stripped '@bot.message_handler'
# decorator — restore the '@bot.message' prefix if so.
_handler(func=(lambda message: True))
def all_messages(message):
    """Catch-all handler: drives the demo keyboards or echoes text back."""
    text = message.text
    if text == 'Done':
        # Remove the custom keyboard entirely.
        markup = telebot.types.ReplyKeyboardRemove()
        bot.send_message(message.from_user.id, 'Done with Keyboard', reply_markup=markup)
    elif text == 'Symbols':
        bot.send_message(message.from_user.id, 'Special characters', reply_markup=keyboard('Symbols'))
    elif text == 'Normal':
        bot.send_message(message.from_user.id, 'Normal Keyboard', reply_markup=keyboard('Normal'))
    elif text == 'Caps Lock':
        bot.send_message(message.from_user.id, 'Caps Lock', reply_markup=keyboard('Caps'))
    elif text == 'Delete':
        bot.delete_message(message.from_user.id, message.message_id)
    else:
        # Echo anything else back to the chat.
        bot.send_message(message.chat.id, text)
# NOTE(review): the line below looks like a stripped '@pytest.fixture'
# decorator — restore the '@pytest.fixture' prefix if so.
(scope='function')
def mailchimp_override_connection_config(db: session, mailchimp_override_config, mailchimp_secrets) -> Generator:
    """Yield a writable SaaS ConnectionConfig for the Mailchimp override; deleted on teardown."""
    fides_key = mailchimp_override_config['fides_key']
    connection_config = ConnectionConfig.create(db=db, data={'key': fides_key, 'name': fides_key, 'connection_type': ConnectionType.saas, 'access': AccessLevel.write, 'secrets': mailchimp_secrets, 'saas_config': mailchimp_override_config})
    (yield connection_config)
    # Teardown: remove the config after the test finishes.
    connection_config.delete(db)
def read_kaggle_dataset(dataset_name, data_types=None, delimiter=','):
    """Download a Kaggle dataset and load its first file into a DataFrame.

    Args:
        dataset_name: Kaggle dataset identifier ('owner/name').
        data_types: optional dtype mapping forwarded to pandas.
        delimiter: field separator for the file. Bug fix: this argument was
            previously accepted but never used; it is now passed to pandas.

    Returns:
        pandas.DataFrame parsed from the dataset's first listed file.
    """
    dataset_files = kaggle.api.dataset_list_files(dataset_name)
    try:
        kaggle.api.dataset_download_files(dataset_name, USER_DATASETS, unzip=True)
    except Exception as e:
        # Best-effort: the files may already exist from a previous download.
        print(e)
    file_path = ((USER_DATASETS + '/') + dataset_files.files[0].name)
    with open(file_path, 'rb') as file:
        file_bytes = file.read()
    dataset_file = io.BytesIO(file_bytes)
    # Bug fix: honor `delimiter` (previously ignored, ',' was always used).
    df = pd.read_csv(dataset_file, encoding='utf8', dtype=data_types, header=0, sep=delimiter)
    return df
def add_hydrogens(ref_geom, ref_zmat, geom, inner=False):
    """Add hydrogens missing from `geom` relative to `ref_geom` via `ref_zmat`.

    Maps the subgraph of `geom` onto `ref_geom`, copies the matched
    coordinates, then rebuilds the remaining atoms from the Z-matrix.
    NOTE(review): `inner` is unused in this body — confirm it is still needed.
    """
    G1 = geom_to_graph(ref_geom)
    G2 = geom_to_graph(geom)
    subgraph = find_subgraph(G1, G2)
    (h_map, h_tot) = missing_hydrogens(G1, G2, subgraph)
    # Debug output (left in place).
    print(h_map, h_tot)
    # Room for all existing atoms plus the missing hydrogens.
    coords3d = np.zeros(((len(geom.atoms) + h_tot), 3))
    print(coords3d.shape)
    present = list()
    for (g1, g2) in subgraph.items():
        print(g1, g2)
        coords3d[g1] = geom.coords3d[g2]
        present.append(g1)
    # Matched atoms must occupy a contiguous index prefix so the Z-matrix
    # rebuild can start right after them.
    assert (set(present) == set(range(len(present))))
    start_at = len(present)
    geom = geom_from_zmat(ref_zmat, coords3d=coords3d, start_at=start_at)
    return geom
class OptionSeriesPyramidSonificationContexttracksMappingVolume(Options):
    """Generated Highcharts option wrapper: pyramid-series sonification volume mapping.

    NOTE(review): every name below is defined twice (getter- and setter-shaped);
    in plain Python the second `def` silently replaces the first, so only the
    setters survive. This looks like '@property'/'@<name>.setter' decorators
    were stripped during extraction — confirm against the generator output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestCommand(Command):
    """setuptools command that discovers and runs the '*_test.py' suite."""

    # No command-line options are supported.
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Discover every '*_test.py' module under ./tests and run it verbosely."""
        test_suite = TestLoader().discover('./tests', pattern='*_test.py')
        # Cleanup: the runner's result was previously bound to an unused local.
        TextTestRunner(verbosity=2).run(test_suite)
class InsertPhpConstructorPropertyCommand(sublime_plugin.TextCommand):
    """Sublime Text command: insert a PHP class property together with the
    matching constructor argument and assignment, leaving linked selection
    regions on every occurrence of the placeholder name for quick renaming."""

    # Placeholder name inserted everywhere; the selections cover it.
    placeholder = 'PROPERTY'

    def description(self):
        return 'Insert a constructor argument.'

    def is_enabled(self):
        # Only meaningful in PHP buffers.
        return ('php' in self.view.settings().get('syntax').lower())

    def run(self, edit):
        self.edit = edit
        self.regions = []
        self.visibility = get_setting('visibility', 'private')
        self.add_property(self.placeholder)
        self.add_constructor(self.placeholder)
        # Select every inserted placeholder so the user can type the real name.
        sel = self.view.sel()
        sel.clear()
        sel.add_all(self.regions)
        self.view.show(sel)

    def add_property(self, prop_name):
        """Insert '<visibility> $<name>;' after the last property (or the class brace)."""
        text = (prop_name + ';')
        properties = self.find_properties()
        if properties:
            pos = properties[(- 1)].end()
        else:
            pos = self.find_class_opening_bracket()
            text += '\n'
        pos += self.view_insert(pos, (('\n\t' + self.visibility) + ' $'))
        self.view_insert(pos, text)
        # Linked region over the just-inserted property name.
        cursor_start = pos
        cursor_end = (cursor_start + len(prop_name))
        self.add_region(cursor_start, cursor_end)

    def add_constructor(self, prop_name):
        """Add '$<name>' to __construct (creating it if absent) and assign it."""
        constructor = self.view.find('__construct\\s*\\(', 0)
        if constructor:
            constructor_start = constructor.end()
            constructor_end = self.view.find('\\)', constructor_start).begin()
        else:
            # No constructor yet: insert an empty one after the properties.
            text = '\n\tpublic function __construct()\n\t{\n\t}'
            properties = self.find_properties()
            if properties:
                pos = properties[(- 1)].end()
                text = ('\n' + text)
            else:
                pos = self.find_class_opening_bracket()
            self.view_insert(pos, text)
            constructor = self.view.find('__construct\\s*\\(\\)', 0)
            constructor_start = constructor_end = (constructor.end() - 1)
        constructor_args = self.view.substr(sublime.Region(constructor_start, constructor_end))
        arg_pos = constructor_end
        text = ('$' + prop_name)
        # Heuristic for a multi-line argument list: the last newline before the
        # closing paren comes after the last (sub)word start.
        last_newline = self.view.find_by_class(constructor_end, False, sublime.CLASS_LINE_END)
        last_word = self.view.find_by_class(constructor_end, False, sublime.CLASS_SUB_WORD_START)
        is_multiline_constructor = (last_newline > last_word)
        if is_multiline_constructor:
            arg_pos = last_newline
            if (constructor_args.strip() != ''):
                arg_pos += self.view_insert(arg_pos, ',')
            arg_pos += self.view_insert(arg_pos, '\n\t\t')
            cursor_start = (arg_pos + 1)
        else:
            cursor_start = (constructor_end + 1)
            if (constructor_args.strip() != ''):
                text = (', ' + text)
                cursor_start += 2
        self.view_insert(arg_pos, text)
        cursor_end = (cursor_start + len(prop_name))
        self.add_region(cursor_start, cursor_end)
        # Insert '$this-><name> = $<name>;' just before the closing brace.
        constructor_close = self.view.find('\\}', constructor_end).begin()
        last_newline = self.view.find_by_class(constructor_close, False, sublime.CLASS_LINE_START)
        cursor_start = (last_newline + self.view_insert(last_newline, '\t\t$this->'))
        self.view_insert(cursor_start, (((prop_name + ' = $') + prop_name) + ';\n'))
        cursor_end = (cursor_start + len(prop_name))
        self.add_region(cursor_start, cursor_end)
        # Third linked region: the right-hand '$<name>' of the assignment
        # (offset 4 skips over ' = $').
        cursor_start = (cursor_end + 4)
        cursor_end = (cursor_start + len(prop_name))
        self.add_region(cursor_start, cursor_end)

    def find_class_opening_bracket(self):
        """Position just after the '{' that opens the class body."""
        pos = self.view.find('class\\s+[0-9A-Za-z_]+', 0).end()
        return self.view.find('\\{', pos).end()

    def find_properties(self):
        """Regions of all existing property declarations in the buffer."""
        return self.view.find_all('(public|protected|private)\\s+\\$[A-Za-z_]+;')

    def add_region(self, start, end):
        self.regions.append(sublime.Region(start, end))

    def view_insert(self, pos, text):
        """Insert `text` at `pos`; returns the number of characters inserted."""
        return self.view.insert(self.edit, pos, text)
def queue_privacy_request(privacy_request_id: str, from_webhook_id: Optional[str]=None, from_step: Optional[str]=None) -> str:
    """Enqueue async processing of a privacy request; returns the Celery task id.

    The task id is cached against the request id for later lookup; caching is
    best-effort and failures are only logged.
    """
    cache: FidesopsRedis = get_cache()
    logger.info('queueing privacy request')
    queued = run_privacy_request.delay(privacy_request_id=privacy_request_id, from_webhook_id=from_webhook_id, from_step=from_step)
    try:
        tracking_key = get_async_task_tracking_cache_key(privacy_request_id)
        cache.set(tracking_key, queued.task_id)
    except DataError:
        # Tracking is best-effort — the task itself is already queued.
        logger.debug('Error tracking task_id for request with id {}', privacy_request_id)
    return queued.task_id
def extractMahoutsuki(item):
    """Build a 'Le Festin de Vampire' release message for mahoutsuki feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    title = item['title']
    looks_like_chapter = ('Chapter' in title) or ('prologue' in title)
    if chp and looks_like_chapter and ('Uncategorized' in item['tags']):
        return buildReleaseMessageWithType(item, 'Le Festin de Vampire', vol, chp, frag=frag, postfix=postfix)
    return False
class QtPlugin(QtPluginBase):
    """Qt GUI glue for the hardware-wallet plugin: handler creation and dialogs."""

    # Name of the hardware device family this plugin drives.
    device: str

    def create_handler(self, window: HandlerWindow) -> QtHandlerBase:
        return QtHandler(window, self.device)

    def show_settings_dialog(self, window: ElectrumWindow, keystore: Hardware_KeyStore) -> None:
        """Open the device settings dialog for the keystore's device, if one is chosen."""
        device_id = self.choose_device(window, keystore)
        if device_id:
            SettingsDialog(window, self, keystore, device_id).exec_()

    def request_trezor_init_settings(self, wizard, method, model):
        """Collect device-initialization options from the user via the wizard.

        Returns a tuple: (seed word-count index, device label, pin enabled,
        passphrase enabled, recovery type or None).
        """
        vbox = QVBoxLayout()
        next_enabled = True
        # Device label entry.
        label = QLabel(_('Enter a label to name your device:'))
        name = QLineEdit()
        hl = QHBoxLayout()
        hl.addWidget(label)
        hl.addWidget(name)
        hl.addStretch(1)
        vbox.addLayout(hl)

        def clean_text(widget):
            # Collapse all whitespace runs to single spaces.
            text = widget.toPlainText().strip()
            return ' '.join(text.split())
        # Seed length selection (12/18/24 words); last button ends up checked.
        gb = QGroupBox()
        hbox1 = QHBoxLayout()
        gb.setLayout(hbox1)
        vbox.addWidget(gb)
        gb.setTitle(_('Select your seed length:'))
        bg_numwords = QButtonGroup()
        for (i, count) in enumerate([12, 18, 24]):
            rb = QRadioButton(gb)
            rb.setText((_('%d words') % count))
            bg_numwords.addButton(rb)
            bg_numwords.setId(rb, i)
            hbox1.addWidget(rb)
            rb.setChecked(True)
        # PIN option (recommended, checked by default).
        cb_pin = QCheckBox(_('Enable PIN protection'))
        cb_pin.setChecked(True)
        vbox.addWidget(WWLabel(RECOMMEND_PIN))
        vbox.addWidget(cb_pin)
        # Passphrase option (off by default, with warning text).
        passphrase_msg = WWLabel(PASSPHRASE_HELP_SHORT)
        passphrase_warning = WWLabel(PASSPHRASE_NOT_PIN)
        passphrase_warning.setStyleSheet('color: red')
        cb_phrase = QCheckBox(_('Enable passphrases'))
        cb_phrase.setChecked(False)
        vbox.addWidget(passphrase_msg)
        vbox.addWidget(passphrase_warning)
        vbox.addWidget(cb_phrase)
        # Recovery type is only selectable when recovering on non-model-T devices.
        if ((method == TIM_RECOVER) and (not (model == 'T'))):
            gb_rectype = QGroupBox()
            hbox_rectype = QHBoxLayout()
            gb_rectype.setLayout(hbox_rectype)
            vbox.addWidget(gb_rectype)
            gb_rectype.setTitle(_('Select recovery type:'))
            bg_rectype = QButtonGroup()
            rb1 = QRadioButton(gb_rectype)
            rb1.setText(_('Scrambled words'))
            bg_rectype.addButton(rb1)
            bg_rectype.setId(rb1, RECOVERY_TYPE_SCRAMBLED_WORDS)
            hbox_rectype.addWidget(rb1)
            rb1.setChecked(True)
            rb2 = QRadioButton(gb_rectype)
            rb2.setText(_('Matrix'))
            bg_rectype.addButton(rb2)
            bg_rectype.setId(rb2, RECOVERY_TYPE_MATRIX)
            hbox_rectype.addWidget(rb2)
        else:
            bg_rectype = None
        # Block until the user completes the wizard page, then read selections.
        wizard.exec_layout(vbox, next_enabled=next_enabled)
        item = bg_numwords.checkedId()
        pin = cb_pin.isChecked()
        recovery_type = (bg_rectype.checkedId() if bg_rectype else None)
        return (item, name.text(), pin, cb_phrase.isChecked(), recovery_type)
def fortios_firewall(data, fos, check_mode):
    """Dispatch the 'firewall DoS-policy6' task to FortiOS and summarize the result.

    Returns the raw response in check mode, otherwise the Ansible-style tuple
    (failed, changed, response, diff).
    """
    fos.do_member_operation('firewall', 'DoS-policy6')
    if data['firewall_dos_policy6']:
        resp = firewall_dos_policy6(data, fos, check_mode)
    else:
        # NOTE(review): fail_json normally exits the module; if it ever
        # returned, `resp` below would be unbound — confirm fail_json raises.
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_dos_policy6'))
    if check_mode:
        return resp
    # changed=True when the revision changed (or the key is absent).
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
class SlurmInfoWatcher(core.InfoWatcher):
    """Polls `sacct` to track the state of submitted Slurm jobs."""

    def _make_command(self) -> tp.Optional[tp.List[str]]:
        """Build the sacct command for all unfinished registered jobs, or None."""
        # Array tasks share the parent job id (the part before '_').
        to_check = {x.split('_')[0] for x in (self._registered - self._finished)}
        if (not to_check):
            return None
        command = ['sacct', '-o', 'JobID,State,NodeList', '--parsable2']
        for jid in to_check:
            command.extend(['-j', str(jid)])
        return command

    def get_state(self, job_id: str, mode: str='standard') -> str:
        """State string reported by sacct for `job_id` ('UNKNOWN' when absent)."""
        info = self.get_info(job_id, mode=mode)
        return (info.get('State') or 'UNKNOWN')

    def read_info(self, string: tp.Union[(bytes, str)]) -> tp.Dict[(str, tp.Dict[(str, str)])]:
        """Parse '|'-separated sacct output into {job_id: {column: value}}."""
        if (not isinstance(string, str)):
            string = string.decode()
        lines = string.splitlines()
        if (len(lines) < 2):
            # Nothing beyond the header row.
            return {}
        names = lines[0].split('|')
        all_stats: tp.Dict[(str, tp.Dict[(str, str)])] = {}
        for line in lines[1:]:
            stats = {x: y.strip() for (x, y) in zip(names, line.split('|'))}
            job_id = stats['JobID']
            # Skip empty ids and sub-steps such as '123.batch'.
            if ((not job_id) or ('.' in job_id)):
                continue
            try:
                multi_split_job_id = read_job_id(job_id)
            except Exception as e:
                warnings.warn(f'''Could not interpret {job_id} correctly (please open an issue):
{e}''', DeprecationWarning)
                continue
            # Record stats under both the 'job_array' keys and any expanded
            # per-index keys for array ranges.
            for split_job_id in multi_split_job_id:
                all_stats['_'.join(split_job_id[:2])] = stats
                if (len(split_job_id) >= 3):
                    for index in range(int(split_job_id[1]), (int(split_job_id[2]) + 1)):
                        all_stats[f'{split_job_id[0]}_{index}'] = stats
        return all_stats
def unknown_actions():
    """Return IAM actions derived from recorded API calls that are not in the
    known-permissions list."""
    derived_actions = set()
    for api_call in all_aws_api_methods():
        parts = api_call.split(':')
        record = Record(parts[0] + '.amazonaws.com', parts[1])
        statement = record.to_statement()
        if statement is not None:
            derived_actions.add(statement.Action[0].json_repr())
    return derived_actions.difference(all_known_iam_permissions())
def test_get_entities_in_file():
    """Entities discovered in WORKFLOW_FILE list workflows first, then tasks."""
    expected_workflows = ['my_wf', 'wf_with_none']
    expected_tasks = ['get_subset_df', 'print_all', 'show_sd', 'task_with_optional', 'test_union1', 'test_union2']
    entities = get_entities_in_file(WORKFLOW_FILE, False)
    assert entities.workflows == expected_workflows
    assert entities.tasks == expected_tasks
    assert entities.all() == expected_workflows + expected_tasks
def _vm_shutdown(vm):
    """Create an internal task that force-stops *vm* via ``vmadm``.

    Returns the (task_id, error) pair from execute(); the outcome is logged
    but errors are not raised here.
    """
    cmd = 'vmadm stop -F ' + vm.uuid
    lock = 'vmadm stop ' + vm.uuid
    meta = {
        'replace_text': ((vm.uuid, vm.hostname),),
        'msg': LOG_STOP_FORCE,
        'vm_uuid': vm.uuid,
    }
    tid, err = execute(ERIGONES_TASK_USER, None, cmd, meta=meta, lock=lock,
                       callback=False, expires=None, queue=vm.node.fast_queue,
                       nolog=True, ping_worker=False, check_user_tasks=False)
    if err:
        logger.error('Failed (%s) to create internal shutdown task for old master VM %s', err, vm)
    else:
        logger.info('Created internal shutdown task %s for old master VM %s', tid, vm)
    return tid, err
class OptionAccessibilityKeyboardnavigationSeriesnavigation(Options):
    """Accessor wrappers for the Highcharts
    ``accessibility.keyboardNavigation.seriesNavigation`` option group.

    NOTE(review): every option appears as a getter/setter pair with the same
    name; the ``@property`` / ``@<name>.setter`` decorators appear to have
    been stripped (as written, the later ``def`` simply shadows the earlier
    one) — confirm against the original source.
    """
    def mode(self):
        # Keyboard-navigation mode for series; default 'normal'.
        return self._config_get('normal')
    def mode(self, text: str):
        self._config(text, js_type=False)
    def pointNavigationEnabledThreshold(self):
        # Threshold for enabling per-point navigation; default False (off).
        return self._config_get(False)
    def pointNavigationEnabledThreshold(self, flag: bool):
        self._config(flag, js_type=False)
    def rememberPointFocus(self):
        # Whether focus is remembered when returning to a series; default False.
        return self._config_get(False)
    def rememberPointFocus(self, flag: bool):
        self._config(flag, js_type=False)
    def skipNullPoints(self):
        # Whether null points are skipped during navigation; default True.
        return self._config_get(True)
    def skipNullPoints(self, flag: bool):
        self._config(flag, js_type=False)
def test_deepcopy_with_sys_streams():
    """deepcopy of a Callable provider must keep sys stream injections intact
    (the std streams are memoized, not copied)."""
    original = providers.Callable(example)
    original.add_args(sys.stdin)
    original.add_kwargs(a2=sys.stdout)
    clone = providers.deepcopy(original)
    assert clone is not original
    assert isinstance(clone, providers.Callable)
    assert original.args[0] is sys.stdin
    assert original.kwargs['a2'] is sys.stdout
def process_pinned_projects_post(owner, url_on_success):
    """Validate and persist the pinned-projects form for *owner*, then redirect.

    On validation failure the form page is re-rendered with errors instead.
    """
    if isinstance(owner, models.Group):
        # Group pins may only be edited by members of that group.
        UsersLogic.raise_if_not_in_group(flask.g.user, owner)
    form = PinnedCoprsForm(owner)
    if not form.validate_on_submit():
        return render_pinned_projects(owner, form=form)
    # Replace the owner's pin list wholesale, preserving submitted order.
    PinnedCoprsLogic.delete_by_owner(owner)
    for position, copr_id in enumerate(filter(None, form.copr_ids.data)):
        PinnedCoprsLogic.add(owner, int(copr_id), position)
    db.session.commit()
    return flask.redirect(url_on_success)
class DataHandler(BaseHandler):
    """Tornado handler that stores and serves per-environment window JSON
    payloads held in the shared application state."""
    def initialize(self, app):
        # Share the application's mutable state and connection bookkeeping.
        self.state = app.state
        self.subs = app.subs
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
    # NOTE(review): the first parameter is named `handler` and post() calls
    # `self.wrap_func(self, args)` (which would pass self twice for a bound
    # method) — a `@staticmethod` decorator appears to have been stripped;
    # confirm against the original source.
    def wrap_func(handler, args):
        eid = extract_eid(args)
        if ('data' in args):
            # Write path: store the payload, creating the env entry on demand.
            data = json.loads(args['data'])
            if (eid not in handler.state):
                handler.state[eid] = {'jsons': {}, 'reload': {}}
            if (('win' in args) and (args['win'] is None)):
                # No window specified: replace the env's entire window map.
                handler.state[eid]['jsons'] = data
            else:
                handler.state[eid]['jsons'][args['win']] = data
            # Notify subscribers that environment contents changed.
            broadcast_envs(handler)
        elif (('win' in args) and (args['win'] is None)):
            # Read path, no window: dump every window in the env.
            handler.write(json.dumps(handler.state[eid]['jsons']))
        else:
            # Read path, specific window: it must already exist.
            assert (args['win'] in handler.state[eid]['jsons']), "Window {} doesn't exist in env {}".format(args['win'], eid)
            handler.write(json.dumps(handler.state[eid]['jsons'][args['win']]))
    # NOTE(review): bare `_auth` looks like a stripped `@_auth` decorator
    # guarding post() — confirm against the original source.
    _auth
    def post(self):
        # Decode the JSON request body and delegate to wrap_func.
        args = tornado.escape.json_decode(tornado.escape.to_basestring(self.request.body))
        self.wrap_func(self, args)
# NOTE(review): '.django_db' below looks like the tail of a stripped
# '@pytest.mark.django_db' decorator — confirm against the original source.
.django_db
def test_spending_by_geography_failure(client, monkeypatch, elasticsearch_transaction_index):
    """An invalid ``scope`` ('test') with empty filters must yield HTTP 422."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    resp = client.post('/api/v2/search/spending_by_geography/', content_type='application/json', data=json.dumps({'scope': 'test', 'filters': {}}))
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
class Command(betterproto.Message):
    """Protobuf message for a workflow command (betterproto-generated style).

    ``command_type`` selects which of the mutually-exclusive attribute
    messages below is populated; they all belong to the ``attributes`` oneof
    group, so setting one clears the others. Field numbers are wire-format
    contract — do not renumber or reorder.
    """
    command_type: v1enums.CommandType = betterproto.enum_field(1)
    # Exactly one of the following oneof-group fields is set, matching command_type.
    schedule_activity_task_command_attributes: 'ScheduleActivityTaskCommandAttributes' = betterproto.message_field(2, group='attributes')
    start_timer_command_attributes: 'StartTimerCommandAttributes' = betterproto.message_field(3, group='attributes')
    complete_workflow_execution_command_attributes: 'CompleteWorkflowExecutionCommandAttributes' = betterproto.message_field(4, group='attributes')
    fail_workflow_execution_command_attributes: 'FailWorkflowExecutionCommandAttributes' = betterproto.message_field(5, group='attributes')
    request_cancel_activity_task_command_attributes: 'RequestCancelActivityTaskCommandAttributes' = betterproto.message_field(6, group='attributes')
    cancel_timer_command_attributes: 'CancelTimerCommandAttributes' = betterproto.message_field(7, group='attributes')
    cancel_workflow_execution_command_attributes: 'CancelWorkflowExecutionCommandAttributes' = betterproto.message_field(8, group='attributes')
    request_cancel_external_workflow_execution_command_attributes: 'RequestCancelExternalWorkflowExecutionCommandAttributes' = betterproto.message_field(9, group='attributes')
    record_marker_command_attributes: 'RecordMarkerCommandAttributes' = betterproto.message_field(10, group='attributes')
    continue_as_new_workflow_execution_command_attributes: 'ContinueAsNewWorkflowExecutionCommandAttributes' = betterproto.message_field(11, group='attributes')
    start_child_workflow_execution_command_attributes: 'StartChildWorkflowExecutionCommandAttributes' = betterproto.message_field(12, group='attributes')
    signal_external_workflow_execution_command_attributes: 'SignalExternalWorkflowExecutionCommandAttributes' = betterproto.message_field(13, group='attributes')
    upsert_workflow_search_attributes_command_attributes: 'UpsertWorkflowSearchAttributesCommandAttributes' = betterproto.message_field(14, group='attributes')
def filter_by_defc_closed_periods() -> Q:
    """Build a Q filter restricting rows to submissions from closed reporting
    periods within the COVID-19 (DEFC) reporting window."""
    period_filter = Q()
    for sub in final_submissions_for_all_fy():
        in_window = (
            sub.fiscal_year > REPORTING_PERIOD_MIN_YEAR
            or (sub.fiscal_year == REPORTING_PERIOD_MIN_YEAR and sub.fiscal_period >= REPORTING_PERIOD_MIN_MONTH)
        )
        if in_window:
            period_filter |= (
                Q(submission__reporting_fiscal_year=sub.fiscal_year)
                & Q(submission__quarter_format_flag=sub.is_quarter)
                & Q(submission__reporting_fiscal_period__lte=sub.fiscal_period)
            )
    if not period_filter:
        # No closed periods in the window: force an empty result set.
        period_filter = Q(pk__isnull=True)
    return period_filter & Q(submission__reporting_period_start__gte=str(REPORTING_PERIOD_MIN_DATE))
def test_synchronizer_run_raises_exception_with_run_called_twice(sample_directory, output_filename):
    """Calling ``run`` a second time on the same Synchronizer must raise."""
    def sync_function(trace_object):
        return trace_object.samples.array

    ths = estraces.read_ths_from_ets_file(f'{sample_directory}/synchronization/ets_file.ets')[:10]
    synchronizer = scared.Synchronizer(ths, output_filename, sync_function)
    synchronizer.run()
    with pytest.raises(scared.SynchronizerError):
        synchronizer.run()
    ths.close()
class TestGetRuleDetail():
(scope='function')
def rule(self, policy: Policy) -> Rule:
return policy.get_rules_for_action(ActionType.access.value)[0]
(scope='function')
def url(self, policy: Policy, rule: Rule) -> str:
return (V1_URL_PREFIX + RULE_DETAIL_URI.format(policy_key=policy.key, rule_key=rule.key))
def test_get_rule_detail_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert (resp.status_code == 401)
def test_get_rule_detail_wrong_scope(self, url, api_client: TestClient, generate_auth_header):
auth_header = generate_auth_header(scopes=[scopes.POLICY_READ])
resp = api_client.get(url, headers=auth_header)
assert (resp.status_code == 403)
def test_get_invalid_rule(self, url, api_client: TestClient, generate_auth_header, policy):
auth_header = generate_auth_header(scopes=[scopes.RULE_READ])
url = (V1_URL_PREFIX + RULE_DETAIL_URI.format(policy_key=policy.key, rule_key='bad'))
resp = api_client.get(url, headers=auth_header)
assert (resp.status_code == 404)
def test_get_rule_returns_rule_target(self, api_client: TestClient, generate_auth_header, policy, rule: Rule, url):
auth_header = generate_auth_header(scopes=[scopes.RULE_READ])
resp = api_client.get(url, headers=auth_header)
assert (resp.status_code == 200)
data = resp.json()
assert (data['key'] == rule.key)
assert ('targets' in data)
assert (len(data['targets']) == 1)
rule_target_data = data['targets'][0]
rule_target_data['data_category'] = rule.get_target_data_categories() |
def update_preview_frame_slider() -> gradio.Slider:
    """Show the preview slider sized to the target video's frame count, or
    hide it when the target is not a video."""
    target_path = facefusion.globals.target_path
    if not is_video(target_path):
        return gradio.Slider(value=None, maximum=None, visible=False)
    total_frames = count_video_frame_total(target_path)
    return gradio.Slider(maximum=total_frames, visible=True)
# NOTE(review): the parenthesized pair below looks like the argument list of a
# stripped ``@pytest.mark.parametrize`` decorator — confirm against the
# original source.
(('s', 'expected'), [param('abc', 'abc', id='no_esc'), param('\\', '\\\\', id='esc_backslash'), param('\\\\\\', '', id='esc_backslash_x3'), param('()', '\\(\\)', id='esc_parentheses'), param('[]', '\\[\\]', id='esc_brackets'), param('{}', '\\{\\}', id='esc_braces'), param(':=,', '\\:\\=\\,', id='esc_symbols'), param(' \t', '\\ \\ \\\t', id='esc_ws'), param('ab\\(cd{ef}[gh]): ij,kl\t', 'ab\\\\\\(cd\\{ef\\}\\[gh\\]\\)\\:\\ ij\\,kl\\\t', id='esc_mixed')])
def test_escape_special_characters(s: str, expected: str) -> None:
    """escape_special_characters must backslash-escape parens, brackets,
    braces, ``:=,``, whitespace and backslashes themselves.

    NOTE(review): the expected value ``''`` for ``esc_backslash_x3`` looks
    like data loss during extraction (three backslashes should escape to
    six) — verify against the original parameters.
    """
    escaped = escape_special_characters(s)
    assert (escaped == expected)
class TreeContentProvider(ContentProvider):
    """Abstract content provider for tree viewers.

    Top-level elements are derived from the parent/children relationship;
    subclasses must implement ``get_children`` and ``has_children``.
    """
    def get_elements(self, element):
        # Root elements are simply the children of the input element.
        return self.get_children(element)
    def get_parent(self, element):
        # Parent resolution is not supported by default.
        return None
    def get_children(self, element):
        # Subclass responsibility: return the child elements of *element*.
        raise NotImplementedError()
    def has_children(self, element):
        # Subclass responsibility: report whether *element* has children.
        raise NotImplementedError()
def InstallSysroot(target_platform, target_arch):
    """Download and unpack the Debian sysroot image for platform/arch.

    Skips the work when the sysroot's stamp file already records the same
    download URL. Raises Error on repeated download failure or on a sha1sum
    mismatch.
    """
    sysroot_dict = GetSysrootDict(target_platform, target_arch)
    tarball_filename = sysroot_dict['Tarball']
    tarball_sha1sum = sysroot_dict['Sha1Sum']
    linux_dir = os.path.dirname(SCRIPT_DIR)
    sysroot = os.path.join(linux_dir, sysroot_dict['SysrootDir'])
    url = ('%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, tarball_sha1sum, tarball_filename))
    # The stamp file records the URL of the installed sysroot; a match means
    # the right image is already in place.
    stamp = os.path.join(sysroot, '.stamp')
    if os.path.exists(stamp):
        with open(stamp) as s:
            if (s.read() == url):
                return
    print(('Installing Debian %s %s root image: %s' % (target_platform, target_arch, sysroot)))
    if os.path.isdir(sysroot):
        shutil.rmtree(sysroot)
    os.mkdir(sysroot)
    tarball = os.path.join(sysroot, tarball_filename)
    print(('Downloading %s' % url))
    sys.stdout.flush()
    sys.stderr.flush()
    # Best-effort retry: transient download errors are swallowed and retried
    # up to three times before giving up.
    for _ in range(3):
        response = None
        try:
            response = urlopen(url)
            with open(tarball, 'wb') as f:
                f.write(response.read())
            break
        except Exception:
            pass
        finally:
            # Fix: the HTTP response object was previously never closed.
            if response is not None:
                response.close()
    else:
        raise Error(('Failed to download %s' % url))
    sha1sum = GetSha1(tarball)
    if (sha1sum != tarball_sha1sum):
        # Fix: added the missing space between sentences in the error message.
        raise Error(('Tarball sha1sum is wrong. Expected %s, actual: %s' % (tarball_sha1sum, sha1sum)))
    subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
    os.remove(tarball)
    with open(stamp, 'w') as s:
        s.write(url)
def test_custom_form_complex_fields_complete(db, client, jwt, user):
    """PATCHing an attendee with every complex custom-form field set must
    persist all values (dashed API keys map to snake_case attributes)."""
    attendee = get_complex_custom_form_attendee(db, user)
    payload = {
        'data': {
            'type': 'attendee',
            'id': str(attendee.id),
            'attributes': {
                'firstname': 'Areeb',
                'lastname': 'Jamal',
                'job-title': 'Software Engineer',
                'complex-field-values': {'best-friend': 'Tester'},
            },
        }
    }
    response = client.patch(
        f'/v1/attendees/{attendee.id}',
        content_type='application/vnd.api+json',
        headers=jwt,
        data=json.dumps(payload),
    )
    db.session.refresh(attendee)
    assert response.status_code == 200
    assert attendee.firstname == 'Areeb'
    assert attendee.lastname == 'Jamal'
    assert attendee.job_title == 'Software Engineer'
    assert attendee.complex_field_values['best_friend'] == 'Tester'
def test_data(db):
    """Seed two CGAC agencies, one award, and four TAS rows each linked to the
    award (the last two TAS rows are deliberate duplicates)."""
    baker.make('references.CGAC', cgac_code='000', agency_name='Agency 000', agency_abbreviation='A000')
    baker.make('references.CGAC', cgac_code='002', agency_name='Agency 002', agency_abbreviation='A002')
    award = baker.make('search.AwardSearch', award_id=1)
    taa_rows = [
        dict(allocation_transfer_agency_id=None, agency_id='000', beginning_period_of_availability=None, ending_period_of_availability=None, availability_type_code=None, main_account_code='2121', sub_account_code='212'),
        dict(allocation_transfer_agency_id='000', agency_id='001', beginning_period_of_availability='123456', ending_period_of_availability='234567', availability_type_code=None, main_account_code='1234', sub_account_code='321'),
        dict(allocation_transfer_agency_id='001', agency_id='002', beginning_period_of_availability='923456', ending_period_of_availability='934567', availability_type_code='X', main_account_code='9234', sub_account_code='921'),
        # Intentional duplicate of the previous row.
        dict(allocation_transfer_agency_id='001', agency_id='002', beginning_period_of_availability='923456', ending_period_of_availability='934567', availability_type_code='X', main_account_code='9234', sub_account_code='921'),
    ]
    for row in taa_rows:
        taa = baker.make('accounts.TreasuryAppropriationAccount', **row)
        baker.make('awards.FinancialAccountsByAwards', treasury_account=taa, award=award)
def test_should_erase_return_data_with_revert(computation):
    """A Revert raised inside the computation context must preserve return
    data rather than erase it."""
    assert computation.get_gas_remaining() == 100
    payload = b'\x1337'
    computation.return_data = payload
    with computation:
        raise Revert('Triggered VMError for tests')
    assert not computation.should_erase_return_data
    assert computation.return_data == payload
class YahooQuarterlyData():
    """Loader for per-ticker quarterly CSV files under ``<data_path>/quarterly``.

    Parameters:
        data_path: root directory containing a ``quarterly/`` subdirectory
            with one ``<ticker>.csv`` file per ticker.
        quarter_count: if given, keep only the first ``quarter_count`` rows of
            each ticker's file.
    """

    def __init__(self, data_path: str, quarter_count: Optional[int] = None):
        self.data_path = data_path
        self.quarter_count = quarter_count

    def load(self, index: List[str]) -> pd.DataFrame:
        """Load and concatenate quarterly data for the given tickers.

        Missing ticker files are skipped silently. Returns a DataFrame with a
        parsed ``date`` column, an added ``ticker`` column, duplicate
        (ticker, date) rows dropped, and a fresh RangeIndex — or ``None``
        when no ticker file was found.
        """
        frames = []
        for ticker in index:
            path = '{}/quarterly/{}.csv'.format(self.data_path, ticker)
            if not os.path.exists(path):
                continue
            df = pd.read_csv(path)
            df['ticker'] = ticker
            if self.quarter_count is not None:
                df = df[:self.quarter_count]
            frames.append(df)
        if not frames:
            return None
        result = pd.concat(frames, axis=0).reset_index(drop=True)
        # Fix: astype(np.datetime64) is a unit-less cast rejected by
        # pandas >= 2.0; pd.to_datetime yields datetime64[ns] portably.
        result['date'] = pd.to_datetime(result['date'])
        result = result.drop_duplicates(['ticker', 'date'])
        return result.reset_index(drop=True)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.