code stringlengths 281 23.7M |
|---|
def _create_temporary_connection():
    """Build an in-memory SQLite connection pre-populated with a small `user` table."""
    temp_connection = SQLiteTempConnect.create_temporary_db()
    user_rows = [
        (1, 'Tom', 10),
        (2, 'Jerry', 16),
        (3, 'Jack', 18),
        (4, 'Alice', 20),
        (5, 'Bob', 22),
    ]
    schema = {
        'user': {
            'columns': {'id': 'INTEGER PRIMARY KEY', 'name': 'TEXT', 'age': 'INTEGER'},
            'data': user_rows,
        }
    }
    temp_connection.create_temp_tables(schema)
    return temp_connection
# NOTE(review): the decorator's leading characters and the first element of
# every parameter pair were lost in the source dump (each appeared as
# "[, 'wei']").  The value 10**18 wei (one ether) is reconstructed from the
# surviving expected strings (szabo -> '1000000', finney -> '1000',
# ether -> '1', kether -> '0.001', ...); confirm against the original suite.
@pytest.mark.parametrize(
    'value,expected',
    [
        ([1000000000000000000, 'wei'], '1000000000000000000'),
        ([1000000000000000000, 'kwei'], '1000000000000000'),
        ([1000000000000000000, 'mwei'], '1000000000000'),
        ([1000000000000000000, 'gwei'], '1000000000'),
        ([1000000000000000000, 'szabo'], '1000000'),
        ([1000000000000000000, 'finney'], '1000'),
        ([1000000000000000000, 'ether'], '1'),
        ([1000000000000000000, 'kether'], '0.001'),
        ([1000000000000000000, 'grand'], '0.001'),
        ([1000000000000000000, 'mether'], '0.000001'),
        ([1000000000000000000, 'gether'], '0.000000001'),
        ([1000000000000000000, 'tether'], '0.000000000001'),
    ],
)
def test_from_wei(value, expected):
    """from_wei() must convert a wei amount to the named denomination exactly."""
    assert from_wei(*value) == decimal.Decimal(expected)
def list(fips_dir, proj_dir, pattern):
    """Collect config names (*.yml basenames) found in each config directory.

    Returns an OrderedDict mapping config directory -> sorted list of names.
    (`pattern` is accepted for interface compatibility but not used here.)
    """
    result = OrderedDict()
    for cfg_dir in get_config_dirs(fips_dir, proj_dir):
        names = [
            os.path.splitext(os.path.split(yml_path)[1])[0]
            for yml_path in glob.glob('{}/*.yml'.format(cfg_dir))
        ]
        result[cfg_dir] = sorted(names)
    return result
class TestAtLeastOneCheckboxIsChecked(unittest.TestCase):
    """A StringBool-backed schema must flag a missing checkbox with our message."""

    def setUp(self):
        self.not_empty_messages = {'missing': 'a missing value message'}

        class CheckForCheckboxSchema(Schema):
            agree = validators.StringBool(messages=self.not_empty_messages)

        self.schema = CheckForCheckboxSchema()

    def test_Schema_with_input_present(self):
        # A supplied truthy value converts cleanly.
        converted = self.schema.to_python({'agree': 'yes'})
        self.assertTrue(converted['agree'])

    def test_Schema_with_input_missing(self):
        # An empty form must raise Invalid carrying the custom 'missing' message.
        try:
            self.schema.to_python({})
        except Invalid as exc:
            message = exc.error_dict['agree'].msg
            self.assertEqual(self.not_empty_messages['missing'], message, message)
        else:
            self.fail('missing input not detected')
def draw_overlapped_ways(types: list[dict[(str, str)]], path: Path) -> None:
    """Render a grid of crossing ways: one horizontal and one vertical way per tag set."""
    grid: Grid = Grid()
    # Horizontal ways, one row per tag dictionary, labelled on the left margin.
    for row, tags in enumerate(types):
        west = grid.add_node({}, 8, row + 1)
        east = grid.add_node({}, len(types) + 9, row + 1)
        grid.add_way(tags, [west, east])
        label = ', '.join(f'{k}={tags[k]}' for k in tags)
        grid.add_text(label, 0, row + 1)
    # Vertical ways, one column per tag dictionary, crossing every row.
    for column, tags in enumerate(types):
        north = grid.add_node({}, column + 9, 0)
        south = grid.add_node({}, column + 9, len(types) + 1)
        grid.add_way(tags, [north, south])
    grid.draw(path)
def prompt_password(ctx: click.Context, param: str, value: str) -> str:
    """Resolve the password: an explicit value wins, then config, then a prompt."""
    configured = ctx.obj['CONFIG'].user.password
    if value:
        return value
    if configured:
        click.echo('> Password found in configuration.')
        return configured
    # Nothing supplied anywhere: ask interactively without echoing input.
    return click.prompt(text='Password', hide_input=True)
class TestPrometheusConnection():
    """End-to-end tests of PrometheusConnection: adding and updating metrics.

    NOTE(review): the two bare '.asyncio' lines in the original were syntax
    errors -- evidently '@pytest.mark.asyncio' decorators with their prefix
    lost in extraction; they are restored below.
    """

    def setup(self):
        """Build a fresh connection, identity and dialogues for each test."""
        self.metrics = {}
        configuration = ConnectionConfig(connection_id=PrometheusConnection.connection_id, port=9090)
        self.some_skill = 'some/skill:0.1.0'
        self.agent_address = 'my_address'
        self.agent_public_key = 'my_public_key'
        self.protocol_specification_id = PublicId.from_str('fetchai/prometheus:1.1.7')
        identity = Identity('name', address=self.agent_address, public_key=self.agent_public_key)
        self.prometheus_con = PrometheusConnection(identity=identity, configuration=configuration, data_dir=MagicMock())
        self.loop = asyncio.get_event_loop()
        self.prometheus_address = str(PrometheusConnection.connection_id)
        self.dialogues = PrometheusDialogues(self.some_skill)

    async def send_add_metric(self, title: str, metric_type: str) -> None:
        """Send an ADD_METRIC message for `title` of the given prometheus type."""
        msg, sending_dialogue = self.dialogues.create(counterparty=self.prometheus_address, performative=PrometheusMessage.Performative.ADD_METRIC, title=title, type=metric_type, description='a gauge', labels={})
        assert sending_dialogue is not None
        envelope = Envelope(to=msg.to, sender=msg.sender, message=msg)
        await self.prometheus_con.send(envelope)

    async def send_update_metric(self, title: str, update_func: str) -> None:
        """Send an UPDATE_METRIC message applying `update_func` to `title`."""
        msg, sending_dialogue = self.dialogues.create(counterparty=self.prometheus_address, performative=PrometheusMessage.Performative.UPDATE_METRIC, title=title, callable=update_func, value=1.0, labels={})
        assert sending_dialogue is not None
        assert sending_dialogue.last_message is not None
        envelope = Envelope(to=msg.to, sender=msg.sender, message=msg)
        await self.prometheus_con.send(envelope)

    def teardown(self):
        """Disconnect the connection after each test."""
        self.loop.run_until_complete(self.prometheus_con.disconnect())

    @pytest.mark.asyncio
    async def test_connection(self):
        """Exercise the full add/update metric protocol, including error paths."""
        assert self.prometheus_con.state == ConnectionStates.disconnected, 'should not be connected yet'
        await self.prometheus_con.connect()
        assert self.prometheus_con.state == ConnectionStates.connected, 'should be connected'
        # Adding a new metric succeeds.
        await self.send_add_metric('some_metric', 'Gauge')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 200
        assert msg.message == 'New Gauge successfully added: some_metric.'
        # Adding the same metric again conflicts.
        await self.send_add_metric('some_metric', 'Gauge')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 409
        assert msg.message == 'Metric already exists.'
        # An unknown metric type is rejected.
        await self.send_add_metric('cool_metric', 'CoolBar')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 404
        assert msg.message == 'CoolBar is not a recognized prometheus metric.'
        # Valid update functions succeed.
        await self.send_update_metric('some_metric', 'inc')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 200
        assert msg.message == 'Metric some_metric successfully updated.'
        await self.send_update_metric('some_metric', 'set')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 200
        assert msg.message == 'Metric some_metric successfully updated.'
        # Updating a metric that was never created fails.
        await self.send_update_metric('cool_metric', 'inc')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 404
        assert msg.message == 'Metric cool_metric not found.'
        # Unknown update callables fail with 400.
        await self.send_update_metric('some_metric', 'go')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 400
        assert msg.message == 'Update function go not found for metric some_metric.'
        # A non-callable attribute is likewise rejected.
        await self.send_update_metric('some_metric', 'name')
        envelope = await self.prometheus_con.receive()
        msg = cast(PrometheusMessage, envelope.message)
        assert msg.performative == PrometheusMessage.Performative.RESPONSE
        assert msg.code == 400
        assert msg.message == 'Failed to update metric some_metric: name is not a valid update function.'
        # Sending a non-Prometheus message through the channel is enforced against.
        with pytest.raises(AEAEnforceError):
            envelope = Envelope(to='some_address', sender='me', message=Mock(spec=Message))
            await self.prometheus_con.channel.send(envelope)
        # A well-formed RESPONSE message is accepted by the channel.
        msg = PrometheusMessage(PrometheusMessage.Performative.RESPONSE, code=0, message='')
        envelope = Envelope(to=self.prometheus_address, sender=self.some_skill, message=msg)
        await self.prometheus_con.channel.send(envelope)
        # A corrupted protocol specification id raises ValueError.
        with pytest.raises(ValueError):
            msg, _ = self.dialogues.create(counterparty=self.prometheus_address, performative=PrometheusMessage.Performative.UPDATE_METRIC, title='', callable='', value=1.0, labels={})
            envelope = Envelope(to=self.prometheus_address, sender=self.some_skill, message=msg)
            envelope._protocol_specification_id = 'bad_id'
            await self.prometheus_con.channel.send(envelope)

    @pytest.mark.asyncio
    async def test_disconnect(self):
        """disconnect() must leave the connection in the disconnected state."""
        await self.prometheus_con.disconnect()
        assert self.prometheus_con.state == ConnectionStates.disconnected, 'should be disconnected'
def make(apps, apks, repodir, archive):
    """Assemble repo/archive metadata and emit every supported index format.

    apps: mapping of appid -> app metadata; apks: package entries;
    repodir: output directory; archive: True when building the archive
    section rather than the main repo.
    """
    from fdroidserver.update import METADATA_VERSION
    # Signing material must be configured unless --nosign was passed.
    if ((not hasattr(common.options, 'nosign')) or (not common.options.nosign)):
        common.assert_config_keystore(common.config)
    # Sort apps case-insensitively by display name for stable index output.
    sortedids = sorted(apps, key=(lambda appid: common.get_app_display_name(apps[appid]).upper()))
    sortedapps = collections.OrderedDict()
    for appid in sortedids:
        sortedapps[appid] = apps[appid]
    repodict = collections.OrderedDict()
    repodict['timestamp'] = datetime.utcnow().replace(tzinfo=timezone.utc)
    repodict['version'] = METADATA_VERSION
    if (common.config['repo_maxage'] != 0):
        repodict['maxage'] = common.config['repo_maxage']
    if archive:
        # Archive section: name/icon/description/address come from archive_* keys.
        repodict['name'] = common.config['archive_name']
        repodict['icon'] = common.config.get('archive_icon', common.default_config['repo_icon'])
        repodict['description'] = common.config['archive_description']
        # Default archive URL: replace the trailing 'repo' of repo_url with 'archive'.
        archive_url = common.config.get('archive_url', (common.config['repo_url'][:(- 4)] + 'archive'))
        repodict['address'] = archive_url
        if ('archive_web_base_url' in common.config):
            repodict['webBaseUrl'] = common.config['archive_web_base_url']
        repo_section = os.path.basename(urllib.parse.urlparse(archive_url).path)
    else:
        # Main repo section.
        repodict['name'] = common.config['repo_name']
        repodict['icon'] = common.config.get('repo_icon', common.default_config['repo_icon'])
        repodict['address'] = common.config['repo_url']
        if ('repo_web_base_url' in common.config):
            repodict['webBaseUrl'] = common.config['repo_web_base_url']
        repodict['description'] = common.config['repo_description']
        repo_section = os.path.basename(urllib.parse.urlparse(common.config['repo_url']).path)
    add_mirrors_to_repodict(repo_section, repodict)
    # Build install/uninstall request lists; config may give a single string
    # or a list/tuple of strings for each.
    requestsdict = collections.OrderedDict()
    for command in ('install', 'uninstall'):
        packageNames = []
        key = (command + '_list')
        if (key in common.config):
            if isinstance(common.config[key], str):
                packageNames = [common.config[key]]
            elif all((isinstance(item, str) for item in common.config[key])):
                packageNames = common.config[key]
            else:
                raise TypeError(_('only accepts strings, lists, and tuples'))
        requestsdict[command] = packageNames
    fdroid_signing_key_fingerprints = load_stats_fdroid_signing_key_fingerprints()
    # Emit every index version plus the static website.
    make_v0(sortedapps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints)
    make_v1(sortedapps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints)
    make_v2(sortedapps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints, archive)
    make_website(sortedapps, repodir, repodict)
def upgrade():
    """Migrate free-text event sub-topics into a normalized event_sub_topics table.

    Creates the table, backfills rows from the distinct values currently in
    events.sub_topic, rewrites the events columns to integer foreign keys,
    and sanitizes slugs containing '/' characters.
    """
    op.create_table('event_sub_topics', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('slug', sa.String(), nullable=False), sa.Column('event_topic_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['event_topic_id'], ['event_topics.id'], ondelete='CASCADE'), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('id'))
    # Slug must be unique per topic, not globally.
    op.execute('CREATE UNIQUE INDEX slug_event_topic_id ON event_sub_topics (slug, event_topic_id)')
    # Rename first: the old text column temporarily holds names during backfill.
    op.alter_column('events', 'sub_topic', new_column_name='event_sub_topic_id')
    op.alter_column('events_version', 'sub_topic', new_column_name='event_sub_topic_id')
    # Insert one row per distinct (name, topic) pair, slugifying the name.
    op.execute("INSERT INTO event_sub_topics(name, slug, event_topic_id) SELECT DISTINCT event_sub_topic_id, lower(replace(regexp_replace(event_sub_topic_id, '& |,', '', 'g'), ' ', '-')), event_topic_id FROM events where not exists (SELECT 1 FROM event_sub_topics where event_sub_topics.name=events.event_sub_topic_id) and event_sub_topic_id is not null")
    # Replace the text value with the matching sub-topic id, then retype to integer.
    op.execute('UPDATE events SET event_sub_topic_id = (SELECT id FROM event_sub_topics WHERE event_sub_topics.name=events.event_sub_topic_id ORDER BY id DESC LIMIT 1)')
    op.execute('ALTER TABLE events ALTER COLUMN event_sub_topic_id TYPE integer USING event_sub_topic_id::integer')
    op.create_foreign_key(None, 'events', 'event_sub_topics', ['event_sub_topic_id'], ['id'], ondelete='CASCADE')
    # Same conversion for the versioned table.
    op.execute('UPDATE events_version SET event_sub_topic_id = (SELECT id FROM event_sub_topics WHERE event_sub_topics.name=events_version.event_sub_topic_id ORDER BY id DESC LIMIT 1)')
    op.execute('ALTER TABLE events_version ALTER COLUMN event_sub_topic_id TYPE integer USING event_sub_topic_id::integer')
    # Slugs must not contain '/'; replace with '-' across all slug tables.
    op.execute("UPDATE event_types set slug=replace(slug, '/', '-') where slug like '%/%'")
    op.execute("UPDATE event_topics set slug=replace(slug, '/', '-') where slug like '%/%'")
    op.execute("UPDATE event_sub_topics set slug=replace(slug, '/', '-') where slug like '%/%'")
class ComparerTopimage():
    """Choose the best top-image among extractor candidates, preferring 'newspaper'."""

    def extract(self, item, list_article_candidate):
        """Return the chosen top-image URL (made absolute) or None if no candidate has one.

        item: dict carrying at least 'url' (the article URL used for joining).
        list_article_candidate: objects with .topimage and .extractor attributes.
        """
        candidates = []
        for article_candidate in list_article_candidate:
            if article_candidate.topimage is not None:
                # Normalize relative image paths against the article URL.
                article_candidate.topimage = self.image_absoulte_path(item['url'], article_candidate.topimage)
                candidates.append((article_candidate.topimage, article_candidate.extractor))
        if len(candidates) == 0:
            return None
        newspaper_hits = [x for x in candidates if x[1] == 'newspaper']
        if len(newspaper_hits) == 0:
            return candidates[0][0]
        return newspaper_hits[0][0]

    def image_absoulte_path(self, url, image):
        """Join `image` onto `url` unless it is already an absolute http(s) URL.

        Bug fix: the original line was the garbled `re.match(re_ image)` --
        not valid Python.  Reconstructed as a check for an absolute
        http/https URL; relative paths are resolved with urljoin.
        """
        if not re.match(r'^https?://', image):
            return urljoin(url, image)
        return image
class TestGetAlgBW(unittest.TestCase):
    """Tests for comms_utils.getAlgBW (average iteration time and algorithmic bandwidth)."""

    def test_no_iterations(self):
        """Zero iterations must yield zero average time and zero bandwidth.

        Bug fix: the original wrote `assertEqual(0.0, avgIterNS, algBW)`,
        which passed algBW as the assertion *message* and never checked it.
        Both values are now asserted separately (assuming getAlgBW returns
        0.0 bandwidth for zero iterations -- confirm against comms_utils).
        """
        elapsedTimeNs = 30000
        dataSize = 90000
        numIters = 0
        avgIterNS, algBW = comms_utils.getAlgBW(elapsedTimeNs, dataSize, numIters)
        self.assertEqual(0.0, avgIterNS)
        self.assertEqual(0.0, algBW)

    def test_iterations(self):
        """30000 ns over 3 iterations -> 10000 ns/iter; 90000 B / 10000 ns -> 9.0 bandwidth."""
        elapsedTimeNs = 30000
        dataSize = 90000
        numIters = 3
        avgIterNS, algBW = comms_utils.getAlgBW(elapsedTimeNs, dataSize, numIters)
        self.assertEqual(10000.0, avgIterNS)
        self.assertEqual(9.0, algBW)
def zernike_to_noll(n, m):
    """Return the Noll index j for Zernike radial order `n` and azimuthal order `m`.

    Scans the candidate range of Noll indices around order n and returns the
    first j whose noll_to_zernike(j) matches (n, m).

    Raises:
        ValueError: if no Noll index maps to (n, m).
    """
    # First candidate Noll index for radial order n.
    i = int((((n + 0.5) ** 2) + 1) / 2) + 1
    # Number of candidates to scan (count of modes up to order n, plus one).
    Nn = (((n + 1) * (n + 2)) // 2) + 1
    for j in range(i, i + Nn):
        nn, mm = noll_to_zernike(j)
        if nn == n and mm == m:
            return j
    # Bug fix: the original formatted only `n` into a two-%d template and
    # passed `m` as a stray second ValueError argument, so the raise itself
    # crashed with a TypeError.  Both values now feed the format string.
    raise ValueError('Could not find noll index for (%d,%d)' % (n, m))
class Acordeon(AssetClr):
    """Clarity accordion demo asset: renders a fixed one-panel accordion snippet."""

    name = 'clr-accordeon'

    def __str__(self):
        # Static markup; the string must stay byte-identical for consumers.
        markup = '\n<clr-accordion>\n <clr-accordion-panel>\n <clr-accordion-title>Item 1</clr-accordion-title>\n <clr-accordion-content *clrIfExpanded>Content 1</clr-accordion-content>\n </clr-accordion-panel>\n</clr-accordion>\n'
        return markup
class _WSContextManager():
    """Async context manager yielding a websocket once ready, while watching
    the associated request task for early failure."""

    def __init__(self, ws, task_req):
        # ws: websocket wrapper exposing wait_ready()/close()
        # task_req: in-flight request task tied to this socket
        self._ws = ws
        self._task_req = task_req

    async def __aenter__(self):
        # Race socket readiness against the request task; whichever completes
        # first decides how to proceed.
        ready_waiter = create_task(self._ws.wait_ready())
        (await asyncio.wait([ready_waiter, self._task_req], return_when=asyncio.FIRST_COMPLETED))
        if ready_waiter.done():
            # Socket became ready first; re-await so any exception surfaces.
            (await ready_waiter)
        else:
            # Request task finished first (likely an error): propagate it,
            # then still wait for readiness.
            (await self._task_req)
            (await ready_waiter)
        return self._ws

    async def __aexit__(self, exc_type, exc, tb):
        (await self._ws.close())
        # Consume the request task so its exception (if any) is not dropped.
        (await self._task_req)
class OptionPlotoptionsScatterDragdropGuideboxDefault(Options):
    """Style options for the default drag guide box of scatter drag/drop.

    Each option is a read/write property: the getter returns the configured
    value (falling back to the shown default) and the setter stores a new one.

    NOTE(review): the `@property` / `@<name>.setter` decorators appear to have
    been stripped in the source dump -- every name was defined twice, so the
    setter silently shadowed the getter.  They are restored here; confirm
    against the original generated module.
    """

    @property
    def className(self):
        """CSS class of the guide box; defaults to 'highcharts-drag-box-default'."""
        return self._config_get('highcharts-drag-box-default')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill color of the guide box; defaults to 'rgba(0, 0, 0, 0.1)'."""
        return self._config_get('rgba(0, 0, 0, 0.1)')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        """Mouse cursor shown while dragging; defaults to 'move'."""
        return self._config_get('move')

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Border color of the guide box; defaults to '#888'."""
        return self._config_get('#888')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Border width of the guide box; defaults to 1."""
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """Stacking order of the guide box; defaults to 900."""
        return self._config_get(900)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsFunnel3dSonificationContexttracksMappingHighpass(Options):
    """Mapping options for the highpass filter of funnel3d sonification context tracks.

    NOTE(review): `@property` decorators appear stripped in the source dump;
    restored here so `frequency`/`resonance` act as sub-option accessors --
    confirm against the original generated module.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingHighpassFrequency':
        """Sub-options controlling the highpass filter frequency."""
        return self._config_sub_data('frequency', OptionPlotoptionsFunnel3dSonificationContexttracksMappingHighpassFrequency)

    @property
    def resonance(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingHighpassResonance':
        """Sub-options controlling the highpass filter resonance."""
        return self._config_sub_data('resonance', OptionPlotoptionsFunnel3dSonificationContexttracksMappingHighpassResonance)
class ChannelBase(object):
    """One colour channel (an RGB or HSV component) handled by a colour control.

    Converts between channel values in [0, 1] and pixel indices on the owning
    control, and reads/writes the matching component of a colour object.
    """

    def __init__(self, function_control, name, rgb_color, channel_index, channel_mode):
        self.control = function_control   # owning widget; provides .width/.height
        self.name = name
        self.rgb_color = rgb_color
        self.index = channel_index        # component position inside the colour tuple
        self.mode = channel_mode          # 'hsv' selects HSV access, otherwise RGBA

    def get_value(self, color):
        """Return this channel's component of `color`."""
        components = color.get_hsva() if self.mode == 'hsv' else color.get_rgba()
        return components[self.index]

    def get_value_index(self, color):
        """Vertical pixel index corresponding to the colour's channel value."""
        span = self.control.height - 1
        return int(1 + span * (1.0 - self.get_value(color)))

    def get_index_value(self, y):
        """Channel value for vertical pixel `y`, clamped to [0, 1]."""
        raw = 1.0 - float(y) / (self.control.height - 1)
        return min(1.0, max(0.0, raw))

    def set_value(self, color, new_value_on_this_channel):
        """Write the channel component into `color`, preserving the other components."""
        if self.mode == 'hsv':
            hsva = list(color.get_hsva())
            hsva[self.index] = new_value_on_this_channel
            # Keep hue strictly below 1.0 so it does not wrap around to 0.
            if hsva[0] >= 1.0 - 1e-05:
                hsva[0] = 1.0 - 1e-05
            color.set_hsva(hsva[0], hsva[1], hsva[2], hsva[3])
        else:
            rgba = list(color.get_rgba())
            rgba[self.index] = new_value_on_this_channel
            color.set_rgba(rgba[0], rgba[1], rgba[2], rgba[3])

    def set_value_index(self, color, y):
        """Set the channel from a vertical pixel position."""
        self.set_value(color, self.get_index_value(y))

    def get_pos_index(self, f):
        """Horizontal pixel index for fraction `f` of the control width."""
        return int(f * (self.control.width - 1))

    def get_index_pos(self, idx):
        """Fraction of the control width represented by horizontal pixel `idx`."""
        return (1.0 * idx) / (self.control.width - 1)

    def paint(self, painter):
        """Subclasses draw the channel strip here."""
        raise NotImplementedError
# NOTE(review): the three lines below look like decorators whose leading
# characters (the '@' and the start of each name) were lost in extraction --
# presumably two route registrations plus a "with copr" access wrapper.
# Confirm against the original frontend views module.
_ns.route('/<username>/<coprname>/module/<id>')
_ns.route('/g/<group_name>/<coprname>/module/<id>')
_with_copr
def copr_module(copr, id):
    """Render the detail page for one module of a copr project."""
    module = ModulesLogic.get(id).first()
    # Pretty-print the module YAML using inline-styled HTML highlighting.
    formatter = HtmlFormatter(style='autumn', linenos=False, noclasses=True)
    pretty_yaml = highlight(module.yaml, get_lexer_by_name('YAML'), formatter)
    # Deduplicate chroots so each name/release pair appears only once.
    unique_chroots = []
    unique_name_releases = set()
    for chroot in copr.active_chroots_sorted:
        if (chroot.name_release in unique_name_releases):
            continue
        unique_chroots.append(chroot)
        unique_name_releases.add(chroot.name_release)
    # Accessing module.modulemd validates the YAML; surface parse errors as 422.
    try:
        module.modulemd
    except ValueError as ex:
        msg = 'Unable to parse module YAML. The most probable reason is that the module YAML is in an old modulemd version and/or has some mistake in it. Hint: {}'.format(ex)
        raise CoprHttpException(msg, code=422) from ex
    return flask.render_template('coprs/detail/module.html', copr=copr, module=module, yaml=pretty_yaml, unique_chroots=unique_chroots)
class Text3D(ModuleFactory):
    """Factory wrapping the Text3D module: places `text` at position (x, y, z)."""

    _target = Instance(modules.Text3D, ())

    # User-facing traits adapted onto the underlying target module.
    scale = Either(CFloat(1), CArray(shape=(3,)), help='The scale of the text, in figure units.\n Either a float, or 3-tuple of floats.')
    orientation = CArray(shape=(3,), adapts='orientation', desc='the angles giving the orientation of the\n text. If the text is oriented to the camera,\n these angles are referenced to the axis of the\n camera. If not, these angles are referenced to\n the z axis.')
    orient_to_camera = Bool(True, adapts='orient_to_camera', desc='if the text is kept oriented to the\n camera, or is pointing in a specific direction,\n regardless of the camera position.')

    def __init__(self, x, y, z, text, **kwargs):
        # Default the scale explicitly so _scale_changed fires with a defined value.
        if (not ('scale' in kwargs)):
            kwargs['scale'] = 1
        super(Text3D, self).__init__(None, **kwargs)
        self._target.text = text
        self._target.position = (x, y, z)

    def _scale_changed(self):
        # Broadcast a scalar scale to a uniform 3-vector before pushing to the target.
        scale = self.scale
        if isinstance(scale, numbers.Number):
            scale = (scale * np.ones((3,)))
        self._target.scale = scale
class Mondriaan(flx.Widget):
    """Flexx demo widget composing a Mondriaan-style painting from nested boxes."""

    CSS = '\n .flx-Mondriaan {background: #000;}\n .flx-Mondriaan .edge {background:none;}\n .flx-Mondriaan .white {background:#fff;}\n .flx-Mondriaan .red {background:#f23;}\n .flx-Mondriaan .blue {background:#249;}\n .flx-Mondriaan .yellow {background:#ff7;}\n '

    def init(self):
        # The painting is built from nested h/v boxes; flex ratios set the
        # relative panel sizes and the black root background shows through
        # as the lines between panels.
        with MyHBox():
            # Narrow left column.
            with MyVBox(flex=2):
                with MyVBox(flex=4, spacing=30):
                    flx.Widget(flex=1, css_class='white')
                    flx.Widget(flex=1, css_class='white')
                with MyVBox(flex=2, css_class='blue'):
                    flx.Widget(flex=1, css_class='edge')
                    flx.Widget(flex=1, css_class='edge')
            # Wide right column: large red block above a white/yellow detail row.
            with MyVBox(flex=6):
                with MyVBox(flex=4, spacing=30, css_class='red'):
                    flx.Widget(flex=1, css_class='edge')
                    flx.Widget(flex=1, css_class='edge')
                with MyHBox(flex=2):
                    flx.Widget(flex=6, css_class='white')
                    with MyVBox(flex=1):
                        flx.Widget(flex=1, css_class='white')
                        flx.Widget(flex=1, css_class='yellow')
def downgrade():
    """Revert UUID primary keys back to NUMERIC ids and restore the sessions table."""
    # batch_alter_table is used for SQLite compatibility when retyping columns.
    with op.batch_alter_table('user') as batch_op:
        batch_op.alter_column('id', existing_type=sqlalchemy_utils.types.uuid.UUIDType(), type_=sa.NUMERIC(precision=16), existing_nullable=False)
    with op.batch_alter_table('templates') as batch_op:
        batch_op.alter_column('id', existing_type=sqlalchemy_utils.types.uuid.UUIDType(), type_=sa.NUMERIC(precision=16), existing_nullable=False)
    # Recreate the sessions table dropped by the corresponding upgrade.
    op.create_table('sessions', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('session_id', sa.VARCHAR(length=255), nullable=True), sa.Column('data', sa.BLOB(), nullable=True), sa.Column('expiry', sa.DATETIME(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('session_id'))
# NOTE(review): the four bare lines below look like decorators whose leading
# characters (the '@' and the start of each name) were lost in extraction --
# most likely access-control wrappers plus a POST-method restriction.
# Confirm against the original views module.
_required
_required
_required
_POST
def alert_list_table(request):
    """Render the monitoring alert table from a POSTed JSON alert filter."""
    context = collect_view_data(request, 'mon_alert_list')
    try:
        # The alert filter arrives as a JSON string in the POST body.
        api_data = json.loads(request.POST.get('alert_filter', None))
    except (ValueError, TypeError):
        context['error'] = 'Unexpected error: could not parse alert filter.'
    else:
        context['alert_filter'] = api_data
        res = call_api_view(request, 'GET', mon_alert_list, data=api_data)
        if (res.status_code == 200):
            context['alerts'] = res.data['result']
        elif (res.status_code == 201):
            # 201 here signals the internal API call looped back into this view.
            context['error'] = 'Unexpected error: got into an API loop.'
        else:
            context['error'] = res.data.get('result', {}).get('error', res.data)
    return render(request, 'gui/mon/alert_table.html', context)
def get_channel_videos(channel_id):
    """Fetch a channel's uploaded videos, newest first, keyed by video id."""
    channel_res = youtube_client.channels().list(id=channel_id, part='contentDetails').execute()
    videos = {}
    for channel_item in channel_res['items']:
        uploads_playlist = channel_item['contentDetails']['relatedPlaylists']['uploads']
        playlist_res = youtube_client.playlistItems().list(playlistId=uploads_playlist, part='snippet', maxResults=youtube_video_max_result).execute()
        for video in playlist_res['items']:
            vid = video['snippet']['resourceId']['videoId']
            # Mirror the id at the top level of each entry for convenience.
            video['videoId'] = vid
            videos[vid] = video
    # Order entries by publish time, most recent first.
    ordered = sorted(videos.items(), key=lambda entry: entry[1]['snippet']['publishedAt'], reverse=True)
    return dict(ordered)
class TestTimeField(FieldValues):
    """Fixture data for serializers.TimeField (exercised by the FieldValues base)."""

    # Parseable strings and time objects map to datetime.time values.
    valid_inputs = {'13:00': datetime.time(13, 0), datetime.time(13, 0): datetime.time(13, 0)}
    # Malformed strings produce the standard format error message.
    invalid_inputs = {'abc': ['Time has wrong format. Use one of these formats instead: hh:mm[:ss[.uuuuuu]].'], '99:99': ['Time has wrong format. Use one of these formats instead: hh:mm[:ss[.uuuuuu]].']}
    # Serialized representation is 'HH:MM:SS'; None and '' pass through as None.
    outputs = {datetime.time(13, 0): '13:00:00', datetime.time(0, 0): '00:00:00', '00:00:00': '00:00:00', None: None, '': None}
    field = serializers.TimeField()
class QueueTrigger(WebMirror.TimedTriggers.TriggerBase.TriggerBaseClass, WebMirror.JobDispatcher.RpcMixin):
    """Timed trigger that pushes a set of URLs into the fetch queue over RPC."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Fail fast if the RPC interface is not reachable.
        self.check_open_rpc_interface()

    def get_create_job(self, sess, url):
        """Return the WebPages row for `url`, creating a stub row if absent."""
        have = sess.query(self.db.WebPages).filter((self.db.WebPages.url == url)).scalar()
        if have:
            return have
        else:
            parsed = urllib.parse.urlparse(url)
            # Site root (scheme + netloc only) is recorded as the crawl start URL.
            root = urllib.parse.urlunparse((parsed[0], parsed[1], '', '', '', ''))
            new = self.db.WebPages(url=url, starturl=root, netloc=parsed.netloc, distance=50000, is_text=True, priority=500000, type='unknown', fetchtime=datetime.datetime.now())
            sess.add(new)
            sess.commit()
            return new

    def enqueue_url(self, sess, url, module='SmartWebRequest'):
        """Mark the job for `url` as fetching and dispatch it over RPC.

        NOTE(review): the `module` parameter is accepted but ignored -- the
        dispatched job hard-codes 'PersistentSmartWebRequest'.  Confirm
        whether any caller relies on the parameter before changing this.
        """
        print('Enqueueing ')
        job = self.get_create_job(sess, url)
        job.state = 'fetching'
        sess.commit()
        raw_job = WebMirror.JobUtils.buildjob(module='PersistentSmartWebRequest', call='smartGetItem', dispatchKey='fetcher', jobid=job.id, args=[job.url], kwargs={}, additionalData={'mode': 'fetch'}, postDelay=0)
        self.rpc_interface.put_job(raw_job)

    def go(self):
        """Enqueue every URL from get_urls() within one DB session."""
        with self.db.session_context() as sess:
            for url in self.get_urls():
                self.enqueue_url(sess, url)

    def get_urls(self):
        # Subclasses supply the URL list; the base implementation yields nothing.
        pass
def test_validate_generator():
    """Registered generator factories must resolve to generators in nested configs.

    NOTE(review): the decorator lines were garbled in the source dump
    (leading characters lost, leaving bare `_registry.schedules` /
    `_registry.optimizers`).  They are reconstructed as `@my_registry.*`
    to match the `my_registry.resolve(...)` calls in the body -- confirm
    against the original test module.
    """
    @my_registry.schedules('test_schedule.v2')
    def test_schedule():
        while True:
            yield 10

    # A bare schedule resolves to a generator.
    cfg = {'': 'test_schedule.v2'}
    result = my_registry.resolve({'test': cfg})['test']
    assert isinstance(result, GeneratorType)

    @my_registry.optimizers('test_optimizer.v2')
    def test_optimizer2(rate: Generator) -> Generator:
        return rate

    # A generator passed as a plain argument survives resolution.
    cfg = {'': 'test_optimizer.v2', 'rate': {'': 'test_schedule.v2'}}
    result = my_registry.resolve({'test': cfg})['test']
    assert isinstance(result, GeneratorType)

    @my_registry.optimizers('test_optimizer.v3')
    def test_optimizer3(schedules: Dict[(str, Generator)]) -> Generator:
        return schedules['rate']

    # A generator nested inside a dict argument also survives resolution.
    cfg = {'': 'test_optimizer.v3', 'schedules': {'rate': {'': 'test_schedule.v2'}}}
    result = my_registry.resolve({'test': cfg})['test']
    assert isinstance(result, GeneratorType)

    @my_registry.optimizers('test_optimizer.v4')
    def test_optimizer4(*schedules: Generator) -> Generator:
        return schedules[0]
def test_auth_role():
    """AuthRole must round-trip through its IDL form for every field combination."""
    def roundtrip(obj):
        # Serialize to the flyte IDL form and back again.
        return _common.AuthRole.from_flyte_idl(obj.to_flyte_idl())

    iam_only = _common.AuthRole(assumable_iam_role='rollie-pollie')
    assert iam_only.assumable_iam_role == 'rollie-pollie'
    assert not iam_only.kubernetes_service_account
    assert iam_only == roundtrip(iam_only)

    ksa_only = _common.AuthRole(kubernetes_service_account='service-account-name')
    assert ksa_only.kubernetes_service_account == 'service-account-name'
    assert not ksa_only.assumable_iam_role
    assert ksa_only == roundtrip(ksa_only)

    both = _common.AuthRole(assumable_iam_role='rollie-pollie', kubernetes_service_account='service-account-name')
    assert both.assumable_iam_role == 'rollie-pollie'
    assert both.kubernetes_service_account == 'service-account-name'
    assert both == roundtrip(both)
class IPFSGatewayBackend(IPFSOverHTTPBackend):
    """IPFS backend pointed at the public gateway.

    Both pinning and fetching are intentionally disabled: each operation
    raises CannotHandleURI so resolution falls through to other backends.
    """

    def base_uri(self) -> str:
        # URI prefix identifying gateway-style IPFS URIs.
        return IPFS_GATEWAY_PREFIX

    def pin_assets(self, file_or_dir_path: Path) -> List[Dict[(str, str)]]:
        # The gateway cannot pin; always refuse.
        raise CannotHandleURI('IPFS gateway is currently disabled, please use a different IPFS backend.')

    def fetch_uri_contents(self, uri: str) -> bytes:
        # Fetching through the gateway is disabled; always refuse.
        raise CannotHandleURI('IPFS gateway is currently disabled, please use a different IPFS backend.')
def test_reset_last_overriding():
    """reset_last_overriding() must pop only the most recent container override."""
    class _Container(containers.DeclarativeContainer):
        p11 = providers.Provider()

    class _OverridingContainer1(containers.DeclarativeContainer):
        p11 = providers.Provider()

    class _OverridingContainer2(containers.DeclarativeContainer):
        p11 = providers.Provider()
        p12 = providers.Provider()

    container = _Container()
    overriding_container1 = _OverridingContainer1()
    overriding_container2 = _OverridingContainer2()

    container.override(overriding_container1)
    container.override(overriding_container2)
    container.reset_last_overriding()

    assert container.overridden == (overriding_container1,)
    # Bug fix: the original read `assert container.p11.overridden, (...)` --
    # the tuple was the assertion *message*, so the provider-level override
    # was never actually compared.
    assert container.p11.overridden == (overriding_container1.p11,)
class Container(containers.DeclarativeContainer):
    """Application DI container: configuration plus the Giphy search service stack."""

    # Wire providers into the request handlers module on startup.
    wiring_config = containers.WiringConfiguration(modules=['.handlers'])
    # Application settings are loaded from config.yml.
    config = providers.Configuration(yaml_files=['config.yml'])
    # Low-level Giphy API client, parameterized from configuration.
    giphy_client = providers.Factory(giphy.GiphyClient, api_key=config.giphy.api_key, timeout=config.giphy.request_timeout)
    # High-level search service built on top of the client.
    search_service = providers.Factory(services.SearchService, giphy_client=giphy_client)
class TestSnippetDedent(util.MdCase):
    """Snippets with `dedent_subsections` enabled must strip common indentation."""

    extension = ['pymdownx.snippets', 'pymdownx.superfences']
    extension_configs = {'pymdownx.snippets': {'base_path': [os.path.join(BASE, '_snippets')], 'dedent_subsections': True}}

    def test_dedent_section(self):
        """A named section inside an indented file is dedented before insertion."""
        self.check_markdown('\n ```text\n ---8<--- "indented.txt:py-section"\n ```\n ', '\n <div class="highlight"><pre><span></span><code>def some_method(self, param):\n """Docstring."""\n\n return param\n </code></pre></div>\n ', True)

    def test_dedent_lines(self):
        """A line-range selection is dedented the same way as a named section."""
        self.check_markdown('\n ```text\n ---8<--- "indented.txt:5:8"\n ```\n ', '\n <div class="highlight"><pre><span></span><code>def some_method(self, param):\n """Docstring."""\n\n return param\n </code></pre></div>\n ', True)

    def test_dedent_indented(self):
        """A snippet inserted in indented context still renders as a code block."""
        self.check_markdown('\n Paragraph\n\n ---8<--- "indented.txt:py-section"\n ', '\n <p>Paragraph</p>\n <pre><code>def some_method(self, param):\n """Docstring."""\n\n return param\n </code></pre>\n ', True)
class CacheBackend(CacheBackendT, Service):
    """Base class for cache backends: get/set/delete with shared error recovery.

    Subclasses implement _get/_set/_delete; the public methods wrap them in
    _recovery_context, which maps backend errors onto self.Unavailable and
    deletes keys affected by invalidating errors.
    """

    logger = logger
    # Exception raised to callers whenever the backend cannot serve a request.
    Unavailable: Type[BaseException] = CacheUnavailable
    # Transient backend errors a subclass declares as operational.
    operational_errors: ClassVar[Tuple[(Type[BaseException], ...)]] = ()
    # Errors that should cause the affected key to be deleted.
    invalidating_errors: ClassVar[Tuple[(Type[BaseException], ...)]] = ()
    # Errors that cannot be recovered from at all.
    irrecoverable_errors: ClassVar[Tuple[(Type[BaseException], ...)]] = ()

    def __init__(self, app: AppT, url: Union[(URL, str)]='memory://', **kwargs: Any) -> None:
        self.app = app
        self.url = URL(url)
        Service.__init__(self, **kwargs)

    async def _get(self, key: str) -> Optional[bytes]:
        # Backend-specific read; implemented by subclasses.
        ...

    async def _set(self, key: str, value: bytes, timeout: Optional[float]=None) -> None:
        # Backend-specific write; implemented by subclasses.
        ...

    async def _delete(self, key: str) -> None:
        # Backend-specific delete; implemented by subclasses.
        ...

    async def get(self, key: str) -> Optional[bytes]:
        """Read `key`, translating backend errors via the recovery context."""
        async with self._recovery_context(key):
            return (await self._get(key))

    async def set(self, key: str, value: bytes, timeout: Optional[float]=None) -> None:
        """Write `key`, translating backend errors via the recovery context."""
        async with self._recovery_context(key):
            (await self._set(key, value, timeout))

    async def delete(self, key: str) -> None:
        """Delete `key`, translating backend errors via the recovery context."""
        async with self._recovery_context(key):
            (await self._delete(key))

    async def _recovery_context(self, key: str) -> AsyncGenerator:
        # NOTE(review): this is used above as `async with self._recovery_context(key)`,
        # which requires an @asynccontextmanager-style decorator that does not
        # appear here -- it may have been stripped from this dump; confirm
        # against the original module.
        try:
            (yield)
        except self.irrecoverable_errors as exc:
            self.log.exception(E_CACHE_IRRECOVERABLE, exc)
            raise self.Unavailable(exc)
        except self.invalidating_errors as exc:
            # Key is suspect: log, best-effort delete it, then report unavailable.
            self.log.warning(E_CACHE_INVALIDATING, key, exc, exc_info=1)
            try:
                (await self._delete(key))
            except (self.operational_errors + self.invalidating_errors) as exc:
                self.log.exception(E_CANNOT_INVALIDATE, key, exc)
            raise self.Unavailable()
        except self.operational_errors as exc:
            self.log.warning(E_CACHE_INOPERATIONAL, exc, exc_info=1)
            raise self.Unavailable()

    def _repr_info(self) -> str:
        # Included in the service repr for debugging.
        return f'url={self.url!r}'
# NOTE(review): the original line here was the bare token '.benchmark' --
# evidently a decorator whose prefix was lost in extraction.  It is restored
# as '@pytest.mark.benchmark' to match the pytest-marker pattern seen
# elsewhere in this dump; confirm against the original test module.
@pytest.mark.benchmark
def test_baker_gs_opt_synthesis(fixture_store):
    """Summarize the results_bag entries produced by the baker GS-opt tests."""
    for i, fix in enumerate(fixture_store):
        print(i, fix)
    tot_cycles = 0
    converged = 0
    bags = fixture_store['results_bag']
    for k, v in bags.items():
        # Only inspect bags written by the baker GS-opt test family.
        if not k.startswith('test_baker_gs_opt'):
            continue
        print(k)
        try:
            tot_cycles += v['cycles']
            # A run counts as converged only if its energy matches the reference.
            energy_matches = v['energy'] == pytest.approx(v['ref_energy'])
            converged += 1 if (v['is_converged'] and energy_matches) else 0
            for kk, vv in v.items():
                print('\t', kk, vv)
        except KeyError:
            # Bags missing expected keys correspond to failed runs.
            print('\tFailed!')
    print(f'Total cycles: {tot_cycles}')
    print(f'Converged: {converged}/{len(bags)}')
def test_adding_a_node_affinity():
    """A nodeAffinity block in values must appear verbatim in the rendered pod spec."""
    config = '\nnodeAffinity:\n preferredDuringSchedulingIgnoredDuringExecution:\n - weight: 100\n preference:\n matchExpressions:\n - key: mylabel\n operator: In\n values:\n - myvalue\n'
    rendered = helm_template(config)
    affinity = rendered['statefulset'][name]['spec']['template']['spec']['affinity']['nodeAffinity']
    expected = {
        'preferredDuringSchedulingIgnoredDuringExecution': [
            {
                'weight': 100,
                'preference': {
                    'matchExpressions': [
                        {'key': 'mylabel', 'operator': 'In', 'values': ['myvalue']}
                    ]
                },
            }
        ]
    }
    assert affinity == expected
def main():
    """Measure reference get/set times, then run every benchmark variant against them."""
    ref_get_time, ref_set_time = new_style_value().measure()
    benchmarks = [
        global_value,
        old_style_value,
        new_style_value,
        property_value,
        any_value,
        int_value,
        range_value,
        change_value,
        monitor_value,
        delegate_value,
        delegate_2_value,
        delegate_3_value,
    ]
    for bench in benchmarks:
        run_benchmark(bench, ref_get_time, ref_set_time)
class OptionPlotoptionsPackedbubbleSonificationTracksMappingLowpass(Options):
    """Mapping options for the lowpass filter of packedbubble sonification tracks.

    NOTE(review): `@property` decorators appear stripped in the source dump;
    restored here so `frequency`/`resonance` act as sub-option accessors --
    confirm against the original generated module.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsPackedbubbleSonificationTracksMappingLowpassFrequency':
        """Sub-options controlling the lowpass filter frequency."""
        return self._config_sub_data('frequency', OptionPlotoptionsPackedbubbleSonificationTracksMappingLowpassFrequency)

    @property
    def resonance(self) -> 'OptionPlotoptionsPackedbubbleSonificationTracksMappingLowpassResonance':
        """Sub-options controlling the lowpass filter resonance."""
        return self._config_sub_data('resonance', OptionPlotoptionsPackedbubbleSonificationTracksMappingLowpassResonance)
def _invalidate_thread_cache(s: Session, old_thread: ThreadModel, board: BoardModel):
    """Rebuild the cached thread and thread-stub entries for ``old_thread``.

    Reuses previously cached posts when available; if the thread no longer
    exists in the database both cache keys are deleted instead.
    """
    thread_key = cache_key('thread', board.name, old_thread.refno)
    thread_stub_key = cache_key('thread_stub', board.name, old_thread.refno)
    cached_thread = cache.get(thread_key)
    cached_posts = ThreadModel.from_cache(cached_thread).posts if cached_thread else None
    row = (s.query(ThreadOrmModel)
            .filter_by(id=old_thread.id)
            .options(lazyload(ThreadOrmModel.posts))
            .one_or_none())
    if not row:
        # Thread disappeared from the DB: drop both cache entries.
        cache.delete(thread_key)
        cache.delete(thread_stub_key)
        return
    thread = ThreadModel.from_orm_model(row, include_board=True, include_posts=True, cached_thread_posts=cached_posts)
    cache.set(thread_key, thread.to_cache(include_board=True, include_posts=True), timeout=0)
    thread_stub = ThreadStubModel.from_thread(thread, include_snippets=True)
    cache.set(thread_stub_key, thread_stub.to_cache(), timeout=0)
    return (thread, thread_stub)
class InferenceGraph():
    """Directed-graph visualization of an InferenceBlock's data flow.

    Nodes are either data keys (inputs/outputs) or perception blocks
    (nn.Modules); edges follow the data flow from inputs toward the requested
    output keys. Rendering uses networkx + matplotlib with a graphviz layout.
    """

    def __init__(self, inference_block: InferenceBlock):
        # name -> perception block (nn.Module), taken from the inference block.
        self.perception_blocks = inference_block.perception_blocks
        self.node_graph = nx.DiGraph()
        out_keys = inference_block.out_keys
        # Shapes of pure-input keys, filled in while walking the graph.
        self.in_key_shapes = dict()
        # Total parameter count across all blocks (used in labels and shading).
        self._total_num_of_params = self._get_num_of_parameters()
        self._build_inference_graph(out_keys=out_keys)

    def save(self, name: str, save_path: str) -> None:
        """Render the graph and save it as <name>.pdf plus a pickled figure."""
        self._draw(name=name, figure_size=(18, 12))
        full_save_path = os.path.join(save_path, (name + '.pdf'))
        # NOTE(review): the message is printed only when the target file does
        # not exist yet, while the figure is saved unconditionally — confirm intent.
        if (not os.path.exists(full_save_path)):
            print(f"Graphical depiction of the model '{name}' saved at: {os.path.abspath(full_save_path)}")
        plt.savefig(full_save_path, transparent=True)
        # Also persist the live matplotlib figure for later re-use.
        full_save_path = full_save_path.replace('.pdf', '.figure.pkl')
        pickle.dump(plt.gcf(), open(full_save_path, 'wb'))
        plt.clf()
        plt.close()

    def show(self, name: str, block_execution: bool) -> None:
        """Render the graph in an interactive matplotlib window."""
        self._draw(name=name, figure_size=None)
        plt.show(block=block_execution)

    def _draw(self, name: str, figure_size: Optional[Tuple[(int, int)]]) -> None:
        """Lay out and draw all nodes and edges onto a fresh figure."""
        fig = plt.figure(figsize=figure_size)
        plt.clf()
        renderer = fig.canvas.get_renderer()
        labels = nx.get_node_attributes(self.node_graph, 'label')
        is_block = nx.get_node_attributes(self.node_graph, 'is_block')
        num_params = nx.get_node_attributes(self.node_graph, 'num_params')
        # Hierarchical top-down layout via graphviz 'dot'.
        pos = nx.nx_agraph.graphviz_layout(self.node_graph, prog='dot')
        max_params_per_node = max(num_params.values())
        bbox_heights = dict()
        for node in self.node_graph.nodes():
            (x, y) = pos[node]
            if (not is_block[node]):
                # Data-key nodes.
                color = 'indianred'
                alpha = 0.15
            elif (num_params[node] == 0):
                # Parameter-free blocks.
                color = 'powderblue'
                alpha = 0.1
            else:
                # Parameterized blocks: opacity scales with their parameter share.
                color = 'lightskyblue'
                alpha = (((float(num_params[node]) / max_params_per_node) * 0.3) + 0.1)
            obj = plt.text(x, y, labels[node], picker=True, ha='center', va='center', bbox=dict(boxstyle='round', facecolor=color, alpha=alpha))
            plt.plot(x, y, 'bo', markersize=0)
            # Remember each label's rendered height to offset edge endpoints below.
            bbox_height = obj.get_window_extent(renderer).height
            bbox_heights[node] = bbox_height
        for (src_node, dst_node, attrs) in self.node_graph.edges(data=True):
            (src_x, src_y) = pos[src_node]
            (dst_x, dst_y) = pos[dst_node]
            # Shift arrow endpoints clear of the text boxes.
            src_y -= (((bbox_heights[src_node] // 4) * 3) + 2)
            dst_y += (((bbox_heights[dst_node] // 4) * 3) + 2)
            plt.annotate('', xy=(dst_x, dst_y), xytext=(src_x, src_y), arrowprops=dict(arrowstyle='->'))
        plt.axis('off')
        title = f"Graphical depiction of '{name}'"
        title += f' with {self._total_num_of_params:,} parameters'
        plt.title(title)
        plt.tight_layout()

    def _get_num_of_parameters(self) -> int:
        """Sum parameter counts over every perception block."""
        return sum((pp.numel() for block in self.perception_blocks.values() for pp in block.parameters()))

    def _get_block_name_for_out_key(self, out_key: str) -> str:
        """Return the name of the block producing ``out_key`` (None if no block does)."""
        for (block_name, block) in self.perception_blocks.items():
            if (out_key in block.out_keys):
                return block_name

    def _build_inference_graph(self, out_keys: List[str], parent: Optional[str]=None) -> None:
        """Recursively add nodes/edges for ``out_keys`` and their producers."""
        for out_key in out_keys:
            block_key = self._get_block_name_for_out_key(out_key)
            created_output_node = False
            if (out_key not in self.node_graph.nodes()):
                if (block_key in self.perception_blocks):
                    # Produced by a block: label with the block's output shape.
                    out_index = self.perception_blocks[block_key].out_keys.index(out_key)
                    shape = self.perception_blocks[block_key].out_shapes()[out_index]
                    label = f'''{out_key}
shapes: {shape}'''
                else:
                    # Pure input key: its shape was recorded while visiting a consumer.
                    shape = self.in_key_shapes[out_key]
                    label = f'''{out_key}
shape: {shape}'''
                self.node_graph.add_node(out_key, label=label, is_block=False, num_params=0)
                created_output_node = True
            if (parent is not None):
                self.node_graph.add_edge(out_key, parent)
            if ((block_key in self.perception_blocks) and created_output_node):
                (curr_block, block_id) = self._add_node_to_node_graph(block_key)
                self.node_graph.add_edge(block_id, out_key)
                # Record input shapes, then recurse into the block's own inputs.
                for (i, in_key) in enumerate(curr_block.in_keys):
                    self.in_key_shapes[in_key] = curr_block.in_shapes[i]
                self._build_inference_graph(out_keys=curr_block.in_keys, parent=block_id)

    def _add_node_to_node_graph(self, block_key: str) -> Tuple[(nn.Module, str)]:
        """Add a block node labelled with its repr and parameter share."""
        curr_block = self.perception_blocks[block_key]
        block_id = '{}\n({})'.format(curr_block.__class__.__name__, block_key)
        num_of_block_params = sum((pp.numel() for pp in curr_block.parameters()))
        label = str(curr_block).replace('\t', '')
        if (num_of_block_params > 0):
            label += f'''
#{num_of_block_params:,} = {((num_of_block_params / self._total_num_of_params) * 100):.1f}%'''
        self.node_graph.add_node(block_id, label=label, is_block=True, num_params=num_of_block_params, block_id=block_key)
        return (curr_block, block_id)
class LinguaMakoExtractor(Extractor, MessageExtractor):
    """Lingua extractor plugin that pulls translatable messages out of Mako templates."""

    extensions = ['.mako']
    default_config = {'encoding': 'utf-8', 'comment-tags': ''}

    def __call__(self, filename, options, fileobj=None):
        """Extract messages from ``filename`` (or an already-open ``fileobj``)."""
        self.options = options
        self.filename = filename
        self.python_extractor = get_extractor('x.py')
        if fileobj is None:
            fileobj = open(filename, 'rb')
        return self.process_file(fileobj)

    def process_python(self, code, code_lineno, translator_strings):
        """Delegate an embedded Python snippet to the Python extractor.

        Yields each extracted Message, prepending any translator comment
        strings collected from the template.
        """
        source = code.getvalue().strip()
        # A bare block header such as "if x:" will not parse on its own;
        # append "pass" so the Python extractor accepts it.
        if source.endswith(compat.b(':')):
            source = source + compat.b(' pass')
        buffer = io.BytesIO(source)
        for message in self.python_extractor(self.filename, self.options, buffer, code_lineno):
            if translator_strings:
                message = Message(message.msgctxt, message.msgid, message.msgid_plural, message.flags, compat.u(' ').join(translator_strings + [message.comment]), message.tcomment, message.location)
            yield message
.parametrize('fork_version, valid, result', [((b'\x12' * 4), True, b'\x03\x00\x00\x00\rf`\x8a\xf5W\xf4\xfa\xdb\xfc\xe2H\xac7\xf6\xe7c\x9c\xe3q\x10\x0cC\xd1Z\xad\x05\xcb'), ((b'\x12' * 5), False, None), ((b'\x12' * 3), False, None)])
def test_compute_deposit_domain(fork_version, valid, result):
if valid:
assert (compute_deposit_domain(fork_version) == result)
else:
with pytest.raises(ValueError):
compute_deposit_domain(fork_version) |
def Run(params):
    """Register one or more repositories for tracking (the `register` command).

    ``params`` carries ``.args`` (CLI arguments), ``.config_file`` (path) and
    ``.config`` (parsed config exposing ``repos``, ``groups`` and
    ``current_group``). Returns a Status on most paths; see note below for the
    one branch that returns None.
    """
    args = params.args
    config_file = params.config_file
    config = params.config
    if (len(args) < 2):
        msg = 'Repository (dir name|--all|--current|--recursive) to track not passed'
        Print(msg)
        return Status(msg, False)
    repos = config.repos
    msgs = []
    # Drop the command name itself; what remains are dir names or options.
    args = args[1:]
    join = os.path.join
    isdir = os.path.isdir
    if (('--all' in args) or ('--current' in args) or ('--recursive' in args)):
        # Options are mutually exclusive with explicit directory names.
        if [arg for arg in args if (not arg.startswith('--'))]:
            Print('If an option is passed in mu register, no other dir names should be passed.')
            # NOTE(review): returns None here while other error paths return
            # Status(...) — confirm callers tolerate this.
            return
        if (('--all' in args) or ('--current' in args)):
            # Every immediate child of the cwd that is a git checkout.
            args = [repo for repo in os.listdir('.') if isdir(join(repo, '.git'))]
        elif ('--recursive' in args):
            args = []
            search_paths = [os.path.realpath('.')]
            for (root, directories, filenames) in os.walk('.'):
                if ('.git' in directories):
                    # Don't descend into .git directories themselves.
                    directories.remove('.git')
                for (idx, directory) in enumerate(directories):
                    if os.path.islink(join(root, directory)):
                        # Resolve symlinks; skip links pointing back into a
                        # tree we have already scheduled for walking.
                        directory = os.path.realpath(join(root, directory))
                        if any(((directory + '/').startswith((search_path + '/')) for search_path in search_paths)):
                            continue
                        directories[idx] = directory
                    search_paths.append(os.path.realpath(join(root, directory)))
                    if isdir(join(root, directory, '.git')):
                        args.append(os.path.relpath(join(root, directory)))
    # Normalize away trailing path separators from every candidate name.
    new_args = []
    for arg in args:
        if (arg.endswith('\\') or arg.endswith('/')):
            arg = arg[:(- 1)]
        new_args.append(arg)
    args = new_args
    group_repos = config.groups.get(config.current_group, None)
    for repo in args:
        if (repo in repos):
            msg = ('Repository: %s skipped, already registered' % (repo,))
        else:
            repos.append(repo)
            msg = ('Repository: %s registered' % (repo,))
        # Also add the repo to the currently active group, if any.
        if (group_repos is not None):
            if (repo not in group_repos):
                group_repos.append(repo)
                msg += (' (added to group "%s")' % config.current_group)
            else:
                msg += (' (already in group "%s")' % config.current_group)
        Print(msg)
        msgs.append(msg)
    # Persist the updated configuration.
    with open(config_file, 'w') as f:
        f.write(str(config))
    return Status('\n'.join(msgs), True, config)
class bsn_stats_reply(experimenter_stats_reply):
    """Big Switch Networks experimenter stats reply (OpenFlow 1.3).

    NOTE(review): structure matches loxigen-generated OpenFlow bindings —
    concrete subtypes register themselves in ``subtypes`` keyed by subtype id,
    and ``unpack`` dispatches to them. Confirm against the generated original.
    """
    subtypes = {}
    # Fixed OpenFlow header values for this message.
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143

    def __init__(self, xid=None, flags=None, subtype=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (subtype != None):
            self.subtype = subtype
        else:
            self.subtype = 0
        return

    def pack(self):
        """Serialize the message to wire format; length is backfilled last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the 16-bit length, patched below at index 2.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of padding between flags and the experimenter id.
        # NOTE(review): this is a str, not bytes — Python 2 era code; confirm.
        packed.append(('\x00' * 4))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # NOTE(review): takes ``reader`` with no self — presumably decorated as a
    # @staticmethod in the generated original.
    def unpack(reader):
        """Deserialize from ``reader``, dispatching to a registered subtype if any."""
        # Subtype lives 20 bytes into the message; peek without consuming.
        (subtype,) = reader.peek('!L', 20)
        subclass = bsn_stats_reply.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = bsn_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain all further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        # Skip the 4 padding bytes.
        reader.skip(4)
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        obj.subtype = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        # Equality over the mutable wire fields only.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.subtype != other.subtype):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering of the message to printer ``q``."""
        q.text('bsn_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
            q.breakable()
        q.text('}')
class IndexCreator():
    """Factory producing fresh loop-index variables and recording their extents."""

    def __init__(self):
        # Generated index name -> integer extent.
        self.inames = OrderedDict()
        self.namer = UniqueNameGenerator(forced_prefix='i_')

    def __call__(self, extents):
        """Create index variables for ``extents`` (flat, or one tuple per block)."""
        # An empty extent list gets a single unit axis.
        if len(extents) == 0:
            extents += (1,)
        if isinstance(extents[0], tuple):
            # Nested form: build one index tuple per block of extents.
            return tuple(self._create_indices(block_extents) for block_extents in extents)
        return self._create_indices(extents)

    def _create_indices(self, extents):
        fresh = []
        for extent in extents:
            index_name = self.namer()
            fresh.append(pym.Variable(index_name))
            self.inames[index_name] = int(extent)
        return tuple(fresh)

    def domains(self):
        """Build iteration domains from every index created so far."""
        return create_domains(self.inames.items())
(scope='module')
def stabilityai_sdxl_base_path(test_weights_path: Path) -> Path:
r = ((test_weights_path / 'stabilityai') / 'stable-diffusion-xl-base-1.0')
if (not r.is_dir()):
warn(f'could not find Stability SDXL base weights at {r}, skipping')
pytest.skip(allow_module_level=True)
return r |
class ReplyForm(PostForm):
    """Form for replying to an existing topic, with optional topic tracking."""

    track_topic = BooleanField(_('Track this topic'), default=False, validators=[Optional()])

    def __init__(self, *args, **kwargs):
        # When editing, the existing post arrives via the ``obj`` kwarg.
        self.post = kwargs.get('obj', None)
        PostForm.__init__(self, *args, **kwargs)

    def save(self, user, topic):
        """Create or update the post and sync the user's topic tracking."""
        if self.post is None:
            self.post = Post(content=self.content.data)
        else:
            # Editing an existing post: stamp the modification metadata.
            self.post.date_modified = time_utcnow()
            self.post.modified_by = user.username
        if not self.track_topic.data:
            user.untrack_topic(topic)
        else:
            user.track_topic(topic)
        current_app.pluggy.hook.flaskbb_form_post_save(form=self, post=self.post)
        return self.post.save(user=user, topic=topic)
((MAGICK_VERSION_NUMBER < 1802), reason='Minimum Bounding Box requires ImageMagick-7.0.10.')
def test_minimum_bounding_box():
with Image(filename='wizard:') as img:
img.fuzz = (0.1 * img.quantum_range)
img.background_color = 'white'
mbr = img.minimum_bounding_box()
assert (img.width > mbr.get('width', img.width))
assert (img.height > mbr.get('height', img.height)) |
class OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Config wrapper exposing the highpass mapping's sub-option accessors."""

    def frequency(self) -> 'OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Return the nested ``frequency`` sub-configuration."""
        sub_cls = OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingHighpassFrequency
        return self._config_sub_data('frequency', sub_cls)

    def resonance(self) -> 'OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Return the nested ``resonance`` sub-configuration."""
        sub_cls = OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingHighpassResonance
        return self._config_sub_data('resonance', sub_cls)
def main():
    """CLI entry point: separate vocals from an input track.

    In full mode, additionally runs the Demucs separator over several model
    variants; otherwise only the no-vocals stem is produced.
    """
    p = argparse.ArgumentParser()
    p.add_argument('--gpu', '-g', type=int, default=(- 1))
    p.add_argument('--pretrained_model', '-P', type=str, default='baseline.pth')
    p.add_argument('--input', '-i', required=True)
    p.add_argument('--output_dir', '-o', type=str, default='')
    # FIX: previously this flag had no action, so it required a string value
    # and bool('False')/bool('0') were truthy; store_true makes it a real switch.
    p.add_argument('--full_mode', '-n', action='store_true', default=False)
    args = p.parse_args()
    print(args)
    input_file = args.input
    full_mode = args.full_mode
    (model, device) = load_model(pretrained_model=args.pretrained_model)
    if full_mode:
        separate(input=input_file, model=model, device=device, output_dir=args.output_dir)
        # Run Demucs over several model/stem combinations.
        for (stem, model_name) in [('vocals', 'htdemucs'), (None, 'htdemucs'), (None, 'htdemucs_6s')]:
            separator(tracks=[Path(input_file)], out=Path(args.output_dir), model=model_name, shifts=1, overlap=0.5, stem=stem, int24=False, float32=False, clip_mode='rescale', mp3=True, mp3_bitrate=320, verbose=False)
    else:
        separate(input=input_file, model=model, device=device, output_dir=args.output_dir, only_no_vocals=True)
    convert_to_mp3(input_file, input_file)
class TestFrontendClient(object):
    """Unit tests for FrontendClient request/retry behaviour.

    NOTE(review): several defs below lack their expected decorators
    (e.g. @staticmethod, @pytest.fixture) — they appear stripped in this copy.
    """

    # NOTE(review): no ``self`` — presumably a @staticmethod in the original.
    def _get_fake_response():
        resp = Munch()
        resp.headers = {'Copr-FE-BE-API-Version': '666'}
        resp.status_code = 200
        resp.data = 'ok\n'
        return resp

    def setup_method(self, method):
        # NOTE(review): the frontend_base_url literal below looks mangled (a
        # URL appears to have been stripped, merging two kwargs) — confirm
        # against the original; kept byte-identical here.
        self.opts = Munch(frontend_base_url=' frontend_auth='')
        self.fc = FrontendClient(self.opts)
        self.data = {'foo': 'bar', 'bar': [1, 3, 5]}
        self.url_path = 'sub_path'
        self.build_id = 12345
        self.task_id = '12345-fedora-20-x86_64'
        self.chroot_name = 'fedora-20-x86_64'

    # NOTE(review): generator with yield — presumably a @pytest.fixture.
    def mask_frontend_request(self):
        with mock.patch('copr_common.request.SafeRequest._send_request') as obj:
            (yield obj)

    def test_post_to_frontend(self, f_request_method):
        (name, method) = f_request_method
        method.return_value.status_code = 200
        self.fc.send(self.url_path, method=name, data=self.data)
        assert method.called

    def test_post_to_frontend_wrappers(self, f_request_method):
        # The get/post/... convenience wrappers must delegate to send().
        (name, method) = f_request_method
        method.return_value.status_code = 200
        call = getattr(self.fc, name)
        if (name == 'get'):
            call(self.url_path)
        else:
            call(self.url_path, self.data)
        assert method.called

    def test_post_to_frontend_repeated_first_try_ok(self, mask_frontend_request, mc_time):
        # Success on the first attempt: no sleeping/backoff expected.
        mc_time.time.return_value = 0
        response = self._get_fake_response()
        mask_frontend_request.return_value = response
        assert (self.fc.post(self.data, self.url_path) == response)
        assert (not mc_time.sleep.called)

    def test_post_to_frontend_repeated_second_try_ok(self, f_request_method, mask_frontend_request, mc_time):
        # First attempt raises a retryable error; second succeeds after a sleep.
        (method_name, method) = f_request_method
        response = self._get_fake_response()
        mask_frontend_request.side_effect = [RequestRetryError(), response]
        mc_time.time.return_value = 0
        assert (self.fc.send(self.url_path, data=self.data, method=method_name) == response)
        assert mc_time.sleep.called

    def test_post_to_frontend_err_400(self, post_req, mc_time):
        # A hard 404 after a retryable error must surface as an exception.
        response = Response()
        response.status_code = 404
        response.reason = 'NOT FOUND'
        post_req.side_effect = [RequestRetryError(), response]
        mc_time.time.return_value = 0
        with pytest.raises(FrontendClientException):
            assert (self.fc.post(self.data, self.url_path) == response)
        assert mc_time.sleep.called

    def test_post_to_frontend_repeated_all_attempts_failed(self, mask_frontend_request, caplog, mc_time):
        # Backoff grows by 5s per attempt until the retry budget is exhausted.
        mc_time.time.side_effect = [0, 0, 5, (5 + 10), ((5 + 10) + 15), (((5 + 10) + 15) + 20), 1000]
        mask_frontend_request.side_effect = RequestRetryError()
        with pytest.raises(FrontendClientException):
            self.fc.post(self.data, self.url_path)
        assert (mc_time.sleep.call_args_list == [mock.call(x) for x in [5, 10, 15, 20, 25]])
        records = [x for x in caplog.records if ('Retry request' in x.msg)]
        assert (len(records) == 5)

    def test_post_to_frontend_repeated_indefinitely(self, mask_frontend_request, caplog, mc_time):
        # With try_indefinitely, retries continue until a hard failure appears.
        mc_time.time.return_value = 1
        self.fc.try_indefinitely = True
        mask_frontend_request.side_effect = ([RequestRetryError() for _ in range(100)] + [FrontendClientException()])
        with pytest.raises(FrontendClientException):
            self.fc.post(self.data, self.url_path)
        assert mc_time.sleep.called
        records = [x for x in caplog.records if ('Retry request' in x.msg)]
        assert (len(records) == 100)

    def test_retries_on_outdated_frontend(self, mask_frontend_request, caplog):
        # An old API version header on the frontend keeps the client retrying.
        response = self._get_fake_response()
        response.headers['Copr-FE-BE-API-Version'] = '0'
        mask_frontend_request.side_effect = ([response for _ in range(100)] + [Exception('sorry')])
        with pytest.raises(Exception):
            self.fc.try_indefinitely = True
            self.fc.post(self.url_path, self.data)
        assert (len(mask_frontend_request.call_args_list) == 101)
        assert ('Sending POST request to frontend' in caplog.records[0].getMessage())
        assert ('Copr FE/BE API is too old on Frontend' in caplog.records[1].msg)

    def test_update(self):
        ptfr = MagicMock()
        self.fc.post = ptfr
        self.fc.update(self.data)
        assert (ptfr.call_args == mock.call('update', self.data))

    def test_starting_build(self):
        ptfr = MagicMock()
        self.fc.post = ptfr
        for val in [True, False]:
            ptfr.return_value.json.return_value = {'can_start': val}
            assert (self.fc.starting_build(self.data) == val)

    def test_starting_build_err(self):
        # A non-JSON (MagicMock default) response must raise.
        ptfr = MagicMock()
        self.fc.post = ptfr
        with pytest.raises(FrontendClientException):
            self.fc.starting_build(self.data)

    def test_starting_build_err_2(self):
        # A JSON response missing 'can_start' must raise.
        ptfr = MagicMock()
        self.fc.post = ptfr
        ptfr.return_value.json.return_value = {}
        with pytest.raises(FrontendClientException):
            self.fc.starting_build(self.data)

    def test_reschedule_build(self):
        ptfr = MagicMock()
        self.fc.post = ptfr
        self.fc.reschedule_build(self.build_id, self.task_id, self.chroot_name)
        expected = mock.call('reschedule_build_chroot', {'build_id': self.build_id, 'task_id': self.task_id, 'chroot': self.chroot_name})
        assert (ptfr.call_args == expected)
class TestVOF(object):
    """Regression tests for the VOF transport model under several
    stabilization schemes.

    Each test sets the relevant ``thelper_vof.ct`` flags and delegates to
    ``_run_and_compare``, which runs the solver and checks the final ``u_t2``
    field against a stored CSV reference. (Refactored: the six previously
    copy-pasted test bodies now share one helper.)
    """

    def setup_class(cls):
        pass

    def teardown_class(cls):
        pass

    def setup_method(self, method):
        reload(default_n)
        reload(thelper_vof)
        self.pList = [thelper_vof_p]
        self.nList = [thelper_vof_n]
        self.sList = [default_s]
        self.so = default_so
        self.so.tnList = self.nList[0].tnList
        self._scriptdir = os.path.dirname(__file__)
        self.sim_names = []
        self.aux_names = []

    def teardown_method(self, method):
        pass

    def _run_and_compare(self, suffix, decimal=10):
        """Reload the problem/numerics modules (picking up the ct flags set by
        the caller), run the solver, and compare u_t2 to the reference CSV.

        :param suffix: case name used in the output and reference file names.
        :param decimal: precision for the almost-equal comparison.
        """
        # Reload AFTER the caller mutated thelper_vof.ct so the new flags
        # take effect in the problem/numerics modules.
        reload(default_n)
        reload(thelper_vof_p)
        reload(thelper_vof_n)
        self.so.name = (self.pList[0].name + '_' + suffix)
        ns = proteus.NumericalSolution.NS_base(self.so, self.pList, self.nList, self.sList, opts)
        self.sim_names.append(ns.modelList[0].name)
        ns.calculateSolution('vof')
        actual = tables.open_file(('vof_level_0_' + suffix + '.h5'), 'r')
        expected_path = ('comparison_files/comparison_vof_level_0_' + suffix + '__u_t2.csv')
        np.testing.assert_almost_equal(np.fromfile(os.path.join(self._scriptdir, expected_path), sep=','), np.array(actual.root.u_t2).flatten(), decimal=decimal)
        actual.close()

    def test_supg(self):
        thelper_vof.ct.STABILIZATION_TYPE = 0  # SUPG
        thelper_vof.ct.FCT = False
        self._run_and_compare('SUPG')

    def test_TaylorGalerkin(self):
        thelper_vof.ct.STABILIZATION_TYPE = 1  # Taylor-Galerkin
        thelper_vof.ct.FCT = False
        self._run_and_compare('TaylorGalerkin')

    def test_EV1(self):
        thelper_vof.ct.STABILIZATION_TYPE = 2  # entropy viscosity
        thelper_vof.ct.ENTROPY_TYPE = 1
        thelper_vof.ct.cE = 1.0
        thelper_vof.ct.FCT = True
        self._run_and_compare('EV1')

    def test_EV2(self):
        thelper_vof.ct.STABILIZATION_TYPE = 2  # entropy viscosity, smaller cE
        thelper_vof.ct.ENTROPY_TYPE = 1
        thelper_vof.ct.cE = 0.1
        thelper_vof.ct.FCT = True
        # This case uses a slightly looser tolerance (as in the original test).
        self._run_and_compare('EV2', decimal=8)

    def test_SmoothnessBased(self):
        thelper_vof.ct.STABILIZATION_TYPE = 3  # smoothness-based indicator
        thelper_vof.ct.FCT = True
        self._run_and_compare('SmoothnessBased')

    def test_stab4(self):
        thelper_vof.ct.STABILIZATION_TYPE = 4
        thelper_vof.ct.FCT = True
        self._run_and_compare('stab4')
class FlashbotsPrivateTransactionResponse():
    """Tracks a privately-submitted transaction until inclusion or expiry."""
    w3: Web3
    tx: SignedTxAndHash
    max_block_number: int

    def __init__(self, w3: Web3, signed_tx: HexBytes, max_block_number: int):
        self.w3 = w3
        self.max_block_number = max_block_number
        self.tx = {'signed_transaction': signed_tx, 'hash': self.w3.sha3(signed_tx)}

    def wait(self) -> bool:
        """Block until the tx is mined (True) or max_block_number passes (False)."""
        tx_hash = self.tx['hash']
        while True:
            try:
                self.w3.eth.get_transaction(tx_hash)
            except TransactionNotFound:
                # Not mined yet: give up once the target block is exceeded.
                if self.w3.eth.block_number > self.max_block_number:
                    return False
                time.sleep(1)
            else:
                return True

    def receipt(self) -> Optional[TxReceipt]:
        """Return the receipt once mined, or None if the tx expired unmined."""
        return self.w3.eth.get_transaction_receipt(self.tx['hash']) if self.wait() else None
class IODict(BaseDict):
    """Dict subclass that can (de)serialize itself to and from many formats.

    The constructor accepts either regular dict(...) arguments or a single
    string/path argument, which is decoded according to the ``format`` kwarg
    (autodetected when omitted, falling back to JSON).

    FIX: restored the missing @classmethod/@staticmethod decorators — without
    them, calls such as ``IODict.from_json(s)`` bound ``s`` to ``cls`` and the
    ``_decode*``/``_encode`` helpers could not be called on instances.
    """

    def __init__(self, *args, **kwargs):
        # A single string/path argument means "decode this data source".
        if (len(args) == 1):
            arg = args[0]
            if (type_util.is_string(arg) or type_util.is_path(arg)):
                d = IODict._decode_init(arg, **kwargs)
                super().__init__(d)
                return
        super().__init__(*args, **kwargs)

    @staticmethod
    def _decode_init(s, **kwargs):
        """Decode ``s`` using the explicit ``format`` kwarg or an autodetected one."""
        autodetected_format = io_util.autodetect_format(s)
        default_format = (autodetected_format or 'json')
        format = kwargs.pop('format', default_format).lower()
        return IODict._decode(s, format, **kwargs)

    @staticmethod
    def _decode(s, format, **kwargs):
        """Decode ``s`` in ``format`` to a dict; lists are wrapped under 'values'."""
        data = None
        try:
            data = io_util.decode(s, format, **kwargs)
        except ExtrasRequireModuleNotFoundError as e:
            # Missing optional dependency: surface as-is so the user can install it.
            raise e
        except Exception as error:
            error_traceback = traceback.format_exc()
            raise ValueError(f'''{error_traceback}
Unexpected error / Invalid data or url or filepath argument: {s}
{error}''') from None
        if type_util.is_dict(data):
            return data
        elif type_util.is_list(data):
            # Lists (e.g. CSV rows) get a dict wrapper so BaseDict can hold them.
            return {'values': data}
        else:
            raise ValueError(f'Invalid data type: {type(data)}, expected dict or list.')

    @staticmethod
    def _encode(d, format, **kwargs):
        """Encode dict ``d`` to a string/bytes in the given ``format``."""
        s = io_util.encode(d, format, **kwargs)
        return s

    @classmethod
    def from_base64(cls, s, subformat='json', encoding='utf-8', **kwargs):
        """Build an instance from base64 data wrapping ``subformat``."""
        kwargs['subformat'] = subformat
        kwargs['encoding'] = encoding
        return cls(s, format='base64', **kwargs)

    @classmethod
    def from_cli(cls, s, **kwargs):
        return cls(s, format='cli', **kwargs)

    @classmethod
    def from_csv(cls, s, columns=None, columns_row=True, **kwargs):
        kwargs['columns'] = columns
        kwargs['columns_row'] = columns_row
        return cls(s, format='csv', **kwargs)

    @classmethod
    def from_html(cls, s, **kwargs):
        return cls(s, format='html', **kwargs)

    @classmethod
    def from_ini(cls, s, **kwargs):
        return cls(s, format='ini', **kwargs)

    @classmethod
    def from_json(cls, s, **kwargs):
        return cls(s, format='json', **kwargs)

    @classmethod
    def from_pickle(cls, s, **kwargs):
        return cls(s, format='pickle', **kwargs)

    @classmethod
    def from_plist(cls, s, **kwargs):
        return cls(s, format='plist', **kwargs)

    @classmethod
    def from_query_string(cls, s, **kwargs):
        return cls(s, format='query_string', **kwargs)

    @classmethod
    def from_toml(cls, s, **kwargs):
        return cls(s, format='toml', **kwargs)

    @classmethod
    def from_xls(cls, s, sheet=0, columns=None, columns_row=True, **kwargs):
        kwargs['sheet'] = sheet
        kwargs['columns'] = columns
        kwargs['columns_row'] = columns_row
        return cls(s, format='xls', **kwargs)

    @classmethod
    def from_xml(cls, s, **kwargs):
        return cls(s, format='xml', **kwargs)

    @classmethod
    def from_yaml(cls, s, **kwargs):
        return cls(s, format='yaml', **kwargs)

    def to_base64(self, subformat='json', encoding='utf-8', **kwargs):
        """Serialize to base64-wrapped ``subformat``."""
        kwargs['subformat'] = subformat
        kwargs['encoding'] = encoding
        return self._encode(self.dict(), 'base64', **kwargs)

    def to_cli(self, **kwargs):
        raise NotImplementedError

    def to_csv(self, key='values', columns=None, columns_row=True, **kwargs):
        """Serialize the list stored under ``key`` as CSV."""
        kwargs['columns'] = columns
        kwargs['columns_row'] = columns_row
        return self._encode(self.dict()[key], 'csv', **kwargs)

    def to_html(self, **kwargs):
        raise NotImplementedError

    def to_ini(self, **kwargs):
        return self._encode(self.dict(), 'ini', **kwargs)

    def to_json(self, **kwargs):
        return self._encode(self.dict(), 'json', **kwargs)

    def to_pickle(self, **kwargs):
        return self._encode(self.dict(), 'pickle', **kwargs)

    def to_plist(self, **kwargs):
        return self._encode(self.dict(), 'plist', **kwargs)

    def to_query_string(self, **kwargs):
        return self._encode(self.dict(), 'query_string', **kwargs)

    def to_toml(self, **kwargs):
        return self._encode(self.dict(), 'toml', **kwargs)

    def to_xml(self, **kwargs):
        return self._encode(self.dict(), 'xml', **kwargs)

    def to_xls(self, key='values', sheet=0, columns=None, columns_row=True, format='xlsx', **kwargs):
        raise NotImplementedError

    def to_yaml(self, **kwargs):
        return self._encode(self.dict(), 'yaml', **kwargs)
def extractChocolateotakuWordpressCom(item):
    """Parse a 'chocolateotaku.wordpress.com' feed item into a release message."""
    badwords = ['Rant', 'Rants', 'review']
    if any(bad in item['tags'] for bad in badwords):
        return None
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('ihha', 'Quick Wear: I Have a Heroine Aura', 'translated'),
        ('ccgdd', 'Civil Servant Gets a Gentle Dream', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TunerPayload(BasePayload):
    """Payload builder/merger for tuner (hyper-parameter search) configurations."""

    def __init__(self, s3_config: Optional[_T]=None):
        # All construction is delegated to BasePayload; s3_config enables
        # S3-backed configuration sources.
        super().__init__(s3_config=s3_config)

    # NOTE(review): no ``self`` parameter — presumably a @staticmethod whose
    # decorator was stripped; confirm against the original module.
    def _update_payload(base_payload, input_classes, ignore_classes, payload):
        """Merge ``base_payload`` into ``payload``, validating class spaces and
        attribute names and normalizing 'bounds' entries to tuples."""
        # Maps class-space name -> declared attribute names for validation.
        attr_fields = get_attr_fields(input_classes=input_classes)
        ignore_fields = get_attr_fields(input_classes=ignore_classes)
        for (k, v) in base_payload.items():
            if (k not in ignore_fields):
                if (k != 'config'):
                    if isinstance(v, dict):
                        # Validate the class space and each attribute within it.
                        if (k not in attr_fields):
                            raise TypeError(f'Referring to a class space `{k}` that is undefined')
                        for i_keys in v.keys():
                            if (i_keys not in attr_fields[k]):
                                raise ValueError(f'Provided an unknown argument named `{k}.{i_keys}`')
                    if ((k in payload) and isinstance(v, dict)):
                        payload[k].update(v)
                    else:
                        payload[k] = v
                    # Hyper-parameter range bounds must be tuples, not lists.
                    for (ik, iv) in v.items():
                        if ('bounds' in iv):
                            iv['bounds'] = tuple(iv['bounds'])
        return payload

    # NOTE(review): no ``self`` parameter — presumably a @staticmethod whose
    # decorator was stripped; confirm against the original module.
    def _handle_payload_override(payload, key, value):
        """Apply a dotted cmd-line override ``key=value`` into nested ``payload``."""
        key_split = key.split('.')
        curr_ref = payload
        for (idx, split) in enumerate(key_split):
            # Create the top-level mapping on demand.
            if ((idx == 0) and (split not in payload)):
                payload.update({split: {}})
            if (idx == (len(key_split) - 1)):
                # Leaf: booleans are only written when not False.
                if (isinstance(curr_ref, dict) and isinstance(value, bool)):
                    if (value is not False):
                        curr_ref[split] = value
                elif (isinstance(curr_ref, dict) and (not isinstance(value, bool))):
                    curr_ref[split] = value
                else:
                    raise ValueError(f'cmd-line override failed for `{key}` -- Failed to find key `{split}` within lowest level Dict')
            else:
                # Descend one level.
                curr_ref = curr_ref[split]
        return payload
def extractBlessedmangoWordpressCom(item):
    """Parse a 'blessedmango.wordpress.com' feed item into a release message."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('sss_architect', 'Expelled SSS-rank architect, rebuilding the demon kings castle!', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_image_widget(duration: float, width: int, height: int, filename: str, pkg: typing.Optional[str]=None, start_at: float=BEGINNING, position: POSITION_T=RANDOM) -> Stimulus:
    """Build a Qt picture-label stimulus of the given size at ``position``."""
    resolved_position = get_position(position, (width, height))
    callbacks = [
        ('move', resolved_position),
        ('resize', (width, height)),
        ('setPictureFromFilename', get_filepath(filename, pkg)),
    ]
    return QtStimulus(start_at=start_at, duration=duration, qt_type='QLabelWithPictureFromFilename', callbacks=callbacks)
('ecs_deploy.cli.get_client')
def test_deploy_change_environment_variable_empty_string(get_client, runner):
    """Setting an env var to the empty string must deploy and report the change."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.deploy, (CLUSTER_NAME, SERVICE_NAME, '-e', 'application', 'foo', ''))
    assert result.exit_code == 0
    assert not result.exception
    expected_lines = (
        u'Deploying based on task definition: test-task:1',
        u'Updating task definition',
        u'Changed environment "foo" of container "application" to: ""',
        u'Successfully created revision: 2',
        u'Successfully deregistered revision: 1',
        u'Successfully changed task definition to: test-task:2',
        u'Deployment successful',
    )
    for line in expected_lines:
        assert line in result.output
# NOTE(review): the line below looks like a stripped `@pytest.mark.django_db`
# marker — confirm against the original module.
.django_db
def test_primitives():
    """Exercise the SQL-fragment builders in the `primatives` module.

    Each helper is checked against the exact SQL text it should compose;
    `cc` renders a composed psycopg2 SQL object to a string for comparison.
    (The `primatives` spelling follows the module under test.)
    """
    data_types = OrderedDict([('test', primatives.ColumnDefinition(name='test', data_type='int', not_nullable=True)), ('tube', primatives.ColumnDefinition(name='tube', data_type='text', not_nullable=False))])
    single_key_column = [data_types['test']]
    double_key_column = [data_types['test'], data_types['tube']]
    # Casting column lists, with and without a table alias.
    assert (cc(primatives.make_cast_column_list([], {})) == '')
    assert (cc(primatives.make_cast_column_list(['test'], data_types)) == 'cast("test" as int) as "test"')
    assert (cc(primatives.make_cast_column_list(['test'], data_types, 't')) == 'cast("t"."test" as int) as "test"')
    assert (cc(primatives.make_cast_column_list(['test', 'tube'], data_types, 't')) == 'cast("t"."test" as int) as "test", cast("t"."tube" as text) as "tube"')
    # Change detection: "is distinct from" treats NULLs as comparable values.
    assert (cc(primatives.make_change_detector_conditional([], 'a', 'b')) == '')
    assert (cc(primatives.make_change_detector_conditional(['test'], 'a', 'b')) == '"a"."test" is distinct from "b"."test"')
    assert (cc(primatives.make_change_detector_conditional(['test', 'tube'], 'a', 'b')) == '"a"."test" is distinct from "b"."test" or "a"."tube" is distinct from "b"."tube"')
    # Plain column lists, optionally aliased, with optional SQL overrides per column.
    assert (cc(primatives.make_column_list([])) == '')
    assert (cc(primatives.make_column_list(['test'])) == '"test"')
    assert (cc(primatives.make_column_list(['test'], 't')) == '"t"."test"')
    assert (cc(primatives.make_column_list(['test', 'tube'], 't')) == '"t"."test", "t"."tube"')
    assert (cc(primatives.make_column_list(['test', 'tube'], 't', {'tube': SQL('now()')})) == '"t"."test", now()')
    # SET-clause builders for UPDATE statements.
    assert (cc(primatives.make_column_setter_list([], 't')) == '')
    assert (cc(primatives.make_column_setter_list(['test'], 't')) == '"test" = "t"."test"')
    assert (cc(primatives.make_column_setter_list(['test', 'tube'], 't')) == '"test" = "t"."test", "tube" = "t"."tube"')
    assert (cc(primatives.make_column_setter_list(['test', 'tube'], 't', {'tube': SQL('now()')})) == '"test" = "t"."test", "tube" = now()')
    # Qualified table names with optional schema and alias.
    assert (cc(primatives.make_composed_qualified_table_name('test')) == '"test"')
    assert (cc(primatives.make_composed_qualified_table_name('test', 'tube')) == '"tube"."test"')
    assert (cc(primatives.make_composed_qualified_table_name('test', 'tube', 't')) == '"tube"."test" as "t"')
    assert (cc(primatives.make_composed_qualified_table_name('test', alias='t')) == '"test" as "t"')
    # Join conditionals: nullable key columns use "is not distinct from".
    assert (cc(primatives.make_join_conditional([], 'a', 'b')) == '')
    assert (cc(primatives.make_join_conditional(single_key_column, 'a', 'b')) == '"a"."test" = "b"."test"')
    assert (cc(primatives.make_join_conditional(double_key_column, 'a', 'b')) == '"a"."test" = "b"."test" and "a"."tube" is not distinct from "b"."tube"')
    # Anti-join (exclusion) conditionals.
    assert (cc(primatives.make_join_excluder_conditional([], 't')) == '')
    assert (cc(primatives.make_join_excluder_conditional(single_key_column, 't')) == '"t"."test" is null')
    assert (cc(primatives.make_join_excluder_conditional(double_key_column, 't')) == '"t"."test" is null and "t"."tube" is null')
    table = SQL('{}').format(Identifier('my_table'))
    assert (cc(primatives.make_join_to_table_conditional([], 't', table)) == '')
    assert (cc(primatives.make_join_to_table_conditional(single_key_column, 't', table)) == '"t"."test" = "my_table"."test"')
    assert (cc(primatives.make_join_to_table_conditional(double_key_column, 't', table)) == '"t"."test" = "my_table"."test" and "t"."tube" is not distinct from "my_table"."tube"')
    # Typed column lists for DDL / dblink record definitions.
    assert (cc(primatives.make_typed_column_list([], {})) == '')
    assert (cc(primatives.make_typed_column_list(['test'], data_types)) == '"test" int')
    assert (cc(primatives.make_typed_column_list(['test', 'tube'], data_types)) == '"test" int, "tube" text')
    assert (standardize_whitespace(cc(primatives.wrap_dblink_query('testdblink', 'select now()', 'r', ['test'], data_types))) == 'select "r"."test" from dblink(\'testdblink\', \'select now()\') as "r" ("test" int)')
class TestArchChecker(unittest.TestCase):
    """Integration tests for the archive deduplication checker.

    Each test runs against a phash test database fixture plus a tree of
    on-disk zip archives under ``test_ptree``. Several tests delete archives
    on disk to simulate stale database entries; the fixture is rebuilt in
    setUp(), and the database is torn down via addCleanup().
    """
    def __init__(self, *args, **kwargs):
        # Logging is initialized before the TestCase machinery runs.
        logSetup.initLogging()
        super().__init__(*args, **kwargs)
        # Show full diffs on assertion failures — the expected dicts are large.
        self.maxDiff = None
    def setUp(self):
        self.addCleanup(self.dropDatabase)
        self.db = Tests.basePhashTestSetup.TestDb()
        self.verifyDatabaseLoaded()
    def dropDatabase(self):
        self.db.tearDown()
        self.db.close()
    def _reprDatabase(self, db):
        # Debug helper: print rows with the fixture path rewritten to {cwd}
        # so the output is copy-pasteable into the CONTENTS fixture.
        for row in db:
            row = list(row)
            row[1] = row[1].replace('/media/Storage/Scripts/Deduper/Tests', '{cwd}')
            print(('\t%s, ' % list(row)))
    def verifyDatabaseLoaded(self):
        """Assert the database matches the CONTENTS fixture exactly."""
        expect = list(CONTENTS)
        expect.sort()
        items = list(self.db.getItems())
        items.sort()
        if (items != expect):
            # Dump the actual rows before failing, to ease fixture updates.
            self._reprDatabase(items)
            self.assertEqual(items, expect)
    def test_getItemsSimple(self):
        self.verifyDatabaseLoaded()
    def test_skipSolid(self):
        # An all-white archive has no binary matches, but still phash-matches
        # the archives containing the same text file.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml_w.zip'.format(cwd=cwd))
        self.assertEqual(ck.getMatchingArchives(), {})
        p_expect = {'{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd): {('test.txt', 'test.txt'): True}, '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd): {('test.txt', 'test.txt'): True}, '{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd): {('test.txt', 'test.txt'): True}}
        matching = ck.getPhashMatchingArchives()
        print('Expected')
        pprint.pprint(p_expect)
        print('Matching')
        pprint.pprint(matching)
        self.assertEqual(matching, p_expect)
    def test_isBinaryUnique(self):
        cwd = os.path.dirname(os.path.realpath(__file__))
        ck = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd))
        self.assertFalse(ck.isBinaryUnique())
        ck = TestArchiveChecker('{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        self.assertTrue(ck.isBinaryUnique())
    def test_isPhashUnique(self):
        cwd = os.path.dirname(os.path.realpath(__file__))
        ck = TestArchiveChecker('{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        self.assertTrue(ck.isPhashUnique())
        # Removing the junk duplicate makes z_reg phash-unique...
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        self.assertTrue(ck.isPhashUnique())
        # ...but the downscaled variants are still phash-duplicates.
        ck = TestArchiveChecker('{cwd}/test_ptree/small.zip'.format(cwd=cwd))
        self.assertFalse(ck.isPhashUnique())
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd))
        self.assertFalse(ck.isPhashUnique())
    def test_getBestMatch_1(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertFalse(ck.getBestPhashMatch())
        ck = TestArchiveChecker('{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertFalse(ck.getBestPhashMatch())
        # small.zip has no binary twin but phash-matches the full-size archive.
        ck = TestArchiveChecker('{cwd}/test_ptree/small.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestBinaryMatch(), '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd))
    def test_getBestMatch_2(self):
        cwd = os.path.dirname(os.path.realpath(__file__))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestBinaryMatch(), '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestBinaryMatch(), '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestBinaryMatch(), '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        # As on-disk candidates are deleted, the best match falls back or vanishes.
        os.remove('{cwd}/test_ptree/z_sml_u.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
    def test_getAllMatches_1(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/regular.zip'.format(cwd=cwd))
        self.assertEqual(ck.getMatchingArchives(), {'{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd): {'e61ec521-155d-4a3a-956d-2544d4367e02.jpg', 'funny-pictures-cat-looks-like-an-owl.jpg', 'funny-pictures-cat-will-do-science.jpg', 'funny-pictures-kitten-rules-a-tower.jpg'}, '{cwd}/test_ptree/small_and_regular.zip'.format(cwd=cwd): {'e61ec521-155d-4a3a-956d-2544d4367e02.jpg', 'funny-pictures-cat-looks-like-an-owl.jpg', 'funny-pictures-cat-will-do-science.jpg', 'funny-pictures-kitten-rules-a-tower.jpg'}, '{cwd}/test_ptree/small_and_regular_half_common.zip'.format(cwd=cwd): {'e61ec521-155d-4a3a-956d-2544d4367e02.jpg', 'funny-pictures-cat-looks-like-an-owl.jpg'}})
        expect = ck.getPhashMatchingArchives()
        self.assertEqual(expect, {'{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd): {('e61ec521-155d-4a3a-956d-2544d4367e02.jpg', 'e61ec521-155d-4a3a-956d-2544d4367e02.jpg'): True, ('funny-pictures-cat-looks-like-an-owl.jpg', 'funny-pictures-cat-looks-like-an-owl.jpg'): True, ('funny-pictures-cat-will-do-science.jpg', 'funny-pictures-cat-will-do-science.jpg'): True, ('funny-pictures-kitten-rules-a-tower.jpg', 'funny-pictures-kitten-rules-a-tower.jpg'): True}, '{cwd}/test_ptree/small_and_regular.zip'.format(cwd=cwd): {('e61ec521-155d-4a3a-956d-2544d4367e02.jpg', 'e61ec521-155d-4a3a-956d-2544d4367e02.jpg'): True, ('funny-pictures-cat-looks-like-an-owl.jpg', 'funny-pictures-cat-looks-like-an-owl.jpg'): True, ('funny-pictures-cat-will-do-science.jpg', 'funny-pictures-cat-will-do-science.jpg'): True, ('funny-pictures-kitten-rules-a-tower.jpg', 'funny-pictures-kitten-rules-a-tower.jpg'): True}, '{cwd}/test_ptree/small_and_regular_half_common.zip'.format(cwd=cwd): {('e61ec521-155d-4a3a-956d-2544d4367e02.jpg', 'e61ec521-155d-4a3a-956d-2544d4367e02.jpg'): True, ('funny-pictures-cat-looks-like-an-owl.jpg', 'funny-pictures-cat-looks-like-an-owl.jpg'): True}})
        ck = TestArchiveChecker('{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        self.assertEqual(ck.getMatchingArchives(), {})
        self.assertEqual(ck.getPhashMatchingArchives(), {})
    def test_getAllMatches_2(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/small.zip'.format(cwd=cwd))
        self.assertEqual(ck.getMatchingArchives(), {})
        expect = {'{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd): {('superheroes-batman-superman-i-would-watch-the-hell-out-of-this.jpg', 'superheroes-batman-superman-i-would-watch-the-hell-out-of-this.jpg'): True, ('funny-pictures-kitten-rules-a-tower-ps.png', 'funny-pictures-kitten-rules-a-tower-ps.png'): True, ('funny-pictures-cat-will-do-science-ps.png', 'funny-pictures-cat-will-do-science-ps.png'): True, ('e61ec521-155d-4a3a-956d-2544d4367e02-ps.png', 'e61ec521-155d-4a3a-956d-2544d4367e02-ps.png'): True, ('funny-pictures-cat-looks-like-an-owl-ps.png', 'funny-pictures-cat-looks-like-an-owl-ps.png'): True}, '{cwd}/test_ptree/small_and_regular.zip'.format(cwd=cwd): {('funny-pictures-kitten-rules-a-tower-ps.png', 'funny-pictures-kitten-rules-a-tower-ps.png'): True, ('funny-pictures-cat-will-do-science-ps.png', 'funny-pictures-cat-will-do-science-ps.png'): True, ('e61ec521-155d-4a3a-956d-2544d4367e02-ps.png', 'e61ec521-155d-4a3a-956d-2544d4367e02-ps.png'): True, ('funny-pictures-cat-looks-like-an-owl-ps.png', 'funny-pictures-cat-looks-like-an-owl-ps.png'): True}, '{cwd}/test_ptree/regular.zip'.format(cwd=cwd): {('funny-pictures-kitten-rules-a-tower-ps.png', 'funny-pictures-kitten-rules-a-tower-ps.png'): True, ('funny-pictures-cat-will-do-science-ps.png', 'funny-pictures-cat-will-do-science-ps.png'): True, ('e61ec521-155d-4a3a-956d-2544d4367e02-ps.png', 'e61ec521-155d-4a3a-956d-2544d4367e02-ps.png'): True, ('funny-pictures-cat-looks-like-an-owl-ps.png', 'funny-pictures-cat-looks-like-an-owl-ps.png'): True}, '{cwd}/test_ptree/small_and_regular_half_common.zip'.format(cwd=cwd): {('e61ec521-155d-4a3a-956d-2544d4367e02-ps.png', 'e61ec521-155d-4a3a-956d-2544d4367e02-ps.png'): True, ('funny-pictures-cat-looks-like-an-owl-ps.png', 'funny-pictures-cat-looks-like-an-owl-ps.png'): True}}
        match = ck.getPhashMatchingArchives()
        print('Match:')
        print(match)
        print()
        self.assertEqual(expect, match)
    def test_getAllMatches_3(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd))
        match_expected = {'{cwd}/test_ptree/z_sml_u.zip'.format(cwd=cwd): {'(s).jpg'}, '{cwd}/test_ptree/z_sml_w.zip'.format(cwd=cwd): {'test.txt'}, '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd): {'test.txt'}}
        pmatch_expected = {'{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd): {('(s).jpg', '(s).jpg'): True, ('test.txt', 'test.txt'): True}, '{cwd}/test_ptree/z_sml_u.zip'.format(cwd=cwd): {('(s).jpg', '(s).jpg'): True}, '{cwd}/test_ptree/z_sml_w.zip'.format(cwd=cwd): {('test.txt', 'test.txt'): True}}
        match = ck.getMatchingArchives()
        self.assertEqual(match, match_expected)
        actual_pmatch = ck.getPhashMatchingArchives()
        self.assertEqual(actual_pmatch, pmatch_expected)
    def test_getAllMatches_4(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd))
        match_expected = {'{cwd}/test_ptree/testArch.zip'.format(cwd=cwd): {'Lolcat_this_is_mah_job.png', 'Lolcat_this_is_mah_job_small.jpg'}, '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd): {'Lolcat_this_is_mah_job.jpg', 'lolcat-crocs.jpg', 'Lolcat_this_is_mah_job.png', 'Lolcat_this_is_mah_job_small.jpg', 'lolcat-oregon-trail.jpg'}}
        pmatch_expected = {'{cwd}/test_ptree/testArch.zip'.format(cwd=cwd): {('Lolcat_this_is_mah_job.jpg', 'Lolcat_this_is_mah_job.jpg'): True, ('Lolcat_this_is_mah_job.png', 'Lolcat_this_is_mah_job.png'): True, ('Lolcat_this_is_mah_job_small.jpg', 'Lolcat_this_is_mah_job_small.jpg'): True}, '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd): {('Lolcat_this_is_mah_job.jpg', 'Lolcat_this_is_mah_job.jpg'): True, ('Lolcat_this_is_mah_job.png', 'Lolcat_this_is_mah_job.png'): True, ('Lolcat_this_is_mah_job_small.jpg', 'Lolcat_this_is_mah_job_small.jpg'): True, ('lolcat-crocs.jpg', 'lolcat-crocs.jpg'): True, ('lolcat-oregon-trail.jpg', 'lolcat-oregon-trail.jpg'): True}}
        match = ck.getMatchingArchives()
        pmatch = ck.getPhashMatchingArchives()
        pprint.pprint(pmatch)
        self.assertEqual(match, match_expected)
        self.assertEqual(pmatch, pmatch_expected)
    def test_noMatch(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml_u.zip'.format(cwd=cwd))
        self.assertEqual(ck.getMatchingArchives(), {})
        self.assertEqual(ck.getPhashMatchingArchives(), {})
    def test_itemRemoved(self):
        # Deleted archives must stop being reported as matches.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        os.remove('{cwd}/test_ptree/z_sml_u.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/small.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        os.remove('{cwd}/test_ptree/regular.zip'.format(cwd=cwd))
        os.remove('{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/small.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        val = ck.getBestPhashMatch()
        pprint.pprint(val)
        self.assertFalse(val)
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        os.remove('{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertFalse(ck.getBestPhashMatch())
        ck = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestBinaryMatch(), '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd))
        os.remove('{cwd}/test_ptree/allArch.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestPhashMatch())
        self.assertFalse(ck.getBestBinaryMatch())
    def test_junkFileFiltering(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        ck = TestArchiveChecker('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        print(ck.getBestBinaryMatch())
        print(ck.getBestPhashMatch())
        self.assertEqual(ck.getBestBinaryMatch(), '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd))
    def test_addArchive(self):
        # addArch() should re-insert exactly the fixture rows for that archive.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)
        self.db.deleteBasePath(archPath)
        self.assertFalse(self.db.getLikeBasePath(archPath))
        self.assertFalse(self.db.getItems(fspath=archPath))
        ck = TestArchiveChecker(archPath)
        ck.addArch()
        expect = []
        for item in CONTENTS:
            if (item[1] == archPath):
                expect.append(list(item[1:]))
        have = self.db.getItems(fspath=archPath)
        # Drop the row id column before comparing.
        have = [list(item[1:]) for item in have]
        have.sort()
        expect.sort()
        self.assertEqual(have, expect)
    def test_deleteArchive_1(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)
        ck = TestArchiveChecker(archPath)
        ck.deleteArch()
        self.assertFalse(os.path.exists(archPath))
    def test_deleteArchive_2(self):
        # With moveToPath, the archive is relocated (slashes become ';')
        # instead of being deleted outright.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)
        ck = TestArchiveChecker(archPath)
        ck.deleteArch(moveToPath=os.path.join(cwd, 'test_ptree'))
        self.assertFalse(os.path.exists(archPath))
        toPath = archPath.replace('/', ';')
        toPath = os.path.join(cwd, 'test_ptree', toPath)
        self.assertTrue(os.path.exists(toPath))
    def test_deleteArchive_3(self):
        # A bogus moveToPath must leave the original archive untouched.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)
        ck = TestArchiveChecker(archPath)
        self.assertTrue(os.path.exists(archPath))
        ck.deleteArch(moveToPath='/this/path/does/not/exist')
        self.assertTrue(os.path.exists(archPath))
    def test_isArchive(self):
        # isArchive() must reject a text file even if it has a .zip extension.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)
        self.assertTrue(TestArchiveChecker.isArchive(archPath))
        with open('{cwd}/test_ptree/testTextFile.zip'.format(cwd=cwd), 'w') as fp:
            fp.write('testing\n')
            fp.write('file!\n')
        self.assertFalse(TestArchiveChecker.isArchive('{cwd}/test_ptree/testTextFile.zip'.format(cwd=cwd)))
    def test_addArchive_1(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/test_ptree/z_sml.zip'.format(cwd=cwd)
        (status, bestMatch, commonArches) = deduplicator.ProcessArchive.processDownload(archPath, checkClass=TestArchiveChecker)
        matchPath = '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd)
        self.assertEqual(status, 'deleted was-duplicate')
        self.assertEqual(bestMatch, matchPath)
        # commonArches is keyed by the number of items in common.
        expect = {2: ['{cwd}/test_ptree/z_reg.zip'.format(cwd=cwd), '{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd)]}
        self.assertEqual(commonArches, expect)
    def test_addArchive_2(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/test_ptree/small.zip'.format(cwd=cwd)
        (status, bestMatch, commonArches) = deduplicator.ProcessArchive.processDownload(archPath, checkClass=TestArchiveChecker)
        matchPath = '{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd)
        self.assertEqual(status, 'deleted was-duplicate phash-duplicate')
        self.assertEqual(bestMatch, matchPath)
        # NOTE(review): these expected paths are hard-coded to the original
        # author's machine rather than derived from {cwd} — likely brittle.
        expect = {4: ['/media/Storage/Scripts/Deduper/Tests/test_ptree/regular.zip', '/media/Storage/Scripts/Deduper/Tests/test_ptree/small_and_regular.zip'], 5: ['/media/Storage/Scripts/Deduper/Tests/test_ptree/regular-u.zip']}
        self.assertEqual(commonArches, expect)
    def test_addArchive_3(self):
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        os.remove('{cwd}/test_ptree/small_and_regular.zip'.format(cwd=cwd))
        archPath = '{cwd}/test_ptree/regular.zip'.format(cwd=cwd)
        (status, bestMatch, commonArches) = deduplicator.ProcessArchive.processDownload(archPath, checkClass=TestArchiveChecker)
        expect = {2: ['{cwd}/test_ptree/small_and_regular_half_common.zip'.format(cwd=cwd)]}
        self.assertFalse(status)
        self.assertFalse(bestMatch)
        self.assertEqual(commonArches, expect)
    def test_addArchive_4(self):
        # A nonexistent path is reported as a damaged download.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        archPath = '{cwd}/lol/wat/herp/derp.zip'.format(cwd=cwd)
        (status, bestMatch, commonArches) = deduplicator.ProcessArchive.processDownload(archPath, checkClass=TestArchiveChecker)
        self.assertEqual(status, 'damaged')
        self.assertFalse(bestMatch)
        self.assertEqual(commonArches, {})
    def test_missingResolutionData(self):
        # Entries whose image resolution is NULLed out must not be offered
        # as phash matches.
        self.verifyDatabaseLoaded()
        cwd = os.path.dirname(os.path.realpath(__file__))
        os.remove('{cwd}/test_ptree/z_reg_junk.zip'.format(cwd=cwd))
        ck = TestArchiveChecker('{cwd}/test_ptree/small.zip'.format(cwd=cwd))
        self.assertFalse(ck.getBestBinaryMatch())
        self.assertEqual(ck.getBestPhashMatch(), '{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd))
        rows = []
        rows.extend(self.db.getLikeBasePath('{cwd}/test_ptree/regular.zip'.format(cwd=cwd)))
        rows.extend(self.db.getLikeBasePath('{cwd}/test_ptree/regular-u.zip'.format(cwd=cwd)))
        # Only rows that currently have resolution data (column 3) are nulled.
        rows = [row for row in rows if row[3]]
        rows.sort()
        for row in rows:
            (fsPath, intPath) = (row[0], row[1])
            self.db.updateDbEntry(fsPath=fsPath, internalPath=intPath, imgx=None, imgy=None)
        self.assertFalse(ck.getBestPhashMatch())
# NOTE(review): the line below looks like a stripped marker, presumably
# `@pytest.mark.requires_eclipse` — confirm against the original module.
.requires_eclipse
def test_failed_run(init_ecl100_config, source_root):
    """An Eclipse deck containing an error must make runEclipse() raise."""
    shutil.copy((source_root / 'test-data/eclipse/SPE1_ERROR.DATA'), 'SPE1_ERROR.DATA')
    econfig = ecl_config.Ecl100Config()
    sim = econfig.sim('2019.3')
    erun = ecl_run.EclRun('SPE1_ERROR', sim)
    # The simulator's error text is surfaced in the RuntimeError message.
    with pytest.raises(RuntimeError, match='ERROR AT TIME 0.0 DAYS '):
        erun.runEclipse()
class Banheiro():
    """A simple occupancy queue backed by a list.

    Supports joining (`entrar`), leaving (`sair`), plus `len()`, `in`
    and `repr()` via the container protocol.
    """

    def __init__(self):
        # Internal queue; index 0 is the front.
        self.it = []

    def entrar(self, obj):
        """Add *obj* to the end of the queue."""
        self.it.append(obj)

    def sair(self, pos=0):
        """Remove and return the occupant at *pos* (the front by default)."""
        return self.it.pop(pos)

    def __len__(self):
        return len(self.it)

    def __contains__(self, obj):
        return obj in self.it

    def __repr__(self):
        return f'Fila({self.it})'
def run_test(thr, shape, dtype, axes=None):
    """Benchmark GPU FFT + fftshift (separate and fused) against numpy.

    Runs the FFT and shift as two separate compiled computations, then as a
    single computation with the shift attached as an output transformation,
    verifies both against numpy, and returns the wall-clock timings.
    Returns a dict of timing keys (t_gpu_*, t_cpu_*), all in seconds.
    """
    data = numpy.random.normal(size=shape).astype(dtype)
    fft = FFT(data, axes=axes)
    fftc = fft.compile(thr)
    shift = FFTShift(data, axes=axes)
    shiftc = shift.compile(thr)
    data_dev = thr.to_device(data)
    # Time the FFT kernel alone (in-place).
    t_start = time.time()
    fftc(data_dev, data_dev)
    thr.synchronize()
    t_gpu_fft = (time.time() - t_start)
    # Time the shift kernel alone.
    t_start = time.time()
    shiftc(data_dev, data_dev)
    thr.synchronize()
    t_gpu_shift = (time.time() - t_start)
    # Fresh input: time FFT followed by shift as two separate calls.
    data_dev = thr.to_device(data)
    t_start = time.time()
    fftc(data_dev, data_dev)
    shiftc(data_dev, data_dev)
    thr.synchronize()
    t_gpu_separate = (time.time() - t_start)
    data_gpu = data_dev.get()
    # Fused variant: attach the shift as a transformation on the FFT output.
    data_dev = thr.to_device(data)
    res_dev = thr.empty_like(data_dev)
    shift_tr = fftshift(data, axes=axes)
    fft2 = fft.parameter.output.connect(shift_tr, shift_tr.input, new_output=shift_tr.output)
    fft2c = fft2.compile(thr)
    t_start = time.time()
    fft2c(res_dev, data_dev)
    thr.synchronize()
    t_gpu_combined = (time.time() - t_start)
    # CPU reference timings with numpy.
    t_start = time.time()
    numpy.fft.fftn(data, axes=axes)
    t_cpu_fft = (time.time() - t_start)
    t_start = time.time()
    numpy.fft.fftshift(data, axes=axes)
    t_cpu_shift = (time.time() - t_start)
    t_start = time.time()
    data_ref = numpy.fft.fftn(data, axes=axes)
    data_ref = numpy.fft.fftshift(data_ref, axes=axes)
    t_cpu_all = (time.time() - t_start)
    data_gpu2 = res_dev.get()
    # Both GPU paths must agree with the numpy reference.
    assert numpy.allclose(data_ref, data_gpu)
    assert numpy.allclose(data_ref, data_gpu2)
    return dict(t_gpu_fft=t_gpu_fft, t_gpu_shift=t_gpu_shift, t_gpu_separate=t_gpu_separate, t_gpu_combined=t_gpu_combined, t_cpu_fft=t_cpu_fft, t_cpu_shift=t_cpu_shift, t_cpu_all=t_cpu_all)
class OptionPlotoptionsParetoSonificationContexttracksMappingRate(Options):
    """Option accessors for `plotOptions.pareto.sonification.contextTracks.mapping.rate`.

    Each option is exposed as a property pair delegating to the underlying
    config store (`_config_get` reads, `_config` writes).

    Fix: the original block defined each getter and setter as two plain
    methods with the same name, so every getter was immediately shadowed by
    its setter (dead code). The `@property` / `@<name>.setter` decorators —
    apparently stripped — are restored here.
    """

    @property
    def mapFunction(self):
        # Returns the configured mapping function, or None if unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Returns the configured mapping target, or None if unset.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Upper bound for the mapped rate, or None if unset.
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        # Lower bound for the mapped rate, or None if unset.
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        # Range-limiting option, or None if unset.
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class LegacyKeylist(Keylist):
    """A keylist in the legacy (plain fingerprint list) format.

    Wraps an existing Keylist, copying its state, and implements refresh
    for the legacy text format: one fingerprint per line, '#' comments,
    detached signature at `<url>.sig`.
    """
    def __init__(self, keylist):
        super(LegacyKeylist, self).__init__(keylist.c)
        # Keep a handle to the wrapped keylist so a redirect can re-dispatch
        # through the modern Keylist.refresh path (see refresh()).
        self.original_keylist = keylist
        # Mirror the wrapped keylist's state onto this object.
        self.c = keylist.c
        self.fingerprint = keylist.fingerprint
        self.url = keylist.url
        self.keyserver = keylist.keyserver
        self.use_proxy = keylist.use_proxy
        self.proxy_host = keylist.proxy_host
        self.proxy_port = keylist.proxy_port
        self.last_checked = keylist.last_checked
        self.last_synced = keylist.last_synced
        self.last_failed = keylist.last_failed
        self.error = keylist.error
        self.warning = keylist.warning
        self.q = keylist.q
    def get_keyserver(self):
        """Return the configured keyserver, falling back to the default."""
        if (self.keyserver != b''):
            return self.keyserver
        return self.default_keyserver
    def get_fingerprint_list(self, msg_bytes):
        """Parse the legacy keylist body into a list of fingerprint bytes.

        Strips '#' comments and blank lines; raises InvalidFingerprints if
        any non-empty line fails fingerprint validation.
        """
        fingerprints = []
        invalid_fingerprints = []
        for line in msg_bytes.split(b'\n'):
            if (b'#' in line):
                line = line.split(b'#')[0]
            if (line.strip() == b''):
                continue
            if self.c.valid_fp(line):
                fingerprints.append(line)
            else:
                invalid_fingerprints.append(line)
        if (len(invalid_fingerprints) > 0):
            raise InvalidFingerprints(invalid_fingerprints)
        return fingerprints
    def get_msg_sig_url(self):
        # The detached signature lives next to the keylist at `<url>.sig`.
        return (self.url + b'.sig')
    # NOTE(review): `refresh` takes `common` (not `self`) as its first
    # parameter and is called with an explicit keylist argument — this looks
    # like a @staticmethod whose decorator was lost; verify before calling
    # it on an instance.
    def refresh(common, cancel_q, keylist, force=False):
        """Refresh a legacy keylist: download, verify, and fetch its keys.

        Checks `cancel_q` between network steps so the caller can abort.
        Returns a result object dict ('success', 'skip', 'cancel' or 'error').
        """
        common.log('LegacyKeylist', 'refresh', 'Refreshing keylist {}'.format(keylist.url.decode()))
        keylist.q.add_message(RefresherMessageQueue.STATUS_STARTING)
        if (not keylist.should_refresh(force=force)):
            common.log('LegacyKeylist', 'refresh', "Keylist doesn't need refreshing {}".format(keylist.url.decode()))
            return keylist.result_object('skip')
        if (not common.internet_available()):
            common.log('LegacyKeylist', 'refresh', 'No internet, skipping {}'.format(keylist.url.decode()))
            return keylist.result_object('skip')
        # Download the keylist body.
        result = keylist.refresh_keylist_uri()
        if (result['type'] == 'success'):
            msg_bytes = result['data']
        else:
            return result
        if (cancel_q.qsize() > 0):
            common.log('LegacyKeylist', 'refresh', 'canceling early {}'.format(keylist.url.decode()))
            return keylist.result_object('cancel')
        # Download the detached signature.
        result = keylist.refresh_keylist_signature_uri()
        if (result['type'] == 'success'):
            msg_sig_bytes = result['data']
        else:
            return result
        if (cancel_q.qsize() > 0):
            common.log('LegacyKeylist', 'refresh', 'canceling early {}'.format(keylist.url.decode()))
            return keylist.result_object('cancel')
        # Make sure the authority key itself is valid before verifying.
        result = keylist.validate_authority_key()
        if (result['type'] != 'success'):
            return result
        if (cancel_q.qsize() > 0):
            common.log('LegacyKeylist', 'refresh', 'canceling early {}'.format(keylist.url.decode()))
            return keylist.result_object('cancel')
        # Verify the keylist body against its signature.
        result = keylist.refresh_verify_signature(msg_sig_bytes, msg_bytes)
        if (result['type'] != 'success'):
            return result
        if (cancel_q.qsize() > 0):
            common.log('LegacyKeylist', 'refresh', 'canceling early {}'.format(keylist.url.decode()))
            return keylist.result_object('cancel')
        # A first-line comment of the form `# new_keylist_uri = <url>`
        # redirects this legacy list to a modern keylist.
        new_keylist_uri = None
        first_line = msg_bytes.split(b'\n')[0].strip()
        if (first_line.startswith(b'#') and (b'=' in first_line)):
            parts = [s.strip() for s in first_line[1:].split(b'=')]
            if (parts[0] == b'new_keylist_uri'):
                new_keylist_uri = parts[1]
                common.log('LegacyKeylist', 'refresh', 'Legacy keylist wants to redirect to: {}'.format(new_keylist_uri))
                keylist.original_keylist.url = new_keylist_uri
                # Re-dispatch through the modern refresh path on the wrapped keylist.
                return Keylist.refresh(common, cancel_q, keylist.original_keylist, force)
        try:
            fingerprints = [fp.decode() for fp in keylist.get_fingerprint_list(msg_bytes)]
        except InvalidFingerprints as e:
            return keylist.result_object('error', 'Invalid fingerprints: {}'.format(e))
        # Fetch each listed key from the keyserver, reporting progress.
        (fingerprints_to_fetch, invalid_fingerprints) = keylist.refresh_build_fingerprints_lists(fingerprints)
        total_keys = len(fingerprints_to_fetch)
        keylist.q.add_message(RefresherMessageQueue.STATUS_IN_PROGRESS, total_keys, 0)
        result = keylist.refresh_fetch_fingerprints(fingerprints_to_fetch, total_keys, cancel_q)
        if (result['type'] == 'success'):
            notfound_fingerprints = result['data']
        else:
            return result
        return keylist.result_object('success', data={'keylist': keylist, 'invalid_fingerprints': invalid_fingerprints, 'notfound_fingerprints': notfound_fingerprints})
class FandoghCommand(Command):
    """Click Command subclass that wraps every CLI invocation.

    Adds version-update checks, error-collection consent, and maps the
    CLI's exception hierarchy to user-facing messages and exit codes
    (101 parameter error, 102 API error, 103 auth error, 104 network error).
    """
    def invoke(self, ctx):
        try:
            self._check_for_new_version()
            self._check_for_error_collection_permission()
            return super(FandoghCommand, self).invoke(ctx)
        except CommandParameterException as exp:
            click.echo(format_text(exp.message, TextStyle.FAIL), err=True)
            sys.exit(101)
        except FandoghAPIError as exp:
            debug('APIError. status code: {}, content: {}'.format(exp.response.status_code, exp.response.content))
            click.echo(format_text(exp.message, TextStyle.FAIL), err=True)
            sys.exit(102)
        except VersionException as exp:
            # Mandatory-upgrade case: print the message but do not exit with
            # an error code.
            click.echo(format_text('New Version of {} is available, please update to continue using Fandogh services using : `pip install {} --upgrade`'.format(NAME, NAME), TextStyle.FAIL), err=True)
        except AuthenticationError:
            click.echo(format_text("Please login first. You can do it by running 'fandogh login' command", TextStyle.FAIL), err=True)
            sys.exit(103)
        except requests.exceptions.RequestException as req_err:
            click.echo(format_text('Error in your network connection! trying again might help to fix this issue \nif it is keep happening, please inform us!', TextStyle.FAIL), err=True)
            collect(self, ctx, req_err)
            sys.exit(104)
        except ValidationException as e:
            click.echo(format_text(e.message, TextStyle.FAIL), err=True)
            collect(self, ctx, e)
        except Exception as exp:
            # Unknown errors: report (if permitted) and re-raise for a traceback.
            collect(self, ctx, exp)
            raise exp
    def _check_for_new_version(self):
        """Warn on minor version lag; raise VersionException when too far behind."""
        latest_version = self._get_latest_version()
        if (latest_version is None):
            return None
        version_diff = get_current_version().compare(latest_version)
        if (version_diff < (- 2)):
            click.echo(format_text('New version is available, please update to new version using `pip install {} --upgrade` to access latest bugfixes'.format(NAME), TextStyle.WARNING))
            debug('New Version is available: {}'.format(latest_version))
        elif (version_diff < 0):
            debug('New Version is available: {}'.format(latest_version))
            raise VersionException()
    def _get_latest_version(self):
        """Return the latest published version, cached for six hours.

        Returns None (and records the error) if the lookup fails.
        """
        try:
            cached_version_info = get_user_config().get('version_info')
            if (cached_version_info is None):
                latest_version = get_latest_version()
                last_check = datetime.now()
            else:
                (last_check, latest_version) = (cached_version_info.get('last_check', None), Version(cached_version_info.get('latest_version', None)))
                # Refresh the cache when stale or incomplete.
                if ((latest_version is None) or ((datetime.now() - last_check) > timedelta(hours=6))):
                    latest_version = get_latest_version()
                    last_check = datetime.now()
            get_user_config().set('version_info', dict(last_check=last_check, latest_version=str(latest_version)))
            return latest_version
        except Exception as exp:
            collect(self, {'error': 'Error while getting latest version'}, exp)
            return None
    def _check_for_error_collection_permission(self):
        """Resolve error-collection consent from config, env, or a prompt."""
        def get_value(value):
            # Normalize common "no" spellings; anything else counts as consent.
            if (value is None):
                return None
            value = str(value).lower()
            if (value in ['no', '0', 'n', 'false']):
                return 'NO'
            return 'YES'
        collect_error = get_user_config().get('collect_error')
        if (collect_error is None):
            env_value = get_value(os.environ.get('COLLECT_ERROR'))
            if (env_value is not None):
                get_user_config().set('collect_error', env_value)
            else:
                confirmed = click.confirm('Would you like to let Fandogh CLI to send context information in case any unhandled error happens?')
                if confirmed:
                    get_user_config().set('collect_error', 'YES')
                else:
                    get_user_config().set('collect_error', 'NO')
_frequency(timedelta(days=1))
def fetch_price(zone_key: str='TR', session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list:
    """Return hourly electricity price data points for the TR zone.

    Each point carries the epias.com.tr price in TRY with the timestamp
    localized to Turkish time. Defaults to "now" when no target is given.
    """
    when = target_datetime if target_datetime is not None else datetime.now(tz=TR_TZ)
    raw_items = fetch_data(target_datetime=when, kind='price')
    return [
        {
            'zoneKey': zone_key,
            'datetime': arrow.get(entry.get('date')).datetime.replace(tzinfo=TR_TZ),
            'price': entry.get('price'),
            'source': 'epias.com.tr',
            'currency': 'TRY',
        }
        for entry in raw_items
    ]
class OptionPlotoptionsDumbbellEvents(Options):
    """Event-handler options for Highcharts dumbbell series
    (plotOptions.dumbbell.events).

    Each handler name is declared twice: a no-argument reader returning the
    configured value and a one-argument writer storing a JS callback.
    NOTE(review): these read/write pairs look like @property / @x.setter
    pairs whose decorators were lost upstream - as written, each second def
    simply shadows the first; confirm against the generator that produced
    this module.
    """
    def afterAnimate(self):
        return self._config_get(None)
    def afterAnimate(self, value: Any):
        # js_type=False: value is stored as data, not raw JavaScript.
        self._config(value, js_type=False)
    def checkboxClick(self):
        return self._config_get(None)
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def hide(self):
        return self._config_get(None)
    def hide(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def show(self):
        return self._config_get(None)
    def show(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsDependencywheelAccessibility(Options):
    """Accessibility options for Highcharts dependency-wheel series
    (plotOptions.dependencywheel.accessibility).

    NOTE(review): the duplicated read/write defs look like @property /
    setter pairs whose decorators were lost upstream; as written each
    second def shadows the first.
    """
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def descriptionFormat(self):
        return self._config_get(None)
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def exposeAsGroupOnly(self):
        return self._config_get(None)
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)
    def keyboardNavigation(self) -> 'OptionPlotoptionsDependencywheelAccessibilityKeyboardnavigation':
        # Lazily create/return the keyboardNavigation sub-configuration.
        return self._config_sub_data('keyboardNavigation', OptionPlotoptionsDependencywheelAccessibilityKeyboardnavigation)
    def point(self) -> 'OptionPlotoptionsDependencywheelAccessibilityPoint':
        # Lazily create/return the point sub-configuration.
        return self._config_sub_data('point', OptionPlotoptionsDependencywheelAccessibilityPoint)
.skipif((not has_hf_transformers), reason='requires huggingface transformers')
.parametrize('model', _FRENCH_MODELS)
def test_against_hf_tokenizers_french(model, french_sample_texts):
    """Our tokenizer output must match the HF tokenizer for each French model."""
    compare_tokenizer_outputs_with_hf_tokenizer(french_sample_texts, model.model, Tokenizer, pad_token=model.pad_token)
class TestProcessValue(object):
    """Tests for rgbgradientv2.process_value with static colors and gradients.

    Expected byte layout (matching setting_info1): 6 header bytes, a
    139-byte color/gradient field (color_field_length), then a 16-byte
    trailer whose third-from-last value is the gradient stop count.
    NOTE(review): the bare `.parametrize(...)` lines look like
    `@pytest.mark.parametrize` decorators whose prefix was lost upstream.
    """
    def setting_info1(self):
        # Fixture-style helper describing the rgbgradientv2 setting under test.
        return {'value_type': 'rgbgradientv2', 'rgbgradientv2_header': {'color_field_length': 139, 'duration_length': 2, 'maxgradient': 14}, 'led_id': 2}
    .parametrize('color', ['#FF2200', '#ff2200', 'FF2200', 'ff2200', '#F20', '#f20', 'F20', 'f20'])
    def test_valid_color_hex_string(self, setting_info1, color):
        # Every hex spelling (with/without '#', long/short form) must encode identically.
        bytes_ = rgbgradientv2.process_value(setting_info1, color)
        assert (bytes_ == [2, 29, 1, 2, 49, 81, 255, 200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 15, 32, 2, 0, 0, 255, 0, 220, 5, 138, 2, 0, 0, 0, 0, 1, 0, 0, 0, 232, 3])
    def test_named_colors(self, setting_info1):
        # CSS color names are accepted ('red' == #FF0000).
        bytes_ = rgbgradientv2.process_value(setting_info1, 'red')
        assert (bytes_ == [2, 29, 1, 2, 49, 81, 255, 200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 15, 0, 0, 0, 0, 255, 0, 220, 5, 138, 2, 0, 0, 0, 0, 1, 0, 0, 0, 232, 3])
    def test_not_valid_color_string(self, setting_info1):
        # Unknown color names must be rejected.
        with pytest.raises(ValueError):
            rgbgradientv2.process_value(setting_info1, 'hello')
    .parametrize('color', [(255, 24, 0), [255, 24, 0]])
    def test_valid_color_tuple(self, setting_info1, color):
        # 3-channel tuples and lists are both accepted.
        bytes_ = rgbgradientv2.process_value(setting_info1, color)
        assert (bytes_ == [2, 29, 1, 2, 49, 81, 255, 200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 15, 128, 1, 0, 0, 255, 0, 220, 5, 138, 2, 0, 0, 0, 0, 1, 0, 0, 0, 232, 3])
    def test_not_valid_color_tuple_2_channels(self, setting_info1):
        with pytest.raises(ValueError):
            rgbgradientv2.process_value(setting_info1, [255, 24])
    def test_not_valid_color_tuple_wrong_range(self, setting_info1):
        # Channels outside 0-255 must be rejected.
        with pytest.raises(ValueError):
            rgbgradientv2.process_value(setting_info1, [(- 1), 256, 1337])
    def test_not_valid_color_ints_wrong_type(self, setting_info1):
        # String channels are not coerced to ints.
        with pytest.raises(ValueError):
            rgbgradientv2.process_value(setting_info1, ['ff', '18', '00'])
    def test_valid_rgbgradient_dict(self, setting_info1):
        # Dict form: explicit duration plus positioned color stops (mixed spellings).
        bytes_ = rgbgradientv2.process_value(setting_info1, {'duration': 1000, 'colors': [{'pos': 0, 'color': 'red'}, {'pos': 33, 'color': '#00FF00'}, {'pos': 66, 'color': (0, 0, 255)}]})
        assert (bytes_ == [2, 29, 1, 2, 49, 81, 255, 200, 0, 0, 0, 244, 12, 0, 0, 74, 1, 1, 0, 0, 244, 12, 0, 74, 1, 2, 0, 12, 0, 244, 0, 84, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 15, 0, 0, 0, 0, 255, 0, 220, 5, 138, 2, 0, 0, 0, 0, 1, 0, 3, 0, 232, 3])
    .parametrize('color', ['rgbgradient(colors=0%: #ff0000, 33%: #00ff00, 66%: #0000ff)', 'rgbgradient(colors=0: #ff0000, 33: #00ff00, 66: #0000ff)', 'rgbgradient(colors=0:#ff0000,33:#00ff00,66:#0000ff)', 'rgbgradient(colors=0%: red, 33%: lime, 66%: blue)', 'rgbgradient(duration=1000; colors=0%: #ff0000, 33%: #00ff00, 66%: #0000ff)', 'rgbgradient(colors=0%: #ff0000, 33%: #00ff00, 66%: #0000ff; duration=1000)'])
    def test_valid_rgbgradient(self, setting_info1, color):
        # String form: all equivalent rgbgradient(...) spellings encode to the same bytes.
        bytes_ = rgbgradientv2.process_value(setting_info1, color)
        assert (bytes_ == [2, 29, 1, 2, 49, 81, 255, 200, 0, 0, 0, 244, 12, 0, 0, 74, 1, 1, 0, 0, 244, 12, 0, 74, 1, 2, 0, 12, 0, 244, 0, 84, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 15, 0, 0, 0, 0, 255, 0, 220, 5, 138, 2, 0, 0, 0, 0, 1, 0, 3, 0, 232, 3])
class MORXRearrangementTest(unittest.TestCase):
    """Round-trip tests for the 'morx' rearrangement subtable (binary <-> XML).

    NOTE(review): setUpClass is missing its @classmethod decorator - as
    written, unittest will not run it as a class-level fixture. It was most
    likely stripped by the same tooling that removed other decorators in
    this file; confirm before relying on cls.font being set.
    """
    def setUpClass(cls):
        cls.maxDiff = None
        cls.font = FakeFont(['.nodef', 'A', 'B', 'C'])
    def test_decompile_toXML(self):
        # binary -> table object -> XML must match the reference XML dump.
        table = newTable('morx')
        table.decompile(MORX_REARRANGEMENT_DATA, self.font)
        self.assertEqual(getXML(table.toXML), MORX_REARRANGEMENT_XML)
    def test_compile_fromXML(self):
        # XML -> table object -> binary must match the reference byte blob.
        table = newTable('morx')
        for (name, attrs, content) in parseXML(MORX_REARRANGEMENT_XML):
            table.fromXML(name, attrs, content, font=self.font)
        self.assertEqual(hexStr(table.compile(self.font)), hexStr(MORX_REARRANGEMENT_DATA))
(cls=ClickAliasedGroup, help=cmd_help)
('-v', '--verbose', help='Enable verbose logging.', is_flag=True, default=False)
def cli(**options: Dict[(str, Any)]) -> None:
    """CLI entry point: configure root logging according to --verbose.

    Verbose mode logs at DEBUG with a detailed format (timestamp, logger
    name, line number); otherwise INFO with a terse format.
    """
    verbose = options['verbose']
    level = logging.DEBUG if verbose else logging.INFO
    if verbose:
        fmt = '[%(asctime)s] [%(name)s:%(lineno)d] [%(levelname)s] %(message)s'
    else:
        fmt = '[%(levelname)s] %(message)s'
    logging.basicConfig(level=level, format=fmt, datefmt='%Y-%m-%d %H:%M:%S %z')
class Components():
    """Entry point for Clarity design-system components on a page.

    Construction registers the Clarity package imports and CSS on the page;
    factory methods return configured HTML components.
    """
    def __init__(self, page):
        self.page = page
        if (self.page.ext_packages is None):
            self.page.ext_packages = {}
        # Register the Clarity JS/CSS package definitions and base styles.
        self.page.ext_packages.update(PkgImports.CLARITY)
        self.page.cssImport.add('/core')
        self.page.cssImport.add('/city')
        # Clarity typography hook on the document body.
        self.page.body.attr['cds-text'] = 'body'
    def button(self, text='', icon=None, width=(None, '%'), height=(None, 'px'), align='left', html_code=None, tooltip=None, profile=None, options=None):
        """Create a Clarity button component with the given text and size.

        NOTE(review): icon, align and tooltip are accepted but unused here,
        and the width default declares '%' while both dimensions are
        normalized to 'px' below - confirm the intended units.
        """
        self.page.jsImports.add('/button')
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        html_but = HtmlClrForms.Btn(self.page, text, html_code, (options or {}), profile, {'width': width, 'height': height})
        return html_but
_required
def UserEdit(request, username):
    """Edit a user's profile; only the profile's owner may edit it.

    GET renders the bound profile form; POST validates and saves it, then
    redirects to the user's people page.
    """
    profile = UserProfile.objects.get(user__username=username)
    user = User.objects.get(username=username)
    # Ownership check. NOTE(review): is_authenticated() is the pre-Django-1.10
    # callable form - confirm the Django version this targets.
    if (not (request.user.is_authenticated() and (request.user.id == user.id))):
        messages.add_message(request, messages.INFO, 'You cannot edit this profile')
        return HttpResponseRedirect('/404')
    if (request.method == 'POST'):
        profile_form = UserProfileForm(request.POST, request.FILES, instance=profile)
        if profile_form.is_valid():
            user = profile_form.save()
            messages.add_message(request, messages.INFO, 'Your profile has been updated.')
            return HttpResponseRedirect(('/people/%s' % user.username))
        else:
            # Fall through and re-render the form with its errors bound.
            logger.debug('profile form contained errors:')
            logger.debug(profile_form.errors)
    else:
        profile_form = UserProfileForm(instance=profile)
    # Tell the template whether an avatar already exists.
    if profile.image:
        has_image = True
    else:
        has_image = False
    return render(request, 'registration/registration_form.html', {'form': profile_form, 'has_image': has_image, 'existing_user': True})
class UploadFile():
    """Wrap an uploaded file, give it a random collision-free name and save it.

    ``upload_folder`` must be assigned by the caller before ``upload_file()``
    is invoked; until then ``target_file`` stays None. Files without an
    extension get ``file_name = None`` and are never saved.
    """

    def __init__(self, file):
        self.file = file
        self.file_extension = self.get_extension()
        if self.file_extension:
            # Only files with an extension receive a generated name.
            self.file_name = self.random_filename(self.file.filename, characters=8)
        else:
            self.file_name = None
        self.upload_folder = None
        self.target_file = None

    def get_extension(self):
        """Return the text after the file name's last dot, or False if none."""
        try:
            return self.file.filename.rsplit('.', 1)[1]
        except IndexError:
            # No dot in the file name: no extension.
            return False

    def random_filename(self, file_name, characters=8):
        """Return '<random>.<ext>' using `characters` random chars, retrying on collision.

        fix: the old code computed secure_filename(file_name) and then
        immediately overwrote the result on every loop pass - that dead
        assignment is removed (file_name is kept for interface compatibility).
        NOTE(review): the os.path.isfile() collision check runs against the
        current working directory, not self.upload_folder (still None at
        construction time) - confirm this is intended.
        """
        chars = (string.ascii_lowercase + string.digits)
        while True:
            output_string = ''.join((random.choice(chars) for _ in range(characters)))
            new_file_name = ((output_string + '.') + self.file_extension)
            if (not os.path.isfile(new_file_name)):
                break
        return new_file_name

    def upload_file(self):
        """Save the file into upload_folder; return the file object or False.

        Returns False when no name/folder is set or the extension is not allowed.
        """
        if (self.file_name and self.upload_folder):
            self.target_file = os.path.join(self.upload_folder, self.file_name)
        else:
            return False
        if FlicketConfig.extension_allowed(self.file_name):
            self.file.save(self.target_file)
            return self.file
        else:
            return False
class ConfigDictGenerator():
    """Generates fuzzing inputs for faucet config parsing.

    create_config_dict appends mutated key-name strings to an existing
    corpus file; create_examples writes example faucet configs for every
    small connected topology from the networkx graph atlas.
    """
    # Monotonically increasing serial used to fabricate hardware serial numbers.
    serial = 0
    # NOTE(review): create_config_dict takes no `self` - it reads like a
    # @staticmethod whose decorator was lost upstream; confirm before calling
    # it through an instance.
    def create_config_dict(file_name):
        """Append mutated config-key strings (key+suffix / prefix+key) to the corpus file."""
        with open(file_name, 'r+', encoding='utf-8') as config_file:
            # Seed mutations from the second character of each existing corpus line.
            bogus_values = []
            for value in config_file.readlines():
                bogus_values.append(('%s' % value[1:2]))
            for value in V2_TOP_CONFS:
                for bogus in bogus_values:
                    to_write = ('%s%s' % (value, bogus))
                    rev_to_write = ('%s%s' % (bogus, value))
                    # Skip combinations already present in the corpus.
                    if ((to_write in bogus_values) or (rev_to_write in bogus_values) or (value in bogus_values)):
                        continue
                    config_file.write(('\n"%s"' % to_write))
                    config_file.write(('\n"%s"' % rev_to_write))
            # Repeat for every default key of each config object type.
            for conf_obj in [ACL, Meter, Port, Router, DP, VLAN]:
                for value in conf_obj.defaults:
                    for bogus in bogus_values:
                        to_write = ('%s%s' % (value, bogus))
                        rev_to_write = ('%s%s' % (bogus, value))
                        if ((to_write in bogus_values) or (rev_to_write in bogus_values) or (value in bogus_values)):
                            continue
                        config_file.write(('\n"%s"' % to_write))
                        config_file.write(('\n"%s"' % rev_to_write))
    def create_examples(self, file_base, file_name):
        """Write numbered example faucet configs (stacked and non-stacked) into file_base."""
        ex_curr = 0
        num_hosts = 1
        num_vlans = 2
        def get_serialno(*_args, **_kwargs):
            # Hand out a fresh serial number per call (shared class counter).
            self.serial += 1
            return self.serial
        def create_config(network_graph, stack=True):
            """Build a faucet config string for one topology graph."""
            host_links = {}
            host_vlans = {}
            dp_options = {}
            host_n = 0
            # Attach num_hosts hosts per VLAN to every DP in the graph.
            for dp_i in network_graph.nodes():
                for _ in range(num_hosts):
                    for v_i in range(num_vlans):
                        host_links[host_n] = [dp_i]
                        host_vlans[host_n] = v_i
                        host_n += 1
                dp_options[dp_i] = {'hardware': 'GenericTFM'}
                if ((dp_i == 0) and stack):
                    # DP 0 is the stack root.
                    dp_options[dp_i]['stack'] = {'priority': 1}
            # Duplicate each edge to get two parallel links per pair.
            switch_links = (list(network_graph.edges()) * 2)
            if stack:
                link_vlans = {link: None for link in switch_links}
            else:
                link_vlans = {link: list(range(num_vlans)) for link in switch_links}
            topo = FaucetFakeOFTopoGenerator('ovstype', 'portsock', 'testname', len(network_graph.nodes()), False, host_links, host_vlans, switch_links, link_vlans, start_port=1, port_order=[0, 1, 2, 3], get_serialno=get_serialno)
            config = topo.get_config(num_vlans, dp_options=dp_options)
            return config
        configs = []
        topologies = graph_atlas_g()
        for graph in topologies:
            # Only connected, non-empty topologies with at most 4 nodes.
            if ((not graph) or (not networkx.is_connected(graph))):
                continue
            if (len(graph.nodes()) > 4):
                break
            for stack in (True, False):
                configs.append(create_config(graph, stack=stack))
        for config in configs:
            ex_fn = os.path.join(file_base, ('%s_%s' % (file_name, ex_curr)))
            with open(ex_fn, 'w+', encoding='utf-8') as ex_file:
                ex_file.write(config)
            ex_curr += 1
def upgrade():
    """Alembic upgrade: rename ziggurat-foundations constraints and indexes
    to the naming-convention form (pk_/uq_/ck_/ix_ prefixes, fk recreation).

    PostgreSQL only - other dialects are left untouched. Each rename/drop is
    guarded by an inspector lookup so the migration tolerates partially
    upgraded schemas.
    """
    c = get_context()
    insp = sa.inspect(c.connection.engine)
    # Legacy (pre-convention) primary-key constraint names to look for.
    groups_permissions_pkey = 'groups_permissions_pkey'
    groups_pkey = 'groups_pkey'
    groups_resources_permissions_pkey = 'groups_resources_permissions_pkey'
    users_groups_pkey = 'users_groups_pkey'
    users_permissions_pkey = 'users_permissions_pkey'
    users_resources_permissions_pkey = 'users_resources_permissions_pkey'
    if isinstance(c.connection.engine.dialect, PGDialect):
        op.execute('ALTER INDEX groups_unique_group_name_key RENAME to ix_groups_uq_group_name_key')
        # Recreate the lowercase perm_name CHECK constraints under ck_ names.
        op.drop_constraint('groups_permissions_perm_name_check', 'groups_permissions')
        op.execute('\n ALTER TABLE groups_permissions\n ADD CONSTRAINT ck_groups_permissions_perm_name CHECK (perm_name::text = lower(perm_name::text));\n ')
        op.drop_constraint('groups_resources_permissions_perm_name_check', 'groups_resources_permissions')
        op.execute('\n ALTER TABLE groups_resources_permissions\n ADD CONSTRAINT ck_groups_resources_permissions_perm_name CHECK (perm_name::text = lower(perm_name::text));\n ')
        op.drop_constraint('user_permissions_perm_name_check', 'users_permissions')
        op.execute('\n ALTER TABLE users_permissions\n ADD CONSTRAINT ck_user_permissions_perm_name CHECK (perm_name::text = lower(perm_name::text));\n ')
        op.drop_constraint('users_resources_permissions_perm_name_check', 'users_resources_permissions')
        op.execute('\n ALTER TABLE users_resources_permissions\n ADD CONSTRAINT ck_users_resources_permissions_perm_name CHECK (perm_name::text = lower(perm_name::text));\n ')
        op.execute('ALTER INDEX users_email_key2 RENAME to ix_users_uq_lower_email')
        op.execute('ALTER INDEX users_username_uq2 RENAME to ix_users_ux_lower_username')
        # Rename primary-key indexes to pk_<table> where the legacy name is present.
        if (groups_permissions_pkey == insp.get_pk_constraint('groups_permissions')['name']):
            op.execute('ALTER INDEX groups_permissions_pkey RENAME to pk_groups_permissions')
        if (groups_pkey == insp.get_pk_constraint('groups')['name']):
            op.execute('ALTER INDEX groups_pkey RENAME to pk_groups')
        if (groups_resources_permissions_pkey == insp.get_pk_constraint('groups_resources_permissions')['name']):
            op.execute('ALTER INDEX groups_resources_permissions_pkey RENAME to pk_groups_resources_permissions')
        if (users_groups_pkey == insp.get_pk_constraint('users_groups')['name']):
            op.execute('ALTER INDEX users_groups_pkey RENAME to pk_users_groups')
        if (users_permissions_pkey == insp.get_pk_constraint('users_permissions')['name']):
            op.execute('ALTER INDEX users_permissions_pkey RENAME to pk_users_permissions')
        if (users_resources_permissions_pkey == insp.get_pk_constraint('users_resources_permissions')['name']):
            op.execute('ALTER INDEX users_resources_permissions_pkey RENAME to pk_users_resources_permissions')
        if ('external_identities_pkey' == insp.get_pk_constraint('external_identities')['name']):
            op.execute('ALTER INDEX external_identities_pkey RENAME to pk_external_identities')
        # Drop legacy foreign keys and recreate them (names derive from the
        # active naming convention) with CASCADE/SET NULL semantics.
        if ('external_identities_local_user_name_fkey' in [c['name'] for c in insp.get_foreign_keys('external_identities')]):
            op.drop_constraint('external_identities_local_user_name_fkey', 'external_identities', type_='foreignkey')
            op.create_foreign_key(None, 'external_identities', 'users', remote_cols=['user_name'], local_cols=['local_user_name'], onupdate='CASCADE', ondelete='CASCADE')
        if ('groups_permissions_group_id_fkey' in [c['name'] for c in insp.get_foreign_keys('groups_permissions')]):
            op.drop_constraint('groups_permissions_group_id_fkey', 'groups_permissions', type_='foreignkey')
            op.create_foreign_key(None, 'groups_permissions', 'groups', remote_cols=['id'], local_cols=['group_id'], onupdate='CASCADE', ondelete='CASCADE')
        if ('groups_group_name_key' in [c['name'] for c in insp.get_unique_constraints('groups')]):
            op.execute('ALTER INDEX groups_group_name_key RENAME to uq_groups_group_name')
        if ('groups_resources_permissions_group_id_fkey' in [c['name'] for c in insp.get_foreign_keys('groups_resources_permissions')]):
            op.drop_constraint('groups_resources_permissions_group_id_fkey', 'groups_resources_permissions', type_='foreignkey')
            op.create_foreign_key(None, 'groups_resources_permissions', 'groups', remote_cols=['id'], local_cols=['group_id'], onupdate='CASCADE', ondelete='CASCADE')
        if ('groups_resources_permissions_resource_id_fkey' in [c['name'] for c in insp.get_foreign_keys('groups_resources_permissions')]):
            op.drop_constraint('groups_resources_permissions_resource_id_fkey', 'groups_resources_permissions', type_='foreignkey')
            op.create_foreign_key(None, 'groups_resources_permissions', 'resources', remote_cols=['resource_id'], local_cols=['resource_id'], onupdate='CASCADE', ondelete='CASCADE')
        if ('resources_pkey' == insp.get_pk_constraint('resources')['name']):
            op.execute('ALTER INDEX resources_pkey RENAME to pk_resources')
        if ('resources_owner_group_id_fkey' in [c['name'] for c in insp.get_foreign_keys('resources')]):
            op.drop_constraint('resources_owner_group_id_fkey', 'resources', type_='foreignkey')
            op.create_foreign_key(None, 'resources', 'groups', remote_cols=['id'], local_cols=['owner_group_id'], onupdate='CASCADE', ondelete='SET NULL')
        if ('resources_owner_user_id_fkey' in [c['name'] for c in insp.get_foreign_keys('resources')]):
            op.drop_constraint('resources_owner_user_id_fkey', 'resources', type_='foreignkey')
            op.create_foreign_key(None, 'resources', 'users', remote_cols=['id'], local_cols=['owner_user_id'], onupdate='CASCADE', ondelete='SET NULL')
        if ('resources_parent_id_fkey' in [c['name'] for c in insp.get_foreign_keys('resources')]):
            op.drop_constraint('resources_parent_id_fkey', 'resources', type_='foreignkey')
            # Self-referential FK for the resource tree.
            op.create_foreign_key(None, 'resources', 'resources', remote_cols=['resource_id'], local_cols=['parent_id'], onupdate='CASCADE', ondelete='SET NULL')
        if ('users_pkey' == insp.get_pk_constraint('users')['name']):
            op.execute('ALTER INDEX users_pkey RENAME to pk_users')
        if ('users_email_key' in [c['name'] for c in insp.get_unique_constraints('users')]):
            op.execute('ALTER INDEX users_email_key RENAME to uq_users_email')
        if ('users_user_name_key' in [c['name'] for c in insp.get_unique_constraints('users')]):
            op.execute('ALTER INDEX users_user_name_key RENAME to uq_users_user_name')
        if ('users_groups_group_id_fkey' in [c['name'] for c in insp.get_foreign_keys('users_groups')]):
            op.drop_constraint('users_groups_group_id_fkey', 'users_groups', type_='foreignkey')
            op.create_foreign_key(None, 'users_groups', 'groups', remote_cols=['id'], local_cols=['group_id'], onupdate='CASCADE', ondelete='CASCADE')
        if ('users_groups_user_id_fkey' in [c['name'] for c in insp.get_foreign_keys('users_groups')]):
            op.drop_constraint('users_groups_user_id_fkey', 'users_groups', type_='foreignkey')
            op.create_foreign_key(None, 'users_groups', 'users', remote_cols=['id'], local_cols=['user_id'], onupdate='CASCADE', ondelete='CASCADE')
        if ('users_permissions_user_id_fkey' in [c['name'] for c in insp.get_foreign_keys('users_permissions')]):
            op.drop_constraint('users_permissions_user_id_fkey', 'users_permissions', type_='foreignkey')
            op.create_foreign_key(None, 'users_permissions', 'users', remote_cols=['id'], local_cols=['user_id'], onupdate='CASCADE', ondelete='CASCADE')
        if ('users_resources_permissions_resource_id_fkey' in [c['name'] for c in insp.get_foreign_keys('users_resources_permissions')]):
            op.drop_constraint('users_resources_permissions_resource_id_fkey', 'users_resources_permissions', type_='foreignkey')
            op.create_foreign_key(None, 'users_resources_permissions', 'resources', remote_cols=['resource_id'], local_cols=['resource_id'], onupdate='CASCADE', ondelete='CASCADE')
        if ('users_resources_permissions_user_id_fkey' in [c['name'] for c in insp.get_foreign_keys('users_resources_permissions')]):
            op.drop_constraint('users_resources_permissions_user_id_fkey', 'users_resources_permissions', type_='foreignkey')
            op.create_foreign_key(None, 'users_resources_permissions', 'users', remote_cols=['id'], local_cols=['user_id'], onupdate='CASCADE', ondelete='CASCADE')
class OptionPlotoptionsItemMarkerStates(Options):
    """Marker state sub-options for Highcharts item series
    (plotOptions.item.marker.states): hover / normal / select."""
    def hover(self) -> 'OptionPlotoptionsItemMarkerStatesHover':
        # Lazily create/return the `hover` sub-configuration object.
        return self._config_sub_data('hover', OptionPlotoptionsItemMarkerStatesHover)
    def normal(self) -> 'OptionPlotoptionsItemMarkerStatesNormal':
        # Lazily create/return the `normal` sub-configuration object.
        return self._config_sub_data('normal', OptionPlotoptionsItemMarkerStatesNormal)
    def select(self) -> 'OptionPlotoptionsItemMarkerStatesSelect':
        # Lazily create/return the `select` sub-configuration object.
        return self._config_sub_data('select', OptionPlotoptionsItemMarkerStatesSelect)
def test_deployment_config(dashboard_user, config):
    """The deploy_config endpoint must echo the server-side config values."""
    response = dashboard_user.get('dashboard/api/deploy_config')
    assert (response.status_code == 200)
    data = response.json
    assert (data['database_name'] == config.database_name)
    assert (data['outlier_detection_constant'] == config.outlier_detection_constant)
    # timezone is serialized as a string in the JSON payload.
    assert (data['timezone'] == str(config.timezone))
    assert (data['colors'] == config.colors)
class SchedulerBaseTester(unittest.TestCase):
    """Tests for SchedulerBase: the `studio` argument/attribute contract and
    the abstract schedule() method."""
    def setUp(self):
        super(SchedulerBaseTester, self).setUp()
        from stalker import Studio
        self.test_studio = Studio(name='Test Studio')
        self.kwargs = {'studio': self.test_studio}
        self.test_scheduler_base = SchedulerBase(**self.kwargs)
    def test_studio_argument_is_skipped(self):
        # Omitting studio entirely defaults it to None.
        self.kwargs.pop('studio')
        new_scheduler_base = SchedulerBase(**self.kwargs)
        assert (new_scheduler_base.studio is None)
    def test_studio_argument_is_None(self):
        self.kwargs['studio'] = None
        new_scheduler_base = SchedulerBase(**self.kwargs)
        assert (new_scheduler_base.studio is None)
    def test_studio_attribute_is_None(self):
        # studio can be cleared after construction.
        self.test_scheduler_base.studio = None
        assert (self.test_scheduler_base.studio is None)
    def test_studio_argument_is_not_a_Studio_instance(self):
        # Non-Studio arguments must raise TypeError with the exact message.
        self.kwargs['studio'] = 'not a studio instance'
        with pytest.raises(TypeError) as cm:
            SchedulerBase(**self.kwargs)
        assert (str(cm.value) == 'SchedulerBase.studio should be an instance of stalker.models.studio.Studio, not str')
    def test_studio_attribute_is_not_a_Studio_instance(self):
        with pytest.raises(TypeError) as cm:
            self.test_scheduler_base.studio = 'not a studio instance'
        assert (str(cm.value) == 'SchedulerBase.studio should be an instance of stalker.models.studio.Studio, not str')
    def test_studio_argument_is_working_properly(self):
        assert (self.test_scheduler_base.studio == self.kwargs['studio'])
    def test_studio_attribute_is_working_properly(self):
        from stalker import Studio
        new_studio = Studio(name='Test Studio 2')
        self.test_scheduler_base.studio = new_studio
        assert (self.test_scheduler_base.studio == new_studio)
    def test_schedule_method_will_raise_not_implemented_error(self):
        # The base class declares schedule() but leaves it unimplemented.
        base = SchedulerBase()
        with pytest.raises(NotImplementedError) as cm:
            base.schedule()
        assert (str(cm.value) == '')
def get_location(location_slug):
    """Resolve a Location from its slug.

    With a slug: return the matching Location or raise
    LocationDoesNotExistException. Without one: return the sole defined
    Location, or raise LocationNotUniqueException when several exist.
    """
    if location_slug:
        location = Location.objects.filter(slug=location_slug).first()
        if location is None:
            # fix: QuerySet.first() returns None instead of raising, so the
            # old bare `except:` never fired and a missing slug silently
            # fell through returning None; raise the documented exception.
            raise LocationDoesNotExistException(('The requested location does not exist: %s' % location_slug))
    elif (Location.objects.count() == 1):
        location = Location.objects.get(id=1)
    else:
        raise LocationNotUniqueException('You did not specify a location and yet there is more than one location defined. Please specify a location.')
    return location
def character_aware_flip90(art: str) -> str:
art_lines = art.split('\n')
art_char_list = [[(flip90_character_alternatives[char] if (char in flip90_character_alternatives.keys()) else char) for char in list(line)] for line in art_lines]
char_list_flipped = list(map(list, zip(*art_char_list)))
output = ''
for line in char_list_flipped[::(- 1)]:
output += (''.join(line) + '\n')
return output[:(- 1)] |
def new_table_from_expr(name, expr, const, temporary):
    """Create and populate a new SQL table from a table-typed expression.

    name: table Id; expr: compiled table instance to copy rows from;
    const: keep rows as-is (no auto 'id' primary key); temporary: register
    the table for drop on unwind. Returns the new table object.
    """
    assert isinstance(name, Id)
    elems = expr.type.elems
    # If any column type is still unknown we cannot emit DDL - return an
    # empty instance backed by a NULL query instead.
    if any(((t <= T.unknown) for t in elems.values())):
        return objects.TableInstance.make(sql.null, expr.type, [])
    if (('id' in elems) and (not const)):
        msg = "Field 'id' already exists. Rename it, or use 'const table' to copy it as-is."
        raise Signal.make(T.NameError, None, msg)
    table = T.table(dict(elems), name=name, pk=([] if const else [['id']]), temporary=temporary)
    if (not const):
        # Non-const tables get an auto-generated id primary-key column.
        table.elems['id'] = T.t_id
    db_query(sql.compile_type_def(name, table))
    if temporary:
        # Schedule the drop for when the enclosing scope unwinds.
        get_var('__unwind__').append((lambda : drop_table(table)))
    (read_only, flat_columns) = table_flat_for_insert(table)
    # BigQuery cannot autogenerate ids on insert: project GENERATE_UUID()
    # into the source expression and insert the id column explicitly.
    if ((get_db().target == sql.bigquery) and ('id' in read_only)):
        to_exclude = (['id'] if ('id' in expr.type.elems) else [])
        proj = ast.Projection(expr, [ast.NamedField('id', objects.Instance.make(sql.RawSql(T.string, 'GENERATE_UUID()'), T.string, [])), ast.NamedField(None, ast.Ellipsis(None, to_exclude))])
        expr = cast_to_instance(proj)
        read_only.remove('id')
        flat_columns.insert(0, 'id')
    # Drop read-only columns the source expression happens to carry.
    expr = exclude_fields(expr, (set(read_only) & set(elems)))
    db_query(sql.Insert(name, flat_columns, expr.code), expr.subqueries)
    return objects.new_table(table)
class js_search_data():
    """Tree node holding child search entries plus linkage and data slots."""

    def __init__(self):
        # Children may be plain values or nested js_data nodes.
        self.children: list = []
        self.parent: js_data = None
        self.after = None
        self.before = None
        self.data = None

    def __repr__(self):
        # NOTE(review): assumes children are JSON-serializable - confirm
        # js_data instances never end up here unconverted.
        return json.dumps(self.children)

    def to_list(self):
        """Recursively flatten the children into plain nested lists."""
        return [
            (entry.to_list() if (type(entry) is js_data) else entry)
            for entry in self.children
        ]
(bot, 'chat')
def handle(this, username, message, *args):
    """Chat-command dispatcher for the bot.

    Recognizes 'can see', 'pos', 'wearing', 'block', 'spawn' and 'quit'
    prefixes; the bot's own messages are ignored and anything else gets a
    canned reply.
    """
    if (username == bot.username):
        return
    if message.startswith('can see'):
        try:
            # Parse "can see x, y, z" into three integers.
            (x, y, z) = map((lambda v: int(v)), message.split('see')[1].replace(',', ' ').split())
        except Exception:
            bot.chat('Bad syntax')
        # NOTE(review): on a successful parse nothing is done with (x, y, z) -
        # the visibility check this branch implies appears to be missing.
    elif message.startswith('pos'):
        say_position(username)
    elif message.startswith('wearing'):
        say_equipment(username)
    elif message.startswith('block'):
        say_block_under()
    elif message.startswith('spawn'):
        say_spawn()
    elif message.startswith('quit'):
        quit_game(username)
    else:
        bot.chat("That's nice")
.standalone
def main():
    """Build a Mayavi pipeline over fire_ug.vtu: outline plus a contoured
    surface whose contours come from 'u' while coloring uses 't'
    (switched via SetActiveAttribute)."""
    mayavi.new_scene()
    # Load the unstructured-grid dataset shipped with the examples.
    r = VTKXMLFileReader()
    filename = join(mayavi2.get_data_dir(dirname(abspath(__file__))), 'fire_ug.vtu')
    r.initialize(filename)
    mayavi.add_source(r)
    # Contour on the 'u' point scalars.
    r.point_scalars_name = 'u'
    o = Outline()
    mayavi.add_module(o)
    c = Contour()
    mayavi.add_filter(c)
    # Normals smooth the contour surface shading.
    n = PolyDataNormals()
    mayavi.add_filter(n)
    # Switch the active scalars to 't' so the surface is colored by it.
    aa = SetActiveAttribute()
    mayavi.add_filter(aa)
    aa.point_scalars_name = 't'
    s = Surface(enable_contours=True)
    mayavi.add_module(s)
class TermOrder(SimplificationRule):
    """Canonical-ordering rule: move a lone constant operand to the
    right-hand side of a commutative binary operation (e.g. `2 + x` -> `x + 2`)."""

    def apply(self, operation: Operation) -> list[tuple[(Expression, Expression)]]:
        """Return [(original, reordered)] when the swap applies, else []."""
        if (operation.operation not in COMMUTATIVE_OPERATIONS):
            return []
        if (not isinstance(operation, BinaryOperation)):
            raise TypeError(f'Expected BinaryOperation, got {type(operation)}')
        lhs = operation.left
        rhs = operation.right
        # Only swap when exactly the left side is a constant.
        constant_on_left = isinstance(lhs, Constant) and not isinstance(rhs, Constant)
        if not constant_on_left:
            return []
        reordered = BinaryOperation(operation.operation, [rhs, lhs], operation.type, operation.tags)
        return [(operation, reordered)]
class Principal(object):
    """A Kerberos principal: a list of name components plus an optional realm.

    Accepts a string ("comp1/comp2@REALM" with backslash escaping), another
    Principal (copied), or a (components..., realm) sequence.
    fix: the '@' realm separator had been stripped from both the parse
    regex and __str__ (which read `'' + self.realm`); it is restored so
    "name@REALM" round-trips correctly.
    """
    def __init__(self, value=None, default_realm=None, type=None):
        self.type = constants.PrincipalNameType.NT_UNKNOWN
        self.components = []
        self.realm = None
        if (value is None):
            return
        try:
            # Python 2: normalize unicode to utf-8 bytes.
            if isinstance(value, unicode):
                value = value.encode('utf-8')
        except NameError:
            # Python 3: `unicode` is undefined; normalize bytes to str instead.
            if isinstance(value, bytes):
                value = value.decode('utf-8')
        if isinstance(value, Principal):
            self.type = value.type
            self.components = value.components[:]
            self.realm = value.realm
        elif isinstance(value, str):
            # name part, then an optional '@realm' suffix (backslash escapes honored).
            m = re.match('((?:[^\\\\]|\\\\.)+?)(@((?:[^\\\\]|\\\\.)+))?$', value)
            if (not m):
                raise KerberosException('invalid principal syntax')

            def unquote_component(comp):
                # Strip the escaping backslashes.
                return re.sub('\\\\(.)', '\\1', comp)
            if (m.group(2) is not None):
                self.realm = unquote_component(m.group(3))
            else:
                self.realm = default_realm
            # Split the name part on unescaped '/' separators.
            self.components = [unquote_component(qc) for qc in re.findall('(?:[^\\\\/]|\\\\.)+', m.group(1))]
        elif (len(value) == 2):
            # (components, realm) pair; a bare string component is wrapped in a list.
            self.components = value[0]
            self.realm = value[(- 1)]
            if isinstance(self.components, str):
                self.components = [self.components]
        elif (len(value) >= 2):
            # (comp1, comp2, ..., realm) sequence.
            self.components = value[0:(- 1)]
            self.realm = value[(- 1)]
        else:
            raise KerberosException('invalid principal value')
        if (type is not None):
            self.type = type
    def __eq__(self, other):
        if isinstance(other, str):
            other = Principal(other)
        # NT_UNKNOWN on either side matches any type.
        return (((self.type == constants.PrincipalNameType.NT_UNKNOWN.value) or (other.type == constants.PrincipalNameType.NT_UNKNOWN.value) or (self.type == other.type)) and all(map((lambda a, b: (a == b)), self.components, other.components)) and (self.realm == other.realm))
    def __str__(self):
        """Render as 'comp1/comp2@REALM' with '/' and '\\' escaped in components."""
        def quote_component(comp):
            return re.sub('([\\\\/])', '\\\\\\1', comp)
        ret = '/'.join([quote_component(c) for c in self.components])
        if (self.realm is not None):
            # fix: restore the '@' separator before the realm.
            ret += ('@' + self.realm)
        return ret
    def __repr__(self):
        return (((((('Principal((' + repr(self.components)) + ', ') + repr(self.realm)) + '), t=') + str(self.type)) + ')')
    def from_asn1(self, data, realm_component, name_component):
        """Populate type/components/realm from a decoded ASN.1 structure; returns self."""
        name = data.getComponentByName(name_component)
        self.type = constants.PrincipalNameType(name.getComponentByName('name-type')).value
        self.components = [str(c) for c in name.getComponentByName('name-string')]
        self.realm = str(data.getComponentByName(realm_component))
        return self
    def components_to_asn1(self, name):
        """Fill the ASN.1 `name` structure with this principal's type and components."""
        name.setComponentByName('name-type', int(self.type))
        strings = name.setComponentByName('name-string').getComponentByName('name-string')
        for (i, c) in enumerate(self.components):
            strings.setComponentByPosition(i, ensure_binary(c))
        return name
def convert_old_fl_trainer_config_to_new(trainer):
if ('synctrainer' == trainer['type'].lower()):
trainer['_base_'] = 'base_sync_trainer'
elif ('asynctrainer' == trainer['type'].lower()):
trainer['_base_'] = 'base_async_trainer'
elif ('privatesynctrainer' == trainer['type'].lower()):
trainer['_base_'] = 'base_private_sync_trainer'
del trainer['type']
if ('channel_config' in trainer):
trainer['channel'] = trainer['channel_config']
del trainer['channel_config']
_handle_optimizer(trainer)
_handle_lr_scheduler(trainer)
_handle_trainer_to_client_params(trainer)
_handle_timeout_simulator(trainer)
_handle_active_user_selector(trainer)
_handle_aggregator(trainer)
_handle_training_event_generator(trainer)
_handle_async_weight(trainer)
_handle_private_client_config(trainer)
_handle_private_reducer_config(trainer) |
# NOTE(review): the tuple below looks like the argument list of a stripped
# '@app.callback(...)' decorator -- confirm against the original source.
(Output('truncate-hrv-status', 'children'), [Input('truncate-hrv-button', 'n_clicks'), Input('recovery-metric-dropdown-input-submit', 'n_clicks')], [State('truncate-date', 'value')])
def reset_hrv_plan(n_clicks, metric_n_clicks, hrv_date):
    """Dash callback: reset the HRV workout-plan workflow.

    Two triggers are handled: submitting a new recovery metric wipes the
    entire workoutStepLog table; pressing the truncate button resets the
    plan back to step 0 on *hrv_date* and deletes later entries. Either
    way the training workflow is re-run afterwards.

    Args:
        n_clicks: click count of 'truncate-hrv-button' (Input; not read
            directly -- the trigger is resolved via callback_context).
        metric_n_clicks: click count of the metric submit button (Input;
            likewise unused directly).
        hrv_date: 'YYYY-MM-DD' date string from the 'truncate-date' State.

    Returns:
        An html.H6 status element, or '' when fired without a trigger.
    """
    ctx = dash.callback_context
    if ctx.triggered:
        # prop_id is '<component-id>.<property>'; keep only the component id.
        latest = ctx.triggered[0]['prop_id'].split('.')[0]
        try:
            if (latest == 'recovery-metric-dropdown-input-submit'):
                # Metric changed: clear the whole step log so the plan restarts.
                app.session.execute(delete(workoutStepLog))
                app.session.commit()
            if (latest == 'truncate-hrv-button'):
                date = datetime.strptime(hrv_date, '%Y-%m-%d').date()
                app.server.logger.info('Resetting HRV workout plan workflow to step 0 on {}'.format(date))
                # Drop every log entry after the chosen date...
                app.session.execute(delete(workoutStepLog).where((workoutStepLog.date > date)))
                # ...and reset the entry on that date back to step 0.
                # NOTE(review): assumes a row exists for `date`; if not, the
                # AttributeError lands in the except branch below -- confirm.
                query = app.session.query(workoutStepLog).filter((workoutStepLog.date == date)).first()
                query.workout_step = 0
                query.workout_step_desc = 'Low'
                query.rationale = 'You manually restarted the hrv workout plan workflow today'
                query.athlete_id = 1
                query.completed = 0
                app.session.commit()
            athlete_info = app.session.query(athlete).filter((athlete.athlete_id == 1)).first()
            training_workflow(min_non_warmup_workout_time=athlete_info.min_non_warmup_workout_time, metric=athlete_info.recovery_metric)
            app.session.remove()
            return html.H6('HRV Plan Reset!')
        except Exception as e:
            # BUG FIX: was 'except BaseException', which also swallowed
            # KeyboardInterrupt/SystemExit. 'Exception' keeps the best-effort
            # rollback-and-report behavior without hiding interpreter signals.
            app.session.rollback()
            app.server.logger.error('Error resetting hrv workout plan: {}'.format(e))
            app.session.remove()
            return html.H6('Error Resetting HRV Plan')
    return ''
def toolkit_object(name, raise_exceptions=False):
    """Resolve *name* through the active toolkit backend.

    Initializes the backend lazily on first use. When *raise_exceptions*
    is true and the backend returns its 'Unimplemented' placeholder, raise
    RuntimeError instead of returning it.
    """
    global _toolkit
    if _toolkit is None:
        # First call: let toolkit() select and install the backend.
        toolkit()
    obj = _toolkit(name)
    if not raise_exceptions:
        return obj
    if obj.__name__ == 'Unimplemented':
        raise RuntimeError(
            "Can't import {} for backend {}".format(repr(name), _toolkit.toolkit))
    return obj
def get_talent_data() -> dict[int, list[dict[str, int]]]:
    """Parse the talent section of the save stream via next_int.

    Layout (all 4-byte ints): category count, then per category its cat id,
    a talent count, and (id, level) pairs for each talent.

    Returns:
        Mapping of cat id -> list of {'id': ..., 'level': ...} dicts.
    """
    category_count = next_int(4)
    result: dict[int, list[dict[str, int]]] = {}
    for _ in range(category_count):
        cat_id = next_int(4)
        talent_count = next_int(4)
        # Read order matters: id then level for each talent, left to right.
        result[cat_id] = [
            {'id': next_int(4), 'level': next_int(4)}
            for _ in range(talent_count)
        ]
    return result
def __calculate_track_length(current_track, next_track):
    """Return the track length in seconds between two track start positions.

    Each track tuple is (_, _, minute, second, frame); a CD frame is 1/75 s.
    """
    (_, _, m0, s0, f0) = current_track
    (_, _, m1, s1, f1) = next_track
    # Same arithmetic shape as minutes*60 + seconds + frames/75 on the deltas.
    return ((m1 - m0) * 60) + (s1 - s0) + ((f1 - f0) / 75)
def module_available(calculator):
    """Return True if the module backing *calculator* can be imported.

    Unknown calculator names (not present in IMPORT_DICT) yield False, as
    does a failed import of the mapped module.
    """
    try:
        import_name = IMPORT_DICT[calculator]
    except KeyError:
        return False
    try:
        importlib.import_module(import_name)
    except ImportError:
        # ModuleNotFoundError is a subclass of ImportError, so this covers both.
        return False
    return True
class PLSFileParser(object):
    """Small configparser wrapper for reading .pls playlist files.

    Both accessors accept an optional *fallback* returned when the option is
    missing or unparsable; without a fallback, the underlying exception
    propagates.
    """
    # Unique sentinel distinguishing "no fallback supplied" from any real value.
    NOT_SET = type('NotSetType', (object,), {})
    # SafeConfigParser exists only on older Pythons; fall back to ConfigParser.
    parser_class = (
        configparser.SafeConfigParser
        if hasattr(configparser, 'SafeConfigParser')
        else configparser.ConfigParser
    )

    def __init__(self, path):
        # SafeConfigParser warns about its own deprecation; silence just that.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=DeprecationWarning)
            self._parser = self.parser_class()
            self._parser.read(path)

    def _lookup(self, getter, section, key, fallback):
        # Shared missing-option/bad-value handling for both accessors.
        try:
            return getter(section, key)
        except (configparser.NoOptionError, ValueError):
            if fallback is self.NOT_SET:
                raise
            return fallback

    def getint(self, section, key, fallback=NOT_SET):
        """Read *key* from *section* as an int."""
        return self._lookup(self._parser.getint, section, key, fallback)

    def get(self, section, key, fallback=NOT_SET):
        """Read *key* from *section* as a string."""
        return self._lookup(self._parser.get, section, key, fallback)
class OptionPlotoptionsErrorbarLabel(Options):
    """Label configuration options for errorbar plot series.

    Auto-generated option accessors: each option has a reader returning its
    default through ``_config_get`` and a writer storing through ``_config``.

    NOTE(review): every option is defined twice under the same name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped during extraction, so as written the later ``def`` shadows the
    earlier one. Confirm against the generated original before relying on
    getter behavior.
    """
    def boxesToAvoid(self):
        return self._config_get(None)
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)
    def connectorAllowed(self):
        return self._config_get(False)
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)
    def connectorNeighbourDistance(self):
        return self._config_get(24)
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get('undefined')
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def maxFontSize(self):
        return self._config_get(None)
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)
    def minFontSize(self):
        return self._config_get(None)
    def minFontSize(self, num: float):
        self._config(num, js_type=False)
    def onArea(self):
        return self._config_get(None)
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)
    # 'style' is a nested sub-configuration object, not a scalar option.
    def style(self) -> 'OptionPlotoptionsErrorbarLabelStyle':
        return self._config_sub_data('style', OptionPlotoptionsErrorbarLabelStyle)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
class Platform(GowinPlatform):
    """Tang Nano board platform (Gowin GW1N-1) using the open 'gowin' toolchain."""

    def __init__(self):
        super().__init__(
            'GW1N-LV1QN48C6/I5',
            _io,
            [],
            toolchain='gowin',
            devicename='GW1N-1',
        )
        # Repurpose the DONE and RECONFIG_N pins as general-purpose I/O.
        self.toolchain.options['use_done_as_gpio'] = 1
        self.toolchain.options['use_reconfign_as_gpio'] = 1

    def create_programmer(self):
        """Return an openFPGALoader instance targeting the Tang Nano."""
        return OpenFPGALoader('tangnano')
class OptionSeriesScatterSonificationTracksMappingPitch(Options):
    """Pitch-mapping options for scatter-series sonification tracks.

    Auto-generated option accessors: a reader returning the default through
    ``_config_get`` and a writer storing through ``_config`` for each option.

    NOTE(review): each option is defined twice under the same name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped during extraction, so the later ``def`` shadows the earlier
    one as written. Confirm against the generated original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    # max/min defaults are note names ('c6'/'c2'), not numbers.
    def max(self):
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
class AsyncLogFilter(AsyncFilter):
    """Async filter over log entries supporting optional data-field matching
    and per-entry formatting.
    """

    # Class-level defaults; instances override in __init__/set_data_filters.
    data_filter_set = None
    data_filter_set_regex = None
    data_filter_set_function = None
    log_entry_formatter = None
    filter_params: FilterParams = None
    builder: AsyncEventFilterBuilder = None

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull our extension kwargs out before delegating to the base class.
        self.log_entry_formatter = kwargs.pop('log_entry_formatter', self.log_entry_formatter)
        if 'data_filter_set' in kwargs:
            self.set_data_filters(kwargs.pop('data_filter_set'))
        super().__init__(*args, **kwargs)

    def format_entry(self, entry: LogReceipt) -> LogReceipt:
        """Apply the configured formatter, or return the entry unchanged."""
        formatter = self.log_entry_formatter
        return formatter(entry) if formatter else entry

    def set_data_filters(self, data_filter_set: Collection[Tuple[TypeStr, Any]]) -> None:
        """Install data filters; a matcher is built only for non-empty specs."""
        self.data_filter_set = data_filter_set
        if any(data_filter_set):
            self.data_filter_set_function = match_fn(self.eth_module.codec, data_filter_set)

    def is_valid_entry(self, entry: LogReceipt) -> bool:
        """True when no data filters are configured or the entry's data matches."""
        return (not self.data_filter_set) or bool(self.data_filter_set_function(entry['data']))
def test_create_client_and_secret(db, config):
    """create_client_and_secret must store a salted hash of the returned
    plaintext secret and start the client with empty permissions.

    Args (pytest fixtures):
        db: database session fixture.
        config: application config fixture (security settings).
    """
    (new_client, secret) = ClientDetail.create_client_and_secret(db, config.security.oauth_client_id_length_bytes, config.security.oauth_client_secret_length_bytes)
    # The stored secret must be a hash, never missing or plaintext.
    assert (new_client.hashed_secret is not None)
    # Re-hashing the plaintext with the persisted salt reproduces the stored hash.
    assert (hash_with_salt(secret.encode(config.security.encoding), new_client.salt.encode(config.security.encoding)) == new_client.hashed_secret)
    # Fresh clients carry no scopes, roles, or systems.
    assert (new_client.scopes == [])
    assert (new_client.roles == [])
    assert (new_client.systems == [])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.