code stringlengths 281 23.7M |
|---|
def gen_primes_opt():
    """Yield primes indefinitely using an incremental sieve.

    Composites are tracked lazily: ``sieve`` maps each upcoming odd
    composite to one of its prime factors, so memory grows with the number
    of primes found rather than with the range scanned.
    """
    yield 2
    sieve = {}
    for candidate in itertools.count(3, step=2):
        factor = sieve.pop(candidate, None)
        if factor is None:
            # First sighting of this odd number: it is prime.  Its square is
            # the first composite it becomes responsible for marking.
            sieve[candidate * candidate] = candidate
            yield candidate
        else:
            # candidate is composite; re-schedule `factor` at its next odd
            # multiple that is not already claimed by another prime.
            nxt = candidate + 2 * factor
            while nxt in sieve:
                nxt += 2 * factor
            sieve[nxt] = factor
def test_get_general_stats(stats_updater, backend_db):
    """General stats start at zero and reflect objects added to the backend."""
    empty_stats = stats_updater.get_general_stats()
    assert empty_stats['number_of_firmwares'] == 0, 'number of firmwares not correct'
    assert empty_stats['number_of_unique_files'] == 0, 'number of files not correct'
    fw, parent_fo, child_fo = create_fw_with_parent_and_child()
    # One firmware with two file objects -> 1 firmware, 3 unique files.
    for obj in (fw, parent_fo, child_fo):
        backend_db.add_object(obj)
    populated_stats = stats_updater.get_general_stats()
    assert populated_stats['number_of_firmwares'] == 1, 'number of firmwares not correct'
    assert populated_stats['number_of_unique_files'] == 3, 'number of files not correct'
def send_email(to, action, subject, html, attachments=None, bcc=None, reply_to=None):
    """Send an HTML email for *action* via the configured email service.

    Args:
        to: recipient email address (a User object is tolerated but warned about).
        action: message category; mail is skipped when disabled in MessageSettings.
        subject: email subject line.
        html: HTML body.
        attachments: optional attachments forwarded to the send task.
        bcc: optional BCC list.
        reply_to: optional Reply-To address.

    Returns:
        True once the mail was dispatched/recorded, False for an empty
        recipient, None when the message category is disabled.
    """
    # Imported lazily -- presumably to avoid a circular import with .tasks.
    from .tasks import get_smtp_config, send_email_task_sendgrid, send_email_task_smtp
    if (not MessageSettings.is_enabled(action)):
        logger.info('Mail of type %s is not enabled. Hence, skipping...', action)
        return
    if isinstance(to, User):
        # Tolerate callers passing a User; fall back to its email address.
        logger.warning('to argument should be an email string, not a User object')
        to = to.email
    if string_empty(to):
        logger.warning('Recipient cannot be empty')
        return False
    email_service = get_settings()['email_service']
    email_from_name = get_settings()['email_from_name']
    if (email_service == 'smtp'):
        # SMTP uses a combined "Name<address>" From header.
        email_from = (((email_from_name + '<') + get_settings()['email_from']) + '>')
    else:
        email_from = get_settings()['email_from']
    payload = {'to': to, 'from': email_from, 'subject': subject, 'html': html, 'attachments': attachments, 'bcc': bcc, 'reply_to': reply_to}
    if (not (current_app.config['TESTING'] or (email_service == 'disable'))):
        if (email_service == 'smtp'):
            smtp_status = check_smtp_config(get_smtp_config())
            if smtp_status:
                # Actual delivery happens asynchronously in a background task.
                send_email_task_smtp.delay(payload)
            else:
                logger.error('SMTP is not configured properly. Cannot send email.')
        elif (email_service == 'sendgrid'):
            key = get_settings().get('sendgrid_key')
            if key:
                payload['fromname'] = email_from_name
                send_email_task_sendgrid.delay(payload)
            else:
                logger.error('SMTP & sendgrid have not been configured properly')
        else:
            logger.error('Invalid Email Service Setting: %s. Skipping email', email_service)
    else:
        # Testing / disabled mode: record the payload instead of sending it.
        logger.warning('Email Service is disabled in settings, so skipping email')
        mail_recorder = current_app.config['MAIL_RECORDER']
        mail_recorder.record(payload)
    # The mail is logged to the database regardless of delivery outcome.
    mail = Mail(recipient=to, action=action, subject=subject, message=html)
    save_to_db(mail, 'Mail Recorded')
    record_activity('mail_event', email=to, action=action, subject=subject)
    return True
def get_client(configdict=None, configfile=None, autoconnect=False, version_min=VERSION_MIN, version_max=VERSION_MAX):
    """Construct a Builder, verify connectivity, and return its client.

    Raises:
        ClientException: when the connection attempt fails; the original
            exception is chained as the cause.
    """
    log = logging.getLogger(__name__)
    log.info('Creating client object and testing connection')
    builder = Builder(
        configdict=configdict,
        configfile=configfile,
        autoconnect=autoconnect,
        version_min=version_min,
        version_max=version_max,
    )
    try:
        builder.connect()
    except Exception as exc:
        log.critical('Exception encountered: %s', exc)
        raise ClientException from exc
    return builder.client
class DiskCache(CacheHandler):
    """Cache handler persisting entries as pickled files on disk.

    Each cache file holds two consecutive pickles: the expiration timestamp
    followed by the value.  A class-level re-entrant lock serializes access,
    and `_prune` keeps the file count near `threshold`.
    """
    lock = threading.RLock()
    # Temp-file suffix; files carrying it are skipped by _list_dir().
    _fs_transaction_suffix = '.__mt_cache'
    # 384 == 0o600: cache files readable/writable by the owner only.
    _fs_mode = 384

    def __init__(self, cache_dir: str='cache', threshold: int=500, default_expire: int=300):
        """Create the cache directory under the app root if it does not exist."""
        super().__init__(default_expire=default_expire)
        self._threshold = threshold
        self._path = os.path.join(current.app.root_path, cache_dir)
        if (not os.path.exists(self._path)):
            os.mkdir(self._path)

    def _get_filename(self, key: str) -> str:
        # Hash the key so any string maps to a safe, fixed-length file name.
        khash = hashlib_sha1(key).hexdigest()
        return os.path.join(self._path, khash)

    def _del_file(self, filename: str):
        # Best-effort delete; a concurrently-removed file is not an error.
        try:
            os.remove(filename)
        except Exception:
            pass

    def _list_dir(self) -> List[str]:
        """Return full paths of all cache files, excluding in-flight temp files."""
        return [os.path.join(self._path, fn) for fn in os.listdir(self._path) if (not fn.endswith(self._fs_transaction_suffix))]

    def _prune(self):
        """When over threshold, evict expired entries plus every third file."""
        with self.lock:
            entries = self._list_dir()
            if (len(entries) > self._threshold):
                now = time.time()
                try:
                    for (i, fpath) in enumerate(entries):
                        remove = False
                        # Only the leading expiration pickle is read here.
                        f = LockedFile(fpath, 'rb')
                        exp = pickle.load(f.file)
                        f.close()
                        remove = ((exp <= now) or ((i % 3) == 0))
                        if remove:
                            self._del_file(fpath)
                except Exception:
                    pass

    def get(self, key: str) -> Any:
        """Return the cached value for *key*, or None if absent/expired/unreadable."""
        filename = self._get_filename(key)
        try:
            with self.lock:
                now = time.time()
                f = LockedFile(filename, 'rb')
                exp = pickle.load(f.file)
                if (exp < now):
                    f.close()
                    return None
                val = pickle.load(f.file)
                f.close()
        except Exception:
            return None
        return val

    # NOTE(review): stray bare name below -- looks like a mangled decorator
    # (e.g. an @_convert_duration_... wrapper supplying the `expiration`
    # kwarg used in set()); confirm against the upstream source.
    _convert_duration_
    def set(self, key: str, value: Any, **kwargs):
        """Store *value* under *key* via an atomic temp-file write + rename."""
        filename = self._get_filename(key)
        # NOTE: filename is already an absolute path, so this join returns
        # it unchanged.
        filepath = os.path.join(self._path, filename)
        with self.lock:
            self._prune()
            if os.path.exists(filepath):
                self._del_file(filepath)
            try:
                (fd, tmp) = tempfile.mkstemp(suffix=self._fs_transaction_suffix, dir=self._path)
                with os.fdopen(fd, 'wb') as f:
                    # Expiration first (protocol 1), then the value.
                    pickle.dump(kwargs['expiration'], f, 1)
                    pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
                os.rename(tmp, filename)
                os.chmod(filename, self._fs_mode)
            except Exception:
                pass

    def clear(self, key: Optional[str]=None):
        """Remove one entry (when *key* is given) or every cache file."""
        with self.lock:
            if (key is not None):
                filename = self._get_filename(key)
                try:
                    os.remove(filename)
                    return
                except Exception:
                    return
            for name in self._list_dir():
                self._del_file(name)
def test_invalid_cookies_are_ignored():
    """Cookies whose names contain control or separator characters are dropped,
    while well-formed cookies in the same header survive."""
    bad_chars = [chr(code) for code in range(31)]
    bad_chars.extend(chr(code) for code in range(127, 255))
    bad_chars.extend('()<>,;:\\"/[]?={} \t'.split())
    for ch in bad_chars:
        headers = [('Cookie', 'good_cookie=foo;bad' + ch + 'cookie=bar')]
        environ = testing.create_environ(headers=headers)
        req = falcon.Request(environ)
        assert req.cookies['good_cookie'] == 'foo'
        assert 'bad' + ch + 'cookie' not in req.cookies
class WorkflowTaskFailedCause(betterproto.Enum):
    """Generated protobuf enum of workflow-task failure causes.

    NOTE(review): the names match Temporal's WorkflowTaskFailedCause enum
    (temporal.api.enums.v1) -- confirm against the generating .proto file.
    Values must not be renumbered; they mirror the wire format.
    """
    WORKFLOW_TASK_FAILED_CAUSE_UNSPECIFIED = 0
    WORKFLOW_TASK_FAILED_CAUSE_UNHANDLED_COMMAND = 1
    WORKFLOW_TASK_FAILED_CAUSE_BAD_SCHEDULE_ACTIVITY_ATTRIBUTES = 2
    WORKFLOW_TASK_FAILED_CAUSE_BAD_REQUEST_CANCEL_ACTIVITY_ATTRIBUTES = 3
    WORKFLOW_TASK_FAILED_CAUSE_BAD_START_TIMER_ATTRIBUTES = 4
    WORKFLOW_TASK_FAILED_CAUSE_BAD_CANCEL_TIMER_ATTRIBUTES = 5
    WORKFLOW_TASK_FAILED_CAUSE_BAD_RECORD_MARKER_ATTRIBUTES = 6
    WORKFLOW_TASK_FAILED_CAUSE_BAD_COMPLETE_WORKFLOW_EXECUTION_ATTRIBUTES = 7
    WORKFLOW_TASK_FAILED_CAUSE_BAD_FAIL_WORKFLOW_EXECUTION_ATTRIBUTES = 8
    WORKFLOW_TASK_FAILED_CAUSE_BAD_CANCEL_WORKFLOW_EXECUTION_ATTRIBUTES = 9
    WORKFLOW_TASK_FAILED_CAUSE_BAD_REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION_ATTRIBUTES = 10
    WORKFLOW_TASK_FAILED_CAUSE_BAD_CONTINUE_AS_NEW_ATTRIBUTES = 11
    WORKFLOW_TASK_FAILED_CAUSE_START_TIMER_DUPLICATE_ID = 12
    WORKFLOW_TASK_FAILED_CAUSE_RESET_STICKY_TASK_QUEUE = 13
    WORKFLOW_TASK_FAILED_CAUSE_WORKFLOW_WORKER_UNHANDLED_FAILURE = 14
    WORKFLOW_TASK_FAILED_CAUSE_BAD_SIGNAL_WORKFLOW_EXECUTION_ATTRIBUTES = 15
    WORKFLOW_TASK_FAILED_CAUSE_BAD_START_CHILD_EXECUTION_ATTRIBUTES = 16
    WORKFLOW_TASK_FAILED_CAUSE_FORCE_CLOSE_COMMAND = 17
    WORKFLOW_TASK_FAILED_CAUSE_FAILOVER_CLOSE_COMMAND = 18
    WORKFLOW_TASK_FAILED_CAUSE_BAD_SIGNAL_INPUT_SIZE = 19
    WORKFLOW_TASK_FAILED_CAUSE_RESET_WORKFLOW = 20
    WORKFLOW_TASK_FAILED_CAUSE_BAD_BINARY = 21
    WORKFLOW_TASK_FAILED_CAUSE_SCHEDULE_ACTIVITY_DUPLICATE_ID = 22
    WORKFLOW_TASK_FAILED_CAUSE_BAD_SEARCH_ATTRIBUTES = 23
def get_identified_release_signers(entry):
    """Return the names of known signers whose signature on *entry* verifies.

    The signed message is the concatenation of the release version and date.
    Each signature is tried against every known signer address that has not
    already been identified; the first match claims the signature.
    """
    known_signers = [
        ('rt', '1Bu6ABvLAXn1ARFo1gjq6sogpajGbp6iK6'),
        ('kyuupichan', '1BH8E3TkuJMCcH5WGD11kVweKZuhh6vb7V'),
    ]
    message = entry['version'] + entry['date']
    identified = set()
    for signature in entry.get('signatures', []):
        for name, address in known_signers:
            if name in identified:
                continue
            if PublicKey.verify_message_and_address(signature, message, address):
                identified.add(name)
                break
    return identified
class OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMapping(Options):
    """Generated Highcharts options wrapper: packedbubble sonification
    default speech-options mapping.

    NOTE(review): `text` is defined twice (getter then setter); property/
    setter decorators appear to have been stripped from this generated
    code -- confirm against the original.
    """

    def pitch(self) -> 'OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingPitch':
        """Sub-configuration for speech pitch mapping."""
        return self._config_sub_data('pitch', OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingPitch)

    def playDelay(self) -> 'OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingPlaydelay':
        """Sub-configuration for play-delay mapping."""
        return self._config_sub_data('playDelay', OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingPlaydelay)

    def rate(self) -> 'OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingRate':
        """Sub-configuration for speech rate mapping."""
        return self._config_sub_data('rate', OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingRate)

    def text(self):
        """Get the configured speech text (None when unset)."""
        return self._config_get(None)

    def text(self, text: str):
        """Set the speech text."""
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingTime':
        """Sub-configuration for time mapping."""
        return self._config_sub_data('time', OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingTime)

    def volume(self) -> 'OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingVolume':
        """Sub-configuration for volume mapping."""
        return self._config_sub_data('volume', OptionSeriesPackedbubbleSonificationDefaultspeechoptionsMappingVolume)
def search_hnsw_jaccard_topk(index_data, query_data, index_params, k):
    """Run top-k Jaccard similarity queries against a (cached) HNSW index.

    Args:
        index_data: (index_sets, index_keys, _, index_cache) tuple; the cache
            memoizes built indexes keyed by the serialized parameters.
        query_data: (query_sets, query_keys, _) tuple.
        index_params: kwargs for the HNSW constructor; JSON-serialized to
            form the cache key.
        k: number of neighbors to return per query.

    Returns:
        (indexing, results, times): build-stats dict, a list of
        (query_key, [(index_key, similarity), ...]) pairs, and per-query
        wall-clock durations in seconds.
    """
    (index_sets, index_keys, _, index_cache) = index_data
    (query_sets, query_keys, _) = query_data
    cache_key = json.dumps(index_params)
    if (cache_key not in index_cache):
        print('Building HNSW Index.')
        start = time.perf_counter()
        index = HNSW(distance_func=compute_jaccard_distance, **index_params)
        for i in tqdm.tqdm(range(len(index_keys)), desc='Indexing', unit=' set', total=len(index_keys)):
            index.insert(i, index_sets[i])
        indexing_time = (time.perf_counter() - start)
        print('Indexing time: {:.3f}.'.format(indexing_time))
        index_cache[cache_key] = (index, {'indexing_time': indexing_time})
    (index, indexing) = index_cache[cache_key]
    print('Querying.')
    times = []
    results = []
    for (query_set, query_key) in tqdm.tqdm(zip(query_sets, query_keys), total=len(query_keys), desc='Querying', unit=' query'):
        start = time.perf_counter()
        result = index.query(query_set, k)
        # The index returns Jaccard distances; report similarity = 1 - distance.
        result = [(index_keys[i], (1.0 - dist)) for (i, dist) in result]
        duration = (time.perf_counter() - start)
        times.append(duration)
        results.append((query_key, result))
    return (indexing, results, times)
def get_application_rate_limit_status(consumed_only=True):
    """Fetch the Twitter application rate-limit status as a tidy DataFrame.

    Args:
        consumed_only: when True, return only endpoints with consumed quota
            (limit != remaining) and print a timestamp header.

    Returns:
        DataFrame with columns endpoint, limit, remaining, reset, resource.
    """
    # NOTE(review): auth params are read off the function object itself --
    # presumably attached by a decorator elsewhere in the module; confirm.
    twtr = Twython(**get_application_rate_limit_status.get_auth_params())
    ratelimit = twtr.get_application_rate_limit_status()
    limit_df = pd.DataFrame()
    # Flatten the nested {resource: {endpoint: stats}} structure row-wise.
    for resource in ratelimit['resources']:
        temp_df = pd.DataFrame(ratelimit['resources'][resource]).T
        limit_df = pd.concat([limit_df, temp_df], sort=False)
    # Epoch seconds -> timestamps; resource name is the first path segment.
    limit_df['reset'] = pd.to_datetime(limit_df['reset'], unit='s')
    limit_df['resource'] = limit_df.index.str.split('/').str[1]
    limit_df.index.name = 'endpoint'
    limit_df = limit_df.sort_values(['resource'])
    limit_df = limit_df.reset_index()
    if consumed_only:
        # NOTE(review): '%-d' (no zero padding) is glibc-specific and fails
        # on Windows strftime -- confirm intended platforms.
        print((' ' * 12), 'Rate limit as of:', pd.Timestamp.now(tz='UTC').strftime('%Y-%m-%-d %H:%M:%S'))
        return limit_df[limit_df['limit'].ne(limit_df['remaining'])]
    return limit_df
class PerUserGaussianDurationDistribution(IDurationDistribution):
    """Duration distribution drawing per-user training times from a bounded Gaussian."""

    def __init__(self, **kwargs):
        # Wire up the component config before delegating to the base class.
        init_self_cfg(self, component_class=__class__, config_class=PerUserGaussianDurationDistributionConfig, **kwargs)
        super().__init__(**kwargs)

    # NOTE(review): takes `cls` but carries no @classmethod decorator --
    # likely stripped from the original source; confirm.
    def _set_defaults_in_cfg(cls, cfg):
        pass

    def training_duration(self, num_training_examples: int) -> float:
        # The sampled duration does not depend on the number of examples.
        return self.bounded_gaussian_sample()
def process_identifier(identifier, column_names=None):
    """Rewrite recognized (Chinese) column names inside a sqlparse node in place.

    Args:
        identifier: a sqlparse node -- either a plain identifier or one with
            sub-tokens (functions, names).
        column_names: container of known column names eligible for rewriting.
            Defaults to an empty tuple.

    Fix: the original used a mutable default argument ``column_names=[]``;
    replaced with the None-sentinel idiom (behavior unchanged for callers).
    """
    if column_names is None:
        column_names = ()
    if hasattr(identifier, 'tokens') and (identifier.value in column_names):
        # Whole identifier is a known column; rewrite it only when Chinese.
        if is_chinese(identifier.value):
            new_value = get_new_value(identifier.value)
            identifier.value = new_value
            identifier.normalized = new_value
            identifier.tokens = [sqlparse.sql.Token(sqlparse.tokens.Name, new_value)]
    elif hasattr(identifier, 'tokens'):
        # Composite node: walk its sub-tokens and rewrite names individually.
        for token in identifier.tokens:
            if isinstance(token, sqlparse.sql.Function):
                process_function(token)
            elif (token.ttype in sqlparse.tokens.Name):
                new_value = get_new_value(token.value)
                token.value = new_value
                token.normalized = new_value
            elif (token.value in column_names):
                new_value = get_new_value(token.value)
                token.value = new_value
                token.normalized = new_value
                token.tokens = [sqlparse.sql.Token(sqlparse.tokens.Name, new_value)]
class HSplitData(Transformer, HasInputCol, DefaultParamsReadable, DefaultParamsWritable):
    """Spark Transformer splitting a DataFrame into train/test by fraction `perc`.

    `inputCol` selects what `_transform` returns: 'train', 'test', or (for
    any other value) both splits as a tuple.
    """
    # Fraction of rows assigned to the train split.
    perc = Param(Params._dummy(), 'perc', 'perc')

    # NOTE(review): the bare `_only` here and before setParams looks like a
    # stripped `@keyword_only` decorator -- without it `_input_kwargs` is
    # not populated; confirm against the original source.
    _only
    def __init__(self, perc=None, inputCol=None):
        super(HSplitData, self).__init__()
        self._setDefault(perc=None)
        self._setDefault(inputCol=None)
        kwargs = self._input_kwargs
        self._set(**kwargs)

    _only
    def setParams(self, perc=None):
        kwargs = self._input_kwargs
        return self._set(**kwargs)

    def setValue(self, perc=None):
        """Set the train fraction parameter."""
        return self._set(perc=perc)

    def getValue(self):
        """Return the configured train fraction."""
        return self.getOrDefault(self.perc)

    def _transform(self, df):
        perc = self.getValue()
        # Fixed seed keeps the split reproducible across runs.
        (train, test) = df.randomSplit([perc, (1 - perc)], seed=42)
        if (self.getInputCol() == 'train'):
            return train
        elif (self.getInputCol() == 'test'):
            return test
        return (train, test)
class SelectTool(BaseTool):
    """Base tool implementing click-to-select with three selection modes.

    Subclasses must implement `_get_selection_state`, `_select`, and
    `_deselect`; `_get_selection_token` may be overridden to select by a
    token other than the raw event.
    """
    # Maximum distance from a target that still counts as a hit.
    threshold = Float(5.0)
    # 'toggle': each click flips state; 'multi': modifier key accumulates;
    # 'single': one item at a time; 'off': selection disabled.
    selection_mode = Enum('toggle', 'multi', 'single', 'off')
    # Key/modifier combination that enables multi-select (default: control).
    multiselect_modifier = Instance(KeySpec, args=(None, 'control'), allow_none=True)

    def _get_selection_state(self, event):
        """Return (already_selected, clicked) for *event*; subclass hook."""
        raise NotImplementedError

    def _get_selection_token(self, event):
        """Return the object to (de)select for *event*; defaults to the event."""
        return event

    def _select(self, token, append=True):
        """Select *token*; subclass hook."""
        raise NotImplementedError

    def _deselect(self, token, append=True):
        """Deselect *token*; subclass hook."""
        raise NotImplementedError

    def normal_left_down(self, event):
        """Handle a left click according to the current selection mode."""
        if (self.selection_mode != 'off'):
            (already_selected, clicked) = self._get_selection_state(event)
            modifier_down = self.multiselect_modifier.match(event)
            token = self._get_selection_token(event)
            if ((self.selection_mode == 'single') or ((self.selection_mode == 'multi') and (not modifier_down))):
                if (clicked and (not already_selected)):
                    # Replace the selection unless multi-select is requested.
                    if ((self.selection_mode == 'single') or (not modifier_down)):
                        self._select(token, append=False)
                    else:
                        self._select(token, append=True)
                    event.handled = True
                else:
                    self._deselect(token)
            elif clicked:
                # 'toggle' mode, or 'multi' with the modifier held: flip state.
                if already_selected:
                    self._deselect(token)
                else:
                    self._select(token)
                event.handled = True
        return
def singleFactor(factors, chart, factor, obj, aspect=None):
    """Build a temperament-factor record for *obj*, appending it to *factors*
    when it resolves to an element.

    Args:
        factors: list collecting the resulting factor dicts (mutated in place).
        chart: chart object; used only to derive the Moon phase.
        factor: label identifying the kind of factor.
        obj: either a sign name (str) or a chart object carrying an ``id``.
        aspect: optional aspect id; changes how planet elements are derived.

    Returns:
        The factor dict (returned even when no element was resolved and it
        was therefore not appended).

    Fixes: ``type(obj) == str`` replaced with ``isinstance``; the opaque
    try/except KeyError probe on ``res['element']`` replaced with an explicit
    membership test (same behavior).
    """
    objID = obj if isinstance(obj, str) else obj.id
    res = {'factor': factor, 'objID': objID, 'aspect': aspect}
    if isinstance(obj, str):
        # Plain sign name: element comes straight from the sign table.
        res['element'] = props.sign.element[obj]
    elif (objID == const.SUN):
        # Sun: element follows the season its sign falls in.
        sunseason = props.sign.sunseason[obj.sign]
        res['sign'] = obj.sign
        res['sunseason'] = sunseason
        res['element'] = props.base.sunseasonElement[sunseason]
    elif (objID == const.MOON):
        # Moon: element follows the lunar phase.
        phase = chart.getMoonPhase()
        res['phase'] = phase
        res['element'] = props.base.moonphaseElement[phase]
    elif (objID in const.LIST_SEVEN_PLANETS):
        if aspect:
            # In aspect, the planet contributes its sign's element.
            res['sign'] = obj.sign
            res['element'] = props.sign.element[obj.sign]
        else:
            res['element'] = obj.element()
    # Only factors that resolved to an element are collected.
    if 'element' in res:
        factors.append(res)
    return res
def contenttype(filename, default='text/plain'):
    """Guess the MIME content type of *filename* from its extension(s).

    The final extension is looked up first; when a second-to-last dot exists,
    the compound extension (e.g. ``.tar.gz``) overrides it.  Text types get a
    UTF-8 charset suffix appended.
    """
    last_dot = filename.rfind('.')
    if last_dot >= 0:
        default = CONTENT_TYPE.get(filename[last_dot:].lower(), default)
        prev_dot = filename.rfind('.', 0, last_dot)
        if prev_dot >= 0:
            default = CONTENT_TYPE.get(filename[prev_dot:].lower(), default)
    if default.startswith('text/'):
        default = default + '; charset=utf-8'
    return default
class TestRefineTargetPathToAll():
    """Tests for refine_target_path: expanding a logical path into concrete
    index paths over scalars, nested objects, and arrays."""

    def test_refine_target_path_to_scalar_value(self):
        # A top-level scalar resolves to its own single-element path.
        data = {'A': 'a', 'B': 'b'}
        assert (refine_target_path(data, ['A']) == ['A'])

    def test_refine_target_path_to_nested_value(self):
        # A nested scalar keeps the full key path unchanged.
        data = {'A': {'B': {'C': 'D', 'E': 'F', 'G': 'G'}}}
        assert (refine_target_path(data, ['A', 'B', 'C']) == ['A', 'B', 'C'])

    def test_refine_target_path_to_top_level_array(self):
        # An array expands to one path per element index.
        data = {'A': ['a', 'b', 'c'], 'D': ['e', 'f', 'g']}
        assert (refine_target_path(data, ['A']) == [['A', 0], ['A', 1], ['A', 2]])

    def test_refine_target_path_to_nested_array(self):
        # Nested arrays expand with the full prefix retained.
        data = {'A': {'B': {'C': ['d', 'e', 'f'], 'D': ['g', 'h', 'i']}}}
        assert (refine_target_path(data, ['A', 'B', 'C']) == [['A', 'B', 'C', 0], ['A', 'B', 'C', 1], ['A', 'B', 'C', 2]])

    def test_refine_target_path_to_embedded_object_in_arrays(self):
        # Only array members that actually carry the key are included.
        data = {'A': [{'B': 'C', 'D': 'E', 'F': 'G'}, {'D': 'J'}, {'B': 'I', 'D': 'K', 'F': 'J'}]}
        assert (refine_target_path(data, ['A', 'F']) == [['A', 0, 'F'], ['A', 2, 'F']])

    def test_refined_target_path_array_of_arrays(self):
        # Arrays of arrays expand to per-inner-element index paths.
        data = {'A': [['B', 'C', 'D', 'C', 'E']], 'C': ['E', 'F']}
        assert (refine_target_path(data, ['A']) == [['A', 0, 0], ['A', 0, 1], ['A', 0, 2], ['A', 0, 3], ['A', 0, 4]])
class Migration(migrations.Migration):
    """Initial migration: creates the Sabor and Sorvete models.

    Sorvete references Sabor through a many-to-many relation ('sabores').
    """
    initial = True
    dependencies = []
    operations = [migrations.CreateModel(name='Sabor', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('nome_do_sabor', models.CharField(max_length=50))]), migrations.CreateModel(name='Sorvete', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('unidades', models.PositiveIntegerField(verbose_name='Unidades')), ('preco_de_venda', models.DecimalField(decimal_places=2, max_digits=6, verbose_name='Preco de venda')), ('preco_de_custo', models.DecimalField(decimal_places=2, max_digits=6, verbose_name='Preco de Custo')), ('sabores', models.ManyToManyField(to='core.Sabor'))])]
class LiteEthMACPreambleInserter(Module):
    """Prepends the Ethernet preamble to each frame on a stream of width `dw`.

    FSM: IDLE waits for a valid sink beat (acked only while idle, held back
    once a frame starts), PREAMBLE shifts the 64-bit preamble out in
    dw-sized chunks, COPY passes the frame through until `last`.
    """
    def __init__(self, dw):
        assert (dw in [8, 16, 32, 64])
        self.sink = stream.Endpoint(eth_phy_description(dw))
        self.source = stream.Endpoint(eth_phy_description(dw))
        # 64-bit preamble emitted over 64/dw beats; counter still needs a
        # range of 2 in the degenerate dw == 64 case.
        preamble = Signal(64, reset=eth_preamble)
        count = Signal(max=((64 // dw) if (dw != 64) else 2), reset_less=True)
        self.submodules.fsm = fsm = FSM(reset_state='IDLE')
        fsm.act('IDLE', self.sink.ready.eq(1), NextValue(count, 0), If(self.sink.valid, self.sink.ready.eq(0), NextState('PREAMBLE')))
        fsm.act('PREAMBLE', self.source.valid.eq(1), chooser(preamble, count, self.source.data, n=(64 // dw)), If(self.source.ready, If((count == ((64 // dw) - 1)), NextState('COPY')).Else(NextValue(count, (count + 1)))))
        # data/last_be pass through combinatorially; PREAMBLE overrides data
        # via chooser, COPY connects the remaining handshake signals.
        self.comb += [self.source.data.eq(self.sink.data), self.source.last_be.eq(self.sink.last_be)]
        fsm.act('COPY', self.sink.connect(self.source, omit={'data', 'last_be'}), If(((self.sink.valid & self.sink.last) & self.source.ready), NextState('IDLE')))
# NOTE(review): the two bare statements below look like stripped decorators
# (@_blueprint.route(...) and a @login_required variant) -- confirm against
# the original source.
_blueprint.route('/project/<project_id>/map/<pkg_id>', methods=['GET', 'POST'])
_required
def edit_project_mapping(project_id, pkg_id):
    """Edit the distro mapping of a package within a project.

    Aborts with 404 when the project or package is unknown.  On a valid
    submit, updates the mapping and redirects to the project page (flashing
    success or error); otherwise re-renders the mapping form.
    """
    project = models.Project.get(Session, project_id)
    if (not project):
        flask.abort(404)
    package = models.Packages.by_id(Session, pkg_id)
    if (not package):
        flask.abort(404)
    # Distro names feed the form's choice validation.
    distros = models.Distro.all(Session)
    distro_names = []
    for distro in distros:
        distro_names.append(distro.name)
    form = anitya.forms.MappingForm(package_name=package.package_name, distro=package.distro_name, distros=distro_names)
    if form.validate_on_submit():
        try:
            utilities.map_project(Session, project=project, package_name=form.package_name.data, distribution=form.distro.data, user_id=flask.g.user.username, old_package_name=package.package_name, old_distro_name=package.distro_name)
            Session.commit()
            flask.flash('Mapping edited')
        except exceptions.AnityaInvalidMappingException as err:
            # Invalid mapping: surface the error with a link to the
            # conflicting project.
            err.link = flask.url_for('anitya_ui.project', project_id=err.project_id)
            flask.flash(err.message, 'error')
        except exceptions.AnityaException as err:
            flask.flash(str(err), 'error')
        return flask.redirect(flask.url_for('anitya_ui.project', project_id=project_id))
    return flask.render_template('mapping.html', current='projects', project=project, package=package, form=form)
class _RegData():
    """Accessor for the VAL field of the DATA register via a register map.

    NOTE(review): `val` is defined twice (getter then setter); @property /
    @val.setter decorators appear to have been stripped -- confirm against
    the original source.
    """

    def __init__(self, rmap):
        self._rmap = rmap

    def val(self):
        """Read DATA and extract the VAL field."""
        rdata = self._rmap._if.read(self._rmap.DATA_ADDR)
        return ((rdata >> self._rmap.DATA_VAL_POS) & self._rmap.DATA_VAL_MSK)

    def val(self, val):
        """Read-modify-write DATA, replacing only the VAL field."""
        rdata = self._rmap._if.read(self._rmap.DATA_ADDR)
        # Clear the field bits, then OR in the new value at its position.
        rdata = (rdata & (~ (self._rmap.DATA_VAL_MSK << self._rmap.DATA_VAL_POS)))
        rdata = (rdata | (val << self._rmap.DATA_VAL_POS))
        self._rmap._if.write(self._rmap.DATA_ADDR, rdata)
def fortios_firewall_ssh(data, fos, check_mode):
    """Dispatch the firewall.ssh local-ca task against the FortiOS connection.

    Returns the raw response in check mode; otherwise the Ansible-style
    (failed, changed, response, diff) tuple.
    """
    fos.do_member_operation('firewall.ssh', 'local-ca')
    if data['firewall_ssh_local_ca']:
        resp = firewall_ssh_local_ca(data, fos, check_mode)
    else:
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_ssh_local_ca'))
    if check_mode:
        return resp
    # changed defaults to True unless the API explicitly reported that the
    # revision did not change.
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
# NOTE(review): the two bare lines below look like stripped click decorators
# (@click.command('delete', cls=FandoghCommand) and @click.option(...)) --
# as written the first is not even valid Python; confirm against the
# original source.
('delete', cls=FandoghCommand)
('--name', '-n', 'archive_name', prompt='Service Archive Name')
def archive_delete(archive_name):
    """CLI command: delete a service archive after interactive confirmation."""
    if click.confirm('are you sure you want to delete service archive with name {}'.format(archive_name)):
        click.echo('you are about to delete archive with name {}.'.format(archive_name))
        click.echo('It might take a while!')
        # `present` wraps the call with progress feedback while it runs.
        message = present((lambda : delete_service_archive(archive_name)))
        click.echo(message)
# NOTE(review): the bare `_module()` call below looks like a stripped
# registry decorator (e.g. @DATASETS.register_module()) -- confirm.
_module()
class NaiveTTSDataset(NaiveDataset):
    """TTS dataset pipeline configuration.

    Per item: keep path/mel/contents/speaker and transpose mel axes (1, 0).
    Per batch: drop mels outside [1, 2048] frames, convert the list of dicts
    to a dict of lists, pad/stack mel and contents, and tensorize speaker
    and contents as int64.
    """
    processing_pipeline = [dict(type='PickKeys', keys=['path', 'mel', 'contents', 'speaker']), dict(type='Transpose', keys=[('mel', 1, 0)])]
    collating_pipeline = [dict(type='FilterByLength', key='mel', dim=0, min_length=1, max_length=2048), dict(type='ListToDict'), dict(type='PadStack', keys=[('mel', (- 2)), ('contents', (- 1))]), dict(type='ToTensor', keys=[('speaker', torch.int64), ('contents', torch.int64)])]
class ShowOrMovie(BaseModel):
    """Pydantic model describing a movie or TV show extracted from text."""
    name: str = Field(description='The name of the movie or tv show')
    season: Optional[str] = Field(description='Season of TV show. Extract as a digit stripping Season prefix.')
    year: Optional[str] = Field(description='Year when the movie / tv show was released')
    latest_episode: Optional[str] = Field(description='Date when the latest episode was released')
    link: Optional[str] = Field(description='Link to the movie / tv show.')

    # NOTE(review): the bare ('name') below looks like a stripped
    # @validator('name') decorator -- confirm against the original source.
    ('name')
    def name_must_not_be_empty(cls, v):
        """Reject empty/falsy names."""
        if (not v):
            raise ValueError('Name must not be empty')
        return v
class DSAudio():
    """Tracks microphone mute state and the microphone LED flag."""

    def __init__(self) -> None:
        # Both flags start cleared; they are only changed via the setters.
        self.microphone_mute = 0
        self.microphone_led = 0

    def setMicrophoneLED(self, value):
        """Set the microphone LED flag; *value* must be a bool."""
        if isinstance(value, bool):
            self.microphone_led = value
        else:
            raise TypeError('MicrophoneLED can only be a bool')

    def setMicrophoneState(self, state: bool):
        """Mute/unmute the microphone, mirroring the state on the LED."""
        if isinstance(state, bool):
            self.setMicrophoneLED(state)
            self.microphone_mute = state
        else:
            raise TypeError('state needs to be bool')
def test_get_draft_event_attendees_admin(db, client, admin_jwt):
    """Admins can list attendees of a draft event via the JSON:API endpoint."""
    attendee = get_minimal_attendee(db, event_status='draft')
    response = client.get(f'/v1/events/{attendee.event_id}/attendees', content_type='application/vnd.api+json', headers=admin_jwt)
    assert (response.status_code == 200)
    # Exactly the one attendee created above is returned.
    assert (len(json.loads(response.data)['data']) == 1)
class OptionSeriesParetoSonificationTracksMappingTremolo(Options):
    """Generated Highcharts options wrapper: pareto-series sonification
    tremolo mapping."""

    def depth(self) -> 'OptionSeriesParetoSonificationTracksMappingTremoloDepth':
        """Sub-configuration for tremolo depth."""
        return self._config_sub_data('depth', OptionSeriesParetoSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesParetoSonificationTracksMappingTremoloSpeed':
        """Sub-configuration for tremolo speed."""
        return self._config_sub_data('speed', OptionSeriesParetoSonificationTracksMappingTremoloSpeed)
class OptionSeriesLineSonificationContexttracksActivewhen(Options):
    """Generated Highcharts options wrapper: line-series sonification
    context-track activation conditions.

    NOTE(review): each option is defined twice (getter then setter);
    property/setter decorators appear to have been stripped from this
    generated code -- confirm against the original.
    """

    def crossingDown(self):
        """Get the crossing-down threshold (None when unset)."""
        return self._config_get(None)

    def crossingDown(self, num: float):
        """Set the crossing-down threshold."""
        self._config(num, js_type=False)

    def crossingUp(self):
        """Get the crossing-up threshold (None when unset)."""
        return self._config_get(None)

    def crossingUp(self, num: float):
        """Set the crossing-up threshold."""
        self._config(num, js_type=False)

    def max(self):
        """Get the max bound (None when unset)."""
        return self._config_get(None)

    def max(self, num: float):
        """Set the max bound."""
        self._config(num, js_type=False)

    def min(self):
        """Get the min bound (None when unset)."""
        return self._config_get(None)

    def min(self, num: float):
        """Set the min bound."""
        self._config(num, js_type=False)

    def prop(self):
        """Get the tracked point property name (None when unset)."""
        return self._config_get(None)

    def prop(self, text: str):
        """Set the tracked point property name."""
        self._config(text, js_type=False)
def handle_auto_mining(func):
    """Decorate a transaction-sending method to honor auto-mining.

    With ``self.auto_mine_transactions`` set, the wrapped call runs and a
    block is mined immediately afterwards.  Otherwise the call is executed
    against a snapshot to validate it: the resulting pending transaction is
    normalized via ``_clean_pending_transaction``, chain state is rolled
    back, and the cleaned transaction is re-queued as pending.

    Returns:
        The wrapper, which itself returns the transaction hash.

    NOTE(review): the original source contained a bare ``(func)`` statement
    here -- most likely a mangled ``@functools.wraps(func)`` decorator line;
    it was a no-op and has been dropped.
    """
    def func_wrapper(self, *args, **kwargs):
        if self.auto_mine_transactions:
            transaction_hash = func(self, *args, **kwargs)
            self.mine_block()
        else:
            snapshot = self.take_snapshot()
            try:
                transaction_hash = func(self, *args, **kwargs)
                pending_transaction = self.get_transaction_by_hash(transaction_hash)
                pending = remove_matching_transaction_from_list(self._pending_transactions, pending_transaction)
                self._pending_transactions = pending
                cleaned_transaction = _clean_pending_transaction(pending_transaction)
                self._pending_transactions.append(cleaned_transaction)
            finally:
                # Always roll the chain back; only the pending list keeps the tx.
                self.revert_to_snapshot(snapshot)
        return transaction_hash

    def _clean_pending_transaction(pending_transaction):
        # Drop the computed 'type' field; for dynamic-fee (EIP-1559)
        # transactions, also drop the redundant legacy 'gas_price' field.
        cleaned_transaction = dissoc(pending_transaction, 'type')
        # BUG FIX: the original condition was
        #   'gas_price' and ('max_fee_per_gas' in pending_transaction)
        # where the string literal 'gas_price' is always truthy, so the
        # gas_price key's presence was never actually tested.
        if ('gas_price' in pending_transaction) and ('max_fee_per_gas' in pending_transaction):
            cleaned_transaction = dissoc(cleaned_transaction, 'gas_price')
        return cleaned_transaction

    return func_wrapper
def custom_get_next_loc_key(self, instr):
    """Return the location key for the instruction following *instr*.

    Falls back to the block's 'c_next' successor edge when *instr* has no
    offset, or has zero length and is the last line of the current asm
    block; otherwise creates/fetches a location at offset + length and
    records that address as the split point.
    """
    if ((not instr.offset) or ((not instr.l) and (self.asm_block.lines[(- 1)] == instr))):
        # No usable fall-through address: follow the block's next-edge.
        return [i for i in self.asm_block.bto if (i.c_t == 'c_next')][0].loc_key
    loc_key = self.loc_db.get_or_create_offset_location((instr.offset + instr.l))
    self.split_offset = (instr.offset + instr.l)
    return loc_key
def clean_caches(path):
    """Recursively delete compiled-bytecode artifacts under *path*.

    Removes every file whose name ends in ``pyc`` (files that vanish
    concurrently are ignored) and deletes ``__pycache__`` directories
    wholesale.
    """
    for current_dir, _subdirs, files in os.walk(path):
        for name in files:
            if name.endswith('pyc'):
                try:
                    os.remove(os.path.join(current_dir, name))
                except FileNotFoundError:
                    pass
        if current_dir.endswith('__pycache__'):
            shutil.rmtree(current_dir)
def construct_contract_response(requested_award_dict: dict) -> OrderedDict:
    """Assemble the full API response payload for an FPDS contract award.

    Returns None when the award cannot be found (despite the OrderedDict
    annotation); otherwise an OrderedDict with internal underscore-prefixed
    helper keys stripped by delete_keys_from_dict.
    """
    response = OrderedDict()
    award = fetch_award_details(requested_award_dict, FPDS_AWARD_FIELDS)
    if (not award):
        return None
    response.update(award)
    # Federal-account rollups for this award.
    account_data = fetch_account_details_award(award['id'])
    response.update(account_data)
    # The latest transaction carries most contract-level detail fields used below.
    transaction = fetch_fpds_details_by_pk(award['_trx'], FPDS_CONTRACT_FIELDS)
    response['parent_award'] = fetch_contract_parent_award_details(award['_parent_award_piid'], award['_fpds_parent_agency_id'])
    response['latest_transaction_contract_data'] = transaction
    response['funding_agency'] = fetch_agency_details(response['_funding_agency_id'])
    if response['funding_agency']:
        response['funding_agency']['office_agency_name'] = transaction['_funding_office_name']
    response['awarding_agency'] = fetch_agency_details(response['_awarding_agency_id'])
    if response['awarding_agency']:
        response['awarding_agency']['office_agency_name'] = transaction['_awarding_office_name']
    response['period_of_performance'] = OrderedDict([('start_date', award['_start_date']), ('end_date', award['_end_date']), ('last_modified_date', transaction['_last_modified']), ('potential_end_date', transaction['_period_of_perf_potential_e'])])
    response['recipient'] = create_recipient_object(transaction)
    response['executive_details'] = create_officers_object(award)
    response['place_of_performance'] = create_place_of_performance_object(transaction)
    # PSC / NAICS hierarchies are attached only when the codes are present.
    if transaction['product_or_service_code']:
        response['psc_hierarchy'] = fetch_psc_hierarchy(transaction['product_or_service_code'])
    if transaction['naics']:
        response['naics_hierarchy'] = fetch_naics_hierarchy(transaction['naics'])
    response['total_outlay'] = fetch_total_outlays(award['id'])
    return delete_keys_from_dict(response)
# NOTE(review): the bare call below looks like a stripped class decorator
# (its original name was truncated to `_decorator`) -- confirm against the
# original source.
_decorator(deprecated, name='list')
class TASBalancesAggregate(FilterQuerysetMixin, AggregateQuerysetMixin, CachedDetailViewSet):
    """Aggregate view over appropriation account balances."""
    serializer_class = AggregateSerializer

    def get_queryset(self):
        """Filter, aggregate, and order balances per the request parameters."""
        # Only the final balance submission for each fiscal year is counted.
        queryset = AppropriationAccountBalances.objects.filter(submission__is_final_balances_for_fy=True)
        queryset = self.filter_records(self.request, queryset=queryset)
        queryset = self.aggregate(self.request, queryset=queryset)
        queryset = self.order_records(self.request, queryset=queryset)
        return queryset
class ChooseTask(Task):
    """Interactive task presenting a numbered menu and returning the choice.

    NOTE(review): this block's indentation was reconstructed from a
    flattened source; the exact nesting of the post-loop statements in
    __choose should be confirmed against the original.
    """

    def __init__(self, dic, tips, array=False) -> None:
        self.tips = tips    # prompt text shown before the menu
        self.dic = dic      # mapping (or sequence, when array=True) of options
        self.array = array  # treat `dic` as a sequence numbered from 1
        super().__init__(Task.TASK_TYPE_CHOOSE)

    # NOTE(review): defined without `self` and invoked as
    # `ChooseTask.__choose(...)` -- likely a stripped @staticmethod.
    def __choose(data, tips, array):
        """Render the menu, read a valid numeric choice, return (key, label)."""
        if array:
            # Number sequence items from 1; key 0 is reserved for 'quit'.
            count = 1
            dic = {}
            for e in data:
                dic[count] = e
                count += 1
        else:
            dic = data
        dic[0] = 'quit'
        choose = (- 1)
        for key in dic:
            PrintUtils.print_delay('[{}]:{}'.format(key, dic[key]), 0.005)
        choose = None
        # A pre-recorded answer (replay mode) takes precedence over stdin.
        choose_item = config_helper.get_input_value()
        while True:
            if choose_item:
                choose = str(choose_item['choose'])
                print(':', choose_item)
            else:
                choose = input('[]:')
            # Clear the replayed answer so an invalid one falls back to stdin.
            choose_item = None
            if choose.isdecimal():
                if ((int(choose) in dic.keys()) or (int(choose) == 0)):
                    choose = int(choose)
                    break
        # Persist the selection so later runs can replay it.
        config_helper.record_choose({'choose': choose, 'desc': dic[choose]})
        PrintUtils.print_fish()
        return (choose, dic[choose])

    def run(self):
        """Execute the task: show the tips, then delegate to __choose."""
        PrintUtils.print_delay('RUN Choose Task:[]')
        PrintUtils.print_delay(self.tips, 0.001)
        return ChooseTask.__choose(self.dic, self.tips, self.array)
def create_mmseqs_db(dbprefix, in_fasta):
    """Create an MMseqs2 database from *in_fasta* under *dbprefix*.

    Raises:
        EmapperException: carrying the last stderr line when mmseqs fails.
    """
    # SECURITY: the command runs through the shell with interpolated paths;
    # the single quotes mitigate but do not eliminate injection via crafted
    # filenames containing quotes -- consider a list argv with shell=False.
    cmd = f"{MMSEQS2} createdb '{in_fasta}' '{dbprefix}'"
    print(colorify((' ' + cmd), 'yellow'))
    try:
        completed_process = subprocess.run(cmd, capture_output=True, check=True, shell=True)
    except subprocess.CalledProcessError as cpe:
        # Surface only the final stderr line, which carries mmseqs' error.
        raise EmapperException(('Error running mmseqs: ' + cpe.stderr.decode('utf-8').strip().split('\n')[(- 1)]))
    return
def _get_config_args(conf: FitlogConfig):
    """Collect the public attributes of a config object as a plain dict.

    Accepts either a config class (instantiated here) or an instance.
    Function-valued entries are replaced by their names so the result stays
    serializable.
    """
    if inspect.isclass(conf):
        conf = conf()
    args = {}
    for name in dir(conf):
        if name.startswith('_'):
            continue
        value = conf.__getattribute__(name)
        args[name] = value.__name__ if inspect.isfunction(value) else value
    return args
def task_gettext():
    """doit task: extract translatable strings into .pot catalogs.

    Runs xgettext over the package's Python sources (excluding
    __version__.py) and sphinx-build in gettext mode over the README.
    """
    pot = f'./{PACKAGE}/locale/{PACKAGE}.pot'
    py_sources = [
        src for src in glob.glob(f'./{PACKAGE}/**/*.py', recursive=True)
        if '__version__.py' not in src
    ]
    xgettext_cmd = (
        'xgettext --add-comments=TRANSLATORS --from-code=UTF-8 -o '
        + pot + ' ' + ' '.join(py_sources)
    )
    file_deps = py_sources + [README_BASE]
    return {
        'actions': [
            xgettext_cmd,
            ['cp', README_BASE, './.cache/README.rst'],
            ['sphinx-build', '-b', 'gettext', '-C', '-D', 'master_doc=README', '-D', 'gettext_additional_targets=literal-block,image', './.cache', './readme_translations/locale/', './.cache/README.rst'],
            ['rm', './.cache/README.rst'],
        ],
        'targets': [pot, './readme_translations/locale/README.pot'],
        'file_dep': file_deps,
    }
class TestSymlink(unittest.TestCase):
def setUp(self):
self.example_dir = ExampleDirLinks()
self.wd = self.example_dir.create_directory()
def tearDown(self):
self.example_dir.delete_directory()
def test_not_a_link(self):
self.assertRaises(Exception, Symlink, self.example_dir.path('spider.txt'))
def test_target(self):
self.assertEqual(Symlink(self.example_dir.path('itsy-bitsy.txt')).target, 'spider.txt')
self.assertEqual(Symlink(self.example_dir.path('broken.txt')).target, 'missing.txt')
self.assertEqual(Symlink(self.example_dir.path('absolute.txt')).target, self.example_dir.path('fly.txt'))
self.assertEqual(Symlink(self.example_dir.path('absolutely_broken.txt')).target, self.example_dir.path('absolutely_missing.txt'))
self.assertEqual(Symlink(self.example_dir.path('web/relative.txt')).target, '../spider.txt')
self.assertEqual(Symlink(self.example_dir.path('web2')).target, 'web')
def test_is_absolute(self):
self.assertTrue(Symlink(self.example_dir.path('absolute.txt')).is_absolute)
self.assertTrue(Symlink(self.example_dir.path('absolutely_broken.txt')).is_absolute)
self.assertFalse(Symlink(self.example_dir.path('itsy-bitsy.txt')).is_absolute)
self.assertFalse(Symlink(self.example_dir.path('broken.txt')).is_absolute)
self.assertFalse(Symlink(self.example_dir.path('web/relative.txt')).is_absolute)
self.assertFalse(Symlink(self.example_dir.path('web2')).is_absolute)
def test_is_broken(self):
self.assertFalse(Symlink(self.example_dir.path('absolute.txt')).is_broken)
self.assertTrue(Symlink(self.example_dir.path('absolutely_broken.txt')).is_broken)
self.assertFalse(Symlink(self.example_dir.path('itsy-bitsy.txt')).is_broken)
self.assertTrue(Symlink(self.example_dir.path('broken.txt')).is_broken)
self.assertFalse(Symlink(self.example_dir.path('web/relative.txt')).is_broken)
self.assertFalse(Symlink(self.example_dir.path('web2')).is_broken)
def test_resolve_target(self):
self.assertEqual(Symlink(self.example_dir.path('itsy-bitsy.txt')).resolve_target(), self.example_dir.path('spider.txt'))
self.assertEqual(Symlink(self.example_dir.path('absolute.txt')).resolve_target(), self.example_dir.path('fly.txt'))
self.assertEqual(Symlink(self.example_dir.path('web/relative.txt')).resolve_target(), self.example_dir.path('spider.txt'))
self.assertEqual(Symlink(self.example_dir.path('web2')).resolve_target(), self.example_dir.path('web'))
def test_update_target(self):
    # update_target() rewrites the link destination in place; the change is
    # immediately observable through the ``target`` property.
    symlink = Symlink(self.example_dir.path('itsy-bitsy.txt'))
    self.assertEqual(symlink.target, 'spider.txt')
    symlink.update_target('spider2.txt')
    self.assertEqual(symlink.target, 'spider2.txt')
def filter_firewall_mms_profile_data(json):
    """Project the input dict down to the recognised firewall_mms_profile
    option keys, dropping keys that are absent or hold None."""
    option_list = ['avnotificationtable', 'bwordtable', 'carrier_endpoint_prefix', 'carrier_endpoint_prefix_range_max', 'carrier_endpoint_prefix_range_min', 'carrier_endpoint_prefix_string', 'carrierendpointbwltable', 'comment', 'dupe', 'extended_utm_log', 'flood', 'mm1', 'mm1_addr_hdr', 'mm1_addr_source', 'mm1_convert_hex', 'mm1_outbreak_prevention', 'mm1_retr_dupe', 'mm1_retrieve_scan', 'mm1comfortamount', 'mm1comfortinterval', 'mm1oversizelimit', 'mm3', 'mm3_outbreak_prevention', 'mm3oversizelimit', 'mm4', 'mm4_outbreak_prevention', 'mm4oversizelimit', 'mm7', 'mm7_addr_hdr', 'mm7_addr_source', 'mm7_convert_hex', 'mm7_outbreak_prevention', 'mm7comfortamount', 'mm7comfortinterval', 'mm7oversizelimit', 'mms_antispam_mass_log', 'mms_av_block_log', 'mms_av_oversize_log', 'mms_av_virus_log', 'mms_carrier_endpoint_filter_log', 'mms_checksum_log', 'mms_checksum_table', 'mms_notification_log', 'mms_web_content_log', 'mmsbwordthreshold', 'name', 'notif_msisdn', 'notification', 'outbreak_prevention', 'remove_blocked_const_length', 'replacemsg_group']
    json = remove_invalid_fields(json)
    return {key: json[key] for key in option_list if key in json and json[key] is not None}
class InitializeBatchsizeWidget(QtWidgets.QDialog):
    """Non-modal dialog asking the user for a batch-size initialization string."""

    _DEFAULT_WINDOW_WIDTH = 350

    def __init__(self, current_batchsize='-1', parent=None) -> None:
        super().__init__(parent)
        self.setModal(False)
        self.setWindowTitle('initialize batchsize')
        self.initUI()
        self.updateUI(current_batchsize)

    def initUI(self):
        """Build the static widgets: label, line edit and OK/Cancel buttons."""
        self.setFixedWidth(self._DEFAULT_WINDOW_WIDTH)
        set_font(self, font_size=BASE_FONT_SIZE)
        base_layout = QtWidgets.QVBoxLayout()
        layout = QtWidgets.QVBoxLayout()
        lbl_name = QtWidgets.QLabel('Input string to initialize batch size.')
        set_font(lbl_name, font_size=LARGE_FONT_SIZE, bold=True)
        self.ledit_character = QtWidgets.QLineEdit()
        self.ledit_character.setText('-1')
        self.ledit_character.setPlaceholderText('initialization_character_string')
        layout.addWidget(lbl_name)
        layout.addWidget(self.ledit_character)
        base_layout.addLayout(layout)
        btn = QtWidgets.QDialogButtonBox((QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel))
        btn.accepted.connect(self.accept)
        btn.rejected.connect(self.reject)
        base_layout.addWidget(btn)
        self.setLayout(base_layout)

    def updateUI(self, current_batchsize):
        """Show the caller-supplied current batch size in the line edit."""
        self.ledit_character.setText(str(current_batchsize))

    def get_properties(self) -> InitializeBatchsizeProperties:
        """Return the trimmed line-edit content wrapped in the properties object."""
        character = self.ledit_character.text().strip()
        return InitializeBatchsizeProperties(initialization_character_string=character)

    def accept(self) -> None:
        """Validate the input; show an error box and keep the dialog open when empty.

        Removed the leftover debug ``print`` calls — errors are reported via
        MessageBox only.
        """
        props = self.get_properties()
        err_msgs = []
        if props.initialization_character_string == '':
            err_msgs.append('- initialization_character_string is not set.')
        if err_msgs:
            MessageBox.error(err_msgs, 'initialize batchsize', parent=self)
            return
        return super().accept()
def test_empty_universe():
    """Quantifier behaviour under a logic that admits an empty universe.

    ``strict_simplify``/``strict_proves`` must reject classically valid laws
    that presuppose a non-empty domain (e.g. FA x. P(x) |- TE x. P(x)), while
    the non-strict ``proves`` variant still accepts them.
    """
    with StandardMath.use_in(globals()):
        # simplification: pushing a conjunct into FA / a disjunct into TE is
        # NOT sound when the universe may be empty; the dual directions are.
        assert (strict_simplify(FA(x, F)) != F)
        assert (strict_simplify(TE(x, F)) != F)
        assert (strict_simplify((FA(x, F(x)) & G)) != FA(y, (F(y) & G)))
        assert (strict_simplify((FA(x, F(x)) | G)) == FA(y, (F(y) | G)))
        assert (strict_simplify((TE(x, F(x)) & G)) == TE(y, (F(y) & G)))
        assert (strict_simplify((TE(x, F(x)) | G)) != TE(y, (F(y) | G)))
        # sequents that remain provable even with an empty universe
        assert strict_proves(TE(x, bot), bot)
        assert strict_proves(top, FA(x, top))
        assert strict_proves(FA(x, (R(x) >> S(x))), (FA(y, R(y)) >> FA(z, S(z))))
        assert strict_proves(FA(x, (R(x) & S(x))), (FA(y, R(y)) & FA(z, S(z))))
        assert strict_proves((FA(x, (R(x) >> S(x))), TE(y, R(y))), TE(z, S(z)))
        assert strict_proves(TE(x, (R(x) & S(x))), (TE(y, R(y)) & TE(z, S(z))))
        assert strict_proves((TE(x, R(x)) | TE(y, S(y))), TE(z, (R(z) | S(z))))
        assert strict_proves(TE(x, (R(x) | S(x))), (TE(y, R(y)) | TE(z, S(z))))
        assert strict_proves(FA(x, R(x)), (~ TE(y, (~ R(y)))))
        assert strict_proves(TE(x, (~ R(x))), (~ FA(y, R(y))))
        assert strict_proves(FA(x, (~ R(x))), (~ TE(y, R(y))))
        assert strict_proves((~ TE(x, R(x))), FA(y, (~ R(y))))
        assert strict_proves(R(j), TE(x, R(x)))
        assert strict_proves((~ TE(x, (~ R(x)))), FA(y, R(y)))
        assert strict_proves((~ FA(x, (~ R(x)))), TE(y, R(y)))
        assert strict_proves((~ FA(x, R(x))), TE(y, (~ R(y))))
        assert strict_proves(FA(x, (~ (~ D(x)))), FA(x, D(x)))
        assert strict_proves((~ TE(x, R(x))), FA(y, (~ R(y))))
        assert strict_proves(top, (TE(x, D(x)) | FA(x, (~ D(x)))))
        assert strict_proves(top, (TE(x, (~ D(x))) | FA(x, D(x))))
        assert strict_proves(TE(x, top), TE(x, (D(x) >> FA(y, D(y)))))
        assert strict_proves(TE(x, (~ (~ D(x)))), TE(x, D(x)))
        assert strict_proves(FA(x, (C(x) | D(x))), (FA(x, C(x)) | TE(x, D(x))))
        assert strict_proves(FA(x, (H(j) >> T(x))), (H(j) >> FA(x, T(x))))
        assert strict_proves(TE(x, (R(x) >> B(x))), (FA(x, R(x)) >> TE(x, B(x))))
        assert strict_proves((~ FA(x, bot)), TE(x, top))
        assert strict_proves(FA(x, TE(y, (F(y) | G(x)))), FA(x, (G(x) | TE(x, F(x)))))
        assert strict_proves((FA(x, FA(y, FA(z, ((S(x, y) & S(y, z)) >> S(x, z))))), (~ TE(x, S(x, x)))), FA(x, FA(y, (S(x, y) >> (~ S(y, x))))))
        assert strict_proves((FA(x, G(x)) | FA(x, B(x))), FA(x, (G(x) | B(x))))
        assert strict_proves(TE(z, FA(k, P(z, k))), FA(y, TE(x, P(x, y))))
        assert strict_proves(TE(x, (C(x) & B(x))), TE(x, (B(x) & C(x))))
        assert strict_proves(TE(x, (C(x, i) & B(x, j))), TE(x, (C(x, i) >> B(x, j))))
        assert strict_proves(FA(x, (C(x) & B(x))), FA(x, (B(x) & C(x))))
        assert strict_proves(FA(x, (C(x) & B(x))), (FA(x, C(x)) & FA(x, B(x))))
        assert strict_proves(FA(x, bot), (~ TE(x, top)))
        assert strict_proves((((~ TE(x, G(x))) | FA(x, F(x))), (C(j) >> FA(x, D(x)))), FA(y, FA(z, ((~ G(z)) | (F(y) & (C(j) >> D(y)))))))
        assert strict_proves((FA(x, G(x)) | TE(x, F(x))), FA(x, TE(y, (F(y) | G(x)))))
        assert strict_proves(((P | TE(x, W)) >> FA(z, R)), FA(z, FA(x, ((P | W) >> R))))
        # the non-strict prover accepts sequents valid only for non-empty domains
        assert proves((TE(x, F(x)) | TE(x, G(x))), TE(x, TE(y, (F(x) | G(y)))))
        assert proves(TE(x, FA(y, P(x, y))), FA(y, TE(x, P(x, y))))
        assert proves(TE(x, FA(y, bot)), bot)
        assert proves(top, FA(x, TE(y, top)))
        assert proves(FA(x, (TE(y, F(y)) | G(x))), FA(x, TE(y, (F(y) | G(x)))))
        assert proves(TE(x, FA(y, (F(y) & G(x)))), TE(x, (FA(y, F(y)) & G(x))))
        assert strict_proves(TE(x, (~ R(x))), TE(y, (R(y) >> (R(j) & R(k)))))
        assert strict_proves(P(c), TE(x, P(x)))
        assert strict_proves(P(c), TE(x, top))
        assert strict_proves((P(c) & (~ P(c))), TE(x, top))
        assert strict_proves((P(c) | (~ P(c))), TE(x, top))
        # classically valid sequents the strict prover must reject
        assert (not strict_proves((FA(x, R(x)) >> FA(y, S(y))), FA(z, (R(z) >> S(z)))))
        assert (not strict_proves((TE(x, R(x)) & TE(y, S(y))), TE(z, (R(z) & S(z)))))
        assert (not strict_proves(TE(x, R(x)), FA(y, R(y))))
        assert (not strict_proves(top, TE(x, top)))
        assert (not strict_proves(top, TE(x, (D(x) >> FA(y, D(y))))))
        assert (not strict_proves((R(j), FA(x, (R(x) >> S(x)))), S(j)))
        assert (not strict_proves((FA(x, R(x)) >> FA(y, S(y))), TE(x, FA(y, ((~ R(x)) | S(y))))))
        assert (not strict_proves(FA(x, R(x)), TE(y, R(y))))
        assert (not strict_proves((T(i), FA(x, (T(x) >> T(s(x))))), T(s(i))))
        assert (not strict_proves(top, TE(x, (R(x) >> (R(j) & R(k))))))
        assert (not strict_proves((FA(x, (~ F(x))), FA(x, F(x))), bot))
        # equality: reflexivity, transitivity, symmetry, substitution, congruence
        assert strict_proves(top, Eq(a, a))
        assert strict_proves((Eq(a, b) & Eq(b, c)), Eq(a, c))
        assert strict_proves((Eq(a, b) & Eq(b, c)), Eq(c, a))
        assert strict_proves((Eq(a, b) & F(a)), F(b))
        assert strict_proves(((Eq(a, b) | Eq(a, c)), F(a)), (F(b) | F(c)))
        assert strict_proves(FA(x, Eq(a, x)), Eq(a, b))
        assert strict_proves(Eq(a, b), Eq(b, a))
        assert strict_proves(Eq(a, b), Eq(f(a), f(b)))
class EthereumHelper(Helper):
    """Helper mixin for Ethereum ledger operations.

    NOTE(review): the methods below take ``cls`` or no instance parameter at
    all; the ``@staticmethod``/``@classmethod`` decorators were evidently
    stripped from this copy and are restored here.
    """

    @staticmethod
    def is_transaction_settled(tx_receipt: JSONLike) -> bool:
        """Return True when the receipt reports success (``status`` == 1)."""
        is_successful = False
        if tx_receipt is not None:
            is_successful = tx_receipt.get('status', 0) == 1
        return is_successful

    @staticmethod
    def get_contract_address(tx_receipt: JSONLike) -> Optional[str]:
        """Return the deployed contract address from a receipt, or None."""
        contract_address = cast(Optional[str], tx_receipt.get('contractAddress', None))
        return contract_address

    @staticmethod
    def is_transaction_valid(tx: dict, seller: Address, client: Address, tx_nonce: str, amount: int) -> bool:
        """Check a transaction against the expected counterparties, nonce and amount."""
        is_valid = False
        if tx is not None:
            is_valid = (
                tx.get('input') == tx_nonce
                and tx.get('value') == amount
                and tx.get('from') == client
                and tx.get('to') == seller
            )
        return is_valid

    @staticmethod
    def generate_tx_nonce(seller: Address, client: Address) -> str:
        """Derive a (time-salted) nonce by hashing seller, client and timestamp."""
        time_stamp = int(time.time())
        aggregate_hash = Web3.keccak(b''.join([seller.encode(), client.encode(), time_stamp.to_bytes(32, 'big')]))
        return aggregate_hash.hex()

    @classmethod
    def get_address_from_public_key(cls, public_key: str) -> str:
        """Derive the checksummed address from a hex-encoded public key."""
        keccak_hash = Web3.keccak(hexstr=public_key)
        # address = last 20 bytes of keccak(public_key)
        raw_address = keccak_hash[-20:].hex().upper()
        address = Web3.toChecksumAddress(raw_address)
        return address

    @classmethod
    def recover_message(cls, message: bytes, signature: str, is_deprecated_mode: bool = False) -> Tuple[Address, ...]:
        """Recover the signer address from a message and its signature.

        In deprecated mode ``message`` must already be a 32-byte hash.
        """
        if is_deprecated_mode:
            enforce(len(message) == 32, 'Message must be hashed to exactly 32 bytes.')
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')  # recoverHash is deprecated upstream
                address = Account.recoverHash(message_hash=message, signature=signature)
        else:
            signable_message = encode_defunct(primitive=message)
            address = Account.recover_message(signable_message=signable_message, signature=signature)
        return (address,)

    @classmethod
    def recover_public_keys_from_message(cls, message: bytes, signature: str, is_deprecated_mode: bool = False) -> Tuple[str, ...]:
        """Recover the signer public key from a message and its signature.

        Raises:
            ValueError: if the (hashed) message is not exactly 32 bytes.
        """
        if not is_deprecated_mode:
            # apply EIP-191 framing before hashing, matching recover_message
            signable_message = encode_defunct(primitive=message)
            message = _hash_eip191_message(signable_message)
        hash_bytes = HexBytes(message)
        if len(hash_bytes) != 32:
            raise ValueError('The message hash must be exactly 32-bytes')
        signature_bytes = HexBytes(signature)
        signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
        signature_obj = keys.Signature(signature_bytes=signature_bytes_standard)
        pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
        return (str(pubkey),)

    @staticmethod
    def get_hash(message: bytes) -> str:
        """Return the keccak-256 digest of ``message`` as a hex string."""
        digest = Web3.keccak(message).hex()
        return digest

    @classmethod
    def load_contract_interface(cls, file_path: Path) -> Dict[str, str]:
        """Load a contract interface JSON file, requiring abi and bytecode keys.

        Raises:
            ValueError: if a required key is missing.
        """
        with open_file(file_path, 'r') as interface_file_ethereum:
            contract_interface = json.load(interface_file_ethereum)
        for key in [_ABI, _BYTECODE]:
            if key not in contract_interface:
                raise ValueError(f'Contract {file_path} missing key {key}.')
        return contract_interface
class OptionSeriesHistogramSonificationContexttracksMappingRate(Options):
    """Mapping options for the ``rate`` sonification parameter.

    NOTE(review): each name was defined twice (getter then setter), so the
    setter silently shadowed the getter; the ``@property`` / ``@x.setter``
    decorators were evidently stripped and are restored here.
    """

    @property
    def mapFunction(self):
        """The configured mapping function (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The data property this parameter is mapped to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping operates within (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_dihedrals_block(molsetup, indexmap, name):
    """Build the '[Interaction: ...]' dihedral text sections for an export.

    Args:
        molsetup: molecule setup carrying dihedral assignments, bonds and
            per-atom ignore flags.
        indexmap: mapping from molsetup atom index to output index
            (written out 1-based).
        name: interaction namespace used in each section header.

    Returns:
        the formatted text block, one section per dihedral parameter index.
    """
    label_by_index = {}
    atomidx_by_index = {}
    for atomidx in molsetup.dihedral_partaking_atoms:
        (a, b, c, d) = atomidx
        # skip dihedrals touching any ignored atom
        if (molsetup.atom_ignore[a] or molsetup.atom_ignore[b] or molsetup.atom_ignore[c] or molsetup.atom_ignore[d]):
            continue
        # only rotatable central (b-c) bonds contribute
        bond_id = molsetup.get_bond_id(b, c)
        if (not molsetup.bond[bond_id]['rotatable']):
            continue
        index = molsetup.dihedral_partaking_atoms[atomidx]
        atomidx_by_index.setdefault(index, set())
        atomidx_by_index[index].add(atomidx)
        label = (molsetup.dihedral_labels[atomidx] if (atomidx in molsetup.dihedral_labels) else None)
        if (label is None):
            # fall back to a synthetic label derived from the parameter index
            label = ('from_meeko_%d' % index)
        label_by_index.setdefault(index, set())
        label_by_index[index].add(label)
    # collapse each label set to a single string, disambiguating collisions
    # across indices with a _v<N> suffix
    spent_labels = set()
    for index in label_by_index:
        label = '_'.join(label_by_index[index])
        number = 0
        while (label in spent_labels):
            number += 1
            label = ('_'.join(label_by_index[index]) + ('_v%d' % number))
        label_by_index[index] = label
        spent_labels.add(label)
    text = ''
    for index in label_by_index:
        text += ('[Interaction: %s, %s]\n' % (name, label_by_index[index]))
        text += 'type = dihedral\n'
        atomidx_strings = []
        for atomidx in atomidx_by_index[index]:
            # emitted atom indices are 1-based
            string = ','.join([('%d' % (indexmap[i] + 1)) for i in atomidx])
            atomidx_strings.append(string)
        text += ('elements = {%s}\n' % '|'.join(atomidx_strings))
        text += ('parameters = %s\n' % _aux_fourier_conversion(molsetup.dihedral_interactions[index]))
        text += '\n'
    return text
def _validate_inbound_access_list(access_list):
    """Validate an EIP-2930-style access list.

    Each entry must be a dict with exactly two keys: ``address`` (a hex
    address) and ``storage_keys`` (a list of 32-byte hex strings).

    Raises:
        ValidationError: if any part of the structure is malformed.
    """
    if not is_list_like(access_list):
        raise ValidationError('access_list is not list-like')
    for entry in access_list:
        # BUG FIX: this condition previously used ``and``, which let a
        # non-dict entry of length 2 (e.g. a two-item list) slip through and
        # crash on ``entry.get`` below; an entry is invalid if it is not a
        # dict OR does not have exactly two keys.
        if not is_dict(entry) or len(entry) != 2:
            raise ValidationError(f'access_list entry not properly formatted: {entry}')
        address = entry.get('address')
        storage_keys = entry.get('storage_keys')
        if not is_hex_address(address):
            raise ValidationError(f'access_list address must be a hexadecimal address: {address}')
        if not is_list_like(storage_keys):
            raise ValidationError(f'access_list storage keys are not list-like: {storage_keys}')
        if len(storage_keys) > 0 and not all(is_32byte_hex_string(k) for k in storage_keys):
            raise ValidationError(f'one or more access list storage keys not formatted properly: {storage_keys}')
class SevenSegmentDisplay(LEDBoard):
    """An LEDBoard driving a 7-segment display (optionally with an 8th
    decimal-point pin).

    Segments are ordered a..g in the pin tuple; characters are rendered via
    the per-character boolean layouts in ``_layouts``.
    """

    def __init__(self, *pins, **kwargs):
        if len(pins) < 7 or len(pins) > 8:
            raise ValueError('SevenSegmentDisplay must have 7 or 8 pins')
        for pin in pins:
            # nested collections would break per-segment indexing below
            assert not isinstance(pin, LEDCollection)
        pwm = kwargs.pop('pwm', False)
        active_high = kwargs.pop('active_high', True)
        initial_value = kwargs.pop('initial_value', False)
        if kwargs:
            raise TypeError('unexpected keyword argument: %s' % kwargs.popitem()[0])
        # segment patterns (a..g) for each supported character
        self._layouts = {'1': (False, True, True, False, False, False, False), '2': (True, True, False, True, True, False, True), '3': (True, True, True, True, False, False, True), '4': (False, True, True, False, False, True, True), '5': (True, False, True, True, False, True, True), '6': (True, False, True, True, True, True, True), '7': (True, True, True, False, False, False, False), '8': (True, True, True, True, True, True, True), '9': (True, True, True, True, False, True, True), '0': (True, True, True, True, True, True, False), 'A': (True, True, True, False, True, True, True), 'B': (False, False, True, True, True, True, True), 'C': (True, False, False, True, True, True, False), 'D': (False, True, True, True, True, False, True), 'E': (True, False, False, True, True, True, True), 'F': (True, False, False, False, True, True, True), 'G': (True, False, True, True, True, True, False), 'H': (False, True, True, False, True, True, True), 'I': (False, False, False, False, True, True, False), 'J': (False, True, True, True, True, False, False), 'K': (True, False, True, False, True, True, True), 'L': (False, False, False, True, True, True, False), 'M': (True, False, True, False, True, False, False), 'N': (True, True, True, False, True, True, False), 'O': (True, True, True, True, True, True, False), 'P': (True, True, False, False, True, True, True), 'Q': (True, True, False, True, False, True, True), 'R': (True, True, False, False, True, True, False), 'S': (True, False, True, True, False, True, True), 'T': (False, False, False, True, True, True, True), 'U': (False, False, True, True, True, False, False), 'V': (False, True, True, True, True, True, False), 'W': (False, True, False, True, False, True, False), 'X': (False, True, True, False, True, True, True), 'Y': (False, True, True, True, False, True, True), 'Z': (True, True, False, True, True, False, True), '-': (False, False, False, False, False, False, True), ' ': (False, False, False, False, False, False, False), '=': (False, False, False, True, False, False, True)}
        super(SevenSegmentDisplay, self).__init__(*pins, pwm=pwm, active_high=active_high, initial_value=initial_value)

    def display(self, char):
        """Light the segments for a single character (case-insensitive).

        Raises:
            ValueError: for multi-character input or an unknown character.
        """
        char = str(char).upper()
        if len(char) > 1:
            raise ValueError('only a single character can be displayed')
        if char not in self._layouts:
            raise ValueError('there is no layout for character - %s' % char)
        layout = self._layouts[char]
        for led in range(7):
            self[led].value = layout[led]

    def display_hex(self, hexnumber):
        """Display a number as a single hexadecimal digit."""
        self.display(hex(hexnumber)[2:])

    @property
    def decimal_point(self):
        """State of the decimal-point (8th) LED.

        Raises:
            OutputDeviceError: if no 8th pin was supplied.
        """
        if len(self) > 7:
            return self[7].value
        else:
            raise OutputDeviceError('there is no 8th pin for the decimal point')

    # BUG FIX: this setter was a bare ``_point.setter`` statement (a NameError
    # at class creation); restored as the property setter for decimal_point.
    @decimal_point.setter
    def decimal_point(self, value):
        if len(self) > 7:
            self[7].value = value
        else:
            raise OutputDeviceError('there is no 8th pin for the decimal point')

    def set_char_layout(self, char, layout):
        """Register or replace the 7-segment layout for a character."""
        char = str(char).upper()
        if len(char) != 1:
            raise ValueError('only a single character can be used in a layout')
        if len(layout) != 7:
            raise ValueError('a character layout must have 7 segments')
        self._layouts[char] = layout
class Forums(MethodView):
    # Admin-only management view: non-admins get a flashed message and a
    # redirect to the management overview page.
    decorators = [allows.requires(IsAdmin, on_fail=FlashAndRedirect(message=_('You are not allowed to modify forums.'), level='danger', endpoint='management.overview'))]

    def get(self):
        """Render the forum-management page, categories ordered by position."""
        categories = Category.query.order_by(Category.position.asc()).all()
        return render_template('management/forums.html', categories=categories)
def extractWhimsicalreadsCom(item):
    """Build a release message for whimsicalreads.com feed items.

    Returns None for previews / unnumbered items, False when no known group
    tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_numbering = bool(chp or vol)
    if not has_numbering or 'preview' in item['title'].lower():
        return None
    known_groups = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, group_name, release_type in known_groups:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, group_name, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
def copy_func_kwargs(func_with_kwargs: _typing.Callable[(P, _typing.Any)]) -> _typing.Callable[([_typing.Callable[(..., R)]], _typing.Callable[(P, R)])]:
    # Decorator factory: make the decorated function advertise the parameter
    # specification ``P`` of ``func_with_kwargs`` to static type checkers.
    # Purely a typing aid -- at runtime the decorated function is returned
    # unchanged (the cast is a no-op).
    def return_func(func: _typing.Callable[(..., R)]) -> _typing.Callable[(P, R)]:
        return _typing.cast(_typing.Callable[(P, R)], func)
    return return_func
class Restart(Generic[T]):
    """Restart sampling wrapper around a latent diffusion model: re-noises a
    timestep interval and re-denoises it ``num_iterations`` times.

    NOTE(review): the class-level annotated fields with defaults plus
    ``__post_init__`` strongly suggest a stripped ``@dataclass`` decorator --
    confirm against upstream before relying on the generated __init__.
    """

    ldm: T                   # latent diffusion model; its scheduler must be DDIM
    num_steps: int = 10      # number of timesteps inside the restart interval
    num_iterations: int = 2  # how many re-noise / re-denoise passes to run
    start_time: float = 0.1  # sigma at which the restart interval starts
    end_time: float = 2      # sigma at which the restart interval ends

    def __post_init__(self) -> None:
        assert isinstance(self.ldm.scheduler, DDIM), 'Restart sampling only works with DDIM scheduler'

    def __call__(self, x: torch.Tensor, /, clip_text_embedding: torch.Tensor, condition_scale: float = 7.5, **kwargs: torch.Tensor) -> torch.Tensor:
        """Run the restart loop on ``x``.

        Temporarily swaps in a DDIM scheduler restricted to the restart
        timesteps; the original scheduler is restored before returning.
        """
        original_scheduler = self.ldm.scheduler
        new_scheduler = DDIM(self.ldm.scheduler.num_inference_steps, device=self.device, dtype=self.dtype)
        new_scheduler.timesteps = self.timesteps
        self.ldm.scheduler = new_scheduler
        for _ in range(self.num_iterations):
            noise = torch.randn_like(input=x, device=self.device, dtype=self.dtype)
            # push x back from the end of the interval to its start
            x = add_noise_interval(new_scheduler, x=x, noise=noise, initial_timestep=self.timesteps[-1], target_timestep=self.timesteps[0])
            for step in range(len(self.timesteps) - 1):
                x = self.ldm(x, step=step, clip_text_embedding=clip_text_embedding, condition_scale=condition_scale, **kwargs)
        self.ldm.scheduler = original_scheduler
        return x

    # BUG FIX: the accessors below were preceded by bare ``_property``
    # statements (a NameError at class creation). They are used as attributes
    # (``self.timesteps``, ``self.device`` above), so ``@property`` is restored.
    @property
    def start_step(self) -> int:
        """Scheduler step whose sigma is closest to ``start_time``."""
        sigmas = self.ldm.scheduler.noise_std / self.ldm.scheduler.cumulative_scale_factors
        return int(torch.argmin(input=torch.abs(input=(sigmas[self.ldm.scheduler.timesteps] - self.start_time))))

    @property
    def end_timestep(self) -> int:
        """Raw timestep whose sigma is closest to ``end_time``."""
        sigmas = self.ldm.scheduler.noise_std / self.ldm.scheduler.cumulative_scale_factors
        return int(torch.argmin(input=torch.abs(input=(sigmas - self.end_time))))

    @property
    def timesteps(self) -> torch.Tensor:
        """The restart interval's timesteps, descending, as int64 on device."""
        return torch.round(torch.linspace(start=int(self.ldm.scheduler.timesteps[self.start_step]), end=self.end_timestep, steps=self.num_steps)).flip(0).to(device=self.device, dtype=torch.int64)

    @property
    def device(self) -> torch.device:
        return self.ldm.device

    @property
    def dtype(self) -> torch.dtype:
        return self.ldm.dtype
def _kill_master(master_container: str) -> None:
    """Shut down the redis master inside the given container without saving.

    A clean SHUTDOWN produces no output; when the container dies mid-call,
    docker-compose exits with code 137 (SIGKILL), which is also accepted.
    """
    try:
        output = call_command_in_container(master_container, ['redis-cli', '-p', '6379', 'SHUTDOWN', 'NOSAVE'])
        assert (not output)
    except AssertionError as e:
        # NOTE(review): the bare assert above raises without args, so the
        # args[0] access presumably targets an AssertionError raised inside
        # call_command_in_container -- confirm.
        assert ('Call to docker-compose failed with exit code 137' in e.args[0])
def setup_broker_fdw():
    """Run the broker FDW setup SQL against the default database connection.

    The SQL file is split on the '\\n\\n\\n' separator; each statement is
    executed with the 'data_broker' connection settings passed as parameters.
    """
    with connection.cursor() as cursor:
        with open('usaspending_api/etl/management/setup_broker_fdw.sql') as infile:
            # log the broker connection settings being used
            logger.info(connections.databases['data_broker'])
            for raw_sql in infile.read().split('\n\n\n'):
                logger.info(('SETUP BROKER FDW: Running SQL => ' + str(raw_sql)))
                cursor.execute(raw_sql, connections.databases['data_broker'])
class Attenuator(lg.Node):
    """LabGraph node that scales incoming message samples by a dB attenuation."""

    ATTENUATOR_INPUT = lg.Topic(RandomMessage)
    ATTENUATOR_OUTPUT = lg.Topic(RandomMessage)
    config: AttenuatorConfig

    def output(self, _in: float) -> float:
        # convert dB to a linear amplitude factor: 10 ** (dB / 20)
        return pow(10, self.config.attenuation / 20) * _in

    # BUG FIX: the decorators here were mangled to bare ``(ATTENUATOR_INPUT)``
    # / ``(ATTENUATOR_OUTPUT)`` expression statements; restored to the
    # standard LabGraph subscriber/publisher pair -- confirm upstream.
    @lg.subscriber(ATTENUATOR_INPUT)
    @lg.publisher(ATTENUATOR_OUTPUT)
    async def attenuate(self, message: RandomMessage) -> lg.AsyncPublisher:
        """Attenuate every sample and republish with a fresh timestamp."""
        current_time = time.time()
        output_data = np.array([self.output(sample) for sample in message.data])
        yield (self.ATTENUATOR_OUTPUT, RandomMessage(timestamp=current_time, data=output_data))
class IndexRangeEnvironment():
    """Tracks conservative inclusive (lo, hi) integer ranges for index symbols.

    ``None`` on either side of a range means unbounded/unknown in that
    direction. Scopes nest via a ChainMap so loop iterators shadow outer ones.
    """

    # comparison operator tokens accepted by check_expr_bound(s)
    lt = '<'
    leq = '<='
    eq = '=='

    @staticmethod
    def get_pred_reads(expr):
        """Collect the set of symbol names read by an index expression."""
        if isinstance(expr, LoopIR.Read):
            return {expr.name}
        elif isinstance(expr, LoopIR.USub):
            return IndexRangeEnvironment.get_pred_reads(expr.arg)
        elif isinstance(expr, LoopIR.BinOp):
            return (IndexRangeEnvironment.get_pred_reads(expr.lhs)
                    | IndexRangeEnvironment.get_pred_reads(expr.rhs))
        else:
            return set()

    def __init__(self, proc, fast=True) -> None:
        """Seed the environment with ranges for the proc's Size arguments.

        Args:
            proc: the LoopIR procedure to analyze.
            fast: when True, skip the slower predicate-refined analysis for
                arguments that predicates read.
        """
        assert isinstance(proc, LoopIR.proc)
        preds_reads = set()
        if not fast:
            # only symbols read by predicates benefit from the slow analysis
            for pred in proc.preds:
                preds_reads = preds_reads | IndexRangeEnvironment.get_pred_reads(pred)
        self.proc = proc
        self.env = ChainMap()
        for arg in proc.args:
            if isinstance(arg.type, LoopIR.Size):
                self.env[arg.name] = arg_range_analysis(proc, arg, fast=(arg.name not in preds_reads))

    def enter_scope(self):
        """Push a child scope for loop-local symbols."""
        self.env = self.env.new_child()

    def exit_scope(self):
        """Pop the innermost scope."""
        self.env = self.env.parents

    def add_loop_iter(self, sym, lo_expr, hi_expr):
        """Record a loop iterator's range: [lo(lo_expr), hi(hi_expr) - 1]."""
        (lo, _) = index_range_analysis(lo_expr, self.env)
        (_, hi) = index_range_analysis(hi_expr, self.env)
        if hi is not None:
            hi = hi - 1  # the loop's upper bound is exclusive
        sym_range = (lo, hi)
        # an empty interval (lo > hi) carries no usable information
        if sym_range[0] is not None and sym_range[1] is not None and sym_range[0] > sym_range[1]:
            sym_range = (None, None)
        self.env[sym] = sym_range

    @staticmethod
    def _check_range(range0, op, range1):
        """Return True when ``range0 op range1`` provably holds for all values.

        '<' and '<=' compare max(range0) against min(range1); any other op is
        treated as equality, which only holds when both ranges pin the same
        single value.
        """
        if range0[1] is None or range1[0] is None:
            return False
        if op == IndexRangeEnvironment.lt:
            return range0[1] < range1[0]
        elif op == IndexRangeEnvironment.leq:
            # BUG FIX: previously compared range0[0] (the lower bound), which
            # wrongly proved e.g. (0, 7) <= (5, 10); soundness requires the
            # UPPER bound of the left range, mirroring the '<' branch above.
            return range0[1] <= range1[0]
        else:
            if range0[0] is None or range1[1] is None:
                return False
            return range0[0] == range0[1] == range1[0] == range1[1]

    def check_expr_bound(self, expr0, op, expr1):
        """Check whether ``expr0 op expr1`` provably holds in the current env."""
        expr0_range = index_range_analysis(expr0, self.env)
        expr1_range = index_range_analysis(expr1, self.env)
        return IndexRangeEnvironment._check_range(expr0_range, op, expr1_range)

    def check_expr_bounds(self, expr0, op0, expr1, op1, expr2):
        """Check the conjunction ``expr0 op0 expr1`` and ``expr1 op1 expr2``."""
        expr0_range = index_range_analysis(expr0, self.env)
        expr1_range = index_range_analysis(expr1, self.env)
        expr2_range = index_range_analysis(expr2, self.env)
        return (IndexRangeEnvironment._check_range(expr0_range, op0, expr1_range)
                and IndexRangeEnvironment._check_range(expr1_range, op1, expr2_range))
# BUG FIX: this decorator was mangled to the bare (syntactically invalid)
# statement ``.usefixtures('mycmd')``; restored to the pytest class marker.
@pytest.mark.usefixtures('mycmd')
class TestWhich():
    """Tests for the ``which`` executable-lookup helper."""

    def test_dir_cmd(self, mycmd):
        # a name containing a path separator is checked directly, not via PATH
        assert which('noexists/mycmd') is None
        assert which(mycmd) == mycmd

    def test_cmd_path(self, tmpdir, mycmd):
        path = str(tmpdir)
        assert which('mycmd') is None
        # an explicit search path finds it without touching the environment
        assert which('mycmd', path=path) == mycmd
        # prepending tmpdir to PATH makes the bare name resolvable
        os.environ['PATH'] = path + os.pathsep + os.environ.get('PATH', os.defpath)
        assert which('mycmd') == mycmd
class OptionPlotoptionsVennSonificationContexttracksMappingRate(Options):
    """Mapping options for the ``rate`` sonification parameter.

    NOTE(review): each name was defined twice (getter then setter), so the
    setter silently shadowed the getter; the ``@property`` / ``@x.setter``
    decorators were evidently stripped and are restored here.
    """

    @property
    def mapFunction(self):
        """The configured mapping function (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The data property this parameter is mapped to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping operates within (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def start_repl(p, prompt=' >> '):
    """Run the interactive Preql prompt until the user exits.

    Args:
        p: the Preql interface object (provides _display, _interp, _run_code).
        prompt: prompt string shown before each input.

    Returns:
        the value carried by ExitInterp, when the session ends that way.
    """
    save_last = '_'  # each statement's result is stored into this variable
    p.set_output_format('rich')
    display = p._display
    interp = p._interp
    console = display.console
    console.print(f'[purple]Preql {__version__}{__branch__} interactive prompt. Type help() for help[/purple]')
    try:
        session = PromptSession(style=style_from_pygments_cls(make_preql_style()), lexer=PygmentsLexer(GoLexer), completer=Autocompleter(interp.state), validator=MyValidator(), history=FileHistory(str((Path.home() / '.preql_history'))), auto_suggest=AutoSuggestFromHistory(), color_depth=ColorDepth.TRUE_COLOR)

        def multiline_filter():
            # stay in multiline mode while the buffer is not yet valid code
            text = get_app().layout.get_buffer_by_name('DEFAULT_BUFFER').text
            return (not _code_is_valid(text))
        while True:
            try:
                code = session.prompt(prompt, multiline=multiline_filter)
                if (not code.strip()):
                    continue
                start_time = time()
                try:
                    # '.' is a shortcut that pages more rows of the last table
                    if (code == '.'):
                        with context(state=p._interp.state):
                            console.print(table_more(), overflow='ellipsis')
                        continue
                    res = p._run_code(code, '<repl>')
                    if ((res is not None) and (res is not objects.null)):
                        assert isinstance(res, Object), (res, type(res))
                        if save_last:
                            p._interp.set_var(save_last, res)
                        with context(state=p._interp.state):
                            res_repr = res.repr()
                            # truncate very long plain strings for display
                            if (isinstance(res_repr, str) and (res.type == T.string)):
                                if (len(res_repr) > 200):
                                    res_repr = ((res_repr[:100] + '...') + res_repr[(- 100):])
                            display.print(res_repr)
                except Signal as s:
                    # language-level error: report and keep the REPL alive
                    display.print_exception(s)
                    continue
                except ExitInterp as e:
                    return e.value
                except Exception as e:
                    # unexpected failure: log and propagate
                    repl_log.exception(e)
                    raise
                duration = (time() - start_time)
                if (duration > 1):
                    repl_log.info(('(Query took %.2f seconds)' % duration))
            except KeyboardInterrupt:
                # Ctrl+C cancels the current input only
                repl_log.info('Interrupted (Ctrl+C)')
    except (KeyboardInterrupt, EOFError):
        repl_log.info('Exiting Preql interaction')
def cmag_h_f(gen, t, srcs):
    """Emit code computing the sum of squares of a quaternion's four
    components (real, i, j, k) as a Float."""
    quat = srcs[0]
    squared = [gen.emit_binop('*', [part, part], Float) for part in quat.parts[:4]]
    total = squared[0]
    for component in squared[1:]:
        total = gen.emit_binop('+', [total, component], Float)
    return total
def do_all_tests() -> Tuple[(int, int)]:
    """Connect to the challenge server, solve its proof-of-work if requested,
    then run every module-level ``test_*`` function against the connection.

    Returns:
        (success, failure) counts.
    """
    store = Connection()
    prompt = store.recvuntil('\n> ')
    if ('please provide S' in prompt):
        # the server demands a proof-of-work before serving the challenge
        from collections import namedtuple
        from proof import cmd_solve
        lines = prompt.split('\n')
        # prefix and challenge are quoted on fixed lines of the banner
        prefix = lines[1].split('"')[1]
        challenge = lines[4].split('"')[1]
        if ((len(lines) > 5) and ('len(S)' in lines[5])):
            length = int(lines[5].split(' == ')[(- 1)])
        else:
            length = 20  # default solution length when the server omits it
        print(f'doing proof of work, {prefix} -> {challenge} (len {length})')
        Args = namedtuple('Args', ['prefix', 'challenge', 'length'])
        proof = cmd_solve(Args(prefix, challenge, length))
        print(f'solved : {proof}')
        store.sendline(proof)
        check = store.recvline()
        if (('invalid' in check) or ('timeout' in check)):
            print('proof of work failed!')
            exit(1)
        prompt = (check + store.recvuntil('\n> '))
    success = 0
    failure = 0
    # discover tests by naming convention in this module's globals
    test_names = [g for g in globals().keys() if g.startswith('test_')]
    for name in test_names:
        func = globals()[name]
        if callable(func):
            if do_test(func, store):
                print(f'{name}: pass')
                success += 1
            else:
                print(f'{name}: fail')
                failure += 1
    return (success, failure)
class IncorrectCredentialsTests(TestCase):
    """An authentication class raising AuthenticationFailed yields 403 + detail."""

    def test_incorrect_credentials(self):
        class IncorrectCredentialsAuth(BaseAuthentication):
            def authenticate(self, request):
                # always reject, simulating bad credentials
                raise exceptions.AuthenticationFailed('Bad credentials')
        request = factory.get('/')
        # no permission classes, so the failure must come from authentication
        view = MockView.as_view(authentication_classes=(IncorrectCredentialsAuth,), permission_classes=())
        response = view(request)
        assert (response.status_code == status.HTTP_403_FORBIDDEN)
        assert (response.data == {'detail': 'Bad credentials'})
def test_deposits_on_staggered_dynasties(casper, concise_casper, funded_accounts, validation_keys, deposit_amount, new_epoch, induct_validator, deposit_validator, send_vote, mk_suggested_vote):
    """Deposits made across consecutive epochs stay out of the current (and,
    later, the previous) dynasty totals while still pending."""
    initial_validator = induct_validator(funded_accounts[0], validation_keys[0], deposit_amount)
    # advance a few epochs with only the initial validator voting
    for i in range(3):
        send_vote(mk_suggested_vote(initial_validator, validation_keys[0]))
        new_epoch()
    # stagger further deposits: one new validator per epoch; none of them may
    # count towards the current-dynasty total yet
    for (account, key) in zip(funded_accounts[1:], validation_keys[1:]):
        deposit_validator(account, key, deposit_amount)
        assert (concise_casper.deposit_size(initial_validator) == concise_casper.total_curdyn_deposits_in_wei())
        send_vote(mk_suggested_vote(initial_validator, validation_keys[0]))
        new_epoch()
    assert (concise_casper.deposit_size(initial_validator) == concise_casper.total_curdyn_deposits_in_wei())
    send_vote(mk_suggested_vote(initial_validator, validation_keys[0]))
    new_epoch()
    # after one more epoch the initial validator alone still makes up the
    # previous-dynasty total
    assert (concise_casper.deposit_size(initial_validator) == concise_casper.total_prevdyn_deposits_in_wei())
class APISiteDetailTests(APITestCase):
    """API tests for the single-site detail endpoint."""

    def setUp(self):
        # one known site so the detail endpoint has something to return
        create_site()

    def test_get_site(self):
        url = urljoin(urlroot, 'sites/securethe.news/')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['name'], 'Secure the News')
class _Xor(_BinaryType):
    """Type representing the symmetric difference of two types: values in
    exactly one of the two sides."""

    __pnmltype__ = 'xor'

    def __init__(self, left, right):
        self._left = left
        self._right = right
        _iterable(self, left, right)

    def __repr__(self):
        return '(%s ^ %s)' % (repr(self._left), repr(self._right))

    def __contains__(self, value):
        in_left = value in self._left
        in_right = value in self._right
        # member of exactly one side
        return in_left != in_right

    def __iter__(self):
        self.__iterable__()
        # first left-only values, then right-only values
        for side, other in ((self._left, self._right), (self._right, self._left)):
            for value in side:
                if value not in other:
                    yield value
def read_file_if_exists(filename: typing.Optional[str], encoding=None) -> typing.Optional[str]:
    """Return the text content of ``filename``, or None when no name is given.

    Args:
        filename: path to read; falsy values (None, '') short-circuit to None.
        encoding: passed through to ``Path.read_text``.

    Raises:
        OSError: if the file cannot be read (e.g. it does not exist).
    """
    if not filename:
        return None
    path = pathlib.Path(filename)
    # BUG FIX: the log line previously printed the literal placeholder
    # '(unknown)' instead of the file being read.
    logger.debug(f'Reading file contents from [{path}] with current directory [{os.getcwd()}].')
    return path.read_text(encoding=encoding)
def add_output_tensor_nodes(postprocessed_tensors, output_collection_name='inference_op'):
    """Wrap post-processed detection tensors in named identity ops and register
    them in a graph collection.

    Args:
        postprocessed_tensors: dict keyed by DetectionResultFields names.
        output_collection_name: graph collection to add every output tensor to.

    Returns:
        dict of output tensors keyed by the standard detection field names.
    """
    detection_fields = fields.DetectionResultFields
    label_id_offset = 1  # shift class ids so 0 is not used as a class label
    boxes = postprocessed_tensors.get(detection_fields.detection_boxes)
    scores = postprocessed_tensors.get(detection_fields.detection_scores)
    multiclass_scores = postprocessed_tensors.get(detection_fields.detection_multiclass_scores)
    raw_boxes = postprocessed_tensors.get(detection_fields.raw_detection_boxes)
    raw_scores = postprocessed_tensors.get(detection_fields.raw_detection_scores)
    classes = (postprocessed_tensors.get(detection_fields.detection_classes) + label_id_offset)
    keypoints = postprocessed_tensors.get(detection_fields.detection_keypoints)
    masks = postprocessed_tensors.get(detection_fields.detection_masks)
    num_detections = postprocessed_tensors.get(detection_fields.num_detections)
    outputs = {}
    # identity ops give each output a stable, well-known name in the graph;
    # optional tensors are only emitted when present
    outputs[detection_fields.detection_boxes] = tf.identity(boxes, name=detection_fields.detection_boxes)
    outputs[detection_fields.detection_scores] = tf.identity(scores, name=detection_fields.detection_scores)
    if (multiclass_scores is not None):
        outputs[detection_fields.detection_multiclass_scores] = tf.identity(multiclass_scores, name=detection_fields.detection_multiclass_scores)
    outputs[detection_fields.detection_classes] = tf.identity(classes, name=detection_fields.detection_classes)
    outputs[detection_fields.num_detections] = tf.identity(num_detections, name=detection_fields.num_detections)
    if (raw_boxes is not None):
        outputs[detection_fields.raw_detection_boxes] = tf.identity(raw_boxes, name=detection_fields.raw_detection_boxes)
    if (raw_scores is not None):
        outputs[detection_fields.raw_detection_scores] = tf.identity(raw_scores, name=detection_fields.raw_detection_scores)
    if (keypoints is not None):
        outputs[detection_fields.detection_keypoints] = tf.identity(keypoints, name=detection_fields.detection_keypoints)
    if (masks is not None):
        outputs[detection_fields.detection_masks] = tf.identity(masks, name=detection_fields.detection_masks)
    for output_key in outputs:
        tf.add_to_collection(output_collection_name, outputs[output_key])
    return outputs
class PickedData(HasTraits):
    """Traits container describing a single pick event on a TVTK scene.

    Instances are filled in by the picker and handed to user callbacks.
    """

    # Whether this pick actually hit something; defaults to false.
    valid = Trait(false_bool_trait, desc='specifies the validity of the pick event')
    # ID of the picked point; -1 when no point was picked.
    point_id = Int((- 1), desc='the picked point ID')
    # ID of the picked cell; -1 when no cell was picked.
    cell_id = Int((- 1), desc='the picked cell ID')
    # True when the pick is a world pick (not tied to an actor's data).
    world_pick = Trait(false_bool_trait, desc='specifies if the pick is a world pick.')
    # (x, y, z) world coordinate of the picked point.
    coordinate = Array('d', (3,), labels=['x', 'y', 'z'], cols=3, desc='the coordinate of the picked point')
    # Text actor used to display pick information in the scene.
    text_actor = Instance(tvtk.TextActor)
    # Scene in which the pick occurred.
    renwin = Instance(TVTKScene)
    # Arbitrary picked data payload.
    data = Any
class Components:
    """Factories for Bootstrap toast components, bound to a report page."""

    def __init__(self, ui):
        # Underlying page object used to build the concrete HTML components.
        self.page = ui.page

    def dismiss(self, text='', icon=None, category='primary', width=(None, '%'), height=(None, 'px'), html_code=None, tooltip=None, profile=None, options=None):
        """Return a close button that dismisses its enclosing toast."""
        close_btn = self.page.web.bs.button(text, icon, category, width, height, html_code, tooltip, profile, options)
        close_btn.add_style(['btn-close'], clear_first=True)
        close_btn.aria.label = 'Close'
        close_btn.attr['data-bs-dismiss'] = 'toast'
        return close_btn

    def container(self, components=None, label=None, color=None, width=(100, '%'), icon=None, height=(None, 'px'), editable=False, align='left', padding=None, html_code=None, helper=None, options=None, profile=None, position=None):
        """Return a div styled as a Bootstrap toast container."""
        wrapper = self.page.web.std.div(components, label, color, width, icon, height, editable, align, padding, html_code, 'div', helper, options, profile, position)
        wrapper.add_style(['toast-container'], clear_first=True)
        return wrapper

    def fixed(self, components=None, label=None, color=None, width=(100, '%'), icon=None, height=(None, 'px'), editable=False, align='left', padding=None, html_code=None, helper=None, options=None, profile=None, position=None):
        """Return a toast container pinned to the bottom-right corner of the page."""
        wrapper = self.page.web.std.div(components, label, color, width, icon, height, editable, align, padding, html_code, 'div', helper, options, profile, position)
        wrapper.add_style(['position-fixed', 'bottom-0', 'end-0', 'p-3'], clear_first=True)
        return wrapper

    def custom(self, values=None, html_code=None, width=(100, '%'), height=(None, '%'), profile=None, options=None):
        """Return a bare toast widget, optionally pre-filled with body items."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        toast = HtmlBsWidgets.BsToast(self.page, None, html_code, options or {}, profile, {'width': width, 'height': height})
        toast.flex_container = True
        toast.aria.live = 'assertive'
        toast.aria.atomic = 'true'
        toast.attr['role'] = 'alert'
        if values is not None:
            for item in values:
                toast.add_to_body(item)
        return toast

    def button(self, toast, text='', icon=None, category='primary', width=(None, '%'), height=(None, 'px'), html_code=None, tooltip=None, profile=None, options=None):
        """Return a button that toggles the given *toast* component."""
        trigger = self.page.web.bs.button(text, icon, category, width, height, html_code, tooltip, profile, options)
        trigger.attr['data-bs-toggle'] = 'toast'
        trigger.attr['data-bs-target'] = '#%s' % toast.htmlCode
        return trigger
def load_env(domain_name, task_name, seed, frame_stack, action_repeat):
    """Load a pixel-based dm_control environment with the standard wrapper stack.

    The raw suite task is rendered to 84x84 pixels, canonicalised, reduced to
    the 'pixels' observation, action-repeated, frame-stacked and cast to
    single precision.
    """
    raw_env = suite.load(
        domain_name=domain_name,
        task_name=task_name,
        environment_kwargs={'flat_observation': True},
        task_kwargs={'random': seed},
    )
    wrapped = pixels.Wrapper(raw_env, pixels_only=True, render_kwargs={'width': 84, 'height': 84, 'camera_id': 0})
    wrapped = wrappers.CanonicalSpecWrapper(wrapped)
    wrapped = magi_wrappers.TakeKeyWrapper(wrapped, 'pixels')
    wrapped = wrappers.ActionRepeatWrapper(wrapped, action_repeat)
    wrapped = magi_wrappers.FrameStackingWrapper(wrapped, num_frames=frame_stack)
    return wrappers.SinglePrecisionWrapper(wrapped)
def test_migrating_simple_asset_3(migration_test_data, create_pymel, create_maya_env):
    """Migrating asset2 with two takes creates a renamed asset under assets_task2."""
    data = migration_test_data
    # Recipe: move asset2 under assets_task2 under a new name/code, and carry
    # over the v003 versions of the Main and Take1 takes of its model task.
    migration_recipe = {data['asset2'].id: {'new_name': 'Asset 3A', 'new_code': 'asset3a', 'new_parent_id': data['assets_task2'].id}, data['asset2_model'].id: {'new_parent_id': data['asset2'].id, 'takes': {'Main': {'new_name': 'Main', 'versions': [data['asset2_model_main_v003'].version_number]}, 'Take1': {'new_name': 'Take1', 'versions': [data['asset2_model_take1_v003'].version_number]}}}}
    # Target task starts without children.
    assert (data['assets_task2'].children == [])
    amt = AssetMigrationTool()
    amt.migration_recipe = migration_recipe
    amt.migrate()
    # The migrated asset now exists with the requested name/code and type.
    assert (data['assets_task2'].children != [])
    new_asset = data['assets_task2'].children[0]
    assert isinstance(new_asset, Asset)
    assert (new_asset.name == 'Asset 3A')
    assert (new_asset.code == 'asset3a')
    assert (new_asset.type == data['asset2'].type)
    # The model task was migrated along with the asset...
    assert (new_asset.children != [])
    model_task = new_asset.children[0]
    assert isinstance(model_task, Task)
    assert (model_task.name == 'Model')
    # ...and carries exactly one version per selected take.
    assert (model_task.versions != [])
    assert (len(model_task.versions) == 2)
    assert (Version.query.filter((Version.task == model_task)).filter((Version.take_name == 'Main')).count() == 1)
    assert (Version.query.filter((Version.task == model_task)).filter((Version.take_name == 'Take1')).count() == 1)
class Container(containers.DeclarativeContainer):
    """DI container exercising the ``.provided`` attribute of a provider.

    Each Client factory receives a value derived from the Service singleton
    through a different provided-instance access path.
    """

    service = providers.Singleton(Service, value='foo')
    # Attribute access on the provided instance: service.value
    client_attribute = providers.Factory(Client, value=service.provided.value)
    # Item access: service[0]
    client_item = providers.Factory(Client, value=service.provided[0])
    # Attribute then item access: service.values[0]
    client_attribute_item = providers.Factory(Client, value=service.provided.values[0])
    # Method call: service.get_value()
    client_method_call = providers.Factory(Client, value=service.provided.get_value.call())
    # Call the closure returned by service.get_closure()
    client_method_closure_call = providers.Factory(Client, value=service.provided.get_closure.call().call())
    # Call the provided instance itself: service()
    client_provided_call = providers.Factory(Client, value=service.provided.call())
def extractTranslationnoobBlogspotCom(item):
    """Map a feed item from this site to a release message.

    Returns None for previews or items with no chapter/volume, a release
    message when a known tag or title prefix matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if 'preview' in title_lower or not (chp or vol):
        return None
    # Known series tags -> (display name, translation type).
    tagmap = [('Power and Wealth', 'Power and Wealth', 'translated'), ('Extraordinary Genius', 'Extraordinary Genius', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Fall back to recognising the series from a title prefix.
    chp_prefixes = [('Extraordinary Genius - Cp', 'Extraordinary Genius', 'translated')]
    for prefix, series, tl_type in chp_prefixes:
        if title_lower.startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CircleWidget(QWidget):
    """Widget that paints a pulsing set of concentric circles.

    Call ``next()`` once per animation tick to advance the frame counter and
    schedule a repaint.
    """

    def __init__(self, parent=None):
        super(CircleWidget, self).__init__(parent)
        self.nframe = 0  # current animation frame counter
        self.setBackgroundRole(QPalette.Base)
        self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)

    def minimumSizeHint(self):
        """Smallest sensible size for the widget."""
        return QSize(50, 50)

    def sizeHint(self):
        """Preferred default size for the widget."""
        return QSize(180, 180)

    def next(self):
        """Advance the animation by one frame and request a repaint."""
        self.nframe += 1
        self.update()

    def paintEvent(self, event):
        """Draw circles whose alpha pulses as ``nframe`` advances."""
        painter = QPainter(self)
        painter.setRenderHint(QPainter.Antialiasing, True)
        # Move the origin to the widget centre so circles are drawn around it.
        painter.translate(self.width() / 2, self.height() / 2)
        for diameter in range(0, 64, 9):
            delta = abs((self.nframe % 64) - (diameter / 2))
            alpha = 255 - ((delta * delta) / 4) - diameter
            if alpha > 0:
                # Fix: under Python 3 the divisions above produce floats, and
                # QColor channel values must be ints (PyQt raises TypeError);
                # cast explicitly.  int() truncation matches the old Python 2
                # integer-division behaviour for these non-negative values.
                painter.setPen(QPen(QColor(0, int(diameter / 2), 127, int(alpha)), 3))
                painter.drawEllipse(QRectF((- diameter) / 2.0, (- diameter) / 2.0, diameter, diameter))
def test_optimize_transparency(fx_asset):
    """optimize_transparency() should change the image signature but not its size."""
    with Image(filename=str(fx_asset.joinpath('nocomments.gif'))) as img1:
        # Work on a copy so the original stays available for comparison.
        with Image(img1) as img2:
            try:
                img2.optimize_transparency()
                assert (img1.signature != img2.signature)
                assert (img1.size == img2.size)
            except AttributeError as e:
                # Older ImageMagick builds lack MagickOptimizeImageTransparency;
                # warn and skip instead of failing the test.
                warnings.warn(('MagickOptimizeImageTransparency not present on system. ' + repr(e)))
class OptionPlotoptionsSeriesSonificationContexttracksMappingLowpassFrequency(Options):
    """Generated Highcharts option wrapper for the lowpass-frequency mapping.

    Each option is exposed as a property: the getter returns the configured
    value (None when unset) and the setter stores it in the config.

    Fix: the original defined each getter/setter pair as two plain methods
    with the same name, so the setter silently shadowed the getter; the
    @property / @x.setter decorators restore both.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Command(AbstractElasticsearchIndexer):
    """Elasticsearch indexer command backed by Delta Lake tables via Spark."""

    def create_controller(self, config: dict) -> AbstractElasticsearchIndexerController:
        """Build the Delta Lake controller, creating a Spark session if none is active."""
        # Spark settings required to read Delta tables; the legacy rebase
        # modes keep old parquet datetime/int96 semantics, and
        # ignoreNullFields=false preserves nulls in generated JSON.
        extra_conf = {'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension', 'spark.sql.catalog.spark_catalog': 'org.apache.spark.sql.delta.catalog.DeltaCatalog', 'spark.sql.legacy.parquet.datetimeRebaseModeInWrite': 'LEGACY', 'spark.sql.legacy.parquet.int96RebaseModeInWrite': 'LEGACY', 'spark.sql.jsonGenerator.ignoreNullFields': 'false'}
        # Track whether this command owns the session (caller may want to stop it).
        spark_created_by_command = False
        spark = get_active_spark_session()
        if (not spark):
            spark_created_by_command = True
            # NOTE(review): spark is falsy here, so spark_context=spark passes
            # None/falsy through -- presumably configure_spark_session then
            # creates a fresh session with extra_conf; confirm.
            spark = configure_spark_session(**extra_conf, spark_context=spark)
        return DeltaLakeElasticsearchIndexerController(config, spark, spark_created_by_command)
class OptionSeriesItemSonificationTracksMappingHighpassFrequency(Options):
    """Generated Highcharts option wrapper for the highpass-frequency mapping.

    Each option is exposed as a property: the getter returns the configured
    value (None when unset) and the setter stores it in the config.

    Fix: the original defined each getter/setter pair as two plain methods
    with the same name, so the setter silently shadowed the getter; the
    @property / @x.setter decorators restore both.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class DbModelView(BaseModelView):
    """Flask-Admin model view backed by Google App Engine ``db`` models.

    NOTE: uses ``iteritems`` and the legacy ``db`` API, so this targets
    Python 2 and the old GAE datastore library.
    """

    def get_pk_value(self, model):
        """Return the entity's key (stringified) as the primary-key value."""
        return str(model.key())

    def scaffold_list_columns(self):
        # All db.Property attributes declared on the model class, sorted by name.
        return sorted([k for (k, v) in self.model.__dict__.iteritems() if isinstance(v, db.Property)])

    def scaffold_sortable_columns(self):
        # Only indexed properties can participate in datastore sort orders.
        return [k for (k, v) in self.model.__dict__.iteritems() if (isinstance(v, db.Property) and getattr(v, 'indexed', None))]

    def init_search(self):
        # Full-text search is not supported for this backend.
        return None

    def is_valid_filter(self):
        # Filtering is not implemented for this backend.
        pass

    def scaffold_filters(self):
        # Filtering is not implemented for this backend.
        pass

    def scaffold_form(self):
        # NOTE(review): self.model() passes an *instance* to model_form;
        # confirm this is intentional rather than passing the class.
        return wt_db.model_form(self.model())

    def get_list(self, page, sort_field, sort_desc, search, filters):
        """Return (total_count, page_of_results); search/filters are ignored here."""
        q = self.model.all()
        if sort_field:
            if sort_desc:
                # Datastore descending sort: prefix the property name with '-'.
                sort_field = ('-' + sort_field)
            q.order(sort_field)
        results = q.fetch(self.page_size, offset=(page * self.page_size))
        return (q.count(), results)

    def get_one(self, encoded_key):
        """Fetch a single entity from its URL-safe encoded key."""
        return db.get(db.Key(encoded=encoded_key))

    def create_model(self, form):
        """Create and save a new entity from *form*; return it, or False on failure."""
        try:
            model = self.model()
            form.populate_obj(model)
            model.put()
            return model
        except Exception as ex:
            if (not self.handle_view_exception(ex)):
                logging.exception('Failed to create record.')
            return False

    def update_model(self, form, model):
        """Apply *form* to an existing entity; return True on success, False on failure."""
        try:
            form.populate_obj(model)
            model.put()
            return True
        except Exception as ex:
            if (not self.handle_view_exception(ex)):
                logging.exception('Failed to update record.')
            return False

    def delete_model(self, model):
        """Delete an entity; return True on success, False on failure."""
        try:
            model.delete()
            return True
        except Exception as ex:
            if (not self.handle_view_exception(ex)):
                logging.exception('Failed to delete record.')
            return False
class cycle(_coconut_has_iter):
    """Coconut runtime: lazily repeat an iterable forever, or *times* times.

    Like itertools.cycle, but supports a finite repetition count and — when
    the underlying iterable allows it — len(), indexing, slicing, reversal,
    count() and index().
    """
    __slots__ = ('times',)

    def __new__(cls, iterable, times=None):
        self = _coconut.super(cycle, cls).__new__(cls, iterable)
        if (times is None):
            # None means repeat forever.
            self.times = None
        else:
            # operator.index enforces an integral repetition count.
            self.times = _coconut.operator.index(times)
            if (self.times < 0):
                raise _coconut.ValueError('cycle: times cannot be negative')
        return self

    def __reduce__(self):
        # Support pickling: rebuild from the stored iterable and count.
        return (self.__class__, (self.iter, self.times))

    def __copy__(self):
        return self.__class__(self.get_new_iter(), self.times)

    def __repr__(self):
        return ('cycle(%s, %r)' % (_coconut.repr(self.iter), self.times))

    def __iter__(self):
        # Re-iterate the source once per repetition; infinite when times is None.
        i = 0
        while ((self.times is None) or (i < self.times)):
            for x in self.get_new_iter():
                (yield x)
            i += 1

    def __contains__(self, elem):
        return (elem in self.iter)

    def __getitem__(self, index):
        """Index modularly into the cycled sequence; slices map lazily."""
        if (not _coconut.isinstance(index, _coconut.slice)):
            # Scalar index: bounds-check against the finite repetition count,
            # then wrap around with modular arithmetic.
            if ((self.times is not None) and ((index // _coconut.len(self.iter)) >= self.times)):
                raise _coconut.IndexError('cycle index out of range')
            return self.iter[(index % _coconut.len(self.iter))]
        # Slice: defer to scalar __getitem__ over the sliced index range.
        if (self.times is None):
            return map(self.__getitem__, count()[index])
        else:
            return map(self.__getitem__, range(0, _coconut.len(self))[index])

    def __len__(self):
        # Length is only defined for a finite cycle over a sized iterable.
        if ((self.times is None) or (not _coconut.isinstance(self.iter, _coconut.abc.Sized))):
            return _coconut.NotImplemented
        return (_coconut.len(self.iter) * self.times)

    def __reversed__(self):
        # Only a finite cycle can be reversed.
        if (self.times is None):
            raise _coconut.TypeError((_coconut.repr(self) + ' object is not reversible'))
        return self.__class__(reversed(self.get_new_iter()), self.times)

    def count(self, elem):
        """Occurrences of *elem*; infinite (float('inf')) for an endless cycle."""
        return (self.iter.count(elem) * (float('inf') if (self.times is None) else self.times))

    def index(self, elem):
        """Index of the first occurrence of *elem* in the underlying iterable."""
        if (elem not in self.iter):
            raise _coconut.ValueError(((_coconut.repr(elem) + ' not in ') + _coconut.repr(self)))
        return self.iter.index(elem)
def traversal_paired_dependency() -> Traversal:
    """Build a Traversal over a two-collection graph with grouped inputs.

    User depends on Project via two references (project_id, organization_id)
    and must run after it; the grouped_inputs force project/organization/email
    to be supplied together.  The traversal is seeded with an email identity.
    """
    projects = Collection(name='Project', fields=[ScalarField(name='project_id'), ScalarField(name='organization_id'), ScalarField(name='org_leader_email', identity='email'), ScalarField(name='project_name')])
    # 'after' forces User to be processed after mysql:Project.
    users = Collection(name='User', after={CollectionAddress('mysql', 'Project')}, fields=[ScalarField(name='project', references=[(FieldAddress('mysql', 'Project', 'project_id'), 'from')]), ScalarField(name='organization', references=[(FieldAddress('mysql', 'Project', 'organization_id'), 'from')]), ScalarField(name='username'), ScalarField(name='email', identity='email'), ScalarField(name='position')], grouped_inputs={'project', 'organization', 'email'})
    mysql = GraphDataset(name='mysql', collections=[projects, users], connection_key='mysql')
    graph = DatasetGraph(mysql)
    identity = {'email': ''}
    return Traversal(graph, identity)
class Mode(StrEnum):
aim = auto()
ball_in_hand = auto()
calculate = auto()
call_shot = auto()
cam_load = auto()
cam_save = auto()
game_over = auto()
menu = auto()
pick_ball = auto()
purgatory = auto()
shot = auto()
stroke = auto()
view = auto()
none = auto() |
def accessible_role_based_events(view, view_args, view_kwargs, *args, **kwargs):
    """Permission wrapper: scope event access to the current user when required.

    For POST requests or GET requests carrying ``withRole``, the JWT is
    verified; staff users get unrestricted access, other users have their
    user_id injected into the view kwargs so results are scoped to them.

    NOTE(review): indentation was reconstructed from semantics — confirm the
    nesting of the user-handling branch against the original source.
    """
    if ('POST' in request.method) or ('withRole' in request.args):
        verify_jwt_in_request()
        user = current_user
        # Staff can list events without restriction.
        if ('GET' in request.method) and user.is_staff:
            return view(*view_args, **view_kwargs)
        # Non-staff: scope the query to the authenticated user.
        view_kwargs['user_id'] = user.id
    return view(*view_args, **view_kwargs)
# NOTE(review): in the Ryu codebase these two registration calls normally
# appear as decorators (@_set_stats_type(...) / @_set_msg_type(...)) on
# OFPPortStatsRequest; here they are plain module-level calls, possibly
# mangled during extraction — verify against the original file.
_set_stats_type(ofproto.OFPMP_PORT_STATS, OFPPortStats)
_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPPortStatsRequest(OFPMultipartRequest):
    """Port-statistics multipart request message."""

    def __init__(self, datapath, flags, port_no, type_=None):
        # type_ is accepted for signature symmetry with the other multipart
        # requests; the stats type itself is registered on the class.
        super(OFPPortStatsRequest, self).__init__(datapath, flags)
        # Port to query (a specific port number, or a wildcard value — verify).
        self.port_no = port_no

    def _serialize_stats_body(self):
        """Pack port_no into the multipart request body at the body offset."""
        msg_pack_into(ofproto.OFP_PORT_MULTIPART_REQUEST_PACK_STR, self.buf, ofproto.OFP_MULTIPART_REQUEST_SIZE, self.port_no)
class CMYKFloydsterBill(Processor):
    """CMYK halftoning processor: Floyd-Steinberg on the C band, Atkinson on the rest."""

    __slots__ = ('gcr', 'overprinter')

    def __init__(self, gcr=20):
        # Clamp the grey-component-replacement percentage into [0, 100].
        self.gcr = min(max(gcr, 0), 100)
        # Dither every CMYK band with Atkinson, except cyan which uses
        # Floyd-Steinberg.
        band_fork = pipeline.BandFork(Atkinson, mode='CMYK')
        band_fork.update({'C': SlowFloydSteinberg()})
        self.overprinter = band_fork

    def process(self, image):
        """Apply GCR followed by the per-band dither pipeline to *image*."""
        stages = pipeline.Pipe(gcr.BasicGCR(self.gcr), self.overprinter)
        return stages.process(image)
def test_private_keys():
    """AEABuilder should register and remove both wallet and connection keys."""
    builder = AEABuilder()
    builder.set_name('aea_1')
    builder.add_private_key('fetchai')
    # is_connection=True registers the key for connections rather than the wallet.
    builder.add_private_key('fetchai', is_connection=True)
    assert builder._connection_private_key_paths
    assert builder._private_key_paths
    # Removal must clear both registries independently.
    builder.remove_private_key('fetchai')
    builder.remove_private_key('fetchai', is_connection=True)
    assert (not builder._connection_private_key_paths)
    assert (not builder._private_key_paths)
class HasMargin(TraitType):
    """Trait type accepting a Margin, an int, or a tuple coercible to a Margin."""

    # Concrete class values are coerced to.
    klass = Margin
    default_value = Margin(0)
    info_text = 'a Margin instance, or an integer in the range from -32 to 32 or a tuple with 1, 2 or 4 integers in that range that can be used to define one'

    def validate(self, object, name, value):
        """Coerce int/tuple input into self.klass; raise a TraitError otherwise."""
        if isinstance(value, int):
            try:
                value = self.klass(value)
            except Exception:
                self.error(object, name, value)
        elif isinstance(value, tuple):
            try:
                # A tuple of 1, 2 or 4 ints expands to the Margin constructor args.
                value = self.klass(*value)
            except Exception:
                self.error(object, name, value)
        if isinstance(value, self.klass):
            return value
        self.error(object, name, value)

    def get_default_value(self):
        """Normalise default_value into callable-and-args form on first use."""
        dv = self.default_value
        dvt = self.default_value_type
        if (dvt < 0):
            # First call: coerce an int/tuple default into a Margin instance.
            if isinstance(dv, int):
                dv = self.klass(dv)
            elif isinstance(dv, tuple):
                dv = self.klass(*dv)
            if (not isinstance(dv, self.klass)):
                return super().get_default_value()
            # Cache a (callable, args, kwargs) default so each HasTraits
            # instance gets a fresh Margin built from the trait values.
            self.default_value_type = dvt = DefaultValue.callable_and_args
            dv = (self.klass, (), dv.trait_get())
        return (dvt, dv)
def test_create_list_and_delete_pets(client: TestClient):
    """Exercise the pet CRUD endpoints: 404 -> create -> read/list -> delete -> 404."""
    expected_pet = {'id': 1, **SAMPLE_PET, 'device_hid': None}

    def assert_pet_missing():
        # Pet 1 must not exist (yet / any more).
        resp = client.get('/api/v1/pet/1')
        assert resp.status_code == 404
        assert resp.json() == {'detail': 'No pet found with ID 1'}

    assert_pet_missing()
    created = client.post('/api/v1/pet', json=SAMPLE_PET)
    assert created.status_code == 200
    assert created.json() == expected_pet
    fetched = client.get('/api/v1/pet/1')
    assert fetched.status_code == 200
    assert fetched.json() == expected_pet
    listed = client.get('/api/v1/pet')
    assert listed.status_code == 200
    assert len(listed.json()) == 1
    deleted = client.delete('/api/v1/pet/1')
    assert deleted.status_code == 200
    assert_pet_missing()
# Fix: the source contained a bare `.usefixtures('use_tmpdir')` line — a
# syntax error and evidently a pytest marker whose `@pytest.mark` prefix was
# lost; reconstructed below (confirm against the original file).
@pytest.mark.usefixtures('use_tmpdir')
def test_report_with_failed_exit_message_argument(reporter):
    """A failed Exited message must be reflected in STATUS, ERROR and status.json."""
    msg = Exited(Job({'name': 'job1'}, 0), 1).with_error('massive_failure')
    reporter.status_dict = reporter._init_job_status_dict(msg.timestamp, 0, [msg.job])
    reporter.report(msg)
    # STATUS file records the exit code and error message.
    with open(STATUS_file, 'r', encoding='utf-8') as f:
        assert ('EXIT: 1/massive_failure' in f.readline())
    # ERROR file is XML-ish and must name the job, reason and stderr info.
    with open(ERROR_file, 'r', encoding='utf-8') as f:
        content = ''.join(f.readlines())
        assert ('<job>job1</job>' in content), 'ERROR file missing job'
        assert ('<reason>massive_failure</reason>' in content), 'ERROR file missing reason'
        assert ('stderr: Not redirected' in content), 'ERROR had invalid stderr information'
    # status.json carries the machine-readable failure status.
    with open(STATUS_json, 'r', encoding='utf-8') as f:
        content = ''.join(f.readlines())
        assert ('"status": "Failure"' in content), 'status.json missing Failure status'
        assert ('"error": "massive_failure"' in content), 'status.json missing error message'
    assert (reporter.status_dict['jobs'][0]['end_time'] is not None)
def _get_nested_field_names(hierarchy, root_field_names):
    """Project *root_field_names* into the namespace of *hierarchy*.

    A name equal to *hierarchy* maps to '*'; names under
    ``hierarchy + EXPAND_DELIMITER`` are returned with that prefix stripped
    (others are dropped); when *hierarchy* is falsy every name passes
    through unchanged.
    """
    prefix = '{0}{1}'.format(hierarchy, EXPAND_DELIMITER) if hierarchy else None
    result = set()
    for field_name in root_field_names:
        if field_name == hierarchy:
            result.add('*')
        elif prefix is None:
            result.add(field_name)
        elif field_name.startswith(prefix):
            result.add(field_name[len(prefix):])
    return result
class OptionSeriesSankeySonificationTracksActivewhen(Options):
    """Generated Highcharts option wrapper for the activeWhen track settings.

    Each option is exposed as a property: the getter returns the configured
    value (None when unset) and the setter stores it in the config.

    Fix: the original defined each getter/setter pair as two plain methods
    with the same name, so the setter silently shadowed the getter; the
    @property / @x.setter decorators restore both.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def draw_wstate_tree(svm):
    """Render the world-state tree of *svm* as a graphviz-laid-out digraph.

    Requires matplotlib, networkx and pygraphviz; opens a blocking
    interactive matplotlib window.
    """
    import matplotlib.pyplot as plt
    import networkx as nx
    # write_dot was imported but never used in the original; dropped.
    from networkx.drawing.nx_agraph import graphviz_layout
    G = nx.DiGraph()
    # Depth-first walk of the state tree; each edge is labelled with the
    # trace that led from parent to child.
    pending_list = [svm.root_wstate]
    while pending_list:  # idiomatic truthiness test instead of len(...)
        root = pending_list.pop()
        for (trace, children) in root.trace_to_children.items():
            for c in children:
                G.add_edge(repr(root), repr(c), label=trace)
                pending_list.append(c)
    pos = graphviz_layout(G, prog='dot')
    edge_labels = nx.get_edge_attributes(G, 'label')
    nx.draw(G, pos)
    nx.draw_networkx_edge_labels(G, pos, edge_labels, font_size=8)
    nx.draw_networkx_labels(G, pos, font_size=10)
    plt.show()
class SlideText(QWidget):
    """Compound widget binding a label, a spin box and a slider to one float value.

    The slider operates on an integer scale 0..maxlen which is mapped linearly
    onto num_range; the spin box edits the float directly.  ps_value_changed
    is emitted on user-driven changes only (programmatic sets are silent).
    """

    # Emitted with the new float value on user-driven changes.
    ps_value_changed = pyqtSignal(float)

    def __init__(self, wget, num_range=(0.0, 1.0), maxlen=100000, interval=10000, step=1000, decimals=2, default_value=0.0):
        """Build the label + spin box + slider layout and wire the change signals."""
        super().__init__(wget)
        # Normalised (low, high) float range.
        self._num_range = tuple([float(min(num_range)), float(max(num_range))])
        self._maxlen = int(maxlen)      # slider resolution (integer positions)
        self._interval = int(interval)  # slider tick-mark spacing
        self._step = int(step)          # slider single-step size
        self._decimals = int(decimals)  # spin box display precision
        self._value = float(default_value)
        # Guard flag: set to False while widgets are updated programmatically
        # so the change handlers do not re-emit ps_value_changed.
        self._emitting = True
        local_grid_layout = QGridLayout(self)
        local_grid_layout.setContentsMargins(0, 0, 0, 0)
        local_grid_layout.setHorizontalSpacing(2)
        local_grid_layout.setVerticalSpacing(9)
        self.lab_local = QLabel(self)
        local_grid_layout.addWidget(self.lab_local, 0, 0, 1, 1)
        self.dsp_local = NoWheelDoubleSpinBox(self)
        self.dsp_local.setMinimum(self._num_range[0])
        self.dsp_local.setMaximum(self._num_range[1])
        # Spin-box step mirrors one slider step, scaled to the float range.
        self.dsp_local.setSingleStep((((self._step * 1.0) / self._maxlen) * (self._num_range[1] - self._num_range[0])))
        self.dsp_local.setDecimals(self._decimals)
        self.dsp_local.setValue(self._value)
        self.dsp_local.setContextMenuPolicy(Qt.NoContextMenu)
        self.dsp_local.setButtonSymbols(QDoubleSpinBox.NoButtons)
        local_grid_layout.addWidget(self.dsp_local, 0, 1, 1, 1)
        self.dsp_local.valueChanged.connect(self.value_changed_from_dsp)
        spacer = QSpacerItem(5, 5, QSizePolicy.Expanding, QSizePolicy.Minimum)
        local_grid_layout.addItem(spacer, 0, 2, 1, 1)
        self.sdr_local = NoWheelSlider(self)
        self.sdr_local.setMinimum(0)
        self.sdr_local.setMaximum(self._maxlen)
        self.sdr_local.setSingleStep(self._step)
        self.sdr_local.setPageStep(0)
        self.sdr_local.setOrientation(Qt.Horizontal)
        self.sdr_local.setTickPosition(NoWheelSlider.TicksAbove)
        self.sdr_local.setTickInterval(self._interval)
        self.sdr_local.setValue(self.value_dsp_to_sdr(self._value))
        local_grid_layout.addWidget(self.sdr_local, 1, 0, 1, 3)
        self.sdr_local.valueChanged.connect(self.value_changed_from_sdr)

    def norm_dsp_value(self, dsp_value):
        """Clamp a float into num_range."""
        norm_value = float(dsp_value)
        norm_value = (self._num_range[0] if (norm_value < self._num_range[0]) else norm_value)
        norm_value = (self._num_range[1] if (norm_value > self._num_range[1]) else norm_value)
        return norm_value

    def norm_sdr_value(self, sdr_value):
        """Clamp an int into [0, maxlen]."""
        norm_value = int(sdr_value)
        norm_value = (0 if (norm_value < 0) else norm_value)
        norm_value = (self._maxlen if (norm_value > self._maxlen) else norm_value)
        return norm_value

    def value_dsp_to_sdr(self, dsp_value):
        """Map a float value to its integer slider position."""
        norm_value = self.norm_dsp_value(dsp_value)
        return int((((norm_value - self._num_range[0]) / (self._num_range[1] - self._num_range[0])) * self._maxlen))

    def value_sdr_to_dsp(self, sdr_value):
        """Map an integer slider position back to a float value."""
        norm_value = self.norm_sdr_value(sdr_value)
        return float((((norm_value / self._maxlen) * (self._num_range[1] - self._num_range[0])) + self._num_range[0]))

    def set_disabled(self, state):
        """Enable/disable both input widgets together."""
        self.dsp_local.setDisabled(bool(state))
        self.sdr_local.setDisabled(bool(state))

    def set_value(self, value):
        """Programmatically set the value without emitting ps_value_changed."""
        norm_value = self.norm_dsp_value(value)
        self._emitting = False
        self._value = norm_value
        self.dsp_local.setValue(self._value)
        self.sdr_local.setValue(self.value_dsp_to_sdr(self._value))
        self._emitting = True

    def get_value(self):
        """Return the current float value."""
        return self._value

    def set_text(self, text):
        """Set the label text."""
        self.lab_local.setText(text)

    def set_num_range(self, num_range):
        """Change the float range, preserving the relative slider position."""
        sdr_value = self.value_dsp_to_sdr(self._value)
        self._emitting = False
        self._num_range = tuple([float(min(num_range)), float(max(num_range))])
        self.dsp_local.setMinimum(self._num_range[0])
        self.dsp_local.setMaximum(self._num_range[1])
        self.dsp_local.setSingleStep((((self._step * 1.0) / self._maxlen) * (self._num_range[1] - self._num_range[0])))
        # Re-derive the float value from the kept slider position.
        self._value = self.value_sdr_to_dsp(sdr_value)
        self.dsp_local.setValue(self._value)
        self._emitting = True

    def get_num_range(self):
        """Return the current (low, high) float range."""
        return self._num_range

    def value_changed_from_dsp(self, value):
        """Spin-box change handler: sync the slider and maybe emit the signal."""
        if (value != self._value):
            self._value = self.norm_dsp_value(value)
            self.sdr_local.setValue(self.value_dsp_to_sdr(self._value))
            if self._emitting:
                self.ps_value_changed.emit(self._value)

    def value_changed_from_sdr(self, value):
        """Slider change handler: sync the spin box and maybe emit the signal."""
        if (value != self.value_dsp_to_sdr(self._value)):
            self._value = self.value_sdr_to_dsp(value)
            self.dsp_local.setValue(self._value)
            if self._emitting:
                self.ps_value_changed.emit(self._value)
class IAMClient(object):
    """Wrapper over the Google Cloud IAM API (roles, service accounts, keys)."""

    # Key-management categories accepted by the service-account keys API.
    USER_MANAGED = 'USER_MANAGED'
    SYSTEM_MANAGED = 'SYSTEM_MANAGED'
    KEY_TYPES = frozenset([USER_MANAGED, SYSTEM_MANAGED])

    def __init__(self, global_configs, **kwargs):
        """Build the repository client with rate limiting and discovery caching."""
        (max_calls, quota_period) = api_helpers.get_ratelimiter_config(global_configs, API_NAME)
        cache_discovery = (global_configs['cache_discovery'] if ('cache_discovery' in global_configs) else False)
        self.repository = IamRepositoryClient(quota_max_calls=max_calls, quota_period=quota_period, use_rate_limiter=kwargs.get('use_rate_limiter', True), cache_discovery=cache_discovery, cache=global_configs.get('cache'))

    def get_curated_roles(self, parent=None):
        """List curated (predefined) roles, optionally scoped to *parent*.

        Raises ApiExecutionError on HTTP/transport failure.
        """
        try:
            paged_results = self.repository.roles.list(parent=parent, view='FULL')
            flattened_results = api_helpers.flatten_list_results(paged_results, 'roles')
            LOGGER.debug('Getting information about organization roles, parent = %s, flattened_results = %s', parent, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            # NOTE(review): the error is reported under 'project_roles' even
            # though this lists curated roles -- looks copy-pasted; confirm.
            api_exception = api_errors.ApiExecutionError('project_roles', e, 'parent', parent)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_organization_roles(self, org_id):
        """List custom roles defined at the organization level.

        Raises ApiExecutionError on HTTP/transport failure.
        """
        name = self.repository.organizations_roles.get_name(org_id)
        try:
            paged_results = self.repository.organizations_roles.list(name, view='FULL')
            flattened_results = api_helpers.flatten_list_results(paged_results, 'roles')
            LOGGER.debug('Getting information about custom organization roles, org_id = %s, flattened_results = %s', org_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('organizations_roles', e, 'name', name)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_project_roles(self, project_id):
        """List custom roles defined at the project level.

        Raises ApiExecutionError on HTTP/transport failure.
        """
        name = self.repository.projects_roles.get_name(project_id)
        try:
            paged_results = self.repository.projects_roles.list(name, view='FULL')
            flattened_results = api_helpers.flatten_list_results(paged_results, 'roles')
            LOGGER.debug('Getting the information about custom project roles, project_id = %s, flattened_results = %s', project_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('projects_roles', e, 'name', name)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_service_accounts(self, project_id):
        """List the service accounts of a project.

        Raises ApiExecutionError on HTTP/transport failure.
        """
        name = self.repository.projects_serviceaccounts.get_name(project_id)
        try:
            paged_results = self.repository.projects_serviceaccounts.list(name)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'accounts')
            LOGGER.debug('Getting service accounts associated with a project, project_id = %s, flattened_results = %s', project_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('serviceAccounts', e, 'name', name)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_service_account_iam_policy(self, name):
        """Return the IAM policy attached to the service account *name*.

        Raises ApiExecutionError on HTTP/transport failure.
        """
        try:
            results = self.repository.projects_serviceaccounts.get_iam_policy(name)
            LOGGER.debug('Getting the IAM Policy associated with the service account, name = %s, results = %s', name, results)
            return results
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('serviceAccountIamPolicy', e, 'name', name)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_service_account_keys(self, name, key_type=None):
        """List the keys of service account *name*, optionally filtered by type.

        Returns [] when the service account no longer exists; raises
        ApiExecutionError on other HTTP/transport failures, ValueError for an
        invalid key_type.
        """
        def _service_account_not_found(error):
            # Detect the API's 404 "Service account ... does not exist."
            # response so a vanished account is treated as "no keys" rather
            # than an error.
            sa_not_found_pattern = '^Service account .*? does not exist.$'
            if isinstance(error, errors.HttpError):
                if ((str(error.resp.status) == '404') and error.resp.get('content-type', '').startswith('application/json')):
                    error_resp = json.loads(error.content.decode('utf-8'))
                    error_details = error_resp.get('error', {})
                    error_message = error_details.get('message', '')
                    LOGGER.debug(error_message)
                    if re.match(sa_not_found_pattern, error_message):
                        return True
            return False
        try:
            kwargs = {}
            if key_type:
                if (key_type not in self.KEY_TYPES):
                    raise ValueError(('Key type %s is not a valid key type.' % key_type))
                kwargs['keyTypes'] = key_type
            results = self.repository.projects_serviceaccounts_keys.list(name, **kwargs)
            flattened_results = api_helpers.flatten_list_results(results, 'keys')
            LOGGER.debug('Getting the keys associated with the given service account, name = %s, key_type = %s, flattened_results = %s', name, key_type, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            if _service_account_not_found(e):
                LOGGER.debug("Service account %s doesn't exist", name)
                return []
            api_exception = api_errors.ApiExecutionError('serviceAccountKeys', e, 'name', name)
            LOGGER.exception(api_exception)
            raise api_exception
def enable_schedule(fledge_url, sch_name):
    """Enable the named schedule via the Fledge management REST API.

    Args:
        fledge_url: host[:port] of the Fledge REST API.
        sch_name: name of the schedule to enable.

    Returns:
        Parsed JSON response, which contains the enabled schedule's ID.
    """
    # Local import keeps this test helper self-contained.
    import http.client
    # Fix: the original line was truncated to `conn = ` (a syntax error);
    # an HTTP connection to the Fledge API is the evident intent, since
    # fledge_url was otherwise unused -- reconstructed.
    conn = http.client.HTTPConnection(fledge_url)
    conn.request('PUT', '/fledge/schedule/enable', json.dumps({'schedule_name': sch_name}))
    r = conn.getresponse()
    assert (200 == r.status)
    r = r.read().decode()
    jdoc = json.loads(r)
    assert ('scheduleId' in jdoc)
    return jdoc
def test_delete_but_fo_is_in_fw(admin_db, common_db, backend_db):
    """Deleting a firmware must spare child files that also belong to another firmware."""
    (fo, fw) = create_fw_with_child_fo()
    # Attach the same child file object to a second firmware.
    fw2 = create_test_firmware()
    fw2.uid = 'fw2_uid'
    fo.parents.append(fw2.uid)
    fo.parent_firmware_uids.add(fw2.uid)
    fo.virtual_file_path.update({fw2.uid: [f'|{fw2.uid}|/some/path']})
    backend_db.insert_multiple_objects(fw, fw2, fo)
    (removed_vps, deleted_files) = admin_db.delete_firmware(fw.uid)
    # Only fw's virtual-path entry is removed and only fw's file is deleted.
    assert (removed_vps == 1)
    assert (deleted_files == 1)
    assert (fo.uid not in admin_db.intercom.deleted_files)
    fo_entry = common_db.get_object(fo.uid)
    assert (fw.uid not in fo_entry.virtual_file_path)
    assert (fw2.uid in fo_entry.virtual_file_path)
    assert (fw.uid in admin_db.intercom.deleted_files)
    assert (common_db.exists(fw.uid) is False)
    assert (common_db.exists(fo.uid) is True), 'should have been spared by cascade delete because it is in another FW'
    fo_db = common_db.get_object(fo.uid)
    assert (fo_db.virtual_file_path == {'fw2_uid': ['|fw2_uid|/some/path']}), 'entry of fw should be deleted from vfp'
# NOTE(review): `_os(*metadata.platforms)` looks like the remnant of a
# decorator (e.g. `@common.requires_os(*metadata.platforms)`) mangled during
# extraction -- verify against the original file before relying on it.
_os(*metadata.platforms)
def main():
    """Emulate loading a tampered wow64log.dll to exercise image-load detections.

    Copies user32.dll to wow64log.dll, rewrites its OriginalFilename resource
    (invalidating the signature), loads it via a PowerShell helper that also
    spawns a child powershell, then removes the dropped files.
    """
    powershell = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
    user32 = 'C:\\Windows\\System32\\user32.dll'
    dll = 'C:\\Users\\Public\\wow64log.dll'
    ps1 = 'C:\\Users\\Public\\Invoke-ImageLoad.ps1'
    rcedit = 'C:\\Users\\Public\\rcedit.exe'
    # Stage the DLL payload and helper tools in a world-readable location.
    common.copy_file(user32, dll)
    common.copy_file(PS1_FILE, ps1)
    common.copy_file(RENAMER, rcedit)
    common.log('Modifying the OriginalFileName attribute to invalidate the signature')
    common.execute([rcedit, dll, '--set-version-string', 'OriginalFilename', 'wow64log.dll'])
    common.log('Loading wow64log.dll and spawning a high integrity process')
    common.execute([powershell, '-c', f'Import-Module {ps1}; Invoke-ImageLoad {dll}; powershell'], timeout=10)
    # Clean up all staged artifacts.
    common.remove_files(dll, ps1, rcedit)
.parametrize('w3_accounts, w3_coinbase, method, from_field_added, from_field_value', ((SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, 'eth_call', True, SAMPLE_ADDRESS), (SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, 'eth_estimateGas', True, SAMPLE_ADDRESS), (SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, 'eth_sendTransaction', True, SAMPLE_ADDRESS), (SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, 'eth_gasPrice', False, None), (SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, 'eth_blockNumber', False, None), (SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, 'meow', False, None), (SAMPLE_ADDRESS_LIST, None, 'eth_call', True, SAMPLE_ADDRESS_LIST[0]), (SAMPLE_ADDRESS_LIST, None, 'eth_estimateGas', True, SAMPLE_ADDRESS_LIST[0]), (SAMPLE_ADDRESS_LIST, None, 'eth_sendTransaction', True, SAMPLE_ADDRESS_LIST[0]), (SAMPLE_ADDRESS_LIST, None, 'eth_gasPrice', False, None), (SAMPLE_ADDRESS_LIST, None, 'eth_blockNumber', False, None), (SAMPLE_ADDRESS_LIST, None, 'meow', False, None), (None, SAMPLE_ADDRESS, 'eth_call', True, SAMPLE_ADDRESS), (None, SAMPLE_ADDRESS, 'eth_estimateGas', True, SAMPLE_ADDRESS), (None, SAMPLE_ADDRESS, 'eth_sendTransaction', True, SAMPLE_ADDRESS), (None, SAMPLE_ADDRESS, 'eth_gasPrice', False, SAMPLE_ADDRESS), (None, SAMPLE_ADDRESS, 'eth_blockNumber', False, SAMPLE_ADDRESS), (None, SAMPLE_ADDRESS, 'meow', False, SAMPLE_ADDRESS), (None, None, 'eth_call', True, None), (None, None, 'eth_estimateGas', True, None), (None, None, 'eth_sendTransaction', True, None), (None, None, 'eth_gasPrice', False, None), (None, None, 'eth_blockNumber', False, None), (None, None, 'meow', False, None)))
def test_default_transaction_fields_middleware(w3_accounts, w3_coinbase, method, from_field_added, from_field_value):
    """The middleware injects a ``from`` field only for transaction-like RPC
    methods, preferring ``w3.eth.accounts[0]`` / ``w3.eth.coinbase`` as the
    sender, and leaves every other field of the params untouched."""
    def echo_params(_rpc_method, rpc_params):
        # Stand-in for the next layer: hand the (possibly filled) params back.
        return rpc_params
    fake_w3 = Mock()
    fake_w3.eth.accounts = w3_accounts
    fake_w3.eth.coinbase = w3_coinbase
    wrapped = default_transaction_fields_middleware(echo_params, fake_w3)
    original_fields = {'chainId': 5}
    result = wrapped(method, [original_fields])
    params = result[0]
    has_from = 'from' in params
    assert has_from == from_field_added
    if has_from:
        assert params['from'] == from_field_value
    # Stripping the injected field must leave the caller's params intact.
    params.pop('from', None)
    assert params == original_fields
def DoStageMem(block_cursor, buf_name, w_exprs, new_name, use_accum_zero=False):
    """Stage the window `w_exprs` of buffer `buf_name` into a fresh buffer.

    Inserts an allocation named `new_name` before the statement block, adds a
    load loop nest copying the windowed region in (when the block reads the
    buffer), redirects the block's reads/writes of `buf_name` to the staged
    buffer, and adds a store loop nest writing the region back (when the block
    writes the buffer).  With `use_accum_zero=True` the staged buffer is
    zero-initialized instead of loaded, the write-back is a `Reduce` instead
    of an `Assign`, and the block is first checked to only reduce into the
    buffer.

    Returns `(ir, fwd)`: the rewritten procedure IR plus a cursor-forwarding
    function composed from every individual edit.
    """
    proc = block_cursor.get_root()
    new_name = Sym(new_name)
    # Resolve the staged buffer's declaration (symbol, type, memory) relative
    # to the first statement of the block.
    (buf_name, buf_typ, mem) = _DoStageMem_FindBufData(proc, buf_name, block_cursor[0]._node).result()
    # Windows are staged in terms of their underlying tensor type.
    buf_typ = (buf_typ if (not isinstance(buf_typ, T.Window)) else buf_typ.as_tensor)
    if (len(w_exprs) != len(buf_typ.shape())):
        raise SchedulingError(f"expected windowing of '{buf_name}' to have {len(buf_typ.shape())} indices, but only got {len(w_exprs)}")
    # Extents of the staged buffer: one `hi - lo` per interval (tuple) entry;
    # point (plain-expression) entries fix a coordinate and drop a dimension.
    shape = [LoopIR.BinOp('-', w[1], w[0], T.index, w[0].srcinfo) for w in w_exprs if isinstance(w, tuple)]
    if all((isinstance(w, LoopIR.expr) for w in w_exprs)):
        # Every entry is a point: the staged buffer degenerates to a scalar.
        new_typ = buf_typ.basetype()
    else:
        new_typ = T.Tensor(shape, False, buf_typ.basetype())

    def rewrite_idx(idx):
        # Translate a point access on `buf_name` into staged-buffer
        # coordinates: subtract each interval's lower bound and drop the
        # dimensions fixed by point windows.
        assert (len(idx) == len(w_exprs))
        return [LoopIR.BinOp('-', i, w[0], T.index, i.srcinfo) for (i, w) in zip(idx, w_exprs) if isinstance(w, tuple)]

    def rewrite_win(w_idx):
        # Translate a window expression on `buf_name` the same way: shift
        # both interval endpoints / the point by the staged window's origin.
        assert (len(w_idx) == len(w_exprs))
        def off_w(w, off):
            if isinstance(w, LoopIR.Interval):
                lo = LoopIR.BinOp('-', w.lo, off, T.index, w.srcinfo)
                hi = LoopIR.BinOp('-', w.hi, off, T.index, w.srcinfo)
                return LoopIR.Interval(lo, hi, w.srcinfo)
            else:
                assert isinstance(w, LoopIR.Point)
                pt = LoopIR.BinOp('-', w.pt, off, T.index, w.srcinfo)
                return LoopIR.Point(pt, w.srcinfo)
        return [off_w(w_i, w_e[0]) for (w_i, w_e) in zip(w_idx, w_exprs)]

    ir = block_cursor.get_root()
    block = [s._node for s in block_cursor]
    if use_accum_zero:
        n_dims = len(buf_typ.shape())
        # Accumulate-from-zero is only sound if the block exclusively
        # reduces into the buffer (never reads or overwrites it).
        Check_BufferReduceOnly(ir, block, buf_name, n_dims)
    # NOTE: recomputed unconditionally; same value as in the branch above.
    n_dims = len(buf_typ.shape())
    basetyp = (new_typ.basetype() if isinstance(new_typ, T.Tensor) else new_typ)
    srcinfo = block[0].srcinfo
    # Insert the staged allocation immediately before the block.
    new_alloc = [LoopIR.Alloc(new_name, new_typ, mem, None, srcinfo)]
    (ir, fwd) = block_cursor[0].before()._insert(new_alloc)

    def get_inner_stmt(loop_nest_c):
        # Descend through a perfect loop nest to its innermost statement.
        node = loop_nest_c._node
        if (not isinstance(node, LoopIR.For)):
            return loop_nest_c
        return get_inner_stmt(loop_nest_c.body()[0])

    def insert_safety_guards(ir, fwd, ctxt_stmt_c, access, buf_typ):
        # Wrap the copy statement in `if 0 <= idx < extent` guards for every
        # bound that cannot be proven true in context, so staging a window
        # that may poke out of `buf_name`'s footprint stays safe.
        def check_cond(cond):
            ctxt_stmt = ctxt_stmt_c._node
            true_node = LoopIR.Const(True, T.bool, ctxt_stmt.srcinfo)
            try:
                # Provably-true bounds need no runtime guard.
                Check_ExprEqvInContext(ir, cond, [ctxt_stmt], true_node)
                return True
            except SchedulingError:
                return False
        const_0 = LoopIR.Const(0, T.int, access.srcinfo)
        conds = []
        for i in zip(access.idx, buf_typ.shape()):
            lower_bound_cond = LoopIR.BinOp('<=', const_0, i[0], T.bool, access.srcinfo)
            if (not check_cond(lower_bound_cond)):
                conds.append(lower_bound_cond)
            upper_bound_cond = LoopIR.BinOp('<', i[0], i[1], T.bool, access.srcinfo)
            if (not check_cond(upper_bound_cond)):
                conds.append(upper_bound_cond)
        if (len(conds) == 0):
            return (ir, fwd)
        # Conjoin the unproven bounds into a single guard condition.
        cond = conds[0]
        for c in conds[1:]:
            cond = LoopIR.BinOp('and', cond, c, T.bool, cond.srcinfo)
        def guard_wrapper(body):
            return LoopIR.If(cond, body, [], None, srcinfo)
        (ir, fwd_wrap) = ctxt_stmt_c.parent().body()._wrap(guard_wrapper, 'body')
        fwd = _compose(fwd_wrap, fwd)
        return (ir, fwd)

    # Determine whether the block reads and/or writes `buf_name`; this
    # decides which of the load / store loop nests are materialized.
    (isR, isW) = Check_BufferRW(ir, block, buf_name, n_dims)
    if isR:
        load_iter = [Sym(f'i{i}') for (i, _) in enumerate(shape)]
        load_widx = [LoopIR.Read(s, [], T.index, srcinfo) for s in load_iter]
        if use_accum_zero:
            # Zero-initialize instead of copying the original contents in.
            load_rhs = LoopIR.Const(0.0, basetyp, srcinfo)
        else:
            # Build the read index into the original buffer: loop iterator
            # plus window origin for intervals, the fixed point otherwise.
            cp_load_widx = load_widx.copy()
            load_ridx = []
            for w in w_exprs:
                if isinstance(w, tuple):
                    load_ridx.append(LoopIR.BinOp('+', cp_load_widx.pop(0), w[0], T.index, srcinfo))
                else:
                    load_ridx.append(w)
            load_rhs = LoopIR.Read(buf_name, load_ridx, basetyp, srcinfo)
        # Wrap the copy assignment in one `for` per staged dimension
        # (innermost first, hence the reversed iteration).
        load_nest = [LoopIR.Assign(new_name, basetyp, None, load_widx, load_rhs, None, srcinfo)]
        for (i, n) in reversed(list(zip(load_iter, shape))):
            loop = LoopIR.For(i, LoopIR.Const(0, T.index, srcinfo), n, load_nest, LoopIR.Seq(), None, srcinfo)
            load_nest = [loop]
        (ir, fwd_ins) = fwd(block_cursor[0]).before()._insert(load_nest)
        fwd = _compose(fwd_ins, fwd)
        if (not use_accum_zero):
            # Guard the actual read of the original buffer against
            # out-of-footprint indices (irrelevant for the zero fill).
            load_nest_c = fwd(block_cursor[0]).prev()
            (ir, fwd) = insert_safety_guards(ir, fwd, get_inner_stmt(load_nest_c), load_rhs, buf_typ)
    if isW:
        # Mirror of the load nest: copy the staged buffer back out.
        store_iter = [Sym(f'i{i}') for (i, _) in enumerate(shape)]
        store_ridx = [LoopIR.Read(s, [], T.index, srcinfo) for s in store_iter]
        cp_store_ridx = store_ridx.copy()
        store_widx = []
        for w in w_exprs:
            if isinstance(w, tuple):
                store_widx.append(LoopIR.BinOp('+', cp_store_ridx.pop(0), w[0], T.index, srcinfo))
            else:
                store_widx.append(w)
        store_rhs = LoopIR.Read(new_name, store_ridx, basetyp, srcinfo)
        # Accumulation mode writes back with `Reduce` (+=) so the staged
        # partial sums combine with whatever was already in the buffer.
        store_stmt = (LoopIR.Reduce if use_accum_zero else LoopIR.Assign)
        store_nest = [store_stmt(buf_name, basetyp, None, store_widx, store_rhs, None, srcinfo)]
        for (i, n) in reversed(list(zip(store_iter, shape))):
            loop = LoopIR.For(i, LoopIR.Const(0, T.index, srcinfo), n, store_nest, LoopIR.Seq(), None, srcinfo)
            store_nest = [loop]
        (ir, fwd_ins) = fwd(block_cursor[(- 1)]).after()._insert(store_nest)
        fwd = _compose(fwd_ins, fwd)
        store_nest_c = fwd(block_cursor[(- 1)]).next()
        store_stmt_c = get_inner_stmt(store_nest_c)
        (ir, fwd) = insert_safety_guards(ir, fwd, store_stmt_c, store_stmt_c._node, buf_typ)

    def mk_read(c):
        # Replacement spec for reads of `buf_name` inside the block.
        # NOTE(review): implicitly returns None for other node kinds —
        # presumably `_replace_reads` only calls this on Read/WindowExpr.
        rd = c._node
        if isinstance(rd, LoopIR.Read):
            return {'name': new_name, 'idx': rewrite_idx(rd.idx), 'type': rd.type}
        elif isinstance(rd, LoopIR.WindowExpr):
            w_idx = rewrite_win(rd.idx)
            return {'name': new_name, 'idx': w_idx, 'type': T.Window(new_typ, rd.type.as_tensor, new_name, w_idx)}

    def mk_write(c):
        # Replacement spec for writes: retarget to the staged buffer with
        # shifted indices.
        s = c._node
        return {'name': new_name, 'idx': rewrite_idx(s.idx)}

    # Redirect every access to `buf_name` inside the block.
    for c in block_cursor:
        (ir, fwd) = _replace_reads(ir, fwd, c, buf_name, mk_read)
        (ir, fwd) = _replace_writes(ir, fwd, c, buf_name, mk_write)
    # Re-derive the full edited region (block plus any inserted load/store
    # nests) and bounds-check the new allocation against it.
    new_block_c = fwd(block_cursor[0]).as_block().expand(0, (len(block_cursor) - 1))
    if isR:
        new_block_c = new_block_c.expand(1, 0)
    if isW:
        new_block_c = new_block_c.expand(0, 1)
    alloc_c = new_block_c[0].prev()
    Check_Bounds(ir, alloc_c._node, [c._node for c in new_block_c])
    return (ir, fwd)
class OptionChartOptions3dFrameRight(Options):
    """Configuration for the right-hand frame panel of a 3-D chart.

    Each option reads its current value through ``_config_get`` (the argument
    is the fallback default) and writes through ``_config``.

    BUG FIX: the getter/setter pairs were plain methods sharing one name, so
    each second ``def`` shadowed the first and the getters were unreachable
    dead code.  Restored the intended ``@property`` / ``@<name>.setter``
    pairs, which the ``_config_get(default)`` pattern requires.
    """

    @property
    def color(self):
        """Fill color of the frame panel; defaults to 'transparent'."""
        return self._config_get('transparent')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def size(self):
        """Thickness of the frame panel; defaults to 1."""
        return self._config_get(1)

    @size.setter
    def size(self, num: float):
        self._config(num, js_type=False)

    @property
    def visible(self):
        """Panel visibility flag; defaults to 'Default'."""
        return self._config_get('Default')

    @visible.setter
    def visible(self, flag: str):
        self._config(flag, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.