code stringlengths 281 23.7M |
|---|
.skipif(mypy.__file__.endswith('.py'), reason='Non-compiled mypy is too slow')
.parametrize('config_filename,python_filename,output_filename', cases)
def test_mypy_results(config_filename, python_filename, output_filename, tmpdir, monkeypatch):
    """Run mypy on a copied config/module pair and compare against a stored output file.

    The config and module are copied into ``tmpdir`` so mypy sees short,
    relative paths; the expected stdout lives under ``outputs/``.  When the
    module-level GENERATE flag is truthy the expected file is rewritten from
    the actual mypy output instead of being asserted against.
    """
    from mypy import api as mypy_api
    os.chdir(tmpdir)
    root_dir = Path(__file__).parent
    thinc_root_dir = Path(__file__).parent.parent.parent.parent
    # Only point MYPYPATH at the source tree when not running against an
    # installed package (pytest --pyargs).
    if ('--pyargs' not in sys.argv):
        monkeypatch.setenv('MYPYPATH', str(thinc_root_dir))
    tmpdir_path = Path(tmpdir)
    full_config_path: Path = (root_dir / f'configs/{config_filename}')
    full_module_path: Path = (root_dir / f'modules/{python_filename}')
    full_output_path: Path = (root_dir / f'outputs/{output_filename}')
    full_tmp_config_path: Path = (tmpdir_path / config_filename)
    full_tmp_module_path: Path = (tmpdir_path / python_filename)
    shutil.copy(str(full_config_path), tmpdir)
    shutil.copy(str(full_module_path), tmpdir)
    expected_out = ''
    expected_err = ''
    # mypy exits 1 whenever it reports errors; every case here expects some.
    expected_returncode = 1
    expected_out = full_output_path.read_text()
    # Per-config cache dir so parametrized runs don't poison each other's cache.
    cache_dir = (tmpdir_path / f'.mypy_cache/test-{config_filename[:(- 4)]}')
    command = [str(full_tmp_module_path), '--config-file', str(full_tmp_config_path), '--cache-dir', str(cache_dir), '--show-error-codes']
    print(f'''
Executing: mypy {' '.join(command)}''')
    actual_result = mypy_api.run(command)
    (actual_out, actual_err, actual_returncode) = actual_result
    # Drop everything before '.py:' on each line so absolute tmp paths don't
    # appear in the comparison; then collapse blank lines.
    actual_out = '\n'.join(['.py:'.join(line.split('.py:')[1:]) for line in actual_out.split('\n') if line]).strip()
    actual_out = re.sub('\\n\\s*\\n', '\\n', actual_out)
    if (GENERATE and (output_filename is not None)):
        full_output_path.write_text(actual_out)
    else:
        assert (actual_out.strip() == expected_out.strip()), actual_out
        assert (actual_err == expected_err)
        assert (actual_returncode == expected_returncode)
.repeat(5)
def test_signal_workflow():
    """End-to-end signal test: start a workflow, signal it, and check its result.

    Random sleeps (0-20s) are deliberate: they exercise signal delivery at
    varying points of the workflow lifecycle.
    """
    factory = WorkerFactory('localhost', 7933, DOMAIN)
    worker = factory.new_worker(TASK_LIST)
    worker.register_workflow_implementation_type(TestSignalWorkflowImpl)
    factory.start()
    client = WorkflowClient.new_client(domain=DOMAIN)
    workflow: TestSignalWorkflow = client.new_workflow_stub(TestSignalWorkflow)
    # Start asynchronously so signals can be sent while it runs.
    execution = WorkflowClient.start(workflow.get_greetings)
    sleep(randint(0, 20))
    workflow.wait_for_name('Bob')
    sleep(randint(0, 20))
    workflow.exit()
    sleep(randint(0, 20))
    result = client.wait_for_close(execution)
    worker.stop()
    assert (result == ['Hello Bob!'])
class TrainOptions(BaseOptions):
    """Training-time command-line options, extending the shared BaseOptions."""

    def initialize(self, parser):
        """Add training-specific arguments to *parser* and return it.

        Also sets ``self.isTrain = True`` so downstream code can distinguish
        train from test option sets.
        """
        BaseOptions.initialize(self, parser)
        # Display / logging / checkpoint frequencies.
        parser.add_argument('--display_freq', type=int, default=1000, help='frequency of showing training results on screen')
        parser.add_argument('--print_freq', type=int, default=1000, help='frequency of showing training results on console')
        parser.add_argument('--save_latest_freq', type=int, default=2000, help='frequency of saving the latest results')
        parser.add_argument('--save_epoch_freq', type=int, default=10, help='frequency of saving checkpoints at the end of epochs')
        parser.add_argument('--continue_train', action='store_true', help='continue training: load the latest model')
        parser.add_argument('--which_epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model')
        # Fixed "Totla" -> "Total" typo in the user-facing help text.
        parser.add_argument('--niter', type=int, default=100, help='# of iter at starting learning rate. This is NOT the total #epochs. Total #epochs is niter + niter_decay')
        parser.add_argument('--niter_decay', type=int, default=100, help='# of iter to linearly decay learning rate to zero')
        # Optimizer hyper-parameters.
        parser.add_argument('--optimizer', type=str, default='adam')
        parser.add_argument('--beta1', type=float, default=0.5, help='momentum term of adam')
        parser.add_argument('--beta2', type=float, default=0.999, help='momentum term of adam')
        parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate for adam')
        parser.add_argument('--D_steps_per_G', type=int, default=1, help='number of discriminator iterations per generator iterations.')
        # Loss weights and GAN configuration.
        parser.add_argument('--mcl', action='store_true', help='Use marginal contrastive learning')
        parser.add_argument('--nce_w', type=float, default=0.01, help='weight for marginal contrastive loss')
        parser.add_argument('--ndf', type=int, default=64, help='# of discrim filters in first conv layer')
        parser.add_argument('--lambda_feat', type=float, default=10.0, help='weight for feature matching loss')
        parser.add_argument('--lambda_vgg', type=float, default=10.0, help='weight for vgg loss')
        parser.add_argument('--no_ganFeat_loss', action='store_true', help='if specified, do *not* use discriminator feature matching loss')
        parser.add_argument('--gan_mode', type=str, default='hinge', help='(ls|original|hinge)')
        parser.add_argument('--netD', type=str, default='multiscale', help='(n_layers|multiscale|image)')
        parser.add_argument('--no_TTUR', action='store_true', help='Use TTUR training scheme')
        parser.add_argument('--which_perceptual', type=str, default='5_2', help='relu5_2 or relu4_2')
        parser.add_argument('--weight_perceptual', type=float, default=0.01)
        parser.add_argument('--weight_mask', type=float, default=0.0, help='weight of warped mask loss, used in direct/cycle')
        parser.add_argument('--real_reference_probability', type=float, default=0.7, help='self-supervised training probability')
        parser.add_argument('--hard_reference_probability', type=float, default=0.2, help='hard reference training probability')
        parser.add_argument('--weight_gan', type=float, default=10.0, help='weight of all loss in stage1')
        parser.add_argument('--novgg_featpair', type=float, default=10.0, help='in no vgg setting, use pair feat loss in domain adaptation')
        parser.add_argument('--D_cam', type=float, default=0.0, help='weight of CAM loss in D')
        parser.add_argument('--warp_self_w', type=float, default=0.0, help='push warp self to ref')
        parser.add_argument('--fm_ratio', type=float, default=0.1, help='vgg fm loss weight comp with ctx loss')
        parser.add_argument('--use_22ctx', action='store_true', help='if true, also use 2-2 in ctx loss')
        parser.add_argument('--ctx_w', type=float, default=1.0, help='ctx loss weight')
        parser.add_argument('--mask_epoch', type=int, default=(- 1), help='useful when noise_for_mask is true, first train mask_epoch with mask, the rest epoch with noise')
        self.isTrain = True
        return parser
def test_owly_expand_method():
    """owly.expand() should return the long URL from the mocked API response."""
    params = urlencode({'apiKey': 'TEST_KEY', 'shortUrl': shorten})
    body = json.dumps({'results': {'longUrl': expanded}})
    mock_url = f'{owly.api_url}expand?{params}'
    # match_querystring ensures the apiKey/shortUrl params are actually sent.
    responses.add(responses.GET, mock_url, body=body, match_querystring=True)
    expanded_result = owly.expand(shorten)
    assert (expanded_result == expanded)
class TxData(NamedTuple):
    """Per-transaction metadata.

    Equality deliberately ignores ``date_added``/``date_updated`` (local
    bookkeeping timestamps) and compares only the chain-derived fields.
    """
    height: Optional[int] = None
    position: Optional[int] = None
    fee: Optional[int] = None
    date_added: Optional[int] = None
    date_updated: Optional[int] = None

    def __repr__(self):
        return f'TxData(height={self.height},position={self.position},fee={self.fee},date_added={self.date_added},date_updated={self.date_updated})'

    def __eq__(self, other: object) -> bool:
        if (not isinstance(other, TxData)):
            return NotImplemented
        return ((self.height == other.height) and (self.position == other.position) and (self.fee == other.fee))

    def __hash__(self) -> int:
        # Defining __eq__ implicitly sets __hash__ to None, which made
        # instances unhashable (unusable in sets / as dict keys).  Restore
        # hashability over exactly the fields __eq__ compares, so equal
        # objects hash equally.
        return hash((self.height, self.position, self.fee))
def extractBlopkunnovelcubWordpressCom(item):
    """Parse a release item from blopkunnovelcub.wordpress.com.

    Returns a release message for recognised tags, ``None`` for previews or
    items without a chapter/volume, and ``False`` when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_progress = bool(chp or vol)
    if not has_progress or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _create_post_response(post_result):
    """Build the response after a successful post: JSON for AJAX, redirect otherwise.

    AJAX is detected via the conventional X-Requested-With request header.
    """
    if (request.headers.get('X-Requested-With') == 'XMLHttpRequest'):
        return jsonify({'boardName': post_result.board_name, 'threadRefno': post_result.thread_refno, 'postRefno': post_result.post_refno})
    else:
        return redirect(url_for_post(post_result.board_name, post_result.thread_refno, post_result.post_refno))
def amazon_data_extraction_formatter(responses: List[dict]) -> DataExtractionDataClass:
    """Convert AWS Textract-style responses into a DataExtractionDataClass.

    Walks KEY_VALUE_SET blocks: for each KEY block with both CHILD and VALUE
    relationships, the child text becomes the key and the VALUE block's child
    supplies the value text, bounding box, and confidence.  Blocks with
    missing pieces (KeyError while resolving relationships) are skipped.
    """
    blocks = _convert_response_to_blocks_with_id(responses)
    # Annotated List (not Sequence) because the list is mutated via append.
    items: List[ItemDataExtraction] = []
    for (_, block) in blocks.items():
        if (block['BlockType'] != 'KEY_VALUE_SET'):
            continue
        if (block['EntityTypes'] != ['KEY']):
            continue
        # Need at least a CHILD and a VALUE relationship to form a pair.
        if (len(block.get('Relationships', [])) < 2):
            continue
        item = {}
        try:
            for relation in block['Relationships']:
                if (relation['Type'] == 'CHILD'):
                    item['key'] = blocks[relation['Ids'][0]]['Text']
                elif (relation['Type'] == 'VALUE'):
                    # Resolve VALUE block -> its first child block, which
                    # carries the actual text/geometry/confidence.
                    value_id = relation['Ids'][0]
                    child = blocks[blocks[value_id]['Relationships'][0]['Ids'][0]]
                    item['value'] = child['Text']
                    item['bounding_box'] = BBox.from_json(child['Geometry']['BoundingBox'], modifiers=(lambda x: x.title()))
                    item['confidence_score'] = (child['Confidence'] / 100)
            items.append(ItemDataExtraction(**item))
        except KeyError:
            # Incomplete block (e.g. key without resolvable value): skip it.
            continue
    return DataExtractionDataClass(fields=items)
class ExtractedTopic(BaseModel):
    """A topic extracted from text: a title-cased category plus an importance score.

    NOTE(review): the bare `_validator(...)` lines look like pydantic
    validator decorator applications whose '@' was stripped; as written they
    are no-op expressions — confirm against the original source.
    """
    # Title-cased topic label (normalised by valid_category).
    category: str
    # Importance in [0, 1], rounded to 2 decimals (normalised by valid_confidence).
    importance: float
    _validator('category', mode='before')
    def valid_category(cls, value):
        """Require a string and normalise it to title case."""
        if (not isinstance(value, str)):
            raise TypeError(f'Category must be a string, not {type(value)}')
        value = value.title()
        return value
    _validator('importance', mode='before')
    def valid_confidence(cls, value):
        """Require a number in [0, 1] and round it to two decimals."""
        if (not isinstance(value, (float, int))):
            raise TypeError(f'Importance must be a float, not {type(value)}')
        if ((value < 0) or (value > 1)):
            raise ValueError(f'{value} is not allowed. Importance must be between 0 and 1')
        return round(value, 2)
def init_filters(app):
    """Register the application's Jinja template filters.

    NOTE(review): the bare `_filter('name')` lines look like decorator
    applications whose '@' was stripped; as written they are no-op
    expression statements — confirm against the original source.
    """
    _filter('currency_symbol')
    def currency_symbol_filter(currency_code):
        # Fall back to the raw currency code when no symbol is known.
        symbol = CurrencyCodes().get_symbol(currency_code)
        return (symbol if symbol else currency_code)
    _filter('money')
    def money_filter(amount, email, currency):
        return convert_to_user_locale(email, amount=amount, currency=currency)
    _filter('datetime')
    def simple_datetime_display(date, timezone=None, format='%B %d, %Y %H:%M (%Z%z)'):
        if (not date):
            return ''
        if timezone:
            # Treat the stored datetime as UTC, then localise for display.
            date = date.replace(tzinfo=pytz.timezone('UTC')).astimezone(pytz.timezone(timezone))
        return date.strftime(format)
    _filter('date')
    def simple_date_display(date, timezone=None):
        # NOTE(review): 'MMMM d, yyyy' is a babel/LDML pattern, but
        # simple_datetime_display feeds it to strftime, which renders it
        # literally — verify the intended output format.
        return simple_datetime_display(date, timezone, 'MMMM d, yyyy')
    _filter('humanize')
    def humanize_filter(time):
        return humanize_helper(time)
    _filter('nl2br')
    def nl2br(text):
        if (not text):
            return text
        return text.replace('\n', '<br/>')
    _filter('strip_tags')
    def strip(text):
        if (not text):
            return text
        return strip_tags(text)
def get_vc_model_and_transform(model_name, device='cpu', use_compression_layer=False):
    """Load a visual-cortex model config by name and instantiate it via hydra.

    Returns (model, transform, embedding_dim).

    Raises:
        NameError: if *model_name* is not in MODEL_NAMES.
    """
    if (model_name not in MODEL_NAMES):
        raise NameError('Invalid vc model name')
    cfg_path = os.path.join(vc_models_abs_path, 'conf', 'model', f'{model_name}.yaml')
    main_model_cfg = omegaconf.OmegaConf.load(cfg_path)
    if use_compression_layer:
        # Some configs nest the backbone under model.model; pick whichever exists.
        if ('model' in main_model_cfg.model):
            model_cfg = main_model_cfg.model.model
        else:
            model_cfg = main_model_cfg.model
        # With a compression layer, disable global pooling and the CLS token.
        model_cfg.global_pool = (not use_compression_layer)
        model_cfg.use_cls = (not use_compression_layer)
    (model, embedding_dim, transform, metadata) = hydra.utils.call(main_model_cfg)
    return (model, transform, embedding_dim)
def test_arg_range2():
    """arg_range_analysis should derive bounds from asserts only in slow mode.

    NOTE(review): `foo` is accessed via `foo._loopir_proc`, so a compiler
    decorator (e.g. `@proc`) was presumably stripped from above the def —
    confirm against the original source.
    """
    def foo(N: size, K: size):
        assert (N >= 50)
        assert (K > 20)
        pass
    # Precise analysis reads the asserts: N in [50, inf), K in [21, inf).
    assert (arg_range_analysis(foo._loopir_proc, foo._loopir_proc.args[0], fast=False) == (50, None))
    assert (arg_range_analysis(foo._loopir_proc, foo._loopir_proc.args[1], fast=False) == (21, None))
    # Fast mode ignores asserts and falls back to the default lower bound of 1.
    assert (arg_range_analysis(foo._loopir_proc, foo._loopir_proc.args[0], fast=True) == (1, None))
class Timeout():
    """Timeout configuration with connect/read/write/pool components.

    Accepts another Timeout (copied), a tuple of 2-4 floats, all four
    components as keyword arguments, or a single default applied to any
    component not given explicitly.
    """
    def __init__(self, timeout: typing.Union[(TimeoutTypes, UnsetType)]=UNSET, *, connect: typing.Union[(None, float, UnsetType)]=UNSET, read: typing.Union[(None, float, UnsetType)]=UNSET, write: typing.Union[(None, float, UnsetType)]=UNSET, pool: typing.Union[(None, float, UnsetType)]=UNSET) -> None:
        if isinstance(timeout, Timeout):
            # Copying an existing Timeout: individual overrides are not allowed.
            assert (connect is UNSET)
            assert (read is UNSET)
            assert (write is UNSET)
            assert (pool is UNSET)
            self.connect = timeout.connect
            self.read = timeout.read
            self.write = timeout.write
            self.pool = timeout.pool
        elif isinstance(timeout, tuple):
            # Positional tuple form: (connect, read[, write[, pool]]).
            self.connect = timeout[0]
            self.read = timeout[1]
            self.write = (None if (len(timeout) < 3) else timeout[2])
            self.pool = (None if (len(timeout) < 4) else timeout[3])
        elif (not (isinstance(connect, UnsetType) or isinstance(read, UnsetType) or isinstance(write, UnsetType) or isinstance(pool, UnsetType))):
            # All four components given explicitly; no default needed.
            self.connect = connect
            self.read = read
            self.write = write
            self.pool = pool
        else:
            if isinstance(timeout, UnsetType):
                # NOTE(review): this message appears to have lost its leading
                # subject (e.g. the class name) — confirm against the original.
                raise ValueError(' must either include a default, or set all four parameters explicitly.')
            # Fill any unset component from the single default.
            self.connect = (timeout if isinstance(connect, UnsetType) else connect)
            self.read = (timeout if isinstance(read, UnsetType) else read)
            self.write = (timeout if isinstance(write, UnsetType) else write)
            self.pool = (timeout if isinstance(pool, UnsetType) else pool)
    def as_dict(self) -> typing.Dict[(str, typing.Optional[float])]:
        """Return the four timeout components as a plain dict."""
        return {'connect': self.connect, 'read': self.read, 'write': self.write, 'pool': self.pool}
    def __eq__(self, other: typing.Any) -> bool:
        return (isinstance(other, self.__class__) and (self.connect == other.connect) and (self.read == other.read) and (self.write == other.write) and (self.pool == other.pool))
    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        # Compact form when all four components share one value.
        if (len({self.connect, self.read, self.write, self.pool}) == 1):
            return f'{class_name}(timeout={self.connect})'
        return f'{class_name}(connect={self.connect}, read={self.read}, write={self.write}, pool={self.pool})'
class Statusbar(GObject.Object):
    """Switchable status bar that mirrors album-selection info.

    Toggles between the source's native status bar and a custom one based on
    the CUSTOM_STATUSBAR plugin setting, and rebuilds the status text whenever
    a 'display-status' signal arrives.
    """
    __gsignals__ = {'display-status': (GObject.SIGNAL_RUN_LAST, None, (object,))}
    # Bound to the plugin's CUSTOM_STATUSBAR GSetting (read-only binding).
    custom_statusbar_enabled = GObject.property(type=bool, default=False)
    def __init__(self, source):
        super(Statusbar, self).__init__()
        self.status = ''
        self._source_statusbar = SourceStatusBar(source)
        self._custom_statusbar = CustomStatusBar(source.status_label)
        self.current_statusbar = self._source_statusbar
        self._connect_signals(source)
        self._connect_properties()
    def _connect_properties(self):
        """Bind the enabled flag to the plugin's GSettings key."""
        gs = GSetting()
        settings = gs.get_setting(gs.Path.PLUGIN)
        settings.bind(gs.PluginKey.CUSTOM_STATUSBAR, self, 'custom_statusbar_enabled', Gio.SettingsBindFlags.GET)
    def _connect_signals(self, source):
        self.connect('notify::custom-statusbar-enabled', self._custom_statusbar_enabled_changed)
        self.connect('display-status', self._update)
    def _custom_statusbar_enabled_changed(self, *args):
        """Swap the visible status bar when the setting flips, keeping the text."""
        self.current_statusbar.hide()
        if self.custom_statusbar_enabled:
            self.current_statusbar = self._custom_statusbar
        else:
            self.current_statusbar = self._source_statusbar
        self.current_statusbar.show()
        self.current_statusbar.update(self.status)
    def _generate_status(self, albums=None):
        """Build the localised status string for the selected albums (or clear it)."""
        self.status = ''
        if albums:
            track_count = 0
            duration = 0
            for album in albums:
                track_count += album.track_count
                duration += (album.duration / 60)
            if (len(albums) == 1):
                self.status = rb3compat.unicodedecode((_('%s by %s') % (album.name, album.artist)), 'UTF-8')
            else:
                self.status = rb3compat.unicodedecode((_('%d selected albums') % len(albums)), 'UTF-8')
            if (track_count == 1):
                self.status += rb3compat.unicodedecode(_(' with 1 track'), 'UTF-8')
            else:
                self.status += rb3compat.unicodedecode((_(' with %d tracks') % track_count), 'UTF-8')
            if (duration == 1):
                self.status += rb3compat.unicodedecode(_(' and a duration of 1 minute'), 'UTF-8')
            else:
                self.status += rb3compat.unicodedecode((_(' and a duration of %d minutes') % duration), 'UTF-8')
    def _update(self, widget, current_view):
        """'display-status' handler: regenerate and show status for the selection."""
        albums = current_view.get_selected_objects()
        self._generate_status(albums)
        self.current_statusbar.update(self.status)
_required
def is_track_organizer(view, view_args, view_kwargs, *args, **kwargs):
    """Permission gate: allow staff, track organizers, or users with event access.

    Raises:
        ForbiddenError: when the current user has none of the above for the
        event identified by kwargs['event_id'].
    """
    user = current_user
    event_id = kwargs['event_id']
    # Staff bypass all event-level checks.
    if user.is_staff:
        return view(*view_args, **view_kwargs)
    if (user.is_track_organizer(event_id) or user.has_event_access(event_id)):
        return view(*view_args, **view_kwargs)
    raise ForbiddenError({'source': ''}, 'Track Organizer access is Required.')
_metrics.timeit
def __sic_range(key=None, context=None, randomstate=None, df=None):
    """Return a generated SIC-range value, creating the generator on first use.

    The generator is cached in *context* under *key*, so repeated calls for
    the same key reuse one SicRangeGenerator (and its random state).

    NOTE(review): context has a default of None but is dereferenced
    unconditionally — callers presumably always pass it; confirm.
    """
    if (randomstate is None):
        randomstate = np.random
    if (not context.has_generator(key)):
        generator = SicRangeGenerator(randomstate)
        context.add_generator(key, generator)
    generator = context.get_generator(key)
    return generator.make()
class DUT(Module):
    """Simulation device-under-test: a full UDP/IP core wired to model PHY/MAC/ARP/IP/UDP.

    A packet streamer drives the core's UDP port (loopback enabled in the UDP
    model) and a packet logger captures what comes back.
    """
    def __init__(self, dw=8):
        # dw: user datapath width in bits.
        self.dw = dw
        self.submodules.phy_model = phy.PHY(8, debug=False)
        self.submodules.mac_model = mac.MAC(self.phy_model, debug=False, loopback=False)
        self.submodules.arp_model = arp.ARP(self.mac_model, mac_address, ip_address, debug=False)
        self.submodules.ip_model = ip.IP(self.mac_model, mac_address, ip_address, debug=False, loopback=False)
        self.submodules.udp_model = udp.UDP(self.ip_model, ip_address, debug=False, loopback=True)
        self.submodules.core = LiteEthUDPIPCore(self.phy_model, mac_address, ip_address, 100000)
        udp_port = self.core.udp.crossbar.get_port(22136, dw)
        self.submodules.streamer = PacketStreamer(eth_udp_user_description(dw))
        self.submodules.logger = PacketLogger(eth_udp_user_description(dw))
        # NOTE(review): ip_address.eq() below has no argument — the target IP
        # value appears to have been lost; confirm against the original source.
        self.comb += [Record.connect(self.streamer.source, udp_port.sink), udp_port.sink.ip_address.eq(), udp_port.sink.src_port.eq(4660), udp_port.sink.dst_port.eq(22136), udp_port.sink.length.eq((64 // (dw // 8))), Record.connect(udp_port.source, self.logger.sink)]
class DocTransformer(Transformer):
    """Lark transformer that turns a parsed docstring tree into Doc objects.

    NOTE(review): the bare `_inline` lines look like decorator applications
    whose '@' was stripped; as written they are no-op expression statements —
    confirm against the original source.
    """
    def as_list(self, items):
        """Identity transform: keep child items as a plain list."""
        return items
    # Several rules share the same list-passthrough handling.
    section_items = as_list
    sections = as_list
    text = Text
    header = Text
    start = _inline(DocString)
    _inline
    def section(self, name, items):
        """Build a Section, dropping the trailing ':' from its name."""
        return Section(name.rstrip(':'), items)
    _inline
    def defin(self, name, text):
        """Build a Defin, dropping the trailing ':' from its name."""
        return Defin(name.rstrip(':'), text)
def record_traffic(log: list, logfile: str):
    """Split a raw TX/RX byte log into parallel streams and dump them as JSON.

    Each entry of *log* is a bytes object whose first two bytes are a
    direction marker (b'TX' or b'RX') and whose remainder is the payload.
    A TX entry starts a new exchange; subsequent RX entries are appended to
    the response of the most recent exchange.

    Raises:
        ValueError: on an unknown direction marker.
        IndexError: if an RX entry appears before any TX entry.
    """
    tx = []
    rx = []
    for b in log:
        direction = b[:2]
        data = b[2:]
        if (direction == b'TX'):
            tx.append(list(data))
            rx.append([])
        elif (direction == b'RX'):
            rx[(- 1)] += list(data)
        else:
            raise ValueError(f'Unknown direction: {direction}')
    print([tx, rx])
    # Use a context manager so the file handle is flushed and closed
    # deterministically; the original left the handle to the GC.
    with open(logfile, 'w') as fh:
        json.dump([tx, rx], fh)
class OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Pitch-mapping options for variwide sonification default instruments.

    NOTE(review): each getter is immediately redefined by a same-named
    setter — the `@property` / `@<name>.setter` decorators appear to have
    been stripped; as written only the setter definitions survive.  Confirm
    against the original source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Pitch maps to the 'y' value by default.
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Default maximum pitch: note c6.
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        # Default minimum pitch: note c2.
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        # Pitch range is evaluated within the yAxis by default.
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesBoxplotSonificationContexttracksMappingLowpassResonance(Options):
    """Lowpass-resonance mapping options for boxplot sonification context tracks.

    NOTE(review): each getter is immediately redefined by a same-named
    setter — the `@property` / `@<name>.setter` decorators appear to have
    been stripped; as written only the setter definitions survive.  Confirm
    against the original source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def _create_list_of_mock_iam_resources():
    """Build MagicMock stand-ins for each IAM policy resource in the fixture data."""
    resources = []
    for entry in fasd.IAM_POLICY_RESOURCES:
        resource = mock.MagicMock()
        resource.data = json.dumps(entry['iam_policy'])
        parent = mock.MagicMock()
        parent.type = entry['parent_type']
        parent.name = entry['parent_name']
        parent.full_name = entry['parent_full_name']
        resource.parent = parent
        resources.append(resource)
    return resources
class CommandLine():
    """Assembles the command line for running a script, with secret masking.

    The line is built as: python interpreter, script path, then per-command
    arguments contributed by a FormToCommandLineBuilder.

    NOTE(review): 'command_line_bulder' is a typo for 'builder', but it is a
    public attribute other code may read — renaming would break callers.
    """
    def __init__(self, script_file_path: str, commands: List[str]):
        self._parts: List[CmdPart] = list()
        self.append(_get_python_interpreter())
        self.append(script_file_path)
        self.command_line_bulder = FormToCommandLineBuilder(self)
        # Slot 0 holds args that precede the first command.
        self.command_line_bulder.add_command_args(0)
        for (i, command) in enumerate(commands):
            self.append(command)
            self.command_line_bulder.add_command_args((i + 1))
    def append(self, part: str, secret: bool=False):
        """Add one command-line part, optionally flagged as secret."""
        self._parts.append(CmdPart(part, secret))
    def get_commandline(self, obfuscate: bool=False) -> List[str]:
        """Return the parts as strings; secrets become '******' when obfuscating."""
        return [('******' if (cmd_part.secret and obfuscate) else str(cmd_part)) for cmd_part in self._parts]
    def get_download_field_infos(self):
        """Return field infos that should produce a named download link."""
        return [fi for fi in self.command_line_bulder.field_infos if (fi.generate_download_link and fi.link_name)]
    def after_script_executed(self):
        """Run each field's post-execution hook (cleanup, link generation, ...)."""
        for fi in self.command_line_bulder.field_infos:
            fi.after_script_executed()
.django_db(transaction=True)
def test_download_awards_without_columns(client, monkeypatch, download_test_data, elasticsearch_award_index):
    """Posting an awards download request with empty columns should still yield a zip URL."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # Point download generation at the test database.
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    resp = client.post('/api/v2/download/awards/', content_type='application/json', data=json.dumps({'filters': {'award_type_codes': ['A']}, 'columns': []}))
    assert (resp.status_code == status.HTTP_200_OK)
    assert ('.zip' in resp.json()['file_url'])
class ManageUsers(MethodView):
    """Moderator view listing users, with an optional search form (POST)."""
    decorators = [allows.requires(IsAtleastModerator, on_fail=FlashAndRedirect(message=_('You are not allowed to manage users'), level='danger', endpoint='management.overview'))]
    form = UserSearchForm
    def get(self):
        """Render a paginated list of all users plus an empty search form."""
        page = request.args.get('page', 1, type=int)
        form = self.form()
        users = User.query.order_by(User.id.asc()).paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
        return render_template('management/users.html', users=users, search_form=form)
    def post(self):
        """Render search results when the form validates; otherwise the full list."""
        page = request.args.get('page', 1, type=int)
        form = self.form()
        if form.validate():
            users = form.get_results().paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
            return render_template('management/users.html', users=users, search_form=form)
        users = User.query.order_by(User.id.asc()).paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
        return render_template('management/users.html', users=users, search_form=form)
def action_for_platform(platform_name: str, skip: bool=True) -> Callable:
    """Decorator factory: skip a test depending on the current OS.

    With skip=True the test is skipped ON *platform_name*; with skip=False it
    is skipped everywhere EXCEPT *platform_name*.  Works on both plain test
    functions and test classes (by injecting the skip into setup hooks).
    """
    def decorator(pytest_func):
        # Current platform doesn't trigger the action: return the test unchanged.
        is_different = (platform.system() != platform_name)
        if (is_different is skip):
            return pytest_func
        def action(*args, **kwargs):
            if skip:
                pytest.skip(f"Skipping the test since it doesn't work on {platform_name}.")
            else:
                pytest.skip(f'Skipping the test since it works only on {platform_name}.')
        # For a test class, subclass it with the skipping action as every
        # setup hook (pytest, nose, and unittest spellings).
        if isinstance(pytest_func, type):
            return type(pytest_func.__name__, (pytest_func,), {'setup_class': action, 'setup': action, 'setUp': action, '_skipped': True})
        # NOTE(review): the bare `(pytest_func)` below looks like a stripped
        # decorator application (likely `@functools.wraps(pytest_func)`); as
        # written it is a no-op expression — confirm against the original.
        (pytest_func)
        def wrapper(*args, **kwargs):
            action(*args, **kwargs)
        return wrapper
    return decorator
def test_update_pet(client: TestClient):
    """Create a pet, PUT a new name, and verify the update round-trips."""
    response = client.post('/api/v1/pet', json=SAMPLE_PET)
    pet_id = response.json()['id']
    response = client.put(f'/api/v1/pet/{pet_id}', json={'name': 'Bad Kitty!'})
    assert (response.status_code == 200)
    assert (response.json()['name'] == 'Bad Kitty!')
class MainTest(object):
    """CLI tests for the cubic-conversion tool's main() entry point.

    NOTE(review): run_main takes *args with no self, yet is called as
    self.run_main(...) — a `@staticmethod` decorator appears to have been
    stripped; as written the instance itself would be passed into main().
    Confirm against the original source.
    """
    def run_main(*args):
        # Drop falsy args so optional flags can be passed conditionally.
        main([str(p) for p in args if p])
    def test_no_output(self, test_paths):
        """Without -o, output lands next to the input as *.cubic.ttf and shrinks it."""
        ttf_path = test_paths[0]
        self.run_main(ttf_path)
        output_path = str(ttf_path).replace('.ttf', '.cubic.ttf')
        font = TTFont(output_path)
        # glyphDataFormat 1 marks cubic (variable-format) glyph data.
        assert (font['head'].glyphDataFormat == 1)
        assert (os.stat(ttf_path).st_size > os.stat(output_path).st_size)
    def test_output_file(self, test_paths):
        """-o writes the converted font to the explicit path."""
        ttf_path = test_paths[0]
        output_path = (str(ttf_path) + '.cubic')
        self.run_main(ttf_path, '-o', output_path)
        font = TTFont(output_path)
        assert (font['head'].glyphDataFormat == 1)
    def test_stats(self, test_paths):
        """--verbose should run without error (stats printed, nothing asserted)."""
        ttf_path = test_paths[0]
        self.run_main(ttf_path, '--verbose')
    def test_all_cubic(self, test_paths):
        """-c (all-cubic) still produces format-1 glyph data."""
        ttf_path = test_paths[0]
        self.run_main(ttf_path, '-c')
        output_path = str(ttf_path).replace('.ttf', '.cubic.ttf')
        font = TTFont(output_path)
        assert (font['head'].glyphDataFormat == 1)
class ComboEntryField(Gtk.Box):
    """A Gtk.Box wrapping an editable combo box preloaded with text choices."""

    def __init__(self, values):
        Gtk.Box.__init__(self)
        self.combo = Gtk.ComboBoxText.new_with_entry()
        for choice in values:
            self.combo.append_text(choice)
        self.pack_start(self.combo, True, True, 0)
        self.combo.show()

    def get_state(self):
        """Return the text currently shown in the combo's entry."""
        return self.combo.get_active_text()

    def set_state(self, state):
        """Write *state* into the combo's child entry widget."""
        entry = self.combo.get_child()
        entry.set_text(state)
class UploadAttachment(object):
    """Handles uploading ticket attachments and recording them in the database."""
    def __init__(self, files):
        # files: list of werkzeug-style file objects from the request.
        self.files = files
        self.upload_folder = app.config['ticket_upload_folder']
        # Populated by upload_files(): list of (original_name, stored_name_or_False).
        self.new_files = None
    def are_attachments(self):
        """Return True when the request actually carried at least one file."""
        if (len(self.files) == 0):
            return False
        # Browsers submit an empty-filename entry when no file was chosen.
        if (self.files[0].filename == ''):
            return False
        return True
    def upload_files(self):
        """Upload every file; return [(original, stored_or_False), ...] or False."""
        if (not self.are_attachments()):
            return False
        self.new_files = list()
        for file in self.files:
            uploaded_file = UploadFile(file)
            uploaded_file.upload_folder = self.upload_folder
            # False marks a failed upload for this entry.
            new_file_name = False
            if uploaded_file.upload_file():
                new_file_name = uploaded_file.file_name
            self.new_files.append((file.filename, new_file_name))
        return self.new_files
    def populate_db(self, flicketobject):
        """Attach the uploaded files to a FlicketTicket or FlicketPost in the DB.

        Failed uploads flash a warning instead of creating a row.  Note: rows
        are added to the session but not committed here.
        """
        topic = None
        post = None
        # Dispatch on the concrete type name (avoids importing both models here).
        if (type(flicketobject).__name__ == 'FlicketTicket'):
            topic = flicketobject
        if (type(flicketobject).__name__ == 'FlicketPost'):
            post = flicketobject
        if self.new_files:
            for new_file in self.new_files:
                if (new_file[1] is False):
                    flash('There was a problem uploading one or more of the files.', category='warning')
                else:
                    new_image = FlicketUploads(topic=topic, post=post, filename=new_file[1], original_filename=new_file[0])
                    db.session.add(new_image)
def test_snr_compute(partitioned_datas):
    """SNR distinguisher fed in two batches must match the precomputed result."""
    d = scared.SNRDistinguisher()
    d.update(traces=partitioned_datas.traces_1, data=partitioned_datas.data_1)
    d.update(traces=partitioned_datas.traces_2, data=partitioned_datas.data_2)
    results = d.compute()
    assert np.array_equal(partitioned_datas.result_snr, results)
class iosxe_device_interfaces(models.Model):
    """One network interface of an IOS-XE device (state polled from the device)."""
    # Deleting the device cascades to its interfaces.
    iosxe_device = models.ForeignKey(iosxe_device, on_delete=models.CASCADE)
    name = models.CharField(max_length=30)
    description = models.CharField(max_length=100, blank=True)
    admin_status = models.CharField(max_length=20, null=True)
    oper_status = models.CharField(max_length=20, null=True)
    last_change = models.CharField(max_length=20, null=True)
    phys_address = models.CharField(max_length=30, blank=True)
    duplex_mode = models.CharField(max_length=20, blank=True)
    port_speed = models.CharField(max_length=20, blank=True)
    ipv4 = models.CharField(max_length=40, blank=True)
class DockerDaemonEventsMonitor():
    """Continuously reads Docker daemon events and stores them in MongoDB."""
    def __init__(self, docker_driver, mongodb_driver):
        super(DockerDaemonEventsMonitor, self).__init__()
        self.mongodb_driver = mongodb_driver
        self.docker_driver = docker_driver
    def run(self):
        """Event loop: decode each event, strip dotted attribute keys, insert it.

        Read timeouts are expected (long-poll reconnect) and silently retried;
        documents MongoDB rejects are logged and skipped.
        """
        while True:
            try:
                for event in self.docker_driver.docker_events():
                    e = json.loads(event.decode('UTF-8').replace('\n', ''))
                    if (('Actor' in e) and ('Attributes' in e['Actor'])):
                        # Mongo field names may not contain '.', so drop such
                        # keys.  Iterate over a snapshot since we mutate the
                        # dict.  (Note: 'iter' shadows the builtin here.)
                        iter = list(e['Actor']['Attributes'])
                        for key in iter:
                            if ('.' in key):
                                del e['Actor']['Attributes'][key]
                    self.mongodb_driver.bulk_insert_docker_daemon_events([e])
            except requests.packages.urllib3.exceptions.ReadTimeoutError:
                # Normal when the events stream idles; just reconnect.
                pass
            except bson.errors.InvalidDocument as ex:
                message = 'Unexpected exception of type {0} occurred: {1!r}'.format(type(ex).__name__, (ex.get_message() if (type(ex).__name__ == 'DagdaError') else ex.args))
                DagdaLogger.get_logger().error(message)
                if InternalServer.is_debug_logging_enabled():
                    traceback.print_exc()
class FeatureToProcess():
    """Bundles one feature's series (plus optional comparison/target series) for analysis.

    Series names are coerced to str so downstream code can rely on string
    names.  Target series may only be numerical or boolean.
    """
    def __init__(self, order: int, source: pd.Series, compare=None, source_target=None, compare_target=None, predetermined_type: FeatureType=None, predetermined_type_target: FeatureType=None):
        # order == -1 marks the target feature itself (see is_target()).
        self.order = order
        # Normalize all provided series names to strings.
        source.name = str(source.name)
        if (compare is not None):
            compare.name = str(compare.name)
        if (source_target is not None):
            source_target.name = str(source_target.name)
        if (compare_target is not None):
            compare_target.name = str(compare_target.name)
        self.source = source
        self.source_counts = None
        self.source_target = source_target
        self.compare = compare
        self.compare_counts = None
        self.compare_target = compare_target
        if predetermined_type:
            self.predetermined_type = predetermined_type
        else:
            self.predetermined_type = FeatureType.TYPE_UNKNOWN
        if predetermined_type_target:
            # Targets are restricted to boolean/numerical; give a more helpful
            # message for the common categorical mistake.
            if (predetermined_type_target not in (FeatureType.TYPE_BOOL, FeatureType.TYPE_NUM)):
                if (predetermined_type_target == FeatureType.TYPE_CAT):
                    raise ValueError('TARGET values can only be of NUMERICAL or BOOLEAN type for now.\nCATEGORICAL type was detected; if you meant the target to be\nNUMERICAL, use a FeatureConfig(force_num=...) object.')
                else:
                    raise ValueError('TARGET values can only be of NUMERICAL or BOOLEAN type for now.')
            self.predetermined_type_target = predetermined_type_target
        else:
            self.predetermined_type_target = FeatureType.TYPE_UNKNOWN
    def is_target(self):
        """True when this FeatureToProcess represents the target column."""
        return (self.order == (- 1))
    def __repr__(self):
        out = str()
        if (self.source is not None):
            out = (out + f'Src: {self.source.name} ')
        if (self.source_target is not None):
            out = (out + f'(Target: {self.source_target.name}) ')
        if (self.compare is not None):
            out = (out + f'[WITH COMPARISON]')
        return out
.parametrize('name, normalized_name, namehash_hex', SETUP_NAME_TEST_CASES)
def test_setup_name(ens, name, normalized_name, namehash_hex, ):
    """setup_name should create forward + reverse ENS records and survive re-pointing."""
    address = ens.w3.eth.accounts[3]
    # Precondition: no reverse record for this account yet.
    assert (not ens.name(address))
    owner = ens.owner('tester.eth')
    ens.setup_name(name, address)
    assert (ens.name(address) == normalized_name)
    # The resolver must map the namehash node back to the address.
    node = Web3.to_bytes(hexstr=HexStr(namehash_hex))
    assert (ens.resolver(normalized_name).caller.addr(node) == address)
    assert (ens.owner(name) == owner)
    new_address = ens.w3.eth.accounts[4]
    # Re-point the name at a different account and verify both directions.
    ens.setup_address(name, None)
    ens.setup_name(name, new_address)
    assert (ens.name(new_address) == normalized_name)
    assert (ens.address(name) == new_address)
    assert (not ens.name(address))
    # Cleanup: clear reverse and forward records.
    ens.setup_name(None, address)
    ens.setup_address(name, None)
    assert (not ens.name(address))
    assert (not ens.address(name))
.django_db
def test_GmArchive(Q, auth_header):
    """GmArchive mutation stores a game + archive and returns the game id."""
    from player.tests import PlayerFactory
    PlayerFactory.create()
    PlayerFactory.create()
    import random
    # Original read `random.randint(100, )` — the required upper bound was
    # missing, which raises TypeError at runtime.  The id only needs to be a
    # random integer above the factory-created rows, so use a wide range to
    # avoid collisions.  TODO confirm the originally intended bound.
    gid = random.randint(100, 10 ** 9)
    game = {'gameId': gid, 'name': 'foo!', 'type': 'THBattle2v2', 'flags': {}, 'players': [1, 2], 'winners': [1], 'deserters': [2], 'startedAt': '2020-12-02T15:43:05Z', 'duration': 333}
    rst = Q('\n    mutation TestGmArchive($game: GameInput!) {\n        GmArchive(game: $game, archive: "AAAA") {\n            id\n        }\n    }\n    ', variables={'game': game}, headers=auth_header)
    assert ('errors' not in rst)
    assert (rst['data']['GmArchive']['id'] == gid)
    # Both the game row and its archive row must now exist.
    models.Game.objects.get(id=gid)
    models.GameArchive.objects.get(game_id=gid)
def test_get_award_financial_csv_sources():
    """award_financial downloads at treasury_account level should yield 3 sources."""
    original = VALUE_MAPPINGS['award_financial']['filter_function']
    # Fixed typo: `MagicMock(returned_value='')` merely sets an attribute
    # named 'returned_value', so the stub would have returned a fresh
    # MagicMock instead of ''.  'return_value' is the documented keyword.
    VALUE_MAPPINGS['award_financial']['filter_function'] = MagicMock(return_value='')
    csv_sources = download_generation.get_download_sources({'download_types': ['award_financial'], 'account_level': 'treasury_account', 'filters': {}})
    # Restore the real filter function before asserting so a failure can't
    # leak the mock into other tests.
    VALUE_MAPPINGS['award_financial']['filter_function'] = original
    assert (len(csv_sources) == 3)
    assert (csv_sources[0].file_type == 'treasury_account')
    assert (csv_sources[0].source_type == 'award_financial')
    assert (csv_sources[0].extra_file_type == 'Contracts_')
    assert (csv_sources[1].file_type == 'treasury_account')
    assert (csv_sources[1].source_type == 'award_financial')
    assert (csv_sources[1].extra_file_type == 'Assistance_')
class TestWorkflowMethod(unittest.TestCase):
    """Verifies the metadata that the workflow-method decorator attaches to methods."""
    def setUp(self) -> None:
        pass
    def test_method_annotated_plain(self):
        """Bare annotation still attaches a fully-populated _workflow_method."""
        fn = DummyWorkflow.method_annotated_plain
        assert fn._workflow_method
        attributes = dir(fn._workflow_method)
        self.assertIn('_name', attributes)
        # Default name is '<ClassName>::<method_name>'.
        self.assertEqual('DummyWorkflow::method_annotated_plain', fn._workflow_method._name)
        self.assertIn('_workflow_id', attributes)
        self.assertIn('_workflow_id_reuse_policy', attributes)
        self.assertIn('_execution_start_to_close_timeout_seconds', attributes)
        self.assertIn('_task_start_to_close_timeout_seconds', attributes)
        self.assertIn('_task_list', attributes)
    def test_method_annotated_decorator_call(self):
        """Decorator called with no arguments behaves like the bare annotation."""
        fn = DummyWorkflow.method_annotated_decorator_call
        attributes = dir(fn._workflow_method)
        self.assertIn('_name', attributes)
        self.assertEqual('DummyWorkflow::method_annotated_decorator_call', fn._workflow_method._name)
        self.assertIn('_workflow_id', attributes)
        self.assertIn('_workflow_id_reuse_policy', attributes)
        self.assertIn('_execution_start_to_close_timeout_seconds', attributes)
        self.assertIn('_task_start_to_close_timeout_seconds', attributes)
        self.assertIn('_task_list', attributes)
    def test_method_with_decorator_call_arguments(self):
        """Explicit decorator arguments override every default."""
        fn = DummyWorkflow.method_with_decorator_call_arguments
        self.assertEqual('NAME', fn._workflow_method._name)
        self.assertEqual('WORKFLOW_ID', fn._workflow_method._workflow_id)
        self.assertEqual(WorkflowIdReusePolicy.AllowDuplicate, fn._workflow_method._workflow_id_reuse_policy)
        self.assertEqual(99999, fn._workflow_method._execution_start_to_close_timeout_seconds)
        self.assertEqual(123456, fn._workflow_method._task_start_to_close_timeout_seconds)
        self.assertEqual('TASK_LIST', fn._workflow_method._task_list)
class SD_Log_Tests(SD_VM_Local_Test):
    """Sanity checks for the sd-log VM: packages, services, and log flow."""

    def setUp(self):
        self.vm_name = 'sd-log'
        super(SD_Log_Tests, self).setUp()

    def test_sd_log_package_installed(self):
        # The logging package itself must be present.
        self.assertTrue(self._package_is_installed('securedrop-log'))

    def test_sd_log_redis_is_installed(self):
        # Both the redis client library and the server daemon are required.
        for pkg in ('redis', 'redis-server'):
            self.assertTrue(self._package_is_installed(pkg))

    def test_log_utility_installed(self):
        # The log collector binary and its qubes-rpc endpoint must exist.
        for path in ('/usr/sbin/securedrop-log', '/etc/qubes-rpc/securedrop.Log'):
            self.assertTrue(self._fileExists(path))

    def test_sd_log_has_no_custom_rsyslog(self):
        self.assertFalse(self._fileExists('/etc/rsyslog.d/sdlog.conf'))

    def test_sd_log_service_running(self):
        status = self._run('sudo systemctl is-active securedrop-log')
        assert status == 'active'

    def test_redis_service_running(self):
        status = self._run('sudo systemctl is-active redis')
        assert status == 'active'

    def test_logs_are_flowing(self):
        # sd-app's logs should be arriving under QubesIncomingLogs.
        listing = self._run('ls -1 /home/user/QubesIncomingLogs')
        self.assertTrue('sd-app' in listing.split('\n'))

    def test_log_dirs_properly_named(self):
        # The host's log directory must not leak into the incoming logs.
        listing = self._run('ls -1 /home/user/QubesIncomingLogs')
        self.assertFalse('host' in listing.split('\n'))

    def test_gpg_domain_configured(self):
        self.qubes_gpg_domain_configured(self.vm_name)
class OptionPlotoptionsPictorialSonificationTracksMappingLowpass(Options):
    """Accessors for the sonification lowpass-filter sub-options (frequency
    and resonance) of pictorial track mappings."""

    def frequency(self) -> 'OptionPlotoptionsPictorialSonificationTracksMappingLowpassFrequency':
        """Return the nested frequency option group."""
        sub = self._config_sub_data('frequency', OptionPlotoptionsPictorialSonificationTracksMappingLowpassFrequency)
        return sub

    def resonance(self) -> 'OptionPlotoptionsPictorialSonificationTracksMappingLowpassResonance':
        """Return the nested resonance option group."""
        sub = self._config_sub_data('resonance', OptionPlotoptionsPictorialSonificationTracksMappingLowpassResonance)
        return sub
def make_ner_response_parser(labels: List[str], lang: str) -> Callable:
    """Build a callable that parses an LLM NER response (lines of the form
    "LABEL: phrase, phrase, ...") into span annotations aligned against the
    example's text for the given language."""
    nlp = spacy.blank(lang)

    def _parse_response(text: str, example: Optional[Dict]=None) -> Dict:
        """Extract (label, phrases) pairs from the raw response text and
        format them into spans for the example."""
        extracted = []
        for raw_line in text.strip().split('\n'):
            if not raw_line or ':' not in raw_line:
                continue
            head, tail = raw_line.split(':', 1)
            norm = normalize_label(head)
            # Only keep recognised labels that actually list phrases.
            if norm in labels and tail.strip():
                phrase_list = [p.strip() for p in tail.strip().split(',')]
                extracted.append((norm, phrase_list))
        return _fmt_response(extracted, example)

    def _fmt_response(response: List[Tuple[(str, List[str])]], example: Dict):
        """Align the phrases against the doc text and return {'spans': [...]}."""
        doc = nlp.make_doc(example['text'])
        candidates = []
        for raw_label, phrases in response:
            lbl = normalize_label(raw_label)
            if lbl not in labels:
                continue
            for start, end in _find_substrings(doc.text, phrases):
                candidate = doc.char_span(start, end, alignment_mode='contract', label=lbl)
                if candidate is not None:
                    candidates.append(candidate)
        # Drop overlapping spans, keeping the longest/first ones.
        candidates = filter_spans(candidates)
        spans = [
            {
                'label': s.label_,
                'start': s.start_char,
                'end': s.end_char,
                'token_start': s.start,
                'token_end': (s.end - 1),
            }
            for s in candidates
        ]
        return {'spans': spans}

    return _parse_response
class CmdGiveUp(CmdEvscapeRoom):
    """Command letting a player abandon the escape room.

    Prompts for confirmation; only an exact 'QUIT' reply sends the caller
    home (via the menu), otherwise they stay.
    """
    key = 'give up'
    aliases = ('abort', 'chicken out', 'quit', 'q')
    def func(self):
        # Local import — presumably to avoid a circular import at module
        # load time; confirm against the package layout.
        from .menu import run_evscaperoom_menu
        nchars = len(self.room.get_all_characters())
        if (nchars == 1):
            # Caller is the last character in the room: sterner warning.
            warning = _QUIT_WARNING_LAST_CHAR.format(roomname=self.room.name)
            warning = _QUIT_WARNING.format(warning=warning)
        else:
            # Others remain, so the caller can come back later.
            warning = _QUIT_WARNING_CAN_COME_BACK.format(roomname=self.room.name)
            warning = _QUIT_WARNING.format(warning=warning)
        # Yielding a string prompts the player; resumes with their reply.
        ret = (yield warning)
        if (ret.upper() == 'QUIT'):
            self.msg('|R ... Oh. Okay then. Off you go.|n\n')
            # NOTE(review): yielding a number appears to pause the command
            # for that many seconds per the command-yield protocol — confirm.
            (yield 1)
            self.room.log(f'QUIT: {self.caller.key} used the quit command')
            self.room.msg_room(self.caller, f'|r{self.caller.key} gave up and was whisked away!|n')
            # Run the leave hook explicitly before the quiet, hook-less move.
            self.room.at_object_leave(self.caller, self.caller.home)
            self.caller.move_to(self.caller.home, quiet=True, move_hooks=False, move_type='teleport')
            run_evscaperoom_menu(self.caller)
        else:
            self.msg("|gYou're staying? That's the spirit!|n")
class Glyph(Component):
    """Component that places glyphs at the input points.

    Supports 'vector' glyphs (tvtk.Glyph3D) and 'tensor' glyphs
    (tvtk.TensorGlyph), optional input-point masking, and scalar/vector
    driven scale and color modes.
    """
    __version__ = 0
    # Selects which tvtk glyph filter is instantiated (see
    # _glyph_type_changed).
    glyph_type = Enum('vector', 'tensor', desc='if the glyph is vector or tensor')
    scale_mode = RevPrefixMap({'scale_by_vector': 1, 'scale_by_vector_components': 2, 'data_scaling_off': 3, 'scale_by_scalar': 0}, default_value='scale_by_scalar', desc='if scaling is done using scalar or vector/normal magnitude')
    color_mode = RevPrefixMap({'color_by_vector': 2, 'color_by_scalar': 1, 'no_coloring': 0}, default_value='color_by_scalar', desc='if coloring is done by scalar or vector/normal magnitude')
    color_mode_tensor = RevPrefixMap({'scalars': 1, 'eigenvalues': 2, 'no_coloring': 0}, default_value='scalars', desc='if coloring is done by scalar or eigenvalues')
    # When True, input points are first thinned by `mask_points`.
    mask_input_points = Bool(False, desc='if input points are masked')
    mask_points = Instance(tvtk.MaskPoints, args=(), kw={'random_mode': True}, record=True)
    # The actual tvtk glyph filter; concrete type depends on glyph_type.
    glyph = Instance(tvtk.Object, allow_none=False, record=True)
    glyph_source = Instance(glyph_source.GlyphSource, allow_none=False, record=True)
    # Back-reference to the owning module; needed for LUT data ranges.
    module = Instance(Module)
    show_scale_mode = Bool(True)
    # Guard flag to suppress re-entrant renders during trait updates.
    _updating = Bool(False)
    view = View(Group(Item(name='mask_input_points'), Group(Item(name='mask_points', enabled_when='object.mask_input_points', style='custom', resizable=True), show_labels=False), label='Masking'), Group(Group(Item(name='scale_mode', enabled_when='show_scale_mode', visible_when='show_scale_mode'), Item(name='color_mode', enabled_when='glyph_type == "vector"', visible_when='glyph_type == "vector"'), Item(name='color_mode_tensor', enabled_when='glyph_type == "tensor"', visible_when='glyph_type == "tensor"')), Group(Item(name='glyph', style='custom', resizable=True), show_labels=False), label='Glyph', selected=True), Group(Item(name='glyph_source', style='custom', resizable=True), show_labels=False, label='Glyph Source'), resizable=True)
    def __get_pure_state__(self):
        """Return picklable state, dropping attributes that are transient or
        restored externally (module link, _updating flag)."""
        d = super(Glyph, self).__get_pure_state__()
        for attr in ('module', '_updating'):
            d.pop(attr, None)
        return d
    def setup_pipeline(self):
        """Create the glyph filter and glyph source and wire change hooks."""
        self._glyph_type_changed(self.glyph_type)
        self.glyph_source = glyph_source.GlyphSource()
        # Re-render whenever the source pipeline or mask settings change.
        self.glyph_source.on_trait_change(self._update_source, 'pipeline_changed')
        self.mask_points.on_trait_change(self.render)
    def update_pipeline(self):
        """Rebuild connections and modes when upstream inputs change."""
        if ((len(self.inputs) == 0) or (len(self.inputs[0].outputs) == 0)):
            return
        self._mask_input_points_changed(self.mask_input_points)
        if (self.glyph_type == 'vector'):
            self._color_mode_changed(self.color_mode)
        else:
            self._color_mode_tensor_changed(self.color_mode_tensor)
        self._scale_mode_changed(self.scale_mode)
        self.outputs = [self.glyph]
        self.pipeline_changed = True
    def update_data(self):
        """Refresh scale range when upstream data (not topology) changes."""
        self._scale_mode_changed(self.scale_mode)
        self.data_changed = True
    def render(self):
        # Skip renders triggered while we are mid-update.
        if (not self._updating):
            super(Glyph, self).render()
    def start(self):
        """Start this component and its glyph source (idempotent)."""
        if self.running:
            return
        self.glyph_source.start()
        super(Glyph, self).start()
    def stop(self):
        """Stop this component and its glyph source (idempotent)."""
        if (not self.running):
            return
        self.glyph_source.stop()
        super(Glyph, self).stop()
    def has_output_port(self):
        """This component exposes a VTK output port."""
        return True
    def get_output_object(self):
        return self.glyph.output_port
    def _update_source(self):
        # Glyph source pipeline changed: reconnect its output as the source.
        self.configure_source_data(self.glyph, self.glyph_source.outputs[0])
    def _glyph_source_changed(self, value):
        self.configure_source_data(self.glyph, value.outputs[0])
    def _color_mode_changed(self, value):
        """Apply vector-glyph color mode; 'no_coloring' leaves it untouched."""
        if (len(self.inputs) == 0):
            return
        if (value != 'no_coloring'):
            self.glyph.color_mode = value
    def _color_mode_tensor_changed(self, value):
        """Apply tensor-glyph color mode, toggling color_glyphs as needed."""
        if (len(self.inputs) == 0):
            return
        self._updating = True
        if (value != 'no_coloring'):
            self.glyph.color_mode = value
            self.glyph.color_glyphs = True
        else:
            self.glyph.color_glyphs = False
        self._updating = False
        self.render()
    def _scale_mode_changed(self, value):
        """Apply scale mode and sync the glyph range from the module's LUT.

        Not applicable to tensor glyphs or before a module is attached.
        """
        if ((self.module is None) or (len(self.inputs) == 0) or (self.glyph_type == 'tensor')):
            return
        self._updating = True
        try:
            glyph = self.glyph
            glyph.scale_mode = value
            mm = self.module.module_manager
            # Pick the data range matching what drives the scaling.
            if (glyph.scale_mode == 'scale_by_scalar'):
                glyph.range = tuple(mm.scalar_lut_manager.data_range)
            else:
                glyph.range = tuple(mm.vector_lut_manager.data_range)
        finally:
            self._updating = False
        self.render()
    def _mask_input_points_changed(self, value):
        """Insert or remove the MaskPoints filter ahead of the glyph."""
        inputs = self.inputs
        if (len(inputs) == 0):
            return
        if value:
            # input -> mask -> glyph
            mask = self.mask_points
            self.configure_connection(mask, inputs[0].outputs[0])
            self.configure_connection(self.glyph, mask)
        else:
            # input -> glyph (mask bypassed)
            self.configure_connection(self.glyph, inputs[0])
            self.glyph.update()
    def _glyph_type_changed(self, value):
        """Instantiate the tvtk filter matching the requested glyph type."""
        if (self.glyph_type == 'vector'):
            self.glyph = tvtk.Glyph3D(clamping=True)
        else:
            self.glyph = tvtk.TensorGlyph(scale_factor=0.1)
            # scale_mode has no meaning for tensor glyphs; hide it in the UI.
            self.show_scale_mode = False
        self.glyph.on_trait_change(self.render)
    def _scene_changed(self, old, new):
        super(Glyph, self)._scene_changed(old, new)
        # Keep the glyph source on the same scene as this component.
        self.glyph_source.scene = new
def _get_kwargs(*, client: Client, limit: Union[(Unset, None, int)]=100, offset: Union[(Unset, None, int)]=0, since: Union[(Unset, None, str)]=UNSET, url_query: Union[(Unset, None, str)]=UNSET) -> Dict[(str, Any)]:
    """Assemble the request kwargs for GET {base_url}/logs/.

    Query parameters that are UNSET or None are dropped before sending.
    """
    url = '{}/logs/'.format(client.base_url)
    headers: Dict[(str, str)] = client.get_headers()
    cookies: Dict[(str, Any)] = client.get_cookies()
    raw_params: Dict[(str, Any)] = {
        'limit': limit,
        'offset': offset,
        'since': since,
        'url': url_query,
    }
    # Strip parameters the caller never provided (UNSET) or explicitly nulled.
    query_params = {
        key: val
        for key, val in raw_params.items()
        if val is not UNSET and val is not None
    }
    return {
        'method': 'get',
        'url': url,
        'headers': headers,
        'cookies': cookies,
        'timeout': client.get_timeout(),
        'follow_redirects': client.follow_redirects,
        'params': query_params,
    }
class TreeWithControls(flx.TreeWidget):
    """TreeWidget extended with keyboard navigation and highlighting."""
    def key_down(self, e):
        """Emit the key event; swallow arrow keys so the page doesn't scroll."""
        ev = self._create_key_event(e)
        if ev.key.startswith('Arrow'):
            e.preventDefault()
        return ev
    # NOTE(review): the bare string below looks like a stripped reaction
    # decorator, e.g. @flx.reaction('key_down') — confirm against the
    # original source.
    ('key_down')
    def _handle_highlighting(self, *events):
        """Translate key events into highlight/select/collapse actions."""
        for ev in events:
            if ev.modifiers:
                # Ignore key chords (Ctrl/Alt/Shift combinations).
                continue
            if (ev.key == 'Escape'):
                self.highlight_hide()
            elif (ev.key == ' '):
                if (self.max_selected == 0):
                    # Selection disabled: space toggles the check state.
                    self.highlight_toggle_checked()
                else:
                    self.highlight_toggle_selected()
            elif (ev.key == 'Enter'):
                self.highlight_toggle_checked()
            elif (ev.key == 'ArrowRight'):
                # Expand the highlighted item if it has children.
                item = self.highlight_get()
                if (item and item.items):
                    item.collapsed = None
            elif (ev.key == 'ArrowLeft'):
                # Collapse the highlighted item if it has children.
                item = self.highlight_get()
                if (item and item.items):
                    item.collapsed = True
            elif (ev.key == 'ArrowDown'):
                self.highlight_show(1)
            elif (ev.key == 'ArrowUp'):
                self.highlight_show((- 1))
def profile():
    """Run `test` several times under cProfile, keep the fastest run, and
    dump its stats to a timestamped .prof file."""
    num_attempts = 5
    attempts = []
    for _ in range(num_attempts):
        profiler = Profile()
        start = time.time()
        profiler.runcall(test)
        duration = (time.time() - start)
        attempts.append((duration, profiler))
    # Sort on duration only: a plain tuple sort would fall back to comparing
    # the Profile objects on a duration tie and raise TypeError.
    attempts.sort(key=lambda attempt: attempt[0])
    profile_file = 'profile.{}.prof'.format(datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    best_duration, best_profiler = attempts[0]
    best_profiler.dump_stats(profile_file)
    print('{}s (best of {}), profile saved as: {}'.format(best_duration, num_attempts, profile_file))
class HostCRDNo8080(AmbassadorTest):
    """A Host with `insecure.action: Reject` and only an HTTPS Listener on
    8443: secure requests must work, cleartext must be refused outright.

    NOTE(review): the original chunk contained two unterminated string
    literals (in scheme() and the second query) — a syntax error. They are
    restored below to the only readings consistent with the test: the base
    scheme is 'https', and the cleartext probe uses scheme='http' and
    expects a connection-level failure (no Listener binds port 8080).
    """
    target: ServiceType

    def init(self):
        self.edge_stack_cleartext_host = False
        # NOTE(review): these two identical assignments look like two
        # different attribute names truncated by the same mangling that broke
        # the strings below — confirm against the original source.
        self.add_default_ = False
        self.add_default_ = False
        self.target = HTTP()

    def manifests(self) -> str:
        # TLS secret + HTTPS Listener on 8443 + Host (insecure: Reject) +
        # a Mapping selected by the Host's mappingSelector.
        return (self.format((((('\n---\napiVersion: v1\nkind: Secret\nmetadata:\n name: {self.path.k8s}-secret\n labels:\n kat-ambassador-id: {self.ambassador_id}\ntype: kubernetes.io/tls\ndata:\n tls.crt: ' + TLSCerts['localhost'].k8s_crt) + '\n tls.key: ') + TLSCerts['localhost'].k8s_key) + '\n---\napiVersion: getambassador.io/v3alpha1\nkind: Listener\nmetadata:\n name: {self.path.k8s}-listener\n labels:\n kat-ambassador-id: {self.ambassador_id}\nspec:\n ambassador_id: [ {self.ambassador_id} ]\n port: 8443\n protocol: HTTPS\n securityModel: XFP\n hostBinding:\n namespace:\n from: ALL\n---\napiVersion: getambassador.io/v3alpha1\nkind: Host\nmetadata:\n name: {self.path.k8s}-host\n labels:\n kat-ambassador-id: {self.ambassador_id}\nspec:\n ambassador_id: [ {self.ambassador_id} ]\n hostname: {self.path.fqdn}\n acmeProvider:\n authority: none\n tlsSecret:\n name: {self.path.k8s}-secret\n mappingSelector:\n matchLabels:\n hostname: {self.path.fqdn}\n requestPolicy:\n insecure:\n action: Reject\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nmetadata:\n name: {self.path.k8s}-target-mapping\n labels:\n hostname: {self.path.fqdn}\nspec:\n ambassador_id: [ {self.ambassador_id} ]\n prefix: /target/\n service: {self.target.path.fqdn}\n')) + super().manifests())

    def scheme(self) -> str:
        return 'https'

    def queries(self):
        # The secure request must succeed (self-signed cert, so insecure=True).
        (yield Query(self.url('target/'), insecure=True))
        # Cleartext must fail at the connection level: nothing listens on 8080.
        (yield Query(self.url('target/', scheme='http'), error=['EOF', 'connection refused']))
def build_dummy_structured_environment() -> DummyStructuredEnvironment:
    """Construct a DummyStructuredEnvironment wrapping a dummy core env that
    uses pre-processing observation conversion."""
    obs_conversion = PreProcessingObservationConversion()
    core_env = DummyCoreEnvironment(obs_conversion.space())
    wrapped_env = DummyEnvironment(
        core_env=core_env,
        action_conversion=[DictActionConversion()],
        observation_conversion=[obs_conversion],
    )
    return DummyStructuredEnvironment(maze_env=wrapped_env)
class Animation(object):
    """Assembles a sequence of scene objects into one matplotlib
    FuncAnimation and writes it to a video file.

    Each scene exposes a `frames` count and is callable with a local frame
    index.
    """

    def __init__(self, fps):
        # fps: frames per second for playback and encoding.
        self.fps = fps
        rcParams.update({'font.size': 36})
        self.fig = plt.figure(facecolor='white', edgecolor='white', figsize=(16, 12))
        self.reset()

    def reset(self):
        """Clear the figure and start over with an empty scene list."""
        self.fig.clf()
        ax = self.fig.add_subplot(111)
        ax.axes.get_xaxis().set_visible(False)
        ax.axes.get_yaxis().set_visible(False)
        # Pad the axis limits by half a unit on every side.
        ax.axis(np.add(ax.axis(), [(- 0.5), 0.5, (- 0.5), 0.5]))
        ax.axis('off')
        self.ax = ax
        # Shared caption text artist at the bottom of the figure.
        self.txt = plt.figtext(0.5, 0.1, '', horizontalalignment='center', wrap=True)
        self.scenes = []

    def add_scene(self, scene):
        self.scenes.append(scene)

    def caption(self, text):
        """Set the caption text; return the text artist."""
        self.txt.set_text(text)
        return self.txt

    def frames(self):
        """Total frame count across all scenes."""
        return sum((s.frames for s in self.scenes))

    def __call__(self, i):
        """Render global frame i by delegating to the scene that owns it."""
        ii = i
        for s in self.scenes:
            if (ii < s.frames):
                return s(ii)
            else:
                ii -= s.frames

    def save(self, filename, extra_args=None):
        """Encode all scenes into `filename`.

        The mutable-list default for extra_args was replaced by None to
        avoid cross-call sharing; the default remains H.264 encoding.
        """
        if extra_args is None:
            extra_args = ['-vcodec', 'libx264']
        # FIX: the original passed the bound method `self.frames` as the
        # frame count; FuncAnimation expects a number/iterable, so call it.
        # NOTE(review): `init` is not defined in this chunk — presumably a
        # module-level helper elsewhere in the file; confirm. Also,
        # `interval` is in milliseconds, so 1.0/fps may be a bug
        # (1000.0/fps?) — left unchanged pending confirmation.
        anim = animation.FuncAnimation(self.fig, self, self.frames(), init_func=init, interval=(1.0 / self.fps))
        anim.save(filename, fps=self.fps, extra_args=extra_args)
def _add_group_command(sub: GroupCommand, sub_proc: argparse.ArgumentParser) -> None:
    """Register each of a group command's subcommands, alphabetically, on a
    required 'COMMAND' subparser of the given parser."""
    nested = sub_proc.add_subparsers(dest='subcommand', metavar='COMMAND')
    nested.required = True
    for child in sorted(sub.subcommands, key=(lambda cmd: cmd.name)):
        _add_command(nested, child)
class HeuristicLunarLanderPolicy(Policy):
    """Hand-coded PD-style heuristic policy for LunarLander-v2.

    NOTE(review): the bare `(Policy)` lines between methods look like
    stripped decorators (e.g. @override(Policy)) — confirm against the
    original source.
    """
    def __init__(self):
        self.action_space = gym.make('LunarLander-v2').action_space
    (Policy)
    def needs_state(self) -> bool:
        """This policy acts on the observation only, not the env state."""
        return False
    (Policy)
    def seed(self, seed: int) -> None:
        # Deterministic heuristic — nothing to seed.
        pass
    (Policy)
    def compute_action(self, observation: ObservationType, maze_state: Optional[MazeStateType]=None, env: Optional[BaseEnv]=None, actor_id: ActorID=None, deterministic: bool=False) -> ActionType:
        """Pick one of the 4 discrete actions from the observation vector.

        Assumes `s` follows LunarLander's observation layout (x, y, vx, vy,
        angle, angular velocity, left/right leg contact) — presumably;
        confirm against the gym environment.
        """
        s = observation['observation']
        # Target angle proportional to horizontal offset and velocity...
        angle_targ = ((s[0] * 0.5) + (s[2] * 1.0))
        # ...clamped to +/- 0.4 rad.
        if (angle_targ > 0.4):
            angle_targ = 0.4
        if (angle_targ < (- 0.4)):
            angle_targ = (- 0.4)
        # Hover higher when further from the center.
        hover_targ = (0.55 * np.abs(s[0]))
        # PD-style corrections for angle and altitude.
        angle_todo = (((angle_targ - s[4]) * 0.5) - (s[5] * 1.0))
        hover_todo = (((hover_targ - s[1]) * 0.5) - (s[3] * 0.5))
        if (s[6] or s[7]):
            # A leg touched down: stop steering, just damp vertical speed.
            angle_todo = 0
            hover_todo = ((- s[3]) * 0.5)
        # Action 0 = noop, 1/3 = side engines, 2 = main engine.
        a = 0
        if ((hover_todo > np.abs(angle_todo)) and (hover_todo > 0.05)):
            a = 2
        elif (angle_todo < (- 0.05)):
            a = 3
        elif (angle_todo > (+ 0.05)):
            a = 1
        return {'action': a}
    (Policy)
    def compute_top_action_candidates(self, observation: ObservationType, num_candidates: Optional[int], maze_state: Optional[MazeStateType], env: Optional[BaseEnv], actor_id: Union[(str, int)]=None) -> Tuple[(Sequence[ActionType], Sequence[float])]:
        # Candidate ranking is not meaningful for this heuristic.
        raise NotImplementedError
def test_mismatch():
    """Re-parse the known-mismatch fixtures with TPN and report how many
    still disagree with the expected (vol, chp, frag, postfix) tuples."""
    test_data = load_test_data(only_mismatch=True)
    total = 0
    mismatched = 0
    for key, expected in test_data:
        parsed = TPN(key)
        vol = parsed.getVolume()
        chp = parsed.getChapter()
        frag = parsed.getFragment()
        post = parsed.getPostfix()
        if len(expected) == 4:
            e_vol, e_chp, e_frag, e_post = expected
            # An expected chapter of 0.0 means "no chapter".
            if e_chp == 0.0 and chp is None:
                e_chp = None
            bad = False
            if vol != e_vol or chp != e_chp or frag != e_frag:
                bad = True
                print(parsed)
                print('Parsed: v{}, c{}, f{}'.format(vol, chp, frag))
                print('Expect: v{}, c{}, f{}'.format(e_vol, e_chp, e_frag))
                print()
            if e_post != post:
                bad = True
                print(parsed)
                print('Post mismatch - Parsed: {}'.format(post))
                print('Post mismatch - Expect: {}'.format(e_post))
            if bad:
                mismatched += 1
        total += 1
    print('{} Items with parsed output'.format(total))
    print('{} Items mismatch in new parser'.format(mismatched))
    print('Total items: {}'.format(len(test_data)))
# NOTE(review): the line below is not valid Python as written — it is almost
# certainly a stripped routing decorator, e.g.
# @app.route('/settings', methods=['GET', 'POST']) — confirm against the
# original module before relying on this file.
('/settings', methods=['GET', 'POST'])
def settings():
    """Render the settings page from config.toml; on POST, apply the
    submitted form values to the config first."""
    config_load = tomlkit.loads(Path('config.toml').read_text())
    config = gui.get_config(config_load)
    checks = gui.get_checks()
    if (request.method == 'POST'):
        # Apply the submitted form values before re-rendering.
        data = request.form.to_dict()
        config = gui.modify_settings(data, config_load, checks)
    return render_template('settings.html', file='config.toml', data=config, checks=checks)
def convert_to_partial_task(task=None):
    """Return a reduced row (id, name, entity_type, status_id, has_children,
    resources) for the given task, or None when no row matches.

    NOTE(review): task=None is the declared default, but `task.id` below
    would raise AttributeError on None — callers presumably always pass a
    task; confirm.
    """
    # EXISTS subquery: is any task's parent_id this task's id (i.e. children)?
    inner_tasks = aliased(Task)
    subquery = DBSession.query(Task.id).filter((Task.id == inner_tasks.parent_id))
    # Aggregate resource user names per task via the Task_Resources table.
    return DBSession.query(Task.id, Task.name, Task.entity_type, Task.status_id, subquery.exists().label('has_children'), array_agg(User.name).label('resources')).outerjoin(Task_Resources, (Task.__table__.c.id == Task_Resources.c.task_id)).outerjoin(User, (Task_Resources.c.resource_id == User.id)).group_by(Task.id, Task.name, Task.entity_type, Task.status_id, subquery.exists().label('has_children')).filter((Task.id == task.id)).first()
class _TestConBuff(BaseBuff):
    """Conditional test buff: active only while owner.db.cond1 is truthy;
    records attacker and damage on the defender when 'condtest' fires."""
    key = 'tcb'
    name = 'tcb'
    flavor = 'condbuff'
    triggers = ['condtest']

    def conditional(self, *args, **kwargs):
        # The buff applies only while the owner's cond1 flag holds.
        flag = self.owner.db.cond1
        return flag

    def at_trigger(self, trigger: str, attacker=None, defender=None, damage=0, *args, **kwargs):
        # Stamp the triggering attack's details onto the defender.
        defender.db.att = attacker
        defender.db.dmg = damage
# NOTE(review): `_frozen_dataclass_decorator` appeared as a bare expression
# above the class (a no-op); restored as a decorator, which is the only
# reading under which it has any effect.
@_frozen_dataclass_decorator
class DeviceInfo(APIModelBase):
    """Static information reported by a device."""

    uses_password: bool = False
    name: str = ''
    friendly_name: str = ''
    mac_address: str = ''
    compilation_time: str = ''
    model: str = ''
    manufacturer: str = ''
    has_deep_sleep: bool = False
    esphome_version: str = ''
    project_name: str = ''
    project_version: str = ''
    webserver_port: int = 0
    voice_assistant_version: int = 0
    legacy_bluetooth_proxy_version: int = 0
    bluetooth_proxy_feature_flags: int = 0
    suggested_area: str = ''

    def bluetooth_proxy_feature_flags_compat(self, api_version: APIVersion) -> int:
        """Return the Bluetooth-proxy feature flags.

        For API versions older than 1.9 the flags are synthesized from the
        legacy version field; each version level adds one more capability,
        cumulatively.
        """
        if (api_version < APIVersion(1, 9)):
            flags: int = 0
            if (self.legacy_bluetooth_proxy_version >= 1):
                flags |= BluetoothProxyFeature.PASSIVE_SCAN
            if (self.legacy_bluetooth_proxy_version >= 2):
                flags |= BluetoothProxyFeature.ACTIVE_CONNECTIONS
            if (self.legacy_bluetooth_proxy_version >= 3):
                flags |= BluetoothProxyFeature.REMOTE_CACHING
            if (self.legacy_bluetooth_proxy_version >= 4):
                flags |= BluetoothProxyFeature.PAIRING
            if (self.legacy_bluetooth_proxy_version >= 5):
                flags |= BluetoothProxyFeature.CACHE_CLEARING
            return flags
        return self.bluetooth_proxy_feature_flags
# NOTE(review): the line below is not valid Python as written — it looks
# like a stripped attrs class decorator, e.g.
# @attr.s(frozen=True, eq=False, order=False, hash=False, repr=True)
# (the class uses attr.ib fields) — confirm against the original module.
(frozen=True, eq=False, order=False, hash=False, repr=True)
class UserSettings(MethodView):
    """GET/POST view for the user's general settings page."""
    form = attr.ib(factory=settings_form_factory)
    settings_update_handler = attr.ib(factory=settings_update_handler)
    decorators = [login_required]
    def get(self):
        return self.render()
    def post(self):
        """Apply a validated settings change; re-render on any failure."""
        if self.form.validate_on_submit():
            try:
                self.settings_update_handler.apply_changeset(current_user, self.form.as_change())
            except StopValidation as e:
                # Domain-level validation failed: surface reasons on the form.
                self.form.populate_errors(e.reasons)
                return self.render()
            except PersistenceError:
                logger.exception('Error while updating user settings')
                flash(_('Error while updating user settings'), 'danger')
                return self.redirect()
            flash(_('Settings updated.'), 'success')
            return self.redirect()
        return self.render()
    def render(self):
        return render_template('user/general_settings.html', form=self.form)
    def redirect(self):
        return redirect(url_for('user.settings'))
def test_queue_list(tmp_path, capsys):
    """`queue list` prints each queue's jobs plus a total, and must not
    modify the queue file."""
    cli_args = helpers.setup_temp_env(tmp_path)
    reqid = 'req-compile-bench--nobody-mac'
    queue_file = tmp_path / 'BENCH' / 'QUEUES' / 'mac' / 'queue.json'
    queue_file.write_text(json.dumps({'jobs': [reqid], 'paused': False}))
    __main__._parse_and_main([*cli_args, 'queue', 'list'], __file__)
    # Listing is read-only: the queue file must be unchanged.
    assert json.loads(queue_file.read_text())['jobs'] == [reqid]
    captured = capsys.readouterr()
    expected = textwrap.dedent('\n Queue (linux)\n no jobs queued\n Queue (mac)\n 1 req-compile-bench--nobody-mac\n\n (total: 1)\n ').strip()
    assert captured.out.strip() == expected
class TestClassLoadingFunctions(object):
    """Exercises the dynamic class-loading helpers (get_class/get_classes),
    including overridden-app and error scenarios."""

    def test_can_load_a_single_class(self):
        feed_cls = get_class('forum_feeds.feeds', 'LastTopicsFeed')
        assert feed_cls.__module__ == 'machina.apps.forum_feeds.feeds'

    def test_can_load_many_classes(self):
        post_form, topic_form = get_classes('forum_conversation.forms', ['PostForm', 'TopicForm'])
        assert post_form.__module__ == 'machina.apps.forum_conversation.forms'
        assert topic_form.__module__ == 'machina.apps.forum_conversation.forms'

    def test_raises_if_the_module_label_is_incorrect(self):
        with pytest.raises(AppNotFoundError):
            get_class('foo.bar', 'Forum')

    def test_raises_if_the_class_name_is_incorrect(self):
        with pytest.raises(ClassNotFoundError):
            get_class('forum.models', 'Foo')

    def test_get_class_with_app_config(self):
        # Point the installed app at its explicit AppConfig path and retry.
        installed = list(settings.INSTALLED_APPS)
        pos = installed.index('tests._testsite.apps.forum_conversation')
        installed[pos] += '.apps.ForumConversationAppConfig'
        with override_settings(INSTALLED_APPS=installed):
            get_class('forum_conversation.models', 'Post')

    def test_raise_importerror_if_app_raises_importerror(self):
        # Swap in an app whose import deliberately fails.
        installed = list(settings.INSTALLED_APPS)
        installed[installed.index('machina.apps.forum')] = 'tests._testsite.importerror_app.forum'
        with override_settings(INSTALLED_APPS=installed):
            with pytest.raises(ImportError):
                get_class('forum.dummy', 'Dummy')

    def test_raise_importerror_if_the_app_is_installed_but_the_module_does_not_exist(self):
        with pytest.raises(AppNotFoundError):
            get_class('forum.xyz', 'Xyz')
# NOTE(review): the bare leading `.parametrize(...)` was a mangled
# `@pytest.mark.parametrize` decorator (the same stripping pattern appears on
# other tests in this file) — restored so the module parses.
@pytest.mark.parametrize('step_length', [0.1, 0.2])
def test_mb_eulerpc(step_length):
    """EulerPC IRC integration on the Müller-Brown surface must end near the
    two known minima, for either step length."""
    calc = MullerBrownPot()
    geom = calc.get_saddles(0)
    irc_kwargs = {'step_length': step_length}
    irc = EulerPC(geom, **irc_kwargs)
    irc.run()
    # Endpoints of the IRC in the forward and backward directions.
    forward_coords = irc.all_coords[0]
    backward_coords = irc.all_coords[(- 1)]
    # Reference minima of the Müller-Brown potential.
    assert (np.linalg.norm((forward_coords - ((- 0.558), 1.441, 0.0))) <= 0.02)
    assert (np.linalg.norm((backward_coords - ((- 0.05), 0.466, 0.0))) <= 0.005)
class InterComListener(InterComRedisInterface):
    """Consumes tasks from the redis queue for its CONNECTION_TYPE and runs
    each one through post-processing before handing it out."""

    CONNECTION_TYPE = 'test'

    def get_next_task(self):
        """Pop the next task from the queue; None when empty or on error."""
        try:
            queue_entry = self.redis.queue_get(self.CONNECTION_TYPE)
        except RedisError as exc:
            logging.error(f'Could not get next task: {exc!s}', exc_info=True)
            return None
        if queue_entry is None:
            return None
        raw_task, task_id = queue_entry
        processed = self.post_processing(raw_task, task_id)
        logging.debug(f'{self.CONNECTION_TYPE}: New task received: {processed}')
        return processed

    def post_processing(self, task, task_id):
        """Subclass hook; the base implementation returns the task unchanged."""
        return task
class TestFingerprintAgent(AEATestCaseEmpty):
    """Checks that `aea fingerprint` recalculates hashes and that the
    project check catches fingerprint drift."""

    def test_fingerprint(self):
        result = self.invoke('fingerprint')
        assert 'calculated' in result.stdout
        # Build a click context for the agent dir, skipping the consistency
        # check so only fingerprints are validated.
        click_context = MagicMock()
        click_context.obj = Context(self._get_cwd(), '', registry_path=None)
        click_context.obj.config['skip_consistency_check'] = True
        _check_aea_project([click_context], check_finger_prints=True)
        # An unexpected file must break the fingerprint check...
        (Path(self._get_cwd()) / 'some_file.txt').write_text('sdfds')
        with pytest.raises(ClickException, match='Fingerprints for package .* do not match'):
            _check_aea_project([click_context], check_finger_prints=True)
        # ...until fingerprints are recalculated.
        self.invoke('fingerprint')
        _check_aea_project([click_context], check_finger_prints=True)
def mgmt_worker_start(sender):
    """Worker-start hook: dispatch the management startup task and the
    status-check task onto the management queue, in that order."""
    for task_name in (MGMT_STARTUP_TASK, STATUS_CHECK_TASK):
        task_id = generate_internal_task_id()
        cq.send_task(task_name, args=(task_id,), task_id=task_id, queue=Q_MGMT)
class OptionPlotoptionsBubbleSonificationDefaultspeechoptionsMappingVolume(Options):
    """Generated accessor class for the bubble sonification default-speech
    volume mapping options.

    NOTE(review): each option appears as a same-named getter/setter pair, so
    as written the second definition shadows the first. This pattern
    normally carries @property / @<name>.setter decorators, which appear to
    have been stripped from this copy — confirm against the generator
    output.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TLSOriginationSecret(AmbassadorTest):
    """Tests upstream TLS origination configured both from a Kubernetes
    secret and from the decoded on-disk files of that secret."""
    def init(self):
        self.xfail = 'FIXME: IHA'
        self.target = HTTP()
    def manifests(self) -> str:
        # TLS secret holding the upstream client cert/key.
        return (f'''
---
apiVersion: v1
kind: Secret
metadata:
name: test-origination-secret
labels:
kat-ambassador-id: tlsoriginationsecret
type: kubernetes.io/tls
data:
tls.crt: {TLSCerts['localhost'].k8s_crt}
tls.key: {TLSCerts['localhost'].k8s_key}
''' + super().manifests())
    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # The decoded secret files on disk are named after the SHA-1 of the
        # combined cert+key material.
        fingerprint = hashlib.sha1((((TLSCerts['localhost'].pubcert + '\n') + TLSCerts['localhost'].privkey) + '\n').encode('utf-8')).hexdigest().upper()
        # TLS module with two origination contexts: one by secret name, one
        # by explicit file paths into the decoded snapshot.
        (yield (self, f'''
---
apiVersion: getambassador.io/v3alpha1
kind: Module
ambassador_id: [{self.ambassador_id}]
name: tls
config:
upstream:
secret: test-origination-secret
upstream-files:
cert_chain_file: /tmp/ambassador/snapshots/default/secrets-decoded/test-origination-secret/{fingerprint}.crt
private_key_file: /tmp/ambassador/snapshots/default/secrets-decoded/test-origination-secret/{fingerprint}.key
'''))
        # One Mapping per origination context.
        (yield (self, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.target.path.k8s}\nprefix: /{self.name}/\nservice: {self.target.path.fqdn}\ntls: upstream\n')))
        (yield (self, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.target.path.k8s}-files\nprefix: /{self.name}-files/\nservice: {self.target.path.fqdn}\ntls: upstream-files\n')))
    def queries(self):
        (yield Query(self.url((self.name + '/'))))
        (yield Query(self.url((self.name + '-files/'))))
    def check(self):
        # Both routes must have reached the backend over TLS.
        for r in self.results:
            assert r.backend
            assert r.backend.request
            assert r.backend.request.tls.enabled
class OptionSeriesFunnelOnpointPosition(Options):
    """Generated accessor class for funnel-series on-point position options.

    NOTE(review): each option appears as a same-named getter/setter pair, so
    as written the second definition shadows the first. This pattern
    normally carries @property / @<name>.setter decorators, which appear to
    have been stripped from this copy — confirm against the generator
    output.
    """
    def offsetX(self):
        return self._config_get(None)
    def offsetX(self, num: float):
        self._config(num, js_type=False)
    def offsetY(self):
        return self._config_get(None)
    def offsetY(self, num: float):
        self._config(num, js_type=False)
    def x(self):
        return self._config_get(None)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(None)
    def y(self, num: float):
        self._config(num, js_type=False)
def main():
    """Interactive ZMQ test client: load config, send a user-chosen number
    of inference requests, then print incoming results forever."""
    with open('../config.yaml') as f:
        config_dict = yaml.full_load(f)
    config = dm.Config(**config_dict)
    ctx = zmq.Context()
    # PUSH socket feeds requests into the pipeline input...
    sock_sender = ctx.socket(zmq.PUSH)
    sock_sender.connect(config.zmq_input_address)
    # ...PULL socket receives the processed results.
    sock_receiver = ctx.socket(zmq.PULL)
    sock_receiver.bind(config.zmq_output_address)
    model_num = int(input(f'''Choose model
1: {stub_model}
2: {stateful_model}: '''))
    if (model_num == 1):
        model = stub_model
    else:
        # Any answer other than 1 falls through to the stateful model.
        model = stateful_model
    number_of_request = int(input('Write number of requests: '))
    uid_generator = uuid4_string_generator()
    # Send identical dummy requests (input = 0..9) with fresh uids.
    for _ in range(number_of_request):
        request_info = dm.RequestInfo(input=np.array(range(10)), parameters={})
        req = dm.RequestObject(uid=next(uid_generator), request_info=request_info, source_id='test_client_1', model=model)
        sock_sender.send_pyobj(req)
    # NOTE(review): 'listenning' typo is in the runtime output string and is
    # deliberately left untouched here.
    print('Start listenning')
    # Blocks forever; intended to be interrupted manually.
    while True:
        result = sock_receiver.recv_pyobj()
        print(f'Result batch {result}')
def test_fill_recursive_config():
    """_fill must merge schema defaults into nested (two-level) config
    sections, including one that is absent entirely."""
    supplied = {'outer_req': 1, 'level2_req': {'hello': 4, 'world': 7}}
    filled, _, validation = my_registry._fill(supplied, ComplexSchema)
    # Required values pass through untouched.
    assert filled['outer_req'] == 1
    assert filled['level2_req']['hello'] == 4
    assert filled['level2_req']['world'] == 7
    # Missing optional values are populated from schema defaults.
    assert filled['outer_opt'] == 'default value'
    assert filled['level2_opt']['required'] == 1
    assert filled['level2_opt']['optional'] == 'default value'
def main() -> None:
    """CLI entry point: parse GEMM benchmark dimensions/options and run."""
    import argparse
    parser = argparse.ArgumentParser(description='Measure and compare the performance of GEMM cuBlas and cuTlass')
    # Matrix dimensions, all defaulting to 1024.
    for short_flag, long_name in (('-m', 'msize'), ('-n', 'nsize'), ('-k', 'ksize')):
        parser.add_argument(short_flag, f'--{long_name}', type=int, default=1024)
    parser.add_argument('-t', '--dtype', type=str, default='float32')
    parser.add_argument('--steps', type=int, default=100)
    parser.add_argument('--warmups', type=int, default=10)
    args = parser.parse_args()
    dims = [(args.msize, args.nsize, args.ksize)]
    run(args, dims)
def test_transform_method(df_vartypes, df_na):
    """_check_transform_input_and_state must restore the fitted column order
    and reject NA/inf data and missing columns."""
    transformer = MockClass()
    transformer.fit(df_vartypes)
    # Output equals the fitted frame, even when columns arrive shuffled.
    assert_frame_equal(transformer._check_transform_input_and_state(df_vartypes), df_vartypes)
    shuffled = df_vartypes[['City', 'Age', 'Name', 'Marks', 'dob']]
    assert_frame_equal(transformer._check_transform_input_and_state(shuffled), df_vartypes)
    # NA values are rejected at fit time...
    with pytest.raises(ValueError):
        transformer.fit(df_na)
    # ...and so are infinite values.
    df_na = df_na.fillna(inf)
    with pytest.raises(ValueError):
        assert transformer.fit(df_na)
    # Transform input missing fitted columns is rejected.
    with pytest.raises(ValueError):
        assert transformer._check_transform_input_and_state(df_vartypes[['Age', 'Marks']])
class GEMMBiasTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(GEMMBiasTestCase, self).__init__(*args, **kwargs)
self._test_id = 0
def _test_rcr(self, Ms, N, K, test_name, dtype='float16', allow_sm90=False, force_sm90=False):
target = detect_target(allow_cutlass_sm90=allow_sm90, force_cutlass_sm90=force_sm90)
tolerance_limits = _TOLERANCE_LIMITS[dtype]
MDim = shape_utils.gen_int_var_min_max(Ms, name='m')
X = Tensor(shape=[MDim, IntImm(K)], dtype=dtype, name='input_0', is_input=True)
W = Tensor(shape=[IntImm(N), IntImm(K)], dtype=dtype, name='input_1', is_input=True)
B = Tensor(shape=[IntImm(N)], dtype=dtype, name='input_2', is_input=True)
OP = ops.gemm_rcr_bias()
Y = OP(X, W, B)
Y._attrs['name'] = 'output_0'
Y._attrs['is_output'] = True
module = compile_model(Y, target, './tmp', f'gemm_rcr_bias_{test_name}_{self._test_id}')
self._test_id += 1
for M in Ms:
X_pt = get_random_torch_tensor([M, K], dtype)
W_pt = get_random_torch_tensor([N, K], dtype)
B_pt = get_random_torch_tensor([N], dtype)
Y_pt = torch.nn.functional.linear(X_pt, W_pt, bias=B_pt)
y = get_torch_empty_tensor([M, N], dtype)
module.run_with_tensors({'input_0': X_pt, 'input_1': W_pt, 'input_2': B_pt}, [y])
if ((X_pt.nelement() == 0) or (W_pt.nelement() == 0)):
pass
else:
torch.testing.assert_close(Y_pt, y, **tolerance_limits)
def test_rcr_zero_size(self):
target = detect_target()
if (type(target).__name__ != 'FBCUDA'):
self._test_rcr([2], N=64, K=0, test_name='zero_k')
self._test_rcr([2], N=0, K=4, test_name='zero_n')
self._test_rcr([0], N=4, K=4, test_name='zero_m')
def test_rcr_static(self):
self._test_rcr([4096], N=4, K=4, test_name='static')
self._test_rcr([1000], N=81, K=1024, test_name='static')
self._test_rcr([67200], N=3, K=256, test_name='static')
def test_rcr_static_rocm(self):
self._test_rcr([4096], N=4, K=4, test_name='static')
self._test_rcr([1000], N=81, K=1024, test_name='static')
self._test_rcr([67200], N=3, K=256, test_name='static')
def test_rcr_bfloat16_bf16(self):
dtype = 'bfloat16'
self._test_rcr([4], N=2, K=11, test_name=f'static_{dtype}', dtype=dtype)
self._test_rcr([128], N=64, K=1024, test_name=f'static_{dtype}', dtype=dtype)
self._test_rcr([1, 7, 64, 127], N=64, K=1024, test_name=f'dynamic_m_{dtype}', dtype=dtype)
def test_rcr_sm90(self) -> None:
    """Exercise gemm_rcr_bias with CUTLASS SM90 kernels allowed and forced.

    NOTE(review): indentation was reconstructed from a flattened source.  The
    three trailing calls are assumed to run inside the forced-SM90 env block
    but outside the assertRaises context — confirm against upstream.
    """
    with env_variables(INSIDE_RE_WORKER='1', FORCE_PROFILE='1'):
        # Allowing / forcing SM90 via the compile flags should both succeed
        # for an aligned K.
        self._test_rcr(Ms=[128], N=32, K=32, test_name='target_fp16_allow_sm90', dtype='float16', allow_sm90=True)
        self._test_rcr(Ms=[128], N=32, K=32, test_name='target_fp16_force_sm90', dtype='float16', force_sm90=True)
    with env_variables(AIT_FORCE_CUTLASS_SM90_KERNELS='1', INSIDE_RE_WORKER='1', FORCE_PROFILE='1'):
        with self.assertRaisesRegex(expected_exception=RuntimeError, expected_regex='No GEMM op instances are left after filtering'):
            # K=28 violates SM90 alignment requirements, so every op instance
            # is filtered out and profiling must fail.
            self._test_rcr(Ms=[128], N=32, K=28, test_name='wrong_alignment_force_sm90', dtype='float16')
        self._test_rcr(Ms=[128], N=32, K=32, test_name='static_fp16_force_sm90', dtype='float16')
        self._test_rcr(Ms=[128], N=32, K=32, test_name='static_fp32_force_sm90', dtype='float32')
        self._test_rcr(Ms=[128], N=32, K=32, test_name='static_bf16_force_sm90', dtype='bfloat16')
def _test_rrr(self, Ms, N, K, test_name, dtype='float16'):
    """Compile gemm_rrr_bias for the given shapes and compare against PyTorch.

    For each M in Ms, runs the compiled module on random inputs and checks the
    result against torch.nn.functional.linear within the dtype's tolerance.
    """
    target = detect_target()
    tolerance_limits = _TOLERANCE_LIMITS[dtype]
    m_dim = shape_utils.gen_int_var_min_max(Ms, name='m')
    X = Tensor(shape=[m_dim, IntImm(K)], dtype=dtype, name='input_0', is_input=True)
    W = Tensor(shape=[IntImm(K), IntImm(N)], dtype=dtype, name='input_1', is_input=True)
    B = Tensor(shape=[IntImm(N)], dtype=dtype, name='input_2', is_input=True)
    Y = ops.gemm_rrr_bias()(X, W, B)
    Y._attrs['name'] = 'output_0'
    Y._attrs['is_output'] = True
    module = compile_model(Y, target, './tmp', f'gemm_rrr_bias_{test_name}_{self._test_id}')
    self._test_id += 1
    for M in Ms:
        X_pt = get_random_torch_tensor([M, K], dtype)
        W_pt = get_random_torch_tensor([N, K], dtype)
        B_pt = get_random_torch_tensor([N], dtype)
        Y_pt = torch.nn.functional.linear(X_pt, W_pt, bias=B_pt)
        # The AIT op expects the row-major (K, N) weight, i.e. W_pt transposed.
        W_transposed = torch.transpose(W_pt, 0, 1).contiguous()
        y = get_torch_empty_tensor([M, N], dtype)
        module.run_with_tensors({'input_0': X_pt, 'input_1': W_transposed, 'input_2': B_pt}, [y])
        # Skip the comparison when either operand is empty.
        if X_pt.nelement() != 0 and W_pt.nelement() != 0:
            torch.testing.assert_close(Y_pt, y, **tolerance_limits)
def test_rrr_zero_size(self):
    """gemm_rrr_bias with a zero-sized M, N or K dimension.

    Skipped on FBCUDA targets, which do not support zero-size operands here.
    """
    target = detect_target()
    if (type(target).__name__ == 'FBCUDA'):
        return
    self._test_rrr([2], N=64, K=0, test_name='zero_k')
    self._test_rrr([2], N=0, K=4, test_name='zero_n')
    self._test_rrr([0], N=4, K=4, test_name='zero_m')
def test_rrr_static(self):
    """Static-M gemm_rrr_bias over several (M, N, K) problem sizes."""
    for m, n, k in ((4096, 4, 4), (1000, 81, 1024), (67200, 3, 256)):
        self._test_rrr([m], N=n, K=k, test_name='static')
def test_rrr_static_rocm(self):
    """Same static gemm_rrr_bias shapes as test_rrr_static, for the ROCm target."""
    for m, n, k in ((4096, 4, 4), (1000, 81, 1024), (67200, 3, 256)):
        self._test_rrr([m], N=n, K=k, test_name='static')
def test_rrr_bfloat16_bf16(self):
    """gemm_rrr_bias in bfloat16, with both static and dynamic M."""
    dtype = 'bfloat16'
    cases = (
        ([4], 2, 11, f'static_{dtype}'),
        ([128], 64, 1024, f'static_{dtype}'),
        ([1, 7, 64, 127], 64, 1024, f'dynamic_m_{dtype}'),
    )
    for ms, n, k, name in cases:
        self._test_rrr(ms, N=n, K=k, test_name=name, dtype=dtype)
class ECoachingScores(object):
    """Swagger-generated model for a single eco-coaching score entry.

    Bug fix: the getter/setter pairs for ``category`` and ``score`` were
    defined as plain duplicate methods without ``@property`` decorators, so
    the setter shadowed the getter and attribute assignment bypassed
    validation entirely. The decorators are restored here. ``six.iteritems``
    was also replaced with the plain Python 3 ``dict.items``.
    """

    # attribute name -> swagger type, and attribute name -> JSON key
    swagger_types = {'category': 'str', 'score': 'float'}
    attribute_map = {'category': 'category', 'score': 'score'}

    def __init__(self, category=None, score=None):
        self._category = None
        self._score = None
        self.discriminator = None
        # Route through the validating properties so bad input raises early.
        if (category is not None):
            self.category = category
        if (score is not None):
            self.score = score

    @property
    def category(self):
        """Category label this score applies to."""
        return self._category

    @category.setter
    def category(self, category):
        """Set the category; raises ValueError for unknown labels."""
        allowed_values = ['Global', 'Acceleration', 'Break', 'AirCondioner', 'ColdEngine', 'TirePressure', 'Slope', 'Speed', 'StartStop']
        if (category not in allowed_values):
            raise ValueError('Invalid value for `category` ({0}), must be one of {1}'.format(category, allowed_values))
        self._category = category

    @property
    def score(self):
        """Score value, constrained to the inclusive range [0, 10]."""
        return self._score

    @score.setter
    def score(self, score):
        """Set the score; raises ValueError when outside [0, 10]."""
        if ((score is not None) and (score > 10)):
            raise ValueError('Invalid value for `score`, must be a value less than or equal to `10`')
        if ((score is not None) and (score < 0)):
            raise ValueError('Invalid value for `score`, must be a value greater than or equal to `0`')
        self._score = score

    def to_dict(self):
        """Return the model's properties as a plain dict."""
        result = {}
        for (attr, _) in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(ECoachingScores, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        # Equality is full attribute equality against another ECoachingScores.
        if (not isinstance(other, ECoachingScores)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
class TestIPAddressField(FieldValues):
    """Valid and invalid values for `IPAddressField` (both IPv4 and IPv6)."""

    valid_inputs = {
        '127.0.0.1': '127.0.0.1',
        '192.168.33.255': '192.168.33.255',
        # IPv6 addresses are normalized to their compressed form.
        '2001:0db8:85a3:0042:1000:8a2e:0370:7334': '2001:db8:85a3:42:1000:8a2e:370:7334',
        '2001:cdba:0:0:0:0:3257:9652': '2001:cdba::3257:9652',
        '2001:cdba::3257:9652': '2001:cdba::3257:9652',
    }
    invalid_inputs = {
        '127001': ['Enter a valid IPv4 or IPv6 address.'],
        '127.': ['Enter a valid IPv4 or IPv6 address.'],
        '2001:::9652': ['Enter a valid IPv4 or IPv6 address.'],
        '2001:0db8:85a3:0042:1000:8a2e:0370:73341': ['Enter a valid IPv4 or IPv6 address.'],
        1000: ['Enter a valid IPv4 or IPv6 address.'],
    }
    outputs = {}
    field = serializers.IPAddressField()
_frequency(timedelta(days=2))
def fetch_wind_solar_forecasts(zone_key: ZoneKey, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list:
    """Fetch the ENTSOE wind/solar production forecast for a zone.

    Raises ParserException when the query fails or returns no data;
    otherwise returns the parsed forecast as a list.
    """
    session = session or Session()
    domain = ENTSOE_DOMAIN_MAPPINGS[zone_key]
    try:
        raw_forecast = query_wind_solar_production_forecast(domain, session, target_datetime=target_datetime)
    except Exception as e:
        raise ParserException(parser='ENTSOE.py', message=f'Failed to fetch renewable forecast for {zone_key}', zone_key=zone_key) from e
    if raw_forecast is None:
        raise ParserException(parser='ENTSOE.py', message=f'No production per mode forecast data found for {zone_key}', zone_key=zone_key)
    return parse_production(raw_forecast, logger, zone_key, forecasted=True).to_list()
class OptionPlotoptionsPieSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Options for the tremolo-depth mapping of a sonification instrument.

    Bug fix: each getter/setter pair was defined as two plain methods with the
    same name and no decorators, so the setter silently shadowed the getter.
    The standard ``@property`` / ``@<name>.setter`` pairs are restored; every
    option is backed by the shared config store (``_config_get`` / ``_config``).
    """

    @property
    def mapFunction(self):
        """Mapping function for the parameter; no default configured."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map the parameter to; no default configured."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range; no default configured."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range; no default configured."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the mapped values are computed within; no default configured."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_extend_appropriately_extends_memory(memory):
    """Memory grows in 32-byte words and never shrinks."""
    cases = (
        (0, 10, 32),   # within the first word -> one word allocated
        (30, 32, 64),  # crosses the word boundary -> grows to two words
        (48, 10, 64),  # already covered -> size unchanged
    )
    for start, size, expected_len in cases:
        memory.extend(start_position=start, size=size)
        assert memory._bytes == bytearray(expected_len)
def missing_widevine_libs():
    """Return the list of shared libraries the Widevine CDM cannot resolve.

    Uses ``ldd`` to inspect the CDM binary on Linux. Returns None on
    non-Linux systems, when nothing is missing, or when the check could
    not be performed (no ``ldd`` or ``ldd`` failed).
    """
    if (system_os() != 'Linux'):
        # The ldd-based check only applies to Linux.
        return None
    if cmd_exists('ldd'):
        widevinecdm = widevinecdm_path()
        if (not os.access(widevinecdm, os.X_OK)):
            log(0, 'Changing {path} permissions to 744.', path=widevinecdm)
            # was the magic decimal 484, which is exactly 0o744 (rwxr--r--)
            os.chmod(widevinecdm, 0o744)
        missing_libs = []
        output = run_cmd(['ldd', widevinecdm], sudo=False)
        if output['success']:
            for line in output['output'].splitlines():
                text = str(line)
                # ldd prints resolved deps as "libfoo.so => /path (addr)";
                # unresolved ones as "libfoo.so => not found".
                if ('=>' not in text):
                    continue
                (lib, _sep, path) = text.strip().partition('=>')
                if (path.strip() == 'not found'):
                    missing_libs.append(lib.strip())
            if missing_libs:
                log(4, 'Widevine is missing the following libraries: {libs}', libs=missing_libs)
                return missing_libs
            log(0, 'There are no missing Widevine libraries! :-)')
            return None
    # Reached when ldd is unavailable or the ldd invocation failed.
    log(4, 'Failed to check for missing Widevine libraries.')
    return None
_events
class EventObject(DefaultObject):
    """Object typeclass wired into the in-game event/callback system.

    NOTE(review): the bare ``_property`` line below looks like a mangled
    decorator (probably ``@lazy_property``) left over from an automated
    transform — confirm against the upstream source. Likewise the stray
    ``_events`` line just above this class in the file.
    """
    # Event name -> (callback argument names, help text[, extras]).
    _events = {'drop': (['character', 'obj'], OBJECT_DROP), 'get': (['character', 'obj'], OBJECT_GET), 'time': (['object'], OBJECT_TIME, None, time_event)}
    _property
    def callbacks(self):
        # Handler exposing this object's registered event callbacks.
        return CallbackHandler(self)
    def at_get(self, getter, **kwargs):
        # Run the standard pickup hook, then fire the 'get' event.
        super().at_get(getter, **kwargs)
        self.callbacks.call('get', getter, self)
    def at_drop(self, dropper, **kwargs):
        # Run the standard drop hook, then fire the 'drop' event.
        super().at_drop(dropper, **kwargs)
        self.callbacks.call('drop', dropper, self)
class BSLNTest(unittest.TestCase):
    """Round-trip tests for the AAT 'bsln' table (formats 0-3).

    Bug fix: ``setUpClass`` was missing its ``@classmethod`` decorator, so
    unittest's class-level setup would call it without the ``cls`` argument
    and raise TypeError.
    """

    @classmethod
    def setUpClass(cls):
        cls.maxDiff = None
        # Minimal font with .notdef plus A-Z, enough for baseline tests.
        cls.font = FakeFont((['.notdef'] + [g for g in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ']))

    def decompileToXML(self, data, xml):
        """Decompile binary `data` into a bsln table and compare its XML dump."""
        table = newTable('bsln')
        table.decompile(data, self.font)
        self.assertEqual(getXML(table.toXML), xml)

    def compileFromXML(self, xml, data):
        """Build a bsln table from `xml` and compare its compiled bytes."""
        table = newTable('bsln')
        for (name, attrs, content) in parseXML(xml):
            table.fromXML(name, attrs, content, font=self.font)
        self.assertEqual(hexStr(table.compile(self.font)), hexStr(data))

    def testFormat0(self):
        self.decompileToXML(BSLN_FORMAT_0_DATA, BSLN_FORMAT_0_XML)
        self.compileFromXML(BSLN_FORMAT_0_XML, BSLN_FORMAT_0_DATA)

    def testFormat1(self):
        self.decompileToXML(BSLN_FORMAT_1_DATA, BSLN_FORMAT_1_XML)
        self.compileFromXML(BSLN_FORMAT_1_XML, BSLN_FORMAT_1_DATA)

    def testFormat2(self):
        self.decompileToXML(BSLN_FORMAT_2_DATA, BSLN_FORMAT_2_XML)
        self.compileFromXML(BSLN_FORMAT_2_XML, BSLN_FORMAT_2_DATA)

    def testFormat3(self):
        self.decompileToXML(BSLN_FORMAT_3_DATA, BSLN_FORMAT_3_XML)
        self.compileFromXML(BSLN_FORMAT_3_XML, BSLN_FORMAT_3_DATA)
def parse_cmdline(cmdline_args):
    """Parse pss's command line into (options, positional args, parser).

    Builds the full optparse parser — search, output, file-finding and
    inclusion/exclusion option groups, plus one hidden --TYPE/--noTYPE flag
    pair per known file type — and runs it over `cmdline_args`.
    """
    optparser = PssOptionParser(usage='usage: %prog [options] <pattern> [files]', description=DESCRIPTION, prog='pss', version=('pss %s' % __version__))
    optparser.add_option('--help-types', action='store_true', dest='help_types', help='Display supported file types')
    # Hidden developer flag: dumps the raw type list.
    optparser.add_option('--show-type-list', action='store_true', dest='show_type_list', help=optparse.SUPPRESS_HELP)
    # --- Searching options ---
    group_searching = optparse.OptionGroup(optparser, 'Searching')
    group_searching.add_option('-i', '--ignore-case', action='store_true', dest='ignore_case', default=False, help='Ignore case distinctions in the pattern')
    group_searching.add_option('--smart-case', action='store_true', dest='smart_case', default=False, help='Ignore case distinctions in the pattern, only if the pattern contains no upper case. Ignored if -i is specified')
    group_searching.add_option('-v', '--invert-match', action='store_true', dest='invert_match', default=False, help='Invert match: show non-matching lines')
    group_searching.add_option('-w', '--word-regexp', action='store_true', dest='word_regexp', default=False, help='Force the pattern to match only whole words')
    group_searching.add_option('-Q', '--literal', action='store_true', dest='literal', default=False, help='Quote all metacharacters; the pattern is literal')
    group_searching.add_option('-U', '--universal-newlines', action='store_true', dest='universal_newlines', default=False, help='Use PEP 278 universal newline support when opening files')
    optparser.add_option_group(group_searching)
    # --- Output options ---
    group_output = optparse.OptionGroup(optparser, 'Search output')
    group_output.add_option('--match', action='store', dest='match', metavar='PATTERN', help='Specify the search pattern explicitly')
    group_output.add_option('-m', '--max-count', action='store', dest='max_count', metavar='NUM', default=sys.maxsize, type='int', help='Stop searching in each file after NUM matches')
    # The join/split below collapses the multi-line literal into single-spaced help text.
    group_output.add_option('--with-filename', action='store_true', dest='prefix_filename', default=True, help=' '.join('Print the filename before matches (default). If\n --noheading is specified, the filename will be prepended to each\n matching line. Otherwise it is printed once for all the matches\n in the file.'.split()))
    group_output.add_option('--no-filename', action='store_false', dest='prefix_filename', help='Suppress printing the filename before matches')
    group_output.add_option('--line', action='store_true', dest='show_line', default=True, help='Print the line number before matches (default)')
    group_output.add_option('--noline', action='store_false', dest='show_line', help='Suppress printing the line number before matches')
    group_output.add_option('--column', action='store_true', dest='show_column', help='Show the column number of the first match')
    group_output.add_option('--nocolumn', action='store_false', dest='show_column', help='Suppress showing the column number of the first match (default)')
    group_output.add_option('-A', '--after-context', action='store', dest='after_context', metavar='NUM', default=0, type='int', help='Print NUM lines of context after each match')
    group_output.add_option('-B', '--before-context', action='store', dest='before_context', metavar='NUM', default=0, type='int', help='Print NUM lines of context before each match')
    group_output.add_option('-C', '--context', action='store', dest='context', metavar='NUM', type='int', help='Print NUM lines of context before and after each match')
    # Color/heading defaults follow whether stdout is a terminal.
    group_output.add_option('--color', action='store_true', dest='do_colors', default=sys.stdout.isatty(), help='Highlight the matching text')
    group_output.add_option('--nocolor', action='store_false', dest='do_colors', help='Do not highlight the matching text (this is the default when output is redirected)')
    group_output.add_option('--color-match', metavar='FORE,BACK,STYLE', action='store', dest='color_match', help='Set the color for matches. Examples: "RED", "BLUE,WHITE"')
    group_output.add_option('--color-filename', metavar='FORE,BACK,STYLE', action='store', dest='color_filename', help='Set the color for emitted filenames')
    group_output.add_option('--color-lineno', metavar='FORE,BACK,STYLE', action='store', dest='color_lineno', help='Set the color for line numbers')
    group_output.add_option('--nobreak', action='store_false', dest='do_break', default=sys.stdout.isatty(), help='Print no break between results from different files')
    group_output.add_option('--noheading', action='store_false', dest='do_heading', default=sys.stdout.isatty(), help="Print no file name heading above each file's results")
    optparser.add_option_group(group_output)
    # --- File finding options ---
    group_filefinding = optparse.OptionGroup(optparser, 'File finding')
    group_filefinding.add_option('-f', action='store_true', dest='find_files', help='Only print the names of found files. The pattern must not be specified')
    group_filefinding.add_option('-g', action='append', dest='find_files_matching_patterns', metavar='REGEX', default=[], help='Same as -f, but only print files matching REGEX')
    group_filefinding.add_option('-l', '--files-with-matches', action='store_true', dest='find_files_with_matches', help='Only print the names of found files that have matches for the pattern')
    group_filefinding.add_option('-L', '--files-without-matches', action='store_true', dest='find_files_without_matches', help='Only print the names of found files that have no matches for the pattern')
    optparser.add_option_group(group_filefinding)
    # --- Inclusion/exclusion options ---
    group_inclusion = optparse.OptionGroup(optparser, 'File inclusion/exclusion')
    group_inclusion.add_option('-a', '--all-types', action='store_true', dest='all_types', help='All file types are searched')
    group_inclusion.add_option('-u', '--unrestricted', action='store_true', dest='unrestricted', help='All files are searched, including those in ignored directories')
    group_inclusion.add_option('--ignore-dir', action='append', dest='ignored_dirs', metavar='name', help='Add directory (or several comma-separated directories) to the list of ignored dirs')
    group_inclusion.add_option('--noignore-dir', action='append', dest='noignored_dirs', metavar='name', help='Remove directory (or several comma-separated directories) from the list of ignored dirs')
    group_inclusion.add_option('-r', '-R', '--recurse', action='store_true', dest='recurse', default=True, help='Recurse into subdirectories (default)')
    group_inclusion.add_option('-n', '--no-recurse', action='store_false', dest='recurse', help='Do not recurse into subdirectories')
    group_inclusion.add_option('-t', '--textonly', '--nobinary', action='store_true', dest='textonly', default=False, help='Restrict the search to only textual files.\n Warning: with this option the search is likely to run much slower')
    group_inclusion.add_option('-G', '--include-pattern', action='append', dest='include_patterns', metavar='REGEX', default=[], help='Only search files that match REGEX')
    group_inclusion.add_option('--exclude-pattern', action='append', dest='exclude_patterns', metavar='REGEX', default=[], help='Exclude files that match REGEX')
    optparser.add_option_group(group_inclusion)
    def type_option_callback(option, opt_str, value, parser):
        # Record which --TYPE / --noTYPE flags were passed, in order, on a
        # lazily-created `typelist` attribute of the parsed values.
        optname = opt_str.lstrip('-')
        if hasattr(parser.values, 'typelist'):
            parser.values.typelist.append(optname)
        else:
            parser.values.typelist = [optname]
    # One hidden enable/disable flag pair per known file type.
    for t in TYPE_MAP:
        optparser.add_option(('--' + t), help=optparse.SUPPRESS_HELP, action='callback', callback=type_option_callback)
        optparser.add_option(('--no' + t), help=optparse.SUPPRESS_HELP, action='callback', callback=type_option_callback)
    (options, args) = optparser.parse_args(cmdline_args)
    return (options, args, optparser)
def test_s2t(root_path=ROOT_PATH):
    """End-to-end simuleval run of the tgt-lang counter agent.

    Runs the CLI against the bundled example data and checks the logged
    prediction of every instance.
    """
    s2t_dir = Path.joinpath(root_path, 'examples', 'speech_to_text')
    os.chdir(s2t_dir)
    with tempfile.TemporaryDirectory() as tmpdirname:
        cli.sys.argv[1:] = [
            '--agent', os.path.join(root_path, 'examples', 'speech_to_text', 'counter_in_tgt_lang_agent.py'),
            '--user-dir', os.path.join(root_path, 'examples'),
            '--agent-class', 'agents.EnglishSpeechCounter',
            '--source-segment-size', '1000',
            '--source', os.path.join(root_path, 'examples', 'speech_to_text', 'source.txt'),
            '--target', os.path.join(root_path, 'examples', 'speech_to_text', 'reference/en.txt'),
            '--output', tmpdirname,
            '--tgt-lang', os.path.join(root_path, 'examples', 'speech_to_text', 'reference/tgt_lang.txt'),
        ]
        cli.main()
        with open(os.path.join(tmpdirname, 'instances.log'), 'r') as log_file:
            for line in log_file:
                instance = LogInstance(line.strip())
                assert (instance.prediction == '1 segundos 2 segundos 3 segundos 4 segundos 5 segundos 6 segundos 7 segundos')
class MessageQueue():
    """Queue of playback items, optionally kept ordered by log time.

    When `log_time_order` is set the backing list is maintained as a
    min-heap (max-heap when `reverse`); otherwise it is a plain FIFO.
    """

    def __init__(self, log_time_order: bool, reverse: bool=False):
        self._q: List[_Orderable] = []
        self._log_time_order = log_time_order
        self._reverse = reverse

    def push(self, item: QueueItem):
        """Insert an item, wrapping it so it sorts with the right ordering."""
        wrapper = (
            _ChunkIndexWrapper(item, self._reverse)
            if isinstance(item, ChunkIndex)
            else _MessageTupleWrapper(item, self._reverse)
        )
        if self._log_time_order:
            heapq.heappush(self._q, wrapper)
        else:
            self._q.append(wrapper)

    def pop(self) -> QueueItem:
        """Remove and return the next item (smallest when time-ordered, else FIFO)."""
        wrapper = heapq.heappop(self._q) if self._log_time_order else self._q.pop(0)
        return wrapper.item

    def __len__(self) -> int:
        return len(self._q)
class ValidateAndStoreEnodes(argparse.Action):
    """argparse action that validates enode URIs and accumulates Node objects."""

    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace, value: Any, option_string: str=None) -> None:
        if (value is None):
            return
        # Reject malformed URIs before attempting to parse them.
        validate_enode_uri(value)
        node = Node.from_uri(value)
        # Lazily create the destination list on first use.
        if (getattr(namespace, self.dest) is None):
            setattr(namespace, self.dest, [])
        getattr(namespace, self.dest).append(node)
class OptionPlotoptionsTimelineSonificationContexttracksMappingFrequency(Options):
    """Options for the frequency mapping of a sonification context track.

    Bug fix: each getter/setter pair was defined as two plain methods with the
    same name and no decorators, so the setter silently shadowed the getter.
    The standard ``@property`` / ``@<name>.setter`` pairs are restored; every
    option is backed by the shared config store (``_config_get`` / ``_config``).
    """

    @property
    def mapFunction(self):
        """Mapping function for the parameter; no default configured."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map the parameter to; no default configured."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range; no default configured."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range; no default configured."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the mapped values are computed within; no default configured."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def upload_file_to_s3(local_file, s3_file):
    """Upload a local file to an s3://bucket/key URI.

    Returns True on success, False when the local file is missing or AWS
    credentials are not configured.
    """
    client = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
    (bucket, s3_key) = parse_s3_uri(s3_file)
    try:
        client.upload_file(local_file, bucket, s3_key)
    except FileNotFoundError:
        print(f"{colorstr('aws:')} S3 upload failed because local file not found: {local_file}")
        return False
    except NoCredentialsError:
        print(f"{colorstr('aws:')} AWS credentials are not set. Please configure aws via CLI or set required ENV variables.")
        return False
    return True
class bmm_softmax_bmm_permute(bmm):
    """Fused batched (A @ B) -> softmax -> (@ B1) -> 4D permute operator.

    Computes softmax(scale * A @ B^T) @ B1 and reshapes the result into a
    4-D permuted layout. Only the '0213' permute layout is implemented.
    """
    def __init__(self, shape: Tuple[int], scale=1.0, causal=False, layout='0213'):
        """
        shape:  extra layout dims; shape[0] is the D1 factor used when
                reshaping the output for the 0213 permute.
        scale:  scalar applied before the softmax.
        causal: when True, selects the causal-masked kernel variant.
        layout: permute layout tag (only '0213' is supported downstream).
        """
        super().__init__()
        # The causal variant is a distinct kernel, selected by op name suffix.
        causal_mask = ('_causal' if causal else '')
        self._attrs['op'] = ('bmm_softmax_bmm_permute' + causal_mask)
        self._attrs['scale'] = scale
        self._attrs['shape'] = shape
        self._attrs['layout'] = 'Permute4DBMM_{}'.format(layout)
        def cal_align_ab(m, n, k):
            # Alignment for both A and B is driven by K and the input dtype.
            return common.default_align_ab(k, k, self._attrs['inputs'][0].dtype())
        self._attrs['f_ab_alignment'] = cal_align_ab
    def _infer_shapes(self, a: Tensor, b: Tensor, b1: Tensor):
        """Infer the (batch, M, O) output shape from the three operands.

        Raises RuntimeError when A and B batch sizes differ (unless one is 1).
        NOTE(review): the error messages mention 'bmm_rcr' — presumably copied
        from the rcr op; confirm intended wording upstream.
        """
        a_shapes = a._attrs['shape']
        b_shapes = b._attrs['shape']
        b1_shapes = b1._attrs['shape']
        batch_size_a = a_shapes[0]
        batch_size_b = b_shapes[0]
        if ((batch_size_a != batch_size_b) and (batch_size_a != 1) and (batch_size_b != 1)):
            raise RuntimeError('bmm_rcr operand A and B should have same batch_size, or batch_size = 1! Current shape A: {} shape B: {} .'.format(a_shapes, b_shapes))
        # Broadcast: pick B's batch when A's is the literal 1.
        batch_size = (batch_size_b if (batch_size_a == IntImm(1)) else batch_size_a)
        assert (a_shapes[2] == b_shapes[2]), f'bmm_rcr operand A and B should have the same K dim (dim2)! Current shape A: {a_shapes}, shape B: {b_shapes}'
        return [batch_size, a_shapes[1], b1_shapes[2]]
    def _extract_dims(self, for_profiling=False):
        # Map each profiling dim (B/M/N/K/O) to the tensor indices it comes from.
        return {'B': [common.DimInfo(common.Source.INPUT, tensor_idx=0, dim_idx=0), common.DimInfo(common.Source.INPUT, tensor_idx=1, dim_idx=0), common.DimInfo(common.Source.OUTPUT, tensor_idx=0, dim_idx=0)], 'M': [common.DimInfo(common.Source.INPUT, tensor_idx=0, dim_idx=1), common.DimInfo(common.Source.OUTPUT, tensor_idx=0, dim_idx=1)], 'N': [common.DimInfo(common.Source.INPUT, tensor_idx=1, dim_idx=1)], 'K': [common.DimInfo(common.Source.INPUT, tensor_idx=0, dim_idx=2), common.DimInfo(common.Source.INPUT, tensor_idx=1, dim_idx=2)], 'O': [common.DimInfo(common.Source.OUTPUT, tensor_idx=0, dim_idx=2)]}
    def _invert_exec_key(self, key):
        # Decode a profiler exec key back into its dimension values.
        return common.gemm_inverse_key_func(key)
    def _gen_profile_cmd(self, profiler_prefix, cfg, exec_key):
        """Build the profiler command line for one exec key."""
        def fbuild_cmd(exec_key):
            # Profiler argv order: B, M, N, K, C.
            (B, M, N, K, C) = self._invert_exec_key(exec_key)
            cmd = []
            cmd.append(B)
            cmd.append(M)
            cmd.append(N)
            cmd.append(K)
            cmd.append(C)
            return cmd
        return super()._gen_profile_cmd(profiler_prefix, cfg, exec_key, fbuild_cmd)
    def __call__(self, a: Tensor, b: Tensor, b1: Tensor) -> Tensor:
        """Wire the op into the graph and return the permuted output tensor.

        Raises NotImplementedError for any layout other than Permute4DBMM_0213.
        """
        (a, b) = self._align_ab(a, b)
        self._attrs['inputs'] = [a, b, b1]
        self._attrs['input_accessors'] = [TensorAccessor(a), TensorAccessor(b), TensorAccessor(b1)]
        self._set_depth()
        self._sanity_check(a, b)
        output_shape = self._infer_shapes(a, b, b1)
        output = Tensor(output_shape, src_ops={self}, dtype=a.dtype())
        self._attrs['outputs'] = [output]
        self._attrs['output_accessors'] = [TensorAccessor(output)]
        if (self._attrs['layout'] == 'Permute4DBMM_0213'):
            # Split the batch dim by shape[0] and expose it as a 4-D
            # (b//d1, m, d1, o) tensor via a reshape.
            (b, m, o) = output_shape
            d1 = self._attrs['shape'][0]
            output_shape = [(b.value() // d1), m, d1, o]
            self._extract_epilogue_alignment(output_shape)
            return reshape()(output, output_shape)
        else:
            raise NotImplementedError('{} is not implemented!'.format(self._attrs['layout']))
        # NOTE(review): unreachable — both branches above return or raise.
        return output
    def _get_op_attributes(self):
        # Reconstructable constructor arguments for serialization.
        return {'causal': (self._attrs['op'] == 'bmm_softmax_bmm_permute_causal'), 'layout': self._attrs['layout'].split('_')[(- 1)], 'scale': self._attrs['scale'], 'shape': self._attrs['shape']}
def column_summary_metric_success():
    """Build the TestMetric fixture for ColumnSummaryMetric('target').

    Covers one success case (mixed-type 'target' column summarized as
    categorical) and two error cases where the 'target' column is absent
    from the current/reference data.
    """
    m = ColumnSummaryMetric(column_name='target')
    return TestMetric('column_summary_metric_success', m, outcomes={TestDataset(current=pd.DataFrame({'target': [1, 'ff', 3], 'prediction': ['a', 'b', 'c']}), reference=None, column_mapping=ColumnMapping()): AssertExpectedResult(metric=m, result=ColumnSummaryResult(column_name='target', column_type='cat', reference_characteristics=None, current_characteristics=CategoricalCharacteristics(number_of_rows=3, count=3, unique=3, unique_percentage=100.0, most_common=1, most_common_percentage=33.33, missing=0, missing_percentage=0.0, new_in_current_values_count=None, unused_in_current_values_count=None), plot_data=DataQualityPlot(bins_for_hist=Histogram(current=HistogramData.from_df(pd.DataFrame(dict(x=['1', 'ff', '3'], count=[1, 1, 1])))), data_in_time=None, data_by_target=None, counts_of_values={'current': pd.DataFrame(dict(x=[1, 'ff', 3], count=[1, 1, 1]))}))), TestDataset(current=pd.DataFrame({'col': [1, 2, 1, 2, 1]}), reference=None): Error(ValueError, "Column 'target' not found in dataset."), TestDataset(current=pd.DataFrame({'col2': [1, 2, 1, 2, 1]}), reference=pd.DataFrame({'col': [1, 2, 1, 2, 1]})): Error(ValueError, "Column 'target' not found in dataset.")})
class calibration():
    """Client wrapper around an eye tracker's calibration API endpoints.

    Every method sends a request over `self.connection` and either returns
    data or raises an Exception carrying the server's status message.
    """
    def __init__(self, connection):
        # Connection object used for all calibration requests.
        self.connection = connection
    def set_connection(self, connection):
        """Replace the underlying tracker connection."""
        self.connection = connection
    def start(self, pointcount=9, max_attempts=5):
        """Start a calibration with `pointcount` points, retrying on failure.

        Returns True on success. After `max_attempts` failed attempts, aborts
        any half-started calibration and raises with the last server error.
        NOTE(review): loop structure reconstructed from a flattened source —
        abort/raise are assumed to run only after all attempts fail; confirm.
        """
        for attempt in range(max_attempts):
            response = self.connection.request('calibration', 'start', {'pointcount': pointcount})
            if (response['statuscode'] == 200):
                return True
        self.abort()
        raise Exception('Error in calibration.start: {} (code {})'.format(response['values']['statusmessage'], response['statuscode']))
    def pointstart(self, x, y):
        """Begin calibrating the point at screen coordinates (x, y)."""
        response = self.connection.request('calibration', 'pointstart', {'x': x, 'y': y})
        if (response['statuscode'] == 200):
            return True
        else:
            raise Exception('Error in calibration.pointstart: {} (code {})'.format(response['values']['statusmessage'], response['statuscode']))
    def pointend(self):
        """Finish the current calibration point.

        Returns True for intermediate points; for the final point the server
        includes a 'calibresult', which is unpacked into a flat dict with
        overall accuracy ('deg', per-eye variants) and per-point details.
        """
        response = self.connection.request('calibration', 'pointend', None)
        if (response['statuscode'] != 200):
            raise Exception('Error in calibration.pointend: {} (code {})'.format(response['values']['statusmessage'], response['statuscode']))
        if (('values' not in response.keys()) or ('calibresult' not in response['values'].keys())):
            # No result payload yet: more points remain.
            return True
        else:
            # NOTE(review): 'Rdeg' is filled from 'degl' and 'Ldeg' from
            # 'degr' — looks like a left/right swap; confirm against the
            # tracker API docs.
            returndict = {'result': response['values']['calibresult']['result'], 'deg': response['values']['calibresult']['deg'], 'Rdeg': response['values']['calibresult']['degl'], 'Ldeg': response['values']['calibresult']['degr'], 'calibpoints': []}
            for pointdict in response['values']['calibresult']['calibpoints']:
                returndict['calibpoints'].append({'state': pointdict['state'], 'cpx': pointdict['cp']['x'], 'cpy': pointdict['cp']['y'], 'mecpx': pointdict['mecp']['x'], 'mecpy': pointdict['mecp']['y'], 'acd': pointdict['acd']['ad'], 'Lacd': pointdict['acd']['adl'], 'Racd': pointdict['acd']['adr'], 'mepix': pointdict['mepix']['mep'], 'Lmepix': pointdict['mepix']['mepl'], 'Rmepix': pointdict['mepix']['mepr'], 'asdp': pointdict['asdp']['asd'], 'Lasdp': pointdict['asdp']['asdl'], 'Rasdp': pointdict['asdp']['asdr']})
            return returndict
    def abort(self):
        """Abort the calibration currently in progress."""
        response = self.connection.request('calibration', 'abort', None)
        if (response['statuscode'] == 200):
            return True
        else:
            raise Exception('Error in calibration.abort: {} (code {})'.format(response['values']['statusmessage'], response['statuscode']))
    def clear(self):
        """Clear the current calibration from the tracker."""
        response = self.connection.request('calibration', 'clear', None)
        if (response['statuscode'] == 200):
            return True
        else:
            raise Exception('Error in calibration.clear: {} (code {})'.format(response['values']['statusmessage'], response['statuscode']))
class CallingMessageRecord(object):
    """Record of one watcher-handler invocation, formatted for trace output."""

    __slots__ = ('time', 'indent', 'handler', 'source')

    def __init__(self, time, indent, handler, source):
        self.time, self.indent, self.handler, self.source = time, indent, handler, source

    def __str__(self):
        # Two spaces per nesting level, padded to align with CHANGE messages.
        gap = SPACES_TO_ALIGN_WITH_CHANGE_MESSAGE + (self.indent * 2)
        return CALLINGMSG.format(time=self.time, action='CALLING', handler=self.handler, source=self.source, gap=gap)
class ArgparseMain(DecoratedMain):
    """DecoratedMain backed by a plain argparse parser."""

    def __init__(self, main: MainFun, dora: DoraConfig, parser: argparse.ArgumentParser, slurm: tp.Optional[SlurmConfig]=None, use_underscore: bool=True):
        super().__init__(main, dora)
        self.parser = parser
        self.use_underscore = use_underscore
        self.slurm = slurm

    def get_xp(self, argv: tp.Sequence[str]) -> XP:
        """Parse argv into an XP; `delta` records args differing from parser defaults."""
        argv = list(argv)
        args = self.parser.parse_args(argv)
        delta = [
            (key, value)
            for key, value in vars(args).items()
            if self.parser.get_default(key) != value
        ]
        return XP(dora=self.dora, cfg=args, argv=argv, delta=delta)

    def value_to_argv(self, arg: tp.Any) -> tp.List[str]:
        """Flatten a str / dict / list / tuple into a flat argv fragment.

        Dicts become --key=value flags (bare --key when the value is True);
        lists and tuples are flattened recursively. Anything else raises.
        """
        if isinstance(arg, str):
            return [arg]
        if isinstance(arg, dict):
            out = []
            for key, value in arg.items():
                flag = key if self.use_underscore else key.replace('_', '-')
                out.append(f'--{flag}' if value is True else f'--{flag}={value}')
            return out
        if isinstance(arg, (list, tuple)):
            out = []
            for part in arg:
                out.extend(self.value_to_argv(part))
            return out
        raise ValueError(f'Can only process dict, tuple, lists and str, but got {arg}')

    def get_name_parts(self, xp: XP) -> OrderedDict:
        """Return the XP's delta as an ordered name -> value mapping."""
        assert (xp.delta is not None)
        return OrderedDict(xp.delta)

    def get_slurm_config(self) -> SlurmConfig:
        """Return the explicit slurm config when set, else the inherited default."""
        return self.slurm if self.slurm is not None else super().get_slurm_config()
class Experiment(A2CGAE):
    """A2C-GAE experiment variant that uses a GRU-based agent model."""
    def __init__(self, config, create_env, create_agent):
        super().__init__(config, create_env, create_agent)
    def _create_model(self):
        # GRU agent sized from the experiment's observation/action spaces
        # and the configured hidden size.
        module = GRUAgentModel(self.obs_dim, self.n_actions, self.config['model/hidden_size'])
        # Apply the project's custom weight initialization to every submodule.
        module.apply(weight_init)
        return module
def test_tracestate_adding_valid():
    """Entries added via add_tracestate land in tracestate_dict, coerced to str."""
    trace_parent = TraceParent.from_string(
        '00-0af7651916cd43dd8448eb211c80319c-b7ad6b-03',
        tracestate_string='es=foo:bar;baz:qux,othervendor=<opaque>',
    )
    trace_parent.add_tracestate('x', 'y')
    assert trace_parent.tracestate_dict['x'] == 'y'
    assert len(trace_parent.tracestate_dict) == 3
    # Non-string values are stringified on insert.
    trace_parent.add_tracestate('x', 1)
    assert trace_parent.tracestate_dict['x'] == '1'
def cancel_order(payload, request_id=None):
    """Webhook handler: propagate a cancelled Shopify order into ERPNext.

    Updates the Shopify status field on any linked Sales Invoice and
    Delivery Notes; when neither exists and the Sales Order is submitted,
    cancels the Sales Order itself. Outcome is recorded via
    create_shopify_log; exceptions are logged rather than re-raised.
    """
    frappe.set_user('Administrator')
    frappe.flags.request_id = request_id
    order = payload
    try:
        order_id = order['id']
        order_status = order['financial_status']
        sales_order = get_sales_order(order_id)
        if (not sales_order):
            create_shopify_log(status='Invalid', message='Sales Order does not exist')
            return
        sales_invoice = frappe.db.get_value('Sales Invoice', filters={ORDER_ID_FIELD: order_id})
        delivery_notes = frappe.db.get_list('Delivery Note', filters={ORDER_ID_FIELD: order_id})
        # Mirror the Shopify financial status onto downstream documents.
        if sales_invoice:
            frappe.db.set_value('Sales Invoice', sales_invoice, ORDER_STATUS_FIELD, order_status)
        for dn in delivery_notes:
            frappe.db.set_value('Delivery Note', dn.name, ORDER_STATUS_FIELD, order_status)
        # Only cancel the Sales Order when no downstream documents exist and
        # the order is submitted (docstatus == 1); otherwise just record the
        # Shopify status on it.
        if ((not sales_invoice) and (not delivery_notes) and (sales_order.docstatus == 1)):
            sales_order.cancel()
        else:
            frappe.db.set_value('Sales Order', sales_order.name, ORDER_STATUS_FIELD, order_status)
    except Exception as e:
        # Deliberate best-effort: webhook failures are logged, not raised.
        create_shopify_log(status='Error', exception=e)
    else:
        create_shopify_log(status='Success')
class ModelForm(BaseForm):
    """Form bound to an ORM model.

    Derives the readable/writable field lists from the model's table (or from
    an explicit `fields` selection), validates submitted input against a clone
    of the bound record, handles file uploads (store / keep / delete), and
    persists the record on acceptance.

    Fix over the original: `exclude_fields` used a mutable default (`[]`);
    it now defaults to `None` and is normalized internally, which is
    backward-compatible for all callers.
    """

    def __init__(self, model: Type[Model], record: Optional[Row]=None, record_id: Any=None, fields: Union[(Dict[(str, List[str])], List[str])]=None, exclude_fields: Optional[List[str]]=None, csrf: Union[(str, bool)]='auto', formstyle: Optional[Type[FormStyle]]=None, keepvalues: bool=False, onvalidation: Optional[Callable[([Form], None)]]=None, submit: str='Submit', upload: Optional[str]=None, _action: str='', _enctype: str='multipart/form-data', _method: str='POST', **attributes):
        # Normalize the (formerly mutable-default) exclusion list.
        exclude_fields = [] if exclude_fields is None else exclude_fields
        self.model = model._instance_()
        self.table: Table = self.model.table
        # Bind an existing record, load one by id, or start a fresh one.
        self.record = (record or (self.model.get(record_id) if record_id else self.model.new()))
        fields_list_all = []
        fields_list_writable = []
        if (fields is not None):
            # A plain list means the same fields are both readable and writable.
            if (not isinstance(fields, dict)):
                fields = {'writable': fields, 'readable': fields}
            for field in self.table:
                if (field.name not in fields['readable']):
                    continue
                fields_list_all.append(field)
                if (field.name in fields['writable']):
                    fields_list_writable.append(field)
        else:
            # Derive field lists from the table definition, honoring exclusions.
            for field in self.table:
                if (field.name in exclude_fields):
                    continue
                if (not field.readable):
                    continue
                # On create forms, hide fields the user cannot write anyway.
                if ((not self.record) and (not field.writable)):
                    continue
                fields_list_all.append(field)
                if field.writable:
                    fields_list_writable.append(field)
        super().__init__(fields=fields_list_all, writable_fields=fields_list_writable, csrf=csrf, id_prefix=(self.table._tablename + '_'), formstyle=formstyle, keepvalues=keepvalues, onvalidation=onvalidation, submit=submit, upload=upload, _action=_action, _enctype=_enctype, _method=_method)

    def _get_id_value(self):
        """Return the bound record's key: a tuple for composite PKs, else the id."""
        if (len(self.model._fieldset_pk) > 1):
            return tuple((self.record[pk] for pk in self.model.primary_keys))
        return self.record[self.table._id.name]

    def _validate_input(self):
        """Validate submitted values on a clone of the record and route them
        into self.errors / self.files / self.params."""
        (record, fields) = (self.record.clone(), {field.name: self._get_input_val(field) for field in self.writable_fields})
        # An empty upload with no explicit delete flag means "keep current file".
        for field in filter((lambda f: (f.type == 'upload')), self.writable_fields):
            val = fields[field.name]
            if (((val == b'') or (val is None)) and (not self.input_params.get((field.name + '__del'), False)) and self.record[field.name]):
                fields.pop(field.name)
        record.update(fields)
        errors = record.validation_errors
        for field in self.writable_fields:
            if (field.name in errors):
                self.errors[field.name] = errors[field.name]
            elif (field.type == 'upload'):
                # NOTE(review): if this upload field was popped above ("keep
                # current file") this lookup raises KeyError — confirm whether
                # the framework guarantees it lands in `errors` in that case.
                self.files[field.name] = fields[field.name]
            else:
                self.params[field.name] = fields[field.name]

    async def _process(self, **kwargs):
        """Run the base form pipeline, then store uploads and save the record."""
        # Expose the record id so db-level uniqueness validators can skip self.
        current._dbvalidation_record_id_ = None
        if self.record._concrete:
            current._dbvalidation_record_id_ = self._get_id_value()
        (await super()._process(write_defaults=False))
        if self.accepted:
            for field in filter((lambda f: (f.type == 'upload')), self.writable_fields):
                upload = self.files[field.name]
                del_field = (field.name + '__del')
                if (not upload.filename):
                    # No new file: either clear (delete requested) or keep old.
                    if self.input_params.get(del_field, False):
                        self.params[field.name] = (self.table[field.name].default or '')
                    elif (self.record._concrete and self.record[field.name]):
                        self.params[field.name] = self.record[field.name]
                    continue
                else:
                    (source_file, original_filename) = (upload.stream, upload.filename)
                newfilename = field.store(source_file, original_filename, field.uploadfolder)
                # Optional blob column receives the raw file contents.
                if isinstance(field.uploadfield, str):
                    self.params[field.uploadfield] = source_file.read()
                self.params[field.name] = newfilename
            self.record.update(self.params)
            if self.record.save():
                self.params.id = self._get_id_value()
        del current._dbvalidation_record_id_
        # Refresh displayed values: always after success (unless keepvalues),
        # and on first render; re-format kept uploads after a failed submit.
        if ((not self.processed) or (self.accepted and (not self.keepvalues))):
            for field in self.fields:
                self.input_params[field.name] = field.formatter(self.record[field.name])
        elif (self.processed and (not self.accepted) and self.record._concrete):
            for field in self.writable_fields:
                if ((field.type == 'upload') and (field.name not in self.params)):
                    self.input_params[field.name] = field.formatter(self.record[field.name])
        return self
def test_encodings():
    """Exercise Encodings: data entries are shared across visual channels,
    the component budget (max=2 extra components) is enforced, and components
    are recycled after a channel is deleted."""
    enc = Encodings()
    assert len(enc.data) == 0
    assert enc.max == 2
    # First channel allocates a data entry on component 2.
    enc.set('color', 'test')
    assert len(enc.data) == 1
    assert len(enc.visual) == 1
    assert enc.data[enc.visual['color'].data].component == 2
    # Same data key from another channel reuses the existing entry.
    enc.set('opacity', 'test')
    assert len(enc.data) == 1
    assert len(enc.visual) == 2
    assert enc.data[enc.visual['opacity'].data].component == 2
    enc.set('size', 'test2')
    assert len(enc.data) == 2
    assert len(enc.visual) == 3
    assert enc.data[enc.visual['size'].data].component == 3
    # Re-pointing opacity at an existing data entry adds nothing new.
    enc.set('opacity', 'test2')
    assert len(enc.data) == 2
    assert len(enc.visual) == 3
    assert enc.data[enc.visual['opacity'].data].component == 3
    # All components are in use, so a third distinct data key must fail.
    # (The original wrapped this in try/except AssertionError: pass, which
    # also passed silently when no error was raised at all; also removed the
    # leftover debug print/reduce statements.)
    with pytest.raises(AssertionError):
        enc.set('opacity', 'test3')
    # State must be unchanged after the rejected set.
    assert len(enc.data) == 2
    assert len(enc.visual) == 3
    assert enc.visual['opacity'].data == 'test2'
    assert enc.data[enc.visual['opacity'].data].component == 3
    color_component = enc.get('color').component
    assert color_component == 2
    # Deleting a channel frees its component for reuse by the next data entry.
    enc.delete('color')
    enc.set('opacity', 'test3')
    assert len(enc.data) == 2
    assert len(enc.visual) == 2
    assert enc.visual['opacity'].data == 'test3'
    assert enc.data[enc.visual['opacity'].data].component == color_component
    enc.set('opacity', 'test4')
    assert len(enc.data) == 2
    assert len(enc.visual) == 2
    assert enc.visual['opacity'].data == 'test4'
def test():
    """Exercise-checker: `pattern` must be a two-token spaCy matcher pattern —
    the literal text 'iOS' followed by a digit token.

    The assertion messages are user-facing feedback strings (German course
    material) and are runtime behavior, so they are left untranslated.
    """
    assert (len(pattern) == 2), 'Das Pattern sollte zwei Tokens beschreiben (zwei Dictionaries).'
    assert (isinstance(pattern[0], dict) and isinstance(pattern[1], dict)), 'Jeder Eintrag im Pattern sollte ein Dictionary sein.'
    assert ((len(pattern[0]) == 1) and (len(pattern[1]) == 1)), 'Jeder Eintrag im Pattern sollte nur einen Schlussel haben.'
    # Accept both lowercase and uppercase spellings of the spaCy attributes.
    assert any(((pattern[0].get(key) == 'iOS') for key in ['text', 'TEXT'])), 'Suchst du nach dem Text des ersten Tokens?'
    assert any(((pattern[1].get(key) == True) for key in ['is_digit', 'IS_DIGIT'])), 'Suchst du nach dem Attribut is_digit des zweiten Tokens?'
    __msg__.good('Sehr gut!') |
def get_dc_nodes(request, sr=('dc', 'node'), prefetch_vms_count=False, prefetch_dc=False, order_by=('node__hostname',)):
    """Return the DcNode queryset for the request's datacenter.

    Optionally prefetches the node's DC relation and annotates each DcNode
    with `vms` (all VMs on the node) and `real_vms` (excluding slave VMs).
    """
    nodes = DcNode.objects.select_related(*sr).filter(dc=request.dc).order_by(*order_by)
    if prefetch_dc:
        nodes = nodes.prefetch_related('node__dc')
    if prefetch_vms_count:
        # Build node-uuid -> count maps in two aggregate queries instead of
        # one query per node.
        dc_vms = Vm.objects.filter(dc=request.dc)
        totals = dict(dc_vms.values_list('node').annotate(Count('uuid')).order_by())
        real_totals = dict(dc_vms.filter(slavevm__isnull=True).values_list('node').annotate(Count('uuid')).order_by())
        for dc_node in nodes:
            uuid = dc_node.node.uuid
            dc_node.vms = totals.get(uuid, 0)
            dc_node.real_vms = real_totals.get(uuid, 0)
    return nodes
class _Visitor(ast.NodeVisitor):
path: List[str]
_items: dict
def __init__(self) -> None:
self.path = []
self._items = {}
def generic_visit(self, node: ast.AST) -> None:
sys.exit(f'No visit function defined for {node}. Please implement one.')
def _insert(self, name: str, node: ast.AST) -> None:
item = '.'.join((self.path + [name]))
if (item in self._items):
raise ValueError(f'duplicate path {item}')
self._items[item] = node
def items(self) -> Dict:
return self._items
def visit_Module(self, module: ast.Module) -> None:
for item in module.__dict__['body']:
self.visit(item)
def visit_Import(self, import_: ast.Import) -> None:
pass
def visit_ImportFrom(self, import_from: ast.ImportFrom) -> None:
pass
def visit_Expr(self, expr: ast.Expr) -> None:
if (isinstance(expr.value, ast.Constant) and isinstance(expr.value.value, str)):
return
print(f'The expression {type(expr)} has been ignored.')
def visit_AsyncFunctionDef(self, function: ast.AsyncFunctionDef) -> None:
self._insert(function.name, function)
def visit_FunctionDef(self, function: ast.FunctionDef) -> None:
self._insert(function.name, function)
def visit_ClassDef(self, klass: ast.ClassDef) -> None:
self._insert(klass.name, klass)
def visit_Assign(self, assign: ast.Assign) -> None:
if isinstance(assign.targets[0], ast.Name):
self._insert(assign.targets[0].id, assign)
else:
print(f'Assign node with target of type {type(assign.targets[0])} has been ignored.')
def visit_AnnAssign(self, assign: ast.AnnAssign) -> None:
if isinstance(assign.target, ast.Name):
self._insert(assign.target.id, assign)
else:
print(f'AnnAssign node with target of type {type(assign.target)} has been ignored.') |
def _ingest_error_body(test_target, parse_results, file_name, line_number):
key = (parse_results.stub_service, parse_results.stub_action)
stub_config = test_target.setdefault('stubbed_actions', {}).setdefault(key, {})
if ('body' in stub_config):
raise FixtureSyntaxError('Cannot combine stub action body and errors (must choose one) for {}.{}'.format(*key), file_name, line_number)
field = parse_results.field_name
if ((not field) or (not field.strip()) or (field.strip().lower() == 'none')):
field = None
message = parse_results.error_message
if ((not message) or (not message.strip()) or (message.strip().lower() == 'none')):
message = None
stub_config.setdefault('errors', []).append({'code': parse_results.error_code, 'message': message, 'field': field}) |
# NOTE(review): the leading '.skipif(...)' / '.external' lines look like
# truncated '@pytest.mark.skipif' / '@pytest.mark.external' decorators
# (extraction artifact) — confirm against the original source.
.skipif((has_openai_key is False), reason='OpenAI API key not available')
.external
def test_model_error_handling():
    """Configuring the 'llm' pipe with an unknown model registry name must
    fail fast with a registry lookup error, not at inference time."""
    nlp = spacy.blank('en')
    with pytest.raises(ValueError, match="Could not find function 'spacy.gpt-3.5x.v1'"):
        nlp.add_pipe('llm', config={'task': {'_tasks': 'spacy.NoOp.v1'}, 'model': {'_models': 'spacy.gpt-3.5x.v1'}})
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.