code stringlengths 281 23.7M |
|---|
class OptionSeriesDependencywheelSonificationContexttracksMappingPan(Options):
    """Highcharts sonification mapping options for the `pan` audio parameter
    of dependency-wheel context tracks (generated wrapper class).

    Bug fix: every getter/setter pair shared a name with no decorators, so
    each setter silently shadowed its getter. The stripped `@property` /
    `@<name>.setter` decorators have been restored so both accessors work.
    """

    @property
    def mapFunction(self):
        # No library default for the mapping function.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractAsweknewitstoryWordpressCom(item):
    """Classify a feed item from asweknewitstory.wordpress.com.

    Returns a release message for recognized tags, None for previews or
    items without a chapter/volume, and False when no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    for tagname, name, tl_type in (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_capture_rising_edges(la):
    """Rising edges on a half-frequency square wave should arrive at an
    even spacing of half a period (within two clock cycles)."""
    events = 100
    t1, t2 = la.capture(2, events, modes=['any', 'rising'])
    # Half a period, expressed in microseconds.
    expected = MICROSECONDS / FREQUENCY / 2
    deltas = t2 - t1
    result = deltas - deltas[0]
    reference = np.arange(0, expected * events, expected)
    assert reference == pytest.approx(result, abs=TWO_CLOCK_CYCLES)
class Dispatcher(multiprocessing.Process):
    """Abstract polling dispatcher process.

    Subclasses set `task_type` / `worker_manager_class` and implement
    `get_frontend_tasks()`; the run loop fetches tasks, forwards cancel
    requests, and drives the worker manager on a fixed cadence.

    Fixes vs. original: `report_canceled_task_id` had no body (a syntax
    error), and `_previous_task_fetch_ids` was a mutable class attribute
    shared by every instance — now per-instance state set in __init__.
    """

    task_type = 'task_type'
    worker_manager_class = WorkerManager
    max_workers = float('inf')

    def __init__(self, opts):
        super().__init__(name=self.task_type + '-dispatcher')
        self.sleeptime = 0
        self.opts = opts
        self.log = logging.getLogger()
        self.frontend_client = None
        self.limits = []
        # Per-instance (was a shared class-level set).
        self._previous_task_fetch_ids = set()

    def _update_process_title(self, msg=None):
        """Set the OS process title, optionally suffixed with a status."""
        proc_title = '{} dispatcher'.format(self.task_type.capitalize())
        if msg:
            proc_title += ' - ' + msg
        setproctitle(proc_title)

    def get_frontend_tasks(self):
        """Return the tasks to be processed; must be overridden."""
        raise NotImplementedError

    def get_cancel_requests_ids(self):
        """Return IDs of tasks to cancel (default: none)."""
        _subclass_can_use = self
        return []

    def report_canceled_task_id(self, task_id, was_running):
        """Report a canceled task back to the frontend.

        Intentionally a no-op for subclasses without cancel support.
        """

    def _print_added_jobs(self, tasks):
        # Log only tasks that were not present in the previous fetch.
        job_ids = {task.id for task in tasks}
        new_job_ids = job_ids - self._previous_task_fetch_ids
        if new_job_ids:
            self.log.info("Got new '%s' tasks: %s", self.task_type, new_job_ids)
        self._previous_task_fetch_ids = job_ids

    def run(self):
        """Main loop: fetch -> cancel -> dump -> process -> sleep."""
        self.log.info('%s dispatching started', self.task_type.capitalize())
        self._update_process_title()
        redis = get_redis_connection(self.opts)
        worker_manager = self.worker_manager_class(
            redis_connection=redis, log=self.log,
            max_workers=self.max_workers,
            frontend_client=self.frontend_client, limits=self.limits)
        timeout = self.sleeptime
        while True:
            self._update_process_title('getting tasks from frontend')
            self.log.info('getting %ss from frontend', self.task_type)
            start = time.time()
            tasks = self.get_frontend_tasks()
            if tasks:
                worker_manager.clean_tasks()
                self._print_added_jobs(tasks)
                for task in tasks:
                    worker_manager.add_task(task)
            self._update_process_title('getting cancel requests')
            for task_id in self.get_cancel_requests_ids():
                was_running = worker_manager.cancel_task_id(task_id)
                self.report_canceled_task_id(task_id, was_running)
            self.dump_queue(worker_manager)
            self._update_process_title('processing tasks')
            worker_manager.run(timeout=timeout)
            # Sleep out whatever remains of the polling period.
            sleep_more = timeout - (time.time() - start)
            if sleep_more > 0:
                time.sleep(sleep_more)
            self._update_process_title()

    def dump_queue(self, worker_manager):
        """Dump the priority queue to a JSON file when DEBUG logging is on."""
        if self.log.level > logging.DEBUG:
            return
        queue = []
        for entry in worker_manager.tasks.prio_queue:
            # Unexpected entry shape: silently skip the dump.
            if (not isinstance(entry, list)) or (len(entry) != 3):
                return
            priority, count, task = entry
            queue.append([priority, count, task.id])
        name = 'copr-{0}-queue.json'.format(self.name)
        path = os.path.join(self.opts.log_dir, name)
        with open(path, 'w', encoding='utf8') as fp:
            json.dump(queue, fp, indent=4)
        self.log.debug('Priority queue dumped to: %s', path)
class TrackList(ResourceList):
    """Read-only list endpoint for Track resources, scoped to an event."""

    def query(self, view_kwargs):
        """Return the Track query filtered down to the requested event."""
        tracks = self.session.query(Track)
        return event_query(tracks, view_kwargs)

    view_kwargs = True
    methods = ['GET']
    schema = TrackSchema
    data_layer = {
        'session': db.session,
        'model': Track,
        'methods': {'query': query},
    }
def check_program_size(dtype, model):
    """Compile a minimal AVR program embedding `model` and return its
    program-size metrics as a pandas Series.

    dtype: C feature datatype to generate, or 'no-model' for the baseline
    program with no model compiled in.

    Bug fix: the generated C contained `DDRB |= 0b;` — an empty binary
    literal that cannot compile; replaced with bit 0 per the comment.
    """
    model_name = 'sizecheck'
    features_length = model.estimators_[0].n_features_in_
    model_enabled = 0 if dtype == 'no-model' else 1
    if model_enabled:
        c_model = emlearn.convert(model, dtype=dtype)
        model_code = c_model.save(name=model_name, inference=['inline'])
    else:
        model_code = ''
    test_program = f'''
    #include <stdbool.h>
    #include <avr/io.h>
    #include <util/delay.h>
    #if {model_enabled}
    {model_code}
    const {dtype} features[{features_length}] = {{0, }};
    #endif
    int main()
    {{
        // set PINB0 to output in DDRB
        DDRB |= 0b00000001;
    #if {model_enabled}
        const uint8_t out = {model_name}_predict(features, {features_length});
    #else
        const uint8_t out = 0;
    #endif
        // set output
        PORTB = out;
        _delay_ms(50);
    }}
    '''
    data = get_program_size(test_program, platform='avr')
    return pandas.Series(data)
def load_merged_config(config_file_path: Path, cli_options: dict) -> dict:
    """Merge default config, the user's config file, and CLI options.

    Bug fix: the original assigned `config` only when the user config was
    truthy, so an empty/missing user config raised NameError at the return;
    now the merge always happens (with an empty user config as fallback).
    """
    default_config = load_config(get_default_config_file())
    user_config = load_config(config_file_path)
    if user_config:
        logging.info(f'Loading configuration from {config_file_path}')
    return merge_config_sources(
        user_config=user_config or {},
        default_config=default_config,
        cli_options=cli_options,
    )
class OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Highcharts sonification mapping options for the lowpass-filter
    resonance of wordcloud default instruments (generated wrapper class).

    Bug fix: getter/setter pairs shared a name with no decorators, so each
    setter shadowed its getter; the stripped `@property` / `@<name>.setter`
    decorators have been restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_form_settings(client, msend):
    """End-to-end toggling of per-form settings via the internal API:
    disable_email, disable_storage, and captcha_disabled.

    Bug fix: three form submissions had truncated header dicts
    (`headers={'Referer': ' data=...`), a syntax error from extraction;
    rebuilt as posts with an external-site Referer. NOTE(review): the
    exact original referer value is unrecoverable — verify upstream.
    """
    client.post('/register', data={'email': '', 'password': 'water'})
    user = User.query.filter_by(email='').first()
    user.plan = Plan.gold
    DB.session.add(user)
    DB.session.commit()
    r = client.post('/api-int/forms',
                    headers={'Accept': 'application/json',
                             'Content-type': 'application/json',
                             'Referer': settings.SERVICE_URL},
                    data=json.dumps({'email': ''}))
    resp = json.loads(r.data.decode('utf-8'))
    form = Form.query.first()
    form.confirmed = True
    DB.session.add(form)
    DB.session.commit()
    form_endpoint = resp['hashid']
    msend.reset_mock()
    # Disable email notifications; a submission should not send mail.
    client.patch('/api-int/forms/' + form_endpoint,
                 headers={'Referer': settings.SERVICE_URL},
                 content_type='application/json',
                 data=json.dumps({'disable_email': True}))
    assert Form.query.first().disable_email
    client.post('/' + form_endpoint,
                headers={'Referer': 'http://testwebsite.com'},
                data={'name': 'bruce'})
    assert (not msend.called)
    # Disable storage; a submission should not add a Submission row.
    client.patch('/api-int/forms/' + form_endpoint,
                 headers={'Referer': settings.SERVICE_URL},
                 content_type='application/json',
                 data=json.dumps({'disable_storage': True}))
    assert Form.query.first().disable_storage
    assert (1 == Submission.query.count())
    client.post('/' + form_endpoint,
                headers={'Referer': 'http://testwebsite.com'},
                data={'name': 'wayne'})
    assert (1 == Submission.query.count())
    # Re-enable email and storage; submissions count again.
    client.patch('/api-int/forms/' + form_endpoint,
                 headers={'Referer': settings.SERVICE_URL},
                 content_type='application/json',
                 data=json.dumps({'disable_email': False}))
    assert (not Form.query.first().disable_email)
    assert (1 == Submission.query.count())
    client.patch('/api-int/forms/' + form_endpoint,
                 headers={'Referer': settings.SERVICE_URL},
                 content_type='application/json',
                 data=json.dumps({'disable_storage': False}))
    assert (not Form.query.first().disable_storage)
    client.post('/' + form_endpoint,
                headers={'Referer': 'http://testwebsite.com'},
                data={'name': 'luke'})
    assert (2 == Submission.query.filter_by(form_id=form.id).count())
    # Toggle captcha_disabled both ways.
    assert (not Form.query.first().captcha_disabled)
    client.patch('/api-int/forms/' + form_endpoint,
                 headers={'Referer': settings.SERVICE_URL},
                 content_type='application/json',
                 data=json.dumps({'captcha_disabled': True}))
    assert Form.query.first().captcha_disabled
    client.patch('/api-int/forms/' + form_endpoint,
                 headers={'Referer': settings.SERVICE_URL},
                 content_type='application/json',
                 data=json.dumps({'captcha_disabled': False}))
    assert (not Form.query.first().captcha_disabled)
def generate_bake_mode_previews():
    """Build EnumProperty items from the bake-mode thumbnail directory,
    keeping only images whose stem names a known bake mode."""
    image_location = os.path.join(os.path.dirname(__file__), 'resources/bake_modes_bip')
    enum_items = []
    for index, image in enumerate(os.listdir(image_location)):
        mode = image[:-4]  # strip the 4-char extension
        if mode not in op_bake.modes:
            continue
        filepath = os.path.join(image_location, image)
        thumb = thumbnail_previews.load_safe(filepath, filepath, 'IMAGE')
        enum_items.append((image, mode, '', thumb.icon_id, index))
    return enum_items
class Config_():
    """Spec-style tests for fabric.Config (pytest-relaxed naming: methods
    are test cases, nested classes are groups).

    NOTE(review): extraction stripped all indentation; the nesting below is
    reconstructed from Fabric's test-suite layout — verify against the
    upstream tests/config.py.
    """

    def defaults_to_merger_of_global_defaults(self):
        c = Config()
        # Invoke-level default
        assert (c.run.warn is False)
        # Fabric-level default
        assert (c.port == 22)

    def our_global_defaults_can_override_invokes(self):
        with patch.object(Config, 'global_defaults', return_value={'run': {'warn': 'nope lol'}, 'load_ssh_configs': True, 'ssh_config_path': None}):
            assert (Config().run.warn == 'nope lol')

    def has_various_Fabric_specific_default_keys(self):
        c = Config()
        assert (c.port == 22)
        assert (c.user == get_local_user())
        assert (c.forward_agent is False)
        assert (c.connect_kwargs == {})
        assert (c.timeouts.connect is None)
        assert (c.ssh_config_path is None)
        assert (c.inline_ssh_env is True)

    def overrides_some_Invoke_defaults(self):
        config = Config()
        assert (config.tasks.collection_name == 'fabfile')

    def amends_Invoke_runners_map(self):
        config = Config()
        assert (config.runners == dict(remote=Remote, remote_shell=RemoteShell, local=Local))

    def uses_Fabric_prefix(self):
        assert (Config().prefix == 'fabric')

    class from_v1():
        """Tests for Config.from_v1, importing Fabric 1.x `env` settings."""

        def setup(self):
            self.env = faux_v1_env()

        def _conf(self, **kwargs):
            # Helper: build a Config from the faux v1 env plus overrides.
            self.env.update(kwargs)
            return Config.from_v1(self.env)

        def must_be_given_explicit_env_arg(self):
            config = Config.from_v1(env=Lexicon(self.env, sudo_password='sikrit'))
            assert (config.sudo.password == 'sikrit')

        class additional_kwargs():
            def forwards_arbitrary_kwargs_to_init(self):
                config = Config.from_v1(self.env, overrides={'some': 'value'}, system_ssh_path='/what/ever')
                assert (config.some == 'value')
                assert (config._system_ssh_path == '/what/ever')

            def subservient_to_runtime_overrides(self):
                env = self.env
                env.sudo_password = 'from-v1'
                config = Config.from_v1(env, overrides={'sudo': {'password': 'runtime'}})
                assert (config.sudo.password == 'runtime')

            def connect_kwargs_also_merged_with_imported_values(self):
                self.env['key_filename'] = 'whatever'
                conf = Config.from_v1(self.env, overrides={'connect_kwargs': {'meh': 'effort'}})
                assert (conf.connect_kwargs['key_filename'] == 'whatever')
                assert (conf.connect_kwargs['meh'] == 'effort')

        class var_mappings():
            """One test per v1 env variable that maps to a v2 setting."""

            def always_use_pty(self):
                config = self._conf(always_use_pty=True)
                assert (config.run.pty is True)
                config = self._conf(always_use_pty=False)
                assert (config.run.pty is False)

            def forward_agent(self):
                config = self._conf(forward_agent=True)
                assert (config.forward_agent is True)

            def gateway(self):
                config = self._conf(gateway='bastion.host')
                assert (config.gateway == 'bastion.host')

            class key_filename():
                def base(self):
                    config = self._conf(key_filename='/some/path')
                    assert (config.connect_kwargs['key_filename'] == '/some/path')

                def is_not_set_if_None(self):
                    config = self._conf(key_filename=None)
                    assert ('key_filename' not in config.connect_kwargs)

            def no_agent(self):
                config = self._conf()
                assert (config.connect_kwargs.allow_agent is True)
                config = self._conf(no_agent=True)
                assert (config.connect_kwargs.allow_agent is False)

            class password():
                def set_just_to_connect_kwargs_if_sudo_password_set(self):
                    config = self._conf(password='screaming-firehawks')
                    passwd = config.connect_kwargs.password
                    assert (passwd == 'screaming-firehawks')

                def set_to_both_password_fields_if_necessary(self):
                    config = self._conf(password='sikrit', sudo_password=None)
                    assert (config.connect_kwargs.password == 'sikrit')
                    assert (config.sudo.password == 'sikrit')

            def ssh_config_path(self):
                self.env.ssh_config_path = '/where/ever'
                config = Config.from_v1(self.env, lazy=True)
                assert (config.ssh_config_path == '/where/ever')

            def sudo_password(self):
                config = self._conf(sudo_password='sikrit')
                assert (config.sudo.password == 'sikrit')

            def sudo_prompt(self):
                config = self._conf(sudo_prompt='password???')
                assert (config.sudo.prompt == 'password???')

            def timeout(self):
                config = self._conf(timeout=15)
                assert (config.timeouts.connect == 15)

            def use_ssh_config(self):
                config = self._conf(use_ssh_config=True)
                assert (config.load_ssh_configs is True)
                config = self._conf(use_ssh_config=False)
                assert (config.load_ssh_configs is False)

            def warn_only(self):
                config = self._conf(warn_only=True)
                assert (config.run.warn is True)
                config = self._conf(warn_only=False)
                assert (config.run.warn is False)
def cbFun(snmpEngine, stateReference, contextEngineId, contextName, varBinds, cbCtx):
    """Print an incoming SNMP notification and its variable bindings."""
    execContext = snmpEngine.observer.getExecutionContext('rfc3412.receiveMessage:request')
    transport = ''.join(str(x) for x in execContext['transportAddress'])
    print('Notification from %s, ContextEngineId "%s", ContextName "%s"'
          % (transport, contextEngineId.prettyPrint(), contextName.prettyPrint()))
    for name, val in varBinds:
        print('%s = %s' % (name.prettyPrint(), val.prettyPrint()))
def eval_pt_gemm_rcr(*, m, n, k):
    """Reference PyTorch GEMM in row-col-row layout: output = a @ b.T + bias,
    all tensors fp16 on CUDA; returns inputs and reference output."""
    common = {'dtype': torch.float16, 'device': 'cuda'}
    a = torch.rand(m, k, **common)
    b = torch.rand(n, k, **common)
    bias = torch.rand(n, **common)
    reference = torch.nn.functional.linear(a, b, bias).to(torch.float16)
    return {'a': a, 'b': b, 'bias': bias, 'output': reference}
class WorkflowExecutionFailedException(Exception):
    """Raised when a workflow execution fails.

    Carries the failure `reason`, a structured `details` dict (whose
    optional 'cause' entry may itself be a dict), and a `detail_message`.
    """

    reason: str
    details: Dict[str, Any]
    detail_message: str

    def __str__(self) -> str:
        cause = self.details.get('cause')
        if not isinstance(cause, dict):
            # No structured cause: fall back to reason + detail message.
            return f'{self.reason}: {self.detail_message}'
        return f"{cause['class']}: {cause['detailMessage']}"
# NOTE(review): the four bare lines below are click decorators whose
# "@click."/"@main." prefixes were stripped during extraction — most likely
# @click.pass_context, @click.argument('paths', ...) and a command
# registration. Restore against the upstream usort CLI before running.
()
_context
('paths', nargs=(- 1), type=click.Path(path_type=Path))
_command
def diff(ctx: click.Context, paths: List[Path]) -> int:
    """Show (but do not write) the import-sorting diff for each path;
    return non-zero when any file failed to sort."""
    if (not paths):
        raise click.ClickException('Provide some filenames')
    return_code = 0
    for result in usort_path(paths, write=False):
        if result.error:
            # Sorting failed: report it, optionally dump the traceback in
            # debug mode, mark failure, and continue with the other files.
            click.echo(f'Error sorting {result.path}: {result.error}')
            if ctx.obj.debug:
                click.echo(result.trace)
            return_code |= 1
            continue
        for warning in result.warnings:
            click.echo(f'Warning at {result.path}:{warning.line} {warning.message}')
        if (result.content != result.output):
            # Encoding must be known before decoding for the diff display.
            assert (result.encoding is not None)
            echo_color_unified_diff(result.content.decode(result.encoding), result.output.decode(result.encoding), result.path.as_posix())
        print_benchmark(result.timings)
    return return_code
# NOTE(review): the two bare lines below are stripped decorators — almost
# certainly "@flicket_bp.route(...)" and "@login_required"; restore before use.
_bp.route((app.config['FLICKET'] + 'ticket_department_category/<int:ticket_id>/'), methods=['GET', 'POST'])
_required
def ticket_department_category(ticket_id=False):
    """Flask view: change the department/category assigned to a ticket.

    Guarded by config flags; closed tickets and no-op category changes are
    rejected with a flash message and a redirect back to the ticket view.
    """
    if (not app.config['change_category']):
        abort(404)
    if app.config['change_category_only_admin_or_super_user']:
        # Feature restricted to admins / super users when configured so.
        if ((not g.user.is_admin) and (not g.user.is_super_user)):
            abort(404)
    form = ChangeDepartmentCategoryForm()
    ticket = FlicketTicket.query.get_or_404(ticket_id)
    if (ticket.current_status.status == 'Closed'):
        flash(gettext("Can't change the department and category on a closed ticket."))
        return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
    if form.validate_on_submit():
        department_category = FlicketDepartmentCategory.query.filter_by(department_category=form.department_category.data).one()
        if (ticket.category_id == department_category.category_id):
            # No change requested — warn and bounce back.
            flash(gettext('Category "{} / {}" is already assigned to ticket.'.format(ticket.category.category, ticket.category.department.department)), category='warning')
            return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket.id))
        ticket.category_id = department_category.category_id
        # Record the change in the ticket's action history.
        add_action(ticket, 'department_category', data={'department_category': department_category.department_category, 'category_id': department_category.category_id, 'category': department_category.category, 'department_id': department_category.department_id, 'department': department_category.department})
        db.session.commit()
        flash(gettext('You changed category of ticket: {}'.format(ticket_id)), category='success')
        return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket.id))
    title = gettext('Change Department / Category of Ticket')
    return render_template('flicket_department_category.html', title=title, form=form, ticket=ticket)
class TaskClosure(_common.FlyteIdlEntity):
    """Flyte IDL wrapper around a compiled task.

    Bug fix: `compiled_task` was a plain method, so `to_flyte_idl` called
    `.to_flyte_idl()` on a bound method object; the stripped `@property`
    (and `@classmethod` on `from_flyte_idl`) decorators are restored.
    """

    def __init__(self, compiled_task):
        self._compiled_task = compiled_task

    @property
    def compiled_task(self):
        """The wrapped CompiledTask object."""
        return self._compiled_task

    def to_flyte_idl(self):
        """Serialize to the admin TaskClosure protobuf."""
        return _admin_task.TaskClosure(compiled_task=self.compiled_task.to_flyte_idl())

    @classmethod
    def from_flyte_idl(cls, pb2_object):
        """Alternate constructor from an admin TaskClosure protobuf."""
        return cls(compiled_task=CompiledTask.from_flyte_idl(pb2_object.compiled_task))
def pre_process(mode: ProcessMode) -> bool:
    """Validate that the model is downloaded/present and, for output mode,
    that an output path is configured. Returns True when ready."""
    model_url = get_options('model').get('url')
    model_path = get_options('model').get('path')
    needs_download = (not facefusion.globals.skip_download) and (not is_download_done(model_url, model_path))
    if needs_download:
        logger.error(wording.get('model_download_not_done') + wording.get('exclamation_mark'), NAME)
        return False
    if not is_file(model_path):
        logger.error(wording.get('model_file_not_present') + wording.get('exclamation_mark'), NAME)
        return False
    if mode == 'output' and not facefusion.globals.output_path:
        logger.error(wording.get('select_file_or_directory_output') + wording.get('exclamation_mark'), NAME)
        return False
    return True
class AllOne():
    """All-O(1) key counter (LeetCode 432 style) backed by a doubly linked
    list kept sorted by count: head.next has the minimum, tail.prev the
    maximum. Sentinel nodes carry count=None.

    Bug fix: `dec` unlinked a count-1 node but never removed it from
    `self.track`, so a later `inc` of the same key found a stale detached
    node and corrupted the structure; the entry is now deleted.
    """

    def __init__(self):
        self.track = {}  # key -> its Node in the list
        self.head = Node(None, None)
        self.tail = Node(None, None)
        self.head.next = self.tail
        self.tail.prev = self.head

    def inc(self, key: str) -> None:
        """Increment key's count, inserting at count 1 if absent."""
        if (key not in self.track):
            # New key: insert right after head (the minimum position).
            node = Node(key, 1)
            self.track[key] = node
            nn = self.head.next
            self.head.next = node
            node.prev = self.head
            nn.prev = node
            node.next = nn
        else:
            node = self.track[key]
            node.count += 1
        # Bubble the node toward the tail while it outranks its successor.
        curr = node
        while ((curr.next.count is not None) and (curr.count > curr.next.count)):
            prev = curr.prev
            nn = curr.next
            curr.next = nn.next
            nn.next.prev = curr
            curr.prev = nn
            nn.next = curr
            nn.prev = prev
            prev.next = nn

    def dec(self, key: str) -> None:
        """Decrement key's count, removing it entirely when it reaches 0."""
        if (key not in self.track):
            return
        node = self.track[key]
        if (node.count == 1):
            # Unlink from the list and forget the key (bug fix: the
            # original left a stale entry in self.track).
            prev = node.prev
            nn = node.next
            prev.next = nn
            nn.prev = prev
            del self.track[key]
        else:
            node.count -= 1
            # Bubble the node toward the head while it underranks its
            # predecessor.
            curr = node
            while ((curr.prev.count is not None) and (curr.count < curr.prev.count)):
                prev = curr.prev
                nn = curr.next
                curr.prev = prev.prev
                prev.prev.next = curr
                curr.next = prev
                prev.prev = curr
                nn.prev = prev
                prev.next = nn

    def getMaxKey(self) -> str:
        """Return any key with the maximal count, or '' if empty."""
        ret = self.tail.prev.val
        if (ret is None):
            return ''
        return ret

    def getMinKey(self) -> str:
        """Return any key with the minimal count, or '' if empty."""
        ret = self.head.next.val
        if (ret is None):
            return ''
        return ret
class OptionSeriesScatterDragdropGuideboxDefault(Options):
    """Highcharts drag/drop guide-box default style options for scatter
    series (generated wrapper class).

    Bug fix: getter/setter pairs shared a name with no decorators, so each
    setter shadowed its getter; the stripped `@property` / `@<name>.setter`
    decorators have been restored. Getter arguments are the library
    defaults.
    """

    @property
    def className(self):
        return self._config_get('highcharts-drag-box-default')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get('rgba(0, 0, 0, 0.1)')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        return self._config_get('move')

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#888')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(900)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def test_write_messages():
    """Round-trip ten std_msgs/String messages through Ros1Writer and
    verify topic, payload, and log time on read-back."""
    output = BytesIO()
    writer = Ros1Writer(output=output)
    for seq in range(10):
        writer.write_message('/chatter', String(data=f'string message {seq}'), seq)
    writer.finish()
    output.seek(0)
    for index, msg in enumerate(read_ros1_messages(output)):
        assert msg.channel.topic == '/chatter'
        assert msg.decoded_message.data == f'string message {index}'
        assert msg.message.log_time == index
def generateIterationMessages(iterationsMap, iteration, t):
    """Generate the build/test/integration event stream for one iteration,
    threading the timestamp `t` through each stage.

    NOTE(review): indentation was lost in extraction; the CLM2 check is
    assumed to be nested under the ArtC2 branch — verify upstream.
    """
    iterationsMap[iteration] = {}
    t = generateComponentBuildEvents(iterationsMap, iteration, t)
    t = generateSubSystemBuildEvents(iterationsMap, iteration, t)
    if 'ArtC2' in iterationsMap[iteration]:
        t = generateSubSystemTestEvents(iterationsMap, iteration, t)
        # Integration only runs when the subsystem test reported success.
        if iterationsMap[iteration]['CLM2']['data']['value'] == 'SUCCESS':
            t = generateSystemIntegrationEvents(iterationsMap, iteration, t)
    return t
class FieldPathOutInfo(FieldOutFileInfo):
    """Output-field variant where the script writes into a directory which
    is zipped afterwards and offered as a download."""

    def save(self):
        """Create a fresh temp directory for the script to populate."""
        super().save()
        self.file_path = tempfile.mkdtemp(dir=self.temp_dir())

    def after_script_executed(self):
        """Zip the populated directory and mark it downloadable."""
        super().after_script_executed()
        self.file_path = zip_folder(self.file_path, self.temp_dir(), out_prefix=self.key)
        self.generate_download_link = True
def extractAcountrybetweenBlogspotCom(item):
    """Classify a feed item from acountrybetween.blogspot.com.

    Returns a release message for recognized tags, None for previews or
    items without a chapter/volume, and False when no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    for tagname, name, tl_type in (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the bare "()" line below is most likely a stripped
# "@pytest.fixture" decorator — restore before use.
()
def mc_conn(request):
    """Pytest fixture: yield a memcached client (class/host/port taken from
    indirect params or env vars) and flush the server after the test."""
    params = getattr(request, 'param', {})
    client_class = params.get('client_class', pymemcache.client.base.Client)
    host = os.environ.get('MEMCACHED_HOST', 'localhost')
    port = int(os.environ.get('MEMCACHED_PORT', '11211'))
    client = client_class((host, port))
    (yield client)
    # Teardown: leave the memcached server empty for the next test.
    client.flush_all()
def test_flyte_schema_dataclass():
    """Nested dataclass_json results carrying FlyteSchema fields should
    survive a task/workflow round trip unchanged.

    NOTE(review): t1/wf look like they lost stripped @task/@workflow
    decorators from the original flytekit test — verify upstream; the
    assertion also holds for the plain-function forms below.
    """
    TestSchema = FlyteSchema[kwtypes(some_str=str)]

    class InnerResult(DataClassJsonMixin):
        number: int
        schema: TestSchema

    class Result(DataClassJsonMixin):
        result: InnerResult
        schema: TestSchema

    schema = TestSchema()

    def t1(x: int) -> Result:
        return Result(result=InnerResult(number=x, schema=schema), schema=schema)

    def wf(x: int) -> Result:
        return t1(x=x)

    assert (wf(x=10) == Result(result=InnerResult(number=10, schema=schema), schema=schema))
def fetch_all_products(from_=None):
    """Fetch a page of Shopify products (optionally from a cursor) and
    return dicts annotated with sync state plus pagination URLs."""
    collection = _fetch_products_from_shopify(from_)
    products = []
    for product in collection:
        entry = product.to_dict()
        entry['synced'] = is_synced(product.id)
        products.append(entry)
    next_url = collection.next_page_url if collection.has_next_page() else None
    prev_url = collection.previous_page_url if collection.has_previous_page() else None
    return {'products': products, 'nextUrl': next_url, 'prevUrl': prev_url}
# NOTE(review): the two bare lines below are stripped mock decorators —
# most likely "@mock.patch('src.lib.cmd_exec.run_command')" and
# "@mock.patch.object(source_tree.SourceTree, 'pull')"; restore before use.
# Decorator order means mock_source_tree_pull is the inner patch.
('src.lib.cmd_exec.run_command')
.object(source_tree.SourceTree, 'pull')
def test_determine_envoy_hashes_from_source2(mock_source_tree_pull, mock_run_command):
    """SourceManager should derive hashes from both the source tree and the
    explicitly pinned envoy image tag."""
    job_control = proto_control.JobControl(remote=False, scavenging_benchmark=True)
    _generate_default_benchmark_images(job_control)
    _generate_default_envoy_source(job_control)
    job_control.images.envoy_image = 'envoyproxy/envoy:v1.16.0'
    mock_source_tree_pull.return_value = True
    mock_run_command.side_effect = _run_command_side_effect
    manager = source_manager.SourceManager(job_control)
    hashes = manager.determine_envoy_hashes_from_source()
    expected_hashes = {'v1.15.2', 'v1.16.0'}
    assert (hashes == expected_hashes)
def add_quantities_to_concessions(concessions, quantities, last_prescribing_date):
    """Annotate each concession (in place) with its prescribed quantity.

    Concession dates after `last_prescribing_date` fall back to the latest
    available month and are flagged as estimates. Returns the same list.
    """
    for concession in concessions:
        source_date = concession['date']
        is_estimate = source_date > last_prescribing_date
        if is_estimate:
            # No prescribing data yet for this month: use the latest known.
            source_date = last_prescribing_date
        concession['quantity'] = quantities[(source_date, concession['bnf_code'])]
        concession['is_estimate'] = is_estimate
    return concessions
def download_s3_object(bucket_name: str, key: str, file_path: str, s3_client: BaseClient=None, retry_count: int=3, retry_cooldown: int=30, region_name: str=settings.USASPENDING_AWS_REGION):
    """Download an S3 object to `file_path`, retrying `retry_count` times
    on ClientError with `retry_cooldown` seconds between attempts.

    Re-raises the last ClientError after the retries are exhausted.
    """
    client = s3_client or _get_boto3_s3_client(region_name)
    total_tries = retry_count + 1
    for attempt in range(1, total_tries + 1):
        try:
            client.download_file(bucket_name, key, file_path)
            return
        except ClientError as exc:
            logger.info(f'Attempt {attempt} of {total_tries} failed to download {key} from bucket {bucket_name}. Error: {exc}')
            if attempt >= total_tries:
                logger.error(f'Failed to download {key} from bucket {bucket_name} after {total_tries} attempts.')
                raise exc
            time.sleep(retry_cooldown)
# NOTE(review): the bare ".parametrize(...)" line below is missing its
# "@pytest.mark" prefix (stripped during extraction) — restore before running.
.parametrize('name1,name2,expected1,expected2', [('test_name', 'another_name', 'test_name', 'another_name'), (['test_name'], {'another_name': 'foo'}, "['test_name']", "{'another_name': 'foo'}")])
def test_set_transaction_name(elasticapm_client, name1, name2, expected1, expected2):
    """An explicit set_transaction_name must override the name passed to
    end_transaction; both should be stringified in the recorded events."""
    # First transaction: named only via end_transaction.
    elasticapm_client.begin_transaction('test')
    elasticapm_client.end_transaction(name1, 200)
    # Second transaction: set_transaction_name wins over end_transaction.
    elasticapm_client.begin_transaction('test')
    elasticapm.set_transaction_name(name2)
    elasticapm_client.end_transaction(name1, 200)
    transactions = elasticapm_client.events[TRANSACTION]
    assert (transactions[0]['name'] == expected1)
    assert (transactions[1]['name'] == expected2)
class GetCurrentUserInfo(GetAllUserInfo):
    """Schema for the current user that flattens the dept/roles relations
    into their display names.

    NOTE(review): "_validator(mode='after')" below is almost certainly a
    stripped pydantic-v2 "@model_validator(mode='after')" decorator —
    restore before use. ("handel" is a typo for "handle" in the original.)
    """
    model_config = ConfigDict(from_attributes=True)

    _validator(mode='after')
    def handel(self, values):
        # Replace the dept relation object with its name, if present.
        dept = self.dept
        if dept:
            self.dept = dept.name
        # Replace role relation objects with their names, if present.
        roles = self.roles
        if roles:
            self.roles = [role.name for role in roles]
        return values
def test_required_is_checked_if_field_not_provided():
    """A required config value that is absent or None must produce errors;
    providing it clears them, even after updating optional fields."""

    class MyConfig(_ConfigBase):
        this_one_is_required = _ConfigValue('this_one_is_required', type=int, required=True)
        this_one_isnt = _ConfigValue('this_one_isnt', type=int, required=False)

    # Required value None, missing entirely, or only the optional one set.
    for bad_input in ({'this_one_is_required': None}, {}, {'this_one_isnt': 1}):
        assert MyConfig(bad_input).errors

    cfg = MyConfig({'this_one_is_required': 1})
    cfg.update({'this_one_isnt': 0})
    assert not cfg.errors
def test_handle_okta_credentials_options_both_raises(test_config: FidesConfig) -> None:
    """Supplying both an explicit token/org-url and a credentials id is
    ambiguous and must raise a click UsageError."""
    with pytest.raises(click.UsageError):
        utils.handle_okta_credentials_options(
            fides_config=test_config,
            token='abcd12345',
            org_url='hello.com',
            credentials_id='okta_1',
        )
class lift(_coconut_baseclass):
    """Coconut-compiler runtime helper implementing the `lift` combinator.

    `lift(f)(g, h)` builds a function x -> f(g(x), h(x)); calling with
    extra args at construction time (`lift(f, g, h)`) applies immediately.
    Auto-generated code — keep in sync with the Coconut compiler runtime.
    """
    __slots__ = ('func',)

    def __new__(cls, func, *func_args, **func_kwargs):
        self = _coconut.object.__new__(cls)
        self.func = func
        # lift(f, x, ...) is sugar for lift(f)(x, ...).
        if (func_args or func_kwargs):
            self = self(*func_args, **func_kwargs)
        return self

    def __reduce__(self):
        # Pickle support: rebuild from the wrapped function only.
        return (self.__class__, (self.func,))

    def __call__(self, *func_args, **func_kwargs):
        return _coconut_lifted(self.func, *func_args, **func_kwargs)

    def __repr__(self):
        return ('lift(%r)' % (self.func,))
class RaspberryPiWakeButton(WakeButton):
    """Wake button wired to GPIO pin 17 (BCM numbering, pull-up, falling
    edge, 300 ms debounce) on a Raspberry Pi.

    Bug fix: the callback was declared `button_detected(channel, foo)` with
    no `self` — the bound instance landed in `channel` and the GPIO channel
    number in `foo`; the signature now names the parameters correctly.
    """

    def __init__(self):
        super().__init__()
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(17, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.add_event_detect(17, GPIO.FALLING, callback=self.button_detected, bouncetime=300)

    def button_detected(self, channel):
        """GPIO edge callback: forward to the base-class wake handler."""
        super().on_detected()

    def run(self):
        # Event-driven via GPIO callbacks; nothing to poll.
        pass
def check_employee_wise_availability(date, practitioner_doc):
    """Raise (via frappe.throw) if the practitioner's linked employee is on
    holiday or an approved full-day leave on `date`; half-day leaves pass.

    Bug fix: the holiday message used `_('... {0}'.format(date))`, which
    formats before the gettext lookup and defeats translation; now it
    translates first then formats, matching the leave messages below.
    """
    employee = None
    if practitioner_doc.employee:
        employee = practitioner_doc.employee
    elif practitioner_doc.user_id:
        # Fall back to resolving the employee through the linked user.
        employee = frappe.db.get_value('Employee', {'user_id': practitioner_doc.user_id}, 'name')
    if employee:
        if is_holiday(employee, date):
            frappe.throw(_('{0} is a holiday').format(date), title=_('Not Available'))
        if ('hrms' in frappe.get_installed_apps()):
            leave_record = frappe.db.sql('select half_day from `tabLeave Application`\n\t\t\t\twhere employee = %s and %s between from_date and to_date\n\t\t\t\tand docstatus = 1', (employee, date), as_dict=True)
            if leave_record:
                if leave_record[0].half_day:
                    frappe.throw(_('{0} is on a Half day Leave on {1}').format(practitioner_doc.name, date), title=_('Not Available'))
                else:
                    frappe.throw(_('{0} is on Leave on {1}').format(practitioner_doc.name, date), title=_('Not Available'))
# NOTE(review): the bare ".django_db" line below is missing its
# "@pytest.mark" prefix (stripped during extraction) — restore before running.
.django_db
def test_tas_filter_not_object_or_list(client, monkeypatch, elasticsearch_award_index, award_with_tas):
    """A TAS filter that is a bare string (not an object/list) must be
    rejected with a 422 Unprocessable Entity response."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas(client, "This shouldn't be a string")
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY), 'Failed to return 422 Response'
def firewall_iprope_list(data, fos):
    """Push the firewall.iprope 'list' configuration to FortiOS after
    filtering, hyphenating, and attribute-name conversion."""
    vdom = data['vdom']
    raw_config = data['firewall_iprope_list']
    payload = valid_attr_to_invalid_attrs(
        underscore_to_hyphen(filter_firewall_iprope_list_data(raw_config)))
    return fos.set('firewall.iprope', 'list', data=payload, vdom=vdom)
def constant_fold(operation: OperationType, constants: list[Constant], result_type: Type) -> Constant:
    """Fold `operation` over integer `constants` and normalize the result
    to `result_type` (size and signedness).

    Raises UnsupportedOperationType / UnsupportedValueType when the
    operation or the operand values cannot be folded.
    """
    if operation not in _OPERATION_TO_FOLD_FUNCTION:
        raise UnsupportedOperationType(f"Constant folding not implemented for operation '{operation}'.")
    values = [c.value for c in constants]
    if not all(isinstance(v, int) for v in values):
        raise UnsupportedValueType(f'Constant folding is not implemented for non int constant values: {values}')
    folded = _OPERATION_TO_FOLD_FUNCTION[operation](constants)
    is_signed = isinstance(result_type, Integer) and result_type.signed
    return Constant(normalize_int(folded, result_type.size, is_signed), result_type)
# NOTE(review): the bare line below is a stripped mock decorator — most
# likely "@mock.patch('src.lib.cmd_exec.run_command')"; restore before use.
('src.lib.cmd_exec.run_command')
def test_get_previous_commit_hash(mock_check_output):
    """get_previous_commit_hash should return whatever the underlying
    (mocked) command execution produces for the prior commit."""
    origin = _DEFAULT_HTTPS_REPO_URL
    source = _generate_source_tree_from_origin(origin)
    commit_hash = 'fake_commit_hash_1'
    mock_check_output.return_value = 'fake_commit_hash_2'
    hash_string = source.get_previous_commit_hash(commit_hash)
    assert (hash_string == 'fake_commit_hash_2')
class WhisperDataset(Dataset):
    """TextGrid-aligned speech dataset producing (mel, phone-alignment)
    pairs for phoneme training on Whisper-style features.

    Expects `<name>.TextGrid.opt` alignment files with sibling `<name>.wav`
    audio under `path`. The split is a deterministic 98/2 shuffle.
    """

    def __init__(self, path='dataset/mfa-data', split='train'):
        self.path = Path(path)
        self.files = sorted(list(self.path.glob('**/*.TextGrid.opt')))
        self.split = split
        # Fixed seed so train/val membership is reproducible across runs.
        random.Random(42).shuffle(self.files)
        if (split == 'train'):
            self.files = self.files[:int((len(self.files) * 0.98))]
        elif (split == 'val'):
            self.files = self.files[int((len(self.files) * 0.98)):]
        else:
            raise ValueError('Invalid split')

    def __len__(self):
        return len(self.files)

    def __getitem__(self, idx):
        text_grid_file = self.files[idx]
        # Load the matching wav at 16 kHz (Whisper's expected sample rate).
        (audio, sr) = librosa.load(str(text_grid_file).replace('.TextGrid.opt', '.wav'), sr=16000)
        if (self.split == 'train'):
            # Train-time augmentation: random tempo (0.8-1.2x), pitch shift
            # (+/- 3 semitones), and light noise scaled to the signal peak.
            speed_up_ratio = (random.randint(80, 120) / 100)
            audio = librosa.effects.time_stretch(audio, rate=speed_up_ratio)
            pitch_shift = random.randint((- 3), 3)
            audio = librosa.effects.pitch_shift(audio, sr=sr, n_steps=pitch_shift)
            audio += (np.random.normal(0, 0.005, audio.shape) * np.amax(audio))
        else:
            speed_up_ratio = 1
        mel = log_mel_spectrogram(audio)
        mel_len = mel.shape[1]
        # Encoder output length is half the mel length (2x downsampling).
        feature_len = (mel_len // 2)
        mel = pad_or_trim(mel, 3000)  # 3000 mel frames = 30 s context
        grid = TextGrid.fromFile(str(text_grid_file))
        # Tier 1 holds the phone intervals (mark plus min/max time in sec).
        phones = [i.mark for i in grid.tiers[1]]
        durations = [(i.minTime, i.maxTime) for i in grid.tiers[1]]
        # Seconds -> encoder frames (50 frames/s), corrected for the tempo
        # change so the alignment still matches the stretched audio.
        durations = ((torch.tensor(durations, dtype=torch.float) * 50) * (1 / speed_up_ratio)).round().long()
        aligned_phones = torch.zeros(((3000 // 2),), dtype=torch.long)
        for (i, (start, end)) in enumerate(durations):
            phone = phones[i]
            if (phone in phonemes):
                # presumably index 0 is reserved for unknown/silence — TODO confirm
                aligned_phones[start:end] = phonemes.index(phone)
        return {'mel': mel, 'mel_len': mel_len, 'phones': aligned_phones, 'phones_len': feature_len}
def test_un_prefix(config_env: Dict):
    """Check the HC1: prefix expectation declared by the test dataset:
    prefixing the base45 payload must (or must not) match PREFIX."""
    if CONFIG_ERROR in config_env:
        fail(f'Config Error: {config_env[CONFIG_ERROR]}')
    if EXPECTED_UN_PREFIX not in config_env[EXPECTED_RESULTS]:
        skip(f'Test not requested: {EXPECTED_UN_PREFIX}')
    if not {BASE45, PREFIX} <= config_env.keys():
        skip(f'Test dataset does not contain {BASE45} and/or {PREFIX}')
    prefixed = f'HC1:{config_env[BASE45]}'
    if config_env[EXPECTED_RESULTS][EXPECTED_UN_PREFIX]:
        assert prefixed == config_env[PREFIX]
    else:
        assert prefixed != config_env[PREFIX]
# NOTE(review): the two bare ".parametrize(...)" lines below are missing
# their "@pytest.mark" prefixes (stripped during extraction) — restore
# before running.
.parametrize('obs_type', ['HISTORY_OBSERVATION', 'SUMMARY_OBSERVATION'])
.parametrize('obs_content, match', [('ERROR = -1;', 'Failed to validate "-1"'), ('ERROR_MODE=RELMIN; ERROR_MIN = -1; ERROR=1.0;', 'Failed to validate "-1"'), ('ERROR_MODE = NOT_ABS; ERROR=1.0;', 'Failed to validate "NOT_ABS"')])
def test_that_common_observation_error_validation_is_handled(tmpdir, obs_type, obs_content, match):
    """Invalid ERROR settings in an observation config must raise
    ObservationConfigError with a message matching `match`."""
    with tmpdir.as_cwd():
        config = dedent('\n NUM_REALIZATIONS 2\n\n ECLBASE ECLIPSE_CASE\n REFCASE ECLIPSE_CASE\n OBS_CONFIG observations\n ')
        with open('config.ert', 'w', encoding='utf-8') as fh:
            fh.writelines(config)
        with open('observations', 'w', encoding='utf-8') as fo:
            # SUMMARY observations need extra mandatory keys to reach the
            # error-validation code path under test.
            additional = ('' if (obs_type == 'HISTORY_OBSERVATION') else 'RESTART = 1; VALUE=1.0; KEY = FOPR;')
            fo.writelines(f'''
{obs_type} FOPR
{{
{obs_content}
{additional}
}};
''')
        with open('time_map.txt', 'w', encoding='utf-8') as fo:
            fo.writelines('2023-02-01')
        # Provide a refcase summary so config parsing reaches observations.
        run_sim(datetime(2014, 9, 10), [('FOPR', 'SM3/DAY', None), ('FOPRH', 'SM3/DAY', None)])
        with pytest.raises(ObservationConfigError, match=match):
            ErtConfig.from_file('config.ert')
class CheckButton(Html.Html):
    """A check/cross toggle button component.

    The underlying component value is 'Y' or 'N'; the rendered icon and its
    colour (success/danger theme colours) reflect the current state.

    NOTE(review): `options`, `dom`, `js` and `style` read like `@property`
    accessors whose decorators were lost upstream — confirm against the
    original file.
    """
    name = 'Check Button'
    _option_cls = OptButton.OptCheck
    tag = 'div'

    def __init__(self, page: primitives.PageModel, flag: bool=False, tooltip: Optional[str]=None, width: Optional[tuple]=None, height: Optional[tuple]=None, icon: Optional[str]=None, label: Optional[str]=None, html_code: Optional[str]=None, options: Optional[dict]=None, profile: Optional[Union[(bool, dict)]]=None, verbose: bool=False):
        # Component value is the string 'Y'/'N' derived from the boolean flag.
        super(CheckButton, self).__init__(page, ('Y' if flag else 'N'), html_code=html_code, options=options, css_attrs={'width': width, 'height': height}, profile=profile, verbose=verbose)
        # Pick the checked / unchecked icon and colour it per the theme.
        self.input = page.ui.images.icon((self.options.icon_check if flag else self.options.icon_not_check)).css({'width': page.body.style.globals.font.normal()})
        self.input.style.css.color = (self.page.theme.success.base if flag else self.page.theme.danger.base)
        self.input.style.css.middle()
        # The inner icon is rendered by this component, not by the page.
        self.input.options.managed = False
        self.add_label(label, {'width': 'none', 'float': 'none'}, html_code=self.htmlCode, position='after')
        self.add_icon(icon, {'float': 'none'}, html_code=self.htmlCode, position='after', family=options.get('icon_family'))
        if (tooltip is not None):
            self.tooltip(tooltip)

    def options(self) -> OptButton.OptCheck:
        """Component-specific options (check / uncheck icons etc.)."""
        return super().options

    def dom(self) -> JsHtml.JsHtmlButtonMenu:
        """Lazily-built DOM helper for this component."""
        if (self._dom is None):
            self._dom = JsHtml.JsHtmlButtonMenu(self, page=self.page)
        return self._dom

    def js(self) -> JsComponents.CheckButton:
        """Lazily-built JavaScript API wrapper for this component."""
        if (self._js is None):
            self._js = JsComponents.CheckButton(self, page=self.page)
        return self._js

    def style(self) -> GrpClsButton.ClassButtonCheckBox:
        """Lazily-built CSS class helper for this component."""
        if (self._styleObj is None):
            self._styleObj = GrpClsButton.ClassButtonCheckBox(self)
        return self._styleObj

    def click(self, js_fnc_true: types.JS_FUNCS_TYPES, js_fnc_false: Optional[Union[(list, str)]]=None, with_colors: bool=True, profile: types.PROFILE_TYPE=None, on_ready: bool=False):
        """Toggle the icon on click and run js_fnc_true / js_fnc_false
        depending on the resulting checked state.

        :param js_fnc_true: JS fragment(s) run when the button becomes checked.
        :param js_fnc_false: JS fragment(s) run when it becomes unchecked.
        :param with_colors: Also switch the icon colour between success/danger.
        :param profile: Optional profiling flag/config.
        :param on_ready: Run the handler once the page is ready.
        """
        if ((self.label is not None) and hasattr(self.label, 'style')):
            self.label.style.css.cursor = 'pointer'
        self.style.css.cursor = 'pointer'
        if (not isinstance(js_fnc_true, list)):
            js_fnc_true = [js_fnc_true]
        if (js_fnc_false is None):
            js_fnc_false = []
        elif (not isinstance(js_fnc_false, list)):
            js_fnc_false = [js_fnc_false]
        if with_colors:
            js_fnc_true.append(self.input.dom.css({'color': self.page.theme.success.base}).r)
            js_fnc_false.append(self.input.dom.css({'color': self.page.theme.danger.base}).r)
        # Swap the icon CSS class, then branch on which class is now present.
        js_fncs = [self.input.dom.switchClass(self.options.icon_check.split(' ')[(- 1)], self.options.icon_not_check.split(' ')[(- 1)]), JsIf.JsIf(self.input.dom.hasClass(self.options.icon_check.split(' ')[(- 1)]), js_fnc_true).else_(js_fnc_false)]
        return super(CheckButton, self).click(js_fncs, profile, on_ready=on_ready)

    def __str__(self):
        # Render the wrapping tag with the managed icon inside.
        return ('<%(tag)s %(attrs)s>%(content)s</%(tag)s>' % {'tag': self.tag, 'attrs': self.get_attrs(css_class_names=self.style.get_classes()), 'content': self.input.html()})
class BaseChatAdpter():
    """Base adapter mapping a model path to its conversation template and
    stream-generation function.

    NOTE(review): the class name looks like a typo of "BaseChatAdapter";
    left unchanged because subclasses and callers may reference it.
    """

    def match(self, model_path: str):
        # Subclasses return True when they can handle this model path.
        return False

    def get_generate_stream_func(self, model_path: str):
        """Return the default token-stream generation function."""
        from dbgpt.model.inference import generate_stream
        return generate_stream

    def get_conv_template(self, model_path: str) -> Conversation:
        # Default: no conversation template for this model.
        return None

    def model_adaptation(self, params: Dict, model_path: str, prompt_template: str=None) -> Tuple[(Dict, Dict)]:
        """Build the final prompt from the chat messages in `params` using
        the model's conversation template.

        :param params: Request params; 'messages' is read, 'prompt'/'stop'
            are written in place.
        :param model_path: Path used to look up the conversation template.
        :param prompt_template: Optional template name overriding the lookup.
        :return: Tuple of (updated params, model_context with echo bookkeeping).
        :raises ValueError: On a message of unknown type or role.
        """
        conv = self.get_conv_template(model_path)
        messages = params.get('messages')
        model_context = {'prompt_echo_len_char': (- 1)}
        if messages:
            # Normalise raw dict messages into ModelMessage instances.
            messages = [(m if isinstance(m, ModelMessage) else ModelMessage(**m)) for m in messages]
            params['messages'] = messages
        if prompt_template:
            print(f'Use prompt template {prompt_template} from config')
            conv = get_conv_template(prompt_template)
        if ((not conv) or (not messages)):
            # Nothing to adapt: pass params through unchanged.
            print(f'No conv from model_path {model_path} or no messages in params, {self}')
            return (params, model_context)
        # Copy so the shared template instance is never mutated.
        conv = conv.copy()
        system_messages = []
        for message in messages:
            (role, content) = (None, None)
            if isinstance(message, ModelMessage):
                role = message.role
                content = message.content
            elif isinstance(message, dict):
                role = message['role']
                content = message['content']
            else:
                raise ValueError(f'Invalid message type: {message}')
            if (role == ModelMessageRoleType.SYSTEM):
                # System messages are collected and merged into one slot.
                system_messages.append(content)
            elif (role == ModelMessageRoleType.HUMAN):
                conv.append_message(conv.roles[0], content)
            elif (role == ModelMessageRoleType.AI):
                conv.append_message(conv.roles[1], content)
            else:
                raise ValueError(f'Unknown role: {role}')
        if system_messages:
            conv.update_system_message(''.join(system_messages))
        # Trailing empty assistant turn signals the model to generate.
        conv.append_message(conv.roles[1], None)
        new_prompt = conv.get_prompt()
        # Length without special tokens; used downstream to strip the echo.
        prompt_echo_len_char = len(new_prompt.replace('</s>', '').replace('<s>', ''))
        model_context['prompt_echo_len_char'] = prompt_echo_len_char
        model_context['echo'] = params.get('echo', True)
        params['prompt'] = new_prompt
        params['stop'] = conv.stop_str
        return (params, model_context)
def test_check_public_id_consistency_negative():
    """A non-existent directory must make check_public_id_consistency raise
    ValueError mentioning the directory name."""
    bogus_dir = random_string()
    with pytest.raises(ValueError, match=f'Directory {bogus_dir} is not valid.'):
        cfg = ProtocolConfig('name', 'author', protocol_specification_id='some/author:0.1.0')
        cfg.check_public_id_consistency(Path(bogus_dir))
# NOTE(review): the four leading lines read like click decorators
# ("@click.group(cls=MultiCommandGroup)", "@click.option(...)") whose
# "@click.group"/"@click.option" prefixes were lost upstream — confirm
# against the original file.
(cls=MultiCommandGroup)
('--version', is_flag=True, is_eager=True, callback=print_bench_version, expose_value=False)
('--use-feature', is_eager=True, callback=use_experimental_feature, expose_value=False)
('-v', '--verbose', is_flag=True, callback=setup_verbosity, expose_value=False)
def bench_command(bench_path='.'):
    """Root command group of the bench CLI; records the frappe version for
    the bench at `bench_path` before subcommands run."""
    import bench
    bench.set_frappe_version(bench_path=bench_path)
# NOTE(review): ".django_db" looks like a "@pytest.mark.django_db" decorator
# whose prefix was lost upstream — confirm against the original file.
.django_db
def test_spending_over_time_success(client, monkeypatch, elasticsearch_transaction_index):
    """Smoke test: /spending_over_time returns 200 for keyword filters and
    for the full non-legacy filter set."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    # Minimal request: keyword filters grouped by fiscal year.
    resp = client.post('/api/v2/search/spending_over_time', content_type='application/json', data=json.dumps({'group': 'fiscal_year', 'filters': {'keywords': ['test', 'testing']}}))
    assert (resp.status_code == status.HTTP_200_OK)
    # Broader request: all non-legacy filters grouped by quarter.
    resp = client.post('/api/v2/search/spending_over_time', content_type='application/json', data=json.dumps({'group': 'quarter', 'filters': non_legacy_filters()}))
    assert (resp.status_code == status.HTTP_200_OK)
def cli():
    """Top-level dispatcher for the bench CLI.

    Sets up logging/caches, emits environment warnings, then dispatches
    the invocation to a bench option, a bench subcommand, a frappe command
    or an app command (in that order).
    """
    setup_clear_cache()
    global from_command_line, bench_config, is_envvar_warn_set, verbose
    from_command_line = True
    command = ' '.join(sys.argv)
    argv = set(sys.argv)
    # Suppress the editable-install warning for developers and CI runs.
    is_envvar_warn_set = (not (os.environ.get('BENCH_DEVELOPER') or os.environ.get('CI')))
    is_cli_command = ((len(sys.argv) > 1) and (not argv.intersection({'src', '--version'})))
    cmd_from_sys = get_cmd_from_sysargv()
    if ('--verbose' in argv):
        verbose = True
    change_working_directory()
    logger = setup_logging()
    logger.info(command)
    bench_config = get_config('.')
    if is_cli_command:
        # Drop privileges / normalise user and working directory.
        check_uid()
        change_uid()
        change_dir()
    if (is_envvar_warn_set and is_cli_command and (not bench_config.get('developer_mode')) and is_dist_editable(bench.PROJECT_NAME)):
        log('bench is installed in editable mode!\n\nThis is not the recommended mode of installation for production. Instead, install the package from PyPI with: `pip install frappe-bench`\n', level=3)
    in_bench = is_bench_directory()
    # Commands that legitimately run outside a bench directory are whitelisted.
    if ((not in_bench) and (len(sys.argv) > 1) and (not argv.intersection({'init', 'find', 'src', 'drop', 'get', 'get-app', '--version'})) and (not cmd_requires_root())):
        log('Command not being executed in bench directory', level=3)
    if ((len(sys.argv) == 1) or (sys.argv[1] == '--help')):
        print(click.Context(bench_command).get_help())
        if in_bench:
            print(get_frappe_help())
        return
    # Collect every option name (primary and secondary) known to bench_command.
    _opts = [(x.opts + x.secondary_opts) for x in bench_command.params]
    opts = {item for sublist in _opts for item in sublist}
    setup_exception_handler()
    # Dispatch order: bench global options first, then bench subcommands.
    if (cmd_from_sys and (cmd_from_sys.split('=', 1)[0].strip() in opts)):
        bench_command()
    if (cmd_from_sys in bench_command.commands):
        with execute_cmd(check_for_update=is_cli_command, command=command, logger=logger):
            bench_command()
    if in_bench:
        # Inside a bench: fall through to frappe commands, else app commands.
        if (cmd_from_sys in get_frappe_commands()):
            frappe_cmd()
        else:
            app_cmd()
    # Fallback: let click print usage / unknown-command error.
    bench_command()
class NpmModuleProvider(ModuleProvider):
    """Generates flatpak-builder sources reproducing an offline npm cache.

    Registry tarballs and (trimmed) package indexes are mirrored into an npm
    cacache layout; git dependencies become local checkouts, and generated
    jq patch scripts rewrite package.json / package-lock.json to point at
    those checkouts.
    """

    class Options(NamedTuple):
        # Provider configuration: registry URL and feature toggles.
        registry: str
        no_autopatch: bool
        no_trim_index: bool

    class RegistryPackageIndex(NamedTuple):
        # A fetched package-index document plus the versions actually used,
        # so unused versions can be trimmed before the index is cached.
        url: str
        data: Dict[(Any, Any)]
        used_versions: Set[str]

    def __init__(self, gen: ManifestGenerator, special: SpecialSourceProvider, lockfile_root: Path, options: Options) -> None:
        self.gen = gen
        self.special_source_provider = special
        self.lockfile_root = lockfile_root
        self.registry = options.registry
        self.no_autopatch = options.no_autopatch
        self.no_trim_index = options.no_trim_index
        self.npm_cache_dir = (self.gen.data_root / 'npm-cache')
        self.cacache_dir = (self.npm_cache_dir / '_cacache')
        # Futures so concurrent resolves of the same package share one fetch.
        self.registry_packages: Dict[(str, asyncio.Future[NpmModuleProvider.RegistryPackageIndex])] = {}
        self.index_entries: Dict[(Path, str)] = {}
        self.all_lockfiles: Set[Path] = set()
        # Per-lockfile mapping of local checkout path -> git source.
        self.git_sources: DefaultDict[(Path, Dict[(Path, GitSource)])] = collections.defaultdict((lambda : {}))
        self.rcfile_provider = NpmRCFileProvider()

    def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], tb: Optional[types.TracebackType]) -> None:
        # Only emit the generated cache/patch sources on a clean exit.
        if (exc_type is None):
            self._finalize()

    def get_cacache_integrity_path(self, integrity: Integrity) -> Path:
        """Return the 2/2/rest fan-out path cacache uses for a digest."""
        digest = integrity.digest
        return ((Path(digest[0:2]) / digest[2:4]) / digest[4:])

    def get_cacache_index_path(self, integrity: Integrity) -> Path:
        """Path of an index-v5 entry inside the cacache directory."""
        return ((self.cacache_dir / Path('index-v5')) / self.get_cacache_integrity_path(integrity))

    def get_cacache_content_path(self, integrity: Integrity) -> Path:
        """Path of a content-v2 blob inside the cacache directory."""
        return (((self.cacache_dir / Path('content-v2')) / integrity.algorithm) / self.get_cacache_integrity_path(integrity))

    # NOTE(review): mutable default argument ({}); harmless here because it
    # is never mutated, but a None sentinel would be safer.
    def add_index_entry(self, url: str, metadata: RemoteUrlMetadata, request_headers: Dict[(str, str)]={}) -> None:
        """Record a make-fetch-happen index entry for `url` to be written out
        during finalization."""
        key = f'make-fetch-happen:request-cache:{url}'
        index_json = json.dumps({'key': key, 'integrity': f'{metadata.integrity.algorithm}-{metadata.integrity.to_base64()}', 'time': 0, 'size': metadata.size, 'metadata': {'url': url, 'reqHeaders': request_headers, 'resHeaders': {}}})
        # cacache prefixes each index line with the sha1 of the JSON payload.
        content_integrity = Integrity.generate(index_json, algorithm='sha1')
        index = '\t'.join((content_integrity.digest, index_json))
        key_integrity = Integrity.generate(key)
        index_path = self.get_cacache_index_path(key_integrity)
        self.index_entries[index_path] = index

    async def resolve_source(self, package: Package) -> ResolvedSource:
        """Resolve a registry package to its tarball URL and integrity,
        fetching (and optionally trimming) the package index on first use."""
        assert isinstance(package.source, RegistrySource)
        if (package.name not in self.registry_packages):
            # Install the future before awaiting so concurrent callers wait
            # on the same fetch instead of issuing duplicates.
            cache_future = asyncio.get_event_loop().create_future()
            self.registry_packages[package.name] = cache_future
            data_url = f"{self.get_package_registry(package)}/{package.name.replace('/', '%2f')}"
            raw_data = (await Requests.instance.read_all(data_url, cachable=False))
            data = json.loads(raw_data)
            assert ('versions' in data), f'{data_url} returned an invalid package index'
            cache_future.set_result(NpmModuleProvider.RegistryPackageIndex(url=data_url, data=data, used_versions=set()))
            if (not self.no_trim_index):
                # Keep only the 'versions' key; everything else is metadata
                # npm does not need from the offline cache.
                for key in list(data):
                    if (key != 'versions'):
                        del data[key]
        index = (await self.registry_packages[package.name])
        versions = index.data['versions']
        assert (package.version in versions), f"{package.name} versions available are {', '.join(versions)}, not {package.version}"
        dist = versions[package.version]['dist']
        assert ('tarball' in dist), f'{package.name}{package.version} has no tarball in dist'
        index.used_versions.add(package.version)
        registry_integrity: Integrity
        if ('integrity' in dist):
            registry_integrity = Integrity.parse(dist['integrity'])
        elif ('shasum' in dist):
            registry_integrity = Integrity.from_sha1(dist['shasum'])
        else:
            assert False, f'{package.name}{package.version} has no integrity in dist'
        if package.source.integrity:
            # Cross-check the lockfile integrity against the registry's when
            # both use the same algorithm.
            if ((package.source.integrity.algorithm == registry_integrity.algorithm) and (package.source.integrity.digest != registry_integrity.digest)):
                raise ValueError(f"{package.name}{package.version} integrity doesn't match registry integrity")
            integrity = package.source.integrity
        else:
            integrity = registry_integrity
        return ResolvedSource(resolved=dist['tarball'], integrity=integrity)

    async def generate_package(self, package: Package) -> None:
        """Emit manifest sources for one package (registry tarball, git
        checkout or local path)."""
        self.all_lockfiles.add(package.lockfile)
        source = package.source
        if isinstance(source, (RegistrySource, PackageURLSource)):
            if isinstance(source, RegistrySource):
                source = (await self.resolve_source(package))
                assert (source.resolved is not None)
                assert (source.integrity is not None)
            # Mirror the tarball into the cacache content store and record
            # the matching index entry.
            self.gen.add_url_source(url=source.resolved, integrity=source.integrity, destination=self.get_cacache_content_path(source.integrity))
            self.add_index_entry(url=source.resolved, metadata=RemoteUrlMetadata(integrity=source.integrity, size=(await RemoteUrlMetadata.get_size(source.resolved, cachable=True))))
            (await self.special_source_provider.generate_special_sources(package))
        elif isinstance(source, GitSource):
            # Checked out locally; package files are patched to point here.
            name = f'{package.name}-{source.commit}'
            path = ((self.gen.data_root / 'git-packages') / name)
            self.git_sources[package.lockfile][path] = source
            self.gen.add_git_source(source.url, source.commit, path)
        elif isinstance(source, LocalSource):
            pass
        else:
            raise NotImplementedError(f'Unknown source type {source.__class__.__name__}')

    def relative_lockfile_dir(self, lockfile: Path) -> Path:
        """Lockfile directory relative to the tree of lockfiles being processed."""
        return lockfile.parent.relative_to(self.lockfile_root)

    # NOTE(review): "_cache(typed=True)" looks like a functools caching
    # decorator (e.g. "@functools.lru_cache(typed=True)") whose prefix was
    # lost upstream — confirm against the original file.
    _cache(typed=True)
    def get_lockfile_rc(self, lockfile: Path) -> Dict[(str, str)]:
        """Parse the .npmrc next to `lockfile`, if any."""
        rc = {}
        rcfile_path = (lockfile.parent / self.rcfile_provider.RCFILE_NAME)
        if rcfile_path.is_file():
            rc.update(self.rcfile_provider.parse_rcfile(rcfile_path))
        return rc

    def get_package_registry(self, package: Package) -> str:
        """Return the registry URL for a package, honouring scoped
        '@scope:registry' overrides from the lockfile's .npmrc."""
        assert isinstance(package.source, RegistrySource)
        rc = self.get_lockfile_rc(package.lockfile)
        if (rc and ('/' in package.name)):
            (scope, _) = package.name.split('/', maxsplit=1)
            if (f'{scope}:registry' in rc):
                return rc[f'{scope}:registry']
        return self.registry

    def _finalize(self) -> None:
        """Write trimmed package indexes, git-rewrite patch scripts and all
        recorded cacache index entries into the manifest."""
        for (_, async_index) in self.registry_packages.items():
            index = async_index.result()
            if (not self.no_trim_index):
                # Drop versions nothing in the lockfiles referenced.
                for version in list(index.data['versions'].keys()):
                    if (version not in index.used_versions):
                        del index.data['versions'][version]
            raw_data = json.dumps(index.data).encode()
            metadata = RemoteUrlMetadata(integrity=Integrity.generate(raw_data), size=len(raw_data))
            content_path = self.get_cacache_content_path(metadata.integrity)
            self.gen.add_data_source(raw_data, content_path)
            self.add_index_entry(index.url, metadata, request_headers={'accept': _NPM_CORGIDOC})
        patch_commands: DefaultDict[(Path, List[str])] = collections.defaultdict((lambda : []))
        if self.git_sources:
            # jq scripts that rewrite git URLs in package files to the local
            # checkouts (keyed by filename, parameterised via $data/$buildroot).
            scripts = {'package.json': '\n walk(\n if type == "object"\n then\n to_entries | map(\n if (.value | type == "string") and $data[.value]\n then .value = "git+file:\\($buildroot)/\\($data[.value])"\n else .\n end\n ) | from_entries\n else .\n end\n )\n ', 'package-lock.json': '\n walk(\n if type == "object" and (.version | type == "string") and $data[.version]\n then\n .version = "git+file:\\($buildroot)/\\($data[.version])"\n else .\n end\n )\n '}
            for (lockfile, sources) in self.git_sources.items():
                prefix = self.relative_lockfile_dir(lockfile)
                data: Dict[(str, Dict[(str, str)])] = {'package.json': {}, 'package-lock.json': {}}
                for (path, source) in sources.items():
                    GIT_URL_PREFIX = 'git+'
                    new_version = f'{path}#{source.commit}'
                    assert (source.from_ is not None)
                    data['package.json'][source.from_] = new_version
                    data['package-lock.json'][source.original] = new_version
                    # Also map the un-prefixed URL forms so either spelling
                    # in the package files gets rewritten.
                    if source.from_.startswith(GIT_URL_PREFIX):
                        data['package.json'][source.from_[len(GIT_URL_PREFIX):]] = new_version
                    if source.original.startswith(GIT_URL_PREFIX):
                        data['package-lock.json'][source.original[len(GIT_URL_PREFIX):]] = new_version
                for (filename, script) in scripts.items():
                    target = ((Path('$FLATPAK_BUILDER_BUILDDIR') / prefix) / filename)
                    # Collapse the jq program onto one line for the shell.
                    script = textwrap.dedent(script.lstrip('\n')).strip().replace('\n', '')
                    json_data = json.dumps(data[filename])
                    patch_commands[lockfile].append(f'jq --arg buildroot "$FLATPAK_BUILDER_BUILDDIR" --argjson data {shlex.quote(json_data)} {shlex.quote(script)} {target} > {target}.new')
                    patch_commands[lockfile].append(f'mv {target}{{.new,}}')
        patch_all_commands: List[str] = []
        for lockfile in self.all_lockfiles:
            patch_dest = ((self.gen.data_root / 'patch') / self.relative_lockfile_dir(lockfile))
            patch_dest = patch_dest.with_name((patch_dest.name + '.sh'))
            self.gen.add_script_source(patch_commands[lockfile], patch_dest)
            patch_all_commands.append(f'$FLATPAK_BUILDER_BUILDDIR/{patch_dest}')
        patch_all_dest = (self.gen.data_root / 'patch-all.sh')
        self.gen.add_script_source(patch_all_commands, patch_all_dest)
        if (not self.no_autopatch):
            self.gen.add_command(f'FLATPAK_BUILDER_BUILDDIR=$PWD {patch_all_dest}')
        if self.index_entries:
            for (path, entry) in self.index_entries.items():
                self.gen.add_data_source(entry, path)
# NOTE(review): the leading "('/knowledge/...')" line appears to be an HTTP
# route decorator (e.g. "@router.post(...)") whose prefix was lost upstream —
# confirm against the original file.
('/knowledge/{space_name}/argument/save')
def arguments_save(space_name: str, argument_request: SpaceArgumentRequest):
    """Persist argument settings for a knowledge space, wrapping any failure
    in a failed Result instead of propagating the exception."""
    print(f'/knowledge/space/argument/save params:')
    try:
        return Result.succ(knowledge_space_service.argument_save(space_name, argument_request))
    except Exception as e:
        return Result.failed(code='E000X', msg=f'space list error {e}')
# NOTE(review): ".django_db()" looks like "@pytest.mark.django_db()" with its
# prefix lost upstream — confirm against the original file.
.django_db()
def test_all_states_and_territories_present_district():
    """Every distinct state name and code in the source CSV must appear in
    the loaded congressional-district population table."""
    df = pd.read_csv(CD_TEST_CSV_FILE)
    call_command('load_population_data', type='district', file=CD_TEST_CSV_FILE)
    assert (len(df.state_name.unique()) == len(PopCongressionalDistrict.objects.all().distinct('state_name').values('state_name')))
    assert (len(df.state_code.unique()) == len(PopCongressionalDistrict.objects.all().distinct('state_code').values('state_code')))
class Migration(migrations.Migration):
    """Recreate the vw__medians_for_tariff materialized view.

    For each VMP whose price-per-unit is constant across all tariff packs in
    the most recent month, the view computes the median net-cost-per-quantity
    from the most recent month of prescribing data.
    """
    dependencies = [('frontend', '0047_auto__1413')]
    # Drop-then-create keeps the view definition in sync with this migration.
    operations = [migrations.RunSQL('DROP MATERIALIZED VIEW IF EXISTS vw__medians_for_tariff'), migrations.RunSQL("\n CREATE MATERIALIZED VIEW vw__medians_for_tariff AS (\n WITH\n recent_date AS (\n SELECT MAX(current_at) AS current_at\n FROM frontend_importlog\n WHERE category = 'prescribing'\n ),\n\n vmps_with_one_ppu AS (\n SELECT vmp.vpid \n FROM dmd_vmp vmp\n INNER JOIN dmd_vmpp vmpp ON vmp.vpid = vmpp.vpid\n INNER JOIN frontend_tariffprice tp ON vmpp.vppid = tp.vmpp_id\n WHERE tp.date IN (SELECT current_at FROM recent_date)\n GROUP BY vmp.vpid\n HAVING stddev_pop(tp.price_pence / vmpp.qtyval) = 0\n )\n\n SELECT\n vmp.vpid,\n vmp.nm,\n vmp.bnf_code,\n rx.processing_date AS date,\n percentile_disc(0.5::double precision) WITHIN GROUP (\n ORDER BY (\n CASE\n WHEN rx.quantity > 0::double precision\n THEN rx.net_cost / rx.quantity\n ELSE 0::double precision\n END\n )\n ) AS median_ppu\n\n FROM dmd_vmp vmp\n INNER JOIN frontend_prescription rx\n ON vmp.bnf_code = rx.presentation_code\n\n WHERE\n vmp.vpid IN (SELECT vpid FROM vmps_with_one_ppu)\n AND rx.processing_date IN (SELECT current_at FROM recent_date)\n\n GROUP BY\n vmp.vpid,\n rx.processing_date\n )\n ")]
class Test_igmpv3_report_group(unittest.TestCase):
    """Unit tests for igmpv3_report_group: init, parse, serialize, string
    representation, length, and tolerance of inconsistent num/aux_len fields.

    The setUp_with_* helpers rebuild self.buf / self.g with optional source
    addresses and/or auxiliary data before re-running the shared test bodies.
    """

    def setUp(self):
        # Baseline record: no sources, no auxiliary data.
        self.type_ = MODE_IS_INCLUDE
        self.aux_len = 0
        self.num = 0
        self.address = '225.0.0.1'
        self.srcs = []
        self.aux = None
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)

    def setUp_with_srcs(self):
        # Variant with three source addresses appended to the buffer.
        self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
        self.num = len(self.srcs)
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        for src in self.srcs:
            self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)

    def setUp_with_aux(self):
        # Variant with 8 bytes of auxiliary data (aux_len counts 32-bit words).
        self.aux = b'\x01\x02\x03\x04\x05\x00\x00\x00'
        self.aux_len = (len(self.aux) // 4)
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        self.buf += self.aux
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)

    def setUp_with_srcs_and_aux(self):
        # Variant with both source addresses and auxiliary data.
        self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
        self.num = len(self.srcs)
        self.aux = b'\x01\x02\x03\x04\x05\x00\x00\x00'
        self.aux_len = (len(self.aux) // 4)
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        for src in self.srcs:
            self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
        self.buf += self.aux
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)

    def tearDown(self):
        pass

    def test_init(self):
        # Constructor must store all fields verbatim.
        eq_(self.type_, self.g.type_)
        eq_(self.aux_len, self.g.aux_len)
        eq_(self.num, self.g.num)
        eq_(self.address, self.g.address)
        eq_(self.srcs, self.g.srcs)
        eq_(self.aux, self.g.aux)

    def test_init_with_srcs(self):
        self.setUp_with_srcs()
        self.test_init()

    def test_init_with_aux(self):
        self.setUp_with_aux()
        self.test_init()

    def test_init_with_srcs_and_aux(self):
        self.setUp_with_srcs_and_aux()
        self.test_init()

    def test_parser(self):
        # Parsing self.buf must recover every field of the record.
        _res = self.g.parser(self.buf)
        if (type(_res) is tuple):
            res = _res[0]
        else:
            res = _res
        eq_(res.type_, self.type_)
        eq_(res.aux_len, self.aux_len)
        eq_(res.num, self.num)
        eq_(res.address, self.address)
        eq_(res.srcs, self.srcs)
        eq_(res.aux, self.aux)

    def test_parser_with_srcs(self):
        self.setUp_with_srcs()
        self.test_parser()

    def test_parser_with_aux(self):
        self.setUp_with_aux()
        self.test_parser()

    def test_parser_with_srcs_and_aux(self):
        self.setUp_with_srcs_and_aux()
        self.test_parser()

    def test_serialize(self):
        # Serialized bytes must round-trip the fixed header fields.
        buf = self.g.serialize()
        res = unpack_from(igmpv3_report_group._PACK_STR, six.binary_type(buf))
        eq_(res[0], self.type_)
        eq_(res[1], self.aux_len)
        eq_(res[2], self.num)
        eq_(res[3], addrconv.ipv4.text_to_bin(self.address))

    def test_serialize_with_srcs(self):
        self.setUp_with_srcs()
        buf = self.g.serialize()
        res = unpack_from(igmpv3_report_group._PACK_STR, six.binary_type(buf))
        # Source addresses follow immediately after the fixed header.
        (src1, src2, src3) = unpack_from('4s4s4s', six.binary_type(buf), igmpv3_report_group._MIN_LEN)
        eq_(res[0], self.type_)
        eq_(res[1], self.aux_len)
        eq_(res[2], self.num)
        eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
        eq_(src1, addrconv.ipv4.text_to_bin(self.srcs[0]))
        eq_(src2, addrconv.ipv4.text_to_bin(self.srcs[1]))
        eq_(src3, addrconv.ipv4.text_to_bin(self.srcs[2]))

    def test_serialize_with_aux(self):
        self.setUp_with_aux()
        buf = self.g.serialize()
        res = unpack_from(igmpv3_report_group._PACK_STR, six.binary_type(buf))
        (aux,) = unpack_from(('%ds' % (self.aux_len * 4)), six.binary_type(buf), igmpv3_report_group._MIN_LEN)
        eq_(res[0], self.type_)
        eq_(res[1], self.aux_len)
        eq_(res[2], self.num)
        eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
        eq_(aux, self.aux)

    def test_serialize_with_srcs_and_aux(self):
        self.setUp_with_srcs_and_aux()
        buf = self.g.serialize()
        res = unpack_from(igmpv3_report_group._PACK_STR, six.binary_type(buf))
        (src1, src2, src3) = unpack_from('4s4s4s', six.binary_type(buf), igmpv3_report_group._MIN_LEN)
        # Aux data sits after the header (MIN_LEN) plus 3 * 4 source bytes.
        (aux,) = unpack_from(('%ds' % (self.aux_len * 4)), six.binary_type(buf), (igmpv3_report_group._MIN_LEN + 12))
        eq_(res[0], self.type_)
        eq_(res[1], self.aux_len)
        eq_(res[2], self.num)
        eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
        eq_(src1, addrconv.ipv4.text_to_bin(self.srcs[0]))
        eq_(src2, addrconv.ipv4.text_to_bin(self.srcs[1]))
        eq_(src3, addrconv.ipv4.text_to_bin(self.srcs[2]))
        eq_(aux, self.aux)

    def test_to_string(self):
        # str()/repr() must list every field as "name=repr(value)".
        igmp_values = {'type_': repr(self.type_), 'aux_len': repr(self.aux_len), 'num': repr(self.num), 'address': repr(self.address), 'srcs': repr(self.srcs), 'aux': repr(self.aux)}
        _g_str = ','.join([('%s=%s' % (k, igmp_values[k])) for (k, v) in inspect.getmembers(self.g) if (k in igmp_values)])
        g_str = ('%s(%s)' % (igmpv3_report_group.__name__, _g_str))
        eq_(str(self.g), g_str)
        eq_(repr(self.g), g_str)

    def test_to_string_with_srcs(self):
        self.setUp_with_srcs()
        self.test_to_string()

    def test_to_string_with_aux(self):
        self.setUp_with_aux()
        self.test_to_string()

    def test_to_string_with_srcs_and_aux(self):
        self.setUp_with_srcs_and_aux()
        self.test_to_string()

    def test_len(self):
        # 8 = fixed header only.
        eq_(len(self.g), 8)

    def test_len_with_srcs(self):
        self.setUp_with_srcs()
        # 8 + 3 sources * 4 bytes.
        eq_(len(self.g), 20)

    def test_len_with_aux(self):
        self.setUp_with_aux()
        # 8 + 8 aux bytes.
        eq_(len(self.g), 16)

    def test_len_with_srcs_and_aux(self):
        self.setUp_with_srcs_and_aux()
        # 8 + 12 source bytes + 8 aux bytes.
        eq_(len(self.g), 28)

    def test_num_larger_than_srcs(self):
        # num field claims one more source than the buffer carries; the
        # parser round-trip is still expected to succeed.
        self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
        self.num = (len(self.srcs) + 1)
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        for src in self.srcs:
            self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)
        self.test_parser()

    def test_num_smaller_than_srcs(self):
        # num field claims one fewer source than supplied.
        self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
        self.num = (len(self.srcs) - 1)
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        for src in self.srcs:
            self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)
        self.test_parser()

    def test_aux_len_larger_than_aux(self):
        # aux_len claims one more 32-bit word than the aux payload holds.
        self.aux = b'\x01\x02\x03\x04\x05\x00\x00\x00'
        self.aux_len = ((len(self.aux) // 4) + 1)
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        self.buf += self.aux
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)
        self.test_parser()

    def test_aux_len_smaller_than_aux(self):
        # aux_len claims one fewer 32-bit word than the aux payload holds.
        self.aux = b'\x01\x02\x03\x04\x05\x00\x00\x00'
        self.aux_len = ((len(self.aux) // 4) - 1)
        self.buf = pack(igmpv3_report_group._PACK_STR, self.type_, self.aux_len, self.num, addrconv.ipv4.text_to_bin(self.address))
        self.buf += self.aux
        self.g = igmpv3_report_group(self.type_, self.aux_len, self.num, self.address, self.srcs, self.aux)
        self.test_parser()

    def test_default_args(self):
        # A record built with no arguments serializes to all-zero fields.
        rep = igmpv3_report_group()
        buf = rep.serialize()
        res = unpack_from(igmpv3_report_group._PACK_STR, six.binary_type(buf))
        eq_(res[0], 0)
        eq_(res[1], 0)
        eq_(res[2], 0)
        eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
        # Supplying srcs must auto-fill the num field on serialize.
        srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
        rep = igmpv3_report_group(srcs=srcs)
        buf = rep.serialize()
        res = unpack_from(igmpv3_report_group._PACK_STR, six.binary_type(buf))
        eq_(res[0], 0)
        eq_(res[1], 0)
        eq_(res[2], len(srcs))
        eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
        res = unpack_from('4s4s4s', six.binary_type(buf), igmpv3_report_group._MIN_LEN)
        eq_(res[0], addrconv.ipv4.text_to_bin(srcs[0]))
        eq_(res[1], addrconv.ipv4.text_to_bin(srcs[1]))
        eq_(res[2], addrconv.ipv4.text_to_bin(srcs[2]))
        # Supplying aux must auto-fill aux_len and pad aux to a word boundary.
        aux = b'abcde'
        rep = igmpv3_report_group(aux=aux)
        buf = rep.serialize()
        res = unpack_from(igmpv3_report_group._PACK_STR, six.binary_type(buf))
        eq_(res[0], 0)
        eq_(res[1], 2)
        eq_(res[2], 0)
        eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
        eq_(buf[igmpv3_report_group._MIN_LEN:], b'abcde\x00\x00\x00')
class Solitaire(ft.Stack):
    """A minimal solitaire board: three slots and a two-card deck that is
    dealt onto the first slot once the control is mounted."""

    def __init__(self):
        super().__init__()
        self.controls = []
        self.slots = []
        self.cards = []
        self.width = SOLITAIRE_WIDTH
        self.height = SOLITAIRE_HEIGHT

    def did_mount(self):
        # Build the board only after the control is attached to the page,
        # so update() calls reach a live UI.
        self.create_card_deck()
        self.create_slots()
        self.deal_cards()

    def create_card_deck(self):
        # Two cards, one per colour.
        self.cards = [Card(self, color=colour) for colour in ('GREEN', 'YELLOW')]

    def create_slots(self):
        # Three slots laid out along the top edge.
        self.slots.extend(Slot(top=0, left=offset) for offset in (0, 200, 300))
        self.controls.extend(self.slots)
        self.update()

    def deal_cards(self):
        self.controls.extend(self.cards)
        # Every card starts stacked on the first slot.
        for card in self.cards:
            card.place(self.slots[0])
        self.update()
class SessionFactoryBase(BaseFactory):
    """Factory producing accepted Session fixtures with plausible defaults.

    All field values come from the shared `common` fixture helpers; subclasses
    are expected to attach relations (event, track, ...) as needed.
    """

    class Meta():
        model = Session
    title = common.string_
    subtitle = common.string_
    level = common.int_
    short_abstract = common.string_
    # Long abstract is deliberately longer than the short one.
    long_abstract = (common.string_ + common.string_)
    comments = common.string_
    starts_at = common.dateFuture_
    ends_at = common.dateEndFuture_
    language = 'English'
    slides_url = common.url_
    video_url = common.url_
    audio_url = common.url_
    signup_url = common.url_
    # Sessions are created in the accepted state by default.
    state = 'accepted'
    submitted_at = common.date_
    is_mail_sent = True
def get_companies_rank_by_employees_amount(df: DataFrame, company_field: str, author_email_field: str, result_employee_field: str='Employees') -> DataFrame:
    """Rank companies by the number of distinct contributor e-mails.

    :param df: Source DataFrame containing company and author e-mail columns.
    :param company_field: Column identifying the company.
    :param author_email_field: Column holding the contributor e-mail.
    :param result_employee_field: Name of the output count column.
    :return: DataFrame of (company, count) sorted by count descending.
    """
    employee_count = f.countDistinct(f.col(author_email_field)).alias(result_employee_field)
    grouped = df.select(company_field, author_email_field).groupBy(company_field)
    return grouped.agg(employee_count).sort(result_employee_field, ascending=False)
# NOTE(review): the two "_required" lines read like view decorators (e.g.
# "@logout_required" / "@setting_required('REGISTRATION_ENABLED')") whose
# name prefixes were lost upstream — confirm against the original file.
_required
_required('REGISTRATION_ENABLED')
def forgot_passwd_done(request):
    """Render the confirmation note shown after password-reset instructions
    have been e-mailed to the user."""
    return render(request, 'gui/note.html', {'header': _('Password reset instructions!'), 'blocks': (_("We've emailed you instructions for setting your password. You should be receiving them shortly."), _("If you don't receive an email, please make sure you've entered the address you registered with, and check your spam folder."))})
def add_one(l, r):
    """Combine two Ar_KEY_LEN-byte keys into a new 16-byte key.

    Bytes at the "corner" positions (0, 3, 4, 7, 8, 11, 12, 15) are XORed;
    all other bytes are added modulo 256. Inputs are not modified.

    Args:
        l: First key, a ``bytearray`` of length ``Ar_KEY_LEN``.
        r: Second key, a ``bytearray`` of length ``Ar_KEY_LEN``.

    Returns:
        A new 16-byte ``bytearray`` with the combined key material.

    Raises:
        AssertionError: If either input is not a bytearray of the expected
            length (matches the original contract).
    """
    # isinstance() instead of type(x) == bytearray: same checks for plain
    # bytearrays, and additionally accepts subclasses (backward compatible).
    assert isinstance(l, bytearray) and (len(l) == Ar_KEY_LEN)
    assert isinstance(r, bytearray) and (len(r) == Ar_KEY_LEN)
    # Hoisted out of the loop: the original rebuilt this list every iteration.
    xor_positions = frozenset((0, 3, 4, 7, 8, 11, 12, 15))
    rv = bytearray(16)
    for i in range(16):
        if i in xor_positions:
            rv[i] = l[i] ^ r[i]
        else:
            rv[i] = (l[i] + r[i]) % 256
    assert len(rv) == Ar_KEY_LEN
    return rv
def main(_):
    """Train TD3-BC offline on a D4RL dataset and periodically evaluate."""
    # Keep TensorFlow off the GPU; JAX owns the accelerator here.
    tf.config.set_visible_devices([], 'GPU')
    if FLAGS.wandb:
        wandb.init(project='magi', entity='ethanluoyc', name='td3_bc')
    logging.info('')
    logging.info('Policy: %s, Env: %s, Seed: %s', FLAGS.policy, FLAGS.env, FLAGS.seed)
    logging.info('')
    np.random.seed(FLAGS.seed)
    env = make_environment(FLAGS.env)
    environment_spec = specs.make_environment_spec(env)
    env.seed(FLAGS.seed)
    max_action = environment_spec.actions.maximum[0]
    # The noise scaling below assumes actions normalised to [-1, 1].
    assert (max_action == 1.0)
    agent_networks = td3.make_networks(environment_spec)
    data = d4rl.qlearning_dataset(env)
    if FLAGS.normalize:
        # Observation statistics are reused at evaluation time.
        (data, mean, std) = d4rl_dataset.normalize_obs(data)
    else:
        (mean, std) = (0, 1)
    data_iterator = d4rl_dataset.make_tf_data_iterator(data, batch_size=FLAGS.batch_size).as_numpy_iterator()
    random_key = jax.random.PRNGKey(FLAGS.seed)
    (learner_key, actor_key) = jax.random.split(random_key)
    learner = td3_bc.TD3BCLearner(agent_networks['policy'], agent_networks['critic'], iterator=data_iterator, random_key=learner_key, policy_optimizer=optax.adam(0.0003), critic_optimizer=optax.adam(0.0003), discount=FLAGS.discount, tau=FLAGS.tau, policy_noise=(FLAGS.policy_noise * max_action), noise_clip=(FLAGS.noise_clip * max_action), policy_update_period=FLAGS.policy_freq, alpha=FLAGS.alpha)
    # Deterministic (eval-mode) policy for evaluation rollouts.
    evaluator_network = td3.apply_policy_sample(agent_networks, eval_mode=True)
    evaluator = actors.GenericActor(actor_core.batched_feed_forward_to_actor_core(evaluator_network), random_key=actor_key, variable_client=variable_utils.VariableClient(learner, 'policy', device='cpu'))
    evaluations = []
    for t in range(int(FLAGS.max_timesteps)):
        learner.step()
        if (((t + 1) % FLAGS.eval_freq) == 0):
            logging.info('Time steps: %d', (t + 1))
            evaluations.append(evaluate(evaluator, FLAGS.env, FLAGS.seed, mean, std))
            if FLAGS.wandb:
                wandb.log({'step': t, 'eval_returns': evaluations[(- 1)]})
class Network(ServiceInterface):
    """Mock of the IWD Network D-Bus interface exposing read-only properties.

    NOTE(review): the bare "_property(access=...)" and "()" lines read like
    dbus-next decorators ("@dbus_property(...)", "@method()") whose name
    prefixes were lost upstream — confirm against the original file.
    """

    def __init__(self, *args, connected=False, name='', known='', network_type='', **kwargs):
        ServiceInterface.__init__(self, IWD_NETWORK, *args, **kwargs)
        self._connected = connected
        self._name = name
        self._known = known
        self._type = network_type
    _property(access=PropertyAccess.READ)
    def Name(self) -> 's':
        # D-Bus string property: network SSID.
        return self._name
    _property(access=PropertyAccess.READ)
    def Connected(self) -> 'b':
        # D-Bus boolean property: connection state.
        return self._connected
    _property(access=PropertyAccess.READ)
    def KnownNetwork(self) -> 'o':
        # D-Bus object-path property: associated known-network object.
        return self._known
    _property(access=PropertyAccess.READ)
    def Type(self) -> 's':
        # D-Bus string property: network type (e.g. psk, open).
        return self._type
    ()
    def Connect(self):
        # No-op in the mock: connecting has no observable effect.
        pass
class DepthFirstWalker(Walker):
    """Walker that rebuilds a node's children before dispatching its handler."""

    def walk(self, node, *args, **kwargs):
        """Walk *node* depth-first: children first, then the `_walk_*` handler."""
        auto = self.autowalk(node, *args, **kwargs)
        if auto is not None:
            return auto
        handler = self.get_node_method(node, '_walk_')
        if not callable(handler):
            # No handler registered for this node type.
            return None
        with self.set_context(node):
            if isinstance(node, BaseNode):
                # Recurse into every slot, then rebuild the node from the
                # walked children before handing it to the handler.
                walked_slots = [
                    self.walk(child, *args, **kwargs)
                    for _, child in node.iter_slots()
                ]
                node = type(node)(*walked_slots)
            return handler(node, *args, **kwargs)

    def copy_node(self, node):
        """Return a deep copy of *node* via a recursive walk."""
        return RecursiveWalker().walk(node)
class SusiAppWindow(Renderer):
    """Main GTK window for the SUSI assistant.

    Loads the Glade layout, embeds the listening/thinking animators, and
    switches stack pages in response to renderer messages.
    """

    def __init__(self):
        super().__init__()
        builder = Gtk.Builder()
        builder.add_from_file(os.path.join(TOP_DIR, 'glade_files/susi_app.glade'))
        # Widget handles pulled from the Glade layout.
        self.window = builder.get_object('app_window')
        self.user_text_label = builder.get_object('user_text_label')
        self.susi_text_label = builder.get_object('susi_text_label')
        self.root_box = builder.get_object('root_box')
        self.state_stack = builder.get_object('state_stack')
        self.mic_button = builder.get_object('mic_button')
        self.mic_box = builder.get_object('mic_box')
        self.listening_box = builder.get_object('listening_box')
        self.thinking_box = builder.get_object('thinking_box')
        self.error_label = builder.get_object('error_label')
        self.settings_button = builder.get_object('settings_button')
        # Animators are created in code and packed at the end of their boxes.
        listeningAnimator = ListeningAnimator(self.window)
        self.listening_box.add(listeningAnimator)
        self.listening_box.reorder_child(listeningAnimator, 1)
        self.listening_box.set_child_packing(listeningAnimator, False, False, 0, Gtk.PackType.END)
        thinkingAnimator = ThinkingAnimator(self.window)
        self.thinking_box.add(thinkingAnimator)
        self.thinking_box.reorder_child(thinkingAnimator, 1)
        self.thinking_box.set_child_packing(thinkingAnimator, False, False, 0, Gtk.PackType.END)
        builder.connect_signals(SusiAppWindow.Handler(self))
        self.window.set_default_size(300, 600)
        self.window.set_resizable(False)

    def show_window(self):
        """Show the window and enter the GTK main loop (blocks)."""
        self.window.show_all()
        Gtk.main()

    def exit_window(self):
        """Destroy the window and quit the GTK main loop."""
        self.window.destroy()
        Gtk.main_quit()

    def receive_message(self, message_type, payload=None):
        """Dispatch a renderer state message to the matching UI update."""
        if (message_type == 'idle'):
            self.state_stack.set_visible_child_name('mic_page')
        elif (message_type == 'listening'):
            self.state_stack.set_visible_child_name('listening_page')
            # Clear the previous exchange when a new one starts.
            self.user_text_label.set_text('')
            self.susi_text_label.set_text('')
        elif (message_type == 'recognizing'):
            self.state_stack.set_visible_child_name('thinking_page')
        elif (message_type == 'recognized'):
            # Payload is the recognized user utterance.
            user_text = payload
            self.user_text_label.set_text(user_text)
        elif (message_type == 'speaking'):
            self.state_stack.set_visible_child_name('empty_page')
            susi_reply = payload['susi_reply']
            if ('answer' in susi_reply.keys()):
                self.susi_text_label.set_text(susi_reply['answer'])
        elif (message_type == 'error'):
            self.state_stack.set_visible_child_name('error_page')
            # Payload (if present) identifies the error category.
            error_type = payload
            if (error_type is not None):
                if (error_type == 'connection'):
                    self.error_label.set_text('Problem in internet connectivity !!')
                elif (error_type == 'recognition'):
                    self.error_label.set_text("Couldn't recognize the speech.")
                else:
                    self.error_label.set_text('Some error occurred,')

    class Handler():
        """Glade signal handlers, delegating to the owning window."""

        def __init__(self, app_window):
            self.app_window = app_window

        def on_delete(self, *args):
            self.app_window.exit_window()
            # Signal the whole process so background workers also stop.
            os.kill(os.getpid(), signal.SIGHUP)

        def on_mic_button_clicked(self, button):
            # Run the mic handler asynchronously via a promise.
            Promise((lambda resolve, reject: resolve(self.app_window.on_mic_pressed())))

        def on_settings_button_clicked(self, button):
            window = ConfigurationWindow()
            window.show_window()
def next_plaintext(f):
    """Tokenize the next line of plain text from *f*.

    Returns the line's tokens followed by a 'separator' sentinel, or a
    single 'eof' sentinel token once the file is exhausted.

    NOTE(review): indentation was lost in transcription; the early
    `return tokens` is read here as sitting inside the `for line` loop
    (one line consumed per call), which matches the `next_` naming —
    confirm against the original source.
    """
    tokens = list()
    for line in f:
        surfs = line.strip().split()
        pos = 1  # 1-based token position within the current sentence
        for surf in surfs:
            token = Token(surf)
            if (pos == 1):
                token.firstinsent = True
            else:
                token.firstinsent = False
            token.pos = pos
            # Sentence-final punctuation resets the position counter.
            if (surf in '.?!'):
                pos = 1
            else:
                pos += 1
            tokens.append(token)
        # Mark the end of the line with a separator sentinel.
        sep = Token()
        sep.nontoken = 'separator'
        tokens.append(sep)
        return tokens
    # File exhausted: signal end-of-file with a sentinel token.
    eoft = Token()
    eoft.nontoken = 'eof'
    tokens.append(eoft)
    return tokens
(base=RequestContextTask, name='export.order.pdf', bind=True)
def export_order_pdf_task(self, event_id):
    """Celery task: render all orders of an event into a PDF export.

    Returns {'download_url': ...} on success, or {'__error': True,
    'result': <message>} on failure.

    NOTE(review): the line above looks like a `@celery.task(...)` decorator
    whose `@celery.task` prefix was lost in transcription.
    """
    orders = db.session.query(Order).filter_by(event_id=event_id)
    event = db.session.query(Event).filter_by(id=int(event_id)).first()
    discount_code = db.session.query(DiscountCode).filter_by(event_id=event_id)
    try:
        order_pdf_url = create_save_pdf(render_template('pdf/orders.html', event=event, event_id=event_id, orders=orders, discount_code=discount_code), UPLOAD_PATHS['exports-temp']['pdf'].format(event_id=event_id, identifier=''))
        result = {'download_url': order_pdf_url}
    except Exception as e:
        # Report the failure to the task consumer instead of raising.
        result = {'__error': True, 'result': str(e)}
        logger.exception('Error in exporting order as pdf')
    return result
def set_charset(message):
    """Ensure *message* carries a charset parameter, detecting one if absent.

    Tries the user's preferred locale encoding first, then a list of common
    encodings, and stamps the first one that both decodes the message bytes
    and can be set as the Content-Type charset parameter.

    Args:
        message: an email.message.Message-like object.

    Returns:
        The same message object, with a charset parameter guaranteed.

    Raises:
        Exception: if no candidate encoding works.
    """
    # Already declared? Nothing to do.
    if message.get_content_charset():
        return message
    if message.get_charset():
        return message
    enc_list = ['UTF-8', 'LATIN-1', 'iso8859-1', 'iso8859-2', 'UTF-16', 'CP1252', 'CP720', 'CP437']
    locale.setlocale(locale.LC_ALL, '')
    code = locale.getpreferredencoding()
    # Prefer the user's locale encoding over the generic candidates.
    if (code not in enc_list):
        enc_list.insert(0, code)
    for enc in enc_list:
        try:
            message.as_bytes().decode(enc)
        except (UnicodeDecodeError, UnicodeError):
            continue
        else:
            try:
                message.set_param('charset', enc)
            except (KeyError, UnicodeEncodeError):
                continue
            break
    else:
        # for/else: only reached when no candidate succeeded. (The flat
        # transcription had this raise unconditionally after the loop,
        # which made the final return unreachable.)
        raise Exception('Encoding not detected.')
    return message
.skipif(('pandas' not in sys.modules), reason='Pandas is not installed.')
('flytekit.core.data_persistence.UUID')
def test_write_folder_put_raw(mock_uuid_class):
    """put_raw_data should place data according to upload_prefix/file_name.

    Covers: explicit prefix + name, generated (UUID) prefix, empty prefix,
    nested file_name paths, and a missing file_name (UUID used as name).

    NOTE(review): the two lines above look like stripped decorators
    (likely `@pytest.mark.skipif(...)` and `@mock.patch(...)`).
    """
    import pandas as pd
    # Pin the generated UUID so the expected paths are deterministic.
    mock_uuid_class.return_value.hex = 'abcdef123'
    random_dir = tempfile.mkdtemp()
    raw = os.path.join(random_dir, 'raw')
    fs = FileAccessProvider(local_sandbox_dir=random_dir, raw_output_prefix=raw)
    sio = io.StringIO()
    sio.write('hello world')
    sio.seek(0)
    bio = io.BytesIO()
    bio.write(b'hello world bytes')
    bio2 = io.BytesIO()
    df = pd.DataFrame({'name': ['Tom', 'Joseph'], 'age': [20, 22]})
    df.to_parquet(bio2, engine='pyarrow')
    # Exercise each prefix/file_name combination.
    fs.put_raw_data(sio, upload_prefix='foo', file_name='a.txt')
    fs.put_raw_data(bio, file_name='bar/00000')
    fs.put_raw_data(bio2, upload_prefix='', file_name='pd.parquet')
    fs.put_raw_data(bio, upload_prefix='', file_name='baz/00000')
    fs.put_raw_data(sio, upload_prefix='baz')
    paths = [str(p) for p in pathlib.Path(raw).rglob('*')]
    assert (len(paths) == 9)
    expected = [os.path.join(raw, 'pd.parquet'), os.path.join(raw, 'foo'), os.path.join(raw, 'baz'), os.path.join(raw, 'abcdef123'), os.path.join(raw, 'foo', 'a.txt'), os.path.join(raw, 'baz', '00000'), os.path.join(raw, 'baz', 'abcdef123'), os.path.join(raw, 'abcdef123', 'bar'), os.path.join(raw, 'abcdef123', 'bar', '00000')]
    expected = [str(pathlib.Path(p)) for p in expected]
    assert (sorted(paths) == sorted(expected))
_converter(acc_ops.var)
def acc_ops_var(target: Target, args: Tuple[(Argument, ...)], kwargs: Dict[(str, Argument)], name: str) -> ConverterOutput:
    """Lower an acc_ops.var node to the AIT `var` op.

    NOTE(review): the `_converter(...)` line above looks like a stripped
    `@..._converter(acc_ops.var)` registration decorator.
    """
    input_val = kwargs['input']
    # Only tensors already materialized in AIT can be lowered.
    if (not isinstance(input_val, AITTensor)):
        raise RuntimeError(f'Unexpected input for {name}: {input_val}')
    op = var(dim=kwargs['dim'], unbiased=kwargs['unbiased'], keepdim=kwargs['keepdim'], dtype=None)
    return op(input_val)
class TestRegister(unittest.TestCase):
    """The register_* helpers must add entries to their maps and raise
    ValueError when a name is registered twice."""

    def test_register_config_iterator(self):
        class TestConfigIterator(ConfigIterator):
            pass

        key = '__TestConfigIterator__'
        register_config_iterator(key, TestConfigIterator)
        self.assertTrue(key in config_iterator_map)
        # A second registration under the same name is rejected.
        self.assertRaises(ValueError, register_config_iterator, key, TestConfigIterator)

    def test_register_data_generator(self):
        class TestDataGenerator(DataGenerator):
            pass

        key = '__TestDataGenerator__'
        register_data_generator(key, TestDataGenerator)
        self.assertTrue(key in data_generator_map)
        self.assertRaises(ValueError, register_data_generator, key, TestDataGenerator)

    def test_register_operator(self):
        class TestOperator(OperatorInterface):
            pass

        solo_key = '__TestOperator__'
        register_operator(solo_key, TestOperator)
        self.assertTrue(solo_key in op_map)
        self.assertRaises(ValueError, register_operator, solo_key, TestOperator)
        # Bulk registration mirrors the single-name behavior, including
        # rejection when any name in the batch already exists.
        first_key = '__TestOperator_1__'
        second_key = '__TestOperator_2__'
        register_operators({first_key: TestOperator, second_key: TestOperator})
        self.assertTrue(first_key in op_map)
        self.assertTrue(second_key in op_map)
        self.assertRaises(ValueError, register_operators, {first_key: TestOperator})
        self.assertRaises(ValueError, register_operators, {second_key: TestOperator})
        self.assertRaises(ValueError, register_operators, {first_key: TestOperator, second_key: TestOperator})
('foremast.utils.security_group.gate_request')
('foremast.utils.security_group.get_vpc_id')
def test_utils_sg_get_security_group_id(mock_vpc_id, mock_gate_request):
    """get_security_group_id returns the gate-reported id and errors on
    empty or failed gate responses.

    NOTE(review): the two bare-string lines above look like stripped
    `@mock.patch(...)` decorators (mocks injected bottom-up).
    """
    data = {'id': 100}
    mock_gate_request.return_value.json.return_value = data
    result = get_security_group_id()
    assert (result == 100)
    with pytest.raises(SpinnakerSecurityGroupError):
        # Empty payload -> no id -> domain-specific error.
        mock_gate_request.return_value.json.return_value = {}
        result = get_security_group_id()
    with pytest.raises(AssertionError):
        # Failed HTTP response -> internal assertion trips.
        mock_gate_request.return_value.ok = False
        result = get_security_group_id()
def get_current_span_context() -> (TraceContext | None):
    """Return the active OpenTelemetry span as a TraceContext, or None.

    Only recording spans carry a meaningful context to propagate.
    """
    current_span = trace.get_current_span()
    if (current_span is not None) and current_span.is_recording():
        # NOTE(review): the transcription dropped the right operand of a
        # bitwise AND on trace_id (likely a width mask, e.g. a 64-bit
        # truncation); the id is passed through unmasked here — restore
        # the original mask if one existed.
        return TraceContext(
            trace_id=str(current_span.context.trace_id),
            span_id=str(current_span.context.span_id),
            invocation_id=current_span.attributes['invocation_id'],
        )
    return None
def typed_range(type_func, minimum, maximum):
    """Build an argparse ``type=`` callable enforcing minimum <= value <= maximum.

    Args:
        type_func: converter applied to the raw string (e.g. int, float).
        minimum: smallest accepted converted value (inclusive).
        maximum: largest accepted converted value (inclusive).

    Returns:
        A callable raising argparse.ArgumentTypeError for out-of-range values.
    """
    (type_func)  # NOTE(review): looks like a stripped `@wraps(type_func)` decorator line.
    def inner(string):
        result = type_func(string)
        # Bug fix: the original condition `(not (result >= minimum)) and
        # (result <= maximum)` only rejected values below the minimum,
        # silently accepting anything above the maximum.
        if not (minimum <= result <= maximum):
            raise argparse.ArgumentTypeError('Please provide a value between {0} and {1}'.format(minimum, maximum))
        return result
    return inner
def test_decode_function_with_simple_argument():
    """ABIDecoder should decode a one-uint256-argument function call."""
    test_function_name = 'testFunction'
    test_parameter_name = 'testParameter'
    test_abi = pydantic.parse_obj_as(abi.ABI, [{'name': test_function_name, 'type': 'function', 'inputs': [{'name': test_parameter_name, 'type': 'uint256'}]}])
    test_function_selector = '350c530b'
    # NOTE(review): this literal was blank in the transcription; restored
    # as the 32-byte (64 hex chars) big-endian ABI encoding of 1, which is
    # what the final `inputs == {...: 1}` assertion requires.
    test_function_argument = '1'.zfill(64)
    abi_decoder = decode.ABIDecoder(test_abi)
    # Calldata = 0x + 4-byte selector + ABI-encoded arguments.
    call_data = abi_decoder.decode((('0x' + test_function_selector) + test_function_argument))
    assert (call_data.function_name == test_function_name)
    assert (call_data.function_signature == 'testFunction(uint256)')
    assert (call_data.inputs == {test_parameter_name: 1})
def test_channel_curves():
    """Channel.curves() should agree with curves loaded via the frame."""
    fpath = 'data/chap4-7/iflr/all-reprcodes.dlis'
    with dlis.load(fpath) as (f, *_):
        channel = f.object('CHANNEL', 'CH26', 10, 0)
        curves = channel.curves()
        assert (curves[0] == True)
        channel = f.object('CHANNEL', 'CH22', 10, 0)
        curves22 = channel.curves()
        # NOTE(review): the expected scalar for curves22[0] was lost in
        # transcription (original read `assert curves22[0] == <value>`).
        # The cross-check against the frame-loaded curves below still pins
        # the value indirectly; restore the literal when known.
        frame_curves = load_curves(fpath)
        assert (frame_curves['CH22'] == curves22)
def test_manual_quadrature():
    """Explicit quadrature degrees on measures behave as expected.

    With f = x^3 on CG3: degree-3 quadrature matches the default measure
    and the analytic value 1/4; degree-1 quadrature assembles to 0 here;
    and assembling a sum of forms equals the sum of assemblies.
    """
    mesh = UnitSquareMesh(4, 4)
    V = FunctionSpace(mesh, 'CG', 3)
    x = SpatialCoordinate(mesh)
    f = Function(V).interpolate((x[0] ** 3))
    # Same integrand under three quadrature rules.
    f1 = (f * dx(degree=3))
    f2 = (f * dx(degree=1))
    f3 = (f * dx)
    assert np.allclose(assemble(f1), assemble(f3))
    assert np.allclose(assemble(f1), 0.25)
    assert np.allclose(assemble(f2), 0.)
    # Form addition must preserve each summand's quadrature degree.
    assert np.allclose((assemble(f1) + assemble(f2)), assemble((f1 + f2)))
class ConditionEncoder(fl.Chain):
    """T2I-Adapter style condition encoder.

    Pixel-unshuffles the condition image, applies a conv stem, then runs a
    pyramid of stateful residual stages whose collected features are
    published into the 't2iadapter' context.
    """

    def __init__(self, in_channels: int=3, channels: tuple[(int, int, int, int)]=(320, 640, 1280, 1280), num_residual_blocks: int=2, downscale_factor: int=8, scale: float=1.0, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        # Set before super().__init__ so the attribute exists while
        # fl.Chain wires up the child modules.
        self.scale = scale
        super().__init__(fl.PixelUnshuffle(downscale_factor=downscale_factor), fl.Conv2d(in_channels=(in_channels * (downscale_factor ** 2)), out_channels=channels[0], kernel_size=3, padding=1, device=device, dtype=dtype), StatefulResidualBlocks(channels[0], channels[0], num_residual_blocks, device=device, dtype=dtype), *(StatefulResidualBlocks(channels[(i - 1)], channels[i], num_residual_blocks, downsample=True, device=device, dtype=dtype) for i in range(1, len(channels))), fl.UseContext(context='t2iadapter', key='features'))

    def init_context(self) -> Contexts:
        # Residual stages append their feature maps to this list at runtime.
        return {'t2iadapter': {'features': []}}
def compile_with_cachecheck(yaml, errors_ok=False):
    """Compile *yaml* with and without a cache and require both runs to
    emit identical xDS JSON; returns the uncached compile result.

    Args:
        yaml: configuration input handed to Compile.
        errors_ok: when False, both IRs must be error-free.
    """
    secret_handler = _secret_handler()
    uncached = Compile(logger, yaml, k8s=True, secret_handler=secret_handler)
    cached = Compile(logger, yaml, k8s=True, secret_handler=secret_handler, cache=Cache(logger))
    if not errors_ok:
        for result in (uncached, cached):
            _require_no_errors(result['ir'])

    def xds_json(result):
        # Canonical JSON rendering so the comparison ignores dict ordering.
        return json.dumps(result['xds'].as_dict(), sort_keys=True, indent=2)

    assert xds_json(uncached) == xds_json(cached)
    return uncached
def gen_common_test_header(out, name):
    """Write the shared locitest common header to *out*.

    Emits the license banner, a fixed block of macros and shared test
    declarations, then one populate/check (+ scalar variants) prototype per
    concrete class and wire version, and finally the include guard close.

    Args:
        out: writable file-like object receiving the generated C header.
        name: output file name (part of the generator contract; unused here).
    """
    loxi_utils.gen_c_copy_license(out)
    out.write('\n/*\n * Test header file\n *\n * AUTOMATICALLY GENERATED FILE. Edits will be lost on regen.\n */\n\n#if !defined(_TEST_COMMON_H_)\n#define _TEST_COMMON_H_\n\n#define DISABLE_WARN_UNUSED_RESULT\n#include <loci/loci.h>\n#include <locitest/of_dup.h>\n#include <locitest/unittest.h>\n\nextern int global_error;\nextern int exit_on_error;\n\n/* Make option for -k to continue tests if errors */\n#define RUN_TEST(test) do { \\\n int rv; \\\n TESTCASE(test, rv); \\\n if (rv != TEST_PASS) { \\\n global_error=1; \\\n if (exit_on_error) return(1); \\\n } \\\n } while(0)\n\n#define TEST_OK(op) TEST_ASSERT((op) == OF_ERROR_NONE)\n#define TEST_INDIGO_OK(op) TEST_ASSERT((op) == INDIGO_ERROR_NONE)\n\n/*\n * Declarations of functions to populate scalar values in a a class\n */\n\nextern void of_test_str_fill(uint8_t *buf, int value, int len);\nextern int of_test_str_check(uint8_t *buf, int value, int len);\n\n\nextern int of_octets_populate(of_octets_t *octets, int value);\nextern int of_octets_check(of_octets_t *octets, int value);\nextern int of_match_populate(of_match_t *match, of_version_t version,\n int value);\nextern int of_match_check(of_match_t *match, of_version_t version, int value);\nextern int test_ident_macros(void);\nextern int test_dump_objs(void);\n\n/* In test_match_utils.c */\nextern int test_match_utils(void);\n\nextern int run_unified_accessor_tests(void);\nextern int run_match_tests(void);\nextern int run_utility_tests(void);\n\nextern int run_scalar_acc_tests(void);\nextern int run_list_tests(void);\nextern int run_message_tests(void);\n\nextern int run_validator_tests(void);\n\nextern int run_list_limits_tests(void);\n\nextern int test_ext_objs(void);\nextern int test_datafiles(void);\n\n')
    # One set of prototypes per (version, concrete class) pair.
    for version in of_g.of_version_range:
        for cls in of_g.standard_class_order:
            if (not loxi_utils.class_in_version(cls, version)):
                continue
            # Virtual classes have no instances to populate (lists excepted).
            if (type_maps.class_is_virtual(cls) and (not loxi_utils.class_is_list(cls))):
                continue
            out.write(('\nextern int %(cls)s_%(v_name)s_populate(\n %(cls)s_t *obj, int value);\nextern int %(cls)s_%(v_name)s_check(\n %(cls)s_t *obj, int value);\nextern int %(cls)s_%(v_name)s_populate_scalars(\n %(cls)s_t *obj, int value);\nextern int %(cls)s_%(v_name)s_check_scalars(\n %(cls)s_t *obj, int value);\n' % dict(cls=cls, v_name=loxi_utils.version_to_name(version))))
    out.write('\n#endif /* _TEST_COMMON_H_ */\n')
class RegressionPredictedVsActualScatterResults(MetricResult):
    """Metric result holding predicted-vs-actual scatter data, raw or
    pre-aggregated, for the current and optional reference datasets."""

    class Config():
        dict_include = False
        tags = {IncludeTags.Render}
    # Scatter payload for the current dataset (raw points or aggregated).
    current: Union[(PredActualScatter, AggPredActualScatter)]
    # Optional scatter payload for the reference dataset.
    reference: Optional[Union[(PredActualScatter, AggPredActualScatter)]]
    # True when the payloads above hold the aggregated variant.
    agg_data: bool
    # Typed accessors that narrow each field to its raw/aggregated variant.
    (current_raw, current_agg) = raw_agg_properties('current', PredActualScatter, AggPredActualScatter, False)
    (reference_raw, reference_agg) = raw_agg_properties('reference', PredActualScatter, AggPredActualScatter, True)
_meta(characters.alice.DollBlastEffect)
class DollBlastEffect():
    """UI metadata for Alice's DollBlast effect messages.

    NOTE(review): `_meta(...)` above looks like a stripped registration
    decorator (`@ui_meta(...)`), and the f-strings appear to have lost
    non-ASCII (likely Chinese) narrative text in transcription — confirm
    the message wording against the original.
    """

    def effect_string_before(self, act):
        # Compose the narration shown before the effect resolves; the
        # wording differs depending on whether damage is dealt.
        (src, tgt) = (act.source, act.target)
        c = act.card
        if act.do_damage:
            return f'{N.char(tgt)}{N.char(src)}({N.card(c)}),,BOOM!{N.char(tgt)}!'
        else:
            return f'{N.char(tgt)}{N.char(src)},{N.char(src)},{N.char(tgt)}{N.card(c)}'
def extract_exclamations(text_list):
    """Summarize exclamation-mark usage across *text_list*.

    Extends the base extraction summary with lowercase Unicode names of
    each mark and the exclamation sentences found in each text.
    """
    summary = extract(text_list, EXCLAMATION_MARK, key_name='exclamation_mark')
    mark_names = []
    for marks in summary['exclamation_marks']:
        if marks:
            mark_names.append([name(ch).lower() for ch in marks])
        else:
            mark_names.append([])
    summary['exclamation_mark_names'] = mark_names
    summary['exclamation_text'] = [EXCLAMATION.findall(entry) for entry in text_list]
    return summary
('supervisor', help='Generate configuration for supervisor')
('--user', help='optional user argument')
('--yes', help='Yes to regeneration of supervisor config', is_flag=True, default=False)
('--skip-redis', help='Skip redis configuration', is_flag=True, default=False)
('--skip-supervisord', help='Skip supervisord configuration', is_flag=True, default=False)
def setup_supervisor(user=None, yes=False, skip_redis=False, skip_supervisord=False):
    """Generate supervisor configuration for the current bench.

    NOTE(review): the lines above look like stripped click decorators
    (`@click.command(...)` / `@click.option(...)`).
    """
    from bench.utils import get_cmd_output
    from bench.config.supervisor import check_supervisord_config, generate_supervisor_config
    # Fail fast when supervisorctl is not installed.
    which('supervisorctl', raise_err=True)
    if ((not skip_supervisord) and ('Permission denied' in get_cmd_output('supervisorctl status'))):
        # Current user cannot talk to supervisord; repair its config first.
        check_supervisord_config(user=user)
    generate_supervisor_config(bench_path='.', user=user, yes=yes, skip_redis=skip_redis)
def do_create_or_login(user):
    """Persist *user*, mark them as logged in, and redirect post-login.

    Redirects to the user's coprs page when OpenID would bounce back to the
    site root, otherwise to the OpenID 'next' URL.
    """
    db.session.add(user)
    db.session.commit()
    flask.flash(u'Welcome, {0}'.format(user.name), 'success')
    # Attach the user to the request context for downstream handlers.
    flask.g.user = user
    app.logger.info("%s '%s' logged in", ('Admin' if user.admin else 'User'), user.name)
    if (flask.request.url_root == oid.get_next_url()):
        # No specific destination: land on the user's own project list.
        return flask.redirect(flask.url_for('coprs_ns.coprs_by_user', username=user.name))
    return flask.redirect(oid.get_next_url())
def plot_results(df, target_lib='', audio_format='', ext='png'):
    """Render benchmark timings as a duration point-plot and a per-lib bar plot.

    Saves 'benchmark_<target_lib>_<audio_format>_dur.<ext>' and
    '..._bar.<ext>' in the working directory.
    """
    sns.set_style('whitegrid')
    # Rank libraries by mean runtime so hue/bar ordering is fastest-first.
    ordered_libs = df.time.groupby(df.lib).mean().sort_values().index.tolist()
    fig = plt.figure()  # NOTE(review): catplot creates its own figure; this one looks unused.
    g = sns.catplot(x='duration', y='time', kind='point', hue_order=ordered_libs, hue='lib', data=df, height=6.6, aspect=1)
    g.savefig(('benchmark_%s_%s_dur.%s' % (target_lib, audio_format, ext)))
    fig = plt.figure()
    ax = sns.barplot(x='time', y='lib', data=df, order=ordered_libs, orient='h')
    fig.savefig(('benchmark_%s_%s_bar.%s' % (target_lib, audio_format, ext)), bbox_inches='tight')
class OptimizerModule():
    """Mixin providing nested state_dict/load_state_dict across tensors,
    nested OptimizerModules, dicts, and other compatible containers."""

    def _save_to_state_dict(self, states: Iterable, destination: Dict, keep_vars: bool, store_non_tensors: bool):
        """Recursively flatten *states* (key/value pairs) into *destination*."""
        for (key, value) in states:
            if isinstance(value, torch.Tensor):
                # Detach unless the caller wants live autograd variables.
                destination[key] = (value if keep_vars else value.detach())
            elif isinstance(value, OptimizerModule):
                destination[key] = {}
                value.state_dict(destination=destination[key], keep_vars=keep_vars, store_non_tensors=store_non_tensors)
            elif isinstance(value, dict):
                destination[key] = {}
                self._save_to_state_dict(states=value.items(), destination=destination[key], keep_vars=keep_vars, store_non_tensors=store_non_tensors)
            elif isinstance(value, COMPATIBLE_DATA_STRUCTURES):
                # Sequences are stored as index-keyed dicts via enumerate.
                destination[key] = {}
                self._save_to_state_dict(states=enumerate(value), destination=destination[key], keep_vars=keep_vars, store_non_tensors=store_non_tensors)
            elif store_non_tensors:
                # Plain values are kept only when explicitly requested.
                destination[key] = value

    def state_dict(self, destination: Optional[Dict]=None, keep_vars: bool=False, store_non_tensors: bool=False) -> Dict[(str, Any)]:
        """Return (or fill *destination* with) a nested state dictionary."""
        if (destination is None):
            destination = {}
        self._save_to_state_dict(self.__dict__.items(), destination, keep_vars, store_non_tensors)
        return destination

    def _load_from_state_dict(self, old_state: Any, new_state: Any, store_non_tensors: bool) -> Any:
        """Recursively copy *new_state* into *old_state* and return the result.

        Mismatched types are logged and the old value kept, making loading
        best-effort rather than strict.
        """
        if isinstance(old_state, torch.Tensor):
            if (not isinstance(new_state, torch.Tensor)):
                logger.warning(f'Both old state {old_state} and new state {new_state} must be tensors! Continuing...')
                return old_state
            # In-place copy preserves the tensor object identity.
            old_state.detach().copy_(new_state)
        elif isinstance(old_state, OptimizerModule):
            old_state.load_state_dict(new_state, store_non_tensors)
        elif isinstance(old_state, dict):
            if (not isinstance(new_state, dict)):
                logger.warning(f'Both old state {old_state} and new_state {new_state} must be dicts! Continuing...')
                return old_state
            # Keys absent from new_state keep their old values.
            for (key, old_value) in old_state.items():
                if (key in new_state):
                    old_state[key] = self._load_from_state_dict(old_state=old_value, new_state=new_state[key], store_non_tensors=store_non_tensors)
        elif isinstance(old_state, COMPATIBLE_DATA_STRUCTURES):
            # Rebuild the container, recursing only into loadable elements.
            old_state = type(old_state)(((self._load_from_state_dict(old_state=old_value, new_state=new_state[i], store_non_tensors=store_non_tensors) if (store_non_tensors or isinstance(old_value, (ALL_CLASSES + (OptimizerModule,)))) else old_value) for (i, old_value) in enumerate(old_state)))
        elif store_non_tensors:
            if (type(old_state) != type(new_state)):
                logger.warning(f'Types of old value {type(old_state)} and new value {type(new_state)} do not match! Continuing...')
                return old_state
            old_state = deepcopy(new_state)
        return old_state

    def load_state_dict(self, state_dict: Mapping[(str, Any)], store_non_tensors: bool=False) -> None:
        """Load *state_dict* (as produced by state_dict) into this module."""
        self._load_from_state_dict(self.__dict__, state_dict, store_non_tensors)
def test_streamplot():
    """streamplot should render center/saddle/sink vector fields and accept
    the documented keyword options without error."""
    mesh = UnitSquareMesh(10, 10)
    V = VectorFunctionSpace(mesh, 'CG', 1)
    x = SpatialCoordinate(mesh)
    x0 = Constant((0.5, 0.5))
    v = (x - x0)
    # Rotational field (center), hyperbolic field (saddle), and a spiral
    # sink (rotation plus an inward radial component).
    center = interpolate((2 * as_vector(((- v[1]), v[0]))), V)
    saddle = interpolate((2 * as_vector((v[0], (- v[1])))), V)
    r = Constant(0.5)
    sink = interpolate((center - (r * v)), V)
    (fig, axes) = plt.subplots(ncols=1, nrows=3, sharex=True, sharey=True)
    for ax in axes:
        ax.set_aspect('equal')
    color_norm = matplotlib.colors.PowerNorm(gamma=0.5)
    # Each field exercises a different subset of streamplot options.
    kwargses = [{'resolution': (1 / 48), 'tolerance': 0.02, 'norm': color_norm, 'seed': 0}, {'loc_tolerance': 1e-05, 'cmap': 'bone', 'vmax': 1.0, 'seed': 0}, {'min_length': (1 / 4), 'max_time': 5.0, 'seed': 0}]
    for (ax, function, kwargs) in zip(axes, [center, saddle, sink], kwargses):
        lines = streamplot(function, axes=ax, **kwargs)
        colorbar = fig.colorbar(lines, ax=ax)
        assert (lines is not None)
        assert (colorbar is not None)
class PatchedTemplate(Template):
    """string.Template variant exposing the names of its placeholders."""

    def get_identifiers(self):
        """Return the template's named placeholders in first-seen order.

        Raises:
            ValueError: if the pattern matched a group this code does not
                recognize (neither named/braced, escaped, nor invalid).
        """
        found = []
        for match in self.pattern.finditer(self.template):
            placeholder = match.group('named') or match.group('braced')
            if placeholder is not None:
                if placeholder not in found:
                    found.append(placeholder)
                continue
            # '$$' escapes and flagged-invalid matches are fine; anything
            # else means the regex has an unexpected group.
            if match.group('invalid') is None and match.group('escaped') is None:
                raise ValueError('Unrecognized named group in pattern', self.pattern)
        return found
class ParamBag(T):
    """Tree-structured parameter container; bracketed names recurse into
    nested sub-bags parsed from the same stream."""

    def __init__(self):
        T.__init__(self)
        self.dict = {}

    def parseVal(self, name, val, f, sect=''):
        """Store *val* under *sect* + *name*.

        A name of the form '[section]' starts a nested ParamBag which is
        loaded from *f*; the stored value becomes (val, sub_bag).
        """
        if name[0] == '[' and name[(- 1)] == ']':
            nested = ParamBag()
            nested.load(f)
            val = (val, nested)
        self.dict[sect + name] = val
class OptionSeriesScatter3dMarkerStates(Options):
    """Sub-option accessors for series.scatter3d.marker.states.

    NOTE(review): these look like stripped `@property` accessors, matching
    the sibling Option classes in this module.
    """

    def hover(self) -> 'OptionSeriesScatter3dMarkerStatesHover':
        # Lazily create/return the `hover` state sub-configuration.
        return self._config_sub_data('hover', OptionSeriesScatter3dMarkerStatesHover)

    def normal(self) -> 'OptionSeriesScatter3dMarkerStatesNormal':
        # Lazily create/return the `normal` state sub-configuration.
        return self._config_sub_data('normal', OptionSeriesScatter3dMarkerStatesNormal)

    def select(self) -> 'OptionSeriesScatter3dMarkerStatesSelect':
        # Lazily create/return the `select` state sub-configuration.
        return self._config_sub_data('select', OptionSeriesScatter3dMarkerStatesSelect)
class SimpleEditor(SimpleTextEditor):
    """wx file-name editor: a text field (or history dropdown) plus a
    browse button that opens a file dialog or popup.

    NOTE(review): the bare string lines like ('history:value') before some
    methods look like stripped `@observe(...)` decorators — confirm
    against the original source.
    """

    # Recent-entries dropdown widget (only when factory.entries > 0).
    history = Any()
    # Currently open PopupFile, if any.
    popup = Any()
    filter = filter_trait

    def init(self, parent):
        """Build the editor's widgets inside *parent*."""
        self.control = panel = TraitsUIPanel(parent, (- 1))
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        factory = self.factory
        if (factory.entries > 0):
            # History variant: dropdown of recent values plus '...' button.
            from .history_control import HistoryControl
            self.history = HistoryControl(entries=factory.entries, auto_set=factory.auto_set)
            control = self.history.create_control(panel)
            pad = 3
            button = wx.Button(panel, (- 1), '...', size=wx.Size(28, (- 1)))
        else:
            if factory.enter_set:
                # Commit the value when Enter is pressed.
                control = wx.TextCtrl(panel, (- 1), '', style=wx.TE_PROCESS_ENTER)
                panel.Bind(wx.EVT_TEXT_ENTER, self.update_object, id=control.GetId())
            else:
                control = wx.TextCtrl(panel, (- 1), '')
            # Commit when focus leaves the field.
            control.Bind(wx.EVT_KILL_FOCUS, self.update_object)
            if factory.auto_set:
                # Commit on every keystroke.
                panel.Bind(wx.EVT_TEXT, self.update_object, id=control.GetId())
            bmp = wx.ArtProvider.GetBitmap(wx.ART_FOLDER_OPEN, size=(15, 15))
            button = wx.BitmapButton(panel, (- 1), bitmap=bmp)
            pad = 8
        self._file_name = control
        sizer.Add(control, 1, wx.EXPAND)
        sizer.Add(button, 0, (wx.LEFT | wx.ALIGN_CENTER), pad)
        panel.Bind(wx.EVT_BUTTON, self.show_file_dialog, id=button.GetId())
        # Accept files dragged onto the editor.
        panel.SetDropTarget(FileDropTarget(self))
        panel.SetSizerAndFit(sizer)
        self._button = button
        self.set_tooltip(control)
        self.filter = factory.filter
        self.sync_value(factory.filter_name, 'filter', 'from', is_list=True)

    def dispose(self):
        """Unbind events and release child widgets."""
        panel = self.control
        panel.Unbind(wx.EVT_BUTTON, id=self._button.GetId())
        self._button = None
        if (self.history is not None):
            self.history.dispose()
            self.history = None
        else:
            (control, self._file_name) = (self._file_name, None)
            control.Unbind(wx.EVT_KILL_FOCUS)
            panel.Unbind(wx.EVT_TEXT_ENTER, id=control.GetId())
            panel.Unbind(wx.EVT_TEXT, id=control.GetId())
        super().dispose()

    ('history:value')
    def _history_value_changed(self, event):
        # Push history selections into the trait unless we caused the change.
        value = event.new
        if (not self._no_update):
            self._update(value)

    def update_object(self, event):
        """Commit the text field's contents to the trait value."""
        if isinstance(event, wx.FocusEvent):
            # Let wx continue normal focus processing.
            event.Skip()
        self._update(self._file_name.GetValue())

    def update_editor(self):
        """Refresh the widget from the trait value."""
        if (self.history is not None):
            # Guard so the resulting change event is not re-committed.
            self._no_update = True
            self.history.value = self.str_value
            self._no_update = False
        else:
            self._file_name.SetValue(self.str_value)

    def show_file_dialog(self, event=None):
        """Open the file popup (history mode) or a modal file dialog."""
        if (self.history is not None):
            self.popup = self._create_file_popup()
        else:
            dlg = self._create_file_dialog()
            dlg.open()
            if (dlg.return_code == OK):
                if self.factory.truncate_ext:
                    # Store the path without its extension.
                    self.value = splitext(dlg.path)[0]
                else:
                    self.value = dlg.path
                self.update_editor()

    def get_error_control(self):
        """Widget to flag when the trait value is invalid."""
        return self._file_name

    ('popup:value')
    def _popup_value_changed(self, event):
        # A file was picked in the popup; commit it and sync the history.
        file_name = event.new
        if self.factory.truncate_ext:
            file_name = splitext(file_name)[0]
        self.value = file_name
        self._no_update = True
        self.history.set_value(self.str_value)
        self._no_update = False

    ('popup:closed')
    def _popup_closed_changed(self, event):
        # Drop the reference so the popup can be garbage-collected.
        self.popup = None

    def restore_prefs(self, prefs):
        """Restore the saved history list (truncated to factory.entries)."""
        if (self.history is not None):
            self.history.history = prefs.get('history', [])[:self.factory.entries]

    def save_prefs(self):
        """Return persistable preferences (the history list), or None."""
        if (self.history is not None):
            return {'history': self.history.history[:]}
        return None

    def _create_file_dialog(self):
        # Build a modal open/save dialog honoring the current filter.
        if (len(self.factory.filter) > 0):
            wildcard = '|'.join(self.factory.filter)
        else:
            wildcard = 'All Files (*.*)|*.*'
        dlg = FileDialog(parent=self.get_control_widget(), default_path=self._file_name.GetValue(), action=('save as' if (self.factory.dialog_style == 'save') else 'open'), wildcard=wildcard)
        return dlg

    def _create_file_popup(self):
        # Popup file browser anchored to this editor.
        return PopupFile(control=self.control, file_name=self.str_value, filter=self.factory.filter, height=300)

    def _update(self, file_name):
        # Assign to the trait, silently ignoring validation failures.
        try:
            if self.factory.truncate_ext:
                file_name = splitext(file_name)[0]
            self.value = file_name
        except TraitError as excp:
            pass

    def _get_value(self):
        """Current file name as shown in the widget."""
        if (self.history is not None):
            return self.history.value
        return self._file_name.GetValue()
class SyncFedShuffleServer(SyncServer):
    """Synchronous FL server implementing FedShuffle-style aggregation.

    Supports uniform-random and importance-sampling client selection;
    under importance sampling every client update is weighted equally
    (the sampling probabilities already account for dataset sizes).
    """

    def __init__(self, *, global_model: IFLModel, channel: Optional[IFLChannel]=None, **kwargs) -> None:
        init_self_cfg(self, component_class=__class__, config_class=SyncFedShuffleServerConfig, **kwargs)
        self._global_model = global_model
        self._aggregator = Aggregator(module=global_model.fl_get_module(), aggregation_type=self.cfg.aggregation_type, only_federated_params=self.cfg.only_federated_params)
        self._active_user_selector = instantiate(self.cfg.active_user_selector)
        # Fall back to a pass-through channel when none is provided.
        self._channel: IFLChannel = (channel or IdentityChannel())
        self._optimizer = instantiate(config=self.cfg.server_optimizer, model=global_model.fl_get_module())
        assert (isinstance(self._active_user_selector, UniformlyRandomActiveUserSelector) or isinstance(self._active_user_selector, ImportanceSamplingActiveUserSelector)), 'Currently only Uniform and Importance Sampling user selectors are supported'
        # Per-client training-set sizes, filled lazily on first selection
        # (importance sampling only).
        self.samples_per_user = []

    def select_clients_for_training(self, num_total_users, users_per_round, data_provider: Optional[IFLDataProvider]=None, global_round_num: Optional[int]=None):
        """Pick the client indices participating in this round."""
        assert (data_provider is not None), 'Data provider must be passed into FedShuffleServer'
        if (isinstance(self._active_user_selector, ImportanceSamplingActiveUserSelector) and (len(self.samples_per_user) == 0)):
            # Cache per-user sample counts on first use.
            self.samples_per_user = [user.num_train_examples() for user in data_provider.train_users()]
        selected_clients = self._active_user_selector.get_user_indices(num_total_users=num_total_users, users_per_round=users_per_round, num_samples_per_user=self.samples_per_user, data_provider=data_provider, global_round_num=global_round_num)
        return selected_clients

    def receive_update_from_client(self, message: Message):
        """Weight and accumulate a client's model delta."""
        message = self._channel.client_to_server(message)
        if isinstance(self._active_user_selector, ImportanceSamplingActiveUserSelector):
            # Importance sampling already corrects for data size: equal weights.
            message.weight = 1.0
        self._aggregator.apply_weight_to_update(delta=message.model.fl_get_module(), weight=message.weight)
        self._aggregator.add_update(delta=message.model.fl_get_module(), weight=message.weight)
class BasePlugin():
    """Base class for plug-ins; syncs the plug-in's bundled view file.

    NOTE(review): `_get_view_file_path` takes `cls` — this looks like a
    stripped `@classmethod` decorator; confirm against the original.
    """

    NAME = 'base'
    DEPENDENCIES = []

    def __init__(self, plugin_path=None, view_updater=None):
        self.view_updater = (view_updater if (view_updater is not None) else ViewUpdater())
        if plugin_path:
            self._sync_view(plugin_path)

    def _sync_view(self, plugin_path: str):
        """Upload this plug-in's view file (if any) to the view updater."""
        view_path = self._get_view_file_path(plugin_path)
        if (view_path is not None):
            view_content = view_path.read_bytes()
            self.view_updater.update_view(self.NAME, view_content)

    def _get_view_file_path(cls, plugin_path: str) -> (Path | None):
        """Locate the single view file under <plugin>/../../view, or None."""
        views_dir = (Path(plugin_path).parent.parent / 'view')
        view_files = (list(views_dir.iterdir()) if views_dir.is_dir() else [])
        if (len(view_files) < 1):
            logging.debug(f'{cls.NAME}: No view available! Generic view will be used.')
            return None
        if (len(view_files) > 1):
            # Multiple candidates: warn and use the first (directory order).
            logging.warning(f"{cls.NAME}: Plug-in provides more than one view! '{view_files[0]}' is used!")
        return view_files[0]
def test_deepcopy():
    """Deep-copying a PositionalInjection must clone both the injection
    object and the provider it wraps."""
    wrapped_provider = providers.Factory(object)
    original = providers.PositionalInjection(wrapped_provider)
    duplicate = providers.deepcopy(original)
    assert duplicate is not original
    assert duplicate.get_original_value() is not original.get_original_value()
class TestMinorTickGenerator(unittest.TestCase):
    """MinorTickGenerator should match DefaultTickGenerator for explicit
    intervals and produce more ticks under 'auto'."""

    def setUp(self):
        self.tick_generator = MinorTickGenerator()

    def test_minor_tick_generator_with_interval(self):
        # With an explicit interval the minor generator must be identical
        # to the default one.
        self.default_tick_generator = DefaultTickGenerator()
        high = 1.0
        low = 0.0
        for interval in (0.05, 0.1, 0.2, 0.25, 0.5):
            minor_ticks = self.tick_generator.get_ticks(data_low=0, data_high=1, bounds_low=low, bounds_high=high, interval=interval)
            default_ticks = self.default_tick_generator.get_ticks(data_low=0, data_high=1, bounds_low=low, bounds_high=high, interval=interval)
            self.assertEqual(minor_ticks.tolist(), default_ticks.tolist())

    def test_minor_tick_generator_without_interval(self):
        # Under 'auto' the minor generator should be denser than the default.
        self.default_tick_generator = DefaultTickGenerator()
        high = 1.0
        low = 0.0
        minor_ticks = self.tick_generator.get_ticks(data_low=0, data_high=1, bounds_low=low, bounds_high=high, interval='auto')
        default_ticks = self.default_tick_generator.get_ticks(data_low=0, data_high=1, bounds_low=low, bounds_high=high, interval='auto')
        self.assertGreater(len(minor_ticks), len(default_ticks))
class OptionPlotoptionsPyramid3dSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Mapping options for the sonification low-pass filter frequency.

    NOTE(review): each accessor appears twice (getter then setter); the
    `@property` / `@<name>.setter` decorators look stripped in
    transcription, matching the sibling Option classes in this module.
    """

    def mapFunction(self):
        # Getter: mapping function name, default None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter counterpart.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property to map from, default None.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit to map within, default None.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def initialize_velocity_block_petsc_options_2():
    """Return a freshly-reset PETSc options database configured for a GMRES solve.

    Sets GMRES with restart 100, absolute tolerance 1e-8, and the modified
    Gram-Schmidt orthogonalization variant.
    """
    opts = PETSc.Options()
    opts.clear()
    # Belt-and-braces: explicitly drop anything that survived clear().
    for key in opts.getAll():
        opts.delValue(key)
    for name, value in (
        ('ksp_type', 'gmres'),
        ('ksp_gmres_restart', 100),
        ('ksp_atol', 1e-08),
        ('ksp_gmres_modifiedgramschmidt', ''),
    ):
        opts.setValue(name, value)
    return opts
class ListFiles(BaseTool):
    """Tool that lists directory entries, optionally recursing into subdirectories."""

    def run(self, path: str, recursive: bool, show_hidden: bool = False, exclude_dirs: bool = False):
        """Return a flat list of entry names found under *path*.

        Directory names are suffixed with '/'. Dot-prefixed entries are
        skipped unless *show_hidden* is true. When *exclude_dirs* is true the
        directory entries themselves are omitted, but their contents are
        still visited when *recursive* is true (matching the original
        control flow).
        """
        entries_list = []
        with os.scandir(path) as entries:
            for entry in entries:
                if not show_hidden and entry.name.startswith('.'):
                    continue
                if entry.is_dir():
                    if not exclude_dirs:
                        entries_list.append(f'{entry.name}/')
                    if recursive:
                        # BUG FIX: run() returns a plain list, so the previous
                        # ``self.run(...)['result']`` subscript raised
                        # ``TypeError: list indices must be integers`` on every
                        # recursive call. Extend with the returned list directly.
                        entries_list.extend(
                            self.run(os.path.join(path, entry.name), recursive, show_hidden, exclude_dirs)
                        )
                else:
                    entries_list.append(entry.name)
        return entries_list
class TestRocAuc(SimpleClassificationTest):
    """Threshold test on the ROC AUC score of a classification model."""

    name: ClassVar = 'ROC AUC Score'
    _roc_curve: ClassificationRocCurve

    def __init__(
        self,
        eq: Optional[Numeric] = None,
        gt: Optional[Numeric] = None,
        gte: Optional[Numeric] = None,
        is_in: Optional[List[Union[(Numeric, str, bool)]]] = None,
        lt: Optional[Numeric] = None,
        lte: Optional[Numeric] = None,
        not_eq: Optional[Numeric] = None,
        not_in: Optional[List[Union[(Numeric, str, bool)]]] = None,
        is_critical: bool = True,
    ):
        # The ROC curve renderer is created eagerly so it is available to
        # whatever reporting machinery consumes ``_roc_curve``.
        self._roc_curve = ClassificationRocCurve()
        super().__init__(
            eq=eq, gt=gt, gte=gte, is_in=is_in, lt=lt, lte=lte,
            not_eq=not_eq, not_in=not_in, is_critical=is_critical,
        )

    def get_value(self, result: DatasetClassificationQuality):
        # The metric under test is the dataset-level ROC AUC.
        return result.roc_auc

    def get_description(self, value: Numeric) -> str:
        if value is None:
            return 'Not enough data to calculate ROC AUC. Consider providing probabilities instead of labels.'
        return f'The ROC AUC Score is {value:.3g}. The test threshold is {self.get_condition()}'
def filter_log_fortianalyzer_cloud_filter_data(json):
    """Project *json* onto the keys accepted by log/fortianalyzer-cloud/filter.

    Invalid fields are stripped first, then only known, non-None options are
    kept in the returned dictionary.
    """
    option_list = ['anomaly', 'dlp_archive', 'dns', 'filter', 'filter_type', 'forward_traffic', 'free_style', 'gtp', 'local_traffic', 'multicast_traffic', 'netscan_discovery', 'netscan_vulnerability', 'severity', 'sniffer_traffic', 'ssh', 'voip', 'ztna_traffic']
    json = remove_invalid_fields(json)
    return {
        attribute: json[attribute]
        for attribute in option_list
        if attribute in json and json[attribute] is not None
    }
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.