code stringlengths 281 23.7M |
|---|
def _get_response(text: str, labels, spans: List[Tuple[str, int, int]]) -> str:
    """Render labelled spans as one ``<label>: <span>, <span>`` line per label.

    `spans` holds (label, start_token, end_token) triples with an exclusive
    end index; token indices are converted back to character offsets so the
    extracted text matches the original string exactly.
    """
    words = text.split()
    # Map token indices to character offsets; every token is marked usable.
    start_chars, end_chars = _get_token_char_maps(words, [True] * len(words))
    grouped = defaultdict(list)
    for label, first_tok, end_tok in spans:
        grouped[label].append(text[start_chars[first_tok]:end_chars[end_tok - 1]])
    return '\n'.join(f"{label}: {', '.join(grouped[label])}" for label in labels)
def load_python_plugin(plugin_module_path: str, plugin: str, _type: str) -> types.ModuleType:
    """Load a Fledge plugin module from disk and register it in ``sys.modules``.

    Tries ``<plugin_module_path>/<plugin>.py`` first; on FileNotFoundError it
    falls back to each directory listed in the ``_FLEDGE_PLUGIN_PATH``
    (semicolon-separated), looking in ``<dir>/<_type>/<plugin>/<plugin>.py``.

    Returns the loaded module, or None when the plugin cannot be found.

    Fixes over the previous version: the fallback search now stops at the
    first successful load instead of loading (and overwriting) from every
    configured directory, and a missing file in one fallback directory no
    longer raises an uncaught FileNotFoundError — the next path is tried.
    """
    _plugin = None
    module_name = 'fledge.plugins.{}.{}.{}'.format(_type, plugin, plugin)
    try:
        spec = importlib.util.spec_from_file_location(module_name, '{}/{}.py'.format(plugin_module_path, plugin))
        _plugin = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(_plugin)
        sys.modules[module_name] = _plugin
    except FileNotFoundError:
        if _FLEDGE_PLUGIN_PATH:
            for pp in _FLEDGE_PLUGIN_PATH.split(';'):
                if not os.path.isdir(pp):
                    continue
                plugin_module_path = '{}/{}/{}'.format(pp, _type, plugin)
                try:
                    spec = importlib.util.spec_from_file_location(module_name, '{}/{}.py'.format(plugin_module_path, plugin))
                    _plugin = importlib.util.module_from_spec(spec)
                    spec.loader.exec_module(_plugin)
                except FileNotFoundError:
                    _plugin = None  # this path has no such plugin; try the next one
                    continue
                sys.modules[module_name] = _plugin
                break  # first successful load wins
    return _plugin
def test_can_run_combined_transitions():
    """An OR-combined transition can be triggered from its initial state."""

    class CampaignMachine(StateMachine):
        # Life cycle of a marketing campaign.
        draft = State(initial=True)
        producing = State()
        closed = State()
        # `abort` is reachable from every state and always ends in `closed`.
        abort = draft.to(closed) | producing.to(closed) | closed.to(closed)
        produce = draft.to(producing)

    sm = CampaignMachine()
    sm.abort()
    assert sm.closed.is_active
def extractWwwWangmamareadCom(item):
    """Parse release metadata for items from 'www.wangmamaread.com'.

    Returns None for previews / items without chapter or volume info,
    a release message for recognized series tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip preview posts and items carrying neither a chapter nor a volume.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag on the site, canonical series name, translation type)
    tagmap = [
        ('Returning from the Immortal World', 'Returning from the Immortal World', 'translated'),
        ('Master of Trading Star Card Game (Rebirth)', 'Master of Trading Star Card Game (Rebirth)', 'translated'),
        ('Long Live Summons!', 'Long Live Summons!', 'translated'),
        ('Release that Witch', 'Release that Witch', 'translated'),
        ('Eastern Palace', 'Eastern Palace', 'translated'),
        ('Good Morning, Mr. President!', 'Good Morning, Mr. President!', 'translated'),
        ('Star Martial God Technique', 'Star Martial God Technique', 'translated'),
        ('Douluo Dalu: Legend of the Divine Realm', 'Douluo Dalu: Legend of the Divine Realm', 'translated'),
        ('zhan long', 'Zhan Long', 'translated'),
        ('Chronicles of Primordial Wars', 'Chronicles of Primordial Wars', 'translated'),
        ('To Be a Power in the Shadows!', 'To Be a Power in the Shadows!', 'translated'),
        ('The Tutorial Is Too Hard', 'The Tutorial Is Too Hard', 'translated'),
        ('Legend of the Cultivation God', 'Legend of the Cultivation God', 'translated'),
        ('The Anarchic Consort of the Prince', 'The Anarchic Consort of the Prince', 'translated'),
        ('Dragon Blood Warrior', 'Dragon Blood Warrior', 'translated'),
        ('Otherworld Nation Founding Chronicles', 'Otherworld Nation Founding Chronicles', 'translated'),
        ('across the stunning beast princess: phoenix against the world', 'across the stunning beast princess: phoenix against the world', 'translated'),
        ('Rebirth of the Rich and Wealthy', 'Rebirth of the Rich and Wealthy', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionNavigationAnnotationsoptionsEvents(Options):
    """Accessors for the ``navigation.annotationsOptions.events`` options.

    Each option is a getter/setter pair.  In the previous (flattened) form
    the decorators were missing, so each second ``def`` silently shadowed
    the first and only the setter survived; ``@property``/``@<name>.setter``
    restores both halves of every pair.
    """

    @property
    def add(self):
        """Return the configured ``add`` event handler (None when unset)."""
        return self._config_get(None)

    @add.setter
    def add(self, value: Any):
        self._config(value, js_type=False)

    @property
    def afterUpdate(self):
        """Return the configured ``afterUpdate`` event handler (None when unset)."""
        return self._config_get(None)

    @afterUpdate.setter
    def afterUpdate(self, value: Any):
        self._config(value, js_type=False)

    @property
    def click(self):
        """Return the configured ``click`` event handler (None when unset)."""
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        """Return the configured ``drag`` event handler (None when unset)."""
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        """Return the configured ``remove`` event handler (None when unset)."""
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)
class BuildingMenu():
    """A building menu attached to a caller, used to edit an object.

    Choices live in ``self.choices``; the current position in the menu tree
    is the chain of keys in ``self.keys``.  Subclasses override ``init(obj)``
    to add their choices.

    Fix applied: ``current_choice`` and ``relevant_choices`` are restored as
    properties — every internal call site reads them as plain attributes
    (``self.current_choice``, ``for ... in self.relevant_choices``), which
    as bare methods would yield the bound method object instead of a choice.
    """

    keys_go_back = ['']  # input(s) interpreted as "go back one level"
    sep_keys = '.'       # separator between parts of composite keys
    joker_key = '*'      # wildcard key part, matches any single entry
    min_shortcut = 1     # minimum length of auto-generated shortcuts

    def __init__(self, caller=None, obj=None, title='Building menu: {obj}', keys=None, parents=None, persistent=False):
        """Store menu state and, when `obj` is given, build the choices."""
        self.caller = caller
        self.obj = obj
        self.title = title
        self.keys = keys or []
        self.parents = parents or ()
        self.persistent = persistent
        self.choices = []
        self.cmds = {}
        self.can_quit = False
        if obj:
            self.init(obj)
            # A root menu must always offer a way out.
            if not parents and not self.can_quit:
                self.add_choice_quit(key=None)
            self._add_keys_choice()

    @property
    def current_choice(self):
        """Return the choice matching the current key chain, or None."""
        menu_keys = self.keys
        if not menu_keys:
            return None
        for choice in self.choices:
            choice_keys = choice.keys
            if len(menu_keys) != len(choice_keys):
                continue
            for menu_key, choice_key in zip(menu_keys, choice_keys):
                if choice_key == self.joker_key:
                    continue  # wildcard accepts any entry
                if not isinstance(menu_key, str) or menu_key != choice_key:
                    break  # mismatch → not this choice
            else:
                return choice
        return None

    @property
    def relevant_choices(self):
        """Return the choices reachable one level below the current keys."""
        menu_keys = self.keys
        relevant = []
        for choice in self.choices:
            choice_keys = choice.keys
            if not menu_keys and len(choice_keys) == 1:
                relevant.append(choice)
            elif len(menu_keys) == len(choice_keys) - 1:
                for menu_key, choice_key in zip(menu_keys, choice_keys):
                    if choice_key == self.joker_key:
                        continue
                    if not isinstance(menu_key, str) or menu_key != choice_key:
                        break
                else:
                    relevant.append(choice)
        return relevant

    def _save(self):
        """Cache the menu on the caller; persist its definition when asked to."""
        self.caller.ndb._building_menu = self
        if self.persistent:
            self.caller.db._building_menu = {
                'class': type(self).__module__ + '.' + type(self).__name__,
                'obj': self.obj,
                'title': self.title,
                'keys': self.keys,
                'parents': self.parents,
                'persistent': self.persistent,
            }

    def _add_keys_choice(self):
        """Auto-generate a unique shortcut key for every key-less choice."""
        for choice in self.choices:
            if choice.key:
                continue
            title = strip_ansi(choice.title.strip()).lower()
            # Try ever-longer substrings of the title until a free one is found.
            length = self.min_shortcut
            while length <= len(title):
                for start in range(len(title) - length + 1):
                    guess = title[start:start + length]
                    if guess not in self.cmds:
                        choice.key = guess
                        break
                if choice.key:
                    break
                length += 1
            if choice.key:
                self.cmds[choice.key] = choice
            else:
                raise ValueError('Cannot guess the key for {}'.format(choice))

    def init(self, obj):
        """Hook for subclasses: populate the menu with choices."""
        pass

    def add_choice(self, title, key=None, aliases=None, attr=None, text=None, glance=None, on_enter=None, on_nomatch=None, on_leave=None):
        """Add a choice to this menu and return it.

        Raises:
            ValueError: `key` collides with an existing key or alias.
        """
        key = (key or '').lower()
        aliases = [alias.lower() for alias in (aliases or [])]
        if attr and on_nomatch is None:
            # Attribute editing: any unmatched input sets the attribute.
            on_nomatch = menu_setattr
        if key and key in self.cmds:
            raise ValueError('A conflict exists between {} and {}, both use key or alias {}'.format(self.cmds[key], title, repr(key)))
        if attr:
            if glance is None:
                glance = '{obj.' + attr + '}'
            if text is None:
                text = '\n \n {attr} for {{obj}}(#{{obj.id}})\n\n You can change this value simply by entering it.\n Use |y{back}|n to go back to the main menu.\n\n Current value: |c{{{obj_attr}}}|n\n '.format(attr=attr, obj_attr='obj.' + attr, back='|n or |y'.join(self.keys_go_back))
        new_choice = Choice(title, key=key, aliases=aliases, attr=attr, text=text, glance=glance, on_enter=on_enter, on_nomatch=on_nomatch, on_leave=on_leave, menu=self, caller=self.caller, obj=self.obj)
        self.choices.append(new_choice)
        if key:
            self.cmds[key] = new_choice
        for alias in aliases:
            self.cmds[alias] = new_choice
        return new_choice

    def add_choice_edit(self, title='description', key='d', aliases=None, attr='db.desc', glance='\n {obj.db.desc}', on_enter=None):
        """Shortcut: add a choice that opens an editor on an attribute."""
        return self.add_choice(title, key=key, aliases=aliases, attr=attr, glance=glance, on_enter=on_enter or menu_edit, text='')

    def add_choice_quit(self, title='quit the menu', key='q', aliases=None, on_enter=None):
        """Shortcut: add a choice that closes the menu."""
        self.can_quit = True
        return self.add_choice(title, key=key, aliases=aliases, on_enter=on_enter or menu_quit)

    def open(self):
        """Open the menu for the caller: save state, swap cmdset, display."""
        self._save()
        if self.caller.cmdset.has(BuildingMenuCmdSet):
            self.caller.cmdset.remove(BuildingMenuCmdSet)
        self.caller.cmdset.add(BuildingMenuCmdSet, persistent=self.persistent)
        self.display()

    def open_parent_menu(self):
        """Re-open the parent menu recorded in ``self.parents``, if any."""
        parents = list(self.parents)
        if not parents:
            return None
        parent_class, parent_obj, parent_keys = parents.pop()
        if self.caller.cmdset.has(BuildingMenuCmdSet):
            self.caller.cmdset.remove(BuildingMenuCmdSet)
        try:
            menu_class = class_from_module(parent_class)
        except Exception:
            log_trace('BuildingMenu: attempting to load class {} failed'.format(repr(parent_class)))
            return
        try:
            building_menu = menu_class(self.caller, parent_obj, keys=parent_keys, parents=tuple(parents))
        except Exception:
            log_trace('An error occurred while creating building menu {}'.format(repr(parent_class)))
            return
        return building_menu.open()

    def open_submenu(self, submenu_class, submenu_obj, parent_keys=None):
        """Open `submenu_class` on `submenu_obj`, recording this menu as parent."""
        parents = list(self.parents)
        parents.append((type(self).__module__ + '.' + type(self).__name__, self.obj, parent_keys or []))
        if self.caller.cmdset.has(BuildingMenuCmdSet):
            self.caller.cmdset.remove(BuildingMenuCmdSet)
        try:
            menu_class = class_from_module(submenu_class)
        except Exception:
            log_trace('BuildingMenu: attempting to load class {} failed'.format(repr(submenu_class)))
            return
        try:
            building_menu = menu_class(self.caller, submenu_obj, parents=parents)
        except Exception:
            log_trace('An error occurred while creating building menu {}'.format(repr(submenu_class)))
            return
        return building_menu.open()

    def move(self, key=None, back=False, quiet=False, string=''):
        """Move forward (with `key`) or backward in the menu tree."""
        leaving = self.current_choice
        if leaving:
            leaving.leave('')
        if back:
            if not self.keys:
                raise ValueError('you already are at the top of the tree, you cannot move backward.')
            del self.keys[-1]
        else:
            if not key:
                raise ValueError('you are asking to move forward, you should specify a key.')
            self.keys.append(key)
        self._save()
        entering = self.current_choice
        if entering:
            entering.enter(string)
        if not quiet:
            self.display()

    def close(self):
        """Remove the menu cmdset and all cached/persistent menu data."""
        if self.caller.cmdset.has(BuildingMenuCmdSet):
            self.caller.cmdset.delete(BuildingMenuCmdSet)
        if self.caller.attributes.has('_building_menu'):
            self.caller.attributes.remove('_building_menu')
        if self.caller.nattributes.has('_building_menu'):
            self.caller.nattributes.remove('_building_menu')

    def display_title(self):
        """Return the formatted menu title."""
        return _call_or_get(self.title, menu=self, obj=self.obj, caller=self.caller).format(obj=self.obj)

    def display_choice(self, choice):
        """Return the one-line representation of `choice`, shortcut highlighted."""
        title = _call_or_get(choice.title, menu=self, choice=choice, obj=self.obj, caller=self.caller)
        pos = title.lower().find(choice.key.lower())
        line = ' '
        if pos >= 0:
            # Highlight the shortcut inside the title itself.
            line += title[:pos] + '[|y' + choice.key.title() + '|n]' + title[pos + len(choice.key):]
        else:
            line += '[|y' + choice.key.title() + '|n] ' + title
        if choice.glance:
            glance = _call_or_get(choice.glance, menu=self, choice=choice, caller=self.caller, string='', obj=self.obj)
            line += ': ' + glance.format(obj=self.obj, caller=self.caller)
        return line

    def display(self):
        """Send the current menu screen (choice text or choice list) to the caller."""
        choice = self.current_choice
        if self.keys and choice:
            text = choice.format_text()
        else:
            text = self.display_title() + '\n'
            for choice in self.relevant_choices:
                text += '\n' + self.display_choice(choice)
        self.caller.msg(text)
def restore(caller):
    """Restore a persistent building menu saved on `caller`, if any.

    Returns the rebuilt menu, or None when nothing is saved or the stored
    class cannot be loaded/instantiated.

    NOTE(review): takes `caller` rather than `self` — in the upstream
    package this may have lived on the menu class; confirm placement.
    """
    saved = caller.db._building_menu
    if not saved:
        return None
    class_name = saved.get('class')
    if not class_name:
        log_err('BuildingMenu: on caller {}, a persistent attribute holds building menu data, but no class could be found to restore the menu'.format(caller))
        return None
    try:
        menu_class = class_from_module(class_name)
    except Exception:
        log_trace('BuildingMenu: attempting to load class {} failed'.format(repr(class_name)))
        return None
    try:
        building_menu = menu_class(
            caller,
            saved.get('obj'),
            title=saved.get('title', ''),
            keys=saved.get('keys'),
            parents=saved.get('parents'),
            persistent=saved.get('persistent', False),
        )
    except Exception:
        log_trace('An error occurred while creating building menu {}'.format(repr(class_name)))
        return None
    return building_menu
def test_pr_378(tmpdir):
    """Regression test: `~`-prefixed key paths in the config must be expanded."""
    wd = tmpdir.strpath
    config_path = os.path.join(wd, 'config.ini')
    # Assemble the config from the shared snippets plus literal sections.
    config_text = ''.join([
        make_config_snippet('cloud', 'google'),
        '\n[login/google]\nimage_user=my_username\nimage_user_sudo=root\nimage_sudo=True\nuser_key_name=elasticluster\nuser_key_private=~/.ssh/google_compute_engine\nuser_key_public=~/.ssh/google_compute_engine.pub\n ',
        '\n[cluster/slurm]\ncloud=google\nlogin=google\nsetup=slurm_setup\nsecurity_group=default\nimage_id= ',
        make_config_snippet('setup', 'slurm_setup'),
    ])
    with open(config_path, 'w+') as config_file:
        config_file.write(config_text)
        config_file.flush()
    with patch('os.path.expanduser') as expanduser:
        expanduser.return_value = config_path
        creator = make_creator(config_path)
        # Both key paths must have gone through `expanduser`.
        expanduser.assert_any_call('~/.ssh/google_compute_engine.pub')
        expanduser.assert_any_call('~/.ssh/google_compute_engine')
        cluster = creator.create_cluster('slurm')
        assert os.path.isabs(cluster.user_key_public)
        assert os.path.isabs(cluster.user_key_private)
class VmDefineSerializer(VmBaseSerializer):
    """Serializer for creating/updating a VM definition (server profile).

    Field availability depends on the hypervisor type (KVM / bhyve / zone),
    and most defaults come from the datacenter settings resolved at request
    time in ``__init__``.  Uses the DRF 2.x serializer API
    (``restore_object`` + per-field ``validate_<field>`` hooks).
    """
    uuid = s.CharField(read_only=True)
    hostname = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\.-]+[A-Za-z0-9]$', max_length=128, min_length=4)
    alias = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\.-]+[A-Za-z0-9]$', max_length=24, min_length=4, required=False)
    ostype = s.IntegerChoiceField(choices=Vm.OSTYPE, default=settings.VMS_VM_OSTYPE_DEFAULT)
    hvm_type = s.IntegerChoiceField(choices=Vm.HVM_TYPE, default=settings.VMS_VM_HVM_TYPE_DEFAULT)
    cpu_type = s.ChoiceField(choices=Vm.CPU_TYPE, default=settings.VMS_VM_CPU_TYPE_DEFAULT)
    vcpus = s.IntegerField(max_value=1024)
    ram = s.IntegerField(max_value=1048576, min_value=1)
    note = s.CharField(required=False)
    owner = s.SlugRelatedField(slug_field='username', queryset=User.objects, read_only=False, required=False)
    node = s.SlugRelatedField(slug_field='hostname', queryset=Node.objects, read_only=False, required=False)
    template = s.SlugRelatedField(slug_field='name', queryset=VmTemplate.objects, read_only=False, required=False)
    tags = s.TagField(required=False, default=[])
    monitored_internal = s.BooleanField(default=settings.MON_ZABBIX_ENABLED)
    monitored = s.BooleanField(default=settings.VMS_VM_MONITORED_DEFAULT)
    monitoring_hostgroups = s.ArrayField(max_items=16, default=[], validators=(RegexValidator(regex=MonitoringBackend.RE_MONITORING_HOSTGROUPS),))
    monitoring_templates = s.ArrayField(max_items=32, default=[])
    installed = s.BooleanField(default=False)
    snapshot_limit_manual = s.IntegerField(required=False)
    snapshot_size_limit = s.IntegerField(required=False)
    snapshot_size_percent_limit = s.IntegerField(required=False)
    cpu_cap = s.IntegerField(read_only=True)
    cpu_shares = s.IntegerField(default=settings.VMS_VM_CPU_SHARES_DEFAULT, min_value=0, max_value=1048576)
    zfs_io_priority = s.IntegerField(default=settings.VMS_VM_ZFS_IO_PRIORITY_DEFAULT, min_value=0, max_value=1024)
    zpool = s.CharField(default=Node.ZPOOL, max_length=64)
    resolvers = s.ArrayField(read_only=True)
    maintain_resolvers = s.BooleanField(default=True)
    dns_domain = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\ \\._/-]*$', max_length=1024, required=False)
    routes = s.RoutesField(default={})
    vga = s.ChoiceField(choices=Vm.VGA_MODEL, default=settings.VMS_VGA_MODEL_DEFAULT)
    bootrom = s.ChoiceField(choices=Vm.BHYVE_BOOTROM, default=settings.VMS_BHYVE_BOOTROM_DEFAULT)
    mdata = s.MetadataField(default=settings.VMS_VM_MDATA_DEFAULT, validators=(validate_mdata(Vm.RESERVED_MDATA_KEYS),))
    locked = s.BooleanField(read_only=True, required=False)
    created = s.DateTimeField(read_only=True, required=False)

    def __init__(self, request, *args, **kwargs):
        """Resolve DC-dependent defaults/validators and trim hypervisor-specific fields."""
        self.request = request
        self.old_hostname = None
        self.hostname_changed = False
        self.zpool_changed = False
        self.node_changed = False
        self.update_node_resources = False
        self.update_storage_resources = []
        self.check_node_resources = kwargs.pop('check_node_resources', True)
        self.zone_img = None  # default zone image, set by validate_ostype() for zones
        self.dc_settings = dc_settings = self.request.dc.settings
        hostname = kwargs.pop('hostname', None)
        data = kwargs.get('data', None)
        super(VmDefineSerializer, self).__init__(request, *args, **kwargs)
        # A template can only pre-seed defaults when creating a new VM.
        if (self.request.method == 'POST'):
            vm_template = get_vm_template(request, data)
        else:
            vm_template = None
        self._is_hvm = is_hvm(self.object, data, template=vm_template)
        self._is_kvm = is_kvm(self.object, data, template=vm_template)
        self._is_bhyve = (self._is_hvm and (not self._is_kvm))
        # Drop the fields that do not apply to this hypervisor type.
        if self._is_hvm:
            del self.fields['maintain_resolvers']
            del self.fields['routes']
            del self.fields['dns_domain']
        if (not self._is_kvm):
            del self.fields['cpu_type']
            del self.fields['vga']
        if (not self._is_bhyve):
            del self.fields['bootrom']
        # Per-instance defaults only make sense for a single-object serializer.
        # NOTE(review): nesting of the following block reconstructed from a
        # whitespace-mangled source — confirm against upstream.
        if (not kwargs.get('many', False)):
            self.fields['owner'].default = request.user.username
            self.fields['ostype'].default = dc_settings.VMS_VM_OSTYPE_DEFAULT
            self.fields['hvm_type'].default = dc_settings.VMS_VM_HVM_TYPE_DEFAULT
            self.fields['zpool'].default = dc_settings.VMS_STORAGE_DEFAULT
            self.fields['monitored_internal'].default = (DefaultDc().settings.MON_ZABBIX_ENABLED and dc_settings._MON_ZABBIX_VM_SYNC)
            self.fields['monitored'].default = (dc_settings.MON_ZABBIX_ENABLED and dc_settings.MON_ZABBIX_VM_SYNC and dc_settings.VMS_VM_MONITORED_DEFAULT)
            self.fields['cpu_shares'].default = dc_settings.VMS_VM_CPU_SHARES_DEFAULT
            self.fields['zfs_io_priority'].default = dc_settings.VMS_VM_ZFS_IO_PRIORITY_DEFAULT
            self.fields['owner'].queryset = get_owners(self.request)
            self.fields['template'].queryset = get_templates(self.request)
            self.fields['node'].queryset = get_nodes(self.request, is_compute=True)
            self.fields['mdata'].default = dc_settings.VMS_VM_MDATA_DEFAULT
            field_snapshot_limit_manual = self.fields['snapshot_limit_manual']
            field_snapshot_size_limit = self.fields['snapshot_size_limit']
            field_snapshot_size_percent_limit = self.fields['snapshot_size_percent_limit']
            field_snapshot_limit_manual.default = dc_settings.VMS_VM_SNAPSHOT_LIMIT_MANUAL_DEFAULT
            field_snapshot_size_limit.default = dc_settings.VMS_VM_SNAPSHOT_SIZE_LIMIT_DEFAULT
            field_snapshot_size_percent_limit.default = dc_settings.VMS_VM_SNAPSHOT_SIZE_PERCENT_LIMIT_DEFAULT
            # When a DC-level limit exists the field becomes mandatory and the
            # accepted range shrinks to [1, limit]; otherwise the full range
            # is allowed.  (required-flag placement reconstructed — confirm.)
            if (dc_settings.VMS_VM_SNAPSHOT_LIMIT_MANUAL is None):
                (min_snap, max_snap) = (0, 65536)
            else:
                (min_snap, max_snap) = (1, int(dc_settings.VMS_VM_SNAPSHOT_LIMIT_MANUAL))
                field_snapshot_limit_manual.required = field_snapshot_limit_manual.disallow_empty = True
            field_snapshot_limit_manual.validators.append(validators.MinValueValidator(min_snap))
            field_snapshot_limit_manual.validators.append(validators.MaxValueValidator(max_snap))
            if (dc_settings.VMS_VM_SNAPSHOT_SIZE_LIMIT is None):
                # NOTE(review): `(0, )` is a 1-tuple unpacked into two names —
                # this raises ValueError at runtime; the upper bound constant
                # appears to have been lost. Restore the intended max value.
                (min_snaps_size, max_snaps_size) = (0, )
            else:
                (min_snaps_size, max_snaps_size) = (1, int(dc_settings.VMS_VM_SNAPSHOT_SIZE_LIMIT))
                field_snapshot_size_limit.required = field_snapshot_size_limit.disallow_empty = True
            field_snapshot_size_limit.validators.append(validators.MinValueValidator(min_snaps_size))
            field_snapshot_size_limit.validators.append(validators.MaxValueValidator(max_snaps_size))
            if (dc_settings.VMS_VM_SNAPSHOT_SIZE_PERCENT_LIMIT is None):
                (min_snaps_size_perc, max_snaps_size_perc) = (0, 10000)
            else:
                (min_snaps_size_perc, max_snaps_size_perc) = (1, int(dc_settings.VMS_VM_SNAPSHOT_SIZE_PERCENT_LIMIT))
                field_snapshot_size_percent_limit.required = field_snapshot_size_percent_limit.disallow_empty = True
            field_snapshot_size_percent_limit.validators.append(validators.MinValueValidator(min_snaps_size_perc))
            field_snapshot_size_percent_limit.validators.append(validators.MaxValueValidator(max_snaps_size_perc))
            if self._is_kvm:
                self.fields['vga'].default = dc_settings.VMS_VGA_MODEL_DEFAULT
            if self._is_bhyve:
                self.fields['bootrom'].default = dc_settings.VMS_BHYVE_BOOTROM_DEFAULT
            # Zones may run with 0 vCPUs (fair-share only) unless a CPU cap is required.
            if (self._is_hvm or dc_settings.VMS_VM_CPU_CAP_REQUIRED):
                vcpus_min = 1
            else:
                vcpus_min = 0
            self.fields['vcpus'].validators.append(validators.MinValueValidator(vcpus_min))
            if (self.request.method == 'POST'):
                self.fields['hostname'].default = hostname
                self.fields['alias'].default = hostname
                if (not self._is_hvm):
                    # Zones derive the default DNS domain from the hostname.
                    if ('.' in hostname):
                        self.fields['dns_domain'].default = hostname.split('.', 1)[(- 1)]
                    else:
                        self.fields['dns_domain'].default = ''
            if vm_template:
                # Template values override the DC-level defaults.
                if (vm_template.ostype is not None):
                    self.fields['ostype'].default = vm_template.ostype
                if (vm_template.hvm_type is not None):
                    self.fields['hvm_type'].default = vm_template.hvm_type
                for (field, value) in vm_template.vm_define.items():
                    try:
                        self.fields[field].default = value
                    except KeyError:
                        # Template may define fields removed for this hypervisor type.
                        pass

    def restore_object(self, attrs, instance=None):
        """Build or update the Vm model instance from validated attrs (DRF 2.x)."""
        if (instance is not None):
            vm = instance
        else:
            vm = Vm(dc=self.request.dc)
        if (('owner' in attrs) and (attrs['owner'] is not None)):
            vm.owner = attrs['owner']
        vm.hostname_is_valid_fqdn()
        _json = vm.json
        dc_settings = vm.dc.settings
        if ('uuid' not in _json):
            # Fresh VM json → seed with DC-level defaults and resolvers.
            _json.update2(dc_settings.VMS_VM_JSON_DEFAULTS.copy())
            _json['resolvers'] = dc_settings.VMS_VM_RESOLVERS_DEFAULT
        if (('template' in attrs) and (attrs['template'] is not None)):
            vm.template = attrs['template']
            _json.update2(vm.sync_template())
            data = vm.template.vm_define
        else:
            data = {}
        vm.json = _json
        # Request attrs win over template-provided values.
        data.update(attrs)
        # ostype/hvm_type must go through their setters, not setattr().
        if ('ostype' in data):
            vm.set_ostype(data.pop('ostype'))
        if ('hvm_type' in data):
            vm.set_hvm_type(data.pop('hvm_type'))
        for (key, val) in iteritems(data):
            if (key == 'node'):
                vm.set_node(val)
            elif (key == 'tags'):
                vm.set_tags(val)
            else:
                setattr(vm, key, val)
        if ((instance is None) and (not vm.is_hvm()) and ('image_uuid' not in vm.json)):
            # New zone without an explicit image → apply the default zone
            # image selected earlier in validate_ostype().
            vm.save_item('image_uuid', self.zone_img.uuid, save=False)
            vm.save_item('quota', int(round((float(self.zone_img.size) / float(1024)))), save=False)
            vm.save_item('zfs_root_compression', self.dc_settings.VMS_DISK_COMPRESSION_DEFAULT, save=False)
        mdata = vm.mdata
        if (('hostname' not in mdata) or self.hostname_changed):
            mdata['hostname'] = vm.hostname
            vm.mdata = mdata
        return vm

    def validate_owner(self, attrs, source):
        """Delegate ownership checks to the shared validator."""
        validate_owner(self.object, attrs.get(source, None), _('VM'))
        return attrs

    def validate_node(self, attrs, source):
        """Allow a node change only before deployment; require an online node."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if (self.object and self.object.node):
                if (self.object.node != value):
                    if self.object.is_notcreated():
                        self.node_changed = True
                    else:
                        raise s.ValidationError(_('Cannot change node.'))
            elif (value is not None):
                self.node_changed = True
            if (self.node_changed and value):
                if (value.status != Node.ONLINE):
                    raise s.ValidationError(_('Node is currently not available.'))
                if self.object:
                    validate_nic_tags(self.object, new_node=value)
        return attrs

    def validate_hostname(self, attrs, source):
        """Reject duplicate or blacklisted hostnames; track renames."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if (self.object and ((self.object.hostname == value) or (self.object.uuid == value))):
                pass
            elif Vm.objects.filter((Q(hostname__iexact=value) | Q(uuid__iexact=value))).exists():
                raise ObjectAlreadyExists(model=Vm)
            elif (('..' in value) or ('--' in value) or (value in INVALID_HOSTNAMES)):
                raise s.ValidationError(s.WritableField.default_error_messages['invalid'])
            if (self.object and (self.object.hostname != value)):
                self.old_hostname = self.object.hostname
                self.hostname_changed = True
        return attrs

    def validate_template(self, attrs, source):
        """A template can only be set at creation time."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if (self.object and value and (self.object.template != value)):
                raise s.ValidationError(_('Cannot change template.'))
        return attrs

    def validate_cpu_shares(self, attrs, source):
        """Only staff may deviate from the DC default."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if ((not self.request.user.is_staff) and (value != self.dc_settings.VMS_VM_CPU_SHARES_DEFAULT)):
                raise s.ValidationError(PERMISSION_DENIED)
        return attrs

    def validate_zfs_io_priority(self, attrs, source):
        """Only staff may deviate from the DC default."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if ((not self.request.user.is_staff) and (value != self.dc_settings.VMS_VM_ZFS_IO_PRIORITY_DEFAULT)):
                raise s.ValidationError(PERMISSION_DENIED)
        return attrs

    def validate_zpool(self, attrs, source):
        """Allow a zpool change only for undeployed HVM servers."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if self.object:
                if (self.object.zpool == value):
                    return attrs
                if self.object.is_deployed():
                    raise s.ValidationError(_('Cannot change zpool.'))
                if (not self.object.is_hvm()):
                    raise s.ValidationError(_('Cannot change zpool for this OS type. Please change it on the first disk.'))
            self.zpool_changed = True
        return attrs

    def validate_ostype(self, attrs, source):
        """Forbid ostype changes; for zones pick the default zone image."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if self.object:
                if (self.object.ostype != value):
                    raise s.ValidationError(_('Cannot change ostype.'))
            elif (not is_hvm(self.object, ostype=value)):
                if (not (settings.VMS_ZONE_ENABLED and self.dc_settings.VMS_ZONE_ENABLED)):
                    raise s.ValidationError(_('This OS type is not supported.'))
                if (value == Vm.LINUX_ZONE):
                    default_zone_image = self.dc_settings.VMS_DISK_IMAGE_LX_ZONE_DEFAULT
                else:
                    default_zone_image = self.dc_settings.VMS_DISK_IMAGE_ZONE_DEFAULT
                zone_images = get_images(self.request, ostype=value)
                try:
                    self.zone_img = zone_images.get(name=default_zone_image)
                except Image.DoesNotExist:
                    # Default image not imported into this DC — fall back to any.
                    self.zone_img = zone_images.first()
                if (not self.zone_img):
                    raise s.ValidationError(_('Default disk image for this OS type is not available.'))
        return attrs

    def validate_hvm_type(self, attrs, source):
        """Forbid hypervisor-type changes; require a hypervisor for HVM brands."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            ostype = None
            if self.object:
                if (self.object.hvm_type != value):
                    raise s.ValidationError(_('Cannot change hypervisor type.'))
                elif (self.object.ostype is not None):
                    ostype = self.object.ostype
            if ('ostype' in attrs):
                ostype = attrs['ostype']
            if (ostype is not None):
                brand = Vm.OSTYPE_BRAND.get(ostype, 'hvm')
                if ((brand == 'hvm') and (value == _HVMType.Hypervisor_NONE)):
                    raise s.ValidationError(_('You must select correct hypervisor.'))
        return attrs

    def validate_monitored_internal(self, attrs, source):
        """Only staff may change the internal-monitoring flag."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if ((not self.request.user.is_staff) and (value != self.fields['monitored_internal'].default)):
                raise s.ValidationError(PERMISSION_DENIED)
        return attrs

    def validate_monitoring_hostgroups(self, attrs, source):
        """Enforce the DC's allowed-hostgroups restriction, if enabled."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if (self.object and (self.object.monitoring_hostgroups == value)):
                return attrs
            elif (self.dc_settings.MON_ZABBIX_HOSTGROUPS_VM_RESTRICT and (not set(value).issubset(set(self.dc_settings.MON_ZABBIX_HOSTGROUPS_VM_ALLOWED)))):
                raise s.ValidationError(_('Selected monitoring hostgroups are not available.'))
        return attrs

    def validate_monitoring_templates(self, attrs, source):
        """Enforce the DC's allowed-templates restriction, if enabled."""
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if (self.object and (self.object.monitoring_templates == value)):
                return attrs
            elif (self.dc_settings.MON_ZABBIX_TEMPLATES_VM_RESTRICT and (not set(value).issubset(set(self.dc_settings.MON_ZABBIX_TEMPLATES_VM_ALLOWED)))):
                raise s.ValidationError(_('Selected monitoring templates are not available.'))
        return attrs

    def validate_node_resources(self, attrs):
        """Check that the target node has enough CPU, RAM and disk.

        Collects all problems into ``self._errors['node']``; on success flags
        ``self.update_node_resources`` so the view updates accounting later.
        """
        vm = self.object
        dc = self.request.dc
        node = None
        node_errors = []
        if ('vcpus' in attrs):
            in_cpu = Vm.calculate_cpu_count_from_vcpus(attrs['vcpus'])
        else:
            in_cpu = None
        in_ram = attrs.get('ram', None)
        if self.node_changed:
            # Moving to a (new) node: the full footprint must fit there.
            node = attrs['node']
            old_cpu = old_ram = new_disk = 0
            if vm:
                vm_disks = vm.get_disks()
                if node:
                    (old_cpu, old_ram, new_disk) = vm.get_cpu_ram_disk(zpool=node.zpool)
                for (zpool, size) in vm_disks.items():
                    try:
                        ns = validate_zpool(self.request, zpool, node=node)
                    except s.ValidationError as err:
                        node_errors.extend(err.messages)
                    else:
                        logger.info('Checking storage %s free space (%s) for vm %s', ns.storage, size, vm)
                        if ns.check_free_space(size):
                            self.update_storage_resources.append(ns)
                        else:
                            node_errors.append((_('Not enough free disk space on storage with zpool=%s.') % zpool))
                if vm.node:
                    # Storages on the old node must be refreshed too.
                    self.update_storage_resources.extend(list(vm.node.get_node_storages(dc, vm_disks.keys())))
            if self._is_hvm:
                ram_overhead = settings.VMS_VM_KVM_MEMORY_OVERHEAD
            else:
                ram_overhead = 0
            if (in_cpu is None):
                new_cpu = old_cpu
            else:
                new_cpu = in_cpu
            if (in_ram is None):
                new_ram = (old_ram + ram_overhead)
            else:
                new_ram = (in_ram + ram_overhead)
        elif (vm and vm.node and ((in_cpu is not None) or (in_ram is not None))):
            # Same node, resized VM: only the delta needs to fit.
            node = vm.node
            (old_cpu, old_ram) = vm.get_cpu_ram()
            new_disk = 0
            if (in_cpu is None):
                new_cpu = 0
            else:
                new_cpu = (in_cpu - old_cpu)
            if (in_ram is None):
                new_ram = 0
            else:
                new_ram = (in_ram - old_ram)
        if node:
            dc_node = node.get_dc_node(dc)
            logger.info('Checking node=%s, dc_node=%s resources (cpu=%s, ram=%s, disk=%s) for vm %s', node, dc_node, new_cpu, new_ram, new_disk, vm)
            if ((new_cpu > 0) and (not dc_node.check_free_resources(cpu=new_cpu))):
                node_errors.append(_('Not enough free vCPUs on node.'))
            if ((new_ram > 0) and (not dc_node.check_free_resources(ram=new_ram))):
                node_errors.append(_('Not enough free RAM on node.'))
            if ((new_disk > 0) and (not dc_node.check_free_resources(disk=new_disk))):
                node_errors.append(_('Not enough free disk space on node.'))
            if self._is_bhyve:
                if (not node.bhyve_capable):
                    node_errors.append(_('Node is not bhyve capable'))
                if (new_cpu > node.bhyve_max_vcpus):
                    node_errors.append((_('Bhyve supports max %d vcpus') % node.bhyve_max_vcpus))
                if (node.platform_version_short < node.BHYVE_MIN_PLATFORM_SHORT):
                    node_errors.append(_('Compute node platform is too old for bhyve.'))
            if node_errors:
                self._errors['node'] = s.ErrorList(node_errors)
            else:
                self.update_node_resources = True

    def validate(self, attrs):
        """Cross-field validation: DC limits, template/ostype match, owner,
        zpool availability, alias uniqueness and node resources."""
        vm = self.object
        dc_settings = self.dc_settings
        if (self.request.method == 'POST'):
            # Enforce the per-DC limit on the number of server definitions.
            limit = dc_settings.VMS_VM_DEFINE_LIMIT
            if (limit is not None):
                total = self.request.dc.vm_set.count()
                if (int(limit) <= total):
                    raise s.ValidationError(_('Maximum number of server definitions reached.'))
        try:
            ostype = attrs['ostype']
        except KeyError:
            ostype = vm.ostype
        # Windows guests default to the host CPU type unless explicitly set.
        if ((not vm) and (ostype == Vm.WINDOWS) and ('cpu_type' not in self.init_data)):
            attrs['cpu_type'] = Vm.CPU_TYPE_HOST
        template = attrs.get('template', None)
        if (template and template.ostype):
            if (template.ostype != ostype):
                err = _('Server template is only available for servers with "%(ostype)s" OS type.')
                self._errors['template'] = s.ErrorList([(err % {'ostype': template.get_ostype_display()})])
        if (('owner' in attrs) and (attrs['owner'] is None)):
            # Empty owner: keep the current one, or default to the requester.
            if vm:
                del attrs['owner']
            else:
                attrs['owner'] = self.request.user
        if (self.zpool_changed or self.node_changed):
            try:
                zpool = attrs['zpool']
            except KeyError:
                zpool = vm.zpool
            try:
                node = attrs['node']
            except KeyError:
                if vm:
                    node = vm.node
                else:
                    node = None
            try:
                validate_zpool(self.request, zpool, node=node)
            except s.ValidationError as err:
                self._errors['zpool'] = err.messages
        if ('alias' in attrs):
            # Alias must be unique per (dc, owner).
            if (vm and ('owner' not in attrs)):
                owner = vm.owner
            elif ('owner' in attrs):
                owner = attrs['owner']
            else:
                owner = self.request.user
            alias = attrs['alias']
            if (vm and (vm.alias == alias)):
                pass
            elif Vm.objects.filter(dc=self.request.dc, owner=owner, alias__iexact=alias).exists():
                self._errors['alias'] = s.ErrorList([_('This server name is already in use. Please supply a different server name.')])
        if (not self._errors):
            self.validate_node_resources(attrs)
        # Monitoring flags are forced off when the DC has monitoring disabled.
        if (('monitored_internal' in attrs) and (not (DefaultDc().settings.MON_ZABBIX_ENABLED and dc_settings._MON_ZABBIX_VM_SYNC))):
            attrs['monitored_internal'] = False
        if (('monitored' in attrs) and (not (dc_settings.MON_ZABBIX_ENABLED and dc_settings.MON_ZABBIX_VM_SYNC))):
            attrs['monitored'] = False
        return attrs
def setup_to_pass():
    """Generator fixture: install and configure ntpd, yield, then tear down."""
    print(shellexec('yum install -q -y ntp'))
    # Keep a backup of the stock config before editing it in place.
    shutil.copy('/etc/ntp.conf', '/etc/ntp.conf.bak')
    shellexec("sed -i 's/restrict default.*/restrict default kod nomodify notrap nopeer noquery/' /etc/ntp.conf")
    for command in ('systemctl enable ntpd', 'systemctl start ntpd'):
        print(shellexec(command))
    yield None
    # Teardown: remove ntp and hand the clock back to chronyd.
    for command in ('yum remove -q -y ntp', 'systemctl disable ntpd', 'systemctl start chronyd'):
        print(shellexec(command))
def main():
    """Entry point for ciftify_meants: parse CLI arguments, configure
    console logging verbosity, run the extraction, and exit with its
    return code."""
    arguments = docopt(__doc__)
    debug = arguments['--debug']
    verbose = arguments['--verbose']
    ch = logging.StreamHandler()
    # Warnings by default; --verbose and --debug progressively lower the
    # console handler's threshold (--debug wins when both are given).
    ch.setLevel(logging.WARNING)
    if verbose:
        ch.setLevel(logging.INFO)
    if debug:
        ch.setLevel(logging.DEBUG)
    logger.addHandler(ch)
    logger.info('{}{}'.format(ciftify.utils.ciftify_logo(), ciftify.utils.section_header('Starting ciftify_meants')))
    ciftify.utils.log_arguments(arguments)
    settings = UserSettings(arguments)
    ret = run_ciftify_meants(settings)
    logger.info(ciftify.utils.section_header('Done ciftify_meants'))
    sys.exit(ret)
def merge_args_and_kwargs(function_abi: ABIFunction, args: Sequence[Any], kwargs: Dict[(str, Any)]) -> Tuple[(Any, ...)]:
    """Merge positional and keyword arguments into one positional tuple,
    ordered as declared in ``function_abi['inputs']``.

    Raises:
        TypeError: if the total argument count is wrong, an argument is
            supplied both positionally and by keyword, or an unknown
            keyword name is given.
    """
    # Compute the expected count once.  The ABI may omit 'inputs' entirely;
    # the previous error message indexed function_abi['inputs'] directly,
    # which raised KeyError instead of the intended TypeError in that case.
    expected_count = len(function_abi.get('inputs', []))
    if ((len(args) + len(kwargs)) != expected_count):
        raise TypeError(f"Incorrect argument count. Expected '{expected_count}'. Got '{(len(args) + len(kwargs))}'")
    # Fast path: nothing to reorder when no keyword arguments were given.
    if (not kwargs):
        return cast(Tuple[(Any, ...)], args)
    kwarg_names = set(kwargs.keys())
    sorted_arg_names = tuple((arg_abi['name'] for arg_abi in function_abi['inputs']))
    # Pair the positional values with the leading declared names.
    args_as_kwargs = dict(zip(sorted_arg_names, args))
    # Reject arguments supplied both positionally and by keyword.
    duplicate_args = kwarg_names.intersection(args_as_kwargs.keys())
    if duplicate_args:
        raise TypeError(f"{function_abi.get('name')}() got multiple values for argument(s) '{', '.join(duplicate_args)}'")
    # Reject keywords that do not match any declared input name.
    unknown_args = kwarg_names.difference(sorted_arg_names)
    if unknown_args:
        if function_abi.get('name'):
            raise TypeError(f"{function_abi.get('name')}() got unexpected keyword argument(s) '{', '.join(unknown_args)}'")
        raise TypeError(f"Type: '{function_abi.get('type')}' got unexpected keyword argument(s) '{', '.join(unknown_args)}'")
    # Sort all (name, value) pairs by declared position; zip(*...) yields
    # (names, values) and index 1 is the values tuple.
    sorted_args = tuple(zip(*sorted(itertools.chain(kwargs.items(), args_as_kwargs.items()), key=(lambda kv: sorted_arg_names.index(kv[0])))))
    if sorted_args:
        return sorted_args[1]
    else:
        return tuple()
class OptionPlotoptionsPackedbubbleSonificationContexttracksMappingPan(Options):
    """Generated option wrapper for the Highcharts
    ``plotOptions.packedbubble.sonification.contextTracks.mapping.pan``
    configuration subtree.

    NOTE(review): each option below appears as a getter/setter pair with
    the same name; the @property/@<name>.setter decorators appear to have
    been stripped during extraction, so as written each later ``def``
    shadows the earlier one -- verify against the generator's output.
    """
    def mapFunction(self):
        # Getter (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored as a raw (non-JS) value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)
    def max(self):
        # Getter (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def within(self):
        # Getter (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
# NOTE(review): the bare '.asyncio' / '.workspace_host' /
# '.authenticated_admin' lines below look like pytest marker decorators
# whose '@pytest.mark' prefix was stripped during extraction; as written
# they are not valid Python.
.asyncio
.workspace_host
class TestCreateUserRole():
    """API tests for POST /users/{user_id}/roles (assigning a role).

    NOTE(review): several fixture parameter lists (e.g.
    ``test_client_api: test_data: TestData``) appear mangled -- commas and
    annotations were evidently lost during extraction; restore from VCS.
    """
    async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api: test_data: TestData):
        # Without credentials the endpoint must reject the request.
        user = test_data['users']['regular']
        role = test_data['roles']['castles_manager']
        response = (await test_client_api.post(f'/users/{user.id}/roles', json={'id': str(role.id)}))
        unauthorized_api_assertions(response)
    .authenticated_admin
    async def test_unknown_user(self, test_client_api: not_existing_uuid: uuid.UUID, test_data: TestData):
        # An unknown user id yields 404.
        role = test_data['roles']['castles_manager']
        response = (await test_client_api.post(f'/users/{not_existing_uuid}/roles', json={'id': str(role.id)}))
        assert (response.status_code == status.HTTP_404_NOT_FOUND)
    .authenticated_admin
    async def test_unknown_role(self, test_client_api: not_existing_uuid: uuid.UUID, test_data: TestData):
        # An unknown role id yields 400 with a specific error code.
        user = test_data['users']['regular']
        response = (await test_client_api.post(f'/users/{user.id}/roles', json={'id': str(not_existing_uuid)}))
        assert (response.status_code == status.HTTP_400_BAD_REQUEST)
        json = response.json()
        assert (json['detail'] == APIErrorCode.USER_ROLE_CREATE_NOT_EXISTING_ROLE)
    .authenticated_admin
    async def test_already_added_role(self, test_client_api: test_data: TestData):
        # Re-adding a role the user already has yields 400.
        role = test_data['roles']['castles_visitor']
        user = test_data['users']['regular']
        response = (await test_client_api.post(f'/users/{user.id}/roles', json={'id': str(role.id)}))
        assert (response.status_code == status.HTTP_400_BAD_REQUEST)
        json = response.json()
        assert (json['detail'] == APIErrorCode.USER_ROLE_CREATE_ALREADY_ADDED_ROLE)
    .authenticated_admin
    async def test_valid(self, test_client_api: test_data: TestData, workspace: Workspace, workspace_session: AsyncSession, send_task_mock: MagicMock):
        # Happy path: role is persisted and the creation task is enqueued.
        role = test_data['roles']['castles_manager']
        user = test_data['users']['regular']
        response = (await test_client_api.post(f'/users/{user.id}/roles', json={'id': str(role.id)}))
        assert (response.status_code == status.HTTP_201_CREATED)
        user_role_repository = UserRoleRepository(workspace_session)
        user_roles = (await user_role_repository.list(user_role_repository.get_by_user_statement(user.id)))
        assert (len(user_roles) == 2)
        assert (role.id in [user_role.role_id for user_role in user_roles])
        send_task_mock.assert_called_with(on_user_role_created, str(user.id), str(role.id), str(workspace.id))
class InputMediaAudio(InputMedia):
    """Telegram Bot API ``InputMediaAudio``: an audio file sent as part of
    a media group, with optional thumbnail/duration/performer/title."""
    def __init__(self, media, thumbnail=None, caption=None, parse_mode=None, caption_entities=None, duration=None, performer=None, title=None):
        super(InputMediaAudio, self).__init__(type='audio', media=media, caption=caption, parse_mode=parse_mode, caption_entities=caption_entities)
        self.thumbnail = thumbnail
        self.duration = duration
        self.performer = performer
        self.title = title
    # NOTE(review): 'thumb' reads like a deprecated @property alias for
    # 'thumbnail'; the decorator appears to have been stripped.
    def thumb(self):
        """Deprecated accessor for :attr:`thumbnail`."""
        logger.warning('The parameter "thumb" is deprecated, use "thumbnail" instead')
        return self.thumbnail
    def to_dict(self):
        """Serialize to a dict, including only truthy optional fields."""
        ret = super(InputMediaAudio, self).to_dict()
        if self.thumbnail:
            ret['thumbnail'] = self.thumbnail
        if self.duration:
            ret['duration'] = self.duration
        if self.performer:
            ret['performer'] = self.performer
        if self.title:
            ret['title'] = self.title
        return ret
# NOTE(review): this bare argument tuple looks like a stripped
# '@pytest.fixture(autouse=True, scope="function")' decorator; as written
# the line is not valid Python.
(autouse=True, scope='function')
def create_test_database():
    """Create all tables on every configured test database before the
    test, yield, then drop them all afterwards (generator fixture)."""
    for url in DATABASE_URLS:
        database_url = DatabaseURL(url)
        # Normalize async driver URLs to sync counterparts so a plain
        # SQLAlchemy engine can execute the DDL.
        if (database_url.scheme in ['mysql', 'mysql+aiomysql', 'mysql+asyncmy']):
            url = str(database_url.replace(driver='pymysql'))
        elif (database_url.scheme in ['postgresql+aiopg', 'sqlite+aiosqlite', 'postgresql+asyncpg']):
            url = str(database_url.replace(driver=None))
        engine = sqlalchemy.create_engine(url)
        metadata.create_all(engine)
    (yield)
    # Teardown: drop everything that was created above.
    for url in DATABASE_URLS:
        database_url = DatabaseURL(url)
        if (database_url.scheme in ['mysql', 'mysql+aiomysql', 'mysql+asyncmy']):
            url = str(database_url.replace(driver='pymysql'))
        elif (database_url.scheme in ['postgresql+aiopg', 'sqlite+aiosqlite', 'postgresql+asyncpg']):
            url = str(database_url.replace(driver=None))
        engine = sqlalchemy.create_engine(url)
        metadata.drop_all(engine)
    # Force collection so lingering engine/connection objects are closed.
    gc.collect()
class OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Generated option wrapper for the Highcharts areaspline sonification
    tremolo mapping; exposes its 'depth' and 'speed' sub-option trees.

    NOTE(review): these accessors were likely @property methods; the
    decorators appear to have been stripped during extraction.
    """
    def frequency(self) -> 'OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        # Lazily materialize the 'depth' sub-configuration object.
        return self._config_sub_data('depth', OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingTremoloDepth)
    def speed(self) -> 'OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        # Lazily materialize the 'speed' sub-configuration object.
        return self._config_sub_data('speed', OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
def wb_command_version():
    """Return a human-readable summary of the installed wb_command: its
    path followed by the first three lines of its help banner.

    Raises:
        OSError: when wb_command cannot be located on this system.
    """
    path = find_workbench()
    if path is None:
        raise OSError('wb_command not found. Please check that it is installed.')
    separator = '{} '.format(os.linesep)
    # The version banner lives in the first three lines of the help text.
    banner = separator.join(util.check_output('wb_command').split(os.linesep)[0:3])
    return 'wb_command:{0}Path: {1}{0}{2}'.format(separator, path, banner)
def is_valid_ip_address(address):
    """Return True if *address* is a valid IPv4 or IPv6 address.

    Common loopback spellings are accepted outright; 'unknown' and the
    empty string are rejected.  Validation prefers the socket module's C
    parsers, with a regex fallback for IPv4 and a permissive True for
    IPv6 on platforms lacking inet_pton.
    """
    lowered = address.lower()
    if lowered in ('127.0.0.1', 'localhost', '::1', '::ffff:127.0.0.1'):
        return True
    if lowered in ('unknown', ''):
        return False
    if address.count('.') == 3:
        # Treat an IPv4-mapped IPv6 address (::ffff:a.b.c.d) as plain IPv4.
        if address.startswith('::ffff:'):
            address = address[7:]
        if not hasattr(socket, 'inet_aton'):
            # Regex fallback for platforms without inet_aton.
            match = _re_ipv4.match(address)
            return bool(match and all(0 <= int(match.group(i)) < 256 for i in (1, 2, 3, 4)))
        try:
            socket.inet_aton(address)
        except socket.error:
            return False
        return True
    if not hasattr(socket, 'inet_pton'):
        # No reliable IPv6 validation available here; assume valid.
        return True
    try:
        socket.inet_pton(socket.AF_INET6, address)
    except socket.error:
        return False
    return True
# NOTE(review): the bare '.skipif(...)' line below looks like a stripped
# '@pytest.mark.skipif(...)' decorator; as written it is not valid Python.
.skipif(('pandas' not in sys.modules), reason='Pandas is not installed.')
def test_assert_dict_type():
    """DataclassTransformer.assert_type on dicts: accepts complete and
    Optional-missing payloads, rejects missing required keys, extra keys,
    and wrongly-typed values."""
    import pandas as pd
    class AnotherDataClass(DataClassJsonMixin):
        z: int
    class Args(DataClassJsonMixin):
        x: int
        y: typing.Optional[str]
        file: FlyteFile
        dataset: StructuredDataset
        another_dataclass: AnotherDataClass
    pv = tempfile.mkdtemp(prefix='flyte-')
    df = pd.DataFrame({'Name': ['Tom', 'Joseph'], 'Age': [20, 22]})
    sd = StructuredDataset(dataframe=df, file_format='parquet')
    # Fully populated dict: accepted.
    vd = {'x': 3, 'y': 'hello', 'file': FlyteFile(pv), 'dataset': sd, 'another_dataclass': {'z': 4}}
    DataclassTransformer().assert_type(Args, vd)
    # Missing Optional field 'y': still accepted.
    md = {'x': 3, 'file': FlyteFile(pv), 'dataset': sd, 'another_dataclass': {'z': 4}}
    DataclassTransformer().assert_type(Args, md)
    # Missing required field 'x': rejected.
    md = {'y': 'hello', 'file': FlyteFile(pv), 'dataset': sd, 'another_dataclass': {'z': 4}}
    with pytest.raises(TypeTransformerFailedError, match=re.escape("The original fields are missing the following keys from the dataclass fields: ['x']")):
        DataclassTransformer().assert_type(Args, md)
    # Extra key 'z' not in the dataclass: rejected.
    ed = {'x': 3, 'y': 'hello', 'file': FlyteFile(pv), 'dataset': sd, 'another_dataclass': {'z': 4}, 'z': 'extra'}
    with pytest.raises(TypeTransformerFailedError, match=re.escape("The original fields have the following extra keys that are not in dataclass fields: ['z']")):
        DataclassTransformer().assert_type(Args, ed)
    # Wrongly-typed value ('3' instead of 3): rejected.
    td = {'x': '3', 'y': 'hello', 'file': FlyteFile(pv), 'dataset': sd, 'another_dataclass': {'z': 4}}
    with pytest.raises(TypeTransformerFailedError, match="Type of Val '<class 'str'>' is not an instance of <class 'int'>"):
        DataclassTransformer().assert_type(Args, td)
def get_valid_types(data: dict) -> typing.Tuple[(typing.Set[str], bool)]:
    """Normalize a JSON-schema ``type`` entry into a set of type names.

    Returns a pair ``(types, allow_null)`` where ``types`` never contains
    'null' (recorded in ``allow_null`` instead) nor an 'integer' made
    redundant by 'number'.  An absent/empty entry means all types.
    """
    declared = data.get('type', [])
    names = {declared} if isinstance(declared, str) else set(declared)
    if not names:
        # No constraint declared: every JSON type is acceptable.
        names = {'null', 'boolean', 'object', 'array', 'number', 'string'}
    if 'number' in names:
        # 'number' already covers all integers.
        names.discard('integer')
    allow_null = 'null' in names
    if allow_null:
        names.remove('null')
    return (names, allow_null)
class PyGameFRL():
    """Draws a circular 'forced retinal location' aperture around the gaze
    position by blitting the stimulus screen through 1-px clip strips."""

    def get_pos(self, gazepos):
        """Return the FRL centre: gaze position offset by self.frlcor."""
        return ((gazepos[0] - self.frlcor[0]), (gazepos[1] - self.frlcor[1]))

    def update(self, display, stimscreen, gazepos):
        """Redraw *display* showing *stimscreen* only inside a circle of
        diameter self.size centred on the FRL position; returns the time
        reported by the display flip."""
        frlpos = self.get_pos(gazepos)
        display.fill()
        # Radius in pixels.  Integer floor division is required here:
        # range() below needs an int, and the original '/ 2' produces a
        # float under Python 3 (TypeError); '//' matches the Python 2
        # integer-division behavior this code was written for.
        r = (self.size // 2)
        h = 1  # strip height in pixels
        # Upper half of the circle, one horizontal strip per scanline.
        for y in range(0, r):
            y = (r - y)
            # Half-width of the circle at height y (Pythagoras).
            x = (((r ** 2) - (y ** 2)) ** 0.5)
            updaterect = [(frlpos[0] - x), (frlpos[1] - (h * y)), (2 * x), h]
            pygaze.expdisplay.set_clip(updaterect)
            pygaze.expdisplay.blit(stimscreen.screen, (0, 0))
        # Lower half (includes the centre line, y == 0).
        for y in range(0, (r + 1)):
            x = (((r ** 2) - (y ** 2)) ** 0.5)
            updaterect = [(frlpos[0] - x), (frlpos[1] + (h * y)), (2 * x), h]
            pygaze.expdisplay.set_clip(updaterect)
            pygaze.expdisplay.blit(stimscreen.screen, (0, 0))
        # Remove clipping and flip.
        pygaze.expdisplay.set_clip(None)
        disptime = display.show()
        return disptime
class Migration(migrations.Migration):
    """Initial migration: creates the ActivityLog table with a type tag,
    optional JSON payload, creation timestamp, and two optional user FKs
    (the acting user and the user the log entry is about)."""
    initial = True
    dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
    # Newest entries first via ordering on -created_at.
    operations = [migrations.CreateModel(name='ActivityLog', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('type', models.CharField(max_length=64)), ('jsondata', models.TextField(blank=True, null=True)), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='criado em')), ('fromuser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activitylogs_withfromuser', to=settings.AUTH_USER_MODEL)), ('logged_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL))], options={'ordering': ('-created_at',)})]
def test_wf_resolving():
    """Workflow resolution sanity check: result values, resolver location,
    and serialized container args of both embedded tasks."""
    result = my_wf(a=3, b='hello')
    assert result == (5, 'helloworld')
    assert my_wf.location == 'tests.flytekit.unit.core.test_resolver.my_wf'
    tasks = my_wf.get_all_tasks()
    assert len(tasks) == 2
    # Each task's container args must end with the resolver location
    # followed by the task's own index within the workflow.
    for index, task in enumerate(tasks):
        spec = get_serializable(OrderedDict(), serialization_settings, task)
        assert spec.template.container.args[-4:] == ['--resolver', 'tests.flytekit.unit.core.test_resolver.my_wf', '--', str(index)]
def bad_hashid_error(email_or_string):
    """400 handler for submissions whose target matches no form; replies
    with JSON or an HTML error page depending on the client's Accept."""
    g.log.info('Submission rejected. No form found for this target.')
    if request_wants_json():
        return jsonerror(400, {'error': 'Invalid email address'})
    page = render_template('error.html', title='Check email address', text=('Email address %s is not formatted correctly' % str(email_or_string)))
    return (page, 400)
def test_inforec_dense_table():
    """A structured inforec exposes a dense table whose columns match the
    known contents of inforec_01.lis."""
    path = 'data/lis/records/inforec_01.lis'
    (f,) = lis.load(path)
    wellsite = f.wellsite_data()[0]
    assert wellsite.isstructured()
    table = wellsite.table(simple=True)
    # Expected column contents, keyed by column name.
    expected_columns = {
        'MNEM': ['WN ', 'CN ', 'SRVC'],
        'STAT': ['ALLO', 'ALLO', 'ALLO'],
        'PUNI': [' ', ' ', ' '],
        'TUNI': [' ', ' ', ' '],
        'VALU': ['15/9-F-15 ', 'StatoilHydro', 'Geoservices '],
    }
    for column, values in expected_columns.items():
        np.testing.assert_array_equal(table[column], np.array(values, dtype='O'))
class Block(metaclass=ABCMeta):
    """Base class for a generic markdown block extension.

    Subclasses declare NAME/ARGUMENT/OPTIONS and hook into the block
    lifecycle through the ``on_*`` callbacks.
    """

    # Block name used to match the block in the source document.
    NAME = ''
    # Argument spec: False = no argument, True = required, None = optional.
    ARGUMENT = False
    # Option spec: name -> [default, parser/validator callable].
    OPTIONS = {}

    def __init__(self, length, tracker, block_mgr, config):
        """Set up per-instance state and reserve the 'attrs' option."""
        self.arg_spec = self.ARGUMENT
        # Deep-copy so per-instance parsing never mutates the class spec.
        self.option_spec = copy.deepcopy(self.OPTIONS)
        if ('attrs' in self.option_spec):
            raise ValueError("'attrs' is a reserved option name and cannot be overriden")
        self.option_spec['attrs'] = [{}, type_html_attribute_dict]
        self._block_mgr = block_mgr
        self.length = length
        self.tracker = tracker
        self.md = block_mgr.md
        self.arguments = []
        self.options = {}
        self.config = config
        self.on_init()

    def is_raw(self, tag):
        """Return True if *tag* is treated as raw content by the manager."""
        return self._block_mgr.is_raw(tag)

    def is_block(self, tag):
        """Return True if *tag* is a block-level element."""
        return self._block_mgr.is_block(tag)

    def html_escape(self, text):
        """Escape the HTML special characters ``&``, ``<`` and ``>``.

        The original replacements were self-referential no-ops (e.g.
        replacing '&' with '&') -- evidently HTML entities mangled by
        extraction; the proper entity references are restored here.
        """
        text = text.replace('&', '&amp;')
        text = text.replace('<', '&lt;')
        text = text.replace('>', '&gt;')
        return text

    def dedent(self, text, length=None):
        """Strip up to *length* (default: the tab length) characters of
        common leading indentation from each line of *text*."""
        if (length is None):
            length = self.md.tab_length
        min_length = float('inf')
        for x in RE_INDENT.findall(text):
            min_length = min(len(x), min_length)
        min_length = min(min_length, length)
        # Blank lines (group 2) are emptied; other lines lose min_length chars.
        return RE_DEDENT.sub((lambda m, l=min_length: ('' if (m.group(2) is not None) else m.group(1)[l:])), text)

    def on_init(self):
        """Hook called at the end of __init__; subclasses may override."""
        return

    def on_markdown(self):
        """Return how the block's content is treated: 'raw', 'auto', etc."""
        return 'auto'

    def _validate(self, parent, arg, **options):
        """Validate the argument and options against the specs; returns
        False on any mismatch, otherwise defers to on_validate()."""
        # arg_spec None means optional; otherwise presence must match spec.
        if ((self.arg_spec is not None) and ((arg and (not self.arg_spec)) or ((not arg) and self.arg_spec))):
            return False
        self.argument = arg
        spec = self.option_spec
        parsed = {}
        # Start from declared defaults.
        for (k, v) in spec.items():
            parsed[k] = v[0]
        # Overlay the supplied options, running each through its parser.
        for (k, v) in options.items():
            if (k not in spec):
                return False
            else:
                parser = spec[k][1]
                if (parser is not None):
                    try:
                        v = parser(v)
                    except Exception:
                        return False
                parsed[k] = v
        self.options = parsed
        return self.on_validate(parent)

    def on_validate(self, parent):
        """Subclass hook for extra validation; default accepts."""
        return True

    def on_create(self, parent):
        """Create and return the element for this block under *parent*.

        Subclasses must override this.  NOTE(review): the original
        definition had an empty body (a syntax error) -- its decorator
        and/or docstring were evidently lost during extraction; a
        docstring-only body is restored as the most conservative form.
        """

    def _create(self, parent):
        """Create the element via on_create() and apply 'attrs' options."""
        el = self.on_create(parent)
        attrib = el.attrib
        for (k, v) in self.options['attrs'].items():
            if (k == 'class'):
                # Merge with any classes the subclass already set.
                if (k in attrib):
                    v = (type_string_delimiter(' ')(attrib['class']) + v)
                attrib['class'] = ' '.join(v)
            else:
                attrib[k] = v
        return el

    def _end(self, block):
        """Finalize the block: dedent raw content, then call on_end()."""
        mode = self.on_markdown()
        add = self.on_add(block)
        if ((mode == 'raw') or ((mode == 'auto') and self.is_raw(add))):
            add.text = mutil.AtomicString(self.dedent(add.text))
        self.on_end(block)

    def on_end(self, block):
        """Subclass hook run when the block ends; default does nothing."""
        return

    def on_add(self, block):
        """Return the element child content should be attached to."""
        return block

    def on_inline_end(self, block):
        """Subclass hook run after inline processing; default does nothing."""
        return
def test_fetcher_client_set_cache():
    """set_run_cache stores values retrievable by key; absent keys read
    back as None."""
    client = _initial_fetcher_client()
    client.set_run_cache(key='test_field_name', value='cached data')
    cache = client.run_cache
    assert cache['test_field_name'] == 'cached data'
    assert cache['not_existing_key'] is None
def extractWwwAmazonCom(item):
    """Parse a release item from www.amazon.com.

    Returns None for previews or titles without a chapter/volume, a
    release message for recognized tags, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag in feed, series name, translation type)
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for (tagname, name, tl_type) in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestComparePluginFileHeader(ComparePluginTest):
    """Tests for the File_Header compare plugin."""
    PLUGIN_NAME = 'File_Header'
    PLUGIN_CLASS = ComparePlugin

    def test_compare(self):
        """The compare result carries Markup hexdiff/ascii/offsets parts."""
        result = self.c_plugin.compare_function([self.fw_one, self.fw_two, self.fw_three])
        expected_keys = ['hexdiff', 'ascii', 'offsets']
        assert all(key in result for key in expected_keys), 'not all result keys given'
        assert all(isinstance(result[key], Markup) for key in expected_keys), 'partial results should be flask.Markup strings'
        assert '>4B<' in result['hexdiff'], 'no bytes in hexdump or bad upper case conversion'
        assert '<br />' in result['hexdiff'], 'no linebreaks found'

    def test_at_least_two_are_common(self):
        """_at_least_two_are_common detects whether any value repeats."""
        with_duplicate = [3, 2, 1, 2]
        all_unique = [5, 4, 3, 1, 2, 6]
        assert self.c_plugin._at_least_two_are_common(with_duplicate), 'should find a commonality'
        assert not self.c_plugin._at_least_two_are_common(all_unique), 'should not find a commonality'
def run_tests(c=None):
    """Attach a debug-level stream handler to the root logger and, when a
    test config *c* is supplied, run the Ryu test suite.

    Returns True when no config is given (nothing to run) or when the
    suite ran without failures.
    """
    root = logging.getLogger()
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    root.addHandler(handler)
    root.setLevel(logging.DEBUG)
    # Without a config there is nothing to run; report success.
    if not c:
        return True
    runner = RyuTestRunner(stream=c.stream, verbosity=c.verbosity, config=c)
    # core.run returns a failure indicator; invert it to a success flag.
    return not core.run(config=c, testRunner=runner)
def _remove_user_from_monitoring_server(dc_name, user_name):
    """Delete *user_name* from the monitoring (zabbix) server attached to
    datacenter *dc_name*; no-op when monitoring is disabled there."""
    dc = Dc.objects.get_by_name(dc_name)
    monitoring = get_monitoring(dc)
    if not monitoring.enabled:
        logger.info('Monitoring is disabled in DC %s', dc)
        return
    logger.info('Going to delete user with name %s in zabbix %s for dc %s.', user_name, monitoring, dc)
    monitoring.user_delete(name=user_name)
class _FeedHandler(logging.Handler):
def __init__(self, capacity):
logging.Handler.__init__(self)
self._records = collections.deque(maxlen=capacity)
def emit(self, record):
global _last_update
_last_update = int(record.created)
self.acquire()
try:
self._records.appendleft((record, str(uuid.uuid4())))
finally:
self.release()
def flush(self):
self.acquire()
try:
self._records.clear()
finally:
self.release()
def close(self):
self.flush()
logging.Handler.close(self)
def presentRecord(self, path, queryargs, mimetype, data, headers):
uids = queryargs.get('uid')
if uids:
uid = uids[0]
else:
return '<span class="error">No UID specified</span>'
record = None
self.acquire()
try:
for (record, uuid) in self._records:
if (uuid == uid):
break
else:
return '<span class="warn">Specified UID was not found; record may have expired</span>'
finally:
self.release()
return self.format(record)
def enumerateRecords(self, limit):
events = []
self.acquire()
try:
for i in range(min(limit, len(self._records))):
(record, uid) = self._records[i]
events.append(_Event(record.msg, record.levelname, record.created, record.name, record.lineno, uid))
finally:
self.release()
return events |
def return_terms(summary):
    """Return a comma-joined string of the known ML terms that occur in
    *summary*, in the fixed search order.

    Matching is substring-based (so e.g. 'informational' matches
    'information'), preserving the original behavior.
    """
    terms_of_interest = ['variational', 'adversarial', 'GAN', 'entangle', 'information', 'bottleneck', 'representation', 'understanding', 'graph']
    # Comprehension replaces the manual append loop; join takes the list
    # directly (the former generator wrapper was redundant).
    found = [term for term in terms_of_interest if term in summary]
    return ','.join(found)
def get_argnames(func: Callable) -> List[str]:
    """Return *func*'s parameter names, excluding any *args/**kwargs
    variadics and a leading 'self'."""
    variadic_kinds = (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
    names = []
    for param in inspect.signature(func).parameters.values():
        if param.kind not in variadic_kinds:
            names.append(param.name)
    # Drop an implicit 'self' when inspecting unbound methods.
    if names and names[0] == 'self':
        del names[0]
    return names
class Video(JsonDeserializable):
    """Telegram Bot API ``Video`` object.

    NOTE(review): ``de_json`` uses ``cls`` and ``thumb`` reads like a
    deprecated @property alias; the @classmethod/@property decorators
    appear to have been stripped during extraction.
    """
    def de_json(cls, json_string):
        """Build a Video from a JSON string/dict; returns None for None."""
        if (json_string is None):
            return None
        obj = cls.check_json(json_string)
        # Promote the nested thumbnail dict to a PhotoSize instance.
        if (('thumbnail' in obj) and ('file_id' in obj['thumbnail'])):
            obj['thumbnail'] = PhotoSize.de_json(obj['thumbnail'])
        return cls(**obj)
    def __init__(self, file_id, file_unique_id, width, height, duration, thumbnail=None, file_name=None, mime_type=None, file_size=None, **kwargs):
        self.file_id: str = file_id
        self.file_unique_id: str = file_unique_id
        self.width: int = width
        self.height: int = height
        self.duration: int = duration
        self.thumbnail: PhotoSize = thumbnail
        self.file_name: str = file_name
        self.mime_type: str = mime_type
        self.file_size: int = file_size
    def thumb(self):
        """Deprecated accessor for :attr:`thumbnail`."""
        logger.warning('The parameter "thumb" is deprecated, use "thumbnail" instead')
        return self.thumbnail
class FaucetDscpMatchTest(FaucetUntaggedTest):
    """Integration test: an ACL matching on IP DSCP must rewrite the
    destination MAC of matching packets."""
    # DSCP value the ACL matches on (46 = Expedited Forwarding).
    IP_DSCP_MATCH = 46
    # 0x800: IPv4 ethertype.
    ETH_TYPE = 2048
    SRC_MAC = '0e:00:00:00:00:ff'
    DST_MAC = '0e:00:00:00:00:02'
    # MAC the ACL rewrites matching traffic to.
    REWRITE_MAC = '0f:00:12:23:48:03'
    CONFIG_GLOBAL = ('\nvlans:\n 100:\n description: "untagged"\n\nacls:\n 1:\n - rule:\n ip_dscp: %d\n dl_type: 0x800\n actions:\n allow: 1\n output:\n set_fields:\n - eth_dst: "%s"\n - rule:\n actions:\n allow: 1\n' % (IP_DSCP_MATCH, REWRITE_MAC))
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n acl_in: 1\n %(port_2)d:\n native_vlan: 100\n %(port_3)d:\n native_vlan: 100\n %(port_4)d:\n native_vlan: 100\n '
    def test_untagged(self):
        """Send a DSCP-marked packet and verify the rewritten eth_dst is
        observed at the destination host."""
        (source_host, dest_host) = self.hosts_name_ordered()[0:2]
        dest_host.setMAC(self.REWRITE_MAC)
        # Wait for the ACL flow to be installed before sending.
        self.wait_until_matching_flow({'ip_dscp': self.IP_DSCP_MATCH, 'eth_type': self.ETH_TYPE}, table_id=self._PORT_ACL_TABLE)
        # 184 is the ToS byte corresponding to DSCP 46 (46 << 2).
        scapy_pkt = self.scapy_dscp(self.SRC_MAC, self.DST_MAC, 184, source_host.defaultIntf())
        tcpdump_filter = ('ether dst %s' % self.REWRITE_MAC)
        tcpdump_txt = self.tcpdump_helper(dest_host, tcpdump_filter, [(lambda : source_host.cmd(scapy_pkt))], root_intf=True, packets=1)
        self.assertTrue(re.search(('%s > %s' % (self.SRC_MAC, self.REWRITE_MAC)), tcpdump_txt))
class TestAPNSConfigEncoder():
    """Encoding tests for messaging.APNSConfig.

    NOTE(review): the bare '.parametrize(...)' lines below look like
    stripped '@pytest.mark.parametrize(...)' decorators; as written they
    are not valid Python.
    """
    .parametrize('data', NON_OBJECT_ARGS)
    def test_invalid_apns(self, data):
        # A non-APNSConfig value for Message.apns must be rejected.
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', apns=data))
        expected = 'Message.apns must be an instance of APNSConfig class.'
        assert (str(excinfo.value) == expected)
    .parametrize('data', NON_DICT_ARGS)
    def test_invalid_headers(self, data):
        # Non-dict headers must be rejected.
        with pytest.raises(ValueError):
            check_encoding(messaging.Message(topic='topic', apns=messaging.APNSConfig(headers=data)))
    def test_apns_config(self):
        # Valid config round-trips headers and fcm_options into the payload.
        msg = messaging.Message(topic='topic', apns=messaging.APNSConfig(headers={'h1': 'v1', 'h2': 'v2'}, fcm_options=messaging.APNSFCMOptions('analytics_label_v1')))
        expected = {'topic': 'topic', 'apns': {'headers': {'h1': 'v1', 'h2': 'v2'}, 'fcm_options': {'analytics_label': 'analytics_label_v1'}}}
        check_encoding(msg, expected)
def test_mysql_connector_build_uri(connection_config_mysql, db: Session):
    """MySQLConnector.build_uri should assemble a pymysql URI from the
    connection secrets, defaulting the port to 3306 and omitting missing
    credentials.

    NOTE(review): several literals below look garbled by extraction --
    the first f-string lacks the '@' separator before the host, and
    expected URIs like 'mysql_.internal' appear truncated (likely
    redacted passwords).  Verify against repository history.
    """
    connector = MySQLConnector(configuration=connection_config_mysql)
    s = connection_config_mysql.secrets
    # Port fragment is only present when a port is configured.
    port = ((s['port'] and f":{s['port']}") or '')
    uri = f"mysql+pymysql://{s['username']}:{s['password']}{s['host']}{port}/{s['dbname']}"
    assert (connector.build_uri() == uri)
    # Explicit port.
    connection_config_mysql.secrets = {'username': 'mysql_user', 'password': 'mysql_pw', 'host': 'host.docker.internal', 'dbname': 'mysql_example', 'port': '3307'}
    connection_config_mysql.save(db)
    assert (connector.build_uri() == 'mysql+pymysql://mysql_user:mysql_.internal:3307/mysql_example')
    # No port: default 3306.
    connection_config_mysql.secrets = {'username': 'mysql_user', 'password': 'mysql_pw', 'host': 'host.docker.internal', 'dbname': 'mysql_example'}
    connection_config_mysql.save(db)
    assert (connector.build_uri() == 'mysql+pymysql://mysql_user:mysql_.internal:3306/mysql_example')
    # No password.
    connection_config_mysql.secrets = {'username': 'mysql_user', 'host': 'host.docker.internal', 'dbname': 'mysql_example'}
    connection_config_mysql.save(db)
    assert (connector.build_uri() == 'mysql+pymysql://mysql_.internal:3306/mysql_example')
    # Neither username nor password.
    connection_config_mysql.secrets = {'host': 'host.docker.internal', 'dbname': 'mysql_example'}
    assert (connector.build_uri() == 'mysql+pymysql://host.docker.internal:3306/mysql_example')
# NOTE(review): the bare '.skipif(...)' line below looks like a stripped
# '@pytest.mark.skipif(...)' decorator; as written it is not valid Python.
.skipif((not has_tensorflow), reason='needs TensorFlow')
def test_tensorflow_wrapper_construction_requires_keras_model():
    """TensorFlowWrapper accepts a Keras model and rejects other models."""
    import tensorflow as tf
    keras_model = tf.keras.Sequential([tf.keras.layers.Dense(12, input_shape=(12,))])
    assert isinstance(TensorFlowWrapper(keras_model), Model)
    # A thinc Linear layer is not a Keras model and must be rejected.
    with pytest.raises(ValueError):
        TensorFlowWrapper(Linear(2, 3))
def test_train_bad_data_too_few_columns():
    """Training on a CSV with too few columns must raise."""
    with tempfile.TemporaryDirectory() as tmpdir:
        data_dir = os.path.join(tmpdir, 'test_data')
        # Work on a throwaway copy of the fixture data.
        shutil.copytree('./tests/test_data', data_dir)
        bad_csv = os.path.join(data_dir, 'bad_data_too_few_columns.csv')
        sys.argv = hf_args(tmpdir, 'train', bad_csv)
        instance = HostFootprint()
        with pytest.raises(Exception):
            instance.main()
class OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Generated option wrapper for the Highcharts treegraph sonification
    lowpass-filter mapping; exposes 'frequency' and 'resonance' subtrees.

    NOTE(review): these accessors were likely @property methods; the
    decorators appear to have been stripped during extraction.
    """
    def frequency(self) -> 'OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        # Lazily materialize the 'frequency' sub-configuration object.
        return self._config_sub_data('frequency', OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptionsMappingLowpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        # Lazily materialize the 'resonance' sub-configuration object.
        return self._config_sub_data('resonance', OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptionsMappingLowpassResonance)
class CustomPrefetchSkuSerializer(SkuTestSerializer):
    """Sku serializer that expands 'owners' through a
    SerializerMethodField backed by a prefetch_related hint."""
    # NOTE(review): 'class Meta(SkuTestSerializer)' inherits from the
    # serializer class itself; the conventional DRF pattern is
    # 'class Meta(SkuTestSerializer.Meta)' -- confirm intent.
    class Meta(SkuTestSerializer):
        model = test_models.Sku
        fields = ('id', 'variant')
        expandable_fields = dict(owners=dict(serializer=serializers.SerializerMethodField, id_source=False, prefetch_related=['owners']))
    def get_owners(self, obj):
        # Delegate to represent_child so expansion context is preserved.
        return self.represent_child(name='owners', instance=obj.owners.all(), serializer=OwnerTestSerializer, many=True)
def get_typed_value(type_name, value):
    """Convert *value* (typically a string from a parsed scenario table)
    to the Python value named by *type_name*.

    Supported names: None/none/null, bytes, base64_bytes, int, float,
    decimal, bool, str, encoded_ascii, encoded_unicode, emptystr,
    emptylist, emptydict, datetime, date, time.  AnyValue/RegexValue
    sentinels and values containing substitution markers pass through.

    Raises:
        DataTypeConversionError: on conversion failure, an unknown type
            name, or a false-y value for the datetime/date/time types.
    """
    # Sentinel wrapper values are returned unchanged.
    if isinstance(value, AnyValue):
        return value
    if isinstance(value, RegexValue):
        return value
    # Values containing [[...]] substitution markers are normalized to the
    # default-index form and converted later, after substitution happens.
    if (isinstance(value, six.text_type) and VARIABLE_SUBSTITUTION_RE.search(value)):
        return ENSURE_ACTION_SUBSTITUTION_DEFAULT_INDEX_RE.sub('[[\\1.0.\\2]]', value)
    if (type_name in ('None', 'none', 'null')):
        return None
    try:
        # Byte types are handled before the text normalization below.
        if (type_name in ('bytes', 'base64_bytes')):
            if (not value):
                return b''
            if (type_name == 'base64_bytes'):
                if (not isinstance(value, six.binary_type)):
                    value = value.encode('utf-8')
                return base64.b64decode(value)
            if isinstance(value, six.binary_type):
                return value
            return value.encode('utf-8')
        # All remaining conversions operate on text.
        if isinstance(value, six.binary_type):
            value = value.decode('utf-8')
        if (type_name == 'int'):
            # six.integer_types[-1] is 'long' on py2 and 'int' on py3.
            if (not value):
                return six.integer_types[(- 1)](0)
            return six.integer_types[(- 1)](value)
        if (type_name == 'float'):
            if (not value):
                return 0.0
            return float(value)
        if (type_name == 'decimal'):
            if (not value):
                return decimal.Decimal('0.0')
            return decimal.Decimal(value)
        if (type_name == 'bool'):
            # Only the case-insensitive literal 'true' is True.
            if (not value):
                return False
            if (value.lower() == 'true'):
                return True
            return False
        if (type_name in ('str', 'encoded_ascii', 'encoded_unicode')):
            if (not value):
                return ''
            if (type_name == 'encoded_ascii'):
                return value.encode('ascii').decode('unicode_escape')
            if (type_name == 'encoded_unicode'):
                return value.encode('utf-8').decode('unicode_escape')
            return value
    except (TypeError, ValueError, decimal.DecimalException) as e:
        # Normalize conversion failures to the project's error type.
        raise DataTypeConversionError(e.args[0])
    if (type_name == 'emptystr'):
        return ''
    if (type_name == 'emptylist'):
        return []
    if (type_name == 'emptydict'):
        return {}
    if (type_name == 'datetime'):
        if (not value):
            raise DataTypeConversionError('Attempt to convert false-y value to datetime value')
        if isinstance(value, six.text_type):
            # Relative forms: '<anchor> [timedelta args]'.
            if value.startswith(('now', 'utc_now', 'midnight', 'utc_midnight')):
                try:
                    (datetime_value, timedelta_value) = value.strip().split(' ', 1)
                except ValueError:
                    # No timedelta part given.
                    datetime_value = value.strip()[:]
                    timedelta_value = ''
                if (datetime_value == 'now'):
                    datetime_value_dt = datetime.datetime.now()
                elif (datetime_value == 'utc_now'):
                    datetime_value_dt = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
                elif (datetime_value == 'midnight'):
                    datetime_value_dt = datetime.datetime.combine(datetime.date.today(), datetime.time())
                else:
                    # 'utc_midnight'
                    datetime_value_dt = datetime.datetime.combine(datetime.datetime.utcnow().date(), datetime.time()).replace(tzinfo=pytz.utc)
                assert isinstance(datetime_value_dt, datetime.datetime), 'Parse error, value is not a `datetime`'
                # Truncate to whole seconds for stable comparisons.
                datetime_value_dt = datetime_value_dt.replace(microsecond=0)
                if timedelta_value:
                    datetime_value_dt = (datetime_value_dt + datetime.timedelta(**_parse_timedelta_args(timedelta_value)))
                return datetime_value_dt
            # Absolute form: component list parsed by helper.
            return datetime.datetime(*_parse_datetime_args(value))
        raise DataTypeConversionError('Attempt to convert unknown type {} to datetime value'.format(type(value).__name__))
    if (type_name == 'date'):
        if (not value):
            raise DataTypeConversionError('Attempt to convert false-y value to date value')
        if isinstance(value, six.text_type):
            if (value == 'today'):
                return datetime.date.today()
            elif (value == 'utc_today'):
                return datetime.datetime.utcnow().date()
            return datetime.date(*_parse_datetime_args(value))
        raise DataTypeConversionError('Attempt to convert unknown type {} to date value'.format(type(value).__name__))
    if (type_name == 'time'):
        if (not value):
            raise DataTypeConversionError('Attempt to convert false-y value to time value')
        if isinstance(value, six.text_type):
            # Relative forms anchored at now/utc_now/midnight.
            if value.startswith(('now', 'utc_now', 'midnight')):
                try:
                    (time_value, timedelta_value) = value.strip().split(' ', 1)
                except ValueError:
                    time_value = value.strip()[:]
                    timedelta_value = ''
                if (time_value == 'now'):
                    datetime_value_dt = datetime.datetime.now()
                elif (time_value == 'utc_now'):
                    datetime_value_dt = datetime.datetime.utcnow()
                else:
                    # 'midnight': arbitrary fixed date; only the time is kept.
                    datetime_value_dt = datetime.datetime(2000, 1, 1, 0, 0, 0, 0)
                if timedelta_value:
                    datetime_value_dt = (datetime_value_dt + datetime.timedelta(**_parse_timedelta_args(timedelta_value)))
                return datetime_value_dt.replace(microsecond=0).time()
            return datetime.time(*_parse_datetime_args(value))
        raise DataTypeConversionError('Attempt to convert unknown type {} to time value'.format(type(value).__name__))
    raise DataTypeConversionError('Unknown type: {}'.format(type_name))
def get_smtp_config():
    """Build the SMTP client configuration from application settings.

    Maps the stored 'smtp_encryption' value onto the mail client's TLS
    modes: tls -> required, ssl -> ssl, tls_optional -> optional,
    anything else -> none.
    """
    # Fetch the settings once instead of once per field (the original
    # called get_settings() six times).
    settings = get_settings()
    encryption_modes = {'tls': 'required', 'ssl': 'ssl', 'tls_optional': 'optional'}
    tls_mode = encryption_modes.get(settings['smtp_encryption'], 'none')
    return {'host': settings['smtp_host'], 'username': settings['smtp_username'], 'password': settings['smtp_password'], 'tls': tls_mode, 'port': settings['smtp_port']}
class TestPrivateComputationGameRepository(unittest.TestCase):
    """Tests for PrivateComputationGameRepository.get_game.

    NOTE(review): the bare parenthesized tuple before setUp looks like a
    stripped '@patch(...)' decorator (mock of the game config); confirm
    against repository history.
    """
    ('fbpcs.private_computation.repository.private_computation_game.PRIVATE_COMPUTATION_GAME_CONFIG', {'attribution_compute_dev': {'onedocker_package_name': 'private_attribution/compute-dev', 'arguments': [OneDockerArgument(name='aggregators', required=True), OneDockerArgument(name='input_path', required=True), OneDockerArgument(name='output_path', required=True), OneDockerArgument(name='attribution_rules', required=True)]}})
    def setUp(self) -> None:
        self.game_repository = PrivateComputationGameRepository()
    def test_get_game(self) -> None:
        # A known game resolves to its package name and argument list.
        game_config = self.game_repository.private_computation_game_config
        expected_game_name = 'attribution_compute_dev'
        expected_onedocker_package_name = game_config[expected_game_name]['onedocker_package_name']
        attribution_game_config = self.game_repository.get_game(expected_game_name)
        expected_arguments: List[MPCGameArgument] = [MPCGameArgument(name=argument.name, required=argument.required) for argument in game_config[expected_game_name]['arguments']]
        self.assertEqual(attribution_game_config.game_name, expected_game_name)
        self.assertEqual(attribution_game_config.onedocker_package_name, expected_onedocker_package_name)
        self.assertEqual(attribution_game_config.arguments, expected_arguments)
    def test_unsupported_game(self) -> None:
        # Unknown game names raise a descriptive ValueError.
        unsupported_game_name = 'unsupported game'
        with self.assertRaisesRegex(ValueError, f'Game {unsupported_game_name} is not supported.'):
            self.game_repository.get_game(unsupported_game_name)
class Solution():
    """LeetCode 2279: maximum bags filled to capacity with extra rocks."""

    def maximumBags(self, capacity: List[int], rocks: List[int], additionalRocks: int) -> int:
        """Greedily fill bags in order of smallest remaining deficit."""
        deficits = sorted(c - r for c, r in zip(capacity, rocks))
        full_bags = 0
        budget = additionalRocks
        for deficit in deficits:
            if deficit > budget:
                # The cheapest unfilled bag is unaffordable; all later
                # deficits are at least as large, so stop.
                break
            budget -= deficit
            full_bags += 1
        return full_bags
class OptionPlotoptionsGaugeSonificationTracksMappingNoteduration(Options):
    """Note-duration mapping options for gauge sonification tracks.

    Each option is a read/write property backed by the shared ``Options``
    config store (``_config_get`` / ``_config``).

    NOTE(review): the original defined every getter/setter pair as two plain
    ``def``s with the same name, so the setter silently shadowed the getter;
    the ``@property``/``@<name>.setter`` decorators restored here were almost
    certainly stripped by the tool that flattened this file.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class _ItemDelegate(QtGui.QStyledItemDelegate):
    """Item delegate that optionally draws grid lines between table cells."""

    def __init__(self, table_view):
        QtGui.QStyledItemDelegate.__init__(self, table_view)
        # Cache the factory's grid-line flags so paint() does not have to
        # walk the editor chain for every cell.
        factory = table_view._editor.factory
        self._horizontal_lines = factory.horizontal_lines
        self._vertical_lines = factory.vertical_lines

    def paint(self, painter, option, index):
        """Paint the cell normally, then overlay the configured grid lines."""
        QtGui.QStyledItemDelegate.paint(self, painter, option, index)
        painter.save()
        painter.setPen(option.palette.color(QtGui.QPalette.ColorRole.Dark))
        cell = option.rect
        if self._horizontal_lines:
            painter.drawLine(cell.bottomLeft(), cell.bottomRight())
        if self._vertical_lines:
            painter.drawLine(cell.topRight(), cell.bottomRight())
        painter.restore()
def get_clamped_value_counts(value_counts: pd.Series, max_categories_incl_other: int) -> pd.Series:
    """Clamp a value-counts series to at most ``max_categories_incl_other``
    entries, folding the tail of categories into a single "others" bucket.
    """
    if len(value_counts) <= max_categories_incl_other:
        # Everything fits: keep every category as-is.
        keep = len(value_counts)
    else:
        # Reserve one slot for the synthetic "others" entry.
        keep = max_categories_incl_other - 1
    clamped_series = pd.Series(value_counts.head(keep))
    tail = value_counts.tail(len(value_counts) - keep)
    if len(tail) > 0:
        total_in_other = sum(tail)
        if clamped_series.index.dtype.name == 'category':
            # Categorical indexes reject unseen labels, so register the
            # "others" label before appending an entry with it.
            clamped_series.index = clamped_series.index.add_categories([OTHERS_GROUPED])
            other_series = pd.Series([total_in_other], index=pd.CategoricalIndex([OTHERS_GROUPED], categories=clamped_series.index))
        else:
            other_series = pd.Series([total_in_other], index=[OTHERS_GROUPED])
        clamped_series = pd.concat([clamped_series, other_series])
    return clamped_series
# NOTE(review): the original line started with bare ``.parametrize`` -- a
# ``@pytest.mark`` prefix stripped during extraction (invalid syntax as
# written); restored here. A module-level ``pytest`` import is assumed.
@pytest.mark.parametrize('middleware_attr', ['MIDDLEWARE', 'MIDDLEWARE_CLASSES'])
def test_tracing_middleware_autoinsertion_list(middleware_attr):
    """insert_middleware must prepend the tracing middleware to either
    Django middleware setting name, preserving type and existing entries.
    """
    settings = mock.Mock(spec=[middleware_attr], **{middleware_attr: ['a', 'b', 'c']})
    ElasticAPMConfig.insert_middleware(settings)
    middleware_list = getattr(settings, middleware_attr)
    assert len(middleware_list) == 4
    # The tracing middleware goes first so it wraps everything else.
    assert middleware_list[0] == 'elasticapm.contrib.django.middleware.TracingMiddleware'
    assert isinstance(middleware_list, list)
def response_for_status(form, host, referrer, status):
    """Translate a form-submission status dict into an HTTP response.

    Dispatches on ``status['code']`` to the matching success/error response
    builder; unrecognised codes fall through to the generic send error.
    """
    code = status['code']
    if code == Form.STATUS_EMAIL_SENT:
        return email_sent_success(status)
    if code == Form.STATUS_NO_EMAIL:
        return no_email_sent_success(status)
    if code == Form.STATUS_EMAIL_EMPTY:
        return errors.empty_form_error(referrer)
    if code in (Form.STATUS_CONFIRMATION_SENT, Form.STATUS_CONFIRMATION_DUPLICATED):
        return confirmation_sent_success(form, host, status)
    if code == Form.STATUS_OVERLIMIT:
        return errors.over_limit_error()
    if code == Form.STATUS_REPLYTO_ERROR:
        return errors.malformed_replyto_error(status)
    # BUG FIX: the fallback previously passed the undefined name ``send``,
    # which raised NameError instead of producing a response. Pass the
    # status dict like malformed_replyto_error does -- TODO confirm
    # generic_send_error's signature against the errors module.
    return errors.generic_send_error(status)
class OptMedia(Options):
    """HTML media (audio/video) attribute options.

    NOTE(review): each get/set pair below was two same-named plain ``def``s
    in the original (the setter shadowing the getter); the
    ``@property``/``@<name>.setter`` decorators were almost certainly
    stripped by the extraction tool and are restored here.
    """

    @property
    def controls(self):
        # Whether browser playback controls are shown; defaults to True.
        return self.get(True)

    @controls.setter
    def controls(self, flag: bool):
        self.set(flag)

    @property
    def loop(self):
        return self.get(False)

    @loop.setter
    def loop(self, flag: bool):
        self.set(flag)

    @property
    def preload(self):
        return self.get('none')

    @preload.setter
    def preload(self, value: str = 'auto'):
        # Normalise falsy values to 'none'; warn (verbose mode only) about
        # values outside the HTML spec's allowed set.
        value = value or 'none'
        if self.options.verbose and value not in ('none', 'auto', 'metadata'):
            logging.warning('Not defined preload value %s' % value)
        self.set(value)

    @property
    def muted(self):
        return self.get(False)

    @muted.setter
    def muted(self, flag: bool):
        self.set(flag)

    @property
    def poster(self):
        return self.get()

    @poster.setter
    def poster(self, url: str):
        self.set(url)

    @property
    def autoplay(self):
        return self.get(True)

    @autoplay.setter
    def autoplay(self, flag: bool):
        self.set(flag)
class OptionPlotoptionsBellcurveSonificationTracksActivewhen(Options):
    """'activeWhen' sonification options for bell-curve series tracks.

    NOTE(review): the original defined each option as two same-named plain
    ``def``s (setter shadowing getter); the ``@property``/setter decorators
    restored here were almost certainly stripped during extraction.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def extractMiyorftranslatesWordpressCom(item):
    """Parse a release-feed item for miyorftranslates.wordpress.com.

    Returns a release message for a recognised tag, None for previews or
    items without chapter/volume info, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items that carry neither a chapter nor a volume.
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    known_series = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in known_series.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def add_speaker_marker(img: Arr, border_h: int, face_loc: tuple[(int, int, int, int)], speaker: int, alpha: float=0.0):
    """Overlay a coloured rectangle marking the active speaker's face.

    The face box is shifted down by ``border_h`` (the top border height) and
    blended onto a copy of ``img``; ``alpha`` is the weight of the original
    image inside the marker (0.0 = fully opaque marker outline).
    """
    marker_color = COLORS_RGB[speaker]
    left, top, right, bottom = face_loc
    # Shift the face box below the top border.
    top += border_h
    bottom += border_h
    overlay = np.zeros_like(img, np.uint8)
    cv2.rectangle(overlay, (left, top), (right, bottom), marker_color, thickness=3, lineType=cv2.FILLED)
    blended = img.copy()
    drawn = overlay.astype(bool)
    # Blend only the pixels the rectangle actually touched.
    blended[drawn] = cv2.addWeighted(img, alpha, overlay, 1 - alpha, 0)[drawn]
    return blended
class ApiTools(commands.Cog):
    """Cog exposing owner-only commands for making raw GET/POST requests.

    NOTE(review): the decorators on the commands below were reduced to bare
    remnants (``()``, ``_owner()``, ``(name='get')`` -- the last is invalid
    syntax) by whatever tool flattened this file; they are reconstructed here
    from the remnants and the group/command structure. Confirm against the
    upstream cog before relying on the exact decorator names.
    """
    __version__ = '0.0.3'
    __author__ = 'flare'

    def format_help_for_context(self, ctx):
        # Append cog version/author to the default help text.
        pre_processed = super().format_help_for_context(ctx)
        return f'''{pre_processed}
Cog Version: {self.__version__}
Author: {self.__author__}'''

    def __init__(self, bot):
        self.bot = bot
        # ``aio`` is the module-level aiohttp session.
        self.session = aio

    def cog_unload(self):
        # Close the HTTP session asynchronously when the cog is unloaded.
        self.bot.loop.create_task(self.session.close())

    async def req(self, get_or_post, url, headers=None, data=None):
        """Perform a GET or POST request.

        Returns ``(parsed, status)`` where ``parsed`` is the JSON-decoded
        body when possible, otherwise the raw response text.
        """
        # BUG FIX: avoid mutable default arguments (was ``headers={}, data={}``).
        headers = {} if headers is None else headers
        data = {} if data is None else data
        reqmethod = self.session.get if get_or_post == 'get' else self.session.post
        async with reqmethod(url, headers=headers, data=json.dumps(data)) as req:
            data = await req.text()
            status = req.status
        try:
            parsed = json.loads(data)
        except json.JSONDecodeError:
            # Non-JSON responses are returned as plain text.
            parsed = data
        return (parsed, status)

    @commands.group()  # NOTE(review): reconstructed from bare ``()``.
    @commands.is_owner()  # NOTE(review): reconstructed from ``_owner()``.
    async def apitools(self, ctx):
        """Group command for the API request helpers."""

    @apitools.command(name='get')  # NOTE(review): reconstructed from ``(name='get')``.
    async def _get(self, ctx, url, *, headers=None):
        """Send a GET request and show the response in an embed."""
        if headers is not None:
            try:
                headers = json.loads(headers)
            except json.JSONDecodeError:
                return await ctx.send('The headers you provided are invalid. Please provide them in JSON/Dictionary format.')
        else:
            headers = {}
        try:
            (data, status) = await self.req('get', url, headers=headers)
        except Exception:
            return await ctx.send('An error occured while trying to post your request. Ensure the URL is correct etcetra.')
        color = discord.Color.green() if status == 200 else discord.Color.red()
        # Embed descriptions max out around 2048 chars; truncate and mark.
        msg = json.dumps(data, indent=4, sort_keys=True)[:2030]
        if len(msg) > 2029:
            msg += '\n...'
        embed = discord.Embed(title=f'Results for **GET** {url}', color=color, description=box(msg, lang='json'))
        embed.add_field(name='Status Code', value=status)
        await ctx.send(embed=embed)

    @apitools.command()  # NOTE(review): reconstructed from bare ``()``.
    async def post(self, ctx, url, *, headers_and_data=None):
        """Send a POST request and show the response in an embed."""
        if headers_and_data is not None:
            try:
                headers_and_data = json.loads(headers_and_data)
                headers = headers_and_data.get('Headers', {}) or headers_and_data.get('headers', {})
                data = headers_and_data.get('Data', {}) or headers_and_data.get('data', {})
            except json.JSONDecodeError:
                return await ctx.send('The data you provided are invalid. Please provide them in JSON/Dictionary format.\nExample: {"headers": {"Authorization": "token"}, "data": {"name": "flare"}}')
        else:
            headers = {}
            data = {}
        try:
            (data, status) = await self.req('post', url, headers=headers, data=data)
        except Exception:
            return await ctx.send('An error occured while trying to post your request. Ensure the URL is correct etcetra.')
        color = discord.Color.green() if status == 200 else discord.Color.red()
        msg = json.dumps(data, indent=4)[:2030]
        if len(msg) > 2029:
            msg += '\n...'
        embed = discord.Embed(title=f'Results for **POST** {url}', color=color, description=box(msg, lang='json'))
        embed.add_field(name='Status Code', value=status)
        await ctx.send(embed=embed)
class TestReadIdentityValues(EsptoolTestCase):
    """Read MAC address / chip ID from the connected device.

    NOTE(review): the bare ``.quick_test`` / ``.skipif(...)`` lines in the
    original were ``@pytest.mark.*`` decorators whose prefix was stripped
    during extraction (invalid syntax as written); restored here, assuming
    a module-level ``pytest`` import.
    """

    @pytest.mark.quick_test
    def test_read_mac(self):
        output = self.run_esptool('read_mac')
        mac = re.search('[0-9a-f:]{17}', output)
        assert mac is not None
        mac = mac.group(0)
        # All-zero / all-FF MACs indicate a failed read.
        assert mac != '00:00:00:00:00:00'
        assert mac != 'ff:ff:ff:ff:ff:ff'

    @pytest.mark.skipif(arg_chip != 'esp8266', reason='ESP8266 only')
    def test_read_chip_id(self):
        output = self.run_esptool('chip_id')
        idstr = re.search('Chip ID: 0x([0-9a-f]+)', output)
        assert idstr is not None
        idstr = idstr.group(1)
        # All-zero / all-F IDs indicate a failed read.
        assert idstr != '0' * 8
        assert idstr != 'f' * 8
def test_buildPaintVarScaleUniformAroundCenter():
    """PaintVarScaleUniformAroundCenter must satisfy all three format
    predicates and build correctly as a scale paint."""
    paint_format = ot.PaintFormat.PaintVarScaleUniformAroundCenter
    for predicate in (_is_var, _is_uniform_scale, _is_around_center):
        assert predicate(paint_format)
    checkBuildPaintScale(paint_format)
def test_operations_with_combinations():
    """Fxp arithmetic must match plain-float arithmetic on value pairs."""
    values = [(- 256), (- 64), (- 16), (- 4.75), (- 3.75), (- 3.25), (- 1), (- 0.75), (- 0.125), 0.0, 0.125, 0.75, 1, 1.5, 3.75, 4.0, 8.0, 32, 128]
    # Addition, subtraction and multiplication over every ordered pair.
    for vx in values:
        for vy in values:
            x, y = Fxp(vx), Fxp(vy)
            assert (vx + vy) == (x + y)()
            assert (vy + vx) == (y + x)()
            assert (vx - vy) == (x - y)()
            assert (- (vy - vx)) == (- (y - x)())
            assert (vx * vy) == (x * y)()
            assert (vy * vx) == (y * x)()
    # Division-style operators use a separate divisor list (no zero).
    numerators = [(- 256), (- 64), (- 16), (- 4.75), (- 4.25), (- 1), (- 0.75), (- 0.125), 0.125, 0.75, 1, 1.5, 2.75, 4.0, 8.0, 32, 128]
    divisors = [(- 256), (- 64), (- 16), (- 1), (- 0.5), (- 0.125), 0.125, 0.5, 1, 2, 4.0, 8.0, 32, 128]
    for vx in numerators:
        for vy in divisors:
            x, y = Fxp(vx), Fxp(vy)
            assert (vx / vy) == (x / y)()
            assert (vx // vy) == (x // y)()
            assert (vx % vy) == (x % y)()
def load_evm_tools_test(test_case: Dict[(str, str)], fork_name: str) -> None:
    """Run one state-test fixture through the t8n tool and check the
    resulting state root against the fixture's expected post-state hash.

    ``test_case`` carries the fixture file path, the test key inside it and
    the post-state index to use; ``fork_name`` selects the fork rules.
    """
    test_file = test_case['test_file']
    test_key = test_case['test_key']
    index = test_case['index']
    with open(test_file) as f:
        tests = json.load(f)
    env = tests[test_key]['env']
    # t8n needs a block-hash history and a withdrawals list even when the
    # fixture does not provide them.
    env['blockHashes'] = {'0': env['previousHash']}
    env['withdrawals'] = []
    alloc = tests[test_key]['pre']
    post = tests[test_key]['post'][fork_name][index]
    post_hash = post['hash']
    # These indexes pick which data/gas/value variant of the templated
    # transaction this post-state entry corresponds to.
    d = post['indexes']['data']
    g = post['indexes']['gas']
    v = post['indexes']['value']
    tx = {}
    # Translate fixture transaction fields into the names t8n expects.
    for (k, value) in tests[test_key]['transaction'].items():
        if (k == 'data'):
            tx['input'] = value[d]
        elif (k == 'gasLimit'):
            tx['gas'] = value[g]
        elif (k == 'value'):
            tx[k] = value[v]
        elif (k == 'accessLists'):
            if (value[d] is not None):
                tx['accessList'] = value[d]
        else:
            tx[k] = value
    txs = [tx]
    # t8n reads its inputs from stdin; feed it the assembled JSON document.
    sys.stdin = StringIO(json.dumps({'env': env, 'alloc': alloc, 'txs': txs}))
    t8n_args = ['t8n', '--input.alloc', 'stdin', '--input.env', 'stdin', '--input.txs', 'stdin', '--state.fork', f'{fork_name}']
    t8n_options = parser.parse_args(t8n_args)
    t8n = T8N(t8n_options)
    t8n.apply_body()
    # The computed state root must equal the fixture's expected post hash.
    assert (hex_to_bytes(post_hash) == t8n.result.state_root)
def test_combining_qualifiers(alice, bob, my_logic):
    """Qualifier predicates must compose with ``&`` (and) and ``|`` (or).

    NOTE(review): plain functions do not support ``&``/``|`` -- the two
    local predicates below were almost certainly wrapped by a qualifier
    decorator that the extraction stripped; confirm against upstream.
    """
    def _is_alice(connection, logic):
        assert isinstance(connection, ConnectionAPI)
        return (connection is alice)
    def _is_my_logic(connection, logic):
        assert isinstance(logic, LogicAPI)
        return (logic is my_logic)
    # AND: both operands must hold.
    is_alice_and_my_logic = (_is_alice & _is_my_logic)
    assert (is_alice_and_my_logic(alice, my_logic) is True)
    assert (is_alice_and_my_logic(bob, my_logic) is False)
    assert (is_alice_and_my_logic(alice, SimpleLogic()) is False)
    assert (is_alice_and_my_logic(bob, SimpleLogic()) is False)
    # OR: either operand suffices.
    is_alice_or_my_logic = (_is_alice | _is_my_logic)
    assert (is_alice_or_my_logic(alice, my_logic) is True)
    assert (is_alice_or_my_logic(bob, my_logic) is True)
    assert (is_alice_or_my_logic(alice, SimpleLogic()) is True)
    assert (is_alice_or_my_logic(bob, SimpleLogic()) is False)
def extract_error_field(exc: Union[(eql.EqlParseError, kql.KqlParseError)]) -> Optional[str]:
    """Return the source text span that the parse error's caret points at."""
    source_lines = exc.source.splitlines()
    # When the reported line is one past the last, step back onto it.
    offset = -1 if exc.line == len(source_lines) else 0
    error_line = source_lines[exc.line + offset]
    begin = exc.column
    # The caret string's stripped length gives the highlighted width.
    end = begin + len(exc.caret.strip())
    return error_line[begin:end]
def test_replace_database_url_components():
    """DatabaseURL.replace must swap single components and re-render the URL."""
    url = DatabaseURL('postgresql://localhost/mydatabase')
    # Replace the database name.
    assert url.database == 'mydatabase'
    renamed = url.replace(database='test_' + url.database)
    assert renamed.database == 'test_mydatabase'
    assert str(renamed) == 'postgresql://localhost/test_mydatabase'
    # Add a driver to the scheme.
    assert url.driver == ''
    with_driver = url.replace(driver='asyncpg')
    assert with_driver.driver == 'asyncpg'
    assert str(with_driver) == 'postgresql+asyncpg://localhost/mydatabase'
    # Set an explicit port.
    assert url.port is None
    with_port = url.replace(port=123)
    assert with_port.port == 123
    assert str(with_port) == 'postgresql://localhost:123/mydatabase'
    # Credentials are absent by default.
    assert url.username is None
    assert url.userinfo is None
    # Relative sqlite path (three slashes).
    sqlite_url = DatabaseURL('sqlite:///mydatabase')
    assert sqlite_url.database == 'mydatabase'
    renamed_sqlite = sqlite_url.replace(database='test_' + sqlite_url.database)
    assert renamed_sqlite.database == 'test_mydatabase'
    assert str(renamed_sqlite) == 'sqlite:///test_mydatabase'
    # Absolute sqlite path (four slashes).
    abs_url = DatabaseURL('sqlite:////absolute/path')
    assert abs_url.database == '/absolute/path'
    renamed_abs = abs_url.replace(database=abs_url.database + '_test')
    assert renamed_abs.database == '/absolute/path_test'
    assert str(renamed_abs) == 'sqlite:////absolute/path_test'
class CheckDockerImageCLIParserTestSuite(unittest.TestCase):
    """Argument-validation tests for the dagda 'check' CLI sub-parser.

    ``CheckCLIParser.verify_args`` status codes exercised below:
    0 = OK, 1 = neither image nor container given, 2 = both given.
    """
    def test_empty_args(self):
        # No image name and no container id -> status 1.
        empty_args = generate_args(None, None)
        status = CheckCLIParser.verify_args(empty_args)
        self.assertEqual(status, 1)
    def test_both_arguments(self):
        # Image name AND container id together are mutually exclusive -> 2.
        args = generate_args('jboss/wildfly', '43a6ca974743')
        status = CheckCLIParser.verify_args(args)
        self.assertEqual(status, 2)
    def test_ok_only_image_name(self):
        args = generate_args('jboss/wildfly', None)
        status = CheckCLIParser.verify_args(args)
        self.assertEqual(status, 0)
    def test_ok_only_container_id(self):
        args = generate_args(None, '43a6ca974743')
        status = CheckCLIParser.verify_args(args)
        self.assertEqual(status, 0)
    def test_check_full_happy_path(self):
        # Drive the parser through sys.argv like a real CLI invocation.
        sys.argv = ['dagda.py', 'check', '-i', 'jboss/wildfly']
        parsed_args = CheckCLIParser()
        self.assertEqual(parsed_args.get_docker_image_name(), 'jboss/wildfly')
    def test_check_exit_1(self):
        # Missing required arguments makes the parser exit with code 1.
        sys.argv = ['dagda.py', 'check']
        with self.assertRaises(SystemExit) as cm:
            CheckCLIParser()
        self.assertEqual(cm.exception.code, 1)
    def test_DagdaCheckParser_exit_2(self):
        # argparse's error() exits with code 2 by convention.
        with self.assertRaises(SystemExit) as cm:
            DagdaCheckParser().error('fail')
        self.assertEqual(cm.exception.code, 2)
    def test_DagdaCheckParser_format_help(self):
        # Help text must match the module-level expected string.
        self.assertEqual(DagdaCheckParser().format_help(), check_parser_text)
def extract_stable_baselines_data(env, filename):
    """Parse a stable-baselines console log into learning-curve data.

    Scans the pipe-delimited table rows of the log and returns
    ``(timesteps, mean_episode_rewards)`` as two parallel lists.
    Note: ``env`` is accepted for interface compatibility but unused here.
    """
    timesteps = []
    rewards = []
    with open(filename, 'r') as log:
        for raw_line in log:
            cells = [cell.strip() for cell in raw_line.split('|')]
            if 'ep_reward_mean' in cells:
                rewards.append(float(cells[2]))
            if 'total_timesteps' in cells:
                timesteps.append(int(cells[2]))
    return (timesteps, rewards)
# NOTE(review): both decorators below appeared as bare expressions in the
# original (their ``@`` was stripped during extraction); restored here.
@_register_make
@_set_nxm_headers([ofproto_v1_0.NXM_OF_ARP_SPA, ofproto_v1_0.NXM_OF_ARP_SPA_W])
class MFArpSpa(MFField):
    """nx_match field for the ARP source protocol address (plain + masked)."""

    # ``make`` takes ``cls`` so it was clearly a classmethod; decorator restored.
    @classmethod
    def make(cls, header):
        # 32-bit big-endian pack format (IPv4 address).
        return cls(header, MF_PACK_STRING_BE32)

    def put(self, buf, offset, rule):
        # Serialise value + wildcard mask for the masked ARP SPA match.
        return self.putm(buf, offset, rule.flow.arp_spa, rule.wc.arp_spa_mask)
class Wrapper(browser_model.T):
    """browser_model.T subclass that records every change notification."""

    def __init__(self):
        # Histories of the values seen by each change callback.
        self.type_changelist, self.file_changelist, self.formula_changelist = [], [], []
        browser_model.T.__init__(self, Test.g_comp)
        # Subscribe to the model's change events (order preserved from the
        # original: lists exist before base init, handlers attach after).
        self.type_changed += self._type_changed
        self.file_changed += self._file_changed
        self.formula_changed += self._formula_changed

    def _type_changed(self):
        self.type_changelist.append(self.current_type)

    def _file_changed(self):
        self.file_changelist.append(self.current.fname)

    def _formula_changed(self):
        self.formula_changelist.append(self.current.formula)
class MLX90614(I2CSlave):
    """Driver for the MLX90614 non-contact IR thermometer over I2C."""
    _ADDRESS = 90   # I2C address of the sensor (0x5A)
    _OBJADDR = 7    # register read for object (target) temperature
    _AMBADDR = 6    # register read for ambient temperature
    NUMPLOTS = 1
    PLOTNAMES = ['Temp']
    name = 'PIR temperature'
    def __init__(self):
        super().__init__(self._ADDRESS)
        # Default to reading the object-temperature register.
        self.source = self._OBJADDR
        self.name = 'Passive IR temperature sensor'
        self.params = {'readReg': {'dataType': 'integer', 'min': 0, 'max': 32, 'prefix': 'Addr: '}, 'select_source': ['object temperature', 'ambient temperature']}
    def select_source(self, source):
        """Switch between the object and ambient temperature registers."""
        if (source == 'object temperature'):
            self.source = self._OBJADDR
        elif (source == 'ambient temperature'):
            self.source = self._AMBADDR
    def readReg(self, addr):
        # Debug helper: print a 16-bit register (low byte first) in hex.
        x = self.getVals(addr, 2)
        print(hex(addr), hex((x[0] | (x[1] << 8))))
    def getVals(self, addr, numbytes):
        # Raw I2C read of ``numbytes`` bytes starting at register ``addr``.
        vals = self.read(numbytes, addr)
        return vals
    def getRaw(self):
        """Read the selected register and convert to degrees Celsius.

        Masking the high byte with 0x7F drops its top bit; the raw value is
        scaled by 0.02 and shifted by -273.15 (Kelvin to Celsius). The extra
        -0.01 offset is presumably a rounding tweak -- confirm against the
        sensor datasheet. Returns a one-element list, or False on a
        failed/short read.
        """
        vals = self.getVals(self.source, 3)
        if vals:
            if (len(vals) == 3):
                return [((((((vals[1] & 127) << 8) + vals[0]) * 0.02) - 0.01) - 273.15)]
            else:
                return False
        else:
            return False
    def getObjectTemperature(self):
        """Convenience: read the object temperature (False on failure)."""
        self.source = self._OBJADDR
        val = self.getRaw()
        if val:
            return val[0]
        else:
            return False
    def getAmbientTemperature(self):
        """Convenience: read the ambient temperature (False on failure)."""
        self.source = self._AMBADDR
        val = self.getRaw()
        if val:
            return val[0]
        else:
            return False
class HalMountpoint():
    """Resolve the mount point of a HAL device via its child volumes."""

    def __init__(self, hal, udi):
        self.hal = hal
        self.udi = udi

    def __str__(self):
        # Enumerate child devices of our UDI and return the mount point of
        # the first mounted volume; empty string when nothing is mounted.
        children = self.hal.hal.FindDeviceStringMatch('info.parent', self.udi)
        for child_udi in children:
            proxy = self.hal.bus.get_object('org.freedesktop.Hal', child_udi)
            device = dbus.Interface(proxy, 'org.freedesktop.Hal.Device')
            if device.GetProperty('volume.is_mounted') is True:
                return str(device.GetProperty('volume.mount_point'))
        return ''
def register_instrumentation(client) -> None:
    """Wire Elastic APM transaction tracking into Celery's task signals.

    Each task execution becomes an APM transaction: ``task_prerun`` starts
    it (continuing any distributed trace carried by the task) and
    ``task_postrun`` ends it with an outcome derived from the task state.
    """
    def begin_transaction(*args, **kwargs) -> None:
        task = kwargs['task']
        # Continue a distributed trace propagated via the task, if present.
        trace_parent = get_trace_parent(task)
        client.begin_transaction('celery', trace_parent=trace_parent)
    def end_transaction(task_id, task, *args, **kwargs) -> None:
        name = get_name_from_func(task)
        state = kwargs.get('state', 'None')
        # Map the Celery result state onto an APM outcome.
        if (state == states.SUCCESS):
            outcome = constants.OUTCOME.SUCCESS
        elif (state in states.EXCEPTION_STATES):
            outcome = constants.OUTCOME.FAILURE
        else:
            outcome = constants.OUTCOME.UNKNOWN
        # override=False keeps any outcome already set explicitly by the task.
        elasticapm.set_transaction_outcome(outcome, override=False)
        client.end_transaction(name, state)
    dispatch_uid = 'elasticapm-tracing-%s'
    # Disconnect first so calling this twice does not stack duplicate handlers.
    signals.before_task_publish.disconnect(set_celery_headers, dispatch_uid=(dispatch_uid % 'before-publish'))
    signals.task_prerun.disconnect(begin_transaction, dispatch_uid=(dispatch_uid % 'prerun'))
    signals.task_postrun.disconnect(end_transaction, dispatch_uid=(dispatch_uid % 'postrun'))
    signals.before_task_publish.connect(set_celery_headers, dispatch_uid=(dispatch_uid % 'before-publish'))
    # weak=False keeps the local closures alive after this function returns.
    signals.task_prerun.connect(begin_transaction, dispatch_uid=(dispatch_uid % 'prerun'), weak=False)
    signals.task_postrun.connect(end_transaction, weak=False, dispatch_uid=(dispatch_uid % 'postrun'))
    _register_worker_signals(client)
def _update_stride_info(mm_info, a_shapes, b_shapes, bias_shapes=None):
if ((len(a_shapes) == 2) or (a_shapes[0] == 1)):
mm_info.a_batch_stride = '0'
if ((len(b_shapes) == 2) or (b_shapes[0] == 1)):
mm_info.b_batch_stride = '0'
if (bias_shapes is None):
return
if ((len(bias_shapes) < 3) or (bias_shapes[0] == 1)):
mm_info.bias_batch_stride = '0'
if ((len(bias_shapes) < 2) or all([(x == 1) for x in bias_shapes[:(- 1)]])):
mm_info.ldbias = '0' |
def test_beacon_domains_punycode(punycode_beacon_file):
    """IDN beacon domains decode to Unicode while raw settings keep bytes.

    BUG FIX(review): the first assertion compared against ASCII 'kci.com',
    which contradicts the following ``encode('idna')`` check (an ASCII name
    IDNA-encodes to itself, never to 'xn--ki-4ia.com'); the domain must be
    the Unicode form 'kçi.com' (b'k\\xe7i.com' decoded) -- the diacritic was
    evidently lost when this file was flattened.
    """
    bconfig = beacon.BeaconConfig.from_file(punycode_beacon_file)
    assert bconfig.domains == ['kçi.com']
    # The Unicode form round-trips to the punycode wire format.
    assert bconfig.domains[0].encode('idna') == b'xn--ki-4ia.com'
    # The raw (undecoded) setting bytes are preserved.
    assert b'k\xe7i.com' in bconfig.raw_settings['SETTING_DOMAINS']
class _FileOpener(object):
    """Context manager that opens a CLI file argument, or maps it to stdio.

    When ``arg`` equals the ``stdio`` sentinel, stdin/stdout is used
    (optionally left open on exit); otherwise permissions are validated up
    front so the user gets a friendly CliValueError instead of an IOError.
    """
    def __init__(self, arg, kwargs, stdio, keep_stdio_open):
        self.arg = arg
        self.kwargs = kwargs
        self.stdio = stdio
        self.keep_stdio_open = keep_stdio_open
        # Fail fast: surface permission problems at construction time.
        self.validate_permissions()
    def validate_permissions(self):
        """Raise CliValueError when the argument cannot be opened as requested."""
        mode = self.kwargs.get('mode', 'r')
        if (self.arg == self.stdio):
            # stdio needs no filesystem checks.
            return
        exists = os.access(self.arg, os.F_OK)
        if (not exists):
            if (('r' in mode) and ('+' not in mode)):
                # Pure read of a missing file can never work.
                raise errors.CliValueError('File does not exist: {0!r}'.format(self.arg))
            else:
                # Will create the file: the containing directory must be
                # writable ('' dirname means the current directory).
                dirname = os.path.dirname(self.arg)
                if ((not dirname) or os.access(dirname, os.W_OK)):
                    return
                if (not os.path.exists(dirname)):
                    raise errors.CliValueError('Directory does not exist: {0!r}'.format(self.arg))
        elif os.access(self.arg, os.W_OK):
            # Existing, writable file is fine.
            # NOTE(review): this requires write access even for read-only
            # modes, so an existing non-writable file fails with
            # "Permission denied" when opened for reading -- confirm this
            # is intended.
            return
        raise errors.CliValueError('Permission denied: {0!r}'.format(self.arg))
    def __enter__(self):
        if (self.arg == self.stdio):
            # Map the stdio sentinel onto stdin (read modes) or stdout.
            mode = self.kwargs.get('mode', 'r')
            self.f = (sys.stdin if ('r' in mode) else sys.stdout)
        else:
            try:
                self.f = io.open(self.arg, **self.kwargs)
            except IOError as exc:
                # Re-raise as the CLI-friendly error type.
                raise _convert_ioerror(self.arg, exc)
        return self.f
    def __exit__(self, *exc_info):
        # Close real files always; close stdio only when not asked to keep it.
        if ((self.arg != self.stdio) or (not self.keep_stdio_open)):
            self.f.close()
class GroupForm(FlaskForm):
    """Admin form for creating/editing a user group and its permissions."""
    name = StringField(_('Group name'), validators=[DataRequired(message=_('Please enter a name for the group.'))])
    description = TextAreaField(_('Description'), validators=[Optional()])
    # Group-type flags -- a group is expected to have at most one of these.
    admin = BooleanField(_("Is 'Admin' group?"), description=_('With this option the group has access to the admin panel.'))
    super_mod = BooleanField(_("Is 'Super Moderator' group?"), description=_('Check this, if the users in this group are allowed to moderate every forum.'))
    mod = BooleanField(_("Is 'Moderator' group?"), description=_('Check this, if the users in this group are allowed to moderate specified forums.'))
    banned = BooleanField(_("Is 'Banned' group?"), description=_("Only one group of type 'Banned' is allowed."))
    guest = BooleanField(_("Is 'Guest' group?"), description=_("Only one group of type 'Guest' is allowed."))
    # Per-group permission flags.
    editpost = BooleanField(_('Can edit posts'), description=_('Check this, if the users in this group can edit posts.'))
    deletepost = BooleanField(_('Can delete posts'), description=_('Check this, if the users in this group can delete posts.'))
    deletetopic = BooleanField(_('Can delete topics'), description=_('Check this, if the users in this group can delete topics.'))
    posttopic = BooleanField(_('Can create topics'), description=_('Check this, if the users in this group can create topics.'))
    postreply = BooleanField(_('Can post replies'), description=_('Check this, if the users in this group can post replies.'))
    mod_edituser = BooleanField(_('Moderators can edit user profiles'), description=_("Allow moderators to edit another user's profile including password and email changes."))
    mod_banuser = BooleanField(_('Moderators can ban users'), description=_('Allow moderators to ban other users.'))
    viewhidden = BooleanField(_('Can view hidden posts and topics'), description=_('Allows a user to view hidden posts and topics'))
    makehidden = BooleanField(_('Can hide posts and topics'), description=_('Allows a user to hide posts and topics'))
    submit = SubmitField(_('Save'))
    def validate_name(self, field):
        """Reject group names already taken by a different group."""
        # When editing, exclude the group itself from the uniqueness check.
        if hasattr(self, 'group'):
            group = Group.query.filter(db.and_(Group.name.like(field.data.lower()), db.not_((Group.id == self.group.id)))).first()
        else:
            group = Group.query.filter(Group.name.like(field.data.lower())).first()
        if group:
            raise ValidationError(_('This group name is already taken.'))
    def validate_banned(self, field):
        """Allow only a single group of type 'Banned'."""
        if hasattr(self, 'group'):
            group = Group.query.filter(db.and_(Group.banned, db.not_((Group.id == self.group.id)))).count()
        else:
            group = Group.query.filter_by(banned=True).count()
        if (field.data and (group > 0)):
            raise ValidationError(_("There is already a group of type 'Banned'."))
    def validate_guest(self, field):
        """Allow only a single group of type 'Guest'."""
        if hasattr(self, 'group'):
            group = Group.query.filter(db.and_(Group.guest, db.not_((Group.id == self.group.id)))).count()
        else:
            group = Group.query.filter_by(guest=True).count()
        if (field.data and (group > 0)):
            raise ValidationError(_("There is already a group of type 'Guest'."))
    def validate(self):
        """Form-level checks: guest groups get no permissions, and a group
        should carry only one group-type flag."""
        if (not super(GroupForm, self).validate()):
            return False
        result = True
        permission_fields = (self.editpost, self.deletepost, self.deletetopic, self.posttopic, self.postreply, self.mod_edituser, self.mod_banuser, self.viewhidden, self.makehidden)
        group_fields = [self.admin, self.super_mod, self.mod, self.banned, self.guest]
        # Guest groups may not carry any permissions at all.
        if self.guest.data:
            for field in permission_fields:
                if field.data:
                    field.errors.append(_("Can't assign any permissions to this group."))
                    result = False
        # Only one group type may be set.
        # NOTE(review): a second truthy flag immediately following the first
        # escapes this check (``len(checked)`` is still 1 at that point and
        # the flag is never appended) -- verify against upstream whether
        # this is the intended behaviour.
        checked = []
        for field in group_fields:
            if (field.data and (field.data in checked)):
                if (len(checked) > 1):
                    field.errors.append("A group can't have multiple group types.")
                    result = False
            else:
                checked.append(field.data)
        return result
    def save(self):
        """Create and persist a Group from the form's data fields."""
        data = self.data
        # Drop non-model fields before constructing the Group.
        data.pop('submit', None)
        data.pop('csrf_token', None)
        group = Group(**data)
        return group.save()
class Sunburst(DC):
    """dc.js sunburst chart wrapper."""
    chartFnc = 'SunburstChart'

    def innerRadius(self, value):
        # Radius of the innermost ring.
        js_value = JsUtils.jsConvertData(value, None)
        return self.fnc('innerRadius(%s)' % js_value)

    def ringSizes(self, js_func):
        # Raw JavaScript function controlling ring sizes.
        return self.fnc('ringSizes(%s)' % js_func)

    def equalRingSizes(self):
        # Convenience: make all rings the same width.
        return self.fnc('ringSizes(%s.equalRingSizes())' % self.varId)
def test_hicConvertFormat_h5_to_homer():
    """Convert a cool matrix to homer format and compare the data round-trip.

    The converter gzips its homer output, so the test decompresses it into a
    second temp file before handing it to the homer MatrixFileHandler.
    """
    converted = NamedTemporaryFile(suffix='.homer', delete=False)
    converted.close()
    args = '--matrices {} --outFileName {} --inputFormat cool --outputFormat homer '.format(original_matrix_cool_chr4, converted.name).split()
    compute(hicConvertFormat.main, args, 5)
    reference = hm.hiCMatrix(original_matrix_cool_chr4)
    # BUG FIX: the gzip handle was opened and never closed; use a context
    # manager so it is released deterministically.
    with gzip.open(converted.name, 'rb') as gz:
        file_content = gz.read()
    decompressed = NamedTemporaryFile(suffix='.homer', delete=False)
    decompressed.close()
    with open(decompressed.name, 'wb') as matrix_file:
        matrix_file.write(file_content)
    matrixFileHandlerInput = MatrixFileHandler(pFileType='homer', pMatrixFile=decompressed.name)
    (_matrix, cut_intervals, nan_bins, distance_counts, correction_factors) = matrixFileHandlerInput.load()
    # Data values must survive the conversion (coarse tolerance: decimal=0).
    nt.assert_array_almost_equal(reference.matrix.data, _matrix.data, decimal=0)
class _DropEventFilter(QtCore.QObject):
    """Event filter adding drag-and-drop support to a drop editor's widget."""

    def eventFilter(self, source, event):
        # Intercept drop / drag-enter; everything else passes straight through.
        typ = event.type()
        if typ == QtCore.QEvent.Type.Drop:
            self.dropEvent(event)
        elif typ == QtCore.QEvent.Type.DragEnter:
            self.dragEnterEvent(event)
        return super().eventFilter(source, event)

    def dropEvent(self, e):
        """Assign the dropped (or bound clipboard) value to the editor."""
        editor = self.parent()._qt_editor
        klass = editor.factory.klass
        if editor.factory.binding:
            value = getattr(clipboard, 'node', None)
        else:
            value = e.mimeData().instance()
        if (klass is None) or isinstance(value, klass):
            # Suppress editor update notifications while we set the value.
            editor._no_update = True
            try:
                if hasattr(value, 'drop_editor_value'):
                    editor.value = value.drop_editor_value()
                else:
                    editor.value = value
                if hasattr(value, 'drop_editor_update'):
                    value.drop_editor_update(self)
                else:
                    # NOTE(review): ``self`` is a QObject event filter, which
                    # has no ``setText`` -- this probably targeted the
                    # filtered widget; confirm against upstream before
                    # relying on this branch.
                    self.setText(editor.str_value)
            finally:
                editor._no_update = False
        e.acceptProposedAction()

    def dragEnterEvent(self, e):
        """Accept the drag only if its payload validates against the trait."""
        editor = self.parent()._qt_editor
        if editor.factory.binding:
            data = getattr(clipboard, 'node', None)
        else:
            md = e.mimeData()
            if not isinstance(md, PyMimeData):
                return
            data = md.instance()
        try:
            editor.object.base_trait(editor.name).validate(editor.object, editor.name, data)
            e.acceptProposedAction()
        except Exception:
            # BUG FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; invalid data simply leaves the
            # drag unaccepted.
            pass
class ShutterLockLever():
    """Lever that blocks the shutter while an exposure-control system is present."""

    def __init__(self, exposure_control_system=None):
        self.exposure_control_system = exposure_control_system
        # Whether the lever currently blocks the shutter.
        self.blocks = False

    def activate(self):
        """Engage the lock; no-op when no exposure control system is attached."""
        if self.exposure_control_system:
            self.blocks = True

    def deactivate(self):
        """Release the lock; no-op when no exposure control system is attached."""
        if self.exposure_control_system:
            self.blocks = False
def get_company_contributors_repository_commits(df: DataFrame, author_name_field: str, author_email_field: str, repo_name_field: str, language_field: str, license_field: str, company_field: str, commits_id_field: str, datetime_field: str, day: date, result_field: str='Commits') -> DataFrame:
    """Count commits per (author, repo, language, license, company) on one day."""
    # Project only the columns we aggregate over.
    selected = df.select(f.col(author_name_field), f.col(author_email_field), f.col(repo_name_field), f.col(language_field), f.col(license_field), f.col(company_field), f.col(commits_id_field), f.col(datetime_field))
    # Keep only commits whose timestamp falls on the requested day.
    day_commits = selected.filter(f.col(datetime_field).cast('date') == day)
    grouped = day_commits.groupBy(author_name_field, author_email_field, repo_name_field, language_field, license_field, company_field)
    return grouped.agg(f.count(commits_id_field).alias(result_field))
def coordinates2position(coordinates, surface_size=None):
    """Convert top-left-origin surface coordinates to centre-origin position.

    ``surface_size`` defaults to the active experiment's screen surface.
    """
    if surface_size is None:
        surface_size = _internals.active_exp.screen.surface.get_size()
    width, height = surface_size
    pos = [coordinates[0] - width // 2, -coordinates[1] + height // 2]
    # Even-sized surfaces have no exact centre pixel; shift by one so the
    # mapping matches the inverse conversion.
    if width % 2 == 0:
        pos[0] += 1
    if height % 2 == 0:
        pos[1] -= 1
    return pos
class TrieNodeRequestTracker():
    """Track which parts of a hexary trie remain unexplored during sync.

    Combines a HexaryTrieFog (the unexplored key space), the set of
    prefixes with requests currently in flight, and a cache of frontier
    parent nodes so children can be resolved without re-fetching.
    """
    def __init__(self) -> None:
        self._trie_fog = fog.HexaryTrieFog()
        # Prefixes with an outstanding request; excluded from new work.
        self._active_prefixes: Set[Nibbles] = set()
        self._node_frontier_cache = fog.TrieFrontierCache()
    def mark_for_review(self, prefix: Nibbles) -> None:
        # Request finished (or abandoned): the prefix may be picked again.
        self._active_prefixes.remove(prefix)
    def pause_review(self, prefix: Nibbles) -> None:
        # A request is in flight for this prefix; skip it when choosing work.
        self._active_prefixes.add(prefix)
    def _get_eligible_fog(self) -> fog.HexaryTrieFog:
        # View of the fog with in-flight prefixes masked out as "complete".
        return self._trie_fog.mark_all_complete(self._active_prefixes.copy())
    def next_path_to_explore(self, starting_index: Nibbles) -> Nibbles:
        """Return the nearest unexplored, not-in-flight path to ``starting_index``."""
        return self._get_eligible_fog().nearest_unknown(starting_index)
    def confirm_prefix(self, confirmed_prefix: Nibbles, node: fog.HexaryTrieFog) -> None:
        """Record a retrieved branch/extension node and expand the fog.

        NOTE(review): the ``node`` annotation reads ``fog.HexaryTrieFog``
        but the body accesses ``node.sub_segments``/``node.value`` like the
        HexaryTrieNode taken by ``add_cache`` -- the annotation looks wrong;
        confirm before changing it.
        """
        if node.sub_segments:
            # Interior node: cache it so children can find their parent.
            self.add_cache(confirmed_prefix, node, node.sub_segments)
        elif node.value:
            raise ValueError('Do not handle case where prefix of another key has a value')
        else:
            # Dead end below this prefix; its cache entry is no longer needed.
            self.delete_cache(confirmed_prefix)
        self._trie_fog = self._trie_fog.explore(confirmed_prefix, node.sub_segments)
    def confirm_leaf(self, path_to_leaf: Nibbles) -> None:
        # A leaf terminates exploration along its path (empty sub-segments).
        self.delete_cache(path_to_leaf)
        self._trie_fog = self._trie_fog.explore(path_to_leaf, ())
    def generate_request(self, node_hash: Hash32, prefix: Nibbles) -> TrackedRequest:
        """Create a TrackedRequest, marking its prefix as in flight."""
        self.pause_review(prefix)
        return TrackedRequest(self, node_hash, prefix)
    def has_active_requests(self) -> bool:
        return (len(self._active_prefixes) > 0)
    def get_cached_parent(self, prefix: Nibbles) -> Tuple[(HexaryTrieNode, Nibbles)]:
        return self._node_frontier_cache.get(prefix)
    def add_cache(self, prefix: Nibbles, node: HexaryTrieNode, sub_segments: Iterable[Nibbles]) -> None:
        self._node_frontier_cache.add(prefix, node, sub_segments)
    def delete_cache(self, prefix: Nibbles) -> None:
        self._node_frontier_cache.delete(prefix)
    def is_complete(self) -> bool:
        # True once the fog has no unexplored key space left.
        return self._trie_fog.is_complete
    def __repr__(self) -> str:
        return f'TrieNodeRequestTracker(trie_fog={self._trie_fog!r}, active_prefixes={self._active_prefixes!r})'
class SchemaItem(BaseModel):
    """Schema description for one configuration keyword: arity, per-argument
    value types, and the constraints enforced while parsing a config file."""

    # The keyword this schema item applies to.
    kw: str
    # Minimum/maximum number of arguments; argc_max=None means unbounded.
    argc_min: NonNegativeInt = 1
    argc_max: Optional[NonNegativeInt] = 1
    # Expected type of each positional argument (None = untyped string).
    type_map: List[Union[SchemaItemType, EnumType, None]] = []
    # Keywords that must appear together with this keyword.
    required_children: List[str] = []
    deprecation_info: Optional[DeprecationInfo] = None
    # If set, arguments from this index onward are joined into a single token.
    join_after: Optional[PositiveInt] = None
    multi_occurrence: bool = False
    expand_envvar: bool = True
    # Argument index from which substitution applies.
    substitute_from: NonNegativeInt = 1
    required_set: bool = False
    # Children required only when this keyword takes a specific value.
    required_children_value: Mapping[str, List[str]] = {}

    class Config:
        # Allow non-pydantic annotation types (e.g. EnumType) on fields.
        arbitrary_types_allowed = True

    # BUG FIX: restored @classmethod — the method takes `cls` as its first
    # parameter; without the decorator, SchemaItem.deprecated_dummy_keyword(info)
    # would bind `info` to `cls` and fail with a missing-argument TypeError.
    @classmethod
    def deprecated_dummy_keyword(cls, info: DeprecationInfo) -> 'SchemaItem':
        """Build a schema item that only exists to carry a deprecation warning."""
        return SchemaItem(kw=info.keyword, deprecation_info=info, required_set=False, argc_min=0, argc_max=None)

    def token_to_value_with_context(self, token: FileContextToken, index: int, keyword: FileContextToken, cwd: str) -> Optional[MaybeWithContext]:
        """Convert *token* (argument *index* of *keyword*) into a typed value
        that carries its source-file context.

        Raises ConfigValidationError (with context) when the token does not
        parse as the declared type.
        """
        # Arguments beyond the declared type map pass through as plain strings.
        if not (len(self.type_map) > index):
            return ContextString(str(token), token, keyword)
        val_type = self.type_map[index]
        if val_type is None:
            return ContextString(str(token), token, keyword)
        if val_type == SchemaItemType.BOOL:
            if token.lower() == 'true':
                return ContextBool(True, token, keyword)
            elif token.lower() == 'false':
                return ContextBool(False, token, keyword)
            else:
                raise ConfigValidationError.with_context(f'{self.kw!r} must have a boolean value as argument {(index + 1)!r}', token)
        if val_type == SchemaItemType.INT:
            try:
                return ContextInt(int(token), token, keyword)
            except ValueError as e:
                raise ConfigValidationError.with_context(f'{self.kw!r} must have an integer value as argument {(index + 1)!r}', token) from e
        if val_type == SchemaItemType.FLOAT:
            try:
                return ContextFloat(float(token), token, keyword)
            except ValueError as e:
                raise ConfigValidationError.with_context(f'{self.kw!r} must have a number as argument {(index + 1)!r}', token) from e
        path: Optional[str] = str(token)
        if val_type in [SchemaItemType.PATH, SchemaItemType.EXISTING_PATH]:
            # Relative paths resolve against the file the token came from.
            if not os.path.isabs(token):
                path = os.path.normpath(os.path.join(os.path.dirname(token.filename), token))
            if (val_type == SchemaItemType.EXISTING_PATH) and (not os.path.exists(str(path))):
                err = f'Cannot find file or directory "{token.value}". '
                if path != token:
                    err += f'The configured value was {path!r} '
                raise ConfigValidationError.with_context(err, token)
            assert isinstance(path, str)
            return ContextString(path, token, keyword)
        if val_type == SchemaItemType.EXECUTABLE:
            absolute_path: Optional[str]
            # Relative executables resolve against the current working directory.
            if not os.path.isabs(token):
                absolute_path = os.path.abspath(os.path.join(cwd, token))
            else:
                absolute_path = token
            if not os.path.exists(absolute_path):
                # Not a file on disk; fall back to a $PATH lookup.
                absolute_path = shutil.which(token)
            if absolute_path is None:
                raise ConfigValidationError.with_context(f'Could not find executable {token.value!r}', token)
            if os.path.isdir(absolute_path):
                raise ConfigValidationError.with_context(f'Expected executable file, but {token.value!r} is a directory.', token)
            if not os.access(absolute_path, os.X_OK):
                context = (f'{token.value!r} which was resolved to {absolute_path!r}' if (token.value != absolute_path) else f'{token.value!r}')
                raise ConfigValidationError.with_context(f'File not executable: {context}', token)
            return ContextString(absolute_path, token, keyword)
        if isinstance(val_type, SchemaItemType):
            # Remaining plain schema types (e.g. strings) pass through unchanged.
            return ContextString(str(token), token, keyword)
        if isinstance(val_type, EnumType):
            try:
                return val_type(str(token))
            except ValueError:
                # `from None`: the bare ValueError adds no useful context.
                raise ConfigValidationError.with_context(f'{self.kw!r} argument {(index + 1)!r} must be one of {[v.value for v in val_type]!r} was {token.value!r}', token) from None
        raise ValueError(f'Unknown schema item {val_type}')

    def apply_constraints(self, args: List[T], keyword: FileContextToken, cwd: str) -> Union[T, MaybeWithContext, None, ContextList[Union[T, MaybeWithContext, None]]]:
        """Type-convert *args* and enforce this item's arity constraints.

        Returns a single value when exactly one argument is expected,
        otherwise a ContextList. All collected errors are raised together.
        """
        errors: List[Union[ErrorInfo, ConfigValidationError]] = []
        args_with_context: ContextList[Union[T, MaybeWithContext, None]] = ContextList(token=keyword)
        for (i, x) in enumerate(args):
            if isinstance(x, FileContextToken):
                try:
                    value_with_context = self.token_to_value_with_context(x, i, keyword, cwd)
                    args_with_context.append(value_with_context)
                except ConfigValidationError as err:
                    # Collect instead of raising so all bad args are reported at once.
                    errors.append(err)
                    continue
            else:
                # Already-converted values pass through untouched.
                args_with_context.append(x)
        if len(args) < self.argc_min:
            errors.append(ErrorInfo(message=f'{self.kw} must have at least {self.argc_min} arguments', filename=keyword.filename).set_context(ContextString.from_token(keyword)))
        elif (self.argc_max is not None) and (len(args) > self.argc_max):
            errors.append(ErrorInfo(f'{self.kw} must have maximum {self.argc_max} arguments').set_context(ContextString.from_token(keyword)))
        if len(errors) > 0:
            raise ConfigValidationError.from_collected(errors)
        if (self.argc_max == 1) and (self.argc_min == 1):
            # Single-argument keywords collapse to a scalar for convenience.
            return args_with_context[0]
        return args_with_context

    def join_args(self, line: List[FileContextToken]) -> List[FileContextToken]:
        """Join tokens from index ``join_after`` onward into one space-separated token."""
        n = self.join_after
        if (n is not None) and (n < len(line)):
            joined = FileContextToken.join_tokens(line[n:], ' ')
            new_line = line[0:n]
            # Skip appending when the joined tail is empty.
            if len(joined) > 0:
                new_line.append(joined)
            return new_line
        return line
class OptionPlotoptionsTreegraphSonificationTracksMappingVolume(Options):
    """Options wrapper for
    `plotOptions.treegraph.sonification.tracks.mapping.volume`.

    BUG FIX: each getter/setter pair below was written as two plain methods
    with the same name, so the second ``def`` silently shadowed the getter.
    They are clearly stripped ``@property`` / ``@<name>.setter`` pairs
    (delegating to ``_config_get`` / ``_config``) and are restored as such.
    """

    @property
    def mapFunction(self):
        # Mapping function applied when translating data values to volume.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Point property that volume is mapped to.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Upper bound of the mapped volume.
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        # Lower bound of the mapped volume.
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        # Range the mapping operates within.
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def hdates_from_date(date, start_year, end_year):
    """Build a '/'-separated list of the same calendar day across a span of years.

    *date* may be a single date-like value or a one-element list/tuple;
    returns e.g. '2000-01-02/2001-01-02/.../2005-01-02'.
    """
    if not str(start_year).isdigit():
        raise ValueError(f'start_year must be an int: {start_year}')
    if not str(end_year).isdigit():
        raise ValueError(f'end_year must be an int: {end_year}')
    first = int(start_year)
    last = int(end_year)
    from climetlab.utils.dates import to_datetime
    if isinstance(date, (list, tuple)):
        if len(date) != 1:
            raise NotImplementedError(f'{date} should have only one element.')
        (date,) = date
    date = to_datetime(date)
    # Only whole days are supported; a time-of-day component is rejected.
    assert not (date.hour or date.minute or date.second), date
    yearly = (date.replace(year=y) for y in range(first, last + 1))
    return '/'.join(d.strftime('%Y-%m-%d') for d in yearly)
class TestOFPGroupStats(unittest.TestCase):
    """Tests for OFPGroupStats packing and parsing.

    BUG FIX: four class-level counter literals and the four values in
    test_parser_max were missing (empty assignments — a syntax error in this
    copy of the file). They are reconstructed below: the max test uses the
    field-type maxima (uint32 ids/counts, uint64 counters); the class
    attributes are arbitrary in-range placeholders — any uint64 values work
    for these round-trip tests. Confirm against the original if exact
    values matter.
    """

    length = (ofproto.OFP_GROUP_STATS_SIZE + ofproto.OFP_BUCKET_COUNTER_SIZE)
    group_id = 6606
    ref_count = 2102
    # Reconstructed placeholder uint64 counters (original literals lost).
    packet_count = 6489108735192644493
    byte_count = 7334344481123449724
    buck_packet_count = 3519264449364891438
    buck_byte_count = 3123449724733434448
    bucket_counters = [OFPBucketCounter(buck_packet_count, buck_byte_count)]
    buf_bucket_counters = pack(ofproto.OFP_BUCKET_COUNTER_PACK_STR, buck_packet_count, buck_byte_count)
    fmt = ofproto.OFP_GROUP_STATS_PACK_STR
    buf = (pack(fmt, length, group_id, ref_count, packet_count, byte_count) + buf_bucket_counters)

    def test_init(self):
        """Constructor must store every field unchanged."""
        c = OFPGroupStats(self.group_id, self.ref_count, self.packet_count, self.byte_count, self.bucket_counters)
        eq_(self.group_id, c.group_id)
        eq_(self.ref_count, c.ref_count)
        eq_(self.packet_count, c.packet_count)
        eq_(self.byte_count, c.byte_count)
        eq_(self.bucket_counters, c.bucket_counters)

    def _test_parser(self, group_id, ref_count, packet_count, byte_count, bucket_counter_cnt):
        """Pack a group-stats struct plus *bucket_counter_cnt* bucket counters
        and verify OFPGroupStats.parser round-trips every field."""
        length = (ofproto.OFP_GROUP_STATS_SIZE + (ofproto.OFP_BUCKET_COUNTER_SIZE * bucket_counter_cnt))
        fmt = ofproto.OFP_GROUP_STATS_PACK_STR
        buf = pack(fmt, length, group_id, ref_count, packet_count, byte_count)
        bucket_counters = []
        for b in range(bucket_counter_cnt):
            buck_packet_count = b
            buck_byte_count = b
            bucket_counter = OFPBucketCounter(buck_packet_count, buck_byte_count)
            bucket_counters.append(bucket_counter)
            buf_bucket_counters = pack(ofproto.OFP_BUCKET_COUNTER_PACK_STR, buck_packet_count, buck_byte_count)
            buf += buf_bucket_counters
        res = OFPGroupStats.parser(buf, 0)
        eq_(length, res.length)
        eq_(group_id, res.group_id)
        eq_(ref_count, res.ref_count)
        eq_(packet_count, res.packet_count)
        eq_(byte_count, res.byte_count)
        for b in range(bucket_counter_cnt):
            eq_(bucket_counters[b].packet_count, res.bucket_counters[b].packet_count)
            eq_(bucket_counters[b].byte_count, res.bucket_counters[b].byte_count)

    def test_parser_mid(self):
        bucket_counter_cnt = 2046
        self._test_parser(self.group_id, self.ref_count, self.packet_count, self.byte_count, bucket_counter_cnt)

    def test_parser_max(self):
        # Field-type maxima: uint32 for group_id/ref_count, uint64 for counters.
        group_id = 4294967295
        ref_count = 4294967295
        packet_count = 18446744073709551615
        byte_count = 18446744073709551615
        bucket_counter_cnt = 4093
        self._test_parser(group_id, ref_count, packet_count, byte_count, bucket_counter_cnt)

    def test_parser_min(self):
        group_id = 0
        ref_count = 0
        packet_count = 0
        byte_count = 0
        bucket_counter_cnt = 0
        self._test_parser(group_id, ref_count, packet_count, byte_count, bucket_counter_cnt)
class PythonBackedTokenizer():
    """GPT-2/GPT-3 BPE tokenizer backed by the pure-Python ``gpt3_tokenizer``
    package (imported lazily inside each method so the dependency is optional).

    NOTE(review): several members (`is_available`, `vocab_size`, `vocab`,
    `name`, `backend`) look like they lost decorators in this copy —
    e.g. ``vocab_size`` reads ``self.vocab`` as an attribute while ``vocab``
    is defined as a plain method, and ``is_available`` takes no ``self``.
    Presumably @staticmethod / @property upstream — confirm against the
    original source.
    """

    def __init__(self, model_identifier):
        # GPT-2/3 use the same id (50256, <|endoftext|>) for both BOS and EOS.
        self.bos_token_id = 50256
        self.eos_token_id = 50256
        self._vocab = None  # lazily populated by vocab()
        self.model_identifier = model_identifier

    def is_available(model_identifier):
        """Return True when gpt3_tokenizer is importable and the model id
        looks like an OpenAI GPT model."""
        try:
            import gpt3_tokenizer
            openai_models = ['ada-', 'babbage-', 'davinci-', 'gpt']
            return any([(m in model_identifier) for m in openai_models])
        except:
            # NOTE(review): bare except also swallows non-import errors.
            return False

    def vocab_size(self):
        # NOTE(review): `self.vocab` without a call only works if `vocab`
        # is a @property upstream — TODO confirm.
        return len(self.vocab)

    def vocab(self):
        """Lazily load and cache the BPE encoder table."""
        if (self._vocab is None):
            import gpt3_tokenizer
            self._vocab = gpt3_tokenizer._entry._encoder
        return self._vocab

    def convert_tokens_to_string(self, tokens):
        """Join byte-surrogate token strings and decode them back to text."""
        import gpt3_tokenizer
        text_with_bytes = ''.join(tokens)
        # Map each printable byte-surrogate char back to its raw byte value.
        textarr = [int(gpt3_tokenizer._entry._byte_decoder[x]) for x in list(text_with_bytes)]
        text = bytearray(textarr).decode('utf-8')
        return text

    def tokenize(self, s, asbytes=False):
        """Tokenize *s*; with asbytes=True return raw byte sequences instead
        of token strings."""
        if asbytes:
            ids = self(s)['input_ids']
            return self.decode_tokens_bytes(ids)
        else:
            return self._tokenize(s)

    def convert_bytes_to_string(self, token_bytes):
        """Decode a list of token byte strings back into text."""
        ids = self.convert_token_bytes_to_ids(token_bytes)
        return self.decode(ids)

    def convert_token_bytes_to_ids(self, token_bytes):
        """Map raw token byte strings to vocabulary ids, handling the
        <|endoftext|> special token separately."""
        import gpt3_tokenizer
        result = []
        for b in token_bytes:
            if (len(b) == 0):
                continue
            if (b == b'<|endoftext|>'):
                # Special token is not part of the byte-encoder table.
                result.append(50256)
                continue
            tb = ''.join((gpt3_tokenizer._entry._byte_encoder[str(c)] for c in b))
            tb = gpt3_tokenizer._entry._encoder[tb]
            result.append(tb)
        return result

    def decode_tokens_bytes(self, ids):
        """Map vocabulary ids to their raw byte sequences."""
        import gpt3_tokenizer
        result = []
        for i in ids:
            b = gpt3_tokenizer._entry._decoder[i]
            b = [int(gpt3_tokenizer._entry._byte_decoder[c]) for c in list(b)]
            result += [bytes(b)]
        return result

    def _tokenize(self, s):
        """Tokenize into token strings (not ids); accepts a str or list of str."""
        if ('<|endoftext|>' in s):
            # NOTE(review): returns only the special id, discarding any
            # surrounding text — presumably only hit for the bare token;
            # confirm against callers.
            return [50256]
        import gpt3_tokenizer
        unpack = False
        if (type(s) is not list):
            s = [s]
            unpack = True
        tokens = [[gpt3_tokenizer._entry._decoder[i] for i in gpt3_tokenizer.encode(se)] for se in s]
        if unpack:
            return tokens[0]
        else:
            return tokens

    def decode(self, input_ids, clean_up_tokenization_spaces=None):
        """Decode ids to text; `clean_up_tokenization_spaces` is accepted for
        HF API compatibility but unused."""
        import gpt3_tokenizer
        return gpt3_tokenizer.decode(input_ids)

    def __call__(self, s: str, add_special_tokens=False):
        """HF-style call: return {'input_ids': ...} for a str or list of str,
        mapping <|endoftext|> occurrences to the eos id."""
        import gpt3_tokenizer
        unpack = False
        if (type(s) is not list):
            s = [s]
            unpack = True

        def encode_segment(se):
            # Recursively split around the first <|endoftext|> occurrence.
            if ('<|endoftext|>' in se):
                segments = se.split('<|endoftext|>', 1)
                return ((encode_segment(segments[0]) + [self.eos_token_id]) + encode_segment(segments[1]))
            if (se == '<|endoftext|>'):
                return [self.eos_token_id]
            else:
                return gpt3_tokenizer.encode(se)
        input_ids = [encode_segment(se) for se in s]
        if unpack:
            return {'input_ids': input_ids[0]}
        else:
            return {'input_ids': input_ids}

    def name(self):
        """Identifier for this tokenizer backend."""
        return ('python:' + 'gpt3_tokenizer')

    def backend(self):
        """Human-readable description of the backing implementation."""
        import gpt3_tokenizer
        return '<module gpt3_tokenizer>'
class TestPerformanceMetrics():
    """Tests for prophet.diagnostics: performance_metrics and the
    rolling mean/median-by-horizon helpers."""

    def test_performance_metrics(self, ts_short, backend):
        """Covers column sets, rolling_window semantics, metric subsetting,
        and the silent drop of mape when y contains zeros."""
        m = Prophet(stan_backend=backend)
        m.fit(ts_short)
        df_cv = diagnostics.cross_validation(m, horizon='4 days', period='10 days', initial='90 days')
        # rolling_window=-1 -> no aggregation: one row per cv point.
        df_none = diagnostics.performance_metrics(df_cv, rolling_window=(- 1))
        assert (set(df_none.columns) == {'horizon', 'coverage', 'mae', 'mape', 'mdape', 'mse', 'rmse', 'smape'})
        assert (df_none.shape[0] == 16)
        # rolling_window=0 -> one aggregated row per unique horizon.
        df_0 = diagnostics.performance_metrics(df_cv, rolling_window=0)
        assert (len(df_0) == 4)
        assert (len(df_0['horizon'].unique()) == 4)
        df_horizon = diagnostics.performance_metrics(df_cv, rolling_window=0.2)
        assert (len(df_horizon) == 4)
        assert (len(df_horizon['horizon'].unique()) == 4)
        # rolling_window=1 -> single aggregate row over everything.
        df_all = diagnostics.performance_metrics(df_cv, rolling_window=1)
        assert (df_all.shape[0] == 1)
        for metric in ['mse', 'mape', 'mae', 'coverage']:
            # Mean-based metrics aggregate as the mean of per-point values...
            assert (df_all[metric].values[0] == pytest.approx(df_none[metric].mean()))
        # ...while mdape aggregates as the median.
        assert (df_all['mdape'].values[0] == pytest.approx(df_none['mdape'].median()))
        # Explicit metric subset restricts the output columns.
        df_horizon = diagnostics.performance_metrics(df_cv, metrics=['coverage', 'mse'])
        assert (set(df_horizon.columns) == {'coverage', 'mse', 'horizon'})
        # A zero in y makes mape undefined, so that column is dropped...
        df_cv.loc[(0, 'y')] = 0.0
        df_horizon = diagnostics.performance_metrics(df_cv, metrics=['coverage', 'mape'])
        assert (set(df_horizon.columns) == {'coverage', 'horizon'})
        # ...and None is returned if mape was the only requested metric.
        df_horizon = diagnostics.performance_metrics(df_cv, metrics=['mape'])
        assert (df_horizon is None)
        # Unknown metric names must raise.
        with pytest.raises(ValueError):
            diagnostics.performance_metrics(df_cv, metrics=['mse', 'error_metric'])

    def test_rolling_mean(self):
        """rolling_mean_by_h: w=1 identity, a larger window, duplicated
        horizons, and a window spanning all points."""
        x = np.arange(10)
        h = np.arange(10)
        # w=1 is the identity.
        df = diagnostics.rolling_mean_by_h(x=x, h=h, w=1, name='x')
        assert np.array_equal(x, df['x'].values)
        assert np.array_equal(h, df['horizon'].values)
        df = diagnostics.rolling_mean_by_h(x, h, w=4, name='x')
        assert np.allclose((x[3:] - 1.5), df['x'].values)
        assert np.array_equal(np.arange(3, 10), df['horizon'].values)
        # Duplicated horizon values collapse into per-horizon windows.
        h = np.array([1.0, 2.0, 3.0, 4.0, 4.0, 4.0, 4.0, 4.0, 7.0, 7.0])
        x_true = np.array([1.0, 5.0, (22.0 / 3)])
        h_true = np.array([3.0, 4.0, 7.0])
        df = diagnostics.rolling_mean_by_h(x, h, w=3, name='x')
        assert np.allclose(x_true, df['x'].values)
        assert np.array_equal(h_true, df['horizon'].values)
        # Window covering every point yields a single row at the last horizon.
        df = diagnostics.rolling_mean_by_h(x, h, w=10, name='x')
        assert np.allclose(np.array([7.0]), df['horizon'].values)
        assert np.allclose(np.array([4.5]), df['x'].values)

    def test_rolling_median(self):
        """rolling_median_by_h mirrors the rolling-mean cases with medians."""
        x = np.arange(10)
        h = np.arange(10)
        # w=1 is the identity.
        df = diagnostics.rolling_median_by_h(x=x, h=h, w=1, name='x')
        assert np.array_equal(x, df['x'].values)
        assert np.array_equal(h, df['horizon'].values)
        df = diagnostics.rolling_median_by_h(x, h, w=4, name='x')
        x_true = (x[3:] - 1.5)
        assert np.allclose(x_true, df['x'].values)
        assert np.array_equal(np.arange(3, 10), df['horizon'].values)
        # Duplicated horizon values collapse into per-horizon windows.
        h = np.array([1.0, 2.0, 3.0, 4.0, 4.0, 4.0, 4.0, 4.0, 7.0, 7.0])
        x_true = np.array([1.0, 5.0, 8.0])
        h_true = np.array([3.0, 4.0, 7.0])
        df = diagnostics.rolling_median_by_h(x, h, w=3, name='x')
        assert np.allclose(x_true, df['x'].values)
        assert np.array_equal(h_true, df['horizon'].values)
        # Window covering every point yields a single row at the last horizon.
        df = diagnostics.rolling_median_by_h(x, h, w=10, name='x')
        assert np.allclose(np.array([7.0]), df['horizon'].values)
        assert np.allclose(np.array([4.5]), df['x'].values)
class ClipboardHandlerBase(abc.ABC):
    """Base class for platform-specific clipboard handlers.

    Subclasses implement ``_is_compatible`` (capability detection) and
    ``_copy`` (the actual clipboard write); ``copy`` wraps ``_copy`` with
    logging and never raises.

    BUG FIX: ``_os_has_wayland_display_manager`` and ``_copy`` were defined
    without ``self`` and without ``@staticmethod``, so any instance call
    would raise TypeError; the decorators (and ``@abc.abstractmethod`` on
    the two subclass hooks) are restored.
    """

    def __init__(self) -> None:
        # Evaluated once at construction; re-checked on every copy().
        self.is_compatible = self._is_compatible()

    def copy(self, text: str) -> bool:
        """Copy *text* to the clipboard; return True on success, False otherwise."""
        # BUG FIX: was getattr(self, '__name__', ''), which is always '' for
        # instances (only classes carry __name__) — log the class name instead.
        if not self.is_compatible:
            logger.error('%s.copy() called on incompatible system!', type(self).__name__)
            return False
        try:
            self._copy(text)
        except Exception:
            logger.exception('%s.copy() failed!', type(self).__name__)
            return False
        else:
            return True

    def name(self) -> str:
        """Return the handler's class name (used for logging/UI)."""
        return self.__class__.__name__

    @staticmethod
    def _os_has_wayland_display_manager() -> bool:
        """Detect a Wayland session on Linux via environment variables."""
        if sys.platform != 'linux':
            return False
        xdg_session_type = os.environ.get('XDG_SESSION_TYPE', '').lower()
        has_wayland_display_env = bool(os.environ.get('WAYLAND_DISPLAY', ''))
        return ('wayland' in xdg_session_type) or has_wayland_display_env

    @abc.abstractmethod
    def _is_compatible(self) -> bool:
        """Return True if this handler can run on the current system."""
        ...

    @staticmethod
    @abc.abstractmethod
    def _copy(text: str) -> None:
        """Write *text* to the system clipboard (platform-specific)."""
        ...
def create_auto_crop_writer():
    """For every selected Nuke node, attach a Write node that renders an
    auto-cropped copy next to the original footage (``_auto_cropped`` suffix
    before the frame-number token), then feed all writers into a single
    afanasy render-farm node if that plugin is available."""
    import os
    import nuke

    selected = nuke.selectedNodes()
    for selected_node in selected:
        selected_node.setSelected(False)

    writers = []
    for source_node in selected:
        writer = nuke.createNode('Write')
        source_path = source_node['file'].value()
        (stem_with_seq, extension) = os.path.splitext(source_path)
        # Split again to peel off the frame-number token (e.g. '.%04d').
        (stem, seq_token) = os.path.splitext(stem_with_seq)
        writer['file'].setValue(stem + '_auto_cropped' + seq_token + extension)
        writer['channels'].setValue('all')
        writer['autocrop'].setValue(True)
        # Place the writer just to the right of its source node.
        writer.setXpos(source_node.xpos() + 100)
        writer.setYpos(source_node.ypos())
        writer.setInput(0, source_node)
        writer.setSelected(False)
        writers.append(writer)

    try:
        afanasy = nuke.createNode('afanasy')
        for (slot, writer) in enumerate(writers):
            afanasy.setInput(slot, writer)
    except RuntimeError:
        # The afanasy plugin is optional; silently skip if unavailable.
        pass
def get_context_string(context, id_width, ctx, ascii_=False):
    """Render one context line: ``<#> | <path> (<task count>)`` plus an
    optional colored priority marker when the context overrides the default."""
    id_marker = cstr('#', clr('id'))
    # Colored strings carry invisible escape codes (lenesc); widen the field
    # so the visible '#' still right-aligns.
    offset = id_marker.lenesc if isinstance(id_marker, ColoredStr) else 0
    rel_path = utils.get_relative_path(context, ctx['path'])
    line = '{hash:>{width}} | {path} ({nbr})'.format(
        hash=id_marker,
        width=(id_width + offset + 1),
        path=rel_path,
        nbr=ctx['total_tasks'],
    )
    priority = ctx['priority']
    if not is_task_default(ctx, 'priority'):
        marker = ' {}{}'.format(PRIORITY_ICON[ascii_], priority)
        line += cstr(marker, clr('priority'))
    return line
class WithdrawalAPI(ABC):
    """Abstract interface of a withdrawal record.

    NOTE(review): these accessors are bare ``...`` stubs with no decorators;
    upstream they are presumably ``@property`` / ``@abstractmethod`` pairs —
    the decorators appear stripped from this copy. Confirm against the
    original source.
    """

    def index(self) -> int:
        # Index identifying this withdrawal.
        ...

    def validator_index(self) -> int:
        # Index of the validator the withdrawal belongs to.
        ...

    def address(self) -> Address:
        # Recipient address of the withdrawn amount.
        ...

    def amount(self) -> int:
        # Withdrawn amount (units not visible here — confirm upstream).
        ...

    def hash(self) -> Hash32:
        # Hash identifying this withdrawal.
        ...

    def validate(self) -> None:
        # Raise if the withdrawal's fields are invalid.
        ...

    def encode(self) -> bytes:
        # Serialized byte representation (encoding defined upstream).
        ...
def test_loop_broad_peak():
    """hicValidateLocations: validate loops against a CTCF broadPeak file and
    compare both output files (matched locations, statistics) to fixtures."""
    tmp_out = NamedTemporaryFile(suffix='out', delete=True)
    tmp_out.close()
    cli = '--data {} --validationData {} --validationType {} --method {} --outFileName {} -r {} --chrPrefixProtein {} '.format(
        (ROOT + 'loops_1.bedgraph'),
        (ROOT + 'GSM733752_hg19_ctcf_GM12878.broadPeak'),
        'bed',
        'loops',
        tmp_out.name,
        10000,
        'remove',
    )
    args = cli.split()
    # Run the CLI entry point via the project's `compute` helper.
    compute(hicValidateLocations.main, args, 5)
    assert are_files_equal((ROOT + 'overlap_ctcf_matched_locations'), (tmp_out.name + '_matched_locations'))
    # skip=3: the statistics file's leading lines are environment-specific.
    assert are_files_equal((ROOT + 'overlap_ctcf_statistics'), (tmp_out.name + '_statistics'), skip=3)
def _create_taxed_tickets(db, tax_included=True, discount_code=None):
    """Create an event with 18% GST and four tickets (two priced, one
    donation, one 'free'), returning the order ticket dict used by tests."""
    tax = TaxSubFactory(name='GST', rate=18.0, is_tax_included_in_price=tax_included)
    tickets = _create_tickets([123.5, 456.3], event=tax.event)
    donation_ticket = TicketSubFactory(type='donation', event=tax.event, min_price=500.0, max_price=1000.0)
    free_ticket = TicketSubFactory(type='free', price=435.0, event=tax.event)
    tickets += [donation_ticket, free_ticket]
    if discount_code:
        # All tickets except the first participate in the discount.
        discount_code.tickets = [tickets[1]] + tickets[2:]
    # NOTE(review): commit assumed unconditional (needed to persist the
    # factory objects either way) — confirm original nesting.
    db.session.commit()
    tickets_dict = _create_ticket_dict(tickets, [2, 4, 3, 3])
    # The donation ticket (second from last) gets an explicit chosen price.
    tickets_dict[-2]['price'] = 789.7
    return tickets_dict
# NOTE(review): the bare tuple below is almost certainly the remains of a
# stripped decorator (e.g. ``@mock.patch('sys.argv', ['flakehell'])``); as
# written it is a no-op expression — confirm against the original source.
('sys.argv', ['flakehell'])
def test_exclude_file(capsys, tmp_path: Path):
    """`lint --exclude ignored` must skip the excluded directory entirely
    (including a file with invalid syntax) and report only checked.py."""
    (tmp_path / 'checked.py').write_text('import sys\n')
    (tmp_path / 'ignored').mkdir()
    ((tmp_path / 'ignored') / 'first.py').write_text('import sys\n')
    # Would fail parsing if it were linted — proves exclusion works.
    ((tmp_path / 'ignored') / 'second.py').write_text('invalid syntax!')
    with chdir(tmp_path):
        result = main(['lint', '--format', 'default', '--exclude', 'ignored'])
    # Exit code 1: findings exist (the unused import in checked.py).
    assert (result == (1, ''))
    captured = capsys.readouterr()
    assert (captured.err == '')
    exp = "\n    ./checked.py:1:1: F401 'sys' imported but unused\n    "
    assert (captured.out.strip() == dedent(exp).strip())
class Solution(object):
    def middleNode(self, head):
        """Return the middle node of a singly linked list (the second of the
        two middles when the list has even length).

        Two passes: count the length, then advance length // 2 steps.
        """
        length = 0
        curr = head
        while curr is not None:
            curr = curr.next
            length += 1
        # BUG FIX: was `length / 2` — float division in Python 3, so the
        # second loop ran one extra step for odd-length lists (e.g. 2.5
        # decrements to 0.5 before stopping). Integer division is intended.
        mid = length // 2
        curr = head
        while mid > 0:
            curr = curr.next
            mid -= 1
        return curr
def get_linked_deployments(deployments: Dict[(str, Any)]) -> Dict[(str, Any)]:
    """Return the subset of *deployments* whose runtime bytecode declares
    link dependencies, rejecting any deployment that links to itself."""
    linked_deployments = {}
    for (name, data) in deployments.items():
        if get_in(('runtimeBytecode', 'linkDependencies'), data):
            linked_deployments[name] = data
    for (name, data) in linked_deployments.items():
        # A deployment must not name itself as one of its link targets.
        link_deps = data['runtimeBytecode']['linkDependencies']
        if any((dep['value'] == name) for dep in link_deps):
            raise BytecodeLinkingError(f'Link dependency found in {name} deployment that references its own contract instance, which is disallowed')
    return linked_deployments
def test_correct_number_of_rows_are_generated():
    """Generating a dataset with count=50 must yield exactly 50 rows."""
    df = gen.generate(props={'region': gen.choice(data=['EMEA', 'LATAM', 'NAM', 'APAC'], weights=[0.1, 0.1, 0.3, 0.5]), 'sic_range': gen.sic_range(), 'sic': gen.sic_industry(sic_range_field='sic_range'), 'country': gen.country_codes(region_field='region'), 'client_name': gen.company_namer(field='sic', field_type='sic', countrycode_field='country')}, count=50, randomstate=np.random.RandomState()).to_dataframe()
    # Unwrap generated objects into plain comparable values.
    df['sic_range'] = df['sic_range'].apply(lambda x: x.name)
    df['sic'] = df['sic'].apply(lambda x: x.name)
    df['country'] = df['country'].apply(lambda x: x.alpha3_code)
    # BUG FIX: the test previously asserted nothing — verify the row count
    # its name promises.
    assert len(df) == 50
def test_unicode_addresses(mailer):
    """Unicode display-name addresses must survive into the rendered message
    headers (To in either recipient order, From, Cc).

    NOTE(review): every address literal here has the form ``u'Name <>'`` —
    the email part appears to have been scrubbed from this copy of the
    file; confirm the original test data.
    """
    msg = mailer.mail(subject='subject', sender=u'AUO <>', recipients=[u'A <>', u'U <>'], cc=[u'O <>'])
    msg_as_string = str(msg)
    a1 = sanitize_address(u'A <>')
    a2 = sanitize_address(u'U <>')
    # Recipient ordering in the To header is not guaranteed; accept either.
    h1_a = Header(('To: %s, %s' % (a1, a2)))
    h1_b = Header(('To: %s, %s' % (a2, a1)))
    h2 = Header(('From: %s' % sanitize_address(u'AUO <>')))
    h3 = Header(('Cc: %s' % sanitize_address(u'O <>')))
    try:
        assert (h1_a.encode() in msg_as_string)
    except AssertionError:
        assert (h1_b.encode() in msg_as_string)
    assert (h2.encode() in msg_as_string)
    assert (h3.encode() in msg_as_string)
# BUG FIX: the bare `.asyncio` line was a syntax error — the tail of a
# stripped decorator. Restored as `@pytest.mark.asyncio` (pytest is already
# used elsewhere in this file); confirm against the original source.
@pytest.mark.asyncio
class TestTasksCleanup():
    """CleanupTask.run() must dispatch its work through the send_task hook."""

    async def test_cleanup(self, main_session_manager, workspace_session_manager, send_task_mock: MagicMock):
        cleanup = CleanupTask(main_session_manager, workspace_session_manager, send_task=send_task_mock)
        await cleanup.run()
        send_task_mock.assert_called()
def test_from_yaml_schema(container: containers.DynamicContainer, tmp_path: pathlib.Path):
    """Providers declared in a YAML schema file must be materialized on the
    container, including a cross-provider reference resolved by name."""
    schema_path = (tmp_path / 'schema.yml')
    with open(schema_path, 'w') as file:
        file.write('\n    version: "1"\n    container:\n        provider1:\n            provider: Factory\n            provides: list\n            args:\n                - 1\n                - 2\n                - 3\n        provider2:\n            provider: Factory\n            provides: dict\n            kwargs:\n                one: container.provider1\n                two: 2\n    ')
    container.from_yaml_schema(schema_path)
    # provider1: Factory of list with positional args.
    assert isinstance(container.provider1, providers.Factory)
    assert (container.provider1.provides == list)
    assert (container.provider1.args == (1, 2, 3))
    # provider2: Factory of dict; 'container.provider1' resolves to the
    # provider object itself, not its product.
    assert isinstance(container.provider2, providers.Factory)
    assert (container.provider2.provides is dict)
    assert (container.provider2.kwargs == {'one': container.provider1, 'two': 2})
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.