code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class FileSystemAccess(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.path = self.__init_path(path) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __init_path(path): <NEW_LINE> <INDENT> if not isinstance(path, str) or len(path) == 0: <NEW_LINE> <INDENT> raise ValueError("path must be initialized as a non empty string") <NEW_LINE> <DEDENT> path = join(expanduser('~'), '.mycroft', path) <NEW_LINE> if not isdir(path): <NEW_LINE> <INDENT> os.makedirs(path) <NEW_LINE> <DEDENT> return path <NEW_LINE> <DEDENT> def open(self, filename, mode): <NEW_LINE> <INDENT> file_path = join(self.path, filename) <NEW_LINE> return open(file_path, mode) <NEW_LINE> <DEDENT> def exists(self, filename): <NEW_LINE> <INDENT> return os.path.exists(join(self.path, filename))
|
A class for providing access to the mycroft FS sandbox. Intended to be
attached to skills at initialization time to provide a skill-specific
namespace.
|
6259902e3eb6a72ae038b6c6
|
class RSAX931Verifier(object): <NEW_LINE> <INDENT> def __init__(self, pubdata): <NEW_LINE> <INDENT> self._bio = libcrypto.BIO_new_mem_buf(pubdata, len(pubdata)) <NEW_LINE> self._rsa = c_void_p(libcrypto.RSA_new()) <NEW_LINE> if not libcrypto.PEM_read_bio_RSA_PUBKEY(self._bio, pointer(self._rsa), None, None): <NEW_LINE> <INDENT> raise ValueError('invalid RSA public key') <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> libcrypto.BIO_free(self._bio) <NEW_LINE> libcrypto.RSA_free(self._rsa) <NEW_LINE> <DEDENT> def verify(self, signed): <NEW_LINE> <INDENT> buf = create_string_buffer(libcrypto.RSA_size(self._rsa)) <NEW_LINE> size = libcrypto.RSA_public_decrypt(len(signed), signed, buf, self._rsa, RSA_X931_PADDING) <NEW_LINE> if size < 0: <NEW_LINE> <INDENT> raise ValueError('Unable to decrypt message') <NEW_LINE> <DEDENT> return buf[0:size]
|
Verify ANSI X9.31 RSA signatures using OpenSSL libcrypto
|
6259902ed164cc6175821fd2
|
class DHCPOffer (Event): <NEW_LINE> <INDENT> def __init__ (self, p): <NEW_LINE> <INDENT> super(DHCPOffer,self).__init__() <NEW_LINE> self.offer = p <NEW_LINE> self.address = p.yiaddr <NEW_LINE> self.server = p.siaddr <NEW_LINE> o = p.options.get(p.SERVER_ID_OPT) <NEW_LINE> if o: self.server = o.addr <NEW_LINE> o = p.options.get(p.SUBNET_MASK_OPT) <NEW_LINE> self.subnet_mask = o.addr if o else None <NEW_LINE> o = p.options.get(p.ROUTERS_OPT) <NEW_LINE> self.routers = o.addrs if o else [] <NEW_LINE> o = p.options.get(p.DNS_SERVER_OPT) <NEW_LINE> self.dns_servers = o.addrs if o else [] <NEW_LINE> o = p.options.get(p.REQUEST_LEASE_OPT) <NEW_LINE> o = o.seconds if o is not None else 86400 <NEW_LINE> self._accept = None <NEW_LINE> <DEDENT> def reject (self): <NEW_LINE> <INDENT> self._accept = False <NEW_LINE> <DEDENT> def accept (self): <NEW_LINE> <INDENT> self._accept = True <NEW_LINE> <DEDENT> def option (self, option, default=None): <NEW_LINE> <INDENT> return self.offer.options.get(option, default=None)
|
Fired when an offer has been received
If you want to immediately accept it, do accept().
If you want to reject it, do reject().
If you want to defer acceptance, do nothing.
|
6259902ee76e3b2f99fd9a6d
|
class AddPrintableRingOperator68(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "addongen.add_printable_ring_operator68" <NEW_LINE> bl_label = "Add 21.89mm (12 1/2)" <NEW_LINE> bl_options = {'REGISTER'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> basicRingFor3DPrint('ring', 64, 21.89) <NEW_LINE> return {'FINISHED'}
|
Add 21.89mm (US 12 1/2 | British Z | French 68 3/4 | German 21 3/4 | Japanese 26 | Swiss 28 3/4)
|
6259902ea4f1c619b294f657
|
class Policy(Resource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'marketplace_purchases': {'key': 'properties.marketplacePurchases', 'type': 'str'}, 'reservation_purchases': {'key': 'properties.reservationPurchases', 'type': 'str'}, 'view_charges': {'key': 'properties.viewCharges', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, marketplace_purchases: Optional[Union[str, "MarketplacePurchasesPolicy"]] = None, reservation_purchases: Optional[Union[str, "ReservationPurchasesPolicy"]] = None, view_charges: Optional[Union[str, "ViewChargesPolicy"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(Policy, self).__init__(**kwargs) <NEW_LINE> self.marketplace_purchases = marketplace_purchases <NEW_LINE> self.reservation_purchases = reservation_purchases <NEW_LINE> self.view_charges = view_charges
|
A policy.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param marketplace_purchases: The policy that controls whether Azure marketplace purchases are
allowed for a billing profile. Possible values include: "AllAllowed", "OnlyFreeAllowed",
"NotAllowed".
:type marketplace_purchases: str or ~azure.mgmt.billing.models.MarketplacePurchasesPolicy
:param reservation_purchases: The policy that controls whether Azure reservation purchases are
allowed for a billing profile. Possible values include: "Allowed", "NotAllowed".
:type reservation_purchases: str or ~azure.mgmt.billing.models.ReservationPurchasesPolicy
:param view_charges: The policy that controls whether users with Azure RBAC access to a
subscription can view its charges. Possible values include: "Allowed", "NotAllowed".
:type view_charges: str or ~azure.mgmt.billing.models.ViewChargesPolicy
|
6259902eb57a9660fecd2ae4
|
class NoSuchProjectError(Exception): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> super(NoSuchProjectError, self).__init__() <NEW_LINE> self.name = name
|
If a project cannot be found in the LDAP.
|
6259902e50485f2cf55dbfdd
|
class V1PersistentVolume(object): <NEW_LINE> <INDENT> def __init__(self, kind=None, apiVersion=None, metadata=None, spec=None, status=None): <NEW_LINE> <INDENT> self.swagger_types = { 'kind': 'str', 'apiVersion': 'str', 'metadata': 'V1ObjectMeta', 'spec': 'V1PersistentVolumeSpec', 'status': 'V1PersistentVolumeStatus' } <NEW_LINE> self.attribute_map = { 'kind': 'kind', 'apiVersion': 'apiVersion', 'metadata': 'metadata', 'spec': 'spec', 'status': 'status' } <NEW_LINE> self._kind = kind <NEW_LINE> self._apiVersion = apiVersion <NEW_LINE> self._metadata = metadata <NEW_LINE> self._spec = spec <NEW_LINE> self._status = status <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def apiVersion(self): <NEW_LINE> <INDENT> return self._apiVersion <NEW_LINE> <DEDENT> @apiVersion.setter <NEW_LINE> def apiVersion(self, apiVersion): <NEW_LINE> <INDENT> self._apiVersion = apiVersion <NEW_LINE> <DEDENT> @property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> return self._metadata <NEW_LINE> <DEDENT> @metadata.setter <NEW_LINE> def metadata(self, metadata): <NEW_LINE> <INDENT> self._metadata = metadata <NEW_LINE> <DEDENT> @property <NEW_LINE> def spec(self): <NEW_LINE> <INDENT> return self._spec <NEW_LINE> <DEDENT> @spec.setter <NEW_LINE> def spec(self, spec): <NEW_LINE> <INDENT> self._spec = spec <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self._status <NEW_LINE> <DEDENT> @status.setter <NEW_LINE> def status(self, status): <NEW_LINE> <INDENT> self._status = status <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if 
hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902e26238365f5fadbb3
|
class GObjectXpraClient(XpraClientBase, gobject.GObject): <NEW_LINE> <INDENT> INSTALL_SIGNAL_HANDLERS = True <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> gobject.GObject.__init__(self) <NEW_LINE> XpraClientBase.__init__(self) <NEW_LINE> <DEDENT> def init(self, opts): <NEW_LINE> <INDENT> XpraClientBase.init(self, opts) <NEW_LINE> if self.INSTALL_SIGNAL_HANDLERS: <NEW_LINE> <INDENT> self.install_signal_handlers() <NEW_LINE> <DEDENT> self.glib_init() <NEW_LINE> self.gobject_init() <NEW_LINE> <DEDENT> def timeout_add(self, *args): <NEW_LINE> <INDENT> return glib.timeout_add(*args) <NEW_LINE> <DEDENT> def idle_add(self, *args): <NEW_LINE> <INDENT> return glib.idle_add(*args) <NEW_LINE> <DEDENT> def source_remove(self, *args): <NEW_LINE> <INDENT> return glib.source_remove(*args) <NEW_LINE> <DEDENT> def get_scheduler(self): <NEW_LINE> <INDENT> return glib <NEW_LINE> <DEDENT> def client_type(self): <NEW_LINE> <INDENT> return "Python%s/GObject" % sys.version_info[0] <NEW_LINE> <DEDENT> def timeout(self, *_args): <NEW_LINE> <INDENT> log.warn("timeout!") <NEW_LINE> <DEDENT> def init_packet_handlers(self): <NEW_LINE> <INDENT> XpraClientBase.init_packet_handlers(self) <NEW_LINE> def noop(*args): <NEW_LINE> <INDENT> log("ignoring packet: %s", args) <NEW_LINE> <DEDENT> for t in ["new-window", "new-override-redirect", "draw", "cursor", "bell", "notify_show", "notify_close", "ping", "ping_echo", "window-metadata", "configure-override-redirect", "lost-window"]: <NEW_LINE> <INDENT> self._packet_handlers[t] = noop <NEW_LINE> <DEDENT> <DEDENT> def gobject_init(self): <NEW_LINE> <INDENT> gobject.threads_init() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> XpraClientBase.run(self) <NEW_LINE> self.glib_mainloop = glib.MainLoop() <NEW_LINE> self.glib_mainloop.run() <NEW_LINE> return self.exit_code <NEW_LINE> <DEDENT> def make_hello(self): <NEW_LINE> <INDENT> capabilities = XpraClientBase.make_hello(self) <NEW_LINE> capabilities["keyboard"] = False <NEW_LINE> return 
capabilities <NEW_LINE> <DEDENT> def quit(self, exit_code): <NEW_LINE> <INDENT> log("quit(%s) current exit_code=%s", exit_code, self.exit_code) <NEW_LINE> if self.exit_code is None: <NEW_LINE> <INDENT> self.exit_code = exit_code <NEW_LINE> <DEDENT> self.cleanup() <NEW_LINE> glib.timeout_add(50, self.glib_mainloop.quit)
|
Utility superclass for GObject clients
|
6259902f925a0f43d25e90aa
|
class Network: <NEW_LINE> <INDENT> def __init__(self, genotype): <NEW_LINE> <INDENT> self.nodes = [] <NEW_LINE> self.input_nodes = [] <NEW_LINE> self.output_nodes = [] <NEW_LINE> nodes_dict = dict() <NEW_LINE> for node_id in genotype.input_nodes: <NEW_LINE> <INDENT> node = Node(activation='') <NEW_LINE> self.input_nodes.append(node) <NEW_LINE> self.nodes.append(node) <NEW_LINE> nodes_dict[node_id] = node <NEW_LINE> <DEDENT> for node_id in genotype.output_nodes: <NEW_LINE> <INDENT> node = Node(activation='') <NEW_LINE> self.output_nodes.append(node) <NEW_LINE> self.nodes.append(node) <NEW_LINE> nodes_dict[node_id] = node <NEW_LINE> <DEDENT> for node_id in genotype.hidden_nodes: <NEW_LINE> <INDENT> node = Node(activation='sigmoid_custom') <NEW_LINE> self.nodes.append(node) <NEW_LINE> nodes_dict[node_id] = node <NEW_LINE> <DEDENT> for conn in genotype.genes: <NEW_LINE> <INDENT> if genotype.genes[conn].active: <NEW_LINE> <INDENT> weight = genotype.genes[conn].weight <NEW_LINE> in_id, out_id = conn <NEW_LINE> out_node = nodes_dict[out_id] <NEW_LINE> in_node = nodes_dict[in_id] <NEW_LINE> out_node.predecessors.append((in_node, weight)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def compute(self, data, input_type='points', output_type='probability'): <NEW_LINE> <INDENT> output = [] <NEW_LINE> for vector in data: <NEW_LINE> <INDENT> for v, node in zip(vector, self.input_nodes): <NEW_LINE> <INDENT> node.set_value(v) <NEW_LINE> <DEDENT> result = [node.get_value() for node in self.output_nodes] <NEW_LINE> result = np.array(result) <NEW_LINE> if output_type == 'probability': <NEW_LINE> <INDENT> if len(result) > 1: <NEW_LINE> <INDENT> x = result <NEW_LINE> e_x = np.exp(x - np.max(x)) <NEW_LINE> result = e_x / e_x.sum() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x = result <NEW_LINE> x = np.clip(x, -500, 500) <NEW_LINE> result = 1 / (1 + np.exp(-x)) <NEW_LINE> <DEDENT> <DEDENT> output.append(result) <NEW_LINE> for node in self.nodes: <NEW_LINE> <INDENT> node.reset() <NEW_LINE> if 
input_type == 'points': <NEW_LINE> <INDENT> node.past_value = 0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return output
|
Neural Network
nodes - list of all network nodes
input_nodes - list of input layer nodes, same as in nodes
output_nodes - list of output layer nodes, same sa in nodes
|
6259902f0a366e3fb87dda4a
|
class ClubMembership(models.Model): <NEW_LINE> <INDENT> club = models.ForeignKey( 'annuaire.Club', verbose_name=_('club'), related_name='memberships', ) <NEW_LINE> member = models.ForeignKey( Person, verbose_name=_('member'), related_name='club_memberships', editable=False, ) <NEW_LINE> fonction = models.CharField( verbose_name=_('fonction'), max_length=50, blank=True, null=True, ) <NEW_LINE> begin = models.IntegerField( verbose_name=_('start date'), blank=True, null=True, ) <NEW_LINE> end = models.IntegerField( verbose_name=_('end date'), blank=True, null=True, ) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return str(self.club) + " " + self.fonction <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('club membership') <NEW_LINE> verbose_name_plural = _('club memberships') <NEW_LINE> ordering = ['begin']
|
Club membership for a person
|
6259902f21bff66bcd723cc7
|
class USResident(Person): <NEW_LINE> <INDENT> def __init__(self, name, status): <NEW_LINE> <INDENT> Person.__init__(self, name) <NEW_LINE> self.status = status <NEW_LINE> if not (status == "citizen" or status == "legal_resident" or status == "illegal_resident"): <NEW_LINE> <INDENT> raise ValueError("illegal status") <NEW_LINE> <DEDENT> <DEDENT> def getStatus(self): <NEW_LINE> <INDENT> return self.status
|
A Person who resides in the US.
|
6259902fd6c5a102081e3189
|
class ShareForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta(object): <NEW_LINE> <INDENT> model = Share <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.request = kwargs.pop('request', None) <NEW_LINE> super(ShareForm, self).__init__(*args, **kwargs) <NEW_LINE> choices = Resume.objects.filter(identity__user=self.request.user).values_list('id', 'name') <NEW_LINE> self.fields["resume"].choices = choices
|
Formularz tworzenia i edycji `Share`.
|
6259902f5166f23b2e24443a
|
class Tick: <NEW_LINE> <INDENT> def __init__(self, v=Vector(), p=Vector()): <NEW_LINE> <INDENT> self.velocity = v <NEW_LINE> self.position = p
|
Tick object describing the state of a vehicle at a moment in time.
velocity: Vector
position: Vector
|
6259902fec188e330fdf98f6
|
class IS_EMPTY_OR(Validator): <NEW_LINE> <INDENT> def __init__(self, other, null=None, empty_regex=None): <NEW_LINE> <INDENT> (self.other, self.null) = (other, null) <NEW_LINE> if empty_regex is not None: <NEW_LINE> <INDENT> self.empty_regex = re.compile(empty_regex) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.empty_regex = None <NEW_LINE> <DEDENT> if hasattr(other, "multiple"): <NEW_LINE> <INDENT> self.multiple = other.multiple <NEW_LINE> <DEDENT> if hasattr(other, "options"): <NEW_LINE> <INDENT> self.options = self._options <NEW_LINE> <DEDENT> <DEDENT> def _options(self, *args, **kwargs): <NEW_LINE> <INDENT> options = self.other.options(*args, **kwargs) <NEW_LINE> if (not options or options[0][0] != "") and not self.multiple: <NEW_LINE> <INDENT> options.insert(0, ("", "")) <NEW_LINE> <DEDENT> return options <NEW_LINE> <DEDENT> def set_self_id(self, id): <NEW_LINE> <INDENT> if isinstance(self.other, (list, tuple)): <NEW_LINE> <INDENT> for item in self.other: <NEW_LINE> <INDENT> if hasattr(item, "set_self_id"): <NEW_LINE> <INDENT> item.set_self_id(id) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if hasattr(self.other, "set_self_id"): <NEW_LINE> <INDENT> self.other.set_self_id(id) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def validate(self, value, record_id=None): <NEW_LINE> <INDENT> value, empty = is_empty(value, empty_regex=self.empty_regex) <NEW_LINE> if empty: <NEW_LINE> <INDENT> return self.null <NEW_LINE> <DEDENT> if isinstance(self.other, (list, tuple)): <NEW_LINE> <INDENT> for item in self.other: <NEW_LINE> <INDENT> value = validator_caller(item, value, record_id) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> return validator_caller(self.other, value, record_id) <NEW_LINE> <DEDENT> def formatter(self, value): <NEW_LINE> <INDENT> if hasattr(self.other, "formatter"): <NEW_LINE> <INDENT> return self.other.formatter(value) <NEW_LINE> <DEDENT> return value
|
Dummy class for testing IS_EMPTY_OR::
>>> IS_EMPTY_OR(IS_EMAIL())('abc@def.com')
('abc@def.com', None)
>>> IS_EMPTY_OR(IS_EMAIL())(' ')
(None, None)
>>> IS_EMPTY_OR(IS_EMAIL(), null='abc')(' ')
('abc', None)
>>> IS_EMPTY_OR(IS_EMAIL(), null='abc', empty_regex='def')('def')
('abc', None)
>>> IS_EMPTY_OR(IS_EMAIL())('abc')
('abc', 'enter a valid email address')
>>> IS_EMPTY_OR(IS_EMAIL())(' abc ')
('abc', 'enter a valid email address')
|
6259902f1d351010ab8f4b7b
|
class InternalLinks(_TypedList): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> super(_TypedList,self).__init__(*args) <NEW_LINE> self.memberType = Relation <NEW_LINE> self.parent = (1, 'INTERNAL_LINKS')
|
List of links
|
6259902f26238365f5fadbb5
|
class MySentences(object): <NEW_LINE> <INDENT> def __init__(self, dirname, start=0, subfix='.txt',bigramword=False,trainfilename=False): <NEW_LINE> <INDENT> self.dirname = dirname <NEW_LINE> self.start=start <NEW_LINE> self.subfix=subfix <NEW_LINE> self.bigram=bigramword <NEW_LINE> self.trainfname=trainfilename <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> if os.path.isfile(self.dirname): <NEW_LINE> <INDENT> fns = [self.dirname] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fns = uc.getfileinfolder(self.dirname,prefix=self.subfix,recurse=2) <NEW_LINE> <DEDENT> for i,fname in enumerate(fns): <NEW_LINE> <INDENT> logger.info('-------------%s(%d/%d)' %(fname,i,len(fns))) <NEW_LINE> filename=os.path.splitext(os.path.split(fname)[1])[0] if self.trainfname else None <NEW_LINE> with codecs.open(fname, 'rU', 'utf8', errors='ignore') as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> l = line.strip().split() <NEW_LINE> if self.start: <NEW_LINE> <INDENT> if len(l) > self.start + 1: <NEW_LINE> <INDENT> yield uc.extend2bigram(l[self.start:],filename) if self.bigram else l[self.start:] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if len(l) > 1: <NEW_LINE> <INDENT> yield uc.extend2bigram(l,filename) if self.bigram else l
|
根据分好词的文件生成句子序列,用于word2vec训练
dirname:分好词的文件路径,可以是单个文件路径也可以是文件夹地址,文件以txt结尾
start:从一行的第几个元素开始算词。因为有的文件每行第一个元素是用户id,则start=1用于略过id,
|
6259902f30c21e258be9986f
|
class ManagerNamespace(NWManager): <NEW_LINE> <INDENT> def __init__(self, id, workgroup): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.workgroup = workgroup <NEW_LINE> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> return self.getItem(key) <NEW_LINE> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> if key == "id" or key == "workgroup": <NEW_LINE> <INDENT> self.__dict__[key] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setItem(key, value)
|
A class that inherits :py:class:`NWManager` but adds :py:meth:`__getattr__`
and :py:meth:`__setattr__` methods that enable it to behave like an
object containing shared variables which might be more comfortable
than using :py:meth:`getItem <NWManager.getItem>` and
:py:meth:`setItem <NWManager.setItem>` methods of the :py:class:`NWManager`.
|
6259902f287bf620b6272c4b
|
class CalcE(MultiColumnAction): <NEW_LINE> <INDENT> colXx = Field(doc="The column name to get the xx shape component from.", dtype=str, default="ixx") <NEW_LINE> colYy = Field(doc="The column name to get the yy shape component from.", dtype=str, default="iyy") <NEW_LINE> colXy = Field(doc="The column name to get the xy shape component from.", dtype=str, default="ixy") <NEW_LINE> halvePhaseAngle = Field(doc=("Divide the phase angle by 2? " "Suitable for quiver plots."), dtype=bool, default=False) <NEW_LINE> @property <NEW_LINE> def columns(self): <NEW_LINE> <INDENT> return (self.colXx, self.colYy, self.colXy) <NEW_LINE> <DEDENT> def __call__(self, df): <NEW_LINE> <INDENT> e = (df[self.colXx] - df[self.colYy]) + 1j*(2*df[self.colXy]) <NEW_LINE> e /= (df[self.colXx] + df[self.colYy]) <NEW_LINE> if self.halvePhaseAngle: <NEW_LINE> <INDENT> e *= np.abs(e) <NEW_LINE> return np.sqrt(e) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return e
|
Calculate a complex value representation of the ellipticity
This is a shape measurement used for doing QA on the ellipticity
of the sources.
The complex ellipticity is typically defined as
E = ((ixx - iyy) + 1j*(2*ixy))/(ixx + iyy) = |E|exp(i*2*theta).
For plotting purposes we might want to plot |E|*exp(i*theta).
If `halvePhaseAngle` config parameter is set to `True`, then
the returned quantity therefore corresponds to |E|*exp(i*theta)
|
6259902f3eb6a72ae038b6ca
|
class DoublyLinkedList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.start_node = None <NEW_LINE> <DEDENT> '''@helpDescription(__str__ method is automatically called when the print() function is invoked. This method prints the contents of the linked list in a readable format instead of the SLinkedList object.)''' <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> s = '[' <NEW_LINE> i = 0 <NEW_LINE> current = self.start_node <NEW_LINE> while current != None: <NEW_LINE> <INDENT> if i != 0: <NEW_LINE> <INDENT> s = s + ', ' + current.item <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = s + current.item <NEW_LINE> <DEDENT> '''@helpDescription("current" is incremented to the next node in the list, and the while loop continues.)''' <NEW_LINE> current = current.nref <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> '''@helpDescription(Ultimately, a comma-separated string containing all items of the linked list is returned.)''' <NEW_LINE> return s + ']' <NEW_LINE> <DEDENT> '''@helpDescription(The insert_at_start(data) method adds an item to the beginning of the linked list.)''' <NEW_LINE> def insert_at_start(self, data): <NEW_LINE> <INDENT> new_node = DoublyLinkedListNode(data) <NEW_LINE> if self.start_node is None: <NEW_LINE> <INDENT> self.start_node = new_node <NEW_LINE> return <NEW_LINE> <DEDENT> '''@helpDescription(What was the start node becomes the new "next" node, because we want to insert another new as the start node.)''' <NEW_LINE> new_node.nref = self.start_node <NEW_LINE> self.start_node.pref = new_node <NEW_LINE> self.start_node = new_node <NEW_LINE> <DEDENT> '''@helpDescription(The insert_at_start(data) method adds an item to the end of the linked list.)''' <NEW_LINE> def insert_at_end(self, data): <NEW_LINE> <INDENT> new_node = DoublyLinkedListNode(data) <NEW_LINE> if self.start_node is None: <NEW_LINE> <INDENT> self.start_node = new_node <NEW_LINE> return <NEW_LINE> <DEDENT> n = self.start_node <NEW_LINE> while n.nref is not None: <NEW_LINE> 
<INDENT> n = n.nref <NEW_LINE> <DEDENT> '''@helpDescription(Because a new node is inserted at the end of the linked list, "n"'s next node is set as the new node to be inserted.)''' <NEW_LINE> n.nref = new_node <NEW_LINE> new_node.pref = n <NEW_LINE> <DEDENT> '''@helpDescription(The delete_at_start() method deletes the first item in the linked list.)''' <NEW_LINE> def delete_at_start(self): <NEW_LINE> <INDENT> if self.start_node is None: <NEW_LINE> <INDENT> print("The list has no element to delete") <NEW_LINE> return <NEW_LINE> <DEDENT> '''@helpDescription(If the start_node is not None but the start_node's "next node" reference is None, it means there is only one node in the linked list.)''' <NEW_LINE> if self.start_node.nref is None: <NEW_LINE> <INDENT> self.start_node = None <NEW_LINE> return <NEW_LINE> <DEDENT> '''@helpDescription(If there is more than 1 item in the linked list, the start_node reference is changed to the old start start node's "next" reference (the second element in the linked list).)''' <NEW_LINE> self.start_node = self.start_node.nref <NEW_LINE> self.start_node.pref = None
|
@helpDescription(The linked list is initialized with a start node (currently None).)
|
6259902f66673b3332c31455
|
class AdminTools(system.Container): <NEW_LINE> <INDENT> __image__ = "images/admintools.gif" <NEW_LINE> __slots__ = ()
|
Administrative Tools Folder
===========================
This folder contains the users, the policies and
the installed applications containers.
|
6259902f4e696a045264e654
|
class Solution4(object): <NEW_LINE> <INDENT> pass
|
TODO
two pointer solution
|
6259902f1d351010ab8f4b7d
|
class chmod(Command): <NEW_LINE> <INDENT> def execute(self): <NEW_LINE> <INDENT> mode_str = self.rest(1) <NEW_LINE> if not mode_str: <NEW_LINE> <INDENT> if self.quantifier is None: <NEW_LINE> <INDENT> self.fm.notify("Syntax: chmod <octal number> " "or specify a quantifier", bad=True) <NEW_LINE> return <NEW_LINE> <DEDENT> mode_str = str(self.quantifier) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> mode = int(mode_str, 8) <NEW_LINE> if mode < 0 or mode > 0o777: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.fm.notify("Need an octal number between 0 and 777!", bad=True) <NEW_LINE> return <NEW_LINE> <DEDENT> for fobj in self.fm.thistab.get_selection(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.chmod(fobj.path, mode) <NEW_LINE> <DEDENT> except OSError as ex: <NEW_LINE> <INDENT> self.fm.notify(ex) <NEW_LINE> <DEDENT> <DEDENT> self.fm.thisdir.content_outdated = True
|
:chmod <octal number>
Sets the permissions of the selection to the octal number.
The octal number is between 0 and 777. The digits specify the
permissions for the user, the group and others.
A 1 permits execution, a 2 permits writing, a 4 permits reading.
Add those numbers to combine them. So a 7 permits everything.
|
6259902fec188e330fdf98f8
|
class NPTBerendsen(NVTBerendsen): <NEW_LINE> <INDENT> def __init__(self, atoms, timestep, temperature, taut=0.5e3 * units.fs, pressure=1.01325, taup=1e3 * units.fs, compressibility=4.57e-5, fixcm=True, trajectory=None, logfile=None, loginterval=1, append_trajectory=False): <NEW_LINE> <INDENT> NVTBerendsen.__init__(self, atoms, timestep, temperature, taut, fixcm, trajectory, logfile, loginterval, append_trajectory=append_trajectory) <NEW_LINE> self.taup = taup <NEW_LINE> self.pressure = pressure <NEW_LINE> self.compressibility = compressibility <NEW_LINE> <DEDENT> def set_taup(self, taup): <NEW_LINE> <INDENT> self.taup = taup <NEW_LINE> <DEDENT> def get_taup(self): <NEW_LINE> <INDENT> return self.taup <NEW_LINE> <DEDENT> def set_pressure(self, pressure): <NEW_LINE> <INDENT> self.pressure = pressure <NEW_LINE> <DEDENT> def get_pressure(self): <NEW_LINE> <INDENT> return self.pressure <NEW_LINE> <DEDENT> def set_compressibility(self, compressibility): <NEW_LINE> <INDENT> self.compressibility = compressibility <NEW_LINE> <DEDENT> def get_compressibility(self): <NEW_LINE> <INDENT> return self.compressibility <NEW_LINE> <DEDENT> def set_timestep(self, timestep): <NEW_LINE> <INDENT> self.dt = timestep <NEW_LINE> <DEDENT> def get_timestep(self): <NEW_LINE> <INDENT> return self.dt <NEW_LINE> <DEDENT> def scale_positions_and_cell(self): <NEW_LINE> <INDENT> taupscl = self.dt / self.taup <NEW_LINE> stress = self.atoms.get_stress(voigt=False, include_ideal_gas=True) <NEW_LINE> old_pressure = -stress.trace() / 3 * 1e-5 / units.Pascal <NEW_LINE> scl_pressure = (1.0 - taupscl * self.compressibility / 3.0 * (self.pressure - old_pressure)) <NEW_LINE> cell = self.atoms.get_cell() <NEW_LINE> cell = scl_pressure * cell <NEW_LINE> self.atoms.set_cell(cell, scale_atoms=True) <NEW_LINE> <DEDENT> def step(self, f=None): <NEW_LINE> <INDENT> NVTBerendsen.scale_velocities(self) <NEW_LINE> self.scale_positions_and_cell() <NEW_LINE> atoms = self.atoms <NEW_LINE> if f is None: <NEW_LINE> <INDENT> 
f = atoms.get_forces() <NEW_LINE> <DEDENT> p = self.atoms.get_momenta() <NEW_LINE> p += 0.5 * self.dt * f <NEW_LINE> if self.fixcm: <NEW_LINE> <INDENT> psum = p.sum(axis=0) / float(len(p)) <NEW_LINE> p = p - psum <NEW_LINE> <DEDENT> self.atoms.set_positions( self.atoms.get_positions() + self.dt * p / self.atoms.get_masses()[:, np.newaxis]) <NEW_LINE> self.atoms.set_momenta(p) <NEW_LINE> f = self.atoms.get_forces() <NEW_LINE> atoms.set_momenta(self.atoms.get_momenta() + 0.5 * self.dt * f) <NEW_LINE> return f
|
Berendsen (constant N, P, T) molecular dynamics.
This dynamics scale the velocities and volumes to maintain a constant
pressure and temperature. The shape of the simulation cell is not
altered, if that is desired use Inhomogenous_NPTBerendsen.
Usage: NPTBerendsen(atoms, timestep, temperature, taut, pressure, taup)
atoms
The list of atoms.
timestep
The time step.
temperature
The desired temperature, in Kelvin.
taut
Time constant for Berendsen temperature coupling.
fixcm
If True, the position and momentum of the center of mass is
kept unperturbed. Default: True.
pressure
The desired pressure, in bar (1 bar = 1e5 Pa).
taup
Time constant for Berendsen pressure coupling.
compressibility
The compressibility of the material, water 4.57E-5 bar-1, in bar-1
|
6259902f8a43f66fc4bf31eb
|
class Writer(Base): <NEW_LINE> <INDENT> __tablename__ = "writer" <NEW_LINE> id = Column('id', Integer, primary_key=True) <NEW_LINE> episode_id = Column(Integer, ForeignKey('episode.id')) <NEW_LINE> name = Column('name', String) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<Writer('%s', '%s')>" % (self.id, self.name)
|
This class stores information about a writer of an episode.
|
6259902fd53ae8145f9194c9
|
class EventDeleteView(LoginRequiredMixin, DeleteView): <NEW_LINE> <INDENT> model = Event <NEW_LINE> success_url = reverse_lazy('event-list')
|
Delete an event from the system.
|
6259902fac7a0e7691f73550
|
class Name(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'name': 'int', 'snake_case': 'int', '_property': 'str', '_123_number': 'int' } <NEW_LINE> self.attribute_map = { 'name': 'name', 'snake_case': 'snake_case', '_property': 'property', '_123_number': '123Number' } <NEW_LINE> self._name = None <NEW_LINE> self._snake_case = None <NEW_LINE> self.__property = None <NEW_LINE> self.__123_number = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def snake_case(self): <NEW_LINE> <INDENT> return self._snake_case <NEW_LINE> <DEDENT> @snake_case.setter <NEW_LINE> def snake_case(self, snake_case): <NEW_LINE> <INDENT> self._snake_case = snake_case <NEW_LINE> <DEDENT> @property <NEW_LINE> def _property(self): <NEW_LINE> <INDENT> return self.__property <NEW_LINE> <DEDENT> @_property.setter <NEW_LINE> def _property(self, _property): <NEW_LINE> <INDENT> self.__property = _property <NEW_LINE> <DEDENT> @property <NEW_LINE> def _123_number(self): <NEW_LINE> <INDENT> return self.__123_number <NEW_LINE> <DEDENT> @_123_number.setter <NEW_LINE> def _123_number(self, _123_number): <NEW_LINE> <INDENT> self.__123_number = _123_number <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) 
<NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902f6e29344779b016b7
|
class ListFavoritePrescription(ListView): <NEW_LINE> <INDENT> template_name = 'list_favorite_prescriptions.html' <NEW_LINE> context_object_name = 'list_favorite_prescriptions' <NEW_LINE> model = Prescription <NEW_LINE> paginate_by = 20 <NEW_LINE> ordering = ['-date_created'] <NEW_LINE> @method_decorator(login_required) <NEW_LINE> @method_decorator(is_health_professional) <NEW_LINE> def dispatch(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(ListFavoritePrescription, self).dispatch(*args, **kwargs) <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> return self.model.objects.filter(health_professional=self.request.user, is_favorite=True)
|
View for list favorite prescriptions in database.
|
6259902f96565a6dacd2d7c2
|
class Article(abc.ABC): <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> if url is None or url == '': <NEW_LINE> <INDENT> raise ArticleException('ArticleException: Invalid URL') <NEW_LINE> <DEDENT> self.url = url <NEW_LINE> self.title = '' <NEW_LINE> self.description = '' <NEW_LINE> self.authors = '' <NEW_LINE> self.publish_date = '' <NEW_LINE> self.text = '' <NEW_LINE> self.section = '' <NEW_LINE> self.subsection = '' <NEW_LINE> self.keywords = [] <NEW_LINE> self.characters = [] <NEW_LINE> self.image_url = '' <NEW_LINE> self.html = '' <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def as_dict(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def download(self): <NEW_LINE> <INDENT> html = get_page_html(self.url) <NEW_LINE> if html is not None and html != '': <NEW_LINE> <INDENT> self.html = html <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ArticleException('ArticleException: Article HTML is empty.') <NEW_LINE> <DEDENT> <DEDENT> @abc.abstractmethod <NEW_LINE> def parse(self, preview_soup=''): <NEW_LINE> <INDENT> pass
|
Abstract Article base class.
Base class for newspaper articles. Download method is standard,
while parse and as_dict must be redefined in child classes.
Attributes
----------
url: str
The URL at which the article can be found.
title: str
The title of the article.
description: str
Article's summary/incipit.
authors: str
The article's authors names.
publish_date: str
The date of publishing.
text: string
The whole text contained in the article.
section: str
The name of the newspaper section containing the article.
subsection: str
The name of the newspaper subsection containing the article.
keywords: list of str
A list of keywords used to identify article's topics.
characters: list of str
A list of characters presented or discussed in the article.
image_url: str
The URL of the top image contained in the article.
html: HTML
The raw HTML extracted from the article webpage.
|
6259902f8a43f66fc4bf31ed
|
class JtagError(Exception): <NEW_LINE> <INDENT> pass
|
Generic JTAG error
|
6259902f5e10d32532ce4137
|
class BASEHEADER(Structure): <NEW_LINE> <INDENT> _fields_ = [('version', c_ushort, 2), ('flags', c_ushort, 8), ('length', c_ushort, 6), ('md_type', c_ubyte), ('next_protocol', c_ubyte), ('service_path', c_uint, 24), ('service_index', c_uint, 8)] <NEW_LINE> def __init__(self, service_path=1, service_index=255, version=NSH_VERSION1, flags=NSH_FLAG_ZERO, length=NSH_TYPE1_LEN, md_type=NSH_MD_TYPE1, proto=NSH_NEXT_PROTO_ETH, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.version = version <NEW_LINE> self.flags = flags <NEW_LINE> self.length = length <NEW_LINE> self.md_type = md_type <NEW_LINE> self.next_protocol = proto <NEW_LINE> self.service_path = service_path <NEW_LINE> self.service_index = service_index <NEW_LINE> <DEDENT> header_size = 8 <NEW_LINE> def build(self): <NEW_LINE> <INDENT> return pack('!H B B I', (self.version << 14) + (self.flags << 6) + self.length, self.md_type, self.next_protocol, (self.service_path << 8) + self.service_index)
|
Represents an NSH base header
|
6259902f5166f23b2e24443e
|
class NinthPageExecution(UtilityPage): <NEW_LINE> <INDENT> def enter_date(self, **kwargs): <NEW_LINE> <INDENT> mon = self.driver.find_element_by_css_selector("input[aria-label='Month']") <NEW_LINE> mon.send_keys(Keys.BACKSPACE) <NEW_LINE> mon.send_keys(kwargs.get('fist_value')) <NEW_LINE> self.driver.find_element_by_css_selector("input[aria-label='Day of the month']").send_keys(kwargs.get('second_value')) <NEW_LINE> <DEDENT> def enter_time(self, **kwargs): <NEW_LINE> <INDENT> from nose.tools import set_trace;set_trace() <NEW_LINE> hr = self.driver.find_element_by_css_selector("input[aria-label='Hour']") <NEW_LINE> hr.click() <NEW_LINE> hr.send_keys(Keys.BACKSPACE) <NEW_LINE> hr.send_keys(kwargs.get('fist_value')) <NEW_LINE> mn = self.driver.find_element_by_css_selector("input[aria-label='Minute']") <NEW_LINE> mn.send_keys(Keys.BACKSPACE) <NEW_LINE> mn.send_keys(kwargs.get('second_value'))
|
Execute the ninth page, which is for selecting dates
|
6259902f8c3a8732951f75c0
|
class LocationModelAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ['location_name', 'latitude', 'longitude', 'elevation', 'site_type', 'last_update', 'modified_by'] <NEW_LINE> list_display_links = ['location_name', 'latitude', 'longitude'] <NEW_LINE> list_filter = ['location_name', 'site_type', 'last_update', 'modified_by'] <NEW_LINE> inlines = [LocationRelationInline, ] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Location
|
Location model admin settings
|
6259902fd10714528d69eebf
|
class RAMONVolume(RAMONBase): <NEW_LINE> <INDENT> def __init__(self, xyz_offset=(0, 0, 0), resolution=0, cutout=None, voxels=None, id=DEFAULT_ID, confidence=DEFAULT_CONFIDENCE, kvpairs=DEFAULT_DYNAMIC_METADATA, status=DEFAULT_STATUS, author=DEFAULT_AUTHOR): <NEW_LINE> <INDENT> self.xyz_offset = xyz_offset <NEW_LINE> self.resolution = resolution <NEW_LINE> self.cutout = cutout <NEW_LINE> self.voxels = voxels <NEW_LINE> RAMONBase.__init__(self, id=id, confidence=confidence, kvpairs=kvpairs, status=status, author=author) <NEW_LINE> <DEDENT> def data(self): <NEW_LINE> <INDENT> if self.cutout: <NEW_LINE> <INDENT> return self.cutout <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("Cannot convert from voxel list yet.")
|
RAMONVolume Object for storing neuroscience data with a voxel volume
|
6259902f3eb6a72ae038b6ce
|
class DbSNP(models.Model): <NEW_LINE> <INDENT> rsid = models.CharField(max_length=16, unique=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.rsid
|
Contains dbSNP information.
dbSNP IDs are a separate class with a many-to-many mapping because
(1) some variants have multiple dbSNP IDs mapped to the same
location, (2) dbSNP IDs refer to a position and some have more
than two alleles (e.g. triallelic)
Data attributes:
rsid: dbSNP identifier (CharField, unique)
|
6259902fd18da76e235b7982
|
class PageType(object): <NEW_LINE> <INDENT> def __init__(self, name, index): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.index = index
|
PageType class maintaining page where module type resides.
|
6259902f63f4b57ef00865a7
|
class LoggerMixin(object): <NEW_LINE> <INDENT> def __init__(self, logger, *args, **kwargs): <NEW_LINE> <INDENT> super(LoggerMixin, self).__init__(*args, **kwargs) <NEW_LINE> self.attach_logger(logger) <NEW_LINE> <DEDENT> def attach_logger(self, logger): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> self.log = logger.log <NEW_LINE> self.verbose = logger.verbose <NEW_LINE> self.debug = logger.debug <NEW_LINE> self.info = logger.info <NEW_LINE> self.warning = self.warn = logger.warning <NEW_LINE> self.error = logger.error <NEW_LINE> self.exception = logger.exception
|
Use as a parent class (or one of them) when you want to "attach" methods of a given
logger to class' instances.
:param ContextAdapter logger: logger to propagate.
|
6259902fa8ecb03325872287
|
class CholeskySampler(object): <NEW_LINE> <INDENT> def __init__(self, mean, cov, dist=None): <NEW_LINE> <INDENT> self.mean = numpy.array(mean, ndmin=1) <NEW_LINE> self.cov = numpy.array(cov, ndmin=2) <NEW_LINE> if dist is None: <NEW_LINE> <INDENT> dist = numpy.random.randn <NEW_LINE> <DEDENT> self.dist = dist <NEW_LINE> npar = mean.size <NEW_LINE> n1, n2 = cov.shape[0: 0 + 2] <NEW_LINE> if npar != cov.shape[0] or npar != cov.shape[1]: <NEW_LINE> <INDENT> raise ValueError( "mean shape [%d] inconsistent " "with cov shape [%d,%d]" % (npar, n1, n2) ) <NEW_LINE> <DEDENT> self.M = numpy.linalg.cholesky(self.cov) <NEW_LINE> self.npar = npar <NEW_LINE> <DEDENT> def sample(self, n=None): <NEW_LINE> <INDENT> if n is None: <NEW_LINE> <INDENT> n = 1 <NEW_LINE> is_scalar = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> is_scalar = False <NEW_LINE> <DEDENT> npar = self.npar <NEW_LINE> r = self.dist(npar * n).reshape(npar, n) <NEW_LINE> V = numpy.dot(self.M, r) <NEW_LINE> mean = self.mean <NEW_LINE> for i in range(npar): <NEW_LINE> <INDENT> V[i, :] += mean[i] <NEW_LINE> <DEDENT> samples = V.T <NEW_LINE> if is_scalar: <NEW_LINE> <INDENT> return samples[0, :] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return samples
|
sample a multivariate covariant distribution using cholesky decomposition
example
-------
means=[20.0, 40.0]
cov=[[1.0,0.5],[0.5,2.0]]
cs=CholeskySampler(means,cov)
n=100000
rand=cs.sample(n)
s.mean(axis=0)
array([ 20.00139558, 50.00419912])
s.var(axis=0)
array([ 1.00076388, 2.00251013])
mm=s.mean(axis=0)
( (s[:,0]-mm[0])*(s[:,1]-mm[1]) ).sum()/(n-1)
0.50052647916418957
|
6259902f30c21e258be99875
|
class RewriteAction(BaseAction): <NEW_LINE> <INDENT> action_spec = dict( source_directory=REQUIRED_ACTION_KWD, destination_directory=REQUIRED_ACTION_KWD ) <NEW_LINE> action_type = "rewrite" <NEW_LINE> staging = STAGING_ACTION_NONE <NEW_LINE> def __init__(self, path, file_lister=None, source_directory=None, destination_directory=None): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> self.file_lister = file_lister or DEFAULT_FILE_LISTER <NEW_LINE> self.source_directory = source_directory <NEW_LINE> self.destination_directory = destination_directory <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return dict( path=self.path, action_type=self.action_type, source_directory=self.source_directory, destination_directory=self.destination_directory, ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, action_dict): <NEW_LINE> <INDENT> return RewriteAction( path=action_dict["path"], source_directory=action_dict["source_directory"], destination_directory=action_dict["destination_directory"], ) <NEW_LINE> <DEDENT> def path_rewrite(self, path_helper, path=None): <NEW_LINE> <INDENT> if not path: <NEW_LINE> <INDENT> path = self.path <NEW_LINE> <DEDENT> new_path = path_helper.from_posix_with_new_base(self.path, self.source_directory, self.destination_directory) <NEW_LINE> return None if new_path == self.path else new_path
|
This action indicates the LWR server should simply rewrite the path
to the specified file.
|
6259902f8a43f66fc4bf31ef
|
class testcaseSuite(object): <NEW_LINE> <INDENT> def __init__(self, testsWordDir, simCmd, simulator_if): <NEW_LINE> <INDENT> self._dir = testsWordDir <NEW_LINE> self._simCmd = simCmd <NEW_LINE> self._test = os.path.basename(testsWordDir) <NEW_LINE> self.name = os.path.basename(testsWordDir) <NEW_LINE> self._run = TestRun(simulator_if=simulator_if, testWordDir=self._dir, simCmd = self._simCmd, test_cases=[self._test]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def test_result_file(self): <NEW_LINE> <INDENT> return self._run.get_test_result() <NEW_LINE> <DEDENT> @property <NEW_LINE> def test_information(self): <NEW_LINE> <INDENT> return self._test <NEW_LINE> <DEDENT> def run(self, *args, **kwargs): <NEW_LINE> <INDENT> results = self._run.run(*args, **kwargs) <NEW_LINE> return results
|
A test case to be run in an independent simulation
|
6259902f1f5feb6acb163c5a
|
class FreeAtHomeBinarySensor(BinarySensorEntity): <NEW_LINE> <INDENT> _name = '' <NEW_LINE> binary_device = None <NEW_LINE> _state = None <NEW_LINE> _hass = None <NEW_LINE> def __init__(self, device, hass): <NEW_LINE> <INDENT> self.binary_device = device <NEW_LINE> self._name = self.binary_device.name <NEW_LINE> self._state = (self.binary_device.state == '1') <NEW_LINE> self._hass = hass <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self): <NEW_LINE> <INDENT> return self.binary_device.device_info <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self.binary_device.serialnumber + '/' + self.binary_device.channel_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> async def after_update_callback(device): <NEW_LINE> <INDENT> await self.async_update_ha_state(True) <NEW_LINE> <DEDENT> self.binary_device.register_device_updated_cb(after_update_callback) <NEW_LINE> <DEDENT> async def async_update(self): <NEW_LINE> <INDENT> self._state = (self.binary_device.state == '1') <NEW_LINE> _LOGGER.info('update sensor') <NEW_LINE> eventdata = { "name" : self._name, "serialnumber": self.binary_device.serialnumber, "unique_id" : self.unique_id, "state" : self._state, "command" : "pressed" } <NEW_LINE> self._hass.bus.async_fire("freeathome_event", eventdata)
|
Interface to the binary devices of Free@Home
|
6259902fd53ae8145f9194cd
|
class ProvisionStart(Page, ProvisionFormButtonMixin): <NEW_LINE> <INDENT> _page_title = "CloudForms Management Engine: Virtual Machines" <NEW_LINE> _template_list_locator = ( By.CSS_SELECTOR, "div#pre_prov_div > fieldset > table > tbody") <NEW_LINE> def click_on_continue(self): <NEW_LINE> <INDENT> self.continue_button.click() <NEW_LINE> self._wait_for_results_refresh() <NEW_LINE> return Provision(self.testsetup) <NEW_LINE> <DEDENT> def click_on_cancel(self): <NEW_LINE> <INDENT> from pages.services import Services <NEW_LINE> self.cancel_button.click() <NEW_LINE> self._wait_for_results_refresh() <NEW_LINE> return Services.VirtualMachines(self.testsetup) <NEW_LINE> <DEDENT> @property <NEW_LINE> def template_list(self): <NEW_LINE> <INDENT> return ListRegion( self.testsetup, self.get_element(*self._template_list_locator), self.TemplateItem) <NEW_LINE> <DEDENT> class TemplateItem(ListItem): <NEW_LINE> <INDENT> _columns = ["name", "operating_system", "platform", "cpus", "memory", "disk_size", "management_system", "snapshots"] <NEW_LINE> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._item_data[0].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def operating_system(self): <NEW_LINE> <INDENT> return self._item_data[1].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def platform(self): <NEW_LINE> <INDENT> return self._item_data[2].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def cpus(self): <NEW_LINE> <INDENT> return self._item_data[3].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def memory(self): <NEW_LINE> <INDENT> return self._item_data[4].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def disk_size(self): <NEW_LINE> <INDENT> return self._item_data[5].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def management_system(self): <NEW_LINE> <INDENT> return self._item_data[6].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def snapshots(self): <NEW_LINE> <INDENT> return self._item_data[7].text
|
Page representing the start of the Provision VMs "wizard"
|
6259902f66673b3332c3145b
|
class Hook: <NEW_LINE> <INDENT> def __init__(self, callback, timeoutMilliseconds = 0, *callback_args): <NEW_LINE> <INDENT> self.callback = callback <NEW_LINE> self.callback_args = callback_args <NEW_LINE> self.timeoutMilliseconds = timeoutMilliseconds <NEW_LINE> self.ready_time = datetime.now() <NEW_LINE> <DEDENT> def is_ready(self): <NEW_LINE> <INDENT> return datetime.now() >= self.ready_time <NEW_LINE> <DEDENT> def invoke(self): <NEW_LINE> <INDENT> if (self.timeoutMilliseconds > 0): <NEW_LINE> <INDENT> self.ready_time = self._get_ready_time(self.timeoutMilliseconds) <NEW_LINE> <DEDENT> self._run(self.callback_args) <NEW_LINE> <DEDENT> def _get_ready_time(self, timeoutMilliseconds): <NEW_LINE> <INDENT> return datetime.now() + timedelta(milliseconds = timeoutMilliseconds) <NEW_LINE> <DEDENT> def _run(self, *args): <NEW_LINE> <INDENT> self.callback(*args)
|
Event handler that invokes an arbitrary callback when invoked.
If the timeoutMilliseconds argument is greater than 0,
the hook will be suspended for n milliseconds after it's being invoked.
|
6259902f21bff66bcd723ccf
|
class FixedDepthDecisionMaker(BaseDecisionMaker): <NEW_LINE> <INDENT> def __init__(self, search_context, logger, depth): <NEW_LINE> <INDENT> super(FixedDepthDecisionMaker, self).__init__(search_context, logger) <NEW_LINE> self.__depth = depth <NEW_LINE> <DEDENT> def decide(self): <NEW_LINE> <INDENT> if self._search_context.get_current_serp_position() < self.__depth: <NEW_LINE> <INDENT> return Actions.SNIPPET <NEW_LINE> <DEDENT> return Actions.QUERY
|
A concrete implementation of a decision maker.
Returns True iff the depth at which a user is in a SERP is less than a predetermined value.
|
6259902fd6c5a102081e3191
|
@requires_toolkit([ToolkitName.qt]) <NEW_LINE> class TestStickyDialog(unittest.TestCase): <NEW_LINE> <INDENT> def test_sticky_dialog_with_parent(self): <NEW_LINE> <INDENT> obj = ObjectWithNumber() <NEW_LINE> obj2 = ObjectWithNumber() <NEW_LINE> parent_view = View(Item("number"), title="Parent") <NEW_LINE> nested = View(Item("number"), resizable=True, title="Nested") <NEW_LINE> with create_ui(obj, dict(view=parent_view)) as ui: <NEW_LINE> <INDENT> with create_ui(obj2, dict(parent=ui.control, view=nested)) as ui2: <NEW_LINE> <INDENT> from pyface.qt import QtCore <NEW_LINE> self.assertFalse( ui2.control.windowFlags() & QtCore.Qt.WindowState.WindowMaximized )
|
Test _StickyDialog used by the UI's Qt backend.
|
6259902fec188e330fdf98fe
|
class HelloWorldRenderer(Renderer): <NEW_LINE> <INDENT> def render(self, messages, receiver_id): <NEW_LINE> <INDENT> for message in self.convert(messages): <NEW_LINE> <INDENT> print('--> {}'.format(message.payload)) <NEW_LINE> <DEDENT> <DEDENT> def convert(self, messages): <NEW_LINE> <INDENT> return messages
|
Stdout renderer
|
6259902fbe8e80087fbc00e7
|
class MeanFrameOverTime: <NEW_LINE> <INDENT> def __init__(self, buffer_size=5): <NEW_LINE> <INDENT> assert buffer_size > 1, 'Buffer size must be > 1' <NEW_LINE> self.buffer_size = buffer_size <NEW_LINE> self.buffer = None <NEW_LINE> <DEDENT> def process(self, img: np.ndarray) -> np.ndarray: <NEW_LINE> <INDENT> if self.buffer is None: <NEW_LINE> <INDENT> self.buffer = np.zeros((self.buffer_size, *img.shape), dtype=img.dtype) <NEW_LINE> <DEDENT> self.buffer[1:] = self.buffer[:-1] <NEW_LINE> self.buffer[0] = img.copy() <NEW_LINE> if self.buffer.shape[0] < self.buffer_size: <NEW_LINE> <INDENT> return img <NEW_LINE> <DEDENT> return np.clip(np.mean(self.buffer, axis=0), 0, 255).astype(img.dtype)
|
Calculates a mean frame over specified time-buffer
|
6259902f5166f23b2e244442
|
class Tablefy(cli.Application): <NEW_LINE> <INDENT> _log_builder = p_logging.ProsperLogger( ME, LOG_PATH ) <NEW_LINE> debug = cli.Flag( ['d', '--debug'], help='Debug mode, no production db, headless mode' ) <NEW_LINE> @cli.switch( ['-v', '--verbose'], help='Enable verbose messaging' ) <NEW_LINE> def enable_verbose(self): <NEW_LINE> <INDENT> self._log_builder.configure_debug_logger() <NEW_LINE> <DEDENT> table_file = path.join(ROOT, 'vincent_lexicon', 'tables', 'news_database.json') <NEW_LINE> @cli.switch( ['-t', '--table'], str, help='path to table/tinyDB file' ) <NEW_LINE> def override_table_file(self, table): <NEW_LINE> <INDENT> if path.isfile(table): <NEW_LINE> <INDENT> self.table_file = table <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise FileNotFoundError <NEW_LINE> <DEDENT> <DEDENT> out_file = path.join(HERE, 'news_database_clean.csv') <NEW_LINE> @cli.switch( ['-o', '--outfile'], str, help='path to output file' ) <NEW_LINE> def override_out_file(self, outfile): <NEW_LINE> <INDENT> self.out_file = path.abspath(outfile) <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> global LOGGER <NEW_LINE> LOGGER = self._log_builder.logger <NEW_LINE> LOGGER.debug('hello world') <NEW_LINE> LOGGER.info('loading table file: ' + self.table_file) <NEW_LINE> db_file = None <NEW_LINE> with open(self.table_file, 'r') as json_fh: <NEW_LINE> <INDENT> db_file = json.load(json_fh) <NEW_LINE> <DEDENT> LOGGER.info('processing table file') <NEW_LINE> crunched_price_data = process_price_data(db_file) <NEW_LINE> crunched_news_data = process_news_data(db_file) <NEW_LINE> LOGGER.info('writing summary tables') <NEW_LINE> price_csv_file = self.out_file.replace('.csv', '-price.csv') <NEW_LINE> csv_dump( crunched_price_data, price_csv_file ) <NEW_LINE> news_csv_file = self.out_file.replace('.csv', '-news.csv') <NEW_LINE> csv_dump( crunched_news_data, news_csv_file )
|
Plumbum CLI application to help pre-process tinyDB data into more regular table shape
|
6259902fe76e3b2f99fd9a79
|
class BlockHeader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.version = 0 <NEW_LINE> self.prev_block = 0 <NEW_LINE> self.merkle_root = 0 <NEW_LINE> self.timestamp = 0 <NEW_LINE> self.bits = 0 <NEW_LINE> self.nonce = 0 <NEW_LINE> self.txns_count = 0 <NEW_LINE> <DEDENT> def calculate_hash(self): <NEW_LINE> <INDENT> hash_fields = ["version", "prev_block", "merkle_root", "timestamp", "bits", "nonce"] <NEW_LINE> serializer = BlockSerializer() <NEW_LINE> bin_data = serializer.serialize(self, hash_fields) <NEW_LINE> h = hashlib.sha256(bin_data).digest() <NEW_LINE> h = hashlib.sha256(h).digest() <NEW_LINE> return binascii.b2a_hex (h[::-1]) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s Version=[%d] Timestamp=[%s] Nonce=[%d] Hash=[%s] Tx Count=[%d]>" % (self.__class__.__name__, self.version, time.ctime(self.timestamp), self.nonce, self.calculate_hash(), self.txns_count)
|
The header of the block.
|
6259902f4e696a045264e658
|
class SpiderLogHandler(BaseHandler): <NEW_LINE> <INDENT> async def get(self, host_id, project_name, spider_name, spider_id): <NEW_LINE> <INDENT> host_info = await self.application.objects.get(Host, id_=host_id) <NEW_LINE> scrapyd = scrapyd_object(host_info, timeout=1) <NEW_LINE> try: <NEW_LINE> <INDENT> code = await scrapyd.spider_log( project_name=project_name, spider_name=spider_name, spider_id=spider_id) <NEW_LINE> self.render('file.html', id=1, code=code.body) <NEW_LINE> <DEDENT> except Exception as f: <NEW_LINE> <INDENT> app_log.error(str(f)) <NEW_LINE> self.render('404.html')
|
get spider log file
:GET param host_id: server id
:GET param project_name: project name
:GET param spider_name: spider name
:GET param spider_id: scrapyd spider id
|
6259902f1d351010ab8f4b84
|
class PublicIPPrefixListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[PublicIPPrefix]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(PublicIPPrefixListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None)
|
Response for ListPublicIpPrefixes API service call.
:param value: A list of public IP prefixes that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_08_01.models.PublicIPPrefix]
:param next_link: The URL to get the next set of results.
:type next_link: str
|
6259902f50485f2cf55dbfea
|
class PrintRunner(Runner): <NEW_LINE> <INDENT> def _handleNextDate(self, date): <NEW_LINE> <INDENT> print('Reminders for %s' % date.isoformat()) <NEW_LINE> <DEDENT> def _executeReminder(self, reminder, date): <NEW_LINE> <INDENT> reminder.execute(date)
|
Наследник класса L{Runner}, подходящий для обработки текстовых
напоминателей (таких, что связанные с ними действия выполняют печать
сообщения в поток вывода).
|
6259902fac7a0e7691f73555
|
class StoredCallableWrapper(Generator): <NEW_LINE> <INDENT> def __init__(self, key, function, **parameters): <NEW_LINE> <INDENT> self._function = function <NEW_LINE> self._param_keys = list(parameters.keys()) <NEW_LINE> super(StoredCallableWrapper, self).__init__(key, function=function, **parameters) <NEW_LINE> <DEDENT> def rebuild(self): <NEW_LINE> <INDENT> params = {key: self.get_parameter(key) for key in self._param_keys} <NEW_LINE> return self._function(**params)
|
Generator for callables using that stores results.
|
6259902f96565a6dacd2d7c5
|
class Post(models.Model): <NEW_LINE> <INDENT> NORMAL, CLOSED, MODDED, EDITED = 0, 1, 2, 3 <NEW_LINE> HIDDEN = 99 <NEW_LINE> POST_STATUS = ( (NORMAL,"normal"), (CLOSED,"closed"), (MODDED,"modded"), (EDITED,"edited"), (HIDDEN,"hidden"), ) <NEW_LINE> topic = models.ForeignKey(Topic) <NEW_LINE> creator = models.ForeignKey(User) <NEW_LINE> date = models.DateTimeField() <NEW_LINE> post_num = models.CharField(max_length=15) <NEW_LINE> contents = models.TextField() <NEW_LINE> signature = models.TextField() <NEW_LINE> status = models.CharField(max_length=2, choices=POST_STATUS, default=NORMAL) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s\n %s \n---\n %s" % (self.topic, self.contents, self.signature) <NEW_LINE> <DEDENT> def is_hidden(self): <NEW_LINE> <INDENT> return int(self.status) == Post.HIDDEN
|
Represents a post made on GameFAQs
|
6259902f91af0d3eaad3ae9a
|
class Element(object): <NEW_LINE> <INDENT> __slots__ = ( 'pos', 'plane', 'special_sector', 'flags', 'elements', 'area', 'connection', 'index' ) <NEW_LINE> DIR_UP = 0 <NEW_LINE> DIR_RIGHT = 1 <NEW_LINE> DIR_DOWN = 2 <NEW_LINE> DIR_LEFT = 3 <NEW_LINE> DIR_RANGE = [DIR_UP, DIR_RIGHT, DIR_DOWN, DIR_LEFT] <NEW_LINE> FLAG_DAMAGE_LOW = 0x0001 <NEW_LINE> FLAG_DAMAGE_MEDIUM = 0x0002 <NEW_LINE> FLAG_DAMAGE_HIGH = 0x0004 <NEW_LINE> FLAG_JUMP_NORTH = 0x0008 <NEW_LINE> FLAG_JUMP_EAST = 0x0010 <NEW_LINE> FLAG_JUMP_SOUTH = 0x0020 <NEW_LINE> FLAG_JUMP_WEST = 0x0040 <NEW_LINE> def __init__(self, x, y, z): <NEW_LINE> <INDENT> self.pos = Vector3(x, y, z) <NEW_LINE> self.plane = None <NEW_LINE> self.special_sector = None <NEW_LINE> self.flags = 0 <NEW_LINE> self.elements = [None] * 4 <NEW_LINE> self.area = None <NEW_LINE> self.connection = [None] * 4 <NEW_LINE> self.index = -1 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'element {}, flags {}, sector {}, plane {}'.format(self.pos, self.flags, self.special_sector, self.plane) <NEW_LINE> <DEDENT> def is_similar(self, other): <NEW_LINE> <INDENT> if self.plane is not None or other.plane is not None: <NEW_LINE> <INDENT> return self.special_sector == other.special_sector and self.flags == other.flags and self.plane == other.plane <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.special_sector == other.special_sector and self.flags == other.flags and self.pos.z == other.pos.z
|
A single square grid element on a map that the player can stand at.
|
6259902f287bf620b6272c56
|
class WinerySerializer(serializers.HyperlinkedModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Models.Winery <NEW_LINE> fields = ('url', 'id', 'winery_name', 'winery_addr', 'winery_phn')
|
6259902fd18da76e235b7985
|
|
class BRAINSLinearModelerEPCA(SlicerCommandLine): <NEW_LINE> <INDENT> input_spec = BRAINSLinearModelerEPCAInputSpec <NEW_LINE> output_spec = BRAINSLinearModelerEPCAOutputSpec <NEW_LINE> _cmd = " BRAINSLinearModelerEPCA " <NEW_LINE> _outputs_filenames = {}
|
title: Landmark Linear Modeler (BRAINS)
category: Utilities.BRAINS
description:
Training linear model using EPCA. Implementation based on my MS thesis, "A METHOD FOR AUTOMATED LANDMARK CONSTELLATION DETECTION USING EVOLUTIONARY PRINCIPAL COMPONENTS AND STATISTICAL SHAPE MODELS"
version: 1.0
documentation-url: http://www.nitrc.org/projects/brainscdetector/
|
6259902f50485f2cf55dbfec
|
class Meta: <NEW_LINE> <INDENT> model = Project <NEW_LINE> fields = ('id', 'title', 'description', 'skills', 'author')
|
Maps the project model to json
|
6259902fac7a0e7691f73557
|
class PrimaryKeyConstraint(UniqueConstraint): <NEW_LINE> <INDENT> _SYMBOL = 'pk' <NEW_LINE> _IMPORT = 'from %(constraints)s import PrimaryKey as %(pk)s'
|
Primary Key constraint
|
6259902f63f4b57ef00865aa
|
class Crypto_AES_GCM__AES_SIV(Crypto_AES_CBC_HMAC__AES_SIV): <NEW_LINE> <INDENT> _cryptoScheme = '3' <NEW_LINE> _cryptoName = 'AES-GCM/AES-SIV/scrypt' <NEW_LINE> def __init__(self, password, client=None, fsencoding=sys.getfilesystemencoding()): <NEW_LINE> <INDENT> super().__init__(password, client, fsencoding) <NEW_LINE> <DEDENT> def getContentCipher(self, iv=None): <NEW_LINE> <INDENT> if iv is None: <NEW_LINE> <INDENT> iv = self.getIV() <NEW_LINE> <DEDENT> return AES.new(self._contentKey, AES.MODE_GCM, nonce=iv) <NEW_LINE> <DEDENT> def getContentEncryptor(self, iv=None): <NEW_LINE> <INDENT> return StreamEncryptor(self.getContentCipher(iv))
|
Improved crypto scheme.
Still uses AES-256 GCM for encryption and authentication
Uses AES-256 SIV encryption and authentication for files
|
6259902fd99f1b3c44d06713
|
class CsvManager(object): <NEW_LINE> <INDENT> def __init__(self, working_dir, timespan, vars, points, filename): <NEW_LINE> <INDENT> self.collector = {} <NEW_LINE> self.filename = filename <NEW_LINE> self.workingDir = working_dir <NEW_LINE> self.datesStrings = self.__dateFormatter(timespan) <NEW_LINE> self.__initFileFrames(vars, points) <NEW_LINE> <DEDENT> def __initFileFrames(self, vars, points): <NEW_LINE> <INDENT> header = [var["var"] for var in vars] <NEW_LINE> header.insert(0,"timestep") <NEW_LINE> df = pd.DataFrame(columns=header) <NEW_LINE> for point in points: <NEW_LINE> <INDENT> self.collector[id(point)] = { "df": df, "fileName": str(point.getPointCoords()["xIdx"]) + "_" + str(point.getPointCoords()["yIdx"]) + self.filename, "timestep": {"data": self.datesStrings, "long_name": "timestep", "units": ""} } <NEW_LINE> for var in vars: <NEW_LINE> <INDENT> varName = var["var"] <NEW_LINE> funcName = var["func"]["name"] <NEW_LINE> try: <NEW_LINE> <INDENT> long_name = var["data"].long_name + "_" + funcName <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> long_name = varName + "_" + funcName <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> units = var["data"].units <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> units = "" <NEW_LINE> <DEDENT> self.collector[id(point)][varName] = {"data": [], "long_name": long_name, "units": units} <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __dateFormatter(self, timespan): <NEW_LINE> <INDENT> timespanStrs = [] <NEW_LINE> for time in timespan: <NEW_LINE> <INDENT> timespanStrs.append(time["startDate"].strftime("%Y-%m-%d") + "/" + time["endDate"].strftime("%Y-%m-%d")) <NEW_LINE> <DEDENT> return timespanStrs <NEW_LINE> <DEDENT> def writeDataToFile(self): <NEW_LINE> <INDENT> collector = self.collector <NEW_LINE> for pointId in collector: <NEW_LINE> <INDENT> frame = self.collector[pointId] <NEW_LINE> df = frame["df"] <NEW_LINE> for colName in list(df): <NEW_LINE> <INDENT> df[colName] = frame[colName]["data"] <NEW_LINE> if colName != "timestep": 
<NEW_LINE> <INDENT> unitSep = "/" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> unitSep = "" <NEW_LINE> <DEDENT> df = df.rename(index=str, columns={colName: frame[colName]["long_name"] + unitSep + frame[colName]["units"]}) <NEW_LINE> <DEDENT> df.to_csv(self.workingDir + frame["fileName"] + ".csv", index=False) <NEW_LINE> <DEDENT> <DEDENT> def collectValues(self, varName, val, point): <NEW_LINE> <INDENT> varValContainer = self.collector[id(point)][varName]["data"] <NEW_LINE> varValContainer.append(val)
|
Class for managing csv functionalities like initializing,
formatting, inserting values and writing output files for
spotty data (e.g. statistics of user defined points).
|
6259902f1f5feb6acb163c60
|
class Firewall (object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.banned_ports = {} <NEW_LINE> for line in fileinput.input('/root/pox/ext/banned-ports.txt'): <NEW_LINE> <INDENT> portNumber = int(line) <NEW_LINE> self.banned_ports[portNumber] = True <NEW_LINE> <DEDENT> log.debug("Firewall initialized.") <NEW_LINE> <DEDENT> def _handle_ConnectionIn (self, event, flow, packet): <NEW_LINE> <INDENT> log.debug("Allowed connection [" + str(flow.src) + ":" + str(flow.srcport) + "," + str(flow.dst) + ":" + str(flow.dstport) + "]" ) <NEW_LINE> event.action.forward = True <NEW_LINE> <DEDENT> def _handle_DeferredConnectionIn (self, event, flow, packet): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _handle_MonitorData (self, event, packet, reverse): <NEW_LINE> <INDENT> pass
|
Firewall class.
Extend this to implement some firewall functionality.
Don't change the name or anything -- the eecore component
expects it to be firewall.Firewall.
|
6259902f8e05c05ec3f6f693
|
class Pagination: <NEW_LINE> <INDENT> def __init__(self, query, per_page, page, link): <NEW_LINE> <INDENT> self.query = query <NEW_LINE> self.per_page = per_page <NEW_LINE> self.page = page <NEW_LINE> self.link = link <NEW_LINE> self._count = None <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def entries(self): <NEW_LINE> <INDENT> return ( self.query.offset((self.page - 1) * self.per_page) .limit(self.per_page) .all() ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_previous(self): <NEW_LINE> <INDENT> return self.page > 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_next(self): <NEW_LINE> <INDENT> return self.page < self.pages <NEW_LINE> <DEDENT> @property <NEW_LINE> def previous(self): <NEW_LINE> <INDENT> return href(self.link, page=self.page - 1) <NEW_LINE> <DEDENT> @property <NEW_LINE> def next(self): <NEW_LINE> <INDENT> return href(self.link, page=self.page + 1) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def count(self): <NEW_LINE> <INDENT> return self.query.count() <NEW_LINE> <DEDENT> @property <NEW_LINE> def pages(self): <NEW_LINE> <INDENT> return max(0, self.count - 1) // self.per_page + 1
|
Paginate a SQLAlchemy query object.
|
6259902fd10714528d69eec3
|
class DonationTest(seldom.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> page = ServicePage(Seldom.driver) <NEW_LINE> menubar = DonationPage(Seldom.driver) <NEW_LINE> page.get("https://sit.1177tech.com.tw/SSO/external/login.jsp") <NEW_LINE> page.userid_input = "ad21@sharklasers.com" <NEW_LINE> page.pwd_input = "Qqqq1111" <NEW_LINE> page.validate_input = "1111" <NEW_LINE> page.login_button.click() <NEW_LINE> page.big_apple_button.click() <NEW_LINE> page.services.click() <NEW_LINE> PageWait(page.donation) <NEW_LINE> page.donation.click() <NEW_LINE> menubar.donation_quick.click() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> file_path = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> @data(excel_to_list(file_path + "\data.xlsx", sheet="donation_fast", line=1)) <NEW_LINE> def test_case1(self, usename, amount, mobile): <NEW_LINE> <INDENT> page = DonationPage(Seldom.driver) <NEW_LINE> PageWait(page.usename_input) <NEW_LINE> page.usename_input.click() <NEW_LINE> page.usename_input.clear() <NEW_LINE> page.usename_input.send_keys(usename) <NEW_LINE> page.donation_amount.click() <NEW_LINE> page.donation_amount.clear() <NEW_LINE> page.donation_amount.send_keys(amount) <NEW_LINE> page.mobile.clear() <NEW_LINE> page.mobile.send_keys(mobile) <NEW_LINE> page.select.select_by_value("5") <NEW_LINE> page.send.click() <NEW_LINE> PageWait(page.close_button) <NEW_LINE> PageWait(page.show_message) <NEW_LINE> self.assertEqual(page.show_message.text, "快速捐款發送完成!") <NEW_LINE> page.close_button.click()
|
快速捐款建立
|
6259902f30c21e258be9987d
|
class Provider_Activities(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'provider_activities' <NEW_LINE> id_provider_activities = db.Column(db.Integer, primary_key=True) <NEW_LINE> id_provider = db.Column(db.Integer, db.ForeignKey('provider.id_provider')) <NEW_LINE> id_provider_practice_location = db.Column(db.Integer, db.ForeignKey('provider_practice_locaton.id_provider_practice_location')) <NEW_LINE> id_practice_type = db.Column(db.Integer, db.ForeignKey('practice_type.id_practice_type')) <NEW_LINE> primary_activities = db.Column(db.Boolean, default=True, nullable=False) <NEW_LINE> title = db.Column(db.String(120)) <NEW_LINE> effective_date = db.Column(db.Date) <NEW_LINE> term_date = db.Column(db.Numeric) <NEW_LINE> clinical_FTE = db.Column(db.Float) <NEW_LINE> admin_FTE = db.Column(db.Float) <NEW_LINE> research_FTE = db.Column(db.Float) <NEW_LINE> teaching_FTE = db.Column(db.Float) <NEW_LINE> weekly_patient_care_hours = db.Column(db.Float) <NEW_LINE> description = db.Column(db.Text) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Provider Activities: {}>'.format(self.name)
|
Create a Provider Activities table
|
6259902fa8ecb0332587228f
|
class TestBasket(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testBasket(self): <NEW_LINE> <INDENT> pass
|
Basket unit test stubs
|
6259902fd99f1b3c44d06715
|
class EngineSourceMapper(SourceMapper): <NEW_LINE> <INDENT> def __init__(self, scheduler): <NEW_LINE> <INDENT> self._scheduler = scheduler <NEW_LINE> <DEDENT> def _unique_dirs_for_sources(self, sources): <NEW_LINE> <INDENT> seen = set() <NEW_LINE> for source in sources: <NEW_LINE> <INDENT> source_dir = os.path.dirname(source) <NEW_LINE> if source_dir not in seen: <NEW_LINE> <INDENT> seen.add(source_dir) <NEW_LINE> yield source_dir <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def target_addresses_for_source(self, source): <NEW_LINE> <INDENT> return list(self.iter_target_addresses_for_sources([source])) <NEW_LINE> <DEDENT> def _match_source(self, source, fileset): <NEW_LINE> <INDENT> return fileset.matches(source) or matches_filespec(source, fileset.filespec) <NEW_LINE> <DEDENT> def _owns_source(self, source, legacy_target): <NEW_LINE> <INDENT> target_kwargs = legacy_target.adaptor.kwargs() <NEW_LINE> target_source = target_kwargs.get('source') <NEW_LINE> if target_source: <NEW_LINE> <INDENT> path_from_build_root = os.path.join(legacy_target.adaptor.address.spec_path, target_source) <NEW_LINE> if path_from_build_root == source: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> target_sources = target_kwargs.get('sources', []) <NEW_LINE> if target_sources and self._match_source(source, target_sources): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> target_resources = target_kwargs.get('resources') <NEW_LINE> if target_resources: <NEW_LINE> <INDENT> if isinstance(target_resources, EagerFilesetWithSpec): <NEW_LINE> <INDENT> if self._match_source(source, target_resources): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(target_resources, list): <NEW_LINE> <INDENT> resource_dep_subjects = resolve_and_parse_specs(legacy_target.adaptor.address.spec_path, target_resources) <NEW_LINE> for hydrated_targets in self._scheduler.product_request(HydratedTargets, resource_dep_subjects): <NEW_LINE> <INDENT> for hydrated_target in 
hydrated_targets.dependencies: <NEW_LINE> <INDENT> resource_sources = hydrated_target.adaptor.kwargs().get('sources') <NEW_LINE> if resource_sources and self._match_source(source, resource_sources): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise AssertionError('Could not process target_resources with type {}'.format(type(target_resources))) <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def iter_target_addresses_for_sources(self, sources): <NEW_LINE> <INDENT> sources_set = set(sources) <NEW_LINE> subjects = [AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set)] <NEW_LINE> for hydrated_targets in self._scheduler.product_request(HydratedTargets, subjects): <NEW_LINE> <INDENT> for hydrated_target in hydrated_targets.dependencies: <NEW_LINE> <INDENT> legacy_address = hydrated_target.adaptor.address <NEW_LINE> if any(LegacyAddressMapper.is_declaring_file(legacy_address, f) for f in sources_set): <NEW_LINE> <INDENT> yield legacy_address <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if any(self._owns_source(source, hydrated_target) for source in sources_set): <NEW_LINE> <INDENT> yield legacy_address
|
A v2 engine backed SourceMapper that supports pre-`BuildGraph` cache warming in the daemon.
|
6259902f96565a6dacd2d7c7
|
class IonEntry(PDEntry): <NEW_LINE> <INDENT> def __init__(self, ion, energy, name=None): <NEW_LINE> <INDENT> self.energy = energy <NEW_LINE> self.ion = ion <NEW_LINE> self.composition = ion.composition <NEW_LINE> self.name = name if name else self.ion.reduced_formula <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, d): <NEW_LINE> <INDENT> return IonEntry(Ion.from_dict(d["ion"]), d["energy"], d.get("name", None)) <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> d = {"ion": self.ion.as_dict(), "energy": self.energy, "name": self.name} <NEW_LINE> return d <NEW_LINE> <DEDENT> @property <NEW_LINE> def energy_per_atom(self): <NEW_LINE> <INDENT> return self.energy / self.composition.num_atoms <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "IonEntry : {} with energy = {:.4f}".format(self.composition, self.energy) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__()
|
Object similar to PDEntry, but contains an Ion object instead of a
Composition object.
Args:
comp: Ion object
energy: Energy for composition.
name: Optional parameter to name the entry. Defaults to the
chemical formula.
.. attribute:: name
A name for the entry. This is the string shown in the phase diagrams.
By default, this is the reduced formula for the composition, but can be
set to some other string for display purposes.
|
6259902f30c21e258be9987e
|
class SVGPage(object): <NEW_LINE> <INDENT> def __init__(self, net_regex=NET_REGEX_ALL, airwires=2, pins_to_skip=[], max_pin_count=None, context=global_context): <NEW_LINE> <INDENT> self.net_regex = re.compile(net_regex) <NEW_LINE> self.airwires = airwires <NEW_LINE> self.context = context <NEW_LINE> self.max_pin_count = max_pin_count <NEW_LINE> self.pin_count = 0 <NEW_LINE> self.pins_to_skip = pins_to_skip <NEW_LINE> self.pins_drawn = [] <NEW_LINE> self.cells_dict = {} <NEW_LINE> self.netnames_dict = collections.defaultdict(lambda: {"bits": [], "hide_name": 1}) <NEW_LINE> self.ports_dict = {} <NEW_LINE> self.net_helpers = {} <NEW_LINE> for net in self.context.net_list: <NEW_LINE> <INDENT> self.net_helpers[net] = SVGNet(net, self) <NEW_LINE> <DEDENT> self.part_helpers = {} <NEW_LINE> for part in self.context.parts_list: <NEW_LINE> <INDENT> self.part_helpers[part] = SVGPart(part, self) <NEW_LINE> <DEDENT> <DEDENT> class PageEmpty(Exception): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def write_json(self, fp): <NEW_LINE> <INDENT> self.parts_to_draw = collections.deque(self.context.parts_list) <NEW_LINE> while self.parts_to_draw: <NEW_LINE> <INDENT> if self.max_pin_count and self.pin_count > self.max_pin_count: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> part = self.parts_to_draw[0] <NEW_LINE> self.part_helpers[part].add_parts() <NEW_LINE> <DEDENT> if not self.pins_drawn: <NEW_LINE> <INDENT> raise self.PageEmpty <NEW_LINE> <DEDENT> big_dict = {"modules": {"SVG Output": { "cells": self.cells_dict, "netnames": self.netnames_dict, "ports": self.ports_dict, }}} <NEW_LINE> json.dump(big_dict, fp, indent=4) <NEW_LINE> fp.flush() <NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> with tempfile.NamedTemporaryFile("w", prefix="netlistsvg_input_", suffix=".json", delete=False) as json_file, tempfile.NamedTemporaryFile("r", prefix="netlistsvg_output_", suffix=".svg", delete=False) as netlistsvg_output: <NEW_LINE> <INDENT> self.write_json(json_file) <NEW_LINE> 
netlistsvg_command = [ "/usr/bin/env", "node", os.path.join(NETLISTSVG_LOCATION, "bin", "netlistsvg.js"), "--skin", os.path.join(NETLISTSVG_LOCATION, "lib", "analog.svg"), json_file.name, "-o", netlistsvg_output.name ] <NEW_LINE> print(netlistsvg_command) <NEW_LINE> subprocess.call(netlistsvg_command) <NEW_LINE> svg_contents = netlistsvg_output.read() <NEW_LINE> <DEDENT> svg_contents = re.sub("_node\d+", "", svg_contents) <NEW_LINE> return svg_contents
|
Represents single .svg page
|
6259902f73bcbd0ca4bcb303
|
class linux_vma_cache(linux_common.AbstractLinuxCommand): <NEW_LINE> <INDENT> def __init__(self, config, *args, **kwargs): <NEW_LINE> <INDENT> linux_common.AbstractLinuxCommand.__init__(self, config, *args, **kwargs) <NEW_LINE> self._config.add_option('UNALLOCATED', short_option = 'u', default = False, help = 'Show unallocated', action = 'store_true') <NEW_LINE> <DEDENT> def calculate(self): <NEW_LINE> <INDENT> linux_common.set_plugin_members(self) <NEW_LINE> has_owner = self.profile.obj_has_member("mm_struct", "owner") <NEW_LINE> cache = linux_slabinfo(self._config).get_kmem_cache("vm_area_struct", self._config.UNALLOCATED) <NEW_LINE> for vm in cache: <NEW_LINE> <INDENT> start = vm.vm_start <NEW_LINE> end = vm.vm_end <NEW_LINE> if has_owner and vm.vm_mm and vm.vm_mm.is_valid(): <NEW_LINE> <INDENT> task = vm.vm_mm.owner <NEW_LINE> (task_name, pid) = (task.comm, task.pid) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (task_name, pid) = ("", "") <NEW_LINE> <DEDENT> if vm.vm_file and vm.vm_file.is_valid(): <NEW_LINE> <INDENT> path = vm.vm_file.dentry.get_partial_path() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = "" <NEW_LINE> <DEDENT> yield task_name, pid, start, end, path <NEW_LINE> <DEDENT> <DEDENT> def render_text(self, outfd, data): <NEW_LINE> <INDENT> self.table_header(outfd, [("Process", "16"), ("PID", "6"), ("Start", "[addrpad]"), ("End", "[addrpad]"), ("Path", "")]) <NEW_LINE> for task_name, pid, start, end, path in data: <NEW_LINE> <INDENT> self.table_row(outfd, task_name, pid, start, end, path)
|
Gather VMAs from the vm_area_struct cache
|
6259902f0a366e3fb87dda5a
|
class Player1326State(): <NEW_LINE> <INDENT> def __init__(self, player): <NEW_LINE> <INDENT> self.player = player <NEW_LINE> self.betMultiplier = int() <NEW_LINE> <DEDENT> def currentBet(self): <NEW_LINE> <INDENT> self.player.nextBet = self.player.initialBet * self.betMultiplier <NEW_LINE> <DEDENT> def nextWon(self): <NEW_LINE> <INDENT> self.player.state = self.nextStateWin() <NEW_LINE> <DEDENT> def nextLost(self): <NEW_LINE> <INDENT> self.player.state = Player1326StateFactory.getInstance( 'zerowins', self)
|
This is the superclass of all Player1326 states.
currentBet(obj:bet): set the amount to be betted by the current
state of the bet strategy.
|
6259902f21bff66bcd723cd7
|
class ErrorTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_noMessageValidStatus(self): <NEW_LINE> <INDENT> e = error.Error(b"200") <NEW_LINE> self.assertEqual(e.message, b"OK") <NEW_LINE> <DEDENT> def test_noMessageInvalidStatus(self): <NEW_LINE> <INDENT> e = error.Error(b"InvalidCode") <NEW_LINE> self.assertEqual(e.message, None) <NEW_LINE> <DEDENT> def test_messageExists(self): <NEW_LINE> <INDENT> e = error.Error(b"200", b"My own message") <NEW_LINE> self.assertEqual(e.message, b"My own message") <NEW_LINE> <DEDENT> def test_str(self): <NEW_LINE> <INDENT> e = error.Error(b"200", b"OK") <NEW_LINE> self.assertEqual(str(e), "200 OK") <NEW_LINE> e = error.Error(200, b"OK") <NEW_LINE> self.assertEqual(str(e), "200 OK")
|
Tests for how L{Error} attributes are initialized.
|
6259902fe76e3b2f99fd9a7f
|
class DeleteSubnetPool(command.Command): <NEW_LINE> <INDENT> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(DeleteSubnetPool, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'subnet_pool', metavar='<subnet-pool>', help=_("Subnet pool to delete (name or ID)") ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> client = self.app.client_manager.network <NEW_LINE> obj = client.find_subnet_pool(parsed_args.subnet_pool) <NEW_LINE> client.delete_subnet_pool(obj)
|
Delete subnet pool
|
6259902f56b00c62f0fb3934
|
class SpeedIndexMetric(Metric): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SpeedIndexMetric, self).__init__() <NEW_LINE> self._impl = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def CustomizeBrowserOptions(cls, options): <NEW_LINE> <INDENT> options.AppendExtraBrowserArgs('--disable-infobars') <NEW_LINE> <DEDENT> def Start(self, _, tab): <NEW_LINE> <INDENT> self._impl = (VideoSpeedIndexImpl() if tab.video_capture_supported else PaintRectSpeedIndexImpl()) <NEW_LINE> self._impl.Start(tab) <NEW_LINE> <DEDENT> def Stop(self, _, tab): <NEW_LINE> <INDENT> assert self._impl, 'Must call Start() before Stop()' <NEW_LINE> assert self.IsFinished(tab), 'Must wait for IsFinished() before Stop()' <NEW_LINE> self._impl.Stop(tab) <NEW_LINE> <DEDENT> def AddResults(self, tab, results, chart_name=None): <NEW_LINE> <INDENT> index = self._impl.CalculateSpeedIndex(tab) <NEW_LINE> self._impl = None <NEW_LINE> results.AddValue(scalar.ScalarValue( results.current_page, '%s.speed_index' % chart_name, 'ms', index)) <NEW_LINE> <DEDENT> def IsFinished(self, tab): <NEW_LINE> <INDENT> return tab.HasReachedQuiescence()
|
The speed index metric is one way of measuring page load speed.
It is meant to approximate user perception of page load speed, and it
is based on the amount of time that it takes to paint to the visual
portion of the screen. It includes paint events that occur after the
onload event, and it doesn't include time loading things off-screen.
This speed index metric is based on WebPageTest.org (WPT).
For more info see: http://goo.gl/e7AH5l
|
6259902fb57a9660fecd2af6
|
class ReadConfig(): <NEW_LINE> <INDENT> def __init__(self, cfg_array): <NEW_LINE> <INDENT> self.uname = cfg_array['0']['value'] <NEW_LINE> self.email = cfg_array['1']['value'] <NEW_LINE> self.msgprefs = cfg_array['2']['value'] <NEW_LINE> self.mv_folder = cfg_array['3']['value'] <NEW_LINE> self.tv_folder = cfg_array['4']['value']
|
Returns a usable array of configuration options
:param cfg_array: raw json array from ./config.json
.uname
.email
.tv_folder
.mv_folder
|
6259902f50485f2cf55dbfef
|
class JobDecoder(json.JSONDecoder): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> env = kwargs.pop('env') <NEW_LINE> super(JobDecoder, self).__init__( object_hook=self.object_hook, *args, **kwargs ) <NEW_LINE> assert env <NEW_LINE> self.env = env <NEW_LINE> <DEDENT> def object_hook(self, obj): <NEW_LINE> <INDENT> if '_type' not in obj: <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> type_ = obj['_type'] <NEW_LINE> if type_ == 'odoo_recordset': <NEW_LINE> <INDENT> model = self.env[obj['model']] <NEW_LINE> if obj.get('uid'): <NEW_LINE> <INDENT> model = model.sudo(obj['uid']) <NEW_LINE> <DEDENT> return model.browse(obj['ids']) <NEW_LINE> <DEDENT> elif type_ == 'datetime_isoformat': <NEW_LINE> <INDENT> return dateutil.parser.parse(obj['value']) <NEW_LINE> <DEDENT> elif type_ == 'date_isoformat': <NEW_LINE> <INDENT> return dateutil.parser.parse(obj['value']).date() <NEW_LINE> <DEDENT> return obj
|
Decode json, recomposing recordsets
|
6259902fac7a0e7691f7355b
|
class UserAccountsTestMixin(object): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(UserAccountsTestMixin, self).setUp() <NEW_LINE> self.package = SourcePackageName.objects.create(name='dummy-package') <NEW_LINE> self.password = 'asdf' <NEW_LINE> self.user = User.objects.create_user( main_email='user@domain.com', password=self.password, first_name='', last_name='') <NEW_LINE> <DEDENT> def refresh_user_object(self): <NEW_LINE> <INDENT> self.user = User.objects.get(main_email=self.user.main_email) <NEW_LINE> <DEDENT> def get_login_url(self): <NEW_LINE> <INDENT> return reverse('dtracker-accounts-login') <NEW_LINE> <DEDENT> def get_profile_url(self): <NEW_LINE> <INDENT> return reverse('dtracker-accounts-profile') <NEW_LINE> <DEDENT> def get_package_url(self, package_name): <NEW_LINE> <INDENT> return reverse('dtracker-package-page', kwargs={ 'package_name': package_name, }) <NEW_LINE> <DEDENT> def create_user(self, main_email, password, associated_emails=()): <NEW_LINE> <INDENT> u = User.objects.create_user(main_email, password=password) <NEW_LINE> for associated_email in associated_emails: <NEW_LINE> <INDENT> u.emails.create(email=associated_email) <NEW_LINE> <DEDENT> return u <NEW_LINE> <DEDENT> def log_in(self, user=None, password=None): <NEW_LINE> <INDENT> if user is None: <NEW_LINE> <INDENT> user = self.user <NEW_LINE> <DEDENT> if password is None: <NEW_LINE> <INDENT> password = self.password <NEW_LINE> <DEDENT> self.get_page(self.get_login_url()) <NEW_LINE> self.input_to_element('id_username', user.main_email) <NEW_LINE> self.input_to_element('id_password', password) <NEW_LINE> self.send_enter('id_password') <NEW_LINE> <DEDENT> def log_out(self): <NEW_LINE> <INDENT> self.browser.find_element_by_id("account-logout").click() <NEW_LINE> <DEDENT> def wait(self, timeout=2): <NEW_LINE> <INDENT> return WebDriverWait(self.browser, timeout)
|
Defines some common methods for all user account tests.
|
6259902fa4f1c619b294f668
|
class SAE(Loss): <NEW_LINE> <INDENT> def loss(self, predicted: Tensor, actual: Tensor) -> float: <NEW_LINE> <INDENT> return np.sum(np.abs(predicted - actual)) <NEW_LINE> <DEDENT> def grad(self, predicted: Tensor, actual: Tensor) -> float: <NEW_LINE> <INDENT> return np.sign(actual - predicted)
|
SAE is sum absolute error
sae = |A - B|
|
6259902fa8ecb03325872291
|
class HealthHandler(RequestHandler): <NEW_LINE> <INDENT> def initialize(self, settings): <NEW_LINE> <INDENT> self._settings = settings <NEW_LINE> <DEDENT> @tornado.gen.coroutine <NEW_LINE> def get(self): <NEW_LINE> <INDENT> ok = True <NEW_LINE> if not ok: <NEW_LINE> <INDENT> raise APIServerError("Service is in the RED state") <NEW_LINE> <DEDENT> self.send_json({"ok": "true"}) <NEW_LINE> self.finish()
|
Check system"s health status.
|
6259902f8a43f66fc4bf31f9
|
class PurpleAirMonitor(StdService): <NEW_LINE> <INDENT> def __init__(self, engine, config_dict): <NEW_LINE> <INDENT> super(PurpleAirMonitor, self).__init__(engine, config_dict) <NEW_LINE> loginf("service version is %s" % WEEWX_PURPLEAIR_VERSION) <NEW_LINE> self.config_dict = config_dict.get('PurpleAirMonitor', {}) <NEW_LINE> try: <NEW_LINE> <INDENT> self.config_dict['hostname'] <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise Exception("Data will not be posted: Missing option %s" % e) <NEW_LINE> <DEDENT> self.config_dict.setdefault('port', 80) <NEW_LINE> self.config_dict.setdefault('timeout', 10) <NEW_LINE> self.config_dict.setdefault('interval', 300) <NEW_LINE> binding = self.config_dict.get('data_binding', 'purpleair_binding') <NEW_LINE> self.dbm = self.engine.db_binder.get_manager(data_binding=binding, initialize=True) <NEW_LINE> dbcol = self.dbm.connection.columnsOf(self.dbm.table_name) <NEW_LINE> dbm_dict = weewx.manager.get_manager_dict( config_dict['DataBindings'], config_dict['Databases'], binding) <NEW_LINE> memcol = [x[0] for x in dbm_dict['schema']] <NEW_LINE> if dbcol != memcol: <NEW_LINE> <INDENT> raise Exception('purpleair schema mismatch: %s != %s' % (dbcol, memcol)) <NEW_LINE> <DEDENT> self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record) <NEW_LINE> self._thread = PurpleAirMonitorDataThread(self.config_dict) <NEW_LINE> self._thread.start() <NEW_LINE> <DEDENT> def shutDown(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.dbm.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self._thread: <NEW_LINE> <INDENT> self._thread.running = False <NEW_LINE> self._thread.join() <NEW_LINE> self._thread = None <NEW_LINE> <DEDENT> <DEDENT> def new_archive_record(self, event): <NEW_LINE> <INDENT> record = self._thread.get_record() <NEW_LINE> if not record: <NEW_LINE> <INDENT> logdbg("Skipping record: empty") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> delta = math.fabs(record['dateTime'] - 
event.record['dateTime']) <NEW_LINE> if delta > self.config_dict['interval'] * 1.5: <NEW_LINE> <INDENT> logdbg("Skipping record: time difference %f too big" % delta) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.save_data(record) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def save_data(self, record): <NEW_LINE> <INDENT> self.dbm.addRecord(record)
|
Collect Purple Air air quality measurements.
|
6259902fbe8e80087fbc00ef
|
class Meta: <NEW_LINE> <INDENT> ordering = ['name'] <NEW_LINE> verbose_name = "provincia" <NEW_LINE> verbose_name_plural = "provincias"
|
Meta class.
|
6259902f91af0d3eaad3aea0
|
class IsochroneResponsePolygonProperties(object): <NEW_LINE> <INDENT> swagger_types = { 'bucket': 'int' } <NEW_LINE> attribute_map = { 'bucket': 'bucket' } <NEW_LINE> def __init__(self, bucket=None): <NEW_LINE> <INDENT> self._bucket = None <NEW_LINE> self.discriminator = None <NEW_LINE> if bucket is not None: <NEW_LINE> <INDENT> self.bucket = bucket <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def bucket(self): <NEW_LINE> <INDENT> return self._bucket <NEW_LINE> <DEDENT> @bucket.setter <NEW_LINE> def bucket(self, bucket): <NEW_LINE> <INDENT> self._bucket = bucket <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, IsochroneResponsePolygonProperties): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902f5166f23b2e24444a
|
class BigramWordCandidateProvider(object): <NEW_LINE> <INDENT> def __init__(self, corpus): <NEW_LINE> <INDENT> _bigrams = bigrams(corpus) <NEW_LINE> self._cfd = ConditionalFreqDist(_bigrams) <NEW_LINE> <DEDENT> def candidates(self, word_sequence): <NEW_LINE> <INDENT> word = word_sequence[-1] <NEW_LINE> candidates = [ candidate for (candidate, _) in self._cfd[word].most_common()] <NEW_LINE> return candidates <NEW_LINE> <DEDENT> def random_word(self): <NEW_LINE> <INDENT> return random.choice(list(self._cfd.items()))[0]
|
Provides candidate next words given a word using a bigram model.
|
6259902fd53ae8145f9194d7
|
class WrappedMesh(Mesh): <NEW_LINE> <INDENT> def init(self, mesh_with_shell, data={}): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.with_shell = mesh_with_shell <NEW_LINE> self._interpolators = {} <NEW_LINE> <DEDENT> def cut(self, f, **kwargs): <NEW_LINE> <INDENT> element = f.function_space().ufl_element() <NEW_LINE> V = FunctionSpaceBase(self, element) <NEW_LINE> result = Function(V) <NEW_LINE> interpolator = self._get_interpolator(f.function_space()) <NEW_LINE> interpolator.cut(f.vector(), result.vector()) <NEW_LINE> return result <NEW_LINE> <DEDENT> def expand(self, f, **kwargs): <NEW_LINE> <INDENT> element = f.function_space().ufl_element() <NEW_LINE> V = FunctionSpaceBase(self.with_shell, element) <NEW_LINE> result = Function(V) <NEW_LINE> interpolator = self._get_interpolator(f.function_space()) <NEW_LINE> interpolator.expand(f.vector(), result.vector()) <NEW_LINE> return result <NEW_LINE> <DEDENT> def _get_interpolator(self, V): <NEW_LINE> <INDENT> element = V.ufl_element() <NEW_LINE> key = V.element().signature() <NEW_LINE> if not self._interpolators.has_key(key): <NEW_LINE> <INDENT> Vsub = FunctionSpaceBase(self, element) <NEW_LINE> Vsuper = FunctionSpaceBase(self.with_shell, element) <NEW_LINE> self._interpolators[key] = SubMeshInterpolator(Vsuper, Vsub) <NEW_LINE> <DEDENT> return self._interpolators[key] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create(mesh, subdomain, data = {}): <NEW_LINE> <INDENT> submesh = SubMesh(mesh, mesh.domains().cell_domains(mesh), subdomain) <NEW_LINE> submesh.__class__ = WrappedMesh <NEW_LINE> submesh.init(mesh, data) <NEW_LINE> return submesh
|
This class represents a mesh with a certain submesh and defines
methods for the fast interpolation between the two meshes.
|
6259902fd10714528d69eec5
|
class TestDestinyDefinitionsDestinyUnlockDefinition(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testDestinyDefinitionsDestinyUnlockDefinition(self): <NEW_LINE> <INDENT> pass
|
DestinyDefinitionsDestinyUnlockDefinition unit test stubs
|
6259902f66673b3332c31465
|
class KeepLeft(Parser): <NEW_LINE> <INDENT> def __init__(self, left, right): <NEW_LINE> <INDENT> super(KeepLeft, self).__init__() <NEW_LINE> self.set_children([left, right]) <NEW_LINE> <DEDENT> def process(self, pos, data, ctx): <NEW_LINE> <INDENT> left, right = self.children <NEW_LINE> pos, res = left.process(pos, data, ctx) <NEW_LINE> pos, _ = right.process(pos, data, ctx) <NEW_LINE> return pos, res
|
KeepLeft takes two parsers. It requires them both to succeed but only
returns results for the first one. It consumes input for both.
.. code-block:: python
a = Char("a")
q = Char('"')
aq = a << q # like a + q except only the result of a is
# returned
val = aq('a"') # returns "a". Keeps the thing on the left of the
# <<
|
6259902f0a366e3fb87dda5c
|
class StoreHandle(c_size_t): <NEW_LINE> <INDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f"{self.__class__.__name__}({self.value})" <NEW_LINE> <DEDENT> async def close(self): <NEW_LINE> <INDENT> if not getattr(self, "_closed", False): <NEW_LINE> <INDENT> await do_call_async("askar_store_close", self) <NEW_LINE> setattr(self, "_closed", True) <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if not getattr(self, "_closed", False) and self: <NEW_LINE> <INDENT> do_call("askar_store_close", self, c_void_p())
|
Index of an active Store instance.
|
6259902f9b70327d1c57fdf9
|
class FMFAbstractCheck(AbstractCheck): <NEW_LINE> <INDENT> metadata = None <NEW_LINE> name: Optional[str] = None <NEW_LINE> fmf_metadata_path = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> if not self.metadata: <NEW_LINE> <INDENT> if not self.fmf_metadata_path: <NEW_LINE> <INDENT> logger.info( "setting self.fmf_metadata_path by class location." " DO NOT use it in this way." " Metadata are set in colin.core.loader (use proper path)" ) <NEW_LINE> self.fmf_metadata_path = os.path.dirname( inspect.getfile(self.__class__) ) <NEW_LINE> <DEDENT> self.metadata = receive_fmf_metadata( name=self.name, path=self.fmf_metadata_path ) <NEW_LINE> <DEDENT> master_class = super(FMFAbstractCheck, self) <NEW_LINE> kwargs = {} <NEW_LINE> try: <NEW_LINE> <INDENT> args_names = list(inspect.signature(master_class.__init__).parameters) <NEW_LINE> <DEDENT> except NameError: <NEW_LINE> <INDENT> args_names = inspect.getargspec(master_class.__init__).args <NEW_LINE> <DEDENT> for arg in args_names: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> kwargs[arg] = self.metadata.data[arg] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> master_class.__init__(**kwargs) <NEW_LINE> <DEDENT> except TypeError as error: <NEW_LINE> <INDENT> logger.debug( "missing argument (%s) in FMF metadata key (%s): %s", error, self.metadata.name, self.metadata.data, )
|
Abstract class for checks and loading metadata from FMF format
|
6259902f21bff66bcd723cd9
|
class SoftDTWBarycenter(EuclideanBarycenter): <NEW_LINE> <INDENT> def __init__(self, gamma=1.0, weights=None, method="L-BFGS-B", tol=1e-3, max_iter=50, init=None): <NEW_LINE> <INDENT> EuclideanBarycenter.__init__(self, weights=weights) <NEW_LINE> self.method = method <NEW_LINE> self.tol = tol <NEW_LINE> self.gamma = gamma <NEW_LINE> self.max_iter = max_iter <NEW_LINE> self._X_fit = None <NEW_LINE> if init is None: <NEW_LINE> <INDENT> self.barycenter_ = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.barycenter_ = init <NEW_LINE> <DEDENT> warnings.warn('The use of estimators to compute barycenters was ' 'deprecated in version 0.1.14 and will be removed ' 'in version 0.2. use "softdtw_barycenter()"' 'function instead.', DeprecationWarning) <NEW_LINE> <DEDENT> def _func(self, Z): <NEW_LINE> <INDENT> Z = Z.reshape(self.barycenter_.shape) <NEW_LINE> G = numpy.zeros_like(Z) <NEW_LINE> obj = 0 <NEW_LINE> for i in range(len(self._X_fit)): <NEW_LINE> <INDENT> D = SquaredEuclidean(Z, to_time_series(self._X_fit[i], remove_nans=True)) <NEW_LINE> sdtw = SoftDTW(D, gamma=self.gamma) <NEW_LINE> value = sdtw.compute() <NEW_LINE> E = sdtw.grad() <NEW_LINE> G_tmp = D.jacobian_product(E) <NEW_LINE> G += self.weights[i] * G_tmp <NEW_LINE> obj += self.weights[i] * value <NEW_LINE> <DEDENT> return obj, G.ravel() <NEW_LINE> <DEDENT> def fit(self, X): <NEW_LINE> <INDENT> self._X_fit = to_time_series_dataset(X) <NEW_LINE> self.weights = _set_weights(self.weights, self._X_fit.shape[0]) <NEW_LINE> if self.barycenter_ is None: <NEW_LINE> <INDENT> if check_equal_size(self._X_fit): <NEW_LINE> <INDENT> self.barycenter_ = EuclideanBarycenter.fit(self, self._X_fit) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resampled_X = TimeSeriesResampler( sz=self._X_fit.shape[1]).fit_transform(self._X_fit) <NEW_LINE> self.barycenter_ = EuclideanBarycenter.fit(self, resampled_X) <NEW_LINE> <DEDENT> <DEDENT> if self.max_iter > 0: <NEW_LINE> <INDENT> res = minimize(self._func, self.barycenter_.ravel(), 
method=self.method, jac=True, tol=self.tol, options=dict(maxiter=self.max_iter, disp=False)) <NEW_LINE> return res.x.reshape(self.barycenter_.shape) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.barycenter_
|
Compute barycenter (time series averaging) under the soft-DTW
geometry.
Parameters
----------
gamma: float
Regularization parameter.
Lower is less smoothed (closer to true DTW).
weights: None or array
Weights of each X[i]. Must be the same size as len(X).
method: string
Optimization method, passed to `scipy.optimize.minimize`.
Default: L-BFGS.
tol: float
Tolerance of the method used.
max_iter: int
Maximum number of iterations.
init: array or None (default: None)
Initial barycenter to start from for the optimization process.
If `None`, euclidean barycenter is used as a starting point.
.. deprecated:: 0.1.15
The use of estimators to compute barycenters was
deprecated in version 0.1.15 and has been removed
in version 0.2. Use
:func:`tslearn.barycenters.softdtw_barycenter` function
instead.
|
6259902fd6c5a102081e319b
|
class DropPipeline(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.re_drop = re.compile(r'redirectUrl') <NEW_LINE> <DEDENT> def process_item(self, item, spider): <NEW_LINE> <INDENT> if item['content']: <NEW_LINE> <INDENT> if self.re_drop.search(item['content']): <NEW_LINE> <INDENT> raise DropItem('商品id: {id} 已下架'.format(id=item['id'])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return item
|
Item pipeline that drops scraped items whose content matches
'redirectUrl', which indicates the product page has been delisted.
|
6259902f1d351010ab8f4b8d
|
class ResultNVT(ScanConfigNVT): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> default_resolver = text_resolver <NEW_LINE> <DEDENT> version = graphene.String(description='Version of the NVT used in the scan') <NEW_LINE> @staticmethod <NEW_LINE> def resolve_version(root, _info): <NEW_LINE> <INDENT> return get_text_from_element(root, 'version')
|
NVT to which result applies.
|
6259902fec188e330fdf9908
|
class DefaultNumericArgs(NumericArgsBuilder): <NEW_LINE> <INDENT> def __init__(self, data=None): <NEW_LINE> <INDENT> super(DefaultNumericArgs, self).__init__(data) <NEW_LINE> self.set_contribution('all') <NEW_LINE> self.set_electron_ion_collisions(True) <NEW_LINE> self.set_force_viscosity_regime(0) <NEW_LINE> self.set_eparr(0)
|
Default numerical arguments.
|
6259902fa4f1c619b294f66a
|
class TestBC(unittest.TestCase): <NEW_LINE> <INDENT> def testBoundCond(self): <NEW_LINE> <INDENT> self.assertFalse(boundary.bcsImposed(1)) <NEW_LINE> <DEDENT> def testSinglePoint(self): <NEW_LINE> <INDENT> boundary.addSinglePointBC(17, 0.0) <NEW_LINE> self.assertTrue(boundary.singlePointBC(17)) <NEW_LINE> self.assertFalse(boundary.bcsImposed(17)) <NEW_LINE> self.assertAlmostEqual(boundary.valueForSinglePointBC(17), 0.0, delta = 1e-12) <NEW_LINE> vertex = boundary.vertexForSinglePointBC(17) <NEW_LINE> self.assertEqual(vertex,4294967295) <NEW_LINE> <DEDENT> def testZeroMean(self): <NEW_LINE> <INDENT> boundary.addZeroMeanConstraint(vtest) <NEW_LINE> self.assertTrue(boundary.imposeZeroMeanConstraint(vtest.ID())) <NEW_LINE> boundary.removeZeroMeanConstraint(vtest.ID()) <NEW_LINE> self.assertFalse(boundary.imposeZeroMeanConstraint(vtest.ID())) <NEW_LINE> <DEDENT> """addDirichlet, getDirichletBC, getSpatiallyFilteredFunctionForDirichletBC""" <NEW_LINE> def testDirichlet(self): <NEW_LINE> <INDENT> f = Function.Function_xn(1) <NEW_LINE> boundary.addDirichlet(vflux, SpatialFilter.SpatialFilter_allSpace(), f) <NEW_LINE> self.assertAlmostEqual(boundary.getSpatiallyFilteredFunctionForDirichletBC(vflux.ID()).evaluate(1,0), 1, delta = 1e-12) <NEW_LINE> dirichlet = boundary.getDirichletBC(vflux.ID()) <NEW_LINE> self.assertTrue(dirichlet[0].matchesPoint(1.0,1.0)) <NEW_LINE> self.assertAlmostEqual(dirichlet[1].evaluate(1.0,3.0), 1, delta = 1e-12)
|
test BC()
|
6259902f96565a6dacd2d7c9
|
class AutoRestParameterizedHostTestClient(object): <NEW_LINE> <INDENT> def __init__( self, credentials, host, accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, filepath=None): <NEW_LINE> <INDENT> self.config = AutoRestParameterizedHostTestClientConfiguration(credentials, host, accept_language, long_running_operation_retry_timeout, generate_client_request_id, filepath) <NEW_LINE> self._client = ServiceClient(self.config.credentials, self.config) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self.paths = PathsOperations( self._client, self.config, self._serialize, self._deserialize)
|
Test Infrastructure for AutoRest
:ivar config: Configuration for client.
:vartype config: AutoRestParameterizedHostTestClientConfiguration
:ivar paths: Paths operations
:vartype paths: .operations.PathsOperations
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param host: A string value that is used as a global part of the
parameterized host
:type host: str
:param accept_language: Gets or sets the preferred language for the
response.
:type accept_language: str
:param long_running_operation_retry_timeout: Gets or sets the retry
timeout in seconds for Long Running Operations. Default value is 30.
:type long_running_operation_retry_timeout: int
:param generate_client_request_id: When set to true a unique
x-ms-client-request-id value is generated and included in each request.
Default is true.
:type generate_client_request_id: bool
:param str filepath: Existing config
|
6259902f15baa7234946300e
|
class MeasDCVoltageTask(InstrumentTask): <NEW_LINE> <INDENT> wait_time = Float().tag(pref=True) <NEW_LINE> database_entries = set_default({'voltage': 1.0}) <NEW_LINE> wait = set_default({'activated': True, 'wait': ['instr']}) <NEW_LINE> def perform(self): <NEW_LINE> <INDENT> sleep(self.wait_time) <NEW_LINE> value = self.driver.read_voltage_dc() <NEW_LINE> self.write_in_database('voltage', value)
|
Measure a dc voltage.
Wait for any parallel operation before execution and then wait the
specified time before performing the measure.
|
6259902f91af0d3eaad3aea2
|
class VirusForm(URLRedirectBaseForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Virus <NEW_LINE> fields = ['sid', 'tax_id', 'subtype', "isolation_country", "collection_date", "strain", "link_db", "comment"]
|
Form for virus.
collection_date = forms.DateField(widget=forms.TextInput(attrs=
{
'class': 'datepicker'
}))
|
6259902f30c21e258be99882
|
@urls.register <NEW_LINE> class Images(generic.View): <NEW_LINE> <INDENT> url_regex = r'glance/images/$' <NEW_LINE> @rest_utils.ajax() <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> filters, kwargs = rest_utils.parse_filters_kwargs(request, CLIENT_KEYWORDS) <NEW_LINE> images, has_more_data, has_prev_data = api.glance.image_list_detailed( request, filters=filters, **kwargs) <NEW_LINE> return { 'items': [i.to_dict() for i in images], 'has_more_data': has_more_data, 'has_prev_data': has_prev_data, } <NEW_LINE> <DEDENT> @rest_utils.post2data <NEW_LINE> @csrf_exempt <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> form = UploadObjectForm(request.DATA, request.FILES) <NEW_LINE> if not form.is_valid(): <NEW_LINE> <INDENT> raise rest_utils.AjaxError(500, 'Invalid request') <NEW_LINE> <DEDENT> data = form.clean() <NEW_LINE> meta = _create_image_metadata(request.DATA) <NEW_LINE> meta['data'] = data['data'] <NEW_LINE> image = api.glance.image_create(request, **meta) <NEW_LINE> return rest_utils.CreatedResponse( '/api/glance/images/%s' % image.name, image.to_dict() ) <NEW_LINE> <DEDENT> @rest_utils.ajax(data_required=True) <NEW_LINE> def put(self, request): <NEW_LINE> <INDENT> meta = _create_image_metadata(request.DATA) <NEW_LINE> if request.DATA.get('image_url'): <NEW_LINE> <INDENT> if request.DATA.get('import_data'): <NEW_LINE> <INDENT> meta['copy_from'] = request.DATA.get('image_url') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> meta['location'] = request.DATA.get('image_url') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> meta['data'] = request.DATA.get('data') <NEW_LINE> <DEDENT> image = api.glance.image_create(request, **meta) <NEW_LINE> return rest_utils.CreatedResponse( '/api/glance/images/%s' % image.name, image.to_dict() )
|
API for Glance images.
|
6259902f5166f23b2e24444c
|
class Linear(equations_scientific.LinearScientific, equations_framework.LinearFramework, TemporalApplicableEquation): <NEW_LINE> <INDENT> pass
|
This class brings together the scientific and framework methods that are
associated with the Linear datatypes.
::
LinearData
|
/ \
LinearFramework LinearScientific
\ /
|
Linear
|
6259902f287bf620b6272c5d
|
class Scalar: <NEW_LINE> <INDENT> def __init__(self, name: str, schema_name: str = "default") -> None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self._implementation = None <NEW_LINE> self._schema_name = schema_name <NEW_LINE> <DEDENT> def bake(self, schema: "GraphQLSchema") -> None: <NEW_LINE> <INDENT> if not self._implementation: <NEW_LINE> <INDENT> raise MissingImplementation( f"No implementation given for scalar < {self.name} >" ) <NEW_LINE> <DEDENT> scalar = schema.find_scalar(self.name) <NEW_LINE> if not scalar: <NEW_LINE> <INDENT> raise UnknownScalarDefinition( f"Unknown Scalar Definition {self.name}" ) <NEW_LINE> <DEDENT> scalar.coerce_output = self._implementation.coerce_output <NEW_LINE> scalar.coerce_input = self._implementation.coerce_input <NEW_LINE> scalar.parse_literal = self._implementation.parse_literal <NEW_LINE> <DEDENT> def __call__(self, implementation: Any) -> Any: <NEW_LINE> <INDENT> if isclass(implementation): <NEW_LINE> <INDENT> self._implementation = implementation() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._implementation = implementation <NEW_LINE> <DEDENT> SchemaRegistry.register_scalar(self._schema_name, self) <NEW_LINE> return implementation
|
This decorator allows you to link a GraphQL Scalar to a Scalar class.
For example, for the following SDL:
scalar DateTime
Use the Scalar decorator the following way:
@Scalar("DateTime")
class ScalarDateTime:
def coerce_output(self, value):
return value.isoformat()
def coerce_input(self, value):
return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S")
def parse_literal(self, ast):
if not isinstance(ast, StringValueNode):
return UNDEFINED_VALUE
try:
return datetime.strptime(ast.value, "%Y-%m-%dT%H:%M:%S")
except Exception:
pass
return UNDEFINED_VALUE
|
6259902f66673b3332c31467
|
class BamAutoQcReport(Step): <NEW_LINE> <INDENT> spec = { "name": "BamAutoQcReport", "version": "0.1", "descr": [ "Create an HTML report based on the auto_qc results" ], "args": { "inputs": [ { "name" : "input_files", "type" : "file", "descr" : "the auto_qc reports", } ], "outputs": [ { "name" : "html_report", "type" : "file", "value" : "report.html", "descr" : "the html reports", }, { "name" : "csv_report", "type" : "file", "value" : "report.csv", "descr" : "the csv reports", } ] } } <NEW_LINE> def process(self): <NEW_LINE> <INDENT> if type(self.input_files) != list: <NEW_LINE> <INDENT> self.input_files = [self.input_files] <NEW_LINE> <DEDENT> qc_info_list = [] <NEW_LINE> qc_keys = ['file'] <NEW_LINE> pattern = re.compile('^\s*(PASS|FAIL)\s+(\S+)[\s:]+([\d\.None]+)\s+\([\d\.]+\)') <NEW_LINE> for fqc in self.input_files: <NEW_LINE> <INDENT> qc_info = { 'file' : os.path.basename(fqc).split('.')[0], 'path' : fqc } <NEW_LINE> with open(fqc) as fh: <NEW_LINE> <INDENT> lines = fh.readlines() <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> m = re.match(pattern, line) <NEW_LINE> if m: <NEW_LINE> <INDENT> key = m.group(2) <NEW_LINE> qc_info[key] = m.group(1) <NEW_LINE> if not key in qc_keys: <NEW_LINE> <INDENT> qc_keys.append(key) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> m = re.match('RESULT\s?:\s?(.*)', line) <NEW_LINE> if m: <NEW_LINE> <INDENT> qc_info['result'] = m.group(1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> qc_info_list.append(qc_info) <NEW_LINE> <DEDENT> with open(self.csv_report, "w") as csv_fh: <NEW_LINE> <INDENT> csv_fh.write(','.join(qc_keys)) <NEW_LINE> csv_fh.write('\n') <NEW_LINE> for entry in qc_info_list: <NEW_LINE> <INDENT> for key in qc_keys: <NEW_LINE> <INDENT> if key == 'file': <NEW_LINE> <INDENT> csv_fh.write(entry['path']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> csv_fh.write(entry[key]) <NEW_LINE> <DEDENT> csv_fh.write(',') <NEW_LINE> <DEDENT> csv_fh.write('\n') <NEW_LINE> <DEDENT> <DEDENT> with open(self.html_report, 
"w") as html_fh: <NEW_LINE> <INDENT> code_root = os.path.abspath(os.path.dirname(__file__)) <NEW_LINE> env = jinja2.Environment(loader=jinja2.FileSystemLoader(code_root)) <NEW_LINE> t = env.get_template('bamautoqcreport.html') <NEW_LINE> html_fh.write(t.render(keys=qc_keys, table=qc_info_list))
|
Create the auto_qc final report
|
6259902fd18da76e235b7989
|
class Batch(object): <NEW_LINE> <INDENT> def __init__(self, extension='mp4', resolution='highest', path='.'): <NEW_LINE> <INDENT> self.extension = extension <NEW_LINE> self.resolution = resolution <NEW_LINE> self.path = path <NEW_LINE> <DEDENT> def download_by_url_list(self, url_list): <NEW_LINE> <INDENT> for url in url_list: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> video = self.fetch_video_by_url(url) <NEW_LINE> video.download(self.path) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> path = os.path.normpath(self.path) <NEW_LINE> os.remove(path) <NEW_LINE> break <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Failed to download {0}. {1}".format(url, e)) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def download_by_id_list(self, id_list): <NEW_LINE> <INDENT> url_list = self.transfer_id_list_to_url_list(id_list) <NEW_LINE> self.download_by_url_list(url_list) <NEW_LINE> <DEDENT> def merge_video_by_url_list(self, url_list): <NEW_LINE> <INDENT> path = self.path <NEW_LINE> if(os.path.isdir(os.path.normpath(path))): <NEW_LINE> <INDENT> path += "\\merged_video" <NEW_LINE> <DEDENT> for url in url_list: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> video = self.fetch_video_by_url(url) <NEW_LINE> video.put_to_file(path) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> path = os.path.normpath(path) <NEW_LINE> os.remove(path) <NEW_LINE> break <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Failed to download {0}. 
{1}".format(url, e)) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def merge_video_by_id_list(self, id_list): <NEW_LINE> <INDENT> url_list = self.transfer_id_list_to_url_list(id_list) <NEW_LINE> self.merge_video_by_url_list(url_list) <NEW_LINE> <DEDENT> def fetch_video_by_url(self, url): <NEW_LINE> <INDENT> yt = YouTube(url) <NEW_LINE> if(self.resolution == 'highest' or self.resolution == 'lowest'): <NEW_LINE> <INDENT> video_list = yt.filter(self.extension) <NEW_LINE> if(len(video_list) == 0): <NEW_LINE> <INDENT> raise DoesNotExist("No videos met this criteria.") <NEW_LINE> <DEDENT> if(self.resolution == 'highest'): <NEW_LINE> <INDENT> video = video_list[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> video = video_list[0] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> result = [] <NEW_LINE> for v in yt.get_videos(): <NEW_LINE> <INDENT> if self.extension and v.extension != self.extension: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif self.resolution and v.resolution != self.resolution: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append(v) <NEW_LINE> <DEDENT> <DEDENT> matches = len(result) <NEW_LINE> if matches <= 0: <NEW_LINE> <INDENT> raise DoesNotExist("No videos met this criteria.") <NEW_LINE> <DEDENT> elif matches == 1: <NEW_LINE> <INDENT> video = result[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise MultipleObjectsReturned("Multiple videos met this criteria.") <NEW_LINE> <DEDENT> <DEDENT> return video <NEW_LINE> <DEDENT> def transfer_id_list_to_url_list(self, id_list): <NEW_LINE> <INDENT> url_list = [] <NEW_LINE> for id in id_list: <NEW_LINE> <INDENT> url_list.append("http://www.youtube.com/watch?v=" + id) <NEW_LINE> <DEDENT> return url_list
|
Class representation of a batch operation
|
6259902fd6c5a102081e319d
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.