Dataset schema (column, feature type, observed lengths / classes):

  nwo                 stringlengths   5 – 106
  sha                 stringlengths   40 – 40
  path                stringlengths   4 – 174
  language            stringclasses   1 value (all rows: "python")
  identifier          stringlengths   1 – 140
  parameters          stringlengths   0 – 87.7k
  argument_list       stringclasses   1 value
  return_statement    stringlengths   0 – 426k
  docstring           stringlengths   0 – 64.3k
  docstring_summary   stringlengths   0 – 26.3k
  docstring_tokens    list
  function            stringlengths   18 – 4.83M
  function_tokens     list
  url                 stringlengths   83 – 304
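The rows below follow this schema, one field per line. As a minimal sketch of working with such a dump (assuming it has been exported as JSON Lines; the file name here is hypothetical), the records can be loaded and iterated with the Hugging Face datasets library:

import datasets

# Hypothetical file name; any JSON-Lines export of the rows below works.
ds = datasets.load_dataset("json", data_files="code_functions.jsonl", split="train")

for row in ds.select(range(3)):
    # Each row carries the repo (nwo), commit sha, file path and the function itself.
    print(row["nwo"], row["path"], row["identifier"])
    print(row["function"][:120], "...")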
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/functions/elementary/complexes.py
python
sign._eval_is_zero
(self)
return self.args[0].is_zero
[]
def _eval_is_zero(self):
    return self.args[0].is_zero
[ "def", "_eval_is_zero", "(", "self", ")", ":", "return", "self", ".", "args", "[", "0", "]", ".", "is_zero" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/functions/elementary/complexes.py#L296-L297
SteveDoyle2/pyNastran
eda651ac2d4883d95a34951f8a002ff94f642a1a
pyNastran/op2/tables/geom/dynamics.py
python
DYNAMICS._read_tload1_nx_24
(self, card_obj, data: bytes, n: int)
return n, dloads
Record – TLOAD1(7107,71,138) Word Name Type Description 1 SID I Load set identification number 2 DAREA I DAREA Bulk Data entry identification number 3 DELAYI I DELAY Bulk Data entry identification number 4 TYPE I Nature of the dynamic excitation 5 TID I Identification number of TABLEDi entry that gives F(t) 6 DELAYR RS If DELAYI = 0, constant value for delay
Record – TLOAD1(7107,71,138)
[ "Record", "–", "TLOAD1", "(", "7107", "71", "138", ")" ]
def _read_tload1_nx_24(self, card_obj, data: bytes, n: int) -> Tuple[int, List[TLOAD1]]:
    """
    Record – TLOAD1(7107,71,138)

    Word Name   Type Description
    1    SID    I    Load set identification number
    2    DAREA  I    DAREA Bulk Data entry identification number
    3    DELAYI I    DELAY Bulk Data entry identification number
    4    TYPE   I    Nature of the dynamic excitation
    5    TID    I    Identification number of TABLEDi entry that gives F(t)
    6    DELAYR RS   If DELAYI = 0, constant value for delay
    """
    op2 = self.op2
    ntotal = 24 * self.factor  # 6*4
    nentries = (len(data) - n) // ntotal
    assert (len(data) - n) % ntotal == 0
    assert nentries > 0, nentries
    dloads = []
    struc = Struct(mapfmt(op2._endian + b'5i f', self.size))
    for unused_i in range(nentries):
        edata = data[n:n+ntotal]
        out = struc.unpack(edata)
        sid, darea, delayi, load_type, tid, delayr = out
        assert sid > 0, sid
        if op2.is_debug_file:
            op2.binary_debug.write('TLOAD1=%s\n' % str(out))
        delay = delayi
        if delayi == 0:
            delay = delayr
        dload = TLOAD1(sid, darea, tid, delay=delay, Type=load_type)
        #dload.validate()
        dloads.append(dload)
        n += ntotal
    return n, dloads
[ "def", "_read_tload1_nx_24", "(", "self", ",", "card_obj", ",", "data", ":", "bytes", ",", "n", ":", "int", ")", "->", "Tuple", "[", "int", ",", "List", "[", "TLOAD1", "]", "]", ":", "op2", "=", "self", ".", "op2", "ntotal", "=", "24", "*", "self...
https://github.com/SteveDoyle2/pyNastran/blob/eda651ac2d4883d95a34951f8a002ff94f642a1a/pyNastran/op2/tables/geom/dynamics.py#L2067-L2102
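The docstring above describes a fixed 24-byte binary record: five 4-byte integers followed by one 4-byte float. A standalone sketch of that layout with the standard struct module (little-endian byte order and the field values are assumptions for illustration):

import struct

# Five ints (SID, DAREA, DELAYI, TYPE, TID) then one float (DELAYR) = 24 bytes.
tload1 = struct.Struct('<5if')
raw = struct.pack('<5if', 7, 100, 0, 1, 12, 0.25)  # made-up field values

sid, darea, delayi, load_type, tid, delayr = tload1.unpack(raw)
delay = delayr if delayi == 0 else delayi  # same DELAYI/DELAYR rule as the reader above
print(sid, delay)  # 7 0.25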
jhpyle/docassemble
b90c84e57af59aa88b3404d44d0b125c70f832cc
docassemble_base/docassemble/base/mako/codegen.py
python
_GenerateRenderMethod.write_def_decl
(self, node, identifiers)
write a locally-available callable referencing a top-level def
write a locally-available callable referencing a top-level def
[ "write", "a", "locally", "-", "available", "callable", "referencing", "a", "top", "-", "level", "def" ]
def write_def_decl(self, node, identifiers):
    """write a locally-available callable referencing a top-level def"""
    funcname = node.funcname
    namedecls = node.get_argument_expressions()
    nameargs = node.get_argument_expressions(as_call=True)
    if not self.in_def and (
            len(self.identifiers.locally_assigned) > 0 or
            len(self.identifiers.argument_declared) > 0):
        nameargs.insert(0, 'context._locals(__M_locals)')
    else:
        nameargs.insert(0, 'context')
    self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
    self.printer.writeline(
        "return render_%s(%s)" % (funcname, ",".join(nameargs)))
    self.printer.writeline(None)
[ "def", "write_def_decl", "(", "self", ",", "node", ",", "identifiers", ")", ":", "funcname", "=", "node", ".", "funcname", "namedecls", "=", "node", ".", "get_argument_expressions", "(", ")", "nameargs", "=", "node", ".", "get_argument_expressions", "(", "as_c...
https://github.com/jhpyle/docassemble/blob/b90c84e57af59aa88b3404d44d0b125c70f832cc/docassemble_base/docassemble/base/mako/codegen.py#L584-L599
usnistgov/fipy
6809b180b41a11de988a48655575df7e142c93b9
fipy/matrices/trilinosMatrix.py
python
_TrilinosMatrix._getDistributedMatrix
(self)
Returns an equivalent Trilinos matrix, but redistributed evenly over all processors.
Returns an equivalent Trilinos matrix, but redistributed evenly over all processors.
[ "Returns", "an", "equivalent", "Trilinos", "matrix", "but", "redistributed", "evenly", "over", "all", "processors", "." ]
def _getDistributedMatrix(self):
    """
    Returns an equivalent Trilinos matrix, but redistributed evenly over
    all processors.
    """
    if self.comm.NumProc() == 1:
        return self.matrix  # No redistribution necessary in serial mode
    else:
        ## self._matrix.GlobalAssemble()
        totalElements = self.matrix.NumGlobalRows()
        # Epetra.Map(numGlobalElements, indexBase, comm)
        # implicit number of elements per processor
        DistributedMap = Epetra.Map(totalElements, 0, self.comm)
        RootToDist = Epetra.Import(DistributedMap, self.rangeMap)
        DistMatrix = Epetra.CrsMatrix(Epetra.Copy, DistributedMap, (self.bandwidth*3) // 2)
        DistMatrix.Import(self.matrix, RootToDist, Epetra.Insert)
        return DistMatrix
[ "def", "_getDistributedMatrix", "(", "self", ")", ":", "if", "self", ".", "comm", ".", "NumProc", "(", ")", "==", "1", ":", "return", "self", ".", "matrix", "# No redistribution necessary in serial mode", "else", ":", "## self._matrix.GlobalAssemble()", "...
https://github.com/usnistgov/fipy/blob/6809b180b41a11de988a48655575df7e142c93b9/fipy/matrices/trilinosMatrix.py#L506-L527
marinho/geraldo
868ebdce67176d9b6205cddc92476f642c783fff
site/newsite/django_1_0/django/forms/formsets.py
python
BaseFormSet.add_fields
(self, form, index)
A hook for adding extra fields on to each form instance.
A hook for adding extra fields on to each form instance.
[ "A", "hook", "for", "adding", "extra", "fields", "on", "to", "each", "form", "instance", "." ]
def add_fields(self, form, index):
    """A hook for adding extra fields on to each form instance."""
    if self.can_order:
        # Only pre-fill the ordering field for initial forms.
        if index < self._initial_form_count:
            form.fields[ORDERING_FIELD_NAME] = IntegerField(label='Order', initial=index+1, required=False)
        else:
            form.fields[ORDERING_FIELD_NAME] = IntegerField(label='Order', required=False)
    if self.can_delete:
        form.fields[DELETION_FIELD_NAME] = BooleanField(label='Delete', required=False)
[ "def", "add_fields", "(", "self", ",", "form", ",", "index", ")", ":", "if", "self", ".", "can_order", ":", "# Only pre-fill the ordering field for initial forms.", "if", "index", "<", "self", ".", "_initial_form_count", ":", "form", ".", "fields", "[", "ORDERIN...
https://github.com/marinho/geraldo/blob/868ebdce67176d9b6205cddc92476f642c783fff/site/newsite/django_1_0/django/forms/formsets.py#L236-L245
DLR-RM/stable-baselines3
e9a8979022d7005560d43b7a9c1dc1ba85f7989a
stable_baselines3/common/vec_env/subproc_vec_env.py
python
SubprocVecEnv.set_attr
(self, attr_name: str, value: Any, indices: VecEnvIndices = None)
Set attribute inside vectorized environments (see base class).
Set attribute inside vectorized environments (see base class).
[ "Set", "attribute", "inside", "vectorized", "environments", "(", "see", "base", "class", ")", "." ]
def set_attr(self, attr_name: str, value: Any, indices: VecEnvIndices = None) -> None:
    """Set attribute inside vectorized environments (see base class)."""
    target_remotes = self._get_target_remotes(indices)
    for remote in target_remotes:
        remote.send(("set_attr", (attr_name, value)))
    for remote in target_remotes:
        remote.recv()
[ "def", "set_attr", "(", "self", ",", "attr_name", ":", "str", ",", "value", ":", "Any", ",", "indices", ":", "VecEnvIndices", "=", "None", ")", "->", "None", ":", "target_remotes", "=", "self", ".", "_get_target_remotes", "(", "indices", ")", "for", "rem...
https://github.com/DLR-RM/stable-baselines3/blob/e9a8979022d7005560d43b7a9c1dc1ba85f7989a/stable_baselines3/common/vec_env/subproc_vec_env.py#L163-L169
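A minimal usage sketch, assuming Gymnasium's CartPole is installed; `my_flag` is a made-up attribute name used only to show the broadcast/subset behavior:

from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import SubprocVecEnv

if __name__ == "__main__":  # subprocess start methods need an import guard
    env = make_vec_env("CartPole-v1", n_envs=4, vec_env_cls=SubprocVecEnv)
    # Broadcast an attribute to every worker, or target a subset via indices.
    env.set_attr("my_flag", True)              # hypothetical attribute
    env.set_attr("my_flag", False, indices=[0])
    print(env.get_attr("my_flag"))             # [False, True, True, True]
    env.close()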
hudora/pyShipping
089c502db5d79182dbd69f0b95c475e0eddda355
pyshipping/shipment.py
python
AbstractLieferung.export_karton_gewichte
(self)
return ret
Returns the weights of the estimated number of packages which will be shipped in gramms.
Returns the weights of the estimated number of packages which will be shipped in gramms.
[ "Returns", "the", "weights", "of", "the", "estimated", "number", "of", "packages", "which", "will", "be", "shipped", "in", "gramms", "." ]
def export_karton_gewichte(self):
    """Returns the weights of the estimated number of packages which will be shipped in gramms."""
    ret = []
    for box in self.itemlist:
        ret.extend(box.export_karton_gewichte)
    return ret
[ "def", "export_karton_gewichte", "(", "self", ")", ":", "ret", "=", "[", "]", "for", "box", "in", "self", ".", "itemlist", ":", "ret", ".", "extend", "(", "box", ".", "export_karton_gewichte", ")", "return", "ret" ]
https://github.com/hudora/pyShipping/blob/089c502db5d79182dbd69f0b95c475e0eddda355/pyshipping/shipment.py#L193-L198
entropy1337/infernal-twin
10995cd03312e39a48ade0f114ebb0ae3a711bb8
Modules/build/reportlab/src/reportlab/pdfgen/textobject.py
python
_PDFColorSetter.setStrokeColor
(self, aColor, alpha=None)
Takes a color object, allowing colors to be referred to by name
Takes a color object, allowing colors to be referred to by name
[ "Takes", "a", "color", "object", "allowing", "colors", "to", "be", "referred", "to", "by", "name" ]
def setStrokeColor(self, aColor, alpha=None):
    """Takes a color object, allowing colors to be referred to by name"""
    if self._enforceColorSpace:
        aColor = self._enforceColorSpace(aColor)
    if isinstance(aColor, CMYKColor):
        d = aColor.density
        c, m, y, k = (d*aColor.cyan, d*aColor.magenta, d*aColor.yellow, d*aColor.black)
        self._strokeColorObj = aColor
        name = self._checkSeparation(aColor)
        if name:
            self._code.append('/%s CS %s SCN' % (name, fp_str(d)))
        else:
            self._code.append('%s K' % fp_str(c, m, y, k))
    elif isinstance(aColor, Color):
        rgb = (aColor.red, aColor.green, aColor.blue)
        self._strokeColorObj = aColor
        self._code.append('%s RG' % fp_str(rgb))
    elif isinstance(aColor, (tuple, list)):
        l = len(aColor)
        if l == 3:
            self._strokeColorObj = aColor
            self._code.append('%s RG' % fp_str(aColor))
        elif l == 4:
            self._strokeColorObj = aColor
            self._code.append('%s K' % fp_str(aColor))
        else:
            raise ValueError('Unknown color %r' % aColor)
    elif isStr(aColor):
        self.setStrokeColor(toColor(aColor))
    else:
        raise ValueError('Unknown color %r' % aColor)
    if alpha is not None:
        self.setStrokeAlpha(alpha)
    elif getattr(aColor, 'alpha', None) is not None:
        self.setStrokeAlpha(aColor.alpha)
[ "def", "setStrokeColor", "(", "self", ",", "aColor", ",", "alpha", "=", "None", ")", ":", "if", "self", ".", "_enforceColorSpace", ":", "aColor", "=", "self", ".", "_enforceColorSpace", "(", "aColor", ")", "if", "isinstance", "(", "aColor", ",", "CMYKColor...
https://github.com/entropy1337/infernal-twin/blob/10995cd03312e39a48ade0f114ebb0ae3a711bb8/Modules/build/reportlab/src/reportlab/pdfgen/textobject.py#L101-L135
HymanLiuTS/flaskTs
286648286976e85d9b9a5873632331efcafe0b21
flasky/lib/python2.7/site-packages/sqlalchemy/orm/events.py
python
MapperEvents.before_delete
(self, mapper, connection, target)
Receive an object instance before a DELETE statement is emitted corresponding to that instance. This event is used to emit additional SQL statements on the given connection as well as to perform application specific bookkeeping related to a deletion event. The event is often called for a batch of objects of the same class before their DELETE statements are emitted at once in a later step. .. warning:: Mapper-level flush events only allow **very limited operations**, on attributes local to the row being operated upon only, as well as allowing any SQL to be emitted on the given :class:`.Connection`. **Please read fully** the notes at :ref:`session_persistence_mapper` for guidelines on using these methods; generally, the :meth:`.SessionEvents.before_flush` method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. :param connection: the :class:`.Connection` being used to emit DELETE statements for this instance. This provides a handle into the current transaction on the target database specific to this instance. :param target: the mapped instance being deleted. If the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. .. seealso:: :ref:`session_persistence_events`
Receive an object instance before a DELETE statement is emitted corresponding to that instance.
[ "Receive", "an", "object", "instance", "before", "a", "DELETE", "statement", "is", "emitted", "corresponding", "to", "that", "instance", "." ]
def before_delete(self, mapper, connection, target):
    """Receive an object instance before a DELETE statement
    is emitted corresponding to that instance.

    This event is used to emit additional SQL statements on
    the given connection as well as to perform application
    specific bookkeeping related to a deletion event.

    The event is often called for a batch of objects of the
    same class before their DELETE statements are emitted at
    once in a later step.

    .. warning::

        Mapper-level flush events only allow **very limited operations**,
        on attributes local to the row being operated upon only, as
        well as allowing any SQL to be emitted on the given
        :class:`.Connection`.  **Please read fully** the notes
        at :ref:`session_persistence_mapper` for guidelines on using
        these methods; generally, the :meth:`.SessionEvents.before_flush`
        method should be preferred for general on-flush changes.

    :param mapper: the :class:`.Mapper` which is the target
     of this event.
    :param connection: the :class:`.Connection` being used to
     emit DELETE statements for this instance.  This
     provides a handle into the current transaction on the
     target database specific to this instance.
    :param target: the mapped instance being deleted.  If
     the event is configured with ``raw=True``, this will
     instead be the :class:`.InstanceState` state-management
     object associated with the instance.
    :return: No return value is supported by this event.

    .. seealso::

        :ref:`session_persistence_events`

    """
[ "def", "before_delete", "(", "self", ",", "mapper", ",", "connection", ",", "target", ")", ":" ]
https://github.com/HymanLiuTS/flaskTs/blob/286648286976e85d9b9a5873632331efcafe0b21/flasky/lib/python2.7/site-packages/sqlalchemy/orm/events.py#L1033-L1071
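As the docstring notes, this is an event hook rather than a method to call directly. A minimal sketch of wiring it up with SQLAlchemy's event API; the `User` mapped class and `audit_table` are hypothetical names for illustration:

from sqlalchemy import event

@event.listens_for(User, "before_delete")  # User is a hypothetical mapped class
def _audit_delete(mapper, connection, target):
    # Only limited operations are allowed here; emitting SQL on the
    # passed-in connection is one of them.
    connection.execute(
        audit_table.insert().values(deleted_id=target.id)  # hypothetical audit table
    )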
neulab/compare-mt
df40d8db4a4f19e755dab0c888b984899f1cd32b
compare_mt/compare_ll_main.py
python
print_word_likelihood_report
(ref, lls, bucket_type='freq', bucket_cutoffs=None, freq_count_file=None, freq_corpus_file=None, label_corpus=None, label_set=None, case_insensitive=False)
Print a report comparing the word log likelihood. Args: ref: the ref of words over which the likelihoods are computed lls: likelihoods corresponding to each word in ref from the systems bucket_type: A string specifying the way to bucket words together to calculate average likelihood bucket_cutoffs: The boundaries between buckets, specified as a colon-separated string. freq_corpus_file: When using "freq" as a bucketer, which corpus to use to calculate frequency. freq_count_file: An alternative to freq_corpus that uses a count file in "word\tfreq" format. label_corpus: When using "label" as bucket type, the corpus containing the labels corresponding to each word in the corpus label_set: the permissible set of labels when using "label" as a bucket type case_insensitive: A boolean specifying whether to turn on the case insensitive option
Print a report comparing the word log likelihood.
[ "Print", "a", "report", "comparing", "the", "word", "log", "likelihood", "." ]
def print_word_likelihood_report(ref, lls,
                                 bucket_type='freq', bucket_cutoffs=None,
                                 freq_count_file=None, freq_corpus_file=None,
                                 label_corpus=None, label_set=None,
                                 case_insensitive=False):
    """
    Print a report comparing the word log likelihood.

    Args:
        ref: the ref of words over which the likelihoods are computed
        lls: likelihoods corresponding to each word in ref from the systems
        bucket_type: A string specifying the way to bucket words together to calculate average likelihood
        bucket_cutoffs: The boundaries between buckets, specified as a colon-separated string.
        freq_corpus_file: When using "freq" as a bucketer, which corpus to use to calculate frequency.
        freq_count_file: An alternative to freq_corpus that uses a count file in "word\tfreq" format.
        label_corpus: When using "label" as bucket type, the corpus containing the labels
                      corresponding to each word in the corpus
        label_set: the permissible set of labels when using "label" as a bucket type
        case_insensitive: A boolean specifying whether to turn on the case insensitive option
    """
    case_insensitive = True if case_insensitive == 'True' else False

    bucketer = bucketers.create_word_bucketer_from_profile(bucket_type=bucket_type,
                                                           bucket_cutoffs=bucket_cutoffs,
                                                           freq_count_file=freq_count_file,
                                                           freq_corpus_file=freq_corpus_file,
                                                           label_set=label_set,
                                                           case_insensitive=case_insensitive)
    if type(label_corpus) == str:
        label_corpus = corpus_utils.load_tokens(label_corpus)
    if label_corpus is not None:
        ref = label_corpus

    lls_out = [[l for l in bucketer.calc_bucketed_likelihoods(ref, ll)] for ll in lls]

    print(f'--- average word log likelihood by {bucketer.name()} bucket')
    for i, bucket_str in enumerate(bucketer.bucket_strs):
        print(bucket_str + "\t", end='')
        for ll_out in lls_out:
            print(f"{formatting.fmt(ll_out[i])}\t", end="")
        print()
[ "def", "print_word_likelihood_report", "(", "ref", ",", "lls", ",", "bucket_type", "=", "'freq'", ",", "bucket_cutoffs", "=", "None", ",", "freq_count_file", "=", "None", ",", "freq_corpus_file", "=", "None", ",", "label_corpus", "=", "None", ",", "label_set", ...
https://github.com/neulab/compare-mt/blob/df40d8db4a4f19e755dab0c888b984899f1cd32b/compare_mt/compare_ll_main.py#L10-L51
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/decimal.py
python
Context.remainder_near
(self, a, b)
return a.remainder_near(b, context=self)
Returns to be "a - b * n", where n is the integer nearest the exact value of "x / b" (if two integers are equally near then the even one is chosen). If the result is equal to 0 then its sign will be the sign of a. This operation will fail under the same conditions as integer division (that is, if integer division on the same two operands would fail, the remainder cannot be calculated). >>> ExtendedContext.remainder_near(Decimal('2.1'), Decimal('3')) Decimal('-0.9') >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('6')) Decimal('-2') >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('3')) Decimal('1') >>> ExtendedContext.remainder_near(Decimal('-10'), Decimal('3')) Decimal('-1') >>> ExtendedContext.remainder_near(Decimal('10.2'), Decimal('1')) Decimal('0.2') >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('0.3')) Decimal('0.1') >>> ExtendedContext.remainder_near(Decimal('3.6'), Decimal('1.3')) Decimal('-0.3') >>> ExtendedContext.remainder_near(3, 11) Decimal('3') >>> ExtendedContext.remainder_near(Decimal(3), 11) Decimal('3') >>> ExtendedContext.remainder_near(3, Decimal(11)) Decimal('3')
Returns to be "a - b * n", where n is the integer nearest the exact value of "x / b" (if two integers are equally near then the even one is chosen). If the result is equal to 0 then its sign will be the sign of a.
[ "Returns", "to", "be", "a", "-", "b", "*", "n", "where", "n", "is", "the", "integer", "nearest", "the", "exact", "value", "of", "x", "/", "b", "(", "if", "two", "integers", "are", "equally", "near", "then", "the", "even", "one", "is", "chosen", ")"...
def remainder_near(self, a, b):
    """Returns to be "a - b * n", where n is the integer nearest the exact
    value of "x / b" (if two integers are equally near then the even one
    is chosen).  If the result is equal to 0 then its sign will be the
    sign of a.

    This operation will fail under the same conditions as integer
    division (that is, if integer division on the same two operands
    would fail, the remainder cannot be calculated).

    >>> ExtendedContext.remainder_near(Decimal('2.1'), Decimal('3'))
    Decimal('-0.9')
    >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('6'))
    Decimal('-2')
    >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('3'))
    Decimal('1')
    >>> ExtendedContext.remainder_near(Decimal('-10'), Decimal('3'))
    Decimal('-1')
    >>> ExtendedContext.remainder_near(Decimal('10.2'), Decimal('1'))
    Decimal('0.2')
    >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('0.3'))
    Decimal('0.1')
    >>> ExtendedContext.remainder_near(Decimal('3.6'), Decimal('1.3'))
    Decimal('-0.3')
    >>> ExtendedContext.remainder_near(3, 11)
    Decimal('3')
    >>> ExtendedContext.remainder_near(Decimal(3), 11)
    Decimal('3')
    >>> ExtendedContext.remainder_near(3, Decimal(11))
    Decimal('3')
    """
    a = _convert_other(a, raiseit=True)
    return a.remainder_near(b, context=self)
[ "def", "remainder_near", "(", "self", ",", "a", ",", "b", ")", ":", "a", "=", "_convert_other", "(", "a", ",", "raiseit", "=", "True", ")", "return", "a", ".", "remainder_near", "(", "b", ",", "context", "=", "self", ")" ]
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/decimal.py#L5149-L5181
aceisace/Inkycal
552744bc5d80769c1015d48fd8b13201683ee679
inkycal/display/drivers/epdconfig.py
python
JetsonNano.module_init
(self)
return 0
[]
def module_init(self):
    self.GPIO.setmode(self.GPIO.BCM)
    self.GPIO.setwarnings(False)
    self.GPIO.setup(self.RST_PIN, self.GPIO.OUT)
    self.GPIO.setup(self.DC_PIN, self.GPIO.OUT)
    self.GPIO.setup(self.CS_PIN, self.GPIO.OUT)
    self.GPIO.setup(self.BUSY_PIN, self.GPIO.IN)
    self.SPI.SYSFS_software_spi_begin()
    return 0
[ "def", "module_init", "(", "self", ")", ":", "self", ".", "GPIO", ".", "setmode", "(", "self", ".", "GPIO", ".", "BCM", ")", "self", ".", "GPIO", ".", "setwarnings", "(", "False", ")", "self", ".", "GPIO", ".", "setup", "(", "self", ".", "RST_PIN",...
https://github.com/aceisace/Inkycal/blob/552744bc5d80769c1015d48fd8b13201683ee679/inkycal/display/drivers/epdconfig.py#L126-L134
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/v1_replica_set.py
python
V1ReplicaSet.status
(self, status)
Sets the status of this V1ReplicaSet. :param status: The status of this V1ReplicaSet. # noqa: E501 :type: V1ReplicaSetStatus
Sets the status of this V1ReplicaSet.
[ "Sets", "the", "status", "of", "this", "V1ReplicaSet", "." ]
def status(self, status):
    """Sets the status of this V1ReplicaSet.

    :param status: The status of this V1ReplicaSet.  # noqa: E501
    :type: V1ReplicaSetStatus
    """
    self._status = status
[ "def", "status", "(", "self", ",", "status", ")", ":", "self", ".", "_status", "=", "status" ]
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_replica_set.py#L174-L182
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/lib2to3/patcomp.py
python
PatternCompiler.get_int
(self, node)
return int(node.value)
[]
def get_int(self, node):
    assert node.type == token.NUMBER
    return int(node.value)
[ "def", "get_int", "(", "self", ",", "node", ")", ":", "assert", "node", ".", "type", "==", "token", ".", "NUMBER", "return", "int", "(", "node", ".", "value", ")" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/lib2to3/patcomp.py#L173-L175
tornadoweb/tornado
208672f3bf6cbb7e37f54c356e02a71ca29f1e02
tornado/ioloop.py
python
IOLoop.instance
()
return IOLoop.current()
Deprecated alias for `IOLoop.current()`. .. versionchanged:: 5.0 Previously, this method returned a global singleton `IOLoop`, in contrast with the per-thread `IOLoop` returned by `current()`. In nearly all cases the two were the same (when they differed, it was generally used from non-Tornado threads to communicate back to the main thread's `IOLoop`). This distinction is not present in `asyncio`, so in order to facilitate integration with that package `instance()` was changed to be an alias to `current()`. Applications using the cross-thread communications aspect of `instance()` should instead set their own global variable to point to the `IOLoop` they want to use. .. deprecated:: 5.0
Deprecated alias for `IOLoop.current()`.
[ "Deprecated", "alias", "for", "IOLoop", ".", "current", "()", "." ]
def instance() -> "IOLoop":
    """Deprecated alias for `IOLoop.current()`.

    .. versionchanged:: 5.0

       Previously, this method returned a global singleton
       `IOLoop`, in contrast with the per-thread `IOLoop` returned
       by `current()`. In nearly all cases the two were the same
       (when they differed, it was generally used from non-Tornado
       threads to communicate back to the main thread's `IOLoop`).
       This distinction is not present in `asyncio`, so in order to
       facilitate integration with that package `instance()` was
       changed to be an alias to `current()`. Applications using
       the cross-thread communications aspect of `instance()`
       should instead set their own global variable to point to the
       `IOLoop` they want to use.

    .. deprecated:: 5.0
    """
    return IOLoop.current()
[ "def", "instance", "(", ")", "->", "\"IOLoop\"", ":", "return", "IOLoop", ".", "current", "(", ")" ]
https://github.com/tornadoweb/tornado/blob/208672f3bf6cbb7e37f54c356e02a71ca29f1e02/tornado/ioloop.py#L181-L200
atlassian-api/atlassian-python-api
6d8545a790c3aae10b75bdc225fb5c3a0aee44db
atlassian/service_desk.py
python
ServiceDesk.get_service_desk_by_id
(self, service_desk_id)
return self.get( "rest/servicedeskapi/servicedesk/{}".format(service_desk_id), headers=self.experimental_headers, )
Returns the service desk for a given service desk ID :param service_desk_id: str :return: Service Desk
Returns the service desk for a given service desk ID
[ "Returns", "the", "service", "desk", "for", "a", "given", "service", "desk", "ID" ]
def get_service_desk_by_id(self, service_desk_id):
    """
    Returns the service desk for a given service desk ID
    :param service_desk_id: str
    :return: Service Desk
    """
    return self.get(
        "rest/servicedeskapi/servicedesk/{}".format(service_desk_id),
        headers=self.experimental_headers,
    )
[ "def", "get_service_desk_by_id", "(", "self", ",", "service_desk_id", ")", ":", "return", "self", ".", "get", "(", "\"rest/servicedeskapi/servicedesk/{}\"", ".", "format", "(", "service_desk_id", ")", ",", "headers", "=", "self", ".", "experimental_headers", ",", ...
https://github.com/atlassian-api/atlassian-python-api/blob/6d8545a790c3aae10b75bdc225fb5c3a0aee44db/atlassian/service_desk.py#L33-L44
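A minimal usage sketch with atlassian-python-api; the site URL and credentials are placeholders:

from atlassian import ServiceDesk

sd = ServiceDesk(
    url="https://yourcompany.atlassian.net",   # placeholder
    username="user@example.com",               # placeholder
    password="api-token",                      # placeholder
)
desk = sd.get_service_desk_by_id(1)
print(desk)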
shiyanlou/louplus-python
4c61697259e286e3d9116c3299f170d019ba3767
taobei/challenge-05/tbweb/handlers/order.py
python
create
()
return render_template('order/create.html', form=form, cart_products=cart_products)
Create an order
Create an order
[ "Create", "an", "order" ]
def create():
    """Create an order"""
    form = OrderForm()

    resp = TbUser(current_app).get_json('/addresses', params={
        'user_id': current_user.get_id(),
    })
    addresses = resp['data']['addresses']
    form.address_id.choices = [(str(v['id']), v['address']) for v in addresses]
    for addresse in addresses:
        if addresse['is_default']:
            form.address_id.data = str(addresse['id'])

    resp = TbBuy(current_app).get_json('/cart_products', params={
        'user_id': current_user.get_id(),
    })
    cart_products = resp['data']['cart_products']
    if len(cart_products) == 0:
        flash('购物车为空', 'danger')  # message: "the shopping cart is empty"
        return redirect(url_for('cart_product.index'))

    resp = TbMall(current_app).get_json('/products/infos', params={
        'ids': ','.join([str(v['product_id']) for v in cart_products]),
    })
    for cart_product in cart_products:
        cart_product['product'] = resp['data']['products'].get(
            str(cart_product['product_id']))

    if form.validate_on_submit():
        # Check that every product is in sufficient stock
        for cart_product in cart_products:
            if cart_product['amount'] > cart_product['product']['amount']:
                flash('商品“{}”数量不足'.format(
                    cart_product['product']['title']), 'danger')  # "not enough stock for product ..."
                return render_template('order/create.html', form=form, cart_products=cart_products)

        # Create the order
        resp = TbBuy(current_app).post_json('/orders', json={
            'address_id': form.address_id.data,
            'note': form.note.data,
            'order_products': [
                {
                    'product_id': v['product_id'],
                    'amount': v['amount'],
                    'price': v['product']['price'],
                } for v in cart_products
            ],
            'user_id': current_user.get_id(),
        }, check_code=False)
        if resp['code'] != 0:
            flash(resp['message'], 'danger')
            return render_template('order/create.html', form=form, cart_products=cart_products)

        # Deduct the purchased amounts from the product stock
        for cart_product in cart_products:
            resp = TbMall(current_app).post_json(
                '/products/{}'.format(cart_product['product_id']), json={
                    'amount': cart_product['product']['amount'] - cart_product['amount'],
                })
            if resp['code'] != 0:
                flash(resp['message'], 'danger')
                return render_template('order/create.html', form=form, cart_products=cart_products)

        # Empty the shopping cart
        resp = TbBuy(current_app).delete_json('/cart_products', params={
            'user_id': current_user.get_id(),
        })
        if resp['code'] != 0:
            flash(resp['message'], 'danger')
            return render_template('order/create.html', form=form, cart_products=cart_products)

        return redirect(url_for('.index'))

    return render_template('order/create.html', form=form, cart_products=cart_products)
[ "def", "create", "(", ")", ":", "form", "=", "OrderForm", "(", ")", "resp", "=", "TbUser", "(", "current_app", ")", ".", "get_json", "(", "'/addresses'", ",", "params", "=", "{", "'user_id'", ":", "current_user", ".", "get_id", "(", ")", ",", "}", ")...
https://github.com/shiyanlou/louplus-python/blob/4c61697259e286e3d9116c3299f170d019ba3767/taobei/challenge-05/tbweb/handlers/order.py#L72-L147
angr/angr
4b04d56ace135018083d36d9083805be8146688b
angr/analyses/identifier/functions/recv_until.py
python
receive_until.args
(self)
return [a[order] for order in self.arg_order]
[]
def args(self):
    a = self.base_args()
    return [a[order] for order in self.arg_order]
[ "def", "args", "(", "self", ")", ":", "a", "=", "self", ".", "base_args", "(", ")", "return", "[", "a", "[", "order", "]", "for", "order", "in", "self", ".", "arg_order", "]" ]
https://github.com/angr/angr/blob/4b04d56ace135018083d36d9083805be8146688b/angr/analyses/identifier/functions/recv_until.py#L182-L184
miyosuda/unreal
31d4886149412fa248f6efa490ab65bd2f425cde
environment/lab_environment.py
python
LabEnvironment.process
(self, action)
return state, reward, terminal, pixel_change
[]
def process(self, action):
    real_action = LabEnvironment.ACTION_LIST[action]

    self.conn.send([COMMAND_ACTION, real_action])
    obs, reward, terminal = self.conn.recv()

    if not terminal:
        state = self._preprocess_frame(obs)
    else:
        state = self.last_state

    pixel_change = self._calc_pixel_change(state, self.last_state)
    self.last_state = state
    self.last_action = action
    self.last_reward = reward
    return state, reward, terminal, pixel_change
[ "def", "process", "(", "self", ",", "action", ")", ":", "real_action", "=", "LabEnvironment", ".", "ACTION_LIST", "[", "action", "]", "self", ".", "conn", ".", "send", "(", "[", "COMMAND_ACTION", ",", "real_action", "]", ")", "obs", ",", "reward", ",", ...
https://github.com/miyosuda/unreal/blob/31d4886149412fa248f6efa490ab65bd2f425cde/environment/lab_environment.py#L104-L119
mtivadar/qiew
87a3b96b43f1745a6b3f1fcfebce5164d2a40a14
plugins/unpack/basic.py
python
basic.proceed
(self)
[]
def proceed(self):
    if self.viewMode.selector.getCurrentSelection():
        u, v = self.viewMode.selector.getCurrentSelection()

        # prepare values from text boxes
        op = str(self.ui.op.currentText())

        key = str(self.ui.key.text())
        if key:
            key = UnpackPlugin._convert(str(self.ui.key.text()))
        else:
            key = 0

        keyop = str(self.ui.keyop.currentText())

        # get delta
        delta = str(self.ui.delta.text())
        if delta:
            delta = UnpackPlugin._convert(str(self.ui.delta.text()))
        else:
            delta = 0

        size = str(self.ui.bytes.text())
        if size:
            size = int(size)
        else:
            size = 0

        if size < 1:
            return

        skip = str(self.ui.skip.text())
        if skip:
            skip = int(skip, 0)
        else:
            skip = 0

        OP = {}
        OP['ROL'] = self._rol
        OP['ROR'] = self._ror
        OP['ADD'] = self._add
        OP['SUB'] = self._sub
        OP['XOR'] = self._xor
        OP['---'] = lambda key, delta, keysize: key

        i = 0
        while i < v-u:
            offset = u+i

            b = 0
            j = 0
            # ugly
            while j < size:
                B = self.dataModel.getBYTE(offset + j)
                if B:
                    b = b | (B << (8*j))
                j += 1

            b = OP[op](b, key, size)

            # compute key size in bytes
            keysize = (key.bit_length() + (8 - key.bit_length()%8)%8)//8
            key = OP[keyop](key, delta, keysize)

            j = 0
            # ugly again
            while j < size:
                c = b & 0xFF
                #self.dataModel.setData_b(offset + size - 1 - j, chr(c))
                self.dataModel.setData_b(offset + j, c)
                b = b >> 8
                j += 1

            i += (size + skip)
[ "def", "proceed", "(", "self", ")", ":", "if", "self", ".", "viewMode", ".", "selector", ".", "getCurrentSelection", "(", ")", ":", "u", ",", "v", "=", "self", ".", "viewMode", ".", "selector", ".", "getCurrentSelection", "(", ")", "# prepare values vrom t...
https://github.com/mtivadar/qiew/blob/87a3b96b43f1745a6b3f1fcfebce5164d2a40a14/plugins/unpack/basic.py#L64-L140
haiwen/seahub
e92fcd44e3e46260597d8faa9347cb8222b8b10d
seahub/utils/htmldiff.py
python
_format_range_context
(start, stop)
return '{},{}'.format(beginning, beginning + length - 1)
Convert range to the "ed" format
Convert range to the "ed" format
[ "Convert", "range", "to", "the", "ed", "format" ]
def _format_range_context(start, stop):
    'Convert range to the "ed" format'
    # Per the diff spec at http://www.unix.org/single_unix_specification/
    beginning = start + 1     # lines start numbering with one
    length = stop - start
    if not length:
        beginning -= 1        # empty ranges begin at line just before the range
    if length <= 1:
        return '{}'.format(beginning)
    return '{},{}'.format(beginning, beginning + length - 1)
[ "def", "_format_range_context", "(", "start", ",", "stop", ")", ":", "# Per the diff spec at http://www.unix.org/single_unix_specification/", "beginning", "=", "start", "+", "1", "# lines start numbering with one", "length", "=", "stop", "-", "start", "if", "not", "length...
https://github.com/haiwen/seahub/blob/e92fcd44e3e46260597d8faa9347cb8222b8b10d/seahub/utils/htmldiff.py#L1224-L1233
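A quick sketch of what the helper produces for typical half-open [start, stop) line ranges:

print(_format_range_context(0, 3))   # '1,3' -> lines 1 through 3
print(_format_range_context(2, 3))   # '3'   -> single line 3
print(_format_range_context(2, 2))   # '2'   -> empty range, anchored just before line 3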
microsoft/azure-devops-python-api
451cade4c475482792cbe9e522c1fee32393139e
azure-devops/azure/devops/v6_0/work_item_tracking/work_item_tracking_client.py
python
WorkItemTrackingClient.get_work_item_icon_xaml
(self, icon, color=None, v=None, **kwargs)
return self._client.stream_download(response, callback=callback)
GetWorkItemIconXaml. [Preview API] Get a work item icon given the friendly name and icon color. :param str icon: The name of the icon :param str color: The 6-digit hex color for the icon :param int v: The version of the icon (used only for cache invalidation) :rtype: object
GetWorkItemIconXaml. [Preview API] Get a work item icon given the friendly name and icon color. :param str icon: The name of the icon :param str color: The 6-digit hex color for the icon :param int v: The version of the icon (used only for cache invalidation) :rtype: object
[ "GetWorkItemIconXaml", ".", "[", "Preview", "API", "]", "Get", "a", "work", "item", "icon", "given", "the", "friendly", "name", "and", "icon", "color", ".", ":", "param", "str", "icon", ":", "The", "name", "of", "the", "icon", ":", "param", "str", "col...
def get_work_item_icon_xaml(self, icon, color=None, v=None, **kwargs):
    """GetWorkItemIconXaml.
    [Preview API] Get a work item icon given the friendly name and icon color.
    :param str icon: The name of the icon
    :param str color: The 6-digit hex color for the icon
    :param int v: The version of the icon (used only for cache invalidation)
    :rtype: object
    """
    route_values = {}
    if icon is not None:
        route_values['icon'] = self._serialize.url('icon', icon, 'str')
    query_parameters = {}
    if color is not None:
        query_parameters['color'] = self._serialize.query('color', color, 'str')
    if v is not None:
        query_parameters['v'] = self._serialize.query('v', v, 'int')
    response = self._send(http_method='GET',
                          location_id='4e1eb4a5-1970-4228-a682-ec48eb2dca30',
                          version='6.0-preview.1',
                          route_values=route_values,
                          query_parameters=query_parameters,
                          accept_media_type='image/xaml+xml')
    if "callback" in kwargs:
        callback = kwargs["callback"]
    else:
        callback = None
    return self._client.stream_download(response, callback=callback)
[ "def", "get_work_item_icon_xaml", "(", "self", ",", "icon", ",", "color", "=", "None", ",", "v", "=", "None", ",", "*", "*", "kwargs", ")", ":", "route_values", "=", "{", "}", "if", "icon", "is", "not", "None", ":", "route_values", "[", "'icon'", "]"...
https://github.com/microsoft/azure-devops-python-api/blob/451cade4c475482792cbe9e522c1fee32393139e/azure-devops/azure/devops/v6_0/work_item_tracking/work_item_tracking_client.py#L1499-L1525
ibis-project/ibis
e1ef8b6870ac53de9d1fe5c52851fa41872109c4
ibis/expr/api.py
python
_integer_to_interval
(arg, unit='s')
return op.to_expr()
Convert integer interval with the same inner type Parameters ---------- unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns'} Returns ------- interval : interval value expression
Convert integer interval with the same inner type
[ "Convert", "integer", "interval", "with", "the", "same", "inner", "type" ]
def _integer_to_interval(arg, unit='s'):
    """
    Convert integer interval with the same inner type

    Parameters
    ----------
    unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns'}

    Returns
    -------
    interval : interval value expression
    """
    op = ops.IntervalFromInteger(arg, unit)
    return op.to_expr()
[ "def", "_integer_to_interval", "(", "arg", ",", "unit", "=", "'s'", ")", ":", "op", "=", "ops", ".", "IntervalFromInteger", "(", "arg", ",", "unit", ")", "return", "op", ".", "to_expr", "(", ")" ]
https://github.com/ibis-project/ibis/blob/e1ef8b6870ac53de9d1fe5c52851fa41872109c4/ibis/expr/api.py#L1396-L1409
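A minimal sketch of how this helper surfaces in the expression API, assuming it is bound as `to_interval` on integer columns as in ibis of that era; the unbound table is made up:

import ibis

t = ibis.table([('delay_s', 'int64')], name='events')  # hypothetical table
interval = t.delay_s.to_interval(unit='s')             # assumes the helper is exposed as to_interval
print(type(interval))                                  # an interval value expression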
soeaver/Pytorch_Mask_RCNN
aaee46490340cec83c2fbd72471c4020786c5266
tasks/merge_task.py
python
build_rpn_targets
(image_shape, anchors, gt_class_ids, gt_boxes, config)
return rpn_match, rpn_bbox
Given the anchors and GT boxes, compute overlaps and identify positive anchors and deltas to refine them to match their corresponding GT boxes. anchors: [num_anchors, (y1, x1, y2, x2)] gt_class_ids: [num_gt_boxes] Integer class IDs. gt_boxes: [num_gt_boxes, (y1, x1, y2, x2)] Returns: rpn_match: [N] (int32) matches between anchors and GT boxes. 1 = positive anchor, -1 = negative anchor, 0 = neutral rpn_bbox: [N, (dy, dx, log(dh), log(dw))] Anchor bbox deltas.
Given the anchors and GT boxes, compute overlaps and identify positive anchors and deltas to refine them to match their corresponding GT boxes.
[ "Given", "the", "anchors", "and", "GT", "boxes", "compute", "overlaps", "and", "identify", "positive", "anchors", "and", "deltas", "to", "refine", "them", "to", "match", "their", "corresponding", "GT", "boxes", "." ]
def build_rpn_targets(image_shape, anchors, gt_class_ids, gt_boxes, config):
    """Given the anchors and GT boxes, compute overlaps and identify positive
    anchors and deltas to refine them to match their corresponding GT boxes.

    anchors: [num_anchors, (y1, x1, y2, x2)]
    gt_class_ids: [num_gt_boxes] Integer class IDs.
    gt_boxes: [num_gt_boxes, (y1, x1, y2, x2)]

    Returns:
    rpn_match: [N] (int32) matches between anchors and GT boxes.
               1 = positive anchor, -1 = negative anchor, 0 = neutral
    rpn_bbox: [N, (dy, dx, log(dh), log(dw))] Anchor bbox deltas.
    """
    # RPN Match: 1 = positive anchor, -1 = negative anchor, 0 = neutral
    rpn_match = np.zeros([anchors.shape[0]], dtype=np.int32)
    # RPN bounding boxes: [max anchors per image, (dy, dx, log(dh), log(dw))]
    rpn_bbox = np.zeros((config.RPN_TRAIN_ANCHORS_PER_IMAGE, 4))

    # Handle COCO crowds
    # A crowd box in COCO is a bounding box around several instances. Exclude
    # them from training. A crowd box is given a negative class ID.
    crowd_ix = np.where(gt_class_ids < 0)[0]
    if crowd_ix.shape[0] > 0:
        # Filter out crowds from ground truth class IDs and boxes
        non_crowd_ix = np.where(gt_class_ids > 0)[0]
        crowd_boxes = gt_boxes[crowd_ix]
        gt_class_ids = gt_class_ids[non_crowd_ix]
        gt_boxes = gt_boxes[non_crowd_ix]
        # Compute overlaps with crowd boxes [anchors, crowds]
        crowd_overlaps = compute_overlaps(anchors, crowd_boxes)
        crowd_iou_max = np.amax(crowd_overlaps, axis=1)
        no_crowd_bool = (crowd_iou_max < 0.001)
    else:
        # All anchors don't intersect a crowd
        no_crowd_bool = np.ones([anchors.shape[0]], dtype=bool)

    # Compute overlaps [num_anchors, num_gt_boxes]
    overlaps = compute_overlaps(anchors, gt_boxes)

    # Match anchors to GT Boxes
    # If an anchor overlaps a GT box with IoU >= 0.7 then it's positive.
    # If an anchor overlaps a GT box with IoU < 0.3 then it's negative.
    # Neutral anchors are those that don't match the conditions above,
    # and they don't influence the loss function.
    # However, don't keep any GT box unmatched (rare, but happens). Instead,
    # match it to the closest anchor (even if its max IoU is < 0.3).
    #
    # 1. Set negative anchors first. They get overwritten below if a GT box is
    # matched to them. Skip boxes in crowd areas.
    anchor_iou_argmax = np.argmax(overlaps, axis=1)
    anchor_iou_max = overlaps[np.arange(overlaps.shape[0]), anchor_iou_argmax]
    rpn_match[(anchor_iou_max < 0.3) & (no_crowd_bool)] = -1
    # 2. Set an anchor for each GT box (regardless of IoU value).
    # TODO: If multiple anchors have the same IoU match all of them
    gt_iou_argmax = np.argmax(overlaps, axis=0)
    rpn_match[gt_iou_argmax] = 1
    # 3. Set anchors with high overlap as positive.
    rpn_match[anchor_iou_max >= 0.7] = 1

    # Subsample to balance positive and negative anchors
    # Don't let positives be more than half the anchors
    ids = np.where(rpn_match == 1)[0]
    extra = len(ids) - (config.RPN_TRAIN_ANCHORS_PER_IMAGE // 2)
    if extra > 0:
        # Reset the extra ones to neutral
        ids = np.random.choice(ids, extra, replace=False)
        rpn_match[ids] = 0
    # Same for negative proposals
    ids = np.where(rpn_match == -1)[0]
    extra = len(ids) - (config.RPN_TRAIN_ANCHORS_PER_IMAGE - np.sum(rpn_match == 1))
    if extra > 0:
        # Reset the extra ones to neutral
        ids = np.random.choice(ids, extra, replace=False)
        rpn_match[ids] = 0

    # For positive anchors, compute shift and scale needed to transform them
    # to match the corresponding GT boxes.
    ids = np.where(rpn_match == 1)[0]
    ix = 0  # index into rpn_bbox
    # TODO: use box_refinement() rather than duplicating the code here
    for i, a in zip(ids, anchors[ids]):
        # Closest gt box (it might have IoU < 0.7)
        gt = gt_boxes[anchor_iou_argmax[i]]

        # Convert coordinates to center plus width/height.
        # GT Box
        gt_h = gt[2] - gt[0]
        gt_w = gt[3] - gt[1]
        gt_center_y = gt[0] + 0.5 * gt_h
        gt_center_x = gt[1] + 0.5 * gt_w
        # Anchor
        a_h = a[2] - a[0]
        a_w = a[3] - a[1]
        a_center_y = a[0] + 0.5 * a_h
        a_center_x = a[1] + 0.5 * a_w

        # Compute the bbox refinement that the RPN should predict.
        rpn_bbox[ix] = [
            (gt_center_y - a_center_y) / a_h,
            (gt_center_x - a_center_x) / a_w,
            np.log(gt_h / a_h),
            np.log(gt_w / a_w),
        ]
        # Normalize
        rpn_bbox[ix] /= config.RPN_BBOX_STD_DEV
        ix += 1

    return rpn_match, rpn_bbox
[ "def", "build_rpn_targets", "(", "image_shape", ",", "anchors", ",", "gt_class_ids", ",", "gt_boxes", ",", "config", ")", ":", "# RPN Match: 1 = positive anchor, -1 = negative anchor, 0 = neutral", "rpn_match", "=", "np", ".", "zeros", "(", "[", "anchors", ".", "shape...
https://github.com/soeaver/Pytorch_Mask_RCNN/blob/aaee46490340cec83c2fbd72471c4020786c5266/tasks/merge_task.py#L9-L117
opteroncx/MoePhoto
00d7c4f9df861a7882f6b35828c5461672238542
python/models.py
python
pixel_unshuffle
(scale)
return f
Pixel unshuffle. Args: x (Tensor): Input feature with shape (b, c, hh, hw). scale (int): Downsample ratio. Returns: Tensor: the pixel unshuffled feature.
Pixel unshuffle. Args: x (Tensor): Input feature with shape (b, c, hh, hw). scale (int): Downsample ratio. Returns: Tensor: the pixel unshuffled feature.
[ "Pixel", "unshuffle", ".", "Args", ":", "x", "(", "Tensor", ")", ":", "Input", "feature", "with", "shape", "(", "b", "c", "hh", "hw", ")", ".", "scale", "(", "int", ")", ":", "Downsample", "ratio", ".", "Returns", ":", "Tensor", ":", "the", "pixel"...
def pixel_unshuffle(scale):
    """ Pixel unshuffle.

    Args:
        x (Tensor): Input feature with shape (b, c, hh, hw).
        scale (int): Downsample ratio.

    Returns:
        Tensor: the pixel unshuffled feature.
    """
    if scale == 1:
        return lambda x: x
    def f(x):
        b, c, hh, hw = x.size()
        out_channel = c * (scale**2)
        assert hh % scale == 0 and hw % scale == 0
        h = hh // scale
        w = hw // scale
        x_view = x.view(b, c, h, scale, w, scale)
        return x_view.permute(0, 1, 3, 5, 2, 4).reshape(b, out_channel, h, w)
    return f
[ "def", "pixel_unshuffle", "(", "scale", ")", ":", "if", "scale", "==", "1", ":", "return", "lambda", "x", ":", "x", "def", "f", "(", "x", ")", ":", "b", ",", "c", ",", "hh", ",", "hw", "=", "x", ".", "size", "(", ")", "out_channel", "=", "c",...
https://github.com/opteroncx/MoePhoto/blob/00d7c4f9df861a7882f6b35828c5461672238542/python/models.py#L414-L432
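A quick shape check for the factory above (assumes PyTorch is available and pixel_unshuffle is in scope): a (1, 3, 8, 8) feature map unshuffled by scale=2 packs each 2x2 spatial block into channels, giving (1, 12, 4, 4).

import torch

unshuffle2 = pixel_unshuffle(2)
x = torch.randn(1, 3, 8, 8)
print(unshuffle2(x).shape)  # torch.Size([1, 12, 4, 4])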
Chaffelson/nipyapi
d3b186fd701ce308c2812746d98af9120955e810
nipyapi/nifi/models/bucket.py
python
Bucket.allow_bundle_redeploy
(self)
return self._allow_bundle_redeploy
Gets the allow_bundle_redeploy of this Bucket. Indicates if this bucket allows the same version of an extension bundle to be redeployed and thus overwrite the existing artifact. By default this is false. :return: The allow_bundle_redeploy of this Bucket. :rtype: bool
Gets the allow_bundle_redeploy of this Bucket. Indicates if this bucket allows the same version of an extension bundle to be redeployed and thus overwrite the existing artifact. By default this is false.
[ "Gets", "the", "allow_bundle_redeploy", "of", "this", "Bucket", ".", "Indicates", "if", "this", "bucket", "allows", "the", "same", "version", "of", "an", "extension", "bundle", "to", "be", "redeployed", "and", "thus", "overwrite", "the", "existing", "artifact", ...
def allow_bundle_redeploy(self):
    """
    Gets the allow_bundle_redeploy of this Bucket.
    Indicates if this bucket allows the same version of an extension bundle
    to be redeployed and thus overwrite the existing artifact. By default
    this is false.

    :return: The allow_bundle_redeploy of this Bucket.
    :rtype: bool
    """
    return self._allow_bundle_redeploy
[ "def", "allow_bundle_redeploy", "(", "self", ")", ":", "return", "self", ".", "_allow_bundle_redeploy" ]
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/bucket.py#L210-L218
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/idlelib/StackViewer.py
python
VariablesTreeItem.keys
(self)
return self.object.keys()
[]
def keys(self):  # unused, left for possible 3rd party use
    return self.object.keys()
[ "def", "keys", "(", "self", ")", ":", "# unused, left for possible 3rd party use", "return", "self", ".", "object", ".", "keys", "(", ")" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/idlelib/StackViewer.py#L123-L124
zyfra/ebonite
b01b662c43709d152940f488574d78ff25f89ecf
src/ebonite/core/objects/core.py
python
_WithBuilder.unbind_builder
(self)
[]
def unbind_builder(self):
    del self.builder
[ "def", "unbind_builder", "(", "self", ")", ":", "del", "self", ".", "builder" ]
https://github.com/zyfra/ebonite/blob/b01b662c43709d152940f488574d78ff25f89ecf/src/ebonite/core/objects/core.py#L1511-L1512
gwastro/pycbc
1e1c85534b9dba8488ce42df693230317ca63dea
pycbc/inference/io/multinest.py
python
MultinestFile.write_resume_point
(self)
Keeps a list of the number of iterations that were in a file when a run was resumed from a checkpoint.
Keeps a list of the number of iterations that were in a file when a run was resumed from a checkpoint.
[ "Keeps", "a", "list", "of", "the", "number", "of", "iterations", "that", "were", "in", "a", "file", "when", "a", "run", "was", "resumed", "from", "a", "checkpoint", "." ]
def write_resume_point(self):
    """Keeps a list of the number of iterations that were in a file when a
    run was resumed from a checkpoint."""
    try:
        resume_pts = self.attrs["resume_points"].tolist()
    except KeyError:
        resume_pts = []
    try:
        niterations = self.niterations
    except KeyError:
        niterations = 0
    resume_pts.append(niterations)
    self.attrs["resume_points"] = resume_pts
[ "def", "write_resume_point", "(", "self", ")", ":", "try", ":", "resume_pts", "=", "self", ".", "attrs", "[", "\"resume_points\"", "]", ".", "tolist", "(", ")", "except", "KeyError", ":", "resume_pts", "=", "[", "]", "try", ":", "niterations", "=", "self...
https://github.com/gwastro/pycbc/blob/1e1c85534b9dba8488ce42df693230317ca63dea/pycbc/inference/io/multinest.py#L106-L118
twilio/twilio-python
6e1e811ea57a1edfadd5161ace87397c563f6915
twilio/rest/api/v2010/account/conference/recording.py
python
RecordingList.page
(self, date_created_before=values.unset, date_created=values.unset, date_created_after=values.unset, page_token=values.unset, page_number=values.unset, page_size=values.unset)
return RecordingPage(self._version, response, self._solution)
Retrieve a single page of RecordingInstance records from the API. Request is executed immediately :param date date_created_before: The `YYYY-MM-DD` value of the resources to read :param date date_created: The `YYYY-MM-DD` value of the resources to read :param date date_created_after: The `YYYY-MM-DD` value of the resources to read :param str page_token: PageToken provided by the API :param int page_number: Page Number, this value is simply for client state :param int page_size: Number of records to return, defaults to 50 :returns: Page of RecordingInstance :rtype: twilio.rest.api.v2010.account.conference.recording.RecordingPage
Retrieve a single page of RecordingInstance records from the API. Request is executed immediately
[ "Retrieve", "a", "single", "page", "of", "RecordingInstance", "records", "from", "the", "API", ".", "Request", "is", "executed", "immediately" ]
def page(self, date_created_before=values.unset, date_created=values.unset,
         date_created_after=values.unset, page_token=values.unset,
         page_number=values.unset, page_size=values.unset):
    """
    Retrieve a single page of RecordingInstance records from the API.
    Request is executed immediately

    :param date date_created_before: The `YYYY-MM-DD` value of the resources to read
    :param date date_created: The `YYYY-MM-DD` value of the resources to read
    :param date date_created_after: The `YYYY-MM-DD` value of the resources to read
    :param str page_token: PageToken provided by the API
    :param int page_number: Page Number, this value is simply for client state
    :param int page_size: Number of records to return, defaults to 50

    :returns: Page of RecordingInstance
    :rtype: twilio.rest.api.v2010.account.conference.recording.RecordingPage
    """
    data = values.of({
        'DateCreated<': serialize.iso8601_date(date_created_before),
        'DateCreated': serialize.iso8601_date(date_created),
        'DateCreated>': serialize.iso8601_date(date_created_after),
        'PageToken': page_token,
        'Page': page_number,
        'PageSize': page_size,
    })

    response = self._version.page(method='GET', uri=self._uri, params=data)

    return RecordingPage(self._version, response, self._solution)
[ "def", "page", "(", "self", ",", "date_created_before", "=", "values", ".", "unset", ",", "date_created", "=", "values", ".", "unset", ",", "date_created_after", "=", "values", ".", "unset", ",", "page_token", "=", "values", ".", "unset", ",", "page_number",...
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/api/v2010/account/conference/recording.py#L97-L125
HymanLiuTS/flaskTs
286648286976e85d9b9a5873632331efcafe0b21
flasky/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py
python
_xml_escape
(data)
return data
Escape &, <, >, ", ', etc. in a string of data.
Escape &, <, >, ", ', etc. in a string of data.
[ "Escape", "&", "<", ">", "etc", ".", "in", "a", "string", "of", "data", "." ]
def _xml_escape(data):
    """Escape &, <, >, ", ', etc. in a string of data."""
    # ampersand must be replaced first
    from_symbols = '&><"\''
    to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
    for from_, to_ in zip(from_symbols, to_symbols):
        data = data.replace(from_, to_)
    return data
[ "def", "_xml_escape", "(", "data", ")", ":", "# ampersand must be replaced first", "from_symbols", "=", "'&><\"\\''", "to_symbols", "=", "(", "'&'", "+", "s", "+", "';'", "for", "s", "in", "\"amp gt lt quot apos\"", ".", "split", "(", ")", ")", "for", "from_",...
https://github.com/HymanLiuTS/flaskTs/blob/286648286976e85d9b9a5873632331efcafe0b21/flasky/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py#L162-L170
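The ampersand is replaced first so that the & characters introduced by the later entity substitutions are not escaped again. A quick check of the behavior:

print(_xml_escape('a < b & "c"'))  # a &lt; b &amp; &quot;c&quot;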
statsmodels/statsmodels
debbe7ea6ba28fe5bdb78f09f8cac694bef98722
statsmodels/genmod/cov_struct.py
python
GlobalOddsRatio.get_eyy
(self, endog_expval, index)
return vmat
Returns a matrix V such that V[i,j] is the joint probability that endog[i] = 1 and endog[j] = 1, based on the marginal probabilities of endog and the global odds ratio `current_or`.
Returns a matrix V such that V[i,j] is the joint probability that endog[i] = 1 and endog[j] = 1, based on the marginal probabilities of endog and the global odds ratio `current_or`.
[ "Returns", "a", "matrix", "V", "such", "that", "V", "[", "i", "j", "]", "is", "the", "joint", "probability", "that", "endog", "[", "i", "]", "=", "1", "and", "endog", "[", "j", "]", "=", "1", "based", "on", "the", "marginal", "probabilities", "of",...
def get_eyy(self, endog_expval, index):
    """
    Returns a matrix V such that V[i,j] is the joint probability
    that endog[i] = 1 and endog[j] = 1, based on the marginal
    probabilities of endog and the global odds ratio `current_or`.
    """
    current_or = self.dep_params
    ibd = self.ibd[index]

    # The between-observation joint probabilities
    if current_or == 1.0:
        vmat = np.outer(endog_expval, endog_expval)
    else:
        psum = endog_expval[:, None] + endog_expval[None, :]
        pprod = endog_expval[:, None] * endog_expval[None, :]
        pfac = np.sqrt((1. + psum * (current_or - 1.)) ** 2 +
                       4 * current_or * (1. - current_or) * pprod)
        vmat = 1. + psum * (current_or - 1.) - pfac
        vmat /= 2. * (current_or - 1)

    # Fix E[YY'] for elements that belong to same observation
    for bdl in ibd:
        evy = endog_expval[bdl[0]:bdl[1]]
        if self.endog_type == "ordinal":
            vmat[bdl[0]:bdl[1], bdl[0]:bdl[1]] = np.minimum.outer(evy, evy)
        else:
            vmat[bdl[0]:bdl[1], bdl[0]:bdl[1]] = np.diag(evy)

    return vmat
[ "def", "get_eyy", "(", "self", ",", "endog_expval", ",", "index", ")", ":", "current_or", "=", "self", ".", "dep_params", "ibd", "=", "self", ".", "ibd", "[", "index", "]", "# The between-observation joint probabilities", "if", "current_or", "==", "1.0", ":", ...
https://github.com/statsmodels/statsmodels/blob/debbe7ea6ba28fe5bdb78f09f8cac694bef98722/statsmodels/genmod/cov_struct.py#L1169-L1199
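The closed form in the `else` branch appears to be the Plackett construction: for marginals p_i, p_j and a common odds ratio psi != 1, the joint probability p_ij is the admissible root of a quadratic. A reading of the code above (not quoted from the statsmodels docs), in LaTeX:

% Global odds ratio defining the joint probability, and its admissible root:
\psi = \frac{p_{ij}\,(1 - p_i - p_j + p_{ij})}{(p_i - p_{ij})(p_j - p_{ij})},
\qquad
p_{ij} = \frac{1 + (p_i + p_j)(\psi - 1)
              - \sqrt{\bigl[1 + (p_i + p_j)(\psi - 1)\bigr]^2 + 4\psi(1-\psi)\,p_i p_j}}
             {2(\psi - 1)} .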
usb-tools/Facedancer
e688fe61dc34087db333432394e1f90e52ac3794
facedancer/USBProxy.py
python
USBProxyDevice._proxy_out_request
(self, req)
Proxy OUT requests, which sends a request from the victim to the target device.
Proxy OUT requests, which sends a request from the victim to the target device.
[ "Proxy", "OUT", "requests", "which", "sends", "a", "request", "from", "the", "victim", "to", "the", "target", "device", "." ]
def _proxy_out_request(self, req):
    """
    Proxy OUT requests, which sends a request from the victim to the target device.
    """
    data = req.data

    for f in self.filter_list:
        req, data = f.filter_control_out(req, data)

    # ... forward the request to the real device.
    if req:
        try:
            self.libusb_device.ctrl_transfer(req.request_type, req.request,
                req.value, req.index, data)
            self.ack_status_stage()
        # Special case: we've stalled, allow the filters to decide what to do.
        except USBError as e:
            stalled = True
            for f in self.filter_list:
                req, data, stalled = f.handle_out_request_stall(req, data, stalled)
            if stalled:
                self.maxusb_app.stall_ep0()
[ "def", "_proxy_out_request", "(", "self", ",", "req", ")", ":", "data", "=", "req", ".", "data", "for", "f", "in", "self", ".", "filter_list", ":", "req", ",", "data", "=", "f", ".", "filter_control_out", "(", "req", ",", "data", ")", "# ... forward th...
https://github.com/usb-tools/Facedancer/blob/e688fe61dc34087db333432394e1f90e52ac3794/facedancer/USBProxy.py#L287-L313
openstack/cinder
23494a6d6c51451688191e1847a458f1d3cdcaa5
cinder/volume/drivers/qnap.py
python
QnapAPIExecutor.get_all_iscsi_portal_setting
(self)
return res_details
Execute get_all_iscsi_portal_setting API.
Execute get_all_iscsi_portal_setting API.
[ "Execute", "get_all_iscsi_portal_setting", "API", "." ]
def get_all_iscsi_portal_setting(self):
    """Execute get_all_iscsi_portal_setting API."""
    res_details = self._get_res_details(
        '/cgi-bin/disk/iscsi_portal_setting.cgi?',
        func='get_all',
        sid=self.sid)
    return res_details
[ "def", "get_all_iscsi_portal_setting", "(", "self", ")", ":", "res_details", "=", "self", ".", "_get_res_details", "(", "'/cgi-bin/disk/iscsi_portal_setting.cgi?'", ",", "func", "=", "'get_all'", ",", "sid", "=", "self", ".", "sid", ")", "return", "res_details" ]
https://github.com/openstack/cinder/blob/23494a6d6c51451688191e1847a458f1d3cdcaa5/cinder/volume/drivers/qnap.py#L1702-L1709
openstack/octavia
27e5b27d31c695ba72fb6750de2bdafd76e0d7d9
octavia/db/api.py
python
get_lock_session
()
Context manager for using a locking (not auto-commit) session.
Context manager for using a locking (not auto-commit) session.
[ "Context", "manager", "for", "using", "a", "locking", "(", "not", "auto", "-", "commit", ")", "session", "." ]
def get_lock_session(): """Context manager for using a locking (not auto-commit) session.""" lock_session = get_session(autocommit=False) try: yield lock_session lock_session.commit() except Exception: with excutils.save_and_reraise_exception(): lock_session.rollback()
[ "def", "get_lock_session", "(", ")", ":", "lock_session", "=", "get_session", "(", "autocommit", "=", "False", ")", "try", ":", "yield", "lock_session", "lock_session", ".", "commit", "(", ")", "except", "Exception", ":", "with", "excutils", ".", "save_and_rer...
https://github.com/openstack/octavia/blob/27e5b27d31c695ba72fb6750de2bdafd76e0d7d9/octavia/db/api.py#L49-L57
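Because `get_lock_session` yields a session and commits on normal exit (rolling back on any exception), call sites can wrap a read-modify-write in a single `with` block. A minimal usage sketch, assuming the generator is wrapped with contextlib.contextmanager (not shown in the excerpt) and using a hypothetical model and id:

with get_lock_session() as lock_session:
    # SELECT ... FOR UPDATE so that concurrent workers serialize on this row
    lb = (lock_session.query(models.LoadBalancer)
                      .with_for_update()
                      .get(lb_id))
    lb.provisioning_status = 'PENDING_UPDATE'
    # leaving the block commits; an exception rolls back instead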
golismero/golismero
7d605b937e241f51c1ca4f47b20f755eeefb9d76
thirdparty_libs/nltk/parse/chart.py
python
ChartParser.chart_parse
(self, tokens, trace=None)
return chart
Return the final parse ``Chart`` from which all possible parse trees can be extracted. :param tokens: The sentence to be parsed :type tokens: list(str) :rtype: Chart
Return the final parse ``Chart`` from which all possible parse trees can be extracted.
[ "Return", "the", "final", "parse", "Chart", "from", "which", "all", "possible", "parse", "trees", "can", "be", "extracted", "." ]
def chart_parse(self, tokens, trace=None): """ Return the final parse ``Chart`` from which all possible parse trees can be extracted. :param tokens: The sentence to be parsed :type tokens: list(str) :rtype: Chart """ if trace is None: trace = self._trace trace_new_edges = self._trace_new_edges tokens = list(tokens) self._grammar.check_coverage(tokens) chart = self._chart_class(tokens) grammar = self._grammar # Width, for printing trace edges. trace_edge_width = self._trace_chart_width / (chart.num_leaves() + 1) if trace: print chart.pp_leaves(trace_edge_width) if self._use_agenda: # Use an agenda-based algorithm. for axiom in self._axioms: new_edges = axiom.apply(chart, grammar) trace_new_edges(chart, axiom, new_edges, trace, trace_edge_width) inference_rules = self._inference_rules agenda = chart.edges() # We reverse the initial agenda, since it is a stack # but chart.edges() functions as a queue. agenda.reverse() while agenda: edge = agenda.pop() for rule in inference_rules: new_edges = rule.apply_iter(chart, grammar, edge) if trace: new_edges = list(new_edges) trace_new_edges(chart, rule, new_edges, trace, trace_edge_width) agenda += new_edges else: # Do not use an agenda-based algorithm. edges_added = True while edges_added: edges_added = False for rule in self._strategy: new_edges = rule.apply_everywhere(chart, grammar) edges_added = len(new_edges) trace_new_edges(chart, rule, new_edges, trace, trace_edge_width) # Return the final chart. return chart
[ "def", "chart_parse", "(", "self", ",", "tokens", ",", "trace", "=", "None", ")", ":", "if", "trace", "is", "None", ":", "trace", "=", "self", ".", "_trace", "trace_new_edges", "=", "self", ".", "_trace_new_edges", "tokens", "=", "list", "(", "tokens", ...
https://github.com/golismero/golismero/blob/7d605b937e241f51c1ca4f47b20f755eeefb9d76/thirdparty_libs/nltk/parse/chart.py#L1340-L1392
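As a rough usage sketch against the current nltk API (the toy grammar and sentence are invented), `chart_parse` hands back the chart itself, and trees are extracted from it separately:

import nltk

grammar = nltk.CFG.fromstring("""
S -> NP VP
NP -> Det N
VP -> V
Det -> 'the'
N -> 'dog'
V -> 'barks'
""")
parser = nltk.ChartParser(grammar)
chart = parser.chart_parse("the dog barks".split())
for tree in chart.parses(grammar.start()):
    print(tree)   # (S (NP (Det the) (N dog)) (VP (V barks)))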
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/core/ui/console/io/unixctrl.py
python
_moveDelta
(delta, pos_code, neg_code)
[]
def _moveDelta(delta, pos_code, neg_code): if delta != 0: code = delta > 0 and pos_code or neg_code sys.stdout.write(code % abs(delta))
[ "def", "_moveDelta", "(", "delta", ",", "pos_code", ",", "neg_code", ")", ":", "if", "delta", "!=", "0", ":", "code", "=", "delta", ">", "0", "and", "pos_code", "or", "neg_code", "sys", ".", "stdout", ".", "write", "(", "code", "%", "abs", "(", "de...
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/core/ui/console/io/unixctrl.py#L98-L101
enthought/chaco
0907d1dedd07a499202efbaf2fe2a4e51b4c8e5f
chaco/tools/cursor_tool.py
python
CursorTool1D.draw
(self, gc, view_bounds=None)
Draws this tool on a graphics context. Overrides LineInspector, BaseTool.
Draws this tool on a graphics context.
[ "Draws", "this", "tool", "on", "a", "graphics", "context", "." ]
def draw(self, gc, view_bounds=None): """Draws this tool on a graphics context. Overrides LineInspector, BaseTool. """ # We draw at different points depending on whether or not we are # interactive. If both listener and interactive are true, then the # selection metadata on the plot component takes precedence. plot = self.component if plot is None: return sx, sy = plot.map_screen(self.current_position)[0] orientation = plot.orientation if orientation == "h" and sx is not None: self._draw_vertical_line(gc, sx) elif sy is not None: self._draw_horizontal_line(gc, sy) if self.show_marker: self._draw_marker(gc, sx, sy) if self.show_value_line: if orientation == "h" and sy is not None: self._draw_horizontal_line(gc, sy) elif sx is not None: self._draw_vertical_line(gc, sx)
[ "def", "draw", "(", "self", ",", "gc", ",", "view_bounds", "=", "None", ")", ":", "# We draw at different points depending on whether or not we are", "# interactive. If both listener and interactive are true, then the", "# selection metadata on the plot component takes precedence.", ...
https://github.com/enthought/chaco/blob/0907d1dedd07a499202efbaf2fe2a4e51b4c8e5f/chaco/tools/cursor_tool.py#L191-L218
deluge-torrent/deluge
2316088f5c0dd6cb044d9d4832fa7d56dcc79cdc
deluge/ui/gtk3/pluginmanager.py
python
PluginManager.register_hook
(self, hook, function)
Register a hook function with the plugin manager
Register a hook function with the plugin manager
[ "Register", "a", "hook", "function", "with", "the", "plugin", "manager" ]
def register_hook(self, hook, function): """Register a hook function with the plugin manager""" try: self.hooks[hook].append(function) except KeyError: log.warning('Plugin attempting to register invalid hook.')
[ "def", "register_hook", "(", "self", ",", "hook", ",", "function", ")", ":", "try", ":", "self", ".", "hooks", "[", "hook", "]", ".", "append", "(", "function", ")", "except", "KeyError", ":", "log", ".", "warning", "(", "'Plugin attempting to register inv...
https://github.com/deluge-torrent/deluge/blob/2316088f5c0dd6cb044d9d4832fa7d56dcc79cdc/deluge/ui/gtk3/pluginmanager.py#L36-L41
shiweibsw/Translation-Tools
2fbbf902364e557fa7017f9a74a8797b7440c077
venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/distlib/manifest.py
python
Manifest._glob_to_re
(self, pattern)
return pattern_re
Translate a shell-like glob pattern to a regular expression. Return a string containing the regex. Differs from 'fnmatch.translate()' in that '*' does not match "special characters" (which are platform-specific).
Translate a shell-like glob pattern to a regular expression.
[ "Translate", "a", "shell", "-", "like", "glob", "pattern", "to", "a", "regular", "expression", "." ]
def _glob_to_re(self, pattern): """Translate a shell-like glob pattern to a regular expression. Return a string containing the regex. Differs from 'fnmatch.translate()' in that '*' does not match "special characters" (which are platform-specific). """ pattern_re = fnmatch.translate(pattern) # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, # and by extension they shouldn't match such "special characters" under # any OS. So change all non-escaped dots in the RE to match any # character except the special characters (currently: just os.sep). sep = os.sep if os.sep == '\\': # we're using a regex to manipulate a regex, so we need # to escape the backslash twice sep = r'\\\\' escaped = r'\1[^%s]' % sep pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re) return pattern_re
[ "def", "_glob_to_re", "(", "self", ",", "pattern", ")", ":", "pattern_re", "=", "fnmatch", ".", "translate", "(", "pattern", ")", "# '?' and '*' in the glob pattern become '.' and '.*' in the RE, which", "# IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,", "#...
https://github.com/shiweibsw/Translation-Tools/blob/2fbbf902364e557fa7017f9a74a8797b7440c077/venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/distlib/manifest.py#L372-L393
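The effect of the dot substitution is easiest to see next to plain `fnmatch.translate`. A small illustrative snippet (CPython 3.x, POSIX separator assumed for brevity):

import fnmatch
import re

plain = fnmatch.translate('pkg/*.py')           # '*' becomes '.*' and crosses '/'
print(bool(re.match(plain, 'pkg/sub/x.py')))    # True

# the same substitution the method applies: non-escaped '.' -> '[^/]'
fixed = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', plain)
print(bool(re.match(fixed, 'pkg/sub/x.py')))    # False
print(bool(re.match(fixed, 'pkg/x.py')))        # True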
learningequality/kolibri
d056dbc477aaf651ab843caa141a6a1e0a491046
kolibri/core/auth/sync_operations.py
python
KolibriVersionedSyncOperation.downgrade
(self, context)
Called when we're producing data for a version older than `self.version` :type context: morango.sync.context.LocalSessionContext
Called when we're producing data for a version older than `self.version`
[ "Called", "when", "we", "re", "producing", "data", "for", "a", "version", "older", "than", "self", ".", "version" ]
def downgrade(self, context): """ Called when we're producing data for a version older than `self.version` :type context: morango.sync.context.LocalSessionContext """ pass
[ "def", "downgrade", "(", "self", ",", "context", ")", ":", "pass" ]
https://github.com/learningequality/kolibri/blob/d056dbc477aaf651ab843caa141a6a1e0a491046/kolibri/core/auth/sync_operations.py#L191-L197
ronf/asyncssh
ee1714c598d8c2ea6f5484e465443f38b68714aa
asyncssh/process.py
python
SSHProcess.connection_lost
(self, exc: Optional[Exception])
Handle a close of the SSH channel
Handle a close of the SSH channel
[ "Handle", "a", "close", "of", "the", "SSH", "channel" ]
def connection_lost(self, exc: Optional[Exception]) -> None: """Handle a close of the SSH channel""" super().connection_lost(exc) # type: ignore for reader in list(self._readers.values()): reader.close() for writer in list(self._writers.values()): writer.close() self._readers = {} self._writers = {}
[ "def", "connection_lost", "(", "self", ",", "exc", ":", "Optional", "[", "Exception", "]", ")", "->", "None", ":", "super", "(", ")", ".", "connection_lost", "(", "exc", ")", "# type: ignore", "for", "reader", "in", "list", "(", "self", ".", "_readers", ...
https://github.com/ronf/asyncssh/blob/ee1714c598d8c2ea6f5484e465443f38b68714aa/asyncssh/process.py#L890-L902
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/modules/cassandra_cql.py
python
_load_properties
(property_name, config_option, set_default=False, default=None)
return property_name
Load properties for the cassandra module from config or pillar. :param property_name: The property to load. :type property_name: str or list of str :param config_option: The name of the config option. :type config_option: str :param set_default: Should a default be set if not found in config. :type set_default: bool :param default: The default value to be set. :type default: str or int :return: The property fetched from the configuration or default. :rtype: str or list of str
Load properties for the cassandra module from config or pillar.
[ "Load", "properties", "for", "the", "cassandra", "module", "from", "config", "or", "pillar", "." ]
def _load_properties(property_name, config_option, set_default=False, default=None): """ Load properties for the cassandra module from config or pillar. :param property_name: The property to load. :type property_name: str or list of str :param config_option: The name of the config option. :type config_option: str :param set_default: Should a default be set if not found in config. :type set_default: bool :param default: The default value to be set. :type default: str or int :return: The property fetched from the configuration or default. :rtype: str or list of str """ if not property_name: log.debug( "No property specified in function, trying to load from salt configuration" ) try: options = __salt__["config.option"]("cassandra") except BaseException as e: log.error("Failed to get cassandra config options. Reason: %s", e) raise loaded_property = options.get(config_option) if not loaded_property: if set_default: log.debug("Setting default Cassandra %s to %s", config_option, default) loaded_property = default else: log.error( "No cassandra %s specified in the configuration or passed to the" " module.", config_option, ) raise CommandExecutionError( "ERROR: Cassandra {} cannot be empty.".format(config_option) ) return loaded_property return property_name
[ "def", "_load_properties", "(", "property_name", ",", "config_option", ",", "set_default", "=", "False", ",", "default", "=", "None", ")", ":", "if", "not", "property_name", ":", "log", ".", "debug", "(", "\"No property specified in function, trying to load from salt ...
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/modules/cassandra_cql.py#L131-L171
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
source/openerp/addons/base/ir/ir_attachment.py
python
ir_attachment.check
(self, cr, uid, ids, mode, context=None, values=None)
Restricts the access to an ir.attachment, according to the referred model. In the 'document' module, it is overridden to relax this hard rule, since more complex ones apply there.
Restricts the access to an ir.attachment, according to the referred model. In the 'document' module, it is overridden to relax this hard rule, since more complex ones apply there.
[ "Restricts", "the", "access", "to", "an", "ir", ".", "attachment", "according", "to", "the", "referred", "model", "In", "the", "document", "module", "it", "is", "overridden", "to", "relax", "this", "hard", "rule", "since", "more", "complex", "ones", "apply", "there"...
def check(self, cr, uid, ids, mode, context=None, values=None): """Restricts the access to an ir.attachment, according to the referred model. In the 'document' module, it is overridden to relax this hard rule, since more complex ones apply there. """ res_ids = {} require_employee = False if ids: if isinstance(ids, (int, long)): ids = [ids] cr.execute('SELECT DISTINCT res_model, res_id, create_uid FROM ir_attachment WHERE id = ANY (%s)', (ids,)) for rmod, rid, create_uid in cr.fetchall(): if not (rmod and rid): if create_uid != uid: require_employee = True continue res_ids.setdefault(rmod,set()).add(rid) if values: if values.get('res_model') and values.get('res_id'): res_ids.setdefault(values['res_model'],set()).add(values['res_id']) ima = self.pool.get('ir.model.access') for model, mids in res_ids.items(): # ignore attachments that are not attached to a resource anymore when checking access rights # (resource was deleted but attachment was not) if not self.pool.get(model): require_employee = True continue existing_ids = self.pool[model].exists(cr, uid, mids) if len(existing_ids) != len(mids): require_employee = True ima.check(cr, uid, model, mode) self.pool[model].check_access_rule(cr, uid, existing_ids, mode, context=context) if require_employee: if not uid == SUPERUSER_ID and not self.pool['res.users'].has_group(cr, uid, 'base.group_user'): raise except_orm(_('Access Denied'), _("Sorry, you are not allowed to access this document."))
[ "def", "check", "(", "self", ",", "cr", ",", "uid", ",", "ids", ",", "mode", ",", "context", "=", "None", ",", "values", "=", "None", ")", ":", "res_ids", "=", "{", "}", "require_employee", "=", "False", "if", "ids", ":", "if", "isinstance", "(", ...
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/source/openerp/addons/base/ir/ir_attachment.py#L225-L260
tensorflow/tensorboard
61d11d99ef034c30ba20b6a7840c8eededb9031c
tensorboard/encode_png_benchmark.py
python
main
(unused_argv)
[]
def main(unused_argv): logging.set_verbosity(logging.INFO) np.random.seed(0) thread_counts = [1, 2, 4, 6, 8, 10, 12, 14, 16, 32] logger.info("Warming up...") warmup_image = _image_of_size(256) for thread_count in thread_counts: bench(warmup_image, thread_count) logger.info("Running...") results = {} image = _image_of_size(4096) headers = ("THREADS", "TOTAL_TIME", "UNIT_TIME", "SPEEDUP", "PARALLELISM") logger.info(_format_line(headers, headers)) for thread_count in thread_counts: time.sleep(1.0) total_time = min( bench(image, thread_count) for _ in range(3) ) # best-of-three timing unit_time = total_time / thread_count if total_time < 2.0: logger.warning( "This benchmark is running too quickly! This " "may cause misleading timing data. Consider " "increasing the image size until it takes at " "least 2.0s to encode one image." ) results[thread_count] = unit_time speedup = results[1] / results[thread_count] parallelism = speedup / thread_count fields = (thread_count, total_time, unit_time, speedup, parallelism) logger.info(_format_line(headers, fields))
[ "def", "main", "(", "unused_argv", ")", ":", "logging", ".", "set_verbosity", "(", "logging", ".", "INFO", ")", "np", ".", "random", ".", "seed", "(", "0", ")", "thread_counts", "=", "[", "1", ",", "2", ",", "4", ",", "6", ",", "8", ",", "10", ...
https://github.com/tensorflow/tensorboard/blob/61d11d99ef034c30ba20b6a7840c8eededb9031c/tensorboard/encode_png_benchmark.py#L112-L145
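For reference, the columns printed by this benchmark reduce to two small formulas. With t_n the best-of-three wall time for encoding with n threads (one image per thread, as the unit_time computation implies):

\[
\text{unit\_time}(n) = \frac{t_n}{n},\qquad
\text{speedup}(n) = \frac{\text{unit\_time}(1)}{\text{unit\_time}(n)},\qquad
\text{parallelism}(n) = \frac{\text{speedup}(n)}{n},
\]

so perfect scaling gives parallelism = 1 and a fully serialized encoder gives parallelism ≈ 1/n.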
zykls/whynot
86fd2349a83cd43c614b55f5bf2dfc9ece143081
whynot/framework.py
python
ParameterCollection.default
(self)
return {name: p.default for name, p in self.params.items()}
Return the default parameter setting for each parameter.
Return the default parameter setting for each parameter.
[ "Return", "the", "default", "parameter", "setting", "for", "each", "parameter", "." ]
def default(self): """Return the default parameter setting for each parameter.""" return {name: p.default for name, p in self.params.items()}
[ "def", "default", "(", "self", ")", ":", "return", "{", "name", ":", "p", ".", "default", "for", "name", ",", "p", "in", "self", ".", "params", ".", "items", "(", ")", "}" ]
https://github.com/zykls/whynot/blob/86fd2349a83cd43c614b55f5bf2dfc9ece143081/whynot/framework.py#L222-L224
homebysix/recipe-robot
fc51b3134b6db7cd86641785d75a0b994ae88154
scripts/recipe_robot_lib/recipe_generator.py
python
required_repo_reminder
(repo_name, repo_url, facts)
Print a reminder if a required repo is not already added. Args: repo_name (str): Name of repo to issue reminder to add. repo_url (str): URL to the repo, to use with `autopkg repo-add`. facts (RoboDict): A continually-updated dictionary containing all the information we know so far about the app associated with the input path.
Print a reminder if a required repo is not already added.
[ "Print", "a", "reminder", "if", "a", "required", "repo", "is", "not", "already", "added", "." ]
def required_repo_reminder(repo_name, repo_url, facts): """Print a reminder if a required repo is not already added. Args: repo_name (str): Name of repo to issue reminder to add. repo_url (str): URL to the repo, to use with `autopkg repo-add`. facts (RoboDict): A continually-updated dictionary containing all the information we know so far about the app associated with the input path. """ cmd = "/usr/local/bin/autopkg repo-list" exitcode, out, err = get_exitcode_stdout_stderr(cmd) if not any( (line.endswith("(%s)" % repo_url) or line.endswith("(%s.git)" % repo_url)) for line in out.splitlines() ): facts["reminders"].append( "You'll need to add the %s repo in order to use " "this recipe:\n autopkg repo-add " '"%s"' % (repo_name, repo_url) )
[ "def", "required_repo_reminder", "(", "repo_name", ",", "repo_url", ",", "facts", ")", ":", "cmd", "=", "\"/usr/local/bin/autopkg repo-list\"", "exitcode", ",", "out", ",", "err", "=", "get_exitcode_stdout_stderr", "(", "cmd", ")", "if", "not", "any", "(", "(", ...
https://github.com/homebysix/recipe-robot/blob/fc51b3134b6db7cd86641785d75a0b994ae88154/scripts/recipe_robot_lib/recipe_generator.py#L164-L184
roclark/sportsipy
c19f545d3376d62ded6304b137dc69238ac620a9
sportsipy/mlb/teams.py
python
Team.single_run_record
(self)
return self._single_run_record
Returns a ``string`` of the team's record when only one run is scored. Record is in the format 'W-L'.
Returns a ``string`` of the team's record when only one run is scored. Record is in the format 'W-L'.
[ "Returns", "a", "string", "of", "the", "team", "s", "record", "when", "only", "one", "run", "is", "scored", ".", "Record", "is", "in", "the", "format", "W", "-", "L", "." ]
def single_run_record(self): """ Returns a ``string`` of the team's record when only one run is scored. Record is in the format 'W-L'. """ return self._single_run_record
[ "def", "single_run_record", "(", "self", ")", ":", "return", "self", ".", "_single_run_record" ]
https://github.com/roclark/sportsipy/blob/c19f545d3376d62ded6304b137dc69238ac620a9/sportsipy/mlb/teams.py#L620-L625
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/intesishome/climate.py
python
IntesisAC.async_added_to_hass
(self)
Subscribe to event updates.
Subscribe to event updates.
[ "Subscribe", "to", "event", "updates", "." ]
async def async_added_to_hass(self): """Subscribe to event updates.""" _LOGGER.debug("Added climate device with state: %s", repr(self._ih_device)) await self._controller.add_update_callback(self.async_update_callback) try: await self._controller.connect() except IHConnectionError as ex: _LOGGER.error("Exception connecting to IntesisHome: %s", ex) raise PlatformNotReady from ex
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "_LOGGER", ".", "debug", "(", "\"Added climate device with state: %s\"", ",", "repr", "(", "self", ".", "_ih_device", ")", ")", "await", "self", ".", "_controller", ".", "add_update_callback", "(", "s...
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/intesishome/climate.py#L211-L219
stanfordnlp/stanza
e44d1c88340e33bf9813e6f5a6bd24387eefc4b2
stanza/models/constituency/base_model.py
python
BaseModel.push_constituents
(self, constituent_stacks, constituents)
Add multiple constituents to multiple constituent_stacks Useful to factor this out in case batching will help
Add multiple constituents to multiple constituent_stacks
[ "Add", "multiple", "constituents", "to", "multiple", "constituent_stacks" ]
def push_constituents(self, constituent_stacks, constituents): """ Add multiple constituents to multiple constituent_stacks Useful to factor this out in case batching will help """
[ "def", "push_constituents", "(", "self", ",", "constituent_stacks", ",", "constituents", ")", ":" ]
https://github.com/stanfordnlp/stanza/blob/e44d1c88340e33bf9813e6f5a6bd24387eefc4b2/stanza/models/constituency/base_model.py#L84-L89
francisck/DanderSpritz_docs
86bb7caca5a957147f120b18bb5c31f299914904
Python/Core/Lib/lib-tk/Tkinter.py
python
Canvas.icursor
(self, *args)
Set cursor at position POS in the item identified by TAGORID. In ARGS TAGORID must be first.
Set cursor at position POS in the item identified by TAGORID. In ARGS TAGORID must be first.
[ "Set", "cursor", "at", "position", "POS", "in", "the", "item", "identified", "by", "TAGORID", ".", "In", "ARGS", "TAGORID", "must", "be", "first", "." ]
def icursor(self, *args): """Set cursor at position POS in the item identified by TAGORID. In ARGS TAGORID must be first.""" self.tk.call((self._w, 'icursor') + args)
[ "def", "icursor", "(", "self", ",", "*", "args", ")", ":", "self", ".", "tk", ".", "call", "(", "(", "self", ".", "_w", ",", "'icursor'", ")", "+", "args", ")" ]
https://github.com/francisck/DanderSpritz_docs/blob/86bb7caca5a957147f120b18bb5c31f299914904/Python/Core/Lib/lib-tk/Tkinter.py#L2513-L2516
agile-geoscience/bruges
64f162dbf8b94ff265108f6eb85e0d4e28ecb2cb
bruges/rockphysics/rpm.py
python
hertzmindlin
(K0, G0, phi, phic=0.4, Cn=8.6, P=10, f=1)
return K_HM, G_HM
Hertz-Mindlin model written by Alessandro Amato del Monte (2015) from Mavko et al., Rock Physics Handbook, p.246 INPUT K0, G0: mineral bulk & shear modulus in GPa phi: porosity phic: critical porosity (default 0.4) Cn: coordination number (default 8.6) P: confining pressure in MPa (default 10) f: shear modulus correction factor, f=1 for dry pack with perfect adhesion between particles and f=0 for dry frictionless pack OUTPUT K_DRY, G_DRY: dry rock bulk & shear modulus in GPa
Hertz-Mindlin model written by Alessandro Amato del Monte (2015) from Mavko et al., Rock Physics Handbook, p.246 INPUT K0, G0: mineral bulk & shear modulus in GPa phi: porosity phic: critical porosity (default 0.4) Cn: coordination number (default 8.6) P: confining pressure in MPa (default 10) f: shear modulus correction factor, f=1 for dry pack with perfect adhesion between particles and f=0 for dry frictionless pack OUTPUT K_DRY, G_DRY: dry rock bulk & shear modulus in GPa
[ "Hertz", "-", "Mindlin", "model", "written", "by", "Alessandro", "Amato", "del", "Monte", "(", "2015", ")", "from", "Mavko", "et", "al", ".", "Rock", "Physics", "Handbook", "p", ".", "246", "INPUT", "K0", "G0", ":", "mineral", "bulk", "&", "shear", "mo...
def hertzmindlin(K0, G0, phi, phic=0.4, Cn=8.6, P=10, f=1): ''' Hertz-Mindlin model written by Alessandro Amato del Monte (2015) from Mavko et al., Rock Physics Handbook, p.246 INPUT K0, G0: mineral bulk & shear modulus in GPa phi: porosity phic: critical porosity (default 0.4) Cn: coordination number (default 8.6) P: confining pressure in MPa (default 10) f: shear modulus correction factor, f=1 for dry pack with perfect adhesion between particles and f=0 for dry frictionless pack OUTPUT K_DRY, G_DRY: dry rock bulk & shear modulus in GPa ''' P /= 1e3 # converts pressure in same units as solid moduli (GPa) PR0=(3*K0-2*G0)/(6*K0+2*G0) # Poisson's ratio of mineral mixture K_HM = (P*(Cn**2*(1-phic)**2*G0**2) / (18*np.pi**2*(1-PR0)**2))**(1/3) G_HM = ((2+3*f-PR0*(1+3*f))/(5*(2-PR0))) * ((P*(3*Cn**2*(1-phic)**2*G0**2)/(2*np.pi**2*(1-PR0)**2)))**(1/3) return K_HM, G_HM
[ "def", "hertzmindlin", "(", "K0", ",", "G0", ",", "phi", ",", "phic", "=", "0.4", ",", "Cn", "=", "8.6", ",", "P", "=", "10", ",", "f", "=", "1", ")", ":", "P", "/=", "1e3", "# converts pressure in same units as solid moduli (GPa)", "PR0", "=", "(", ...
https://github.com/agile-geoscience/bruges/blob/64f162dbf8b94ff265108f6eb85e0d4e28ecb2cb/bruges/rockphysics/rpm.py#L28-L48
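Written out, the two expressions computed above are the standard Hertz-Mindlin dry-pack moduli (ν₀ is the mineral Poisson's ratio, P already converted to GPa):

\[
\nu_0 = \frac{3K_0 - 2G_0}{6K_0 + 2G_0},\qquad
K_{HM} = \left[\frac{C_n^2\,(1-\phi_c)^2\,G_0^2}{18\,\pi^2\,(1-\nu_0)^2}\,P\right]^{1/3},
\]
\[
G_{HM} = \frac{2 + 3f - \nu_0(1 + 3f)}{5\,(2-\nu_0)}\left[\frac{3\,C_n^2\,(1-\phi_c)^2\,G_0^2}{2\,\pi^2\,(1-\nu_0)^2}\,P\right]^{1/3}.
\]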
networkx/networkx
1620568e36702b1cfeaf1c0277b167b6cb93e48d
networkx/readwrite/json_graph/adjacency.py
python
adjacency_data
(G, attrs=_attrs)
return data
Returns data in adjacency format that is suitable for JSON serialization and use in Javascript documents. Parameters ---------- G : NetworkX graph attrs : dict A dictionary that contains two keys 'id' and 'key'. The corresponding values provide the attribute names for storing NetworkX-internal graph data. The values should be unique. Default value: :samp:`dict(id='id', key='key')`. If some user-defined graph data use these attribute names as data keys, they may be silently dropped. Returns ------- data : dict A dictionary with adjacency formatted data. Raises ------ NetworkXError If values in attrs are not unique. Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.Graph([(1, 2)]) >>> data = json_graph.adjacency_data(G) To serialize with json >>> import json >>> s = json.dumps(data) Notes ----- Graph, node, and link attributes will be written when using this format but attribute keys must be strings if you want to serialize the resulting data with JSON. The default value of attrs will be changed in a future release of NetworkX. See Also -------- adjacency_graph, node_link_data, tree_data
Returns data in adjacency format that is suitable for JSON serialization and use in Javascript documents.
[ "Returns", "data", "in", "adjacency", "format", "that", "is", "suitable", "for", "JSON", "serialization", "and", "use", "in", "Javascript", "documents", "." ]
def adjacency_data(G, attrs=_attrs): """Returns data in adjacency format that is suitable for JSON serialization and use in Javascript documents. Parameters ---------- G : NetworkX graph attrs : dict A dictionary that contains two keys 'id' and 'key'. The corresponding values provide the attribute names for storing NetworkX-internal graph data. The values should be unique. Default value: :samp:`dict(id='id', key='key')`. If some user-defined graph data use these attribute names as data keys, they may be silently dropped. Returns ------- data : dict A dictionary with adjacency formatted data. Raises ------ NetworkXError If values in attrs are not unique. Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.Graph([(1, 2)]) >>> data = json_graph.adjacency_data(G) To serialize with json >>> import json >>> s = json.dumps(data) Notes ----- Graph, node, and link attributes will be written when using this format but attribute keys must be strings if you want to serialize the resulting data with JSON. The default value of attrs will be changed in a future release of NetworkX. See Also -------- adjacency_graph, node_link_data, tree_data """ multigraph = G.is_multigraph() id_ = attrs["id"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. key = None if not multigraph else attrs["key"] if id_ == key: raise nx.NetworkXError("Attribute names are not unique.") data = {} data["directed"] = G.is_directed() data["multigraph"] = multigraph data["graph"] = list(G.graph.items()) data["nodes"] = [] data["adjacency"] = [] for n, nbrdict in G.adjacency(): data["nodes"].append(dict(chain(G.nodes[n].items(), [(id_, n)]))) adj = [] if multigraph: for nbr, keys in nbrdict.items(): for k, d in keys.items(): adj.append(dict(chain(d.items(), [(id_, nbr), (key, k)]))) else: for nbr, d in nbrdict.items(): adj.append(dict(chain(d.items(), [(id_, nbr)]))) data["adjacency"].append(adj) return data
[ "def", "adjacency_data", "(", "G", ",", "attrs", "=", "_attrs", ")", ":", "multigraph", "=", "G", ".", "is_multigraph", "(", ")", "id_", "=", "attrs", "[", "\"id\"", "]", "# Allow 'key' to be omitted from attrs if the graph is not a multigraph.", "key", "=", "None...
https://github.com/networkx/networkx/blob/1620568e36702b1cfeaf1c0277b167b6cb93e48d/networkx/readwrite/json_graph/adjacency.py#L9-L82
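The docstring's serialization example extends naturally to a round trip through `adjacency_graph` (listed under See Also); a short sketch:

import json
import networkx as nx
from networkx.readwrite import json_graph

G = nx.Graph([(1, 2), (2, 3)])
data = json_graph.adjacency_data(G)
s = json.dumps(data)                      # attribute keys must be strings
H = json_graph.adjacency_graph(json.loads(s))
print(sorted(H.edges()))                  # [(1, 2), (2, 3)]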
openstack/tacker
a60993fc3b2d4fc0e93ab13a874fe3c314fe48de
tacker/vnfm/infra_drivers/openstack/openstack.py
python
OpenStack.heal_vnf_wait
(self, context, vnf_instance, vim_connection_info, heal_vnf_request)
return stack
Check vnf is healed successfully
Check vnf is healed successfully
[ "Check", "vnf", "is", "healed", "successfully" ]
def heal_vnf_wait(self, context, vnf_instance, vim_connection_info, heal_vnf_request): """Check vnf is healed successfully""" access_info = vim_connection_info.access_info region_name = access_info.get('region') inst_vnf_info = vnf_instance.instantiated_vnf_info stack = self._wait_until_stack_ready(inst_vnf_info.instance_id, access_info, infra_cnst.STACK_UPDATE_IN_PROGRESS, infra_cnst.STACK_UPDATE_COMPLETE, vnfm.VNFHealWaitFailed, region_name=region_name) return stack
[ "def", "heal_vnf_wait", "(", "self", ",", "context", ",", "vnf_instance", ",", "vim_connection_info", ",", "heal_vnf_request", ")", ":", "access_info", "=", "vim_connection_info", ".", "access_info", "region_name", "=", "access_info", ".", "get", "(", "'region'", ...
https://github.com/openstack/tacker/blob/a60993fc3b2d4fc0e93ab13a874fe3c314fe48de/tacker/vnfm/infra_drivers/openstack/openstack.py#L1472-L1483
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/wave.py
python
Wave_write.setcomptype
(self, comptype, compname)
[]
def setcomptype(self, comptype, compname): if self._datawritten: raise Error('cannot change parameters after starting to write') if comptype not in ('NONE',): raise Error('unsupported compression type') self._comptype = comptype self._compname = compname
[ "def", "setcomptype", "(", "self", ",", "comptype", ",", "compname", ")", ":", "if", "self", ".", "_datawritten", ":", "raise", "Error", "(", "'cannot change parameters after starting to write'", ")", "if", "comptype", "not", "in", "(", "'NONE'", ",", ")", ":"...
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/wave.py#L382-L388
soravux/scoop
3c0357c32cec3169a19c822a3857c968a48775c5
bench/evosn/futures_evosn.py
python
mutAddWire
(individual, dimension)
[]
def mutAddWire(individual, dimension): index = random.randint(0, len(individual)) individual.insert(index, genWire(dimension))
[ "def", "mutAddWire", "(", "individual", ",", "dimension", ")", ":", "index", "=", "random", ".", "randint", "(", "0", ",", "len", "(", "individual", ")", ")", "individual", ".", "insert", "(", "index", ",", "genWire", "(", "dimension", ")", ")" ]
https://github.com/soravux/scoop/blob/3c0357c32cec3169a19c822a3857c968a48775c5/bench/evosn/futures_evosn.py#L60-L62
P1sec/pycrate
d12bbccf1df8c9c7891a26967a9d2635610ec5b8
pycrate_core/elt.py
python
Array.pop
(self)
Pop the last value of the array wrapped within its template Args: None Returns: elt (Element) : last element of the instance
Pop the last value of the array wrapped within its template Args: None Returns: elt (Element) : last element of the instance
[ "Pop", "the", "last", "value", "of", "the", "array", "wrapped", "within", "its", "template", "Args", ":", "None", "Returns", ":", "elt", "(", "Element", ")", ":", "last", "element", "of", "the", "instance" ]
def pop(self): """Pop the last value of the array wrapped within its template Args: None Returns: elt (Element) : last element of the instance """ if self._SAFE_STAT and self._num is not None and len(self._val) == self._num: raise(EltErr('{0} [pop] val length {1} underflow (num {2})'\ .format(self._name, len(self._val)-1, self._num))) try: val = self._val.pop() except Exception as err: raise(EltErr('{0} [pop]: {1}'.format(self._name, err))) else: clone = self._tmpl.clone() clone._val = val return clone
[ "def", "pop", "(", "self", ")", ":", "if", "self", ".", "_SAFE_STAT", "and", "self", ".", "_num", "is", "not", "None", "and", "len", "(", "self", ".", "_val", ")", "==", "self", ".", "_num", ":", "raise", "(", "EltErr", "(", "'{0} [pop] val length {1...
https://github.com/P1sec/pycrate/blob/d12bbccf1df8c9c7891a26967a9d2635610ec5b8/pycrate_core/elt.py#L2957-L2976
wwqgtxx/wwqLyParse
33136508e52821babd9294fdecffbdf02d73a6fc
wwqLyParse/lib/python-3.7.2-embed-amd64/pyquery/pyquery.py
python
PyQuery.is_
(self, selector)
return bool(self._filter_only(selector, self))
Returns True if selector matches at least one current element, else False: >>> d = PyQuery('<p class="hello"><span>Hi</span></p><p>Bye</p>') >>> d('p').eq(0).is_('.hello') True >>> d('p').eq(0).is_('span') False >>> d('p').eq(1).is_('.hello') False ..
Returns True if selector matches at least one current element, else False:
[ "Returns", "True", "if", "selector", "matches", "at", "least", "one", "current", "element", "else", "False", ":" ]
def is_(self, selector): """Returns True if selector matches at least one current element, else False: >>> d = PyQuery('<p class="hello"><span>Hi</span></p><p>Bye</p>') >>> d('p').eq(0).is_('.hello') True >>> d('p').eq(0).is_('span') False >>> d('p').eq(1).is_('.hello') False .. """ return bool(self._filter_only(selector, self))
[ "def", "is_", "(", "self", ",", "selector", ")", ":", "return", "bool", "(", "self", ".", "_filter_only", "(", "selector", ",", "self", ")", ")" ]
https://github.com/wwqgtxx/wwqLyParse/blob/33136508e52821babd9294fdecffbdf02d73a6fc/wwqLyParse/lib/python-3.7.2-embed-amd64/pyquery/pyquery.py#L648-L664
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/Django/django/db/models/loading.py
python
AppCache.register_models
(self, app_label, *models)
Register a set of models as belonging to an app.
Register a set of models as belonging to an app.
[ "Register", "a", "set", "of", "models", "as", "belonging", "to", "an", "app", "." ]
def register_models(self, app_label, *models): """ Register a set of models as belonging to an app. """ for model in models: # Store as 'name: model' pair in a dictionary # in the app_models dictionary model_name = model._meta.object_name.lower() model_dict = self.app_models.setdefault(app_label, SortedDict()) if model_name in model_dict: # The same model may be imported via different paths (e.g. # appname.models and project.appname.models). We use the source # filename as a means to detect identity. fname1 = os.path.abspath(upath(sys.modules[model.__module__].__file__)) fname2 = os.path.abspath(upath(sys.modules[model_dict[model_name].__module__].__file__)) # Since the filename extension could be .py the first time and # .pyc or .pyo the second time, ignore the extension when # comparing. if os.path.splitext(fname1)[0] == os.path.splitext(fname2)[0]: continue model_dict[model_name] = model self._get_models_cache.clear()
[ "def", "register_models", "(", "self", ",", "app_label", ",", "*", "models", ")", ":", "for", "model", "in", "models", ":", "# Store as 'name: model' pair in a dictionary", "# in the app_models dictionary", "model_name", "=", "model", ".", "_meta", ".", "object_name",...
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/Django/django/db/models/loading.py#L235-L256
benknight/hue-alfred-workflow
4447ba61116caf4a448b50c4bfb866565d66d81e
logic/packages/png/png.py
python
Reader.palette
(self, alpha='natural')
return plte
Returns a palette that is a sequence of 3-tuples or 4-tuples, synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These chunks should have already been processed (for example, by calling the :meth:`preamble` method). All the tuples are the same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when there is a ``tRNS`` chunk. Assumes that the image is colour type 3 and therefore a ``PLTE`` chunk is required. If the `alpha` argument is ``'force'`` then an alpha channel is always added, forcing the result to be a sequence of 4-tuples.
Returns a palette that is a sequence of 3-tuples or 4-tuples, synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These chunks should have already been processed (for example, by calling the :meth:`preamble` method). All the tuples are the same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when there is a ``tRNS`` chunk. Assumes that the image is colour type 3 and therefore a ``PLTE`` chunk is required.
[ "Returns", "a", "palette", "that", "is", "a", "sequence", "of", "3", "-", "tuples", "or", "4", "-", "tuples", "synthesizing", "it", "from", "the", "PLTE", "and", "tRNS", "chunks", ".", "These", "chunks", "should", "have", "already", "been", "processed", ...
def palette(self, alpha='natural'): """Returns a palette that is a sequence of 3-tuples or 4-tuples, synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These chunks should have already been processed (for example, by calling the :meth:`preamble` method). All the tuples are the same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when there is a ``tRNS`` chunk. Assumes that the image is colour type 3 and therefore a ``PLTE`` chunk is required. If the `alpha` argument is ``'force'`` then an alpha channel is always added, forcing the result to be a sequence of 4-tuples. """ if not self.plte: raise FormatError( "Required PLTE chunk is missing in colour type 3 image.") plte = group(array('B', self.plte), 3) if self.trns or alpha == 'force': trns = array('B', self.trns or '') trns.extend([255]*(len(plte)-len(trns))) plte = map(operator.add, plte, group(trns, 1)) return plte
[ "def", "palette", "(", "self", ",", "alpha", "=", "'natural'", ")", ":", "if", "not", "self", ".", "plte", ":", "raise", "FormatError", "(", "\"Required PLTE chunk is missing in colour type 3 image.\"", ")", "plte", "=", "group", "(", "array", "(", "'B'", ",",...
https://github.com/benknight/hue-alfred-workflow/blob/4447ba61116caf4a448b50c4bfb866565d66d81e/logic/packages/png/png.py#L1958-L1979
raiden-network/raiden
76c68b426a6f81f173b9a2c09bd88a610502c38b
raiden/api/python.py
python
RaidenAPI.set_total_channel_withdraw
( self, registry_address: TokenNetworkRegistryAddress, token_address: TokenAddress, partner_address: Address, total_withdraw: WithdrawAmount, retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT, )
Set the `total_withdraw` in the channel with the peer at `partner_address` and the given `token_address`. Raises: InvalidBinaryAddress: If either token_address or partner_address is not 20 bytes long. RaidenUnrecoverableError: May happen for multiple reasons: - During preconditions checks, if the channel was not open at the time of the approve_and_set_total_deposit call. - If the transaction fails during gas estimation or if a previous withdraw transaction with the same value was already mined. WithdrawMismatch: The total withdraw amount did not increase.
Set the `total_withdraw` in the channel with the peer at `partner_address` and the given `token_address`.
[ "Set", "the", "total_withdraw", "in", "the", "channel", "with", "the", "peer", "at", "partner_address", "and", "the", "given", "token_address", "." ]
def set_total_channel_withdraw( self, registry_address: TokenNetworkRegistryAddress, token_address: TokenAddress, partner_address: Address, total_withdraw: WithdrawAmount, retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT, ) -> None: """Set the `total_withdraw` in the channel with the peer at `partner_address` and the given `token_address`. Raises: InvalidBinaryAddress: If either token_address or partner_address is not 20 bytes long. RaidenUnrecoverableError: May happen for multiple reasons: - During preconditions checks, if the channel was not open at the time of the approve_and_set_total_deposit call. - If the transaction fails during gas estimation or if a previous withdraw transaction with the same value was already mined. WithdrawMismatch: The total withdraw amount did not increase. """ chain_state = views.state_from_raiden(self.raiden) token_addresses = views.get_token_identifiers(chain_state, registry_address) channel_state = views.get_channelstate_for( chain_state=chain_state, token_network_registry_address=registry_address, token_address=token_address, partner_address=partner_address, ) if not is_binary_address(token_address): raise InvalidBinaryAddress( "Expected binary address format for token in channel deposit" ) if not is_binary_address(partner_address): raise InvalidBinaryAddress( "Expected binary address format for partner in channel deposit" ) if token_address not in token_addresses: raise UnknownTokenAddress("Unknown token address") if channel_state is None: raise NonexistingChannel("No channel with partner_address for the given token") if total_withdraw <= channel_state.our_total_withdraw: raise WithdrawMismatch(f"Total withdraw {total_withdraw} did not increase") current_balance = channel.get_balance( sender=channel_state.our_state, receiver=channel_state.partner_state ) amount_to_withdraw = total_withdraw - channel_state.our_total_withdraw if amount_to_withdraw > current_balance: raise InsufficientFunds( "The withdraw of {} is bigger than the current balance of {}".format( amount_to_withdraw, current_balance ) ) pfs_proxy = self.raiden.pfs_proxy recipient_address = channel_state.partner_state.address recipient_metadata = pfs_proxy.query_address_metadata(recipient_address) self.raiden.withdraw( canonical_identifier=channel_state.canonical_identifier, total_withdraw=total_withdraw, recipient_metadata=recipient_metadata, ) waiting.wait_for_withdraw_complete( raiden=self.raiden, canonical_identifier=channel_state.canonical_identifier, total_withdraw=total_withdraw, retry_timeout=retry_timeout, )
[ "def", "set_total_channel_withdraw", "(", "self", ",", "registry_address", ":", "TokenNetworkRegistryAddress", ",", "token_address", ":", "TokenAddress", ",", "partner_address", ":", "Address", ",", "total_withdraw", ":", "WithdrawAmount", ",", "retry_timeout", ":", "Ne...
https://github.com/raiden-network/raiden/blob/76c68b426a6f81f173b9a2c09bd88a610502c38b/raiden/api/python.py#L537-L613
NTMC-Community/MatchZoo
8a487ee5a574356fc91e4f48e219253dc11bcff2
matchzoo/engine/base_task.py
python
BaseTask.loss
(self)
return self._loss
:return: Loss used in the task.
:return: Loss used in the task.
[ ":", "return", ":", "Loss", "used", "in", "the", "task", "." ]
def loss(self): """:return: Loss used in the task.""" return self._loss
[ "def", "loss", "(", "self", ")", ":", "return", "self", ".", "_loss" ]
https://github.com/NTMC-Community/MatchZoo/blob/8a487ee5a574356fc91e4f48e219253dc11bcff2/matchzoo/engine/base_task.py#L44-L46
9miao/Firefly
fd2795b8c26de6ab63bbec23d11f18c3dfb39a50
firefly/utils/services.py
python
Service.unMapTarget
(self, target)
Remove a target from the service.
Remove a target from the service.
[ "Remove", "a", "target", "from", "the", "service", "." ]
def unMapTarget(self, target): """Remove a target from the service.""" self._lock.acquire() try: key = target.__name__ if key in self._targets: del self._targets[key] finally: self._lock.release()
[ "def", "unMapTarget", "(", "self", ",", "target", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "key", "=", "target", ".", "__name__", "if", "key", "in", "self", ".", "_targets", ":", "del", "self", ".", "_targets", "[", "ke...
https://github.com/9miao/Firefly/blob/fd2795b8c26de6ab63bbec23d11f18c3dfb39a50/firefly/utils/services.py#L50-L58
LinkedInAttic/indextank-service
880c6295ce8e7a3a55bf9b3777cc35c7680e0d7e
api/thrift/server/TNonblockingServer.py
python
locked
(func)
return nested
Decorator which locks self.lock.
Decorator which locks self.lock.
[ "Decorator", "which", "locks", "self", ".", "lock", "." ]
def locked(func): "Decorator which locks self.lock." def nested(self, *args, **kwargs): self.lock.acquire() try: return func(self, *args, **kwargs) finally: self.lock.release() return nested
[ "def", "locked", "(", "func", ")", ":", "def", "nested", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "lock", ".", "acquire", "(", ")", "try", ":", "return", "func", "(", "self", ",", "*", "args", ",", "*", "...
https://github.com/LinkedInAttic/indextank-service/blob/880c6295ce8e7a3a55bf9b3777cc35c7680e0d7e/api/thrift/server/TNonblockingServer.py#L63-L71
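A minimal usage sketch (the Counter class is invented for illustration): the decorator assumes the instance it runs on exposes a `self.lock`:

import threading

class Counter:
    def __init__(self):
        self.lock = threading.Lock()
        self.value = 0

    @locked
    def increment(self):
        # runs with self.lock held; the lock is released even if the body raises
        self.value += 1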
replit-archive/empythoned
977ec10ced29a3541a4973dc2b59910805695752
cpython/Lib/wsgiref/util.py
python
is_hop_by_hop
(header_name)
return _hoppish(header_name.lower())
Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header
Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header
[ "Return", "true", "if", "header_name", "is", "an", "HTTP", "/", "1", ".", "1", "Hop", "-", "by", "-", "Hop", "header" ]
def is_hop_by_hop(header_name): """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header""" return _hoppish(header_name.lower())
[ "def", "is_hop_by_hop", "(", "header_name", ")", ":", "return", "_hoppish", "(", "header_name", ".", "lower", "(", ")", ")" ]
https://github.com/replit-archive/empythoned/blob/977ec10ced29a3541a4973dc2b59910805695752/cpython/Lib/wsgiref/util.py#L163-L165
osroom/osroom
fcb9b7c5e5cfd8e919f8d87521800e70b09b5b44
apps/modules/permission/process/url_permission.py
python
get_urls
()
return data
Get web URLs :return:
Get web URLs :return:
[ "Get", "web", "URLs", ":", "return", ":" ]
def get_urls(): """ Get web URLs :return: """ data = {} ctype = request.argget.all("type") keyword = request.argget.all("keyword") pre = str_to_num(request.argget.all("pre", 10)) page = str_to_num(request.argget.all("page", 1)) q = {} if ctype: q["type"] = ctype if keyword: keyword = {"$regex": keyword, "$options": "$i"} q["$or"] = [{"url": keyword}, {"endpoint": keyword}, {"custom_permission": keyword}, {"methods": keyword}] urls = mdbs["sys"].db.sys_urls.find(q) data_cnt = urls.count(True) urls = list(urls.sort([("url", 1)]).skip(pre * (page - 1)).limit(pre)) for url in urls: url["_id"] = str(url["_id"]) if "OPTIONS" in url["methods"]: url["methods"].remove("OPTIONS") if "HEAD" in url["methods"]: url["methods"].remove("HEAD") # Check whether there is no custom permission if not url["custom_permission"]: url["custom_permission"] = None else: no_custom = True for v in url["custom_permission"].values(): if v: no_custom = False break if no_custom: url["custom_permission"] = None data["urls"] = datas_paging( pre=pre, page_num=page, data_cnt=data_cnt, datas=urls) return data
[ "def", "get_urls", "(", ")", ":", "data", "=", "{", "}", "ctype", "=", "request", ".", "argget", ".", "all", "(", "\"type\"", ")", "keyword", "=", "request", ".", "argget", ".", "all", "(", "\"keyword\"", ")", "pre", "=", "str_to_num", "(", "request"...
https://github.com/osroom/osroom/blob/fcb9b7c5e5cfd8e919f8d87521800e70b09b5b44/apps/modules/permission/process/url_permission.py#L43-L88
ctxis/canape
5f0e03424577296bcc60c2008a60a98ec5307e4b
CANAPE.Scripting/Lib/distutils/ccompiler.py
python
CCompiler.library_option
(self, lib)
Return the compiler option to add 'lib' to the list of libraries linked into the shared library or executable.
Return the compiler option to add 'lib' to the list of libraries linked into the shared library or executable.
[ "Return", "the", "compiler", "option", "to", "add", "lib", "to", "the", "list", "of", "libraries", "linked", "into", "the", "shared", "library", "or", "executable", "." ]
def library_option(self, lib): """Return the compiler option to add 'lib' to the list of libraries linked into the shared library or executable. """ raise NotImplementedError
[ "def", "library_option", "(", "self", ",", "lib", ")", ":", "raise", "NotImplementedError" ]
https://github.com/ctxis/canape/blob/5f0e03424577296bcc60c2008a60a98ec5307e4b/CANAPE.Scripting/Lib/distutils/ccompiler.py#L770-L774
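Concrete compiler classes override this hook: the Unix flavour turns a bare library name into a -l flag, while the MSVC flavour appends a .lib suffix. A quick illustrative check against stock distutils:

from distutils.unixccompiler import UnixCCompiler

print(UnixCCompiler().library_option("m"))   # -lm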
Kaggle/kaggle-api
49057db362903d158b1e71a43d888b981dd27159
kaggle/models/file.py
python
File.to_dict
(self)
return result
Returns the model properties as a dict
Returns the model properties as a dict
[ "Returns", "the", "model", "properties", "as", "a", "dict" ]
def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result
[ "def", "to_dict", "(", "self", ")", ":", "result", "=", "{", "}", "for", "attr", ",", "_", "in", "six", ".", "iteritems", "(", "self", ".", "swagger_types", ")", ":", "value", "=", "getattr", "(", "self", ",", "attr", ")", "if", "isinstance", "(", ...
https://github.com/Kaggle/kaggle-api/blob/49057db362903d158b1e71a43d888b981dd27159/kaggle/models/file.py#L395-L417
google-research/language
61fa7260ac7d690d11ef72ca863e45a37c0bdc80
language/conpono/binary_order/run_binary_coherence.py
python
model_fn_builder
(bert_config, init_checkpoint, learning_rate, num_train_steps, num_warmup_steps, use_tpu, use_one_hot_embeddings)
return model_fn
Returns `model_fn` closure for TPUEstimator.
Returns `model_fn` closure for TPUEstimator.
[ "Returns", "model_fn", "closure", "for", "TPUEstimator", "." ]
def model_fn_builder(bert_config, init_checkpoint, learning_rate, num_train_steps, num_warmup_steps, use_tpu, use_one_hot_embeddings): """Returns `model_fn` closure for TPUEstimator.""" def model_fn(features, labels, mode, params): # pylint: disable=unused-argument """The `model_fn` for TPUEstimator.""" tf.logging.info("*** Features ***") for name in sorted(features.keys()): tf.logging.info(" name = %s, shape = %s" % (name, features[name].shape)) input_ids = features["input_ids"] input_mask = features["input_mask"] segment_ids = features["segment_ids"] label_ids = features["label_ids"] is_real_example = None if "is_real_example" in features: is_real_example = tf.cast(features["is_real_example"], dtype=tf.float32) else: is_real_example = tf.ones(tf.shape(label_ids), dtype=tf.float32) is_training = (mode == tf.estimator.ModeKeys.TRAIN) (total_loss, per_example_loss, logits, probabilities) = create_model(bert_config, is_training, input_ids, input_mask, segment_ids, label_ids, use_one_hot_embeddings) tvars = tf.trainable_variables() initialized_variable_names = {} scaffold_fn = None if init_checkpoint: (assignment_map, initialized_variable_names ) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) if use_tpu: def tpu_scaffold(): tf.train.init_from_checkpoint(init_checkpoint, assignment_map) return tf.train.Scaffold() scaffold_fn = tpu_scaffold else: tf.train.init_from_checkpoint(init_checkpoint, assignment_map) tf.logging.info("**** Trainable Variables ****") for var in tvars: init_string = "" if var.name in initialized_variable_names: init_string = ", *INIT_FROM_CKPT*" tf.logging.info(" name = %s, shape = %s%s", var.name, var.shape, init_string) output_spec = None if mode == tf.estimator.ModeKeys.TRAIN: train_op = optimization.create_optimizer(total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu) output_spec = contrib_tpu.TPUEstimatorSpec( mode=mode, loss=total_loss, train_op=train_op, scaffold_fn=scaffold_fn) elif mode == tf.estimator.ModeKeys.EVAL: def metric_fn(per_example_loss, label_ids, logits, is_real_example): predictions = tf.argmax(logits, axis=-1, output_type=tf.int32) accuracy = tf.metrics.accuracy( labels=label_ids, predictions=predictions, weights=is_real_example) loss = tf.metrics.mean(values=per_example_loss, weights=is_real_example) return { "eval_accuracy": accuracy, "eval_loss": loss, } eval_metrics = (metric_fn, [per_example_loss, label_ids, logits, is_real_example]) output_spec = contrib_tpu.TPUEstimatorSpec( mode=mode, loss=total_loss, eval_metrics=eval_metrics, scaffold_fn=scaffold_fn) else: output_spec = contrib_tpu.TPUEstimatorSpec( mode=mode, predictions={"probabilities": probabilities}, scaffold_fn=scaffold_fn) return output_spec return model_fn
[ "def", "model_fn_builder", "(", "bert_config", ",", "init_checkpoint", ",", "learning_rate", ",", "num_train_steps", ",", "num_warmup_steps", ",", "use_tpu", ",", "use_one_hot_embeddings", ")", ":", "def", "model_fn", "(", "features", ",", "labels", ",", "mode", "...
https://github.com/google-research/language/blob/61fa7260ac7d690d11ef72ca863e45a37c0bdc80/language/conpono/binary_order/run_binary_coherence.py#L238-L330
ARISE-Initiative/robosuite
a5dfaf03cd769170881a1931d8f19c8eb72f531a
robosuite/environments/manipulation/two_arm_env.py
python
TwoArmEnv._eef1_xquat
(self)
return mat2quat(self._eef1_xmat)
End Effector 1 orientation as a (x,y,z,w) quaternion Note that this draws the orientation from the "ee" site, NOT the gripper site, since the gripper orientations are inconsistent! Returns: np.array: (x,y,z,w) quaternion for EEF1
End Effector 1 orientation as a (x,y,z,w) quaternion Note that this draws the orientation from the "ee" site, NOT the gripper site, since the gripper orientations are inconsistent!
[ "End", "Effector", "1", "orientation", "as", "a", "(", "x", "y", "z", "w", ")", "quaternion", "Note", "that", "this", "draws", "the", "orientation", "from", "the", "ee", "site", "NOT", "the", "gripper", "site", "since", "the", "gripper", "orientations", ...
def _eef1_xquat(self): """ End Effector 1 orientation as a (x,y,z,w) quaternion Note that this draws the orientation from the "ee" site, NOT the gripper site, since the gripper orientations are inconsistent! Returns: np.array: (x,y,z,w) quaternion for EEF1 """ return mat2quat(self._eef1_xmat)
[ "def", "_eef1_xquat", "(", "self", ")", ":", "return", "mat2quat", "(", "self", ".", "_eef1_xmat", ")" ]
https://github.com/ARISE-Initiative/robosuite/blob/a5dfaf03cd769170881a1931d8f19c8eb72f531a/robosuite/environments/manipulation/two_arm_env.py#L125-L134
jupyter/jupyter_client
263173095ceab87a572937f21f843886ffe356f1
jupyter_client/client.py
python
KernelClient._async_recv_reply
( self, msg_id: str, timeout: t.Optional[float] = None, channel: str = "shell" )
Receive and return the reply for a given request
Receive and return the reply for a given request
[ "Receive", "and", "return", "the", "reply", "for", "a", "given", "request" ]
async def _async_recv_reply( self, msg_id: str, timeout: t.Optional[float] = None, channel: str = "shell" ) -> t.Dict[str, t.Any]: """Receive and return the reply for a given request""" if timeout is not None: deadline = time.monotonic() + timeout while True: if timeout is not None: timeout = max(0, deadline - time.monotonic()) try: if channel == "control": reply = await self._async_get_control_msg(timeout=timeout) else: reply = await self._async_get_shell_msg(timeout=timeout) except Empty as e: raise TimeoutError("Timeout waiting for reply") from e if reply["parent_header"].get("msg_id") != msg_id: # not my reply, someone may have forgotten to retrieve theirs continue return reply
[ "async", "def", "_async_recv_reply", "(", "self", ",", "msg_id", ":", "str", ",", "timeout", ":", "t", ".", "Optional", "[", "float", "]", "=", "None", ",", "channel", ":", "str", "=", "\"shell\"", ")", "->", "t", ".", "Dict", "[", "str", ",", "t",...
https://github.com/jupyter/jupyter_client/blob/263173095ceab87a572937f21f843886ffe356f1/jupyter_client/client.py#L193-L212
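A plain-Python sketch of the deadline and parent-id matching logic above. The get_msg callable is hypothetical, standing in for the channel's get_shell_msg / get_control_msg:

import time

def recv_reply(get_msg, msg_id, timeout=None):
    # Recompute the remaining budget on every attempt, so draining
    # unrelated messages cannot extend the overall timeout.
    deadline = None if timeout is None else time.monotonic() + timeout
    while True:
        remaining = None if deadline is None else max(0, deadline - time.monotonic())
        reply = get_msg(timeout=remaining)  # expected to raise on timeout
        if reply["parent_header"].get("msg_id") == msg_id:
            return reply  # skip replies that belong to other requests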
stoq/stoq
c26991644d1affcf96bc2e0a0434796cabdf8448
stoq/lib/gui/dialogs/transferorderdialog.py
python
TransferOrderDetailsDialog.add_tab
(self, slave, name)
Add a new tab on the notebook :param slave: the slave we are attaching to the new tab :param name: the name of the tab
Add a new tab on the notebook
[ "Add", "a", "new", "tab", "on", "the", "notebook" ]
def add_tab(self, slave, name): """Add a new tab on the notebook :param slave: the slave we are attaching to the new tab :param name: the name of the tab """ event_box = Gtk.EventBox() self.details_notebook.insert_page(event_box, Gtk.Label(label=name), -1) self.attach_slave(name, slave, event_box) event_box.show()
[ "def", "add_tab", "(", "self", ",", "slave", ",", "name", ")", ":", "event_box", "=", "Gtk", ".", "EventBox", "(", ")", "self", ".", "details_notebook", ".", "insert_page", "(", "event_box", ",", "Gtk", ".", "Label", "(", "label", "=", "name", ")", "...
https://github.com/stoq/stoq/blob/c26991644d1affcf96bc2e0a0434796cabdf8448/stoq/lib/gui/dialogs/transferorderdialog.py#L77-L86
pandas-dev/pandas
5ba7d714014ae8feaccc0dd4a98890828cf2832d
pandas/core/generic.py
python
NDFrame._validate_dtype
(cls, dtype)
return dtype
validate the passed dtype
validate the passed dtype
[ "validate", "the", "passed", "dtype" ]
def _validate_dtype(cls, dtype) -> DtypeObj | None: """validate the passed dtype""" if dtype is not None: dtype = pandas_dtype(dtype) # a compound dtype if dtype.kind == "V": raise NotImplementedError( "compound dtypes are not implemented " f"in the {cls.__name__} constructor" ) return dtype
[ "def", "_validate_dtype", "(", "cls", ",", "dtype", ")", "->", "DtypeObj", "|", "None", ":", "if", "dtype", "is", "not", "None", ":", "dtype", "=", "pandas_dtype", "(", "dtype", ")", "# a compound dtype", "if", "dtype", ".", "kind", "==", "\"V\"", ":", ...
https://github.com/pandas-dev/pandas/blob/5ba7d714014ae8feaccc0dd4a98890828cf2832d/pandas/core/generic.py#L443-L455
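A minimal sketch of the kind == "V" check above: numpy structured (compound) dtypes report kind 'V', which is what the constructor rejects. Assumes numpy and pandas are installed:

import numpy as np
from pandas.api.types import pandas_dtype

print(pandas_dtype("float64").kind)              # 'f' -- accepted
compound = np.dtype([("a", "i4"), ("b", "f8")])
print(compound.kind)                             # 'V' -- the rejected case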
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/site-packages/jinja2-2.8-py3.3.egg/jinja2/utils.py
python
consume
(iterable)
Consumes an iterable without doing anything with it.
Consumes an iterable without doing anything with it.
[ "Consumes", "an", "iterable", "without", "doing", "anything", "with", "it", "." ]
def consume(iterable): """Consumes an iterable without doing anything with it.""" for event in iterable: pass
[ "def", "consume", "(", "iterable", ")", ":", "for", "event", "in", "iterable", ":", "pass" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/site-packages/jinja2-2.8-py3.3.egg/jinja2/utils.py#L102-L105
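A usage sketch for consume as defined above: draining a generator purely for its side effects.

log = []
events = (log.append(i) or i for i in range(3))  # append returns None, so "or i" yields i
consume(events)
print(log)  # [0, 1, 2] -- the generator was fully exhausted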
rdiff-backup/rdiff-backup
321e0cd6e5e47d4c158a0172e47ab38240a8b653
src/rdiff_backup/restore.py
python
MirrorStruct.set_mirror_select
(cls, target_rp, select_opts, *filelists)
Initialize the mirror selection object
Initialize the mirror selection object
[ "Initialize", "the", "mirror", "selection", "object" ]
def set_mirror_select(cls, target_rp, select_opts, *filelists): """Initialize the mirror selection object""" if not select_opts: return # nothing to do... cls._select = selection.Select(target_rp) cls._select.parse_selection_args(select_opts, filelists)
[ "def", "set_mirror_select", "(", "cls", ",", "target_rp", ",", "select_opts", ",", "*", "filelists", ")", ":", "if", "not", "select_opts", ":", "return", "# nothing to do...", "cls", ".", "_select", "=", "selection", ".", "Select", "(", "target_rp", ")", "cl...
https://github.com/rdiff-backup/rdiff-backup/blob/321e0cd6e5e47d4c158a0172e47ab38240a8b653/src/rdiff_backup/restore.py#L130-L135
JaniceWuo/MovieRecommend
4c86db64ca45598917d304f535413df3bc9fea65
movierecommend/venv1/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/pyparsing.py
python
Optional.__init__
( self, expr, default=_optionalNotMatched )
[]
def __init__( self, expr, default=_optionalNotMatched ): super(Optional,self).__init__( expr, savelist=False ) self.saveAsList = self.expr.saveAsList self.defaultValue = default self.mayReturnEmpty = True
[ "def", "__init__", "(", "self", ",", "expr", ",", "default", "=", "_optionalNotMatched", ")", ":", "super", "(", "Optional", ",", "self", ")", ".", "__init__", "(", "expr", ",", "savelist", "=", "False", ")", "self", ".", "saveAsList", "=", "self", "."...
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/pyparsing.py#L3956-L3960
sametmax/Django--an-app-at-a-time
99eddf12ead76e6dfbeb09ce0bae61e282e22f8a
ignore_this_directory/django/contrib/auth/base_user.py
python
AbstractBaseUser.has_usable_password
(self)
return is_password_usable(self.password)
Return False if set_unusable_password() has been called for this user.
Return False if set_unusable_password() has been called for this user.
[ "Return", "False", "if", "set_unusable_password", "()", "has", "been", "called", "for", "this", "user", "." ]
def has_usable_password(self): """ Return False if set_unusable_password() has been called for this user. """ return is_password_usable(self.password)
[ "def", "has_usable_password", "(", "self", ")", ":", "return", "is_password_usable", "(", "self", ".", "password", ")" ]
https://github.com/sametmax/Django--an-app-at-a-time/blob/99eddf12ead76e6dfbeb09ce0bae61e282e22f8a/ignore_this_directory/django/contrib/auth/base_user.py#L117-L121
orbingol/NURBS-Python
8ae8b127eb0b130a25a6c81e98e90f319733bca0
geomdl/operations.py
python
flip
(surf, **kwargs)
return geom
Flips the control points grid of the input surface(s). Keyword Arguments: * ``inplace``: if False, operation applied to a copy of the object. *Default: False* :param surf: input surface(s) :type surf: abstract.Surface, multi.SurfaceContainer :return: flipped surface(s)
Flips the control points grid of the input surface(s).
[ "Flips", "the", "control", "points", "grid", "of", "the", "input", "surface", "(", "s", ")", "." ]
def flip(surf, **kwargs): """ Flips the control points grid of the input surface(s). Keyword Arguments: * ``inplace``: if False, operation applied to a copy of the object. *Default: False* :param surf: input surface(s) :type surf: abstract.Surface, multi.SurfaceContainer :return: flipped surface(s) """ if surf.pdimension != 2: raise GeomdlException("Can only flip surfaces") # Keyword arguments inplace = kwargs.get('inplace', False) if not inplace: geom = copy.deepcopy(surf) else: geom = surf for g in geom: size_u = g.ctrlpts_size_u size_v = g.ctrlpts_size_v cpts = g.ctrlptsw if g.rational else g.ctrlpts new_cpts = [[] for _ in range(g.ctrlpts_size)] idx = g.ctrlpts_size - 1 for pt in cpts: new_cpts[idx] = pt idx -= 1 g.set_ctrlpts(new_cpts, size_u, size_v) return geom
[ "def", "flip", "(", "surf", ",", "*", "*", "kwargs", ")", ":", "if", "surf", ".", "pdimension", "!=", "2", ":", "raise", "GeomdlException", "(", "\"Can only flip surfaces\"", ")", "# Keyword arguments", "inplace", "=", "kwargs", ".", "get", "(", "'inplace'",...
https://github.com/orbingol/NURBS-Python/blob/8ae8b127eb0b130a25a6c81e98e90f319733bca0/geomdl/operations.py#L1653-L1685
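The loop above writes control points into new_cpts back-to-front, which amounts to a plain reversal of the flattened grid. A pure-Python sketch of that index bookkeeping:

size_u, size_v = 2, 3
cpts = [[u, v, 0.0] for u in range(size_u) for v in range(size_v)]

new_cpts = [None] * len(cpts)
idx = len(cpts) - 1
for pt in cpts:        # same idx countdown as in flip()
    new_cpts[idx] = pt
    idx -= 1

print(new_cpts == list(reversed(cpts)))  # True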
taigaio/taiga-ncurses
65312098f2d167762e0dbd1c16019754ab64d068
taiga_ncurses/controllers/issues.py
python
ProjectIssuesSubController.handle_create_issue_request
(self)
[]
def handle_create_issue_request(self): data = self.view.get_issue_form_data() if not data.get("subject", None): self.view.notifier.error_msg("Subject is required") else: us_post_f = self.executor.create_issue(data) us_post_f.add_done_callback(self.handle_create_issue_response)
[ "def", "handle_create_issue_request", "(", "self", ")", ":", "data", "=", "self", ".", "view", ".", "get_issue_form_data", "(", ")", "if", "not", "data", ".", "get", "(", "\"subject\"", ",", "None", ")", ":", "self", ".", "view", ".", "notifier", ".", ...
https://github.com/taigaio/taiga-ncurses/blob/65312098f2d167762e0dbd1c16019754ab64d068/taiga_ncurses/controllers/issues.py#L154-L161
inguma/bokken
6109dd0025093a11631cb88cf48cb5c5ed5e617d
lib/web/template.py
python
test
()
r"""Doctest for testing template module. Define a utility function to run template test. >>> class TestResult: ... def __init__(self, t): self.t = t ... def __getattr__(self, name): return getattr(self.t, name) ... def __repr__(self): return repr(unicode(self)) ... >>> def t(code, **keywords): ... tmpl = Template(code, **keywords) ... return lambda *a, **kw: TestResult(tmpl(*a, **kw)) ... Simple tests. >>> t('1')() u'1\n' >>> t('$def with ()\n1')() u'1\n' >>> t('$def with (a)\n$a')(1) u'1\n' >>> t('$def with (a=0)\n$a')(1) u'1\n' >>> t('$def with (a=0)\n$a')(a=1) u'1\n' Test complicated expressions. >>> t('$def with (x)\n$x.upper()')('hello') u'HELLO\n' >>> t('$(2 * 3 + 4 * 5)')() u'26\n' >>> t('${2 * 3 + 4 * 5}')() u'26\n' >>> t('$def with (limit)\nkeep $(limit)ing.')('go') u'keep going.\n' >>> t('$def with (a)\n$a.b[0]')(storage(b=[1])) u'1\n' Test html escaping. >>> t('$def with (x)\n$x', filename='a.html')('<html>') u'&lt;html&gt;\n' >>> t('$def with (x)\n$x', filename='a.txt')('<html>') u'<html>\n' Test if, for and while. >>> t('$if 1: 1')() u'1\n' >>> t('$if 1:\n 1')() u'1\n' >>> t('$if 1:\n 1\\')() u'1' >>> t('$if 0: 0\n$elif 1: 1')() u'1\n' >>> t('$if 0: 0\n$elif None: 0\n$else: 1')() u'1\n' >>> t('$if 0 < 1 and 1 < 2: 1')() u'1\n' >>> t('$for x in [1, 2, 3]: $x')() u'1\n2\n3\n' >>> t('$def with (d)\n$for k, v in d.iteritems(): $k')({1: 1}) u'1\n' >>> t('$for x in [1, 2, 3]:\n\t$x')() u' 1\n 2\n 3\n' >>> t('$def with (a)\n$while a and a.pop():1')([1, 2, 3]) u'1\n1\n1\n' The space after : must be ignored. >>> t('$if True: foo')() u'foo\n' Test loop.xxx. >>> t("$for i in range(5):$loop.index, $loop.parity")() u'1, odd\n2, even\n3, odd\n4, even\n5, odd\n' >>> t("$for i in range(2):\n $for j in range(2):$loop.parent.parity $loop.parity")() u'odd odd\nodd even\neven odd\neven even\n' Test assignment. >>> t('$ a = 1\n$a')() u'1\n' >>> t('$ a = [1]\n$a[0]')() u'1\n' >>> t('$ a = {1: 1}\n$a.keys()[0]')() u'1\n' >>> t('$ a = []\n$if not a: 1')() u'1\n' >>> t('$ a = {}\n$if not a: 1')() u'1\n' >>> t('$ a = -1\n$a')() u'-1\n' >>> t('$ a = "1"\n$a')() u'1\n' Test comments. >>> t('$# 0')() u'\n' >>> t('hello$#comment1\nhello$#comment2')() u'hello\nhello\n' >>> t('$#comment0\nhello$#comment1\nhello$#comment2')() u'\nhello\nhello\n' Test unicode. >>> t('$def with (a)\n$a')(u'\u203d') u'\u203d\n' >>> t('$def with (a)\n$a')(u'\u203d'.encode('utf-8')) u'\u203d\n' >>> t(u'$def with (a)\n$a $:a')(u'\u203d') u'\u203d \u203d\n' >>> t(u'$def with ()\nfoo')() u'foo\n' >>> def f(x): return x ... >>> t(u'$def with (f)\n$:f("x")')(f) u'x\n' >>> t('$def with (f)\n$:f("x")')(f) u'x\n' Test dollar escaping. >>> t("Stop, $$money isn't evaluated.")() u"Stop, $money isn't evaluated.\n" >>> t("Stop, \$money isn't evaluated.")() u"Stop, $money isn't evaluated.\n" Test space sensitivity. >>> t('$def with (x)\n$x')(1) u'1\n' >>> t('$def with(x ,y)\n$x')(1, 1) u'1\n' >>> t('$(1 + 2*3 + 4)')() u'11\n' Make sure globals are working. >>> t('$x')() Traceback (most recent call last): ... NameError: global name 'x' is not defined >>> t('$x', globals={'x': 1})() u'1\n' Can't change globals. >>> t('$ x = 2\n$x', globals={'x': 1})() u'2\n' >>> t('$ x = x + 1\n$x', globals={'x': 1})() Traceback (most recent call last): ... UnboundLocalError: local variable 'x' referenced before assignment Make sure builtins are customizable. >>> t('$min(1, 2)')() u'1\n' >>> t('$min(1, 2)', builtins={})() Traceback (most recent call last): ... NameError: global name 'min' is not defined Test vars. 
>>> x = t('$var x: 1')() >>> x.x u'1' >>> x = t('$var x = 1')() >>> x.x 1 >>> x = t('$var x: \n foo\n bar')() >>> x.x u'foo\nbar\n' Test BOM chars. >>> t('\xef\xbb\xbf$def with(x)\n$x')('foo') u'foo\n' Test for with weird cases. >>> t('$for i in range(10)[1:5]:\n $i')() u'1\n2\n3\n4\n' >>> t("$for k, v in {'a': 1, 'b': 2}.items():\n $k $v")() u'a 1\nb 2\n' >>> t("$for k, v in ({'a': 1, 'b': 2}.items():\n $k $v")() Traceback (most recent call last): ... SyntaxError: invalid syntax Test datetime. >>> import datetime >>> t("$def with (date)\n$date.strftime('%m %Y')")(datetime.datetime(2009, 1, 1)) u'01 2009\n'
r"""Doctest for testing template module.
[ "r", "Doctest", "for", "testing", "template", "module", "." ]
def test(): r"""Doctest for testing template module. Define a utility function to run template test. >>> class TestResult: ... def __init__(self, t): self.t = t ... def __getattr__(self, name): return getattr(self.t, name) ... def __repr__(self): return repr(unicode(self)) ... >>> def t(code, **keywords): ... tmpl = Template(code, **keywords) ... return lambda *a, **kw: TestResult(tmpl(*a, **kw)) ... Simple tests. >>> t('1')() u'1\n' >>> t('$def with ()\n1')() u'1\n' >>> t('$def with (a)\n$a')(1) u'1\n' >>> t('$def with (a=0)\n$a')(1) u'1\n' >>> t('$def with (a=0)\n$a')(a=1) u'1\n' Test complicated expressions. >>> t('$def with (x)\n$x.upper()')('hello') u'HELLO\n' >>> t('$(2 * 3 + 4 * 5)')() u'26\n' >>> t('${2 * 3 + 4 * 5}')() u'26\n' >>> t('$def with (limit)\nkeep $(limit)ing.')('go') u'keep going.\n' >>> t('$def with (a)\n$a.b[0]')(storage(b=[1])) u'1\n' Test html escaping. >>> t('$def with (x)\n$x', filename='a.html')('<html>') u'&lt;html&gt;\n' >>> t('$def with (x)\n$x', filename='a.txt')('<html>') u'<html>\n' Test if, for and while. >>> t('$if 1: 1')() u'1\n' >>> t('$if 1:\n 1')() u'1\n' >>> t('$if 1:\n 1\\')() u'1' >>> t('$if 0: 0\n$elif 1: 1')() u'1\n' >>> t('$if 0: 0\n$elif None: 0\n$else: 1')() u'1\n' >>> t('$if 0 < 1 and 1 < 2: 1')() u'1\n' >>> t('$for x in [1, 2, 3]: $x')() u'1\n2\n3\n' >>> t('$def with (d)\n$for k, v in d.iteritems(): $k')({1: 1}) u'1\n' >>> t('$for x in [1, 2, 3]:\n\t$x')() u' 1\n 2\n 3\n' >>> t('$def with (a)\n$while a and a.pop():1')([1, 2, 3]) u'1\n1\n1\n' The space after : must be ignored. >>> t('$if True: foo')() u'foo\n' Test loop.xxx. >>> t("$for i in range(5):$loop.index, $loop.parity")() u'1, odd\n2, even\n3, odd\n4, even\n5, odd\n' >>> t("$for i in range(2):\n $for j in range(2):$loop.parent.parity $loop.parity")() u'odd odd\nodd even\neven odd\neven even\n' Test assignment. >>> t('$ a = 1\n$a')() u'1\n' >>> t('$ a = [1]\n$a[0]')() u'1\n' >>> t('$ a = {1: 1}\n$a.keys()[0]')() u'1\n' >>> t('$ a = []\n$if not a: 1')() u'1\n' >>> t('$ a = {}\n$if not a: 1')() u'1\n' >>> t('$ a = -1\n$a')() u'-1\n' >>> t('$ a = "1"\n$a')() u'1\n' Test comments. >>> t('$# 0')() u'\n' >>> t('hello$#comment1\nhello$#comment2')() u'hello\nhello\n' >>> t('$#comment0\nhello$#comment1\nhello$#comment2')() u'\nhello\nhello\n' Test unicode. >>> t('$def with (a)\n$a')(u'\u203d') u'\u203d\n' >>> t('$def with (a)\n$a')(u'\u203d'.encode('utf-8')) u'\u203d\n' >>> t(u'$def with (a)\n$a $:a')(u'\u203d') u'\u203d \u203d\n' >>> t(u'$def with ()\nfoo')() u'foo\n' >>> def f(x): return x ... >>> t(u'$def with (f)\n$:f("x")')(f) u'x\n' >>> t('$def with (f)\n$:f("x")')(f) u'x\n' Test dollar escaping. >>> t("Stop, $$money isn't evaluated.")() u"Stop, $money isn't evaluated.\n" >>> t("Stop, \$money isn't evaluated.")() u"Stop, $money isn't evaluated.\n" Test space sensitivity. >>> t('$def with (x)\n$x')(1) u'1\n' >>> t('$def with(x ,y)\n$x')(1, 1) u'1\n' >>> t('$(1 + 2*3 + 4)')() u'11\n' Make sure globals are working. >>> t('$x')() Traceback (most recent call last): ... NameError: global name 'x' is not defined >>> t('$x', globals={'x': 1})() u'1\n' Can't change globals. >>> t('$ x = 2\n$x', globals={'x': 1})() u'2\n' >>> t('$ x = x + 1\n$x', globals={'x': 1})() Traceback (most recent call last): ... UnboundLocalError: local variable 'x' referenced before assignment Make sure builtins are customizable. >>> t('$min(1, 2)')() u'1\n' >>> t('$min(1, 2)', builtins={})() Traceback (most recent call last): ... NameError: global name 'min' is not defined Test vars. 
>>> x = t('$var x: 1')() >>> x.x u'1' >>> x = t('$var x = 1')() >>> x.x 1 >>> x = t('$var x: \n foo\n bar')() >>> x.x u'foo\nbar\n' Test BOM chars. >>> t('\xef\xbb\xbf$def with(x)\n$x')('foo') u'foo\n' Test for with weird cases. >>> t('$for i in range(10)[1:5]:\n $i')() u'1\n2\n3\n4\n' >>> t("$for k, v in {'a': 1, 'b': 2}.items():\n $k $v")() u'a 1\nb 2\n' >>> t("$for k, v in ({'a': 1, 'b': 2}.items():\n $k $v")() Traceback (most recent call last): ... SyntaxError: invalid syntax Test datetime. >>> import datetime >>> t("$def with (date)\n$date.strftime('%m %Y')")(datetime.datetime(2009, 1, 1)) u'01 2009\n' """ pass
[ "def", "test", "(", ")", ":", "pass" ]
https://github.com/inguma/bokken/blob/6109dd0025093a11631cb88cf48cb5c5ed5e617d/lib/web/template.py#L1301-L1504
GoogleCloudPlatform/cloudml-samples
efddc4a9898127e55edc0946557aca4bfaf59705
tensorflow/standard/mnist/deprecated/trainer/distribution_utils.py
python
get_distribution_strategy
(num_gpus, all_reduce_alg=None)
Return a DistributionStrategy for running the model. Args: num_gpus: Number of GPUs to run this model. all_reduce_alg: Specify which algorithm to use when performing all-reduce. See tf.contrib.distribute.AllReduceCrossDeviceOps for available algorithms. If None, DistributionStrategy will choose based on device topology. Returns: tf.contrib.distribute.DistributionStrategy object.
Return a DistributionStrategy for running the model.
[ "Return", "a", "DistributionStrategy", "for", "running", "the", "model", "." ]
def get_distribution_strategy(num_gpus, all_reduce_alg=None): """Return a DistributionStrategy for running the model. Args: num_gpus: Number of GPUs to run this model. all_reduce_alg: Specify which algorithm to use when performing all-reduce. See tf.contrib.distribute.AllReduceCrossDeviceOps for available algorithms. If None, DistributionStrategy will choose based on device topology. Returns: tf.contrib.distribute.DistributionStrategy object. """ if num_gpus == 0: return tf.contrib.distribute.OneDeviceStrategy("device:CPU:0") elif num_gpus == 1: return tf.contrib.distribute.OneDeviceStrategy("device:GPU:0") else: if all_reduce_alg: return tf.contrib.distribute.MirroredStrategy( num_gpus=num_gpus, cross_tower_ops=tf.contrib.distribute.AllReduceCrossDeviceOps( all_reduce_alg, num_packs=2)) else: return tf.contrib.distribute.MirroredStrategy(num_gpus=num_gpus)
[ "def", "get_distribution_strategy", "(", "num_gpus", ",", "all_reduce_alg", "=", "None", ")", ":", "if", "num_gpus", "==", "0", ":", "return", "tf", ".", "contrib", ".", "distribute", ".", "OneDeviceStrategy", "(", "\"device:CPU:0\"", ")", "elif", "num_gpus", ...
https://github.com/GoogleCloudPlatform/cloudml-samples/blob/efddc4a9898127e55edc0946557aca4bfaf59705/tensorflow/standard/mnist/deprecated/trainer/distribution_utils.py#L24-L48
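The selection logic above reduces to a three-way branch on num_gpus. A framework-free sketch of that dispatch (strings stand in for the tf.contrib.distribute objects, which only exist in TF 1.x):

def pick_strategy(num_gpus, all_reduce_alg=None):
    if num_gpus == 0:
        return "OneDeviceStrategy(device:CPU:0)"
    if num_gpus == 1:
        return "OneDeviceStrategy(device:GPU:0)"
    # Multi-GPU: optionally pin the all-reduce algorithm, otherwise let the
    # strategy choose one based on the device topology.
    alg = all_reduce_alg or "auto"
    return f"MirroredStrategy(num_gpus={num_gpus}, all_reduce={alg})"

print(pick_strategy(0), pick_strategy(4, "nccl"), sep="\n")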
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/synology_dsm/__init__.py
python
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
return True
Set up Synology DSM sensors.
Set up Synology DSM sensors.
[ "Set", "up", "Synology", "DSM", "sensors", "." ]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Synology DSM sensors.""" # Migrate device identifiers dev_reg = await get_dev_reg(hass) devices: list[DeviceEntry] = device_registry.async_entries_for_config_entry( dev_reg, entry.entry_id ) for device in devices: old_identifier = list(next(iter(device.identifiers))) if len(old_identifier) > 2: new_identifier = { (old_identifier.pop(0), "_".join([str(x) for x in old_identifier])) } _LOGGER.debug( "migrate identifier '%s' to '%s'", device.identifiers, new_identifier ) dev_reg.async_update_device(device.id, new_identifiers=new_identifier) # Migrate existing entry configuration if entry.data.get(CONF_VERIFY_SSL) is None: hass.config_entries.async_update_entry( entry, data={**entry.data, CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL} ) # Continue setup api = SynoApi(hass, entry) try: await api.async_setup() except ( SynologyDSMLogin2SARequiredException, SynologyDSMLoginDisabledAccountException, SynologyDSMLoginInvalidException, SynologyDSMLoginPermissionDeniedException, ) as err: if err.args[0] and isinstance(err.args[0], dict): # pylint: disable=no-member details = err.args[0].get(EXCEPTION_DETAILS, EXCEPTION_UNKNOWN) else: details = EXCEPTION_UNKNOWN raise ConfigEntryAuthFailed(f"reason: {details}") from err except (SynologyDSMLoginFailedException, SynologyDSMRequestException) as err: if err.args[0] and isinstance(err.args[0], dict): # pylint: disable=no-member details = err.args[0].get(EXCEPTION_DETAILS, EXCEPTION_UNKNOWN) else: details = EXCEPTION_UNKNOWN raise ConfigEntryNotReady(details) from err hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.unique_id] = { UNDO_UPDATE_LISTENER: entry.add_update_listener(_async_update_listener), SYNO_API: api, SYSTEM_LOADED: True, } # Services await _async_setup_services(hass) # For SSDP compat if not entry.data.get(CONF_MAC): network = await hass.async_add_executor_job(getattr, api.dsm, "network") hass.config_entries.async_update_entry( entry, data={**entry.data, CONF_MAC: network.macs} ) async def async_coordinator_update_data_cameras() -> dict[ str, dict[str, SynoCamera] ] | None: """Fetch all camera data from api.""" if not hass.data[DOMAIN][entry.unique_id][SYSTEM_LOADED]: raise UpdateFailed("System not fully loaded") if SynoSurveillanceStation.CAMERA_API_KEY not in api.dsm.apis: return None surveillance_station = api.surveillance_station try: async with async_timeout.timeout(30): await hass.async_add_executor_job(surveillance_station.update) except SynologyDSMAPIErrorException as err: raise UpdateFailed(f"Error communicating with API: {err}") from err return { "cameras": { camera.id: camera for camera in surveillance_station.get_all_cameras() } } async def async_coordinator_update_data_central() -> None: """Fetch all device and sensor data from api.""" try: await api.async_update() except Exception as err: raise UpdateFailed(f"Error communicating with API: {err}") from err return None async def async_coordinator_update_data_switches() -> dict[ str, dict[str, Any] ] | None: """Fetch all switch data from api.""" if not hass.data[DOMAIN][entry.unique_id][SYSTEM_LOADED]: raise UpdateFailed("System not fully loaded") if SynoSurveillanceStation.HOME_MODE_API_KEY not in api.dsm.apis: return None surveillance_station = api.surveillance_station return { "switches": { "home_mode": await hass.async_add_executor_job( surveillance_station.get_home_mode_status ) } } hass.data[DOMAIN][entry.unique_id][COORDINATOR_CAMERAS] = DataUpdateCoordinator( hass, _LOGGER,
name=f"{entry.unique_id}_cameras", update_method=async_coordinator_update_data_cameras, update_interval=timedelta(seconds=30), ) hass.data[DOMAIN][entry.unique_id][COORDINATOR_CENTRAL] = DataUpdateCoordinator( hass, _LOGGER, name=f"{entry.unique_id}_central", update_method=async_coordinator_update_data_central, update_interval=timedelta( minutes=entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) ), ) hass.data[DOMAIN][entry.unique_id][COORDINATOR_SWITCHES] = DataUpdateCoordinator( hass, _LOGGER, name=f"{entry.unique_id}_switches", update_method=async_coordinator_update_data_switches, update_interval=timedelta(seconds=30), ) hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", "->", "bool", ":", "# Migrate device identifiers", "dev_reg", "=", "await", "get_dev_reg", "(", "hass", ")", "devices", ":", "list", "[", "DeviceEntry...
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/synology_dsm/__init__.py#L87-L232
sametmax/Django--an-app-at-a-time
99eddf12ead76e6dfbeb09ce0bae61e282e22f8a
ignore_this_directory/django/db/backends/oracle/schema.py
python
DatabaseSchemaEditor._alter_field_type_workaround
(self, model, old_field, new_field)
Oracle refuses to change from some types to other types. What we need to do instead is: - Add a nullable version of the desired field with a temporary name. If the new column is an auto field, then the temporary column can't be nullable. - Update the table to transfer values from old to new - Drop old column - Rename the new column and possibly drop the nullable property
Oracle refuses to change from some types to other types. What we need to do instead is: - Add a nullable version of the desired field with a temporary name. If the new column is an auto field, then the temporary column can't be nullable. - Update the table to transfer values from old to new - Drop old column - Rename the new column and possibly drop the nullable property
[ "Oracle", "refuses", "to", "change", "from", "some", "types", "to", "other", "types", ".", "What", "we", "need", "to", "do", "instead", "is", ":", "-", "Add", "a", "nullable", "version", "of", "the", "desired", "field", "with", "a", "temporary", "name", ...
def _alter_field_type_workaround(self, model, old_field, new_field): """ Oracle refuses to change from some types to other types. What we need to do instead is: - Add a nullable version of the desired field with a temporary name. If the new column is an auto field, then the temporary column can't be nullable. - Update the table to transfer values from old to new - Drop old column - Rename the new column and possibly drop the nullable property """ # Make a new field that's like the new one but with a temporary # column name. new_temp_field = copy.deepcopy(new_field) new_temp_field.null = (new_field.get_internal_type() not in ('AutoField', 'BigAutoField')) new_temp_field.column = self._generate_temp_name(new_field.column) # Add it self.add_field(model, new_temp_field) # Explicit data type conversion # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf # /Data-Type-Comparison-Rules.html#GUID-D0C5A47E-6F93-4C2D-9E49-4F2B86B359DD new_value = self.quote_name(old_field.column) old_type = old_field.db_type(self.connection) if re.match('^N?CLOB', old_type): new_value = "TO_CHAR(%s)" % new_value old_type = 'VARCHAR2' if re.match('^N?VARCHAR2', old_type): new_internal_type = new_field.get_internal_type() if new_internal_type == 'DateField': new_value = "TO_DATE(%s, 'YYYY-MM-DD')" % new_value elif new_internal_type == 'DateTimeField': new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value elif new_internal_type == 'TimeField': # TimeField are stored as TIMESTAMP with a 1900-01-01 date part. new_value = "TO_TIMESTAMP(CONCAT('1900-01-01 ', %s), 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value # Transfer values across self.execute("UPDATE %s set %s=%s" % ( self.quote_name(model._meta.db_table), self.quote_name(new_temp_field.column), new_value, )) # Drop the old field self.remove_field(model, old_field) # Rename and possibly make the new field NOT NULL super().alter_field(model, new_temp_field, new_field)
[ "def", "_alter_field_type_workaround", "(", "self", ",", "model", ",", "old_field", ",", "new_field", ")", ":", "# Make a new field that's like the new one but with a temporary", "# column name.", "new_temp_field", "=", "copy", ".", "deepcopy", "(", "new_field", ")", "new...
https://github.com/sametmax/Django--an-app-at-a-time/blob/99eddf12ead76e6dfbeb09ce0bae61e282e22f8a/ignore_this_directory/django/db/backends/oracle/schema.py#L78-L122
biolab/orange3
41685e1c7b1d1babe680113685a2d44bcc9fec0b
Orange/widgets/visualize/owscatterplotgraph.py
python
OWScatterPlotBase.get_colors
(self)
Prepare data for colors of the points in the plot The method is called by `update_colors`. It gets the colors and the indices of the data subset from the widget (`get_color_data`, `get_subset_mask`), and constructs lists of pens and brushes for each data point. The method uses different palettes for discrete and continuous data, as determined by calling the widget's method `is_continuous_color`. It also marks the points that are in the subset as defined by, for instance the 'Data Subset' signal in the Scatter plot and similar widgets. (Do not confuse this with *selected points*, which are marked by circles around the points, which are colored by groups and thus independent of this method.) Returns: (tuple): a list of pens and list of brushes
Prepare data for colors of the points in the plot
[ "Prepare", "data", "for", "colors", "of", "the", "points", "in", "the", "plot" ]
def get_colors(self): """ Prepare data for colors of the points in the plot The method is called by `update_colors`. It gets the colors and the indices of the data subset from the widget (`get_color_data`, `get_subset_mask`), and constructs lists of pens and brushes for each data point. The method uses different palettes for discrete and continuous data, as determined by calling the widget's method `is_continuous_color`. It also marks the points that are in the subset as defined by, for instance the 'Data Subset' signal in the Scatter plot and similar widgets. (Do not confuse this with *selected points*, which are marked by circles around the points, which are colored by groups and thus independent of this method.) Returns: (tuple): a list of pens and list of brushes """ c_data = self.master.get_color_data() c_data = self._filter_visible(c_data) subset = self.master.get_subset_mask() subset = self._filter_visible(subset) self.subset_is_shown = subset is not None if c_data is None: # same color self.palette = None return self._get_same_colors(subset) elif self.master.is_continuous_color(): return self._get_continuous_colors(c_data, subset) else: return self._get_discrete_colors(c_data, subset)
[ "def", "get_colors", "(", "self", ")", ":", "c_data", "=", "self", ".", "master", ".", "get_color_data", "(", ")", "c_data", "=", "self", ".", "_filter_visible", "(", "c_data", ")", "subset", "=", "self", ".", "master", ".", "get_subset_mask", "(", ")", ...
https://github.com/biolab/orange3/blob/41685e1c7b1d1babe680113685a2d44bcc9fec0b/Orange/widgets/visualize/owscatterplotgraph.py#L991-L1023
ganeti/ganeti
d340a9ddd12f501bef57da421b5f9b969a4ba905
lib/cmdlib/common.py
python
_FilterVmNodes
(lu, node_uuids)
return [uuid for uuid in node_uuids if uuid not in vm_nodes]
Filters out non-vm_capable nodes from a list. @type lu: L{LogicalUnit} @param lu: the logical unit for which we check @type node_uuids: list @param node_uuids: the list of nodes on which we should check @rtype: list @return: the list of vm-capable nodes
Filters out non-vm_capable nodes from a list.
[ "Filters", "out", "non", "-", "vm_capable", "nodes", "from", "a", "list", "." ]
def _FilterVmNodes(lu, node_uuids): """Filters out non-vm_capable nodes from a list. @type lu: L{LogicalUnit} @param lu: the logical unit for which we check @type node_uuids: list @param node_uuids: the list of nodes on which we should check @rtype: list @return: the list of vm-capable nodes """ vm_nodes = frozenset(lu.cfg.GetNonVmCapableNodeList()) return [uuid for uuid in node_uuids if uuid not in vm_nodes]
[ "def", "_FilterVmNodes", "(", "lu", ",", "node_uuids", ")", ":", "vm_nodes", "=", "frozenset", "(", "lu", ".", "cfg", ".", "GetNonVmCapableNodeList", "(", ")", ")", "return", "[", "uuid", "for", "uuid", "in", "node_uuids", "if", "uuid", "not", "in", "vm_...
https://github.com/ganeti/ganeti/blob/d340a9ddd12f501bef57da421b5f9b969a4ba905/lib/cmdlib/common.py#L876-L888
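The filter is a frozenset membership test. A standalone sketch with made-up node UUIDs, where the frozenset stands in for cfg.GetNonVmCapableNodeList():

non_vm_capable = frozenset(["uuid-2"])
node_uuids = ["uuid-1", "uuid-2", "uuid-3"]
print([u for u in node_uuids if u not in non_vm_capable])  # ['uuid-1', 'uuid-3']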
ym2011/POC-EXP
206b22d3a6b2a172359678df33bbc5b2ad04b6c3
K8/Web-Exp/sqlmap/thirdparty/bottle/bottle.py
python
Route.get_config
(self, key, default=None)
return default
Lookup a config field and return its value, first checking the route.config, then route.app.config.
Lookup a config field and return its value, first checking the route.config, then route.app.config.
[ "Lookup", "a", "config", "field", "and", "return", "its", "value", "first", "checking", "the", "route", ".", "config", "then", "route", ".", "app", ".", "config", "." ]
def get_config(self, key, default=None): """ Lookup a config field and return its value, first checking the route.config, then route.app.config.""" for conf in (self.config, self.app.config): if key in conf: return conf[key] return default
[ "def", "get_config", "(", "self", ",", "key", ",", "default", "=", "None", ")", ":", "for", "conf", "in", "(", "self", ".", "config", ",", "self", ".", "app", ".", "config", ")", ":", "if", "key", "in", "conf", ":", "return", "conf", "[", "key", ...
https://github.com/ym2011/POC-EXP/blob/206b22d3a6b2a172359678df33bbc5b2ad04b6c3/K8/Web-Exp/sqlmap/thirdparty/bottle/bottle.py#L624-L629
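The lookup checks the route-level dict before the app-level dict, so the route wins on collisions. A self-contained sketch of that precedence:

route_config = {"timeout": 5}
app_config = {"timeout": 30, "debug": False}

def get_config(key, default=None):
    for conf in (route_config, app_config):  # route-level config shadows app-level
        if key in conf:
            return conf[key]
    return default

print(get_config("timeout"))         # 5 (route wins)
print(get_config("debug"))           # False (falls through to the app)
print(get_config("missing", "n/a"))  # 'n/a'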
PyFilesystem/pyfilesystem2
a6ea045e766c76bae2e2fde19294c8275773ffde
fs/path.py
python
normpath
(path)
return prefix + "/".join(components)
Normalize a path. This function simplifies a path by collapsing back-references and removing duplicated separators. Arguments: path (str): Path to normalize. Returns: str: A valid FS path. Example: >>> normpath("/foo//bar/frob/../baz") '/foo/bar/baz' >>> normpath("foo/../../bar") Traceback (most recent call last): ... fs.errors.IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem
Normalize a path.
[ "Normalize", "a", "path", "." ]
def normpath(path): # type: (Text) -> Text """Normalize a path. This function simplifies a path by collapsing back-references and removing duplicated separators. Arguments: path (str): Path to normalize. Returns: str: A valid FS path. Example: >>> normpath("/foo//bar/frob/../baz") '/foo/bar/baz' >>> normpath("foo/../../bar") Traceback (most recent call last): ... fs.errors.IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem """ # noqa: E501 if path in "/": return path # An early out if there is no need to normalize this path if not _requires_normalization(path): return path.rstrip("/") prefix = "/" if path.startswith("/") else "" components = [] # type: List[Text] try: for component in path.split("/"): if component in "..": # True for '..', '.', and '' if component == "..": components.pop() else: components.append(component) except IndexError: # FIXME (@althonos): should be raised from the IndexError raise IllegalBackReference(path) return prefix + "/".join(components)
[ "def", "normpath", "(", "path", ")", ":", "# type: (Text) -> Text", "# noqa: E501", "if", "path", "in", "\"/\"", ":", "return", "path", "# An early out if there is no need to normalize this path", "if", "not", "_requires_normalization", "(", "path", ")", ":", "return", ...
https://github.com/PyFilesystem/pyfilesystem2/blob/a6ea045e766c76bae2e2fde19294c8275773ffde/fs/path.py#L50-L91
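The core of normpath is component-by-component collapsing, where '..' pops the previous component. A simplified sketch of that loop; unlike the original, it handles '' and '.' explicitly rather than via the `in ".."` substring trick, and it raises a bare IndexError instead of IllegalBackReference when '..' escapes the root:

def collapse(path):
    components = []
    for component in path.split("/"):
        if component == "..":
            components.pop()          # back-reference: drop the previous part
        elif component not in ("", "."):
            components.append(component)
    prefix = "/" if path.startswith("/") else ""
    return prefix + "/".join(components)

print(collapse("/foo//bar/frob/../baz"))  # /foo/bar/baz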
ym2011/POC-EXP
206b22d3a6b2a172359678df33bbc5b2ad04b6c3
K8/Web-Exp/sqlmap/thirdparty/bottle/bottle.py
python
BaseRequest.content_type
(self)
return self.environ.get('CONTENT_TYPE', '').lower()
The Content-Type header as a lowercase-string (default: empty).
The Content-Type header as a lowercase-string (default: empty).
[ "The", "Content", "-", "Type", "header", "as", "a", "lowercase", "-", "string", "(", "default", ":", "empty", ")", "." ]
def content_type(self): """ The Content-Type header as a lowercase-string (default: empty). """ return self.environ.get('CONTENT_TYPE', '').lower()
[ "def", "content_type", "(", "self", ")", ":", "return", "self", ".", "environ", ".", "get", "(", "'CONTENT_TYPE'", ",", "''", ")", ".", "lower", "(", ")" ]
https://github.com/ym2011/POC-EXP/blob/206b22d3a6b2a172359678df33bbc5b2ad04b6c3/K8/Web-Exp/sqlmap/thirdparty/bottle/bottle.py#L1412-L1414
benknight/hue-alfred-workflow
4447ba61116caf4a448b50c4bfb866565d66d81e
logic/packages/workflow/workflow.py
python
Workflow.open_terminal
(self)
Open a Terminal window at workflow's :attr:`workflowdir`.
Open a Terminal window at workflow's :attr:`workflowdir`.
[ "Open", "a", "Terminal", "window", "at", "workflow", "s", ":", "attr", ":", "workflowdir", "." ]
def open_terminal(self): """Open a Terminal window at workflow's :attr:`workflowdir`.""" subprocess.call(['open', '-a', 'Terminal', self.workflowdir])
[ "def", "open_terminal", "(", "self", ")", ":", "subprocess", ".", "call", "(", "[", "'open'", ",", "'-a'", ",", "'Terminal'", ",", "self", ".", "workflowdir", "]", ")" ]
https://github.com/benknight/hue-alfred-workflow/blob/4447ba61116caf4a448b50c4bfb866565d66d81e/logic/packages/workflow/workflow.py#L2656-L2658
avocado-framework/avocado
1f9b3192e8ba47d029c33fe21266bd113d17811f
avocado/core/dispatcher.py
python
VarianterDispatcher.__setstate__
(self, state)
Very fragile pickle which works when all Varianter plugins are available on both machines. TODO: Replace this with per-plugin-refresh-mechanism
Very fragile pickle which works when all Varianter plugins are available on both machines.
[ "Very", "fragile", "pickle", "which", "works", "when", "all", "Varianter", "plugins", "are", "available", "on", "both", "machines", "." ]
def __setstate__(self, state): """ Very fragile pickle which works when all Varianter plugins are available on both machines. TODO: Replace this with per-plugin-refresh-mechanism """ self.__init__() self.extensions = state.get("extensions")
[ "def", "__setstate__", "(", "self", ",", "state", ")", ":", "self", ".", "__init__", "(", ")", "self", ".", "extensions", "=", "state", ".", "get", "(", "\"extensions\"", ")" ]
https://github.com/avocado-framework/avocado/blob/1f9b3192e8ba47d029c33fe21266bd113d17811f/avocado/core/dispatcher.py#L93-L101
ipython/ipyparallel
d35d4fb9501da5b3280b11e83ed633a95f17be1d
docs/source/examples/rmt/rmtkernel.py
python
GOE
(N)
return m / 2
Creates an NxN element of the Gaussian Orthogonal Ensemble
Creates an NxN element of the Gaussian Orthogonal Ensemble
[ "Creates", "an", "NxN", "element", "of", "the", "Gaussian", "Orthogonal", "Ensemble" ]
def GOE(N): """Creates an NxN element of the Gaussian Orthogonal Ensemble""" m = ra.standard_normal((N, N)) m += m.T return m / 2
[ "def", "GOE", "(", "N", ")", ":", "m", "=", "ra", ".", "standard_normal", "(", "(", "N", ",", "N", ")", ")", "m", "+=", "m", ".", "T", "return", "m", "/", "2" ]
https://github.com/ipython/ipyparallel/blob/d35d4fb9501da5b3280b11e83ed633a95f17be1d/docs/source/examples/rmt/rmtkernel.py#L10-L14
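Symmetrizing a Gaussian matrix with (m + m.T) / 2 is exactly what makes the ensemble orthogonal, guaranteeing a real spectrum. A numpy check of that property:

import numpy as np

m = np.random.standard_normal((4, 4))
goe = (m + m.T) / 2              # same symmetrization as GOE()
print(np.allclose(goe, goe.T))   # True: the matrix is symmetric
print(np.linalg.eigvalsh(goe))   # symmetric solver: real eigenvalues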
google-research/meta-dataset
c67dd2bb66fb2a4ce7e4e9906878e13d9b851eb5
meta_dataset/dataset_conversion/dataset_to_records.py
python
ImageNetConverter._create_data_spec
(self, train_split_only=False)
Initializes the HierarchicalDatasetSpecification instance for ImageNet. See HierarchicalDatasetSpecification for details. Args: train_split_only: bool, if True the entire dataset is assigned to the training split.
Initializes the HierarchicalDatasetSpecification instance for ImageNet.
[ "Initializes", "the", "HierarchicalDatasetSpecification", "instance", "for", "ImageNet", "." ]
def _create_data_spec(self, train_split_only=False): """Initializes the HierarchicalDatasetSpecification instance for ImageNet. See HierarchicalDatasetSpecification for details. Args: train_split_only: bool, if True the entire dataset is assigned to the training split. """ # Load lists of image names that are duplicates with images in other # datasets. They will be skipped from ImageNet. self.files_to_skip = set() for other_dataset in ('Caltech101', 'Caltech256', 'CUBirds'): duplicates_file = os.path.join( AUX_DATA_PATH, 'ImageNet_{}_duplicates.txt'.format(other_dataset)) with tf.io.gfile.GFile(duplicates_file) as fd: duplicates = fd.read() lines = duplicates.splitlines() for l in lines: # Skip comment lines l = l.strip() if l.startswith('#'): continue # Lines look like: # 'synset/synset_imgnumber.JPEG # original_file_name.jpg\n'. # Extract only the 'synset_imgnumber.JPG' part. file_path = l.split('#')[0].strip() file_name = os.path.basename(file_path) self.files_to_skip.add(file_name) ilsvrc_2012_num_leaf_images_path = FLAGS.ilsvrc_2012_num_leaf_images_path if not ilsvrc_2012_num_leaf_images_path: ilsvrc_2012_num_leaf_images_path = os.path.join(self.records_path, 'num_leaf_images.json') specification = imagenet_specification.create_imagenet_specification( learning_spec.Split, self.files_to_skip, ilsvrc_2012_num_leaf_images_path, train_split_only=train_split_only) split_subgraphs, images_per_class, _, _, _, _ = specification # Maps each class id to the name of its class. self.class_names = {} self.dataset_spec = ds_spec.HierarchicalDatasetSpecification( self.name, split_subgraphs, images_per_class, self.class_names, self.records_path, '{}.tfrecords')
[ "def", "_create_data_spec", "(", "self", ",", "train_split_only", "=", "False", ")", ":", "# Load lists of image names that are duplicates with images in other", "# datasets. They will be skipped from ImageNet.", "self", ".", "files_to_skip", "=", "set", "(", ")", "for", "oth...
https://github.com/google-research/meta-dataset/blob/c67dd2bb66fb2a4ce7e4e9906878e13d9b851eb5/meta_dataset/dataset_conversion/dataset_to_records.py#L1501-L1548
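The duplicates file is parsed line by line: full-line comments start with '#', and everything after the first '#' on a data line is dropped before the basename is extracted. A sketch of that parsing on one hypothetical line in the documented 'synset/synset_imgnumber.JPEG # original_file_name.jpg' format:

import os

line = "n01440764/n01440764_1234.JPEG  # original_file_name.jpg"
file_path = line.split("#")[0].strip()  # strip the trailing comment
print(os.path.basename(file_path))      # n01440764_1234.JPEG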
bytefish/facerec
4071e1e79a50dbf1d1f2e061d24448576e5ac37d
py/apps/videofacerec/simple_videofacerec.py
python
App.__init__
(self, model, camera_id, cascade_filename)
[]
def __init__(self, model, camera_id, cascade_filename): self.model = model self.detector = CascadedDetector(cascade_fn=cascade_filename, minNeighbors=5, scaleFactor=1.1) self.cam = create_capture(camera_id)
[ "def", "__init__", "(", "self", ",", "model", ",", "camera_id", ",", "cascade_filename", ")", ":", "self", ".", "model", "=", "model", "self", ".", "detector", "=", "CascadedDetector", "(", "cascade_fn", "=", "cascade_filename", ",", "minNeighbors", "=", "5"...
https://github.com/bytefish/facerec/blob/4071e1e79a50dbf1d1f2e061d24448576e5ac37d/py/apps/videofacerec/simple_videofacerec.py#L104-L107
quic/aimet
dae9bae9a77ca719aa7553fefde4768270fc3518
TrainingExtensions/torch/src/python/aimet_torch/meta/old_connectedgraph.py
python
ConnectedGraph.num_operations
(self)
return len(self._ops)
Total number of operations: named and anonymous modules, and functions
Total number of operations: named and anonymous modules, and functions
[ "Total", "number", "of", "operations", ":", "named", "and", "anonymous", "modules", "and", "functions" ]
def num_operations(self): """Total number of operations: named and anonymous modules, and functions""" return len(self._ops)
[ "def", "num_operations", "(", "self", ")", ":", "return", "len", "(", "self", ".", "_ops", ")" ]
https://github.com/quic/aimet/blob/dae9bae9a77ca719aa7553fefde4768270fc3518/TrainingExtensions/torch/src/python/aimet_torch/meta/old_connectedgraph.py#L180-L182
team-ocean/veros
f62d6a2fe459a807fa6f3799c8aaa0a5fb70560f
veros/core/diffusion.py
python
biharmonic_diffusion
(state, tr, diffusivity)
return dtr, flux_east, flux_north
Biharmonic mixing of tracer tr
Biharmonic mixing of tracer tr
[ "Biharmonic", "mixing", "of", "tracer", "tr" ]
def biharmonic_diffusion(state, tr, diffusivity): """ Biharmonic mixing of tracer tr """ vs = state.variables settings = state.settings del2 = allocate(state.dimensions, ("xt", "yt", "zt")) dtr = allocate(state.dimensions, ("xt", "yt", "zt")) flux_east = allocate(state.dimensions, ("xt", "yt", "zt")) flux_north = allocate(state.dimensions, ("xt", "yt", "zt")) flux_east = update( flux_east, at[:-1, :, :], -diffusivity * (tr[1:, :, :] - tr[:-1, :, :]) / (vs.cost[npx.newaxis, :, npx.newaxis] * vs.dxu[:-1, npx.newaxis, npx.newaxis]) * vs.maskU[:-1, :, :], ) flux_north = update( flux_north, at[:, :-1, :], -diffusivity * (tr[:, 1:, :] - tr[:, :-1, :]) / vs.dyu[npx.newaxis, :-1, npx.newaxis] * vs.maskV[:, :-1, :] * vs.cosu[npx.newaxis, :-1, npx.newaxis], ) del2 = update( del2, at[1:, 1:, :], vs.maskT[1:, 1:, :] * (flux_east[1:, 1:, :] - flux_east[:-1, 1:, :]) / (vs.cost[npx.newaxis, 1:, npx.newaxis] * vs.dxt[1:, npx.newaxis, npx.newaxis]) + (flux_north[1:, 1:, :] - flux_north[1:, :-1, :]) / (vs.cost[npx.newaxis, 1:, npx.newaxis] * vs.dyt[npx.newaxis, 1:, npx.newaxis]), ) del2 = utilities.enforce_boundaries(del2, settings.enable_cyclic_x) flux_east = update( flux_east, at[:-1, :, :], diffusivity * (del2[1:, :, :] - del2[:-1, :, :]) / (vs.cost[npx.newaxis, :, npx.newaxis] * vs.dxu[:-1, npx.newaxis, npx.newaxis]) * vs.maskU[:-1, :, :], ) flux_north = update( flux_north, at[:, :-1, :], diffusivity * (del2[:, 1:, :] - del2[:, :-1, :]) / vs.dyu[npx.newaxis, :-1, npx.newaxis] * vs.maskV[:, :-1, :] * vs.cosu[npx.newaxis, :-1, npx.newaxis], ) flux_east = update(flux_east, at[-1, :, :], 0.0) flux_north = update(flux_north, at[:, -1, :], 0.0) dtr = update( dtr, at[1:, 1:, :], (flux_east[1:, 1:, :] - flux_east[:-1, 1:, :]) / (vs.cost[npx.newaxis, 1:, npx.newaxis] * vs.dxt[1:, npx.newaxis, npx.newaxis]) + (flux_north[1:, 1:, :] - flux_north[1:, :-1, :]) / (vs.cost[npx.newaxis, 1:, npx.newaxis] * vs.dyt[npx.newaxis, 1:, npx.newaxis]), ) dtr = dtr * vs.maskT return dtr, flux_east, flux_north
[ "def", "biharmonic_diffusion", "(", "state", ",", "tr", ",", "diffusivity", ")", ":", "vs", "=", "state", ".", "variables", "settings", "=", "state", ".", "settings", "del2", "=", "allocate", "(", "state", ".", "dimensions", ",", "(", "\"xt\"", ",", "\"y...
https://github.com/team-ocean/veros/blob/f62d6a2fe459a807fa6f3799c8aaa0a5fb70560f/veros/core/diffusion.py#L163-L239
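Biharmonic diffusion applies the Laplacian twice (a fourth-order operator), which the code above builds from two flux-divergence passes. A 1-D periodic sketch verifying that the double Laplacian of sin(x) recovers its fourth derivative, sin(x):

import numpy as np

n = 64
x = np.linspace(0, 2 * np.pi, n, endpoint=False)
dx = x[1] - x[0]
tr = np.sin(x)

lap = (np.roll(tr, -1) - 2 * tr + np.roll(tr, 1)) / dx**2      # first pass
lap2 = (np.roll(lap, -1) - 2 * lap + np.roll(lap, 1)) / dx**2  # second pass

print(np.allclose(lap2, tr, atol=1e-2))  # True: d^4 sin / dx^4 = sin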
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/django-1.3/django/db/models/base.py
python
Model.__reduce__
(self)
return (model_unpickle, (model, defers, factory), data)
Provide pickling support. Normally, this just dispatches to Python's standard handling. However, for models with deferred field loading, we need to do things manually, as they're dynamically created classes and only module-level classes can be pickled by the default path.
Provide pickling support. Normally, this just dispatches to Python's standard handling. However, for models with deferred field loading, we need to do things manually, as they're dynamically created classes and only module-level classes can be pickled by the default path.
[ "Provide", "pickling", "support", ".", "Normally", "this", "just", "dispatches", "to", "Python", "s", "standard", "handling", ".", "However", "for", "models", "with", "deferred", "field", "loading", "we", "need", "to", "do", "things", "manually", "as", "they",...
def __reduce__(self): """ Provide pickling support. Normally, this just dispatches to Python's standard handling. However, for models with deferred field loading, we need to do things manually, as they're dynamically created classes and only module-level classes can be pickled by the default path. """ data = self.__dict__ model = self.__class__ # The obvious thing to do here is to invoke super().__reduce__() # for the non-deferred case. Don't do that. # On Python 2.4, there is something weird with __reduce__, # and as a result, the super call will cause an infinite recursion. # See #10547 and #12121. defers = [] pk_val = None if self._deferred: from django.db.models.query_utils import deferred_class_factory factory = deferred_class_factory for field in self._meta.fields: if isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute): defers.append(field.attname) if pk_val is None: # The pk_val and model values are the same for all # DeferredAttribute classes, so we only need to do this # once. obj = self.__class__.__dict__[field.attname] model = obj.model_ref() else: factory = simple_class_factory return (model_unpickle, (model, defers, factory), data)
[ "def", "__reduce__", "(", "self", ")", ":", "data", "=", "self", ".", "__dict__", "model", "=", "self", ".", "__class__", "# The obvious thing to do here is to invoke super().__reduce__()", "# for the non-deferred case. Don't do that.", "# On Python 2.4, there is something weird ...
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.3/django/db/models/base.py#L389-L420
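The three-tuple returned above is the standard pickle protocol: (callable, args, state). A minimal sketch of the same shape on a toy class:

import pickle

class Toy:
    def __init__(self, data=None):
        self.data = data

    def __reduce__(self):
        # (reconstructor, constructor args, instance state) -- pickle calls
        # Toy(None) and then restores __dict__ from the state mapping.
        return (Toy, (None,), self.__dict__)

t = pickle.loads(pickle.dumps(Toy({"x": 1})))
print(t.data)  # {'x': 1}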
rowliny/DiffHelper
ab3a96f58f9579d0023aed9ebd785f4edf26f8af
Tool/SitePackages/nltk/parse/recursivedescent.py
python
SteppingRecursiveDescentParser.initialize
(self, tokens)
Start parsing a given text. This sets the parser's tree to the start symbol, its frontier to the root node, and its remaining text to ``token['SUBTOKENS']``.
Start parsing a given text. This sets the parser's tree to the start symbol, its frontier to the root node, and its remaining text to ``token['SUBTOKENS']``.
[ "Start", "parsing", "a", "given", "text", ".", "This", "sets", "the", "parser", "s", "tree", "to", "the", "start", "symbol", "its", "frontier", "to", "the", "root", "node", "and", "its", "remaining", "text", "to", "token", "[", "SUBTOKENS", "]", "." ]
def initialize(self, tokens): """ Start parsing a given text. This sets the parser's tree to the start symbol, its frontier to the root node, and its remaining text to ``token['SUBTOKENS']``. """ self._rtext = tokens start = self._grammar.start().symbol() self._tree = Tree(start, []) self._frontier = [()] self._tried_e = {} self._tried_m = {} self._history = [] self._parses = [] if self._trace: self._trace_start(self._tree, self._frontier, self._rtext)
[ "def", "initialize", "(", "self", ",", "tokens", ")", ":", "self", ".", "_rtext", "=", "tokens", "start", "=", "self", ".", "_grammar", ".", "start", "(", ")", ".", "symbol", "(", ")", "self", ".", "_tree", "=", "Tree", "(", "start", ",", "[", "]...
https://github.com/rowliny/DiffHelper/blob/ab3a96f58f9579d0023aed9ebd785f4edf26f8af/Tool/SitePackages/nltk/parse/recursivedescent.py#L395-L411
pyca/cryptography
cb63359d3fc874b565a19ff0026af52b52d87ee3
src/cryptography/hazmat/primitives/serialization/ssh.py
python
_FragList.put_mpint
(self, val: int)
Big-endian bigint prefixed with u32 length
Big-endian bigint prefixed with u32 length
[ "Big", "-", "endian", "bigint", "prefixed", "with", "u32", "length" ]
def put_mpint(self, val: int) -> None: """Big-endian bigint prefixed with u32 length""" self.put_sshstr(_to_mpint(val))
[ "def", "put_mpint", "(", "self", ",", "val", ":", "int", ")", "->", "None", ":", "self", ".", "put_sshstr", "(", "_to_mpint", "(", "val", ")", ")" ]
https://github.com/pyca/cryptography/blob/cb63359d3fc874b565a19ff0026af52b52d87ee3/src/cryptography/hazmat/primitives/serialization/ssh.py#L198-L200
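SSH mpints (RFC 4251) are length-prefixed, minimal big-endian integers with a leading zero byte when the high bit is set. The _to_mpint helper above is internal to cryptography; a standalone sketch for non-negative values only:

import struct

def to_mpint(val):
    if val == 0:
        body = b""
    else:
        # One spare bit keeps the sign bit clear, adding 0x00 when needed.
        body = val.to_bytes((val.bit_length() + 8) // 8, "big")
    return struct.pack(">I", len(body)) + body

print(to_mpint(0x80).hex())  # 000000020080 -- length 2, then leading 0x00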
natural/java2python
b8037561c542522ae620e0a071ecc7e668461587
java2python/compiler/template.py
python
Method.iterParams
(self)
return chain(*(h(self) for h in self.configHandlers('Param')))
Yields the parameters of this method template.
Yields the parameters of this method template.
[ "Yields", "the", "parameters", "of", "this", "method", "template", "." ]
def iterParams(self): """ Yields the parameters of this method template. """ return chain(*(h(self) for h in self.configHandlers('Param')))
[ "def", "iterParams", "(", "self", ")", ":", "return", "chain", "(", "*", "(", "h", "(", "self", ")", "for", "h", "in", "self", ".", "configHandlers", "(", "'Param'", ")", ")", ")" ]
https://github.com/natural/java2python/blob/b8037561c542522ae620e0a071ecc7e668461587/java2python/compiler/template.py#L474-L476
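chain(*(h(self) for h in handlers)) flattens the per-handler parameter lists into one stream. A standalone itertools sketch with stand-in handlers (the lambdas are hypothetical, replacing configHandlers('Param')):

from itertools import chain

handlers = [lambda: ["self"], lambda: ["x", "y"]]
params = chain(*(h() for h in handlers))
print(list(params))  # ['self', 'x', 'y']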
duo-labs/isthislegit
5d51fd2e0fe070cacd1ee169ca8a371a72e005ef
dashboard/lib/flanker/mime/message/part.py
python
_CounterIO.tell
(self)
return self.length
[]
def tell(self): return self.length
[ "def", "tell", "(", "self", ")", ":", "return", "self", ".", "length" ]
https://github.com/duo-labs/isthislegit/blob/5d51fd2e0fe070cacd1ee169ca8a371a72e005ef/dashboard/lib/flanker/mime/message/part.py#L697-L698