Dataset schema (column name, type, observed value range):

    nwo                string   5 to 106 chars
    sha                string   40 chars (fixed)
    path               string   4 to 174 chars
    language           string   1 distinct value
    identifier         string   1 to 140 chars
    parameters         string   0 to 87.7k chars
    argument_list      string   1 distinct value
    return_statement   string   0 to 426k chars
    docstring          string   0 to 64.3k chars
    docstring_summary  string   0 to 26.3k chars
    docstring_tokens   list
    function           string   18 chars to 4.83M chars
    function_tokens    list
    url                string   83 to 304 chars
pygments/pygments
cd3ad20dfc8a6cb43e2c0b22b14446dcc0a554d7
pygments/lexers/webmisc.py
python
XQueryLexer.pushstate_operator_root_validate
(lexer, match, ctx)
[]
def pushstate_operator_root_validate(lexer, match, ctx):
    yield match.start(), Keyword, match.group(1)
    yield match.start(), Text, match.group(2)
    yield match.start(), Punctuation, match.group(3)
    ctx.stack = ['root']
    lexer.xquery_parse_state.append('operator')
    ctx.pos = match.end()
[ "def", "pushstate_operator_root_validate", "(", "lexer", ",", "match", ",", "ctx", ")", ":", "yield", "match", ".", "start", "(", ")", ",", "Keyword", ",", "match", ".", "group", "(", "1", ")", "yield", "match", ".", "start", "(", ")", ",", "Text", "...
https://github.com/pygments/pygments/blob/cd3ad20dfc8a6cb43e2c0b22b14446dcc0a554d7/pygments/lexers/webmisc.py#L204-L210
lovelylain/pyctp
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
option/ctp/ApiStruct.py
python
QueryBrokerDeposit.__init__
(self, BrokerID='', ExchangeID='')
[]
def __init__(self, BrokerID='', ExchangeID=''):
    # note: the constructor arguments are ignored upstream; both fields are reset to ''
    self.BrokerID = ''  # 经纪公司代码 (broker company code), char[11]
    self.ExchangeID = ''
[ "def", "__init__", "(", "self", ",", "BrokerID", "=", "''", ",", "ExchangeID", "=", "''", ")", ":", "self", ".", "BrokerID", "=", "''", "#经纪公司代码, char[11]", "self", ".", "ExchangeID", "=", "''" ]
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/option/ctp/ApiStruct.py#L5238-L5240
Tencent/QT4i
75f8705c194505b483c6b7464da8522cd53ba679
qt4i/icontrols.py
python
MetisView.os_type
(self)
return "ios"
系统类型,例如"android","ios","pc" (system type, e.g. "android", "ios", "pc")
系统类型,例如"android","ios","pc" (system type, e.g. "android", "ios", "pc")
[ "系统类型,例如", "android", ",", "ios", ",", "pc" ]
def os_type(self):
    '''系统类型,例如"android","ios","pc" (system type, e.g. "android", "ios", "pc")
    '''
    return "ios"
[ "def", "os_type", "(", "self", ")", ":", "return", "\"ios\"" ]
https://github.com/Tencent/QT4i/blob/75f8705c194505b483c6b7464da8522cd53ba679/qt4i/icontrols.py#L892-L895
llSourcell/AI_Artist
3038c06c2e389b9c919c881c9a169efe2fd7810e
lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/_base.py
python
Node.insertText
(self, data, insertBefore=None)
Insert data as text in the current node, positioned before the start of node insertBefore or to the end of the node's text.
Insert data as text in the current node, positioned before the start of node insertBefore or to the end of the node's text.
[ "Insert", "data", "as", "text", "in", "the", "current", "node", "positioned", "before", "the", "start", "of", "node", "insertBefore", "or", "to", "the", "end", "of", "the", "node", "s", "text", "." ]
def insertText(self, data, insertBefore=None):
    """Insert data as text in the current node, positioned before the
    start of node insertBefore or to the end of the node's text.
    """
    raise NotImplementedError
[ "def", "insertText", "(", "self", ",", "data", ",", "insertBefore", "=", "None", ")", ":", "raise", "NotImplementedError" ]
https://github.com/llSourcell/AI_Artist/blob/3038c06c2e389b9c919c881c9a169efe2fd7810e/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/_base.py#L59-L63
Xilinx/finn
d1cc9cf94f1c33354cc169c5a6517314d0e94e3b
src/finn/custom_op/fpgadataflow/thresholding_batch.py
python
Thresholding_Batch.get_weightstream_width_padded
(self)
return roundup_to_integer_multiple(weight_width, 8)
Returns weight stream width padded to a multiple of 8. This is required by the AXI Stream spec. Used in decoupled mode.
Returns weight stream width padded to a multiple of 8. This is required by the AXI Stream spec. Used in decoupled mode.
[ "Returns", "weight", "stream", "width", "padded", "to", "a", "multiple", "of", "8", ".", "This", "is", "required", "by", "the", "AXI", "Stream", "spec", ".", "Used", "in", "decoupled", "mode", "." ]
def get_weightstream_width_padded(self):
    """Returns weight stream width padded to a multiple of 8. This is required
    by the AXI Stream spec. Used in decoupled mode."""
    weight_width = self.get_weightstream_width()
    return roundup_to_integer_multiple(weight_width, 8)
[ "def", "get_weightstream_width_padded", "(", "self", ")", ":", "weight_width", "=", "self", ".", "get_weightstream_width", "(", ")", "return", "roundup_to_integer_multiple", "(", "weight_width", ",", "8", ")" ]
https://github.com/Xilinx/finn/blob/d1cc9cf94f1c33354cc169c5a6517314d0e94e3b/src/finn/custom_op/fpgadataflow/thresholding_batch.py#L243-L247
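The helper roundup_to_integer_multiple is not shown in this record; a minimal sketch of the padding arithmetic it implies, assumed rather than taken from the FINN sources:

def roundup_to_integer_multiple(x, factor):
    # round x up to the nearest multiple of factor, e.g. 13 -> 16 for factor=8
    return -(-x // factor) * factor  # ceiling division, then rescale

print(roundup_to_integer_multiple(17, 8))  # 24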
dropbox/dropbox-sdk-python
015437429be224732990041164a21a0501235db1
dropbox/team_log.py
python
EventType.get_shared_link_settings_allow_download_disabled
(self)
return self._value
(sharing) Disabled downloads Only call this if :meth:`is_shared_link_settings_allow_download_disabled` is true. :rtype: SharedLinkSettingsAllowDownloadDisabledType
(sharing) Disabled downloads
[ "(", "sharing", ")", "Disabled", "downloads" ]
def get_shared_link_settings_allow_download_disabled(self):
    """
    (sharing) Disabled downloads

    Only call this if :meth:`is_shared_link_settings_allow_download_disabled` is true.

    :rtype: SharedLinkSettingsAllowDownloadDisabledType
    """
    if not self.is_shared_link_settings_allow_download_disabled():
        raise AttributeError("tag 'shared_link_settings_allow_download_disabled' not set")
    return self._value
[ "def", "get_shared_link_settings_allow_download_disabled", "(", "self", ")", ":", "if", "not", "self", ".", "is_shared_link_settings_allow_download_disabled", "(", ")", ":", "raise", "AttributeError", "(", "\"tag 'shared_link_settings_allow_download_disabled' not set\"", ")", "...
https://github.com/dropbox/dropbox-sdk-python/blob/015437429be224732990041164a21a0501235db1/dropbox/team_log.py#L35823-L35833
ubuntu/ubuntu-make
939668aad1f4c38ffb74cce55b3678f6fded5c71
umake/frameworks/android.py
python
AndroidPlatformTools.parse_license
(self, line, license_txt, in_license)
return self.category.parse_license('<div class="dialog-content-stretch sdk-terms">', line, license_txt, in_license)
Parse Android SDK download page for license
Parse Android SDK download page for license
[ "Parse", "Android", "SDK", "download", "page", "for", "license" ]
def parse_license(self, line, license_txt, in_license):
    """Parse Android SDK download page for license"""
    return self.category.parse_license('<div class="dialog-content-stretch sdk-terms">',
                                       line, license_txt, in_license)
[ "def", "parse_license", "(", "self", ",", "line", ",", "license_txt", ",", "in_license", ")", ":", "return", "self", ".", "category", ".", "parse_license", "(", "'<div class=\"dialog-content-stretch sdk-terms\">'", ",", "line", ",", "license_txt", ",", "in_license",...
https://github.com/ubuntu/ubuntu-make/blob/939668aad1f4c38ffb74cce55b3678f6fded5c71/umake/frameworks/android.py#L164-L167
SigmaHQ/sigma
6f7d28b52a6468b2430e8d7dfefb79dc01e2f1af
tools/sigma/backends/misc.py
python
GrepBackend.cleanValue
(self, val)
return re.sub("\\*", ".*", val)
[]
def cleanValue(self, val):
    val = super().cleanValue(val)
    val = val.replace("'", "'\"'\"'")
    return re.sub("\\*", ".*", val)
[ "def", "cleanValue", "(", "self", ",", "val", ")", ":", "val", "=", "super", "(", ")", ".", "cleanValue", "(", "val", ")", "val", "=", "val", ".", "replace", "(", "\"'\"", ",", "\"'\\\"'\\\"'\"", ")", "return", "re", ".", "sub", "(", "\"\\\\*\"", "...
https://github.com/SigmaHQ/sigma/blob/6f7d28b52a6468b2430e8d7dfefb79dc01e2f1af/tools/sigma/backends/misc.py#L32-L35
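To make the quoting idiom above concrete, a small demonstration (not from the Sigma sources) of what cleanValue produces for a value containing a single quote and wildcards:

import re

val = "it's a *test*"
val = val.replace("'", "'\"'\"'")   # shell-safe escape: ' becomes '"'"'
print(re.sub("\\*", ".*", val))     # wildcards become regex: it'"'"'s a .*test.*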
twisted/twisted
dee676b040dd38b847ea6fb112a712cb5e119490
src/twisted/mail/interfaces.py
python
IMailboxIMAP.getUIDNext
()
Return the likely UID for the next message added to this mailbox. @rtype: L{int}
Return the likely UID for the next message added to this mailbox.
[ "Return", "the", "likely", "UID", "for", "the", "next", "message", "added", "to", "this", "mailbox", "." ]
def getUIDNext():
    """
    Return the likely UID for the next message added to this mailbox.

    @rtype: L{int}
    """
[ "def", "getUIDNext", "(", ")", ":" ]
https://github.com/twisted/twisted/blob/dee676b040dd38b847ea6fb112a712cb5e119490/src/twisted/mail/interfaces.py#L623-L628
RaRe-Technologies/gensim
8b8203d8df354673732dff635283494a33d0d422
gensim/models/doc2vec.py
python
Doc2Vec.__str__
(self)
return '%s<%s>' % (self.__class__.__name__, ','.join(segments))
Abbreviated name reflecting major configuration parameters. Returns ------- str Human readable representation of the models internal state.
Abbreviated name reflecting major configuration parameters.
[ "Abbreviated", "name", "reflecting", "major", "configuration", "parameters", "." ]
def __str__(self):
    """Abbreviated name reflecting major configuration parameters.

    Returns
    -------
    str
        Human readable representation of the models internal state.

    """
    segments = []
    if self.comment:
        segments.append('"%s"' % self.comment)
    if self.sg:
        if self.dbow_words:
            segments.append('dbow+w')  # also training words
        else:
            segments.append('dbow')  # PV-DBOW (skip-gram-style)
    else:  # PV-DM...
        if self.dm_concat:
            segments.append('dm/c')  # ...with concatenative context layer
        else:
            if self.cbow_mean:
                segments.append('dm/m')
            else:
                segments.append('dm/s')
    segments.append('d%d' % self.dv.vector_size)  # dimensions
    if self.negative:
        segments.append('n%d' % self.negative)  # negative samples
    if self.hs:
        segments.append('hs')
    if not self.sg or (self.sg and self.dbow_words):
        segments.append('w%d' % self.window)  # window size, when relevant
    if self.min_count > 1:
        segments.append('mc%d' % self.min_count)
    if self.sample > 0:
        segments.append('s%g' % self.sample)
    if self.workers > 1:
        segments.append('t%d' % self.workers)
    return '%s<%s>' % (self.__class__.__name__, ','.join(segments))
[ "def", "__str__", "(", "self", ")", ":", "segments", "=", "[", "]", "if", "self", ".", "comment", ":", "segments", ".", "append", "(", "'\"%s\"'", "%", "self", ".", "comment", ")", "if", "self", ".", "sg", ":", "if", "self", ".", "dbow_words", ":",...
https://github.com/RaRe-Technologies/gensim/blob/8b8203d8df354673732dff635283494a33d0d422/gensim/models/doc2vec.py#L677-L716
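For orientation, a hedged example (not from the record) of the abbreviated name this method produces; the parameter values are made up and the exact output depends on the model configuration:

from gensim.models.doc2vec import Doc2Vec

model = Doc2Vec(vector_size=100, window=5, min_count=2, workers=3)
print(model)  # e.g. Doc2Vec<dm/m,d100,n5,w5,mc2,s0.001,t3>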
digidotcom/xbee-python
0757f4be0017530c205175fbee8f9f61be9614d1
digi/xbee/packets/filesystem.py
python
RemoteFSRequestPacket.needs_id
(self)
return True
Override method. .. seealso:: | :meth:`.XBeeAPIPacket.needs_id`
Override method.
[ "Override", "method", "." ]
def needs_id(self):
    """
    Override method.

    .. seealso::
       | :meth:`.XBeeAPIPacket.needs_id`
    """
    return True
[ "def", "needs_id", "(", "self", ")", ":", "return", "True" ]
https://github.com/digidotcom/xbee-python/blob/0757f4be0017530c205175fbee8f9f61be9614d1/digi/xbee/packets/filesystem.py#L440-L447
shiweibsw/Translation-Tools
2fbbf902364e557fa7017f9a74a8797b7440c077
venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/html5lib/treewalkers/base.py
python
TreeWalker.startTag
(self, namespace, name, attrs)
return {"type": "StartTag", "name": name, "namespace": namespace, "data": attrs}
[]
def startTag(self, namespace, name, attrs):
    return {"type": "StartTag",
            "name": name,
            "namespace": namespace,
            "data": attrs}
[ "def", "startTag", "(", "self", ",", "namespace", ",", "name", ",", "attrs", ")", ":", "return", "{", "\"type\"", ":", "\"StartTag\"", ",", "\"name\"", ":", "name", ",", "\"namespace\"", ":", "namespace", ",", "\"data\"", ":", "attrs", "}" ]
https://github.com/shiweibsw/Translation-Tools/blob/2fbbf902364e557fa7017f9a74a8797b7440c077/venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/html5lib/treewalkers/base.py#L37-L41
rigetti/pyquil
36987ecb78d5dc85d299dd62395b7669a1cedd5a
pyquil/latex/_diagram.py
python
DiagramBuilder._build_generic_unitary
(self)
Update the partial diagram with a unitary operation. Advances the index by one.
Update the partial diagram with a unitary operation.
[ "Update", "the", "partial", "diagram", "with", "a", "unitary", "operation", "." ]
def _build_generic_unitary(self) -> None:
    """
    Update the partial diagram with a unitary operation.

    Advances the index by one.
    """
    assert self.working_instructions is not None
    instr = self.working_instructions[self.index]
    assert isinstance(instr, Gate)

    qubits = qubit_indices(instr)
    dagger = sum(m == "DAGGER" for m in instr.modifiers) % 2 == 1
    controls = sum(m == "CONTROLLED" for m in instr.modifiers)

    assert self.diagram is not None
    self.diagram.extend_lines_to_common_edge(qubits)

    control_qubits = qubits[:controls]
    target_qubits = qubits[controls:]
    if not self.diagram.is_interval(sorted(target_qubits)):
        raise ValueError(f"Unable to render instruction {instr} which targets non-adjacent qubits.")

    for q in control_qubits:
        offset = target_qubits[0] - q
        self.diagram.append(q, TIKZ_CONTROL(q, offset))

    # we put the gate on the first target line, and nop on the others
    self.diagram.append(
        target_qubits[0],
        TIKZ_GATE(instr.name, size=len(target_qubits), params=instr.params, dagger=dagger),
    )
    for q in target_qubits[1:]:
        self.diagram.append(q, TIKZ_NOP())

    self.index += 1
[ "def", "_build_generic_unitary", "(", "self", ")", "->", "None", ":", "assert", "self", ".", "working_instructions", "is", "not", "None", "instr", "=", "self", ".", "working_instructions", "[", "self", ".", "index", "]", "assert", "isinstance", "(", "instr", ...
https://github.com/rigetti/pyquil/blob/36987ecb78d5dc85d299dd62395b7669a1cedd5a/pyquil/latex/_diagram.py#L484-L517
angr/angr
4b04d56ace135018083d36d9083805be8146688b
angr/analyses/decompiler/peephole_optimizations/basepointeroffset_add_n.py
python
BasePointerOffsetAddN.optimize
(self, expr: BinaryOp)
return None
[]
def optimize(self, expr: BinaryOp):
    if (expr.op in ("Add", "Sub")
            and isinstance(expr.operands[0], BasePointerOffset)
            and isinstance(expr.operands[1], Const)):
        offset = expr.operands[0].offset
        if expr.op == "Add":
            offset += expr.operands[1].value
        else:  # expr.op == "Sub"
            offset -= expr.operands[1].value
        # convert offset to a signed integer
        max_int = (1 << (self.project.arch.bits - 1)) - 1
        if offset > max_int:
            offset -= 1 << self.project.arch.bits
        r = expr.operands[0].copy()
        r.offset = offset
        return r
    return None
[ "def", "optimize", "(", "self", ",", "expr", ":", "BinaryOp", ")", ":", "if", "(", "expr", ".", "op", "in", "(", "\"Add\"", ",", "\"Sub\"", ")", "and", "isinstance", "(", "expr", ".", "operands", "[", "0", "]", ",", "BasePointerOffset", ")", "and", ...
https://github.com/angr/angr/blob/4b04d56ace135018083d36d9083805be8146688b/angr/analyses/decompiler/peephole_optimizations/basepointeroffset_add_n.py#L12-L33
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/schemes/toric/library.py
python
ToricVarietyFactory._make_ToricVariety
(self, name, coordinate_names, base_ring)
return self.__dict__[dict_key]
r""" Construct a toric variety and cache the result. INPUT: - ``name`` -- string. One of the pre-defined names in the ``toric_varieties_rays_cones`` data structure. - ``coordinate_names`` -- A string describing the names of the homogeneous coordinates of the toric variety. - ``base_ring`` -- a ring (default: `\QQ`). The base ring for the toric variety. OUTPUT: A :class:`toric variety <sage.schemes.toric.variety.ToricVariety_field>`. EXAMPLES:: sage: toric_varieties.A1() # indirect doctest 1-d affine toric variety
r""" Construct a toric variety and cache the result.
[ "r", "Construct", "a", "toric", "variety", "and", "cache", "the", "result", "." ]
def _make_ToricVariety(self, name, coordinate_names, base_ring):
    r"""
    Construct a toric variety and cache the result.

    INPUT:

    - ``name`` -- string. One of the pre-defined names in the
      ``toric_varieties_rays_cones`` data structure.

    - ``coordinate_names`` -- A string describing the names of the
      homogeneous coordinates of the toric variety.

    - ``base_ring`` -- a ring (default: `\QQ`). The base ring for
      the toric variety.

    OUTPUT:

    A :class:`toric variety
    <sage.schemes.toric.variety.ToricVariety_field>`.

    EXAMPLES::

        sage: toric_varieties.A1()  # indirect doctest
        1-d affine toric variety
    """
    rays, cones = toric_varieties_rays_cones[name]
    if coordinate_names is None:
        dict_key = (name, base_ring)
    else:
        coordinate_names = normalize_names(coordinate_names, len(rays),
                                           DEFAULT_PREFIX)
        dict_key = (name, base_ring) + tuple(coordinate_names)
    if dict_key not in self.__dict__:
        fan = Fan(cones, rays, check=self._check)
        self.__dict__[dict_key] = \
            ToricVariety(fan,
                         coordinate_names=coordinate_names,
                         base_ring=base_ring)
    return self.__dict__[dict_key]
[ "def", "_make_ToricVariety", "(", "self", ",", "name", ",", "coordinate_names", ",", "base_ring", ")", ":", "rays", ",", "cones", "=", "toric_varieties_rays_cones", "[", "name", "]", "if", "coordinate_names", "is", "None", ":", "dict_key", "=", "(", "name", ...
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/schemes/toric/library.py#L192-L230
DataDog/dd-trace-py
13f9c6c1a8b4820365b299ab204f2bb5189d2a49
ddtrace/vendor/psutil/_pswindows.py
python
WindowsService.username
(self)
return self._query_config()['username']
The name of the user that owns this service.
The name of the user that owns this service.
[ "The", "name", "of", "the", "user", "that", "owns", "this", "service", "." ]
def username(self):
    """The name of the user that owns this service."""
    return self._query_config()['username']
[ "def", "username", "(", "self", ")", ":", "return", "self", ".", "_query_config", "(", ")", "[", "'username'", "]" ]
https://github.com/DataDog/dd-trace-py/blob/13f9c6c1a8b4820365b299ab204f2bb5189d2a49/ddtrace/vendor/psutil/_pswindows.py#L592-L594
Antergos/Cnchi
13ac2209da9432d453e0097cf48a107640b563a9
src/misc/nm.py
python
NetworkManagerWidget.__init__
(self)
Init widget
Init widget
[ "Init", "widget" ]
def __init__(self):
    """ Init widget """
    Gtk.Box.__init__(self)
    self.set_orientation(Gtk.Orientation.VERTICAL)
    self.set_spacing(12)

    self.password_entry = Gtk.Entry()
    self.view = NetworkManagerTreeView(self.password_entry,
                                       self.state_changed)

    scrolled_window = Gtk.ScrolledWindow()
    scrolled_window.set_policy(
        Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
    scrolled_window.set_shadow_type(Gtk.ShadowType.IN)
    scrolled_window.add(self.view)
    self.pack_start(scrolled_window, True, True, 0)

    self.hbox = Gtk.Box(spacing=6)
    self.pack_start(self.hbox, False, True, 0)

    self.password_label = Gtk.Label('Password:')
    self.password_entry.set_visibility(False)
    self.password_entry.connect('activate', self.connect_to_ap)
    self.password_entry.connect('changed', self.password_entry_changed)

    self.display_password = Gtk.CheckButton('Display password')
    self.display_password.connect('toggled', self.display_password_toggled)

    self.hbox.pack_start(self.password_label, False, True, 0)
    self.hbox.pack_start(self.password_entry, True, True, 0)
    self.hbox.pack_start(self.display_password, False, True, 0)
    self.hbox.set_sensitive(False)

    self.selection = self.view.get_selection()
    self.selection.connect('changed', self.changed)

    self.show_all()
[ "def", "__init__", "(", "self", ")", ":", "Gtk", ".", "Box", ".", "__init__", "(", "self", ")", "self", ".", "set_orientation", "(", "Gtk", ".", "Orientation", ".", "VERTICAL", ")", "self", ".", "set_spacing", "(", "12", ")", "self", ".", "password_ent...
https://github.com/Antergos/Cnchi/blob/13ac2209da9432d453e0097cf48a107640b563a9/src/misc/nm.py#L493-L523
hasegaw/IkaLog
bd476da541fcc296f792d4db76a6b9174c4777ad
ikalog/utils/image_filters/filters.py
python
ImageFilter.__call__
(self, img_bgr=None, img_gray=None)
return self._run_filter(img_bgr=img_bgr, img_gray=img_gray)
[]
def __call__(self, img_bgr=None, img_gray=None):
    return self._run_filter(img_bgr=img_bgr, img_gray=img_gray)
[ "def", "__call__", "(", "self", ",", "img_bgr", "=", "None", ",", "img_gray", "=", "None", ")", ":", "return", "self", ".", "_run_filter", "(", "img_bgr", "=", "img_bgr", ",", "img_gray", "=", "img_gray", ")" ]
https://github.com/hasegaw/IkaLog/blob/bd476da541fcc296f792d4db76a6b9174c4777ad/ikalog/utils/image_filters/filters.py#L46-L47
pycrypto/pycrypto
7acba5f3a6ff10f1424c309d0d34d2b713233019
lib/Crypto/Cipher/ARC2.py
python
new
(key, *args, **kwargs)
return RC2Cipher(key, *args, **kwargs)
Create a new RC2 cipher :Parameters: key : byte string The secret key to use in the symmetric cipher. Its length can vary from 1 to 128 bytes. :Keywords: mode : a *MODE_** constant The chaining mode to use for encryption or decryption. Default is `MODE_ECB`. IV : byte string (*Only* `MODE_CBC`, `MODE_CFB`, `MODE_OFB`, `MODE_OPENPGP`). The initialization vector to use for encryption or decryption. It is ignored for `MODE_ECB` and `MODE_CTR`. For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption and `block_size` +2 bytes for decryption (in the latter case, it is actually the *encrypted* IV which was prefixed to the ciphertext). It is mandatory. For all other modes, it must be 8 bytes long. nonce : byte string (*Only* `MODE_EAX`). A mandatory value that must never be reused for any other encryption. There are no restrictions on its length, but it is recommended to use at least 16 bytes. counter : callable (*Only* `MODE_CTR`). A stateful function that returns the next *counter block*, which is a byte string of `block_size` bytes. For better performance, use `Crypto.Util.Counter`. mac_len : integer (*Only* `MODE_EAX`). Length of the MAC, in bytes. It must be no larger than 8 (which is the default). segment_size : integer (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext are segmented in. It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. effective_keylen : integer Maximum cryptographic strength of the key, in bits. It can vary from 0 to 1024. The default value is 1024. :Return: an `RC2Cipher` object
Create a new RC2 cipher
[ "Create", "a", "new", "RC2", "cipher" ]
def new(key, *args, **kwargs):
    """Create a new RC2 cipher

    :Parameters:
      key : byte string
        The secret key to use in the symmetric cipher.
        Its length can vary from 1 to 128 bytes.

    :Keywords:
      mode : a *MODE_** constant
        The chaining mode to use for encryption or decryption.
        Default is `MODE_ECB`.
      IV : byte string
        (*Only* `MODE_CBC`, `MODE_CFB`, `MODE_OFB`, `MODE_OPENPGP`).
        The initialization vector to use for encryption or decryption.
        It is ignored for `MODE_ECB` and `MODE_CTR`.
        For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption
        and `block_size` +2 bytes for decryption (in the latter case, it is
        actually the *encrypted* IV which was prefixed to the ciphertext).
        It is mandatory.
        For all other modes, it must be 8 bytes long.
      nonce : byte string
        (*Only* `MODE_EAX`).
        A mandatory value that must never be reused for any other encryption.
        There are no restrictions on its length, but it is recommended to use
        at least 16 bytes.
      counter : callable
        (*Only* `MODE_CTR`). A stateful function that returns the next
        *counter block*, which is a byte string of `block_size` bytes.
        For better performance, use `Crypto.Util.Counter`.
      mac_len : integer
        (*Only* `MODE_EAX`). Length of the MAC, in bytes.
        It must be no larger than 8 (which is the default).
      segment_size : integer
        (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext
        are segmented in. It must be a multiple of 8.
        If 0 or not specified, it will be assumed to be 8.
      effective_keylen : integer
        Maximum cryptographic strength of the key, in bits.
        It can vary from 0 to 1024. The default value is 1024.

    :Return: an `RC2Cipher` object
    """
    return RC2Cipher(key, *args, **kwargs)
[ "def", "new", "(", "key", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "RC2Cipher", "(", "key", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/pycrypto/pycrypto/blob/7acba5f3a6ff10f1424c309d0d34d2b713233019/lib/Crypto/Cipher/ARC2.py#L73-L119
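Not part of the dataset record: a short usage sketch of the API this docstring documents, assuming the PyCrypto package; key, IV, and message bytes are made-up values.

from Crypto.Cipher import ARC2

key = b'0123456789abcdef'   # secret key: any length from 1 to 128 bytes
iv = b'\x00' * 8            # 8-byte IV, as required for MODE_CBC
cipher = ARC2.new(key, ARC2.MODE_CBC, iv)
ciphertext = cipher.encrypt(b'attack at dawn!!')  # length must be a multiple of 8

# decryption uses a fresh cipher object with the same key/mode/IV
plain = ARC2.new(key, ARC2.MODE_CBC, iv).decrypt(ciphertext)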
facebookresearch/ParlAI
e4d59c30eef44f1f67105961b82a83fd28d7d78b
parlai/core/params.py
python
ParlaiParser.add_model_args
(self)
Add arguments related to models such as model files.
Add arguments related to models such as model files.
[ "Add", "arguments", "related", "to", "models", "such", "as", "model", "files", "." ]
def add_model_args(self):
    """
    Add arguments related to models such as model files.
    """
    model_args = self.add_argument_group('ParlAI Model Arguments')
    model_args.add_argument(
        '-m',
        '--model',
        default=None,
        help='the model class name. can match parlai/agents/<model> for '
        'agents in that directory, or can provide a fully specified '
        'module for `from X import Y` via `-m X:Y` '
        '(e.g. `-m parlai.agents.seq2seq.seq2seq:Seq2SeqAgent`)',
    )
    model_args.add_argument(
        '-mf',
        '--model-file',
        default=None,
        help='model file name for loading and saving models',
    )
    model_args.add_argument(
        '-im',
        '--init-model',
        default=None,
        type=str,
        help='Initialize model weights and dict from this file',
    )
    model_args.add_argument(
        '--dict-class', hidden=True, help='the class of the dictionary agent uses'
    )
[ "def", "add_model_args", "(", "self", ")", ":", "model_args", "=", "self", ".", "add_argument_group", "(", "'ParlAI Model Arguments'", ")", "model_args", ".", "add_argument", "(", "'-m'", ",", "'--model'", ",", "default", "=", "None", ",", "help", "=", "'the m...
https://github.com/facebookresearch/ParlAI/blob/e4d59c30eef44f1f67105961b82a83fd28d7d78b/parlai/core/params.py#L787-L816
Tencent/FaceDetection-DSFD
09deec4376f397a1124f71bc81210fadfaac296e
model/resnet.py
python
resnet50
(pretrained=False)
return model
Constructs a ResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
Constructs a ResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
[ "Constructs", "a", "ResNet", "-", "50", "model", ".", "Args", ":", "pretrained", "(", "bool", ")", ":", "If", "True", "returns", "a", "model", "pre", "-", "trained", "on", "ImageNet" ]
def resnet50(pretrained=False):
    '''Constructs a ResNet-50 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    '''
    model = ResNet(Bottleneck, [3, 4, 6, 3])
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['resnet50']))
    return model
[ "def", "resnet50", "(", "pretrained", "=", "False", ")", ":", "model", "=", "ResNet", "(", "Bottleneck", ",", "[", "3", ",", "4", ",", "6", ",", "3", "]", ")", "if", "pretrained", ":", "model", ".", "load_state_dict", "(", "model_zoo", ".", "load_url...
https://github.com/Tencent/FaceDetection-DSFD/blob/09deec4376f397a1124f71bc81210fadfaac296e/model/resnet.py#L185-L193
taomujian/linbing
fe772a58f41e3b046b51a866bdb7e4655abaf51a
python/main.py
python
pause_scan
(request : VueRequest)
暂停扫描的接口 (interface for pausing a scan) :param: :return str response: 需要返回的数据 (the data to return)
暂停扫描的接口 (interface for pausing a scan)
[ "暂停扫描的接口" ]
async def pause_scan(request: VueRequest):
    """
    暂停扫描的接口 (interface for pausing a scan)

    :param:
    :return str response: 需要返回的数据 (the data to return)
    """
    try:
        response = {'code': '', 'message': '', 'data': ''}
        request = rsa_crypto.decrypt(request.data)
        request = json.loads(request)
        target = request['target']
        scan_id = request['scan_id']
        token = request['token']
        query_str = {
            'type': 'token',
            'data': token
        }
        username_result = mysqldb.username(query_str)
        if username_result == 'L1001':
            response['code'] = 'L1001'
            response['message'] = '系统异常'  # "system error"
            return response
        elif username_result == None:
            response['code'] = 'L1003'
            response['message'] = '认证失败'  # "authentication failed"
            return response
        else:
            scan_status = mysqldb.get_scan_status(username_result['username'], scan_id)
            if scan_status == '扫描中':  # "scanning"
                send_stop_job_command(redis_conn, md5(username_result['username'] + scan_id))
                mysqldb.update_scan_status(username_result['username'], scan_id, '暂停扫描')  # "scan paused"
                mysqldb.update_target_scan_status(username_result['username'], target, '暂停扫描')
                response['data'] = '请求正常'  # "request OK"
                response['code'] = 'L1000'
                response['message'] = '请求正常'
            else:
                response['data'] = '目标不在扫描中,无法暂停扫描'  # "target is not scanning; cannot pause"
                response['code'] = 'L1000'
                response['message'] = '请求正常'
            return response
    except Exception as e:
        print(e)
        response['code'] = 'L1001'
        response['message'] = '系统异常'
        return response
[ "async", "def", "pause_scan", "(", "request", ":", "VueRequest", ")", ":", "try", ":", "response", "=", "{", "'code'", ":", "''", ",", "'message'", ":", "''", ",", "'data'", ":", "''", "}", "request", "=", "rsa_crypto", ".", "decrypt", "(", "request", ...
https://github.com/taomujian/linbing/blob/fe772a58f41e3b046b51a866bdb7e4655abaf51a/python/main.py#L746-L793
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/collections/__init__.py
python
ChainMap.clear
(self)
Clear maps[0], leaving maps[1:] intact.
Clear maps[0], leaving maps[1:] intact.
[ "Clear", "maps", "[", "0", "]", "leaving", "maps", "[", "1", ":", "]", "intact", "." ]
def clear(self):
    'Clear maps[0], leaving maps[1:] intact.'
    self.maps[0].clear()
[ "def", "clear", "(", "self", ")", ":", "self", ".", "maps", "[", "0", "]", ".", "clear", "(", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/collections/__init__.py#L986-L988
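An illustration (not from the record) of the behaviour this docstring promises, using the standard-library ChainMap:

from collections import ChainMap

child, parent = {'a': 1}, {'b': 2}
cm = ChainMap(child, parent)
cm.clear()              # empties maps[0] only
print(child, parent)    # {} {'b': 2}
print(cm['b'])          # 2 -- lookups still fall through to maps[1:]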
microsoft/botbuilder-python
3d410365461dc434df59bdfeaa2f16d28d9df868
libraries/botbuilder-dialogs/botbuilder/dialogs/dialog_turn_result.py
python
DialogTurnResult.status
(self)
return self._status
Gets or sets the current status of the stack. :return self._status: The status of the stack. :rtype self._status: :class:`DialogTurnStatus`
Gets or sets the current status of the stack.
[ "Gets", "or", "sets", "the", "current", "status", "of", "the", "stack", "." ]
def status(self):
    """
    Gets or sets the current status of the stack.

    :return self._status: The status of the stack.
    :rtype self._status: :class:`DialogTurnStatus`
    """
    return self._status
[ "def", "status", "(", "self", ")", ":", "return", "self", ".", "_status" ]
https://github.com/microsoft/botbuilder-python/blob/3d410365461dc434df59bdfeaa2f16d28d9df868/libraries/botbuilder-dialogs/botbuilder/dialogs/dialog_turn_result.py#L23-L30
kaaedit/kaa
e6a8819a5ecba04b7db8303bd5736b5a7c9b822d
kaa/cui/color.py
python
LightPalette.__init__
(self)
[]
def __init__(self):
    super().__init__()

    self.COLOR256_TO_16[self.BASE02] = curses.COLOR_BLUE
    self.COLOR256_TO_16[self.BASE3] = curses.COLOR_CYAN
    self.COLOR256_TO_16[self.YELLOW] = curses.COLOR_YELLOW
    self.COLOR256_TO_16[self.WHITE] = curses.COLOR_WHITE

    if self.num_colors < 256:
        # todo:
        self.DEFAULT = -1
        self.BLACK = curses.COLOR_BLACK
        self.BLUE = curses.COLOR_BLUE
        self.CYAN = curses.COLOR_CYAN
        self.GREEN = curses.COLOR_GREEN
        self.MAGENTA = curses.COLOR_MAGENTA
        self.RED = curses.COLOR_RED
        self.WHITE = curses.COLOR_WHITE
        self.YELLOW = curses.COLOR_YELLOW
        self.ORANGE = curses.COLOR_MAGENTA
        self.LIGHTBLUE = curses.COLOR_CYAN
[ "def", "__init__", "(", "self", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "COLOR256_TO_16", "[", "self", ".", "BASE02", "]", "=", "curses", ".", "COLOR_BLUE", "self", ".", "COLOR256_TO_16", "[", "self", ".", "BASE3", "]", "...
https://github.com/kaaedit/kaa/blob/e6a8819a5ecba04b7db8303bd5736b5a7c9b822d/kaa/cui/color.py#L378-L399
pysmt/pysmt
ade4dc2a825727615033a96d31c71e9f53ce4764
pysmt/solvers/bdd.py
python
BddConverter.walk_forall
(self, formula, args, **kwargs)
return res
[]
def walk_forall(self, formula, args, **kwargs):
    f = args[0]
    cube = self.cube_from_var_list(formula.quantifier_vars())
    res = self.ddmanager.UnivAbstract(f, cube)
    return res
[ "def", "walk_forall", "(", "self", ",", "formula", ",", "args", ",", "*", "*", "kwargs", ")", ":", "f", "=", "args", "[", "0", "]", "cube", "=", "self", ".", "cube_from_var_list", "(", "formula", ".", "quantifier_vars", "(", ")", ")", "res", "=", "...
https://github.com/pysmt/pysmt/blob/ade4dc2a825727615033a96d31c71e9f53ce4764/pysmt/solvers/bdd.py#L329-L333
Podshot/MCEdit-Unified
90abfb170c65b877ac67193e717fa3a3ded635dd
release.py
python
get_version
()
return VERSION
Returns the name of the current version
Returns the name of the current version
[ "Returns", "the", "name", "of", "the", "current", "version" ]
def get_version():
    '''
    Returns the name of the current version
    '''
    return VERSION
[ "def", "get_version", "(", ")", ":", "return", "VERSION" ]
https://github.com/Podshot/MCEdit-Unified/blob/90abfb170c65b877ac67193e717fa3a3ded635dd/release.py#L33-L37
salesforce/awd-lstm-lm
32fcb42562aeb5c7e6c9dec3f2a3baaaf68a5cb5
data.py
python
Corpus.tokenize
(self, path)
return ids
Tokenizes a text file.
Tokenizes a text file.
[ "Tokenizes", "a", "text", "file", "." ]
def tokenize(self, path):
    """Tokenizes a text file."""
    assert os.path.exists(path)
    # Add words to the dictionary
    with open(path, 'r') as f:
        tokens = 0
        for line in f:
            words = line.split() + ['<eos>']
            tokens += len(words)
            for word in words:
                self.dictionary.add_word(word)

    # Tokenize file content
    with open(path, 'r') as f:
        ids = torch.LongTensor(tokens)
        token = 0
        for line in f:
            words = line.split() + ['<eos>']
            for word in words:
                ids[token] = self.dictionary.word2idx[word]
                token += 1

    return ids
[ "def", "tokenize", "(", "self", ",", "path", ")", ":", "assert", "os", ".", "path", ".", "exists", "(", "path", ")", "# Add words to the dictionary", "with", "open", "(", "path", ",", "'r'", ")", "as", "f", ":", "tokens", "=", "0", "for", "line", "in...
https://github.com/salesforce/awd-lstm-lm/blob/32fcb42562aeb5c7e6c9dec3f2a3baaaf68a5cb5/data.py#L34-L56
Cadene/tensorflow-model-zoo.torch
990b10ffc22d4c8eacb2a502f20415b4f70c74c2
models/research/adv_imagenet_models/inception_resnet_v2.py
python
inception_resnet_v2_base
(inputs, final_endpoint='Conv2d_7b_1x1', output_stride=16, align_feature_maps=False, scope=None)
Inception model from http://arxiv.org/abs/1602.07261. Constructs an Inception Resnet v2 network from inputs to the given final endpoint. This method can construct the network up to the final inception block Conv2d_7b_1x1. Args: inputs: a tensor of size [batch_size, height, width, channels]. final_endpoint: specifies the endpoint to construct the network up to. It can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3', 'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3', 'Mixed_5b', 'Mixed_6a', 'PreAuxLogits', 'Mixed_7a', 'Conv2d_7b_1x1'] output_stride: A scalar that specifies the requested ratio of input to output spatial resolution. Only supports 8 and 16. align_feature_maps: When true, changes all the VALID paddings in the network to SAME padding so that the feature maps are aligned. scope: Optional variable_scope. Returns: tensor_out: output tensor corresponding to the final_endpoint. end_points: a set of activations for external use, for example summaries or losses. Raises: ValueError: if final_endpoint is not set to one of the predefined values, or if the output_stride is not 8 or 16, or if the output_stride is 8 and we request an end point after 'PreAuxLogits'.
Inception model from http://arxiv.org/abs/1602.07261.
[ "Inception", "model", "from", "http", ":", "//", "arxiv", ".", "org", "/", "abs", "/", "1602", ".", "07261", "." ]
def inception_resnet_v2_base(inputs,
                             final_endpoint='Conv2d_7b_1x1',
                             output_stride=16,
                             align_feature_maps=False,
                             scope=None):
  """Inception model from http://arxiv.org/abs/1602.07261.

  Constructs an Inception Resnet v2 network from inputs to the given final
  endpoint. This method can construct the network up to the final inception
  block Conv2d_7b_1x1.

  Args:
    inputs: a tensor of size [batch_size, height, width, channels].
    final_endpoint: specifies the endpoint to construct the network up to. It
      can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
      'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3',
      'Mixed_5b', 'Mixed_6a', 'PreAuxLogits', 'Mixed_7a', 'Conv2d_7b_1x1']
    output_stride: A scalar that specifies the requested ratio of input to
      output spatial resolution. Only supports 8 and 16.
    align_feature_maps: When true, changes all the VALID paddings in the
      network to SAME padding so that the feature maps are aligned.
    scope: Optional variable_scope.

  Returns:
    tensor_out: output tensor corresponding to the final_endpoint.
    end_points: a set of activations for external use, for example summaries
      or losses.

  Raises:
    ValueError: if final_endpoint is not set to one of the predefined values,
      or if the output_stride is not 8 or 16, or if the output_stride is 8 and
      we request an end point after 'PreAuxLogits'.
  """
  if output_stride != 8 and output_stride != 16:
    raise ValueError('output_stride must be 8 or 16.')

  padding = 'SAME' if align_feature_maps else 'VALID'

  end_points = {}

  def add_and_check_final(name, net):
    end_points[name] = net
    return name == final_endpoint

  with tf.variable_scope(scope, 'InceptionResnetV2', [inputs]):
    with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
                        stride=1, padding='SAME'):
      # 149 x 149 x 32
      net = slim.conv2d(inputs, 32, 3, stride=2, padding=padding,
                        scope='Conv2d_1a_3x3')
      if add_and_check_final('Conv2d_1a_3x3', net): return net, end_points

      # 147 x 147 x 32
      net = slim.conv2d(net, 32, 3, padding=padding,
                        scope='Conv2d_2a_3x3')
      if add_and_check_final('Conv2d_2a_3x3', net): return net, end_points
      # 147 x 147 x 64
      net = slim.conv2d(net, 64, 3, scope='Conv2d_2b_3x3')
      if add_and_check_final('Conv2d_2b_3x3', net): return net, end_points
      # 73 x 73 x 64
      net = slim.max_pool2d(net, 3, stride=2, padding=padding,
                            scope='MaxPool_3a_3x3')
      if add_and_check_final('MaxPool_3a_3x3', net): return net, end_points
      # 73 x 73 x 80
      net = slim.conv2d(net, 80, 1, padding=padding,
                        scope='Conv2d_3b_1x1')
      if add_and_check_final('Conv2d_3b_1x1', net): return net, end_points
      # 71 x 71 x 192
      net = slim.conv2d(net, 192, 3, padding=padding,
                        scope='Conv2d_4a_3x3')
      if add_and_check_final('Conv2d_4a_3x3', net): return net, end_points
      # 35 x 35 x 192
      net = slim.max_pool2d(net, 3, stride=2, padding=padding,
                            scope='MaxPool_5a_3x3')
      if add_and_check_final('MaxPool_5a_3x3', net): return net, end_points

      # 35 x 35 x 320
      with tf.variable_scope('Mixed_5b'):
        with tf.variable_scope('Branch_0'):
          tower_conv = slim.conv2d(net, 96, 1, scope='Conv2d_1x1')
        with tf.variable_scope('Branch_1'):
          tower_conv1_0 = slim.conv2d(net, 48, 1, scope='Conv2d_0a_1x1')
          tower_conv1_1 = slim.conv2d(tower_conv1_0, 64, 5,
                                      scope='Conv2d_0b_5x5')
        with tf.variable_scope('Branch_2'):
          tower_conv2_0 = slim.conv2d(net, 64, 1, scope='Conv2d_0a_1x1')
          tower_conv2_1 = slim.conv2d(tower_conv2_0, 96, 3,
                                      scope='Conv2d_0b_3x3')
          tower_conv2_2 = slim.conv2d(tower_conv2_1, 96, 3,
                                      scope='Conv2d_0c_3x3')
        with tf.variable_scope('Branch_3'):
          tower_pool = slim.avg_pool2d(net, 3, stride=1, padding='SAME',
                                       scope='AvgPool_0a_3x3')
          tower_pool_1 = slim.conv2d(tower_pool, 64, 1,
                                     scope='Conv2d_0b_1x1')
        net = tf.concat(
            [tower_conv, tower_conv1_1, tower_conv2_2, tower_pool_1], 3)

      if add_and_check_final('Mixed_5b', net): return net, end_points
      # TODO(alemi): Register intermediate endpoints
      net = slim.repeat(net, 10, block35, scale=0.17)

      # 17 x 17 x 1088 if output_stride == 8,
      # 33 x 33 x 1088 if output_stride == 16
      use_atrous = output_stride == 8

      with tf.variable_scope('Mixed_6a'):
        with tf.variable_scope('Branch_0'):
          tower_conv = slim.conv2d(net, 384, 3, stride=1 if use_atrous else 2,
                                   padding=padding,
                                   scope='Conv2d_1a_3x3')
        with tf.variable_scope('Branch_1'):
          tower_conv1_0 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
          tower_conv1_1 = slim.conv2d(tower_conv1_0, 256, 3,
                                      scope='Conv2d_0b_3x3')
          tower_conv1_2 = slim.conv2d(tower_conv1_1, 384, 3,
                                      stride=1 if use_atrous else 2,
                                      padding=padding,
                                      scope='Conv2d_1a_3x3')
        with tf.variable_scope('Branch_2'):
          tower_pool = slim.max_pool2d(net, 3, stride=1 if use_atrous else 2,
                                       padding=padding,
                                       scope='MaxPool_1a_3x3')
        net = tf.concat([tower_conv, tower_conv1_2, tower_pool], 3)

      if add_and_check_final('Mixed_6a', net): return net, end_points

      # TODO(alemi): register intermediate endpoints
      with slim.arg_scope([slim.conv2d], rate=2 if use_atrous else 1):
        net = slim.repeat(net, 20, block17, scale=0.10)
      if add_and_check_final('PreAuxLogits', net): return net, end_points

      if output_stride == 8:
        # TODO(gpapan): Properly support output_stride for the rest of the net.
        raise ValueError('output_stride==8 is only supported up to the '
                         'PreAuxlogits end_point for now.')

      # 8 x 8 x 2080
      with tf.variable_scope('Mixed_7a'):
        with tf.variable_scope('Branch_0'):
          tower_conv = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
          tower_conv_1 = slim.conv2d(tower_conv, 384, 3, stride=2,
                                     padding=padding,
                                     scope='Conv2d_1a_3x3')
        with tf.variable_scope('Branch_1'):
          tower_conv1 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
          tower_conv1_1 = slim.conv2d(tower_conv1, 288, 3, stride=2,
                                      padding=padding,
                                      scope='Conv2d_1a_3x3')
        with tf.variable_scope('Branch_2'):
          tower_conv2 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
          tower_conv2_1 = slim.conv2d(tower_conv2, 288, 3,
                                      scope='Conv2d_0b_3x3')
          tower_conv2_2 = slim.conv2d(tower_conv2_1, 320, 3, stride=2,
                                      padding=padding,
                                      scope='Conv2d_1a_3x3')
        with tf.variable_scope('Branch_3'):
          tower_pool = slim.max_pool2d(net, 3, stride=2,
                                       padding=padding,
                                       scope='MaxPool_1a_3x3')
        net = tf.concat(
            [tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool], 3)

      if add_and_check_final('Mixed_7a', net): return net, end_points

      # TODO(alemi): register intermediate endpoints
      net = slim.repeat(net, 9, block8, scale=0.20)
      net = block8(net, activation_fn=None)

      # 8 x 8 x 1536
      net = slim.conv2d(net, 1536, 1, scope='Conv2d_7b_1x1')
      if add_and_check_final('Conv2d_7b_1x1', net): return net, end_points

    raise ValueError('final_endpoint (%s) not recognized', final_endpoint)
[ "def", "inception_resnet_v2_base", "(", "inputs", ",", "final_endpoint", "=", "'Conv2d_7b_1x1'", ",", "output_stride", "=", "16", ",", "align_feature_maps", "=", "False", ",", "scope", "=", "None", ")", ":", "if", "output_stride", "!=", "8", "and", "output_strid...
https://github.com/Cadene/tensorflow-model-zoo.torch/blob/990b10ffc22d4c8eacb2a502f20415b4f70c74c2/models/research/adv_imagenet_models/inception_resnet_v2.py#L95-L268
GraylinKim/sc2reader
d69feb4e0be597581040588193579d29e8241431
sc2reader/objects.py
python
User.url
(self)
return self.URL_TEMPLATE.format(**self.__dict__)
The player's formatted Battle.net profile url
The player's formatted Battle.net profile url
[ "The", "player", "s", "formatted", "Battle", ".", "net", "profile", "url" ]
def url(self):
    """The player's formatted Battle.net profile url"""
    return self.URL_TEMPLATE.format(**self.__dict__)
[ "def", "url", "(", "self", ")", ":", "return", "self", ".", "URL_TEMPLATE", ".", "format", "(", "*", "*", "self", ".", "__dict__", ")" ]
https://github.com/GraylinKim/sc2reader/blob/d69feb4e0be597581040588193579d29e8241431/sc2reader/objects.py#L242-L244
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/bdb.py
python
Bdb.clear_all_breaks
(self)
return None
Delete all existing breakpoints. If none were set, return an error message.
Delete all existing breakpoints.
[ "Delete", "all", "existing", "breakpoints", "." ]
def clear_all_breaks(self):
    """Delete all existing breakpoints.

    If none were set, return an error message.
    """
    if not self.breaks:
        return 'There are no breakpoints'
    for bp in Breakpoint.bpbynumber:
        if bp:
            bp.deleteMe()
    self.breaks = {}
    return None
[ "def", "clear_all_breaks", "(", "self", ")", ":", "if", "not", "self", ".", "breaks", ":", "return", "'There are no breakpoints'", "for", "bp", "in", "Breakpoint", ".", "bpbynumber", ":", "if", "bp", ":", "bp", ".", "deleteMe", "(", ")", "self", ".", "br...
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/bdb.py#L442-L453
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/lib-python/3/multiprocessing/pool.py
python
Pool._help_stuff_finish
(inqueue, task_handler, size)
[]
def _help_stuff_finish(inqueue, task_handler, size):
    # task_handler may be blocked trying to put items on inqueue
    debug('removing tasks from inqueue until task handler finished')
    inqueue._rlock.acquire()
    while task_handler.is_alive() and inqueue._reader.poll():
        inqueue._reader.recv()
        time.sleep(0)
[ "def", "_help_stuff_finish", "(", "inqueue", ",", "task_handler", ",", "size", ")", ":", "# task_handler may be blocked trying to put items on inqueue", "debug", "(", "'removing tasks from inqueue until task handler finished'", ")", "inqueue", ".", "_rlock", ".", "acquire", "...
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/lib-python/3/multiprocessing/pool.py#L468-L474
opsmop/opsmop
376ca587f8c5f9ca8ed1829909d075c339066034
opsmop/callbacks/callback.py
python
BaseCallbacks.setup_logger
(self)
return logger
[]
def setup_logger(self):
    path = UserDefaults.log_path()
    dirname = os.path.dirname(path)
    if not os.path.exists(dirname):
        os.makedirs(dirname, 0o770)
    logger = logging.getLogger('opsmop')
    logger.setLevel(logging.DEBUG)
    handler = logging.handlers.RotatingFileHandler(path, maxBytes=1024*5000, backupCount=5)
    formatter = logging.Formatter(UserDefaults.log_format())
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    return logger
[ "def", "setup_logger", "(", "self", ")", ":", "path", "=", "UserDefaults", ".", "log_path", "(", ")", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dirname", ")", ":", "os"...
https://github.com/opsmop/opsmop/blob/376ca587f8c5f9ca8ed1829909d075c339066034/opsmop/callbacks/callback.py#L34-L45
ifwe/digsby
f5fe00244744aa131e07f09348d10563f3d8fa99
digsby/src/yahoo/yahooutil.py
python
filename_from_url
(url)
return unquote_plus(_filematch.search(url).group(1))
Returns a normally formatted filename from a file transfer URL. >> filename_from_url("http://fs.yahoo.com/name/.tmp/filename.txt?adfadfa'") filename.txt
Returns a normally formatted filename from a file transfer URL.
[ "Returns", "a", "normally", "formatted", "filename", "from", "a", "file", "transfer", "URL", "." ]
def filename_from_url(url):
    '''
    Returns a normally formatted filename from a file transfer URL.

    >> filename_from_url("http://fs.yahoo.com/name/.tmp/filename.txt?adfadfa'")
    filename.txt
    '''
    return unquote_plus(_filematch.search(url).group(1))
[ "def", "filename_from_url", "(", "url", ")", ":", "return", "unquote_plus", "(", "_filematch", ".", "search", "(", "url", ")", ".", "group", "(", "1", ")", ")" ]
https://github.com/ifwe/digsby/blob/f5fe00244744aa131e07f09348d10563f3d8fa99/digsby/src/yahoo/yahooutil.py#L29-L37
QUANTAXIS/QUANTAXIS
d6eccb97c8385854aa596d6ba8d70ec0655519ff
QUANTAXIS/QAUtil/QADate.py
python
QA_util_ms_stamp
(ms)
return ms
explanation: 直接返回不做处理 (return the value as-is, no processing) params: * ms-> 含义 (meaning): 时间戳 (timestamp) 类型 (type): float 参数支持 (supported values): [] return: float
explanation: 直接返回不做处理 (return the value as-is, no processing)
[ "explanation", ":", "直接返回不做处理" ]
def QA_util_ms_stamp(ms):
    """
    explanation:
        直接返回不做处理 (return the value as-is, no processing)

    params:
    * ms->
        含义 (meaning): 时间戳 (timestamp)
        类型 (type): float
        参数支持 (supported values): []

    return:
        float
    """
    return ms
[ "def", "QA_util_ms_stamp", "(", "ms", ")", ":", "return", "ms" ]
https://github.com/QUANTAXIS/QUANTAXIS/blob/d6eccb97c8385854aa596d6ba8d70ec0655519ff/QUANTAXIS/QAUtil/QADate.py#L327-L341
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/tabs/tabclasses.py
python
ProjectReportsTab.view
(self)
return MySavedReportsView.urlname
[]
def view(self):
    from corehq.apps.reports.views import MySavedReportsView
    return MySavedReportsView.urlname
[ "def", "view", "(", "self", ")", ":", "from", "corehq", ".", "apps", ".", "reports", ".", "views", "import", "MySavedReportsView", "return", "MySavedReportsView", ".", "urlname" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/tabs/tabclasses.py#L127-L129
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/requests-2.25.1/requests/models.py
python
Response.raise_for_status
(self)
Raises :class:`HTTPError`, if one occurred.
Raises :class:`HTTPError`, if one occurred.
[ "Raises", ":", "class", ":", "HTTPError", "if", "one", "occurred", "." ]
def raise_for_status(self):
    """Raises :class:`HTTPError`, if one occurred."""

    http_error_msg = ''
    if isinstance(self.reason, bytes):
        # We attempt to decode utf-8 first because some servers
        # choose to localize their reason strings. If the string
        # isn't utf-8, we fall back to iso-8859-1 for all other
        # encodings. (See PR #3538)
        try:
            reason = self.reason.decode('utf-8')
        except UnicodeDecodeError:
            reason = self.reason.decode('iso-8859-1')
    else:
        reason = self.reason

    if 400 <= self.status_code < 500:
        http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)

    elif 500 <= self.status_code < 600:
        http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

    if http_error_msg:
        raise HTTPError(http_error_msg, response=self)
[ "def", "raise_for_status", "(", "self", ")", ":", "http_error_msg", "=", "''", "if", "isinstance", "(", "self", ".", "reason", ",", "bytes", ")", ":", "# We attempt to decode utf-8 first because some servers", "# choose to localize their reason strings. If the string", "# i...
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/requests-2.25.1/requests/models.py#L933-L956
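A usage sketch (not from the record) of raise_for_status with the requests API; the URL is illustrative:

import requests

resp = requests.get('https://httpbin.org/status/404')
try:
    resp.raise_for_status()
except requests.HTTPError as err:
    print(err)  # e.g. "404 Client Error: NOT FOUND for url: https://httpbin.org/status/404"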
GoogleCloudPlatform/ml-on-gcp
ffd88931674e08ef6b0b20de27700ed1da61772c
example_zoo/tensorflow/models/ncf_main/official/recommendation/data_preprocessing.py
python
instantiate_pipeline
(dataset, data_dir, params, constructor_type=None, deterministic=False)
return num_users, num_items, producer
Load and digest data CSV into a usable form. Args: dataset: The name of the dataset to be used. data_dir: The root directory of the dataset. params: dict of parameters for the run. constructor_type: The name of the constructor subclass that should be used for the input pipeline. deterministic: Tell the data constructor to produce deterministically.
Load and digest data CSV into a usable form.
[ "Load", "and", "digest", "data", "CSV", "into", "a", "usable", "form", "." ]
def instantiate_pipeline(dataset, data_dir, params, constructor_type=None,
                         deterministic=False):
  # type: (str, str, dict, typing.Optional[str], bool) -> (NCFDataset, typing.Callable)
  """Load and digest data CSV into a usable form.

  Args:
    dataset: The name of the dataset to be used.
    data_dir: The root directory of the dataset.
    params: dict of parameters for the run.
    constructor_type: The name of the constructor subclass that should be used
      for the input pipeline.
    deterministic: Tell the data constructor to produce deterministically.
  """
  tf.logging.info("Beginning data preprocessing.")

  st = timeit.default_timer()
  raw_rating_path = os.path.join(data_dir, dataset, movielens.RATINGS_FILE)
  cache_path = os.path.join(data_dir, dataset, rconst.RAW_CACHE_FILE)

  raw_data, _ = _filter_index_sort(raw_rating_path, cache_path)
  user_map, item_map = raw_data["user_map"], raw_data["item_map"]
  num_users, num_items = DATASET_TO_NUM_USERS_AND_ITEMS[dataset]

  if num_users != len(user_map):
    raise ValueError("Expected to find {} users, but found {}".format(
        num_users, len(user_map)))
  if num_items != len(item_map):
    raise ValueError("Expected to find {} items, but found {}".format(
        num_items, len(item_map)))

  producer = data_pipeline.get_constructor(constructor_type or "materialized")(
      maximum_number_epochs=params["train_epochs"],
      num_users=num_users,
      num_items=num_items,
      user_map=user_map,
      item_map=item_map,
      train_pos_users=raw_data[rconst.TRAIN_USER_KEY],
      train_pos_items=raw_data[rconst.TRAIN_ITEM_KEY],
      train_batch_size=params["batch_size"],
      batches_per_train_step=params["batches_per_step"],
      num_train_negatives=params["num_neg"],
      eval_pos_users=raw_data[rconst.EVAL_USER_KEY],
      eval_pos_items=raw_data[rconst.EVAL_ITEM_KEY],
      eval_batch_size=params["eval_batch_size"],
      batches_per_eval_step=params["batches_per_step"],
      stream_files=params["use_tpu"],
      deterministic=deterministic
  )

  run_time = timeit.default_timer() - st
  tf.logging.info("Data preprocessing complete. Time: {:.1f} sec."
                  .format(run_time))

  print(producer)
  return num_users, num_items, producer
[ "def", "instantiate_pipeline", "(", "dataset", ",", "data_dir", ",", "params", ",", "constructor_type", "=", "None", ",", "deterministic", "=", "False", ")", ":", "# type: (str, str, dict, typing.Optional[str], bool) -> (NCFDataset, typing.Callable)", "tf", ".", "logging", ...
https://github.com/GoogleCloudPlatform/ml-on-gcp/blob/ffd88931674e08ef6b0b20de27700ed1da61772c/example_zoo/tensorflow/models/ncf_main/official/recommendation/data_preprocessing.py#L178-L232
dawsonjon/Chips-2.0
57a986b8df36248bb4736bd84e3e68046b8665af
chips/compiler/verilog_area.py
python
generate_CHIP
(input_file, name, instructions, output_file, allocator, initialize_memory, memory_size=1024, no_tb_mode=False, options={})
return inputs, outputs
A big ugly function to crunch through all the instructions and generate the CHIP equivilent
A big ugly function to crunch through all the instructions and generate the CHIP equivilent
[ "A", "big", "ugly", "function", "to", "crunch", "through", "all", "the", "instructions", "and", "generate", "the", "CHIP", "equivilent" ]
def generate_CHIP(input_file, name, instructions, output_file, allocator,
                  initialize_memory, memory_size=1024, no_tb_mode=False, options={}):
    """A big ugly function to crunch through all the instructions and
    generate the CHIP equivilent"""
    instructions, initial_memory_contents = calculate_jumps(instructions, True)
    instruction_set, instruction_memory = generate_instruction_set(
        instructions)
    opcodes = [i["op"] for i in instruction_set]
    register_bits = 16
    opcode_bits = log2(len(instruction_set))
    instruction_bits = 16 + 4 + 4 + opcode_bits
    declarations = generate_declarations(
        instructions, no_tb_mode, register_bits, opcode_bits, allocator)
    inputs, outputs, input_files, output_files, testbench, inports, outports, signals = declarations
    floating_point_arithmetic, floating_point_conversions, floating_point_debug = floating_point_enables(
        instruction_set)

    # output the code in verilog
    output_file.write("//name : %s\n" % name)
    for i in inputs:
        output_file.write("//input : input_%s:16\n" % i)
    for i in outputs:
        output_file.write("//output : output_%s:16\n" % i)
    output_file.write("//source_file : %s\n" % input_file)
    output_file.write(dedent("""
    ///+============================================================================+
    ///|                                                                            |
    ///|                     This file was generated by Chips                       |
    ///|                                                                            |
    ///|                                  Chips                                     |
    ///|                                                                            |
    ///|                  http://github.com/dawsonjon/Chips-2.0                     |
    ///|                                                                            |
    ///|                                                  Python powered            |
    ///+============================================================================+
    """))

    output_file.write("module %s" % name)

    all_ports = [name for name, size in inports + outports] + ["exception"]
    if all_ports:
        output_file.write("(")
        output_file.write(",".join(all_ports))
        output_file.write(");\n")
    else:
        output_file.write(";\n")

    output_file.write(" integer file_count;\n")

    for i in floating_point_arithmetic:
        if i.startswith("double"):
            output_file.write(" reg [63:0] %s_a;\n" % (i))
            output_file.write(" reg [63:0] %s_b;\n" % (i))
            output_file.write(" wire [63:0] %s_z;\n" % (i))
        else:
            output_file.write(" reg [31:0] %s_a;\n" % (i))
            output_file.write(" reg [31:0] %s_b;\n" % (i))
            output_file.write(" wire [31:0] %s_z;\n" % (i))
        output_file.write(" reg %s_a_stb;\n" % (i))
        output_file.write(" wire %s_a_ack;\n" % (i))
        output_file.write(" reg %s_b_stb;\n" % (i))
        output_file.write(" wire %s_b_ack;\n" % (i))
        output_file.write(" wire %s_z_stb;\n" % (i))
        output_file.write(" reg %s_z_ack;\n" % (i))

    for i in floating_point_conversions:
        if i.startswith("long") or i.startswith("double"):
            output_file.write(" reg [63:0] %s_in;\n" % (i))
        else:
            output_file.write(" reg [31:0] %s_in;\n" % (i))
        if i.endswith("long") or i.endswith("double"):
            output_file.write(" wire [63:0] %s_out;\n" % (i))
        else:
            output_file.write(" wire [31:0] %s_out;\n" % (i))
        output_file.write(" wire %s_out_stb;\n" % (i))
        output_file.write(" reg %s_out_ack;\n" % (i))
        output_file.write(" reg %s_in_stb;\n" % (i))
        output_file.write(" wire %s_in_ack;\n" % (i))

    if floating_point_debug:
        output_file.write(" real fp_value;\n")

    states = [
        "stop",
        "instruction_fetch",
        "operand_fetch",
        "execute",
        "load",
        "wait_state",
    ]
    if inports:
        states.append("read")
    if outports:
        states.append("write")

    needs_divider = False
    for i in ["divide", "unsigned_divide", "modulo", "unsigned_modulo"]:
        if i in opcodes:
            states.append(i)
            needs_divider = True

    needs_long_divider = False
    for i in ["long_divide", "unsigned_long_divide", "long_modulo", "unsigned_long_modulo"]:
        if i in opcodes:
            states.append(i)
            needs_long_divider = True

    if "multiply" in opcodes:
        states.append("multiply")

    divide_latency = options.get("divide_latency", 32)
    divide_iterations = 32/divide_latency
    long_divide_latency = options.get("long_divide_latency", 64)
    long_divide_iterations = 64/long_divide_latency

    for i in floating_point_arithmetic:
        states.append("%s_write_a" % i)
        states.append("%s_write_b" % i)
        states.append("%s_read_z" % i)

    for i in floating_point_conversions:
        states.append("%s_write_a" % i)
        states.append("%s_read_z" % i)

    state_variables = []
    for index, state in enumerate(states):
        state_variables.append(
            "%s = %s'd%s" % (state, log2(len(states)), index))

    signals.append(("state", len(state_variables)))
    output_file.write(" parameter ")
    output_file.write(",\n ".join(state_variables))
    output_file.write(";\n")

    input_files = dict(
        zip(input_files, ["input_file_%s" % i for i, j in enumerate(input_files)]))
    for i in input_files.values():
        output_file.write(" integer %s;\n" % i)

    output_files = dict(
        zip(output_files, ["output_file_%s" % i for i, j in enumerate(output_files)]))
    for i in output_files.values():
        output_file.write(" integer %s;\n" % i)

    def write_declaration(object_type, name, size):
        if size == 1:
            output_file.write(object_type)
            output_file.write(name)
            output_file.write(";\n")
        else:
            output_file.write(object_type)
            output_file.write("[%i:0]" % (size - 1))
            output_file.write(" ")
            output_file.write(name)
            output_file.write(";\n")

    for name, size in inports:
        write_declaration(" input ", name, size)
    for name, size in outports:
        write_declaration(" output ", name, size)
    for name, size in signals:
        write_declaration(" reg ", name, size)

    output_file.write(" output reg exception;\n")
    output_file.write(
        " reg [%s:0] instructions [%i:0];\n" % (instruction_bits - 1, len(instructions) - 1))
    output_file.write(" reg [31:0] memory [%i:0];\n" % (memory_size-1))
    output_file.write(" reg [31:0] registers [15:0];\n")
    output_file.write(" wire [31:0] operand_a;\n")
    output_file.write(" wire [31:0] operand_b;\n")
    output_file.write(" wire [31:0] register_a;\n")
    output_file.write(" wire [31:0] register_b;\n")
    output_file.write(" wire [15:0] literal;\n")
    output_file.write(" wire [%s:0] opcode;\n" % (opcode_bits - 1))
    output_file.write(" wire [3:0] address_a;\n")
    output_file.write(" wire [3:0] address_b;\n")
    output_file.write(" wire [3:0] address_z;\n")
    output_file.write(" wire [15:0] load_address;\n")
    output_file.write(" wire [15:0] store_address;\n")
    output_file.write(" wire [31:0] store_data;\n")
    output_file.write(" wire store_enable;\n")
    output_file.write(" wire forward_a;\n")
    output_file.write(" wire forward_b;\n")

    if needs_divider:
        output_file.write(" reg [31:0] shifter;\n")
        output_file.write(" reg [32:0] difference;\n")
        output_file.write(" reg [31:0] divisor;\n")
        output_file.write(" reg [31:0] dividend;\n")
        output_file.write(" reg [31:0] quotient;\n")
        output_file.write(" reg [31:0] remainder;\n")
        output_file.write(" reg quotient_sign;\n")
        output_file.write(" reg dividend_sign;\n")

    if needs_long_divider:
        output_file.write(" reg [63:0] long_shifter;\n")
        output_file.write(" reg [64:0] long_difference;\n")
        output_file.write(" reg [63:0] long_divisor;\n")
        output_file.write(" reg [63:0] long_dividend;\n")
        output_file.write(" reg [63:0] long_quotient;\n")
        output_file.write(" reg [63:0] long_remainder;\n")
        output_file.write(" reg long_quotient_sign;\n")
        output_file.write(" reg long_dividend_sign;\n")

    if "multiply" in opcodes:
        output_file.write(" reg [31:0] product_a;\n")
        output_file.write(" reg [31:0] product_b;\n")
        output_file.write(" reg [31:0] product_c;\n")
        output_file.write(" reg [31:0] product_d;\n")

    # generate clock and reset in testbench mode
    if testbench:
        output_file.write(
            "\n //////////////////////////////////////////////////////////////////////////////\n")
        output_file.write(
            " // CLOCK AND RESET GENERATION \n")
        output_file.write(
            " // \n")
        output_file.write(
            " // This file was generated in test bench mode. In this mode, the verilog \n")
        output_file.write(
            " // output file can be executed directly within a verilog simulator. \n")
        output_file.write(
            " // In test bench mode, a simulated clock and reset signal are generated within\n")
        output_file.write(
            " // the output file. \n")
        output_file.write(
            " // Verilog files generated in testbecnch mode are not suitable for synthesis, \n")
        output_file.write(
            " // or for instantiation within a larger design.\n")

        output_file.write(" \n initial\n")
        output_file.write(" begin\n")
        output_file.write(" rst <= 1'b1;\n")
        output_file.write(" #50 rst <= 1'b0;\n")
        output_file.write(" end\n\n")

        output_file.write(" \n initial\n")
        output_file.write(" begin\n")
        output_file.write(" clk <= 1'b0;\n")
        output_file.write(" while (1) begin\n")
        output_file.write(" #5 clk <= ~clk;\n")
        output_file.write(" end\n")
        output_file.write(" end\n\n")

    # Instance Floating Point Arithmetic
    if floating_point_arithmetic or floating_point_conversions:
        output_file.write(
            "\n //////////////////////////////////////////////////////////////////////////////\n")
        output_file.write(
            " // Floating Point Arithmetic \n")
        output_file.write(
            " // \n")
        output_file.write(
            " // Generate IEEE 754 single precision divider, adder and multiplier \n")
        output_file.write(
            " // \n")

        for i in floating_point_arithmetic:
            output_file.write(" %s %s_inst(\n" % (i, i))
            output_file.write(" .clk(clk),\n")
            output_file.write(" .rst(rst),\n")
            output_file.write(" .input_a(%s_a),\n" % i)
            output_file.write(" .input_a_stb(%s_a_stb),\n" % i)
            output_file.write(" .input_a_ack(%s_a_ack),\n" % i)
            output_file.write(" .input_b(%s_b),\n" % i)
            output_file.write(" .input_b_stb(%s_b_stb),\n" % i)
            output_file.write(" .input_b_ack(%s_b_ack),\n" % i)
            output_file.write(" .output_z(%s_z),\n" % i)
            output_file.write(" .output_z_stb(%s_z_stb),\n" % i)
            output_file.write(" .output_z_ack(%s_z_ack)\n" % i)
            output_file.write(" );\n")

        for i in floating_point_conversions:
            output_file.write(" %s %s_inst(\n" % (i, i))
            output_file.write(" .clk(clk),\n")
            output_file.write(" .rst(rst),\n")
            output_file.write(" .input_a(%s_in),\n" % i)
            output_file.write(" .input_a_stb(%s_in_stb),\n" % i)
            output_file.write(" .input_a_ack(%s_in_ack),\n" % i)
            output_file.write(" .output_z(%s_out),\n" % i)
            output_file.write(" .output_z_stb(%s_out_stb),\n" % i)
            output_file.write(" .output_z_ack(%s_out_ack)\n" % i)
            output_file.write(" );\n")

    # Generate a state machine to execute the instructions
    # if initialize_memory and allocator.memory_content:
    #
    output_file.write(
        "\n //////////////////////////////////////////////////////////////////////////////\n")
    output_file.write(" // MEMORY INITIALIZATION \n")
    output_file.write(" // \n")
    output_file.write(" // In order to reduce program size, array contents have been stored into \n")
    output_file.write(" // memory at initialization. In an FPGA, this will result in the memory being \n")
    output_file.write(" // initialized when the FPGA configures. \n")
    output_file.write(" // Memory will not be re-initialized at reset. \n")
    output_file.write(" // Dissable this behaviour using the no_initialize_memory switch \n")

    output_file.write(" \n initial\n")
    output_file.write(" begin\n")
    for location, content in initial_memory_contents.iteritems():
        output_file.write(" memory[%s] = %s;\n" % (location, content))
    output_file.write(" end\n\n")

    output_file.write(
        "\n //////////////////////////////////////////////////////////////////////////////\n")
    output_file.write(
        " // INSTRUCTION INITIALIZATION \n")
    output_file.write(
        " // \n")
    output_file.write(
        " // Initialise the contents of the instruction memory \n")
    output_file.write(" //\n")
    output_file.write(" // Intruction Set\n")
    output_file.write(" // ==============\n")
    for num, opcode in enumerate(instruction_set):
        output_file.write(" // %s %s\n" % (num, opcode))

    output_file.write(" // Intructions\n")
    output_file.write(" // ===========\n")
    output_file.write(" \n initial\n")
    output_file.write(" begin\n")
    for location, instruction in enumerate(instruction_memory):
        output_file.write(" instructions[%s] = {%s, %s, %s, %s};//%s : %s %s\n" % (
            location,
            print_verilog_literal(opcode_bits, instruction["op"]),
            print_verilog_literal(4, instruction.get("z", 0)),
            print_verilog_literal(4, instruction.get("a", 0)),
            print_verilog_literal(
                16, instruction["literal"] | instruction.get("b", 0)),
            instruction["filename"],
            instruction["lineno"],
            instruction["comment"],
        ))
    output_file.write(" end\n\n")

    if input_files or output_files:
        output_file.write(
            "\n //////////////////////////////////////////////////////////////////////////////\n")
        output_file.write(
            " // OPEN FILES \n")
        output_file.write(
            " // \n")
        output_file.write(
            " // Open all files used at the start of the process \n")

        output_file.write(" \n initial\n")
        output_file.write(" begin\n")
        for file_name, file_ in input_files.iteritems():
            output_file.write(
                " %s = $fopenr(\"%s\");\n" % (file_, file_name))
        for file_name, file_ in output_files.iteritems():
            output_file.write(
                " %s = $fopen(\"%s\");\n" % (file_, file_name))
        output_file.write(" end\n\n")

    output_file.write(" \n always @(posedge clk)\n")
    output_file.write(" begin\n")
    output_file.write(" load_data <= memory[load_address];\n")
    output_file.write(" if(store_enable && state == execute) begin\n")
    output_file.write(" if (store_address > %i) begin\n" % (memory_size-1))
    output_file.write(" $display(\"!!!!stack overflow!!!!\");\n")
    output_file.write(" $finish_and_return(1);\n")
    output_file.write(" exception <= 1'b1;\n")
    output_file.write(" end\n")
    output_file.write(" memory[store_address] <= store_data;\n")
    output_file.write(" end\n")
    output_file.write(" if (rst==1'b1) begin\n")
    output_file.write(" exception <= 1'b0;\n")
    output_file.write(" end\n")
    output_file.write(" end\n\n")

    output_file.write(
        "\n //////////////////////////////////////////////////////////////////////////////\n")
    output_file.write(" // PIPELINE STAGE 1 -- FETCH INSTRUCTION\n")
    output_file.write(
        " // \n")
    output_file.write(" \n always @(posedge clk)\n")
    output_file.write(" begin\n")
    output_file.write(" //implement memory for instructions\n")
    output_file.write(
        " if (state == instruction_fetch || state == operand_fetch || state == execute) begin\n")
    output_file.write(" instruction <= instructions[program_counter];\n")
    output_file.write(" program_counter_1 <= program_counter;\n")
    output_file.write(" end\n")
    output_file.write(" end\n\n")
    output_file.write(" assign opcode = instruction[%s:%s];\n" % (
        instruction_bits - 1, instruction_bits - opcode_bits))
    output_file.write(" assign address_z = instruction[23:20];\n")
output_file.write(" assign address_a = instruction[19:16];\n") output_file.write(" assign address_b = instruction[3:0];\n") output_file.write(" assign literal = instruction[15:0];\n") output_file.write(" assign forward_a = (address_a_2 == address_z_3 && write_enable);\n") output_file.write(" assign forward_b = (address_b_2 == address_z_3 && write_enable);\n") output_file.write( "\n //////////////////////////////////////////////////////////////////////////////\n") output_file.write(" // PIPELINE STAGE 2 -- FETCH OPERANDS\n") output_file.write( " // \n") output_file.write(" \n always @(posedge clk)\n") output_file.write(" begin\n") output_file.write(" if (write_enable) begin\n") output_file.write(" registers[address_z_3] <= result;\n") output_file.write(" end\n") output_file.write( " if (state == operand_fetch || state == execute) begin\n") output_file.write(" opcode_2 <= opcode;\n") output_file.write(" literal_2 <= literal;\n") output_file.write(" address_a_2 <= address_a;\n") output_file.write(" address_b_2 <= address_b;\n") output_file.write(" address_z_2 <= address_z;\n") output_file.write(" program_counter_2 <= program_counter_1;\n") output_file.write(" end\n") output_file.write(" end\n") output_file.write(" assign register_a = registers[address_a_2];\n") output_file.write(" assign register_b = registers[address_b_2];\n") output_file.write(" assign operand_a = forward_a?result:register_a;\n") output_file.write(" assign operand_b = forward_b?result:register_b;\n") output_file.write(" assign store_address = operand_a;\n") output_file.write(" assign load_address = operand_a;\n") output_file.write(" assign store_data = operand_b;\n") store_opcode = 0 for opcode, instruction in enumerate(instruction_set): if instruction["op"] == "store": store_opcode = opcode output_file.write( " assign store_enable = (opcode_2==%s);\n" % store_opcode) output_file.write( "\n //////////////////////////////////////////////////////////////////////////////\n") output_file.write(" // PIPELINE STAGE 3 -- EXECUTE\n") output_file.write( " // \n") output_file.write(" \n always @(posedge clk)\n") output_file.write(" begin\n\n") output_file.write(" write_enable <= 0;\n") output_file.write(" timer_clock <= timer_clock + 1;\n") output_file.write(" case(state)\n\n") output_file.write(" //instruction_fetch\n") output_file.write(" instruction_fetch: begin\n") output_file.write(" program_counter <= program_counter + 1;\n") output_file.write(" state <= operand_fetch;\n") output_file.write(" end\n") output_file.write(" //operand_fetch\n") output_file.write(" operand_fetch: begin\n") output_file.write(" program_counter <= program_counter + 1;\n") output_file.write(" state <= execute;\n") output_file.write(" end\n") output_file.write(" //execute\n") output_file.write(" execute: begin\n") output_file.write(" program_counter <= program_counter + 1;\n") output_file.write(" address_z_3 <= address_z_2;\n") output_file.write(" case(opcode_2)\n\n") # A frame is executed in each state for opcode, instruction in enumerate(instruction_set): output_file.write(" //%s\n" % (instruction["op"])) output_file.write(" 16'd%s:\n" % (opcode)) output_file.write(" begin\n") if instruction["op"] == "nop": pass elif instruction["op"] == "literal": output_file.write(" result<=$signed(literal_2);\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "addl": output_file.write(" result<=operand_a + literal_2;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "literal_hi": output_file.write( " result<= {literal_2, 
operand_a[15:0]};\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "store": pass elif instruction["op"] == "load": output_file.write(" state <= load;\n") elif instruction["op"] == "call": output_file.write(" result <= program_counter_2 + 1;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" program_counter <= literal_2;\n") output_file.write(" state <= instruction_fetch;\n") elif instruction["op"] == "return": output_file.write(" program_counter <= operand_a;\n") output_file.write(" state <= instruction_fetch;\n") elif instruction["op"] == "a_lo": output_file.write(" a_lo <= operand_a;\n") output_file.write(" result <= a_lo;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "b_lo": output_file.write(" b_lo <= operand_a;\n") output_file.write(" result <= b_lo;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "a_hi": output_file.write(" a_hi <= operand_a;\n") output_file.write(" result <= a_hi;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "b_hi": output_file.write(" b_hi <= operand_a;\n") output_file.write(" result <= b_hi;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "not": output_file.write(" result <= ~operand_a;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "int_to_long": output_file.write(" if(operand_a[31]) begin\n") output_file.write(" result <= -1;\n") output_file.write(" end else begin\n") output_file.write(" result <= 0;\n") output_file.write(" end\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "int_to_float": output_file.write(" int_to_float_in <= a_lo;\n") output_file.write(" state <= int_to_float_write_a;\n") elif instruction["op"] == "float_to_int": output_file.write(" float_to_int_in <= a_lo;\n") output_file.write(" state <= float_to_int_write_a;\n") elif instruction["op"] == "long_to_double": output_file.write(" long_to_double_in <= {a_hi, a_lo};\n") output_file.write(" state <= long_to_double_write_a;\n") elif instruction["op"] == "double_to_long": output_file.write(" double_to_long_in <= {a_hi, a_lo};\n") output_file.write(" state <= double_to_long_write_a;\n") elif instruction["op"] == "float_to_double": output_file.write(" float_to_double_in <= a_lo;\n") output_file.write(" state <= float_to_double_write_a;\n") elif instruction["op"] == "double_to_float": output_file.write( " double_to_float_in <= {a_hi, a_lo};\n") output_file.write(" state <= double_to_float_write_a;\n") elif instruction["op"] == "add": output_file.write( " long_result = operand_a + operand_b;\n") output_file.write(" result <= long_result[31:0];\n") output_file.write(" carry[0] <= long_result[32];\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "add_with_carry": output_file.write( " long_result = operand_a + operand_b + carry[0];\n") output_file.write(" result <= long_result[31:0];\n") output_file.write(" carry[0] <= long_result[32];\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "subtract": output_file.write( " long_result = operand_a + (~operand_b) + 1;\n") output_file.write(" result <= long_result[31:0];\n") output_file.write(" carry[0] <= ~long_result[32];\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "subtract_with_carry": output_file.write( " long_result = operand_a + (~operand_b) + carry[0];\n") output_file.write(" result <= long_result[31:0];\n") output_file.write(" carry[0] <= ~long_result[32];\n") output_file.write(" 
write_enable <= 1;\n") elif instruction["op"] == "multiply": output_file.write(" product_a <= operand_a[15:0] * operand_b[15:0];\n") output_file.write(" product_b <= operand_a[15:0] * operand_b[31:16];\n") output_file.write(" product_c <= operand_a[31:16] * operand_b[15:0];\n") output_file.write(" product_d <= operand_a[31:16] * operand_b[31:16];\n") output_file.write(" state <= multiply;\n") elif instruction["op"] == "unsigned_divide": output_file.write(" dividend <= operand_a;\n") output_file.write(" divisor <= operand_b;\n") output_file.write(" timer <= %i;\n"%divide_latency) output_file.write(" remainder <= 0;\n") output_file.write(" quotient <= 0;\n") output_file.write(" state <= unsigned_divide;\n") elif instruction["op"] == "divide": output_file.write(" quotient_sign <= operand_a[31] ^ operand_b[31];\n") output_file.write(" dividend <= operand_a;\n") output_file.write(" divisor <= operand_b;\n") output_file.write(" if (operand_a[31]) begin\n") output_file.write(" dividend <= -operand_a;\n") output_file.write(" end\n") output_file.write(" if (operand_b[31]) begin\n") output_file.write(" divisor <= -operand_b;\n") output_file.write(" end\n") output_file.write(" timer <= %i;\n"%divide_latency) output_file.write(" remainder <= 0;\n") output_file.write(" quotient <= 0;\n") output_file.write(" state <= divide;\n") elif instruction["op"] == "unsigned_modulo": output_file.write(" dividend <= operand_a;\n") output_file.write(" divisor <= operand_b;\n") output_file.write(" timer <= %i;\n"%divide_latency) output_file.write(" remainder <= 0;\n") output_file.write(" quotient <= 0;\n") output_file.write(" state <= unsigned_modulo;\n") elif instruction["op"] == "modulo": output_file.write(" dividend_sign <= operand_a[31];\n") output_file.write(" dividend <= operand_a;\n") output_file.write(" divisor <= operand_b;\n") output_file.write(" if (operand_a[31]) begin\n") output_file.write(" dividend <= -operand_a;\n") output_file.write(" end\n") output_file.write(" if (operand_b[31]) begin\n") output_file.write(" divisor <= -operand_b;\n") output_file.write(" end\n") output_file.write(" timer <= %i;\n"%divide_latency) output_file.write(" remainder <= 0;\n") output_file.write(" quotient <= 0;\n") output_file.write(" state <= modulo;\n") elif instruction["op"] == "unsigned_long_divide": output_file.write(" long_dividend <= {a_hi, a_lo};\n") output_file.write(" long_divisor <= {b_hi, b_lo};\n") output_file.write(" timer <= %i;\n"%long_divide_latency) output_file.write(" long_remainder <= 0;\n") output_file.write(" long_quotient <= 0;\n") output_file.write(" state <= unsigned_long_divide;\n") elif instruction["op"] == "long_divide": output_file.write(" long_quotient_sign <= a_hi[31] ^ b_hi[31];\n") output_file.write(" long_dividend <= {a_hi, a_lo};\n") output_file.write(" long_divisor <= {b_hi, b_lo};\n") output_file.write(" if (a_hi[31]) begin\n") output_file.write(" long_dividend <= -{a_hi, a_lo};\n") output_file.write(" end\n") output_file.write(" if (b_hi[31]) begin\n") output_file.write(" long_divisor <= -{b_hi, b_lo};\n") output_file.write(" end\n") output_file.write(" timer <= %i;\n"%long_divide_latency) output_file.write(" long_remainder <= 0;\n") output_file.write(" long_quotient <= 0;\n") output_file.write(" state <= long_divide;\n") elif instruction["op"] == "unsigned_long_modulo": output_file.write(" long_dividend <= {a_hi, a_lo};\n") output_file.write(" long_divisor <= {b_hi, b_lo};\n") output_file.write(" timer <= %i;\n"%long_divide_latency) output_file.write(" long_remainder <= 0;\n") 
output_file.write(" long_quotient <= 0;\n") output_file.write(" state <= unsigned_long_modulo;\n") elif instruction["op"] == "long_modulo": output_file.write(" long_dividend_sign <= a_hi[31];\n") output_file.write(" long_dividend <= {a_hi, a_lo};\n") output_file.write(" long_divisor <= {b_hi, b_lo};\n") output_file.write(" if (a_hi[31]) begin\n") output_file.write(" long_dividend <= -{a_hi, a_lo};\n") output_file.write(" end\n") output_file.write(" if (b_hi[31]) begin\n") output_file.write(" long_divisor <= -{b_hi, b_lo};\n") output_file.write(" end\n") output_file.write(" timer <= %i;\n"%long_divide_latency) output_file.write(" long_remainder <= 0;\n") output_file.write(" long_quotient <= 0;\n") output_file.write(" state <= long_modulo;\n") elif instruction["op"] == "carry": output_file.write(" result <= carry;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "or": output_file.write(" result <= operand_a | operand_b;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "and": output_file.write(" result <= operand_a & operand_b;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "xor": output_file.write(" result <= operand_a ^ operand_b;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "shift_left": output_file.write(" if(operand_b < 32) begin\n") output_file.write( " result <= operand_a << operand_b;\n") output_file.write( " carry <= operand_a >> (32-operand_b);\n") output_file.write(" end else begin\n") output_file.write(" result <= 0;\n") output_file.write(" carry <= operand_a;\n") output_file.write(" end\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "shift_left_with_carry": output_file.write(" if(operand_b < 32) begin\n") output_file.write( " result <= (operand_a << operand_b) | carry;\n") output_file.write(" end else begin\n") output_file.write(" result <= carry;\n") output_file.write(" end\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "shift_right": output_file.write(" if(operand_b < 32) begin\n") output_file.write( " result <= $signed(operand_a) >>> operand_b;\n") output_file.write( " carry <= operand_a << (32-operand_b);\n") output_file.write(" end else begin\n") output_file.write(" result <= operand_a[31]?-1:0;\n") output_file.write(" carry <= operand_a;\n") output_file.write(" end\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "unsigned_shift_right": output_file.write(" if(operand_b < 32) begin\n") output_file.write( " result <= operand_a >> operand_b;\n") output_file.write( " carry <= operand_a << (32-operand_b);\n") output_file.write(" end else begin\n") output_file.write(" result <= 0;\n") output_file.write(" carry <= operand_a;\n") output_file.write(" end\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "shift_right_with_carry": output_file.write(" if(operand_b < 32) begin\n") output_file.write( " result <= (operand_a >> operand_b) | carry;\n") output_file.write(" end else begin\n") output_file.write(" result <= carry;\n") output_file.write(" end\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "greater": output_file.write( " result <= $signed(operand_a) > $signed(operand_b);\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "greater_equal": output_file.write( " result <= $signed(operand_a) >= $signed(operand_b);\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "unsigned_greater": output_file.write( " result <= 
$unsigned(operand_a) > $unsigned(operand_b);\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "unsigned_greater_equal": output_file.write( " result <= $unsigned(operand_a) >= $unsigned(operand_b);\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "equal": output_file.write(" result <= operand_a == operand_b;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "not_equal": output_file.write(" result <= operand_a != operand_b;\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "float_add": output_file.write(" adder_a_stb <= 1;\n") output_file.write(" adder_a <= operand_a;\n") output_file.write(" adder_b <= operand_b;\n") output_file.write(" state <= adder_write_a;\n") elif instruction["op"] == "float_subtract": output_file.write(" adder_a_stb <= 1;\n") output_file.write(" adder_a <= operand_a;\n") output_file.write( " adder_b <= {~operand_b[31], operand_b[30:0]};\n") output_file.write(" state <= adder_write_a;\n") elif instruction["op"] == "float_multiply": output_file.write(" multiplier_a_stb <= 1;\n") output_file.write(" multiplier_a <= operand_a;\n") output_file.write(" multiplier_b <= operand_b;\n") output_file.write(" state <= multiplier_write_a;\n") elif instruction["op"] == "float_divide": output_file.write(" divider_a_stb <= 1;\n") output_file.write(" divider_a <= operand_a;\n") output_file.write(" divider_b <= operand_b;\n") output_file.write(" state <= divider_write_a;\n") elif instruction["op"] == "long_float_add": output_file.write(" double_adder_a <= {a_hi, a_lo};\n") output_file.write(" double_adder_b <= {b_hi, b_lo};\n") output_file.write(" state <= double_adder_write_a;\n") elif instruction["op"] == "long_float_subtract": output_file.write(" double_adder_a <= {a_hi, a_lo};\n") output_file.write( " double_adder_b <= {~b_hi[31], b_hi[30:0], b_lo};\n") output_file.write(" state <= double_adder_write_a;\n") elif instruction["op"] == "long_float_multiply": output_file.write( " double_multiplier_a <= {a_hi, a_lo};\n") output_file.write( " double_multiplier_b <= {b_hi, b_lo};\n") output_file.write( " state <= double_multiplier_write_a;\n") elif instruction["op"] == "long_float_divide": output_file.write(" double_divider_a <= {a_hi, a_lo};\n") output_file.write(" double_divider_b <= {b_hi, b_lo};\n") output_file.write(" state <= double_divider_write_a;\n") elif instruction["op"] == "jmp_if_false": output_file.write(" if (operand_a == 0) begin\n") output_file.write(" program_counter <= literal_2;\n") output_file.write(" state <= instruction_fetch;\n") output_file.write(" end\n") elif instruction["op"] == "jmp_if_true": output_file.write(" if (operand_a != 0) begin\n") output_file.write(" program_counter <= literal_2;\n") output_file.write(" state <= instruction_fetch;\n") output_file.write(" end\n") elif instruction["op"] == "goto": output_file.write(" program_counter <= literal_2;\n") output_file.write(" state <= instruction_fetch;\n") elif instruction["op"] == "file_read": output_file.write(" 16'd%s:\n" % (opcode)) output_file.write(" begin\n") output_file.write(" file_count = $fscanf(%s, \"%%d\\n\", result);\n" % ( input_files[instruction["file_name"]])) output_file.write(" write_enable <= 1;\n") output_file.write(" end\n\n") elif instruction["op"] == "float_file_write": output_file.write(' long_result[63] = operand_a[31];\n') output_file.write(' if (operand_a[30:23] == 0) begin\n') output_file.write(' long_result[62:52] = 0;\n') output_file.write( ' end else if (operand_a[30:23] == 127) 
begin\n') output_file.write(' long_result[62:52] = 1023;\n') output_file.write(' end else begin\n') output_file.write( ' long_result[62:52] = (operand_a[30:23] - 127) + 1023;\n') output_file.write(' end\n') output_file.write( ' long_result[51:29] = operand_a[22:0];\n') output_file.write(' long_result[28:0] = 0;\n') output_file.write( ' fp_value = $bitstoreal(long_result);\n') output_file.write(' $fdisplay (%s, "%%g", fp_value);\n' % ( output_files[ instruction["file_name"]])) elif instruction["op"] == "long_float_file_write": output_file.write( ' fp_value = $bitstoreal({a_hi, a_lo});\n') output_file.write(' $fdisplay (%s, "%%g", fp_value);\n' % ( output_files[ instruction["file_name"]])) elif instruction["op"] == "unsigned_file_write": output_file.write(" $fdisplay (%s, \"%%d\", $unsigned(operand_a));\n" % ( output_files[instruction["file_name"]])) elif instruction["op"] == "file_write": output_file.write(" $fdisplay (%s, \"%%d\", $signed(operand_a));\n" % ( output_files[instruction["file_name"]])) elif instruction["op"] == "read": output_file.write(" state <= read;\n") output_file.write(" read_input <= operand_a;\n") elif instruction["op"] == "ready": output_file.write(" result <= 0;\n") output_file.write(" case(operand_a)\n\n") for handle, input_name in allocator.input_names.iteritems(): output_file.write(" %s:\n" % (handle)) output_file.write(" begin\n") output_file.write( " result[0] <= input_%s_stb;\n" % input_name) output_file.write(" end\n") output_file.write(" endcase\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "output_ready": output_file.write(" result <= 0;\n") output_file.write(" case(operand_a)\n\n") for handle, output_name in allocator.output_names.iteritems(): output_file.write(" %s:\n" % (handle)) output_file.write(" begin\n") output_file.write( " result[0] <= output_%s_ack;\n" % output_name) output_file.write(" end\n") output_file.write(" endcase\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "write": output_file.write(" state <= write;\n") output_file.write(" write_output <= operand_a;\n") output_file.write(" write_value <= operand_b;\n") elif instruction["op"] == "assert": output_file.write(" if (operand_a == 0) begin\n") output_file.write(" $display(\"Assertion failed at line: %s in file: %s\");\n" % ( instruction["line"], instruction["file"])) output_file.write(" $finish_and_return(1);\n") output_file.write(" end\n") elif instruction["op"] == "wait_clocks": output_file.write(" timer <= operand_a;\n") output_file.write(" state <= wait_state;\n") elif instruction["op"] == "timer_low": output_file.write(" result <= timer_clock[31:0];\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "timer_high": output_file.write(" result <= timer_clock[63:32];\n") output_file.write(" write_enable <= 1;\n") elif instruction["op"] == "report": output_file.write(' $display ("%%d (report (int) at line: %s in file: %s)", $signed(a_lo));\n' % ( instruction["line"], instruction["file"],)) elif instruction["op"] == "long_report": output_file.write(' $display ("%%d (report (long) at line: %s in file: %s)", $signed({a_hi, a_lo}));\n' % ( instruction["line"], instruction["file"],)) elif instruction["op"] == "float_report": output_file.write(' long_result[63] = a_lo[31];\n') output_file.write(' if (a_lo[30:23] == 0) begin\n') output_file.write(' long_result[62:52] = 0;\n') output_file.write( ' end else if (a_lo[30:23] == 255) begin\n') output_file.write(' long_result[62:52] = 2047;\n') output_file.write(' end else 
begin\n') output_file.write( ' long_result[62:52] = (a_lo[30:23] - 127) + 1023;\n') output_file.write(' end\n') output_file.write(' long_result[51:29] = a_lo[22:0];\n') output_file.write(' long_result[28:0] = 0;\n') output_file.write( ' fp_value = $bitstoreal(long_result);\n') output_file.write(' $display ("%%f (report (float) at line: %s in file: %s)", fp_value);\n' % ( instruction["line"], instruction["file"])) elif instruction["op"] == "long_float_report": output_file.write( ' fp_value = $bitstoreal({a_hi, a_lo});\n') output_file.write(' $display ("%%f (report (double) at line: %s in file: %s)", fp_value);\n' % ( instruction["line"], instruction["file"])) elif instruction["op"] == "unsigned_report": output_file.write(' $display ("%%d (report (unsigned) at line: %s in file: %s)", $unsigned(a_lo));\n' % ( instruction["line"], instruction["file"])) elif instruction["op"] == "long_unsigned_report": output_file.write(' $display ("%%d (report (unsigned long) at line: %s in file: %s)", $unsigned({a_hi, a_lo}));\n' % ( instruction["line"], instruction["file"])) elif instruction["op"] == "stop": # If we are in testbench mode stop the simulation # If we are part of a larger design, other C programs may still be # running for file_ in input_files.values(): output_file.write(" $fclose(%s);\n" % file_) for file_ in output_files.values(): output_file.write(" $fclose(%s);\n" % file_) if testbench: output_file.write(' $finish;\n') output_file.write(' state <= stop;\n') else: print "unsuported instruction", instruction["op"] print instruction output_file.write(" end\n\n") output_file.write(" endcase\n\n") output_file.write(" end\n\n") if "multiply" in opcodes: output_file.write(" multiply:\n") output_file.write(" begin\n") output_file.write(" long_result = product_a +\n") output_file.write(" (product_b << 16) +\n") output_file.write(" (product_c << 16) +\n") output_file.write(" (product_d << 32);\n") output_file.write(" result <= long_result[31:0];\n") output_file.write(" carry <= long_result[63:32];\n") output_file.write(" write_enable <= 1;\n") output_file.write(" state <= execute;\n") output_file.write(" end\n\n") if "unsigned_divide" in opcodes: output_file.write(" unsigned_divide:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" result <= quotient;\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if "divide" in opcodes: output_file.write(" divide:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" if (quotient_sign) begin\n") output_file.write(" result <= -quotient;\n") output_file.write(" end else begin\n") output_file.write(" result <= quotient;\n") output_file.write(" end\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if "unsigned_modulo" in opcodes: output_file.write(" unsigned_modulo:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" result <= remainder;\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if "modulo" in opcodes: 
output_file.write(" modulo:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" if (dividend_sign) begin\n") output_file.write(" result <= -remainder;\n") output_file.write(" end else begin\n") output_file.write(" result <= remainder;\n") output_file.write(" end\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if "unsigned_long_divide" in opcodes: output_file.write(" unsigned_long_divide:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" a_hi <= long_quotient[63:32];\n") output_file.write(" a_lo <= long_quotient[31:0];\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if "long_divide" in opcodes: output_file.write(" long_divide:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" if (long_quotient_sign) begin\n") output_file.write(" long_result = -long_quotient;\n") output_file.write(" a_hi <= long_result[63:32];\n") output_file.write(" a_lo <= long_result[31:0];\n") output_file.write(" end else begin\n") output_file.write(" a_hi <= long_quotient[63:32];\n") output_file.write(" a_lo <= long_quotient[31:0];\n") output_file.write(" end\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if "unsigned_long_modulo" in opcodes: output_file.write(" unsigned_long_modulo:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" a_hi <= long_remainder[63:32];\n") output_file.write(" a_lo <= long_remainder[31:0];\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if "long_modulo" in opcodes: output_file.write(" long_modulo:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" if (long_dividend_sign) begin\n") output_file.write(" long_result = -long_remainder;\n") output_file.write(" a_hi <= long_result[63:32];\n") output_file.write(" a_lo <= long_result[31:0];\n") output_file.write(" end else begin\n") output_file.write(" a_hi <= long_remainder[63:32];\n") output_file.write(" a_lo <= long_remainder[31:0];\n") output_file.write(" end\n") output_file.write(" state <= execute;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if allocator.input_names: output_file.write(" read:\n") output_file.write(" begin\n") output_file.write(" case(read_input)\n") for handle, input_name in allocator.input_names.iteritems(): output_file.write(" %s:\n" % (handle)) output_file.write(" begin\n") output_file.write(" s_input_%s_ack <= 1;\n" % input_name) output_file.write(" if (s_input_%s_ack && input_%s_stb) begin\n" % ( input_name, input_name)) output_file.write(" result <= input_%s;\n" % input_name) output_file.write(" write_enable <= 1;\n") output_file.write(" 
s_input_%s_ack <= 0;\n" % input_name) output_file.write(" state <= execute;\n") output_file.write(" end\n") output_file.write(" end\n") output_file.write(" endcase\n") output_file.write(" end\n\n") if allocator.output_names: output_file.write(" write:\n") output_file.write(" begin\n") output_file.write(" case(write_output)\n") for handle, output_name in allocator.output_names.iteritems(): output_file.write(" %s:\n" % (handle)) output_file.write(" begin\n") output_file.write(" s_output_%s_stb <= 1;\n" % output_name) output_file.write( " s_output_%s <= write_value;\n" % output_name) output_file.write(" if (output_%s_ack && s_output_%s_stb) begin\n" % ( output_name, output_name)) output_file.write( " s_output_%s_stb <= 0;\n" % output_name) output_file.write(" state <= execute;\n") output_file.write(" end\n") output_file.write(" end\n") output_file.write(" endcase\n") output_file.write(" end\n\n") output_file.write(" load:\n") output_file.write(" begin\n") output_file.write(" result <= load_data;\n") output_file.write(" write_enable <= 1;\n") output_file.write(" state <= execute;\n") output_file.write(" end\n\n") output_file.write(" wait_state:\n") output_file.write(" begin\n") output_file.write(" if (timer) begin\n") output_file.write(" timer <= timer - 1;\n") output_file.write(" end else begin\n") output_file.write(" state <= execute;\n") output_file.write(" end\n") output_file.write(" end\n\n") output_file.write(" stop:\n") output_file.write(" begin\n") output_file.write(" end\n\n") for i in floating_point_arithmetic: output_file.write(" %s_write_a:\n" % i) output_file.write(" begin\n") output_file.write(" %s_a_stb <= 1;\n" % i) output_file.write(" if (%s_a_stb && %s_a_ack) begin\n" % (i, i)) output_file.write(" %s_a_stb <= 0;\n" % i) output_file.write(" state <= %s_write_b;\n" % i) output_file.write(" end\n") output_file.write(" end\n\n") output_file.write(" %s_write_b:\n" % i) output_file.write(" begin\n") output_file.write(" %s_b_stb <= 1;\n" % i) output_file.write(" if (%s_b_stb && %s_b_ack) begin\n" % (i, i)) output_file.write(" %s_b_stb <= 0;\n" % i) output_file.write(" state <= %s_read_z;\n" % i) output_file.write(" end\n") output_file.write(" end\n\n") output_file.write(" %s_read_z:\n" % i) output_file.write(" begin\n") output_file.write(" %s_z_ack <= 1;\n" % i) output_file.write(" if (%s_z_stb && %s_z_ack) begin\n" % (i, i)) if i.startswith("double"): output_file.write(" a_lo <= %s_z[31:0];\n" % i) output_file.write(" a_hi <= %s_z[63:32];\n" % i) else: output_file.write(" result <= %s_z;\n" % i) output_file.write(" write_enable <= 1;\n") output_file.write(" %s_z_ack <= 0;\n" % i) output_file.write(" state <= execute;\n") output_file.write(" end\n") output_file.write(" end\n\n") for i in floating_point_conversions: output_file.write(" %s_write_a:\n" % i) output_file.write(" begin\n") output_file.write(" %s_in_stb <= 1;\n" % i) output_file.write( " if (%s_in_stb && %s_in_ack) begin\n" % (i, i)) output_file.write(" %s_in_stb <= 0;\n" % i) output_file.write(" state <= %s_read_z;\n" % i) output_file.write(" end\n") output_file.write(" end\n\n") output_file.write(" %s_read_z:\n" % i) output_file.write(" begin\n") output_file.write(" %s_out_ack <= 1;\n" % i) output_file.write( " if (%s_out_stb && %s_out_ack) begin\n" % (i, i)) output_file.write(" %s_out_ack <= 0;\n" % i) if (i.startswith("double") and not i.endswith("float")) or i.endswith("double") or i.startswith("long"): output_file.write(" a_lo <= %s_out[31:0];\n" % i) output_file.write(" a_hi <= %s_out[63:32];\n" % i) else: 
output_file.write(" a_lo <= %s_out;\n" % i) output_file.write(" state <= execute;\n") output_file.write(" end\n") output_file.write(" end\n\n") output_file.write(" endcase\n\n") if needs_divider: output_file.write(" //divider kernel logic\n") output_file.write(" repeat (%u) begin\n"%(divide_iterations)) output_file.write(" shifter = {remainder[30:0], dividend[31]};\n") output_file.write(" difference = shifter - divisor;\n") output_file.write(" dividend = dividend << 1;\n") output_file.write(" if (difference[32]) begin\n") output_file.write(" remainder = shifter;\n") output_file.write(" quotient = quotient << 1;\n") output_file.write(" end else begin\n") output_file.write(" remainder = difference[31:0];\n") output_file.write(" quotient = quotient << 1 | 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") if needs_long_divider: output_file.write(" //long divider kernel logic\n") output_file.write(" repeat (%u) begin\n"%(divide_iterations)) output_file.write(" long_shifter = {long_remainder[62:0], long_dividend[63]};\n") output_file.write(" long_difference = long_shifter - long_divisor;\n") output_file.write(" long_dividend = long_dividend << 1;\n") output_file.write(" if (long_difference[64]) begin\n") output_file.write(" long_remainder = long_shifter;\n") output_file.write(" long_quotient = long_quotient << 1;\n") output_file.write(" end else begin\n") output_file.write(" long_remainder = long_difference[63:0];\n") output_file.write(" long_quotient = long_quotient << 1 | 1;\n") output_file.write(" end\n") output_file.write(" end\n\n") # Reset program counter and control signals output_file.write(" if (rst == 1'b1) begin\n") output_file.write(" timer <= 0;\n") output_file.write(" timer_clock <= 0;\n") output_file.write(" program_counter <= 0;\n") output_file.write(" address_z_3 <= 0;\n") output_file.write(" result <= 0;\n") output_file.write(" a = 0;\n") output_file.write(" b = 0;\n") output_file.write(" z = 0;\n") output_file.write(" state <= instruction_fetch;\n") for i in inputs: output_file.write(" s_input_%s_ack <= 0;\n" % (i)) for i in outputs: output_file.write(" s_output_%s_stb <= 0;\n" % (i)) for i in floating_point_arithmetic: output_file.write(" %s_a_stb <= 0;\n" % (i)) output_file.write(" %s_b_stb <= 0;\n" % (i)) output_file.write(" %s_z_ack <= 0;\n" % (i)) for i in floating_point_conversions: output_file.write(" %s_in_stb <= 0;\n" % (i)) output_file.write(" %s_out_ack <= 0;\n" % (i)) output_file.write(" end\n") output_file.write(" end\n") for i in inputs: output_file.write(" assign input_%s_ack = s_input_%s_ack;\n" % (i, i)) for i in outputs: output_file.write( " assign output_%s_stb = s_output_%s_stb;\n" % (i, i)) output_file.write(" assign output_%s = s_output_%s;\n" % (i, i)) output_file.write("\nendmodule\n") return inputs, outputs
[ "def", "generate_CHIP", "(", "input_file", ",", "name", ",", "instructions", ",", "output_file", ",", "allocator", ",", "initialize_memory", ",", "memory_size", "=", "1024", ",", "no_tb_mode", "=", "False", ",", "options", "=", "{", "}", ")", ":", "instructi...
https://github.com/dawsonjon/Chips-2.0/blob/57a986b8df36248bb4736bd84e3e68046b8665af/chips/compiler/verilog_area.py#L244-L1609
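The divide, modulo, and long-divide states generated above all share the "divider kernel logic" emitted near the end of generate_CHIP: a textbook restoring (shift-and-subtract) divider that shifts one dividend bit into the partial remainder each iteration and conditionally subtracts the divisor. The sketch below is a minimal Python model of that kernel, written here for illustration only; it is not part of the Chips repository, and the names simply mirror the generated Verilog registers (shifter, difference, quotient, remainder).

def restoring_divide(dividend, divisor, bits=32):
    # Models the emitted Verilog: shifter = {remainder[30:0], dividend[31]};
    # difference = shifter - divisor; difference[32] acts as the borrow bit.
    mask = (1 << bits) - 1
    remainder, quotient = 0, 0
    for _ in range(bits):
        shifter = ((remainder << 1) | (dividend >> (bits - 1))) & mask
        dividend = (dividend << 1) & mask
        difference = shifter - divisor
        if difference < 0:            # borrow: restore remainder, shift in a 0
            remainder = shifter
            quotient = quotient << 1
        else:                         # no borrow: keep difference, shift in a 1
            remainder = difference
            quotient = (quotient << 1) | 1
    return quotient, remainder

assert restoring_divide(100, 7) == (14, 2)

The generated hardware spreads these iterations over divide_latency clock cycles by running divide_iterations of them per cycle. Note that the emitted long-divider loop also uses divide_iterations where long_divide_iterations appears to have been intended, which gives the wrong iteration count unless the two values happen to match (they do at the default latencies of 32 and 64).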
lfz/Guided-Denoise
8881ab768d16eaf87342da4ff7dc8271e183e205
Attackset/Iter8_ensv3_resv2_inresv2_random/nets/vgg.py
python
vgg_19
(inputs, num_classes=1000, is_training=True, dropout_keep_prob=0.5, spatial_squeeze=True, scope='vgg_19', fc_conv_padding='VALID')
Oxford Net VGG 19-Layers version E Example. Note: All the fully_connected layers have been transformed to conv2d layers. To use in classification mode, resize input to 224x224. Args: inputs: a tensor of size [batch_size, height, width, channels]. num_classes: number of predicted classes. is_training: whether or not the model is being trained. dropout_keep_prob: the probability that activations are kept in the dropout layers during training. spatial_squeeze: whether or not should squeeze the spatial dimensions of the outputs. Useful to remove unnecessary dimensions for classification. scope: Optional scope for the variables. fc_conv_padding: the type of padding to use for the fully connected layer that is implemented as a convolutional layer. Use 'SAME' padding if you are applying the network in a fully convolutional manner and want to get a prediction map downsampled by a factor of 32 as an output. Otherwise, the output prediction map will be (input / 32) - 6 in case of 'VALID' padding. Returns: the last op containing the log predictions and end_points dict.
Oxford Net VGG 19-Layers version E Example.
[ "Oxford", "Net", "VGG", "19", "-", "Layers", "version", "E", "Example", "." ]
def vgg_19(inputs, num_classes=1000, is_training=True, dropout_keep_prob=0.5, spatial_squeeze=True, scope='vgg_19', fc_conv_padding='VALID'): """Oxford Net VGG 19-Layers version E Example. Note: All the fully_connected layers have been transformed to conv2d layers. To use in classification mode, resize input to 224x224. Args: inputs: a tensor of size [batch_size, height, width, channels]. num_classes: number of predicted classes. is_training: whether or not the model is being trained. dropout_keep_prob: the probability that activations are kept in the dropout layers during training. spatial_squeeze: whether or not should squeeze the spatial dimensions of the outputs. Useful to remove unnecessary dimensions for classification. scope: Optional scope for the variables. fc_conv_padding: the type of padding to use for the fully connected layer that is implemented as a convolutional layer. Use 'SAME' padding if you are applying the network in a fully convolutional manner and want to get a prediction map downsampled by a factor of 32 as an output. Otherwise, the output prediction map will be (input / 32) - 6 in case of 'VALID' padding. Returns: the last op containing the log predictions and end_points dict. """ with tf.variable_scope(scope, 'vgg_19', [inputs]) as sc: end_points_collection = sc.name + '_end_points' # Collect outputs for conv2d, fully_connected and max_pool2d. with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d], outputs_collections=end_points_collection): net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1') net = slim.max_pool2d(net, [2, 2], scope='pool1') net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2') net = slim.max_pool2d(net, [2, 2], scope='pool2') net = slim.repeat(net, 4, slim.conv2d, 256, [3, 3], scope='conv3') net = slim.max_pool2d(net, [2, 2], scope='pool3') net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv4') net = slim.max_pool2d(net, [2, 2], scope='pool4') net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv5') net = slim.max_pool2d(net, [2, 2], scope='pool5') # Use conv2d instead of fully_connected layers. net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6') net = slim.dropout(net, dropout_keep_prob, is_training=is_training, scope='dropout6') net = slim.conv2d(net, 4096, [1, 1], scope='fc7') net = slim.dropout(net, dropout_keep_prob, is_training=is_training, scope='dropout7') net = slim.conv2d(net, num_classes, [1, 1], activation_fn=None, normalizer_fn=None, scope='fc8') # Convert end_points_collection into a end_point dict. end_points = slim.utils.convert_collection_to_dict(end_points_collection) if spatial_squeeze: net = tf.squeeze(net, [1, 2], name='fc8/squeezed') end_points[sc.name + '/fc8'] = net return net, end_points
[ "def", "vgg_19", "(", "inputs", ",", "num_classes", "=", "1000", ",", "is_training", "=", "True", ",", "dropout_keep_prob", "=", "0.5", ",", "spatial_squeeze", "=", "True", ",", "scope", "=", "'vgg_19'", ",", "fc_conv_padding", "=", "'VALID'", ")", ":", "w...
https://github.com/lfz/Guided-Denoise/blob/8881ab768d16eaf87342da4ff7dc8271e183e205/Attackset/Iter8_ensv3_resv2_inresv2_random/nets/vgg.py#L198-L261
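Since all of vgg_19's fully connected layers are expressed as convolutions, a plain classification call just feeds a 224x224 batch and reads the squeezed logits. A hypothetical usage sketch, assuming the TF 1.x / tf.contrib.slim environment this attack code targets (the placeholder and the softmax step are illustrative, not taken from the repository):

import tensorflow as tf

images = tf.placeholder(tf.float32, shape=[None, 224, 224, 3], name='images')
logits, end_points = vgg_19(images, num_classes=1000, is_training=False)
probabilities = tf.nn.softmax(logits)
# end_points maps scope names to intermediate tensors, which is how a
# denoising/attack pipeline can tap activations without rebuilding the graph.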
SanPen/GridCal
d3f4566d2d72c11c7e910c9d162538ef0e60df31
src/research/three_phase/Engine/Simulation/power_flow.py
python
PowerFlow.__init__
(self, circuit: Circuit)
Power Flow constructor :param circuit:
Power Flow constructor :param circuit:
[ "Power", "Flow", "constructor", ":", "param", "circuit", ":" ]
def __init__(self, circuit: Circuit): """ Power Flow constructor :param circuit: """ self.circuit = circuit
[ "def", "__init__", "(", "self", ",", "circuit", ":", "Circuit", ")", ":", "self", ".", "circuit", "=", "circuit" ]
https://github.com/SanPen/GridCal/blob/d3f4566d2d72c11c7e910c9d162538ef0e60df31/src/research/three_phase/Engine/Simulation/power_flow.py#L8-L13
selfteaching/selfteaching-python-camp
9982ee964b984595e7d664b07c389cddaf158f1e
19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/pyparsing.py
python
pyparsing_common.stripHTMLTags
(s, l, tokens)
return pyparsing_common._html_stripper.transformString(tokens[0])
Parse action to remove HTML tags from web page HTML source Example:: # strip HTML links from normal text text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' td,td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end print(table_text.parseString(text).body) Prints:: More info at the pyparsing wiki page
Parse action to remove HTML tags from web page HTML source
[ "Parse", "action", "to", "remove", "HTML", "tags", "from", "web", "page", "HTML", "source" ]
def stripHTMLTags(s, l, tokens): """Parse action to remove HTML tags from web page HTML source Example:: # strip HTML links from normal text text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' td,td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end print(table_text.parseString(text).body) Prints:: More info at the pyparsing wiki page """ return pyparsing_common._html_stripper.transformString(tokens[0])
[ "def", "stripHTMLTags", "(", "s", ",", "l", ",", "tokens", ")", ":", "return", "pyparsing_common", ".", "_html_stripper", ".", "transformString", "(", "tokens", "[", "0", "]", ")" ]
https://github.com/selfteaching/selfteaching-python-camp/blob/9982ee964b984595e7d664b07c389cddaf158f1e/19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/pyparsing.py#L6196-L6211
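The docstring's example is complete apart from its imports; a runnable form would look like the following, assuming a pyparsing version that still exposes makeHTMLTags, SkipTo, and pyparsing_common at the top level:

from pyparsing import makeHTMLTags, SkipTo, pyparsing_common

text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
td, td_end = makeHTMLTags("TD")
table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
print(table_text.parseString(text).body)  # -> More info at the pyparsing wiki page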
CalebBell/thermo
572a47d1b03d49fe609b8d5f826fa6a7cde00828
thermo/phases/gibbs_excess.py
python
GibbsExcessLiquid.dVms_dP
(self)
return [0.0]*self.N
[]
def dVms_dP(self): return [0.0]*self.N
[ "def", "dVms_dP", "(", "self", ")", ":", "return", "[", "0.0", "]", "*", "self", ".", "N" ]
https://github.com/CalebBell/thermo/blob/572a47d1b03d49fe609b8d5f826fa6a7cde00828/thermo/phases/gibbs_excess.py#L1114-L1115
fluentpython/example-code-2e
80f7f84274a47579e59c29a4657691525152c9d5
17-it-generator/isis2json/iso2709.py
python
IsoRecord.dump
(self)
[]
def dump(self): for field in self.directory: print('%3s %r' % (field.tag, field.value))
[ "def", "dump", "(", "self", ")", ":", "for", "field", "in", "self", ".", "directory", ":", "print", "(", "'%3s %r'", "%", "(", "field", ".", "tag", ",", "field", ".", "value", ")", ")" ]
https://github.com/fluentpython/example-code-2e/blob/80f7f84274a47579e59c29a4657691525152c9d5/17-it-generator/isis2json/iso2709.py#L141-L143
pytorch/contrib
c545fedf4f73c8e95f91fd81f2d5bf7fa9c62a61
torchcontrib/nn/modules/linear.py
python
FiLM.forward
(self, input, gamma, beta)
return F.film(input, gamma, beta)
[]
def forward(self, input, gamma, beta): return F.film(input, gamma, beta)
[ "def", "forward", "(", "self", ",", "input", ",", "gamma", ",", "beta", ")", ":", "return", "F", ".", "film", "(", "input", ",", "gamma", ",", "beta", ")" ]
https://github.com/pytorch/contrib/blob/c545fedf4f73c8e95f91fd81f2d5bf7fa9c62a61/torchcontrib/nn/modules/linear.py#L35-L36
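F.film here applies feature-wise linear modulation: a per-channel affine transform out = gamma * input + beta, with gamma and beta broadcast across the spatial dimensions. A minimal self-contained sketch of that computation (illustrative only; torchcontrib's F.film is the authoritative implementation and may handle broadcasting more generally):

import torch

def film_sketch(x, gamma, beta):
    # x: (N, C, H, W); gamma, beta: (N, C) conditioning parameters
    gamma = gamma.view(gamma.size(0), gamma.size(1), 1, 1)
    beta = beta.view(beta.size(0), beta.size(1), 1, 1)
    return gamma * x + beta

x = torch.randn(2, 8, 4, 4)
gamma, beta = torch.randn(2, 8), torch.randn(2, 8)
assert film_sketch(x, gamma, beta).shape == x.shape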
pretix/pretix
96f694cf61345f54132cd26cdeb07d5d11b34232
src/pretix/base/models/orders.py
python
Order.can_modify_answers
(self)
return False
``True`` if the user can change the question answers / attendee names that are related to the order. This checks order status and modification deadlines. It also returns ``False`` if there are no questions that can be answered.
``True`` if the user can change the question answers / attendee names that are related to the order. This checks order status and modification deadlines. It also returns ``False`` if there are no questions that can be answered.
[ "True", "if", "the", "user", "can", "change", "the", "question", "answers", "/", "attendee", "names", "that", "are", "related", "to", "the", "order", ".", "This", "checks", "order", "status", "and", "modification", "deadlines", ".", "It", "also", "returns", ...
def can_modify_answers(self) -> bool: """ ``True`` if the user can change the question answers / attendee names that are related to the order. This checks order status and modification deadlines. It also returns ``False`` if there are no questions that can be answered. """ from .checkin import Checkin if self.status not in (Order.STATUS_PENDING, Order.STATUS_PAID, Order.STATUS_EXPIRED): return False modify_deadline = self.event.settings.get('last_order_modification_date', as_type=RelativeDateWrapper) if self.event.has_subevents and modify_deadline: dates = [ modify_deadline.datetime(se) for se in self.event.subevents.filter(id__in=self.positions.values_list('subevent', flat=True)) ] modify_deadline = min(dates) if dates else None elif modify_deadline: modify_deadline = modify_deadline.datetime(self.event) if modify_deadline is not None and now() > modify_deadline: return False positions = list( self.positions.all().annotate( has_checkin=Exists(Checkin.objects.filter(position_id=OuterRef('pk'))) ).select_related('item').prefetch_related('item__questions') ) if not self.event.settings.allow_modifications_after_checkin: for cp in positions: if cp.has_checkin: return False if self.event.settings.get('invoice_address_asked', as_type=bool): return True ask_names = self.event.settings.get('attendee_names_asked', as_type=bool) for cp in positions: if (cp.item.admission and ask_names) or cp.item.questions.all(): return True return False
[ "def", "can_modify_answers", "(", "self", ")", "->", "bool", ":", "from", ".", "checkin", "import", "Checkin", "if", "self", ".", "status", "not", "in", "(", "Order", ".", "STATUS_PENDING", ",", "Order", ".", "STATUS_PAID", ",", "Order", ".", "STATUS_EXPIR...
https://github.com/pretix/pretix/blob/96f694cf61345f54132cd26cdeb07d5d11b34232/src/pretix/base/models/orders.py#L749-L790
mesalock-linux/mesapy
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
pypy/module/cpyext/setobject.py
python
PySet_Pop
(space, w_set)
return space.call_method(space.w_set, "pop", w_set)
Return a new reference to an arbitrary object in the set, and remove the object from the set. Return NULL on failure. Raise KeyError if the set is empty. Raise a SystemError if set is not an instance of set or its subtype.
Return a new reference to an arbitrary object in the set, and remove the object from the set. Return NULL on failure. Raise KeyError if the set is empty. Raise a SystemError if set is not an instance of set or its subtype.
[ "Return", "a", "new", "reference", "to", "an", "arbitrary", "object", "in", "the", "set", "and", "removes", "the", "object", "from", "the", "set", ".", "Return", "NULL", "on", "failure", ".", "Raise", "KeyError", "if", "the", "set", "is", "empty", ".", ...
def PySet_Pop(space, w_set): """Return a new reference to an arbitrary object in the set, and remove the object from the set. Return NULL on failure. Raise KeyError if the set is empty. Raise a SystemError if set is not an instance of set or its subtype.""" return space.call_method(space.w_set, "pop", w_set)
[ "def", "PySet_Pop", "(", "space", ",", "w_set", ")", ":", "return", "space", ".", "call_method", "(", "space", ".", "w_set", ",", "\"pop\"", ",", "w_set", ")" ]
https://github.com/mesalock-linux/mesapy/blob/ed546d59a21b36feb93e2309d5c6b75aa0ad95c9/pypy/module/cpyext/setobject.py#L77-L82
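At the Python level, the behaviour this bridge exposes is simply set.pop(); a quick illustration of the semantics the docstring promises (an arbitrary element is removed and returned, and popping an empty set raises KeyError):

s = {1, 2, 3}
elem = s.pop()                 # removes and returns an arbitrary element
assert elem not in s and len(s) == 2
try:
    set().pop()
except KeyError:
    pass                       # empty set raises KeyError, as documented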
e2nIEE/pandapower
12bd83d7c4e1bf3fa338dab2db649c3cd3db0cfb
pandapower/shortcircuit/calc_sc.py
python
calc_sc
(net, bus=None, fault="3ph", case='max', lv_tol_percent=10, topology="auto", ip=False, ith=False, tk_s=1., kappa_method="C", r_fault_ohm=0., x_fault_ohm=0., branch_results=False, check_connectivity=True, return_all_currents=False, inverse_y=True)
Calculates minimal or maximal symmetrical short-circuit currents. The calculation is based on the method of the equivalent voltage source according to DIN/IEC EN 60909. The initial short-circuit alternating current *ikss* is the basis of the short-circuit calculation and is therefore always calculated. Other short-circuit currents can be calculated from *ikss* with the conversion factors defined in DIN/IEC EN 60909. The output is stored in the net.res_bus_sc table as a short_circuit current for each bus. INPUT: **net** (pandapowerNet) pandapower Network **bus** (int, list, np.array, None) defines if short-circuit calculations should only be calculated for defined bus **fault** (str, 3ph) type of fault - "3ph" for three-phase - "2ph" for two-phase short-circuits - "1ph" for single-phase ground faults **case** (str, "max") - "max" for maximal current calculation - "min" for minimal current calculation **lv_tol_percent** (int, 10) voltage tolerance in low voltage grids - 6 for 6% voltage tolerance - 10 for 10% voltage tolerance **ip** (bool, False) if True, calculate aperiodic short-circuit current **ith** (bool, False) if True, calculate equivalent thermal short-circuit current Ith **topology** (str, "auto") define option for meshing (only relevant for ip and ith) - "meshed" - it is assumed all buses are supplied over multiple paths - "radial" - it is assumed all buses are supplied over exactly one path - "auto" - topology check for each bus is performed to see if it is supplied over multiple paths **tk_s** (float, 1) failure clearing time in seconds (only relevant for ith) **r_fault_ohm** (float, 0) fault resistance in Ohm **x_fault_ohm** (float, 0) fault reactance in Ohm **branch_results** (bool, False) defines if short-circuit results should also be generated for branches **return_all_currents** (bool, False) applies only if branch_results=True, if True short-circuit currents for each (branch, bus) tuple is returned otherwise only the max/min is returned **inverse_y** (bool, True) defines if the complete inverse should be used instead of LU factorization; the factorization version is experimental and should be faster and more memory-efficient OUTPUT: EXAMPLE: calc_sc(net) print(net.res_bus_sc)
Calculates minimal or maximal symmetrical short-circuit currents. The calculation is based on the method of the equivalent voltage source according to DIN/IEC EN 60909. The initial short-circuit alternating current *ikss* is the basis of the short-circuit calculation and is therefore always calculated. Other short-circuit currents can be calculated from *ikss* with the conversion factors defined in DIN/IEC EN 60909.
[ "Calculates", "minimal", "or", "maximal", "symmetrical", "short", "-", "circuit", "currents", ".", "The", "calculation", "is", "based", "on", "the", "method", "of", "the", "equivalent", "voltage", "source", "according", "to", "DIN", "/", "IEC", "EN", "60909", ...
def calc_sc(net, bus=None,
            fault="3ph", case='max', lv_tol_percent=10, topology="auto", ip=False,
            ith=False, tk_s=1., kappa_method="C", r_fault_ohm=0., x_fault_ohm=0.,
            branch_results=False, check_connectivity=True, return_all_currents=False,
            inverse_y=True):
    """
    Calculates minimal or maximal symmetrical short-circuit currents.
    The calculation is based on the method of the equivalent voltage source
    according to DIN/IEC EN 60909.
    The initial short-circuit alternating current *ikss* is the basis of the
    short-circuit calculation and is therefore always calculated.
    Other short-circuit currents can be calculated from *ikss* with the
    conversion factors defined in DIN/IEC EN 60909.

    The output is stored in the net.res_bus_sc table as a short-circuit
    current for each bus.

    INPUT:
        **net** (pandapowerNet) pandapower Network

        **bus** (int, list, np.array, None) defines if short-circuit
        calculations should only be performed for the defined bus(es)

        **fault** (str, 3ph) type of fault
            - "3ph" for three-phase
            - "2ph" for two-phase short-circuits
            - "1ph" for single-phase ground faults

        **case** (str, "max")
            - "max" for maximal current calculation
            - "min" for minimal current calculation

        **lv_tol_percent** (int, 10) voltage tolerance in low voltage grids
            - 6 for 6% voltage tolerance
            - 10 for 10% voltage tolerance

        **ip** (bool, False) if True, calculate the aperiodic short-circuit current

        **ith** (bool, False) if True, calculate the equivalent thermal
        short-circuit current Ith

        **topology** (str, "auto") defines the meshing option (only relevant
        for ip and ith)
            - "meshed" - it is assumed all buses are supplied over multiple paths
            - "radial" - it is assumed all buses are supplied over exactly one path
            - "auto" - a topology check is performed for each bus to see if it
              is supplied over multiple paths

        **tk_s** (float, 1) failure clearing time in seconds (only relevant for ith)

        **r_fault_ohm** (float, 0) fault resistance in Ohm

        **x_fault_ohm** (float, 0) fault reactance in Ohm

        **branch_results** (bool, False) defines if short-circuit results
        should also be generated for branches

        **return_all_currents** (bool, False) applies only if
        branch_results=True; if True, short-circuit currents for each
        (branch, bus) tuple are returned, otherwise only the max/min is returned

        **inverse_y** (bool, True) defines if the complete inverse should be
        used instead of LU factorization; the factorization version is
        experimental and should be faster and more memory-efficient

    OUTPUT:

    EXAMPLE:
        calc_sc(net)
        print(net.res_bus_sc)
    """
    if fault not in ["3ph", "2ph", "1ph"]:
        raise NotImplementedError("Only 3ph, 2ph and 1ph short-circuit currents implemented")

    if len(net.gen) and (ip or ith):
        logger.warning("aperiodic and thermal short-circuit currents are only implemented for "
                       "faults far from generators!")

    if case not in ['max', 'min']:
        raise ValueError('case can only be "min" or "max" for minimal or maximal short '
                         'circuit current')

    if topology not in ["meshed", "radial", "auto"]:
        raise ValueError('specify network structure as "meshed", "radial" or "auto"')

    if branch_results:
        logger.warning("Branch results are in beta mode and might not always be reliable, "
                       "especially for transformers")

    # Convert bus to numpy array
    if bus is None:
        bus = net.bus.index.values
    else:
        bus = np.array([bus]).ravel()

    kappa = ith or ip
    net["_options"] = {}
    _add_ppc_options(net, calculate_voltage_angles=False, trafo_model="pi",
                     check_connectivity=check_connectivity, mode="sc", switch_rx_ratio=2,
                     init_vm_pu="flat", init_va_degree="flat", enforce_q_lims=False,
                     recycle=None)
    _add_sc_options(net, fault=fault, case=case, lv_tol_percent=lv_tol_percent, tk_s=tk_s,
                    topology=topology, r_fault_ohm=r_fault_ohm, kappa_method=kappa_method,
                    x_fault_ohm=x_fault_ohm, kappa=kappa, ip=ip, ith=ith,
                    branch_results=branch_results, return_all_currents=return_all_currents,
                    inverse_y=inverse_y)
    init_results(net, "sc")
    if fault in ("2ph", "3ph"):
        _calc_sc(net, bus)
    elif fault == "1ph":
        _calc_sc_1ph(net, bus)
    else:
        raise ValueError("Invalid fault %s" % fault)
[ "def", "calc_sc", "(", "net", ",", "bus", "=", "None", ",", "fault", "=", "\"3ph\"", ",", "case", "=", "'max'", ",", "lv_tol_percent", "=", "10", ",", "topology", "=", "\"auto\"", ",", "ip", "=", "False", ",", "ith", "=", "False", ",", "tk_s", "=",...
https://github.com/e2nIEE/pandapower/blob/12bd83d7c4e1bf3fa338dab2db649c3cd3db0cfb/pandapower/shortcircuit/calc_sc.py#L31-L151
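A minimal usage sketch for calc_sc; the two-bus grid, the ext_grid short-circuit parameters (s_sc_max_mva, rx_max) and the line std_type below are illustrative assumptions, not values taken from the record.

import pandapower as pp
import pandapower.shortcircuit as sc

net = pp.create_empty_network()
b1 = pp.create_bus(net, vn_kv=110.)
b2 = pp.create_bus(net, vn_kv=110.)
pp.create_ext_grid(net, b1, s_sc_max_mva=100., rx_max=0.1)   # assumed grid strength
pp.create_line(net, b1, b2, length_km=2., std_type="305-AL1/39-ST1A 110.0")

sc.calc_sc(net, fault="3ph", case="max")
print(net.res_bus_sc)   # ikss_ka per bus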
inventree/InvenTree
4a5e4a88ac3e91d64a21e8cab3708ecbc6e2bd8b
InvenTree/report/models.py
python
BillOfMaterialsReport.getSubdir
(cls)
return 'bom'
[]
def getSubdir(cls): return 'bom'
[ "def", "getSubdir", "(", "cls", ")", ":", "return", "'bom'" ]
https://github.com/inventree/InvenTree/blob/4a5e4a88ac3e91d64a21e8cab3708ecbc6e2bd8b/InvenTree/report/models.py#L419-L420
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-windows/x86/PIL/ImageMode.py
python
ModeDescriptor.__str__
(self)
return self.mode
[]
def __str__(self): return self.mode
[ "def", "__str__", "(", "self", ")", ":", "return", "self", ".", "mode" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-windows/x86/PIL/ImageMode.py#L29-L30
biolab/orange3
41685e1c7b1d1babe680113685a2d44bcc9fec0b
Orange/widgets/data/oweditdomain.py
python
make_dict_mapper
( mapping: Mapping, dtype: Optional[DType] = None )
return mapper
Wrap a `mapping` into a callable ufunc-like function with `out`, `dtype`, `where`, ... parameters. If `dtype` is passed to `make_dict_mapper` it is used as the default return dtype, otherwise the default dtype is `object`.
Wrap a `mapping` into a callable ufunc-like function with `out`, `dtype`, `where`, ... parameters. If `dtype` is passed to `make_dict_mapper` it is used as the default return dtype, otherwise the default dtype is `object`.
[ "Wrap", "a", "mapping", "into", "a", "callable", "ufunc", "-", "like", "function", "with", "out", "dtype", "where", "...", "parameters", ".", "If", "dtype", "is", "passed", "to", "make_dict_mapper", "it", "is", "used", "as", "the", "default", "return"...
def make_dict_mapper(
        mapping: Mapping, dtype: Optional[DType] = None
) -> Callable:
    """
    Wrap a `mapping` into a callable ufunc-like function with `out`, `dtype`,
    `where`, ... parameters. If `dtype` is passed to `make_dict_mapper` it is
    used as the default return dtype, otherwise the default dtype is `object`.
    """
    _vmapper = np.frompyfunc(mapping.__getitem__, 1, 1)

    def mapper(arr, out=None, dtype=dtype, **kwargs):
        arr = np.asanyarray(arr)
        if out is None and dtype is not None and arr.shape != ():
            out = np.empty_like(arr, dtype)
        return _vmapper(arr, out, dtype=dtype, casting="unsafe", **kwargs)
    return mapper
[ "def", "make_dict_mapper", "(", "mapping", ":", "Mapping", ",", "dtype", ":", "Optional", "[", "DType", "]", "=", "None", ")", "->", "Callable", ":", "_vmapper", "=", "np", ".", "frompyfunc", "(", "mapping", ".", "__getitem__", ",", "1", ",", "1", ")",...
https://github.com/biolab/orange3/blob/41685e1c7b1d1babe680113685a2d44bcc9fec0b/Orange/widgets/data/oweditdomain.py#L2633-L2649
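The core trick in make_dict_mapper is np.frompyfunc over mapping.__getitem__ plus an explicit out array with casting="unsafe". A standalone sketch of that technique with an assumed mapping:

import numpy as np

mapping = {"cat": 0, "dog": 1}
vmap = np.frompyfunc(mapping.__getitem__, 1, 1)

arr = np.array(["cat", "dog", "cat"])
out = np.empty_like(arr, dtype=int)
vmap(arr, out, casting="unsafe")   # object results cast into the int out array
print(out)                         # [0 1 0]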
hankcs/HanLP
6c02812969c4827d74b404c3ad4207f71ca9165a
hanlp/utils/span_util.py
python
allowed_transitions
(constraint_type: str, labels: Dict[int, str])
return allowed
Given labels and a constraint type, returns the allowed transitions. It will additionally include transitions for the start and end states, which are used by the conditional random field. # Parameters constraint_type : `str`, required Indicates which constraint to apply. Current choices are "BIO", "IOB1", "BIOUL", and "BMES". labels : `Dict[int, str]`, required A mapping {label_id -> label}. Most commonly this would be the value from Vocabulary.get_index_to_token_vocabulary() # Returns `List[Tuple[int, int]]` The allowed transitions (from_label_id, to_label_id).
Given labels and a constraint type, returns the allowed transitions. It will additionally include transitions for the start and end states, which are used by the conditional random field.
[ "Given", "labels", "and", "a", "constraint", "type", "returns", "the", "allowed", "transitions", ".", "It", "will", "additionally", "include", "transitions", "for", "the", "start", "and", "end", "states", "which", "are", "used", "by", "the", "conditional", "ra...
def allowed_transitions(constraint_type: str, labels: Dict[int, str]) -> List[Tuple[int, int]]:
    """
    Given labels and a constraint type, returns the allowed transitions. It will
    additionally include transitions for the start and end states, which are used by the
    conditional random field.

    # Parameters

    constraint_type : `str`, required
        Indicates which constraint to apply. Current choices are
        "BIO", "IOB1", "BIOUL", and "BMES".
    labels : `Dict[int, str]`, required
        A mapping {label_id -> label}. Most commonly this would be the value from
        Vocabulary.get_index_to_token_vocabulary()

    # Returns

    `List[Tuple[int, int]]`
        The allowed transitions (from_label_id, to_label_id).
    """
    num_labels = len(labels)
    start_tag = num_labels
    end_tag = num_labels + 1
    labels_with_boundaries = list(labels.items()) + [(start_tag, "START"), (end_tag, "END")]

    allowed = []
    for from_label_index, from_label in labels_with_boundaries:
        if from_label in ("START", "END"):
            from_tag = from_label
            from_entity = ""
        else:
            from_tag = from_label[0]
            from_entity = from_label[1:]
        for to_label_index, to_label in labels_with_boundaries:
            if to_label in ("START", "END"):
                to_tag = to_label
                to_entity = ""
            else:
                to_tag = to_label[0]
                to_entity = to_label[1:]
            if is_transition_allowed(constraint_type, from_tag, from_entity, to_tag, to_entity):
                allowed.append((from_label_index, to_label_index))
    return allowed
[ "def", "allowed_transitions", "(", "constraint_type", ":", "str", ",", "labels", ":", "Dict", "[", "int", ",", "str", "]", ")", "->", "List", "[", "Tuple", "[", "int", ",", "int", "]", "]", ":", "num_labels", "=", "len", "(", "labels", ")", "start_ta...
https://github.com/hankcs/HanLP/blob/6c02812969c4827d74b404c3ad4207f71ca9165a/hanlp/utils/span_util.py#L102-L144
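A brief usage sketch, assuming allowed_transitions (and its helper is_transition_allowed) is importable from the module above; the label set is hypothetical.

labels = {0: "B-PER", 1: "I-PER", 2: "O"}
allowed = allowed_transitions("BIO", labels)

# Indices 3 and 4 are the implicit START and END states appended internally.
assert (0, 1) in allowed       # B-PER -> I-PER is a legal BIO transition
assert (2, 1) not in allowed   # O -> I-PER is not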
caiiiac/Machine-Learning-with-Python
1a26c4467da41ca4ebc3d5bd789ea942ef79422f
MachineLearning/venv/lib/python3.5/site-packages/pandas/core/categorical.py
python
Categorical._get_labels
(self)
return self.codes
Get the category labels (deprecated). Deprecated, use .codes!
Get the category labels (deprecated).
[ "Get", "the", "category", "labels", "(", "deprecated", ")", "." ]
def _get_labels(self):
    """
    Get the category labels (deprecated).

    Deprecated, use .codes!
    """
    warn("'labels' is deprecated. Use 'codes' instead", FutureWarning,
         stacklevel=2)
    return self.codes
[ "def", "_get_labels", "(", "self", ")", ":", "warn", "(", "\"'labels' is deprecated. Use 'codes' instead\"", ",", "FutureWarning", ",", "stacklevel", "=", "2", ")", "return", "self", ".", "codes" ]
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/pandas/core/categorical.py#L489-L497
Azure/azure-cli
6c1b085a0910c6c2139006fcbd8ade44006eb6dd
src/azure-cli/azure/cli/command_modules/acr/_docker_utils.py
python
get_login_credentials
(cmd, registry_name, tenant_suffix=None, username=None, password=None)
return _get_credentials(cmd, registry_name, tenant_suffix, username, password, only_refresh_token=True, is_login_context=True)
Try to get AAD authorization tokens or admin user credentials to log into a registry. :param str registry_name: The name of container registry :param str username: The username used to log into the container registry :param str password: The password used to log into the container registry
Try to get AAD authorization tokens or admin user credentials to log into a registry. :param str registry_name: The name of container registry :param str username: The username used to log into the container registry :param str password: The password used to log into the container registry
[ "Try", "to", "get", "AAD", "authorization", "tokens", "or", "admin", "user", "credentials", "to", "log", "into", "a", "registry", ".", ":", "param", "str", "registry_name", ":", "The", "name", "of", "container", "registry", ":", "param", "str", "username", ...
def get_login_credentials(cmd,
                          registry_name,
                          tenant_suffix=None,
                          username=None,
                          password=None):
    """Try to get AAD authorization tokens or admin user credentials to log into a registry.
    :param str registry_name: The name of container registry
    :param str username: The username used to log into the container registry
    :param str password: The password used to log into the container registry
    """
    return _get_credentials(cmd,
                            registry_name,
                            tenant_suffix,
                            username,
                            password,
                            only_refresh_token=True,
                            is_login_context=True)
[ "def", "get_login_credentials", "(", "cmd", ",", "registry_name", ",", "tenant_suffix", "=", "None", ",", "username", "=", "None", ",", "password", "=", "None", ")", ":", "return", "_get_credentials", "(", "cmd", ",", "registry_name", ",", "tenant_suffix", ","...
https://github.com/Azure/azure-cli/blob/6c1b085a0910c6c2139006fcbd8ade44006eb6dd/src/azure-cli/azure/cli/command_modules/acr/_docker_utils.py#L401-L417
heroku/heroku.py
cadc0a074896cf29c65a457c5c5bdb2069470af0
heroku/models.py
python
App.collaborators
(self)
return self._h._get_resources( resource=('apps', self.name, 'collaborators'), obj=Collaborator, app=self )
The collaborators for this app.
The collaborators for this app.
[ "The", "collaborators", "for", "this", "app", "." ]
def collaborators(self):
    """The collaborators for this app."""
    return self._h._get_resources(
        resource=('apps', self.name, 'collaborators'),
        obj=Collaborator, app=self
    )
[ "def", "collaborators", "(", "self", ")", ":", "return", "self", ".", "_h", ".", "_get_resources", "(", "resource", "=", "(", "'apps'", ",", "self", ".", "name", ",", "'collaborators'", ")", ",", "obj", "=", "Collaborator", ",", "app", "=", "self", ")"...
https://github.com/heroku/heroku.py/blob/cadc0a074896cf29c65a457c5c5bdb2069470af0/heroku/models.py#L214-L219
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/cloud/clouds/azurearm.py
python
get_location
(call=None, kwargs=None)
return config.get_cloud_config_value( "location", vm_dict, __opts__, search_global=False )
Return the location that is configured for this provider
Return the location that is configured for this provider
[ "Return", "the", "location", "that", "is", "configured", "for", "this", "provider" ]
def get_location(call=None, kwargs=None):  # pylint: disable=unused-argument
    """
    Return the location that is configured for this provider
    """
    if not kwargs:
        kwargs = {}
    vm_dict = get_configured_provider()
    vm_dict.update(kwargs)
    return config.get_cloud_config_value(
        "location", vm_dict, __opts__, search_global=False
    )
[ "def", "get_location", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "# pylint: disable=unused-argument", "if", "not", "kwargs", ":", "kwargs", "=", "{", "}", "vm_dict", "=", "get_configured_provider", "(", ")", "vm_dict", ".", "update", "(...
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/cloud/clouds/azurearm.py#L290-L300
ricequant/rqalpha-mod-ctp
bfd40801f9a182226a911cac74660f62993eb6db
rqalpha_mod_ctp/ctp/pyctp/linux64_27/__init__.py
python
TraderApi.OnRspExecOrderAction
(self, pInputExecOrderAction, pRspInfo, nRequestID, bIsLast)
Response to an exercise declaration action request
Response to an exercise declaration action request
[ "Response", "to", "an", "exercise", "declaration", "action", "request" ]
def OnRspExecOrderAction(self, pInputExecOrderAction, pRspInfo, nRequestID, bIsLast):
    """Response to an exercise declaration action request"""
[ "def", "OnRspExecOrderAction", "(", "self", ",", "pInputExecOrderAction", ",", "pRspInfo", ",", "nRequestID", ",", "bIsLast", ")", ":" ]
https://github.com/ricequant/rqalpha-mod-ctp/blob/bfd40801f9a182226a911cac74660f62993eb6db/rqalpha_mod_ctp/ctp/pyctp/linux64_27/__init__.py#L549-L550
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/pip/_vendor/pyparsing.py
python
ParserElement.parseString
( self, instring, parseAll=False )
Execute the parse expression with the given string. This is the main interface to the client code, once the complete expression has been built. If you want the grammar to require that the entire input string be successfully parsed, then set ``parseAll`` to True (equivalent to ending the grammar with ``StringEnd()``). Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string, in order to report proper column numbers in parse actions. If the input string contains tabs and the grammar uses parse actions that use the ``loc`` argument to index into the string being parsed, you can ensure you have a consistent view of the input string by: - calling ``parseWithTabs`` on your grammar before calling ``parseString`` (see :class:`parseWithTabs`) - define your parse action using the full ``(s,loc,toks)`` signature, and reference the input string using the parse action's ``s`` argument - explicitly expand the tabs in your input string before calling ``parseString`` Example:: Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text
Execute the parse expression with the given string. This is the main interface to the client code, once the complete expression has been built.
[ "Execute", "the", "parse", "expression", "with", "the", "given", "string", ".", "This", "is", "the", "main", "interface", "to", "the", "client", "code", "once", "the", "complete", "expression", "has", "been", "built", "." ]
def parseString( self, instring, parseAll=False ):
    """
    Execute the parse expression with the given string.
    This is the main interface to the client code, once the complete
    expression has been built.

    If you want the grammar to require that the entire input string be
    successfully parsed, then set ``parseAll`` to True (equivalent to ending
    the grammar with ``StringEnd()``).

    Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string,
    in order to report proper column numbers in parse actions.
    If the input string contains tabs and
    the grammar uses parse actions that use the ``loc`` argument to index into the
    string being parsed, you can ensure you have a consistent view of the input
    string by:

    - calling ``parseWithTabs`` on your grammar before calling ``parseString``
      (see :class:`parseWithTabs`)
    - define your parse action using the full ``(s,loc,toks)`` signature, and
      reference the input string using the parse action's ``s`` argument
    - explicitly expand the tabs in your input string before calling
      ``parseString``

    Example::

        Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
        Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
    """
    ParserElement.resetCache()
    if not self.streamlined:
        self.streamline()
        #~ self.saveAsList = True
    for e in self.ignoreExprs:
        e.streamline()
    if not self.keepTabs:
        instring = instring.expandtabs()
    try:
        loc, tokens = self._parse( instring, 0 )
        if parseAll:
            loc = self.preParse( instring, loc )
            se = Empty() + StringEnd()
            se._parse( instring, loc )
    except ParseBaseException as exc:
        if ParserElement.verbose_stacktrace:
            raise
        else:
            # catch and re-raise exception from here, clears out pyparsing internal stack trace
            raise exc
    else:
        return tokens
[ "def", "parseString", "(", "self", ",", "instring", ",", "parseAll", "=", "False", ")", ":", "ParserElement", ".", "resetCache", "(", ")", "if", "not", "self", ".", "streamlined", ":", "self", ".", "streamline", "(", ")", "#~ self.saveAsList = True", "for", ...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pip/_vendor/pyparsing.py#L1780-L1830
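A short sketch of the two call modes described in the docstring, using standard pyparsing names (Word, alphas, ParseException):

from pyparsing import Word, alphas, ParseException

greet = Word(alphas) + "," + Word(alphas) + "!"
print(greet.parseString("Hello, World!").asList())   # ['Hello', ',', 'World', '!']

try:
    greet.parseString("Hello, World! and more", parseAll=True)
except ParseException as err:
    print(err)   # Expected end of text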
Cog-Creators/Red-DiscordBot
b05933274a11fb097873ab0d1b246d37b06aa306
redbot/core/utils/chat_formatting.py
python
spoiler
(text: str, escape_formatting: bool = True)
return f"||{escape(text, formatting=escape_formatting)}||"
Get the given text as a spoiler. Note: By default, this function will escape ``text`` prior to making the text a spoiler. Parameters ---------- text : str The text to be marked up. escape_formatting : `bool`, optional Set to :code:`False` to not escape markdown formatting in the text. Returns ------- str The marked up text.
Get the given text as a spoiler.
[ "Get", "the", "given", "text", "as", "a", "spoiler", "." ]
def spoiler(text: str, escape_formatting: bool = True) -> str:
    """Get the given text as a spoiler.

    Note: By default, this function will escape ``text`` prior to making the text
    a spoiler.

    Parameters
    ----------
    text : str
        The text to be marked up.
    escape_formatting : `bool`, optional
        Set to :code:`False` to not escape markdown formatting in the text.

    Returns
    -------
    str
        The marked up text.

    """
    return f"||{escape(text, formatting=escape_formatting)}||"
[ "def", "spoiler", "(", "text", ":", "str", ",", "escape_formatting", ":", "bool", "=", "True", ")", "->", "str", ":", "return", "f\"||{escape(text, formatting=escape_formatting)}||\"" ]
https://github.com/Cog-Creators/Red-DiscordBot/blob/b05933274a11fb097873ab0d1b246d37b06aa306/redbot/core/utils/chat_formatting.py#L182-L200
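A quick usage sketch, assuming the module-level escape helper referenced above:

print(spoiler("the butler did it"))
# ||the butler did it||

print(spoiler("*raw markdown*", escape_formatting=False))
# ||*raw markdown*||   (formatting left unescaped)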
Abjad/abjad
d0646dfbe83db3dc5ab268f76a0950712b87b7fd
abjad/tag.py
python
Line.__repr__
(self)
return _format.get_repr(self)
Gets interpreter representation.
Gets interpreter representation.
[ "Gets", "interpreter", "representation", "." ]
def __repr__(self) -> str:
    """
    Gets interpreter representation.
    """
    return _format.get_repr(self)
[ "def", "__repr__", "(", "self", ")", "->", "str", ":", "return", "_format", ".", "get_repr", "(", "self", ")" ]
https://github.com/Abjad/abjad/blob/d0646dfbe83db3dc5ab268f76a0950712b87b7fd/abjad/tag.py#L417-L421
pfnet/pytorch-pfn-extras
b7ced31c1e78a0527c36d745ca091ec270da49e3
example/mnist_ddp.py
python
test
(args, model, device, data, target)
The extension loops over the iterator in order to drive the evaluator progress bar and report averages
The extension loops over the iterator in order to drive the evaluator progress bar and report averages
[ "The", "extension", "loops", "over", "the", "iterator", "in", "order", "to", "drive", "the", "evaluator", "progress", "bar", "and", "report", "averages" ]
def test(args, model, device, data, target):
    """ The extension loops over the iterator in order to
        drive the evaluator progress bar and report averages
    """
    model.eval()
    test_loss = 0.0
    correct = 0
    data, target = data.to(device), target.to(device)
    output = model(data)
    # Final result will be average of averages of the same size
    test_loss += F.nll_loss(output, target, reduction='mean').item()
    ppe.reporting.report({'val/loss': test_loss})
    pred = output.argmax(dim=1, keepdim=True)
    correct += pred.eq(target.view_as(pred)).sum().item()
    ppe.reporting.report({'val/acc': correct / len(data)})
[ "def", "test", "(", "args", ",", "model", ",", "device", ",", "data", ",", "target", ")", ":", "model", ".", "eval", "(", ")", "test_loss", "=", "0.0", "correct", "=", "0", "data", ",", "target", "=", "data", ".", "to", "(", "device", ")", ",", ...
https://github.com/pfnet/pytorch-pfn-extras/blob/b7ced31c1e78a0527c36d745ca091ec270da49e3/example/mnist_ddp.py#L45-L60
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
ansible/roles/lib_gcloud/library/gcloud_dm_resource_builder.py
python
ForwardingRule.__init__
(self, rname, project, zone, desc, ip_address, protocol, region, port_range, target, )
constructor for gcp resource
constructor for gcp resource
[ "constructor", "for", "gcp", "resource" ]
def __init__(self,
             rname,
             project,
             zone,
             desc,
             ip_address,
             protocol,
             region,
             port_range,
             target,
            ):
    '''constructor for gcp resource'''
    super(ForwardingRule, self).__init__(rname, ForwardingRule.resource_type, project, zone)
    self._desc = desc
    self._region = region
    self._ip_address = '$(ref.%s.selfLink)' % ip_address
    self._protocol = protocol
    self._port_range = port_range
    self._target = '$(ref.%s.selfLink)' % target
[ "def", "__init__", "(", "self", ",", "rname", ",", "project", ",", "zone", ",", "desc", ",", "ip_address", ",", "protocol", ",", "region", ",", "port_range", ",", "target", ",", ")", ":", "super", "(", "ForwardingRule", ",", "self", ")", ".", "__init__...
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/ansible/roles/lib_gcloud/library/gcloud_dm_resource_builder.py#L845-L863
numenta/nupic
b9ebedaf54f49a33de22d8d44dff7c765cdb5548
external/linux32/lib/python2.6/site-packages/matplotlib/text.py
python
Text.get_fontname
(self)
return self.get_name()
alias for get_name
alias for get_name
[ "alias", "for", "get_name" ]
def get_fontname(self):
    'alias for get_name'
    return self.get_name()
[ "def", "get_fontname", "(", "self", ")", ":", "return", "self", ".", "get_name", "(", ")" ]
https://github.com/numenta/nupic/blob/b9ebedaf54f49a33de22d8d44dff7c765cdb5548/external/linux32/lib/python2.6/site-packages/matplotlib/text.py#L548-L550
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/serial/sensor.py
python
SerialSensor.native_value
(self)
return self._state
Return the state of the sensor.
Return the state of the sensor.
[ "Return", "the", "state", "of", "the", "sensor", "." ]
def native_value(self):
    """Return the state of the sensor."""
    return self._state
[ "def", "native_value", "(", "self", ")", ":", "return", "self", ".", "_state" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/serial/sensor.py#L256-L258
ibis-project/ibis
e1ef8b6870ac53de9d1fe5c52851fa41872109c4
ibis/expr/api.py
python
prevent_rewrite
(expr, client=None)
return ops.SQLQueryResult(query, expr.schema(), client).to_expr()
Prevent optimization from happening below `expr`. Parameters ---------- expr : ir.TableExpr Any table expression whose optimization you want to prevent client : ibis.backends.base.Client, optional, default None A client to use to create the SQLQueryResult operation. This is useful if you're compiling an expression that derives from an :class:`~ibis.expr.operations.UnboundTable` operation. Returns ------- sql_query_result : ir.TableExpr
Prevent optimization from happening below `expr`.
[ "Prevent", "optimization", "from", "happening", "below", "expr", "." ]
def prevent_rewrite(expr, client=None):
    """Prevent optimization from happening below `expr`.

    Parameters
    ----------
    expr : ir.TableExpr
        Any table expression whose optimization you want to prevent
    client : ibis.backends.base.Client, optional, default None
        A client to use to create the SQLQueryResult operation. This is useful
        if you're compiling an expression that derives from an
        :class:`~ibis.expr.operations.UnboundTable` operation.

    Returns
    -------
    sql_query_result : ir.TableExpr
    """
    if client is None:
        client = expr._find_backend()
    query = client.compile(expr)
    return ops.SQLQueryResult(query, expr.schema(), client).to_expr()
[ "def", "prevent_rewrite", "(", "expr", ",", "client", "=", "None", ")", ":", "if", "client", "is", "None", ":", "client", "=", "expr", ".", "_find_backend", "(", ")", "query", "=", "client", ".", "compile", "(", "expr", ")", "return", "ops", ".", "SQ...
https://github.com/ibis-project/ibis/blob/e1ef8b6870ac53de9d1fe5c52851fa41872109c4/ibis/expr/api.py#L4673-L4692
pandas-dev/pandas
5ba7d714014ae8feaccc0dd4a98890828cf2832d
pandas/_config/config.py
python
config_prefix
(prefix)
contextmanager for multiple invocations of API with a common prefix supported API functions: (register / get / set )__option Warning: This is not thread-safe, and won't work properly if you import the API functions into your module using the "from x import y" construct. Example ------- import pandas._config.config as cf with cf.config_prefix("display.font"): cf.register_option("color", "red") cf.register_option("size", " 5 pt") cf.set_option(size, " 6 pt") cf.get_option(size) ... etc' will register options "display.font.color", "display.font.size", set the value of "display.font.size"... and so on.
contextmanager for multiple invocations of API with a common prefix
[ "contextmanager", "for", "multiple", "invocations", "of", "API", "with", "a", "common", "prefix" ]
def config_prefix(prefix):
    """
    contextmanager for multiple invocations of API with a common prefix

    supported API functions: (register / get / set )__option

    Warning: This is not thread-safe, and won't work properly if you import
    the API functions into your module using the "from x import y" construct.

    Example
    -------
    import pandas._config.config as cf
    with cf.config_prefix("display.font"):
        cf.register_option("color", "red")
        cf.register_option("size", " 5 pt")
        cf.set_option(size, " 6 pt")
        cf.get_option(size)
        ...

    etc'

    will register options "display.font.color", "display.font.size", set the
    value of "display.font.size"... and so on.
    """
    # Note: reset_option relies on set_option, and on key directly
    # it does not fit in to this monkey-patching scheme

    global register_option, get_option, set_option, reset_option

    def wrap(func: F) -> F:
        def inner(key: str, *args, **kwds):
            pkey = f"{prefix}.{key}"
            return func(pkey, *args, **kwds)

        return cast(F, inner)

    _register_option = register_option
    _get_option = get_option
    _set_option = set_option
    set_option = wrap(set_option)
    get_option = wrap(get_option)
    register_option = wrap(register_option)
    try:
        yield
    finally:
        set_option = _set_option
        get_option = _get_option
        register_option = _register_option
[ "def", "config_prefix", "(", "prefix", ")", ":", "# Note: reset_option relies on set_option, and on key directly", "# it does not fit in to this monkey-patching scheme", "global", "register_option", ",", "get_option", ",", "set_option", ",", "reset_option", "def", "wrap", "(", ...
https://github.com/pandas-dev/pandas/blob/5ba7d714014ae8feaccc0dd4a98890828cf2832d/pandas/_config/config.py#L723-L770
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-windows/x86/cryptography/x509/base.py
python
RevokedCertificateBuilder.add_extension
(self, extension, critical)
return RevokedCertificateBuilder( self._serial_number, self._revocation_date, self._extensions + [extension] )
[]
def add_extension(self, extension, critical):
    if not isinstance(extension, ExtensionType):
        raise TypeError("extension must be an ExtensionType")

    extension = Extension(extension.oid, critical, extension)
    _reject_duplicate_extension(extension, self._extensions)
    return RevokedCertificateBuilder(
        self._serial_number, self._revocation_date,
        self._extensions + [extension]
    )
[ "def", "add_extension", "(", "self", ",", "extension", ",", "critical", ")", ":", "if", "not", "isinstance", "(", "extension", ",", "ExtensionType", ")", ":", "raise", "TypeError", "(", "\"extension must be an ExtensionType\"", ")", "extension", "=", "Extension", ...
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-windows/x86/cryptography/x509/base.py#L735-L744
magenta/magenta
be6558f1a06984faff6d6949234f5fe9ad0ffdb5
magenta/models/latent_transfer/common.py
python
make_grid
(boundary=2.0, number_grid=50, dim_latent=2)
return ObjectBlob(z_grid=z_grid, dim_grid=dim_grid)
Helper function making 1D or 2D grid for evaluation purpose.
Helper function making 1D or 2D grid for evaluation purpose.
[ "Helper", "function", "making", "1D", "or", "2D", "grid", "for", "evaluation", "purpose", "." ]
def make_grid(boundary=2.0, number_grid=50, dim_latent=2):
    """Helper function making 1D or 2D grid for evaluation purpose."""
    zs = np.linspace(-boundary, boundary, number_grid)
    z_grid = []
    if dim_latent == 1:
        for x in range(number_grid):
            z_grid.append([zs[x]])
        dim_grid = 1
    else:
        for x in range(number_grid):
            for y in range(number_grid):
                z_grid.append([0.] * (dim_latent - 2) + [zs[x], zs[y]])
        dim_grid = 2
    z_grid = np.array(z_grid)
    return ObjectBlob(z_grid=z_grid, dim_grid=dim_grid)
[ "def", "make_grid", "(", "boundary", "=", "2.0", ",", "number_grid", "=", "50", ",", "dim_latent", "=", "2", ")", ":", "zs", "=", "np", ".", "linspace", "(", "-", "boundary", ",", "boundary", ",", "number_grid", ")", "z_grid", "=", "[", "]", "if", ...
https://github.com/magenta/magenta/blob/be6558f1a06984faff6d6949234f5fe9ad0ffdb5/magenta/models/latent_transfer/common.py#L216-L230
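A small usage sketch, assuming make_grid and ObjectBlob from the module above:

grid = make_grid(boundary=1.0, number_grid=3, dim_latent=2)
print(grid.dim_grid)       # 2
print(grid.z_grid.shape)   # (9, 2): a 3 x 3 lattice over [-1, 1]^2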
huggingface/transformers
623b4f7c63f60cce917677ee704d6c93ee960b4b
src/transformers/optimization_tf.py
python
AdamWeightDecay.__init__
( self, learning_rate: Union[float, tf.keras.optimizers.schedules.LearningRateSchedule] = 0.001, beta_1: float = 0.9, beta_2: float = 0.999, epsilon: float = 1e-7, amsgrad: bool = False, weight_decay_rate: float = 0.0, include_in_weight_decay: Optional[List[str]] = None, exclude_from_weight_decay: Optional[List[str]] = None, name: str = "AdamWeightDecay", **kwargs )
[]
def __init__(
    self,
    learning_rate: Union[float, tf.keras.optimizers.schedules.LearningRateSchedule] = 0.001,
    beta_1: float = 0.9,
    beta_2: float = 0.999,
    epsilon: float = 1e-7,
    amsgrad: bool = False,
    weight_decay_rate: float = 0.0,
    include_in_weight_decay: Optional[List[str]] = None,
    exclude_from_weight_decay: Optional[List[str]] = None,
    name: str = "AdamWeightDecay",
    **kwargs
):
    super().__init__(learning_rate, beta_1, beta_2, epsilon, amsgrad, name, **kwargs)
    self.weight_decay_rate = weight_decay_rate
    self._include_in_weight_decay = include_in_weight_decay
    self._exclude_from_weight_decay = exclude_from_weight_decay
[ "def", "__init__", "(", "self", ",", "learning_rate", ":", "Union", "[", "float", ",", "tf", ".", "keras", ".", "optimizers", ".", "schedules", ".", "LearningRateSchedule", "]", "=", "0.001", ",", "beta_1", ":", "float", "=", "0.9", ",", "beta_2", ":", ...
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/optimization_tf.py#L191-L207
tomplus/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
kubernetes_asyncio/client/api/apiextensions_v1beta1_api.py
python
ApiextensionsV1beta1Api.read_custom_resource_definition_status
(self, name, **kwargs)
return self.read_custom_resource_definition_status_with_http_info(name, **kwargs)
read_custom_resource_definition_status # noqa: E501 read status of the specified CustomResourceDefinition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_custom_resource_definition_status(name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str name: name of the CustomResourceDefinition (required) :param str pretty: If 'true', then the output is pretty printed. :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: V1beta1CustomResourceDefinition If the method is called asynchronously, returns the request thread.
read_custom_resource_definition_status # noqa: E501
[ "read_custom_resource_definition_status", "#", "noqa", ":", "E501" ]
def read_custom_resource_definition_status(self, name, **kwargs):  # noqa: E501
    """read_custom_resource_definition_status  # noqa: E501

    read status of the specified CustomResourceDefinition  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_custom_resource_definition_status(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str name: name of the CustomResourceDefinition (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: V1beta1CustomResourceDefinition
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    return self.read_custom_resource_definition_status_with_http_info(name, **kwargs)
[ "def", "read_custom_resource_definition_status", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "return", "self", ".", "read_custom_resource_definition_status_with_http_info", "("...
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/api/apiextensions_v1beta1_api.py#L1165-L1189
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/contourcarpet/_colorbar.py
python
ColorBar.nticks
(self)
return self["nticks"]
Specifies the maximum number of ticks for the particular axis. The actual number of ticks will be chosen automatically to be less than or equal to `nticks`. Has an effect only if `tickmode` is set to "auto". The 'nticks' property is an integer and may be specified as: - An int (or float that will be cast to an int) in the interval [0, 9223372036854775807] Returns ------- int
Specifies the maximum number of ticks for the particular axis. The actual number of ticks will be chosen automatically to be less than or equal to `nticks`. Has an effect only if `tickmode` is set to "auto". The 'nticks' property is an integer and may be specified as: - An int (or float that will be cast to an int) in the interval [0, 9223372036854775807]
[ "Specifies", "the", "maximum", "number", "of", "ticks", "for", "the", "particular", "axis", ".", "The", "actual", "number", "of", "ticks", "will", "be", "chosen", "automatically", "to", "be", "less", "than", "or", "equal", "to", "nticks", ".", "Has", "an",...
def nticks(self):
    """
    Specifies the maximum number of ticks for the particular axis.
    The actual number of ticks will be chosen automatically to be
    less than or equal to `nticks`. Has an effect only if `tickmode`
    is set to "auto".

    The 'nticks' property is an integer and may be specified as:
      - An int (or float that will be cast to an int)
        in the interval [0, 9223372036854775807]

    Returns
    -------
    int
    """
    return self["nticks"]
[ "def", "nticks", "(", "self", ")", ":", "return", "self", "[", "\"nticks\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/contourcarpet/_colorbar.py#L331-L346
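A minimal sketch of setting and reading the property through standard plotly graph_objects (the value is illustrative):

import plotly.graph_objects as go

trace = go.Contourcarpet(colorbar=dict(nticks=5))
print(trace.colorbar.nticks)   # 5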
hatRiot/zarp
2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad
src/lib/scapy/layers/inet6.py
python
_ICMPv6Error.guess_payload_class
(self,p)
return IPerror6
[]
def guess_payload_class(self,p): return IPerror6
[ "def", "guess_payload_class", "(", "self", ",", "p", ")", ":", "return", "IPerror6" ]
https://github.com/hatRiot/zarp/blob/2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad/src/lib/scapy/layers/inet6.py#L1169-L1170
Ultimaker/Uranium
66da853cd9a04edd3a8a03526fac81e83c03f5aa
UM/Mesh/MeshData.py
python
MeshData.getFacePlane
(self, face_id: int)
return in_point, face_normal
Gets the plane the supplied face lies in. The resultant plane is specified by a point and a normal. :param face_id: :type{int} The index of the face (not the flattened indices). :return: :type{Tuple[numpy.ndarray, numpy.ndarray]} A plane, the 1st vector is the center, the 2nd the normal.
Gets the plane the supplied face lies in. The resultant plane is specified by a point and a normal.
[ "Gets", "the", "plane", "the", "supplied", "face", "lies", "in", ".", "The", "resultant", "plane", "is", "specified", "by", "a", "point", "and", "a", "normal", "." ]
def getFacePlane(self, face_id: int) -> Tuple[numpy.ndarray, numpy.ndarray]:
    """Gets the plane the supplied face lies in. The resultant plane is specified by a point and a normal.

    :param face_id: :type{int} The index of the face (not the flattened indices).
    :return: :type{Tuple[numpy.ndarray, numpy.ndarray]} A plane, the 1st vector is the center, the 2nd the normal.
    """
    v_a, v_b, v_c = self.getFaceNodes(face_id)
    in_point = (v_a + v_b + v_c) / 3.0
    face_normal = numpy.cross(v_b - v_a, v_c - v_a)
    return in_point, face_normal
[ "def", "getFacePlane", "(", "self", ",", "face_id", ":", "int", ")", "->", "Tuple", "[", "numpy", ".", "ndarray", ",", "numpy", ".", "ndarray", "]", ":", "v_a", ",", "v_b", ",", "v_c", "=", "self", ".", "getFaceNodes", "(", "face_id", ")", "in_point"...
https://github.com/Ultimaker/Uranium/blob/66da853cd9a04edd3a8a03526fac81e83c03f5aa/UM/Mesh/MeshData.py#L314-L324
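The same centroid-plus-normal computation, reproduced standalone in numpy for a hypothetical unit triangle in the xy-plane:

import numpy as np

v_a = np.array([0.0, 0.0, 0.0])
v_b = np.array([1.0, 0.0, 0.0])
v_c = np.array([0.0, 1.0, 0.0])

center = (v_a + v_b + v_c) / 3.0
normal = np.cross(v_b - v_a, v_c - v_a)

print(center)   # [0.33333333 0.33333333 0.        ]
print(normal)   # [0. 0. 1.] -- counter-clockwise winding gives the +z normal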
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/phoenixdb-1.1.0/phoenixdb/avatica/client.py
python
AvaticaClient.fetch
(self, connection_id, statement_id, offset=0, frame_max_size=None)
return response.frame
Returns a frame of rows. The frame describes whether there may be another frame. If there is not another frame, the current iteration is done when we have finished the rows in this frame. :param connection_id: ID of the current connection. :param statement_id: ID of the statement to fetch rows from. :param offset: Zero-based offset of first row in the requested frame. :param frame_max_size: Maximum number of rows to return; negative means no limit. :returns: Frame data, or ``None`` if there are no more.
Returns a frame of rows.
[ "Returns", "a", "frame", "of", "rows", "." ]
def fetch(self, connection_id, statement_id, offset=0, frame_max_size=None):
    """Returns a frame of rows.

    The frame describes whether there may be another frame. If there is not
    another frame, the current iteration is done when we have finished the
    rows in this frame.

    :param connection_id:
        ID of the current connection.

    :param statement_id:
        ID of the statement to fetch rows from.

    :param offset:
        Zero-based offset of first row in the requested frame.

    :param frame_max_size:
        Maximum number of rows to return; negative means no limit.

    :returns:
        Frame data, or ``None`` if there are no more.
    """
    request = requests_pb2.FetchRequest()
    request.connection_id = connection_id
    request.statement_id = statement_id
    request.offset = offset
    if frame_max_size is not None:
        request.frame_max_size = frame_max_size

    response_data = self._apply(request)
    response = responses_pb2.FetchResponse()
    response.ParseFromString(response_data)
    return response.frame
[ "def", "fetch", "(", "self", ",", "connection_id", ",", "statement_id", ",", "offset", "=", "0", ",", "frame_max_size", "=", "None", ")", ":", "request", "=", "requests_pb2", ".", "FetchRequest", "(", ")", "request", ".", "connection_id", "=", "connection_id...
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/phoenixdb-1.1.0/phoenixdb/avatica/client.py#L566-L598
pandas-dev/pandas
5ba7d714014ae8feaccc0dd4a98890828cf2832d
pandas/core/arrays/datetimes.py
python
DatetimeArray.month_name
(self, locale=None)
return result
Return the month names of the DateTimeIndex with specified locale. Parameters ---------- locale : str, optional Locale determining the language in which to return the month name. Default is English locale. Returns ------- Index Index of month names. Examples -------- >>> idx = pd.date_range(start='2018-01', freq='M', periods=3) >>> idx DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31'], dtype='datetime64[ns]', freq='M') >>> idx.month_name() Index(['January', 'February', 'March'], dtype='object')
Return the month names of the DateTimeIndex with specified locale.
[ "Return", "the", "month", "names", "of", "the", "DateTimeIndex", "with", "specified", "locale", "." ]
def month_name(self, locale=None):
    """
    Return the month names of the DateTimeIndex with specified locale.

    Parameters
    ----------
    locale : str, optional
        Locale determining the language in which to return the month name.
        Default is English locale.

    Returns
    -------
    Index
        Index of month names.

    Examples
    --------
    >>> idx = pd.date_range(start='2018-01', freq='M', periods=3)
    >>> idx
    DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31'],
                  dtype='datetime64[ns]', freq='M')
    >>> idx.month_name()
    Index(['January', 'February', 'March'], dtype='object')
    """
    values = self._local_timestamps()

    result = fields.get_date_name_field(values, "month_name", locale=locale)
    result = self._maybe_mask_results(result, fill_value=None)
    return result
[ "def", "month_name", "(", "self", ",", "locale", "=", "None", ")", ":", "values", "=", "self", ".", "_local_timestamps", "(", ")", "result", "=", "fields", ".", "get_date_name_field", "(", "values", ",", "\"month_name\"", ",", "locale", "=", "locale", ")",...
https://github.com/pandas-dev/pandas/blob/5ba7d714014ae8feaccc0dd4a98890828cf2832d/pandas/core/arrays/datetimes.py#L1218-L1246
omz/PythonistaAppTemplate
f560f93f8876d82a21d108977f90583df08d55af
PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/plotting/pygletplot/plot_mode_base.py
python
PlotModeBase.push_wireframe
(self, function)
Push a function which performs gl commands used to build a display list. (The list is built outside of the function)
Push a function which performs gl commands used to build a display list. (The list is built outside of the function)
[ "Push", "a", "function", "which", "performs", "gl", "commands", "used", "to", "build", "a", "display", "list", ".", "(", "The", "list", "is", "built", "outside", "of", "the", "function", ")" ]
def push_wireframe(self, function):
    """
    Push a function which performs gl commands
    used to build a display list. (The list is
    built outside of the function)
    """
    assert callable(function)
    self._draw_wireframe.append(function)
    if len(self._draw_wireframe) > self._max_render_stack_size:
        del self._draw_wireframe[1]
[ "def", "push_wireframe", "(", "self", ",", "function", ")", ":", "assert", "callable", "(", "function", ")", "self", ".", "_draw_wireframe", ".", "append", "(", "function", ")", "if", "len", "(", "self", ".", "_draw_wireframe", ")", ">", "self", ".", "_m...
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/plotting/pygletplot/plot_mode_base.py#L181-L190
exaile/exaile
a7b58996c5c15b3aa7b9975ac13ee8f784ef4689
xl/player/gst/engine.py
python
AudioStream.on_message
(self, bus, message)
This is called on the main thread
This is called on the main thread
[ "This", "is", "called", "on", "the", "main", "thread" ]
def on_message(self, bus, message):
    """
    This is called on the main thread
    """
    if message.type == Gst.MessageType.BUFFERING:
        percent = message.parse_buffering()
        if not percent < 100:
            self.logger.info('Buffering complete')
        if percent % 5 == 0:
            event.log_event('playback_buffering', self.engine.player, percent)
    elif message.type == Gst.MessageType.TAG:
        """Update track length and optionally metadata from gstreamer's parser.
        Useful for streams and files mutagen doesn't understand."""
        current = self.current_track
        if not current.is_local():
            gst_utils.parse_stream_tags(current, message.parse_tag())

        if current and not current.get_tag_raw('__length'):
            res, raw_duration = self.playbin.query_duration(Gst.Format.TIME)
            if not res:
                self.logger.error("Couldn't query duration")
                raw_duration = 0
            duration = float(raw_duration) / Gst.SECOND
            if duration > 0:
                current.set_tag_raw('__length', duration)
    elif (
        message.type == Gst.MessageType.EOS
        and not self.get_gst_state() == Gst.State.PAUSED
    ):
        self.engine._eos_func(self)
    elif (
        message.type == Gst.MessageType.STREAM_START
        and message.src == self.playbin
        and self.buffered_track is not None
    ):
        # This handles starting the next track during gapless transition
        buffered_track = self.buffered_track
        self.buffered_track = None
        play_args = self.engine.player.engine_autoadvance_notify_next(
            buffered_track
        ) + (True, True)
        self.engine._next_track(*play_args)
    elif message.type == Gst.MessageType.STATE_CHANGED:
        # This idea from quodlibet: pulsesink will not notify us when
        # volume changes if the stream is paused, so do it when the
        # state changes.
        if message.src == self.audio_sink:
            self.playbin.notify("volume")
    elif message.type == Gst.MessageType.ERROR:
        self.__handle_error_message(message)
    elif message.type == Gst.MessageType.ELEMENT:
        if not missing_plugin.handle_message(message, self.engine):
            logger.debug(
                "Unexpected element-specific GstMessage received from %s: %s",
                message.src,
                message,
            )
    elif message.type == Gst.MessageType.WARNING:
        # TODO there might be some useful warnings we ignore for now.
        gerror, debug_text = Gst.Message.parse_warning(message)
        logger.warning(
            "Unhandled GStreamer warning received:\n\tGError: %s\n\tDebug text: %s",
            gerror,
            debug_text,
        )
    else:
        # TODO there might be some useful messages we ignore for now.
        logger.debug(
            "Unhandled GstMessage of type %s received: %s", message.type, message
        )
[ "def", "on_message", "(", "self", ",", "bus", ",", "message", ")", ":", "if", "message", ".", "type", "==", "Gst", ".", "MessageType", ".", "BUFFERING", ":", "percent", "=", "message", ".", "parse_buffering", "(", ")", "if", "not", "percent", "<", "100...
https://github.com/exaile/exaile/blob/a7b58996c5c15b3aa7b9975ac13ee8f784ef4689/xl/player/gst/engine.py#L609-L691
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/decimal.py
python
Decimal.__rfloordiv__
(self, other, context=None)
return other.__floordiv__(self, context=context)
Swaps self/other and returns __floordiv__.
Swaps self/other and returns __floordiv__.
[ "Swaps", "self", "/", "other", "and", "returns", "__floordiv__", "." ]
def __rfloordiv__(self, other, context=None):
    """Swaps self/other and returns __floordiv__."""
    other = _convert_other(other)
    if other is NotImplemented:
        return other
    return other.__floordiv__(self, context=context)
[ "def", "__rfloordiv__", "(", "self", ",", "other", ",", "context", "=", "None", ")", ":", "other", "=", "_convert_other", "(", "other", ")", "if", "other", "is", "NotImplemented", ":", "return", "other", "return", "other", ".", "__floordiv__", "(", "self",...
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/decimal.py#L1575-L1580
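A short sketch of when the reflected operator fires: int.__floordiv__ returns NotImplemented for a Decimal right operand, so Python falls back to Decimal.__rfloordiv__.

from decimal import Decimal

print(Decimal(7) // 2)   # Decimal('3'), via Decimal.__floordiv__
print(7 // Decimal(2))   # Decimal('3'), via the reflected Decimal.__rfloordiv__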
lululxvi/deepxde
730c97282636e86c845ce2ba3253482f2178469e
deepxde/nn/tensorflow/deeponet.py
python
DeepONetCartesianProd.call
(self, inputs, training=False)
return x
[]
def call(self, inputs, training=False):
    x_func = inputs[0]
    x_loc = inputs[1]
    # Branch net to encode the input function
    x_func = self.branch(x_func)
    # Trunk net to encode the domain of the output function
    if self._input_transform is not None:
        x_loc = self._input_transform(x_loc)
    x_loc = self.activation_trunk(self.trunk(x_loc))
    # Dot product
    if x_func.shape[-1] != x_loc.shape[-1]:
        raise AssertionError(
            "Output sizes of branch net and trunk net do not match."
        )
    x = tf.einsum("bi,ni->bn", x_func, x_loc)
    # Add bias
    x += self.b
    if self._output_transform is not None:
        x = self._output_transform(inputs, x)
    return x
[ "def", "call", "(", "self", ",", "inputs", ",", "training", "=", "False", ")", ":", "x_func", "=", "inputs", "[", "0", "]", "x_loc", "=", "inputs", "[", "1", "]", "# Branch net to encode the input function", "x_func", "=", "self", ".", "branch", "(", "x_...
https://github.com/lululxvi/deepxde/blob/730c97282636e86c845ce2ba3253482f2178469e/deepxde/nn/tensorflow/deeponet.py#L46-L68
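The einsum spec "bi,ni->bn" above is a batched dot product over the shared latent dimension i; a standalone numpy sketch with assumed shapes:

import numpy as np

x_func = np.random.rand(4, 8)    # 4 samples, latent size 8 (branch output)
x_loc = np.random.rand(10, 8)    # 10 query locations, same latent size (trunk output)
out = np.einsum("bi,ni->bn", x_func, x_loc)

assert out.shape == (4, 10)
assert np.allclose(out, x_func @ x_loc.T)   # equivalent plain matrix product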
pulp/pulp
a0a28d804f997b6f81c391378aff2e4c90183df9
bindings/pulp/bindings/content.py
python
OrphanContentAPI.orphans_by_type
(self, type_id)
return self.server.GET(path)
Retrieve all orphaned units of a specific type :param type_id: identifier for a content type :type type_id: str
Retrieve all orphaned units of a specific type :param type_id: identifier for a content type :type type_id: str
[ "Retrieve", "all", "orphaned", "units", "of", "a", "specific", "type", ":", "param", "type_id", ":", "identifier", "for", "a", "content", "type", ":", "type", "type_id", ":", "str" ]
def orphans_by_type(self, type_id):
    """
    Retrieve all orphaned units of a specific type
    :param type_id: identifier for a content type
    :type type_id: str
    """
    path = self.PATH + "%s/" % type_id
    return self.server.GET(path)
[ "def", "orphans_by_type", "(", "self", ",", "type_id", ")", ":", "path", "=", "self", ".", "PATH", "+", "\"%s/\"", "%", "type_id", "return", "self", ".", "server", ".", "GET", "(", "path", ")" ]
https://github.com/pulp/pulp/blob/a0a28d804f997b6f81c391378aff2e4c90183df9/bindings/pulp/bindings/content.py#L29-L36
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/twisted/twisted/internet/interfaces.py
python
IReactorFDSet.addWriter
(writer)
I add writer to the set of file descriptors to get write events for. @param writer: An L{IWriteDescriptor} provider that will be checked for write events until it is removed from the reactor with L{removeWriter}. @return: C{None}.
I add writer to the set of file descriptors to get write events for.
[ "I", "add", "writer", "to", "the", "set", "of", "file", "descriptors", "to", "get", "write", "events", "for", "." ]
def addWriter(writer):
    """
    I add writer to the set of file descriptors to get write events for.

    @param writer: An L{IWriteDescriptor} provider that will be checked for
        write events until it is removed from the reactor with
        L{removeWriter}.

    @return: C{None}.
    """
[ "def", "addWriter", "(", "writer", ")", ":" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/twisted/twisted/internet/interfaces.py#L892-L901
smart-mobile-software/gitstack
d9fee8f414f202143eb6e620529e8e5539a2af56
python/Lib/lib-tk/Tkinter.py
python
Menu.insert_radiobutton
(self, index, cnf={}, **kw)
Add radio menu item at INDEX.
Add radio menu item at INDEX.
[ "Add", "radio", "menu", "item", "at", "INDEX", "." ]
def insert_radiobutton(self, index, cnf={}, **kw):
    """Add radio menu item at INDEX."""
    self.insert(index, 'radiobutton', cnf or kw)
[ "def", "insert_radiobutton", "(", "self", ",", "index", ",", "cnf", "=", "{", "}", ",", "*", "*", "kw", ")", ":", "self", ".", "insert", "(", "index", ",", "'radiobutton'", ",", "cnf", "or", "kw", ")" ]
https://github.com/smart-mobile-software/gitstack/blob/d9fee8f414f202143eb6e620529e8e5539a2af56/python/Lib/lib-tk/Tkinter.py#L2641-L2643
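A minimal Python 2 usage sketch (matching the Lib/lib-tk source path above); the menu labels and variable are illustrative:

import Tkinter as tk   # Python 2 module name

root = tk.Tk()
menu = tk.Menu(root)
choice = tk.StringVar()
menu.add_radiobutton(label="Last", variable=choice, value="last")
menu.insert_radiobutton(0, label="First", variable=choice, value="first")
root.config(menu=menu)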
demisto/content
5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07
Packs/CrowdStrikeFalconX/Integrations/CrowdStrikeFalconX/CrowdStrikeFalconX.py
python
send_uploaded_file_to_sandbox_analysis_command
( client: Client, sha256: str, environment_id: str, action_script: str = "", command_line: str = "", document_password: str = "", enable_tor: str = "false", submit_name: str = "", system_date: str = "", system_time: str = "" )
return tableToMarkdown("CrowdStrike Falcon X response:", filtered_outputs), entry_context, [response]
Submit a sample SHA256 for sandbox analysis. :param client: the client object with an access token :param sha256: SHA256 ID of the sample, which is a SHA256 hash value :param environment_id: specifies the sandbox environment used for analysis :param action_script: runtime script for sandbox analysis :param command_line: command line script passed to the submitted file at runtime :param document_password: auto-filled for Adobe or Office files that prompt for a password :param enable_tor: if true, sandbox analysis routes network traffic via TOR :param submit_name: name of the malware sample that’s used for file type detection and analysis :param system_date: set a custom date in the format yyyy-MM-dd for the sandbox environment :param system_time: set a custom time in the format HH:mm for the sandbox environment. :return: Demisto outputs when entry_context and responses are lists
Submit a sample SHA256 for sandbox analysis. :param client: the client object with an access token :param sha256: SHA256 ID of the sample, which is a SHA256 hash value :param environment_id: specifies the sandbox environment used for analysis :param action_script: runtime script for sandbox analysis :param command_line: command line script passed to the submitted file at runtime :param document_password: auto-filled for Adobe or Office files that prompt for a password :param enable_tor: if true, sandbox analysis routes network traffic via TOR :param submit_name: name of the malware sample that’s used for file type detection and analysis :param system_date: set a custom date in the format yyyy-MM-dd for the sandbox environment :param system_time: set a custom time in the format HH:mm for the sandbox environment. :return: Demisto outputs when entry_context and responses are lists
[ "Submit", "a", "sample", "SHA256", "for", "sandbox", "analysis", ".", ":", "param", "client", ":", "the", "client", "object", "with", "an", "access", "token", ":", "param", "sha256", ":", "SHA256", "ID", "of", "the", "sample", "which", "is", "a", "SHA256...
def send_uploaded_file_to_sandbox_analysis_command( client: Client, sha256: str, environment_id: str, action_script: str = "", command_line: str = "", document_password: str = "", enable_tor: str = "false", submit_name: str = "", system_date: str = "", system_time: str = "" ) -> Tuple[str, Dict[str, List[Dict[str, dict]]], List[dict]]: """Submit a sample SHA256 for sandbox analysis. :param client: the client object with an access token :param sha256: SHA256 ID of the sample, which is a SHA256 hash value :param environment_id: specifies the sandbox environment used for analysis :param action_script: runtime script for sandbox analysis :param command_line: command line script passed to the submitted file at runtime :param document_password: auto-filled for Adobe or Office files that prompt for a password :param enable_tor: if true, sandbox analysis routes network traffic via TOR :param submit_name: name of the malware sample that’s used for file type detection and analysis :param system_date: set a custom date in the format yyyy-MM-dd for the sandbox environment :param system_time: set a custom time in the format HH:mm for the sandbox environment. :return: Demisto outputs when entry_context and responses are lists """ response = client.send_uploaded_file_to_sandbox_analysis(sha256, environment_id, action_script, command_line, document_password, enable_tor, submit_name, system_date, system_time) sandbox_fields = ["environment_id", "sha256"] resource_fields = ['id', 'state', 'created_timestamp', 'created_timestamp'] filtered_outputs = parse_outputs(response, sandbox_fields=sandbox_fields, resources_fields=resource_fields) # in order identify the id source, upload or submit command, the id name changed filtered_outputs["submitted_id"] = filtered_outputs.pop("id") entry_context = {'csfalconx.resource(val.submitted_id === obj.submitted_id)': [filtered_outputs]} return tableToMarkdown("CrowdStrike Falcon X response:", filtered_outputs), entry_context, [response]
[ "def", "send_uploaded_file_to_sandbox_analysis_command", "(", "client", ":", "Client", ",", "sha256", ":", "str", ",", "environment_id", ":", "str", ",", "action_script", ":", "str", "=", "\"\"", ",", "command_line", ":", "str", "=", "\"\"", ",", "document_passw...
https://github.com/demisto/content/blob/5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07/Packs/CrowdStrikeFalconX/Integrations/CrowdStrikeFalconX/CrowdStrikeFalconX.py#L565-L601
foremast/foremast
e8eb9bd24e975772532d90efa8a9ba1850e968cc
src/foremast/runner.py
python
ForemastRunner.check_env_defined
(self)
Checks if the current environment is defined in the pipeline files. Raises a ForemastError if it is missing.
Checks if the current environment is defined in the pipeline files. Raises a ForemastError if it is missing.
[ "Checks", "if", "the", "current", "environment", "is", "defined", "in", "the", "pipeline", "files", ".", "Raises", "a", "ForemastError", "if", "it", "is", "missing", "." ]
def check_env_defined(self): """Checks if the current environment is defined in the pipeline files. Raises a ForemastError if it is missing.""" if not self.env: raise ForemastError("Environment not set") if self.env not in self.configs: raise ForemastError("Environment '{}' not found in pipeline configs. ".format(self.env) + "Check pipeline.json and application-master-{}.json".format(self.env))
[ "def", "check_env_defined", "(", "self", ")", ":", "if", "not", "self", ".", "env", ":", "raise", "ForemastError", "(", "\"Environment not set\"", ")", "if", "self", ".", "env", "not", "in", "self", ".", "configs", ":", "raise", "ForemastError", "(", "\"En...
https://github.com/foremast/foremast/blob/e8eb9bd24e975772532d90efa8a9ba1850e968cc/src/foremast/runner.py#L320-L327
tribe29/checkmk
6260f2512e159e311f426e16b84b19d0b8e9ad0c
cmk/base/plugins/agent_based/brocade_optical.py
python
parse_brocade_optical
(string_table: List[type_defs.StringTable])
return parsed
>>> from pprint import pprint >>> pprint(parse_brocade_optical([ ... [['1409', '10GigabitEthernet23/1', '6', '1'], ... ['1410', '10GigabitEthernet23/2', '6', '2'], ... ['1411', '10GigabitEthernet23/3', '6', '2'], ... ['2049', 'EthernetManagement1', '6', '1'], ['33554433', 'lb1', '24', '1'], ... ['67108864', 'tnl0', '150', '1'], ['67108865', 'tnl1', '150', '1'], ... ['67108866', 'tnl2', '150', '1'], ['67108867', 'tnl3', '150', '1'], ... ['83886085', 'LAG5', '202', '2']], ... [[' N/A ', '-001.6045 dBm: Normal', '-002.2504 dBm: Normal', '1409'], ... ['31.4882 C: Normal', '-001.4508 dBm: Normal', '-036.9897 dBm: Low-Alarm', '1410'], ... ['31.4531 C: Normal', '-001.4194 dBm: Normal', '-033.9794 dBm: Low-Alarm', '1411'], ... [ '29.5703 C: Normal', '-031.5490 dBm: Low-Alarm', '-036.9897 dBm: Low-Alarm', '1412']], ... [['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014TL', '1409'], ... ['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014UN', '1410'], ... ['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014UL', '1411']], ... [['31.4531 C: Normal', '-001.6045 dBm: Normal', '-002.2504 dBm: Normal', '1409.1']], ... ])) {'1409': {'description': '10GigabitEthernet23/1', 'lanes': {1: {'rx_light': (-2.2504, 'Normal'), 'temp': (31.4531, 'Normal'), 'tx_light': (-1.6045, 'Normal')}}, 'operational_status': '1', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-2.2504, 'Normal'), 'serial': 'ADF2094300014TL', 'temp': (None, None), 'tx_light': (-1.6045, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1410': {'description': '10GigabitEthernet23/2', 'operational_status': '2', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-36.9897, 'Low-Alarm'), 'serial': 'ADF2094300014UN', 'temp': (31.4882, 'Normal'), 'tx_light': (-1.4508, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1411': {'description': '10GigabitEthernet23/3', 'operational_status': '2', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-33.9794, 'Low-Alarm'), 'serial': 'ADF2094300014UL', 'temp': (31.4531, 'Normal'), 'tx_light': (-1.4194, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1412': {'rx_light': (-36.9897, 'Low-Alarm'), 'temp': (29.5703, 'Normal'), 'tx_light': (-31.549, 'Low-Alarm')}}
>>> from pprint import pprint >>> pprint(parse_brocade_optical([ ... [['1409', '10GigabitEthernet23/1', '6', '1'], ... ['1410', '10GigabitEthernet23/2', '6', '2'], ... ['1411', '10GigabitEthernet23/3', '6', '2'], ... ['2049', 'EthernetManagement1', '6', '1'], ['33554433', 'lb1', '24', '1'], ... ['67108864', 'tnl0', '150', '1'], ['67108865', 'tnl1', '150', '1'], ... ['67108866', 'tnl2', '150', '1'], ['67108867', 'tnl3', '150', '1'], ... ['83886085', 'LAG5', '202', '2']], ... [[' N/A ', '-001.6045 dBm: Normal', '-002.2504 dBm: Normal', '1409'], ... ['31.4882 C: Normal', '-001.4508 dBm: Normal', '-036.9897 dBm: Low-Alarm', '1410'], ... ['31.4531 C: Normal', '-001.4194 dBm: Normal', '-033.9794 dBm: Low-Alarm', '1411'], ... [ '29.5703 C: Normal', '-031.5490 dBm: Low-Alarm', '-036.9897 dBm: Low-Alarm', '1412']], ... [['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014TL', '1409'], ... ['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014UN', '1410'], ... ['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014UL', '1411']], ... [['31.4531 C: Normal', '-001.6045 dBm: Normal', '-002.2504 dBm: Normal', '1409.1']], ... ])) {'1409': {'description': '10GigabitEthernet23/1', 'lanes': {1: {'rx_light': (-2.2504, 'Normal'), 'temp': (31.4531, 'Normal'), 'tx_light': (-1.6045, 'Normal')}}, 'operational_status': '1', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-2.2504, 'Normal'), 'serial': 'ADF2094300014TL', 'temp': (None, None), 'tx_light': (-1.6045, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1410': {'description': '10GigabitEthernet23/2', 'operational_status': '2', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-36.9897, 'Low-Alarm'), 'serial': 'ADF2094300014UN', 'temp': (31.4882, 'Normal'), 'tx_light': (-1.4508, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1411': {'description': '10GigabitEthernet23/3', 'operational_status': '2', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-33.9794, 'Low-Alarm'), 'serial': 'ADF2094300014UL', 'temp': (31.4531, 'Normal'), 'tx_light': (-1.4194, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1412': {'rx_light': (-36.9897, 'Low-Alarm'), 'temp': (29.5703, 'Normal'), 'tx_light': (-31.549, 'Low-Alarm')}}
[ ">>>", "from", "pprint", "import", "pprint", ">>>", "pprint", "(", "parse_brocade_optical", "(", "[", "...", "[[", "1409", "10GigabitEthernet23", "/", "1", "6", "1", "]", "...", "[", "1410", "10GigabitEthernet23", "/", "2", "6", "2", "]", "...", "[", "14...
def parse_brocade_optical(string_table: List[type_defs.StringTable]) -> Section: """ >>> from pprint import pprint >>> pprint(parse_brocade_optical([ ... [['1409', '10GigabitEthernet23/1', '6', '1'], ... ['1410', '10GigabitEthernet23/2', '6', '2'], ... ['1411', '10GigabitEthernet23/3', '6', '2'], ... ['2049', 'EthernetManagement1', '6', '1'], ['33554433', 'lb1', '24', '1'], ... ['67108864', 'tnl0', '150', '1'], ['67108865', 'tnl1', '150', '1'], ... ['67108866', 'tnl2', '150', '1'], ['67108867', 'tnl3', '150', '1'], ... ['83886085', 'LAG5', '202', '2']], ... [[' N/A ', '-001.6045 dBm: Normal', '-002.2504 dBm: Normal', '1409'], ... ['31.4882 C: Normal', '-001.4508 dBm: Normal', '-036.9897 dBm: Low-Alarm', '1410'], ... ['31.4531 C: Normal', '-001.4194 dBm: Normal', '-033.9794 dBm: Low-Alarm', '1411'], ... [ '29.5703 C: Normal', '-031.5490 dBm: Low-Alarm', '-036.9897 dBm: Low-Alarm', '1412']], ... [['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014TL', '1409'], ... ['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014UN', '1410'], ... ['10GE LR 10km SFP+', '57-0000076-01', 'ADF2094300014UL', '1411']], ... [['31.4531 C: Normal', '-001.6045 dBm: Normal', '-002.2504 dBm: Normal', '1409.1']], ... ])) {'1409': {'description': '10GigabitEthernet23/1', 'lanes': {1: {'rx_light': (-2.2504, 'Normal'), 'temp': (31.4531, 'Normal'), 'tx_light': (-1.6045, 'Normal')}}, 'operational_status': '1', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-2.2504, 'Normal'), 'serial': 'ADF2094300014TL', 'temp': (None, None), 'tx_light': (-1.6045, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1410': {'description': '10GigabitEthernet23/2', 'operational_status': '2', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-36.9897, 'Low-Alarm'), 'serial': 'ADF2094300014UN', 'temp': (31.4882, 'Normal'), 'tx_light': (-1.4508, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1411': {'description': '10GigabitEthernet23/3', 'operational_status': '2', 'part': '57-0000076-01', 'port_type': '6', 'rx_light': (-33.9794, 'Low-Alarm'), 'serial': 'ADF2094300014UL', 'temp': (31.4531, 'Normal'), 'tx_light': (-1.4194, 'Normal'), 'type': '10GE LR 10km SFP+'}, '1412': {'rx_light': (-36.9897, 'Low-Alarm'), 'temp': (29.5703, 'Normal'), 'tx_light': (-31.549, 'Low-Alarm')}} """ if_info, if_data, if_ids, lanes = string_table parsed: Section = {} for temp, tx_light, rx_light, if_id in if_data: parsed.setdefault( if_id, { "temp": _parse_value(temp), "tx_light": _parse_value(tx_light), "rx_light": _parse_value(rx_light), }, ) for if_id, if_descr, if_type, if_operstatus in if_info: if if_id in parsed: parsed[if_id].update( {"port_type": if_type, "description": if_descr, "operational_status": if_operstatus} ) # add informational values for media_type, part, serial, if_id in if_ids: if if_id in parsed: parsed[if_id].update({"type": media_type, "part": part, "serial": serial}) # add per-lane data for temp, tx_light, rx_light, lane in lanes: if_id, lane = lane.split(".") if if_id in parsed: parsed[if_id].setdefault("lanes", {}).setdefault( int(lane), { "temp": _parse_value(temp), "tx_light": _parse_value(tx_light), "rx_light": _parse_value(rx_light), }, ) return parsed
[ "def", "parse_brocade_optical", "(", "string_table", ":", "List", "[", "type_defs", ".", "StringTable", "]", ")", "->", "Section", ":", "if_info", ",", "if_data", ",", "if_ids", ",", "lanes", "=", "string_table", "parsed", ":", "Section", "=", "{", "}", "f...
https://github.com/tribe29/checkmk/blob/6260f2512e159e311f426e16b84b19d0b8e9ad0c/cmk/base/plugins/agent_based/brocade_optical.py#L130-L221
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/rings/padics/padic_valuation.py
python
pAdicValuation_base.is_unramified
(self, G, include_steps=False, assume_squarefree=False)
r""" Return whether ``G`` defines a single unramified extension of the completion of the domain of this valuation. INPUT: - ``G`` -- a monic squarefree polynomial over the domain of this valuation - ``include_steps`` -- a boolean (default: ``False``); whether to include the approximate valuations that were used to determine the result in the return value. - ``assume_squarefree`` -- a boolean (default: ``False``); whether to assume that ``G`` is square-free over the completion of the domain of this valuation. Setting this to ``True`` can significantly improve the performance. EXAMPLES: We consider an extension as unramified if its ramification index is 1. Hence, a trivial extension is unramified:: sage: R.<x> = QQ[] sage: v = QQ.valuation(2) sage: v.is_unramified(x) True If ``G`` remains irreducible in reduction, then it defines an unramified extension:: sage: v.is_unramified(x^2 + x + 1) True However, even if ``G`` factors, it might define an unramified extension:: sage: v.is_unramified(x^2 + 2*x + 4) True
r""" Return whether ``G`` defines a single unramified extension of the completion of the domain of this valuation.
[ "r", "Return", "whether", "G", "defines", "a", "single", "unramified", "extension", "of", "the", "completion", "of", "the", "domain", "of", "this", "valuation", "." ]
def is_unramified(self, G, include_steps=False, assume_squarefree=False): r""" Return whether ``G`` defines a single unramified extension of the completion of the domain of this valuation. INPUT: - ``G`` -- a monic squarefree polynomial over the domain of this valuation - ``include_steps`` -- a boolean (default: ``False``); whether to include the approximate valuations that were used to determine the result in the return value. - ``assume_squarefree`` -- a boolean (default: ``False``); whether to assume that ``G`` is square-free over the completion of the domain of this valuation. Setting this to ``True`` can significantly improve the performance. EXAMPLES: We consider an extension as unramified if its ramification index is 1. Hence, a trivial extension is unramified:: sage: R.<x> = QQ[] sage: v = QQ.valuation(2) sage: v.is_unramified(x) True If ``G`` remains irreducible in reduction, then it defines an unramified extension:: sage: v.is_unramified(x^2 + x + 1) True However, even if ``G`` factors, it might define an unramified extension:: sage: v.is_unramified(x^2 + 2*x + 4) True """ R = G.parent() from sage.rings.polynomial.polynomial_ring import is_PolynomialRing if not is_PolynomialRing(R) or R.base_ring() is not self.domain() or not G.is_monic(): raise ValueError("G must be a monic univariate polynomial over the domain of this valuation") if not assume_squarefree and not G.is_squarefree(): raise ValueError("G must be squarefree") from sage.rings.valuation.gauss_valuation import GaussValuation steps = [ GaussValuation(R, self) ] while True: v = steps[-1] if v.E() > 1: ret = False break if v.F() == G.degree(): ret = True break assert v(G) is not infinity if v.is_key(G): ret = True break next = v.mac_lane_step(G, assume_squarefree=True) if len(next)>1: ret = False break steps.append(next[0]) if include_steps: return ret, steps else: return ret
[ "def", "is_unramified", "(", "self", ",", "G", ",", "include_steps", "=", "False", ",", "assume_squarefree", "=", "False", ")", ":", "R", "=", "G", ".", "parent", "(", ")", "from", "sage", ".", "rings", ".", "polynomial", ".", "polynomial_ring", "import"...
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/rings/padics/padic_valuation.py#L528-L603
psychopy/psychopy
01b674094f38d0e0bd51c45a6f66f671d7041696
psychopy/iohub/client/__init__.py
python
ioHubConnection.eventListToDict
(evt_data)
return EventConstants.getClass(etype).createEventAsDict(evt_data)
Convert an ioHub event that is currently in list format into a dictionary of attribute names and values.
Convert an ioHub event that is currently in list format into a dictionary of attribute names and values.
[ "Convert", "an", "ioHub", "event", "that", "is", "currently", "in", "list", "format", "into", "a", "dictionary", "of", "attribute", "names", "and", "values", "." ]
def eventListToDict(evt_data): """Convert an ioHub event that is currently in list format into a dictionary of attribute names and values.""" if isinstance(evt_data, dict): return evt_data etype = evt_data[DeviceEvent.EVENT_TYPE_ID_INDEX] return EventConstants.getClass(etype).createEventAsDict(evt_data)
[ "def", "eventListToDict", "(", "evt_data", ")", ":", "if", "isinstance", "(", "evt_data", ",", "dict", ")", ":", "return", "evt_data", "etype", "=", "evt_data", "[", "DeviceEvent", ".", "EVENT_TYPE_ID_INDEX", "]", "return", "EventConstants", ".", "getClass", "...
https://github.com/psychopy/psychopy/blob/01b674094f38d0e0bd51c45a6f66f671d7041696/psychopy/iohub/client/__init__.py#L1218-L1224
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/min/queue.py
python
LifoQueue._init
(self, maxsize)
[]
def _init(self, maxsize): self.queue = []
[ "def", "_init", "(", "self", ",", "maxsize", ")", ":", "self", ".", "queue", "=", "[", "]" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/min/queue.py#L245-L246
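The empty list created by _init above is what gives LifoQueue its stack behavior — put appends and get pops from the same end. A quick illustration with the public API:

from queue import LifoQueue

stack = LifoQueue()            # maxsize defaults to 0, i.e. unbounded
for item in ("first", "second", "third"):
    stack.put(item)

print(stack.get())             # 'third'  -- last in, first out
print(stack.get())             # 'second'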
emesene/emesene
4548a4098310e21b16437bb36223a7f632a4f7bc
emesene/e3/papylib/papyon/papyon/service/description/SchematizedStore/DeleteRelationships.py
python
soap_body
(cid, source_rid, target_rid)
return """<DeleteRelationships xmlns="http://www.msn.com/webservices/storage/w10"> <sourceHandle> %s </sourceHandle> <targetHandles> <ObjectHandle> <ResourceID> %s </ResourceID> </ObjectHandle> </targetHandles> </DeleteRelationships>""" % (source_handle, target_rid)
Returns the SOAP xml body
Returns the SOAP xml body
[ "Returns", "the", "SOAP", "xml", "body" ]
def soap_body(cid, source_rid, target_rid): """Returns the SOAP xml body """ if cid is not None: source_handle = """<RelationshipName> /UserTiles </RelationshipName> <Alias> <Name> %s </Name> <NameSpace> MyCidStuff </NameSpace> </Alias>""" % cid else: source_handle = "<ResourceID>%s</ResourceID>" % source_rid return """<DeleteRelationships xmlns="http://www.msn.com/webservices/storage/w10"> <sourceHandle> %s </sourceHandle> <targetHandles> <ObjectHandle> <ResourceID> %s </ResourceID> </ObjectHandle> </targetHandles> </DeleteRelationships>""" % (source_handle, target_rid)
[ "def", "soap_body", "(", "cid", ",", "source_rid", ",", "target_rid", ")", ":", "if", "cid", "is", "not", "None", ":", "source_handle", "=", "\"\"\"<RelationshipName>\n /UserTiles\n </RelationshipName>\n ...
https://github.com/emesene/emesene/blob/4548a4098310e21b16437bb36223a7f632a4f7bc/emesene/e3/papylib/papyon/papyon/service/description/SchematizedStore/DeleteRelationships.py#L34-L63
MarioVilas/winappdbg
975a088ac54253d0bdef39fe831e82f24b4c11f6
winappdbg/process.py
python
Process.get_running_time
(self)
return RunningTime / 10000
Determines how long this process has been running. @rtype: long @return: Process running time in milliseconds.
Determines how long this process has been running.
[ "Determines", "how", "long", "this", "process", "has", "been", "running", "." ]
def get_running_time(self): """ Determines how long this process has been running. @rtype: long @return: Process running time in milliseconds. """ if win32.PROCESS_ALL_ACCESS == win32.PROCESS_ALL_ACCESS_VISTA: dwAccess = win32.PROCESS_QUERY_LIMITED_INFORMATION else: dwAccess = win32.PROCESS_QUERY_INFORMATION hProcess = self.get_handle(dwAccess) (CreationTime, ExitTime, _, _) = win32.GetProcessTimes(hProcess) if self.is_alive(): ExitTime = win32.GetSystemTimeAsFileTime() CreationTime = CreationTime.dwLowDateTime + (CreationTime.dwHighDateTime << 32) ExitTime = ExitTime.dwLowDateTime + ( ExitTime.dwHighDateTime << 32) RunningTime = ExitTime - CreationTime return RunningTime / 10000
[ "def", "get_running_time", "(", "self", ")", ":", "if", "win32", ".", "PROCESS_ALL_ACCESS", "==", "win32", ".", "PROCESS_ALL_ACCESS_VISTA", ":", "dwAccess", "=", "win32", ".", "PROCESS_QUERY_LIMITED_INFORMATION", "else", ":", "dwAccess", "=", "win32", ".", "PROCES...
https://github.com/MarioVilas/winappdbg/blob/975a088ac54253d0bdef39fe831e82f24b4c11f6/winappdbg/process.py#L832-L850
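The final division by 10000 follows from FILETIME units: Windows FILETIME counts 100-nanosecond ticks, and 10,000 such ticks make one millisecond. A standalone sketch of the same arithmetic (the tick values are made up for illustration):

def filetime_to_int(low, high):
    # Combine the two 32-bit FILETIME halves, as the method above does.
    return low + (high << 32)

creation = filetime_to_int(0x9A5F4400, 0x01D9)  # hypothetical ticks
exit_time = creation + 25_000_000               # 25e6 ticks = 2.5 seconds later
elapsed_ms = (exit_time - creation) // 10_000   # 100-ns ticks -> milliseconds
print(elapsed_ms)                               # 2500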
Yenthe666/Odoo_Samples
a70d5f67a363dbf92865405c4e190898ec80908a
sale/sale.py
python
sale_order_line._amount_line
(self, cr, uid, ids, field_name, arg, context=None)
return res
[]
def _amount_line(self, cr, uid, ids, field_name, arg, context=None): tax_obj = self.pool.get('account.tax') cur_obj = self.pool.get('res.currency') res = {} if context is None: context = {} for line in self.browse(cr, uid, ids, context=context): price = line.price_unit * (1 - (line.discount or 0.0) / 100.0) taxes = tax_obj.compute_all(cr, uid, line.tax_id, price, line.product_uom_qty, line.product_id, line.order_id.partner_id) cur = line.order_id.pricelist_id.currency_id res[line.id] = cur_obj.round(cr, uid, cur, taxes['total']) return res
[ "def", "_amount_line", "(", "self", ",", "cr", ",", "uid", ",", "ids", ",", "field_name", ",", "arg", ",", "context", "=", "None", ")", ":", "tax_obj", "=", "self", ".", "pool", ".", "get", "(", "'account.tax'", ")", "cur_obj", "=", "self", ".", "p...
https://github.com/Yenthe666/Odoo_Samples/blob/a70d5f67a363dbf92865405c4e190898ec80908a/sale/sale.py#L820-L831
openstack/swift
b8d7c3dcb817504dcc0959ba52cc4ed2cf66c100
swift/common/middleware/s3api/controllers/multi_upload.py
python
UploadsController.POST
(self, req)
return HTTPOk(body=body, content_type='application/xml')
Handles Initiate Multipart Upload.
Handles Initiate Multipart Upload.
[ "Handles", "Initiate", "Multipart", "Upload", "." ]
def POST(self, req): """ Handles Initiate Multipart Upload. """ if len(req.object_name) > constraints.MAX_OBJECT_NAME_LENGTH: # Note that we can still run into trouble where the MPU is just # within the limit, which means the segment names will go over raise KeyTooLongError() # Create a unique S3 upload id from UUID to avoid duplicates. upload_id = unique_id() seg_container = req.container_name + MULTIUPLOAD_SUFFIX content_type = req.headers.get('Content-Type') if content_type: req.headers[sysmeta_header('object', 'has-content-type')] = 'yes' req.headers[ sysmeta_header('object', 'content-type')] = content_type else: req.headers[sysmeta_header('object', 'has-content-type')] = 'no' req.headers['Content-Type'] = 'application/directory' try: seg_req = copy.copy(req) seg_req.environ = copy.copy(req.environ) seg_req.container_name = seg_container seg_req.get_container_info(self.app) except NoSuchBucket: try: # multi-upload bucket doesn't exist, create one with # same storage policy and acls as the primary bucket info = req.get_container_info(self.app) policy_name = POLICIES[info['storage_policy']].name hdrs = {'X-Storage-Policy': policy_name} if info.get('read_acl'): hdrs['X-Container-Read'] = info['read_acl'] if info.get('write_acl'): hdrs['X-Container-Write'] = info['write_acl'] seg_req.get_response(self.app, 'PUT', seg_container, '', headers=hdrs) except (BucketAlreadyExists, BucketAlreadyOwnedByYou): pass obj = '%s/%s' % (req.object_name, upload_id) req.headers.pop('Etag', None) req.headers.pop('Content-Md5', None) req.get_response(self.app, 'PUT', seg_container, obj, body='') result_elem = Element('InitiateMultipartUploadResult') SubElement(result_elem, 'Bucket').text = req.container_name SubElement(result_elem, 'Key').text = req.object_name SubElement(result_elem, 'UploadId').text = upload_id body = tostring(result_elem) return HTTPOk(body=body, content_type='application/xml')
[ "def", "POST", "(", "self", ",", "req", ")", ":", "if", "len", "(", "req", ".", "object_name", ")", ">", "constraints", ".", "MAX_OBJECT_NAME_LENGTH", ":", "# Note that we can still run into trouble where the MPU is just", "# within the limit, which means the segment names ...
https://github.com/openstack/swift/blob/b8d7c3dcb817504dcc0959ba52cc4ed2cf66c100/swift/common/middleware/s3api/controllers/multi_upload.py#L403-L460
roclark/sportsipy
c19f545d3376d62ded6304b137dc69238ac620a9
sportsipy/nba/roster.py
python
Player.points_per_poss
(self)
return self._points_per_poss
Returns a ``float`` of the total number of points the player scored per 100 possessions.
Returns a ``float`` of the total number of points the player scored per 100 possessions.
[ "Returns", "a", "float", "of", "the", "total", "number", "of", "points", "the", "player", "scored", "per", "100", "possessions", "." ]
def points_per_poss(self): """ Returns a ``float`` of the total number of points the player scored per 100 possessions. """ return self._points_per_poss
[ "def", "points_per_poss", "(", "self", ")", ":", "return", "self", ".", "_points_per_poss" ]
https://github.com/roclark/sportsipy/blob/c19f545d3376d62ded6304b137dc69238ac620a9/sportsipy/nba/roster.py#L959-L964
ydkhatri/mac_apt
729630c8bbe7a73cce3ca330305d3301a919cb07
plugins/helpers/macinfo.py
python
MountedIosInfo._GetAppDetails
(self)
return False
Get app name, path, version, uuid, container path and other info
Get app name, path, version, uuid, container path and other info
[ "Get", "app", "name", "path", "version", "uuid", "container", "path", "and", "other", "info" ]
def _GetAppDetails(self): '''Get app name, path, version, uuid, container path and other info''' app_state_db = '/private/var/mobile/Library/FrontBoard/applicationState.db' if self.IsValidFilePath(app_state_db): self.ExportFile(app_state_db, 'APPS') try: sqlite = SqliteWrapper(self) conn = sqlite.connect(app_state_db) if conn: log.debug ("Opened DB {} successfully".format(os.path.basename(app_state_db))) try: conn.row_factory = sqlite3.Row query = \ """ SELECT application_identifier_tab.application_identifier as ai, key_tab.key, value FROM application_identifier_tab, key_tab, kvs WHERE kvs.application_identifier=application_identifier_tab.id AND kvs.key=key_tab.id ORDER BY ai """ cursor = conn.execute(query) apps = [] last_app_name = '' app_info = None try: for row in cursor: app = row['ai'] key = row['key'] val = row['value'] if last_app_name != app: # new app found app_info = ApplicationInfo(app) apps.append(app_info) last_app_name = app app_info.source = app_state_db # Process key/val pairs if key == '__UninstallDate': if val: temp_file = BytesIO(val) success, plist, error = CommonFunctions.ReadPlist(temp_file) if success: if isinstance(plist, datetime.datetime): app_info.uninstall_date = plist else: log.error('Uninstall plist is not in the expected form, plist was ' + str(plist)) else: log.error(f'Failed to read "compatibilityInfo" for {app}. {error}') temp_file.close() elif key == 'XBApplicationSnapshotManifest': pass elif key == 'compatibilityInfo': if val: temp_file = BytesIO(val) success, plist, error = CommonFunctions.ReadPlist(temp_file, True) if success: app_info.bundle_container_path = plist.get('bundleContainerPath', '') if app_info.bundle_container_path: app_info.bundle_uuid = UUID(os.path.basename(app_info.bundle_container_path)) app_info.bundle_path = plist.get('bundlePath', '') app_info.sandbox_path = plist.get('sandboxPath', '') if app_info.sandbox_path: app_info.data_uuid = UUID(os.path.basename(app_info.sandbox_path)) self._ReadInfoPlist(app_info, app_info.bundle_path + '/Info.plist') bundle_root, _ = os.path.split(app_info.bundle_path) if bundle_root != '/Applications': self._ReadBundleMetadataPlist(app_info, bundle_root + '/BundleMetadata.plist') app_info.source += ', ' + app_info.bundle_path + '/Info.plist' + ', ' + bundle_root + '/BundleMetadata.plist' else: log.error(f'Failed to read "compatibilityInfo" for {app}. {error}') temp_file.close() conn.close() for app in apps: # add app to main list if properties are not empty if not app.bundle_display_name and not app.bundle_path \ and not app.sandbox_path and not app.uninstall_date \ and not app.bundle_display_name: pass else: self.apps.append(app) except sqlite3.Error as ex: log.exception("Db cursor error while reading file " + app_state_db) conn.close() return False except sqlite3.Error as ex: log.error ("Sqlite error - \nError details: \n" + str(ex)) conn.close() return False conn.close() self._GetAppGroupDetails(self.apps) self._ResolveAppSysGroupFolders(self.apps) return True except sqlite3.Error as ex: log.error ("Failed to open {}, is it a valid DB? Error details: ".format(os.path.basename(app_state_db)) + str(ex)) return False else: log.error(f'Could not find {app_state_db}, cannot get Application information!') return False
[ "def", "_GetAppDetails", "(", "self", ")", ":", "app_state_db", "=", "'/private/var/mobile/Library/FrontBoard/applicationState.db'", "if", "self", ".", "IsValidFilePath", "(", "app_state_db", ")", ":", "self", ".", "ExportFile", "(", "app_state_db", ",", "'APPS'", ")"...
https://github.com/ydkhatri/mac_apt/blob/729630c8bbe7a73cce3ca330305d3301a919cb07/plugins/helpers/macinfo.py#L2039-L2133
maurosoria/dirsearch
b83e68c8fdf360ab06be670d7b92b263262ee5b1
thirdparty/jinja2/compiler.py
python
CodeGenerator.visit_Include
(self, node: nodes.Include, frame: Frame)
Handles includes.
Handles includes.
[ "Handles", "includes", "." ]
def visit_Include(self, node: nodes.Include, frame: Frame) -> None: """Handles includes.""" if node.ignore_missing: self.writeline("try:") self.indent() func_name = "get_or_select_template" if isinstance(node.template, nodes.Const): if isinstance(node.template.value, str): func_name = "get_template" elif isinstance(node.template.value, (tuple, list)): func_name = "select_template" elif isinstance(node.template, (nodes.Tuple, nodes.List)): func_name = "select_template" self.writeline(f"template = environment.{func_name}(", node) self.visit(node.template, frame) self.write(f", {self.name!r})") if node.ignore_missing: self.outdent() self.writeline("except TemplateNotFound:") self.indent() self.writeline("pass") self.outdent() self.writeline("else:") self.indent() skip_event_yield = False if node.with_context: self.writeline( f"{self.choose_async()}for event in template.root_render_func(" "template.new_context(context.get_all(), True," f" {self.dump_local_context(frame)})):" ) elif self.environment.is_async: self.writeline( "for event in (await template._get_default_module_async())" "._body_stream:" ) else: self.writeline("yield from template._get_default_module()._body_stream") skip_event_yield = True if not skip_event_yield: self.indent() self.simple_write("event", frame) self.outdent() if node.ignore_missing: self.outdent()
[ "def", "visit_Include", "(", "self", ",", "node", ":", "nodes", ".", "Include", ",", "frame", ":", "Frame", ")", "->", "None", ":", "if", "node", ".", "ignore_missing", ":", "self", ".", "writeline", "(", "\"try:\"", ")", "self", ".", "indent", "(", ...
https://github.com/maurosoria/dirsearch/blob/b83e68c8fdf360ab06be670d7b92b263262ee5b1/thirdparty/jinja2/compiler.py#L1030-L1079
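What this compiler method emits is easiest to see from the template side. A small sketch using the public Jinja2 API — the template names and contents are illustrative:

from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "page.html": '{% include "missing.html" ignore missing %}rendered anyway',
    "child.html": '{% include "partial.html" %}',
    "partial.html": "hello {{ name }}",
}))

# ignore_missing wraps the include in the try/except generated above:
print(env.get_template("page.html").render())             # -> 'rendered anyway'
# with_context (the default) passes the caller's variables to the include:
print(env.get_template("child.html").render(name="you"))  # -> 'hello you'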
tensorflow/fold
0e7ca14832a14a5f2009d4e0424783a80e7d7a2c
tensorflow_fold/blocks/block_compiler.py
python
Compiler.root
(self)
return self._root
Returns the root block, or None if `compile()` has not been called.
Returns the root block, or None if `compile()` has not been called.
[ "Returns", "the", "root", "block", "or", "None", "if", "compile", "()", "has", "not", "been", "called", "." ]
def root(self): """Returns the root block, or None if `compile()` has not been called.""" return self._root
[ "def", "root", "(", "self", ")", ":", "return", "self", ".", "_root" ]
https://github.com/tensorflow/fold/blob/0e7ca14832a14a5f2009d4e0424783a80e7d7a2c/tensorflow_fold/blocks/block_compiler.py#L270-L272
ucbdrive/3d-vehicle-tracking
8ee189f6792897651bb56bb2950ce07c9629a89d
3d-tracking/model/tracker_model.py
python
KalmanBoxTracker.update
(self, bbox)
Updates the state vector with observed bbox.
Updates the state vector with observed bbox.
[ "Updates", "the", "state", "vector", "with", "observed", "bbox", "." ]
def update(self, bbox): """ Updates the state vector with observed bbox. """ self.time_since_update = 0 self.history = [] self.hits += 1 self.hit_streak += 1 self.kf.update(convert_bbox_to_z(bbox)) self.lost = False
[ "def", "update", "(", "self", ",", "bbox", ")", ":", "self", ".", "time_since_update", "=", "0", "self", ".", "history", "=", "[", "]", "self", ".", "hits", "+=", "1", "self", ".", "hit_streak", "+=", "1", "self", ".", "kf", ".", "update", "(", "...
https://github.com/ucbdrive/3d-vehicle-tracking/blob/8ee189f6792897651bb56bb2950ce07c9629a89d/3d-tracking/model/tracker_model.py#L55-L64
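update() feeds the Kalman filter through convert_bbox_to_z. A sketch of that conversion as it works in SORT-style trackers — center, scale (area), and aspect ratio — offered as an assumption about the helper, not the project's verbatim code:

import numpy as np

def convert_bbox_to_z(bbox):
    # [x1, y1, x2, y2] -> measurement [cx, cy, s, r] (SORT convention, assumed)
    w = bbox[2] - bbox[0]
    h = bbox[3] - bbox[1]
    x = bbox[0] + w / 2.0   # box center
    y = bbox[1] + h / 2.0
    s = w * h               # scale: the box area
    r = w / float(h)        # aspect ratio
    return np.array([x, y, s, r]).reshape((4, 1))

print(convert_bbox_to_z([0, 0, 10, 20]).ravel())  # [  5.  10. 200.   0.5]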
avocado-framework/avocado
1f9b3192e8ba47d029c33fe21266bd113d17811f
avocado/utils/iso9660.py
python
Iso9660Mount.read
(self, path)
Read data from path :param path: path to read data :type path: str :return: data content :rtype: str
Read data from path
[ "Read", "data", "from", "path" ]
def read(self, path): """ Read data from path :param path: path to read data :type path: str :return: data content :rtype: str """ full_path = os.path.join(self.mnt_dir, path) with open(full_path, 'rb') as file_to_read: return bytes(file_to_read.read())
[ "def", "read", "(", "self", ",", "path", ")", ":", "full_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "mnt_dir", ",", "path", ")", "with", "open", "(", "full_path", ",", "'rb'", ")", "as", "file_to_read", ":", "return", "bytes", "(...
https://github.com/avocado-framework/avocado/blob/1f9b3192e8ba47d029c33fe21266bd113d17811f/avocado/utils/iso9660.py#L324-L335
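A usage sketch, assuming the iso9660() factory helper from the same avocado module picks a working backend (the mount-based one shown in this record requires privileges); the image path and file name are illustrative:

from avocado.utils.iso9660 import iso9660

iso = iso9660("/tmp/image.iso")   # factory returns the best available backend
data = iso.read("README")         # content of a file inside the image
print(data[:40])
iso.close()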
guoruoqian/DetNet_pytorch
735e2c51eea0ee4e91d2ec3f28e441ac4e076551
lib/pycocotools/coco.py
python
COCO.loadImgs
(self, ids=[])
Load imgs with the specified ids. :param ids (int array) : integer ids specifying img :return: imgs (object array) : loaded img objects
Load imgs with the specified ids. :param ids (int array) : integer ids specifying img :return: imgs (object array) : loaded img objects
[ "Load", "imgs", "with", "the", "specified", "ids", ".", ":", "param", "ids", "(", "int", "array", ")", ":", "integer", "ids", "specifying", "img", ":", "return", ":", "imgs", "(", "object", "array", ")", ":", "loaded", "img", "objects" ]
def loadImgs(self, ids=[]): """ Load imgs with the specified ids. :param ids (int array) : integer ids specifying img :return: imgs (object array) : loaded img objects """ if _isArrayLike(ids): return [self.imgs[id] for id in ids] elif type(ids) == int: return [self.imgs[ids]]
[ "def", "loadImgs", "(", "self", ",", "ids", "=", "[", "]", ")", ":", "if", "_isArrayLike", "(", "ids", ")", ":", "return", "[", "self", ".", "imgs", "[", "id", "]", "for", "id", "in", "ids", "]", "elif", "type", "(", "ids", ")", "==", "int", ...
https://github.com/guoruoqian/DetNet_pytorch/blob/735e2c51eea0ee4e91d2ec3f28e441ac4e076551/lib/pycocotools/coco.py#L222-L231
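Typical use pairs loadImgs with getImgIds; the annotation file path below is an assumption for illustration:

from pycocotools.coco import COCO

coco = COCO("annotations/instances_val2017.json")  # path is illustrative
img_ids = coco.getImgIds()[:3]
for img in coco.loadImgs(img_ids):                 # returns a list of img dicts
    print(img["id"], img["file_name"], img["width"], img["height"])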
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/colors.py
python
same_color
(c1, c2)
return (to_rgba_array(c1) == to_rgba_array(c2)).all()
Compare two colors to see if they are the same. Parameters ---------- c1, c2 : Matplotlib colors Returns ------- bool ``True`` if *c1* and *c2* are the same color, otherwise ``False``.
Compare two colors to see if they are the same.
[ "Compare", "two", "colors", "to", "see", "if", "they", "are", "the", "same", "." ]
def same_color(c1, c2): """ Compare two colors to see if they are the same. Parameters ---------- c1, c2 : Matplotlib colors Returns ------- bool ``True`` if *c1* and *c2* are the same color, otherwise ``False``. """ return (to_rgba_array(c1) == to_rgba_array(c2)).all()
[ "def", "same_color", "(", "c1", ",", "c2", ")", ":", "return", "(", "to_rgba_array", "(", "c1", ")", "==", "to_rgba_array", "(", "c2", ")", ")", ".", "all", "(", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/colors.py#L134-L147
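A quick check of the comparison semantics — both arguments are normalized through to_rgba_array first, so different spellings of the same color compare equal:

import matplotlib.colors as mcolors

print(mcolors.same_color("red", (1.0, 0.0, 0.0)))   # True: both are rgba (1, 0, 0, 1)
print(mcolors.same_color("#ff0000", "red"))         # True: hex vs. named spelling
print(mcolors.same_color("black", (0, 0, 0, 0.5)))  # False: alpha differs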