field               type             lengths / classes
nwo                 stringlengths    5..106
sha                 stringlengths    40..40
path                stringlengths    4..174
language            stringclasses    1 value
identifier          stringlengths    1..140
parameters          stringlengths    0..87.7k
argument_list       stringclasses    1 value
return_statement    stringlengths    0..426k
docstring           stringlengths    0..64.3k
docstring_summary   stringlengths    0..26.3k
docstring_tokens    list
function            stringlengths    18..4.83M
function_tokens     list
url                 stringlengths    83..304
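Each record below carries these fields, one field per line, with empty string fields omitted from the preview. As a minimal sketch of how one row fits the schema (assuming a plain Python dict per row purely for illustration; the values are copied from the first record below, and the empty argument_list/docstring entries are an assumption based on the column types), the first record corresponds to roughly:

# Hypothetical dict view of the first record below (zhl2008/awd-platform).
# Field names follow the schema above; the dict form itself is illustrative,
# not the dataset's own loading API.
record = {
    "nwo": "zhl2008/awd-platform",
    "sha": "0416b31abea29743387b10b3914581fbe8e7da5e",
    "path": "web_flaskbb/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py",
    "language": "python",
    "identifier": "is_local_appengine",
    "parameters": "()",
    "argument_list": "",   # single-valued column; assumed to be the empty string
    "return_statement": "return ('APPENGINE_RUNTIME' in os.environ and 'Development/' in os.environ['SERVER_SOFTWARE'])",
    "docstring": "",       # this function has no docstring
    "docstring_summary": "",
    "docstring_tokens": [],
    "function": "def is_local_appengine(): return ('APPENGINE_RUNTIME' in os.environ and 'Development/' in os.environ['SERVER_SOFTWARE'])",
    "function_tokens": ["def", "is_local_appengine", "(", ")", ":", "return", "(", "'APPENGINE_RUNTIME'", "in", "os", ".", "environ", "and", "'Development/'", "in", "os", ".", "environ", "[", "'SERVER_SOFTWARE'", "]", ")"],
    "url": "https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py#L293-L295",
}

# The url field pins the exact file and line range the function text was taken from.
print(record["identifier"], record["url"])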
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py
python
is_local_appengine
()
return ('APPENGINE_RUNTIME' in os.environ and 'Development/' in os.environ['SERVER_SOFTWARE'])
[]
def is_local_appengine(): return ('APPENGINE_RUNTIME' in os.environ and 'Development/' in os.environ['SERVER_SOFTWARE'])
[ "def", "is_local_appengine", "(", ")", ":", "return", "(", "'APPENGINE_RUNTIME'", "in", "os", ".", "environ", "and", "'Development/'", "in", "os", ".", "environ", "[", "'SERVER_SOFTWARE'", "]", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py#L293-L295
facebookresearch/DeepSDF
48c19b8d49ed5293da4edd7da8c3941444bc5cd7
train_deep_sdf.py
python
StepLearningRateSchedule.__init__
(self, initial, interval, factor)
[]
def __init__(self, initial, interval, factor): self.initial = initial self.interval = interval self.factor = factor
[ "def", "__init__", "(", "self", ",", "initial", ",", "interval", ",", "factor", ")", ":", "self", ".", "initial", "=", "initial", "self", ".", "interval", "=", "interval", "self", ".", "factor", "=", "factor" ]
https://github.com/facebookresearch/DeepSDF/blob/48c19b8d49ed5293da4edd7da8c3941444bc5cd7/train_deep_sdf.py#L32-L35
pyqt/examples
843bb982917cecb2350b5f6d7f42c9b7fb142ec1
src/pyqt-official/network/lightmaps.py
python
longitudeFromTile
(tx, zoom)
return lat
[]
def longitudeFromTile(tx, zoom): zn = float(1 << zoom) lat = tx / zn * 360.0 - 180.0 return lat
[ "def", "longitudeFromTile", "(", "tx", ",", "zoom", ")", ":", "zn", "=", "float", "(", "1", "<<", "zoom", ")", "lat", "=", "tx", "/", "zn", "*", "360.0", "-", "180.0", "return", "lat" ]
https://github.com/pyqt/examples/blob/843bb982917cecb2350b5f6d7f42c9b7fb142ec1/src/pyqt-official/network/lightmaps.py#L96-L100
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
istio/datadog_checks/istio/config_models/shared.py
python
SharedConfig._initial_validation
(cls, values)
return validation.core.initialize_config(getattr(validators, 'initialize_shared', identity)(values))
[]
def _initial_validation(cls, values): return validation.core.initialize_config(getattr(validators, 'initialize_shared', identity)(values))
[ "def", "_initial_validation", "(", "cls", ",", "values", ")", ":", "return", "validation", ".", "core", ".", "initialize_config", "(", "getattr", "(", "validators", ",", "'initialize_shared'", ",", "identity", ")", "(", "values", ")", ")" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/istio/datadog_checks/istio/config_models/shared.py#L41-L42
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/src/class/oc_adm_registry.py
python
Registry.service
(self, config)
setter for service property
setter for service property
[ "setter", "for", "service", "property" ]
def service(self, config): ''' setter for service property ''' self.svc = config
[ "def", "service", "(", "self", ",", "config", ")", ":", "self", ".", "svc", "=", "config" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/src/class/oc_adm_registry.py#L81-L83
kubeflow/pipelines
bea751c9259ff0ae85290f873170aae89284ba8e
backend/api/python_http_client/kfp_server_api/models/api_job.py
python
ApiJob.description
(self)
return self._description
Gets the description of this ApiJob. # noqa: E501 :return: The description of this ApiJob. # noqa: E501 :rtype: str
Gets the description of this ApiJob. # noqa: E501
[ "Gets", "the", "description", "of", "this", "ApiJob", ".", "#", "noqa", ":", "E501" ]
def description(self): """Gets the description of this ApiJob. # noqa: E501 :return: The description of this ApiJob. # noqa: E501 :rtype: str """ return self._description
[ "def", "description", "(", "self", ")", ":", "return", "self", ".", "_description" ]
https://github.com/kubeflow/pipelines/blob/bea751c9259ff0ae85290f873170aae89284ba8e/backend/api/python_http_client/kfp_server_api/models/api_job.py#L172-L179
PaddlePaddle/PaddleX
2bab73f81ab54e328204e7871e6ae4a82e719f5d
paddlex/cv/transforms/operators.py
python
ResizeByShort.__init__
(self, short_size=256, max_size=-1, interp='LINEAR')
[]
def __init__(self, short_size=256, max_size=-1, interp='LINEAR'): if not (interp == "RANDOM" or interp in interp_dict): raise ValueError("interp should be one of {}".format( interp_dict.keys())) super(ResizeByShort, self).__init__() self.short_size = short_size self.max_size = max_size self.interp = interp
[ "def", "__init__", "(", "self", ",", "short_size", "=", "256", ",", "max_size", "=", "-", "1", ",", "interp", "=", "'LINEAR'", ")", ":", "if", "not", "(", "interp", "==", "\"RANDOM\"", "or", "interp", "in", "interp_dict", ")", ":", "raise", "ValueError...
https://github.com/PaddlePaddle/PaddleX/blob/2bab73f81ab54e328204e7871e6ae4a82e719f5d/paddlex/cv/transforms/operators.py#L370-L377
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/_pydecimal.py
python
Context.next_minus
(self, a)
return a.next_minus(context=self)
Returns the largest representable number smaller than a. >>> c = ExtendedContext.copy() >>> c.Emin = -999 >>> c.Emax = 999 >>> ExtendedContext.next_minus(Decimal('1')) Decimal('0.999999999') >>> c.next_minus(Decimal('1E-1007')) Decimal('0E-1007') >>> ExtendedContext.next_minus(Decimal('-1.00000003')) Decimal('-1.00000004') >>> c.next_minus(Decimal('Infinity')) Decimal('9.99999999E+999') >>> c.next_minus(1) Decimal('0.999999999')
Returns the largest representable number smaller than a.
[ "Returns", "the", "largest", "representable", "number", "smaller", "than", "a", "." ]
def next_minus(self, a): """Returns the largest representable number smaller than a. >>> c = ExtendedContext.copy() >>> c.Emin = -999 >>> c.Emax = 999 >>> ExtendedContext.next_minus(Decimal('1')) Decimal('0.999999999') >>> c.next_minus(Decimal('1E-1007')) Decimal('0E-1007') >>> ExtendedContext.next_minus(Decimal('-1.00000003')) Decimal('-1.00000004') >>> c.next_minus(Decimal('Infinity')) Decimal('9.99999999E+999') >>> c.next_minus(1) Decimal('0.999999999') """ a = _convert_other(a, raiseit=True) return a.next_minus(context=self)
[ "def", "next_minus", "(", "self", ",", "a", ")", ":", "a", "=", "_convert_other", "(", "a", ",", "raiseit", "=", "True", ")", "return", "a", ".", "next_minus", "(", "context", "=", "self", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/_pydecimal.py#L4975-L4993
dwadden/dygiepp
8faac5711489d4f5fb1189f8344c8ffb5548d2cb
scripts/new-dataset/annotated_doc.py
python
AnnotatedDoc.char_to_token
(self)
Does the heavy lifting for converting brat format to dygiepp format. Gets the token start and end indices for entities. Raises a warning if no alignment can be found for an entity, as the entity will be dropped. NOTE: End character indices from brat are non-inclusive, like the indexing in python. This is different from DyGIE++'s token indexing, where the end indices are inclusive.
Does the heavy lifting for converting brat format to dygiepp format. Gets the token start and end indices for entities. Raises a warning if no alignment can be found for an entity, as the entity will be dropped.
[ "Does", "the", "heavy", "lifting", "for", "converting", "brat", "format", "to", "dygiepp", "format", ".", "Gets", "the", "token", "start", "and", "end", "indices", "for", "entities", ".", "Raises", "a", "warning", "if", "no", "alignment", "can", "be", "fou...
def char_to_token(self): """ Does the heavy lifting for converting brat format to dygiepp format. Gets the token start and end indices for entities. Raises a warning if no alignment can be found for an entity, as the entity will be dropped. NOTE: End character indices from brat are non-inclusive, like the indexing in python. This is different from DyGIE++'s token indexing, where the end indices are inclusive. """ # Tokenize the text with spacy tok_text = self.nlp(self.text) # Get the alignment for each entity ent_list_tokens = [] for ent in self.ents: # Find the start token start_tok = [tok for tok in tok_text if tok.idx == ent.char_start] if len(start_tok) == 0: # If the entity can't be found because there isn't an exact # match in the list, warn that it will be dropped warnings.warn(f'The entity {ent.text} (ID: {ent.ID}) cannot ' 'be aligned to the tokenization, and will be dropped.') self.dropped_ents += 1 else: # Get token start index ent_tok_start = start_tok[0].i # Get the number of tokens in ent processed_ent = self.nlp(ent.text) num_tok = len(processed_ent) if num_tok > 1: ent_tok_end = ent_tok_start + num_tok - 1 else: ent_tok_end = ent_tok_start # Double-check that the tokens from the annotation file match up # with the tokens in the source text. ent_tok_text = [tok.text for tok in processed_ent] doc_tok_text = [tok.text for i, tok in enumerate(tok_text) if i >= ent_tok_start and i <= ent_tok_end] if ent_tok_text != doc_tok_text: msg = ('The annotation file and source document disagree ' f'on the tokens for entity {ent.text} (ID: ' f'{ent.ID}). This entity will be dropped.') warnings.warn(msg) self.dropped_ents += 1 continue # Set the token start and end chars ent.set_tok_start_end(ent_tok_start, ent_tok_end) # Append to list to keep ent_list_tokens.append(ent) # Set the list of entities that had token matches as ents for doc self.ents = ent_list_tokens print(f'Completed doc {self.doc_key}. {self.dropped_ents} of ' f'{self.total_original_ents} entities ' 'were dropped due to tokenization mismatches.')
[ "def", "char_to_token", "(", "self", ")", ":", "# Tokenize the text with spacy", "tok_text", "=", "self", ".", "nlp", "(", "self", ".", "text", ")", "# Get the alignment for each entity", "ent_list_tokens", "=", "[", "]", "for", "ent", "in", "self", ".", "ents",...
https://github.com/dwadden/dygiepp/blob/8faac5711489d4f5fb1189f8344c8ffb5548d2cb/scripts/new-dataset/annotated_doc.py#L170-L236
spesmilo/electrum
bdbd59300fbd35b01605e66145458e5f396108e8
electrum/interface.py
python
_match_hostname
(name, val)
return val.startswith('*.') and name.endswith(val[1:])
[]
def _match_hostname(name, val): if val == name: return True return val.startswith('*.') and name.endswith(val[1:])
[ "def", "_match_hostname", "(", "name", ",", "val", ")", ":", "if", "val", "==", "name", ":", "return", "True", "return", "val", ".", "startswith", "(", "'*.'", ")", "and", "name", ".", "endswith", "(", "val", "[", "1", ":", "]", ")" ]
https://github.com/spesmilo/electrum/blob/bdbd59300fbd35b01605e66145458e5f396108e8/electrum/interface.py#L1114-L1118
Pymol-Scripts/Pymol-script-repo
bcd7bb7812dc6db1595953dfa4471fa15fb68c77
modules/ADT/mglutil/gui/BasicWidgets/Tk/customizedWidgets.py
python
SliderWidget.Callbacks
(self)
Implement call to all callbacks
Implement call to all callbacks
[ "Implement", "call", "to", "all", "callbacks" ]
def Callbacks(self): """Implement call to all callbacks""" if self.lookup: val = self.lookup[int(round(self.val))] else: val = self.val for f in self.callbacks: f(val)
[ "def", "Callbacks", "(", "self", ")", ":", "if", "self", ".", "lookup", ":", "val", "=", "self", ".", "lookup", "[", "int", "(", "round", "(", "self", ".", "val", ")", ")", "]", "else", ":", "val", "=", "self", ".", "val", "for", "f", "in", "...
https://github.com/Pymol-Scripts/Pymol-script-repo/blob/bcd7bb7812dc6db1595953dfa4471fa15fb68c77/modules/ADT/mglutil/gui/BasicWidgets/Tk/customizedWidgets.py#L942-L950
Wramberg/TerminalView
b0856fa62c1fdd3ad968bf6b8aaa344962b65adf
GateOne/terminal.py
python
Terminal._opt_handler
(self, chars)
Optional special escape sequence handler for sequences matching RE_OPT_SEQ. If CALLBACK_OPT is defined it will be called like so:: self.callbacks[CALLBACK_OPT](chars) Applications can use this escape sequence to define whatever special handlers they like. It works like this: If an escape sequence is encountered matching RE_OPT_SEQ this method will be called with the inbetween *chars* (e.g. \x1b]_;<chars>\x07) as the argument. Applications can then do what they wish with *chars*. .. note:: I added this functionality so that plugin authors would have a mechanism to communicate with terminal applications. See the SSH plugin for an example of how this can be done (there's channels of communication amongst ssh_connect.py, ssh.js, and ssh.py).
Optional special escape sequence handler for sequences matching RE_OPT_SEQ. If CALLBACK_OPT is defined it will be called like so::
[ "Optional", "special", "escape", "sequence", "handler", "for", "sequences", "matching", "RE_OPT_SEQ", ".", "If", "CALLBACK_OPT", "is", "defined", "it", "will", "be", "called", "like", "so", "::" ]
def _opt_handler(self, chars): """ Optional special escape sequence handler for sequences matching RE_OPT_SEQ. If CALLBACK_OPT is defined it will be called like so:: self.callbacks[CALLBACK_OPT](chars) Applications can use this escape sequence to define whatever special handlers they like. It works like this: If an escape sequence is encountered matching RE_OPT_SEQ this method will be called with the inbetween *chars* (e.g. \x1b]_;<chars>\x07) as the argument. Applications can then do what they wish with *chars*. .. note:: I added this functionality so that plugin authors would have a mechanism to communicate with terminal applications. See the SSH plugin for an example of how this can be done (there's channels of communication amongst ssh_connect.py, ssh.js, and ssh.py). """ try: for callback in self.callbacks[CALLBACK_OPT].values(): callback(chars) except TypeError: # High likelyhood that nothing is defined. No biggie. pass
[ "def", "_opt_handler", "(", "self", ",", "chars", ")", ":", "try", ":", "for", "callback", "in", "self", ".", "callbacks", "[", "CALLBACK_OPT", "]", ".", "values", "(", ")", ":", "callback", "(", "chars", ")", "except", "TypeError", ":", "# High likelyho...
https://github.com/Wramberg/TerminalView/blob/b0856fa62c1fdd3ad968bf6b8aaa344962b65adf/GateOne/terminal.py#L4061-L4087
QCoDeS/Qcodes
3cda2cef44812e2aa4672781f2423bf5f816f9f9
qcodes/instrument/parameter.py
python
_Cache.set
(self, value: ParamDataType)
Set the cached value of the parameter without invoking the ``set_cmd`` of the parameter (if it has one). For example, in case of an instrument parameter, calling :meth:`cache.set` as opposed to calling ``set`` will only change the internally-stored value of the parameter (that is available when calling ``cache.get()`` or ``get_latest()``), and will NOT pass that value to the instrument. Note that this method also respects all the validation, parsing, offsetting, etc that the parameter's ``set`` method respects. However, if the parameter has :attr:`step` defined, unlike the ``set`` method, this method does not perform setting the parameter step-by-step. Args: value: new value for the parameter
Set the cached value of the parameter without invoking the ``set_cmd`` of the parameter (if it has one). For example, in case of an instrument parameter, calling :meth:`cache.set` as opposed to calling ``set`` will only change the internally-stored value of the parameter (that is available when calling ``cache.get()`` or ``get_latest()``), and will NOT pass that value to the instrument.
[ "Set", "the", "cached", "value", "of", "the", "parameter", "without", "invoking", "the", "set_cmd", "of", "the", "parameter", "(", "if", "it", "has", "one", ")", ".", "For", "example", "in", "case", "of", "an", "instrument", "parameter", "calling", ":", ...
def set(self, value: ParamDataType) -> None: """ Set the cached value of the parameter without invoking the ``set_cmd`` of the parameter (if it has one). For example, in case of an instrument parameter, calling :meth:`cache.set` as opposed to calling ``set`` will only change the internally-stored value of the parameter (that is available when calling ``cache.get()`` or ``get_latest()``), and will NOT pass that value to the instrument. Note that this method also respects all the validation, parsing, offsetting, etc that the parameter's ``set`` method respects. However, if the parameter has :attr:`step` defined, unlike the ``set`` method, this method does not perform setting the parameter step-by-step. Args: value: new value for the parameter """ self._parameter.validate(value) raw_value = self._parameter._from_value_to_raw_value(value) self._update_with(value=value, raw_value=raw_value)
[ "def", "set", "(", "self", ",", "value", ":", "ParamDataType", ")", "->", "None", ":", "self", ".", "_parameter", ".", "validate", "(", "value", ")", "raw_value", "=", "self", ".", "_parameter", ".", "_from_value_to_raw_value", "(", "value", ")", "self", ...
https://github.com/QCoDeS/Qcodes/blob/3cda2cef44812e2aa4672781f2423bf5f816f9f9/qcodes/instrument/parameter.py#L2236-L2255
steeve/xbmctorrent
e6bcb1037668959e1e3cb5ba8cf3e379c6638da9
resources/site-packages/xbmctorrent/player.py
python
TorrentPlayer._wait_t2h_startup
(self, t2h)
return False
[]
def _wait_t2h_startup(self, t2h): start = time.time() while (time.time() - start) < TORRENT2HTTP_TIMEOUT: try: t2h("status") return True except: pass xbmc.sleep(TORRENT2HTTP_POLL) return False
[ "def", "_wait_t2h_startup", "(", "self", ",", "t2h", ")", ":", "start", "=", "time", ".", "time", "(", ")", "while", "(", "time", ".", "time", "(", ")", "-", "start", ")", "<", "TORRENT2HTTP_TIMEOUT", ":", "try", ":", "t2h", "(", "\"status\"", ")", ...
https://github.com/steeve/xbmctorrent/blob/e6bcb1037668959e1e3cb5ba8cf3e379c6638da9/resources/site-packages/xbmctorrent/player.py#L173-L182
FSecureLABS/Jandroid
e31d0dab58a2bfd6ed8e0a387172b8bd7c893436
libs/platform-tools/platform-tools_linux/systrace/catapult/devil/devil/android/sdk/gce_adb_wrapper.py
python
GceAdbWrapper.Pull
(self, remote, local, **kwargs)
Pulls a file from the gce instance to the host. Args: remote: Path on the instance filesystem. local: Path on the host filesystem.
Pulls a file from the gce instance to the host.
[ "Pulls", "a", "file", "from", "the", "gce", "instance", "to", "the", "host", "." ]
def Pull(self, remote, local, **kwargs): """Pulls a file from the gce instance to the host. Args: remote: Path on the instance filesystem. local: Path on the host filesystem. """ cmd = [ 'scp', '-p', '-r', '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', 'root@%s:%s' % (self._instance_ip, remote), local, ] status, _ = cmd_helper.GetCmdStatusAndOutput(cmd) if status: raise device_errors.AdbCommandFailedError( cmd, 'File not reachable on host: %s' % local, device_serial=str(self)) try: adb_wrapper.VerifyLocalFileExists(local) except (subprocess.CalledProcessError, IOError): logger.exception('Error when pulling files from android instance.') raise device_errors.AdbCommandFailedError( cmd, 'File not reachable on host: %s' % local, device_serial=str(self))
[ "def", "Pull", "(", "self", ",", "remote", ",", "local", ",", "*", "*", "kwargs", ")", ":", "cmd", "=", "[", "'scp'", ",", "'-p'", ",", "'-r'", ",", "'-o'", ",", "'UserKnownHostsFile=/dev/null'", ",", "'-o'", ",", "'StrictHostKeyChecking=no'", ",", "'roo...
https://github.com/FSecureLABS/Jandroid/blob/e31d0dab58a2bfd6ed8e0a387172b8bd7c893436/libs/platform-tools/platform-tools_linux/systrace/catapult/devil/devil/android/sdk/gce_adb_wrapper.py#L93-L121
Nuitka/Nuitka
39262276993757fa4e299f497654065600453fc9
nuitka/tools/quality/autoformat/Autoformat.py
python
_cleanupClangFormat
(filename)
Call clang-format on a given filename to format C code. Args: filename: What file to re-format.
Call clang-format on a given filename to format C code.
[ "Call", "clang", "-", "format", "on", "a", "given", "filename", "to", "format", "C", "code", "." ]
def _cleanupClangFormat(filename): """Call clang-format on a given filename to format C code. Args: filename: What file to re-format. """ # Using global here, as this is really a singleton, in # the form of a module, pylint: disable=global-statement global warned_clang_format clang_format_path = ( getExecutablePath("clang-format-12") or getExecutablePath("clang-format-11") or getExecutablePath("clang-format-10") or getExecutablePath("clang-format-9") or getExecutablePath("clang-format-8") or getExecutablePath("clang-format-7") ) # Extra ball on Windows, check default installations paths in MSVC and LLVM too. if not clang_format_path and getOS() == "Windows": with withEnvironmentPathAdded( "PATH", r"C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\Llvm\bin", r"C:\Program Files\LLVM\bin", ): clang_format_path = getExecutablePath("clang-format") if clang_format_path: subprocess.call( [ clang_format_path, "-i", "-style={BasedOnStyle: llvm, IndentWidth: 4, ColumnLimit: 120}", filename, ] ) else: if not warned_clang_format: general.warning("Need to install LLVM for C files format.") warned_clang_format = True
[ "def", "_cleanupClangFormat", "(", "filename", ")", ":", "# Using global here, as this is really a singleton, in", "# the form of a module, pylint: disable=global-statement", "global", "warned_clang_format", "clang_format_path", "=", "(", "getExecutablePath", "(", "\"clang-format-12\""...
https://github.com/Nuitka/Nuitka/blob/39262276993757fa4e299f497654065600453fc9/nuitka/tools/quality/autoformat/Autoformat.py#L457-L498
4shadoww/hakkuframework
409a11fc3819d251f86faa3473439f8c19066a21
lib/future/backports/misc.py
python
Counter.copy
(self)
return self.__class__(self)
Return a shallow copy.
Return a shallow copy.
[ "Return", "a", "shallow", "copy", "." ]
def copy(self): 'Return a shallow copy.' return self.__class__(self)
[ "def", "copy", "(", "self", ")", ":", "return", "self", ".", "__class__", "(", "self", ")" ]
https://github.com/4shadoww/hakkuframework/blob/409a11fc3819d251f86faa3473439f8c19066a21/lib/future/backports/misc.py#L521-L523
tracim/tracim
a0e9746fde5a4c45b4e0f0bfa2caf9522b8c4e21
backend/tracim_backend/lib/webdav/resources.py
python
WorkspaceResource.delete
(self)
For now, it is not possible to delete a workspace through the webdav client.
For now, it is not possible to delete a workspace through the webdav client.
[ "For", "now", "it", "is", "not", "possible", "to", "delete", "a", "workspace", "through", "the", "webdav", "client", "." ]
def delete(self): """For now, it is not possible to delete a workspace through the webdav client.""" # FIXME - G.M - 2018-12-11 - For an unknown reason current_workspace # of tracim_context is here invalid. self.tracim_context._current_workspace = self.workspace try: can_delete_workspace.check(self.tracim_context) except TracimException as exc: raise DAVError(HTTP_FORBIDDEN, contextinfo=str(exc)) raise DAVError(HTTP_FORBIDDEN, "Workspace deletion is not allowed through webdav")
[ "def", "delete", "(", "self", ")", ":", "# FIXME - G.M - 2018-12-11 - For an unknown reason current_workspace", "# of tracim_context is here invalid.", "self", ".", "tracim_context", ".", "_current_workspace", "=", "self", ".", "workspace", "try", ":", "can_delete_workspace", ...
https://github.com/tracim/tracim/blob/a0e9746fde5a4c45b4e0f0bfa2caf9522b8c4e21/backend/tracim_backend/lib/webdav/resources.py#L620-L629
exodrifter/unity-python
bef6e4e9ddfbbf1eaf7acbbb973e9aa3dd64a20d
Lib/decimal.py
python
Decimal.is_infinite
(self)
return self._exp == 'F'
Return True if self is infinite; otherwise return False.
Return True if self is infinite; otherwise return False.
[ "Return", "True", "if", "self", "is", "infinite", ";", "otherwise", "return", "False", "." ]
def is_infinite(self): """Return True if self is infinite; otherwise return False.""" return self._exp == 'F'
[ "def", "is_infinite", "(", "self", ")", ":", "return", "self", ".", "_exp", "==", "'F'" ]
https://github.com/exodrifter/unity-python/blob/bef6e4e9ddfbbf1eaf7acbbb973e9aa3dd64a20d/Lib/decimal.py#L3027-L3029
mrlesmithjr/Ansible
d44f0dc0d942bdf3bf7334b307e6048f0ee16e36
roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/_vendor/distlib/util.py
python
convert_path
(pathname)
return os.path.join(*paths)
Return 'pathname' as a name that will work on the native filesystem. The path is split on '/' and put back together again using the current directory separator. Needed because filenames in the setup script are always supplied in Unix style, and have to be converted to the local convention before we can actually use them in the filesystem. Raises ValueError on non-Unix-ish systems if 'pathname' either starts or ends with a slash.
Return 'pathname' as a name that will work on the native filesystem.
[ "Return", "pathname", "as", "a", "name", "that", "will", "work", "on", "the", "native", "filesystem", "." ]
def convert_path(pathname): """Return 'pathname' as a name that will work on the native filesystem. The path is split on '/' and put back together again using the current directory separator. Needed because filenames in the setup script are always supplied in Unix style, and have to be converted to the local convention before we can actually use them in the filesystem. Raises ValueError on non-Unix-ish systems if 'pathname' either starts or ends with a slash. """ if os.sep == '/': return pathname if not pathname: return pathname if pathname[0] == '/': raise ValueError("path '%s' cannot be absolute" % pathname) if pathname[-1] == '/': raise ValueError("path '%s' cannot end with '/'" % pathname) paths = pathname.split('/') while os.curdir in paths: paths.remove(os.curdir) if not paths: return os.curdir return os.path.join(*paths)
[ "def", "convert_path", "(", "pathname", ")", ":", "if", "os", ".", "sep", "==", "'/'", ":", "return", "pathname", "if", "not", "pathname", ":", "return", "pathname", "if", "pathname", "[", "0", "]", "==", "'/'", ":", "raise", "ValueError", "(", "\"path...
https://github.com/mrlesmithjr/Ansible/blob/d44f0dc0d942bdf3bf7334b307e6048f0ee16e36/roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/_vendor/distlib/util.py#L310-L334
pika/pika
12dcdf15d0932c388790e0fa990810bfd21b1a32
examples/twisted_service.py
python
PikaProtocol.send
(self)
If connected, send all waiting messages.
If connected, send all waiting messages.
[ "If", "connected", "send", "all", "waiting", "messages", "." ]
def send(self): """If connected, send all waiting messages.""" if self.connected: while self.factory.queued_messages: ( exchange, r_key, message, ) = self.factory.queued_messages.pop(0) self.send_message(exchange, r_key, message)
[ "def", "send", "(", "self", ")", ":", "if", "self", ".", "connected", ":", "while", "self", ".", "factory", ".", "queued_messages", ":", "(", "exchange", ",", "r_key", ",", "message", ",", ")", "=", "self", ".", "factory", ".", "queued_messages", ".", ...
https://github.com/pika/pika/blob/12dcdf15d0932c388790e0fa990810bfd21b1a32/examples/twisted_service.py#L157-L166
OpenNMT/OpenNMT-tf
59a4dfdb911d0570ba1096b7a0a7b9fc5c7844bf
opennmt/tokenizers/tokenizer.py
python
Tokenizer._tokenize_tensor
(self, text, training)
Tokenizes a tensor. When not overriden, this default implementation calls the string-based tokenization. Args: text: A 0-D or 1-D string ``tf.Tensor``. training: Set to ``False`` to tokenize for inference. Returns: A 1-D string ``tf.Tensor``, or a 2-D string ``tf.RaggedTensor`` if the input was a batch of text.
Tokenizes a tensor.
[ "Tokenizes", "a", "tensor", "." ]
def _tokenize_tensor(self, text, training): """Tokenizes a tensor. When not overriden, this default implementation calls the string-based tokenization. Args: text: A 0-D or 1-D string ``tf.Tensor``. training: Set to ``False`` to tokenize for inference. Returns: A 1-D string ``tf.Tensor``, or a 2-D string ``tf.RaggedTensor`` if the input was a batch of text. """ def _python_wrapper(string_t): string = tf.compat.as_text(string_t.numpy()) tokens = self._tokenize_string(string, training) return tf.constant(tokens, dtype=tf.string) def _python_wrapper_batch(batch_text): batch_text = list(map(tf.compat.as_text, batch_text.numpy())) batch_tokens = self._tokenize_string_batch(batch_text, training) flat_tokens = tf.constant(tf.nest.flatten(batch_tokens), dtype=tf.string) lengths = tf.constant(list(map(len, batch_tokens)), dtype=tf.int32) return flat_tokens, lengths rank = text.shape.rank if rank == 0: tokens = tf.py_function(_python_wrapper, [text], tf.string) tokens.set_shape([None]) return tokens elif rank == 1: flat_tokens, lengths = tf.py_function( _python_wrapper_batch, [text], (tf.string, tf.int32) ) flat_tokens.set_shape([None]) lengths.set_shape([None]) return tf.RaggedTensor.from_row_lengths(flat_tokens, lengths) else: raise ValueError("Unsupported tensor rank %d for tokenization" % rank)
[ "def", "_tokenize_tensor", "(", "self", ",", "text", ",", "training", ")", ":", "def", "_python_wrapper", "(", "string_t", ")", ":", "string", "=", "tf", ".", "compat", ".", "as_text", "(", "string_t", ".", "numpy", "(", ")", ")", "tokens", "=", "self"...
https://github.com/OpenNMT/OpenNMT-tf/blob/59a4dfdb911d0570ba1096b7a0a7b9fc5c7844bf/opennmt/tokenizers/tokenizer.py#L149-L189
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/plugins/attack/db/sqlmap/plugins/dbms/oracle/fingerprint.py
python
Fingerprint.checkDbms
(self)
[]
def checkDbms(self): if not conf.extensiveFp and Backend.isDbmsWithin(ORACLE_ALIASES): setDbms(DBMS.ORACLE) self.getBanner() return True infoMsg = "testing %s" % DBMS.ORACLE logger.info(infoMsg) # NOTE: SELECT ROWNUM=ROWNUM FROM DUAL does not work connecting # directly to the Oracle database if conf.direct: result = True else: result = inject.checkBooleanExpression("ROWNUM=ROWNUM") if result: infoMsg = "confirming %s" % DBMS.ORACLE logger.info(infoMsg) # NOTE: SELECT LENGTH(SYSDATE)=LENGTH(SYSDATE) FROM DUAL does # not work connecting directly to the Oracle database if conf.direct: result = True else: result = inject.checkBooleanExpression("LENGTH(SYSDATE)=LENGTH(SYSDATE)") if not result: warnMsg = "the back-end DBMS is not %s" % DBMS.ORACLE logger.warn(warnMsg) return False setDbms(DBMS.ORACLE) self.getBanner() if not conf.extensiveFp: return True infoMsg = "actively fingerprinting %s" % DBMS.ORACLE logger.info(infoMsg) # Reference: https://en.wikipedia.org/wiki/Oracle_Database for version in ("12c", "11g", "10g", "9i", "8i"): number = int(re.search(r"([\d]+)", version).group(1)) output = inject.checkBooleanExpression("%d=(SELECT SUBSTR((VERSION),1,%d) FROM SYS.PRODUCT_COMPONENT_VERSION WHERE ROWNUM=1)" % (number, 1 if number < 10 else 2)) if output: Backend.setVersion(version) break return True else: warnMsg = "the back-end DBMS is not %s" % DBMS.ORACLE logger.warn(warnMsg) return False
[ "def", "checkDbms", "(", "self", ")", ":", "if", "not", "conf", ".", "extensiveFp", "and", "Backend", ".", "isDbmsWithin", "(", "ORACLE_ALIASES", ")", ":", "setDbms", "(", "DBMS", ".", "ORACLE", ")", "self", ".", "getBanner", "(", ")", "return", "True", ...
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/attack/db/sqlmap/plugins/dbms/oracle/fingerprint.py#L60-L119
n1nj4sec/pupy
a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39
pupy/network/lib/picocmd/picocmd.py
python
CheckConnect.unpack
(data)
return CheckConnect( host, port_start, port_end ), struct.calcsize('IHH')
[]
def unpack(data): host, port_start, port_end = struct.unpack_from('IHH', data) host = netaddr.IPAddress(host) return CheckConnect( host, port_start, port_end ), struct.calcsize('IHH')
[ "def", "unpack", "(", "data", ")", ":", "host", ",", "port_start", ",", "port_end", "=", "struct", ".", "unpack_from", "(", "'IHH'", ",", "data", ")", "host", "=", "netaddr", ".", "IPAddress", "(", "host", ")", "return", "CheckConnect", "(", "host", ",...
https://github.com/n1nj4sec/pupy/blob/a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39/pupy/network/lib/picocmd/picocmd.py#L389-L395
GNS3/gns3-gui
da8adbaa18ab60e053af2a619efd468f4c8950f3
gns3/compute_manager.py
python
ComputeManager._controllerDisconnectedSlot
(self)
Called when disconnected from a compute.
Called when disconnected from a compute.
[ "Called", "when", "disconnected", "from", "a", "compute", "." ]
def _controllerDisconnectedSlot(self): """ Called when disconnected from a compute. """ for compute_id in list(self._computes): del self._computes[compute_id] self.deleted_signal.emit(compute_id)
[ "def", "_controllerDisconnectedSlot", "(", "self", ")", ":", "for", "compute_id", "in", "list", "(", "self", ".", "_computes", ")", ":", "del", "self", ".", "_computes", "[", "compute_id", "]", "self", ".", "deleted_signal", ".", "emit", "(", "compute_id", ...
https://github.com/GNS3/gns3-gui/blob/da8adbaa18ab60e053af2a619efd468f4c8950f3/gns3/compute_manager.py#L80-L87
foxbook/atap
3e17489f57e1e17c4fba84fe68ad6c7f44fa05ad
snippets/ch10/preprocessor.py
python
ParallelPreprocessor.on_result
(self, result)
Appends the results to the master results list.
Appends the results to the master results list.
[ "Appends", "the", "results", "to", "the", "master", "results", "list", "." ]
def on_result(self, result): """ Appends the results to the master results list. """ self.results.append(result)
[ "def", "on_result", "(", "self", ",", "result", ")", ":", "self", ".", "results", ".", "append", "(", "result", ")" ]
https://github.com/foxbook/atap/blob/3e17489f57e1e17c4fba84fe68ad6c7f44fa05ad/snippets/ch10/preprocessor.py#L209-L213
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/django-1.3/django/contrib/gis/sitemaps/kml.py
python
KMLSitemap._build_kml_sources
(self, sources)
return kml_sources
Goes through the given sources and returns a 3-tuple of the application label, module name, and field name of every GeometryField encountered in the sources. If no sources are provided, then all models.
Goes through the given sources and returns a 3-tuple of the application label, module name, and field name of every GeometryField encountered in the sources.
[ "Goes", "through", "the", "given", "sources", "and", "returns", "a", "3", "-", "tuple", "of", "the", "application", "label", "module", "name", "and", "field", "name", "of", "every", "GeometryField", "encountered", "in", "the", "sources", "." ]
def _build_kml_sources(self, sources): """ Goes through the given sources and returns a 3-tuple of the application label, module name, and field name of every GeometryField encountered in the sources. If no sources are provided, then all models. """ kml_sources = [] if sources is None: sources = models.get_models() for source in sources: if isinstance(source, models.base.ModelBase): for field in source._meta.fields: if isinstance(field, GeometryField): kml_sources.append((source._meta.app_label, source._meta.module_name, field.name)) elif isinstance(source, (list, tuple)): if len(source) != 3: raise ValueError('Must specify a 3-tuple of (app_label, module_name, field_name).') kml_sources.append(source) else: raise TypeError('KML Sources must be a model or a 3-tuple.') return kml_sources
[ "def", "_build_kml_sources", "(", "self", ",", "sources", ")", ":", "kml_sources", "=", "[", "]", "if", "sources", "is", "None", ":", "sources", "=", "models", ".", "get_models", "(", ")", "for", "source", "in", "sources", ":", "if", "isinstance", "(", ...
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.3/django/contrib/gis/sitemaps/kml.py#L17-L41
criteo/biggraphite
1f647ada6b3f2b2f3fb4e59d326f73a2c891fc30
biggraphite/drivers/elasticsearch.py
python
_parse_wildcard_component
(component)
return value
Given a complex component, this builds a wildcard constraint.
Given a complex component, this builds a wildcard constraint.
[ "Given", "a", "complex", "component", "this", "builds", "a", "wildcard", "constraint", "." ]
def _parse_wildcard_component(component): """Given a complex component, this builds a wildcard constraint.""" value = "" for subcomponent in component: if isinstance(subcomponent, bg_glob.AnySequence): value += "*" elif isinstance(subcomponent, six.string_types): value += subcomponent elif isinstance(subcomponent, bg_glob.AnyChar): value += "?" else: raise Error("Unhandled type '%s'" % subcomponent) return value
[ "def", "_parse_wildcard_component", "(", "component", ")", ":", "value", "=", "\"\"", "for", "subcomponent", "in", "component", ":", "if", "isinstance", "(", "subcomponent", ",", "bg_glob", ".", "AnySequence", ")", ":", "value", "+=", "\"*\"", "elif", "isinsta...
https://github.com/criteo/biggraphite/blob/1f647ada6b3f2b2f3fb4e59d326f73a2c891fc30/biggraphite/drivers/elasticsearch.py#L232-L244
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
mysql/datadog_checks/mysql/config.py
python
MySQLConfig.configuration_checks
(self)
[]
def configuration_checks(self): if self.queries or self.max_custom_queries != DEFAULT_MAX_CUSTOM_QUERIES: self.log.warning( 'The options `queries` and `max_custom_queries` are deprecated and will be ' 'removed in a future release. Use the `custom_queries` option instead.' ) if not (self.host and self.user) and not self.defaults_file: raise ConfigurationError("Mysql host and user or a defaults_file are needed.") if (self.host or self.user or self.port or self.mysql_sock) and self.defaults_file: self.log.warning( "Both connection details and defaults_file have been specified, connection details will be ignored" ) if self.mysql_sock and self.host: self.log.warning("Both socket and host have been specified, socket will be used")
[ "def", "configuration_checks", "(", "self", ")", ":", "if", "self", ".", "queries", "or", "self", ".", "max_custom_queries", "!=", "DEFAULT_MAX_CUSTOM_QUERIES", ":", "self", ".", "log", ".", "warning", "(", "'The options `queries` and `max_custom_queries` are deprecated...
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/mysql/datadog_checks/mysql/config.py#L66-L82
pandas-dev/pandas
5ba7d714014ae8feaccc0dd4a98890828cf2832d
pandas/io/json/_table_schema.py
python
as_json_table_type
(x: DtypeObj)
Convert a NumPy / pandas type to its corresponding json_table. Parameters ---------- x : np.dtype or ExtensionDtype Returns ------- str the Table Schema data types Notes ----- This table shows the relationship between NumPy / pandas dtypes, and Table Schema dtypes. ============== ================= Pandas type Table Schema type ============== ================= int64 integer float64 number bool boolean datetime64[ns] datetime timedelta64[ns] duration object str categorical any =============== =================
Convert a NumPy / pandas type to its corresponding json_table.
[ "Convert", "a", "NumPy", "/", "pandas", "type", "to", "its", "corresponding", "json_table", "." ]
def as_json_table_type(x: DtypeObj) -> str: """ Convert a NumPy / pandas type to its corresponding json_table. Parameters ---------- x : np.dtype or ExtensionDtype Returns ------- str the Table Schema data types Notes ----- This table shows the relationship between NumPy / pandas dtypes, and Table Schema dtypes. ============== ================= Pandas type Table Schema type ============== ================= int64 integer float64 number bool boolean datetime64[ns] datetime timedelta64[ns] duration object str categorical any =============== ================= """ if is_integer_dtype(x): return "integer" elif is_bool_dtype(x): return "boolean" elif is_numeric_dtype(x): return "number" elif is_datetime64_dtype(x) or is_datetime64tz_dtype(x) or is_period_dtype(x): return "datetime" elif is_timedelta64_dtype(x): return "duration" elif is_categorical_dtype(x): return "any" elif is_extension_array_dtype(x): return "any" elif is_string_dtype(x): return "string" else: return "any"
[ "def", "as_json_table_type", "(", "x", ":", "DtypeObj", ")", "->", "str", ":", "if", "is_integer_dtype", "(", "x", ")", ":", "return", "\"integer\"", "elif", "is_bool_dtype", "(", "x", ")", ":", "return", "\"boolean\"", "elif", "is_numeric_dtype", "(", "x", ...
https://github.com/pandas-dev/pandas/blob/5ba7d714014ae8feaccc0dd4a98890828cf2832d/pandas/io/json/_table_schema.py#L48-L95
SteveDoyle2/pyNastran
eda651ac2d4883d95a34951f8a002ff94f642a1a
pyNastran/bdf/bdf_interface/add_card.py
python
AddCards.add_cbeam3
(self, eid, pid, nids, x, g0, wa, wb, wc, tw, s, comment='')
return elem
Creates a CBEAM3 card
Creates a CBEAM3 card
[ "Creates", "a", "CBEAM3", "card" ]
def add_cbeam3(self, eid, pid, nids, x, g0, wa, wb, wc, tw, s, comment='') -> CBEAM3: """Creates a CBEAM3 card""" elem = CBEAM3(eid, pid, nids, x, g0, wa, wb, wc, tw, s, comment=comment) self._add_methods._add_element_object(elem) return elem
[ "def", "add_cbeam3", "(", "self", ",", "eid", ",", "pid", ",", "nids", ",", "x", ",", "g0", ",", "wa", ",", "wb", ",", "wc", ",", "tw", ",", "s", ",", "comment", "=", "''", ")", "->", "CBEAM3", ":", "elem", "=", "CBEAM3", "(", "eid", ",", "...
https://github.com/SteveDoyle2/pyNastran/blob/eda651ac2d4883d95a34951f8a002ff94f642a1a/pyNastran/bdf/bdf_interface/add_card.py#L2710-L2716
google-research/language
61fa7260ac7d690d11ef72ca863e45a37c0bdc80
language/serene/training.py
python
Trainer._build_tokenizer
(self)
Build the correct tokenizer depending on model encoder. Returns: Tokenizer for model
Build the correct tokenizer depending on model encoder.
[ "Build", "the", "correct", "tokenizer", "depending", "on", "model", "encoder", "." ]
def _build_tokenizer(self): """Build the correct tokenizer depending on model encoder. Returns: Tokenizer for model """ if self._model_config.tokenizer == 'basic': base_tokenizer = tfds.deprecated.text.Tokenizer() return tokenizers.ReservedTokenizer( tokenizer=base_tokenizer, reserved_re=preprocessing.SEPARATOR_RE) elif self._model_config.tokenizer == 'bert': return tokenizers.BertTokenizer( vocab_file=self._model_config.bert_vocab_path, do_lower_case=True) else: raise ValueError('Invalid tokenizer')
[ "def", "_build_tokenizer", "(", "self", ")", ":", "if", "self", ".", "_model_config", ".", "tokenizer", "==", "'basic'", ":", "base_tokenizer", "=", "tfds", ".", "deprecated", ".", "text", ".", "Tokenizer", "(", ")", "return", "tokenizers", ".", "ReservedTok...
https://github.com/google-research/language/blob/61fa7260ac7d690d11ef72ca863e45a37c0bdc80/language/serene/training.py#L297-L311
virus-warnning/twnews
4c7ef436018480d07b5f3f5f474f3843af46eb99
twnews/finance/twse.py
python
import_selled
(dbcon, trading_date, dataset)
匯入已借券賣出資料集
匯入已借券賣出資料集
[ "匯入已借券賣出資料集" ]
def import_selled(dbcon, trading_date, dataset): """ 匯入已借券賣出資料集 """ sql = ''' UPDATE `short_sell` SET `security_name`=?, `selled`=? WHERE `trading_date`=? AND `security_id`=? ''' for detail in dataset['data']: security_id = detail[0] security_name = detail[1].strip() balance = int(detail[12].replace(',', '')) if security_id != '': # TODO: 如果 WHERE 條件不成立,沒更新到資料,應該要產生 Exception 觸發錯誤回報 dbcon.execute(sql, ( security_name, balance, trading_date, security_id ))
[ "def", "import_selled", "(", "dbcon", ",", "trading_date", ",", "dataset", ")", ":", "sql", "=", "'''\n UPDATE `short_sell` SET `security_name`=?, `selled`=?\n WHERE `trading_date`=? AND `security_id`=?\n '''", "for", "detail", "in", "dataset", "[", "'data'", "...
https://github.com/virus-warnning/twnews/blob/4c7ef436018480d07b5f3f5f474f3843af46eb99/twnews/finance/twse.py#L247-L264
jgoerzen/pygopherd
50c01600afbb99be1f6aba63ae3a007404a0bbc4
pygopherd/handlers/mbox.py
python
MessageHandler.canhandlerequest
(self)
return 1
We put MBOX-MESSAGE in here so we don't have to re-check the first line of the mbox file before returning a true or false result.
We put MBOX-MESSAGE in here so we don't have to re-check the first line of the mbox file before returning a true or false result.
[ "We", "put", "MBOX", "-", "MESSAGE", "in", "here", "so", "we", "don", "t", "have", "to", "re", "-", "check", "the", "first", "line", "of", "the", "mbox", "file", "before", "returning", "a", "true", "or", "false", "result", "." ]
def canhandlerequest(self): """We put MBOX-MESSAGE in here so we don't have to re-check the first line of the mbox file before returning a true or false result.""" if not self.selectorargs: return 0 msgnum = re.search('^' + self.getargflag() + '(\d+)$', self.selectorargs) if not msgnum: return 0 self.msgnum = int(msgnum.group(1)) self.message = None return 1
[ "def", "canhandlerequest", "(", "self", ")", ":", "if", "not", "self", ".", "selectorargs", ":", "return", "0", "msgnum", "=", "re", ".", "search", "(", "'^'", "+", "self", ".", "getargflag", "(", ")", "+", "'(\\d+)$'", ",", "self", ".", "selectorargs"...
https://github.com/jgoerzen/pygopherd/blob/50c01600afbb99be1f6aba63ae3a007404a0bbc4/pygopherd/handlers/mbox.py#L66-L78
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v7/services/services/managed_placement_view_service/transports/grpc.py
python
ManagedPlacementViewServiceGrpcTransport.__init__
( self, *, host: str = "googleads.googleapis.com", credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, )
Instantiate the transport. Args: host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if ``channel`` is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason.
Instantiate the transport.
[ "Instantiate", "the", "transport", "." ]
def __init__( self, *, host: str = "googleads.googleapis.com", credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. Args: host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if ``channel`` is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ self._ssl_channel_credentials = ssl_channel_credentials if channel: # Sanity check: Ensure that channel and credentials are not both # provided. credentials = False # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning, ) host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) if credentials is None: credentials, _ = auth.default( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: cert, key = client_cert_source() ssl_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) else: ssl_credentials = SslCredentials().ssl_credentials # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" if credentials is None: credentials, _ = auth.default(scopes=self.AUTH_SCOPES) # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, ssl_credentials=ssl_channel_credentials, scopes=self.AUTH_SCOPES, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) self._stubs = {} # type: Dict[str, Callable] # Run the base constructor. super().__init__( host=host, credentials=credentials, client_info=client_info, )
[ "def", "__init__", "(", "self", ",", "*", ",", "host", ":", "str", "=", "\"googleads.googleapis.com\"", ",", "credentials", ":", "credentials", ".", "Credentials", "=", "None", ",", "credentials_file", ":", "str", "=", "None", ",", "scopes", ":", "Sequence",...
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v7/services/services/managed_placement_view_service/transports/grpc.py#L49-L177
IJDykeman/wangTiles
7c1ee2095ebdf7f72bce07d94c6484915d5cae8b
experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py
python
ParseBaseException._from_exception
(cls, pe)
return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
internal factory method to simplify creating one type of ParseException from another - avoids having __init__ signature conflicts among subclasses
internal factory method to simplify creating one type of ParseException from another - avoids having __init__ signature conflicts among subclasses
[ "internal", "factory", "method", "to", "simplify", "creating", "one", "type", "of", "ParseException", "from", "another", "-", "avoids", "having", "__init__", "signature", "conflicts", "among", "subclasses" ]
def _from_exception(cls, pe): """ internal factory method to simplify creating one type of ParseException from another - avoids having __init__ signature conflicts among subclasses """ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
[ "def", "_from_exception", "(", "cls", ",", "pe", ")", ":", "return", "cls", "(", "pe", ".", "pstr", ",", "pe", ".", "loc", ",", "pe", ".", "msg", ",", "pe", ".", "parserElement", ")" ]
https://github.com/IJDykeman/wangTiles/blob/7c1ee2095ebdf7f72bce07d94c6484915d5cae8b/experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py#L221-L226
Miserlou/Zappa
5a11c17f5ecf0568bdb73b4baf6fb08ff0184f39
zappa/core.py
python
Zappa.get_credentials_arn
(self)
return role, self.credentials_arn
Given our role name, get and set the credentials_arn.
Given our role name, get and set the credentials_arn.
[ "Given", "our", "role", "name", "get", "and", "set", "the", "credentials_arn", "." ]
def get_credentials_arn(self): """ Given our role name, get and set the credentials_arn. """ role = self.iam.Role(self.role_name) self.credentials_arn = role.arn return role, self.credentials_arn
[ "def", "get_credentials_arn", "(", "self", ")", ":", "role", "=", "self", ".", "iam", ".", "Role", "(", "self", ".", "role_name", ")", "self", ".", "credentials_arn", "=", "role", ".", "arn", "return", "role", ",", "self", ".", "credentials_arn" ]
https://github.com/Miserlou/Zappa/blob/5a11c17f5ecf0568bdb73b4baf6fb08ff0184f39/zappa/core.py#L2531-L2538
francisck/DanderSpritz_docs
86bb7caca5a957147f120b18bb5c31f299914904
Python/Core/Lib/inspect.py
python
walktree
(classes, children, parent)
return results
Recursive helper function for getclasstree().
Recursive helper function for getclasstree().
[ "Recursive", "helper", "function", "for", "getclasstree", "()", "." ]
def walktree(classes, children, parent): """Recursive helper function for getclasstree().""" results = [] classes.sort(key=attrgetter('__module__', '__name__')) for c in classes: results.append((c, c.__bases__)) if c in children: results.append(walktree(children[c], children, c)) return results
[ "def", "walktree", "(", "classes", ",", "children", ",", "parent", ")", ":", "results", "=", "[", "]", "classes", ".", "sort", "(", "key", "=", "attrgetter", "(", "'__module__'", ",", "'__name__'", ")", ")", "for", "c", "in", "classes", ":", "results",...
https://github.com/francisck/DanderSpritz_docs/blob/86bb7caca5a957147f120b18bb5c31f299914904/Python/Core/Lib/inspect.py#L711-L720
NVlabs/condensa
ff2fd0f9d997ce36b574f4c9bed2bb7cffba835d
examples/cifar/models/resnet.py
python
ResNet.forward
(self, x)
return x
[]
def forward(self, x): x = self.conv1(x) x = self.bn1(x) x = self.relu(x) x = self.layer1(x) x = self.layer2(x) x = self.layer3(x) x = self.avgpool(x) x = x.view(x.size(0), -1) x = self.fc(x) return x
[ "def", "forward", "(", "self", ",", "x", ")", ":", "x", "=", "self", ".", "conv1", "(", "x", ")", "x", "=", "self", ".", "bn1", "(", "x", ")", "x", "=", "self", ".", "relu", "(", "x", ")", "x", "=", "self", ".", "layer1", "(", "x", ")", ...
https://github.com/NVlabs/condensa/blob/ff2fd0f9d997ce36b574f4c9bed2bb7cffba835d/examples/cifar/models/resnet.py#L149-L162
WikidPad/WikidPad
558109638807bc76b4672922686e416ab2d5f79c
WikidPad/lib/pwiki/DocPages.py
python
WikiPage.pseudoDeletePage
(self)
Delete a page which doesn't really exist. Just sends an appropriate event.
Delete a page which doesn't really exist. Just sends an appropriate event.
[ "Delete", "a", "page", "which", "doesn", "t", "really", "exist", ".", "Just", "sends", "an", "appropriate", "event", "." ]
def pseudoDeletePage(self): """ Delete a page which doesn't really exist. Just sends an appropriate event. """ wx.CallAfter(self.fireMiscEventKeys, ("pseudo-deleted page", "pseudo-deleted wiki page"))
[ "def", "pseudoDeletePage", "(", "self", ")", ":", "wx", ".", "CallAfter", "(", "self", ".", "fireMiscEventKeys", ",", "(", "\"pseudo-deleted page\"", ",", "\"pseudo-deleted wiki page\"", ")", ")" ]
https://github.com/WikidPad/WikidPad/blob/558109638807bc76b4672922686e416ab2d5f79c/WikidPad/lib/pwiki/DocPages.py#L1440-L1446
jobovy/galpy
8e6a230bbe24ce16938db10053f92eb17fe4bb52
galpy/potential/TwoPowerSphericalPotential.py
python
HernquistPotential._Rforce
(self,R,z,phi=0.,t=0.)
return -R/self.a/sqrtRz/(1.+sqrtRz/self.a)**2./2./self.a
NAME: _Rforce PURPOSE: evaluate the radial force for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t- time OUTPUT: the radial force HISTORY: 2010-07-09 - Written - Bovy (NYU)
NAME: _Rforce PURPOSE: evaluate the radial force for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t- time OUTPUT: the radial force HISTORY: 2010-07-09 - Written - Bovy (NYU)
[ "NAME", ":", "_Rforce", "PURPOSE", ":", "evaluate", "the", "radial", "force", "for", "this", "potential", "INPUT", ":", "R", "-", "Galactocentric", "cylindrical", "radius", "z", "-", "vertical", "height", "phi", "-", "azimuth", "t", "-", "time", "OUTPUT", ...
def _Rforce(self,R,z,phi=0.,t=0.):
    """
    NAME:
       _Rforce
    PURPOSE:
       evaluate the radial force for this potential
    INPUT:
       R - Galactocentric cylindrical radius
       z - vertical height
       phi - azimuth
       t- time
    OUTPUT:
       the radial force
    HISTORY:
       2010-07-09 - Written - Bovy (NYU)
    """
    sqrtRz= numpy.sqrt(R**2.+z**2.)
    return -R/self.a/sqrtRz/(1.+sqrtRz/self.a)**2./2./self.a
[ "def", "_Rforce", "(", "self", ",", "R", ",", "z", ",", "phi", "=", "0.", ",", "t", "=", "0.", ")", ":", "sqrtRz", "=", "numpy", ".", "sqrt", "(", "R", "**", "2.", "+", "z", "**", "2.", ")", "return", "-", "R", "/", "self", ".", "a", "/",...
https://github.com/jobovy/galpy/blob/8e6a230bbe24ce16938db10053f92eb17fe4bb52/galpy/potential/TwoPowerSphericalPotential.py#L868-L885
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/shutil.py
python
register_unpack_format
(name, extensions, function, extra_args=None, description='')
Registers an unpack format. `name` is the name of the format. `extensions` is a list of extensions corresponding to the format. `function` is the callable that will be used to unpack archives. The callable will receive archives to unpack. If it's unable to handle an archive, it needs to raise a ReadError exception. If provided, `extra_args` is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_unpack_formats() function.
Registers an unpack format.
[ "Registers", "an", "unpack", "format", "." ]
def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format. `extensions` is a list of extensions
    corresponding to the format.

    `function` is the callable that will be used to unpack archives. The
    callable will receive archives to unpack. If it's unable to handle an
    archive, it needs to raise a ReadError exception.

    If provided, `extra_args` is a sequence of (name, value) tuples that will
    be passed as arguments to the callable. description can be provided to
    describe the format, and will be returned by the get_unpack_formats()
    function.
    """
    if extra_args is None:
        extra_args = []
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
[ "def", "register_unpack_format", "(", "name", ",", "extensions", ",", "function", ",", "extra_args", "=", "None", ",", "description", "=", "''", ")", ":", "if", "extra_args", "is", "None", ":", "extra_args", "=", "[", "]", "_check_unpack_options", "(", "exte...
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/shutil.py#L837-L857
lad1337/XDM
0c1b7009fe00f06f102a6f67c793478f515e7efe
site-packages/pylint/checkers/raw_metrics.py
python
RawMetricsChecker.open
(self)
init statistics
init statistics
[ "init", "statistics" ]
def open(self):
    """init statistics"""
    self.stats = self.linter.add_stats(total_lines=0, code_lines=0,
                                       empty_lines=0, docstring_lines=0,
                                       comment_lines=0)
[ "def", "open", "(", "self", ")", ":", "self", ".", "stats", "=", "self", ".", "linter", ".", "add_stats", "(", "total_lines", "=", "0", ",", "code_lines", "=", "0", ",", "empty_lines", "=", "0", ",", "docstring_lines", "=", "0", ",", "comment_lines", ...
https://github.com/lad1337/XDM/blob/0c1b7009fe00f06f102a6f67c793478f515e7efe/site-packages/pylint/checkers/raw_metrics.py#L77-L81
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/Python3/types.py
python
new_class
(name, bases=(), kwds=None, exec_body=None)
return meta(name, bases, ns, **kwds)
Create a class object dynamically using the appropriate metaclass.
Create a class object dynamically using the appropriate metaclass.
[ "Create", "a", "class", "object", "dynamically", "using", "the", "appropriate", "metaclass", "." ]
def new_class(name, bases=(), kwds=None, exec_body=None):
    """Create a class object dynamically using the appropriate metaclass."""
    meta, ns, kwds = prepare_class(name, bases, kwds)
    if exec_body is not None:
        exec_body(ns)
    return meta(name, bases, ns, **kwds)
[ "def", "new_class", "(", "name", ",", "bases", "=", "(", ")", ",", "kwds", "=", "None", ",", "exec_body", "=", "None", ")", ":", "meta", ",", "ns", ",", "kwds", "=", "prepare_class", "(", "name", ",", "bases", ",", "kwds", ")", "if", "exec_body", ...
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/Python3/types.py#L57-L62
intel/virtual-storage-manager
00706ab9701acbd0d5e04b19cc80c6b66a2973b8
source/vsm/vsm/scheduler/manager.py
python
SchedulerManager.create_cluster
(self, context, server_list)
return {'message':'res'}
Add the servers into ceph cluster. It's notable that, the type of body['servers'] looks as below: [ {u'is_storage': True, u'is_monitor': True, u'is_mds': True, u'is_rgw': True, u'id': u'1', u'zone_id': u'1'}, {u'is_storage': True, u'is_monitor': False, u'is_mds': False, u'is_rgw': False, u'id': u'2', u'zone_id': u'2'} ] Here we also need to fetch info from DB.
Add the servers into ceph cluster.
[ "Add", "the", "servers", "into", "ceph", "cluster", "." ]
def create_cluster(self, context, server_list): """Add the servers into ceph cluster. It's notable that, the type of body['servers'] looks as below: [ {u'is_storage': True, u'is_monitor': True, u'is_mds': True, u'is_rgw': True, u'id': u'1', u'zone_id': u'1'}, {u'is_storage': True, u'is_monitor': False, u'is_mds': False, u'is_rgw': False, u'id': u'2', u'zone_id': u'2'} ] Here we also need to fetch info from DB. """ # Add hostname here. for ser in server_list: ser_ref = db.init_node_get(context, ser['id']) ser['host'] = ser_ref['host'] def _update(status): LOG.debug('status = %s' % status) self._update_server_list_status(context, server_list, status) if status.lower().find('error') != -1: raise # Set at least 3 mons when creating cluster pool_default_size = db.vsm_settings_get_by_name(context,'osd_pool_default_size') pool_default_size = int(pool_default_size.value) nums = len(server_list) mds_node = None rgw_node = [] if nums >= pool_default_size: count = 0 rest_mon_num = 0 for ser in server_list: if ser['is_monitor'] == True: count += 1 if ser['is_mds'] == True: mds_node = ser if ser['is_rgw'] == True: rgw_node.append(ser) if count < pool_default_size: rest_mon_num = pool_default_size - count if rest_mon_num > 0: for ser in server_list: if ser['is_monitor'] == False: ser['is_monitor'] = True rest_mon_num -= 1 if rest_mon_num <= 0: break # Use mkcephfs to set up ceph system. LOG.info('server_list = %s' % server_list) monitor_node = self._select_monitor(context, server_list) LOG.info('Choose monitor node = %s' % monitor_node) # Clean ceph data. def __clean_data(host): self._agent_rpcapi.update_ssh_keys(context, host) self._agent_rpcapi.clean_ceph_data(context, host) def __create_crushmap(context, server_list, host): self._agent_rpcapi.create_crushmap(context, server_list=server_list, host=host) try: _update("Cleaning") thd_list = [] for ser in server_list: thd = utils.MultiThread(__clean_data, host=ser['host']) thd_list.append(thd) utils.start_threads(thd_list) _update("Clean success") except: _update("ERROR: Cleaning") # When clean data, we also begin to create ceph.conf # and init osd in db. # Do not run with the same time as clean_data. # It maybe cleaned by clean_data. try: _update("Create ceph.conf") manifest_json = ManifestParser(FLAGS.cluster_manifest, False).format_to_json() ceph_conf_in_cluster_manifest = manifest_json['ceph_conf'] LOG.info('ceph_conf_in_cluster_manifest==scheduler===%s'%ceph_conf_in_cluster_manifest) self._agent_rpcapi.inital_ceph_osd_db_conf(context, server_list=server_list, ceph_conf_in_cluster_manifest=ceph_conf_in_cluster_manifest, host=monitor_node['host']) _update("Create ceph.conf success") except: _update("ERROR: ceph.conf") try: _update("create crushmap") # Then begin to create crush map file. create_crushmap = utils.MultiThread(__create_crushmap, context=context, server_list=server_list, host=monitor_node['host']) create_crushmap.start() except: _update("ERROR: crushmap") try: # Begin to mount disks on the mount_point. _update("Mount disks") def __mount_disk(host): self._agent_rpcapi.mount_disks(context, host) thd_list = [] for ser in server_list: thd = utils.MultiThread(__mount_disk, host=ser['host']) thd_list.append(thd) utils.start_threads(thd_list) _update("Mount disks success") except: _update("ERROR: mount disk") # Generate monitor keyring file. 
try: _update("start montior") monitor_keyring = utils.gen_mon_keyring() def __write_monitor_keyring(host): self._agent_rpcapi.write_monitor_keyring(context, monitor_keyring, host) thd_list = [] for ser in server_list: thd = utils.MultiThread(__write_monitor_keyring, host=ser['host']) thd_list.append(thd) utils.start_threads(thd_list) _update("start monitor success") except: _update("ERROR: start monitor") try: _update("Create keyring") self._track_monitors(context, server_list) # Here we use our self-define dir for ceph-monitor services. # So we need to create the key ring by command. self._agent_rpcapi.create_keyring(context, host=monitor_node['host']) self._agent_rpcapi.upload_keyring_admin_into_db(context, host=monitor_node['host']) def _update_keyring_from_db(host): self._agent_rpcapi.update_keyring_admin_from_db(context, host=host) thd_list = [] for ser in server_list: thd = utils.MultiThread(_update_keyring_from_db, host=ser['host']) thd_list.append(thd) utils.start_threads(thd_list) _update("Success: keyring") except: _update("ERROR: keyring") try: self._agent_rpcapi.prepare_osds(context, server_list, host=monitor_node['host']) # Begin to start osd service. _update('Start osds') def __start_osd(host): self._agent_rpcapi.start_osd(context, host) thd_list = [] for ser in server_list: thd = utils.MultiThread(__start_osd, host=ser['host']) thd_list.append(thd) utils.start_threads(thd_list) _update('OSD success') except: _update("ERROR: start osd") # add mds service if mds_node: try: _update("Start mds") LOG.info('start mds services, host = %s' % mds_node['host']) self._agent_rpcapi.add_mds(context, host=mds_node['host']) except: _update("ERROR: mds") # Created begin to get ceph status try: _update('Ceph status') stat = self._agent_rpcapi.get_ceph_health(context, monitor_node['host']) except: _update('ERROR: ceph -s') if stat == False: self._update_server_list_status(context, server_list, "Ceph Start Error") LOG.error('Ceph starting failed!') raise try: _update('Set crushmap') # Wait until it's created over. while create_crushmap.is_alive(): time.sleep(1) def __set_crushmap(context, host): self._agent_rpcapi.set_crushmap(context, host) set_crushmap = utils.MultiThread(__set_crushmap, context=context, host=monitor_node['host']) set_crushmap.start() except: _update('ERROR: set crushmap') # Add RGW Instance(s) # One is simple configuration, another is federated configuration. # TODO Something Hardcode if Add RGW Instance(s) when created cluster. 
# Simple configuration: # Federated configuration: if len(rgw_node) == 1: try: _update("Creating Simple RGW") LOG.info("Start creating rgw instance, host = %s" % rgw_node[0]['host']) self._agent_rpcapi.rgw_create(context, name="radosgw.gateway", host=rgw_node[0]['host'], keyring="/etc/ceph/keyring.radosgw.gateway", log_file="/var/log/ceph/radosgw.gateway.log", rgw_frontends="civetweb port=80", is_ssl=False, s3_user_uid="johndoe", s3_user_display_name="John Doe", s3_user_email="john@example.com", swift_user_subuser="johndoe:swift", swift_user_access="full", swift_user_key_type="swift") except: _update("ERROR: rgw") elif len(rgw_node) > 1: try: _update("Creating Multiple RGWs") hosts = [] for rgw in rgw_node: LOG.info("Start creating rgw instance, host = %s" % rgw['host']) hosts.append(rgw['host']) self._agent_rpcapi.rgw_create(context, name="", host=rgw_node[0]['host'], keyring="", log_file="", rgw_frontends="civetweb port=80", is_ssl=False, s3_user_uid="", s3_user_display_name="", s3_user_email="", swift_user_subuser="", swift_user_access="", swift_user_key_type="", multiple_hosts=hosts) except: _update("ERROR: rgw") _update('Active') self._update_init_node(context, server_list) while set_crushmap.is_alive(): time.sleep(1) self._agent_rpcapi.update_all_status(context, host=monitor_node['host']) self._agent_rpcapi.update_zones_from_crushmap_to_db(context,None, monitor_node['host']) self._agent_rpcapi.update_storage_groups_from_crushmap_to_db(context,None, monitor_node['host']) self._judge_drive_ext_threshold(context) self._update_drive_ext_threshold(context) return {'message':'res'}
[ "def", "create_cluster", "(", "self", ",", "context", ",", "server_list", ")", ":", "# Add hostname here.", "for", "ser", "in", "server_list", ":", "ser_ref", "=", "db", ".", "init_node_get", "(", "context", ",", "ser", "[", "'id'", "]", ")", "ser", "[", ...
https://github.com/intel/virtual-storage-manager/blob/00706ab9701acbd0d5e04b19cc80c6b66a2973b8/source/vsm/vsm/scheduler/manager.py#L1487-L1782
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/bdb.py
python
Bdb.get_file_breaks
(self, filename)
Return all lines with breakpoints for filename. If no breakpoints are set, return an empty list.
Return all lines with breakpoints for filename.
[ "Return", "all", "lines", "with", "breakpoints", "for", "filename", "." ]
def get_file_breaks(self, filename):
    """Return all lines with breakpoints for filename.

    If no breakpoints are set, return an empty list.
    """
    filename = self.canonic(filename)
    if filename in self.breaks:
        return self.breaks[filename]
    else:
        return []
[ "def", "get_file_breaks", "(", "self", ",", "filename", ")", ":", "filename", "=", "self", ".", "canonic", "(", "filename", ")", "if", "filename", "in", "self", ".", "breaks", ":", "return", "self", ".", "breaks", "[", "filename", "]", "else", ":", "re...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/bdb.py#L491-L500
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/xml/sax/xmlreader.py
python
AttributesImpl.get
(self, name, alternative=None)
return self._attrs.get(name, alternative)
[]
def get(self, name, alternative=None):
    return self._attrs.get(name, alternative)
[ "def", "get", "(", "self", ",", "name", ",", "alternative", "=", "None", ")", ":", "return", "self", ".", "_attrs", ".", "get", "(", "name", ",", "alternative", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/xml/sax/xmlreader.py#L326-L327
playframework/play1
0ecac3bc2421ae2dbec27a368bf671eda1c9cba5
python/Lib/cookielib.py
python
split_header_words
(header_values)
return result
r"""Parse header values into a list of lists containing key,value pairs. The function knows how to deal with ",", ";" and "=" as well as quoted values after "=". A list of space separated tokens are parsed as if they were separated by ";". If the header_values passed as argument contains multiple values, then they are treated as if they were a single value separated by comma ",". This means that this function is useful for parsing header fields that follow this syntax (BNF as from the HTTP/1.1 specification, but we relax the requirement for tokens). headers = #header header = (token | parameter) *( [";"] (token | parameter)) token = 1*<any CHAR except CTLs or separators> separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) qdtext = <any TEXT except <">> quoted-pair = "\" CHAR parameter = attribute "=" value attribute = token value = token | quoted-string Each header is represented by a list of key/value pairs. The value for a simple token (not part of a parameter) is None. Syntactically incorrect headers will not necessarily be parsed as you would want. This is easier to describe with some examples: >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] >>> split_header_words(['text/html; charset="iso-8859-1"']) [[('text/html', None), ('charset', 'iso-8859-1')]] >>> split_header_words([r'Basic realm="\"foo\bar\""']) [[('Basic', None), ('realm', '"foobar"')]]
r"""Parse header values into a list of lists containing key,value pairs.
[ "r", "Parse", "header", "values", "into", "a", "list", "of", "lists", "containing", "key", "value", "pairs", "." ]
def split_header_words(header_values): r"""Parse header values into a list of lists containing key,value pairs. The function knows how to deal with ",", ";" and "=" as well as quoted values after "=". A list of space separated tokens are parsed as if they were separated by ";". If the header_values passed as argument contains multiple values, then they are treated as if they were a single value separated by comma ",". This means that this function is useful for parsing header fields that follow this syntax (BNF as from the HTTP/1.1 specification, but we relax the requirement for tokens). headers = #header header = (token | parameter) *( [";"] (token | parameter)) token = 1*<any CHAR except CTLs or separators> separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) qdtext = <any TEXT except <">> quoted-pair = "\" CHAR parameter = attribute "=" value attribute = token value = token | quoted-string Each header is represented by a list of key/value pairs. The value for a simple token (not part of a parameter) is None. Syntactically incorrect headers will not necessarily be parsed as you would want. This is easier to describe with some examples: >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] >>> split_header_words(['text/html; charset="iso-8859-1"']) [[('text/html', None), ('charset', 'iso-8859-1')]] >>> split_header_words([r'Basic realm="\"foo\bar\""']) [[('Basic', None), ('realm', '"foobar"')]] """ assert not isinstance(header_values, basestring) result = [] for text in header_values: orig_text = text pairs = [] while text: m = HEADER_TOKEN_RE.search(text) if m: text = unmatched(m) name = m.group(1) m = HEADER_QUOTED_VALUE_RE.search(text) if m: # quoted value text = unmatched(m) value = m.group(1) value = HEADER_ESCAPE_RE.sub(r"\1", value) else: m = HEADER_VALUE_RE.search(text) if m: # unquoted value text = unmatched(m) value = m.group(1) value = value.rstrip() else: # no value, a lone token value = None pairs.append((name, value)) elif text.lstrip().startswith(","): # concatenated headers, as per RFC 2616 section 4.2 text = text.lstrip()[1:] if pairs: result.append(pairs) pairs = [] else: # skip junk non_junk, nr_junk_chars = re.subn("^[=\s;]*", "", text) assert nr_junk_chars > 0, ( "split_header_words bug: '%s', '%s', %s" % (orig_text, text, pairs)) text = non_junk if pairs: result.append(pairs) return result
[ "def", "split_header_words", "(", "header_values", ")", ":", "assert", "not", "isinstance", "(", "header_values", ",", "basestring", ")", "result", "=", "[", "]", "for", "text", "in", "header_values", ":", "orig_text", "=", "text", "pairs", "=", "[", "]", ...
https://github.com/playframework/play1/blob/0ecac3bc2421ae2dbec27a368bf671eda1c9cba5/python/Lib/cookielib.py#L326-L409
numba/numba
bf480b9e0da858a65508c2b17759a72ee6a44c51
numba/core/types/npytypes.py
python
NumpyNdIterType.need_shaped_indexing
(self)
return False
Whether iterating on this iterator requires keeping track of individual indices inside the shape. If False, only a single index over the equivalent flat shape is required, which can make the iterator more efficient.
Whether iterating on this iterator requires keeping track of individual indices inside the shape. If False, only a single index over the equivalent flat shape is required, which can make the iterator more efficient.
[ "Whether", "iterating", "on", "this", "iterator", "requires", "keeping", "track", "of", "individual", "indices", "inside", "the", "shape", ".", "If", "False", "only", "a", "single", "index", "over", "the", "equivalent", "flat", "shape", "is", "required", "whic...
def need_shaped_indexing(self):
    """
    Whether iterating on this iterator requires keeping track of
    individual indices inside the shape.  If False, only a single index
    over the equivalent flat shape is required, which can make the
    iterator more efficient.
    """
    for kind, start_dim, end_dim, _ in self.indexers:
        if kind in ('0d', 'scalar'):
            pass
        elif kind == 'flat':
            if (start_dim, end_dim) != (0, self.ndim):
                # Broadcast flat iteration needs shaped indexing
                # to know when to restart iteration.
                return True
        else:
            return True
    return False
[ "def", "need_shaped_indexing", "(", "self", ")", ":", "for", "kind", ",", "start_dim", ",", "end_dim", ",", "_", "in", "self", ".", "indexers", ":", "if", "kind", "in", "(", "'0d'", ",", "'scalar'", ")", ":", "pass", "elif", "kind", "==", "'flat'", "...
https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/core/types/npytypes.py#L381-L398
opendevops-cn/codo-cmdb
334fba324512841d84535f31a094717eb5a40acf
libs/server/push_system_user.py
python
PushSystemUser.run
(self, module_name="shell", module_args='', hosts='', remote_user="root", timeout=10, forks=10)
return result
Ansible运行函数
Ansible运行函数
[ "Ansible运行函数" ]
def run(self, module_name="shell", module_args='', hosts='', remote_user="root", timeout=10, forks=10):
    '''Ansible运行函数'''
    runner = Runner(
        module_name=module_name,
        module_args=module_args,
        remote_user=remote_user,
        pattern="all",
        hosts=hosts,
        forks=forks,
        timeout=timeout,
    )

    result = runner.run()
    return result
[ "def", "run", "(", "self", ",", "module_name", "=", "\"shell\"", ",", "module_args", "=", "''", ",", "hosts", "=", "''", ",", "remote_user", "=", "\"root\"", ",", "timeout", "=", "10", ",", "forks", "=", "10", ")", ":", "runner", "=", "Runner", "(", ...
https://github.com/opendevops-cn/codo-cmdb/blob/334fba324512841d84535f31a094717eb5a40acf/libs/server/push_system_user.py#L24-L37
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/email/header.py
python
_Accumulator.pop_from
(self, i=0)
return popped
[]
def pop_from(self, i=0):
    popped = self[i:]
    self[i:] = []
    return popped
[ "def", "pop_from", "(", "self", ",", "i", "=", "0", ")", ":", "popped", "=", "self", "[", "i", ":", "]", "self", "[", "i", ":", "]", "=", "[", "]", "return", "popped" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/email/header.py#L550-L553
F8LEFT/DecLLVM
d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c
python/idaapi.py
python
is_uname
(*args)
return _idaapi.is_uname(*args)
is_uname(name) -> bool
is_uname(name) -> bool
[ "is_uname", "(", "name", ")", "-", ">", "bool" ]
def is_uname(*args):
    """
    is_uname(name) -> bool
    """
    return _idaapi.is_uname(*args)
[ "def", "is_uname", "(", "*", "args", ")", ":", "return", "_idaapi", ".", "is_uname", "(", "*", "args", ")" ]
https://github.com/F8LEFT/DecLLVM/blob/d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c/python/idaapi.py#L47217-L47221
quantmind/pulsar
fee44e871954aa6ca36d00bb5a3739abfdb89b26
pulsar/utils/config.py
python
Config.copy_globals
(self, cfg)
Copy global settings from ``cfg`` to this config. The settings are copied only if they were not already modified.
Copy global settings from ``cfg`` to this config.
[ "Copy", "global", "settings", "from", "cfg", "to", "this", "config", "." ]
def copy_globals(self, cfg):
    """Copy global settings from ``cfg`` to this config.

    The settings are copied only if they were not already modified.
    """
    for name, setting in cfg.settings.items():
        csetting = self.settings.get(name)
        if (setting.is_global and csetting is not None
                and not csetting.modified):
            csetting.set(setting.get())
[ "def", "copy_globals", "(", "self", ",", "cfg", ")", ":", "for", "name", ",", "setting", "in", "cfg", ".", "settings", ".", "items", "(", ")", ":", "csetting", "=", "self", ".", "settings", ".", "get", "(", "name", ")", "if", "(", "setting", ".", ...
https://github.com/quantmind/pulsar/blob/fee44e871954aa6ca36d00bb5a3739abfdb89b26/pulsar/utils/config.py#L197-L206
matsui528/nanopq
4c1d724494a71f9736b15928a8c03b0ba13ffd19
nanopq/pq.py
python
PQ.encode
(self, vecs)
return codes
Encode input vectors into PQ-codes. Args: vecs (np.ndarray): Input vectors with shape=(N, D) and dtype=np.float32. Returns: np.ndarray: PQ codes with shape=(N, M) and dtype=self.code_dtype
Encode input vectors into PQ-codes.
[ "Encode", "input", "vectors", "into", "PQ", "-", "codes", "." ]
def encode(self, vecs):
    """Encode input vectors into PQ-codes.

    Args:
        vecs (np.ndarray): Input vectors with shape=(N, D) and dtype=np.float32.

    Returns:
        np.ndarray: PQ codes with shape=(N, M) and dtype=self.code_dtype
    """
    assert vecs.dtype == np.float32
    assert vecs.ndim == 2
    N, D = vecs.shape
    assert D == self.Ds * self.M, "input dimension must be Ds * M"

    # codes[n][m] : code of n-th vec, m-th subspace
    codes = np.empty((N, self.M), dtype=self.code_dtype)
    for m in range(self.M):
        if self.verbose:
            print("Encoding the subspace: {} / {}".format(m, self.M))
        vecs_sub = vecs[:, m * self.Ds : (m + 1) * self.Ds]
        codes[:, m], _ = vq(vecs_sub, self.codewords[m])

    return codes
[ "def", "encode", "(", "self", ",", "vecs", ")", ":", "assert", "vecs", ".", "dtype", "==", "np", ".", "float32", "assert", "vecs", ".", "ndim", "==", "2", "N", ",", "D", "=", "vecs", ".", "shape", "assert", "D", "==", "self", ".", "Ds", "*", "s...
https://github.com/matsui528/nanopq/blob/4c1d724494a71f9736b15928a8c03b0ba13ffd19/nanopq/pq.py#L96-L119
twilio/twilio-python
6e1e811ea57a1edfadd5161ace87397c563f6915
twilio/rest/trunking/v1/trunk/phone_number.py
python
PhoneNumberList.page
(self, page_token=values.unset, page_number=values.unset, page_size=values.unset)
return PhoneNumberPage(self._version, response, self._solution)
Retrieve a single page of PhoneNumberInstance records from the API. Request is executed immediately :param str page_token: PageToken provided by the API :param int page_number: Page Number, this value is simply for client state :param int page_size: Number of records to return, defaults to 50 :returns: Page of PhoneNumberInstance :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberPage
Retrieve a single page of PhoneNumberInstance records from the API. Request is executed immediately
[ "Retrieve", "a", "single", "page", "of", "PhoneNumberInstance", "records", "from", "the", "API", ".", "Request", "is", "executed", "immediately" ]
def page(self, page_token=values.unset, page_number=values.unset,
         page_size=values.unset):
    """
    Retrieve a single page of PhoneNumberInstance records from the API.
    Request is executed immediately

    :param str page_token: PageToken provided by the API
    :param int page_number: Page Number, this value is simply for client state
    :param int page_size: Number of records to return, defaults to 50

    :returns: Page of PhoneNumberInstance
    :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberPage
    """
    data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })

    response = self._version.page(method='GET', uri=self._uri, params=data, )

    return PhoneNumberPage(self._version, response, self._solution)
[ "def", "page", "(", "self", ",", "page_token", "=", "values", ".", "unset", ",", "page_number", "=", "values", ".", "unset", ",", "page_size", "=", "values", ".", "unset", ")", ":", "data", "=", "values", ".", "of", "(", "{", "'PageToken'", ":", "pag...
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/trunking/v1/trunk/phone_number.py#L91-L108
vcheckzen/FODI
3bb23644938a33c3fdfb9611a622e35ed4ce6532
back-end-py/main/3rd/PIL/ImageChops.py
python
overlay
(image1, image2)
return image1._new(image1.im.chop_overlay(image2.im))
Superimposes two images on top of each other using the Overlay algorithm :rtype: :py:class:`~PIL.Image.Image`
Superimposes two images on top of each other using the Overlay algorithm
[ "Superimposes", "two", "images", "on", "top", "of", "each", "other", "using", "the", "Overlay", "algorithm" ]
def overlay(image1, image2):
    """
    Superimposes two images on top of each other using the Overlay algorithm

    :rtype: :py:class:`~PIL.Image.Image`
    """
    image1.load()
    image2.load()
    return image1._new(image1.im.chop_overlay(image2.im))
[ "def", "overlay", "(", "image1", ",", "image2", ")", ":", "image1", ".", "load", "(", ")", "image2", ".", "load", "(", ")", "return", "image1", ".", "_new", "(", "image1", ".", "im", ".", "chop_overlay", "(", "image2", ".", "im", ")", ")" ]
https://github.com/vcheckzen/FODI/blob/3bb23644938a33c3fdfb9611a622e35ed4ce6532/back-end-py/main/3rd/PIL/ImageChops.py#L166-L175
kanzure/nanoengineer
874e4c9f8a9190f093625b267f9767e19f82e6c4
cad/src/ne1_ui/ToolTipText_for_CommandToolbars.py
python
toolTipTextForAtomsCommandToolbar
(commandToolbar)
return
"ToolTip" text for widgets in the Build Atoms Command Toolbar. @note: This is a placeholder function. Currenly, all the tooltip text is defined in BuildAtoms_Command.py.
"ToolTip" text for widgets in the Build Atoms Command Toolbar.
[ "ToolTip", "text", "for", "widgets", "in", "the", "Build", "Atoms", "Command", "Toolbar", "." ]
def toolTipTextForAtomsCommandToolbar(commandToolbar):
    """
    "ToolTip" text for widgets in the Build Atoms Command Toolbar.

    @note: This is a placeholder function. Currenly, all the tooltip text is
    defined in BuildAtoms_Command.py.
    """
    return
[ "def", "toolTipTextForAtomsCommandToolbar", "(", "commandToolbar", ")", ":", "return" ]
https://github.com/kanzure/nanoengineer/blob/874e4c9f8a9190f093625b267f9767e19f82e6c4/cad/src/ne1_ui/ToolTipText_for_CommandToolbars.py#L17-L24
hardbyte/python-can
e7a2b040ee1f0cdd7fd77fbfef0454353166b333
can/interfaces/etas/__init__.py
python
EtasBus.__init__
( self, channel: str, can_filters: Optional[can.typechecking.CanFilters] = None, receive_own_messages: bool = False, bitrate: int = 1000000, fd: bool = True, data_bitrate: int = 2000000, **kwargs: object, )
[]
def __init__( self, channel: str, can_filters: Optional[can.typechecking.CanFilters] = None, receive_own_messages: bool = False, bitrate: int = 1000000, fd: bool = True, data_bitrate: int = 2000000, **kwargs: object, ): self.receive_own_messages = receive_own_messages nodeRange = CSI_NodeRange(CSI_NODE_MIN, CSI_NODE_MAX) self.tree = ctypes.POINTER(CSI_Tree)() CSI_CreateProtocolTree(ctypes.c_char_p(b""), nodeRange, ctypes.byref(self.tree)) oci_can_v = BOA_Version(1, 4, 0, 0) self.ctrl = OCI_ControllerHandle() OCI_CreateCANControllerNoSearch( channel.encode(), ctypes.byref(oci_can_v), self.tree, ctypes.byref(self.ctrl), ) ctrlConf = OCI_CANConfiguration() ctrlConf.baudrate = bitrate ctrlConf.samplePoint = 80 ctrlConf.samplesPerBit = OCI_CAN_THREE_SAMPLES_PER_BIT ctrlConf.BTL_Cycles = 10 ctrlConf.SJW = 1 ctrlConf.syncEdge = OCI_CAN_SINGLE_SYNC_EDGE ctrlConf.physicalMedia = OCI_CAN_MEDIA_HIGH_SPEED if receive_own_messages: ctrlConf.selfReceptionMode = OCI_SELF_RECEPTION_ON else: ctrlConf.selfReceptionMode = OCI_SELF_RECEPTION_OFF ctrlConf.busParticipationMode = OCI_BUSMODE_ACTIVE if fd: ctrlConf.canFDEnabled = True ctrlConf.canFDConfig.dataBitRate = data_bitrate ctrlConf.canFDConfig.dataBTL_Cycles = 10 ctrlConf.canFDConfig.dataSamplePoint = 80 ctrlConf.canFDConfig.dataSJW = 1 ctrlConf.canFDConfig.flags = 0 ctrlConf.canFDConfig.canFdTxConfig = OCI_CANFDTX_USE_CAN_AND_CANFD_FRAMES ctrlConf.canFDConfig.canFdRxConfig.canRxMode = ( OCI_CAN_RXMODE_CAN_FRAMES_USING_CAN_MESSAGE ) ctrlConf.canFDConfig.canFdRxConfig.canFdRxMode = ( OCI_CANFDRXMODE_CANFD_FRAMES_USING_CANFD_MESSAGE ) ctrlProp = OCI_CANControllerProperties() ctrlProp.mode = OCI_CONTROLLER_MODE_RUNNING ec = OCI_OpenCANController( self.ctrl, ctypes.byref(ctrlConf), ctypes.byref(ctrlProp) ) if ec != 0x0 and ec != 0x40004000: # accept BOA_WARN_PARAM_ADAPTED raise CanInitializationError( f"OCI_OpenCANController failed with error 0x{ec:X}" ) # RX rxQConf = OCI_CANRxQueueConfiguration() rxQConf.onFrame.function = ctypes.cast(None, OCI_CANRxCallbackFunctionSingleMsg) rxQConf.onFrame.userData = None rxQConf.onEvent.function = ctypes.cast(None, OCI_CANRxCallbackFunctionSingleMsg) rxQConf.onEvent.userData = None if receive_own_messages: rxQConf.selfReceptionMode = OCI_SELF_RECEPTION_ON else: rxQConf.selfReceptionMode = OCI_SELF_RECEPTION_OFF self.rxQueue = OCI_QueueHandle() OCI_CreateCANRxQueue( self.ctrl, ctypes.byref(rxQConf), ctypes.byref(self.rxQueue) ) self._oci_filters = None self.filters = can_filters # TX txQConf = OCI_CANTxQueueConfiguration() txQConf.reserved = 0 self.txQueue = OCI_QueueHandle() OCI_CreateCANTxQueue( self.ctrl, ctypes.byref(txQConf), ctypes.byref(self.txQueue) ) # Common timerCapabilities = OCI_TimerCapabilities() OCI_GetTimerCapabilities(self.ctrl, ctypes.byref(timerCapabilities)) self.tickFrequency = timerCapabilities.tickFrequency # clock ticks per second # all timestamps are hardware timestamps relative to the CAN device powerup # calculate an offset to make them relative to epoch now = OCI_Time() OCI_GetTimerValue(self.ctrl, ctypes.byref(now)) self.timeOffset = time.time() - (float(now.value) / self.tickFrequency) self.channel_info = channel
[ "def", "__init__", "(", "self", ",", "channel", ":", "str", ",", "can_filters", ":", "Optional", "[", "can", ".", "typechecking", ".", "CanFilters", "]", "=", "None", ",", "receive_own_messages", ":", "bool", "=", "False", ",", "bitrate", ":", "int", "="...
https://github.com/hardbyte/python-can/blob/e7a2b040ee1f0cdd7fd77fbfef0454353166b333/can/interfaces/etas/__init__.py#L11-L117
shuup/shuup
25f78cfe370109b9885b903e503faac295c7b7f2
shuup/xtheme/_theme.py
python
Theme.settings_obj
(self)
return self._theme_settings
Get a saved settings model for this theme. :rtype: shuup.xtheme.models.ThemeSettings
Get a saved settings model for this theme.
[ "Get", "a", "saved", "settings", "model", "for", "this", "theme", "." ]
def settings_obj(self):
    """
    Get a saved settings model for this theme.

    :rtype: shuup.xtheme.models.ThemeSettings
    """
    return self._theme_settings
[ "def", "settings_obj", "(", "self", ")", ":", "return", "self", ".", "_theme_settings" ]
https://github.com/shuup/shuup/blob/25f78cfe370109b9885b903e503faac295c7b7f2/shuup/xtheme/_theme.py#L145-L151
ni/nidaqmx-python
62fc6b48cbbb330fe1bcc9aedadc86610a1269b6
nidaqmx/_task_modules/triggering/pause_trigger.py
python
PauseTrigger.anlg_lvl_dig_sync_enable
(self)
[]
def anlg_lvl_dig_sync_enable(self):
    cfunc = lib_importer.windll.DAQmxResetAnlgLvlPauseTrigDigSyncEnable
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle]

    error_code = cfunc(
        self._handle)
    check_for_error(error_code)
[ "def", "anlg_lvl_dig_sync_enable", "(", "self", ")", ":", "cfunc", "=", "lib_importer", ".", "windll", ".", "DAQmxResetAnlgLvlPauseTrigDigSyncEnable", "if", "cfunc", ".", "argtypes", "is", "None", ":", "with", "cfunc", ".", "arglock", ":", "if", "cfunc", ".", ...
https://github.com/ni/nidaqmx-python/blob/62fc6b48cbbb330fe1bcc9aedadc86610a1269b6/nidaqmx/_task_modules/triggering/pause_trigger.py#L329-L339
tensorflow/tensor2tensor
2a33b152d7835af66a6d20afe7961751047e28dd
tensor2tensor/data_generators/text_encoder.py
python
TextEncoder.encode
(self, s)
return [int(w) + self._num_reserved_ids for w in s.split()]
Transform a human-readable string into a sequence of int ids. The ids should be in the range [num_reserved_ids, vocab_size). Ids [0, num_reserved_ids) are reserved. EOS is not appended. Args: s: human-readable string to be converted. Returns: ids: list of integers
Transform a human-readable string into a sequence of int ids.
[ "Transform", "a", "human", "-", "readable", "string", "into", "a", "sequence", "of", "int", "ids", "." ]
def encode(self, s):
    """Transform a human-readable string into a sequence of int ids.

    The ids should be in the range [num_reserved_ids, vocab_size). Ids [0,
    num_reserved_ids) are reserved.

    EOS is not appended.

    Args:
      s: human-readable string to be converted.

    Returns:
      ids: list of integers
    """
    return [int(w) + self._num_reserved_ids for w in s.split()]
[ "def", "encode", "(", "self", ",", "s", ")", ":", "return", "[", "int", "(", "w", ")", "+", "self", ".", "_num_reserved_ids", "for", "w", "in", "s", ".", "split", "(", ")", "]" ]
https://github.com/tensorflow/tensor2tensor/blob/2a33b152d7835af66a6d20afe7961751047e28dd/tensor2tensor/data_generators/text_encoder.py#L117-L131
menpo/menpo
a61500656c4fc2eea82497684f13cc31a605550b
menpo/transform/groupalign/procrustes.py
python
GeneralizedProcrustesAnalysis._recursive_procrustes
(self)
r""" Recursively calculates a procrustes alignment.
r""" Recursively calculates a procrustes alignment.
[ "r", "Recursively", "calculates", "a", "procrustes", "alignment", "." ]
def _recursive_procrustes(self):
    r"""
    Recursively calculates a procrustes alignment.
    """
    # Avoid circular imports
    from menpo.shape import mean_pointcloud, PointCloud
    from ..compositions import scale_about_centre

    if self.n_iterations > self.max_iterations:
        return False
    new_tgt = mean_pointcloud(
        [PointCloud(t.aligned_source().points, copy=False) for t in self.transforms]
    )
    # rescale the new_target to be the same size as the original about
    # it's centre
    rescale = scale_about_centre(
        new_tgt, self.initial_target_scale / new_tgt.norm()
    )
    rescale._apply_inplace(new_tgt)
    # check to see if we have converged yet
    delta_target = np.linalg.norm(self.target.points - new_tgt.points)
    if delta_target < 1e-6:
        return True
    else:
        self.n_iterations += 1
        for t in self.transforms:
            t.set_target(new_tgt)
        self.target = new_tgt
        return self._recursive_procrustes()
[ "def", "_recursive_procrustes", "(", "self", ")", ":", "# Avoid circular imports", "from", "menpo", ".", "shape", "import", "mean_pointcloud", ",", "PointCloud", "from", ".", ".", "compositions", "import", "scale_about_centre", "if", "self", ".", "n_iterations", ">"...
https://github.com/menpo/menpo/blob/a61500656c4fc2eea82497684f13cc31a605550b/menpo/transform/groupalign/procrustes.py#L45-L73
tensorflow/federated
5a60a032360087b8f4c7fcfd97ed1c0131c3eac3
tensorflow_federated/python/simulation/baselines/task_data.py
python
BaselineTaskDatasets.__init__
(self, train_data: client_data.ClientData, test_data: CentralOrClientData, validation_data: Optional[CentralOrClientData] = None, train_preprocess_fn: Optional[PreprocessFnType] = None, eval_preprocess_fn: Optional[PreprocessFnType] = None)
Creates a `BaselineTaskDatasets`. Args: train_data: A `tff.simulation.datasets.ClientData` for training. test_data: An optional `tff.simulation.datasets.ClientData` for computing test metrics. validation_data: A `tff.simulation.datasets.ClientData` for computing validation metrics. train_preprocess_fn: An optional callable accepting and returning a `tf.data.Dataset`, used to perform dataset preprocessing for training. If set to `None`, we use the identity map for all train preprocessing. eval_preprocess_fn: An optional callable accepting and returning a `tf.data.Dataset`, used to perform evaluation (eg. validation, testing) preprocessing. If `None`, evaluation preprocessing will be done via the identity map. Raises: ValueError: If `train_data` and `test_data` have different element types after preprocessing with `train_preprocess_fn` and `eval_preprocess_fn`, or if `validation_data` is not `None` and has a different element type than the test data.
Creates a `BaselineTaskDatasets`.
[ "Creates", "a", "BaselineTaskDatasets", "." ]
def __init__(self, train_data: client_data.ClientData, test_data: CentralOrClientData, validation_data: Optional[CentralOrClientData] = None, train_preprocess_fn: Optional[PreprocessFnType] = None, eval_preprocess_fn: Optional[PreprocessFnType] = None): """Creates a `BaselineTaskDatasets`. Args: train_data: A `tff.simulation.datasets.ClientData` for training. test_data: An optional `tff.simulation.datasets.ClientData` for computing test metrics. validation_data: A `tff.simulation.datasets.ClientData` for computing validation metrics. train_preprocess_fn: An optional callable accepting and returning a `tf.data.Dataset`, used to perform dataset preprocessing for training. If set to `None`, we use the identity map for all train preprocessing. eval_preprocess_fn: An optional callable accepting and returning a `tf.data.Dataset`, used to perform evaluation (eg. validation, testing) preprocessing. If `None`, evaluation preprocessing will be done via the identity map. Raises: ValueError: If `train_data` and `test_data` have different element types after preprocessing with `train_preprocess_fn` and `eval_preprocess_fn`, or if `validation_data` is not `None` and has a different element type than the test data. """ self._train_data = train_data self._test_data = test_data self._validation_data = validation_data self._train_preprocess_fn = train_preprocess_fn self._eval_preprocess_fn = eval_preprocess_fn if (train_preprocess_fn is not None and not callable(train_preprocess_fn)): raise ValueError('The train_preprocess_fn must be None or callable.') self._train_preprocess_fn = train_preprocess_fn if (eval_preprocess_fn is not None) and (not callable(eval_preprocess_fn)): raise ValueError('The eval_preprocess_fn must be None or callable.') self._eval_preprocess_fn = eval_preprocess_fn post_preprocess_train_type = _get_element_spec(train_data, train_preprocess_fn) post_preprocess_test_type = _get_element_spec(test_data, eval_preprocess_fn) if post_preprocess_train_type != post_preprocess_test_type: raise ValueError( 'The train and test element structures after preprocessing must be ' 'equal. Found train type {} and test type {}'.format( post_preprocess_train_type, post_preprocess_test_type)) if train_preprocess_fn is None: self._preprocess_train_data = train_data else: self._preprocess_train_data = train_data.preprocess(train_preprocess_fn) self._element_type_structure = post_preprocess_train_type if validation_data is not None: test_type = _get_element_spec(test_data) validation_type = _get_element_spec(validation_data) if test_type != validation_type: raise ValueError( 'The validation set must be None, or have the same element type ' 'structure as the test data. Found test type {} and validation type' ' {}'.format(test_type, validation_type)) self._data_info = None
[ "def", "__init__", "(", "self", ",", "train_data", ":", "client_data", ".", "ClientData", ",", "test_data", ":", "CentralOrClientData", ",", "validation_data", ":", "Optional", "[", "CentralOrClientData", "]", "=", "None", ",", "train_preprocess_fn", ":", "Optiona...
https://github.com/tensorflow/federated/blob/5a60a032360087b8f4c7fcfd97ed1c0131c3eac3/tensorflow_federated/python/simulation/baselines/task_data.py#L69-L134
Komodo/KomodoEdit
61edab75dce2bdb03943b387b0608ea36f548e8e
src/codeintel/play/core.py
python
MouseEvent.LeftDClick
(*args, **kwargs)
return _core.MouseEvent_LeftDClick(*args, **kwargs)
LeftDClick() -> bool
LeftDClick() -> bool
[ "LeftDClick", "()", "-", ">", "bool" ]
def LeftDClick(*args, **kwargs):
    """LeftDClick() -> bool"""
    return _core.MouseEvent_LeftDClick(*args, **kwargs)
[ "def", "LeftDClick", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core", ".", "MouseEvent_LeftDClick", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/Komodo/KomodoEdit/blob/61edab75dce2bdb03943b387b0608ea36f548e8e/src/codeintel/play/core.py#L3357-L3359
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/lib_openshift/library/oc_pvc.py
python
OpenShiftCLI.openshift_cmd
(self, cmd, oadm=False, output=False, output_type='json', input_data=None)
return rval
Base command for oc
Base command for oc
[ "Base", "command", "for", "oc" ]
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
    '''Base command for oc '''
    cmds = [self.oc_binary]

    if oadm:
        cmds.append('adm')

    cmds.extend(cmd)

    if self.all_namespaces:
        cmds.extend(['--all-namespaces'])
    elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']:  # E501
        cmds.extend(['-n', self.namespace])

    if self.verbose:
        print(' '.join(cmds))

    try:
        returncode, stdout, stderr = self._run(cmds, input_data)
    except OSError as ex:
        returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)

    rval = {"returncode": returncode,
            "cmd": ' '.join(cmds)}

    if output_type == 'json':
        rval['results'] = {}
        if output and stdout:
            try:
                rval['results'] = json.loads(stdout)
            except ValueError as verr:
                if "No JSON object could be decoded" in verr.args:
                    rval['err'] = verr.args
    elif output_type == 'raw':
        rval['results'] = stdout if output else ''

    if self.verbose:
        print("STDOUT: {0}".format(stdout))
        print("STDERR: {0}".format(stderr))

    if 'err' in rval or returncode != 0:
        rval.update({"stderr": stderr,
                     "stdout": stdout})

    return rval
[ "def", "openshift_cmd", "(", "self", ",", "cmd", ",", "oadm", "=", "False", ",", "output", "=", "False", ",", "output_type", "=", "'json'", ",", "input_data", "=", "None", ")", ":", "cmds", "=", "[", "self", ".", "oc_binary", "]", "if", "oadm", ":", ...
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/lib_openshift/library/oc_pvc.py#L1128-L1172
azavea/raster-vision
fc181a6f31f085affa1ee12f0204bdbc5a6bf85a
rastervision_core/rastervision/core/data/label/semantic_segmentation_labels.py
python
SemanticSegmentationSmoothLabels.__delitem__
(self, window: Box)
Reset scores and hits for pixels in the window.
Reset scores and hits for pixels in the window.
[ "Reset", "scores", "and", "hits", "for", "pixels", "in", "the", "window", "." ]
def __delitem__(self, window: Box) -> None:
    """Reset scores and hits for pixels in the window."""
    y0, x0, y1, x1 = self._to_local_coords(window)
    self.pixel_scores[..., y0:y1, x0:x1] = 0
    self.pixel_hits[..., y0:y1, x0:x1] = 0
[ "def", "__delitem__", "(", "self", ",", "window", ":", "Box", ")", "->", "None", ":", "y0", ",", "x0", ",", "y1", ",", "x1", "=", "self", ".", "_to_local_coords", "(", "window", ")", "self", ".", "pixel_scores", "[", "...", ",", "y0", ":", "y1", ...
https://github.com/azavea/raster-vision/blob/fc181a6f31f085affa1ee12f0204bdbc5a6bf85a/rastervision_core/rastervision/core/data/label/semantic_segmentation_labels.py#L229-L233
inkandswitch/livebook
93c8d467734787366ad084fc3566bf5cbe249c51
public/pypyjs/modules/uuid.py
python
_random_getnode
()
return random.randrange(0, 1<<48L) | 0x010000000000L
Get a random node ID, with eighth bit set as suggested by RFC 4122.
Get a random node ID, with eighth bit set as suggested by RFC 4122.
[ "Get", "a", "random", "node", "ID", "with", "eighth", "bit", "set", "as", "suggested", "by", "RFC", "4122", "." ]
def _random_getnode():
    """Get a random node ID, with eighth bit set as suggested by RFC 4122."""
    import random
    return random.randrange(0, 1<<48L) | 0x010000000000L
[ "def", "_random_getnode", "(", ")", ":", "import", "random", "return", "random", ".", "randrange", "(", "0", ",", "1", "<<", "48L", ")", "|", "0x010000000000L" ]
https://github.com/inkandswitch/livebook/blob/93c8d467734787366ad084fc3566bf5cbe249c51/public/pypyjs/modules/uuid.py#L527-L530
druid-io/pydruid
98cab4d9c2a08a35667b26a15dee21bdb77422b4
pydruid/utils/filters.py
python
JavaScript.__eq__
(self, func)
return Filter(type="javascript", dimension=self.dimension, function=func)
[]
def __eq__(self, func):
    return Filter(type="javascript", dimension=self.dimension, function=func)
[ "def", "__eq__", "(", "self", ",", "func", ")", ":", "return", "Filter", "(", "type", "=", "\"javascript\"", ",", "dimension", "=", "self", ".", "dimension", ",", "function", "=", "func", ")" ]
https://github.com/druid-io/pydruid/blob/98cab4d9c2a08a35667b26a15dee21bdb77422b4/pydruid/utils/filters.py#L180-L181
deepdrive/deepdrive
11adb9480ffeba832231e15eb545ec9aba1f7d65
vendor/tensorflow/models/research/slim/datasets/dataset_utils.py
python
write_label_file
(labels_to_class_names, dataset_dir, filename=LABELS_FILENAME)
Writes a file with the list of class names. Args: labels_to_class_names: A map of (integer) labels to class names. dataset_dir: The directory in which the labels file should be written. filename: The filename where the class names are written.
Writes a file with the list of class names.
[ "Writes", "a", "file", "with", "the", "list", "of", "class", "names", "." ]
def write_label_file(labels_to_class_names, dataset_dir,
                     filename=LABELS_FILENAME):
  """Writes a file with the list of class names.

  Args:
    labels_to_class_names: A map of (integer) labels to class names.
    dataset_dir: The directory in which the labels file should be written.
    filename: The filename where the class names are written.
  """
  labels_filename = os.path.join(dataset_dir, filename)
  with tf.gfile.Open(labels_filename, 'w') as f:
    for label in labels_to_class_names:
      class_name = labels_to_class_names[label]
      f.write('%d:%s\n' % (label, class_name))
[ "def", "write_label_file", "(", "labels_to_class_names", ",", "dataset_dir", ",", "filename", "=", "LABELS_FILENAME", ")", ":", "labels_filename", "=", "os", ".", "path", ".", "join", "(", "dataset_dir", ",", "filename", ")", "with", "tf", ".", "gfile", ".", ...
https://github.com/deepdrive/deepdrive/blob/11adb9480ffeba832231e15eb545ec9aba1f7d65/vendor/tensorflow/models/research/slim/datasets/dataset_utils.py#L101-L114
RobbieHan/sandboxMP
9216755a5bf05f33b8a62fd33b8fa607772d3725
apps/utils/sandbox_utils.py
python
ConfigFileMixin.get_commands
(self)
return self.get_conf_content(*key)
Get the commands from config file.
Get the commands from config file.
[ "Get", "the", "commands", "from", "config", "file", "." ]
def get_commands(self):
    """
    Get the commands from config file.
    """
    key = ['hosts', 'commands']
    return self.get_conf_content(*key)
[ "def", "get_commands", "(", "self", ")", ":", "key", "=", "[", "'hosts'", ",", "'commands'", "]", "return", "self", ".", "get_conf_content", "(", "*", "key", ")" ]
https://github.com/RobbieHan/sandboxMP/blob/9216755a5bf05f33b8a62fd33b8fa607772d3725/apps/utils/sandbox_utils.py#L63-L68
cython/cython
9db1fc39b31b7b3b2ed574a79f5f9fd980ee3be7
Cython/Tempita/_tempita.py
python
lex
(s, name=None, trim_whitespace=True, line_offset=0, delimeters=None)
return chunks
Lex a string into chunks: >>> lex('hey') ['hey'] >>> lex('hey {{you}}') ['hey ', ('you', (1, 7))] >>> lex('hey {{') Traceback (most recent call last): ... TemplateError: No }} to finish last expression at line 1 column 7 >>> lex('hey }}') Traceback (most recent call last): ... TemplateError: }} outside expression at line 1 column 7 >>> lex('hey {{ {{') Traceback (most recent call last): ... TemplateError: {{ inside expression at line 1 column 10
Lex a string into chunks:
[ "Lex", "a", "string", "into", "chunks", ":" ]
def lex(s, name=None, trim_whitespace=True, line_offset=0, delimeters=None): """ Lex a string into chunks: >>> lex('hey') ['hey'] >>> lex('hey {{you}}') ['hey ', ('you', (1, 7))] >>> lex('hey {{') Traceback (most recent call last): ... TemplateError: No }} to finish last expression at line 1 column 7 >>> lex('hey }}') Traceback (most recent call last): ... TemplateError: }} outside expression at line 1 column 7 >>> lex('hey {{ {{') Traceback (most recent call last): ... TemplateError: {{ inside expression at line 1 column 10 """ if delimeters is None: delimeters = ( Template.default_namespace['start_braces'], Template.default_namespace['end_braces'] ) in_expr = False chunks = [] last = 0 last_pos = (line_offset + 1, 1) token_re = re.compile(r'%s|%s' % (re.escape(delimeters[0]), re.escape(delimeters[1]))) for match in token_re.finditer(s): expr = match.group(0) pos = find_position(s, match.end(), last, last_pos) if expr == delimeters[0] and in_expr: raise TemplateError('%s inside expression' % delimeters[0], position=pos, name=name) elif expr == delimeters[1] and not in_expr: raise TemplateError('%s outside expression' % delimeters[1], position=pos, name=name) if expr == delimeters[0]: part = s[last:match.start()] if part: chunks.append(part) in_expr = True else: chunks.append((s[last:match.start()], last_pos)) in_expr = False last = match.end() last_pos = pos if in_expr: raise TemplateError('No %s to finish last expression' % delimeters[1], name=name, position=last_pos) part = s[last:] if part: chunks.append(part) if trim_whitespace: chunks = trim_lex(chunks) return chunks
[ "def", "lex", "(", "s", ",", "name", "=", "None", ",", "trim_whitespace", "=", "True", ",", "line_offset", "=", "0", ",", "delimeters", "=", "None", ")", ":", "if", "delimeters", "is", "None", ":", "delimeters", "=", "(", "Template", ".", "default_name...
https://github.com/cython/cython/blob/9db1fc39b31b7b3b2ed574a79f5f9fd980ee3be7/Cython/Tempita/_tempita.py#L630-L691
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/tasmota/config_flow.py
python
FlowHandler.__init__
(self)
Initialize flow.
Initialize flow.
[ "Initialize", "flow", "." ]
def __init__(self) -> None:
    """Initialize flow."""
    self._prefix = DEFAULT_PREFIX
[ "def", "__init__", "(", "self", ")", "->", "None", ":", "self", ".", "_prefix", "=", "DEFAULT_PREFIX" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/tasmota/config_flow.py#L20-L22
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/site-packages/Jinja2-2.6-py2.7.egg/jinja2/bccache.py
python
Bucket.write_bytecode
(self, f)
Dump the bytecode into the file or file like object passed.
Dump the bytecode into the file or file like object passed.
[ "Dump", "the", "bytecode", "into", "the", "file", "or", "file", "like", "object", "passed", "." ]
def write_bytecode(self, f):
    """Dump the bytecode into the file or file like object passed."""
    if self.code is None:
        raise TypeError('can\'t write empty bucket')
    f.write(bc_magic)
    pickle.dump(self.checksum, f, 2)
    marshal_dump(self.code, f)
[ "def", "write_bytecode", "(", "self", ",", "f", ")", ":", "if", "self", ".", "code", "is", "None", ":", "raise", "TypeError", "(", "'can\\'t write empty bucket'", ")", "f", ".", "write", "(", "bc_magic", ")", "pickle", ".", "dump", "(", "self", ".", "c...
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/site-packages/Jinja2-2.6-py2.7.egg/jinja2/bccache.py#L95-L101
qutebrowser/qutebrowser
3a2aaaacbf97f4bf0c72463f3da94ed2822a5442
qutebrowser/utils/version.py
python
ModuleInfo.is_installed
(self)
return self._installed
Checks whether the module is installed.
Checks whether the module is installed.
[ "Checks", "whether", "the", "module", "is", "installed", "." ]
def is_installed(self) -> bool:
    """Checks whether the module is installed."""
    if not self._initialized:
        self._initialize_info()
    return self._installed
[ "def", "is_installed", "(", "self", ")", "->", "bool", ":", "if", "not", "self", ".", "_initialized", ":", "self", ".", "_initialize_info", "(", ")", "return", "self", ".", "_installed" ]
https://github.com/qutebrowser/qutebrowser/blob/3a2aaaacbf97f4bf0c72463f3da94ed2822a5442/qutebrowser/utils/version.py#L356-L360
Antergos/Cnchi
13ac2209da9432d453e0097cf48a107640b563a9
src/misc/nm.py
python
NetworkManagerTreeView.get_passphrase
(self, ssid)
return cached
Get passphrase for Access Point (identified by ssid)
Get passphrase for Access Point (identified by ssid)
[ "Get", "passphrase", "for", "Access", "Point", "(", "identified", "by", "ssid", ")" ]
def get_passphrase(self, ssid): """ Get passphrase for Access Point (identified by ssid) """ try: cached = self.wifi_model.passphrases_cache[ssid] except KeyError: return '' return cached
[ "def", "get_passphrase", "(", "self", ",", "ssid", ")", ":", "try", ":", "cached", "=", "self", ".", "wifi_model", ".", "passphrases_cache", "[", "ssid", "]", "except", "KeyError", ":", "return", "''", "return", "cached" ]
https://github.com/Antergos/Cnchi/blob/13ac2209da9432d453e0097cf48a107640b563a9/src/misc/nm.py#L445-L451
yandexdataschool/AgentNet
c28b99f11eb5d1c9080c2368f387b2cc4942adc3
agentnet/utils/layers/helpers.py
python
get_layer_dtype
(layer, default=None)
return layer.output_dtype if hasattr(layer, "output_dtype") else default or theano.config.floatX
takes layer's output_dtype property if it is defined, otherwise defaults to default or (if it's not given) theano.config.floatX
takes layer's output_dtype property if it is defined, otherwise defaults to default or (if it's not given) theano.config.floatX
[ "takes", "layer", "s", "output_dtype", "property", "if", "it", "is", "defined", "otherwise", "defaults", "to", "default", "or", "(", "if", "it", "s", "not", "given", ")", "theano", ".", "config", ".", "floatX" ]
def get_layer_dtype(layer, default=None): """ takes layer's output_dtype property if it is defined, otherwise defaults to default or (if it's not given) theano.config.floatX""" return layer.output_dtype if hasattr(layer, "output_dtype") else default or theano.config.floatX
[ "def", "get_layer_dtype", "(", "layer", ",", "default", "=", "None", ")", ":", "return", "layer", ".", "output_dtype", "if", "hasattr", "(", "layer", ",", "\"output_dtype\"", ")", "else", "default", "or", "theano", ".", "config", ".", "floatX" ]
https://github.com/yandexdataschool/AgentNet/blob/c28b99f11eb5d1c9080c2368f387b2cc4942adc3/agentnet/utils/layers/helpers.py#L35-L38
SteveDoyle2/pyNastran
eda651ac2d4883d95a34951f8a002ff94f642a1a
pyNastran/bdf/cards/properties/bars.py
python
PBAR.I12
(self)
return self.i12
gets the section I12 moment of inertia
gets the section I12 moment of inertia
[ "gets", "the", "section", "I12", "moment", "of", "inertia" ]
def I12(self) -> float: """gets the section I12 moment of inertia""" return self.i12
[ "def", "I12", "(", "self", ")", "->", "float", ":", "return", "self", ".", "i12" ]
https://github.com/SteveDoyle2/pyNastran/blob/eda651ac2d4883d95a34951f8a002ff94f642a1a/pyNastran/bdf/cards/properties/bars.py#L1383-L1385
sagiebenaim/OneShotTranslation
6f790ae5f4eb53f86bd676fbbf59f81937d8a707
mnist_to_svhn/solver_mnist_to_svhn.py
python
Solver.to_var
(self, x, volatile=False)
return Variable(x)
Converts numpy to variable.
Converts numpy to variable.
[ "Converts", "numpy", "to", "variable", "." ]
def to_var(self, x, volatile=False): """Converts numpy to variable.""" if torch.cuda.is_available(): x = x.cuda() if volatile: return Variable(x, volatile=True) return Variable(x)
[ "def", "to_var", "(", "self", ",", "x", ",", "volatile", "=", "False", ")", ":", "if", "torch", ".", "cuda", ".", "is_available", "(", ")", ":", "x", "=", "x", ".", "cuda", "(", ")", "if", "volatile", ":", "return", "Variable", "(", "x", ",", "...
https://github.com/sagiebenaim/OneShotTranslation/blob/6f790ae5f4eb53f86bd676fbbf59f81937d8a707/mnist_to_svhn/solver_mnist_to_svhn.py#L72-L78
devitocodes/devito
6abd441e3f5f091775ad332be6b95e017b8cbd16
devito/types/basic.py
python
AbstractFunction.dtype
(self)
return self._dtype
The data type of the object.
The data type of the object.
[ "The", "data", "type", "of", "the", "object", "." ]
def dtype(self): """The data type of the object.""" return self._dtype
[ "def", "dtype", "(", "self", ")", ":", "return", "self", ".", "_dtype" ]
https://github.com/devitocodes/devito/blob/6abd441e3f5f091775ad332be6b95e017b8cbd16/devito/types/basic.py#L910-L912
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/litejet/switch.py
python
async_setup_entry
( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, )
Set up entry.
Set up entry.
[ "Set", "up", "entry", "." ]
async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry.""" system = hass.data[DOMAIN] def get_entities(system): entities = [] for i in system.button_switches(): name = system.get_switch_name(i) entities.append(LiteJetSwitch(config_entry.entry_id, system, i, name)) return entities async_add_entities(await hass.async_add_executor_job(get_entities, system), True)
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "config_entry", ":", "ConfigEntry", ",", "async_add_entities", ":", "AddEntitiesCallback", ",", ")", "->", "None", ":", "system", "=", "hass", ".", "data", "[", "DOMAIN", "]", "def", ...
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/litejet/switch.py#L16-L32
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/site-packages/pip/_vendor/requests/sessions.py
python
SessionRedirectMixin.rebuild_method
(self, prepared_request, response)
When being redirected we may want to change the method of the request based on certain specs or browser behavior.
When being redirected we may want to change the method of the request based on certain specs or browser behavior.
[ "When", "being", "redirected", "we", "may", "want", "to", "change", "the", "method", "of", "the", "request", "based", "on", "certain", "specs", "or", "browser", "behavior", "." ]
def rebuild_method(self, prepared_request, response): """When being redirected we may want to change the method of the request based on certain specs or browser behavior. """ method = prepared_request.method # https://tools.ietf.org/html/rfc7231#section-6.4.4 if response.status_code == codes.see_other and method != 'HEAD': method = 'GET' # Do what the browsers do, despite standards... # First, turn 302s into GETs. if response.status_code == codes.found and method != 'HEAD': method = 'GET' # Second, if a POST is responded to with a 301, turn it into a GET. # This bizarre behaviour is explained in Issue 1704. if response.status_code == codes.moved and method == 'POST': method = 'GET' prepared_request.method = method
[ "def", "rebuild_method", "(", "self", ",", "prepared_request", ",", "response", ")", ":", "method", "=", "prepared_request", ".", "method", "# https://tools.ietf.org/html/rfc7231#section-6.4.4", "if", "response", ".", "status_code", "==", "codes", ".", "see_other", "a...
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/site-packages/pip/_vendor/requests/sessions.py#L317-L337
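The redirect rules encoded above can be exercised directly; a minimal sketch using the stand-alone requests distribution (this record is pip's vendored copy of the same code; the URL and status code are placeholders):

import requests

session = requests.Session()
prepared = requests.Request('POST', 'http://example.com/old').prepare()

response = requests.Response()
response.status_code = 301          # codes.moved

session.rebuild_method(prepared, response)
print(prepared.method)              # 'GET' -- a 301 answer to a POST is re-sent as GET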
Delta-ML/delta
31dfebc8f20b7cb282b62f291ff25a87e403cc86
delta/utils/solver/keras_base_solver.py
python
KerasBaseSolver.build
(self, multi_gpu=False)
main entrypoint to build model
main entrypoint to build model
[ "main", "entrypoint", "to", "build", "model" ]
def build(self, multi_gpu=False): ''' main entrypoint to build model ''' assert self.model loss = self.get_loss() optimizer = self.get_optimizer() run_opts, run_metas = self.get_run_opts_metas() # compile model if self.ngpu > 1 and multi_gpu: self._parallel_model = multi_gpu_model( self.model, gpus=self.ngpu, cpu_relocation=False, cpu_merge=False) self.parallel_model.compile( loss=loss, optimizer=optimizer, metrics=self._metrics_used, options=run_opts, run_metadata=run_metas) else: self.model.compile( loss=loss, optimizer=optimizer, metrics=self._metrics_used, options=run_opts, run_metadata=run_metas) # Print model summary if self.model.built and self.model._is_graph_network: self.model.summary() self._built = True
[ "def", "build", "(", "self", ",", "multi_gpu", "=", "False", ")", ":", "assert", "self", ".", "model", "loss", "=", "self", ".", "get_loss", "(", ")", "optimizer", "=", "self", ".", "get_optimizer", "(", ")", "run_opts", ",", "run_metas", "=", "self", ...
https://github.com/Delta-ML/delta/blob/31dfebc8f20b7cb282b62f291ff25a87e403cc86/delta/utils/solver/keras_base_solver.py#L142-L172
jkszw2014/bert-kbqa-NLPCC2017
c09511829377b959a8ad5c81f5581e742ba13dc9
AttributeMap-BERT-Classification/run_classifier.py
python
convert_examples_to_features
(examples, label_list, max_seq_length, tokenizer)
return features
Convert a set of `InputExample`s to a list of `InputFeatures`.
Convert a set of `InputExample`s to a list of `InputFeatures`.
[ "Convert", "a", "set", "of", "InputExample", "s", "to", "a", "list", "of", "InputFeatures", "." ]
def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer): """Convert a set of `InputExample`s to a list of `InputFeatures`.""" features = [] for (ex_index, example) in enumerate(examples): if ex_index % 10000 == 0: tf.logging.info("Writing example %d of %d" % (ex_index, len(examples))) feature = convert_single_example(ex_index, example, label_list, max_seq_length, tokenizer) features.append(feature) return features
[ "def", "convert_examples_to_features", "(", "examples", ",", "label_list", ",", "max_seq_length", ",", "tokenizer", ")", ":", "features", "=", "[", "]", "for", "(", "ex_index", ",", "example", ")", "in", "enumerate", "(", "examples", ")", ":", "if", "ex_inde...
https://github.com/jkszw2014/bert-kbqa-NLPCC2017/blob/c09511829377b959a8ad5c81f5581e742ba13dc9/AttributeMap-BERT-Classification/run_classifier.py#L838-L851
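In the surrounding run_classifier.py flow the call typically looks like the hedged sketch below; processor, tokenizer and FLAGS are assumed to be set up elsewhere in that script and are not defined here:

train_examples = processor.get_train_examples(FLAGS.data_dir)   # assumed DataProcessor instance
label_list = processor.get_labels()
train_features = convert_examples_to_features(
    train_examples, label_list, FLAGS.max_seq_length, tokenizer)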
TesterlifeRaymond/doraemon
d5cb6e34bd5f2aa97273ce0c0c9303e32beaa333
venv/lib/python3.6/site-packages/pip/_vendor/requests/models.py
python
Response.__bool__
(self)
return self.ok
Returns true if :attr:`status_code` is 'OK'.
Returns true if :attr:`status_code` is 'OK'.
[ "Returns", "true", "if", ":", "attr", ":", "status_code", "is", "OK", "." ]
def __bool__(self): """Returns true if :attr:`status_code` is 'OK'.""" return self.ok
[ "def", "__bool__", "(", "self", ")", ":", "return", "self", ".", "ok" ]
https://github.com/TesterlifeRaymond/doraemon/blob/d5cb6e34bd5f2aa97273ce0c0c9303e32beaa333/venv/lib/python3.6/site-packages/pip/_vendor/requests/models.py#L618-L620
canonical/cloud-init
dc1aabfca851e520693c05322f724bd102c76364
cloudinit/net/__init__.py
python
interface_has_own_mac
(ifname, strict=False)
return assign_type in (0, 1, 3)
return True if the provided interface has its own address. Based on addr_assign_type in /sys. Return true for any interface that does not have a 'stolen' address. Examples of such devices are bonds or vlans that inherit their mac from another device. Possible values are: 0: permanent address 2: stolen from another device 1: randomly generated 3: set using dev_set_mac_address
return True if the provided interface has its own address.
[ "return", "True", "if", "the", "provided", "interface", "has", "its", "own", "address", "." ]
def interface_has_own_mac(ifname, strict=False): """return True if the provided interface has its own address. Based on addr_assign_type in /sys. Return true for any interface that does not have a 'stolen' address. Examples of such devices are bonds or vlans that inherit their mac from another device. Possible values are: 0: permanent address 2: stolen from another device 1: randomly generated 3: set using dev_set_mac_address""" assign_type = read_sys_net_int(ifname, "addr_assign_type") if assign_type is None: # None is returned if this nic had no 'addr_assign_type' entry. # if strict, raise an error, if not return True. if strict: raise ValueError("%s had no addr_assign_type.") return True return assign_type in (0, 1, 3)
[ "def", "interface_has_own_mac", "(", "ifname", ",", "strict", "=", "False", ")", ":", "assign_type", "=", "read_sys_net_int", "(", "ifname", ",", "\"addr_assign_type\"", ")", "if", "assign_type", "is", "None", ":", "# None is returned if this nic had no 'addr_assign_typ...
https://github.com/canonical/cloud-init/blob/dc1aabfca851e520693c05322f724bd102c76364/cloudinit/net/__init__.py#L607-L624
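A hedged usage sketch: filtering out interfaces whose MAC is inherited from another device; get_devicelist is assumed to be the sibling helper in cloudinit.net that lists kernel network devices:

from cloudinit.net import get_devicelist, interface_has_own_mac

# VLANs and bonds typically report addr_assign_type == 2 ("stolen") and are skipped here
physical_like = [nic for nic in get_devicelist() if interface_has_own_mac(nic)]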
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/emr/v20190103/emr_client.py
python
EmrClient.SyncPodState
(self, request)
Synchronize pod state in TKE for EMR :param request: Request instance for SyncPodState. :type request: :class:`tencentcloud.emr.v20190103.models.SyncPodStateRequest` :rtype: :class:`tencentcloud.emr.v20190103.models.SyncPodStateResponse`
Synchronize pod state in TKE for EMR
[ "Synchronize", "pod", "state", "in", "TKE", "for", "EMR" ]
def SyncPodState(self, request): """EMR同步TKE中POD状态 :param request: Request instance for SyncPodState. :type request: :class:`tencentcloud.emr.v20190103.models.SyncPodStateRequest` :rtype: :class:`tencentcloud.emr.v20190103.models.SyncPodStateResponse` """ try: params = request._serialize() body = self.call("SyncPodState", params) response = json.loads(body) if "Error" not in response["Response"]: model = models.SyncPodStateResponse() model._deserialize(response["Response"]) return model else: code = response["Response"]["Error"]["Code"] message = response["Response"]["Error"]["Message"] reqid = response["Response"]["RequestId"] raise TencentCloudSDKException(code, message, reqid) except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(e.message, e.message)
[ "def", "SyncPodState", "(", "self", ",", "request", ")", ":", "try", ":", "params", "=", "request", ".", "_serialize", "(", ")", "body", "=", "self", ".", "call", "(", "\"SyncPodState\"", ",", "params", ")", "response", "=", "json", ".", "loads", "(", ...
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/emr/v20190103/emr_client.py#L393-L418
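A usage sketch following the generic tencentcloud-sdk-python client pattern; the credentials and region are placeholders and the request fields are left unset here:

from tencentcloud.common import credential
from tencentcloud.emr.v20190103 import emr_client, models

cred = credential.Credential("SecretId", "SecretKey")   # placeholder credentials
client = emr_client.EmrClient(cred, "ap-guangzhou")

req = models.SyncPodStateRequest()
# req fields would be populated from the EMR/TKE pod being reported on
resp = client.SyncPodState(req)
print(resp.to_json_string())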
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/encodings/mac_cyrillic.py
python
IncrementalEncoder.encode
(self, input, final=False)
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
[]
def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0]
[ "def", "encode", "(", "self", ",", "input", ",", "final", "=", "False", ")", ":", "return", "codecs", ".", "charmap_encode", "(", "input", ",", "self", ".", "errors", ",", "encoding_table", ")", "[", "0", "]" ]
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/encodings/mac_cyrillic.py#L18-L19
cgre-aachen/gempy
6ad16c46fc6616c9f452fba85d31ce32decd8b10
gempy/core/grid_modules/grid_types.py
python
CenteredGrid.set_centered_grid
(self, centers, kernel_centers=None, **kwargs)
Main method of the class, set the XYZ values around centers using a kernel. Args: centers (np.array): XYZ array with the centers of where we want to create a grid around kernel_centers (Optional[np.array]): center of the voxels of a desired kernel. **kwargs: * resolution: [s0] * radius (float): Maximum distance of the kernel Returns:
Main method of the class, set the XYZ values around centers using a kernel.
[ "Main", "method", "of", "the", "class", "set", "the", "XYZ", "values", "around", "centers", "using", "a", "kernel", "." ]
def set_centered_grid(self, centers, kernel_centers=None, **kwargs): """ Main method of the class, set the XYZ values around centers using a kernel. Args: centers (np.array): XYZ array with the centers of where we want to create a grid around kernel_centers (Optional[np.array]): center of the voxels of a desired kernel. **kwargs: * resolution: [s0] * radius (float): Maximum distance of the kernel Returns: """ self.values = np.empty((0, 3)) centers = np.atleast_2d(centers) if kernel_centers is None: kernel_centers = self.set_centered_kernel(**kwargs) assert centers.shape[ 1] == 3, 'Centers must be a numpy array that contains the coordinates XYZ' for i in centers: self.values = np.vstack((self.values, i + kernel_centers)) self.length = self.values.shape[0]
[ "def", "set_centered_grid", "(", "self", ",", "centers", ",", "kernel_centers", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "values", "=", "np", ".", "empty", "(", "(", "0", ",", "3", ")", ")", "centers", "=", "np", ".", "atleast_2...
https://github.com/cgre-aachen/gempy/blob/6ad16c46fc6616c9f452fba85d31ce32decd8b10/gempy/core/grid_modules/grid_types.py#L419-L445
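A hedged sketch, assuming `grid` is an existing CenteredGrid instance; resolution and radius are forwarded to set_centered_kernel as the docstring describes, and the numeric values are illustrative:

import numpy as np

centers = np.array([[0., 0., 0.],
                    [100., 100., 0.]])
grid.set_centered_grid(centers, resolution=[10, 10, 15], radius=500.)
# grid.values now stacks one kernel-shifted block of XYZ points per center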
seppius-xbmc-repo/ru
d0879d56ec8243b2c7af44fda5cf3d1ff77fd2e2
script.module.beautifulsoup4/lib/bs4/builder/_htmlparser.py
python
BeautifulSoupHTMLParser.unknown_decl
(self, data)
[]
def unknown_decl(self, data): if data.upper().startswith('CDATA['): cls = CData data = data[len('CDATA['):] else: cls = Declaration self.soup.endData() self.soup.handle_data(data) self.soup.endData(cls)
[ "def", "unknown_decl", "(", "self", ",", "data", ")", ":", "if", "data", ".", "upper", "(", ")", ".", "startswith", "(", "'CDATA['", ")", ":", "cls", "=", "CData", "data", "=", "data", "[", "len", "(", "'CDATA['", ")", ":", "]", "else", ":", "cls...
https://github.com/seppius-xbmc-repo/ru/blob/d0879d56ec8243b2c7af44fda5cf3d1ff77fd2e2/script.module.beautifulsoup4/lib/bs4/builder/_htmlparser.py#L104-L112
GantMan/nsfw_model
7014aba25b3ec24be442394a7b049e1b058a99f4
training/make_nsfw_model_lib.py
python
train_model
(model, hparams, train_data_and_size, valid_data_and_size)
return model.fit( train_data, use_multiprocessing=False, workers=multiprocessing.cpu_count() -1, epochs=hparams.train_epochs, callbacks=[earlystop_callback], steps_per_epoch=steps_per_epoch, validation_data=valid_data, validation_steps=validation_steps)
Trains model with the given data and hyperparameters. Args: model: The tf.keras.Model from _build_model(). hparams: A namedtuple of hyperparameters. This function expects .train_epochs: a Python integer with the number of passes over the training dataset; .learning_rate: a Python float forwarded to the optimizer; .momentum: a Python float forwarded to the optimizer; .batch_size: a Python integer, the number of examples returned by each call to the generators. train_data_and_size: A (data, size) tuple in which data is training data to be fed in tf.keras.Model.fit(), size is a Python integer with the numbers of training. valid_data_and_size: A (data, size) tuple in which data is validation data to be fed in tf.keras.Model.fit(), size is a Python integer with the numbers of validation. Returns: The tf.keras.callbacks.History object returned by tf.keras.Model.fit().
Trains model with the given data and hyperparameters.
[ "Trains", "model", "with", "the", "given", "data", "and", "hyperparameters", "." ]
def train_model(model, hparams, train_data_and_size, valid_data_and_size): """Trains model with the given data and hyperparameters. Args: model: The tf.keras.Model from _build_model(). hparams: A namedtuple of hyperparameters. This function expects .train_epochs: a Python integer with the number of passes over the training dataset; .learning_rate: a Python float forwarded to the optimizer; .momentum: a Python float forwarded to the optimizer; .batch_size: a Python integer, the number of examples returned by each call to the generators. train_data_and_size: A (data, size) tuple in which data is training data to be fed in tf.keras.Model.fit(), size is a Python integer with the numbers of training. valid_data_and_size: A (data, size) tuple in which data is validation data to be fed in tf.keras.Model.fit(), size is a Python integer with the numbers of validation. Returns: The tf.keras.callbacks.History object returned by tf.keras.Model.fit(). """ earlystop_callback = tf.keras.callbacks.EarlyStopping( monitor='val_accuracy', min_delta=0.0001, patience=1) train_data, train_size = train_data_and_size valid_data, valid_size = valid_data_and_size # TODO(b/139467904): Expose this hyperparameter as a flag. loss = tf.keras.losses.CategoricalCrossentropy(label_smoothing=hparams.label_smoothing) if hparams.use_mixed_precision is True: optimizer = tf.train.experimental.enable_mixed_precision_graph_rewrite(configure_optimizer(hparams)) else: optimizer = configure_optimizer(hparams) model.compile( optimizer=optimizer, loss=loss, metrics=["accuracy"]) steps_per_epoch = train_size // hparams.batch_size validation_steps = valid_size // hparams.batch_size return model.fit( train_data, use_multiprocessing=False, workers=multiprocessing.cpu_count() -1, epochs=hparams.train_epochs, callbacks=[earlystop_callback], steps_per_epoch=steps_per_epoch, validation_data=valid_data, validation_steps=validation_steps)
[ "def", "train_model", "(", "model", ",", "hparams", ",", "train_data_and_size", ",", "valid_data_and_size", ")", ":", "earlystop_callback", "=", "tf", ".", "keras", ".", "callbacks", ".", "EarlyStopping", "(", "monitor", "=", "'val_accuracy'", ",", "min_delta", ...
https://github.com/GantMan/nsfw_model/blob/7014aba25b3ec24be442394a7b049e1b058a99f4/training/make_nsfw_model_lib.py#L309-L360
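The hparams namedtuple the docstring describes can be mocked up as below; field values are illustrative, and configure_optimizer may read further fields not shown here:

import collections

HParams = collections.namedtuple('HParams', [
    'train_epochs', 'learning_rate', 'momentum', 'batch_size',
    'label_smoothing', 'use_mixed_precision'])

hparams = HParams(train_epochs=5, learning_rate=0.01, momentum=0.9,
                  batch_size=32, label_smoothing=0.1, use_mixed_precision=False)

# history = train_model(model, hparams, (train_data, train_size), (valid_data, valid_size))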
gwastro/pycbc
1e1c85534b9dba8488ce42df693230317ca63dea
pycbc/inference/models/gated_gaussian_noise.py
python
BaseGatedGaussian.get_gate_times_hmeco
(self)
return gatetimes
Gets the time to apply a gate based on the current sky position. Returns ------- dict : Dictionary of detector names -> (gate start, gate width)
Gets the time to apply a gate based on the current sky position. Returns ------- dict : Dictionary of detector names -> (gate start, gate width)
[ "Gets", "the", "time", "to", "apply", "a", "gate", "based", "on", "the", "current", "sky", "position", ".", "Returns", "-------", "dict", ":", "Dictionary", "of", "detector", "names", "-", ">", "(", "gate", "start", "gate", "width", ")" ]
def get_gate_times_hmeco(self): """Gets the time to apply a gate based on the current sky position. Returns ------- dict : Dictionary of detector names -> (gate start, gate width) """ # generate the template waveform try: wfs = self.get_waveforms() except NoWaveformError: return self._nowaveform_logl() except FailedWaveformError as e: if self.ignore_failed_waveforms: return self._nowaveform_logl() raise e # get waveform parameters params = self.current_params spin1 = params['spin1z'] spin2 = params['spin2z'] # gate input for ringdown analysis which consideres a start time # and an end time dgate = params['gate_window'] meco_f = hybrid_meco_frequency(params['mass1'], params['mass2'], spin1, spin2) # figure out the gate times gatetimes = {} for det, h in wfs.items(): invpsd = self._invpsds[det] h.resize(len(invpsd)) ht = h.to_timeseries() f_low = int((self._f_lower[det]+1)/h.delta_f) sample_freqs = h.sample_frequencies[f_low:].numpy() f_idx = numpy.where(sample_freqs <= meco_f)[0][-1] # find time corresponding to meco frequency t_from_freq = time_from_frequencyseries( h[f_low:], sample_frequencies=sample_freqs) if t_from_freq[f_idx] > 0: gatestartdelay = t_from_freq[f_idx] + float(t_from_freq.epoch) else: gatestartdelay = t_from_freq[f_idx] + ht.sample_times[-1] gatestartdelay = min(gatestartdelay, params['t_gate_start']) gatetimes[det] = (gatestartdelay, dgate) return gatetimes
[ "def", "get_gate_times_hmeco", "(", "self", ")", ":", "# generate the template waveform", "try", ":", "wfs", "=", "self", ".", "get_waveforms", "(", ")", "except", "NoWaveformError", ":", "return", "self", ".", "_nowaveform_logl", "(", ")", "except", "FailedWavefo...
https://github.com/gwastro/pycbc/blob/1e1c85534b9dba8488ce42df693230317ca63dea/pycbc/inference/models/gated_gaussian_noise.py#L341-L384
PaddlePaddle/models
511e2e282960ed4c7440c3f1d1e62017acb90e11
tutorials/mobilenetv3_prod/Step1-5/mobilenetv3_ref/utils.py
python
SmoothedValue.value
(self)
return self.deque[-1]
[]
def value(self): return self.deque[-1]
[ "def", "value", "(", "self", ")", ":", "return", "self", ".", "deque", "[", "-", "1", "]" ]
https://github.com/PaddlePaddle/models/blob/511e2e282960ed4c7440c3f1d1e62017acb90e11/tutorials/mobilenetv3_prod/Step1-5/mobilenetv3_ref/utils.py#L64-L65
itailang/SampleNet
442459abc54f9e14f0966a169a094a98febd32eb
reconstruction/src/autoencoder.py
python
AutoEncoder.get_reconstructions_from_sampled
(self, pclouds, batch_size=50)
return np.vstack(reconstructions)
Get the reconstructions for a set of sampled point clouds. Args: pclouds (N, K, 3) numpy array of N point clouds with K points each. batch_size size of point clouds batch
Get the reconstructions for a set of sampled point clouds. Args: pclouds (N, K, 3) numpy array of N point clouds with K points each. batch_size size of point clouds batch
[ "Get", "the", "reconstructions", "for", "a", "set", "of", "sampled", "point", "clouds", ".", "Args", ":", "pclouds", "(", "N", "K", "3", ")", "numpy", "array", "of", "N", "point", "clouds", "with", "K", "points", "each", ".", "batch_size", "size", "of"...
def get_reconstructions_from_sampled(self, pclouds, batch_size=50): """ Get the reconstructions for a set of sampled point clouds. Args: pclouds (N, K, 3) numpy array of N point clouds with K points each. batch_size size of point clouds batch """ reconstructions = [] idx = np.arange(len(pclouds)) for b in iterate_in_chunks(idx, batch_size): feed_dict = {self.s: pclouds[b]} rcon = self.sess.run(self.x_reconstr, feed_dict=feed_dict) reconstructions.append(rcon) return np.vstack(reconstructions)
[ "def", "get_reconstructions_from_sampled", "(", "self", ",", "pclouds", ",", "batch_size", "=", "50", ")", ":", "reconstructions", "=", "[", "]", "idx", "=", "np", ".", "arange", "(", "len", "(", "pclouds", ")", ")", "for", "b", "in", "iterate_in_chunks", ...
https://github.com/itailang/SampleNet/blob/442459abc54f9e14f0966a169a094a98febd32eb/reconstruction/src/autoencoder.py#L323-L335
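Usage sketch, assuming `ae` is a trained AutoEncoder instance and `sampled_clouds` an (N, K, 3) numpy array as the docstring expects:

recons = ae.get_reconstructions_from_sampled(sampled_clouds, batch_size=50)
# recons is a single numpy array stacking one reconstructed cloud per input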
mesonbuild/meson
a22d0f9a0a787df70ce79b05d0c45de90a970048
mesonbuild/compilers/mixins/visualstudio.py
python
ClangClCompiler.get_toolset_version
(self)
return '14.1'
[]
def get_toolset_version(self) -> T.Optional[str]: # XXX: what is the right thing to do here? return '14.1'
[ "def", "get_toolset_version", "(", "self", ")", "->", "T", ".", "Optional", "[", "str", "]", ":", "# XXX: what is the right thing to do here?", "return", "'14.1'" ]
https://github.com/mesonbuild/meson/blob/a22d0f9a0a787df70ce79b05d0c45de90a970048/mesonbuild/compilers/mixins/visualstudio.py#L435-L437
pymeasure/pymeasure
b4d888e9ead85ef7f7af0031f2dbb44c9ce1825e
pymeasure/instruments/comedi.py
python
getAO
(device, channel, range=None)
return ao
Returns the analog output channel as specified for a given device
Returns the analog output channel as specified for a given device
[ "Returns", "the", "analog", "output", "channel", "as", "specified", "for", "a", "given", "device" ]
def getAO(device, channel, range=None): """ Returns the analog output channel as specified for a given device """ ao = device.find_subdevice_by_type( SUBDEVICE_TYPE.ao, factory=StreamingSubdevice ).channel(channel, factory=AnalogChannel, aref=AREF.diff) if range is not None: ao.range = ao.find_range(unit=UNIT.volt, min=range[0], max=range[1]) return ao
[ "def", "getAO", "(", "device", ",", "channel", ",", "range", "=", "None", ")", ":", "ao", "=", "device", ".", "find_subdevice_by_type", "(", "SUBDEVICE_TYPE", ".", "ao", ",", "factory", "=", "StreamingSubdevice", ")", ".", "channel", "(", "channel", ",", ...
https://github.com/pymeasure/pymeasure/blob/b4d888e9ead85ef7f7af0031f2dbb44c9ce1825e/pymeasure/instruments/comedi.py#L48-L56
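A hedged sketch of opening a comedi device and grabbing analog output 0 with a +/-10 V range; the pycomedi import path is assumed from the package this module wraps:

from pycomedi.device import Device

device = Device('/dev/comedi0')
device.open()
ao0 = getAO(device, 0, range=(-10, 10))   # differential-referenced AO channel, +/-10 V range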
twisted/twisted
dee676b040dd38b847ea6fb112a712cb5e119490
src/twisted/enterprise/adbapi.py
python
ConnectionPool.__init__
(self, dbapiName, *connargs, **connkw)
Create a new L{ConnectionPool}. Any positional or keyword arguments other than those documented here are passed to the DB-API object when connecting. Use these arguments to pass database names, usernames, passwords, etc. @param dbapiName: an import string to use to obtain a DB-API compatible module (e.g. C{'pyPgSQL.PgSQL'}) @keyword cp_min: the minimum number of connections in pool (default 3) @keyword cp_max: the maximum number of connections in pool (default 5) @keyword cp_noisy: generate informational log messages during operation (default C{False}) @keyword cp_openfun: a callback invoked after every C{connect()} on the underlying DB-API object. The callback is passed a new DB-API connection object. This callback can setup per-connection state such as charset, timezone, etc. @keyword cp_reconnect: detect connections which have failed and reconnect (default C{False}). Failed connections may result in L{ConnectionLost} exceptions, which indicate the query may need to be re-sent. @keyword cp_good_sql: an sql query which should always succeed and change no state (default C{'select 1'}) @keyword cp_reactor: use this reactor instead of the global reactor (added in Twisted 10.2). @type cp_reactor: L{IReactorCore} provider
Create a new L{ConnectionPool}.
[ "Create", "a", "new", "L", "{", "ConnectionPool", "}", "." ]
def __init__(self, dbapiName, *connargs, **connkw): """ Create a new L{ConnectionPool}. Any positional or keyword arguments other than those documented here are passed to the DB-API object when connecting. Use these arguments to pass database names, usernames, passwords, etc. @param dbapiName: an import string to use to obtain a DB-API compatible module (e.g. C{'pyPgSQL.PgSQL'}) @keyword cp_min: the minimum number of connections in pool (default 3) @keyword cp_max: the maximum number of connections in pool (default 5) @keyword cp_noisy: generate informational log messages during operation (default C{False}) @keyword cp_openfun: a callback invoked after every C{connect()} on the underlying DB-API object. The callback is passed a new DB-API connection object. This callback can setup per-connection state such as charset, timezone, etc. @keyword cp_reconnect: detect connections which have failed and reconnect (default C{False}). Failed connections may result in L{ConnectionLost} exceptions, which indicate the query may need to be re-sent. @keyword cp_good_sql: an sql query which should always succeed and change no state (default C{'select 1'}) @keyword cp_reactor: use this reactor instead of the global reactor (added in Twisted 10.2). @type cp_reactor: L{IReactorCore} provider """ self.dbapiName = dbapiName self.dbapi = reflect.namedModule(dbapiName) if getattr(self.dbapi, "apilevel", None) != "2.0": log.msg("DB API module not DB API 2.0 compliant.") if getattr(self.dbapi, "threadsafety", 0) < 1: log.msg("DB API module not sufficiently thread-safe.") reactor = connkw.pop("cp_reactor", None) if reactor is None: from twisted.internet import reactor self._reactor = reactor self.connargs = connargs self.connkw = connkw for arg in self.CP_ARGS: cpArg = f"cp_{arg}" if cpArg in connkw: setattr(self, arg, connkw[cpArg]) del connkw[cpArg] self.min = min(self.min, self.max) self.max = max(self.min, self.max) # All connections, hashed on thread id self.connections = {} # These are optional so import them here from twisted.python import threadable, threadpool self.threadID = threadable.getThreadID self.threadpool = threadpool.ThreadPool(self.min, self.max) self.startID = self._reactor.callWhenRunning(self._start)
[ "def", "__init__", "(", "self", ",", "dbapiName", ",", "*", "connargs", ",", "*", "*", "connkw", ")", ":", "self", ".", "dbapiName", "=", "dbapiName", "self", ".", "dbapi", "=", "reflect", ".", "namedModule", "(", "dbapiName", ")", "if", "getattr", "("...
https://github.com/twisted/twisted/blob/dee676b040dd38b847ea6fb112a712cb5e119490/src/twisted/enterprise/adbapi.py#L164-L233
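A usage sketch built from the keyword arguments documented above; the sqlite3 module name, database file, and query are placeholders:

from twisted.enterprise import adbapi

dbpool = adbapi.ConnectionPool(
    'sqlite3', 'app.db',
    cp_min=3, cp_max=10,          # pool size bounds
    cp_reconnect=True,            # retry after ConnectionLost
    check_same_thread=False)      # forwarded to sqlite3.connect()
d = dbpool.runQuery('SELECT 1')   # returns a Deferred firing with the rows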
mdiazcl/fuzzbunch-debian
2b76c2249ade83a389ae3badb12a1bd09901fd2c
windows/Resources/Python/Core/Lib/mhlib.py
python
SubMessage.__repr__
(self)
return 'SubMessage(%s, %s, %s)' % (f, n, fp)
String representation.
String representation.
[ "String", "representation", "." ]
def __repr__(self): """String representation.""" f, n, fp = self.folder, self.number, self.fp return 'SubMessage(%s, %s, %s)' % (f, n, fp)
[ "def", "__repr__", "(", "self", ")", ":", "f", ",", "n", ",", "fp", "=", "self", ".", "folder", ",", "self", ".", "number", ",", "self", ".", "fp", "return", "'SubMessage(%s, %s, %s)'", "%", "(", "f", ",", "n", ",", "fp", ")" ]
https://github.com/mdiazcl/fuzzbunch-debian/blob/2b76c2249ade83a389ae3badb12a1bd09901fd2c/windows/Resources/Python/Core/Lib/mhlib.py#L767-L770
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/plat-mac/aetypes.py
python
IsKeyword
(x)
return isinstance(x, Keyword)
[]
def IsKeyword(x): return isinstance(x, Keyword)
[ "def", "IsKeyword", "(", "x", ")", ":", "return", "isinstance", "(", "x", ",", "Keyword", ")" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/plat-mac/aetypes.py#L142-L143
beeware/ouroboros
a29123c6fab6a807caffbb7587cf548e0c370296
ouroboros/tkinter/ttk.py
python
Treeview.selection_set
(self, items)
items becomes the new selection.
items becomes the new selection.
[ "items", "becomes", "the", "new", "selection", "." ]
def selection_set(self, items): """items becomes the new selection.""" self.selection("set", items)
[ "def", "selection_set", "(", "self", ",", "items", ")", ":", "self", ".", "selection", "(", "\"set\"", ",", "items", ")" ]
https://github.com/beeware/ouroboros/blob/a29123c6fab6a807caffbb7587cf548e0c370296/ouroboros/tkinter/ttk.py#L1398-L1400
openseg-group/openseg.pytorch
2cdb3de5dcbc96f531b68e5bf1233c860f247b3e
lib/datasets/preprocess/cityscapes/dt_offset_generator.py
python
sobel_kernel
(shape, axis)
return torch.from_numpy(k).unsqueeze(0)
shape must be odd: eg. (5,5) axis is the direction, with 0 to positive x and 1 to positive y
shape must be odd: eg. (5,5) axis is the direction, with 0 to positive x and 1 to positive y
[ "shape", "must", "be", "odd", ":", "eg", ".", "(", "5", "5", ")", "axis", "is", "the", "direction", "with", "0", "to", "positive", "x", "and", "1", "to", "positive", "y" ]
def sobel_kernel(shape, axis): """ shape must be odd: eg. (5,5) axis is the direction, with 0 to positive x and 1 to positive y """ k = np.zeros(shape) p = [ (j, i) for j in range(shape[0]) for i in range(shape[1]) if not (i == (shape[1] - 1) / 2.0 and j == (shape[0] - 1) / 2.0) ] for j, i in p: j_ = int(j - (shape[0] - 1) / 2.0) i_ = int(i - (shape[1] - 1) / 2.0) k[j, i] = (i_ if axis == 0 else j_) / float(i_ * i_ + j_ * j_) return torch.from_numpy(k).unsqueeze(0)
[ "def", "sobel_kernel", "(", "shape", ",", "axis", ")", ":", "k", "=", "np", ".", "zeros", "(", "shape", ")", "p", "=", "[", "(", "j", ",", "i", ")", "for", "j", "in", "range", "(", "shape", "[", "0", "]", ")", "for", "i", "in", "range", "("...
https://github.com/openseg-group/openseg.pytorch/blob/2cdb3de5dcbc96f531b68e5bf1233c860f247b3e/lib/datasets/preprocess/cityscapes/dt_offset_generator.py#L28-L45
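For a 3x3 kernel along x (axis=0) the formula above yields a Sobel-like mask; worked values below, with dtype following np.zeros (float64):

kx = sobel_kernel((3, 3), 0)
# kx.shape == (1, 3, 3); kx[0] is
# [[-0.5, 0.0, 0.5],
#  [-1.0, 0.0, 1.0],
#  [-0.5, 0.0, 0.5]]   -- the classic Sobel x-mask scaled by 1/2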