repo stringlengths 7 54 | path stringlengths 4 192 | url stringlengths 87 284 | code stringlengths 78 104k | code_tokens list | docstring stringlengths 1 46.9k | docstring_tokens list | language stringclasses 1
value | partition stringclasses 3
values |
|---|---|---|---|---|---|---|---|---|
tcalmant/ipopo | pelix/rsa/__init__.py | https://github.com/tcalmant/ipopo/blob/2f9ae0c44cd9c34ef1a9d50837b3254e75678eb1/pelix/rsa/__init__.py#L1563-L1579 | def copy_non_reserved(props, target):
# type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
"""
Copies all properties with non-reserved names from ``props`` to ``target``
:param props: A dictionary of properties
:param target: Another dictionary
:return: The target dictionary
"""
t... | [
"def",
"copy_non_reserved",
"(",
"props",
",",
"target",
")",
":",
"# type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]",
"target",
".",
"update",
"(",
"{",
"key",
":",
"value",
"for",
"key",
",",
"value",
"in",
"props",
".",
"items",
"(",
")",
"if",
"n... | Copies all properties with non-reserved names from ``props`` to ``target``
:param props: A dictionary of properties
:param target: Another dictionary
:return: The target dictionary | [
"Copies",
"all",
"properties",
"with",
"non",
"-",
"reserved",
"names",
"from",
"props",
"to",
"target"
] | python | train |
Jajcus/pyxmpp2 | pyxmpp2/resolver.py | https://github.com/Jajcus/pyxmpp2/blob/14a40a3950910a9cd008b55f0d8905aa0186ce18/pyxmpp2/resolver.py#L60-L69 | def is_ipv4_available():
"""Check if IPv4 is available.
:Return: `True` when an IPv4 socket can be created.
"""
try:
socket.socket(socket.AF_INET).close()
except socket.error:
return False
return True | [
"def",
"is_ipv4_available",
"(",
")",
":",
"try",
":",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
")",
".",
"close",
"(",
")",
"except",
"socket",
".",
"error",
":",
"return",
"False",
"return",
"True"
] | Check if IPv4 is available.
:Return: `True` when an IPv4 socket can be created. | [
"Check",
"if",
"IPv4",
"is",
"available",
"."
] | python | valid |
eflglobal/filters | filters/macros.py | https://github.com/eflglobal/filters/blob/36c2a2b1cffa3a37279053cf181709045fd6683a/filters/macros.py#L39-L85 | def filter_macro(func, *args, **kwargs):
"""
Promotes a function that returns a filter into its own filter type.
Example::
@filter_macro
def String():
return Unicode | Strip | NotEmpty
# You can now use `String` anywhere you would use a regular Filter:
(String ... | [
"def",
"filter_macro",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"filter_partial",
"=",
"partial",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"class",
"FilterMacroMeta",
"(",
"FilterMeta",
")",
":",
"@",
"sta... | Promotes a function that returns a filter into its own filter type.
Example::
@filter_macro
def String():
return Unicode | Strip | NotEmpty
# You can now use `String` anywhere you would use a regular Filter:
(String | Split(':')).apply('...')
You can also use ``fi... | [
"Promotes",
"a",
"function",
"that",
"returns",
"a",
"filter",
"into",
"its",
"own",
"filter",
"type",
"."
] | python | train |
iotile/coretools | transport_plugins/awsiot/iotile_transport_awsiot/gateway_agent.py | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/gateway_agent.py#L673-L714 | def _publish_scan_response(self, client):
"""Publish a scan response message
The message contains all of the devices that are currently known
to this agent. Connection strings for direct connections are
translated to what is appropriate for this agent.
Args:
client... | [
"def",
"_publish_scan_response",
"(",
"self",
",",
"client",
")",
":",
"devices",
"=",
"self",
".",
"_manager",
".",
"scanned_devices",
"converted_devs",
"=",
"[",
"]",
"for",
"uuid",
",",
"info",
"in",
"devices",
".",
"items",
"(",
")",
":",
"slug",
"="... | Publish a scan response message
The message contains all of the devices that are currently known
to this agent. Connection strings for direct connections are
translated to what is appropriate for this agent.
Args:
client (string): A unique id for the client that made this ... | [
"Publish",
"a",
"scan",
"response",
"message"
] | python | train |
sebdah/dynamic-dynamodb | dynamic_dynamodb/core/gsi.py | https://github.com/sebdah/dynamic-dynamodb/blob/bfd0ca806b1c3301e724696de90ef0f973410493/dynamic_dynamodb/core/gsi.py#L585-L1024 | def __ensure_provisioning_writes(
table_name, table_key, gsi_name, gsi_key, num_consec_write_checks):
""" Ensure that provisioning of writes is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type table_key: str
:param table_key: Table configuration option key n... | [
"def",
"__ensure_provisioning_writes",
"(",
"table_name",
",",
"table_key",
",",
"gsi_name",
",",
"gsi_key",
",",
"num_consec_write_checks",
")",
":",
"if",
"not",
"get_gsi_option",
"(",
"table_key",
",",
"gsi_key",
",",
"'enable_writes_autoscaling'",
")",
":",
"log... | Ensure that provisioning of writes is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type table_key: str
:param table_key: Table configuration option key name
:type gsi_name: str
:param gsi_name: Name of the GSI
:type gsi_key: str
:param gsi_key: Configurat... | [
"Ensure",
"that",
"provisioning",
"of",
"writes",
"is",
"correct"
] | python | train |
jut-io/jut-python-tools | jut/commands/programs.py | https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/commands/programs.py#L89-L199 | def pull(options):
"""
pull all remote programs to a local directory
"""
configuration = config.get_default()
app_url = configuration['app_url']
if options.deployment != None:
deployment_name = options.deployment
else:
deployment_name = configuration['deployment_name']
... | [
"def",
"pull",
"(",
"options",
")",
":",
"configuration",
"=",
"config",
".",
"get_default",
"(",
")",
"app_url",
"=",
"configuration",
"[",
"'app_url'",
"]",
"if",
"options",
".",
"deployment",
"!=",
"None",
":",
"deployment_name",
"=",
"options",
".",
"d... | pull all remote programs to a local directory | [
"pull",
"all",
"remote",
"programs",
"to",
"a",
"local",
"directory"
] | python | train |
wummel/dosage | dosagelib/events.py | https://github.com/wummel/dosage/blob/a0109c3a46219f280e6e5e77183674e40da0f304/dosagelib/events.py#L220-L227 | def newComic(self, comic):
"""Start new comic list in HTML."""
if self.lastUrl is not None:
self.html.write(u'</li>\n')
if self.lastComic is not None:
self.html.write(u'</ul>\n')
self.html.write(u'<li>%s</li>\n' % comic.name)
self.html.write(u'<ul>\n') | [
"def",
"newComic",
"(",
"self",
",",
"comic",
")",
":",
"if",
"self",
".",
"lastUrl",
"is",
"not",
"None",
":",
"self",
".",
"html",
".",
"write",
"(",
"u'</li>\\n'",
")",
"if",
"self",
".",
"lastComic",
"is",
"not",
"None",
":",
"self",
".",
"html... | Start new comic list in HTML. | [
"Start",
"new",
"comic",
"list",
"in",
"HTML",
"."
] | python | train |
SuperCowPowers/workbench | workbench/workers/mem_procdump.py | https://github.com/SuperCowPowers/workbench/blob/710232756dd717f734253315e3d0b33c9628dafb/workbench/workers/mem_procdump.py#L32-L78 | def execute(self, input_data):
''' Execute method '''
# Spin up the rekall adapter
adapter = RekallAdapter()
adapter.set_plugin_name(self.plugin_name)
# Create a temporary directory and run this plugin from there
with self.goto_temp_directory():
# R... | [
"def",
"execute",
"(",
"self",
",",
"input_data",
")",
":",
"# Spin up the rekall adapter",
"adapter",
"=",
"RekallAdapter",
"(",
")",
"adapter",
".",
"set_plugin_name",
"(",
"self",
".",
"plugin_name",
")",
"# Create a temporary directory and run this plugin from there",... | Execute method | [
"Execute",
"method"
] | python | train |
tmr232/Sark | sark/code/instruction.py | https://github.com/tmr232/Sark/blob/bee62879c2aea553a3924d887e2b30f2a6008581/sark/code/instruction.py#L270-L280 | def reg(self):
"""Name of the register used in the operand."""
if self.type.is_displ or self.type.is_phrase:
size = core.get_native_size()
return base.get_register_name(self.reg_id, size)
if self.type.is_reg:
return base.get_register_name(self.reg_id, self.si... | [
"def",
"reg",
"(",
"self",
")",
":",
"if",
"self",
".",
"type",
".",
"is_displ",
"or",
"self",
".",
"type",
".",
"is_phrase",
":",
"size",
"=",
"core",
".",
"get_native_size",
"(",
")",
"return",
"base",
".",
"get_register_name",
"(",
"self",
".",
"r... | Name of the register used in the operand. | [
"Name",
"of",
"the",
"register",
"used",
"in",
"the",
"operand",
"."
] | python | train |
google/grr | grr/core/grr_response_core/lib/parsers/wmi_parser.py | https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/core/grr_response_core/lib/parsers/wmi_parser.py#L30-L91 | def BinarySIDtoStringSID(sid):
"""Converts a binary SID to its string representation.
https://msdn.microsoft.com/en-us/library/windows/desktop/aa379597.aspx
The byte representation of an SID is as follows:
Offset Length Description
00 01 revision
01 01 sub-authority count
0... | [
"def",
"BinarySIDtoStringSID",
"(",
"sid",
")",
":",
"precondition",
".",
"AssertType",
"(",
"sid",
",",
"bytes",
")",
"# TODO: This seemingly no-op is actually not a no-op. The reason",
"# is that `sid` might be either `bytes` from the future package or `str` (e.g.",
"# a bytes lite... | Converts a binary SID to its string representation.
https://msdn.microsoft.com/en-us/library/windows/desktop/aa379597.aspx
The byte representation of an SID is as follows:
Offset Length Description
00 01 revision
01 01 sub-authority count
02 06 authority (big endian... | [
"Converts",
"a",
"binary",
"SID",
"to",
"its",
"string",
"representation",
"."
] | python | train |
bjodah/pycompilation | pycompilation/compilation.py | https://github.com/bjodah/pycompilation/blob/43eac8d82f8258d30d4df77fd2ad3f3e4f4dca18/pycompilation/compilation.py#L474-L596 | def pyx2obj(pyxpath, objpath=None, interm_c_dir=None, cwd=None,
logger=None, full_module_name=None, only_update=False,
metadir=None, include_numpy=False, include_dirs=None,
cy_kwargs=None, gdb=False, cplus=None, **kwargs):
"""
Convenience function
If cwd is specified, py... | [
"def",
"pyx2obj",
"(",
"pyxpath",
",",
"objpath",
"=",
"None",
",",
"interm_c_dir",
"=",
"None",
",",
"cwd",
"=",
"None",
",",
"logger",
"=",
"None",
",",
"full_module_name",
"=",
"None",
",",
"only_update",
"=",
"False",
",",
"metadir",
"=",
"None",
"... | Convenience function
If cwd is specified, pyxpath and dst are taken to be relative
If only_update is set to `True` the modification time is checked
and compilation is only run if the source is newer than the
destination
Parameters
----------
pyxpath: path string
path to Cython sour... | [
"Convenience",
"function"
] | python | train |
watson-developer-cloud/python-sdk | ibm_watson/tone_analyzer_v3.py | https://github.com/watson-developer-cloud/python-sdk/blob/4c2c9df4466fcde88975da9ecd834e6ba95eb353/ibm_watson/tone_analyzer_v3.py#L1062-L1073 | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'utterance_id') and self.utterance_id is not None:
_dict['utterance_id'] = self.utterance_id
if hasattr(self, 'utterance_text') and self.utterance_text is not None:
... | [
"def",
"_to_dict",
"(",
"self",
")",
":",
"_dict",
"=",
"{",
"}",
"if",
"hasattr",
"(",
"self",
",",
"'utterance_id'",
")",
"and",
"self",
".",
"utterance_id",
"is",
"not",
"None",
":",
"_dict",
"[",
"'utterance_id'",
"]",
"=",
"self",
".",
"utterance_... | Return a json dictionary representing this model. | [
"Return",
"a",
"json",
"dictionary",
"representing",
"this",
"model",
"."
] | python | train |
fastai/fastai | fastai/vision/image.py | https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/image.py#L170-L173 | def pixel(self, func:PixelFunc, *args, **kwargs)->'Image':
"Equivalent to `image.px = func(image.px)`."
self.px = func(self.px, *args, **kwargs)
return self | [
"def",
"pixel",
"(",
"self",
",",
"func",
":",
"PixelFunc",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"->",
"'Image'",
":",
"self",
".",
"px",
"=",
"func",
"(",
"self",
".",
"px",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
... | Equivalent to `image.px = func(image.px)`. | [
"Equivalent",
"to",
"image",
".",
"px",
"=",
"func",
"(",
"image",
".",
"px",
")",
"."
] | python | train |
mardix/Juice | juice/decorators.py | https://github.com/mardix/Juice/blob/7afa8d4238868235dfcdae82272bd77958dd416a/juice/decorators.py#L514-L532 | def require_user_roles(*roles):
"""
A decorator to check if user has any of the roles specified
@require_user_roles('superadmin', 'admin')
def fn():
pass
"""
def wrapper(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
if ext.user_authenticated():
... | [
"def",
"require_user_roles",
"(",
"*",
"roles",
")",
":",
"def",
"wrapper",
"(",
"f",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"wrapped",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"ext",
".",
"user_authentica... | A decorator to check if user has any of the roles specified
@require_user_roles('superadmin', 'admin')
def fn():
pass | [
"A",
"decorator",
"to",
"check",
"if",
"user",
"has",
"any",
"of",
"the",
"roles",
"specified"
] | python | train |
prompt-toolkit/pyvim | pyvim/window_arrangement.py | https://github.com/prompt-toolkit/pyvim/blob/5928b53b9d700863c1a06d2181a034a955f94594/pyvim/window_arrangement.py#L374-L403 | def _add_editor_buffer(self, editor_buffer, show_in_current_window=False):
"""
Insert this new buffer in the list of buffers, right after the active
one.
"""
assert isinstance(editor_buffer, EditorBuffer) and editor_buffer not in self.editor_buffers
# Add to list of Edit... | [
"def",
"_add_editor_buffer",
"(",
"self",
",",
"editor_buffer",
",",
"show_in_current_window",
"=",
"False",
")",
":",
"assert",
"isinstance",
"(",
"editor_buffer",
",",
"EditorBuffer",
")",
"and",
"editor_buffer",
"not",
"in",
"self",
".",
"editor_buffers",
"# Ad... | Insert this new buffer in the list of buffers, right after the active
one. | [
"Insert",
"this",
"new",
"buffer",
"in",
"the",
"list",
"of",
"buffers",
"right",
"after",
"the",
"active",
"one",
"."
] | python | train |
bwhite/hadoopy | hadoopy/thirdparty/pyinstaller/PyInstaller/build.py | https://github.com/bwhite/hadoopy/blob/ff39b4e6d4e6efaf1f571cf0f2c0e0d7ab28c2d6/hadoopy/thirdparty/pyinstaller/PyInstaller/build.py#L155-L166 | def addSuffixToExtensions(toc):
"""
Returns a new TOC with proper library suffix for EXTENSION items.
"""
new_toc = TOC()
for inm, fnm, typ in toc:
if typ in ('EXTENSION', 'DEPENDENCY'):
binext = os.path.splitext(fnm)[1]
if not os.path.splitext(inm)[1] == binext:
... | [
"def",
"addSuffixToExtensions",
"(",
"toc",
")",
":",
"new_toc",
"=",
"TOC",
"(",
")",
"for",
"inm",
",",
"fnm",
",",
"typ",
"in",
"toc",
":",
"if",
"typ",
"in",
"(",
"'EXTENSION'",
",",
"'DEPENDENCY'",
")",
":",
"binext",
"=",
"os",
".",
"path",
"... | Returns a new TOC with proper library suffix for EXTENSION items. | [
"Returns",
"a",
"new",
"TOC",
"with",
"proper",
"library",
"suffix",
"for",
"EXTENSION",
"items",
"."
] | python | train |
python-openxml/python-docx | docx/oxml/coreprops.py | https://github.com/python-openxml/python-docx/blob/6756f6cd145511d3eb6d1d188beea391b1ddfd53/docx/oxml/coreprops.py#L222-L238 | def _offset_dt(cls, dt, offset_str):
"""
Return a |datetime| instance that is offset from datetime *dt* by
the timezone offset specified in *offset_str*, a string like
``'-07:00'``.
"""
match = cls._offset_pattern.match(offset_str)
if match is None:
ra... | [
"def",
"_offset_dt",
"(",
"cls",
",",
"dt",
",",
"offset_str",
")",
":",
"match",
"=",
"cls",
".",
"_offset_pattern",
".",
"match",
"(",
"offset_str",
")",
"if",
"match",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"'%s' is not a valid offset string\"",
... | Return a |datetime| instance that is offset from datetime *dt* by
the timezone offset specified in *offset_str*, a string like
``'-07:00'``. | [
"Return",
"a",
"|datetime|",
"instance",
"that",
"is",
"offset",
"from",
"datetime",
"*",
"dt",
"*",
"by",
"the",
"timezone",
"offset",
"specified",
"in",
"*",
"offset_str",
"*",
"a",
"string",
"like",
"-",
"07",
":",
"00",
"."
] | python | train |
koehlma/pygrooveshark | src/grooveshark/__init__.py | https://github.com/koehlma/pygrooveshark/blob/17673758ac12f54dc26ac879c30ea44f13b81057/src/grooveshark/__init__.py#L115-L122 | def _request_token(self, method, client):
'''
generates a request token
'''
if time.time() - self.session.time > grooveshark.const.TOKEN_TIMEOUT:
self._get_token()
random_value = self._random_hex()
return random_value + hashlib.sha1((method + ':' + self.sessio... | [
"def",
"_request_token",
"(",
"self",
",",
"method",
",",
"client",
")",
":",
"if",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"session",
".",
"time",
">",
"grooveshark",
".",
"const",
".",
"TOKEN_TIMEOUT",
":",
"self",
".",
"_get_token",
"(",
... | generates a request token | [
"generates",
"a",
"request",
"token"
] | python | train |
choderalab/pymbar | pymbar/mbar.py | https://github.com/choderalab/pymbar/blob/69d1f0ff680e9ac1c6a51a5a207ea28f3ed86740/pymbar/mbar.py#L793-L873 | def computeCovarianceOfSums(self, d_ij, K, a):
"""
We wish to calculate the variance of a weighted sum of free energy differences.
for example ``var(\sum a_i df_i)``.
We explicitly lay out the calculations for four variables (where each variable
is a logarithm of a partition fu... | [
"def",
"computeCovarianceOfSums",
"(",
"self",
",",
"d_ij",
",",
"K",
",",
"a",
")",
":",
"# todo: vectorize this.",
"var_ij",
"=",
"np",
".",
"square",
"(",
"d_ij",
")",
"d2",
"=",
"np",
".",
"zeros",
"(",
"[",
"K",
",",
"K",
"]",
",",
"float",
")... | We wish to calculate the variance of a weighted sum of free energy differences.
for example ``var(\sum a_i df_i)``.
We explicitly lay out the calculations for four variables (where each variable
is a logarithm of a partition function), then generalize.
The uncertainty in the sum of two... | [
"We",
"wish",
"to",
"calculate",
"the",
"variance",
"of",
"a",
"weighted",
"sum",
"of",
"free",
"energy",
"differences",
".",
"for",
"example",
"var",
"(",
"\\",
"sum",
"a_i",
"df_i",
")",
"."
] | python | train |
Microsoft/azure-devops-python-api | azure-devops/azure/devops/v5_1/task_agent/task_agent_client.py | https://github.com/Microsoft/azure-devops-python-api/blob/4777ffda2f5052fabbaddb2abe9cb434e0cf1aa8/azure-devops/azure/devops/v5_1/task_agent/task_agent_client.py#L595-L612 | def delete_deployment_target(self, project, deployment_group_id, target_id):
"""DeleteDeploymentTarget.
[Preview API] Delete a deployment target in a deployment group. This deletes the agent from associated deployment pool too.
:param str project: Project ID or project name
:param int de... | [
"def",
"delete_deployment_target",
"(",
"self",
",",
"project",
",",
"deployment_group_id",
",",
"target_id",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"project",
"is",
"not",
"None",
":",
"route_values",
"[",
"'project'",
"]",
"=",
"self",
".",
"_seria... | DeleteDeploymentTarget.
[Preview API] Delete a deployment target in a deployment group. This deletes the agent from associated deployment pool too.
:param str project: Project ID or project name
:param int deployment_group_id: ID of the deployment group in which deployment target is deleted.
... | [
"DeleteDeploymentTarget",
".",
"[",
"Preview",
"API",
"]",
"Delete",
"a",
"deployment",
"target",
"in",
"a",
"deployment",
"group",
".",
"This",
"deletes",
"the",
"agent",
"from",
"associated",
"deployment",
"pool",
"too",
".",
":",
"param",
"str",
"project",
... | python | train |
python-openxml/python-docx | docx/oxml/text/font.py | https://github.com/python-openxml/python-docx/blob/6756f6cd145511d3eb6d1d188beea391b1ddfd53/docx/oxml/text/font.py#L184-L195 | def subscript(self):
"""
|True| if `w:vertAlign/@w:val` is 'subscript'. |False| if
`w:vertAlign/@w:val` contains any other value. |None| if
`w:vertAlign` is not present.
"""
vertAlign = self.vertAlign
if vertAlign is None:
return None
if vertAl... | [
"def",
"subscript",
"(",
"self",
")",
":",
"vertAlign",
"=",
"self",
".",
"vertAlign",
"if",
"vertAlign",
"is",
"None",
":",
"return",
"None",
"if",
"vertAlign",
".",
"val",
"==",
"ST_VerticalAlignRun",
".",
"SUBSCRIPT",
":",
"return",
"True",
"return",
"F... | |True| if `w:vertAlign/@w:val` is 'subscript'. |False| if
`w:vertAlign/@w:val` contains any other value. |None| if
`w:vertAlign` is not present. | [
"|True|",
"if",
"w",
":",
"vertAlign",
"/"
] | python | train |
lingthio/Flask-User | flask_user/email_manager.py | https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/flask_user/email_manager.py#L37-L58 | def send_confirm_email_email(self, user, user_email):
"""Send the 'email confirmation' email."""
# Verify config settings
if not self.user_manager.USER_ENABLE_EMAIL: return
if not self.user_manager.USER_ENABLE_CONFIRM_EMAIL: return
# The confirm_email email is sent to a... | [
"def",
"send_confirm_email_email",
"(",
"self",
",",
"user",
",",
"user_email",
")",
":",
"# Verify config settings",
"if",
"not",
"self",
".",
"user_manager",
".",
"USER_ENABLE_EMAIL",
":",
"return",
"if",
"not",
"self",
".",
"user_manager",
".",
"USER_ENABLE_CON... | Send the 'email confirmation' email. | [
"Send",
"the",
"email",
"confirmation",
"email",
"."
] | python | train |
log2timeline/plaso | plaso/parsers/winfirewall.py | https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/parsers/winfirewall.py#L158-L200 | def _ParseLogLine(self, parser_mediator, structure):
"""Parse a single log line and and produce an event object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
structure (pyparsing.ParseResults): structure of ... | [
"def",
"_ParseLogLine",
"(",
"self",
",",
"parser_mediator",
",",
"structure",
")",
":",
"try",
":",
"date_time",
"=",
"dfdatetime_time_elements",
".",
"TimeElements",
"(",
"time_elements_tuple",
"=",
"structure",
".",
"date_time",
")",
"date_time",
".",
"is_local... | Parse a single log line and and produce an event object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
structure (pyparsing.ParseResults): structure of tokens derived from
a line of a text file. | [
"Parse",
"a",
"single",
"log",
"line",
"and",
"and",
"produce",
"an",
"event",
"object",
"."
] | python | train |
StackStorm/pybind | pybind/slxos/v17s_1_02/openflow_state/__init__.py | https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/slxos/v17s_1_02/openflow_state/__init__.py#L225-L248 | def _set_meter(self, v, load=False):
"""
Setter method for meter, mapped from YANG variable /openflow_state/meter (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_meter is considered as a private
method. Backends looking to populate this variable should
... | [
"def",
"_set_meter",
"(",
"self",
",",
"v",
",",
"load",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"v",
",",
"\"_utype\"",
")",
":",
"v",
"=",
"v",
".",
"_utype",
"(",
"v",
")",
"try",
":",
"t",
"=",
"YANGDynClass",
"(",
"v",
",",
"base",
... | Setter method for meter, mapped from YANG variable /openflow_state/meter (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_meter is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_meter() directly... | [
"Setter",
"method",
"for",
"meter",
"mapped",
"from",
"YANG",
"variable",
"/",
"openflow_state",
"/",
"meter",
"(",
"container",
")",
"If",
"this",
"variable",
"is",
"read",
"-",
"only",
"(",
"config",
":",
"false",
")",
"in",
"the",
"source",
"YANG",
"f... | python | train |
mediawiki-utilities/python-mwsessions | mwsessions/sessionizer.py | https://github.com/mediawiki-utilities/python-mwsessions/blob/bbbc7330075a2066514df21a64a9afd7a4e0de52/mwsessions/sessionizer.py#L101-L110 | def get_active_sessions(self):
"""
Retrieves the active, unexpired sessions.
:Returns:
A generator of :class:`~mwsessions.Session`
"""
for last_timestamp, i, events in self.recently_active:
yield Session(events[-1].user, unpack_events(events)) | [
"def",
"get_active_sessions",
"(",
"self",
")",
":",
"for",
"last_timestamp",
",",
"i",
",",
"events",
"in",
"self",
".",
"recently_active",
":",
"yield",
"Session",
"(",
"events",
"[",
"-",
"1",
"]",
".",
"user",
",",
"unpack_events",
"(",
"events",
")"... | Retrieves the active, unexpired sessions.
:Returns:
A generator of :class:`~mwsessions.Session` | [
"Retrieves",
"the",
"active",
"unexpired",
"sessions",
"."
] | python | train |
KxSystems/pyq | src/pyq/ptk.py | https://github.com/KxSystems/pyq/blob/ad7b807abde94615a7344aaa930bb01fb1552cc5/src/pyq/ptk.py#L48-L53 | def get_prompt_tokens(_):
"""Return a list of tokens for the prompt"""
namespace = q(r'\d')
if namespace == '.':
namespace = ''
return [(Token.Generic.Prompt, 'q%s)' % namespace)] | [
"def",
"get_prompt_tokens",
"(",
"_",
")",
":",
"namespace",
"=",
"q",
"(",
"r'\\d'",
")",
"if",
"namespace",
"==",
"'.'",
":",
"namespace",
"=",
"''",
"return",
"[",
"(",
"Token",
".",
"Generic",
".",
"Prompt",
",",
"'q%s)'",
"%",
"namespace",
")",
... | Return a list of tokens for the prompt | [
"Return",
"a",
"list",
"of",
"tokens",
"for",
"the",
"prompt"
] | python | train |
boppreh/keyboard | keyboard/_winkeyboard.py | https://github.com/boppreh/keyboard/blob/dbb73dfff484f733d5fed8dbc53301af5b6c7f50/keyboard/_winkeyboard.py#L486-L558 | def prepare_intercept(callback):
"""
Registers a Windows low level keyboard hook. The provided callback will
be invoked for each high-level keyboard event, and is expected to return
True if the key event should be passed to the next program, or False if
the event is to be blocked.
No event is p... | [
"def",
"prepare_intercept",
"(",
"callback",
")",
":",
"_setup_name_tables",
"(",
")",
"def",
"process_key",
"(",
"event_type",
",",
"vk",
",",
"scan_code",
",",
"is_extended",
")",
":",
"global",
"shift_is_pressed",
",",
"altgr_is_pressed",
",",
"ignore_next_righ... | Registers a Windows low level keyboard hook. The provided callback will
be invoked for each high-level keyboard event, and is expected to return
True if the key event should be passed to the next program, or False if
the event is to be blocked.
No event is processed until the Windows messages are pumpe... | [
"Registers",
"a",
"Windows",
"low",
"level",
"keyboard",
"hook",
".",
"The",
"provided",
"callback",
"will",
"be",
"invoked",
"for",
"each",
"high",
"-",
"level",
"keyboard",
"event",
"and",
"is",
"expected",
"to",
"return",
"True",
"if",
"the",
"key",
"ev... | python | train |
googlefonts/glyphsLib | Lib/glyphsLib/builder/builders.py | https://github.com/googlefonts/glyphsLib/blob/9c12dc70c8d13f08d92b824e6710f6e3bb5037bb/Lib/glyphsLib/builder/builders.py#L640-L702 | def _fake_designspace(self, ufos):
"""Build a fake designspace with the given UFOs as sources, so that all
builder functions can rely on the presence of a designspace.
"""
designspace = designspaceLib.DesignSpaceDocument()
ufo_to_location = defaultdict(dict)
# Make weig... | [
"def",
"_fake_designspace",
"(",
"self",
",",
"ufos",
")",
":",
"designspace",
"=",
"designspaceLib",
".",
"DesignSpaceDocument",
"(",
")",
"ufo_to_location",
"=",
"defaultdict",
"(",
"dict",
")",
"# Make weight and width axis if relevant",
"for",
"info_key",
",",
"... | Build a fake designspace with the given UFOs as sources, so that all
builder functions can rely on the presence of a designspace. | [
"Build",
"a",
"fake",
"designspace",
"with",
"the",
"given",
"UFOs",
"as",
"sources",
"so",
"that",
"all",
"builder",
"functions",
"can",
"rely",
"on",
"the",
"presence",
"of",
"a",
"designspace",
"."
] | python | train |
Yubico/yubikey-manager | ykman/cli/otp.py | https://github.com/Yubico/yubikey-manager/blob/3ac27bc59ae76a59db9d09a530494add2edbbabf/ykman/cli/otp.py#L390-L432 | def chalresp(ctx, slot, key, totp, touch, force, generate):
"""
Program a challenge-response credential.
If KEY is not given, an interactive prompt will ask for it.
"""
controller = ctx.obj['controller']
if key:
if generate:
ctx.fail('Invalid options: --generate conflicts w... | [
"def",
"chalresp",
"(",
"ctx",
",",
"slot",
",",
"key",
",",
"totp",
",",
"touch",
",",
"force",
",",
"generate",
")",
":",
"controller",
"=",
"ctx",
".",
"obj",
"[",
"'controller'",
"]",
"if",
"key",
":",
"if",
"generate",
":",
"ctx",
".",
"fail",... | Program a challenge-response credential.
If KEY is not given, an interactive prompt will ask for it. | [
"Program",
"a",
"challenge",
"-",
"response",
"credential",
"."
] | python | train |
fjwCode/cerium | cerium/androiddriver.py | https://github.com/fjwCode/cerium/blob/f6e06e0dcf83a0bc924828e9d6cb81383ed2364f/cerium/androiddriver.py#L168-L172 | def get_screen_density(self) -> str:
'''Show device screen density (PPI).'''
output, _ = self._execute(
'-s', self.device_sn, 'shell', 'wm', 'density')
return output.split()[2] | [
"def",
"get_screen_density",
"(",
"self",
")",
"->",
"str",
":",
"output",
",",
"_",
"=",
"self",
".",
"_execute",
"(",
"'-s'",
",",
"self",
".",
"device_sn",
",",
"'shell'",
",",
"'wm'",
",",
"'density'",
")",
"return",
"output",
".",
"split",
"(",
... | Show device screen density (PPI). | [
"Show",
"device",
"screen",
"density",
"(",
"PPI",
")",
"."
] | python | train |
sorgerlab/indra | indra/sources/sofia/api.py | https://github.com/sorgerlab/indra/blob/79a70415832c5702d7a820c7c9ccc8e25010124b/indra/sources/sofia/api.py#L9-L32 | def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProc... | [
"def",
"process_table",
"(",
"fname",
")",
":",
"book",
"=",
"openpyxl",
".",
"load_workbook",
"(",
"fname",
",",
"read_only",
"=",
"True",
")",
"try",
":",
"rel_sheet",
"=",
"book",
"[",
"'Relations'",
"]",
"except",
"Exception",
"as",
"e",
":",
"rel_sh... | Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of ... | [
"Return",
"processor",
"by",
"processing",
"a",
"given",
"sheet",
"of",
"a",
"spreadsheet",
"file",
"."
] | python | train |
mrstephenneal/pdfconduit | sandbox/pdfrw_upscale.py | https://github.com/mrstephenneal/pdfconduit/blob/993421cc087eefefe01ff09afabd893bcc2718ec/sandbox/pdfrw_upscale.py#L12-L36 | def upscale(file_name, scale=1.5, margin_x=0, margin_y=0, suffix='scaled', tempdir=None):
"""Upscale a PDF to a large size."""
def adjust(page):
info = PageMerge().add(page)
x1, y1, x2, y2 = info.xobj_box
viewrect = (margin_x, margin_y, x2 - x1 - 2 * margin_x, y2 - y1 - 2 * margin_y)
... | [
"def",
"upscale",
"(",
"file_name",
",",
"scale",
"=",
"1.5",
",",
"margin_x",
"=",
"0",
",",
"margin_y",
"=",
"0",
",",
"suffix",
"=",
"'scaled'",
",",
"tempdir",
"=",
"None",
")",
":",
"def",
"adjust",
"(",
"page",
")",
":",
"info",
"=",
"PageMer... | Upscale a PDF to a large size. | [
"Upscale",
"a",
"PDF",
"to",
"a",
"large",
"size",
"."
] | python | train |
vertexproject/synapse | synapse/lib/slabseqn.py | https://github.com/vertexproject/synapse/blob/22e67c5a8f6d7caddbcf34b39ab1bd2d6c4a6e0b/synapse/lib/slabseqn.py#L130-L137 | def rows(self, offs):
'''
Iterate over raw indx, bytes tuples from a given offset.
'''
lkey = s_common.int64en(offs)
for lkey, byts in self.slab.scanByRange(lkey, db=self.db):
indx = s_common.int64un(lkey)
yield indx, byts | [
"def",
"rows",
"(",
"self",
",",
"offs",
")",
":",
"lkey",
"=",
"s_common",
".",
"int64en",
"(",
"offs",
")",
"for",
"lkey",
",",
"byts",
"in",
"self",
".",
"slab",
".",
"scanByRange",
"(",
"lkey",
",",
"db",
"=",
"self",
".",
"db",
")",
":",
"... | Iterate over raw indx, bytes tuples from a given offset. | [
"Iterate",
"over",
"raw",
"indx",
"bytes",
"tuples",
"from",
"a",
"given",
"offset",
"."
] | python | train |
cloud-custodian/cloud-custodian | c7n/logs_support.py | https://github.com/cloud-custodian/cloud-custodian/blob/52ef732eb3d7bc939d1579faf519314814695c08/c7n/logs_support.py#L47-L73 | def normalized_log_entries(raw_entries):
'''Mimic the format returned by LambdaManager.logs()'''
entry_start = r'([0-9:, \-]+) - .* - (\w+) - (.*)$'
entry = None
# process start/end here - avoid parsing log entries twice
for line in raw_entries:
m = re.match(entry_start, line)
if m:
... | [
"def",
"normalized_log_entries",
"(",
"raw_entries",
")",
":",
"entry_start",
"=",
"r'([0-9:, \\-]+) - .* - (\\w+) - (.*)$'",
"entry",
"=",
"None",
"# process start/end here - avoid parsing log entries twice",
"for",
"line",
"in",
"raw_entries",
":",
"m",
"=",
"re",
".",
... | Mimic the format returned by LambdaManager.logs() | [
"Mimic",
"the",
"format",
"returned",
"by",
"LambdaManager",
".",
"logs",
"()"
] | python | train |
hobson/aima | aima/logic.py | https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L357-L401 | def pl_true(exp, model={}):
"""Return True if the propositional logic expression is true in the model,
and False if it is false. If the model does not specify the value for
every proposition, this may return None to indicate 'not obvious';
this may happen even when the expression is tautological."""
... | [
"def",
"pl_true",
"(",
"exp",
",",
"model",
"=",
"{",
"}",
")",
":",
"op",
",",
"args",
"=",
"exp",
".",
"op",
",",
"exp",
".",
"args",
"if",
"exp",
"==",
"TRUE",
":",
"return",
"True",
"elif",
"exp",
"==",
"FALSE",
":",
"return",
"False",
"eli... | Return True if the propositional logic expression is true in the model,
and False if it is false. If the model does not specify the value for
every proposition, this may return None to indicate 'not obvious';
this may happen even when the expression is tautological. | [
"Return",
"True",
"if",
"the",
"propositional",
"logic",
"expression",
"is",
"true",
"in",
"the",
"model",
"and",
"False",
"if",
"it",
"is",
"false",
".",
"If",
"the",
"model",
"does",
"not",
"specify",
"the",
"value",
"for",
"every",
"proposition",
"this"... | python | valid |
intuition-io/intuition | intuition/utils.py | https://github.com/intuition-io/intuition/blob/cd517e6b3b315a743eb4d0d0dc294e264ab913ce/intuition/utils.py#L26-L40 | def next_tick(date, interval=15):
'''
Only return when we reach given datetime
'''
# Intuition works with utc dates, conversion are made for I/O
now = dt.datetime.now(pytz.utc)
live = False
# Sleep until we reach the given date
while now < date:
time.sleep(interval)
# Upd... | [
"def",
"next_tick",
"(",
"date",
",",
"interval",
"=",
"15",
")",
":",
"# Intuition works with utc dates, conversion are made for I/O",
"now",
"=",
"dt",
".",
"datetime",
".",
"now",
"(",
"pytz",
".",
"utc",
")",
"live",
"=",
"False",
"# Sleep until we reach the g... | Only return when we reach given datetime | [
"Only",
"return",
"when",
"we",
"reach",
"given",
"datetime"
] | python | train |
molmod/molmod | molmod/io/atrj.py | https://github.com/molmod/molmod/blob/a7b5b4364ed514ad4c465856c05b5eda1cb561e0/molmod/io/atrj.py#L66-L70 | def get_next(self, label):
"""Get the next section with the given label"""
while self._get_current_label() != label:
self._skip_section()
return self._read_section() | [
"def",
"get_next",
"(",
"self",
",",
"label",
")",
":",
"while",
"self",
".",
"_get_current_label",
"(",
")",
"!=",
"label",
":",
"self",
".",
"_skip_section",
"(",
")",
"return",
"self",
".",
"_read_section",
"(",
")"
] | Get the next section with the given label | [
"Get",
"the",
"next",
"section",
"with",
"the",
"given",
"label"
] | python | train |
urinieto/msaf | msaf/utils.py | https://github.com/urinieto/msaf/blob/9dbb57d77a1310465a65cc40f1641d083ca74385/msaf/utils.py#L120-L154 | def sonify_clicks(audio, clicks, out_file, fs, offset=0):
"""Sonifies the estimated times into the output file.
Parameters
----------
audio: np.array
Audio samples of the input track.
clicks: np.array
Click positions in seconds.
out_file: str
Path to the output file.
... | [
"def",
"sonify_clicks",
"(",
"audio",
",",
"clicks",
",",
"out_file",
",",
"fs",
",",
"offset",
"=",
"0",
")",
":",
"# Generate clicks (this should be done by mir_eval, but its",
"# latest release is not compatible with latest numpy)",
"times",
"=",
"clicks",
"+",
"offset... | Sonifies the estimated times into the output file.
Parameters
----------
audio: np.array
Audio samples of the input track.
clicks: np.array
Click positions in seconds.
out_file: str
Path to the output file.
fs: int
Sample rate.
offset: float
Offset of... | [
"Sonifies",
"the",
"estimated",
"times",
"into",
"the",
"output",
"file",
"."
] | python | test |
PyCQA/astroid | astroid/rebuilder.py | https://github.com/PyCQA/astroid/blob/e0a298df55b15abcb77c2a93253f5ab7be52d0fb/astroid/rebuilder.py#L658-L666 | def visit_ifexp(self, node, parent):
"""visit a IfExp node by returning a fresh instance of it"""
newnode = nodes.IfExp(node.lineno, node.col_offset, parent)
newnode.postinit(
self.visit(node.test, newnode),
self.visit(node.body, newnode),
self.visit(node.orel... | [
"def",
"visit_ifexp",
"(",
"self",
",",
"node",
",",
"parent",
")",
":",
"newnode",
"=",
"nodes",
".",
"IfExp",
"(",
"node",
".",
"lineno",
",",
"node",
".",
"col_offset",
",",
"parent",
")",
"newnode",
".",
"postinit",
"(",
"self",
".",
"visit",
"("... | visit a IfExp node by returning a fresh instance of it | [
"visit",
"a",
"IfExp",
"node",
"by",
"returning",
"a",
"fresh",
"instance",
"of",
"it"
] | python | train |
AmesCornish/buttersink | buttersink/btrfs.py | https://github.com/AmesCornish/buttersink/blob/5cc37e30d9f8071fcf3497dca8b8a91b910321ea/buttersink/btrfs.py#L527-L541 | def _rescanSizes(self, force=True):
""" Zero and recalculate quota sizes to subvolume sizes will be correct. """
status = self.QUOTA_CTL(cmd=BTRFS_QUOTA_CTL_ENABLE).status
logger.debug("CTL Status: %s", hex(status))
status = self.QUOTA_RESCAN_STATUS()
logger.debug("RESCAN Status... | [
"def",
"_rescanSizes",
"(",
"self",
",",
"force",
"=",
"True",
")",
":",
"status",
"=",
"self",
".",
"QUOTA_CTL",
"(",
"cmd",
"=",
"BTRFS_QUOTA_CTL_ENABLE",
")",
".",
"status",
"logger",
".",
"debug",
"(",
"\"CTL Status: %s\"",
",",
"hex",
"(",
"status",
... | Zero and recalculate quota sizes to subvolume sizes will be correct. | [
"Zero",
"and",
"recalculate",
"quota",
"sizes",
"to",
"subvolume",
"sizes",
"will",
"be",
"correct",
"."
] | python | train |
estnltk/estnltk | estnltk/text.py | https://github.com/estnltk/estnltk/blob/28ae334a68a0673072febc318635f04da0dcc54a/estnltk/text.py#L855-L860 | def named_entities(self):
"""The elements of ``named_entities`` layer."""
if not self.is_tagged(NAMED_ENTITIES):
self.tag_named_entities()
phrases = self.split_by(NAMED_ENTITIES)
return [' '.join(phrase.lemmas) for phrase in phrases] | [
"def",
"named_entities",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_tagged",
"(",
"NAMED_ENTITIES",
")",
":",
"self",
".",
"tag_named_entities",
"(",
")",
"phrases",
"=",
"self",
".",
"split_by",
"(",
"NAMED_ENTITIES",
")",
"return",
"[",
"' '",
... | The elements of ``named_entities`` layer. | [
"The",
"elements",
"of",
"named_entities",
"layer",
"."
] | python | train |
emory-libraries/eulfedora | eulfedora/syncutil.py | https://github.com/emory-libraries/eulfedora/blob/161826f3fdcdab4007f6fa7dfd9f1ecabc4bcbe4/eulfedora/syncutil.py#L266-L303 | def get_datastream_info(self, dsinfo):
'''Use regular expressions to pull datastream [version]
details (id, mimetype, size, and checksum) for binary content,
in order to sanity check the decoded data.
:param dsinfo: text content just before a binaryContent tag
:returns: dict wit... | [
"def",
"get_datastream_info",
"(",
"self",
",",
"dsinfo",
")",
":",
"# we only need to look at the end of this section of content",
"dsinfo",
"=",
"dsinfo",
"[",
"-",
"750",
":",
"]",
"# if not enough content is present, include the end of",
"# the last read chunk, if available",... | Use regular expressions to pull datastream [version]
details (id, mimetype, size, and checksum) for binary content,
in order to sanity check the decoded data.
:param dsinfo: text content just before a binaryContent tag
:returns: dict with keys for id, mimetype, size, type and digest,
... | [
"Use",
"regular",
"expressions",
"to",
"pull",
"datastream",
"[",
"version",
"]",
"details",
"(",
"id",
"mimetype",
"size",
"and",
"checksum",
")",
"for",
"binary",
"content",
"in",
"order",
"to",
"sanity",
"check",
"the",
"decoded",
"data",
"."
] | python | train |
maljovec/topopy | topopy/TopologicalObject.py | https://github.com/maljovec/topopy/blob/4be598d51c4e4043b73d4ad44beed6d289e2f088/topopy/TopologicalObject.py#L200-L234 | def build(self, X, Y, w=None, edges=None):
""" Assigns data to this object and builds the requested topological
structure
@ In, X, an m-by-n array of values specifying m
n-dimensional samples
@ In, Y, a m vector of values specifying the output
response... | [
"def",
"build",
"(",
"self",
",",
"X",
",",
"Y",
",",
"w",
"=",
"None",
",",
"edges",
"=",
"None",
")",
":",
"self",
".",
"reset",
"(",
")",
"if",
"X",
"is",
"None",
"or",
"Y",
"is",
"None",
":",
"return",
"self",
".",
"__set_data",
"(",
"X",... | Assigns data to this object and builds the requested topological
structure
@ In, X, an m-by-n array of values specifying m
n-dimensional samples
@ In, Y, a m vector of values specifying the output
responses corresponding to the m samples specified by X
... | [
"Assigns",
"data",
"to",
"this",
"object",
"and",
"builds",
"the",
"requested",
"topological",
"structure"
] | python | train |
shoebot/shoebot | shoebot/sbio/shell.py | https://github.com/shoebot/shoebot/blob/d554c1765c1899fa25727c9fc6805d221585562b/shoebot/sbio/shell.py#L152-L159 | def do_escape_nl(self, arg):
"""
Escape newlines in any responses
"""
if arg.lower() == 'off':
self.escape_nl = False
else:
self.escape_nl = True | [
"def",
"do_escape_nl",
"(",
"self",
",",
"arg",
")",
":",
"if",
"arg",
".",
"lower",
"(",
")",
"==",
"'off'",
":",
"self",
".",
"escape_nl",
"=",
"False",
"else",
":",
"self",
".",
"escape_nl",
"=",
"True"
] | Escape newlines in any responses | [
"Escape",
"newlines",
"in",
"any",
"responses"
] | python | valid |
EUDAT-B2SAFE/B2HANDLE | b2handle/util/argsutils.py | https://github.com/EUDAT-B2SAFE/B2HANDLE/blob/a6d216d459644e01fbdfd5b318a535950bc5cdbb/b2handle/util/argsutils.py#L24-L47 | def check_presence_of_mandatory_args(args, mandatory_args):
'''
Checks whether all mandatory arguments are passed.
This function aims at methods with many arguments
which are passed as kwargs so that the order
in which the are passed does not matter.
:args: The dictionary passed as arg... | [
"def",
"check_presence_of_mandatory_args",
"(",
"args",
",",
"mandatory_args",
")",
":",
"missing_args",
"=",
"[",
"]",
"for",
"name",
"in",
"mandatory_args",
":",
"if",
"name",
"not",
"in",
"args",
".",
"keys",
"(",
")",
":",
"missing_args",
".",
"append",
... | Checks whether all mandatory arguments are passed.
This function aims at methods with many arguments
which are passed as kwargs so that the order
in which the are passed does not matter.
:args: The dictionary passed as args.
:mandatory_args: A list of keys that have to be
present i... | [
"Checks",
"whether",
"all",
"mandatory",
"arguments",
"are",
"passed",
"."
] | python | train |
bolt-project/bolt | bolt/spark/construct.py | https://github.com/bolt-project/bolt/blob/9cd7104aa085498da3097b72696184b9d3651c51/bolt/spark/construct.py#L208-L222 | def _wrap(func, shape, context=None, axis=(0,), dtype=None, npartitions=None):
"""
Wrap an existing numpy constructor in a parallelized construction
"""
if isinstance(shape, int):
shape = (shape,)
key_shape, value_shape = get_kv_shape(shape, ConstructSpark._format_axe... | [
"def",
"_wrap",
"(",
"func",
",",
"shape",
",",
"context",
"=",
"None",
",",
"axis",
"=",
"(",
"0",
",",
")",
",",
"dtype",
"=",
"None",
",",
"npartitions",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"shape",
",",
"int",
")",
":",
"shape",
... | Wrap an existing numpy constructor in a parallelized construction | [
"Wrap",
"an",
"existing",
"numpy",
"constructor",
"in",
"a",
"parallelized",
"construction"
] | python | test |
fogleman/pg | pg/util.py | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L68-L78 | def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z /... | [
"def",
"normal_from_points",
"(",
"a",
",",
"b",
",",
"c",
")",
":",
"x1",
",",
"y1",
",",
"z1",
"=",
"a",
"x2",
",",
"y2",
",",
"z2",
"=",
"b",
"x3",
",",
"y3",
",",
"z3",
"=",
"c",
"ab",
"=",
"(",
"x2",
"-",
"x1",
",",
"y2",
"-",
"y1"... | Computes a normal vector given three points. | [
"Computes",
"a",
"normal",
"vector",
"given",
"three",
"points",
"."
] | python | train |
mcocdawc/chemcoord | src/chemcoord/cartesian_coordinates/_cartesian_class_get_zmat.py | https://github.com/mcocdawc/chemcoord/blob/95561ce387c142227c38fb14a1d182179aef8f5f/src/chemcoord/cartesian_coordinates/_cartesian_class_get_zmat.py#L189-L337 | def get_construction_table(self, fragment_list=None,
use_lookup=None,
perform_checks=True):
"""Create a construction table for a Zmatrix.
A construction table is basically a Zmatrix without the values
for the bond lengths, angles and... | [
"def",
"get_construction_table",
"(",
"self",
",",
"fragment_list",
"=",
"None",
",",
"use_lookup",
"=",
"None",
",",
"perform_checks",
"=",
"True",
")",
":",
"if",
"use_lookup",
"is",
"None",
":",
"use_lookup",
"=",
"settings",
"[",
"'defaults'",
"]",
"[",
... | Create a construction table for a Zmatrix.
A construction table is basically a Zmatrix without the values
for the bond lengths, angles and dihedrals.
It contains the whole information about which reference atoms
are used by each atom in the Zmatrix.
The absolute references in c... | [
"Create",
"a",
"construction",
"table",
"for",
"a",
"Zmatrix",
"."
] | python | train |
assemblerflow/flowcraft | flowcraft/templates/fastqc_report.py | https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/templates/fastqc_report.py#L324-L359 | def get_sample_trim(p1_data, p2_data):
"""Get the optimal read trim range from data files of paired FastQ reads.
Given the FastQC data report files for paired-end FastQ reads, this
function will assess the optimal trim range for the 3' and 5' ends of
the paired-end reads. This assessment will be based ... | [
"def",
"get_sample_trim",
"(",
"p1_data",
",",
"p2_data",
")",
":",
"sample_ranges",
"=",
"[",
"trim_range",
"(",
"x",
")",
"for",
"x",
"in",
"[",
"p1_data",
",",
"p2_data",
"]",
"]",
"# Get the optimal trim position for 5' end",
"optimal_5trim",
"=",
"max",
"... | Get the optimal read trim range from data files of paired FastQ reads.
Given the FastQC data report files for paired-end FastQ reads, this
function will assess the optimal trim range for the 3' and 5' ends of
the paired-end reads. This assessment will be based on the *'Per sequence
GC content'*.
P... | [
"Get",
"the",
"optimal",
"read",
"trim",
"range",
"from",
"data",
"files",
"of",
"paired",
"FastQ",
"reads",
"."
] | python | test |
obulpathi/cdn-fastly-python | fastly/__init__.py | https://github.com/obulpathi/cdn-fastly-python/blob/db2564b047e8af4bce72c3b88d6c27d3d0291425/fastly/__init__.py#L684-L687 | def get_service(self, service_id):
"""Get a specific service by id."""
content = self._fetch("/service/%s" % service_id)
return FastlyService(self, content) | [
"def",
"get_service",
"(",
"self",
",",
"service_id",
")",
":",
"content",
"=",
"self",
".",
"_fetch",
"(",
"\"/service/%s\"",
"%",
"service_id",
")",
"return",
"FastlyService",
"(",
"self",
",",
"content",
")"
] | Get a specific service by id. | [
"Get",
"a",
"specific",
"service",
"by",
"id",
"."
] | python | train |
dwavesystems/dimod | dimod/binary_quadratic_model.py | https://github.com/dwavesystems/dimod/blob/beff1b7f86b559d923ac653c1de6d593876d6d38/dimod/binary_quadratic_model.py#L1218-L1298 | def relabel_variables(self, mapping, inplace=True):
"""Relabel variables of a binary quadratic model as specified by mapping.
Args:
mapping (dict):
Dict mapping current variable labels to new ones. If an incomplete mapping is
provided, unmapped variables reta... | [
"def",
"relabel_variables",
"(",
"self",
",",
"mapping",
",",
"inplace",
"=",
"True",
")",
":",
"try",
":",
"old_labels",
"=",
"set",
"(",
"mapping",
")",
"new_labels",
"=",
"set",
"(",
"itervalues",
"(",
"mapping",
")",
")",
"except",
"TypeError",
":",
... | Relabel variables of a binary quadratic model as specified by mapping.
Args:
mapping (dict):
Dict mapping current variable labels to new ones. If an incomplete mapping is
provided, unmapped variables retain their current labels.
inplace (bool, optional, ... | [
"Relabel",
"variables",
"of",
"a",
"binary",
"quadratic",
"model",
"as",
"specified",
"by",
"mapping",
"."
] | python | train |
ThePlasmaRailgun/py-rolldice | rolldice/rolldice.py | https://github.com/ThePlasmaRailgun/py-rolldice/blob/dc46d1d3e765592e76c52fd812b4f3b7425db552/rolldice/rolldice.py#L199-L221 | def _eval_call(self, node):
"""
Evaluate a function call
:param node: Node to eval
:return: Result of node
"""
try:
func = self.functions[node.func.id]
except KeyError:
raise NameError(node.func.id)
value = func(
*(sel... | [
"def",
"_eval_call",
"(",
"self",
",",
"node",
")",
":",
"try",
":",
"func",
"=",
"self",
".",
"functions",
"[",
"node",
".",
"func",
".",
"id",
"]",
"except",
"KeyError",
":",
"raise",
"NameError",
"(",
"node",
".",
"func",
".",
"id",
")",
"value"... | Evaluate a function call
:param node: Node to eval
:return: Result of node | [
"Evaluate",
"a",
"function",
"call"
] | python | train |
fastai/fastai | fastai/torch_core.py | https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/torch_core.py#L365-L371 | def try_int(o:Any)->Any:
"Try to convert `o` to int, default to `o` if not possible."
# NB: single-item rank-1 array/tensor can be converted to int, but we don't want to do this
if isinstance(o, (np.ndarray,Tensor)): return o if o.ndim else int(o)
if isinstance(o, collections.Sized) or getattr(o,'__arra... | [
"def",
"try_int",
"(",
"o",
":",
"Any",
")",
"->",
"Any",
":",
"# NB: single-item rank-1 array/tensor can be converted to int, but we don't want to do this",
"if",
"isinstance",
"(",
"o",
",",
"(",
"np",
".",
"ndarray",
",",
"Tensor",
")",
")",
":",
"return",
"o",... | Try to convert `o` to int, default to `o` if not possible. | [
"Try",
"to",
"convert",
"o",
"to",
"int",
"default",
"to",
"o",
"if",
"not",
"possible",
"."
] | python | train |
edx/edx-enterprise | integrated_channels/xapi/management/commands/send_course_enrollments.py | https://github.com/edx/edx-enterprise/blob/aea91379ab0a87cd3bc798961fce28b60ee49a80/integrated_channels/xapi/management/commands/send_course_enrollments.py#L131-L147 | def get_course_enrollments(self, enterprise_customer, days):
"""
Get course enrollments for all the learners of given enterprise customer.
Arguments:
enterprise_customer (EnterpriseCustomer): Include Course enrollments for learners
of this enterprise customer.
... | [
"def",
"get_course_enrollments",
"(",
"self",
",",
"enterprise_customer",
",",
"days",
")",
":",
"return",
"CourseEnrollment",
".",
"objects",
".",
"filter",
"(",
"created__gt",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"datetime",
".",
"t... | Get course enrollments for all the learners of given enterprise customer.
Arguments:
enterprise_customer (EnterpriseCustomer): Include Course enrollments for learners
of this enterprise customer.
days (int): Include course enrollment of this number of days.
Retu... | [
"Get",
"course",
"enrollments",
"for",
"all",
"the",
"learners",
"of",
"given",
"enterprise",
"customer",
"."
] | python | valid |
faxir/faxir-python | faxir/api/numbers_api.py | https://github.com/faxir/faxir-python/blob/75ed2ea487a6be537342baea1077a02b0c8e70c1/faxir/api/numbers_api.py#L36-L56 | def get_number(self, number, **kwargs): # noqa: E501
"""Get number information # noqa: E501
Get info of a single number # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_numb... | [
"def",
"get_number",
"(",
"self",
",",
"number",
",",
"*",
"*",
"kwargs",
")",
":",
"# noqa: E501",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"self",
".",
"get_number_with_h... | Get number information # noqa: E501
Get info of a single number # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_number(number, async=True)
>>> result = thread.get()
... | [
"Get",
"number",
"information",
"#",
"noqa",
":",
"E501"
] | python | train |
cenobites/flask-jsonrpc | flask_jsonrpc/proxy.py | https://github.com/cenobites/flask-jsonrpc/blob/c7f8e049adda8cf4c5a62aea345eb42697f10eff/flask_jsonrpc/proxy.py#L59-L70 | def send_payload(self, params):
"""Performs the actual sending action and returns the result
"""
data = json.dumps({
'jsonrpc': self.version,
'method': self.service_name,
'params': params,
'id': text_type(uuid.uuid4())
})
data_binar... | [
"def",
"send_payload",
"(",
"self",
",",
"params",
")",
":",
"data",
"=",
"json",
".",
"dumps",
"(",
"{",
"'jsonrpc'",
":",
"self",
".",
"version",
",",
"'method'",
":",
"self",
".",
"service_name",
",",
"'params'",
":",
"params",
",",
"'id'",
":",
"... | Performs the actual sending action and returns the result | [
"Performs",
"the",
"actual",
"sending",
"action",
"and",
"returns",
"the",
"result"
] | python | valid |
harmsm/PyCmdMessenger | PyCmdMessenger/PyCmdMessenger.py | https://github.com/harmsm/PyCmdMessenger/blob/215d6f9402262662a14a2996f532934339639a5b/PyCmdMessenger/PyCmdMessenger.py#L175-L289 | def receive(self,arg_formats=None):
"""
Recieve commands coming off the serial port.
arg_formats is an optimal keyword that specifies the formats to use to
parse incoming arguments. If specified here, arg_formats supercedes
the formats specified on initialization.
""... | [
"def",
"receive",
"(",
"self",
",",
"arg_formats",
"=",
"None",
")",
":",
"# Read serial input until a command separator or empty character is",
"# reached ",
"msg",
"=",
"[",
"[",
"]",
"]",
"raw_msg",
"=",
"[",
"]",
"escaped",
"=",
"False",
"command_sep_found",
"... | Recieve commands coming off the serial port.
arg_formats is an optimal keyword that specifies the formats to use to
parse incoming arguments. If specified here, arg_formats supercedes
the formats specified on initialization. | [
"Recieve",
"commands",
"coming",
"off",
"the",
"serial",
"port",
"."
] | python | train |
joferkington/mplstereonet | mplstereonet/stereonet_math.py | https://github.com/joferkington/mplstereonet/blob/f6d78ca49807915d4223e864e12bb24d497cc2d6/mplstereonet/stereonet_math.py#L356-L380 | def mean_vector(lons, lats):
"""
Returns the resultant vector from a series of longitudes and latitudes
Parameters
----------
lons : array-like
A sequence of longitudes (in radians)
lats : array-like
A sequence of latitudes (in radians)
Returns
-------
mean_vec : tu... | [
"def",
"mean_vector",
"(",
"lons",
",",
"lats",
")",
":",
"xyz",
"=",
"sph2cart",
"(",
"lons",
",",
"lats",
")",
"xyz",
"=",
"np",
".",
"vstack",
"(",
"xyz",
")",
".",
"T",
"mean_vec",
"=",
"xyz",
".",
"mean",
"(",
"axis",
"=",
"0",
")",
"r_val... | Returns the resultant vector from a series of longitudes and latitudes
Parameters
----------
lons : array-like
A sequence of longitudes (in radians)
lats : array-like
A sequence of latitudes (in radians)
Returns
-------
mean_vec : tuple
(lon, lat) in radians
r_v... | [
"Returns",
"the",
"resultant",
"vector",
"from",
"a",
"series",
"of",
"longitudes",
"and",
"latitudes"
] | python | train |
cyrus-/cypy | cypy/__init__.py | https://github.com/cyrus-/cypy/blob/04bb59e91fa314e8cf987743189c77a9b6bc371d/cypy/__init__.py#L583-L600 | def is_int_like(value):
"""Returns whether the value can be used as a standard integer.
>>> is_int_like(4)
True
>>> is_int_like(4.0)
False
>>> is_int_like("4")
False
>>> is_int_like("abc")
False
"""
try:
if isinstance(value, int): ret... | [
"def",
"is_int_like",
"(",
"value",
")",
":",
"try",
":",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
":",
"return",
"True",
"return",
"int",
"(",
"value",
")",
"==",
"value",
"and",
"str",
"(",
"value",
")",
".",
"isdigit",
"(",
")",
"excep... | Returns whether the value can be used as a standard integer.
>>> is_int_like(4)
True
>>> is_int_like(4.0)
False
>>> is_int_like("4")
False
>>> is_int_like("abc")
False | [
"Returns",
"whether",
"the",
"value",
"can",
"be",
"used",
"as",
"a",
"standard",
"integer",
"."
] | python | train |
cloudendpoints/endpoints-python | endpoints/api_config.py | https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config.py#L1626-L1644 | def __parameter_default(self, final_subfield):
"""Returns default value of final subfield if it has one.
If this subfield comes from a field list returned from __field_to_subfields,
none of the fields in the subfield list can have a default except the final
one since they all must be message fields.
... | [
"def",
"__parameter_default",
"(",
"self",
",",
"final_subfield",
")",
":",
"if",
"final_subfield",
".",
"default",
":",
"if",
"isinstance",
"(",
"final_subfield",
",",
"messages",
".",
"EnumField",
")",
":",
"return",
"final_subfield",
".",
"default",
".",
"n... | Returns default value of final subfield if it has one.
If this subfield comes from a field list returned from __field_to_subfields,
none of the fields in the subfield list can have a default except the final
one since they all must be message fields.
Args:
final_subfield: A simple field from the... | [
"Returns",
"default",
"value",
"of",
"final",
"subfield",
"if",
"it",
"has",
"one",
"."
] | python | train |
meyersj/geotweet | geotweet/mapreduce/poi_nearby_tweets.py | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/poi_nearby_tweets.py#L110-L142 | def reducer_metro(self, metro, values):
"""
Output tags of POI locations nearby tweet locations
Values will be sorted coming into reducer.
First element in each value tuple will be either 1 (osm POI) or 2 (geotweet).
Build a spatial index with POI records.
For each tweet... | [
"def",
"reducer_metro",
"(",
"self",
",",
"metro",
",",
"values",
")",
":",
"lookup",
"=",
"CachedLookup",
"(",
"precision",
"=",
"POI_GEOHASH_PRECISION",
")",
"for",
"i",
",",
"value",
"in",
"enumerate",
"(",
"values",
")",
":",
"type_tag",
",",
"lonlat",... | Output tags of POI locations nearby tweet locations
Values will be sorted coming into reducer.
First element in each value tuple will be either 1 (osm POI) or 2 (geotweet).
Build a spatial index with POI records.
For each tweet lookup nearby POI, and emit tag values for predefined tags. | [
"Output",
"tags",
"of",
"POI",
"locations",
"nearby",
"tweet",
"locations"
] | python | train |
MacHu-GWU/angora-project | angora/dtypes/dicttree.py | https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/dtypes/dicttree.py#L287-L302 | def k_depth(d, depth, _counter=1):
"""Iterate keys on specific depth.
depth has to be greater equal than 0.
Usage reference see :meth:`DictTree.kv_depth()<DictTree.kv_depth>`
"""
if depth == 0:
yield d[_meta]["_rootname"]
else:
if _counter == dept... | [
"def",
"k_depth",
"(",
"d",
",",
"depth",
",",
"_counter",
"=",
"1",
")",
":",
"if",
"depth",
"==",
"0",
":",
"yield",
"d",
"[",
"_meta",
"]",
"[",
"\"_rootname\"",
"]",
"else",
":",
"if",
"_counter",
"==",
"depth",
":",
"for",
"key",
"in",
"Dict... | Iterate keys on specific depth.
depth has to be greater equal than 0.
Usage reference see :meth:`DictTree.kv_depth()<DictTree.kv_depth>` | [
"Iterate",
"keys",
"on",
"specific",
"depth",
".",
"depth",
"has",
"to",
"be",
"greater",
"equal",
"than",
"0",
".",
"Usage",
"reference",
"see",
":",
"meth",
":",
"DictTree",
".",
"kv_depth",
"()",
"<DictTree",
".",
"kv_depth",
">"
] | python | train |
johnnoone/json-spec | src/jsonspec/validators/factorize.py | https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/validators/factorize.py#L104-L131 | def register(compiler=None, spec=None):
"""
Expose compiler to factory.
:param compiler: the callable to expose
:type compiler: callable
:param spec: name of the spec
:type spec: str
It can be used as a decorator::
@register(spec='my:first:spec')
def my_compiler(schema, po... | [
"def",
"register",
"(",
"compiler",
"=",
"None",
",",
"spec",
"=",
"None",
")",
":",
"if",
"not",
"spec",
":",
"raise",
"CompilationError",
"(",
"'Spec is required'",
")",
"if",
"not",
"compiler",
":",
"return",
"partial",
"(",
"register",
",",
"spec",
"... | Expose compiler to factory.
:param compiler: the callable to expose
:type compiler: callable
:param spec: name of the spec
:type spec: str
It can be used as a decorator::
@register(spec='my:first:spec')
def my_compiler(schema, pointer, context):
return Validator(schema... | [
"Expose",
"compiler",
"to",
"factory",
"."
] | python | train |
apache/incubator-mxnet | python/mxnet/libinfo.py | https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/libinfo.py#L79-L110 | def find_include_path():
"""Find MXNet included header files.
Returns
-------
incl_path : string
Path to the header files.
"""
incl_from_env = os.environ.get('MXNET_INCLUDE_PATH')
if incl_from_env:
if os.path.isdir(incl_from_env):
if not os.path.isabs(incl_from_e... | [
"def",
"find_include_path",
"(",
")",
":",
"incl_from_env",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'MXNET_INCLUDE_PATH'",
")",
"if",
"incl_from_env",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"incl_from_env",
")",
":",
"if",
"not",
"os",
"."... | Find MXNet included header files.
Returns
-------
incl_path : string
Path to the header files. | [
"Find",
"MXNet",
"included",
"header",
"files",
"."
] | python | train |
amzn/ion-python | amazon/ion/reader_text.py | https://github.com/amzn/ion-python/blob/0b21fa3ba7755f55f745e4aa970d86343b82449d/amazon/ion/reader_text.py#L654-L708 | def _numeric_handler_factory(charset, transition, assertion, illegal_before_underscore, parse_func,
illegal_at_end=(None,), ion_type=None, append_first_if_not=None, first_char=None):
"""Generates a handler co-routine which tokenizes a numeric component (a token or sub-token).
Args:... | [
"def",
"_numeric_handler_factory",
"(",
"charset",
",",
"transition",
",",
"assertion",
",",
"illegal_before_underscore",
",",
"parse_func",
",",
"illegal_at_end",
"=",
"(",
"None",
",",
")",
",",
"ion_type",
"=",
"None",
",",
"append_first_if_not",
"=",
"None",
... | Generates a handler co-routine which tokenizes a numeric component (a token or sub-token).
Args:
charset (sequence): Set of ordinals of legal characters for this numeric component.
transition (callable): Called upon termination of this component (i.e. when a character not in ``charset`` is
... | [
"Generates",
"a",
"handler",
"co",
"-",
"routine",
"which",
"tokenizes",
"a",
"numeric",
"component",
"(",
"a",
"token",
"or",
"sub",
"-",
"token",
")",
"."
] | python | train |
spyder-ide/spyder | spyder/plugins/findinfiles/plugin.py | https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/findinfiles/plugin.py#L98-L113 | def findinfiles_callback(self):
"""Find in files callback"""
widget = QApplication.focusWidget()
if not self.ismaximized:
self.dockwidget.setVisible(True)
self.dockwidget.raise_()
text = ''
try:
if widget.has_selected_text():
... | [
"def",
"findinfiles_callback",
"(",
"self",
")",
":",
"widget",
"=",
"QApplication",
".",
"focusWidget",
"(",
")",
"if",
"not",
"self",
".",
"ismaximized",
":",
"self",
".",
"dockwidget",
".",
"setVisible",
"(",
"True",
")",
"self",
".",
"dockwidget",
".",... | Find in files callback | [
"Find",
"in",
"files",
"callback"
] | python | train |
joeyespo/path-and-address | path_and_address/parsing.py | https://github.com/joeyespo/path-and-address/blob/f8193a09f4b785574d920e8a2aeeb55ea6ff4e20/path_and_address/parsing.py#L4-L18 | def resolve(path_or_address=None, address=None, *ignored):
"""
Returns (path, address) based on consecutive optional arguments,
[path] [address].
"""
if path_or_address is None or address is not None:
return path_or_address, address
path = None
if split_address(path_or_address)[1] i... | [
"def",
"resolve",
"(",
"path_or_address",
"=",
"None",
",",
"address",
"=",
"None",
",",
"*",
"ignored",
")",
":",
"if",
"path_or_address",
"is",
"None",
"or",
"address",
"is",
"not",
"None",
":",
"return",
"path_or_address",
",",
"address",
"path",
"=",
... | Returns (path, address) based on consecutive optional arguments,
[path] [address]. | [
"Returns",
"(",
"path",
"address",
")",
"based",
"on",
"consecutive",
"optional",
"arguments",
"[",
"path",
"]",
"[",
"address",
"]",
"."
] | python | train |
ajslater/picopt | picopt/cli.py | https://github.com/ajslater/picopt/blob/261da837027563c1dc3ed07b70e1086520a60402/picopt/cli.py#L126-L170 | def process_arguments(arguments):
"""Recompute special cases for input arguments."""
Settings.update(arguments)
Settings.config_program_reqs(PROGRAMS)
Settings.verbose = arguments.verbose + 1
Settings.paths = set(arguments.paths)
if arguments.formats == DEFAULT_FORMATS:
Settings.forma... | [
"def",
"process_arguments",
"(",
"arguments",
")",
":",
"Settings",
".",
"update",
"(",
"arguments",
")",
"Settings",
".",
"config_program_reqs",
"(",
"PROGRAMS",
")",
"Settings",
".",
"verbose",
"=",
"arguments",
".",
"verbose",
"+",
"1",
"Settings",
".",
"... | Recompute special cases for input arguments. | [
"Recompute",
"special",
"cases",
"for",
"input",
"arguments",
"."
] | python | train |
Tanganelli/CoAPthon3 | coapthon/messages/message.py | https://github.com/Tanganelli/CoAPthon3/blob/985763bfe2eb9e00f49ec100c5b8877c2ed7d531/coapthon/messages/message.py#L114-L129 | def token(self, value):
"""
Set the Token of the message.
:type value: String
:param value: the Token
:raise AttributeError: if value is longer than 256
"""
if value is None:
self._token = value
return
if not isinstance(value, str)... | [
"def",
"token",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"is",
"None",
":",
"self",
".",
"_token",
"=",
"value",
"return",
"if",
"not",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"value",
"=",
"str",
"(",
"value",
")",
"if",
"le... | Set the Token of the message.
:type value: String
:param value: the Token
:raise AttributeError: if value is longer than 256 | [
"Set",
"the",
"Token",
"of",
"the",
"message",
"."
] | python | train |
saltstack/salt | salt/modules/gem.py | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/gem.py#L276-L310 | def list_(prefix='', ruby=None, runas=None, gem_bin=None):
'''
List locally installed gems.
:param prefix: string :
Only list gems when the name matches this prefix.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv ... | [
"def",
"list_",
"(",
"prefix",
"=",
"''",
",",
"ruby",
"=",
"None",
",",
"runas",
"=",
"None",
",",
"gem_bin",
"=",
"None",
")",
":",
"cmd",
"=",
"[",
"'list'",
"]",
"if",
"prefix",
":",
"cmd",
".",
"append",
"(",
"prefix",
")",
"stdout",
"=",
... | List locally installed gems.
:param prefix: string :
Only list gems when the name matches this prefix.
:param gem_bin: string : None
Full path to ``gem`` binary to use.
:param ruby: string : None
If RVM or rbenv are installed, the ruby version and gemset to use.
Ignored if `... | [
"List",
"locally",
"installed",
"gems",
"."
] | python | train |
explosion/spaCy | spacy/language.py | https://github.com/explosion/spaCy/blob/8ee4100f8ffb336886208a1ea827bf4c745e2709/spacy/language.py#L513-L524 | def preprocess_gold(self, docs_golds):
"""Can be called before training to pre-process gold data. By default,
it handles nonprojectivity and adds missing tags to the tag map.
docs_golds (iterable): Tuples of `Doc` and `GoldParse` objects.
YIELDS (tuple): Tuples of preprocessed `Doc` and... | [
"def",
"preprocess_gold",
"(",
"self",
",",
"docs_golds",
")",
":",
"for",
"name",
",",
"proc",
"in",
"self",
".",
"pipeline",
":",
"if",
"hasattr",
"(",
"proc",
",",
"\"preprocess_gold\"",
")",
":",
"docs_golds",
"=",
"proc",
".",
"preprocess_gold",
"(",
... | Can be called before training to pre-process gold data. By default,
it handles nonprojectivity and adds missing tags to the tag map.
docs_golds (iterable): Tuples of `Doc` and `GoldParse` objects.
YIELDS (tuple): Tuples of preprocessed `Doc` and `GoldParse` objects. | [
"Can",
"be",
"called",
"before",
"training",
"to",
"pre",
"-",
"process",
"gold",
"data",
".",
"By",
"default",
"it",
"handles",
"nonprojectivity",
"and",
"adds",
"missing",
"tags",
"to",
"the",
"tag",
"map",
"."
] | python | train |
bwohlberg/sporco | sporco/util.py | https://github.com/bwohlberg/sporco/blob/8946a04331106f4e39904fbdf2dc7351900baa04/sporco/util.py#L836-L874 | def netgetdata(url, maxtry=3, timeout=10):
"""
Get content of a file via a URL.
Parameters
----------
url : string
URL of the file to be downloaded
maxtry : int, optional (default 3)
Maximum number of download retries
timeout : int, optional (default 10)
Timeout in seconds... | [
"def",
"netgetdata",
"(",
"url",
",",
"maxtry",
"=",
"3",
",",
"timeout",
"=",
"10",
")",
":",
"err",
"=",
"ValueError",
"(",
"'maxtry parameter should be greater than zero'",
")",
"for",
"ntry",
"in",
"range",
"(",
"maxtry",
")",
":",
"try",
":",
"rspns",... | Get content of a file via a URL.
Parameters
----------
url : string
URL of the file to be downloaded
maxtry : int, optional (default 3)
Maximum number of download retries
timeout : int, optional (default 10)
Timeout in seconds for blocking operations
Returns
-------
s... | [
"Get",
"content",
"of",
"a",
"file",
"via",
"a",
"URL",
"."
] | python | train |
molmod/molmod | molmod/pairff.py | https://github.com/molmod/molmod/blob/a7b5b4364ed514ad4c465856c05b5eda1cb561e0/molmod/pairff.py#L67-L89 | def update_coordinates(self, coordinates=None):
"""Update the coordinates (and derived quantities)
Argument:
coordinates -- new Cartesian coordinates of the system
"""
if coordinates is not None:
self.coordinates = coordinates
self.numc = len(self.c... | [
"def",
"update_coordinates",
"(",
"self",
",",
"coordinates",
"=",
"None",
")",
":",
"if",
"coordinates",
"is",
"not",
"None",
":",
"self",
".",
"coordinates",
"=",
"coordinates",
"self",
".",
"numc",
"=",
"len",
"(",
"self",
".",
"coordinates",
")",
"se... | Update the coordinates (and derived quantities)
Argument:
coordinates -- new Cartesian coordinates of the system | [
"Update",
"the",
"coordinates",
"(",
"and",
"derived",
"quantities",
")"
] | python | train |
awslabs/sockeye | sockeye/image_captioning/data_io.py | https://github.com/awslabs/sockeye/blob/5d64a1ee1ef3cbba17c6d1d94bc061020c43f6ab/sockeye/image_captioning/data_io.py#L168-L280 | def get_training_image_text_data_iters(source_root: str,
source: str, target: str,
validation_source_root: str,
validation_source: str, validation_target: str,
voca... | [
"def",
"get_training_image_text_data_iters",
"(",
"source_root",
":",
"str",
",",
"source",
":",
"str",
",",
"target",
":",
"str",
",",
"validation_source_root",
":",
"str",
",",
"validation_source",
":",
"str",
",",
"validation_target",
":",
"str",
",",
"vocab_... | Returns data iterators for training and validation data.
:param source_root: Path to source images since the file in source contains relative paths.
:param source: Path to source training data.
:param target: Path to target training data.
:param validation_source_root: Path to validation source images ... | [
"Returns",
"data",
"iterators",
"for",
"training",
"and",
"validation",
"data",
"."
] | python | train |
codelv/enaml-web | examples/simple_site/main.py | https://github.com/codelv/enaml-web/blob/88f1131a7b3ba9e83467b4f44bc3bab6f0de7559/examples/simple_site/main.py#L101-L112 | def _default_handlers(self):
""" Generate the handlers for this site """
static_path = os.path.abspath(os.path.join(os.path.dirname(__file__),"static"))
urls = [
(r"/static/(.*)", cyclone.web.StaticFileHandler, {"path": static_path}),
]
for p in self.pages:
... | [
"def",
"_default_handlers",
"(",
"self",
")",
":",
"static_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"\"static\"",
")",
")",
"urls",
"=",
"... | Generate the handlers for this site | [
"Generate",
"the",
"handlers",
"for",
"this",
"site"
] | python | test |
jopohl/urh | src/urh/signalprocessing/ProtocolSniffer.py | https://github.com/jopohl/urh/blob/2eb33b125c8407964cd1092843cde5010eb88aae/src/urh/signalprocessing/ProtocolSniffer.py#L155-L203 | def __demodulate_data(self, data):
"""
Demodulates received IQ data and adds demodulated bits to messages
:param data:
:return:
"""
if len(data) == 0:
return
power_spectrum = data.real ** 2 + data.imag ** 2
is_above_noise = np.sqrt(np.mean(pow... | [
"def",
"__demodulate_data",
"(",
"self",
",",
"data",
")",
":",
"if",
"len",
"(",
"data",
")",
"==",
"0",
":",
"return",
"power_spectrum",
"=",
"data",
".",
"real",
"**",
"2",
"+",
"data",
".",
"imag",
"**",
"2",
"is_above_noise",
"=",
"np",
".",
"... | Demodulates received IQ data and adds demodulated bits to messages
:param data:
:return: | [
"Demodulates",
"received",
"IQ",
"data",
"and",
"adds",
"demodulated",
"bits",
"to",
"messages",
":",
"param",
"data",
":",
":",
"return",
":"
] | python | train |
cvxopt/chompack | src/python/pybase/cholesky.py | https://github.com/cvxopt/chompack/blob/e07106b58b8055c34f6201e8c954482f86987833/src/python/pybase/cholesky.py#L5-L79 | def cholesky(X):
"""
Supernodal multifrontal Cholesky factorization:
.. math::
X = LL^T
where :math:`L` is lower-triangular. On exit, the argument :math:`X`
contains the Cholesky factor :math:`L`.
:param X: :py:class:`cspmatrix`
"""
assert isinstance(X, cspmatrix) and X.i... | [
"def",
"cholesky",
"(",
"X",
")",
":",
"assert",
"isinstance",
"(",
"X",
",",
"cspmatrix",
")",
"and",
"X",
".",
"is_factor",
"is",
"False",
",",
"\"X must be a cspmatrix\"",
"n",
"=",
"X",
".",
"symb",
".",
"n",
"snpost",
"=",
"X",
".",
"symb",
".",... | Supernodal multifrontal Cholesky factorization:
.. math::
X = LL^T
where :math:`L` is lower-triangular. On exit, the argument :math:`X`
contains the Cholesky factor :math:`L`.
:param X: :py:class:`cspmatrix` | [
"Supernodal",
"multifrontal",
"Cholesky",
"factorization",
":"
] | python | train |
bram85/topydo | topydo/ui/columns/Main.py | https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/ui/columns/Main.py#L495-L510 | def _update_view(self, p_data):
""" Creates a view from the data entered in the view widget. """
view = self._viewdata_to_view(p_data)
if self.column_mode == _APPEND_COLUMN or self.column_mode == _COPY_COLUMN:
self._add_column(view)
elif self.column_mode == _INSERT_COLUMN:
... | [
"def",
"_update_view",
"(",
"self",
",",
"p_data",
")",
":",
"view",
"=",
"self",
".",
"_viewdata_to_view",
"(",
"p_data",
")",
"if",
"self",
".",
"column_mode",
"==",
"_APPEND_COLUMN",
"or",
"self",
".",
"column_mode",
"==",
"_COPY_COLUMN",
":",
"self",
"... | Creates a view from the data entered in the view widget. | [
"Creates",
"a",
"view",
"from",
"the",
"data",
"entered",
"in",
"the",
"view",
"widget",
"."
] | python | train |
caktus/django-timepiece | timepiece/management/commands/check_entries.py | https://github.com/caktus/django-timepiece/blob/52515dec027664890efbc535429e1ba1ee152f40/timepiece/management/commands/check_entries.py#L84-L110 | def check_entry(self, entries, *args, **kwargs):
"""
With a list of entries, check each entry against every other
"""
verbosity = kwargs.get('verbosity', 1)
user_total_overlaps = 0
user = ''
for index_a, entry_a in enumerate(entries):
# Show the name t... | [
"def",
"check_entry",
"(",
"self",
",",
"entries",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"verbosity",
"=",
"kwargs",
".",
"get",
"(",
"'verbosity'",
",",
"1",
")",
"user_total_overlaps",
"=",
"0",
"user",
"=",
"''",
"for",
"index_a",
"... | With a list of entries, check each entry against every other | [
"With",
"a",
"list",
"of",
"entries",
"check",
"each",
"entry",
"against",
"every",
"other"
] | python | train |
ariebovenberg/snug | tutorial/composed0.py | https://github.com/ariebovenberg/snug/blob/4f5cd30e6b7b2c3f0ad3cc10be865bd8900b38ef/tutorial/composed0.py#L13-L15 | def repo(name: str, owner: str) -> snug.Query[dict]:
"""a repository lookup by owner and name"""
return json.loads((yield f'/repos/{owner}/{name}').content) | [
"def",
"repo",
"(",
"name",
":",
"str",
",",
"owner",
":",
"str",
")",
"->",
"snug",
".",
"Query",
"[",
"dict",
"]",
":",
"return",
"json",
".",
"loads",
"(",
"(",
"yield",
"f'/repos/{owner}/{name}'",
")",
".",
"content",
")"
] | a repository lookup by owner and name | [
"a",
"repository",
"lookup",
"by",
"owner",
"and",
"name"
] | python | train |
StanfordVL/robosuite | robosuite/environments/sawyer_lift.py | https://github.com/StanfordVL/robosuite/blob/65cd16810e2ed647e3ec88746af3412065b7f278/robosuite/environments/sawyer_lift.py#L138-L170 | def _load_model(self):
"""
Loads an xml model, puts it in self.model
"""
super()._load_model()
self.mujoco_robot.set_base_xpos([0, 0, 0])
# load model for table top workspace
self.mujoco_arena = TableArena(
table_full_size=self.table_full_size, table_... | [
"def",
"_load_model",
"(",
"self",
")",
":",
"super",
"(",
")",
".",
"_load_model",
"(",
")",
"self",
".",
"mujoco_robot",
".",
"set_base_xpos",
"(",
"[",
"0",
",",
"0",
",",
"0",
"]",
")",
"# load model for table top workspace",
"self",
".",
"mujoco_arena... | Loads an xml model, puts it in self.model | [
"Loads",
"an",
"xml",
"model",
"puts",
"it",
"in",
"self",
".",
"model"
] | python | train |
galactics/beyond | beyond/dates/eop.py | https://github.com/galactics/beyond/blob/7a7590ff0fd4c0bac3e8e383ecca03caa98e5742/beyond/dates/eop.py#L282-L293 | def register(cls, klass, name=DEFAULT_DBNAME):
"""Register an Eop Database
The only requirement of this database is that it should have ``__getitem__``
method accepting MJD as float.
"""
if name in cls._dbs:
msg = "'{}' is already registered for an Eop database. Ski... | [
"def",
"register",
"(",
"cls",
",",
"klass",
",",
"name",
"=",
"DEFAULT_DBNAME",
")",
":",
"if",
"name",
"in",
"cls",
".",
"_dbs",
":",
"msg",
"=",
"\"'{}' is already registered for an Eop database. Skipping\"",
".",
"format",
"(",
"name",
")",
"log",
".",
"... | Register an Eop Database
The only requirement of this database is that it should have ``__getitem__``
method accepting MJD as float. | [
"Register",
"an",
"Eop",
"Database"
] | python | train |
globality-corp/microcosm-flask | microcosm_flask/conventions/discovery.py | https://github.com/globality-corp/microcosm-flask/blob/c2eaf57f03e7d041eea343751a4a90fcc80df418/microcosm_flask/conventions/discovery.py#L81-L91 | def configure_discovery(graph):
"""
Build a singleton endpoint that provides a link to all search endpoints.
"""
ns = Namespace(
subject=graph.config.discovery_convention.name,
)
convention = DiscoveryConvention(graph)
convention.configure(ns, discover=tuple())
return ns.subject | [
"def",
"configure_discovery",
"(",
"graph",
")",
":",
"ns",
"=",
"Namespace",
"(",
"subject",
"=",
"graph",
".",
"config",
".",
"discovery_convention",
".",
"name",
",",
")",
"convention",
"=",
"DiscoveryConvention",
"(",
"graph",
")",
"convention",
".",
"co... | Build a singleton endpoint that provides a link to all search endpoints. | [
"Build",
"a",
"singleton",
"endpoint",
"that",
"provides",
"a",
"link",
"to",
"all",
"search",
"endpoints",
"."
] | python | train |
yinkaisheng/Python-UIAutomation-for-Windows | uiautomation/uiautomation.py | https://github.com/yinkaisheng/Python-UIAutomation-for-Windows/blob/2cc91060982cc8b777152e698d677cc2989bf263/uiautomation/uiautomation.py#L4739-L4749 | def RangeFromChild(self, child) -> TextRange:
"""
Call IUIAutomationTextPattern::RangeFromChild.
child: `Control` or its subclass.
Return `TextRange` or None, a text range enclosing a child element such as an image,
hyperlink, Microsoft Excel spreadsheet, or other embedded ob... | [
"def",
"RangeFromChild",
"(",
"self",
",",
"child",
")",
"->",
"TextRange",
":",
"textRange",
"=",
"self",
".",
"pattern",
".",
"RangeFromChild",
"(",
"Control",
".",
"Element",
")",
"if",
"textRange",
":",
"return",
"TextRange",
"(",
"textRange",
"=",
"te... | Call IUIAutomationTextPattern::RangeFromChild.
child: `Control` or its subclass.
Return `TextRange` or None, a text range enclosing a child element such as an image,
hyperlink, Microsoft Excel spreadsheet, or other embedded object.
Refer https://docs.microsoft.com/en-us/windows/deskt... | [
"Call",
"IUIAutomationTextPattern",
"::",
"RangeFromChild",
".",
"child",
":",
"Control",
"or",
"its",
"subclass",
".",
"Return",
"TextRange",
"or",
"None",
"a",
"text",
"range",
"enclosing",
"a",
"child",
"element",
"such",
"as",
"an",
"image",
"hyperlink",
"... | python | valid |
TissueMAPS/TmDeploy | tmdeploy/config.py | https://github.com/TissueMAPS/TmDeploy/blob/f891b4ffb21431988bc4a063ae871da3bf284a45/tmdeploy/config.py#L630-L636 | def tm_group(self):
'''str: TissueMAPS system group (defaults to
:attr:`tm_user <tmdeploy.config.AnsibleHostVariableSection.tm_user>`)
'''
if self._tm_group is None:
self._tm_group = self.tm_user
return self._tm_group | [
"def",
"tm_group",
"(",
"self",
")",
":",
"if",
"self",
".",
"_tm_group",
"is",
"None",
":",
"self",
".",
"_tm_group",
"=",
"self",
".",
"tm_user",
"return",
"self",
".",
"_tm_group"
] | str: TissueMAPS system group (defaults to
:attr:`tm_user <tmdeploy.config.AnsibleHostVariableSection.tm_user>`) | [
"str",
":",
"TissueMAPS",
"system",
"group",
"(",
"defaults",
"to",
":",
"attr",
":",
"tm_user",
"<tmdeploy",
".",
"config",
".",
"AnsibleHostVariableSection",
".",
"tm_user",
">",
")"
] | python | train |
frictionlessdata/tabulator-py | tabulator/helpers.py | https://github.com/frictionlessdata/tabulator-py/blob/06c25845a7139d919326388cc6335f33f909db8c/tabulator/helpers.py#L165-L173 | def extract_options(options, names):
"""Return options for names and remove it from given options in-place.
"""
result = {}
for name, value in copy(options).items():
if name in names:
result[name] = value
del options[name]
return result | [
"def",
"extract_options",
"(",
"options",
",",
"names",
")",
":",
"result",
"=",
"{",
"}",
"for",
"name",
",",
"value",
"in",
"copy",
"(",
"options",
")",
".",
"items",
"(",
")",
":",
"if",
"name",
"in",
"names",
":",
"result",
"[",
"name",
"]",
... | Return options for names and remove it from given options in-place. | [
"Return",
"options",
"for",
"names",
"and",
"remove",
"it",
"from",
"given",
"options",
"in",
"-",
"place",
"."
] | python | train |
Nekroze/librarian | librarian/library.py | https://github.com/Nekroze/librarian/blob/5d3da2980d91a637f80ad7164fbf204a2dd2bd58/librarian/library.py#L11-L52 | def Where_filter_gen(*data):
"""
Generate an sqlite "LIKE" filter generator based on the given data.
This functions arguments should be a N length series of field and data
tuples.
"""
where = []
def Fwhere(field, pattern):
"""Add where filter for the given field with the given patte... | [
"def",
"Where_filter_gen",
"(",
"*",
"data",
")",
":",
"where",
"=",
"[",
"]",
"def",
"Fwhere",
"(",
"field",
",",
"pattern",
")",
":",
"\"\"\"Add where filter for the given field with the given pattern.\"\"\"",
"where",
".",
"append",
"(",
"\"WHERE {0} LIKE '{1}'\"",... | Generate an sqlite "LIKE" filter generator based on the given data.
This functions arguments should be a N length series of field and data
tuples. | [
"Generate",
"an",
"sqlite",
"LIKE",
"filter",
"generator",
"based",
"on",
"the",
"given",
"data",
".",
"This",
"functions",
"arguments",
"should",
"be",
"a",
"N",
"length",
"series",
"of",
"field",
"and",
"data",
"tuples",
"."
] | python | train |
tBaxter/tango-photos | build/lib/photos/templatetags/gallery_tags.py | https://github.com/tBaxter/tango-photos/blob/aca52c6d6425cd6016468107a677479216285fc3/build/lib/photos/templatetags/gallery_tags.py#L20-L34 | def get_related_galleries(gallery, count=5):
"""
Gets latest related galleries from same section as originating gallery.
Count defaults to five but can be overridden.
Usage: {% get_related_galleries gallery <10> %}
"""
# just get the first cat. If they assigned to more than one, tough
try:... | [
"def",
"get_related_galleries",
"(",
"gallery",
",",
"count",
"=",
"5",
")",
":",
"# just get the first cat. If they assigned to more than one, tough",
"try",
":",
"cat",
"=",
"gallery",
".",
"sections",
".",
"all",
"(",
")",
"[",
"0",
"]",
"related",
"=",
"cat"... | Gets latest related galleries from same section as originating gallery.
Count defaults to five but can be overridden.
Usage: {% get_related_galleries gallery <10> %} | [
"Gets",
"latest",
"related",
"galleries",
"from",
"same",
"section",
"as",
"originating",
"gallery",
"."
] | python | train |
glomex/gcdt | gcdt/yugen_core.py | https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/yugen_core.py#L139-L159 | def create_api_key(awsclient, api_name, api_key_name):
"""Create a new API key as reference for api.conf.
:param api_name:
:param api_key_name:
:return: api_key
"""
_sleep()
client_api = awsclient.get_client('apigateway')
print('create api key: %s' % api_key_name)
response = client... | [
"def",
"create_api_key",
"(",
"awsclient",
",",
"api_name",
",",
"api_key_name",
")",
":",
"_sleep",
"(",
")",
"client_api",
"=",
"awsclient",
".",
"get_client",
"(",
"'apigateway'",
")",
"print",
"(",
"'create api key: %s'",
"%",
"api_key_name",
")",
"response"... | Create a new API key as reference for api.conf.
:param api_name:
:param api_key_name:
:return: api_key | [
"Create",
"a",
"new",
"API",
"key",
"as",
"reference",
"for",
"api",
".",
"conf",
"."
] | python | train |
rcarmo/pngcanvas | pngcanvas.py | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L164-L172 | def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.c... | [
"def",
"blend_rect",
"(",
"self",
",",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
",",
"dx",
",",
"dy",
",",
"destination",
",",
"alpha",
"=",
"0xff",
")",
":",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
"=",
"self",
".",
"rect_helper",
"(",
"x0",
",... | Blend a rectangle onto the image | [
"Blend",
"a",
"rectangle",
"onto",
"the",
"image"
] | python | train |
mental32/spotify.py | spotify/models/player.py | https://github.com/mental32/spotify.py/blob/bb296cac7c3dd289908906b7069bd80f43950515/spotify/models/player.py#L124-L135 | async def set_volume(self, volume: int, *, device: Optional[SomeDevice] = None):
"""Set the volume for the user’s current playback device.
Parameters
----------
volume : int
The volume to set. Must be a value from 0 to 100 inclusive.
device : Optional[:obj:`SomeDevic... | [
"async",
"def",
"set_volume",
"(",
"self",
",",
"volume",
":",
"int",
",",
"*",
",",
"device",
":",
"Optional",
"[",
"SomeDevice",
"]",
"=",
"None",
")",
":",
"await",
"self",
".",
"_user",
".",
"http",
".",
"set_playback_volume",
"(",
"volume",
",",
... | Set the volume for the user’s current playback device.
Parameters
----------
volume : int
The volume to set. Must be a value from 0 to 100 inclusive.
device : Optional[:obj:`SomeDevice`]
The Device object or id of the device this command is targeting.
... | [
"Set",
"the",
"volume",
"for",
"the",
"user’s",
"current",
"playback",
"device",
"."
] | python | test |
liamw9534/bt-manager | bt_manager/cod.py | https://github.com/liamw9534/bt-manager/blob/51be2919394ce8134c698359649bfad09eedf4ec/bt_manager/cod.py#L189-L201 | def minor_device_class(self):
"""
Return the minor device class property decoded e.g.,
Scanner, Printer, Loudspeaker, Camera, etc.
"""
minor_device = []
minor_lookup = BTCoD._MINOR_DEVICE_CLASS.get(self.cod &
BTCoD._MAJ... | [
"def",
"minor_device_class",
"(",
"self",
")",
":",
"minor_device",
"=",
"[",
"]",
"minor_lookup",
"=",
"BTCoD",
".",
"_MINOR_DEVICE_CLASS",
".",
"get",
"(",
"self",
".",
"cod",
"&",
"BTCoD",
".",
"_MAJOR_DEVICE_MASK",
",",
"[",
"]",
")",
"for",
"i",
"in... | Return the minor device class property decoded e.g.,
Scanner, Printer, Loudspeaker, Camera, etc. | [
"Return",
"the",
"minor",
"device",
"class",
"property",
"decoded",
"e",
".",
"g",
".",
"Scanner",
"Printer",
"Loudspeaker",
"Camera",
"etc",
"."
] | python | train |
has2k1/plotnine | plotnine/guides/guides.py | https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guides.py#L213-L223 | def validate(self, guide):
"""
Validate guide object
"""
if is_string(guide):
guide = Registry['guide_{}'.format(guide)]()
if not isinstance(guide, guide_class):
raise PlotnineError(
"Unknown guide: {}".format(guide))
return guide | [
"def",
"validate",
"(",
"self",
",",
"guide",
")",
":",
"if",
"is_string",
"(",
"guide",
")",
":",
"guide",
"=",
"Registry",
"[",
"'guide_{}'",
".",
"format",
"(",
"guide",
")",
"]",
"(",
")",
"if",
"not",
"isinstance",
"(",
"guide",
",",
"guide_clas... | Validate guide object | [
"Validate",
"guide",
"object"
] | python | train |
signalfx/signalfx-python | signalfx/ingest.py | https://github.com/signalfx/signalfx-python/blob/650eb9a2b301bcc795e4e3a8c031574ade69849d/signalfx/ingest.py#L199-L207 | def stop(self, msg='Thread stopped'):
"""Stop send thread and flush points for a safe exit."""
with self._lock:
if not self._thread_running:
return
self._thread_running = False
self._queue.put(_BaseSignalFxIngestClient._QUEUE_STOP)
self._send_threa... | [
"def",
"stop",
"(",
"self",
",",
"msg",
"=",
"'Thread stopped'",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"not",
"self",
".",
"_thread_running",
":",
"return",
"self",
".",
"_thread_running",
"=",
"False",
"self",
".",
"_queue",
".",
"put",
"... | Stop send thread and flush points for a safe exit. | [
"Stop",
"send",
"thread",
"and",
"flush",
"points",
"for",
"a",
"safe",
"exit",
"."
] | python | train |
bachya/regenmaschine | regenmaschine/controller.py | https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/controller.py#L85-L97 | async def login(self, password):
"""Authenticate against the device (locally)."""
auth_resp = await self._client_request(
'post',
'{0}/auth/login'.format(self._host),
json={
'pwd': password,
'remember': 1
})
self._a... | [
"async",
"def",
"login",
"(",
"self",
",",
"password",
")",
":",
"auth_resp",
"=",
"await",
"self",
".",
"_client_request",
"(",
"'post'",
",",
"'{0}/auth/login'",
".",
"format",
"(",
"self",
".",
"_host",
")",
",",
"json",
"=",
"{",
"'pwd'",
":",
"pas... | Authenticate against the device (locally). | [
"Authenticate",
"against",
"the",
"device",
"(",
"locally",
")",
"."
] | python | train |
geopy/geopy | geopy/geocoders/mapbox.py | https://github.com/geopy/geopy/blob/02c838d965e76497f3c3d61f53808c86b5c58224/geopy/geocoders/mapbox.py#L70-L85 | def _parse_json(self, json, exactly_one=True):
'''Returns location, (latitude, longitude) from json feed.'''
features = json['features']
if features == []:
return None
def parse_feature(feature):
location = feature['place_name']
place = feature['text'... | [
"def",
"_parse_json",
"(",
"self",
",",
"json",
",",
"exactly_one",
"=",
"True",
")",
":",
"features",
"=",
"json",
"[",
"'features'",
"]",
"if",
"features",
"==",
"[",
"]",
":",
"return",
"None",
"def",
"parse_feature",
"(",
"feature",
")",
":",
"loca... | Returns location, (latitude, longitude) from json feed. | [
"Returns",
"location",
"(",
"latitude",
"longitude",
")",
"from",
"json",
"feed",
"."
] | python | train |
ciena/afkak | afkak/producer.py | https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L315-L378 | def _send_requests(self, parts_results, requests):
"""Send the requests
We've determined the partition for each message group in the batch, or
got errors for them.
"""
# We use these dictionaries to be able to combine all the messages
# destined to the same topic/partiti... | [
"def",
"_send_requests",
"(",
"self",
",",
"parts_results",
",",
"requests",
")",
":",
"# We use these dictionaries to be able to combine all the messages",
"# destined to the same topic/partition into one request",
"# the messages & deferreds, both by topic+partition",
"reqsByTopicPart",
... | Send the requests
We've determined the partition for each message group in the batch, or
got errors for them. | [
"Send",
"the",
"requests"
] | python | train |
inspirehep/refextract | refextract/references/tag.py | https://github.com/inspirehep/refextract/blob/d70e3787be3c495a3a07d1517b53f81d51c788c7/refextract/references/tag.py#L1052-L1070 | def identify_ibids(line):
"""Find IBIDs within the line, record their position and length,
and replace them with underscores.
@param line: (string) the working reference line
@return: (tuple) containing 2 dictionaries and a string:
Dictionary: matched IBID text: (Key: position of IBI... | [
"def",
"identify_ibids",
"(",
"line",
")",
":",
"ibid_match_txt",
"=",
"{",
"}",
"# Record details of each matched ibid:",
"for",
"m_ibid",
"in",
"re_ibid",
".",
"finditer",
"(",
"line",
")",
":",
"ibid_match_txt",
"[",
"m_ibid",
".",
"start",
"(",
")",
"]",
... | Find IBIDs within the line, record their position and length,
and replace them with underscores.
@param line: (string) the working reference line
@return: (tuple) containing 2 dictionaries and a string:
Dictionary: matched IBID text: (Key: position of IBID in
line;... | [
"Find",
"IBIDs",
"within",
"the",
"line",
"record",
"their",
"position",
"and",
"length",
"and",
"replace",
"them",
"with",
"underscores",
"."
] | python | train |
def activities(self):
    """Lazily fetch and cache the activities.

    The remote fetch is performed at most once; every later call
    returns the cached result.
    """
    cached = self._activities
    if cached is None:
        cached = self._fetch_activities()
        self._activities = cached
    return cached
"def",
"activities",
"(",
"self",
")",
":",
"if",
"self",
".",
"_activities",
"is",
"None",
":",
"self",
".",
"_activities",
"=",
"self",
".",
"_fetch_activities",
"(",
")",
"return",
"self",
".",
"_activities"
] | Return all activites (fetch only once) | [
"Return",
"all",
"activites",
"(",
"fetch",
"only",
"once",
")"
] | python | train |
def getipmacarp(self):
    """
    Refresh the ``ipmacarp`` attribute of this IMCDev object by querying
    the IMC server for the device's IP/MAC ARP table.
    :return: None; the fetched list is stored on ``self.ipmacarp``
    """
    self.ipmacarp = get_ip_mac_arp_list(
        self.auth, self.url, devid=self.devid)
"def",
"getipmacarp",
"(",
"self",
")",
":",
"self",
".",
"ipmacarp",
"=",
"get_ip_mac_arp_list",
"(",
"self",
".",
"auth",
",",
"self",
".",
"url",
",",
"devid",
"=",
"self",
".",
"devid",
")"
] | Function operates on the IMCDev object and updates the ipmacarp attribute
:return: | [
"Function",
"operates",
"on",
"the",
"IMCDev",
"object",
"and",
"updates",
"the",
"ipmacarp",
"attribute",
":",
"return",
":"
] | python | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.