nwo stringlengths 5 106 | sha stringlengths 40 40 | path stringlengths 4 174 | language stringclasses 1
value | identifier stringlengths 1 140 | parameters stringlengths 0 87.7k | argument_list stringclasses 1
value | return_statement stringlengths 0 426k | docstring stringlengths 0 64.3k | docstring_summary stringlengths 0 26.3k | docstring_tokens list | function stringlengths 18 4.83M | function_tokens list | url stringlengths 83 304 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
google/rekall | 55d1925f2df9759a989b35271b4fa48fc54a1c86 | rekall-core/rekall/plugins/windows/registry/registry.py | python | _CM_KEY_NODE.values | (self) | Enumerate all the values of the key. | Enumerate all the values of the key. | [
"Enumerate",
"all",
"the",
"values",
"of",
"the",
"key",
"."
] | def values(self):
"""Enumerate all the values of the key."""
for value_ptr in self.ValueList.List.dereference():
value = value_ptr.dereference()
if value.Signature == self.VK_SIG:
yield value | [
"def",
"values",
"(",
"self",
")",
":",
"for",
"value_ptr",
"in",
"self",
".",
"ValueList",
".",
"List",
".",
"dereference",
"(",
")",
":",
"value",
"=",
"value_ptr",
".",
"dereference",
"(",
")",
"if",
"value",
".",
"Signature",
"==",
"self",
".",
"... | https://github.com/google/rekall/blob/55d1925f2df9759a989b35271b4fa48fc54a1c86/rekall-core/rekall/plugins/windows/registry/registry.py#L354-L359 | ||
eth-brownie/brownie | 754bda9f0a294b2beb86453d5eca4ff769a877c8 | brownie/_config.py | python | ConfigDict._lock | (self) | Locks the dict so that new keys cannot be added | Locks the dict so that new keys cannot be added | [
"Locks",
"the",
"dict",
"so",
"that",
"new",
"keys",
"cannot",
"be",
"added"
] | def _lock(self) -> None:
"""Locks the dict so that new keys cannot be added"""
for v in [i for i in self.values() if type(i) is ConfigDict]:
v._lock()
self._locked = True | [
"def",
"_lock",
"(",
"self",
")",
"->",
"None",
":",
"for",
"v",
"in",
"[",
"i",
"for",
"i",
"in",
"self",
".",
"values",
"(",
")",
"if",
"type",
"(",
"i",
")",
"is",
"ConfigDict",
"]",
":",
"v",
".",
"_lock",
"(",
")",
"self",
".",
"_locked"... | https://github.com/eth-brownie/brownie/blob/754bda9f0a294b2beb86453d5eca4ff769a877c8/brownie/_config.py#L142-L146 | ||
ilius/pyglossary | d599b3beda3ae17642af5debd83bb991148e6425 | pyglossary/ui/ui_cmd.py | python | UI.fixLogger | (self) | [] | def fixLogger(self):
for h in log.handlers:
if h.name == "std":
self.fixLogHandler(h)
return | [
"def",
"fixLogger",
"(",
"self",
")",
":",
"for",
"h",
"in",
"log",
".",
"handlers",
":",
"if",
"h",
".",
"name",
"==",
"\"std\"",
":",
"self",
".",
"fixLogHandler",
"(",
"h",
")",
"return"
] | https://github.com/ilius/pyglossary/blob/d599b3beda3ae17642af5debd83bb991148e6425/pyglossary/ui/ui_cmd.py#L182-L186 | ||||
Chaffelson/nipyapi | d3b186fd701ce308c2812746d98af9120955e810 | nipyapi/nifi/models/remote_process_group_dto.py | python | RemoteProcessGroupDTO.inactive_remote_output_port_count | (self) | return self._inactive_remote_output_port_count | Gets the inactive_remote_output_port_count of this RemoteProcessGroupDTO.
The number of inactive remote output ports.
:return: The inactive_remote_output_port_count of this RemoteProcessGroupDTO.
:rtype: int | Gets the inactive_remote_output_port_count of this RemoteProcessGroupDTO.
The number of inactive remote output ports. | [
"Gets",
"the",
"inactive_remote_output_port_count",
"of",
"this",
"RemoteProcessGroupDTO",
".",
"The",
"number",
"of",
"inactive",
"remote",
"output",
"ports",
"."
] | def inactive_remote_output_port_count(self):
"""
Gets the inactive_remote_output_port_count of this RemoteProcessGroupDTO.
The number of inactive remote output ports.
:return: The inactive_remote_output_port_count of this RemoteProcessGroupDTO.
:rtype: int
"""
return self._inactive_remote_output_port_count | [
"def",
"inactive_remote_output_port_count",
"(",
"self",
")",
":",
"return",
"self",
".",
"_inactive_remote_output_port_count"
] | https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/remote_process_group_dto.py#L752-L760 | |
suavecode/SUAVE | 4f83c467c5662b6cc611ce2ab6c0bdd25fd5c0a5 | trunk/SUAVE/Analyses/Aerodynamics/AERODAS.py | python | AERODAS.__defaults__ | (self) | This sets the default values and methods for the analysis.
Assumptions:
None
Source:
N/A
Inputs:
None
Outputs:
None
Properties Used:
N/A | This sets the default values and methods for the analysis. | [
"This",
"sets",
"the",
"default",
"values",
"and",
"methods",
"for",
"the",
"analysis",
"."
] | def __defaults__(self):
"""This sets the default values and methods for the analysis.
Assumptions:
None
Source:
N/A
Inputs:
None
Outputs:
None
Properties Used:
N/A
"""
self.tag = 'AERODAS Model'
settings = self.settings
settings.section_zero_lift_angle_of_attack = 0.0 * Units.deg
settings.section_minimum_drag_coefficient_angle_of_attack = 0.0 * Units.deg
settings.section_lift_curve_slope = 2.0 * np.pi
# build the evaluation process
compute = self.process.compute
compute.setup_data = Methods.AERODAS_setup.setup_data
# Get all of the coefficients for AERODAS wings
compute.wings_coefficients = Process()
compute.wings_coefficients = Process_Geometry('wings')
compute.wings_coefficients.section_properties = Methods.section_properties.section_properties
compute.wings_coefficients.finite_aspect_ratio = Methods.finite_aspect_ratio.finite_aspect_ratio
compute.wings_coefficients.pre_stall = Methods.pre_stall_coefficients.pre_stall_coefficients
compute.wings_coefficients.post_stall = Methods.post_stall_coefficients.post_stall_coefficients
# Fuselage drag?
# do a plate build up with angles
# Miscellaneous drag?
# Compressibility corrections?
compute.lift_drag_total = Methods.AERODAS_setup.lift_drag_total
compute.lift = Process()
compute.lift.total = Common.Lift.aircraft_total
compute.drag = Process()
compute.drag.total = Methods.AERODAS_setup.drag_total
def initialize(self):
super(AERODAS, self).initialize()
self.process.compute.lift.inviscid_wings.geometry = self.geometry
self.process.compute.lift.inviscid_wings.initialize()
finalize = initialize | [
"def",
"__defaults__",
"(",
"self",
")",
":",
"self",
".",
"tag",
"=",
"'AERODAS Model'",
"settings",
"=",
"self",
".",
"settings",
"settings",
".",
"section_zero_lift_angle_of_attack",
"=",
"0.0",
"*",
"Units",
".",
"deg",
"settings",
".",
"section_minimum_drag... | https://github.com/suavecode/SUAVE/blob/4f83c467c5662b6cc611ce2ab6c0bdd25fd5c0a5/trunk/SUAVE/Analyses/Aerodynamics/AERODAS.py#L33-L89 | ||
opendevops-cn/codo-cmdb | 334fba324512841d84535f31a094717eb5a40acf | libs/server/push_system_user.py | python | PushSystemUser.get_asset_info | (self) | 获取所有可连接资产信息
:return: | 获取所有可连接资产信息
:return: | [
"获取所有可连接资产信息",
":",
"return",
":"
] | def get_asset_info(self):
"""
获取所有可连接资产信息
:return:
"""
with DBContext('r') as session:
# 只拿到登陆用到的IP Port User
server_list = session.query(Server.ip, Server.port, AdminUser.system_user,
).outerjoin(AdminUser,
AdminUser.admin_user == Server.admin_user).filter(
Server.state == 'true').all()
return server_list | [
"def",
"get_asset_info",
"(",
"self",
")",
":",
"with",
"DBContext",
"(",
"'r'",
")",
"as",
"session",
":",
"# 只拿到登陆用到的IP Port User",
"server_list",
"=",
"session",
".",
"query",
"(",
"Server",
".",
"ip",
",",
"Server",
".",
"port",
",",
"AdminUser",
".",
... | https://github.com/opendevops-cn/codo-cmdb/blob/334fba324512841d84535f31a094717eb5a40acf/libs/server/push_system_user.py#L77-L89 | ||
ceph/ceph-ansible | 583e60af84180f0414d67ee52c3ec7cd64ddb4dd | library/radosgw_zone.py | python | container_exec | (binary, container_image) | return command_exec | Build the docker CLI to run a command inside a container | Build the docker CLI to run a command inside a container | [
"Build",
"the",
"docker",
"CLI",
"to",
"run",
"a",
"command",
"inside",
"a",
"container"
] | def container_exec(binary, container_image):
'''
Build the docker CLI to run a command inside a container
'''
container_binary = os.getenv('CEPH_CONTAINER_BINARY')
command_exec = [container_binary,
'run',
'--rm',
'--net=host',
'-v', '/etc/ceph:/etc/ceph:z',
'-v', '/var/lib/ceph/:/var/lib/ceph/:z',
'-v', '/var/log/ceph/:/var/log/ceph/:z',
'--entrypoint=' + binary, container_image]
return command_exec | [
"def",
"container_exec",
"(",
"binary",
",",
"container_image",
")",
":",
"container_binary",
"=",
"os",
".",
"getenv",
"(",
"'CEPH_CONTAINER_BINARY'",
")",
"command_exec",
"=",
"[",
"container_binary",
",",
"'run'",
",",
"'--rm'",
",",
"'--net=host'",
",",
"'-v... | https://github.com/ceph/ceph-ansible/blob/583e60af84180f0414d67ee52c3ec7cd64ddb4dd/library/radosgw_zone.py#L127-L141 | |
shiweibsw/Translation-Tools | 2fbbf902364e557fa7017f9a74a8797b7440c077 | venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/distlib/util.py | python | get_project_data | (name) | return result | [] | def get_project_data(name):
url = '%s/%s/project.json' % (name[0].upper(), name)
url = urljoin(_external_data_base_url, url)
result = _get_external_data(url)
return result | [
"def",
"get_project_data",
"(",
"name",
")",
":",
"url",
"=",
"'%s/%s/project.json'",
"%",
"(",
"name",
"[",
"0",
"]",
".",
"upper",
"(",
")",
",",
"name",
")",
"url",
"=",
"urljoin",
"(",
"_external_data_base_url",
",",
"url",
")",
"result",
"=",
"_ge... | https://github.com/shiweibsw/Translation-Tools/blob/2fbbf902364e557fa7017f9a74a8797b7440c077/venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/distlib/util.py#L783-L787 | |||
blurstudio/cross3d | 277968d1227de740fc87ef61005c75034420eadf | cross3d/abstract/abstractscenemap.py | python | AbstractSceneMap.fromXml | (scene, xml) | return None | Create a new map from the inputed xml data
:param xml: :class:`cross3d.migrate.XMLElement` | Create a new map from the inputed xml data
:param xml: :class:`cross3d.migrate.XMLElement` | [
"Create",
"a",
"new",
"map",
"from",
"the",
"inputed",
"xml",
"data",
":",
"param",
"xml",
":",
":",
"class",
":",
"cross3d",
".",
"migrate",
".",
"XMLElement"
] | def fromXml(scene, xml):
"""Create a new map from the inputed xml data
:param xml: :class:`cross3d.migrate.XMLElement`
"""
if (xml):
return scene.findMap(name=xml.attribute('name'), uniqueId=int(xml.attribute('id', 0)))
return None | [
"def",
"fromXml",
"(",
"scene",
",",
"xml",
")",
":",
"if",
"(",
"xml",
")",
":",
"return",
"scene",
".",
"findMap",
"(",
"name",
"=",
"xml",
".",
"attribute",
"(",
"'name'",
")",
",",
"uniqueId",
"=",
"int",
"(",
"xml",
".",
"attribute",
"(",
"'... | https://github.com/blurstudio/cross3d/blob/277968d1227de740fc87ef61005c75034420eadf/cross3d/abstract/abstractscenemap.py#L27-L35 | |
zhl2008/awd-platform | 0416b31abea29743387b10b3914581fbe8e7da5e | web_flaskbb/lib/python2.7/site-packages/pygments/lexers/sql.py | python | SqliteConsoleLexer.get_tokens_unprocessed | (self, data) | [] | def get_tokens_unprocessed(self, data):
sql = SqlLexer(**self.options)
curcode = ''
insertions = []
for match in line_re.finditer(data):
line = match.group()
if line.startswith('sqlite> ') or line.startswith(' ...> '):
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:8])]))
curcode += line[8:]
else:
if curcode:
for item in do_insertions(insertions,
sql.get_tokens_unprocessed(curcode)):
yield item
curcode = ''
insertions = []
if line.startswith('SQL error: '):
yield (match.start(), Generic.Traceback, line)
else:
yield (match.start(), Generic.Output, line)
if curcode:
for item in do_insertions(insertions,
sql.get_tokens_unprocessed(curcode)):
yield item | [
"def",
"get_tokens_unprocessed",
"(",
"self",
",",
"data",
")",
":",
"sql",
"=",
"SqlLexer",
"(",
"*",
"*",
"self",
".",
"options",
")",
"curcode",
"=",
"''",
"insertions",
"=",
"[",
"]",
"for",
"match",
"in",
"line_re",
".",
"finditer",
"(",
"data",
... | https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/pygments/lexers/sql.py#L625-L650 | ||||
linxid/Machine_Learning_Study_Path | 558e82d13237114bbb8152483977806fc0c222af | Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/site-packages/pkg_resources/__init__.py | python | EntryPoint.__init__ | (self, name, module_name, attrs=(), extras=(), dist=None) | [] | def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
if not MODULE(module_name):
raise ValueError("Invalid module name", module_name)
self.name = name
self.module_name = module_name
self.attrs = tuple(attrs)
self.extras = tuple(extras)
self.dist = dist | [
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"module_name",
",",
"attrs",
"=",
"(",
")",
",",
"extras",
"=",
"(",
")",
",",
"dist",
"=",
"None",
")",
":",
"if",
"not",
"MODULE",
"(",
"module_name",
")",
":",
"raise",
"ValueError",
"(",
"\"Inva... | https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/site-packages/pkg_resources/__init__.py#L2375-L2382 | ||||
dropbox/nsot | 941b11f84f5c0d210f638654a6ed34a5610af22a | nsot/util/stats.py | python | calculate_network_utilization | (parent, hosts, as_string=False) | return stats | Calculate utilization for a network and its descendants.
:param parent:
The parent network
:param hosts:
List of host IPs descendant from parent
:param as_string:
Whether to return stats as a string | Calculate utilization for a network and its descendants. | [
"Calculate",
"utilization",
"for",
"a",
"network",
"and",
"its",
"descendants",
"."
] | def calculate_network_utilization(parent, hosts, as_string=False):
"""
Calculate utilization for a network and its descendants.
:param parent:
The parent network
:param hosts:
List of host IPs descendant from parent
:param as_string:
Whether to return stats as a string
"""
parent = IPNetwork(str(parent))
hosts = IPSet(str(ip) for ip in hosts if IPNetwork(str(ip)) in parent)
used = float(hosts.size) / float(parent.size)
free = 1 - used
num_free = parent.size - hosts.size
stats = {
'percent_used': used,
'num_used': hosts.size,
'percent_free': free,
'num_free': num_free,
'max': parent.size,
}
# 10.47.216.0/22 - 14% used (139), 86% free (885)
if as_string:
return '{} - {:.0%} used ({}), {:.0%} free ({})'.format(
parent, used, hosts.size, free, num_free
)
return stats | [
"def",
"calculate_network_utilization",
"(",
"parent",
",",
"hosts",
",",
"as_string",
"=",
"False",
")",
":",
"parent",
"=",
"IPNetwork",
"(",
"str",
"(",
"parent",
")",
")",
"hosts",
"=",
"IPSet",
"(",
"str",
"(",
"ip",
")",
"for",
"ip",
"in",
"hosts... | https://github.com/dropbox/nsot/blob/941b11f84f5c0d210f638654a6ed34a5610af22a/nsot/util/stats.py#L14-L48 | |
farcepest/moist | fbf3c7cc741322733d6d30a21f3772b4fdcbd9e5 | MySQLdb/converters.py | python | bool_to_sql | (connection, boolean) | return str(int(boolean)) | Convert a Python bool to an SQL literal. | Convert a Python bool to an SQL literal. | [
"Convert",
"a",
"Python",
"bool",
"to",
"an",
"SQL",
"literal",
"."
] | def bool_to_sql(connection, boolean):
"""Convert a Python bool to an SQL literal."""
return str(int(boolean)) | [
"def",
"bool_to_sql",
"(",
"connection",
",",
"boolean",
")",
":",
"return",
"str",
"(",
"int",
"(",
"boolean",
")",
")"
] | https://github.com/farcepest/moist/blob/fbf3c7cc741322733d6d30a21f3772b4fdcbd9e5/MySQLdb/converters.py#L20-L22 | |
WikidPad/WikidPad | 558109638807bc76b4672922686e416ab2d5f79c | WikidPad/extensions/mediaWikiParser/MediaWikiParser.py | python | _TheParser.getWikiLanguageName | () | return WIKI_LANGUAGE_NAME | Return the internal name of the wiki language implemented by this
parser. | Return the internal name of the wiki language implemented by this
parser. | [
"Return",
"the",
"internal",
"name",
"of",
"the",
"wiki",
"language",
"implemented",
"by",
"this",
"parser",
"."
] | def getWikiLanguageName():
"""
Return the internal name of the wiki language implemented by this
parser.
"""
return WIKI_LANGUAGE_NAME | [
"def",
"getWikiLanguageName",
"(",
")",
":",
"return",
"WIKI_LANGUAGE_NAME"
] | https://github.com/WikidPad/WikidPad/blob/558109638807bc76b4672922686e416ab2d5f79c/WikidPad/extensions/mediaWikiParser/MediaWikiParser.py#L1549-L1554 | |
PyHDI/veriloggen | 2382d200deabf59cfcfd741f5eba371010aaf2bb | veriloggen/types/fixed.py | python | _to_fixed_neg_point | (value, point) | return shift_right(value, point, signed) | [] | def _to_fixed_neg_point(value, point):
point = -point
if isinstance(value, (int, bool, float)) and isinstance(point, int):
mag = 2 ** point
return int(value / mag)
if isinstance(value, (int, bool)):
return vtypes.Int(value) >> point
if isinstance(value, float):
mag = vtypes.Int(2) ** point
return vtypes.Float(value) / mag
signed = vtypes.get_signed(value)
return shift_right(value, point, signed) | [
"def",
"_to_fixed_neg_point",
"(",
"value",
",",
"point",
")",
":",
"point",
"=",
"-",
"point",
"if",
"isinstance",
"(",
"value",
",",
"(",
"int",
",",
"bool",
",",
"float",
")",
")",
"and",
"isinstance",
"(",
"point",
",",
"int",
")",
":",
"mag",
... | https://github.com/PyHDI/veriloggen/blob/2382d200deabf59cfcfd741f5eba371010aaf2bb/veriloggen/types/fixed.py#L120-L135 | |||
IronLanguages/ironpython2 | 51fdedeeda15727717fb8268a805f71b06c0b9f1 | Src/StdLib/Lib/cookielib.py | python | time2netscape | (t=None) | return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % (
DAYS[wday], mday, MONTHS[mon-1], year, hour, min, sec) | Return a string representing time in seconds since epoch, t.
If the function is called without an argument, it will use the current
time.
The format of the returned string is like this:
Wed, DD-Mon-YYYY HH:MM:SS GMT | Return a string representing time in seconds since epoch, t. | [
"Return",
"a",
"string",
"representing",
"time",
"in",
"seconds",
"since",
"epoch",
"t",
"."
] | def time2netscape(t=None):
"""Return a string representing time in seconds since epoch, t.
If the function is called without an argument, it will use the current
time.
The format of the returned string is like this:
Wed, DD-Mon-YYYY HH:MM:SS GMT
"""
if t is None: t = time.time()
year, mon, mday, hour, min, sec, wday = time.gmtime(t)[:7]
return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % (
DAYS[wday], mday, MONTHS[mon-1], year, hour, min, sec) | [
"def",
"time2netscape",
"(",
"t",
"=",
"None",
")",
":",
"if",
"t",
"is",
"None",
":",
"t",
"=",
"time",
".",
"time",
"(",
")",
"year",
",",
"mon",
",",
"mday",
",",
"hour",
",",
"min",
",",
"sec",
",",
"wday",
"=",
"time",
".",
"gmtime",
"("... | https://github.com/IronLanguages/ironpython2/blob/51fdedeeda15727717fb8268a805f71b06c0b9f1/Src/StdLib/Lib/cookielib.py#L103-L117 | |
IronLanguages/ironpython3 | 7a7bb2a872eeab0d1009fc8a6e24dca43f65b693 | Src/StdLib/Lib/tkinter/__init__.py | python | Misc.unbind_all | (self, sequence) | Unbind for all widgets for event SEQUENCE all functions. | Unbind for all widgets for event SEQUENCE all functions. | [
"Unbind",
"for",
"all",
"widgets",
"for",
"event",
"SEQUENCE",
"all",
"functions",
"."
] | def unbind_all(self, sequence):
"""Unbind for all widgets for event SEQUENCE all functions."""
self.tk.call('bind', 'all' , sequence, '') | [
"def",
"unbind_all",
"(",
"self",
",",
"sequence",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"'bind'",
",",
"'all'",
",",
"sequence",
",",
"''",
")"
] | https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/tkinter/__init__.py#L1106-L1108 | ||
aouyar/PyMunin | 94624d4f56340cb2ed7e96ca3c5d9533a0721306 | pysysinfo/asterisk.py | python | AsteriskInfo.getConferenceStats | (self) | return info_dict | Query Asterisk Manager Interface for Conference Room Stats.
CLI Command - meetme list
@return: Dictionary of statistics counters for Conference Rooms. | Query Asterisk Manager Interface for Conference Room Stats.
CLI Command - meetme list | [
"Query",
"Asterisk",
"Manager",
"Interface",
"for",
"Conference",
"Room",
"Stats",
".",
"CLI",
"Command",
"-",
"meetme",
"list"
] | def getConferenceStats(self):
"""Query Asterisk Manager Interface for Conference Room Stats.
CLI Command - meetme list
@return: Dictionary of statistics counters for Conference Rooms.
"""
if not self.hasConference():
return None
if self.checkVersion('1.6'):
cmd = "meetme list"
else:
cmd = "meetme"
cmdresp = self.executeCommand(cmd)
info_dict = dict(active_conferences = 0, conference_users = 0)
for line in cmdresp.splitlines():
mobj = re.match('\w+\s+0(\d+)\s', line)
if mobj:
info_dict['active_conferences'] += 1
info_dict['conference_users'] += int(mobj.group(1))
return info_dict | [
"def",
"getConferenceStats",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"hasConference",
"(",
")",
":",
"return",
"None",
"if",
"self",
".",
"checkVersion",
"(",
"'1.6'",
")",
":",
"cmd",
"=",
"\"meetme list\"",
"else",
":",
"cmd",
"=",
"\"meetme\"... | https://github.com/aouyar/PyMunin/blob/94624d4f56340cb2ed7e96ca3c5d9533a0721306/pysysinfo/asterisk.py#L564-L587 | |
njsmith/colorspacious | 58948923b706879a54071568c7501be3f108797c | colorspacious/ciecam02.py | python | CIECAM02Space.XYZ100_to_CIECAM02 | (self, XYZ100, on_negative_A="raise") | return JChQMsH(J, C, h, Q, M, s, H) | Computes CIECAM02 appearance correlates for the given tristimulus
value(s) XYZ (normalized to be on the 0-100 scale).
Example: ``vc.XYZ100_to_CIECAM02([30.0, 45.5, 21.0])``
:param XYZ100: An array-like of tristimulus values. These should be
given on the 0-100 scale, not the 0-1 scale. The array-like should
have shape ``(..., 3)``; e.g., you can use a simple 3-item list
(shape = ``(3,)``), or to efficiently perform multiple computations
at once, you could pass a higher-dimensional array, e.g. an image.
:arg on_negative_A: A known infelicity of the CIECAM02 model is that
for some inputs, the achromatic signal :math:`A` can be negative,
which makes it impossible to compute :math:`J`, :math:`C`,
:math:`Q`, :math:`M`, or :math:`s` -- only :math:`h`: and :math:`H`
are spared. (See, e.g., section 2.6.4.1 of :cite:`Luo-CIECAM02` for
discussion.) This argument allows you to specify a strategy for
handling such points. Options are:
* ``"raise"``: throws a :class:`NegativeAError` (a subclass of
:class:`ValueError`)
* ``"nan"``: return not-a-number values for the affected
elements. (This may be particularly useful if converting a large
number of points at once.)
:returns: A named tuple of type :class:`JChQMsH`, with attributes
``J``, ``C``, ``h``, ``Q``, ``M``, ``s``, and ``H`` containing the
CIECAM02 appearance correlates. | Computes CIECAM02 appearance correlates for the given tristimulus
value(s) XYZ (normalized to be on the 0-100 scale). | [
"Computes",
"CIECAM02",
"appearance",
"correlates",
"for",
"the",
"given",
"tristimulus",
"value",
"(",
"s",
")",
"XYZ",
"(",
"normalized",
"to",
"be",
"on",
"the",
"0",
"-",
"100",
"scale",
")",
"."
] | def XYZ100_to_CIECAM02(self, XYZ100, on_negative_A="raise"):
"""Computes CIECAM02 appearance correlates for the given tristimulus
value(s) XYZ (normalized to be on the 0-100 scale).
Example: ``vc.XYZ100_to_CIECAM02([30.0, 45.5, 21.0])``
:param XYZ100: An array-like of tristimulus values. These should be
given on the 0-100 scale, not the 0-1 scale. The array-like should
have shape ``(..., 3)``; e.g., you can use a simple 3-item list
(shape = ``(3,)``), or to efficiently perform multiple computations
at once, you could pass a higher-dimensional array, e.g. an image.
:arg on_negative_A: A known infelicity of the CIECAM02 model is that
for some inputs, the achromatic signal :math:`A` can be negative,
which makes it impossible to compute :math:`J`, :math:`C`,
:math:`Q`, :math:`M`, or :math:`s` -- only :math:`h`: and :math:`H`
are spared. (See, e.g., section 2.6.4.1 of :cite:`Luo-CIECAM02` for
discussion.) This argument allows you to specify a strategy for
handling such points. Options are:
* ``"raise"``: throws a :class:`NegativeAError` (a subclass of
:class:`ValueError`)
* ``"nan"``: return not-a-number values for the affected
elements. (This may be particularly useful if converting a large
number of points at once.)
:returns: A named tuple of type :class:`JChQMsH`, with attributes
``J``, ``C``, ``h``, ``Q``, ``M``, ``s``, and ``H`` containing the
CIECAM02 appearance correlates.
"""
#### Argument checking
XYZ100 = np.asarray(XYZ100, dtype=float)
if XYZ100.shape[-1] != 3:
raise ValueError("XYZ100 shape must be (..., 3)")
#### Step 1
RGB = broadcasting_matvec(M_CAT02, XYZ100)
#### Step 2
RGB_C = self.D_RGB * RGB
#### Step 3
RGBprime = broadcasting_matvec(M_HPE_M_CAT02_inv, RGB_C)
#### Step 4
RGBprime_signs = np.sign(RGBprime)
tmp = (self.F_L * RGBprime_signs * RGBprime / 100) ** 0.42
RGBprime_a = RGBprime_signs * 400 * (tmp / (tmp + 27.13)) + 0.1
#### Step 5
a = broadcasting_matvec([1, -12. / 11, 1. / 11], RGBprime_a)
b = broadcasting_matvec([1. / 9, 1. / 9, -2. / 9], RGBprime_a)
h_rad = np.arctan2(b, a)
h = np.rad2deg(h_rad) % 360
# #### Step 6
# hprime = h, unless h < 20.14, in which case hprime = h + 360.
hprime = np.select([h < h_i[0], True], [h + 360, h])
# we use 0-based indexing, so our i is one less than the reference
# formulas' i.
i = np.searchsorted(h_i, hprime, side="right") - 1
tmp = (hprime - h_i[i]) / e_i[i]
H = H_i[i] + ((100 * tmp)
/ (tmp + (h_i[i + 1] - hprime) / e_i[i + 1]))
#### Step 7
A = ((broadcasting_matvec([2, 1, 1. / 20], RGBprime_a) - 0.305)
* self.N_bb)
if on_negative_A == "raise":
if np.any(A < 0):
raise NegativeAError("attempted to convert a tristimulus "
"value whose achromatic signal was "
"negative, and on_negative_A=\"raise\"")
elif on_negative_A == "nan":
A = np.select([A < 0, True], [np.nan, A])
else:
raise ValueError("Invalid on_negative_A argument: got %r, "
"expected \"raise\" or \"nan\""
% (on_negative_A,))
#### Step 8
J = 100 * (A / self.A_w) ** (self.c * self.z)
#### Step 9
Q = self._J_to_Q(J)
#### Step 10
e = (12500. / 13) * self.N_c * self.N_cb * (np.cos(h_rad + 2) + 3.8)
t = (e * np.sqrt(a ** 2 + b ** 2)
/ broadcasting_matvec([1, 1, 21. / 20], RGBprime_a))
C = t**0.9 * (J / 100)**0.5 * (1.64 - 0.29**self.n)**0.73
M = C * self.F_L**0.25
s = 100 * (M / Q)**0.5
return JChQMsH(J, C, h, Q, M, s, H) | [
"def",
"XYZ100_to_CIECAM02",
"(",
"self",
",",
"XYZ100",
",",
"on_negative_A",
"=",
"\"raise\"",
")",
":",
"#### Argument checking",
"XYZ100",
"=",
"np",
".",
"asarray",
"(",
"XYZ100",
",",
"dtype",
"=",
"float",
")",
"if",
"XYZ100",
".",
"shape",
"[",
"-"... | https://github.com/njsmith/colorspacious/blob/58948923b706879a54071568c7501be3f108797c/colorspacious/ciecam02.py#L143-L252 | |
nilearn/nilearn | 9edba4471747efacf21260bf470a346307f52706 | nilearn/regions/rena_clustering.py | python | _make_3d_edges | (vertices, is_mask) | return edges | Create the edges set: Returns a list of edges for a 3D image.
Parameters
----------
vertices : ndarray
The indices of the voxels.
is_mask : boolean
If is_mask is true, it returns the mask of edges.
Returns 1 if the edge is contained in the mask, 0 otherwise.
Returns
-------
edges : ndarray
Edges corresponding to the image or mask.
shape: (1, n_edges) if_mask,
(2, n_edges) otherwise. | Create the edges set: Returns a list of edges for a 3D image. | [
"Create",
"the",
"edges",
"set",
":",
"Returns",
"a",
"list",
"of",
"edges",
"for",
"a",
"3D",
"image",
"."
] | def _make_3d_edges(vertices, is_mask):
"""Create the edges set: Returns a list of edges for a 3D image.
Parameters
----------
vertices : ndarray
The indices of the voxels.
is_mask : boolean
If is_mask is true, it returns the mask of edges.
Returns 1 if the edge is contained in the mask, 0 otherwise.
Returns
-------
edges : ndarray
Edges corresponding to the image or mask.
shape: (1, n_edges) if_mask,
(2, n_edges) otherwise.
"""
if is_mask:
edges_deep = np.logical_and(vertices[:, :, :-1].ravel(),
vertices[:, :, 1:].ravel())
edges_right = np.logical_and(vertices[:, :-1].ravel(),
vertices[:, 1:].ravel())
edges_down = np.logical_and(vertices[:-1].ravel(),
vertices[1:].ravel())
else:
edges_deep = np.vstack([vertices[:, :, :-1].ravel(),
vertices[:, :, 1:].ravel()])
edges_right = np.vstack([vertices[:, :-1].ravel(),
vertices[:, 1:].ravel()])
edges_down = np.vstack([vertices[:-1].ravel(),
vertices[1:].ravel()])
edges = np.hstack([edges_deep, edges_right, edges_down])
return edges | [
"def",
"_make_3d_edges",
"(",
"vertices",
",",
"is_mask",
")",
":",
"if",
"is_mask",
":",
"edges_deep",
"=",
"np",
".",
"logical_and",
"(",
"vertices",
"[",
":",
",",
":",
",",
":",
"-",
"1",
"]",
".",
"ravel",
"(",
")",
",",
"vertices",
"[",
":",
... | https://github.com/nilearn/nilearn/blob/9edba4471747efacf21260bf470a346307f52706/nilearn/regions/rena_clustering.py#L63-L100 | |
abisee/pointer-generator | b29e986f24fdd01a6b6d6008187c5c887f0be282 | data.py | python | Vocab.write_metadata | (self, fpath) | Writes metadata file for Tensorboard word embedding visualizer as described here:
https://www.tensorflow.org/get_started/embedding_viz
Args:
fpath: place to write the metadata file | Writes metadata file for Tensorboard word embedding visualizer as described here:
https://www.tensorflow.org/get_started/embedding_viz | [
"Writes",
"metadata",
"file",
"for",
"Tensorboard",
"word",
"embedding",
"visualizer",
"as",
"described",
"here",
":",
"https",
":",
"//",
"www",
".",
"tensorflow",
".",
"org",
"/",
"get_started",
"/",
"embedding_viz"
] | def write_metadata(self, fpath):
"""Writes metadata file for Tensorboard word embedding visualizer as described here:
https://www.tensorflow.org/get_started/embedding_viz
Args:
fpath: place to write the metadata file
"""
print "Writing word embedding metadata file to %s..." % (fpath)
with open(fpath, "w") as f:
fieldnames = ['word']
writer = csv.DictWriter(f, delimiter="\t", fieldnames=fieldnames)
for i in xrange(self.size()):
writer.writerow({"word": self._id_to_word[i]}) | [
"def",
"write_metadata",
"(",
"self",
",",
"fpath",
")",
":",
"print",
"\"Writing word embedding metadata file to %s...\"",
"%",
"(",
"fpath",
")",
"with",
"open",
"(",
"fpath",
",",
"\"w\"",
")",
"as",
"f",
":",
"fieldnames",
"=",
"[",
"'word'",
"]",
"write... | https://github.com/abisee/pointer-generator/blob/b29e986f24fdd01a6b6d6008187c5c887f0be282/data.py#L93-L105 | ||
algorhythms/LeetCode | 3fb14aeea62a960442e47dfde9f964c7ffce32be | 897 Increasing Order Search Tree.py | python | Solution.increasingBST | (self, root: TreeNode) | return self.root | keep a previous index
in-order is easy | keep a previous index
in-order is easy | [
"keep",
"a",
"previous",
"index",
"in",
"-",
"order",
"is",
"easy"
] | def increasingBST(self, root: TreeNode) -> TreeNode:
"""
keep a previous index
in-order is easy
"""
self.dfs(root)
return self.root | [
"def",
"increasingBST",
"(",
"self",
",",
"root",
":",
"TreeNode",
")",
"->",
"TreeNode",
":",
"self",
".",
"dfs",
"(",
"root",
")",
"return",
"self",
".",
"root"
] | https://github.com/algorhythms/LeetCode/blob/3fb14aeea62a960442e47dfde9f964c7ffce32be/897 Increasing Order Search Tree.py#L57-L63 | |
google/grr | 8ad8a4d2c5a93c92729206b7771af19d92d4f915 | api_client/python/grr_api_client/vfs.py | python | FileBase.Collect | (self) | return CollectOperation(
client_id=self.client_id,
operation_id=result.operation_id,
target_file=self,
context=self._context) | [] | def Collect(self) -> "CollectOperation":
args = vfs_pb2.ApiUpdateVfsFileContentArgs(
client_id=self.client_id, file_path=self.path)
result = self._context.SendRequest("UpdateVfsFileContent", args)
if not isinstance(result, vfs_pb2.ApiUpdateVfsFileContentResult):
raise TypeError(f"Unexpected result type: {type(result)}")
return CollectOperation(
client_id=self.client_id,
operation_id=result.operation_id,
target_file=self,
context=self._context) | [
"def",
"Collect",
"(",
"self",
")",
"->",
"\"CollectOperation\"",
":",
"args",
"=",
"vfs_pb2",
".",
"ApiUpdateVfsFileContentArgs",
"(",
"client_id",
"=",
"self",
".",
"client_id",
",",
"file_path",
"=",
"self",
".",
"path",
")",
"result",
"=",
"self",
".",
... | https://github.com/google/grr/blob/8ad8a4d2c5a93c92729206b7771af19d92d4f915/api_client/python/grr_api_client/vfs.py#L235-L247 | |||
hsoft/moneyguru | 802f2f45c181224f5a14272d58dd90bac80bcf22 | core/model/date.py | python | DateRange.past | (self) | The past part of the date range.
That is, the part of the range that is earlier than today. | The past part of the date range. | [
"The",
"past",
"part",
"of",
"the",
"date",
"range",
"."
] | def past(self):
"""The past part of the date range.
That is, the part of the range that is earlier than today.
"""
today = date.today()
if self.end < today:
return self
else:
return DateRange(self.start, today) | [
"def",
"past",
"(",
"self",
")",
":",
"today",
"=",
"date",
".",
"today",
"(",
")",
"if",
"self",
".",
"end",
"<",
"today",
":",
"return",
"self",
"else",
":",
"return",
"DateRange",
"(",
"self",
".",
"start",
",",
"today",
")"
] | https://github.com/hsoft/moneyguru/blob/802f2f45c181224f5a14272d58dd90bac80bcf22/core/model/date.py#L151-L160 | ||
saltstack/salt | fae5bc757ad0f1716483ce7ae180b451545c2058 | salt/ext/tornado/locale.py | python | get_supported_locales | () | return _supported_locales | Returns a list of all the supported locale codes. | Returns a list of all the supported locale codes. | [
"Returns",
"a",
"list",
"of",
"all",
"the",
"supported",
"locale",
"codes",
"."
] | def get_supported_locales():
"""Returns a list of all the supported locale codes."""
return _supported_locales | [
"def",
"get_supported_locales",
"(",
")",
":",
"return",
"_supported_locales"
] | https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/ext/tornado/locale.py#L227-L229 | |
zhang-can/ECO-pytorch | 355c3866b35cdaa5d451263c1f3291c150e22eeb | tf_model_zoo/models/swivel/swivel.py | python | write_embeddings_to_disk | (config, model, sess) | Writes row and column embeddings disk | Writes row and column embeddings disk | [
"Writes",
"row",
"and",
"column",
"embeddings",
"disk"
] | def write_embeddings_to_disk(config, model, sess):
"""Writes row and column embeddings disk"""
# Row Embedding
row_vocab_path = config.input_base_path + '/row_vocab.txt'
row_embedding_output_path = config.output_base_path + '/row_embedding.tsv'
print 'Writing row embeddings to:', row_embedding_output_path
sys.stdout.flush()
write_embedding_tensor_to_disk(row_vocab_path, row_embedding_output_path,
sess, model.row_embedding)
# Column Embedding
col_vocab_path = config.input_base_path + '/col_vocab.txt'
col_embedding_output_path = config.output_base_path + '/col_embedding.tsv'
print 'Writing column embeddings to:', col_embedding_output_path
sys.stdout.flush()
write_embedding_tensor_to_disk(col_vocab_path, col_embedding_output_path,
sess, model.col_embedding) | [
"def",
"write_embeddings_to_disk",
"(",
"config",
",",
"model",
",",
"sess",
")",
":",
"# Row Embedding",
"row_vocab_path",
"=",
"config",
".",
"input_base_path",
"+",
"'/row_vocab.txt'",
"row_embedding_output_path",
"=",
"config",
".",
"output_base_path",
"+",
"'/row... | https://github.com/zhang-can/ECO-pytorch/blob/355c3866b35cdaa5d451263c1f3291c150e22eeb/tf_model_zoo/models/swivel/swivel.py#L161-L177 | ||
loli/medpy | 39131b94f0ab5328ab14a874229320efc2f74d98 | medpy/io/header.py | python | get_offset | (hdr) | return hdr.get_offset() | r"""
Extracts the image offset (akak origin) from an image header.
Notes
-----
It is recommended to call `hdr.get_offset()` instead
of this function.
It can be assumed that the offset is measured from the center point of
the first pixel, which SimpleITK promises independent of the file format.
Some formats do not specify a header field for the offset, thus zeros
are returned.
Parameters
----------
hdr : medpy.io.Header
An image header as returned by `load`.
Returns
-------
offset : tuple of floats
The image's offset. | r"""
Extracts the image offset (akak origin) from an image header. | [
"r",
"Extracts",
"the",
"image",
"offset",
"(",
"akak",
"origin",
")",
"from",
"an",
"image",
"header",
"."
] | def get_offset(hdr):
r"""
Extracts the image offset (akak origin) from an image header.
Notes
-----
It is recommended to call `hdr.get_offset()` instead
of this function.
It can be assumed that the offset is measured from the center point of
the first pixel, which SimpleITK promises independent of the file format.
Some formats do not specify a header field for the offset, thus zeros
are returned.
Parameters
----------
hdr : medpy.io.Header
An image header as returned by `load`.
Returns
-------
offset : tuple of floats
The image's offset.
"""
return hdr.get_offset() | [
"def",
"get_offset",
"(",
"hdr",
")",
":",
"return",
"hdr",
".",
"get_offset",
"(",
")"
] | https://github.com/loli/medpy/blob/39131b94f0ab5328ab14a874229320efc2f74d98/medpy/io/header.py#L57-L80 | |
Galvant/InstrumentKit | 6d216bd7f8e9ec7918762fe5fb7a306d5bd0eb1f | instruments/config.py | python | load_instruments | (conf_file_name, conf_path="/") | return inst_dict | Given the path to a YAML-formatted configuration file and a path within
that file, loads the instruments described in that configuration file.
The subsection of the configuration file is expected to look like a map from
names to YAML nodes giving the class and instrument URI for each instrument.
For example::
ddg:
class: !!python/name:instruments.srs.SRSDG645
uri: gpib+usb://COM7/15
Loading instruments from this configuration will result in a dictionary of
the form
``{'ddg': instruments.srs.SRSDG645.open_from_uri('gpib+usb://COM7/15')}``.
Each instrument configuration section can also specify one or more attributes
to set. These attributes are specified using a ``attrs`` section as well as the
required ``class`` and ``uri`` sections. For instance, the following
dictionary creates a ThorLabs APT motor controller instrument with a single motor
model configured::
rot_stage:
class: !!python/name:instruments.thorabsapt.APTMotorController
uri: serial:///dev/ttyUSB0?baud=115200
attrs:
channel[0].motor_model: PRM1-Z8
Unitful attributes can be specified by using the ``!Q`` tag to quickly create
instances of `u.Quantity`. In the example above, for instance, we can set a motion
timeout as a unitful quantity::
attrs:
motion_timeout: !Q 1 minute
When using the ``!Q`` tag, any text before a space is taken to be the magnitude
of the quantity, and text following is taken to be the unit specification.
By specifying a path within the configuration file, one can load only a part
of the given file. For instance, consider the configuration::
instruments:
ddg:
class: !!python/name:instruments.srs.SRSDG645
uri: gpib+usb://COM7/15
prefs:
...
Then, specifying ``"/instruments"`` as the configuration path will cause
this function to load the instruments named in that block, and ignore
all other keys in the YAML file.
:param str conf_file_name: Name of the configuration file to load
instruments from. Alternatively, a file-like object may be provided.
:param str conf_path: ``"/"`` separated path to the section in the
configuration file to load.
:rtype: `dict`
.. warning::
The configuration file must be trusted, as the class name references
allow for executing arbitrary code. Do not load instruments from
configuration files sent over network connections.
Note that keys in sections excluded by the ``conf_path`` argument are
still processed, such that any side effects that may occur due to
such processing will occur independently of the value of ``conf_path``. | Given the path to a YAML-formatted configuration file and a path within
that file, loads the instruments described in that configuration file.
The subsection of the configuration file is expected to look like a map from
names to YAML nodes giving the class and instrument URI for each instrument.
For example:: | [
"Given",
"the",
"path",
"to",
"a",
"YAML",
"-",
"formatted",
"configuration",
"file",
"and",
"a",
"path",
"within",
"that",
"file",
"loads",
"the",
"instruments",
"described",
"in",
"that",
"configuration",
"file",
".",
"The",
"subsection",
"of",
"the",
"con... | def load_instruments(conf_file_name, conf_path="/"):
"""
Given the path to a YAML-formatted configuration file and a path within
that file, loads the instruments described in that configuration file.
The subsection of the configuration file is expected to look like a map from
names to YAML nodes giving the class and instrument URI for each instrument.
For example::
ddg:
class: !!python/name:instruments.srs.SRSDG645
uri: gpib+usb://COM7/15
Loading instruments from this configuration will result in a dictionary of
the form
``{'ddg': instruments.srs.SRSDG645.open_from_uri('gpib+usb://COM7/15')}``.
Each instrument configuration section can also specify one or more attributes
to set. These attributes are specified using a ``attrs`` section as well as the
required ``class`` and ``uri`` sections. For instance, the following
dictionary creates a ThorLabs APT motor controller instrument with a single motor
model configured::
rot_stage:
class: !!python/name:instruments.thorabsapt.APTMotorController
uri: serial:///dev/ttyUSB0?baud=115200
attrs:
channel[0].motor_model: PRM1-Z8
Unitful attributes can be specified by using the ``!Q`` tag to quickly create
instances of `u.Quantity`. In the example above, for instance, we can set a motion
timeout as a unitful quantity::
attrs:
motion_timeout: !Q 1 minute
When using the ``!Q`` tag, any text before a space is taken to be the magnitude
of the quantity, and text following is taken to be the unit specification.
By specifying a path within the configuration file, one can load only a part
of the given file. For instance, consider the configuration::
instruments:
ddg:
class: !!python/name:instruments.srs.SRSDG645
uri: gpib+usb://COM7/15
prefs:
...
Then, specifying ``"/instruments"`` as the configuration path will cause
this function to load the instruments named in that block, and ignore
all other keys in the YAML file.
:param str conf_file_name: Name of the configuration file to load
instruments from. Alternatively, a file-like object may be provided.
:param str conf_path: ``"/"`` separated path to the section in the
configuration file to load.
:rtype: `dict`
.. warning::
The configuration file must be trusted, as the class name references
allow for executing arbitrary code. Do not load instruments from
configuration files sent over network connections.
Note that keys in sections excluded by the ``conf_path`` argument are
still processed, such that any side effects that may occur due to
such processing will occur independently of the value of ``conf_path``.
"""
if yaml is None:
raise ImportError("Could not import ruamel.yaml, which is required "
"for this function.")
if isinstance(conf_file_name, str):
with open(conf_file_name, 'r') as f:
conf_dict = yaml.load(f, Loader=yaml.Loader)
else:
conf_dict = yaml.load(conf_file_name, Loader=yaml.Loader)
conf_dict = walk_dict(conf_dict, conf_path)
inst_dict = {}
for name, value in conf_dict.items():
try:
inst_dict[name] = value["class"].open_from_uri(value["uri"])
if 'attrs' in value:
# We have some attrs we can set on the newly created instrument.
for attr_name, attr_value in value['attrs'].items():
setattr_expression(inst_dict[name], attr_name, attr_value)
except IOError as ex:
# FIXME: need to subclass Warning so that repeated warnings
# aren't ignored.
warnings.warn("Exception occured loading device with URI "
"{}:\n\t{}.".format(value["uri"], ex), RuntimeWarning)
inst_dict[name] = None
return inst_dict | [
"def",
"load_instruments",
"(",
"conf_file_name",
",",
"conf_path",
"=",
"\"/\"",
")",
":",
"if",
"yaml",
"is",
"None",
":",
"raise",
"ImportError",
"(",
"\"Could not import ruamel.yaml, which is required \"",
"\"for this function.\"",
")",
"if",
"isinstance",
"(",
"c... | https://github.com/Galvant/InstrumentKit/blob/6d216bd7f8e9ec7918762fe5fb7a306d5bd0eb1f/instruments/config.py#L71-L169 | |
clinton-hall/nzbToMedia | 27669389216902d1085660167e7bda0bd8527ecf | libs/common/six.py | python | add_metaclass | (metaclass) | return wrapper | Class decorator for creating a class with a metaclass. | Class decorator for creating a class with a metaclass. | [
"Class",
"decorator",
"for",
"creating",
"a",
"class",
"with",
"a",
"metaclass",
"."
] | def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
if hasattr(cls, '__qualname__'):
orig_vars['__qualname__'] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper | [
"def",
"add_metaclass",
"(",
"metaclass",
")",
":",
"def",
"wrapper",
"(",
"cls",
")",
":",
"orig_vars",
"=",
"cls",
".",
"__dict__",
".",
"copy",
"(",
")",
"slots",
"=",
"orig_vars",
".",
"get",
"(",
"'__slots__'",
")",
"if",
"slots",
"is",
"not",
"... | https://github.com/clinton-hall/nzbToMedia/blob/27669389216902d1085660167e7bda0bd8527ecf/libs/common/six.py#L835-L850 | |
Jenyay/outwiker | 50530cf7b3f71480bb075b2829bc0669773b835b | plugins/updatenotifier/updatenotifier/libs/jinja2/environment.py | python | Template.generate_async | (self, *args, **kwargs) | An async version of :meth:`generate`. Works very similarly but
returns an async iterator instead. | An async version of :meth:`generate`. Works very similarly but
returns an async iterator instead. | [
"An",
"async",
"version",
"of",
":",
"meth",
":",
"generate",
".",
"Works",
"very",
"similarly",
"but",
"returns",
"an",
"async",
"iterator",
"instead",
"."
] | def generate_async(self, *args, **kwargs):
"""An async version of :meth:`generate`. Works very similarly but
returns an async iterator instead.
"""
# see asyncsupport for the actual implementation
raise NotImplementedError('This feature is not available for this '
'version of Python') | [
"def",
"generate_async",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# see asyncsupport for the actual implementation",
"raise",
"NotImplementedError",
"(",
"'This feature is not available for this '",
"'version of Python'",
")"
] | https://github.com/Jenyay/outwiker/blob/50530cf7b3f71480bb075b2829bc0669773b835b/plugins/updatenotifier/updatenotifier/libs/jinja2/environment.py#L1047-L1053 | ||
Nike-Inc/gimme-aws-creds | 94ca37dc7a836a49c19dd09ec879dec416280e36 | gimme_aws_creds/registered_authenticators.py | python | RegisteredAuthenticators.add_authenticator | (self, credential_id, user) | :param credential_id: the id of added authenticator credential
:type credential_id: bytes
:param user: a user identifier (email, name, uid, ...)
:type user: str | :param credential_id: the id of added authenticator credential
:type credential_id: bytes
:param user: a user identifier (email, name, uid, ...)
:type user: str | [
":",
"param",
"credential_id",
":",
"the",
"id",
"of",
"added",
"authenticator",
"credential",
":",
"type",
"credential_id",
":",
"bytes",
":",
"param",
"user",
":",
"a",
"user",
"identifier",
"(",
"email",
"name",
"uid",
"...",
")",
":",
"type",
"user",
... | def add_authenticator(self, credential_id, user):
"""
:param credential_id: the id of added authenticator credential
:type credential_id: bytes
:param user: a user identifier (email, name, uid, ...)
:type user: str
"""
authenticators = self._get_authenticators()
authenticators.append(RegisteredAuthenticator(credential_id=credential_id, user=user))
with open(self._json_path, 'w') as f:
json.dump(authenticators, f) | [
"def",
"add_authenticator",
"(",
"self",
",",
"credential_id",
",",
"user",
")",
":",
"authenticators",
"=",
"self",
".",
"_get_authenticators",
"(",
")",
"authenticators",
".",
"append",
"(",
"RegisteredAuthenticator",
"(",
"credential_id",
"=",
"credential_id",
... | https://github.com/Nike-Inc/gimme-aws-creds/blob/94ca37dc7a836a49c19dd09ec879dec416280e36/gimme_aws_creds/registered_authenticators.py#L35-L46 | ||
cloudera/hue | 23f02102d4547c17c32bd5ea0eb24e9eadd657a4 | desktop/core/ext-py/python-ldap-2.3.13/Lib/ldap/ldapobject.py | python | ReconnectLDAPObject.sasl_interactive_bind_s | (self,*args,**kwargs) | return SimpleLDAPObject.sasl_interactive_bind_s(self,*args,**kwargs) | sasl_interactive_bind_s(who, auth) -> None | sasl_interactive_bind_s(who, auth) -> None | [
"sasl_interactive_bind_s",
"(",
"who",
"auth",
")",
"-",
">",
"None"
] | def sasl_interactive_bind_s(self,*args,**kwargs):
"""
sasl_interactive_bind_s(who, auth) -> None
"""
self._last_bind = (self.sasl_interactive_bind_s,args,kwargs)
return SimpleLDAPObject.sasl_interactive_bind_s(self,*args,**kwargs) | [
"def",
"sasl_interactive_bind_s",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_last_bind",
"=",
"(",
"self",
".",
"sasl_interactive_bind_s",
",",
"args",
",",
"kwargs",
")",
"return",
"SimpleLDAPObject",
".",
"sasl_interac... | https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/python-ldap-2.3.13/Lib/ldap/ldapobject.py#L792-L797 | |
huggingface/transformers | 623b4f7c63f60cce917677ee704d6c93ee960b4b | src/transformers/utils/dummy_tf_objects.py | python | TFXLMForSequenceClassification.__init__ | (self, *args, **kwargs) | [] | def __init__(self, *args, **kwargs):
requires_backends(self, ["tf"]) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"requires_backends",
"(",
"self",
",",
"[",
"\"tf\"",
"]",
")"
] | https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/utils/dummy_tf_objects.py#L2770-L2771 | ||||
aio-libs/aiopg | 7ac2d29930e86a07e03d65464e7c542522b77d01 | aiopg/connection.py | python | Connection.isolation_level | (self) | return self._conn.isolation_level | Transaction isolation level.
The only allowed value is ISOLATION_LEVEL_READ_COMMITTED. | Transaction isolation level. | [
"Transaction",
"isolation",
"level",
"."
] | def isolation_level(self) -> int:
"""Transaction isolation level.
The only allowed value is ISOLATION_LEVEL_READ_COMMITTED.
"""
return self._conn.isolation_level | [
"def",
"isolation_level",
"(",
"self",
")",
"->",
"int",
":",
"return",
"self",
".",
"_conn",
".",
"isolation_level"
] | https://github.com/aio-libs/aiopg/blob/7ac2d29930e86a07e03d65464e7c542522b77d01/aiopg/connection.py#L1091-L1097 | |
Kismuz/btgym | 7fb3316e67f1d7a17c620630fb62fb29428b2cec | btgym/research/model_based/model/univariate.py | python | OUProcess.__init__ | (self, alpha=None, filter_alpha=None) | [] | def __init__(self, alpha=None, filter_alpha=None):
self.alpha = alpha
self.filter_alpha = filter_alpha
self.estimator = OUEstimator(alpha)
# Just use exponential smoothing as state-space trajectory filter:
self.filter = Covariance(3, alpha=filter_alpha)
# Driver is Student-t:
self.driver_estimator = STEstimator(alpha)
# Empirical statistics tracker (debug, mostly for accuracy checking, not included in OUProcessState):
self.stat = Zscore(1, alpha)
self.is_ready = False | [
"def",
"__init__",
"(",
"self",
",",
"alpha",
"=",
"None",
",",
"filter_alpha",
"=",
"None",
")",
":",
"self",
".",
"alpha",
"=",
"alpha",
"self",
".",
"filter_alpha",
"=",
"filter_alpha",
"self",
".",
"estimator",
"=",
"OUEstimator",
"(",
"alpha",
")",
... | https://github.com/Kismuz/btgym/blob/7fb3316e67f1d7a17c620630fb62fb29428b2cec/btgym/research/model_based/model/univariate.py#L20-L34 | ||||
triaquae/triaquae | bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9 | TriAquae/models/Ubuntu_13/paramiko/transport.py | python | Transport.send_ignore | (self, bytes=None) | Send a junk packet across the encrypted link. This is sometimes used
to add "noise" to a connection to confuse would-be attackers. It can
also be used as a keep-alive for long lived connections traversing
firewalls.
@param bytes: the number of random bytes to send in the payload of the
ignored packet -- defaults to a random number from 10 to 41.
@type bytes: int | Send a junk packet across the encrypted link. This is sometimes used
to add "noise" to a connection to confuse would-be attackers. It can
also be used as a keep-alive for long lived connections traversing
firewalls. | [
"Send",
"a",
"junk",
"packet",
"across",
"the",
"encrypted",
"link",
".",
"This",
"is",
"sometimes",
"used",
"to",
"add",
"noise",
"to",
"a",
"connection",
"to",
"confuse",
"would",
"-",
"be",
"attackers",
".",
"It",
"can",
"also",
"be",
"used",
"as",
... | def send_ignore(self, bytes=None):
"""
Send a junk packet across the encrypted link. This is sometimes used
to add "noise" to a connection to confuse would-be attackers. It can
also be used as a keep-alive for long lived connections traversing
firewalls.
@param bytes: the number of random bytes to send in the payload of the
ignored packet -- defaults to a random number from 10 to 41.
@type bytes: int
"""
m = Message()
m.add_byte(chr(MSG_IGNORE))
if bytes is None:
bytes = (ord(rng.read(1)) % 32) + 10
m.add_bytes(rng.read(bytes))
self._send_user_message(m) | [
"def",
"send_ignore",
"(",
"self",
",",
"bytes",
"=",
"None",
")",
":",
"m",
"=",
"Message",
"(",
")",
"m",
".",
"add_byte",
"(",
"chr",
"(",
"MSG_IGNORE",
")",
")",
"if",
"bytes",
"is",
"None",
":",
"bytes",
"=",
"(",
"ord",
"(",
"rng",
".",
"... | https://github.com/triaquae/triaquae/blob/bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9/TriAquae/models/Ubuntu_13/paramiko/transport.py#L848-L864 | ||
buke/GreenOdoo | 3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df | source/addons/analytic/analytic.py | python | account_analytic_account.copy | (self, cr, uid, id, default=None, context=None) | return super(account_analytic_account, self).copy(cr, uid, id, default, context=context) | executed only on the toplevel copied object of the hierarchy.
Subobject are actually copied with copy_data | executed only on the toplevel copied object of the hierarchy.
Subobject are actually copied with copy_data | [
"executed",
"only",
"on",
"the",
"toplevel",
"copied",
"object",
"of",
"the",
"hierarchy",
".",
"Subobject",
"are",
"actually",
"copied",
"with",
"copy_data"
] | def copy(self, cr, uid, id, default=None, context=None):
""" executed only on the toplevel copied object of the hierarchy.
Subobject are actually copied with copy_data"""
if not default:
default = {}
analytic = self.browse(cr, uid, id, context=context)
default['name'] = _("%s (copy)") % analytic['name']
return super(account_analytic_account, self).copy(cr, uid, id, default, context=context) | [
"def",
"copy",
"(",
"self",
",",
"cr",
",",
"uid",
",",
"id",
",",
"default",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"if",
"not",
"default",
":",
"default",
"=",
"{",
"}",
"analytic",
"=",
"self",
".",
"browse",
"(",
"cr",
",",
"ui... | https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/source/addons/analytic/analytic.py#L274-L281 | |
davidbau/ganseeing | 93cea2c8f391aef001ddf9dcb35c43990681a47c | seeing/proggan_ablation.py | python | NormUpscaleConvBlock.__init__ | (self, in_channels, out_channels, kernel_size, padding, no_pixel=False, no_wscale=False) | [] | def __init__(self, in_channels, out_channels, kernel_size, padding, no_pixel=False, no_wscale=False):
super(NormUpscaleConvBlock, self).__init__()
self.norm = None
self.wscale = None
if not no_pixel:
self.norm = PixelNormLayer()
if not no_wscale:
self.wscale = WScaleLayer(out_channels)
self.up = nn.Upsample(scale_factor=2, mode='nearest')
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, 1, padding, bias=no_wscale)
self.relu = nn.LeakyReLU(inplace=True, negative_slope=0.2) | [
"def",
"__init__",
"(",
"self",
",",
"in_channels",
",",
"out_channels",
",",
"kernel_size",
",",
"padding",
",",
"no_pixel",
"=",
"False",
",",
"no_wscale",
"=",
"False",
")",
":",
"super",
"(",
"NormUpscaleConvBlock",
",",
"self",
")",
".",
"__init__",
"... | https://github.com/davidbau/ganseeing/blob/93cea2c8f391aef001ddf9dcb35c43990681a47c/seeing/proggan_ablation.py#L79-L90 | ||||
plotly/plotly.py | cfad7862594b35965c0e000813bd7805e8494a5b | packages/python/plotly/plotly/matplotlylib/renderer.py | python | PlotlyRenderer.draw_legend_shapes | (self, mode, shape, **props) | Create a shape that matches lines or markers in legends.
Main issue is that path for circles do not render, so we have to use 'circle'
instead of 'path'. | Create a shape that matches lines or markers in legends. | [
"Create",
"a",
"shape",
"that",
"matches",
"lines",
"or",
"markers",
"in",
"legends",
"."
] | def draw_legend_shapes(self, mode, shape, **props):
"""Create a shape that matches lines or markers in legends.
Main issue is that path for circles do not render, so we have to use 'circle'
instead of 'path'.
"""
for single_mode in mode.split("+"):
x = props["data"][0][0]
y = props["data"][0][1]
if single_mode == "markers" and props.get("markerstyle"):
size = shape.pop("size", 6)
symbol = shape.pop("symbol")
# aligning to "center"
x0 = 0
y0 = 0
x1 = size
y1 = size
markerpath = props["markerstyle"].get("markerpath")
if markerpath is None and symbol != "circle":
self.msg += (
"not sure how to handle this marker without a valid path\n"
)
return
# marker path to SVG path conversion
path = " ".join(
[f"{a} {t[0]},{t[1]}" for a, t in zip(markerpath[1], markerpath[0])]
)
if symbol == "circle":
# symbols like . and o in matplotlib, use circle
# plotly also maps many other markers to circle, such as 1,8 and p
path = None
shape_type = "circle"
x0 = -size / 2
y0 = size / 2
x1 = size / 2
y1 = size + size / 2
else:
# triangles, star etc
shape_type = "path"
legend_shape = go.layout.Shape(
type=shape_type,
xref="paper",
yref="paper",
x0=x0,
y0=y0,
x1=x1,
y1=y1,
xsizemode="pixel",
ysizemode="pixel",
xanchor=x,
yanchor=y,
path=path,
**shape,
)
elif single_mode == "lines":
mode = "line"
x1 = props["data"][1][0]
y1 = props["data"][1][1]
legend_shape = go.layout.Shape(
type=mode,
xref="paper",
yref="paper",
x0=x,
y0=y + 0.02,
x1=x1,
y1=y1 + 0.02,
**shape,
)
else:
self.msg += "not sure how to handle this element\n"
return
self.plotly_fig.add_shape(legend_shape)
self.msg += " Heck yeah, I drew that shape\n" | [
"def",
"draw_legend_shapes",
"(",
"self",
",",
"mode",
",",
"shape",
",",
"*",
"*",
"props",
")",
":",
"for",
"single_mode",
"in",
"mode",
".",
"split",
"(",
"\"+\"",
")",
":",
"x",
"=",
"props",
"[",
"\"data\"",
"]",
"[",
"0",
"]",
"[",
"0",
"]"... | https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/matplotlylib/renderer.py#L315-L390 | ||
junyanz/VON | 2bd39d0c11dd318a45ecda7b2125caa1c0dd93e8 | render_module/vtn/vtn/modules/GridSampler3D.py | python | GridSampler3D.forward | (self, theta, size) | return grid_sample3d(theta, size) | [] | def forward(self, theta, size):
return grid_sample3d(theta, size) | [
"def",
"forward",
"(",
"self",
",",
"theta",
",",
"size",
")",
":",
"return",
"grid_sample3d",
"(",
"theta",
",",
"size",
")"
] | https://github.com/junyanz/VON/blob/2bd39d0c11dd318a45ecda7b2125caa1c0dd93e8/render_module/vtn/vtn/modules/GridSampler3D.py#L6-L7 | |||
kamalgill/flask-appengine-template | 11760f83faccbb0d0afe416fc58e67ecfb4643c2 | src/lib/wtforms/ext/sqlalchemy/orm.py | python | ModelConverter.conv_DateTime | (self, field_args, **extra) | return f.DateTimeField(**field_args) | [] | def conv_DateTime(self, field_args, **extra):
return f.DateTimeField(**field_args) | [
"def",
"conv_DateTime",
"(",
"self",
",",
"field_args",
",",
"*",
"*",
"extra",
")",
":",
"return",
"f",
".",
"DateTimeField",
"(",
"*",
"*",
"field_args",
")"
] | https://github.com/kamalgill/flask-appengine-template/blob/11760f83faccbb0d0afe416fc58e67ecfb4643c2/src/lib/wtforms/ext/sqlalchemy/orm.py#L166-L167 | |||
garywiz/chaperone | 9ff2c3a5b9c6820f8750320a564ea214042df06f | chaperone/cproc/process_manager.py | python | TopLevelProcess._cancel_pending | (self) | Cancel any pending activated tasks | Cancel any pending activated tasks | [
"Cancel",
"any",
"pending",
"activated",
"tasks"
] | def _cancel_pending(self):
"Cancel any pending activated tasks"
for p in list(self._pending):
if not p.cancelled():
p.cancel() | [
"def",
"_cancel_pending",
"(",
"self",
")",
":",
"for",
"p",
"in",
"list",
"(",
"self",
".",
"_pending",
")",
":",
"if",
"not",
"p",
".",
"cancelled",
"(",
")",
":",
"p",
".",
"cancel",
"(",
")"
] | https://github.com/garywiz/chaperone/blob/9ff2c3a5b9c6820f8750320a564ea214042df06f/chaperone/cproc/process_manager.py#L234-L239 | ||
PaddlePaddle/Parakeet | 8705a2a8405e3c63f2174d69880d2b5525a6c9fd | parakeet/training/updaters/standard_updater.py | python | StandardUpdater.updates_per_epoch | (self) | Number of updater per epoch, determined by the length of the
dataloader. | Number of updater per epoch, determined by the length of the
dataloader. | [
"Number",
"of",
"updater",
"per",
"epoch",
"determined",
"by",
"the",
"length",
"of",
"the",
"dataloader",
"."
] | def updates_per_epoch(self):
"""Number of updater per epoch, determined by the length of the
dataloader."""
length_of_dataloader = None
try:
length_of_dataloader = len(self.dataloader)
except TypeError:
logging.debug("This dataloader has no __len__.")
finally:
return length_of_dataloader | [
"def",
"updates_per_epoch",
"(",
"self",
")",
":",
"length_of_dataloader",
"=",
"None",
"try",
":",
"length_of_dataloader",
"=",
"len",
"(",
"self",
".",
"dataloader",
")",
"except",
"TypeError",
":",
"logging",
".",
"debug",
"(",
"\"This dataloader has no __len__... | https://github.com/PaddlePaddle/Parakeet/blob/8705a2a8405e3c63f2174d69880d2b5525a6c9fd/parakeet/training/updaters/standard_updater.py#L151-L160 | ||
WerWolv/EdiZon_CheatsConfigsAndScripts | d16d36c7509c01dca770f402babd83ff2e9ae6e7 | Scripts/lib/python3.5/tracemalloc.py | python | Snapshot.dump | (self, filename) | Write the snapshot into a file. | Write the snapshot into a file. | [
"Write",
"the",
"snapshot",
"into",
"a",
"file",
"."
] | def dump(self, filename):
"""
Write the snapshot into a file.
"""
with open(filename, "wb") as fp:
pickle.dump(self, fp, pickle.HIGHEST_PROTOCOL) | [
"def",
"dump",
"(",
"self",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"\"wb\"",
")",
"as",
"fp",
":",
"pickle",
".",
"dump",
"(",
"self",
",",
"fp",
",",
"pickle",
".",
"HIGHEST_PROTOCOL",
")"
] | https://github.com/WerWolv/EdiZon_CheatsConfigsAndScripts/blob/d16d36c7509c01dca770f402babd83ff2e9ae6e7/Scripts/lib/python3.5/tracemalloc.py#L352-L357 | ||
DevTable/gantryd | eb348113f0f73a0be45a45f7a5626ad2b5dd30ba | gantryd.py | python | run | (dclient, args) | Runs gantryd. | Runs gantryd. | [
"Runs",
"gantryd",
"."
] | def run(dclient, args):
""" Runs gantryd. """
dclient.run(args.component) | [
"def",
"run",
"(",
"dclient",
",",
"args",
")",
":",
"dclient",
".",
"run",
"(",
"args",
".",
"component",
")"
] | https://github.com/DevTable/gantryd/blob/eb348113f0f73a0be45a45f7a5626ad2b5dd30ba/gantryd.py#L10-L12 | ||
thinkle/gourmet | 8af29c8ded24528030e5ae2ea3461f61c1e5a575 | gourmet/exporters/exportManager.py | python | ExportManager.do_multiple_export | (self, recs, fn, exp_type=None,
setup_gui=True, extra_prefs=EXTRA_PREFS_AUTOMATIC) | return exporterInstance | [] | def do_multiple_export (self, recs, fn, exp_type=None,
setup_gui=True, extra_prefs=EXTRA_PREFS_AUTOMATIC):
myexp, exporterInstance = self.get_multiple_exporter(recs,fn,exp_type,setup_gui,extra_prefs)
tm = get_thread_manager()
tm.add_thread(exporterInstance)
if setup_gui:
tmg = get_thread_manager_gui()
tmg.register_thread_with_dialog(_('Export')+' ('+myexp.label+')',
exporterInstance)
exporterInstance.connect('completed', tmg.notification_thread_done,
_('Recipes successfully exported to <a href="file:///%s">%s</a>')%(fn,fn))
tmg.show()
print('Return exporter instance')
return exporterInstance | [
"def",
"do_multiple_export",
"(",
"self",
",",
"recs",
",",
"fn",
",",
"exp_type",
"=",
"None",
",",
"setup_gui",
"=",
"True",
",",
"extra_prefs",
"=",
"EXTRA_PREFS_AUTOMATIC",
")",
":",
"myexp",
",",
"exporterInstance",
"=",
"self",
".",
"get_multiple_exporte... | https://github.com/thinkle/gourmet/blob/8af29c8ded24528030e5ae2ea3461f61c1e5a575/gourmet/exporters/exportManager.py#L157-L170 | |||
pyparallel/pyparallel | 11e8c6072d48c8f13641925d17b147bf36ee0ba3 | Lib/site-packages/numpy-1.10.0.dev0_046311a-py3.3-win-amd64.egg/numpy/distutils/misc_util.py | python | get_shared_lib_extension | (is_python_ext=False) | return so_ext | Return the correct file extension for shared libraries.
Parameters
----------
is_python_ext : bool, optional
Whether the shared library is a Python extension. Default is False.
Returns
-------
so_ext : str
The shared library extension.
Notes
-----
For Python shared libs, `so_ext` will typically be '.so' on Linux and OS X,
and '.pyd' on Windows. For Python >= 3.2 `so_ext` has a tag prepended on
POSIX systems according to PEP 3149. For Python 3.2 this is implemented on
Linux, but not on OS X. | Return the correct file extension for shared libraries. | [
"Return",
"the",
"correct",
"file",
"extension",
"for",
"shared",
"libraries",
"."
] | def get_shared_lib_extension(is_python_ext=False):
"""Return the correct file extension for shared libraries.
Parameters
----------
is_python_ext : bool, optional
Whether the shared library is a Python extension. Default is False.
Returns
-------
so_ext : str
The shared library extension.
Notes
-----
For Python shared libs, `so_ext` will typically be '.so' on Linux and OS X,
and '.pyd' on Windows. For Python >= 3.2 `so_ext` has a tag prepended on
POSIX systems according to PEP 3149. For Python 3.2 this is implemented on
Linux, but not on OS X.
"""
confvars = distutils.sysconfig.get_config_vars()
# SO is deprecated in 3.3.1, use EXT_SUFFIX instead
so_ext = confvars.get('EXT_SUFFIX', None)
if so_ext is None:
so_ext = confvars.get('SO', '')
if not is_python_ext:
# hardcode known values, config vars (including SHLIB_SUFFIX) are
# unreliable (see #3182)
# darwin, windows and debug linux are wrong in 3.3.1 and older
if (sys.platform.startswith('linux') or
sys.platform.startswith('gnukfreebsd')):
so_ext = '.so'
elif sys.platform.startswith('darwin'):
so_ext = '.dylib'
elif sys.platform.startswith('win'):
so_ext = '.dll'
else:
# fall back to config vars for unknown platforms
# fix long extension for Python >=3.2, see PEP 3149.
if 'SOABI' in confvars:
# Does nothing unless SOABI config var exists
so_ext = so_ext.replace('.' + confvars.get('SOABI'), '', 1)
return so_ext | [
"def",
"get_shared_lib_extension",
"(",
"is_python_ext",
"=",
"False",
")",
":",
"confvars",
"=",
"distutils",
".",
"sysconfig",
".",
"get_config_vars",
"(",
")",
"# SO is deprecated in 3.3.1, use EXT_SUFFIX instead",
"so_ext",
"=",
"confvars",
".",
"get",
"(",
"'EXT_... | https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/site-packages/numpy-1.10.0.dev0_046311a-py3.3-win-amd64.egg/numpy/distutils/misc_util.py#L611-L656 | |
tobegit3hub/deep_image_model | 8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e | java_predict_client/src/main/proto/tensorflow/python/training/session_run_hook.py | python | SessionRunContext.original_args | (self) | return self._original_args | A `SessionRunArgs` object holding the original arguments of `run()`.
If user called `MonitoredSession.run(fetches=a, feed_dict=b)`, then this
field is equal to SessionRunArgs(a, b).
Returns:
A `SessionRunArgs` object | A `SessionRunArgs` object holding the original arguments of `run()`. | [
"A",
"SessionRunArgs",
"object",
"holding",
"the",
"original",
"arguments",
"of",
"run",
"()",
"."
] | def original_args(self):
"""A `SessionRunArgs` object holding the original arguments of `run()`.
If user called `MonitoredSession.run(fetches=a, feed_dict=b)`, then this
field is equal to SessionRunArgs(a, b).
Returns:
A `SessionRunArgs` object
"""
return self._original_args | [
"def",
"original_args",
"(",
"self",
")",
":",
"return",
"self",
".",
"_original_args"
] | https://github.com/tobegit3hub/deep_image_model/blob/8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e/java_predict_client/src/main/proto/tensorflow/python/training/session_run_hook.py#L191-L200 | |
slackapi/python-slack-sdk | 2dee6656ffacb7de0c29bb2a6c2b51ec6b5dbce7 | slack_sdk/web/async_client.py | python | AsyncWebClient.admin_users_session_reset | (
self,
*,
user_id: str,
mobile_only: Optional[bool] = None,
web_only: Optional[bool] = None,
**kwargs,
) | return await self.api_call("admin.users.session.reset", params=kwargs) | Wipes all valid sessions on all devices for a given user.
https://api.slack.com/methods/admin.users.session.reset | Wipes all valid sessions on all devices for a given user.
https://api.slack.com/methods/admin.users.session.reset | [
"Wipes",
"all",
"valid",
"sessions",
"on",
"all",
"devices",
"for",
"a",
"given",
"user",
".",
"https",
":",
"//",
"api",
".",
"slack",
".",
"com",
"/",
"methods",
"/",
"admin",
".",
"users",
".",
"session",
".",
"reset"
] | async def admin_users_session_reset(
self,
*,
user_id: str,
mobile_only: Optional[bool] = None,
web_only: Optional[bool] = None,
**kwargs,
) -> AsyncSlackResponse:
"""Wipes all valid sessions on all devices for a given user.
https://api.slack.com/methods/admin.users.session.reset
"""
kwargs.update(
{
"user_id": user_id,
"mobile_only": mobile_only,
"web_only": web_only,
}
)
return await self.api_call("admin.users.session.reset", params=kwargs) | [
"async",
"def",
"admin_users_session_reset",
"(",
"self",
",",
"*",
",",
"user_id",
":",
"str",
",",
"mobile_only",
":",
"Optional",
"[",
"bool",
"]",
"=",
"None",
",",
"web_only",
":",
"Optional",
"[",
"bool",
"]",
"=",
"None",
",",
"*",
"*",
"kwargs"... | https://github.com/slackapi/python-slack-sdk/blob/2dee6656ffacb7de0c29bb2a6c2b51ec6b5dbce7/slack_sdk/web/async_client.py#L911-L929 | |
wistbean/learn_python3_spider | 73c873f4845f4385f097e5057407d03dd37a117b | stackoverflow/venv/lib/python3.6/site-packages/scrapy_redis/scheduler.py | python | Scheduler.__init__ | (self, server,
persist=False,
flush_on_start=False,
queue_key=defaults.SCHEDULER_QUEUE_KEY,
queue_cls=defaults.SCHEDULER_QUEUE_CLASS,
dupefilter_key=defaults.SCHEDULER_DUPEFILTER_KEY,
dupefilter_cls=defaults.SCHEDULER_DUPEFILTER_CLASS,
idle_before_close=0,
serializer=None) | Initialize scheduler.
Parameters
----------
server : Redis
The redis server instance.
persist : bool
Whether to flush requests when closing. Default is False.
flush_on_start : bool
Whether to flush requests on start. Default is False.
queue_key : str
Requests queue key.
queue_cls : str
Importable path to the queue class.
dupefilter_key : str
Duplicates filter key.
dupefilter_cls : str
Importable path to the dupefilter class.
idle_before_close : int
Timeout before giving up. | Initialize scheduler. | [
"Initialize",
"scheduler",
"."
] | def __init__(self, server,
persist=False,
flush_on_start=False,
queue_key=defaults.SCHEDULER_QUEUE_KEY,
queue_cls=defaults.SCHEDULER_QUEUE_CLASS,
dupefilter_key=defaults.SCHEDULER_DUPEFILTER_KEY,
dupefilter_cls=defaults.SCHEDULER_DUPEFILTER_CLASS,
idle_before_close=0,
serializer=None):
"""Initialize scheduler.
Parameters
----------
server : Redis
The redis server instance.
persist : bool
Whether to flush requests when closing. Default is False.
flush_on_start : bool
Whether to flush requests on start. Default is False.
queue_key : str
Requests queue key.
queue_cls : str
Importable path to the queue class.
dupefilter_key : str
Duplicates filter key.
dupefilter_cls : str
Importable path to the dupefilter class.
idle_before_close : int
Timeout before giving up.
"""
if idle_before_close < 0:
raise TypeError("idle_before_close cannot be negative")
self.server = server
self.persist = persist
self.flush_on_start = flush_on_start
self.queue_key = queue_key
self.queue_cls = queue_cls
self.dupefilter_cls = dupefilter_cls
self.dupefilter_key = dupefilter_key
self.idle_before_close = idle_before_close
self.serializer = serializer
self.stats = None | [
"def",
"__init__",
"(",
"self",
",",
"server",
",",
"persist",
"=",
"False",
",",
"flush_on_start",
"=",
"False",
",",
"queue_key",
"=",
"defaults",
".",
"SCHEDULER_QUEUE_KEY",
",",
"queue_cls",
"=",
"defaults",
".",
"SCHEDULER_QUEUE_CLASS",
",",
"dupefilter_key... | https://github.com/wistbean/learn_python3_spider/blob/73c873f4845f4385f097e5057407d03dd37a117b/stackoverflow/venv/lib/python3.6/site-packages/scrapy_redis/scheduler.py#L34-L77 | ||
trakt/Plex-Trakt-Scrobbler | aeb0bfbe62fad4b06c164f1b95581da7f35dce0b | Trakttv.bundle/Contents/Libraries/Shared/requests/packages/urllib3/packages/ordered_dict.py | python | OrderedDict.__eq__ | (self, other) | return dict.__eq__(self, other) | od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive. | od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive. | [
"od",
".",
"__eq__",
"(",
"y",
")",
"<",
"==",
">",
"od",
"==",
"y",
".",
"Comparison",
"to",
"another",
"OD",
"is",
"order",
"-",
"sensitive",
"while",
"comparison",
"to",
"a",
"regular",
"mapping",
"is",
"order",
"-",
"insensitive",
"."
] | def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other) | [
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"OrderedDict",
")",
":",
"return",
"len",
"(",
"self",
")",
"==",
"len",
"(",
"other",
")",
"and",
"self",
".",
"items",
"(",
")",
"==",
"other",
".",
"i... | https://github.com/trakt/Plex-Trakt-Scrobbler/blob/aeb0bfbe62fad4b06c164f1b95581da7f35dce0b/Trakttv.bundle/Contents/Libraries/Shared/requests/packages/urllib3/packages/ordered_dict.py#L235-L242 | |
t4ngo/dragonfly | 3c885cbf1a63b373fd725d4bbfcb716e162dc92c | dragonfly/engines/backend_natlink/dictation_format.py | python | WordParserFactory.get_parser | (self) | return self.parser_class() | Create an instance of the detective parser class. | Create an instance of the detective parser class. | [
"Create",
"an",
"instance",
"of",
"the",
"detective",
"parser",
"class",
"."
] | def get_parser(self):
""" Create an instance of the detective parser class. """
if not self.parser_class:
self.parser_class = self.detect_parser_class()
return self.parser_class() | [
"def",
"get_parser",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"parser_class",
":",
"self",
".",
"parser_class",
"=",
"self",
".",
"detect_parser_class",
"(",
")",
"return",
"self",
".",
"parser_class",
"(",
")"
] | https://github.com/t4ngo/dragonfly/blob/3c885cbf1a63b373fd725d4bbfcb716e162dc92c/dragonfly/engines/backend_natlink/dictation_format.py#L442-L446 | |
Chaffelson/nipyapi | d3b186fd701ce308c2812746d98af9120955e810 | nipyapi/registry/models/model_property.py | python | ModelProperty.default_value | (self, default_value) | Sets the default_value of this ModelProperty.
The default value
:param default_value: The default_value of this ModelProperty.
:type: str | Sets the default_value of this ModelProperty.
The default value | [
"Sets",
"the",
"default_value",
"of",
"this",
"ModelProperty",
".",
"The",
"default",
"value"
] | def default_value(self, default_value):
"""
Sets the default_value of this ModelProperty.
The default value
:param default_value: The default_value of this ModelProperty.
:type: str
"""
self._default_value = default_value | [
"def",
"default_value",
"(",
"self",
",",
"default_value",
")",
":",
"self",
".",
"_default_value",
"=",
"default_value"
] | https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/registry/models/model_property.py#L187-L196 | ||
catap/namebench | 9913a7a1a7955a3759eb18cbe73b421441a7a00f | libnamebench/providers.py | python | GetExternalIp | () | Helper method to get external IP from anyone who cares. | Helper method to get external IP from anyone who cares. | [
"Helper",
"method",
"to",
"get",
"external",
"IP",
"from",
"anyone",
"who",
"cares",
"."
] | def GetExternalIp():
"""Helper method to get external IP from anyone who cares."""
h = httplib2.Http(tempfile.gettempdir(), timeout=10)
url = 'http://whatismyip.akamai.com'
resp, content = h.request(url, 'GET')
if resp.status == 200:
return content
for provider in (UltraDNSAuth(), MyResolverInfo()):
answer = provider.GetClientIp()
if answer:
return answer | [
"def",
"GetExternalIp",
"(",
")",
":",
"h",
"=",
"httplib2",
".",
"Http",
"(",
"tempfile",
".",
"gettempdir",
"(",
")",
",",
"timeout",
"=",
"10",
")",
"url",
"=",
"'http://whatismyip.akamai.com'",
"resp",
",",
"content",
"=",
"h",
".",
"request",
"(",
... | https://github.com/catap/namebench/blob/9913a7a1a7955a3759eb18cbe73b421441a7a00f/libnamebench/providers.py#L41-L51 | ||
openedx/edx-platform | 68dd185a0ab45862a2a61e0f803d7e03d2be71b5 | common/lib/xmodule/xmodule/x_module.py | python | ModuleSystem.get | (self, attr) | return self.__dict__.get(attr) | provide uniform access to attributes (like etree). | provide uniform access to attributes (like etree). | [
"provide",
"uniform",
"access",
"to",
"attributes",
"(",
"like",
"etree",
")",
"."
] | def get(self, attr):
""" provide uniform access to attributes (like etree)."""
return self.__dict__.get(attr) | [
"def",
"get",
"(",
"self",
",",
"attr",
")",
":",
"return",
"self",
".",
"__dict__",
".",
"get",
"(",
"attr",
")"
] | https://github.com/openedx/edx-platform/blob/68dd185a0ab45862a2a61e0f803d7e03d2be71b5/common/lib/xmodule/xmodule/x_module.py#L2008-L2010 | |
befelix/safe_learning | f1aad5a3d2f433993e842aa2e6ca7a9c45ad95d4 | examples/utilities.py | python | VanDerPol.ode | (self, state) | return state_derivative | Compute the state time-derivative.
Parameters
----------
state: ndarray or Tensor
States.
Returns
-------
state_derivative: Tensor
The state derivative according to the dynamics. | Compute the state time-derivative. | [
"Compute",
"the",
"state",
"time",
"-",
"derivative",
"."
] | def ode(self, state):
"""Compute the state time-derivative.
Parameters
----------
state: ndarray or Tensor
States.
Returns
-------
state_derivative: Tensor
The state derivative according to the dynamics.
"""
x, y = tf.split(state, 2, axis=1)
x_dot = - y
y_dot = x + self.damping * (x ** 2 - 1) * y
state_derivative = tf.concat((x_dot, y_dot), axis=1)
return state_derivative | [
"def",
"ode",
"(",
"self",
",",
"state",
")",
":",
"x",
",",
"y",
"=",
"tf",
".",
"split",
"(",
"state",
",",
"2",
",",
"axis",
"=",
"1",
")",
"x_dot",
"=",
"-",
"y",
"y_dot",
"=",
"x",
"+",
"self",
".",
"damping",
"*",
"(",
"x",
"**",
"2... | https://github.com/befelix/safe_learning/blob/f1aad5a3d2f433993e842aa2e6ca7a9c45ad95d4/examples/utilities.py#L501-L519 | |
openembedded/bitbake | 98407efc8c670abd71d3fa88ec3776ee9b5c38f3 | lib/pyinotify.py | python | Watch.__init__ | (self, wd, path, mask, proc_fun, auto_add, exclude_filter) | Initializations.
@param wd: Watch descriptor.
@type wd: int
@param path: Path of the file or directory being watched.
@type path: str
@param mask: Mask.
@type mask: int
@param proc_fun: Processing callable object.
@type proc_fun:
@param auto_add: Automatically add watches on new directories.
@type auto_add: bool
@param exclude_filter: Boolean function, used to exclude new
directories from being automatically watched.
See WatchManager.__init__
@type exclude_filter: callable object | Initializations. | [
"Initializations",
"."
] | def __init__(self, wd, path, mask, proc_fun, auto_add, exclude_filter):
"""
Initializations.
@param wd: Watch descriptor.
@type wd: int
@param path: Path of the file or directory being watched.
@type path: str
@param mask: Mask.
@type mask: int
@param proc_fun: Processing callable object.
@type proc_fun:
@param auto_add: Automatically add watches on new directories.
@type auto_add: bool
@param exclude_filter: Boolean function, used to exclude new
directories from being automatically watched.
See WatchManager.__init__
@type exclude_filter: callable object
"""
self.wd = wd
self.path = path
self.mask = mask
self.proc_fun = proc_fun
self.auto_add = auto_add
self.exclude_filter = exclude_filter
self.dir = os.path.isdir(self.path) | [
"def",
"__init__",
"(",
"self",
",",
"wd",
",",
"path",
",",
"mask",
",",
"proc_fun",
",",
"auto_add",
",",
"exclude_filter",
")",
":",
"self",
".",
"wd",
"=",
"wd",
"self",
".",
"path",
"=",
"path",
"self",
".",
"mask",
"=",
"mask",
"self",
".",
... | https://github.com/openembedded/bitbake/blob/98407efc8c670abd71d3fa88ec3776ee9b5c38f3/lib/pyinotify.py#L1564-L1589 | ||
cloudera/hue | 23f02102d4547c17c32bd5ea0eb24e9eadd657a4 | desktop/core/ext-py/docutils-0.14/docutils/utils/math/math2html.py | python | BigBracket.getpiece3 | (self, index) | return self.pieces[1] | Get the nth piece for a 3-piece bracket: parenthesis or square bracket. | Get the nth piece for a 3-piece bracket: parenthesis or square bracket. | [
"Get",
"the",
"nth",
"piece",
"for",
"a",
"3",
"-",
"piece",
"bracket",
":",
"parenthesis",
"or",
"square",
"bracket",
"."
] | def getpiece3(self, index):
"Get the nth piece for a 3-piece bracket: parenthesis or square bracket."
if index == 0:
return self.pieces[0]
if index == self.size - 1:
return self.pieces[-1]
return self.pieces[1] | [
"def",
"getpiece3",
"(",
"self",
",",
"index",
")",
":",
"if",
"index",
"==",
"0",
":",
"return",
"self",
".",
"pieces",
"[",
"0",
"]",
"if",
"index",
"==",
"self",
".",
"size",
"-",
"1",
":",
"return",
"self",
".",
"pieces",
"[",
"-",
"1",
"]"... | https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/docutils-0.14/docutils/utils/math/math2html.py#L4354-L4360 | |
IronLanguages/ironpython3 | 7a7bb2a872eeab0d1009fc8a6e24dca43f65b693 | Src/StdLib/Lib/email/_policybase.py | python | Policy.fold_binary | (self, name, value) | Given the header name and the value from the model, return binary
data containing linesep characters that implement the folding of the
header according to the policy controls. The value passed in by the
email package may contain surrogateescaped binary data. | Given the header name and the value from the model, return binary
data containing linesep characters that implement the folding of the
header according to the policy controls. The value passed in by the
email package may contain surrogateescaped binary data. | [
"Given",
"the",
"header",
"name",
"and",
"the",
"value",
"from",
"the",
"model",
"return",
"binary",
"data",
"containing",
"linesep",
"characters",
"that",
"implement",
"the",
"folding",
"of",
"the",
"header",
"according",
"to",
"the",
"policy",
"controls",
".... | def fold_binary(self, name, value):
"""Given the header name and the value from the model, return binary
data containing linesep characters that implement the folding of the
header according to the policy controls. The value passed in by the
email package may contain surrogateescaped binary data.
"""
raise NotImplementedError | [
"def",
"fold_binary",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"raise",
"NotImplementedError"
] | https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/email/_policybase.py#L251-L258 | ||
seann999/ssd_tensorflow | f381dba71029a329a2b23763b804289eb8069b4b | coco_loader.py | python | Loader.__init__ | (self, train=True) | [] | def __init__(self, train=True):
if train:
self.image_dir = train_dir
ann_file = train_ann_file
self.get_image_path = self.get_train_path
else:
self.image_dir = val_dir
ann_file = val_ann_file
self.get_image_path = self.get_val_path
self.coco = COCO(ann_file)
cats = self.coco.loadCats(self.coco.getCatIds())
names = [cat['name'] for cat in cats]
# id is number from pycocotools
# i is actual index used
id2name = dict((cat["id"], cat["name"]) for cat in cats)
self.id2i = dict((cats[i]['id'], i) for i in range(len(cats)))
self.i2name = {v: id2name[k] for k, v in self.id2i.iteritems()}
self.i2name[classes] = "void"
print("NUMBER OF CLASSES: %i" % len(id2name))
self.cat_ids = self.coco.getCatIds()
self.img_ids = self.coco.getImgIds()
print("%i total training images" % len(self.img_ids)) | [
"def",
"__init__",
"(",
"self",
",",
"train",
"=",
"True",
")",
":",
"if",
"train",
":",
"self",
".",
"image_dir",
"=",
"train_dir",
"ann_file",
"=",
"train_ann_file",
"self",
".",
"get_image_path",
"=",
"self",
".",
"get_train_path",
"else",
":",
"self",
... | https://github.com/seann999/ssd_tensorflow/blob/f381dba71029a329a2b23763b804289eb8069b4b/coco_loader.py#L22-L47 | ||||
arthurdejong/python-stdnum | 02dec52602ae0709b940b781fc1fcebfde7340b7 | stdnum/it/codicefiscale.py | python | get_gender | (number) | return 'M' if int(number[9:11]) < 32 else 'F' | Get the gender of the person's fiscal code.
>>> get_gender('RCCMNL83S18D969H')
'M'
>>> get_gender('CNTCHR83T41D969D')
'F' | Get the gender of the person's fiscal code. | [
"Get",
"the",
"gender",
"of",
"the",
"person",
"s",
"fiscal",
"code",
"."
] | def get_gender(number):
"""Get the gender of the person's fiscal code.
>>> get_gender('RCCMNL83S18D969H')
'M'
>>> get_gender('CNTCHR83T41D969D')
'F'
"""
number = compact(number)
if len(number) != 16:
raise InvalidComponent()
return 'M' if int(number[9:11]) < 32 else 'F' | [
"def",
"get_gender",
"(",
"number",
")",
":",
"number",
"=",
"compact",
"(",
"number",
")",
"if",
"len",
"(",
"number",
")",
"!=",
"16",
":",
"raise",
"InvalidComponent",
"(",
")",
"return",
"'M'",
"if",
"int",
"(",
"number",
"[",
"9",
":",
"11",
"... | https://github.com/arthurdejong/python-stdnum/blob/02dec52602ae0709b940b781fc1fcebfde7340b7/stdnum/it/codicefiscale.py#L135-L146 | |
fonttools/fonttools | 892322aaff6a89bea5927379ec06bc0da3dfb7df | Lib/fontTools/misc/classifyTools.py | python | classify | (list_of_sets, sort=True) | return classifier.getClasses(), classifier.getMapping() | Takes a iterable of iterables (list of sets from here on; but any
iterable works.), and returns the smallest list of sets such that
each set, is either a subset, or is disjoint from, each of the input
sets.
In other words, this function classifies all the things present in
any of the input sets, into similar classes, based on which sets
things are a member of.
If sort=True, return class sets are sorted by decreasing size and
their natural sort order within each class size. Otherwise, class
sets are returned in the order that they were identified, which is
generally not significant.
>>> classify([]) == ([], {})
True
>>> classify([[]]) == ([], {})
True
>>> classify([[], []]) == ([], {})
True
>>> classify([[1]]) == ([{1}], {1: {1}})
True
>>> classify([[1,2]]) == ([{1, 2}], {1: {1, 2}, 2: {1, 2}})
True
>>> classify([[1],[2]]) == ([{1}, {2}], {1: {1}, 2: {2}})
True
>>> classify([[1,2],[2]]) == ([{1}, {2}], {1: {1}, 2: {2}})
True
>>> classify([[1,2],[2,4]]) == ([{1}, {2}, {4}], {1: {1}, 2: {2}, 4: {4}})
True
>>> classify([[1,2],[2,4,5]]) == (
... [{4, 5}, {1}, {2}], {1: {1}, 2: {2}, 4: {4, 5}, 5: {4, 5}})
True
>>> classify([[1,2],[2,4,5]], sort=False) == (
... [{1}, {4, 5}, {2}], {1: {1}, 2: {2}, 4: {4, 5}, 5: {4, 5}})
True
>>> classify([[1,2,9],[2,4,5]], sort=False) == (
... [{1, 9}, {4, 5}, {2}], {1: {1, 9}, 2: {2}, 4: {4, 5}, 5: {4, 5},
... 9: {1, 9}})
True
>>> classify([[1,2,9,15],[2,4,5]], sort=False) == (
... [{1, 9, 15}, {4, 5}, {2}], {1: {1, 9, 15}, 2: {2}, 4: {4, 5},
... 5: {4, 5}, 9: {1, 9, 15}, 15: {1, 9, 15}})
True
>>> classes, mapping = classify([[1,2,9,15],[2,4,5],[15,5]], sort=False)
>>> set([frozenset(c) for c in classes]) == set(
... [frozenset(s) for s in ({1, 9}, {4}, {2}, {5}, {15})])
True
>>> mapping == {1: {1, 9}, 2: {2}, 4: {4}, 5: {5}, 9: {1, 9}, 15: {15}}
True | Takes a iterable of iterables (list of sets from here on; but any
iterable works.), and returns the smallest list of sets such that
each set, is either a subset, or is disjoint from, each of the input
sets. | [
"Takes",
"a",
"iterable",
"of",
"iterables",
"(",
"list",
"of",
"sets",
"from",
"here",
"on",
";",
"but",
"any",
"iterable",
"works",
".",
")",
"and",
"returns",
"the",
"smallest",
"list",
"of",
"sets",
"such",
"that",
"each",
"set",
"is",
"either",
"a... | def classify(list_of_sets, sort=True):
"""
Takes a iterable of iterables (list of sets from here on; but any
iterable works.), and returns the smallest list of sets such that
each set, is either a subset, or is disjoint from, each of the input
sets.
In other words, this function classifies all the things present in
any of the input sets, into similar classes, based on which sets
things are a member of.
If sort=True, return class sets are sorted by decreasing size and
their natural sort order within each class size. Otherwise, class
sets are returned in the order that they were identified, which is
generally not significant.
>>> classify([]) == ([], {})
True
>>> classify([[]]) == ([], {})
True
>>> classify([[], []]) == ([], {})
True
>>> classify([[1]]) == ([{1}], {1: {1}})
True
>>> classify([[1,2]]) == ([{1, 2}], {1: {1, 2}, 2: {1, 2}})
True
>>> classify([[1],[2]]) == ([{1}, {2}], {1: {1}, 2: {2}})
True
>>> classify([[1,2],[2]]) == ([{1}, {2}], {1: {1}, 2: {2}})
True
>>> classify([[1,2],[2,4]]) == ([{1}, {2}, {4}], {1: {1}, 2: {2}, 4: {4}})
True
>>> classify([[1,2],[2,4,5]]) == (
... [{4, 5}, {1}, {2}], {1: {1}, 2: {2}, 4: {4, 5}, 5: {4, 5}})
True
>>> classify([[1,2],[2,4,5]], sort=False) == (
... [{1}, {4, 5}, {2}], {1: {1}, 2: {2}, 4: {4, 5}, 5: {4, 5}})
True
>>> classify([[1,2,9],[2,4,5]], sort=False) == (
... [{1, 9}, {4, 5}, {2}], {1: {1, 9}, 2: {2}, 4: {4, 5}, 5: {4, 5},
... 9: {1, 9}})
True
>>> classify([[1,2,9,15],[2,4,5]], sort=False) == (
... [{1, 9, 15}, {4, 5}, {2}], {1: {1, 9, 15}, 2: {2}, 4: {4, 5},
... 5: {4, 5}, 9: {1, 9, 15}, 15: {1, 9, 15}})
True
>>> classes, mapping = classify([[1,2,9,15],[2,4,5],[15,5]], sort=False)
>>> set([frozenset(c) for c in classes]) == set(
... [frozenset(s) for s in ({1, 9}, {4}, {2}, {5}, {15})])
True
>>> mapping == {1: {1, 9}, 2: {2}, 4: {4}, 5: {5}, 9: {1, 9}, 15: {15}}
True
"""
classifier = Classifier(sort=sort)
classifier.update(list_of_sets)
return classifier.getClasses(), classifier.getMapping() | [
"def",
"classify",
"(",
"list_of_sets",
",",
"sort",
"=",
"True",
")",
":",
"classifier",
"=",
"Classifier",
"(",
"sort",
"=",
"sort",
")",
"classifier",
".",
"update",
"(",
"list_of_sets",
")",
"return",
"classifier",
".",
"getClasses",
"(",
")",
",",
"... | https://github.com/fonttools/fonttools/blob/892322aaff6a89bea5927379ec06bc0da3dfb7df/Lib/fontTools/misc/classifyTools.py#L111-L166 | |
Yelp/undebt | 90a4afb0d0eebe4ba848a2319bb43cfbf249f022 | undebt/pattern/util.py | python | condense | (item) | return attach(item, "".join) | Condenses without space auto-whitespace-parsed tokens. | Condenses without space auto-whitespace-parsed tokens. | [
"Condenses",
"without",
"space",
"auto",
"-",
"whitespace",
"-",
"parsed",
"tokens",
"."
] | def condense(item):
"""Condenses without space auto-whitespace-parsed tokens."""
return attach(item, "".join) | [
"def",
"condense",
"(",
"item",
")",
":",
"return",
"attach",
"(",
"item",
",",
"\"\"",
".",
"join",
")"
] | https://github.com/Yelp/undebt/blob/90a4afb0d0eebe4ba848a2319bb43cfbf249f022/undebt/pattern/util.py#L33-L35 | |
HiKapok/SSD.TensorFlow | b47ff6164c8925a8bbccc593719d5bbbab996058 | train_ssd.py | python | ssd_model_fn | (features, labels, mode, params) | return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=total_loss,
train_op=train_op,
eval_metric_ops=metrics,
scaffold=tf.train.Scaffold(init_fn=get_init_fn())) | model_fn for SSD to be used with our Estimator. | model_fn for SSD to be used with our Estimator. | [
"model_fn",
"for",
"SSD",
"to",
"be",
"used",
"with",
"our",
"Estimator",
"."
] | def ssd_model_fn(features, labels, mode, params):
"""model_fn for SSD to be used with our Estimator."""
shape = labels['shape']
loc_targets = labels['loc_targets']
cls_targets = labels['cls_targets']
match_scores = labels['match_scores']
global global_anchor_info
decode_fn = global_anchor_info['decode_fn']
num_anchors_per_layer = global_anchor_info['num_anchors_per_layer']
all_num_anchors_depth = global_anchor_info['all_num_anchors_depth']
# bboxes_pred = decode_fn(loc_targets[0])
# bboxes_pred = [tf.reshape(preds, [-1, 4]) for preds in bboxes_pred]
# bboxes_pred = tf.concat(bboxes_pred, axis=0)
# save_image_op = tf.py_func(save_image_with_bbox,
# [ssd_preprocessing.unwhiten_image(features[0]),
# tf.clip_by_value(cls_targets[0], 0, tf.int64.max),
# match_scores[0],
# bboxes_pred],
# tf.int64, stateful=True)
# with tf.control_dependencies([save_image_op]):
#print(all_num_anchors_depth)
with tf.variable_scope(params['model_scope'], default_name=None, values=[features], reuse=tf.AUTO_REUSE):
backbone = ssd_net.VGG16Backbone(params['data_format'])
feature_layers = backbone.forward(features, training=(mode == tf.estimator.ModeKeys.TRAIN))
#print(feature_layers)
location_pred, cls_pred = ssd_net.multibox_head(feature_layers, params['num_classes'], all_num_anchors_depth, data_format=params['data_format'])
if params['data_format'] == 'channels_first':
cls_pred = [tf.transpose(pred, [0, 2, 3, 1]) for pred in cls_pred]
location_pred = [tf.transpose(pred, [0, 2, 3, 1]) for pred in location_pred]
cls_pred = [tf.reshape(pred, [tf.shape(features)[0], -1, params['num_classes']]) for pred in cls_pred]
location_pred = [tf.reshape(pred, [tf.shape(features)[0], -1, 4]) for pred in location_pred]
cls_pred = tf.concat(cls_pred, axis=1)
location_pred = tf.concat(location_pred, axis=1)
cls_pred = tf.reshape(cls_pred, [-1, params['num_classes']])
location_pred = tf.reshape(location_pred, [-1, 4])
with tf.device('/cpu:0'):
with tf.control_dependencies([cls_pred, location_pred]):
with tf.name_scope('post_forward'):
#bboxes_pred = decode_fn(location_pred)
bboxes_pred = tf.map_fn(lambda _preds : decode_fn(_preds),
tf.reshape(location_pred, [tf.shape(features)[0], -1, 4]),
dtype=[tf.float32] * len(num_anchors_per_layer), back_prop=False)
#cls_targets = tf.Print(cls_targets, [tf.shape(bboxes_pred[0]),tf.shape(bboxes_pred[1]),tf.shape(bboxes_pred[2]),tf.shape(bboxes_pred[3])])
bboxes_pred = [tf.reshape(preds, [-1, 4]) for preds in bboxes_pred]
bboxes_pred = tf.concat(bboxes_pred, axis=0)
flaten_cls_targets = tf.reshape(cls_targets, [-1])
flaten_match_scores = tf.reshape(match_scores, [-1])
flaten_loc_targets = tf.reshape(loc_targets, [-1, 4])
# each positive examples has one label
positive_mask = flaten_cls_targets > 0
n_positives = tf.count_nonzero(positive_mask)
batch_n_positives = tf.count_nonzero(cls_targets, -1)
batch_negtive_mask = tf.equal(cls_targets, 0)#tf.logical_and(tf.equal(cls_targets, 0), match_scores > 0.)
batch_n_negtives = tf.count_nonzero(batch_negtive_mask, -1)
batch_n_neg_select = tf.cast(params['negative_ratio'] * tf.cast(batch_n_positives, tf.float32), tf.int32)
batch_n_neg_select = tf.minimum(batch_n_neg_select, tf.cast(batch_n_negtives, tf.int32))
# hard negative mining for classification
predictions_for_bg = tf.nn.softmax(tf.reshape(cls_pred, [tf.shape(features)[0], -1, params['num_classes']]))[:, :, 0]
prob_for_negtives = tf.where(batch_negtive_mask,
0. - predictions_for_bg,
# ignore all the positives
0. - tf.ones_like(predictions_for_bg))
topk_prob_for_bg, _ = tf.nn.top_k(prob_for_negtives, k=tf.shape(prob_for_negtives)[1])
score_at_k = tf.gather_nd(topk_prob_for_bg, tf.stack([tf.range(tf.shape(features)[0]), batch_n_neg_select - 1], axis=-1))
selected_neg_mask = prob_for_negtives >= tf.expand_dims(score_at_k, axis=-1)
# include both selected negtive and all positive examples
final_mask = tf.stop_gradient(tf.logical_or(tf.reshape(tf.logical_and(batch_negtive_mask, selected_neg_mask), [-1]), positive_mask))
total_examples = tf.count_nonzero(final_mask)
cls_pred = tf.boolean_mask(cls_pred, final_mask)
location_pred = tf.boolean_mask(location_pred, tf.stop_gradient(positive_mask))
flaten_cls_targets = tf.boolean_mask(tf.clip_by_value(flaten_cls_targets, 0, params['num_classes']), final_mask)
flaten_loc_targets = tf.stop_gradient(tf.boolean_mask(flaten_loc_targets, positive_mask))
predictions = {
'classes': tf.argmax(cls_pred, axis=-1),
'probabilities': tf.reduce_max(tf.nn.softmax(cls_pred, name='softmax_tensor'), axis=-1),
'loc_predict': bboxes_pred }
cls_accuracy = tf.metrics.accuracy(flaten_cls_targets, predictions['classes'])
metrics = {'cls_accuracy': cls_accuracy}
# Create a tensor named train_accuracy for logging purposes.
tf.identity(cls_accuracy[1], name='cls_accuracy')
tf.summary.scalar('cls_accuracy', cls_accuracy[1])
if mode == tf.estimator.ModeKeys.PREDICT:
return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)
# Calculate loss, which includes softmax cross entropy and L2 regularization.
#cross_entropy = tf.cond(n_positives > 0, lambda: tf.losses.sparse_softmax_cross_entropy(labels=flaten_cls_targets, logits=cls_pred), lambda: 0.)# * (params['negative_ratio'] + 1.)
#flaten_cls_targets=tf.Print(flaten_cls_targets, [flaten_loc_targets],summarize=50000)
cross_entropy = tf.losses.sparse_softmax_cross_entropy(labels=flaten_cls_targets, logits=cls_pred) * (params['negative_ratio'] + 1.)
# Create a tensor named cross_entropy for logging purposes.
tf.identity(cross_entropy, name='cross_entropy_loss')
tf.summary.scalar('cross_entropy_loss', cross_entropy)
#loc_loss = tf.cond(n_positives > 0, lambda: modified_smooth_l1(location_pred, tf.stop_gradient(flaten_loc_targets), sigma=1.), lambda: tf.zeros_like(location_pred))
loc_loss = modified_smooth_l1(location_pred, flaten_loc_targets, sigma=1.)
#loc_loss = modified_smooth_l1(location_pred, tf.stop_gradient(gtargets))
loc_loss = tf.reduce_mean(tf.reduce_sum(loc_loss, axis=-1), name='location_loss')
tf.summary.scalar('location_loss', loc_loss)
tf.losses.add_loss(loc_loss)
l2_loss_vars = []
for trainable_var in tf.trainable_variables():
if '_bn' not in trainable_var.name:
if 'conv4_3_scale' not in trainable_var.name:
l2_loss_vars.append(tf.nn.l2_loss(trainable_var))
else:
l2_loss_vars.append(tf.nn.l2_loss(trainable_var) * 0.1)
# Add weight decay to the loss. We exclude the batch norm variables because
# doing so leads to a small improvement in accuracy.
total_loss = tf.add(cross_entropy + loc_loss, tf.multiply(params['weight_decay'], tf.add_n(l2_loss_vars), name='l2_loss'), name='total_loss')
if mode == tf.estimator.ModeKeys.TRAIN:
global_step = tf.train.get_or_create_global_step()
lr_values = [params['learning_rate'] * decay for decay in params['lr_decay_factors']]
learning_rate = tf.train.piecewise_constant(tf.cast(global_step, tf.int32),
[int(_) for _ in params['decay_boundaries']],
lr_values)
truncated_learning_rate = tf.maximum(learning_rate, tf.constant(params['end_learning_rate'], dtype=learning_rate.dtype), name='learning_rate')
# Create a tensor named learning_rate for logging purposes.
tf.summary.scalar('learning_rate', truncated_learning_rate)
optimizer = tf.train.MomentumOptimizer(learning_rate=truncated_learning_rate,
momentum=params['momentum'])
optimizer = tf.contrib.estimator.TowerOptimizer(optimizer)
# Batch norm requires update_ops to be added as a train_op dependency.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
train_op = optimizer.minimize(total_loss, global_step)
else:
train_op = None
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=total_loss,
train_op=train_op,
eval_metric_ops=metrics,
scaffold=tf.train.Scaffold(init_fn=get_init_fn())) | [
"def",
"ssd_model_fn",
"(",
"features",
",",
"labels",
",",
"mode",
",",
"params",
")",
":",
"shape",
"=",
"labels",
"[",
"'shape'",
"]",
"loc_targets",
"=",
"labels",
"[",
"'loc_targets'",
"]",
"cls_targets",
"=",
"labels",
"[",
"'cls_targets'",
"]",
"mat... | https://github.com/HiKapok/SSD.TensorFlow/blob/b47ff6164c8925a8bbccc593719d5bbbab996058/train_ssd.py#L244-L403 | |
dagster-io/dagster | b27d569d5fcf1072543533a0c763815d96f90b8f | python_modules/dagster/dagster/core/execution/context/compute.py | python | SolidExecutionContext.solid_handle | (self) | return self._step_execution_context.solid_handle | NodeHandle: The current solid's handle.
:meta private: | NodeHandle: The current solid's handle. | [
"NodeHandle",
":",
"The",
"current",
"solid",
"s",
"handle",
"."
] | def solid_handle(self) -> NodeHandle:
"""NodeHandle: The current solid's handle.
:meta private:
"""
return self._step_execution_context.solid_handle | [
"def",
"solid_handle",
"(",
"self",
")",
"->",
"NodeHandle",
":",
"return",
"self",
".",
"_step_execution_context",
".",
"solid_handle"
] | https://github.com/dagster-io/dagster/blob/b27d569d5fcf1072543533a0c763815d96f90b8f/python_modules/dagster/dagster/core/execution/context/compute.py#L181-L186 | |
Netflix/dispatch | f734b7cb91cba0e3a95b4d0adaa7198bfc94552b | src/dispatch/organization/service.py | python | create | (*, db_session, organization_in: OrganizationCreate) | return organization | Creates an organization. | Creates an organization. | [
"Creates",
"an",
"organization",
"."
] | def create(*, db_session, organization_in: OrganizationCreate) -> Organization:
"""Creates an organization."""
organization = Organization(
**organization_in.dict(exclude={"banner_color"}),
)
if organization_in.banner_color:
organization.banner_color = organization_in.banner_color.as_hex()
# we let the new schema session create the organization
organization = init_schema(engine=engine, organization=organization)
return organization | [
"def",
"create",
"(",
"*",
",",
"db_session",
",",
"organization_in",
":",
"OrganizationCreate",
")",
"->",
"Organization",
":",
"organization",
"=",
"Organization",
"(",
"*",
"*",
"organization_in",
".",
"dict",
"(",
"exclude",
"=",
"{",
"\"banner_color\"",
"... | https://github.com/Netflix/dispatch/blob/f734b7cb91cba0e3a95b4d0adaa7198bfc94552b/src/dispatch/organization/service.py#L100-L111 | |
PaddlePaddle/Research | 2da0bd6c72d60e9df403aff23a7802779561c4a1 | NLP/ACL2019-DuConv/generative_paddle/network.py | python | train_loop | (config,
train_generator, valid_generator,
main_program, inference_program,
model_handle, param_name_list, opt_var_name_list) | model train loop | model train loop | [
"model",
"train",
"loop"
] | def train_loop(config,
train_generator, valid_generator,
main_program, inference_program,
model_handle, param_name_list, opt_var_name_list):
""" model train loop """
stage = config.stage
[exe, place, bow_loss, kl_loss, nll_loss, final_loss] = model_handle
total_step = 0
start_epoch = 0 if stage == 0 else config.pretrain_epoch
end_epoch = config.pretrain_epoch if stage == 0 else config.num_epochs
print("start end", start_epoch, end_epoch)
best_score = float('inf')
for epoch_idx in range(start_epoch, end_epoch):
total_bow_loss = 0
total_kl_loss = 0
total_nll_loss = 0
total_final_loss = 0
sample_num = 0
for batch_id, data in enumerate(train_generator()):
data_feed = build_data_feed(data, place,
batch_size=config.batch_size,
is_training=True,
bow_max_len=config.max_len,
pretrain_epoch=epoch_idx < config.pretrain_epoch)
if data_feed is None:
break
out = exe.run(main_program, feed=data_feed,
fetch_list=[bow_loss.name, kl_loss.name, nll_loss.name, final_loss.name])
total_step += 1
total_bow_loss += out[0]
total_kl_loss += out[1]
total_nll_loss += out[2]
total_final_loss += out[3]
sample_num += 1
if batch_id > 0 and batch_id % config.log_steps == 0:
print("epoch %d step %d | "
"bow loss %0.6f kl loss %0.6f nll loss %0.6f total loss %0.6f" % \
(epoch_idx, batch_id,
total_bow_loss / sample_num, total_kl_loss / sample_num, \
total_nll_loss / sample_num, total_final_loss / sample_num))
total_bow_loss = 0
total_kl_loss = 0
total_nll_loss = 0
total_final_loss = 0
sample_num = 0
if batch_id > 0 and batch_id % config.valid_steps == 0:
eval_bow_loss, eval_kl_loss, eval_nll_loss, eval_total_loss = \
vaild_loop(config, valid_generator, inference_program, model_handle)
# save model
if stage != 0:
param_path = config.save_dir + "/" + str(total_step)
fluid.io.save_params(executor=exe, dirname=param_path,
main_program=main_program)
if eval_nll_loss < best_score:
# save to best
best_model_path = config.save_dir + "/best_model"
print("save to best", eval_nll_loss, best_model_path)
fluid.io.save_params(executor=exe, dirname=best_model_path,
main_program=main_program)
best_score = eval_nll_loss
eval_bow_loss, eval_kl_loss, eval_nll_loss, eval_total_loss = \
vaild_loop(config, valid_generator, inference_program, model_handle)
if stage != 0:
param_path = config.save_dir + "/" + str(total_step)
fluid.io.save_params(executor=exe, dirname=param_path,
main_program=main_program)
if eval_nll_loss < best_score:
best_model_path = config.save_dir + "/best_model"
print("save to best", eval_nll_loss, best_model_path)
fluid.io.save_params(executor=exe, dirname=best_model_path,
main_program=main_program)
best_score = eval_nll_loss
if stage == 0:
# save last model and opt_stat to npz for next stage init
save_model_file = config.save_dir + "/model_stage_0"
save_opt_state_file = config.save_dir + "/opt_state_stage_0"
model_stage_0 = {}
for name in param_name_list:
t = np.asarray(fluid.global_scope().find_var(name).get_tensor())
model_stage_0[name] = t
np.savez(save_model_file, **model_stage_0)
opt_state_stage_0 = {}
for name in opt_var_name_list:
t_data = np.asarray(fluid.global_scope().find_var(name).get_tensor())
opt_state_stage_0[name] = t_data
np.savez(save_opt_state_file, **opt_state_stage_0) | [
"def",
"train_loop",
"(",
"config",
",",
"train_generator",
",",
"valid_generator",
",",
"main_program",
",",
"inference_program",
",",
"model_handle",
",",
"param_name_list",
",",
"opt_var_name_list",
")",
":",
"stage",
"=",
"config",
".",
"stage",
"[",
"exe",
... | https://github.com/PaddlePaddle/Research/blob/2da0bd6c72d60e9df403aff23a7802779561c4a1/NLP/ACL2019-DuConv/generative_paddle/network.py#L250-L350 | ||
ahmetcemturan/SFACT | 7576e29ba72b33e5058049b77b7b558875542747 | fabmetheus_utilities/euclidean.py | python | getBottomByPath | (path) | return bottom | Get the bottom of the path. | Get the bottom of the path. | [
"Get",
"the",
"bottom",
"of",
"the",
"path",
"."
] | def getBottomByPath(path):
'Get the bottom of the path.'
bottom = 987654321987654321.0
for point in path:
bottom = min(bottom, point.z)
return bottom | [
"def",
"getBottomByPath",
"(",
"path",
")",
":",
"bottom",
"=",
"987654321987654321.0",
"for",
"point",
"in",
"path",
":",
"bottom",
"=",
"min",
"(",
"bottom",
",",
"point",
".",
"z",
")",
"return",
"bottom"
] | https://github.com/ahmetcemturan/SFACT/blob/7576e29ba72b33e5058049b77b7b558875542747/fabmetheus_utilities/euclidean.py#L483-L488 | |
adamchainz/django-mysql | 389594dc078f73c9f204306014332344fe4b6d04 | src/django_mysql/locks.py | python | TableLock.release | (
self,
exc_type: Optional[Type[BaseException]] = None,
exc_value: Optional[BaseException] = None,
exc_traceback: Optional[TracebackType] = None,
) | [] | def release(
self,
exc_type: Optional[Type[BaseException]] = None,
exc_value: Optional[BaseException] = None,
exc_traceback: Optional[TracebackType] = None,
) -> None:
connection = connections[self.db]
with connection.cursor() as cursor:
self._atomic.__exit__(exc_type, exc_value, exc_traceback)
self._atomic = None
cursor.execute("UNLOCK TABLES") | [
"def",
"release",
"(",
"self",
",",
"exc_type",
":",
"Optional",
"[",
"Type",
"[",
"BaseException",
"]",
"]",
"=",
"None",
",",
"exc_value",
":",
"Optional",
"[",
"BaseException",
"]",
"=",
"None",
",",
"exc_traceback",
":",
"Optional",
"[",
"TracebackType... | https://github.com/adamchainz/django-mysql/blob/389594dc078f73c9f204306014332344fe4b6d04/src/django_mysql/locks.py#L165-L175 | ||||
tomplus/kubernetes_asyncio | f028cc793e3a2c519be6a52a49fb77ff0b014c9b | kubernetes_asyncio/client/models/v1_resource_quota.py | python | V1ResourceQuota.__init__ | (self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None) | V1ResourceQuota - a model defined in OpenAPI | V1ResourceQuota - a model defined in OpenAPI | [
"V1ResourceQuota",
"-",
"a",
"model",
"defined",
"in",
"OpenAPI"
] | def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None): # noqa: E501
"""V1ResourceQuota - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
if status is not None:
self.status = status | [
"def",
"__init__",
"(",
"self",
",",
"api_version",
"=",
"None",
",",
"kind",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"spec",
"=",
"None",
",",
"status",
"=",
"None",
",",
"local_vars_configuration",
"=",
"None",
")",
":",
"# noqa: E501",
"# noqa... | https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/models/v1_resource_quota.py#L51-L73 | ||
aws/aws-encryption-sdk-python | 0922a76eb4536cbc2246e723f97496e068204d78 | decrypt_oracle/.chalice/pipeline.py | python | _pipeline_role | (buckets: Iterable[s3.Bucket]) | return iam.Role(
"CodePipelinesRole", AssumeRolePolicyDocument=_service_assume_role(CODEPIPELINE.prefix), Policies=[policy]
) | Build and return the IAM Role resource to be used by CodePipeline to run the pipeline. | Build and return the IAM Role resource to be used by CodePipeline to run the pipeline. | [
"Build",
"and",
"return",
"the",
"IAM",
"Role",
"resource",
"to",
"be",
"used",
"by",
"CodePipeline",
"to",
"run",
"the",
"pipeline",
"."
] | def _pipeline_role(buckets: Iterable[s3.Bucket]) -> iam.Role:
"""Build and return the IAM Role resource to be used by CodePipeline to run the pipeline."""
bucket_statements = [
AWS.Statement(
Effect=AWS.Allow,
Action=[S3.GetBucketVersioning, S3.PutBucketVersioning],
Resource=[GetAtt(bucket, "Arn") for bucket in buckets],
),
AWS.Statement(
Effect=AWS.Allow,
Action=[S3.GetObject, S3.PutObject],
Resource=[Sub("${{{bucket}.Arn}}/*".format(bucket=bucket.title)) for bucket in buckets],
),
]
policy = iam.Policy(
"PipelinePolicy",
PolicyName="PipelinePolicy",
PolicyDocument=AWS.PolicyDocument(
Statement=bucket_statements
+ [
AllowEverywhere(Action=[CLOUDWATCH.Action("*"), IAM.PassRole]),
AllowEverywhere(Action=[LAMBDA.InvokeFunction, LAMBDA.ListFunctions]),
AllowEverywhere(
Action=[
CLOUDFORMATION.CreateStack,
CLOUDFORMATION.DeleteStack,
CLOUDFORMATION.DescribeStacks,
CLOUDFORMATION.UpdateStack,
CLOUDFORMATION.CreateChangeSet,
CLOUDFORMATION.DeleteChangeSet,
CLOUDFORMATION.DescribeChangeSet,
CLOUDFORMATION.ExecuteChangeSet,
CLOUDFORMATION.SetStackPolicy,
CLOUDFORMATION.ValidateTemplate,
]
),
AllowEverywhere(Action=[CODEBUILD.BatchGetBuilds, CODEBUILD.StartBuild]),
]
),
)
return iam.Role(
"CodePipelinesRole", AssumeRolePolicyDocument=_service_assume_role(CODEPIPELINE.prefix), Policies=[policy]
) | [
"def",
"_pipeline_role",
"(",
"buckets",
":",
"Iterable",
"[",
"s3",
".",
"Bucket",
"]",
")",
"->",
"iam",
".",
"Role",
":",
"bucket_statements",
"=",
"[",
"AWS",
".",
"Statement",
"(",
"Effect",
"=",
"AWS",
".",
"Allow",
",",
"Action",
"=",
"[",
"S3... | https://github.com/aws/aws-encryption-sdk-python/blob/0922a76eb4536cbc2246e723f97496e068204d78/decrypt_oracle/.chalice/pipeline.py#L91-L133 | |
shiweibsw/Translation-Tools | 2fbbf902364e557fa7017f9a74a8797b7440c077 | venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/distro.py | python | LinuxDistribution.id | (self) | return '' | Return the distro ID of the Linux distribution, as a string.
For details, see :func:`distro.id`. | Return the distro ID of the Linux distribution, as a string. | [
"Return",
"the",
"distro",
"ID",
"of",
"the",
"Linux",
"distribution",
"as",
"a",
"string",
"."
] | def id(self):
"""Return the distro ID of the Linux distribution, as a string.
For details, see :func:`distro.id`.
"""
def normalize(distro_id, table):
distro_id = distro_id.lower().replace(' ', '_')
return table.get(distro_id, distro_id)
distro_id = self.os_release_attr('id')
if distro_id:
return normalize(distro_id, NORMALIZED_OS_ID)
distro_id = self.lsb_release_attr('distributor_id')
if distro_id:
return normalize(distro_id, NORMALIZED_LSB_ID)
distro_id = self.distro_release_attr('id')
if distro_id:
return normalize(distro_id, NORMALIZED_DISTRO_ID)
return '' | [
"def",
"id",
"(",
"self",
")",
":",
"def",
"normalize",
"(",
"distro_id",
",",
"table",
")",
":",
"distro_id",
"=",
"distro_id",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
"return",
"table",
".",
"get",
"(",
"distro_id",
"... | https://github.com/shiweibsw/Translation-Tools/blob/2fbbf902364e557fa7017f9a74a8797b7440c077/venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/distro.py#L627-L648 | |
tegaki/tegaki | eceec69fe651d0733c8c8752dae569d2283d0f3c | tegaki-pygtk/tegakigtk/canvas.py | python | Canvas.do_size_request | (self, requisition) | The do_size_request method Gtk+ is called on a widget to ask it the
widget how large it wishes to be.
It's not guaranteed that gtk+ will actually give this size to the
widget. | The do_size_request method Gtk+ is called on a widget to ask it the
widget how large it wishes to be.
It's not guaranteed that gtk+ will actually give this size to the
widget. | [
"The",
"do_size_request",
"method",
"Gtk",
"+",
"is",
"called",
"on",
"a",
"widget",
"to",
"ask",
"it",
"the",
"widget",
"how",
"large",
"it",
"wishes",
"to",
"be",
".",
"It",
"s",
"not",
"guaranteed",
"that",
"gtk",
"+",
"will",
"actually",
"give",
"t... | def do_size_request(self, requisition):
"""
The do_size_request method Gtk+ is called on a widget to ask it the
widget how large it wishes to be.
It's not guaranteed that gtk+ will actually give this size to the
widget.
"""
requisition.height = self.DEFAULT_HEIGHT
requisition.width = self.DEFAULT_WIDTH | [
"def",
"do_size_request",
"(",
"self",
",",
"requisition",
")",
":",
"requisition",
".",
"height",
"=",
"self",
".",
"DEFAULT_HEIGHT",
"requisition",
".",
"width",
"=",
"self",
".",
"DEFAULT_WIDTH"
] | https://github.com/tegaki/tegaki/blob/eceec69fe651d0733c8c8752dae569d2283d0f3c/tegaki-pygtk/tegakigtk/canvas.py#L153-L161 | ||
cloudera/hue | 23f02102d4547c17c32bd5ea0eb24e9eadd657a4 | desktop/core/ext-py/Django-1.11.29/django/utils/translation/trans_real.py | python | DjangoTranslation._add_fallback | (self, localedirs=None) | Sets the GNUTranslations() fallback with the default language. | Sets the GNUTranslations() fallback with the default language. | [
"Sets",
"the",
"GNUTranslations",
"()",
"fallback",
"with",
"the",
"default",
"language",
"."
] | def _add_fallback(self, localedirs=None):
"""Sets the GNUTranslations() fallback with the default language."""
# Don't set a fallback for the default language or any English variant
# (as it's empty, so it'll ALWAYS fall back to the default language)
if self.__language == settings.LANGUAGE_CODE or self.__language.startswith('en'):
return
if self.domain == 'django':
# Get from cache
default_translation = translation(settings.LANGUAGE_CODE)
else:
default_translation = DjangoTranslation(
settings.LANGUAGE_CODE, domain=self.domain, localedirs=localedirs
)
self.add_fallback(default_translation) | [
"def",
"_add_fallback",
"(",
"self",
",",
"localedirs",
"=",
"None",
")",
":",
"# Don't set a fallback for the default language or any English variant",
"# (as it's empty, so it'll ALWAYS fall back to the default language)",
"if",
"self",
".",
"__language",
"==",
"settings",
".",... | https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/Django-1.11.29/django/utils/translation/trans_real.py#L186-L199 | ||
Azure/azure-linux-extensions | a42ef718c746abab2b3c6a21da87b29e76364558 | CustomScript/azure/servicebus/__init__.py | python | Message.unlock | (self) | Unlocks itself if find queue name or topic name and subscription
name. | Unlocks itself if find queue name or topic name and subscription
name. | [
"Unlocks",
"itself",
"if",
"find",
"queue",
"name",
"or",
"topic",
"name",
"and",
"subscription",
"name",
"."
] | def unlock(self):
''' Unlocks itself if find queue name or topic name and subscription
name. '''
if self._queue_name:
self.service_bus_service.unlock_queue_message(
self._queue_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
elif self._topic_name and self._subscription_name:
self.service_bus_service.unlock_subscription_message(
self._topic_name,
self._subscription_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
else:
raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK) | [
"def",
"unlock",
"(",
"self",
")",
":",
"if",
"self",
".",
"_queue_name",
":",
"self",
".",
"service_bus_service",
".",
"unlock_queue_message",
"(",
"self",
".",
"_queue_name",
",",
"self",
".",
"broker_properties",
"[",
"'SequenceNumber'",
"]",
",",
"self",
... | https://github.com/Azure/azure-linux-extensions/blob/a42ef718c746abab2b3c6a21da87b29e76364558/CustomScript/azure/servicebus/__init__.py#L199-L214 | ||
GoogleCloudPlatform/gsutil | 5be882803e76608e2fd29cf8c504ccd1fe0a7746 | gslib/command_runner.py | python | CommandRunner._LoadCommandMap | (self) | return command_map | Returns dict mapping each command_name to implementing class. | Returns dict mapping each command_name to implementing class. | [
"Returns",
"dict",
"mapping",
"each",
"command_name",
"to",
"implementing",
"class",
"."
] | def _LoadCommandMap(self):
"""Returns dict mapping each command_name to implementing class."""
# Import all gslib.commands submodules.
for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
__import__('gslib.commands.%s' % module_name)
command_map = {}
# Only include Command subclasses in the dict.
for command in Command.__subclasses__():
command_map[command.command_spec.command_name] = command
for command_name_aliases in command.command_spec.command_name_aliases:
command_map[command_name_aliases] = command
return command_map | [
"def",
"_LoadCommandMap",
"(",
"self",
")",
":",
"# Import all gslib.commands submodules.",
"for",
"_",
",",
"module_name",
",",
"_",
"in",
"pkgutil",
".",
"iter_modules",
"(",
"gslib",
".",
"commands",
".",
"__path__",
")",
":",
"__import__",
"(",
"'gslib.comma... | https://github.com/GoogleCloudPlatform/gsutil/blob/5be882803e76608e2fd29cf8c504ccd1fe0a7746/gslib/command_runner.py#L168-L180 | |
dr-prodigy/python-holidays | 57dd2ed2b8659c76f776f10207456e5231c82099 | holidays/countries/malaysia.py | python | Malaysia.__init__ | (
self,
years: Union[int, Iterable[int]] = None,
expand: bool = True,
observed: bool = True,
prov: Optional[str] = None,
state: Optional[str] = None,
) | An subclass of :py:class:`HolidayBase` representing public holidays in
Malaysia.
If ``state`` is not supplied, only nationwide holidays are
returned. The following ``state`` codes are used (ISO 3166-2
subdivision codes are not yet supported):
- JHR: Johor
- KDH: Kedah
- KTN: Kelantan
- MLK: Melaka
- NSN: Negeri Sembilan
- PHG: Pahang
- PRK: Perak
- PLS: Perlis
- PNG: Pulau Pinang
- SBH: Sabah
- SWK: Sarawak
- SGR: Selangor
- TRG: Terengganu
- KUL: FT Kuala Lumpur
- LBN: FT Labuan
- PJY: FT Putrajaya
Limitations:
- Prior to 2021: holidays are not accurate.
- 2027 and later: Thaipusam dates are are estimated, and so denoted.
Reference: `Wikipedia
<https://en.wikipedia.org/wiki/Public_holidays_in_Malaysia>`__
Country created by: `Eden <https://github.com/jusce17>`__
Country maintained by: `Mike Borsetti <https://github.com/mborsetti>`__
See parameters and usage in :py:class:`HolidayBase`. | An subclass of :py:class:`HolidayBase` representing public holidays in
Malaysia. | [
"An",
"subclass",
"of",
":",
"py",
":",
"class",
":",
"HolidayBase",
"representing",
"public",
"holidays",
"in",
"Malaysia",
"."
] | def __init__(
self,
years: Union[int, Iterable[int]] = None,
expand: bool = True,
observed: bool = True,
prov: Optional[str] = None,
state: Optional[str] = None,
) -> None:
"""
An subclass of :py:class:`HolidayBase` representing public holidays in
Malaysia.
If ``state`` is not supplied, only nationwide holidays are
returned. The following ``state`` codes are used (ISO 3166-2
subdivision codes are not yet supported):
- JHR: Johor
- KDH: Kedah
- KTN: Kelantan
- MLK: Melaka
- NSN: Negeri Sembilan
- PHG: Pahang
- PRK: Perak
- PLS: Perlis
- PNG: Pulau Pinang
- SBH: Sabah
- SWK: Sarawak
- SGR: Selangor
- TRG: Terengganu
- KUL: FT Kuala Lumpur
- LBN: FT Labuan
- PJY: FT Putrajaya
Limitations:
- Prior to 2021: holidays are not accurate.
- 2027 and later: Thaipusam dates are are estimated, and so denoted.
Reference: `Wikipedia
<https://en.wikipedia.org/wiki/Public_holidays_in_Malaysia>`__
Country created by: `Eden <https://github.com/jusce17>`__
Country maintained by: `Mike Borsetti <https://github.com/mborsetti>`__
See parameters and usage in :py:class:`HolidayBase`.
"""
self.cnls = ChineseLuniSolar()
super().__init__(years, expand, observed, prov, state) | [
"def",
"__init__",
"(",
"self",
",",
"years",
":",
"Union",
"[",
"int",
",",
"Iterable",
"[",
"int",
"]",
"]",
"=",
"None",
",",
"expand",
":",
"bool",
"=",
"True",
",",
"observed",
":",
"bool",
"=",
"True",
",",
"prov",
":",
"Optional",
"[",
"st... | https://github.com/dr-prodigy/python-holidays/blob/57dd2ed2b8659c76f776f10207456e5231c82099/holidays/countries/malaysia.py#L61-L109 | ||
druid-io/pydruid | 98cab4d9c2a08a35667b26a15dee21bdb77422b4 | pydruid/db/api.py | python | get_type | (value) | Infer type from value.
Note that bool is a subclass of int so order of statements matter. | Infer type from value. | [
"Infer",
"type",
"from",
"value",
"."
] | def get_type(value):
"""
Infer type from value.
Note that bool is a subclass of int so order of statements matter.
"""
if isinstance(value, str) or value is None:
return Type.STRING
elif isinstance(value, bool):
return Type.BOOLEAN
elif isinstance(value, (int, float)):
return Type.NUMBER
raise exceptions.Error("Value of unknown type: {value}".format(value=value)) | [
"def",
"get_type",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"str",
")",
"or",
"value",
"is",
"None",
":",
"return",
"Type",
".",
"STRING",
"elif",
"isinstance",
"(",
"value",
",",
"bool",
")",
":",
"return",
"Type",
".",
"BOOLEA... | https://github.com/druid-io/pydruid/blob/98cab4d9c2a08a35667b26a15dee21bdb77422b4/pydruid/db/api.py#L100-L114 | ||
openshift/openshift-tools | 1188778e728a6e4781acf728123e5b356380fe6f | openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_vendored_deps/library/oc_adm_ca_server_cert.py | python | Utils.exists | (results, _name) | return False | Check to see if the results include the name | Check to see if the results include the name | [
"Check",
"to",
"see",
"if",
"the",
"results",
"include",
"the",
"name"
] | def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False | [
"def",
"exists",
"(",
"results",
",",
"_name",
")",
":",
"if",
"not",
"results",
":",
"return",
"False",
"if",
"Utils",
".",
"find_result",
"(",
"results",
",",
"_name",
")",
":",
"return",
"True",
"return",
"False"
] | https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_vendored_deps/library/oc_adm_ca_server_cert.py#L1265-L1273 | |
StackStorm/st2contrib | 095b021a31ba134728deb7c707240196d016e729 | packs/travis_ci/actions/disable_hook.py | python | DisableHookAction.run | (self, hook_id) | return response.content | Disable a hook to monitor through Travis | Disable a hook to monitor through Travis | [
"Disable",
"a",
"hook",
"to",
"monitor",
"through",
"Travis"
] | def run(self, hook_id):
"""
Disable a hook to monitor through Travis
"""
path = '/hooks/' + str(hook_id)
json_req = {
'hook': {
'active': 'false'
}
}
json_req = json.dumps(json_req)
response = self._perform_request(path, data=json_req, method='PUT')
return response.content | [
"def",
"run",
"(",
"self",
",",
"hook_id",
")",
":",
"path",
"=",
"'/hooks/'",
"+",
"str",
"(",
"hook_id",
")",
"json_req",
"=",
"{",
"'hook'",
":",
"{",
"'active'",
":",
"'false'",
"}",
"}",
"json_req",
"=",
"json",
".",
"dumps",
"(",
"json_req",
... | https://github.com/StackStorm/st2contrib/blob/095b021a31ba134728deb7c707240196d016e729/packs/travis_ci/actions/disable_hook.py#L7-L19 | |
robotlearn/pyrobolearn | 9cd7c060723fda7d2779fa255ac998c2c82b8436 | pyrobolearn/terminal_conditions/terminal_condition.py | python | TerminalCondition.__init__ | (self, btype=None, name=None) | Initialize the terminal condition.
Args:
btype (bool, str, None): if the terminal condition represents a failure or success condition. If None, it
represents a neutral terminal condition (which is neither a failure or success condition, but just
means the episode is over). If string, it has to be among {"success", "failure", "neutral"}.
name (str): name of the final condition | Initialize the terminal condition. | [
"Initialize",
"the",
"terminal",
"condition",
"."
] | def __init__(self, btype=None, name=None):
"""
Initialize the terminal condition.
Args:
btype (bool, str, None): if the terminal condition represents a failure or success condition. If None, it
represents a neutral terminal condition (which is neither a failure or success condition, but just
means the episode is over). If string, it has to be among {"success", "failure", "neutral"}.
name (str): name of the final condition
"""
self.btype = btype
self._over = False
self._achieved = False
self.name = name | [
"def",
"__init__",
"(",
"self",
",",
"btype",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"self",
".",
"btype",
"=",
"btype",
"self",
".",
"_over",
"=",
"False",
"self",
".",
"_achieved",
"=",
"False",
"self",
".",
"name",
"=",
"name"
] | https://github.com/robotlearn/pyrobolearn/blob/9cd7c060723fda7d2779fa255ac998c2c82b8436/pyrobolearn/terminal_conditions/terminal_condition.py#L39-L52 | ||
TencentCloud/tencentcloud-sdk-python | 3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2 | tencentcloud/autoscaling/v20180419/models.py | python | SpotMixedAllocationPolicy.__init__ | (self) | r"""
:param BaseCapacity: 混合模式下,基础容量的大小,基础容量部分固定为按量计费实例。默认值 0,最大不可超过伸缩组的最大实例数。
注意:此字段可能返回 null,表示取不到有效值。
:type BaseCapacity: int
:param OnDemandPercentageAboveBaseCapacity: 超出基础容量部分,按量计费实例所占的比例。取值范围 [0, 100],0 代表超出基础容量的部分仅生产竞价实例,100 代表仅生产按量实例,默认值为 70。按百分比计算按量实例数时,向上取整。
比如,总期望实例数取 3,基础容量取 1,超基础部分按量百分比取 1,则最终按量 2 台(1 台来自基础容量,1 台按百分比向上取整得到),竞价 1台。
注意:此字段可能返回 null,表示取不到有效值。
:type OnDemandPercentageAboveBaseCapacity: int
:param SpotAllocationStrategy: 混合模式下,竞价实例的分配策略。取值包括 COST_OPTIMIZED 和 CAPACITY_OPTIMIZED,默认取 COST_OPTIMIZED。
<br><li> COST_OPTIMIZED,成本优化策略。对于启动配置内的所有机型,按照各机型在各可用区的每核单价由小到大依次尝试。优先尝试购买每核单价最便宜的,如果购买失败则尝试购买次便宜的,以此类推。
<br><li> CAPACITY_OPTIMIZED,容量优化策略。对于启动配置内的所有机型,按照各机型在各可用区的库存情况由大到小依次尝试。优先尝试购买剩余库存最大的机型,这样可尽量降低竞价实例被动回收的发生概率。
注意:此字段可能返回 null,表示取不到有效值。
:type SpotAllocationStrategy: str
:param CompensateWithBaseInstance: 按量实例替补功能。取值范围:
<br><li> TRUE,开启该功能,当所有竞价机型因库存不足等原因全部购买失败后,尝试购买按量实例。
<br><li> FALSE,不开启该功能,伸缩组在需要扩容竞价实例时仅尝试所配置的竞价机型。
默认取值: TRUE。
注意:此字段可能返回 null,表示取不到有效值。
:type CompensateWithBaseInstance: bool | r"""
:param BaseCapacity: 混合模式下,基础容量的大小,基础容量部分固定为按量计费实例。默认值 0,最大不可超过伸缩组的最大实例数。
注意:此字段可能返回 null,表示取不到有效值。
:type BaseCapacity: int
:param OnDemandPercentageAboveBaseCapacity: 超出基础容量部分,按量计费实例所占的比例。取值范围 [0, 100],0 代表超出基础容量的部分仅生产竞价实例,100 代表仅生产按量实例,默认值为 70。按百分比计算按量实例数时,向上取整。
比如,总期望实例数取 3,基础容量取 1,超基础部分按量百分比取 1,则最终按量 2 台(1 台来自基础容量,1 台按百分比向上取整得到),竞价 1台。
注意:此字段可能返回 null,表示取不到有效值。
:type OnDemandPercentageAboveBaseCapacity: int
:param SpotAllocationStrategy: 混合模式下,竞价实例的分配策略。取值包括 COST_OPTIMIZED 和 CAPACITY_OPTIMIZED,默认取 COST_OPTIMIZED。
<br><li> COST_OPTIMIZED,成本优化策略。对于启动配置内的所有机型,按照各机型在各可用区的每核单价由小到大依次尝试。优先尝试购买每核单价最便宜的,如果购买失败则尝试购买次便宜的,以此类推。
<br><li> CAPACITY_OPTIMIZED,容量优化策略。对于启动配置内的所有机型,按照各机型在各可用区的库存情况由大到小依次尝试。优先尝试购买剩余库存最大的机型,这样可尽量降低竞价实例被动回收的发生概率。
注意:此字段可能返回 null,表示取不到有效值。
:type SpotAllocationStrategy: str
:param CompensateWithBaseInstance: 按量实例替补功能。取值范围:
<br><li> TRUE,开启该功能,当所有竞价机型因库存不足等原因全部购买失败后,尝试购买按量实例。
<br><li> FALSE,不开启该功能,伸缩组在需要扩容竞价实例时仅尝试所配置的竞价机型。 | [
"r",
":",
"param",
"BaseCapacity",
":",
"混合模式下,基础容量的大小,基础容量部分固定为按量计费实例。默认值",
"0,最大不可超过伸缩组的最大实例数。",
"注意:此字段可能返回",
"null,表示取不到有效值。",
":",
"type",
"BaseCapacity",
":",
"int",
":",
"param",
"OnDemandPercentageAboveBaseCapacity",
":",
"超出基础容量部分,按量计费实例所占的比例。取值范围",
"[",
"0",
"100"... | def __init__(self):
r"""
:param BaseCapacity: 混合模式下,基础容量的大小,基础容量部分固定为按量计费实例。默认值 0,最大不可超过伸缩组的最大实例数。
注意:此字段可能返回 null,表示取不到有效值。
:type BaseCapacity: int
:param OnDemandPercentageAboveBaseCapacity: 超出基础容量部分,按量计费实例所占的比例。取值范围 [0, 100],0 代表超出基础容量的部分仅生产竞价实例,100 代表仅生产按量实例,默认值为 70。按百分比计算按量实例数时,向上取整。
比如,总期望实例数取 3,基础容量取 1,超基础部分按量百分比取 1,则最终按量 2 台(1 台来自基础容量,1 台按百分比向上取整得到),竞价 1台。
注意:此字段可能返回 null,表示取不到有效值。
:type OnDemandPercentageAboveBaseCapacity: int
:param SpotAllocationStrategy: 混合模式下,竞价实例的分配策略。取值包括 COST_OPTIMIZED 和 CAPACITY_OPTIMIZED,默认取 COST_OPTIMIZED。
<br><li> COST_OPTIMIZED,成本优化策略。对于启动配置内的所有机型,按照各机型在各可用区的每核单价由小到大依次尝试。优先尝试购买每核单价最便宜的,如果购买失败则尝试购买次便宜的,以此类推。
<br><li> CAPACITY_OPTIMIZED,容量优化策略。对于启动配置内的所有机型,按照各机型在各可用区的库存情况由大到小依次尝试。优先尝试购买剩余库存最大的机型,这样可尽量降低竞价实例被动回收的发生概率。
注意:此字段可能返回 null,表示取不到有效值。
:type SpotAllocationStrategy: str
:param CompensateWithBaseInstance: 按量实例替补功能。取值范围:
<br><li> TRUE,开启该功能,当所有竞价机型因库存不足等原因全部购买失败后,尝试购买按量实例。
<br><li> FALSE,不开启该功能,伸缩组在需要扩容竞价实例时仅尝试所配置的竞价机型。
默认取值: TRUE。
注意:此字段可能返回 null,表示取不到有效值。
:type CompensateWithBaseInstance: bool
"""
self.BaseCapacity = None
self.OnDemandPercentageAboveBaseCapacity = None
self.SpotAllocationStrategy = None
self.CompensateWithBaseInstance = None | [
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"BaseCapacity",
"=",
"None",
"self",
".",
"OnDemandPercentageAboveBaseCapacity",
"=",
"None",
"self",
".",
"SpotAllocationStrategy",
"=",
"None",
"self",
".",
"CompensateWithBaseInstance",
"=",
"None"
] | https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/autoscaling/v20180419/models.py#L4778-L4803 | ||
IndicoDataSolutions/finetune | 83ba222eed331df64b2fa7157bb64f0a2eef4a2c | finetune/datasets/multinli.py | python | MultiNLI.download | (self) | Download Stanford Sentiment Treebank to data directory | Download Stanford Sentiment Treebank to data directory | [
"Download",
"Stanford",
"Sentiment",
"Treebank",
"to",
"data",
"directory"
def download(self):
    """
    Download the MultiNLI dev split to the local data directory.

    Creates the parent directory of ``self.filename`` if needed, fetches
    the CSV from S3 and writes it to ``DATA_PATH``.

    NOTE(review): the parent directory is prepared for ``self.filename``
    but the payload is written to ``DATA_PATH`` — confirm both point to
    the same file.
    """
    path = Path(self.filename)
    path.parent.mkdir(parents=True, exist_ok=True)
    remote_url = "https://s3.amazonaws.com/enso-data/multinli.dev.csv"
    response = requests.get(remote_url)
    # Fail loudly on HTTP errors instead of silently saving an error page.
    response.raise_for_status()
    # Context manager closes the handle promptly; the original relied on
    # the garbage collector to close the leaked file object.
    with open(DATA_PATH, 'wb') as f:
        f.write(response.content)
"def",
"download",
"(",
"self",
")",
":",
"path",
"=",
"Path",
"(",
"self",
".",
"filename",
")",
"path",
".",
"parent",
".",
"mkdir",
"(",
"parents",
"=",
"True",
",",
"exist_ok",
"=",
"True",
")",
"remote_url",
"=",
"\"https://s3.amazonaws.com/enso-data/... | https://github.com/IndicoDataSolutions/finetune/blob/83ba222eed331df64b2fa7157bb64f0a2eef4a2c/finetune/datasets/multinli.py#L31-L41 | ||
LiyuanLucasLiu/RAdam | d9fd30a337894c4003768561d45e8730dbd41333 | cifar_imagenet/models/imagenet/resnext.py | python | ResNeXt.__init__ | (self, baseWidth, cardinality, layers, num_classes) | Constructor
Args:
baseWidth: baseWidth for ResNeXt.
cardinality: number of convolution groups.
layers: config of layers, e.g., [3, 4, 6, 3]
num_classes: number of classes | Constructor
Args:
baseWidth: baseWidth for ResNeXt.
cardinality: number of convolution groups.
layers: config of layers, e.g., [3, 4, 6, 3]
num_classes: number of classes | [
"Constructor",
"Args",
":",
"baseWidth",
":",
"baseWidth",
"for",
"ResNeXt",
".",
"cardinality",
":",
"number",
"of",
"convolution",
"groups",
".",
"layers",
":",
"config",
"of",
"layers",
"e",
".",
"g",
".",
"[",
"3",
"4",
"6",
"3",
"]",
"num_classes",
def __init__(self, baseWidth, cardinality, layers, num_classes):
    """ Constructor
    Args:
        baseWidth: bottleneck base width for ResNeXt.
        cardinality: number of convolution groups.
        layers: config of layers, e.g., [3, 4, 6, 3]
        num_classes: number of classes
    """
    super(ResNeXt, self).__init__()
    block = Bottleneck
    self.cardinality = cardinality
    self.baseWidth = baseWidth
    self.num_classes = num_classes
    # Channel count entering the first residual stage; presumably
    # updated by _make_layer as stages are built -- TODO confirm.
    self.inplanes = 64
    self.output_size = 64
    # ImageNet-style stem: 7x7 stride-2 conv + BN + ReLU + 3x3 stride-2 max-pool.
    self.conv1 = nn.Conv2d(3, 64, 7, 2, 3, bias=False)
    self.bn1 = nn.BatchNorm2d(64)
    self.relu = nn.ReLU(inplace=True)
    self.maxpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
    # Four residual stages; base channels double each stage and stages
    # 2-4 downsample with stride 2.
    self.layer1 = self._make_layer(block, 64, layers[0])
    self.layer2 = self._make_layer(block, 128, layers[1], 2)
    self.layer3 = self._make_layer(block, 256, layers[2], 2)
    self.layer4 = self._make_layer(block, 512, layers[3], 2)
    # 7x7 average pool (assumes 224x224 input) followed by the classifier.
    self.avgpool = nn.AvgPool2d(7)
    self.fc = nn.Linear(512 * block.expansion, num_classes)
    # He-style initialization: conv weights ~ N(0, sqrt(2 / fan_out)),
    # batch-norm scale 1 and bias 0.
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            m.weight.data.normal_(0, math.sqrt(2. / n))
        elif isinstance(m, nn.BatchNorm2d):
            m.weight.data.fill_(1)
            m.bias.data.zero_()
"def",
"__init__",
"(",
"self",
",",
"baseWidth",
",",
"cardinality",
",",
"layers",
",",
"num_classes",
")",
":",
"super",
"(",
"ResNeXt",
",",
"self",
")",
".",
"__init__",
"(",
")",
"block",
"=",
"Bottleneck",
"self",
".",
"cardinality",
"=",
"cardina... | https://github.com/LiyuanLucasLiu/RAdam/blob/d9fd30a337894c4003768561d45e8730dbd41333/cifar_imagenet/models/imagenet/resnext.py#L75-L109 | ||
simonw/djangopeople.net | ed04d3c79d03b9c74f3e7f82b2af944e021f8e15 | lib/yadis/filters.py | python | TransformFilterMaker.__init__ | (self, filter_functions) | Initialize the filter maker's state
@param filter_functions: The endpoint transformer functions to
apply to the basic endpoint. These are called in turn
until one of them does not return None, and the result of
that transformer is returned. | Initialize the filter maker's state | [
"Initialize",
"the",
"filter",
"maker",
"s",
"state"
def __init__(self, filter_functions):
    """Remember the endpoint-transformer functions for later application.

    @param filter_functions: callables tried in order against a basic
        endpoint; the first one returning a non-None value wins and its
        result becomes the filter's output.
    """
    self.filter_functions = filter_functions
"def",
"__init__",
"(",
"self",
",",
"filter_functions",
")",
":",
"self",
".",
"filter_functions",
"=",
"filter_functions"
] | https://github.com/simonw/djangopeople.net/blob/ed04d3c79d03b9c74f3e7f82b2af944e021f8e15/lib/yadis/filters.py#L85-L93 | ||
jython/frozen-mirror | b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99 | Lib/xml/sax/xmlreader.py | python | XMLReader.getEntityResolver | (self) | return self._ent_handler | Returns the current EntityResolver. | Returns the current EntityResolver. | [
"Returns",
"the",
"current",
"EntityResolver",
"."
def getEntityResolver(self):
    """Return the EntityResolver currently installed on this reader."""
    resolver = self._ent_handler
    return resolver
"def",
"getEntityResolver",
"(",
"self",
")",
":",
"return",
"self",
".",
"_ent_handler"
] | https://github.com/jython/frozen-mirror/blob/b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99/Lib/xml/sax/xmlreader.py#L50-L52 | |
wikimedia/pywikibot | 81a01ffaec7271bf5b4b170f85a80388420a4e78 | scripts/archive/makecat.py | python | MakeCatBot._setup_menubar | (cls) | Setup treat_page option bar. | Setup treat_page option bar. | [
"Setup",
"treat_page",
"option",
"bar",
"."
def _setup_menubar(cls):
    """Build the treat_page option-bar variants and select the short one.

    ``cls.option_bar`` maps 'e' to the extended menu and 'r' to the
    reduced menu; ``cls.treat_options`` starts out reduced.
    """
    yes_no_ignore = [('yes', 'y'), ('no', 'n'), ('ignore', 'i')]
    small = yes_no_ignore + [('extend', 'e'), ('help', 'h')]
    extended = yes_no_ignore + [
        ('more', 'm'), ('sort key', 'k'), ('skip', 's'), ('check', 'c'),
        ('other', 'o'), ('list', 'l'), ('reduce', 'r'), ('help', 'h')]
    cls.option_bar = {'e': extended, 'r': small}
    cls.treat_options = cls.option_bar['r']
"def",
"_setup_menubar",
"(",
"cls",
")",
":",
"small",
"=",
"[",
"(",
"'yes'",
",",
"'y'",
")",
",",
"(",
"'no'",
",",
"'n'",
")",
",",
"(",
"'ignore'",
",",
"'i'",
")",
",",
"(",
"'extend'",
",",
"'e'",
")",
",",
"(",
"'help'",
",",
"'h'",
... | https://github.com/wikimedia/pywikibot/blob/81a01ffaec7271bf5b4b170f85a80388420a4e78/scripts/archive/makecat.py#L90-L99 | ||
QUANTAXIS/QUANTAXIS | d6eccb97c8385854aa596d6ba8d70ec0655519ff | QUANTAXIS/QAUtil/QADate.py | python | QA_util_id2date | (idx, client) | return temp_str['date'] | explanation:
从数据库中查询通达信时间
params:
* idx->
含义: 数据库index
类型: str
参数支持: []
* client->
含义: 源
类型: pymongo.MongoClient
参数支持: []
return:
str | explanation:
从数据库中查询通达信时间 | [
"explanation",
":",
"从数据库中查询通达信时间"
def QA_util_id2date(idx, client):
    """Look up a TDX (通达信) trading date by its numeric index.

    Translated from the original Chinese docstring.

    Parameters
    ----------
    idx : database index, stored under the ``num`` key.
    client : pymongo.MongoClient
        Source connection; the ``quantaxis.trade_date`` collection
        is queried.

    Returns
    -------
    str
        The ``date`` field of the matching document.
    """
    collection = client.quantaxis.trade_date
    document = collection.find_one({'num': idx})
    return document['date']
"def",
"QA_util_id2date",
"(",
"idx",
",",
"client",
")",
":",
"coll",
"=",
"client",
".",
"quantaxis",
".",
"trade_date",
"temp_str",
"=",
"coll",
".",
"find_one",
"(",
"{",
"'num'",
":",
"idx",
"}",
")",
"return",
"temp_str",
"[",
"'date'",
"]"
] | https://github.com/QUANTAXIS/QUANTAXIS/blob/d6eccb97c8385854aa596d6ba8d70ec0655519ff/QUANTAXIS/QAUtil/QADate.py#L391-L411 | |
zhl2008/awd-platform | 0416b31abea29743387b10b3914581fbe8e7da5e | web_hxb2/lib/python3.5/site-packages/django_redis/cache.py | python | omit_exception | (method) | return _decorator | Simple decorator that intercepts connection
errors and ignores these if settings specify this.
Note: this doesn't handle the `default` argument in .get(). | Simple decorator that intercepts connection
errors and ignores these if settings specify this. | [
"Simple",
"decorator",
"that",
"intercepts",
"connection",
"errors",
"and",
"ignores",
"these",
"if",
"settings",
"specify",
"this",
"."
def omit_exception(method):
    """Decorator that maps connection errors to ``None`` when configured.

    When the cache instance has ``_ignore_exceptions`` set, a
    ``ConnectionInterrupted`` raised by the wrapped method yields
    ``None``; otherwise the underlying driver exception (``.parent``)
    is re-raised.
    Note: this doesn't handle the `default` argument in .get().
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        try:
            return method(self, *args, **kwargs)
        except ConnectionInterrupted as exc:
            if not self._ignore_exceptions:
                raise exc.parent
            return None
    return wrapper
"def",
"omit_exception",
"(",
"method",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"method",
")",
"def",
"_decorator",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"method",
"(",
"self",
",",
"*",
"arg... | https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/django_redis/cache.py#L14-L32 | |
numba/numba | bf480b9e0da858a65508c2b17759a72ee6a44c51 | numba/core/dataflow.py | python | DataFlowAnalysis.op_SLICE_2 | (self, info, inst) | TOS = TOS1[:TOS] | TOS = TOS1[:TOS] | [
"TOS",
"=",
"TOS1",
"[",
":",
"TOS",
"]"
def op_SLICE_2(self, info, inst):
    """
    TOS = TOS1[:TOS]

    Pops the stop bound (TOS) and the sliced object (TOS1), records the
    slice operation plus the temporaries it needs, then pushes the
    result variable.
    """
    stop = info.pop()
    base = info.pop()
    # Temporaries are created in this exact order so variable numbering
    # matches the original implementation.
    res = info.make_temp()
    slicevar = info.make_temp()
    indexvar = info.make_temp()
    nonevar = info.make_temp()
    info.append(
        inst,
        base=base,
        stop=stop,
        res=res,
        slicevar=slicevar,
        indexvar=indexvar,
        nonevar=nonevar,
    )
    info.push(res)
"def",
"op_SLICE_2",
"(",
"self",
",",
"info",
",",
"inst",
")",
":",
"tos",
"=",
"info",
".",
"pop",
"(",
")",
"tos1",
"=",
"info",
".",
"pop",
"(",
")",
"res",
"=",
"info",
".",
"make_temp",
"(",
")",
"slicevar",
"=",
"info",
".",
"make_temp",
... | https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/core/dataflow.py#L503-L515 | ||
def should_apply_to(self, connection: 'ConnectionAPI') -> bool:
    """Return True when this behavior's qualifier accepts *connection*."""
    # mypy bug: https://github.com/python/mypy/issues/708
    qualifier = self.qualifier
    return qualifier(connection, self.logic)
"def",
"should_apply_to",
"(",
"self",
",",
"connection",
":",
"'ConnectionAPI'",
")",
"->",
"bool",
":",
"# mypy bug: https://github.com/python/mypy/issues/708",
"return",
"self",
".",
"qualifier",
"(",
"connection",
",",
"self",
".",
"logic",
")"
] | https://github.com/ethereum/trinity/blob/6383280c5044feb06695ac2f7bc1100b7bcf4fe0/p2p/behaviors.py#L37-L39 | |||
nate-parrott/Flashlight | c3a7c7278a1cccf8918e7543faffc68e863ff5ab | flashlightplugins/cloudstorage/api_utils.py | python | _RetryWrapper.__init__ | (self,
retry_params,
retriable_exceptions=_RETRIABLE_EXCEPTIONS,
should_retry=lambda r: False) | Init.
Args:
retry_params: an RetryParams instance.
retriable_exceptions: a list of exception classes that are retriable.
should_retry: a function that takes a result from the tasklet and returns
a boolean. True if the result should be retried. | Init. | [
"Init",
"."
def __init__(self,
             retry_params,
             retriable_exceptions=_RETRIABLE_EXCEPTIONS,
             should_retry=lambda r: False):
    """Initialize retry configuration.

    Args:
      retry_params: a RetryParams instance.
      retriable_exceptions: exception classes considered retriable.
      should_retry: predicate over a tasklet result; returning True
          means the result should be retried.
    """
    self.retry_params = retry_params
    self.retriable_exceptions = retriable_exceptions
    self.should_retry = should_retry
"def",
"__init__",
"(",
"self",
",",
"retry_params",
",",
"retriable_exceptions",
"=",
"_RETRIABLE_EXCEPTIONS",
",",
"should_retry",
"=",
"lambda",
"r",
":",
"False",
")",
":",
"self",
".",
"retry_params",
"=",
"retry_params",
"self",
".",
"retriable_exceptions",
... | https://github.com/nate-parrott/Flashlight/blob/c3a7c7278a1cccf8918e7543faffc68e863ff5ab/flashlightplugins/cloudstorage/api_utils.py#L118-L132 | ||
kubernetes-client/python | 47b9da9de2d02b2b7a34fbe05afb44afd130d73a | kubernetes/client/models/v1beta1_event.py | python | V1beta1Event.type | (self) | return self._type | Gets the type of this V1beta1Event. # noqa: E501
type is the type of this event (Normal, Warning), new types could be added in the future. It is machine-readable. # noqa: E501
:return: The type of this V1beta1Event. # noqa: E501
:rtype: str | Gets the type of this V1beta1Event. # noqa: E501 | [
"Gets",
"the",
"type",
"of",
"this",
"V1beta1Event",
".",
"#",
"noqa",
":",
"E501"
def type(self):
    """Gets the type of this V1beta1Event.  # noqa: E501

    type is the type of this event (Normal, Warning); new types could be
    added in the future. It is machine-readable.  # noqa: E501

    :return: The type of this V1beta1Event.  # noqa: E501
    :rtype: str
    """
    event_type = self._type
    return event_type
"def",
"type",
"(",
"self",
")",
":",
"return",
"self",
".",
"_type"
] | https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1beta1_event.py#L495-L503 | |
googleads/google-ads-python | 2a1d6062221f6aad1992a6bcca0e7e4a93d2db86 | google/ads/googleads/v8/services/services/bidding_seasonality_adjustment_service/client.py | python | BiddingSeasonalityAdjustmentServiceClient._get_default_mtls_endpoint | (api_endpoint) | return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") | Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint. | Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint. | [
"Convert",
"api",
"endpoint",
"to",
"mTLS",
"endpoint",
".",
"Convert",
"*",
".",
"sandbox",
".",
"googleapis",
".",
"com",
"and",
"*",
".",
"googleapis",
".",
"com",
"to",
"*",
".",
"mtls",
".",
"sandbox",
".",
"googleapis",
".",
"com",
"and",
"*",
def _get_default_mtls_endpoint(api_endpoint):
    """Convert an API endpoint to its mTLS equivalent.

    "*.sandbox.googleapis.com" and "*.googleapis.com" become
    "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com"
    respectively; already-mTLS hosts, non-googleapis hosts and falsy
    values are returned unchanged.

    Args:
        api_endpoint (Optional[str]): the api endpoint to convert.
    Returns:
        str: converted mTLS api endpoint.
    """
    if not api_endpoint:
        return api_endpoint

    pattern = re.compile(
        r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
    )
    groups = pattern.match(api_endpoint).groupdict()

    # Already mTLS, or not a googleapis.com host: nothing to do.
    if groups["mtls"] or not groups["googledomain"]:
        return api_endpoint

    if groups["sandbox"]:
        return api_endpoint.replace(
            "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
        )
    return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
"def",
"_get_default_mtls_endpoint",
"(",
"api_endpoint",
")",
":",
"if",
"not",
"api_endpoint",
":",
"return",
"api_endpoint",
"mtls_endpoint_re",
"=",
"re",
".",
"compile",
"(",
"r\"(?P<name>[^.]+)(?P<mtls>\\.mtls)?(?P<sandbox>\\.sandbox)?(?P<googledomain>\\.googleapis\\.com)?\... | https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v8/services/services/bidding_seasonality_adjustment_service/client.py#L88-L114 | |
xmengli/H-DenseUNet | 06cc436a43196310fe933d114a353839907cc176 | Keras-2.0.8/keras/utils/data_utils.py | python | OrderedEnqueuer._run | (self) | Function to submit request to the executor and queue the `Future` objects. | Function to submit request to the executor and queue the `Future` objects. | [
"Function",
"to",
"submit",
"request",
"to",
"the",
"executor",
"and",
"queue",
"the",
"Future",
"objects",
"."
def _run(self):
    """Submit `get_index` jobs to the executor and enqueue the resulting
    async handles, epoch after epoch, until `stop_signal` is set."""
    # Local list of indices so shuffling never mutates the Sequence itself.
    sequence = list(range(len(self.sequence)))
    while True:
        if self.shuffle:
            # In-place reshuffle at the start of every epoch.
            random.shuffle(sequence)
        for i in sequence:
            if self.stop_signal.is_set():
                return
            # Blocking put applies backpressure: never schedule more
            # work than the queue can hold.
            self.queue.put(
                self.executor.apply_async(get_index,
                                          (self.sequence, i)), block=True)
        # Call the internal on epoch end.
        self.sequence.on_epoch_end()
"def",
"_run",
"(",
"self",
")",
":",
"sequence",
"=",
"list",
"(",
"range",
"(",
"len",
"(",
"self",
".",
"sequence",
")",
")",
")",
"while",
"True",
":",
"if",
"self",
".",
"shuffle",
":",
"random",
".",
"shuffle",
"(",
"sequence",
")",
"for",
... | https://github.com/xmengli/H-DenseUNet/blob/06cc436a43196310fe933d114a353839907cc176/Keras-2.0.8/keras/utils/data_utils.py#L479-L492 | ||
def checkfile(self, dirpath):
    """Probe candidate file paths under `dirpath` on the target.

    Consumes `self.tmp_files` destructively: entries are popped one at a
    time, each formatted with the current extension from
    `self.extension`, checked via `self.checker`, and hits recorded in
    `self.success`.  With `self.stop` set, the scan aborts on the first
    hit.

    NOTE(review): each popped entry is formatted with only one extension
    (files and extensions advance together) — presumably `tmp_files` is
    pre-expanded per extension by the caller; confirm.
    """
    # Work on a copy so the redirect flag does not leak into the shared
    # victim object.
    victim = deepcopy(self.victim)
    if self.type == 'location':
        # Location-based detection needs redirects to be followed.
        victim.redirect = True
    while len(self.tmp_files) > 0:
        for ext in self.extension:
            if len(self.tmp_files) == 0:
                return
            # Pop from the front: each candidate is tried exactly once.
            filepath = dirpath + '/' + self.tmp_files.pop(0).format(ext = ext)
            if self.checker(victim, filepath):
                self.success.append('FILE: ' + filepath)
                self.frmwk.print_success('FOUND FILE: %s' % filepath)
                if self.stop:
                    # Stop-on-first-hit: discard the remaining candidates.
                    self.tmp_files = []
                    return
            else:
                self.frmwk.print_error('NOT FOUND: %s' % filepath)
"def",
"checkfile",
"(",
"self",
",",
"dirpath",
")",
":",
"victim",
"=",
"deepcopy",
"(",
"self",
".",
"victim",
")",
"if",
"self",
".",
"type",
"==",
"'location'",
":",
"victim",
".",
"redirect",
"=",
"True",
"while",
"len",
"(",
"self",
".",
"tmp_... | https://github.com/santatic/web2attack/blob/44b6e481a3d56cf0d98073ae0fb69833dda563d9/w2a/modules/scan/vuln_file.py#L156-L172 | ||||
oracle/graalpython | 577e02da9755d916056184ec441c26e00b70145c | graalpython/lib-python/3/lzma.py | python | LZMAFile.readline | (self, size=-1) | return self._buffer.readline(size) | Read a line of uncompressed bytes from the file.
The terminating newline (if present) is retained. If size is
non-negative, no more than size bytes will be read (in which
case the line may be incomplete). Returns b'' if already at EOF. | Read a line of uncompressed bytes from the file. | [
"Read",
"a",
"line",
"of",
"uncompressed",
"bytes",
"from",
"the",
"file",
"."
def readline(self, size=-1):
    """Read a line of uncompressed bytes from the file.

    The terminating newline (if present) is retained.  When *size* is
    non-negative, at most *size* bytes are read, so the returned line
    may be incomplete.  Returns b'' if already at EOF.
    """
    # Raises if the file is closed or was not opened for reading.
    self._check_can_read()
    line = self._buffer.readline(size)
    return line
"def",
"readline",
"(",
"self",
",",
"size",
"=",
"-",
"1",
")",
":",
"self",
".",
"_check_can_read",
"(",
")",
"return",
"self",
".",
"_buffer",
".",
"readline",
"(",
"size",
")"
] | https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/lzma.py#L214-L222 | |
quay/quay | b7d325ed42827db9eda2d9f341cb5a6cdfd155a6 | util/config/configdocs/html_output.py | python | HtmlOutput.generate_output | (self, parsed_items) | return (
self.__get_html_begin() + self.__get_html_middle(parsed_items) + self.__get_html_end()
) | Returns generated HTML strin. | Returns generated HTML strin. | [
"Returns",
"generated",
"HTML",
"strin",
"."
def generate_output(self, parsed_items):
    """
    Return the generated HTML document as a single string.
    """
    parts = [
        self.__get_html_begin(),
        self.__get_html_middle(parsed_items),
        self.__get_html_end(),
    ]
    return "".join(parts)
"def",
"generate_output",
"(",
"self",
",",
"parsed_items",
")",
":",
"return",
"(",
"self",
".",
"__get_html_begin",
"(",
")",
"+",
"self",
".",
"__get_html_middle",
"(",
"parsed_items",
")",
"+",
"self",
".",
"__get_html_end",
"(",
")",
")"
] | https://github.com/quay/quay/blob/b7d325ed42827db9eda2d9f341cb5a6cdfd155a6/util/config/configdocs/html_output.py#L9-L15 | |
nicholastoddsmith/poeai | e95add37348f402b7e42f0e978b6af466dc0b055 | ProjMap.py | python | ProjMap.Solve3DT | (self, x, y, M = None) | return R.T | Solve for 3d coords given 2d coords (assuming on xy plane)
x: x value of pixel coordinates
y: y value of pixel coordinates | Solve for 3d coords given 2d coords (assuming on xy plane)
x: x value of pixel coordinates
y: y value of pixel coordinates | [
"Solve",
"for",
"3d",
"coords",
"given",
"2d",
"coords",
"(",
"assuming",
"on",
"xy",
"plane",
")",
"x",
":",
"x",
"value",
"of",
"pixel",
"coordinates",
"y",
":",
"y",
"value",
"of",
"pixel",
"coordinates"
def Solve3DT(self, x, y, M = None):
    '''
    Solve for 3d coords given 2d coords (assuming on xy plane)
    x: x value of pixel coordinates
    y: y value of pixel coordinates
    M: optional projection matrix; defaults to self.TM
    Returns R.T with the world x and y coordinates filled in and the
    z row left at 0, since the point is assumed to lie on the xy plane.
    '''
    if M is None:
        M = self.TM
    # Common subexpressions of the closed-form inverse projection.
    s1 = M[3, 1] - y * M[3, 2]
    s2 = M[1, 1] - y * M[1, 2]
    s3 = -M[0, 1] + y * M[0, 2]
    try:
        # Vectorized path: x and y are arrays of pixel coordinates.
        R = np.zeros((3, len(x)))
    except TypeError:
        # len() failed, so x/y are scalars: solve for a single point.
        R = np.zeros((3))
    # NOTE(review): the formulas below invert the projective mapping for
    # a point constrained to z = 0, in the matrix convention used by
    # self.TM — confirm the convention before reusing elsewhere.
    R[0] = (s2 * (M[3, 0] - x * M[3, 2]) - (M[1, 0] - x * M[1, 2]) * s1)/((M[0, 1] - y * M[0, 2]) * (M[1, 0] - x * M[1, 2]) - (M[0, 0] - x * M[0, 2]) * (M[1, 1] - y * M[1, 2]))
    R[1] = (M[3, 0] * -s3 + M[0, 0] * -s1 + x * (M[3, 1] * M[0, 2] - M[0, 1] * M[3, 2]))/( M[1, 0] * s3 + M[0, 0] * s2 + x * (-M[1, 1] * M[0, 2] + M[0, 1] * M[1, 2]))
    return R.T
"def",
"Solve3DT",
"(",
"self",
",",
"x",
",",
"y",
",",
"M",
"=",
"None",
")",
":",
"if",
"M",
"is",
"None",
":",
"M",
"=",
"self",
".",
"TM",
"s1",
"=",
"M",
"[",
"3",
",",
"1",
"]",
"-",
"y",
"*",
"M",
"[",
"3",
",",
"2",
"]",
"s2"... | https://github.com/nicholastoddsmith/poeai/blob/e95add37348f402b7e42f0e978b6af466dc0b055/ProjMap.py#L78-L95 | |
mrJean1/PyGeodesy | 7da5ca71aa3edb7bc49e219e0b8190686e1a7965 | pygeodesy/named.py | python | _NamedTuple._xtend | (self, xTuple, *items) | (INTERNAL) Extend this C{Named-Tuple} with C{items} to an other B{C{xTuple}}. | (INTERNAL) Extend this C{Named-Tuple} with C{items} to an other B{C{xTuple}}. | [
"(",
"INTERNAL",
")",
"Extend",
"this",
"C",
"{",
"Named",
"-",
"Tuple",
"}",
"with",
"C",
"{",
"items",
"}",
"to",
"an",
"other",
"B",
"{",
"C",
"{",
"xTuple",
"}}",
"."
def _xtend(self, xTuple, *items):
    '''(INTERNAL) Extend this C{Named-Tuple} with C{items} to an other B{C{xTuple}}.

       @arg xTuple: Target named-tuple subclass to build.
       @arg items: Values appended after this tuple's own values.

       @return: An instance of B{C{xTuple}} carrying this tuple's name.

       @raise TypeError: The names of B{C{xTuple}} are not this tuple's
                         names followed by one name per extra item.
    '''
    # Target must be a _NamedTuple subclass whose field names are
    # exactly this tuple's names plus len(items) additional names.
    if (issubclassof(xTuple, _NamedTuple) and
        (len(self._Names_) + len(items)) == len(xTuple._Names_) and
        self._Names_ == xTuple._Names_[:len(self)]):
        return self._xnamed(xTuple(self + items))  # *(self + items)
    # Incompatible target: report both signatures in the error message.
    c = NN(self.classname, repr(self._Names_))  # PYCHOK no cover
    x = NN(xTuple.__name__, repr(xTuple._Names_))  # PYCHOK no cover
    raise TypeError(_SPACE_(c, _vs_, x))
"def",
"_xtend",
"(",
"self",
",",
"xTuple",
",",
"*",
"items",
")",
":",
"if",
"(",
"issubclassof",
"(",
"xTuple",
",",
"_NamedTuple",
")",
"and",
"(",
"len",
"(",
"self",
".",
"_Names_",
")",
"+",
"len",
"(",
"items",
")",
")",
"==",
"len",
"("... | https://github.com/mrJean1/PyGeodesy/blob/7da5ca71aa3edb7bc49e219e0b8190686e1a7965/pygeodesy/named.py#L1047-L1056 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.