Dataset columns (name, type, and observed value range):

    nwo                 string, length 5 to 106
    sha                 string, length 40 (fixed)
    path                string, length 4 to 174
    language            string, 1 distinct value (all rows: python)
    identifier          string, length 1 to 140
    parameters          string, length 0 to 87.7k
    argument_list       string, 1 distinct value
    return_statement    string, length 0 to 426k
    docstring           string, length 0 to 64.3k
    docstring_summary   string, length 0 to 26.3k
    docstring_tokens    list
    function            string, length 18 to 4.83M
    function_tokens     list
    url                 string, length 83 to 304
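Each row below prints its non-empty fields in the column order above: repository (nwo), commit sha, file path, language, fully qualified identifier, parameter list, return statement, docstring, docstring summary, docstring tokens, full function source, function tokens (often truncated with "..."), and a GitHub URL with line anchors. As a minimal, purely illustrative sketch (field names assumed to match the columns, values copied from the first row below, and the helper function entirely hypothetical), such a record could be handled in Python like this:

```python
# Illustrative record shaped like one row of this dump; not part of any dataset tooling.
record = {
    "nwo": "PowerScript/KatanaFramework",
    "sha": "0f6ad90a88de865d58ec26941cb4460501e75496",
    "path": "lib/setuptools/pkg_resources/_vendor/pyparsing.py",
    "language": "python",
    "identifier": "pyparsing_common.convertToDatetime",
    "parameters": '(fmt="%Y-%m-%dT%H:%M:%S.%f")',
    "argument_list": "",                      # empty fields are omitted when rows are printed
    "return_statement": "return cvt_fn",
    "docstring": "Helper to create a parse action for converting parsed datetime string ...",
    "docstring_summary": "Helper to create a parse action for converting parsed datetime string to Python datetime.datetime",
    "docstring_tokens": ["Helper", "to", "create", "..."],
    "function": "def convertToDatetime(fmt=...): ...",
    "function_tokens": ["def", "convertToDatetime", "..."],
    "url": "https://github.com/PowerScript/KatanaFramework/blob/0f6ad90a88de865d58ec26941cb4460501e75496/lib/setuptools/pkg_resources/_vendor/pyparsing.py#L5550-L5569",
}

def has_docstring_summary(rec):
    """Keep only rows whose docstring_summary is non-empty."""
    return bool(rec.get("docstring_summary", "").strip())

print(has_docstring_summary(record))  # True
```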
PowerScript/KatanaFramework
0f6ad90a88de865d58ec26941cb4460501e75496
lib/setuptools/pkg_resources/_vendor/pyparsing.py
python
pyparsing_common.convertToDatetime
(fmt="%Y-%m-%dT%H:%M:%S.%f")
return cvt_fn
Helper to create a parse action for converting parsed datetime string to Python datetime.datetime Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) Example:: dt_expr = pyparsing_common.iso8601_datetime.copy() dt_expr.setParseAction(pyparsing_common.convertToDatetime()) print(dt_expr.parseString("1999-12-31T23:59:59.999")) prints:: [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
Helper to create a parse action for converting parsed datetime string to Python datetime.datetime
[ "Helper", "to", "create", "a", "parse", "action", "for", "converting", "parsed", "datetime", "string", "to", "Python", "datetime", ".", "datetime" ]
def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): """ Helper to create a parse action for converting parsed datetime string to Python datetime.datetime Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) Example:: dt_expr = pyparsing_common.iso8601_datetime.copy() dt_expr.setParseAction(pyparsing_common.convertToDatetime()) print(dt_expr.parseString("1999-12-31T23:59:59.999")) prints:: [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] """ def cvt_fn(s,l,t): try: return datetime.strptime(t[0], fmt) except ValueError as ve: raise ParseException(s, l, str(ve)) return cvt_fn
[ "def", "convertToDatetime", "(", "fmt", "=", "\"%Y-%m-%dT%H:%M:%S.%f\"", ")", ":", "def", "cvt_fn", "(", "s", ",", "l", ",", "t", ")", ":", "try", ":", "return", "datetime", ".", "strptime", "(", "t", "[", "0", "]", ",", "fmt", ")", "except", "ValueE...
https://github.com/PowerScript/KatanaFramework/blob/0f6ad90a88de865d58ec26941cb4460501e75496/lib/setuptools/pkg_resources/_vendor/pyparsing.py#L5550-L5569
jbjorne/TEES
caf19a4a1352ac59f5dc13a8684cc42ce4342d9d
Utils/Download.py
python
extractPackage
(path, destPath, subPath=None)
return names
[]
def extractPackage(path, destPath, subPath=None): global pbar if path.endswith('.zip'): opener, mode = zipfile.ZipFile, 'r' namelister = zipfile.ZipFile.namelist elif path.endswith('.tar.gz') or path.endswith('.tgz'): opener, mode = tarfile.open, 'r:gz' namelister = tarfile.TarFile.getnames elif path.endswith('.tar.bz2') or path.endswith('.tbz'): opener, mode = tarfile.open, 'r:bz2' namelister = tarfile.TarFile.getnames else: raise ValueError, "Could not extract `%s` as no appropriate extractor is found" % path widgets = [' [', Bar(), '] ', Percentage(), ' ', ETA()] pbar = ProgressBar(widgets=widgets, maxval=100) pbar.start() #package = opener(fileobj=SizeReportingFile(path), mode=mode) package = opener(path, mode) names = namelister(package) if subPath == None: extractWithProgress(package, names, destPath) else: tempdir = tempfile.mkdtemp() extractWithProgress(package, names, tempdir) if os.path.exists(destPath): shutil.rmtree(destPath) shutil.move(os.path.join(tempdir, subPath), destPath) shutil.rmtree(tempdir) package.close() pbar.finish() pbar = None return names
[ "def", "extractPackage", "(", "path", ",", "destPath", ",", "subPath", "=", "None", ")", ":", "global", "pbar", "if", "path", ".", "endswith", "(", "'.zip'", ")", ":", "opener", ",", "mode", "=", "zipfile", ".", "ZipFile", ",", "'r'", "namelister", "="...
https://github.com/jbjorne/TEES/blob/caf19a4a1352ac59f5dc13a8684cc42ce4342d9d/Utils/Download.py#L61-L97
online-judge-tools/oj
70f8a29ae6a1f551fee50b35b61354ccbc81505f
onlinejudge_command/pretty_printers.py
python
_tokens_from_line_diff_ops
(ops: List[_MergedDiffOp], *, char_in_line: int)
return tokens
[]
def _tokens_from_line_diff_ops(ops: List[_MergedDiffOp], *, char_in_line: int) -> List[_PrettyToken]: if not ops: return [_PrettyToken(_PrettyTokenType.HINT, '(no diff)')] left_width = char_in_line // 2 # calculate the widths of lineno max_left_linno = 0 max_right_linno = 0 for op in ops: if op.left_lineno is not None: max_left_linno = op.left_lineno if op.right_lineno is not None: max_right_linno = op.right_lineno left_lineno_width = len(str(max_left_linno + 1)) right_lineno_width = len(str(max_right_linno + 1)) assert left_lineno_width + 2 + 10 <= left_width assert right_lineno_width + 2 + 10 tokens = [] tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, 'output:'.ljust(left_width) + 'expected:')) tokens.append(_PrettyToken(_PrettyTokenType.NEWLINE, '\n')) for op in ops: if op == _MergedDiffOpDots: tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, '...'.ljust(left_width) + '...')) tokens.append(_PrettyToken(_PrettyTokenType.NEWLINE, '\n')) continue left_exists = False if op.left_lineno is not None: tokens.append(_PrettyToken(_PrettyTokenType.LINENO, str(op.left_lineno + 1).rjust(left_lineno_width))) tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, '| ')) tokens.extend([_PrettyToken(token.type, token.value.replace('\n', '')) for token in op.left]) padding = left_width - (left_lineno_width + 2 + _len_of_tokens(op.left)) if padding >= 0: tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, ' ' * padding)) left_exists = True else: tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, '\n')) if op.right_lineno is not None: if not left_exists: tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, ' ' * left_width)) tokens.append(_PrettyToken(_PrettyTokenType.LINENO, str(op.right_lineno + 1).rjust(right_lineno_width))) tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, '| ')) tokens.extend(op.right) else: if left_exists: tokens.append(_PrettyToken(_PrettyTokenType.OTHERS, '\n')) return tokens
[ "def", "_tokens_from_line_diff_ops", "(", "ops", ":", "List", "[", "_MergedDiffOp", "]", ",", "*", ",", "char_in_line", ":", "int", ")", "->", "List", "[", "_PrettyToken", "]", ":", "if", "not", "ops", ":", "return", "[", "_PrettyToken", "(", "_PrettyToken...
https://github.com/online-judge-tools/oj/blob/70f8a29ae6a1f551fee50b35b61354ccbc81505f/onlinejudge_command/pretty_printers.py#L582-L629
graphql-python/gql
5440c6c14b74f0414551e0ebebeed187bdf4ae5a
gql/transport/phoenix_channel_websockets.py
python
PhoenixChannelWebsocketsTransport._find_existing_subscription
(self, query_id: int)
return subscription_id
Perform a reverse lookup to find the subscription id matching a listener's query_id.
Perform a reverse lookup to find the subscription id matching a listener's query_id.
[ "Perform", "a", "reverse", "lookup", "to", "find", "the", "subscription", "id", "matching", "a", "listener", "s", "query_id", "." ]
def _find_existing_subscription(self, query_id: int) -> str: """Perform a reverse lookup to find the subscription id matching a listener's query_id. """ subscription_id, _listener_id = self._find_subscription(query_id) if subscription_id is None: raise TransportProtocolError( f"No subscription registered for listener {query_id}" ) return subscription_id
[ "def", "_find_existing_subscription", "(", "self", ",", "query_id", ":", "int", ")", "->", "str", ":", "subscription_id", ",", "_listener_id", "=", "self", ".", "_find_subscription", "(", "query_id", ")", "if", "subscription_id", "is", "None", ":", "raise", "T...
https://github.com/graphql-python/gql/blob/5440c6c14b74f0414551e0ebebeed187bdf4ae5a/gql/transport/phoenix_channel_websockets.py#L398-L408
mynameisfiber/high_performance_python
615341ff066772dc45125fce1866349d555524fe
01_profiling/dowser/julia1_dowser.py
python
calc_pure_python
(draw_output, desired_width, max_iterations)
Create a list of complex co-ordinates (zs) and complex parameters (cs), build Julia set and display
Create a list of complex co-ordinates (zs) and complex parameters (cs), build Julia set and display
[ "Create", "a", "list", "of", "complex", "co", "-", "ordinates", "(", "zs", ")", "and", "complex", "parameters", "(", "cs", ")", "build", "Julia", "set", "and", "display" ]
def calc_pure_python(draw_output, desired_width, max_iterations): """Create a list of complex co-ordinates (zs) and complex parameters (cs), build Julia set and display""" x_step = (float(x2 - x1) / float(desired_width)) y_step = (float(y1 - y2) / float(desired_width)) x = [] y = [] ycoord = y2 while ycoord > y1: y.append(ycoord) ycoord += y_step xcoord = x1 while xcoord < x2: x.append(xcoord) xcoord += x_step # set width and height to the generated pixel counts, rather than the # pre-rounding desired width and height # build a list of co-ordinates and the initial condition for each cell. # Note that our initial condition is a constant and could easily be removed, # we use it to simulate a real-world scenario with several inputs to our # function zs = [] cs = [] for ycoord in y: for xcoord in x: zs.append(complex(xcoord, ycoord)) cs.append(complex(c_real, c_imag)) launch_memory_usage_server() print "Length of x:", len(x) print "Total elements:", len(zs) start_time = time.time() output = calculate_z_serial_purepython(max_iterations, zs, cs) end_time = time.time() secs = end_time - start_time print calculate_z_serial_purepython.func_name + " took", secs, "seconds" # this sum is expected for 1000^2 grid with 300 iterations assert sum(output) == 33219980 print "now waiting..." while True: time.sleep(1)
[ "def", "calc_pure_python", "(", "draw_output", ",", "desired_width", ",", "max_iterations", ")", ":", "x_step", "=", "(", "float", "(", "x2", "-", "x1", ")", "/", "float", "(", "desired_width", ")", ")", "y_step", "=", "(", "float", "(", "y1", "-", "y2...
https://github.com/mynameisfiber/high_performance_python/blob/615341ff066772dc45125fce1866349d555524fe/01_profiling/dowser/julia1_dowser.py#L36-L80
inventree/InvenTree
4a5e4a88ac3e91d64a21e8cab3708ecbc6e2bd8b
InvenTree/company/models.py
python
SupplierPart.api_instance_filters
(self)
return { 'manufacturer_part': { 'part': self.part.pk } }
[]
def api_instance_filters(self): return { 'manufacturer_part': { 'part': self.part.pk } }
[ "def", "api_instance_filters", "(", "self", ")", ":", "return", "{", "'manufacturer_part'", ":", "{", "'part'", ":", "self", ".", "part", ".", "pk", "}", "}" ]
https://github.com/inventree/InvenTree/blob/4a5e4a88ac3e91d64a21e8cab3708ecbc6e2bd8b/InvenTree/company/models.py#L479-L485
bcbio/bcbio-nextgen
c80f9b6b1be3267d1f981b7035e3b72441d258f2
bcbio/broad/picardrun.py
python
picard_fastq_to_bam
(picard, fastq_one, fastq_two, out_dir, names, order="queryname")
return out_bam
Convert fastq file(s) to BAM, adding sample, run group and platform information.
Convert fastq file(s) to BAM, adding sample, run group and platform information.
[ "Convert", "fastq", "file", "(", "s", ")", "to", "BAM", "adding", "sample", "run", "group", "and", "platform", "information", "." ]
def picard_fastq_to_bam(picard, fastq_one, fastq_two, out_dir, names, order="queryname"): """Convert fastq file(s) to BAM, adding sample, run group and platform information. """ out_bam = os.path.join(out_dir, "%s-fastq.bam" % os.path.splitext(os.path.basename(fastq_one))[0]) if not file_exists(out_bam): with tx_tmpdir(picard._config) as tmp_dir: with file_transaction(picard._config, out_bam) as tx_out_bam: opts = [("--FASTQ", fastq_one), ("--READ_GROUP_NAME", names["rg"]), ("--SAMPLE_NAME", names["sample"]), ("--PLATFORM_UNIT", names["pu"]), ("--PLATFORM", names["pl"]), ("--TMP_DIR", tmp_dir), ("--OUTPUT", tx_out_bam), ("--SORT_ORDER", order)] if fastq_two: opts.append(("--FASTQ2", fastq_two)) picard.run("FastqToSam", opts) return out_bam
[ "def", "picard_fastq_to_bam", "(", "picard", ",", "fastq_one", ",", "fastq_two", ",", "out_dir", ",", "names", ",", "order", "=", "\"queryname\"", ")", ":", "out_bam", "=", "os", ".", "path", ".", "join", "(", "out_dir", ",", "\"%s-fastq.bam\"", "%", "os",...
https://github.com/bcbio/bcbio-nextgen/blob/c80f9b6b1be3267d1f981b7035e3b72441d258f2/bcbio/broad/picardrun.py#L154-L173
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-47/fabmetheus_utilities/geometry/manipulation_shapes/flip.py
python
getNewDerivation
(elementNode, prefix, sideLength)
return FlipDerivation(elementNode, prefix)
Get new derivation.
Get new derivation.
[ "Get", "new", "derivation", "." ]
def getNewDerivation(elementNode, prefix, sideLength): 'Get new derivation.' return FlipDerivation(elementNode, prefix)
[ "def", "getNewDerivation", "(", "elementNode", ",", "prefix", ",", "sideLength", ")", ":", "return", "FlipDerivation", "(", "elementNode", ",", "prefix", ")" ]
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-47/fabmetheus_utilities/geometry/manipulation_shapes/flip.py#L74-L76
007gzs/dingtalk-sdk
7979da2e259fdbc571728cae2425a04dbc65850a
dingtalk/client/api/taobao.py
python
TbWangWang.taobao_wangwang_eservice_abs_word_add
( self, word )
return self._top_request( "taobao.wangwang.eservice.abs.word.add", { "word": word } )
添加关键词 为聊天记录查询接口添加关键词。 文档地址:https://open-doc.dingtalk.com/docs/api.htm?apiId=28017 :param word: 关键词
添加关键词 为聊天记录查询接口添加关键词。 文档地址:https://open-doc.dingtalk.com/docs/api.htm?apiId=28017
[ "添加关键词", "为聊天记录查询接口添加关键词。", "文档地址:https", ":", "//", "open", "-", "doc", ".", "dingtalk", ".", "com", "/", "docs", "/", "api", ".", "htm?apiId", "=", "28017" ]
def taobao_wangwang_eservice_abs_word_add( self, word ): """ 添加关键词 为聊天记录查询接口添加关键词。 文档地址:https://open-doc.dingtalk.com/docs/api.htm?apiId=28017 :param word: 关键词 """ return self._top_request( "taobao.wangwang.eservice.abs.word.add", { "word": word } )
[ "def", "taobao_wangwang_eservice_abs_word_add", "(", "self", ",", "word", ")", ":", "return", "self", ".", "_top_request", "(", "\"taobao.wangwang.eservice.abs.word.add\"", ",", "{", "\"word\"", ":", "word", "}", ")" ]
https://github.com/007gzs/dingtalk-sdk/blob/7979da2e259fdbc571728cae2425a04dbc65850a/dingtalk/client/api/taobao.py#L19325-L19341
aparo/pyes
96965174760cb5aa5c92eac7ccff346fb5d53cf1
pyes/managers.py
python
Indices.get_alias
(self, alias)
return status['indices'].keys()
Get the index or indices pointed to by a given alias. (See :ref:`es-guide-reference-api-admin-indices-aliases`) :param alias: the name of an alias :return returns a list of index names. :raise IndexMissingException if the alias does not exist.
Get the index or indices pointed to by a given alias. (See :ref:`es-guide-reference-api-admin-indices-aliases`)
[ "Get", "the", "index", "or", "indices", "pointed", "to", "by", "a", "given", "alias", ".", "(", "See", ":", "ref", ":", "es", "-", "guide", "-", "reference", "-", "api", "-", "admin", "-", "indices", "-", "aliases", ")" ]
def get_alias(self, alias): """ Get the index or indices pointed to by a given alias. (See :ref:`es-guide-reference-api-admin-indices-aliases`) :param alias: the name of an alias :return returns a list of index names. :raise IndexMissingException if the alias does not exist. """ status = self.status([alias]) return status['indices'].keys()
[ "def", "get_alias", "(", "self", ",", "alias", ")", ":", "status", "=", "self", ".", "status", "(", "[", "alias", "]", ")", "return", "status", "[", "'indices'", "]", ".", "keys", "(", ")" ]
https://github.com/aparo/pyes/blob/96965174760cb5aa5c92eac7ccff346fb5d53cf1/pyes/managers.py#L30-L42
securityclippy/elasticintel
aa08d3e9f5ab1c000128e95161139ce97ff0e334
ingest_feed_lambda/dateutil/tz/tz.py
python
tzutc.is_ambiguous
(self, dt)
return False
Whether or not the "wall time" of a given datetime is ambiguous in this zone. :param dt: A :py:class:`datetime.datetime`, naive or time zone aware. :return: Returns ``True`` if ambiguous, ``False`` otherwise. .. versionadded:: 2.6.0
Whether or not the "wall time" of a given datetime is ambiguous in this zone.
[ "Whether", "or", "not", "the", "wall", "time", "of", "a", "given", "datetime", "is", "ambiguous", "in", "this", "zone", "." ]
def is_ambiguous(self, dt): """ Whether or not the "wall time" of a given datetime is ambiguous in this zone. :param dt: A :py:class:`datetime.datetime`, naive or time zone aware. :return: Returns ``True`` if ambiguous, ``False`` otherwise. .. versionadded:: 2.6.0 """ return False
[ "def", "is_ambiguous", "(", "self", ",", "dt", ")", ":", "return", "False" ]
https://github.com/securityclippy/elasticintel/blob/aa08d3e9f5ab1c000128e95161139ce97ff0e334/ingest_feed_lambda/dateutil/tz/tz.py#L46-L60
digidotcom/xbee-python
0757f4be0017530c205175fbee8f9f61be9614d1
digi/xbee/packets/zigbee.py
python
RegisterDeviceStatusPacket._get_api_packet_spec_data_dict
(self)
return {DictKeys.STATUS: "%s (%s)" % (self.__status.code, self.__status.description)}
Override method. .. seealso:: | :meth:`.XBeeAPIPacket._get_api_packet_spec_data_dict`
Override method.
[ "Override", "method", "." ]
def _get_api_packet_spec_data_dict(self): """ Override method. .. seealso:: | :meth:`.XBeeAPIPacket._get_api_packet_spec_data_dict` """ return {DictKeys.STATUS: "%s (%s)" % (self.__status.code, self.__status.description)}
[ "def", "_get_api_packet_spec_data_dict", "(", "self", ")", ":", "return", "{", "DictKeys", ".", "STATUS", ":", "\"%s (%s)\"", "%", "(", "self", ".", "__status", ".", "code", ",", "self", ".", "__status", ".", "description", ")", "}" ]
https://github.com/digidotcom/xbee-python/blob/0757f4be0017530c205175fbee8f9f61be9614d1/digi/xbee/packets/zigbee.py#L351-L359
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/ipma/config_flow.py
python
IpmaFlowHandler.async_step_user
(self, user_input=None)
return await self._show_config_form( name=HOME_LOCATION_NAME, latitude=self.hass.config.latitude, longitude=self.hass.config.longitude, )
Handle a flow initialized by the user.
Handle a flow initialized by the user.
[ "Handle", "a", "flow", "initialized", "by", "the", "user", "." ]
async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" self._errors = {} if user_input is not None: if user_input[CONF_NAME] not in self.hass.config_entries.async_entries( DOMAIN ): return self.async_create_entry( title=user_input[CONF_NAME], data=user_input ) self._errors[CONF_NAME] = "name_exists" # default location is set hass configuration return await self._show_config_form( name=HOME_LOCATION_NAME, latitude=self.hass.config.latitude, longitude=self.hass.config.longitude, )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "self", ".", "_errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "if", "user_input", "[", "CONF_NAME", "]", "not", "in", "self", ".", "hass", ...
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/ipma/config_flow.py#L21-L40
bloomberg/phabricator-tools
09bd1587fe8945d93a891162fd4c89640c6fada7
py/abd/abdt_fs.py
python
Accessor.repo_config_path_list
(self)
return [os.path.join(p, r) for r in os.listdir(p) if r != 'README']
Return a list of string paths to repo configs. :returns: list of string
Return a list of string paths to repo configs.
[ "Return", "a", "list", "of", "string", "paths", "to", "repo", "configs", "." ]
def repo_config_path_list(self): """Return a list of string paths to repo configs. :returns: list of string """ p = self.layout.repository_config_dir return [os.path.join(p, r) for r in os.listdir(p) if r != 'README']
[ "def", "repo_config_path_list", "(", "self", ")", ":", "p", "=", "self", ".", "layout", ".", "repository_config_dir", "return", "[", "os", ".", "path", ".", "join", "(", "p", ",", "r", ")", "for", "r", "in", "os", ".", "listdir", "(", "p", ")", "if...
https://github.com/bloomberg/phabricator-tools/blob/09bd1587fe8945d93a891162fd4c89640c6fada7/py/abd/abdt_fs.py#L391-L398
jgagneastro/coffeegrindsize
22661ebd21831dba4cf32bfc6ba59fe3d49f879c
App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/scipy/cluster/vq.py
python
py_vq
(obs, code_book, check_finite=True)
return code, min_dist
Python version of vq algorithm. The algorithm computes the euclidian distance between each observation and every frame in the code_book. Parameters ---------- obs : ndarray Expects a rank 2 array. Each row is one observation. code_book : ndarray Code book to use. Same format than obs. Should have same number of features (eg columns) than obs. check_finite : bool, optional Whether to check that the input matrices contain only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Default: True Returns ------- code : ndarray code[i] gives the label of the ith obversation, that its code is code_book[code[i]]. mind_dist : ndarray min_dist[i] gives the distance between the ith observation and its corresponding code. Notes ----- This function is slower than the C version but works for all input types. If the inputs have the wrong types for the C versions of the function, this one is called as a last resort. It is about 20 times slower than the C version.
Python version of vq algorithm.
[ "Python", "version", "of", "vq", "algorithm", "." ]
def py_vq(obs, code_book, check_finite=True): """ Python version of vq algorithm. The algorithm computes the euclidian distance between each observation and every frame in the code_book. Parameters ---------- obs : ndarray Expects a rank 2 array. Each row is one observation. code_book : ndarray Code book to use. Same format than obs. Should have same number of features (eg columns) than obs. check_finite : bool, optional Whether to check that the input matrices contain only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Default: True Returns ------- code : ndarray code[i] gives the label of the ith obversation, that its code is code_book[code[i]]. mind_dist : ndarray min_dist[i] gives the distance between the ith observation and its corresponding code. Notes ----- This function is slower than the C version but works for all input types. If the inputs have the wrong types for the C versions of the function, this one is called as a last resort. It is about 20 times slower than the C version. """ obs = _asarray_validated(obs, check_finite=check_finite) code_book = _asarray_validated(code_book, check_finite=check_finite) if obs.ndim != code_book.ndim: raise ValueError("Observation and code_book should have the same rank") if obs.ndim == 1: obs = obs[:, np.newaxis] code_book = code_book[:, np.newaxis] dist = cdist(obs, code_book) code = dist.argmin(axis=1) min_dist = dist[np.arange(len(code)), code] return code, min_dist
[ "def", "py_vq", "(", "obs", ",", "code_book", ",", "check_finite", "=", "True", ")", ":", "obs", "=", "_asarray_validated", "(", "obs", ",", "check_finite", "=", "check_finite", ")", "code_book", "=", "_asarray_validated", "(", "code_book", ",", "check_finite"...
https://github.com/jgagneastro/coffeegrindsize/blob/22661ebd21831dba4cf32bfc6ba59fe3d49f879c/App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/scipy/cluster/vq.py#L213-L263
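The row above documents SciPy's pure-Python fallback for vector quantization. As a quick illustration of the documented return values, here is a hedged sketch using the public `scipy.cluster.vq.vq` entry point (which falls back to this Python version when the C implementation cannot handle the input types); the sample arrays are made up for the example.

```python
import numpy as np
from scipy.cluster.vq import vq

# Three 2-D observations and a two-row code book (illustrative values only).
obs = np.array([[1.0, 1.0], [2.5, 2.4], [9.9, 10.1]])
code_book = np.array([[1.0, 1.0], [10.0, 10.0]])

code, min_dist = vq(obs, code_book)
print(code)      # [0 0 1] -- index of the nearest code book row per observation
print(min_dist)  # Euclidean distance from each observation to its assigned code
```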
datawire/forge
d501be4571dcef5691804c7db7008ee877933c8d
forge/kubernetes.py
python
Kubernetes.resources
(self, yaml_dir)
return sh(*cmd).output.split()
[]
def resources(self, yaml_dir): if is_yaml_empty(yaml_dir): return [] cmd = "kubectl", "apply", "--dry-run", "-R", "-f", yaml_dir, "-o", "name" if self.namespace: cmd += "--namespace", self.namespace return sh(*cmd).output.split()
[ "def", "resources", "(", "self", ",", "yaml_dir", ")", ":", "if", "is_yaml_empty", "(", "yaml_dir", ")", ":", "return", "[", "]", "cmd", "=", "\"kubectl\"", ",", "\"apply\"", ",", "\"--dry-run\"", ",", "\"-R\"", ",", "\"-f\"", ",", "yaml_dir", ",", "\"-o...
https://github.com/datawire/forge/blob/d501be4571dcef5691804c7db7008ee877933c8d/forge/kubernetes.py#L129-L135
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/idlelib/ClassBrowser.py
python
ClassBrowserTreeItem.__init__
(self, name, classes, file)
[]
def __init__(self, name, classes, file): self.name = name self.classes = classes self.file = file try: self.cl = self.classes[self.name] except (IndexError, KeyError): self.cl = None self.isfunction = isinstance(self.cl, pyclbr.Function)
[ "def", "__init__", "(", "self", ",", "name", ",", "classes", ",", "file", ")", ":", "self", ".", "name", "=", "name", "self", ".", "classes", "=", "classes", "self", ".", "file", "=", "file", "try", ":", "self", ".", "cl", "=", "self", ".", "clas...
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/idlelib/ClassBrowser.py#L138-L146
sympy/sympy
d822fcba181155b85ff2b29fe525adbafb22b448
sympy/polys/galoistools.py
python
gf_add_ground
(f, a, p, K)
Compute ``f + a`` where ``f`` in ``GF(p)[x]`` and ``a`` in ``GF(p)``. Examples ======== >>> from sympy.polys.domains import ZZ >>> from sympy.polys.galoistools import gf_add_ground >>> gf_add_ground([3, 2, 4], 2, 5, ZZ) [3, 2, 1]
Compute ``f + a`` where ``f`` in ``GF(p)[x]`` and ``a`` in ``GF(p)``.
[ "Compute", "f", "+", "a", "where", "f", "in", "GF", "(", "p", ")", "[", "x", "]", "and", "a", "in", "GF", "(", "p", ")", "." ]
def gf_add_ground(f, a, p, K): """ Compute ``f + a`` where ``f`` in ``GF(p)[x]`` and ``a`` in ``GF(p)``. Examples ======== >>> from sympy.polys.domains import ZZ >>> from sympy.polys.galoistools import gf_add_ground >>> gf_add_ground([3, 2, 4], 2, 5, ZZ) [3, 2, 1] """ if not f: a = a % p else: a = (f[-1] + a) % p if len(f) > 1: return f[:-1] + [a] if not a: return [] else: return [a]
[ "def", "gf_add_ground", "(", "f", ",", "a", ",", "p", ",", "K", ")", ":", "if", "not", "f", ":", "a", "=", "a", "%", "p", "else", ":", "a", "=", "(", "f", "[", "-", "1", "]", "+", "a", ")", "%", "p", "if", "len", "(", "f", ")", ">", ...
https://github.com/sympy/sympy/blob/d822fcba181155b85ff2b29fe525adbafb22b448/sympy/polys/galoistools.py#L364-L389
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/sympy/categories/baseclasses.py
python
Diagram.__new__
(cls, *args)
return Basic.__new__(cls, Dict(premises), Dict(conclusions), objects)
Construct a new instance of Diagram. If no arguments are supplied, an empty diagram is created. If at least an argument is supplied, ``args[0]`` is interpreted as the premises of the diagram. If ``args[0]`` is a list, it is interpreted as a list of :class:`Morphism`'s, in which each :class:`Morphism` has an empty set of properties. If ``args[0]`` is a Python dictionary or a :class:`Dict`, it is interpreted as a dictionary associating to some :class:`Morphism`'s some properties. If at least two arguments are supplied ``args[1]`` is interpreted as the conclusions of the diagram. The type of ``args[1]`` is interpreted in exactly the same way as the type of ``args[0]``. If only one argument is supplied, the diagram has no conclusions. Examples ======== >>> from sympy.categories import Object, NamedMorphism >>> from sympy.categories import IdentityMorphism, Diagram >>> A = Object("A") >>> B = Object("B") >>> C = Object("C") >>> f = NamedMorphism(A, B, "f") >>> g = NamedMorphism(B, C, "g") >>> d = Diagram([f, g]) >>> IdentityMorphism(A) in d.premises.keys() True >>> g * f in d.premises.keys() True >>> d = Diagram([f, g], {g * f: "unique"}) >>> d.conclusions[g * f] FiniteSet(unique)
Construct a new instance of Diagram.
[ "Construct", "a", "new", "instance", "of", "Diagram", "." ]
def __new__(cls, *args): """ Construct a new instance of Diagram. If no arguments are supplied, an empty diagram is created. If at least an argument is supplied, ``args[0]`` is interpreted as the premises of the diagram. If ``args[0]`` is a list, it is interpreted as a list of :class:`Morphism`'s, in which each :class:`Morphism` has an empty set of properties. If ``args[0]`` is a Python dictionary or a :class:`Dict`, it is interpreted as a dictionary associating to some :class:`Morphism`'s some properties. If at least two arguments are supplied ``args[1]`` is interpreted as the conclusions of the diagram. The type of ``args[1]`` is interpreted in exactly the same way as the type of ``args[0]``. If only one argument is supplied, the diagram has no conclusions. Examples ======== >>> from sympy.categories import Object, NamedMorphism >>> from sympy.categories import IdentityMorphism, Diagram >>> A = Object("A") >>> B = Object("B") >>> C = Object("C") >>> f = NamedMorphism(A, B, "f") >>> g = NamedMorphism(B, C, "g") >>> d = Diagram([f, g]) >>> IdentityMorphism(A) in d.premises.keys() True >>> g * f in d.premises.keys() True >>> d = Diagram([f, g], {g * f: "unique"}) >>> d.conclusions[g * f] FiniteSet(unique) """ premises = {} conclusions = {} # Here we will keep track of the objects which appear in the # premises. objects = EmptySet if len(args) >= 1: # We've got some premises in the arguments. premises_arg = args[0] if isinstance(premises_arg, list): # The user has supplied a list of morphisms, none of # which have any attributes. empty = EmptySet for morphism in premises_arg: objects |= FiniteSet(morphism.domain, morphism.codomain) Diagram._add_morphism_closure(premises, morphism, empty) elif isinstance(premises_arg, dict) or isinstance(premises_arg, Dict): # The user has supplied a dictionary of morphisms and # their properties. for morphism, props in premises_arg.items(): objects |= FiniteSet(morphism.domain, morphism.codomain) Diagram._add_morphism_closure( premises, morphism, FiniteSet(*props) if iterable(props) else FiniteSet(props)) if len(args) >= 2: # We also have some conclusions. conclusions_arg = args[1] if isinstance(conclusions_arg, list): # The user has supplied a list of morphisms, none of # which have any attributes. empty = EmptySet for morphism in conclusions_arg: # Check that no new objects appear in conclusions. if ((sympify(objects.contains(morphism.domain)) is S.true) and (sympify(objects.contains(morphism.codomain)) is S.true)): # No need to add identities and recurse # composites this time. Diagram._add_morphism_closure( conclusions, morphism, empty, add_identities=False, recurse_composites=False) elif isinstance(conclusions_arg, dict) or \ isinstance(conclusions_arg, Dict): # The user has supplied a dictionary of morphisms and # their properties. for morphism, props in conclusions_arg.items(): # Check that no new objects appear in conclusions. if (morphism.domain in objects) and \ (morphism.codomain in objects): # No need to add identities and recurse # composites this time. Diagram._add_morphism_closure( conclusions, morphism, FiniteSet(*props) if iterable(props) else FiniteSet(props), add_identities=False, recurse_composites=False) return Basic.__new__(cls, Dict(premises), Dict(conclusions), objects)
[ "def", "__new__", "(", "cls", ",", "*", "args", ")", ":", "premises", "=", "{", "}", "conclusions", "=", "{", "}", "# Here we will keep track of the objects which appear in the", "# premises.", "objects", "=", "EmptySet", "if", "len", "(", "args", ")", ">=", "...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/categories/baseclasses.py#L643-L742
tanghaibao/jcvi
5e720870c0928996f8b77a38208106ff0447ccb6
jcvi/apps/gbsubmit.py
python
t384
(args)
%prog t384 Print out a table converting between 96 well to 384 well
%prog t384
[ "%prog", "t384" ]
def t384(args): """ %prog t384 Print out a table converting between 96 well to 384 well """ p = OptionParser(t384.__doc__) p.parse_args(args) plate, splate = get_plate() fw = sys.stdout for i in plate: for j, p in enumerate(i): if j != 0: fw.write("|") fw.write(p) fw.write("\n")
[ "def", "t384", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "t384", ".", "__doc__", ")", "p", ".", "parse_args", "(", "args", ")", "plate", ",", "splate", "=", "get_plate", "(", ")", "fw", "=", "sys", ".", "stdout", "for", "i", "in", "pl...
https://github.com/tanghaibao/jcvi/blob/5e720870c0928996f8b77a38208106ff0447ccb6/jcvi/apps/gbsubmit.py#L527-L544
reviewboard/reviewboard
7395902e4c181bcd1d633f61105012ffb1d18e1b
reviewboard/scmtools/clearcase.py
python
ClearCaseDynamicViewClient.__init__
(self, path)
Initialize the client. Args: path (unicode): The path of the view.
Initialize the client.
[ "Initialize", "the", "client", "." ]
def __init__(self, path): """Initialize the client. Args: path (unicode): The path of the view. """ self.path = path
[ "def", "__init__", "(", "self", ",", "path", ")", ":", "self", ".", "path", "=", "path" ]
https://github.com/reviewboard/reviewboard/blob/7395902e4c181bcd1d633f61105012ffb1d18e1b/reviewboard/scmtools/clearcase.py#L649-L656
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-linux/x64/cffi/model.py
python
BaseType.__eq__
(self, other)
return (self.__class__ == other.__class__ and self._get_items() == other._get_items())
[]
def __eq__(self, other): return (self.__class__ == other.__class__ and self._get_items() == other._get_items())
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "return", "(", "self", ".", "__class__", "==", "other", ".", "__class__", "and", "self", ".", "_get_items", "(", ")", "==", "other", ".", "_get_items", "(", ")", ")" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/cffi/model.py#L74-L76
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v9/services/services/conversion_custom_variable_service/client.py
python
ConversionCustomVariableServiceClient.conversion_custom_variable_path
( customer_id: str, conversion_custom_variable_id: str, )
return "customers/{customer_id}/conversionCustomVariables/{conversion_custom_variable_id}".format( customer_id=customer_id, conversion_custom_variable_id=conversion_custom_variable_id, )
Return a fully-qualified conversion_custom_variable string.
Return a fully-qualified conversion_custom_variable string.
[ "Return", "a", "fully", "-", "qualified", "conversion_custom_variable", "string", "." ]
def conversion_custom_variable_path( customer_id: str, conversion_custom_variable_id: str, ) -> str: """Return a fully-qualified conversion_custom_variable string.""" return "customers/{customer_id}/conversionCustomVariables/{conversion_custom_variable_id}".format( customer_id=customer_id, conversion_custom_variable_id=conversion_custom_variable_id, )
[ "def", "conversion_custom_variable_path", "(", "customer_id", ":", "str", ",", "conversion_custom_variable_id", ":", "str", ",", ")", "->", "str", ":", "return", "\"customers/{customer_id}/conversionCustomVariables/{conversion_custom_variable_id}\"", ".", "format", "(", "cust...
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v9/services/services/conversion_custom_variable_service/client.py#L183-L190
nltk/nltk
3f74ac55681667d7ef78b664557487145f51eb02
nltk/draw/tree.py
python
TreeSegmentWidget.replace_child
(self, oldchild, newchild)
Replace the child ``oldchild`` with ``newchild``.
Replace the child ``oldchild`` with ``newchild``.
[ "Replace", "the", "child", "oldchild", "with", "newchild", "." ]
def replace_child(self, oldchild, newchild): """ Replace the child ``oldchild`` with ``newchild``. """ index = self._subtrees.index(oldchild) self._subtrees[index] = newchild self._remove_child_widget(oldchild) self._add_child_widget(newchild) self.update(newchild)
[ "def", "replace_child", "(", "self", ",", "oldchild", ",", "newchild", ")", ":", "index", "=", "self", ".", "_subtrees", ".", "index", "(", "oldchild", ")", "self", ".", "_subtrees", "[", "index", "]", "=", "newchild", "self", ".", "_remove_child_widget", ...
https://github.com/nltk/nltk/blob/3f74ac55681667d7ef78b664557487145f51eb02/nltk/draw/tree.py#L178-L186
myquant/strategy
17595e6bf4a118e1fa87c90bfb0fd78afa69a60b
AR_MA_STOCK/python/ar_ma_stock.py
python
AR_MA_STOCK.fixation_stop_profit_loss
(self, bar)
功能:固定止盈、止损,盈利或亏损超过了设置的比率则执行止盈、止损
功能:固定止盈、止损,盈利或亏损超过了设置的比率则执行止盈、止损
[ "功能:固定止盈、止损", "盈利或亏损超过了设置的比率则执行止盈、止损" ]
def fixation_stop_profit_loss(self, bar): """ 功能:固定止盈、止损,盈利或亏损超过了设置的比率则执行止盈、止损 """ if self.is_fixation_stop == 0: return symbol = bar.exchange + '.' + bar.sec_id pos = self.get_position(bar.exchange, bar.sec_id, OrderSide_Bid) if pos is not None: if pos.fpnl > 0 and pos.fpnl / pos.cost >= self.stop_fixation_profit: self.close_long(bar.exchange, bar.sec_id, 0, pos.volume - pos.volume_today) self.dict_open_close_signal[symbol] = True logging.info( 'fixnation stop profit: close long, symbol:%s, time:%s, price:%.2f, vwap: %s, volume:%s' % (symbol, bar.strtime, bar.close, pos.vwap, pos.volume)) elif pos.fpnl < 0 and pos.fpnl / pos.cost <= -1 * self.stop_fixation_loss: self.close_long(bar.exchange, bar.sec_id, 0, pos.volume - pos.volume_today) self.dict_open_close_signal[symbol] = True logging.info( 'fixnation stop loss: close long, symbol:%s, time:%s, price:%.2f, vwap:%s, volume:%s' % (symbol, bar.strtime, bar.close, pos.vwap, pos.volume))
[ "def", "fixation_stop_profit_loss", "(", "self", ",", "bar", ")", ":", "if", "self", ".", "is_fixation_stop", "==", "0", ":", "return", "symbol", "=", "bar", ".", "exchange", "+", "'.'", "+", "bar", ".", "sec_id", "pos", "=", "self", ".", "get_position",...
https://github.com/myquant/strategy/blob/17595e6bf4a118e1fa87c90bfb0fd78afa69a60b/AR_MA_STOCK/python/ar_ma_stock.py#L429-L456
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v7/services/services/feed_service/client.py
python
FeedServiceClient.common_project_path
(project: str,)
return "projects/{project}".format(project=project,)
Return a fully-qualified project string.
Return a fully-qualified project string.
[ "Return", "a", "fully", "-", "qualified", "project", "string", "." ]
def common_project_path(project: str,) -> str: """Return a fully-qualified project string.""" return "projects/{project}".format(project=project,)
[ "def", "common_project_path", "(", "project", ":", "str", ",", ")", "->", "str", ":", "return", "\"projects/{project}\"", ".", "format", "(", "project", "=", "project", ",", ")" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v7/services/services/feed_service/client.py#L210-L212
cortex-lab/phy
9a330b9437a3d0b40a37a201d147224e6e7fb462
phy/plot/gloo/globject.py
python
GLObject.need_update
(self)
return self._need_update
Whether object needs to be updated
Whether object needs to be updated
[ "Whether", "object", "needs", "to", "be", "updated" ]
def need_update(self): """ Whether object needs to be updated """ return self._need_update
[ "def", "need_update", "(", "self", ")", ":", "return", "self", ".", "_need_update" ]
https://github.com/cortex-lab/phy/blob/9a330b9437a3d0b40a37a201d147224e6e7fb462/phy/plot/gloo/globject.py#L51-L53
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-35/skeinforge_application/skeinforge_plugins/craft.py
python
addToCraftMenu
( menu )
Add a craft plugin menu.
Add a craft plugin menu.
[ "Add", "a", "craft", "plugin", "menu", "." ]
def addToCraftMenu( menu ): "Add a craft plugin menu." settings.ToolDialog().addPluginToMenu( menu, archive.getUntilDot( os.path.abspath( __file__ ) ) ) menu.add_separator() directoryPath = skeinforge_craft.getPluginsDirectoryPath() directoryFolders = settings.getFolders(directoryPath) pluginFileNames = skeinforge_craft.getPluginFileNames() for pluginFileName in pluginFileNames: pluginFolderName = pluginFileName + '_plugins' pluginPath = os.path.join( directoryPath, pluginFileName ) if pluginFolderName in directoryFolders: addSubmenus( menu, pluginFileName, os.path.join( directoryPath, pluginFolderName ), pluginPath ) else: settings.ToolDialog().addPluginToMenu( menu, pluginPath )
[ "def", "addToCraftMenu", "(", "menu", ")", ":", "settings", ".", "ToolDialog", "(", ")", ".", "addPluginToMenu", "(", "menu", ",", "archive", ".", "getUntilDot", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ")", "menu", ".", "add_...
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-35/skeinforge_application/skeinforge_plugins/craft.py#L38-L51
LexPredict/lexpredict-contraxsuite
1d5a2540d31f8f3f1adc442cfa13a7c007319899
sdk/python/sdk/openapi_client/model/task.py
python
Task.openapi_types
()
return { 'pk': (str,), # noqa: E501 'name': (str, none_type,), # noqa: E501 'date_start': (datetime,), # noqa: E501 'date_work_start': (datetime, none_type,), # noqa: E501 'user__username': (str,), # noqa: E501 'date_done': (datetime, none_type,), # noqa: E501 'duration': (str,), # noqa: E501 'progress': (int, none_type,), # noqa: E501 'status': (str, none_type,), # noqa: E501 'has_error': (str,), # noqa: E501 'description': (str,), # noqa: E501 }
This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type.
This must be a method because a model may have properties that are of type self, this must run after the class is loaded
[ "This", "must", "be", "a", "method", "because", "a", "model", "may", "have", "properties", "that", "are", "of", "type", "self", "this", "must", "run", "after", "the", "class", "is", "loaded" ]
def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ return { 'pk': (str,), # noqa: E501 'name': (str, none_type,), # noqa: E501 'date_start': (datetime,), # noqa: E501 'date_work_start': (datetime, none_type,), # noqa: E501 'user__username': (str,), # noqa: E501 'date_done': (datetime, none_type,), # noqa: E501 'duration': (str,), # noqa: E501 'progress': (int, none_type,), # noqa: E501 'status': (str, none_type,), # noqa: E501 'has_error': (str,), # noqa: E501 'description': (str,), # noqa: E501 }
[ "def", "openapi_types", "(", ")", ":", "return", "{", "'pk'", ":", "(", "str", ",", ")", ",", "# noqa: E501", "'name'", ":", "(", "str", ",", "none_type", ",", ")", ",", "# noqa: E501", "'date_start'", ":", "(", "datetime", ",", ")", ",", "# noqa: E501...
https://github.com/LexPredict/lexpredict-contraxsuite/blob/1d5a2540d31f8f3f1adc442cfa13a7c007319899/sdk/python/sdk/openapi_client/model/task.py#L94-L115
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/dates.py
python
RRuleLocator._get_unit
(self)
return self.get_unit_generic(freq)
Return how many days a unit of the locator is; used for intelligent autoscaling.
Return how many days a unit of the locator is; used for intelligent autoscaling.
[ "Return", "how", "many", "days", "a", "unit", "of", "the", "locator", "is", ";", "used", "for", "intelligent", "autoscaling", "." ]
def _get_unit(self): """ Return how many days a unit of the locator is; used for intelligent autoscaling. """ freq = self.rule._rrule._freq return self.get_unit_generic(freq)
[ "def", "_get_unit", "(", "self", ")", ":", "freq", "=", "self", ".", "rule", ".", "_rrule", ".", "_freq", "return", "self", ".", "get_unit_generic", "(", "freq", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/dates.py#L1068-L1074
n1nj4sec/pupy
a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39
pupy/packages/windows/all/reg.py
python
Key.__getitem__
(self, attr)
[]
def __getitem__(self, attr): handle = self._open_key(KEY_QUERY_VALUE) try: return self._query_value(handle, attr) finally: CloseKey(handle)
[ "def", "__getitem__", "(", "self", ",", "attr", ")", ":", "handle", "=", "self", ".", "_open_key", "(", "KEY_QUERY_VALUE", ")", "try", ":", "return", "self", ".", "_query_value", "(", "handle", ",", "attr", ")", "finally", ":", "CloseKey", "(", "handle",...
https://github.com/n1nj4sec/pupy/blob/a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39/pupy/packages/windows/all/reg.py#L714-L719
CastagnaIT/plugin.video.netflix
5cf5fa436eb9956576c0f62aa31a4c7d6c5b8a4a
packages/h2/connection.py
python
H2Connection.inbound_flow_control_window
(self)
return self._inbound_flow_control_window_manager.current_window_size
The size of the inbound flow control window for the connection. This is rarely publicly useful: instead, use :meth:`remote_flow_control_window <h2.connection.H2Connection.remote_flow_control_window>`. This shortcut is largely present to provide a shortcut to this data.
The size of the inbound flow control window for the connection. This is rarely publicly useful: instead, use :meth:`remote_flow_control_window <h2.connection.H2Connection.remote_flow_control_window>`. This shortcut is largely present to provide a shortcut to this data.
[ "The", "size", "of", "the", "inbound", "flow", "control", "window", "for", "the", "connection", ".", "This", "is", "rarely", "publicly", "useful", ":", "instead", "use", ":", "meth", ":", "remote_flow_control_window", "<h2", ".", "connection", ".", "H2Connecti...
def inbound_flow_control_window(self): """ The size of the inbound flow control window for the connection. This is rarely publicly useful: instead, use :meth:`remote_flow_control_window <h2.connection.H2Connection.remote_flow_control_window>`. This shortcut is largely present to provide a shortcut to this data. """ return self._inbound_flow_control_window_manager.current_window_size
[ "def", "inbound_flow_control_window", "(", "self", ")", ":", "return", "self", ".", "_inbound_flow_control_window_manager", ".", "current_window_size" ]
https://github.com/CastagnaIT/plugin.video.netflix/blob/5cf5fa436eb9956576c0f62aa31a4c7d6c5b8a4a/packages/h2/connection.py#L430-L437
IJDykeman/wangTiles
7c1ee2095ebdf7f72bce07d94c6484915d5cae8b
experimental_code/tiles_3d/venv_mac/lib/python2.7/site-packages/pip/_vendor/ipaddress.py
python
_BaseV4._is_hostmask
(self, ip_str)
return False
Test if the IP string is a hostmask (rather than a netmask). Args: ip_str: A string, the potential hostmask. Returns: A boolean, True if the IP string is a hostmask.
Test if the IP string is a hostmask (rather than a netmask).
[ "Test", "if", "the", "IP", "string", "is", "a", "hostmask", "(", "rather", "than", "a", "netmask", ")", "." ]
def _is_hostmask(self, ip_str): """Test if the IP string is a hostmask (rather than a netmask). Args: ip_str: A string, the potential hostmask. Returns: A boolean, True if the IP string is a hostmask. """ bits = ip_str.split('.') try: parts = [x for x in map(int, bits) if x in self._valid_mask_octets] except ValueError: return False if len(parts) != len(bits): return False if parts[0] < parts[-1]: return True return False
[ "def", "_is_hostmask", "(", "self", ",", "ip_str", ")", ":", "bits", "=", "ip_str", ".", "split", "(", "'.'", ")", "try", ":", "parts", "=", "[", "x", "for", "x", "in", "map", "(", "int", ",", "bits", ")", "if", "x", "in", "self", ".", "_valid_...
https://github.com/IJDykeman/wangTiles/blob/7c1ee2095ebdf7f72bce07d94c6484915d5cae8b/experimental_code/tiles_3d/venv_mac/lib/python2.7/site-packages/pip/_vendor/ipaddress.py#L1330-L1349
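For context on the hostmask-versus-netmask distinction this vendored helper implements, the standard library's `ipaddress` module accepts both forms when constructing a network. A small, hedged illustration with arbitrarily chosen addresses:

```python
import ipaddress

# "0.0.0.255" is a hostmask (host bits set); "255.255.255.0" is the equivalent netmask.
net_from_hostmask = ipaddress.IPv4Network("192.0.2.0/0.0.0.255")
net_from_netmask = ipaddress.IPv4Network("192.0.2.0/255.255.255.0")

print(net_from_hostmask)                      # 192.0.2.0/24
print(net_from_hostmask == net_from_netmask)  # True
print(net_from_hostmask.hostmask)             # 0.0.0.255
```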
emesene/emesene
4548a4098310e21b16437bb36223a7f632a4f7bc
emesene/e3/xmpp/SleekXMPP/sleekxmpp/plugins/xep_0030/disco.py
python
XEP_0030.add_feature
(self, feature, node=None, jid=None)
Add a feature to a JID/node combination. Arguments: feature -- The namespace of the supported feature. node -- The node to modify. jid -- The JID to modify.
Add a feature to a JID/node combination.
[ "Add", "a", "feature", "to", "a", "JID", "/", "node", "combination", "." ]
def add_feature(self, feature, node=None, jid=None): """ Add a feature to a JID/node combination. Arguments: feature -- The namespace of the supported feature. node -- The node to modify. jid -- The JID to modify. """ kwargs = {'feature': feature} self.api['add_feature'](jid, node, None, kwargs)
[ "def", "add_feature", "(", "self", ",", "feature", ",", "node", "=", "None", ",", "jid", "=", "None", ")", ":", "kwargs", "=", "{", "'feature'", ":", "feature", "}", "self", ".", "api", "[", "'add_feature'", "]", "(", "jid", ",", "node", ",", "None...
https://github.com/emesene/emesene/blob/4548a4098310e21b16437bb36223a7f632a4f7bc/emesene/e3/xmpp/SleekXMPP/sleekxmpp/plugins/xep_0030/disco.py#L512-L522
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/Python3/logging/__init__.py
python
Logger.info
(self, msg, *args, **kwargs)
Log 'msg % args' with severity 'INFO'. To pass exception information, use the keyword argument exc_info with a true value, e.g. logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
Log 'msg % args' with severity 'INFO'.
[ "Log", "msg", "%", "args", "with", "severity", "INFO", "." ]
def info(self, msg, *args, **kwargs): """ Log 'msg % args' with severity 'INFO'. To pass exception information, use the keyword argument exc_info with a true value, e.g. logger.info("Houston, we have a %s", "interesting problem", exc_info=1) """ if self.isEnabledFor(INFO): self._log(INFO, msg, args, **kwargs)
[ "def", "info", "(", "self", ",", "msg", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "isEnabledFor", "(", "INFO", ")", ":", "self", ".", "_log", "(", "INFO", ",", "msg", ",", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/Python3/logging/__init__.py#L1296-L1306
derek-zhang123/MxShop
22001da19b3d85424f48d78a844b8f424ae1e0e6
extra_apps/rest_framework/request.py
python
Request._parse
(self)
Parse the request content, returning a two-tuple of (data, files) May raise an `UnsupportedMediaType`, or `ParseError` exception.
Parse the request content, returning a two-tuple of (data, files)
[ "Parse", "the", "request", "content", "returning", "a", "two", "-", "tuple", "of", "(", "data", "files", ")" ]
def _parse(self): """ Parse the request content, returning a two-tuple of (data, files) May raise an `UnsupportedMediaType`, or `ParseError` exception. """ media_type = self.content_type try: stream = self.stream except RawPostDataException: if not hasattr(self._request, '_post'): raise # If request.POST has been accessed in middleware, and a method='POST' # request was made with 'multipart/form-data', then the request stream # will already have been exhausted. if self._supports_form_parsing(): return (self._request.POST, self._request.FILES) stream = None if stream is None or media_type is None: if media_type and is_form_media_type(media_type): empty_data = QueryDict('', encoding=self._request._encoding) else: empty_data = {} empty_files = MultiValueDict() return (empty_data, empty_files) parser = self.negotiator.select_parser(self, self.parsers) if not parser: raise exceptions.UnsupportedMediaType(media_type) try: parsed = parser.parse(stream, media_type, self.parser_context) except Exception: # If we get an exception during parsing, fill in empty data and # re-raise. Ensures we don't simply repeat the error when # attempting to render the browsable renderer response, or when # logging the request or similar. self._data = QueryDict('', encoding=self._request._encoding) self._files = MultiValueDict() self._full_data = self._data raise # Parser classes may return the raw data, or a # DataAndFiles object. Unpack the result as required. try: return (parsed.data, parsed.files) except AttributeError: empty_files = MultiValueDict() return (parsed, empty_files)
[ "def", "_parse", "(", "self", ")", ":", "media_type", "=", "self", ".", "content_type", "try", ":", "stream", "=", "self", ".", "stream", "except", "RawPostDataException", ":", "if", "not", "hasattr", "(", "self", ".", "_request", ",", "'_post'", ")", ":...
https://github.com/derek-zhang123/MxShop/blob/22001da19b3d85424f48d78a844b8f424ae1e0e6/extra_apps/rest_framework/request.py#L315-L365
jgagneastro/coffeegrindsize
22661ebd21831dba4cf32bfc6ba59fe3d49f879c
App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/numpy/core/_internal.py
python
_ctypes.shape
(self)
return self.shape_as(_getintp_ctype())
(c_intp*self.ndim): A ctypes array of length self.ndim where the basetype is the C-integer corresponding to ``dtype('p')`` on this platform. This base-type could be `ctypes.c_int`, `ctypes.c_long`, or `ctypes.c_longlong` depending on the platform. The c_intp type is defined accordingly in `numpy.ctypeslib`. The ctypes array contains the shape of the underlying array.
(c_intp*self.ndim): A ctypes array of length self.ndim where the basetype is the C-integer corresponding to ``dtype('p')`` on this platform. This base-type could be `ctypes.c_int`, `ctypes.c_long`, or `ctypes.c_longlong` depending on the platform. The c_intp type is defined accordingly in `numpy.ctypeslib`. The ctypes array contains the shape of the underlying array.
[ "(", "c_intp", "*", "self", ".", "ndim", ")", ":", "A", "ctypes", "array", "of", "length", "self", ".", "ndim", "where", "the", "basetype", "is", "the", "C", "-", "integer", "corresponding", "to", "dtype", "(", "p", ")", "on", "this", "platform", "."...
def shape(self): """ (c_intp*self.ndim): A ctypes array of length self.ndim where the basetype is the C-integer corresponding to ``dtype('p')`` on this platform. This base-type could be `ctypes.c_int`, `ctypes.c_long`, or `ctypes.c_longlong` depending on the platform. The c_intp type is defined accordingly in `numpy.ctypeslib`. The ctypes array contains the shape of the underlying array. """ return self.shape_as(_getintp_ctype())
[ "def", "shape", "(", "self", ")", ":", "return", "self", ".", "shape_as", "(", "_getintp_ctype", "(", ")", ")" ]
https://github.com/jgagneastro/coffeegrindsize/blob/22661ebd21831dba4cf32bfc6ba59fe3d49f879c/App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/numpy/core/_internal.py#L357-L366
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/whoosh/matching/wrappers.py
python
WrappingMatcher.next
(self)
[]
def next(self): self.child.next()
[ "def", "next", "(", "self", ")", ":", "self", ".", "child", ".", "next", "(", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/whoosh/matching/wrappers.py#L97-L98
dmlc/gluon-nlp
5d4bc9eba7226ea9f9aabbbd39e3b1e886547e48
src/gluonnlp/utils/parameter.py
python
deduplicate_param_dict
(param_dict)
return dedup_param_dict
Get a parameter dict that has been deduplicated Parameters ---------- param_dict The parameter dict returned by `model.collect_params()` Returns ------- dedup_param_dict
Get a parameter dict that has been deduplicated
[ "Get", "a", "parameter", "dict", "that", "has", "been", "deduplicated" ]
def deduplicate_param_dict(param_dict): """Get a parameter dict that has been deduplicated Parameters ---------- param_dict The parameter dict returned by `model.collect_params()` Returns ------- dedup_param_dict """ dedup_param_dict = dict() param_uuid_set = set() for k in sorted(param_dict.keys()): v = param_dict[k] if v._uuid in param_uuid_set: continue dedup_param_dict[k] = v param_uuid_set.add(v._uuid) return dedup_param_dict
[ "def", "deduplicate_param_dict", "(", "param_dict", ")", ":", "dedup_param_dict", "=", "dict", "(", ")", "param_uuid_set", "=", "set", "(", ")", "for", "k", "in", "sorted", "(", "param_dict", ".", "keys", "(", ")", ")", ":", "v", "=", "param_dict", "[", ...
https://github.com/dmlc/gluon-nlp/blob/5d4bc9eba7226ea9f9aabbbd39e3b1e886547e48/src/gluonnlp/utils/parameter.py#L282-L302
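The row above deduplicates a parameter dict by dropping entries that refer to the same underlying parameter (tracked via MXNet's internal `_uuid`), as happens with tied or shared layers. A dependency-free sketch of the same idea, keyed on `id()` instead of `_uuid` and purely illustrative:

```python
def dedup_by_identity(mapping):
    """Keep the first (alphabetically smallest) key for each distinct object."""
    seen_ids = set()
    result = {}
    for key in sorted(mapping):
        value = mapping[key]
        if id(value) in seen_ids:
            continue
        seen_ids.add(id(value))
        result[key] = value
    return result

shared = object()
params = {"decoder.weight": shared, "embedding.weight": shared, "bias": object()}
print(sorted(dedup_by_identity(params)))  # ['bias', 'decoder.weight'] -- the tied copy is dropped
```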
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
core/state.py
python
Mem.Shift
(self, n)
[]
def Shift(self, n): # type: (int) -> int frame = self.argv_stack[-1] num_args = len(frame.argv) if (frame.num_shifted + n) <= num_args: frame.num_shifted += n return 0 # success else: return 1
[ "def", "Shift", "(", "self", ",", "n", ")", ":", "# type: (int) -> int", "frame", "=", "self", ".", "argv_stack", "[", "-", "1", "]", "num_args", "=", "len", "(", "frame", ".", "argv", ")", "if", "(", "frame", ".", "num_shifted", "+", "n", ")", "<=...
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/core/state.py#L1220-L1229
caiiiac/Machine-Learning-with-Python
1a26c4467da41ca4ebc3d5bd789ea942ef79422f
MachineLearning/venv/lib/python3.5/site-packages/pip/utils/__init__.py
python
untar_file
(filename, location)
Untar the file (with path `filename`) to the destination `location`. All files are written based on system defaults and umask (i.e. permissions are not preserved), except that regular file members with any execute permissions (user, group, or world) have "chmod +x" applied after being written. Note that for windows, any execute changes using os.chmod are no-ops per the python docs.
Untar the file (with path `filename`) to the destination `location`. All files are written based on system defaults and umask (i.e. permissions are not preserved), except that regular file members with any execute permissions (user, group, or world) have "chmod +x" applied after being written. Note that for windows, any execute changes using os.chmod are no-ops per the python docs.
[ "Untar", "the", "file", "(", "with", "path", "filename", ")", "to", "the", "destination", "location", ".", "All", "files", "are", "written", "based", "on", "system", "defaults", "and", "umask", "(", "i", ".", "e", ".", "permissions", "are", "not", "prese...
def untar_file(filename, location): """ Untar the file (with path `filename`) to the destination `location`. All files are written based on system defaults and umask (i.e. permissions are not preserved), except that regular file members with any execute permissions (user, group, or world) have "chmod +x" applied after being written. Note that for windows, any execute changes using os.chmod are no-ops per the python docs. """ ensure_dir(location) if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): mode = 'r:gz' elif filename.lower().endswith(BZ2_EXTENSIONS): mode = 'r:bz2' elif filename.lower().endswith(XZ_EXTENSIONS): mode = 'r:xz' elif filename.lower().endswith('.tar'): mode = 'r' else: logger.warning( 'Cannot determine compression type for file %s', filename, ) mode = 'r:*' tar = tarfile.open(filename, mode) try: # note: python<=2.5 doesn't seem to know about pax headers, filter them leading = has_leading_dir([ member.name for member in tar.getmembers() if member.name != 'pax_global_header' ]) for member in tar.getmembers(): fn = member.name if fn == 'pax_global_header': continue if leading: fn = split_leading_dir(fn)[1] path = os.path.join(location, fn) if member.isdir(): ensure_dir(path) elif member.issym(): try: tar._extract_member(member, path) except Exception as exc: # Some corrupt tar files seem to produce this # (specifically bad symlinks) logger.warning( 'In the tar file %s the member %s is invalid: %s', filename, member.name, exc, ) continue else: try: fp = tar.extractfile(member) except (KeyError, AttributeError) as exc: # Some corrupt tar files seem to produce this # (specifically bad symlinks) logger.warning( 'In the tar file %s the member %s is invalid: %s', filename, member.name, exc, ) continue ensure_dir(os.path.dirname(path)) with open(path, 'wb') as destfp: shutil.copyfileobj(fp, destfp) fp.close() # Update the timestamp (useful for cython compiled files) tar.utime(member, path) # member have any execute permissions for user/group/world? if member.mode & 0o111: # make dest file have execute for user/group/world # no-op on windows per python docs os.chmod(path, (0o777 - current_umask() | 0o111)) finally: tar.close()
[ "def", "untar_file", "(", "filename", ",", "location", ")", ":", "ensure_dir", "(", "location", ")", "if", "filename", ".", "lower", "(", ")", ".", "endswith", "(", "'.gz'", ")", "or", "filename", ".", "lower", "(", ")", ".", "endswith", "(", "'.tgz'",...
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/pip/utils/__init__.py#L515-L588
broadinstitute/viral-ngs
e144969e4c57060d53f38a4c3a270e8227feace1
util/vcf.py
python
VcfReader.get_snp_genos
(self, c, p, as_strings=True)
return len(snps) == 1 and dict(snps[0][3]) or {}
Read a single position from a VCF file and return the genotypes as a sample -> allele map. If there is not exactly one matching row in the VCF file at this position (if there are none or multiple) then we return an empty map: {}.
Read a single position from a VCF file and return the genotypes as a sample -> allele map. If there is not exactly one matching row in the VCF file at this position (if there are none or multiple) then we return an empty map: {}.
[ "Read", "a", "single", "position", "from", "a", "VCF", "file", "and", "return", "the", "genotypes", "as", "a", "sample", "-", ">", "allele", "map", ".", "If", "there", "is", "not", "exactly", "one", "matching", "row", "in", "the", "VCF", "file", "at", ...
def get_snp_genos(self, c, p, as_strings=True): ''' Read a single position from a VCF file and return the genotypes as a sample -> allele map. If there is not exactly one matching row in the VCF file at this position (if there are none or multiple) then we return an empty map: {}. ''' snps = [x for x in self.get_range(c, p, p, as_strings=as_strings)] return len(snps) == 1 and dict(snps[0][3]) or {}
[ "def", "get_snp_genos", "(", "self", ",", "c", ",", "p", ",", "as_strings", "=", "True", ")", ":", "snps", "=", "[", "x", "for", "x", "in", "self", ".", "get_range", "(", "c", ",", "p", ",", "p", ",", "as_strings", "=", "as_strings", ")", "]", ...
https://github.com/broadinstitute/viral-ngs/blob/e144969e4c57060d53f38a4c3a270e8227feace1/util/vcf.py#L315-L322
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/cls/v20201016/models.py
python
RuleInfo.__init__
(self)
r""" :param FullText: 全文索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type FullText: :class:`tencentcloud.cls.v20201016.models.FullTextInfo` :param KeyValue: 键值索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type KeyValue: :class:`tencentcloud.cls.v20201016.models.RuleKeyValueInfo` :param Tag: 元字段索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type Tag: :class:`tencentcloud.cls.v20201016.models.RuleTagInfo`
r""" :param FullText: 全文索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type FullText: :class:`tencentcloud.cls.v20201016.models.FullTextInfo` :param KeyValue: 键值索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type KeyValue: :class:`tencentcloud.cls.v20201016.models.RuleKeyValueInfo` :param Tag: 元字段索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type Tag: :class:`tencentcloud.cls.v20201016.models.RuleTagInfo`
[ "r", ":", "param", "FullText", ":", "全文索引配置", "注意:此字段可能返回", "null,表示取不到有效值。", ":", "type", "FullText", ":", ":", "class", ":", "tencentcloud", ".", "cls", ".", "v20201016", ".", "models", ".", "FullTextInfo", ":", "param", "KeyValue", ":", "键值索引配置", "注意:此字段可...
def __init__(self): r""" :param FullText: 全文索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type FullText: :class:`tencentcloud.cls.v20201016.models.FullTextInfo` :param KeyValue: 键值索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type KeyValue: :class:`tencentcloud.cls.v20201016.models.RuleKeyValueInfo` :param Tag: 元字段索引配置 注意:此字段可能返回 null,表示取不到有效值。 :type Tag: :class:`tencentcloud.cls.v20201016.models.RuleTagInfo` """ self.FullText = None self.KeyValue = None self.Tag = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "FullText", "=", "None", "self", ".", "KeyValue", "=", "None", "self", ".", "Tag", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/cls/v20201016/models.py#L4719-L4733
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/rest/binary_sensor.py
python
RestBinarySensor.is_on
(self)
return self._is_on
Return true if the binary sensor is on.
Return true if the binary sensor is on.
[ "Return", "true", "if", "the", "binary", "sensor", "is", "on", "." ]
def is_on(self): """Return true if the binary sensor is on.""" return self._is_on
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_is_on" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/rest/binary_sensor.py#L107-L109
open-mmlab/mmdetection
ff9bc39913cb3ff5dde79d3933add7dc2561bab7
mmdet/core/evaluation/class_names.py
python
get_classes
(dataset)
return labels
Get class names of a dataset.
Get class names of a dataset.
[ "Get", "class", "names", "of", "a", "dataset", "." ]
def get_classes(dataset): """Get class names of a dataset.""" alias2name = {} for name, aliases in dataset_aliases.items(): for alias in aliases: alias2name[alias] = name if mmcv.is_str(dataset): if dataset in alias2name: labels = eval(alias2name[dataset] + '_classes()') else: raise ValueError(f'Unrecognized dataset: {dataset}') else: raise TypeError(f'dataset must a str, but got {type(dataset)}') return labels
[ "def", "get_classes", "(", "dataset", ")", ":", "alias2name", "=", "{", "}", "for", "name", ",", "aliases", "in", "dataset_aliases", ".", "items", "(", ")", ":", "for", "alias", "in", "aliases", ":", "alias2name", "[", "alias", "]", "=", "name", "if", ...
https://github.com/open-mmlab/mmdetection/blob/ff9bc39913cb3ff5dde79d3933add7dc2561bab7/mmdet/core/evaluation/class_names.py#L103-L117
nabeel-oz/qlik-py-tools
09d0cd232fadcaa926bb11cebb37d5ae3051bc86
core/__main__.py
python
ExtensionService.__init__
(self, funcdef_file)
Class initializer. :param funcdef_file: a function definition JSON file
Class initializer. :param funcdef_file: a function definition JSON file
[ "Class", "initializer", ".", ":", "param", "funcdef_file", ":", "a", "function", "definition", "JSON", "file" ]
def __init__(self, funcdef_file): """ Class initializer. :param funcdef_file: a function definition JSON file """ self._function_definitions = funcdef_file os.makedirs('logs', exist_ok=True) log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logger.config') logging.config.fileConfig(log_file) logging.info('Logging enabled')
[ "def", "__init__", "(", "self", ",", "funcdef_file", ")", ":", "self", ".", "_function_definitions", "=", "funcdef_file", "os", ".", "makedirs", "(", "'logs'", ",", "exist_ok", "=", "True", ")", "log_file", "=", "os", ".", "path", ".", "join", "(", "os",...
https://github.com/nabeel-oz/qlik-py-tools/blob/09d0cd232fadcaa926bb11cebb37d5ae3051bc86/core/__main__.py#L53-L62
pycontribs/pyrax
a0c022981f76a4cba96a22ecc19bb52843ac4fbe
pyrax/object_storage.py
python
Container.move_object
(self, obj, new_container, new_obj_name=None, new_reference=False, content_type=None, extra_info=None)
return self.manager.move_object(self, obj, new_container, new_obj_name=new_obj_name, new_reference=new_reference, content_type=content_type)
Works just like copy_object, except that the source object is deleted after a successful copy. You can optionally change the content_type of the object by supplying that in the 'content_type' parameter. NOTE: any references to the original object will no longer be valid; you will have to get a reference to the new object by passing True for the 'new_reference' parameter. When this is True, a reference to the newly moved object is returned. Otherwise, the etag for the moved object is returned. The 'extra_info' parameter is included for backwards compatibility. It is no longer used at all, and will not be modified with swiftclient info, since swiftclient is not used any more.
Works just like copy_object, except that the source object is deleted after a successful copy.
[ "Works", "just", "like", "copy_object", "except", "that", "the", "source", "object", "is", "deleted", "after", "a", "successful", "copy", "." ]
def move_object(self, obj, new_container, new_obj_name=None, new_reference=False, content_type=None, extra_info=None): """ Works just like copy_object, except that the source object is deleted after a successful copy. You can optionally change the content_type of the object by supplying that in the 'content_type' parameter. NOTE: any references to the original object will no longer be valid; you will have to get a reference to the new object by passing True for the 'new_reference' parameter. When this is True, a reference to the newly moved object is returned. Otherwise, the etag for the moved object is returned. The 'extra_info' parameter is included for backwards compatibility. It is no longer used at all, and will not be modified with swiftclient info, since swiftclient is not used any more. """ return self.manager.move_object(self, obj, new_container, new_obj_name=new_obj_name, new_reference=new_reference, content_type=content_type)
[ "def", "move_object", "(", "self", ",", "obj", ",", "new_container", ",", "new_obj_name", "=", "None", ",", "new_reference", "=", "False", ",", "content_type", "=", "None", ",", "extra_info", "=", "None", ")", ":", "return", "self", ".", "manager", ".", ...
https://github.com/pycontribs/pyrax/blob/a0c022981f76a4cba96a22ecc19bb52843ac4fbe/pyrax/object_storage.py#L609-L630
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/encodings/cp720.py
python
getregentry
()
return codecs.CodecInfo( name='cp720', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
[]
def getregentry(): return codecs.CodecInfo( name='cp720', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
[ "def", "getregentry", "(", ")", ":", "return", "codecs", ".", "CodecInfo", "(", "name", "=", "'cp720'", ",", "encode", "=", "Codec", "(", ")", ".", "encode", ",", "decode", "=", "Codec", "(", ")", ".", "decode", ",", "incrementalencoder", "=", "Increme...
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/encodings/cp720.py#L35-L44
phimpme/phimpme-generator
ba6d11190b9016238f27672e1ad55e6a875b74a0
Phimpme/site-packages/nose/plugins/base.py
python
IPluginInterface.loadTestsFromTestClass
(self, cls)
Return tests in this test class. Class will *not* be a unittest.TestCase subclass. Return None if you are not able to load any tests, an iterable if you are. May be a generator. :param cls: The test case class. Must be **not** be subclass of :class:`unittest.TestCase`.
Return tests in this test class. Class will *not* be a unittest.TestCase subclass. Return None if you are not able to load any tests, an iterable if you are. May be a generator.
[ "Return", "tests", "in", "this", "test", "class", ".", "Class", "will", "*", "not", "*", "be", "a", "unittest", ".", "TestCase", "subclass", ".", "Return", "None", "if", "you", "are", "not", "able", "to", "load", "any", "tests", "an", "iterable", "if",...
def loadTestsFromTestClass(self, cls): """Return tests in this test class. Class will *not* be a unittest.TestCase subclass. Return None if you are not able to load any tests, an iterable if you are. May be a generator. :param cls: The test case class. Must be **not** be subclass of :class:`unittest.TestCase`. """ pass
[ "def", "loadTestsFromTestClass", "(", "self", ",", "cls", ")", ":", "pass" ]
https://github.com/phimpme/phimpme-generator/blob/ba6d11190b9016238f27672e1ad55e6a875b74a0/Phimpme/site-packages/nose/plugins/base.py#L478-L486
Jack-Cherish/Deep-Learning
5fd254b61ad45367fbae28c49976e82b14ff7110
Tutorial/lesson-5/activators.py
python
TanhActivator.backward
(self, output)
return 1 - output * output
[]
def backward(self, output): return 1 - output * output
[ "def", "backward", "(", "self", ",", "output", ")", ":", "return", "1", "-", "output", "*", "output" ]
https://github.com/Jack-Cherish/Deep-Learning/blob/5fd254b61ad45367fbae28c49976e82b14ff7110/Tutorial/lesson-5/activators.py#L36-L37
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/modules/rabbitmq.py
python
set_user_tags
(name, tags, runas=None)
return _format_response(res, msg)
Add user tags via rabbitmqctl set_user_tags CLI Example: .. code-block:: bash salt '*' rabbitmq.set_user_tags myadmin administrator
Add user tags via rabbitmqctl set_user_tags
[ "Add", "user", "tags", "via", "rabbitmqctl", "set_user_tags" ]
def set_user_tags(name, tags, runas=None): """Add user tags via rabbitmqctl set_user_tags CLI Example: .. code-block:: bash salt '*' rabbitmq.set_user_tags myadmin administrator """ if runas is None and not salt.utils.platform.is_windows(): runas = salt.utils.user.get_user() if not isinstance(tags, (list, tuple)): tags = [tags] res = __salt__["cmd.run_all"]( [RABBITMQCTL, "set_user_tags", name] + list(tags), reset_system_locale=False, runas=runas, python_shell=False, ) msg = "Tag(s) set" return _format_response(res, msg)
[ "def", "set_user_tags", "(", "name", ",", "tags", ",", "runas", "=", "None", ")", ":", "if", "runas", "is", "None", "and", "not", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "runas", "=", "salt", ".", "utils", ".", "use...
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/modules/rabbitmq.py#L707-L729
NVIDIA/object-detection-tensorrt-example
feb3632ed289fb71acb84e28a4f51931fc8c7e13
SSD_Model/utils/boxes.py
python
draw_bounding_box_on_image
(image, ymin, xmin, ymax, xmax, color=(255, 0, 0), thickness=4, display_str='', use_normalized_coordinates=True)
Adds a bounding box to an image. Bounding box coordinates can be specified in either absolute (pixel) or normalized coordinates by setting the use_normalized_coordinates argument. The string passed in display_str is displayed above the bounding box in black text on a rectangle filled with the input 'color'. If the top of the bounding box extends to the edge of the image, the string is displayed below the bounding box. Args: image (PIL.Image): PIL.Image object ymin (float): ymin of bounding box xmin (float): xmin of bounding box ymax (float): ymax of bounding box xmax (float): xmax of bounding box color (int, int, int): RGB tuple describing color to draw bounding box thickness (int): line thickness display_str (str): string to display in box use_normalized_coordinates (bool): If True, treat coordinates ymin, xmin, ymax, xmax as relative to the image. Otherwise treat coordinates as absolute
Adds a bounding box to an image.
[ "Adds", "a", "bounding", "box", "to", "an", "image", "." ]
def draw_bounding_box_on_image(image, ymin, xmin, ymax, xmax, color=(255, 0, 0), thickness=4, display_str='', use_normalized_coordinates=True): """Adds a bounding box to an image. Bounding box coordinates can be specified in either absolute (pixel) or normalized coordinates by setting the use_normalized_coordinates argument. The string passed in display_str is displayed above the bounding box in black text on a rectangle filled with the input 'color'. If the top of the bounding box extends to the edge of the image, the string is displayed below the bounding box. Args: image (PIL.Image): PIL.Image object ymin (float): ymin of bounding box xmin (float): xmin of bounding box ymax (float): ymax of bounding box xmax (float): xmax of bounding box color (int, int, int): RGB tuple describing color to draw bounding box thickness (int): line thickness display_str (str): string to display in box use_normalized_coordinates (bool): If True, treat coordinates ymin, xmin, ymax, xmax as relative to the image. Otherwise treat coordinates as absolute """ draw = ImageDraw.Draw(image) im_width, im_height = image.size if use_normalized_coordinates: (left, right, top, bottom) = (xmin * im_width, xmax * im_width, ymin * im_height, ymax * im_height) else: (left, right, top, bottom) = (xmin, xmax, ymin, ymax) draw.line([(left, top), (left, bottom), (right, bottom), (right, top), (left, top)], width=thickness, fill=tuple(color)) try: font = ImageFont.truetype('arial.ttf', 24) except IOError: font = ImageFont.load_default() # If the total height of the display string added to the top of the bounding # box exceeds the top of the image, move the string below the bounding box # instead of above display_str_height = font.getsize(display_str)[1] # Each display_str has a top and bottom margin of 0.05x total_display_str_height = (1 + 2 * 0.05) * display_str_height if top > total_display_str_height: text_bottom = top else: text_bottom = bottom + total_display_str_height text_width, text_height = font.getsize(display_str) margin = np.ceil(0.05 * text_height) draw.rectangle( [(left, text_bottom - text_height - 2 * margin), (left + text_width, text_bottom)], fill=tuple(color)) draw.text( (left + margin, text_bottom - text_height - margin), display_str, fill='black', font=font) text_bottom -= text_height - 2 * margin
[ "def", "draw_bounding_box_on_image", "(", "image", ",", "ymin", ",", "xmin", ",", "ymax", ",", "xmax", ",", "color", "=", "(", "255", ",", "0", ",", "0", ")", ",", "thickness", "=", "4", ",", "display_str", "=", "''", ",", "use_normalized_coordinates", ...
https://github.com/NVIDIA/object-detection-tensorrt-example/blob/feb3632ed289fb71acb84e28a4f51931fc8c7e13/SSD_Model/utils/boxes.py#L35-L104
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/v1_custom_resource_definition.py
python
V1CustomResourceDefinition.__init__
(self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None)
V1CustomResourceDefinition - a model defined in OpenAPI
V1CustomResourceDefinition - a model defined in OpenAPI
[ "V1CustomResourceDefinition", "-", "a", "model", "defined", "in", "OpenAPI" ]
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None): # noqa: E501 """V1CustomResourceDefinition - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_version = None self._kind = None self._metadata = None self._spec = None self._status = None self.discriminator = None if api_version is not None: self.api_version = api_version if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata self.spec = spec if status is not None: self.status = status
[ "def", "__init__", "(", "self", ",", "api_version", "=", "None", ",", "kind", "=", "None", ",", "metadata", "=", "None", ",", "spec", "=", "None", ",", "status", "=", "None", ",", "local_vars_configuration", "=", "None", ")", ":", "# noqa: E501", "# noqa...
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_custom_resource_definition.py#L51-L72
PINTO0309/PINTO_model_zoo
2924acda7a7d541d8712efd7cc4fd1c61ef5bddd
082_MediaPipe_Meet_Segmentation/02_segm_full_v679_tflite_to_pb_saved_model.py
python
make_graph
(ops, op_types, interpreter)
[]
def make_graph(ops, op_types, interpreter): height = 144 width = 256 tensors = {} input_details = interpreter.get_input_details() # output_details = interpreter.get_output_details() print(input_details) for input_detail in input_details: tensors[input_detail['index']] = tf.placeholder( dtype=input_detail['dtype'], shape=input_detail['shape'], name=input_detail['name']) for op in ops: print('@@@@@@@@@@@@@@ op:', op) op_type = op_types[op['opcode_index']] if op_type == 'CONV_2D': input_tensor = tensors[op['inputs'][0]] weights = tensors[op['inputs'][1]].transpose(1,2,3,0) bias = tensors[op['inputs'][2]] output_detail = interpreter._get_tensor_details(op['outputs'][0]) options = op['builtin_options'] output_tensor = tf.nn.conv2d( input_tensor, weights, strides=[1, options['stride_h'], options['stride_w'], 1], padding=options['padding'], dilations=[ 1, options['dilation_h_factor'], options['dilation_w_factor'], 1 ], name=output_detail['name'] + '/conv2d') output_tensor = tf.add( output_tensor, bias, name=output_detail['name']) if output_detail['name'].split('/')[-1]=='Relu6': output_tensor = tf.nn.relu6(output_tensor) tensors[output_detail['index']] = output_tensor elif op_type == 'DEPTHWISE_CONV_2D': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] weights = tensors[op['inputs'][1]].transpose(1,2,3,0) bias = tensors[op['inputs'][2]] options = op['builtin_options'] output_tensor = tf.nn.depthwise_conv2d( input_tensor, weights, strides=[1, options['stride_h'], options['stride_w'], 1], padding=options['padding'], # dilations=[1, options['dilation_h_factor'], options['dilation_w_factor'], 1], name=output_detail['name'] + '/depthwise_conv2d') output_tensor = tf.add(output_tensor, bias, name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'MAX_POOL_2D': input_tensor = tensors[op['inputs'][0]] output_detail = interpreter._get_tensor_details(op['outputs'][0]) options = op['builtin_options'] output_tensor = tf.nn.max_pool( input_tensor, ksize=[ 1, options['filter_height'], options['filter_width'], 1 ], strides=[1, options['stride_h'], options['stride_w'], 1], padding=options['padding'], name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'PAD': input_tensor = tensors[op['inputs'][0]] output_detail = interpreter._get_tensor_details(op['outputs'][0]) paddings_detail = interpreter._get_tensor_details(op['inputs'][1]) paddings_array = interpreter.get_tensor(paddings_detail['index']) paddings = tf.Variable( paddings_array, name=paddings_detail['name']) output_tensor = tf.pad( input_tensor, paddings, name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'RELU': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] output_tensor = tf.nn.relu(input_tensor, name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'PRELU': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] alpha_detail = interpreter._get_tensor_details(op['inputs'][1]) alpha_array = interpreter.get_tensor(alpha_detail['index']) with tf.variable_scope(name_or_scope=output_detail['name']): alphas = tf.Variable(alpha_array, name=alpha_detail['name']) output_tensor = tf.maximum(alphas * input_tensor, input_tensor) tensors[output_detail['index']] = output_tensor elif op_type == 'RELU6': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] output_tensor = tf.nn.relu6(input_tensor, name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'RESHAPE': input_tensor = tensors[op['inputs'][0]] output_detail = interpreter._get_tensor_details(op['outputs'][0]) options = op['builtin_options'] output_tensor = tf.reshape(input_tensor, options['new_shape'], name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'ADD': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor_0 = tensors[op['inputs'][0]] try: input_tensor_1 = tensors[op['inputs'][1]] except: param = interpreter._get_tensor_details(op['inputs'][1]) input_tensor_1 = interpreter.get_tensor(param['index']) output_tensor = tf.add(input_tensor_0, input_tensor_1, name=output_detail['name']) if output_detail['name'].split('/')[-1]=='Relu6': output_tensor = tf.nn.relu6(output_tensor) tensors[output_detail['index']] = output_tensor elif op_type == 'CONCATENATION': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor_0 = tensors[op['inputs'][0]] input_tensor_1 = tensors[op['inputs'][1]] try: input_tensor_2 = tensors[op['inputs'][2]] options = op['builtin_options'] output_tensor = tf.concat([input_tensor_0, input_tensor_1, input_tensor_2], options['axis'], name=output_detail['name']) except: options = op['builtin_options'] output_tensor = tf.concat([input_tensor_0, input_tensor_1], options['axis'], name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'LOGISTIC': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] output_tensor = tf.math.sigmoid(input_tensor, name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'TRANSPOSE_CONV': input_tensor = tensors[op['inputs'][2]] weights_detail = interpreter._get_tensor_details(op['inputs'][1]) output_shape_detail = interpreter._get_tensor_details(op['inputs'][0]) output_detail = interpreter._get_tensor_details(op['outputs'][0]) weights_array = interpreter.get_tensor(weights_detail['index']) weights_array = np.transpose(weights_array, (1, 2, 0, 3)) output_shape_array = interpreter.get_tensor(output_shape_detail['index']) weights = tf.Variable(weights_array, name=weights_detail['name']) shape = tf.Variable(output_shape_array, name=output_shape_detail['name']) options = op['builtin_options'] output_tensor = tf.nn.conv2d_transpose(input_tensor, weights, shape, [1, options['stride_h'], options['stride_w'], 1], padding=options['padding'], name=output_detail['name'] + '/conv2d_transpose') tensors[output_detail['index']] = output_tensor elif op_type == 'MUL': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor_0 = tensors[op['inputs'][0]] input_tensor_1 = None try: input_tensor_1 = tensors[op['inputs'][1]] except: param = interpreter._get_tensor_details(op['inputs'][1]) input_tensor_1 = interpreter.get_tensor(param['index']) output_tensor = tf.multiply(input_tensor_0, input_tensor_1, name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'HARD_SWISH': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] output_tensor = optimizing_hardswish_for_edgetpu(input_tensor, name=output_detail['name']) tensors[output_detail['index']] = output_tensor elif op_type == 'AVERAGE_POOL_2D': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] options = op['builtin_options'] pool_size = [options['filter_height'], options['filter_width']] strides = [options['stride_h'], options['stride_w']] padding = options['padding'] output_tensor = tf.keras.layers.AveragePooling2D(pool_size=pool_size, strides=strides, padding=padding, name=output_detail['name'])(input_tensor) tensors[output_detail['index']] = output_tensor elif op_type == 'FULLY_CONNECTED': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] weights = tensors[op['inputs'][1]].transpose(1,0) bias = tensors[op['inputs'][2]] output_shape_detail = interpreter._get_tensor_details(op['inputs'][0]) output_shape_array = interpreter.get_tensor(output_shape_detail['index']) output_tensor = tf.keras.layers.Dense(units=output_shape_array.shape[3], use_bias=True, kernel_initializer=tf.keras.initializers.Constant(weights), bias_initializer=tf.keras.initializers.Constant(bias))(input_tensor) tensors[output_detail['index']] = output_tensor elif op_type == 'RESIZE_BILINEAR': output_detail = interpreter._get_tensor_details(op['outputs'][0]) input_tensor = tensors[op['inputs'][0]] size_detail = interpreter._get_tensor_details(op['inputs'][1]) size = interpreter.get_tensor(size_detail['index']) size_height = size[0] size_width = size[1] def upsampling2d_bilinear(x, size_height, size_width): if optimizing_for_edgetpu_flg: return tf.image.resize_bilinear(x, (size_height, size_width)) else: return tfv2.image.resize(x, [size_height, size_width], method='bilinear') output_tensor = tf.keras.layers.Lambda(upsampling2d_bilinear, arguments={'size_height': size_height, 'size_width': size_width})(input_tensor) tensors[output_detail['index']] = output_tensor elif op_type == 'DEQUANTIZE': output_detail = interpreter._get_tensor_details(op['outputs'][0]) weights_detail = interpreter._get_tensor_details(op['inputs'][0]) weights = interpreter.get_tensor(weights_detail['index']) output_tensor = weights.astype(np.float32) tensors[output_detail['index']] = output_tensor else: raise ValueError(op_type) # Convolution2DTransposeBias input_tensor = tensors[241] weights = np.load('weights/segment_Kernel').transpose(1,2,0,3).astype(np.float32) bias = np.load('weights/segment_Bias').astype(np.float32) custom_trans = tf.nn.conv2d_transpose(input=input_tensor, filters=weights, output_shape=[1, height, width, 2], strides=[2, 2], padding='SAME', dilations=[1, 1]) output_tensor = tf.math.add(custom_trans, bias, name='segment') tensors[999] = output_tensor
[ "def", "make_graph", "(", "ops", ",", "op_types", ",", "interpreter", ")", ":", "height", "=", "144", "width", "=", "256", "tensors", "=", "{", "}", "input_details", "=", "interpreter", ".", "get_input_details", "(", ")", "# output_details = interpreter.get_outp...
https://github.com/PINTO0309/PINTO_model_zoo/blob/2924acda7a7d541d8712efd7cc4fd1c61ef5bddd/082_MediaPipe_Meet_Segmentation/02_segm_full_v679_tflite_to_pb_saved_model.py#L54-L288
facebookresearch/mmf
fb6fe390287e1da12c3bd28d4ab43c5f7dcdfc9f
tools/sweeps/lib/slurm.py
python
has_started
(save_dir)
return True
[]
def has_started(save_dir): train_log = os.path.join(save_dir, "train.log") if not os.path.exists(train_log): return False return True
[ "def", "has_started", "(", "save_dir", ")", ":", "train_log", "=", "os", ".", "path", ".", "join", "(", "save_dir", ",", "\"train.log\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "train_log", ")", ":", "return", "False", "return", "True...
https://github.com/facebookresearch/mmf/blob/fb6fe390287e1da12c3bd28d4ab43c5f7dcdfc9f/tools/sweeps/lib/slurm.py#L359-L363
PowerScript/KatanaFramework
0f6ad90a88de865d58ec26941cb4460501e75496
lib/scapy/build/lib.linux-i686-2.7/scapy/utils6.py
python
in6_iseui64
(x)
return x == eui64
Return True if provided address has an interface identifier part created in modified EUI-64 format (meaning it matches *::*:*ff:fe*:*). Otherwise, False is returned. Address must be passed in printable format.
Return True if provided address has an interface identifier part created in modified EUI-64 format (meaning it matches *::*:*ff:fe*:*). Otherwise, False is returned. Address must be passed in printable format.
[ "Return", "True", "if", "provided", "address", "has", "an", "interface", "identifier", "part", "created", "in", "modified", "EUI", "-", "64", "format", "(", "meaning", "it", "matches", "*", "::", "*", ":", "*", "ff", ":", "fe", "*", ":", "*", ")", "....
def in6_iseui64(x): """ Return True if provided address has an interface identifier part created in modified EUI-64 format (meaning it matches *::*:*ff:fe*:*). Otherwise, False is returned. Address must be passed in printable format. """ eui64 = inet_pton(socket.AF_INET6, '::ff:fe00:0') x = in6_and(inet_pton(socket.AF_INET6, x), eui64) return x == eui64
[ "def", "in6_iseui64", "(", "x", ")", ":", "eui64", "=", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "'::ff:fe00:0'", ")", "x", "=", "in6_and", "(", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "x", ")", ",", "eui64", ")", "return", "x", "=...
https://github.com/PowerScript/KatanaFramework/blob/0f6ad90a88de865d58ec26941cb4460501e75496/lib/scapy/build/lib.linux-i686-2.7/scapy/utils6.py#L524-L533
dieseldev/diesel
8d48371fce0b79d6631053594bce06e4b9628499
diesel/protocols/riak.py
python
RiakClient.get
(self, bucket, key)
Get the value of key from named bucket. Returns a dictionary with a list of the content for the key and the vector clock (vclock) for the key.
Get the value of key from named bucket.
[ "Get", "the", "value", "of", "key", "from", "named", "bucket", "." ]
def get(self, bucket, key): """Get the value of key from named bucket. Returns a dictionary with a list of the content for the key and the vector clock (vclock) for the key. """ request = riak_palm.RpbGetReq(bucket=bucket, key=key) self._send(request) response = self._receive() if response: return _to_dict(response)
[ "def", "get", "(", "self", ",", "bucket", ",", "key", ")", ":", "request", "=", "riak_palm", ".", "RpbGetReq", "(", "bucket", "=", "bucket", ",", "key", "=", "key", ")", "self", ".", "_send", "(", "request", ")", "response", "=", "self", ".", "_rec...
https://github.com/dieseldev/diesel/blob/8d48371fce0b79d6631053594bce06e4b9628499/diesel/protocols/riak.py#L314-L325
BerkeleyAutomation/meshrender
25b6fb711ef7a7871a5908459e6be5c76a04b631
meshrender/trackball.py
python
Trackball.__init__
(self, T_camera_world, size, scale, target=np.array([0.0, 0.0, 0.0]))
Initialize a trackball with an initial camera-to-world pose and the given parameters. Parameters ---------- T_camera_world : autolab_core.RigidTransform An initial camera-to-world pose for the trackball. size : (float, float) The width and height of the camera image in pixels. scale : float The diagonal of the scene's bounding box -- used for ensuring translation motions are sufficiently fast for differently-sized scenes. target : (3,) float The center of the scene in world coordinates. The trackball will revolve around this point.
Initialize a trackball with an initial camera-to-world pose and the given parameters.
[ "Initialize", "a", "trackball", "with", "an", "initial", "camera", "-", "to", "-", "world", "pose", "and", "the", "given", "parameters", "." ]
def __init__(self, T_camera_world, size, scale, target=np.array([0.0, 0.0, 0.0])): """Initialize a trackball with an initial camera-to-world pose and the given parameters. Parameters ---------- T_camera_world : autolab_core.RigidTransform An initial camera-to-world pose for the trackball. size : (float, float) The width and height of the camera image in pixels. scale : float The diagonal of the scene's bounding box -- used for ensuring translation motions are sufficiently fast for differently-sized scenes. target : (3,) float The center of the scene in world coordinates. The trackball will revolve around this point. """ self._size = np.array(size) self._scale = float(scale) self._T_camera_world = T_camera_world self._n_T_camera_world = T_camera_world self._target = target self._n_target = target self._state = Trackball.STATE_ROTATE
[ "def", "__init__", "(", "self", ",", "T_camera_world", ",", "size", ",", "scale", ",", "target", "=", "np", ".", "array", "(", "[", "0.0", ",", "0.0", ",", "0.0", "]", ")", ")", ":", "self", ".", "_size", "=", "np", ".", "array", "(", "size", "...
https://github.com/BerkeleyAutomation/meshrender/blob/25b6fb711ef7a7871a5908459e6be5c76a04b631/meshrender/trackball.py#L15-L46
explosion/srsly
8617ecc099d1f34a60117b5287bef5424ea2c837
srsly/ruamel_yaml/comments.py
python
CommentedMap._unmerged_contains
(self, key)
return None
[]
def _unmerged_contains(self, key): # type: (Any) -> Any if key in self._ok: return True return None
[ "def", "_unmerged_contains", "(", "self", ",", "key", ")", ":", "# type: (Any) -> Any", "if", "key", "in", "self", ".", "_ok", ":", "return", "True", "return", "None" ]
https://github.com/explosion/srsly/blob/8617ecc099d1f34a60117b5287bef5424ea2c837/srsly/ruamel_yaml/comments.py#L775-L779
TheAlgorithms/Python
9af2eef9b3761bf51580dedfb6fa7136ca0c5c2c
conversions/pressure_conversions.py
python
pressure_conversion
(value: float, from_type: str, to_type: str)
return ( value * PRESSURE_CONVERSION[from_type].from_ * PRESSURE_CONVERSION[to_type].to )
Conversion between pressure units. >>> pressure_conversion(4, "atm", "pascal") 405300 >>> pressure_conversion(1, "pascal", "psi") 0.00014401981999999998 >>> pressure_conversion(1, "bar", "atm") 0.986923 >>> pressure_conversion(3, "kilopascal", "bar") 0.029999991892499998 >>> pressure_conversion(2, "megapascal", "psi") 290.074434314 >>> pressure_conversion(4, "psi", "torr") 206.85984 >>> pressure_conversion(1, "inHg", "atm") 0.0334211 >>> pressure_conversion(1, "torr", "psi") 0.019336718261000002 >>> pressure_conversion(4, "wrongUnit", "atm") Traceback (most recent call last): File "/usr/lib/python3.8/doctest.py", line 1336, in __run exec(compile(example.source, filename, "single", File "<doctest __main__.pressure_conversion[8]>", line 1, in <module> pressure_conversion(4, "wrongUnit", "atm") File "<string>", line 67, in pressure_conversion ValueError: Invalid 'from_type' value: 'wrongUnit' Supported values are: atm, pascal, bar, kilopascal, megapascal, psi, inHg, torr
Conversion between pressure units. >>> pressure_conversion(4, "atm", "pascal") 405300 >>> pressure_conversion(1, "pascal", "psi") 0.00014401981999999998 >>> pressure_conversion(1, "bar", "atm") 0.986923 >>> pressure_conversion(3, "kilopascal", "bar") 0.029999991892499998 >>> pressure_conversion(2, "megapascal", "psi") 290.074434314 >>> pressure_conversion(4, "psi", "torr") 206.85984 >>> pressure_conversion(1, "inHg", "atm") 0.0334211 >>> pressure_conversion(1, "torr", "psi") 0.019336718261000002 >>> pressure_conversion(4, "wrongUnit", "atm") Traceback (most recent call last): File "/usr/lib/python3.8/doctest.py", line 1336, in __run exec(compile(example.source, filename, "single", File "<doctest __main__.pressure_conversion[8]>", line 1, in <module> pressure_conversion(4, "wrongUnit", "atm") File "<string>", line 67, in pressure_conversion ValueError: Invalid 'from_type' value: 'wrongUnit' Supported values are: atm, pascal, bar, kilopascal, megapascal, psi, inHg, torr
[ "Conversion", "between", "pressure", "units", ".", ">>>", "pressure_conversion", "(", "4", "atm", "pascal", ")", "405300", ">>>", "pressure_conversion", "(", "1", "pascal", "psi", ")", "0", ".", "00014401981999999998", ">>>", "pressure_conversion", "(", "1", "ba...
def pressure_conversion(value: float, from_type: str, to_type: str) -> float: """ Conversion between pressure units. >>> pressure_conversion(4, "atm", "pascal") 405300 >>> pressure_conversion(1, "pascal", "psi") 0.00014401981999999998 >>> pressure_conversion(1, "bar", "atm") 0.986923 >>> pressure_conversion(3, "kilopascal", "bar") 0.029999991892499998 >>> pressure_conversion(2, "megapascal", "psi") 290.074434314 >>> pressure_conversion(4, "psi", "torr") 206.85984 >>> pressure_conversion(1, "inHg", "atm") 0.0334211 >>> pressure_conversion(1, "torr", "psi") 0.019336718261000002 >>> pressure_conversion(4, "wrongUnit", "atm") Traceback (most recent call last): File "/usr/lib/python3.8/doctest.py", line 1336, in __run exec(compile(example.source, filename, "single", File "<doctest __main__.pressure_conversion[8]>", line 1, in <module> pressure_conversion(4, "wrongUnit", "atm") File "<string>", line 67, in pressure_conversion ValueError: Invalid 'from_type' value: 'wrongUnit' Supported values are: atm, pascal, bar, kilopascal, megapascal, psi, inHg, torr """ if from_type not in PRESSURE_CONVERSION: raise ValueError( f"Invalid 'from_type' value: {from_type!r} Supported values are:\n" + ", ".join(PRESSURE_CONVERSION) ) if to_type not in PRESSURE_CONVERSION: raise ValueError( f"Invalid 'to_type' value: {to_type!r}. Supported values are:\n" + ", ".join(PRESSURE_CONVERSION) ) return ( value * PRESSURE_CONVERSION[from_type].from_ * PRESSURE_CONVERSION[to_type].to )
[ "def", "pressure_conversion", "(", "value", ":", "float", ",", "from_type", ":", "str", ",", "to_type", ":", "str", ")", "->", "float", ":", "if", "from_type", "not", "in", "PRESSURE_CONVERSION", ":", "raise", "ValueError", "(", "f\"Invalid 'from_type' value: {f...
https://github.com/TheAlgorithms/Python/blob/9af2eef9b3761bf51580dedfb6fa7136ca0c5c2c/conversions/pressure_conversions.py#L38-L79
nlloyd/SubliminalCollaborator
5c619e17ddbe8acb9eea8996ec038169ddcd50a1
libs/twisted/news/database.py
python
INewsStorage.articleRequest
(group, index, id = None)
Returns a deferred whose callback will be passed a file-like object containing the full article text (headers and body) for the article of the specified index in the specified group, and whose errback will be invoked if the article or group does not exist. If id is not None, index is ignored and the article with the given Message-ID will be returned instead, along with its index in the specified group.
Returns a deferred whose callback will be passed a file-like object containing the full article text (headers and body) for the article of the specified index in the specified group, and whose errback will be invoked if the article or group does not exist. If id is not None, index is ignored and the article with the given Message-ID will be returned instead, along with its index in the specified group.
[ "Returns", "a", "deferred", "whose", "callback", "will", "be", "passed", "a", "file", "-", "like", "object", "containing", "the", "full", "article", "text", "(", "headers", "and", "body", ")", "for", "the", "article", "of", "the", "specified", "index", "in...
def articleRequest(group, index, id = None): """ Returns a deferred whose callback will be passed a file-like object containing the full article text (headers and body) for the article of the specified index in the specified group, and whose errback will be invoked if the article or group does not exist. If id is not None, index is ignored and the article with the given Message-ID will be returned instead, along with its index in the specified group. """
[ "def", "articleRequest", "(", "group", ",", "index", ",", "id", "=", "None", ")", ":" ]
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/news/database.py#L167-L176
kevinw/pyflakes
b08949f6cc123eb96f05051a1e29abc8457a6799
pyflakes/checker.py
python
Checker.isDocstring
(self, node)
return isinstance(node, _ast.Str) or \ (isinstance(node, _ast.Expr) and isinstance(node.value, _ast.Str))
Determine if the given node is a docstring, as long as it is at the correct place in the node tree.
Determine if the given node is a docstring, as long as it is at the correct place in the node tree.
[ "Determine", "if", "the", "given", "node", "is", "a", "docstring", "as", "long", "as", "it", "is", "at", "the", "correct", "place", "in", "the", "node", "tree", "." ]
def isDocstring(self, node): """ Determine if the given node is a docstring, as long as it is at the correct place in the node tree. """ return isinstance(node, _ast.Str) or \ (isinstance(node, _ast.Expr) and isinstance(node.value, _ast.Str))
[ "def", "isDocstring", "(", "self", ",", "node", ")", ":", "return", "isinstance", "(", "node", ",", "_ast", ".", "Str", ")", "or", "(", "isinstance", "(", "node", ",", "_ast", ".", "Expr", ")", "and", "isinstance", "(", "node", ".", "value", ",", "...
https://github.com/kevinw/pyflakes/blob/b08949f6cc123eb96f05051a1e29abc8457a6799/pyflakes/checker.py#L340-L347
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/numbers.py
python
Integral.__rxor__
(self, other)
other ^ self
other ^ self
[ "other", "^", "self" ]
def __rxor__(self, other): """other ^ self""" raise NotImplementedError
[ "def", "__rxor__", "(", "self", ",", "other", ")", ":", "raise", "NotImplementedError" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/numbers.py#L355-L357
jhpyle/docassemble
b90c84e57af59aa88b3404d44d0b125c70f832cc
docassemble_base/docassemble/base/util.py
python
DAFile.uses_acroform
(self)
return self.file_info.get('acroform', False)
Returns True if the file uses AcroForm, otherwise returns False.
Returns True if the file uses AcroForm, otherwise returns False.
[ "Returns", "True", "if", "the", "file", "uses", "AcroForm", "otherwise", "returns", "False", "." ]
def uses_acroform(self): """Returns True if the file uses AcroForm, otherwise returns False.""" if not hasattr(self, 'file_info'): self.retrieve() return self.file_info.get('acroform', False)
[ "def", "uses_acroform", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'file_info'", ")", ":", "self", ".", "retrieve", "(", ")", "return", "self", ".", "file_info", ".", "get", "(", "'acroform'", ",", "False", ")" ]
https://github.com/jhpyle/docassemble/blob/b90c84e57af59aa88b3404d44d0b125c70f832cc/docassemble_base/docassemble/base/util.py#L3883-L3887
junsukchoe/ADL
dab2e78163bd96970ec9ae41de62835332dbf4fe
tensorpack/dataflow/imgaug/imgproc.py
python
Hue.__init__
(self, range=(0, 180), rgb=True)
Args: range(list or tuple): range from which the applied hue offset is selected (maximum [-90,90] or [0,180]) rgb (bool): whether input is RGB or BGR.
Args: range(list or tuple): range from which the applied hue offset is selected (maximum [-90,90] or [0,180]) rgb (bool): whether input is RGB or BGR.
[ "Args", ":", "range", "(", "list", "or", "tuple", ")", ":", "range", "from", "which", "the", "applied", "hue", "offset", "is", "selected", "(", "maximum", "[", "-", "90", "90", "]", "or", "[", "0", "180", "]", ")", "rgb", "(", "bool", ")", ":", ...
def __init__(self, range=(0, 180), rgb=True): """ Args: range(list or tuple): range from which the applied hue offset is selected (maximum [-90,90] or [0,180]) rgb (bool): whether input is RGB or BGR. """ super(Hue, self).__init__() rgb = bool(rgb) self._init(locals())
[ "def", "__init__", "(", "self", ",", "range", "=", "(", "0", ",", "180", ")", ",", "rgb", "=", "True", ")", ":", "super", "(", "Hue", ",", "self", ")", ".", "__init__", "(", ")", "rgb", "=", "bool", "(", "rgb", ")", "self", ".", "_init", "(",...
https://github.com/junsukchoe/ADL/blob/dab2e78163bd96970ec9ae41de62835332dbf4fe/tensorpack/dataflow/imgaug/imgproc.py#L18-L26
Chaffelson/nipyapi
d3b186fd701ce308c2812746d98af9120955e810
nipyapi/nifi/models/provenance_event_dto.py
python
ProvenanceEventDTO.output_content_claim_container
(self, output_content_claim_container)
Sets the output_content_claim_container of this ProvenanceEventDTO. The container in which the output content claim lives. :param output_content_claim_container: The output_content_claim_container of this ProvenanceEventDTO. :type: str
Sets the output_content_claim_container of this ProvenanceEventDTO. The container in which the output content claim lives.
[ "Sets", "the", "output_content_claim_container", "of", "this", "ProvenanceEventDTO", ".", "The", "container", "in", "which", "the", "output", "content", "claim", "lives", "." ]
def output_content_claim_container(self, output_content_claim_container): """ Sets the output_content_claim_container of this ProvenanceEventDTO. The container in which the output content claim lives. :param output_content_claim_container: The output_content_claim_container of this ProvenanceEventDTO. :type: str """ self._output_content_claim_container = output_content_claim_container
[ "def", "output_content_claim_container", "(", "self", ",", "output_content_claim_container", ")", ":", "self", ".", "_output_content_claim_container", "=", "output_content_claim_container" ]
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/provenance_event_dto.py#L1022-L1031
mitogen-hq/mitogen
5b505f524a7ae170fe68613841ab92b299613d3f
ansible_mitogen/connection.py
python
optional_int
(value)
Convert `value` to an integer if it is not :data:`None`, otherwise return :data:`None`.
Convert `value` to an integer if it is not :data:`None`, otherwise return :data:`None`.
[ "Convert", "value", "to", "an", "integer", "if", "it", "is", "not", ":", "data", ":", "None", "otherwise", "return", ":", "data", ":", "None", "." ]
def optional_int(value): """ Convert `value` to an integer if it is not :data:`None`, otherwise return :data:`None`. """ try: return int(value) except (TypeError, ValueError): return None
[ "def", "optional_int", "(", "value", ")", ":", "try", ":", "return", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "None" ]
https://github.com/mitogen-hq/mitogen/blob/5b505f524a7ae170fe68613841ab92b299613d3f/ansible_mitogen/connection.py#L75-L83
almarklein/visvis
766ed97767b44a55a6ff72c742d7385e074d3d55
wobjects/sliceTextures.py
python
SliceTexture._GetData
(self)
return self._dataRef3D
_GetData() Get a reference to the raw data. For internal use.
_GetData() Get a reference to the raw data. For internal use.
[ "_GetData", "()", "Get", "a", "reference", "to", "the", "raw", "data", ".", "For", "internal", "use", "." ]
def _GetData(self): """ _GetData() Get a reference to the raw data. For internal use. """ return self._dataRef3D
[ "def", "_GetData", "(", "self", ")", ":", "return", "self", ".", "_dataRef3D" ]
https://github.com/almarklein/visvis/blob/766ed97767b44a55a6ff72c742d7385e074d3d55/wobjects/sliceTextures.py#L127-L133
OpenEndedGroup/Field
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
Contents/lib/python/cgitb.py
python
reset
()
return '''<!--: spam Content-Type: text/html <body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> <body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> --> </font> </font> </font> </script> </object> </blockquote> </pre> </table> </table> </table> </table> </table> </font> </font> </font>'''
Return a string that resets the CGI and browser to a known state.
Return a string that resets the CGI and browser to a known state.
[ "Return", "a", "string", "that", "resets", "the", "CGI", "and", "browser", "to", "a", "known", "state", "." ]
def reset(): """Return a string that resets the CGI and browser to a known state.""" return '''<!--: spam Content-Type: text/html <body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> <body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> --> </font> </font> </font> </script> </object> </blockquote> </pre> </table> </table> </table> </table> </table> </font> </font> </font>'''
[ "def", "reset", "(", ")", ":", "return", "'''<!--: spam\nContent-Type: text/html\n\n<body bgcolor=\"#f0f0f8\"><font color=\"#f0f0f8\" size=\"-5\"> -->\n<body bgcolor=\"#f0f0f8\"><font color=\"#f0f0f8\" size=\"-5\"> --> -->\n</font> </font> </font> </script> </object> </blockquote> </pre>\n</table> </tab...
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/cgitb.py#L30-L38
BichenWuUCB/SqueezeSeg
6dc53c8849eed91a4463ca22371474caeaab5827
src/nn_skeleton.py
python
ModelSkeleton._bilateral_filter_layer
( self, layer_name, inputs, thetas=[0.9, 0.01], sizes=[3, 5], stride=1, padding='SAME')
return out
Computing pairwise energy with a bilateral filter for CRF. Args: layer_name: layer name inputs: input tensor with shape [batch_size, zenith, azimuth, 2] where the last 2 elements are intensity and range of a lidar point. thetas: theta parameter for bilateral filter. sizes: filter size for zenith and azimuth dimension. strides: kernel strides. padding: padding. Returns: out: bilateral filter weight output with size [batch_size, zenith, azimuth, sizes[0]*sizes[1]-1, num_class]. Each [b, z, a, :, cls] represents filter weights around the center position for each class.
Computing pairwise energy with a bilateral filter for CRF.
[ "Computing", "pairwise", "energy", "with", "a", "bilateral", "filter", "for", "CRF", "." ]
def _bilateral_filter_layer( self, layer_name, inputs, thetas=[0.9, 0.01], sizes=[3, 5], stride=1, padding='SAME'): """Computing pairwise energy with a bilateral filter for CRF. Args: layer_name: layer name inputs: input tensor with shape [batch_size, zenith, azimuth, 2] where the last 2 elements are intensity and range of a lidar point. thetas: theta parameter for bilateral filter. sizes: filter size for zenith and azimuth dimension. strides: kernel strides. padding: padding. Returns: out: bilateral filter weight output with size [batch_size, zenith, azimuth, sizes[0]*sizes[1]-1, num_class]. Each [b, z, a, :, cls] represents filter weights around the center position for each class. """ assert padding == 'SAME', 'currently only supports "SAME" padding stategy' assert stride == 1, 'currently only supports striding of 1' assert sizes[0] % 2 == 1 and sizes[1] % 2 == 1, \ 'Currently only support odd filter size.' mc = self.mc theta_a, theta_r = thetas size_z, size_a = sizes pad_z, pad_a = size_z//2, size_a//2 half_filter_dim = (size_z*size_a)//2 batch, zenith, azimuth, in_channel = inputs.shape.as_list() # assert in_channel == 1, 'Only support input channel == 1' with tf.variable_scope(layer_name) as scope: condensing_kernel = tf.constant( util.condensing_matrix(size_z, size_a, in_channel), dtype=tf.float32, name='condensing_kernel' ) condensed_input = tf.nn.conv2d( inputs, condensing_kernel, [1, 1, stride, 1], padding=padding, name='condensed_input' ) # diff_intensity = tf.reshape( # inputs[:, :, :], [batch, zenith, azimuth, 1]) \ # - condensed_input[:, :, :, ::in_channel] diff_x = tf.reshape( inputs[:, :, :, 0], [batch, zenith, azimuth, 1]) \ - condensed_input[:, :, :, 0::in_channel] diff_y = tf.reshape( inputs[:, :, :, 1], [batch, zenith, azimuth, 1]) \ - condensed_input[:, :, :, 1::in_channel] diff_z = tf.reshape( inputs[:, :, :, 2], [batch, zenith, azimuth, 1]) \ - condensed_input[:, :, :, 2::in_channel] bi_filters = [] for cls in range(mc.NUM_CLASS): theta_a = mc.BILATERAL_THETA_A[cls] theta_r = mc.BILATERAL_THETA_R[cls] bi_filter = tf.exp(-(diff_x**2+diff_y**2+diff_z**2)/2/theta_r**2) bi_filters.append(bi_filter) out = tf.transpose( tf.stack(bi_filters), [1, 2, 3, 4, 0], name='bilateral_filter_weights' ) return out
[ "def", "_bilateral_filter_layer", "(", "self", ",", "layer_name", ",", "inputs", ",", "thetas", "=", "[", "0.9", ",", "0.01", "]", ",", "sizes", "=", "[", "3", ",", "5", "]", ",", "stride", "=", "1", ",", "padding", "=", "'SAME'", ")", ":", "assert...
https://github.com/BichenWuUCB/SqueezeSeg/blob/6dc53c8849eed91a4463ca22371474caeaab5827/src/nn_skeleton.py#L831-L903
benedekrozemberczki/GEMSEC
c023122bdafe88278cdbd24b7fcf9dafe8e95b34
src/calculation_helper.py
python
unit
(g, node_1, node_2)
return 1
Function to calculate the "unit" weight. :param g: NX graph. :param node_1: Node 1. of a pair. :param node_2: Node 2. of a pair.
Function to calculate the "unit" weight. :param g: NX graph. :param node_1: Node 1. of a pair. :param node_2: Node 2. of a pair.
[ "Function", "to", "calculate", "the", "unit", "weight", ".", ":", "param", "g", ":", "NX", "graph", ".", ":", "param", "node_1", ":", "Node", "1", ".", "of", "a", "pair", ".", ":", "param", "node_2", ":", "Node", "2", ".", "of", "a", "pair", "." ...
def unit(g, node_1, node_2): """ Function to calculate the "unit" weight. :param g: NX graph. :param node_1: Node 1. of a pair. :param node_2: Node 2. of a pair. """ return 1
[ "def", "unit", "(", "g", ",", "node_1", ",", "node_2", ")", ":", "return", "1" ]
https://github.com/benedekrozemberczki/GEMSEC/blob/c023122bdafe88278cdbd24b7fcf9dafe8e95b34/src/calculation_helper.py#L35-L42
huawei-noah/CV-Backbones
03e8cdfe92494a55ddfb11cc875ff2e1c33f91da
tnt_pytorch/pyramid_tnt.py
python
Attention.__init__
(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., sr_ratio=1)
[]
def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., sr_ratio=1): super().__init__() assert dim % num_heads == 0, f"dim {dim} should be divided by num_heads {num_heads}." self.dim = dim self.num_heads = num_heads head_dim = dim // num_heads self.scale = qk_scale or head_dim ** -0.5 self.q = nn.Linear(dim, dim, bias=qkv_bias) self.kv = nn.Linear(dim, dim * 2, bias=qkv_bias) self.attn_drop = nn.Dropout(attn_drop) self.proj = nn.Linear(dim, dim) self.proj_drop = nn.Dropout(proj_drop) self.sr_ratio = sr_ratio if sr_ratio > 1: self.pool = nn.AvgPool2d(sr_ratio, stride=sr_ratio) self.linear = nn.Linear(dim, dim) self.norm = nn.LayerNorm(dim)
[ "def", "__init__", "(", "self", ",", "dim", ",", "num_heads", "=", "8", ",", "qkv_bias", "=", "False", ",", "qk_scale", "=", "None", ",", "attn_drop", "=", "0.", ",", "proj_drop", "=", "0.", ",", "sr_ratio", "=", "1", ")", ":", "super", "(", ")", ...
https://github.com/huawei-noah/CV-Backbones/blob/03e8cdfe92494a55ddfb11cc875ff2e1c33f91da/tnt_pytorch/pyramid_tnt.py#L115-L134
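The constructor above mainly does head bookkeeping: the embedding dimension is split across heads and the attention logits are scaled by head_dim ** -0.5. A tiny sketch with illustrative numbers (not values taken from the record):

dim, num_heads = 384, 6  # illustrative sizes
assert dim % num_heads == 0, f"dim {dim} should be divided by num_heads {num_heads}."
head_dim = dim // num_heads   # 64 channels per head
scale = head_dim ** -0.5      # 0.125, used to scale q @ k^T before softmax
print(head_dim, scale)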
robotlearn/pyrobolearn
9cd7c060723fda7d2779fa255ac998c2c82b8436
pyrobolearn/utils/data_structures/orderedset.py
python
OrderedSet._check_index
(self, idx)
return idx
Check the given index: verify that it is within the range of the ordered set and, if it is negative, return the corresponding positive index.
Check the given index: verify that it is within the range of the ordered set and, if it is negative, return the corresponding positive index.
[ "Check", "the", "given", "index", ";", "if", "it", "is", "in", "the", "range", "of", "the", "ordered", "set", "and", "if", "it", "is", "negative", "return", "the", "corresponding", "positive", "index", "." ]
def _check_index(self, idx): """ Check the given index; if it is in the range of the ordered set, and if it is negative return the corresponding positive index. """ if not isinstance(idx, int): raise TypeError("idx should be an integer.") if idx > len(self._list) or idx < -len(self._list): return KeyError(idx) if idx < 0: idx = len(self._list) + idx return idx
[ "def", "_check_index", "(", "self", ",", "idx", ")", ":", "if", "not", "isinstance", "(", "idx", ",", "int", ")", ":", "raise", "TypeError", "(", "\"idx should be an integer.\"", ")", "if", "idx", ">", "len", "(", "self", ".", "_list", ")", "or", "idx"...
https://github.com/robotlearn/pyrobolearn/blob/9cd7c060723fda7d2779fa255ac998c2c82b8436/pyrobolearn/utils/data_structures/orderedset.py#L165-L176
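The method above normalizes a possibly negative index against the set length. A standalone sketch of the same rule (note that the record returns the KeyError instead of raising it, which looks unintentional; this sketch raises):

def check_index(idx, length):
    # Reject non-integers and out-of-range values, then map a negative
    # index to its positive equivalent.
    if not isinstance(idx, int):
        raise TypeError("idx should be an integer.")
    if idx > length or idx < -length:
        raise KeyError(idx)
    if idx < 0:
        idx = length + idx
    return idx

print(check_index(-1, 5))  # -> 4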
spulec/moto
a688c0032596a7dfef122b69a08f2bec3be2e481
moto/config/models.py
python
validate_tag_key
(tag_key, exception_param="tags.X.member.key")
Validates the tag key. :param tag_key: The tag key to check against. :param exception_param: The exception parameter to send over to help format the message. This is to reflect the difference between the tag and untag APIs. :return:
Validates the tag key.
[ "Validates", "the", "tag", "key", "." ]
def validate_tag_key(tag_key, exception_param="tags.X.member.key"): """Validates the tag key. :param tag_key: The tag key to check against. :param exception_param: The exception parameter to send over to help format the message. This is to reflect the difference between the tag and untag APIs. :return: """ # Validate that the key length is correct: if len(tag_key) > 128: raise TagKeyTooBig(tag_key, param=exception_param) # Validate that the tag key fits the proper Regex: # [\w\s_.:/=+\-@]+ SHOULD be the same as the Java regex on the AWS # documentation: [\p{L}\p{Z}\p{N}_.:/=+\-@]+ match = re.findall(r"[\w\s_.:/=+\-@]+", tag_key) # Kudos if you can come up with a better way of doing a global search :) if not match or len(match[0]) < len(tag_key): raise InvalidTagCharacters(tag_key, param=exception_param)
[ "def", "validate_tag_key", "(", "tag_key", ",", "exception_param", "=", "\"tags.X.member.key\"", ")", ":", "# Validate that the key length is correct:", "if", "len", "(", "tag_key", ")", ">", "128", ":", "raise", "TagKeyTooBig", "(", "tag_key", ",", "param", "=", ...
https://github.com/spulec/moto/blob/a688c0032596a7dfef122b69a08f2bec3be2e481/moto/config/models.py#L117-L136
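The validator combines a 128-character cap with a whitelist regex and rejects keys that the regex does not cover in full. A small boolean sketch of the same check (returning True/False instead of raising moto's exceptions):

import re

def tag_key_is_valid(tag_key):
    if len(tag_key) > 128:
        return False
    match = re.findall(r"[\w\s_.:/=+\-@]+", tag_key)
    # Valid only if the first match covers the whole key.
    return bool(match) and len(match[0]) == len(tag_key)

print(tag_key_is_valid("env:prod"))  # True
print(tag_key_is_valid("bad#key"))   # False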
princewen/leetcode_python
79e6e760e4d81824c96903e6c996630c24d01932
sort_by_leetcode/math/easy/7. Reverse Integer.py
python
Solution.reverse
(self, x)
return res if x > 0 else -res
:type x: int :rtype: int
:type x: int :rtype: int
[ ":", "type", "x", ":", "int", ":", "rtype", ":", "int" ]
def reverse(self, x): """ :type x: int :rtype: int """ n = x if x > 0 else -x res = 0 while n: res = res * 10 + n % 10 n = n / 10 if res > 0x7fffffff: return 0 return res if x > 0 else -res
[ "def", "reverse", "(", "self", ",", "x", ")", ":", "n", "=", "x", "if", "x", ">", "0", "else", "-", "x", "res", "=", "0", "while", "n", ":", "res", "=", "res", "*", "10", "+", "n", "%", "10", "n", "=", "n", "/", "10", "if", "res", ">", ...
https://github.com/princewen/leetcode_python/blob/79e6e760e4d81824c96903e6c996630c24d01932/sort_by_leetcode/math/easy/7. Reverse Integer.py#L23-L35
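The record peels digits off with % 10 and rebuilds the number, clamping to 0 on signed 32-bit overflow; it relies on Python 2 division. A Python 3 sketch of the same algorithm:

def reverse_int(x):
    n = x if x > 0 else -x
    res = 0
    while n:
        res = res * 10 + n % 10
        n //= 10                 # floor division replaces Python 2's "/"
    if res > 0x7FFFFFFF:         # reversed value no longer fits in int32
        return 0
    return res if x > 0 else -res

print(reverse_int(-123))        # -321
print(reverse_int(1534236469))  # 0, the reversal overflows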
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit - MAC OSX/scripts/sshbackdoors/rpyc/experimental/splitbrain.py
python
RemoteModule.__delattr__
(self, name, val)
return setattr(self.__currmod__, name, val)
[]
def __delattr__(self, name, val): return setattr(self.__currmod__, name, val)
[ "def", "__delattr__", "(", "self", ",", "name", ",", "val", ")", ":", "return", "setattr", "(", "self", ".", "__currmod__", ",", "name", ",", "val", ")" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit - MAC OSX/scripts/sshbackdoors/rpyc/experimental/splitbrain.py#L109-L110
ipython/ipython
c0abea7a6dfe52c1f74c9d0387d4accadba7cc14
IPython/terminal/debugger.py
python
TerminalPdb.cmdloop
(self, intro=None)
Repeatedly issue a prompt, accept input, parse an initial prefix off the received input, and dispatch to action methods, passing them the remainder of the line as argument. override the same methods from cmd.Cmd to provide prompt toolkit replacement.
Repeatedly issue a prompt, accept input, parse an initial prefix off the received input, and dispatch to action methods, passing them the remainder of the line as argument.
[ "Repeatedly", "issue", "a", "prompt", "accept", "input", "parse", "an", "initial", "prefix", "off", "the", "received", "input", "and", "dispatch", "to", "action", "methods", "passing", "them", "the", "remainder", "of", "the", "line", "as", "argument", "." ]
def cmdloop(self, intro=None): """Repeatedly issue a prompt, accept input, parse an initial prefix off the received input, and dispatch to action methods, passing them the remainder of the line as argument. override the same methods from cmd.Cmd to provide prompt toolkit replacement. """ if not self.use_rawinput: raise ValueError('Sorry ipdb does not support use_rawinput=False') # In order to make sure that prompt, which uses asyncio doesn't # interfere with applications in which it's used, we always run the # prompt itself in a different thread (we can't start an event loop # within an event loop). This new thread won't have any event loop # running, and here we run our prompt-loop. self.preloop() try: if intro is not None: self.intro = intro if self.intro: print(self.intro, file=self.stdout) stop = None while not stop: if self.cmdqueue: line = self.cmdqueue.pop(0) else: self._ptcomp.ipy_completer.namespace = self.curframe_locals self._ptcomp.ipy_completer.global_namespace = self.curframe.f_globals # Run the prompt in a different thread. try: line = self.thread_executor.submit(self.pt_app.prompt).result() except EOFError: line = "EOF" line = self.precmd(line) stop = self.onecmd(line) stop = self.postcmd(stop, line) self.postloop() except Exception: raise
[ "def", "cmdloop", "(", "self", ",", "intro", "=", "None", ")", ":", "if", "not", "self", ".", "use_rawinput", ":", "raise", "ValueError", "(", "'Sorry ipdb does not support use_rawinput=False'", ")", "# In order to make sure that prompt, which uses asyncio doesn't", "# in...
https://github.com/ipython/ipython/blob/c0abea7a6dfe52c1f74c9d0387d4accadba7cc14/IPython/terminal/debugger.py#L91-L132
vmware/pyvcloud
d72c615fa41b8ea5ab049a929e18d8ba6460fc59
pyvcloud/vcd/client.py
python
find_link
(resource, rel, media_type, fail_if_absent=True, name=None)
Returns the link of the specified rel and type in the resource. :param lxml.objectify.ObjectifiedElement resource: the resource with the link. :param RelationType rel: the rel of the desired link. :param str media_type: media type of content. :param bool fail_if_absent: if True raise an exception if there's not exactly one link of the specified rel and media type. :return: an object containing Link XML element representing the desired link or None if no such link is present and fail_if_absent is False. :rtype: lxml.objectify.ObjectifiedElement :raises MissingLinkException: if no link of the specified rel and media type is found :raises MultipleLinksException: if multiple links of the specified rel and media type are found
Returns the link of the specified rel and type in the resource.
[ "Returns", "the", "link", "of", "the", "specified", "rel", "and", "type", "in", "the", "resource", "." ]
def find_link(resource, rel, media_type, fail_if_absent=True, name=None): """Returns the link of the specified rel and type in the resource. :param lxml.objectify.ObjectifiedElement resource: the resource with the link. :param RelationType rel: the rel of the desired link. :param str media_type: media type of content. :param bool fail_if_absent: if True raise an exception if there's not exactly one link of the specified rel and media type. :return: an object containing Link XML element representing the desired link or None if no such link is present and fail_if_absent is False. :rtype: lxml.objectify.ObjectifiedElement :raises MissingLinkException: if no link of the specified rel and media type is found :raises MultipleLinksException: if multiple links of the specified rel and media type are found """ links = get_links(resource, rel, media_type, name) num_links = len(links) if num_links == 0: if fail_if_absent: raise MissingLinkException(resource.get('href'), rel, media_type) else: return None elif num_links == 1: return links[0] else: raise MultipleLinksException(resource.get('href'), rel, media_type)
[ "def", "find_link", "(", "resource", ",", "rel", ",", "media_type", ",", "fail_if_absent", "=", "True", ",", "name", "=", "None", ")", ":", "links", "=", "get_links", "(", "resource", ",", "rel", ",", "media_type", ",", "name", ")", "num_links", "=", "...
https://github.com/vmware/pyvcloud/blob/d72c615fa41b8ea5ab049a929e18d8ba6460fc59/pyvcloud/vcd/client.py#L1809-L1839
jupyter/nbgrader
1ae2886e4e734554d8667c6e86861e83cc161451
nbgrader/api.py
python
Gradebook.find_submission
(self, assignment: str, student: str)
return submission
Find a student's submission for a given assignment. Parameters ---------- assignment : string the name of an assignment student : string the unique id of a student Returns ------- submission : :class:`~nbgrader.api.SubmittedAssignment`
Find a student's submission for a given assignment.
[ "Find", "a", "student", "s", "submission", "for", "a", "given", "assignment", "." ]
def find_submission(self, assignment: str, student: str) -> SubmittedAssignment: """Find a student's submission for a given assignment. Parameters ---------- assignment : string the name of an assignment student : string the unique id of a student Returns ------- submission : :class:`~nbgrader.api.SubmittedAssignment` """ try: submission = self.db.query(SubmittedAssignment)\ .join(Assignment, Assignment.id == SubmittedAssignment.assignment_id)\ .join(Student, Student.id == SubmittedAssignment.student_id)\ .filter(Assignment.name == assignment, Student.id == student)\ .one() except NoResultFound: raise MissingEntry("No such submission: {} for {}".format( assignment, student)) return submission
[ "def", "find_submission", "(", "self", ",", "assignment", ":", "str", ",", "student", ":", "str", ")", "->", "SubmittedAssignment", ":", "try", ":", "submission", "=", "self", ".", "db", ".", "query", "(", "SubmittedAssignment", ")", ".", "join", "(", "A...
https://github.com/jupyter/nbgrader/blob/1ae2886e4e734554d8667c6e86861e83cc161451/nbgrader/api.py#L2283-L2309
ray-project/ray
703c1610348615dcb8c2d141a0c46675084660f5
rllib/examples/models/rnn_spy_model.py
python
SpyLayer.spy
(inputs, seq_lens, h_in, c_in, h_out, c_out)
return SpyLayer.output
The actual spy operation: Store inputs in internal_kv.
The actual spy operation: Store inputs in internal_kv.
[ "The", "actual", "spy", "operation", ":", "Store", "inputs", "in", "internal_kv", "." ]
def spy(inputs, seq_lens, h_in, c_in, h_out, c_out): """The actual spy operation: Store inputs in internal_kv.""" if len(inputs) == 1: # don't capture inference inputs return SpyLayer.output # TF runs this function in an isolated context, so we have to use # redis to communicate back to our suite ray.experimental.internal_kv._internal_kv_put( "rnn_spy_in_{}".format(RNNSpyModel.capture_index), pickle.dumps({ "sequences": inputs, "seq_lens": seq_lens, "state_in": [h_in, c_in], "state_out": [h_out, c_out] }), overwrite=True) RNNSpyModel.capture_index += 1 return SpyLayer.output
[ "def", "spy", "(", "inputs", ",", "seq_lens", ",", "h_in", ",", "c_in", ",", "h_out", ",", "c_out", ")", ":", "if", "len", "(", "inputs", ")", "==", "1", ":", "# don't capture inference inputs", "return", "SpyLayer", ".", "output", "# TF runs this function i...
https://github.com/ray-project/ray/blob/703c1610348615dcb8c2d141a0c46675084660f5/rllib/examples/models/rnn_spy_model.py#L49-L67
vstinner/hachoir
8fb142ed4ab8e3603e5b613a75714f79b372b3fe
hachoir/parser/archive/rar.py
python
formatRARVersion
(field)
return "%u.%u" % divmod(field.value, 10)
Decodes the RAR version stored on 1 byte
Decodes the RAR version stored on 1 byte
[ "Decodes", "the", "RAR", "version", "stored", "on", "1", "byte" ]
def formatRARVersion(field): """ Decodes the RAR version stored on 1 byte """ return "%u.%u" % divmod(field.value, 10)
[ "def", "formatRARVersion", "(", "field", ")", ":", "return", "\"%u.%u\"", "%", "divmod", "(", "field", ".", "value", ",", "10", ")" ]
https://github.com/vstinner/hachoir/blob/8fb142ed4ab8e3603e5b613a75714f79b372b3fe/hachoir/parser/archive/rar.py#L61-L65
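The helper relies on divmod to split the packed version byte into major and minor digits, for example:

# divmod(value, 10) returns (value // 10, value % 10),
# so a stored byte of 29 renders as RAR format version "2.9".
print("%u.%u" % divmod(29, 10))  # -> 2.9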
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/path.py
python
Path.circle
(cls, center=(0., 0.), radius=1., readonly=False)
return Path(vertices * radius + center, codes, readonly=readonly)
Return a Path representing a circle of a given radius and center. Parameters ---------- center : pair of floats The center of the circle. Default ``(0, 0)``. radius : float The radius of the circle. Default is 1. readonly : bool Whether the created path should have the "readonly" argument set when creating the Path instance. Notes ----- The circle is approximated using cubic Bezier curves. This uses 8 splines around the circle using the approach presented here: Lancaster, Don. `Approximating a Circle or an Ellipse Using Four Bezier Cubic Splines <http://www.tinaja.com/glib/ellipse4.pdf>`_.
Return a Path representing a circle of a given radius and center.
[ "Return", "a", "Path", "representing", "a", "circle", "of", "a", "given", "radius", "and", "center", "." ]
def circle(cls, center=(0., 0.), radius=1., readonly=False): """ Return a Path representing a circle of a given radius and center. Parameters ---------- center : pair of floats The center of the circle. Default ``(0, 0)``. radius : float The radius of the circle. Default is 1. readonly : bool Whether the created path should have the "readonly" argument set when creating the Path instance. Notes ----- The circle is approximated using cubic Bezier curves. This uses 8 splines around the circle using the approach presented here: Lancaster, Don. `Approximating a Circle or an Ellipse Using Four Bezier Cubic Splines <http://www.tinaja.com/glib/ellipse4.pdf>`_. """ MAGIC = 0.2652031 SQRTHALF = np.sqrt(0.5) MAGIC45 = SQRTHALF * MAGIC vertices = np.array([[0.0, -1.0], [MAGIC, -1.0], [SQRTHALF-MAGIC45, -SQRTHALF-MAGIC45], [SQRTHALF, -SQRTHALF], [SQRTHALF+MAGIC45, -SQRTHALF+MAGIC45], [1.0, -MAGIC], [1.0, 0.0], [1.0, MAGIC], [SQRTHALF+MAGIC45, SQRTHALF-MAGIC45], [SQRTHALF, SQRTHALF], [SQRTHALF-MAGIC45, SQRTHALF+MAGIC45], [MAGIC, 1.0], [0.0, 1.0], [-MAGIC, 1.0], [-SQRTHALF+MAGIC45, SQRTHALF+MAGIC45], [-SQRTHALF, SQRTHALF], [-SQRTHALF-MAGIC45, SQRTHALF-MAGIC45], [-1.0, MAGIC], [-1.0, 0.0], [-1.0, -MAGIC], [-SQRTHALF-MAGIC45, -SQRTHALF+MAGIC45], [-SQRTHALF, -SQRTHALF], [-SQRTHALF+MAGIC45, -SQRTHALF-MAGIC45], [-MAGIC, -1.0], [0.0, -1.0], [0.0, -1.0]], dtype=float) codes = [cls.CURVE4] * 26 codes[0] = cls.MOVETO codes[-1] = cls.CLOSEPOLY return Path(vertices * radius + center, codes, readonly=readonly)
[ "def", "circle", "(", "cls", ",", "center", "=", "(", "0.", ",", "0.", ")", ",", "radius", "=", "1.", ",", "readonly", "=", "False", ")", ":", "MAGIC", "=", "0.2652031", "SQRTHALF", "=", "np", ".", "sqrt", "(", "0.5", ")", "MAGIC45", "=", "SQRTHA...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/path.py#L717-L785
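A short usage sketch for this classmethod, assuming a matplotlib version that ships Path.circle (the eight-spline Bezier approximation described in the docstring):

import matplotlib.pyplot as plt
from matplotlib.path import Path
from matplotlib.patches import PathPatch

circle = Path.circle(center=(0.5, 0.5), radius=0.25)  # Bezier approximation of a circle
fig, ax = plt.subplots()
ax.add_patch(PathPatch(circle, facecolor="none", edgecolor="tab:blue"))
ax.set_aspect("equal")
plt.show()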
bjmayor/hacker
e3ce2ad74839c2733b27dac6c0f495e0743e1866
venv/lib/python3.5/site-packages/pip/utils/outdated.py
python
pip_version_check
(session)
Check for an update for pip. Limit the frequency of checks to once per week. State is stored either in the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix of the pip script path.
Check for an update for pip.
[ "Check", "for", "an", "update", "for", "pip", "." ]
def pip_version_check(session): """Check for an update for pip. Limit the frequency of checks to once per week. State is stored either in the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix of the pip script path. """ installed_version = get_installed_version("pip") if installed_version is None: return pip_version = packaging_version.parse(installed_version) pypi_version = None try: state = load_selfcheck_statefile() current_time = datetime.datetime.utcnow() # Determine if we need to refresh the state if "last_check" in state.state and "pypi_version" in state.state: last_check = datetime.datetime.strptime( state.state["last_check"], SELFCHECK_DATE_FMT ) if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60: pypi_version = state.state["pypi_version"] # Refresh the version if we need to or just see if we need to warn if pypi_version is None: resp = session.get( PyPI.pip_json_url, headers={"Accept": "application/json"}, ) resp.raise_for_status() pypi_version = [ v for v in sorted( list(resp.json()["releases"]), key=packaging_version.parse, ) if not packaging_version.parse(v).is_prerelease ][-1] # save that we've performed a check state.save(pypi_version, current_time) remote_version = packaging_version.parse(pypi_version) # Determine if our pypi_version is older if (pip_version < remote_version and pip_version.base_version != remote_version.base_version): # Advise "python -m pip" on Windows to avoid issues # with overwriting pip.exe. if WINDOWS: pip_cmd = "python -m pip" else: pip_cmd = "pip" logger.warning( "You are using pip version %s, however version %s is " "available.\nYou should consider upgrading via the " "'%s install --upgrade pip' command.", pip_version, pypi_version, pip_cmd ) except Exception: logger.debug( "There was an error checking the latest version of pip", exc_info=True, )
[ "def", "pip_version_check", "(", "session", ")", ":", "installed_version", "=", "get_installed_version", "(", "\"pip\"", ")", "if", "installed_version", "is", "None", ":", "return", "pip_version", "=", "packaging_version", ".", "parse", "(", "installed_version", ")"...
https://github.com/bjmayor/hacker/blob/e3ce2ad74839c2733b27dac6c0f495e0743e1866/venv/lib/python3.5/site-packages/pip/utils/outdated.py#L95-L162
getpatchwork/patchwork
60a7b11d12f9e1a6bd08d787d37066c8d89a52ae
patchwork/management/commands/cron.py
python
Command.handle
(self, *args, **kwargs)
[]
def handle(self, *args, **kwargs): errors = send_notifications() for (recipient, error) in errors: self.stderr.write("Failed sending to %s: %s" % (recipient.email, error)) expire_notifications()
[ "def", "handle", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "errors", "=", "send_notifications", "(", ")", "for", "(", "recipient", ",", "error", ")", "in", "errors", ":", "self", ".", "stderr", ".", "write", "(", "\"Failed sen...
https://github.com/getpatchwork/patchwork/blob/60a7b11d12f9e1a6bd08d787d37066c8d89a52ae/patchwork/management/commands/cron.py#L16-L22
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/urllib/parse.py
python
splitattr
(url)
return words[0], words[1:]
splitattr('/path;attr1=value1;attr2=value2;...') -> '/path', ['attr1=value1', 'attr2=value2', ...].
splitattr('/path;attr1=value1;attr2=value2;...') -> '/path', ['attr1=value1', 'attr2=value2', ...].
[ "splitattr", "(", "/", "path", ";", "attr1", "=", "value1", ";", "attr2", "=", "value2", ";", "...", ")", "-", ">", "/", "path", "[", "attr1", "=", "value1", "attr2", "=", "value2", "...", "]", "." ]
def splitattr(url): """splitattr('/path;attr1=value1;attr2=value2;...') -> '/path', ['attr1=value1', 'attr2=value2', ...].""" words = url.split(';') return words[0], words[1:]
[ "def", "splitattr", "(", "url", ")", ":", "words", "=", "url", ".", "split", "(", "';'", ")", "return", "words", "[", "0", "]", ",", "words", "[", "1", ":", "]" ]
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/urllib/parse.py#L1059-L1063
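splitattr simply cuts on semicolons and keeps the first piece as the path; reproduced here only to show its behaviour:

def splitattr(url):
    words = url.split(';')
    return words[0], words[1:]

print(splitattr('/ftp/file;type=i'))
# -> ('/ftp/file', ['type=i'])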
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/v1_node_system_info.py
python
V1NodeSystemInfo.os_image
(self, os_image)
Sets the os_image of this V1NodeSystemInfo. OS Image reported by the node from /etc/os-release (e.g. Debian GNU/Linux 7 (wheezy)). # noqa: E501 :param os_image: The os_image of this V1NodeSystemInfo. # noqa: E501 :type: str
Sets the os_image of this V1NodeSystemInfo.
[ "Sets", "the", "os_image", "of", "this", "V1NodeSystemInfo", "." ]
def os_image(self, os_image): """Sets the os_image of this V1NodeSystemInfo. OS Image reported by the node from /etc/os-release (e.g. Debian GNU/Linux 7 (wheezy)). # noqa: E501 :param os_image: The os_image of this V1NodeSystemInfo. # noqa: E501 :type: str """ if self.local_vars_configuration.client_side_validation and os_image is None: # noqa: E501 raise ValueError("Invalid value for `os_image`, must not be `None`") # noqa: E501 self._os_image = os_image
[ "def", "os_image", "(", "self", ",", "os_image", ")", ":", "if", "self", ".", "local_vars_configuration", ".", "client_side_validation", "and", "os_image", "is", "None", ":", "# noqa: E501", "raise", "ValueError", "(", "\"Invalid value for `os_image`, must not be `None`...
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_node_system_info.py#L302-L313
tensorflow/estimator
edb6e18703a0fa00182bcc72a056da6f5ce45e70
tensorflow_estimator/python/estimator/tpu/util.py
python
parse_iterations_per_loop
(iterations_per_loop)
return IterationsPerLoopCounter(value, unit_value)
Parses the `iterations_per_loop` value. The parser expects the value of `iterations_per_loop` to be a positive integer with unit `count` or a time-based value `<N><s|m|h>` where <N> is any positive integer and `s`, `m`, `h` are units of time in seconds, minutes, hours respectively. Examples of valid values: `3600s`, `60m`, `1h`. Args: iterations_per_loop: Number of iterations or time allotted to spend on the per-device loop. Returns: A dictionary of `value` and `unit`. The `unit` value can be either a raw `count`, or time in `seconds`. { "value": <positive-integer>, "unit": <unit: `count` | `seconds`> }
Parses the `iterations_per_loop` value.
[ "Parses", "the", "iterations_per_loop", "value", "." ]
def parse_iterations_per_loop(iterations_per_loop): """Parses the `iterations_per_loop` value. The parser expects the value of the `iterations_per_loop` value to be a positive integer value with unit:`count` or time-based value `<N><s|m|h>` where <N> is any positive integer and `s`, `m`, `h` are unit of time in seconds, minutes, hours respectively. Examples of valid values: `3600s`, `60m` , `1h`. Args: iterations_per_loop: Number of iterations or time alloted to spend on per device loop. Returns: A dictionary of `value` and `unit`. The `unit` value can be either a raw `count`, or time in `seconds`. { "value": <positive-integer>, "unit": <unit: `count` | `seconds`> } """ m = _ITERATIONS_PER_LOOP_VALUE_REGEX.match(str(iterations_per_loop)) if m is None: raise ValueError( 'Invalid TPUConfig `iterations_per_loop` value. Value must be positive ' 'integer value or time-based value `<N><s|m|h>` where <N> is any' 'positive integer and `s`, `m`, `h` are unit of time in seconds, ' 'minutes, hours respectively. Examples of valid values: `3600s`, `60m`,' ' `1h`.') unit_value = 'seconds' if m.group('suffix') in ['h', 'm', 's'] else 'count' value = int(m.group('value')) if m.group('suffix') == 'm': value *= 60 elif m.group('suffix') == 'h': value *= 3600 return IterationsPerLoopCounter(value, unit_value)
[ "def", "parse_iterations_per_loop", "(", "iterations_per_loop", ")", ":", "m", "=", "_ITERATIONS_PER_LOOP_VALUE_REGEX", ".", "match", "(", "str", "(", "iterations_per_loop", ")", ")", "if", "m", "is", "None", ":", "raise", "ValueError", "(", "'Invalid TPUConfig `ite...
https://github.com/tensorflow/estimator/blob/edb6e18703a0fa00182bcc72a056da6f5ce45e70/tensorflow_estimator/python/estimator/tpu/util.py#L44-L79
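The parser accepts a bare count or a duration with an s/m/h suffix and converts time values to seconds. The module-level _ITERATIONS_PER_LOOP_VALUE_REGEX is not shown in the record, so the sketch below uses its own assumed pattern and returns a plain tuple instead of IterationsPerLoopCounter:

import re

# Hypothetical re-creation of the value/suffix pattern.
PATTERN = re.compile(r"^(?P<value>[1-9]\d*)(?P<suffix>[shm]?)$")

def parse_iterations(text):
    m = PATTERN.match(str(text))
    if m is None:
        raise ValueError("expected a positive integer, optionally suffixed with s, m or h")
    value = int(m.group("value"))
    suffix = m.group("suffix")
    if suffix == "m":
        value *= 60
    elif suffix == "h":
        value *= 3600
    unit = "seconds" if suffix in ("s", "m", "h") else "count"
    return value, unit

print(parse_iterations("60m"))  # (3600, 'seconds')
print(parse_iterations(200))    # (200, 'count')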
polyaxon/polyaxon
e28d82051c2b61a84d06ce4d2388a40fc8565469
src/core/polyaxon/cli/artifacts.py
python
delete
(ctx, project, version)
Delete an artifact version. \b $ polyaxon artifacts delete // delete `latest` in current project \b $ polyaxon artifacts delete --project=my-project --version=test-version \b $ polyaxon artifacts get -p owner/my-project -ver rc12
Delete an artifact version. \b $ polyaxon artifacts delete // delete `latest` in current project
[ "Delete", "a", "artifact", "version", ".", "\\", "b", "$", "polyaxon", "artifacts", "delete", "//", "delete", "latest", "in", "current", "project" ]
def delete(ctx, project, version): """Delete a artifact version. \b $ polyaxon artifacts delete // delete `latest` in current project \b $ polyaxon artifacts delete --project=my-project --version=test-version \b $ polyaxon artifacts get -p owner/my-project -ver rc12 """ version = version or ctx.obj.get("version") or "latest" owner, project_name = get_project_or_local( project or ctx.obj.get("project"), is_cli=True ) delete_project_version( owner=owner, project_name=project_name, kind=V1ProjectVersionKind.ARTIFACT, version=version, )
[ "def", "delete", "(", "ctx", ",", "project", ",", "version", ")", ":", "version", "=", "version", "or", "ctx", ".", "obj", ".", "get", "(", "\"version\"", ")", "or", "\"latest\"", "owner", ",", "project_name", "=", "get_project_or_local", "(", "project", ...
https://github.com/polyaxon/polyaxon/blob/e28d82051c2b61a84d06ce4d2388a40fc8565469/src/core/polyaxon/cli/artifacts.py#L207-L227
python/cpython
e13cdca0f5224ec4e23bdd04bb3120506964bc8b
Lib/importlib/_bootstrap_external.py
python
cache_from_source
(path, debug_override=None, *, optimization=None)
return _path_join(head, _PYCACHE, filename)
Given the path to a .py file, return the path to its .pyc file. The .py file does not need to exist; this simply returns the path to the .pyc file calculated as if the .py file were imported. The 'optimization' parameter controls the presumed optimization level of the bytecode file. If 'optimization' is not None, the string representation of the argument is taken and verified to be alphanumeric (else ValueError is raised). The debug_override parameter is deprecated. If debug_override is not None, a True value is the same as setting 'optimization' to the empty string while a False value is equivalent to setting 'optimization' to '1'. If sys.implementation.cache_tag is None then NotImplementedError is raised.
Given the path to a .py file, return the path to its .pyc file.
[ "Given", "the", "path", "to", "a", ".", "py", "file", "return", "the", "path", "to", "its", ".", "pyc", "file", "." ]
def cache_from_source(path, debug_override=None, *, optimization=None): """Given the path to a .py file, return the path to its .pyc file. The .py file does not need to exist; this simply returns the path to the .pyc file calculated as if the .py file were imported. The 'optimization' parameter controls the presumed optimization level of the bytecode file. If 'optimization' is not None, the string representation of the argument is taken and verified to be alphanumeric (else ValueError is raised). The debug_override parameter is deprecated. If debug_override is not None, a True value is the same as setting 'optimization' to the empty string while a False value is equivalent to setting 'optimization' to '1'. If sys.implementation.cache_tag is None then NotImplementedError is raised. """ if debug_override is not None: _warnings.warn('the debug_override parameter is deprecated; use ' "'optimization' instead", DeprecationWarning) if optimization is not None: message = 'debug_override or optimization must be set to None' raise TypeError(message) optimization = '' if debug_override else 1 path = _os.fspath(path) head, tail = _path_split(path) base, sep, rest = tail.rpartition('.') tag = sys.implementation.cache_tag if tag is None: raise NotImplementedError('sys.implementation.cache_tag is None') almost_filename = ''.join([(base if base else rest), sep, tag]) if optimization is None: if sys.flags.optimize == 0: optimization = '' else: optimization = sys.flags.optimize optimization = str(optimization) if optimization != '': if not optimization.isalnum(): raise ValueError('{!r} is not alphanumeric'.format(optimization)) almost_filename = '{}.{}{}'.format(almost_filename, _OPT, optimization) filename = almost_filename + BYTECODE_SUFFIXES[0] if sys.pycache_prefix is not None: # We need an absolute path to the py file to avoid the possibility of # collisions within sys.pycache_prefix, if someone has two different # `foo/bar.py` on their system and they import both of them using the # same sys.pycache_prefix. Let's say sys.pycache_prefix is # `C:\Bytecode`; the idea here is that if we get `Foo\Bar`, we first # make it absolute (`C:\Somewhere\Foo\Bar`), then make it root-relative # (`Somewhere\Foo\Bar`), so we end up placing the bytecode file in an # unambiguous `C:\Bytecode\Somewhere\Foo\Bar\`. if not _path_isabs(head): head = _path_join(_os.getcwd(), head) # Strip initial drive from a Windows path. We know we have an absolute # path here, so the second part of the check rules out a POSIX path that # happens to contain a colon at the second character. if head[1] == ':' and head[0] not in path_separators: head = head[2:] # Strip initial path separator from `head` to complete the conversion # back to a root-relative path before joining. return _path_join( sys.pycache_prefix, head.lstrip(path_separators), filename, ) return _path_join(head, _PYCACHE, filename)
[ "def", "cache_from_source", "(", "path", ",", "debug_override", "=", "None", ",", "*", ",", "optimization", "=", "None", ")", ":", "if", "debug_override", "is", "not", "None", ":", "_warnings", ".", "warn", "(", "'the debug_override parameter is deprecated; use '"...
https://github.com/python/cpython/blob/e13cdca0f5224ec4e23bdd04bb3120506964bc8b/Lib/importlib/_bootstrap_external.py#L412-L480
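This private helper backs the public importlib.util.cache_from_source API; a quick usage sketch (the cache tag in the output depends on the interpreter that runs it):

import importlib.util

# Maps a source path to its PEP 3147 __pycache__ location,
# e.g. 'pkg/__pycache__/mod.cpython-311.pyc' on CPython 3.11.
print(importlib.util.cache_from_source("pkg/mod.py"))

# An explicit optimization level is encoded as an ".opt-N" marker in the name.
print(importlib.util.cache_from_source("pkg/mod.py", optimization="1"))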
tdamdouni/Pythonista
3e082d53b6b9b501a3c8cf3251a8ad4c8be9c2ad
weather/WeatherAnywhereScene-coomlata.py
python
SceneViewer.format_plot_weather
(self, forecast)
return twf, icon_y, y1_y2
If this is Pythonista 2 and an iPhone 6+ or better, there is more screen to work with, as Pythonista recognizes the native screen resolutions of the iOS device being used.
If this is Pythonista 2 and an iPhone 6+ or better, there is more screen to work with, as Pythonista recognizes the native screen resolutions of the iOS device being used.
[ "If", "this", "is", "Pythonista", "2", "and", "an", "iPhone", "6", "+", "or", "better", "there", "is", "more", "screen", "to", "work", "with", "as", "Pythonista", "recognizes", "the", "native", "screen", "eesolutions", "of", "the", "iOS", "device", "being"...
def format_plot_weather(self, forecast): ''' If this is Pythonista 2, and an iPhone 6+ or better there is more screen to work with, as Pythonista recognizes the native screen eesolutions of the iOS device being used. ''' if py_ver == '2' and is_P6: # Variables to aid in plotting coordinates for text & icons wrap_len = 75 icon_y = [-245] y1_y2 = [-245] else: wrap_len = 58 icon_y = [-410] y1_y2 = [-410] new_line = count = z = blanks = x = 0 twf = [] blank_line = [] section_lines = [] forecast = forecast.split('\n') # Loop through each forecast line for line in forecast: # Look for long lines if len(line) > wrap_len and line.find('Precip:') == -1: # Estimate how many wrapped lines here new_line = int((len(line)/wrap_len)) # Wrap the text line = textwrap.fill(line,width = wrap_len) # Append everything to a new list twf.append(line) # Get new line count after added wrap count += 1 + new_line # Clear value for next computation new_line = 0 # Blank lines if not line: # Record line # blank_line.append(count) # If 2 line numbers exist in list if len(blank_line) == 2: ''' Subtract the difference between the 2 blank lines, which gives you the number of lines in a forecast section, multiply that by 11.35, which gives you an equivalent y point to match the end line number of the section, & subtract that from the icon y anchor point on screen to get the approx point to move the icon on the y axis to align it with it's forecast text. The point is then stored in a list to use as one of the y coordinates for the icons in this forecast. ''' icon_y.append(icon_y[z] - ((blank_line[1] - blank_line[0]) * 11.35)) # Clear list for next section of text blank_line = [] # Store blank line number that starts next forecast section blank_line.append(count) # Increment icon_y list counter z += 1 # Increment blank line counter blanks += 1 ''' Odd numbered blank lines indicate the end of one forecast date section & the start of another so we use the same process as above to determine the y points to draw section lines on the screen. ''' if self.is_odd(blanks): section_lines.append(count) if len(section_lines) == 2: line_factor = 11.5 num_lines = section_lines[1] - section_lines[0] #print num_lines if num_lines >= 12: line_factor = 11.63 y1_y2.append(y1_y2[x] - (num_lines) * line_factor) section_lines = [] section_lines.append(count) x += 1 twf = '\n'.join(twf) ''' Replace anchor point y value with y point for the icon that goes with the current weather section. ''' if py_ver == '2' and is_P6: icon_y = [110 if x == -245 else x for x in icon_y] else: icon_y = [35 if x == -410 else x for x in icon_y] return twf, icon_y, y1_y2
[ "def", "format_plot_weather", "(", "self", ",", "forecast", ")", ":", "if", "py_ver", "==", "'2'", "and", "is_P6", ":", "# Variables to aid in plotting coordinates for text & icons", "wrap_len", "=", "75", "icon_y", "=", "[", "-", "245", "]", "y1_y2", "=", "[", ...
https://github.com/tdamdouni/Pythonista/blob/3e082d53b6b9b501a3c8cf3251a8ad4c8be9c2ad/weather/WeatherAnywhereScene-coomlata.py#L526-L619
pyparsing/pyparsing
1ccf846394a055924b810faaf9628dac53633848
examples/pymicko.py
python
SymbolTable.insert_constant
(self, cname, ctype)
return index
Inserts a constant (or returns index if the constant already exists). Additionally, checks for range.
Inserts a constant (or returns index if the constant already exists). Additionally, checks for range.
[ "Inserts", "a", "constant", "(", "or", "returns", "index", "if", "the", "constant", "already", "exists", ")", "Additionally", "checks", "for", "range", "." ]
def insert_constant(self, cname, ctype): """ Inserts a constant (or returns index if the constant already exists) Additionally, checks for range. """ index = self.lookup_symbol(cname, stype=ctype) if index == None: num = int(cname) if ctype == SharedData.TYPES.INT: if (num < SharedData.MIN_INT) or (num > SharedData.MAX_INT): raise SemanticException( "Integer constant '%s' out of range" % cname ) elif ctype == SharedData.TYPES.UNSIGNED: if (num < 0) or (num > SharedData.MAX_UNSIGNED): raise SemanticException( "Unsigned constant '%s' out of range" % cname ) index = self.insert_symbol(cname, SharedData.KINDS.CONSTANT, ctype) return index
[ "def", "insert_constant", "(", "self", ",", "cname", ",", "ctype", ")", ":", "index", "=", "self", ".", "lookup_symbol", "(", "cname", ",", "stype", "=", "ctype", ")", "if", "index", "==", "None", ":", "num", "=", "int", "(", "cname", ")", "if", "c...
https://github.com/pyparsing/pyparsing/blob/1ccf846394a055924b810faaf9628dac53633848/examples/pymicko.py#L534-L553
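Beyond the lookup-or-insert, the method range-checks integer literals against SharedData's limits. A minimal sketch of just that check, with assumed 16-bit bounds standing in for the real constants:

# Assumed stand-ins for SharedData.MIN_INT / MAX_INT / MAX_UNSIGNED.
MIN_INT, MAX_INT = -32768, 32767
MAX_UNSIGNED = 65535

def check_constant_range(cname, ctype):
    num = int(cname)
    if ctype == "int" and not (MIN_INT <= num <= MAX_INT):
        raise ValueError("Integer constant '%s' out of range" % cname)
    if ctype == "unsigned" and not (0 <= num <= MAX_UNSIGNED):
        raise ValueError("Unsigned constant '%s' out of range" % cname)

check_constant_range("40000", "unsigned")   # passes
try:
    check_constant_range("40000", "int")
except ValueError as exc:
    print(exc)  # Integer constant '40000' out of range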
CGCookie/retopoflow
3d8b3a47d1d661f99ab0aeb21d31370bf15de35e
retopoflow/rf/rf_target.py
python
RetopoFlow_Target.select_edge_loop
(self, edge, only=True, **kwargs)
[]
def select_edge_loop(self, edge, only=True, **kwargs): eloop,connected = self.get_edge_loop(edge) self.rftarget.select(eloop, only=only, **kwargs)
[ "def", "select_edge_loop", "(", "self", ",", "edge", ",", "only", "=", "True", ",", "*", "*", "kwargs", ")", ":", "eloop", ",", "connected", "=", "self", ".", "get_edge_loop", "(", "edge", ")", "self", ".", "rftarget", ".", "select", "(", "eloop", ",...
https://github.com/CGCookie/retopoflow/blob/3d8b3a47d1d661f99ab0aeb21d31370bf15de35e/retopoflow/rf/rf_target.py#L741-L743
uber-research/UPSNet
aa8434e5a721ed217849607815304f68dfd7720a
lib/nn/optimizer.py
python
SGD.step
(self, lr, closure=None)
return loss
Performs a single optimization step. Arguments: closure (callable, optional): A closure that reevaluates the model and returns the loss.
Performs a single optimization step. Arguments: closure (callable, optional): A closure that reevaluates the model and returns the loss.
[ "Performs", "a", "single", "optimization", "step", ".", "Arguments", ":", "closure", "(", "callable", "optional", ")", ":", "A", "closure", "that", "reevaluates", "the", "model", "and", "returns", "the", "loss", "." ]
def step(self, lr, closure=None): """Performs a single optimization step. Arguments: closure (callable, optional): A closure that reevaluates the model and returns the loss. """ loss = None if closure is not None: loss = closure() for group in self.param_groups: weight_decay = group['weight_decay'] momentum = group['momentum'] nesterov = group['nesterov'] for p in group['params']: if p.grad is None: continue d_p = p.grad.data if weight_decay != 0: d_p.add_(weight_decay, p.data) if momentum != 0: param_state = self.state[p] if 'momentum_buffer' not in param_state: buf = param_state['momentum_buffer'] = p.data.new().resize_as_(p.data).zero_() buf.mul_(momentum).add_(group['lr'] * lr, d_p) else: buf = param_state['momentum_buffer'] buf.mul_(momentum).add_(group['lr'] * lr, d_p) if nesterov: d_p = d_p.add(momentum, buf) else: d_p = buf p.data.add_(-1, d_p) return loss
[ "def", "step", "(", "self", ",", "lr", ",", "closure", "=", "None", ")", ":", "loss", "=", "None", "if", "closure", "is", "not", "None", ":", "loss", "=", "closure", "(", ")", "for", "group", "in", "self", ".", "param_groups", ":", "weight_decay", ...
https://github.com/uber-research/UPSNet/blob/aa8434e5a721ed217849607815304f68dfd7720a/lib/nn/optimizer.py#L70-L106
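The step above is standard SGD with weight decay, momentum and optional Nesterov correction, except that the group learning rate is further multiplied by the per-call lr argument. A NumPy sketch of one plain momentum update (no Nesterov), folding both factors into a single assumed learning rate:

import numpy as np

def sgd_momentum_step(param, grad, buf, lr=0.01, momentum=0.9, weight_decay=1e-4):
    # d_p = grad + weight_decay * param, accumulated into the momentum
    # buffer; the parameter then moves against the buffer.
    d_p = grad + weight_decay * param
    buf = momentum * buf + lr * d_p
    return param - buf, buf

param = np.array([1.0, -2.0])
grad = np.array([0.1, 0.3])
buf = np.zeros_like(param)
param, buf = sgd_momentum_step(param, grad, buf)
print(param, buf)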
CGCookie/retopoflow
3d8b3a47d1d661f99ab0aeb21d31370bf15de35e
addon_common/common/utils.py
python
kwargopts
(kwargs, defvals=None, **mykwargs)
return factory()
[]
def kwargopts(kwargs, defvals=None, **mykwargs): opts = defvals.copy() if defvals else {} opts.update(mykwargs) opts.update(kwargs) if 'opts' in kwargs: opts.update(opts['opts']) def factory(): class Opts(): ''' pretend to be a dictionary, but also add . access fns ''' def __init__(self): self.touched = set() def __getattr__(self, opt): self.touched.add(opt) return opts[opt] def __getitem__(self, opt): self.touched.add(opt) return opts[opt] def __len__(self): return len(opts) def has_key(self, opt): return opt in opts def keys(self): return opts.keys() def values(self): return opts.values() def items(self): return opts.items() def __contains__(self, opt): return opt in opts def __iter__(self): return iter(opts) def print_untouched(self): print('untouched: %s' % str(set(opts.keys()) - self.touched)) def pass_through(self, *args): return {key:self[key] for key in args} return Opts() return factory()
[ "def", "kwargopts", "(", "kwargs", ",", "defvals", "=", "None", ",", "*", "*", "mykwargs", ")", ":", "opts", "=", "defvals", ".", "copy", "(", ")", "if", "defvals", "else", "{", "}", "opts", ".", "update", "(", "mykwargs", ")", "opts", ".", "update...
https://github.com/CGCookie/retopoflow/blob/3d8b3a47d1d661f99ab0aeb21d31370bf15de35e/addon_common/common/utils.py#L220-L248
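kwargopts layers defaults, the helper's own keyword arguments and the caller's kwargs, with a nested 'opts' dict winning last. A cut-down sketch of the same merge order, without the attribute access and touch tracking:

def merge_opts(kwargs, defvals=None, **mykwargs):
    # Later updates win: defvals < mykwargs < caller kwargs < nested 'opts'.
    opts = dict(defvals) if defvals else {}
    opts.update(mykwargs)
    opts.update(kwargs)
    if 'opts' in kwargs:
        opts.update(kwargs['opts'])
    return opts

print(merge_opts({'color': 'red'}, defvals={'color': 'blue', 'width': 2}, width=3))
# -> {'color': 'red', 'width': 3}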
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/django-0.96/django/core/servers/basehttp.py
python
ServerHandler.start_response
(self, status, headers,exc_info=None)
return self.write
'start_response()' callable as specified by PEP 333
'start_response()' callable as specified by PEP 333
[ "start_response", "()", "callable", "as", "specified", "by", "PEP", "333" ]
def start_response(self, status, headers,exc_info=None): """'start_response()' callable as specified by PEP 333""" if exc_info: try: if self.headers_sent: # Re-raise original exception if headers sent raise exc_info[0], exc_info[1], exc_info[2] finally: exc_info = None # avoid dangling circular ref elif self.headers is not None: raise AssertionError("Headers already set!") assert type(status) is StringType,"Status must be a string" assert len(status)>=4,"Status must be at least 4 characters" assert int(status[:3]),"Status message must begin w/3-digit code" assert status[3]==" ", "Status message must have a space after code" if __debug__: for name,val in headers: assert type(name) is StringType,"Header names must be strings" assert type(val) is StringType,"Header values must be strings" assert not is_hop_by_hop(name),"Hop-by-hop headers not allowed" self.status = status self.headers = self.headers_class(headers) return self.write
[ "def", "start_response", "(", "self", ",", "status", ",", "headers", ",", "exc_info", "=", "None", ")", ":", "if", "exc_info", ":", "try", ":", "if", "self", ".", "headers_sent", ":", "# Re-raise original exception if headers sent", "raise", "exc_info", "[", "...
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-0.96/django/core/servers/basehttp.py#L342-L366
snare/binjatron
4bbff5c4fa489a6718037126e2ea46816875e268
__init__.py
python
_get_function
(view, address)
return func
[]
def _get_function(view, address): func = view.get_function_at(address) if func is None: return view.get_function_at(view.get_previous_function_start_before(address)) return func
[ "def", "_get_function", "(", "view", ",", "address", ")", ":", "func", "=", "view", ".", "get_function_at", "(", "address", ")", "if", "func", "is", "None", ":", "return", "view", ".", "get_function_at", "(", "view", ".", "get_previous_function_start_before", ...
https://github.com/snare/binjatron/blob/4bbff5c4fa489a6718037126e2ea46816875e268/__init__.py#L42-L46
9miao/Firefly
fd2795b8c26de6ab63bbec23d11f18c3dfb39a50
firefly/dbentrust/util.py
python
DeleteFromDB
(tablename,props)
return bool(count)
Delete from the database.
Delete from the database.
[ "Delete", "from", "the", "database" ]
def DeleteFromDB(tablename,props): '''从数据库中删除 ''' prers = FormatCondition(props) sql = """DELETE FROM %s WHERE %s ;"""%(tablename,prers) conn = dbpool.connection() cursor = conn.cursor() count = 0 try: count = cursor.execute(sql) conn.commit() except Exception,e: log.err(e) log.err(sql) cursor.close() conn.close() return bool(count)
[ "def", "DeleteFromDB", "(", "tablename", ",", "props", ")", ":", "prers", "=", "FormatCondition", "(", "props", ")", "sql", "=", "\"\"\"DELETE FROM %s WHERE %s ;\"\"\"", "%", "(", "tablename", ",", "prers", ")", "conn", "=", "dbpool", ".", "connection", "(", ...
https://github.com/9miao/Firefly/blob/fd2795b8c26de6ab63bbec23d11f18c3dfb39a50/firefly/dbentrust/util.py#L119-L135
roclark/sportsipy
c19f545d3376d62ded6304b137dc69238ac620a9
sportsipy/fb/roster.py
python
SquadPlayer.penalty_kicks_saved
(self)
return self._penalty_kicks_saved
Returns an ``int`` of the number of penalty kicks a keeper has saved during regular play.
Returns an ``int`` of the number of penalty kicks a keeper has saved during regular play.
[ "Returns", "an", "int", "of", "the", "number", "of", "penalty", "kicks", "a", "keeper", "has", "saved", "during", "regular", "play", "." ]
def penalty_kicks_saved(self): """ Returns an ``int`` of the number of penalty kicks a keeper has saved during regular play. """ return self._penalty_kicks_saved
[ "def", "penalty_kicks_saved", "(", "self", ")", ":", "return", "self", ".", "_penalty_kicks_saved" ]
https://github.com/roclark/sportsipy/blob/c19f545d3376d62ded6304b137dc69238ac620a9/sportsipy/fb/roster.py#L726-L731
chb/indivo_server
9826c67ab17d7fc0df935db327344fb0c7d237e5
indivo/migrations/0021_old_problems_to_smart_problems.py
python
Migration.forwards
(self, orm)
Write your forwards methods here.
Write your forwards methods here.
[ "Write", "your", "forwards", "methods", "here", "." ]
def forwards(self, orm): "Write your forwards methods here." for p in orm.Problem.objects.all(): p.startDate = p.date_onset p.endDate = p.date_resolution p.name_identifier = p.name_value p.name_system = p.name_type p.name_title = p.name p.notes = p.comments p.save()
[ "def", "forwards", "(", "self", ",", "orm", ")", ":", "for", "p", "in", "orm", ".", "Problem", ".", "objects", ".", "all", "(", ")", ":", "p", ".", "startDate", "=", "p", ".", "date_onset", "p", ".", "endDate", "=", "p", ".", "date_resolution", "...
https://github.com/chb/indivo_server/blob/9826c67ab17d7fc0df935db327344fb0c7d237e5/indivo/migrations/0021_old_problems_to_smart_problems.py#L9-L18
FuYanzhe2/Name-Entity-Recognition
598b264262d667257c9e26646c49df45f7d76547
BERT-BiLSTM-CRF-NER/bert/run_squad.py
python
get_final_text
(pred_text, orig_text, do_lower_case)
return output_text
Project the tokenized prediction back to the original text.
Project the tokenized prediction back to the original text.
[ "Project", "the", "tokenized", "prediction", "back", "to", "the", "original", "text", "." ]
def get_final_text(pred_text, orig_text, do_lower_case): """Project the tokenized prediction back to the original text.""" # When we created the data, we kept track of the alignment between original # (whitespace tokenized) tokens and our WordPiece tokenized tokens. So # now `orig_text` contains the span of our original text corresponding to the # span that we predicted. # # However, `orig_text` may contain extra characters that we don't want in # our prediction. # # For example, let's say: # pred_text = steve smith # orig_text = Steve Smith's # # We don't want to return `orig_text` because it contains the extra "'s". # # We don't want to return `pred_text` because it's already been normalized # (the SQuAD eval script also does punctuation stripping/lower casing but # our tokenizer does additional normalization like stripping accent # characters). # # What we really want to return is "Steve Smith". # # Therefore, we have to apply a semi-complicated alignment heruistic between # `pred_text` and `orig_text` to get a character-to-charcter alignment. This # can fail in certain cases in which case we just return `orig_text`. def _strip_spaces(text): ns_chars = [] ns_to_s_map = collections.OrderedDict() for (i, c) in enumerate(text): if c == " ": continue ns_to_s_map[len(ns_chars)] = i ns_chars.append(c) ns_text = "".join(ns_chars) return (ns_text, ns_to_s_map) # We first tokenize `orig_text`, strip whitespace from the result # and `pred_text`, and check if they are the same length. If they are # NOT the same length, the heuristic has failed. If they are the same # length, we assume the characters are one-to-one aligned. tokenizer = tokenization.BasicTokenizer(do_lower_case=do_lower_case) tok_text = " ".join(tokenizer.tokenize(orig_text)) start_position = tok_text.find(pred_text) if start_position == -1: if FLAGS.verbose_logging: tf.logging.info( "Unable to find text: '%s' in '%s'" % (pred_text, orig_text)) return orig_text end_position = start_position + len(pred_text) - 1 (orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text) (tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text) if len(orig_ns_text) != len(tok_ns_text): if FLAGS.verbose_logging: tf.logging.info("Length not equal after stripping spaces: '%s' vs '%s'", orig_ns_text, tok_ns_text) return orig_text # We then project the characters in `pred_text` back to `orig_text` using # the character-to-character alignment. tok_s_to_ns_map = {} for (i, tok_index) in six.iteritems(tok_ns_to_s_map): tok_s_to_ns_map[tok_index] = i orig_start_position = None if start_position in tok_s_to_ns_map: ns_start_position = tok_s_to_ns_map[start_position] if ns_start_position in orig_ns_to_s_map: orig_start_position = orig_ns_to_s_map[ns_start_position] if orig_start_position is None: if FLAGS.verbose_logging: tf.logging.info("Couldn't map start position") return orig_text orig_end_position = None if end_position in tok_s_to_ns_map: ns_end_position = tok_s_to_ns_map[end_position] if ns_end_position in orig_ns_to_s_map: orig_end_position = orig_ns_to_s_map[ns_end_position] if orig_end_position is None: if FLAGS.verbose_logging: tf.logging.info("Couldn't map end position") return orig_text output_text = orig_text[orig_start_position:(orig_end_position + 1)] return output_text
[ "def", "get_final_text", "(", "pred_text", ",", "orig_text", ",", "do_lower_case", ")", ":", "# When we created the data, we kept track of the alignment between original", "# (whitespace tokenized) tokens and our WordPiece tokenized tokens. So", "# now `orig_text` contains the span of our or...
https://github.com/FuYanzhe2/Name-Entity-Recognition/blob/598b264262d667257c9e26646c49df45f7d76547/BERT-BiLSTM-CRF-NER/bert/run_squad.py#L926-L1019
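The core trick in get_final_text is the character-level alignment built by stripping spaces from both strings while remembering where each kept character came from. A small standalone sketch of that _strip_spaces mapping:

import collections

def strip_spaces(text):
    # Returns the text without spaces plus a map from positions in the
    # stripped string back to positions in the original string.
    ns_chars = []
    ns_to_s_map = collections.OrderedDict()
    for i, c in enumerate(text):
        if c == " ":
            continue
        ns_to_s_map[len(ns_chars)] = i
        ns_chars.append(c)
    return "".join(ns_chars), ns_to_s_map

stripped, mapping = strip_spaces("Steve Smith")
print(stripped)    # SteveSmith
print(mapping[5])  # 6, the index of the second 'S' in the original string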