nwo
stringlengths
5
106
sha
stringlengths
40
40
path
stringlengths
4
174
language
stringclasses
1 value
identifier
stringlengths
1
140
parameters
stringlengths
0
87.7k
argument_list
stringclasses
1 value
return_statement
stringlengths
0
426k
docstring
stringlengths
0
64.3k
docstring_summary
stringlengths
0
26.3k
docstring_tokens
list
function
stringlengths
18
4.83M
function_tokens
list
url
stringlengths
83
304
TheHive-Project/TheHive4py
6bd75773889dac6c664310b187d0b264607075cf
thehive4py/query.py
python
Gte
(field, value)
return {'_gte': {field: value}}
A criterion used to search for a field greater or equal than a certain value. For example * search for TLP >= 2 * search for customFields.cvss >= 4.5 * search for date >= now Arguments: field (value): field name value (Any): field value Returns: dict: JSON repsentation of the criterion ```python query = Gte('tlp', 1) ``` produces ```json {"_gte": {"tlp": 1}} ```
A criterion used to search for a field greater or equal than a certain value. For example
[ "A", "criterion", "used", "to", "search", "for", "a", "field", "greater", "or", "equal", "than", "a", "certain", "value", ".", "For", "example" ]
def Gte(field, value):
    """A criterion used to search for a field greater than or equal to a
    certain value. For example

    * search for TLP >= 2
    * search for customFields.cvss >= 4.5
    * search for date >= now

    Arguments:
        field (str): field name
        value (Any): field value

    Returns:
        dict: JSON representation of the criterion

    ```python
    query = Gte('tlp', 1)
    ```
    produces
    ```json
    {"_gte": {"tlp": 1}}
    ```
    """
    criterion = {field: value}
    return {'_gte': criterion}
[ "def", "Gte", "(", "field", ",", "value", ")", ":", "return", "{", "'_gte'", ":", "{", "field", ":", "value", "}", "}" ]
https://github.com/TheHive-Project/TheHive4py/blob/6bd75773889dac6c664310b187d0b264607075cf/thehive4py/query.py#L94-L116
istresearch/scrapy-cluster
01861c2dca1563aab740417d315cc4ebf9b73f72
redis-monitor/plugins/info_monitor.py
python
InfoMonitor._build_crawlid_info
(self, master, dict)
return master
Builds the crawlid info object @param master: the master dict @param dict: the dict object received @return: the crawlid info object
Builds the crawlid info object
[ "Builds", "the", "crawlid", "info", "object" ]
def _build_crawlid_info(self, master, dict):
    '''
    Builds the crawlid info object by scanning the spider's domain queues
    in Redis and aggregating per-domain pending-request statistics.

    @param master: the master dict to populate with aggregated info
    @param dict: the dict object received (NOTE: shadows the builtin ``dict``;
        expected keys include 'appid', 'crawlid', 'spiderid')
    @return: the crawlid info object (the mutated ``master`` dict)
    '''
    # Counters are (re)initialized from scratch on every call.
    master['total_pending'] = 0
    master['total_domains'] = 0
    master['appid'] = dict['appid']
    master['crawlid'] = dict['crawlid']
    master['spiderid'] = dict['spiderid']
    master['domains'] = {}
    timeout_key = 'timeout:{sid}:{aid}:{cid}'.format(sid=dict['spiderid'],
                                                     aid=dict['appid'],
                                                     cid=dict['crawlid'])
    # If a crawl timeout is registered in Redis, surface its value.
    if self.redis_conn.exists(timeout_key):
        master['expires'] = self.redis_conn.get(timeout_key)
    # get all domain queues for this spider
    match_string = '{sid}:*:queue'.format(sid=dict['spiderid'])
    for key in self.redis_conn.scan_iter(match=match_string):
        # key layout is '<spiderid>:<domain>:queue' — domain is the middle part
        domain = key.split(":")[1]
        sortedDict = self._get_bin(key)
        # now iterate through binned dict (score -> list of queued items)
        for score in sortedDict:
            for item in sortedDict[score]:
                # some items wrap the payload in a 'meta' envelope
                if 'meta' in item:
                    item = item['meta']
                # Only count requests belonging to this app/crawl pair.
                if item['appid'] == dict['appid'] and item['crawlid'] == dict['crawlid']:
                    if domain not in master['domains']:
                        # first sighting of this domain: set up its stats
                        # with sentinel priorities that any real value beats
                        master['domains'][domain] = {}
                        master['domains'][domain]['total'] = 0
                        master['domains'][domain]['high_priority'] = -9999
                        master['domains'][domain]['low_priority'] = 9999
                        master['total_domains'] = master['total_domains'] + 1
                    master['domains'][domain]['total'] = master['domains'][domain]['total'] + 1
                    # track the widest priority band seen for this domain
                    if item['priority'] > master['domains'][domain]['high_priority']:
                        master['domains'][domain]['high_priority'] = item['priority']
                    if item['priority'] < master['domains'][domain]['low_priority']:
                        master['domains'][domain]['low_priority'] = item['priority']
                    master['total_pending'] = master['total_pending'] + 1
    return master
[ "def", "_build_crawlid_info", "(", "self", ",", "master", ",", "dict", ")", ":", "master", "[", "'total_pending'", "]", "=", "0", "master", "[", "'total_domains'", "]", "=", "0", "master", "[", "'appid'", "]", "=", "dict", "[", "'appid'", "]", "master", ...
https://github.com/istresearch/scrapy-cluster/blob/01861c2dca1563aab740417d315cc4ebf9b73f72/redis-monitor/plugins/info_monitor.py#L152-L202
veusz/veusz
5a1e2af5f24df0eb2a2842be51f2997c4999c7fb
veusz/widgets/page.py
python
AxisDependHelper._updateRangeFromPlotter
(self, axis, plotter, plotterdep)
Update the range for axis from the plotter.
Update the range for axis from the plotter.
[ "Update", "the", "range", "for", "axis", "from", "the", "plotter", "." ]
def _updateRangeFromPlotter(self, axis, plotter, plotterdep):
    """Update the range for axis from the plotter.

    For a linked axis the plotter's range is collected into a scratch list,
    mapped back through the chain of linked axes, and merged into
    self.ranges for the real (terminal) axis.  For an unlinked axis the
    plotter updates self.ranges[axis] in place.
    """
    if axis.isLinked():
        # take range and map back to real axis
        therange = list(defaultrange)
        plotter.getRange(axis, plotterdep, therange)
        if therange != defaultrange:
            # follow up chain of linked axes; loopcheck guards against
            # circular links
            loopcheck = set()
            while axis.isLinked():
                loopcheck.add(axis)
                therange = axis.invertFunctionVals(therange)
                axis = axis.getLinkedAxis()
                if axis in loopcheck:
                    # cycle detected: abandon the update
                    axis = None
            # invertFunctionVals may also yield None — both cases skipped
            if axis is not None and therange is not None:
                # widen the stored range to include the mapped range;
                # nanmin/nanmax ignore NaN placeholders in either source
                self.ranges[axis] = [
                    N.nanmin((self.ranges[axis][0], therange[0])),
                    N.nanmax((self.ranges[axis][1], therange[1]))
                ]
    else:
        # unlinked axis: plotter mutates the stored range list directly
        plotter.getRange(axis, plotterdep, self.ranges[axis])
[ "def", "_updateRangeFromPlotter", "(", "self", ",", "axis", ",", "plotter", ",", "plotterdep", ")", ":", "if", "axis", ".", "isLinked", "(", ")", ":", "# take range and map back to real axis", "therange", "=", "list", "(", "defaultrange", ")", "plotter", ".", ...
https://github.com/veusz/veusz/blob/5a1e2af5f24df0eb2a2842be51f2997c4999c7fb/veusz/widgets/page.py#L194-L217
brython-dev/brython
9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3
www/src/Lib/_pydecimal.py
python
Decimal._compare_check_nans
(self, other, context)
return 0
Version of _check_nans used for the signaling comparisons compare_signal, __le__, __lt__, __ge__, __gt__. Signal InvalidOperation if either self or other is a (quiet or signaling) NaN. Signaling NaNs take precedence over quiet NaNs. Return 0 if neither operand is a NaN.
Version of _check_nans used for the signaling comparisons compare_signal, __le__, __lt__, __ge__, __gt__.
[ "Version", "of", "_check_nans", "used", "for", "the", "signaling", "comparisons", "compare_signal", "__le__", "__lt__", "__ge__", "__gt__", "." ]
def _compare_check_nans(self, other, context):
    """Version of _check_nans used for the signaling comparisons
    compare_signal, __le__, __lt__, __ge__, __gt__.

    Signal InvalidOperation if either self or other is a (quiet or
    signaling) NaN.  Signaling NaNs take precedence over quiet NaNs.

    Return 0 if neither operand is a NaN.
    """
    if context is None:
        context = getcontext()

    if self._is_special or other._is_special:
        # sNaNs are reported before quiet NaNs; within each category
        # self is checked before other.
        if self.is_snan():
            return context._raise_error(InvalidOperation,
                                        'comparison involving sNaN', self)
        if other.is_snan():
            return context._raise_error(InvalidOperation,
                                        'comparison involving sNaN', other)
        if self.is_qnan():
            return context._raise_error(InvalidOperation,
                                        'comparison involving NaN', self)
        if other.is_qnan():
            return context._raise_error(InvalidOperation,
                                        'comparison involving NaN', other)
    return 0
[ "def", "_compare_check_nans", "(", "self", ",", "other", ",", "context", ")", ":", "if", "context", "is", "None", ":", "context", "=", "getcontext", "(", ")", "if", "self", ".", "_is_special", "or", "other", ".", "_is_special", ":", "if", "self", ".", ...
https://github.com/brython-dev/brython/blob/9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3/www/src/Lib/_pydecimal.py#L777-L808
MichaelGrupp/evo
c65af3b69188aaadbbd7b5f99ac7973d74343d65
evo/core/transformations.py
python
translation_matrix
(direction)
return M
Return matrix to translate by direction vector. >>> v = numpy.random.random(3) - 0.5 >>> numpy.allclose(v, translation_matrix(v)[:3, 3]) True
Return matrix to translate by direction vector.
[ "Return", "matrix", "to", "translate", "by", "direction", "vector", "." ]
def translation_matrix(direction):
    """Return a 4x4 homogeneous transform that translates by *direction*.

    Only the first three components of *direction* are used.

    >>> v = numpy.random.random(3) - 0.5
    >>> numpy.allclose(v, translation_matrix(v)[:3, 3])
    True
    """
    matrix = numpy.identity(4)
    # Place the translation vector in the last column of the upper 3x4 part.
    matrix[0:3, 3] = direction[:3]
    return matrix
[ "def", "translation_matrix", "(", "direction", ")", ":", "M", "=", "numpy", ".", "identity", "(", "4", ")", "M", "[", ":", "3", ",", "3", "]", "=", "direction", "[", ":", "3", "]", "return", "M" ]
https://github.com/MichaelGrupp/evo/blob/c65af3b69188aaadbbd7b5f99ac7973d74343d65/evo/core/transformations.py#L226-L236
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/yamaha_musiccast/media_player.py
python
MusicCastMediaPlayer.async_set_volume_level
(self, volume)
Set the volume level, range 0..1.
Set the volume level, range 0..1.
[ "Set", "the", "volume", "level", "range", "0", "..", "1", "." ]
async def async_set_volume_level(self, volume):
    """Set the volume level, range 0..1.

    Delegates to the MusicCast coordinator for this zone, then writes the
    new state to Home Assistant immediately rather than waiting for the
    next coordinator refresh.
    """
    await self.coordinator.musiccast.set_volume_level(self._zone_id, volume)
    self.async_write_ha_state()
[ "async", "def", "async_set_volume_level", "(", "self", ",", "volume", ")", ":", "await", "self", ".", "coordinator", ".", "musiccast", ".", "set_volume_level", "(", "self", ".", "_zone_id", ",", "volume", ")", "self", ".", "async_write_ha_state", "(", ")" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/yamaha_musiccast/media_player.py#L285-L288
nosmokingbandit/Watcher3
0217e75158b563bdefc8e01c3be7620008cf3977
lib/sqlalchemy/dialects/firebird/base.py
python
FBDDLCompiler.visit_drop_sequence
(self, drop)
Generate a ``DROP GENERATOR`` statement for the sequence.
Generate a ``DROP GENERATOR`` statement for the sequence.
[ "Generate", "a", "DROP", "GENERATOR", "statement", "for", "the", "sequence", "." ]
def visit_drop_sequence(self, drop):
    """Generate a ``DROP GENERATOR`` statement for the sequence."""
    # Firebird 2.0+ accepts the standard SEQUENCE keyword; older
    # versions only understand the legacy GENERATOR terminology.
    if self.dialect._version_two:
        template = "DROP SEQUENCE %s"
    else:
        template = "DROP GENERATOR %s"
    return template % self.preparer.format_sequence(drop.element)
[ "def", "visit_drop_sequence", "(", "self", ",", "drop", ")", ":", "if", "self", ".", "dialect", ".", "_version_two", ":", "return", "\"DROP SEQUENCE %s\"", "%", "self", ".", "preparer", ".", "format_sequence", "(", "drop", ".", "element", ")", "else", ":", ...
https://github.com/nosmokingbandit/Watcher3/blob/0217e75158b563bdefc8e01c3be7620008cf3977/lib/sqlalchemy/dialects/firebird/base.py#L347-L355
LumaPictures/pymel
fa88a3f4fa18e09bb8aa9bdf4dab53d984bada72
pymel/core/datatypes.py
python
Color.hsvblend
(self, other, weight=0.5)
return self.__class__(c, mode='hsv')
c1.hsvblend(c2) --> Color Returns the result of blending c1 with c2 in hsv space, using the given weight
c1.hsvblend(c2) --> Color Returns the result of blending c1 with c2 in hsv space, using the given weight
[ "c1", ".", "hsvblend", "(", "c2", ")", "--", ">", "Color", "Returns", "the", "result", "of", "blending", "c1", "with", "c2", "in", "hsv", "space", "using", "the", "given", "weight" ]
def hsvblend(self, other, weight=0.5):
    """ c1.hsvblend(c2) --> Color

        Returns the result of blending c1 with c2 in hsv space, using the
        given weight """
    c1 = list(self.hsva)
    c2 = list(other.hsva)
    # Hue is circular (0.0 and 1.0 denote the same hue).  When the two
    # hues are half a turn or more apart, interpolate across the 0/1
    # boundary instead of the long way around the wheel.
    if abs(c2[0] - c1[0]) >= 0.5:
        if abs(c2[0] - c1[0]) == 0.5:
            # Exactly opposite hues: the midpoint hue is ambiguous, so
            # drop both saturations to zero.
            c1[1], c2[1] = 0.0, 0.0
        # Shift hues above 0.5 down by a full turn so linear blending
        # follows the short arc.
        if c1[0] > 0.5:
            c1[0] -= 1.0
        if c2[0] > 0.5:
            c2[0] -= 1.0
    c = blend(c1, c2, weight=weight)
    # Wrap a negative blended hue back into [0, 1).
    if c[0] < 0.0:
        c[0] += 1.0
    return self.__class__(c, mode='hsv')
[ "def", "hsvblend", "(", "self", ",", "other", ",", "weight", "=", "0.5", ")", ":", "c1", "=", "list", "(", "self", ".", "hsva", ")", "c2", "=", "list", "(", "other", ".", "hsva", ")", "if", "abs", "(", "c2", "[", "0", "]", "-", "c1", "[", "...
https://github.com/LumaPictures/pymel/blob/fa88a3f4fa18e09bb8aa9bdf4dab53d984bada72/pymel/core/datatypes.py#L1525-L1540
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/external/npyscreen/wgmultilinetree.py
python
MLTree.set_up_handlers
(self)
TreeLineAnnotated.set_up_handlers.
TreeLineAnnotated.set_up_handlers.
[ "TreeLineAnnotated", ".", "set_up_handlers", "." ]
def set_up_handlers(self):
    '''TreeLineAnnotated.set_up_handlers.'''
    super(MLTree, self).set_up_handlers()
    # Bind bracket-style and vi-style keys: three ways each to collapse
    # or expand the current node, plus braces for the whole tree.
    bindings = {}
    for ch in '<[h':
        bindings[ord(ch)] = self.h_collapse_tree
    for ch in '>]l':
        bindings[ord(ch)] = self.h_expand_tree
    bindings[ord('{')] = self.h_collapse_all
    bindings[ord('}')] = self.h_expand_all
    self.handlers.update(bindings)
[ "def", "set_up_handlers", "(", "self", ")", ":", "super", "(", "MLTree", ",", "self", ")", ".", "set_up_handlers", "(", ")", "self", ".", "handlers", ".", "update", "(", "{", "ord", "(", "'<'", ")", ":", "self", ".", "h_collapse_tree", ",", "ord", "(...
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/external/npyscreen/wgmultilinetree.py#L451-L463
caiiiac/Machine-Learning-with-Python
1a26c4467da41ca4ebc3d5bd789ea942ef79422f
MachineLearning/venv/lib/python3.5/site-packages/pip/_vendor/distlib/manifest.py
python
Manifest._include_pattern
(self, pattern, anchor=True, prefix=None, is_regex=False)
return found
Select strings (presumably filenames) from 'self.files' that match 'pattern', a Unix-style wildcard (glob) pattern. Patterns are not quite the same as implemented by the 'fnmatch' module: '*' and '?' match non-special characters, where "special" is platform-dependent: slash on Unix; colon, slash, and backslash on DOS/Windows; and colon on Mac OS. If 'anchor' is true (the default), then the pattern match is more stringent: "*.py" will match "foo.py" but not "foo/bar.py". If 'anchor' is false, both of these will match. If 'prefix' is supplied, then only filenames starting with 'prefix' (itself a pattern) and ending with 'pattern', with anything in between them, will match. 'anchor' is ignored in this case. If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and 'pattern' is assumed to be either a string containing a regex or a regex object -- no translation is done, the regex is just compiled and used as-is. Selected strings will be added to self.files. Return True if files are found.
Select strings (presumably filenames) from 'self.files' that match 'pattern', a Unix-style wildcard (glob) pattern.
[ "Select", "strings", "(", "presumably", "filenames", ")", "from", "self", ".", "files", "that", "match", "pattern", "a", "Unix", "-", "style", "wildcard", "(", "glob", ")", "pattern", "." ]
def _include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False): """Select strings (presumably filenames) from 'self.files' that match 'pattern', a Unix-style wildcard (glob) pattern. Patterns are not quite the same as implemented by the 'fnmatch' module: '*' and '?' match non-special characters, where "special" is platform-dependent: slash on Unix; colon, slash, and backslash on DOS/Windows; and colon on Mac OS. If 'anchor' is true (the default), then the pattern match is more stringent: "*.py" will match "foo.py" but not "foo/bar.py". If 'anchor' is false, both of these will match. If 'prefix' is supplied, then only filenames starting with 'prefix' (itself a pattern) and ending with 'pattern', with anything in between them, will match. 'anchor' is ignored in this case. If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and 'pattern' is assumed to be either a string containing a regex or a regex object -- no translation is done, the regex is just compiled and used as-is. Selected strings will be added to self.files. Return True if files are found. """ # XXX docstring lying about what the special chars are? found = False pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) # delayed loading of allfiles list if self.allfiles is None: self.findall() for name in self.allfiles: if pattern_re.search(name): self.files.add(name) found = True return found
[ "def", "_include_pattern", "(", "self", ",", "pattern", ",", "anchor", "=", "True", ",", "prefix", "=", "None", ",", "is_regex", "=", "False", ")", ":", "# XXX docstring lying about what the special chars are?", "found", "=", "False", "pattern_re", "=", "self", ...
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/pip/_vendor/distlib/manifest.py#L256-L295
diefenbach/django-lfs
3bbcb3453d324c181ec68d11d5d35115a60a2fd5
lfs/criteria/models.py
python
LengthCriterion.get_operators
(self)
return self.NUMBER_OPERATORS
Returns the available operators for the criterion.
Returns the available operators for the criterion.
[ "Returns", "the", "available", "operators", "for", "the", "criterion", "." ]
def get_operators(self):
    """
    Returns the available operators for the criterion.

    Length criteria compare numerically, so the shared NUMBER_OPERATORS
    set is returned.
    """
    return self.NUMBER_OPERATORS
[ "def", "get_operators", "(", "self", ")", ":", "return", "self", ".", "NUMBER_OPERATORS" ]
https://github.com/diefenbach/django-lfs/blob/3bbcb3453d324c181ec68d11d5d35115a60a2fd5/lfs/criteria/models.py#L562-L566
inventree/InvenTree
4a5e4a88ac3e91d64a21e8cab3708ecbc6e2bd8b
InvenTree/part/templatetags/inventree_extras.py
python
setting_object
(key, *args, **kwargs)
return InvenTreeSetting.get_setting_object(key)
Return a setting object speciifed by the given key (Or return None if the setting does not exist) if a user-setting was requested return that
Return a setting object speciifed by the given key (Or return None if the setting does not exist) if a user-setting was requested return that
[ "Return", "a", "setting", "object", "speciifed", "by", "the", "given", "key", "(", "Or", "return", "None", "if", "the", "setting", "does", "not", "exist", ")", "if", "a", "user", "-", "setting", "was", "requested", "return", "that" ]
def setting_object(key, *args, **kwargs):
    """
    Return the setting object specified by the given key
    (or None if the setting does not exist).

    A plugin- or user-specific setting is returned when the corresponding
    keyword argument was supplied.
    """
    if 'plugin' in kwargs:
        # Note, 'plugin' is an instance of an InvenTreePlugin class
        return PluginSetting.get_setting_object(key, plugin=kwargs['plugin'])

    if 'user' in kwargs:
        return InvenTreeUserSetting.get_setting_object(key, user=kwargs['user'])

    return InvenTreeSetting.get_setting_object(key)
[ "def", "setting_object", "(", "key", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'plugin'", "in", "kwargs", ":", "# Note, 'plugin' is an instance of an InvenTreePlugin class", "plugin", "=", "kwargs", "[", "'plugin'", "]", "return", "PluginSetting"...
https://github.com/inventree/InvenTree/blob/4a5e4a88ac3e91d64a21e8cab3708ecbc6e2bd8b/InvenTree/part/templatetags/inventree_extras.py#L222-L239
postlund/pyatv
4ed1f5539f37d86d80272663d1f2ea34a6c41ec4
pyatv/protocols/airplay/player.py
python
AirPlayPlayer.__init__
(self, http: HttpConnection)
Initialize a new AirPlay instance.
Initialize a new AirPlay instance.
[ "Initialize", "a", "new", "AirPlay", "instance", "." ]
def __init__(self, http: HttpConnection) -> None:
    """Initialize a new AirPlay instance.

    Args:
        http: HTTP connection the player will use for its requests.
    """
    self.http = http
[ "def", "__init__", "(", "self", ",", "http", ":", "HttpConnection", ")", "->", "None", ":", "self", ".", "http", "=", "http" ]
https://github.com/postlund/pyatv/blob/4ed1f5539f37d86d80272663d1f2ea34a6c41ec4/pyatv/protocols/airplay/player.py#L25-L27
theotherp/nzbhydra
4b03d7f769384b97dfc60dade4806c0fc987514e
libs/pydoc.py
python
TextDoc.section
(self, title, contents)
return self.bold(title) + '\n' + rstrip(self.indent(contents)) + '\n\n'
Format a section with a given heading.
Format a section with a given heading.
[ "Format", "a", "section", "with", "a", "given", "heading", "." ]
def section(self, title, contents):
    """Format a section with a given heading."""
    heading = self.bold(title)
    # Indent the body and drop trailing whitespace before padding with
    # the blank line that separates sections.
    body = rstrip(self.indent(contents))
    return heading + '\n' + body + '\n\n'
[ "def", "section", "(", "self", ",", "title", ",", "contents", ")", ":", "return", "self", ".", "bold", "(", "title", ")", "+", "'\\n'", "+", "rstrip", "(", "self", ".", "indent", "(", "contents", ")", ")", "+", "'\\n\\n'" ]
https://github.com/theotherp/nzbhydra/blob/4b03d7f769384b97dfc60dade4806c0fc987514e/libs/pydoc.py#L1047-L1049
apache/superset
3d829fc3c838358dd8c798ecaeefd34c502edca0
superset/viz.py
python
BaseViz.run_extra_queries
(self)
Lifecycle method to use when more than one query is needed In rare-ish cases, a visualization may need to execute multiple queries. That is the case for FilterBox or for time comparison in Line chart for instance. In those cases, we need to make sure these queries run before the main `get_payload` method gets called, so that the overall caching metadata can be right. The way it works here is that if any of the previous `get_df_payload` calls hit the cache, the main payload's metadata will reflect that. The multi-query support may need more work to become a first class use case in the framework, and for the UI to reflect the subtleties (show that only some of the queries were served from cache for instance). In the meantime, since multi-query is rare, we treat it with a bit of a hack. Note that the hack became necessary when moving from caching the visualization's data itself, to caching the underlying query(ies).
Lifecycle method to use when more than one query is needed
[ "Lifecycle", "method", "to", "use", "when", "more", "than", "one", "query", "is", "needed" ]
def run_extra_queries(self) -> None:
    """Lifecycle method to use when more than one query is needed

    In rare-ish cases, a visualization may need to execute multiple
    queries. That is the case for FilterBox or for time comparison in
    Line chart for instance.

    In those cases, we need to make sure these queries run before the
    main `get_payload` method gets called, so that the overall caching
    metadata can be right. The way it works here is that if any of the
    previous `get_df_payload` calls hit the cache, the main payload's
    metadata will reflect that.

    The multi-query support may need more work to become a first class
    use case in the framework, and for the UI to reflect the subtleties
    (show that only some of the queries were served from cache for
    instance). In the meantime, since multi-query is rare, we treat
    it with a bit of a hack. Note that the hack became necessary
    when moving from caching the visualization's data itself, to caching
    the underlying query(ies).
    """
    # Intentionally a no-op: subclasses needing extra queries override this.
[ "def", "run_extra_queries", "(", "self", ")", "->", "None", ":" ]
https://github.com/apache/superset/blob/3d829fc3c838358dd8c798ecaeefd34c502edca0/superset/viz.py#L198-L218
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/entur_public_transport/sensor.py
python
EnturProxy.async_update
(self)
Update data in client.
Update data in client.
[ "Update", "data", "in", "client", "." ]
async def async_update(self) -> None:
    """Update data in client.

    Delegates the refresh to the wrapped Entur API client.
    """
    await self._api.update()
[ "async", "def", "async_update", "(", "self", ")", "->", "None", ":", "await", "self", ".", "_api", ".", "update", "(", ")" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/entur_public_transport/sensor.py#L151-L153
libtcod/python-tcod
e12c4172baa9efdfd74aff6ee9bab8454a835248
tcod/event.py
python
EventDispatch.ev_windowfocusgained
(self, event: tcod.event.WindowEvent)
Called when the window gains keyboard focus.
Called when the window gains keyboard focus.
[ "Called", "when", "the", "window", "gains", "keyboard", "focus", "." ]
def ev_windowfocusgained(self, event: tcod.event.WindowEvent) -> Optional[T]:
    """Called when the window gains keyboard focus.

    The default implementation is a no-op returning None; subclasses
    override this to react to the event.
    """
[ "def", "ev_windowfocusgained", "(", "self", ",", "event", ":", "tcod", ".", "event", ".", "WindowEvent", ")", "->", "Optional", "[", "T", "]", ":" ]
https://github.com/libtcod/python-tcod/blob/e12c4172baa9efdfd74aff6ee9bab8454a835248/tcod/event.py#L1035-L1036
pandaproject/panda
133baa47882a289773a30c9656e2ea4efe569387
panda/migrations/0022_create_notification_urls.py
python
Migration.forwards
(self, orm)
Write your forwards methods here.
Write your forwards methods here.
[ "Write", "your", "forwards", "methods", "here", "." ]
def forwards(self, orm):
    "Write your forwards methods here."
    # Data migration: backfill Notification.url from the related export or
    # dataset.  Guarded so no rows are touched during a South dry run.
    if not db.dry_run:
        for notification in orm.Notification.objects.all():
            if notification.related_export:
                notification.url = '#export/%i' % notification.related_export.id
            elif notification.related_dataset:
                notification.url = '#dataset/%s' % notification.related_dataset.slug
            # NOTE(review): every row is saved, even when neither relation
            # is set — presumably harmless; confirm if migration is slow.
            notification.save()
[ "def", "forwards", "(", "self", ",", "orm", ")", ":", "if", "not", "db", ".", "dry_run", ":", "for", "notification", "in", "orm", ".", "Notification", ".", "objects", ".", "all", "(", ")", ":", "if", "notification", ".", "related_export", ":", "notific...
https://github.com/pandaproject/panda/blob/133baa47882a289773a30c9656e2ea4efe569387/panda/migrations/0022_create_notification_urls.py#L9-L18
hzlzh/AlfredWorkflow.com
7055f14f6922c80ea5943839eb0caff11ae57255
Sources/Workflows/delicious/alp/request/requests/packages/urllib3/packages/ordered_dict.py
python
OrderedDict.itervalues
(self)
od.itervalues -> an iterator over the values in od
od.itervalues -> an iterator over the values in od
[ "od", ".", "itervalues", "-", ">", "an", "iterator", "over", "the", "values", "in", "od" ]
def itervalues(self):
    'od.itervalues -> an iterator over the values in od'
    # Iterating the mapping yields keys in order; index back for values.
    for key in self:
        yield self[key]
[ "def", "itervalues", "(", "self", ")", ":", "for", "k", "in", "self", ":", "yield", "self", "[", "k", "]" ]
https://github.com/hzlzh/AlfredWorkflow.com/blob/7055f14f6922c80ea5943839eb0caff11ae57255/Sources/Workflows/delicious/alp/request/requests/packages/urllib3/packages/ordered_dict.py#L133-L136
yadayada/acd_cli
cd4a9eea52f1740aa8de10d8c75ab2f6c17de52b
acdcli/acd_fuse.py
python
ACDFuse.readdir
(self, path, fh)
return [_ for _ in ['.', '..'] + [c for c in self.cache.childrens_names(node.id)]]
Lists the path's contents. :raises: FuseOSError if path is not a node or path is not a folder
Lists the path's contents.
[ "Lists", "the", "path", "s", "contents", "." ]
def readdir(self, path, fh) -> 'List[str]':
    """Lists the path's contents.

    :param path: absolute path of the directory to list
    :param fh: file handle (unused)
    :returns: entry names including the '.' and '..' pseudo-entries
    :raises: FuseOSError if path is not a node (ENOENT) or path is not
        a folder (ENOTDIR)"""
    node = self.cache.resolve(path)
    if not node:
        raise FuseOSError(errno.ENOENT)
    if not node.type == 'folder':
        raise FuseOSError(errno.ENOTDIR)
    # The original double comprehension ([_ for _ in [...] + [c for c in ...]])
    # was a no-op wrapper; build the entry list directly.
    return ['.', '..'] + list(self.cache.childrens_names(node.id))
[ "def", "readdir", "(", "self", ",", "path", ",", "fh", ")", "->", "'List[str]'", ":", "node", "=", "self", ".", "cache", ".", "resolve", "(", "path", ")", "if", "not", "node", ":", "raise", "FuseOSError", "(", "errno", ".", "ENOENT", ")", "if", "no...
https://github.com/yadayada/acd_cli/blob/cd4a9eea52f1740aa8de10d8c75ab2f6c17de52b/acdcli/acd_fuse.py#L422-L433
KalleHallden/AutoTimer
2d954216700c4930baa154e28dbddc34609af7ce
env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.py
python
unpack_archive
(filename, extract_dir=None, format=None)
Unpack an archive. `filename` is the name of the archive. `extract_dir` is the name of the target directory, where the archive is unpacked. If not provided, the current working directory is used. `format` is the archive format: one of "zip", "tar", or "gztar". Or any other registered format. If not provided, unpack_archive will use the filename extension and see if an unpacker was registered for that extension. In case none is found, a ValueError is raised.
Unpack an archive.
[ "Unpack", "an", "archive", "." ]
def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack the archive `filename` into `extract_dir`.

    `extract_dir` is the target directory; when not provided, the current
    working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar", or
    any other registered format.  When omitted, the format is deduced
    from the filename extension via the registered unpackers.

    Raises ValueError for an unknown explicit format and ReadError when
    no format can be deduced from the filename.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))
    else:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))
        format_info = _UNPACK_FORMATS[format]

    unpacker = format_info[1]
    unpacker(filename, extract_dir, **dict(format_info[2]))
[ "def", "unpack_archive", "(", "filename", ",", "extract_dir", "=", "None", ",", "format", "=", "None", ")", ":", "if", "extract_dir", "is", "None", ":", "extract_dir", "=", "os", ".", "getcwd", "(", ")", "if", "format", "is", "not", "None", ":", "try",...
https://github.com/KalleHallden/AutoTimer/blob/2d954216700c4930baa154e28dbddc34609af7ce/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.py#L727-L761
horstjens/ThePythonGameBook
e9be46cb519c65379c0d2c59d6434f2fa61b1232
en/pygame/004_vectorsprites.py
python
PygView.run
(self)
The mainloop
The mainloop
[ "The", "mainloop" ]
def run(self):
    """The mainloop

    Polls pygame events until quit/ESC, handling zoom, pan, rotate and
    clear keys for the 'africa' sprite, then redraws each frame with an
    FPS/playtime overlay.
    """
    self.paint()
    running = True
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            elif event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    running = False
                if event.key == pygame.K_c:
                    # clear the drawing surface to white
                    self.background.fill((255,255,255))
                if event.key == pygame.K_PLUS:
                    self.africa.zoom += 0.25
                    self.africa.draw()
                if event.key == pygame.K_MINUS:
                    self.africa.zoom -= 0.25
                    self.africa.draw()
                if event.key == pygame.K_RIGHT:
                    # shift the sprite origin 20 px to the right
                    self.africa.startpoint = Vec2d(self.africa.startpoint.x + 20, self.africa.startpoint.y)
                    self.africa.draw()
                if event.key == pygame.K_q:
                    self.africa.rotate(1)
                    self.africa.draw()
                if event.key == pygame.K_e:
                    self.africa.rotate(-1)
                    self.africa.draw()
                # superseded by the get_pressed() handling below, which
                # repeats while the key is held instead of per keypress
                #if event.key == pygame.K_f:
                #    self.africa.forward(10)
                #    self.africa.draw()
        # continuous (held-key) movement, independent of KEYDOWN events
        pressed = pygame.key.get_pressed()
        if pressed[pygame.K_f]:
            self.africa.forward(10)
            self.africa.draw()
        milliseconds = self.clock.tick(self.fps)
        self.playtime += milliseconds / 1000.0
        self.draw_text("FPS: {:6.3}{}PLAYTIME: {:6.3} SECONDS".format(
                       self.clock.get_fps(), " "*5, self.playtime))
        pygame.display.flip()
        self.screen.blit(self.background, (0, 0))
    pygame.quit()
[ "def", "run", "(", "self", ")", ":", "self", ".", "paint", "(", ")", "running", "=", "True", "while", "running", ":", "for", "event", "in", "pygame", ".", "event", ".", "get", "(", ")", ":", "if", "event", ".", "type", "==", "pygame", ".", "QUIT"...
https://github.com/horstjens/ThePythonGameBook/blob/e9be46cb519c65379c0d2c59d6434f2fa61b1232/en/pygame/004_vectorsprites.py#L419-L466
aaronportnoy/toolbag
2d39457a7617b2f334d203d8c8cf88a5a25ef1fa
toolbag/agent/dbg/vtrace/platforms/base.py
python
TracerBase.shouldRunAgain
(self)
return False
A unified place for the test as to weather this trace should be told to run again after reaching some stopping condition.
A unified place for the test as to weather this trace should be told to run again after reaching some stopping condition.
[ "A", "unified", "place", "for", "the", "test", "as", "to", "weather", "this", "trace", "should", "be", "told", "to", "run", "again", "after", "reaching", "some", "stopping", "condition", "." ]
def shouldRunAgain(self): """ A unified place for the test as to weather this trace should be told to run again after reaching some stopping condition. """ if not self.attached: return False if self.exited: return False if self.getMode("RunForever"): return True if self.runagain: return True return False
[ "def", "shouldRunAgain", "(", "self", ")", ":", "if", "not", "self", ".", "attached", ":", "return", "False", "if", "self", ".", "exited", ":", "return", "False", "if", "self", ".", "getMode", "(", "\"RunForever\"", ")", ":", "return", "True", "if", "s...
https://github.com/aaronportnoy/toolbag/blob/2d39457a7617b2f334d203d8c8cf88a5a25ef1fa/toolbag/agent/dbg/vtrace/platforms/base.py#L270-L288
misterch0c/shadowbroker
e3a069bea47a2c1009697941ac214adc6f90aa8d
windows/Resources/Python/Core/Lib/inspect.py
python
getsource
(object)
return string.join(lines, '')
Return the text of the source code for an object. The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a single string. An IOError is raised if the source code cannot be retrieved.
Return the text of the source code for an object. The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a single string. An IOError is raised if the source code cannot be retrieved.
[ "Return", "the", "text", "of", "the", "source", "code", "for", "an", "object", ".", "The", "argument", "may", "be", "a", "module", "class", "method", "function", "traceback", "frame", "or", "code", "object", ".", "The", "source", "code", "is", "returned", ...
def getsource(object): """Return the text of the source code for an object. The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a single string. An IOError is raised if the source code cannot be retrieved.""" lines, lnum = getsourcelines(object) return string.join(lines, '')
[ "def", "getsource", "(", "object", ")", ":", "lines", ",", "lnum", "=", "getsourcelines", "(", "object", ")", "return", "string", ".", "join", "(", "lines", ",", "''", ")" ]
https://github.com/misterch0c/shadowbroker/blob/e3a069bea47a2c1009697941ac214adc6f90aa8d/windows/Resources/Python/Core/Lib/inspect.py#L701-L708
ialbert/biostar-central
2dc7bd30691a50b2da9c2833ba354056bc686afa
biostar/recipes/views.py
python
data_serve
(request, uid, path)
return file_serve(request=request, path=path, obj=obj)
Serves files from a data directory.
Serves files from a data directory.
[ "Serves", "files", "from", "a", "data", "directory", "." ]
def data_serve(request, uid, path): """ Serves files from a data directory. """ obj = Data.objects.filter(uid=uid).first() return file_serve(request=request, path=path, obj=obj)
[ "def", "data_serve", "(", "request", ",", "uid", ",", "path", ")", ":", "obj", "=", "Data", ".", "objects", ".", "filter", "(", "uid", "=", "uid", ")", ".", "first", "(", ")", "return", "file_serve", "(", "request", "=", "request", ",", "path", "="...
https://github.com/ialbert/biostar-central/blob/2dc7bd30691a50b2da9c2833ba354056bc686afa/biostar/recipes/views.py#L774-L779
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/packages/site-packages/sqlalchemy/util/_collections.py
python
AbstractKeyedTuple.keys
(self)
return list(self._fields)
Return a list of string key names for this :class:`.KeyedTuple`. .. seealso:: :attr:`.KeyedTuple._fields`
Return a list of string key names for this :class:`.KeyedTuple`.
[ "Return", "a", "list", "of", "string", "key", "names", "for", "this", ":", "class", ":", ".", "KeyedTuple", "." ]
def keys(self): """Return a list of string key names for this :class:`.KeyedTuple`. .. seealso:: :attr:`.KeyedTuple._fields` """ return list(self._fields)
[ "def", "keys", "(", "self", ")", ":", "return", "list", "(", "self", ".", "_fields", ")" ]
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/packages/site-packages/sqlalchemy/util/_collections.py#L25-L34
stepjam/PyRep
d778d5d4ffa3be366d4e699f6e2941553fd47ecc
pyrep/backend/utils.py
python
script_call
(function_name_at_script_name: str, script_handle_or_type: int, ints=(), floats=(), strings=(), bytes='')
return sim.simExtCallScriptFunction( function_name_at_script_name, script_handle_or_type, list(ints), list(floats), list(strings), bytes)
Calls a script function (from a plugin, the main client application, or from another script). This represents a callback inside of a script. :param function_name_at_script_name: A string representing the function name and script name, e.g. myFunctionName@theScriptName. When the script is not associated with an object, then just specify the function name. :param script_handle_or_type: The handle of the script, otherwise the type of the script. :param ints: The input ints to the script. :param floats: The input floats to the script. :param strings: The input strings to the script. :param bytes: The input bytes to the script (as a string). :return: Any number of return values from the called Lua function.
Calls a script function (from a plugin, the main client application, or from another script). This represents a callback inside of a script.
[ "Calls", "a", "script", "function", "(", "from", "a", "plugin", "the", "main", "client", "application", "or", "from", "another", "script", ")", ".", "This", "represents", "a", "callback", "inside", "of", "a", "script", "." ]
def script_call(function_name_at_script_name: str, script_handle_or_type: int, ints=(), floats=(), strings=(), bytes='') -> ( Tuple[List[int], List[float], List[str], str]): """Calls a script function (from a plugin, the main client application, or from another script). This represents a callback inside of a script. :param function_name_at_script_name: A string representing the function name and script name, e.g. myFunctionName@theScriptName. When the script is not associated with an object, then just specify the function name. :param script_handle_or_type: The handle of the script, otherwise the type of the script. :param ints: The input ints to the script. :param floats: The input floats to the script. :param strings: The input strings to the script. :param bytes: The input bytes to the script (as a string). :return: Any number of return values from the called Lua function. """ return sim.simExtCallScriptFunction( function_name_at_script_name, script_handle_or_type, list(ints), list(floats), list(strings), bytes)
[ "def", "script_call", "(", "function_name_at_script_name", ":", "str", ",", "script_handle_or_type", ":", "int", ",", "ints", "=", "(", ")", ",", "floats", "=", "(", ")", ",", "strings", "=", "(", ")", ",", "bytes", "=", "''", ")", "->", "(", "Tuple", ...
https://github.com/stepjam/PyRep/blob/d778d5d4ffa3be366d4e699f6e2941553fd47ecc/pyrep/backend/utils.py#L46-L67
Netflix/brutal
13052ddbaf873acd9c9dac54b19a42ab7b70f7a9
brutal/core/bot.py
python
Bot.__str__
(self)
return repr(self)
[]
def __str__(self): return repr(self)
[ "def", "__str__", "(", "self", ")", ":", "return", "repr", "(", "self", ")" ]
https://github.com/Netflix/brutal/blob/13052ddbaf873acd9c9dac54b19a42ab7b70f7a9/brutal/core/bot.py#L56-L57
mozilla/kitsune
7c7cf9baed57aa776547aea744243ccad6ca91fb
kitsune/questions/models.py
python
Question.allows_delete
(self, user)
return user.has_perm("questions.delete_question")
Return whether `user` can delete this question.
Return whether `user` can delete this question.
[ "Return", "whether", "user", "can", "delete", "this", "question", "." ]
def allows_delete(self, user): """Return whether `user` can delete this question.""" return user.has_perm("questions.delete_question")
[ "def", "allows_delete", "(", "self", ",", "user", ")", ":", "return", "user", ".", "has_perm", "(", "\"questions.delete_question\"", ")" ]
https://github.com/mozilla/kitsune/blob/7c7cf9baed57aa776547aea744243ccad6ca91fb/kitsune/questions/models.py#L594-L596
lukalabs/cakechat
844507281b30d81b3fe3674895fe27826dba8438
cakechat/api/utils.py
python
_is_list_of_unicode_strings
(data)
return data and isinstance(data, (list, tuple)) and all(isinstance(s, str) for s in data)
[]
def _is_list_of_unicode_strings(data): return data and isinstance(data, (list, tuple)) and all(isinstance(s, str) for s in data)
[ "def", "_is_list_of_unicode_strings", "(", "data", ")", ":", "return", "data", "and", "isinstance", "(", "data", ",", "(", "list", ",", "tuple", ")", ")", "and", "all", "(", "isinstance", "(", "s", ",", "str", ")", "for", "s", "in", "data", ")" ]
https://github.com/lukalabs/cakechat/blob/844507281b30d81b3fe3674895fe27826dba8438/cakechat/api/utils.py#L9-L10
datastax/python-driver
5fdb0061f56f53b9d8d8ad67b99110899653ad77
cassandra/cluster.py
python
Cluster.get_max_connections_per_host
(self, host_distance)
return self._max_connections_per_host[host_distance]
Gets the maximum number of connections per Session that will be opened for each host with :class:`~.HostDistance` equal to `host_distance`. The default is 8 for :attr:`~HostDistance.LOCAL` and 2 for :attr:`~HostDistance.REMOTE`. This property is ignored if :attr:`~.Cluster.protocol_version` is 3 or higher.
Gets the maximum number of connections per Session that will be opened for each host with :class:`~.HostDistance` equal to `host_distance`. The default is 8 for :attr:`~HostDistance.LOCAL` and 2 for :attr:`~HostDistance.REMOTE`.
[ "Gets", "the", "maximum", "number", "of", "connections", "per", "Session", "that", "will", "be", "opened", "for", "each", "host", "with", ":", "class", ":", "~", ".", "HostDistance", "equal", "to", "host_distance", ".", "The", "default", "is", "8", "for", ...
def get_max_connections_per_host(self, host_distance): """ Gets the maximum number of connections per Session that will be opened for each host with :class:`~.HostDistance` equal to `host_distance`. The default is 8 for :attr:`~HostDistance.LOCAL` and 2 for :attr:`~HostDistance.REMOTE`. This property is ignored if :attr:`~.Cluster.protocol_version` is 3 or higher. """ return self._max_connections_per_host[host_distance]
[ "def", "get_max_connections_per_host", "(", "self", ",", "host_distance", ")", ":", "return", "self", ".", "_max_connections_per_host", "[", "host_distance", "]" ]
https://github.com/datastax/python-driver/blob/5fdb0061f56f53b9d8d8ad67b99110899653ad77/cassandra/cluster.py#L1584-L1594
google/mobly
d2efcf909824e4f1f457dc57530395c84dbc255f
mobly/asserts.py
python
assert_raises_regex
(expected_exception, expected_regex, extras=None, *args, **kwargs)
return context
Assert that an exception is raised when a function is called. If no exception is raised, test fail. If an exception is raised but not of the expected type, the exception is let through. If an exception of the expected type is raised but the error message does not match the expected_regex, test fail. This should only be used as a context manager: with assert_raises(Exception): func() Args: expected_exception: An exception class that is expected to be raised. extras: An optional field for extra information to be included in test result.
Assert that an exception is raised when a function is called.
[ "Assert", "that", "an", "exception", "is", "raised", "when", "a", "function", "is", "called", "." ]
def assert_raises_regex(expected_exception, expected_regex, extras=None, *args, **kwargs): """Assert that an exception is raised when a function is called. If no exception is raised, test fail. If an exception is raised but not of the expected type, the exception is let through. If an exception of the expected type is raised but the error message does not match the expected_regex, test fail. This should only be used as a context manager: with assert_raises(Exception): func() Args: expected_exception: An exception class that is expected to be raised. extras: An optional field for extra information to be included in test result. """ context = _AssertRaisesContext(expected_exception, expected_regex, extras=extras) return context
[ "def", "assert_raises_regex", "(", "expected_exception", ",", "expected_regex", ",", "extras", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "context", "=", "_AssertRaisesContext", "(", "expected_exception", ",", "expected_regex", ",", "extra...
https://github.com/google/mobly/blob/d2efcf909824e4f1f457dc57530395c84dbc255f/mobly/asserts.py#L315-L340
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/site-packages/ZSI-2.0-py2.7.egg/ZSI/writer.py
python
SoapWriter.__init__
(self, envelope=True, encodingStyle=None, header=True, nsdict={}, outputclass=None, **kw)
Initialize.
Initialize.
[ "Initialize", "." ]
def __init__(self, envelope=True, encodingStyle=None, header=True, nsdict={}, outputclass=None, **kw): '''Initialize. ''' outputclass = outputclass or ElementProxy if not issubclass(outputclass, MessageInterface): raise TypeError, 'outputclass must subclass MessageInterface' self.dom, self.memo, self.nsdict= \ outputclass(self), [], nsdict self.envelope = envelope self.encodingStyle = encodingStyle self.header = header self.body = None self.callbacks = [] self.closed = False
[ "def", "__init__", "(", "self", ",", "envelope", "=", "True", ",", "encodingStyle", "=", "None", ",", "header", "=", "True", ",", "nsdict", "=", "{", "}", ",", "outputclass", "=", "None", ",", "*", "*", "kw", ")", ":", "outputclass", "=", "outputclas...
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/site-packages/ZSI-2.0-py2.7.egg/ZSI/writer.py#L33-L48
TeamMsgExtractor/msg-extractor
8a3a0255a7306bdb8073bd8f222d3be5c688080a
extract_msg/message_base.py
python
MessageBase.rtfBody
(self)
return compressed_rtf.decompress(self.compressedRtf)
Returns the decompressed Rtf body from the message.
Returns the decompressed Rtf body from the message.
[ "Returns", "the", "decompressed", "Rtf", "body", "from", "the", "message", "." ]
def rtfBody(self): """ Returns the decompressed Rtf body from the message. """ return compressed_rtf.decompress(self.compressedRtf)
[ "def", "rtfBody", "(", "self", ")", ":", "return", "compressed_rtf", ".", "decompress", "(", "self", ".", "compressedRtf", ")" ]
https://github.com/TeamMsgExtractor/msg-extractor/blob/8a3a0255a7306bdb8073bd8f222d3be5c688080a/extract_msg/message_base.py#L360-L364
vmware/vsphere-automation-sdk-python
ba7d4e0742f58a641dfed9538ecbbb1db4f3891e
samples/vsphere/common/vim/datastore_file.py
python
FileArray.delete
(self, path=None)
return self[0].delete(path)
[]
def delete(self, path=None): self._check_unique() return self[0].delete(path)
[ "def", "delete", "(", "self", ",", "path", "=", "None", ")", ":", "self", ".", "_check_unique", "(", ")", "return", "self", "[", "0", "]", ".", "delete", "(", "path", ")" ]
https://github.com/vmware/vsphere-automation-sdk-python/blob/ba7d4e0742f58a641dfed9538ecbbb1db4f3891e/samples/vsphere/common/vim/datastore_file.py#L84-L86
nvaccess/nvda
20d5a25dced4da34338197f0ef6546270ebca5d0
source/NVDAObjects/IAccessible/chromium.py
python
ChromeVBufTextInfo._calculateDescriptionFrom
(self, attrs)
return super()._calculateDescriptionFrom(attrs)
Overridable calculation of DescriptionFrom @param attrs: source attributes for the TextInfo @return: the origin for accDescription. @note: Chrome provides 'IAccessible2::attribute_description-from' which declares the origin used for accDescription. Chrome also provides `IAccessible2::attribute_description` to maintain compatibility with FireFox.
Overridable calculation of DescriptionFrom
[ "Overridable", "calculation", "of", "DescriptionFrom" ]
def _calculateDescriptionFrom(self, attrs) -> controlTypes.DescriptionFrom: """Overridable calculation of DescriptionFrom @param attrs: source attributes for the TextInfo @return: the origin for accDescription. @note: Chrome provides 'IAccessible2::attribute_description-from' which declares the origin used for accDescription. Chrome also provides `IAccessible2::attribute_description` to maintain compatibility with FireFox. """ ia2attrDescriptionFrom = attrs.get("IAccessible2::attribute_description-from") try: return controlTypes.DescriptionFrom(ia2attrDescriptionFrom) except ValueError: if ia2attrDescriptionFrom: log.debugWarning(f"Unknown 'description-from' IA2Attribute value: {ia2attrDescriptionFrom}") # fallback to Firefox approach return super()._calculateDescriptionFrom(attrs)
[ "def", "_calculateDescriptionFrom", "(", "self", ",", "attrs", ")", "->", "controlTypes", ".", "DescriptionFrom", ":", "ia2attrDescriptionFrom", "=", "attrs", ".", "get", "(", "\"IAccessible2::attribute_description-from\"", ")", "try", ":", "return", "controlTypes", "...
https://github.com/nvaccess/nvda/blob/20d5a25dced4da34338197f0ef6546270ebca5d0/source/NVDAObjects/IAccessible/chromium.py#L21-L36
rucio/rucio
6d0d358e04f5431f0b9a98ae40f31af0ddff4833
lib/rucio/rse/protocols/rclone.py
python
Default.put
(self, filename, target, source_dir, transfer_timeout=None)
Allows to store files inside the referred RSE. :param source: path to the source file on the client file system :param target: path to the destination file on the storage :param source_dir: Path where the to be transferred files are stored in the local file system :param transfer_timeout: Transfer timeout (in seconds) - dummy :raises DestinationNotAccessible: if the destination storage was not accessible. :raises ServiceUnavailable: if some generic error occured in the library. :raises SourceNotFound: if the source file was not found on the referred storage.
Allows to store files inside the referred RSE.
[ "Allows", "to", "store", "files", "inside", "the", "referred", "RSE", "." ]
def put(self, filename, target, source_dir, transfer_timeout=None): """ Allows to store files inside the referred RSE. :param source: path to the source file on the client file system :param target: path to the destination file on the storage :param source_dir: Path where the to be transferred files are stored in the local file system :param transfer_timeout: Transfer timeout (in seconds) - dummy :raises DestinationNotAccessible: if the destination storage was not accessible. :raises ServiceUnavailable: if some generic error occured in the library. :raises SourceNotFound: if the source file was not found on the referred storage. """ self.logger(logging.DEBUG, 'rclone.put: filename: {} target: {}'.format(filename, target)) source_dir = source_dir or '.' source_url = '%s/%s' % (source_dir, filename) self.logger(logging.DEBUG, 'rclone.put: source url: {}'.format(source_url)) path = self.pfn2path(target) if not os.path.exists(source_url): raise exception.SourceNotFound() try: cmd = 'rclone copyto %s %s:%s' % (source_url, self.hostname, path) self.logger(logging.DEBUG, 'rclone.put: cmd: {}'.format(cmd)) status, out, err = execute(cmd) if status: raise exception.RucioException(err) except Exception as e: raise exception.ServiceUnavailable(e)
[ "def", "put", "(", "self", ",", "filename", ",", "target", ",", "source_dir", ",", "transfer_timeout", "=", "None", ")", ":", "self", ".", "logger", "(", "logging", ".", "DEBUG", ",", "'rclone.put: filename: {} target: {}'", ".", "format", "(", "filename", "...
https://github.com/rucio/rucio/blob/6d0d358e04f5431f0b9a98ae40f31af0ddff4833/lib/rucio/rse/protocols/rclone.py#L325-L353
ilayn/harold
2bfa00fca4549313d47386991a27c84c8fc91637
harold/_classes.py
python
Transfer.DiscretizedWith
(self)
This property is used internally to keep track of (if applicable) the original method used for discretization. It is used by the ``undiscretize()`` function to reach back to the continuous model that would hopefully minimize the discretization errors. It is also possible to manually set this property such that ``undiscretize`` uses the provided method.
This property is used internally to keep track of (if applicable) the original method used for discretization. It is used by the ``undiscretize()`` function to reach back to the continuous model that would hopefully minimize the discretization errors. It is also possible to manually set this property such that ``undiscretize`` uses the provided method.
[ "This", "property", "is", "used", "internally", "to", "keep", "track", "of", "(", "if", "applicable", ")", "the", "original", "method", "used", "for", "discretization", ".", "It", "is", "used", "by", "the", "undiscretize", "()", "function", "to", "reach", ...
def DiscretizedWith(self): """ This property is used internally to keep track of (if applicable) the original method used for discretization. It is used by the ``undiscretize()`` function to reach back to the continuous model that would hopefully minimize the discretization errors. It is also possible to manually set this property such that ``undiscretize`` uses the provided method. """ if self.SamplingSet == 'R' or self._DiscretizedWith is None: return None else: return self._DiscretizedWith
[ "def", "DiscretizedWith", "(", "self", ")", ":", "if", "self", ".", "SamplingSet", "==", "'R'", "or", "self", ".", "_DiscretizedWith", "is", "None", ":", "return", "None", "else", ":", "return", "self", ".", "_DiscretizedWith" ]
https://github.com/ilayn/harold/blob/2bfa00fca4549313d47386991a27c84c8fc91637/harold/_classes.py#L144-L156
rizsotto/scan-build
728e65fb6354022549797dbd399f5639e7bb92da
libscanbuild/report.py
python
bug_report
(output_dir, prefix)
return name
Creates a fragment from the analyzer reports.
Creates a fragment from the analyzer reports.
[ "Creates", "a", "fragment", "from", "the", "analyzer", "reports", "." ]
def bug_report(output_dir, prefix): # type: (str, str) -> str """ Creates a fragment from the analyzer reports. """ # pretty = prettify_bug(prefix, output_dir) # bugs = (pretty(bug) for bug in read_bugs(output_dir, True)) name = os.path.join(output_dir, 'bugs.html.fragment') with open(name, 'w') as handle: indent = 4 handle.write(reindent(""" |<h2>Reports</h2> |<table class="sortable" style="table-layout:automatic"> | <thead> | <tr> | <td>Bug Group</td> | <td class="sorttable_sorted"> | Bug Type | <span id="sorttable_sortfwdind">&nbsp;&#x25BE;</span> | </td> | <td>File</td> | <td>Function/Method</td> | <td class="Q">Line</td> | <td class="Q">Path Length</td> | <td class="sorttable_nosort"></td> | </tr> | </thead> | <tbody>""", indent)) handle.write(comment('REPORTBUGCOL')) for bug in read_bugs(output_dir, True): current = bug.pretty(prefix, output_dir) handle.write(reindent(""" | <tr class="{bug_type_class}"> | <td class="DESC">{bug_category}</td> | <td class="DESC">{bug_type}</td> | <td>{bug_file}</td> | <td class="DESC">{bug_function}</td> | <td class="Q">{bug_line}</td> | <td class="Q">{bug_path_length}</td> | <td><a href="{report_file}#EndPath">View Report</a></td> | </tr>""", indent).format(**current)) handle.write(comment('REPORTBUG', {'id': current['report_file']})) handle.write(reindent(""" | </tbody> |</table>""", indent)) handle.write(comment('REPORTBUGEND')) return name
[ "def", "bug_report", "(", "output_dir", ",", "prefix", ")", ":", "# type: (str, str) -> str", "# pretty = prettify_bug(prefix, output_dir)", "# bugs = (pretty(bug) for bug in read_bugs(output_dir, True))", "name", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", ...
https://github.com/rizsotto/scan-build/blob/728e65fb6354022549797dbd399f5639e7bb92da/libscanbuild/report.py#L166-L212
cagbal/ros_people_object_detection_tensorflow
982ffd4a54b8059638f5cd4aa167299c7fc9e61f
src/object_detection/core/model.py
python
DetectionModel.postprocess
(self, prediction_dict, true_image_shapes, **params)
Convert predicted output tensors to final detections. Outputs adhere to the following conventions: * Classes are integers in [0, num_classes); background classes are removed and the first non-background class is mapped to 0. If the model produces class-agnostic detections, then no output is produced for classes. * Boxes are to be interpreted as being in [y_min, x_min, y_max, x_max] format and normalized relative to the image window. * `num_detections` is provided for settings where detections are padded to a fixed number of boxes. * We do not specifically assume any kind of probabilistic interpretation of the scores --- the only important thing is their relative ordering. Thus implementations of the postprocess function are free to output logits, probabilities, calibrated probabilities, or anything else. Args: prediction_dict: a dictionary holding prediction tensors. true_image_shapes: int32 tensor of shape [batch, 3] where each row is of the form [height, width, channels] indicating the shapes of true images in the resized images, as resized images can be padded with zeros. **params: Additional keyword arguments for specific implementations of DetectionModel. Returns: detections: a dictionary containing the following fields detection_boxes: [batch, max_detections, 4] detection_scores: [batch, max_detections] detection_classes: [batch, max_detections] (If a model is producing class-agnostic detections, this field may be missing) instance_masks: [batch, max_detections, image_height, image_width] (optional) keypoints: [batch, max_detections, num_keypoints, 2] (optional) num_detections: [batch]
Convert predicted output tensors to final detections.
[ "Convert", "predicted", "output", "tensors", "to", "final", "detections", "." ]
def postprocess(self, prediction_dict, true_image_shapes, **params): """Convert predicted output tensors to final detections. Outputs adhere to the following conventions: * Classes are integers in [0, num_classes); background classes are removed and the first non-background class is mapped to 0. If the model produces class-agnostic detections, then no output is produced for classes. * Boxes are to be interpreted as being in [y_min, x_min, y_max, x_max] format and normalized relative to the image window. * `num_detections` is provided for settings where detections are padded to a fixed number of boxes. * We do not specifically assume any kind of probabilistic interpretation of the scores --- the only important thing is their relative ordering. Thus implementations of the postprocess function are free to output logits, probabilities, calibrated probabilities, or anything else. Args: prediction_dict: a dictionary holding prediction tensors. true_image_shapes: int32 tensor of shape [batch, 3] where each row is of the form [height, width, channels] indicating the shapes of true images in the resized images, as resized images can be padded with zeros. **params: Additional keyword arguments for specific implementations of DetectionModel. Returns: detections: a dictionary containing the following fields detection_boxes: [batch, max_detections, 4] detection_scores: [batch, max_detections] detection_classes: [batch, max_detections] (If a model is producing class-agnostic detections, this field may be missing) instance_masks: [batch, max_detections, image_height, image_width] (optional) keypoints: [batch, max_detections, num_keypoints, 2] (optional) num_detections: [batch] """ pass
[ "def", "postprocess", "(", "self", ",", "prediction_dict", ",", "true_image_shapes", ",", "*", "*", "params", ")", ":", "pass" ]
https://github.com/cagbal/ros_people_object_detection_tensorflow/blob/982ffd4a54b8059638f5cd4aa167299c7fc9e61f/src/object_detection/core/model.py#L176-L213
TheSouthFrog/stylealign
910632d2fccc9db61b00c265ae18a88913113c1d
pytorch_code/networks.py
python
VAEGen.forward
(self, images)
return images_recon, hiddens
[]
def forward(self, images): # This is a reduced VAE implementation where we assume the outputs are multivariate Gaussian distribution with mean = hiddens and std_dev = all ones. hiddens = self.encode(images) if self.training == True: noise = Variable(torch.randn(hiddens.size()).cuda(hiddens.data.get_device())) images_recon = self.decode(hiddens + noise) else: images_recon = self.decode(hiddens) return images_recon, hiddens
[ "def", "forward", "(", "self", ",", "images", ")", ":", "# This is a reduced VAE implementation where we assume the outputs are multivariate Gaussian distribution with mean = hiddens and std_dev = all ones.", "hiddens", "=", "self", ".", "encode", "(", "images", ")", "if", "self"...
https://github.com/TheSouthFrog/stylealign/blob/910632d2fccc9db61b00c265ae18a88913113c1d/pytorch_code/networks.py#L166-L174
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v8/services/services/hotel_performance_view_service/client.py
python
HotelPerformanceViewServiceClient.parse_common_project_path
(path: str)
return m.groupdict() if m else {}
Parse a project path into its component segments.
Parse a project path into its component segments.
[ "Parse", "a", "project", "path", "into", "its", "component", "segments", "." ]
def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {}
[ "def", "parse_common_project_path", "(", "path", ":", "str", ")", "->", "Dict", "[", "str", ",", "str", "]", ":", "m", "=", "re", ".", "match", "(", "r\"^projects/(?P<project>.+?)$\"", ",", "path", ")", "return", "m", ".", "groupdict", "(", ")", "if", ...
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v8/services/services/hotel_performance_view_service/client.py#L221-L224
getnikola/nikola
2da876e9322e42a93f8295f950e336465c6a4ee5
nikola/plugin_categories.py
python
TemplateSystem.get_string_deps
(self, text: str, context=None)
Find dependencies for a template string.
Find dependencies for a template string.
[ "Find", "dependencies", "for", "a", "template", "string", "." ]
def get_string_deps(self, text: str, context=None): """Find dependencies for a template string.""" raise NotImplementedError()
[ "def", "get_string_deps", "(", "self", ",", "text", ":", "str", ",", "context", "=", "None", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/getnikola/nikola/blob/2da876e9322e42a93f8295f950e336465c6a4ee5/nikola/plugin_categories.py#L233-L235
tensorflow/data-validation
6c68c219c5d78d3736fd011d8a7c53fbcb94379c
tensorflow_data_validation/statistics/generators/partitioned_stats_generator.py
python
PartitionedStatisticsAnalyzer.create_accumulator
(self)
return _PartitionedStatisticsAnalyzerAccumulator()
Creates an accumulator, which stores partial state of meta-statistics.
Creates an accumulator, which stores partial state of meta-statistics.
[ "Creates", "an", "accumulator", "which", "stores", "partial", "state", "of", "meta", "-", "statistics", "." ]
def create_accumulator(self) -> _PartitionedStatisticsAnalyzerAccumulator: """Creates an accumulator, which stores partial state of meta-statistics.""" return _PartitionedStatisticsAnalyzerAccumulator()
[ "def", "create_accumulator", "(", "self", ")", "->", "_PartitionedStatisticsAnalyzerAccumulator", ":", "return", "_PartitionedStatisticsAnalyzerAccumulator", "(", ")" ]
https://github.com/tensorflow/data-validation/blob/6c68c219c5d78d3736fd011d8a7c53fbcb94379c/tensorflow_data_validation/statistics/generators/partitioned_stats_generator.py#L162-L165
pypa/setuptools
9f37366aab9cd8f6baa23e6a77cfdb8daf97757e
setuptools/__init__.py
python
findall
(dir=os.curdir)
return list(files)
Find all files under 'dir' and return the list of full filenames. Unless dir is '.', return full filenames with dir prepended.
Find all files under 'dir' and return the list of full filenames. Unless dir is '.', return full filenames with dir prepended.
[ "Find", "all", "files", "under", "dir", "and", "return", "the", "list", "of", "full", "filenames", ".", "Unless", "dir", "is", ".", "return", "full", "filenames", "with", "dir", "prepended", "." ]
def findall(dir=os.curdir): """ Find all files under 'dir' and return the list of full filenames. Unless dir is '.', return full filenames with dir prepended. """ files = _find_all_simple(dir) if dir == os.curdir: make_rel = functools.partial(os.path.relpath, start=dir) files = map(make_rel, files) return list(files)
[ "def", "findall", "(", "dir", "=", "os", ".", "curdir", ")", ":", "files", "=", "_find_all_simple", "(", "dir", ")", "if", "dir", "==", "os", ".", "curdir", ":", "make_rel", "=", "functools", ".", "partial", "(", "os", ".", "path", ".", "relpath", ...
https://github.com/pypa/setuptools/blob/9f37366aab9cd8f6baa23e6a77cfdb8daf97757e/setuptools/__init__.py#L227-L236
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/cdb/v20170320/models.py
python
DescribeUploadedFilesRequest.__init__
(self)
r""" :param Path: 文件路径。该字段应填用户主账号的OwnerUin信息。 :type Path: str :param Offset: 记录偏移量,默认值为0。 :type Offset: int :param Limit: 单次请求返回的数量,默认值为20。 :type Limit: int
r""" :param Path: 文件路径。该字段应填用户主账号的OwnerUin信息。 :type Path: str :param Offset: 记录偏移量,默认值为0。 :type Offset: int :param Limit: 单次请求返回的数量,默认值为20。 :type Limit: int
[ "r", ":", "param", "Path", ":", "文件路径。该字段应填用户主账号的OwnerUin信息。", ":", "type", "Path", ":", "str", ":", "param", "Offset", ":", "记录偏移量,默认值为0。", ":", "type", "Offset", ":", "int", ":", "param", "Limit", ":", "单次请求返回的数量,默认值为20。", ":", "type", "Limit", ":", "int...
def __init__(self): r""" :param Path: 文件路径。该字段应填用户主账号的OwnerUin信息。 :type Path: str :param Offset: 记录偏移量,默认值为0。 :type Offset: int :param Limit: 单次请求返回的数量,默认值为20。 :type Limit: int """ self.Path = None self.Offset = None self.Limit = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "Path", "=", "None", "self", ".", "Offset", "=", "None", "self", ".", "Limit", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/cdb/v20170320/models.py#L5876-L5887
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/data_structures/mutable_poset.py
python
MutablePoset.union
(self, *other)
return new
r""" Return the union of the given posets as a new poset INPUT: - ``other`` -- a poset or an iterable. In the latter case the iterated objects are seen as elements of a poset. It is possible to specify more than one ``other`` as variadic arguments (arbitrary argument lists). OUTPUT: A poset. .. NOTE:: The key of an element is used for comparison. Thus elements with the same key are considered as equal. Due to keys and a ``merge`` function (see :class:`MutablePoset`) this operation might not be commutative. .. TODO:: Use the already existing information in the other poset to speed up this function. (At the moment each element of the other poset is inserted one by one and without using this information.) EXAMPLES:: sage: from sage.data_structures.mutable_poset import MutablePoset as MP sage: P = MP([3, 42, 7]); P poset(3, 7, 42) sage: Q = MP([4, 8, 42]); Q poset(4, 8, 42) sage: P.union(Q) poset(3, 4, 7, 8, 42) .. SEEALSO:: :meth:`union_update`, :meth:`difference`, :meth:`difference_update`, :meth:`intersection`, :meth:`intersection_update`, :meth:`symmetric_difference`, :meth:`symmetric_difference_update`, :meth:`is_disjoint`, :meth:`is_subset`, :meth:`is_superset`. TESTS:: sage: P.union(P, Q, Q, P) poset(3, 4, 7, 8, 42)
r""" Return the union of the given posets as a new poset
[ "r", "Return", "the", "union", "of", "the", "given", "posets", "as", "a", "new", "poset" ]
def union(self, *other): r""" Return the union of the given posets as a new poset INPUT: - ``other`` -- a poset or an iterable. In the latter case the iterated objects are seen as elements of a poset. It is possible to specify more than one ``other`` as variadic arguments (arbitrary argument lists). OUTPUT: A poset. .. NOTE:: The key of an element is used for comparison. Thus elements with the same key are considered as equal. Due to keys and a ``merge`` function (see :class:`MutablePoset`) this operation might not be commutative. .. TODO:: Use the already existing information in the other poset to speed up this function. (At the moment each element of the other poset is inserted one by one and without using this information.) EXAMPLES:: sage: from sage.data_structures.mutable_poset import MutablePoset as MP sage: P = MP([3, 42, 7]); P poset(3, 7, 42) sage: Q = MP([4, 8, 42]); Q poset(4, 8, 42) sage: P.union(Q) poset(3, 4, 7, 8, 42) .. SEEALSO:: :meth:`union_update`, :meth:`difference`, :meth:`difference_update`, :meth:`intersection`, :meth:`intersection_update`, :meth:`symmetric_difference`, :meth:`symmetric_difference_update`, :meth:`is_disjoint`, :meth:`is_subset`, :meth:`is_superset`. TESTS:: sage: P.union(P, Q, Q, P) poset(3, 4, 7, 8, 42) """ new = self.copy() new.update(*other) return new
[ "def", "union", "(", "self", ",", "*", "other", ")", ":", "new", "=", "self", ".", "copy", "(", ")", "new", ".", "update", "(", "*", "other", ")", "return", "new" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/data_structures/mutable_poset.py#L2577-L2633
qutip/qutip
52d01da181a21b810c3407812c670f35fdc647e8
qutip/qip/pulse.py
python
Pulse.get_full_tlist
(self, tol=1.0e-10)
return full_tlist
Return the full tlist of the pulses and noise. It means that if different ``tlist`` are present, they will be merged to one with all time points stored in a sorted array. Returns ------- full_tlist: array-like 1d The full time sequence for the noisy evolution.
Return the full tlist of the pulses and noise. It means that if different ``tlist`` are present, they will be merged to one with all time points stored in a sorted array.
[ "Return", "the", "full", "tlist", "of", "the", "pulses", "and", "noise", ".", "It", "means", "that", "if", "different", "tlist", "are", "present", "they", "will", "be", "merged", "to", "one", "with", "all", "time", "points", "stored", "in", "a", "sorted"...
def get_full_tlist(self, tol=1.0e-10): """ Return the full tlist of the pulses and noise. It means that if different ``tlist`` are present, they will be merged to one with all time points stored in a sorted array. Returns ------- full_tlist: array-like 1d The full time sequence for the noisy evolution. """ # TODO add test all_tlists = [] all_tlists.append(self.ideal_pulse.tlist) for pulse in self.coherent_noise: all_tlists.append(pulse.tlist) for c_op in self.lindblad_noise: all_tlists.append(c_op.tlist) all_tlists = [tlist for tlist in all_tlists if tlist is not None] if not all_tlists: return None full_tlist = np.unique(np.sort(np.hstack(all_tlists))) full_tlist = np.concatenate( (full_tlist[:1], full_tlist[1:][np.diff(full_tlist) > tol])) return full_tlist
[ "def", "get_full_tlist", "(", "self", ",", "tol", "=", "1.0e-10", ")", ":", "# TODO add test", "all_tlists", "=", "[", "]", "all_tlists", ".", "append", "(", "self", ".", "ideal_pulse", ".", "tlist", ")", "for", "pulse", "in", "self", ".", "coherent_noise"...
https://github.com/qutip/qutip/blob/52d01da181a21b810c3407812c670f35fdc647e8/qutip/qip/pulse.py#L402-L426
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/whoosh/src/whoosh/qparser/syntax.py
python
GroupNode.pop
(self, *args, **kwargs)
return self.nodes.pop(*args, **kwargs)
[]
def pop(self, *args, **kwargs): return self.nodes.pop(*args, **kwargs)
[ "def", "pop", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "nodes", ".", "pop", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/whoosh/src/whoosh/qparser/syntax.py#L310-L311
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-50/fabmetheus_utilities/geometry/manipulation_matrix/rotate.py
python
getManipulatedGeometryOutput
(elementNode, geometryOutput, prefix)
return geometryOutput
Get equated geometryOutput.
Get equated geometryOutput.
[ "Get", "equated", "geometryOutput", "." ]
def getManipulatedGeometryOutput(elementNode, geometryOutput, prefix): 'Get equated geometryOutput.' rotatePoints(elementNode, matrix.getVertexes(geometryOutput), prefix) return geometryOutput
[ "def", "getManipulatedGeometryOutput", "(", "elementNode", ",", "geometryOutput", ",", "prefix", ")", ":", "rotatePoints", "(", "elementNode", ",", "matrix", ".", "getVertexes", "(", "geometryOutput", ")", ",", "prefix", ")", "return", "geometryOutput" ]
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-50/fabmetheus_utilities/geometry/manipulation_matrix/rotate.py#L26-L29
littlecodersh/MyPlatform
6f9a946605466f580205f6e9e96e533720fce578
vendor/requests/packages/urllib3/_collections.py
python
HTTPHeaderDict.iteritems
(self)
Iterate over all header lines, including duplicate ones.
Iterate over all header lines, including duplicate ones.
[ "Iterate", "over", "all", "header", "lines", "including", "duplicate", "ones", "." ]
def iteritems(self): """Iterate over all header lines, including duplicate ones.""" for key in self: vals = self._container[key.lower()] for val in vals[1:]: yield vals[0], val
[ "def", "iteritems", "(", "self", ")", ":", "for", "key", "in", "self", ":", "vals", "=", "self", ".", "_container", "[", "key", ".", "lower", "(", ")", "]", "for", "val", "in", "vals", "[", "1", ":", "]", ":", "yield", "vals", "[", "0", "]", ...
https://github.com/littlecodersh/MyPlatform/blob/6f9a946605466f580205f6e9e96e533720fce578/vendor/requests/packages/urllib3/_collections.py#L291-L296
ifwe/digsby
f5fe00244744aa131e07f09348d10563f3d8fa99
digsby/src/common/logger.py
python
Logger.pathfor
(self, account, buddy)
return self.OutputDir.joinpath(buddy_path(account, buddy))
Returns the path to the directory where logs for the specified buddy is stored.
Returns the path to the directory where logs for the specified buddy is stored.
[ "Returns", "the", "path", "to", "the", "directory", "where", "logs", "for", "the", "specified", "buddy", "is", "stored", "." ]
def pathfor(self, account, buddy): 'Returns the path to the directory where logs for the specified buddy is stored.' return self.OutputDir.joinpath(buddy_path(account, buddy))
[ "def", "pathfor", "(", "self", ",", "account", ",", "buddy", ")", ":", "return", "self", ".", "OutputDir", ".", "joinpath", "(", "buddy_path", "(", "account", ",", "buddy", ")", ")" ]
https://github.com/ifwe/digsby/blob/f5fe00244744aa131e07f09348d10563f3d8fa99/digsby/src/common/logger.py#L268-L271
kuangliu/pytorch-cifar
49b7aa97b0c12fe0d4054e670403a16b6b834ddd
models/pnasnet.py
python
PNASNet._make_layer
(self, planes, num_cells)
return nn.Sequential(*layers)
[]
def _make_layer(self, planes, num_cells): layers = [] for _ in range(num_cells): layers.append(self.cell_type(self.in_planes, planes, stride=1)) self.in_planes = planes return nn.Sequential(*layers)
[ "def", "_make_layer", "(", "self", ",", "planes", ",", "num_cells", ")", ":", "layers", "=", "[", "]", "for", "_", "in", "range", "(", "num_cells", ")", ":", "layers", ".", "append", "(", "self", ".", "cell_type", "(", "self", ".", "in_planes", ",", ...
https://github.com/kuangliu/pytorch-cifar/blob/49b7aa97b0c12fe0d4054e670403a16b6b834ddd/models/pnasnet.py#L88-L93
nosmokingbandit/watcher
dadacd21a5790ee609058a98a17fcc8954d24439
lib/infi/pkg_resources/_vendor/pyparsing.py
python
ParserElement.addParseAction
( self, *fns, **kwargs )
return self
Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. See examples in L{I{copy}<copy>}.
Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. See examples in L{I{copy}<copy>}.
[ "Add", "parse", "action", "to", "expression", "s", "list", "of", "parse", "actions", ".", "See", "L", "{", "I", "{", "setParseAction", "}", "<setParseAction", ">", "}", ".", "See", "examples", "in", "L", "{", "I", "{", "copy", "}", "<copy", ">", "}",...
def addParseAction( self, *fns, **kwargs ): """ Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. See examples in L{I{copy}<copy>}. """ self.parseAction += list(map(_trim_arity, list(fns))) self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self
[ "def", "addParseAction", "(", "self", ",", "*", "fns", ",", "*", "*", "kwargs", ")", ":", "self", ".", "parseAction", "+=", "list", "(", "map", "(", "_trim_arity", ",", "list", "(", "fns", ")", ")", ")", "self", ".", "callDuringTry", "=", "self", "...
https://github.com/nosmokingbandit/watcher/blob/dadacd21a5790ee609058a98a17fcc8954d24439/lib/infi/pkg_resources/_vendor/pyparsing.py#L1265-L1273
google/pytype
fa43edc95dd42ade6e3147d6580d63e778c9d506
pytype/pytd/builtin_stubs.py
python
BuiltinsAndTyping.load
(self, options)
return b, t
Read builtins.pytd and typing.pytd, and return the parsed modules.
Read builtins.pytd and typing.pytd, and return the parsed modules.
[ "Read", "builtins", ".", "pytd", "and", "typing", ".", "pytd", "and", "return", "the", "parsed", "modules", "." ]
def load(self, options): """Read builtins.pytd and typing.pytd, and return the parsed modules.""" t = self._parse_predefined("typing", options) b = self._parse_predefined("builtins", options) b = b.Visit(visitors.LookupExternalTypes({"typing": t}, self_name="builtins")) t = t.Visit(visitors.LookupBuiltins(b)) b = b.Visit(visitors.NamedTypeToClassType()) t = t.Visit(visitors.NamedTypeToClassType()) b = b.Visit(visitors.AdjustTypeParameters()) t = t.Visit(visitors.AdjustTypeParameters()) b = b.Visit(visitors.CanonicalOrderingVisitor()) t = t.Visit(visitors.CanonicalOrderingVisitor()) b.Visit(visitors.FillInLocalPointers({"": b, "typing": t, "builtins": b})) t.Visit(visitors.FillInLocalPointers({"": t, "typing": t, "builtins": b})) b.Visit(visitors.VerifyLookup()) t.Visit(visitors.VerifyLookup()) b.Visit(visitors.VerifyContainers()) t.Visit(visitors.VerifyContainers()) return b, t
[ "def", "load", "(", "self", ",", "options", ")", ":", "t", "=", "self", ".", "_parse_predefined", "(", "\"typing\"", ",", "options", ")", "b", "=", "self", ".", "_parse_predefined", "(", "\"builtins\"", ",", "options", ")", "b", "=", "b", ".", "Visit",...
https://github.com/google/pytype/blob/fa43edc95dd42ade6e3147d6580d63e778c9d506/pytype/pytd/builtin_stubs.py#L52-L73
adamchainz/django-mysql
389594dc078f73c9f204306014332344fe4b6d04
src/django_mysql/models/fields/lists.py
python
ListCharField.check
(self, **kwargs: Any)
return errors
[]
def check(self, **kwargs: Any) -> List[checks.CheckMessage]: errors = super().check(**kwargs) # Unfortunately this check can't really be done for IntegerFields since # they have boundless length has_base_error = any(e.id == "django_mysql.E004" for e in errors) if ( not has_base_error and self.max_length is not None and isinstance(self.base_field, CharField) and self.size ): max_size = ( # The chars used (self.size * (self.base_field.max_length)) # The commas + self.size - 1 ) if max_size > self.max_length: errors.append( checks.Error( "Field can overrun - set contains CharFields of max " "length %s, leading to a comma-combined max length of " "%s, which is greater than the space reserved for the " "set - %s" % (self.base_field.max_length, max_size, self.max_length), hint=None, obj=self, id="django_mysql.E006", ) ) return errors
[ "def", "check", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "List", "[", "checks", ".", "CheckMessage", "]", ":", "errors", "=", "super", "(", ")", ".", "check", "(", "*", "*", "kwargs", ")", "# Unfortunately this check can't really be d...
https://github.com/adamchainz/django-mysql/blob/389594dc078f73c9f204306014332344fe4b6d04/src/django_mysql/models/fields/lists.py#L167-L199
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/dates.py
python
DateConverter.default_units
(x, axis)
return None
Return the tzinfo instance of *x* or of its first element, or None
Return the tzinfo instance of *x* or of its first element, or None
[ "Return", "the", "tzinfo", "instance", "of", "*", "x", "*", "or", "of", "its", "first", "element", "or", "None" ]
def default_units(x, axis): """ Return the tzinfo instance of *x* or of its first element, or None """ if isinstance(x, np.ndarray): x = x.ravel() try: x = cbook.safe_first_element(x) except (TypeError, StopIteration): pass try: return x.tzinfo except AttributeError: pass return None
[ "def", "default_units", "(", "x", ",", "axis", ")", ":", "if", "isinstance", "(", "x", ",", "np", ".", "ndarray", ")", ":", "x", "=", "x", ".", "ravel", "(", ")", "try", ":", "x", "=", "cbook", ".", "safe_first_element", "(", "x", ")", "except", ...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/dates.py#L1821-L1837
TesterlifeRaymond/doraemon
d5cb6e34bd5f2aa97273ce0c0c9303e32beaa333
src/lib/BeautifulReport.py
python
ReportTestResult.success_counter
(self)
return self.success_count
set success counter
set success counter
[ "set", "success", "counter" ]
def success_counter(self) -> int: """ set success counter """ return self.success_count
[ "def", "success_counter", "(", "self", ")", "->", "int", ":", "return", "self", ".", "success_count" ]
https://github.com/TesterlifeRaymond/doraemon/blob/d5cb6e34bd5f2aa97273ce0c0c9303e32beaa333/src/lib/BeautifulReport.py#L140-L142
microsoft/ptvsd
99c8513921021d2cc7cd82e132b65c644c256768
src/ptvsd/_vendored/pydevd/pycompletionserver.py
python
complete_from_dir
(directory)
This is necessary so that we get the imports from the same directory where the file we are completing is located.
This is necessary so that we get the imports from the same directory where the file we are completing is located.
[ "This", "is", "necessary", "so", "that", "we", "get", "the", "imports", "from", "the", "same", "directory", "where", "the", "file", "we", "are", "completing", "is", "located", "." ]
def complete_from_dir(directory): ''' This is necessary so that we get the imports from the same directory where the file we are completing is located. ''' global currDirModule if currDirModule is not None: if len(sys.path) > 0 and sys.path[0] == currDirModule: del sys.path[0] currDirModule = directory sys.path.insert(0, directory)
[ "def", "complete_from_dir", "(", "directory", ")", ":", "global", "currDirModule", "if", "currDirModule", "is", "not", "None", ":", "if", "len", "(", "sys", ".", "path", ")", ">", "0", "and", "sys", ".", "path", "[", "0", "]", "==", "currDirModule", ":...
https://github.com/microsoft/ptvsd/blob/99c8513921021d2cc7cd82e132b65c644c256768/src/ptvsd/_vendored/pydevd/pycompletionserver.py#L101-L112
trailofbits/algo
0c6e45a1944613636ccc03f66c4a3f3fe54411aa
library/linode_stackscript_v4.py
python
initialise_module
()
return AnsibleModule( argument_spec=dict( label=dict(type='str', required=True), state=dict( type='str', required=True, choices=['present', 'absent'] ), access_token=dict( type='str', required=True, no_log=True, fallback=(env_fallback, ['LINODE_ACCESS_TOKEN']), ), script=dict(type='str', required=True), images=dict(type='list', required=True), description=dict(type='str', required=False), public=dict(type='bool', required=False, default=False), ), supports_check_mode=False )
Initialise the module parameter specification.
Initialise the module parameter specification.
[ "Initialise", "the", "module", "parameter", "specification", "." ]
def initialise_module(): """Initialise the module parameter specification.""" return AnsibleModule( argument_spec=dict( label=dict(type='str', required=True), state=dict( type='str', required=True, choices=['present', 'absent'] ), access_token=dict( type='str', required=True, no_log=True, fallback=(env_fallback, ['LINODE_ACCESS_TOKEN']), ), script=dict(type='str', required=True), images=dict(type='list', required=True), description=dict(type='str', required=False), public=dict(type='bool', required=False, default=False), ), supports_check_mode=False )
[ "def", "initialise_module", "(", ")", ":", "return", "AnsibleModule", "(", "argument_spec", "=", "dict", "(", "label", "=", "dict", "(", "type", "=", "'str'", ",", "required", "=", "True", ")", ",", "state", "=", "dict", "(", "type", "=", "'str'", ",",...
https://github.com/trailofbits/algo/blob/0c6e45a1944613636ccc03f66c4a3f3fe54411aa/library/linode_stackscript_v4.py#L47-L69
gkrizek/bash-lambda-layer
703b0ade8174022d44779d823172ab7ac33a5505
bin/docutils/utils/math/math2html.py
python
Bracket.innerformula
(self, pos)
Parse a whole formula inside the bracket
Parse a whole formula inside the bracket
[ "Parse", "a", "whole", "formula", "inside", "the", "bracket" ]
def innerformula(self, pos): "Parse a whole formula inside the bracket" while not pos.finished(): self.add(self.factory.parseany(pos))
[ "def", "innerformula", "(", "self", ",", "pos", ")", ":", "while", "not", "pos", ".", "finished", "(", ")", ":", "self", ".", "add", "(", "self", ".", "factory", ".", "parseany", "(", "pos", ")", ")" ]
https://github.com/gkrizek/bash-lambda-layer/blob/703b0ade8174022d44779d823172ab7ac33a5505/bin/docutils/utils/math/math2html.py#L2795-L2798
okpy/ok
50a00190f05363d096478dd8e53aa1a36dd40c4a
server/models.py
python
User.is_staff
(self)
return query.count() > 0
Return True if the user is a staff member in any course.
Return True if the user is a staff member in any course.
[ "Return", "True", "if", "the", "user", "is", "a", "staff", "member", "in", "any", "course", "." ]
def is_staff(self): """ Return True if the user is a staff member in any course. """ if self.is_admin: return True query = (Enrollment.query.filter(Enrollment.user_id == self.id) .filter(Enrollment.role.in_(STAFF_ROLES))) return query.count() > 0
[ "def", "is_staff", "(", "self", ")", ":", "if", "self", ".", "is_admin", ":", "return", "True", "query", "=", "(", "Enrollment", ".", "query", ".", "filter", "(", "Enrollment", ".", "user_id", "==", "self", ".", "id", ")", ".", "filter", "(", "Enroll...
https://github.com/okpy/ok/blob/50a00190f05363d096478dd8e53aa1a36dd40c4a/server/models.py#L224-L230
dbt-labs/dbt-core
e943b9fc842535e958ef4fd0b8703adc91556bc6
core/dbt/task/generate.py
python
GenerateTask._get_manifest
(self)
return self.manifest
[]
def _get_manifest(self) -> Manifest: if self.manifest is None: raise InternalException( 'manifest should not be None in _get_manifest' ) return self.manifest
[ "def", "_get_manifest", "(", "self", ")", "->", "Manifest", ":", "if", "self", ".", "manifest", "is", "None", ":", "raise", "InternalException", "(", "'manifest should not be None in _get_manifest'", ")", "return", "self", ".", "manifest" ]
https://github.com/dbt-labs/dbt-core/blob/e943b9fc842535e958ef4fd0b8703adc91556bc6/core/dbt/task/generate.py#L197-L202
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/functions/special/spherical_harmonics.py
python
Ynm._eval_evalf
(self, prec)
return Expr._from_mpmath(res, prec)
[]
def _eval_evalf(self, prec): # Note: works without this function by just calling # mpmath for Legendre polynomials. But using # the dedicated function directly is cleaner. from sympy.mpmath import mp from sympy import Expr n = self.args[0]._to_mpmath(prec) m = self.args[1]._to_mpmath(prec) theta = self.args[2]._to_mpmath(prec) phi = self.args[3]._to_mpmath(prec) oprec = mp.prec mp.prec = prec res = mp.spherharm(n, m, theta, phi) mp.prec = oprec return Expr._from_mpmath(res, prec)
[ "def", "_eval_evalf", "(", "self", ",", "prec", ")", ":", "# Note: works without this function by just calling", "# mpmath for Legendre polynomials. But using", "# the dedicated function directly is cleaner.", "from", "sympy", ".", "mpmath", "import", "mp", "from", "s...
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/functions/special/spherical_harmonics.py#L216-L230
online-ml/river
3732f700da72642afe54095d4b252b05c5018c7d
river/neighbors/sam_knn.py
python
STMSizer._adapt_histories
(n_deletions, prediction_histories)
return prediction_histories
Removes predictions of the largest window size and shifts the remaining ones accordingly.
Removes predictions of the largest window size and shifts the remaining ones accordingly.
[ "Removes", "predictions", "of", "the", "largest", "window", "size", "and", "shifts", "the", "remaining", "ones", "accordingly", "." ]
def _adapt_histories(n_deletions, prediction_histories): """ Removes predictions of the largest window size and shifts the remaining ones accordingly. """ for i in range(n_deletions): sortedKeys = np.sort(list(prediction_histories.keys())) prediction_histories.pop(sortedKeys[0], None) delta = sortedKeys[1] for j in range(1, len(sortedKeys)): prediction_histories[sortedKeys[j] - delta] = prediction_histories.pop( sortedKeys[j] ) return prediction_histories
[ "def", "_adapt_histories", "(", "n_deletions", ",", "prediction_histories", ")", ":", "for", "i", "in", "range", "(", "n_deletions", ")", ":", "sortedKeys", "=", "np", ".", "sort", "(", "list", "(", "prediction_histories", ".", "keys", "(", ")", ")", ")", ...
https://github.com/online-ml/river/blob/3732f700da72642afe54095d4b252b05c5018c7d/river/neighbors/sam_knn.py#L649-L662
angr/angr
4b04d56ace135018083d36d9083805be8146688b
angr/analyses/decompiler/sequence_walker.py
python
SequenceWalker._handle_MultiNode
(self, node, **kwargs)
return None
[]
def _handle_MultiNode(self, node, **kwargs): i = 0 nodes_copy = list(node.nodes) while i < len(nodes_copy): node_ = nodes_copy[i] self._handle(node_, parent=node, index=i) i += 1 return None
[ "def", "_handle_MultiNode", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "i", "=", "0", "nodes_copy", "=", "list", "(", "node", ".", "nodes", ")", "while", "i", "<", "len", "(", "nodes_copy", ")", ":", "node_", "=", "nodes_copy", "[...
https://github.com/angr/angr/blob/4b04d56ace135018083d36d9083805be8146688b/angr/analyses/decompiler/sequence_walker.py#L63-L70
pymedusa/Medusa
1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38
medusa/subtitles.py
python
subtitle_code_filter
()
return {code for code in language_converters['opensubtitles'].codes if len(code) == 3}
Return a set of all 3-letter code languages of opensubtitles. :return: all 3-letter language codes :rtype: set of str
Return a set of all 3-letter code languages of opensubtitles.
[ "Return", "a", "set", "of", "all", "3", "-", "letter", "code", "languages", "of", "opensubtitles", "." ]
def subtitle_code_filter(): """Return a set of all 3-letter code languages of opensubtitles. :return: all 3-letter language codes :rtype: set of str """ return {code for code in language_converters['opensubtitles'].codes if len(code) == 3}
[ "def", "subtitle_code_filter", "(", ")", ":", "return", "{", "code", "for", "code", "in", "language_converters", "[", "'opensubtitles'", "]", ".", "codes", "if", "len", "(", "code", ")", "==", "3", "}" ]
https://github.com/pymedusa/Medusa/blob/1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38/medusa/subtitles.py#L147-L153
bitcraze/crazyflie-lib-python
876f0dc003b91ba5e4de05daae9d0b79cf600f81
cflib/bootloader/__init__.py
python
Bootloader.flash_full
(self, cf: Optional[Crazyflie] = None, filename: Optional[str] = None, warm: bool = True, targets: Optional[Tuple[str, ...]] = None, info_cb: Optional[Callable[[int, TargetTypes], NoReturn]] = None, progress_cb: Optional[Callable[[str, int], NoReturn]] = None, terminate_flash_cb: Optional[Callable[[], bool]] = None)
Flash .zip or bin .file to list of targets. Reset to firmware when done.
Flash .zip or bin .file to list of targets. Reset to firmware when done.
[ "Flash", ".", "zip", "or", "bin", ".", "file", "to", "list", "of", "targets", ".", "Reset", "to", "firmware", "when", "done", "." ]
def flash_full(self, cf: Optional[Crazyflie] = None, filename: Optional[str] = None, warm: bool = True, targets: Optional[Tuple[str, ...]] = None, info_cb: Optional[Callable[[int, TargetTypes], NoReturn]] = None, progress_cb: Optional[Callable[[str, int], NoReturn]] = None, terminate_flash_cb: Optional[Callable[[], bool]] = None): """ Flash .zip or bin .file to list of targets. Reset to firmware when done. """ if progress_cb is not None: self.progress_cb = progress_cb if terminate_flash_cb is not None: self.terminate_flashing_cb = terminate_flash_cb if not self.start_bootloader(warm_boot=warm, cf=cf): raise Exception('Could not connect to bootloader') if info_cb is not None: connected = (self.get_target(TargetTypes.STM32),) if self.protocol_version == BootVersion.CF2_PROTO_VER: connected += (self.get_target(TargetTypes.NRF51),) info_cb(self.protocol_version, connected) if filename is not None: self.flash(filename, targets, cf) self.reset_to_firmware()
[ "def", "flash_full", "(", "self", ",", "cf", ":", "Optional", "[", "Crazyflie", "]", "=", "None", ",", "filename", ":", "Optional", "[", "str", "]", "=", "None", ",", "warm", ":", "bool", "=", "True", ",", "targets", ":", "Optional", "[", "Tuple", ...
https://github.com/bitcraze/crazyflie-lib-python/blob/876f0dc003b91ba5e4de05daae9d0b79cf600f81/cflib/bootloader/__init__.py#L191-L218
mozman/ezdxf
59d0fc2ea63f5cf82293428f5931da7e9f9718e9
src/ezdxf/math/parametrize.py
python
estimate_tangents
( points: List[Vec3], method: str = "5-points", normalize=True )
Estimate tangents for curve defined by given fit points. Calculated tangents are normalized (unit-vectors). Available tangent estimation methods: - "3-points": 3 point interpolation - "5-points": 5 point interpolation - "bezier": tangents from an interpolated cubic bezier curve - "diff": finite difference Args: points: start-, end- and passing points of curve method: tangent estimation method normalize: normalize tangents if ``True`` Returns: tangents as list of :class:`Vec3` objects
Estimate tangents for curve defined by given fit points. Calculated tangents are normalized (unit-vectors).
[ "Estimate", "tangents", "for", "curve", "defined", "by", "given", "fit", "points", ".", "Calculated", "tangents", "are", "normalized", "(", "unit", "-", "vectors", ")", "." ]
def estimate_tangents( points: List[Vec3], method: str = "5-points", normalize=True ) -> List[Vec3]: """Estimate tangents for curve defined by given fit points. Calculated tangents are normalized (unit-vectors). Available tangent estimation methods: - "3-points": 3 point interpolation - "5-points": 5 point interpolation - "bezier": tangents from an interpolated cubic bezier curve - "diff": finite difference Args: points: start-, end- and passing points of curve method: tangent estimation method normalize: normalize tangents if ``True`` Returns: tangents as list of :class:`Vec3` objects """ method = method.lower() if method.startswith("bez"): return tangents_cubic_bezier_interpolation(points, normalize=normalize) elif method.startswith("3-p"): return tangents_3_point_interpolation(points, normalize=normalize) elif method.startswith("5-p"): return tangents_5_point_interpolation(points, normalize=normalize) elif method.startswith("dif"): return finite_difference_interpolation(points, normalize=normalize) else: raise ValueError(f"Unknown method: {method}")
[ "def", "estimate_tangents", "(", "points", ":", "List", "[", "Vec3", "]", ",", "method", ":", "str", "=", "\"5-points\"", ",", "normalize", "=", "True", ")", "->", "List", "[", "Vec3", "]", ":", "method", "=", "method", ".", "lower", "(", ")", "if", ...
https://github.com/mozman/ezdxf/blob/59d0fc2ea63f5cf82293428f5931da7e9f9718e9/src/ezdxf/math/parametrize.py#L100-L132
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/widgets.py
python
CheckButtons.get_status
(self)
return [l1.get_visible() for (l1, l2) in self.lines]
returns a tuple of the status (True/False) of all of the check buttons
returns a tuple of the status (True/False) of all of the check buttons
[ "returns", "a", "tuple", "of", "the", "status", "(", "True", "/", "False", ")", "of", "all", "of", "the", "check", "buttons" ]
def get_status(self): """ returns a tuple of the status (True/False) of all of the check buttons """ return [l1.get_visible() for (l1, l2) in self.lines]
[ "def", "get_status", "(", "self", ")", ":", "return", "[", "l1", ".", "get_visible", "(", ")", "for", "(", "l1", ",", "l2", ")", "in", "self", ".", "lines", "]" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/widgets.py#L610-L614
zbyte64/django-hyperadmin
9ac2ae284b76efb3c50a1c2899f383a27154cb54
hyperadmin/templatetags/hyperadmin_utils.py
python
include_namespace
(namespace)
return {'state':namespace.state, 'link':namespace.link, 'namespaces':namespace.get_namespaces(),}
[]
def include_namespace(namespace): return {'state':namespace.state, 'link':namespace.link, 'namespaces':namespace.get_namespaces(),}
[ "def", "include_namespace", "(", "namespace", ")", ":", "return", "{", "'state'", ":", "namespace", ".", "state", ",", "'link'", ":", "namespace", ".", "link", ",", "'namespaces'", ":", "namespace", ".", "get_namespaces", "(", ")", ",", "}" ]
https://github.com/zbyte64/django-hyperadmin/blob/9ac2ae284b76efb3c50a1c2899f383a27154cb54/hyperadmin/templatetags/hyperadmin_utils.py#L33-L36
axcore/tartube
36dd493642923fe8b9190a41db596c30c043ae90
tartube/config.py
python
GenericEditWin.on_radiobutton_toggled
(self, radiobutton, prop, value)
Called from a callback in self.add_radiobutton(). Adds a key-value pair to self.edit_dict, but only if this radiobutton (from those in the group) is the selected one. Args: radiobutton (Gtk.RadioButton): The widget clicked prop (str): The attribute in self.edit_obj to modify value (-): The attribute's new value
Called from a callback in self.add_radiobutton().
[ "Called", "from", "a", "callback", "in", "self", ".", "add_radiobutton", "()", "." ]
def on_radiobutton_toggled(self, radiobutton, prop, value): """Called from a callback in self.add_radiobutton(). Adds a key-value pair to self.edit_dict, but only if this radiobutton (from those in the group) is the selected one. Args: radiobutton (Gtk.RadioButton): The widget clicked prop (str): The attribute in self.edit_obj to modify value (-): The attribute's new value """ if radiobutton.get_active(): self.edit_dict[prop] = value
[ "def", "on_radiobutton_toggled", "(", "self", ",", "radiobutton", ",", "prop", ",", "value", ")", ":", "if", "radiobutton", ".", "get_active", "(", ")", ":", "self", ".", "edit_dict", "[", "prop", "]", "=", "value" ]
https://github.com/axcore/tartube/blob/36dd493642923fe8b9190a41db596c30c043ae90/tartube/config.py#L1793-L1811
sqlfluff/sqlfluff
c2278f41f270a29ef5ffc6b179236abf32dc18e1
src/sqlfluff/core/linter/linter.py
python
Linter.lint_string
( self, in_str: str = "", fname: str = "<string input>", fix: bool = False, config: Optional[FluffConfig] = None, encoding: str = "utf8", )
return self.lint_parsed( parsed, rule_set, fix=fix, formatter=self.formatter, encoding=encoding, )
Lint a string. Returns: :obj:`LintedFile`: an object representing that linted file.
Lint a string.
[ "Lint", "a", "string", "." ]
def lint_string( self, in_str: str = "", fname: str = "<string input>", fix: bool = False, config: Optional[FluffConfig] = None, encoding: str = "utf8", ) -> LintedFile: """Lint a string. Returns: :obj:`LintedFile`: an object representing that linted file. """ # Sort out config, defaulting to the built in config if no override config = config or self.config # Parse the string. parsed = self.parse_string( in_str=in_str, fname=fname, config=config, ) # Get rules as appropriate rule_set = self.get_ruleset(config=config) # Lint the file and return the LintedFile return self.lint_parsed( parsed, rule_set, fix=fix, formatter=self.formatter, encoding=encoding, )
[ "def", "lint_string", "(", "self", ",", "in_str", ":", "str", "=", "\"\"", ",", "fname", ":", "str", "=", "\"<string input>\"", ",", "fix", ":", "bool", "=", "False", ",", "config", ":", "Optional", "[", "FluffConfig", "]", "=", "None", ",", "encoding"...
https://github.com/sqlfluff/sqlfluff/blob/c2278f41f270a29ef5ffc6b179236abf32dc18e1/src/sqlfluff/core/linter/linter.py#L725-L756
tflearn/tflearn
db5176773299b67a2a75c5889fb2aba7fd0fea8a
tflearn/models/dnn.py
python
DNN.predict_label
(self, X)
Predict Label. Predict class labels for input X. Arguments: X: array, `list` of array (if multiple inputs) or `dict` (with inputs layer name as keys). Data to feed for prediction. Returns: array or `list` of array. The predicted classes index array, sorted by descendant probability value.
Predict Label.
[ "Predict", "Label", "." ]
def predict_label(self, X): """ Predict Label. Predict class labels for input X. Arguments: X: array, `list` of array (if multiple inputs) or `dict` (with inputs layer name as keys). Data to feed for prediction. Returns: array or `list` of array. The predicted classes index array, sorted by descendant probability value. """ feed_dict = feed_dict_builder(X, None, self.inputs, None) labels = np.argsort(self.predictor.predict(feed_dict)) if labels.ndim == 1: return labels[::-1] else: return labels[:, ::-1]
[ "def", "predict_label", "(", "self", ",", "X", ")", ":", "feed_dict", "=", "feed_dict_builder", "(", "X", ",", "None", ",", "self", ".", "inputs", ",", "None", ")", "labels", "=", "np", ".", "argsort", "(", "self", ".", "predictor", ".", "predict", "...
https://github.com/tflearn/tflearn/blob/db5176773299b67a2a75c5889fb2aba7fd0fea8a/tflearn/models/dnn.py#L253-L272
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/google/appengine/tools/sdk_update_checker.py
python
SDKUpdateChecker.CheckForUpdates
(self)
Queries the server for updates and nags the user if appropriate. Queries the server for the latest SDK version at the same time reporting the local SDK version. The server will respond with a yaml document containing the fields: 'release': The name of the release (e.g. 1.2). 'timestamp': The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ). 'api_versions': A list of api_version strings (e.g. ['1', 'beta']). We will nag the user with increasing severity if: - There is a new release. - There is a new release with a new api_version. - There is a new release that does not support an api_version named in a configuration in self.configs.
Queries the server for updates and nags the user if appropriate.
[ "Queries", "the", "server", "for", "updates", "and", "nags", "the", "user", "if", "appropriate", "." ]
def CheckForUpdates(self): """Queries the server for updates and nags the user if appropriate. Queries the server for the latest SDK version at the same time reporting the local SDK version. The server will respond with a yaml document containing the fields: 'release': The name of the release (e.g. 1.2). 'timestamp': The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ). 'api_versions': A list of api_version strings (e.g. ['1', 'beta']). We will nag the user with increasing severity if: - There is a new release. - There is a new release with a new api_version. - There is a new release that does not support an api_version named in a configuration in self.configs. """ version = self._ParseVersionFile() if version is None: logging.info('Skipping update check') return logging.info('Checking for updates to the SDK.') responses = {} try: for runtime in self.runtimes: responses[runtime] = yaml.safe_load(self.rpcserver.Send( '/api/updatecheck', timeout=UPDATE_CHECK_TIMEOUT, release=version['release'], timestamp=version['timestamp'], api_versions=version['api_versions'], runtime=runtime)) except (urllib2.URLError, socket.error), e: logging.info('Update check failed: %s', e) return try: latest = sorted(responses.values(), reverse=True, key=lambda release: _VersionList(release['release']))[0] except ValueError: logging.warn('Could not parse this release version') if version['release'] == latest['release']: logging.info('The SDK is up to date.') return try: this_release = _VersionList(version['release']) except ValueError: logging.warn('Could not parse this release version (%r)', version['release']) else: try: advertised_release = _VersionList(latest['release']) except ValueError: logging.warn('Could not parse advertised release version (%r)', latest['release']) else: if this_release > advertised_release: logging.info('This SDK release is newer than the advertised release.') return for runtime, response in responses.items(): api_versions = response['api_versions'] 
obsolete_versions = sorted( self.runtime_to_api_version[runtime] - set(api_versions)) if len(obsolete_versions) == 1: self._Nag( 'The api version you are using (%s) is obsolete! You should\n' 'upgrade your SDK and test that your code works with the new\n' 'api version.' % obsolete_versions[0], response, version, force=True) elif obsolete_versions: self._Nag( 'The api versions you are using (%s) are obsolete! You should\n' 'upgrade your SDK and test that your code works with the new\n' 'api version.' % obsolete_versions, response, version, force=True) deprecated_versions = sorted( self.runtime_to_api_version[runtime].intersection(api_versions[:-1])) if len(deprecated_versions) == 1: self._Nag( 'The api version you are using (%s) is deprecated. You should\n' 'upgrade your SDK to try the new functionality.' % deprecated_versions[0], response, version) elif deprecated_versions: self._Nag( 'The api versions you are using (%s) are deprecated. You should\n' 'upgrade your SDK to try the new functionality.' % deprecated_versions, response, version) self._Nag('There is a new release of the SDK available.', latest, version)
[ "def", "CheckForUpdates", "(", "self", ")", ":", "version", "=", "self", ".", "_ParseVersionFile", "(", ")", "if", "version", "is", "None", ":", "logging", ".", "info", "(", "'Skipping update check'", ")", "return", "logging", ".", "info", "(", "'Checking fo...
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/google/appengine/tools/sdk_update_checker.py#L225-L323
santhoshkolloju/Abstractive-Summarization-With-Transfer-Learning
97ff2ae3ba9f2d478e174444c4e0f5349f28c319
texar_repo/texar/modules/classifiers/conv_classifiers.py
python
Conv1DClassifier.trainable_variables
(self)
return self._encoder.trainable_variables
The list of trainable variables of the module.
The list of trainable variables of the module.
[ "The", "list", "of", "trainable", "variables", "of", "the", "module", "." ]
def trainable_variables(self): """The list of trainable variables of the module. """ if not self._built: raise TexarError( "Attempting to access trainable_variables before module %s " "was fully built. The module is built once it is called, " "e.g., with `%s(...)`" % (self.name, self.name)) return self._encoder.trainable_variables
[ "def", "trainable_variables", "(", "self", ")", ":", "if", "not", "self", ".", "_built", ":", "raise", "TexarError", "(", "\"Attempting to access trainable_variables before module %s \"", "\"was fully built. The module is built once it is called, \"", "\"e.g., with `%s(...)`\"", ...
https://github.com/santhoshkolloju/Abstractive-Summarization-With-Transfer-Learning/blob/97ff2ae3ba9f2d478e174444c4e0f5349f28c319/texar_repo/texar/modules/classifiers/conv_classifiers.py#L199-L207
barseghyanartur/django-fobi
a998feae007d7fe3637429a80e42952ec7cda79f
src/fobi/views/function_based.py
python
edit_form_handler_entry
(request, form_handler_entry_id, theme=None, template_name=None)
return render(request, template_name, context)
Edit form handler entry. :param django.http.HttpRequest request: :param int form_handler_entry_id: :param fobi.base.BaseTheme theme: Theme instance. :param string template_name: :return django.http.HttpResponse:
Edit form handler entry.
[ "Edit", "form", "handler", "entry", "." ]
def edit_form_handler_entry(request, form_handler_entry_id, theme=None, template_name=None): """Edit form handler entry. :param django.http.HttpRequest request: :param int form_handler_entry_id: :param fobi.base.BaseTheme theme: Theme instance. :param string template_name: :return django.http.HttpResponse: """ try: obj = FormHandlerEntry._default_manager \ .select_related('form_entry') \ .get(pk=form_handler_entry_id) except ObjectDoesNotExist as err: raise Http404(gettext("Form handler entry not found.")) form_entry = obj.form_entry form_handler_plugin = obj.get_plugin(request=request) form_handler_plugin.request = request FormHandlerPluginForm = form_handler_plugin.get_form() form = None if not FormHandlerPluginForm: messages.info( request, gettext('The form handler plugin "{0}" is not ' 'configurable!').format(form_handler_plugin.name) ) return redirect('fobi.edit_form_entry', form_entry_id=form_entry.pk) elif request.method == 'POST': form = form_handler_plugin.get_initialised_edit_form_or_404( data=request.POST, files=request.FILES ) if form.is_valid(): # Saving the plugin form data. form.save_plugin_data(request=request) # Getting the plugin data. obj.plugin_data = form.get_plugin_data(request=request) # Save the object. obj.save() messages.info( request, gettext('The form handler plugin "{0}" was edited ' 'successfully.').format(form_handler_plugin.name) ) return redirect('fobi.edit_form_entry', form_entry_id=form_entry.pk) else: form = form_handler_plugin.get_initialised_edit_form_or_404() context = { 'form': form, 'form_entry': form_entry, 'form_handler_plugin': form_handler_plugin, } # If given, pass to the template (and override the value set by # the context processor. if theme: context.update({'fobi_theme': theme}) if not template_name: if not theme: theme = get_theme(request=request, as_instance=True) template_name = theme.edit_form_handler_entry_template return render(request, template_name, context)
[ "def", "edit_form_handler_entry", "(", "request", ",", "form_handler_entry_id", ",", "theme", "=", "None", ",", "template_name", "=", "None", ")", ":", "try", ":", "obj", "=", "FormHandlerEntry", ".", "_default_manager", ".", "select_related", "(", "'form_entry'",...
https://github.com/barseghyanartur/django-fobi/blob/a998feae007d7fe3637429a80e42952ec7cda79f/src/fobi/views/function_based.py#L986-L1065
python/cpython
e13cdca0f5224ec4e23bdd04bb3120506964bc8b
Lib/pyclbr.py
python
readmodule
(module, path=None)
return res
Return Class objects for the top-level classes in module. This is the original interface, before Functions were added.
Return Class objects for the top-level classes in module.
[ "Return", "Class", "objects", "for", "the", "top", "-", "level", "classes", "in", "module", "." ]
def readmodule(module, path=None): """Return Class objects for the top-level classes in module. This is the original interface, before Functions were added. """ res = {} for key, value in _readmodule(module, path or []).items(): if isinstance(value, Class): res[key] = value return res
[ "def", "readmodule", "(", "module", ",", "path", "=", "None", ")", ":", "res", "=", "{", "}", "for", "key", ",", "value", "in", "_readmodule", "(", "module", ",", "path", "or", "[", "]", ")", ".", "items", "(", ")", ":", "if", "isinstance", "(", ...
https://github.com/python/cpython/blob/e13cdca0f5224ec4e23bdd04bb3120506964bc8b/Lib/pyclbr.py#L100-L110
log2timeline/dftimewolf
18b4d0760d6a6ca378ac06c2242d04a0d8caf072
dftimewolf/lib/processors/turbinia_artifact.py
python
TurbiniaArtifactProcessor.SetUp
(self, turbinia_config_file: str, project: str, turbinia_zone: str, directory_path: str, sketch_id: int, run_all_jobs: bool)
Sets up the object attributes. Args: turbinia_config_file (str): Full path to the Turbinia config file to use. project (str): name of the GCP project containing the disk to process. turbinia_zone (str): GCP zone in which the Turbinia server is running. directory_path (str): Name of the directory to process. sketch_id (int): The Timesketch sketch ID. run_all_jobs (bool): Whether to run all jobs instead of a faster subset.
Sets up the object attributes.
[ "Sets", "up", "the", "object", "attributes", "." ]
def SetUp(self, turbinia_config_file: str, project: str, turbinia_zone: str, directory_path: str, sketch_id: int, run_all_jobs: bool) -> None: """Sets up the object attributes. Args: turbinia_config_file (str): Full path to the Turbinia config file to use. project (str): name of the GCP project containing the disk to process. turbinia_zone (str): GCP zone in which the Turbinia server is running. directory_path (str): Name of the directory to process. sketch_id (int): The Timesketch sketch ID. run_all_jobs (bool): Whether to run all jobs instead of a faster subset. """ self.turbinia_config_file = turbinia_config_file self.directory_path = directory_path if not self.directory_path: self.directory_path = tempfile.mkdtemp(prefix='turbinia-results') self.logger.success('Turbinia results will be dumped to {0:s}'.format( self.directory_path)) try: self.TurbiniaSetUp(project, turbinia_zone, sketch_id, run_all_jobs) except TurbiniaException as exception: self.ModuleError(str(exception), critical=True) return
[ "def", "SetUp", "(", "self", ",", "turbinia_config_file", ":", "str", ",", "project", ":", "str", ",", "turbinia_zone", ":", "str", ",", "directory_path", ":", "str", ",", "sketch_id", ":", "int", ",", "run_all_jobs", ":", "bool", ")", "->", "None", ":",...
https://github.com/log2timeline/dftimewolf/blob/18b4d0760d6a6ca378ac06c2242d04a0d8caf072/dftimewolf/lib/processors/turbinia_artifact.py#L45-L72
exodrifter/unity-python
bef6e4e9ddfbbf1eaf7acbbb973e9aa3dd64a20d
Lib/mailbox.py
python
Mailbox.__setitem__
(self, key, message)
Replace the keyed message; raise KeyError if it doesn't exist.
Replace the keyed message; raise KeyError if it doesn't exist.
[ "Replace", "the", "keyed", "message", ";", "raise", "KeyError", "if", "it", "doesn", "t", "exist", "." ]
def __setitem__(self, key, message): """Replace the keyed message; raise KeyError if it doesn't exist.""" raise NotImplementedError('Method must be implemented by subclass')
[ "def", "__setitem__", "(", "self", ",", "key", ",", "message", ")", ":", "raise", "NotImplementedError", "(", "'Method must be implemented by subclass'", ")" ]
https://github.com/exodrifter/unity-python/blob/bef6e4e9ddfbbf1eaf7acbbb973e9aa3dd64a20d/Lib/mailbox.py#L66-L68
mozilla/pontoon
d26999eea57902a30b5c15e9b77277fe7e76a60f
pontoon/insights/tasks.py
python
get_active_users_actions
(start_of_today)
return group_dict_by(actions, "translation__locale")
Get actions of the previous year, needed for the Active users charts.
Get actions of the previous year, needed for the Active users charts.
[ "Get", "actions", "of", "the", "previous", "year", "needed", "for", "the", "Active", "users", "charts", "." ]
def get_active_users_actions(start_of_today): """Get actions of the previous year, needed for the Active users charts.""" actions = ( ActionLog.objects.filter( created_at__gte=start_of_today - relativedelta(year=1), created_at__lt=start_of_today, ) .values("action_type", "created_at", "performed_by", "translation__locale") .distinct() ) return group_dict_by(actions, "translation__locale")
[ "def", "get_active_users_actions", "(", "start_of_today", ")", ":", "actions", "=", "(", "ActionLog", ".", "objects", ".", "filter", "(", "created_at__gte", "=", "start_of_today", "-", "relativedelta", "(", "year", "=", "1", ")", ",", "created_at__lt", "=", "s...
https://github.com/mozilla/pontoon/blob/d26999eea57902a30b5c15e9b77277fe7e76a60f/pontoon/insights/tasks.py#L137-L148
pdm-project/pdm
34ba2ea48bf079044b0ca8c0017f3c0e7d9e198b
pdm/cli/commands/plugin.py
python
RemoveCommand._resolve_dependencies_to_remove
(self, packages: list[str])
return sorted(result)
Perform a BFS to find all unneeded dependencies
Perform a BFS to find all unneeded dependencies
[ "Perform", "a", "BFS", "to", "find", "all", "unneeded", "dependencies" ]
def _resolve_dependencies_to_remove(self, packages: list[str]) -> list[str]: """Perform a BFS to find all unneeded dependencies""" result: set[str] = set() to_resolve = list(packages) ws = WorkingSet() graph = build_dependency_graph(ws) while to_resolve: temp: list[Package] = [] for name in to_resolve: key = normalize_name(name) if key in ws: result.add(key) package = Package(key, "0.0.0", {}) if package not in graph: continue for dep in graph.iter_children(package): temp.append(dep) graph.remove(package) to_resolve.clear() for dep in temp: if not any(graph.iter_parents(dep)) and dep.name != "pdm": to_resolve.append(dep.name) return sorted(result)
[ "def", "_resolve_dependencies_to_remove", "(", "self", ",", "packages", ":", "list", "[", "str", "]", ")", "->", "list", "[", "str", "]", ":", "result", ":", "set", "[", "str", "]", "=", "set", "(", ")", "to_resolve", "=", "list", "(", "packages", ")...
https://github.com/pdm-project/pdm/blob/34ba2ea48bf079044b0ca8c0017f3c0e7d9e198b/pdm/cli/commands/plugin.py#L137-L162
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/core/leoAtFile.py
python
AtFile.putSentinel
(self, s)
Write a sentinel whose text is s, applying the CWEB hack if needed. This method outputs all sentinels.
Write a sentinel whose text is s, applying the CWEB hack if needed.
[ "Write", "a", "sentinel", "whose", "text", "is", "s", "applying", "the", "CWEB", "hack", "if", "needed", "." ]
def putSentinel(self, s): """ Write a sentinel whose text is s, applying the CWEB hack if needed. This method outputs all sentinels. """ at = self if at.sentinels or hasattr(at, 'force_sentinels'): at.putIndent(at.indent) at.os(at.startSentinelComment) # #2194. The following would follow the black convention, # but doing so is a dubious idea. # at.os(' ') # Apply the cweb hack to s: # If the opening comment delim ends in '@', # double all '@' signs except the first. start = at.startSentinelComment if start and start[-1] == '@': s = s.replace('@', '@@')[1:] at.os(s) if at.endSentinelComment: at.os(at.endSentinelComment) at.onl()
[ "def", "putSentinel", "(", "self", ",", "s", ")", ":", "at", "=", "self", "if", "at", ".", "sentinels", "or", "hasattr", "(", "at", ",", "'force_sentinels'", ")", ":", "at", ".", "putIndent", "(", "at", ".", "indent", ")", "at", ".", "os", "(", "...
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/core/leoAtFile.py#L2074-L2096
nschaetti/EchoTorch
cba209c49e0fda73172d2e853b85c747f9f5117e
echotorch/transforms/Transformer.py
python
Transformer.__repr__
(self)
return init_str
String :return:
String :return:
[ "String", ":", "return", ":" ]
def __repr__(self): """ String :return: """ # Class init_str = type(self).__name__ + "(" # For each attributes index = 0 for attr in dir(self): if "_" not in attr: attr_value = getattr(self, attr) if type(attr_value) is int or type(attr_value) is float or type(attr_value) is str\ or type(attr_value) is tuple: add_begin = " " if index != 0 else "" init_str += add_begin + "{}={}, ".format(attr, getattr(self, attr)) index += 1 # end if # end if # end for # Remove ", " if init_str[-2:] == ", ": init_str = init_str[:-2] # end if # ) init_str += ")" return init_str
[ "def", "__repr__", "(", "self", ")", ":", "# Class", "init_str", "=", "type", "(", "self", ")", ".", "__name__", "+", "\"(\"", "# For each attributes", "index", "=", "0", "for", "attr", "in", "dir", "(", "self", ")", ":", "if", "\"_\"", "not", "in", ...
https://github.com/nschaetti/EchoTorch/blob/cba209c49e0fda73172d2e853b85c747f9f5117e/echotorch/transforms/Transformer.py#L119-L149
FSecureLABS/Jandroid
e31d0dab58a2bfd6ed8e0a387172b8bd7c893436
gui/appJar.py
python
gui.enableEnter
(self, func, replace=False)
Binds <Return> to the specified function - all widgets
Binds <Return> to the specified function - all widgets
[ "Binds", "<Return", ">", "to", "the", "specified", "function", "-", "all", "widgets" ]
def enableEnter(self, func, replace=False): """ Binds <Return> to the specified function - all widgets """ self.bindKey("Return", func, replace)
[ "def", "enableEnter", "(", "self", ",", "func", ",", "replace", "=", "False", ")", ":", "self", ".", "bindKey", "(", "\"Return\"", ",", "func", ",", "replace", ")" ]
https://github.com/FSecureLABS/Jandroid/blob/e31d0dab58a2bfd6ed8e0a387172b8bd7c893436/gui/appJar.py#L2282-L2284
lfz/Guided-Denoise
8881ab768d16eaf87342da4ff7dc8271e183e205
Attackset/fgsm_ensv3_random/nets/resnet_v1.py
python
resnet_v1_101
(inputs, num_classes=None, is_training=True, global_pool=True, output_stride=None, spatial_squeeze=True, reuse=None, scope='resnet_v1_101')
return resnet_v1(inputs, blocks, num_classes, is_training, global_pool=global_pool, output_stride=output_stride, include_root_block=True, spatial_squeeze=spatial_squeeze, reuse=reuse, scope=scope)
ResNet-101 model of [1]. See resnet_v1() for arg and return description.
ResNet-101 model of [1]. See resnet_v1() for arg and return description.
[ "ResNet", "-", "101", "model", "of", "[", "1", "]", ".", "See", "resnet_v1", "()", "for", "arg", "and", "return", "description", "." ]
def resnet_v1_101(inputs, num_classes=None, is_training=True, global_pool=True, output_stride=None, spatial_squeeze=True, reuse=None, scope='resnet_v1_101'): """ResNet-101 model of [1]. See resnet_v1() for arg and return description.""" blocks = [ resnet_v1_block('block1', base_depth=64, num_units=3, stride=2), resnet_v1_block('block2', base_depth=128, num_units=4, stride=2), resnet_v1_block('block3', base_depth=256, num_units=23, stride=2), resnet_v1_block('block4', base_depth=512, num_units=3, stride=1), ] return resnet_v1(inputs, blocks, num_classes, is_training, global_pool=global_pool, output_stride=output_stride, include_root_block=True, spatial_squeeze=spatial_squeeze, reuse=reuse, scope=scope)
[ "def", "resnet_v1_101", "(", "inputs", ",", "num_classes", "=", "None", ",", "is_training", "=", "True", ",", "global_pool", "=", "True", ",", "output_stride", "=", "None", ",", "spatial_squeeze", "=", "True", ",", "reuse", "=", "None", ",", "scope", "=", ...
https://github.com/lfz/Guided-Denoise/blob/8881ab768d16eaf87342da4ff7dc8271e183e205/Attackset/fgsm_ensv3_random/nets/resnet_v1.py#L280-L298
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/docutils-0.14/docutils/writers/latex2e/__init__.py
python
LaTeXTranslator.visit_literal_block
(self, node)
Render a literal block.
Render a literal block.
[ "Render", "a", "literal", "block", "." ]
def visit_literal_block(self, node): """Render a literal block.""" # environments and packages to typeset literal blocks packages = {'alltt': r'\usepackage{alltt}', 'listing': r'\usepackage{moreverb}', 'lstlisting': r'\usepackage{listings}', 'Verbatim': r'\usepackage{fancyvrb}', # 'verbatim': '', 'verbatimtab': r'\usepackage{moreverb}'} if node.get('ids'): self.out += ['\n'] + self.ids_to_labels(node) self.duclass_open(node) if not self.active_table.is_open(): # no quote inside tables, to avoid vertical space between # table border and literal block. # TODO: fails if normal text precedes the literal block. # check parent node instead? self.out.append('\\begin{quote}\n') self.context.append('\n\\end{quote}\n') else: self.context.append('\n') if self.is_plaintext(node): environment = self.literal_block_env self.requirements['literal_block'] = packages.get(environment, '') if environment == 'alltt': self.alltt = True else: self.verbatim = True self.out.append('\\begin{%s}%s\n' % (environment, self.literal_block_options)) self.context.append('\n\\end{%s}' % environment) else: self.literal = True self.insert_newline = True self.insert_non_breaking_blanks = True if 'code' in node['classes'] and ( self.settings.syntax_highlight != 'none'): self.requirements['color'] = PreambleCmds.color self.fallbacks['code'] = PreambleCmds.highlight_rules self.out.append('{\\ttfamily \\raggedright \\noindent\n') self.context.append('\n}')
[ "def", "visit_literal_block", "(", "self", ",", "node", ")", ":", "# environments and packages to typeset literal blocks", "packages", "=", "{", "'alltt'", ":", "r'\\usepackage{alltt}'", ",", "'listing'", ":", "r'\\usepackage{moreverb}'", ",", "'lstlisting'", ":", "r'\\us...
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/docutils-0.14/docutils/writers/latex2e/__init__.py#L2517-L2560
Kinto/kinto
a9e46e57de8f33c7be098c6f583de18df03b2824
kinto/core/errors.py
python
request_GET
(request)
Catches a UnicodeDecode error in request.GET in case a wrong request was received. Fixing a webob long term issue: https://github.com/Pylons/webob/issues/161
Catches a UnicodeDecode error in request.GET in case a wrong request was received. Fixing a webob long term issue: https://github.com/Pylons/webob/issues/161
[ "Catches", "a", "UnicodeDecode", "error", "in", "request", ".", "GET", "in", "case", "a", "wrong", "request", "was", "received", ".", "Fixing", "a", "webob", "long", "term", "issue", ":", "https", ":", "//", "github", ".", "com", "/", "Pylons", "/", "w...
def request_GET(request): """Catches a UnicodeDecode error in request.GET in case a wrong request was received. Fixing a webob long term issue: https://github.com/Pylons/webob/issues/161 """ try: return request.GET except UnicodeDecodeError: querystring = request.environ.get("QUERY_STRING", "") logger = logging.getLogger(__name__) logger.warning("Error decoding QUERY_STRING: %s" % request.environ) raise http_error( httpexceptions.HTTPBadRequest(), errno=ERRORS.INVALID_PARAMETERS, message="A request with an incorrect encoding in the querystring was" "received. Please make sure your requests are encoded in UTF-8: %s" % querystring, )
[ "def", "request_GET", "(", "request", ")", ":", "try", ":", "return", "request", ".", "GET", "except", "UnicodeDecodeError", ":", "querystring", "=", "request", ".", "environ", ".", "get", "(", "\"QUERY_STRING\"", ",", "\"\"", ")", "logger", "=", "logging", ...
https://github.com/Kinto/kinto/blob/a9e46e57de8f33c7be098c6f583de18df03b2824/kinto/core/errors.py#L199-L214
ankush-me/SynthText
e694abf03f298ce0e362cfbc5f67cf0ce6cbf301
synth_utils.py
python
plot_xyzrgb
(xyz,rgb,show=False)
xyz : nx3 float rgb : nx3 uint8 Plots a RGB-D point-cloud in mayavi.
xyz : nx3 float rgb : nx3 uint8
[ "xyz", ":", "nx3", "float", "rgb", ":", "nx3", "uint8" ]
def plot_xyzrgb(xyz,rgb,show=False): """ xyz : nx3 float rgb : nx3 uint8 Plots a RGB-D point-cloud in mayavi. """ rgb_s = LUT_RGB.rgb2scalar(rgb) pts_glyph = mym.points3d(xyz[:,0],xyz[:,1],xyz[:,2], rgb_s,mode='point') LUT_RGB.set_rgb_lut(pts_glyph) if show: mym.view(180,180) mym.orientation_axes() mym.show()
[ "def", "plot_xyzrgb", "(", "xyz", ",", "rgb", ",", "show", "=", "False", ")", ":", "rgb_s", "=", "LUT_RGB", ".", "rgb2scalar", "(", "rgb", ")", "pts_glyph", "=", "mym", ".", "points3d", "(", "xyz", "[", ":", ",", "0", "]", ",", "xyz", "[", ":", ...
https://github.com/ankush-me/SynthText/blob/e694abf03f298ce0e362cfbc5f67cf0ce6cbf301/synth_utils.py#L42-L56
mdiazcl/fuzzbunch-debian
2b76c2249ade83a389ae3badb12a1bd09901fd2c
windows/Resources/Python/Core/Lib/StringIO.py
python
StringIO.seek
(self, pos, mode=0)
Set the file's current position. The mode argument is optional and defaults to 0 (absolute file positioning); other values are 1 (seek relative to the current position) and 2 (seek relative to the file's end). There is no return value.
Set the file's current position. The mode argument is optional and defaults to 0 (absolute file positioning); other values are 1 (seek relative to the current position) and 2 (seek relative to the file's end). There is no return value.
[ "Set", "the", "file", "s", "current", "position", ".", "The", "mode", "argument", "is", "optional", "and", "defaults", "to", "0", "(", "absolute", "file", "positioning", ")", ";", "other", "values", "are", "1", "(", "seek", "relative", "to", "the", "curr...
def seek(self, pos, mode=0): """Set the file's current position. The mode argument is optional and defaults to 0 (absolute file positioning); other values are 1 (seek relative to the current position) and 2 (seek relative to the file's end). There is no return value. """ _complain_ifclosed(self.closed) if self.buflist: self.buf += ''.join(self.buflist) self.buflist = [] if mode == 1: pos += self.pos elif mode == 2: pos += self.len self.pos = max(0, pos)
[ "def", "seek", "(", "self", ",", "pos", ",", "mode", "=", "0", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "if", "self", ".", "buflist", ":", "self", ".", "buf", "+=", "''", ".", "join", "(", "self", ".", "buflist", ")", "sel...
https://github.com/mdiazcl/fuzzbunch-debian/blob/2b76c2249ade83a389ae3badb12a1bd09901fd2c/windows/Resources/Python/Core/Lib/StringIO.py#L102-L119
minerllabs/minerl
0123527c334c96ebb3f0cf313df1552fa4302691
minerl/herobraine/hero/handlers/agent/observations/inventory.py
python
FlatInventoryObservation.__eq__
(self, other)
return isinstance(other, FlatInventoryObservation) and \ (self.items) == (other.items)
[]
def __eq__(self, other): return isinstance(other, FlatInventoryObservation) and \ (self.items) == (other.items)
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "return", "isinstance", "(", "other", ",", "FlatInventoryObservation", ")", "and", "(", "self", ".", "items", ")", "==", "(", "other", ".", "items", ")" ]
https://github.com/minerllabs/minerl/blob/0123527c334c96ebb3f0cf313df1552fa4302691/minerl/herobraine/hero/handlers/agent/observations/inventory.py#L149-L151
pkkid/python-plexapi
8a048d28360b0cdc728a41fa7d3077d0593b68fb
plexapi/myplex.py
python
MyPlexAccount.resources
(self)
return [MyPlexResource(self, elem) for elem in data]
Returns a list of all :class:`~plexapi.myplex.MyPlexResource` objects connected to the server.
Returns a list of all :class:`~plexapi.myplex.MyPlexResource` objects connected to the server.
[ "Returns", "a", "list", "of", "all", ":", "class", ":", "~plexapi", ".", "myplex", ".", "MyPlexResource", "objects", "connected", "to", "the", "server", "." ]
def resources(self): """ Returns a list of all :class:`~plexapi.myplex.MyPlexResource` objects connected to the server. """ data = self.query(MyPlexResource.key) return [MyPlexResource(self, elem) for elem in data]
[ "def", "resources", "(", "self", ")", ":", "data", "=", "self", ".", "query", "(", "MyPlexResource", ".", "key", ")", "return", "[", "MyPlexResource", "(", "self", ",", "elem", ")", "for", "elem", "in", "data", "]" ]
https://github.com/pkkid/python-plexapi/blob/8a048d28360b0cdc728a41fa7d3077d0593b68fb/plexapi/myplex.py#L199-L202
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/site-packages/Mako-0.7.3-py2.7.egg/mako/runtime.py
python
_inherit_from
(context, uri, calling_uri)
return (template.callable_, lclcontext)
called by the _inherit method in template modules to set up the inheritance chain at the start of a template's execution.
called by the _inherit method in template modules to set up the inheritance chain at the start of a template's execution.
[ "called", "by", "the", "_inherit", "method", "in", "template", "modules", "to", "set", "up", "the", "inheritance", "chain", "at", "the", "start", "of", "a", "template", "s", "execution", "." ]
def _inherit_from(context, uri, calling_uri): """called by the _inherit method in template modules to set up the inheritance chain at the start of a template's execution.""" if uri is None: return None template = _lookup_template(context, uri, calling_uri) self_ns = context['self'] ih = self_ns while ih.inherits is not None: ih = ih.inherits lclcontext = context.locals_({'next':ih}) ih.inherits = TemplateNamespace("self:%s" % template.uri, lclcontext, template = template, populate_self=False) context._data['parent'] = lclcontext._data['local'] = ih.inherits callable_ = getattr(template.module, '_mako_inherit', None) if callable_ is not None: ret = callable_(template, lclcontext) if ret: return ret gen_ns = getattr(template.module, '_mako_generate_namespaces', None) if gen_ns is not None: gen_ns(context) return (template.callable_, lclcontext)
[ "def", "_inherit_from", "(", "context", ",", "uri", ",", "calling_uri", ")", ":", "if", "uri", "is", "None", ":", "return", "None", "template", "=", "_lookup_template", "(", "context", ",", "uri", ",", "calling_uri", ")", "self_ns", "=", "context", "[", ...
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/site-packages/Mako-0.7.3-py2.7.egg/mako/runtime.py#L695-L722
blackye/webdirdig
11eb3df84d228127dde1dd4afcb922f5075903a2
thirdparty_libs/requests/utils.py
python
get_auth_from_url
(url)
Given a url with authentication components, extract them into a tuple of username,password.
Given a url with authentication components, extract them into a tuple of username,password.
[ "Given", "a", "url", "with", "authentication", "components", "extract", "them", "into", "a", "tuple", "of", "username", "password", "." ]
def get_auth_from_url(url): """Given a url with authentication components, extract them into a tuple of username,password.""" if url: parsed = urlparse(url) return (parsed.username, parsed.password) else: return ('', '')
[ "def", "get_auth_from_url", "(", "url", ")", ":", "if", "url", ":", "parsed", "=", "urlparse", "(", "url", ")", "return", "(", "parsed", ".", "username", ",", "parsed", ".", "password", ")", "else", ":", "return", "(", "''", ",", "''", ")" ]
https://github.com/blackye/webdirdig/blob/11eb3df84d228127dde1dd4afcb922f5075903a2/thirdparty_libs/requests/utils.py#L541-L548
OpenEndedGroup/Field
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
Contents/lib/python/runpy.py
python
run_module
(mod_name, init_globals=None, run_name=None, alter_sys=False)
return _run_module_code(code, init_globals, run_name, filename, loader, alter_sys)
Execute a module's code without importing it Returns the resulting top level namespace dictionary
Execute a module's code without importing it
[ "Execute", "a", "module", "s", "code", "without", "importing", "it" ]
def run_module(mod_name, init_globals=None, run_name=None, alter_sys=False): """Execute a module's code without importing it Returns the resulting top level namespace dictionary """ loader = get_loader(mod_name) if loader is None: raise ImportError("No module named " + mod_name) code = loader.get_code(mod_name) if code is None: raise ImportError("No code object available for " + mod_name) filename = _get_filename(loader, mod_name) if run_name is None: run_name = mod_name return _run_module_code(code, init_globals, run_name, filename, loader, alter_sys)
[ "def", "run_module", "(", "mod_name", ",", "init_globals", "=", "None", ",", "run_name", "=", "None", ",", "alter_sys", "=", "False", ")", ":", "loader", "=", "get_loader", "(", "mod_name", ")", "if", "loader", "is", "None", ":", "raise", "ImportError", ...
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/runpy.py#L79-L95
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit - MAC OSX/tools/inject/thirdparty/beautifulsoup/beautifulsoup.py
python
SoupStrainer.search
(self, markup)
return found
[]
def search(self, markup): #print 'looking for %s in %s' % (self, markup) found = None # If given a list of items, scan it for a text element that # matches. if hasattr(markup, "__iter__") \ and not isinstance(markup, Tag): for element in markup: if isinstance(element, NavigableString) \ and self.search(element): found = element break # If it's a Tag, make sure its name or attributes match. # Don't bother with Tags if we're searching for text. elif isinstance(markup, Tag): if not self.text: found = self.searchTag(markup) # If it's text, make sure the text matches. elif isinstance(markup, NavigableString) or \ isinstance(markup, basestring): if self._matches(markup, self.text): found = markup else: raise Exception, "I don't know how to match against a %s" \ % markup.__class__ return found
[ "def", "search", "(", "self", ",", "markup", ")", ":", "#print 'looking for %s in %s' % (self, markup)", "found", "=", "None", "# If given a list of items, scan it for a text element that", "# matches.", "if", "hasattr", "(", "markup", ",", "\"__iter__\"", ")", "and", "no...
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit - MAC OSX/tools/inject/thirdparty/beautifulsoup/beautifulsoup.py#L947-L972
CGCookie/retopoflow
3d8b3a47d1d661f99ab0aeb21d31370bf15de35e
addon_common/common/ui_core.py
python
UI_Element._draw_cache
(self, offset)
[]
def _draw_cache(self, offset): ox,oy = offset with ScissorStack.wrap(self._l+ox, self._t+oy, self._w, self._h): if self._cacheRenderBuf: bgl.glEnable(bgl.GL_BLEND) bgl.glBlendFunc(bgl.GL_ONE, bgl.GL_ONE_MINUS_SRC_ALPHA) texture_id = self._cacheRenderBuf.color_texture draw_texture_2d(texture_id, (self._l+ox, self._b+oy), self._w, self._h) else: bgl.glBlendFunc(bgl.GL_ONE, bgl.GL_ONE_MINUS_SRC_ALPHA) self._draw_real(offset)
[ "def", "_draw_cache", "(", "self", ",", "offset", ")", ":", "ox", ",", "oy", "=", "offset", "with", "ScissorStack", ".", "wrap", "(", "self", ".", "_l", "+", "ox", ",", "self", ".", "_t", "+", "oy", ",", "self", ".", "_w", ",", "self", ".", "_h...
https://github.com/CGCookie/retopoflow/blob/3d8b3a47d1d661f99ab0aeb21d31370bf15de35e/addon_common/common/ui_core.py#L1846-L1856
wxWidgets/Phoenix
b2199e299a6ca6d866aa6f3d0888499136ead9d6
wx/lib/agw/flatmenu.py
python
FlatMenu.GetAccelTable
(self)
return table
Returns the menu accelerator table, an instance of :class:`AcceleratorTable`.
Returns the menu accelerator table, an instance of :class:`AcceleratorTable`.
[ "Returns", "the", "menu", "accelerator", "table", "an", "instance", "of", ":", "class", ":", "AcceleratorTable", "." ]
def GetAccelTable(self): """ Returns the menu accelerator table, an instance of :class:`AcceleratorTable`. """ n = len(self._accelArray) if n == 0: return wx.NullAcceleratorTable entries = [wx.AcceleratorEntry() for ii in range(n)] for counter in len(entries): entries[counter] = self._accelArray[counter] table = wx.AcceleratorTable(entries) del entries return table
[ "def", "GetAccelTable", "(", "self", ")", ":", "n", "=", "len", "(", "self", ".", "_accelArray", ")", "if", "n", "==", "0", ":", "return", "wx", ".", "NullAcceleratorTable", "entries", "=", "[", "wx", ".", "AcceleratorEntry", "(", ")", "for", "ii", "...
https://github.com/wxWidgets/Phoenix/blob/b2199e299a6ca6d866aa6f3d0888499136ead9d6/wx/lib/agw/flatmenu.py#L6950-L6965
OpenEndedGroup/Field
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
Contents/lib/python/javapath.py
python
islink
(path)
return stat.S_ISLNK(st.st_mode)
Test whether a path is a symbolic link
Test whether a path is a symbolic link
[ "Test", "whether", "a", "path", "is", "a", "symbolic", "link" ]
def islink(path): """Test whether a path is a symbolic link""" try: st = os.lstat(path) except (os.error, AttributeError): return False return stat.S_ISLNK(st.st_mode)
[ "def", "islink", "(", "path", ")", ":", "try", ":", "st", "=", "os", ".", "lstat", "(", "path", ")", "except", "(", "os", ".", "error", ",", "AttributeError", ")", ":", "return", "False", "return", "stat", ".", "S_ISLNK", "(", "st", ".", "st_mode",...
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/javapath.py#L156-L162
rafellerc/Pytorch-SiamFC
d64d0fee08c7972726337be07e783d44bd9c275e
appSiamFC/producer.py
python
ProducerThread.__init__
(self, seq, buffer, dataset_path, model_path, set_type='train', max_res=800, branch_arch='alexnet', ctx_mode='max')
Args: seq: (int) The number of the sequence according to the get_sequence function, which mirrors the indexing of the ImageNetVID class. buffer: (queue.Queue) The data buffer between the producerThread and the consumer application (the display). The elements stored in this buffer are defined by the BufferElement namedtuple. dataset_path: (string) The path to the root of the ImageNet dataset. model_path: (string) The path to the models .pth.tar file containing the model's weights. set_type: (string) The subset of the ImageNet VID dataset, can be 'train' or 'val'. max_res: (int) The maximum resolution in pixels. If any dimension of the image exceeds this value, the final image published by the producer is resized (keeping the aspect ratio). Used to balance the load between the consumer (main) thread and the producer. branch_arch: (string) The architecture of the branch of the siamese net. Might be: 'alexnet', 'vgg11_5c'. ctx_mode: (string) The strategy used to define the context region around the target, using the bounding box dimensions. The 'max' mode uses the biggest dimension, while the 'mean' mode uses the mean of the dimensions.
Args: seq: (int) The number of the sequence according to the get_sequence function, which mirrors the indexing of the ImageNetVID class. buffer: (queue.Queue) The data buffer between the producerThread and the consumer application (the display). The elements stored in this buffer are defined by the BufferElement namedtuple. dataset_path: (string) The path to the root of the ImageNet dataset. model_path: (string) The path to the models .pth.tar file containing the model's weights. set_type: (string) The subset of the ImageNet VID dataset, can be 'train' or 'val'. max_res: (int) The maximum resolution in pixels. If any dimension of the image exceeds this value, the final image published by the producer is resized (keeping the aspect ratio). Used to balance the load between the consumer (main) thread and the producer. branch_arch: (string) The architecture of the branch of the siamese net. Might be: 'alexnet', 'vgg11_5c'. ctx_mode: (string) The strategy used to define the context region around the target, using the bounding box dimensions. The 'max' mode uses the biggest dimension, while the 'mean' mode uses the mean of the dimensions.
[ "Args", ":", "seq", ":", "(", "int", ")", "The", "number", "of", "the", "sequence", "according", "to", "the", "get_sequence", "function", "which", "mirrors", "the", "indexing", "of", "the", "ImageNetVID", "class", ".", "buffer", ":", "(", "queue", ".", "...
def __init__(self, seq, buffer, dataset_path, model_path, set_type='train', max_res=800, branch_arch='alexnet', ctx_mode='max'): """ Args: seq: (int) The number of the sequence according to the get_sequence function, which mirrors the indexing of the ImageNetVID class. buffer: (queue.Queue) The data buffer between the producerThread and the consumer application (the display). The elements stored in this buffer are defined by the BufferElement namedtuple. dataset_path: (string) The path to the root of the ImageNet dataset. model_path: (string) The path to the models .pth.tar file containing the model's weights. set_type: (string) The subset of the ImageNet VID dataset, can be 'train' or 'val'. max_res: (int) The maximum resolution in pixels. If any dimension of the image exceeds this value, the final image published by the producer is resized (keeping the aspect ratio). Used to balance the load between the consumer (main) thread and the producer. branch_arch: (string) The architecture of the branch of the siamese net. Might be: 'alexnet', 'vgg11_5c'. ctx_mode: (string) The strategy used to define the context region around the target, using the bounding box dimensions. The 'max' mode uses the biggest dimension, while the 'mean' mode uses the mean of the dimensions. """ super(ProducerThread, self).__init__(daemon=True) self.frames, self.bboxes_norm, self.valid_frames, self.vid_dims = ( get_sequence(seq, dataset_path, set_type=set_type)) self.idx = 0 self.seq_size = len(self.frames) self.buffer = buffer # TODO put the model info inside the checkpoint file. 
if branch_arch == 'alexnet': self.net = mdl.SiameseNet(mdl.BaselineEmbeddingNet(), stride=4) elif branch_arch == 'vgg11_5c': self.net = mdl.SiameseNet(mdl.VGG11EmbeddingNet_5c(), stride=4) elif branch_arch == "vgg16_8c": self.net = mdl.SiameseNet(mdl.VGG16EmbeddingNet_8c(), stride=4) checkpoint = torch.load(model_path, map_location=lambda storage, loc: storage) self.net.load_state_dict(checkpoint['state_dict']) # Tuple of (H, w), the dimensions to which the image will be resized. self.resize_dims = None self.net = self.net.to(device) self.net.eval() self.ref, self.ref_emb = self.make_ref(ctx_mode=ctx_mode)
[ "def", "__init__", "(", "self", ",", "seq", ",", "buffer", ",", "dataset_path", ",", "model_path", ",", "set_type", "=", "'train'", ",", "max_res", "=", "800", ",", "branch_arch", "=", "'alexnet'", ",", "ctx_mode", "=", "'max'", ")", ":", "super", "(", ...
https://github.com/rafellerc/Pytorch-SiamFC/blob/d64d0fee08c7972726337be07e783d44bd9c275e/appSiamFC/producer.py#L31-L76