column              dtype           lengths / values
nwo                 stringlengths   5 to 106
sha                 stringlengths   40 to 40
path                stringlengths   4 to 174
language            stringclasses   1 value
identifier          stringlengths   1 to 140
parameters          stringlengths   0 to 87.7k
argument_list       stringclasses   1 value
return_statement    stringlengths   0 to 426k
docstring           stringlengths   0 to 64.3k
docstring_summary   stringlengths   0 to 26.3k
docstring_tokens    list            -
function            stringlengths   18 to 4.83M
function_tokens     list            -
url                 stringlengths   83 to 304
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
python
TarFile._check
(self, mode=None)
Check if TarFile is still open, and if the operation's mode corresponds to TarFile's mode.
Check if TarFile is still open, and if the operation's mode corresponds to TarFile's mode.
[ "Check", "if", "TarFile", "is", "still", "open", "and", "if", "the", "operation", "s", "mode", "corresponds", "to", "TarFile", "s", "mode", "." ]
def _check(self, mode=None):
    """Check if TarFile is still open, and if the operation's mode
       corresponds to TarFile's mode.
    """
    if self.closed:
        raise IOError("%s is closed" % self.__class__.__name__)
    if mode is not None and self.mode not in mode:
        raise IOError("bad operation for mode %r" % self.mode)
[ "def", "_check", "(", "self", ",", "mode", "=", "None", ")", ":", "if", "self", ".", "closed", ":", "raise", "IOError", "(", "\"%s is closed\"", "%", "self", ".", "__class__", ".", "__name__", ")", "if", "mode", "is", "not", "None", "and", "self", "....
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L2496-L2503
etetoolkit/ete
2b207357dc2a40ccad7bfd8f54964472c72e4726
ete3/treeview/faces.py
python
BarChartFace._height
(self)
return self.item.rect().height()
[]
def _height(self):
    return self.item.rect().height()
[ "def", "_height", "(", "self", ")", ":", "return", "self", ".", "item", ".", "rect", "(", ")", ".", "height", "(", ")" ]
https://github.com/etetoolkit/ete/blob/2b207357dc2a40ccad7bfd8f54964472c72e4726/ete3/treeview/faces.py#L1381-L1382
pyjs/pyjs
6c4a3d3a67300cd5df7f95a67ca9dcdc06950523
pyjswaddons/SWFUpload.py
python
Settings.setButtonHTML
(self, button_text)
@param button_text: Buttons html text
[]
def setButtonHTML(self, button_text):
    """
    @param button_text: Buttons html text
    """
    self.button_text = button_text
[ "def", "setButtonHTML", "(", "self", ",", "button_text", ")", ":", "self", ".", "button_text", "=", "button_text" ]
https://github.com/pyjs/pyjs/blob/6c4a3d3a67300cd5df7f95a67ca9dcdc06950523/pyjswaddons/SWFUpload.py#L918-L922
getavalon/core
31e8cb4760e00e3db64443f6f932b7fd8e96d41d
avalon/tools/models.py
python
AssetModel.refresh
(self)
Refresh the data for the model.
Refresh the data for the model.
[ "Refresh", "the", "data", "for", "the", "model", "." ]
def refresh(self):
    """Refresh the data for the model."""

    self.clear()
    self.beginResetModel()

    # Get all assets sorted by name
    db_assets = io.find({"type": "asset"}).sort("name", 1)
    project_doc = io.find_one({"type": "project"})

    silos = None
    if lib.project_use_silo(project_doc):
        silos = db_assets.distinct("silo")

    # Group the assets by their visual parent's id
    assets_by_parent = collections.defaultdict(list)
    for asset in db_assets:
        parent_id = asset.get("data", {}).get("visualParent")
        if parent_id is None and silos is not None:
            parent_id = asset.get("silo")
        assets_by_parent[parent_id].append(asset)

    # Build the hierarchical tree items recursively
    self._add_hierarchy(
        assets_by_parent,
        parent=None,
        silos=silos
    )

    self.endResetModel()
[ "def", "refresh", "(", "self", ")", ":", "self", ".", "clear", "(", ")", "self", ".", "beginResetModel", "(", ")", "# Get all assets sorted by name", "db_assets", "=", "io", ".", "find", "(", "{", "\"type\"", ":", "\"asset\"", "}", ")", ".", "sort", "(",...
https://github.com/getavalon/core/blob/31e8cb4760e00e3db64443f6f932b7fd8e96d41d/avalon/tools/models.py#L389-L418
facebookarchive/doh-proxy
8622150e6a96e91bfb168c847d084bba67355c9b
dohproxy/proxy.py
python
H2Protocol.return_415
(self, stream_id: int)
We don't support the given media, so we want to return a 415 response.
We don't support the given media, so we want to return a 415 response.
[ "We", "don", "t", "support", "the", "given", "media", "so", "we", "want", "to", "return", "a", "415", "response", "." ]
def return_415(self, stream_id: int):
    """
    We don't support the given media, so we want to return a 415 response.
    """
    self.return_XXX(stream_id, 415, body=b"Unsupported content type")
[ "def", "return_415", "(", "self", ",", "stream_id", ":", "int", ")", ":", "self", ".", "return_XXX", "(", "stream_id", ",", "415", ",", "body", "=", "b\"Unsupported content type\"", ")" ]
https://github.com/facebookarchive/doh-proxy/blob/8622150e6a96e91bfb168c847d084bba67355c9b/dohproxy/proxy.py#L236-L240
clinton-hall/nzbToMedia
27669389216902d1085660167e7bda0bd8527ecf
libs/common/beets/autotag/match.py
python
_recommendation
(results)
return rec
Given a sorted list of AlbumMatch or TrackMatch objects, return a recommendation based on the results' distances. If the recommendation is higher than the configured maximum for an applied penalty, the recommendation will be downgraded to the configured maximum for that penalty.
Given a sorted list of AlbumMatch or TrackMatch objects, return a recommendation based on the results' distances.
[ "Given", "a", "sorted", "list", "of", "AlbumMatch", "or", "TrackMatch", "objects", "return", "a", "recommendation", "based", "on", "the", "results", "distances", "." ]
def _recommendation(results):
    """Given a sorted list of AlbumMatch or TrackMatch objects, return a
    recommendation based on the results' distances.

    If the recommendation is higher than the configured maximum for
    an applied penalty, the recommendation will be downgraded to the
    configured maximum for that penalty.
    """
    if not results:
        # No candidates: no recommendation.
        return Recommendation.none

    # Basic distance thresholding.
    min_dist = results[0].distance
    if min_dist < config['match']['strong_rec_thresh'].as_number():
        # Strong recommendation level.
        rec = Recommendation.strong
    elif min_dist <= config['match']['medium_rec_thresh'].as_number():
        # Medium recommendation level.
        rec = Recommendation.medium
    elif len(results) == 1:
        # Only a single candidate.
        rec = Recommendation.low
    elif results[1].distance - min_dist >= \
            config['match']['rec_gap_thresh'].as_number():
        # Gap between first two candidates is large.
        rec = Recommendation.low
    else:
        # No conclusion. Return immediately. Can't be downgraded any further.
        return Recommendation.none

    # Downgrade to the max rec if it is lower than the current rec for an
    # applied penalty.
    keys = set(min_dist.keys())
    if isinstance(results[0], hooks.AlbumMatch):
        for track_dist in min_dist.tracks.values():
            keys.update(list(track_dist.keys()))
    max_rec_view = config['match']['max_rec']
    for key in keys:
        if key in list(max_rec_view.keys()):
            max_rec = max_rec_view[key].as_choice({
                'strong': Recommendation.strong,
                'medium': Recommendation.medium,
                'low': Recommendation.low,
                'none': Recommendation.none,
            })
            rec = min(rec, max_rec)

    return rec
[ "def", "_recommendation", "(", "results", ")", ":", "if", "not", "results", ":", "# No candidates: no recommendation.", "return", "Recommendation", ".", "none", "# Basic distance thresholding.", "min_dist", "=", "results", "[", "0", "]", ".", "distance", "if", "min_...
https://github.com/clinton-hall/nzbToMedia/blob/27669389216902d1085660167e7bda0bd8527ecf/libs/common/beets/autotag/match.py#L292-L340
wxGlade/wxGlade
44ed0d1cba78f27c5c0a56918112a737653b7b27
config.py
python
get_hg_version
()
return ver
Query the local hg repository to get the current release or return None
Query the local hg repository to get the current release or return None
[ "Query", "the", "local", "hg", "repository", "to", "get", "the", "current", "release", "or", "return", "None" ]
def get_hg_version():
    "Query the local hg repository to get the current release or return None"
    try:
        from mercurial.hg import repository
        from mercurial.ui import ui
        from mercurial.node import short
        from mercurial.error import RepoError
    except:
        return None

    # try to open local hg repository
    try:
        repo = repository(ui(), os.path.dirname(__file__))
    except RepoError:
        # no mercurial repository found
        return None

    release = ''
    context = repo[None]
    parents = context.parents()
    repo_changed = context.files() + context.deleted()
    if len(parents) == 1 and not repo_changed:
        # release tag isn't at tip it's -2 (one below tip)
        parents = parents[0].parents()
        node = parents[0].node()

        tags = repo.nodetags(node)
        # look for the special 'rel_X_X_X' or 'rel_X_X' tag
        for tag in tags:
            if tag.startswith('rel_') and len(tag) > 4:
                release = tag[4:].replace('_', '.')
                break
        # handle untagged release e.g. tip
        if not release:
            release = short(node)
    else:
        release = '%s' % '+'.join([short(p.node()) for p in parents])

    suffix_changed = repo_changed and '+' or ''
    ver = '%s%s' % (release, suffix_changed)
    return ver
[ "def", "get_hg_version", "(", ")", ":", "try", ":", "from", "mercurial", ".", "hg", "import", "repository", "from", "mercurial", ".", "ui", "import", "ui", "from", "mercurial", ".", "node", "import", "short", "from", "mercurial", ".", "error", "import", "R...
https://github.com/wxGlade/wxGlade/blob/44ed0d1cba78f27c5c0a56918112a737653b7b27/config.py#L295-L335
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/tiia/v20190529/tiia_client.py
python
TiiaClient.CreateGroup
(self, request)
用于创建一个空的图片库,如果图片库已存在则返回错误。 (Creates an empty image library; returns an error if the library already exists.) :param request: Request instance for CreateGroup. :type request: :class:`tencentcloud.tiia.v20190529.models.CreateGroupRequest` :rtype: :class:`tencentcloud.tiia.v20190529.models.CreateGroupResponse`
用于创建一个空的图片库,如果图片库已存在则返回错误。 (Creates an empty image library; returns an error if the library already exists.)
[ "用于创建一个空的图片库,如果图片库已存在则返回错误。" ]
def CreateGroup(self, request):
    """用于创建一个空的图片库,如果图片库已存在则返回错误。

    :param request: Request instance for CreateGroup.
    :type request: :class:`tencentcloud.tiia.v20190529.models.CreateGroupRequest`
    :rtype: :class:`tencentcloud.tiia.v20190529.models.CreateGroupResponse`

    """
    try:
        params = request._serialize()
        body = self.call("CreateGroup", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateGroupResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
[ "def", "CreateGroup", "(", "self", ",", "request", ")", ":", "try", ":", "params", "=", "request", ".", "_serialize", "(", ")", "body", "=", "self", ".", "call", "(", "\"CreateGroup\"", ",", "params", ")", "response", "=", "json", ".", "loads", "(", ...
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/tiia/v20190529/tiia_client.py#L59-L84
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/qtconsole/console_widget.py
python
ConsoleWidget._control_key_down
(self, modifiers, include_command=False)
Given a KeyboardModifiers flags object, return whether the Control key is down. Parameters ---------- include_command : bool, optional (default True) Whether to treat the Command key as a (mutually exclusive) synonym for Control when in Mac OS.
Given a KeyboardModifiers flags object, return whether the Control key is down.
[ "Given", "a", "KeyboardModifiers", "flags", "object", "return", "whether", "the", "Control", "key", "is", "down", "." ]
def _control_key_down(self, modifiers, include_command=False):
    """ Given a KeyboardModifiers flags object, return whether the Control
    key is down.

    Parameters
    ----------
    include_command : bool, optional (default True)
        Whether to treat the Command key as a (mutually exclusive) synonym
        for Control when in Mac OS.
    """
    # Note that on Mac OS, ControlModifier corresponds to the Command key
    # while MetaModifier corresponds to the Control key.
    if sys.platform == 'darwin':
        down = include_command and (modifiers & QtCore.Qt.ControlModifier)
        return bool(down) ^ bool(modifiers & QtCore.Qt.MetaModifier)
    else:
        return bool(modifiers & QtCore.Qt.ControlModifier)
[ "def", "_control_key_down", "(", "self", ",", "modifiers", ",", "include_command", "=", "False", ")", ":", "# Note that on Mac OS, ControlModifier corresponds to the Command key", "# while MetaModifier corresponds to the Control key.", "if", "sys", ".", "platform", "==", "'darw...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/qtconsole/console_widget.py#L1080-L1096
AutodeskRoboticsLab/Mimic
85447f0d346be66988303a6a054473d92f1ed6f4
mimic/scripts/extern/pyqtgraph_0_11_0/pyqtgraph/graphicsItems/PlotCurveItem.py
python
PlotCurveItem.setClickable
(self, s, width=None)
Sets whether the item responds to mouse clicks. The *width* argument specifies the width in pixels orthogonal to the curve that will respond to a mouse click.
Sets whether the item responds to mouse clicks.
[ "Sets", "whether", "the", "item", "responds", "to", "mouse", "clicks", "." ]
def setClickable(self, s, width=None):
    """Sets whether the item responds to mouse clicks.

    The *width* argument specifies the width in pixels orthogonal to the
    curve that will respond to a mouse click.
    """
    self.clickable = s
    if width is not None:
        self.opts['mouseWidth'] = width
        self._mouseShape = None
        self._boundingRect = None
[ "def", "setClickable", "(", "self", ",", "s", ",", "width", "=", "None", ")", ":", "self", ".", "clickable", "=", "s", "if", "width", "is", "not", "None", ":", "self", ".", "opts", "[", "'mouseWidth'", "]", "=", "width", "self", ".", "_mouseShape", ...
https://github.com/AutodeskRoboticsLab/Mimic/blob/85447f0d346be66988303a6a054473d92f1ed6f4/mimic/scripts/extern/pyqtgraph_0_11_0/pyqtgraph/graphicsItems/PlotCurveItem.py#L88-L98
nschloe/quadpy
c4c076d8ddfa968486a2443a95e2fb3780dcde0f
src/quadpy/c2/_sommariva/__init__.py
python
sommariva_41
()
return _read(this_dir / "sommariva_41.json", _source)
[]
def sommariva_41():
    return _read(this_dir / "sommariva_41.json", _source)
[ "def", "sommariva_41", "(", ")", ":", "return", "_read", "(", "this_dir", "/", "\"sommariva_41.json\"", ",", "_source", ")" ]
https://github.com/nschloe/quadpy/blob/c4c076d8ddfa968486a2443a95e2fb3780dcde0f/src/quadpy/c2/_sommariva/__init__.py#L175-L176
kovidgoyal/calibre
2b41671370f2a9eb1109b9ae901ccf915f1bd0c8
src/calibre/ebooks/docx/block_styles.py
python
twips
(val, mult=0.05)
Parse val as either a pure number representing twentieths of a point or a number followed by the suffix pt, representing pts.
Parse val as either a pure number representing twentieths of a point or a number followed by the suffix pt, representing pts.
[ "Parse", "val", "as", "either", "a", "pure", "number", "representing", "twentieths", "of", "a", "point", "or", "a", "number", "followed", "by", "the", "suffix", "pt", "representing", "pts", "." ]
def twips(val, mult=0.05):
    ''' Parse val as either a pure number representing twentieths of a point
    or a number followed by the suffix pt, representing pts.'''
    try:
        return float(val) * mult
    except (ValueError, TypeError, AttributeError, KeyError):
        if val and val.endswith('pt') and mult == 0.05:
            return twips(val[:-2], mult=1.0)
[ "def", "twips", "(", "val", ",", "mult", "=", "0.05", ")", ":", "try", ":", "return", "float", "(", "val", ")", "*", "mult", "except", "(", "ValueError", ",", "TypeError", ",", "AttributeError", ",", "KeyError", ")", ":", "if", "val", "and", "val", ...
https://github.com/kovidgoyal/calibre/blob/2b41671370f2a9eb1109b9ae901ccf915f1bd0c8/src/calibre/ebooks/docx/block_styles.py#L61-L67
sailfish-team/sailfish
b7140c580e1f4cb406dde947c8d8d5a8a6b2d79e
sailfish/subdomain_runner.py
python
SubdomainRunner._get_global_idx
(self, location, dist_num=0)
Returns a global index (in the distributions array). :param location: position of the node in the natural order :param dist_num: distribution number
Returns a global index (in the distributions array).
[ "Returns", "a", "global", "index", "(", "in", "the", "distributions", "array", ")", "." ]
def _get_global_idx(self, location, dist_num=0):
    """Returns a global index (in the distributions array).

    :param location: position of the node in the natural order
    :param dist_num: distribution number
    """
    def _indirect(idxs):
        if self.config.node_addressing == 'indirect':
            # Get actual node addresses.
            # TODO: Ideally, we would filter out unused nodes here. This
            # requires careful handling between what the source domain and the
            # destination domain see/expect.
            ret = self._host_indirect_address.flat[idxs]
            mask = ret != self.INVALID_NODE
            if ret.size == 1:
                if ret != self.INVALID_NODE:
                    ret += self.num_active_nodes * dist_num
            else:
                if np.array(dist_num).size == 1:
                    ret[mask] += self.num_active_nodes * dist_num
                else:
                    ret[mask] += (self.num_active_nodes * dist_num)[mask]
            return ret
        else:
            return idxs + self.num_phys_nodes * dist_num

    # XXX: handle indirect here
    if self.dim == 2:
        gx, gy = location
        arr_nx = self._physical_size[1]
        return _indirect(gx + arr_nx * gy)
    else:
        gx, gy, gz = location
        arr_nx = self._physical_size[2]
        arr_ny = self._physical_size[1]
        return _indirect(gx + arr_nx * gy + arr_nx * arr_ny * gz)
[ "def", "_get_global_idx", "(", "self", ",", "location", ",", "dist_num", "=", "0", ")", ":", "def", "_indirect", "(", "idxs", ")", ":", "if", "self", ".", "config", ".", "node_addressing", "==", "'indirect'", ":", "# Get actual node addresses.", "# TODO: Ideal...
https://github.com/sailfish-team/sailfish/blob/b7140c580e1f4cb406dde947c8d8d5a8a6b2d79e/sailfish/subdomain_runner.py#L569-L604
GoogleCloudPlatform/PerfKitBenchmarker
6e3412d7d5e414b8ca30ed5eaf970cef1d919a67
perfkitbenchmarker/linux_packages/ycsb.py
python
_GetWorkloadFileList
()
return [data.ResourcePath(workload) for workload in FLAGS.ycsb_workload_files]
Returns the list of workload files to run. Returns: In order of preference: * The argument to --ycsb_workload_files. * Bundled YCSB workloads A and B.
Returns the list of workload files to run.
[ "Returns", "the", "list", "of", "workload", "files", "to", "run", "." ]
def _GetWorkloadFileList():
    """Returns the list of workload files to run.

    Returns:
      In order of preference:
        * The argument to --ycsb_workload_files.
        * Bundled YCSB workloads A and B.
    """
    return [data.ResourcePath(workload)
            for workload in FLAGS.ycsb_workload_files]
[ "def", "_GetWorkloadFileList", "(", ")", ":", "return", "[", "data", ".", "ResourcePath", "(", "workload", ")", "for", "workload", "in", "FLAGS", ".", "ycsb_workload_files", "]" ]
https://github.com/GoogleCloudPlatform/PerfKitBenchmarker/blob/6e3412d7d5e414b8ca30ed5eaf970cef1d919a67/perfkitbenchmarker/linux_packages/ycsb.py#L276-L285
timonwong/OmniMarkupPreviewer
21921ac7a99d2b5924a2219b33679a5b53621392
OmniMarkupLib/Renderers/libs/python3/docutils/utils/math/math2html.py
python
TextParser.parse
(self, reader)
return contents
Parse lines as long as they are text
Parse lines as long as they are text
[ "Parse", "lines", "as", "long", "as", "they", "are", "text" ]
def parse(self, reader):
    "Parse lines as long as they are text"
    TextParser.stack.append(self.ending)
    self.endings = TextParser.stack + [ContainerConfig.endings['Layout'],
        ContainerConfig.endings['Inset'], self.ending]
    contents = []
    while not self.isending(reader):
        self.parsecontainer(reader, contents)
    return contents
[ "def", "parse", "(", "self", ",", "reader", ")", ":", "TextParser", ".", "stack", ".", "append", "(", "self", ".", "ending", ")", "self", ".", "endings", "=", "TextParser", ".", "stack", "+", "[", "ContainerConfig", ".", "endings", "[", "'Layout'", "]"...
https://github.com/timonwong/OmniMarkupPreviewer/blob/21921ac7a99d2b5924a2219b33679a5b53621392/OmniMarkupLib/Renderers/libs/python3/docutils/utils/math/math2html.py#L1374-L1382
PixarAnimationStudios/OpenTimelineIO
990a54ccbe6488180a93753370fc87902b982962
src/py-opentimelineio/opentimelineio/schema/timeline.py
python
each_child
(self, search_range=None, descended_from_type=_otio.Composable)
Generator that returns each child contained in the timeline in the order in which it is found. Note that this function is now deprecated, please consider using children_if() instead. Arguments: search_range: if specified, only children whose range overlaps with the search range will be yielded. descended_from_type: if specified, only children who are a descendent of the descended_from_type will be yielded.
Generator that returns each child contained in the timeline in the order in which it is found.
[ "Generator", "that", "returns", "each", "child", "contained", "in", "the", "timeline", "in", "the", "order", "in", "which", "it", "is", "found", "." ]
def each_child(self, search_range=None, descended_from_type=_otio.Composable):
    """ Generator that returns each child contained in the timeline
    in the order in which it is found.

    Note that this function is now deprecated, please consider using
    children_if() instead.

    Arguments:
        search_range: if specified, only children whose range overlaps with
                      the search range will be yielded.
        descended_from_type: if specified, only children who are a descendent
                             of the descended_from_type will be yielded.
    """
    for child in self.children_if(descended_from_type, search_range):
        yield child
[ "def", "each_child", "(", "self", ",", "search_range", "=", "None", ",", "descended_from_type", "=", "_otio", ".", "Composable", ")", ":", "for", "child", "in", "self", ".", "children_if", "(", "descended_from_type", ",", "search_range", ")", ":", "yield", "...
https://github.com/PixarAnimationStudios/OpenTimelineIO/blob/990a54ccbe6488180a93753370fc87902b982962/src/py-opentimelineio/opentimelineio/schema/timeline.py#L21-L35
datacenter/ACI
9240622e6be03047f48628deacb5450212bd7ebe
configuration-python/generic_code/apicPython/createRoutedOutside.py
python
create_routed_outside
(fv_tenant, routed_outside_name, **args)
return l3ext_out
Create a Routed Outside policy
Create a Routed Outside policy
[ "Create", "a", "Routed", "Outside", "policy" ]
def create_routed_outside(fv_tenant, routed_outside_name, **args):
    """Create a Routed Outside policy"""
    args = args['optional_args'] if 'optional_args' in args.keys() else args

    l3ext_out = Out(fv_tenant, routed_outside_name)

    if 'private_network' in args.keys():
        l3ext_rsectx = RsEctx(l3ext_out, tnFvCtxName=args['private_network'])
    if 'bgp' in args.keys() and args['bgp']:
        bgp_extp = bgpExtP(l3ext_out)
    if 'ospf' in args.keys() and args['ospf']:
        ospf_extp = ospfExtP(l3ext_out, areaId='0.0.0.' + str(get_value(args, 'ospf_area_id', '1')))
    if 'tags' in args.keys() and is_valid(args['tags']):
        tag_inst = Inst(l3ext_out, args['tags'])

    return l3ext_out
[ "def", "create_routed_outside", "(", "fv_tenant", ",", "routed_outside_name", ",", "*", "*", "args", ")", ":", "args", "=", "args", "[", "'optional_args'", "]", "if", "'optional_args'", "in", "args", ".", "keys", "(", ")", "else", "args", "l3ext_out", "=", ...
https://github.com/datacenter/ACI/blob/9240622e6be03047f48628deacb5450212bd7ebe/configuration-python/generic_code/apicPython/createRoutedOutside.py#L32-L44
apple/coremltools
141a83af482fcbdd5179807c9eaff9a7999c2c49
deps/protobuf/objectivec/DevTools/pddm.py
python
SourceFile.__init__
(self, a_file, import_resolver=None)
Initializes the file reading in the file. Args: a_file: The file to read in. import_resolver: a function that given a path will return a stream for the contents. Raises: PDDMError if there are any issues.
Initializes the file reading in the file.
[ "Initializes", "the", "file", "reading", "in", "the", "file", "." ]
def __init__(self, a_file, import_resolver=None):
    """Initializes the file reading in the file.

    Args:
      a_file: The file to read in.
      import_resolver: a function that given a path will return a stream for
        the contents.

    Raises:
      PDDMError if there are any issues.
    """
    self._sections = []
    self._original_content = a_file.read()
    self._import_resolver = import_resolver
    self._processed_content = None
[ "def", "__init__", "(", "self", ",", "a_file", ",", "import_resolver", "=", "None", ")", ":", "self", ".", "_sections", "=", "[", "]", "self", ".", "_original_content", "=", "a_file", ".", "read", "(", ")", "self", ".", "_import_resolver", "=", "import_r...
https://github.com/apple/coremltools/blob/141a83af482fcbdd5179807c9eaff9a7999c2c49/deps/protobuf/objectivec/DevTools/pddm.py#L361-L375
srusskih/SublimeJEDI
8a5054f0a053c8a8170c06c56216245240551d54
dependencies/jedi/api/classes.py
python
ParamName.to_string
(self)
return self._name.to_string()
Returns a simple representation of a param, like ``f: Callable[..., Any]``. :rtype: str
Returns a simple representation of a param, like ``f: Callable[..., Any]``.
[ "Returns", "a", "simple", "representation", "of", "a", "param", "like", "f", ":", "Callable", "[", "...", "Any", "]", "." ]
def to_string(self):
    """
    Returns a simple representation of a param, like
    ``f: Callable[..., Any]``.

    :rtype: str
    """
    return self._name.to_string()
[ "def", "to_string", "(", "self", ")", ":", "return", "self", ".", "_name", ".", "to_string", "(", ")" ]
https://github.com/srusskih/SublimeJEDI/blob/8a5054f0a053c8a8170c06c56216245240551d54/dependencies/jedi/api/classes.py#L872-L879
santatic/web2attack
44b6e481a3d56cf0d98073ae0fb69833dda563d9
w2a/lib/smtpd.py
python
SMTPChannel.__line
(self, value)
[]
def __line(self, value):
    warn("Setting __line attribute on SMTPChannel is deprecated, "
         "set 'received_lines' instead", PendingDeprecationWarning, 2)
    self.received_lines = value
[ "def", "__line", "(", "self", ",", "value", ")", ":", "warn", "(", "\"Setting __line attribute on SMTPChannel is deprecated, \"", "\"set 'received_lines' instead\"", ",", "PendingDeprecationWarning", ",", "2", ")", "self", ".", "received_lines", "=", "value" ]
https://github.com/santatic/web2attack/blob/44b6e481a3d56cf0d98073ae0fb69833dda563d9/w2a/lib/smtpd.py#L159-L162
ytdl-org/youtube-dl
5014bd67c22b421207b2650d4dc874b95b36dda1
youtube_dl/utils.py
python
unified_strdate
(date_str, day_first=True)
Return a string with the date in the format YYYYMMDD
Return a string with the date in the format YYYYMMDD
[ "Return", "a", "string", "with", "the", "date", "in", "the", "format", "YYYYMMDD" ]
def unified_strdate(date_str, day_first=True):
    """Return a string with the date in the format YYYYMMDD"""

    if date_str is None:
        return None
    upload_date = None
    # Replace commas
    date_str = date_str.replace(',', ' ')
    # Remove AM/PM + timezone
    date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
    _, date_str = extract_timezone(date_str)

    for expression in date_formats(day_first):
        try:
            upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
        except ValueError:
            pass
    if upload_date is None:
        timetuple = email.utils.parsedate_tz(date_str)
        if timetuple:
            try:
                upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
            except ValueError:
                pass
    if upload_date is not None:
        return compat_str(upload_date)
[ "def", "unified_strdate", "(", "date_str", ",", "day_first", "=", "True", ")", ":", "if", "date_str", "is", "None", ":", "return", "None", "upload_date", "=", "None", "# Replace commas", "date_str", "=", "date_str", ".", "replace", "(", "','", ",", "' '", ...
https://github.com/ytdl-org/youtube-dl/blob/5014bd67c22b421207b2650d4dc874b95b36dda1/youtube_dl/utils.py#L2980-L3005
vijos/vj4
1c80506bafee31c028042ab3020a2273bf0930b9
vj4/service/bus.py
python
unsubscribe
(callback)
Unsubscribe buses for a callback. Args: callback: coroutine function for bus callback.
Unsubscribe buses for a callback.
[ "Unsubscribe", "buses", "for", "a", "callback", "." ]
def unsubscribe(callback):
    """Unsubscribe buses for a callback.

    Args:
      callback: coroutine function for bus callback.
    """
    if callback in _subscribers:
        del _subscribers[callback]
[ "def", "unsubscribe", "(", "callback", ")", ":", "if", "callback", "in", "_subscribers", ":", "del", "_subscribers", "[", "callback", "]" ]
https://github.com/vijos/vj4/blob/1c80506bafee31c028042ab3020a2273bf0930b9/vj4/service/bus.py#L73-L80
mininet/mininet
8a50d3867c49781c60b6171acc6e4b46954b4281
examples/consoles.py
python
Graph.yview
( self, *args )
Scroll both scale and graph.
Scroll both scale and graph.
[ "Scroll", "both", "scale", "and", "graph", "." ]
def yview( self, *args ):
    "Scroll both scale and graph."
    self.graph.yview( *args )
    self.scale.yview( *args )
[ "def", "yview", "(", "self", ",", "*", "args", ")", ":", "self", ".", "graph", ".", "yview", "(", "*", "args", ")", "self", ".", "scale", ".", "yview", "(", "*", "args", ")" ]
https://github.com/mininet/mininet/blob/8a50d3867c49781c60b6171acc6e4b46954b4281/examples/consoles.py#L218-L221
dbt-labs/dbt-core
e943b9fc842535e958ef4fd0b8703adc91556bc6
core/dbt/parser/manifest.py
python
_process_docs_for_source
( context: Dict[str, Any], source: ParsedSourceDefinition, )
[]
def _process_docs_for_source(
    context: Dict[str, Any],
    source: ParsedSourceDefinition,
):
    table_description = source.description
    source_description = source.source_description
    table_description = get_rendered(table_description, context)
    source_description = get_rendered(source_description, context)
    source.description = table_description
    source.source_description = source_description

    for column in source.columns.values():
        column_desc = column.description
        column_desc = get_rendered(column_desc, context)
        column.description = column_desc
[ "def", "_process_docs_for_source", "(", "context", ":", "Dict", "[", "str", ",", "Any", "]", ",", "source", ":", "ParsedSourceDefinition", ",", ")", ":", "table_description", "=", "source", ".", "description", "source_description", "=", "source", ".", "source_de...
https://github.com/dbt-labs/dbt-core/blob/e943b9fc842535e958ef4fd0b8703adc91556bc6/core/dbt/parser/manifest.py#L1013-L1027
haoctopus/molohub
7699ef2b8b92bfddd2726b966b927648aa44395f
molohub/molo_client_config.py
python
MoloConfigs.get_config_object
(self)
return self.config_object
Get config_object, reload if not exist.
Get config_object, reload if not exist.
[ "Get", "config_object", "reload", "if", "not", "exist", "." ]
def get_config_object(self):
    """Get config_object, reload if not exist."""
    if not self.config_object:
        self.load('release')
    return self.config_object
[ "def", "get_config_object", "(", "self", ")", ":", "if", "not", "self", ".", "config_object", ":", "self", ".", "load", "(", "'release'", ")", "return", "self", ".", "config_object" ]
https://github.com/haoctopus/molohub/blob/7699ef2b8b92bfddd2726b966b927648aa44395f/molohub/molo_client_config.py#L42-L46
facebookresearch/ParlAI
e4d59c30eef44f1f67105961b82a83fd28d7d78b
parlai/agents/rag/retrievers.py
python
DPRThenTorchReranker._retrieve_initial
( self, query: torch.LongTensor )
return DPRRetriever.retrieve_and_score(self, query)
Initial DPR retrieval. Just call superclass to retrieve first stage. :param query: encoding of query :param mask: optional query mask :return (docs, scores): docs: list of (text, title) tuples for each batch example scores: doc scores
Initial DPR retrieval.
[ "Initial", "DPR", "retrieval", "." ]
def _retrieve_initial(
    self, query: torch.LongTensor
) -> Tuple[List[List[Document]], torch.Tensor]:
    """
    Initial DPR retrieval.

    Just call superclass to retrieve first stage.

    :param query:
        encoding of query
    :param mask:
        optional query mask

    :return (docs, scores):
        docs: list of (text, title) tuples for each batch example
        scores: doc scores
    """
    return DPRRetriever.retrieve_and_score(self, query)
[ "def", "_retrieve_initial", "(", "self", ",", "query", ":", "torch", ".", "LongTensor", ")", "->", "Tuple", "[", "List", "[", "List", "[", "Document", "]", "]", ",", "torch", ".", "Tensor", "]", ":", "return", "DPRRetriever", ".", "retrieve_and_score", "...
https://github.com/facebookresearch/ParlAI/blob/e4d59c30eef44f1f67105961b82a83fd28d7d78b/parlai/agents/rag/retrievers.py#L859-L876
DamnWidget/anaconda
a9998fb362320f907d5ccbc6fcf5b62baca677c0
anaconda_lib/jedi/inference/docstrings.py
python
_execute_types_in_stmt
(module_context, stmt)
return ValueSet.from_sets( _execute_array_values(module_context.inference_state, d) for d in definitions )
Executing all types or general elements that we find in a statement. This doesn't include tuple, list and dict literals, because the stuff they contain is executed. (Used as type information).
Executing all types or general elements that we find in a statement. This doesn't include tuple, list and dict literals, because the stuff they contain is executed. (Used as type information).
[ "Executing", "all", "types", "or", "general", "elements", "that", "we", "find", "in", "a", "statement", ".", "This", "doesn", "t", "include", "tuple", "list", "and", "dict", "literals", "because", "the", "stuff", "they", "contain", "is", "executed", ".", "...
def _execute_types_in_stmt(module_context, stmt):
    """
    Executing all types or general elements that we find in a statement. This
    doesn't include tuple, list and dict literals, because the stuff they
    contain is executed. (Used as type information).
    """
    definitions = module_context.infer_node(stmt)
    return ValueSet.from_sets(
        _execute_array_values(module_context.inference_state, d)
        for d in definitions
    )
[ "def", "_execute_types_in_stmt", "(", "module_context", ",", "stmt", ")", ":", "definitions", "=", "module_context", ".", "infer_node", "(", "stmt", ")", "return", "ValueSet", ".", "from_sets", "(", "_execute_array_values", "(", "module_context", ".", "inference_sta...
https://github.com/DamnWidget/anaconda/blob/a9998fb362320f907d5ccbc6fcf5b62baca677c0/anaconda_lib/jedi/inference/docstrings.py#L238-L248
mfessenden/SceneGraph
0fa3429059c77c881d1b58b28e89dcb44c609909
core/plugins.py
python
parse_module_variable
(module, key)
return None
Parse a named variable from a given module. params: module (module) - module object. key (str) - string variable to search for. returns: (str) - parsed variable value.
Parse a named variable from a given module.
[ "Parse", "a", "named", "variable", "from", "a", "given", "module", "." ]
def parse_module_variable(module, key):
    """
    Parse a named variable from a given module.

    params:
        module (module) - module object.
        key    (str)    - string variable to search for.

    returns:
        (str) - parsed variable value.
    """
    for cname, obj in inspect.getmembers(module):
        if cname==key:
            return obj
    return None
[ "def", "parse_module_variable", "(", "module", ",", "key", ")", ":", "for", "cname", ",", "obj", "in", "inspect", ".", "getmembers", "(", "module", ")", ":", "if", "cname", "==", "key", ":", "return", "obj", "return", "None" ]
https://github.com/mfessenden/SceneGraph/blob/0fa3429059c77c881d1b58b28e89dcb44c609909/core/plugins.py#L652-L666
mozman/ezdxf
59d0fc2ea63f5cf82293428f5931da7e9f9718e9
src/ezdxf/addons/r12writer.py
python
R12FastStreamWriter.add_text
( self, text: str, insert: Vertex = (0, 0), height: float = 1.0, width: float = 1.0, align: str = "LEFT", rotation: float = 0.0, oblique: float = 0.0, style: str = "STANDARD", layer: str = "0", color: int = None, )
Add a one line TEXT entity. Args: text: the text as string insert: insert location as ``(x, y)`` tuple height: text height in drawing units width: text width as factor align: text alignment, see table below rotation: text rotation in degrees as float oblique: oblique in degrees as float, vertical = ``0`` (default) style: text style name as string, if FIXED-TABLES are written some predefined text styles are available, else text style is always ``'STANDARD'``. layer: layer name as string see :meth:`add_line` color: color as :ref:`ACI` see :meth:`add_line` ============ =============== ================= ===== Vert/Horiz Left Center Right ============ =============== ================= ===== Top ``TOP_LEFT`` ``TOP_CENTER`` ``TOP_RIGHT`` Middle ``MIDDLE_LEFT`` ``MIDDLE_CENTER`` ``MIDDLE_RIGHT`` Bottom ``BOTTOM_LEFT`` ``BOTTOM_CENTER`` ``BOTTOM_RIGHT`` Baseline ``LEFT`` ``CENTER`` ``RIGHT`` ============ =============== ================= ===== The special alignments ``ALIGNED`` and ``FIT`` are not available.
Add a one line TEXT entity.
[ "Add", "a", "one", "line", "TEXT", "entity", "." ]
def add_text(
    self,
    text: str,
    insert: Vertex = (0, 0),
    height: float = 1.0,
    width: float = 1.0,
    align: str = "LEFT",
    rotation: float = 0.0,
    oblique: float = 0.0,
    style: str = "STANDARD",
    layer: str = "0",
    color: int = None,
) -> None:
    """Add a one line TEXT entity.

    Args:
        text: the text as string
        insert: insert location as ``(x, y)`` tuple
        height: text height in drawing units
        width: text width as factor
        align: text alignment, see table below
        rotation: text rotation in degrees as float
        oblique: oblique in degrees as float, vertical = ``0`` (default)
        style: text style name as string, if FIXED-TABLES are written
            some predefined text styles are available, else text style
            is always ``'STANDARD'``.
        layer: layer name as string see :meth:`add_line`
        color: color as :ref:`ACI` see :meth:`add_line`

    ============ =============== ================= =================
    Vert/Horiz   Left            Center            Right
    ============ =============== ================= =================
    Top          ``TOP_LEFT``    ``TOP_CENTER``    ``TOP_RIGHT``
    Middle       ``MIDDLE_LEFT`` ``MIDDLE_CENTER`` ``MIDDLE_RIGHT``
    Bottom       ``BOTTOM_LEFT`` ``BOTTOM_CENTER`` ``BOTTOM_RIGHT``
    Baseline     ``LEFT``        ``CENTER``        ``RIGHT``
    ============ =============== ================= =================

    The special alignments ``ALIGNED`` and ``FIT`` are not available.

    """
    # text style is always STANDARD without a TABLES section
    dxf = ["0\nTEXT\n"]
    dxf.append(dxf_attribs(layer, color))
    dxf.append(dxf_vertex(insert, code=10))
    dxf.append(dxf_tag(1, str(text)))
    dxf.append(dxf_tag(40, str(rnd(height))))
    if width != 1.0:
        dxf.append(dxf_tag(41, str(rnd(width))))
    if rotation != 0.0:
        dxf.append(dxf_tag(50, str(rnd(rotation))))
    if oblique != 0.0:
        dxf.append(dxf_tag(51, str(rnd(oblique))))
    if style != "STANDARD":
        dxf.append(dxf_tag(7, str(style)))
    halign, valign = TEXT_ALIGN_FLAGS[align.upper()]
    dxf.append(dxf_tag(72, str(halign)))
    dxf.append(dxf_tag(73, str(valign)))
    dxf.append(dxf_vertex(insert, code=11))  # align point
    self.stream.write("".join(dxf))
[ "def", "add_text", "(", "self", ",", "text", ":", "str", ",", "insert", ":", "Vertex", "=", "(", "0", ",", "0", ")", ",", "height", ":", "float", "=", "1.0", ",", "width", ":", "float", "=", "1.0", ",", "align", ":", "str", "=", "\"LEFT\"", ","...
https://github.com/mozman/ezdxf/blob/59d0fc2ea63f5cf82293428f5931da7e9f9718e9/src/ezdxf/addons/r12writer.py#L560-L619
LumaPictures/pymel
fa88a3f4fa18e09bb8aa9bdf4dab53d984bada72
pymel/core/language.py
python
Env.setUpAxis
(self, axis, rotateView=False)
This flag specifies the axis as the world up direction. The valid axis are either 'y' or 'z'.
This flag specifies the axis as the world up direction. The valid axis are either 'y' or 'z'.
[ "This", "flag", "specifies", "the", "axis", "as", "the", "world", "up", "direction", ".", "The", "valid", "axis", "are", "either", "y", "or", "z", "." ]
def setUpAxis(self, axis, rotateView=False):
    """This flag specifies the axis as the world up direction. The valid
    axis are either 'y' or 'z'."""
    cmds.upAxis(axis=axis, rotateView=rotateView)
[ "def", "setUpAxis", "(", "self", ",", "axis", ",", "rotateView", "=", "False", ")", ":", "cmds", ".", "upAxis", "(", "axis", "=", "axis", ",", "rotateView", "=", "rotateView", ")" ]
https://github.com/LumaPictures/pymel/blob/fa88a3f4fa18e09bb8aa9bdf4dab53d984bada72/pymel/core/language.py#L760-L763
LiDan456/MAD-GANs
3139a73a4112d3f3f18182c9a6cdc2c671e7cfe8
differential_privacy/privacy_accountant/tf/accountant.py
python
GaussianMomentsAccountant.__init__
(self, total_examples, moment_orders=32)
Initialization. Args: total_examples: total number of examples. moment_orders: the order of moments to keep.
Initialization.
[ "Initialization", "." ]
def __init__(self, total_examples, moment_orders=32):
    """Initialization.

    Args:
      total_examples: total number of examples.
      moment_orders: the order of moments to keep.
    """
    super(self.__class__, self).__init__(total_examples, moment_orders)
    self._binomial_table = utils.GenerateBinomialTable(self._max_moment_order)
[ "def", "__init__", "(", "self", ",", "total_examples", ",", "moment_orders", "=", "32", ")", ":", "super", "(", "self", ".", "__class__", ",", "self", ")", ".", "__init__", "(", "total_examples", ",", "moment_orders", ")", "self", ".", "_binomial_table", "...
https://github.com/LiDan456/MAD-GANs/blob/3139a73a4112d3f3f18182c9a6cdc2c671e7cfe8/differential_privacy/privacy_accountant/tf/accountant.py#L332-L340
hatRiot/zarp
2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad
src/modules/sniffer/parser_mysql.py
python
num_fields
(pkt)
return int(pkt[4], 16)
Return the number of fields in a query response
Return the number of fields in a query response
[ "Return", "the", "number", "of", "fields", "in", "a", "query", "response" ]
def num_fields(pkt):
    """Return the number of fields in a query response"""
    return int(pkt[4], 16)
[ "def", "num_fields", "(", "pkt", ")", ":", "return", "int", "(", "pkt", "[", "4", "]", ",", "16", ")" ]
https://github.com/hatRiot/zarp/blob/2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad/src/modules/sniffer/parser_mysql.py#L27-L29
guildai/guildai
1665985a3d4d788efc1a3180ca51cc417f71ca78
guild/external/pip/_vendor/lockfile/pidlockfile.py
python
PIDLockFile.acquire
(self, timeout=None)
Acquire the lock. Creates the PID file for this lock, or raises an error if the lock could not be acquired.
Acquire the lock.
[ "Acquire", "the", "lock", "." ]
def acquire(self, timeout=None):
    """
    Acquire the lock.

    Creates the PID file for this lock, or raises an error if
    the lock could not be acquired.
    """

    timeout = timeout if timeout is not None else self.timeout
    end_time = time.time()
    if timeout is not None and timeout > 0:
        end_time += timeout

    while True:
        try:
            write_pid_to_pidfile(self.path)
        except OSError as exc:
            if exc.errno == errno.EEXIST:
                # The lock creation failed. Maybe sleep a bit.
                if time.time() > end_time:
                    if timeout is not None and timeout > 0:
                        raise LockTimeout("Timeout waiting to acquire"
                                          " lock for %s" % self.path)
                    else:
                        raise AlreadyLocked("%s is already locked" %
                                            self.path)
                time.sleep(timeout is not None and timeout / 10 or 0.1)
            else:
                raise LockFailed("failed to create %s" % self.path)
        else:
            return
[ "def", "acquire", "(", "self", ",", "timeout", "=", "None", ")", ":", "timeout", "=", "timeout", "if", "timeout", "is", "not", "None", "else", "self", ".", "timeout", "end_time", "=", "time", ".", "time", "(", ")", "if", "timeout", "is", "not", "None...
https://github.com/guildai/guildai/blob/1665985a3d4d788efc1a3180ca51cc417f71ca78/guild/external/pip/_vendor/lockfile/pidlockfile.py#L63-L93
Fizzadar/pyinfra
ff0913d6a172966760b63fe59e55dff9ea852e0d
pyinfra/facts/pacman.py
python
PacmanUnpackGroup.process
(self, output)
return output
[]
def process(self, output):
    return output
[ "def", "process", "(", "self", ",", "output", ")", ":", "return", "output" ]
https://github.com/Fizzadar/pyinfra/blob/ff0913d6a172966760b63fe59e55dff9ea852e0d/pyinfra/facts/pacman.py#L28-L29
malwaredllc/byob
3924dd6aea6d0421397cdf35f692933b340bfccf
web-gui/buildyourownbotnet/core/payloads.py
python
Payload.escalate
(self)
Attempt UAC bypass to escalate privileges
Attempt UAC bypass to escalate privileges
[ "Attempt", "UAC", "bypass", "to", "escalate", "privileges" ]
def escalate(self):
    """
    Attempt UAC bypass to escalate privileges
    """
    try:
        if 'escalate' not in globals():
            self.load('escalate')
        return globals()['escalate'].run(sys.argv[0])
    except Exception as e:
        log("{} error: {}".format(self.escalate.__name__, str(e)))
[ "def", "escalate", "(", "self", ")", ":", "try", ":", "if", "'escalate'", "not", "in", "globals", "(", ")", ":", "self", ".", "load", "(", "'escalate'", ")", "return", "globals", "(", ")", "[", "'escalate'", "]", ".", "run", "(", "sys", ".", "argv"...
https://github.com/malwaredllc/byob/blob/3924dd6aea6d0421397cdf35f692933b340bfccf/web-gui/buildyourownbotnet/core/payloads.py#L799-L809
Chaffelson/nipyapi
d3b186fd701ce308c2812746d98af9120955e810
nipyapi/nifi/models/controller_status_dto.py
python
ControllerStatusDTO.invalid_count
(self)
return self._invalid_count
Gets the invalid_count of this ControllerStatusDTO. The number of invalid components in the NiFi. :return: The invalid_count of this ControllerStatusDTO. :rtype: int
Gets the invalid_count of this ControllerStatusDTO. The number of invalid components in the NiFi.
[ "Gets", "the", "invalid_count", "of", "this", "ControllerStatusDTO", ".", "The", "number", "of", "invalid", "components", "in", "the", "NiFi", "." ]
def invalid_count(self):
    """
    Gets the invalid_count of this ControllerStatusDTO.
    The number of invalid components in the NiFi.

    :return: The invalid_count of this ControllerStatusDTO.
    :rtype: int
    """
    return self._invalid_count
[ "def", "invalid_count", "(", "self", ")", ":", "return", "self", ".", "_invalid_count" ]
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/controller_status_dto.py#L288-L296
evhub/coconut
27a4af9dc06667870f736f20c862930001b8cbb2
coconut/compiler/util.py
python
ComputationNode.name
(self)
return name if name is not None else repr(self.action)
Get the name of the action.
Get the name of the action.
[ "Get", "the", "name", "of", "the", "action", "." ]
def name(self):
    """Get the name of the action."""
    name = getattr(self.action, "__name__", None)
    # repr(action) not defined for all actions, so must only be evaluated if getattr fails
    return name if name is not None else repr(self.action)
[ "def", "name", "(", "self", ")", ":", "name", "=", "getattr", "(", "self", ".", "action", ",", "\"__name__\"", ",", "None", ")", "# repr(action) not defined for all actions, so must only be evaluated if getattr fails", "return", "name", "if", "name", "is", "not", "N...
https://github.com/evhub/coconut/blob/27a4af9dc06667870f736f20c862930001b8cbb2/coconut/compiler/util.py#L187-L191
mapproxy/mapproxy
45ae81b3dd6c8a1a0b473ba8c669afd0ec7ecd10
mapproxy/cache/riak.py
python
RiakCache._key_iterator
(self, level)
Generator for all tile keys in `level`.
Generator for all tile keys in `level`.
[ "Generator", "for", "all", "tile", "keys", "in", "level", "." ]
def _key_iterator(self, level):
    """
    Generator for all tile keys in `level`.
    """
    # index() returns a list of all keys so we check for tiles in
    # batches of `chunk_size`*`chunk_size`.
    grid_size = self.tile_grid.grid_sizes[level]
    chunk_size = 256
    for x in range(grid_size[0]/chunk_size):
        start_x = x * chunk_size
        end_x = start_x + chunk_size - 1
        for y in range(grid_size[1]/chunk_size):
            start_y = y * chunk_size
            end_y = start_y + chunk_size - 1
            query = self.bucket.get_index('tile_coord_bin',
                '%02d-%07d-%07d' % (level, start_x, start_y),
                '%02d-%07d-%07d' % (level, end_x, end_y))
            for link in query.run():
                yield link.get_key()
[ "def", "_key_iterator", "(", "self", ",", "level", ")", ":", "# index() returns a list of all keys so we check for tiles in", "# batches of `chunk_size`*`chunk_size`.", "grid_size", "=", "self", ".", "tile_grid", ".", "grid_sizes", "[", "level", "]", "chunk_size", "=", "2...
https://github.com/mapproxy/mapproxy/blob/45ae81b3dd6c8a1a0b473ba8c669afd0ec7ecd10/mapproxy/cache/riak.py#L171-L189
Yelp/mrjob
091572e87bc24cc64be40278dd0f5c3617c98d4b
mrjob/cmd.py
python
_command
(name, description=None)
return decorator
Decorate a function used to call a command. If you don't set *description*, it won't be included in help (useful for deprecated commands).
Decorate a function used to call a command.
[ "Decorate", "a", "function", "used", "to", "call", "a", "command", "." ]
def _command(name, description=None):
    """Decorate a function used to call a command.

    If you don't set *description*, it won't be included in help
    (useful for deprecated commands)."""
    def decorator(f):
        commands[name] = f
        if description:
            descriptions[name] = description
        return f

    return decorator
[ "def", "_command", "(", "name", ",", "description", "=", "None", ")", ":", "def", "decorator", "(", "f", ")", ":", "commands", "[", "name", "]", "=", "f", "if", "description", ":", "descriptions", "[", "name", "]", "=", "description", "return", "f", ...
https://github.com/Yelp/mrjob/blob/091572e87bc24cc64be40278dd0f5c3617c98d4b/mrjob/cmd.py#L48-L58
GoogleCloudPlatform/PerfKitBenchmarker
6e3412d7d5e414b8ca30ed5eaf970cef1d919a67
perfkitbenchmarker/linux_benchmarks/openssl_speed_benchmark.py
python
ParseOpenSSLOutput
(raw_result: str, version: str, parallelism: int)
return results
Parse output from openssl speed and return as samples.
Parse output from openssl speed and return as samples.
[ "Parse", "output", "from", "openssl", "speed", "and", "return", "as", "samples", "." ]
def ParseOpenSSLOutput(raw_result: str, version: str, parallelism: int):
    """Parse output from openssl speed and return as samples."""
    matches = regex_util.ExtractExactlyOneMatch(r'evp\s+(.*)', raw_result).split()
    results = []
    for idx, blocksize in enumerate(BLOCKSIZES_IN_BYTES):
        value_unit_tuple = regex_util.ExtractExactlyOneMatch(
            r'([\d\.]+)(\w+)', matches[idx])
        metadata = {
            'duration': _OPENSSL_SPEED_DURATION.value,
            'algorithm': _OPENSSL_SPEED_ALGORITHM.value,
            'parallelism': parallelism,
            'version': version,
            'blocksize': blocksize
        }
        results.append(
            sample.Sample('Throughput', float(value_unit_tuple[0]),
                          value_unit_tuple[1], metadata))
    return results
[ "def", "ParseOpenSSLOutput", "(", "raw_result", ":", "str", ",", "version", ":", "str", ",", "parallelism", ":", "int", ")", ":", "matches", "=", "regex_util", ".", "ExtractExactlyOneMatch", "(", "r'evp\\s+(.*)'", ",", "raw_result", ")", ".", "split", "(", "...
https://github.com/GoogleCloudPlatform/PerfKitBenchmarker/blob/6e3412d7d5e414b8ca30ed5eaf970cef1d919a67/perfkitbenchmarker/linux_benchmarks/openssl_speed_benchmark.py#L59-L76
numba/numba
bf480b9e0da858a65508c2b17759a72ee6a44c51
numba/core/base.py
python
BaseContext.unpack_value
(self, builder, ty, ptr, align=None)
return dm.load_from_data_pointer(builder, ptr, align)
Unpack value from the array storage at *ptr*. If *align* is given, it is the guaranteed alignment for *ptr* (by default, the standard ABI alignment).
Unpack value from the array storage at *ptr*. If *align* is given, it is the guaranteed alignment for *ptr* (by default, the standard ABI alignment).
[ "Unpack", "value", "from", "the", "array", "storage", "at", "*", "ptr", "*", ".", "If", "*", "align", "*", "is", "given", "it", "is", "the", "guaranteed", "alignment", "for", "*", "ptr", "*", "(", "by", "default", "the", "standard", "ABI", "alignment",...
def unpack_value(self, builder, ty, ptr, align=None):
    """
    Unpack value from the array storage at *ptr*.
    If *align* is given, it is the guaranteed alignment for *ptr*
    (by default, the standard ABI alignment).
    """
    dm = self.data_model_manager[ty]
    return dm.load_from_data_pointer(builder, ptr, align)
[ "def", "unpack_value", "(", "self", ",", "builder", ",", "ty", ",", "ptr", ",", "align", "=", "None", ")", ":", "dm", "=", "self", ".", "data_model_manager", "[", "ty", "]", "return", "dm", ".", "load_from_data_pointer", "(", "builder", ",", "ptr", ","...
https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/core/base.py#L505-L512
osmr/imgclsmob
f2993d3ce73a2f7ddba05da3891defb08547d504
gluon/gluoncv2/models/preresnet.py
python
preresnetbc14b
(**kwargs)
return get_preresnet(blocks=14, bottleneck=True, conv1_stride=False, model_name="preresnetbc14b", **kwargs)
PreResNet-BC-14b model from 'Identity Mappings in Deep Residual Networks,' https://arxiv.org/abs/1603.05027. It's an experimental model (bottleneck compressed). Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters.
PreResNet-BC-14b model from 'Identity Mappings in Deep Residual Networks,' https://arxiv.org/abs/1603.05027. It's an experimental model (bottleneck compressed).
[ "PreResNet", "-", "BC", "-", "14b", "model", "from", "Identity", "Mappings", "in", "Deep", "Residual", "Networks", "https", ":", "//", "arxiv", ".", "org", "/", "abs", "/", "1603", ".", "05027", ".", "It", "s", "an", "experimental", "model", "(", "bott...
def preresnetbc14b(**kwargs):
    """
    PreResNet-BC-14b model from 'Identity Mappings in Deep Residual Networks,'
    https://arxiv.org/abs/1603.05027. It's an experimental model (bottleneck compressed).

    Parameters:
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    return get_preresnet(blocks=14, bottleneck=True, conv1_stride=False, model_name="preresnetbc14b", **kwargs)
[ "def", "preresnetbc14b", "(", "*", "*", "kwargs", ")", ":", "return", "get_preresnet", "(", "blocks", "=", "14", ",", "bottleneck", "=", "True", ",", "conv1_stride", "=", "False", ",", "model_name", "=", "\"preresnetbc14b\"", ",", "*", "*", "kwargs", ")" ]
https://github.com/osmr/imgclsmob/blob/f2993d3ce73a2f7ddba05da3891defb08547d504/gluon/gluoncv2/models/preresnet.py#L494-L508
microsoft/MPNet
081523a788c1556f28dd90cbc629810f48b083fb
pretraining/fairseq/data/legacy/masked_lm_dictionary.py
python
BertDictionary.cls
(self)
return self.cls_index
Helper to get index of cls symbol
Helper to get index of cls symbol
[ "Helper", "to", "get", "index", "of", "cls", "symbol" ]
def cls(self):
    """Helper to get index of cls symbol"""
    return self.cls_index
[ "def", "cls", "(", "self", ")", ":", "return", "self", ".", "cls_index" ]
https://github.com/microsoft/MPNet/blob/081523a788c1556f28dd90cbc629810f48b083fb/pretraining/fairseq/data/legacy/masked_lm_dictionary.py#L52-L54
googleapis/python-dialogflow
e48ea001b7c8a4a5c1fe4b162bad49ea397458e9
google/cloud/dialogflow_v2/services/knowledge_bases/async_client.py
python
KnowledgeBasesAsyncClient.list_knowledge_bases
( self, request: Union[knowledge_base.ListKnowledgeBasesRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), )
return response
r"""Returns the list of all knowledge bases of the specified agent. Args: request (Union[google.cloud.dialogflow_v2.types.ListKnowledgeBasesRequest, dict]): The request object. Request message for [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases]. parent (:class:`str`): Required. The project to list of knowledge bases for. Format: ``projects/<Project ID>/locations/<Location ID>``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.dialogflow_v2.services.knowledge_bases.pagers.ListKnowledgeBasesAsyncPager: Response message for [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases]. Iterating over this object will yield results and resolve additional pages automatically.
r"""Returns the list of all knowledge bases of the specified agent.
[ "r", "Returns", "the", "list", "of", "all", "knowledge", "bases", "of", "the", "specified", "agent", "." ]
async def list_knowledge_bases(
    self,
    request: Union[knowledge_base.ListKnowledgeBasesRequest, dict] = None,
    *,
    parent: str = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListKnowledgeBasesAsyncPager:
    r"""Returns the list of all knowledge bases of the specified agent.

    Args:
        request (Union[google.cloud.dialogflow_v2.types.ListKnowledgeBasesRequest, dict]):
            The request object. Request message for
            [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases].
        parent (:class:`str`):
            Required. The project to list of knowledge bases for.
            Format: ``projects/<Project ID>/locations/<Location ID>``.

            This corresponds to the ``parent`` field on the ``request``
            instance; if ``request`` is provided, this should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with
            the request as metadata.

    Returns:
        google.cloud.dialogflow_v2.services.knowledge_bases.pagers.ListKnowledgeBasesAsyncPager:
            Response message for
            [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases].

            Iterating over this object will yield results and resolve
            additional pages automatically.

    """
    # Create or coerce a protobuf request object.
    # Sanity check: If we got a request object, we should *not* have
    # gotten any keyword arguments that map to the request.
    has_flattened_params = any([parent])
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    request = knowledge_base.ListKnowledgeBasesRequest(request)

    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if parent is not None:
        request.parent = parent

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = gapic_v1.method_async.wrap_method(
        self._client._transport.list_knowledge_bases,
        default_timeout=None,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
    )

    # Send the request.
    response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

    # This method is paged; wrap the response in a pager, which provides
    # an `__aiter__` convenience method.
    response = pagers.ListKnowledgeBasesAsyncPager(
        method=rpc, request=request, response=response, metadata=metadata,
    )

    # Done; return the response.
    return response
[ "async", "def", "list_knowledge_bases", "(", "self", ",", "request", ":", "Union", "[", "knowledge_base", ".", "ListKnowledgeBasesRequest", ",", "dict", "]", "=", "None", ",", "*", ",", "parent", ":", "str", "=", "None", ",", "retry", ":", "OptionalRetry", ...
https://github.com/googleapis/python-dialogflow/blob/e48ea001b7c8a4a5c1fe4b162bad49ea397458e9/google/cloud/dialogflow_v2/services/knowledge_bases/async_client.py#L175-L255
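A minimal usage sketch for the async method above, assuming Application Default Credentials are configured; the project path is hypothetical, and KnowledgeBasesAsyncClient is the generated client class exported from google.cloud.dialogflow_v2 that this method belongs to.

import asyncio
from google.cloud import dialogflow_v2

async def show_knowledge_bases():
    client = dialogflow_v2.KnowledgeBasesAsyncClient()
    # Hypothetical project/location; replace with your own values.
    parent = "projects/my-project/locations/global"
    pager = await client.list_knowledge_bases(parent=parent)
    # The async pager resolves additional result pages transparently.
    async for kb in pager:
        print(kb.name, kb.display_name)

asyncio.run(show_knowledge_bases())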
uqfoundation/multiprocess
028cc73f02655e6451d92e5147d19d8c10aebe50
py3.6/multiprocess/managers.py
python
Server.number_of_objects
(self, c)
return len(self.id_to_refcount)
Number of shared objects
Number of shared objects
[ "Number", "of", "shared", "objects" ]
def number_of_objects(self, c): ''' Number of shared objects ''' # Doesn't use (len(self.id_to_obj) - 1) as we shouldn't count ident='0' return len(self.id_to_refcount)
[ "def", "number_of_objects", "(", "self", ",", "c", ")", ":", "# Doesn't use (len(self.id_to_obj) - 1) as we shouldn't count ident='0'", "return", "len", "(", "self", ".", "id_to_refcount", ")" ]
https://github.com/uqfoundation/multiprocess/blob/028cc73f02655e6451d92e5147d19d8c10aebe50/py3.6/multiprocess/managers.py#L327-L332
tryolabs/luminoth
9109d8b98bcbb0ad4e8e0c928da0b20627fa774f
luminoth/models/base/truncated_base_network.py
python
TruncatedBaseNetwork.get_trainable_vars
(self)
return trainable_vars
Returns a list of the variables that are trainable. Returns: trainable_variables: a tuple of `tf.Variable`.
Returns a list of the variables that are trainable.
[ "Returns", "a", "list", "of", "the", "variables", "that", "are", "trainable", "." ]
def get_trainable_vars(self): """ Returns a list of the variables that are trainable. Returns: trainable_variables: a tuple of `tf.Variable`. """ all_trainable = super(TruncatedBaseNetwork, self).get_trainable_vars() # Get the index of the last endpoint scope variable. # For example, if the endpoint for ResNet-50 is set as # "block4/unit_3/bottleneck_v1/conv2", then it will get 155, # because the variables (with their indexes) are: # 153 block4/unit_3/bottleneck_v1/conv2/weights:0 # 154 block4/unit_3/bottleneck_v1/conv2/BatchNorm/beta:0 # 155 block4/unit_3/bottleneck_v1/conv2/BatchNorm/gamma:0 var_iter = enumerate(v.name for v in all_trainable) scope_var_index_iter = ( i for i, name in var_iter if self._endpoint in name ) index = None for index in scope_var_index_iter: pass if index is None: # Resulting `trainable_vars` is empty, possibly due to the # `fine_tune_from` starting after the endpoint. trainable_vars = tuple() else: trainable_vars = all_trainable[:index + 1] if self._use_tail and not self._freeze_tail: if self._architecture == 'resnet_v1_101': # Retrieve the trainable vars out of the tail. # TODO: Tail should be configurable too, to avoid hard-coding # the trainable portion to `block4` and allow using something # in block4 as endpoint. var_iter = enumerate(v.name for v in all_trainable) try: index = next(i for i, name in var_iter if 'block4' in name) except StopIteration: raise ValueError( '"block4" not present in the trainable vars retrieved ' 'from base network.' ) trainable_vars += all_trainable[index:] return trainable_vars
[ "def", "get_trainable_vars", "(", "self", ")", ":", "all_trainable", "=", "super", "(", "TruncatedBaseNetwork", ",", "self", ")", ".", "get_trainable_vars", "(", ")", "# Get the index of the last endpoint scope variable.", "# For example, if the endpoint for ResNet-50 is set as...
https://github.com/tryolabs/luminoth/blob/9109d8b98bcbb0ad4e8e0c928da0b20627fa774f/luminoth/models/base/truncated_base_network.py#L97-L144
romanvm/python-web-pdb
243f69b91907af97f0c14e91c30217f8850eea6c
web_pdb/__init__.py
python
set_trace
(host='', port=5555, patch_stdstreams=False)
Start the debugger This method suspends execution of the current script and starts a PDB debugging session. The web-interface is opened on the specified port (default: ``5555``). Example:: import web_pdb;web_pdb.set_trace() Subsequent :func:`set_trace` calls can be used as hardcoded breakpoints. :param host: web-UI hostname or IP-address :type host: str :param port: web-UI port. If ``port=-1``, choose a random port value between 32768 and 65536. :type port: int :param patch_stdstreams: redirect all standard input and output streams to the web-UI. :type patch_stdstreams: bool
Start the debugger
[ "Start", "the", "debugger" ]
def set_trace(host='', port=5555, patch_stdstreams=False): """ Start the debugger This method suspends execution of the current script and starts a PDB debugging session. The web-interface is opened on the specified port (default: ``5555``). Example:: import web_pdb;web_pdb.set_trace() Subsequent :func:`set_trace` calls can be used as hardcoded breakpoints. :param host: web-UI hostname or IP-address :type host: str :param port: web-UI port. If ``port=-1``, choose a random port value between 32768 and 65536. :type port: int :param patch_stdstreams: redirect all standard input and output streams to the web-UI. :type patch_stdstreams: bool """ pdb = WebPdb.active_instance if pdb is None: pdb = WebPdb(host, port, patch_stdstreams) else: # If the debugger is still attached reset trace to a new location pdb.remove_trace() pdb.set_trace(sys._getframe().f_back)
[ "def", "set_trace", "(", "host", "=", "''", ",", "port", "=", "5555", ",", "patch_stdstreams", "=", "False", ")", ":", "pdb", "=", "WebPdb", ".", "active_instance", "if", "pdb", "is", "None", ":", "pdb", "=", "WebPdb", "(", "host", ",", "port", ",", ...
https://github.com/romanvm/python-web-pdb/blob/243f69b91907af97f0c14e91c30217f8850eea6c/web_pdb/__init__.py#L250-L279
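A short usage sketch for the breakpoint above; port 5555 is the documented default, and the target function here is purely illustrative.

import web_pdb

def accumulate(values):
    total = 0
    for v in values:
        # Pauses execution; open http://localhost:5555 in a browser to
        # inspect locals, step, and continue. Later calls act as breakpoints.
        web_pdb.set_trace()
        total += v
    return total

accumulate([1, 2, 3])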
yuanxiaosc/Schema-based-Knowledge-Extraction
ac0f07cd1088f24cb8f76c271fd2b49ea6100ca8
evaluation.py
python
del_bookname
(entity_name)
return entity_name
delete the book name
delete the book name
[ "delete", "the", "book", "name" ]
def del_bookname(entity_name): """delete the book name""" if entity_name.startswith(u'《') and entity_name.endswith(u'》'): entity_name = entity_name[1:-1] return entity_name
[ "def", "del_bookname", "(", "entity_name", ")", ":", "if", "entity_name", ".", "startswith", "(", "u'《') ", "a", "d e", "tity_name.e", "n", "dswith(u", "'", "》'):", "", "", "entity_name", "=", "entity_name", "[", "1", ":", "-", "1", "]", "return", "entit...
https://github.com/yuanxiaosc/Schema-based-Knowledge-Extraction/blob/ac0f07cd1088f24cb8f76c271fd2b49ea6100ca8/evaluation.py#L16-L20
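A quick behavioural check of the helper above; the function body is repeated only to keep the example self-contained, and the sample title is made up.

def del_bookname(entity_name):
    """delete the book name"""
    if entity_name.startswith(u'《') and entity_name.endswith(u'》'):
        entity_name = entity_name[1:-1]
    return entity_name

assert del_bookname(u'《三体》') == u'三体'   # enclosing brackets stripped
assert del_bookname(u'三体') == u'三体'       # unchanged when no brackets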
kanzure/nanoengineer
874e4c9f8a9190f093625b267f9767e19f82e6c4
cad/src/graphics/widgets/ThumbView.py
python
ElementView.resetView
(self, scale = 2.0)
Reset current view.
Reset current view.
[ "Reset", "current", "view", "." ]
def resetView(self, scale = 2.0): """ Reset current view. """ ThumbView.resetView(self) self.scale = scale
[ "def", "resetView", "(", "self", ",", "scale", "=", "2.0", ")", ":", "ThumbView", ".", "resetView", "(", "self", ")", "self", ".", "scale", "=", "scale" ]
https://github.com/kanzure/nanoengineer/blob/874e4c9f8a9190f093625b267f9767e19f82e6c4/cad/src/graphics/widgets/ThumbView.py#L717-L722
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/core/leoGlobals.py
python
KeyStroke.finalize_char
(self, s)
return self.strip_shift(s)
Perform very-last-minute translations on bindings.
Perform very-last-minute translations on bindings.
[ "Perform", "very", "-", "last", "-", "minute", "translations", "on", "bindings", "." ]
def finalize_char(self, s): """Perform very-last-minute translations on bindings.""" # # Retain "bigger" spelling for gang-of-four bindings with modifiers. shift_d = { 'bksp': 'BackSpace', 'backspace': 'BackSpace', 'backtab': 'Tab', # The shift mod will convert to 'Shift+Tab', 'linefeed': 'Return', '\r': 'Return', 'return': 'Return', 'tab': 'Tab', } if self.mods and s.lower() in shift_d: return shift_d.get(s.lower()) # Returning '' breaks existing code. # # Make all other translations... # # This dict ensures proper capitalization. # It also translates legacy Tk binding names to ascii chars. translate_d = { # # The gang of four... 'bksp': 'BackSpace', 'backspace': 'BackSpace', 'backtab': 'Tab', # The shift mod will convert to 'Shift+Tab', 'linefeed': '\n', '\r': '\n', 'return': '\n', 'tab': 'Tab', # # Special chars... 'delete': 'Delete', 'down': 'Down', 'end': 'End', 'enter': 'Enter', 'escape': 'Escape', 'home': 'Home', 'insert': 'Insert', 'left': 'Left', 'next': 'Next', 'prior': 'Prior', 'right': 'Right', 'up': 'Up', # # Qt key names... 'del': 'Delete', 'dnarrow': 'Down', 'esc': 'Escape', 'ins': 'Insert', 'ltarrow': 'Left', 'pagedn': 'Next', 'pageup': 'Prior', 'pgdown': 'Next', 'pgup': 'Prior', 'rtarrow': 'Right', 'uparrow': 'Up', # # Legacy Tk binding names... "ampersand": "&", "asciicircum": "^", "asciitilde": "~", "asterisk": "*", "at": "@", "backslash": "\\", "bar": "|", "braceleft": "{", "braceright": "}", "bracketleft": "[", "bracketright": "]", "colon": ":", "comma": ",", "dollar": "$", "equal": "=", "exclam": "!", "greater": ">", "less": "<", "minus": "-", "numbersign": "#", "quotedbl": '"', "quoteright": "'", "parenleft": "(", "parenright": ")", "percent": "%", "period": ".", "plus": "+", "question": "?", "quoteleft": "`", "semicolon": ";", "slash": "/", "space": " ", "underscore": "_", } # # pylint: disable=undefined-loop-variable # Looks like a pylint bug. if s in (None, 'none', 'None'): return 'None' if s.lower() in translate_d: s = translate_d.get(s.lower()) return self.strip_shift(s) if len(s) > 1 and s.find(' ') > -1: # #917: not a pure, but should be ignored. return '' if s.isalpha(): if len(s) == 1: if 'shift' in self.mods: if len(self.mods) == 1: self.mods.remove('shift') s = s.upper() else: s = s.lower() elif self.mods: s = s.lower() else: # 917: Ignore multi-byte alphas not in the table. s = '' if 0: # Make sure all special chars are in translate_d. if g.app.gui: # It may not exist yet. if s.capitalize() in g.app.gui.specialChars: s = s.capitalize() return s # # Translate shifted keys to their appropriate alternatives. return self.strip_shift(s)
[ "def", "finalize_char", "(", "self", ",", "s", ")", ":", "#", "# Retain \"bigger\" spelling for gang-of-four bindings with modifiers.", "shift_d", "=", "{", "'bksp'", ":", "'BackSpace'", ",", "'backspace'", ":", "'BackSpace'", ",", "'backtab'", ":", "'Tab'", ",", "#...
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/core/leoGlobals.py#L676-L802
leaderj1001/Stand-Alone-Self-Attention
a983f0f643632b1f2b7b8b27693182f22e9e574c
model.py
python
Model._make_layer
(self, block, planes, num_blocks, stride)
return nn.Sequential(*layers)
[]
def _make_layer(self, block, planes, num_blocks, stride): strides = [stride] + [1] * (num_blocks - 1) layers = [] for stride in strides: layers.append(block(self.in_places, planes, stride)) self.in_places = planes * block.expansion return nn.Sequential(*layers)
[ "def", "_make_layer", "(", "self", ",", "block", ",", "planes", ",", "num_blocks", ",", "stride", ")", ":", "strides", "=", "[", "stride", "]", "+", "[", "1", "]", "*", "(", "num_blocks", "-", "1", ")", "layers", "=", "[", "]", "for", "stride", "...
https://github.com/leaderj1001/Stand-Alone-Self-Attention/blob/a983f0f643632b1f2b7b8b27693182f22e9e574c/model.py#L89-L95
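A small sketch of the stride pattern used by _make_layer above: the first block of a stage applies the requested stride and every later block uses stride 1. DummyBlock is a stand-in, not the attention block from the repository.

import torch.nn as nn

class DummyBlock(nn.Module):
    expansion = 1
    def __init__(self, in_planes, planes, stride):
        super().__init__()
        self.conv = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1)
    def forward(self, x):
        return self.conv(x)

def make_layer(in_planes, block, planes, num_blocks, stride):
    # First block downsamples; remaining blocks keep the resolution.
    strides = [stride] + [1] * (num_blocks - 1)
    layers, cur_planes = [], in_planes
    for s in strides:
        layers.append(block(cur_planes, planes, s))
        cur_planes = planes * block.expansion
    return nn.Sequential(*layers)

stage = make_layer(64, DummyBlock, 128, num_blocks=3, stride=2)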
JaniceWuo/MovieRecommend
4c86db64ca45598917d304f535413df3bc9fea65
movierecommend/venv1/Lib/site-packages/django/db/backends/base/base.py
python
BaseDatabaseWrapper.disable_constraint_checking
(self)
return False
Backends can implement as needed to temporarily disable foreign key constraint checking. Should return True if the constraints were disabled and will need to be reenabled.
Backends can implement as needed to temporarily disable foreign key constraint checking. Should return True if the constraints were disabled and will need to be reenabled.
[ "Backends", "can", "implement", "as", "needed", "to", "temporarily", "disable", "foreign", "key", "constraint", "checking", ".", "Should", "return", "True", "if", "the", "constraints", "were", "disabled", "and", "will", "need", "to", "be", "reenabled", "." ]
def disable_constraint_checking(self): """ Backends can implement as needed to temporarily disable foreign key constraint checking. Should return True if the constraints were disabled and will need to be reenabled. """ return False
[ "def", "disable_constraint_checking", "(", "self", ")", ":", "return", "False" ]
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/django/db/backends/base/base.py#L465-L471
openai/mujoco-worldgen
39f52b1b47aed499925a6a214b58bdbdb4e2f75e
mujoco_worldgen/objs/obj.py
python
Obj.set_absolute_position
(self, origin)
Set absolute position of objects, recursing through all children. origin - absolute position of this object's origin
Set absolute position of objects, recursing through all children. origin - absolute position of this object's origin
[ "Set", "absolute", "position", "of", "objects", "recursing", "through", "all", "children", ".", "origin", "-", "absolute", "position", "of", "this", "object", "s", "origin" ]
def set_absolute_position(self, origin): ''' Set absolute position of objects, recursing throught all children. origin - absolute position of this object's origin ''' assert len(origin) == 3, "Invalid origin: {}".format(origin) assert len(self.relative_position) == 2, \ "Invalid relative_position: {}".format(self.relative_position) self.absolute_position = np.array(origin, dtype=np.float) # Note relative_position is X,Y but our absolute_position is X,Y,Z self.absolute_position[:2] += self.relative_position for placement_name, children in self.children.items(): placement = self.placements[placement_name] offset = self.absolute_position + placement['origin'] for child, _ in children: if child.placeable: child.set_absolute_position(offset) # Calculate positions of markers for marker in self.markers: if marker['relative_xyz'] is not None: relative_xyz = np.array(marker['relative_xyz'], dtype='f8') marker['position'] = relative_xyz * np.array(self.size, dtype=np.float) for i in range(3): if np.abs(self.size[i]) < 1e-4: marker["position"][i] = relative_xyz[i] marker['position'] -= self.size * 0.5 elif marker['absolute_xyz'] is not None: marker['position'] = np.array(marker['absolute_xyz'], dtype='f8') else: assert False, 'Neither absolute nor relative xyz provided.'
[ "def", "set_absolute_position", "(", "self", ",", "origin", ")", ":", "assert", "len", "(", "origin", ")", "==", "3", ",", "\"Invalid origin: {}\"", ".", "format", "(", "origin", ")", "assert", "len", "(", "self", ".", "relative_position", ")", "==", "2", ...
https://github.com/openai/mujoco-worldgen/blob/39f52b1b47aed499925a6a214b58bdbdb4e2f75e/mujoco_worldgen/objs/obj.py#L345-L374
NervanaSystems/neon
8c3fb8a93b4a89303467b25817c60536542d08bd
neon/backends/layer_mkl.py
python
BatchNormLayerMKL.__init__
(self, in_shape)
[]
def __init__(self, in_shape): self.dnnPrimitives = np.zeros((1, 20), dtype=np.uint64) self.init_f = 0 self.init_b = 0 self.in_shape = in_shape self.shape5D = None
[ "def", "__init__", "(", "self", ",", "in_shape", ")", ":", "self", ".", "dnnPrimitives", "=", "np", ".", "zeros", "(", "(", "1", ",", "20", ")", ",", "dtype", "=", "np", ".", "uint64", ")", "self", ".", "init_f", "=", "0", "self", ".", "init_b", ...
https://github.com/NervanaSystems/neon/blob/8c3fb8a93b4a89303467b25817c60536542d08bd/neon/backends/layer_mkl.py#L239-L244
django-haystack/django-haystack
b6dd72e6b5c97b782f5436b7bb4e8227ba6e3b06
haystack/views.py
python
SearchView.get_context
(self)
return context
[]
def get_context(self): (paginator, page) = self.build_page() context = { "query": self.query, "form": self.form, "page": page, "paginator": paginator, "suggestion": None, } if ( hasattr(self.results, "query") and self.results.query.backend.include_spelling ): context["suggestion"] = self.form.get_suggestion() context.update(self.extra_context()) return context
[ "def", "get_context", "(", "self", ")", ":", "(", "paginator", ",", "page", ")", "=", "self", ".", "build_page", "(", ")", "context", "=", "{", "\"query\"", ":", "self", ".", "query", ",", "\"form\"", ":", "self", ".", "form", ",", "\"page\"", ":", ...
https://github.com/django-haystack/django-haystack/blob/b6dd72e6b5c97b782f5436b7bb4e8227ba6e3b06/haystack/views.py#L128-L147
celery/billiard
269ef67354a3a205cea780aa8ea451e0d17cd37c
billiard/util.py
python
get_pdeathsig
()
Return the current value of the parent process death signal
Return the current value of the parent process death signal
[ "Return", "the", "current", "value", "of", "the", "parent", "process", "death", "signal" ]
def get_pdeathsig(): """ Return the current value of the parent process death signal """ if not sys.platform.startswith('linux'): # currently we support only linux platform. raise OSError() try: if 'cffi' in sys.modules: ffi = cffi.FFI() ffi.cdef("int prctl (int __option, ...);") arg = ffi.new("int *") C = ffi.dlopen(None) C.prctl(PR_GET_PDEATHSIG, arg) return arg[0] else: sig = ctypes.c_int() libc = ctypes.cdll.LoadLibrary("libc.so.6") libc.prctl(PR_GET_PDEATHSIG, ctypes.byref(sig)) return sig.value except Exception: raise OSError()
[ "def", "get_pdeathsig", "(", ")", ":", "if", "not", "sys", ".", "platform", ".", "startswith", "(", "'linux'", ")", ":", "# currently we support only linux platform.", "raise", "OSError", "(", ")", "try", ":", "if", "'cffi'", "in", "sys", ".", "modules", ":"...
https://github.com/celery/billiard/blob/269ef67354a3a205cea780aa8ea451e0d17cd37c/billiard/util.py#L173-L194
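A Linux-only usage sketch for the helper above; it assumes billiard is installed and simply reports the current parent-death signal (0 means none is set).

import signal
import sys

if sys.platform.startswith('linux'):
    from billiard.util import get_pdeathsig
    sig = get_pdeathsig()
    name = signal.Signals(sig).name if sig else 'not set'
    print('parent-death signal:', sig, name)
else:
    print('get_pdeathsig is only supported on Linux')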
WikidPad/WikidPad
558109638807bc76b4672922686e416ab2d5f79c
WikidPad/lib/aui/tabart.py
python
AuiCommandCapture.ProcessEvent
(self, event)
return False
Processes an event, searching event tables and calling zero or more suitable event handler function(s). :param `event`: the event to process. :note: Normally, your application would not call this function: it is called in the wxPython implementation to dispatch incoming user interface events to the framework (and application). However, you might need to call it if implementing new functionality (such as a new control) where you define new event types, as opposed to allowing the user to override functions. An instance where you might actually override the :meth:`ProcessEvent` function is where you want to direct event processing to event handlers not normally noticed by wxPython. For example, in the document/view architecture, documents and views are potential event handlers. When an event reaches a frame, :meth:`ProcessEvent` will need to be called on the associated document and view in case event handler functions are associated with these objects. The normal order of event table searching is as follows: 1. If the object is disabled (via a call to :meth:`EvtHandler.SetEvtHandlerEnabled`) the function skips to step (6). 2. If the object is a :class:`wx.Window`, :meth:`ProcessEvent` is recursively called on the window's :class:`wx.Validator`. If this returns ``True``, the function exits. 3. wxWidgets `SearchEventTable` is called for this event handler. If this fails, the base class table is tried, and so on until no more tables exist or an appropriate function was found, in which case the function exits. 4. The search is applied down the entire chain of event handlers (usually the chain has a length of one). If this succeeds, the function exits. 5. If the object is a :class:`wx.Window` and the event is a :class:`CommandEvent`, :meth:`ProcessEvent` is recursively applied to the parent window's event handler. If this returns ``True``, the function exits. 6. Finally, :meth:`ProcessEvent` is called on the :class:`App` object.
Processes an event, searching event tables and calling zero or more suitable event handler function(s).
[ "Processes", "an", "event", "searching", "event", "tables", "and", "calling", "zero", "or", "more", "suitable", "event", "handler", "function", "(", "s", ")", "." ]
def ProcessEvent(self, event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). :param `event`: the event to process. :note: Normally, your application would not call this function: it is called in the wxPython implementation to dispatch incoming user interface events to the framework (and application). However, you might need to call it if implementing new functionality (such as a new control) where you define new event types, as opposed to allowing the user to override functions. An instance where you might actually override the :meth:`ProcessEvent` function is where you want to direct event processing to event handlers not normally noticed by wxPython. For example, in the document/view architecture, documents and views are potential event handlers. When an event reaches a frame, :meth:`ProcessEvent` will need to be called on the associated document and view in case event handler functions are associated with these objects. The normal order of event table searching is as follows: 1. If the object is disabled (via a call to :meth:`EvtHandler.SetEvtHandlerEnabled`) the function skips to step (6). 2. If the object is a :class:`wx.Window`, :meth:`ProcessEvent` is recursively called on the window's :class:`wx.Validator`. If this returns ``True``, the function exits. 3. wxWidgets `SearchEventTable` is called for this event handler. If this fails, the base class table is tried, and so on until no more tables exist or an appropriate function was found, in which case the function exits. 4. The search is applied down the entire chain of event handlers (usually the chain has a length of one). If this succeeds, the function exits. 5. If the object is a :class:`wx.Window` and the event is a :class:`CommandEvent`, :meth:`ProcessEvent` is recursively applied to the parent window's event handler. If this returns ``True``, the function exits. 6. Finally, :meth:`ProcessEvent` is called on the :class:`App` object. """ if event.GetEventType() == wx.wxEVT_COMMAND_MENU_SELECTED: self._last_id = event.GetId() return True if self.GetNextHandler(): return self.GetNextHandler().ProcessEvent(event) return False
[ "def", "ProcessEvent", "(", "self", ",", "event", ")", ":", "if", "event", ".", "GetEventType", "(", ")", "==", "wx", ".", "wxEVT_COMMAND_MENU_SELECTED", ":", "self", ".", "_last_id", "=", "event", ".", "GetId", "(", ")", "return", "True", "if", "self", ...
https://github.com/WikidPad/WikidPad/blob/558109638807bc76b4672922686e416ab2d5f79c/WikidPad/lib/aui/tabart.py#L65-L110
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/cgi.py
python
FieldStorage.make_file
(self)
Overridable: return a readable & writable file. The file will be used as follows: - data is written to it - seek(0) - data is read from it The file is opened in binary mode for files, in text mode for other fields This version opens a temporary file for reading and writing, and immediately deletes (unlinks) it. The trick (on Unix!) is that the file can still be used, but it can't be opened by another process, and it will automatically be deleted when it is closed or when the current process terminates. If you want a more permanent file, you derive a class which overrides this method. If you want a visible temporary file that is nevertheless automatically deleted when the script terminates, try defining a __del__ method in a derived class which unlinks the temporary files you have created.
Overridable: return a readable & writable file.
[ "Overridable", ":", "return", "a", "readable", "&", "writable", "file", "." ]
def make_file(self): """Overridable: return a readable & writable file. The file will be used as follows: - data is written to it - seek(0) - data is read from it The file is opened in binary mode for files, in text mode for other fields This version opens a temporary file for reading and writing, and immediately deletes (unlinks) it. The trick (on Unix!) is that the file can still be used, but it can't be opened by another process, and it will automatically be deleted when it is closed or when the current process terminates. If you want a more permanent file, you derive a class which overrides this method. If you want a visible temporary file that is nevertheless automatically deleted when the script terminates, try defining a __del__ method in a derived class which unlinks the temporary files you have created. """ if self._binary_file: return tempfile.TemporaryFile("wb+") else: return tempfile.TemporaryFile("w+", encoding=self.encoding, newline = '\n')
[ "def", "make_file", "(", "self", ")", ":", "if", "self", ".", "_binary_file", ":", "return", "tempfile", ".", "TemporaryFile", "(", "\"wb+\"", ")", "else", ":", "return", "tempfile", ".", "TemporaryFile", "(", "\"w+\"", ",", "encoding", "=", "self", ".", ...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/cgi.py#L797-L825
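The docstring above explicitly invites derived classes to override make_file for more permanent storage; this is one hedged sketch of such an override that keeps uploads as named, visible files instead of unlinked temporaries. The class name is invented; _binary_file and encoding are the instance attributes used by the original method shown above.

import cgi
import tempfile

class VisibleUploadStorage(cgi.FieldStorage):
    def make_file(self):
        # Named files survive after close(), unlike the default unlinked temp files.
        if self._binary_file:
            return tempfile.NamedTemporaryFile("wb+", delete=False)
        return tempfile.NamedTemporaryFile("w+", encoding=self.encoding,
                                           newline='\n', delete=False)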
conan7882/adversarial-autoencoders
4960f252784a7dd2fbe203d7dad65938b57ee9c2
src/models/layers.py
python
transpose_conv
( filter_size, out_dim, layer_dict, inputs=None, out_shape=None, stride=2, padding='SAME', trainable=True, nl=tf.identity, init_w=None, init_b=tf.zeros_initializer(), wd=0, constant_init=False, name='dconv')
[]
def transpose_conv( filter_size, out_dim, layer_dict, inputs=None, out_shape=None, stride=2, padding='SAME', trainable=True, nl=tf.identity, init_w=None, init_b=tf.zeros_initializer(), wd=0, constant_init=False, name='dconv'): if inputs is None: inputs = layer_dict['cur_input'] stride = get_shape4D(stride) in_dim = inputs.get_shape().as_list()[-1] # TODO other ways to determine the output shape x_shape = tf.shape(inputs) # assume output shape is input_shape*stride if out_shape is None: out_shape = tf.stack([x_shape[0], tf.multiply(x_shape[1], stride[1]), tf.multiply(x_shape[2], stride[2]), out_dim]) filter_shape = get_shape2D(filter_size) + [out_dim, in_dim] with tf.variable_scope(name) as scope: if wd > 0: regularizer = tf.contrib.layers.l2_regularizer(scale=wd) else: regularizer=None weights = tf.get_variable('weights', filter_shape, initializer=init_w, trainable=trainable, regularizer=regularizer) biases = tf.get_variable('biases', [out_dim], initializer=init_b, trainable=trainable) output = tf.nn.conv2d_transpose(inputs, weights, output_shape=out_shape, strides=stride, padding=padding, name=scope.name) output = tf.nn.bias_add(output, biases) output.set_shape([None, None, None, out_dim]) output = nl(output, name='output') layer_dict['cur_input'] = output return output
[ "def", "transpose_conv", "(", "filter_size", ",", "out_dim", ",", "layer_dict", ",", "inputs", "=", "None", ",", "out_shape", "=", "None", ",", "stride", "=", "2", ",", "padding", "=", "'SAME'", ",", "trainable", "=", "True", ",", "nl", "=", "tf", ".",...
https://github.com/conan7882/adversarial-autoencoders/blob/4960f252784a7dd2fbe203d7dad65938b57ee9c2/src/models/layers.py#L112-L169
lovelylain/pyctp
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
example/ctp/option/ApiStruct.py
python
QryExchangeMarginRateAdjust.__init__
(self, BrokerID='', InstrumentID='', HedgeFlag=HF_Speculation)
[]
def __init__(self, BrokerID='', InstrumentID='', HedgeFlag=HF_Speculation): self.BrokerID = '' #Broker ID, char[11] self.InstrumentID = '' #Instrument ID, char[31] self.HedgeFlag = ''
[ "def", "__init__", "(", "self", ",", "BrokerID", "=", "''", ",", "InstrumentID", "=", "''", ",", "HedgeFlag", "=", "HF_Speculation", ")", ":", "self", ".", "BrokerID", "=", "''", "#Broker ID, char[11]", "self", ".", "InstrumentID", "=", "''", "#Instrument ID, char[31]",...
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/example/ctp/option/ApiStruct.py#L3259-L3262
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/build/lib.linux-x86_64-2.7/flaskbb/utils/populate.py
python
create_latest_db
(target="default@head")
Creates the database including the schema using SQLAlchemy's db.create_all method instead of going through all the database revisions. The revision will be set to 'head' which indicates the latest alembic revision. :param target: The target branch. Defaults to 'default@head'.
Creates the database including the schema using SQLAlchemy's db.create_all method instead of going through all the database revisions. The revision will be set to 'head' which indicates the latest alembic revision.
[ "Creates", "the", "database", "including", "the", "schema", "using", "SQLAlchemy", "s", "db", ".", "create_all", "method", "instead", "of", "going", "through", "all", "the", "database", "revisions", ".", "The", "revision", "will", "be", "set", "to", "head", ...
def create_latest_db(target="default@head"): """Creates the database including the schema using SQLAlchemy's db.create_all method instead of going through all the database revisions. The revision will be set to 'head' which indicates the latest alembic revision. :param target: The target branch. Defaults to 'default@head'. """ if not database_exists(db.engine.url): create_database(db.engine.url) db.create_all() alembic.stamp(target=target)
[ "def", "create_latest_db", "(", "target", "=", "\"default@head\"", ")", ":", "if", "not", "database_exists", "(", "db", ".", "engine", ".", "url", ")", ":", "create_database", "(", "db", ".", "engine", ".", "url", ")", "db", ".", "create_all", "(", ")", ...
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/build/lib.linux-x86_64-2.7/flaskbb/utils/populate.py#L390-L402
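A hedged sketch of calling the helper above from a script; it assumes a FlaskBB application context is active so that the db and alembic objects it relies on are bound to an app, and that the default config resolves a database URL.

from flaskbb.app import create_app                      # assumption: stock FlaskBB factory
from flaskbb.utils.populate import create_latest_db

app = create_app()
with app.app_context():
    create_latest_db()                       # creates the schema and stamps alembic
    # equivalent with an explicit target branch:
    # create_latest_db(target="default@head")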
ScrumDoLLC/ScrumDo
641729480c79d706e63bb6f1e15c5a9eac11ba6b
scrumdo-web/apps/django_evolution/mutations.py
python
SQLMutation.simulate
(self, app_label, proj_sig, database=None)
SQL mutations cannot be simulated unless an update function is provided
SQL mutations cannot be simulated unless an update function is provided
[ "SQL", "mutations", "cannot", "be", "simulated", "unless", "an", "update", "function", "is", "provided" ]
def simulate(self, app_label, proj_sig, database=None): """SQL mutations cannot be simulated unless an update function is provided""" if callable(self.update_func): self.update_func(app_label, proj_sig) else: raise CannotSimulate('Cannot simulate SQLMutations')
[ "def", "simulate", "(", "self", ",", "app_label", ",", "proj_sig", ",", "database", "=", "None", ")", ":", "if", "callable", "(", "self", ".", "update_func", ")", ":", "self", ".", "update_func", "(", "app_label", ",", "proj_sig", ")", "else", ":", "ra...
https://github.com/ScrumDoLLC/ScrumDo/blob/641729480c79d706e63bb6f1e15c5a9eac11ba6b/scrumdo-web/apps/django_evolution/mutations.py#L288-L295
home-assistant-libs/pytradfri
ef2614b0ccdf628abc3b9559dc00b95ec6e4bd72
pytradfri/gateway.py
python
GatewayInfo.id
(self)
return self.raw.get(ATTR_GATEWAY_ID)
Return the gateway id.
Return the gateway id.
[ "Return", "the", "gateway", "id", "." ]
def id(self): """Return the gateway id.""" return self.raw.get(ATTR_GATEWAY_ID)
[ "def", "id", "(", "self", ")", ":", "return", "self", ".", "raw", ".", "get", "(", "ATTR_GATEWAY_ID", ")" ]
https://github.com/home-assistant-libs/pytradfri/blob/ef2614b0ccdf628abc3b9559dc00b95ec6e4bd72/pytradfri/gateway.py#L243-L245
snorkel-team/snorkel
942e8a7b3af2bf108ef82c920bc7c2ddcbf9eda7
snorkel/augmentation/apply/pandas.py
python
PandasTFApplier.apply
(self, df: pd.DataFrame, progress_bar: bool = True)
return pd.concat(x_transformed, axis=1).T.infer_objects()
Augment a Pandas DataFrame of data points using TFs and policy. Parameters ---------- df Pandas DataFrame containing data points to be transformed progress_bar Display a progress bar? Returns ------- pd.DataFrame Pandas DataFrame of data points in augmented data set
Augment a Pandas DataFrame of data points using TFs and policy.
[ "Augment", "a", "Pandas", "DataFrame", "of", "data", "points", "using", "TFs", "and", "policy", "." ]
def apply(self, df: pd.DataFrame, progress_bar: bool = True) -> pd.DataFrame: """Augment a Pandas DataFrame of data points using TFs and policy. Parameters ---------- df Pandas DataFrame containing data points to be transformed progress_bar Display a progress bar? Returns ------- pd.DataFrame Pandas DataFrame of data points in augmented data set """ x_transformed: List[pd.Series] = [] for _, x in tqdm(df.iterrows(), total=len(df), disable=(not progress_bar)): x_transformed.extend(self._apply_policy_to_data_point(x)) return pd.concat(x_transformed, axis=1).T.infer_objects()
[ "def", "apply", "(", "self", ",", "df", ":", "pd", ".", "DataFrame", ",", "progress_bar", ":", "bool", "=", "True", ")", "->", "pd", ".", "DataFrame", ":", "x_transformed", ":", "List", "[", "pd", ".", "Series", "]", "=", "[", "]", "for", "_", ",...
https://github.com/snorkel-team/snorkel/blob/942e8a7b3af2bf108ef82c920bc7c2ddcbf9eda7/snorkel/augmentation/apply/pandas.py#L47-L65
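A compact usage sketch for the applier above, with a toy transformation function and a random policy; the text column and the TF are made up, but the classes come from snorkel.augmentation as documented.

import pandas as pd
from snorkel.augmentation import PandasTFApplier, RandomPolicy, transformation_function

@transformation_function()
def reverse_words(x):
    # Toy TF: reverse the word order of the text field.
    x.text = " ".join(x.text.split()[::-1])
    return x

df = pd.DataFrame({"text": ["hello world", "data augmentation with snorkel"]})
policy = RandomPolicy(1, sequence_length=1, n_per_original=1, keep_original=True)
applier = PandasTFApplier([reverse_words], policy)
df_augmented = applier.apply(df, progress_bar=False)
print(df_augmented)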
w3h/isf
6faf0a3df185465ec17369c90ccc16e2a03a1870
lib/thirdparty/scapy/packet.py
python
Packet.sprintf
(self, fmt, relax=1)
return s
sprintf(format, [relax=1]) -> str where format is a string that can include directives. A directive begins and ends by % and has the following format %[fmt[r],][cls[:nb].]field%. fmt is a classic printf directive, "r" can be appended for raw substitution (ex: IP.flags=0x18 instead of SA), nb is the number of the layer we want (ex: for IP/IP packets, IP:2.src is the src of the upper IP layer). Special case : "%.time%" is the creation time. Ex : p.sprintf("%.time% %-15s,IP.src% -> %-15s,IP.dst% %IP.chksum% " "%03xr,IP.proto% %r,TCP.flags%") Moreover, the format string can include conditionnal statements. A conditionnal statement looks like : {layer:string} where layer is a layer name, and string is the string to insert in place of the condition if it is true, i.e. if layer is present. If layer is preceded by a "!", the result si inverted. Conditions can be imbricated. A valid statement can be : p.sprintf("This is a{TCP: TCP}{UDP: UDP}{ICMP:n ICMP} packet") p.sprintf("{IP:%IP.dst% {ICMP:%ICMP.type%}{TCP:%TCP.dport%}}") A side effect is that, to obtain "{" and "}" characters, you must use "%(" and "%)".
sprintf(format, [relax=1]) -> str where format is a string that can include directives. A directive begins and ends by % and has the following format %[fmt[r],][cls[:nb].]field%.
[ "sprintf", "(", "format", "[", "relax", "=", "1", "]", ")", "-", ">", "str", "where", "format", "is", "a", "string", "that", "can", "include", "directives", ".", "A", "directive", "begins", "and", "ends", "by", "%", "and", "has", "the", "following", ...
def sprintf(self, fmt, relax=1): """sprintf(format, [relax=1]) -> str where format is a string that can include directives. A directive begins and ends by % and has the following format %[fmt[r],][cls[:nb].]field%. fmt is a classic printf directive, "r" can be appended for raw substitution (ex: IP.flags=0x18 instead of SA), nb is the number of the layer we want (ex: for IP/IP packets, IP:2.src is the src of the upper IP layer). Special case : "%.time%" is the creation time. Ex : p.sprintf("%.time% %-15s,IP.src% -> %-15s,IP.dst% %IP.chksum% " "%03xr,IP.proto% %r,TCP.flags%") Moreover, the format string can include conditionnal statements. A conditionnal statement looks like : {layer:string} where layer is a layer name, and string is the string to insert in place of the condition if it is true, i.e. if layer is present. If layer is preceded by a "!", the result si inverted. Conditions can be imbricated. A valid statement can be : p.sprintf("This is a{TCP: TCP}{UDP: UDP}{ICMP:n ICMP} packet") p.sprintf("{IP:%IP.dst% {ICMP:%ICMP.type%}{TCP:%TCP.dport%}}") A side effect is that, to obtain "{" and "}" characters, you must use "%(" and "%)". """ escape = { "%": "%", "(": "{", ")": "}" } # Evaluate conditions while "{" in fmt: i = fmt.rindex("{") j = fmt[i+1:].index("}") cond = fmt[i+1:i+j+1] k = cond.find(":") if k < 0: raise Scapy_Exception("Bad condition in format string: [%s] (read sprintf doc!)"%cond) cond,format = cond[:k],cond[k+1:] res = False if cond[0] == "!": res = True cond = cond[1:] if self.haslayer(cond): res = not res if not res: format = "" fmt = fmt[:i]+format+fmt[i+j+2:] # Evaluate directives s = "" while "%" in fmt: i = fmt.index("%") s += fmt[:i] fmt = fmt[i+1:] if fmt and fmt[0] in escape: s += escape[fmt[0]] fmt = fmt[1:] continue try: i = fmt.index("%") sfclsfld = fmt[:i] fclsfld = sfclsfld.split(",") if len(fclsfld) == 1: f = "s" clsfld = fclsfld[0] elif len(fclsfld) == 2: f,clsfld = fclsfld else: raise Scapy_Exception if "." in clsfld: cls,fld = clsfld.split(".") else: cls = self.__class__.__name__ fld = clsfld num = 1 if ":" in cls: cls,num = cls.split(":") num = int(num) fmt = fmt[i+1:] except: raise Scapy_Exception("Bad format string [%%%s%s]" % (fmt[:25], fmt[25:] and "...")) else: if fld == "time": val = time.strftime("%H:%M:%S.%%06i", time.localtime(self.time)) % int((self.time-int(self.time))*1000000) elif cls == self.__class__.__name__ and hasattr(self, fld): if num > 1: val = self.payload.sprintf("%%%s,%s:%s.%s%%" % (f,cls,num-1,fld), relax) f = "s" elif f[-1] == "r": # Raw field value val = getattr(self,fld) f = f[:-1] if not f: f = "s" else: val = getattr(self,fld) if fld in self.fieldtype: val = self.fieldtype[fld].i2repr(self,val) else: val = self.payload.sprintf("%%%s%%" % sfclsfld, relax) f = "s" s += ("%"+f) % val s += fmt return s
[ "def", "sprintf", "(", "self", ",", "fmt", ",", "relax", "=", "1", ")", ":", "escape", "=", "{", "\"%\"", ":", "\"%\"", ",", "\"(\"", ":", "\"{\"", ",", "\")\"", ":", "\"}\"", "}", "# Evaluate conditions", "while", "\"{\"", "in", "fmt", ":", "i", "...
https://github.com/w3h/isf/blob/6faf0a3df185465ec17369c90ccc16e2a03a1870/lib/thirdparty/scapy/packet.py#L874-L977
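A tiny usage sketch of sprintf on a hand-built packet, following the directive and conditional syntax the docstring describes; it assumes Scapy is installed and uses a documentation-range address.

from scapy.all import IP, TCP

pkt = IP(dst="192.0.2.1") / TCP(dport=80, flags="S")
# %layer.field% substitutes a field; {TCP:...} only renders when a TCP layer is present.
print(pkt.sprintf("%IP.src% -> %IP.dst% {TCP:dport=%TCP.dport% flags=%TCP.flags%}"))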
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit /tools/inject/plugins/generic/takeover.py
python
Takeover.osSmb
(self)
[]
def osSmb(self): self.checkDbmsOs() if not Backend.isOs(OS.WINDOWS): errMsg = "the back-end DBMS underlying operating system is " errMsg += "not Windows: it is not possible to perform the SMB " errMsg += "relay attack" raise SqlmapUnsupportedDBMSException(errMsg) if not isStackingAvailable() and not conf.direct: if Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.MSSQL): errMsg = "on this back-end DBMS it is only possible to " errMsg += "perform the SMB relay attack if stacked " errMsg += "queries are supported" raise SqlmapUnsupportedDBMSException(errMsg) elif Backend.isDbms(DBMS.MYSQL): debugMsg = "since stacked queries are not supported, " debugMsg += "sqlmap is going to perform the SMB relay " debugMsg += "attack via inference blind SQL injection" logger.debug(debugMsg) printWarn = True warnMsg = "it is unlikely that this attack will be successful " if Backend.isDbms(DBMS.MYSQL): warnMsg += "because by default MySQL on Windows runs as " warnMsg += "Local System which is not a real user, it does " warnMsg += "not send the NTLM session hash when connecting to " warnMsg += "a SMB service" elif Backend.isDbms(DBMS.PGSQL): warnMsg += "because by default PostgreSQL on Windows runs " warnMsg += "as postgres user which is a real user of the " warnMsg += "system, but not within the Administrators group" elif Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): warnMsg += "because often Microsoft SQL Server %s " % Backend.getVersion() warnMsg += "runs as Network Service which is not a real user, " warnMsg += "it does not send the NTLM session hash when " warnMsg += "connecting to a SMB service" else: printWarn = False if printWarn: logger.warn(warnMsg) self.smb()
[ "def", "osSmb", "(", "self", ")", ":", "self", ".", "checkDbmsOs", "(", ")", "if", "not", "Backend", ".", "isOs", "(", "OS", ".", "WINDOWS", ")", ":", "errMsg", "=", "\"the back-end DBMS underlying operating system is \"", "errMsg", "+=", "\"not Windows: it is n...
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit /tools/inject/plugins/generic/takeover.py#L271-L319
Yelp/elastalert
1dc4f30f30d39a689f419ce19c7e2e4d67a50be3
elastalert/ruletypes.py
python
SpikeRule.add_count_data
(self, data)
Add count data to the rule. Data should be of the form {ts: count}.
Add count data to the rule. Data should be of the form {ts: count}.
[ "Add", "count", "data", "to", "the", "rule", ".", "Data", "should", "be", "of", "the", "form", "{", "ts", ":", "count", "}", "." ]
def add_count_data(self, data): """ Add count data to the rule. Data should be of the form {ts: count}. """ if len(data) > 1: raise EAException('add_count_data can only accept one count at a time') for ts, count in data.items(): self.handle_event({self.ts_field: ts}, count, 'all')
[ "def", "add_count_data", "(", "self", ",", "data", ")", ":", "if", "len", "(", "data", ")", ">", "1", ":", "raise", "EAException", "(", "'add_count_data can only accept one count at a time'", ")", "for", "ts", ",", "count", "in", "data", ".", "items", "(", ...
https://github.com/Yelp/elastalert/blob/1dc4f30f30d39a689f419ce19c7e2e4d67a50be3/elastalert/ruletypes.py#L401-L406
sergioburdisso/pyss3
70c37853f3f56a60c3df9b94b678ca3f0db843de
pyss3/util.py
python
is_a_collection
(o)
return hasattr(o, "__getitem__") and ((PY2 and not isinstance(o, basestring)) or (not PY2 and not isinstance(o, (str, bytes))))
Return True when the object ``o`` is a collection.
Return True when the object ``o`` is a collection.
[ "Return", "True", "when", "the", "object", "o", "is", "a", "collection", "." ]
def is_a_collection(o): """Return True when the object ``o`` is a collection.""" return hasattr(o, "__getitem__") and ((PY2 and not isinstance(o, basestring)) or (not PY2 and not isinstance(o, (str, bytes))))
[ "def", "is_a_collection", "(", "o", ")", ":", "return", "hasattr", "(", "o", ",", "\"__getitem__\"", ")", "and", "(", "(", "PY2", "and", "not", "isinstance", "(", "o", ",", "basestring", ")", ")", "or", "(", "not", "PY2", "and", "not", "isinstance", ...
https://github.com/sergioburdisso/pyss3/blob/70c37853f3f56a60c3df9b94b678ca3f0db843de/pyss3/util.py#L2348-L2351
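A few behavioural checks of the predicate above under Python 3 semantics: strings and bytes support indexing but are excluded explicitly. This assumes the helper is importable from pyss3.util, where the record places it.

from pyss3.util import is_a_collection

assert is_a_collection([1, 2, 3])        # lists index and are not str/bytes
assert is_a_collection(("a", "b"))       # tuples too
assert is_a_collection({"k": "v"})       # dicts define __getitem__
assert not is_a_collection("abc")        # str is excluded explicitly
assert not is_a_collection(b"abc")       # bytes is excluded explicitly
assert not is_a_collection(42)           # ints have no __getitem__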
robotlearn/pyrobolearn
9cd7c060723fda7d2779fa255ac998c2c82b8436
pyrobolearn/simulators/middlewares/robots/ur5.py
python
UR5ROSMiddleware.get_pid
(self, joint_ids)
Get the PID coefficients associated to the given joint ids. Args: joint_ids (list[int]): list of unique joint ids. Returns: list[np.array[float[3]]]: list of PID coefficients for each joint.
Get the PID coefficients associated to the given joint ids.
[ "Get", "the", "PID", "coefficients", "associated", "to", "the", "given", "joint", "ids", "." ]
def get_pid(self, joint_ids): """ Get the PID coefficients associated to the given joint ids. Args: joint_ids (list[int]): list of unique joint ids. Returns: list[np.array[float[3]]]: list of PID coefficients for each joint. """ pass
[ "def", "get_pid", "(", "self", ",", "joint_ids", ")", ":", "pass" ]
https://github.com/robotlearn/pyrobolearn/blob/9cd7c060723fda7d2779fa255ac998c2c82b8436/pyrobolearn/simulators/middlewares/robots/ur5.py#L279-L289
brightmart/slot_filling_intent_joint_model
06ee6932bca2e07b3667e7edbef878a79cdcf2d1
joint_model_knowl_v3_bi_directional_cnn_tmall/a1_data_util.py
python
index_sentence_with_vocabulary
(sentence,word2id,sequence_length=None,knowledge_path=None)
return index_list
index sentence with vocabulary, return list of index
index sentence with vocabulary, return list of index
[ "index", "sentence", "with", "vocabulary", "return", "list", "of", "index" ]
def index_sentence_with_vocabulary(sentence,word2id,sequence_length=None,knowledge_path=None): """index sentence with vocabulary, return list of index""" #print("index_sentence_with_vocabulary:",knowledge_path) result_list=tokenize_sentence(sentence,knowledge_path=knowledge_path) result_list=result_list[0:sequence_length] #truncate unk_id=word2id[UNK] index_list=[word2id[PAD]]*sequence_length #pad for i,element in enumerate(result_list): index_list[i]=word2id.get(element,unk_id) #print("####index_sentence_with_vocabulary.sentence:",sentence);print(index_list) return index_list
[ "def", "index_sentence_with_vocabulary", "(", "sentence", ",", "word2id", ",", "sequence_length", "=", "None", ",", "knowledge_path", "=", "None", ")", ":", "#print(\"index_sentence_with_vocabulary:\",knowledge_path)", "result_list", "=", "tokenize_sentence", "(", "sentence...
https://github.com/brightmart/slot_filling_intent_joint_model/blob/06ee6932bca2e07b3667e7edbef878a79cdcf2d1/joint_model_knowl_v3_bi_directional_cnn_tmall/a1_data_util.py#L315-L325
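A simplified stand-alone sketch of the pad/truncate/UNK scheme used above; the real function additionally tokenises the raw sentence via tokenize_sentence and an optional knowledge file, which are omitted here. The vocabulary and tokens are made up.

PAD, UNK = "_PAD", "_UNK"
word2id = {PAD: 0, UNK: 1, "turn": 2, "on": 3, "light": 4}

def index_tokens(tokens, word2id, sequence_length):
    tokens = tokens[:sequence_length]                      # truncate
    index_list = [word2id[PAD]] * sequence_length          # pad to fixed length
    for i, token in enumerate(tokens):
        index_list[i] = word2id.get(token, word2id[UNK])   # UNK for out-of-vocabulary
    return index_list

assert index_tokens(["turn", "on", "the", "light"], word2id, 6) == [2, 3, 1, 4, 0, 0]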
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/Python-2.7.9/Lib/idlelib/RemoteDebugger.py
python
IdbProxy.run
(self, cmd, locals)
[]
def run(self, cmd, locals): # Ignores locals on purpose! seq = self.conn.asyncqueue(self.oid, "run", (cmd,), {}) self.shell.interp.active_seq = seq
[ "def", "run", "(", "self", ",", "cmd", ",", "locals", ")", ":", "# Ignores locals on purpose!", "seq", "=", "self", ".", "conn", ".", "asyncqueue", "(", "self", ".", "oid", ",", "\"run\"", ",", "(", "cmd", ",", ")", ",", "{", "}", ")", "self", ".",...
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/Python-2.7.9/Lib/idlelib/RemoteDebugger.py#L300-L303
tartiflette/tartiflette
e292c28ed4fa279ecedb8980fc3741965bd28c87
tartiflette/language/parsers/lark/transformers/converters.py
python
lark_to_non_null_type_node
(tree: "Tree")
return NonNullTypeNode( type=tree.children[0].value, location=lark_to_location_node(tree.meta) )
Creates and returns a NonNullTypeNode instance extracted from the parsing of the tree instance. :param tree: the Tree to parse in order to extract the proper node :type tree: Tree :return: a NonNullTypeNode instance extracted from the parsing of the tree :rtype: NonNullTypeNode
Creates and returns a NonNullTypeNode instance extracted from the parsing of the tree instance. :param tree: the Tree to parse in order to extract the proper node :type tree: Tree :return: a NonNullTypeNode instance extracted from the parsing of the tree :rtype: NonNullTypeNode
[ "Creates", "and", "returns", "a", "NonNullTypeNode", "instance", "extracted", "from", "the", "parsing", "of", "the", "tree", "instance", ".", ":", "param", "tree", ":", "the", "Tree", "to", "parse", "in", "order", "to", "extract", "the", "proper", "node", ...
def lark_to_non_null_type_node(tree: "Tree") -> "NonNullTypeNode": """ Creates and returns a NonNullTypeNode instance extracted from the parsing of the tree instance. :param tree: the Tree to parse in order to extract the proper node :type tree: Tree :return: a NonNullTypeNode instance extracted from the parsing of the tree :rtype: NonNullTypeNode """ return NonNullTypeNode( type=tree.children[0].value, location=lark_to_location_node(tree.meta) )
[ "def", "lark_to_non_null_type_node", "(", "tree", ":", "\"Tree\"", ")", "->", "\"NonNullTypeNode\"", ":", "return", "NonNullTypeNode", "(", "type", "=", "tree", ".", "children", "[", "0", "]", ".", "value", ",", "location", "=", "lark_to_location_node", "(", "...
https://github.com/tartiflette/tartiflette/blob/e292c28ed4fa279ecedb8980fc3741965bd28c87/tartiflette/language/parsers/lark/transformers/converters.py#L461-L472
mikecrittenden/zen-coding-gedit
49966219b1e9b7a1d0d8b4def6a32b6c386b8041
zencoding/filters/haml.py
python
make_attributes_string
(tag, profile)
return attrs
Creates HTML attributes string from tag according to profile settings @type tag: ZenNode @type profile: dict
Creates HTML attributes string from tag according to profile settings
[ "Creates", "HTML", "attributes", "string", "from", "tag", "according", "to", "profile", "settings" ]
def make_attributes_string(tag, profile): """ Creates HTML attributes string from tag according to profile settings @type tag: ZenNode @type profile: dict """ # make attribute string attrs = '' attr_quote = profile['attr_quotes'] == 'single' and "'" or '"' cursor = profile['place_cursor'] and zen_coding.get_caret_placeholder() or '' # use short notation for ID and CLASS attributes for a in tag.attributes: name_lower = a['name'].lower() if name_lower == 'id': attrs += '#' + (a['value'] or cursor) elif name_lower == 'class': attrs += '.' + (a['value'] or cursor) other_attrs = [] # process other attributes for a in tag.attributes: name_lower = a['name'].lower() if name_lower != 'id' and name_lower != 'class': attr_name = profile['attr_case'] == 'upper' and a['name'].upper() or name_lower other_attrs.append(':' + attr_name + ' => ' + attr_quote + (a['value'] or cursor) + attr_quote) if other_attrs: attrs += '{' + ', '.join(other_attrs) + '}' return attrs
[ "def", "make_attributes_string", "(", "tag", ",", "profile", ")", ":", "# make attribute string", "attrs", "=", "''", "attr_quote", "=", "profile", "[", "'attr_quotes'", "]", "==", "'single'", "and", "\"'\"", "or", "'\"'", "cursor", "=", "profile", "[", "'plac...
https://github.com/mikecrittenden/zen-coding-gedit/blob/49966219b1e9b7a1d0d8b4def6a32b6c386b8041/zencoding/filters/haml.py#L13-L44
respeaker/get_started_with_respeaker
ec859759fcec7e683a5e09328a8ea307046f353d
files/usr/lib/python2.7/site-packages/tornado/auth.py
python
TwitterMixin.authenticate_redirect
(self, callback_uri=None)
Just like `~OAuthMixin.authorize_redirect`, but auto-redirects if authorized. This is generally the right interface to use if you are using Twitter for single-sign on.
Just like `~OAuthMixin.authorize_redirect`, but auto-redirects if authorized.
[ "Just", "like", "~OAuthMixin", ".", "authorize_redirect", "but", "auto", "-", "redirects", "if", "authorized", "." ]
def authenticate_redirect(self, callback_uri=None): """Just like `~OAuthMixin.authorize_redirect`, but auto-redirects if authorized. This is generally the right interface to use if you are using Twitter for single-sign on. """ http = self.get_auth_http_client() http.fetch(self._oauth_request_token_url(callback_uri=callback_uri), self.async_callback( self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None))
[ "def", "authenticate_redirect", "(", "self", ",", "callback_uri", "=", "None", ")", ":", "http", "=", "self", ".", "get_auth_http_client", "(", ")", "http", ".", "fetch", "(", "self", ".", "_oauth_request_token_url", "(", "callback_uri", "=", "callback_uri", "...
https://github.com/respeaker/get_started_with_respeaker/blob/ec859759fcec7e683a5e09328a8ea307046f353d/files/usr/lib/python2.7/site-packages/tornado/auth.py#L595-L604
anson0910/CNN_face_detection
62cf7ea5a737b758095a9b7e7a07760cb4f306df
face_detection/face_48_fddb_fullconv.py
python
cal_face_24c
(caffe_img, rectangles)
return result
:param caffe_image: image in caffe style to detect faces :param rectangles: rectangles in form [x11, y11, x12, y12, confidence, current_scale] :return: rectangles after calibration
:param caffe_image: image in caffe style to detect faces :param rectangles: rectangles in form [x11, y11, x12, y12, confidence, current_scale] :return: rectangles after calibration
[ ":", "param", "caffe_image", ":", "image", "in", "caffe", "style", "to", "detect", "faces", ":", "param", "rectangles", ":", "rectangles", "in", "form", "[", "x11", "y11", "x12", "y12", "confidence", "current_scale", "]", ":", "return", ":", "rectangles", ...
def cal_face_24c(caffe_img, rectangles): ''' :param caffe_image: image in caffe style to detect faces :param rectangles: rectangles in form [x11, y11, x12, y12, confidence, current_scale] :return: rectangles after calibration ''' height, width, channels = caffe_img.shape result = [] for cur_rectangle in rectangles: original_x1 = cur_rectangle[0] original_y1 = cur_rectangle[1] original_x2 = cur_rectangle[2] original_y2 = cur_rectangle[3] original_w = original_x2 - original_x1 original_h = original_y2 - original_y1 cropped_caffe_img = caffe_img[original_y1:original_y2, original_x1:original_x2] # crop image output = net_24_cal.predict([cropped_caffe_img]) # predict through caffe prediction = output[0] # (44, 1) ndarray threshold = 0.1 indices = np.nonzero(prediction > threshold)[0] # ndarray of indices where prediction is larger than threshold number_of_cals = len(indices) # number of calibrations larger than threshold if number_of_cals == 0: # if no calibration is needed, check next rectangle result.append(cur_rectangle) continue total_s_change = 0 total_x_change = 0 total_y_change = 0 for current_cal in range(number_of_cals): # accumulate changes, and calculate average cal_label = int(indices[current_cal]) # should be number in 0~44 if (cal_label >= 0) and (cal_label <= 8): # decide s change total_s_change += 0.83 elif (cal_label >= 9) and (cal_label <= 17): total_s_change += 0.91 elif (cal_label >= 18) and (cal_label <= 26): total_s_change += 1.0 elif (cal_label >= 27) and (cal_label <= 35): total_s_change += 1.10 else: total_s_change += 1.21 if cal_label % 9 <= 2: # decide x change total_x_change += -0.17 elif (cal_label % 9 >= 6) and (cal_label % 9 <= 8): # ignore case when 3<=x<=5, since adding 0 doesn't change total_x_change += 0.17 if cal_label % 3 == 0: # decide y change total_y_change += -0.17 elif cal_label % 3 == 2: # ignore case when 1, since adding 0 doesn't change total_y_change += 0.17 s_change = total_s_change / number_of_cals # calculate average x_change = total_x_change / number_of_cals y_change = total_y_change / number_of_cals cur_result = cur_rectangle # inherit format and last two attributes from original rectangle cur_result[0] = int(max(0, original_x1 - original_w * x_change / s_change)) cur_result[1] = int(max(0, original_y1 - original_h * y_change / s_change)) cur_result[2] = int(min(width, cur_result[0] + original_w / s_change)) cur_result[3] = int(min(height, cur_result[1] + original_h / s_change)) result.append(cur_result) return result
[ "def", "cal_face_24c", "(", "caffe_img", ",", "rectangles", ")", ":", "height", ",", "width", ",", "channels", "=", "caffe_img", ".", "shape", "result", "=", "[", "]", "for", "cur_rectangle", "in", "rectangles", ":", "original_x1", "=", "cur_rectangle", "[",...
https://github.com/anson0910/CNN_face_detection/blob/62cf7ea5a737b758095a9b7e7a07760cb4f306df/face_detection/face_48_fddb_fullconv.py#L371-L441
KeplerGO/pyke
c74a62e48e3cf1ea367524656cd836283c5ddd94
kepcotrend.py
python
get_pcompsum
(pcomps,s)
return pcompsum
calculates the sum of basis vectors which are to be subtracted from the light curve to produce the corrected data.
calculates the sum of basis vectors which are to be subtracted from the light curve to produce the corrected data.
[ "calculates", "the", "sum", "of", "basis", "vectors", "which", "are", "to", "be", "subtracted", "from", "the", "light", "curve", "to", "produce", "the", "corrected", "data", "." ]
def get_pcompsum(pcomps,s): """ calculates the sum of basis vectors which are to be subtracted from the light curve to produce the corrected data. """ pcompsum = 0. for i in range(len(s)): pcompsum += s[i]*pcomps[i] return pcompsum
[ "def", "get_pcompsum", "(", "pcomps", ",", "s", ")", ":", "pcompsum", "=", "0.", "for", "i", "in", "range", "(", "len", "(", "s", ")", ")", ":", "pcompsum", "+=", "s", "[", "i", "]", "*", "pcomps", "[", "i", "]", "return", "pcompsum" ]
https://github.com/KeplerGO/pyke/blob/c74a62e48e3cf1ea367524656cd836283c5ddd94/kepcotrend.py#L256-L265
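A small numerical check of the weighted sum above; the helper body is repeated verbatim so the example runs on its own, and the basis vectors and coefficients are toy values.

import numpy as np

def get_pcompsum(pcomps, s):
    # Same logic as the helper above: coefficient-weighted sum of basis vectors.
    pcompsum = 0.
    for i in range(len(s)):
        pcompsum += s[i] * pcomps[i]
    return pcompsum

pcomps = np.array([[1.0, 0.0, 2.0],
                   [0.0, 1.0, 1.0]])
s = [0.5, 2.0]
# 0.5*[1,0,2] + 2.0*[0,1,1] == [0.5, 2.0, 3.0]
assert np.allclose(get_pcompsum(pcomps, s), [0.5, 2.0, 3.0])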
FSecureLABS/Jandroid
e31d0dab58a2bfd6ed8e0a387172b8bd7c893436
libs/platform-tools/platform-tools_windows/systrace/catapult/third_party/pyserial/serial/rfc2217.py
python
TelnetOption.__init__
(self, connection, name, option, send_yes, send_no, ack_yes, ack_no, initial_state, activation_callback=None)
\ Initialize option. :param connection: connection used to transmit answers :param name: a readable name for debug outputs :param send_yes: what to send when option is to be enabled. :param send_no: what to send when option is to be disabled. :param ack_yes: what to expect when remote agrees on option. :param ack_no: what to expect when remote disagrees on option. :param initial_state: options initialized with REQUESTED are tried to be enabled on startup. use INACTIVE for all others.
\ Initialize option. :param connection: connection used to transmit answers :param name: a readable name for debug outputs :param send_yes: what to send when option is to be enabled. :param send_no: what to send when option is to be disabled. :param ack_yes: what to expect when remote agrees on option. :param ack_no: what to expect when remote disagrees on option. :param initial_state: options initialized with REQUESTED are tried to be enabled on startup. use INACTIVE for all others.
[ "\\", "Initialize", "option", ".", ":", "param", "connection", ":", "connection", "used", "to", "transmit", "answers", ":", "param", "name", ":", "a", "readable", "name", "for", "debug", "outputs", ":", "param", "send_yes", ":", "what", "to", "send", "when...
def __init__(self, connection, name, option, send_yes, send_no, ack_yes, ack_no, initial_state, activation_callback=None): """\ Initialize option. :param connection: connection used to transmit answers :param name: a readable name for debug outputs :param send_yes: what to send when option is to be enabled. :param send_no: what to send when option is to be disabled. :param ack_yes: what to expect when remote agrees on option. :param ack_no: what to expect when remote disagrees on option. :param initial_state: options initialized with REQUESTED are tried to be enabled on startup. use INACTIVE for all others. """ self.connection = connection self.name = name self.option = option self.send_yes = send_yes self.send_no = send_no self.ack_yes = ack_yes self.ack_no = ack_no self.state = initial_state self.active = False self.activation_callback = activation_callback
[ "def", "__init__", "(", "self", ",", "connection", ",", "name", ",", "option", ",", "send_yes", ",", "send_no", ",", "ack_yes", ",", "ack_no", ",", "initial_state", ",", "activation_callback", "=", "None", ")", ":", "self", ".", "connection", "=", "connect...
https://github.com/FSecureLABS/Jandroid/blob/e31d0dab58a2bfd6ed8e0a387172b8bd7c893436/libs/platform-tools/platform-tools_windows/systrace/catapult/third_party/pyserial/serial/rfc2217.py#L228-L249
Tautulli/Tautulli
2410eb33805aaac4bd1c5dad0f71e4f15afaf742
lib/bleach/_vendor/parse.py
python
_checknetloc
(netloc)
[]
def _checknetloc(netloc): if not netloc or not any(ord(c) > 127 for c in netloc): return # looking for characters like \u2100 that expand to 'a/c' # IDNA uses NFKC equivalence, so normalize for this check import unicodedata n = netloc.replace('@', '') # ignore characters already included n = n.replace(':', '') # but not the surrounding text n = n.replace('#', '') n = n.replace('?', '') netloc2 = unicodedata.normalize('NFKC', n) if n == netloc2: return for c in '/?#@:': if c in netloc2: raise ValueError("netloc '" + netloc + "' contains invalid " + "characters under NFKC normalization")
[ "def", "_checknetloc", "(", "netloc", ")", ":", "if", "not", "netloc", "or", "not", "any", "(", "ord", "(", "c", ")", ">", "127", "for", "c", "in", "netloc", ")", ":", "return", "# looking for characters like \\u2100 that expand to 'a/c'", "# IDNA uses NFKC equi...
https://github.com/Tautulli/Tautulli/blob/2410eb33805aaac4bd1c5dad0f71e4f15afaf742/lib/bleach/_vendor/parse.py#L397-L413
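A quick illustration of why this check exists, assuming the _checknetloc definition above is in scope: U+2100 (℀) NFKC-normalizes to 'a/c', which would smuggle a '/' into the host part.

    import unicodedata

    print(unicodedata.normalize('NFKC', '\u2100'))   # prints 'a/c'

    _checknetloc('example.com')           # all ASCII: returns immediately
    _checknetloc('ex\u00e4mple.com')      # non-ASCII but unchanged by NFKC: returns

    try:
        _checknetloc('netloc\u2100.com')  # normalizes to 'netloca/c.com'
    except ValueError as exc:
        print(exc)                        # reports the invalid characters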
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/apps/reports/generic.py
python
GenericReportView.template_report
(self)
return original_template
[]
def template_report(self): original_template = self.report_template_path or "reports/async/basic.html" if self.is_rendered_as_email: self.context.update(original_template=original_template) return self.override_template return original_template
[ "def", "template_report", "(", "self", ")", ":", "original_template", "=", "self", ".", "report_template_path", "or", "\"reports/async/basic.html\"", "if", "self", ".", "is_rendered_as_email", ":", "self", ".", "context", ".", "update", "(", "original_template", "="...
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/reports/generic.py#L288-L293
IBM/lale
b4d6829c143a4735b06083a0e6c70d2cca244162
lale/search/search_space.py
python
SearchSpace.default
(self)
return self._default
Return an optional default value, if None. if not None, the default value should be in the search space
Return an optional default value, if None. if not None, the default value should be in the search space
[ "Return", "an", "optional", "default", "value", "if", "None", ".", "if", "not", "None", "the", "default", "value", "should", "be", "in", "the", "search", "space" ]
def default(self) -> Optional[Any]: """Return an optional default value, if None. if not None, the default value should be in the search space """ return self._default
[ "def", "default", "(", "self", ")", "->", "Optional", "[", "Any", "]", ":", "return", "self", ".", "_default" ]
https://github.com/IBM/lale/blob/b4d6829c143a4735b06083a0e6c70d2cca244162/lale/search/search_space.py#L51-L56
hyperledger/aries-cloudagent-python
2f36776e99f6053ae92eed8123b5b1b2e891c02a
aries_cloudagent/transport/queue/base.py
python
BaseMessageQueue.enqueue
(self, message)
Enqueue a message. Args: message: The message to add to the end of the queue Raises: asyncio.CancelledError if the queue has been stopped
Enqueue a message.
[ "Enqueue", "a", "message", "." ]
async def enqueue(self, message): """ Enqueue a message. Args: message: The message to add to the end of the queue Raises: asyncio.CancelledError if the queue has been stopped """
[ "async", "def", "enqueue", "(", "self", ",", "message", ")", ":" ]
https://github.com/hyperledger/aries-cloudagent-python/blob/2f36776e99f6053ae92eed8123b5b1b2e891c02a/aries_cloudagent/transport/queue/base.py#L11-L21
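A hedged sketch of the contract this abstract method documents (enqueue appends to the queue; a stopped queue raises asyncio.CancelledError). The class below is a stand-alone illustration, not the library's own queue implementation.

    import asyncio

    class DemoMessageQueue:
        """Illustrative queue honouring the enqueue contract described above."""

        def __init__(self):
            self._queue = asyncio.Queue()
            self._stopped = False

        def stop(self):
            self._stopped = True

        async def enqueue(self, message):
            if self._stopped:
                raise asyncio.CancelledError("queue has been stopped")
            await self._queue.put(message)

    async def main():
        queue = DemoMessageQueue()
        await queue.enqueue({"content": "hello"})
        print(queue._queue.qsize())   # 1

    asyncio.run(main())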
microsoft/azure-devops-python-api
451cade4c475482792cbe9e522c1fee32393139e
azure-devops/azure/devops/released/work_item_tracking/work_item_tracking_client.py
python
WorkItemTrackingClient.get_deleted_work_item_shallow_references
(self, project=None)
return self._deserialize('[WorkItemDeleteShallowReference]', self._unwrap_collection(response))
GetDeletedWorkItemShallowReferences. Gets a list of the IDs and the URLs of the deleted work items in the Recycle Bin. :param str project: Project ID or project name :rtype: [WorkItemDeleteShallowReference]
GetDeletedWorkItemShallowReferences. Gets a list of the IDs and the URLs of the deleted work items in the Recycle Bin. :param str project: Project ID or project name :rtype: [WorkItemDeleteShallowReference]
[ "GetDeletedWorkItemShallowReferences", ".", "Gets", "a", "list", "of", "the", "IDs", "and", "the", "URLs", "of", "the", "deleted", "work", "items", "in", "the", "Recycle", "Bin", ".", ":", "param", "str", "project", ":", "Project", "ID", "or", "proj...
def get_deleted_work_item_shallow_references(self, project=None): """GetDeletedWorkItemShallowReferences. Gets a list of the IDs and the URLs of the deleted work items in the Recycle Bin. :param str project: Project ID or project name :rtype: [WorkItemDeleteShallowReference] """ route_values = {} if project is not None: route_values['project'] = self._serialize.url('project', project, 'str') response = self._send(http_method='GET', location_id='b70d8d39-926c-465e-b927-b1bf0e5ca0e0', version='5.1', route_values=route_values) return self._deserialize('[WorkItemDeleteShallowReference]', self._unwrap_collection(response))
[ "def", "get_deleted_work_item_shallow_references", "(", "self", ",", "project", "=", "None", ")", ":", "route_values", "=", "{", "}", "if", "project", "is", "not", "None", ":", "route_values", "[", "'project'", "]", "=", "self", ".", "_serialize", ".", "url"...
https://github.com/microsoft/azure-devops-python-api/blob/451cade4c475482792cbe9e522c1fee32393139e/azure-devops/azure/devops/released/work_item_tracking/work_item_tracking_client.py#L566-L579
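A hedged end-to-end usage sketch: the organization URL, personal access token, and project name are placeholders, and the client is obtained through the connection factory the azure-devops package documents.

    from azure.devops.connection import Connection
    from msrest.authentication import BasicAuthentication

    # Placeholder credentials and organization -- replace with real values.
    credentials = BasicAuthentication('', 'personal-access-token')
    connection = Connection(base_url='https://dev.azure.com/your-organization',
                            creds=credentials)

    wit_client = connection.clients.get_work_item_tracking_client()
    deleted_refs = wit_client.get_deleted_work_item_shallow_references(project='MyProject')
    for ref in deleted_refs:
        print(ref.id, ref.url)   # each shallow reference carries the ID and URL of a deleted item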
IntelLabs/nlp-architect
60afd0dd1bfd74f01b4ac8f613cb484777b80284
solutions/trend_analysis/topic_extraction.py
python
write_folder_corpus_to_file
(corpus, writer)
Merge content of a folder into a single text file Args: corpus: A folder containing text files (String) writer: A file writer
Merge content of a folder into a single text file
[ "Merge", "content", "of", "a", "folder", "into", "a", "single", "text", "file" ]
def write_folder_corpus_to_file(corpus, writer): """ Merge content of a folder into a single text file Args: corpus: A folder containing text files (String) writer: A file writer """ for filename in os.listdir(corpus): try: file_path = str(path.join(corpus, filename)) with open(file_path, "r", encoding="utf-8") as f: lines = f.readlines() writer.writelines(lines) except Exception as e: logger.error("Error: %s. skipping file: %s", str(e), str(filename))
[ "def", "write_folder_corpus_to_file", "(", "corpus", ",", "writer", ")", ":", "for", "filename", "in", "os", ".", "listdir", "(", "corpus", ")", ":", "try", ":", "file_path", "=", "str", "(", "path", ".", "join", "(", "corpus", ",", "filename", ")", ")...
https://github.com/IntelLabs/nlp-architect/blob/60afd0dd1bfd74f01b4ac8f613cb484777b80284/solutions/trend_analysis/topic_extraction.py#L193-L208
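A small usage sketch, assuming the function above is in scope; the folder and output file names are made up.

    import logging
    logging.basicConfig(level=logging.INFO)

    corpus_dir = "data/articles"              # hypothetical folder of plain-text files
    with open("merged_corpus.txt", "w", encoding="utf-8") as writer:
        write_folder_corpus_to_file(corpus_dir, writer)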
facelessuser/ColorHelper
cfed17c35dbae4db49a14165ef222407c48a3014
lib/coloraide/spaces/hsl/__init__.py
python
HSL.s
(self, value: float)
Saturate or unsaturate the color by the given factor.
Saturate or unsaturate the color by the given factor.
[ "Saturate", "or", "unsaturate", "the", "color", "by", "the", "given", "factor", "." ]
def s(self, value: float) -> None: """Saturate or unsaturate the color by the given factor.""" self._coords[1] = self._handle_input(value)
[ "def", "s", "(", "self", ",", "value", ":", "float", ")", "->", "None", ":", "self", ".", "_coords", "[", "1", "]", "=", "self", ".", "_handle_input", "(", "value", ")" ]
https://github.com/facelessuser/ColorHelper/blob/cfed17c35dbae4db49a14165ef222407c48a3014/lib/coloraide/spaces/hsl/__init__.py#L96-L99
yongzhuo/Keras-TextClassification
640e3f44f90d9d8046546f7e1a93a29ebe5c8d30
keras_textclassification/m03_CharCNN/graph_yoon_kim.py
python
Highway.call
(self, x)
return value
[]
def call(self, x): dim = K.int_shape(x)[-1] transform_gate = self.dense_1(x) transform_gate = Activation("sigmoid")(transform_gate) carry_gate = Lambda(lambda x: 1.0 - x, output_shape=(dim,))(transform_gate) transformed_data = self.dense_2(x) transformed_data = Activation(self.activation)(transformed_data) transformed_gated = Multiply()([transform_gate, transformed_data]) identity_gated = Multiply()([carry_gate, x]) value = Add()([transformed_gated, identity_gated]) return value
[ "def", "call", "(", "self", ",", "x", ")", ":", "dim", "=", "K", ".", "int_shape", "(", "x", ")", "[", "-", "1", "]", "transform_gate", "=", "self", ".", "dense_1", "(", "x", ")", "transform_gate", "=", "Activation", "(", "\"sigmoid\"", ")", "(", ...
https://github.com/yongzhuo/Keras-TextClassification/blob/640e3f44f90d9d8046546f7e1a93a29ebe5c8d30/keras_textclassification/m03_CharCNN/graph_yoon_kim.py#L154-L164
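The call implements the highway gating y = t * h + (1 - t) * x, with t the sigmoid transform gate and h the transformed input. A tiny NumPy sketch of the same arithmetic, with the two dense layers replaced by made-up elementwise transforms:

    import numpy as np

    x = np.array([0.2, -1.0, 0.5])           # input features
    h = np.tanh(1.5 * x)                      # stand-in for dense_2 + activation
    t = 1.0 / (1.0 + np.exp(-2.0 * x))        # stand-in for dense_1 + sigmoid (transform gate)

    y = t * h + (1.0 - t) * x                 # transformed (gated) part plus identity (carry) part
    print(y)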
LiuChangFreeman/C--Compiler
99fd44982c54310e8eba4133653e3d9a7736a499
SyntaxParser/LR(0)/prettytable.py
python
PrettyTable._get_attributes
(self)
return self._attributes
A dictionary of HTML attribute name/type pairs to be included in the <Table> tag when printing HTML Arguments: attributes - dictionary of attributes
A dictionary of HTML attribute name/type pairs to be included in the <Table> tag when printing HTML
[ "A", "dictionary", "of", "HTML", "attribute", "name", "/", "type", "pairs", "to", "be", "included", "in", "the", "<Table", ">", "tag", "when", "printing", "HTML" ]
def _get_attributes(self): """A dictionary of HTML attribute name/type pairs to be included in the <Table> tag when printing HTML Arguments: attributes - dictionary of attributes""" return self._attributes
[ "def", "_get_attributes", "(", "self", ")", ":", "return", "self", ".", "_attributes" ]
https://github.com/LiuChangFreeman/C--Compiler/blob/99fd44982c54310e8eba4133653e3d9a7736a499/SyntaxParser/LR(0)/prettytable.py#L720-L726
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/lib-tk/Tkinter.py
python
Canvas.create_arc
(self, *args, **kw)
return self._create('arc', args, kw)
Create arc shaped region with coordinates x1,y1,x2,y2.
Create arc shaped region with coordinates x1,y1,x2,y2.
[ "Create", "arc", "shaped", "region", "with", "coordinates", "x1", "y1", "x2", "y2", "." ]
def create_arc(self, *args, **kw): """Create arc shaped region with coordinates x1,y1,x2,y2.""" return self._create('arc', args, kw)
[ "def", "create_arc", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "return", "self", ".", "_create", "(", "'arc'", ",", "args", ",", "kw", ")" ]
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/lib-tk/Tkinter.py#L2252-L2254
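A minimal usage sketch (the module is spelled tkinter on Python 3; the coordinates and options below are arbitrary): the four coordinates give the bounding box of the oval the arc is cut from, and start/extent select the slice in degrees.

    import tkinter as tk

    root = tk.Tk()
    canvas = tk.Canvas(root, width=200, height=200)
    canvas.pack()

    # Quarter arc inside the bounding box (20, 20)-(180, 180).
    arc_id = canvas.create_arc(20, 20, 180, 180, start=0, extent=90, fill="lightblue")

    root.mainloop()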
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/SQLAlchemy-1.3.17/lib/sqlalchemy/engine/default.py
python
DefaultExecutionContext._init_ddl
(cls, dialect, connection, dbapi_connection, compiled_ddl)
return self
Initialize execution context for a DDLElement construct.
Initialize execution context for a DDLElement construct.
[ "Initialize", "execution", "context", "for", "a", "DDLElement", "construct", "." ]
def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl): """Initialize execution context for a DDLElement construct.""" self = cls.__new__(cls) self.root_connection = connection self._dbapi_connection = dbapi_connection self.dialect = connection.dialect self.compiled = compiled = compiled_ddl self.isddl = True self.execution_options = compiled.execution_options if connection._execution_options: self.execution_options = dict(self.execution_options) self.execution_options.update(connection._execution_options) if not dialect.supports_unicode_statements: self.unicode_statement = util.text_type(compiled) self.statement = dialect._encoder(self.unicode_statement)[0] else: self.statement = self.unicode_statement = util.text_type(compiled) self.cursor = self.create_cursor() self.compiled_parameters = [] if dialect.positional: self.parameters = [dialect.execute_sequence_format()] else: self.parameters = [{}] return self
[ "def", "_init_ddl", "(", "cls", ",", "dialect", ",", "connection", ",", "dbapi_connection", ",", "compiled_ddl", ")", ":", "self", "=", "cls", ".", "__new__", "(", "cls", ")", "self", ".", "root_connection", "=", "connection", "self", ".", "_dbapi_connection...
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/SQLAlchemy-1.3.17/lib/sqlalchemy/engine/default.py#L721-L751
DrewNF/Tensorflow_Object_Tracking_Video
e183c292651fd635dba0e2236ee93b85da447d5f
INCEPTION/image_retraining/retrain.py
python
cache_bottlenecks
(sess, image_lists, image_dir, bottleneck_dir, jpeg_data_tensor, bottleneck_tensor)
Ensures all the training, testing, and validation bottlenecks are cached. Because we're likely to read the same image multiple times (if there are no distortions applied during training) it can speed things up a lot if we calculate the bottleneck layer values once for each image during preprocessing, and then just read those cached values repeatedly during training. Here we go through all the images we've found, calculate those values, and save them off. Args: sess: The current active TensorFlow Session. image_lists: Dictionary of training images for each label. image_dir: Root folder string of the subfolders containing the training images. bottleneck_dir: Folder string holding cached files of bottleneck values. jpeg_data_tensor: Input tensor for jpeg data from file. bottleneck_tensor: The penultimate output layer of the graph. Returns: Nothing.
Ensures all the training, testing, and validation bottlenecks are cached.
[ "Ensures", "all", "the", "training", "testing", "and", "validation", "bottlenecks", "are", "cached", "." ]
def cache_bottlenecks(sess, image_lists, image_dir, bottleneck_dir, jpeg_data_tensor, bottleneck_tensor): """Ensures all the training, testing, and validation bottlenecks are cached. Because we're likely to read the same image multiple times (if there are no distortions applied during training) it can speed things up a lot if we calculate the bottleneck layer values once for each image during preprocessing, and then just read those cached values repeatedly during training. Here we go through all the images we've found, calculate those values, and save them off. Args: sess: The current active TensorFlow Session. image_lists: Dictionary of training images for each label. image_dir: Root folder string of the subfolders containing the training images. bottleneck_dir: Folder string holding cached files of bottleneck values. jpeg_data_tensor: Input tensor for jpeg data from file. bottleneck_tensor: The penultimate output layer of the graph. Returns: Nothing. """ how_many_bottlenecks = 0 ensure_dir_exists(bottleneck_dir) for label_name, label_lists in image_lists.items(): for category in ['training', 'testing', 'validation']: category_list = label_lists[category] for index, unused_base_name in enumerate(category_list): get_or_create_bottleneck(sess, image_lists, label_name, index, image_dir, category, bottleneck_dir, jpeg_data_tensor, bottleneck_tensor) how_many_bottlenecks += 1 if how_many_bottlenecks % 100 == 0: print(str(how_many_bottlenecks) + ' bottleneck files created.')
[ "def", "cache_bottlenecks", "(", "sess", ",", "image_lists", ",", "image_dir", ",", "bottleneck_dir", ",", "jpeg_data_tensor", ",", "bottleneck_tensor", ")", ":", "how_many_bottlenecks", "=", "0", "ensure_dir_exists", "(", "bottleneck_dir", ")", "for", "label_name", ...
https://github.com/DrewNF/Tensorflow_Object_Tracking_Video/blob/e183c292651fd635dba0e2236ee93b85da447d5f/INCEPTION/image_retraining/retrain.py#L462-L496
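A hedged sketch of the calling pattern: it assumes the companion helpers from the same retrain.py script (create_image_lists and create_inception_graph) with their usual signatures, and uses placeholder directories; none of this is verified against this exact revision.

    import tensorflow as tf

    # Assumed helpers from the same script; directories and percentages are placeholders.
    image_lists = create_image_lists('photos', 10, 10)          # testing/validation percentages
    graph, bottleneck_tensor, jpeg_data_tensor, _ = create_inception_graph()

    with tf.Session(graph=graph) as sess:
        cache_bottlenecks(sess, image_lists, 'photos', '/tmp/bottleneck',
                          jpeg_data_tensor, bottleneck_tensor)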
fritzy/SleekXMPP
cc1d470397de768ffcc41d2ed5ac3118d19f09f5
examples/echo_client.py
python
EchoBot.start
(self, event)
Process the session_start event. Typical actions for the session_start event are requesting the roster and broadcasting an initial presence stanza. Arguments: event -- An empty dictionary. The session_start event does not provide any additional data.
Process the session_start event.
[ "Process", "the", "session_start", "event", "." ]
def start(self, event): """ Process the session_start event. Typical actions for the session_start event are requesting the roster and broadcasting an initial presence stanza. Arguments: event -- An empty dictionary. The session_start event does not provide any additional data. """ self.send_presence() self.get_roster()
[ "def", "start", "(", "self", ",", "event", ")", ":", "self", ".", "send_presence", "(", ")", "self", ".", "get_roster", "(", ")" ]
https://github.com/fritzy/SleekXMPP/blob/cc1d470397de768ffcc41d2ed5ac3118d19f09f5/examples/echo_client.py#L52-L66
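For context, a hedged sketch of how such a handler is wired up in SleekXMPP; the JID and password are placeholders.

    import sleekxmpp

    class MiniBot(sleekxmpp.ClientXMPP):
        def __init__(self, jid, password):
            super(MiniBot, self).__init__(jid, password)
            # Fire self.start once the XML streams are negotiated.
            self.add_event_handler("session_start", self.start)

        def start(self, event):
            self.send_presence()
            self.get_roster()

    bot = MiniBot('user@example.com', 'secret')   # placeholder credentials
    if bot.connect():
        bot.process(block=True)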
gammapy/gammapy
735b25cd5bbed35e2004d633621896dcd5295e8b
gammapy/datasets/map.py
python
MapDataset.npred_background
(self)
return background
Predicted background counts The predicted background counts depend on the parameters of the `FoVBackgroundModel` defined in the dataset. Returns ------- npred_background : `Map` Predicted counts from the background.
Predicted background counts
[ "Predicted", "background", "counts" ]
def npred_background(self): """Predicted background counts The predicted background counts depend on the parameters of the `FoVBackgroundModel` defined in the dataset. Returns ------- npred_background : `Map` Predicted counts from the background. """ background = self.background if self.background_model and background: if self._background_parameters_changed: values = self.background_model.evaluate_geom(geom=self.background.geom) if self._background_cached is None: self._background_cached = background * values else: self._background_cached.data = background.data * values.value self._background_cached.unit = background.unit return self._background_cached else: return background return background
[ "def", "npred_background", "(", "self", ")", ":", "background", "=", "self", ".", "background", "if", "self", ".", "background_model", "and", "background", ":", "if", "self", ".", "_background_parameters_changed", ":", "values", "=", "self", ".", "background_mod...
https://github.com/gammapy/gammapy/blob/735b25cd5bbed35e2004d633621896dcd5295e8b/gammapy/datasets/map.py#L399-L423
miguelgrinberg/python-socketio
4ee3649514b98c50cc0bf70d3f269389da52772d
src/socketio/client.py
python
Client._handle_eio_disconnect
(self)
Handle the Engine.IO disconnection event.
Handle the Engine.IO disconnection event.
[ "Handle", "the", "Engine", ".", "IO", "disconnection", "event", "." ]
def _handle_eio_disconnect(self): """Handle the Engine.IO disconnection event.""" self.logger.info('Engine.IO connection dropped') if self.connected: for n in self.namespaces: self._trigger_event('disconnect', namespace=n) self.namespaces = {} self.connected = False self.callbacks = {} self._binary_packet = None self.sid = None if self.eio.state == 'connected' and self.reconnection: self._reconnect_task = self.start_background_task( self._handle_reconnect)
[ "def", "_handle_eio_disconnect", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "'Engine.IO connection dropped'", ")", "if", "self", ".", "connected", ":", "for", "n", "in", "self", ".", "namespaces", ":", "self", ".", "_trigger_event", "(",...
https://github.com/miguelgrinberg/python-socketio/blob/4ee3649514b98c50cc0bf70d3f269389da52772d/src/socketio/client.py#L716-L729
Accenture/AmpliGraph
97a0bb6b6b74531cb4e2f71fc6c5a8b18a445cfa
ampligraph/latent_features/models/EmbeddingModel.py
python
EmbeddingModel._generate_corruptions_for_large_graphs
(self)
Corruption generator for large graph mode only. It generates corruptions in batches and also yields the corresponding entity embeddings.
Corruption generator for large graph mode only. It generates corruptions in batches and also yields the corresponding entity embeddings.
[ "Corruption", "generator", "for", "large", "graph", "mode", "only", ".", "It", "generates", "corruptions", "in", "batches", "and", "also", "yields", "the", "corresponding", "entity", "embeddings", "." ]
def _generate_corruptions_for_large_graphs(self): """Corruption generator for large graph mode only. It generates corruptions in batches and also yields the corresponding entity embeddings. """ corruption_entities = self.eval_config.get('corruption_entities', constants.DEFAULT_CORRUPTION_ENTITIES) if corruption_entities == 'all': all_entities_np = np.arange(len(self.ent_to_idx)) corruption_entities = all_entities_np elif isinstance(corruption_entities, np.ndarray): corruption_entities = corruption_entities else: msg = 'Invalid type for corruption entities.' logger.error(msg) raise ValueError(msg) entity_embeddings = np.empty(shape=(0, self.internal_k), dtype=np.float32) for i in range(self.corr_batches_count): all_ent = corruption_entities[i * self.corr_batch_size:(i + 1) * self.corr_batch_size] needed = (self.corr_batch_size - all_ent.shape[0]) large_number = np.zeros((needed, self.ent_emb_cpu.shape[1]), dtype=np.float32) + np.nan entity_embeddings = np.concatenate((self.ent_emb_cpu[all_ent, :], large_number), axis=0) all_ent = all_ent.reshape(-1, 1) yield all_ent, entity_embeddings
[ "def", "_generate_corruptions_for_large_graphs", "(", "self", ")", ":", "corruption_entities", "=", "self", ".", "eval_config", ".", "get", "(", "'corruption_entities'", ",", "constants", ".", "DEFAULT_CORRUPTION_ENTITIES", ")", "if", "corruption_entities", "==", "'all'...
https://github.com/Accenture/AmpliGraph/blob/97a0bb6b6b74531cb4e2f71fc6c5a8b18a445cfa/ampligraph/latent_features/models/EmbeddingModel.py#L1273-L1299
fuzzbunch/fuzzbunch
4b60a6c7cf9f84cf389d3fcdb9281de84ffb5802
fuzzbunch/pyreadline/modes/emacs.py
python
EmacsMode.next_history
(self, e)
Move forward through the history list, fetching the next command.
Move forward through the history list, fetching the next command.
[ "Move", "forward", "through", "the", "history", "list", "fetching", "the", "next", "command", "." ]
def next_history(self, e): # (C-n) '''Move forward through the history list, fetching the next command. ''' self._history.next_history(self.l_buffer)
[ "def", "next_history", "(", "self", ",", "e", ")", ":", "# (C-n)", "self", ".", "_history", ".", "next_history", "(", "self", ".", "l_buffer", ")" ]
https://github.com/fuzzbunch/fuzzbunch/blob/4b60a6c7cf9f84cf389d3fcdb9281de84ffb5802/fuzzbunch/pyreadline/modes/emacs.py#L142-L144
OpenMDAO/OpenMDAO-Framework
f2e37b7de3edeaaeb2d251b375917adec059db9b
openmdao.main/src/openmdao/main/container.py
python
Container.set_metadata
(self, traitpath, metaname, value)
Set the metadata associated with the trait found using traitpath.
Set the metadata associated with the trait found using traitpath.
[ "Set", "the", "metadata", "associated", "with", "the", "trait", "found", "using", "traitpath", "." ]
def set_metadata(self, traitpath, metaname, value): """Set the metadata associated with the trait found using traitpath.""" if metaname in ('iotype',): self.raise_exception("Can't set %s on %s, read-only" % (metaname, traitpath), TypeError) self.get_metadata(traitpath)[metaname] = value
[ "def", "set_metadata", "(", "self", ",", "traitpath", ",", "metaname", ",", "value", ")", ":", "if", "metaname", "in", "(", "'iotype'", ",", ")", ":", "self", ".", "raise_exception", "(", "\"Can't set %s on %s, read-only\"", "%", "(", "metaname", ",", "trait...
https://github.com/OpenMDAO/OpenMDAO-Framework/blob/f2e37b7de3edeaaeb2d251b375917adec059db9b/openmdao.main/src/openmdao/main/container.py#L902-L907
palantir/python-language-server
a91a257d2c8687a7931721d387b2ffeb6aa71fc2
versioneer.py
python
render_pep440_pre
(pieces)
return rendered
TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE
TAG[.post.devDISTANCE] -- No -dirty.
[ "TAG", "[", ".", "post", ".", "devDISTANCE", "]", "--", "No", "-", "dirty", "." ]
def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered
[ "def", "render_pep440_pre", "(", "pieces", ")", ":", "if", "pieces", "[", "\"closest-tag\"", "]", ":", "rendered", "=", "pieces", "[", "\"closest-tag\"", "]", "if", "pieces", "[", "\"distance\"", "]", ":", "rendered", "+=", "\".post.dev%d\"", "%", "pieces", ...
https://github.com/palantir/python-language-server/blob/a91a257d2c8687a7931721d387b2ffeb6aa71fc2/versioneer.py#L1261-L1274
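A few worked calls, assuming the definition above is in scope and using hand-built pieces dicts containing just the keys this renderer reads (in versioneer these normally come from the VCS-scanning step):

    print(render_pep440_pre({"closest-tag": "1.4", "distance": 0}))   # -> '1.4'
    print(render_pep440_pre({"closest-tag": "1.4", "distance": 3}))   # -> '1.4.post.dev3'
    print(render_pep440_pre({"closest-tag": None, "distance": 7}))    # -> '0.post.dev7'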
shiweibsw/Translation-Tools
2fbbf902364e557fa7017f9a74a8797b7440c077
venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py
python
TreeBuilder.reset
(self)
[]
def reset(self): base.TreeBuilder.reset(self) self.insertComment = self.insertCommentInitial self.initial_comments = [] self.doctype = None
[ "def", "reset", "(", "self", ")", ":", "base", ".", "TreeBuilder", ".", "reset", "(", "self", ")", "self", ".", "insertComment", "=", "self", ".", "insertCommentInitial", "self", ".", "initial_comments", "=", "[", "]", "self", ".", "doctype", "=", "None"...
https://github.com/shiweibsw/Translation-Tools/blob/2fbbf902364e557fa7017f9a74a8797b7440c077/venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py#L259-L263
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/plugins/leo_to_html.py
python
pluginController.show_html_number
(self, event=None)
Command handler for leo_to_html. See modules docstring for details.
Command handler for leo_to_html. See modules docstring for details.
[ "Command", "handler", "for", "leo_to_html", ".", "See", "modules", "docstring", "for", "details", "." ]
def show_html_number(self, event=None): """Command handler for leo_to_html. See modules docstring for details.""" self.show_html(bullet='number')
[ "def", "show_html_number", "(", "self", ",", "event", "=", "None", ")", ":", "self", ".", "show_html", "(", "bullet", "=", "'number'", ")" ]
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/plugins/leo_to_html.py#L283-L285
myquant/strategy
17595e6bf4a118e1fa87c90bfb0fd78afa69a60b
AR_MA_STOCK/python/ar_ma_stock.py
python
AR_MA_STOCK.cal_ar_index
(self, ticker)
return ar_index
Purpose: compute the AR index
Purpose: compute the AR index
[ "Purpose", ":", "compute", "the", "AR", "index" ]
def cal_ar_index(self, ticker): """ Purpose: compute the AR index """ ar_index = None if (len(self.dict_price[ticker][0]) < self.ar_period or abs( self.dict_price[ticker][0][-1] - INIT_OPEN_PRICE) < EPS) \ or (len(self.dict_price[ticker][1]) < self.ar_period or abs( self.dict_price[ticker][1][-1] - INIT_HIGH_PRICE) < EPS) \ or (len(self.dict_price[ticker][2]) < self.ar_period or abs( self.dict_price[ticker][2][-1] - INIT_LOW_PRICE) < EPS) \ or (len(self.dict_price[ticker][3]) < self.ar_period or abs( self.dict_price[ticker][3][-1] - INIT_CLOSE_PRICE) < EPS): # insufficient historical data return ar_index open_ls = self.dict_price[ticker][0][len(self.dict_price[ticker][0]) - self.ar_period:] high_ls = self.dict_price[ticker][1][len(self.dict_price[ticker][1]) - self.ar_period:] low_ls = self.dict_price[ticker][2][len(self.dict_price[ticker][2]) - self.ar_period:] high_minus_ls = [a_b[0] - a_b[1] for a_b in zip(high_ls, open_ls)] low_minus_ls = [a_b1[0] - a_b1[1] for a_b1 in zip(open_ls, low_ls)] ar_index = 0.0 for pos in range(len(high_minus_ls)): if low_minus_ls[pos] > EPS: ar_index += high_minus_ls[pos] / low_minus_ls[pos] return ar_index
[ "def", "cal_ar_index", "(", "self", ",", "ticker", ")", ":", "ar_index", "=", "None", "if", "(", "len", "(", "self", ".", "dict_price", "[", "ticker", "]", "[", "0", "]", ")", "<", "self", ".", "ar_period", "or", "abs", "(", "self", ".", "dict_pric...
https://github.com/myquant/strategy/blob/17595e6bf4a118e1fa87c90bfb0fd78afa69a60b/AR_MA_STOCK/python/ar_ma_stock.py#L287-L316
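A hedged numeric sketch of the accumulation the loop performs over the last ar_period bars, i.e. the sum of (high - open) / (open - low) with bars where the open equals the low skipped; the prices are made up.

    EPS = 1e-6   # same role as the module-level tolerance used above

    opens = [10.0, 10.2, 10.1]
    highs = [10.5, 10.4, 10.6]
    lows  = [ 9.8, 10.0, 10.0]

    ar_index = 0.0
    for o, h, l in zip(opens, highs, lows):
        if o - l > EPS:                     # skip bars where the open sits on the low
            ar_index += (h - o) / (o - l)

    print(ar_index)   # 0.5/0.2 + 0.2/0.2 + 0.5/0.1 = 8.5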
pjkundert/cpppo
4c217b6c06b88bede3888cc5ea2731f271a95086
server/enip/parser.py
python
communications_service.__init__
( self, name=None, **kwds )
[]
def __init__( self, name=None, **kwds ): name = name or kwds.setdefault( 'context', self.__class__.__name__ ) vers = UINT( context='version' ) vers[True] = capa = UINT( context='capability' ) capa[True] = svnm = string_bytes( 'service_name', context='service_name', greedy=True, initial='[^\x00]*', decode='iso-8859-1' ) svnm[b'\0'[0]] = octets_drop( 'NUL', repeat=1, terminal=True ) super( communications_service, self ).__init__( name=name, initial=vers, **kwds )
[ "def", "__init__", "(", "self", ",", "name", "=", "None", ",", "*", "*", "kwds", ")", ":", "name", "=", "name", "or", "kwds", ".", "setdefault", "(", "'context'", ",", "self", ".", "__class__", ".", "__name__", ")", "vers", "=", "UINT", "(", "conte...
https://github.com/pjkundert/cpppo/blob/4c217b6c06b88bede3888cc5ea2731f271a95086/server/enip/parser.py#L1284-L1295