repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
itamarst/eliot | eliot/twisted.py | DeferredContext.addErrback | def addErrback(self, errback, *args, **kw):
"""
Add a failure callback that will be run in the context of an eliot
action.
@return: C{self}
@rtype: L{DeferredContext}
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called. This indicates a programmer error.
"""
return self.addCallbacks(
_passthrough, errback, errbackArgs=args, errbackKeywords=kw) | python | def addErrback(self, errback, *args, **kw):
"""
Add a failure callback that will be run in the context of an eliot
action.
@return: C{self}
@rtype: L{DeferredContext}
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called. This indicates a programmer error.
"""
return self.addCallbacks(
_passthrough, errback, errbackArgs=args, errbackKeywords=kw) | [
"def",
"addErrback",
"(",
"self",
",",
"errback",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"return",
"self",
".",
"addCallbacks",
"(",
"_passthrough",
",",
"errback",
",",
"errbackArgs",
"=",
"args",
",",
"errbackKeywords",
"=",
"kw",
")"
] | Add a failure callback that will be run in the context of an eliot
action.
@return: C{self}
@rtype: L{DeferredContext}
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called. This indicates a programmer error. | [
"Add",
"a",
"failure",
"callback",
"that",
"will",
"be",
"run",
"in",
"the",
"context",
"of",
"an",
"eliot",
"action",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/twisted.py#L117-L129 | train | 205,700 |
itamarst/eliot | eliot/twisted.py | DeferredContext.addBoth | def addBoth(self, callback, *args, **kw):
"""
Add a single callback as both success and failure callbacks.
@return: C{self}
@rtype: L{DeferredContext}
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called. This indicates a programmer error.
"""
return self.addCallbacks(callback, callback, args, kw, args, kw) | python | def addBoth(self, callback, *args, **kw):
"""
Add a single callback as both success and failure callbacks.
@return: C{self}
@rtype: L{DeferredContext}
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called. This indicates a programmer error.
"""
return self.addCallbacks(callback, callback, args, kw, args, kw) | [
"def",
"addBoth",
"(",
"self",
",",
"callback",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"return",
"self",
".",
"addCallbacks",
"(",
"callback",
",",
"callback",
",",
"args",
",",
"kw",
",",
"args",
",",
"kw",
")"
] | Add a single callback as both success and failure callbacks.
@return: C{self}
@rtype: L{DeferredContext}
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called. This indicates a programmer error. | [
"Add",
"a",
"single",
"callback",
"as",
"both",
"success",
"and",
"failure",
"callbacks",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/twisted.py#L131-L141 | train | 205,701 |
itamarst/eliot | eliot/twisted.py | DeferredContext.addActionFinish | def addActionFinish(self):
"""
Indicates all callbacks that should run within the action's context
have been added, and that the action should therefore finish once
those callbacks have fired.
@return: The wrapped L{Deferred}.
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called previously. This indicates a programmer error.
"""
if self._finishAdded:
raise AlreadyFinished()
self._finishAdded = True
def done(result):
if isinstance(result, Failure):
exception = result.value
else:
exception = None
self._action.finish(exception)
return result
self.result.addBoth(done)
return self.result | python | def addActionFinish(self):
"""
Indicates all callbacks that should run within the action's context
have been added, and that the action should therefore finish once
those callbacks have fired.
@return: The wrapped L{Deferred}.
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called previously. This indicates a programmer error.
"""
if self._finishAdded:
raise AlreadyFinished()
self._finishAdded = True
def done(result):
if isinstance(result, Failure):
exception = result.value
else:
exception = None
self._action.finish(exception)
return result
self.result.addBoth(done)
return self.result | [
"def",
"addActionFinish",
"(",
"self",
")",
":",
"if",
"self",
".",
"_finishAdded",
":",
"raise",
"AlreadyFinished",
"(",
")",
"self",
".",
"_finishAdded",
"=",
"True",
"def",
"done",
"(",
"result",
")",
":",
"if",
"isinstance",
"(",
"result",
",",
"Fail... | Indicates all callbacks that should run within the action's context
have been added, and that the action should therefore finish once
those callbacks have fired.
@return: The wrapped L{Deferred}.
@raises AlreadyFinished: L{DeferredContext.addActionFinish} has been
called previously. This indicates a programmer error. | [
"Indicates",
"all",
"callbacks",
"that",
"should",
"run",
"within",
"the",
"action",
"s",
"context",
"have",
"been",
"added",
"and",
"that",
"the",
"action",
"should",
"therefore",
"finish",
"once",
"those",
"callbacks",
"have",
"fired",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/twisted.py#L143-L167 | train | 205,702 |
itamarst/eliot | eliot/_bytesjson.py | _loads | def _loads(s):
"""
Support decoding bytes.
"""
if isinstance(s, bytes):
s = s.decode("utf-8")
return pyjson.loads(s) | python | def _loads(s):
"""
Support decoding bytes.
"""
if isinstance(s, bytes):
s = s.decode("utf-8")
return pyjson.loads(s) | [
"def",
"_loads",
"(",
"s",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"bytes",
")",
":",
"s",
"=",
"s",
".",
"decode",
"(",
"\"utf-8\"",
")",
"return",
"pyjson",
".",
"loads",
"(",
"s",
")"
] | Support decoding bytes. | [
"Support",
"decoding",
"bytes",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_bytesjson.py#L17-L23 | train | 205,703 |
itamarst/eliot | eliot/_bytesjson.py | _dumps | def _dumps(obj, cls=pyjson.JSONEncoder):
"""
Encode to bytes, and presume bytes in inputs are UTF-8 encoded strings.
"""
class WithBytes(cls):
"""
JSON encoder that supports L{bytes}.
"""
def default(self, o):
if isinstance(o, bytes):
warnings.warn(
"Eliot will soon stop supporting encoding bytes in JSON"
" on Python 3", DeprecationWarning
)
return o.decode("utf-8")
return cls.default(self, o)
return pyjson.dumps(obj, cls=WithBytes).encode("utf-8") | python | def _dumps(obj, cls=pyjson.JSONEncoder):
"""
Encode to bytes, and presume bytes in inputs are UTF-8 encoded strings.
"""
class WithBytes(cls):
"""
JSON encoder that supports L{bytes}.
"""
def default(self, o):
if isinstance(o, bytes):
warnings.warn(
"Eliot will soon stop supporting encoding bytes in JSON"
" on Python 3", DeprecationWarning
)
return o.decode("utf-8")
return cls.default(self, o)
return pyjson.dumps(obj, cls=WithBytes).encode("utf-8") | [
"def",
"_dumps",
"(",
"obj",
",",
"cls",
"=",
"pyjson",
".",
"JSONEncoder",
")",
":",
"class",
"WithBytes",
"(",
"cls",
")",
":",
"\"\"\"\n JSON encoder that supports L{bytes}.\n \"\"\"",
"def",
"default",
"(",
"self",
",",
"o",
")",
":",
"if",
"... | Encode to bytes, and presume bytes in inputs are UTF-8 encoded strings. | [
"Encode",
"to",
"bytes",
"and",
"presume",
"bytes",
"in",
"inputs",
"are",
"UTF",
"-",
"8",
"encoded",
"strings",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_bytesjson.py#L26-L45 | train | 205,704 |
itamarst/eliot | eliot/_util.py | load_module | def load_module(name, original_module):
"""
Load a copy of a module, distinct from what you'd get if you imported
it directly.
@param str name: The name of the new module.
@param original_module: The original module we're recreating.
@return: A new, distinct module.
"""
module = ModuleType(name)
if PY3:
import importlib.util
spec = importlib.util.find_spec(original_module.__name__)
source = spec.loader.get_code(original_module.__name__)
else:
if getattr(sys, "frozen", False):
raise NotImplementedError("Can't load modules on Python 2 with PyInstaller")
path = original_module.__file__
if path.endswith(".pyc") or path.endswith(".pyo"):
path = path[:-1]
with open(path) as f:
source = f.read()
exec_(source, module.__dict__, module.__dict__)
return module | python | def load_module(name, original_module):
"""
Load a copy of a module, distinct from what you'd get if you imported
it directly.
@param str name: The name of the new module.
@param original_module: The original module we're recreating.
@return: A new, distinct module.
"""
module = ModuleType(name)
if PY3:
import importlib.util
spec = importlib.util.find_spec(original_module.__name__)
source = spec.loader.get_code(original_module.__name__)
else:
if getattr(sys, "frozen", False):
raise NotImplementedError("Can't load modules on Python 2 with PyInstaller")
path = original_module.__file__
if path.endswith(".pyc") or path.endswith(".pyo"):
path = path[:-1]
with open(path) as f:
source = f.read()
exec_(source, module.__dict__, module.__dict__)
return module | [
"def",
"load_module",
"(",
"name",
",",
"original_module",
")",
":",
"module",
"=",
"ModuleType",
"(",
"name",
")",
"if",
"PY3",
":",
"import",
"importlib",
".",
"util",
"spec",
"=",
"importlib",
".",
"util",
".",
"find_spec",
"(",
"original_module",
".",
... | Load a copy of a module, distinct from what you'd get if you imported
it directly.
@param str name: The name of the new module.
@param original_module: The original module we're recreating.
@return: A new, distinct module. | [
"Load",
"a",
"copy",
"of",
"a",
"module",
"distinct",
"from",
"what",
"you",
"d",
"get",
"if",
"you",
"imported",
"it",
"directly",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_util.py#L45-L69 | train | 205,705 |
itamarst/eliot | eliot/_traceback.py | _writeTracebackMessage | def _writeTracebackMessage(logger, typ, exception, traceback):
"""
Write a traceback to the log.
@param typ: The class of the exception.
@param exception: The L{Exception} instance.
@param traceback: The traceback, a C{str}.
"""
msg = TRACEBACK_MESSAGE(
reason=exception, traceback=traceback, exception=typ)
msg = msg.bind(
**_error_extraction.get_fields_for_exception(logger, exception))
msg.write(logger) | python | def _writeTracebackMessage(logger, typ, exception, traceback):
"""
Write a traceback to the log.
@param typ: The class of the exception.
@param exception: The L{Exception} instance.
@param traceback: The traceback, a C{str}.
"""
msg = TRACEBACK_MESSAGE(
reason=exception, traceback=traceback, exception=typ)
msg = msg.bind(
**_error_extraction.get_fields_for_exception(logger, exception))
msg.write(logger) | [
"def",
"_writeTracebackMessage",
"(",
"logger",
",",
"typ",
",",
"exception",
",",
"traceback",
")",
":",
"msg",
"=",
"TRACEBACK_MESSAGE",
"(",
"reason",
"=",
"exception",
",",
"traceback",
"=",
"traceback",
",",
"exception",
"=",
"typ",
")",
"msg",
"=",
"... | Write a traceback to the log.
@param typ: The class of the exception.
@param exception: The L{Exception} instance.
@param traceback: The traceback, a C{str}. | [
"Write",
"a",
"traceback",
"to",
"the",
"log",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_traceback.py#L30-L44 | train | 205,706 |
itamarst/eliot | eliot/_traceback.py | write_traceback | def write_traceback(logger=None, exc_info=None):
"""
Write the latest traceback to the log.
This should be used inside an C{except} block. For example:
try:
dostuff()
except:
write_traceback(logger)
Or you can pass the result of C{sys.exc_info()} to the C{exc_info}
parameter.
"""
if exc_info is None:
exc_info = sys.exc_info()
typ, exception, tb = exc_info
traceback = "".join(_traceback_no_io.format_exception(typ, exception, tb))
_writeTracebackMessage(logger, typ, exception, traceback) | python | def write_traceback(logger=None, exc_info=None):
"""
Write the latest traceback to the log.
This should be used inside an C{except} block. For example:
try:
dostuff()
except:
write_traceback(logger)
Or you can pass the result of C{sys.exc_info()} to the C{exc_info}
parameter.
"""
if exc_info is None:
exc_info = sys.exc_info()
typ, exception, tb = exc_info
traceback = "".join(_traceback_no_io.format_exception(typ, exception, tb))
_writeTracebackMessage(logger, typ, exception, traceback) | [
"def",
"write_traceback",
"(",
"logger",
"=",
"None",
",",
"exc_info",
"=",
"None",
")",
":",
"if",
"exc_info",
"is",
"None",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"typ",
",",
"exception",
",",
"tb",
"=",
"exc_info",
"traceback",
"=",... | Write the latest traceback to the log.
This should be used inside an C{except} block. For example:
try:
dostuff()
except:
write_traceback(logger)
Or you can pass the result of C{sys.exc_info()} to the C{exc_info}
parameter. | [
"Write",
"the",
"latest",
"traceback",
"to",
"the",
"log",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_traceback.py#L80-L98 | train | 205,707 |
itamarst/eliot | eliot/_output.py | exclusively | def exclusively(f):
"""
Decorate a function to make it thread-safe by serializing invocations
using a per-instance lock.
"""
@wraps(f)
def exclusively_f(self, *a, **kw):
with self._lock:
return f(self, *a, **kw)
return exclusively_f | python | def exclusively(f):
"""
Decorate a function to make it thread-safe by serializing invocations
using a per-instance lock.
"""
@wraps(f)
def exclusively_f(self, *a, **kw):
with self._lock:
return f(self, *a, **kw)
return exclusively_f | [
"def",
"exclusively",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"exclusively_f",
"(",
"self",
",",
"*",
"a",
",",
"*",
"*",
"kw",
")",
":",
"with",
"self",
".",
"_lock",
":",
"return",
"f",
"(",
"self",
",",
"*",
"a",
",",
"*",
... | Decorate a function to make it thread-safe by serializing invocations
using a per-instance lock. | [
"Decorate",
"a",
"function",
"to",
"make",
"it",
"thread",
"-",
"safe",
"by",
"serializing",
"invocations",
"using",
"a",
"per",
"-",
"instance",
"lock",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_output.py#L236-L245 | train | 205,708 |
itamarst/eliot | eliot/_output.py | to_file | def to_file(output_file, encoder=EliotJSONEncoder):
"""
Add a destination that writes a JSON message per line to the given file.
@param output_file: A file-like object.
"""
Logger._destinations.add(
FileDestination(file=output_file, encoder=encoder)
) | python | def to_file(output_file, encoder=EliotJSONEncoder):
"""
Add a destination that writes a JSON message per line to the given file.
@param output_file: A file-like object.
"""
Logger._destinations.add(
FileDestination(file=output_file, encoder=encoder)
) | [
"def",
"to_file",
"(",
"output_file",
",",
"encoder",
"=",
"EliotJSONEncoder",
")",
":",
"Logger",
".",
"_destinations",
".",
"add",
"(",
"FileDestination",
"(",
"file",
"=",
"output_file",
",",
"encoder",
"=",
"encoder",
")",
")"
] | Add a destination that writes a JSON message per line to the given file.
@param output_file: A file-like object. | [
"Add",
"a",
"destination",
"that",
"writes",
"a",
"JSON",
"message",
"per",
"line",
"to",
"the",
"given",
"file",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_output.py#L472-L480 | train | 205,709 |
itamarst/eliot | eliot/_output.py | Destinations.send | def send(self, message):
"""
Deliver a message to all destinations.
The passed in message might be mutated.
@param message: A message dictionary that can be serialized to JSON.
@type message: L{dict}
"""
message.update(self._globalFields)
errors = []
for dest in self._destinations:
try:
dest(message)
except:
errors.append(sys.exc_info())
if errors:
raise _DestinationsSendError(errors) | python | def send(self, message):
"""
Deliver a message to all destinations.
The passed in message might be mutated.
@param message: A message dictionary that can be serialized to JSON.
@type message: L{dict}
"""
message.update(self._globalFields)
errors = []
for dest in self._destinations:
try:
dest(message)
except:
errors.append(sys.exc_info())
if errors:
raise _DestinationsSendError(errors) | [
"def",
"send",
"(",
"self",
",",
"message",
")",
":",
"message",
".",
"update",
"(",
"self",
".",
"_globalFields",
")",
"errors",
"=",
"[",
"]",
"for",
"dest",
"in",
"self",
".",
"_destinations",
":",
"try",
":",
"dest",
"(",
"message",
")",
"except"... | Deliver a message to all destinations.
The passed in message might be mutated.
@param message: A message dictionary that can be serialized to JSON.
@type message: L{dict} | [
"Deliver",
"a",
"message",
"to",
"all",
"destinations",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_output.py#L81-L98 | train | 205,710 |
itamarst/eliot | eliot/_output.py | Destinations.add | def add(self, *destinations):
"""
Adds new destinations.
A destination should never ever throw an exception. Seriously.
A destination should not mutate the dictionary it is given.
@param destinations: A list of callables that takes message
dictionaries.
"""
buffered_messages = None
if not self._any_added:
# These are first set of messages added, so we need to clear
# BufferingDestination:
self._any_added = True
buffered_messages = self._destinations[0].messages
self._destinations = []
self._destinations.extend(destinations)
if buffered_messages:
# Re-deliver buffered messages:
for message in buffered_messages:
self.send(message) | python | def add(self, *destinations):
"""
Adds new destinations.
A destination should never ever throw an exception. Seriously.
A destination should not mutate the dictionary it is given.
@param destinations: A list of callables that takes message
dictionaries.
"""
buffered_messages = None
if not self._any_added:
# These are first set of messages added, so we need to clear
# BufferingDestination:
self._any_added = True
buffered_messages = self._destinations[0].messages
self._destinations = []
self._destinations.extend(destinations)
if buffered_messages:
# Re-deliver buffered messages:
for message in buffered_messages:
self.send(message) | [
"def",
"add",
"(",
"self",
",",
"*",
"destinations",
")",
":",
"buffered_messages",
"=",
"None",
"if",
"not",
"self",
".",
"_any_added",
":",
"# These are first set of messages added, so we need to clear",
"# BufferingDestination:",
"self",
".",
"_any_added",
"=",
"Tr... | Adds new destinations.
A destination should never ever throw an exception. Seriously.
A destination should not mutate the dictionary it is given.
@param destinations: A list of callables that takes message
dictionaries. | [
"Adds",
"new",
"destinations",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_output.py#L100-L121 | train | 205,711 |
itamarst/eliot | eliot/tai64n.py | encode | def encode(timestamp):
"""
Convert seconds since epoch to TAI64N string.
@param timestamp: Seconds since UTC Unix epoch as C{float}.
@return: TAI64N-encoded time, as C{unicode}.
"""
seconds = int(timestamp)
nanoseconds = int((timestamp - seconds) * 1000000000)
seconds = seconds + _OFFSET
encoded = b2a_hex(struct.pack(_STRUCTURE, seconds, nanoseconds))
return "@" + encoded.decode("ascii") | python | def encode(timestamp):
"""
Convert seconds since epoch to TAI64N string.
@param timestamp: Seconds since UTC Unix epoch as C{float}.
@return: TAI64N-encoded time, as C{unicode}.
"""
seconds = int(timestamp)
nanoseconds = int((timestamp - seconds) * 1000000000)
seconds = seconds + _OFFSET
encoded = b2a_hex(struct.pack(_STRUCTURE, seconds, nanoseconds))
return "@" + encoded.decode("ascii") | [
"def",
"encode",
"(",
"timestamp",
")",
":",
"seconds",
"=",
"int",
"(",
"timestamp",
")",
"nanoseconds",
"=",
"int",
"(",
"(",
"timestamp",
"-",
"seconds",
")",
"*",
"1000000000",
")",
"seconds",
"=",
"seconds",
"+",
"_OFFSET",
"encoded",
"=",
"b2a_hex"... | Convert seconds since epoch to TAI64N string.
@param timestamp: Seconds since UTC Unix epoch as C{float}.
@return: TAI64N-encoded time, as C{unicode}. | [
"Convert",
"seconds",
"since",
"epoch",
"to",
"TAI64N",
"string",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/tai64n.py#L18-L30 | train | 205,712 |
itamarst/eliot | eliot/tai64n.py | decode | def decode(tai64n):
"""
Convert TAI64N string to seconds since epoch.
Note that dates before 2013 may not decode accurately due to leap second
issues. If you need correct decoding for earlier dates you can try the
tai64n package available from PyPI (U{https://pypi.python.org/pypi/tai64n}).
@param tai64n: TAI64N-encoded time, as C{unicode}.
@return: Seconds since UTC Unix epoch as C{float}.
"""
seconds, nanoseconds = struct.unpack(_STRUCTURE, a2b_hex(tai64n[1:]))
seconds -= _OFFSET
return seconds + (nanoseconds / 1000000000.0) | python | def decode(tai64n):
"""
Convert TAI64N string to seconds since epoch.
Note that dates before 2013 may not decode accurately due to leap second
issues. If you need correct decoding for earlier dates you can try the
tai64n package available from PyPI (U{https://pypi.python.org/pypi/tai64n}).
@param tai64n: TAI64N-encoded time, as C{unicode}.
@return: Seconds since UTC Unix epoch as C{float}.
"""
seconds, nanoseconds = struct.unpack(_STRUCTURE, a2b_hex(tai64n[1:]))
seconds -= _OFFSET
return seconds + (nanoseconds / 1000000000.0) | [
"def",
"decode",
"(",
"tai64n",
")",
":",
"seconds",
",",
"nanoseconds",
"=",
"struct",
".",
"unpack",
"(",
"_STRUCTURE",
",",
"a2b_hex",
"(",
"tai64n",
"[",
"1",
":",
"]",
")",
")",
"seconds",
"-=",
"_OFFSET",
"return",
"seconds",
"+",
"(",
"nanosecon... | Convert TAI64N string to seconds since epoch.
Note that dates before 2013 may not decode accurately due to leap second
issues. If you need correct decoding for earlier dates you can try the
tai64n package available from PyPI (U{https://pypi.python.org/pypi/tai64n}).
@param tai64n: TAI64N-encoded time, as C{unicode}.
@return: Seconds since UTC Unix epoch as C{float}. | [
"Convert",
"TAI64N",
"string",
"to",
"seconds",
"since",
"epoch",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/tai64n.py#L33-L47 | train | 205,713 |
itamarst/eliot | eliot/_action.py | preserve_context | def preserve_context(f):
"""
Package up the given function with the current Eliot context, and then
restore context and call given function when the resulting callable is
run. This allows continuing the action context within a different thread.
The result should only be used once, since it relies on
L{Action.serialize_task_id} whose results should only be deserialized
once.
@param f: A callable.
@return: One-time use callable that calls given function in context of
a child of current Eliot action.
"""
action = current_action()
if action is None:
return f
task_id = action.serialize_task_id()
called = threading.Lock()
def restore_eliot_context(*args, **kwargs):
# Make sure the function has not already been called:
if not called.acquire(False):
raise TooManyCalls(f)
with Action.continue_task(task_id=task_id):
return f(*args, **kwargs)
return restore_eliot_context | python | def preserve_context(f):
"""
Package up the given function with the current Eliot context, and then
restore context and call given function when the resulting callable is
run. This allows continuing the action context within a different thread.
The result should only be used once, since it relies on
L{Action.serialize_task_id} whose results should only be deserialized
once.
@param f: A callable.
@return: One-time use callable that calls given function in context of
a child of current Eliot action.
"""
action = current_action()
if action is None:
return f
task_id = action.serialize_task_id()
called = threading.Lock()
def restore_eliot_context(*args, **kwargs):
# Make sure the function has not already been called:
if not called.acquire(False):
raise TooManyCalls(f)
with Action.continue_task(task_id=task_id):
return f(*args, **kwargs)
return restore_eliot_context | [
"def",
"preserve_context",
"(",
"f",
")",
":",
"action",
"=",
"current_action",
"(",
")",
"if",
"action",
"is",
"None",
":",
"return",
"f",
"task_id",
"=",
"action",
".",
"serialize_task_id",
"(",
")",
"called",
"=",
"threading",
".",
"Lock",
"(",
")",
... | Package up the given function with the current Eliot context, and then
restore context and call given function when the resulting callable is
run. This allows continuing the action context within a different thread.
The result should only be used once, since it relies on
L{Action.serialize_task_id} whose results should only be deserialized
once.
@param f: A callable.
@return: One-time use callable that calls given function in context of
a child of current Eliot action. | [
"Package",
"up",
"the",
"given",
"function",
"with",
"the",
"current",
"Eliot",
"context",
"and",
"then",
"restore",
"context",
"and",
"call",
"given",
"function",
"when",
"the",
"resulting",
"callable",
"is",
"run",
".",
"This",
"allows",
"continuing",
"the",... | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L820-L849 | train | 205,714 |
itamarst/eliot | eliot/_action.py | Action.serialize_task_id | def serialize_task_id(self):
"""
Create a unique identifier for the current location within the task.
The format is C{b"<task_uuid>@<task_level>"}.
@return: L{bytes} encoding the current location within the task.
"""
return "{}@{}".format(
self._identification[TASK_UUID_FIELD],
self._nextTaskLevel().toString()).encode("ascii") | python | def serialize_task_id(self):
"""
Create a unique identifier for the current location within the task.
The format is C{b"<task_uuid>@<task_level>"}.
@return: L{bytes} encoding the current location within the task.
"""
return "{}@{}".format(
self._identification[TASK_UUID_FIELD],
self._nextTaskLevel().toString()).encode("ascii") | [
"def",
"serialize_task_id",
"(",
"self",
")",
":",
"return",
"\"{}@{}\"",
".",
"format",
"(",
"self",
".",
"_identification",
"[",
"TASK_UUID_FIELD",
"]",
",",
"self",
".",
"_nextTaskLevel",
"(",
")",
".",
"toString",
"(",
")",
")",
".",
"encode",
"(",
"... | Create a unique identifier for the current location within the task.
The format is C{b"<task_uuid>@<task_level>"}.
@return: L{bytes} encoding the current location within the task. | [
"Create",
"a",
"unique",
"identifier",
"for",
"the",
"current",
"location",
"within",
"the",
"task",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L234-L244 | train | 205,715 |
itamarst/eliot | eliot/_action.py | Action.continue_task | def continue_task(cls, logger=None, task_id=_TASK_ID_NOT_SUPPLIED):
"""
Start a new action which is part of a serialized task.
@param logger: The L{eliot.ILogger} to which to write
messages, or C{None} if the default one should be used.
@param task_id: A serialized task identifier, the output of
L{Action.serialize_task_id}, either ASCII-encoded bytes or unicode
string. Required.
@return: The new L{Action} instance.
"""
if task_id is _TASK_ID_NOT_SUPPLIED:
raise RuntimeError("You must supply a task_id keyword argument.")
if isinstance(task_id, bytes):
task_id = task_id.decode("ascii")
uuid, task_level = task_id.split("@")
action = cls(
logger, uuid, TaskLevel.fromString(task_level),
"eliot:remote_task")
action._start({})
return action | python | def continue_task(cls, logger=None, task_id=_TASK_ID_NOT_SUPPLIED):
"""
Start a new action which is part of a serialized task.
@param logger: The L{eliot.ILogger} to which to write
messages, or C{None} if the default one should be used.
@param task_id: A serialized task identifier, the output of
L{Action.serialize_task_id}, either ASCII-encoded bytes or unicode
string. Required.
@return: The new L{Action} instance.
"""
if task_id is _TASK_ID_NOT_SUPPLIED:
raise RuntimeError("You must supply a task_id keyword argument.")
if isinstance(task_id, bytes):
task_id = task_id.decode("ascii")
uuid, task_level = task_id.split("@")
action = cls(
logger, uuid, TaskLevel.fromString(task_level),
"eliot:remote_task")
action._start({})
return action | [
"def",
"continue_task",
"(",
"cls",
",",
"logger",
"=",
"None",
",",
"task_id",
"=",
"_TASK_ID_NOT_SUPPLIED",
")",
":",
"if",
"task_id",
"is",
"_TASK_ID_NOT_SUPPLIED",
":",
"raise",
"RuntimeError",
"(",
"\"You must supply a task_id keyword argument.\"",
")",
"if",
"... | Start a new action which is part of a serialized task.
@param logger: The L{eliot.ILogger} to which to write
messages, or C{None} if the default one should be used.
@param task_id: A serialized task identifier, the output of
L{Action.serialize_task_id}, either ASCII-encoded bytes or unicode
string. Required.
@return: The new L{Action} instance. | [
"Start",
"a",
"new",
"action",
"which",
"is",
"part",
"of",
"a",
"serialized",
"task",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L247-L269 | train | 205,716 |
itamarst/eliot | eliot/_action.py | Action._start | def _start(self, fields):
"""
Log the start message.
The action identification fields, and any additional given fields,
will be logged.
In general you shouldn't call this yourself, instead using a C{with}
block or L{Action.finish}.
"""
fields[ACTION_STATUS_FIELD] = STARTED_STATUS
fields.update(self._identification)
if self._serializers is None:
serializer = None
else:
serializer = self._serializers.start
Message(fields, serializer).write(self._logger, self) | python | def _start(self, fields):
"""
Log the start message.
The action identification fields, and any additional given fields,
will be logged.
In general you shouldn't call this yourself, instead using a C{with}
block or L{Action.finish}.
"""
fields[ACTION_STATUS_FIELD] = STARTED_STATUS
fields.update(self._identification)
if self._serializers is None:
serializer = None
else:
serializer = self._serializers.start
Message(fields, serializer).write(self._logger, self) | [
"def",
"_start",
"(",
"self",
",",
"fields",
")",
":",
"fields",
"[",
"ACTION_STATUS_FIELD",
"]",
"=",
"STARTED_STATUS",
"fields",
".",
"update",
"(",
"self",
".",
"_identification",
")",
"if",
"self",
".",
"_serializers",
"is",
"None",
":",
"serializer",
... | Log the start message.
The action identification fields, and any additional given fields,
will be logged.
In general you shouldn't call this yourself, instead using a C{with}
block or L{Action.finish}. | [
"Log",
"the",
"start",
"message",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L289-L305 | train | 205,717 |
itamarst/eliot | eliot/_action.py | Action.finish | def finish(self, exception=None):
"""
Log the finish message.
The action identification fields, and any additional given fields,
will be logged.
In general you shouldn't call this yourself, instead using a C{with}
block or L{Action.finish}.
@param exception: C{None}, in which case the fields added with
L{Action.addSuccessFields} are used. Or an L{Exception}, in
which case an C{"exception"} field is added with the given
L{Exception} type and C{"reason"} with its contents.
"""
if self._finished:
return
self._finished = True
serializer = None
if exception is None:
fields = self._successFields
fields[ACTION_STATUS_FIELD] = SUCCEEDED_STATUS
if self._serializers is not None:
serializer = self._serializers.success
else:
fields = _error_extraction.get_fields_for_exception(
self._logger, exception)
fields[EXCEPTION_FIELD] = "%s.%s" % (
exception.__class__.__module__, exception.__class__.__name__)
fields[REASON_FIELD] = safeunicode(exception)
fields[ACTION_STATUS_FIELD] = FAILED_STATUS
if self._serializers is not None:
serializer = self._serializers.failure
fields.update(self._identification)
Message(fields, serializer).write(self._logger, self) | python | def finish(self, exception=None):
"""
Log the finish message.
The action identification fields, and any additional given fields,
will be logged.
In general you shouldn't call this yourself, instead using a C{with}
block or L{Action.finish}.
@param exception: C{None}, in which case the fields added with
L{Action.addSuccessFields} are used. Or an L{Exception}, in
which case an C{"exception"} field is added with the given
L{Exception} type and C{"reason"} with its contents.
"""
if self._finished:
return
self._finished = True
serializer = None
if exception is None:
fields = self._successFields
fields[ACTION_STATUS_FIELD] = SUCCEEDED_STATUS
if self._serializers is not None:
serializer = self._serializers.success
else:
fields = _error_extraction.get_fields_for_exception(
self._logger, exception)
fields[EXCEPTION_FIELD] = "%s.%s" % (
exception.__class__.__module__, exception.__class__.__name__)
fields[REASON_FIELD] = safeunicode(exception)
fields[ACTION_STATUS_FIELD] = FAILED_STATUS
if self._serializers is not None:
serializer = self._serializers.failure
fields.update(self._identification)
Message(fields, serializer).write(self._logger, self) | [
"def",
"finish",
"(",
"self",
",",
"exception",
"=",
"None",
")",
":",
"if",
"self",
".",
"_finished",
":",
"return",
"self",
".",
"_finished",
"=",
"True",
"serializer",
"=",
"None",
"if",
"exception",
"is",
"None",
":",
"fields",
"=",
"self",
".",
... | Log the finish message.
The action identification fields, and any additional given fields,
will be logged.
In general you shouldn't call this yourself, instead using a C{with}
block or L{Action.finish}.
@param exception: C{None}, in which case the fields added with
L{Action.addSuccessFields} are used. Or an L{Exception}, in
which case an C{"exception"} field is added with the given
L{Exception} type and C{"reason"} with its contents. | [
"Log",
"the",
"finish",
"message",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L307-L342 | train | 205,718 |
itamarst/eliot | eliot/_action.py | Action.context | def context(self):
"""
Create a context manager that ensures code runs within action's context.
The action does NOT finish when the context is exited.
"""
parent = _ACTION_CONTEXT.set(self)
try:
yield self
finally:
_ACTION_CONTEXT.reset(parent) | python | def context(self):
"""
Create a context manager that ensures code runs within action's context.
The action does NOT finish when the context is exited.
"""
parent = _ACTION_CONTEXT.set(self)
try:
yield self
finally:
_ACTION_CONTEXT.reset(parent) | [
"def",
"context",
"(",
"self",
")",
":",
"parent",
"=",
"_ACTION_CONTEXT",
".",
"set",
"(",
"self",
")",
"try",
":",
"yield",
"self",
"finally",
":",
"_ACTION_CONTEXT",
".",
"reset",
"(",
"parent",
")"
] | Create a context manager that ensures code runs within action's context.
The action does NOT finish when the context is exited. | [
"Create",
"a",
"context",
"manager",
"that",
"ensures",
"code",
"runs",
"within",
"action",
"s",
"context",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L390-L400 | train | 205,719 |
itamarst/eliot | eliot/_action.py | WrittenAction.children | def children(self):
"""
The list of child messages and actions sorted by task level, excluding the
start and end messages.
"""
return pvector(
sorted(self._children.values(), key=lambda m: m.task_level)) | python | def children(self):
"""
The list of child messages and actions sorted by task level, excluding the
start and end messages.
"""
return pvector(
sorted(self._children.values(), key=lambda m: m.task_level)) | [
"def",
"children",
"(",
"self",
")",
":",
"return",
"pvector",
"(",
"sorted",
"(",
"self",
".",
"_children",
".",
"values",
"(",
")",
",",
"key",
"=",
"lambda",
"m",
":",
"m",
".",
"task_level",
")",
")"
] | The list of child messages and actions sorted by task level, excluding the
start and end messages. | [
"The",
"list",
"of",
"child",
"messages",
"and",
"actions",
"sorted",
"by",
"task",
"level",
"excluding",
"the",
"start",
"and",
"end",
"messages",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L643-L649 | train | 205,720 |
itamarst/eliot | eliot/_action.py | WrittenAction._start | def _start(self, start_message):
"""
Start this action given its start message.
@param WrittenMessage start_message: A start message that has the
same level as this action.
@raise InvalidStartMessage: If C{start_message} does not have a
C{ACTION_STATUS_FIELD} of C{STARTED_STATUS}, or if it has a
C{task_level} indicating that it is not the first message of an
action.
"""
if start_message.contents.get(
ACTION_STATUS_FIELD, None) != STARTED_STATUS:
raise InvalidStartMessage.wrong_status(start_message)
if start_message.task_level.level[-1] != 1:
raise InvalidStartMessage.wrong_task_level(start_message)
return self.set(start_message=start_message) | python | def _start(self, start_message):
"""
Start this action given its start message.
@param WrittenMessage start_message: A start message that has the
same level as this action.
@raise InvalidStartMessage: If C{start_message} does not have a
C{ACTION_STATUS_FIELD} of C{STARTED_STATUS}, or if it has a
C{task_level} indicating that it is not the first message of an
action.
"""
if start_message.contents.get(
ACTION_STATUS_FIELD, None) != STARTED_STATUS:
raise InvalidStartMessage.wrong_status(start_message)
if start_message.task_level.level[-1] != 1:
raise InvalidStartMessage.wrong_task_level(start_message)
return self.set(start_message=start_message) | [
"def",
"_start",
"(",
"self",
",",
"start_message",
")",
":",
"if",
"start_message",
".",
"contents",
".",
"get",
"(",
"ACTION_STATUS_FIELD",
",",
"None",
")",
"!=",
"STARTED_STATUS",
":",
"raise",
"InvalidStartMessage",
".",
"wrong_status",
"(",
"start_message"... | Start this action given its start message.
@param WrittenMessage start_message: A start message that has the
same level as this action.
@raise InvalidStartMessage: If C{start_message} does not have a
C{ACTION_STATUS_FIELD} of C{STARTED_STATUS}, or if it has a
C{task_level} indicating that it is not the first message of an
action. | [
"Start",
"this",
"action",
"given",
"its",
"start",
"message",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_action.py#L686-L703 | train | 205,721 |
itamarst/eliot | eliot/_validation.py | _MessageSerializer.serialize | def serialize(self, message):
"""
Serialize the given message in-place, converting inputs to outputs.
We do this in-place for performance reasons. There are more fields in
a message than there are L{Field} objects because of the timestamp,
task_level and task_uuid fields. By only iterating over our L{Fields}
we therefore reduce the number of function calls in a critical code
path.
@param message: A C{dict}.
"""
for key, field in self.fields.items():
message[key] = field.serialize(message[key]) | python | def serialize(self, message):
"""
Serialize the given message in-place, converting inputs to outputs.
We do this in-place for performance reasons. There are more fields in
a message than there are L{Field} objects because of the timestamp,
task_level and task_uuid fields. By only iterating over our L{Fields}
we therefore reduce the number of function calls in a critical code
path.
@param message: A C{dict}.
"""
for key, field in self.fields.items():
message[key] = field.serialize(message[key]) | [
"def",
"serialize",
"(",
"self",
",",
"message",
")",
":",
"for",
"key",
",",
"field",
"in",
"self",
".",
"fields",
".",
"items",
"(",
")",
":",
"message",
"[",
"key",
"]",
"=",
"field",
".",
"serialize",
"(",
"message",
"[",
"key",
"]",
")"
] | Serialize the given message in-place, converting inputs to outputs.
We do this in-place for performance reasons. There are more fields in
a message than there are L{Field} objects because of the timestamp,
task_level and task_uuid fields. By only iterating over our L{Fields}
we therefore reduce the number of function calls in a critical code
path.
@param message: A C{dict}. | [
"Serialize",
"the",
"given",
"message",
"in",
"-",
"place",
"converting",
"inputs",
"to",
"outputs",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_validation.py#L230-L243 | train | 205,722 |
itamarst/eliot | eliot/_validation.py | _MessageSerializer.validate | def validate(self, message):
"""
Validate the given message.
@param message: A C{dict}.
@raises ValidationError: If the message has the wrong fields or one of
its field values fail validation.
"""
for key, field in self.fields.items():
if key not in message:
raise ValidationError(message, "Field %r is missing" % (key, ))
field.validate(message[key])
if self.allow_additional_fields:
return
# Otherwise, additional fields are not allowed:
fieldSet = set(self.fields) | set(RESERVED_FIELDS)
for key in message:
if key not in fieldSet:
raise ValidationError(message, "Unexpected field %r" % (key, )) | python | def validate(self, message):
"""
Validate the given message.
@param message: A C{dict}.
@raises ValidationError: If the message has the wrong fields or one of
its field values fail validation.
"""
for key, field in self.fields.items():
if key not in message:
raise ValidationError(message, "Field %r is missing" % (key, ))
field.validate(message[key])
if self.allow_additional_fields:
return
# Otherwise, additional fields are not allowed:
fieldSet = set(self.fields) | set(RESERVED_FIELDS)
for key in message:
if key not in fieldSet:
raise ValidationError(message, "Unexpected field %r" % (key, )) | [
"def",
"validate",
"(",
"self",
",",
"message",
")",
":",
"for",
"key",
",",
"field",
"in",
"self",
".",
"fields",
".",
"items",
"(",
")",
":",
"if",
"key",
"not",
"in",
"message",
":",
"raise",
"ValidationError",
"(",
"message",
",",
"\"Field %r is mi... | Validate the given message.
@param message: A C{dict}.
@raises ValidationError: If the message has the wrong fields or one of
its field values fail validation. | [
"Validate",
"the",
"given",
"message",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_validation.py#L245-L265 | train | 205,723 |
itamarst/eliot | eliot/prettyprint.py | pretty_format | def pretty_format(message):
"""
Convert a message dictionary into a human-readable string.
@param message: Message to parse, as dictionary.
@return: Unicode string.
"""
skip = {
TIMESTAMP_FIELD, TASK_UUID_FIELD, TASK_LEVEL_FIELD, MESSAGE_TYPE_FIELD,
ACTION_TYPE_FIELD, ACTION_STATUS_FIELD}
def add_field(previous, key, value):
value = unicode(pprint.pformat(value, width=40)).replace(
"\\n", "\n ").replace("\\t", "\t")
# Reindent second line and later to match up with first line's
# indentation:
lines = value.split("\n")
# indent lines are " <key length>| <value>"
indent = "{}| ".format(" " * (2 + len(key)))
value = "\n".join([lines[0]] + [indent + l for l in lines[1:]])
return " %s: %s\n" % (key, value)
remaining = ""
for field in [ACTION_TYPE_FIELD, MESSAGE_TYPE_FIELD, ACTION_STATUS_FIELD]:
if field in message:
remaining += add_field(remaining, field, message[field])
for (key, value) in sorted(message.items()):
if key not in skip:
remaining += add_field(remaining, key, value)
level = "/" + "/".join(map(unicode, message[TASK_LEVEL_FIELD]))
return "%s -> %s\n%sZ\n%s" % (
message[TASK_UUID_FIELD],
level,
# If we were returning or storing the datetime we'd want to use an
# explicit timezone instead of a naive datetime, but since we're
# just using it for formatting we needn't bother.
datetime.utcfromtimestamp(message[TIMESTAMP_FIELD]).isoformat(
sep=str(" ")),
remaining, ) | python | def pretty_format(message):
"""
Convert a message dictionary into a human-readable string.
@param message: Message to parse, as dictionary.
@return: Unicode string.
"""
skip = {
TIMESTAMP_FIELD, TASK_UUID_FIELD, TASK_LEVEL_FIELD, MESSAGE_TYPE_FIELD,
ACTION_TYPE_FIELD, ACTION_STATUS_FIELD}
def add_field(previous, key, value):
value = unicode(pprint.pformat(value, width=40)).replace(
"\\n", "\n ").replace("\\t", "\t")
# Reindent second line and later to match up with first line's
# indentation:
lines = value.split("\n")
# indent lines are " <key length>| <value>"
indent = "{}| ".format(" " * (2 + len(key)))
value = "\n".join([lines[0]] + [indent + l for l in lines[1:]])
return " %s: %s\n" % (key, value)
remaining = ""
for field in [ACTION_TYPE_FIELD, MESSAGE_TYPE_FIELD, ACTION_STATUS_FIELD]:
if field in message:
remaining += add_field(remaining, field, message[field])
for (key, value) in sorted(message.items()):
if key not in skip:
remaining += add_field(remaining, key, value)
level = "/" + "/".join(map(unicode, message[TASK_LEVEL_FIELD]))
return "%s -> %s\n%sZ\n%s" % (
message[TASK_UUID_FIELD],
level,
# If we were returning or storing the datetime we'd want to use an
# explicit timezone instead of a naive datetime, but since we're
# just using it for formatting we needn't bother.
datetime.utcfromtimestamp(message[TIMESTAMP_FIELD]).isoformat(
sep=str(" ")),
remaining, ) | [
"def",
"pretty_format",
"(",
"message",
")",
":",
"skip",
"=",
"{",
"TIMESTAMP_FIELD",
",",
"TASK_UUID_FIELD",
",",
"TASK_LEVEL_FIELD",
",",
"MESSAGE_TYPE_FIELD",
",",
"ACTION_TYPE_FIELD",
",",
"ACTION_STATUS_FIELD",
"}",
"def",
"add_field",
"(",
"previous",
",",
... | Convert a message dictionary into a human-readable string.
@param message: Message to parse, as dictionary.
@return: Unicode string. | [
"Convert",
"a",
"message",
"dictionary",
"into",
"a",
"human",
"-",
"readable",
"string",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/prettyprint.py#L48-L88 | train | 205,724 |
itamarst/eliot | eliot/prettyprint.py | _main | def _main():
"""
Command-line program that reads in JSON from stdin and writes out
pretty-printed messages to stdout.
"""
if argv[1:]:
stdout.write(_CLI_HELP)
raise SystemExit()
for line in stdin:
try:
message = loads(line)
except ValueError:
stdout.write("Not JSON: {}\n\n".format(line.rstrip(b"\n")))
continue
if REQUIRED_FIELDS - set(message.keys()):
stdout.write(
"Not an Eliot message: {}\n\n".format(line.rstrip(b"\n")))
continue
result = pretty_format(message) + "\n"
if PY2:
result = result.encode("utf-8")
stdout.write(result) | python | def _main():
"""
Command-line program that reads in JSON from stdin and writes out
pretty-printed messages to stdout.
"""
if argv[1:]:
stdout.write(_CLI_HELP)
raise SystemExit()
for line in stdin:
try:
message = loads(line)
except ValueError:
stdout.write("Not JSON: {}\n\n".format(line.rstrip(b"\n")))
continue
if REQUIRED_FIELDS - set(message.keys()):
stdout.write(
"Not an Eliot message: {}\n\n".format(line.rstrip(b"\n")))
continue
result = pretty_format(message) + "\n"
if PY2:
result = result.encode("utf-8")
stdout.write(result) | [
"def",
"_main",
"(",
")",
":",
"if",
"argv",
"[",
"1",
":",
"]",
":",
"stdout",
".",
"write",
"(",
"_CLI_HELP",
")",
"raise",
"SystemExit",
"(",
")",
"for",
"line",
"in",
"stdin",
":",
"try",
":",
"message",
"=",
"loads",
"(",
"line",
")",
"excep... | Command-line program that reads in JSON from stdin and writes out
pretty-printed messages to stdout. | [
"Command",
"-",
"line",
"program",
"that",
"reads",
"in",
"JSON",
"from",
"stdin",
"and",
"writes",
"out",
"pretty",
"-",
"printed",
"messages",
"to",
"stdout",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/prettyprint.py#L100-L121 | train | 205,725 |
itamarst/eliot | eliot/_errors.py | ErrorExtraction.get_fields_for_exception | def get_fields_for_exception(self, logger, exception):
"""
Given an exception instance, return fields to add to the failed action
message.
@param logger: ``ILogger`` currently being used.
@param exception: An exception instance.
@return: Dictionary with fields to include.
"""
for klass in getmro(exception.__class__):
if klass in self.registry:
extractor = self.registry[klass]
try:
return extractor(exception)
except:
from ._traceback import write_traceback
write_traceback(logger)
return {}
return {} | python | def get_fields_for_exception(self, logger, exception):
"""
Given an exception instance, return fields to add to the failed action
message.
@param logger: ``ILogger`` currently being used.
@param exception: An exception instance.
@return: Dictionary with fields to include.
"""
for klass in getmro(exception.__class__):
if klass in self.registry:
extractor = self.registry[klass]
try:
return extractor(exception)
except:
from ._traceback import write_traceback
write_traceback(logger)
return {}
return {} | [
"def",
"get_fields_for_exception",
"(",
"self",
",",
"logger",
",",
"exception",
")",
":",
"for",
"klass",
"in",
"getmro",
"(",
"exception",
".",
"__class__",
")",
":",
"if",
"klass",
"in",
"self",
".",
"registry",
":",
"extractor",
"=",
"self",
".",
"re... | Given an exception instance, return fields to add to the failed action
message.
@param logger: ``ILogger`` currently being used.
@param exception: An exception instance.
@return: Dictionary with fields to include. | [
"Given",
"an",
"exception",
"instance",
"return",
"fields",
"to",
"add",
"to",
"the",
"failed",
"action",
"message",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/_errors.py#L32-L51 | train | 205,726 |
itamarst/eliot | eliot/filter.py | EliotFilter.run | def run(self):
"""
For each incoming message, decode the JSON, evaluate expression, encode
as JSON and write that to the output file.
"""
for line in self.incoming:
message = loads(line)
result = self._evaluate(message)
if result is self._SKIP:
continue
self.output.write(dumps(result, cls=_DatetimeJSONEncoder) + b"\n") | python | def run(self):
"""
For each incoming message, decode the JSON, evaluate expression, encode
as JSON and write that to the output file.
"""
for line in self.incoming:
message = loads(line)
result = self._evaluate(message)
if result is self._SKIP:
continue
self.output.write(dumps(result, cls=_DatetimeJSONEncoder) + b"\n") | [
"def",
"run",
"(",
"self",
")",
":",
"for",
"line",
"in",
"self",
".",
"incoming",
":",
"message",
"=",
"loads",
"(",
"line",
")",
"result",
"=",
"self",
".",
"_evaluate",
"(",
"message",
")",
"if",
"result",
"is",
"self",
".",
"_SKIP",
":",
"conti... | For each incoming message, decode the JSON, evaluate expression, encode
as JSON and write that to the output file. | [
"For",
"each",
"incoming",
"message",
"decode",
"the",
"JSON",
"evaluate",
"expression",
"encode",
"as",
"JSON",
"and",
"write",
"that",
"to",
"the",
"output",
"file",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/filter.py#L52-L62 | train | 205,727 |
itamarst/eliot | eliot/filter.py | EliotFilter._evaluate | def _evaluate(self, message):
"""
Evaluate the expression with the given Python object in its locals.
@param message: A decoded JSON input.
@return: The resulting object.
"""
return eval(
self.code,
globals(), {
"J": message,
"timedelta": timedelta,
"datetime": datetime,
"SKIP": self._SKIP}) | python | def _evaluate(self, message):
"""
Evaluate the expression with the given Python object in its locals.
@param message: A decoded JSON input.
@return: The resulting object.
"""
return eval(
self.code,
globals(), {
"J": message,
"timedelta": timedelta,
"datetime": datetime,
"SKIP": self._SKIP}) | [
"def",
"_evaluate",
"(",
"self",
",",
"message",
")",
":",
"return",
"eval",
"(",
"self",
".",
"code",
",",
"globals",
"(",
")",
",",
"{",
"\"J\"",
":",
"message",
",",
"\"timedelta\"",
":",
"timedelta",
",",
"\"datetime\"",
":",
"datetime",
",",
"\"SK... | Evaluate the expression with the given Python object in its locals.
@param message: A decoded JSON input.
@return: The resulting object. | [
"Evaluate",
"the",
"expression",
"with",
"the",
"given",
"Python",
"object",
"in",
"its",
"locals",
"."
] | c03c96520c5492fadfc438b4b0f6336e2785ba2d | https://github.com/itamarst/eliot/blob/c03c96520c5492fadfc438b4b0f6336e2785ba2d/eliot/filter.py#L64-L78 | train | 205,728 |
cherrypy/cheroot | cheroot/wsgi.py | Gateway.respond | def respond(self):
"""Process the current request.
From :pep:`333`:
The start_response callable must not actually transmit
the response headers. Instead, it must store them for the
server or gateway to transmit only after the first
iteration of the application return value that yields
a NON-EMPTY string, or upon the application's first
invocation of the write() callable.
"""
response = self.req.server.wsgi_app(self.env, self.start_response)
try:
for chunk in filter(None, response):
if not isinstance(chunk, six.binary_type):
raise ValueError('WSGI Applications must yield bytes')
self.write(chunk)
finally:
# Send headers if not already sent
self.req.ensure_headers_sent()
if hasattr(response, 'close'):
response.close() | python | def respond(self):
"""Process the current request.
From :pep:`333`:
The start_response callable must not actually transmit
the response headers. Instead, it must store them for the
server or gateway to transmit only after the first
iteration of the application return value that yields
a NON-EMPTY string, or upon the application's first
invocation of the write() callable.
"""
response = self.req.server.wsgi_app(self.env, self.start_response)
try:
for chunk in filter(None, response):
if not isinstance(chunk, six.binary_type):
raise ValueError('WSGI Applications must yield bytes')
self.write(chunk)
finally:
# Send headers if not already sent
self.req.ensure_headers_sent()
if hasattr(response, 'close'):
response.close() | [
"def",
"respond",
"(",
"self",
")",
":",
"response",
"=",
"self",
".",
"req",
".",
"server",
".",
"wsgi_app",
"(",
"self",
".",
"env",
",",
"self",
".",
"start_response",
")",
"try",
":",
"for",
"chunk",
"in",
"filter",
"(",
"None",
",",
"response",
... | Process the current request.
From :pep:`333`:
The start_response callable must not actually transmit
the response headers. Instead, it must store them for the
server or gateway to transmit only after the first
iteration of the application return value that yields
a NON-EMPTY string, or upon the application's first
invocation of the write() callable. | [
"Process",
"the",
"current",
"request",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/wsgi.py#L131-L153 | train | 205,729 |
cherrypy/cheroot | cheroot/wsgi.py | Gateway._encode_status | def _encode_status(status):
"""Cast status to bytes representation of current Python version.
According to :pep:`3333`, when using Python 3, the response status
and headers must be bytes masquerading as unicode; that is, they
must be of type "str" but are restricted to code points in the
"latin-1" set.
"""
if six.PY2:
return status
if not isinstance(status, str):
raise TypeError('WSGI response status is not of type str.')
return status.encode('ISO-8859-1') | python | def _encode_status(status):
"""Cast status to bytes representation of current Python version.
According to :pep:`3333`, when using Python 3, the response status
and headers must be bytes masquerading as unicode; that is, they
must be of type "str" but are restricted to code points in the
"latin-1" set.
"""
if six.PY2:
return status
if not isinstance(status, str):
raise TypeError('WSGI response status is not of type str.')
return status.encode('ISO-8859-1') | [
"def",
"_encode_status",
"(",
"status",
")",
":",
"if",
"six",
".",
"PY2",
":",
"return",
"status",
"if",
"not",
"isinstance",
"(",
"status",
",",
"str",
")",
":",
"raise",
"TypeError",
"(",
"'WSGI response status is not of type str.'",
")",
"return",
"status"... | Cast status to bytes representation of current Python version.
According to :pep:`3333`, when using Python 3, the response status
and headers must be bytes masquerading as unicode; that is, they
must be of type "str" but are restricted to code points in the
"latin-1" set. | [
"Cast",
"status",
"to",
"bytes",
"representation",
"of",
"current",
"Python",
"version",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/wsgi.py#L194-L206 | train | 205,730 |
cherrypy/cheroot | cheroot/wsgi.py | Gateway.write | def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError('WSGI write called before start_response.')
chunklen = len(chunk)
rbo = self.remaining_bytes_out
if rbo is not None and chunklen > rbo:
if not self.req.sent_headers:
# Whew. We can send a 500 to the client.
self.req.simple_response(
'500 Internal Server Error',
'The requested resource returned more bytes than the '
'declared Content-Length.',
)
else:
# Dang. We have probably already sent data. Truncate the chunk
# to fit (so the client doesn't hang) and raise an error later.
chunk = chunk[:rbo]
self.req.ensure_headers_sent()
self.req.write(chunk)
if rbo is not None:
rbo -= chunklen
if rbo < 0:
raise ValueError(
'Response body exceeds the declared Content-Length.',
) | python | def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError('WSGI write called before start_response.')
chunklen = len(chunk)
rbo = self.remaining_bytes_out
if rbo is not None and chunklen > rbo:
if not self.req.sent_headers:
# Whew. We can send a 500 to the client.
self.req.simple_response(
'500 Internal Server Error',
'The requested resource returned more bytes than the '
'declared Content-Length.',
)
else:
# Dang. We have probably already sent data. Truncate the chunk
# to fit (so the client doesn't hang) and raise an error later.
chunk = chunk[:rbo]
self.req.ensure_headers_sent()
self.req.write(chunk)
if rbo is not None:
rbo -= chunklen
if rbo < 0:
raise ValueError(
'Response body exceeds the declared Content-Length.',
) | [
"def",
"write",
"(",
"self",
",",
"chunk",
")",
":",
"if",
"not",
"self",
".",
"started_response",
":",
"raise",
"AssertionError",
"(",
"'WSGI write called before start_response.'",
")",
"chunklen",
"=",
"len",
"(",
"chunk",
")",
"rbo",
"=",
"self",
".",
"re... | WSGI callable to write unbuffered data to the client.
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application). | [
"WSGI",
"callable",
"to",
"write",
"unbuffered",
"data",
"to",
"the",
"client",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/wsgi.py#L208-L241 | train | 205,731 |
cherrypy/cheroot | cheroot/makefile.py | BufferedWriter.write | def write(self, b):
"""Write bytes to buffer."""
self._checkClosed()
if isinstance(b, str):
raise TypeError("can't write str to binary stream")
with self._write_lock:
self._write_buf.extend(b)
self._flush_unlocked()
return len(b) | python | def write(self, b):
"""Write bytes to buffer."""
self._checkClosed()
if isinstance(b, str):
raise TypeError("can't write str to binary stream")
with self._write_lock:
self._write_buf.extend(b)
self._flush_unlocked()
return len(b) | [
"def",
"write",
"(",
"self",
",",
"b",
")",
":",
"self",
".",
"_checkClosed",
"(",
")",
"if",
"isinstance",
"(",
"b",
",",
"str",
")",
":",
"raise",
"TypeError",
"(",
"\"can't write str to binary stream\"",
")",
"with",
"self",
".",
"_write_lock",
":",
"... | Write bytes to buffer. | [
"Write",
"bytes",
"to",
"buffer",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/makefile.py#L28-L37 | train | 205,732 |
cherrypy/cheroot | cheroot/makefile.py | MakeFile_PY2.write | def write(self, data):
"""Sendall for non-blocking sockets."""
bytes_sent = 0
data_mv = memoryview(data)
payload_size = len(data_mv)
while bytes_sent < payload_size:
try:
bytes_sent += self.send(
data_mv[bytes_sent:bytes_sent + SOCK_WRITE_BLOCKSIZE],
)
except socket.error as e:
if e.args[0] not in errors.socket_errors_nonblocking:
raise | python | def write(self, data):
"""Sendall for non-blocking sockets."""
bytes_sent = 0
data_mv = memoryview(data)
payload_size = len(data_mv)
while bytes_sent < payload_size:
try:
bytes_sent += self.send(
data_mv[bytes_sent:bytes_sent + SOCK_WRITE_BLOCKSIZE],
)
except socket.error as e:
if e.args[0] not in errors.socket_errors_nonblocking:
raise | [
"def",
"write",
"(",
"self",
",",
"data",
")",
":",
"bytes_sent",
"=",
"0",
"data_mv",
"=",
"memoryview",
"(",
"data",
")",
"payload_size",
"=",
"len",
"(",
"data_mv",
")",
"while",
"bytes_sent",
"<",
"payload_size",
":",
"try",
":",
"bytes_sent",
"+=",
... | Sendall for non-blocking sockets. | [
"Sendall",
"for",
"non",
"-",
"blocking",
"sockets",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/makefile.py#L70-L82 | train | 205,733 |
cherrypy/cheroot | cheroot/makefile.py | MakeFile_PY2.flush | def flush(self):
"""Write all data from buffer to socket and reset write buffer."""
if self._wbuf:
buffer = ''.join(self._wbuf)
self._wbuf = []
self.write(buffer) | python | def flush(self):
"""Write all data from buffer to socket and reset write buffer."""
if self._wbuf:
buffer = ''.join(self._wbuf)
self._wbuf = []
self.write(buffer) | [
"def",
"flush",
"(",
"self",
")",
":",
"if",
"self",
".",
"_wbuf",
":",
"buffer",
"=",
"''",
".",
"join",
"(",
"self",
".",
"_wbuf",
")",
"self",
".",
"_wbuf",
"=",
"[",
"]",
"self",
".",
"write",
"(",
"buffer",
")"
] | Write all data from buffer to socket and reset write buffer. | [
"Write",
"all",
"data",
"from",
"buffer",
"to",
"socket",
"and",
"reset",
"write",
"buffer",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/makefile.py#L90-L95 | train | 205,734 |
cherrypy/cheroot | cheroot/cli.py | parse_wsgi_bind_location | def parse_wsgi_bind_location(bind_addr_string):
"""Convert bind address string to a BindLocation."""
# try and match for an IP/hostname and port
match = six.moves.urllib.parse.urlparse('//{}'.format(bind_addr_string))
try:
addr = match.hostname
port = match.port
if addr is not None or port is not None:
return TCPSocket(addr, port)
except ValueError:
pass
# else, assume a UNIX socket path
# if the string begins with an @ symbol, use an abstract socket
if bind_addr_string.startswith('@'):
return AbstractSocket(bind_addr_string[1:])
return UnixSocket(path=bind_addr_string) | python | def parse_wsgi_bind_location(bind_addr_string):
"""Convert bind address string to a BindLocation."""
# try and match for an IP/hostname and port
match = six.moves.urllib.parse.urlparse('//{}'.format(bind_addr_string))
try:
addr = match.hostname
port = match.port
if addr is not None or port is not None:
return TCPSocket(addr, port)
except ValueError:
pass
# else, assume a UNIX socket path
# if the string begins with an @ symbol, use an abstract socket
if bind_addr_string.startswith('@'):
return AbstractSocket(bind_addr_string[1:])
return UnixSocket(path=bind_addr_string) | [
"def",
"parse_wsgi_bind_location",
"(",
"bind_addr_string",
")",
":",
"# try and match for an IP/hostname and port",
"match",
"=",
"six",
".",
"moves",
".",
"urllib",
".",
"parse",
".",
"urlparse",
"(",
"'//{}'",
".",
"format",
"(",
"bind_addr_string",
")",
")",
"... | Convert bind address string to a BindLocation. | [
"Convert",
"bind",
"address",
"string",
"to",
"a",
"BindLocation",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/cli.py#L129-L145 | train | 205,735 |
cherrypy/cheroot | cheroot/cli.py | main | def main():
"""Create a new Cheroot instance with arguments from the command line."""
parser = argparse.ArgumentParser(
description='Start an instance of the Cheroot WSGI/HTTP server.',
)
for arg, spec in _arg_spec.items():
parser.add_argument(arg, **spec)
raw_args = parser.parse_args()
# ensure cwd in sys.path
'' in sys.path or sys.path.insert(0, '')
# create a server based on the arguments provided
raw_args._wsgi_app.server(raw_args).safe_start() | python | def main():
"""Create a new Cheroot instance with arguments from the command line."""
parser = argparse.ArgumentParser(
description='Start an instance of the Cheroot WSGI/HTTP server.',
)
for arg, spec in _arg_spec.items():
parser.add_argument(arg, **spec)
raw_args = parser.parse_args()
# ensure cwd in sys.path
'' in sys.path or sys.path.insert(0, '')
# create a server based on the arguments provided
raw_args._wsgi_app.server(raw_args).safe_start() | [
"def",
"main",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"'Start an instance of the Cheroot WSGI/HTTP server.'",
",",
")",
"for",
"arg",
",",
"spec",
"in",
"_arg_spec",
".",
"items",
"(",
")",
":",
"parser",
".",... | Create a new Cheroot instance with arguments from the command line. | [
"Create",
"a",
"new",
"Cheroot",
"instance",
"with",
"arguments",
"from",
"the",
"command",
"line",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/cli.py#L221-L234 | train | 205,736 |
cherrypy/cheroot | cheroot/cli.py | Application.server_args | def server_args(self, parsed_args):
"""Return keyword args for Server class."""
args = {
arg: value
for arg, value in vars(parsed_args).items()
if not arg.startswith('_') and value is not None
}
args.update(vars(self))
return args | python | def server_args(self, parsed_args):
"""Return keyword args for Server class."""
args = {
arg: value
for arg, value in vars(parsed_args).items()
if not arg.startswith('_') and value is not None
}
args.update(vars(self))
return args | [
"def",
"server_args",
"(",
"self",
",",
"parsed_args",
")",
":",
"args",
"=",
"{",
"arg",
":",
"value",
"for",
"arg",
",",
"value",
"in",
"vars",
"(",
"parsed_args",
")",
".",
"items",
"(",
")",
"if",
"not",
"arg",
".",
"startswith",
"(",
"'_'",
")... | Return keyword args for Server class. | [
"Return",
"keyword",
"args",
"for",
"Server",
"class",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/cli.py#L96-L104 | train | 205,737 |
cherrypy/cheroot | cheroot/errors.py | plat_specific_errors | def plat_specific_errors(*errnames):
"""Return error numbers for all errors in errnames on this platform.
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names.
"""
missing_attr = set([None, ])
unique_nums = set(getattr(errno, k, None) for k in errnames)
return list(unique_nums - missing_attr) | python | def plat_specific_errors(*errnames):
"""Return error numbers for all errors in errnames on this platform.
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names.
"""
missing_attr = set([None, ])
unique_nums = set(getattr(errno, k, None) for k in errnames)
return list(unique_nums - missing_attr) | [
"def",
"plat_specific_errors",
"(",
"*",
"errnames",
")",
":",
"missing_attr",
"=",
"set",
"(",
"[",
"None",
",",
"]",
")",
"unique_nums",
"=",
"set",
"(",
"getattr",
"(",
"errno",
",",
"k",
",",
"None",
")",
"for",
"k",
"in",
"errnames",
")",
"retur... | Return error numbers for all errors in errnames on this platform.
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names. | [
"Return",
"error",
"numbers",
"for",
"all",
"errors",
"in",
"errnames",
"on",
"this",
"platform",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/errors.py#L25-L34 | train | 205,738 |
cherrypy/cheroot | cheroot/ssl/builtin.py | _assert_ssl_exc_contains | def _assert_ssl_exc_contains(exc, *msgs):
"""Check whether SSL exception contains either of messages provided."""
if len(msgs) < 1:
raise TypeError(
'_assert_ssl_exc_contains() requires '
'at least one message to be passed.',
)
err_msg_lower = str(exc).lower()
return any(m.lower() in err_msg_lower for m in msgs) | python | def _assert_ssl_exc_contains(exc, *msgs):
"""Check whether SSL exception contains either of messages provided."""
if len(msgs) < 1:
raise TypeError(
'_assert_ssl_exc_contains() requires '
'at least one message to be passed.',
)
err_msg_lower = str(exc).lower()
return any(m.lower() in err_msg_lower for m in msgs) | [
"def",
"_assert_ssl_exc_contains",
"(",
"exc",
",",
"*",
"msgs",
")",
":",
"if",
"len",
"(",
"msgs",
")",
"<",
"1",
":",
"raise",
"TypeError",
"(",
"'_assert_ssl_exc_contains() requires '",
"'at least one message to be passed.'",
",",
")",
"err_msg_lower",
"=",
"s... | Check whether SSL exception contains either of messages provided. | [
"Check",
"whether",
"SSL",
"exception",
"contains",
"either",
"of",
"messages",
"provided",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/ssl/builtin.py#L41-L49 | train | 205,739 |
cherrypy/cheroot | cheroot/ssl/builtin.py | BuiltinSSLAdapter.env_dn_dict | def env_dn_dict(self, env_prefix, cert_value):
"""Return a dict of WSGI environment variables for a client cert DN.
E.g. SSL_CLIENT_S_DN_CN, SSL_CLIENT_S_DN_C, etc.
See SSL_CLIENT_S_DN_x509 at
https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
"""
if not cert_value:
return {}
env = {}
for rdn in cert_value:
for attr_name, val in rdn:
attr_code = self.CERT_KEY_TO_LDAP_CODE.get(attr_name)
if attr_code:
env['%s_%s' % (env_prefix, attr_code)] = val
return env | python | def env_dn_dict(self, env_prefix, cert_value):
"""Return a dict of WSGI environment variables for a client cert DN.
E.g. SSL_CLIENT_S_DN_CN, SSL_CLIENT_S_DN_C, etc.
See SSL_CLIENT_S_DN_x509 at
https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
"""
if not cert_value:
return {}
env = {}
for rdn in cert_value:
for attr_name, val in rdn:
attr_code = self.CERT_KEY_TO_LDAP_CODE.get(attr_name)
if attr_code:
env['%s_%s' % (env_prefix, attr_code)] = val
return env | [
"def",
"env_dn_dict",
"(",
"self",
",",
"env_prefix",
",",
"cert_value",
")",
":",
"if",
"not",
"cert_value",
":",
"return",
"{",
"}",
"env",
"=",
"{",
"}",
"for",
"rdn",
"in",
"cert_value",
":",
"for",
"attr_name",
",",
"val",
"in",
"rdn",
":",
"att... | Return a dict of WSGI environment variables for a client cert DN.
E.g. SSL_CLIENT_S_DN_CN, SSL_CLIENT_S_DN_C, etc.
See SSL_CLIENT_S_DN_x509 at
https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars. | [
"Return",
"a",
"dict",
"of",
"WSGI",
"environment",
"variables",
"for",
"a",
"client",
"cert",
"DN",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/ssl/builtin.py#L189-L205 | train | 205,740 |
cherrypy/cheroot | cheroot/ssl/pyopenssl.py | SSLFileobjectMixin._safe_call | def _safe_call(self, is_reader, call, *args, **kwargs):
"""Wrap the given call with SSL error-trapping.
is_reader: if False EOF errors will be raised. If True, EOF errors
will return "" (to emulate normal sockets).
"""
start = time.time()
while True:
try:
return call(*args, **kwargs)
except SSL.WantReadError:
# Sleep and try again. This is dangerous, because it means
# the rest of the stack has no way of differentiating
# between a "new handshake" error and "client dropped".
# Note this isn't an endless loop: there's a timeout below.
# Ref: https://stackoverflow.com/a/5133568/595220
time.sleep(self.ssl_retry)
except SSL.WantWriteError:
time.sleep(self.ssl_retry)
except SSL.SysCallError as e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return b''
errnum = e.args[0]
if is_reader and errnum in errors.socket_errors_to_ignore:
return b''
raise socket.error(errnum)
except SSL.Error as e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return b''
thirdarg = None
try:
thirdarg = e.args[0][0][2]
except IndexError:
pass
if thirdarg == 'http request':
# The client is talking HTTP to an HTTPS server.
raise errors.NoSSLError()
raise errors.FatalSSLAlert(*e.args)
if time.time() - start > self.ssl_timeout:
raise socket.timeout('timed out') | python | def _safe_call(self, is_reader, call, *args, **kwargs):
"""Wrap the given call with SSL error-trapping.
is_reader: if False EOF errors will be raised. If True, EOF errors
will return "" (to emulate normal sockets).
"""
start = time.time()
while True:
try:
return call(*args, **kwargs)
except SSL.WantReadError:
# Sleep and try again. This is dangerous, because it means
# the rest of the stack has no way of differentiating
# between a "new handshake" error and "client dropped".
# Note this isn't an endless loop: there's a timeout below.
# Ref: https://stackoverflow.com/a/5133568/595220
time.sleep(self.ssl_retry)
except SSL.WantWriteError:
time.sleep(self.ssl_retry)
except SSL.SysCallError as e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return b''
errnum = e.args[0]
if is_reader and errnum in errors.socket_errors_to_ignore:
return b''
raise socket.error(errnum)
except SSL.Error as e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return b''
thirdarg = None
try:
thirdarg = e.args[0][0][2]
except IndexError:
pass
if thirdarg == 'http request':
# The client is talking HTTP to an HTTPS server.
raise errors.NoSSLError()
raise errors.FatalSSLAlert(*e.args)
if time.time() - start > self.ssl_timeout:
raise socket.timeout('timed out') | [
"def",
"_safe_call",
"(",
"self",
",",
"is_reader",
",",
"call",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"start",
"=",
"time",
".",
"time",
"(",
")",
"while",
"True",
":",
"try",
":",
"return",
"call",
"(",
"*",
"args",
",",
"*",
"... | Wrap the given call with SSL error-trapping.
is_reader: if False EOF errors will be raised. If True, EOF errors
will return "" (to emulate normal sockets). | [
"Wrap",
"the",
"given",
"call",
"with",
"SSL",
"error",
"-",
"trapping",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/ssl/pyopenssl.py#L65-L109 | train | 205,741 |
cherrypy/cheroot | cheroot/ssl/pyopenssl.py | SSLFileobjectMixin.sendall | def sendall(self, *args, **kwargs):
"""Send whole message to the socket."""
return self._safe_call(
False,
super(SSLFileobjectMixin, self).sendall,
*args, **kwargs
) | python | def sendall(self, *args, **kwargs):
"""Send whole message to the socket."""
return self._safe_call(
False,
super(SSLFileobjectMixin, self).sendall,
*args, **kwargs
) | [
"def",
"sendall",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"_safe_call",
"(",
"False",
",",
"super",
"(",
"SSLFileobjectMixin",
",",
"self",
")",
".",
"sendall",
",",
"*",
"args",
",",
"*",
"*",
"kwar... | Send whole message to the socket. | [
"Send",
"whole",
"message",
"to",
"the",
"socket",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/ssl/pyopenssl.py#L131-L137 | train | 205,742 |
cherrypy/cheroot | cheroot/ssl/pyopenssl.py | pyOpenSSLAdapter.bind | def bind(self, sock):
"""Wrap and return the given socket."""
if self.context is None:
self.context = self.get_context()
conn = SSLConnection(self.context, sock)
self._environ = self.get_environ()
return conn | python | def bind(self, sock):
"""Wrap and return the given socket."""
if self.context is None:
self.context = self.get_context()
conn = SSLConnection(self.context, sock)
self._environ = self.get_environ()
return conn | [
"def",
"bind",
"(",
"self",
",",
"sock",
")",
":",
"if",
"self",
".",
"context",
"is",
"None",
":",
"self",
".",
"context",
"=",
"self",
".",
"get_context",
"(",
")",
"conn",
"=",
"SSLConnection",
"(",
"self",
".",
"context",
",",
"sock",
")",
"sel... | Wrap and return the given socket. | [
"Wrap",
"and",
"return",
"the",
"given",
"socket",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/ssl/pyopenssl.py#L258-L264 | train | 205,743 |
cherrypy/cheroot | cheroot/ssl/pyopenssl.py | pyOpenSSLAdapter.get_context | def get_context(self):
"""Return an SSL.Context from self attributes."""
# See https://code.activestate.com/recipes/442473/
c = SSL.Context(SSL.SSLv23_METHOD)
c.use_privatekey_file(self.private_key)
if self.certificate_chain:
c.load_verify_locations(self.certificate_chain)
c.use_certificate_file(self.certificate)
return c | python | def get_context(self):
"""Return an SSL.Context from self attributes."""
# See https://code.activestate.com/recipes/442473/
c = SSL.Context(SSL.SSLv23_METHOD)
c.use_privatekey_file(self.private_key)
if self.certificate_chain:
c.load_verify_locations(self.certificate_chain)
c.use_certificate_file(self.certificate)
return c | [
"def",
"get_context",
"(",
"self",
")",
":",
"# See https://code.activestate.com/recipes/442473/",
"c",
"=",
"SSL",
".",
"Context",
"(",
"SSL",
".",
"SSLv23_METHOD",
")",
"c",
".",
"use_privatekey_file",
"(",
"self",
".",
"private_key",
")",
"if",
"self",
".",
... | Return an SSL.Context from self attributes. | [
"Return",
"an",
"SSL",
".",
"Context",
"from",
"self",
"attributes",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/ssl/pyopenssl.py#L270-L278 | train | 205,744 |
cherrypy/cheroot | cheroot/ssl/pyopenssl.py | pyOpenSSLAdapter.get_environ | def get_environ(self):
"""Return WSGI environ entries to be merged into each request."""
ssl_environ = {
'HTTPS': 'on',
# pyOpenSSL doesn't provide access to any of these AFAICT
# 'SSL_PROTOCOL': 'SSLv2',
# SSL_CIPHER string The cipher specification name
# SSL_VERSION_INTERFACE string The mod_ssl program version
# SSL_VERSION_LIBRARY string The OpenSSL program version
}
if self.certificate:
# Server certificate attributes
cert = open(self.certificate, 'rb').read()
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
ssl_environ.update({
'SSL_SERVER_M_VERSION': cert.get_version(),
'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
# 'SSL_SERVER_V_START':
# Validity of server's certificate (start time),
# 'SSL_SERVER_V_END':
# Validity of server's certificate (end time),
})
for prefix, dn in [
('I', cert.get_issuer()),
('S', cert.get_subject()),
]:
# X509Name objects don't seem to have a way to get the
# complete DN string. Use str() and slice it instead,
# because str(dn) == "<X509Name object '/C=US/ST=...'>"
dnstr = str(dn)[18:-2]
wsgikey = 'SSL_SERVER_%s_DN' % prefix
ssl_environ[wsgikey] = dnstr
# The DN should be of the form: /k1=v1/k2=v2, but we must allow
# for any value to contain slashes itself (in a URL).
while dnstr:
pos = dnstr.rfind('=')
dnstr, value = dnstr[:pos], dnstr[pos + 1:]
pos = dnstr.rfind('/')
dnstr, key = dnstr[:pos], dnstr[pos + 1:]
if key and value:
wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
ssl_environ[wsgikey] = value
return ssl_environ | python | def get_environ(self):
"""Return WSGI environ entries to be merged into each request."""
ssl_environ = {
'HTTPS': 'on',
# pyOpenSSL doesn't provide access to any of these AFAICT
# 'SSL_PROTOCOL': 'SSLv2',
# SSL_CIPHER string The cipher specification name
# SSL_VERSION_INTERFACE string The mod_ssl program version
# SSL_VERSION_LIBRARY string The OpenSSL program version
}
if self.certificate:
# Server certificate attributes
cert = open(self.certificate, 'rb').read()
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
ssl_environ.update({
'SSL_SERVER_M_VERSION': cert.get_version(),
'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
# 'SSL_SERVER_V_START':
# Validity of server's certificate (start time),
# 'SSL_SERVER_V_END':
# Validity of server's certificate (end time),
})
for prefix, dn in [
('I', cert.get_issuer()),
('S', cert.get_subject()),
]:
# X509Name objects don't seem to have a way to get the
# complete DN string. Use str() and slice it instead,
# because str(dn) == "<X509Name object '/C=US/ST=...'>"
dnstr = str(dn)[18:-2]
wsgikey = 'SSL_SERVER_%s_DN' % prefix
ssl_environ[wsgikey] = dnstr
# The DN should be of the form: /k1=v1/k2=v2, but we must allow
# for any value to contain slashes itself (in a URL).
while dnstr:
pos = dnstr.rfind('=')
dnstr, value = dnstr[:pos], dnstr[pos + 1:]
pos = dnstr.rfind('/')
dnstr, key = dnstr[:pos], dnstr[pos + 1:]
if key and value:
wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
ssl_environ[wsgikey] = value
return ssl_environ | [
"def",
"get_environ",
"(",
"self",
")",
":",
"ssl_environ",
"=",
"{",
"'HTTPS'",
":",
"'on'",
",",
"# pyOpenSSL doesn't provide access to any of these AFAICT",
"# 'SSL_PROTOCOL': 'SSLv2',",
"# SSL_CIPHER string The cipher specification name",
"# SSL_VERSION_INTERFACE string ... | Return WSGI environ entries to be merged into each request. | [
"Return",
"WSGI",
"environ",
"entries",
"to",
"be",
"merged",
"into",
"each",
"request",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/ssl/pyopenssl.py#L280-L327 | train | 205,745 |
cherrypy/cheroot | cheroot/server.py | get_ssl_adapter_class | def get_ssl_adapter_class(name='builtin'):
"""Return an SSL adapter class for the given name."""
adapter = ssl_adapters[name.lower()]
if isinstance(adapter, six.string_types):
last_dot = adapter.rfind('.')
attr_name = adapter[last_dot + 1:]
mod_path = adapter[:last_dot]
try:
mod = sys.modules[mod_path]
if mod is None:
raise KeyError()
except KeyError:
# The last [''] is important.
mod = __import__(mod_path, globals(), locals(), [''])
# Let an AttributeError propagate outward.
try:
adapter = getattr(mod, attr_name)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
return adapter | python | def get_ssl_adapter_class(name='builtin'):
"""Return an SSL adapter class for the given name."""
adapter = ssl_adapters[name.lower()]
if isinstance(adapter, six.string_types):
last_dot = adapter.rfind('.')
attr_name = adapter[last_dot + 1:]
mod_path = adapter[:last_dot]
try:
mod = sys.modules[mod_path]
if mod is None:
raise KeyError()
except KeyError:
# The last [''] is important.
mod = __import__(mod_path, globals(), locals(), [''])
# Let an AttributeError propagate outward.
try:
adapter = getattr(mod, attr_name)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
return adapter | [
"def",
"get_ssl_adapter_class",
"(",
"name",
"=",
"'builtin'",
")",
":",
"adapter",
"=",
"ssl_adapters",
"[",
"name",
".",
"lower",
"(",
")",
"]",
"if",
"isinstance",
"(",
"adapter",
",",
"six",
".",
"string_types",
")",
":",
"last_dot",
"=",
"adapter",
... | Return an SSL adapter class for the given name. | [
"Return",
"an",
"SSL",
"adapter",
"class",
"for",
"the",
"given",
"name",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L2159-L2182 | train | 205,746 |
cherrypy/cheroot | cheroot/server.py | ChunkedRFile.read_trailer_lines | def read_trailer_lines(self):
"""Read HTTP headers and yield them.
Returns:
Generator: yields CRLF separated lines.
"""
if not self.closed:
raise ValueError(
'Cannot read trailers until the request body has been read.',
)
while True:
line = self.rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError('Illegal end of headers.')
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise IOError('Request Entity Too Large')
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError('HTTP requires CRLF terminators')
yield line | python | def read_trailer_lines(self):
"""Read HTTP headers and yield them.
Returns:
Generator: yields CRLF separated lines.
"""
if not self.closed:
raise ValueError(
'Cannot read trailers until the request body has been read.',
)
while True:
line = self.rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError('Illegal end of headers.')
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise IOError('Request Entity Too Large')
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError('HTTP requires CRLF terminators')
yield line | [
"def",
"read_trailer_lines",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"closed",
":",
"raise",
"ValueError",
"(",
"'Cannot read trailers until the request body has been read.'",
",",
")",
"while",
"True",
":",
"line",
"=",
"self",
".",
"rfile",
".",
"read... | Read HTTP headers and yield them.
Returns:
Generator: yields CRLF separated lines. | [
"Read",
"HTTP",
"headers",
"and",
"yield",
"them",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L606-L634 | train | 205,747 |
cherrypy/cheroot | cheroot/server.py | HTTPRequest.send_headers | def send_headers(self):
"""Assert, process, and send the HTTP response message-headers.
You must set self.status, and self.outheaders before calling this.
"""
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])
if status == 413:
# Request Entity Too Large. Close conn to avoid garbage.
self.close_connection = True
elif b'content-length' not in hkeys:
# "All 1xx (informational), 204 (no content),
# and 304 (not modified) responses MUST NOT
# include a message-body." So no point chunking.
if status < 200 or status in (204, 205, 304):
pass
else:
needs_chunked = (
self.response_protocol == 'HTTP/1.1'
and self.method != b'HEAD'
)
if needs_chunked:
# Use the chunked transfer-coding
self.chunked_write = True
self.outheaders.append((b'Transfer-Encoding', b'chunked'))
else:
# Closing the conn is the only way to determine len.
self.close_connection = True
if b'connection' not in hkeys:
if self.response_protocol == 'HTTP/1.1':
# Both server and client are HTTP/1.1 or better
if self.close_connection:
self.outheaders.append((b'Connection', b'close'))
else:
# Server and/or client are HTTP/1.0
if not self.close_connection:
self.outheaders.append((b'Connection', b'Keep-Alive'))
if (not self.close_connection) and (not self.chunked_read):
# Read any remaining request body data on the socket.
# "If an origin server receives a request that does not include an
# Expect request-header field with the "100-continue" expectation,
# the request includes a request body, and the server responds
# with a final status code before reading the entire request body
# from the transport connection, then the server SHOULD NOT close
# the transport connection until it has read the entire request,
# or until the client closes the connection. Otherwise, the client
# might not reliably receive the response message. However, this
# requirement is not be construed as preventing a server from
# defending itself against denial-of-service attacks, or from
# badly broken client implementations."
remaining = getattr(self.rfile, 'remaining', 0)
if remaining > 0:
self.rfile.read(remaining)
if b'date' not in hkeys:
self.outheaders.append((
b'Date',
email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
))
if b'server' not in hkeys:
self.outheaders.append((
b'Server',
self.server.server_name.encode('ISO-8859-1'),
))
proto = self.server.protocol.encode('ascii')
buf = [proto + SPACE + self.status + CRLF]
for k, v in self.outheaders:
buf.append(k + COLON + SPACE + v + CRLF)
buf.append(CRLF)
self.conn.wfile.write(EMPTY.join(buf)) | python | def send_headers(self):
"""Assert, process, and send the HTTP response message-headers.
You must set self.status, and self.outheaders before calling this.
"""
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])
if status == 413:
# Request Entity Too Large. Close conn to avoid garbage.
self.close_connection = True
elif b'content-length' not in hkeys:
# "All 1xx (informational), 204 (no content),
# and 304 (not modified) responses MUST NOT
# include a message-body." So no point chunking.
if status < 200 or status in (204, 205, 304):
pass
else:
needs_chunked = (
self.response_protocol == 'HTTP/1.1'
and self.method != b'HEAD'
)
if needs_chunked:
# Use the chunked transfer-coding
self.chunked_write = True
self.outheaders.append((b'Transfer-Encoding', b'chunked'))
else:
# Closing the conn is the only way to determine len.
self.close_connection = True
if b'connection' not in hkeys:
if self.response_protocol == 'HTTP/1.1':
# Both server and client are HTTP/1.1 or better
if self.close_connection:
self.outheaders.append((b'Connection', b'close'))
else:
# Server and/or client are HTTP/1.0
if not self.close_connection:
self.outheaders.append((b'Connection', b'Keep-Alive'))
if (not self.close_connection) and (not self.chunked_read):
# Read any remaining request body data on the socket.
# "If an origin server receives a request that does not include an
# Expect request-header field with the "100-continue" expectation,
# the request includes a request body, and the server responds
# with a final status code before reading the entire request body
# from the transport connection, then the server SHOULD NOT close
# the transport connection until it has read the entire request,
# or until the client closes the connection. Otherwise, the client
# might not reliably receive the response message. However, this
# requirement is not be construed as preventing a server from
# defending itself against denial-of-service attacks, or from
# badly broken client implementations."
remaining = getattr(self.rfile, 'remaining', 0)
if remaining > 0:
self.rfile.read(remaining)
if b'date' not in hkeys:
self.outheaders.append((
b'Date',
email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
))
if b'server' not in hkeys:
self.outheaders.append((
b'Server',
self.server.server_name.encode('ISO-8859-1'),
))
proto = self.server.protocol.encode('ascii')
buf = [proto + SPACE + self.status + CRLF]
for k, v in self.outheaders:
buf.append(k + COLON + SPACE + v + CRLF)
buf.append(CRLF)
self.conn.wfile.write(EMPTY.join(buf)) | [
"def",
"send_headers",
"(",
"self",
")",
":",
"hkeys",
"=",
"[",
"key",
".",
"lower",
"(",
")",
"for",
"key",
",",
"value",
"in",
"self",
".",
"outheaders",
"]",
"status",
"=",
"int",
"(",
"self",
".",
"status",
"[",
":",
"3",
"]",
")",
"if",
"... | Assert, process, and send the HTTP response message-headers.
You must set self.status, and self.outheaders before calling this. | [
"Assert",
"process",
"and",
"send",
"the",
"HTTP",
"response",
"message",
"-",
"headers",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1135-L1209 | train | 205,748 |
cherrypy/cheroot | cheroot/server.py | HTTPConnection._conditional_error | def _conditional_error(self, req, response):
"""Respond with an error.
Don't bother writing if a response
has already started being written.
"""
if not req or req.sent_headers:
return
try:
req.simple_response(response)
except errors.FatalSSLAlert:
pass
except errors.NoSSLError:
self._handle_no_ssl(req) | python | def _conditional_error(self, req, response):
"""Respond with an error.
Don't bother writing if a response
has already started being written.
"""
if not req or req.sent_headers:
return
try:
req.simple_response(response)
except errors.FatalSSLAlert:
pass
except errors.NoSSLError:
self._handle_no_ssl(req) | [
"def",
"_conditional_error",
"(",
"self",
",",
"req",
",",
"response",
")",
":",
"if",
"not",
"req",
"or",
"req",
".",
"sent_headers",
":",
"return",
"try",
":",
"req",
".",
"simple_response",
"(",
"response",
")",
"except",
"errors",
".",
"FatalSSLAlert",... | Respond with an error.
Don't bother writing if a response
has already started being written. | [
"Respond",
"with",
"an",
"error",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1324-L1338 | train | 205,749 |
cherrypy/cheroot | cheroot/server.py | HTTPConnection.resolve_peer_creds | def resolve_peer_creds(self): # LRU cached on per-instance basis
"""Return the username and group tuple of the peercreds if available.
Raises:
NotImplementedError: in case of unsupported OS
RuntimeError: in case of UID/GID lookup unsupported or disabled
"""
if not IS_UID_GID_RESOLVABLE:
raise NotImplementedError(
'UID/GID lookup is unavailable under current platform. '
'It can only be done under UNIX-like OS '
'but not under the Google App Engine',
)
elif not self.peercreds_resolve_enabled:
raise RuntimeError(
'UID/GID lookup is disabled within this server',
)
user = pwd.getpwuid(self.peer_uid).pw_name # [0]
group = grp.getgrgid(self.peer_gid).gr_name # [0]
return user, group | python | def resolve_peer_creds(self): # LRU cached on per-instance basis
"""Return the username and group tuple of the peercreds if available.
Raises:
NotImplementedError: in case of unsupported OS
RuntimeError: in case of UID/GID lookup unsupported or disabled
"""
if not IS_UID_GID_RESOLVABLE:
raise NotImplementedError(
'UID/GID lookup is unavailable under current platform. '
'It can only be done under UNIX-like OS '
'but not under the Google App Engine',
)
elif not self.peercreds_resolve_enabled:
raise RuntimeError(
'UID/GID lookup is disabled within this server',
)
user = pwd.getpwuid(self.peer_uid).pw_name # [0]
group = grp.getgrgid(self.peer_gid).gr_name # [0]
return user, group | [
"def",
"resolve_peer_creds",
"(",
"self",
")",
":",
"# LRU cached on per-instance basis",
"if",
"not",
"IS_UID_GID_RESOLVABLE",
":",
"raise",
"NotImplementedError",
"(",
"'UID/GID lookup is unavailable under current platform. '",
"'It can only be done under UNIX-like OS '",
"'but not... | Return the username and group tuple of the peercreds if available.
Raises:
NotImplementedError: in case of unsupported OS
RuntimeError: in case of UID/GID lookup unsupported or disabled | [
"Return",
"the",
"username",
"and",
"group",
"tuple",
"of",
"the",
"peercreds",
"if",
"available",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1420-L1442 | train | 205,750 |
cherrypy/cheroot | cheroot/server.py | HTTPConnection._close_kernel_socket | def _close_kernel_socket(self):
"""Close kernel socket in outdated Python versions.
On old Python versions,
Python's socket module does NOT call close on the kernel
socket when you call socket.close(). We do so manually here
because we want this server to send a FIN TCP segment
immediately. Note this must be called *before* calling
socket.close(), because the latter drops its reference to
the kernel socket.
"""
if six.PY2 and hasattr(self.socket, '_sock'):
self.socket._sock.close() | python | def _close_kernel_socket(self):
"""Close kernel socket in outdated Python versions.
On old Python versions,
Python's socket module does NOT call close on the kernel
socket when you call socket.close(). We do so manually here
because we want this server to send a FIN TCP segment
immediately. Note this must be called *before* calling
socket.close(), because the latter drops its reference to
the kernel socket.
"""
if six.PY2 and hasattr(self.socket, '_sock'):
self.socket._sock.close() | [
"def",
"_close_kernel_socket",
"(",
"self",
")",
":",
"if",
"six",
".",
"PY2",
"and",
"hasattr",
"(",
"self",
".",
"socket",
",",
"'_sock'",
")",
":",
"self",
".",
"socket",
".",
"_sock",
".",
"close",
"(",
")"
] | Close kernel socket in outdated Python versions.
On old Python versions,
Python's socket module does NOT call close on the kernel
socket when you call socket.close(). We do so manually here
because we want this server to send a FIN TCP segment
immediately. Note this must be called *before* calling
socket.close(), because the latter drops its reference to
the kernel socket. | [
"Close",
"kernel",
"socket",
"in",
"outdated",
"Python",
"versions",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1456-L1468 | train | 205,751 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.clear_stats | def clear_stats(self):
"""Reset server stat counters.."""
self._start_time = None
self._run_time = 0
self.stats = {
'Enabled': False,
'Bind Address': lambda s: repr(self.bind_addr),
'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
'Accepts': 0,
'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
'Queue': lambda s: getattr(self.requests, 'qsize', None),
'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
'Socket Errors': 0,
'Requests': lambda s: (not s['Enabled']) and -1 or sum(
[w['Requests'](w) for w in s['Worker Threads'].values()], 0,
),
'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0,
),
'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Written'](w) for w in s['Worker Threads'].values()],
0,
),
'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
[w['Work Time'](w) for w in s['Worker Threads'].values()], 0,
),
'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0,
),
'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0,
),
'Worker Threads': {},
}
logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats | python | def clear_stats(self):
"""Reset server stat counters.."""
self._start_time = None
self._run_time = 0
self.stats = {
'Enabled': False,
'Bind Address': lambda s: repr(self.bind_addr),
'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
'Accepts': 0,
'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
'Queue': lambda s: getattr(self.requests, 'qsize', None),
'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
'Socket Errors': 0,
'Requests': lambda s: (not s['Enabled']) and -1 or sum(
[w['Requests'](w) for w in s['Worker Threads'].values()], 0,
),
'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0,
),
'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Written'](w) for w in s['Worker Threads'].values()],
0,
),
'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
[w['Work Time'](w) for w in s['Worker Threads'].values()], 0,
),
'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0,
),
'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0,
),
'Worker Threads': {},
}
logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats | [
"def",
"clear_stats",
"(",
"self",
")",
":",
"self",
".",
"_start_time",
"=",
"None",
"self",
".",
"_run_time",
"=",
"0",
"self",
".",
"stats",
"=",
"{",
"'Enabled'",
":",
"False",
",",
"'Bind Address'",
":",
"lambda",
"s",
":",
"repr",
"(",
"self",
... | Reset server stat counters.. | [
"Reset",
"server",
"stat",
"counters",
".."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1610-L1647 | train | 205,752 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.runtime | def runtime(self):
"""Return server uptime."""
if self._start_time is None:
return self._run_time
else:
return self._run_time + (time.time() - self._start_time) | python | def runtime(self):
"""Return server uptime."""
if self._start_time is None:
return self._run_time
else:
return self._run_time + (time.time() - self._start_time) | [
"def",
"runtime",
"(",
"self",
")",
":",
"if",
"self",
".",
"_start_time",
"is",
"None",
":",
"return",
"self",
".",
"_run_time",
"else",
":",
"return",
"self",
".",
"_run_time",
"+",
"(",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"_start_time... | Return server uptime. | [
"Return",
"server",
"uptime",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1649-L1654 | train | 205,753 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.bind_addr | def bind_addr(self, value):
"""Set the interface on which to listen for connections."""
if isinstance(value, tuple) and value[0] in ('', None):
# Despite the socket module docs, using '' does not
# allow AI_PASSIVE to work. Passing None instead
# returns '0.0.0.0' like we want. In other words:
# host AI_PASSIVE result
# '' Y 192.168.x.y
# '' N 192.168.x.y
# None Y 0.0.0.0
# None N 127.0.0.1
# But since you can get the same effect with an explicit
# '0.0.0.0', we deny both the empty string and None as values.
raise ValueError(
"Host values of '' or None are not allowed. "
"Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
'to listen on all active interfaces.',
)
self._bind_addr = value | python | def bind_addr(self, value):
"""Set the interface on which to listen for connections."""
if isinstance(value, tuple) and value[0] in ('', None):
# Despite the socket module docs, using '' does not
# allow AI_PASSIVE to work. Passing None instead
# returns '0.0.0.0' like we want. In other words:
# host AI_PASSIVE result
# '' Y 192.168.x.y
# '' N 192.168.x.y
# None Y 0.0.0.0
# None N 127.0.0.1
# But since you can get the same effect with an explicit
# '0.0.0.0', we deny both the empty string and None as values.
raise ValueError(
"Host values of '' or None are not allowed. "
"Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
'to listen on all active interfaces.',
)
self._bind_addr = value | [
"def",
"bind_addr",
"(",
"self",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"tuple",
")",
"and",
"value",
"[",
"0",
"]",
"in",
"(",
"''",
",",
"None",
")",
":",
"# Despite the socket module docs, using '' does not",
"# allow AI_PASSIVE to w... | Set the interface on which to listen for connections. | [
"Set",
"the",
"interface",
"on",
"which",
"to",
"listen",
"for",
"connections",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1682-L1700 | train | 205,754 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.safe_start | def safe_start(self):
"""Run the server forever, and stop it cleanly on exit."""
try:
self.start()
except (KeyboardInterrupt, IOError):
# The time.sleep call might raise
# "IOError: [Errno 4] Interrupted function call" on KBInt.
self.error_log('Keyboard Interrupt: shutting down')
self.stop()
raise
except SystemExit:
self.error_log('SystemExit raised: shutting down')
self.stop()
raise | python | def safe_start(self):
"""Run the server forever, and stop it cleanly on exit."""
try:
self.start()
except (KeyboardInterrupt, IOError):
# The time.sleep call might raise
# "IOError: [Errno 4] Interrupted function call" on KBInt.
self.error_log('Keyboard Interrupt: shutting down')
self.stop()
raise
except SystemExit:
self.error_log('SystemExit raised: shutting down')
self.stop()
raise | [
"def",
"safe_start",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"start",
"(",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"IOError",
")",
":",
"# The time.sleep call might raise",
"# \"IOError: [Errno 4] Interrupted function call\" on KBInt.",
"self",
".",
"err... | Run the server forever, and stop it cleanly on exit. | [
"Run",
"the",
"server",
"forever",
"and",
"stop",
"it",
"cleanly",
"on",
"exit",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1702-L1715 | train | 205,755 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.prepare | def prepare(self):
"""Prepare server to serving requests.
It binds a socket's port, setups the socket to ``listen()`` and does
other preparing things.
"""
self._interrupt = None
if self.software is None:
self.software = '%s Server' % self.version
# Select the appropriate socket
self.socket = None
msg = 'No socket could be created'
if os.getenv('LISTEN_PID', None):
# systemd socket activation
self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
elif isinstance(self.bind_addr, six.string_types):
# AF_UNIX socket
try:
self.bind_unix_socket(self.bind_addr)
except socket.error as serr:
msg = '%s -- (%s: %s)' % (msg, self.bind_addr, serr)
six.raise_from(socket.error(msg), serr)
else:
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6
# addresses)
host, port = self.bind_addr
try:
info = socket.getaddrinfo(
host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM, 0, socket.AI_PASSIVE,
)
except socket.gaierror:
sock_type = socket.AF_INET
bind_addr = self.bind_addr
if ':' in host:
sock_type = socket.AF_INET6
bind_addr = bind_addr + (0, 0)
info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]
for res in info:
af, socktype, proto, canonname, sa = res
try:
self.bind(af, socktype, proto)
break
except socket.error as serr:
msg = '%s -- (%s: %s)' % (msg, sa, serr)
if self.socket:
self.socket.close()
self.socket = None
if not self.socket:
raise socket.error(msg)
# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)
# Create worker threads
self.requests.start()
self.ready = True
self._start_time = time.time() | python | def prepare(self):
"""Prepare server to serving requests.
It binds a socket's port, setups the socket to ``listen()`` and does
other preparing things.
"""
self._interrupt = None
if self.software is None:
self.software = '%s Server' % self.version
# Select the appropriate socket
self.socket = None
msg = 'No socket could be created'
if os.getenv('LISTEN_PID', None):
# systemd socket activation
self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
elif isinstance(self.bind_addr, six.string_types):
# AF_UNIX socket
try:
self.bind_unix_socket(self.bind_addr)
except socket.error as serr:
msg = '%s -- (%s: %s)' % (msg, self.bind_addr, serr)
six.raise_from(socket.error(msg), serr)
else:
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6
# addresses)
host, port = self.bind_addr
try:
info = socket.getaddrinfo(
host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM, 0, socket.AI_PASSIVE,
)
except socket.gaierror:
sock_type = socket.AF_INET
bind_addr = self.bind_addr
if ':' in host:
sock_type = socket.AF_INET6
bind_addr = bind_addr + (0, 0)
info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]
for res in info:
af, socktype, proto, canonname, sa = res
try:
self.bind(af, socktype, proto)
break
except socket.error as serr:
msg = '%s -- (%s: %s)' % (msg, sa, serr)
if self.socket:
self.socket.close()
self.socket = None
if not self.socket:
raise socket.error(msg)
# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)
# Create worker threads
self.requests.start()
self.ready = True
self._start_time = time.time() | [
"def",
"prepare",
"(",
"self",
")",
":",
"self",
".",
"_interrupt",
"=",
"None",
"if",
"self",
".",
"software",
"is",
"None",
":",
"self",
".",
"software",
"=",
"'%s Server'",
"%",
"self",
".",
"version",
"# Select the appropriate socket",
"self",
".",
"so... | Prepare server to serving requests.
It binds a socket's port, setups the socket to ``listen()`` and does
other preparing things. | [
"Prepare",
"server",
"to",
"serving",
"requests",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1717-L1783 | train | 205,756 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.error_log | def error_log(self, msg='', level=20, traceback=False):
"""Write error message to log.
Args:
msg (str): error message
level (int): logging level
traceback (bool): add traceback to output or not
"""
# Override this in subclasses as desired
sys.stderr.write(msg + '\n')
sys.stderr.flush()
if traceback:
tblines = traceback_.format_exc()
sys.stderr.write(tblines)
sys.stderr.flush() | python | def error_log(self, msg='', level=20, traceback=False):
"""Write error message to log.
Args:
msg (str): error message
level (int): logging level
traceback (bool): add traceback to output or not
"""
# Override this in subclasses as desired
sys.stderr.write(msg + '\n')
sys.stderr.flush()
if traceback:
tblines = traceback_.format_exc()
sys.stderr.write(tblines)
sys.stderr.flush() | [
"def",
"error_log",
"(",
"self",
",",
"msg",
"=",
"''",
",",
"level",
"=",
"20",
",",
"traceback",
"=",
"False",
")",
":",
"# Override this in subclasses as desired",
"sys",
".",
"stderr",
".",
"write",
"(",
"msg",
"+",
"'\\n'",
")",
"sys",
".",
"stderr"... | Write error message to log.
Args:
msg (str): error message
level (int): logging level
traceback (bool): add traceback to output or not | [
"Write",
"error",
"message",
"to",
"log",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1817-L1831 | train | 205,757 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.prepare_socket | def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter):
"""Create and prepare the socket object."""
sock = socket.socket(family, type, proto)
prevent_socket_inheritance(sock)
host, port = bind_addr[:2]
IS_EPHEMERAL_PORT = port == 0
if not (IS_WINDOWS or IS_EPHEMERAL_PORT):
"""Enable SO_REUSEADDR for the current socket.
Skip for Windows (has different semantics)
or ephemeral ports (can steal ports from others).
Refs:
* https://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
* https://github.com/cherrypy/cheroot/issues/114
* https://gavv.github.io/blog/ephemeral-port-reuse/
"""
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if nodelay and not isinstance(bind_addr, str):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if ssl_adapter is not None:
sock = ssl_adapter.bind(sock)
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See
# https://github.com/cherrypy/cherrypy/issues/871.
listening_ipv6 = (
hasattr(socket, 'AF_INET6')
and family == socket.AF_INET6
and host in ('::', '::0', '::0.0.0.0')
)
if listening_ipv6:
try:
sock.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0,
)
except (AttributeError, socket.error):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
return sock | python | def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter):
"""Create and prepare the socket object."""
sock = socket.socket(family, type, proto)
prevent_socket_inheritance(sock)
host, port = bind_addr[:2]
IS_EPHEMERAL_PORT = port == 0
if not (IS_WINDOWS or IS_EPHEMERAL_PORT):
"""Enable SO_REUSEADDR for the current socket.
Skip for Windows (has different semantics)
or ephemeral ports (can steal ports from others).
Refs:
* https://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
* https://github.com/cherrypy/cheroot/issues/114
* https://gavv.github.io/blog/ephemeral-port-reuse/
"""
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if nodelay and not isinstance(bind_addr, str):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if ssl_adapter is not None:
sock = ssl_adapter.bind(sock)
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See
# https://github.com/cherrypy/cherrypy/issues/871.
listening_ipv6 = (
hasattr(socket, 'AF_INET6')
and family == socket.AF_INET6
and host in ('::', '::0', '::0.0.0.0')
)
if listening_ipv6:
try:
sock.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0,
)
except (AttributeError, socket.error):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
return sock | [
"def",
"prepare_socket",
"(",
"bind_addr",
",",
"family",
",",
"type",
",",
"proto",
",",
"nodelay",
",",
"ssl_adapter",
")",
":",
"sock",
"=",
"socket",
".",
"socket",
"(",
"family",
",",
"type",
",",
"proto",
")",
"prevent_socket_inheritance",
"(",
"sock... | Create and prepare the socket object. | [
"Create",
"and",
"prepare",
"the",
"socket",
"object",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1909-L1953 | train | 205,758 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.resolve_real_bind_addr | def resolve_real_bind_addr(socket_):
"""Retrieve actual bind addr from bound socket."""
# FIXME: keep requested bind_addr separate real bound_addr (port
# is different in case of ephemeral port 0)
bind_addr = socket_.getsockname()
if socket_.family in (
# Windows doesn't have socket.AF_UNIX, so not using it in check
socket.AF_INET,
socket.AF_INET6,
):
"""UNIX domain sockets are strings or bytes.
In case of bytes with a leading null-byte it's an abstract socket.
"""
return bind_addr[:2]
return bind_addr | python | def resolve_real_bind_addr(socket_):
"""Retrieve actual bind addr from bound socket."""
# FIXME: keep requested bind_addr separate real bound_addr (port
# is different in case of ephemeral port 0)
bind_addr = socket_.getsockname()
if socket_.family in (
# Windows doesn't have socket.AF_UNIX, so not using it in check
socket.AF_INET,
socket.AF_INET6,
):
"""UNIX domain sockets are strings or bytes.
In case of bytes with a leading null-byte it's an abstract socket.
"""
return bind_addr[:2]
return bind_addr | [
"def",
"resolve_real_bind_addr",
"(",
"socket_",
")",
":",
"# FIXME: keep requested bind_addr separate real bound_addr (port",
"# is different in case of ephemeral port 0)",
"bind_addr",
"=",
"socket_",
".",
"getsockname",
"(",
")",
"if",
"socket_",
".",
"family",
"in",
"(",
... | Retrieve actual bind addr from bound socket. | [
"Retrieve",
"actual",
"bind",
"addr",
"from",
"bound",
"socket",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L1962-L1977 | train | 205,759 |
cherrypy/cheroot | cheroot/server.py | HTTPServer.interrupt | def interrupt(self, interrupt):
"""Perform the shutdown of this server and save the exception."""
self._interrupt = True
self.stop()
self._interrupt = interrupt | python | def interrupt(self, interrupt):
"""Perform the shutdown of this server and save the exception."""
self._interrupt = True
self.stop()
self._interrupt = interrupt | [
"def",
"interrupt",
"(",
"self",
",",
"interrupt",
")",
":",
"self",
".",
"_interrupt",
"=",
"True",
"self",
".",
"stop",
"(",
")",
"self",
".",
"_interrupt",
"=",
"interrupt"
] | Perform the shutdown of this server and save the exception. | [
"Perform",
"the",
"shutdown",
"of",
"this",
"server",
"and",
"save",
"the",
"exception",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/server.py#L2081-L2085 | train | 205,760 |
cherrypy/cheroot | cheroot/workers/threadpool.py | WorkerThread.run | def run(self):
"""Process incoming HTTP connections.
Retrieves incoming connections from thread pool.
"""
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
self.ready = True
while True:
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
self.conn = conn
if self.server.stats['Enabled']:
self.start_time = time.time()
try:
conn.communicate()
finally:
conn.close()
if self.server.stats['Enabled']:
self.requests_seen += self.conn.requests_seen
self.bytes_read += self.conn.rfile.bytes_read
self.bytes_written += self.conn.wfile.bytes_written
self.work_time += time.time() - self.start_time
self.start_time = None
self.conn = None
except (KeyboardInterrupt, SystemExit) as ex:
self.server.interrupt = ex | python | def run(self):
"""Process incoming HTTP connections.
Retrieves incoming connections from thread pool.
"""
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
self.ready = True
while True:
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
self.conn = conn
if self.server.stats['Enabled']:
self.start_time = time.time()
try:
conn.communicate()
finally:
conn.close()
if self.server.stats['Enabled']:
self.requests_seen += self.conn.requests_seen
self.bytes_read += self.conn.rfile.bytes_read
self.bytes_written += self.conn.wfile.bytes_written
self.work_time += time.time() - self.start_time
self.start_time = None
self.conn = None
except (KeyboardInterrupt, SystemExit) as ex:
self.server.interrupt = ex | [
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"server",
".",
"stats",
"[",
"'Worker Threads'",
"]",
"[",
"self",
".",
"getName",
"(",
")",
"]",
"=",
"self",
".",
"stats",
"try",
":",
"self",
".",
"ready",
"=",
"True",
"while",
"True",
":",
"c... | Process incoming HTTP connections.
Retrieves incoming connections from thread pool. | [
"Process",
"incoming",
"HTTP",
"connections",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/workers/threadpool.py#L97-L125 | train | 205,761 |
cherrypy/cheroot | cheroot/workers/threadpool.py | ThreadPool.put | def put(self, obj):
"""Put request into queue.
Args:
obj (cheroot.server.HTTPConnection): HTTP connection
waiting to be processed
"""
self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
if obj is _SHUTDOWNREQUEST:
return | python | def put(self, obj):
"""Put request into queue.
Args:
obj (cheroot.server.HTTPConnection): HTTP connection
waiting to be processed
"""
self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
if obj is _SHUTDOWNREQUEST:
return | [
"def",
"put",
"(",
"self",
",",
"obj",
")",
":",
"self",
".",
"_queue",
".",
"put",
"(",
"obj",
",",
"block",
"=",
"True",
",",
"timeout",
"=",
"self",
".",
"_queue_put_timeout",
")",
"if",
"obj",
"is",
"_SHUTDOWNREQUEST",
":",
"return"
] | Put request into queue.
Args:
obj (cheroot.server.HTTPConnection): HTTP connection
waiting to be processed | [
"Put",
"request",
"into",
"queue",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/workers/threadpool.py#L175-L184 | train | 205,762 |
cherrypy/cheroot | cheroot/workers/threadpool.py | ThreadPool.stop | def stop(self, timeout=5):
"""Terminate all worker threads.
Args:
timeout (int): time to wait for threads to stop gracefully
"""
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout is not None and timeout >= 0:
endtime = time.time() + timeout
while self._threads:
worker = self._threads.pop()
if worker is not current and worker.isAlive():
try:
if timeout is None or timeout < 0:
worker.join()
else:
remaining_time = endtime - time.time()
if remaining_time > 0:
worker.join(remaining_time)
if worker.isAlive():
# We exhausted the timeout.
# Forcibly shut down the socket.
c = worker.conn
if c and not c.rfile.closed:
try:
c.socket.shutdown(socket.SHUT_RD)
except TypeError:
# pyOpenSSL sockets don't take an arg
c.socket.shutdown()
worker.join()
except (
AssertionError,
# Ignore repeated Ctrl-C.
# See
# https://github.com/cherrypy/cherrypy/issues/691.
KeyboardInterrupt,
):
pass | python | def stop(self, timeout=5):
"""Terminate all worker threads.
Args:
timeout (int): time to wait for threads to stop gracefully
"""
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout is not None and timeout >= 0:
endtime = time.time() + timeout
while self._threads:
worker = self._threads.pop()
if worker is not current and worker.isAlive():
try:
if timeout is None or timeout < 0:
worker.join()
else:
remaining_time = endtime - time.time()
if remaining_time > 0:
worker.join(remaining_time)
if worker.isAlive():
# We exhausted the timeout.
# Forcibly shut down the socket.
c = worker.conn
if c and not c.rfile.closed:
try:
c.socket.shutdown(socket.SHUT_RD)
except TypeError:
# pyOpenSSL sockets don't take an arg
c.socket.shutdown()
worker.join()
except (
AssertionError,
# Ignore repeated Ctrl-C.
# See
# https://github.com/cherrypy/cherrypy/issues/691.
KeyboardInterrupt,
):
pass | [
"def",
"stop",
"(",
"self",
",",
"timeout",
"=",
"5",
")",
":",
"# Must shut down threads here so the code that calls",
"# this method can know when all threads are stopped.",
"for",
"worker",
"in",
"self",
".",
"_threads",
":",
"self",
".",
"_queue",
".",
"put",
"(",... | Terminate all worker threads.
Args:
timeout (int): time to wait for threads to stop gracefully | [
"Terminate",
"all",
"worker",
"threads",
"."
] | 2af3b1798d66da697957480d3a8b4831a405770b | https://github.com/cherrypy/cheroot/blob/2af3b1798d66da697957480d3a8b4831a405770b/cheroot/workers/threadpool.py#L228-L271 | train | 205,763 |
jendrikseipp/vulture | vulture/lines.py | _get_last_child_with_lineno | def _get_last_child_with_lineno(node):
"""
Return the last direct child of `node` that has a lineno attribute,
or None if `node` has no such children.
Almost all node._field lists are sorted by the order in which they
appear in source code. For some nodes however, we have to skip some
fields that either don't have line numbers (e.g., "ctx" and "names")
or that are in the wrong position (e.g., "decorator_list" and
"returns"). Then we choose the first field (i.e., the field with the
highest line number) that actually contains a node. If it contains a
list of nodes, we return the last one.
"""
ignored_fields = set(['ctx', 'decorator_list', 'names', 'returns'])
fields = node._fields
# The fields of ast.Call are in the wrong order.
if isinstance(node, ast.Call):
fields = ('func', 'args', 'starargs', 'keywords', 'kwargs')
for name in reversed(fields):
if name in ignored_fields:
continue
try:
last_field = getattr(node, name)
except AttributeError:
continue
# Ignore non-AST objects like "is_async", "level" and "nl".
if isinstance(last_field, ast.AST):
return last_field
elif isinstance(last_field, list) and last_field:
return last_field[-1]
return None | python | def _get_last_child_with_lineno(node):
"""
Return the last direct child of `node` that has a lineno attribute,
or None if `node` has no such children.
Almost all node._field lists are sorted by the order in which they
appear in source code. For some nodes however, we have to skip some
fields that either don't have line numbers (e.g., "ctx" and "names")
or that are in the wrong position (e.g., "decorator_list" and
"returns"). Then we choose the first field (i.e., the field with the
highest line number) that actually contains a node. If it contains a
list of nodes, we return the last one.
"""
ignored_fields = set(['ctx', 'decorator_list', 'names', 'returns'])
fields = node._fields
# The fields of ast.Call are in the wrong order.
if isinstance(node, ast.Call):
fields = ('func', 'args', 'starargs', 'keywords', 'kwargs')
for name in reversed(fields):
if name in ignored_fields:
continue
try:
last_field = getattr(node, name)
except AttributeError:
continue
# Ignore non-AST objects like "is_async", "level" and "nl".
if isinstance(last_field, ast.AST):
return last_field
elif isinstance(last_field, list) and last_field:
return last_field[-1]
return None | [
"def",
"_get_last_child_with_lineno",
"(",
"node",
")",
":",
"ignored_fields",
"=",
"set",
"(",
"[",
"'ctx'",
",",
"'decorator_list'",
",",
"'names'",
",",
"'returns'",
"]",
")",
"fields",
"=",
"node",
".",
"_fields",
"# The fields of ast.Call are in the wrong order... | Return the last direct child of `node` that has a lineno attribute,
or None if `node` has no such children.
Almost all node._field lists are sorted by the order in which they
appear in source code. For some nodes however, we have to skip some
fields that either don't have line numbers (e.g., "ctx" and "names")
or that are in the wrong position (e.g., "decorator_list" and
"returns"). Then we choose the first field (i.e., the field with the
highest line number) that actually contains a node. If it contains a
list of nodes, we return the last one. | [
"Return",
"the",
"last",
"direct",
"child",
"of",
"node",
"that",
"has",
"a",
"lineno",
"attribute",
"or",
"None",
"if",
"node",
"has",
"no",
"such",
"children",
"."
] | fed11fb7e7ed065058a9fb1acd10052ece37f984 | https://github.com/jendrikseipp/vulture/blob/fed11fb7e7ed065058a9fb1acd10052ece37f984/vulture/lines.py#L4-L37 | train | 205,764 |
jendrikseipp/vulture | vulture/lines.py | get_last_line_number | def get_last_line_number(node):
"""Estimate last line number of the given AST node.
The estimate is based on the line number of the last descendant of
`node` that has a lineno attribute. Therefore, it underestimates the
size of code ending with, e.g., multiline strings and comments.
When traversing the tree, we may see a mix of nodes with line
numbers and nodes without line numbers. We therefore, store the
maximum line number seen so far and report it at the end. A more
accurate (but also slower to compute) estimate would traverse all
children, instead of just the last one, since choosing the last one
may lead to a path that ends with a node without line number.
"""
max_lineno = node.lineno
while True:
last_child = _get_last_child_with_lineno(node)
if last_child is None:
return max_lineno
else:
try:
max_lineno = max(max_lineno, last_child.lineno)
except AttributeError:
pass
node = last_child | python | def get_last_line_number(node):
"""Estimate last line number of the given AST node.
The estimate is based on the line number of the last descendant of
`node` that has a lineno attribute. Therefore, it underestimates the
size of code ending with, e.g., multiline strings and comments.
When traversing the tree, we may see a mix of nodes with line
numbers and nodes without line numbers. We therefore, store the
maximum line number seen so far and report it at the end. A more
accurate (but also slower to compute) estimate would traverse all
children, instead of just the last one, since choosing the last one
may lead to a path that ends with a node without line number.
"""
max_lineno = node.lineno
while True:
last_child = _get_last_child_with_lineno(node)
if last_child is None:
return max_lineno
else:
try:
max_lineno = max(max_lineno, last_child.lineno)
except AttributeError:
pass
node = last_child | [
"def",
"get_last_line_number",
"(",
"node",
")",
":",
"max_lineno",
"=",
"node",
".",
"lineno",
"while",
"True",
":",
"last_child",
"=",
"_get_last_child_with_lineno",
"(",
"node",
")",
"if",
"last_child",
"is",
"None",
":",
"return",
"max_lineno",
"else",
":"... | Estimate last line number of the given AST node.
The estimate is based on the line number of the last descendant of
`node` that has a lineno attribute. Therefore, it underestimates the
size of code ending with, e.g., multiline strings and comments.
When traversing the tree, we may see a mix of nodes with line
numbers and nodes without line numbers. We therefore, store the
maximum line number seen so far and report it at the end. A more
accurate (but also slower to compute) estimate would traverse all
children, instead of just the last one, since choosing the last one
may lead to a path that ends with a node without line number. | [
"Estimate",
"last",
"line",
"number",
"of",
"the",
"given",
"AST",
"node",
"."
] | fed11fb7e7ed065058a9fb1acd10052ece37f984 | https://github.com/jendrikseipp/vulture/blob/fed11fb7e7ed065058a9fb1acd10052ece37f984/vulture/lines.py#L40-L65 | train | 205,765 |
jendrikseipp/vulture | vulture/utils.py | _safe_eval | def _safe_eval(node, default):
"""
Safely evaluate the Boolean expression under the given AST node.
Substitute `default` for all sub-expressions that cannot be
evaluated (because variables or functions are undefined).
We could use eval() to evaluate more sub-expressions. However, this
function is not safe for arbitrary Python code. Even after
overwriting the "__builtins__" dictionary, the original dictionary
can be restored
(https://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html).
"""
if isinstance(node, ast.BoolOp):
results = [_safe_eval(value, default) for value in node.values]
if isinstance(node.op, ast.And):
return all(results)
else:
return any(results)
elif isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
return not _safe_eval(node.operand, not default)
else:
try:
return ast.literal_eval(node)
except ValueError:
return default | python | def _safe_eval(node, default):
"""
Safely evaluate the Boolean expression under the given AST node.
Substitute `default` for all sub-expressions that cannot be
evaluated (because variables or functions are undefined).
We could use eval() to evaluate more sub-expressions. However, this
function is not safe for arbitrary Python code. Even after
overwriting the "__builtins__" dictionary, the original dictionary
can be restored
(https://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html).
"""
if isinstance(node, ast.BoolOp):
results = [_safe_eval(value, default) for value in node.values]
if isinstance(node.op, ast.And):
return all(results)
else:
return any(results)
elif isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
return not _safe_eval(node.operand, not default)
else:
try:
return ast.literal_eval(node)
except ValueError:
return default | [
"def",
"_safe_eval",
"(",
"node",
",",
"default",
")",
":",
"if",
"isinstance",
"(",
"node",
",",
"ast",
".",
"BoolOp",
")",
":",
"results",
"=",
"[",
"_safe_eval",
"(",
"value",
",",
"default",
")",
"for",
"value",
"in",
"node",
".",
"values",
"]",
... | Safely evaluate the Boolean expression under the given AST node.
Substitute `default` for all sub-expressions that cannot be
evaluated (because variables or functions are undefined).
We could use eval() to evaluate more sub-expressions. However, this
function is not safe for arbitrary Python code. Even after
overwriting the "__builtins__" dictionary, the original dictionary
can be restored
(https://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html). | [
"Safely",
"evaluate",
"the",
"Boolean",
"expression",
"under",
"the",
"given",
"AST",
"node",
"."
] | fed11fb7e7ed065058a9fb1acd10052ece37f984 | https://github.com/jendrikseipp/vulture/blob/fed11fb7e7ed065058a9fb1acd10052ece37f984/vulture/utils.py#L15-L41 | train | 205,766 |
def get_modules(paths, toplevel=True):
    """Take files from the command line even if they don't end with .py."""
    collected = []
    for raw_path in paths:
        full_path = os.path.abspath(raw_path)
        if toplevel and full_path.endswith('.pyc'):
            sys.exit('.pyc files are not supported: {0}'.format(full_path))
        if os.path.isfile(full_path) and (full_path.endswith('.py')
                                          or toplevel):
            collected.append(full_path)
        elif os.path.isdir(full_path):
            # Recurse into directories; below the top level only *.py
            # files are accepted.
            entries = sorted(os.listdir(full_path))
            children = [os.path.join(full_path, entry) for entry in entries]
            collected.extend(get_modules(children, toplevel=False))
        elif toplevel:
            sys.exit('Error: {0} could not be found.'.format(full_path))
    return collected
"def",
"get_modules",
"(",
"paths",
",",
"toplevel",
"=",
"True",
")",
":",
"modules",
"=",
"[",
"]",
"for",
"path",
"in",
"paths",
":",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"path",
")",
"if",
"toplevel",
"and",
"path",
".",
"endswi... | Take files from the command line even if they don't end with .py. | [
"Take",
"files",
"from",
"the",
"command",
"line",
"even",
"if",
"they",
"don",
"t",
"end",
"with",
".",
"py",
"."
] | fed11fb7e7ed065058a9fb1acd10052ece37f984 | https://github.com/jendrikseipp/vulture/blob/fed11fb7e7ed065058a9fb1acd10052ece37f984/vulture/utils.py#L70-L86 | train | 205,767 |
def get_unused_code(self, min_confidence=0, sort_by_size=False):
    """Return ordered list of unused Item objects.

    Items below `min_confidence` are dropped; the rest are sorted by
    file name and line number, or by size first if `sort_by_size`.
    """
    if min_confidence < 0 or min_confidence > 100:
        raise ValueError('min_confidence must be between 0 and 100.')

    def name_key(item):
        return (item.filename.lower(), item.first_lineno)

    def size_key(item):
        # Break size ties deterministically by name and line.
        return (item.size,) + name_key(item)

    candidates = (self.unused_attrs + self.unused_classes +
                  self.unused_funcs + self.unused_imports +
                  self.unused_props + self.unused_vars +
                  self.unreachable_code)
    kept = [item for item in candidates
            if item.confidence >= min_confidence]
    return sorted(kept, key=size_key if sort_by_size else name_key)
"def",
"get_unused_code",
"(",
"self",
",",
"min_confidence",
"=",
"0",
",",
"sort_by_size",
"=",
"False",
")",
":",
"if",
"not",
"0",
"<=",
"min_confidence",
"<=",
"100",
":",
"raise",
"ValueError",
"(",
"'min_confidence must be between 0 and 100.'",
")",
"def"... | Return ordered list of unused Item objects. | [
"Return",
"ordered",
"list",
"of",
"unused",
"Item",
"objects",
"."
] | fed11fb7e7ed065058a9fb1acd10052ece37f984 | https://github.com/jendrikseipp/vulture/blob/fed11fb7e7ed065058a9fb1acd10052ece37f984/vulture/core.py#L250-L272 | train | 205,768 |
def report(self, min_confidence=0, sort_by_size=False,
           make_whitelist=False):
    """Print ordered list of Item objects to stdout.

    Returns True if any dead code (or an earlier error) was found.
    """
    items = self.get_unused_code(
        min_confidence=min_confidence, sort_by_size=sort_by_size)
    for item in items:
        if make_whitelist:
            line = item.get_whitelist_string()
        else:
            line = item.get_report(add_size=sort_by_size)
        print(line)
        # Any reported item means dead code was found.
        self.found_dead_code_or_error = True
    return self.found_dead_code_or_error
"def",
"report",
"(",
"self",
",",
"min_confidence",
"=",
"0",
",",
"sort_by_size",
"=",
"False",
",",
"make_whitelist",
"=",
"False",
")",
":",
"for",
"item",
"in",
"self",
".",
"get_unused_code",
"(",
"min_confidence",
"=",
"min_confidence",
",",
"sort_by_... | Print ordered list of Item objects to stdout. | [
"Print",
"ordered",
"list",
"of",
"Item",
"objects",
"to",
"stdout",
"."
] | fed11fb7e7ed065058a9fb1acd10052ece37f984 | https://github.com/jendrikseipp/vulture/blob/fed11fb7e7ed065058a9fb1acd10052ece37f984/vulture/core.py#L274-L284 | train | 205,769 |
jendrikseipp/vulture | vulture/core.py | Vulture._handle_ast_list | def _handle_ast_list(self, ast_list):
"""
Find unreachable nodes in the given sequence of ast nodes.
"""
for index, node in enumerate(ast_list):
if isinstance(node, (ast.Break, ast.Continue, ast.Raise,
ast.Return)):
try:
first_unreachable_node = ast_list[index + 1]
except IndexError:
continue
class_name = node.__class__.__name__.lower()
self._define(
self.unreachable_code,
class_name,
first_unreachable_node,
last_node=ast_list[-1],
message="unreachable code after '{class_name}'".format(
**locals()),
confidence=100)
return | python | def _handle_ast_list(self, ast_list):
"""
Find unreachable nodes in the given sequence of ast nodes.
"""
for index, node in enumerate(ast_list):
if isinstance(node, (ast.Break, ast.Continue, ast.Raise,
ast.Return)):
try:
first_unreachable_node = ast_list[index + 1]
except IndexError:
continue
class_name = node.__class__.__name__.lower()
self._define(
self.unreachable_code,
class_name,
first_unreachable_node,
last_node=ast_list[-1],
message="unreachable code after '{class_name}'".format(
**locals()),
confidence=100)
return | [
"def",
"_handle_ast_list",
"(",
"self",
",",
"ast_list",
")",
":",
"for",
"index",
",",
"node",
"in",
"enumerate",
"(",
"ast_list",
")",
":",
"if",
"isinstance",
"(",
"node",
",",
"(",
"ast",
".",
"Break",
",",
"ast",
".",
"Continue",
",",
"ast",
"."... | Find unreachable nodes in the given sequence of ast nodes. | [
"Find",
"unreachable",
"nodes",
"in",
"the",
"given",
"sequence",
"of",
"ast",
"nodes",
"."
] | fed11fb7e7ed065058a9fb1acd10052ece37f984 | https://github.com/jendrikseipp/vulture/blob/fed11fb7e7ed065058a9fb1acd10052ece37f984/vulture/core.py#L488-L508 | train | 205,770 |
def find_occurrences(project, resource, offset, unsure=False, resources=None,
                     in_hierarchy=False,
                     task_handle=taskhandle.NullTaskHandle()):
    """Return a list of `Location` objects for the name at `offset`.

    If `unsure` is `True`, possible matches are returned, too.  You
    can use `Location.unsure` to see which are unsure occurrences.

    `resources` can be a list of `rope.base.resource.File` objects
    that should be searched for occurrences; if `None`, all python
    files in the project are searched.
    """
    name = worder.get_name_at(resource, offset)
    pymodule = project.get_pymodule(resource)
    primary, pyname = rope.base.evaluate.eval_location2(pymodule, offset)

    def accept_unsure(occurrence):
        # Unsure matches are kept only when the caller asked for them.
        return unsure

    finder = occurrences.create_finder(
        project, name, pyname, unsure=accept_unsure,
        in_hierarchy=in_hierarchy, instance=primary)
    searched = resources if resources is not None else \
        project.get_python_files()
    job_set = task_handle.create_jobset('Finding Occurrences',
                                        count=len(searched))
    return _find_locations(finder, searched, job_set)
"def",
"find_occurrences",
"(",
"project",
",",
"resource",
",",
"offset",
",",
"unsure",
"=",
"False",
",",
"resources",
"=",
"None",
",",
"in_hierarchy",
"=",
"False",
",",
"task_handle",
"=",
"taskhandle",
".",
"NullTaskHandle",
"(",
")",
")",
":",
"nam... | Return a list of `Location`\s
If `unsure` is `True`, possible matches are returned, too. You
can use `Location.unsure` to see which are unsure occurrences.
`resources` can be a list of `rope.base.resource.File`\s that
should be searched for occurrences; if `None` all python files
in the project are searched. | [
"Return",
"a",
"list",
"of",
"Location",
"\\",
"s"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/contrib/findit.py#L9-L35 | train | 205,771 |
def find_implementations(project, resource, offset, resources=None,
                         task_handle=taskhandle.NullTaskHandle()):
    """Find the places a given method is overridden.

    Finds the places a method is implemented.  Returns a list of
    `Location` objects.  Raises `BadIdentifierError` when the offset
    does not point at a resolvable method name.
    """
    name = worder.get_name_at(resource, offset)
    pymodule = project.get_pymodule(resource)
    pyname = rope.base.evaluate.eval_location(pymodule, offset)
    if pyname is None:
        raise exceptions.BadIdentifierError('Cannot resolve the identifier!')
    pyobject = pyname.get_object()
    is_method = (isinstance(pyobject, rope.base.pyobjects.PyFunction) and
                 pyobject.get_kind() == 'method')
    if not is_method:
        raise exceptions.BadIdentifierError('Not a method!')

    def is_defined(occurrence):
        # Only occurrences that *define* the name count as
        # implementations; returning None means "no opinion".
        if not occurrence.is_defined():
            return False

    def not_self(occurrence):
        # Skip the definition the user started from.
        if occurrence.get_pyname().get_object() == pyname.get_object():
            return False

    filters = [is_defined, not_self,
               occurrences.InHierarchyFilter(pyname, True)]
    finder = occurrences.Finder(project, name, filters=filters)
    searched = resources if resources is not None else \
        project.get_python_files()
    job_set = task_handle.create_jobset('Finding Implementations',
                                        count=len(searched))
    return _find_locations(finder, searched, job_set)
"def",
"find_implementations",
"(",
"project",
",",
"resource",
",",
"offset",
",",
"resources",
"=",
"None",
",",
"task_handle",
"=",
"taskhandle",
".",
"NullTaskHandle",
"(",
")",
")",
":",
"name",
"=",
"worder",
".",
"get_name_at",
"(",
"resource",
",",
... | Find the places a given method is overridden.
Finds the places a method is implemented. Returns a list of
`Location`\s. | [
"Find",
"the",
"places",
"a",
"given",
"method",
"is",
"overridden",
"."
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/contrib/findit.py#L38-L70 | train | 205,772 |
def find_errors(project, resource):
    """Find possible bad name and attribute accesses.

    It returns a list of `Error` objects collected while walking the
    module's AST.
    """
    pymodule = project.get_pymodule(resource)
    collector = _BadAccessFinder(pymodule)
    ast.walk(pymodule.get_ast(), collector)
    return collector.errors
"def",
"find_errors",
"(",
"project",
",",
"resource",
")",
":",
"pymodule",
"=",
"project",
".",
"get_pymodule",
"(",
"resource",
")",
"finder",
"=",
"_BadAccessFinder",
"(",
"pymodule",
")",
"ast",
".",
"walk",
"(",
"pymodule",
".",
"get_ast",
"(",
")",
... | Find possible bad name and attribute accesses
It returns a list of `Error`\s. | [
"Find",
"possible",
"bad",
"name",
"and",
"attribute",
"accesses"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/contrib/finderrors.py#L29-L37 | train | 205,773 |
def transform(self, pyobject):
    """Transform a `PyObject` to textual form.

    Dispatches on the object's class name to a ``<name>_to_textual``
    method; unknown classes map to ``('unknown',)``.
    """
    if pyobject is None:
        return ('none',)
    handler_name = type(pyobject).__name__ + '_to_textual'
    try:
        # The handler call stays inside the try so AttributeErrors
        # raised by the handler itself also fall back to 'unknown',
        # matching the established behavior.
        return getattr(self, handler_name)(pyobject)
    except AttributeError:
        return ('unknown',)
"def",
"transform",
"(",
"self",
",",
"pyobject",
")",
":",
"if",
"pyobject",
"is",
"None",
":",
"return",
"(",
"'none'",
",",
")",
"object_type",
"=",
"type",
"(",
"pyobject",
")",
"try",
":",
"method",
"=",
"getattr",
"(",
"self",
",",
"object_type",... | Transform a `PyObject` to textual form | [
"Transform",
"a",
"PyObject",
"to",
"textual",
"form"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/oi/transform.py#L20-L29 | train | 205,774 |
def transform(self, textual):
    """Transform an object from textual form to `PyObject`.

    The first element of the tuple selects a ``<kind>_to_pyobject``
    handler; missing handlers yield None.
    """
    if textual is None:
        return None
    kind = textual[0]
    try:
        # Handler call kept inside the try: an AttributeError raised by
        # the handler also results in None, as before.
        return getattr(self, kind + '_to_pyobject')(textual)
    except AttributeError:
        return None
"def",
"transform",
"(",
"self",
",",
"textual",
")",
":",
"if",
"textual",
"is",
"None",
":",
"return",
"None",
"type",
"=",
"textual",
"[",
"0",
"]",
"try",
":",
"method",
"=",
"getattr",
"(",
"self",
",",
"type",
"+",
"'_to_pyobject'",
")",
"retur... | Transform an object from textual form to `PyObject` | [
"Transform",
"an",
"object",
"from",
"textual",
"form",
"to",
"PyObject"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/oi/transform.py#L111-L120 | train | 205,775 |
def get_patched_ast(source, sorted_children=False):
    """Adds ``region`` and ``sorted_children`` fields to nodes.

    Adds the ``sorted_children`` field only if `sorted_children` is
    True.
    """
    tree = ast.parse(source)
    return patch_ast(tree, source, sorted_children)
"def",
"get_patched_ast",
"(",
"source",
",",
"sorted_children",
"=",
"False",
")",
":",
"return",
"patch_ast",
"(",
"ast",
".",
"parse",
"(",
"source",
")",
",",
"source",
",",
"sorted_children",
")"
] | Adds ``region`` and ``sorted_children`` fields to nodes
Adds ``sorted_children`` field only if `sorted_children` is True. | [
"Adds",
"region",
"and",
"sorted_children",
"fields",
"to",
"nodes"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/patchedast.py#L14-L20 | train | 205,776 |
def patch_ast(node, source, sorted_children=False):
    """Patches the given node.

    After calling, each node in `node` will have a new field named
    `region` that is a tuple containing the start and end offsets
    of the code that generated it.

    If `sorted_children` is true, a `sorted_children` field will
    be created for each node, too. It is a list containing child
    nodes as well as whitespaces and comments that occur between
    them.
    """
    # A `region` attribute means the tree has been patched already.
    if hasattr(node, 'region'):
        return node
    visitor = _PatchingASTWalker(source, children=sorted_children)
    ast.call_for_nodes(node, visitor)
    return node
"def",
"patch_ast",
"(",
"node",
",",
"source",
",",
"sorted_children",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"node",
",",
"'region'",
")",
":",
"return",
"node",
"walker",
"=",
"_PatchingASTWalker",
"(",
"source",
",",
"children",
"=",
"sorted_chi... | Patches the given node
After calling, each node in `node` will have a new field named
`region` that is a tuple containing the start and end offsets
of the code that generated it.
If `sorted_children` is true, a `sorted_children` field will
be created for each node, too. It is a list containing child
nodes as well as whitespaces and comments that occur between
them. | [
"Patches",
"the",
"given",
"node"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/patchedast.py#L23-L40 | train | 205,777 |
def write_ast(patched_ast_node):
    """Extract source from a patched AST node with `sorted_children`.

    If the node is patched with sorted_children turned off, you can
    use the `node_region` function for obtaining code using module
    source code instead.
    """
    pieces = []
    for child in patched_ast_node.sorted_children:
        # Children are either nested AST nodes or literal text
        # (whitespace, comments, punctuation) between them.
        if isinstance(child, ast.AST):
            pieces.append(write_ast(child))
        else:
            pieces.append(child)
    return ''.join(pieces)
"def",
"write_ast",
"(",
"patched_ast_node",
")",
":",
"result",
"=",
"[",
"]",
"for",
"child",
"in",
"patched_ast_node",
".",
"sorted_children",
":",
"if",
"isinstance",
"(",
"child",
",",
"ast",
".",
"AST",
")",
":",
"result",
".",
"append",
"(",
"writ... | Extract source form a patched AST node with `sorted_children` field
If the node is patched with sorted_children turned off you can use
`node_region` function for obtaining code using module source code. | [
"Extract",
"source",
"form",
"a",
"patched",
"AST",
"node",
"with",
"sorted_children",
"field"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/patchedast.py#L48-L60 | train | 205,778 |
def _handle_parens(self, children, start, formats):
    """Changes `children` and returns new start.

    Consumes the parentheses that `formats` says wrap the current
    node, appending/prepending the consumed text to `children`.
    """
    # How many '('/')' this node needs, derived from the format list.
    opens, closes = self._count_needed_parens(formats)
    old_end = self.source.offset
    new_end = None
    # Consume the closing parens after the node; `consume` advances
    # self.source.offset as a side effect, so order matters here.
    for i in range(closes):
        new_end = self.source.consume(')')[1]
    if new_end is not None:
        if self.children:
            # Record the consumed ")..." text as a trailing child.
            children.append(self.source[old_end:new_end])
    # Walk backwards to locate the matching opening parens before
    # `start`.
    new_start = start
    for i in range(opens):
        new_start = self.source.rfind_token('(', 0, new_start)
    if new_start != start:
        if self.children:
            # Prepend the "(..." text so children stay source-ordered.
            children.appendleft(self.source[new_start:start])
        start = new_start
    return start
"def",
"_handle_parens",
"(",
"self",
",",
"children",
",",
"start",
",",
"formats",
")",
":",
"opens",
",",
"closes",
"=",
"self",
".",
"_count_needed_parens",
"(",
"formats",
")",
"old_end",
"=",
"self",
".",
"source",
".",
"offset",
"new_end",
"=",
"N... | Changes `children` and returns new start | [
"Changes",
"children",
"and",
"returns",
"new",
"start"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/patchedast.py#L155-L172 | train | 205,779 |
python-rope/rope | rope/refactor/patchedast.py | _Source._good_token | def _good_token(self, token, offset, start=None):
"""Checks whether consumed token is in comments"""
if start is None:
start = self.offset
try:
comment_index = self.source.rindex('#', start, offset)
except ValueError:
return True
try:
new_line_index = self.source.rindex('\n', start, offset)
except ValueError:
return False
return comment_index < new_line_index | python | def _good_token(self, token, offset, start=None):
"""Checks whether consumed token is in comments"""
if start is None:
start = self.offset
try:
comment_index = self.source.rindex('#', start, offset)
except ValueError:
return True
try:
new_line_index = self.source.rindex('\n', start, offset)
except ValueError:
return False
return comment_index < new_line_index | [
"def",
"_good_token",
"(",
"self",
",",
"token",
",",
"offset",
",",
"start",
"=",
"None",
")",
":",
"if",
"start",
"is",
"None",
":",
"start",
"=",
"self",
".",
"offset",
"try",
":",
"comment_index",
"=",
"self",
".",
"source",
".",
"rindex",
"(",
... | Checks whether consumed token is in comments | [
"Checks",
"whether",
"consumed",
"token",
"is",
"in",
"comments"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/patchedast.py#L784-L796 | train | 205,780 |
def get_block_start(lines, lineno, maximum_indents=80):
    """Approximate block start.

    Scan upwards from `lineno` for a line matching a block-start
    pattern that is not indented deeper than `maximum_indents`; fall
    back to line 1 when nothing matches.
    """
    pattern = get_block_start_patterns()
    for i in range(lineno, 0, -1):
        match = pattern.search(lines.get_line(i))
        if match is not None and \
                count_line_indents(lines.get_line(i)) <= maximum_indents:
            striped = match.string.lstrip()
            # Maybe we're in a list comprehension or generator expression
            # NOTE(review): by operator precedence this reads as
            # `(i > 1 and startswith('if')) or startswith('for')`, so the
            # `i > 1` guard does not apply to 'for' lines -- confirm this
            # is intended.
            if i > 1 and striped.startswith('if') or striped.startswith('for'):
                bracs = 0
                # Look ahead a few lines: an unmatched ')' or ']' means
                # this 'if'/'for' belongs to a comprehension, not a
                # real block header.
                for j in range(i, min(i + 5, lines.length() + 1)):
                    for c in lines.get_line(j):
                        if c == '#':
                            # Rest of the line is a comment; stop scanning it.
                            break
                        if c in '[(':
                            bracs += 1
                        if c in ')]':
                            bracs -= 1
                        if bracs < 0:
                            break
                    if bracs < 0:
                        break
                if bracs < 0:
                    # Inside a comprehension; keep scanning upwards.
                    continue
            return i
    return 1
"def",
"get_block_start",
"(",
"lines",
",",
"lineno",
",",
"maximum_indents",
"=",
"80",
")",
":",
"pattern",
"=",
"get_block_start_patterns",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"lineno",
",",
"0",
",",
"-",
"1",
")",
":",
"match",
"=",
"patter... | Approximate block start | [
"Approximate",
"block",
"start"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/codeanalyze.py#L300-L326 | train | 205,781 |
python-rope/rope | rope/base/codeanalyze.py | CachingLogicalLineFinder._init_logicals | def _init_logicals(self):
"""Should initialize _starts and _ends attributes"""
size = self.lines.length() + 1
self._starts = [None] * size
self._ends = [None] * size
for start, end in self._generate(self.lines):
self._starts[start] = True
self._ends[end] = True | python | def _init_logicals(self):
"""Should initialize _starts and _ends attributes"""
size = self.lines.length() + 1
self._starts = [None] * size
self._ends = [None] * size
for start, end in self._generate(self.lines):
self._starts[start] = True
self._ends[end] = True | [
"def",
"_init_logicals",
"(",
"self",
")",
":",
"size",
"=",
"self",
".",
"lines",
".",
"length",
"(",
")",
"+",
"1",
"self",
".",
"_starts",
"=",
"[",
"None",
"]",
"*",
"size",
"self",
".",
"_ends",
"=",
"[",
"None",
"]",
"*",
"size",
"for",
"... | Should initialize _starts and _ends attributes | [
"Should",
"initialize",
"_starts",
"and",
"_ends",
"attributes"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/codeanalyze.py#L272-L279 | train | 205,782 |
def create_move(project, resource, offset=None):
    """A factory for creating Move objects.

    Based on `resource` and `offset`, return one of `MoveModule`,
    `MoveGlobal` or `MoveMethod` for performing move refactoring.
    """
    if offset is None:
        # No offset: the resource itself (a module) is being moved.
        return MoveModule(project, resource)
    pymodule = project.get_pymodule(resource)
    pyname = evaluate.eval_location(pymodule, offset)
    if pyname is not None:
        pyobject = pyname.get_object()
        if isinstance(pyobject, (pyobjects.PyModule, pyobjects.PyPackage)):
            return MoveModule(project, pyobject.get_resource())
        if (isinstance(pyobject, pyobjects.PyFunction) and
                isinstance(pyobject.parent, pyobjects.PyClass)):
            return MoveMethod(project, resource, offset)
        # NOTE: parentheses preserve the original `(A and B) or C`
        # grouping -- the AssignedName test alone is sufficient.
        if (isinstance(pyobject, pyobjects.PyDefinedObject) and
                isinstance(pyobject.parent, pyobjects.PyModule) or
                isinstance(pyname, pynames.AssignedName)):
            return MoveGlobal(project, resource, offset)
    raise exceptions.RefactoringError(
        'Move only works on global classes/functions/variables, modules and '
        'methods.')
"def",
"create_move",
"(",
"project",
",",
"resource",
",",
"offset",
"=",
"None",
")",
":",
"if",
"offset",
"is",
"None",
":",
"return",
"MoveModule",
"(",
"project",
",",
"resource",
")",
"this_pymodule",
"=",
"project",
".",
"get_pymodule",
"(",
"resour... | A factory for creating Move objects
Based on `resource` and `offset`, return one of `MoveModule`,
`MoveGlobal` or `MoveMethod` for performing move refactoring. | [
"A",
"factory",
"for",
"creating",
"Move",
"objects"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/move.py#L14-L39 | train | 205,783 |
python-rope/rope | rope/refactor/move.py | MoveMethod.get_changes | def get_changes(self, dest_attr, new_name=None, resources=None,
task_handle=taskhandle.NullTaskHandle()):
"""Return the changes needed for this refactoring
Parameters:
- `dest_attr`: the name of the destination attribute
- `new_name`: the name of the new method; if `None` uses
the old name
- `resources` can be a list of `rope.base.resources.File`\s to
apply this refactoring on. If `None`, the restructuring
will be applied to all python files.
"""
changes = ChangeSet('Moving method <%s>' % self.method_name)
if resources is None:
resources = self.project.get_python_files()
if new_name is None:
new_name = self.get_method_name()
resource1, start1, end1, new_content1 = \
self._get_changes_made_by_old_class(dest_attr, new_name)
collector1 = codeanalyze.ChangeCollector(resource1.read())
collector1.add_change(start1, end1, new_content1)
resource2, start2, end2, new_content2 = \
self._get_changes_made_by_new_class(dest_attr, new_name)
if resource1 == resource2:
collector1.add_change(start2, end2, new_content2)
else:
collector2 = codeanalyze.ChangeCollector(resource2.read())
collector2.add_change(start2, end2, new_content2)
result = collector2.get_changed()
import_tools = importutils.ImportTools(self.project)
new_imports = self._get_used_imports(import_tools)
if new_imports:
goal_pymodule = libutils.get_string_module(
self.project, result, resource2)
result = _add_imports_to_module(
import_tools, goal_pymodule, new_imports)
if resource2 in resources:
changes.add_change(ChangeContents(resource2, result))
if resource1 in resources:
changes.add_change(ChangeContents(resource1,
collector1.get_changed()))
return changes | python | def get_changes(self, dest_attr, new_name=None, resources=None,
task_handle=taskhandle.NullTaskHandle()):
"""Return the changes needed for this refactoring
Parameters:
- `dest_attr`: the name of the destination attribute
- `new_name`: the name of the new method; if `None` uses
the old name
- `resources` can be a list of `rope.base.resources.File`\s to
apply this refactoring on. If `None`, the restructuring
will be applied to all python files.
"""
changes = ChangeSet('Moving method <%s>' % self.method_name)
if resources is None:
resources = self.project.get_python_files()
if new_name is None:
new_name = self.get_method_name()
resource1, start1, end1, new_content1 = \
self._get_changes_made_by_old_class(dest_attr, new_name)
collector1 = codeanalyze.ChangeCollector(resource1.read())
collector1.add_change(start1, end1, new_content1)
resource2, start2, end2, new_content2 = \
self._get_changes_made_by_new_class(dest_attr, new_name)
if resource1 == resource2:
collector1.add_change(start2, end2, new_content2)
else:
collector2 = codeanalyze.ChangeCollector(resource2.read())
collector2.add_change(start2, end2, new_content2)
result = collector2.get_changed()
import_tools = importutils.ImportTools(self.project)
new_imports = self._get_used_imports(import_tools)
if new_imports:
goal_pymodule = libutils.get_string_module(
self.project, result, resource2)
result = _add_imports_to_module(
import_tools, goal_pymodule, new_imports)
if resource2 in resources:
changes.add_change(ChangeContents(resource2, result))
if resource1 in resources:
changes.add_change(ChangeContents(resource1,
collector1.get_changed()))
return changes | [
"def",
"get_changes",
"(",
"self",
",",
"dest_attr",
",",
"new_name",
"=",
"None",
",",
"resources",
"=",
"None",
",",
"task_handle",
"=",
"taskhandle",
".",
"NullTaskHandle",
"(",
")",
")",
":",
"changes",
"=",
"ChangeSet",
"(",
"'Moving method <%s>'",
"%",... | Return the changes needed for this refactoring
Parameters:
- `dest_attr`: the name of the destination attribute
- `new_name`: the name of the new method; if `None` uses
the old name
- `resources` can be a list of `rope.base.resources.File`\s to
apply this refactoring on. If `None`, the restructuring
will be applied to all python files. | [
"Return",
"the",
"changes",
"needed",
"for",
"this",
"refactoring"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/move.py#L61-L106 | train | 205,784 |
python-rope/rope | rope/base/pyobjectsdef.py | PyFunction.get_kind | def get_kind(self):
"""Get function type
It returns one of 'function', 'method', 'staticmethod' or
'classmethod' strs.
"""
scope = self.parent.get_scope()
if isinstance(self.parent, PyClass):
for decorator in self.decorators:
pyname = rope.base.evaluate.eval_node(scope, decorator)
if pyname == rope.base.builtins.builtins['staticmethod']:
return 'staticmethod'
if pyname == rope.base.builtins.builtins['classmethod']:
return 'classmethod'
return 'method'
return 'function' | python | def get_kind(self):
"""Get function type
It returns one of 'function', 'method', 'staticmethod' or
'classmethod' strs.
"""
scope = self.parent.get_scope()
if isinstance(self.parent, PyClass):
for decorator in self.decorators:
pyname = rope.base.evaluate.eval_node(scope, decorator)
if pyname == rope.base.builtins.builtins['staticmethod']:
return 'staticmethod'
if pyname == rope.base.builtins.builtins['classmethod']:
return 'classmethod'
return 'method'
return 'function' | [
"def",
"get_kind",
"(",
"self",
")",
":",
"scope",
"=",
"self",
".",
"parent",
".",
"get_scope",
"(",
")",
"if",
"isinstance",
"(",
"self",
".",
"parent",
",",
"PyClass",
")",
":",
"for",
"decorator",
"in",
"self",
".",
"decorators",
":",
"pyname",
"... | Get function type
It returns one of 'function', 'method', 'staticmethod' or
'classmethod' strs. | [
"Get",
"function",
"type"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/pyobjectsdef.py#L89-L105 | train | 205,785 |
python-rope/rope | rope/base/ast.py | walk | def walk(node, walker):
"""Walk the syntax tree"""
method_name = '_' + node.__class__.__name__
method = getattr(walker, method_name, None)
if method is not None:
if isinstance(node, _ast.ImportFrom) and node.module is None:
# In python < 2.7 ``node.module == ''`` for relative imports
# but for python 2.7 it is None. Generalizing it to ''.
node.module = ''
return method(node)
for child in get_child_nodes(node):
walk(child, walker) | python | def walk(node, walker):
"""Walk the syntax tree"""
method_name = '_' + node.__class__.__name__
method = getattr(walker, method_name, None)
if method is not None:
if isinstance(node, _ast.ImportFrom) and node.module is None:
# In python < 2.7 ``node.module == ''`` for relative imports
# but for python 2.7 it is None. Generalizing it to ''.
node.module = ''
return method(node)
for child in get_child_nodes(node):
walk(child, walker) | [
"def",
"walk",
"(",
"node",
",",
"walker",
")",
":",
"method_name",
"=",
"'_'",
"+",
"node",
".",
"__class__",
".",
"__name__",
"method",
"=",
"getattr",
"(",
"walker",
",",
"method_name",
",",
"None",
")",
"if",
"method",
"is",
"not",
"None",
":",
"... | Walk the syntax tree | [
"Walk",
"the",
"syntax",
"tree"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/ast.py#L30-L41 | train | 205,786 |
python-rope/rope | rope/base/ast.py | call_for_nodes | def call_for_nodes(node, callback, recursive=False):
"""If callback returns `True` the child nodes are skipped"""
result = callback(node)
if recursive and not result:
for child in get_child_nodes(node):
call_for_nodes(child, callback, recursive) | python | def call_for_nodes(node, callback, recursive=False):
"""If callback returns `True` the child nodes are skipped"""
result = callback(node)
if recursive and not result:
for child in get_child_nodes(node):
call_for_nodes(child, callback, recursive) | [
"def",
"call_for_nodes",
"(",
"node",
",",
"callback",
",",
"recursive",
"=",
"False",
")",
":",
"result",
"=",
"callback",
"(",
"node",
")",
"if",
"recursive",
"and",
"not",
"result",
":",
"for",
"child",
"in",
"get_child_nodes",
"(",
"node",
")",
":",
... | If callback returns `True` the child nodes are skipped | [
"If",
"callback",
"returns",
"True",
"the",
"child",
"nodes",
"are",
"skipped"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/ast.py#L60-L65 | train | 205,787 |
python-rope/rope | rope/base/project.py | _realpath | def _realpath(path):
"""Return the real path of `path`
Is equivalent to ``realpath(abspath(expanduser(path)))``.
Of the particular notice is the hack dealing with the unfortunate
sitaution of running native-Windows python (os.name == 'nt') inside
of Cygwin (abspath starts with '/'), which apparently normal
os.path.realpath completely messes up.
"""
# there is a bug in cygwin for os.path.abspath() for abs paths
if sys.platform == 'cygwin':
if path[1:3] == ':\\':
return path
elif path[1:3] == ':/':
path = "/cygdrive/" + path[0] + path[2:]
return os.path.abspath(os.path.expanduser(path))
return os.path.realpath(os.path.abspath(os.path.expanduser(path))) | python | def _realpath(path):
"""Return the real path of `path`
Is equivalent to ``realpath(abspath(expanduser(path)))``.
Of the particular notice is the hack dealing with the unfortunate
sitaution of running native-Windows python (os.name == 'nt') inside
of Cygwin (abspath starts with '/'), which apparently normal
os.path.realpath completely messes up.
"""
# there is a bug in cygwin for os.path.abspath() for abs paths
if sys.platform == 'cygwin':
if path[1:3] == ':\\':
return path
elif path[1:3] == ':/':
path = "/cygdrive/" + path[0] + path[2:]
return os.path.abspath(os.path.expanduser(path))
return os.path.realpath(os.path.abspath(os.path.expanduser(path))) | [
"def",
"_realpath",
"(",
"path",
")",
":",
"# there is a bug in cygwin for os.path.abspath() for abs paths",
"if",
"sys",
".",
"platform",
"==",
"'cygwin'",
":",
"if",
"path",
"[",
"1",
":",
"3",
"]",
"==",
"':\\\\'",
":",
"return",
"path",
"elif",
"path",
"["... | Return the real path of `path`
Is equivalent to ``realpath(abspath(expanduser(path)))``.
Of the particular notice is the hack dealing with the unfortunate
sitaution of running native-Windows python (os.name == 'nt') inside
of Cygwin (abspath starts with '/'), which apparently normal
os.path.realpath completely messes up. | [
"Return",
"the",
"real",
"path",
"of",
"path"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L456-L474 | train | 205,788 |
python-rope/rope | rope/base/project.py | _Project.get_resource | def get_resource(self, resource_name):
"""Get a resource in a project.
`resource_name` is the path of a resource in a project. It is
the path of a resource relative to project root. Project root
folder address is an empty string. If the resource does not
exist a `exceptions.ResourceNotFound` exception would be
raised. Use `get_file()` and `get_folder()` when you need to
get nonexistent `Resource`\s.
"""
path = self._get_resource_path(resource_name)
if not os.path.exists(path):
raise exceptions.ResourceNotFoundError(
'Resource <%s> does not exist' % resource_name)
elif os.path.isfile(path):
return File(self, resource_name)
elif os.path.isdir(path):
return Folder(self, resource_name)
else:
raise exceptions.ResourceNotFoundError('Unknown resource '
+ resource_name) | python | def get_resource(self, resource_name):
"""Get a resource in a project.
`resource_name` is the path of a resource in a project. It is
the path of a resource relative to project root. Project root
folder address is an empty string. If the resource does not
exist a `exceptions.ResourceNotFound` exception would be
raised. Use `get_file()` and `get_folder()` when you need to
get nonexistent `Resource`\s.
"""
path = self._get_resource_path(resource_name)
if not os.path.exists(path):
raise exceptions.ResourceNotFoundError(
'Resource <%s> does not exist' % resource_name)
elif os.path.isfile(path):
return File(self, resource_name)
elif os.path.isdir(path):
return Folder(self, resource_name)
else:
raise exceptions.ResourceNotFoundError('Unknown resource '
+ resource_name) | [
"def",
"get_resource",
"(",
"self",
",",
"resource_name",
")",
":",
"path",
"=",
"self",
".",
"_get_resource_path",
"(",
"resource_name",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"raise",
"exceptions",
".",
"ResourceNotFou... | Get a resource in a project.
`resource_name` is the path of a resource in a project. It is
the path of a resource relative to project root. Project root
folder address is an empty string. If the resource does not
exist a `exceptions.ResourceNotFound` exception would be
raised. Use `get_file()` and `get_folder()` when you need to
get nonexistent `Resource`\s. | [
"Get",
"a",
"resource",
"in",
"a",
"project",
"."
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L28-L49 | train | 205,789 |
python-rope/rope | rope/base/project.py | _Project.get_module | def get_module(self, name, folder=None):
"""Returns a `PyObject` if the module was found."""
# check if this is a builtin module
pymod = self.pycore.builtin_module(name)
if pymod is not None:
return pymod
module = self.find_module(name, folder)
if module is None:
raise ModuleNotFoundError('Module %s not found' % name)
return self.pycore.resource_to_pyobject(module) | python | def get_module(self, name, folder=None):
"""Returns a `PyObject` if the module was found."""
# check if this is a builtin module
pymod = self.pycore.builtin_module(name)
if pymod is not None:
return pymod
module = self.find_module(name, folder)
if module is None:
raise ModuleNotFoundError('Module %s not found' % name)
return self.pycore.resource_to_pyobject(module) | [
"def",
"get_module",
"(",
"self",
",",
"name",
",",
"folder",
"=",
"None",
")",
":",
"# check if this is a builtin module",
"pymod",
"=",
"self",
".",
"pycore",
".",
"builtin_module",
"(",
"name",
")",
"if",
"pymod",
"is",
"not",
"None",
":",
"return",
"py... | Returns a `PyObject` if the module was found. | [
"Returns",
"a",
"PyObject",
"if",
"the",
"module",
"was",
"found",
"."
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L51-L60 | train | 205,790 |
python-rope/rope | rope/base/project.py | _Project.get_source_folders | def get_source_folders(self):
"""Returns project source folders"""
if self.root is None:
return []
result = list(self._custom_source_folders)
result.extend(self.pycore._find_source_folders(self.root))
return result | python | def get_source_folders(self):
"""Returns project source folders"""
if self.root is None:
return []
result = list(self._custom_source_folders)
result.extend(self.pycore._find_source_folders(self.root))
return result | [
"def",
"get_source_folders",
"(",
"self",
")",
":",
"if",
"self",
".",
"root",
"is",
"None",
":",
"return",
"[",
"]",
"result",
"=",
"list",
"(",
"self",
".",
"_custom_source_folders",
")",
"result",
".",
"extend",
"(",
"self",
".",
"pycore",
".",
"_fi... | Returns project source folders | [
"Returns",
"project",
"source",
"folders"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L77-L83 | train | 205,791 |
python-rope/rope | rope/base/project.py | _Project.validate | def validate(self, folder):
"""Validate files and folders contained in this folder
It validates all of the files and folders contained in this
folder if some observers are interested in them.
"""
for observer in list(self.observers):
observer.validate(folder) | python | def validate(self, folder):
"""Validate files and folders contained in this folder
It validates all of the files and folders contained in this
folder if some observers are interested in them.
"""
for observer in list(self.observers):
observer.validate(folder) | [
"def",
"validate",
"(",
"self",
",",
"folder",
")",
":",
"for",
"observer",
"in",
"list",
"(",
"self",
".",
"observers",
")",
":",
"observer",
".",
"validate",
"(",
"folder",
")"
] | Validate files and folders contained in this folder
It validates all of the files and folders contained in this
folder if some observers are interested in them. | [
"Validate",
"files",
"and",
"folders",
"contained",
"in",
"this",
"folder"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L85-L93 | train | 205,792 |
python-rope/rope | rope/base/project.py | _Project.do | def do(self, changes, task_handle=taskhandle.NullTaskHandle()):
"""Apply the changes in a `ChangeSet`
Most of the time you call this function for committing the
changes for a refactoring.
"""
self.history.do(changes, task_handle=task_handle) | python | def do(self, changes, task_handle=taskhandle.NullTaskHandle()):
"""Apply the changes in a `ChangeSet`
Most of the time you call this function for committing the
changes for a refactoring.
"""
self.history.do(changes, task_handle=task_handle) | [
"def",
"do",
"(",
"self",
",",
"changes",
",",
"task_handle",
"=",
"taskhandle",
".",
"NullTaskHandle",
"(",
")",
")",
":",
"self",
".",
"history",
".",
"do",
"(",
"changes",
",",
"task_handle",
"=",
"task_handle",
")"
] | Apply the changes in a `ChangeSet`
Most of the time you call this function for committing the
changes for a refactoring. | [
"Apply",
"the",
"changes",
"in",
"a",
"ChangeSet"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L107-L113 | train | 205,793 |
python-rope/rope | rope/base/project.py | _Project.find_module | def find_module(self, modname, folder=None):
"""Returns a resource corresponding to the given module
returns None if it can not be found
"""
for src in self.get_source_folders():
module = _find_module_in_folder(src, modname)
if module is not None:
return module
for src in self.get_python_path_folders():
module = _find_module_in_folder(src, modname)
if module is not None:
return module
if folder is not None:
module = _find_module_in_folder(folder, modname)
if module is not None:
return module
return None | python | def find_module(self, modname, folder=None):
"""Returns a resource corresponding to the given module
returns None if it can not be found
"""
for src in self.get_source_folders():
module = _find_module_in_folder(src, modname)
if module is not None:
return module
for src in self.get_python_path_folders():
module = _find_module_in_folder(src, modname)
if module is not None:
return module
if folder is not None:
module = _find_module_in_folder(folder, modname)
if module is not None:
return module
return None | [
"def",
"find_module",
"(",
"self",
",",
"modname",
",",
"folder",
"=",
"None",
")",
":",
"for",
"src",
"in",
"self",
".",
"get_source_folders",
"(",
")",
":",
"module",
"=",
"_find_module_in_folder",
"(",
"src",
",",
"modname",
")",
"if",
"module",
"is",... | Returns a resource corresponding to the given module
returns None if it can not be found | [
"Returns",
"a",
"resource",
"corresponding",
"to",
"the",
"given",
"module"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L138-L155 | train | 205,794 |
python-rope/rope | rope/base/project.py | Project.get_python_files | def get_python_files(self):
"""Returns all python files available in the project"""
return [resource for resource in self.get_files()
if self.pycore.is_python_file(resource)] | python | def get_python_files(self):
"""Returns all python files available in the project"""
return [resource for resource in self.get_files()
if self.pycore.is_python_file(resource)] | [
"def",
"get_python_files",
"(",
"self",
")",
":",
"return",
"[",
"resource",
"for",
"resource",
"in",
"self",
".",
"get_files",
"(",
")",
"if",
"self",
".",
"pycore",
".",
"is_python_file",
"(",
"resource",
")",
"]"
] | Returns all python files available in the project | [
"Returns",
"all",
"python",
"files",
"available",
"in",
"the",
"project"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/project.py#L235-L238 | train | 205,795 |
python-rope/rope | rope/base/pynamesdef.py | ParameterName.get_objects | def get_objects(self):
"""Returns the list of objects passed as this parameter"""
return rope.base.oi.soi.get_passed_objects(
self.pyfunction, self.index) | python | def get_objects(self):
"""Returns the list of objects passed as this parameter"""
return rope.base.oi.soi.get_passed_objects(
self.pyfunction, self.index) | [
"def",
"get_objects",
"(",
"self",
")",
":",
"return",
"rope",
".",
"base",
".",
"oi",
".",
"soi",
".",
"get_passed_objects",
"(",
"self",
".",
"pyfunction",
",",
"self",
".",
"index",
")"
] | Returns the list of objects passed as this parameter | [
"Returns",
"the",
"list",
"of",
"objects",
"passed",
"as",
"this",
"parameter"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/pynamesdef.py#L47-L50 | train | 205,796 |
python-rope/rope | rope/refactor/multiproject.py | _MultiRefactoring.get_all_changes | def get_all_changes(self, *args, **kwds):
"""Get a project to changes dict"""
result = []
for project, refactoring in zip(self.projects, self.refactorings):
args, kwds = self._resources_for_args(project, args, kwds)
result.append((project, refactoring.get_changes(*args, **kwds)))
return result | python | def get_all_changes(self, *args, **kwds):
"""Get a project to changes dict"""
result = []
for project, refactoring in zip(self.projects, self.refactorings):
args, kwds = self._resources_for_args(project, args, kwds)
result.append((project, refactoring.get_changes(*args, **kwds)))
return result | [
"def",
"get_all_changes",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"result",
"=",
"[",
"]",
"for",
"project",
",",
"refactoring",
"in",
"zip",
"(",
"self",
".",
"projects",
",",
"self",
".",
"refactorings",
")",
":",
"args",
... | Get a project to changes dict | [
"Get",
"a",
"project",
"to",
"changes",
"dict"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/refactor/multiproject.py#L44-L50 | train | 205,797 |
python-rope/rope | rope/base/utils/__init__.py | saveit | def saveit(func):
"""A decorator that caches the return value of a function"""
name = '_' + func.__name__
def _wrapper(self, *args, **kwds):
if not hasattr(self, name):
setattr(self, name, func(self, *args, **kwds))
return getattr(self, name)
return _wrapper | python | def saveit(func):
"""A decorator that caches the return value of a function"""
name = '_' + func.__name__
def _wrapper(self, *args, **kwds):
if not hasattr(self, name):
setattr(self, name, func(self, *args, **kwds))
return getattr(self, name)
return _wrapper | [
"def",
"saveit",
"(",
"func",
")",
":",
"name",
"=",
"'_'",
"+",
"func",
".",
"__name__",
"def",
"_wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"name",
")",
":",
"setattr",
... | A decorator that caches the return value of a function | [
"A",
"decorator",
"that",
"caches",
"the",
"return",
"value",
"of",
"a",
"function"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/utils/__init__.py#L5-L14 | train | 205,798 |
python-rope/rope | rope/base/utils/__init__.py | prevent_recursion | def prevent_recursion(default):
"""A decorator that returns the return value of `default` in recursions"""
def decorator(func):
name = '_calling_%s_' % func.__name__
def newfunc(self, *args, **kwds):
if getattr(self, name, False):
return default()
setattr(self, name, True)
try:
return func(self, *args, **kwds)
finally:
setattr(self, name, False)
return newfunc
return decorator | python | def prevent_recursion(default):
"""A decorator that returns the return value of `default` in recursions"""
def decorator(func):
name = '_calling_%s_' % func.__name__
def newfunc(self, *args, **kwds):
if getattr(self, name, False):
return default()
setattr(self, name, True)
try:
return func(self, *args, **kwds)
finally:
setattr(self, name, False)
return newfunc
return decorator | [
"def",
"prevent_recursion",
"(",
"default",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"name",
"=",
"'_calling_%s_'",
"%",
"func",
".",
"__name__",
"def",
"newfunc",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"if",
"get... | A decorator that returns the return value of `default` in recursions | [
"A",
"decorator",
"that",
"returns",
"the",
"return",
"value",
"of",
"default",
"in",
"recursions"
] | 1c9f9cd5964b099a99a9111e998f0dc728860688 | https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/utils/__init__.py#L19-L33 | train | 205,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.