repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_code_tokens
listlengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
listlengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
hodgesds/elasticsearch_tornado
elasticsearch_tornado/client.py
BaseClient.put_template
def put_template(self, temp_id, body, params=None, callback=None, **kwargs):
    """Create a search template.

    `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-template.html>`_

    :arg temp_id: Template ID
    :arg body: The document
    :arg params: Optional request parameters (accepted for API symmetry;
        not currently read by this method)
    :arg callback: Callback invoked with the fetch response
    """
    # Fix: the previous signature used a mutable default (params={}),
    # which is shared across calls. params=None is backward-compatible
    # because the argument is never read here.
    params = params if params is not None else {}
    url = self.mk_url(*['_search', 'template', temp_id])
    self.client.fetch(
        self.mk_req(url, method='PUT', body=body, **kwargs),
        callback=callback,
    )
python
def put_template(self, temp_id, body, params={}, callback=None, **kwargs): """ Create a search template. `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-template.html>`_ :arg temp_id: Template ID :arg body: The document """ url = self.mk_url(*['_search', 'template', temp_id]) self.client.fetch( self.mk_req(url, method='PUT', body=body, **kwargs), callback = callback )
[ "def", "put_template", "(", "self", ",", "temp_id", ",", "body", ",", "params", "=", "{", "}", ",", "callback", "=", "None", ",", "*", "*", "kwargs", ")", ":", "url", "=", "self", ".", "mk_url", "(", "*", "[", "'_search'", ",", "'template'", ",", ...
Create a search template. `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-template.html>`_ :arg temp_id: Template ID :arg body: The document
[ "Create", "a", "search", "template", ".", "<http", ":", "//", "www", ".", "elasticsearch", ".", "org", "/", "guide", "/", "en", "/", "elasticsearch", "/", "reference", "/", "current", "/", "search", "-", "template", ".", "html", ">", "_", ":", "arg", ...
train
https://github.com/hodgesds/elasticsearch_tornado/blob/5acc1385589c92ffe3587ad05b7921c2cd1a30da/elasticsearch_tornado/client.py#L1360-L1373
hodgesds/elasticsearch_tornado
elasticsearch_tornado/client.py
BaseClient.delete_template
def delete_template(self, temp_id=None, params=None, callback=None, **kwargs):
    """Delete a search template.

    `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-template.html>`_

    :arg temp_id: Template ID
    :arg params: Optional request parameters (accepted for API symmetry;
        not currently read by this method)
    :arg callback: Callback invoked with the fetch response
    """
    # Fix: the previous signature used a mutable default (params={}),
    # which is shared across calls. params=None is backward-compatible
    # because the argument is never read here.
    params = params if params is not None else {}
    url = self.mk_url(*['_search', 'template', temp_id])
    self.client.fetch(
        self.mk_req(url, method='DELETE', **kwargs),
        callback=callback,
    )
python
def delete_template(self, temp_id=None, params={}, callback=None, **kwargs): """ Delete a search template. `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-template.html>`_ :arg temp_id: Template ID """ url = self.mk_url(*['_search', 'template', temp_id]) self.client.fetch( self.mk_req(url, method='DELETE', **kwargs), callback = callback )
[ "def", "delete_template", "(", "self", ",", "temp_id", "=", "None", ",", "params", "=", "{", "}", ",", "callback", "=", "None", ",", "*", "*", "kwargs", ")", ":", "url", "=", "self", ".", "mk_url", "(", "*", "[", "'_search'", ",", "'template'", ","...
Delete a search template. `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-template.html>`_ :arg temp_id: Template ID
[ "Delete", "a", "search", "template", ".", "<http", ":", "//", "www", ".", "elasticsearch", ".", "org", "/", "guide", "/", "en", "/", "elasticsearch", "/", "reference", "/", "current", "/", "search", "-", "template", ".", "html", ">", "_", ":", "arg", ...
train
https://github.com/hodgesds/elasticsearch_tornado/blob/5acc1385589c92ffe3587ad05b7921c2cd1a30da/elasticsearch_tornado/client.py#L1390-L1402
theiviaxx/Frog
frog/views/errorreporting.py
report
def report(title='Unhandled Exception', exec_info=(), **kwargs):
    """Email a technical server-error report to the site admins.

    The ``exec_info`` triple matches the values returned from
    ``sys.exc_info()``; when empty, the current exception is used.

    :param title: Title of error email
    :type title: str
    :param exec_info: exc_info from traceback
    """
    info = exec_info or sys.exc_info()
    etype, value, traceback_obj = info
    reporter = ExceptionReporter(etype, value, traceback_obj)
    html_body = reporter.get_traceback_html(**kwargs)
    mail_admins(title, 'html only', html_message=html_body)
python
def report(title='Unhandled Exception', exec_info=(), **kwargs): """ Create a technical server error response. The last three arguments are the values returned from sys.exc_info() and friends. :param title: Title of error email :type title: str :param exec_info: exc_info from traceback """ exc_type, exc_value, tb = exec_info or sys.exc_info() reporter = ExceptionReporter(exc_type, exc_value, tb) html = reporter.get_traceback_html(**kwargs) mail_admins(title, 'html only', html_message=html)
[ "def", "report", "(", "title", "=", "'Unhandled Exception'", ",", "exec_info", "=", "(", ")", ",", "*", "*", "kwargs", ")", ":", "exc_type", ",", "exc_value", ",", "tb", "=", "exec_info", "or", "sys", ".", "exc_info", "(", ")", "reporter", "=", "Except...
Create a technical server error response. The last three arguments are the values returned from sys.exc_info() and friends. :param title: Title of error email :type title: str :param exec_info: exc_info from traceback
[ "Create", "a", "technical", "server", "error", "response", ".", "The", "last", "three", "arguments", "are", "the", "values", "returned", "from", "sys", ".", "exc_info", "()", "and", "friends", "." ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/errorreporting.py#L24-L38
theiviaxx/Frog
frog/views/errorreporting.py
ExceptionReporter.get_traceback_data
def get_traceback_data(self):
    """Return a dictionary containing traceback information.

    Pretty-prints and trims each frame's local variables, then packs the
    frames plus interpreter details into a template context dict.
    """
    # Fix: removed dead code — the no-op `default_template_engine is None`
    # branch (template_loaders was never used) and the `unicode_hint`
    # block whose results were discarded.
    frames = self.get_traceback_frames()
    for i, frame in enumerate(frames):
        if 'vars' in frame:
            frame_vars = []
            for k, v in frame['vars']:
                v = pformat(v)
                # The escape filter assumes unicode; make sure that works.
                if isinstance(v, six.binary_type):
                    v = v.decode('utf-8', 'replace')  # don't choke on non-utf-8 input
                # Trim large blobs of data.
                if v and len(v) > 4096:
                    v = '%s... <trimmed %d bytes string>' % (v[0:4096], len(v))
                frame_vars.append((k, v))
            frame['vars'] = frame_vars
            frames[i] = frame

    c = {
        'is_email': False,
        'frames': frames,
        'sys_executable': sys.executable,
        'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
        'sys_path': sys.path,
    }
    # Exception info may be absent (e.g. reporter built outside an except).
    if self.exc_type:
        c['exception_type'] = self.exc_type.__name__
    if self.exc_value:
        c['exception_value'] = self.exc_value
    if frames:
        c['lastframe'] = frames[-1]
    return c
python
def get_traceback_data(self): """Return a dictionary containing traceback information.""" default_template_engine = None if default_template_engine is None: template_loaders = [] frames = self.get_traceback_frames() for i, frame in enumerate(frames): if 'vars' in frame: frame_vars = [] for k, v in frame['vars']: v = pformat(v) # The escape filter assume unicode, make sure that works if isinstance(v, six.binary_type): v = v.decode('utf-8', 'replace') # don't choke on non-utf-8 input # Trim large blobs of data if v and len(v) > 4096: v = '%s... <trimmed %d bytes string>' % (v[0:4096], len(v)) frame_vars.append((k, v)) frame['vars'] = frame_vars frames[i] = frame unicode_hint = '' if self.exc_type and issubclass(self.exc_type, UnicodeError): start = getattr(self.exc_value, 'start', None) end = getattr(self.exc_value, 'end', None) if start is not None and end is not None: unicode_str = self.exc_value.args[1] c = { 'is_email': False, 'frames': frames, 'sys_executable': sys.executable, 'sys_version_info': '%d.%d.%d' % sys.version_info[0:3], 'sys_path': sys.path, } # Check whether exception info is available if self.exc_type: c['exception_type'] = self.exc_type.__name__ if self.exc_value: c['exception_value'] = self.exc_value if frames: c['lastframe'] = frames[-1] return c
[ "def", "get_traceback_data", "(", "self", ")", ":", "default_template_engine", "=", "None", "if", "default_template_engine", "is", "None", ":", "template_loaders", "=", "[", "]", "frames", "=", "self", ".", "get_traceback_frames", "(", ")", "for", "i", ",", "f...
Return a dictionary containing traceback information.
[ "Return", "a", "dictionary", "containing", "traceback", "information", "." ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/errorreporting.py#L59-L104
theiviaxx/Frog
frog/views/errorreporting.py
ExceptionReporter.get_traceback_html
def get_traceback_html(self, **kwargs):
    """Return the HTML version of the debug 500 HTTP error page."""
    context_data = self.get_traceback_data()
    context_data['kwargs'] = kwargs
    template = Template(TECHNICAL_500_TEMPLATE)
    return template.render(Context(context_data))
python
def get_traceback_html(self, **kwargs): "Return HTML version of debug 500 HTTP error page." t = Template(TECHNICAL_500_TEMPLATE) c = self.get_traceback_data() c['kwargs'] = kwargs return t.render(Context(c))
[ "def", "get_traceback_html", "(", "self", ",", "*", "*", "kwargs", ")", ":", "t", "=", "Template", "(", "TECHNICAL_500_TEMPLATE", ")", "c", "=", "self", ".", "get_traceback_data", "(", ")", "c", "[", "'kwargs'", "]", "=", "kwargs", "return", "t", ".", ...
Return HTML version of debug 500 HTTP error page.
[ "Return", "HTML", "version", "of", "debug", "500", "HTTP", "error", "page", "." ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/errorreporting.py#L106-L111
theiviaxx/Frog
frog/views/errorreporting.py
ExceptionReporter._get_lines_from_file
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
    """
    Return context_lines before and after lineno from file.

    Returns (pre_context_lineno, pre_context, context_line, post_context).
    """
    lines = None
    # Prefer the module loader's view of the source, falling back to disk.
    if loader is not None and hasattr(loader, "get_source"):
        try:
            lines = loader.get_source(module_name)
        except ImportError:
            pass
    if lines is not None:
        lines = lines.splitlines()
    if lines is None:
        try:
            with open(filename, 'rb') as handle:
                lines = handle.read().splitlines()
        except (OSError, IOError):
            pass
    if lines is None:
        return None, [], None, []

    # If we just read the source from a file, or if the loader did not
    # apply tokenize.detect_encoding to decode the source into a Unicode
    # string, then we should do that ourselves.
    if isinstance(lines[0], six.binary_type):
        encoding = 'ascii'
        for raw in lines[:2]:
            # File coding may be specified. Match pattern from PEP-263
            # (http://www.python.org/dev/peps/pep-0263/)
            found = re.search(br'coding[:=]\s*([-\w.]+)', raw)
            if found:
                encoding = found.group(1).decode('ascii')
                break
        lines = [six.text_type(raw, encoding, 'replace') for raw in lines]

    start = max(0, lineno - context_lines)
    stop = lineno + context_lines
    pre_context = lines[start:lineno]
    context_line = lines[lineno]
    post_context = lines[lineno + 1:stop]
    return start, pre_context, context_line, post_context
python
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None): """ Returns context_lines before and after lineno from file. Returns (pre_context_lineno, pre_context, context_line, post_context). """ source = None if loader is not None and hasattr(loader, "get_source"): try: source = loader.get_source(module_name) except ImportError: pass if source is not None: source = source.splitlines() if source is None: try: with open(filename, 'rb') as fp: source = fp.read().splitlines() except (OSError, IOError): pass if source is None: return None, [], None, [] # If we just read the source from a file, or if the loader did not # apply tokenize.detect_encoding to decode the source into a Unicode # string, then we should do that ourselves. if isinstance(source[0], six.binary_type): encoding = 'ascii' for line in source[:2]: # File coding may be specified. Match pattern from PEP-263 # (http://www.python.org/dev/peps/pep-0263/) match = re.search(br'coding[:=]\s*([-\w.]+)', line) if match: encoding = match.group(1).decode('ascii') break source = [six.text_type(sline, encoding, 'replace') for sline in source] lower_bound = max(0, lineno - context_lines) upper_bound = lineno + context_lines pre_context = source[lower_bound:lineno] context_line = source[lineno] post_context = source[lineno + 1:upper_bound] return lower_bound, pre_context, context_line, post_context
[ "def", "_get_lines_from_file", "(", "self", ",", "filename", ",", "lineno", ",", "context_lines", ",", "loader", "=", "None", ",", "module_name", "=", "None", ")", ":", "source", "=", "None", "if", "loader", "is", "not", "None", "and", "hasattr", "(", "l...
Returns context_lines before and after lineno from file. Returns (pre_context_lineno, pre_context, context_line, post_context).
[ "Returns", "context_lines", "before", "and", "after", "lineno", "from", "file", ".", "Returns", "(", "pre_context_lineno", "pre_context", "context_line", "post_context", ")", "." ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/errorreporting.py#L113-L156
theiviaxx/Frog
frog/views/errorreporting.py
ExceptionReporter.get_traceback_frames
def get_traceback_frames(self):
    """Return the traceback frames as a list of context dicts."""
    result = []
    current = self.tb
    while current is not None:
        # Support for __traceback_hide__ which is used by a few libraries
        # to hide internal frames.
        if current.tb_frame.f_locals.get('__traceback_hide__'):
            current = current.tb_next
            continue
        code = current.tb_frame.f_code
        frame_globals = current.tb_frame.f_globals
        filename = code.co_filename
        function = code.co_name
        lineno = current.tb_lineno - 1
        loader = frame_globals.get('__loader__')
        module_name = frame_globals.get('__name__') or ''
        (pre_context_lineno, pre_context,
         context_line, post_context) = self._get_lines_from_file(
            filename, lineno, 7, loader, module_name,
        )
        # Skip frames whose source could not be located at all.
        if pre_context_lineno is not None:
            result.append({
                'tb': current,
                'type': 'django' if module_name.startswith('django.') else 'user',
                'filename': filename,
                'function': function,
                'lineno': lineno + 1,
                'vars': list(six.iteritems(current.tb_frame.f_locals)),
                'id': id(current),
                'pre_context': pre_context,
                'context_line': context_line,
                'post_context': post_context,
                'pre_context_lineno': pre_context_lineno + 1,
            })
        current = current.tb_next
    return result
python
def get_traceback_frames(self): """Returns the traceback frames as a list""" frames = [] tb = self.tb while tb is not None: # Support for __traceback_hide__ which is used by a few libraries # to hide internal frames. if tb.tb_frame.f_locals.get('__traceback_hide__'): tb = tb.tb_next continue filename = tb.tb_frame.f_code.co_filename function = tb.tb_frame.f_code.co_name lineno = tb.tb_lineno - 1 loader = tb.tb_frame.f_globals.get('__loader__') module_name = tb.tb_frame.f_globals.get('__name__') or '' pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file( filename, lineno, 7, loader, module_name, ) if pre_context_lineno is not None: frames.append({ 'tb': tb, 'type': 'django' if module_name.startswith('django.') else 'user', 'filename': filename, 'function': function, 'lineno': lineno + 1, 'vars': list(six.iteritems(tb.tb_frame.f_locals)), 'id': id(tb), 'pre_context': pre_context, 'context_line': context_line, 'post_context': post_context, 'pre_context_lineno': pre_context_lineno + 1, }) tb = tb.tb_next return frames
[ "def", "get_traceback_frames", "(", "self", ")", ":", "frames", "=", "[", "]", "tb", "=", "self", ".", "tb", "while", "tb", "is", "not", "None", ":", "# Support for __traceback_hide__ which is used by a few libraries", "# to hide internal frames.", "if", "tb", ".", ...
Returns the traceback frames as a list
[ "Returns", "the", "traceback", "frames", "as", "a", "list" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/errorreporting.py#L158-L192
theiviaxx/Frog
frog/views/errorreporting.py
ExceptionReporter.format_exception
def format_exception(self):
    """
    Return the same data as from traceback.format_exception.
    """
    import traceback
    frames = self.get_traceback_frames()
    # (filename, lineno, function, text) tuples, as format_list expects.
    tb = [(f['filename'], f['lineno'], f['function'], f['context_line'])
          for f in frames]
    # Fix: renamed the accumulator from `list`, which shadowed the builtin.
    lines = ['Traceback (most recent call last):\n']
    lines += traceback.format_list(tb)
    lines += traceback.format_exception_only(self.exc_type, self.exc_value)
    return lines
python
def format_exception(self): """ Return the same data as from traceback.format_exception. """ import traceback frames = self.get_traceback_frames() tb = [(f['filename'], f['lineno'], f['function'], f['context_line']) for f in frames] list = ['Traceback (most recent call last):\n'] list += traceback.format_list(tb) list += traceback.format_exception_only(self.exc_type, self.exc_value) return list
[ "def", "format_exception", "(", "self", ")", ":", "import", "traceback", "frames", "=", "self", ".", "get_traceback_frames", "(", ")", "tb", "=", "[", "(", "f", "[", "'filename'", "]", ",", "f", "[", "'lineno'", "]", ",", "f", "[", "'function'", "]", ...
Return the same data as from traceback.format_exception.
[ "Return", "the", "same", "data", "as", "from", "traceback", ".", "format_exception", "." ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/errorreporting.py#L194-L204
gmr/tredis
tredis/keys.py
KeysMixin.delete
def delete(self, *keys):
    """Remove the specified keys, ignoring any key that does not exist.

    Returns :data:`True` if all keys are removed.

    .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of
       keys that will be removed. When a key to remove holds a value other
       than a string, the individual complexity for this key is ``O(M)``
       where ``M`` is the number of elements in the list, set, sorted set
       or hash. Removing a single key that holds a string value is ``O(1)``.

    :param keys: One or more keys to remove
    :type keys: :class:`str`, :class:`bytes`
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'DEL']
    command.extend(keys)
    return self._execute(command, len(keys))
python
def delete(self, *keys): """Removes the specified keys. A key is ignored if it does not exist. Returns :data:`True` if all keys are removed. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys that will be removed. When a key to remove holds a value other than a string, the individual complexity for this key is ``O(M)`` where ``M`` is the number of elements in the list, set, sorted set or hash. Removing a single key that holds a string value is ``O(1)``. :param keys: One or more keys to remove :type keys: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'DEL'] + list(keys), len(keys))
[ "def", "delete", "(", "self", ",", "*", "keys", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'DEL'", "]", "+", "list", "(", "keys", ")", ",", "len", "(", "keys", ")", ")" ]
Removes the specified keys. A key is ignored if it does not exist. Returns :data:`True` if all keys are removed. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys that will be removed. When a key to remove holds a value other than a string, the individual complexity for this key is ``O(M)`` where ``M`` is the number of elements in the list, set, sorted set or hash. Removing a single key that holds a string value is ``O(1)``. :param keys: One or more keys to remove :type keys: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Removes", "the", "specified", "keys", ".", "A", "key", "is", "ignored", "if", "it", "does", "not", "exist", ".", "Returns", ":", "data", ":", "True", "if", "all", "keys", "are", "removed", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L12-L30
gmr/tredis
tredis/keys.py
KeysMixin.expire
def expire(self, key, timeout):
    """Set a timeout on key; after it expires the key is deleted.

    A key with an associated timeout is often said to be volatile in Redis
    terminology. The timeout is cleared only when the key is removed using
    the :meth:`~tredis.RedisClient.delete` method or overwritten using the
    :meth:`~tredis.RedisClient.set` or :meth:`~tredis.RedisClient.getset`
    methods. Operations that conceptually alter the value stored at the key
    without replacing it (e.g. :meth:`~tredis.RedisClient.incr`,
    :meth:`~tredis.RedisClient.lpush`, :meth:`~tredis.RedisClient.hset`)
    leave the timeout untouched. The timeout can also be cleared, turning
    the key back into a persistent key, with
    :meth:`~tredis.RedisClient.persist`. If a key is renamed with
    :meth:`~tredis.RedisClient.rename`, the associated time to live is
    transferred to the new key name; a key overwritten by a rename inherits
    all characteristics of the source key.

    .. note:: **Time complexity**: ``O(1)``

    :param key: The key to set an expiration for
    :type key: :class:`str`, :class:`bytes`
    :param int timeout: The number of seconds to set the timeout to
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    seconds = ascii(timeout).encode('ascii')
    return self._execute([b'EXPIRE', key, seconds], 1)
python
def expire(self, key, timeout): """Set a timeout on key. After the timeout has expired, the key will automatically be deleted. A key with an associated timeout is often said to be volatile in Redis terminology. The timeout is cleared only when the key is removed using the :meth:`~tredis.RedisClient.delete` method or overwritten using the :meth:`~tredis.RedisClient.set` or :meth:`~tredis.RedisClient.getset` methods. This means that all the operations that conceptually alter the value stored at the key without replacing it with a new one will leave the timeout untouched. For instance, incrementing the value of a key with :meth:`~tredis.RedisClient.incr`, pushing a new value into a list with :meth:`~tredis.RedisClient.lpush`, or altering the field value of a hash with :meth:`~tredis.RedisClient.hset` are all operations that will leave the timeout untouched. The timeout can also be cleared, turning the key back into a persistent key, using the :meth:`~tredis.RedisClient.persist` method. If a key is renamed with :meth:`~tredis.RedisClient.rename`, the associated time to live is transferred to the new key name. If a key is overwritten by :meth:`~tredis.RedisClient.rename`, like in the case of an existing key ``Key_A`` that is overwritten by a call like ``client.rename(Key_B, Key_A)`` it does not matter if the original ``Key_A`` had a timeout associated or not, the new key ``Key_A`` will inherit all the characteristics of ``Key_B``. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timeout: The number of seconds to set the timeout to :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'EXPIRE', key, ascii(timeout).encode('ascii')], 1)
[ "def", "expire", "(", "self", ",", "key", ",", "timeout", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'EXPIRE'", ",", "key", ",", "ascii", "(", "timeout", ")", ".", "encode", "(", "'ascii'", ")", "]", ",", "1", ")" ]
Set a timeout on key. After the timeout has expired, the key will automatically be deleted. A key with an associated timeout is often said to be volatile in Redis terminology. The timeout is cleared only when the key is removed using the :meth:`~tredis.RedisClient.delete` method or overwritten using the :meth:`~tredis.RedisClient.set` or :meth:`~tredis.RedisClient.getset` methods. This means that all the operations that conceptually alter the value stored at the key without replacing it with a new one will leave the timeout untouched. For instance, incrementing the value of a key with :meth:`~tredis.RedisClient.incr`, pushing a new value into a list with :meth:`~tredis.RedisClient.lpush`, or altering the field value of a hash with :meth:`~tredis.RedisClient.hset` are all operations that will leave the timeout untouched. The timeout can also be cleared, turning the key back into a persistent key, using the :meth:`~tredis.RedisClient.persist` method. If a key is renamed with :meth:`~tredis.RedisClient.rename`, the associated time to live is transferred to the new key name. If a key is overwritten by :meth:`~tredis.RedisClient.rename`, like in the case of an existing key ``Key_A`` that is overwritten by a call like ``client.rename(Key_B, Key_A)`` it does not matter if the original ``Key_A`` had a timeout associated or not, the new key ``Key_A`` will inherit all the characteristics of ``Key_B``. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timeout: The number of seconds to set the timeout to :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Set", "a", "timeout", "on", "key", ".", "After", "the", "timeout", "has", "expired", "the", "key", "will", "automatically", "be", "deleted", ".", "A", "key", "with", "an", "associated", "timeout", "is", "often", "said", "to", "be", "volatile", "in", "Re...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L86-L126
gmr/tredis
tredis/keys.py
KeysMixin.expireat
def expireat(self, key, timestamp):
    """:meth:`~tredis.RedisClient.expireat` has the same effect and
    semantic as :meth:`~tredis.RedisClient.expire`, but instead of
    specifying the number of seconds representing the TTL (time to live),
    it takes an absolute Unix timestamp (seconds since January 1, 1970).
    For the specific semantics of the command refer to the documentation
    of :meth:`~tredis.RedisClient.expire`.

    .. note:: **Time complexity**: ``O(1)``

    :param key: The key to set an expiration for
    :type key: :class:`str`, :class:`bytes`
    :param int timestamp: The UNIX epoch value for the expiration
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    when = ascii(timestamp).encode('ascii')
    return self._execute([b'EXPIREAT', key, when], 1)
python
def expireat(self, key, timestamp): """:meth:`~tredis.RedisClient.expireat` has the same effect and semantic as :meth:`~tredis.RedisClient.expire`, but instead of specifying the number of seconds representing the TTL (time to live), it takes an absolute Unix timestamp (seconds since January 1, 1970). Please for the specific semantics of the command refer to the documentation of :meth:`~tredis.RedisClient.expire`. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timestamp: The UNIX epoch value for the expiration :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'EXPIREAT', key, ascii(timestamp).encode('ascii')], 1)
[ "def", "expireat", "(", "self", ",", "key", ",", "timestamp", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'EXPIREAT'", ",", "key", ",", "ascii", "(", "timestamp", ")", ".", "encode", "(", "'ascii'", ")", "]", ",", "1", ")" ]
:meth:`~tredis.RedisClient.expireat` has the same effect and semantic as :meth:`~tredis.RedisClient.expire`, but instead of specifying the number of seconds representing the TTL (time to live), it takes an absolute Unix timestamp (seconds since January 1, 1970). Please for the specific semantics of the command refer to the documentation of :meth:`~tredis.RedisClient.expire`. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timestamp: The UNIX epoch value for the expiration :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "expireat", "has", "the", "same", "effect", "and", "semantic", "as", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "expire", "but", "instead", "of", "specifying", "the", "number", "of", "seconds...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L128-L150
gmr/tredis
tredis/keys.py
KeysMixin.migrate
def migrate(self, host, port, key, destination_db, timeout, copy=False, replace=False):
    """Atomically transfer a key from a source Redis instance to a
    destination Redis instance. On success the key is deleted from the
    original instance and is guaranteed to exist in the target instance.

    The command is atomic and blocks the two instances for the time
    required to transfer the key; at any given time the key will appear to
    exist in one instance or the other, unless a timeout error occurs.

    .. note:: **Time complexity**: This command actually executes a
       DUMP+DEL in the source instance, and a RESTORE in the target
       instance. See the pages of these commands for time complexity.
       Also an ``O(N)`` data transfer between the two instances is
       performed.

    :param host: The host to migrate the key to
    :type host: bytes, str
    :param int port: The port to connect on
    :param key: The key to migrate
    :type key: bytes, str
    :param int destination_db: The database number to select
    :param int timeout: The maximum idle time in milliseconds
    :param bool copy: Do not remove the key from the local instance
    :param bool replace: Replace existing key on the remote instance
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [
        b'MIGRATE',
        host,
        ascii(port).encode('ascii'),
        key,
        ascii(destination_db).encode('ascii'),
        ascii(timeout).encode('ascii'),
    ]
    # Optional modifiers are appended only when explicitly True,
    # matching the original strict `is True` checks.
    for enabled, token in ((copy, b'COPY'), (replace, b'REPLACE')):
        if enabled is True:
            command.append(token)
    return self._execute(command, b'OK')
python
def migrate(self, host, port, key, destination_db, timeout, copy=False, replace=False): """Atomically transfer a key from a source Redis instance to a destination Redis instance. On success the key is deleted from the original instance and is guaranteed to exist in the target instance. The command is atomic and blocks the two instances for the time required to transfer the key, at any given time the key will appear to exist in a given instance or in the other instance, unless a timeout error occurs. .. note:: **Time complexity**: This command actually executes a DUMP+DEL in the source instance, and a RESTORE in the target instance. See the pages of these commands for time complexity. Also an ``O(N)`` data transfer between the two instances is performed. :param host: The host to migrate the key to :type host: bytes, str :param int port: The port to connect on :param key: The key to migrate :type key: bytes, str :param int destination_db: The database number to select :param int timeout: The maximum idle time in milliseconds :param bool copy: Do not remove the key from the local instance :param bool replace: Replace existing key on the remote instance :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ command = [ b'MIGRATE', host, ascii(port).encode('ascii'), key, ascii(destination_db).encode('ascii'), ascii(timeout).encode('ascii') ] if copy is True: command.append(b'COPY') if replace is True: command.append(b'REPLACE') return self._execute(command, b'OK')
[ "def", "migrate", "(", "self", ",", "host", ",", "port", ",", "key", ",", "destination_db", ",", "timeout", ",", "copy", "=", "False", ",", "replace", "=", "False", ")", ":", "command", "=", "[", "b'MIGRATE'", ",", "host", ",", "ascii", "(", "port", ...
Atomically transfer a key from a source Redis instance to a destination Redis instance. On success the key is deleted from the original instance and is guaranteed to exist in the target instance. The command is atomic and blocks the two instances for the time required to transfer the key, at any given time the key will appear to exist in a given instance or in the other instance, unless a timeout error occurs. .. note:: **Time complexity**: This command actually executes a DUMP+DEL in the source instance, and a RESTORE in the target instance. See the pages of these commands for time complexity. Also an ``O(N)`` data transfer between the two instances is performed. :param host: The host to migrate the key to :type host: bytes, str :param int port: The port to connect on :param key: The key to migrate :type key: bytes, str :param int destination_db: The database number to select :param int timeout: The maximum idle time in milliseconds :param bool copy: Do not remove the key from the local instance :param bool replace: Replace existing key on the remote instance :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Atomically", "transfer", "a", "key", "from", "a", "source", "Redis", "instance", "to", "a", "destination", "Redis", "instance", ".", "On", "success", "the", "key", "is", "deleted", "from", "the", "original", "instance", "and", "is", "guaranteed", "to", "exi...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L191-L238
gmr/tredis
tredis/keys.py
KeysMixin.move
def move(self, key, db): """Move key from the currently selected database (see :meth:`~tredis.RedisClient.select`) to the specified destination database. When key already exists in the destination database, or it does not exist in the source database, it does nothing. It is possible to use :meth:`~tredis.RedisClient.move` as a locking primitive because of this. .. note:: **Time complexity**: ``O(1)`` :param key: The key to move :type key: :class:`str`, :class:`bytes` :param int db: The database number :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'MOVE', key, ascii(db).encode('ascii')], 1)
python
def move(self, key, db): """Move key from the currently selected database (see :meth:`~tredis.RedisClient.select`) to the specified destination database. When key already exists in the destination database, or it does not exist in the source database, it does nothing. It is possible to use :meth:`~tredis.RedisClient.move` as a locking primitive because of this. .. note:: **Time complexity**: ``O(1)`` :param key: The key to move :type key: :class:`str`, :class:`bytes` :param int db: The database number :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'MOVE', key, ascii(db).encode('ascii')], 1)
[ "def", "move", "(", "self", ",", "key", ",", "db", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'MOVE'", ",", "key", ",", "ascii", "(", "db", ")", ".", "encode", "(", "'ascii'", ")", "]", ",", "1", ")" ]
Move key from the currently selected database (see :meth:`~tredis.RedisClient.select`) to the specified destination database. When key already exists in the destination database, or it does not exist in the source database, it does nothing. It is possible to use :meth:`~tredis.RedisClient.move` as a locking primitive because of this. .. note:: **Time complexity**: ``O(1)`` :param key: The key to move :type key: :class:`str`, :class:`bytes` :param int db: The database number :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Move", "key", "from", "the", "currently", "selected", "database", "(", "see", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "select", ")", "to", "the", "specified", "destination", "database", ".", "When", "key", "already", "exists", "in", "the", "...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L240-L259
gmr/tredis
tredis/keys.py
KeysMixin.pexpire
def pexpire(self, key, timeout): """This command works exactly like :meth:`~tredis.RedisClient.pexpire` but the time to live of the key is specified in milliseconds instead of seconds. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timeout: The number of milliseconds to set the timeout to :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'PEXPIRE', key, ascii(timeout).encode('ascii')], 1)
python
def pexpire(self, key, timeout): """This command works exactly like :meth:`~tredis.RedisClient.pexpire` but the time to live of the key is specified in milliseconds instead of seconds. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timeout: The number of milliseconds to set the timeout to :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'PEXPIRE', key, ascii(timeout).encode('ascii')], 1)
[ "def", "pexpire", "(", "self", ",", "key", ",", "timeout", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'PEXPIRE'", ",", "key", ",", "ascii", "(", "timeout", ")", ".", "encode", "(", "'ascii'", ")", "]", ",", "1", ")" ]
This command works exactly like :meth:`~tredis.RedisClient.pexpire` but the time to live of the key is specified in milliseconds instead of seconds. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timeout: The number of milliseconds to set the timeout to :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "This", "command", "works", "exactly", "like", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "pexpire", "but", "the", "time", "to", "live", "of", "the", "key", "is", "specified", "in", "milliseconds", "instead", "of", "seconds", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L328-L345
gmr/tredis
tredis/keys.py
KeysMixin.pexpireat
def pexpireat(self, key, timestamp): """:meth:`~tredis.RedisClient.pexpireat` has the same effect and semantic as :meth:`~tredis.RedisClient.expireat`, but the Unix time at which the key will expire is specified in milliseconds instead of seconds. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timestamp: The expiration UNIX epoch value in milliseconds :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'PEXPIREAT', key, ascii(timestamp).encode('ascii')], 1)
python
def pexpireat(self, key, timestamp): """:meth:`~tredis.RedisClient.pexpireat` has the same effect and semantic as :meth:`~tredis.RedisClient.expireat`, but the Unix time at which the key will expire is specified in milliseconds instead of seconds. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timestamp: The expiration UNIX epoch value in milliseconds :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'PEXPIREAT', key, ascii(timestamp).encode('ascii')], 1)
[ "def", "pexpireat", "(", "self", ",", "key", ",", "timestamp", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'PEXPIREAT'", ",", "key", ",", "ascii", "(", "timestamp", ")", ".", "encode", "(", "'ascii'", ")", "]", ",", "1", ")" ]
:meth:`~tredis.RedisClient.pexpireat` has the same effect and semantic as :meth:`~tredis.RedisClient.expireat`, but the Unix time at which the key will expire is specified in milliseconds instead of seconds. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timestamp: The expiration UNIX epoch value in milliseconds :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "pexpireat", "has", "the", "same", "effect", "and", "semantic", "as", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "expireat", "but", "the", "Unix", "time", "at", "which", "the", "key", "will...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L347-L366
gmr/tredis
tredis/keys.py
KeysMixin.rename
def rename(self, key, new_key): """Renames ``key`` to ``new_key``. It returns an error when the source and destination names are the same, or when ``key`` does not exist. If ``new_key`` already exists it is overwritten, when this happens :meth:`~tredis.RedisClient.rename` executes an implicit :meth:`~tredis.RedisClient.delete` operation, so if the deleted key contains a very big value it may cause high latency even if :meth:`~tredis.RedisClient.rename` itself is usually a constant-time operation. .. note:: **Time complexity**: ``O(1)`` :param key: The key to rename :type key: :class:`str`, :class:`bytes` :param new_key: The key to rename it to :type new_key: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'RENAME', key, new_key], b'OK')
python
def rename(self, key, new_key): """Renames ``key`` to ``new_key``. It returns an error when the source and destination names are the same, or when ``key`` does not exist. If ``new_key`` already exists it is overwritten, when this happens :meth:`~tredis.RedisClient.rename` executes an implicit :meth:`~tredis.RedisClient.delete` operation, so if the deleted key contains a very big value it may cause high latency even if :meth:`~tredis.RedisClient.rename` itself is usually a constant-time operation. .. note:: **Time complexity**: ``O(1)`` :param key: The key to rename :type key: :class:`str`, :class:`bytes` :param new_key: The key to rename it to :type new_key: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'RENAME', key, new_key], b'OK')
[ "def", "rename", "(", "self", ",", "key", ",", "new_key", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'RENAME'", ",", "key", ",", "new_key", "]", ",", "b'OK'", ")" ]
Renames ``key`` to ``new_key``. It returns an error when the source and destination names are the same, or when ``key`` does not exist. If ``new_key`` already exists it is overwritten, when this happens :meth:`~tredis.RedisClient.rename` executes an implicit :meth:`~tredis.RedisClient.delete` operation, so if the deleted key contains a very big value it may cause high latency even if :meth:`~tredis.RedisClient.rename` itself is usually a constant-time operation. .. note:: **Time complexity**: ``O(1)`` :param key: The key to rename :type key: :class:`str`, :class:`bytes` :param new_key: The key to rename it to :type new_key: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Renames", "key", "to", "new_key", ".", "It", "returns", "an", "error", "when", "the", "source", "and", "destination", "names", "are", "the", "same", "or", "when", "key", "does", "not", "exist", ".", "If", "new_key", "already", "exists", "it", "is", "ove...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L409-L431
gmr/tredis
tredis/keys.py
KeysMixin.restore
def restore(self, key, ttl, value, replace=False): """Create a key associated with a value that is obtained by deserializing the provided serialized value (obtained via :meth:`~tredis.RedisClient.dump`). If ``ttl`` is ``0`` the key is created without any expire, otherwise the specified expire time (in milliseconds) is set. :meth:`~tredis.RedisClient.restore` will return a ``Target key name is busy`` error when key already exists unless you use the :meth:`~tredis.RedisClient.restore` modifier (Redis 3.0 or greater). :meth:`~tredis.RedisClient.restore` checks the RDB version and data checksum. If they don't match an error is returned. .. note:: **Time complexity**: ``O(1)`` to create the new key and additional ``O(N*M)`` to reconstruct the serialized value, where ``N`` is the number of Redis objects composing the value and ``M`` their average size. For small string values the time complexity is thus ``O(1)+O(1*M)`` where ``M`` is small, so simply ``O(1)``. However for sorted set values the complexity is ``O(N*M*log(N))`` because inserting values into sorted sets is ``O(log(N))``. :param key: The key to get the TTL for :type key: :class:`str`, :class:`bytes` :param int ttl: The number of seconds to set the timeout to :param value: The value to restore to the key :type value: :class:`str`, :class:`bytes` :param bool replace: Replace a pre-existing key :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'RESTORE', key, ascii(ttl).encode('ascii'), value] if replace: command.append(b'REPLACE') return self._execute(command, b'OK')
python
def restore(self, key, ttl, value, replace=False): """Create a key associated with a value that is obtained by deserializing the provided serialized value (obtained via :meth:`~tredis.RedisClient.dump`). If ``ttl`` is ``0`` the key is created without any expire, otherwise the specified expire time (in milliseconds) is set. :meth:`~tredis.RedisClient.restore` will return a ``Target key name is busy`` error when key already exists unless you use the :meth:`~tredis.RedisClient.restore` modifier (Redis 3.0 or greater). :meth:`~tredis.RedisClient.restore` checks the RDB version and data checksum. If they don't match an error is returned. .. note:: **Time complexity**: ``O(1)`` to create the new key and additional ``O(N*M)`` to reconstruct the serialized value, where ``N`` is the number of Redis objects composing the value and ``M`` their average size. For small string values the time complexity is thus ``O(1)+O(1*M)`` where ``M`` is small, so simply ``O(1)``. However for sorted set values the complexity is ``O(N*M*log(N))`` because inserting values into sorted sets is ``O(log(N))``. :param key: The key to get the TTL for :type key: :class:`str`, :class:`bytes` :param int ttl: The number of seconds to set the timeout to :param value: The value to restore to the key :type value: :class:`str`, :class:`bytes` :param bool replace: Replace a pre-existing key :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'RESTORE', key, ascii(ttl).encode('ascii'), value] if replace: command.append(b'REPLACE') return self._execute(command, b'OK')
[ "def", "restore", "(", "self", ",", "key", ",", "ttl", ",", "value", ",", "replace", "=", "False", ")", ":", "command", "=", "[", "b'RESTORE'", ",", "key", ",", "ascii", "(", "ttl", ")", ".", "encode", "(", "'ascii'", ")", ",", "value", "]", "if"...
Create a key associated with a value that is obtained by deserializing the provided serialized value (obtained via :meth:`~tredis.RedisClient.dump`). If ``ttl`` is ``0`` the key is created without any expire, otherwise the specified expire time (in milliseconds) is set. :meth:`~tredis.RedisClient.restore` will return a ``Target key name is busy`` error when key already exists unless you use the :meth:`~tredis.RedisClient.restore` modifier (Redis 3.0 or greater). :meth:`~tredis.RedisClient.restore` checks the RDB version and data checksum. If they don't match an error is returned. .. note:: **Time complexity**: ``O(1)`` to create the new key and additional ``O(N*M)`` to reconstruct the serialized value, where ``N`` is the number of Redis objects composing the value and ``M`` their average size. For small string values the time complexity is thus ``O(1)+O(1*M)`` where ``M`` is small, so simply ``O(1)``. However for sorted set values the complexity is ``O(N*M*log(N))`` because inserting values into sorted sets is ``O(log(N))``. :param key: The key to get the TTL for :type key: :class:`str`, :class:`bytes` :param int ttl: The number of seconds to set the timeout to :param value: The value to restore to the key :type value: :class:`str`, :class:`bytes` :param bool replace: Replace a pre-existing key :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Create", "a", "key", "associated", "with", "a", "value", "that", "is", "obtained", "by", "deserializing", "the", "provided", "serialized", "value", "(", "obtained", "via", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "dump", ")", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L452-L491
gmr/tredis
tredis/keys.py
KeysMixin.scan
def scan(self, cursor=0, pattern=None, count=None): """The :meth:`~tredis.RedisClient.scan` command and the closely related commands :meth:`~tredis.RedisClient.sscan`, :meth:`~tredis.RedisClient.hscan` and :meth:`~tredis.RedisClient.zscan` are used in order to incrementally iterate over a collection of elements. - :meth:`~tredis.RedisClient.scan` iterates the set of keys in the currently selected Redis database. - :meth:`~tredis.RedisClient.sscan` iterates elements of Sets types. - :meth:`~tredis.RedisClient.hscan` iterates fields of Hash types and their associated values. - :meth:`~tredis.RedisClient.zscan` iterates elements of Sorted Set types and their associated scores. **Basic usage** :meth:`~tredis.RedisClient.scan` is a cursor based iterator. This means that at every call of the command, the server returns an updated cursor that the user needs to use as the cursor argument in the next call. An iteration starts when the cursor is set to ``0``, and terminates when the cursor returned by the server is ``0``. For more information on :meth:`~tredis.RedisClient.scan`, visit the `Redis docs on scan <http://redis.io/commands/scan>`_. .. note:: **Time complexity**: ``O(1)`` for every call. ``O(N)`` for a complete iteration, including enough command calls for the cursor to return back to ``0``. ``N`` is the number of elements inside the collection. 
:param int cursor: The server specified cursor value or ``0`` :param pattern: An optional pattern to apply for key matching :type pattern: :class:`str`, :class:`bytes` :param int count: An optional amount of work to perform in the scan :rtype: int, list :returns: A tuple containing the cursor and the list of keys :raises: :exc:`~tredis.exceptions.RedisError` """ def format_response(value): """Format the response from redis :param tuple value: The return response from redis :rtype: tuple(int, list) """ return int(value[0]), value[1] command = [b'SCAN', ascii(cursor).encode('ascii')] if pattern: command += [b'MATCH', pattern] if count: command += [b'COUNT', ascii(count).encode('ascii')] return self._execute(command, format_callback=format_response)
python
def scan(self, cursor=0, pattern=None, count=None): """The :meth:`~tredis.RedisClient.scan` command and the closely related commands :meth:`~tredis.RedisClient.sscan`, :meth:`~tredis.RedisClient.hscan` and :meth:`~tredis.RedisClient.zscan` are used in order to incrementally iterate over a collection of elements. - :meth:`~tredis.RedisClient.scan` iterates the set of keys in the currently selected Redis database. - :meth:`~tredis.RedisClient.sscan` iterates elements of Sets types. - :meth:`~tredis.RedisClient.hscan` iterates fields of Hash types and their associated values. - :meth:`~tredis.RedisClient.zscan` iterates elements of Sorted Set types and their associated scores. **Basic usage** :meth:`~tredis.RedisClient.scan` is a cursor based iterator. This means that at every call of the command, the server returns an updated cursor that the user needs to use as the cursor argument in the next call. An iteration starts when the cursor is set to ``0``, and terminates when the cursor returned by the server is ``0``. For more information on :meth:`~tredis.RedisClient.scan`, visit the `Redis docs on scan <http://redis.io/commands/scan>`_. .. note:: **Time complexity**: ``O(1)`` for every call. ``O(N)`` for a complete iteration, including enough command calls for the cursor to return back to ``0``. ``N`` is the number of elements inside the collection. 
:param int cursor: The server specified cursor value or ``0`` :param pattern: An optional pattern to apply for key matching :type pattern: :class:`str`, :class:`bytes` :param int count: An optional amount of work to perform in the scan :rtype: int, list :returns: A tuple containing the cursor and the list of keys :raises: :exc:`~tredis.exceptions.RedisError` """ def format_response(value): """Format the response from redis :param tuple value: The return response from redis :rtype: tuple(int, list) """ return int(value[0]), value[1] command = [b'SCAN', ascii(cursor).encode('ascii')] if pattern: command += [b'MATCH', pattern] if count: command += [b'COUNT', ascii(count).encode('ascii')] return self._execute(command, format_callback=format_response)
[ "def", "scan", "(", "self", ",", "cursor", "=", "0", ",", "pattern", "=", "None", ",", "count", "=", "None", ")", ":", "def", "format_response", "(", "value", ")", ":", "\"\"\"Format the response from redis\n\n :param tuple value: The return response from r...
The :meth:`~tredis.RedisClient.scan` command and the closely related commands :meth:`~tredis.RedisClient.sscan`, :meth:`~tredis.RedisClient.hscan` and :meth:`~tredis.RedisClient.zscan` are used in order to incrementally iterate over a collection of elements. - :meth:`~tredis.RedisClient.scan` iterates the set of keys in the currently selected Redis database. - :meth:`~tredis.RedisClient.sscan` iterates elements of Sets types. - :meth:`~tredis.RedisClient.hscan` iterates fields of Hash types and their associated values. - :meth:`~tredis.RedisClient.zscan` iterates elements of Sorted Set types and their associated scores. **Basic usage** :meth:`~tredis.RedisClient.scan` is a cursor based iterator. This means that at every call of the command, the server returns an updated cursor that the user needs to use as the cursor argument in the next call. An iteration starts when the cursor is set to ``0``, and terminates when the cursor returned by the server is ``0``. For more information on :meth:`~tredis.RedisClient.scan`, visit the `Redis docs on scan <http://redis.io/commands/scan>`_. .. note:: **Time complexity**: ``O(1)`` for every call. ``O(N)`` for a complete iteration, including enough command calls for the cursor to return back to ``0``. ``N`` is the number of elements inside the collection. :param int cursor: The server specified cursor value or ``0`` :param pattern: An optional pattern to apply for key matching :type pattern: :class:`str`, :class:`bytes` :param int count: An optional amount of work to perform in the scan :rtype: int, list :returns: A tuple containing the cursor and the list of keys :raises: :exc:`~tredis.exceptions.RedisError`
[ "The", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "scan", "command", "and", "the", "closely", "related", "commands", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "sscan", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "hscan", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L493-L552
gmr/tredis
tredis/keys.py
KeysMixin.sort
def sort(self, key, by=None, external=None, offset=0, limit=None, order=None, alpha=False, store_as=None): """Returns or stores the elements contained in the list, set or sorted set at key. By default, sorting is numeric and elements are compared by their value interpreted as double precision floating point number. The ``external`` parameter is used to specify the `GET <http://redis.io/commands/sort#retrieving-external-keys>_` parameter for retrieving external keys. It can be a single string or a list of strings. .. note:: **Time complexity**: ``O(N+M*log(M))`` where ``N`` is the number of elements in the list or set to sort, and ``M`` the number of returned elements. When the elements are not sorted, complexity is currently ``O(N)`` as there is a copy step that will be avoided in next releases. :param key: The key to get the refcount for :type key: :class:`str`, :class:`bytes` :param by: The optional pattern for external sorting keys :type by: :class:`str`, :class:`bytes` :param external: Pattern or list of patterns to return external keys :type external: :class:`str`, :class:`bytes`, list :param int offset: The starting offset when using limit :param int limit: The number of elements to return :param order: The sort order - one of ``ASC`` or ``DESC`` :type order: :class:`str`, :class:`bytes` :param bool alpha: Sort the results lexicographically :param store_as: When specified, the key to store the results as :type store_as: :class:`str`, :class:`bytes`, None :rtype: list|int :raises: :exc:`~tredis.exceptions.RedisError` :raises: :exc:`ValueError` """ if order and order not in [b'ASC', b'DESC', 'ASC', 'DESC']: raise ValueError('invalid sort order "{}"'.format(order)) command = [b'SORT', key] if by: command += [b'BY', by] if external and isinstance(external, list): for entry in external: command += [b'GET', entry] elif external: command += [b'GET', external] if limit: command += [ b'LIMIT', ascii(offset).encode('utf-8'), ascii(limit).encode('utf-8') ] if order: 
command.append(order) if alpha is True: command.append(b'ALPHA') if store_as: command += [b'STORE', store_as] return self._execute(command)
python
def sort(self, key, by=None, external=None, offset=0, limit=None, order=None, alpha=False, store_as=None): """Returns or stores the elements contained in the list, set or sorted set at key. By default, sorting is numeric and elements are compared by their value interpreted as double precision floating point number. The ``external`` parameter is used to specify the `GET <http://redis.io/commands/sort#retrieving-external-keys>_` parameter for retrieving external keys. It can be a single string or a list of strings. .. note:: **Time complexity**: ``O(N+M*log(M))`` where ``N`` is the number of elements in the list or set to sort, and ``M`` the number of returned elements. When the elements are not sorted, complexity is currently ``O(N)`` as there is a copy step that will be avoided in next releases. :param key: The key to get the refcount for :type key: :class:`str`, :class:`bytes` :param by: The optional pattern for external sorting keys :type by: :class:`str`, :class:`bytes` :param external: Pattern or list of patterns to return external keys :type external: :class:`str`, :class:`bytes`, list :param int offset: The starting offset when using limit :param int limit: The number of elements to return :param order: The sort order - one of ``ASC`` or ``DESC`` :type order: :class:`str`, :class:`bytes` :param bool alpha: Sort the results lexicographically :param store_as: When specified, the key to store the results as :type store_as: :class:`str`, :class:`bytes`, None :rtype: list|int :raises: :exc:`~tredis.exceptions.RedisError` :raises: :exc:`ValueError` """ if order and order not in [b'ASC', b'DESC', 'ASC', 'DESC']: raise ValueError('invalid sort order "{}"'.format(order)) command = [b'SORT', key] if by: command += [b'BY', by] if external and isinstance(external, list): for entry in external: command += [b'GET', entry] elif external: command += [b'GET', external] if limit: command += [ b'LIMIT', ascii(offset).encode('utf-8'), ascii(limit).encode('utf-8') ] if order: 
command.append(order) if alpha is True: command.append(b'ALPHA') if store_as: command += [b'STORE', store_as] return self._execute(command)
[ "def", "sort", "(", "self", ",", "key", ",", "by", "=", "None", ",", "external", "=", "None", ",", "offset", "=", "0", ",", "limit", "=", "None", ",", "order", "=", "None", ",", "alpha", "=", "False", ",", "store_as", "=", "None", ")", ":", "if...
Returns or stores the elements contained in the list, set or sorted set at key. By default, sorting is numeric and elements are compared by their value interpreted as double precision floating point number. The ``external`` parameter is used to specify the `GET <http://redis.io/commands/sort#retrieving-external-keys>_` parameter for retrieving external keys. It can be a single string or a list of strings. .. note:: **Time complexity**: ``O(N+M*log(M))`` where ``N`` is the number of elements in the list or set to sort, and ``M`` the number of returned elements. When the elements are not sorted, complexity is currently ``O(N)`` as there is a copy step that will be avoided in next releases. :param key: The key to get the refcount for :type key: :class:`str`, :class:`bytes` :param by: The optional pattern for external sorting keys :type by: :class:`str`, :class:`bytes` :param external: Pattern or list of patterns to return external keys :type external: :class:`str`, :class:`bytes`, list :param int offset: The starting offset when using limit :param int limit: The number of elements to return :param order: The sort order - one of ``ASC`` or ``DESC`` :type order: :class:`str`, :class:`bytes` :param bool alpha: Sort the results lexicographically :param store_as: When specified, the key to store the results as :type store_as: :class:`str`, :class:`bytes`, None :rtype: list|int :raises: :exc:`~tredis.exceptions.RedisError` :raises: :exc:`ValueError`
[ "Returns", "or", "stores", "the", "elements", "contained", "in", "the", "list", "set", "or", "sorted", "set", "at", "key", ".", "By", "default", "sorting", "is", "numeric", "and", "elements", "are", "compared", "by", "their", "value", "interpreted", "as", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L554-L623
gmr/tredis
tredis/keys.py
KeysMixin.wait
def wait(self, num_slaves, timeout=0): """his command blocks the current client until all the previous write commands are successfully transferred and acknowledged by at least the specified number of slaves. If the timeout, specified in milliseconds, is reached, the command returns even if the specified number of slaves were not yet reached. The command will always return the number of slaves that acknowledged the write commands sent before the :meth:`~tredis.RedisClient.wait` command, both in the case where the specified number of slaves are reached, or when the timeout is reached. .. note:: **Time complexity**: ``O(1)`` :param int num_slaves: Number of slaves to acknowledge previous writes :param int timeout: Timeout in milliseconds :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ command = [ b'WAIT', ascii(num_slaves).encode('ascii'), ascii(timeout).encode('ascii') ] return self._execute(command)
python
def wait(self, num_slaves, timeout=0): """his command blocks the current client until all the previous write commands are successfully transferred and acknowledged by at least the specified number of slaves. If the timeout, specified in milliseconds, is reached, the command returns even if the specified number of slaves were not yet reached. The command will always return the number of slaves that acknowledged the write commands sent before the :meth:`~tredis.RedisClient.wait` command, both in the case where the specified number of slaves are reached, or when the timeout is reached. .. note:: **Time complexity**: ``O(1)`` :param int num_slaves: Number of slaves to acknowledge previous writes :param int timeout: Timeout in milliseconds :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ command = [ b'WAIT', ascii(num_slaves).encode('ascii'), ascii(timeout).encode('ascii') ] return self._execute(command)
[ "def", "wait", "(", "self", ",", "num_slaves", ",", "timeout", "=", "0", ")", ":", "command", "=", "[", "b'WAIT'", ",", "ascii", "(", "num_slaves", ")", ".", "encode", "(", "'ascii'", ")", ",", "ascii", "(", "timeout", ")", ".", "encode", "(", "'as...
his command blocks the current client until all the previous write commands are successfully transferred and acknowledged by at least the specified number of slaves. If the timeout, specified in milliseconds, is reached, the command returns even if the specified number of slaves were not yet reached. The command will always return the number of slaves that acknowledged the write commands sent before the :meth:`~tredis.RedisClient.wait` command, both in the case where the specified number of slaves are reached, or when the timeout is reached. .. note:: **Time complexity**: ``O(1)`` :param int num_slaves: Number of slaves to acknowledge previous writes :param int timeout: Timeout in milliseconds :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`
[ "his", "command", "blocks", "the", "current", "client", "until", "all", "the", "previous", "write", "commands", "are", "successfully", "transferred", "and", "acknowledged", "by", "at", "least", "the", "specified", "number", "of", "slaves", ".", "If", "the", "t...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/keys.py#L659-L686
lobocv/pyperform
pyperform/thread.py
enable_thread_profiling
def enable_thread_profiling(profile_dir, exception_callback=None): """ Monkey-patch the threading.Thread class with our own ProfiledThread. Any subsequent imports of threading.Thread will reference ProfiledThread instead. """ global profiled_thread_enabled, Thread, Process if os.path.isdir(profile_dir): _Profiler.profile_dir = profile_dir else: raise OSError('%s does not exist' % profile_dir) _Profiler.exception_callback = exception_callback Thread = threading.Thread = ProfiledThread Process = multiprocessing.Process = ProfiledProcess profiled_thread_enabled = True
python
def enable_thread_profiling(profile_dir, exception_callback=None): """ Monkey-patch the threading.Thread class with our own ProfiledThread. Any subsequent imports of threading.Thread will reference ProfiledThread instead. """ global profiled_thread_enabled, Thread, Process if os.path.isdir(profile_dir): _Profiler.profile_dir = profile_dir else: raise OSError('%s does not exist' % profile_dir) _Profiler.exception_callback = exception_callback Thread = threading.Thread = ProfiledThread Process = multiprocessing.Process = ProfiledProcess profiled_thread_enabled = True
[ "def", "enable_thread_profiling", "(", "profile_dir", ",", "exception_callback", "=", "None", ")", ":", "global", "profiled_thread_enabled", ",", "Thread", ",", "Process", "if", "os", ".", "path", ".", "isdir", "(", "profile_dir", ")", ":", "_Profiler", ".", "...
Monkey-patch the threading.Thread class with our own ProfiledThread. Any subsequent imports of threading.Thread will reference ProfiledThread instead.
[ "Monkey", "-", "patch", "the", "threading", ".", "Thread", "class", "with", "our", "own", "ProfiledThread", ".", "Any", "subsequent", "imports", "of", "threading", ".", "Thread", "will", "reference", "ProfiledThread", "instead", "." ]
train
https://github.com/lobocv/pyperform/blob/97d87e8b9ddb35bd8f2a6782965fd7735ab0349f/pyperform/thread.py#L22-L35
lobocv/pyperform
pyperform/thread.py
enable_thread_logging
def enable_thread_logging(exception_callback=None): """ Monkey-patch the threading.Thread class with our own LoggedThread. Any subsequent imports of threading.Thread will reference LoggedThread instead. """ global logged_thread_enabled, Thread LoggedThread.exception_callback = exception_callback Thread = threading.Thread = LoggedThread logged_thread_enabled = True
python
def enable_thread_logging(exception_callback=None): """ Monkey-patch the threading.Thread class with our own LoggedThread. Any subsequent imports of threading.Thread will reference LoggedThread instead. """ global logged_thread_enabled, Thread LoggedThread.exception_callback = exception_callback Thread = threading.Thread = LoggedThread logged_thread_enabled = True
[ "def", "enable_thread_logging", "(", "exception_callback", "=", "None", ")", ":", "global", "logged_thread_enabled", ",", "Thread", "LoggedThread", ".", "exception_callback", "=", "exception_callback", "Thread", "=", "threading", ".", "Thread", "=", "LoggedThread", "l...
Monkey-patch the threading.Thread class with our own LoggedThread. Any subsequent imports of threading.Thread will reference LoggedThread instead.
[ "Monkey", "-", "patch", "the", "threading", ".", "Thread", "class", "with", "our", "own", "LoggedThread", ".", "Any", "subsequent", "imports", "of", "threading", ".", "Thread", "will", "reference", "LoggedThread", "instead", "." ]
train
https://github.com/lobocv/pyperform/blob/97d87e8b9ddb35bd8f2a6782965fd7735ab0349f/pyperform/thread.py#L38-L46
theiviaxx/Frog
frog/views/piece.py
image
def image(request, obj_id): """Handles a request based on method and calls the appropriate function""" obj = Image.objects.get(pk=obj_id) if request.method == 'POST': return post(request, obj) elif request.method == 'PUT': getPutData(request) return put(request, obj) elif request.method == 'DELETE': getPutData(request) return delete(request, obj)
python
def image(request, obj_id): """Handles a request based on method and calls the appropriate function""" obj = Image.objects.get(pk=obj_id) if request.method == 'POST': return post(request, obj) elif request.method == 'PUT': getPutData(request) return put(request, obj) elif request.method == 'DELETE': getPutData(request) return delete(request, obj)
[ "def", "image", "(", "request", ",", "obj_id", ")", ":", "obj", "=", "Image", ".", "objects", ".", "get", "(", "pk", "=", "obj_id", ")", "if", "request", ".", "method", "==", "'POST'", ":", "return", "post", "(", "request", ",", "obj", ")", "elif",...
Handles a request based on method and calls the appropriate function
[ "Handles", "a", "request", "based", "on", "method", "and", "calls", "the", "appropriate", "function" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/piece.py#L57-L67
theiviaxx/Frog
frog/views/piece.py
video
def video(request, obj_id): """Handles a request based on method and calls the appropriate function""" obj = Video.objects.get(pk=obj_id) if request.method == 'POST': return post(request, obj) elif request.method == 'PUT': getPutData(request) return put(request, obj) elif request.method == 'DELETE': getPutData(request) return delete(request, obj)
python
def video(request, obj_id): """Handles a request based on method and calls the appropriate function""" obj = Video.objects.get(pk=obj_id) if request.method == 'POST': return post(request, obj) elif request.method == 'PUT': getPutData(request) return put(request, obj) elif request.method == 'DELETE': getPutData(request) return delete(request, obj)
[ "def", "video", "(", "request", ",", "obj_id", ")", ":", "obj", "=", "Video", ".", "objects", ".", "get", "(", "pk", "=", "obj_id", ")", "if", "request", ".", "method", "==", "'POST'", ":", "return", "post", "(", "request", ",", "obj", ")", "elif",...
Handles a request based on method and calls the appropriate function
[ "Handles", "a", "request", "based", "on", "method", "and", "calls", "the", "appropriate", "function" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/piece.py#L71-L81
aiidateam/aiida-ase
aiida_ase/parsers/ase.py
AseParser.parse_from_calc
def parse_from_calc(self): """ Parses the datafolder, stores results. This parser for this simple code does simply store in the DB a node representing the file of forces in real space """ from aiida.common.exceptions import InvalidOperation from aiida.common import aiidalogger from aiida.backends.djsite.utils import get_dblogger_extra import ase, ase.io parserlogger = aiidalogger.getChild('aseparser') logger_extra = get_dblogger_extra(self._calc) # suppose at the start that the job is successful successful = True # check that calculation is in the right state state = self._calc.get_state() if state != calc_states.PARSING: raise InvalidOperation("Calculation not in {} state" .format(calc_states.PARSING) ) # select the folder object out_folder = self._calc.get_retrieved_node() # check what is inside the folder list_of_files = out_folder.get_folder_list() # at least the stdout should exist if not self._calc._OUTPUT_FILE_NAME in list_of_files: successful = False parserlogger.error("Standard output not found",extra=logger_extra) return successful,() # output structure has_out_atoms = True if self._calc._output_aseatoms in list_of_files else False if has_out_atoms: out_atoms = ase.io.read( out_folder.get_abs_path( self._calc._output_aseatoms ) ) out_structure = StructureData().set_ase(out_atoms) # load the results dictionary json_outfile = out_folder.get_abs_path( self._calc._OUTPUT_FILE_NAME ) with open(json_outfile,'r') as f: json_params = json.load(f) # extract arrays from json_params dictionary_array = {} for k,v in list(json_params.iteritems()): if isinstance(v, (list,tuple)): dictionary_array[k] = json_params.pop(k) # look at warnings warnings = [] with open(out_folder.get_abs_path( self._calc._SCHED_ERROR_FILE )) as f: errors = f.read() if errors: warnings = [errors] json_params['warnings'] = warnings # save the outputs new_nodes_list= [] # save the arrays if dictionary_array: array_data = ArrayData() for k,v in dictionary_array.iteritems(): 
array_data.set_array(k,numpy.array(v)) new_nodes_list.append( (self._outarray_name, array_data) ) # save the parameters if json_params: parameter_data = ParameterData( dict=json_params ) new_nodes_list.append( (self._outdict_name, parameter_data) ) if has_out_atoms: structure_data = StructureData() new_nodes_list.append( (self._outstruc_name, structure_data) ) return successful,new_nodes_list
python
def parse_from_calc(self): """ Parses the datafolder, stores results. This parser for this simple code does simply store in the DB a node representing the file of forces in real space """ from aiida.common.exceptions import InvalidOperation from aiida.common import aiidalogger from aiida.backends.djsite.utils import get_dblogger_extra import ase, ase.io parserlogger = aiidalogger.getChild('aseparser') logger_extra = get_dblogger_extra(self._calc) # suppose at the start that the job is successful successful = True # check that calculation is in the right state state = self._calc.get_state() if state != calc_states.PARSING: raise InvalidOperation("Calculation not in {} state" .format(calc_states.PARSING) ) # select the folder object out_folder = self._calc.get_retrieved_node() # check what is inside the folder list_of_files = out_folder.get_folder_list() # at least the stdout should exist if not self._calc._OUTPUT_FILE_NAME in list_of_files: successful = False parserlogger.error("Standard output not found",extra=logger_extra) return successful,() # output structure has_out_atoms = True if self._calc._output_aseatoms in list_of_files else False if has_out_atoms: out_atoms = ase.io.read( out_folder.get_abs_path( self._calc._output_aseatoms ) ) out_structure = StructureData().set_ase(out_atoms) # load the results dictionary json_outfile = out_folder.get_abs_path( self._calc._OUTPUT_FILE_NAME ) with open(json_outfile,'r') as f: json_params = json.load(f) # extract arrays from json_params dictionary_array = {} for k,v in list(json_params.iteritems()): if isinstance(v, (list,tuple)): dictionary_array[k] = json_params.pop(k) # look at warnings warnings = [] with open(out_folder.get_abs_path( self._calc._SCHED_ERROR_FILE )) as f: errors = f.read() if errors: warnings = [errors] json_params['warnings'] = warnings # save the outputs new_nodes_list= [] # save the arrays if dictionary_array: array_data = ArrayData() for k,v in dictionary_array.iteritems(): 
array_data.set_array(k,numpy.array(v)) new_nodes_list.append( (self._outarray_name, array_data) ) # save the parameters if json_params: parameter_data = ParameterData( dict=json_params ) new_nodes_list.append( (self._outdict_name, parameter_data) ) if has_out_atoms: structure_data = StructureData() new_nodes_list.append( (self._outstruc_name, structure_data) ) return successful,new_nodes_list
[ "def", "parse_from_calc", "(", "self", ")", ":", "from", "aiida", ".", "common", ".", "exceptions", "import", "InvalidOperation", "from", "aiida", ".", "common", "import", "aiidalogger", "from", "aiida", ".", "backends", ".", "djsite", ".", "utils", "import", ...
Parses the datafolder, stores results. This parser for this simple code does simply store in the DB a node representing the file of forces in real space
[ "Parses", "the", "datafolder", "stores", "results", ".", "This", "parser", "for", "this", "simple", "code", "does", "simply", "store", "in", "the", "DB", "a", "node", "representing", "the", "file", "of", "forces", "in", "real", "space" ]
train
https://github.com/aiidateam/aiida-ase/blob/688a01fa872717ee3babdb1f10405b306371cf44/aiida_ase/parsers/ase.py#L33-L113
gmr/tredis
tredis/hashes.py
HashesMixin.hset
def hset(self, key, field, value): """Sets `field` in the hash stored at `key` to `value`. If `key` does not exist, a new key holding a hash is created. If `field` already exists in the hash, it is overwritten. .. note:: **Time complexity**: always ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: The field in the hash to set :type key: :class:`str`, :class:`bytes` :param value: The value to set the field to :returns: ``1`` if `field` is a new field in the hash and `value` was set; otherwise, ``0`` if `field` already exists in the hash and the value was updated :rtype: int """ return self._execute([b'HSET', key, field, value])
python
def hset(self, key, field, value): """Sets `field` in the hash stored at `key` to `value`. If `key` does not exist, a new key holding a hash is created. If `field` already exists in the hash, it is overwritten. .. note:: **Time complexity**: always ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: The field in the hash to set :type key: :class:`str`, :class:`bytes` :param value: The value to set the field to :returns: ``1`` if `field` is a new field in the hash and `value` was set; otherwise, ``0`` if `field` already exists in the hash and the value was updated :rtype: int """ return self._execute([b'HSET', key, field, value])
[ "def", "hset", "(", "self", ",", "key", ",", "field", ",", "value", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'HSET'", ",", "key", ",", "field", ",", "value", "]", ")" ]
Sets `field` in the hash stored at `key` to `value`. If `key` does not exist, a new key holding a hash is created. If `field` already exists in the hash, it is overwritten. .. note:: **Time complexity**: always ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: The field in the hash to set :type key: :class:`str`, :class:`bytes` :param value: The value to set the field to :returns: ``1`` if `field` is a new field in the hash and `value` was set; otherwise, ``0`` if `field` already exists in the hash and the value was updated :rtype: int
[ "Sets", "field", "in", "the", "hash", "stored", "at", "key", "to", "value", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L8-L29
gmr/tredis
tredis/hashes.py
HashesMixin.hgetall
def hgetall(self, key): """ Returns all fields and values of the has stored at `key`. The underlying redis `HGETALL`_ command returns an array of pairs. This method converts that to a Python :class:`dict`. It will return an empty :class:`dict` when the key is not found. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the size of the hash. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :returns: a :class:`dict` of key to value mappings for all fields in the hash .. _HGETALL: http://redis.io/commands/hgetall """ def format_response(value): return dict(zip(value[::2], value[1::2])) return self._execute( [b'HGETALL', key], format_callback=format_response)
python
def hgetall(self, key): """ Returns all fields and values of the has stored at `key`. The underlying redis `HGETALL`_ command returns an array of pairs. This method converts that to a Python :class:`dict`. It will return an empty :class:`dict` when the key is not found. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the size of the hash. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :returns: a :class:`dict` of key to value mappings for all fields in the hash .. _HGETALL: http://redis.io/commands/hgetall """ def format_response(value): return dict(zip(value[::2], value[1::2])) return self._execute( [b'HGETALL', key], format_callback=format_response)
[ "def", "hgetall", "(", "self", ",", "key", ")", ":", "def", "format_response", "(", "value", ")", ":", "return", "dict", "(", "zip", "(", "value", "[", ":", ":", "2", "]", ",", "value", "[", "1", ":", ":", "2", "]", ")", ")", "return", "self", ...
Returns all fields and values of the has stored at `key`. The underlying redis `HGETALL`_ command returns an array of pairs. This method converts that to a Python :class:`dict`. It will return an empty :class:`dict` when the key is not found. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the size of the hash. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :returns: a :class:`dict` of key to value mappings for all fields in the hash .. _HGETALL: http://redis.io/commands/hgetall
[ "Returns", "all", "fields", "and", "values", "of", "the", "has", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L49-L76
gmr/tredis
tredis/hashes.py
HashesMixin.hmset
def hmset(self, key, value_dict): """ Sets fields to values as in `value_dict` in the hash stored at `key`. Sets the specified fields to their respective values in the hash stored at `key`. This command overwrites any specified fields already existing in the hash. If `key` does not exist, a new key holding a hash is created. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of fields being set. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param value_dict: field to value mapping :type value_dict: :class:`dict` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ if not value_dict: future = concurrent.TracebackFuture() future.set_result(False) else: command = [b'HMSET', key] command.extend(sum(value_dict.items(), ())) future = self._execute(command) return future
python
def hmset(self, key, value_dict): """ Sets fields to values as in `value_dict` in the hash stored at `key`. Sets the specified fields to their respective values in the hash stored at `key`. This command overwrites any specified fields already existing in the hash. If `key` does not exist, a new key holding a hash is created. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of fields being set. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param value_dict: field to value mapping :type value_dict: :class:`dict` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ if not value_dict: future = concurrent.TracebackFuture() future.set_result(False) else: command = [b'HMSET', key] command.extend(sum(value_dict.items(), ())) future = self._execute(command) return future
[ "def", "hmset", "(", "self", ",", "key", ",", "value_dict", ")", ":", "if", "not", "value_dict", ":", "future", "=", "concurrent", ".", "TracebackFuture", "(", ")", "future", ".", "set_result", "(", "False", ")", "else", ":", "command", "=", "[", "b'HM...
Sets fields to values as in `value_dict` in the hash stored at `key`. Sets the specified fields to their respective values in the hash stored at `key`. This command overwrites any specified fields already existing in the hash. If `key` does not exist, a new key holding a hash is created. .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of fields being set. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param value_dict: field to value mapping :type value_dict: :class:`dict` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Sets", "fields", "to", "values", "as", "in", "value_dict", "in", "the", "hash", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L78-L107
gmr/tredis
tredis/hashes.py
HashesMixin.hmget
def hmget(self, key, *fields): """ Returns the values associated with the specified `fields` in a hash. For every ``field`` that does not exist in the hash, :data:`None` is returned. Because a non-existing keys are treated as empty hashes, calling :meth:`hmget` against a non-existing key will return a list of :data:`None` values. .. note:: *Time complexity*: ``O(N)`` where ``N`` is the number of fields being requested. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param fields: iterable of field names to retrieve :returns: a :class:`dict` of field name to value mappings for each of the requested fields :rtype: dict """ def format_response(val_array): return dict(zip(fields, val_array)) command = [b'HMGET', key] command.extend(fields) return self._execute(command, format_callback=format_response)
python
def hmget(self, key, *fields): """ Returns the values associated with the specified `fields` in a hash. For every ``field`` that does not exist in the hash, :data:`None` is returned. Because a non-existing keys are treated as empty hashes, calling :meth:`hmget` against a non-existing key will return a list of :data:`None` values. .. note:: *Time complexity*: ``O(N)`` where ``N`` is the number of fields being requested. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param fields: iterable of field names to retrieve :returns: a :class:`dict` of field name to value mappings for each of the requested fields :rtype: dict """ def format_response(val_array): return dict(zip(fields, val_array)) command = [b'HMGET', key] command.extend(fields) return self._execute(command, format_callback=format_response)
[ "def", "hmget", "(", "self", ",", "key", ",", "*", "fields", ")", ":", "def", "format_response", "(", "val_array", ")", ":", "return", "dict", "(", "zip", "(", "fields", ",", "val_array", ")", ")", "command", "=", "[", "b'HMGET'", ",", "key", "]", ...
Returns the values associated with the specified `fields` in a hash. For every ``field`` that does not exist in the hash, :data:`None` is returned. Because a non-existing keys are treated as empty hashes, calling :meth:`hmget` against a non-existing key will return a list of :data:`None` values. .. note:: *Time complexity*: ``O(N)`` where ``N`` is the number of fields being requested. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param fields: iterable of field names to retrieve :returns: a :class:`dict` of field name to value mappings for each of the requested fields :rtype: dict
[ "Returns", "the", "values", "associated", "with", "the", "specified", "fields", "in", "a", "hash", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L109-L137
gmr/tredis
tredis/hashes.py
HashesMixin.hdel
def hdel(self, key, *fields): """ Remove the specified fields from the hash stored at `key`. Specified fields that do not exist within this hash are ignored. If `key` does not exist, it is treated as an empty hash and this command returns zero. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param fields: iterable of field names to retrieve :returns: the number of fields that were removed from the hash, not including specified by non-existing fields. :rtype: int """ if not fields: future = concurrent.TracebackFuture() future.set_result(0) else: future = self._execute([b'HDEL', key] + list(fields)) return future
python
def hdel(self, key, *fields): """ Remove the specified fields from the hash stored at `key`. Specified fields that do not exist within this hash are ignored. If `key` does not exist, it is treated as an empty hash and this command returns zero. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param fields: iterable of field names to retrieve :returns: the number of fields that were removed from the hash, not including specified by non-existing fields. :rtype: int """ if not fields: future = concurrent.TracebackFuture() future.set_result(0) else: future = self._execute([b'HDEL', key] + list(fields)) return future
[ "def", "hdel", "(", "self", ",", "key", ",", "*", "fields", ")", ":", "if", "not", "fields", ":", "future", "=", "concurrent", ".", "TracebackFuture", "(", ")", "future", ".", "set_result", "(", "0", ")", "else", ":", "future", "=", "self", ".", "_...
Remove the specified fields from the hash stored at `key`. Specified fields that do not exist within this hash are ignored. If `key` does not exist, it is treated as an empty hash and this command returns zero. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param fields: iterable of field names to retrieve :returns: the number of fields that were removed from the hash, not including specified by non-existing fields. :rtype: int
[ "Remove", "the", "specified", "fields", "from", "the", "hash", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L139-L160
gmr/tredis
tredis/hashes.py
HashesMixin.hincrby
def hincrby(self, key, field, increment): """ Increments the number stored at `field` in the hash stored at `key`. If `key` does not exist, a new key holding a hash is created. If `field` does not exist the value is set to ``0`` before the operation is performed. The range of values supported is limited to 64-bit signed integers. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: name of the field to increment :type key: :class:`str`, :class:`bytes` :param increment: amount to increment by :type increment: int :returns: the value at `field` after the increment occurs :rtype: int """ return self._execute( [b'HINCRBY', key, field, increment], format_callback=int)
python
def hincrby(self, key, field, increment): """ Increments the number stored at `field` in the hash stored at `key`. If `key` does not exist, a new key holding a hash is created. If `field` does not exist the value is set to ``0`` before the operation is performed. The range of values supported is limited to 64-bit signed integers. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: name of the field to increment :type key: :class:`str`, :class:`bytes` :param increment: amount to increment by :type increment: int :returns: the value at `field` after the increment occurs :rtype: int """ return self._execute( [b'HINCRBY', key, field, increment], format_callback=int)
[ "def", "hincrby", "(", "self", ",", "key", ",", "field", ",", "increment", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'HINCRBY'", ",", "key", ",", "field", ",", "increment", "]", ",", "format_callback", "=", "int", ")" ]
Increments the number stored at `field` in the hash stored at `key`. If `key` does not exist, a new key holding a hash is created. If `field` does not exist the value is set to ``0`` before the operation is performed. The range of values supported is limited to 64-bit signed integers. :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: name of the field to increment :type key: :class:`str`, :class:`bytes` :param increment: amount to increment by :type increment: int :returns: the value at `field` after the increment occurs :rtype: int
[ "Increments", "the", "number", "stored", "at", "field", "in", "the", "hash", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L179-L200
gmr/tredis
tredis/hashes.py
HashesMixin.hincrbyfloat
def hincrbyfloat(self, key, field, increment): """ Increments the number stored at `field` in the hash stored at `key`. If the increment value is negative, the result is to have the hash field **decremented** instead of incremented. If the field does not exist, it is set to ``0`` before performing the operation. An error is returned if one of the following conditions occur: - the field contains a value of the wrong type (not a string) - the current field content or the specified increment are not parseable as a double precision floating point number .. note:: *Time complexity*: ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: name of the field to increment :type key: :class:`str`, :class:`bytes` :param increment: amount to increment by :type increment: float :returns: the value at `field` after the increment occurs :rtype: float """ return self._execute( [b'HINCRBYFLOAT', key, field, increment], format_callback=float)
python
def hincrbyfloat(self, key, field, increment): """ Increments the number stored at `field` in the hash stored at `key`. If the increment value is negative, the result is to have the hash field **decremented** instead of incremented. If the field does not exist, it is set to ``0`` before performing the operation. An error is returned if one of the following conditions occur: - the field contains a value of the wrong type (not a string) - the current field content or the specified increment are not parseable as a double precision floating point number .. note:: *Time complexity*: ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: name of the field to increment :type key: :class:`str`, :class:`bytes` :param increment: amount to increment by :type increment: float :returns: the value at `field` after the increment occurs :rtype: float """ return self._execute( [b'HINCRBYFLOAT', key, field, increment], format_callback=float)
[ "def", "hincrbyfloat", "(", "self", ",", "key", ",", "field", ",", "increment", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'HINCRBYFLOAT'", ",", "key", ",", "field", ",", "increment", "]", ",", "format_callback", "=", "float", ")" ]
Increments the number stored at `field` in the hash stored at `key`. If the increment value is negative, the result is to have the hash field **decremented** instead of incremented. If the field does not exist, it is set to ``0`` before performing the operation. An error is returned if one of the following conditions occur: - the field contains a value of the wrong type (not a string) - the current field content or the specified increment are not parseable as a double precision floating point number .. note:: *Time complexity*: ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: name of the field to increment :type key: :class:`str`, :class:`bytes` :param increment: amount to increment by :type increment: float :returns: the value at `field` after the increment occurs :rtype: float
[ "Increments", "the", "number", "stored", "at", "field", "in", "the", "hash", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L202-L231
gmr/tredis
tredis/hashes.py
HashesMixin.hsetnx
def hsetnx(self, key, field, value): """ Sets `field` in the hash stored at `key` only if it does not exist. Sets `field` in the hash stored at `key` only if `field` does not yet exist. If `key` does not exist, a new key holding a hash is created. If `field` already exists, this operation has no effect. .. note:: *Time complexity*: ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: The field in the hash to set :type key: :class:`str`, :class:`bytes` :param value: The value to set the field to :returns: ``1`` if `field` is a new field in the hash and `value` was set. ``0`` if `field` already exists in the hash and no operation was performed :rtype: int """ return self._execute([b'HSETNX', key, field, value])
python
def hsetnx(self, key, field, value): """ Sets `field` in the hash stored at `key` only if it does not exist. Sets `field` in the hash stored at `key` only if `field` does not yet exist. If `key` does not exist, a new key holding a hash is created. If `field` already exists, this operation has no effect. .. note:: *Time complexity*: ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: The field in the hash to set :type key: :class:`str`, :class:`bytes` :param value: The value to set the field to :returns: ``1`` if `field` is a new field in the hash and `value` was set. ``0`` if `field` already exists in the hash and no operation was performed :rtype: int """ return self._execute([b'HSETNX', key, field, value])
[ "def", "hsetnx", "(", "self", ",", "key", ",", "field", ",", "value", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'HSETNX'", ",", "key", ",", "field", ",", "value", "]", ")" ]
Sets `field` in the hash stored at `key` only if it does not exist. Sets `field` in the hash stored at `key` only if `field` does not yet exist. If `key` does not exist, a new key holding a hash is created. If `field` already exists, this operation has no effect. .. note:: *Time complexity*: ``O(1)`` :param key: The key of the hash :type key: :class:`str`, :class:`bytes` :param field: The field in the hash to set :type key: :class:`str`, :class:`bytes` :param value: The value to set the field to :returns: ``1`` if `field` is a new field in the hash and `value` was set. ``0`` if `field` already exists in the hash and no operation was performed :rtype: int
[ "Sets", "field", "in", "the", "hash", "stored", "at", "key", "only", "if", "it", "does", "not", "exist", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/hashes.py#L266-L289
hodgesds/elasticsearch_tornado
elasticsearch_tornado/snapshot.py
SnapshotClient.create_snapshot
def create_snapshot(self, repository, snapshot, body, params={}, callback=None, **kwargs): """ Create a snapshot in repository `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html>`_ :arg repository: A repository name :arg snapshot: A snapshot name :arg body: The snapshot definition :arg master_timeout: Explicit operation timeout for connection to master node :arg wait_for_completion: Should this request wait until the operation has completed before returning, default False """ query_params = ('master_timeout', 'wait_for_completion',) params = self._filter_params(query_params, params) url = self.mk_url(*['_snapshot', repository, snapshot], **params) self.client.fetch( self.mk_req(url, body=body, method='PUT', **kwargs), callback = callback )
python
def create_snapshot(self, repository, snapshot, body, params={}, callback=None, **kwargs): """ Create a snapshot in repository `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html>`_ :arg repository: A repository name :arg snapshot: A snapshot name :arg body: The snapshot definition :arg master_timeout: Explicit operation timeout for connection to master node :arg wait_for_completion: Should this request wait until the operation has completed before returning, default False """ query_params = ('master_timeout', 'wait_for_completion',) params = self._filter_params(query_params, params) url = self.mk_url(*['_snapshot', repository, snapshot], **params) self.client.fetch( self.mk_req(url, body=body, method='PUT', **kwargs), callback = callback )
[ "def", "create_snapshot", "(", "self", ",", "repository", ",", "snapshot", ",", "body", ",", "params", "=", "{", "}", ",", "callback", "=", "None", ",", "*", "*", "kwargs", ")", ":", "query_params", "=", "(", "'master_timeout'", ",", "'wait_for_completion'...
Create a snapshot in repository `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/modules-snapshots.html>`_ :arg repository: A repository name :arg snapshot: A snapshot name :arg body: The snapshot definition :arg master_timeout: Explicit operation timeout for connection to master node :arg wait_for_completion: Should this request wait until the operation has completed before returning, default False
[ "Create", "a", "snapshot", "in", "repository", "<http", ":", "//", "www", ".", "elasticsearch", ".", "org", "/", "guide", "/", "en", "/", "elasticsearch", "/", "reference", "/", "master", "/", "modules", "-", "snapshots", ".", "html", ">", "_" ]
train
https://github.com/hodgesds/elasticsearch_tornado/blob/5acc1385589c92ffe3587ad05b7921c2cd1a30da/elasticsearch_tornado/snapshot.py#L6-L29
hodgesds/elasticsearch_tornado
elasticsearch_tornado/snapshot.py
SnapshotClient.verify_repository
def verify_repository(self, repository, master_timeout = 10, timeout = 10, body = '', params = {}, callback = None, **kwargs ): """ Returns a list of nodes where repository was successfully verified or an error message if verification process failed. `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-snapshots.html>`_ :arg repository: A repository name :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ query_params = ('master_timeout', 'timeout',) params = self._filter_params(query_params, params) url = self.mk_url(*['_snapshot', repository, '_verify'], **params) self.client.fetch( self.mk_req(url, body=body, method='POST', **kwargs), callback = callback )
python
def verify_repository(self, repository, master_timeout = 10, timeout = 10, body = '', params = {}, callback = None, **kwargs ): """ Returns a list of nodes where repository was successfully verified or an error message if verification process failed. `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-snapshots.html>`_ :arg repository: A repository name :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ query_params = ('master_timeout', 'timeout',) params = self._filter_params(query_params, params) url = self.mk_url(*['_snapshot', repository, '_verify'], **params) self.client.fetch( self.mk_req(url, body=body, method='POST', **kwargs), callback = callback )
[ "def", "verify_repository", "(", "self", ",", "repository", ",", "master_timeout", "=", "10", ",", "timeout", "=", "10", ",", "body", "=", "''", ",", "params", "=", "{", "}", ",", "callback", "=", "None", ",", "*", "*", "kwargs", ")", ":", "query_par...
Returns a list of nodes where repository was successfully verified or an error message if verification process failed. `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-snapshots.html>`_ :arg repository: A repository name :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout
[ "Returns", "a", "list", "of", "nodes", "where", "repository", "was", "successfully", "verified", "or", "an", "error", "message", "if", "verification", "process", "failed", ".", "<http", ":", "//", "www", ".", "elasticsearch", ".", "org", "/", "guide", "/", ...
train
https://github.com/hodgesds/elasticsearch_tornado/blob/5acc1385589c92ffe3587ad05b7921c2cd1a30da/elasticsearch_tornado/snapshot.py#L200-L229
daethnir/authprogs
authprogs/authprogs.py
main
def main(): # pylint: disable-msg=R0912,R0915 """Main.""" parser = optparse.OptionParser() parser.usage = textwrap.dedent("""\ %prog {--run|--install_key|--dump_config} [options] SSH command authenticator. Used to restrict which commands can be run via trusted SSH keys. """) group = optparse.OptionGroup( parser, 'Run Mode Options', 'These options determine in which mode the authprogs ' 'program runs.') group.add_option( '-r', '--run', dest='run', action='store_true', help='Act as ssh command authenticator. Use this ' 'when calling from authorized_keys.') group.add_option( '--dump_config', dest='dump_config', action='store_true', help='Dump configuration (python format) ' 'to standard out and exit.') group.add_option( '--install_key', dest='install_key', help='Install the named ssh public key file to ' 'authorized_keys.', metavar='FILE') parser.add_option_group(group) group = optparse.OptionGroup(parser, 'Other Options') group.add_option( '--keyname', dest='keyname', help='Name for this key, used when matching ' 'config blocks.') group.add_option( '--configfile', dest='configfile', help='Path to authprogs configuration file. ' 'Defaults to ~/.ssh/authprogs.yaml', metavar='FILE') group.add_option( '--configdir', dest='configdir', help='Path to authprogs configuration directory. ' 'Defaults to ~/.ssh/authprogs.d', metavar='DIR') group.add_option('--logfile', dest='logfile', help='Write logging info to this file. ' 'Defaults to no logging.', metavar='FILE') group.add_option('--debug', dest='debug', action='store_true', help='Write additional debugging information ' 'to --logfile') group.add_option('--authorized_keys', dest='authorized_keys', default=os.path.expanduser('~/.ssh/authorized_keys'), help='Location of authorized_keys file for ' '--install_key. 
Defaults to ~/.ssh/authorized_keys', metavar='FILE') parser.add_option_group(group) opts, args = parser.parse_args() if args: sys.exit('authprogs does not accept commandline arguments.') if not opts.configfile: cfg = os.path.expanduser('~/.ssh/authprogs.yaml') if os.path.isfile(cfg): opts.configfile = cfg if not opts.configdir: cfg = os.path.expanduser('~/.ssh/authprogs.d') if os.path.isdir(cfg): opts.configdir = cfg if opts.debug and not opts.logfile: parser.error('--debug requires use of --logfile') ap = None try: ap = AuthProgs(logfile=opts.logfile, # pylint: disable-msg=C0103 configfile=opts.configfile, configdir=opts.configdir, debug=opts.debug, keyname=opts.keyname) if opts.dump_config: ap.dump_config() sys.exit(0) elif opts.install_key: try: ap.install_key(opts.install_key, opts.authorized_keys) sys.stderr.write('Key installed successfully.\n') sys.exit(0) except InstallError as err: sys.stderr.write('Key install failed: %s' % err) sys.exit(1) elif opts.run: ap.exec_command() sys.exit('authprogs command returned - should ' 'never happen.') else: parser.error('Not sure what to do. Consider --help') except SSHEnvironmentError as err: ap.log('SSHEnvironmentError "%s"\n%s\n' % ( err, traceback.format_exc())) sys.exit('authprogs: %s' % err) except ConfigError as err: ap.log('ConfigError "%s"\n%s\n' % ( err, traceback.format_exc())) sys.exit('authprogs: %s' % err) except CommandRejected as err: sys.exit('authprogs: %s' % err) except Exception as err: if ap: ap.log('Unexpected exception: %s\n%s\n' % ( err, traceback.format_exc())) else: sys.stderr.write('Unexpected exception: %s\n%s\n' % ( err, traceback.format_exc())) sys.exit('authprogs experienced an unexpected exception.')
python
def main(): # pylint: disable-msg=R0912,R0915 """Main.""" parser = optparse.OptionParser() parser.usage = textwrap.dedent("""\ %prog {--run|--install_key|--dump_config} [options] SSH command authenticator. Used to restrict which commands can be run via trusted SSH keys. """) group = optparse.OptionGroup( parser, 'Run Mode Options', 'These options determine in which mode the authprogs ' 'program runs.') group.add_option( '-r', '--run', dest='run', action='store_true', help='Act as ssh command authenticator. Use this ' 'when calling from authorized_keys.') group.add_option( '--dump_config', dest='dump_config', action='store_true', help='Dump configuration (python format) ' 'to standard out and exit.') group.add_option( '--install_key', dest='install_key', help='Install the named ssh public key file to ' 'authorized_keys.', metavar='FILE') parser.add_option_group(group) group = optparse.OptionGroup(parser, 'Other Options') group.add_option( '--keyname', dest='keyname', help='Name for this key, used when matching ' 'config blocks.') group.add_option( '--configfile', dest='configfile', help='Path to authprogs configuration file. ' 'Defaults to ~/.ssh/authprogs.yaml', metavar='FILE') group.add_option( '--configdir', dest='configdir', help='Path to authprogs configuration directory. ' 'Defaults to ~/.ssh/authprogs.d', metavar='DIR') group.add_option('--logfile', dest='logfile', help='Write logging info to this file. ' 'Defaults to no logging.', metavar='FILE') group.add_option('--debug', dest='debug', action='store_true', help='Write additional debugging information ' 'to --logfile') group.add_option('--authorized_keys', dest='authorized_keys', default=os.path.expanduser('~/.ssh/authorized_keys'), help='Location of authorized_keys file for ' '--install_key. 
Defaults to ~/.ssh/authorized_keys', metavar='FILE') parser.add_option_group(group) opts, args = parser.parse_args() if args: sys.exit('authprogs does not accept commandline arguments.') if not opts.configfile: cfg = os.path.expanduser('~/.ssh/authprogs.yaml') if os.path.isfile(cfg): opts.configfile = cfg if not opts.configdir: cfg = os.path.expanduser('~/.ssh/authprogs.d') if os.path.isdir(cfg): opts.configdir = cfg if opts.debug and not opts.logfile: parser.error('--debug requires use of --logfile') ap = None try: ap = AuthProgs(logfile=opts.logfile, # pylint: disable-msg=C0103 configfile=opts.configfile, configdir=opts.configdir, debug=opts.debug, keyname=opts.keyname) if opts.dump_config: ap.dump_config() sys.exit(0) elif opts.install_key: try: ap.install_key(opts.install_key, opts.authorized_keys) sys.stderr.write('Key installed successfully.\n') sys.exit(0) except InstallError as err: sys.stderr.write('Key install failed: %s' % err) sys.exit(1) elif opts.run: ap.exec_command() sys.exit('authprogs command returned - should ' 'never happen.') else: parser.error('Not sure what to do. Consider --help') except SSHEnvironmentError as err: ap.log('SSHEnvironmentError "%s"\n%s\n' % ( err, traceback.format_exc())) sys.exit('authprogs: %s' % err) except ConfigError as err: ap.log('ConfigError "%s"\n%s\n' % ( err, traceback.format_exc())) sys.exit('authprogs: %s' % err) except CommandRejected as err: sys.exit('authprogs: %s' % err) except Exception as err: if ap: ap.log('Unexpected exception: %s\n%s\n' % ( err, traceback.format_exc())) else: sys.stderr.write('Unexpected exception: %s\n%s\n' % ( err, traceback.format_exc())) sys.exit('authprogs experienced an unexpected exception.')
[ "def", "main", "(", ")", ":", "# pylint: disable-msg=R0912,R0915", "parser", "=", "optparse", ".", "OptionParser", "(", ")", "parser", ".", "usage", "=", "textwrap", ".", "dedent", "(", "\"\"\"\\\n %prog {--run|--install_key|--dump_config} [options]\n\n SSH command au...
Main.
[ "Main", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L449-L569
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.raise_and_log_error
def raise_and_log_error(self, error, message): """Raise error, including message and original traceback. error: the error to raise message: the user-facing error message """ self.log('raising %s, traceback %s\n' % (error, traceback.format_exc())) raise error(message)
python
def raise_and_log_error(self, error, message): """Raise error, including message and original traceback. error: the error to raise message: the user-facing error message """ self.log('raising %s, traceback %s\n' % (error, traceback.format_exc())) raise error(message)
[ "def", "raise_and_log_error", "(", "self", ",", "error", ",", "message", ")", ":", "self", ".", "log", "(", "'raising %s, traceback %s\\n'", "%", "(", "error", ",", "traceback", ".", "format_exc", "(", ")", ")", ")", "raise", "error", "(", "message", ")" ]
Raise error, including message and original traceback. error: the error to raise message: the user-facing error message
[ "Raise", "error", "including", "message", "and", "original", "traceback", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L128-L136
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.get_client_ip
def get_client_ip(self): """Return the client IP from the environment.""" if self.client_ip: return self.client_ip try: client = os.environ.get('SSH_CONNECTION', os.environ.get('SSH_CLIENT')) self.client_ip = client.split()[0] self.logdebug('client_ip: %s\n' % self.client_ip) return self.client_ip except: raise SSHEnvironmentError('cannot identify the ssh client ' 'IP address')
python
def get_client_ip(self): """Return the client IP from the environment.""" if self.client_ip: return self.client_ip try: client = os.environ.get('SSH_CONNECTION', os.environ.get('SSH_CLIENT')) self.client_ip = client.split()[0] self.logdebug('client_ip: %s\n' % self.client_ip) return self.client_ip except: raise SSHEnvironmentError('cannot identify the ssh client ' 'IP address')
[ "def", "get_client_ip", "(", "self", ")", ":", "if", "self", ".", "client_ip", ":", "return", "self", ".", "client_ip", "try", ":", "client", "=", "os", ".", "environ", ".", "get", "(", "'SSH_CONNECTION'", ",", "os", ".", "environ", ".", "get", "(", ...
Return the client IP from the environment.
[ "Return", "the", "client", "IP", "from", "the", "environment", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L138-L152
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.check_keyname
def check_keyname(self, rule): """If a key name is specified, verify it is permitted.""" keynames = rule.get('keynames') if not keynames: self.logdebug('no keynames requirement.\n') return True if not isinstance(keynames, list): keynames = [keynames] if self.keyname in keynames: self.logdebug('keyname "%s" matches rule.\n' % self.keyname) return True else: self.logdebug('keyname "%s" does not match rule.\n' % self.keyname) return False
python
def check_keyname(self, rule): """If a key name is specified, verify it is permitted.""" keynames = rule.get('keynames') if not keynames: self.logdebug('no keynames requirement.\n') return True if not isinstance(keynames, list): keynames = [keynames] if self.keyname in keynames: self.logdebug('keyname "%s" matches rule.\n' % self.keyname) return True else: self.logdebug('keyname "%s" does not match rule.\n' % self.keyname) return False
[ "def", "check_keyname", "(", "self", ",", "rule", ")", ":", "keynames", "=", "rule", ".", "get", "(", "'keynames'", ")", "if", "not", "keynames", ":", "self", ".", "logdebug", "(", "'no keynames requirement.\\n'", ")", "return", "True", "if", "not", "isins...
If a key name is specified, verify it is permitted.
[ "If", "a", "key", "name", "is", "specified", "verify", "it", "is", "permitted", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L164-L179
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.check_client_ip
def check_client_ip(self, rule): """If a client IP is specified, verify it is permitted.""" if not rule.get('from'): self.logdebug('no "from" requirement.\n') return True allow_from = rule.get('from') if not isinstance(allow_from, list): allow_from = [allow_from] client_ip = self.get_client_ip() if client_ip in allow_from: self.logdebug('client_ip %s in %s\n' % (client_ip, allow_from)) return True else: self.logdebug('client_ip %s not in %s' % (client_ip, allow_from)) return False
python
def check_client_ip(self, rule): """If a client IP is specified, verify it is permitted.""" if not rule.get('from'): self.logdebug('no "from" requirement.\n') return True allow_from = rule.get('from') if not isinstance(allow_from, list): allow_from = [allow_from] client_ip = self.get_client_ip() if client_ip in allow_from: self.logdebug('client_ip %s in %s\n' % (client_ip, allow_from)) return True else: self.logdebug('client_ip %s not in %s' % (client_ip, allow_from)) return False
[ "def", "check_client_ip", "(", "self", ",", "rule", ")", ":", "if", "not", "rule", ".", "get", "(", "'from'", ")", ":", "self", ".", "logdebug", "(", "'no \"from\" requirement.\\n'", ")", "return", "True", "allow_from", "=", "rule", ".", "get", "(", "'fr...
If a client IP is specified, verify it is permitted.
[ "If", "a", "client", "IP", "is", "specified", "verify", "it", "is", "permitted", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L181-L198
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.get_merged_config
def get_merged_config(self): """Get merged config file. Returns an open StringIO containing the merged config file. """ if self.yamldocs: return loadfiles = [] if self.configfile: loadfiles.append(self.configfile) if self.configdir: # Gets list of all non-dotfile files from configdir. loadfiles.extend( [f for f in [os.path.join(self.configdir, x) for x in os.listdir(self.configdir)] if os.path.isfile(f) and not os.path.basename(f).startswith('.')]) merged_configfile = io.StringIO() merged_configfile.write('-\n') for thefile in loadfiles: self.logdebug('reading in config file %s\n' % thefile) merged_configfile.write(open(thefile).read()) merged_configfile.write('\n-\n') merged_configfile.seek(0) self.logdebug('merged log file: """\n%s\n"""\n' % merged_configfile.read()) merged_configfile.seek(0) return merged_configfile
python
def get_merged_config(self): """Get merged config file. Returns an open StringIO containing the merged config file. """ if self.yamldocs: return loadfiles = [] if self.configfile: loadfiles.append(self.configfile) if self.configdir: # Gets list of all non-dotfile files from configdir. loadfiles.extend( [f for f in [os.path.join(self.configdir, x) for x in os.listdir(self.configdir)] if os.path.isfile(f) and not os.path.basename(f).startswith('.')]) merged_configfile = io.StringIO() merged_configfile.write('-\n') for thefile in loadfiles: self.logdebug('reading in config file %s\n' % thefile) merged_configfile.write(open(thefile).read()) merged_configfile.write('\n-\n') merged_configfile.seek(0) self.logdebug('merged log file: """\n%s\n"""\n' % merged_configfile.read()) merged_configfile.seek(0) return merged_configfile
[ "def", "get_merged_config", "(", "self", ")", ":", "if", "self", ".", "yamldocs", ":", "return", "loadfiles", "=", "[", "]", "if", "self", ".", "configfile", ":", "loadfiles", ".", "append", "(", "self", ".", "configfile", ")", "if", "self", ".", "conf...
Get merged config file. Returns an open StringIO containing the merged config file.
[ "Get", "merged", "config", "file", ".", "Returns", "an", "open", "StringIO", "containing", "the", "merged", "config", "file", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L200-L232
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.load
def load(self): """Load our config, log and raise on error.""" try: merged_configfile = self.get_merged_config() self.yamldocs = yaml.load(merged_configfile, Loader=Loader) # Strip out the top level 'None's we get from concatenation. # Functionally not required, but makes dumps cleaner. self.yamldocs = [x for x in self.yamldocs if x] self.logdebug('parsed_rules:\n%s\n' % pretty(self.yamldocs)) except (yaml.scanner.ScannerError, yaml.parser.ParserError): self.raise_and_log_error(ConfigError, 'error parsing config.')
python
def load(self): """Load our config, log and raise on error.""" try: merged_configfile = self.get_merged_config() self.yamldocs = yaml.load(merged_configfile, Loader=Loader) # Strip out the top level 'None's we get from concatenation. # Functionally not required, but makes dumps cleaner. self.yamldocs = [x for x in self.yamldocs if x] self.logdebug('parsed_rules:\n%s\n' % pretty(self.yamldocs)) except (yaml.scanner.ScannerError, yaml.parser.ParserError): self.raise_and_log_error(ConfigError, 'error parsing config.')
[ "def", "load", "(", "self", ")", ":", "try", ":", "merged_configfile", "=", "self", ".", "get_merged_config", "(", ")", "self", ".", "yamldocs", "=", "yaml", ".", "load", "(", "merged_configfile", ",", "Loader", "=", "Loader", ")", "# Strip out the top level...
Load our config, log and raise on error.
[ "Load", "our", "config", "log", "and", "raise", "on", "error", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L234-L246
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.dump_config
def dump_config(self): """Pretty print the configuration dict to stdout.""" yaml_content = self.get_merged_config() print('YAML Configuration\n%s\n' % yaml_content.read()) try: self.load() print('Python Configuration\n%s\n' % pretty(self.yamldocs)) except ConfigError: sys.stderr.write( 'config parse error. try running with --logfile=/dev/tty\n') raise
python
def dump_config(self): """Pretty print the configuration dict to stdout.""" yaml_content = self.get_merged_config() print('YAML Configuration\n%s\n' % yaml_content.read()) try: self.load() print('Python Configuration\n%s\n' % pretty(self.yamldocs)) except ConfigError: sys.stderr.write( 'config parse error. try running with --logfile=/dev/tty\n') raise
[ "def", "dump_config", "(", "self", ")", ":", "yaml_content", "=", "self", ".", "get_merged_config", "(", ")", "print", "(", "'YAML Configuration\\n%s\\n'", "%", "yaml_content", ".", "read", "(", ")", ")", "try", ":", "self", ".", "load", "(", ")", "print",...
Pretty print the configuration dict to stdout.
[ "Pretty", "print", "the", "configuration", "dict", "to", "stdout", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L248-L258
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.install_key_data
def install_key_data(self, keydata, target): """Install the key data into the open file.""" target.seek(0) contents = target.read() ssh_opts = 'no-port-forwarding' if keydata in contents: raise InstallError('key data already in file - refusing ' 'to double-install.\n') command = '%s --run' % self.authprogs_binary if self.logfile: command += ' --logfile=%s' % self.logfile if self.keyname: command += ' --keyname=%s' % self.keyname target.write('command="%(command)s",%(ssh_opts)s %(keydata)s\n' % {'command': command, 'keydata': keydata, 'ssh_opts': ssh_opts})
python
def install_key_data(self, keydata, target): """Install the key data into the open file.""" target.seek(0) contents = target.read() ssh_opts = 'no-port-forwarding' if keydata in contents: raise InstallError('key data already in file - refusing ' 'to double-install.\n') command = '%s --run' % self.authprogs_binary if self.logfile: command += ' --logfile=%s' % self.logfile if self.keyname: command += ' --keyname=%s' % self.keyname target.write('command="%(command)s",%(ssh_opts)s %(keydata)s\n' % {'command': command, 'keydata': keydata, 'ssh_opts': ssh_opts})
[ "def", "install_key_data", "(", "self", ",", "keydata", ",", "target", ")", ":", "target", ".", "seek", "(", "0", ")", "contents", "=", "target", ".", "read", "(", ")", "ssh_opts", "=", "'no-port-forwarding'", "if", "keydata", "in", "contents", ":", "rai...
Install the key data into the open file.
[ "Install", "the", "key", "data", "into", "the", "open", "file", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L260-L278
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.install_key
def install_key(self, keyfile, authorized_keys): """Install a key into the authorized_keys file.""" # Make the directory containing the authorized_keys # file, if it doesn't exist. (Typically ~/.ssh). # Ignore errors; we'll fail shortly if we can't # create the authkeys file. try: os.makedirs(os.path.dirname(authorized_keys), 0o700) except OSError: pass keydata = open(keyfile).read() target_fd = os.open(authorized_keys, os.O_RDWR | os.O_CREAT, 0o600) self.install_key_data(keydata, os.fdopen(target_fd, 'w+'))
python
def install_key(self, keyfile, authorized_keys): """Install a key into the authorized_keys file.""" # Make the directory containing the authorized_keys # file, if it doesn't exist. (Typically ~/.ssh). # Ignore errors; we'll fail shortly if we can't # create the authkeys file. try: os.makedirs(os.path.dirname(authorized_keys), 0o700) except OSError: pass keydata = open(keyfile).read() target_fd = os.open(authorized_keys, os.O_RDWR | os.O_CREAT, 0o600) self.install_key_data(keydata, os.fdopen(target_fd, 'w+'))
[ "def", "install_key", "(", "self", ",", "keyfile", ",", "authorized_keys", ")", ":", "# Make the directory containing the authorized_keys", "# file, if it doesn't exist. (Typically ~/.ssh).", "# Ignore errors; we'll fail shortly if we can't", "# create the authkeys file.", "try", ":", ...
Install a key into the authorized_keys file.
[ "Install", "a", "key", "into", "the", "authorized_keys", "file", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L280-L294
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.find_match_scp
def find_match_scp(self, rule): # pylint: disable-msg=R0911,R0912 """Handle scp commands.""" orig_list = [] orig_list.extend(self.original_command_list) binary = orig_list.pop(0) allowed_binaries = ['scp', '/usr/bin/scp'] if binary not in allowed_binaries: self.logdebug('skipping scp processing - binary "%s" ' 'not in approved list.\n' % binary) return filepath = orig_list.pop() arguments = orig_list if '-f' in arguments: if not rule.get('allow_download'): self.logdebug('scp denied - downloading forbidden.\n') return if '-t' in arguments: if not rule.get('allow_upload'): self.log('scp denied - uploading forbidden.\n') return if '-r' in arguments: if not rule.get('allow_recursion'): self.log('scp denied - recursive transfers forbidden.\n') return if '-p' in arguments: if not rule.get('allow_permissions', 'true'): self.log('scp denied - set/getting permissions ' 'forbidden.\n') return if rule.get('files'): files = rule.get('files') if not isinstance(files, list): files = [files] if filepath not in files: self.log('scp denied - file "%s" - not in approved ' 'list %s\n' % (filepath, files)) return # Allow it! return {'command': self.original_command_list}
python
def find_match_scp(self, rule): # pylint: disable-msg=R0911,R0912 """Handle scp commands.""" orig_list = [] orig_list.extend(self.original_command_list) binary = orig_list.pop(0) allowed_binaries = ['scp', '/usr/bin/scp'] if binary not in allowed_binaries: self.logdebug('skipping scp processing - binary "%s" ' 'not in approved list.\n' % binary) return filepath = orig_list.pop() arguments = orig_list if '-f' in arguments: if not rule.get('allow_download'): self.logdebug('scp denied - downloading forbidden.\n') return if '-t' in arguments: if not rule.get('allow_upload'): self.log('scp denied - uploading forbidden.\n') return if '-r' in arguments: if not rule.get('allow_recursion'): self.log('scp denied - recursive transfers forbidden.\n') return if '-p' in arguments: if not rule.get('allow_permissions', 'true'): self.log('scp denied - set/getting permissions ' 'forbidden.\n') return if rule.get('files'): files = rule.get('files') if not isinstance(files, list): files = [files] if filepath not in files: self.log('scp denied - file "%s" - not in approved ' 'list %s\n' % (filepath, files)) return # Allow it! return {'command': self.original_command_list}
[ "def", "find_match_scp", "(", "self", ",", "rule", ")", ":", "# pylint: disable-msg=R0911,R0912", "orig_list", "=", "[", "]", "orig_list", ".", "extend", "(", "self", ".", "original_command_list", ")", "binary", "=", "orig_list", ".", "pop", "(", "0", ")", "...
Handle scp commands.
[ "Handle", "scp", "commands", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L296-L342
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.find_match_command
def find_match_command(self, rule): """Return a matching (possibly munged) command, if found in rule.""" command_string = rule['command'] command_list = command_string.split() self.logdebug('comparing "%s" to "%s"\n' % (command_list, self.original_command_list)) if rule.get('allow_trailing_args'): self.logdebug('allow_trailing_args is true - comparing initial ' 'list.\n') # Verify the initial arguments are all the same if (self.original_command_list[:len(command_list)] == command_list): self.logdebug('initial list is same\n') return {'command': self.original_command_list} else: self.logdebug('initial list is not same\n') elif rule.get('pcre_match'): if re.search(command_string, self.original_command_string): return {'command': self.original_command_list} elif command_list == self.original_command_list: return {'command': command_list}
python
def find_match_command(self, rule): """Return a matching (possibly munged) command, if found in rule.""" command_string = rule['command'] command_list = command_string.split() self.logdebug('comparing "%s" to "%s"\n' % (command_list, self.original_command_list)) if rule.get('allow_trailing_args'): self.logdebug('allow_trailing_args is true - comparing initial ' 'list.\n') # Verify the initial arguments are all the same if (self.original_command_list[:len(command_list)] == command_list): self.logdebug('initial list is same\n') return {'command': self.original_command_list} else: self.logdebug('initial list is not same\n') elif rule.get('pcre_match'): if re.search(command_string, self.original_command_string): return {'command': self.original_command_list} elif command_list == self.original_command_list: return {'command': command_list}
[ "def", "find_match_command", "(", "self", ",", "rule", ")", ":", "command_string", "=", "rule", "[", "'command'", "]", "command_list", "=", "command_string", ".", "split", "(", ")", "self", ".", "logdebug", "(", "'comparing \"%s\" to \"%s\"\\n'", "%", "(", "co...
Return a matching (possibly munged) command, if found in rule.
[ "Return", "a", "matching", "(", "possibly", "munged", ")", "command", "if", "found", "in", "rule", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L344-L368
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.find_match
def find_match(self): """Load the config and find a matching rule. returns the results of find_match_command, a dict of the command and (in the future) other metadata. """ self.load() for yamldoc in self.yamldocs: self.logdebug('\nchecking rule """%s"""\n' % yamldoc) if not yamldoc: continue if not self.check_client_ip(yamldoc): # Rejected - Client IP does not match continue if not self.check_keyname(yamldoc): # Rejected - keyname does not match continue rules = yamldoc.get('allow') if not isinstance(rules, list): rules = [rules] for rule in rules: rule_type = rule.get('rule_type', 'command') if rule_type == 'command': sub = self.find_match_command elif rule_type == 'scp': sub = self.find_match_scp else: self.log('fatal: no such rule_type "%s"\n' % rule_type) self.raise_and_log_error(ConfigError, 'error parsing config.') match = sub(rule) if match: return match # No matches, time to give up. raise CommandRejected('command "%s" denied.' % self.original_command_string)
python
def find_match(self): """Load the config and find a matching rule. returns the results of find_match_command, a dict of the command and (in the future) other metadata. """ self.load() for yamldoc in self.yamldocs: self.logdebug('\nchecking rule """%s"""\n' % yamldoc) if not yamldoc: continue if not self.check_client_ip(yamldoc): # Rejected - Client IP does not match continue if not self.check_keyname(yamldoc): # Rejected - keyname does not match continue rules = yamldoc.get('allow') if not isinstance(rules, list): rules = [rules] for rule in rules: rule_type = rule.get('rule_type', 'command') if rule_type == 'command': sub = self.find_match_command elif rule_type == 'scp': sub = self.find_match_scp else: self.log('fatal: no such rule_type "%s"\n' % rule_type) self.raise_and_log_error(ConfigError, 'error parsing config.') match = sub(rule) if match: return match # No matches, time to give up. raise CommandRejected('command "%s" denied.' % self.original_command_string)
[ "def", "find_match", "(", "self", ")", ":", "self", ".", "load", "(", ")", "for", "yamldoc", "in", "self", ".", "yamldocs", ":", "self", ".", "logdebug", "(", "'\\nchecking rule \"\"\"%s\"\"\"\\n'", "%", "yamldoc", ")", "if", "not", "yamldoc", ":", "contin...
Load the config and find a matching rule. returns the results of find_match_command, a dict of the command and (in the future) other metadata.
[ "Load", "the", "config", "and", "find", "a", "matching", "rule", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L370-L413
daethnir/authprogs
authprogs/authprogs.py
AuthProgs.exec_command
def exec_command(self): """Glean the command to run and exec. On problems, sys.exit. This method should *never* return. """ if not self.original_command_string: raise SSHEnvironmentError('no SSH command found; ' 'interactive shell disallowed.') command_info = {'from': self.get_client_ip(), 'keyname': self.keyname, 'ssh_original_comand': self.original_command_string, 'time': time.time()} os.environ['AUTHPROGS_KEYNAME'] = self.keyname retcode = 126 try: match = self.find_match() command_info['command'] = match.get('command') self.logdebug('find_match returned "%s"\n' % match) command = match['command'] retcode = subprocess.call(command) command_info['code'] = retcode self.log('result: %s\n' % command_info) sys.exit(retcode) except (CommandRejected, OSError) as err: command_info['exception'] = '%s' % err self.log('result: %s\n' % command_info) sys.exit(retcode)
python
def exec_command(self): """Glean the command to run and exec. On problems, sys.exit. This method should *never* return. """ if not self.original_command_string: raise SSHEnvironmentError('no SSH command found; ' 'interactive shell disallowed.') command_info = {'from': self.get_client_ip(), 'keyname': self.keyname, 'ssh_original_comand': self.original_command_string, 'time': time.time()} os.environ['AUTHPROGS_KEYNAME'] = self.keyname retcode = 126 try: match = self.find_match() command_info['command'] = match.get('command') self.logdebug('find_match returned "%s"\n' % match) command = match['command'] retcode = subprocess.call(command) command_info['code'] = retcode self.log('result: %s\n' % command_info) sys.exit(retcode) except (CommandRejected, OSError) as err: command_info['exception'] = '%s' % err self.log('result: %s\n' % command_info) sys.exit(retcode)
[ "def", "exec_command", "(", "self", ")", ":", "if", "not", "self", ".", "original_command_string", ":", "raise", "SSHEnvironmentError", "(", "'no SSH command found; '", "'interactive shell disallowed.'", ")", "command_info", "=", "{", "'from'", ":", "self", ".", "ge...
Glean the command to run and exec. On problems, sys.exit. This method should *never* return.
[ "Glean", "the", "command", "to", "run", "and", "exec", "." ]
train
https://github.com/daethnir/authprogs/blob/0b1e13a609ebeabdb0f10d11fc5dc6e0b20c0343/authprogs/authprogs.py#L415-L446
gmr/tredis
tredis/crc16.py
_py2_crc16
def _py2_crc16(value): """Calculate the CRC for the value in Python 2 :param str value: The value to return for the CRC Checksum :rtype: int """ crc = 0 for byte in value: crc = ((crc << 8) & 0xffff) ^ \ _CRC16_LOOKUP[((crc >> 8) ^ ord(byte)) & 0xff] return crc
python
def _py2_crc16(value): """Calculate the CRC for the value in Python 2 :param str value: The value to return for the CRC Checksum :rtype: int """ crc = 0 for byte in value: crc = ((crc << 8) & 0xffff) ^ \ _CRC16_LOOKUP[((crc >> 8) ^ ord(byte)) & 0xff] return crc
[ "def", "_py2_crc16", "(", "value", ")", ":", "crc", "=", "0", "for", "byte", "in", "value", ":", "crc", "=", "(", "(", "crc", "<<", "8", ")", "&", "0xffff", ")", "^", "_CRC16_LOOKUP", "[", "(", "(", "crc", ">>", "8", ")", "^", "ord", "(", "by...
Calculate the CRC for the value in Python 2 :param str value: The value to return for the CRC Checksum :rtype: int
[ "Calculate", "the", "CRC", "for", "the", "value", "in", "Python", "2" ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/crc16.py#L40-L51
gmr/tredis
tredis/crc16.py
_py3_crc16
def _py3_crc16(value): """Calculate the CRC for the value in Python 3 :param bytes value: The value to return for the CRC Checksum :rtype: int """ crc = 0 for byte in value: crc = ((crc << 8) & 0xffff) ^ _CRC16_LOOKUP[((crc >> 8) ^ byte) & 0xff] return crc
python
def _py3_crc16(value): """Calculate the CRC for the value in Python 3 :param bytes value: The value to return for the CRC Checksum :rtype: int """ crc = 0 for byte in value: crc = ((crc << 8) & 0xffff) ^ _CRC16_LOOKUP[((crc >> 8) ^ byte) & 0xff] return crc
[ "def", "_py3_crc16", "(", "value", ")", ":", "crc", "=", "0", "for", "byte", "in", "value", ":", "crc", "=", "(", "(", "crc", "<<", "8", ")", "&", "0xffff", ")", "^", "_CRC16_LOOKUP", "[", "(", "(", "crc", ">>", "8", ")", "^", "byte", ")", "&...
Calculate the CRC for the value in Python 3 :param bytes value: The value to return for the CRC Checksum :rtype: int
[ "Calculate", "the", "CRC", "for", "the", "value", "in", "Python", "3" ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/crc16.py#L54-L64
radzak/rtv-downloader
rtv/extractors/common.py
Extractor.validate_url
def validate_url(cls, url: str) -> Optional[Match[str]]: """Check if the Extractor can handle the given url.""" match = re.match(cls._VALID_URL, url) return match
python
def validate_url(cls, url: str) -> Optional[Match[str]]: """Check if the Extractor can handle the given url.""" match = re.match(cls._VALID_URL, url) return match
[ "def", "validate_url", "(", "cls", ",", "url", ":", "str", ")", "->", "Optional", "[", "Match", "[", "str", "]", "]", ":", "match", "=", "re", ".", "match", "(", "cls", ".", "_VALID_URL", ",", "url", ")", "return", "match" ]
Check if the Extractor can handle the given url.
[ "Check", "if", "the", "Extractor", "can", "handle", "the", "given", "url", "." ]
train
https://github.com/radzak/rtv-downloader/blob/b9114b7f4c35fabe6ec9ad1764a65858667a866e/rtv/extractors/common.py#L29-L32
radzak/rtv-downloader
rtv/extractors/common.py
Extractor.get_info
def get_info(self) -> dict: """Get information about the videos from YoutubeDL package.""" with suppress_stdout(): with youtube_dl.YoutubeDL() as ydl: info_dict = ydl.extract_info(self.url, download=False) return info_dict
python
def get_info(self) -> dict: """Get information about the videos from YoutubeDL package.""" with suppress_stdout(): with youtube_dl.YoutubeDL() as ydl: info_dict = ydl.extract_info(self.url, download=False) return info_dict
[ "def", "get_info", "(", "self", ")", "->", "dict", ":", "with", "suppress_stdout", "(", ")", ":", "with", "youtube_dl", ".", "YoutubeDL", "(", ")", "as", "ydl", ":", "info_dict", "=", "ydl", ".", "extract_info", "(", "self", ".", "url", ",", "download"...
Get information about the videos from YoutubeDL package.
[ "Get", "information", "about", "the", "videos", "from", "YoutubeDL", "package", "." ]
train
https://github.com/radzak/rtv-downloader/blob/b9114b7f4c35fabe6ec9ad1764a65858667a866e/rtv/extractors/common.py#L40-L45
radzak/rtv-downloader
rtv/extractors/common.py
Extractor.update_entries
def update_entries(entries: Entries, data: dict) -> None: """Update each entry in the list with some data.""" # TODO: Is mutating the list okay, making copies is such a pain in the ass for entry in entries: entry.update(data)
python
def update_entries(entries: Entries, data: dict) -> None: """Update each entry in the list with some data.""" # TODO: Is mutating the list okay, making copies is such a pain in the ass for entry in entries: entry.update(data)
[ "def", "update_entries", "(", "entries", ":", "Entries", ",", "data", ":", "dict", ")", "->", "None", ":", "# TODO: Is mutating the list okay, making copies is such a pain in the ass", "for", "entry", "in", "entries", ":", "entry", ".", "update", "(", "data", ")" ]
Update each entry in the list with some data.
[ "Update", "each", "entry", "in", "the", "list", "with", "some", "data", "." ]
train
https://github.com/radzak/rtv-downloader/blob/b9114b7f4c35fabe6ec9ad1764a65858667a866e/rtv/extractors/common.py#L48-L52
mk-fg/feedjack
feedjack/fjlib.py
get_extra_context
def get_extra_context(site, ctx): 'Returns extra data useful to the templates.' # XXX: clean this up from obsolete stuff ctx['site'] = site ctx['feeds'] = feeds = site.active_feeds.order_by('name') def get_mod_chk(k): mod, chk = ( (max(vals) if vals else None) for vals in ( filter(None, it.imap(op.attrgetter(k), feeds)) for k in ['last_modified', 'last_checked'] ) ) chk = chk or datetime(1970, 1, 1, 0, 0, 0, 0, timezone.utc) ctx['last_modified'], ctx['last_checked'] = mod or chk, chk return ctx[k] for k in 'last_modified', 'last_checked': ctx[k] = lambda: get_mod_chk(k) # media_url is set here for historical reasons, # use static_url or STATIC_URL (from django context) in any new templates. ctx['media_url'] = ctx['static_url'] =\ '{}feedjack/{}'.format(settings.STATIC_URL, site.template)
python
def get_extra_context(site, ctx): 'Returns extra data useful to the templates.' # XXX: clean this up from obsolete stuff ctx['site'] = site ctx['feeds'] = feeds = site.active_feeds.order_by('name') def get_mod_chk(k): mod, chk = ( (max(vals) if vals else None) for vals in ( filter(None, it.imap(op.attrgetter(k), feeds)) for k in ['last_modified', 'last_checked'] ) ) chk = chk or datetime(1970, 1, 1, 0, 0, 0, 0, timezone.utc) ctx['last_modified'], ctx['last_checked'] = mod or chk, chk return ctx[k] for k in 'last_modified', 'last_checked': ctx[k] = lambda: get_mod_chk(k) # media_url is set here for historical reasons, # use static_url or STATIC_URL (from django context) in any new templates. ctx['media_url'] = ctx['static_url'] =\ '{}feedjack/{}'.format(settings.STATIC_URL, site.template)
[ "def", "get_extra_context", "(", "site", ",", "ctx", ")", ":", "# XXX: clean this up from obsolete stuff", "ctx", "[", "'site'", "]", "=", "site", "ctx", "[", "'feeds'", "]", "=", "feeds", "=", "site", ".", "active_feeds", ".", "order_by", "(", "'name'", ")"...
Returns extra data useful to the templates.
[ "Returns", "extra", "data", "useful", "to", "the", "templates", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjlib.py#L65-L85
mk-fg/feedjack
feedjack/fjlib.py
get_posts_tags
def get_posts_tags(subscribers, object_list, feed, tag_name): '''Adds a qtags property in every post object in a page. Use "qtags" instead of "tags" in templates to avoid unnecesary DB hits.''' tagd = dict() user_obj = None tag_obj = None tags = models.Tag.objects.extra( select=dict(post_id='{0}.{1}'.format( *it.imap( connection.ops.quote_name, ('feedjack_post_tags', 'post_id') ) )), tables=['feedjack_post_tags'], where=[ '{0}.{1}={2}.{3}'.format(*it.imap( connection.ops.quote_name, ('feedjack_tag', 'id', 'feedjack_post_tags', 'tag_id') )), '{0}.{1} IN ({2})'.format( connection.ops.quote_name('feedjack_post_tags'), connection.ops.quote_name('post_id'), ', '.join([str(post.id) for post in object_list]) ) ] ) for tag in tags: if tag.post_id not in tagd: tagd[tag.post_id] = list() tagd[tag.post_id].append(tag) if tag_name and tag.name == tag_name: tag_obj = tag subd = dict() for sub in subscribers: subd[sub.feed.id] = sub for post in object_list: if post.id in tagd: post.qtags = tagd[post.id] else: post.qtags = list() post.subscriber = subd[post.feed.id] if feed == post.feed: user_obj = post.subscriber return user_obj, tag_obj
python
def get_posts_tags(subscribers, object_list, feed, tag_name): '''Adds a qtags property in every post object in a page. Use "qtags" instead of "tags" in templates to avoid unnecesary DB hits.''' tagd = dict() user_obj = None tag_obj = None tags = models.Tag.objects.extra( select=dict(post_id='{0}.{1}'.format( *it.imap( connection.ops.quote_name, ('feedjack_post_tags', 'post_id') ) )), tables=['feedjack_post_tags'], where=[ '{0}.{1}={2}.{3}'.format(*it.imap( connection.ops.quote_name, ('feedjack_tag', 'id', 'feedjack_post_tags', 'tag_id') )), '{0}.{1} IN ({2})'.format( connection.ops.quote_name('feedjack_post_tags'), connection.ops.quote_name('post_id'), ', '.join([str(post.id) for post in object_list]) ) ] ) for tag in tags: if tag.post_id not in tagd: tagd[tag.post_id] = list() tagd[tag.post_id].append(tag) if tag_name and tag.name == tag_name: tag_obj = tag subd = dict() for sub in subscribers: subd[sub.feed.id] = sub for post in object_list: if post.id in tagd: post.qtags = tagd[post.id] else: post.qtags = list() post.subscriber = subd[post.feed.id] if feed == post.feed: user_obj = post.subscriber return user_obj, tag_obj
[ "def", "get_posts_tags", "(", "subscribers", ",", "object_list", ",", "feed", ",", "tag_name", ")", ":", "tagd", "=", "dict", "(", ")", "user_obj", "=", "None", "tag_obj", "=", "None", "tags", "=", "models", ".", "Tag", ".", "objects", ".", "extra", "(...
Adds a qtags property in every post object in a page. Use "qtags" instead of "tags" in templates to avoid unnecesary DB hits.
[ "Adds", "a", "qtags", "property", "in", "every", "post", "object", "in", "a", "page", ".", "Use", "qtags", "instead", "of", "tags", "in", "templates", "to", "avoid", "unnecesary", "DB", "hits", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjlib.py#L88-L121
mk-fg/feedjack
feedjack/fjlib.py
get_page
def get_page(site, page=1, **criterias): 'Returns a paginator object and a requested page from it.' global _since_formats_vary if 'since' in criterias: since = criterias['since'] if since in _since_offsets: since = datetime.today() - timedelta(_since_offsets[since]) else: if _since_formats_vary: for fmt, substs in it.product( list(_since_formats), it.chain.from_iterable( it.combinations(_since_formats_vary, n) for n in xrange(1, len(_since_formats_vary)) ) ): for src, dst in substs: fmt = fmt.replace(src, dst) _since_formats.add(fmt) _since_formats_vary = None # to avoid doing it again for fmt in _since_formats: try: since = datetime.strptime(since, fmt) except ValueError: pass else: break else: raise Http404 # invalid format try: criterias['since'] = timezone.make_aware( since, timezone.get_current_timezone() ) except ( timezone.pytz.exceptions.AmbiguousTimeError if timezone.pytz else RuntimeError ): # Since there's no "right" way here anyway... criterias['since'] = since.replace(tzinfo=timezone) order_force = criterias.pop('asc', None) posts = models.Post.objects.filtered(site, **criterias)\ .sorted(site.order_posts_by, force=order_force)\ .select_related('feed') paginator = Paginator(posts, site.posts_per_page) try: return paginator.page(page) except InvalidPage: raise Http404
python
def get_page(site, page=1, **criterias): 'Returns a paginator object and a requested page from it.' global _since_formats_vary if 'since' in criterias: since = criterias['since'] if since in _since_offsets: since = datetime.today() - timedelta(_since_offsets[since]) else: if _since_formats_vary: for fmt, substs in it.product( list(_since_formats), it.chain.from_iterable( it.combinations(_since_formats_vary, n) for n in xrange(1, len(_since_formats_vary)) ) ): for src, dst in substs: fmt = fmt.replace(src, dst) _since_formats.add(fmt) _since_formats_vary = None # to avoid doing it again for fmt in _since_formats: try: since = datetime.strptime(since, fmt) except ValueError: pass else: break else: raise Http404 # invalid format try: criterias['since'] = timezone.make_aware( since, timezone.get_current_timezone() ) except ( timezone.pytz.exceptions.AmbiguousTimeError if timezone.pytz else RuntimeError ): # Since there's no "right" way here anyway... criterias['since'] = since.replace(tzinfo=timezone) order_force = criterias.pop('asc', None) posts = models.Post.objects.filtered(site, **criterias)\ .sorted(site.order_posts_by, force=order_force)\ .select_related('feed') paginator = Paginator(posts, site.posts_per_page) try: return paginator.page(page) except InvalidPage: raise Http404
[ "def", "get_page", "(", "site", ",", "page", "=", "1", ",", "*", "*", "criterias", ")", ":", "global", "_since_formats_vary", "if", "'since'", "in", "criterias", ":", "since", "=", "criterias", "[", "'since'", "]", "if", "since", "in", "_since_offsets", ...
Returns a paginator object and a requested page from it.
[ "Returns", "a", "paginator", "object", "and", "a", "requested", "page", "from", "it", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjlib.py#L131-L169
mk-fg/feedjack
feedjack/fjlib.py
page_context
def page_context(request, site, **criterias): 'Returns the context dictionary for a page view.' try: page = int(request.GET.get('page', 1)) except ValueError: page = 1 feed, tag = criterias.get('feed'), criterias.get('tag') if feed: try: feed = models.Feed.objects.get(pk=feed) except ObjectDoesNotExist: raise Http404 page = get_page(site, page=page, **criterias) subscribers = site.active_subscribers if site.show_tagcloud and page.object_list: from feedjack import fjcloud # This will hit the DB once per page instead of once for every post in # a page. To take advantage of this the template designer must call # the qtags property in every item, instead of the default tags property. user_obj, tag_obj = get_posts_tags( subscribers, page.object_list, feed, tag ) tag_cloud = fjcloud.getcloud(site, feed and feed.id) else: tag_obj, tag_cloud = None, tuple() try: user_obj = models.Subscriber.objects\ .get(site=site, feed=feed) if feed else None except ObjectDoesNotExist: raise Http404 site_proc_tags = site.processing_tags.strip() if site_proc_tags != 'none': site_proc_tags = filter( None, map(op.methodcaller('strip'), site.processing_tags.split(',')) ) # XXX: database hit that can be cached for site_feed, posts in it.groupby(page.object_list, key=op.attrgetter('feed')): proc = site_feed.processor_for_tags(site_proc_tags) if proc: proc.apply_overlay_to_posts(posts) ctx = dict( last_modified = max(it.imap( op.attrgetter('date_updated'), page.object_list ))\ if len(page.object_list) else datetime(1970, 1, 1, 0, 0, 0, 0, timezone.utc), object_list = page.object_list, subscribers = subscribers.select_related('feed'), tag = tag_obj, tagcloud = tag_cloud, feed = feed, url_suffix = ''.join(( '/feed/{0}'.format(feed.id) if feed else '', '/tag/{0}'.format(escape(tag)) if tag else '' )), p = page, # "page" is taken by legacy number p_10neighbors = OrderedDict( # OrderedDict of "num: exists" values # Use as "{% for p_num, p_exists in p_10neighbors.items|slice:"7:-7" %}" (p, p >= 1 and 
p <= page.paginator.num_pages) for p in ((page.number + n) for n in xrange(-10, 11)) ), ## DEPRECATED: # Totally misnamed and inconsistent b/w user/user_obj, # use "feed" and "subscribers" instead. user_id = feed and feed.id, user = user_obj, # Legacy flat pagination context, use "p" instead. is_paginated = page.paginator.num_pages > 1, results_per_page = site.posts_per_page, has_next = page.has_next(), has_previous = page.has_previous(), page = page.number, next = page.number + 1, previous = page.number - 1, pages = page.paginator.num_pages, hits = page.paginator.count ) get_extra_context(site, ctx) return ctx
python
def page_context(request, site, **criterias): 'Returns the context dictionary for a page view.' try: page = int(request.GET.get('page', 1)) except ValueError: page = 1 feed, tag = criterias.get('feed'), criterias.get('tag') if feed: try: feed = models.Feed.objects.get(pk=feed) except ObjectDoesNotExist: raise Http404 page = get_page(site, page=page, **criterias) subscribers = site.active_subscribers if site.show_tagcloud and page.object_list: from feedjack import fjcloud # This will hit the DB once per page instead of once for every post in # a page. To take advantage of this the template designer must call # the qtags property in every item, instead of the default tags property. user_obj, tag_obj = get_posts_tags( subscribers, page.object_list, feed, tag ) tag_cloud = fjcloud.getcloud(site, feed and feed.id) else: tag_obj, tag_cloud = None, tuple() try: user_obj = models.Subscriber.objects\ .get(site=site, feed=feed) if feed else None except ObjectDoesNotExist: raise Http404 site_proc_tags = site.processing_tags.strip() if site_proc_tags != 'none': site_proc_tags = filter( None, map(op.methodcaller('strip'), site.processing_tags.split(',')) ) # XXX: database hit that can be cached for site_feed, posts in it.groupby(page.object_list, key=op.attrgetter('feed')): proc = site_feed.processor_for_tags(site_proc_tags) if proc: proc.apply_overlay_to_posts(posts) ctx = dict( last_modified = max(it.imap( op.attrgetter('date_updated'), page.object_list ))\ if len(page.object_list) else datetime(1970, 1, 1, 0, 0, 0, 0, timezone.utc), object_list = page.object_list, subscribers = subscribers.select_related('feed'), tag = tag_obj, tagcloud = tag_cloud, feed = feed, url_suffix = ''.join(( '/feed/{0}'.format(feed.id) if feed else '', '/tag/{0}'.format(escape(tag)) if tag else '' )), p = page, # "page" is taken by legacy number p_10neighbors = OrderedDict( # OrderedDict of "num: exists" values # Use as "{% for p_num, p_exists in p_10neighbors.items|slice:"7:-7" %}" (p, p >= 1 and 
p <= page.paginator.num_pages) for p in ((page.number + n) for n in xrange(-10, 11)) ), ## DEPRECATED: # Totally misnamed and inconsistent b/w user/user_obj, # use "feed" and "subscribers" instead. user_id = feed and feed.id, user = user_obj, # Legacy flat pagination context, use "p" instead. is_paginated = page.paginator.num_pages > 1, results_per_page = site.posts_per_page, has_next = page.has_next(), has_previous = page.has_previous(), page = page.number, next = page.number + 1, previous = page.number - 1, pages = page.paginator.num_pages, hits = page.paginator.count ) get_extra_context(site, ctx) return ctx
[ "def", "page_context", "(", "request", ",", "site", ",", "*", "*", "criterias", ")", ":", "try", ":", "page", "=", "int", "(", "request", ".", "GET", ".", "get", "(", "'page'", ",", "1", ")", ")", "except", "ValueError", ":", "page", "=", "1", "f...
Returns the context dictionary for a page view.
[ "Returns", "the", "context", "dictionary", "for", "a", "page", "view", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjlib.py#L172-L251
gmr/tredis
tredis/strings.py
StringsMixin.bitcount
def bitcount(self, key, start=None, end=None): """Count the number of set bits (population counting) in a string. By default all the bytes contained in the string are examined. It is possible to specify the counting operation only in an interval passing the additional arguments start and end. Like for the :meth:`~tredis.RedisClient.getrange` command start and end can contain negative values in order to index bytes starting from the end of the string, where ``-1`` is the last byte, ``-2`` is the penultimate, and so forth. Non-existent keys are treated as empty strings, so the command will return zero. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` :param key: The key to get :type key: :class:`str`, :class:`bytes` :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError` """ command = [b'BITCOUNT', key] if start is not None and end is None: raise ValueError('Can not specify start without an end') elif start is None and end is not None: raise ValueError('Can not specify start without an end') elif start is not None and end is not None: command += [ascii(start), ascii(end)] return self._execute(command)
python
def bitcount(self, key, start=None, end=None): """Count the number of set bits (population counting) in a string. By default all the bytes contained in the string are examined. It is possible to specify the counting operation only in an interval passing the additional arguments start and end. Like for the :meth:`~tredis.RedisClient.getrange` command start and end can contain negative values in order to index bytes starting from the end of the string, where ``-1`` is the last byte, ``-2`` is the penultimate, and so forth. Non-existent keys are treated as empty strings, so the command will return zero. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` :param key: The key to get :type key: :class:`str`, :class:`bytes` :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError` """ command = [b'BITCOUNT', key] if start is not None and end is None: raise ValueError('Can not specify start without an end') elif start is None and end is not None: raise ValueError('Can not specify start without an end') elif start is not None and end is not None: command += [ascii(start), ascii(end)] return self._execute(command)
[ "def", "bitcount", "(", "self", ",", "key", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "command", "=", "[", "b'BITCOUNT'", ",", "key", "]", "if", "start", "is", "not", "None", "and", "end", "is", "None", ":", "raise", "ValueError...
Count the number of set bits (population counting) in a string. By default all the bytes contained in the string are examined. It is possible to specify the counting operation only in an interval passing the additional arguments start and end. Like for the :meth:`~tredis.RedisClient.getrange` command start and end can contain negative values in order to index bytes starting from the end of the string, where ``-1`` is the last byte, ``-2`` is the penultimate, and so forth. Non-existent keys are treated as empty strings, so the command will return zero. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` :param key: The key to get :type key: :class:`str`, :class:`bytes` :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError`
[ "Count", "the", "number", "of", "set", "bits", "(", "population", "counting", ")", "in", "a", "string", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L58-L92
gmr/tredis
tredis/strings.py
StringsMixin.bitop
def bitop(self, operation, dest_key, *keys): """Perform a bitwise operation between multiple keys (containing string values) and store the result in the destination key. The values for operation can be one of: - ``b'AND'`` - ``b'OR'`` - ``b'XOR'`` - ``b'NOT'`` - :data:`tredis.BITOP_AND` or ``b'&'`` - :data:`tredis.BITOP_OR` or ``b'|'`` - :data:`tredis.BITOP_XOR` or ``b'^'`` - :data:`tredis.BITOP_NOT` or ``b'~'`` ``b'NOT'`` is special as it only takes an input key, because it performs inversion of bits so it only makes sense as an unary operator. The result of the operation is always stored at ``dest_key``. **Handling of strings with different lengths** When an operation is performed between strings having different lengths, all the strings shorter than the longest string in the set are treated as if they were zero-padded up to the length of the longest string. The same holds true for non-existent keys, that are considered as a stream of zero bytes up to the length of the longest string. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` :param bytes operation: The operation to perform :param dest_key: The key to store the bitwise operation results to :type dest_key: :class:`str`, :class:`bytes` :param keys: One or more keys as keyword parameters for the bitwise op :type keys: :class:`str`, :class:`bytes` :return: The size of the string stored in the destination key, that is equal to the size of the longest input string. :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError` """ if (operation not in _BITOPTS.keys() and operation not in _BITOPTS.values()): raise ValueError('Invalid operation value: {}'.format(operation)) elif operation in [b'~', b'NOT'] and len(keys) > 1: raise ValueError('NOT can only be used with 1 key') if operation in _BITOPTS.keys(): operation = _BITOPTS[operation] return self._execute([b'BITOP', operation, dest_key] + list(keys))
python
def bitop(self, operation, dest_key, *keys): """Perform a bitwise operation between multiple keys (containing string values) and store the result in the destination key. The values for operation can be one of: - ``b'AND'`` - ``b'OR'`` - ``b'XOR'`` - ``b'NOT'`` - :data:`tredis.BITOP_AND` or ``b'&'`` - :data:`tredis.BITOP_OR` or ``b'|'`` - :data:`tredis.BITOP_XOR` or ``b'^'`` - :data:`tredis.BITOP_NOT` or ``b'~'`` ``b'NOT'`` is special as it only takes an input key, because it performs inversion of bits so it only makes sense as an unary operator. The result of the operation is always stored at ``dest_key``. **Handling of strings with different lengths** When an operation is performed between strings having different lengths, all the strings shorter than the longest string in the set are treated as if they were zero-padded up to the length of the longest string. The same holds true for non-existent keys, that are considered as a stream of zero bytes up to the length of the longest string. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` :param bytes operation: The operation to perform :param dest_key: The key to store the bitwise operation results to :type dest_key: :class:`str`, :class:`bytes` :param keys: One or more keys as keyword parameters for the bitwise op :type keys: :class:`str`, :class:`bytes` :return: The size of the string stored in the destination key, that is equal to the size of the longest input string. :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError` """ if (operation not in _BITOPTS.keys() and operation not in _BITOPTS.values()): raise ValueError('Invalid operation value: {}'.format(operation)) elif operation in [b'~', b'NOT'] and len(keys) > 1: raise ValueError('NOT can only be used with 1 key') if operation in _BITOPTS.keys(): operation = _BITOPTS[operation] return self._execute([b'BITOP', operation, dest_key] + list(keys))
[ "def", "bitop", "(", "self", ",", "operation", ",", "dest_key", ",", "*", "keys", ")", ":", "if", "(", "operation", "not", "in", "_BITOPTS", ".", "keys", "(", ")", "and", "operation", "not", "in", "_BITOPTS", ".", "values", "(", ")", ")", ":", "rai...
Perform a bitwise operation between multiple keys (containing string values) and store the result in the destination key. The values for operation can be one of: - ``b'AND'`` - ``b'OR'`` - ``b'XOR'`` - ``b'NOT'`` - :data:`tredis.BITOP_AND` or ``b'&'`` - :data:`tredis.BITOP_OR` or ``b'|'`` - :data:`tredis.BITOP_XOR` or ``b'^'`` - :data:`tredis.BITOP_NOT` or ``b'~'`` ``b'NOT'`` is special as it only takes an input key, because it performs inversion of bits so it only makes sense as an unary operator. The result of the operation is always stored at ``dest_key``. **Handling of strings with different lengths** When an operation is performed between strings having different lengths, all the strings shorter than the longest string in the set are treated as if they were zero-padded up to the length of the longest string. The same holds true for non-existent keys, that are considered as a stream of zero bytes up to the length of the longest string. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` :param bytes operation: The operation to perform :param dest_key: The key to store the bitwise operation results to :type dest_key: :class:`str`, :class:`bytes` :param keys: One or more keys as keyword parameters for the bitwise op :type keys: :class:`str`, :class:`bytes` :return: The size of the string stored in the destination key, that is equal to the size of the longest input string. :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError`
[ "Perform", "a", "bitwise", "operation", "between", "multiple", "keys", "(", "containing", "string", "values", ")", "and", "store", "the", "result", "in", "the", "destination", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L94-L148
gmr/tredis
tredis/strings.py
StringsMixin.bitpos
def bitpos(self, key, bit, start=None, end=None): """Return the position of the first bit set to ``1`` or ``0`` in a string. The position is returned, thinking of the string as an array of bits from left to right, where the first byte's most significant bit is at position 0, the second byte's most significant bit is at position ``8``, and so forth. The same bit position convention is followed by :meth:`~tredis.RedisClient.getbit` and :meth:`~tredis.RedisClient.setbit`. By default, all the bytes contained in the string are examined. It is possible to look for bits only in a specified interval passing the additional arguments start and end (it is possible to just pass start, the operation will assume that the end is the last byte of the string. However there are semantic differences as explained later). The range is interpreted as a range of bytes and not a range of bits, so ``start=0`` and ``end=2`` means to look at the first three bytes. Note that bit positions are returned always as absolute values starting from bit zero even when start and end are used to specify a range. Like for the :meth:`~tredis.RedisClient.getrange` command start and end can contain negative values in order to index bytes starting from the end of the string, where ``-1`` is the last byte, ``-2`` is the penultimate, and so forth. Non-existent keys are treated as empty strings. .. versionadded:: 0.2.0 .. 
note:: **Time complexity**: ``O(N)`` :param key: The key to get :type key: :class:`str`, :class:`bytes` :param int bit: The bit value to search for (``1`` or ``0``) :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :returns: The position of the first bit set to ``1`` or ``0`` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError` """ if 0 < bit > 1: raise ValueError('bit must be 1 or 0, not {}'.format(bit)) command = [b'BITPOS', key, ascii(bit)] if start is not None and end is None: raise ValueError('Can not specify start without an end') elif start is None and end is not None: raise ValueError('Can not specify start without an end') elif start is not None and end is not None: command += [ascii(start), ascii(end)] return self._execute(command)
python
def bitpos(self, key, bit, start=None, end=None): """Return the position of the first bit set to ``1`` or ``0`` in a string. The position is returned, thinking of the string as an array of bits from left to right, where the first byte's most significant bit is at position 0, the second byte's most significant bit is at position ``8``, and so forth. The same bit position convention is followed by :meth:`~tredis.RedisClient.getbit` and :meth:`~tredis.RedisClient.setbit`. By default, all the bytes contained in the string are examined. It is possible to look for bits only in a specified interval passing the additional arguments start and end (it is possible to just pass start, the operation will assume that the end is the last byte of the string. However there are semantic differences as explained later). The range is interpreted as a range of bytes and not a range of bits, so ``start=0`` and ``end=2`` means to look at the first three bytes. Note that bit positions are returned always as absolute values starting from bit zero even when start and end are used to specify a range. Like for the :meth:`~tredis.RedisClient.getrange` command start and end can contain negative values in order to index bytes starting from the end of the string, where ``-1`` is the last byte, ``-2`` is the penultimate, and so forth. Non-existent keys are treated as empty strings. .. versionadded:: 0.2.0 .. 
note:: **Time complexity**: ``O(N)`` :param key: The key to get :type key: :class:`str`, :class:`bytes` :param int bit: The bit value to search for (``1`` or ``0``) :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :returns: The position of the first bit set to ``1`` or ``0`` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError` """ if 0 < bit > 1: raise ValueError('bit must be 1 or 0, not {}'.format(bit)) command = [b'BITPOS', key, ascii(bit)] if start is not None and end is None: raise ValueError('Can not specify start without an end') elif start is None and end is not None: raise ValueError('Can not specify start without an end') elif start is not None and end is not None: command += [ascii(start), ascii(end)] return self._execute(command)
[ "def", "bitpos", "(", "self", ",", "key", ",", "bit", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "if", "0", "<", "bit", ">", "1", ":", "raise", "ValueError", "(", "'bit must be 1 or 0, not {}'", ".", "format", "(", "bit", ")", ")...
Return the position of the first bit set to ``1`` or ``0`` in a string. The position is returned, thinking of the string as an array of bits from left to right, where the first byte's most significant bit is at position 0, the second byte's most significant bit is at position ``8``, and so forth. The same bit position convention is followed by :meth:`~tredis.RedisClient.getbit` and :meth:`~tredis.RedisClient.setbit`. By default, all the bytes contained in the string are examined. It is possible to look for bits only in a specified interval passing the additional arguments start and end (it is possible to just pass start, the operation will assume that the end is the last byte of the string. However there are semantic differences as explained later). The range is interpreted as a range of bytes and not a range of bits, so ``start=0`` and ``end=2`` means to look at the first three bytes. Note that bit positions are returned always as absolute values starting from bit zero even when start and end are used to specify a range. Like for the :meth:`~tredis.RedisClient.getrange` command start and end can contain negative values in order to index bytes starting from the end of the string, where ``-1`` is the last byte, ``-2`` is the penultimate, and so forth. Non-existent keys are treated as empty strings. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` :param key: The key to get :type key: :class:`str`, :class:`bytes` :param int bit: The bit value to search for (``1`` or ``0``) :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :returns: The position of the first bit set to ``1`` or ``0`` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`, :exc:`ValueError`
[ "Return", "the", "position", "of", "the", "first", "bit", "set", "to", "1", "or", "0", "in", "a", "string", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L150-L204
gmr/tredis
tredis/strings.py
StringsMixin.decrby
def decrby(self, key, decrement): """Decrements the number stored at key by decrement. If the key does not exist, it is set to 0 before performing the operation. An error is returned if the key contains a value of the wrong type or contains a string that can not be represented as integer. This operation is limited to 64 bit signed integers. See :meth:`~tredis.RedisClient.incr` for extra information on increment/decrement operations. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to decrement :type key: :class:`str`, :class:`bytes` :param int decrement: The amount to decrement by :returns: The value of key after the decrement :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'DECRBY', key, ascii(decrement)])
python
def decrby(self, key, decrement): """Decrements the number stored at key by decrement. If the key does not exist, it is set to 0 before performing the operation. An error is returned if the key contains a value of the wrong type or contains a string that can not be represented as integer. This operation is limited to 64 bit signed integers. See :meth:`~tredis.RedisClient.incr` for extra information on increment/decrement operations. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to decrement :type key: :class:`str`, :class:`bytes` :param int decrement: The amount to decrement by :returns: The value of key after the decrement :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'DECRBY', key, ascii(decrement)])
[ "def", "decrby", "(", "self", ",", "key", ",", "decrement", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'DECRBY'", ",", "key", ",", "ascii", "(", "decrement", ")", "]", ")" ]
Decrements the number stored at key by decrement. If the key does not exist, it is set to 0 before performing the operation. An error is returned if the key contains a value of the wrong type or contains a string that can not be represented as integer. This operation is limited to 64 bit signed integers. See :meth:`~tredis.RedisClient.incr` for extra information on increment/decrement operations. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to decrement :type key: :class:`str`, :class:`bytes` :param int decrement: The amount to decrement by :returns: The value of key after the decrement :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`
[ "Decrements", "the", "number", "stored", "at", "key", "by", "decrement", ".", "If", "the", "key", "does", "not", "exist", "it", "is", "set", "to", "0", "before", "performing", "the", "operation", ".", "An", "error", "is", "returned", "if", "the", "key", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L229-L251
gmr/tredis
tredis/strings.py
StringsMixin.getbit
def getbit(self, key, offset): """Returns the bit value at offset in the string value stored at key. When offset is beyond the string length, the string is assumed to be a contiguous space with 0 bits. When key does not exist it is assumed to be an empty string, so offset is always out of range and the value is also assumed to be a contiguous space with 0 bits. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int offset: The bit offset to fetch the bit from :rtype: bytes|None :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'GETBIT', key, ascii(offset)])
python
def getbit(self, key, offset): """Returns the bit value at offset in the string value stored at key. When offset is beyond the string length, the string is assumed to be a contiguous space with 0 bits. When key does not exist it is assumed to be an empty string, so offset is always out of range and the value is also assumed to be a contiguous space with 0 bits. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int offset: The bit offset to fetch the bit from :rtype: bytes|None :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'GETBIT', key, ascii(offset)])
[ "def", "getbit", "(", "self", ",", "key", ",", "offset", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'GETBIT'", ",", "key", ",", "ascii", "(", "offset", ")", "]", ")" ]
Returns the bit value at offset in the string value stored at key. When offset is beyond the string length, the string is assumed to be a contiguous space with 0 bits. When key does not exist it is assumed to be an empty string, so offset is always out of range and the value is also assumed to be a contiguous space with 0 bits. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int offset: The bit offset to fetch the bit from :rtype: bytes|None :raises: :exc:`~tredis.exceptions.RedisError`
[ "Returns", "the", "bit", "value", "at", "offset", "in", "the", "string", "value", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L269-L288
gmr/tredis
tredis/strings.py
StringsMixin.getrange
def getrange(self, key, start, end): """Returns the bit value at offset in the string value stored at key. When offset is beyond the string length, the string is assumed to be a contiguous space with 0 bits. When key does not exist it is assumed to be an empty string, so offset is always out of range and the value is also assumed to be a contiguous space with 0 bits. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the length of the returned string. The complexity is ultimately determined by the returned length, but because creating a substring from an existing string is very cheap, it can be considered ``O(1)`` for small strings. :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :rtype: bytes|None :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'GETRANGE', key, ascii(start), ascii(end)])
python
def getrange(self, key, start, end): """Returns the bit value at offset in the string value stored at key. When offset is beyond the string length, the string is assumed to be a contiguous space with 0 bits. When key does not exist it is assumed to be an empty string, so offset is always out of range and the value is also assumed to be a contiguous space with 0 bits. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the length of the returned string. The complexity is ultimately determined by the returned length, but because creating a substring from an existing string is very cheap, it can be considered ``O(1)`` for small strings. :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :rtype: bytes|None :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'GETRANGE', key, ascii(start), ascii(end)])
[ "def", "getrange", "(", "self", ",", "key", ",", "start", ",", "end", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'GETRANGE'", ",", "key", ",", "ascii", "(", "start", ")", ",", "ascii", "(", "end", ")", "]", ")" ]
Returns the bit value at offset in the string value stored at key. When offset is beyond the string length, the string is assumed to be a contiguous space with 0 bits. When key does not exist it is assumed to be an empty string, so offset is always out of range and the value is also assumed to be a contiguous space with 0 bits. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the length of the returned string. The complexity is ultimately determined by the returned length, but because creating a substring from an existing string is very cheap, it can be considered ``O(1)`` for small strings. :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int start: The start position to evaluate in the string :param int end: The end position to evaluate in the string :rtype: bytes|None :raises: :exc:`~tredis.exceptions.RedisError`
[ "Returns", "the", "bit", "value", "at", "offset", "in", "the", "string", "value", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L290-L314
gmr/tredis
tredis/strings.py
StringsMixin.incrby
def incrby(self, key, increment): """Increments the number stored at key by increment. If the key does not exist, it is set to 0 before performing the operation. An error is returned if the key contains a value of the wrong type or contains a string that can not be represented as integer. This operation is limited to 64 bit signed integers. See :meth:`~tredis.RedisClient.incr` for extra information on increment/decrement operations. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to increment :type key: :class:`str`, :class:`bytes` :param int increment: The amount to increment by :returns: The value of key after the increment :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'INCRBY', key, ascii(increment)])
python
def incrby(self, key, increment): """Increments the number stored at key by increment. If the key does not exist, it is set to 0 before performing the operation. An error is returned if the key contains a value of the wrong type or contains a string that can not be represented as integer. This operation is limited to 64 bit signed integers. See :meth:`~tredis.RedisClient.incr` for extra information on increment/decrement operations. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to increment :type key: :class:`str`, :class:`bytes` :param int increment: The amount to increment by :returns: The value of key after the increment :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'INCRBY', key, ascii(increment)])
[ "def", "incrby", "(", "self", ",", "key", ",", "increment", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'INCRBY'", ",", "key", ",", "ascii", "(", "increment", ")", "]", ")" ]
Increments the number stored at key by increment. If the key does not exist, it is set to 0 before performing the operation. An error is returned if the key contains a value of the wrong type or contains a string that can not be represented as integer. This operation is limited to 64 bit signed integers. See :meth:`~tredis.RedisClient.incr` for extra information on increment/decrement operations. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to increment :type key: :class:`str`, :class:`bytes` :param int increment: The amount to increment by :returns: The value of key after the increment :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`
[ "Increments", "the", "number", "stored", "at", "key", "by", "increment", ".", "If", "the", "key", "does", "not", "exist", "it", "is", "set", "to", "0", "before", "performing", "the", "operation", ".", "An", "error", "is", "returned", "if", "the", "key", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L360-L382
gmr/tredis
tredis/strings.py
StringsMixin.incrbyfloat
def incrbyfloat(self, key, increment): """Increment the string representing a floating point number stored at key by the specified increment. If the key does not exist, it is set to 0 before performing the operation. An error is returned if one of the following conditions occur: - The key contains a value of the wrong type (not a string). - The current key content or the specified increment are not parsable as a double precision floating point number. If the command is successful the new incremented value is stored as the new value of the key (replacing the old one), and returned to the caller as a string. Both the value already contained in the string key and the increment argument can be optionally provided in exponential notation, however the value computed after the increment is stored consistently in the same format, that is, an integer number followed (if needed) by a dot, and a variable number of digits representing the decimal part of the number. Trailing zeroes are always removed. The precision of the output is fixed at 17 digits after the decimal point regardless of the actual internal precision of the computation. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to increment :type key: :class:`str`, :class:`bytes` :param float increment: The amount to increment by :returns: The value of key after the increment :rtype: bytes :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'INCRBYFLOAT', key, ascii(increment)])
python
def incrbyfloat(self, key, increment): """Increment the string representing a floating point number stored at key by the specified increment. If the key does not exist, it is set to 0 before performing the operation. An error is returned if one of the following conditions occur: - The key contains a value of the wrong type (not a string). - The current key content or the specified increment are not parsable as a double precision floating point number. If the command is successful the new incremented value is stored as the new value of the key (replacing the old one), and returned to the caller as a string. Both the value already contained in the string key and the increment argument can be optionally provided in exponential notation, however the value computed after the increment is stored consistently in the same format, that is, an integer number followed (if needed) by a dot, and a variable number of digits representing the decimal part of the number. Trailing zeroes are always removed. The precision of the output is fixed at 17 digits after the decimal point regardless of the actual internal precision of the computation. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to increment :type key: :class:`str`, :class:`bytes` :param float increment: The amount to increment by :returns: The value of key after the increment :rtype: bytes :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'INCRBYFLOAT', key, ascii(increment)])
[ "def", "incrbyfloat", "(", "self", ",", "key", ",", "increment", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'INCRBYFLOAT'", ",", "key", ",", "ascii", "(", "increment", ")", "]", ")" ]
Increment the string representing a floating point number stored at key by the specified increment. If the key does not exist, it is set to 0 before performing the operation. An error is returned if one of the following conditions occur: - The key contains a value of the wrong type (not a string). - The current key content or the specified increment are not parsable as a double precision floating point number. If the command is successful the new incremented value is stored as the new value of the key (replacing the old one), and returned to the caller as a string. Both the value already contained in the string key and the increment argument can be optionally provided in exponential notation, however the value computed after the increment is stored consistently in the same format, that is, an integer number followed (if needed) by a dot, and a variable number of digits representing the decimal part of the number. Trailing zeroes are always removed. The precision of the output is fixed at 17 digits after the decimal point regardless of the actual internal precision of the computation. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to increment :type key: :class:`str`, :class:`bytes` :param float increment: The amount to increment by :returns: The value of key after the increment :rtype: bytes :raises: :exc:`~tredis.exceptions.RedisError`
[ "Increment", "the", "string", "representing", "a", "floating", "point", "number", "stored", "at", "key", "by", "the", "specified", "increment", ".", "If", "the", "key", "does", "not", "exist", "it", "is", "set", "to", "0", "before", "performing", "the", "o...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L384-L420
gmr/tredis
tredis/strings.py
StringsMixin.mset
def mset(self, mapping): """Sets the given keys to their respective values. :meth:`~tredis.RedisClient.mset` replaces existing values with new values, just as regular :meth:`~tredis.RedisClient.set`. See :meth:`~tredis.RedisClient.msetnx` if you don't want to overwrite existing values. :meth:`~tredis.RedisClient.mset` is atomic, so all given keys are set at once. It is not possible for clients to see that some of the keys were updated while others are unchanged. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys to set. :param dict mapping: A mapping of key/value pairs to set :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'MSET'] for key, value in mapping.items(): command += [key, value] return self._execute(command, b'OK')
python
def mset(self, mapping): """Sets the given keys to their respective values. :meth:`~tredis.RedisClient.mset` replaces existing values with new values, just as regular :meth:`~tredis.RedisClient.set`. See :meth:`~tredis.RedisClient.msetnx` if you don't want to overwrite existing values. :meth:`~tredis.RedisClient.mset` is atomic, so all given keys are set at once. It is not possible for clients to see that some of the keys were updated while others are unchanged. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys to set. :param dict mapping: A mapping of key/value pairs to set :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'MSET'] for key, value in mapping.items(): command += [key, value] return self._execute(command, b'OK')
[ "def", "mset", "(", "self", ",", "mapping", ")", ":", "command", "=", "[", "b'MSET'", "]", "for", "key", ",", "value", "in", "mapping", ".", "items", "(", ")", ":", "command", "+=", "[", "key", ",", "value", "]", "return", "self", ".", "_execute", ...
Sets the given keys to their respective values. :meth:`~tredis.RedisClient.mset` replaces existing values with new values, just as regular :meth:`~tredis.RedisClient.set`. See :meth:`~tredis.RedisClient.msetnx` if you don't want to overwrite existing values. :meth:`~tredis.RedisClient.mset` is atomic, so all given keys are set at once. It is not possible for clients to see that some of the keys were updated while others are unchanged. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys to set. :param dict mapping: A mapping of key/value pairs to set :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Sets", "the", "given", "keys", "to", "their", "respective", "values", ".", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "mset", "replaces", "existing", "values", "with", "new", "values", "just", "as", "regular", ":", "meth", ":", "~tredis", ".", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L440-L464
gmr/tredis
tredis/strings.py
StringsMixin.msetnx
def msetnx(self, mapping): """Sets the given keys to their respective values. :meth:`~tredis.RedisClient.msetnx` will not perform any operation at all even if just a single key already exists. Because of this semantic :meth:`~tredis.RedisClient.msetnx` can be used in order to set different keys representing different fields of an unique logic object in a way that ensures that either all the fields or none at all are set. :meth:`~tredis.RedisClient.msetnx` is atomic, so all given keys are set at once. It is not possible for clients to see that some of the keys were updated while others are unchanged. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys to set. :param dict mapping: A mapping of key/value pairs to set :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'MSETNX'] for key, value in mapping.items(): command += [key, value] return self._execute(command, 1)
python
def msetnx(self, mapping): """Sets the given keys to their respective values. :meth:`~tredis.RedisClient.msetnx` will not perform any operation at all even if just a single key already exists. Because of this semantic :meth:`~tredis.RedisClient.msetnx` can be used in order to set different keys representing different fields of an unique logic object in a way that ensures that either all the fields or none at all are set. :meth:`~tredis.RedisClient.msetnx` is atomic, so all given keys are set at once. It is not possible for clients to see that some of the keys were updated while others are unchanged. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys to set. :param dict mapping: A mapping of key/value pairs to set :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'MSETNX'] for key, value in mapping.items(): command += [key, value] return self._execute(command, 1)
[ "def", "msetnx", "(", "self", ",", "mapping", ")", ":", "command", "=", "[", "b'MSETNX'", "]", "for", "key", ",", "value", "in", "mapping", ".", "items", "(", ")", ":", "command", "+=", "[", "key", ",", "value", "]", "return", "self", ".", "_execut...
Sets the given keys to their respective values. :meth:`~tredis.RedisClient.msetnx` will not perform any operation at all even if just a single key already exists. Because of this semantic :meth:`~tredis.RedisClient.msetnx` can be used in order to set different keys representing different fields of an unique logic object in a way that ensures that either all the fields or none at all are set. :meth:`~tredis.RedisClient.msetnx` is atomic, so all given keys are set at once. It is not possible for clients to see that some of the keys were updated while others are unchanged. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(N)`` where ``N`` is the number of keys to set. :param dict mapping: A mapping of key/value pairs to set :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Sets", "the", "given", "keys", "to", "their", "respective", "values", ".", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "msetnx", "will", "not", "perform", "any", "operation", "at", "all", "even", "if", "just", "a", "single", "key", "already", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L466-L493
gmr/tredis
tredis/strings.py
StringsMixin.psetex
def psetex(self, key, milliseconds, value): """:meth:`~tredis.RedisClient.psetex` works exactly like :meth:`~tredis.RedisClient.psetex` with the sole difference that the expire time is specified in milliseconds instead of seconds. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int milliseconds: Number of milliseconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'PSETEX', key, ascii(milliseconds), value], b'OK')
python
def psetex(self, key, milliseconds, value): """:meth:`~tredis.RedisClient.psetex` works exactly like :meth:`~tredis.RedisClient.psetex` with the sole difference that the expire time is specified in milliseconds instead of seconds. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int milliseconds: Number of milliseconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'PSETEX', key, ascii(milliseconds), value], b'OK')
[ "def", "psetex", "(", "self", ",", "key", ",", "milliseconds", ",", "value", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'PSETEX'", ",", "key", ",", "ascii", "(", "milliseconds", ")", ",", "value", "]", ",", "b'OK'", ")" ]
:meth:`~tredis.RedisClient.psetex` works exactly like :meth:`~tredis.RedisClient.psetex` with the sole difference that the expire time is specified in milliseconds instead of seconds. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int milliseconds: Number of milliseconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "psetex", "works", "exactly", "like", ":", "meth", ":", "~tredis", ".", "RedisClient", ".", "psetex", "with", "the", "sole", "difference", "that", "the", "expire", "time", "is", "specified", "in", "mill...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L495-L514
gmr/tredis
tredis/strings.py
StringsMixin.set
def set(self, key, value, ex=None, px=None, nx=False, xx=False): """Set key to hold the string value. If key already holds a value, it is overwritten, regardless of its type. Any previous time to live associated with the key is discarded on successful :meth:`~tredis.RedisClient.set` operation. If the value is not one of :class:`str`, :class:`bytes`, or :class:`int`, a :exc:`ValueError` will be raised. .. note:: **Time complexity**: ``O(1)`` :param key: The key to remove :type key: :class:`str`, :class:`bytes` :param value: The value to set :type value: :class:`str`, :class:`bytes`, :class:`int` :param int ex: Set the specified expire time, in seconds :param int px: Set the specified expire time, in milliseconds :param bool nx: Only set the key if it does not already exist :param bool xx: Only set the key if it already exist :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` :raises: :exc:`ValueError` """ command = [b'SET', key, value] if ex: command += [b'EX', ascii(ex).encode('ascii')] if px: command += [b'PX', ascii(px).encode('ascii')] if nx: command.append(b'NX') if xx: command.append(b'XX') return self._execute(command, b'OK')
python
def set(self, key, value, ex=None, px=None, nx=False, xx=False): """Set key to hold the string value. If key already holds a value, it is overwritten, regardless of its type. Any previous time to live associated with the key is discarded on successful :meth:`~tredis.RedisClient.set` operation. If the value is not one of :class:`str`, :class:`bytes`, or :class:`int`, a :exc:`ValueError` will be raised. .. note:: **Time complexity**: ``O(1)`` :param key: The key to remove :type key: :class:`str`, :class:`bytes` :param value: The value to set :type value: :class:`str`, :class:`bytes`, :class:`int` :param int ex: Set the specified expire time, in seconds :param int px: Set the specified expire time, in milliseconds :param bool nx: Only set the key if it does not already exist :param bool xx: Only set the key if it already exist :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` :raises: :exc:`ValueError` """ command = [b'SET', key, value] if ex: command += [b'EX', ascii(ex).encode('ascii')] if px: command += [b'PX', ascii(px).encode('ascii')] if nx: command.append(b'NX') if xx: command.append(b'XX') return self._execute(command, b'OK')
[ "def", "set", "(", "self", ",", "key", ",", "value", ",", "ex", "=", "None", ",", "px", "=", "None", ",", "nx", "=", "False", ",", "xx", "=", "False", ")", ":", "command", "=", "[", "b'SET'", ",", "key", ",", "value", "]", "if", "ex", ":", ...
Set key to hold the string value. If key already holds a value, it is overwritten, regardless of its type. Any previous time to live associated with the key is discarded on successful :meth:`~tredis.RedisClient.set` operation. If the value is not one of :class:`str`, :class:`bytes`, or :class:`int`, a :exc:`ValueError` will be raised. .. note:: **Time complexity**: ``O(1)`` :param key: The key to remove :type key: :class:`str`, :class:`bytes` :param value: The value to set :type value: :class:`str`, :class:`bytes`, :class:`int` :param int ex: Set the specified expire time, in seconds :param int px: Set the specified expire time, in milliseconds :param bool nx: Only set the key if it does not already exist :param bool xx: Only set the key if it already exist :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` :raises: :exc:`ValueError`
[ "Set", "key", "to", "hold", "the", "string", "value", ".", "If", "key", "already", "holds", "a", "value", "it", "is", "overwritten", "regardless", "of", "its", "type", ".", "Any", "previous", "time", "to", "live", "associated", "with", "the", "key", "is"...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L516-L549
gmr/tredis
tredis/strings.py
StringsMixin.setbit
def setbit(self, key, offset, bit): """Sets or clears the bit at offset in the string value stored at key. The bit is either set or cleared depending on value, which can be either 0 or 1. When key does not exist, a new string value is created. The string is grown to make sure it can hold a bit at offset. The offset argument is required to be greater than or equal to 0, and smaller than 2 :sup:`32` (this limits bitmaps to 512MB). When the string at key is grown, added bits are set to 0. .. warning:: When setting the last possible bit (offset equal to 2 :sup:`32` -1) and the string value stored at key does not yet hold a string value, or holds a small string value, Redis needs to allocate all intermediate memory which can block the server for some time. On a 2010 MacBook Pro, setting bit number 2 :sup:`32` -1 (512MB allocation) takes ~300ms, setting bit number 2 :sup:`30` -1 (128MB allocation) takes ~80ms, setting bit number 2 :sup:`28` -1 (32MB allocation) takes ~30ms and setting bit number 2 :sup:`26` -1 (8MB allocation) takes ~8ms. Note that once this first allocation is done, subsequent calls to :meth:`~tredis.RedisClient.setbit` for the same key will not have the allocation overhead. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int offset: The bit offset to fetch the bit from :param int bit: The value (``0`` or ``1``) to set for the bit :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ if 0 < bit > 1: raise ValueError('bit must be 1 or 0, not {}'.format(bit)) return self._execute([b'SETBIT', key, ascii(offset), ascii(bit)])
python
def setbit(self, key, offset, bit): """Sets or clears the bit at offset in the string value stored at key. The bit is either set or cleared depending on value, which can be either 0 or 1. When key does not exist, a new string value is created. The string is grown to make sure it can hold a bit at offset. The offset argument is required to be greater than or equal to 0, and smaller than 2 :sup:`32` (this limits bitmaps to 512MB). When the string at key is grown, added bits are set to 0. .. warning:: When setting the last possible bit (offset equal to 2 :sup:`32` -1) and the string value stored at key does not yet hold a string value, or holds a small string value, Redis needs to allocate all intermediate memory which can block the server for some time. On a 2010 MacBook Pro, setting bit number 2 :sup:`32` -1 (512MB allocation) takes ~300ms, setting bit number 2 :sup:`30` -1 (128MB allocation) takes ~80ms, setting bit number 2 :sup:`28` -1 (32MB allocation) takes ~30ms and setting bit number 2 :sup:`26` -1 (8MB allocation) takes ~8ms. Note that once this first allocation is done, subsequent calls to :meth:`~tredis.RedisClient.setbit` for the same key will not have the allocation overhead. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int offset: The bit offset to fetch the bit from :param int bit: The value (``0`` or ``1``) to set for the bit :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ if 0 < bit > 1: raise ValueError('bit must be 1 or 0, not {}'.format(bit)) return self._execute([b'SETBIT', key, ascii(offset), ascii(bit)])
[ "def", "setbit", "(", "self", ",", "key", ",", "offset", ",", "bit", ")", ":", "if", "0", "<", "bit", ">", "1", ":", "raise", "ValueError", "(", "'bit must be 1 or 0, not {}'", ".", "format", "(", "bit", ")", ")", "return", "self", ".", "_execute", "...
Sets or clears the bit at offset in the string value stored at key. The bit is either set or cleared depending on value, which can be either 0 or 1. When key does not exist, a new string value is created. The string is grown to make sure it can hold a bit at offset. The offset argument is required to be greater than or equal to 0, and smaller than 2 :sup:`32` (this limits bitmaps to 512MB). When the string at key is grown, added bits are set to 0. .. warning:: When setting the last possible bit (offset equal to 2 :sup:`32` -1) and the string value stored at key does not yet hold a string value, or holds a small string value, Redis needs to allocate all intermediate memory which can block the server for some time. On a 2010 MacBook Pro, setting bit number 2 :sup:`32` -1 (512MB allocation) takes ~300ms, setting bit number 2 :sup:`30` -1 (128MB allocation) takes ~80ms, setting bit number 2 :sup:`28` -1 (32MB allocation) takes ~30ms and setting bit number 2 :sup:`26` -1 (8MB allocation) takes ~8ms. Note that once this first allocation is done, subsequent calls to :meth:`~tredis.RedisClient.setbit` for the same key will not have the allocation overhead. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param int offset: The bit offset to fetch the bit from :param int bit: The value (``0`` or ``1``) to set for the bit :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`
[ "Sets", "or", "clears", "the", "bit", "at", "offset", "in", "the", "string", "value", "stored", "at", "key", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L551-L587
gmr/tredis
tredis/strings.py
StringsMixin.setex
def setex(self, key, seconds, value): """Set key to hold the string value and set key to timeout after a given number of seconds. :meth:`~tredis.RedisClient.setex` is atomic, and can be reproduced by using :meth:`~tredis.RedisClient.set` and :meth:`~tredis.RedisClient.expire` inside an :meth:`~tredis.RedisClient.multi` / :meth:`~tredis.RedisClient.exec` block. It is provided as a faster alternative to the given sequence of operations, because this operation is very common when Redis is used as a cache. An error is returned when seconds is invalid. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int seconds: Number of seconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'SETEX', key, ascii(seconds), value], b'OK')
python
def setex(self, key, seconds, value): """Set key to hold the string value and set key to timeout after a given number of seconds. :meth:`~tredis.RedisClient.setex` is atomic, and can be reproduced by using :meth:`~tredis.RedisClient.set` and :meth:`~tredis.RedisClient.expire` inside an :meth:`~tredis.RedisClient.multi` / :meth:`~tredis.RedisClient.exec` block. It is provided as a faster alternative to the given sequence of operations, because this operation is very common when Redis is used as a cache. An error is returned when seconds is invalid. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int seconds: Number of seconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'SETEX', key, ascii(seconds), value], b'OK')
[ "def", "setex", "(", "self", ",", "key", ",", "seconds", ",", "value", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'SETEX'", ",", "key", ",", "ascii", "(", "seconds", ")", ",", "value", "]", ",", "b'OK'", ")" ]
Set key to hold the string value and set key to timeout after a given number of seconds. :meth:`~tredis.RedisClient.setex` is atomic, and can be reproduced by using :meth:`~tredis.RedisClient.set` and :meth:`~tredis.RedisClient.expire` inside an :meth:`~tredis.RedisClient.multi` / :meth:`~tredis.RedisClient.exec` block. It is provided as a faster alternative to the given sequence of operations, because this operation is very common when Redis is used as a cache. An error is returned when seconds is invalid. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int seconds: Number of seconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
[ "Set", "key", "to", "hold", "the", "string", "value", "and", "set", "key", "to", "timeout", "after", "a", "given", "number", "of", "seconds", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L589-L616
gmr/tredis
tredis/strings.py
StringsMixin.setrange
def setrange(self, key, offset, value): """Overwrites part of the string stored at key, starting at the specified offset, for the entire length of value. If the offset is larger than the current length of the string at key, the string is padded with zero-bytes to make offset fit. Non-existing keys are considered as empty strings, so this command will make sure it holds a string large enough to be able to set value at offset. .. note:: The maximum offset that you can set is 2 :sup:`29` -1 (536870911), as Redis Strings are limited to 512 megabytes. If you need to grow beyond this size, you can use multiple keys. .. warning:: When setting the last possible byte and the string value stored at key does not yet hold a string value, or holds a small string value, Redis needs to allocate all intermediate memory which can block the server for some time. On a 2010 MacBook Pro, setting byte number 536870911 (512MB allocation) takes ~300ms, setting byte number 134217728 (128MB allocation) takes ~80ms, setting bit number 33554432 (32MB allocation) takes ~30ms and setting bit number 8388608 (8MB allocation) takes ~8ms. Note that once this first allocation is done, subsequent calls to :meth:`~tredis.RedisClient.setrange` for the same key will not have the allocation overhead. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)``, not counting the time taken to copy the new string in place. Usually, this string is very small so the amortized complexity is ``O(1)``. Otherwise, complexity is ``O(M)`` with ``M`` being the length of the value argument. :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param value: The value to set :type value: :class:`str`, :class:`bytes`, :class:`int` :returns: The length of the string after it was modified by the command :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'SETRANGE', key, ascii(offset), value])
python
def setrange(self, key, offset, value): """Overwrites part of the string stored at key, starting at the specified offset, for the entire length of value. If the offset is larger than the current length of the string at key, the string is padded with zero-bytes to make offset fit. Non-existing keys are considered as empty strings, so this command will make sure it holds a string large enough to be able to set value at offset. .. note:: The maximum offset that you can set is 2 :sup:`29` -1 (536870911), as Redis Strings are limited to 512 megabytes. If you need to grow beyond this size, you can use multiple keys. .. warning:: When setting the last possible byte and the string value stored at key does not yet hold a string value, or holds a small string value, Redis needs to allocate all intermediate memory which can block the server for some time. On a 2010 MacBook Pro, setting byte number 536870911 (512MB allocation) takes ~300ms, setting byte number 134217728 (128MB allocation) takes ~80ms, setting bit number 33554432 (32MB allocation) takes ~30ms and setting bit number 8388608 (8MB allocation) takes ~8ms. Note that once this first allocation is done, subsequent calls to :meth:`~tredis.RedisClient.setrange` for the same key will not have the allocation overhead. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)``, not counting the time taken to copy the new string in place. Usually, this string is very small so the amortized complexity is ``O(1)``. Otherwise, complexity is ``O(M)`` with ``M`` being the length of the value argument. :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param value: The value to set :type value: :class:`str`, :class:`bytes`, :class:`int` :returns: The length of the string after it was modified by the command :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'SETRANGE', key, ascii(offset), value])
[ "def", "setrange", "(", "self", ",", "key", ",", "offset", ",", "value", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'SETRANGE'", ",", "key", ",", "ascii", "(", "offset", ")", ",", "value", "]", ")" ]
Overwrites part of the string stored at key, starting at the specified offset, for the entire length of value. If the offset is larger than the current length of the string at key, the string is padded with zero-bytes to make offset fit. Non-existing keys are considered as empty strings, so this command will make sure it holds a string large enough to be able to set value at offset. .. note:: The maximum offset that you can set is 2 :sup:`29` -1 (536870911), as Redis Strings are limited to 512 megabytes. If you need to grow beyond this size, you can use multiple keys. .. warning:: When setting the last possible byte and the string value stored at key does not yet hold a string value, or holds a small string value, Redis needs to allocate all intermediate memory which can block the server for some time. On a 2010 MacBook Pro, setting byte number 536870911 (512MB allocation) takes ~300ms, setting byte number 134217728 (128MB allocation) takes ~80ms, setting bit number 33554432 (32MB allocation) takes ~30ms and setting bit number 8388608 (8MB allocation) takes ~8ms. Note that once this first allocation is done, subsequent calls to :meth:`~tredis.RedisClient.setrange` for the same key will not have the allocation overhead. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)``, not counting the time taken to copy the new string in place. Usually, this string is very small so the amortized complexity is ``O(1)``. Otherwise, complexity is ``O(M)`` with ``M`` being the length of the value argument. :param key: The key to get the bit from :type key: :class:`str`, :class:`bytes` :param value: The value to set :type value: :class:`str`, :class:`bytes`, :class:`int` :returns: The length of the string after it was modified by the command :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`
[ "Overwrites", "part", "of", "the", "string", "stored", "at", "key", "starting", "at", "the", "specified", "offset", "for", "the", "entire", "length", "of", "value", ".", "If", "the", "offset", "is", "larger", "than", "the", "current", "length", "of", "the"...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/strings.py#L638-L678
taddeus/wspy
extension.py
Extension.conflicts
def conflicts(self, ext): """ Check if the extension conflicts with an already accepted extension. This may be the case when the two extensions use the same reserved bits, or have the same name (when the same extension is negotiated multiple times with different parameters). """ return ext.rsv1 and self.rsv1 \ or ext.rsv2 and self.rsv2 \ or ext.rsv3 and self.rsv3 \ or set(ext.names) & set(self.names) \ or set(ext.opcodes) & set(self.opcodes)
python
def conflicts(self, ext): """ Check if the extension conflicts with an already accepted extension. This may be the case when the two extensions use the same reserved bits, or have the same name (when the same extension is negotiated multiple times with different parameters). """ return ext.rsv1 and self.rsv1 \ or ext.rsv2 and self.rsv2 \ or ext.rsv3 and self.rsv3 \ or set(ext.names) & set(self.names) \ or set(ext.opcodes) & set(self.opcodes)
[ "def", "conflicts", "(", "self", ",", "ext", ")", ":", "return", "ext", ".", "rsv1", "and", "self", ".", "rsv1", "or", "ext", ".", "rsv2", "and", "self", ".", "rsv2", "or", "ext", ".", "rsv3", "and", "self", ".", "rsv3", "or", "set", "(", "ext", ...
Check if the extension conflicts with an already accepted extension. This may be the case when the two extensions use the same reserved bits, or have the same name (when the same extension is negotiated multiple times with different parameters).
[ "Check", "if", "the", "extension", "conflicts", "with", "an", "already", "accepted", "extension", ".", "This", "may", "be", "the", "case", "when", "the", "two", "extensions", "use", "the", "same", "reserved", "bits", "or", "have", "the", "same", "name", "(...
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/extension.py#L31-L42
taddeus/wspy
extension.py
Extension.negotiate_safe
def negotiate_safe(self, name, params): """ `name` and `params` are sent in the HTTP request by the client. Check if the extension name is supported by this extension, and validate the parameters. Returns a dict with accepted parameters, or None if not accepted. """ for param in params.iterkeys(): if param not in self.defaults: return try: return dict(self.negotiate(name, params)) except (KeyError, ValueError, AssertionError): pass
python
def negotiate_safe(self, name, params): """ `name` and `params` are sent in the HTTP request by the client. Check if the extension name is supported by this extension, and validate the parameters. Returns a dict with accepted parameters, or None if not accepted. """ for param in params.iterkeys(): if param not in self.defaults: return try: return dict(self.negotiate(name, params)) except (KeyError, ValueError, AssertionError): pass
[ "def", "negotiate_safe", "(", "self", ",", "name", ",", "params", ")", ":", "for", "param", "in", "params", ".", "iterkeys", "(", ")", ":", "if", "param", "not", "in", "self", ".", "defaults", ":", "return", "try", ":", "return", "dict", "(", "self",...
`name` and `params` are sent in the HTTP request by the client. Check if the extension name is supported by this extension, and validate the parameters. Returns a dict with accepted parameters, or None if not accepted.
[ "name", "and", "params", "are", "sent", "in", "the", "HTTP", "request", "by", "the", "client", ".", "Check", "if", "the", "extension", "name", "is", "supported", "by", "this", "extension", "and", "validate", "the", "parameters", ".", "Returns", "a", "dict"...
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/extension.py#L51-L65
theiviaxx/Frog
frog/views/userpref.py
index
def index(request): """Handles a request based on method and calls the appropriate function""" if request.method == 'GET': return get(request) elif request.method == 'POST': return post(request) return HttpResponse('')
python
def index(request): """Handles a request based on method and calls the appropriate function""" if request.method == 'GET': return get(request) elif request.method == 'POST': return post(request) return HttpResponse('')
[ "def", "index", "(", "request", ")", ":", "if", "request", ".", "method", "==", "'GET'", ":", "return", "get", "(", "request", ")", "elif", "request", ".", "method", "==", "'POST'", ":", "return", "post", "(", "request", ")", "return", "HttpResponse", ...
Handles a request based on method and calls the appropriate function
[ "Handles", "a", "request", "based", "on", "method", "and", "calls", "the", "appropriate", "function" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/userpref.py#L40-L46
theiviaxx/Frog
frog/views/userpref.py
get
def get(request): """Gets the currently logged in users preferences :returns: json """ res = Result() obj, created = UserPref.objects.get_or_create(user=request.user, defaults={'data': json.dumps(DefaultPrefs.copy())}) data = obj.json() data['subscriptions'] = [_.json() for _ in GallerySubscription.objects.filter(user=request.user)] res.append(data) return JsonResponse(res.asDict())
python
def get(request): """Gets the currently logged in users preferences :returns: json """ res = Result() obj, created = UserPref.objects.get_or_create(user=request.user, defaults={'data': json.dumps(DefaultPrefs.copy())}) data = obj.json() data['subscriptions'] = [_.json() for _ in GallerySubscription.objects.filter(user=request.user)] res.append(data) return JsonResponse(res.asDict())
[ "def", "get", "(", "request", ")", ":", "res", "=", "Result", "(", ")", "obj", ",", "created", "=", "UserPref", ".", "objects", ".", "get_or_create", "(", "user", "=", "request", ".", "user", ",", "defaults", "=", "{", "'data'", ":", "json", ".", "...
Gets the currently logged in users preferences :returns: json
[ "Gets", "the", "currently", "logged", "in", "users", "preferences" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/userpref.py#L49-L62
theiviaxx/Frog
frog/views/userpref.py
post
def post(request): """Sets a key to a value on the currently logged in users preferences :param key: Key to set :type key: str :param val: Value to set :type val: primitive :returns: json """ data = request.POST or json.loads(request.body)['body'] key = data.get('key', None) val = data.get('val', None) res = Result() if key is not None and val is not None: obj, created = UserPref.objects.get_or_create(user=request.user) if created: obj.data = json.dumps(DefaultPrefs.copy()) obj.save() try: val = json.loads(val) except (TypeError, ValueError): pass obj.setKey(key, val) obj.save() res.append(obj.json()) return JsonResponse(res.asDict())
python
def post(request): """Sets a key to a value on the currently logged in users preferences :param key: Key to set :type key: str :param val: Value to set :type val: primitive :returns: json """ data = request.POST or json.loads(request.body)['body'] key = data.get('key', None) val = data.get('val', None) res = Result() if key is not None and val is not None: obj, created = UserPref.objects.get_or_create(user=request.user) if created: obj.data = json.dumps(DefaultPrefs.copy()) obj.save() try: val = json.loads(val) except (TypeError, ValueError): pass obj.setKey(key, val) obj.save() res.append(obj.json()) return JsonResponse(res.asDict())
[ "def", "post", "(", "request", ")", ":", "data", "=", "request", ".", "POST", "or", "json", ".", "loads", "(", "request", ".", "body", ")", "[", "'body'", "]", "key", "=", "data", ".", "get", "(", "'key'", ",", "None", ")", "val", "=", "data", ...
Sets a key to a value on the currently logged in users preferences :param key: Key to set :type key: str :param val: Value to set :type val: primitive :returns: json
[ "Sets", "a", "key", "to", "a", "value", "on", "the", "currently", "logged", "in", "users", "preferences" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/userpref.py#L66-L92
mk-fg/feedjack
feedjack/fjupdate.py
get_modified_date
def get_modified_date(parsed, raw): 'Return best possible guess to post modification timestamp.' if parsed: return feedparser_ts(parsed) if not raw: return None # Parse weird timestamps that feedparser can't handle, e.g.: July 30, 2013 ts, val = None, raw.replace('_', ' ') if not ts: # coreutils' "date" parses virtually everything, but is more expensive to use from subprocess import Popen, PIPE with open(os.devnull, 'w') as devnull: proc = Popen(['date', '+%s', '-d', val], stdout=PIPE, stderr=devnull) val = proc.stdout.read() if not proc.wait(): ts = datetime.fromtimestamp(int(val.strip()), tz=timezone.utc) if ts: return ts raise ValueError('Unrecognized raw value format: {0!r}'.format(val))
python
def get_modified_date(parsed, raw): 'Return best possible guess to post modification timestamp.' if parsed: return feedparser_ts(parsed) if not raw: return None # Parse weird timestamps that feedparser can't handle, e.g.: July 30, 2013 ts, val = None, raw.replace('_', ' ') if not ts: # coreutils' "date" parses virtually everything, but is more expensive to use from subprocess import Popen, PIPE with open(os.devnull, 'w') as devnull: proc = Popen(['date', '+%s', '-d', val], stdout=PIPE, stderr=devnull) val = proc.stdout.read() if not proc.wait(): ts = datetime.fromtimestamp(int(val.strip()), tz=timezone.utc) if ts: return ts raise ValueError('Unrecognized raw value format: {0!r}'.format(val))
[ "def", "get_modified_date", "(", "parsed", ",", "raw", ")", ":", "if", "parsed", ":", "return", "feedparser_ts", "(", "parsed", ")", "if", "not", "raw", ":", "return", "None", "# Parse weird timestamps that feedparser can't handle, e.g.: July 30, 2013", "ts", ",", "...
Return best possible guess to post modification timestamp.
[ "Return", "best", "possible", "guess", "to", "post", "modification", "timestamp", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjupdate.py#L61-L77
mk-fg/feedjack
feedjack/fjupdate.py
FeedProcessor.process_entry
def process_entry(self, entry): 'Construct a Post from a feedparser entry and save/update it in db' from feedjack.models import Post, Tag ## Construct a Post object from feedparser entry (FeedParserDict) post = Post(feed=self.feed) post.link = entry.get('link', self.feed.link) post.title = entry.get('title', post.link) post.guid = self._get_guid(entry) if 'author_detail' in entry: post.author = entry.author_detail.get('name', '') post.author_email = entry.author_detail.get('email', '') if not post.author: post.author = entry.get('author', entry.get('creator', '')) if not post.author_email: post.author_email = 'nospam@nospam.com' try: post.content = entry.content[0].value except: post.content = entry.get('summary', entry.get('description', '')) # Try to get the post date from "updated" then "published" then "created" ts_parsed = ts_raw = None for k in self.post_timestamp_keys: try: post.date_modified = get_modified_date( entry.get('{0}_parsed'.format(k)), entry.get(k) ) except ValueError as err: log.warn( 'Failed to process post timestamp:' ' {0} (feed_id: {1}, post_guid: {2})'.format(err, self.feed.id, post.guid) ) if post.date_modified: break post.comments = entry.get('comments', '') enclosures = entry.get('enclosures', list()) if 'media_content' in entry: for mc in entry.media_content: if 'url' in mc: e = dict(href=mc['url'], medium=mc.get('medium', 'image')) else: e = entry.media_content e['type'] = 'application/x-media-content' # special ct for these things enclosures.append(e) assert enclosures, enclosures post.enclosures = enclosures ## Get a list of tag objects from an entry # Note that these objects can't go into m2m field until properly saved fcat = list() if entry.has_key('tags'): for tcat in entry.tags: qcat = tcat.label if tcat.label is not None else tcat.term if not qcat: continue qcat = qcat.strip() if ',' in qcat or '/' in qcat: qcat = qcat.replace(',', '/').split('/') else: qcat = [qcat] for zcat in qcat: tagname = ' 
'.join(zcat.lower().split()).strip()[:255] if not tagname: continue if not Tag.objects.filter(name=tagname): cobj = Tag(name=tagname) cobj.save() fcat.append(Tag.objects.get(name=tagname)) ## Some feedback post_base_fields = 'title link guid author author_email'.split() log.debug('[{0}] Entry\n{1}'.format(self.feed.id, '\n'.join( [' {0}: {1}'.format(key, getattr(post, key)) for key in post_base_fields] + ['tags: {0}'.format(' '.join(it.imap(op.attrgetter('name'), fcat)))] ))) ## Store / update a post if post.guid in self.postdict: # post exists, update if it was modified (and feed is mutable) post_old = self.postdict[post.guid] changed = post_old.content != post.content or ( post.date_modified and post_old.date_modified != post.date_modified ) if not self.feed.immutable and changed: retval = ENTRY_UPDATED log.extra('[{0}] Updating existing post: {1}'.format(self.feed.id, post.link)) # Update fields for field in post_base_fields + ['content', 'comments']: setattr(post_old, field, getattr(post, field)) post_old.date_modified = post.date_modified or post_old.date_modified # Update tags post_old.tags.clear() for tcat in fcat: post_old.tags.add(tcat) post_old.save() else: retval = ENTRY_SAME log.extra( ( '[{0}] Post has not changed: {1}' if not changed else '[{0}] Post changed, but feed is marked as immutable: {1}' )\ .format(self.feed.id, post.link) ) else: # new post, store it into database retval = ENTRY_NEW log.extra( '[{0}] Saving new post: {1} (timestamp: {2})'\ .format(self.feed.id, post.guid, post.date_modified) ) # Try hard to set date_modified: feed.modified, http.modified and now() as a last resort if not post.date_modified and self.fpf: try: post.date_modified = get_modified_date( self.fpf.feed.get('modified_parsed') or self.fpf.get('modified_parsed'), self.fpf.feed.get('modified') or self.fpf.get('modified') ) except ValueError as err: log.warn(( 'Failed to process feed/http timestamp: {0} (feed_id: {1},' ' post_guid: {2}), falling back to "now"' 
).format(err, self.feed.id, post.guid)) if not post.date_modified: post.date_modified = timezone.now() log.debug(( '[{0}] Using current time for post' ' ({1}) timestamp' ).format(self.feed.id, post.guid)) else: log.debug( '[{0}] Using timestamp from feed/http for post ({1}): {2}'\ .format(self.feed.id, post.guid, post.date_modified) ) if self.options.hidden: post.hidden = True try: post.save() except IntegrityError: log.error( 'IntegrityError while saving (supposedly) new'\ ' post with guid: {0.guid}, link: {0.link}, title: {0.title}'.format(post) ) raise for tcat in fcat: post.tags.add(tcat) self.postdict[post.guid] = post return retval
python
def process_entry(self, entry): 'Construct a Post from a feedparser entry and save/update it in db' from feedjack.models import Post, Tag ## Construct a Post object from feedparser entry (FeedParserDict) post = Post(feed=self.feed) post.link = entry.get('link', self.feed.link) post.title = entry.get('title', post.link) post.guid = self._get_guid(entry) if 'author_detail' in entry: post.author = entry.author_detail.get('name', '') post.author_email = entry.author_detail.get('email', '') if not post.author: post.author = entry.get('author', entry.get('creator', '')) if not post.author_email: post.author_email = 'nospam@nospam.com' try: post.content = entry.content[0].value except: post.content = entry.get('summary', entry.get('description', '')) # Try to get the post date from "updated" then "published" then "created" ts_parsed = ts_raw = None for k in self.post_timestamp_keys: try: post.date_modified = get_modified_date( entry.get('{0}_parsed'.format(k)), entry.get(k) ) except ValueError as err: log.warn( 'Failed to process post timestamp:' ' {0} (feed_id: {1}, post_guid: {2})'.format(err, self.feed.id, post.guid) ) if post.date_modified: break post.comments = entry.get('comments', '') enclosures = entry.get('enclosures', list()) if 'media_content' in entry: for mc in entry.media_content: if 'url' in mc: e = dict(href=mc['url'], medium=mc.get('medium', 'image')) else: e = entry.media_content e['type'] = 'application/x-media-content' # special ct for these things enclosures.append(e) assert enclosures, enclosures post.enclosures = enclosures ## Get a list of tag objects from an entry # Note that these objects can't go into m2m field until properly saved fcat = list() if entry.has_key('tags'): for tcat in entry.tags: qcat = tcat.label if tcat.label is not None else tcat.term if not qcat: continue qcat = qcat.strip() if ',' in qcat or '/' in qcat: qcat = qcat.replace(',', '/').split('/') else: qcat = [qcat] for zcat in qcat: tagname = ' 
'.join(zcat.lower().split()).strip()[:255] if not tagname: continue if not Tag.objects.filter(name=tagname): cobj = Tag(name=tagname) cobj.save() fcat.append(Tag.objects.get(name=tagname)) ## Some feedback post_base_fields = 'title link guid author author_email'.split() log.debug('[{0}] Entry\n{1}'.format(self.feed.id, '\n'.join( [' {0}: {1}'.format(key, getattr(post, key)) for key in post_base_fields] + ['tags: {0}'.format(' '.join(it.imap(op.attrgetter('name'), fcat)))] ))) ## Store / update a post if post.guid in self.postdict: # post exists, update if it was modified (and feed is mutable) post_old = self.postdict[post.guid] changed = post_old.content != post.content or ( post.date_modified and post_old.date_modified != post.date_modified ) if not self.feed.immutable and changed: retval = ENTRY_UPDATED log.extra('[{0}] Updating existing post: {1}'.format(self.feed.id, post.link)) # Update fields for field in post_base_fields + ['content', 'comments']: setattr(post_old, field, getattr(post, field)) post_old.date_modified = post.date_modified or post_old.date_modified # Update tags post_old.tags.clear() for tcat in fcat: post_old.tags.add(tcat) post_old.save() else: retval = ENTRY_SAME log.extra( ( '[{0}] Post has not changed: {1}' if not changed else '[{0}] Post changed, but feed is marked as immutable: {1}' )\ .format(self.feed.id, post.link) ) else: # new post, store it into database retval = ENTRY_NEW log.extra( '[{0}] Saving new post: {1} (timestamp: {2})'\ .format(self.feed.id, post.guid, post.date_modified) ) # Try hard to set date_modified: feed.modified, http.modified and now() as a last resort if not post.date_modified and self.fpf: try: post.date_modified = get_modified_date( self.fpf.feed.get('modified_parsed') or self.fpf.get('modified_parsed'), self.fpf.feed.get('modified') or self.fpf.get('modified') ) except ValueError as err: log.warn(( 'Failed to process feed/http timestamp: {0} (feed_id: {1},' ' post_guid: {2}), falling back to "now"' 
).format(err, self.feed.id, post.guid)) if not post.date_modified: post.date_modified = timezone.now() log.debug(( '[{0}] Using current time for post' ' ({1}) timestamp' ).format(self.feed.id, post.guid)) else: log.debug( '[{0}] Using timestamp from feed/http for post ({1}): {2}'\ .format(self.feed.id, post.guid, post.date_modified) ) if self.options.hidden: post.hidden = True try: post.save() except IntegrityError: log.error( 'IntegrityError while saving (supposedly) new'\ ' post with guid: {0.guid}, link: {0.link}, title: {0.title}'.format(post) ) raise for tcat in fcat: post.tags.add(tcat) self.postdict[post.guid] = post return retval
[ "def", "process_entry", "(", "self", ",", "entry", ")", ":", "from", "feedjack", ".", "models", "import", "Post", ",", "Tag", "## Construct a Post object from feedparser entry (FeedParserDict)", "post", "=", "Post", "(", "feed", "=", "self", ".", "feed", ")", "p...
Construct a Post from a feedparser entry and save/update it in db
[ "Construct", "a", "Post", "from", "a", "feedparser", "entry", "and", "save", "/", "update", "it", "in", "db" ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjupdate.py#L110-L235
mk-fg/feedjack
feedjack/fjupdate.py
FeedProcessor._process
def _process(self): 'Downloads and parses a feed.' ret_values = { ENTRY_NEW: 0, ENTRY_UPDATED: 0, ENTRY_SAME: 0, ENTRY_ERR: 0 } report_errors = not self.options.report_after\ or not self.feed.last_checked\ or (self.feed.last_checked + self.options.report_after < timezone.now()) feedparser_kws = dict() if sys.hexversion >= 0x2070900 and not self.feed.verify_tls_certs: import urllib2, ssl ctx = ssl.create_default_context() ctx.check_hostname, ctx.verify_mode = False, ssl.CERT_NONE feedparser_kws['handlers'] = [urllib2.HTTPSHandler(context=ctx)] try: self.fpf = feedparser.parse( self.feed.feed_url, agent=USER_AGENT, etag=self.feed.etag if not self.options.force else '', **feedparser_kws ) except KeyboardInterrupt: raise except: if report_errors: log.error( 'Feed cannot be parsed: {0} (#{1})'\ .format(self.feed.feed_url, self.feed.id) ) return FEED_ERRPARSE, ret_values if hasattr(self.fpf, 'status'): log.extra('[{0}] HTTP status {1}: {2}'.format( self.feed.id, self.fpf.status, self.feed.feed_url )) if self.fpf.status == 304: log.extra(( '[{0}] Feed has not changed since ' 'last check: {1}' ).format(self.feed.id, self.feed.feed_url)) # Fast-path: just update last_checked timestamp self.feed.last_checked = timezone.now() self.feed.save() return FEED_SAME, ret_values if self.fpf.status >= 400: if report_errors: log.warn('[{0}] HTTP error {1}: {2}'.format( self.feed.id, self.fpf.status, self.feed.feed_url )) return FEED_ERRFETCH, ret_values if self.fpf.bozo: bozo = getattr(self.fpf, 'bozo_exception', 'unknown error') if not self.feed.skip_errors: if report_errors: log.warn( '[{0}] Failed to fetch feed: {1} ({2})'\ .format(self.feed.id, self.feed.feed_url, bozo) ) return FEED_ERRFETCH, ret_values elif report_errors: log.info( '[{0}] Skipped feed error: {1} ({2})'\ .format(self.feed.id, self.feed.feed_url, bozo) ) self.feed.title = self.fpf.feed.get('title', '')[:200] self.feed.tagline = self.fpf.feed.get('tagline', '') self.feed.link = self.fpf.feed.get('link', '') 
self.feed.last_checked = timezone.now() log.debug('[{0}] Feed info for: {1}\n{2}'.format( self.feed.id, self.feed.feed_url, '\n'.join( ' {0}: {1}'.format(key, getattr(self.feed, key)) for key in ['title', 'tagline', 'link', 'last_checked'] ))) guids = filter(None, it.imap(self._get_guid, self.fpf.entries)) if guids: from feedjack.models import Post self.postdict = dict( (post.guid, post) for post in Post.objects.filter( feed=self.feed.id, guid__in=guids ) ) if self.options.max_diff: # Do not calculate diff for empty (probably just-added) feeds if not self.postdict and Post.objects.filter(feed=self.feed.id).count() == 0: diff = 0 else: diff = op.truediv(len(guids) - len(self.postdict), len(guids)) * 100 if diff > self.options.max_diff: log.warn( '[{0}] Feed validation failed: {1} (diff: {2}% > {3}%)'\ .format(self.feed.id, self.feed.feed_url, round(diff, 1), self.options.max_diff) ) return FEED_INVALID, ret_values else: self.postdict = dict() self.feed.save() # etag/mtime aren't updated yet for entry in self.fpf.entries: try: with transaction.atomic(): ret_entry = self.process_entry(entry) except: print_exc(self.feed.id) ret_entry = ENTRY_ERR ret_values[ret_entry] += 1 if not ret_values[ENTRY_ERR]: # etag/mtime updated only if there's no errors self.feed.etag = self.fpf.get('etag') or '' try: self.feed.last_modified = feedparser_ts(self.fpf.modified_parsed) except AttributeError: pass self.feed.save() return FEED_OK if ret_values[ENTRY_NEW]\ or ret_values[ENTRY_UPDATED] else FEED_SAME, ret_values
python
def _process(self): 'Downloads and parses a feed.' ret_values = { ENTRY_NEW: 0, ENTRY_UPDATED: 0, ENTRY_SAME: 0, ENTRY_ERR: 0 } report_errors = not self.options.report_after\ or not self.feed.last_checked\ or (self.feed.last_checked + self.options.report_after < timezone.now()) feedparser_kws = dict() if sys.hexversion >= 0x2070900 and not self.feed.verify_tls_certs: import urllib2, ssl ctx = ssl.create_default_context() ctx.check_hostname, ctx.verify_mode = False, ssl.CERT_NONE feedparser_kws['handlers'] = [urllib2.HTTPSHandler(context=ctx)] try: self.fpf = feedparser.parse( self.feed.feed_url, agent=USER_AGENT, etag=self.feed.etag if not self.options.force else '', **feedparser_kws ) except KeyboardInterrupt: raise except: if report_errors: log.error( 'Feed cannot be parsed: {0} (#{1})'\ .format(self.feed.feed_url, self.feed.id) ) return FEED_ERRPARSE, ret_values if hasattr(self.fpf, 'status'): log.extra('[{0}] HTTP status {1}: {2}'.format( self.feed.id, self.fpf.status, self.feed.feed_url )) if self.fpf.status == 304: log.extra(( '[{0}] Feed has not changed since ' 'last check: {1}' ).format(self.feed.id, self.feed.feed_url)) # Fast-path: just update last_checked timestamp self.feed.last_checked = timezone.now() self.feed.save() return FEED_SAME, ret_values if self.fpf.status >= 400: if report_errors: log.warn('[{0}] HTTP error {1}: {2}'.format( self.feed.id, self.fpf.status, self.feed.feed_url )) return FEED_ERRFETCH, ret_values if self.fpf.bozo: bozo = getattr(self.fpf, 'bozo_exception', 'unknown error') if not self.feed.skip_errors: if report_errors: log.warn( '[{0}] Failed to fetch feed: {1} ({2})'\ .format(self.feed.id, self.feed.feed_url, bozo) ) return FEED_ERRFETCH, ret_values elif report_errors: log.info( '[{0}] Skipped feed error: {1} ({2})'\ .format(self.feed.id, self.feed.feed_url, bozo) ) self.feed.title = self.fpf.feed.get('title', '')[:200] self.feed.tagline = self.fpf.feed.get('tagline', '') self.feed.link = self.fpf.feed.get('link', '') 
self.feed.last_checked = timezone.now() log.debug('[{0}] Feed info for: {1}\n{2}'.format( self.feed.id, self.feed.feed_url, '\n'.join( ' {0}: {1}'.format(key, getattr(self.feed, key)) for key in ['title', 'tagline', 'link', 'last_checked'] ))) guids = filter(None, it.imap(self._get_guid, self.fpf.entries)) if guids: from feedjack.models import Post self.postdict = dict( (post.guid, post) for post in Post.objects.filter( feed=self.feed.id, guid__in=guids ) ) if self.options.max_diff: # Do not calculate diff for empty (probably just-added) feeds if not self.postdict and Post.objects.filter(feed=self.feed.id).count() == 0: diff = 0 else: diff = op.truediv(len(guids) - len(self.postdict), len(guids)) * 100 if diff > self.options.max_diff: log.warn( '[{0}] Feed validation failed: {1} (diff: {2}% > {3}%)'\ .format(self.feed.id, self.feed.feed_url, round(diff, 1), self.options.max_diff) ) return FEED_INVALID, ret_values else: self.postdict = dict() self.feed.save() # etag/mtime aren't updated yet for entry in self.fpf.entries: try: with transaction.atomic(): ret_entry = self.process_entry(entry) except: print_exc(self.feed.id) ret_entry = ENTRY_ERR ret_values[ret_entry] += 1 if not ret_values[ENTRY_ERR]: # etag/mtime updated only if there's no errors self.feed.etag = self.fpf.get('etag') or '' try: self.feed.last_modified = feedparser_ts(self.fpf.modified_parsed) except AttributeError: pass self.feed.save() return FEED_OK if ret_values[ENTRY_NEW]\ or ret_values[ENTRY_UPDATED] else FEED_SAME, ret_values
[ "def", "_process", "(", "self", ")", ":", "ret_values", "=", "{", "ENTRY_NEW", ":", "0", ",", "ENTRY_UPDATED", ":", "0", ",", "ENTRY_SAME", ":", "0", ",", "ENTRY_ERR", ":", "0", "}", "report_errors", "=", "not", "self", ".", "options", ".", "report_aft...
Downloads and parses a feed.
[ "Downloads", "and", "parses", "a", "feed", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjupdate.py#L251-L351
bcho/gzbus
gzbus/query.py
query_realtime_routine
def query_realtime_routine(bus_name, cur_station=None): '''Get real time routine. TODO support fuzzy matching. :param bus_name: the routine name of the bus. :param cur_station: current station, deaults to starting station of the routine. ''' routines = query_routines(bus_name) if not routines: return rv = [] for routine in routines: bid = routine['bid'] _cur_station = cur_station or routine['starting_station'] page = _get_realtime_page(bus_name, bid, _cur_station) rv.append(extract_bus_routine(page)) return rv
python
def query_realtime_routine(bus_name, cur_station=None): '''Get real time routine. TODO support fuzzy matching. :param bus_name: the routine name of the bus. :param cur_station: current station, deaults to starting station of the routine. ''' routines = query_routines(bus_name) if not routines: return rv = [] for routine in routines: bid = routine['bid'] _cur_station = cur_station or routine['starting_station'] page = _get_realtime_page(bus_name, bid, _cur_station) rv.append(extract_bus_routine(page)) return rv
[ "def", "query_realtime_routine", "(", "bus_name", ",", "cur_station", "=", "None", ")", ":", "routines", "=", "query_routines", "(", "bus_name", ")", "if", "not", "routines", ":", "return", "rv", "=", "[", "]", "for", "routine", "in", "routines", ":", "bid...
Get real time routine. TODO support fuzzy matching. :param bus_name: the routine name of the bus. :param cur_station: current station, deaults to starting station of the routine.
[ "Get", "real", "time", "routine", "." ]
train
https://github.com/bcho/gzbus/blob/4dd2cc2e5068331d0f4bed885cf999a1d107b8b4/gzbus/query.py#L44-L64
theiviaxx/Frog
frog/__init__.py
getRoot
def getRoot(): """Convenience to return the media root with forward slashes""" root = settings.MEDIA_ROOT.replace('\\', '/') if not root.endswith('/'): root += '/' return path.Path(root)
python
def getRoot(): """Convenience to return the media root with forward slashes""" root = settings.MEDIA_ROOT.replace('\\', '/') if not root.endswith('/'): root += '/' return path.Path(root)
[ "def", "getRoot", "(", ")", ":", "root", "=", "settings", ".", "MEDIA_ROOT", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "if", "not", "root", ".", "endswith", "(", "'/'", ")", ":", "root", "+=", "'/'", "return", "path", ".", "Path", "(", "root"...
Convenience to return the media root with forward slashes
[ "Convenience", "to", "return", "the", "media", "root", "with", "forward", "slashes" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/__init__.py#L27-L33
theiviaxx/Frog
frog/video_thread.py
emailUser
def emailUser(video, error=None): """Emails the author of the video that it has finished processing""" html = render_to_string('frog/video_email.html', { 'user': video.author, 'error': error, 'video': video, 'SITE_URL': FROG_SITE_URL, }) subject, from_email, to = 'Video Processing Finished{}'.format(error or ''), 'noreply@frogmediaserver.com', video.author.email text_content = 'This is an important message.' html_content = html send_mail(subject, text_content, from_email, [to], html_message=html_content)
python
def emailUser(video, error=None): """Emails the author of the video that it has finished processing""" html = render_to_string('frog/video_email.html', { 'user': video.author, 'error': error, 'video': video, 'SITE_URL': FROG_SITE_URL, }) subject, from_email, to = 'Video Processing Finished{}'.format(error or ''), 'noreply@frogmediaserver.com', video.author.email text_content = 'This is an important message.' html_content = html send_mail(subject, text_content, from_email, [to], html_message=html_content)
[ "def", "emailUser", "(", "video", ",", "error", "=", "None", ")", ":", "html", "=", "render_to_string", "(", "'frog/video_email.html'", ",", "{", "'user'", ":", "video", ".", "author", ",", "'error'", ":", "error", ",", "'video'", ":", "video", ",", "'SI...
Emails the author of the video that it has finished processing
[ "Emails", "the", "author", "of", "the", "video", "that", "it", "has", "finished", "processing" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/video_thread.py#L139-L151
gmr/tredis
tredis/sortedsets.py
SortedSetsMixin.zadd
def zadd(self, key, *members, **kwargs): """Adds all the specified members with the specified scores to the sorted set stored at key. It is possible to specify multiple score / member pairs. If a specified member is already a member of the sorted set, the score is updated and the element reinserted at the right position to ensure the correct ordering. If key does not exist, a new sorted set with the specified members as sole members is created, like if the sorted set was empty. If the key exists but does not hold a sorted set, an error is returned. The score values should be the string representation of a double precision floating point number. +inf and -inf values are valid values as well. **Members parameters** ``members`` could be either: - a single dict where keys correspond to scores and values to elements - multiple strings paired as score then element .. code:: python yield client.zadd('myzset', {'1': 'one', '2': 'two'}) yield client.zadd('myzset', '1', 'one', '2', 'two') **ZADD options (Redis 3.0.2 or greater)** ZADD supports a list of options. Options are: - ``xx``: Only update elements that already exist. Never add elements. - ``nx``: Don't update already existing elements. Always add new elements. - ``ch``: Modify the return value from the number of new elements added, to the total number of elements changed (CH is an abbreviation of changed). Changed elements are new elements added and elements already existing for which the score was updated. So elements specified in the command having the same score as they had in the past are not counted. Note: normally the return value of ``ZADD`` only counts the number of new elements added. - ``incr``: When this option is specified ``ZADD`` acts like :meth:`~tredis.RedisClient.zincrby`. Only one score-element pair can be specified in this mode. .. note:: **Time complexity**: ``O(log(N))`` for each item added, where ``N`` is the number of elements in the sorted set. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param members: Elements to add :type members: :class:`dict`, :class:`str`, :class:`bytes` :keyword bool xx: Only update elements that already exist :keyword bool nx: Don't update already existing elements :keyword bool ch: Return the number of changed elements :keyword bool incr: Increment the score of an element :rtype: int, :class:`str`, :class:`bytes` :returns: Number of elements changed, or the new score if incr is set :raises: :exc:`~tredis.exceptions.RedisError` """ xx = kwargs.pop('xx', False) nx = kwargs.pop('nx', False) ch = kwargs.pop('ch', False) incr = kwargs.pop('incr', False) command = [b'ZADD', key] if xx: command += ['XX'] if nx: command += ['NX'] if ch: command += ['CH'] if incr: command += ['INCR'] if len(members) == 1: for k in members[0]: command += [k, members[0][k]] else: command += list(members) return self._execute(command)
python
def zadd(self, key, *members, **kwargs): """Adds all the specified members with the specified scores to the sorted set stored at key. It is possible to specify multiple score / member pairs. If a specified member is already a member of the sorted set, the score is updated and the element reinserted at the right position to ensure the correct ordering. If key does not exist, a new sorted set with the specified members as sole members is created, like if the sorted set was empty. If the key exists but does not hold a sorted set, an error is returned. The score values should be the string representation of a double precision floating point number. +inf and -inf values are valid values as well. **Members parameters** ``members`` could be either: - a single dict where keys correspond to scores and values to elements - multiple strings paired as score then element .. code:: python yield client.zadd('myzset', {'1': 'one', '2': 'two'}) yield client.zadd('myzset', '1', 'one', '2', 'two') **ZADD options (Redis 3.0.2 or greater)** ZADD supports a list of options. Options are: - ``xx``: Only update elements that already exist. Never add elements. - ``nx``: Don't update already existing elements. Always add new elements. - ``ch``: Modify the return value from the number of new elements added, to the total number of elements changed (CH is an abbreviation of changed). Changed elements are new elements added and elements already existing for which the score was updated. So elements specified in the command having the same score as they had in the past are not counted. Note: normally the return value of ``ZADD`` only counts the number of new elements added. - ``incr``: When this option is specified ``ZADD`` acts like :meth:`~tredis.RedisClient.zincrby`. Only one score-element pair can be specified in this mode. .. note:: **Time complexity**: ``O(log(N))`` for each item added, where ``N`` is the number of elements in the sorted set. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param members: Elements to add :type members: :class:`dict`, :class:`str`, :class:`bytes` :keyword bool xx: Only update elements that already exist :keyword bool nx: Don't update already existing elements :keyword bool ch: Return the number of changed elements :keyword bool incr: Increment the score of an element :rtype: int, :class:`str`, :class:`bytes` :returns: Number of elements changed, or the new score if incr is set :raises: :exc:`~tredis.exceptions.RedisError` """ xx = kwargs.pop('xx', False) nx = kwargs.pop('nx', False) ch = kwargs.pop('ch', False) incr = kwargs.pop('incr', False) command = [b'ZADD', key] if xx: command += ['XX'] if nx: command += ['NX'] if ch: command += ['CH'] if incr: command += ['INCR'] if len(members) == 1: for k in members[0]: command += [k, members[0][k]] else: command += list(members) return self._execute(command)
[ "def", "zadd", "(", "self", ",", "key", ",", "*", "members", ",", "*", "*", "kwargs", ")", ":", "xx", "=", "kwargs", ".", "pop", "(", "'xx'", ",", "False", ")", "nx", "=", "kwargs", ".", "pop", "(", "'nx'", ",", "False", ")", "ch", "=", "kwar...
Adds all the specified members with the specified scores to the sorted set stored at key. It is possible to specify multiple score / member pairs. If a specified member is already a member of the sorted set, the score is updated and the element reinserted at the right position to ensure the correct ordering. If key does not exist, a new sorted set with the specified members as sole members is created, like if the sorted set was empty. If the key exists but does not hold a sorted set, an error is returned. The score values should be the string representation of a double precision floating point number. +inf and -inf values are valid values as well. **Members parameters** ``members`` could be either: - a single dict where keys correspond to scores and values to elements - multiple strings paired as score then element .. code:: python yield client.zadd('myzset', {'1': 'one', '2': 'two'}) yield client.zadd('myzset', '1', 'one', '2', 'two') **ZADD options (Redis 3.0.2 or greater)** ZADD supports a list of options. Options are: - ``xx``: Only update elements that already exist. Never add elements. - ``nx``: Don't update already existing elements. Always add new elements. - ``ch``: Modify the return value from the number of new elements added, to the total number of elements changed (CH is an abbreviation of changed). Changed elements are new elements added and elements already existing for which the score was updated. So elements specified in the command having the same score as they had in the past are not counted. Note: normally the return value of ``ZADD`` only counts the number of new elements added. - ``incr``: When this option is specified ``ZADD`` acts like :meth:`~tredis.RedisClient.zincrby`. Only one score-element pair can be specified in this mode. .. note:: **Time complexity**: ``O(log(N))`` for each item added, where ``N`` is the number of elements in the sorted set. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param members: Elements to add :type members: :class:`dict`, :class:`str`, :class:`bytes` :keyword bool xx: Only update elements that already exist :keyword bool nx: Don't update already existing elements :keyword bool ch: Return the number of changed elements :keyword bool incr: Increment the score of an element :rtype: int, :class:`str`, :class:`bytes` :returns: Number of elements changed, or the new score if incr is set :raises: :exc:`~tredis.exceptions.RedisError`
[ "Adds", "all", "the", "specified", "members", "with", "the", "specified", "scores", "to", "the", "sorted", "set", "stored", "at", "key", ".", "It", "is", "possible", "to", "specify", "multiple", "score", "/", "member", "pairs", ".", "If", "a", "specified",...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/sortedsets.py#L7-L88
gmr/tredis
tredis/sortedsets.py
SortedSetsMixin.zrange
def zrange(self, key, start=0, stop=-1, with_scores=False): """Returns the specified range of elements in the sorted set stored at key. The elements are considered to be ordered from the lowest to the highest score. Lexicographical order is used for elements with equal score. See :meth:`tredis.Client.zrevrange` when you need the elements ordered from highest to lowest score (and descending lexicographical order for elements with equal score). Both start and stop are zero-based indexes, where ``0`` is the first element, ``1`` is the next element and so on. They can also be negative numbers indicating offsets from the end of the sorted set, with ``-1`` being the last element of the sorted set, ``-2`` the penultimate element and so on. ``start`` and ``stop`` are inclusive ranges, so for example ``ZRANGE myzset 0 1`` will return both the first and the second element of the sorted set. Out of range indexes will not produce an error. If start is larger than the largest index in the sorted set, or ``start > stop``, an empty list is returned. If stop is larger than the end of the sorted set Redis will treat it like it is the last element of the sorted set. It is possible to pass the ``WITHSCORES`` option in order to return the scores of the elements together with the elements. The returned list will contain ``value1,score1,...,valueN,scoreN`` instead of ``value1,...,valueN``. Client libraries are free to return a more appropriate data type (suggestion: an array with (value, score) arrays/tuples). .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements returned. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param int start: The starting index of the sorted set :param int stop: The ending index of the sorted set :param bool with_scores: Return the scores with the elements :rtype: list :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'ZRANGE', key, start, stop] if with_scores: command += ['WITHSCORES'] return self._execute(command)
python
def zrange(self, key, start=0, stop=-1, with_scores=False): """Returns the specified range of elements in the sorted set stored at key. The elements are considered to be ordered from the lowest to the highest score. Lexicographical order is used for elements with equal score. See :meth:`tredis.Client.zrevrange` when you need the elements ordered from highest to lowest score (and descending lexicographical order for elements with equal score). Both start and stop are zero-based indexes, where ``0`` is the first element, ``1`` is the next element and so on. They can also be negative numbers indicating offsets from the end of the sorted set, with ``-1`` being the last element of the sorted set, ``-2`` the penultimate element and so on. ``start`` and ``stop`` are inclusive ranges, so for example ``ZRANGE myzset 0 1`` will return both the first and the second element of the sorted set. Out of range indexes will not produce an error. If start is larger than the largest index in the sorted set, or ``start > stop``, an empty list is returned. If stop is larger than the end of the sorted set Redis will treat it like it is the last element of the sorted set. It is possible to pass the ``WITHSCORES`` option in order to return the scores of the elements together with the elements. The returned list will contain ``value1,score1,...,valueN,scoreN`` instead of ``value1,...,valueN``. Client libraries are free to return a more appropriate data type (suggestion: an array with (value, score) arrays/tuples). .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements returned. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param int start: The starting index of the sorted set :param int stop: The ending index of the sorted set :param bool with_scores: Return the scores with the elements :rtype: list :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'ZRANGE', key, start, stop] if with_scores: command += ['WITHSCORES'] return self._execute(command)
[ "def", "zrange", "(", "self", ",", "key", ",", "start", "=", "0", ",", "stop", "=", "-", "1", ",", "with_scores", "=", "False", ")", ":", "command", "=", "[", "b'ZRANGE'", ",", "key", ",", "start", ",", "stop", "]", "if", "with_scores", ":", "com...
Returns the specified range of elements in the sorted set stored at key. The elements are considered to be ordered from the lowest to the highest score. Lexicographical order is used for elements with equal score. See :meth:`tredis.Client.zrevrange` when you need the elements ordered from highest to lowest score (and descending lexicographical order for elements with equal score). Both start and stop are zero-based indexes, where ``0`` is the first element, ``1`` is the next element and so on. They can also be negative numbers indicating offsets from the end of the sorted set, with ``-1`` being the last element of the sorted set, ``-2`` the penultimate element and so on. ``start`` and ``stop`` are inclusive ranges, so for example ``ZRANGE myzset 0 1`` will return both the first and the second element of the sorted set. Out of range indexes will not produce an error. If start is larger than the largest index in the sorted set, or ``start > stop``, an empty list is returned. If stop is larger than the end of the sorted set Redis will treat it like it is the last element of the sorted set. It is possible to pass the ``WITHSCORES`` option in order to return the scores of the elements together with the elements. The returned list will contain ``value1,score1,...,valueN,scoreN`` instead of ``value1,...,valueN``. Client libraries are free to return a more appropriate data type (suggestion: an array with (value, score) arrays/tuples). .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements returned. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param int start: The starting index of the sorted set :param int stop: The ending index of the sorted set :param bool with_scores: Return the scores with the elements :rtype: list :raises: :exc:`~tredis.exceptions.RedisError`
[ "Returns", "the", "specified", "range", "of", "elements", "in", "the", "sorted", "set", "stored", "at", "key", ".", "The", "elements", "are", "considered", "to", "be", "ordered", "from", "the", "lowest", "to", "the", "highest", "score", ".", "Lexicographical...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/sortedsets.py#L106-L156
gmr/tredis
tredis/sortedsets.py
SortedSetsMixin.zrangebyscore
def zrangebyscore(self, key, min_score, max_score, with_scores=False, offset=0, count=0): """Returns all the elements in the sorted set at key with a score between min and max (including elements with score equal to min or max). The elements are considered to be ordered from low to high scores. The elements having the same score are returned in lexicographical order (this follows from a property of the sorted set implementation in Redis and does not involve further computation). The optional ``offset`` and ``count`` arguments can be used to only get a range of the matching elements (similar to SELECT LIMIT offset, count in SQL). Keep in mind that if offset is large, the sorted set needs to be traversed for offset elements before getting to the elements to return, which can add up to ``O(N)`` time complexity. The optional ``with_scores`` argument makes the command return both the element and its score, instead of the element alone. This option is available since Redis 2.0. **Exclusive intervals and infinity** ``min_score`` and ``max_score`` can be ``-inf`` and ``+inf``, so that you are not required to know the highest or lowest score in the sorted set to get all elements from or up to a certain score. By default, the interval specified by ``min_score`` and ``max_score`` is closed (inclusive). It is possible to specify an open interval (exclusive) by prefixing the score with the character ``(``. For example: .. code:: ZRANGEBYSCORE zset (1 5 Will return all elements with ``1 < score <= 5`` while: .. code:: ZRANGEBYSCORE zset (5 (10 Will return all the elements with ``5 < score < 10`` (5 and 10 excluded). .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements being returned. If ``M`` is constant (e.g. always asking for the first 10 elements with ``count``), you can consider it ``O(log(N))``. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param min_score: Lowest score definition :type min_score: :class:`str`, :class:`bytes` :param max_score: Highest score definition :type max_score: :class:`str`, :class:`bytes` :param bool with_scores: Return elements and scores :param offset: The number of elements to skip :type min_score: :class:`str`, :class:`bytes` :param count: The number of elements to return :type min_score: :class:`str`, :class:`bytes` :rtype: list :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'ZRANGEBYSCORE', key, min_score, max_score] if with_scores: command += ['WITHSCORES'] if offset or count: command += ['LIMIT', offset, count] return self._execute(command)
python
def zrangebyscore(self, key, min_score, max_score, with_scores=False, offset=0, count=0): """Returns all the elements in the sorted set at key with a score between min and max (including elements with score equal to min or max). The elements are considered to be ordered from low to high scores. The elements having the same score are returned in lexicographical order (this follows from a property of the sorted set implementation in Redis and does not involve further computation). The optional ``offset`` and ``count`` arguments can be used to only get a range of the matching elements (similar to SELECT LIMIT offset, count in SQL). Keep in mind that if offset is large, the sorted set needs to be traversed for offset elements before getting to the elements to return, which can add up to ``O(N)`` time complexity. The optional ``with_scores`` argument makes the command return both the element and its score, instead of the element alone. This option is available since Redis 2.0. **Exclusive intervals and infinity** ``min_score`` and ``max_score`` can be ``-inf`` and ``+inf``, so that you are not required to know the highest or lowest score in the sorted set to get all elements from or up to a certain score. By default, the interval specified by ``min_score`` and ``max_score`` is closed (inclusive). It is possible to specify an open interval (exclusive) by prefixing the score with the character ``(``. For example: .. code:: ZRANGEBYSCORE zset (1 5 Will return all elements with ``1 < score <= 5`` while: .. code:: ZRANGEBYSCORE zset (5 (10 Will return all the elements with ``5 < score < 10`` (5 and 10 excluded). .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements being returned. If ``M`` is constant (e.g. always asking for the first 10 elements with ``count``), you can consider it ``O(log(N))``. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param min_score: Lowest score definition :type min_score: :class:`str`, :class:`bytes` :param max_score: Highest score definition :type max_score: :class:`str`, :class:`bytes` :param bool with_scores: Return elements and scores :param offset: The number of elements to skip :type min_score: :class:`str`, :class:`bytes` :param count: The number of elements to return :type min_score: :class:`str`, :class:`bytes` :rtype: list :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'ZRANGEBYSCORE', key, min_score, max_score] if with_scores: command += ['WITHSCORES'] if offset or count: command += ['LIMIT', offset, count] return self._execute(command)
[ "def", "zrangebyscore", "(", "self", ",", "key", ",", "min_score", ",", "max_score", ",", "with_scores", "=", "False", ",", "offset", "=", "0", ",", "count", "=", "0", ")", ":", "command", "=", "[", "b'ZRANGEBYSCORE'", ",", "key", ",", "min_score", ","...
Returns all the elements in the sorted set at key with a score between min and max (including elements with score equal to min or max). The elements are considered to be ordered from low to high scores. The elements having the same score are returned in lexicographical order (this follows from a property of the sorted set implementation in Redis and does not involve further computation). The optional ``offset`` and ``count`` arguments can be used to only get a range of the matching elements (similar to SELECT LIMIT offset, count in SQL). Keep in mind that if offset is large, the sorted set needs to be traversed for offset elements before getting to the elements to return, which can add up to ``O(N)`` time complexity. The optional ``with_scores`` argument makes the command return both the element and its score, instead of the element alone. This option is available since Redis 2.0. **Exclusive intervals and infinity** ``min_score`` and ``max_score`` can be ``-inf`` and ``+inf``, so that you are not required to know the highest or lowest score in the sorted set to get all elements from or up to a certain score. By default, the interval specified by ``min_score`` and ``max_score`` is closed (inclusive). It is possible to specify an open interval (exclusive) by prefixing the score with the character ``(``. For example: .. code:: ZRANGEBYSCORE zset (1 5 Will return all elements with ``1 < score <= 5`` while: .. code:: ZRANGEBYSCORE zset (5 (10 Will return all the elements with ``5 < score < 10`` (5 and 10 excluded). .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements being returned. If ``M`` is constant (e.g. always asking for the first 10 elements with ``count``), you can consider it ``O(log(N))``. 
:param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param min_score: Lowest score definition :type min_score: :class:`str`, :class:`bytes` :param max_score: Highest score definition :type max_score: :class:`str`, :class:`bytes` :param bool with_scores: Return elements and scores :param offset: The number of elements to skip :type min_score: :class:`str`, :class:`bytes` :param count: The number of elements to return :type min_score: :class:`str`, :class:`bytes` :rtype: list :raises: :exc:`~tredis.exceptions.RedisError`
[ "Returns", "all", "the", "elements", "in", "the", "sorted", "set", "at", "key", "with", "a", "score", "between", "min", "and", "max", "(", "including", "elements", "with", "score", "equal", "to", "min", "or", "max", ")", ".", "The", "elements", "are", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/sortedsets.py#L158-L234
gmr/tredis
tredis/sortedsets.py
SortedSetsMixin.zrem
def zrem(self, key, *members): """Removes the specified members from the sorted set stored at key. Non existing members are ignored. An error is returned when key exists and does not hold a sorted set. .. note:: **Time complexity**: ``O(M*log(N))`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements to be removed. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param members: One or more member values to remove :type members: :class:`str`, :class:`bytes` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'ZREM', key] + list(members))
python
def zrem(self, key, *members): """Removes the specified members from the sorted set stored at key. Non existing members are ignored. An error is returned when key exists and does not hold a sorted set. .. note:: **Time complexity**: ``O(M*log(N))`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements to be removed. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param members: One or more member values to remove :type members: :class:`str`, :class:`bytes` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'ZREM', key] + list(members))
[ "def", "zrem", "(", "self", ",", "key", ",", "*", "members", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'ZREM'", ",", "key", "]", "+", "list", "(", "members", ")", ")" ]
Removes the specified members from the sorted set stored at key. Non existing members are ignored. An error is returned when key exists and does not hold a sorted set. .. note:: **Time complexity**: ``O(M*log(N))`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements to be removed. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param members: One or more member values to remove :type members: :class:`str`, :class:`bytes` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`
[ "Removes", "the", "specified", "members", "from", "the", "sorted", "set", "stored", "at", "key", ".", "Non", "existing", "members", "are", "ignored", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/sortedsets.py#L236-L255
gmr/tredis
tredis/sortedsets.py
SortedSetsMixin.zremrangebyscore
def zremrangebyscore(self, key, min_score, max_score): """Removes all elements in the sorted set stored at key with a score between min and max. Intervals are described in :meth:`~tredis.RedisClient.zrangebyscore`. Returns the number of elements removed. .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and M the number of elements removed by the operation. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param min_score: Lowest score definition :type min_score: :class:`str`, :class:`bytes` :param max_score: Highest score definition :type max_score: :class:`str`, :class:`bytes` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'ZREMRANGEBYSCORE', key, min_score, max_score])
python
def zremrangebyscore(self, key, min_score, max_score): """Removes all elements in the sorted set stored at key with a score between min and max. Intervals are described in :meth:`~tredis.RedisClient.zrangebyscore`. Returns the number of elements removed. .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and M the number of elements removed by the operation. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param min_score: Lowest score definition :type min_score: :class:`str`, :class:`bytes` :param max_score: Highest score definition :type max_score: :class:`str`, :class:`bytes` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'ZREMRANGEBYSCORE', key, min_score, max_score])
[ "def", "zremrangebyscore", "(", "self", ",", "key", ",", "min_score", ",", "max_score", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'ZREMRANGEBYSCORE'", ",", "key", ",", "min_score", ",", "max_score", "]", ")" ]
Removes all elements in the sorted set stored at key with a score between min and max. Intervals are described in :meth:`~tredis.RedisClient.zrangebyscore`. Returns the number of elements removed. .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and M the number of elements removed by the operation. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param min_score: Lowest score definition :type min_score: :class:`str`, :class:`bytes` :param max_score: Highest score definition :type max_score: :class:`str`, :class:`bytes` :rtype: int :raises: :exc:`~tredis.exceptions.RedisError`
[ "Removes", "all", "elements", "in", "the", "sorted", "set", "stored", "at", "key", "with", "a", "score", "between", "min", "and", "max", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/sortedsets.py#L257-L280
gmr/tredis
tredis/sortedsets.py
SortedSetsMixin.zrevrange
def zrevrange(self, key, start=0, stop=-1, with_scores=False): """Returns the specified range of elements in the sorted set stored at key. The elements are considered to be ordered from the highest to the lowest score. Descending lexicographical order is used for elements with equal score. Apart from the reversed ordering, :py:meth:`~tredis.Client.zrevrange` is similar to :py:meth:`~tredis.Client.zrange` . .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements returned. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param int start: The starting index of the sorted set :param int stop: The ending index of the sorted set :param bool with_scores: Return the scores with the elements :rtype: list :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'ZREVRANGE', key, start, stop] if with_scores: command += ['WITHSCORES'] return self._execute(command)
python
def zrevrange(self, key, start=0, stop=-1, with_scores=False): """Returns the specified range of elements in the sorted set stored at key. The elements are considered to be ordered from the highest to the lowest score. Descending lexicographical order is used for elements with equal score. Apart from the reversed ordering, :py:meth:`~tredis.Client.zrevrange` is similar to :py:meth:`~tredis.Client.zrange` . .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements returned. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param int start: The starting index of the sorted set :param int stop: The ending index of the sorted set :param bool with_scores: Return the scores with the elements :rtype: list :raises: :exc:`~tredis.exceptions.RedisError` """ command = [b'ZREVRANGE', key, start, stop] if with_scores: command += ['WITHSCORES'] return self._execute(command)
[ "def", "zrevrange", "(", "self", ",", "key", ",", "start", "=", "0", ",", "stop", "=", "-", "1", ",", "with_scores", "=", "False", ")", ":", "command", "=", "[", "b'ZREVRANGE'", ",", "key", ",", "start", ",", "stop", "]", "if", "with_scores", ":", ...
Returns the specified range of elements in the sorted set stored at key. The elements are considered to be ordered from the highest to the lowest score. Descending lexicographical order is used for elements with equal score. Apart from the reversed ordering, :py:meth:`~tredis.Client.zrevrange` is similar to :py:meth:`~tredis.Client.zrange` . .. note:: **Time complexity**: ``O(log(N)+M)`` with ``N`` being the number of elements in the sorted set and ``M`` the number of elements returned. :param key: The key of the sorted set :type key: :class:`str`, :class:`bytes` :param int start: The starting index of the sorted set :param int stop: The ending index of the sorted set :param bool with_scores: Return the scores with the elements :rtype: list :raises: :exc:`~tredis.exceptions.RedisError`
[ "Returns", "the", "specified", "range", "of", "elements", "in", "the", "sorted", "set", "stored", "at", "key", ".", "The", "elements", "are", "considered", "to", "be", "ordered", "from", "the", "highest", "to", "the", "lowest", "score", ".", "Descending", ...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/sortedsets.py#L282-L309
mk-fg/feedjack
feedjack/fjcache.py
getkey
def getkey(stype, site_id=None, key=None): 'Returns the cache key depending on its type.' base = '{0}.feedjack'.format(settings.CACHE_MIDDLEWARE_KEY_PREFIX) if stype == T_HOST: return '{0}.hostcache'.format(base) elif stype == T_ITEM: return '{0}.{1}.item.{2}'.format(base, site_id, str2md5(key)) elif stype == T_META: return '{0}.{1}.meta'.format(base, site_id) elif stype == T_INTERVAL: return '{0}.interval.{1}'.format(base, str2md5(key))
python
def getkey(stype, site_id=None, key=None): 'Returns the cache key depending on its type.' base = '{0}.feedjack'.format(settings.CACHE_MIDDLEWARE_KEY_PREFIX) if stype == T_HOST: return '{0}.hostcache'.format(base) elif stype == T_ITEM: return '{0}.{1}.item.{2}'.format(base, site_id, str2md5(key)) elif stype == T_META: return '{0}.{1}.meta'.format(base, site_id) elif stype == T_INTERVAL: return '{0}.interval.{1}'.format(base, str2md5(key))
[ "def", "getkey", "(", "stype", ",", "site_id", "=", "None", ",", "key", "=", "None", ")", ":", "base", "=", "'{0}.feedjack'", ".", "format", "(", "settings", ".", "CACHE_MIDDLEWARE_KEY_PREFIX", ")", "if", "stype", "==", "T_HOST", ":", "return", "'{0}.hostc...
Returns the cache key depending on its type.
[ "Returns", "the", "cache", "key", "depending", "on", "its", "type", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjcache.py#L28-L34
mk-fg/feedjack
feedjack/fjcache.py
feed_interval_get
def feed_interval_get(feed_id, parameters): 'Get adaptive interval between checks for a feed.' val = cache.get(getkey( T_INTERVAL, key=feed_interval_key(feed_id, parameters) )) return val if isinstance(val, tuple) else (val, None)
python
def feed_interval_get(feed_id, parameters): 'Get adaptive interval between checks for a feed.' val = cache.get(getkey( T_INTERVAL, key=feed_interval_key(feed_id, parameters) )) return val if isinstance(val, tuple) else (val, None)
[ "def", "feed_interval_get", "(", "feed_id", ",", "parameters", ")", ":", "val", "=", "cache", ".", "get", "(", "getkey", "(", "T_INTERVAL", ",", "key", "=", "feed_interval_key", "(", "feed_id", ",", "parameters", ")", ")", ")", "return", "val", "if", "is...
Get adaptive interval between checks for a feed.
[ "Get", "adaptive", "interval", "between", "checks", "for", "a", "feed", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjcache.py#L50-L54
mk-fg/feedjack
feedjack/fjcache.py
feed_interval_set
def feed_interval_set(feed_id, parameters, interval, interval_ts): 'Set adaptive interval between checks for a feed.' cache.set(getkey( T_INTERVAL, key=feed_interval_key(feed_id, parameters) ), (interval, interval_ts))
python
def feed_interval_set(feed_id, parameters, interval, interval_ts): 'Set adaptive interval between checks for a feed.' cache.set(getkey( T_INTERVAL, key=feed_interval_key(feed_id, parameters) ), (interval, interval_ts))
[ "def", "feed_interval_set", "(", "feed_id", ",", "parameters", ",", "interval", ",", "interval_ts", ")", ":", "cache", ".", "set", "(", "getkey", "(", "T_INTERVAL", ",", "key", "=", "feed_interval_key", "(", "feed_id", ",", "parameters", ")", ")", ",", "("...
Set adaptive interval between checks for a feed.
[ "Set", "adaptive", "interval", "between", "checks", "for", "a", "feed", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjcache.py#L56-L59
mk-fg/feedjack
feedjack/fjcache.py
feed_interval_delete
def feed_interval_delete(feed_id, parameters): 'Invalidate cached adaptive interval value.' cache.delete(getkey( T_INTERVAL, key=feed_interval_key(feed_id, parameters) ))
python
def feed_interval_delete(feed_id, parameters): 'Invalidate cached adaptive interval value.' cache.delete(getkey( T_INTERVAL, key=feed_interval_key(feed_id, parameters) ))
[ "def", "feed_interval_delete", "(", "feed_id", ",", "parameters", ")", ":", "cache", ".", "delete", "(", "getkey", "(", "T_INTERVAL", ",", "key", "=", "feed_interval_key", "(", "feed_id", ",", "parameters", ")", ")", ")" ]
Invalidate cached adaptive interval value.
[ "Invalidate", "cached", "adaptive", "interval", "value", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjcache.py#L61-L64
mk-fg/feedjack
feedjack/fjcache.py
cache_set
def cache_set(site, key, data): '''Sets cache data for a site. All keys related to a site are stored in a meta key. This key is per-site.''' tkey = getkey(T_ITEM, site.id, key) mkey = getkey(T_META, site.id) tmp = cache.get(mkey) longdur = 365*24*60*60 if not tmp: tmp = [tkey] cache.set(mkey, [tkey], longdur) elif tkey not in tmp: tmp.append(tkey) cache.set(mkey, tmp, longdur) cache.set(tkey, data, site.cache_duration)
python
def cache_set(site, key, data): '''Sets cache data for a site. All keys related to a site are stored in a meta key. This key is per-site.''' tkey = getkey(T_ITEM, site.id, key) mkey = getkey(T_META, site.id) tmp = cache.get(mkey) longdur = 365*24*60*60 if not tmp: tmp = [tkey] cache.set(mkey, [tkey], longdur) elif tkey not in tmp: tmp.append(tkey) cache.set(mkey, tmp, longdur) cache.set(tkey, data, site.cache_duration)
[ "def", "cache_set", "(", "site", ",", "key", ",", "data", ")", ":", "tkey", "=", "getkey", "(", "T_ITEM", ",", "site", ".", "id", ",", "key", ")", "mkey", "=", "getkey", "(", "T_META", ",", "site", ".", "id", ")", "tmp", "=", "cache", ".", "get...
Sets cache data for a site. All keys related to a site are stored in a meta key. This key is per-site.
[ "Sets", "cache", "data", "for", "a", "site", ".", "All", "keys", "related", "to", "a", "site", "are", "stored", "in", "a", "meta", "key", ".", "This", "key", "is", "per", "-", "site", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjcache.py#L71-L84
mk-fg/feedjack
feedjack/fjcache.py
cache_delsite
def cache_delsite(site_id): 'Removes all cache data from a site.' mkey = getkey(T_META, site_id) tmp = cache.get(mkey) if not tmp: return for tkey in tmp: cache.delete(tkey) cache.delete(mkey)
python
def cache_delsite(site_id): 'Removes all cache data from a site.' mkey = getkey(T_META, site_id) tmp = cache.get(mkey) if not tmp: return for tkey in tmp: cache.delete(tkey) cache.delete(mkey)
[ "def", "cache_delsite", "(", "site_id", ")", ":", "mkey", "=", "getkey", "(", "T_META", ",", "site_id", ")", "tmp", "=", "cache", ".", "get", "(", "mkey", ")", "if", "not", "tmp", ":", "return", "for", "tkey", "in", "tmp", ":", "cache", ".", "delet...
Removes all cache data from a site.
[ "Removes", "all", "cache", "data", "from", "a", "site", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/fjcache.py#L86-L94
ianclegg/ntlmlib
ntlmlib/security.py
Ntlm1Sealing.wrap
def wrap(self, message): """ NTM GSSwrap() :param message: The message to be encrypted :return: The signed and encrypted message """ cipher_text = _Ntlm1Session.encrypt(self, message) signature = _Ntlm1Session.sign(self, message) return cipher_text, signature
python
def wrap(self, message): """ NTM GSSwrap() :param message: The message to be encrypted :return: The signed and encrypted message """ cipher_text = _Ntlm1Session.encrypt(self, message) signature = _Ntlm1Session.sign(self, message) return cipher_text, signature
[ "def", "wrap", "(", "self", ",", "message", ")", ":", "cipher_text", "=", "_Ntlm1Session", ".", "encrypt", "(", "self", ",", "message", ")", "signature", "=", "_Ntlm1Session", ".", "sign", "(", "self", ",", "message", ")", "return", "cipher_text", ",", "...
NTM GSSwrap() :param message: The message to be encrypted :return: The signed and encrypted message
[ "NTM", "GSSwrap", "()", ":", "param", "message", ":", "The", "message", "to", "be", "encrypted", ":", "return", ":", "The", "signed", "and", "encrypted", "message" ]
train
https://github.com/ianclegg/ntlmlib/blob/49eadfe4701bcce84a4ca9cbab5b6d5d72eaad05/ntlmlib/security.py#L170-L178
ianclegg/ntlmlib
ntlmlib/security.py
Ntlm1Sealing.unwrap
def unwrap(self, message, signature): """ NTLM GSSUnwrap() :param message: The message to be encrypted :return: The signed and encrypted message """ plain_text = _Ntlm1Session.decrypt(self, message) _Ntlm1Session.verify(self, plain_text, signature) return plain_text
python
def unwrap(self, message, signature): """ NTLM GSSUnwrap() :param message: The message to be encrypted :return: The signed and encrypted message """ plain_text = _Ntlm1Session.decrypt(self, message) _Ntlm1Session.verify(self, plain_text, signature) return plain_text
[ "def", "unwrap", "(", "self", ",", "message", ",", "signature", ")", ":", "plain_text", "=", "_Ntlm1Session", ".", "decrypt", "(", "self", ",", "message", ")", "_Ntlm1Session", ".", "verify", "(", "self", ",", "plain_text", ",", "signature", ")", "return",...
NTLM GSSUnwrap() :param message: The message to be encrypted :return: The signed and encrypted message
[ "NTLM", "GSSUnwrap", "()", ":", "param", "message", ":", "The", "message", "to", "be", "encrypted", ":", "return", ":", "The", "signed", "and", "encrypted", "message" ]
train
https://github.com/ianclegg/ntlmlib/blob/49eadfe4701bcce84a4ca9cbab5b6d5d72eaad05/ntlmlib/security.py#L180-L188
ianclegg/ntlmlib
ntlmlib/security.py
_Ntlm2Session._weaken_key
def _weaken_key(flags, key): """ NOTE: Key weakening in NTLM2 (Extended Session Security) is performed simply by truncating the master key (or secondary master key, if key exchange is performed) to the appropriate length. 128-bit keys are supported under NTLM2. In this case, the master key is used directly in the generation of subkeys (with no weakening performed). :param flags: The negotiated NTLM flags :return: The 16-byte key to be used to sign messages """ if flags & NegotiateFlag.NTLMSSP_KEY_128: return key if flags & NegotiateFlag.NTLMSSP_NEGOTIATE_56: return key[:7] else: return key[:5]
python
def _weaken_key(flags, key): """ NOTE: Key weakening in NTLM2 (Extended Session Security) is performed simply by truncating the master key (or secondary master key, if key exchange is performed) to the appropriate length. 128-bit keys are supported under NTLM2. In this case, the master key is used directly in the generation of subkeys (with no weakening performed). :param flags: The negotiated NTLM flags :return: The 16-byte key to be used to sign messages """ if flags & NegotiateFlag.NTLMSSP_KEY_128: return key if flags & NegotiateFlag.NTLMSSP_NEGOTIATE_56: return key[:7] else: return key[:5]
[ "def", "_weaken_key", "(", "flags", ",", "key", ")", ":", "if", "flags", "&", "NegotiateFlag", ".", "NTLMSSP_KEY_128", ":", "return", "key", "if", "flags", "&", "NegotiateFlag", ".", "NTLMSSP_NEGOTIATE_56", ":", "return", "key", "[", ":", "7", "]", "else",...
NOTE: Key weakening in NTLM2 (Extended Session Security) is performed simply by truncating the master key (or secondary master key, if key exchange is performed) to the appropriate length. 128-bit keys are supported under NTLM2. In this case, the master key is used directly in the generation of subkeys (with no weakening performed). :param flags: The negotiated NTLM flags :return: The 16-byte key to be used to sign messages
[ "NOTE", ":", "Key", "weakening", "in", "NTLM2", "(", "Extended", "Session", "Security", ")", "is", "performed", "simply", "by", "truncating", "the", "master", "key", "(", "or", "secondary", "master", "key", "if", "key", "exchange", "is", "performed", ")", ...
train
https://github.com/ianclegg/ntlmlib/blob/49eadfe4701bcce84a4ca9cbab5b6d5d72eaad05/ntlmlib/security.py#L245-L258
ianclegg/ntlmlib
ntlmlib/security.py
_Ntlm2Session.sign
def sign(self, message): """ Generates a signature for the supplied message using NTLM2 Session Security Note: [MS-NLMP] Section 3.4.4 The message signature for NTLM with extended session security is a 16-byte value that contains the following components, as described by the NTLMSSP_MESSAGE_SIGNATURE structure: - A 4-byte version-number value that is set to 1 - The first eight bytes of the message's HMAC_MD5 - The 4-byte sequence number (SeqNum) :param message: The message to be signed :return: The signature for supplied message """ hmac_context = hmac.new(self.outgoing_signing_key) hmac_context.update(struct.pack('<i', self.outgoing_sequence) + message) # If a key exchange key is negotiated the first 8 bytes of the HMAC MD5 are encrypted with RC4 if self.key_exchange: checksum = self.outgoing_seal.update(hmac_context.digest()[:8]) else: checksum = hmac_context.digest()[:8] mac = _Ntlm2MessageSignature() mac['checksum'] = struct.unpack('<q', checksum)[0] mac['sequence'] = self.outgoing_sequence #logger.debug("Signing Sequence Number: %s", str(self.outgoing_sequence)) # Increment the sequence number after signing each message self.outgoing_sequence += 1 return str(mac)
python
def sign(self, message): """ Generates a signature for the supplied message using NTLM2 Session Security Note: [MS-NLMP] Section 3.4.4 The message signature for NTLM with extended session security is a 16-byte value that contains the following components, as described by the NTLMSSP_MESSAGE_SIGNATURE structure: - A 4-byte version-number value that is set to 1 - The first eight bytes of the message's HMAC_MD5 - The 4-byte sequence number (SeqNum) :param message: The message to be signed :return: The signature for supplied message """ hmac_context = hmac.new(self.outgoing_signing_key) hmac_context.update(struct.pack('<i', self.outgoing_sequence) + message) # If a key exchange key is negotiated the first 8 bytes of the HMAC MD5 are encrypted with RC4 if self.key_exchange: checksum = self.outgoing_seal.update(hmac_context.digest()[:8]) else: checksum = hmac_context.digest()[:8] mac = _Ntlm2MessageSignature() mac['checksum'] = struct.unpack('<q', checksum)[0] mac['sequence'] = self.outgoing_sequence #logger.debug("Signing Sequence Number: %s", str(self.outgoing_sequence)) # Increment the sequence number after signing each message self.outgoing_sequence += 1 return str(mac)
[ "def", "sign", "(", "self", ",", "message", ")", ":", "hmac_context", "=", "hmac", ".", "new", "(", "self", ".", "outgoing_signing_key", ")", "hmac_context", ".", "update", "(", "struct", ".", "pack", "(", "'<i'", ",", "self", ".", "outgoing_sequence", "...
Generates a signature for the supplied message using NTLM2 Session Security Note: [MS-NLMP] Section 3.4.4 The message signature for NTLM with extended session security is a 16-byte value that contains the following components, as described by the NTLMSSP_MESSAGE_SIGNATURE structure: - A 4-byte version-number value that is set to 1 - The first eight bytes of the message's HMAC_MD5 - The 4-byte sequence number (SeqNum) :param message: The message to be signed :return: The signature for supplied message
[ "Generates", "a", "signature", "for", "the", "supplied", "message", "using", "NTLM2", "Session", "Security", "Note", ":", "[", "MS", "-", "NLMP", "]", "Section", "3", ".", "4", ".", "4", "The", "message", "signature", "for", "NTLM", "with", "extended", "...
train
https://github.com/ianclegg/ntlmlib/blob/49eadfe4701bcce84a4ca9cbab5b6d5d72eaad05/ntlmlib/security.py#L260-L288